code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
#!/usr/bin/env python
"""Parse various Windows persistence mechanisms into PersistenceFiles."""
from grr.lib import artifact_utils
from grr.lib import parsers
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import paths as rdf_paths
from grr.lib.rdfvalues import standard as rdf_standard
from grr.path_detection import windows as path_detection_windows
class WindowsPersistenceMechanismsParser(parsers.ArtifactFilesParser):
  """Turn various persistence objects into PersistenceFiles."""

  output_types = ["PersistenceFile"]
  supported_artifacts = ["WindowsPersistenceMechanisms"]
  # Required for environment variable expansion
  knowledgebase_dependencies = ["environ_systemdrive", "environ_systemroot"]

  def _GetFilePaths(self, path, pathtype, kb):
    """Guess windows filenames from a commandline string."""
    env_map = artifact_utils.GetWindowsEnvironmentVariablesMap(kb)
    guesses = path_detection_windows.DetectExecutablePaths([path], env_map)
    if not guesses:
      # TODO(user): yield a ParserAnomaly object
      return []
    return [rdf_paths.PathSpec(path=guess, pathtype=pathtype)
            for guess in guesses]

  def Parse(self, persistence, knowledge_base, download_pathtype):
    """Convert persistence collector output to downloadable rdfvalues."""
    pathspecs = []
    source_urn = None

    if isinstance(persistence, rdf_client.WindowsServiceInformation):
      if persistence.HasField("registry_key"):
        source_urn = persistence.registry_key
      if persistence.HasField("binary"):
        pathspecs.append(persistence.binary.pathspec)
      elif persistence.HasField("image_path"):
        pathspecs = self._GetFilePaths(
            persistence.image_path, download_pathtype, knowledge_base)
      # TODO(user): handle empty image_path driver default

    if (isinstance(persistence, rdf_client.StatEntry)
        and persistence.HasField("registry_type")):
      pathspecs = self._GetFilePaths(
          persistence.registry_data.string, download_pathtype, knowledge_base)
      source_urn = persistence.aff4path

    for pathspec in pathspecs:
      yield rdf_standard.PersistenceFile(pathspec=pathspec,
                                         source_urn=source_urn)
|
[
"grr.lib.rdfvalues.paths.PathSpec",
"grr.lib.artifact_utils.GetWindowsEnvironmentVariablesMap",
"grr.path_detection.windows.DetectExecutablePaths",
"grr.lib.rdfvalues.standard.PersistenceFile"
] |
[((865, 917), 'grr.lib.artifact_utils.GetWindowsEnvironmentVariablesMap', 'artifact_utils.GetWindowsEnvironmentVariablesMap', (['kb'], {}), '(kb)\n', (913, 917), False, 'from grr.lib import artifact_utils\n'), ((937, 1003), 'grr.path_detection.windows.DetectExecutablePaths', 'path_detection_windows.DetectExecutablePaths', (['[path]', 'environ_vars'], {}), '([path], environ_vars)\n', (981, 1003), True, 'from grr.path_detection import windows as path_detection_windows\n'), ((1117, 1165), 'grr.lib.rdfvalues.paths.PathSpec', 'rdf_paths.PathSpec', ([], {'path': 'path', 'pathtype': 'pathtype'}), '(path=path, pathtype=pathtype)\n', (1135, 1165), True, 'from grr.lib.rdfvalues import paths as rdf_paths\n'), ((2230, 2300), 'grr.lib.rdfvalues.standard.PersistenceFile', 'rdf_standard.PersistenceFile', ([], {'pathspec': 'pathspec', 'source_urn': 'source_urn'}), '(pathspec=pathspec, source_urn=source_urn)\n', (2258, 2300), True, 'from grr.lib.rdfvalues import standard as rdf_standard\n')]
|
from django.contrib import admin
from django.urls import path
from django.conf.urls import url, include
from rest_framework import routers
from goods import views
# DRF router: auto-generates list/detail routes for the registered viewsets.
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
router.register(r'groups', views.GroupViewSet)

# Order matters: the catch-all router include is tried first; Django falls
# through to the later patterns when no included route matches.
urlpatterns = [
    url(r'^', include(router.urls)),
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    path('admin/', admin.site.urls),
]
|
[
"django.urls.path",
"django.conf.urls.include",
"rest_framework.routers.DefaultRouter"
] |
[((174, 197), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (195, 197), False, 'from rest_framework import routers\n'), ((433, 464), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (437, 464), False, 'from django.urls import path\n'), ((322, 342), 'django.conf.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (329, 342), False, 'from django.conf.urls import url, include\n'), ((368, 426), 'django.conf.urls.include', 'include', (['"""rest_framework.urls"""'], {'namespace': '"""rest_framework"""'}), "('rest_framework.urls', namespace='rest_framework')\n", (375, 426), False, 'from django.conf.urls import url, include\n')]
|
from http.server import HTTPServer, SimpleHTTPRequestHandler
import re
import json
import time
# TCP port the embedded HTTP server listens on.
PORT = 8000
# Set by the __main__ block below; handlers reference it to shut the server down.
server = None
class CustomHandler(SimpleHTTPRequestHandler):
    """Request handler for the regex-tool API, layered on static file serving.

    JSON API endpoints are matched first by regex against ``self.path``;
    anything else falls through to ``SimpleHTTPRequestHandler``'s static
    file / directory-listing behaviour.
    """

    def do_GET(self):
        """Route GET requests: API endpoints first, then static files."""
        if re.search('/api/global_regex', self.path):
            print(self.path.split('/'))
            # Return the whole regex collection verbatim as JSON.
            self.send_response(200)
            self.send_header('Content-type', 'application/json; charset=UTF-8')
            self.end_headers()
            # 'with' guarantees the handle is closed (the original leaked it).
            with open("api/global_regex.json", "r") as f:
                self.wfile.write(f.read().encode())
            return
        if re.search('/api/regex/.*', self.path):
            # Last path segment is the regex token to look up.
            token = self.path.split('/')[-1]
            self.send_response(200)
            self.send_header('Content-type', 'application/json; charset=UTF-8')
            self.end_headers()
            with open("api/global_regex.json", "r") as f:
                all_regex = json.loads(f.read())
            # First entry whose token matches; raises IndexError for an
            # unknown token (original behaviour, kept).
            res_regex = [x for x in all_regex if x["token"] == token][0]
            self.wfile.write(json.dumps(res_regex).encode())
            return
        # Path that should load index.html with modified pyramid_globals data
        # (see the sample payload in version control history) — not implemented.
        if re.search('/api/r/.*', self.path):
            return
        if re.search('/tools/off', self.path):
            # Remote shutdown endpoint: acknowledge, then stop the server.
            self.send_response(200)
            self.send_header('Content-type', 'text/html; charset=UTF-8')
            self.end_headers()
            self.wfile.write("Bye!".encode())
            time.sleep(2)
            server.socket.close()
            quit()
        else:
            # Serve files and directory listings by following self.path from
            # the current working directory.
            try:
                SimpleHTTPRequestHandler.do_GET(self)
            except Exception as err:
                print(err)
                server.socket.close()
                quit()

    def do_POST(self):
        """Handle API POSTs; endpoints below are partially implemented stubs."""
        if re.search('/api/image', self.path):
            # Local import — the original referenced BytesIO without ever
            # importing it, raising NameError on the first request.
            from io import BytesIO

            content_length = int(self.headers['Content-Length'])
            post_data = self.rfile.read(content_length)
            self.send_response(200)
            self.end_headers()
            response = BytesIO()
            response.write(b'This is POST request. ')
            response.write(b'Received: ')
            response.write(post_data)
            # TODO: generate the PNG under /i/<token> and persist the regex
            # into /r/data.json, then render index.html with the stored data.
            self.wfile.write(response.getvalue())
            # The original fell through to the endpoint checks below.
            return
        if re.search('/signup', self.path):
            with open("database/users.json", "r") as fh:
                users = json.loads(fh.read())
            # NOTE(review): opening with "w" truncates users.json without
            # writing anything back — clearly an unfinished stub; behaviour
            # kept, but the handle is now closed instead of leaked.
            f = open("database/users.json", "w")
            f.close()
            return
        if re.search('/login', self.path):
            with open("database/users.json", "r") as fh:
                users = json.loads(fh.read())
            # Load a file with the users and patch the html (unfinished).
            return
        if re.search('/logout', self.path):
            # Not implemented yet.
            return
if __name__ == '__main__':
    # Bind on localhost:PORT and serve until the user hits Ctrl-C.
    try:
        server = HTTPServer(("localhost", PORT), CustomHandler)
        print("SERVING AT PORT", PORT)
        server.serve_forever()
    except KeyboardInterrupt:
        # Release the listening socket on interrupt.
        server.socket.close()
|
[
"http.server.HTTPServer",
"http.server.SimpleHTTPRequestHandler.do_GET",
"json.dumps",
"time.sleep",
"re.search"
] |
[((204, 245), 're.search', 're.search', (['"""/api/global_regex"""', 'self.path'], {}), "('/api/global_regex', self.path)\n", (213, 245), False, 'import re\n'), ((1311, 1344), 're.search', 're.search', (['"""/api/r/.*"""', 'self.path'], {}), "('/api/r/.*', self.path)\n", (1320, 1344), False, 'import re\n'), ((1712, 1746), 're.search', 're.search', (['"""/tools/off"""', 'self.path'], {}), "('/tools/off', self.path)\n", (1721, 1746), False, 'import re\n'), ((2238, 2272), 're.search', 're.search', (['"""/api/image"""', 'self.path'], {}), "('/api/image', self.path)\n", (2247, 2272), False, 'import re\n'), ((3468, 3499), 're.search', 're.search', (['"""/signup"""', 'self.path'], {}), "('/signup', self.path)\n", (3477, 3499), False, 'import re\n'), ((3629, 3659), 're.search', 're.search', (['"""/login"""', 'self.path'], {}), "('/login', self.path)\n", (3638, 3659), False, 'import re\n'), ((3820, 3851), 're.search', 're.search', (['"""/logout"""', 'self.path'], {}), "('/logout', self.path)\n", (3829, 3851), False, 'import re\n'), ((3924, 3970), 'http.server.HTTPServer', 'HTTPServer', (["('localhost', PORT)", 'CustomHandler'], {}), "(('localhost', PORT), CustomHandler)\n", (3934, 3970), False, 'from http.server import HTTPServer, SimpleHTTPRequestHandler\n'), ((663, 700), 're.search', 're.search', (['"""/api/regex/.*"""', 'self.path'], {}), "('/api/regex/.*', self.path)\n", (672, 700), False, 'import re\n'), ((1907, 1920), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1917, 1920), False, 'import time\n'), ((2087, 2124), 'http.server.SimpleHTTPRequestHandler.do_GET', 'SimpleHTTPRequestHandler.do_GET', (['self'], {}), '(self)\n', (2118, 2124), False, 'from http.server import HTTPServer, SimpleHTTPRequestHandler\n'), ((1124, 1145), 'json.dumps', 'json.dumps', (['res_regex'], {}), '(res_regex)\n', (1134, 1145), False, 'import json\n')]
|
# Create your models here.
from copy import copy
from datetime import date
from django import forms
from django.core.serializers.json import DjangoJSONEncoder
from django.db import DataError, IntegrityError, models, transaction
from django.db.models import Q
from import_common.core import (
matchuj_autora,
matchuj_funkcja_autora,
matchuj_grupa_pracownicza,
matchuj_jednostke,
matchuj_wymiar_etatu,
)
from import_common.exceptions import (
BPPDatabaseError,
BPPDatabaseMismatch,
XLSMatchError,
XLSParseError,
)
from import_common.forms import ExcelDateField
from import_common.models import ImportRowMixin
from import_common.normalization import (
normalize_funkcja_autora,
normalize_grupa_pracownicza,
normalize_nullboleanfield,
normalize_wymiar_etatu,
)
from import_common.util import XLSImportFile
from long_running.models import Operation
from long_running.notification_mixins import ASGINotificationMixin
from django.contrib.postgres.fields import JSONField
from bpp.models import (
Autor,
Autor_Jednostka,
Funkcja_Autora,
Grupa_Pracownicza,
Jednostka,
Tytul,
Wymiar_Etatu,
)
class JednostkaForm(forms.Form):
    """Validates the organisational-unit columns of a single XLS row."""

    # Unit name; generous max_length because the source data is unbounded.
    nazwa_jednostki = forms.CharField(max_length=10240)
    # Faculty the unit belongs to.
    wydział = forms.CharField(max_length=500)
class AutorForm(forms.Form):
    """Validates the author / employment columns of a single XLS row."""

    # Surname and given name(s).
    nazwisko = forms.CharField(max_length=200)
    imię = forms.CharField(max_length=200)
    # HR-system identifier (matched to Autor.system_kadrowy_id).
    numer = forms.IntegerField(required=False)
    # ORCID identifier, e.g. "0000-0002-1825-0097" (19 chars with dashes).
    orcid = forms.CharField(max_length=19, required=False)
    # Academic title / degree string.
    tytuł_stopień = forms.CharField(max_length=200, required=False)
    pbn_uuid = forms.UUIDField(required=False)
    # Primary key of an existing Autor in BPP, if known.
    bpp_id = forms.IntegerField(required=False)
    # Job position (mapped to Funkcja_Autora).
    stanowisko = forms.CharField(max_length=200)
    # Employee group (mapped to Grupa_Pracownicza).
    grupa_pracownicza = forms.CharField(max_length=200)
    # Employment start / end dates, parsed from Excel cell values.
    data_zatrudnienia = ExcelDateField()
    data_końca_zatrudnienia = ExcelDateField(required=False)
    # Whether this is the author's primary workplace.
    podstawowe_miejsce_pracy = forms.BooleanField(required=False)
    # Working-time dimension (mapped to Wymiar_Etatu).
    wymiar_etatu = forms.CharField(max_length=200)
class ImportPracownikow(ASGINotificationMixin, Operation):
    """Long-running operation: import employee data from an XLS file.

    Phase 1 (``perform``): validate and match every spreadsheet row,
    persisting each result as an ``ImportPracownikowRow``.  Phase 2
    (``integrate``): apply the detected changes to the matched ``Autor`` /
    ``Autor_Jednostka`` records.  Progress 0.0-0.5 covers phase 1,
    0.5-1.0 covers phase 2.
    """

    # Uploaded spreadsheet to import.
    plik_xls = models.FileField()
    # Phase flags: row parsing done / changes applied.
    performed = models.BooleanField(default=False)
    integrated = models.BooleanField(default=False)

    @transaction.atomic
    def on_reset(self):
        # Clear both phase flags and remove all previously imported rows.
        self.performed = self.integrated = False
        self.importpracownikowrow_set.all().delete()
        self.save()

    def perform(self):
        """Parse, validate and match every XLS row (phase 1), then integrate.

        Raises XLSParseError / XLSMatchError / BPPDatabaseMismatch with the
        offending row attached when a row cannot be processed.
        """
        xif = XLSImportFile(self.plik_xls.path)
        total = xif.count()
        for no, elem in enumerate(xif.data()):
            # --- organisational unit: validate then match against BPP ---
            jednostka_form = JednostkaForm(data=elem)
            jednostka_form.full_clean()
            if not jednostka_form.is_valid():
                raise XLSParseError(elem, jednostka_form, "weryfikacja nazwy jednostki")
            try:
                jednostka = matchuj_jednostke(
                    jednostka_form.cleaned_data.get("nazwa_jednostki"),
                    wydzial=jednostka_form.cleaned_data.get("wydział"),
                )
            except Jednostka.MultipleObjectsReturned:
                raise XLSMatchError(
                    elem, "jednostka", "wiele dopasowań w systemie - po nazwie"
                )
            except Jednostka.DoesNotExist:
                raise XLSMatchError(
                    elem, "jednostka", "brak dopasowania w systemie - po nazwie"
                )
            # --- author columns: validate ---
            autor_form = AutorForm(data=elem)
            autor_form.full_clean()
            if not autor_form.is_valid():
                raise XLSParseError(elem, autor_form, "weryfikacja danych autora")
            assert isinstance(autor_form.cleaned_data.get("data_zatrudnienia"), date)
            data = autor_form.cleaned_data
            # (A disabled matchuj_tytul() validation block used to live here;
            # the title is instead resolved best-effort further below.)
            # --- job position: match or create ---
            try:
                funkcja_autora = matchuj_funkcja_autora(data.get("stanowisko"))
            except Funkcja_Autora.DoesNotExist:
                try:
                    funkcja_autora = Funkcja_Autora.objects.create(
                        nazwa=normalize_funkcja_autora(data.get("stanowisko")),
                        skrot=normalize_funkcja_autora(data.get("stanowisko")),
                    )
                except IntegrityError:
                    raise XLSParseError(
                        elem,
                        autor_form,
                        "nie można utworzyć nowego stanowiska na bazie takich danych",
                    )
            except Funkcja_Autora.MultipleObjectsReturned:
                raise XLSMatchError(
                    elem,
                    "stanowisko",
                    "liczne dopasowania dla takiej funkcji autora (stanowiska) w systemie",
                )
            # --- employee group: match or create ---
            try:
                grupa_pracownicza = matchuj_grupa_pracownicza(
                    data.get("grupa_pracownicza")
                )
            except Grupa_Pracownicza.DoesNotExist:
                grupa_pracownicza = Grupa_Pracownicza.objects.create(
                    nazwa=normalize_grupa_pracownicza(data.get("grupa_pracownicza"))
                )
            # --- working-time dimension: match or create ---
            try:
                wymiar_etatu = matchuj_wymiar_etatu(data.get("wymiar_etatu"))
            except Wymiar_Etatu.DoesNotExist:
                wymiar_etatu = Wymiar_Etatu.objects.create(
                    nazwa=normalize_wymiar_etatu(data.get("wymiar_etatu"))
                )
            # --- author: match by several key combinations ---
            tytul_str = data.get("tytuł_stopień")
            autor = matchuj_autora(  # noqa
                imiona=data.get("imię"),
                nazwisko=data.get("nazwisko"),
                jednostka=jednostka,
                bpp_id=data.get("bpp_id"),
                pbn_uid_id=data.get("pbn_uuid"),
                system_kadrowy_id=data.get("numer"),
                pbn_id=data.get("pbn_id"),
                orcid=data.get("orcid"),
                tytul_str=tytul_str,
            )
            if autor is None:
                raise XLSMatchError(
                    elem, "autor", "brak dopasowania - różne kombinacje"
                )
            # If the file supplies a BPP ID but the author was matched by
            # other attributes, ensure the matched record has the same pk.
            if data.get("bpp_id") is not None:
                if data.get("bpp_id") != autor.pk:
                    raise XLSMatchError(
                        elem,
                        "autor",
                        "BPP ID zmatchowanego autora i BPP ID w pliku XLS nie zgadzają się",
                    )
            # --- author<->unit link: resolve ambiguity by start date ---
            try:
                aj = Autor_Jednostka.objects.get(autor=autor, jednostka=jednostka)
            except Autor_Jednostka.MultipleObjectsReturned:
                # NOTE(review): if "data_zatrudnienia" is missing here, `aj`
                # stays unbound and the code below raises NameError — TODO confirm
                # whether that key is guaranteed by AutorForm at this point.
                if "data_zatrudnienia" in data:
                    try:
                        aj = Autor_Jednostka.objects.get(
                            autor=autor,
                            jednostka=jednostka,
                            rozpoczal_prace=data.get("data_zatrudnienia"),
                        )
                    except Autor_Jednostka.DoesNotExist:
                        raise BPPDatabaseMismatch(
                            elem,
                            "autor + jednostka",
                            "brak jednoznacznego powiązania autor+jednostka po stronie BPP",
                        )
            except Autor_Jednostka.DoesNotExist:
                aj = Autor_Jednostka.objects.create(
                    autor=autor, jednostka=jednostka, funkcja=funkcja_autora
                )
            # --- title: best-effort lookup by name or abbreviation ---
            tytul = None
            try:
                if tytul_str:
                    tytul = Tytul.objects.get(Q(nazwa=tytul_str) | Q(skrot=tytul_str))
            except Tytul.DoesNotExist:
                pass
            # Persist the fully-resolved row and flag whether it needs changes.
            res = ImportPracownikowRow(
                parent=self,
                dane_z_xls=elem,
                dane_znormalizowane=copy(autor_form.cleaned_data),
                autor=autor,
                jednostka=jednostka,
                autor_jednostka=aj,
                tytul=tytul,
                funkcja_autora=funkcja_autora,
                grupa_pracownicza=grupa_pracownicza,
                wymiar_etatu=wymiar_etatu,
                podstawowe_miejsce_pracy=normalize_nullboleanfield(
                    data.get("podstawowe_miejsce_pracy")
                ),
            )
            res.zmiany_potrzebne = res.check_if_integration_needed()
            res.save()
            # First half of the progress bar belongs to this phase.
            if no % 10 == 0:
                self.send_progress(no / total / 2.0)
        self.performed = True
        self.save()
        self.integrate()
        self.integrated = True
        self.save()

    @property
    def zmiany_potrzebne_set(self):
        # Only rows flagged as actually needing database changes.
        return self.importpracownikowrow_set.filter(zmiany_potrzebne=True)

    def get_details_set(self):
        # All rows with the relations needed for display prefetched.
        return self.importpracownikowrow_set.all().select_related(
            "autor",
            "jednostka",
            "jednostka__wydzial",
            "autor__tytul",
            "grupa_pracownicza",
            "funkcja_autora",
            "wymiar_etatu",
        )

    def on_finished(self):
        self.send_processing_finished()

    def integrate(self):
        """Apply changes for every flagged row (phase 2, progress 0.5-1.0)."""
        total = self.zmiany_potrzebne_set.all().count()
        for no, elem in enumerate(
            self.zmiany_potrzebne_set.all().select_related(
                "autor", "jednostka", "jednostka__wydzial", "autor__tytul"
            )
        ):
            elem.integrate()
            self.send_progress(0.5 + (no / total / 2.0))
class ImportPracownikowRow(ImportRowMixin, models.Model):
    """A single parsed + matched XLS row of an ImportPracownikow operation."""

    parent = models.ForeignKey(
        ImportPracownikow,
        on_delete=models.CASCADE,  # related_name="row_set"
    )
    # Raw row exactly as read from the XLS.
    dane_z_xls = JSONField(null=True, blank=True, encoder=DjangoJSONEncoder)
    # Cleaned values produced by AutorForm.
    dane_znormalizowane = JSONField(null=True, blank=True, encoder=DjangoJSONEncoder)
    # Matched records this row resolves to.
    autor = models.ForeignKey(Autor, on_delete=models.CASCADE)
    jednostka = models.ForeignKey(Jednostka, on_delete=models.CASCADE)
    autor_jednostka = models.ForeignKey(Autor_Jednostka, on_delete=models.CASCADE)
    podstawowe_miejsce_pracy = models.NullBooleanField()
    funkcja_autora = models.ForeignKey(Funkcja_Autora, on_delete=models.CASCADE)
    grupa_pracownicza = models.ForeignKey(Grupa_Pracownicza, on_delete=models.CASCADE)
    wymiar_etatu = models.ForeignKey(Wymiar_Etatu, on_delete=models.CASCADE)
    tytul = models.ForeignKey(Tytul, on_delete=models.SET_NULL, null=True)
    # True when integrate() would actually change the database.
    zmiany_potrzebne = models.BooleanField()
    # Human-readable change log written by integrate().
    log_zmian = JSONField(encoder=DjangoJSONEncoder, null=True, blank=True)

    # (normalized-data key, Autor attribute) pairs synced by _integrate_autor.
    MAPPING_DANE_NA_AUTOR = [
        ("numer", "system_kadrowy_id"),
        ("orcid", "orcid"),
        ("pbn_uuid", "pbn_uuid"),
    ]

    @property
    def dane_bardziej_znormalizowane(self):
        """Parse the two date fields, because JSON in PostgreSQL stores them
        as strings and Django's JSON field has no such thing as a JSON
        decoder for date values."""
        for fld in ["data_zatrudnienia", "data_końca_zatrudnienia"]:
            if self.dane_znormalizowane.get(fld):
                v = self.dane_znormalizowane.get(fld)
                # Already a date, or empty — nothing to parse.
                if v is None or isinstance(v, date) or v == "":
                    continue
                self.dane_znormalizowane[fld] = date.fromisoformat(v)
        return self.dane_znormalizowane

    def check_if_integration_needed(self):
        """Return True when any Autor or Autor_Jednostka field differs
        from the values carried by this row."""
        dane = self.dane_bardziej_znormalizowane
        # Autor update check
        a = self.autor

        def _spr(klucz_danych, atrybut_autora):
            # True when the XLS carries a non-empty value differing from Autor.
            v = dane.get(klucz_danych)
            if v is None or str(v) == "":
                return
            if getattr(a, atrybut_autora) != v:
                return True

        for klucz_danych, atrybut_autora in self.MAPPING_DANE_NA_AUTOR:
            if _spr(klucz_danych, atrybut_autora):
                return True
        # Autor_Jednostka update check
        aj = self.autor_jednostka
        if (
            dane.get("data_zatrudnienia") is not None
            and aj.rozpoczal_prace != dane["data_zatrudnienia"]
        ):
            return True
        if (
            dane.get("data_końca_zatrudnienia") is not None
            and aj.zakonczyl_prace != dane["data_końca_zatrudnienia"]
        ):
            return True
        if aj.funkcja != self.funkcja_autora:
            return True
        if aj.grupa_pracownicza != self.grupa_pracownicza:
            return True
        if aj.wymiar_etatu != self.wymiar_etatu:
            return True
        if self.podstawowe_miejsce_pracy != aj.podstawowe_miejsce_pracy:
            return True
        if self.tytul_id != a.tytul_id:
            return True
        return False

    def _integrate_autor(self):
        """Copy the mapped XLS values (and title) onto the matched Autor,
        appending each change to log_zmian["autor"]."""
        dane = self.dane_znormalizowane
        a = self.autor

        def _spr(klucz_danych, atrybut_autora):
            # Same non-empty-and-different test as in check_if_integration_needed.
            v = dane.get(klucz_danych)
            if v is None or (str(v) == ""):
                return
            if getattr(a, atrybut_autora) != v:
                return True

        for klucz_danych, atrybut_autora in self.MAPPING_DANE_NA_AUTOR:
            if _spr(klucz_danych, atrybut_autora):
                self.log_zmian["autor"].append(
                    f"{atrybut_autora} -> {dane.get(klucz_danych)}"
                )
                setattr(a, atrybut_autora, dane.get(klucz_danych))
        if self.tytul_id is not None:
            if a.tytul_id != self.tytul_id:
                a.tytul_id = self.tytul_id
                self.log_zmian["autor"].append(
                    f"<NAME> -> {self.tytul.skrot if self.tytul_id else 'brak'}"
                )
        try:
            a.save()
        except DataError as e:
            # e.g. a value too long for the target column
            raise BPPDatabaseError(self.dane_z_xls, self, f"DataError {e}")

    def _integrate_autor_jednostka(self):
        """Copy employment attributes onto the matched Autor_Jednostka,
        appending each change to log_zmian["autor_jednostka"]."""
        aj = self.autor_jednostka
        dane = self.dane_bardziej_znormalizowane
        if (
            dane.get("data_zatrudnienia") is not None
            and aj.rozpoczal_prace != dane["data_zatrudnienia"]
        ):
            aj.rozpoczal_prace = dane["data_zatrudnienia"]
            self.log_zmian["autor_jednostka"].append(
                f"data zatrudnienia na {dane['data_zatrudnienia']}"
            )
        if (
            dane.get("data_końca_zatrudnienia") is not None
            and aj.zakonczyl_prace != dane["data_końca_zatrudnienia"]
        ):
            aj.zakonczyl_prace = dane["data_końca_zatrudnienia"]
            self.log_zmian["autor_jednostka"].append(
                f"data końca zatrudnienia na {dane['data_końca_zatrudnienia']}"
            )
        if aj.funkcja != self.funkcja_autora:
            aj.funkcja = self.funkcja_autora
            self.log_zmian["autor_jednostka"].append(
                f"funkcja na {self.funkcja_autora}"
            )
        if aj.grupa_pracownicza != self.grupa_pracownicza:
            aj.grupa_pracownicza = self.grupa_pracownicza
            self.log_zmian["autor_jednostka"].append(
                f"grupa_pracownicza na {self.grupa_pracownicza}"
            )
        if aj.wymiar_etatu != self.wymiar_etatu:
            aj.wymiar_etatu = self.wymiar_etatu
            self.log_zmian["autor_jednostka"].append(
                f"wymiar_etatu na {self.wymiar_etatu}"
            )
        if self.podstawowe_miejsce_pracy != aj.podstawowe_miejsce_pracy:
            aj.ustaw_podstawowe_miejsce_pracy()
            self.log_zmian["autor_jednostka"].append("podstawowe_miejsce_pracy")
        aj.save()

    @transaction.atomic
    def integrate(self):
        """Apply this row's changes; only valid when zmiany_potrzebne is set."""
        assert self.zmiany_potrzebne
        self.log_zmian = {"autor": [], "autor_jednostka": []}
        self._integrate_autor()
        self._integrate_autor_jednostka()
        self.save()

    def sformatowany_log_zmian(self):
        """Yield formatted change-log lines for display."""
        if self.log_zmian is None:
            return
        if self.log_zmian["autor"]:
            yield "Zmiany obiektu Autor: " + ", ".join(
                [elem for elem in self.log_zmian["autor"]]
            )
        if self.log_zmian["autor_jednostka"]:
            yield "Zmiany obiektu Autor_Jednostka: " + ", ".join(
                [elem for elem in self.log_zmian["autor_jednostka"]]
            )
        # NOTE(review): this `return <value>` sits inside a generator, so the
        # string becomes the StopIteration value and is never seen by callers
        # iterating the generator — TODO confirm intent.
        if not self.log_zmian:
            return "bez zmian!"
|
[
"django.db.models.NullBooleanField",
"django.db.models.ForeignKey",
"import_common.exceptions.BPPDatabaseMismatch",
"django.contrib.postgres.fields.JSONField",
"datetime.date.fromisoformat",
"django.forms.IntegerField",
"django.forms.BooleanField",
"django.db.models.BooleanField",
"bpp.models.Autor_Jednostka.objects.get",
"import_common.util.XLSImportFile",
"bpp.models.Autor_Jednostka.objects.create",
"django.forms.UUIDField",
"import_common.forms.ExcelDateField",
"import_common.exceptions.XLSParseError",
"django.db.models.FileField",
"copy.copy",
"django.db.models.Q",
"import_common.exceptions.XLSMatchError",
"django.forms.CharField",
"import_common.exceptions.BPPDatabaseError"
] |
[((1223, 1256), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(10240)'}), '(max_length=10240)\n', (1238, 1256), False, 'from django import forms\n'), ((1272, 1303), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(500)'}), '(max_length=500)\n', (1287, 1303), False, 'from django import forms\n'), ((1349, 1380), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1364, 1380), False, 'from django import forms\n'), ((1393, 1424), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1408, 1424), False, 'from django import forms\n'), ((1437, 1471), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(False)'}), '(required=False)\n', (1455, 1471), False, 'from django import forms\n'), ((1484, 1530), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(19)', 'required': '(False)'}), '(max_length=19, required=False)\n', (1499, 1530), False, 'from django import forms\n'), ((1553, 1600), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(200)', 'required': '(False)'}), '(max_length=200, required=False)\n', (1568, 1600), False, 'from django import forms\n'), ((1614, 1645), 'django.forms.UUIDField', 'forms.UUIDField', ([], {'required': '(False)'}), '(required=False)\n', (1629, 1645), False, 'from django import forms\n'), ((1659, 1693), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(False)'}), '(required=False)\n', (1677, 1693), False, 'from django import forms\n'), ((1712, 1743), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1727, 1743), False, 'from django import forms\n'), ((1768, 1799), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1783, 1799), False, 'from django import forms\n'), ((1824, 1840), 'import_common.forms.ExcelDateField', 'ExcelDateField', ([], {}), '()\n', 
(1838, 1840), False, 'from import_common.forms import ExcelDateField\n'), ((1872, 1902), 'import_common.forms.ExcelDateField', 'ExcelDateField', ([], {'required': '(False)'}), '(required=False)\n', (1886, 1902), False, 'from import_common.forms import ExcelDateField\n'), ((1933, 1967), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)'}), '(required=False)\n', (1951, 1967), False, 'from django import forms\n'), ((1987, 2018), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (2002, 2018), False, 'from django import forms\n'), ((2095, 2113), 'django.db.models.FileField', 'models.FileField', ([], {}), '()\n', (2111, 2113), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((2131, 2165), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2150, 2165), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((2183, 2217), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2202, 2217), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((10134, 10196), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ImportPracownikow'], {'on_delete': 'models.CASCADE'}), '(ImportPracownikow, on_delete=models.CASCADE)\n', (10151, 10196), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((10263, 10322), 'django.contrib.postgres.fields.JSONField', 'JSONField', ([], {'null': '(True)', 'blank': '(True)', 'encoder': 'DjangoJSONEncoder'}), '(null=True, blank=True, encoder=DjangoJSONEncoder)\n', (10272, 10322), False, 'from django.contrib.postgres.fields import JSONField\n'), ((10349, 10408), 'django.contrib.postgres.fields.JSONField', 'JSONField', ([], {'null': '(True)', 'blank': '(True)', 'encoder': 'DjangoJSONEncoder'}), '(null=True, blank=True, encoder=DjangoJSONEncoder)\n', 
(10358, 10408), False, 'from django.contrib.postgres.fields import JSONField\n'), ((10422, 10472), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Autor'], {'on_delete': 'models.CASCADE'}), '(Autor, on_delete=models.CASCADE)\n', (10439, 10472), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((10489, 10543), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Jednostka'], {'on_delete': 'models.CASCADE'}), '(Jednostka, on_delete=models.CASCADE)\n', (10506, 10543), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((10566, 10626), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Autor_Jednostka'], {'on_delete': 'models.CASCADE'}), '(Autor_Jednostka, on_delete=models.CASCADE)\n', (10583, 10626), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((10659, 10684), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {}), '()\n', (10682, 10684), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((10706, 10765), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Funkcja_Autora'], {'on_delete': 'models.CASCADE'}), '(Funkcja_Autora, on_delete=models.CASCADE)\n', (10723, 10765), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((10790, 10852), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Grupa_Pracownicza'], {'on_delete': 'models.CASCADE'}), '(Grupa_Pracownicza, on_delete=models.CASCADE)\n', (10807, 10852), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((10872, 10929), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Wymiar_Etatu'], {'on_delete': 'models.CASCADE'}), '(Wymiar_Etatu, on_delete=models.CASCADE)\n', (10889, 10929), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((10942, 11004), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Tytul'], {'on_delete': 'models.SET_NULL', 'null': 
'(True)'}), '(Tytul, on_delete=models.SET_NULL, null=True)\n', (10959, 11004), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((11029, 11050), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (11048, 11050), False, 'from django.db import DataError, IntegrityError, models, transaction\n'), ((11068, 11127), 'django.contrib.postgres.fields.JSONField', 'JSONField', ([], {'encoder': 'DjangoJSONEncoder', 'null': '(True)', 'blank': '(True)'}), '(encoder=DjangoJSONEncoder, null=True, blank=True)\n', (11077, 11127), False, 'from django.contrib.postgres.fields import JSONField\n'), ((2427, 2460), 'import_common.util.XLSImportFile', 'XLSImportFile', (['self.plik_xls.path'], {}), '(self.plik_xls.path)\n', (2440, 2460), False, 'from import_common.util import XLSImportFile\n'), ((2700, 2766), 'import_common.exceptions.XLSParseError', 'XLSParseError', (['elem', 'jednostka_form', '"""weryfikacja nazwy jednostki"""'], {}), "(elem, jednostka_form, 'weryfikacja nazwy jednostki')\n", (2713, 2766), False, 'from import_common.exceptions import BPPDatabaseError, BPPDatabaseMismatch, XLSMatchError, XLSParseError\n'), ((3510, 3570), 'import_common.exceptions.XLSParseError', 'XLSParseError', (['elem', 'autor_form', '"""weryfikacja danych autora"""'], {}), "(elem, autor_form, 'weryfikacja danych autora')\n", (3523, 3570), False, 'from import_common.exceptions import BPPDatabaseError, BPPDatabaseMismatch, XLSMatchError, XLSParseError\n'), ((6460, 6527), 'import_common.exceptions.XLSMatchError', 'XLSMatchError', (['elem', '"""autor"""', '"""brak dopasowania - różne kombinacje"""'], {}), "(elem, 'autor', 'brak dopasowania - różne kombinacje')\n", (6473, 6527), False, 'from import_common.exceptions import BPPDatabaseError, BPPDatabaseMismatch, XLSMatchError, XLSParseError\n'), ((7120, 7181), 'bpp.models.Autor_Jednostka.objects.get', 'Autor_Jednostka.objects.get', ([], {'autor': 'autor', 'jednostka': 'jednostka'}), '(autor=autor, 
jednostka=jednostka)\n', (7147, 7181), False, 'from bpp.models import Autor, Autor_Jednostka, Funkcja_Autora, Grupa_Pracownicza, Jednostka, Tytul, Wymiar_Etatu\n'), ((11794, 11815), 'datetime.date.fromisoformat', 'date.fromisoformat', (['v'], {}), '(v)\n', (11812, 11815), False, 'from datetime import date\n'), ((14230, 14287), 'import_common.exceptions.BPPDatabaseError', 'BPPDatabaseError', (['self.dane_z_xls', 'self', 'f"""DataError {e}"""'], {}), "(self.dane_z_xls, self, f'DataError {e}')\n", (14246, 14287), False, 'from import_common.exceptions import BPPDatabaseError, BPPDatabaseMismatch, XLSMatchError, XLSParseError\n'), ((3070, 3144), 'import_common.exceptions.XLSMatchError', 'XLSMatchError', (['elem', '"""jednostka"""', '"""wiele dopasowań w systemie - po nazwie"""'], {}), "(elem, 'jednostka', 'wiele dopasowań w systemie - po nazwie')\n", (3083, 3144), False, 'from import_common.exceptions import BPPDatabaseError, BPPDatabaseMismatch, XLSMatchError, XLSParseError\n'), ((3249, 3324), 'import_common.exceptions.XLSMatchError', 'XLSMatchError', (['elem', '"""jednostka"""', '"""brak dopasowania w systemie - po nazwie"""'], {}), "(elem, 'jednostka', 'brak dopasowania w systemie - po nazwie')\n", (3262, 3324), False, 'from import_common.exceptions import BPPDatabaseError, BPPDatabaseMismatch, XLSMatchError, XLSParseError\n'), ((5054, 5163), 'import_common.exceptions.XLSMatchError', 'XLSMatchError', (['elem', '"""stanowisko"""', '"""liczne dopasowania dla takiej funkcji autora (stanowiska) w systemie"""'], {}), "(elem, 'stanowisko',\n 'liczne dopasowania dla takiej funkcji autora (stanowiska) w systemie')\n", (5067, 5163), False, 'from import_common.exceptions import BPPDatabaseError, BPPDatabaseMismatch, XLSMatchError, XLSParseError\n'), ((6888, 6989), 'import_common.exceptions.XLSMatchError', 'XLSMatchError', (['elem', '"""autor"""', '"""BPP ID zmatchowanego autora i BPP ID w pliku XLS nie zgadzają się"""'], {}), "(elem, 'autor',\n 'BPP ID zmatchowanego autora i 
BPP ID w pliku XLS nie zgadzają się')\n", (6901, 6989), False, 'from import_common.exceptions import BPPDatabaseError, BPPDatabaseMismatch, XLSMatchError, XLSParseError\n'), ((7944, 8037), 'bpp.models.Autor_Jednostka.objects.create', 'Autor_Jednostka.objects.create', ([], {'autor': 'autor', 'jednostka': 'jednostka', 'funkcja': 'funkcja_autora'}), '(autor=autor, jednostka=jednostka, funkcja=\n funkcja_autora)\n', (7974, 8037), False, 'from bpp.models import Autor, Autor_Jednostka, Funkcja_Autora, Grupa_Pracownicza, Jednostka, Tytul, Wymiar_Etatu\n'), ((8430, 8459), 'copy.copy', 'copy', (['autor_form.cleaned_data'], {}), '(autor_form.cleaned_data)\n', (8434, 8459), False, 'from copy import copy\n'), ((4782, 4880), 'import_common.exceptions.XLSParseError', 'XLSParseError', (['elem', 'autor_form', '"""nie można utworzyć nowego stanowiska na bazie takich danych"""'], {}), "(elem, autor_form,\n 'nie można utworzyć nowego stanowiska na bazie takich danych')\n", (4795, 4880), False, 'from import_common.exceptions import BPPDatabaseError, BPPDatabaseMismatch, XLSMatchError, XLSParseError\n'), ((8190, 8208), 'django.db.models.Q', 'Q', ([], {'nazwa': 'tytul_str'}), '(nazwa=tytul_str)\n', (8191, 8208), False, 'from django.db.models import Q\n'), ((8211, 8229), 'django.db.models.Q', 'Q', ([], {'skrot': 'tytul_str'}), '(skrot=tytul_str)\n', (8212, 8229), False, 'from django.db.models import Q\n'), ((7651, 7766), 'import_common.exceptions.BPPDatabaseMismatch', 'BPPDatabaseMismatch', (['elem', '"""autor + jednostka"""', '"""brak jednoznacznego powiązania autor+jednostka po stronie BPP"""'], {}), "(elem, 'autor + jednostka',\n 'brak jednoznacznego powiązania autor+jednostka po stronie BPP')\n", (7670, 7766), False, 'from import_common.exceptions import BPPDatabaseError, BPPDatabaseMismatch, XLSMatchError, XLSParseError\n')]
|
import os
from fairing.cloud import gcp
from fairing import utils
from fairing.constants import constants
from fairing.kubernetes.manager import client, KubeManager
from fairing.builders.cluster.context_source import ContextSourceInterface
class GCSContextSource(ContextSourceInterface):
def __init__(self,
gcp_project=None,
credentials_file=os.environ.get(constants.GOOGLE_CREDS_ENV),
namespace='default'):
self.gcp_project = gcp_project
self.credentials_file = credentials_file
self.manager = KubeManager()
self.namespace = namespace
def prepare(self, context_filename):
if self.gcp_project is None:
self.gcp_project = gcp.guess_project_name()
self.uploaded_context_url = self.upload_context(context_filename)
def upload_context(self, context_filename):
gcs_uploader = gcp.GCSUploader()
context_hash = utils.crc(context_filename)
return gcs_uploader.upload_to_bucket(
bucket_name=self.gcp_project,
blob_name='fairing_builds/' + context_hash,
file_to_upload=context_filename)
def cleanup(self):
pass
def generate_pod_spec(self, image_name, push):
args = ["--dockerfile=Dockerfile",
"--destination=" + image_name,
"--context=" + self.uploaded_context_url]
if not push:
args.append("--no-push")
return client.V1PodSpec(
containers=[client.V1Container(
name='kaniko',
image='gcr.io/kaniko-project/executor:v0.7.0',
args=["--dockerfile=Dockerfile",
"--destination=" + image_name,
"--context=" + self.uploaded_context_url],
)],
restart_policy='Never'
)
|
[
"fairing.kubernetes.manager.KubeManager",
"fairing.cloud.gcp.guess_project_name",
"fairing.utils.crc",
"fairing.cloud.gcp.GCSUploader",
"os.environ.get",
"fairing.kubernetes.manager.client.V1Container"
] |
[((383, 425), 'os.environ.get', 'os.environ.get', (['constants.GOOGLE_CREDS_ENV'], {}), '(constants.GOOGLE_CREDS_ENV)\n', (397, 425), False, 'import os\n'), ((577, 590), 'fairing.kubernetes.manager.KubeManager', 'KubeManager', ([], {}), '()\n', (588, 590), False, 'from fairing.kubernetes.manager import client, KubeManager\n'), ((907, 924), 'fairing.cloud.gcp.GCSUploader', 'gcp.GCSUploader', ([], {}), '()\n', (922, 924), False, 'from fairing.cloud import gcp\n'), ((948, 975), 'fairing.utils.crc', 'utils.crc', (['context_filename'], {}), '(context_filename)\n', (957, 975), False, 'from fairing import utils\n'), ((736, 760), 'fairing.cloud.gcp.guess_project_name', 'gcp.guess_project_name', ([], {}), '()\n', (758, 760), False, 'from fairing.cloud import gcp\n'), ((1565, 1766), 'fairing.kubernetes.manager.client.V1Container', 'client.V1Container', ([], {'name': '"""kaniko"""', 'image': '"""gcr.io/kaniko-project/executor:v0.7.0"""', 'args': "['--dockerfile=Dockerfile', '--destination=' + image_name, '--context=' +\n self.uploaded_context_url]"}), "(name='kaniko', image=\n 'gcr.io/kaniko-project/executor:v0.7.0', args=[\n '--dockerfile=Dockerfile', '--destination=' + image_name, '--context=' +\n self.uploaded_context_url])\n", (1583, 1766), False, 'from fairing.kubernetes.manager import client, KubeManager\n')]
|
"""
Bilateral Filtering
A bilateral filter is used for smoothening images and reducing noise, while
preserving edges.
"""
import cv2
# read the image
import numpy as np
img = cv2.imread("../images/taj.jpg")
# apply bilateral filter width s = 15
# sigmaColor = sigmaSpace = 75
bilateral = cv2.bilateralFilter(img, 15, 75, 75)
# average filter
average_filter = cv2.blur(img, (5, 5))
# median filter
median_filter = cv2.medianBlur(img, 5)
# gaussian filter
gaussian_filter = cv2.GaussianBlur(img, (5, 5), 0)
# stacking the image side-by-side
res = np.hstack((img, bilateral, average_filter, median_filter, gaussian_filter))
cv2.imshow("image", res)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
[
"cv2.GaussianBlur",
"cv2.medianBlur",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.blur",
"numpy.hstack",
"cv2.bilateralFilter",
"cv2.imread",
"cv2.imshow"
] |
[((191, 222), 'cv2.imread', 'cv2.imread', (['"""../images/taj.jpg"""'], {}), "('../images/taj.jpg')\n", (201, 222), False, 'import cv2\n'), ((305, 341), 'cv2.bilateralFilter', 'cv2.bilateralFilter', (['img', '(15)', '(75)', '(75)'], {}), '(img, 15, 75, 75)\n', (324, 341), False, 'import cv2\n'), ((377, 398), 'cv2.blur', 'cv2.blur', (['img', '(5, 5)'], {}), '(img, (5, 5))\n', (385, 398), False, 'import cv2\n'), ((432, 454), 'cv2.medianBlur', 'cv2.medianBlur', (['img', '(5)'], {}), '(img, 5)\n', (446, 454), False, 'import cv2\n'), ((492, 524), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['img', '(5, 5)', '(0)'], {}), '(img, (5, 5), 0)\n', (508, 524), False, 'import cv2\n'), ((566, 641), 'numpy.hstack', 'np.hstack', (['(img, bilateral, average_filter, median_filter, gaussian_filter)'], {}), '((img, bilateral, average_filter, median_filter, gaussian_filter))\n', (575, 641), True, 'import numpy as np\n'), ((643, 667), 'cv2.imshow', 'cv2.imshow', (['"""image"""', 'res'], {}), "('image', res)\n", (653, 667), False, 'import cv2\n'), ((668, 682), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (679, 682), False, 'import cv2\n'), ((683, 706), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (704, 706), False, 'import cv2\n')]
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import paddle.fluid.profiler as profiler
import paddle.fluid as fluid
import data_reader
from paddle.fluid.dygraph.base import to_variable
import argparse
import functools
from utility import add_arguments, print_arguments, get_attention_feeder_data
from nets import OCRAttention
from eval import evaluate
parser = argparse.ArgumentParser(description=__doc__)
add_arg = functools.partial(add_arguments, argparser=parser)
# yapf: disable
add_arg('batch_size', int, 32, "Minibatch size.")
add_arg('epoch_num', int, 30, "Epoch number.")
add_arg('lr', float, 0.001, "Learning rate.")
add_arg('lr_decay_strategy', str, "", "Learning rate decay strategy.")
add_arg('log_period', int, 200, "Log period.")
add_arg('save_model_period', int, 2000, "Save model period. '-1' means never saving the model.")
add_arg('eval_period', int, 2000, "Evaluate period. '-1' means never evaluating the model.")
add_arg('save_model_dir', str, "./output", "The directory the model to be saved to.")
add_arg('train_images', str, None, "The directory of images to be used for training.")
add_arg('train_list', str, None, "The list file of images to be used for training.")
add_arg('test_images', str, None, "The directory of images to be used for test.")
add_arg('test_list', str, None, "The list file of images to be used for training.")
add_arg('init_model', str, None, "The init model file of directory.")
add_arg('use_gpu', bool, True, "Whether use GPU to train.")
add_arg('parallel', bool, False, "Whether use parallel training.")
add_arg('profile', bool, False, "Whether to use profiling.")
add_arg('skip_batch_num', int, 0, "The number of first minibatches to skip as warm-up for better performance test.")
add_arg('skip_test', bool, False, "Whether to skip test phase.")
# model hyper paramters
add_arg('encoder_size', int, 200, "Encoder size.")
add_arg('decoder_size', int, 128, "Decoder size.")
add_arg('word_vector_dim', int, 128, "Word vector dim.")
add_arg('num_classes', int, 95, "Number classes.")
add_arg('gradient_clip', float, 5.0, "Gradient clip value.")
def train(args):
with fluid.dygraph.guard():
backward_strategy = fluid.dygraph.BackwardStrategy()
backward_strategy.sort_sum_gradient = True
ocr_attention = OCRAttention(batch_size=args.batch_size,
encoder_size=args.encoder_size, decoder_size=args.decoder_size,
num_classes=args.num_classes, word_vector_dim=args.word_vector_dim)
LR = args.lr
if args.lr_decay_strategy == "piecewise_decay":
learning_rate = fluid.layers.piecewise_decay([200000, 250000], [LR, LR * 0.1, LR * 0.01])
else:
learning_rate = LR
optimizer = fluid.optimizer.Adam(learning_rate=learning_rate, parameter_list=ocr_attention.parameters())
grad_clip = fluid.clip.GradientClipByGlobalNorm(args.gradient_clip)
train_reader = data_reader.data_reader(
args.batch_size,
shuffle=True,
images_dir=args.train_images,
list_file=args.train_list,
data_type='train')
test_reader = data_reader.data_reader(
args.batch_size,
images_dir=args.test_images,
list_file=args.test_list,
data_type="test")
if not os.path.exists(args.save_model_dir):
os.makedirs(args.save_model_dir)
total_step = 0
epoch_num = args.epoch_num
for epoch in range(epoch_num):
batch_id = 0
total_loss = 0.0
for data in train_reader():
total_step += 1
data_dict = get_attention_feeder_data(data)
label_in = to_variable(data_dict["label_in"])
label_out = to_variable(data_dict["label_out"])
label_out.stop_gradient = True
img = to_variable(data_dict["pixel"])
prediction = ocr_attention(img, label_in)
prediction = fluid.layers.reshape( prediction, [label_out.shape[0] * label_out.shape[1], -1], inplace=False)
label_out = fluid.layers.reshape(label_out, [-1, 1], inplace=False)
loss = fluid.layers.cross_entropy(
input=prediction, label=label_out)
mask = to_variable(data_dict["mask"])
loss = fluid.layers.elementwise_mul( loss, mask, axis=0)
avg_loss = fluid.layers.reduce_sum(loss)
total_loss += avg_loss.numpy()
avg_loss.backward()
optimizer.minimize(avg_loss, grad_clip=grad_clip)
ocr_attention.clear_gradients()
if batch_id > 0 and batch_id % args.log_period == 0:
print("epoch: {}, batch_id: {}, lr: {}, loss {}".format(epoch, batch_id,
optimizer._global_learning_rate().numpy(),
total_loss / args.batch_size / args.log_period))
total_loss = 0.0
if total_step > 0 and total_step % args.save_model_period == 0:
if fluid.dygraph.parallel.Env().dev_id == 0:
model_file = os.path.join(args.save_model_dir, 'step_{}'.format(total_step))
fluid.save_dygraph(ocr_attention.state_dict(), model_file)
print('step_{}.pdparams saved!'.format(total_step))
if total_step > 0 and total_step % args.eval_period == 0:
ocr_attention.eval()
evaluate(ocr_attention, test_reader, args.batch_size)
ocr_attention.train()
batch_id += 1
if __name__ == '__main__':
args = parser.parse_args()
print_arguments(args)
if args.profile:
if args.use_gpu:
with profiler.cuda_profiler("cuda_profiler.txt", 'csv') as nvprof:
train(args)
else:
with profiler.profiler("CPU", sorted_key='total') as cpuprof:
train(args)
else:
train(args)
|
[
"argparse.ArgumentParser",
"paddle.fluid.profiler.profiler",
"utility.get_attention_feeder_data",
"paddle.fluid.layers.reduce_sum",
"paddle.fluid.profiler.cuda_profiler",
"os.path.exists",
"paddle.fluid.dygraph.guard",
"paddle.fluid.layers.cross_entropy",
"paddle.fluid.dygraph.BackwardStrategy",
"nets.OCRAttention",
"paddle.fluid.dygraph.parallel.Env",
"functools.partial",
"paddle.fluid.layers.reshape",
"paddle.fluid.clip.GradientClipByGlobalNorm",
"paddle.fluid.dygraph.base.to_variable",
"eval.evaluate",
"utility.print_arguments",
"os.makedirs",
"data_reader.data_reader",
"paddle.fluid.layers.elementwise_mul",
"paddle.fluid.layers.piecewise_decay"
] |
[((979, 1023), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (1002, 1023), False, 'import argparse\n'), ((1034, 1084), 'functools.partial', 'functools.partial', (['add_arguments'], {'argparser': 'parser'}), '(add_arguments, argparser=parser)\n', (1051, 1084), False, 'import functools\n'), ((6817, 6838), 'utility.print_arguments', 'print_arguments', (['args'], {}), '(args)\n', (6832, 6838), False, 'from utility import add_arguments, print_arguments, get_attention_feeder_data\n'), ((3037, 3058), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ([], {}), '()\n', (3056, 3058), True, 'import paddle.fluid as fluid\n'), ((3088, 3120), 'paddle.fluid.dygraph.BackwardStrategy', 'fluid.dygraph.BackwardStrategy', ([], {}), '()\n', (3118, 3120), True, 'import paddle.fluid as fluid\n'), ((3197, 3377), 'nets.OCRAttention', 'OCRAttention', ([], {'batch_size': 'args.batch_size', 'encoder_size': 'args.encoder_size', 'decoder_size': 'args.decoder_size', 'num_classes': 'args.num_classes', 'word_vector_dim': 'args.word_vector_dim'}), '(batch_size=args.batch_size, encoder_size=args.encoder_size,\n decoder_size=args.decoder_size, num_classes=args.num_classes,\n word_vector_dim=args.word_vector_dim)\n', (3209, 3377), False, 'from nets import OCRAttention\n'), ((3803, 3858), 'paddle.fluid.clip.GradientClipByGlobalNorm', 'fluid.clip.GradientClipByGlobalNorm', (['args.gradient_clip'], {}), '(args.gradient_clip)\n', (3838, 3858), True, 'import paddle.fluid as fluid\n'), ((3883, 4018), 'data_reader.data_reader', 'data_reader.data_reader', (['args.batch_size'], {'shuffle': '(True)', 'images_dir': 'args.train_images', 'list_file': 'args.train_list', 'data_type': '"""train"""'}), "(args.batch_size, shuffle=True, images_dir=args.\n train_images, list_file=args.train_list, data_type='train')\n", (3906, 4018), False, 'import data_reader\n'), ((4098, 4215), 'data_reader.data_reader', 'data_reader.data_reader', 
(['args.batch_size'], {'images_dir': 'args.test_images', 'list_file': 'args.test_list', 'data_type': '"""test"""'}), "(args.batch_size, images_dir=args.test_images,\n list_file=args.test_list, data_type='test')\n", (4121, 4215), False, 'import data_reader\n'), ((3550, 3623), 'paddle.fluid.layers.piecewise_decay', 'fluid.layers.piecewise_decay', (['[200000, 250000]', '[LR, LR * 0.1, LR * 0.01]'], {}), '([200000, 250000], [LR, LR * 0.1, LR * 0.01])\n', (3578, 3623), True, 'import paddle.fluid as fluid\n'), ((4293, 4328), 'os.path.exists', 'os.path.exists', (['args.save_model_dir'], {}), '(args.save_model_dir)\n', (4307, 4328), False, 'import os\n'), ((4342, 4374), 'os.makedirs', 'os.makedirs', (['args.save_model_dir'], {}), '(args.save_model_dir)\n', (4353, 4374), False, 'import os\n'), ((4628, 4659), 'utility.get_attention_feeder_data', 'get_attention_feeder_data', (['data'], {}), '(data)\n', (4653, 4659), False, 'from utility import add_arguments, print_arguments, get_attention_feeder_data\n'), ((4688, 4722), 'paddle.fluid.dygraph.base.to_variable', 'to_variable', (["data_dict['label_in']"], {}), "(data_dict['label_in'])\n", (4699, 4722), False, 'from paddle.fluid.dygraph.base import to_variable\n'), ((4751, 4786), 'paddle.fluid.dygraph.base.to_variable', 'to_variable', (["data_dict['label_out']"], {}), "(data_dict['label_out'])\n", (4762, 4786), False, 'from paddle.fluid.dygraph.base import to_variable\n'), ((4858, 4889), 'paddle.fluid.dygraph.base.to_variable', 'to_variable', (["data_dict['pixel']"], {}), "(data_dict['pixel'])\n", (4869, 4889), False, 'from paddle.fluid.dygraph.base import to_variable\n'), ((4978, 5077), 'paddle.fluid.layers.reshape', 'fluid.layers.reshape', (['prediction', '[label_out.shape[0] * label_out.shape[1], -1]'], {'inplace': '(False)'}), '(prediction, [label_out.shape[0] * label_out.shape[1], \n -1], inplace=False)\n', (4998, 5077), True, 'import paddle.fluid as fluid\n'), ((5102, 5157), 'paddle.fluid.layers.reshape', 
'fluid.layers.reshape', (['label_out', '[-1, 1]'], {'inplace': '(False)'}), '(label_out, [-1, 1], inplace=False)\n', (5122, 5157), True, 'import paddle.fluid as fluid\n'), ((5181, 5242), 'paddle.fluid.layers.cross_entropy', 'fluid.layers.cross_entropy', ([], {'input': 'prediction', 'label': 'label_out'}), '(input=prediction, label=label_out)\n', (5207, 5242), True, 'import paddle.fluid as fluid\n'), ((5288, 5318), 'paddle.fluid.dygraph.base.to_variable', 'to_variable', (["data_dict['mask']"], {}), "(data_dict['mask'])\n", (5299, 5318), False, 'from paddle.fluid.dygraph.base import to_variable\n'), ((5343, 5391), 'paddle.fluid.layers.elementwise_mul', 'fluid.layers.elementwise_mul', (['loss', 'mask'], {'axis': '(0)'}), '(loss, mask, axis=0)\n', (5371, 5391), True, 'import paddle.fluid as fluid\n'), ((5420, 5449), 'paddle.fluid.layers.reduce_sum', 'fluid.layers.reduce_sum', (['loss'], {}), '(loss)\n', (5443, 5449), True, 'import paddle.fluid as fluid\n'), ((6902, 6952), 'paddle.fluid.profiler.cuda_profiler', 'profiler.cuda_profiler', (['"""cuda_profiler.txt"""', '"""csv"""'], {}), "('cuda_profiler.txt', 'csv')\n", (6924, 6952), True, 'import paddle.fluid.profiler as profiler\n'), ((7023, 7067), 'paddle.fluid.profiler.profiler', 'profiler.profiler', (['"""CPU"""'], {'sorted_key': '"""total"""'}), "('CPU', sorted_key='total')\n", (7040, 7067), True, 'import paddle.fluid.profiler as profiler\n'), ((6626, 6679), 'eval.evaluate', 'evaluate', (['ocr_attention', 'test_reader', 'args.batch_size'], {}), '(ocr_attention, test_reader, args.batch_size)\n', (6634, 6679), False, 'from eval import evaluate\n'), ((6189, 6217), 'paddle.fluid.dygraph.parallel.Env', 'fluid.dygraph.parallel.Env', ([], {}), '()\n', (6215, 6217), True, 'import paddle.fluid as fluid\n')]
|
#!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Extended protorpc descriptors.
This takes existing protorpc Descriptor classes and adds extra
properties not directly supported in proto itself, notably field and
message descriptions. We need this in order to generate protorpc
message files with comments.
Note that for most of these classes, we can't simply wrap the existing
message, since we need to change the type of the subfields. We could
have a "plain" descriptor attached, but that seems like unnecessary
bookkeeping. Where possible, we purposely reuse existing tag numbers;
for new fields, we start numbering at 100.
"""
import abc
import operator
import textwrap
import six
from apitools.base.protorpclite import descriptor as protorpc_descriptor
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
import apitools.base.py as apitools_base
class ExtendedEnumValueDescriptor(messages.Message):
"""Enum value descriptor with additional fields.
Fields:
name: Name of enumeration value.
number: Number of enumeration value.
description: Description of this enum value.
"""
name = messages.StringField(1)
number = messages.IntegerField(2, variant=messages.Variant.INT32)
description = messages.StringField(100)
class ExtendedEnumDescriptor(messages.Message):
"""Enum class descriptor with additional fields.
Fields:
name: Name of Enum without any qualification.
values: Values defined by Enum class.
description: Description of this enum class.
full_name: Fully qualified name of this enum class.
enum_mappings: Mappings from python to JSON names for enum values.
"""
class JsonEnumMapping(messages.Message):
"""Mapping from a python name to the wire name for an enum."""
python_name = messages.StringField(1)
json_name = messages.StringField(2)
name = messages.StringField(1)
values = messages.MessageField(
ExtendedEnumValueDescriptor, 2, repeated=True)
description = messages.StringField(100)
full_name = messages.StringField(101)
enum_mappings = messages.MessageField(
'JsonEnumMapping', 102, repeated=True)
class ExtendedFieldDescriptor(messages.Message):
"""Field descriptor with additional fields.
Fields:
field_descriptor: The underlying field descriptor.
name: The name of this field.
description: Description of this field.
"""
field_descriptor = messages.MessageField(
protorpc_descriptor.FieldDescriptor, 100)
# We duplicate the names for easier bookkeeping.
name = messages.StringField(101)
description = messages.StringField(102)
class ExtendedMessageDescriptor(messages.Message):
"""Message descriptor with additional fields.
Fields:
name: Name of Message without any qualification.
fields: Fields defined for message.
message_types: Nested Message classes defined on message.
enum_types: Nested Enum classes defined on message.
description: Description of this message.
full_name: Full qualified name of this message.
decorators: Decorators to include in the definition when printing.
Printed in the given order from top to bottom (so the last entry
is the innermost decorator).
alias_for: This type is just an alias for the named type.
field_mappings: Mappings from python to json field names.
"""
class JsonFieldMapping(messages.Message):
"""Mapping from a python name to the wire name for a field."""
python_name = messages.StringField(1)
json_name = messages.StringField(2)
name = messages.StringField(1)
fields = messages.MessageField(ExtendedFieldDescriptor, 2, repeated=True)
message_types = messages.MessageField(
'extended_descriptor.ExtendedMessageDescriptor', 3, repeated=True)
enum_types = messages.MessageField(
ExtendedEnumDescriptor, 4, repeated=True)
description = messages.StringField(100)
full_name = messages.StringField(101)
decorators = messages.StringField(102, repeated=True)
alias_for = messages.StringField(103)
field_mappings = messages.MessageField(
'JsonFieldMapping', 104, repeated=True)
class ExtendedFileDescriptor(messages.Message):
"""File descriptor with additional fields.
Fields:
package: Fully qualified name of package that definitions belong to.
message_types: Message definitions contained in file.
enum_types: Enum definitions contained in file.
description: Description of this file.
additional_imports: Extra imports used in this package.
"""
package = messages.StringField(2)
message_types = messages.MessageField(
ExtendedMessageDescriptor, 4, repeated=True)
enum_types = messages.MessageField(
ExtendedEnumDescriptor, 5, repeated=True)
description = messages.StringField(100)
additional_imports = messages.StringField(101, repeated=True)
def _WriteFile(file_descriptor, package, version, proto_printer):
"""Write the given extended file descriptor to the printer."""
proto_printer.PrintPreamble(package, version, file_descriptor)
_PrintEnums(proto_printer, file_descriptor.enum_types)
_PrintMessages(proto_printer, file_descriptor.message_types)
custom_json_mappings = _FetchCustomMappings(file_descriptor.enum_types)
custom_json_mappings.extend(
_FetchCustomMappings(file_descriptor.message_types))
for mapping in custom_json_mappings:
proto_printer.PrintCustomJsonMapping(mapping)
def WriteMessagesFile(file_descriptor, package, version, printer):
"""Write the given extended file descriptor to out as a message file."""
_WriteFile(file_descriptor, package, version,
_Proto2Printer(printer))
def WritePythonFile(file_descriptor, package, version, printer):
"""Write the given extended file descriptor to out."""
_WriteFile(file_descriptor, package, version,
_ProtoRpcPrinter(printer))
def PrintIndentedDescriptions(printer, ls, name, prefix=''):
if ls:
with printer.Indent(indent=prefix):
with printer.CommentContext():
width = printer.CalculateWidth() - len(prefix)
printer()
printer(name + ':')
for x in ls:
description = '%s: %s' % (x.name, x.description)
for line in textwrap.wrap(description, width,
initial_indent=' ',
subsequent_indent=' '):
printer(line)
def _FetchCustomMappings(descriptor_ls):
"""Find and return all custom mappings for descriptors in descriptor_ls."""
custom_mappings = []
for descriptor in descriptor_ls:
if isinstance(descriptor, ExtendedEnumDescriptor):
custom_mappings.extend(
_FormatCustomJsonMapping('Enum', m, descriptor)
for m in descriptor.enum_mappings)
elif isinstance(descriptor, ExtendedMessageDescriptor):
custom_mappings.extend(
_FormatCustomJsonMapping('Field', m, descriptor)
for m in descriptor.field_mappings)
custom_mappings.extend(
_FetchCustomMappings(descriptor.enum_types))
custom_mappings.extend(
_FetchCustomMappings(descriptor.message_types))
return custom_mappings
def _FormatCustomJsonMapping(mapping_type, mapping, descriptor):
return '\n'.join((
'encoding.AddCustomJson%sMapping(' % mapping_type,
" %s, '%s', '%s')" % (descriptor.full_name, mapping.python_name,
mapping.json_name),
))
def _EmptyMessage(message_type):
return not any((message_type.enum_types,
message_type.message_types,
message_type.fields))
class ProtoPrinter(six.with_metaclass(abc.ABCMeta, object)):
"""Interface for proto printers."""
@abc.abstractmethod
def PrintPreamble(self, package, version, file_descriptor):
"""Print the file docstring and import lines."""
@abc.abstractmethod
def PrintEnum(self, enum_type):
"""Print the given enum declaration."""
@abc.abstractmethod
def PrintMessage(self, message_type):
"""Print the given message declaration."""
class _Proto2Printer(ProtoPrinter):
"""Printer for proto2 definitions."""
def __init__(self, printer):
self.__printer = printer
def __PrintEnumCommentLines(self, enum_type):
description = enum_type.description or '%s enum type.' % enum_type.name
for line in textwrap.wrap(description,
self.__printer.CalculateWidth() - 3):
self.__printer('// %s', line)
PrintIndentedDescriptions(self.__printer, enum_type.values, 'Values',
prefix='// ')
def __PrintEnumValueCommentLines(self, enum_value):
if enum_value.description:
width = self.__printer.CalculateWidth() - 3
for line in textwrap.wrap(enum_value.description, width):
self.__printer('// %s', line)
def PrintEnum(self, enum_type):
self.__PrintEnumCommentLines(enum_type)
self.__printer('enum %s {', enum_type.name)
with self.__printer.Indent():
enum_values = sorted(
enum_type.values, key=operator.attrgetter('number'))
for enum_value in enum_values:
self.__printer()
self.__PrintEnumValueCommentLines(enum_value)
self.__printer('%s = %s;', enum_value.name, enum_value.number)
self.__printer('}')
self.__printer()
def PrintPreamble(self, package, version, file_descriptor):
self.__printer('// Generated message classes for %s version %s.',
package, version)
self.__printer('// NOTE: This file is autogenerated and should not be '
'edited by hand.')
description_lines = textwrap.wrap(file_descriptor.description, 75)
if description_lines:
self.__printer('//')
for line in description_lines:
self.__printer('// %s', line)
self.__printer()
self.__printer('syntax = "proto2";')
self.__printer('package %s;', file_descriptor.package)
def __PrintMessageCommentLines(self, message_type):
"""Print the description of this message."""
description = message_type.description or '%s message type.' % (
message_type.name)
width = self.__printer.CalculateWidth() - 3
for line in textwrap.wrap(description, width):
self.__printer('// %s', line)
PrintIndentedDescriptions(self.__printer, message_type.enum_types,
'Enums', prefix='// ')
PrintIndentedDescriptions(self.__printer, message_type.message_types,
'Messages', prefix='// ')
PrintIndentedDescriptions(self.__printer, message_type.fields,
'Fields', prefix='// ')
def __PrintFieldDescription(self, description):
for line in textwrap.wrap(description,
self.__printer.CalculateWidth() - 3):
self.__printer('// %s', line)
def __PrintFields(self, fields):
for extended_field in fields:
field = extended_field.field_descriptor
field_type = messages.Field.lookup_field_type_by_variant(
field.variant)
self.__printer()
self.__PrintFieldDescription(extended_field.description)
label = str(field.label).lower()
if field_type in (messages.EnumField, messages.MessageField):
proto_type = field.type_name
else:
proto_type = str(field.variant).lower()
default_statement = ''
if field.default_value:
if field_type in [messages.BytesField, messages.StringField]:
default_value = '"%s"' % field.default_value
elif field_type is messages.BooleanField:
default_value = str(field.default_value).lower()
else:
default_value = str(field.default_value)
default_statement = ' [default = %s]' % default_value
self.__printer(
'%s %s %s = %d%s;',
label, proto_type, field.name, field.number, default_statement)
def PrintMessage(self, message_type):
self.__printer()
self.__PrintMessageCommentLines(message_type)
if _EmptyMessage(message_type):
self.__printer('message %s {}', message_type.name)
return
self.__printer('message %s {', message_type.name)
with self.__printer.Indent():
_PrintEnums(self, message_type.enum_types)
_PrintMessages(self, message_type.message_types)
self.__PrintFields(message_type.fields)
self.__printer('}')
def PrintCustomJsonMapping(self, mapping_lines):
raise NotImplementedError(
'Custom JSON encoding not supported for proto2')
class _ProtoRpcPrinter(ProtoPrinter):
"""Printer for ProtoRPC definitions."""
def __init__(self, printer):
self.__printer = printer
def __PrintClassSeparator(self):
self.__printer()
if not self.__printer.indent:
self.__printer()
def __PrintEnumDocstringLines(self, enum_type):
description = enum_type.description or '%s enum type.' % enum_type.name
for line in textwrap.wrap('"""%s' % description,
self.__printer.CalculateWidth()):
self.__printer(line)
PrintIndentedDescriptions(self.__printer, enum_type.values, 'Values')
self.__printer('"""')
def PrintEnum(self, enum_type):
self.__printer('class %s(_messages.Enum):', enum_type.name)
with self.__printer.Indent():
self.__PrintEnumDocstringLines(enum_type)
enum_values = sorted(
enum_type.values, key=operator.attrgetter('number'))
for enum_value in enum_values:
self.__printer('%s = %s', enum_value.name, enum_value.number)
if not enum_type.values:
self.__printer('pass')
self.__PrintClassSeparator()
def __PrintAdditionalImports(self, imports):
"""Print additional imports needed for protorpc."""
google_imports = [x for x in imports if 'google' in x]
other_imports = [x for x in imports if 'google' not in x]
if other_imports:
for import_ in sorted(other_imports):
self.__printer(import_)
self.__printer()
# Note: If we ever were going to add imports from this package, we'd
# need to sort those out and put them at the end.
if google_imports:
for import_ in sorted(google_imports):
self.__printer(import_)
self.__printer()
def PrintPreamble(self, package, version, file_descriptor):
self.__printer('"""Generated message classes for %s version %s.',
package, version)
self.__printer()
for line in textwrap.wrap(file_descriptor.description, 78):
self.__printer(line)
self.__printer('"""')
self.__printer('# NOTE: This file is autogenerated and should not be '
'edited by hand.')
self.__printer()
self.__PrintAdditionalImports(file_descriptor.additional_imports)
self.__printer()
self.__printer("package = '%s'", file_descriptor.package)
self.__printer()
self.__printer()
def __PrintMessageDocstringLines(self, message_type):
"""Print the docstring for this message."""
description = message_type.description or '%s message type.' % (
message_type.name)
short_description = (
_EmptyMessage(message_type) and
len(description) < (self.__printer.CalculateWidth() - 6))
with self.__printer.CommentContext():
if short_description:
# Note that we use explicit string interpolation here since
# we're in comment context.
self.__printer('"""%s"""' % description)
return
for line in textwrap.wrap('"""%s' % description,
self.__printer.CalculateWidth()):
self.__printer(line)
PrintIndentedDescriptions(self.__printer, message_type.enum_types,
'Enums')
PrintIndentedDescriptions(
self.__printer, message_type.message_types, 'Messages')
PrintIndentedDescriptions(
self.__printer, message_type.fields, 'Fields')
self.__printer('"""')
self.__printer()
def PrintMessage(self, message_type):
if message_type.alias_for:
self.__printer(
'%s = %s', message_type.name, message_type.alias_for)
self.__PrintClassSeparator()
return
for decorator in message_type.decorators:
self.__printer('@%s', decorator)
self.__printer('class %s(_messages.Message):', message_type.name)
with self.__printer.Indent():
self.__PrintMessageDocstringLines(message_type)
_PrintEnums(self, message_type.enum_types)
_PrintMessages(self, message_type.message_types)
_PrintFields(message_type.fields, self.__printer)
self.__PrintClassSeparator()
def PrintCustomJsonMapping(self, mapping):
    # Mapping lines arrive pre-formatted; emit them verbatim.
    self.__printer(mapping)
def _PrintEnums(proto_printer, enum_types):
"""Print all enums to the given proto_printer."""
enum_types = sorted(enum_types, key=operator.attrgetter('name'))
for enum_type in enum_types:
proto_printer.PrintEnum(enum_type)
def _PrintMessages(proto_printer, message_list):
message_list = sorted(message_list, key=operator.attrgetter('name'))
for message_type in message_list:
proto_printer.PrintMessage(message_type)
# Maps a field's type_name to the specialized field class to use in place of
# a generic _messages.MessageField. Currently only protorpc's DateTimeMessage,
# which gets the richer DateTimeField.
_MESSAGE_FIELD_MAP = {
    message_types.DateTimeMessage.definition_name(): (
        message_types.DateTimeField),
}
def _PrintFields(fields, printer):
    """Print one "name = module.Type(...)" assignment line per field."""
    for extended_field in fields:
        field = extended_field.field_descriptor
        # Pieces of the final assignment line; filled in below and combined
        # through the format string at the end of the loop body.
        printed_field_info = {
            'name': field.name,
            'module': '_messages',
            'type_name': '',
            'type_format': '',
            'number': field.number,
            'label_format': '',
            'variant_format': '',
            'default_format': '',
        }
        # Special-cased message types (see _MESSAGE_FIELD_MAP) come from
        # _message_types; dates from extra_types; everything else is looked
        # up from the field's wire variant.
        message_field = _MESSAGE_FIELD_MAP.get(field.type_name)
        if message_field:
            printed_field_info['module'] = '_message_types'
            field_type = message_field
        elif field.type_name == 'extra_types.DateField':
            printed_field_info['module'] = 'extra_types'
            field_type = apitools_base.DateField
        else:
            field_type = messages.Field.lookup_field_type_by_variant(
                field.variant)
        # Enum/message fields take the referenced type name as first argument.
        if field_type in (messages.EnumField, messages.MessageField):
            printed_field_info['type_format'] = "'%s', " % field.type_name
        if field.label == protorpc_descriptor.FieldDescriptor.Label.REQUIRED:
            printed_field_info['label_format'] = ', required=True'
        elif field.label == protorpc_descriptor.FieldDescriptor.Label.REPEATED:
            printed_field_info['label_format'] = ', repeated=True'
        # Emit an explicit variant only when it differs from the default.
        if field_type.DEFAULT_VARIANT != field.variant:
            printed_field_info['variant_format'] = (
                ', variant=_messages.Variant.%s' % field.variant)
        if field.default_value:
            if field_type in [messages.BytesField, messages.StringField]:
                default_value = repr(field.default_value)
            elif field_type is messages.EnumField:
                # Numeric enum defaults print as ints; symbolic ones fall
                # back to their repr.
                try:
                    default_value = str(int(field.default_value))
                except ValueError:
                    default_value = repr(field.default_value)
            else:
                default_value = field.default_value
            printed_field_info[
                'default_format'] = ', default=%s' % (default_value,)
        printed_field_info['type_name'] = field_type.__name__
        # Builds '%(type_format)s%(number)s...'. Note the generator expression
        # reuses the name `field`, shadowing the descriptor inside it.
        args = ''.join('%%(%s)s' % field for field in (
            'type_format',
            'number',
            'label_format',
            'variant_format',
            'default_format'))
        format_str = '%%(name)s = %%(module)s.%%(type_name)s(%s)' % args
        printer(format_str % printed_field_info)
|
[
"apitools.base.protorpclite.messages.IntegerField",
"six.with_metaclass",
"textwrap.wrap",
"operator.attrgetter",
"apitools.base.protorpclite.message_types.DateTimeMessage.definition_name",
"apitools.base.protorpclite.messages.Field.lookup_field_type_by_variant",
"apitools.base.protorpclite.messages.MessageField",
"apitools.base.protorpclite.messages.StringField"
] |
[((8583, 8622), 'six.with_metaclass', 'six.with_metaclass', (['abc.ABCMeta', 'object'], {}), '(abc.ABCMeta, object)\n', (8601, 8622), False, 'import six\n'), ((1732, 1755), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(1)'], {}), '(1)\n', (1752, 1755), False, 'from apitools.base.protorpclite import messages\n'), ((1769, 1825), 'apitools.base.protorpclite.messages.IntegerField', 'messages.IntegerField', (['(2)'], {'variant': 'messages.Variant.INT32'}), '(2, variant=messages.Variant.INT32)\n', (1790, 1825), False, 'from apitools.base.protorpclite import messages\n'), ((1845, 1870), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(100)'], {}), '(100)\n', (1865, 1870), False, 'from apitools.base.protorpclite import messages\n'), ((2494, 2517), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(1)'], {}), '(1)\n', (2514, 2517), False, 'from apitools.base.protorpclite import messages\n'), ((2531, 2599), 'apitools.base.protorpclite.messages.MessageField', 'messages.MessageField', (['ExtendedEnumValueDescriptor', '(2)'], {'repeated': '(True)'}), '(ExtendedEnumValueDescriptor, 2, repeated=True)\n', (2552, 2599), False, 'from apitools.base.protorpclite import messages\n'), ((2628, 2653), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(100)'], {}), '(100)\n', (2648, 2653), False, 'from apitools.base.protorpclite import messages\n'), ((2670, 2695), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(101)'], {}), '(101)\n', (2690, 2695), False, 'from apitools.base.protorpclite import messages\n'), ((2716, 2776), 'apitools.base.protorpclite.messages.MessageField', 'messages.MessageField', (['"""JsonEnumMapping"""', '(102)'], {'repeated': '(True)'}), "('JsonEnumMapping', 102, repeated=True)\n", (2737, 2776), False, 'from apitools.base.protorpclite import messages\n'), ((3069, 3132), 'apitools.base.protorpclite.messages.MessageField', 
'messages.MessageField', (['protorpc_descriptor.FieldDescriptor', '(100)'], {}), '(protorpc_descriptor.FieldDescriptor, 100)\n', (3090, 3132), False, 'from apitools.base.protorpclite import messages\n'), ((3206, 3231), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(101)'], {}), '(101)\n', (3226, 3231), False, 'from apitools.base.protorpclite import messages\n'), ((3250, 3275), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(102)'], {}), '(102)\n', (3270, 3275), False, 'from apitools.base.protorpclite import messages\n'), ((4258, 4281), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(1)'], {}), '(1)\n', (4278, 4281), False, 'from apitools.base.protorpclite import messages\n'), ((4295, 4359), 'apitools.base.protorpclite.messages.MessageField', 'messages.MessageField', (['ExtendedFieldDescriptor', '(2)'], {'repeated': '(True)'}), '(ExtendedFieldDescriptor, 2, repeated=True)\n', (4316, 4359), False, 'from apitools.base.protorpclite import messages\n'), ((4380, 4472), 'apitools.base.protorpclite.messages.MessageField', 'messages.MessageField', (['"""extended_descriptor.ExtendedMessageDescriptor"""', '(3)'], {'repeated': '(True)'}), "('extended_descriptor.ExtendedMessageDescriptor', 3,\n repeated=True)\n", (4401, 4472), False, 'from apitools.base.protorpclite import messages\n'), ((4495, 4558), 'apitools.base.protorpclite.messages.MessageField', 'messages.MessageField', (['ExtendedEnumDescriptor', '(4)'], {'repeated': '(True)'}), '(ExtendedEnumDescriptor, 4, repeated=True)\n', (4516, 4558), False, 'from apitools.base.protorpclite import messages\n'), ((4587, 4612), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(100)'], {}), '(100)\n', (4607, 4612), False, 'from apitools.base.protorpclite import messages\n'), ((4629, 4654), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(101)'], {}), '(101)\n', (4649, 4654), False, 
'from apitools.base.protorpclite import messages\n'), ((4672, 4712), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(102)'], {'repeated': '(True)'}), '(102, repeated=True)\n', (4692, 4712), False, 'from apitools.base.protorpclite import messages\n'), ((4729, 4754), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(103)'], {}), '(103)\n', (4749, 4754), False, 'from apitools.base.protorpclite import messages\n'), ((4776, 4837), 'apitools.base.protorpclite.messages.MessageField', 'messages.MessageField', (['"""JsonFieldMapping"""', '(104)'], {'repeated': '(True)'}), "('JsonFieldMapping', 104, repeated=True)\n", (4797, 4837), False, 'from apitools.base.protorpclite import messages\n'), ((5276, 5299), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(2)'], {}), '(2)\n', (5296, 5299), False, 'from apitools.base.protorpclite import messages\n'), ((5321, 5387), 'apitools.base.protorpclite.messages.MessageField', 'messages.MessageField', (['ExtendedMessageDescriptor', '(4)'], {'repeated': '(True)'}), '(ExtendedMessageDescriptor, 4, repeated=True)\n', (5342, 5387), False, 'from apitools.base.protorpclite import messages\n'), ((5414, 5477), 'apitools.base.protorpclite.messages.MessageField', 'messages.MessageField', (['ExtendedEnumDescriptor', '(5)'], {'repeated': '(True)'}), '(ExtendedEnumDescriptor, 5, repeated=True)\n', (5435, 5477), False, 'from apitools.base.protorpclite import messages\n'), ((5506, 5531), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(100)'], {}), '(100)\n', (5526, 5531), False, 'from apitools.base.protorpclite import messages\n'), ((5557, 5597), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(101)'], {'repeated': '(True)'}), '(101, repeated=True)\n', (5577, 5597), False, 'from apitools.base.protorpclite import messages\n'), ((19018, 19065), 
'apitools.base.protorpclite.message_types.DateTimeMessage.definition_name', 'message_types.DateTimeMessage.definition_name', ([], {}), '()\n', (19063, 19065), False, 'from apitools.base.protorpclite import message_types\n'), ((2414, 2437), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(1)'], {}), '(1)\n', (2434, 2437), False, 'from apitools.base.protorpclite import messages\n'), ((2458, 2481), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(2)'], {}), '(2)\n', (2478, 2481), False, 'from apitools.base.protorpclite import messages\n'), ((4178, 4201), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(1)'], {}), '(1)\n', (4198, 4201), False, 'from apitools.base.protorpclite import messages\n'), ((4222, 4245), 'apitools.base.protorpclite.messages.StringField', 'messages.StringField', (['(2)'], {}), '(2)\n', (4242, 4245), False, 'from apitools.base.protorpclite import messages\n'), ((10747, 10793), 'textwrap.wrap', 'textwrap.wrap', (['file_descriptor.description', '(75)'], {}), '(file_descriptor.description, 75)\n', (10760, 10793), False, 'import textwrap\n'), ((11365, 11398), 'textwrap.wrap', 'textwrap.wrap', (['description', 'width'], {}), '(description, width)\n', (11378, 11398), False, 'import textwrap\n'), ((16051, 16097), 'textwrap.wrap', 'textwrap.wrap', (['file_descriptor.description', '(78)'], {}), '(file_descriptor.description, 78)\n', (16064, 16097), False, 'import textwrap\n'), ((9777, 9821), 'textwrap.wrap', 'textwrap.wrap', (['enum_value.description', 'width'], {}), '(enum_value.description, width)\n', (9790, 9821), False, 'import textwrap\n'), ((12208, 12266), 'apitools.base.protorpclite.messages.Field.lookup_field_type_by_variant', 'messages.Field.lookup_field_type_by_variant', (['field.variant'], {}), '(field.variant)\n', (12251, 12266), False, 'from apitools.base.protorpclite import messages\n'), ((18673, 18700), 'operator.attrgetter', 'operator.attrgetter', 
(['"""name"""'], {}), "('name')\n", (18692, 18700), False, 'import operator\n'), ((18873, 18900), 'operator.attrgetter', 'operator.attrgetter', (['"""name"""'], {}), "('name')\n", (18892, 18900), False, 'import operator\n'), ((19924, 19982), 'apitools.base.protorpclite.messages.Field.lookup_field_type_by_variant', 'messages.Field.lookup_field_type_by_variant', (['field.variant'], {}), '(field.variant)\n', (19967, 19982), False, 'from apitools.base.protorpclite import messages\n'), ((7059, 7144), 'textwrap.wrap', 'textwrap.wrap', (['description', 'width'], {'initial_indent': '""" """', 'subsequent_indent': '""" """'}), "(description, width, initial_indent=' ', subsequent_indent=' '\n )\n", (7072, 7144), False, 'import textwrap\n'), ((10116, 10145), 'operator.attrgetter', 'operator.attrgetter', (['"""number"""'], {}), "('number')\n", (10135, 10145), False, 'import operator\n'), ((14895, 14924), 'operator.attrgetter', 'operator.attrgetter', (['"""number"""'], {}), "('number')\n", (14914, 14924), False, 'import operator\n')]
|
from django.conf.urls import url
from django.contrib.auth import views as django_views
from . import views
# URL routes for the accounts app. Every pattern is anchored with ^...$ so
# it matches the full path handed to this urlconf.
urlpatterns = [
    url(r'^login/$', views.login, name='account_login'),
    url(r'^logout/$', views.logout, name='account_logout'),
    url(r'^signup/$', views.signup, name='account_signup'),
    url(r'^password/reset/$', views.password_reset,
        name='account_reset_password'),
    url(r'^password/reset/done/$', django_views.PasswordResetDoneView.as_view(
        template_name='account/password_reset_done.html'),
        name='account_reset_password_done'),
    url(r'^password/reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',  # noqa
        views.password_reset_confirm, name='account_reset_password_confirm'),
    # BUG FIX: the pattern was missing the leading '^' anchor, so it matched
    # any URL whose path merely ended with 'password/reset/complete/'.
    url(r'^password/reset/complete/$', django_views.PasswordResetCompleteView.as_view(  # noqa
        template_name='account/password_reset_from_key_done.html'),
        name='account_reset_password_complete'),
]
|
[
"django.contrib.auth.views.PasswordResetDoneView.as_view",
"django.conf.urls.url",
"django.contrib.auth.views.PasswordResetCompleteView.as_view"
] |
[((129, 179), 'django.conf.urls.url', 'url', (['"""^login/$"""', 'views.login'], {'name': '"""account_login"""'}), "('^login/$', views.login, name='account_login')\n", (132, 179), False, 'from django.conf.urls import url\n'), ((186, 239), 'django.conf.urls.url', 'url', (['"""^logout/$"""', 'views.logout'], {'name': '"""account_logout"""'}), "('^logout/$', views.logout, name='account_logout')\n", (189, 239), False, 'from django.conf.urls import url\n'), ((246, 299), 'django.conf.urls.url', 'url', (['"""^signup/$"""', 'views.signup'], {'name': '"""account_signup"""'}), "('^signup/$', views.signup, name='account_signup')\n", (249, 299), False, 'from django.conf.urls import url\n'), ((306, 383), 'django.conf.urls.url', 'url', (['"""^password/reset/$"""', 'views.password_reset'], {'name': '"""account_reset_password"""'}), "('^password/reset/$', views.password_reset, name='account_reset_password')\n", (309, 383), False, 'from django.conf.urls import url\n'), ((581, 755), 'django.conf.urls.url', 'url', (['"""^password/reset/(?P<uidb64>[0-9A-Za-z_\\\\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$"""', 'views.password_reset_confirm'], {'name': '"""account_reset_password_confirm"""'}), "('^password/reset/(?P<uidb64>[0-9A-Za-z_\\\\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$'\n , views.password_reset_confirm, name='account_reset_password_confirm')\n", (584, 755), False, 'from django.conf.urls import url\n'), ((429, 526), 'django.contrib.auth.views.PasswordResetDoneView.as_view', 'django_views.PasswordResetDoneView.as_view', ([], {'template_name': '"""account/password_reset_done.html"""'}), "(template_name=\n 'account/password_reset_done.html')\n", (471, 526), True, 'from django.contrib.auth import views as django_views\n'), ((806, 916), 'django.contrib.auth.views.PasswordResetCompleteView.as_view', 'django_views.PasswordResetCompleteView.as_view', ([], {'template_name': '"""account/password_reset_from_key_done.html"""'}), "(template_name=\n 
'account/password_reset_from_key_done.html')\n", (852, 916), True, 'from django.contrib.auth import views as django_views\n')]
|
# coding=utf-8
from __future__ import unicode_literals
import os
from mock import patch
from django_app.test import PyW4CTestCase
from utils import gui, path, tmp, patterns
class UtilsTest(PyW4CTestCase):
    """Unit tests for the gui, tmp and patterns utility modules."""

    # Directory handed to the patched tmp settings for temp-file creation.
    temp_dir = 'temp'

    @staticmethod
    @patch('subprocess.check_call')
    def test_gui_open_file(subprocess_check_call):
        """gui.open_file must invoke the 'open' binary from the bin dir."""
        filename = 'test.png'
        gui.open_file(filename)
        subprocess_check_call.assert_called_with([
            os.path.join(path.get_bin_dir(), 'open'), filename
        ])

    @patch('utils.tmp.settings', TEMP_DIR=temp_dir)
    def test_get_temp_filename(self, settings):  # pylint: disable=W0613
        """get_temp_filename creates files in TEMP_DIR and honours extensions."""
        nb_temp_files_before = len(os.listdir(self.temp_dir))
        temp_filename = tmp.get_temp_filename()
        self.assertIn(self.temp_dir, temp_filename)
        os.remove(temp_filename)
        extension = '.png'
        temp_filename = tmp.get_temp_filename(extension)
        self.assertIn(extension, temp_filename)
        os.remove(temp_filename)
        # FIX: assertEquals is a deprecated alias; use assertEqual.
        # No stray files should be left behind after cleanup.
        self.assertEqual(len(os.listdir(self.temp_dir)),
                         nb_temp_files_before)

    def test_patterns_singleton(self):
        """Classes using patterns.Singleton share a single instance."""
        val = 42

        class _TestSingleton(object):
            # Python 2 style metaclass declaration (this file targets Py2,
            # cf. the unicode_literals future import).
            __metaclass__ = patterns.Singleton
            var = val

            def get_val(self):
                return self.var

            def set_var(self, new_val):
                self.var = new_val

        self.assertTrue(_TestSingleton() is _TestSingleton())
        self.assertEqual(id(_TestSingleton()), id(_TestSingleton()))
        self.assertEqual(_TestSingleton().get_val(), val)
        self.assertEqual(_TestSingleton().var, val)
        new_val = 21
        _TestSingleton().var = new_val
        self.assertEqual(_TestSingleton().var, new_val)
        _TestSingleton().set_var(val)
        self.assertEqual(_TestSingleton().get_val(), val)
|
[
"os.remove",
"utils.path.get_bin_dir",
"utils.gui.open_file",
"mock.patch",
"utils.tmp.get_temp_filename",
"os.listdir"
] |
[((257, 287), 'mock.patch', 'patch', (['"""subprocess.check_call"""'], {}), "('subprocess.check_call')\n", (262, 287), False, 'from mock import patch\n'), ((534, 580), 'mock.patch', 'patch', (['"""utils.tmp.settings"""'], {'TEMP_DIR': 'temp_dir'}), "('utils.tmp.settings', TEMP_DIR=temp_dir)\n", (539, 580), False, 'from mock import patch\n'), ((378, 401), 'utils.gui.open_file', 'gui.open_file', (['filename'], {}), '(filename)\n', (391, 401), False, 'from utils import gui, path, tmp, patterns\n'), ((741, 764), 'utils.tmp.get_temp_filename', 'tmp.get_temp_filename', ([], {}), '()\n', (762, 764), False, 'from utils import gui, path, tmp, patterns\n'), ((827, 851), 'os.remove', 'os.remove', (['temp_filename'], {}), '(temp_filename)\n', (836, 851), False, 'import os\n'), ((905, 937), 'utils.tmp.get_temp_filename', 'tmp.get_temp_filename', (['extension'], {}), '(extension)\n', (926, 937), False, 'from utils import gui, path, tmp, patterns\n'), ((996, 1020), 'os.remove', 'os.remove', (['temp_filename'], {}), '(temp_filename)\n', (1005, 1020), False, 'import os\n'), ((689, 714), 'os.listdir', 'os.listdir', (['self.temp_dir'], {}), '(self.temp_dir)\n', (699, 714), False, 'import os\n'), ((1052, 1077), 'os.listdir', 'os.listdir', (['self.temp_dir'], {}), '(self.temp_dir)\n', (1062, 1077), False, 'import os\n'), ((479, 497), 'utils.path.get_bin_dir', 'path.get_bin_dir', ([], {}), '()\n', (495, 497), False, 'from utils import gui, path, tmp, patterns\n')]
|
import torch
from tqdm import tqdm
from codes.utils.moving_average import MovingAverage
class ExperimentRunner:
    """Drives training and evaluation of a model on a problem definition."""

    # Number of recent batches averaged for the progress-bar statistics.
    moving_average_window_size = 100

    def __init__(self, configuration):
        # Nested dict of settings; training options live under "training".
        self.configuration = configuration

    def evaluate_model(self, model, problem, split, gpu_number=-1):
        """Score `model` on the given split and return the aggregate score.

        Runs with gradients disabled and batch size 1, showing a running
        mean of per-example scores; the final score comes from the
        evaluator's stats aggregation.
        """
        problem.evaluator.set_mode(split)
        # Use the requested GPU only if CUDA is available and the index is valid.
        device = torch.device('cuda:' + str(gpu_number) if torch.cuda.is_available() and gpu_number >= 0 else 'cpu')
        model.set_device(device)
        batch_size = 1
        with torch.no_grad():
            model.eval()
            problem.initialize_epoch()
            score_moving_average = MovingAverage(window_size=self.moving_average_window_size)
            batch_iterator = tqdm(problem.iterate_batches(batch_size=batch_size, split=split),
                                  total=problem.approximate_batch_count(batch_size=batch_size, split=split),
                                  dynamic_ncols=True,
                                  smoothing=0.0)
            all_stats = []
            for i, batch in enumerate(batch_iterator):
                _, predictions = model(batch)
                # NOTE(review): `e` (the paired example) is unpacked but never
                # used; score_example/get_stats work from the prediction alone.
                for p, e in zip(predictions, batch):
                    score = problem.evaluator.score_example(p)
                    score_moving_average.register(float(score))
                    stats = problem.evaluator.get_stats(p)
                    all_stats.append(stats)
                batch_iterator.set_description("Evaluation mean score={0:.4f})".format(
                    score_moving_average.get_value()))
            # Aggregate per-example stats into the split-level metric.
            true_score = problem.evaluator.evaluate_stats(all_stats, split)
            # NOTE(review): message says 'dev' regardless of `split`.
            print("True dev score: " + str(true_score))
            return true_score

    def train_model(self, model, problem, gpu_number=-1):
        """Train `model` with Adam, periodically evaluating on "dev" and
        checkpointing the best model; returns the model re-loaded from the
        best checkpoint on disk.
        """
        batch_size = self.configuration["training"]["batch_size"]
        max_epochs = self.configuration["training"]["max_epochs"]
        train_split = self.configuration["training"]["train_split"]
        test_every_n = self.configuration["training"]["test_every_n"]
        save_path = self.configuration["training"]["save_path"]
        learning_rate = self.configuration["training"]["learning_rate"]
        # Optional gradient accumulation: step the optimizer only once per
        # `batch_size_multiplier` batches to emulate a larger batch size.
        if "batch_size_multiplier" in self.configuration["training"] and self.configuration["training"]["batch_size_multiplier"] > 1:
            batch_size_multiplier = self.configuration["training"]["batch_size_multiplier"]
            update_counter = 0
        else:
            batch_size_multiplier = None
        device = torch.device('cuda:' + str(gpu_number) if torch.cuda.is_available() and gpu_number >= 0 else 'cpu')
        model.set_device(device)
        optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
        best_dev_performance = None
        for epoch in range(max_epochs):
            problem.evaluator.set_mode("train")
            problem.initialize_epoch()
            loss_moving_average = MovingAverage(window_size=self.moving_average_window_size)
            score_moving_average = MovingAverage(window_size=self.moving_average_window_size)
            batch_iterator = tqdm(problem.iterate_batches(batch_size=batch_size, split=train_split),
                                  total=problem.approximate_batch_count(batch_size=batch_size, split=train_split),
                                  dynamic_ncols=True,
                                  smoothing=0.0)
            for i, batch in enumerate(batch_iterator):
                model.train()
                if batch_size_multiplier is not None:
                    # Zero grads only at the start of an accumulation window.
                    if update_counter % batch_size_multiplier == 0:
                        optimizer.zero_grad()
                    update_counter += 1
                else:
                    optimizer.zero_grad()
                loss, predictions = model(batch)
                loss = loss.mean()
                loss.backward()
                if batch_size_multiplier is not None:
                    # Clip and step only at the end of an accumulation window.
                    if update_counter % batch_size_multiplier == 0:
                        clipping_value = 1
                        torch.nn.utils.clip_grad_value_(model.parameters(), clipping_value)
                        optimizer.step()
                else:
                    clipping_value = 1
                    torch.nn.utils.clip_grad_value_(model.parameters(), clipping_value)
                    optimizer.step()
                loss_moving_average.register(float(loss.detach()))
                pred_score = problem.evaluator.score_batch(predictions)
                score_moving_average.register(float(pred_score))
                batch_iterator.set_description("Epoch " + str(epoch) + " (mean loss={0:.4f}, mean score={1:.4f})".format(
                    loss_moving_average.get_value(),
                    score_moving_average.get_value()))
            if (epoch + 1) % test_every_n == 0:
                dev_performance = self.evaluate_model(model, problem, "dev", gpu_number=gpu_number)
                # Keep the checkpoint with the best dev performance so far.
                if best_dev_performance is None or problem.evaluator.compare_performance(best_dev_performance, dev_performance):
                    best_dev_performance = dev_performance
                    model.save(save_path)
        model.load(save_path)
        return model
|
[
"torch.no_grad",
"torch.cuda.is_available",
"codes.utils.moving_average.MovingAverage"
] |
[((535, 550), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (548, 550), False, 'import torch\n'), ((651, 709), 'codes.utils.moving_average.MovingAverage', 'MovingAverage', ([], {'window_size': 'self.moving_average_window_size'}), '(window_size=self.moving_average_window_size)\n', (664, 709), False, 'from codes.utils.moving_average import MovingAverage\n'), ((2939, 2997), 'codes.utils.moving_average.MovingAverage', 'MovingAverage', ([], {'window_size': 'self.moving_average_window_size'}), '(window_size=self.moving_average_window_size)\n', (2952, 2997), False, 'from codes.utils.moving_average import MovingAverage\n'), ((3033, 3091), 'codes.utils.moving_average.MovingAverage', 'MovingAverage', ([], {'window_size': 'self.moving_average_window_size'}), '(window_size=self.moving_average_window_size)\n', (3046, 3091), False, 'from codes.utils.moving_average import MovingAverage\n'), ((406, 431), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (429, 431), False, 'import torch\n'), ((2573, 2598), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2596, 2598), False, 'import torch\n')]
|
import logging
import requests
from .auth import MpesaBase
# Module-level logger for the C2B client, named after this module.
logger = logging.getLogger(__name__)
class C2B(MpesaBase):
    """Client wrapper for M-Pesa's Customer-to-Business (C2B) API."""

    def __init__(self, *args, **kwargs):
        MpesaBase.__init__(self, *args, **kwargs)

    def register(self, shortcode=None, response_type=None, confirmation_url=None, validation_url=None):
        """Register validation and confirmation URLs on M-Pesa (C2B API).

        **Args:**

        - shortcode (int): The short code of the organization.
        - response_type (str): Default response type for timeout; M-Pesa will Complete or Cancel a timed-out transaction accordingly.
        - confirmation_url (str): Confirmation URL for the client.
        - validation_url (str): Validation URL for the client.

        **Returns:**

        - OriginatorConverstionID (str): The unique request ID for tracking a transaction.
        - ConversationID (str): The unique request ID returned by mpesa for each request made
        - ResponseDescription (str): Response Description message
        """
        payload = {
            "ShortCode": shortcode,
            "ResponseType": response_type,
            "ConfirmationURL": confirmation_url,
            "ValidationURL": validation_url
        }
        response = self.request("/mpesa/c2b/v1/registerurl", json=payload, timeout=self.timeout)
        if response is None:
            # No body came back; just record that the request finished.
            logger.debug("finished transaction")
            return None
        logger.debug(response)
        return response

    def simulate(self, shortcode=None, command_id=None, amount=None, msisdn=None, bill_ref_number=None):
        """Simulate a C2B transaction through M-Pesa's C2B API.

        **Args:**

        - shortcode (int): The short code of the organization.
        - command_id (str): Unique command per transaction type - CustomerPayBillOnline - CustomerBuyGoodsOnline.
        - amount (int): The amount being transacted
        - msisdn (int): Phone number (msisdn) initiating the transaction MSISDN(12 digits)
        - bill_ref_number: Optional

        **Returns:**

        - OriginatorConverstionID (str): The unique request ID for tracking a transaction.
        - ConversationID (str): The unique request ID returned by mpesa for each request made
        - ResponseDescription (str): Response Description message
        """
        payload = {
            "ShortCode": shortcode,
            "CommandID": command_id,
            "Amount": amount,
            "Msisdn": msisdn,
            "BillRefNumber": bill_ref_number
        }
        logger.debug(payload)
        response = self.request("/mpesa/c2b/v1/simulate", json=payload, timeout=self.timeout)
        if response is None:
            logger.debug("finished transaction")
            return None
        logger.debug(response)
        return response
|
[
"logging.getLogger"
] |
[((70, 97), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (87, 97), False, 'import logging\n')]
|
try:
    from setuptools import setup, find_packages
except ImportError:
    from distutils.core import setup, find_packages

# Populated by exec'ing wikiscraper/version.py, which defines __version__.
__version__ = None
# FIX: use a context manager so the file handle is closed promptly
# (the original exec(open(...).read()) left it to the garbage collector).
with open('wikiscraper/version.py') as version_file:
    exec(version_file.read())

setup(name='wikiscraper',
      version=__version__,
      # NOTE(review): this module has no docstring, so description is None
      # unless one is added above -- confirm intent.
      description=__doc__,
      author='<NAME>',
      author_email='<EMAIL>',
      packages=find_packages(),
      install_requires=[
          'numpy',
          'bs4',
          'requests',
          'urllib3',
          'docopt'
      ],
      entry_points={
          'console_scripts': [
              'wikiscrape = wikiscraper.main:scrape',
              'wikiscrape_user = wikiscraper.main:scrape_user',
              'wikiscrape_article = wikiscraper.main:scrape_article',
              'wikistats = wikiscraper.main:get_number_users'
          ]
      },
      classifiers=[
          'Environment :: Console',
          'License :: OSI Approved :: Apache Software License',
          'Programming Language :: Python :: 3.3',
          'Programming Language :: Python :: 3.4',
          'Programming Language :: Python :: 3.5',
      ],
      )
|
[
"distutils.core.find_packages"
] |
[((339, 354), 'distutils.core.find_packages', 'find_packages', ([], {}), '()\n', (352, 354), False, 'from distutils.core import setup, find_packages\n')]
|
from setuptools import setup
from setuptools.command.build_py import build_py as _build_py
import glob
import platform
import os
import shutil
from openvisualizer import ovVersion
'''
This implementation of the traditional setup.py uses the root package's
package_data parameter to store data files, rather than the application-level
data_files parameter. This arrangement organizes OpenVisualizer within a
single tree of directories, and so is more portable.
In contrast to the native setup, the installer is free to relocate the tree
of directories with install options for setup.py.
This implementation is based on setuptools, and builds the list of module
dependencies by reading 'requirements.txt'.
'''
# Dotted version string built from the package's version tuple.
VERSION = '.'.join([str(v) for v in ovVersion.VERSION])

# Package-data locations, relative to the openvisualizer package.
webstatic = 'data/web_files/static'
webtmpl = 'data/web_files/templates'
simdata = 'data/sim_files'

with open('README.txt') as f:
    LONG_DESCRIPTION = f.read()

# Create list of required modules for 'install_requires' parameter. Cannot create
# this list with pip.req.parse_requirements() because it requires the pwd module,
# which is Unix only.
# Assumes requirements file contains only module lines and comments.
deplist = []
with open(os.path.join('openvisualizer', 'data', 'requirements.txt')) as f:
    for line in f:
        # FIX: strip the trailing newline so install_requires entries are
        # clean, and skip blank lines as well as comments.
        requirement = line.strip()
        if requirement and not requirement.startswith('#'):
            deplist.append(requirement)
def appdirGlob(globstr, subdir=''):
    """Return glob matches for `globstr` under bin/openVisualizerApp.

    A non-empty `subdir` is inserted as an extra path component between
    the app directory and the pattern.
    """
    appdir = 'bin/openVisualizerApp'
    if subdir == '':
        pattern_parts = [appdir, globstr]
    else:
        pattern_parts = [appdir, subdir, globstr]
    return glob.glob('/'.join(pattern_parts))
class build_py(_build_py):
    '''
    Extends setuptools build of openvisualizer package data at installation time.
    Selects and copies the architecture-specific simulation module from an OS-based
    subdirectory up to the parent 'sim_files' directory. Excludes the OS subdirectories
    from installation.
    '''

    def build_package_data(self):
        # Let setuptools copy all declared package data first.
        _build_py.build_package_data(self)
        # Determine which prebuilt simulator binary matches this host:
        # OS subdirectory, pointer-width suffix, and extension module type.
        osname = 'windows' if os.name=='nt' else 'linux'
        suffix = 'amd64' if platform.architecture()[0]=='64bit' else 'x86'
        fileExt = 'pyd' if os.name=='nt' else 'so'
        simPath = None
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                moduleName = 'oos_openwsn-{0}.{1}'.format(suffix, fileExt)
                modulePath = os.path.join(osname, moduleName)
                if package == 'openvisualizer' and filename.endswith(modulePath):
                    # Copy the matching binary up to sim_files/ under the
                    # generic module name the code imports.
                    srcfile = os.path.join(src_dir, filename)
                    simPath = os.path.join(build_dir, 'data', 'sim_files')
                    target = os.path.join(simPath, 'oos_openwsn.{0}'.format(fileExt))
                    self.copy_file(srcfile, target)
        # Remove the per-OS source subdirectories from the built tree.
        if simPath:
            shutil.rmtree(os.path.join(simPath, 'linux'))
            shutil.rmtree(os.path.join(simPath, 'windows'))
# Distribution metadata and layout for OpenVisualizer. Uses the custom
# build_py command to select the platform-specific simulator module.
setup(
    name='openVisualizer',
    packages=['openvisualizer',
              'openvisualizer.BspEmulator', 'openvisualizer.eventBus',
              'openvisualizer.lbrClient', 'openvisualizer.moteConnector',
              'openvisualizer.moteProbe', 'openvisualizer.moteState',
              'openvisualizer.openLbr', 'openvisualizer.openTun',
              'openvisualizer.openType', 'openvisualizer.openUI',
              'openvisualizer.RPL', 'openvisualizer.SimEngine', 'openvisualizer.remoteConnectorServer',
              'openvisualizer.JRC'],
    scripts=appdirGlob('openVisualizer*.py'),
    package_dir={'': '.', 'openvisualizer': 'openvisualizer'},
    # Copy simdata files by extension so don't copy .gitignore in that directory.
    package_data={'openvisualizer': [
        'data/*.conf',
        'data/requirements.txt',
        '/'.join([webstatic, 'css', '*']),
        '/'.join([webstatic, 'font-awesome', 'css', '*']),
        '/'.join([webstatic, 'font-awesome', 'fonts', '*']),
        '/'.join([webstatic, 'images', '*']),
        '/'.join([webstatic, 'js', '*.js']),
        '/'.join([webstatic, 'js', 'plugins', 'metisMenu', '*']),
        '/'.join([webtmpl, '*']),
        '/'.join([simdata, 'windows', '*.pyd']),
        '/'.join([simdata, 'linux', '*.so']),
        '/'.join([simdata, '*.h'])
    ]},
    install_requires=deplist,
    # Must extract zip to edit conf files.
    zip_safe=False,
    version=VERSION,
    author='<NAME>',
    author_email='<EMAIL>',
    description='Wireless sensor network monitoring, visualization, and debugging tool',
    long_description=LONG_DESCRIPTION,
    url='https://openwsn.atlassian.net/wiki/display/OW/OpenVisualizer',
    keywords=['6TiSCH', 'Internet of Things', '6LoWPAN', '802.15.4e', 'sensor', 'mote'],
    platforms=['platform-independent'],
    license='BSD 3-Clause',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Topic :: Communications',
        'Topic :: Home Automation',
        'Topic :: Internet',
        'Topic :: Software Development',
    ],
    # Hook in the custom build step defined above.
    cmdclass={'build_py': build_py},
)
|
[
"setuptools.command.build_py.build_py.build_package_data",
"os.path.join",
"platform.architecture"
] |
[((1226, 1284), 'os.path.join', 'os.path.join', (['"""openvisualizer"""', '"""data"""', '"""requirements.txt"""'], {}), "('openvisualizer', 'data', 'requirements.txt')\n", (1238, 1284), False, 'import os\n'), ((1974, 2008), 'setuptools.command.build_py.build_py.build_package_data', '_build_py.build_package_data', (['self'], {}), '(self)\n', (2002, 2008), True, 'from setuptools.command.build_py import build_py as _build_py\n'), ((2433, 2465), 'os.path.join', 'os.path.join', (['osname', 'moduleName'], {}), '(osname, moduleName)\n', (2445, 2465), False, 'import os\n'), ((2882, 2912), 'os.path.join', 'os.path.join', (['simPath', '"""linux"""'], {}), "(simPath, 'linux')\n", (2894, 2912), False, 'import os\n'), ((2940, 2972), 'os.path.join', 'os.path.join', (['simPath', '"""windows"""'], {}), "(simPath, 'windows')\n", (2952, 2972), False, 'import os\n'), ((2097, 2120), 'platform.architecture', 'platform.architecture', ([], {}), '()\n', (2118, 2120), False, 'import platform\n'), ((2579, 2610), 'os.path.join', 'os.path.join', (['src_dir', 'filename'], {}), '(src_dir, filename)\n', (2591, 2610), False, 'import os\n'), ((2642, 2686), 'os.path.join', 'os.path.join', (['build_dir', '"""data"""', '"""sim_files"""'], {}), "(build_dir, 'data', 'sim_files')\n", (2654, 2686), False, 'import os\n')]
|
"""
This script creates an instance of a sacred experiment and defines default configurations.
"""
from src.neural_nets.models import get_model
from src.neural_nets.load_data import get_loader
from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss
import src.regression.logistic_regression as reg
import os
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import torchsso.optim as soptim
import torch.nn.functional as F
import random
from torch.utils.data import DataLoader
from sacred import Experiment
from torch import Tensor, device
from copy import deepcopy
from time import sleep
from tqdm import tqdm
from typing import List
from itertools import product
# create a new sacred experiment; the name is a random integer, presumably so
# that repeated runs get distinct experiment names — TODO confirm intent
ex = Experiment(name=str(random.randint(0, 1000000)))
# default configurations
@ex.config
def cfg():
    """Default sacred configuration: every local variable below becomes a config entry."""
    # system
    cuda = torch.cuda.is_available()
    gpu = 0  # index of the CUDA device to run on when cuda is True
    base_dir = os.getcwd()
    # supported datasets
    # JSB_Chorales (short)
    # Nottingham (medium)
    # Piano_midi (long)
    # MuseData (extra long)
    dataset = "JSB_Chorales"
    # training
    num_epochs = 150
    batch_size = 128
    # mask some low notes and some high notes because they never show up
    low_off_notes = 0
    high_off_notes = 88
    lr = 0.001
    decay = 1.0  # per-epoch multiplicative learning-rate decay (1.0 = none)
    optmzr = "SGD"  # one of "SGD", "Adam", "RMSprop" (see pytorch_train_loop)
    regularization = 0.0
    # hyperparameter search grids, used only when do_hpsearch is True
    do_hpsearch = False
    learning_rates = 10**np.linspace(-2, -4, 5)
    decays = 1 - np.linspace(0, 0.1, num=5)
    regularizations = 10**np.linspace(-2, -4, num=5)
    hps_epochs = 50  # shorter budget per hyperparameter trial
    # Supported architectures
    # LINEAR (LDS)
    # REGRESSION (regress next note based on last note)
    # REGRESSION_8_STEP (regress next note based on last 8 notes)
    architecture = 'LDS'
    readout = 'linear'
    gradient_clipping = 1
    jit = False # not fully implemented
    # for regression
    lag = 1  # how many steps into the future the regression predicts
    window = 1  # how many past steps the regression takes into account
    # for neural networks
    input_size = 88
    hidden_size = 300
    num_layers = 1
    output_size = 88
    # see models.py and initialization.py for details
    init = 'default'
    scale = 1.0
    parity = None # see models.py
    t_distrib = torch.distributions.Uniform(0, 0.75)  # NOTE(review): presumably consumed by the initializer — confirm in models.py
    path = 'results/77/final_state_dict.pt'
    # when to save state dictionaries
    save_init_model = True
    save_final_model = True
    save_every_epoch = False
    # detect backprop anomalies
    detect_anomaly = False
# give all random number generators the same seed
def _seed_all(_seed) -> None:
torch.manual_seed(_seed)
np.random.seed(_seed)
random.seed(_seed)
# this context is used when we are running things on the cpu
class NullContext(object):
    """No-op context manager, used in place of a CUDA device context on CPU runs."""

    def __init__(self):
        """Nothing to set up."""
        pass

    def __enter__(self):
        """Return nothing; there is no resource to hand out."""
        return None

    def __exit__(self, type, value, traceback):
        """Return None (falsy) so any exception propagates unchanged."""
        return None
# this function simply trains regression models and logs the results
# see regression.trainer for details
@ex.capture
def sklearn_experiment(dataset: str,
                       save_dir: str,
                       num_epochs: int,
                       high_off_notes: int,
                       low_off_notes: int,
                       lag: int,
                       window: int,
                       _seed,
                       _log,
                       _run):
    """
    Train per-note logistic-regression models, save their parameters as
    artifacts, and log loss/accuracy on all three splits.

    :param dataset: name of the dataset to be used
    :param save_dir: temporary directory where artifacts are being stored
    :param num_epochs: training epochs for the regression models
    :param high_off_notes: top of the active note range
    :param low_off_notes: bottom of the active note range
    :param lag: how many time steps into the future the regression model is to predict
    :param window: how many time steps the regression model is to take into account
    :param _seed: sacred random seed
    :param _log: sacred object used to output to the command line
    :param _run: sacred object used to monitor the runtime
    """
    num_notes = high_off_notes - low_off_notes
    models = reg.train_models(dataset,
                             num_epochs,
                             low_off_notes,
                             high_off_notes,
                             _seed,
                             lag=lag,
                             window=window)
    # flatten every model's parameters into two arrays so they can be saved
    coefs = np.zeros((num_notes, num_notes * window))
    intercepts = np.zeros(num_notes * window)
    for i in range(num_notes):
        model = models[i]
        if model is None:
            # if there were no notes played for this channel, a model won't be
            # trained; store -1 everywhere to discourage the note from being played
            coefs[i] = -1
            intercepts[i] = -1
        else:
            coefs[i] = model.coef_
            intercepts[i] = model.intercept_
    np.save(save_dir + 'coefs.npy', coefs)
    np.save(save_dir + 'intercepts.npy', intercepts)
    _run.add_artifact(save_dir + 'coefs.npy')
    _run.add_artifact(save_dir + 'intercepts.npy')
    # compute the loss on every split first, then log in train/test/valid order
    splits = ('traindata', 'testdata', 'validdata')
    losses = [reg.compute_loss(models,
                               dataset,
                               split,
                               low_off_notes,
                               high_off_notes,
                               lag=lag,
                               window=window)
              for split in splits]
    _run.log_scalar('trainLoss', losses[0])
    _run.log_scalar('testLoss', losses[1])
    _run.log_scalar('validLoss', losses[2])
    # same pattern for the accuracies
    accuracies = [reg.compute_accuracy(models,
                                       dataset,
                                       split,
                                       low_off_notes,
                                       high_off_notes,
                                       lag=lag,
                                       window=window)
                  for split in splits]
    _run.log_scalar('trainAccuracy', accuracies[0])
    _run.log_scalar('testAccuracy', accuracies[1])
    _run.log_scalar('validAccuracy', accuracies[2])
# a single optimization step
@ex.capture
def train_iter(device: device,
               cuda_device: torch.cuda.device,
               input_tensor: Tensor,
               target: Tensor,
               mask: Tensor,
               model: nn.Module,
               loss_fcn: nn.Module,
               optimizer: optim.Optimizer,
               save_every_epoch: bool,
               save_dir: str,
               train_loader: DataLoader,
               test_loader: DataLoader,
               valid_loader: DataLoader,
               low_off_notes: int,
               high_off_notes: int,
               _log,
               _run,
               logging=True,
               epoch: int = 0):
    """
    Perform a single optimization step on one batch.

    :param input_tensor: one batch of songs; moved to `device` here
    :param model: network whose forward returns (output, hidden_tensors)
    :param loss_fcn: callable taking (output, target, mask, model)
    :param save_every_epoch: when True (and logging) a state-dict snapshot
        is saved and registered as a sacred artifact
    :param epoch: epoch index used to name the snapshot file (new,
        backward-compatible parameter; defaults to 0)
    :param logging: when False nothing is logged or saved
    """
    input_tensor = input_tensor.to(device)

    # number of songs in this batch; the loss is averaged per song
    N = input_tensor.shape[0]

    output, hidden_tensors = model(input_tensor)
    loss = loss_fcn(output, target, mask, model)/N

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    # use sacred to log training loss and accuracy
    if logging:
        train_acc = compute_acc(model, train_loader, low=low_off_notes, high=high_off_notes)
        _run.log_scalar("trainLoss", loss.cpu().detach().item())
        _run.log_scalar("trainAccuracy", train_acc)

    # save a copy of the model and make sacred remember it each epoch
    if save_every_epoch and logging:
        sd = deepcopy(model.state_dict())
        # BUG FIX: previously this saved the undefined name `init_sd` and read
        # an undefined global `epoch`, raising NameError whenever this branch
        # ran; it now saves the copied state dict under the passed epoch index.
        torch.save(sd, save_dir + 'state_dict_' + str(epoch) + '.pt')
        _run.add_artifact(save_dir + 'state_dict_' + str(epoch) + '.pt')
# train a neural network
# returns the final loss and accuracy on the training, testing, and validation sets
@ex.capture
def pytorch_train_loop(cuda: bool,
                       model_dict: dict,
                       initializer: dict,
                       train_loader: DataLoader,
                       test_loader: DataLoader,
                       valid_loader: DataLoader,
                       low_off_notes: int,
                       high_off_notes: int,
                       optmzr: str,
                       lr: float,
                       decay: float,
                       regularization: float,
                       num_epochs: int,
                       save_dir: str,
                       save_init_model,
                       save_every_epoch,
                       save_final_model,
                       _seed,
                       _log,
                       _run,
                       logging=True,
                       gpu: int = 0):
    """
    Train a neural network and return its final metrics.

    :param cuda: whether to run on a CUDA device
    :param model_dict: architecture configuration forwarded to get_model
    :param initializer: initialization configuration forwarded to get_model
    :param optmzr: one of "SGD", "Adam", "RMSprop"
    :param gpu: CUDA device index (new, backward-compatible parameter;
        sacred injects the config value when captured)
    :param logging: when False (hyperparameter search) nothing is logged
        to sacred and no state dictionaries are saved
    :returns: ((train_loss, test_loss, val_loss), (train_acc, test_acc, val_acc))
    :raises ValueError: if `optmzr` is not recognized
    """
    # construct and initialize the model
    model = get_model(model_dict, initializer, cuda)

    # save a copy of the initial model and make sacred remember it
    if save_init_model and logging:
        init_sd = deepcopy(model.state_dict())
        torch.save(init_sd, save_dir + 'initial_state_dict.pt')
        _run.add_artifact(save_dir + 'initial_state_dict.pt')

    # if we are on cuda we construct the device and run everything on it
    cuda_device = NullContext()
    device = torch.device('cpu')
    if cuda:
        # BUG FIX: `gpu` used to be an undefined global name here, so this
        # branch raised NameError; it is now a captured parameter.
        dev_name = 'cuda:' + str(gpu)
        cuda_device = torch.cuda.device(dev_name)
        device = torch.device(dev_name)
        model = model.to(device)

    with cuda_device:

        # see metrics.py
        loss_fcn = MaskedBCE(regularization, low_off_notes=low_off_notes, high_off_notes=high_off_notes)

        # compute the metrics before training and log them
        if logging:
            train_loss = compute_loss(loss_fcn, model, train_loader)
            test_loss = compute_loss(loss_fcn, model, test_loader)
            val_loss = compute_loss(loss_fcn, model, valid_loader)
            _run.log_scalar("trainLoss", train_loss)
            _run.log_scalar("testLoss", test_loss)
            _run.log_scalar("validLoss", val_loss)
            train_acc = compute_acc(model, train_loader, low=low_off_notes, high=high_off_notes)
            test_acc = compute_acc(model, test_loader, low=low_off_notes, high=high_off_notes)
            val_acc = compute_acc(model, valid_loader, low=low_off_notes, high=high_off_notes)
            _run.log_scalar("trainAccuracy", train_acc)
            _run.log_scalar("testAccuracy", test_acc)
            _run.log_scalar("validAccuracy", val_acc)

        # construct the optimizer
        if optmzr == "SGD":
            optimizer = optim.SGD(model.parameters(), lr=lr)
        elif optmzr == "Adam":
            optimizer = optim.Adam(model.parameters(), lr=lr)
        elif optmzr == "RMSprop":
            optimizer = optim.RMSprop(model.parameters(), lr=lr)
        else:
            raise ValueError("Optimizer {} not recognized.".format(optmzr))

        # exponential learning rate decay: lr at epoch e is lr * decay**e
        scheduler = optim.lr_scheduler.LambdaLR(optimizer, lambda epoch: decay**epoch)

        # begin training loop
        for epoch in tqdm(range(num_epochs)):

            for input_tensor, target, mask in train_loader:
                train_iter(device,
                           cuda_device,
                           input_tensor,
                           target,
                           mask,
                           model,
                           loss_fcn,
                           optimizer,
                           save_every_epoch,
                           save_dir,
                           train_loader,
                           test_loader,
                           valid_loader,
                           low_off_notes,
                           high_off_notes,
                           _log,
                           _run,
                           logging=logging)

            # learning rate decay
            scheduler.step()

            # use sacred to log testing and validation loss and accuracy
            if logging:
                test_loss = compute_loss(loss_fcn, model, test_loader)
                val_loss = compute_loss(loss_fcn, model, valid_loader)
                test_acc = compute_acc(model, test_loader, low=low_off_notes, high=high_off_notes)
                val_acc = compute_acc(model, valid_loader, low=low_off_notes, high=high_off_notes)
                _run.log_scalar("testLoss", test_loss)
                _run.log_scalar("validLoss", val_loss)
                _run.log_scalar("testAccuracy", test_acc)
                _run.log_scalar("validAccuracy", val_acc)

        # save a copy of the trained model and make sacred remember it
        if save_final_model and logging:
            fin_sd = deepcopy(model.state_dict())
            torch.save(fin_sd, save_dir + 'final_state_dict.pt')
            _run.add_artifact(save_dir + 'final_state_dict.pt')

        # recompute the metrics so that this function can return them
        train_loss = compute_loss(loss_fcn, model, train_loader)
        test_loss = compute_loss(loss_fcn, model, test_loader)
        val_loss = compute_loss(loss_fcn, model, valid_loader)
        train_acc = compute_acc(model, train_loader, low=low_off_notes, high=high_off_notes)
        test_acc = compute_acc(model, test_loader, low=low_off_notes, high=high_off_notes)
        val_acc = compute_acc(model, valid_loader, low=low_off_notes, high=high_off_notes)

        return ((train_loss, test_loss, val_loss), (train_acc, test_acc, val_acc))
# main function
@ex.automain
def train_loop(cuda,
               gpu,
               base_dir,
               dataset,
               num_epochs,
               batch_size,
               low_off_notes,
               high_off_notes,
               lr,
               decay,
               optmzr,
               regularization,
               do_hpsearch,
               learning_rates,
               decays,
               regularizations,
               hps_epochs,
               architecture,
               readout,
               gradient_clipping,
               jit,
               lag,
               window,
               input_size,
               hidden_size,
               num_layers,
               output_size,
               detect_anomaly,
               init,
               scale,
               parity,
               t_distrib,
               path,
               save_init_model,
               save_final_model,
               save_every_epoch,
               _seed,
               _log,
               _run):
    """
    Experiment entry point: dispatch to the sklearn regression trainer or the
    pytorch trainer, optionally running a hyperparameter grid search.

    All parameters are injected by sacred from the config (see cfg above).
    """
    # save artifacts to a temporary directory that gets erased when the experiment is over
    save_dir = base_dir + '/tmp_' + str(_seed)
    os.system('mkdir ' + save_dir)
    save_dir += '/'

    # give all random number generators the same seed
    _seed_all(_seed)

    sklearn_program = architecture == 'REGRESSION'

    # regression models and neural networks are trained very differently
    if sklearn_program:
        sklearn_experiment(dataset,
                           save_dir,
                           num_epochs,
                           high_off_notes,
                           low_off_notes,
                           lag,
                           window,
                           _seed,
                           _log,
                           _run)

    # run a pytorch program
    else:

        model_dict = {'architecture': architecture,
                      'readout': readout,
                      'gradient_clipping': gradient_clipping,
                      'jit': jit,
                      'lag': lag,
                      'window': window,
                      'input_size': input_size,
                      'hidden_size': hidden_size,
                      'num_layers': num_layers,
                      'output_size': output_size
                     }

        initializer = {'init': init,
                       'scale': scale,
                       'parity': parity,
                       't_distrib': t_distrib,
                       'path': path
                      }

        # if we are debugging we may want to detect autograd anomalies
        torch.autograd.set_detect_anomaly(detect_anomaly)

        # construct the pytorch data loaders
        train_loader, test_loader, valid_loader = get_loader(dataset, batch_size)

        # standard training loop
        if not do_hpsearch:

            # the training loop function returns the metrics achieved at the end of training
            # they will be logged by default, no need to do anything with them here
            metrics = pytorch_train_loop(cuda,
                                         model_dict,
                                         initializer,
                                         train_loader,
                                         test_loader,
                                         valid_loader,
                                         low_off_notes,
                                         high_off_notes,
                                         optmzr,
                                         lr,
                                         decay,
                                         regularization,
                                         num_epochs,
                                         save_dir,
                                         save_init_model,
                                         save_every_epoch,
                                         save_final_model,
                                         _seed,
                                         _log,
                                         _run)

        # only goal here is to find the best hyper parameters
        else:

            min_test_loss = float('inf')
            best_lr = 0
            best_dcay = 0
            best_reg = 0

            hyperparams = product(learning_rates, decays, regularizations)
            for rate, dcay, reg in hyperparams:

                # train a model with the given hyperparameters
                # don't log anything, otherwise we will have a ridiculous amount of extraneous info
                # BUG FIX: low_off_notes and high_off_notes were previously
                # omitted from this call, silently shifting every later
                # positional argument into the wrong parameter (sacred filled
                # the missing trailing parameters from config, masking it).
                metrics = pytorch_train_loop(cuda,
                                             model_dict,
                                             initializer,
                                             train_loader,
                                             test_loader,
                                             valid_loader,
                                             low_off_notes,
                                             high_off_notes,
                                             optmzr,
                                             rate,
                                             dcay,
                                             reg,
                                             hps_epochs,
                                             save_dir,
                                             save_init_model,
                                             save_every_epoch,
                                             save_final_model,
                                             _seed,
                                             _log,
                                             _run,
                                             logging=False)

                # loss is first index, test set is second index
                test_loss = metrics[0][1]

                # compare loss against other hyperparams and update if necessary
                # `test_loss == test_loss` is a NaN guard (NaN != NaN)
                if test_loss == test_loss and test_loss < min_test_loss:

                    min_test_loss = test_loss
                    best_lr = rate
                    best_dcay = dcay
                    best_reg = reg

            # record the best hyperparameters
            _run.log_scalar("learning_rate", best_lr)
            _run.log_scalar("decay", best_dcay)
            _run.log_scalar("regularization", best_reg)

    # wait a second then remove the temporary directory used for storing artifacts
    sleep(1)
    os.system('rm -r ' + save_dir)
|
[
"numpy.random.seed",
"src.neural_nets.load_data.get_loader",
"src.neural_nets.metrics.compute_loss",
"src.neural_nets.models.get_model",
"torch.optim.lr_scheduler.LambdaLR",
"torch.autograd.set_detect_anomaly",
"torch.device",
"src.neural_nets.metrics.MaskedBCE",
"random.randint",
"src.regression.logistic_regression.train_models",
"src.neural_nets.metrics.compute_acc",
"random.seed",
"numpy.linspace",
"itertools.product",
"src.regression.logistic_regression.compute_loss",
"numpy.save",
"src.regression.logistic_regression.compute_accuracy",
"torch.manual_seed",
"os.system",
"time.sleep",
"torch.distributions.Uniform",
"torch.cuda.is_available",
"torch.cuda.device",
"os.getcwd",
"numpy.zeros",
"torch.save"
] |
[((916, 941), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (939, 941), False, 'import torch\n'), ((969, 980), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (978, 980), False, 'import os\n'), ((2195, 2231), 'torch.distributions.Uniform', 'torch.distributions.Uniform', (['(0)', '(0.75)'], {}), '(0, 0.75)\n', (2222, 2231), False, 'import torch\n'), ((2545, 2569), 'torch.manual_seed', 'torch.manual_seed', (['_seed'], {}), '(_seed)\n', (2562, 2569), False, 'import torch\n'), ((2574, 2595), 'numpy.random.seed', 'np.random.seed', (['_seed'], {}), '(_seed)\n', (2588, 2595), True, 'import numpy as np\n'), ((2600, 2618), 'random.seed', 'random.seed', (['_seed'], {}), '(_seed)\n', (2611, 2618), False, 'import random\n'), ((3843, 3946), 'src.regression.logistic_regression.train_models', 'reg.train_models', (['dataset', 'num_epochs', 'low_off_notes', 'high_off_notes', '_seed'], {'lag': 'lag', 'window': 'window'}), '(dataset, num_epochs, low_off_notes, high_off_notes, _seed,\n lag=lag, window=window)\n', (3859, 3946), True, 'import src.regression.logistic_regression as reg\n'), ((4136, 4177), 'numpy.zeros', 'np.zeros', (['(num_notes, num_notes * window)'], {}), '((num_notes, num_notes * window))\n', (4144, 4177), True, 'import numpy as np\n'), ((4193, 4221), 'numpy.zeros', 'np.zeros', (['(num_notes * window)'], {}), '(num_notes * window)\n', (4201, 4221), True, 'import numpy as np\n'), ((4630, 4668), 'numpy.save', 'np.save', (["(save_dir + 'coefs.npy')", 'coefs'], {}), "(save_dir + 'coefs.npy', coefs)\n", (4637, 4668), True, 'import numpy as np\n'), ((4673, 4721), 'numpy.save', 'np.save', (["(save_dir + 'intercepts.npy')", 'intercepts'], {}), "(save_dir + 'intercepts.npy', intercepts)\n", (4680, 4721), True, 'import numpy as np\n'), ((4838, 4943), 'src.regression.logistic_regression.compute_loss', 'reg.compute_loss', (['models', 'dataset', '"""traindata"""', 'low_off_notes', 'high_off_notes'], {'lag': 'lag', 'window': 'window'}), "(models, dataset, 
'traindata', low_off_notes,\n high_off_notes, lag=lag, window=window)\n", (4854, 4943), True, 'import src.regression.logistic_regression as reg\n'), ((5160, 5264), 'src.regression.logistic_regression.compute_loss', 'reg.compute_loss', (['models', 'dataset', '"""testdata"""', 'low_off_notes', 'high_off_notes'], {'lag': 'lag', 'window': 'window'}), "(models, dataset, 'testdata', low_off_notes, high_off_notes,\n lag=lag, window=window)\n", (5176, 5264), True, 'import src.regression.logistic_regression as reg\n'), ((5476, 5581), 'src.regression.logistic_regression.compute_loss', 'reg.compute_loss', (['models', 'dataset', '"""validdata"""', 'low_off_notes', 'high_off_notes'], {'lag': 'lag', 'window': 'window'}), "(models, dataset, 'validdata', low_off_notes,\n high_off_notes, lag=lag, window=window)\n", (5492, 5581), True, 'import src.regression.logistic_regression as reg\n'), ((5933, 6042), 'src.regression.logistic_regression.compute_accuracy', 'reg.compute_accuracy', (['models', 'dataset', '"""traindata"""', 'low_off_notes', 'high_off_notes'], {'lag': 'lag', 'window': 'window'}), "(models, dataset, 'traindata', low_off_notes,\n high_off_notes, lag=lag, window=window)\n", (5953, 6042), True, 'import src.regression.logistic_regression as reg\n'), ((6276, 6384), 'src.regression.logistic_regression.compute_accuracy', 'reg.compute_accuracy', (['models', 'dataset', '"""testdata"""', 'low_off_notes', 'high_off_notes'], {'lag': 'lag', 'window': 'window'}), "(models, dataset, 'testdata', low_off_notes,\n high_off_notes, lag=lag, window=window)\n", (6296, 6384), True, 'import src.regression.logistic_regression as reg\n'), ((6613, 6722), 'src.regression.logistic_regression.compute_accuracy', 'reg.compute_accuracy', (['models', 'dataset', '"""validdata"""', 'low_off_notes', 'high_off_notes'], {'lag': 'lag', 'window': 'window'}), "(models, dataset, 'validdata', low_off_notes,\n high_off_notes, lag=lag, window=window)\n", (6633, 6722), True, 'import 
src.regression.logistic_regression as reg\n'), ((9611, 9651), 'src.neural_nets.models.get_model', 'get_model', (['model_dict', 'initializer', 'cuda'], {}), '(model_dict, initializer, cuda)\n', (9620, 9651), False, 'from src.neural_nets.models import get_model\n'), ((10048, 10067), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (10060, 10067), False, 'import torch\n'), ((13799, 13842), 'src.neural_nets.metrics.compute_loss', 'compute_loss', (['loss_fcn', 'model', 'train_loader'], {}), '(loss_fcn, model, train_loader)\n', (13811, 13842), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((13859, 13901), 'src.neural_nets.metrics.compute_loss', 'compute_loss', (['loss_fcn', 'model', 'test_loader'], {}), '(loss_fcn, model, test_loader)\n', (13871, 13901), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((13917, 13960), 'src.neural_nets.metrics.compute_loss', 'compute_loss', (['loss_fcn', 'model', 'valid_loader'], {}), '(loss_fcn, model, valid_loader)\n', (13929, 13960), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((13978, 14050), 'src.neural_nets.metrics.compute_acc', 'compute_acc', (['model', 'train_loader'], {'low': 'low_off_notes', 'high': 'high_off_notes'}), '(model, train_loader, low=low_off_notes, high=high_off_notes)\n', (13989, 14050), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((14066, 14137), 'src.neural_nets.metrics.compute_acc', 'compute_acc', (['model', 'test_loader'], {'low': 'low_off_notes', 'high': 'high_off_notes'}), '(model, test_loader, low=low_off_notes, high=high_off_notes)\n', (14077, 14137), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((14152, 14224), 'src.neural_nets.metrics.compute_acc', 'compute_acc', (['model', 'valid_loader'], {'low': 'low_off_notes', 'high': 
'high_off_notes'}), '(model, valid_loader, low=low_off_notes, high=high_off_notes)\n', (14163, 14224), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((15487, 15517), 'os.system', 'os.system', (["('mkdir ' + save_dir)"], {}), "('mkdir ' + save_dir)\n", (15496, 15517), False, 'import os\n'), ((20678, 20686), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (20683, 20686), False, 'from time import sleep\n'), ((20691, 20721), 'os.system', 'os.system', (["('rm -r ' + save_dir)"], {}), "('rm -r ' + save_dir)\n", (20700, 20721), False, 'import os\n'), ((1471, 1493), 'numpy.linspace', 'np.linspace', (['(-2)', '(-4)', '(5)'], {}), '(-2, -4, 5)\n', (1482, 1493), True, 'import numpy as np\n'), ((1511, 1537), 'numpy.linspace', 'np.linspace', (['(0)', '(0.1)'], {'num': '(5)'}), '(0, 0.1, num=5)\n', (1522, 1537), True, 'import numpy as np\n'), ((1564, 1590), 'numpy.linspace', 'np.linspace', (['(-2)', '(-4)'], {'num': '(5)'}), '(-2, -4, num=5)\n', (1575, 1590), True, 'import numpy as np\n'), ((8116, 8188), 'src.neural_nets.metrics.compute_acc', 'compute_acc', (['model', 'train_loader'], {'low': 'low_off_notes', 'high': 'high_off_notes'}), '(model, train_loader, low=low_off_notes, high=high_off_notes)\n', (8127, 8188), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((9811, 9866), 'torch.save', 'torch.save', (['init_sd', "(save_dir + 'initial_state_dict.pt')"], {}), "(init_sd, save_dir + 'initial_state_dict.pt')\n", (9821, 9866), False, 'import torch\n'), ((10141, 10168), 'torch.cuda.device', 'torch.cuda.device', (['dev_name'], {}), '(dev_name)\n', (10158, 10168), False, 'import torch\n'), ((10186, 10208), 'torch.device', 'torch.device', (['dev_name'], {}), '(dev_name)\n', (10198, 10208), False, 'import torch\n'), ((10310, 10400), 'src.neural_nets.metrics.MaskedBCE', 'MaskedBCE', (['regularization'], {'low_off_notes': 'low_off_notes', 'high_off_notes': 'high_off_notes'}), 
'(regularization, low_off_notes=low_off_notes, high_off_notes=\n high_off_notes)\n', (10319, 10400), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((11796, 11864), 'torch.optim.lr_scheduler.LambdaLR', 'optim.lr_scheduler.LambdaLR', (['optimizer', '(lambda epoch: decay ** epoch)'], {}), '(optimizer, lambda epoch: decay ** epoch)\n', (11823, 11864), True, 'import torch.optim as optim\n'), ((16956, 17005), 'torch.autograd.set_detect_anomaly', 'torch.autograd.set_detect_anomaly', (['detect_anomaly'], {}), '(detect_anomaly)\n', (16989, 17005), False, 'import torch\n'), ((17102, 17133), 'src.neural_nets.load_data.get_loader', 'get_loader', (['dataset', 'batch_size'], {}), '(dataset, batch_size)\n', (17112, 17133), False, 'from src.neural_nets.load_data import get_loader\n'), ((813, 839), 'random.randint', 'random.randint', (['(0)', '(1000000)'], {}), '(0, 1000000)\n', (827, 839), False, 'import random\n'), ((10502, 10545), 'src.neural_nets.metrics.compute_loss', 'compute_loss', (['loss_fcn', 'model', 'train_loader'], {}), '(loss_fcn, model, train_loader)\n', (10514, 10545), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((10570, 10612), 'src.neural_nets.metrics.compute_loss', 'compute_loss', (['loss_fcn', 'model', 'test_loader'], {}), '(loss_fcn, model, test_loader)\n', (10582, 10612), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((10636, 10679), 'src.neural_nets.metrics.compute_loss', 'compute_loss', (['loss_fcn', 'model', 'valid_loader'], {}), '(loss_fcn, model, valid_loader)\n', (10648, 10679), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((10861, 10933), 'src.neural_nets.metrics.compute_acc', 'compute_acc', (['model', 'train_loader'], {'low': 'low_off_notes', 'high': 'high_off_notes'}), '(model, train_loader, low=low_off_notes, high=high_off_notes)\n', (10872, 10933), 
False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((10957, 11028), 'src.neural_nets.metrics.compute_acc', 'compute_acc', (['model', 'test_loader'], {'low': 'low_off_notes', 'high': 'high_off_notes'}), '(model, test_loader, low=low_off_notes, high=high_off_notes)\n', (10968, 11028), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((11051, 11123), 'src.neural_nets.metrics.compute_acc', 'compute_acc', (['model', 'valid_loader'], {'low': 'low_off_notes', 'high': 'high_off_notes'}), '(model, valid_loader, low=low_off_notes, high=high_off_notes)\n', (11062, 11123), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((13598, 13650), 'torch.save', 'torch.save', (['fin_sd', "(save_dir + 'final_state_dict.pt')"], {}), "(fin_sd, save_dir + 'final_state_dict.pt')\n", (13608, 13650), False, 'import torch\n'), ((18647, 18695), 'itertools.product', 'product', (['learning_rates', 'decays', 'regularizations'], {}), '(learning_rates, decays, regularizations)\n', (18654, 18695), False, 'from itertools import product\n'), ((12884, 12926), 'src.neural_nets.metrics.compute_loss', 'compute_loss', (['loss_fcn', 'model', 'test_loader'], {}), '(loss_fcn, model, test_loader)\n', (12896, 12926), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((12954, 12997), 'src.neural_nets.metrics.compute_loss', 'compute_loss', (['loss_fcn', 'model', 'valid_loader'], {}), '(loss_fcn, model, valid_loader)\n', (12966, 12997), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n'), ((13025, 13096), 'src.neural_nets.metrics.compute_acc', 'compute_acc', (['model', 'test_loader'], {'low': 'low_off_notes', 'high': 'high_off_notes'}), '(model, test_loader, low=low_off_notes, high=high_off_notes)\n', (13036, 13096), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, 
compute_loss\n'), ((13123, 13195), 'src.neural_nets.metrics.compute_acc', 'compute_acc', (['model', 'valid_loader'], {'low': 'low_off_notes', 'high': 'high_off_notes'}), '(model, valid_loader, low=low_off_notes, high=high_off_notes)\n', (13134, 13195), False, 'from src.neural_nets.metrics import MaskedBCE, Accuracy, compute_acc, compute_loss\n')]
|
#!/usr/bin/env python
"""polymer.py - prototype bond breaking reactions:
Uses hydrogels to configure the following system
2 reactions:
-A-A-A- + E -> -A-B-A- + E (spatial) r=2.0, k=1.0
{ (structural) k=10000...
-A-B-A- -> -A + A-A-
A-B -> C + C
}
2 particle_types:
E (enzyme)
C (released)
2 topology_particle_types:
A (monomer)
B (unbonded)
1 topology_type
molecule
2 potential_types
harmonic repulsion (pair; all) r0=1.0, k=2.0
harmonic bonding (bond; A-A A-B) r0=1.0, k=5.0
"""
from pathlib import Path
from typing import List, Union
import numpy as np
import readdy
import pandas as pd
import matplotlib.pyplot as plt
import yaml
from softnanotools.logger import Logger
logger = Logger('POLYMER')
from hydrogels.utils.system import System
from hydrogels.utils.topology import Topology, TopologyBond
# Default diffusion coefficient for each particle type; consumed by
# create_system when no diffusion_dictionary is supplied.
DEFAULT_DICTIONARY = {
    'A': 1.0,
    'B': 1.0,
    'C': 1.0,
    'E': 1.0,
}
def register_bonding(
    system: System,
    monomer: str = 'A',
    unbonded: str = 'B',
    length: float = 1.0,
    force_constant: float = 2.5,
):
    """Register harmonic bond potentials for every monomer pair combination.

    Only the monomer-monomer bond carries a restoring force; any pair that
    involves the unbonded species gets a zero force constant, so broken
    bonds exert no force.
    """
    pair_constants = (
        (monomer, monomer, force_constant),
        (monomer, unbonded, 0.0),
        (unbonded, unbonded, 0.0),
    )
    for first, second, constant in pair_constants:
        TopologyBond(
            'harmonic',
            first,
            second,
            length=length,
            force_constant=constant,
        ).register(system)
    return
def register_potentials(system: System, spring_constant=2.5, spring_length=1.0):
    """Add a pairwise harmonic repulsion between every combination of species.

    Iterates the upper triangle (including the diagonal) of the species grid,
    which yields exactly the ten unordered pairs of A, B, E and C.
    """
    species = ('A', 'B', 'E', 'C')
    for index, first in enumerate(species):
        for second in species[index:]:
            system.potentials.add_harmonic_repulsion(
                first,
                second,
                force_constant=spring_constant,
                interaction_distance=spring_length,
            )
    return
def create_topologies(
    N: int,
    top_type: str = 'molecule',
    monomer: str = 'A',
    **kwargs
) -> List[Topology]:
    """Build N cubic eight-monomer topologies at random origins.

    Each molecule is a unit cube of monomers whose corner at the origin is
    drawn uniformly from [0, 12.5)^3.
    """
    # unit-cube corner offsets: bottom face first, then top face
    corner_offsets = np.array([
        [0.0, 0.0, 0.0],
        [1.0, 0.0, 0.0],
        [1.0, 1.0, 0.0],
        [0.0, 1.0, 0.0],
        [0.0, 0.0, 1.0],
        [1.0, 0.0, 1.0],
        [1.0, 1.0, 1.0],
        [0.0, 1.0, 1.0],
    ])
    # cube connectivity: bottom ring, top ring, vertical struts
    cube_edges = [
        (0, 1), (1, 2), (2, 3), (3, 0),
        (4, 5), (5, 6), (6, 7), (7, 4),
        (0, 4), (1, 5), (2, 6), (3, 7),
    ]
    result = []
    for _ in range(N):
        origin = np.random.random(3) * 12.5
        result.append(Topology(
            top_type,
            sequence=[monomer] * 8,
            edges=list(cube_edges),
            positions=origin + corner_offsets,
        ))
    return result
def create_system(
    box: float = 25.0,
    diffusion_dictionary: dict = DEFAULT_DICTIONARY,
    reaction_radius: float = 1.0,
    reaction_rate: float = 1.0,
    **kwargs,
):
    """
    Build the simulation system: species, topology type, potentials, and the
    enzymatic plus bond-breaking reactions described in the module docstring.

    :param box: cubic box edge length
    :param diffusion_dictionary: diffusion coefficient per particle type
        (NOTE: the default is a shared module-level dict; it is only read
        here, but callers should not mutate it)
    :param reaction_radius: radius of the spatial enzymatic reaction
    :param reaction_rate: rate of the spatial enzymatic reaction
    :param kwargs: accepted and ignored so callers can pass one config dict
    :returns: the configured System
    """
    system = System([box, box, box], units=None)
    # register free (non-topology) species
    system.add_species('C', diffusion_dictionary['C'])
    system.add_species('E', diffusion_dictionary['E'])
    # register topology species
    system.add_topology_species('B', diffusion_dictionary['B'])
    system.add_topology_species('A', diffusion_dictionary['A'])
    system.topologies.add_type('molecule')
    # register bonding
    register_bonding(system)
    # add potentials
    register_potentials(system)
    # register enzymatic reaction: an enzyme E converts a topology monomer A
    # into the unbonded species B when within reaction_radius
    system.topologies.add_spatial_reaction(
        f'reaction: molecule(A) + (E) -> molecule(B) + (E)',
        rate=reaction_rate,
        radius=reaction_radius,
    )
    def reaction_function(topology):
        # structural reaction applied to 'molecule' topologies: removes B
        # vertices from the bonded network step by step
        recipe = readdy.StructuralReactionRecipe(topology)

        # it is possible for there to be a lone particle in a topology
        # when reactions happen very quickly, this step ensures that
        # these are converted to C particles which are not topology-bound
        vertices = topology.get_graph().get_vertices()
        if len(vertices) == 1:
            recipe.separate_vertex(0)
            recipe.change_particle_type(vertices[0], 'C')
            logger.debug('Structural 1')

        # register A-B -> C + C reaction
        elif len(vertices) == 2:
            types = [topology.particle_type_of_vertex(v) for v in vertices]
            if 'B' in types:
                recipe.separate_vertex(0)
                recipe.change_particle_type(vertices[0], 'C')
                recipe.change_particle_type(vertices[1], 'C')
                logger.debug('Structural 2')

        # register -A-B-A- -> -A + A-A-
        else:
            # break the first edge that touches a B vertex and turn that
            # vertex back into A; at most one edge is removed per invocation
            edges = topology.get_graph().get_edges()
            for edge in edges:
                if topology.particle_type_of_vertex(edge[0]) == 'B':
                    recipe.remove_edge(edge[0], edge[1])
                    recipe.change_particle_type(edge[0], 'A')
                    logger.debug('Structural 3A')
                    return recipe
                elif topology.particle_type_of_vertex(edge[1]) == 'B':
                    recipe.remove_edge(edge[0], edge[1])
                    recipe.change_particle_type(edge[1], 'A')
                    logger.debug('Structural 3B')
                    return recipe

        return recipe

    system.topologies.add_structural_reaction(
        name="BondBreaking",
        topology_type="molecule",
        reaction_function=reaction_function,
        rate_function=lambda x: 10000.,  # near-instantaneous, matching the module docstring (k=10000)
    )
    return system
def run_simulation(
    name: str,
    stride: int = 100,
    timestep: float = 0.01,
    length: int = 10000,
    **kwargs
) -> Path:
    """
    Configure and run one simulation, writing the trajectory to `<name>.h5`.

    :param name: basename of the output trajectory file
    :param stride: number of steps between recorded observations
    :param timestep: integration time step
    :param length: total number of integration steps
    :param kwargs: forwarded to create_system; must also contain 'enzymes'
        (number of free E particles) and 'molecules' (number of cubic
        topologies); 'box' defaults to 25.0 to match create_system
    :returns: path to the written .h5 trajectory
    """
    # NOTE: a commented-out equilibration phase (checkpoint save/restore)
    # was removed here; reintroduce a separate equilibration run if needed.
    logger.info('Configuring simulation...')
    system = create_system(**kwargs)
    simulation = system.simulation()

    # scatter free enzymes uniformly inside the centred simulation box
    # (default matches create_system's box default instead of raising KeyError)
    box = kwargs.get('box', 25.0)
    simulation.add_particles(
        'E',
        np.random.rand(kwargs['enzymes'], 3) * box - (box / 2)
    )

    # add the polymer topologies
    for topology in create_topologies(kwargs['molecules'], box=box):
        topology.add_to_sim(simulation)

    # write the trajectory to <name>.h5, replacing any previous run
    output = Path(f'{name}.h5')
    if output.exists():
        output.unlink()
    simulation.output_file = str(output.absolute())

    # include observables
    simulation.observe.particles(stride)
    simulation.observe.topologies(stride)
    simulation.record_trajectory(stride)
    simulation.reaction_handler = 'Gillespie'

    logger.info(f'Running simulation {name}...')
    simulation.run(length, timestep)
    logger.info('Done!')
    return output
def analyse_trajectory(
    fname: Union[str, Path],
    output: Union[str, Path, None] = None,
    timestep: float = 0.01,
) -> pd.DataFrame:
    """Tabulate per-frame particle counts from a ReaDDy trajectory.

    Reads the particles observable from ``fname`` and counts how many
    particles of each species (A, B, E, C) are present in every frame.

    Args:
        fname: path to the ReaDDy ``.h5`` trajectory file.
        output: optional CSV destination; written when truthy.
        timestep: integration timestep used to convert frame indices
            into simulation time.

    Returns:
        DataFrame with columns ``t``, ``A``, ``B``, ``E``, ``C``.
    """
    logger.info('Analysing trajectory...')
    trajectory = readdy.Trajectory(str(Path(fname).absolute()))
    type_ids = trajectory.particle_types
    frame_times, frames = trajectory.read_observable_particles()
    counts = {
        't': frame_times * timestep,
        'A': [],
        'B': [],
        'E': [],
        'C': [],
    }
    for frame in frames:
        for species in ('A', 'B', 'E', 'C'):
            counts[species].append(len(frame[frame == type_ids[species]]))
    results = pd.DataFrame(counts)
    if output:
        results.to_csv(output, index=False)
    return results
def gather_results(targets: List[Path]) -> pd.DataFrame:
    """Combine per-seed result CSVs into mean/std columns per species.

    Args:
        targets: CSV files, each with columns ``t``, ``A``, ``E``, ``B``, ``C``
            and identical time axes.

    Returns:
        DataFrame with ``t`` plus ``{X}_mean``/``{X}_std`` columns for
        X in A, E, B, C (std uses pandas' default ``ddof=1``).
    """
    results = pd.DataFrame()
    per_species = {species: pd.DataFrame() for species in ('A', 'E', 'B', 'C')}
    for index, target in enumerate(targets):
        data = pd.read_csv(target)
        if index == 0:
            # Every run shares the same time axis; take it from the first file.
            results['t'] = data['t']
        for species, table in per_species.items():
            table[index] = data[species]
    for species, table in per_species.items():
        results[f'{species}_mean'] = table.mean(axis=1)
        results[f'{species}_std'] = table.std(axis=1)
    return results
def plot_final(data: pd.DataFrame, name: str = 'polymer'):
    """Plot mean particle counts with std error bars; save PNG and CSV.

    Args:
        data: DataFrame produced by ``gather_results`` (columns ``t`` plus
            ``{X}_mean``/``{X}_std`` for X in A, B, C, E).
        name: basename for the ``.png`` and ``.csv`` output files.
    """
    fig, ax = plt.subplots()
    # Thin out markers/error bars so dense time series stay readable.
    every = len(data) // 30 if len(data) > 50 else 5
    shared = dict(markevery=every, errorevery=every, capsize=2)
    for species, fmt in (('A', 'bx-'), ('B', 'ro-'), ('C', 'go-')):
        ax.errorbar(
            data['t'],
            data[f'{species}_mean'],
            yerr=data[f'{species}_std'],
            fmt=fmt,
            label=species,
            **shared
        )
    # Enzyme count is conserved, so a plain dotted line suffices.
    ax.plot(data['t'], data['E_mean'], 'k:', label='E')
    ax.set_xlabel('Timestep', fontsize='xx-large')
    ax.set_ylabel('N', fontsize='xx-large')
    ax.legend(frameon=False, fontsize='x-large')
    fig.tight_layout()
    fig.savefig(f'{name}.png')
    data.to_csv(f'{name}.csv', index=False)
    return
def main(
    settings: str,
    run: bool = False,
    seeds: int = 5,
    name: str = 'cube',
    **kwargs
):
    """Run ``seeds`` independent simulations, then average and plot them.

    Args:
        settings: path to a YAML file with the simulation parameters
            (must include 'timestep').
        run: actually execute the simulations; when False only the
            gather/plot stage runs against CSVs already on disk.
        seeds: number of independent repeats.
        name: basename used for all per-seed output files.
        **kwargs: accepted for CLI compatibility; currently unused.
    """
    logger.info('Running cube...')
    with open(settings, 'r') as f:
        parameters = yaml.safe_load(f)
    # insert code here
    for seed in range(1, seeds + 1, 1):
        prefix = f'{name}.{seed}'
        if run:
            traj = run_simulation(prefix, **parameters)
            analyse_trajectory(
                traj,
                output=f'{prefix}.csv',
                timestep=parameters['timestep']
            )
        else:
            logger.info('Skipping simulation because --run was not passed!')
            break
    # Average over every per-seed CSV matching the name pattern on disk.
    results = gather_results(Path().glob(f'{name}.*.csv'))
    logger.info(results)
    plot_final(results, name=name)
    logger.info('All Done!')
    logger.info('Done!')
    return
if __name__ == '__main__':
    import argparse

    # Command-line entry point: parse options and hand them to main().
    cli = argparse.ArgumentParser(
        description='Enzymatic reaction -A-A-A- + E -> xC + E using ReaDDy'
    )
    cli.add_argument('settings', default='settings.yml')
    cli.add_argument('--run', action='store_true')
    cli.add_argument('-s', '--seeds', type=int, default=5)
    cli.add_argument('-n', '--name', default='cube')
    main(**vars(cli.parse_args()))
|
[
"pandas.DataFrame",
"hydrogels.utils.topology.TopologyBond",
"argparse.ArgumentParser",
"hydrogels.utils.topology.Topology",
"pandas.read_csv",
"softnanotools.logger.Logger",
"hydrogels.utils.system.System",
"pathlib.Path",
"readdy.StructuralReactionRecipe",
"numpy.array",
"yaml.safe_load",
"numpy.random.random",
"numpy.random.rand",
"matplotlib.pyplot.subplots"
] |
[((750, 767), 'softnanotools.logger.Logger', 'Logger', (['"""POLYMER"""'], {}), "('POLYMER')\n", (756, 767), False, 'from softnanotools.logger import Logger\n'), ((1117, 1210), 'hydrogels.utils.topology.TopologyBond', 'TopologyBond', (['"""harmonic"""', 'monomer', 'monomer'], {'length': 'length', 'force_constant': 'force_constant'}), "('harmonic', monomer, monomer, length=length, force_constant=\n force_constant)\n", (1129, 1210), False, 'from hydrogels.utils.topology import Topology, TopologyBond\n'), ((1290, 1368), 'hydrogels.utils.topology.TopologyBond', 'TopologyBond', (['"""harmonic"""', 'monomer', 'unbonded'], {'length': 'length', 'force_constant': '(0.0)'}), "('harmonic', monomer, unbonded, length=length, force_constant=0.0)\n", (1302, 1368), False, 'from hydrogels.utils.topology import Topology, TopologyBond\n'), ((1453, 1532), 'hydrogels.utils.topology.TopologyBond', 'TopologyBond', (['"""harmonic"""', 'unbonded', 'unbonded'], {'length': 'length', 'force_constant': '(0.0)'}), "('harmonic', unbonded, unbonded, length=length, force_constant=0.0)\n", (1465, 1532), False, 'from hydrogels.utils.topology import Topology, TopologyBond\n'), ((3283, 3318), 'hydrogels.utils.system.System', 'System', (['[box, box, box]'], {'units': 'None'}), '([box, box, box], units=None)\n', (3289, 3318), False, 'from hydrogels.utils.system import System\n'), ((7113, 7131), 'pathlib.Path', 'Path', (['f"""{name}.h5"""'], {}), "(f'{name}.h5')\n", (7117, 7131), False, 'from pathlib import Path\n'), ((8850, 8871), 'pandas.DataFrame', 'pd.DataFrame', (['numbers'], {}), '(numbers)\n', (8862, 8871), True, 'import pandas as pd\n'), ((9022, 9036), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (9034, 9036), True, 'import pandas as pd\n'), ((9602, 9616), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (9614, 9616), True, 'import matplotlib.pyplot as plt\n'), ((11445, 11542), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Enzymatic 
reaction -A-A-A- + E -> xC + E using ReaDDy"""'}), "(description=\n 'Enzymatic reaction -A-A-A- + E -> xC + E using ReaDDy')\n", (11468, 11542), False, 'import argparse\n'), ((2334, 2513), 'numpy.array', 'np.array', (['[[x, y, z], [x + 1.0, y, z], [x + 1.0, y + 1.0, z], [x, y + 1.0, z], [x, y,\n z + 1.0], [x + 1.0, y, z + 1.0], [x + 1.0, y + 1.0, z + 1.0], [x, y + \n 1.0, z + 1.0]]'], {}), '([[x, y, z], [x + 1.0, y, z], [x + 1.0, y + 1.0, z], [x, y + 1.0, z\n ], [x, y, z + 1.0], [x + 1.0, y, z + 1.0], [x + 1.0, y + 1.0, z + 1.0],\n [x, y + 1.0, z + 1.0]])\n', (2342, 2513), True, 'import numpy as np\n'), ((2607, 2783), 'hydrogels.utils.topology.Topology', 'Topology', (['top_type'], {'sequence': '([monomer] * 8)', 'edges': '[(0, 1), (1, 2), (2, 3), (3, 0), (4, 5), (5, 6), (6, 7), (7, 4), (0, 4), (1,\n 5), (2, 6), (3, 7)]', 'positions': 'positions'}), '(top_type, sequence=[monomer] * 8, edges=[(0, 1), (1, 2), (2, 3), (\n 3, 0), (4, 5), (5, 6), (6, 7), (7, 4), (0, 4), (1, 5), (2, 6), (3, 7)],\n positions=positions)\n', (2615, 2783), False, 'from hydrogels.utils.topology import Topology, TopologyBond\n'), ((4027, 4068), 'readdy.StructuralReactionRecipe', 'readdy.StructuralReactionRecipe', (['topology'], {}), '(topology)\n', (4058, 4068), False, 'import readdy\n'), ((9062, 9076), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (9074, 9076), True, 'import pandas as pd\n'), ((9091, 9105), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (9103, 9105), True, 'import pandas as pd\n'), ((9120, 9134), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (9132, 9134), True, 'import pandas as pd\n'), ((9149, 9163), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (9161, 9163), True, 'import pandas as pd\n'), ((9227, 9246), 'pandas.read_csv', 'pd.read_csv', (['target'], {}), '(target)\n', (9238, 9246), True, 'import pandas as pd\n'), ((10746, 10763), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (10760, 10763), False, 'import yaml\n'), ((2287, 2306), 
'numpy.random.random', 'np.random.random', (['(3)'], {}), '(3)\n', (2303, 2306), True, 'import numpy as np\n'), ((8241, 8252), 'pathlib.Path', 'Path', (['fname'], {}), '(fname)\n', (8245, 8252), False, 'from pathlib import Path\n'), ((6254, 6290), 'numpy.random.rand', 'np.random.rand', (["kwargs['enzymes']", '(3)'], {}), "(kwargs['enzymes'], 3)\n", (6268, 6290), True, 'import numpy as np\n'), ((11228, 11234), 'pathlib.Path', 'Path', ([], {}), '()\n', (11232, 11234), False, 'from pathlib import Path\n')]
|
# uncompyle6 version 3.3.5
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\pushbase\note_layout_switcher.py
# Compiled at: 2018-11-30 15:48:12
from __future__ import absolute_import, print_function, unicode_literals
from ableton.v2.base import nop
from ableton.v2.control_surface import Component
from ableton.v2.control_surface.control import ButtonControl
from .message_box_component import Messenger
class ContextualButtonControl(ButtonControl):
    """ButtonControl that can be force-released when its context changes."""

    class State(ButtonControl.State):
        def on_context_changed(self):
            # Drop a held press so stale press state never leaks across contexts.
            if self.is_pressed:
                self._release_button()
class ModeSwitcherBase(Component, Messenger):
    """Base component that cycles through alternative modes and can lock one.

    Subclasses wire behaviour in by assigning the ``_cycle_mode``,
    ``_on_unlocked_release`` and ``_get_current_alternative_mode`` callables.
    """

    cycle_button = ContextualButtonControl()
    lock_button = ButtonControl()
    # Mode currently locked in place, or None when unlocked.
    locked_mode = None

    def __init__(self, *a, **k):
        super(ModeSwitcherBase, self).__init__(*a, **k)
        # Hooks to be replaced by subclasses; default to no-ops.
        self._cycle_mode = nop
        self._on_unlocked_release = nop
        self._get_current_alternative_mode = nop

    def release_alternative_layout(self):
        self.cycle_button.on_context_changed()

    def _should_unlock(self):
        return bool(self.locked_mode)

    # NOTE: the repeated `cycle_button` defs below are event-handler
    # registrations via the control's decorators, not redefinitions.
    @cycle_button.released_immediately
    def cycle_button(self, button):
        # Short press: unlock if locked, otherwise delegate to the subclass hook.
        if self._should_unlock():
            self._unlock_alternative_mode(self.locked_mode)
        else:
            self._on_unlocked_release()

    @cycle_button.pressed_delayed
    def cycle_button(self, button):
        # Long press cycles into the alternative mode...
        self._cycle_mode()

    @cycle_button.released_delayed
    def cycle_button(self, button):
        # ...and releasing after the delay cycles again.
        self._cycle_mode()

    @lock_button.pressed
    def lock_button(self, button):
        if not self._should_unlock():
            self._lock_alternative_mode(self._get_current_alternative_mode())

    def _lock_alternative_mode(self, mode):
        if mode:
            mode.cycle_mode()
            self.cycle_button.color = b'DefaultButton.Alert'
            self.locked_mode = mode
            if mode.get_mode_message():
                self.show_notification(mode.get_mode_message() + b': Locked')

    def _unlock_alternative_mode(self, locked_mode):
        if locked_mode:
            if locked_mode.get_mode_message():
                self.show_notification(locked_mode.get_mode_message() + b': Unlocked')
            locked_mode.cycle_mode(-1)
            self.cycle_button.color = b'DefaultButton.On'

    def on_enabled_changed(self):
        super(ModeSwitcherBase, self).on_enabled_changed()
        # A disable while the cycle button is held would otherwise leave the
        # component stuck mid-cycle.
        if not self.is_enabled() and self.cycle_button.is_pressed:
            self._cycle_mode()
class NoteLayoutSwitcher(ModeSwitcherBase):
    """Mode switcher for note layouts; persists lock state on the selected track."""

    def __init__(self, switch_note_mode_layout=None, get_current_alternative_layout_mode=None, *a, **k):
        assert switch_note_mode_layout is not None
        super(NoteLayoutSwitcher, self).__init__(*a, **k)
        self._get_current_alternative_mode = get_current_alternative_layout_mode
        self._cycle_mode = self._cycle_alternative_note_layout
        self._on_unlocked_release = switch_note_mode_layout
        return

    def _should_unlock(self):
        # Lock state lives on the selected track so it follows the track,
        # not this component instance.
        return bool(self.song.view.selected_track.get_data(b'alternative_mode_locked', False))

    def _lock_alternative_mode(self, mode):
        super(NoteLayoutSwitcher, self)._lock_alternative_mode(mode)
        if mode:
            self.song.view.selected_track.set_data(b'alternative_mode_locked', True)

    def _unlock_alternative_mode(self, _mode):
        # Ignore the passed mode: unlock whatever layout is currently active.
        super(NoteLayoutSwitcher, self)._unlock_alternative_mode(self._get_current_alternative_mode())
        self.song.view.selected_track.set_data(b'alternative_mode_locked', False)

    def _cycle_alternative_note_layout(self):
        cyclable_mode = self._get_current_alternative_mode()
        if cyclable_mode:
            cyclable_mode.cycle_mode()
|
[
"ableton.v2.control_surface.control.ButtonControl"
] |
[((886, 901), 'ableton.v2.control_surface.control.ButtonControl', 'ButtonControl', ([], {}), '()\n', (899, 901), False, 'from ableton.v2.control_surface.control import ButtonControl\n')]
|
import pytest
import os
import json
from injector import Injector
from datetime import timedelta
from backup.config import Config, Setting, VERSION, Version
from backup.util import DataCache, UpgradeFlags, KEY_CREATED, KEY_LAST_SEEN, CACHE_EXPIRATION_DAYS
from backup.time import Time
@pytest.mark.asyncio
async def test_read_and_write(config: Config, time: Time) -> None:
    """A dirty cache persists to disk and reloads with the same contents."""
    cache = DataCache(config, time)
    assert len(cache.snapshots) == 0
    cache.snapshot("test")[KEY_CREATED] = time.now().isoformat()
    # Mutating a snapshot does not mark the cache dirty by itself.
    assert not cache._dirty
    cache.makeDirty()
    assert cache._dirty
    cache.saveIfDirty()
    assert not cache._dirty
    # A fresh cache instance reads back exactly what was saved.
    # NOTE(review): assumes the `time` fixture is frozen so both time.now()
    # calls return the same instant — confirm against the fixture.
    cache = DataCache(config, time)
    assert cache.snapshot("test")[KEY_CREATED] == time.now().isoformat()
    assert not cache._dirty
@pytest.mark.asyncio
async def test_snapshot_expiration(config: Config, time: Time) -> None:
    """Snapshots unseen for longer than the expiration window are dropped on save."""
    cache = DataCache(config, time)
    assert len(cache.snapshots) == 0
    cache.snapshot("new")[KEY_LAST_SEEN] = time.now().isoformat()
    # One snapshot last seen just beyond the expiration cutoff.
    cache.snapshot("old")[KEY_LAST_SEEN] = (
        time.now() - timedelta(days=CACHE_EXPIRATION_DAYS + 1)).isoformat()
    cache.makeDirty()
    cache.saveIfDirty()
    assert len(cache.snapshots) == 1
    assert "new" in cache.snapshots
    assert "old" not in cache.snapshots
@pytest.mark.asyncio
async def test_version_upgrades(time: Time, injector: Injector, config: Config) -> None:
    """Upgrade history is recorded once per version change and queryable later."""
    # Simulate upgrading from an un-tracked version
    assert not os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
    cache = injector.get(DataCache)
    upgrade_time = time.now()
    assert cache.previousVersion == Version.default()
    assert cache.currentVersion == Version.parse(VERSION)
    assert cache.checkFlag(UpgradeFlags.DONT_IGNORE_LEGACY_SNAPSHOTS)
    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }]

    # Reload the data cache, verify there is no upgrade.
    time.advance(days=1)
    cache = DataCache(config, time)
    assert cache.previousVersion == Version.parse(VERSION)
    assert cache.currentVersion == Version.parse(VERSION)
    assert not cache.checkFlag(UpgradeFlags.DONT_IGNORE_LEGACY_SNAPSHOTS)
    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }]

    # Simulate upgrading to a new version, verify an upgrade gets identified.
    upgrade_version = Version.parse("200")

    # Subclass that pretends the running add-on is version 200.
    class UpgradeCache(DataCache):
        def __init__(self):
            super().__init__(config, time)

        @property
        def currentVersion(self):
            return upgrade_version

    cache = UpgradeCache()
    assert cache.previousVersion == Version.parse(VERSION)
    assert cache.currentVersion == upgrade_version
    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [
            {
                "prev_version": str(Version.default()),
                "new_version": VERSION,
                "date": upgrade_time.isoformat()
            },
            {
                "prev_version": VERSION,
                "new_version": str(upgrade_version),
                "date": time.now().isoformat()
            }
        ]
    next_upgrade_time = time.now()
    time.advance(days=1)

    # Verify version upgrade time queries work as expected
    assert cache.getUpgradeTime(Version.parse(VERSION)) == upgrade_time
    assert cache.getUpgradeTime(Version.default()) == upgrade_time
    assert cache.getUpgradeTime(upgrade_version) == next_upgrade_time
    # degenerate case, should never happen but a sensible value needs to be returned
    assert cache.getUpgradeTime(Version.parse("201")) == time.now()
|
[
"json.load",
"backup.util.DataCache",
"backup.config.Version.default",
"datetime.timedelta",
"backup.config.Version.parse"
] |
[((387, 410), 'backup.util.DataCache', 'DataCache', (['config', 'time'], {}), '(config, time)\n', (396, 410), False, 'from backup.util import DataCache, UpgradeFlags, KEY_CREATED, KEY_LAST_SEEN, CACHE_EXPIRATION_DAYS\n'), ((653, 676), 'backup.util.DataCache', 'DataCache', (['config', 'time'], {}), '(config, time)\n', (662, 676), False, 'from backup.util import DataCache, UpgradeFlags, KEY_CREATED, KEY_LAST_SEEN, CACHE_EXPIRATION_DAYS\n'), ((885, 908), 'backup.util.DataCache', 'DataCache', (['config', 'time'], {}), '(config, time)\n', (894, 908), False, 'from backup.util import DataCache, UpgradeFlags, KEY_CREATED, KEY_LAST_SEEN, CACHE_EXPIRATION_DAYS\n'), ((2215, 2238), 'backup.util.DataCache', 'DataCache', (['config', 'time'], {}), '(config, time)\n', (2224, 2238), False, 'from backup.util import DataCache, UpgradeFlags, KEY_CREATED, KEY_LAST_SEEN, CACHE_EXPIRATION_DAYS\n'), ((2872, 2892), 'backup.config.Version.parse', 'Version.parse', (['"""200"""'], {}), "('200')\n", (2885, 2892), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((1633, 1650), 'backup.config.Version.default', 'Version.default', ([], {}), '()\n', (1648, 1650), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((1686, 1708), 'backup.config.Version.parse', 'Version.parse', (['VERSION'], {}), '(VERSION)\n', (1699, 1708), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((1925, 1937), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1934, 1937), False, 'import json\n'), ((2275, 2297), 'backup.config.Version.parse', 'Version.parse', (['VERSION'], {}), '(VERSION)\n', (2288, 2297), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((2333, 2355), 'backup.config.Version.parse', 'Version.parse', (['VERSION'], {}), '(VERSION)\n', (2346, 2355), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((2576, 2588), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2585, 2588), False, 'import 
json\n'), ((3152, 3174), 'backup.config.Version.parse', 'Version.parse', (['VERSION'], {}), '(VERSION)\n', (3165, 3174), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((3372, 3384), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3381, 3384), False, 'import json\n'), ((3927, 3949), 'backup.config.Version.parse', 'Version.parse', (['VERSION'], {}), '(VERSION)\n', (3940, 3949), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((3999, 4016), 'backup.config.Version.default', 'Version.default', ([], {}), '()\n', (4014, 4016), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((4222, 4242), 'backup.config.Version.parse', 'Version.parse', (['"""201"""'], {}), "('201')\n", (4235, 4242), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((1079, 1120), 'datetime.timedelta', 'timedelta', ([], {'days': '(CACHE_EXPIRATION_DAYS + 1)'}), '(days=CACHE_EXPIRATION_DAYS + 1)\n', (1088, 1120), False, 'from datetime import timedelta\n'), ((2008, 2025), 'backup.config.Version.default', 'Version.default', ([], {}), '()\n', (2023, 2025), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((2659, 2676), 'backup.config.Version.default', 'Version.default', ([], {}), '()\n', (2674, 2676), False, 'from backup.config import Config, Setting, VERSION, Version\n'), ((3472, 3489), 'backup.config.Version.default', 'Version.default', ([], {}), '()\n', (3487, 3489), False, 'from backup.config import Config, Setting, VERSION, Version\n')]
|
import healpy as hp
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
import scipy.special as spc
import math
import matplotlib as mpl
from scipy.special import lpmn
import scipy.integrate as integrate
from scipy.integrate import quad
from numpy import sin, cos
from matplotlib.cm import ScalarMappable
import random

# Map resolution: nside=64 -> 49152 pixels.
nside = 64
npix = hp.nside2npix(nside)

SIZE = 400
DPI = 100

# Build an isotropic (uniform random) sky map, normalised so the mean pixel
# value is 1: `events` simulated events with `mult` particles each, every
# particle landing in a uniformly random pixel.
events = 8000
mult = 2500

# Draw all events*mult pixel hits at once and histogram them per pixel.
# This replaces a 20-million-iteration Python loop and also fixes the use of
# the `np.float` alias, which was removed in NumPy 1.24 (use float64).
hits = np.random.randint(0, npix, size=events * mult)
hpxmap2 = np.bincount(hits, minlength=npix).astype(np.float64) * npix / (mult * events)

#hp_smoothed = hp.sphtfunc.smoothing(hpxmap2, fwhm=np.radians(1), iter = 1)
hp.mollview(hpxmap2, cmap=cm.jet, xsize=SIZE, min=0.9, max=1.1, title='Isotropic randomised')
hp.graticule()
plt.savefig("map_iso.png", dpi=DPI)
|
[
"random.randint",
"healpy.mollview",
"healpy.graticule",
"numpy.zeros",
"healpy.nside2npix",
"matplotlib.pyplot.savefig"
] |
[((365, 385), 'healpy.nside2npix', 'hp.nside2npix', (['nside'], {}), '(nside)\n', (378, 385), True, 'import healpy as hp\n'), ((419, 449), 'numpy.zeros', 'np.zeros', (['npix'], {'dtype': 'np.float'}), '(npix, dtype=np.float)\n', (427, 449), True, 'import numpy as np\n'), ((728, 826), 'healpy.mollview', 'hp.mollview', (['hpxmap2'], {'cmap': 'cm.jet', 'xsize': 'SIZE', 'min': '(0.9)', 'max': '(1.1)', 'title': '"""Isotropic randomised"""'}), "(hpxmap2, cmap=cm.jet, xsize=SIZE, min=0.9, max=1.1, title=\n 'Isotropic randomised')\n", (739, 826), True, 'import healpy as hp\n'), ((831, 845), 'healpy.graticule', 'hp.graticule', ([], {}), '()\n', (843, 845), True, 'import healpy as hp\n'), ((846, 881), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""map_iso.png"""'], {'dpi': 'DPI'}), "('map_iso.png', dpi=DPI)\n", (857, 881), True, 'import matplotlib.pyplot as plt\n'), ((542, 569), 'random.randint', 'random.randint', (['(0)', '(npix - 1)'], {}), '(0, npix - 1)\n', (556, 569), False, 'import random\n')]
|
import speech_recognition as sr
import pyttsx3
import os

# Recognizer used for all microphone input below.
speech = sr.Recognizer()

try:
    engine = pyttsx3.init()
except ImportError:
    print("Requested driver not found")
    # Re-raise: continuing without `engine` would crash later with a
    # confusing NameError at engine.getProperty().
    raise
except RuntimeError:
    print("Driver could not be initialized")
    raise

voices = engine.getProperty('voices')
# Raw string: the registry path contains backslashes (\S, \M, ...) that must
# not be interpreted as escape sequences.
engine.setProperty('voice', r'HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Speech\Voices\Tokens\TTS_MS_EN-US_ZIRA_11.0')
rate = engine.getProperty('rate')
engine.setProperty('rate', rate)
def speak_text_cmd(cmd):
    """Speak ``cmd`` aloud via the module-level TTS engine (blocks until done)."""
    engine.say(cmd)
    engine.runAndWait()
def read_voice_cmd():
    """Record one utterance from the microphone and transcribe it.

    Returns:
        The recognised text, or '' when the speech was unintelligible or
        the recognition service could not be reached.
    """
    print('Listening.....')
    with sr.Microphone() as source:
        audio = speech.listen(source)
    try:
        return speech.recognize_google(audio)
    except sr.UnknownValueError:
        # Nothing intelligible was said — treat as empty input.
        return ''
    except sr.RequestError:
        print('Network error')
        return ''
if __name__ == '__main__':
    speak_text_cmd('Hello sir . This is JARVIS your A.I.')
    # Simple command loop: listen, match a keyword, act.
    while True:
        voice_note = read_voice_cmd()
        print('cmd : {}'.format(voice_note))
        if 'hello' in voice_note:
            speak_text_cmd('Hello sir,How may I help you?')
            continue
        elif 'open' in voice_note:
            # BUG FIX: the keyword is matched lower-case ('open'), so it must
            # also be stripped lower-case — replace('Open', ...) never matched.
            os.system('explorer C:// {}'.format(voice_note.replace('open', '')))
            continue
        elif 'close' in voice_note:
            os.system('explorer C:// {}'.format(voice_note.replace('close', '')))
        elif 'bye' in voice_note:
            speak_text_cmd('Goodbye sir. Hope I was helpful to you. Have a nice day.')
            exit()
|
[
"pyttsx3.init",
"speech_recognition.Recognizer",
"speech_recognition.Microphone"
] |
[((67, 82), 'speech_recognition.Recognizer', 'sr.Recognizer', ([], {}), '()\n', (80, 82), True, 'import speech_recognition as sr\n'), ((101, 115), 'pyttsx3.init', 'pyttsx3.init', ([], {}), '()\n', (113, 115), False, 'import pyttsx3\n'), ((606, 621), 'speech_recognition.Microphone', 'sr.Microphone', ([], {}), '()\n', (619, 621), True, 'import speech_recognition as sr\n')]
|
# Generated by Django 3.0.7 on 2020-07-15 22:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: tighten Tenant field definitions (FKs, uniqueness, null-ness)."""

    dependencies = [
        ('properties', '0013_monthlymaintenance_renovationteam_renovationteamexpenses'),
        # Depend on whichever model the project configured as the user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('tenants', '0006_auto_20200715_2254'),
    ]

    operations = [
        migrations.AlterField(
            model_name='tenant',
            name='apartment',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', to='properties.Property'),
        ),
        migrations.AlterField(
            model_name='tenant',
            name='creator',
            # SET_NULL + null=True keeps the tenant row when its creator is deleted.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='tenant',
            name='email',
            field=models.EmailField(max_length=254, null=True, unique=True),
        ),
        migrations.AlterField(
            model_name='tenant',
            name='is_active',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='tenant',
            name='phone',
            field=models.CharField(max_length=20, null=True, unique=True),
        ),
    ]
|
[
"django.db.migrations.swappable_dependency",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.EmailField"
] |
[((316, 373), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (347, 373), False, 'from django.db import migrations, models\n'), ((561, 678), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""reviews"""', 'to': '"""properties.Property"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='reviews', to='properties.Property')\n", (578, 678), False, 'from django.db import migrations, models\n'), ((796, 903), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': 'settings.AUTH_USER_MODEL'}), '(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to=settings.AUTH_USER_MODEL)\n', (813, 903), False, 'from django.db import migrations, models\n'), ((1020, 1077), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(254)', 'null': '(True)', 'unique': '(True)'}), '(max_length=254, null=True, unique=True)\n', (1037, 1077), False, 'from django.db import migrations, models\n'), ((1202, 1236), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1221, 1236), False, 'from django.db import migrations, models\n'), ((1357, 1412), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'null': '(True)', 'unique': '(True)'}), '(max_length=20, null=True, unique=True)\n', (1373, 1412), False, 'from django.db import migrations, models\n')]
|
# Generated by Django 2.2.1 on 2019-05-28 09:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make Task result fields non-editable text columns."""

    dependencies = [
        ('tasks', '0009_auto_20190528_0936'),
    ]

    operations = [
        migrations.AlterField(
            model_name='task',
            name='figure',
            field=models.TextField(blank=True, editable=False, null=True),
        ),
        migrations.AlterField(
            model_name='task',
            name='score',
            field=models.TextField(blank=True, editable=False, null=True),
        ),
        migrations.AlterField(
            model_name='task',
            name='time',
            field=models.TextField(blank=True, editable=False, null=True),
        ),
    ]
|
[
"django.db.models.TextField"
] |
[((332, 387), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'editable': '(False)', 'null': '(True)'}), '(blank=True, editable=False, null=True)\n', (348, 387), False, 'from django.db import migrations, models\n'), ((506, 561), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'editable': '(False)', 'null': '(True)'}), '(blank=True, editable=False, null=True)\n', (522, 561), False, 'from django.db import migrations, models\n'), ((679, 734), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'editable': '(False)', 'null': '(True)'}), '(blank=True, editable=False, null=True)\n', (695, 734), False, 'from django.db import migrations, models\n')]
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
from pathlib import Path
import yaml
from rich.console import Console
# Fail fast when imported: this file is a script, not a library module.
if __name__ not in ("__main__", "__mp_main__"):
    raise SystemExit(
        "This file is intended to be executed as an executable program. You cannot use it as a module."
        f"To run this script, run the ./{__file__} command [FILE] ..."
    )

# Shared console for colourised diagnostics (fixed width keeps CI logs stable).
console = Console(color_system="standard", width=200)
def check_file(the_file: Path) -> int:
    """Return the number of wrong checkout instructions in a workflow file.

    A checkout step is wrong when it has no ``with`` clause, no
    ``persist-credentials`` key, or ``persist-credentials`` set to a truthy
    value.

    Args:
        the_file: path to a GitHub Actions workflow YAML file.

    Returns:
        Number of offending ``actions/checkout`` steps found.
    """
    error_num = 0
    res = yaml.safe_load(the_file.read_text())
    console.print(f"Checking file [yellow]{the_file}[/]")
    for job in res['jobs'].values():
        for step in job['steps']:
            uses = step.get('uses')
            # Only checkout actions need the persist-credentials guard.
            if uses is None or not uses.startswith('actions/checkout'):
                continue
            pretty_step = yaml.safe_dump(step, indent=2)
            with_clause = step.get('with')
            if with_clause is None:
                console.print(f"\n[red]The `with` clause is missing in step:[/]\n\n{pretty_step}")
                error_num += 1
                continue
            persist_credentials = with_clause.get("persist-credentials")
            if persist_credentials is None:
                console.print(
                    "\n[red]The `with` clause does not have persist-credentials in step:[/]"
                    f"\n\n{pretty_step}"
                )
                error_num += 1
            elif persist_credentials:
                console.print(
                    "\n[red]The `with` clause have persist-credentials=True in step:[/]"
                    f"\n\n{pretty_step}"
                )
                error_num += 1
    return error_num
if __name__ == '__main__':
    total_err_num = 0
    # Sum problems across every workflow file given on the command line.
    for a_file in sys.argv[1:]:
        total_err_num += check_file(Path(a_file))
    if total_err_num:
        # Message fixes: doubled "are are" and the key name is spelled
        # `persist-credentials` (hyphen) in workflow files.
        console.print(
            """
[red]There are some checkout instructions in github workflows that have no "persist-credentials"
set to False.[/]

For security reasons - make sure all of the checkout actions have persist-credentials set, similar to:

- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
  uses: actions/checkout@v2
  with:
    persist-credentials: false
"""
        )
        sys.exit(1)
|
[
"rich.console.Console",
"pathlib.Path",
"sys.exit",
"yaml.safe_dump"
] |
[((1153, 1196), 'rich.console.Console', 'Console', ([], {'color_system': '"""standard"""', 'width': '(200)'}), "(color_system='standard', width=200)\n", (1160, 1196), False, 'from rich.console import Console\n'), ((3261, 3272), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3269, 3272), False, 'import sys\n'), ((1571, 1601), 'yaml.safe_dump', 'yaml.safe_dump', (['step'], {'indent': '(2)'}), '(step, indent=2)\n', (1585, 1601), False, 'import yaml\n'), ((2806, 2818), 'pathlib.Path', 'Path', (['a_file'], {}), '(a_file)\n', (2810, 2818), False, 'from pathlib import Path\n')]
|
import torch
from torch import nn
import torch.nn.functional as F
from torchvision import datasets, models
#import numpy as np
#import nn_model
# load a pre-trained network
def load_pretrained_model(arch='vgg'):
    """Load a pre-trained torchvision model and freeze its weights.

    Args:
        arch: 'vgg' (VGG-16, 25088 classifier inputs) or
            'densenet' (DenseNet-121, 1024 classifier inputs).

    Returns:
        (model, input_size): the frozen feature extractor and the number of
        input features its classifier expects.

    Raises:
        ValueError: if ``arch`` is not a supported architecture.
    """
    if arch == 'vgg':
        model = models.vgg16(pretrained=True)
        input_size = 25088
    elif arch == 'densenet':
        model = models.densenet121(pretrained=True)
        input_size = 1024
    else:
        # BUG FIX: an unknown arch previously fell through and crashed later
        # with a NameError on `model`; fail early with a clear message.
        raise ValueError(f"Unsupported architecture: {arch!r} (expected 'vgg' or 'densenet')")
    # Freeze all parameters so backward() does not compute their gradients.
    for param in model.parameters():
        param.requires_grad = False
    return model, input_size
# Builds a feedforward network with arbitrary hidden layers
class Network(nn.Module):
    """Feed-forward classifier with a configurable stack of hidden layers.

    Each hidden layer is Linear -> ReLU -> Dropout; the output layer feeds a
    log-softmax, so pair this network with ``nn.NLLLoss``.
    """

    def __init__(self, input_size, output_size, hidden_layers, drop_p=0.4):
        """Build the layer stack.

        Arguments
        ---------
        input_size: integer, size of the input layer
        output_size: integer, size of the output layer
        hidden_layers: list of integers, the sizes of the hidden layers
        drop_p: dropout probability applied after every hidden layer
        """
        super().__init__()
        # Chain the layer sizes: input -> hidden[0] -> ... -> hidden[-1].
        sizes = [input_size] + list(hidden_layers)
        self.hidden_layers = nn.ModuleList(
            nn.Linear(n_in, n_out) for n_in, n_out in zip(sizes[:-1], sizes[1:])
        )
        self.output = nn.Linear(sizes[-1], output_size)
        self.dropout = nn.Dropout(p=drop_p)

    def forward(self, x):
        """Forward pass through the network; returns per-class log-probabilities."""
        for layer in self.hidden_layers:
            x = self.dropout(F.relu(layer(x)))
        return F.log_softmax(self.output(x), dim=1)
# TODO: Save the checkpoint
def save_model_checkpoint(model, input_size, epochs, save_dir, arch, hidden_layers, learning_rate, drop, optimizer, output_size):
"""
Save trained model as checkpoint file.
Parameters:
model - Previously trained and tested CNN model
input_size - Input size of CNN model
epochs - Nr of epochs used to train the CNN
save_dir - Directory to save the checkpoint file(default- current path)
arch - Architecture choosen (Vgg or AlexNet)
hidden_layers - Nr of hidden units
learning_rate
drop
optimizer
output_size
Returns:
None
"""
#model.class_to_idx = image_datasets['train'].class_to_idx
# Save Checkpoint: input, output, hidden layer, epochs, learning rate, model, optimizer, arch,drop and state_dict sure.
checkpoint = {'input_size': input_size,
'output_size': output_size,
'hidden_layers': hidden_layers,
'drop': drop,
'epochs': epochs,
'learning_rate': learning_rate,
'arch': arch,
'optimizer': optimizer.state_dict,
'class_to_idx': model.class_to_idx,
'state_dict': model.state_dict()}
torch.save(checkpoint, save_dir + '/checkpoint.pth')
print('Model checkpoint stored at {}'.format(save_dir))
# TODO: Write a function that loads a checkpoint and rebuilds the model
def load_model_checkpoint(filepath, isgpu):
device = torch.device("cuda:0" if isgpu is True else "cpu")
if device == "cuda:0":
map_loc = 'cuda:0'
else:
map_loc = 'cpu'
checkpoint = torch.load(filepath, map_location=map_loc)
# load a pretrained network
arch = checkpoint['arch']
if arch == 'vgg':
model = getattr(models, "vgg16")(pretrained=True)
elif arch == 'densenet':
model = getattr(models, "densenet121")(pretrained=True)
# Re-build the model
classifier = Network(checkpoint['input_size'],
checkpoint['output_size'],
checkpoint['hidden_layers'],
checkpoint['drop'])
model.classifier = classifier
model.load_state_dict(checkpoint['state_dict'])
model.optimizer = checkpoint['optimizer']
model.epochs = checkpoint['epochs']
model.learning_rate = checkpoint['learning_rate']
model.class_to_idx = checkpoint['class_to_idx']
model.to(device)
return model
|
[
"torch.nn.Dropout",
"torch.load",
"torchvision.models.densenet121",
"torch.save",
"torch.nn.functional.log_softmax",
"torch.nn.Linear",
"torch.device",
"torchvision.models.vgg16"
] |
[((3062, 3114), 'torch.save', 'torch.save', (['checkpoint', "(save_dir + '/checkpoint.pth')"], {}), "(checkpoint, save_dir + '/checkpoint.pth')\n", (3072, 3114), False, 'import torch\n'), ((3307, 3357), 'torch.device', 'torch.device', (["('cuda:0' if isgpu is True else 'cpu')"], {}), "('cuda:0' if isgpu is True else 'cpu')\n", (3319, 3357), False, 'import torch\n'), ((3465, 3507), 'torch.load', 'torch.load', (['filepath'], {'map_location': 'map_loc'}), '(filepath, map_location=map_loc)\n', (3475, 3507), False, 'import torch\n'), ((254, 283), 'torchvision.models.vgg16', 'models.vgg16', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (266, 283), False, 'from torchvision import datasets, models\n'), ((1389, 1430), 'torch.nn.Linear', 'nn.Linear', (['hidden_layers[-1]', 'output_size'], {}), '(hidden_layers[-1], output_size)\n', (1398, 1430), False, 'from torch import nn\n'), ((1455, 1475), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': 'drop_p'}), '(p=drop_p)\n', (1465, 1475), False, 'from torch import nn\n'), ((1731, 1754), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['x'], {'dim': '(1)'}), '(x, dim=1)\n', (1744, 1754), True, 'import torch.nn.functional as F\n'), ((356, 391), 'torchvision.models.densenet121', 'models.densenet121', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (374, 391), False, 'from torchvision import datasets, models\n'), ((1123, 1162), 'torch.nn.Linear', 'nn.Linear', (['input_size', 'hidden_layers[0]'], {}), '(input_size, hidden_layers[0])\n', (1132, 1162), False, 'from torch import nn\n'), ((1320, 1337), 'torch.nn.Linear', 'nn.Linear', (['h1', 'h2'], {}), '(h1, h2)\n', (1329, 1337), False, 'from torch import nn\n')]
|
from moto import mock_dynamodb2
import boto3
import os
import time
phone = "+15551234567"
mock_verification = {
'pk': phone,
'sk': 'verificationCode#123456',
'expiration': str(int(time.time() + 1200))
}
@mock_dynamodb2
def setup_mocks():
dynamodb = boto3.resource('dynamodb')
table = dynamodb.create_table(
TableName= os.environ['TABLE_NAME'],
KeySchema=[
{
'AttributeName': 'pk',
'KeyType': 'HASH' # primary key
},
{
'AttributeName': 'sk',
'KeyType': 'RANGE' # primary key
}
],
AttributeDefinitions=[
{
'AttributeName': 'pk',
'AttributeType': 'S'
},
{
'AttributeName': 'sk',
'AttributeType': 'S'
},
],
)
|
[
"boto3.resource",
"time.time"
] |
[((270, 296), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (284, 296), False, 'import boto3\n'), ((196, 207), 'time.time', 'time.time', ([], {}), '()\n', (205, 207), False, 'import time\n')]
|
from django.conf import settings
from django.urls import reverse
from django.utils.translation import ugettext as _
from django_webtest import WebTest
from brouwers.albums.tests.factories import PhotoFactory
from brouwers.forum_tools.tests.factories import ForumUserFactory, TopicFactory
from brouwers.kits.tests.factories import ModelKitFactory
from brouwers.users.tests.factories import UserFactory
from brouwers.utils.tests.mixins import LoginRequiredMixin, WebTestFormMixin
from ..models import Build
from .factories import BuildFactory, BuildPhotoFactory
class ViewTests(WebTestFormMixin, LoginRequiredMixin, WebTest):
def setUp(self):
self.user = UserFactory.create()
self.builds = BuildFactory.create_batch(5)
def test_index(self):
url = reverse("builds:index")
index = self.app.get(url, status=200)
builds = index.context["builds"]
expected_builds = self.builds
expected_builds.reverse()
self.assertQuerysetEqual(builds, [repr(x) for x in expected_builds])
def test_user_list(self):
user_builds = BuildFactory.create_batch(2, user=self.user)
index_url = reverse("builds:index")
url = reverse("builds:user_build_list", kwargs={"user_id": self.user.id})
# anonymous
index = self.app.get(index_url, status=200)
self.assertNotContains(index, _("My builds"))
# authenticated
index = self.app.get(index_url, status=200, user=self.user)
self.assertContains(index, _("My builds"))
self.assertContains(index, url)
my_builds = index.click(_("My builds"))
self.assertEqual(my_builds.status_code, 200)
self.assertQuerysetEqual(
my_builds.context["builds"], reversed([repr(x) for x in user_builds])
)
self.assertEqual(my_builds.context["request"].path, url)
def test_detail(self):
build = BuildFactory.create()
detail = self.app.get(build.get_absolute_url(), status=200)
self.assertEqual(detail.context["build"], build)
def test_create(self):
url = reverse("builds:create")
index = self.app.get(reverse("builds:index"), status=200)
# anonymous
response = index.click(_("Add build"))
self._test_login_required(url, response)
# authenticated
add = self.app.get(url, user=self.user, status=200)
form = add.forms[0]
form["title"] = "My new build"
self.assertEqual(form["photos-TOTAL_FORMS"].value, "0")
self.assertEqual(form["photos-INITIAL_FORMS"].value, "0")
form["photos-TOTAL_FORMS"] = "2" # add two photos
photos = PhotoFactory.create_batch(2, user=self.user)
self._add_field(form, "photos-0-id", "")
self._add_field(form, "photos-0-build", "")
self._add_field(form, "photos-0-photo", "{}".format(photos[0].pk))
self._add_field(form, "photos-0-photo_url", "")
self._add_field(form, "photos-0-order", "")
self._add_field(form, "photos-1-id", "")
self._add_field(form, "photos-1-build", "")
self._add_field(form, "photos-1-photo", "")
url = "https://modelbrouwers.nl%s" % photos[1].image.url
self._add_field(form, "photos-1-photo_url", url)
self._add_field(form, "photos-1-order", "")
# add some kits
kits = ModelKitFactory.create_batch(2)
for kit in kits:
self._add_field(form, "kits", str(kit.pk))
response = form.submit()
build = Build.objects.order_by("-pk").first()
self.assertRedirects(response, build.get_absolute_url())
self.assertEqual(build.photos.count(), 2)
self.assertEqual(build.title, "My new build")
self.assertEqual(build.user, self.user)
self.assertEqual(build.kits.count(), 2)
def test_update(self):
"""
Tests that updating builds works as expected.
It should be possible to add/remove kits of a build
"""
kits = ModelKitFactory.create_batch(2)
build = BuildFactory.create(user=self.user, kits=kits)
build_photo = BuildPhotoFactory.create(build=build)
url = reverse("builds:update", kwargs={"slug": build.slug})
# test that non-auth can't update
response = self.app.get(url)
self.assertRedirects(response, "{}?next={}".format(settings.LOGIN_URL, url))
# test that different user can't update
other_user = UserFactory.create()
self.app.get(url, user=other_user, status=404)
# owner
page = self.app.get(url, user=self.user, status=200)
# kits field is filled in by React component - make sure we can re-write it
form = page.forms[0]
self._add_field(form, "kits", kits[1].pk)
# test add photo
self.assertEqual(form["photos-TOTAL_FORMS"].value, "1")
self.assertEqual(form["photos-INITIAL_FORMS"].value, "1")
photo = PhotoFactory.create(user=self.user)
form["photos-TOTAL_FORMS"] = "2"
self._add_field(form, "photos-1-id", "")
self._add_field(form, "photos-1-build", "")
self._add_field(form, "photos-1-photo", "{}".format(photo.pk))
self._add_field(form, "photos-1-photo_url", "")
self._add_field(form, "photos-1-order", "")
# test delete photo
form["photos-0-DELETE"].checked = True
redirect = form.submit()
self.assertRedirects(redirect, build.get_absolute_url())
build.refresh_from_db()
_kits = build.kits.all()
self.assertEqual(_kits.count(), 1)
self.assertFalse(_kits.filter(pk=kits[0].pk).exists())
# check photos
self.assertEqual(build.photos.count(), 1)
self.assertNotEqual(build.photos.get(), build_photo)
def test_create_from_external(self):
"""
Asserts that the button with prefilled fields works correctly.
"""
topic = TopicFactory.create()
url = "{}?forum_id={}&topic_id={}&title=Dummy%20title".format(
reverse("builds:create"), topic.forum.pk, topic.pk
)
page = self.app.get(url, user=self.user, status=200)
form = page.forms[0]
self.assertEqual(form["title"].value, "Dummy title")
self.assertTrue(
form["topic"].value.endswith("viewtopic.php?t={}".format(topic.pk))
)
class ForumUserViewTests(WebTest):
"""
Asserts that the url to view a forum users's build works as expected.
"""
def setUp(self):
# user - forumuser linked by id
self.user = UserFactory.create()
self.forum_user = ForumUserFactory.create(username=self.user.username)
self.user.forumuser_id = self.forum_user.pk
self.user.save()
# user -forumuser linked by username
self.user2 = UserFactory.create()
self.forum_user2 = ForumUserFactory.create(username=self.user2.username)
# forum user without user
self.forum_user3 = ForumUserFactory.create()
def test_correct_redirects(self):
def get_forumuser_url(forum_user):
return reverse("builds:forum_user_build_list", kwargs={"pk": forum_user.pk})
def get_expected_url(user):
return reverse("builds:user_build_list", kwargs={"user_id": user.id})
response = self.app.get(get_forumuser_url(self.forum_user))
self.assertRedirects(response, get_expected_url(self.user), status_code=301)
self.assertIsNone(self.user2.forumuser_id)
response = self.app.get(get_forumuser_url(self.forum_user2))
self.assertRedirects(response, get_expected_url(self.user2), status_code=301)
# expected 404
self.app.get(get_forumuser_url(self.forum_user3), status=404)
|
[
"brouwers.users.tests.factories.UserFactory.create",
"brouwers.albums.tests.factories.PhotoFactory.create",
"brouwers.forum_tools.tests.factories.TopicFactory.create",
"brouwers.kits.tests.factories.ModelKitFactory.create_batch",
"django.urls.reverse",
"brouwers.albums.tests.factories.PhotoFactory.create_batch",
"django.utils.translation.ugettext",
"brouwers.forum_tools.tests.factories.ForumUserFactory.create"
] |
[((670, 690), 'brouwers.users.tests.factories.UserFactory.create', 'UserFactory.create', ([], {}), '()\n', (688, 690), False, 'from brouwers.users.tests.factories import UserFactory\n'), ((783, 806), 'django.urls.reverse', 'reverse', (['"""builds:index"""'], {}), "('builds:index')\n", (790, 806), False, 'from django.urls import reverse\n'), ((1161, 1184), 'django.urls.reverse', 'reverse', (['"""builds:index"""'], {}), "('builds:index')\n", (1168, 1184), False, 'from django.urls import reverse\n'), ((1199, 1266), 'django.urls.reverse', 'reverse', (['"""builds:user_build_list"""'], {'kwargs': "{'user_id': self.user.id}"}), "('builds:user_build_list', kwargs={'user_id': self.user.id})\n", (1206, 1266), False, 'from django.urls import reverse\n'), ((2103, 2127), 'django.urls.reverse', 'reverse', (['"""builds:create"""'], {}), "('builds:create')\n", (2110, 2127), False, 'from django.urls import reverse\n'), ((2673, 2717), 'brouwers.albums.tests.factories.PhotoFactory.create_batch', 'PhotoFactory.create_batch', (['(2)'], {'user': 'self.user'}), '(2, user=self.user)\n', (2698, 2717), False, 'from brouwers.albums.tests.factories import PhotoFactory\n'), ((3371, 3402), 'brouwers.kits.tests.factories.ModelKitFactory.create_batch', 'ModelKitFactory.create_batch', (['(2)'], {}), '(2)\n', (3399, 3402), False, 'from brouwers.kits.tests.factories import ModelKitFactory\n'), ((4019, 4050), 'brouwers.kits.tests.factories.ModelKitFactory.create_batch', 'ModelKitFactory.create_batch', (['(2)'], {}), '(2)\n', (4047, 4050), False, 'from brouwers.kits.tests.factories import ModelKitFactory\n'), ((4189, 4242), 'django.urls.reverse', 'reverse', (['"""builds:update"""'], {'kwargs': "{'slug': build.slug}"}), "('builds:update', kwargs={'slug': build.slug})\n", (4196, 4242), False, 'from django.urls import reverse\n'), ((4478, 4498), 'brouwers.users.tests.factories.UserFactory.create', 'UserFactory.create', ([], {}), '()\n', (4496, 4498), False, 'from brouwers.users.tests.factories import 
UserFactory\n'), ((4969, 5004), 'brouwers.albums.tests.factories.PhotoFactory.create', 'PhotoFactory.create', ([], {'user': 'self.user'}), '(user=self.user)\n', (4988, 5004), False, 'from brouwers.albums.tests.factories import PhotoFactory\n'), ((5962, 5983), 'brouwers.forum_tools.tests.factories.TopicFactory.create', 'TopicFactory.create', ([], {}), '()\n', (5981, 5983), False, 'from brouwers.forum_tools.tests.factories import ForumUserFactory, TopicFactory\n'), ((6605, 6625), 'brouwers.users.tests.factories.UserFactory.create', 'UserFactory.create', ([], {}), '()\n', (6623, 6625), False, 'from brouwers.users.tests.factories import UserFactory\n'), ((6652, 6704), 'brouwers.forum_tools.tests.factories.ForumUserFactory.create', 'ForumUserFactory.create', ([], {'username': 'self.user.username'}), '(username=self.user.username)\n', (6675, 6704), False, 'from brouwers.forum_tools.tests.factories import ForumUserFactory, TopicFactory\n'), ((6849, 6869), 'brouwers.users.tests.factories.UserFactory.create', 'UserFactory.create', ([], {}), '()\n', (6867, 6869), False, 'from brouwers.users.tests.factories import UserFactory\n'), ((6897, 6950), 'brouwers.forum_tools.tests.factories.ForumUserFactory.create', 'ForumUserFactory.create', ([], {'username': 'self.user2.username'}), '(username=self.user2.username)\n', (6920, 6950), False, 'from brouwers.forum_tools.tests.factories import ForumUserFactory, TopicFactory\n'), ((7013, 7038), 'brouwers.forum_tools.tests.factories.ForumUserFactory.create', 'ForumUserFactory.create', ([], {}), '()\n', (7036, 7038), False, 'from brouwers.forum_tools.tests.factories import ForumUserFactory, TopicFactory\n'), ((1378, 1392), 'django.utils.translation.ugettext', '_', (['"""My builds"""'], {}), "('My builds')\n", (1379, 1392), True, 'from django.utils.translation import ugettext as _\n'), ((1522, 1536), 'django.utils.translation.ugettext', '_', (['"""My builds"""'], {}), "('My builds')\n", (1523, 1536), True, 'from django.utils.translation 
import ugettext as _\n'), ((1610, 1624), 'django.utils.translation.ugettext', '_', (['"""My builds"""'], {}), "('My builds')\n", (1611, 1624), True, 'from django.utils.translation import ugettext as _\n'), ((2157, 2180), 'django.urls.reverse', 'reverse', (['"""builds:index"""'], {}), "('builds:index')\n", (2164, 2180), False, 'from django.urls import reverse\n'), ((2246, 2260), 'django.utils.translation.ugettext', '_', (['"""Add build"""'], {}), "('Add build')\n", (2247, 2260), True, 'from django.utils.translation import ugettext as _\n'), ((6067, 6091), 'django.urls.reverse', 'reverse', (['"""builds:create"""'], {}), "('builds:create')\n", (6074, 6091), False, 'from django.urls import reverse\n'), ((7140, 7209), 'django.urls.reverse', 'reverse', (['"""builds:forum_user_build_list"""'], {'kwargs': "{'pk': forum_user.pk}"}), "('builds:forum_user_build_list', kwargs={'pk': forum_user.pk})\n", (7147, 7209), False, 'from django.urls import reverse\n'), ((7266, 7328), 'django.urls.reverse', 'reverse', (['"""builds:user_build_list"""'], {'kwargs': "{'user_id': user.id}"}), "('builds:user_build_list', kwargs={'user_id': user.id})\n", (7273, 7328), False, 'from django.urls import reverse\n')]
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
from typing import TYPE_CHECKING
from cdm.enums import CdmObjectType, CdmLogCode
from cdm.enums.cdm_operation_type import OperationTypeConvertor, CdmOperationType
from cdm.utilities.logging import logger
from cdm.utilities.string_utils import StringUtils
if TYPE_CHECKING:
from cdm.objectmodel import CdmCorpusContext, CdmOperationBase
from cdm.utilities import ResolveOptions, CopyOptions
def _make_data_object(object_type: 'CdmObjectType'):
"""Instantiates a data object based on the object type."""
from cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, \
OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, \
OperationRenameAttributes, OperationReplaceAsForeignKey
data_map = {
CdmObjectType.OPERATION_ADD_ATTRIBUTE_GROUP_DEF: lambda: OperationAddAttributeGroup(),
CdmObjectType.OPERATION_ADD_COUNT_ATTRIBUTE_DEF: lambda: OperationAddCountAttribute(),
CdmObjectType.OPERATION_ADD_SUPPORTING_ATTRIBUTE_DEF: lambda: OperationAddSupportingAttribute(),
CdmObjectType.OPERATION_ADD_TYPE_ATTRIBUTE_DEF: lambda: OperationAddTypeAttribute(),
CdmObjectType.OPERATION_ARRAY_EXPANSION_DEF: lambda: OperationArrayExpansion(),
CdmObjectType.OPERATION_COMBINE_ATTRIBUTES_DEF: lambda: OperationCombineAttributes(),
CdmObjectType.OPERATION_EXCLUDE_ATTRIBUTES_DEF: lambda: OperationExcludeAttributes(),
CdmObjectType.OPERATION_INCLUDE_ATTRIBUTES_DEF: lambda: OperationIncludeAttributes(),
CdmObjectType.OPERATION_RENAME_ATTRIBUTES_DEF: lambda: OperationRenameAttributes(),
CdmObjectType.OPERATION_REPLACE_AS_FOREIGN_KEY_DEF: lambda: OperationReplaceAsForeignKey(),
}
return data_map.get(object_type)()
class OperationBasePersistence:
@staticmethod
def from_data(ctx: 'CdmCorpusContext', object_type: 'CdmObjectType', data: 'OperationBase'):
if data is None:
return None
operation = ctx.corpus.make_object(object_type) # type: CdmOperationBase
operation_type = OperationTypeConvertor._from_object_type(object_type) # type: CdmOperationType
operation_name = OperationTypeConvertor._operation_type_to_string(CdmOperationType.COMBINE_ATTRIBUTES)
if data.type and not StringUtils.equals_with_ignore_case(data.type, operation_name):
logger.error(ctx, operation_name, OperationBasePersistence.from_data.__name__, None, CdmLogCode.ERR_PERSIST_PROJ_INVALID_TYPE, data.type)
else:
operation.type = operation_type
operation.condition = data.condition
operation.explanation = data.explanation
operation.source_input = data.sourceInput
return operation
@staticmethod
def to_data(instance: 'CdmOperationBase', res_opt: 'ResolveOptions', options: 'CopyOptions'):
data = _make_data_object(instance.object_type)
data.type = OperationTypeConvertor._operation_type_to_string(instance.type)
data.condition = instance.condition
data.explanation = instance.explanation
data.sourceInput = instance.source_input
return data
|
[
"cdm.persistence.cdmfolder.types.OperationIncludeAttributes",
"cdm.persistence.cdmfolder.types.OperationAddAttributeGroup",
"cdm.persistence.cdmfolder.types.OperationArrayExpansion",
"cdm.persistence.cdmfolder.types.OperationReplaceAsForeignKey",
"cdm.persistence.cdmfolder.types.OperationAddCountAttribute",
"cdm.utilities.logging.logger.error",
"cdm.persistence.cdmfolder.types.OperationExcludeAttributes",
"cdm.enums.cdm_operation_type.OperationTypeConvertor._operation_type_to_string",
"cdm.persistence.cdmfolder.types.OperationRenameAttributes",
"cdm.persistence.cdmfolder.types.OperationAddTypeAttribute",
"cdm.utilities.string_utils.StringUtils.equals_with_ignore_case",
"cdm.persistence.cdmfolder.types.OperationAddSupportingAttribute",
"cdm.persistence.cdmfolder.types.OperationCombineAttributes",
"cdm.enums.cdm_operation_type.OperationTypeConvertor._from_object_type"
] |
[((2342, 2395), 'cdm.enums.cdm_operation_type.OperationTypeConvertor._from_object_type', 'OperationTypeConvertor._from_object_type', (['object_type'], {}), '(object_type)\n', (2382, 2395), False, 'from cdm.enums.cdm_operation_type import OperationTypeConvertor, CdmOperationType\n'), ((2447, 2537), 'cdm.enums.cdm_operation_type.OperationTypeConvertor._operation_type_to_string', 'OperationTypeConvertor._operation_type_to_string', (['CdmOperationType.COMBINE_ATTRIBUTES'], {}), '(CdmOperationType.\n COMBINE_ATTRIBUTES)\n', (2495, 2537), False, 'from cdm.enums.cdm_operation_type import OperationTypeConvertor, CdmOperationType\n'), ((3197, 3260), 'cdm.enums.cdm_operation_type.OperationTypeConvertor._operation_type_to_string', 'OperationTypeConvertor._operation_type_to_string', (['instance.type'], {}), '(instance.type)\n', (3245, 3260), False, 'from cdm.enums.cdm_operation_type import OperationTypeConvertor, CdmOperationType\n'), ((1105, 1133), 'cdm.persistence.cdmfolder.types.OperationAddAttributeGroup', 'OperationAddAttributeGroup', ([], {}), '()\n', (1131, 1133), False, 'from cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, OperationRenameAttributes, OperationReplaceAsForeignKey\n'), ((1200, 1228), 'cdm.persistence.cdmfolder.types.OperationAddCountAttribute', 'OperationAddCountAttribute', ([], {}), '()\n', (1226, 1228), False, 'from cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, OperationRenameAttributes, OperationReplaceAsForeignKey\n'), ((1300, 1333), 'cdm.persistence.cdmfolder.types.OperationAddSupportingAttribute', 
'OperationAddSupportingAttribute', ([], {}), '()\n', (1331, 1333), False, 'from cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, OperationRenameAttributes, OperationReplaceAsForeignKey\n'), ((1399, 1426), 'cdm.persistence.cdmfolder.types.OperationAddTypeAttribute', 'OperationAddTypeAttribute', ([], {}), '()\n', (1424, 1426), False, 'from cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, OperationRenameAttributes, OperationReplaceAsForeignKey\n'), ((1489, 1514), 'cdm.persistence.cdmfolder.types.OperationArrayExpansion', 'OperationArrayExpansion', ([], {}), '()\n', (1512, 1514), False, 'from cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, OperationRenameAttributes, OperationReplaceAsForeignKey\n'), ((1580, 1608), 'cdm.persistence.cdmfolder.types.OperationCombineAttributes', 'OperationCombineAttributes', ([], {}), '()\n', (1606, 1608), False, 'from cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, OperationRenameAttributes, OperationReplaceAsForeignKey\n'), ((1674, 1702), 'cdm.persistence.cdmfolder.types.OperationExcludeAttributes', 'OperationExcludeAttributes', ([], {}), '()\n', (1700, 1702), False, 'from 
cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, OperationRenameAttributes, OperationReplaceAsForeignKey\n'), ((1768, 1796), 'cdm.persistence.cdmfolder.types.OperationIncludeAttributes', 'OperationIncludeAttributes', ([], {}), '()\n', (1794, 1796), False, 'from cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, OperationRenameAttributes, OperationReplaceAsForeignKey\n'), ((1861, 1888), 'cdm.persistence.cdmfolder.types.OperationRenameAttributes', 'OperationRenameAttributes', ([], {}), '()\n', (1886, 1888), False, 'from cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, OperationRenameAttributes, OperationReplaceAsForeignKey\n'), ((1958, 1988), 'cdm.persistence.cdmfolder.types.OperationReplaceAsForeignKey', 'OperationReplaceAsForeignKey', ([], {}), '()\n', (1986, 1988), False, 'from cdm.persistence.cdmfolder.types import OperationAddAttributeGroup, OperationAddCountAttribute, OperationAddSupportingAttribute, OperationAddTypeAttribute, OperationArrayExpansion, OperationCombineAttributes, OperationExcludeAttributes, OperationIncludeAttributes, OperationRenameAttributes, OperationReplaceAsForeignKey\n'), ((2638, 2780), 'cdm.utilities.logging.logger.error', 'logger.error', (['ctx', 'operation_name', 'OperationBasePersistence.from_data.__name__', 'None', 'CdmLogCode.ERR_PERSIST_PROJ_INVALID_TYPE', 'data.type'], {}), '(ctx, operation_name, 
OperationBasePersistence.from_data.\n __name__, None, CdmLogCode.ERR_PERSIST_PROJ_INVALID_TYPE, data.type)\n', (2650, 2780), False, 'from cdm.utilities.logging import logger\n'), ((2562, 2624), 'cdm.utilities.string_utils.StringUtils.equals_with_ignore_case', 'StringUtils.equals_with_ignore_case', (['data.type', 'operation_name'], {}), '(data.type, operation_name)\n', (2597, 2624), False, 'from cdm.utilities.string_utils import StringUtils\n')]
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Password authentication backend"""
from __future__ import unicode_literals
import base64
from functools import wraps
from sys import version_info
from flask import flash, Response
from flask import url_for, redirect, make_response
from flask_bcrypt import generate_password_hash, check_password_hash
import flask_login
# noinspection PyUnresolvedReferences
# pylint: disable=unused-import
from flask_login import login_required, current_user, logout_user # noqa: F401
# pylint: enable=unused-import
from wtforms import Form, PasswordField, StringField
from wtforms.validators import InputRequired
from sqlalchemy import Column, String
from sqlalchemy.ext.hybrid import hybrid_property
from airflow import models
from airflow.utils.db import provide_session, create_session
from airflow.utils.log.logging_mixin import LoggingMixin
LOGIN_MANAGER = flask_login.LoginManager()
LOGIN_MANAGER.login_view = 'airflow.login' # Calls login() below
LOGIN_MANAGER.login_message = None
LOG = LoggingMixin().log
PY3 = version_info[0] == 3
CLIENT_AUTH = None
class AuthenticationError(Exception):
"""Error returned on authentication problems"""
# pylint: disable=no-member
# noinspection PyUnresolvedReferences
class PasswordUser(models.User):
"""Stores user with password"""
_password = Column('password', String(255))
def __init__(self, user):
self.user = user
@hybrid_property
def password(self):
"""Returns password for the user"""
return self._password
@password.setter
def password(self, plaintext):
"""Sets password for the user"""
self._password = generate_password_hash(plaintext, 12)
if PY3:
self._password = str(self._password, 'utf-8')
def authenticate(self, plaintext):
"""Authenticates user"""
return check_password_hash(self._password, plaintext)
@property
def is_active(self):
"""Required by flask_login"""
return True
@property
def is_authenticated(self):
"""Required by flask_login"""
return True
@property
def is_anonymous(self):
"""Required by flask_login"""
return False
def get_id(self):
"""Returns the current user id as required by flask_login"""
return str(self.id)
# pylint: disable=no-self-use
# noinspection PyMethodMayBeStatic
def data_profiling(self):
"""Provides access to data profiling tools"""
return True
# pylint: enable=no-self-use
def is_superuser(self):
"""Returns True if user is superuser"""
return hasattr(self, 'user') and self.user.is_superuser()
# noinspection PyUnresolvedReferences
@LOGIN_MANAGER.user_loader
@provide_session
def load_user(userid, session=None):
"""Loads user from the database"""
LOG.debug("Loading user %s", userid)
if not userid or userid == 'None':
return None
user = session.query(models.User).filter(models.User.id == int(userid)).first()
return PasswordUser(user)
def authenticate(session, username, password):
"""
Authenticate a PasswordUser with the specified
username/password.
:param session: An active SQLAlchemy session
:param username: The username
:param password: The password
:raise AuthenticationError: if an error occurred
:return: a PasswordUser
"""
if not username or not password:
raise AuthenticationError()
user = session.query(PasswordUser).filter(
PasswordUser.username == username).first()
if not user:
raise AuthenticationError()
if not user.authenticate(password):
raise AuthenticationError()
LOG.info("User %s successfully authenticated", username)
return user
@provide_session
def login(self, request, session=None):
    """Logs the user in

    Renders the login form on GET, authenticates on POST, and redirects
    an already-authenticated user straight to the admin index.
    """
    if current_user.is_authenticated:
        flash("You are already logged in")
        return redirect(url_for('admin.index'))

    username = None
    password = None

    form = LoginForm(request.form)

    if request.method == 'POST' and form.validate():
        username = request.form.get("username")
        password = request.form.get("password")

        try:
            user = authenticate(session, username, password)
            flask_login.login_user(user)

            # Honor the ?next= redirect target when present.
            return redirect(request.args.get("next") or url_for("admin.index"))
        except AuthenticationError:
            flash("Incorrect login details")

    # Fall through: show the login form again (with flash messages).
    return self.render('airflow/login.html',
                       title="Airflow - Login",
                       form=form)
# pylint: disable=too-few-public-methods
class LoginForm(Form):
    """Username/password form backing the login view."""
    username = StringField('Username', [InputRequired()])
    password = PasswordField('Password', [InputRequired()])
# pylint: enable=too-few-public-methods
def _unauthorized():
    """
    Indicate that authorization is required

    :return: a 401 response advertising HTTP Basic authentication
    """
    challenge = {"WWW-Authenticate": "Basic"}
    return Response("Unauthorized", 401, challenge)
def _forbidden():
    """Return a plain 403 response for a failed authentication attempt."""
    return Response("Forbidden", 403)
def init_app(_):
    """Initializes backend

    No app-level setup is required for password auth; the argument is
    accepted for interface compatibility and ignored.
    """
def requires_authentication(function):
    """Decorator for functions that require authentication"""
    @wraps(function)
    def decorated(*args, **kwargs):
        from flask import request

        header = request.headers.get("Authorization")
        if header:
            # Header format: "Basic <base64(username:password)>".
            userpass = ''.join(header.split()[1:])
            # Split on the first ':' only, so passwords may contain ':'.
            username, password = base64.b64decode(userpass).decode("utf-8").split(":", 1)

            with create_session() as session:
                try:
                    authenticate(session, username, password)

                    response = function(*args, **kwargs)
                    response = make_response(response)
                    return response

                except AuthenticationError:
                    return _forbidden()

        # No Authorization header at all -> ask for credentials.
        return _unauthorized()
    return decorated
|
[
"airflow.utils.db.create_session",
"flask.flash",
"flask.request.args.get",
"flask.request.form.get",
"flask.request.headers.get",
"wtforms.validators.InputRequired",
"flask_login.login_user",
"flask_bcrypt.check_password_hash",
"base64.b64decode",
"airflow.utils.log.logging_mixin.LoggingMixin",
"flask.url_for",
"functools.wraps",
"sqlalchemy.String",
"flask.make_response",
"flask_login.LoginManager",
"flask_bcrypt.generate_password_hash",
"flask.Response"
] |
[((1668, 1694), 'flask_login.LoginManager', 'flask_login.LoginManager', ([], {}), '()\n', (1692, 1694), False, 'import flask_login\n'), ((1803, 1817), 'airflow.utils.log.logging_mixin.LoggingMixin', 'LoggingMixin', ([], {}), '()\n', (1815, 1817), False, 'from airflow.utils.log.logging_mixin import LoggingMixin\n'), ((5761, 5821), 'flask.Response', 'Response', (['"""Unauthorized"""', '(401)', "{'WWW-Authenticate': 'Basic'}"], {}), "('Unauthorized', 401, {'WWW-Authenticate': 'Basic'})\n", (5769, 5821), False, 'from flask import flash, Response\n'), ((5853, 5879), 'flask.Response', 'Response', (['"""Forbidden"""', '(403)'], {}), "('Forbidden', 403)\n", (5861, 5879), False, 'from flask import flash, Response\n'), ((6037, 6052), 'functools.wraps', 'wraps', (['function'], {}), '(function)\n', (6042, 6052), False, 'from functools import wraps\n'), ((2134, 2145), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (2140, 2145), False, 'from sqlalchemy import Column, String\n'), ((2446, 2483), 'flask_bcrypt.generate_password_hash', 'generate_password_hash', (['plaintext', '(12)'], {}), '(plaintext, 12)\n', (2468, 2483), False, 'from flask_bcrypt import generate_password_hash, check_password_hash\n'), ((2646, 2692), 'flask_bcrypt.check_password_hash', 'check_password_hash', (['self._password', 'plaintext'], {}), '(self._password, plaintext)\n', (2665, 2692), False, 'from flask_bcrypt import generate_password_hash, check_password_hash\n'), ((4699, 4733), 'flask.flash', 'flash', (['"""You are already logged in"""'], {}), "('You are already logged in')\n", (4704, 4733), False, 'from flask import flash, Response\n'), ((4932, 4960), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (4948, 4960), False, 'from flask import request\n'), ((4980, 5008), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (4996, 5008), False, 'from flask import request\n'), ((5084, 5112), 'flask_login.login_user', 
'flask_login.login_user', (['user'], {}), '(user)\n', (5106, 5112), False, 'import flask_login\n'), ((6141, 6177), 'flask.request.headers.get', 'request.headers.get', (['"""Authorization"""'], {}), "('Authorization')\n", (6160, 6177), False, 'from flask import request\n'), ((4758, 4780), 'flask.url_for', 'url_for', (['"""admin.index"""'], {}), "('admin.index')\n", (4765, 4780), False, 'from flask import url_for, redirect, make_response\n'), ((5230, 5262), 'flask.flash', 'flash', (['"""Incorrect login details"""'], {}), "('Incorrect login details')\n", (5235, 5262), False, 'from flask import flash, Response\n'), ((5536, 5551), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (5549, 5551), False, 'from wtforms.validators import InputRequired\n'), ((5596, 5611), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (5609, 5611), False, 'from wtforms.validators import InputRequired\n'), ((5138, 5162), 'flask.request.args.get', 'request.args.get', (['"""next"""'], {}), "('next')\n", (5154, 5162), False, 'from flask import request\n'), ((5166, 5188), 'flask.url_for', 'url_for', (['"""admin.index"""'], {}), "('admin.index')\n", (5173, 5188), False, 'from flask import url_for, redirect, make_response\n'), ((6356, 6372), 'airflow.utils.db.create_session', 'create_session', ([], {}), '()\n', (6370, 6372), False, 'from airflow.utils.db import provide_session, create_session\n'), ((6557, 6580), 'flask.make_response', 'make_response', (['response'], {}), '(response)\n', (6570, 6580), False, 'from flask import url_for, redirect, make_response\n'), ((6281, 6307), 'base64.b64decode', 'base64.b64decode', (['userpass'], {}), '(userpass)\n', (6297, 6307), False, 'import base64\n')]
|
import main as m
import os

# In-memory client registry; each entry is a [dni, nombre] pair.
clientes = []
def mostrar_menu_cliente():
    """Show the client-management menu and dispatch the chosen action.

    Fixes vs. the original:
    * an unrecognized option now re-prompts instead of silently exiting
    * typo "princiapl" -> "principal" in the menu label
    """
    os.system('clear')
    print("---------------------------------------------")
    print("               Agregar cliente               ")
    print("---------------------------------------------")
    print("[1]Agregar")
    print("[2]Listar")
    print("[3]Consultar")
    print("[4]Actualizar")
    print("[5]Eliminar")
    print("[6]Volver al menú principal")
    print("")
    # Dispatch table: menu key -> handler.
    acciones = {
        "1": agregar,
        "2": listar,
        "3": consultar,
        "4": actualizar,
        "5": eliminar,
        "6": m.mostrar_menu_principal,
    }
    opcion = input("*Selecciona su opción: ")
    accion = acciones.get(opcion)
    if accion is not None:
        accion()
    else:
        print("Opción inválida.")
        mostrar_menu_cliente()
def agregar():
    """Register new clients until the user declines to continue.

    Fix vs. the original: the check `if cliente in clientes` was always
    True (the record had just been appended), so the error branch was
    unreachable. Duplicate DNIs are now rejected instead.
    """
    os.system('clear')
    inventario = False
    while inventario == False:
        code = int(input("--> Digite el DNI del cliente que desea registrar: "))
        nombre_cliente = input("--> Digite el nombre del cliente que desea registrar: ")
        # Reject a DNI that is already registered.
        if any(c[0] == code for c in clientes):
            print("Error en registro. Por favor intente de nuevo")
            continue
        clientes.append([code, nombre_cliente])
        inventario = True
        print("* --> Cliente registrado de forma exitosa!")
        opc = input("¿Desea realizar un nuevo registro? [y]Si [n]No: ")
        if opc == "y":
            inventario = False
        else:
            mostrar_menu_cliente()
def listar():
    """Print every registered client; 'e' returns to the client menu."""
    os.system('clear')
    print("Los clientes registrados en el sistema son:", "\n", clientes)
    opc = input("presione [e] para volver al menú: ")
    if opc == "e":
        mostrar_menu_cliente()
def consultar():
    """Look up clients by DNI until the user opts out.

    Fix vs. the original: searching for an unregistered DNI silently
    re-prompted with no feedback; a not-found message is now printed.
    """
    os.system('clear')
    encontrado = False
    while encontrado == False:
        codigo = int(input("Ingrese el DNI del cliente que desea buscar (Oprima 0 para salir): "))
        if codigo == 0:
            mostrar_menu_cliente()
            return
        hallado = False  # did this prompt match any client?
        for cliente in clientes:
            code, nombre = cliente
            if codigo == code:
                hallado = True
                encontrado = True
                print("el cliente asignado con ese DNI es: ", cliente)
                opc = input("¿Desea realizar una nueva consulta? [y]Si [n]No: ")
                if opc == "y":
                    encontrado = False
                else:
                    mostrar_menu_cliente()
                    return
        if not hallado:
            print("No existe un cliente con ese DNI.")
def actualizar():
    """Update a client's DNI and name, selected by its current DNI.

    Fixes vs. the original:
    * screen is cleared on entry, consistent with the sibling actions
    * the scan stops after an update, so the freshly written record
      cannot be matched again in the same pass
    """
    os.system('clear')
    modificado = False
    while modificado == False:
        code = int(input("Ingrese el DNI del cliente que desea modificar (Oprima 0 para salir): "))
        if code == 0:
            mostrar_menu_cliente()
            return
        for cliente in clientes:
            codigo, nombre = cliente
            if code == codigo:
                print("El cliente a modificar es: ", cliente)
                n_codigo = int(input("Ingrese DNI del nuevo cliente: "))
                n_nombre = input("Ingrese nombre del nuevo cliente: ")
                clientes[clientes.index(cliente)] = [n_codigo, n_nombre]
                modificado = True
                print("La lista de clientes ha sido actualizada: ", clientes)
                opc = input("¿Desea realizar un nuevo cambio? [y]Si [n]No: ")
                if opc == "y":
                    modificado = False
                else:
                    mostrar_menu_cliente()
                    return
                break
def eliminar():
    """Delete clients by DNI until the user opts out.

    Fixes vs. the original:
    * the confirmation answer was bound to a local named `eliminar`,
      shadowing this function's own name — renamed to `respuesta`
    * the list was popped while being iterated; the scan now stops
      after the matched record is handled
    """
    os.system('clear')
    encontrado = False
    while encontrado == False:
        codigo = int(input("Ingrese DNI del cliente que desea eliminar (Oprima 0 para salir): "))
        if codigo == 0:
            mostrar_menu_cliente()
            return
        for cliente in clientes:
            code, nombre = cliente
            if codigo == code:
                encontrado = True
                print("El cliente asignado con ese DNI es:", "\n", cliente)
                respuesta = input("¿Desea eliminarlo? [y]Si [n]No para salir: ")
                if respuesta == "y":
                    clientes.remove(cliente)
                    print("La lista de clientes ahora es: ", clientes)
                    opc = input("¿Desea eliminar otro cliente de la lista? [y]Si [n]No: ")
                    if opc == "y":
                        encontrado = False
                    else:
                        mostrar_menu_cliente()
                        return
                else:
                    mostrar_menu_cliente()
                    return
                break
|
[
"main.mostrar_menu_principal",
"os.system"
] |
[((75, 93), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (84, 93), False, 'import os\n'), ((789, 807), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (798, 807), False, 'import os\n'), ((1527, 1545), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1536, 1545), False, 'import os\n'), ((1743, 1761), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1752, 1761), False, 'import os\n'), ((3479, 3497), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (3488, 3497), False, 'import os\n'), ((742, 768), 'main.mostrar_menu_principal', 'm.mostrar_menu_principal', ([], {}), '()\n', (766, 768), True, 'import main as m\n')]
|
import unittest
from unittest.mock import patch
from testfixtures import TempDirectory
import os
from hardware.Utils.logger import Logger
from hardware.gpsPi.gps_reader import GPSReader
@patch("serial.Serial")
class GPSPiTests(unittest.TestCase):
def setUp(self):
self.temp_dir = TempDirectory()
def tearDown(self):
self.temp_dir.cleanup()
def test_init_no_logs(self, mock_port):
# Replace real object os.environ with mock dictionary
with patch.dict(
os.environ,
{
"GPS_LOG_FILE": "logger.txt",
"LOG_DIRECTORY": self.temp_dir.path,
"GPS_PORT": "/dev/serial0",
"GPS_BAUDRATE": "9600",
},
):
gps_reader = GPSReader()
mock_port.assert_called_with(
os.environ["GPS_PORT"], os.environ["GPS_BAUDRATE"],
)
self.assertTrue(gps_reader.logging is not None)
self.assertTrue(gps_reader.logging.name == "GPS_LOG_FILE")
self.assertIsInstance(gps_reader.logging, Logger)
def test_init_logs(self, mock_port):
with patch.dict(
os.environ,
{
"GPS_HAT_LOG_FILE": "logger.txt",
"LOG_DIRECTORY": self.temp_dir.path,
"GPS_PORT": "/dev/serial0",
"GPS_BAUDRATE": "9600",
},
):
gps_reader = GPSReader("GPS_HAT_LOG_FILE")
mock_port.assert_called_with(
os.environ["GPS_PORT"], os.environ["GPS_BAUDRATE"],
)
self.assertTrue(gps_reader.logging is not None)
self.assertTrue(gps_reader.logging.name == "GPS_HAT_LOG_FILE")
self.assertIsInstance(gps_reader.logging, Logger)
@patch("hardware.gpsPi.gps_reader.date_str_with_current_timezone")
def test_get_location_valid_data(self, mock_date, mock_port):
mock_port.return_value.inWaiting.return_value = 1
mock_port.return_value.readline.return_value = (
"b'$GPRMC,194509.000,A,4042.6142,N,07400.4168,W,2.03,221.11,160412,,,A*77"
)
mock_date.return_value = "example date"
with patch.dict(
os.environ,
{
"GPS_LOG_FILE": "logger.txt",
"LOG_DIRECTORY": self.temp_dir.path,
"GPS_PORT": "/dev/serial0",
"GPS_BAUDRATE": "9600",
},
):
expected_data = {}
expected_data["sensor_id"] = 1
expected_data["values"] = {
"latitude": 40.71023666666667,
"longitude": -74.00694666666666,
}
expected_data["date"] = "example date"
gps_reader = GPSReader()
data = gps_reader.get_geolocation()
mock_port.return_value.inWaiting.assert_called()
mock_port.return_value.readline.assert_called()
self.assertEqual(expected_data, data)
@patch("hardware.gpsPi.gps_reader.date_str_with_current_timezone")
def test_get_location_other_valid_data(self, mock_date, mock_port):
mock_port.return_value.inWaiting.return_value = 1
mock_port.return_value.readline.return_value = (
"b'$GPRMC,194509.000,A,4042.6142,S,07400.4168,W,2.03,221.11,160412,,,A*77"
)
mock_date.return_value = "example date"
with patch.dict(
os.environ,
{
"GPS_LOG_FILE": "logger.txt",
"LOG_DIRECTORY": self.temp_dir.path,
"GPS_PORT": "/dev/serial0",
"GPS_BAUDRATE": "9600",
},
):
expected_data = {}
expected_data["sensor_id"] = 1
expected_data["values"] = {
"latitude": -40.71023666666667,
"longitude": -74.00694666666666,
}
expected_data["date"] = "example date"
gps_reader = GPSReader()
data = gps_reader.get_geolocation()
mock_port.return_value.inWaiting.assert_called()
mock_port.return_value.readline.assert_called()
self.assertEqual(expected_data, data)
def test_get_location_invalid_nmeatype(self, mock_port):
mock_port.return_value.inWaiting.return_value = 1
mock_port.return_value.readline.return_value = (
"b'$GPGGA,194509.000,A,4042.6142,N,07400.4168,W,2.03,221.11,160412,,,A*77"
)
with patch.dict(
os.environ,
{
"GPS_LOG_FILE": "logger.txt",
"LOG_DIRECTORY": self.temp_dir.path,
"GPS_PORT": "/dev/serial0",
"GPS_BAUDRATE": "9600",
},
):
expected_data = None
gps_reader = GPSReader()
data = gps_reader.get_geolocation()
mock_port.return_value.inWaiting.assert_called()
mock_port.return_value.readline.assert_called()
self.assertEqual(expected_data, data)
def test_get_location_invalid_data(self, mock_port):
mock_port.return_value.inWaiting.return_value = 1
mock_port.return_value.readline.return_value = (
"b'$GPRMC,194509.000,V,4042.6142,N,07400.4168,W,2.03,221.11,160412,,,A*77"
)
with patch.dict(
os.environ,
{
"GPS_LOG_FILE": "logger.txt",
"LOG_DIRECTORY": self.temp_dir.path,
"GPS_PORT": "/dev/serial0",
"GPS_BAUDRATE": "9600",
},
):
expected_data = None
gps_reader = GPSReader()
data = gps_reader.get_geolocation()
mock_port.return_value.inWaiting.assert_called()
mock_port.return_value.readline.assert_called()
self.assertEqual(expected_data, data)
@patch("hardware.gpsPi.gps_reader.date_str_with_current_timezone")
def test_get_speed_in_mph(self, mock_date, mock_port):
mock_port.return_value.inWaiting.return_value = 1
mock_port.return_value.readline.return_value = (
"b'$GPRMC,194509.000,A,4042.6142,N,07400.4168,W,2.03,221.11,160412,,,A*77"
)
mock_date.return_value = "example date"
with patch.dict(
os.environ,
{
"GPS_LOG_FILE": "logger.txt",
"LOG_DIRECTORY": self.temp_dir.path,
"GPS_PORT": "/dev/serial0",
"GPS_BAUDRATE": "9600",
},
):
speed_in_mph = 2.03 * 1.151
expected_data = {}
expected_data["sensor_id"] = 1
expected_data["values"] = {
"speed": speed_in_mph,
}
expected_data["date"] = "example date"
gps_reader = GPSReader()
data = gps_reader.get_speed_mph()
mock_port.return_value.inWaiting.assert_called()
mock_port.return_value.readline.assert_called()
self.assertEqual(expected_data, data)
def test_get_speed_in_mph_invalid_data(self, mock_port):
mock_port.return_value.inWaiting.return_value = 1
mock_port.return_value.readline.return_value = (
"b'$GP,194509.000,A,4042.6142,N,07400.4168,W,2.03,221.11,160412,,,A*77"
)
with patch.dict(
os.environ,
{
"GPS_LOG_FILE": "logger.txt",
"LOG_DIRECTORY": self.temp_dir.path,
"GPS_PORT": "/dev/serial0",
"GPS_BAUDRATE": "9600",
},
):
expected_data = None
gps_reader = GPSReader()
data = gps_reader.get_speed_mph()
mock_port.return_value.inWaiting.assert_called()
mock_port.return_value.readline.assert_called()
self.assertEqual(expected_data, data)
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"unittest.mock.patch.dict",
"unittest.mock.patch",
"hardware.gpsPi.gps_reader.GPSReader",
"testfixtures.TempDirectory"
] |
[((191, 213), 'unittest.mock.patch', 'patch', (['"""serial.Serial"""'], {}), "('serial.Serial')\n", (196, 213), False, 'from unittest.mock import patch\n'), ((1807, 1872), 'unittest.mock.patch', 'patch', (['"""hardware.gpsPi.gps_reader.date_str_with_current_timezone"""'], {}), "('hardware.gpsPi.gps_reader.date_str_with_current_timezone')\n", (1812, 1872), False, 'from unittest.mock import patch\n'), ((3014, 3079), 'unittest.mock.patch', 'patch', (['"""hardware.gpsPi.gps_reader.date_str_with_current_timezone"""'], {}), "('hardware.gpsPi.gps_reader.date_str_with_current_timezone')\n", (3019, 3079), False, 'from unittest.mock import patch\n'), ((5906, 5971), 'unittest.mock.patch', 'patch', (['"""hardware.gpsPi.gps_reader.date_str_with_current_timezone"""'], {}), "('hardware.gpsPi.gps_reader.date_str_with_current_timezone')\n", (5911, 5971), False, 'from unittest.mock import patch\n'), ((7951, 7966), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7964, 7966), False, 'import unittest\n'), ((296, 311), 'testfixtures.TempDirectory', 'TempDirectory', ([], {}), '()\n', (309, 311), False, 'from testfixtures import TempDirectory\n'), ((490, 638), 'unittest.mock.patch.dict', 'patch.dict', (['os.environ', "{'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self.temp_dir.path,\n 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'}"], {}), "(os.environ, {'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self\n .temp_dir.path, 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'})\n", (500, 638), False, 'from unittest.mock import patch\n'), ((774, 785), 'hardware.gpsPi.gps_reader.GPSReader', 'GPSReader', ([], {}), '()\n', (783, 785), False, 'from hardware.gpsPi.gps_reader import GPSReader\n'), ((1160, 1311), 'unittest.mock.patch.dict', 'patch.dict', (['os.environ', "{'GPS_HAT_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self.temp_dir.path,\n 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'}"], {}), "(os.environ, {'GPS_HAT_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY':\n self.temp_dir.path, 
'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'})\n", (1170, 1311), False, 'from unittest.mock import patch\n'), ((1449, 1478), 'hardware.gpsPi.gps_reader.GPSReader', 'GPSReader', (['"""GPS_HAT_LOG_FILE"""'], {}), "('GPS_HAT_LOG_FILE')\n", (1458, 1478), False, 'from hardware.gpsPi.gps_reader import GPSReader\n'), ((2214, 2362), 'unittest.mock.patch.dict', 'patch.dict', (['os.environ', "{'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self.temp_dir.path,\n 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'}"], {}), "(os.environ, {'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self\n .temp_dir.path, 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'})\n", (2224, 2362), False, 'from unittest.mock import patch\n'), ((2775, 2786), 'hardware.gpsPi.gps_reader.GPSReader', 'GPSReader', ([], {}), '()\n', (2784, 2786), False, 'from hardware.gpsPi.gps_reader import GPSReader\n'), ((3427, 3575), 'unittest.mock.patch.dict', 'patch.dict', (['os.environ', "{'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self.temp_dir.path,\n 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'}"], {}), "(os.environ, {'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self\n .temp_dir.path, 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'})\n", (3437, 3575), False, 'from unittest.mock import patch\n'), ((3989, 4000), 'hardware.gpsPi.gps_reader.GPSReader', 'GPSReader', ([], {}), '()\n', (3998, 4000), False, 'from hardware.gpsPi.gps_reader import GPSReader\n'), ((4511, 4659), 'unittest.mock.patch.dict', 'patch.dict', (['os.environ', "{'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self.temp_dir.path,\n 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'}"], {}), "(os.environ, {'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self\n .temp_dir.path, 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'})\n", (4521, 4659), False, 'from unittest.mock import patch\n'), ((4830, 4841), 'hardware.gpsPi.gps_reader.GPSReader', 'GPSReader', ([], {}), '()\n', (4839, 4841), False, 'from hardware.gpsPi.gps_reader import 
GPSReader\n'), ((5348, 5496), 'unittest.mock.patch.dict', 'patch.dict', (['os.environ', "{'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self.temp_dir.path,\n 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'}"], {}), "(os.environ, {'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self\n .temp_dir.path, 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'})\n", (5358, 5496), False, 'from unittest.mock import patch\n'), ((5667, 5678), 'hardware.gpsPi.gps_reader.GPSReader', 'GPSReader', ([], {}), '()\n', (5676, 5678), False, 'from hardware.gpsPi.gps_reader import GPSReader\n'), ((6306, 6454), 'unittest.mock.patch.dict', 'patch.dict', (['os.environ', "{'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self.temp_dir.path,\n 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'}"], {}), "(os.environ, {'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self\n .temp_dir.path, 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'})\n", (6316, 6454), False, 'from unittest.mock import patch\n'), ((6851, 6862), 'hardware.gpsPi.gps_reader.GPSReader', 'GPSReader', ([], {}), '()\n', (6860, 6862), False, 'from hardware.gpsPi.gps_reader import GPSReader\n'), ((7368, 7516), 'unittest.mock.patch.dict', 'patch.dict', (['os.environ', "{'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self.temp_dir.path,\n 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'}"], {}), "(os.environ, {'GPS_LOG_FILE': 'logger.txt', 'LOG_DIRECTORY': self\n .temp_dir.path, 'GPS_PORT': '/dev/serial0', 'GPS_BAUDRATE': '9600'})\n", (7378, 7516), False, 'from unittest.mock import patch\n'), ((7687, 7698), 'hardware.gpsPi.gps_reader.GPSReader', 'GPSReader', ([], {}), '()\n', (7696, 7698), False, 'from hardware.gpsPi.gps_reader import GPSReader\n')]
|
import argparse
import os
class TestParam(object):
    """Plain container for load-test parameters.

    Every constructor keyword becomes an attribute of the same name.
    """

    def __init__(self, host="", port=0, threads=1, http_pool=None, base_url="", ramp=1,
                 duration=1, conns=1, rand_req=False, max_rand_obj=1, req_dist="", poisson_lam=1.0,
                 gauss_mean=1.0, gauss_std=1.0, max_iters=5000, loop='closed', engine=''):
        # Copy every parameter onto the instance in one step.
        params = dict(locals())
        params.pop('self')
        self.__dict__.update(params)
def parse_commandline(cl_args):
    """Parse the benchmark's command-line options.

    :param cl_args: list of argument strings (e.g. ``sys.argv[1:]``)
    :return: ``argparse.Namespace`` holding the parsed options
    """
    parser = argparse.ArgumentParser()

    # Load generation.
    parser.add_argument("--threads", dest="threads", type=int, default=15,
                        help="Number of threads to use")
    parser.add_argument("--host", dest="host", default="192.168.127.12",
                        help="Web server host name")
    parser.add_argument("--port", dest="port", type=int, default=9111,
                        help="Web server port number")
    parser.add_argument("--duration", dest="duration", type=float, default=20,
                        help="Duration of test in seconds")
    parser.add_argument("--ramp", dest="ramp", type=float, default=1,
                        help="Ramp time for duration-based testing")
    parser.add_argument("--size", dest="transfer_size", type=int, default=1024,
                        help="Total transfer size in bytes. Overrides duration-based settings")
    parser.add_argument("--connections", dest="conns", type=int, default=10,
                        help="Number of connections to use per thread")

    # Cluster layout.
    parser.add_argument("--shards", dest="shards", type=int, default=4,
                        help="Number of shards for collection")
    parser.add_argument("--replicas", dest="replicas", type=int, default=3,
                        help="Number of replicas for collection")

    # Test behaviour.
    parser.add_argument("--test-type", dest="test_type", choices=["duration", "size"], default="duration",
                        help="Type of test to perform")
    parser.add_argument("--random", dest="rand_req", action="store_true", default=False,
                        help="Indicates that threads should perform random requests. Otherwise sequential requests are performed.")
    parser.add_argument("--max-rand-obj", dest="max_rand_obj", type=int, default=1000,
                        help="Maximum number of objects from which clients will make random requests")
    parser.add_argument("--output-dir", dest="output_dir", default=os.path.dirname(os.path.realpath(__file__)),
                        help="Directory to store output CSV file.")
    parser.add_argument("--req-dist", dest="req_dist", choices=["gauss", "poisson"], default="gauss",
                        help="Client wait time distribution type.")
    parser.add_argument("--query", dest="query", choices=["client", "roundrobin"], default="client",
                        help="queries made using client or roundrobin")
    parser.add_argument("--loop", dest="loop", choices=["open", "closed"], default="closed",
                        help="run open or closed loop experiment")
    parser.add_argument("--solrnum", dest="clustersize", default="4",
                        help="number of nodes running solr")
    parser.add_argument("--instances", dest="instances", default=None,
                        help="number of solr instances running in each node")
    parser.add_argument("--engine", dest="engine", default=None,
                        help="solr or elastic", choices=["solr", "elastic"])

    return parser.parse_args(cl_args)
|
[
"os.path.realpath",
"argparse.ArgumentParser"
] |
[((1054, 1079), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1077, 1079), False, 'import argparse\n'), ((3048, 3074), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (3064, 3074), False, 'import os\n')]
|
# Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
import dace
from dace.memlet import Memlet
import numpy as np

# Build an SDFG with one state that reads a strided/tiled range of A and
# writes contiguous 4-element chunks of B.
sr = dace.SDFG('strided_range_test')
s0 = sr.add_state('s0')

A = s0.add_array('A', [2, 16, 4], dace.float32)
B = s0.add_array('B', [16], dace.float32)

# Tasklet doubles the four elements it receives per map iteration.
tasklet = s0.add_tasklet(
    'srtest', {'a'}, {'b'}, """
b[0] = a[0,0] * 2
b[1] = a[0,1] * 2
b[2] = a[1,0] * 2
b[3] = a[1,1] * 2
""")
me, mx = s0.add_map('srmap', dict(i='0:4'))

# Reading A at [1, 2*i:2*i+10:8:2, 3] (end fixed to match the memlet below)
s0.add_memlet_path(A,
                   me,
                   tasklet,
                   dst_conn='a',
                   memlet=Memlet.simple(A, '1, 2*i:2*i+10:8:2, 3'))
# Writing B at [4*i:4*i+4]
s0.add_memlet_path(tasklet,
                   mx,
                   B,
                   src_conn='b',
                   memlet=Memlet.simple(B, '4*i:4*i+4'))
def test():
    """Run the strided-range SDFG and check every output chunk.

    Uses the module-level ``sr`` SDFG and verifies that each 2-element
    chunk of B equals twice the corresponding strided slice of A[1, :, 3].
    """
    print('Strided range tasklet test')
    A = np.random.rand(2, 16, 4).astype(np.float32)
    B = np.random.rand(16).astype(np.float32)

    sr(A=A, B=B)

    # One difference vector per 2-element output chunk.
    diffs = [
        B[0:2] - 2 * A[1, 0:2, 3], B[2:4] - 2 * A[1, 8:10, 3],
        B[4:6] - 2 * A[1, 2:4, 3], B[6:8] - 2 * A[1, 10:12, 3],
        B[8:10] - 2 * A[1, 4:6, 3], B[10:12] - 2 * A[1, 12:14, 3],
        B[12:14] - 2 * A[1, 6:8, 3], B[14:16] - 2 * A[1, 14:16, 3]
    ]
    diff = np.linalg.norm(np.array(diffs))
    print('Differences:', [np.linalg.norm(d) for d in diffs])
    assert diff <= 1e-5


if __name__ == "__main__":
    test()
|
[
"dace.memlet.Memlet.simple",
"numpy.array",
"numpy.linalg.norm",
"numpy.random.rand",
"dace.SDFG"
] |
[((144, 175), 'dace.SDFG', 'dace.SDFG', (['"""strided_range_test"""'], {}), "('strided_range_test')\n", (153, 175), False, 'import dace\n'), ((644, 684), 'dace.memlet.Memlet.simple', 'Memlet.simple', (['A', '"""1, 2*i:2*i+10:8:2, 3"""'], {}), "(A, '1, 2*i:2*i+10:8:2, 3')\n", (657, 684), False, 'from dace.memlet import Memlet\n'), ((846, 875), 'dace.memlet.Memlet.simple', 'Memlet.simple', (['B', '"""4*i:4*i+4"""'], {}), "(B, '4*i:4*i+4')\n", (859, 875), False, 'from dace.memlet import Memlet\n'), ((1355, 1370), 'numpy.array', 'np.array', (['diffs'], {}), '(diffs)\n', (1363, 1370), True, 'import numpy as np\n'), ((939, 963), 'numpy.random.rand', 'np.random.rand', (['(2)', '(16)', '(4)'], {}), '(2, 16, 4)\n', (953, 963), True, 'import numpy as np\n'), ((991, 1009), 'numpy.random.rand', 'np.random.rand', (['(16)'], {}), '(16)\n', (1005, 1009), True, 'import numpy as np\n'), ((1399, 1416), 'numpy.linalg.norm', 'np.linalg.norm', (['d'], {}), '(d)\n', (1413, 1416), True, 'import numpy as np\n')]
|
import datetime
import paste
import pylons.test
import ckan
from ckan.lib.helpers import json
from ckan.tests import are_foreign_keys_supported, SkipTest
def datetime_from_string(s):
    '''Return a standard datetime.datetime object initialised from a string in
    the same format used for timestamps in dictized activities (the format
    produced by datetime.datetime.isoformat())

    '''
    # Format emitted by datetime.isoformat() for microsecond timestamps.
    iso_format = '%Y-%m-%dT%H:%M:%S.%f'
    return datetime.datetime.strptime(s, iso_format)
def follow_user(app, follower_id, apikey, object_id, object_arg):
    '''Test a user starting to follow another user via the API.

    :param follower_id: id of the user that will be following something.
    :param apikey: API key of the user that will be following something.
    :param object_id: id of the user that will be followed.
    :param object_arg: the argument to pass to follow_user as the id of
        the object that will be followed, could be the object's id or name.

    '''
    # Record the user's number of followers before.
    params = json.dumps({'id': object_id})
    response = app.post('/api/action/user_follower_count',
                        params=params).json
    assert response['success'] is True
    count_before = response['result']

    # Check that the user is not already following the object.
    params = json.dumps({'id': object_id})
    extra_environ = {'Authorization': str(apikey)}
    response = app.post('/api/action/am_following_user',
                        params=params, extra_environ=extra_environ).json
    assert response['success'] is True
    assert response['result'] is False

    # Make the user start following the object. The API's timestamp for
    # the new follower must fall inside the [before, after] window.
    before = datetime.datetime.now()
    params = {'id': object_arg}
    extra_environ = {'Authorization': str(apikey)}
    response = app.post('/api/action/follow_user',
                        params=json.dumps(params), extra_environ=extra_environ).json
    after = datetime.datetime.now()
    assert response['success'] is True
    assert response['result']
    follower = response['result']
    assert follower['follower_id'] == follower_id
    assert follower['object_id'] == object_id
    timestamp = datetime_from_string(follower['datetime'])
    assert (timestamp >= before and timestamp <= after), str(timestamp)

    # Check that am_following_user now returns True.
    params = json.dumps({'id': object_id})
    extra_environ = {'Authorization': str(apikey)}
    response = app.post('/api/action/am_following_user',
                        params=params, extra_environ=extra_environ).json
    assert response['success'] is True
    assert response['result'] is True

    # Check that the user appears in the object's list of followers.
    params = json.dumps({'id': object_id})
    response = app.post('/api/action/user_follower_list',
                        params=params).json
    assert response['success'] is True
    assert response['result']
    followers = response['result']
    assert len(followers) == count_before + 1
    assert len([follower for follower in followers if follower['id'] ==
                follower_id]) == 1

    # Check that the object's follower count has increased by 1.
    params = json.dumps({'id': object_id})
    response = app.post('/api/action/user_follower_count',
                        params=params).json
    assert response['success'] is True
    assert response['result'] == count_before + 1
def follow_dataset(app, follower_id, apikey, dataset_id, dataset_arg):
    '''Test a user starting to follow a dataset via the API.

    :param follower_id: id of the user.
    :param apikey: API key of the user.
    :param dataset_id: id of the dataset.
    :param dataset_arg: the argument to pass to follow_dataset as the id of
        the dataset that will be followed, could be the dataset's id or name.

    '''
    auth = {'Authorization': str(apikey)}

    # Record the dataset's number of followers before.
    result = app.post('/api/action/dataset_follower_count',
                      params=json.dumps({'id': dataset_id})).json
    assert result['success'] is True
    initial_count = result['result']

    # Check that the user is not already following the dataset.
    result = app.post('/api/action/am_following_dataset',
                      params=json.dumps({'id': dataset_id}),
                      extra_environ=auth).json
    assert result['success'] is True
    assert result['result'] is False

    # Make the user start following the dataset, bracketing the call with
    # timestamps so the recorded 'datetime' field can be sanity-checked.
    t_start = datetime.datetime.now()
    result = app.post('/api/action/follow_dataset',
                      params=json.dumps({'id': dataset_arg}),
                      extra_environ=auth).json
    t_end = datetime.datetime.now()
    assert result['success'] is True
    assert result['result']
    created = result['result']
    assert created['follower_id'] == follower_id
    assert created['object_id'] == dataset_id
    when = datetime_from_string(created['datetime'])
    assert t_start <= when <= t_end, str(when)

    # Check that am_following_dataset now returns True.
    result = app.post('/api/action/am_following_dataset',
                      params=json.dumps({'id': dataset_id}),
                      extra_environ=auth).json
    assert result['success'] is True
    assert result['result'] is True

    # Check that the user appears in the dataset's list of followers and
    # that the list grew by exactly one entry.
    result = app.post('/api/action/dataset_follower_list',
                      params=json.dumps({'id': dataset_id})).json
    assert result['success'] is True
    assert result['result']
    followers = result['result']
    assert len(followers) == initial_count + 1
    assert len([f for f in followers if f['id'] == follower_id]) == 1

    # Check that the dataset's follower count has increased by 1.
    result = app.post('/api/action/dataset_follower_count',
                      params=json.dumps({'id': dataset_id})).json
    assert result['success'] is True
    assert result['result'] == initial_count + 1
class TestFollow(object):
    '''Tests for the follower API: follow_user / follow_dataset and the
    related *_follower_count, *_follower_list and am_following_* actions.'''

    @classmethod
    def setup_class(self):
        # Build a fresh test database and look up the fixture users and
        # datasets that the individual tests exercise.
        ckan.tests.CreateTestData.create()
        self.testsysadmin = ckan.model.User.get('testsysadmin')
        self.annafan = ckan.model.User.get('annafan')
        self.russianfan = ckan.model.User.get('russianfan')
        self.tester = ckan.model.User.get('tester')
        self.joeadmin = ckan.model.User.get('joeadmin')
        self.warandpeace = ckan.model.Package.get('warandpeace')
        self.annakarenina = ckan.model.Package.get('annakarenina')
        self.app = paste.fixture.TestApp(pylons.test.pylonsapp)

    @classmethod
    def teardown_class(self):
        ckan.model.repo.rebuild_db()

    def test_01_user_follow_user_bad_apikey(self):
        # Every malformed API key must be rejected with 403.
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            params = json.dumps({'id': self.russianfan.id})
            extra_environ = {
                'Authorization': apikey,
            }
            response = self.app.post('/api/action/follow_user',
                params=params, extra_environ=extra_environ, status=403).json
            assert response['success'] == False
            assert response['error']['message'] == 'Access denied'

    def test_01_user_follow_dataset_bad_apikey(self):
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            params = json.dumps({'id': self.warandpeace.id})
            # Fixed: use the loop's apikey.  The Authorization header was
            # previously hard-coded to 'bad api key', so only one of the
            # listed bad keys was ever exercised (compare the user variant
            # of this test, which uses the loop variable).
            extra_environ = {
                'Authorization': apikey,
            }
            response = self.app.post('/api/action/follow_dataset',
                params=params, extra_environ=extra_environ, status=403).json
            assert response['success'] == False
            assert response['error']['message'] == 'Access denied'

    def test_01_user_follow_user_missing_apikey(self):
        params = json.dumps({'id': self.russianfan.id})
        response = self.app.post('/api/action/follow_user',
            params=params, status=403).json
        assert response['success'] == False
        assert response['error']['message'] == 'Access denied'

    def test_01_user_follow_dataset_missing_apikey(self):
        params = json.dumps({'id': self.warandpeace.id})
        response = self.app.post('/api/action/follow_dataset',
            params=params, status=403).json
        assert response['success'] == False
        assert response['error']['message'] == 'Access denied'

    def test_01_user_follow_user_bad_object_id(self):
        # NOTE(review): the loop variable object_id is unused below -- each
        # iteration posts the literal 'bad id'.  Substituting object_id would
        # change the expected error for None/'' ('Missing value' rather than
        # 'Not found: User'), so the literal is kept; confirm intent before
        # changing this.
        for object_id in ('bad id', '', ' ', None, 3, 35.7, 'xxx'):
            params = json.dumps({'id': 'bad id'})
            extra_environ = {
                'Authorization': str(self.annafan.apikey),
            }
            response = self.app.post('/api/action/follow_user',
                params=params, extra_environ=extra_environ, status=409).json
            assert response['success'] == False
            assert response['error']['id'] == ['Not found: User']

    def test_01_user_follow_dataset_bad_object_id(self):
        # NOTE(review): object_id is unused here too -- see the user variant
        # above for why the literal 'bad id' is deliberately left in place.
        for object_id in ('bad id', '', ' ', None, 3, 35.7, 'xxx'):
            params = json.dumps({'id': 'bad id'})
            extra_environ = {
                'Authorization': str(self.annafan.apikey),
            }
            response = self.app.post('/api/action/follow_dataset',
                params=params, extra_environ=extra_environ, status=409).json
            assert response['success'] == False
            assert response['error']['id'] == ['Not found: Dataset']

    def test_01_user_follow_user_missing_object_id(self):
        params = json.dumps({})
        extra_environ = {
            'Authorization': str(self.annafan.apikey),
        }
        response = self.app.post('/api/action/follow_user',
            params=params, extra_environ=extra_environ, status=409).json
        assert response['success'] == False
        assert response['error']['id'] == ['Missing value']

    def test_01_user_follow_dataset_missing_object_id(self):
        params = json.dumps({})
        extra_environ = {
            'Authorization': str(self.annafan.apikey),
        }
        response = self.app.post('/api/action/follow_dataset',
            params=params, extra_environ=extra_environ, status=409).json
        assert response['success'] == False
        assert response['error']['id'] == ['Missing value']

    def test_02_user_follow_user_by_id(self):
        follow_user(self.app, self.annafan.id, self.annafan.apikey,
                    self.russianfan.id, self.russianfan.id)

    def test_02_user_follow_dataset_by_id(self):
        follow_dataset(self.app, self.annafan.id, self.annafan.apikey,
                       self.warandpeace.id, self.warandpeace.id)

    def test_02_user_follow_user_by_name(self):
        # follow_user also accepts the followee's name in place of the id.
        follow_user(self.app, self.annafan.id, self.annafan.apikey,
                    self.testsysadmin.id, self.testsysadmin.name)

    def test_02_user_follow_dataset_by_name(self):
        follow_dataset(self.app, self.joeadmin.id, self.joeadmin.apikey,
                       self.warandpeace.id, self.warandpeace.name)

    def test_03_user_follow_user_already_following(self):
        # Re-following (by either id or name) the users followed in the
        # test_02 methods above must fail with 409.
        for object_id in (self.russianfan.id, self.russianfan.name,
                          self.testsysadmin.id, self.testsysadmin.name):
            params = json.dumps({'id': object_id})
            extra_environ = {
                'Authorization': str(self.annafan.apikey),
            }
            response = self.app.post('/api/action/follow_user',
                params=params, extra_environ=extra_environ, status=409).json
            assert response['success'] == False
            assert response['error']['message'].startswith(
                'You are already following ')

    def test_03_user_follow_dataset_already_following(self):
        for object_id in (self.warandpeace.id, self.warandpeace.name):
            params = json.dumps({'id': object_id})
            extra_environ = {
                'Authorization': str(self.annafan.apikey),
            }
            response = self.app.post('/api/action/follow_dataset',
                params=params, extra_environ=extra_environ, status=409).json
            assert response['success'] == False
            assert response['error']['message'].startswith(
                'You are already following ')

    def test_03_user_cannot_follow_herself(self):
        params = json.dumps({'id': self.annafan.id})
        extra_environ = {
            'Authorization': str(self.annafan.apikey),
        }
        response = self.app.post('/api/action/follow_user',
            params=params, extra_environ=extra_environ, status=409).json
        assert response['success'] == False
        assert response['error']['message'] == 'You cannot follow yourself'

    def test_04_user_follower_count_bad_id(self):
        # user_follower_count rejects malformed IDs with a validation error.
        for object_id in ('bad id', ' ', 3, 35.7, 'xxx', ''):
            params = json.dumps({'id': object_id})
            response = self.app.post('/api/action/user_follower_count',
                params=params, status=409).json
            assert response['success'] is False
            # dict.has_key() is Python-2-only; 'in' is equivalent.
            assert 'id' in response['error']

    def test_04_dataset_follower_count_bad_id(self):
        # dataset_follower_count rejects malformed IDs with a validation
        # error.
        for object_id in ('bad id', ' ', 3, 35.7, 'xxx', ''):
            params = json.dumps({'id': object_id})
            response = self.app.post('/api/action/dataset_follower_count',
                params=params, status=409).json
            assert response['success'] is False
            # dict.has_key() is Python-2-only; 'in' is equivalent.
            assert 'id' in response['error']

    def test_04_user_follower_count_missing_id(self):
        params = json.dumps({})
        response = self.app.post('/api/action/user_follower_count',
            params=params, status=409).json
        assert response['success'] is False
        assert response['error']['id'] == ['Missing value']

    def test_04_dataset_follower_count_missing_id(self):
        params = json.dumps({})
        response = self.app.post('/api/action/dataset_follower_count',
            params=params, status=409).json
        assert response['success'] is False
        assert response['error']['id'] == ['Missing value']

    def test_04_user_follower_count_no_followers(self):
        params = json.dumps({'id': self.annafan.id})
        response = self.app.post('/api/action/user_follower_count',
            params=params).json
        assert response['success'] is True
        assert response['result'] == 0

    def test_04_dataset_follower_count_no_followers(self):
        params = json.dumps({'id': self.annakarenina.id})
        response = self.app.post('/api/action/dataset_follower_count',
            params=params).json
        assert response['success'] is True
        assert response['result'] == 0

    def test_04_user_follower_list_bad_id(self):
        for object_id in ('bad id', ' ', 3, 35.7, 'xxx', ''):
            params = json.dumps({'id': object_id})
            response = self.app.post('/api/action/user_follower_list',
                params=params, status=409).json
            assert response['success'] is False
            assert response['error']['id']

    def test_04_dataset_follower_list_bad_id(self):
        for object_id in ('bad id', ' ', 3, 35.7, 'xxx', ''):
            params = json.dumps({'id': object_id})
            response = self.app.post('/api/action/dataset_follower_list',
                params=params, status=409).json
            assert response['success'] is False
            assert response['error']['id']

    def test_04_user_follower_list_missing_id(self):
        params = json.dumps({})
        response = self.app.post('/api/action/user_follower_list',
            params=params, status=409).json
        assert response['success'] is False
        assert response['error']['id'] == ['Missing value']

    def test_04_dataset_follower_list_missing_id(self):
        params = json.dumps({})
        response = self.app.post('/api/action/dataset_follower_list',
            params=params, status=409).json
        assert response['success'] is False
        assert response['error']['id'] == ['Missing value']

    def test_04_user_follower_list_no_followers(self):
        params = json.dumps({'id': self.annafan.id})
        response = self.app.post('/api/action/user_follower_list',
            params=params).json
        assert response['success'] is True
        assert response['result'] == []

    def test_04_dataset_follower_list_no_followers(self):
        params = json.dumps({'id': self.annakarenina.id})
        response = self.app.post('/api/action/dataset_follower_list',
            params=params).json
        assert response['success'] is True
        assert response['result'] == []

    def test_04_am_following_dataset_bad_id(self):
        for object_id in ('bad id', ' ', 3, 35.7, 'xxx'):
            params = json.dumps({'id': object_id})
            extra_environ = {'Authorization': str(self.annafan.apikey)}
            response = self.app.post('/api/action/am_following_dataset',
                params=params, extra_environ=extra_environ,
                status=409).json
            assert response['success'] is False
            assert response['error']['id'] == [u'Not found: Dataset']

    def test_04_am_following_dataset_missing_id(self):
        # 'missing' means omit the id key entirely; None/'' send empty ids.
        for id in ('missing', None, ''):
            if id == 'missing':
                params = json.dumps({})
            else:
                params = json.dumps({'id': id})
            extra_environ = {'Authorization': str(self.annafan.apikey)}
            response = self.app.post('/api/action/am_following_dataset',
                params=params, extra_environ=extra_environ, status=409).json
            assert response['success'] is False
            assert response['error']['id'] == [u'Missing value']

    def test_04_am_following_dataset_bad_apikey(self):
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            params = json.dumps({'id': self.warandpeace.id})
            extra_environ = {'Authorization': apikey}
            response = self.app.post('/api/action/am_following_dataset',
                params=params, extra_environ=extra_environ, status=403).json
            assert response['success'] == False
            assert response['error']['message'] == 'Access denied'

    def test_04_am_following_dataset_missing_apikey(self):
        params = json.dumps({'id': self.warandpeace.id})
        response = self.app.post('/api/action/am_following_dataset',
            params=params, status=403).json
        assert response['success'] == False
        assert response['error']['message'] == 'Access denied'

    def test_04_am_following_user_bad_id(self):
        for object_id in ('bad id', ' ', 3, 35.7, 'xxx'):
            params = json.dumps({'id': object_id})
            extra_environ = {'Authorization': str(self.annafan.apikey)}
            response = self.app.post('/api/action/am_following_user',
                params=params, extra_environ=extra_environ,
                status=409).json
            assert response['success'] is False
            assert response['error']['id'] == [u'Not found: User']

    def test_04_am_following_user_missing_id(self):
        for id in ('missing', None, ''):
            if id == 'missing':
                params = json.dumps({})
            else:
                params = json.dumps({'id': id})
            extra_environ = {'Authorization': str(self.annafan.apikey)}
            response = self.app.post('/api/action/am_following_user',
                params=params, extra_environ=extra_environ, status=409).json
            assert response['success'] is False
            assert response['error']['id'] == [u'Missing value']

    def test_04_am_following_user_bad_apikey(self):
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            params = json.dumps({'id': self.annafan.id})
            extra_environ = {'Authorization': apikey}
            response = self.app.post('/api/action/am_following_user',
                params=params, extra_environ=extra_environ, status=403).json
            assert response['success'] == False
            assert response['error']['message'] == 'Access denied'

    def test_04_am_following_user_missing_apikey(self):
        params = json.dumps({'id': self.annafan.id})
        response = self.app.post('/api/action/am_following_user',
            params=params, status=403).json
        assert response['success'] == False
        assert response['error']['message'] == 'Access denied'
class TestFollowerDelete(object):
    '''Tests for the unfollow_user and unfollow_dataset APIs.'''

    @classmethod
    def setup_class(self):
        # Create the standard test fixtures, then establish a known set of
        # follower relationships for the unfollow tests to tear down.
        ckan.tests.CreateTestData.create()
        self.testsysadmin = ckan.model.User.get('testsysadmin')
        self.annafan = ckan.model.User.get('annafan')
        self.russianfan = ckan.model.User.get('russianfan')
        self.tester = ckan.model.User.get('tester')
        self.joeadmin = ckan.model.User.get('joeadmin')
        self.warandpeace = ckan.model.Package.get('warandpeace')
        self.annakarenina = ckan.model.Package.get('annakarenina')
        self.app = paste.fixture.TestApp(pylons.test.pylonsapp)
        # joeadmin gains four followers; tester gains one (annafan).
        follow_user(self.app, self.testsysadmin.id, self.testsysadmin.apikey,
                self.joeadmin.id, self.joeadmin.id)
        follow_user(self.app, self.tester.id, self.tester.apikey,
                self.joeadmin.id, self.joeadmin.id)
        follow_user(self.app, self.russianfan.id, self.russianfan.apikey,
                self.joeadmin.id, self.joeadmin.id)
        follow_user(self.app, self.annafan.id, self.annafan.apikey,
                self.joeadmin.id, self.joeadmin.id)
        follow_user(self.app, self.annafan.id, self.annafan.apikey,
                self.tester.id, self.tester.id)
        # warandpeace gains four followers.
        follow_dataset(self.app, self.testsysadmin.id,
                self.testsysadmin.apikey, self.warandpeace.id,
                self.warandpeace.id)
        follow_dataset(self.app, self.tester.id, self.tester.apikey,
                self.warandpeace.id, self.warandpeace.id)
        follow_dataset(self.app, self.russianfan.id, self.russianfan.apikey,
                self.warandpeace.id, self.warandpeace.id)
        follow_dataset(self.app, self.annafan.id, self.annafan.apikey,
                self.warandpeace.id, self.warandpeace.id)

    @classmethod
    def teardown_class(self):
        ckan.model.repo.rebuild_db()

    def test_01_unfollow_user_not_exists(self):
        '''Test the error response when a user tries to unfollow a user that
        she is not following.

        '''
        # annafan does not follow russianfan (see setup_class above).
        params = json.dumps({'id': self.russianfan.id})
        extra_environ = {
            'Authorization': str(self.annafan.apikey),
        }
        response = self.app.post('/api/action/unfollow_user',
            params=params, extra_environ=extra_environ, status=404).json
        assert response['success'] == False
        assert response['error']['message'].startswith(
            'Not found: Could not find follower ')

    def test_01_unfollow_dataset_not_exists(self):
        '''Test the error response when a user tries to unfollow a dataset that
        she is not following.

        '''
        params = json.dumps({'id': self.annakarenina.id})
        extra_environ = {
            'Authorization': str(self.annafan.apikey),
        }
        response = self.app.post('/api/action/unfollow_dataset',
            params=params, extra_environ=extra_environ, status=404).json
        assert response['success'] == False
        assert response['error']['message'].startswith(
            'Not found: Could not find follower ')

    def test_01_unfollow_user_bad_apikey(self):
        '''Test the error response when a user tries to unfollow a user
        but provides a bad API key.

        '''
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            params = json.dumps({
                'id': self.joeadmin.id,
                })
            extra_environ = {
                'Authorization': apikey,
            }
            response = self.app.post('/api/action/unfollow_user',
                params=params, extra_environ=extra_environ, status=403).json
            assert response['success'] == False
            assert response['error']['message'] == 'Access denied'

    def test_01_unfollow_dataset_bad_apikey(self):
        '''Test the error response when a user tries to unfollow a dataset
        but provides a bad API key.

        '''
        for apikey in ('bad api key', '', ' ', 'None', '3', '35.7', 'xxx'):
            params = json.dumps({
                'id': self.warandpeace.id,
                })
            extra_environ = {
                'Authorization': apikey,
            }
            response = self.app.post('/api/action/unfollow_dataset',
                params=params, extra_environ=extra_environ, status=403).json
            assert response['success'] == False
            assert response['error']['message'] == 'Access denied'

    def test_01_unfollow_user_missing_apikey(self):
        # No Authorization header at all -> 403.
        params = json.dumps({
            'id': self.joeadmin.id,
            })
        response = self.app.post('/api/action/unfollow_user',
            params=params, status=403).json
        assert response['success'] == False
        assert response['error']['message'] == 'Access denied'

    def test_01_unfollow_dataset_missing_apikey(self):
        params = json.dumps({
            'id': self.warandpeace.id,
            })
        response = self.app.post('/api/action/unfollow_dataset',
            params=params, status=403).json
        assert response['success'] == False
        assert response['error']['message'] == 'Access denied'

    def test_01_unfollow_user_bad_object_id(self):
        '''Test error response when calling unfollow_user with a bad object
        id.

        '''
        for object_id in ('bad id', ' ', 3, 35.7, 'xxx'):
            params = json.dumps({
                'id': object_id,
                })
            extra_environ = {
                'Authorization': str(self.annafan.apikey),
            }
            response = self.app.post('/api/action/unfollow_user',
                params=params, extra_environ=extra_environ, status=409).json
            assert response['success'] == False
            assert response['error']['id'] == [u'Not found: User']

    def test_01_unfollow_dataset_bad_object_id(self):
        '''Test error response when calling unfollow_dataset with a bad object
        id.

        '''
        for object_id in ('bad id', ' ', 3, 35.7, 'xxx'):
            params = json.dumps({
                'id': object_id,
                })
            extra_environ = {
                'Authorization': str(self.annafan.apikey),
            }
            response = self.app.post('/api/action/unfollow_dataset',
                params=params, extra_environ=extra_environ, status=409).json
            assert response['success'] == False
            assert response['error']['id'] == [u'Not found: Dataset']

    def test_01_unfollow_user_missing_object_id(self):
        # 'missing' means omit the id key entirely; None/'' send empty ids.
        for id in ('missing', None, ''):
            if id == 'missing':
                params = json.dumps({})
            else:
                params = json.dumps({'id':id})
            extra_environ = {'Authorization': str(self.annafan.apikey),}
            response = self.app.post('/api/action/unfollow_user',
                params=params, extra_environ=extra_environ, status=409).json
            assert response['success'] == False
            assert response['error']['id'] == [u'Missing value']

    def test_01_unfollow_dataset_missing_object_id(self):
        params = json.dumps({})
        extra_environ = {'Authorization': str(self.annafan.apikey),}
        response = self.app.post('/api/action/unfollow_dataset',
            params=params, extra_environ=extra_environ, status=409).json
        assert response['success'] == False
        assert response['error']['id'] == ['Missing value']

    def _unfollow_user(self, follower_id, apikey, object_id, object_arg):
        '''Test a user unfollowing a user via the API.

        :param follower_id: id of the follower.
        :param apikey: API key of the follower.
        :param object_id: id of the object to unfollow.
        :param object_arg: the argument to pass to unfollow_user as the id of
            the object to unfollow, could be the object's id or name.

        '''
        # Record the user's number of followers before.
        params = json.dumps({'id': object_id})
        response = self.app.post('/api/action/user_follower_count',
                params=params).json
        assert response['success'] is True
        count_before = response['result']
        # Check that the user is following the object.
        params = json.dumps({'id': object_id})
        extra_environ = {'Authorization': str(apikey)}
        response = self.app.post('/api/action/am_following_user',
                params=params, extra_environ=extra_environ).json
        assert response['success'] is True
        assert response['result'] is True
        # Make the user unfollow the object.
        params = {
            'id': object_arg,
            }
        extra_environ = {'Authorization': str(apikey)}
        response = self.app.post('/api/action/unfollow_user',
            params=json.dumps(params), extra_environ=extra_environ).json
        assert response['success'] is True
        # Check that am_following_user now returns False.
        params = json.dumps({'id': object_id})
        extra_environ = {'Authorization': str(apikey)}
        response = self.app.post('/api/action/am_following_user',
                params=params, extra_environ=extra_environ).json
        assert response['success'] is True
        assert response['result'] is False
        # Check that the user doesn't appear in the object's list of followers.
        params = json.dumps({'id': object_id})
        response = self.app.post('/api/action/user_follower_list',
                params=params).json
        assert response['success'] is True
        assert response['result']
        followers = response['result']
        assert len([follower for follower in followers if follower['id'] ==
            follower_id]) == 0
        # Check that the object's follower count has decreased by 1.
        params = json.dumps({'id': object_id})
        response = self.app.post('/api/action/user_follower_count',
                params=params).json
        assert response['success'] is True
        assert response['result'] == count_before - 1

    def _unfollow_dataset(self, user_id, apikey, dataset_id, dataset_arg):
        '''Test a user unfollowing a dataset via the API.

        :param user_id: id of the follower.
        :param apikey: API key of the follower.
        :param dataset_id: id of the object to unfollow.
        :param dataset_arg: the argument to pass to unfollow_dataset as the id
            of the object to unfollow, could be the object's id or name.

        '''
        # Record the dataset's number of followers before.
        params = json.dumps({'id': dataset_id})
        response = self.app.post('/api/action/dataset_follower_count',
                params=params).json
        assert response['success'] is True
        count_before = response['result']
        # Check that the user is following the dataset.
        params = json.dumps({'id': dataset_id})
        extra_environ = {'Authorization': str(apikey)}
        response = self.app.post('/api/action/am_following_dataset',
                params=params, extra_environ=extra_environ).json
        assert response['success'] is True
        assert response['result'] is True
        # Make the user unfollow the dataset.
        params = {
            'id': dataset_arg,
            }
        extra_environ = {'Authorization': str(apikey)}
        response = self.app.post('/api/action/unfollow_dataset',
            params=json.dumps(params), extra_environ=extra_environ).json
        assert response['success'] is True
        # Check that am_following_dataset now returns False.
        params = json.dumps({'id': dataset_id})
        extra_environ = {'Authorization': str(apikey)}
        response = self.app.post('/api/action/am_following_dataset',
                params=params, extra_environ=extra_environ).json
        assert response['success'] is True
        assert response['result'] is False
        # Check that the user doesn't appear in the dataset's list of
        # followers.
        params = json.dumps({'id': dataset_id})
        response = self.app.post('/api/action/dataset_follower_list',
                params=params).json
        assert response['success'] is True
        assert response['result']
        followers = response['result']
        assert len([follower for follower in followers if follower['id'] ==
            user_id]) == 0
        # Check that the dataset's follower count has decreased by 1.
        params = json.dumps({'id': dataset_id})
        response = self.app.post('/api/action/dataset_follower_count',
                params=params).json
        assert response['success'] is True
        assert response['result'] == count_before - 1

    def test_02_follower_delete_by_id(self):
        # annafan stops following joeadmin and warandpeace (both established
        # in setup_class above).
        self._unfollow_user(self.annafan.id, self.annafan.apikey,
                self.joeadmin.id, self.joeadmin.id)
        self._unfollow_dataset(self.annafan.id, self.annafan.apikey,
                self.warandpeace.id, self.warandpeace.id)
class TestFollowerCascade(object):
    '''Tests for on delete cascade of follower table rows.'''

    @classmethod
    def setup_class(self):
        # Create fixtures, establish follower relationships around joeadmin
        # and warandpeace, then delete those two objects so the tests can
        # verify that their follower rows cascaded away.
        ckan.tests.CreateTestData.create()
        self.testsysadmin = ckan.model.User.get('testsysadmin')
        self.annafan = ckan.model.User.get('annafan')
        self.russianfan = ckan.model.User.get('russianfan')
        self.tester = ckan.model.User.get('tester')
        self.joeadmin = ckan.model.User.get('joeadmin')
        self.warandpeace = ckan.model.Package.get('warandpeace')
        self.annakarenina = ckan.model.Package.get('annakarenina')
        self.app = paste.fixture.TestApp(pylons.test.pylonsapp)
        # joeadmin follows testsysadmin and annakarenina, and is followed by
        # tester; warandpeace is followed by testsysadmin.
        follow_user(self.app, self.joeadmin.id, self.joeadmin.apikey,
                    self.testsysadmin.id, self.testsysadmin.id)
        follow_user(self.app, self.annafan.id, self.annafan.apikey,
                    self.testsysadmin.id, self.testsysadmin.id)
        follow_user(self.app, self.russianfan.id, self.russianfan.apikey,
                    self.testsysadmin.id, self.testsysadmin.id)
        follow_dataset(self.app, self.joeadmin.id, self.joeadmin.apikey,
                       self.annakarenina.id, self.annakarenina.id)
        follow_dataset(self.app, self.annafan.id, self.annafan.apikey,
                       self.annakarenina.id, self.annakarenina.id)
        follow_dataset(self.app, self.russianfan.id, self.russianfan.apikey,
                       self.annakarenina.id, self.annakarenina.id)
        follow_user(self.app, self.tester.id, self.tester.apikey,
                    self.joeadmin.id, self.joeadmin.id)
        follow_dataset(self.app, self.testsysadmin.id,
                       self.testsysadmin.apikey, self.warandpeace.id,
                       self.warandpeace.id)
        # Delete the followed/following objects; the follower rows that
        # reference them should cascade.
        session = ckan.model.Session()
        session.delete(self.joeadmin)
        session.commit()
        session.delete(self.warandpeace)
        session.commit()

    @classmethod
    def teardown_class(self):
        ckan.model.repo.rebuild_db()

    def test_01_on_delete_cascade_api(self):
        '''
        Test that UserFollowingUser and UserFollowingDataset rows cascade.
        '''
        # dict.has_key() is Python-2-only; all membership checks below use
        # the equivalent 'in' operator.
        # It should no longer be possible to get joeadmin's follower list.
        params = json.dumps({'id': 'joeadmin'})
        response = self.app.post('/api/action/user_follower_list',
            params=params, status=409).json
        assert response['success'] is False
        assert 'id' in response['error']
        # It should no longer be possible to get warandpeace's follower list.
        params = json.dumps({'id': 'warandpeace'})
        response = self.app.post('/api/action/dataset_follower_list',
            params=params, status=409).json
        assert response['success'] is False
        assert 'id' in response['error']
        # It should no longer be possible to get joeadmin's follower count.
        params = json.dumps({'id': 'joeadmin'})
        response = self.app.post('/api/action/user_follower_count',
            params=params, status=409).json
        assert response['success'] is False
        assert 'id' in response['error']
        # It should no longer be possible to get warandpeace's follower count.
        params = json.dumps({'id': 'warandpeace'})
        response = self.app.post('/api/action/dataset_follower_count',
            params=params, status=409).json
        assert response['success'] is False
        assert 'id' in response['error']
        # It should no longer be possible to get am_following for joeadmin.
        params = json.dumps({'id': 'joeadmin'})
        extra_environ = {'Authorization': str(self.testsysadmin.apikey)}
        response = self.app.post('/api/action/am_following_user',
            params=params, extra_environ=extra_environ, status=409).json
        assert response['success'] is False
        assert 'id' in response['error']
        # It should no longer be possible to get am_following for warandpeace.
        params = json.dumps({'id': 'warandpeace'})
        extra_environ = {'Authorization': str(self.testsysadmin.apikey)}
        response = self.app.post('/api/action/am_following_dataset',
            params=params, extra_environ=extra_environ, status=409).json
        assert response['success'] is False
        assert 'id' in response['error']
        # It should no longer be possible to unfollow joeadmin.
        params = json.dumps({'id': 'joeadmin'})
        extra_environ = {'Authorization': str(self.tester.apikey)}
        response = self.app.post('/api/action/unfollow_user',
            params=params, extra_environ=extra_environ, status=409).json
        assert response['success'] is False
        assert response['error']['id'] == ['Not found: User']
        # It should no longer be possible to unfollow warandpeace.
        params = json.dumps({'id': 'warandpeace'})
        extra_environ = {'Authorization': str(self.testsysadmin.apikey)}
        response = self.app.post('/api/action/unfollow_dataset',
            params=params, extra_environ=extra_environ, status=409).json
        assert response['success'] is False
        assert response['error']['id'] == ['Not found: Dataset']
        # It should no longer be possible to follow joeadmin.
        params = json.dumps({'id': 'joeadmin'})
        extra_environ = {'Authorization': str(self.annafan.apikey)}
        response = self.app.post('/api/action/follow_user',
            params=params, extra_environ=extra_environ, status=409).json
        assert response['success'] is False
        assert 'id' in response['error']
        # It should no longer be possible to follow warandpeace.
        params = json.dumps({'id': 'warandpeace'})
        extra_environ = {'Authorization': str(self.annafan.apikey)}
        response = self.app.post('/api/action/follow_dataset',
            params=params, extra_environ=extra_environ, status=409).json
        assert response['success'] is False
        assert 'id' in response['error']
        # Users who joeadmin was following should no longer have him in their
        # follower list.
        params = json.dumps({'id': self.testsysadmin.id})
        response = self.app.post('/api/action/user_follower_list',
            params=params).json
        assert response['success'] is True
        followers = [follower['name'] for follower in response['result']]
        assert 'joeadmin' not in followers
        # Datasets who joeadmin was following should no longer have him in
        # their follower list.
        params = json.dumps({'id': self.annakarenina.id})
        response = self.app.post('/api/action/dataset_follower_list',
            params=params).json
        assert response['success'] is True
        followers = [follower['name'] for follower in response['result']]
        assert 'joeadmin' not in followers

    def test_02_on_delete_cascade_db(self):
        if not are_foreign_keys_supported():
            raise SkipTest("Search not supported")
        # After the previous test above there should be no rows with joeadmin's
        # id in the UserFollowingUser or UserFollowingDataset tables.
        from ckan.model import UserFollowingUser, UserFollowingDataset
        session = ckan.model.Session()
        query = session.query(UserFollowingUser)
        query = query.filter(UserFollowingUser.follower_id == self.joeadmin.id)
        assert query.count() == 0
        query = session.query(UserFollowingUser)
        query = query.filter(UserFollowingUser.object_id == self.joeadmin.id)
        assert query.count() == 0
        query = session.query(UserFollowingDataset)
        # Fixed: filter on UserFollowingDataset.follower_id.  Filtering a
        # UserFollowingDataset query on a UserFollowingUser column created an
        # implicit cross join, so this assertion never actually inspected the
        # dataset-follower table.
        query = query.filter(
            UserFollowingDataset.follower_id == self.joeadmin.id)
        assert query.count() == 0
        # There should be no rows with warandpeace's id either.
        query = session.query(UserFollowingUser)
        query = query.filter(UserFollowingUser.object_id == self.warandpeace.id)
        assert query.count() == 0
        query = session.query(UserFollowingDataset)
        # Fixed: same cross-join bug -- this is the dataset-follower table.
        query = query.filter(
            UserFollowingDataset.object_id == self.warandpeace.id)
        assert query.count() == 0
|
[
"ckan.model.repo.rebuild_db",
"paste.fixture.TestApp",
"ckan.tests.are_foreign_keys_supported",
"ckan.model.Session",
"ckan.tests.SkipTest",
"ckan.lib.helpers.json.dumps",
"datetime.datetime.strptime",
"ckan.tests.CreateTestData.create",
"ckan.model.Package.get",
"ckan.model.User.get",
"datetime.datetime.now"
] |
[((405, 458), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['s', '"""%Y-%m-%dT%H:%M:%S.%f"""'], {}), "(s, '%Y-%m-%dT%H:%M:%S.%f')\n", (431, 458), False, 'import datetime\n'), ((1019, 1048), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (1029, 1048), False, 'from ckan.lib.helpers import json\n'), ((1294, 1323), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (1304, 1323), False, 'from ckan.lib.helpers import json\n'), ((1634, 1657), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1655, 1657), False, 'import datetime\n'), ((1873, 1896), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1894, 1896), False, 'import datetime\n'), ((2294, 2323), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (2304, 2323), False, 'from ckan.lib.helpers import json\n'), ((2653, 2682), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (2663, 2682), False, 'from ckan.lib.helpers import json\n'), ((3105, 3134), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (3115, 3134), False, 'from ckan.lib.helpers import json\n'), ((3802, 3832), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': dataset_id}"], {}), "({'id': dataset_id})\n", (3812, 3832), False, 'from ckan.lib.helpers import json\n'), ((4082, 4112), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': dataset_id}"], {}), "({'id': dataset_id})\n", (4092, 4112), False, 'from ckan.lib.helpers import json\n'), ((4427, 4450), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4448, 4450), False, 'import datetime\n'), ((4670, 4693), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4691, 4693), False, 'import datetime\n'), ((5095, 5125), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 
dataset_id}"], {}), "({'id': dataset_id})\n", (5105, 5125), False, 'from ckan.lib.helpers import json\n'), ((5459, 5489), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': dataset_id}"], {}), "({'id': dataset_id})\n", (5469, 5489), False, 'from ckan.lib.helpers import json\n'), ((5916, 5946), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': dataset_id}"], {}), "({'id': dataset_id})\n", (5926, 5946), False, 'from ckan.lib.helpers import json\n'), ((6248, 6282), 'ckan.tests.CreateTestData.create', 'ckan.tests.CreateTestData.create', ([], {}), '()\n', (6280, 6282), False, 'import ckan\n'), ((6311, 6346), 'ckan.model.User.get', 'ckan.model.User.get', (['"""testsysadmin"""'], {}), "('testsysadmin')\n", (6330, 6346), False, 'import ckan\n'), ((6370, 6400), 'ckan.model.User.get', 'ckan.model.User.get', (['"""annafan"""'], {}), "('annafan')\n", (6389, 6400), False, 'import ckan\n'), ((6427, 6460), 'ckan.model.User.get', 'ckan.model.User.get', (['"""russianfan"""'], {}), "('russianfan')\n", (6446, 6460), False, 'import ckan\n'), ((6483, 6512), 'ckan.model.User.get', 'ckan.model.User.get', (['"""tester"""'], {}), "('tester')\n", (6502, 6512), False, 'import ckan\n'), ((6537, 6568), 'ckan.model.User.get', 'ckan.model.User.get', (['"""joeadmin"""'], {}), "('joeadmin')\n", (6556, 6568), False, 'import ckan\n'), ((6596, 6633), 'ckan.model.Package.get', 'ckan.model.Package.get', (['"""warandpeace"""'], {}), "('warandpeace')\n", (6618, 6633), False, 'import ckan\n'), ((6662, 6700), 'ckan.model.Package.get', 'ckan.model.Package.get', (['"""annakarenina"""'], {}), "('annakarenina')\n", (6684, 6700), False, 'import ckan\n'), ((6720, 6764), 'paste.fixture.TestApp', 'paste.fixture.TestApp', (['pylons.test.pylonsapp'], {}), '(pylons.test.pylonsapp)\n', (6741, 6764), False, 'import paste\n'), ((6821, 6849), 'ckan.model.repo.rebuild_db', 'ckan.model.repo.rebuild_db', ([], {}), '()\n', (6847, 6849), False, 'import ckan\n'), ((8026, 8064), 'ckan.lib.helpers.json.dumps', 
'json.dumps', (["{'id': self.russianfan.id}"], {}), "({'id': self.russianfan.id})\n", (8036, 8064), False, 'from ckan.lib.helpers import json\n'), ((8352, 8391), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.warandpeace.id}"], {}), "({'id': self.warandpeace.id})\n", (8362, 8391), False, 'from ckan.lib.helpers import json\n'), ((9785, 9799), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (9795, 9799), False, 'from ckan.lib.helpers import json\n'), ((10219, 10233), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (10229, 10233), False, 'from ckan.lib.helpers import json\n'), ((12615, 12650), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.annafan.id}"], {}), "({'id': self.annafan.id})\n", (12625, 12650), False, 'from ckan.lib.helpers import json\n'), ((14038, 14052), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (14048, 14052), False, 'from ckan.lib.helpers import json\n'), ((14348, 14362), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (14358, 14362), False, 'from ckan.lib.helpers import json\n'), ((14660, 14695), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.annafan.id}"], {}), "({'id': self.annafan.id})\n", (14670, 14695), False, 'from ckan.lib.helpers import json\n'), ((14959, 14999), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.annakarenina.id}"], {}), "({'id': self.annakarenina.id})\n", (14969, 14999), False, 'from ckan.lib.helpers import json\n'), ((16028, 16042), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (16038, 16042), False, 'from ckan.lib.helpers import json\n'), ((16336, 16350), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (16346, 16350), False, 'from ckan.lib.helpers import json\n'), ((16646, 16681), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.annafan.id}"], {}), "({'id': self.annafan.id})\n", (16656, 16681), False, 'from ckan.lib.helpers import 
json\n'), ((16944, 16984), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.annakarenina.id}"], {}), "({'id': self.annakarenina.id})\n", (16954, 16984), False, 'from ckan.lib.helpers import json\n'), ((18869, 18908), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.warandpeace.id}"], {}), "({'id': self.warandpeace.id})\n", (18879, 18908), False, 'from ckan.lib.helpers import json\n'), ((20796, 20831), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.annafan.id}"], {}), "({'id': self.annafan.id})\n", (20806, 20831), False, 'from ckan.lib.helpers import json\n'), ((21202, 21236), 'ckan.tests.CreateTestData.create', 'ckan.tests.CreateTestData.create', ([], {}), '()\n', (21234, 21236), False, 'import ckan\n'), ((21265, 21300), 'ckan.model.User.get', 'ckan.model.User.get', (['"""testsysadmin"""'], {}), "('testsysadmin')\n", (21284, 21300), False, 'import ckan\n'), ((21324, 21354), 'ckan.model.User.get', 'ckan.model.User.get', (['"""annafan"""'], {}), "('annafan')\n", (21343, 21354), False, 'import ckan\n'), ((21381, 21414), 'ckan.model.User.get', 'ckan.model.User.get', (['"""russianfan"""'], {}), "('russianfan')\n", (21400, 21414), False, 'import ckan\n'), ((21437, 21466), 'ckan.model.User.get', 'ckan.model.User.get', (['"""tester"""'], {}), "('tester')\n", (21456, 21466), False, 'import ckan\n'), ((21491, 21522), 'ckan.model.User.get', 'ckan.model.User.get', (['"""joeadmin"""'], {}), "('joeadmin')\n", (21510, 21522), False, 'import ckan\n'), ((21550, 21587), 'ckan.model.Package.get', 'ckan.model.Package.get', (['"""warandpeace"""'], {}), "('warandpeace')\n", (21572, 21587), False, 'import ckan\n'), ((21616, 21654), 'ckan.model.Package.get', 'ckan.model.Package.get', (['"""annakarenina"""'], {}), "('annakarenina')\n", (21638, 21654), False, 'import ckan\n'), ((21674, 21718), 'paste.fixture.TestApp', 'paste.fixture.TestApp', (['pylons.test.pylonsapp'], {}), '(pylons.test.pylonsapp)\n', (21695, 21718), False, 'import paste\n'), ((22931, 
22959), 'ckan.model.repo.rebuild_db', 'ckan.model.repo.rebuild_db', ([], {}), '()\n', (22957, 22959), False, 'import ckan\n'), ((23146, 23184), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.russianfan.id}"], {}), "({'id': self.russianfan.id})\n", (23156, 23184), False, 'from ckan.lib.helpers import json\n'), ((23770, 23810), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.annakarenina.id}"], {}), "({'id': self.annakarenina.id})\n", (23780, 23810), False, 'from ckan.lib.helpers import json\n'), ((25685, 25721), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.joeadmin.id}"], {}), "({'id': self.joeadmin.id})\n", (25695, 25721), False, 'from ckan.lib.helpers import json\n'), ((26035, 26074), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.warandpeace.id}"], {}), "({'id': self.warandpeace.id})\n", (26045, 26074), False, 'from ckan.lib.helpers import json\n'), ((28317, 28331), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (28327, 28331), False, 'from ckan.lib.helpers import json\n'), ((29160, 29189), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (29170, 29189), False, 'from ckan.lib.helpers import json\n'), ((29452, 29481), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (29462, 29481), False, 'from ckan.lib.helpers import json\n'), ((30171, 30200), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (30181, 30200), False, 'from ckan.lib.helpers import json\n'), ((30571, 30600), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (30581, 30600), False, 'from ckan.lib.helpers import json\n'), ((31018, 31047), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (31028, 31047), False, 'from ckan.lib.helpers import json\n'), ((31774, 31804), 
'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': dataset_id}"], {}), "({'id': dataset_id})\n", (31784, 31804), False, 'from ckan.lib.helpers import json\n'), ((32071, 32101), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': dataset_id}"], {}), "({'id': dataset_id})\n", (32081, 32101), False, 'from ckan.lib.helpers import json\n'), ((32802, 32832), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': dataset_id}"], {}), "({'id': dataset_id})\n", (32812, 32832), False, 'from ckan.lib.helpers import json\n'), ((33217, 33247), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': dataset_id}"], {}), "({'id': dataset_id})\n", (33227, 33247), False, 'from ckan.lib.helpers import json\n'), ((33665, 33695), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': dataset_id}"], {}), "({'id': dataset_id})\n", (33675, 33695), False, 'from ckan.lib.helpers import json\n'), ((34342, 34376), 'ckan.tests.CreateTestData.create', 'ckan.tests.CreateTestData.create', ([], {}), '()\n', (34374, 34376), False, 'import ckan\n'), ((34405, 34440), 'ckan.model.User.get', 'ckan.model.User.get', (['"""testsysadmin"""'], {}), "('testsysadmin')\n", (34424, 34440), False, 'import ckan\n'), ((34464, 34494), 'ckan.model.User.get', 'ckan.model.User.get', (['"""annafan"""'], {}), "('annafan')\n", (34483, 34494), False, 'import ckan\n'), ((34521, 34554), 'ckan.model.User.get', 'ckan.model.User.get', (['"""russianfan"""'], {}), "('russianfan')\n", (34540, 34554), False, 'import ckan\n'), ((34577, 34606), 'ckan.model.User.get', 'ckan.model.User.get', (['"""tester"""'], {}), "('tester')\n", (34596, 34606), False, 'import ckan\n'), ((34631, 34662), 'ckan.model.User.get', 'ckan.model.User.get', (['"""joeadmin"""'], {}), "('joeadmin')\n", (34650, 34662), False, 'import ckan\n'), ((34690, 34727), 'ckan.model.Package.get', 'ckan.model.Package.get', (['"""warandpeace"""'], {}), "('warandpeace')\n", (34712, 34727), False, 'import ckan\n'), ((34756, 34794), 'ckan.model.Package.get', 
'ckan.model.Package.get', (['"""annakarenina"""'], {}), "('annakarenina')\n", (34778, 34794), False, 'import ckan\n'), ((34814, 34858), 'paste.fixture.TestApp', 'paste.fixture.TestApp', (['pylons.test.pylonsapp'], {}), '(pylons.test.pylonsapp)\n', (34835, 34858), False, 'import paste\n'), ((35950, 35970), 'ckan.model.Session', 'ckan.model.Session', ([], {}), '()\n', (35968, 35970), False, 'import ckan\n'), ((36157, 36185), 'ckan.model.repo.rebuild_db', 'ckan.model.repo.rebuild_db', ([], {}), '()\n', (36183, 36185), False, 'import ckan\n'), ((36425, 36455), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'joeadmin'}"], {}), "({'id': 'joeadmin'})\n", (36435, 36455), False, 'from ckan.lib.helpers import json\n'), ((36758, 36791), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'warandpeace'}"], {}), "({'id': 'warandpeace'})\n", (36768, 36791), False, 'from ckan.lib.helpers import json\n'), ((37095, 37125), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'joeadmin'}"], {}), "({'id': 'joeadmin'})\n", (37105, 37125), False, 'from ckan.lib.helpers import json\n'), ((37430, 37463), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'warandpeace'}"], {}), "({'id': 'warandpeace'})\n", (37440, 37463), False, 'from ckan.lib.helpers import json\n'), ((37768, 37798), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'joeadmin'}"], {}), "({'id': 'joeadmin'})\n", (37778, 37798), False, 'from ckan.lib.helpers import json\n'), ((38203, 38236), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'warandpeace'}"], {}), "({'id': 'warandpeace'})\n", (38213, 38236), False, 'from ckan.lib.helpers import json\n'), ((38629, 38659), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'joeadmin'}"], {}), "({'id': 'joeadmin'})\n", (38639, 38659), False, 'from ckan.lib.helpers import json\n'), ((39057, 39090), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'warandpeace'}"], {}), "({'id': 'warandpeace'})\n", (39067, 39090), False, 'from 
ckan.lib.helpers import json\n'), ((39495, 39525), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'joeadmin'}"], {}), "({'id': 'joeadmin'})\n", (39505, 39525), False, 'from ckan.lib.helpers import json\n'), ((39905, 39938), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'warandpeace'}"], {}), "({'id': 'warandpeace'})\n", (39915, 39938), False, 'from ckan.lib.helpers import json\n'), ((40359, 40399), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.testsysadmin.id}"], {}), "({'id': self.testsysadmin.id})\n", (40369, 40399), False, 'from ckan.lib.helpers import json\n'), ((40787, 40827), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.annakarenina.id}"], {}), "({'id': self.annakarenina.id})\n", (40797, 40827), False, 'from ckan.lib.helpers import json\n'), ((41475, 41495), 'ckan.model.Session', 'ckan.model.Session', ([], {}), '()\n', (41493, 41495), False, 'import ckan\n'), ((7003, 7041), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.russianfan.id}"], {}), "({'id': self.russianfan.id})\n", (7013, 7041), False, 'from ckan.lib.helpers import json\n'), ((7551, 7590), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.warandpeace.id}"], {}), "({'id': self.warandpeace.id})\n", (7561, 7590), False, 'from ckan.lib.helpers import json\n'), ((8754, 8782), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'bad id'}"], {}), "({'id': 'bad id'})\n", (8764, 8782), False, 'from ckan.lib.helpers import json\n'), ((9304, 9332), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': 'bad id'}"], {}), "({'id': 'bad id'})\n", (9314, 9332), False, 'from ckan.lib.helpers import json\n'), ((11502, 11531), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (11512, 11531), False, 'from ckan.lib.helpers import json\n'), ((12100, 12129), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (12110, 12129), False, 'from ckan.lib.helpers 
import json\n'), ((13224, 13253), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (13234, 13253), False, 'from ckan.lib.helpers import json\n'), ((13710, 13739), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (13720, 13739), False, 'from ckan.lib.helpers import json\n'), ((15326, 15355), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (15336, 15355), False, 'from ckan.lib.helpers import json\n'), ((15710, 15739), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (15720, 15739), False, 'from ckan.lib.helpers import json\n'), ((17309, 17338), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (17319, 17338), False, 'from ckan.lib.helpers import json\n'), ((18433, 18472), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.warandpeace.id}"], {}), "({'id': self.warandpeace.id})\n", (18443, 18472), False, 'from ckan.lib.helpers import json\n'), ((19261, 19290), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (19271, 19290), False, 'from ckan.lib.helpers import json\n'), ((20370, 20405), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.annafan.id}"], {}), "({'id': self.annafan.id})\n", (20380, 20405), False, 'from ckan.lib.helpers import json\n'), ((24478, 24514), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.joeadmin.id}"], {}), "({'id': self.joeadmin.id})\n", (24488, 24514), False, 'from ckan.lib.helpers import json\n'), ((25182, 25221), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': self.warandpeace.id}"], {}), "({'id': self.warandpeace.id})\n", (25192, 25221), False, 'from ckan.lib.helpers import json\n'), ((26554, 26583), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (26564, 
26583), False, 'from ckan.lib.helpers import json\n'), ((27234, 27263), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': object_id}"], {}), "({'id': object_id})\n", (27244, 27263), False, 'from ckan.lib.helpers import json\n'), ((41154, 41182), 'ckan.tests.are_foreign_keys_supported', 'are_foreign_keys_supported', ([], {}), '()\n', (41180, 41182), False, 'from ckan.tests import are_foreign_keys_supported, SkipTest\n'), ((41202, 41234), 'ckan.tests.SkipTest', 'SkipTest', (['"""Search not supported"""'], {}), "('Search not supported')\n", (41210, 41234), False, 'from ckan.tests import are_foreign_keys_supported, SkipTest\n'), ((1807, 1825), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (1817, 1825), False, 'from ckan.lib.helpers import json\n'), ((4604, 4622), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (4614, 4622), False, 'from ckan.lib.helpers import json\n'), ((17857, 17871), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (17867, 17871), False, 'from ckan.lib.helpers import json\n'), ((17915, 17937), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': id}"], {}), "({'id': id})\n", (17925, 17937), False, 'from ckan.lib.helpers import json\n'), ((19800, 19814), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (19810, 19814), False, 'from ckan.lib.helpers import json\n'), ((19858, 19880), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': id}"], {}), "({'id': id})\n", (19868, 19880), False, 'from ckan.lib.helpers import json\n'), ((27832, 27846), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (27842, 27846), False, 'from ckan.lib.helpers import json\n'), ((27890, 27912), 'ckan.lib.helpers.json.dumps', 'json.dumps', (["{'id': id}"], {}), "({'id': id})\n", (27900, 27912), False, 'from ckan.lib.helpers import json\n'), ((29998, 30016), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (30008, 30016), 
False, 'from ckan.lib.helpers import json\n'), ((32626, 32644), 'ckan.lib.helpers.json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (32636, 32644), False, 'from ckan.lib.helpers import json\n')]
|
# This file is subject to the terms and conditions defined in
# file 'LICENSE', which is part of this source code package.
import random
from .heuristics import path_cost
from .rrt_star import RRTStar
class RRTStarBidirectional(RRTStar):
    def __init__(self, X, Q, x_init, x_goal, max_samples, r, prc=0.01, rewire_count=None):
        """Bidirectional RRT* search.

        :param X: search space
        :param Q: list of lengths of edges added to tree
        :param x_init: tuple, initial location
        :param x_goal: tuple, goal location
        :param max_samples: max number of samples to take
        :param r: resolution of points to sample along edge when checking for collisions
        :param prc: probability of checking whether there is a solution
        :param rewire_count: number of nearby vertices to rewire
        """
        super().__init__(X, Q, x_init, x_goal, max_samples, r, prc, rewire_count)
        self.sigma_best = None  # best solution found so far
        self.swapped = False    # True while trees/start/goal are exchanged

    def connect_trees(self, a, b, x_new, L_near):
        """Try to join tree ``a`` and tree ``b`` through ``x_new``.

        Scans the nearby vertices and, at the first one that yields a
        cheaper total path over a collision-free edge, records the edge in
        tree ``b``, updates the best cost, and stores the stitched path.
        Connecting the trees this way yields a complete start-to-goal path.

        :param a: first tree to connect
        :param b: second tree to connect
        :param x_new: new vertex to add
        :param L_near: nearby vertices as (cost, vertex) pairs
        """
        for near_cost, near_vertex in L_near:
            total_cost = near_cost + path_cost(self.trees[a].E, self.x_init, x_new)
            if total_cost >= self.c_best:
                continue
            if not self.X.collision_free(near_vertex, x_new, self.r):
                continue
            # Link x_new into tree b and remember the improved solution.
            self.trees[b].V_count += 1
            self.trees[b].E[x_new] = near_vertex
            self.c_best = total_cost
            path_a = self.reconstruct_path(a, self.x_init, x_new)
            path_b = self.reconstruct_path(b, self.x_goal, x_new)
            path_b.pop()        # drop the shared vertex x_new
            path_b.reverse()    # orient the goal-side half outward
            self.sigma_best = path_a + path_b
            break

    def swap_trees(self):
        """Exchange the roles of the two trees and of start/goal."""
        self.trees[0], self.trees[1] = self.trees[1], self.trees[0]
        self.x_init, self.x_goal = self.x_goal, self.x_init
        self.swapped = not self.swapped

    def unswap(self):
        """Undo a pending swap and orient the best path from start to goal.

        If the trees are currently swapped, swap them back; then reverse
        the stored path if it does not begin at the initial state.
        """
        if self.swapped:
            self.swap_trees()
        if self.sigma_best[0] is not self.x_init:
            self.sigma_best.reverse()

    def rrt_star_bidirectional(self):
        """Run bidirectional RRT*.

        :return: set of Vertices; Edges in form: vertex: [neighbor_1, neighbor_2, ...]
        """
        # Root tree 0 at the start and tree 1 at the goal.
        self.add_vertex(0, self.x_init)
        self.add_edge(0, self.x_init, None)
        self.add_tree()
        self.add_vertex(1, self.x_goal)
        self.add_edge(1, self.x_goal, None)

        while True:
            for edge_spec in self.Q:  # iterate over different edge lengths
                for _ in range(edge_spec[1]):  # number of edges of this length to add
                    x_new, _x_nearest = self.new_and_near(0, edge_spec)
                    if x_new is None:
                        continue
                    # Nearby vertices of the growing tree with cost-to-come.
                    L_near = self.get_nearby_vertices(0, self.x_init, x_new)
                    # Attach x_new via the cheapest valid nearby edge.
                    self.connect_shortest_valid(0, x_new, L_near)
                    if x_new in self.trees[0].E:
                        # Improve local connections, then attempt to bridge
                        # to the opposite tree through the new vertex.
                        self.rewire(0, x_new, L_near)
                        L_near = self.get_nearby_vertices(1, self.x_goal, x_new)
                        self.connect_trees(0, 1, x_new, L_near)

                    # Probabilistically check whether a solution exists yet.
                    if self.prc and random.random() < self.prc:
                        print("Checking if can connect to goal at", str(self.samples_taken), "samples")
                        if self.sigma_best is not None:
                            print("Can connect to goal")
                            self.unswap()
                            return self.sigma_best

                    if self.samples_taken >= self.max_samples:
                        self.unswap()
                        if self.sigma_best is not None:
                            print("Can connect to goal")
                        else:
                            print("Could not connect to goal")
                        return self.sigma_best

            # Alternate which tree is extended on the next pass.
            self.swap_trees()
|
[
"random.random"
] |
[((4035, 4050), 'random.random', 'random.random', ([], {}), '()\n', (4048, 4050), False, 'import random\n')]
|
from builtins import zip
from builtins import range
import numpy as np
def save_data_regresssion():
    """Generate the 1-D toy regression data set and save it as an .npz file.

    Writes ``Regression/regression_data.npz`` containing:
      x     -- (20, 1) training inputs
      y     -- (20, 1) training targets
      xstar -- (113, 1) test inputs evenly spaced over [-2, 2.5)
    """
    # 20 labeled training points in one input dimension, stored as a column.
    x = np.array([
        2.083970427750732, -0.821018066101379, -0.617870699182597,
        -1.183822608860694, 0.274087442277144, 0.599441729295593,
        1.768897919204435, -0.465645549031928, 0.588852784375935,
        -0.832982214438054, -0.512106527960363, 0.277883144210116,
        -0.065870426922211, -0.821412363806325, 0.185399443778088,
        -0.858296174995998, 0.370786630037059, -1.409869162416639,
        -0.144668412325022, -0.553299615220374]).reshape(-1, 1)
    y = np.array([
        4.549203746331698, 0.371985574437271, 0.711307965514790,
        -0.013212893618430, 2.255473255338191, 1.009915749295733,
        3.744675937965029, 0.424592771793202, 1.322833652295811,
        0.278298293510020, 0.267229130945574, 2.200112286723833,
        1.200609983308969, 0.439971697236094, 2.628580433511255,
        0.503774817336353, 1.942525313820564, 0.579133950013327,
        0.670874423968554, 0.377353755100965]).reshape(-1, 1)
    # Test points evenly distributed in the interval [-2, 2.5).
    xstar = np.arange(-200, 250, 4, dtype=np.float64).reshape(-1, 1) / 100.0
    np.savez('Regression/regression_data', x=x, y=y, xstar=xstar)
def save_data_classification():
    """Generate the 2-D toy binary-classification data set and save it.

    Synthetic data for binary classification: two partially overlapping
    Gaussians in two dimensions. 120 data points are generated from two
    Gaussians with different means and covariances. One Gaussian is
    isotropic and contains 2/3 of the data (blue), the other is highly
    correlated and contains 1/3 of the points (red). Note, that the
    labels for the targets are -1/+1 (and not 0/1).

    Writes ``Classification/classification_data.npz`` with the training
    data (x, y), the two class samples (x1, x2), the evaluation grid
    (xstar, t1, t2), and the true class densities on that grid (p1, p2).
    """
    n1 = 80  # points in class one (isotropic Gaussian)
    n2 = 40  # points in class two (correlated Gaussian)
    x1 = np.array([
        [0.089450165731417, -0.000700765006939],
        [1.171605560541542, 1.177765337635947],
        [1.404722675089394, -0.017417915887421],
        [0.556096196907929, -1.489370243839215],
        [1.213163445267992, 0.044545401368647],
        [0.173404742510759, -0.675668036759603],
        [2.225008556585363, 0.469803193769368],
        [1.470329290331445, 0.887642323697526],
        [2.715199208821485, 0.621044646503113],
        [0.173640760494328, -0.936054178730056],
        [2.038152815025167, 0.262587298316711],
        [1.670218375320427, -2.633186886994263],
        [0.270098501389591, -0.948779657473203],
        [1.396339236138275, -1.114992287201776],
        [-1.482070589718501, -0.654590652482805],
        [-1.493788226272929, 0.382017940248275],
        [1.025083846875763, -0.860344923788873],
        [0.750316336734172, -0.101864205602753],
        [0.184311310148912, -0.258523866245887],
        [0.221868667121623, -1.393954437105630],
        [2.258881477897777, -0.786806071526136],
        [1.211362530151533, -0.423431246029886],
        [1.525307406741207, -0.097975367602030],
        [0.978930232706465, 0.476154349549524],
        [1.347884229346280, -0.248408186838667],
        [1.205779546204216, -0.090878327349907],
        [0.124388644862000, 0.599612645000285],
        [0.784044356662233, 0.356596736271853],
        [1.060216683845210, -0.318474838087900],
        [1.678114484474938, 0.678735373910422],
        [0.973851135005570, 0.024880700382574],
        [0.016237746864886, -0.480899874254564],
        [0.979406721923196, 0.697708815321128],
        [2.217307638531248, -0.956931847027775],
        [2.150475558834153, 1.059031573329512],
        [1.050502393215048, 0.532141747419667],
        [1.210593098269218, -0.318123542280113],
        [0.426309208807901, -0.571727978045793],
        [0.742552105732714, -0.122112766396886],
        [0.757210723588679, 0.862002000781123],
        [-0.431639130160791, -0.763118261936640],
        [-0.748398486307095, -0.603667649379360],
        [0.975086541108249, -1.525297946453790],
        [0.074503762788667, -0.092155036190678],
        [-0.668889572018935, 1.305400680048752],
        [0.725632503186580, 0.096286255882168],
        [-1.042270707136463, 1.297009698531055],
        [1.943144890398260, -1.051176922438962],
        [1.191448645802597, 0.261349747400059],
        [0.778004017505022, -1.046301123377022],
        [0.628873970760607, 1.103926629619643],
        [1.295113890591403, -0.479519217798997],
        [1.522065175744686, 0.993476032742058],
        [1.100255776045601, 0.961069161713818],
        [-0.593243832838153, -0.479418953496258],
        [2.023196521366462, -0.275055494808503],
        [-0.788103134597041, -1.090707985778480],
        [-0.085168420896236, 1.226858390046108],
        [1.691706923196703, -1.153144804780540],
        [1.989279380395157, 1.974704317386435],
        [0.398799861652602, 3.051291814188982],
        [-0.707217210772927, 0.185505264874794],
        [0.697550136765320, 0.222287208720035],
        [2.186126058382323, -0.327829143438683],
        [1.368068331060010, 1.708138258453435],
        [0.883049126818189, -1.334269372314072],
        [1.737643116893527, 0.618452933813739],
        [2.002228743955222, 0.103381966018445],
        [-0.202638622737115, 0.495024938090909],
        [0.543309203560769, -0.802120609128192],
        [-1.796161599703804, -0.054795478648902],
        [1.460693782000059, 0.750052171180825],
        [0.133277872804608, -1.154891068006907],
        [0.203670382700157, -0.480336687666025],
        [-0.278985011909341, 0.030578590108392],
        [2.070490237052893, 2.420782751903098],
        [0.599023881366768, -1.673208560658818],
        [0.140506592147238, 0.804938444757444],
        [-0.980799204108985, -1.847987723222053],
        [-0.102350006007740, -0.822093851434857]])
    x2 = np.array([
        [1.160257057434194, 1.544111720606185],
        [-0.458434595629321, 0.205667827100987],
        [-1.053562345687376, -0.614938261650010],
        [-1.687901005751336, -0.780028275457715],
        [-0.467035854712698, 0.561692074343868],
        [-0.703391186121452, 0.281301267639200],
        [-1.568557779993616, -0.629129013661319],
        [-2.176478596101226, -1.176211396013793],
        [0.768109265900499, 1.376893437232103],
        [-0.514772970064353, 0.474264363701950],
        [-1.301924381487904, -0.525179228127957],
        [-1.312024947004566, -0.049469442305628],
        [-0.623417800418214, 0.226456899059445],
        [0.020290591370131, 0.374055846421580],
        [-1.002901826023476, 0.076597486786743],
        [-2.553713136283273, -1.731788289864902],
        [-1.788156378743716, -0.742460481943494],
        [-1.119582270077321, -0.256154464598782],
        [-0.423084091988017, 0.395108309297119],
        [-1.645945345460644, -1.216319293733455],
        [0.227805611684674, 0.925948003854262],
        [-1.298719171366801, -0.965511301629466],
        [-0.618292817021891, 0.140045887498202],
        [0.794935039731655, 1.917830760420081],
        [-0.213709179946402, 0.617751634356751],
        [-0.474251035850546, -0.054854432018974],
        [0.056077816960464, 1.046282980014428],
        [0.887136693467512, 1.536490289895764],
        [1.377161915854166, 1.764872700787871],
        [-0.901195709427863, -0.340855547886558],
        [-0.783104424735034, -0.330927422324566],
        [-1.507139570543989, 0.137504213149820],
        [-0.348999111724700, 0.235931187612453],
        [-0.367309385513174, 0.655996377722041],
        [-0.050622309620072, 0.410969334468070],
        [1.734919039047271, 2.611080177877894],
        [-0.567413078682755, -0.458249564234885],
        [-0.622230797920433, 0.258401595566888],
        [-1.642146761593230, -1.138579130251617],
        [-0.285298076847255, 0.085451489400687]])
    # Stack both classes into one training set; class one is labelled -1
    # and class two is labelled +1.
    x = np.vstack((x1, x2))
    y = np.vstack((-np.ones((n1, 1)), np.ones((n2, 1))))
    # For plotting, we superimpose the data points with the posterior
    # equi-probability contour lines for the probability of class two given
    # complete information about the generating mechanism.
    grid = np.arange(-4, 4.1, 0.1)
    t1, t2 = np.meshgrid(grid, grid)
    t = np.column_stack((t1.ravel(), t2.ravel()))  # these are the test inputs
    # True generating mechanism: class one is N(m1, S1) (isotropic), class
    # two is N(m2, S2) (highly correlated, det(S2) = 1 - 0.95^2 = 0.0975).
    S1 = np.eye(2)
    S2 = np.array([[1, 0.95], [0.95, 1]])
    m1 = np.array([0.75, 0])
    m2 = np.array([-0.75, 0])
    # Unnormalised class densities on the grid, weighted by class size.
    centered = t - m1
    p1 = n1 * np.exp((-np.dot(centered, np.linalg.inv(S1)) * centered / 2).sum(axis=1))
    centered = t - m2
    p2 = n2 * np.exp((-np.dot(centered, np.linalg.inv(S2)) * centered / 2).sum(axis=1)) / np.sqrt(0.0975)
    np.savez('Classification/classification_data',
             x=x, y=y, xstar=t, x1=x1, x2=x2, t1=t1, t2=t2, p1=p1, p2=p2)
if __name__=='__main__':
    # Regenerate the regression fixture when run as a script; the
    # classification fixture is only rebuilt by uncommenting the call below.
    save_data_regresssion()
    #save_data_classification()
|
[
"numpy.zeros_like",
"numpy.ones",
"numpy.prod",
"numpy.array",
"numpy.linalg.inv",
"numpy.arange",
"numpy.dot",
"numpy.eye",
"numpy.savez",
"builtins.range",
"numpy.concatenate",
"numpy.sqrt"
] |
[((1348, 1409), 'numpy.savez', 'np.savez', (['"""Regression/regression_data"""'], {'x': 'x', 'y': 'y', 'xstar': 'xstar'}), "('Regression/regression_data', x=x, y=y, xstar=xstar)\n", (1356, 1409), True, 'import numpy as np\n'), ((1899, 5413), 'numpy.array', 'np.array', (['[[0.089450165731417, -0.000700765006939], [1.171605560541542, \n 1.177765337635947], [1.404722675089394, -0.017417915887421], [\n 0.556096196907929, -1.489370243839215], [1.213163445267992, \n 0.044545401368647], [0.173404742510759, -0.675668036759603], [\n 2.225008556585363, 0.469803193769368], [1.470329290331445, \n 0.887642323697526], [2.715199208821485, 0.621044646503113], [\n 0.173640760494328, -0.936054178730056], [2.038152815025167, \n 0.262587298316711], [1.670218375320427, -2.633186886994263], [\n 0.270098501389591, -0.948779657473203], [1.396339236138275, -\n 1.114992287201776], [-1.482070589718501, -0.654590652482805], [-\n 1.493788226272929, 0.382017940248275], [1.025083846875763, -\n 0.860344923788873], [0.750316336734172, -0.101864205602753], [\n 0.184311310148912, -0.258523866245887], [0.221868667121623, -\n 1.39395443710563], [2.258881477897777, -0.786806071526136], [\n 1.211362530151533, -0.423431246029886], [1.525307406741207, -\n 0.09797536760203], [0.978930232706465, 0.476154349549524], [\n 1.34788422934628, -0.248408186838667], [1.205779546204216, -\n 0.090878327349907], [0.124388644862, 0.599612645000285], [\n 0.784044356662233, 0.356596736271853], [1.06021668384521, -\n 0.3184748380879], [1.678114484474938, 0.678735373910422], [\n 0.97385113500557, 0.024880700382574], [0.016237746864886, -\n 0.480899874254564], [0.979406721923196, 0.697708815321128], [\n 2.217307638531248, -0.956931847027775], [2.150475558834153, \n 1.059031573329512], [1.050502393215048, 0.532141747419667], [\n 1.210593098269218, -0.318123542280113], [0.426309208807901, -\n 0.571727978045793], [0.742552105732714, -0.122112766396886], [\n 0.757210723588679, 0.862002000781123], [-0.431639130160791, -\n 
0.76311826193664], [-0.748398486307095, -0.60366764937936], [\n 0.975086541108249, -1.52529794645379], [0.074503762788667, -\n 0.092155036190678], [-0.668889572018935, 1.305400680048752], [\n 0.72563250318658, 0.096286255882168], [-1.042270707136463, \n 1.297009698531055], [1.94314489039826, -1.051176922438962], [\n 1.191448645802597, 0.261349747400059], [0.778004017505022, -\n 1.046301123377022], [0.628873970760607, 1.103926629619643], [\n 1.295113890591403, -0.479519217798997], [1.522065175744686, \n 0.993476032742058], [1.100255776045601, 0.961069161713818], [-\n 0.593243832838153, -0.479418953496258], [2.023196521366462, -\n 0.275055494808503], [-0.788103134597041, -1.09070798577848], [-\n 0.085168420896236, 1.226858390046108], [1.691706923196703, -\n 1.15314480478054], [1.989279380395157, 1.974704317386435], [\n 0.398799861652602, 3.051291814188982], [-0.707217210772927, \n 0.185505264874794], [0.69755013676532, 0.222287208720035], [\n 2.186126058382323, -0.327829143438683], [1.36806833106001, \n 1.708138258453435], [0.883049126818189, -1.334269372314072], [\n 1.737643116893527, 0.618452933813739], [2.002228743955222, \n 0.103381966018445], [-0.202638622737115, 0.495024938090909], [\n 0.543309203560769, -0.802120609128192], [-1.796161599703804, -\n 0.054795478648902], [1.460693782000059, 0.750052171180825], [\n 0.133277872804608, -1.154891068006907], [0.203670382700157, -\n 0.480336687666025], [-0.278985011909341, 0.030578590108392], [\n 2.070490237052893, 2.420782751903098], [0.599023881366768, -\n 1.673208560658818], [0.140506592147238, 0.804938444757444], [-\n 0.980799204108985, -1.847987723222053], [-0.10235000600774, -\n 0.822093851434857]]'], {}), '([[0.089450165731417, -0.000700765006939], [1.171605560541542, \n 1.177765337635947], [1.404722675089394, -0.017417915887421], [\n 0.556096196907929, -1.489370243839215], [1.213163445267992, \n 0.044545401368647], [0.173404742510759, -0.675668036759603], [\n 2.225008556585363, 0.469803193769368], 
[1.470329290331445, \n 0.887642323697526], [2.715199208821485, 0.621044646503113], [\n 0.173640760494328, -0.936054178730056], [2.038152815025167, \n 0.262587298316711], [1.670218375320427, -2.633186886994263], [\n 0.270098501389591, -0.948779657473203], [1.396339236138275, -\n 1.114992287201776], [-1.482070589718501, -0.654590652482805], [-\n 1.493788226272929, 0.382017940248275], [1.025083846875763, -\n 0.860344923788873], [0.750316336734172, -0.101864205602753], [\n 0.184311310148912, -0.258523866245887], [0.221868667121623, -\n 1.39395443710563], [2.258881477897777, -0.786806071526136], [\n 1.211362530151533, -0.423431246029886], [1.525307406741207, -\n 0.09797536760203], [0.978930232706465, 0.476154349549524], [\n 1.34788422934628, -0.248408186838667], [1.205779546204216, -\n 0.090878327349907], [0.124388644862, 0.599612645000285], [\n 0.784044356662233, 0.356596736271853], [1.06021668384521, -\n 0.3184748380879], [1.678114484474938, 0.678735373910422], [\n 0.97385113500557, 0.024880700382574], [0.016237746864886, -\n 0.480899874254564], [0.979406721923196, 0.697708815321128], [\n 2.217307638531248, -0.956931847027775], [2.150475558834153, \n 1.059031573329512], [1.050502393215048, 0.532141747419667], [\n 1.210593098269218, -0.318123542280113], [0.426309208807901, -\n 0.571727978045793], [0.742552105732714, -0.122112766396886], [\n 0.757210723588679, 0.862002000781123], [-0.431639130160791, -\n 0.76311826193664], [-0.748398486307095, -0.60366764937936], [\n 0.975086541108249, -1.52529794645379], [0.074503762788667, -\n 0.092155036190678], [-0.668889572018935, 1.305400680048752], [\n 0.72563250318658, 0.096286255882168], [-1.042270707136463, \n 1.297009698531055], [1.94314489039826, -1.051176922438962], [\n 1.191448645802597, 0.261349747400059], [0.778004017505022, -\n 1.046301123377022], [0.628873970760607, 1.103926629619643], [\n 1.295113890591403, -0.479519217798997], [1.522065175744686, \n 0.993476032742058], [1.100255776045601, 0.961069161713818], [-\n 
0.593243832838153, -0.479418953496258], [2.023196521366462, -\n 0.275055494808503], [-0.788103134597041, -1.09070798577848], [-\n 0.085168420896236, 1.226858390046108], [1.691706923196703, -\n 1.15314480478054], [1.989279380395157, 1.974704317386435], [\n 0.398799861652602, 3.051291814188982], [-0.707217210772927, \n 0.185505264874794], [0.69755013676532, 0.222287208720035], [\n 2.186126058382323, -0.327829143438683], [1.36806833106001, \n 1.708138258453435], [0.883049126818189, -1.334269372314072], [\n 1.737643116893527, 0.618452933813739], [2.002228743955222, \n 0.103381966018445], [-0.202638622737115, 0.495024938090909], [\n 0.543309203560769, -0.802120609128192], [-1.796161599703804, -\n 0.054795478648902], [1.460693782000059, 0.750052171180825], [\n 0.133277872804608, -1.154891068006907], [0.203670382700157, -\n 0.480336687666025], [-0.278985011909341, 0.030578590108392], [\n 2.070490237052893, 2.420782751903098], [0.599023881366768, -\n 1.673208560658818], [0.140506592147238, 0.804938444757444], [-\n 0.980799204108985, -1.847987723222053], [-0.10235000600774, -\n 0.822093851434857]])\n', (1907, 5413), True, 'import numpy as np\n'), ((6069, 7846), 'numpy.array', 'np.array', (['[[1.160257057434194, 1.544111720606185], [-0.458434595629321, \n 0.205667827100987], [-1.053562345687376, -0.61493826165001], [-\n 1.687901005751336, -0.780028275457715], [-0.467035854712698, \n 0.561692074343868], [-0.703391186121452, 0.2813012676392], [-\n 1.568557779993616, -0.629129013661319], [-2.176478596101226, -\n 1.176211396013793], [0.768109265900499, 1.376893437232103], [-\n 0.514772970064353, 0.47426436370195], [-1.301924381487904, -\n 0.525179228127957], [-1.312024947004566, -0.049469442305628], [-\n 0.623417800418214, 0.226456899059445], [0.020290591370131, \n 0.37405584642158], [-1.002901826023476, 0.076597486786743], [-\n 2.553713136283273, -1.731788289864902], [-1.788156378743716, -\n 0.742460481943494], [-1.119582270077321, -0.256154464598782], [-\n 0.423084091988017, 
0.395108309297119], [-1.645945345460644, -\n 1.216319293733455], [0.227805611684674, 0.925948003854262], [-\n 1.298719171366801, -0.965511301629466], [-0.618292817021891, \n 0.140045887498202], [0.794935039731655, 1.917830760420081], [-\n 0.213709179946402, 0.617751634356751], [-0.474251035850546, -\n 0.054854432018974], [0.056077816960464, 1.046282980014428], [\n 0.887136693467512, 1.536490289895764], [1.377161915854166, \n 1.764872700787871], [-0.901195709427863, -0.340855547886558], [-\n 0.783104424735034, -0.330927422324566], [-1.507139570543989, \n 0.13750421314982], [-0.3489991117247, 0.235931187612453], [-\n 0.367309385513174, 0.655996377722041], [-0.050622309620072, \n 0.41096933446807], [1.734919039047271, 2.611080177877894], [-\n 0.567413078682755, -0.458249564234885], [-0.622230797920433, \n 0.258401595566888], [-1.64214676159323, -1.138579130251617], [-\n 0.285298076847255, 0.085451489400687]]'], {}), '([[1.160257057434194, 1.544111720606185], [-0.458434595629321, \n 0.205667827100987], [-1.053562345687376, -0.61493826165001], [-\n 1.687901005751336, -0.780028275457715], [-0.467035854712698, \n 0.561692074343868], [-0.703391186121452, 0.2813012676392], [-\n 1.568557779993616, -0.629129013661319], [-2.176478596101226, -\n 1.176211396013793], [0.768109265900499, 1.376893437232103], [-\n 0.514772970064353, 0.47426436370195], [-1.301924381487904, -\n 0.525179228127957], [-1.312024947004566, -0.049469442305628], [-\n 0.623417800418214, 0.226456899059445], [0.020290591370131, \n 0.37405584642158], [-1.002901826023476, 0.076597486786743], [-\n 2.553713136283273, -1.731788289864902], [-1.788156378743716, -\n 0.742460481943494], [-1.119582270077321, -0.256154464598782], [-\n 0.423084091988017, 0.395108309297119], [-1.645945345460644, -\n 1.216319293733455], [0.227805611684674, 0.925948003854262], [-\n 1.298719171366801, -0.965511301629466], [-0.618292817021891, \n 0.140045887498202], [0.794935039731655, 1.917830760420081], [-\n 0.213709179946402, 
0.617751634356751], [-0.474251035850546, -\n 0.054854432018974], [0.056077816960464, 1.046282980014428], [\n 0.887136693467512, 1.536490289895764], [1.377161915854166, \n 1.764872700787871], [-0.901195709427863, -0.340855547886558], [-\n 0.783104424735034, -0.330927422324566], [-1.507139570543989, \n 0.13750421314982], [-0.3489991117247, 0.235931187612453], [-\n 0.367309385513174, 0.655996377722041], [-0.050622309620072, \n 0.41096933446807], [1.734919039047271, 2.611080177877894], [-\n 0.567413078682755, -0.458249564234885], [-0.622230797920433, \n 0.258401595566888], [-1.64214676159323, -1.138579130251617], [-\n 0.285298076847255, 0.085451489400687]])\n', (6077, 7846), True, 'import numpy as np\n'), ((8236, 8268), 'numpy.concatenate', 'np.concatenate', (['(x1, x2)'], {'axis': '(0)'}), '((x1, x2), axis=0)\n', (8250, 8268), True, 'import numpy as np\n'), ((8764, 8780), 'numpy.zeros_like', 'np.zeros_like', (['t'], {}), '(t)\n', (8777, 8780), True, 'import numpy as np\n'), ((8790, 8799), 'numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (8796, 8799), True, 'import numpy as np\n'), ((8806, 8838), 'numpy.array', 'np.array', (['[[1, 0.95], [0.95, 1]]'], {}), '([[1, 0.95], [0.95, 1]])\n', (8814, 8838), True, 'import numpy as np\n'), ((8848, 8867), 'numpy.array', 'np.array', (['[0.75, 0]'], {}), '([0.75, 0])\n', (8856, 8867), True, 'import numpy as np\n'), ((8874, 8894), 'numpy.array', 'np.array', (['[-0.75, 0]'], {}), '([-0.75, 0])\n', (8882, 8894), True, 'import numpy as np\n'), ((9098, 9115), 'numpy.linalg.inv', 'np.linalg.inv', (['S2'], {}), '(S2)\n', (9111, 9115), True, 'import numpy as np\n'), ((9198, 9310), 'numpy.savez', 'np.savez', (['"""Classification/classification_data"""'], {'x': 'x', 'y': 'y', 'xstar': 't', 'x1': 'x1', 'x2': 'x2', 't1': 't1', 't2': 't2', 'p1': 'p1', 'p2': 'p2'}), "('Classification/classification_data', x=x, y=y, xstar=t, x1=x1, x2\n =x2, t1=t1, t2=t2, p1=p1, p2=p2)\n", (9206, 9310), True, 'import numpy as np\n'), ((206, 640), 'numpy.array', 
'np.array', (['[[2.083970427750732, -0.821018066101379, -0.617870699182597, -\n 1.183822608860694, 0.274087442277144, 0.599441729295593, \n 1.768897919204435, -0.465645549031928, 0.588852784375935, -\n 0.832982214438054, -0.512106527960363, 0.277883144210116, -\n 0.065870426922211, -0.821412363806325, 0.185399443778088, -\n 0.858296174995998, 0.370786630037059, -1.409869162416639, -\n 0.144668412325022, -0.553299615220374]]'], {}), '([[2.083970427750732, -0.821018066101379, -0.617870699182597, -\n 1.183822608860694, 0.274087442277144, 0.599441729295593, \n 1.768897919204435, -0.465645549031928, 0.588852784375935, -\n 0.832982214438054, -0.512106527960363, 0.277883144210116, -\n 0.065870426922211, -0.821412363806325, 0.185399443778088, -\n 0.858296174995998, 0.370786630037059, -1.409869162416639, -\n 0.144668412325022, -0.553299615220374]])\n', (214, 640), True, 'import numpy as np\n'), ((698, 1118), 'numpy.array', 'np.array', (['[[4.549203746331698, 0.371985574437271, 0.71130796551479, -0.01321289361843,\n 2.255473255338191, 1.009915749295733, 3.744675937965029, \n 0.424592771793202, 1.322833652295811, 0.27829829351002, \n 0.267229130945574, 2.200112286723833, 1.200609983308969, \n 0.439971697236094, 2.628580433511255, 0.503774817336353, \n 1.942525313820564, 0.579133950013327, 0.670874423968554, 0.377353755100965]\n ]'], {}), '([[4.549203746331698, 0.371985574437271, 0.71130796551479, -\n 0.01321289361843, 2.255473255338191, 1.009915749295733, \n 3.744675937965029, 0.424592771793202, 1.322833652295811, \n 0.27829829351002, 0.267229130945574, 2.200112286723833, \n 1.200609983308969, 0.439971697236094, 2.628580433511255, \n 0.503774817336353, 1.942525313820564, 0.579133950013327, \n 0.670874423968554, 0.377353755100965]])\n', (706, 1118), True, 'import numpy as np\n'), ((8560, 8583), 'numpy.arange', 'np.arange', (['(-4)', '(4.1)', '(0.1)'], {}), '(-4, 4.1, 0.1)\n', (8569, 8583), True, 'import numpy as np\n'), ((8582, 8605), 'numpy.arange', 'np.arange', (['(-4)', 
'(4.1)', '(0.1)'], {}), '(-4, 4.1, 0.1)\n', (8591, 8605), True, 'import numpy as np\n'), ((9177, 9192), 'numpy.sqrt', 'np.sqrt', (['(0.0975)'], {}), '(0.0975)\n', (9184, 9192), True, 'import numpy as np\n'), ((1278, 1297), 'builtins.range', 'range', (['(-200)', '(250)', '(4)'], {}), '(-200, 250, 4)\n', (1283, 1297), False, 'from builtins import range\n'), ((8308, 8324), 'numpy.ones', 'np.ones', (['(1, n2)'], {}), '((1, n2))\n', (8315, 8324), True, 'import numpy as np\n'), ((8292, 8308), 'numpy.ones', 'np.ones', (['(1, n1)'], {}), '((1, n1))\n', (8299, 8308), True, 'import numpy as np\n'), ((8646, 8663), 'numpy.prod', 'np.prod', (['t1.shape'], {}), '(t1.shape)\n', (8653, 8663), True, 'import numpy as np\n'), ((8682, 8699), 'numpy.prod', 'np.prod', (['t2.shape'], {}), '(t2.shape)\n', (8689, 8699), True, 'import numpy as np\n'), ((8985, 9002), 'numpy.linalg.inv', 'np.linalg.inv', (['S1'], {}), '(S1)\n', (8998, 9002), True, 'import numpy as np\n'), ((9138, 9154), 'numpy.dot', 'np.dot', (['tmm', 'S2i'], {}), '(tmm, S2i)\n', (9144, 9154), True, 'import numpy as np\n')]
|
import torch
import torch.optim as optim
import numpy as np
from PIL import Image
#import pano
import pano_gen as pano
import time
def vecang(vec1, vec2):
    """Return the angle in radians between two vectors.

    Both inputs are scaled to unit length before taking the arc-cosine
    of their dot product.
    """
    n1 = np.sqrt(np.sum(vec1 ** 2))
    n2 = np.sqrt(np.sum(vec2 ** 2))
    cos_angle = np.dot(vec1 / n1, vec2 / n2)
    return np.arccos(cos_angle)
def rotatevec(vec, theta):
    """Rotate the 2D vector `vec` counter-clockwise by `theta` radians.

    `theta` is expected to be a 1-element tensor so the result can be
    concatenated into a shape-(2,) tensor.
    """
    c, s = torch.cos(theta), torch.sin(theta)
    rotated_x = c * vec[0] - s * vec[1]
    rotated_y = s * vec[0] + c * vec[1]
    return torch.cat([rotated_x, rotated_y])
def pts_linspace(pa, pb, pts=300):
    """Return `pts + 1` 2D points evenly interpolated from `pa` to `pb`."""
    start = pa.view(1, 2)
    end = pb.view(1, 2)
    steps = torch.arange(0, pts + 1, dtype=start.dtype).view(-1, 1)
    # Convex combination: the weight slides from all-start to all-end.
    return (start * (pts - steps) + end * steps) / pts
def xyz2uv(xy, z=-1):
    """Map 2D points `xy` at constant height `z` to panorama angles.

    Returns an (N, 2) tensor of (u, v): u is the azimuth of each point,
    v the elevation of height `z` at the point's radial distance.
    """
    radial = torch.sqrt((xy ** 2).sum(1))
    heights = torch.zeros_like(radial) + z
    u = torch.atan2(xy[:, 1], xy[:, 0])
    v = torch.atan2(heights, radial)
    return torch.cat([u.view(-1, 1), v.view(-1, 1)], dim=1)
def uv2idx(uv, w, h):
    """Convert panorama angles (u, v) to fractional pixel indices (col, row).

    u spans [-pi, pi] across the image width, v spans [-pi/2, pi/2]
    across the image height.
    """
    col = (uv[:, 0] / (2 * np.pi) + 0.5) * w - 0.5
    row = (uv[:, 1] / np.pi + 0.5) * h - 0.5
    return torch.stack([col, row], dim=1)
def wallidx(xy, w, h, z1, z2):
    """Sample pixel indices along the vertical wall edge through point `xy`.

    The edge runs at a fixed column (the azimuth of `xy`) from height
    `z1` to `z2`; points along it are produced with `pts_linspace`.
    """
    azimuth = torch.atan2(xy[1], xy[0])
    col = (azimuth / (2 * np.pi) + 0.5) * w - 0.5
    radial = torch.sqrt((xy ** 2).sum())

    def _row(z):
        # Row index of height z seen at this radial distance.
        return (torch.atan2(torch.zeros_like(radial) + z, radial) / np.pi + 0.5) * h - 0.5

    top = torch.cat([col.view(1), _row(z1).view(1)])
    bottom = torch.cat([col.view(1), _row(z2).view(1)])
    return pts_linspace(top, bottom)
def map_coordinates(input, coordinates):
    """Bilinearly sample a 2D map at fractional locations, wrapping at edges.

    PyTorch port of scipy.ndimage.interpolation.map_coordinates.

    input: (H, W) tensor.
    coordinates: (2, ...) tensor of (row, col) sample locations.
    """
    h = input.shape[0]
    w = input.shape[1]

    def _wrap(coords):
        # Wrap integer indices around both axes (panorama-style padding).
        coords[0] = coords[0] % h
        coords[1] = coords[1] % w
        return coords

    lo = torch.floor(coordinates).long()
    hi = torch.ceil(coordinates).long()
    # Fractional offsets must be taken from the *unwrapped* floor values.
    frac_col = coordinates[1] - lo[1].float()
    frac_row = coordinates[0] - lo[0].float()
    lo = _wrap(lo)
    hi = _wrap(hi)
    f00 = input[lo[0], lo[1]]
    f10 = input[lo[0], hi[1]]
    f01 = input[hi[0], lo[1]]
    f11 = input[hi[0], hi[1]]
    # Interpolate along columns first, then along rows.
    top = f00 + frac_col * (f10 - f00)
    bottom = f01 + frac_col * (f11 - f01)
    return top + frac_row * (bottom - top)
def pc2cor_id(pc, pc_vec, pc_theta, pc_height):
    """Convert the parametric layout (pc, pc_vec, pc_theta, pc_height) to
    corner pixel indices on a 1024x512 image.

    Returns ceiling-corner indices (sampled at z=-1) concatenated with
    floor-corner indices (sampled at z=pc_height), one row per corner.
    """
    if pc_theta.numel()==1:
        # Cuboid room: four corners from the center point pc, its
        # direction vector, and the angle between adjacent corners.
        ps = torch.stack([
            (pc + pc_vec),
            (pc + rotatevec(pc_vec, pc_theta)),
            (pc - pc_vec),
            (pc + rotatevec(pc_vec, pc_theta - np.pi))
        ])
    else:
        # General layout: pc_theta holds alternating y/x coordinates.
        # Each iteration duplicates the last corner row and overwrites a
        # single axis, so consecutive walls stay axis-aligned.
        ps = pc + pc_vec
        ps = ps.view(-1,2)
        for c_num in range(pc_theta.shape[1]):
            ps = torch.cat((ps, ps[c_num:,:]),0)
            if (c_num % 2) == 0:
                ps[-1,1] = pc_theta[0,c_num]
            else:
                ps[-1,0] = pc_theta[0,c_num]
        # Close the loop: the final corner shares y with the first one.
        ps = torch.cat((ps, ps[-1:,:]),0)
        ps[-1,1] = ps[0,1]
    return torch.cat([
        uv2idx(xyz2uv(ps, z=-1), 1024, 512),
        uv2idx(xyz2uv(ps, z=pc_height), 1024, 512),
    ], dim=0)
def project2sphere_score(pc, pc_vec, pc_theta, pc_height, scoreedg, scorecor, i_step=None):
    """Differentiable layout score against corner/edge probability maps.

    Projects the layout's corners (via pc2cor_id) and its ceiling-wall /
    floor-wall boundary curves onto the 1024x512 maps and accumulates the
    negated sampled probabilities, so lower is better.

    scoreedg is the (H, W, 3) edge map, scorecor the (H, W) corner map.
    When `i_step` is given, the individual loss terms are printed.

    Returns a scalar tensor suitable for backpropagation.
    """
    # Sample corner loss
    corid = pc2cor_id(pc, pc_vec, pc_theta, pc_height)
    corid_coordinates = torch.stack([corid[:, 1], corid[:, 0]])
    loss_cor = -map_coordinates(scorecor, corid_coordinates).mean()

    # Sample boundary loss
    if pc_theta.numel()==1:
        # Cuboid room: four corners, four wall segments.
        p1 = pc + pc_vec
        p2 = pc + rotatevec(pc_vec, pc_theta)
        p3 = pc - pc_vec
        p4 = pc + rotatevec(pc_vec, pc_theta - np.pi)
        segs = [
            pts_linspace(p1, p2),
            pts_linspace(p2, p3),
            pts_linspace(p3, p4),
            pts_linspace(p4, p1),
        ]
    else:
        # General layout: rebuild the corner list exactly as pc2cor_id
        # does, then take consecutive corner pairs as wall segments.
        ps = pc + pc_vec
        ps = ps.view(-1,2)
        for c_num in range(pc_theta.shape[1]):
            ps = torch.cat((ps, ps[c_num:,:]),0)
            if (c_num % 2) == 0:
                ps[-1,1] = pc_theta[0,c_num]
            else:
                ps[-1,0] = pc_theta[0,c_num]
        ps = torch.cat((ps, ps[-1:,:]),0)
        ps[-1,1] = ps[0,1]
        segs = []
        for c_num in range(ps.shape[0]-1):
            segs.append(pts_linspace(ps[c_num,:], ps[c_num+1,:]))
        segs.append(pts_linspace(ps[-1,:], ps[0,:]))

    # ceil-wall: sample channel 1 of the edge map along each segment.
    loss_ceilwall = 0
    for seg in segs:
        ceil_uv = xyz2uv(seg, z=-1)
        ceil_idx = uv2idx(ceil_uv, 1024, 512)
        ceil_coordinates = torch.stack([ceil_idx[:, 1], ceil_idx[:, 0]])
        loss_ceilwall -= map_coordinates(scoreedg[..., 1], ceil_coordinates).mean() / len(segs)

    # floor-wall: sample channel 2 of the edge map along each segment.
    loss_floorwall = 0
    for seg in segs:
        floor_uv = xyz2uv(seg, z=pc_height)
        floor_idx = uv2idx(floor_uv, 1024, 512)
        floor_coordinates = torch.stack([floor_idx[:, 1], floor_idx[:, 0]])
        loss_floorwall -= map_coordinates(scoreedg[..., 2], floor_coordinates).mean() / len(segs)

    #losses = 1.0 * loss_cor + 0.1 * loss_wallwall + 0.5 * loss_ceilwall + 1.0 * loss_floorwall
    losses = 1.0 * loss_cor + 1.0 * loss_ceilwall + 1.0 * loss_floorwall

    if i_step is not None:
        with torch.no_grad():
            # BUG FIX: the original print referenced `loss_wallwall`, which
            # is never defined (its loss term is commented out above), so
            # any call with i_step set raised NameError.
            print('step %d: %.3f (cor %.3f, ceil %.3f, floor %.3f)' % (
                i_step, losses,
                loss_cor, loss_ceilwall, loss_floorwall))
    return losses
def optimize_cor_id(cor_id, scoreedg, scorecor, num_iters=100, verbose=False):
    """Refine corner pixel coordinates by SGD on the projection score.

    cor_id: corner pixel coordinates with ceiling/floor rows interleaved
        (even rows: ceiling corners, odd rows: floor corners).
    scoreedg: (512, 1024, 3) edge probability map.
    scorecor: (512, 1024) corner probability map.
    num_iters: number of SGD steps.
    verbose: if True, forwards the step index so the scoring function
        prints per-step losses.

    Returns the optimized corner coordinates in the same interleaved
    layout as `cor_id`.
    """
    assert scoreedg.shape == (512, 1024, 3)
    assert scorecor.shape == (512, 1024)
    Z = -1
    ceil_cor_id = cor_id[0::2]
    floor_cor_id = cor_id[1::2]
    # Constrain all ceiling corners to a common plane at height Z and
    # recover their (x, y) positions on that plane.
    ceil_cor_id, ceil_cor_id_xy = pano.constraint_cor_id_same_z(ceil_cor_id, scorecor, Z)
    #ceil_cor_id_xyz = np.hstack([ceil_cor_id_xy, np.zeros(4).reshape(-1, 1) + Z])
    ceil_cor_id_xyz = np.hstack([ceil_cor_id_xy, np.zeros(ceil_cor_id.shape[0]).reshape(-1, 1) + Z])
    # TODO: revise here to general layout
    #pc = (ceil_cor_id_xy[0] + ceil_cor_id_xy[2]) / 2
    #print(ceil_cor_id_xy)
    # Rotate the corner ordering so the first wall between corners 0 and 1
    # is the one spanning the smaller x extent.
    if abs(ceil_cor_id_xy[0,0]-ceil_cor_id_xy[1,0])>abs(ceil_cor_id_xy[0,1]-ceil_cor_id_xy[1,1]):
        ceil_cor_id_xy = np.concatenate((ceil_cor_id_xy[1:,:],ceil_cor_id_xy[:1,:]), axis=0)
    #print(cor_id)
    #print(ceil_cor_id_xy)
    # Initial layout parameters: centroid, vector to the first corner,
    # angle to the second corner, and the floor height.
    pc = np.mean(ceil_cor_id_xy, axis=0)
    pc_vec = ceil_cor_id_xy[0] - pc
    pc_theta = vecang(pc_vec, ceil_cor_id_xy[1] - pc)
    pc_height = pano.fit_avg_z(floor_cor_id, ceil_cor_id_xy, scorecor)
    # Non-cuboid layout: re-encode pc_theta as the alternating y/x
    # coordinate list expected by pc2cor_id's general branch.
    if ceil_cor_id_xy.shape[0] > 4:
        pc_theta = np.array([ceil_cor_id_xy[1,1]])
        for c_num in range(2, ceil_cor_id_xy.shape[0]-1):
            if (c_num % 2) == 0:
                pc_theta = np.append(pc_theta, ceil_cor_id_xy[c_num,0])
            else:
                pc_theta = np.append(pc_theta, ceil_cor_id_xy[c_num,1])
    # Convert everything to tensors and mark the layout parameters as
    # trainable.
    scoreedg = torch.FloatTensor(scoreedg)
    scorecor = torch.FloatTensor(scorecor)
    pc = torch.FloatTensor(pc)
    pc_vec = torch.FloatTensor(pc_vec)
    pc_theta = torch.FloatTensor([pc_theta])
    pc_height = torch.FloatTensor([pc_height])
    pc.requires_grad = True
    pc_vec.requires_grad = True
    pc_theta.requires_grad = True
    pc_height.requires_grad = True
    #print(pc_theta)
    #time.sleep(2)
    #return cor_id
    optimizer = optim.SGD([
        pc, pc_vec, pc_theta, pc_height
    ], lr=1e-3, momentum=0.9)
    # Keep the best-scoring parameters seen during the descent, not just
    # the final ones.
    best = {'score': 1e9}
    for i_step in range(num_iters):
        i = i_step if verbose else None
        optimizer.zero_grad()
        score = project2sphere_score(pc, pc_vec, pc_theta, pc_height, scoreedg, scorecor, i)
        if score.item() < best['score']:
            best['score'] = score.item()
            best['pc'] = pc.clone()
            best['pc_vec'] = pc_vec.clone()
            best['pc_theta'] = pc_theta.clone()
            best['pc_height'] = pc_height.clone()
        score.backward()
        optimizer.step()
    pc = best['pc']
    pc_vec = best['pc_vec']
    pc_theta = best['pc_theta']
    pc_height = best['pc_height']
    # Project the optimized parameters back to pixel coordinates and
    # re-interleave ceiling/floor rows to match the input layout.
    opt_cor_id = pc2cor_id(pc, pc_vec, pc_theta, pc_height).detach().numpy()
    split_num = int(opt_cor_id.shape[0]//2)
    opt_cor_id = np.stack([opt_cor_id[:split_num], opt_cor_id[split_num:]], axis=1).reshape(split_num*2, 2)
    #print(opt_cor_id)
    #print(cor_id)
    #time.sleep(500)
    return opt_cor_id
|
[
"torch.cat",
"torch.cos",
"torch.ceil",
"numpy.mean",
"torch.arange",
"torch.no_grad",
"pano_gen.fit_avg_z",
"torch.FloatTensor",
"numpy.append",
"torch.atan2",
"numpy.stack",
"torch.zeros_like",
"pano_gen.constraint_cor_id_same_z",
"torch.floor",
"numpy.dot",
"numpy.concatenate",
"torch.stack",
"numpy.zeros",
"numpy.array",
"torch.sin",
"torch.optim.SGD"
] |
[((451, 468), 'torch.cat', 'torch.cat', (['[x, y]'], {}), '([x, y])\n', (460, 468), False, 'import torch\n'), ((840, 864), 'torch.cat', 'torch.cat', (['[u, v]'], {'dim': '(1)'}), '([u, v], dim=1)\n', (849, 864), False, 'import torch\n'), ((3390, 3429), 'torch.stack', 'torch.stack', (['[corid[:, 1], corid[:, 0]]'], {}), '([corid[:, 1], corid[:, 0]])\n', (3401, 3429), False, 'import torch\n'), ((5786, 5841), 'pano_gen.constraint_cor_id_same_z', 'pano.constraint_cor_id_same_z', (['ceil_cor_id', 'scorecor', 'Z'], {}), '(ceil_cor_id, scorecor, Z)\n', (5815, 5841), True, 'import pano_gen as pano\n'), ((6396, 6427), 'numpy.mean', 'np.mean', (['ceil_cor_id_xy'], {'axis': '(0)'}), '(ceil_cor_id_xy, axis=0)\n', (6403, 6427), True, 'import numpy as np\n'), ((6534, 6588), 'pano_gen.fit_avg_z', 'pano.fit_avg_z', (['floor_cor_id', 'ceil_cor_id_xy', 'scorecor'], {}), '(floor_cor_id, ceil_cor_id_xy, scorecor)\n', (6548, 6588), True, 'import pano_gen as pano\n'), ((6950, 6977), 'torch.FloatTensor', 'torch.FloatTensor', (['scoreedg'], {}), '(scoreedg)\n', (6967, 6977), False, 'import torch\n'), ((6993, 7020), 'torch.FloatTensor', 'torch.FloatTensor', (['scorecor'], {}), '(scorecor)\n', (7010, 7020), False, 'import torch\n'), ((7030, 7051), 'torch.FloatTensor', 'torch.FloatTensor', (['pc'], {}), '(pc)\n', (7047, 7051), False, 'import torch\n'), ((7065, 7090), 'torch.FloatTensor', 'torch.FloatTensor', (['pc_vec'], {}), '(pc_vec)\n', (7082, 7090), False, 'import torch\n'), ((7106, 7135), 'torch.FloatTensor', 'torch.FloatTensor', (['[pc_theta]'], {}), '([pc_theta])\n', (7123, 7135), False, 'import torch\n'), ((7152, 7182), 'torch.FloatTensor', 'torch.FloatTensor', (['[pc_height]'], {}), '([pc_height])\n', (7169, 7182), False, 'import torch\n'), ((7388, 7456), 'torch.optim.SGD', 'optim.SGD', (['[pc, pc_vec, pc_theta, pc_height]'], {'lr': '(0.001)', 'momentum': '(0.9)'}), '([pc, pc_vec, pc_theta, pc_height], lr=0.001, momentum=0.9)\n', (7397, 7456), True, 'import torch.optim as optim\n'), 
((267, 285), 'numpy.dot', 'np.dot', (['vec1', 'vec2'], {}), '(vec1, vec2)\n', (273, 285), True, 'import numpy as np\n'), ((3000, 3030), 'torch.cat', 'torch.cat', (['(ps, ps[-1:, :])', '(0)'], {}), '((ps, ps[-1:, :]), 0)\n', (3009, 3030), False, 'import torch\n'), ((4180, 4210), 'torch.cat', 'torch.cat', (['(ps, ps[-1:, :])', '(0)'], {}), '((ps, ps[-1:, :]), 0)\n', (4189, 4210), False, 'import torch\n'), ((4585, 4630), 'torch.stack', 'torch.stack', (['[ceil_idx[:, 1], ceil_idx[:, 0]]'], {}), '([ceil_idx[:, 1], ceil_idx[:, 0]])\n', (4596, 4630), False, 'import torch\n'), ((4909, 4956), 'torch.stack', 'torch.stack', (['[floor_idx[:, 1], floor_idx[:, 0]]'], {}), '([floor_idx[:, 1], floor_idx[:, 0]])\n', (4920, 4956), False, 'import torch\n'), ((6273, 6343), 'numpy.concatenate', 'np.concatenate', (['(ceil_cor_id_xy[1:, :], ceil_cor_id_xy[:1, :])'], {'axis': '(0)'}), '((ceil_cor_id_xy[1:, :], ceil_cor_id_xy[:1, :]), axis=0)\n', (6287, 6343), True, 'import numpy as np\n'), ((6649, 6681), 'numpy.array', 'np.array', (['[ceil_cor_id_xy[1, 1]]'], {}), '([ceil_cor_id_xy[1, 1]])\n', (6657, 6681), True, 'import numpy as np\n'), ((333, 349), 'torch.cos', 'torch.cos', (['theta'], {}), '(theta)\n', (342, 349), False, 'import torch\n'), ((361, 377), 'torch.sin', 'torch.sin', (['theta'], {}), '(theta)\n', (370, 377), False, 'import torch\n'), ((395, 411), 'torch.sin', 'torch.sin', (['theta'], {}), '(theta)\n', (404, 411), False, 'import torch\n'), ((423, 439), 'torch.cos', 'torch.cos', (['theta'], {}), '(theta)\n', (432, 439), False, 'import torch\n'), ((560, 600), 'torch.arange', 'torch.arange', (['(0)', '(pts + 1)'], {'dtype': 'pa.dtype'}), '(0, pts + 1, dtype=pa.dtype)\n', (572, 600), False, 'import torch\n'), ((725, 756), 'torch.atan2', 'torch.atan2', (['xy[:, 1]', 'xy[:, 0]'], {}), '(xy[:, 1], xy[:, 0])\n', (736, 756), False, 'import torch\n'), ((1869, 1893), 'torch.floor', 'torch.floor', (['coordinates'], {}), '(coordinates)\n', (1880, 1893), False, 'import torch\n'), ((1915, 
1938), 'torch.ceil', 'torch.ceil', (['coordinates'], {}), '(coordinates)\n', (1925, 1938), False, 'import torch\n'), ((2814, 2847), 'torch.cat', 'torch.cat', (['(ps, ps[c_num:, :])', '(0)'], {}), '((ps, ps[c_num:, :]), 0)\n', (2823, 2847), False, 'import torch\n'), ((3994, 4027), 'torch.cat', 'torch.cat', (['(ps, ps[c_num:, :])', '(0)'], {}), '((ps, ps[c_num:, :]), 0)\n', (4003, 4027), False, 'import torch\n'), ((5266, 5281), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5279, 5281), False, 'import torch\n'), ((8260, 8326), 'numpy.stack', 'np.stack', (['[opt_cor_id[:split_num], opt_cor_id[split_num:]]'], {'axis': '(1)'}), '([opt_cor_id[:split_num], opt_cor_id[split_num:]], axis=1)\n', (8268, 8326), True, 'import numpy as np\n'), ((6799, 6844), 'numpy.append', 'np.append', (['pc_theta', 'ceil_cor_id_xy[c_num, 0]'], {}), '(pc_theta, ceil_cor_id_xy[c_num, 0])\n', (6808, 6844), True, 'import numpy as np\n'), ((6889, 6934), 'numpy.append', 'np.append', (['pc_theta', 'ceil_cor_id_xy[c_num, 1]'], {}), '(pc_theta, ceil_cor_id_xy[c_num, 1])\n', (6898, 6934), True, 'import numpy as np\n'), ((789, 808), 'torch.zeros_like', 'torch.zeros_like', (['c'], {}), '(c)\n', (805, 808), False, 'import torch\n'), ((1093, 1118), 'torch.atan2', 'torch.atan2', (['xy[1]', 'xy[0]'], {}), '(xy[1], xy[0])\n', (1104, 1118), False, 'import torch\n'), ((5974, 6004), 'numpy.zeros', 'np.zeros', (['ceil_cor_id.shape[0]'], {}), '(ceil_cor_id.shape[0])\n', (5982, 6004), True, 'import numpy as np\n'), ((1211, 1230), 'torch.zeros_like', 'torch.zeros_like', (['c'], {}), '(c)\n', (1227, 1230), False, 'import torch\n'), ((1290, 1309), 'torch.zeros_like', 'torch.zeros_like', (['c'], {}), '(c)\n', (1306, 1309), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Softbank Robotics Europe
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Third-party libraries
from xmp.xmp import XMPFile, registerNamespace
# Local modules
from qidata import DataType
from qidata.metadata_objects import Transform, TimeStamp
from qidata.qidatafile import QiDataFile, throwIfClosed
from qidata.qidataobject import QiDataObject
from qidata.qidatasensorobject import QiDataSensorObject
import _mixin as xmp_tools
QIDATA_SENSOR_NS=u"http://softbank-robotics.com/qidatasensor/1"
registerNamespace(QIDATA_SENSOR_NS, "qidatasensor")
class QiDataSensorFile(QiDataSensorObject, QiDataFile):
    """A QiDataFile that also persists sensor-level metadata.

    When closed in a writable mode, the current data type, transform and
    timestamp are written into the file's XMP metadata under the
    qidatasensor namespace; when annotations are loaded, they are read
    back from the same place.
    """

    # ──────────
    # Public API

    def close(self):
        """
        Closes the file after writing the metadata
        """
        if self.mode != "r":
            sensor_metadata = self._xmp_file.metadata[QIDATA_SENSOR_NS]
            sensor_metadata.data_type = self.type
            sensor_metadata.transform = self.transform
            sensor_metadata.timestamp = self.timestamp
        super(QiDataSensorFile, self).close()

    # ───────────
    # Private API

    @throwIfClosed
    def _loadAnnotations(self):
        """Load annotations, then restore sensor info from XMP metadata."""
        super(QiDataSensorFile, self)._loadAnnotations()
        # Load data type
        sensor_metadata = self._xmp_file.metadata[QIDATA_SENSOR_NS]
        if not sensor_metadata.children:
            return
        data = sensor_metadata.value
        xmp_tools._removePrefixes(data)
        self._type = DataType[data["data_type"]]
        self._position = Transform(**data["transform"])
        self._timestamp = TimeStamp(**data["timestamp"])

    # ──────────────
    # Textualization

    def __unicode__(self):
        parts = [
            "File name: " + self.name + "\n",
            "Object type: " + unicode(self.type) + "\n",
            "Object timestamp: " + unicode(self.timestamp) + "\n",
            "Object transform: " + unicode(self.transform) + "\n",
        ]
        return "".join(parts) + QiDataObject.__unicode__(self)
|
[
"xmp.xmp.registerNamespace",
"qidata.metadata_objects.Transform",
"qidata.metadata_objects.TimeStamp",
"qidata.qidataobject.QiDataObject.__unicode__",
"_mixin._removePrefixes"
] |
[((1995, 2046), 'xmp.xmp.registerNamespace', 'registerNamespace', (['QIDATA_SENSOR_NS', '"""qidatasensor"""'], {}), "(QIDATA_SENSOR_NS, 'qidatasensor')\n", (2012, 2046), False, 'from xmp.xmp import XMPFile, registerNamespace\n'), ((3270, 3300), 'qidata.qidataobject.QiDataObject.__unicode__', 'QiDataObject.__unicode__', (['self'], {}), '(self)\n', (3294, 3300), False, 'from qidata.qidataobject import QiDataObject\n'), ((2764, 2795), '_mixin._removePrefixes', 'xmp_tools._removePrefixes', (['data'], {}), '(data)\n', (2789, 2795), True, 'import _mixin as xmp_tools\n'), ((2860, 2890), 'qidata.metadata_objects.Transform', 'Transform', ([], {}), "(**data['transform'])\n", (2869, 2890), False, 'from qidata.metadata_objects import Transform, TimeStamp\n'), ((2912, 2942), 'qidata.metadata_objects.TimeStamp', 'TimeStamp', ([], {}), "(**data['timestamp'])\n", (2921, 2942), False, 'from qidata.metadata_objects import Transform, TimeStamp\n')]
|
import cStringIO
from utils import makeIndentString
def writerFor(obj):
    """Return the writer function for obj's exact type.

    Raises KeyError for unsupported types (lookup is by exact type, so
    subclasses of the supported types are not matched).
    """
    writers = {
        dict: writeDict,
        list: writeList,
        str: writeString,
        bool: writeBoolean,
    }
    return writers[type(obj)]
def write(obj, outstream=None, indentStr=None, level=0):
    """Write `obj` as a complete plist document to `outstream`.

    Emits the XML declaration, the plist DOCTYPE and the <plist> wrapper
    around the serialized object, then seeks the stream back to where
    writing began so the document can be read straight back.

    BUG FIX: the original passed the indent string as a *second argument*
    to `outstream.write`, which takes a single string and raises
    TypeError; the indent is now %-formatted into each line as intended.
    """
    outstream = outstream or cStringIO.StringIO()
    indStr = makeIndentString(indentStr, level)
    pos = outstream.tell()
    outstream.write("""%s<?xml version="1.0" encoding="UTF-8"?>""" % indStr)
    outstream.write("""%s<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">""" % indStr)
    outstream.write("""%s<plist version="1.0">""" % indStr)
    writeObject(obj, outstream, indentStr, level)
    outstream.write("""%s</plist>""" % indStr)
    outstream.seek(pos)
    return outstream
def writeObject(obj, outstream=None, indentStr=None, level=0):
    """Dispatch `obj` to the writer for its type and emit it to the stream."""
    stream = outstream or cStringIO.StringIO()
    writer = writerFor(obj)
    return writer(obj, stream, indentStr, level)
def writeList(listObject, outstream=None, indentStr=None, level=0):
    """Emit `listObject` as a plist <array>, recursing into each element."""
    outstream = outstream or cStringIO.StringIO()
    indent = makeIndentString(indentStr, level)
    outstream.write("%s<array>" % indent)
    for element in listObject:
        writeObject(element, outstream, indentStr, level + 1)
    outstream.write("%s</array>" % indent)
    return outstream
def writeDict(dictObject, outstream=None, indentStr=None, level=0):
    """Serialize a Python dict as a plist ``<dict>`` of key/value pairs.

    Keys and values are written one indentation level deeper.
    """
    outstream = outstream if outstream else cStringIO.StringIO()
    prefix = makeIndentString(indentStr, level)
    outstream.write("%s<dict>" % prefix)
    # Python 2 dict iteration; key order is whatever the dict yields.
    for key, value in dictObject.iteritems():
        writeKey(key, outstream, indentStr, level + 1)
        writeObject(value, outstream, indentStr, level + 1)
    outstream.write("%s</dict>" % prefix)
    return outstream
def writeBoolean(value, outstream=None, indentStr=None, level=0):
    """Serialize a boolean as the self-closing ``<true/>``/``<false/>`` tag."""
    outstream = outstream if outstream else cStringIO.StringIO()
    tag = "true" if value else "false"
    outstream.write("%s<%s/>" % (makeIndentString(indentStr, level), tag))
    return outstream
def writeKey(key, outstream=None, indentStr=None, level=0):
    """Serialize a dict key as a plist ``<key>`` element (via str())."""
    outstream = outstream if outstream else cStringIO.StringIO()
    prefix = makeIndentString(indentStr, level)
    outstream.write("%s<key>%s</key>" % (prefix, str(key)))
    return outstream
def writeString(value, outstream=None, indentStr=None, level=0):
    """Serialize a string as a plist ``<string>`` element.

    BUG FIX: the value is now XML-escaped so characters such as '&',
    '<' and '>' cannot corrupt the generated document (the original
    wrote the raw value, producing invalid XML for such inputs).
    """
    outstream = outstream or cStringIO.StringIO()
    outstream.write("%s<string>%s</string>" %
                    (makeIndentString(indentStr, level), escape(value)))
    return outstream
|
[
"cStringIO.StringIO",
"utils.makeIndentString"
] |
[((330, 364), 'utils.makeIndentString', 'makeIndentString', (['indentStr', 'level'], {}), '(indentStr, level)\n', (346, 364), False, 'from utils import makeIndentString\n'), ((1179, 1213), 'utils.makeIndentString', 'makeIndentString', (['indentStr', 'level'], {}), '(indentStr, level)\n', (1195, 1213), False, 'from utils import makeIndentString\n'), ((1560, 1594), 'utils.makeIndentString', 'makeIndentString', (['indentStr', 'level'], {}), '(indentStr, level)\n', (1576, 1594), False, 'from utils import makeIndentString\n'), ((296, 316), 'cStringIO.StringIO', 'cStringIO.StringIO', ([], {}), '()\n', (314, 316), False, 'import cStringIO\n'), ((959, 979), 'cStringIO.StringIO', 'cStringIO.StringIO', ([], {}), '()\n', (977, 979), False, 'import cStringIO\n'), ((1139, 1159), 'cStringIO.StringIO', 'cStringIO.StringIO', ([], {}), '()\n', (1157, 1159), False, 'import cStringIO\n'), ((1520, 1540), 'cStringIO.StringIO', 'cStringIO.StringIO', ([], {}), '()\n', (1538, 1540), False, 'import cStringIO\n'), ((1969, 1989), 'cStringIO.StringIO', 'cStringIO.StringIO', ([], {}), '()\n', (1987, 1989), False, 'import cStringIO\n'), ((2245, 2265), 'cStringIO.StringIO', 'cStringIO.StringIO', ([], {}), '()\n', (2263, 2265), False, 'import cStringIO\n'), ((2491, 2511), 'cStringIO.StringIO', 'cStringIO.StringIO', ([], {}), '()\n', (2509, 2511), False, 'import cStringIO\n'), ((2043, 2077), 'utils.makeIndentString', 'makeIndentString', (['indentStr', 'level'], {}), '(indentStr, level)\n', (2059, 2077), False, 'from utils import makeIndentString\n'), ((2327, 2361), 'utils.makeIndentString', 'makeIndentString', (['indentStr', 'level'], {}), '(indentStr, level)\n', (2343, 2361), False, 'from utils import makeIndentString\n'), ((2579, 2613), 'utils.makeIndentString', 'makeIndentString', (['indentStr', 'level'], {}), '(indentStr, level)\n', (2595, 2613), False, 'from utils import makeIndentString\n')]
|
import os
from datetime import datetime
from flask import (
Flask, flash, render_template, redirect, request, session, url_for)
from flask_pymongo import PyMongo
import cloudinary as Cloud
from cloudinary.uploader import upload, destroy
from bson.objectid import ObjectId
from werkzeug.security import generate_password_hash, check_password_hash
# Load local environment variables during development (env.py is expected
# to be gitignored and sets the os.environ values read below).
if os.path.exists('env.py'):
    import env
app = Flask(__name__)
# MongoDB connection settings, read from the environment.
app.config['MONGO_DBNAME'] = os.environ.get('MONGO_DBNAME')
app.config['MONGO_URI'] = os.environ.get('MONGO_URI')
app.secret_key = os.environ.get('SECRET_KEY')
mongo = PyMongo(app)
# BUG FIX: the original did `Cloud.config.update = ({...})`, which
# *assigned* a dict over the update method instead of calling it, so the
# Cloudinary client was never configured.  Configure it with a real call.
Cloud.config(
    cloud_name=os.environ.get('CLOUD_NAME'),
    api_key=os.environ.get('CLOUD_KEY'),
    api_secret=os.environ.get('CLOUD_SECRET')
)
@app.route('/')
def index():
    """Home page.

    Pulls three views of the websites collection:
      1. the ten highest-starred sites (descending 'stars'),
      2. the ten most recently inserted (descending '_id', which
         encodes creation time),
      3. a random sample of ten via the $sample aggregation stage,
    and renders them all on the index template.
    """
    popular = [site for site in
               mongo.db.websites.find().sort("stars", -1).limit(10)]
    recent = [site for site in
              mongo.db.websites.find().sort("_id", -1).limit(10)]
    random_sample = list(
        mongo.db.websites.aggregate([{"$sample": {"size": 10}}]))
    return render_template('index.html', websites={
        'popular': popular,
        'recent': recent,
        'random': random_sample,
    })
@app.route('/store', methods=["GET"])
def store():
    """Store page, optionally filtered by a text search.

    When a non-empty 'query' parameter is present, the websites
    collection is searched through its text index (covers name and
    URL); otherwise every website is listed.  The 'searched' flag tells
    the template to switch to the search-results layout.
    """
    query = request.args.get('query')
    if query:
        sites = list(mongo.db.websites.find({"$text": {"$search": query}}))
    else:
        sites = list(mongo.db.websites.find())
    return render_template(
        'store.html', websites={'sites': sites, 'searched': bool(query)})
@app.route('/site_details/<websiteid>', methods=['GET', 'POST'])
def siteDetails(websiteid):
    """ Website page:
    GET: Builds the website page that the user sees when they click on a website.
    POST: Handles a user posting a comment on the website page.
    Args:
        1. websiteid (str): The objectId associated with the MongoDB document
        for that specific website.
    Returns:
        * POST: comment object is constructed, and based on the 3 comment variations
        available this is what happens:
            1. COMMENT: star rating is retrieved from comment and website's rating is updated
            accordingly. Comment object is constructed and appended to comments array for
            that website's MongoDB document.
            2. UPDATE: Previous update comment is replaced with this. Comment object is constructed
            and appended to comments array for that website's MongoDB document.
            3. BUG: Comment object is constructed and appended to comments array for
            that website's MongoDB document.
        * GET: The information contained in the MongoDB website document.
    """
    if request.method == 'POST':
        # Only logged-in users may post; everyone else just gets a flash.
        if "user" in session:
            if request.form.get('type') == 'comment':
                # Star-rated review: build the comment, then recompute the
                # site's aggregate rating.
                comment = {
                    'username': session['user'],
                    'timestamp': datetime.now(),
                    'stars': int(request.form.get('site-stars')),
                    'value': request.form.get('site-description'),
                    'comment_type': request.form.get('type')
                }
                website = mongo.db.websites.find_one(
                    {"_id": ObjectId(websiteid)})
                reviews = website["reviews"]
                stars_total = website["stars_total"]
                new_reviews = reviews + 1
                new_stars_total = stars_total + \
                    int(request.form.get('site-stars'))
                # 'stars' is the truncated integer mean of all ratings.
                new_stars = int(new_stars_total/new_reviews)
                mongo.db.websites.find_one_and_update(
                    {"_id": ObjectId(websiteid)},
                    {"$push": {"comments": comment},
                     "$set": {"reviews": new_reviews, "stars": new_stars, "stars_total": new_stars_total}},
                    upsert=True)
            elif request.form.get('type') == 'update':
                # Site-news update: appended as a comment and mirrored into
                # the site's 'last_update' field (replacing the previous one).
                comment = {
                    'username': session['user'],
                    'timestamp': datetime.now(),
                    'value': request.form.get('site-description'),
                    'comment_type': request.form.get('type')
                }
                mongo.db.websites.find_one_and_update(
                    {"_id": ObjectId(websiteid)},
                    {"$push": {"comments": comment},
                     "$set": {"last_update": request.form.get('site-description')}},
                    upsert=True)
            else:
                # Any other type (e.g. bug report): plain comment append,
                # no rating or last_update change.
                comment = {
                    'username': session['user'],
                    'timestamp': datetime.now(),
                    'value': request.form.get('site-description'),
                    'comment_type': request.form.get('type')
                }
                mongo.db.websites.find_one_and_update(
                    {"_id": ObjectId(websiteid)},
                    {"$push": {"comments": comment}},
                    upsert=True)
            flash('Commented successfully', 'success')
        else:
            flash('You need to be logged in to add a comment', 'info')
        # POST/redirect/GET: reload the page so a refresh cannot re-post.
        return redirect(url_for('siteDetails', websiteid=websiteid))
    website = mongo.db.websites.find_one({"_id": ObjectId(websiteid)})
    return render_template('site_details.html', website=website)
@app.route('/user/<username>', methods=['GET', 'POST'])
def user(username):
    """Profile page for the logged-in user.

    Only the session user matching *username* may view it; anyone else
    is redirected to the home page.  Renders the account data plus every
    website the user has published, flashing a hint when there are none.

    Args:
        username (str): the profile owner's username.
    """
    if not (session.get("user") and session["user"] == username):
        return redirect(url_for("index"))
    user_data = mongo.db.users.find_one({"username": session["user"]})
    # Resolve each stored website ObjectId into its full document.
    user_websites = [
        mongo.db.websites.find_one({"_id": ObjectId(site_id)})
        for site_id in user_data.get("websites", [])
    ]
    if not user_websites:
        flash(
            'Click the "ADD" button to publish your first website!', 'info')
    return render_template(
        'user.html', username=user_data["username"],
        user_data=user_data, user_websites=user_websites)
@app.route("/create_site", methods=["GET", "POST"])
def createSite():
    """ Adding a site page:
    * GET: Returns the create site page template
    * POST: Constructs a MongoDB website document, and updates both
    the website collection, as well as the user's document with this
    website.
    * The added image associated with the website (required) is uploaded
    to cloudinary, and the url and id in the cloudinary response is stored
    against the website's document for later retreival.
    * Requires a logged-in session; otherwise redirects to the home page.
    """
    if session.get("user"):
        if request.method == 'POST':
            # URLs are unique across the store; reject duplicates up front.
            existing_website = mongo.db.websites.find_one(
                {"url": request.form.get('site_url')})
            if existing_website:
                flash("Website with this url already exists.", 'error')
                return render_template('create_site.html')
            # NOTE(review): 'site_img' is assumed to always be present in
            # the form — a missing file raises KeyError; confirm the form
            # marks the upload as required.
            file = request.files['site_img']
            # Upload to Cloudinary, resized to 640px wide; the response's
            # url/public_id are persisted for display and later deletion.
            cloudinary_response = upload(
                file,
                folder="webapp_store/site_images/",
                public_id=request.form.get('site_name'),
                overwrite=True,
                resource_type='image',
                transformation=[{'width': 640}]
            )
            # New sites start with zeroed rating counters and no comments.
            site = {
                "title": request.form.get('site_name'),
                "url": request.form.get('site_url'),
                "owner": session["user"],
                "description": request.form.get('site_description'),
                "stars": 0,
                "reviews": 0,
                "stars_total": 0,
                "last_update": "",
                "image": cloudinary_response["url"],
                "image_id": cloudinary_response["public_id"],
                "comments": [],
            }
            website = mongo.db.websites.insert_one(site)
            # Link the new site to its owner's document.
            mongo.db.users.find_one_and_update(
                {'username': session["user"]},
                {"$push": {"websites": website.inserted_id}},
                upsert=True)
            flash("Your website was published successfully", 'success')
            return redirect(url_for('user', username=session['user']))
        return render_template('create_site.html')
    return redirect(url_for('index'))
@app.route('/update_site/<websiteid>', methods=['GET', 'POST'])
def updateSite(websiteid):
    """ Update your site page:
    * GET: Returns the update site page template (only when the site
      belongs to the logged-in user).
    * POST: Updates the website's MongoDB document with new information.
    Args:
        1. websiteid (str): The associated website id.
    Returns:
        * When successfully updated, it renders the site details page with
        updated information.
    """
    if session.get("user"):
        if request.method == 'POST':
            # Reject a URL already used by any *other* site.
            existing_website = mongo.db.websites.find_one(
                {"$and": [
                    {"url": request.form.get('site_url')},
                    {"_id": {
                        "$ne": ObjectId(websiteid)
                    }}
                ]})
            if existing_website:
                flash("Website with this url already exists.", 'error')
                return render_template('create_site.html')
            website = mongo.db.websites.find_one_and_update(
                {"_id": ObjectId(websiteid)},
                {"$set": {
                    "title": request.form.get('site_name'),
                    "url": request.form.get('site_url'),
                    "owner": session["user"],
                    "description": request.form.get('site_description'),
                }})
            updated_website = mongo.db.websites.find_one(
                {"_id": ObjectId(websiteid)})
            flash("Your website was updated successfully", "success")
            # BUG FIX: the original rendered 'siteDetails.html', but the
            # site-details route renders 'site_details.html' — use the
            # same template name so rendering does not 404 on a missing
            # template.
            return render_template('site_details.html', website=updated_website)
        # GET: only the owner (whose user doc lists this site) may edit.
        user_data = mongo.db.users.find_one({"username": session["user"]})
        if "websites" in user_data:
            if ObjectId(websiteid) in user_data["websites"]:
                website = mongo.db.websites.find_one(
                    {"_id": ObjectId(websiteid)})
                return render_template('update_site.html', website=website)
        return redirect(url_for('index'))
    return redirect(url_for('index'))
@app.route('/delete_site/<websiteid>')
def deleteSite(websiteid):
    """ Delete site route
    * Deletes the website from the database
    Args:
        1. websiteid (str): The website id
    Returns:
        * Removes the website document as well as reference to website in user's
        array of websites. Also removes the website's image from cloudinary.
        * Only the owner (whose user document lists this site) can delete;
        anyone else is redirected to the home page without changes.
    """
    if session.get("user"):
        user_data = mongo.db.users.find_one({"username": session["user"]})
        if "websites" in user_data:
            # Ownership check: the site id must appear in the user's own
            # 'websites' array.
            if ObjectId(websiteid) in user_data["websites"]:
                # 1. Unlink the site from the owner's document.
                mongo.db.users.find_one_and_update(
                    {'username': session["user"]},
                    {"$pull": {"websites": ObjectId(websiteid)}},
                    upsert=True)
                # 2. Fetch the doc (for its Cloudinary id) and remove the
                #    hosted image before deleting the document itself.
                website = mongo.db.websites.find_one(
                    {"_id": ObjectId(websiteid)})
                destroy(website["image_id"])
                mongo.db.websites.delete_one({"_id": ObjectId(websiteid)})
                flash('Website was successfully removed', 'success')
                return redirect(url_for('user', username=session['user']))
    return redirect(url_for('index'))
@app.route('/signup', methods=['POST'])
def signup():
    """ Sign up route
    * The passwords entered are checked to validate they match, and
    it is checked that username and email are not already taken,
    before creating a new user document in the Users collection.
    * The user's password is hashed for security purposes; username and
    email are stored lowercased.
    Return:
        * Redirected to user page
    """
    if request.method == 'POST':
        pwd = request.form.get('password')
        re_pwd = request.form.get('repassword')
        if pwd != re_pwd:
            flash('Passwords do not match.', 'error')
            return redirect(url_for('index'))
        existing_username = mongo.db.users.find_one(
            {'username': request.form.get('username').lower()})
        existing_email = mongo.db.users.find_one(
            {'email': request.form.get('email').lower()})
        if existing_username or existing_email:
            flash("Username or email already exists.", 'error')
            return redirect(url_for('index'))
        register = {
            "username": request.form.get('username').lower(),
            "email": request.form.get('email').lower(),
            # BUG FIX: hash the password exactly as typed.  The original
            # lowercased it before hashing while login() verifies the raw
            # form value, so any password containing uppercase letters
            # could never authenticate.
            "password": generate_password_hash(pwd),
        }
        mongo.db.users.insert_one(register)
        session["user"] = request.form.get('username').lower()
        flash('Welcome ' + request.form.get('username') +
              ', you are now logged in', 'success')
    return redirect(url_for('user', username=session["user"]))
@app.route('/login', methods=['POST'])
def login():
    """Log a user in by email and password.

    Looks the user up by lowercased email and verifies the password
    against the stored hash.  On success the username is stored in the
    session and the user lands on their profile page.  A wrong email and
    a wrong password produce the identical error message so the form does
    not reveal which of the two was incorrect.
    """
    if request.method == 'POST':
        existing_user = mongo.db.users.find_one(
            {'email': request.form.get('email').lower()})
        if existing_user and check_password_hash(
                existing_user["password"], request.form.get('password')):
            session["user"] = existing_user["username"].lower()
            flash(
                'Welcome ' + existing_user["username"] + ', you are now logged in', 'success')
            return redirect(url_for('user', username=session["user"]))
        # Same message for unknown email and wrong password.
        flash('Invalid email or password', 'error')
        return redirect(url_for('index'))
@app.route("/logout")
def logout():
    """Log the current user out.

    Drops the 'user' cookie from the session, confirms via a flash
    message, and sends the visitor back to the home page.
    """
    session.pop("user")
    flash("You have successfully been logged out", "success")
    return redirect(url_for("index"))
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page for unknown URLs.

    Args:
        e: the exception Flask passes to error handlers (unused here).
    """
    # note that we set the 404 status explicitly
    return render_template('404.html'), 404
if __name__ == '__main__':
    # Host and port come from the environment (set by the hosting
    # platform); debug stays False so tracebacks are never exposed.
    # NOTE(review): int(os.environ.get('PORT')) raises TypeError when
    # PORT is unset — assumes the platform always provides it.
    app.run(host=os.environ.get('IP'),
            port=int(os.environ.get('PORT')),
            debug=False)
|
[
"cloudinary.uploader.destroy",
"flask.flash",
"flask.session.pop",
"flask.request.args.get",
"flask.request.form.get",
"bson.objectid.ObjectId",
"flask.Flask",
"os.path.exists",
"flask.session.get",
"os.environ.get",
"flask.url_for",
"flask.render_template",
"flask_pymongo.PyMongo",
"datetime.datetime.now"
] |
[((353, 377), 'os.path.exists', 'os.path.exists', (['"""env.py"""'], {}), "('env.py')\n", (367, 377), False, 'import os\n'), ((402, 417), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (407, 417), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((448, 478), 'os.environ.get', 'os.environ.get', (['"""MONGO_DBNAME"""'], {}), "('MONGO_DBNAME')\n", (462, 478), False, 'import os\n'), ((505, 532), 'os.environ.get', 'os.environ.get', (['"""MONGO_URI"""'], {}), "('MONGO_URI')\n", (519, 532), False, 'import os\n'), ((550, 578), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (564, 578), False, 'import os\n'), ((588, 600), 'flask_pymongo.PyMongo', 'PyMongo', (['app'], {}), '(app)\n', (595, 600), False, 'from flask_pymongo import PyMongo\n'), ((645, 673), 'os.environ.get', 'os.environ.get', (['"""CLOUD_NAME"""'], {}), "('CLOUD_NAME')\n", (659, 673), False, 'import os\n'), ((690, 717), 'os.environ.get', 'os.environ.get', (['"""CLOUD_KEY"""'], {}), "('CLOUD_KEY')\n", (704, 717), False, 'import os\n'), ((737, 767), 'os.environ.get', 'os.environ.get', (['"""CLOUD_SECRET"""'], {}), "('CLOUD_SECRET')\n", (751, 767), False, 'import os\n'), ((1560, 1608), 'flask.render_template', 'render_template', (['"""index.html"""'], {'websites': 'websites'}), "('index.html', websites=websites)\n", (1575, 1608), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((1948, 1973), 'flask.request.args.get', 'request.args.get', (['"""query"""'], {}), "('query')\n", (1964, 1973), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((6157, 6210), 'flask.render_template', 'render_template', (['"""site_details.html"""'], {'website': 'website'}), "('site_details.html', website=website)\n", (6172, 6210), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((7798, 7817), 
'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (7809, 7817), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((9938, 9957), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (9949, 9957), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((11884, 11903), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (11895, 11903), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((15733, 15790), 'flask.flash', 'flash', (['"""You have successfully been logged out"""', '"""success"""'], {}), "('You have successfully been logged out', 'success')\n", (15738, 15790), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((15795, 15814), 'flask.session.pop', 'session.pop', (['"""user"""'], {}), "('user')\n", (15806, 15814), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((2201, 2249), 'flask.render_template', 'render_template', (['"""store.html"""'], {'websites': 'websites'}), "('store.html', websites=websites)\n", (2216, 2249), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((2426, 2474), 'flask.render_template', 'render_template', (['"""store.html"""'], {'websites': 'websites'}), "('store.html', websites=websites)\n", (2441, 2474), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((6567, 6586), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (6578, 6586), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((7070, 7185), 'flask.render_template', 'render_template', (['"""user.html"""'], {'username': "user_data['username']", 'user_data': 'user_data', 'user_websites': 'user_websites'}), "('user.html', 
username=user_data['username'], user_data=\n user_data, user_websites=user_websites)\n", (7085, 7185), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((7238, 7254), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (7245, 7254), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((9419, 9454), 'flask.render_template', 'render_template', (['"""create_site.html"""'], {}), "('create_site.html')\n", (9434, 9454), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((9476, 9492), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (9483, 9492), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((11490, 11506), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (11497, 11506), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((12669, 12685), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (12676, 12685), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((13136, 13164), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (13152, 13164), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((13182, 13212), 'flask.request.form.get', 'request.form.get', (['"""repassword"""'], {}), "('repassword')\n", (13198, 13212), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((15835, 15851), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (15842, 15851), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((15961, 15988), 'flask.render_template', 'render_template', (['"""404.html"""'], {}), "('404.html')\n", (15976, 15988), False, 'from flask 
import Flask, flash, render_template, redirect, request, session, url_for\n'), ((5875, 5917), 'flask.flash', 'flash', (['"""Commented successfully"""', '"""success"""'], {}), "('Commented successfully', 'success')\n", (5880, 5917), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((5945, 6003), 'flask.flash', 'flash', (['"""You need to be logged in to add a comment"""', '"""info"""'], {}), "('You need to be logged in to add a comment', 'info')\n", (5950, 6003), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((6029, 6072), 'flask.url_for', 'url_for', (['"""siteDetails"""'], {'websiteid': 'websiteid'}), "('siteDetails', websiteid=websiteid)\n", (6036, 6072), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((6124, 6143), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (6132, 6143), False, 'from bson.objectid import ObjectId\n'), ((6966, 7036), 'flask.flash', 'flash', (['"""Click the "ADD" button to publish your first website!"""', '"""info"""'], {}), '(\'Click the "ADD" button to publish your first website!\', \'info\')\n', (6971, 7036), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((9272, 9331), 'flask.flash', 'flash', (['"""Your website was published successfully"""', '"""success"""'], {}), "('Your website was published successfully', 'success')\n", (9277, 9331), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((10936, 10993), 'flask.flash', 'flash', (['"""Your website was updated successfully"""', '"""success"""'], {}), "('Your website was updated successfully', 'success')\n", (10941, 10993), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((11013, 11073), 'flask.render_template', 'render_template', (['"""siteDetails.html"""'], {'website': 
'updated_website'}), "('siteDetails.html', website=updated_website)\n", (11028, 11073), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((11451, 11467), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (11458, 11467), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((13252, 13293), 'flask.flash', 'flash', (['"""Passwords do not match."""', '"""error"""'], {}), "('Passwords do not match.', 'error')\n", (13257, 13293), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((13627, 13678), 'flask.flash', 'flash', (['"""Username or email already exists."""', '"""error"""'], {}), "('Username or email already exists.', 'error')\n", (13632, 13678), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((14220, 14261), 'flask.url_for', 'url_for', (['"""user"""'], {'username': "session['user']"}), "('user', username=session['user'])\n", (14227, 14261), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((15437, 15480), 'flask.flash', 'flash', (['"""Invalid email or password"""', '"""error"""'], {}), "('Invalid email or password', 'error')\n", (15442, 15480), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((16040, 16060), 'os.environ.get', 'os.environ.get', (['"""IP"""'], {}), "('IP')\n", (16054, 16060), False, 'import os\n'), ((3686, 3710), 'flask.request.form.get', 'request.form.get', (['"""type"""'], {}), "('type')\n", (3702, 3710), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((8020, 8075), 'flask.flash', 'flash', (['"""Website with this url already exists."""', '"""error"""'], {}), "('Website with this url already exists.', 'error')\n", (8025, 8075), False, 'from flask import Flask, flash, render_template, redirect, 
request, session, url_for\n'), ((8099, 8134), 'flask.render_template', 'render_template', (['"""create_site.html"""'], {}), "('create_site.html')\n", (8114, 8134), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((8533, 8562), 'flask.request.form.get', 'request.form.get', (['"""site_name"""'], {}), "('site_name')\n", (8549, 8562), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((8587, 8615), 'flask.request.form.get', 'request.form.get', (['"""site_url"""'], {}), "('site_url')\n", (8603, 8615), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((8690, 8726), 'flask.request.form.get', 'request.form.get', (['"""site_description"""'], {}), "('site_description')\n", (8706, 8726), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((9360, 9401), 'flask.url_for', 'url_for', (['"""user"""'], {'username': "session['user']"}), "('user', username=session['user'])\n", (9367, 9401), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((10314, 10369), 'flask.flash', 'flash', (['"""Website with this url already exists."""', '"""error"""'], {}), "('Website with this url already exists.', 'error')\n", (10319, 10369), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((10393, 10428), 'flask.render_template', 'render_template', (['"""create_site.html"""'], {}), "('create_site.html')\n", (10408, 10428), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((11201, 11220), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (11209, 11220), False, 'from bson.objectid import ObjectId\n'), ((11374, 11426), 'flask.render_template', 'render_template', (['"""update_site.html"""'], {'website': 'website'}), "('update_site.html', 
website=website)\n", (11389, 11426), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((12031, 12050), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (12039, 12050), False, 'from bson.objectid import ObjectId\n'), ((12399, 12427), 'cloudinary.uploader.destroy', 'destroy', (["website['image_id']"], {}), "(website['image_id'])\n", (12406, 12427), False, 'from cloudinary.uploader import upload, destroy\n'), ((12520, 12572), 'flask.flash', 'flash', (['"""Website was successfully removed"""', '"""success"""'], {}), "('Website was successfully removed', 'success')\n", (12525, 12572), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((13322, 13338), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (13329, 13338), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((13707, 13723), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (13714, 13723), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((14049, 14077), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (14065, 14077), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((14987, 15015), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (15003, 15015), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((15102, 15190), 'flask.flash', 'flash', (["('Welcome ' + existing_user['username'] + ', you are now logged in')", '"""success"""'], {}), "('Welcome ' + existing_user['username'] + ', you are now logged in',\n 'success')\n", (15107, 15190), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((15317, 15360), 'flask.flash', 'flash', (['"""Invalid email or 
password"""', '"""error"""'], {}), "('Invalid email or password', 'error')\n", (15322, 15360), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((15509, 15525), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (15516, 15525), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((16083, 16105), 'os.environ.get', 'os.environ.get', (['"""PORT"""'], {}), "('PORT')\n", (16097, 16105), False, 'import os\n'), ((3835, 3849), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3847, 3849), False, 'from datetime import datetime\n'), ((3946, 3982), 'flask.request.form.get', 'request.form.get', (['"""site-description"""'], {}), "('site-description')\n", (3962, 3982), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((4020, 4044), 'flask.request.form.get', 'request.form.get', (['"""type"""'], {}), "('type')\n", (4036, 4044), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((4794, 4818), 'flask.request.form.get', 'request.form.get', (['"""type"""'], {}), "('type')\n", (4810, 4818), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((7939, 7967), 'flask.request.form.get', 'request.form.get', (['"""site_url"""'], {}), "('site_url')\n", (7955, 7967), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((8323, 8352), 'flask.request.form.get', 'request.form.get', (['"""site_name"""'], {}), "('site_name')\n", (8339, 8352), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((10515, 10534), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (10523, 10534), False, 'from bson.objectid import ObjectId\n'), ((10902, 10921), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (10910, 
10921), False, 'from bson.objectid import ObjectId\n'), ((12605, 12646), 'flask.url_for', 'url_for', (['"""user"""'], {'username': "session['user']"}), "('user', username=session['user'])\n", (12612, 12646), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((13771, 13799), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (13787, 13799), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((13830, 13855), 'flask.request.form.get', 'request.form.get', (['"""email"""'], {}), "('email')\n", (13846, 13855), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((14113, 14141), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (14129, 14141), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((15240, 15281), 'flask.url_for', 'url_for', (['"""user"""'], {'username': "session['user']"}), "('user', username=session['user'])\n", (15247, 15281), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((15393, 15409), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (15400, 15409), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((3884, 3914), 'flask.request.form.get', 'request.form.get', (['"""site-stars"""'], {}), "('site-stars')\n", (3900, 3914), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((4145, 4164), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (4153, 4164), False, 'from bson.objectid import ObjectId\n'), ((4383, 4413), 'flask.request.form.get', 'request.form.get', (['"""site-stars"""'], {}), "('site-stars')\n", (4399, 4413), False, 'from flask import Flask, flash, render_template, redirect, request, session, 
url_for\n'), ((4560, 4579), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (4568, 4579), False, 'from bson.objectid import ObjectId\n'), ((4942, 4956), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4954, 4956), False, 'from datetime import datetime\n'), ((4987, 5023), 'flask.request.form.get', 'request.form.get', (['"""site-description"""'], {}), "('site-description')\n", (5003, 5023), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((5061, 5085), 'flask.request.form.get', 'request.form.get', (['"""type"""'], {}), "('type')\n", (5077, 5085), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((5509, 5523), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5521, 5523), False, 'from datetime import datetime\n'), ((5554, 5590), 'flask.request.form.get', 'request.form.get', (['"""site-description"""'], {}), "('site-description')\n", (5570, 5590), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((5628, 5652), 'flask.request.form.get', 'request.form.get', (['"""type"""'], {}), "('type')\n", (5644, 5652), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((10593, 10622), 'flask.request.form.get', 'request.form.get', (['"""site_name"""'], {}), "('site_name')\n", (10609, 10622), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((10651, 10679), 'flask.request.form.get', 'request.form.get', (['"""site_url"""'], {}), "('site_url')\n", (10667, 10679), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((10762, 10798), 'flask.request.form.get', 'request.form.get', (['"""site_description"""'], {}), "('site_description')\n", (10778, 10798), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), 
((11329, 11348), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (11337, 11348), False, 'from bson.objectid import ObjectId\n'), ((12361, 12380), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (12369, 12380), False, 'from bson.objectid import ObjectId\n'), ((12481, 12500), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (12489, 12500), False, 'from bson.objectid import ObjectId\n'), ((13419, 13447), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (13435, 13447), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((13530, 13555), 'flask.request.form.get', 'request.form.get', (['"""email"""'], {}), "('email')\n", (13546, 13555), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((13929, 13957), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (13945, 13957), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((14841, 14866), 'flask.request.form.get', 'request.form.get', (['"""email"""'], {}), "('email')\n", (14857, 14866), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((5187, 5206), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (5195, 5206), False, 'from bson.objectid import ObjectId\n'), ((5754, 5773), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (5762, 5773), False, 'from bson.objectid import ObjectId\n'), ((6902, 6919), 'bson.objectid.ObjectId', 'ObjectId', (['website'], {}), '(website)\n', (6910, 6919), False, 'from bson.objectid import ObjectId\n'), ((10110, 10138), 'flask.request.form.get', 'request.form.get', (['"""site_url"""'], {}), "('site_url')\n", (10126, 10138), False, 'from flask import Flask, flash, render_template, redirect, request, 
session, url_for\n'), ((12223, 12242), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (12231, 12242), False, 'from bson.objectid import ObjectId\n'), ((5307, 5343), 'flask.request.form.get', 'request.form.get', (['"""site-description"""'], {}), "('site-description')\n", (5323, 5343), False, 'from flask import Flask, flash, render_template, redirect, request, session, url_for\n'), ((10202, 10221), 'bson.objectid.ObjectId', 'ObjectId', (['websiteid'], {}), '(websiteid)\n', (10210, 10221), False, 'from bson.objectid import ObjectId\n')]
|
import datetime
import logging as logme
from .tweet import utc_to_local
class Datelock:
    # Class-level defaults; Set() below fills these per instance.
    # Upper bound of the date window (datetime after Set()).
    until = None
    # Lower bound of the date window (datetime after Set()).
    since = None
    # True when the caller explicitly supplied a 'since' date.
    _since_def_user = None
def convertToDateTime(string):
    """Normalize a date/time string to 'YYYY-MM-DD HH:MM:SS' form.

    A date with a time component is returned unchanged, a bare date
    gets midnight appended, and anything else yields an empty string.
    """
    tokens = string.split()
    if len(tokens) == 2:
        return string
    if len(tokens) == 1:
        return string + " 00:00:00"
    return ""
def Set(Until, Since):
    """Build a Datelock from optional 'YYYY-MM-DD' bound strings.

    Missing 'Until' defaults to today; missing 'Since' defaults to
    Twitter's launch date (2006-03-21). Both bounds are converted to
    local time via utc_to_local.
    """
    logme.debug(__name__+':Set')
    lock = Datelock()
    if Until:
        lock.until = utc_to_local(datetime.datetime.strptime(Until, "%Y-%m-%d"))
    else:
        lock.until = datetime.datetime.today()
    if Since:
        lock.since = utc_to_local(datetime.datetime.strptime(Since, "%Y-%m-%d"))
        lock._since_def_user = True
    else:
        lock.since = utc_to_local(
            datetime.datetime.strptime("2006-03-21 00:00:00", "%Y-%m-%d %H:%M:%S"))
        lock._since_def_user = False
    return lock
|
[
"datetime.datetime.strptime",
"logging.debug",
"datetime.datetime.today"
] |
[((418, 448), 'logging.debug', 'logme.debug', (["(__name__ + ':Set')"], {}), "(__name__ + ':Set')\n", (429, 448), True, 'import logging as logme\n'), ((538, 583), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['Until', '"""%Y-%m-%d"""'], {}), "(Until, '%Y-%m-%d')\n", (564, 583), False, 'import datetime\n'), ((652, 677), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (675, 677), False, 'import datetime\n'), ((711, 756), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['Since', '"""%Y-%m-%d"""'], {}), "(Since, '%Y-%m-%d')\n", (737, 756), False, 'import datetime\n'), ((858, 928), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2006-03-21 00:00:00"""', '"""%Y-%m-%d %H:%M:%S"""'], {}), "('2006-03-21 00:00:00', '%Y-%m-%d %H:%M:%S')\n", (884, 928), False, 'import datetime\n')]
|
import pandas as pd
import time
import calendar
from datetime import datetime, timedelta
# Filenames (Files are in same folder as the python script)
chicago = 'chicago.csv'  # Chicago bikeshare dataset
new_york_city = 'new_york_city.csv'  # New York City bikeshare dataset
washington = 'washington.csv'  # Washington bikeshare dataset (no Gender/Birth Year)
def get_city():
    '''Asks the user for a city and returns the filename for that city's bike share data.
    Args:
        None.
    Returns:
        (DataFrame) The chosen city's bikeshare data loaded with pandas.
    '''
    choice = input('\nHello! Let\'s explore some US bikeshare data!\n'
                   'Would you like to see data for Chicago, New York, or Washington?\n')
    choice = choice.lower()
    # Dispatch table instead of an if/elif chain.
    city_files = {'chicago': chicago,
                  'new york': new_york_city,
                  'washington': washington}
    if choice in city_files:
        return pd.read_csv(city_files[choice])
    print('Please choose a value from options')
    return get_city()  # re-prompt until a valid city is given
def get_time_period():
    '''Asks the user for a time period and returns the specified filter.
    Args:
        None.
    Returns:
        (str) One of 'month', 'day', 'both' or 'none'.
    '''
    choice = input('\nWould you like to filter the data by "month", "day", "both" or not at'
                   ' all? Type "none" for no time filter.\n').lower()
    if choice in ('month', 'day', 'both', 'none'):
        return choice
    print('Please choose a value from options')
    return get_time_period()  # re-prompt on invalid input
def get_month():
    '''Asks the user for a month and returns the specified month.
    Args:
        None.
    Returns:
        (str) Lower-cased month name chosen by the user.
    '''
    choice = input(
        '\nWhich month? January, February, March, April, May, or June?\n').lower()
    # Guard clause: re-prompt until the answer is a known month.
    if choice not in months_list:
        print('Please choose a value from options')
        return get_month()
    return choice
def get_day(month):
    '''Asks the user for a day and returns the specified day.

    Args:
        (int) month: Month number (1-12) used to validate the day range.
    Returns:
        (int) Day of month entered by the user.
    '''
    year = 2017  # Dataset covers 2017 only, so the leap-year check is fixed.
    day = input('\nWhich day? Please type your response as an integer.\n')
    try:
        int_day = int(day)
    except ValueError:
        # Fix: the original bare `except:` also swallowed KeyboardInterrupt
        # and SystemExit; only a non-integer entry is a recoverable error.
        print('Please enter an integer day value')
        return get_day(month)
    # monthrange()[1] is the number of days in the given month.
    if 0 < int_day <= calendar.monthrange(year, month)[1]:
        return int_day
    print('Please choose a valid day value for the specified month')
    return get_day(month)
def get_weekday():
    '''Asks the user for a weekday(Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday)
    and returns the specified day.
    Args:
        None
    Returns:
        (int) Index of the weekday in day_list (0 = monday).
    '''
    week_day = input(
        '\nWhich weekday? Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday?\n')
    if week_day.lower() in day_list:
        # Bug fix: index the lower-cased value. The membership test lowers
        # the input, but the original indexed the raw string, so typing
        # e.g. 'Monday' passed the check and then raised ValueError here.
        return day_list.index(week_day.lower())
    else:
        print('Please choose a valid weekday from the values specified above')
        return get_weekday()
def popular_month(city_file):
    '''Find the most popular month for trip start times.
    Args:
        city_file: DataFrame whose 'Start Time' column is datetime-typed.
    Returns:
        (str) Name of the month with the most trip starts.
    '''
    month_counts = city_file['Start Time'].dt.month.value_counts()
    top_month_number = month_counts.index[0]
    # months_list is 0-based while dt.month is 1-based.
    return months_list[top_month_number - 1]
def popular_day(city_file):
    '''Find the most popular day of the week for trip start times.
    Args:
        city_file: DataFrame whose 'Start Time' column is datetime-typed.
    Returns:
        (str) Name of the weekday with the most trip starts.
    '''
    weekday_counts = city_file['Start Time'].dt.weekday.value_counts()
    # dt.weekday numbering lines up with day_list's ordering.
    return day_list[weekday_counts.index[0]]
def popular_hour(city_file):
    '''Find the most popular hour of the day for trip start times.
    Args:
        city_file: DataFrame whose 'Start Time' column is datetime-typed.
    Returns:
        (int) Hour of day (0-23) with the most trip starts.
    '''
    hour_counts = city_file['Start Time'].dt.hour.value_counts()
    return hour_counts.index[0]
def trip_duration(city_file):
    '''Print the total and mean trip duration.

    The total is rendered as years/months/days/... by adding the summed
    seconds to a reference datetime of year 1 and reading its fields.

    Args:
        city_file: DataFrame with a numeric 'Trip Duration' column (seconds).
    Returns:
        None
    '''
    total_seconds = int(city_file['Trip Duration'].sum())
    as_datetime = datetime(1, 1, 1) + timedelta(seconds=total_seconds)
    mean_duration = city_file['Trip Duration'].mean()
    # The reference datetime starts at year 1, month 1, day 1, hence the -1s.
    print("Total trip duration is : %d years" % (as_datetime.year - 1), end=',')
    print(" %d months" % (as_datetime.month - 1), end=',')
    print(" %d days" % (as_datetime.day - 1), end=',')
    print(" %d hours" % (as_datetime.hour), end=',')
    print(" %d minutes" % (as_datetime.minute), end=',')
    print(" %d seconds" % (as_datetime.second), end='\n\n')
    print("Mean trip duration is : %.4f seconds" %
          (mean_duration))
def popular_stations(city_file):
    '''Find the most used start and end stations.
    Args:
        city_file: DataFrame with 'Start Station' and 'End Station' columns.
    Returns:
        (tuple) (most common start station, most common end station)
    '''
    top_start = city_file['Start Station'].value_counts().index[0]
    top_end = city_file['End Station'].value_counts().index[0]
    return top_start, top_end
def popular_trip(city_file):
    '''Print the most frequent (start station, end station) trip.
    Args:
        city_file: DataFrame with 'Start Station', 'End Station' and
        'Trip Duration' columns.
    Returns:
        None
    '''
    pair_counts = city_file.groupby(
        ['Start Station', 'End Station'], as_index=False)['Trip Duration'].count()
    top_pair = pair_counts.nlargest(1, 'Trip Duration')
    print("The most popular trip is from '{}' to '{}'".format(
        top_pair.iloc[0, 0], top_pair.iloc[0, 1]))
def users(city_file):
    '''Print the count of each user type.
    Args:
        city_file: DataFrame with a 'User Type' column.
    Returns:
        None
    '''
    user_type_counts = city_file['User Type'].value_counts()
    print('The counts of each user type are given below:')
    # Fix: Series.iteritems() was removed in pandas 2.0; items() is the
    # supported equivalent and behaves identically here.
    for user_type, count in user_type_counts.items():
        print('{} : {}'.format(user_type, count))
def gender(city_file):
    '''Print the count of each gender, when the data set has one.
    Args:
        city_file: DataFrame; the 'Gender' column is optional
        (the Washington data set has none).
    Returns:
        None
    '''
    if 'Gender' not in city_file.columns:
        print('This file doesn\'t contains gender data')
        return
    gender_counts = city_file['Gender'].value_counts()
    print('The count of each gender is given below:')
    # Fix: Series.iteritems() was removed in pandas 2.0; items() is the
    # supported equivalent and behaves identically here.
    for gender_value, count in gender_counts.items():
        print('{} : {}'.format(gender_value, count))
def birth_years(city_file):
    '''Print the oldest, youngest, and most common birth years.
    Args:
        city_file: DataFrame; the 'Birth Year' column is optional
        (the Washington data set has none).
    Returns:
        None
    '''
    if 'Birth Year' not in city_file.columns:
        print('This file doesn\'t contains birth year data')
        return
    # Sort ascending and drop NaNs: first row is the oldest user,
    # last row the youngest.
    years = city_file.sort_values('Birth Year', ascending=True)[
        'Birth Year'].dropna()
    print("The most oldest birth year is %d, the most youngest birth year is %d and the popular birth year is %d" % (
        years.iloc[0], years.iloc[-1], years.value_counts().index[0]))
def display_data(city_file):
    '''Displays five lines of data if the user specifies that they would like to.
    After displaying five lines, ask the user if they would like to see five more,
    continuing asking until they say stop.
    Args:
        city_file: DataFrame to page through.
    Returns:
        None
    '''
    answer = input('\nWould you like to view individual trip data?'
                   'Type \'yes\' or \'no\'.\n')
    # Guard clause: anything but 'yes' skips the paging loop entirely.
    if answer.lower() != 'yes':
        return
    index = 0
    while index + 5 <= len(city_file):
        print(city_file[index: index + 5])
        index += 5
        choice = input(
            '\nWould you like to see 5 more rows?\nType \'no\' to stop.\n')
        if choice.lower() == 'no':
            break
def statistics():
    '''Calculates and prints out the descriptive statistics about a city and time period
    specified by the user via raw input.
    Args:
        None
    Returns:
        None
    '''
    # Filter by city (Chicago, New York, Washington)
    city_df = get_city()
    convert_datetime(city_df)
    # Filter by time period (month, day, none)
    time_period = get_time_period()
    # Filter the data set once, up front, according to the time period the
    # user selected ('month', 'day' or 'both'); 'none' leaves it unfiltered.
    if time_period == 'month':
        m = get_month()
        selected_month = months_list.index(m) + 1
        city_df = city_df[city_df['Start Time'].dt.month == selected_month]
    elif time_period == 'day':
        week_day = get_weekday()
        city_df = city_df[city_df['Start Time'].dt.weekday == week_day]
    elif time_period == 'both':
        m = get_month()
        selected_month = months_list.index(m) + 1
        selected_day = get_day(selected_month)
        city_df = city_df[city_df['Start Time'].dt.month == selected_month]
        city_df = city_df[city_df['Start Time'].dt.day == selected_day]
    print('Calculating the first statistic...')
    # What is the most popular month for start time?
    # (Only meaningful when no month filter is applied.)
    if time_period == 'none':
        start_time = time.time()
        # NOTE(review): .title() is applied to the whole formatted sentence,
        # not just the month name — confirm this capitalization is intended.
        print("The most popular month for start time is {}.".format(
            popular_month(city_df)).title())
        print("That took %s seconds." % (time.time() - start_time))
        print("Calculating the next statistic...")
    # What is the most popular day of week (Monday, Tuesday, etc.) for start time?
    if time_period == 'none' or time_period == 'month':
        start_time = time.time()
        print("The most popular day for start time is {}.".format(
            popular_day(city_df)).title())
        print("That took %s seconds." % (time.time() - start_time))
        print("Calculating the next statistic...")
    start_time = time.time()
    # What is the most popular hour of day for start time?
    print("The popular hour of the day for start time is {}th hour.".format(
        popular_hour(city_df)))
    print("That took %s seconds." % (time.time() - start_time))
    print("Calculating the next statistic...")
    start_time = time.time()
    # What is the total trip duration and average trip duration?
    trip_duration(city_df)
    print("That took %s seconds." % (time.time() - start_time))
    print("Calculating the next statistic...")
    start_time = time.time()
    # What is the most popular start station and most popular end station?
    freq_start_station, freq_end_station = popular_stations(
        city_df)
    print("The most popular start station is {} and the most popular end station is {}.".format(
        freq_start_station, freq_end_station))
    print("That took %s seconds." % (time.time() - start_time))
    print("Calculating the next statistic...")
    start_time = time.time()
    # What is the most popular trip?
    popular_trip(city_df)
    print("That took %s seconds." % (time.time() - start_time))
    print("Calculating the next statistic...")
    start_time = time.time()
    # What are the counts of each user type?
    users(city_df)
    print("That took %s seconds." % (time.time() - start_time))
    print("Calculating the next statistic...")
    start_time = time.time()
    # What are the counts of gender?
    gender(city_df)
    print("That took %s seconds." % (time.time() - start_time))
    print("Calculating the next statistic...")
    start_time = time.time()
    # What are the earliest (i.e. oldest user), most recent (i.e. youngest user), and
    # most popular birth years?
    birth_years(city_df)
    print("That took %s seconds." % (time.time() - start_time))
    # Display five lines of data at a time if user specifies that they would like to
    display_data(city_df)
    # Restart?  (Recursion is fine here: restarts are user-driven and rare.)
    restart = input('\nWould you like to restart? Type \'yes\' or \'no\'.\n')
    if restart.lower() == 'yes':
        statistics()
def convert_datetime(city_file):
    ''' Convert the 'Start Time' and 'End Time' columns from string to datetime, in place.
    Args:
        city_file: DataFrame with 'Start Time' and 'End Time' string columns.
    Return:
        None
    '''
    for column in ('Start Time', 'End Time'):
        city_file[column] = pd.to_datetime(city_file[column])
if __name__ == "__main__":
day_list = ['monday', 'tuesday', 'wednesday',
'thursday', 'friday', 'saturday', 'sunday']
months_list = ['january', 'february', 'march', 'april', 'may', 'june']
statistics()
|
[
"pandas.read_csv",
"datetime.datetime",
"time.time",
"pandas.to_datetime",
"calendar.monthrange"
] |
[((10547, 10558), 'time.time', 'time.time', ([], {}), '()\n', (10556, 10558), False, 'import time\n'), ((10856, 10867), 'time.time', 'time.time', ([], {}), '()\n', (10865, 10867), False, 'import time\n'), ((11089, 11100), 'time.time', 'time.time', ([], {}), '()\n', (11098, 11100), False, 'import time\n'), ((11527, 11538), 'time.time', 'time.time', ([], {}), '()\n', (11536, 11538), False, 'import time\n'), ((11731, 11742), 'time.time', 'time.time', ([], {}), '()\n', (11740, 11742), False, 'import time\n'), ((11936, 11947), 'time.time', 'time.time', ([], {}), '()\n', (11945, 11947), False, 'import time\n'), ((12134, 12145), 'time.time', 'time.time', ([], {}), '()\n', (12143, 12145), False, 'import time\n'), ((12834, 12873), 'pandas.to_datetime', 'pd.to_datetime', (["city_file['Start Time']"], {}), "(city_file['Start Time'])\n", (12848, 12873), True, 'import pandas as pd\n'), ((12902, 12939), 'pandas.to_datetime', 'pd.to_datetime', (["city_file['End Time']"], {}), "(city_file['End Time'])\n", (12916, 12939), True, 'import pandas as pd\n'), ((667, 687), 'pandas.read_csv', 'pd.read_csv', (['chicago'], {}), '(chicago)\n', (678, 687), True, 'import pandas as pd\n'), ((4391, 4408), 'datetime.datetime', 'datetime', (['(1)', '(1)', '(1)'], {}), '(1, 1, 1)\n', (4399, 4408), False, 'from datetime import datetime, timedelta\n'), ((9880, 9891), 'time.time', 'time.time', ([], {}), '()\n', (9889, 9891), False, 'import time\n'), ((10287, 10298), 'time.time', 'time.time', ([], {}), '()\n', (10296, 10298), False, 'import time\n'), ((732, 758), 'pandas.read_csv', 'pd.read_csv', (['new_york_city'], {}), '(new_york_city)\n', (743, 758), True, 'import pandas as pd\n'), ((805, 828), 'pandas.read_csv', 'pd.read_csv', (['washington'], {}), '(washington)\n', (816, 828), True, 'import pandas as pd\n'), ((10765, 10776), 'time.time', 'time.time', ([], {}), '()\n', (10774, 10776), False, 'import time\n'), ((10998, 11009), 'time.time', 'time.time', ([], {}), '()\n', (11007, 11009), False, 'import 
time\n'), ((11436, 11447), 'time.time', 'time.time', ([], {}), '()\n', (11445, 11447), False, 'import time\n'), ((11640, 11651), 'time.time', 'time.time', ([], {}), '()\n', (11649, 11651), False, 'import time\n'), ((11845, 11856), 'time.time', 'time.time', ([], {}), '()\n', (11854, 11856), False, 'import time\n'), ((12043, 12054), 'time.time', 'time.time', ([], {}), '()\n', (12052, 12054), False, 'import time\n'), ((12327, 12338), 'time.time', 'time.time', ([], {}), '()\n', (12336, 12338), False, 'import time\n'), ((2324, 2356), 'calendar.monthrange', 'calendar.monthrange', (['year', 'month'], {}), '(year, month)\n', (2343, 2356), False, 'import calendar\n'), ((10048, 10059), 'time.time', 'time.time', ([], {}), '()\n', (10057, 10059), False, 'import time\n'), ((10451, 10462), 'time.time', 'time.time', ([], {}), '()\n', (10460, 10462), False, 'import time\n')]
|
from datetime import date
from email import message
from vigilancia.order_screenshot import Orders
from django.db.models.fields import DateField
from django.http import Http404
from django.http import HttpResponse
from django.template import loader
from django.shortcuts import get_object_or_404, render
from .models import Camera, Trap
from django.http import HttpResponse, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.parsers import JSONParser
from .serializers import AlertSerializer, CameraOrderSerializer, CameraSerializer, MediaSerializer, TrapSerializer
from rest_framework.viewsets import ViewSet
from rest_framework.response import Response
from .serializers import UploadSerializer
from ftplib import FTP
import json
from vigilancia import serializers
import vigilancia.services.send_email
import vigilancia.services.camera_service
import logging
from datetime import datetime
import os
# Module-level logger; reuses Django's own "django" logger configuration.
logger = logging.getLogger("django")
@csrf_exempt
def test(request):
    """Simple health-check endpoint: logs the request and returns a JSON payload."""
    print(request)
    # Bug fix: the key was the bare name `message`, which resolves to the
    # `email.message` module imported at the top of this file and is not
    # JSON-serializable; the string key was clearly intended.
    return JsonResponse({"message": "test"}, status=201)
@csrf_exempt
def report_alert(request):
    """(POST) Persist a movement alert and trigger an e-mail notification.

    Expects a JSON body with at least 'trap' (id) and 'detected_at'
    (timestamp string). Returns 201 with the stored alert on success,
    400 with the serializer errors otherwise.

    NOTE(review): non-POST requests fall through and return None, which
    Django reports as an error — confirm whether that is acceptable.
    """
    if request.method == 'POST':
        data = JSONParser().parse(request)
        print(data)
        logger.info("Alerta detectada. Detalles: Armadilha %s, Detectado %s.", str(data['trap']), data['detected_at'])
        serializer = AlertSerializer(data=data)
        if serializer.is_valid():
            serializer.save()
            # Human-readable notification text (Portuguese); currently only
            # printed — the e-mail helper receives just the trap id.
            message = 'Oi, a armadilha ' + str(data['trap']) + ' notificou movimento as ' + data['detected_at']
            print(message)
            vigilancia.services.send_email.send_gmail_test(str(data['trap']) )
            return JsonResponse(serializer.data, status=201)
        return JsonResponse(serializer.errors, status=400)
@csrf_exempt
def camera_order(request):
    """Validate a camera order and, when valid, take a screenshot.

    Returns the screenshot result as JSON when the payload validates,
    otherwise the literal string 'OK'.
    """
    data = JSONParser().parse(request)
    serializer = CameraOrderSerializer(data=data)
    response = 'OK'
    if serializer.is_valid():
        # Dead-code fix: `response = serializer.data` was immediately
        # overwritten by the screenshot result, so it has been removed.
        # NOTE(review): an unused `Orders()` instance was also created
        # here; removed assuming its constructor has no side effects —
        # confirm against order_screenshot.Orders.
        response = Orders().take_screeshot()
    return JsonResponse(response, safe=False)
@csrf_exempt
def trap_general(request):
    """
    (GET) List all traps
    (POST) Create one trap
    """
    if request.method == 'GET':
        serializer = TrapSerializer(Trap.objects.all(), many=True)
        return JsonResponse(serializer.data, safe=False)
    if request.method == 'POST':
        payload = JSONParser().parse(request)
        serializer = TrapSerializer(data=payload)
        # Guard clause: reject invalid payloads before saving.
        if not serializer.is_valid():
            return JsonResponse(serializer.errors, status=400)
        serializer.save()
        return JsonResponse(serializer.data, status=201)
@csrf_exempt
def trap_specific(request, pk):
    """Endpoint for one specific trap (by pk).

    NOTE(review): despite the original "(GET) Obtain the detail" docstring,
    this view parses the request body and creates/saves a Camera, and `pk`
    is never used — looks like a copy/paste from a create endpoint.
    Confirm the intended behavior before relying on this route.
    """
    data = JSONParser().parse(request)
    serializer = CameraSerializer(data=data)
    if serializer.is_valid():
        serializer.save()
        return JsonResponse(serializer.data, status=201)
    return JsonResponse(serializer.errors, status=400)
@csrf_exempt
def movement_detected(request):
    """
    (POST) Register movement detected in a trap

    Currently just echoes the decoded JSON body back with status 201;
    the trap-update logic is still commented out below.
    """
    try:
        body_unicode = request.body.decode('utf-8')
        body = json.loads(body_unicode)
        print(request)
        # trap = Trap.objects.get(pk=trap_pk)
    except Trap.DoesNotExist:
        # NOTE(review): nothing in the try block raises Trap.DoesNotExist;
        # a malformed body raises JSONDecodeError and will 500 — confirm.
        return HttpResponse(status=404)
    # trap = Trap()
    # trap.last_detected_movement_date = date()
    # Trap.objects.update(trap)
    return JsonResponse({'request': body}, status=201)
    # Dead-code fix: a serializer-based save used to follow here, but it
    # sat after the return above and was unreachable; it has been removed.
@csrf_exempt
def camera_list(request):
    """
    List all code snippets, or create a new snippet.
    """
    if request.method == 'GET':
        serializer = CameraSerializer(Camera.objects.all(), many=True)
        return JsonResponse(serializer.data, safe=False)
    if request.method == 'POST':
        payload = JSONParser().parse(request)
        serializer = CameraSerializer(data=payload)
        # Guard clause: reject invalid payloads before saving.
        if not serializer.is_valid():
            return JsonResponse(serializer.errors, status=400)
        serializer.save()
        return JsonResponse(serializer.data, status=201)
@csrf_exempt
def take_photo(request, pk):
    """(POST) Upload the bundled test image on behalf of camera `pk`.

    Reads the local 'test.jpg' next to this module, builds a timestamped
    file name tagged with the trap/camera id, and pushes it through the
    camera service uploader. Returns the generated file name as JSON.
    """
    try:
        camera = Camera.objects.get(pk=pk)  # 404 when the camera does not exist
    except Camera.DoesNotExist:
        return HttpResponse(status=404)
    if request.method == 'POST':
        base_folder = os.path.dirname(__file__)
        image_path = os.path.join(base_folder, 'test.jpg')
        print('IMAGE PATH', image_path)
        # NOTE(review): the file handle is never closed — consider `with`.
        file = open(image_path, 'rb')
        # e.g. 'TRAP3-20210625-015519.jpg'
        file_name = 'TRAP' + str(pk) + '-' + str(datetime.now().strftime("%Y%m%d-%H%M%S")) + '.jpg'
        #serializer = MediaSerializer(data={"trap": 1, "date": "2021-06-25T01:55:19"})
        #if serializer.is_valid():
        #    serializer.save()
        print(file.__dict__, file_name)
        vigilancia.services.camera_service.CameraService().upload_photo(file, file_name)
        return JsonResponse({"file_name": file_name})
@csrf_exempt
def camera_detail(request, pk):
    """
    Retrieve, update or delete a code snippet.
    """
    try:
        camera = Camera.objects.get(pk=pk)
    except Camera.DoesNotExist:
        return HttpResponse(status=404)
    if request.method == 'GET':
        return JsonResponse(CameraSerializer(camera).data)
    if request.method == 'PUT':
        payload = JSONParser().parse(request)
        serializer = CameraSerializer(camera, data=payload)
        # Guard clause: reject invalid payloads before saving.
        if not serializer.is_valid():
            return JsonResponse(serializer.errors, status=400)
        serializer.save()
        return JsonResponse(serializer.data)
    if request.method == 'DELETE':
        camera.delete()
        return HttpResponse(status=204)
@csrf_exempt
def upload_photo(request):
    """Push an uploaded file to the remote FTP server and echo debug info.

    NOTE(review): several issues to confirm:
    - FTP credentials are hardcoded (placeholders here); move to settings.
    - The except clause catches Camera.DoesNotExist, but nothing in the
      try block queries Camera; FTP/attribute errors will propagate as 500.
    - The first `response` assignment is immediately overwritten by the
      debug string below.
    """
    try:
        file_uploaded = request.FILES.get('file_uploaded')
        content_type = file_uploaded.content_type
        response = "POST API and you have uploaded a {} file".format(content_type) + " called {}".format(file_uploaded)
        # Debug dump of the upload object; replaces the response above.
        response = "dir {}, dict {}, content_type {}".format(dir(file_uploaded), file_uploaded.__dict__, file_uploaded.content_type)
        session = FTP(host='maonamata.com.br', user='<EMAIL>', passwd='<PASSWORD>')
        # response = session.pwd()
        session.storbinary('STOR ' + file_uploaded._name, file_uploaded.file)
        #session.cwd('./../../public_html/pipe1/trapassets/trap1test/')
        session.quit()
        return HttpResponse(response)
    except Camera.DoesNotExist:
        return HttpResponse(status=404)
@csrf_exempt
def send_email(request):
    """Fire a hard-coded test e-mail and answer 'OK'."""
    recipients = ['<EMAIL>']
    vigilancia.services.send_email.send_gmail_test(recipients, 'texto test')
    return HttpResponse('OK')
class UploadViewSet(ViewSet):
    """DRF viewset that accepts a file upload and reports its content type."""

    serializer_class = UploadSerializer

    def list(self, request):
        """(GET) Placeholder listing endpoint."""
        return Response("GET API")

    def create(self, request):
        """(POST) Echo the content type of the uploaded file."""
        uploaded = request.FILES.get('file_uploaded')
        return Response(
            "POST API and you have uploaded a {} file".format(uploaded.content_type))
#def index(request):
# list = Camera.objects.order_by('-id')[:5]
# context = {'list': list}
# return render(request, 'cameras/index.html', context)
#def detail(request, camera_id):
# camera = get_object_or_404(Camera, pk=camera_id)
# return render(request, 'cameras/detail.html', {'camera': camera})
|
[
"json.loads",
"django.http.HttpResponse",
"os.path.dirname",
"django.http.JsonResponse",
"datetime.datetime.now",
"vigilancia.order_screenshot.Orders",
"rest_framework.response.Response",
"ftplib.FTP",
"rest_framework.parsers.JSONParser",
"os.path.join",
"logging.getLogger"
] |
[((952, 979), 'logging.getLogger', 'logging.getLogger', (['"""django"""'], {}), "('django')\n", (969, 979), False, 'import logging\n'), ((1043, 1086), 'django.http.JsonResponse', 'JsonResponse', (["{message: 'test'}"], {'status': '(201)'}), "({message: 'test'}, status=201)\n", (1055, 1086), False, 'from django.http import HttpResponse, JsonResponse\n'), ((1963, 1971), 'vigilancia.order_screenshot.Orders', 'Orders', ([], {}), '()\n', (1969, 1971), False, 'from vigilancia.order_screenshot import Orders\n'), ((2093, 2127), 'django.http.JsonResponse', 'JsonResponse', (['response'], {'safe': '(False)'}), '(response, safe=False)\n', (2105, 2127), False, 'from django.http import HttpResponse, JsonResponse\n'), ((3062, 3105), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (3074, 3105), False, 'from django.http import HttpResponse, JsonResponse\n'), ((3567, 3610), 'django.http.JsonResponse', 'JsonResponse', (["{'request': body}"], {'status': '(201)'}), "({'request': body}, status=201)\n", (3579, 3610), False, 'from django.http import HttpResponse, JsonResponse\n'), ((3818, 3861), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (3830, 3861), False, 'from django.http import HttpResponse, JsonResponse\n'), ((7013, 7031), 'django.http.HttpResponse', 'HttpResponse', (['"""OK"""'], {}), "('OK')\n", (7025, 7031), False, 'from django.http import HttpResponse, JsonResponse\n'), ((1749, 1792), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (1761, 1792), False, 'from django.http import HttpResponse, JsonResponse\n'), ((2383, 2424), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {'safe': '(False)'}), '(serializer.data, safe=False)\n', (2395, 2424), False, 'from django.http import HttpResponse, JsonResponse\n'), ((3009, 3050), 
'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {'status': '(201)'}), '(serializer.data, status=201)\n', (3021, 3050), False, 'from django.http import HttpResponse, JsonResponse\n'), ((3292, 3316), 'json.loads', 'json.loads', (['body_unicode'], {}), '(body_unicode)\n', (3302, 3316), False, 'import json\n'), ((3765, 3806), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {'status': '(201)'}), '(serializer.data, status=201)\n', (3777, 3806), False, 'from django.http import HttpResponse, JsonResponse\n'), ((4115, 4156), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {'safe': '(False)'}), '(serializer.data, safe=False)\n', (4127, 4156), False, 'from django.http import HttpResponse, JsonResponse\n'), ((4696, 4721), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4711, 4721), False, 'import os\n'), ((4743, 4780), 'os.path.join', 'os.path.join', (['base_folder', '"""test.jpg"""'], {}), "(base_folder, 'test.jpg')\n", (4755, 4780), False, 'import os\n'), ((5256, 5294), 'django.http.JsonResponse', 'JsonResponse', (["{'file_name': file_name}"], {}), "({'file_name': file_name})\n", (5268, 5294), False, 'from django.http import HttpResponse, JsonResponse\n'), ((5622, 5651), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {}), '(serializer.data)\n', (5634, 5651), False, 'from django.http import HttpResponse, JsonResponse\n'), ((6500, 6565), 'ftplib.FTP', 'FTP', ([], {'host': '"""maonamata.com.br"""', 'user': '"""<EMAIL>"""', 'passwd': '"""<PASSWORD>"""'}), "(host='maonamata.com.br', user='<EMAIL>', passwd='<PASSWORD>')\n", (6503, 6565), False, 'from ftplib import FTP\n'), ((6790, 6812), 'django.http.HttpResponse', 'HttpResponse', (['response'], {}), '(response)\n', (6802, 6812), False, 'from django.http import HttpResponse, JsonResponse\n'), ((7148, 7167), 'rest_framework.response.Response', 'Response', (['"""GET API"""'], {}), "('GET API')\n", (7156, 7167), False, 'from 
rest_framework.response import Response\n'), ((7407, 7425), 'rest_framework.response.Response', 'Response', (['response'], {}), '(response)\n', (7415, 7425), False, 'from rest_framework.response import Response\n'), ((1692, 1733), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {'status': '(201)'}), '(serializer.data, status=201)\n', (1704, 1733), False, 'from django.http import HttpResponse, JsonResponse\n'), ((1845, 1857), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (1855, 1857), False, 'from rest_framework.parsers import JSONParser\n'), ((2691, 2734), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (2703, 2734), False, 'from django.http import HttpResponse, JsonResponse\n'), ((2865, 2877), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (2875, 2877), False, 'from rest_framework.parsers import JSONParser\n'), ((3431, 3455), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(404)'}), '(status=404)\n', (3443, 3455), False, 'from django.http import HttpResponse, JsonResponse\n'), ((3623, 3635), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (3633, 3635), False, 'from rest_framework.parsers import JSONParser\n'), ((4425, 4468), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (4437, 4468), False, 'from django.http import HttpResponse, JsonResponse\n'), ((4611, 4635), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(404)'}), '(status=404)\n', (4623, 4635), False, 'from django.http import HttpResponse, JsonResponse\n'), ((5503, 5527), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(404)'}), '(status=404)\n', (5515, 5527), False, 'from django.http import HttpResponse, JsonResponse\n'), ((5915, 5958), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), 
'(serializer.errors, status=400)\n', (5927, 5958), False, 'from django.http import HttpResponse, JsonResponse\n'), ((6860, 6884), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(404)'}), '(status=404)\n', (6872, 6884), False, 'from django.http import HttpResponse, JsonResponse\n'), ((1176, 1188), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (1186, 1188), False, 'from rest_framework.parsers import JSONParser\n'), ((2056, 2064), 'vigilancia.order_screenshot.Orders', 'Orders', ([], {}), '()\n', (2062, 2064), False, 'from vigilancia.order_screenshot import Orders\n'), ((2634, 2675), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {'status': '(201)'}), '(serializer.data, status=201)\n', (2646, 2675), False, 'from django.http import HttpResponse, JsonResponse\n'), ((4368, 4409), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {'status': '(201)'}), '(serializer.data, status=201)\n', (4380, 4409), False, 'from django.http import HttpResponse, JsonResponse\n'), ((5870, 5899), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {}), '(serializer.data)\n', (5882, 5899), False, 'from django.http import HttpResponse, JsonResponse\n'), ((6036, 6060), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(204)'}), '(status=204)\n', (6048, 6060), False, 'from django.http import HttpResponse, JsonResponse\n'), ((2476, 2488), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (2486, 2488), False, 'from rest_framework.parsers import JSONParser\n'), ((4208, 4220), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (4218, 4220), False, 'from rest_framework.parsers import JSONParser\n'), ((5702, 5714), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (5712, 5714), False, 'from rest_framework.parsers import JSONParser\n'), ((4908, 4922), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4920, 4922), False, 'from datetime import 
datetime\n')]
|
'''
@author: <NAME>
Created on May 12, 2011
'''
from django.conf.urls.defaults import patterns
import handlers
# URL routing for the notification resource.
# NOTE(review): `patterns()` lives in `django.conf.urls.defaults`, which was
# deprecated in Django 1.4 and removed in later releases — confirm the target
# Django version before reusing this module.
urlpatterns = patterns(
    '',
    (r'^notification/$',
        handlers.notification_resource,
        {},
        'notification'),
)
|
[
"django.conf.urls.defaults.patterns"
] |
[((127, 216), 'django.conf.urls.defaults.patterns', 'patterns', (['""""""', "('^notification/$', handlers.notification_resource, {}, 'notification')"], {}), "('', ('^notification/$', handlers.notification_resource, {},\n 'notification'))\n", (135, 216), False, 'from django.conf.urls.defaults import patterns\n')]
|
"""
A basic python script that demonstrates how to use the FST8 reader, writer, and wrapper in a purely
python setting. These functions are constructed to provide a simple interface for controlling FAST
programmatically with minimal additional dependencies.
"""
# Hacky way of doing relative imports
from __future__ import print_function
import os, sys, time
import multiprocessing as mp
# sys.path.insert(0, os.path.abspath(".."))
from ROSCO_toolbox.ofTools.fast_io.FAST_reader import InputReader_Common, InputReader_OpenFAST, InputReader_FAST7
from ROSCO_toolbox.ofTools.fast_io.FAST_writer import InputWriter_Common, InputWriter_OpenFAST, InputWriter_FAST7
from ROSCO_toolbox.ofTools.fast_io.FAST_wrapper import FastWrapper
from ROSCO_toolbox.ofTools.fast_io.FAST_post import FAST_IO_timeseries
import numpy as np
class runFAST_pywrapper(object):
    """Python wrapper for running a single FAST/OpenFAST simulation.

    Reads an input model (native FAST files or a yaml), applies
    case-specific variable and output-channel changes, writes the model to
    `FAST_runDirectory`, and executes the FAST binary via `FastWrapper`.
    """

    def __init__(self, **kwargs):
        self.FAST_ver = 'OPENFAST'  # one of: FAST7, FAST8, OPENFAST
        self.FAST_exe = None
        self.FAST_InputFile = None
        self.FAST_directory = None
        self.FAST_runDirectory = None
        self.FAST_namingOut = None
        self.read_yaml = False
        self.write_yaml = False
        self.fst_vt = {}
        self.case = {}      # dictionary of variable values to change
        self.channels = {}  # dictionary of output channels to change
        self.debug_level = 0
        # True: existing output files will be overwritten.
        # False: if an output file with the same name already exists, OpenFAST
        # WILL NOT RUN; primarily for code debugging with OpenFAST in the loop
        # or optimization workflows where OpenFAST runs only periodically.
        self.overwrite_outfiles = True

        # Optional population of class attributes from keyword arguments
        for (k, w) in kwargs.items():
            try:
                setattr(self, k, w)
            except Exception:
                pass

        super(runFAST_pywrapper, self).__init__()

    def execute(self):
        """Write the FAST model for this case, run the executable, and return
        the path of the expected binary output file (*.outb)."""
        # FAST version specific initialization
        fast_ver = self.FAST_ver.lower()
        if fast_ver == 'fast7':
            reader = InputReader_FAST7(FAST_ver=self.FAST_ver)
            writer = InputWriter_FAST7(FAST_ver=self.FAST_ver)
        elif fast_ver in ['fast8', 'openfast']:
            reader = InputReader_OpenFAST(FAST_ver=self.FAST_ver)
            writer = InputWriter_OpenFAST(FAST_ver=self.FAST_ver)
        else:
            # Previously an unrecognized version fell through to a NameError
            # on `reader` below; fail early with a clear message instead.
            raise ValueError('Unknown FAST_ver `%s`; expected FAST7, FAST8 '
                             'or OPENFAST' % self.FAST_ver)
        wrapper = FastWrapper(FAST_ver=self.FAST_ver,
                              debug_level=self.debug_level)

        # Read input model from FAST files or a yaml, unless fst_vt was
        # supplied directly
        if self.fst_vt == {}:
            if self.read_yaml:
                reader.FAST_yamlfile = self.FAST_yamlfile_in
                reader.read_yaml()
            else:
                reader.FAST_InputFile = self.FAST_InputFile
                reader.FAST_directory = self.FAST_directory
                reader.execute()
            # Initialize writer variables with input model
            writer.fst_vt = reader.fst_vt
        else:
            writer.fst_vt = self.fst_vt
        writer.FAST_runDirectory = self.FAST_runDirectory
        writer.FAST_namingOut = self.FAST_namingOut
        # Make any case specific variable changes
        if self.case:
            writer.update(fst_update=self.case)
        # Modify any specified output channels
        if self.channels:
            writer.update_outlist(self.channels)
        # Write out FAST model
        writer.execute()
        if self.write_yaml:
            writer.FAST_yamlfile = self.FAST_yamlfile_out
            writer.write_yaml()

        # Run FAST
        wrapper.FAST_exe = self.FAST_exe
        wrapper.FAST_InputFile = os.path.split(writer.FAST_InputFileOut)[1]
        wrapper.FAST_directory = os.path.split(writer.FAST_InputFileOut)[0]

        FAST_Output = os.path.join(
            wrapper.FAST_directory, wrapper.FAST_InputFile[:-3] + 'outb')
        FAST_Output_txt = os.path.join(
            wrapper.FAST_directory, wrapper.FAST_InputFile[:-3] + 'out')

        # Respect overwrite_outfiles: skip the run when an output file already
        # exists and overwriting is disabled.
        # TODO: move this further up in the workflow for minor computation savings
        if self.overwrite_outfiles or not (os.path.exists(FAST_Output) or
                                           os.path.exists(FAST_Output_txt)):
            wrapper.execute()
        else:
            if self.debug_level > 0:
                print('OpenFAST not executed: output file "%s" already '
                      'exists. To overwrite this output file, set '
                      '"overwrite_outfiles = True".' % FAST_Output)

        return FAST_Output
class runFAST_pywrapper_batch(object):
    """Run a batch of FAST/OpenFAST cases serially, via multiprocessing, or
    via MPI.

    Each case is a pair of an fst-variable update dict (`case_list`) and a
    run name (`case_name_list`); all other attributes are shared across the
    batch and forwarded to `runFAST_pywrapper` through the module-level
    `eval` function.
    """

    def __init__(self, **kwargs):
        self.FAST_ver = 'OpenFAST'
        # Default executable location, relative to this file's repository root
        run_dir = os.path.dirname( os.path.dirname( os.path.dirname( os.path.realpath(__file__) ) ) ) + os.sep
        self.FAST_exe = os.path.join(run_dir, 'local/bin/openfast')  # Path to executable
        self.FAST_InputFile = None
        self.FAST_directory = None
        self.FAST_runDirectory = None
        self.debug_level = 0

        self.read_yaml = False
        self.FAST_yamlfile_in = ''
        self.fst_vt = {}
        self.write_yaml = False
        self.FAST_yamlfile_out = ''

        self.case_list = []
        self.case_name_list = []
        self.channels = {}

        self.overwrite_outfiles = True

        self.post = None  # optional post-processing callable, e.g. FAST_IO_timeseries

        # Optional population of class attributes from keyword arguments
        for (k, w) in kwargs.items():
            try:
                setattr(self, k, w)
            except Exception:
                pass

        super(runFAST_pywrapper_batch, self).__init__()

    def _case_data(self, i):
        """Assemble the 16-element per-case argument list for case `i`.

        The order must match the signature of the module-level `eval`
        function (this list is what `eval_multi` unpacks)."""
        return [self.case_list[i],
                self.case_name_list[i],
                self.FAST_ver,
                self.FAST_exe,
                self.FAST_runDirectory,
                self.FAST_InputFile,
                self.FAST_directory,
                self.read_yaml,
                self.FAST_yamlfile_in,
                self.fst_vt,
                self.write_yaml,
                self.FAST_yamlfile_out,
                self.channels,
                self.debug_level,
                self.overwrite_outfiles,
                self.post]

    def run_serial(self):
        """Run the batch one case at a time; returns the list of per-case
        results (post-processed output, or [] when `post` is unset)."""
        if not os.path.exists(self.FAST_runDirectory):
            os.makedirs(self.FAST_runDirectory)

        out = [None]*len(self.case_list)
        for i in range(len(self.case_list)):
            out[i] = eval(*self._case_data(i))

        return out

    def run_multi(self, cores=None):
        """Run cases in parallel, threaded with the multiprocessing module;
        `cores` defaults to the machine's CPU count."""
        if not os.path.exists(self.FAST_runDirectory):
            os.makedirs(self.FAST_runDirectory)

        if not cores:
            cores = mp.cpu_count()
        pool = mp.Pool(cores)

        case_data_all = [self._case_data(i)
                         for i in range(len(self.case_list))]

        output = pool.map(eval_multi, case_data_all)
        pool.close()
        pool.join()

        return output

    def run_mpi(self, mpi_comm_map_down):
        """Run cases in parallel with MPI.

        Parameters
        ----------
        mpi_comm_map_down : dict
            Maps this (master) rank to the list of worker ranks it may
            dispatch cases to.
        """
        from mpi4py import MPI

        # mpi comm management
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        sub_ranks = mpi_comm_map_down[rank]
        size = len(sub_ranks)

        N_cases = len(self.case_list)
        N_loops = int(np.ceil(float(N_cases)/float(size)))

        # file management (only the master rank creates the run directory)
        if not os.path.exists(self.FAST_runDirectory) and rank == 0:
            os.makedirs(self.FAST_runDirectory)

        case_data_all = [self._case_data(i) for i in range(N_cases)]

        output = []
        for i in range(N_loops):
            idx_s = i*size
            idx_e = min((i+1)*size, N_cases)

            # Dispatch one chunk of cases to the worker ranks ...
            for j, case_data in enumerate(case_data_all[idx_s:idx_e]):
                data = [eval_multi, case_data]
                rank_j = sub_ranks[j]
                comm.send(data, dest=rank_j, tag=0)

            # ... then collect their results in the same order
            for j, case_data in enumerate(case_data_all[idx_s:idx_e]):
                rank_j = sub_ranks[j]
                data_out = comm.recv(source=rank_j, tag=1)
                output.append(data_out)

        return output
def eval(case, case_name, FAST_ver, FAST_exe, FAST_runDirectory, FAST_InputFile, FAST_directory, read_yaml, FAST_yamlfile_in, fst_vt, write_yaml, FAST_yamlfile_out, channels, debug_level, overwrite_outfiles, post):
    # Batch FAST pyWrapper call; kept as a module-level function (outside the
    # runFAST_pywrapper_batch class) so it is pickle-able for multiprocessing.
    fast = runFAST_pywrapper(FAST_ver=FAST_ver,
                             FAST_exe=FAST_exe,
                             FAST_InputFile=FAST_InputFile,
                             FAST_directory=FAST_directory,
                             FAST_runDirectory=FAST_runDirectory,
                             read_yaml=read_yaml,
                             FAST_yamlfile_in=FAST_yamlfile_in,
                             fst_vt=fst_vt,
                             write_yaml=write_yaml,
                             FAST_yamlfile_out=FAST_yamlfile_out,
                             FAST_namingOut=case_name,
                             case=case,
                             channels=channels,
                             debug_level=debug_level,
                             overwrite_outfiles=overwrite_outfiles)

    FAST_Output = fast.execute()

    # Optional post-processing of the raw FAST output
    return post(FAST_Output) if post else []
def eval_multi(data):
    # Helper for multiprocessing.Pool.map: unpacks the 16-element argument
    # list assembled by runFAST_pywrapper_batch into positional arguments.
    return eval(*data[:16])
def example_runFAST_pywrapper_batch():
    """
    Example of running a batch of cases, in serial or in parallel.
    """
    fastBatch = runFAST_pywrapper_batch(FAST_ver='OpenFAST')

    # Paths to the OpenFAST executable and input model
    fastBatch.FAST_exe = '/projects/windse/importance_sampling/WT_Codes/openfast/build/glue-codes/openfast/openfast'   # Path to executable
    fastBatch.FAST_InputFile = '5MW_Land_DLL_WTurb.fst'   # FAST input file (ext=.fst)
    fastBatch.FAST_directory = "/projects/windse/importance_sampling/WISDEM/xloads_tc/templates/openfast/5MW_Land_DLL_WTurb-Shutdown"   # Path to fst directory files
    fastBatch.FAST_runDirectory = 'temp/OpenFAST'
    fastBatch.debug_level = 2
    fastBatch.post = FAST_IO_timeseries

    ## Generate case list using General Case Generator
    ## Specify several variables that change independently or collectly
    case_inputs = {}
    case_inputs[("Fst","TMax")] = {'vals':[5.], 'group':0}
    case_inputs[("InflowWind","WindType")] = {'vals':[1], 'group':0}
    case_inputs[("Fst","OutFileFmt")] = {'vals':[2], 'group':0}
    case_inputs[("InflowWind","HWindSpeed")] = {'vals':[8., 9., 10., 11., 12.], 'group':1}
    case_inputs[("ElastoDyn","RotSpeed")] = {'vals':[9.156, 10.296, 11.431, 11.89, 12.1], 'group':1}
    case_inputs[("ElastoDyn","BlPitch1")] = {'vals':[0., 0., 0., 0., 3.823], 'group':1}
    case_inputs[("ElastoDyn","BlPitch2")] = case_inputs[("ElastoDyn","BlPitch1")]
    case_inputs[("ElastoDyn","BlPitch3")] = case_inputs[("ElastoDyn","BlPitch1")]
    case_inputs[("ElastoDyn","GenDOF")] = {'vals':['True','False'], 'group':2}

    from CaseGen_General import CaseGen_General
    case_list, case_name_list = CaseGen_General(case_inputs, dir_matrix=fastBatch.FAST_runDirectory, namebase='testing')

    fastBatch.case_list = case_list
    fastBatch.case_name_list = case_name_list

    # BUGFIX: this example previously called `fastBatch.run_mpi()` with no
    # arguments, but run_mpi requires an `mpi_comm_map_down` mapping (and an
    # active MPI environment), so it always raised a TypeError. Run serially
    # by default; switch to run_multi(n) / run_mpi(mapping) as appropriate.
    fastBatch.run_serial()
    # fastBatch.run_multi(2)
def example_runFAST_CaseGenIEC():
    """Example: generate IEC design-load cases (with TurbSim wind files) and
    run them through OpenFAST in parallel."""
    from CaseGen_IEC import CaseGen_IEC

    case_gen = CaseGen_IEC()

    # --- Turbine data: initial conditions vs. wind speed ({} if unknown) ---
    case_gen.init_cond = {}
    case_gen.init_cond[("ElastoDyn", "RotSpeed")] = {
        'U': [3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16., 17., 18., 19., 20., 21., 22., 23., 24., 25]}
    case_gen.init_cond[("ElastoDyn", "RotSpeed")]['val'] = [
        6.972, 7.183, 7.506, 7.942, 8.469, 9.156, 10.296, 11.431, 11.89,
        12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1,
        12.1, 12.1, 12.1]
    case_gen.init_cond[("ElastoDyn", "BlPitch1")] = {
        'U': [3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16., 17., 18., 19., 20., 21., 22., 23., 24., 25]}
    case_gen.init_cond[("ElastoDyn", "BlPitch1")]['val'] = [
        0., 0., 0., 0., 0., 0., 0., 0., 0., 3.823, 6.602, 8.668, 10.450,
        12.055, 13.536, 14.920, 16.226, 17.473, 18.699, 19.941, 21.177,
        22.347, 23.469]
    # BlPitch2/3 share BlPitch1's schedule
    case_gen.init_cond[("ElastoDyn", "BlPitch2")] = case_gen.init_cond[("ElastoDyn", "BlPitch1")]
    case_gen.init_cond[("ElastoDyn", "BlPitch3")] = case_gen.init_cond[("ElastoDyn", "BlPitch1")]
    case_gen.Turbine_Class = 'I'      # I, II, III, IV
    case_gen.Turbulence_Class = 'A'
    case_gen.D = 126.
    case_gen.z_hub = 90.

    # --- DLC inputs ---
    case_gen.dlc_inputs = {
        'DLC':   [1.1, 1.5],
        'U':     [[8, 9, 10], [12]],
        'Seeds': [[5, 6, 7], []],
        'Yaw':   [[], []],
    }
    case_gen.transient_dir_change = 'both'         # '+','-','both': sign for transient events in EDC, EWS
    case_gen.transient_shear_orientation = 'both'  # 'v','h','both': vertical or horizontal shear for EWS

    # --- Naming, file management, etc ---
    case_gen.wind_dir = 'temp/wind'
    case_gen.case_name_base = 'testing'
    case_gen.Turbsim_exe = 'C:/Users/egaertne/WT_Codes/Turbsim_v2.00.07/bin/TurbSim_x64.exe'
    case_gen.debug_level = 2
    case_gen.parallel_windfile_gen = True
    case_gen.cores = 4
    case_gen.run_dir = 'temp/OpenFAST'

    # --- Run case generator / wind file writing ---
    case_inputs = {('Fst', 'OutFileFmt'): {'vals': [1], 'group': 0}}
    case_list, case_name_list, dlc_list = case_gen.execute(case_inputs=case_inputs)

    # --- Run FAST cases ---
    batch = runFAST_pywrapper_batch(FAST_ver='OpenFAST')
    batch.FAST_exe = 'C:/Users/egaertne/WT_Codes/openfast/build/glue-codes/fast/openfast.exe'   # Path to executable
    batch.FAST_InputFile = '5MW_Land_DLL_WTurb.fst'   # FAST input file (ext=.fst)
    batch.FAST_directory = 'C:/Users/egaertne/WT_Codes/models/openfast/glue-codes/fast/5MW_Land_DLL_WTurb'   # Path to fst directory files
    batch.FAST_runDirectory = case_gen.run_dir
    batch.case_list = case_list
    batch.case_name_list = case_name_list
    batch.debug_level = 2

    batch.run_multi(4)
def example_runFAST_pywrapper():
    """
    Example of reading, writing, and running FAST 7, 8 and OpenFAST.
    """
    FAST_ver = 'OpenFAST'
    fast = runFAST_pywrapper(FAST_ver=FAST_ver, debug_level=2)

    # Per-version model locations (paths unchanged from the original setup)
    setups = {
        'fast7': {
            'FAST_exe': 'C:/Users/egaertne/WT_Codes/FAST_v7.02.00d-bjj/FAST.exe',                      # Path to executable
            'FAST_InputFile': 'Test12.fst',                                                            # FAST input file (ext=.fst)
            'FAST_directory': 'C:/Users/egaertne/WT_Codes/models/FAST_v7.02.00d-bjj/CertTest/',        # Path to fst directory files
            'FAST_runDirectory': 'temp/FAST7',
            'FAST_namingOut': 'test',
        },
        'fast8': {
            'FAST_exe': 'C:/Users/egaertne/WT_Codes/FAST_v8.16.00a-bjj/bin/FAST_Win32.exe',            # Path to executable
            'FAST_InputFile': 'NREL5MW_onshore.fst',                                                   # FAST input file (ext=.fst)
            'FAST_directory': 'C:/Users/egaertne/WT_Codes/models/FAST_v8.16.00a-bjj/ref/5mw_onshore/', # Path to fst directory files
            'FAST_runDirectory': 'temp/FAST8',
            'FAST_namingOut': 'test',
        },
        'openfast': {
            'FAST_exe': 'C:/Users/egaertne/WT_Codes/openfast-dev/build/glue-codes/openfast/openfast.exe',  # Path to executable
            'FAST_InputFile': '5MW_OC3Spar_DLL_WTurb_WavesIrr.fst',                                        # FAST input file (ext=.fst)
            'FAST_directory': 'C:/Users/egaertne/WT_Codes/models/openfast-dev/r-test/glue-codes/openfast/5MW_OC3Spar_DLL_WTurb_WavesIrr',  # Path to fst directory files
            'FAST_runDirectory': 'temp/OpenFAST',
            'FAST_namingOut': 'test_run_spar',
            'read_yaml': False,
            'FAST_yamlfile_in': 'temp/OpenFAST/test.yaml',
            'write_yaml': False,
            'FAST_yamlfile_out': 'temp/OpenFAST/test.yaml',
        },
    }

    # Apply the configuration for the selected version (no-op for unknown
    # versions, matching the original if/elif chain)
    for attr, value in setups.get(FAST_ver.lower(), {}).items():
        setattr(fast, attr, value)

    fast.execute()
if __name__=="__main__":
    # Script entry point: runs the batch example by default; uncomment one of
    # the other example_* calls to exercise the single-run or IEC workflows.
    # example_runFAST_pywrapper()
    example_runFAST_pywrapper_batch()
    # example_runFAST_CaseGenIEC()
|
[
"CaseGen_IEC.CaseGen_IEC",
"ROSCO_toolbox.ofTools.fast_io.FAST_writer.InputWriter_OpenFAST",
"os.makedirs",
"ROSCO_toolbox.ofTools.fast_io.FAST_reader.InputReader_FAST7",
"ROSCO_toolbox.ofTools.fast_io.FAST_wrapper.FastWrapper",
"os.path.realpath",
"os.path.exists",
"multiprocessing.cpu_count",
"CaseGen_General.CaseGen_General",
"multiprocessing.Pool",
"ROSCO_toolbox.ofTools.fast_io.FAST_reader.InputReader_OpenFAST",
"os.path.split",
"os.path.join",
"ROSCO_toolbox.ofTools.fast_io.FAST_writer.InputWriter_FAST7"
] |
[((16453, 16545), 'CaseGen_General.CaseGen_General', 'CaseGen_General', (['case_inputs'], {'dir_matrix': 'fastBatch.FAST_runDirectory', 'namebase': '"""testing"""'}), "(case_inputs, dir_matrix=fastBatch.FAST_runDirectory,\n namebase='testing')\n", (16468, 16545), False, 'from CaseGen_General import CaseGen_General\n'), ((16795, 16808), 'CaseGen_IEC.CaseGen_IEC', 'CaseGen_IEC', ([], {}), '()\n', (16806, 16808), False, 'from CaseGen_IEC import CaseGen_IEC\n'), ((2548, 2613), 'ROSCO_toolbox.ofTools.fast_io.FAST_wrapper.FastWrapper', 'FastWrapper', ([], {'FAST_ver': 'self.FAST_ver', 'debug_level': 'self.debug_level'}), '(FAST_ver=self.FAST_ver, debug_level=self.debug_level)\n', (2559, 2613), False, 'from ROSCO_toolbox.ofTools.fast_io.FAST_wrapper import FastWrapper\n'), ((3920, 3994), 'os.path.join', 'os.path.join', (['wrapper.FAST_directory', "(wrapper.FAST_InputFile[:-3] + 'outb')"], {}), "(wrapper.FAST_directory, wrapper.FAST_InputFile[:-3] + 'outb')\n", (3932, 3994), False, 'import os, sys, time\n'), ((4019, 4092), 'os.path.join', 'os.path.join', (['wrapper.FAST_directory', "(wrapper.FAST_InputFile[:-3] + 'out')"], {}), "(wrapper.FAST_directory, wrapper.FAST_InputFile[:-3] + 'out')\n", (4031, 4092), False, 'import os, sys, time\n'), ((4926, 4969), 'os.path.join', 'os.path.join', (['run_dir', '"""local/bin/openfast"""'], {}), "(run_dir, 'local/bin/openfast')\n", (4938, 4969), False, 'import os, sys, time\n'), ((6700, 6714), 'multiprocessing.Pool', 'mp.Pool', (['cores'], {}), '(cores)\n', (6707, 6714), True, 'import multiprocessing as mp\n'), ((2233, 2274), 'ROSCO_toolbox.ofTools.fast_io.FAST_reader.InputReader_FAST7', 'InputReader_FAST7', ([], {'FAST_ver': 'self.FAST_ver'}), '(FAST_ver=self.FAST_ver)\n', (2250, 2274), False, 'from ROSCO_toolbox.ofTools.fast_io.FAST_reader import InputReader_Common, InputReader_OpenFAST, InputReader_FAST7\n'), ((2296, 2337), 'ROSCO_toolbox.ofTools.fast_io.FAST_writer.InputWriter_FAST7', 'InputWriter_FAST7', ([], {'FAST_ver': 
'self.FAST_ver'}), '(FAST_ver=self.FAST_ver)\n', (2313, 2337), False, 'from ROSCO_toolbox.ofTools.fast_io.FAST_writer import InputWriter_Common, InputWriter_OpenFAST, InputWriter_FAST7\n'), ((3774, 3813), 'os.path.split', 'os.path.split', (['writer.FAST_InputFileOut'], {}), '(writer.FAST_InputFileOut)\n', (3787, 3813), False, 'import os, sys, time\n'), ((3850, 3889), 'os.path.split', 'os.path.split', (['writer.FAST_InputFileOut'], {}), '(writer.FAST_InputFileOut)\n', (3863, 3889), False, 'import os, sys, time\n'), ((5875, 5913), 'os.path.exists', 'os.path.exists', (['self.FAST_runDirectory'], {}), '(self.FAST_runDirectory)\n', (5889, 5913), False, 'import os, sys, time\n'), ((5927, 5962), 'os.makedirs', 'os.makedirs', (['self.FAST_runDirectory'], {}), '(self.FAST_runDirectory)\n', (5938, 5962), False, 'import os, sys, time\n'), ((6539, 6577), 'os.path.exists', 'os.path.exists', (['self.FAST_runDirectory'], {}), '(self.FAST_runDirectory)\n', (6553, 6577), False, 'import os, sys, time\n'), ((6591, 6626), 'os.makedirs', 'os.makedirs', (['self.FAST_runDirectory'], {}), '(self.FAST_runDirectory)\n', (6602, 6626), False, 'import os, sys, time\n'), ((6670, 6684), 'multiprocessing.cpu_count', 'mp.cpu_count', ([], {}), '()\n', (6682, 6684), True, 'import multiprocessing as mp\n'), ((8232, 8267), 'os.makedirs', 'os.makedirs', (['self.FAST_runDirectory'], {}), '(self.FAST_runDirectory)\n', (8243, 8267), False, 'import os, sys, time\n'), ((2419, 2463), 'ROSCO_toolbox.ofTools.fast_io.FAST_reader.InputReader_OpenFAST', 'InputReader_OpenFAST', ([], {'FAST_ver': 'self.FAST_ver'}), '(FAST_ver=self.FAST_ver)\n', (2439, 2463), False, 'from ROSCO_toolbox.ofTools.fast_io.FAST_reader import InputReader_Common, InputReader_OpenFAST, InputReader_FAST7\n'), ((2485, 2529), 'ROSCO_toolbox.ofTools.fast_io.FAST_writer.InputWriter_OpenFAST', 'InputWriter_OpenFAST', ([], {'FAST_ver': 'self.FAST_ver'}), '(FAST_ver=self.FAST_ver)\n', (2505, 2529), False, 'from 
ROSCO_toolbox.ofTools.fast_io.FAST_writer import InputWriter_Common, InputWriter_OpenFAST, InputWriter_FAST7\n'), ((8166, 8204), 'os.path.exists', 'os.path.exists', (['self.FAST_runDirectory'], {}), '(self.FAST_runDirectory)\n', (8180, 8204), False, 'import os, sys, time\n'), ((4315, 4342), 'os.path.exists', 'os.path.exists', (['FAST_Output'], {}), '(FAST_Output)\n', (4329, 4342), False, 'import os, sys, time\n'), ((4346, 4377), 'os.path.exists', 'os.path.exists', (['FAST_Output_txt'], {}), '(FAST_Output_txt)\n', (4360, 4377), False, 'import os, sys, time\n'), ((4850, 4876), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (4866, 4876), False, 'import os, sys, time\n')]
|
"""
Prior class for use in pisa.core.Param objects
"""
from __future__ import absolute_import, division
from collections import Iterable, OrderedDict
from numbers import Number
from operator import setitem
import numpy as np
from scipy.interpolate import splev, splrep, interp1d
from scipy.optimize import fminbound
import pint
from pisa import ureg
from pisa.utils.comparisons import isbarenumeric, recursiveEquality
from pisa.utils.fileio import from_file
from pisa.utils.log import logging, set_verbosity
__all__ = ['Prior', 'plot_prior', 'get_prior_bounds', 'test_Prior',
'test_Prior_plot']
__author__ = '<NAME>'
__license__ = '''Copyright (c) 2014-2017, The IceCube Collaboration
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.'''
# TODO: uniform prior should take a constant, such that e.g. discrete parameter
# values when run separately will return valid comparisons across the
# discretely-chosen values (with different uniform priors)
# TODO: use units "natively" (not via strings) internal to the object; only
# serializing to json should convert to strings (and deserializing should
# convert from strings to Units objects)
# TODO: add a "to" and/or "ito" method for converting units akin to those
# methods in Pint quantities.
class Prior(object):
"""Prior information for a parameter. Defines the penalty (in
log-likelihood (llh)) for a parameter being at a given value (within the
prior's valid parameter range). Chi-squared penalties can also be returned
(but the *definition* of a prior here is always in terms of llh).
Note that since this is a penalty, the more negative the prior's log
likelihood, the greater the penalty and the less likely the parameter's
value is.
Valid parameters and properties of the object differ based upon what `kind`
of prior is specified.
Parameters
----------
kind='uniform', llh_offset=...
Uniform prior, no preference for any position relative to the valid
range, which is taken to be [-inf, +inf] [x-units].
kind='gaussian', mean=..., stddev=...
Gaussian prior, defining log likelihood penalty for parameter being at
any particular position. Valid range is [-inf, +inf] [x-units].
kind='linterp', param_vals=..., llh_vals=...
Linearly-interpolated prior. Note that "corners" in linear
interpolation may cause difficulties for some minimizers.
kind='spline', knots=..., coeffs=..., deg=...
Smooth spline interpolation.
Properties
----------
kind
max_at
max_at_str
state
valid_range
Additional properties are defined based on `kind`:
kind='uniform':
llh_offset
kind='gaussian':
mean
stddev
kind='linterp':
param_vals
llh_vals
kind='spline':
knots
coeffs
deg
Methods
-------
chi2
llh
Notes
-----
If the parameter the prior is being applied to has units, the prior's
"x"-values specification must have compatible units.
If you implement a new prior, it ***must*** raise an exception if methods
`llh` or `chi2` are called with a parameter value outside the prior's valid
range, so subtle bugs aren't introduced that appear as an issue in e.g. the
minimizer.
Examples
--------
For spline prior: knots, coeffs, and deg can be found by, e.g.,
scipy.interpolate.splrep; evaluation of spline priors is carried out
internally by scipy.interpolate.splev, so an exact match to the output of
the spline prior can be produced as follows:
>>> from scipy.interpolate import splrep, splev
>>> # Generate sample points
>>> param_vals = np.linspace(-10, 10, 100)
>>> llh_vals = param_vals**2
>>> # Define spline interpolant
>>> knots, coeffs, deg = splrep(param_vals, llh_vals)
>>> # Instantiate spline prior
>>> prior = Prior(kind='spline', knots=knots, coeffs=coeffs, deg=deg)
>>> # Generate sample points for interpolation
>>> param_upsamp = np.linspace(-10, 10, 1000)
>>> # Evaluation of spline using splev
>>> llh_upsamp = splev(param_upsamp, tck=(knots, coeffs, deg), ext=2)
>>> # Check that evaluation of spline matches call to prior.llh()
>>> all(prior.llh(param_upsamp) == llh_upsamp)
True
"""
def __init__(self, kind, **kwargs):
self._state_attrs = ['kind', 'max_at', 'units', 'valid_range']
self.units = None
kind = kind.lower() if isinstance(kind, basestring) else kind
self.chi2 = lambda x: -2*self.llh(x)
# Dispatch the correct initialization method
if kind in [None, 'none', 'uniform']:
self.__init_uniform(**kwargs)
elif kind == 'gaussian':
self.__init_gaussian(**kwargs)
elif kind == 'linterp':
self.__init_linterp(**kwargs)
elif kind == 'spline':
self.__init_spline(**kwargs)
elif kind == 'jeffreys':
self.__init_jeffreys(**kwargs)
else:
raise TypeError('Unknown Prior kind `' + str(kind) + '`')
@property
def units_str(self):
if self.units is None:
return ''
return ' ' + format(ureg(self.units).units, '~').strip()
    def __str__(self):
        # Delegate to the kind-specific formatter installed by the
        # __init_* method that set up this prior.
        return self._str(self)
def __repr__(self):
return '<' + str(self.__class__) + ' ' + self.__str__() + '>'
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return recursiveEquality(self.state, other.state)
def __ne__(self, other):
return not self.__eq__(other)
@property
def state(self):
state = OrderedDict()
for attr in self._state_attrs:
setitem(state, attr, getattr(self, attr))
return state
    def __init_uniform(self, llh_offset=0):
        """Set up a uniform prior: constant llh (= `llh_offset`) everywhere,
        valid over (-inf, +inf) in the parameter's units."""
        self._state_attrs.extend(['llh_offset'])
        self.kind = 'uniform'
        self.llh_offset = llh_offset
        def llh(x):
            # 0.*x preserves the shape of (array-valued) input while the
            # units/magnitude are handled by __strip
            return 0.*self.__strip(x) + self.llh_offset
        self.llh = llh
        # A flat prior has no unique maximum
        self.max_at = np.nan
        self.max_at_str = 'no maximum'
        self.valid_range = (-np.inf * ureg(self.units),
                            np.inf * ureg(self.units))
        self._str = lambda s: 'uniform prior, llh_offset=%s' %self.llh_offset
    def __init_jeffreys(self, A, B):
        """Calculate jeffreys prior as defined in Sivia p.125: llh goes as
        -log(x) between the bounds A and B (maximum at the lower bound A)."""
        self.kind = 'jeffreys'
        # Promote bare numbers to dimensionless quantities so the
        # dimensionality comparison below is always well defined
        if isinstance(A, Number):
            A = A * ureg.dimensionless
        if isinstance(B, Number):
            B = B * ureg.dimensionless
        assert A.dimensionality == B.dimensionality
        self._state_attrs.extend(['A', 'B'])
        if isinstance(A, ureg.Quantity):
            self.units = str(A.units)
            assert isinstance(B, ureg.Quantity), '%s' %type(B)
            # Express B in the same units as A
            B = B.to(self.units)
        self.A = A
        self.B = B
        def llh(x):
            x = self.__strip(self.__convert(x))
            A = self.__strip(self.A)
            B = self.__strip(self.B)
            # NOTE(review): the normalized Jeffreys llh would be
            # -log(x) - log(log(B/A)); the '+' sign on the second term here
            # only shifts llh by a constant (no effect on minimization) —
            # confirm intent before relying on absolute llh values.
            return - np.log(x) + np.log(np.log(B)-np.log(A))
        self.llh = llh
        # 1/x is maximal at the lower bound
        self.max_at = self.A
        self.max_at_str = self.__stringify(self.max_at)
        self.valid_range = (self.A * ureg(self.units),
                            self.B * ureg(self.units))
        self._str = lambda s: "jeffreys' prior, range [%s,%s]"%(self.A, self.B)
    def __init_gaussian(self, mean, stddev):
        """Set up a Gaussian prior centered at `mean` with width `stddev`:
        llh(x) = -(x-mean)^2 / (2*stddev^2), valid over (-inf, +inf)."""
        # Promote bare numbers to dimensionless quantities so the
        # dimensionality comparison below is always well defined
        if isinstance(mean, Number):
            mean = mean * ureg.dimensionless
        if isinstance(stddev, Number):
            stddev = stddev * ureg.dimensionless
        assert mean.dimensionality == stddev.dimensionality
        self._state_attrs.extend(['mean', 'stddev'])
        self.kind = 'gaussian'
        if isinstance(mean, ureg.Quantity):
            self.units = str(mean.units)
            assert isinstance(stddev, ureg.Quantity), \
                    str(type(stddev))
            # Express stddev in the same units as mean
            stddev = stddev.to(self.units)
        self.mean = mean
        self.stddev = stddev
        def llh(x):
            # Convert to the prior's units and strip units before the
            # numerical computation
            x = self.__strip(self.__convert(x))
            m = self.__strip(self.mean)
            s = self.__strip(self.stddev)
            return -(x-m)**2 / (2*s**2)
        self.llh = llh
        self.max_at = self.mean
        self.max_at_str = self.__stringify(self.max_at)
        self.valid_range = (-np.inf * ureg(self.units),
                            np.inf * ureg(self.units))
        self._str = lambda s: 'gaussian prior: stddev=%s%s, maximum at %s%s' \
                %(self.__stringify(self.stddev), self.units_str,
                  self.__stringify(self.mean), self.units_str)
    def __init_linterp(self, param_vals, llh_vals):
        """Set up a prior that linearly interpolates tabulated llh values.

        Parameters
        ----------
        param_vals : sequence or ureg.Quantity
            Parameter sample points (need not be sorted; bare sequences are
            treated as dimensionless).
        llh_vals : sequence
            Log-likelihood value at each entry of `param_vals`.
        """
        if not isinstance(param_vals, ureg.Quantity):
            param_vals = param_vals * ureg.dimensionless
        # Record which attributes define this prior's serializable state.
        self._state_attrs.extend(['param_vals', 'llh_vals'])
        self.kind = 'linterp'
        if isinstance(param_vals, ureg.Quantity):
            self.units = str(param_vals.units)
        # bounds_error=True makes evaluation outside the tabulated range
        # raise ValueError rather than extrapolate.
        self.interp = interp1d(param_vals, llh_vals, kind='linear', copy=True,
                               bounds_error=True, assume_sorted=False)
        self.param_vals = param_vals
        self.llh_vals = llh_vals
        def llh(x):
            # Convert/strip units, then evaluate the interpolant.
            x = self.__strip(self.__convert(x))
            return self.interp(x)
        self.llh = llh
        # All tabulated points attaining the max llh (can be more than one).
        self.max_at = self.param_vals[self.llh_vals == np.max(self.llh_vals)]
        self.max_at_str = ', '.join([self.__stringify(v) for v in self.max_at])
        self.valid_range = (np.min(self.param_vals) * ureg(self.units),
                            np.max(self.param_vals) * ureg(self.units))
        self._str = lambda s: 'linearly-interpolated prior: valid in [%s, %s]%s, maxima at (%s)%s' \
                %(self.__stringify(np.min(self.param_vals)),
                  self.__stringify(np.max(self.param_vals)), self.units_str,
                  self.max_at_str, self.units_str)
    def __init_spline(self, knots, coeffs, deg, units=None):
        """Set up a prior from a B-spline representation of the llh.

        Parameters
        ----------
        knots, coeffs, deg
            Spline knots, coefficients, and degree, as produced e.g. by
            `scipy.interpolate.splrep`.
        units : string or None
            Units to attach to `knots` when they are passed as a bare array.
        """
        if not isinstance(knots, ureg.Quantity):
            if units is None:
                knots = knots * ureg.dimensionless
            else:
                knots = ureg.Quantity(np.asarray(knots), units)
        # Record which attributes define this prior's serializable state.
        self._state_attrs.extend(['knots', 'coeffs', 'deg', 'units'])
        self.kind = 'spline'
        if isinstance(knots, ureg.Quantity):
            self.units = str(knots.units)
        self.knots = knots
        self.coeffs = coeffs
        self.deg = deg
        def llh(x):
            # NOTE: closure captures the local `coeffs` and `deg` arguments
            # (not self.coeffs/self.deg); ext=2 raises ValueError outside
            # the knot range rather than extrapolating.
            x = self.__strip(self.__convert(x))
            return splev(x, tck=(self.__strip(self.knots), coeffs, deg), ext=2)
        self.llh = llh
        # Locate the llh maximum numerically by minimizing chi2 over the
        # knot range (chi2's minimum coincides with the llh maximum).
        self.max_at = fminbound(
            func=self.__attach_units_to_args(self.chi2),
            x1=np.min(self.__strip(self.knots)),
            x2=np.max(self.__strip(self.knots)),
        )
        if self.units is not None:
            self.max_at = self.max_at * ureg(self.units)
        self.max_at_str = self.__stringify(self.max_at)
        self.valid_range = (np.min(self.knots) * ureg(self.units),
                            np.max(self.knots) * ureg(self.units))
        self._str = lambda s: 'spline prior: deg=%d, valid in [%s, %s]%s; max at %s%s' \
                %(self.deg, self.__stringify(np.min(self.knots)),
                  self.__stringify(np.max(self.knots)), self.units_str,
                  self.max_at_str, self.units_str)
def __check_units(self, param_val):
if self.units is None:
if (isinstance(param_val, ureg.Quantity)
and param_val.dimensionality
!= ureg.dimensionless.dimensionality):
raise TypeError('Passed a value with units (%s), but this'
' prior has no units.' %param_val.units)
else:
if not isinstance(param_val, ureg.Quantity):
raise TypeError('Passed a value without units, but this prior'
' has units (%s).' %self.units)
if param_val.dimensionality != ureg(self.units).dimensionality:
raise TypeError('Passed a value with units (%s);'
' incompatible with prior units (%s)'
%(param_val.units, self.units))
def __convert(self, x):
if self.units is None:
if (isinstance(x, ureg.Quantity)
and x.dimensionality != ureg.dimensionless.dimensionality):
raise TypeError('No units on prior, so cannot understand'
' passed value (with units): %s' %x)
return x
if not isinstance(x, ureg.Quantity):
raise TypeError('Units %s must be present on param values (got'
' %s, type %s instead).'
% (self.units, x, type(x)))
return x.to(self.units)
@staticmethod
def __strip(x):
if isinstance(x, ureg.Quantity):
return x.magnitude
return x
def __stringify(self, x):
if self.units is not None:
x = x.to(self.units).magnitude
return format(x, '0.4e')
# TODO: proper function wrapping, including @wraps decorator
def __attach_units_to_args(self, func):
def newfunc(*args):
if self.units is None:
return func(*args)
u = ureg(self.units)
unitized_args = tuple([u*arg for arg in args])
return func(*unitized_args)
return newfunc
def plot_prior(obj, param=None, x_xform=None, ax1=None, ax2=None, **plt_kwargs):
    """Plot prior for param from template settings, params, or prior filename
    or dict.
    Arguments
    ---------
    obj : str or dict
        if str, interpret as path from which to load a dict
        if (nested) dict, (innermost) must be dict of prior properties :
        either supply `param` to choose which parameter's prior in `obj`
        to plot, or prior dict, in which case `param` need not be specified
    param
        Param name to plot; necessary if obj is either pipeline settings or
        params dict
    x_xform
        Transform to apply to x-values. E.g., to plot against sin^2 theta, use
        x_xform = lambda x: np.sin(x)**2
    ax1, ax2
        Axes onto which to plot LLH and chi-squared, respectively. If none are
        provided, new figures & axes will be created.
    plt_kwargs
        Keyword arguments to pass on to the plot function
    Returns
    -------
    ax1, ax2
        The axes onto which plots were drawn (ax1 = LLH, ax2 = chi^2)
    """
    import matplotlib as mpl
    # Select the non-interactive PDF backend before pyplot is imported.
    mpl.use('pdf')
    import matplotlib.pyplot as plt
    # NOTE(review): `basestring` is Python-2-only; under Python 3 this line
    # raises NameError -- confirm the intended Python version.
    if isinstance(obj, basestring):
        obj = from_file(obj)
    # Drill down: settings/params dict -> param entry -> prior properties.
    if param is not None and param in obj:
        obj = obj[param]
    if 'prior' in obj:
        obj = obj['prior']
    prior = Prior(**obj)
    logging.info('Plotting Prior: %s', prior)
    x0 = prior.valid_range[0]
    x1 = prior.valid_range[1]
    if prior.kind == 'gaussian':
        # Gaussians are valid on the whole real line; restrict the plotting
        # window to +/- 5 sigma around the maximum.
        x0 = max(x0, prior.max_at - 5*prior.stddev)
        x1 = min(x1, prior.max_at + 5*prior.stddev)
    # Fall back to [-1, +1] where the valid range is unbounded.
    if np.isinf(x0):
        x0 = -1
    if np.isinf(x1):
        x1 = +1
    # if prior.units is None, will result in dimensionless quantity
    x = ureg.Quantity(np.linspace(x0, x1, 5000), prior.units)
    llh = prior.llh(x)
    chi2 = prior.chi2(x)
    if x_xform is not None:
        x = x_xform(x)
    # Create figures/axes only when the caller did not supply them.
    if ax1 is None:
        f = plt.figure()
        ax1 = f.add_subplot(111)
    if ax2 is None:
        f = plt.figure()
        ax2 = f.add_subplot(111)
    ax1.plot(x, llh, **plt_kwargs)
    ax2.plot(x, chi2, **plt_kwargs)
    ax1.set_title(str(prior), fontsize=8, y=1.02)
    ax2.set_title(str(prior), fontsize=8, y=1.02)
    ax1.set_xlabel(param)
    ax2.set_xlabel(param)
    ax1.set_ylabel('LLH')
    ax2.set_ylabel(r'$\Delta\chi^2$')
    return ax1, ax2
def get_prior_bounds(obj, param=None, stddev=1.0):
    """Find where a parameter prior's chi-squared curve crosses the level(s)
    corresponding to the requested number(s) of standard deviations.

    Parameters
    ----------
    obj : string or Mapping
        If a string, a path from which a dict is loaded. The dict may be
        template settings (supply `param`), a params dict (supply `param`),
        or a prior-properties dict.
    param : Param
        Name of the parameter whose prior bounds are requested.
    stddev : float or Iterable of floats
        Number(s) of standard deviations.

    Returns
    -------
    bounds : OrderedDict
        Maps each requested `stddev` value to the list of crossing points.
    """
    if isbarenumeric(stddev):
        stddev = [stddev]
    elif isinstance(stddev, Iterable):
        stddev = list(stddev)
    bounds = OrderedDict((s, []) for s in stddev)
    if isinstance(obj, basestring):
        obj = from_file(obj)
    # Drill down to the innermost prior-properties dict.
    for key in ('params', param, 'prior'):
        if key is not None and key in obj:
            obj = obj[key]
    prior = Prior(**obj)
    logging.debug('Getting confidence region from prior: %s', prior)
    lo = prior.valid_range[0]
    hi = prior.valid_range[1]
    x = ureg.Quantity(np.linspace(lo, hi, 10000), prior.units)
    chi2 = prior.chi2(x)
    # Scan consecutive samples for crossings of each chi2 level (s**2).
    for i, xval in enumerate(x[:-1]):
        for s in stddev:
            level = s**2
            if chi2[i] > level and chi2[i+1] < level:
                # Falling crossing: record the left-hand sample.
                bounds[s].append(xval)
            elif chi2[i] < level and chi2[i+1] > level:
                # Rising crossing: record the right-hand sample.
                bounds[s].append(x[i+1])
    return bounds
# TODO enumerate all the cases rather than picking just a few.
# pylint: disable=unused-variable
def test_Prior():
    """Unit tests for Prior class"""
    def expect_raises(exc_types, func, arg):
        """Assert that func(arg) raises one of exc_types."""
        try:
            func(arg)
        except exc_types:
            pass
        else:
            assert False

    uniform = Prior(kind='uniform', llh_offset=1.5)
    gaussian = Prior(kind='gaussian', mean=10, stddev=1)
    x = np.linspace(-10, 10, 100)
    y = x**2
    linterp = Prior(kind='linterp', param_vals=x*ureg.meter,
                    llh_vals=y)
    param_vals = np.linspace(-10, 10, 100)
    llh_vals = x**2
    knots, coeffs, deg = splrep(param_vals, llh_vals)
    spline = Prior(kind='spline', knots=knots*ureg.foot, coeffs=coeffs,
                   deg=deg)
    # Spline prior must reproduce the spline it was built from.
    param_upsamp = np.linspace(-10, 10, 1000)*ureg.foot
    llh_upsamp = splev(param_upsamp, tck=(knots, coeffs, deg), ext=2)
    assert all(spline.llh(param_upsamp) == llh_upsamp)
    # Asking for param value outside of range should fail
    expect_raises(ValueError, linterp.llh, -1000*ureg.mile)
    expect_raises(ValueError, linterp.chi2, -1000*ureg.km)
    expect_raises(ValueError, spline.llh, -1000*ureg.meter)
    expect_raises(ValueError, spline.chi2, +1000*ureg.meter)
    # Asking for param value when units were used should fail
    expect_raises(TypeError, spline.llh, 10)
    # ... or vice versa
    expect_raises((TypeError, pint.DimensionalityError), gaussian.llh,
                  10*ureg.meter)
    logging.info('<< PASS : test_Prior >>')
# TODO: FIX ME
def test_Prior_plot(ts_fname, param_name='theta23'):
    """Produce plots roughly like NuFIT's 1D chi-squared projections.
    Parameters
    ----------
    ts_fname : string
    param_name : string
    Returns
    -------
    ax1, ax2 : Matplotlib.axis
        The plot axes are returned for further manipulation
    """
    import matplotlib as mpl
    mpl.use('pdf')
    import matplotlib.pyplot as plt
    stddev = [1, 2, 3, 4, 5]
    chi2 = [s**2 for s in stddev]
    ts = from_file(ts_fname)
    f1 = plt.figure(1) #,figsize=(8,14),dpi=60)
    f2 = plt.figure(2) #,figsize=(8,14),dpi=60)
    f1.clf()
    f2.clf()
    ax1 = f1.add_subplot(111)
    ax2 = f2.add_subplot(111)
    # Per-parameter plotting config: (x transform, x label, x limits).
    # Parameters not listed fall back to (None, param_name, None).
    sin2 = lambda x: np.sin(x)**2
    special_cases = {
        'theta12': (sin2, r'$\sin^2\theta_{12}$', (0.2, 0.42)),
        'theta23': (sin2, r'$\sin^2\theta_{23}$', (0.26, 0.74)),
        'theta13': (sin2, r'$\sin^2\theta_{13}$', (0.012, 0.032)),
        'deltam21': (lambda x: x*1e5,
                     r'$\Delta m^2_{21} \; {\rm[10^{-5}\;eV^2]}$',
                     (6.5, 8.7)),
        'deltam31': (lambda x: np.abs(x)*1e3,
                     r'$|\Delta m^2_{31}| \; {\rm[10^{-3}\;eV^2]}$',
                     (2.15, 2.8)),
        'deltacp': (None, r'$\delta_{\rm CP} \; {\rm [deg]}$', None),
    }
    x_xform, xlabel, xlim = special_cases.get(param_name,
                                              (None, param_name, None))
    ylim = 0, 15
    # Overlay normal- and inverted-hierarchy priors on the same axes.
    plot_prior(select_hierarchy(ts['params'], normal_hierarchy=True),
               param=param_name,
               x_xform=x_xform, ax1=ax1, ax2=ax2,
               color='r', label=r'${\rm NH}$')
    plot_prior(select_hierarchy(ts['params'], normal_hierarchy=False),
               param=param_name,
               x_xform=x_xform, ax1=ax1, ax2=ax2,
               color='b', linestyle='--', label=r'${\rm IH}$')
    ax1.set_ylim([-0.5*y for y in ylim[::-1]])
    ax2.set_ylim(ylim)
    plt.tight_layout()
    for ax in (ax1, ax2):
        ax.legend(loc='best', frameon=False)
        ax.set_xlim(xlim)
        ax.set_xlabel(xlabel)
        ax.grid(which='both', b=True)
        ax.set_title('')
    # Horizontal reference lines at each chi2 level on the chi2 plot.
    for c2 in chi2:
        ax2.plot(xlim, [c2, c2], 'k-', lw=1.0, alpha=0.4)
    return ax1, ax2
if __name__ == '__main__':
    # Script entry point: run the Prior unit tests at verbosity level 1.
    set_verbosity(1)
    test_Prior()
|
[
"numpy.abs",
"matplotlib.pyplot.figure",
"numpy.sin",
"pisa.utils.log.logging.info",
"scipy.interpolate.interp1d",
"matplotlib.pyplot.tight_layout",
"pisa.utils.log.logging.debug",
"pisa.utils.fileio.from_file",
"numpy.max",
"numpy.linspace",
"scipy.interpolate.splrep",
"pisa.utils.log.set_verbosity",
"numpy.asarray",
"numpy.isinf",
"pisa.utils.comparisons.isbarenumeric",
"numpy.min",
"matplotlib.use",
"collections.OrderedDict",
"pisa.ureg",
"numpy.log",
"pisa.utils.comparisons.recursiveEquality",
"scipy.interpolate.splev"
] |
[((15175, 15189), 'matplotlib.use', 'mpl.use', (['"""pdf"""'], {}), "('pdf')\n", (15182, 15189), True, 'import matplotlib as mpl\n'), ((15439, 15480), 'pisa.utils.log.logging.info', 'logging.info', (['"""Plotting Prior: %s"""', 'prior'], {}), "('Plotting Prior: %s', prior)\n", (15451, 15480), False, 'from pisa.utils.log import logging, set_verbosity\n'), ((15685, 15697), 'numpy.isinf', 'np.isinf', (['x0'], {}), '(x0)\n', (15693, 15697), True, 'import numpy as np\n'), ((15722, 15734), 'numpy.isinf', 'np.isinf', (['x1'], {}), '(x1)\n', (15730, 15734), True, 'import numpy as np\n'), ((17282, 17303), 'pisa.utils.comparisons.isbarenumeric', 'isbarenumeric', (['stddev'], {}), '(stddev)\n', (17295, 17303), False, 'from pisa.utils.comparisons import isbarenumeric, recursiveEquality\n'), ((17414, 17427), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (17425, 17427), False, 'from collections import Iterable, OrderedDict\n'), ((17740, 17804), 'pisa.utils.log.logging.debug', 'logging.debug', (['"""Getting confidence region from prior: %s"""', 'prior'], {}), "('Getting confidence region from prior: %s', prior)\n", (17753, 17804), False, 'from pisa.utils.log import logging, set_verbosity\n'), ((18548, 18573), 'numpy.linspace', 'np.linspace', (['(-10)', '(10)', '(100)'], {}), '(-10, 10, 100)\n', (18559, 18573), True, 'import numpy as np\n'), ((18697, 18722), 'numpy.linspace', 'np.linspace', (['(-10)', '(10)', '(100)'], {}), '(-10, 10, 100)\n', (18708, 18722), True, 'import numpy as np\n'), ((18768, 18796), 'scipy.interpolate.splrep', 'splrep', (['param_vals', 'llh_vals'], {}), '(param_vals, llh_vals)\n', (18774, 18796), False, 'from scipy.interpolate import splev, splrep, interp1d\n'), ((18970, 19022), 'scipy.interpolate.splev', 'splev', (['param_upsamp'], {'tck': '(knots, coeffs, deg)', 'ext': '(2)'}), '(param_upsamp, tck=(knots, coeffs, deg), ext=2)\n', (18975, 19022), False, 'from scipy.interpolate import splev, splrep, interp1d\n'), ((19922, 19961), 
'pisa.utils.log.logging.info', 'logging.info', (['"""<< PASS : test_Prior >>"""'], {}), "('<< PASS : test_Prior >>')\n", (19934, 19961), False, 'from pisa.utils.log import logging, set_verbosity\n'), ((20337, 20351), 'matplotlib.use', 'mpl.use', (['"""pdf"""'], {}), "('pdf')\n", (20344, 20351), True, 'import matplotlib as mpl\n'), ((20461, 20480), 'pisa.utils.fileio.from_file', 'from_file', (['ts_fname'], {}), '(ts_fname)\n', (20470, 20480), False, 'from pisa.utils.fileio import from_file\n'), ((20490, 20503), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (20500, 20503), True, 'import matplotlib.pyplot as plt\n'), ((20538, 20551), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (20548, 20551), True, 'import matplotlib.pyplot as plt\n'), ((22098, 22116), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (22114, 22116), True, 'import matplotlib.pyplot as plt\n'), ((22441, 22457), 'pisa.utils.log.set_verbosity', 'set_verbosity', (['(1)'], {}), '(1)\n', (22454, 22457), False, 'from pisa.utils.log import logging, set_verbosity\n'), ((6028, 6070), 'pisa.utils.comparisons.recursiveEquality', 'recursiveEquality', (['self.state', 'other.state'], {}), '(self.state, other.state)\n', (6045, 6070), False, 'from pisa.utils.comparisons import isbarenumeric, recursiveEquality\n'), ((6191, 6204), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (6202, 6204), False, 'from collections import Iterable, OrderedDict\n'), ((9556, 9656), 'scipy.interpolate.interp1d', 'interp1d', (['param_vals', 'llh_vals'], {'kind': '"""linear"""', 'copy': '(True)', 'bounds_error': '(True)', 'assume_sorted': '(False)'}), "(param_vals, llh_vals, kind='linear', copy=True, bounds_error=True,\n assume_sorted=False)\n", (9564, 9656), False, 'from scipy.interpolate import splev, splrep, interp1d\n'), ((15276, 15290), 'pisa.utils.fileio.from_file', 'from_file', (['obj'], {}), '(obj)\n', (15285, 15290), False, 'from pisa.utils.fileio import 
from_file\n'), ((15842, 15867), 'numpy.linspace', 'np.linspace', (['x0', 'x1', '(5000)'], {}), '(x0, x1, 5000)\n', (15853, 15867), True, 'import numpy as np\n'), ((16016, 16028), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (16026, 16028), True, 'import matplotlib.pyplot as plt\n'), ((16094, 16106), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (16104, 16106), True, 'import matplotlib.pyplot as plt\n'), ((17523, 17537), 'pisa.utils.fileio.from_file', 'from_file', (['obj'], {}), '(obj)\n', (17532, 17537), False, 'from pisa.utils.fileio import from_file\n'), ((17887, 17913), 'numpy.linspace', 'np.linspace', (['x0', 'x1', '(10000)'], {}), '(x0, x1, 10000)\n', (17898, 17913), True, 'import numpy as np\n'), ((18916, 18942), 'numpy.linspace', 'np.linspace', (['(-10)', '(10)', '(1000)'], {}), '(-10, 10, 1000)\n', (18927, 18942), True, 'import numpy as np\n'), ((13915, 13931), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (13919, 13931), False, 'from pisa import ureg\n'), ((6685, 6701), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (6689, 6701), False, 'from pisa import ureg\n'), ((6740, 6756), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (6744, 6756), False, 'from pisa import ureg\n'), ((7774, 7790), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (7778, 7790), False, 'from pisa import ureg\n'), ((7829, 7845), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (7833, 7845), False, 'from pisa import ureg\n'), ((8902, 8918), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (8906, 8918), False, 'from pisa import ureg\n'), ((8957, 8973), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (8961, 8973), False, 'from pisa import ureg\n'), ((9934, 9955), 'numpy.max', 'np.max', (['self.llh_vals'], {}), '(self.llh_vals)\n', (9940, 9955), True, 'import numpy as np\n'), ((10065, 10088), 'numpy.min', 'np.min', (['self.param_vals'], {}), '(self.param_vals)\n', (10071, 
10088), True, 'import numpy as np\n'), ((10091, 10107), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (10095, 10107), False, 'from pisa import ureg\n'), ((10137, 10160), 'numpy.max', 'np.max', (['self.param_vals'], {}), '(self.param_vals)\n', (10143, 10160), True, 'import numpy as np\n'), ((10163, 10179), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (10167, 10179), False, 'from pisa import ureg\n'), ((11454, 11470), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (11458, 11470), False, 'from pisa import ureg\n'), ((11555, 11573), 'numpy.min', 'np.min', (['self.knots'], {}), '(self.knots)\n', (11561, 11573), True, 'import numpy as np\n'), ((11576, 11592), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (11580, 11592), False, 'from pisa import ureg\n'), ((11622, 11640), 'numpy.max', 'np.max', (['self.knots'], {}), '(self.knots)\n', (11628, 11640), True, 'import numpy as np\n'), ((11643, 11659), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (11647, 11659), False, 'from pisa import ureg\n'), ((20836, 20845), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (20842, 20845), True, 'import numpy as np\n'), ((7589, 7598), 'numpy.log', 'np.log', (['x'], {}), '(x)\n', (7595, 7598), True, 'import numpy as np\n'), ((10719, 10736), 'numpy.asarray', 'np.asarray', (['knots'], {}), '(knots)\n', (10729, 10736), True, 'import numpy as np\n'), ((12577, 12593), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (12581, 12593), False, 'from pisa import ureg\n'), ((20976, 20985), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (20982, 20985), True, 'import numpy as np\n'), ((7608, 7617), 'numpy.log', 'np.log', (['B'], {}), '(B)\n', (7614, 7617), True, 'import numpy as np\n'), ((7618, 7627), 'numpy.log', 'np.log', (['A'], {}), '(A)\n', (7624, 7627), True, 'import numpy as np\n'), ((10317, 10340), 'numpy.min', 'np.min', (['self.param_vals'], {}), '(self.param_vals)\n', (10323, 10340), True, 'import numpy as np\n'), 
((10378, 10401), 'numpy.max', 'np.max', (['self.param_vals'], {}), '(self.param_vals)\n', (10384, 10401), True, 'import numpy as np\n'), ((11795, 11813), 'numpy.min', 'np.min', (['self.knots'], {}), '(self.knots)\n', (11801, 11813), True, 'import numpy as np\n'), ((11851, 11869), 'numpy.max', 'np.max', (['self.knots'], {}), '(self.knots)\n', (11857, 11869), True, 'import numpy as np\n'), ((21117, 21126), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (21123, 21126), True, 'import numpy as np\n'), ((5721, 5737), 'pisa.ureg', 'ureg', (['self.units'], {}), '(self.units)\n', (5725, 5737), False, 'from pisa import ureg\n'), ((21416, 21425), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (21422, 21425), True, 'import numpy as np\n')]
|
# -*- coding: UTF-8 -*-
from flask import url_for, g, redirect
from flask_login import logout_user, current_user
from datetime import datetime
from importlib import import_module
from .. import db, login_manager
from ..models import User
from . import auth
@auth.route('/login/<string:authtype>')
def login_authorize(authtype):
    """Kick off the OAuth flow for the provider named by `authtype`.

    Each provider lives in a sibling module named after it, and that module
    is expected to expose an OAuth client object under the same name.
    """
    provider_module = import_module('.' + authtype, __package__)
    oauth = getattr(provider_module, authtype)
    callback = url_for('auth.{}_authorized'.format(authtype), _external=True)
    return oauth.authorize(callback=callback)
@auth.route('/logout')
def logout():
    """Log the current user out, then send them back to the site root."""
    logout_user()
    return redirect('/')
@auth.before_app_request
def before_request():
    """Expose the current user on `g` and refresh their last-seen time."""
    g.user = current_user
    if not g.user.is_authenticated:
        return
    # Persist an updated last-seen timestamp on every authenticated request.
    g.user.last_seen = datetime.utcnow()
    db.session.add(g.user)
    db.session.commit()
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login hook: look up a User by its (stringified) primary key."""
    return User.query.get(int(user_id))
|
[
"flask.redirect",
"datetime.datetime.utcnow",
"importlib.import_module",
"flask_login.logout_user"
] |
[((546, 559), 'flask_login.logout_user', 'logout_user', ([], {}), '()\n', (557, 559), False, 'from flask_login import logout_user, current_user\n'), ((571, 584), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (579, 584), False, 'from flask import url_for, g, redirect\n'), ((351, 393), 'importlib.import_module', 'import_module', (["('.' + authtype)", '__package__'], {}), "('.' + authtype, __package__)\n", (364, 393), False, 'from importlib import import_module\n'), ((719, 736), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (734, 736), False, 'from datetime import datetime\n')]
|
#!/usr/bin env python
import json
import mock
from tests.unit import AWSMockServiceTestCase
from boto.cloudsearch2.domain import Domain
from boto.cloudsearch2.layer1 import CloudSearchConnection
from boto.cloudsearchdomain.layer1 import CloudSearchDomainConnection
class CloudSearchDomainConnectionTest(AWSMockServiceTestCase):
    """Tests for CloudSearchDomainConnection request signing and setup.

    The repeated construction of a signing CloudSearchConnection + Domain
    is factored into the `_signed_domain` helper; unused `resp` locals
    from the original tests are dropped.
    """
    connection_class = CloudSearchDomainConnection

    domain_status = """{
        "SearchInstanceType": null,
        "DomainId": "1234567890/demo",
        "DomainName": "demo",
        "Deleted": false,
        "SearchInstanceCount": 0,
        "Created": true,
        "SearchService": {
            "Endpoint": "search-demo.us-east-1.cloudsearch.amazonaws.com"
        },
        "RequiresIndexDocuments": false,
        "Processing": false,
        "DocService": {
            "Endpoint": "doc-demo.us-east-1.cloudsearch.amazonaws.com"
        },
        "ARN": "arn:aws:cs:us-east-1:1234567890:domain/demo",
        "SearchPartitionCount": 0
    }"""

    def _signed_domain(self):
        """Return a Domain backed by a request-signing CloudSearchConnection."""
        layer1 = CloudSearchConnection(
            aws_access_key_id='aws_access_key_id',
            aws_secret_access_key='aws_secret_access_key',
            sign_request=True)
        return Domain(layer1=layer1, data=json.loads(self.domain_status))

    def create_service_connection(self, **kwargs):
        """Default the endpoint host so the mock connection can be built."""
        if kwargs.get('host', None) is None:
            kwargs['host'] = 'search-demo.us-east-1.cloudsearch.amazonaws.com'
        return super(CloudSearchDomainConnectionTest, self).\
            create_service_connection(**kwargs)

    def test_get_search_service(self):
        """Search service inherits the sign_request flag from layer1."""
        search_service = self._signed_domain().get_search_service()
        self.assertEqual(search_service.sign_request, True)

    def test_get_document_service(self):
        """Document service inherits the sign_request flag from layer1."""
        document_service = self._signed_domain().get_document_service()
        self.assertEqual(document_service.sign_request, True)

    def test_search_with_auth(self):
        """A signed search request carries an Authorization header."""
        search_service = self._signed_domain().get_search_service()
        response = {
            'rank': '-text_relevance',
            'match-expr': "Test",
            'hits': {
                'found': 30,
                'start': 0,
                'hit': {
                    'id': '12341',
                    'fields': {
                        'title': 'Document 1',
                        'rank': 1
                    }
                }
            },
            'status': {
                'rid': 'b7c167f6c2da6d93531b9a7b314ad030b3a74803b4b7797edb905ba5a6a08',
                'time-ms': 2,
                'cpu-time-ms': 0
            }
        }
        self.set_http_response(status_code=200,
                               body=json.dumps(response).encode('utf-8'))
        search_service.domain_connection = self.service_connection
        search_service.search()
        headers = self.actual_request.headers
        self.assertIsNotNone(headers.get('Authorization'))

    def test_upload_documents_with_auth(self):
        """A signed document upload carries an Authorization header."""
        document_service = self._signed_domain().get_document_service()
        response = {
            'status': 'success',
            'adds': 1,
            'deletes': 0,
        }
        document = {
            "id": "1234",
            "title": "Title 1",
            "category": ["cat_a", "cat_b", "cat_c"]
        }
        self.set_http_response(status_code=200,
                               body=json.dumps(response).encode('utf-8'))
        document_service.domain_connection = self.service_connection
        document_service.add("1234", document)
        document_service.commit()
        headers = self.actual_request.headers
        self.assertIsNotNone(headers.get('Authorization'))

    def test_no_host_provided(self):
        # A host must be provided or a error is thrown.
        with self.assertRaises(ValueError):
            CloudSearchDomainConnection(
                aws_access_key_id='aws_access_key_id',
                aws_secret_access_key='aws_secret_access_key'
            )
|
[
"boto.cloudsearch2.layer1.CloudSearchConnection",
"json.loads",
"boto.cloudsearchdomain.layer1.CloudSearchDomainConnection",
"json.dumps"
] |
[((1331, 1461), 'boto.cloudsearch2.layer1.CloudSearchConnection', 'CloudSearchConnection', ([], {'aws_access_key_id': '"""aws_access_key_id"""', 'aws_secret_access_key': '"""aws_secret_access_key"""', 'sign_request': '(True)'}), "(aws_access_key_id='aws_access_key_id',\n aws_secret_access_key='aws_secret_access_key', sign_request=True)\n", (1352, 1461), False, 'from boto.cloudsearch2.layer1 import CloudSearchConnection\n'), ((1785, 1915), 'boto.cloudsearch2.layer1.CloudSearchConnection', 'CloudSearchConnection', ([], {'aws_access_key_id': '"""aws_access_key_id"""', 'aws_secret_access_key': '"""aws_secret_access_key"""', 'sign_request': '(True)'}), "(aws_access_key_id='aws_access_key_id',\n aws_secret_access_key='aws_secret_access_key', sign_request=True)\n", (1806, 1915), False, 'from boto.cloudsearch2.layer1 import CloudSearchConnection\n'), ((2241, 2371), 'boto.cloudsearch2.layer1.CloudSearchConnection', 'CloudSearchConnection', ([], {'aws_access_key_id': '"""aws_access_key_id"""', 'aws_secret_access_key': '"""aws_secret_access_key"""', 'sign_request': '(True)'}), "(aws_access_key_id='aws_access_key_id',\n aws_secret_access_key='aws_secret_access_key', sign_request=True)\n", (2262, 2371), False, 'from boto.cloudsearch2.layer1 import CloudSearchConnection\n'), ((3578, 3708), 'boto.cloudsearch2.layer1.CloudSearchConnection', 'CloudSearchConnection', ([], {'aws_access_key_id': '"""aws_access_key_id"""', 'aws_secret_access_key': '"""aws_secret_access_key"""', 'sign_request': '(True)'}), "(aws_access_key_id='aws_access_key_id',\n aws_secret_access_key='aws_secret_access_key', sign_request=True)\n", (3599, 3708), False, 'from boto.cloudsearch2.layer1 import CloudSearchConnection\n'), ((4709, 4826), 'boto.cloudsearchdomain.layer1.CloudSearchDomainConnection', 'CloudSearchDomainConnection', ([], {'aws_access_key_id': '"""aws_access_key_id"""', 'aws_secret_access_key': '"""aws_secret_access_key"""'}), "(aws_access_key_id='aws_access_key_id',\n 
aws_secret_access_key='aws_secret_access_key')\n", (4736, 4826), False, 'from boto.cloudsearchdomain.layer1 import CloudSearchDomainConnection\n'), ((1580, 1610), 'json.loads', 'json.loads', (['self.domain_status'], {}), '(self.domain_status)\n', (1590, 1610), False, 'import json\n'), ((2034, 2064), 'json.loads', 'json.loads', (['self.domain_status'], {}), '(self.domain_status)\n', (2044, 2064), False, 'import json\n'), ((2490, 2520), 'json.loads', 'json.loads', (['self.domain_status'], {}), '(self.domain_status)\n', (2500, 2520), False, 'import json\n'), ((3827, 3857), 'json.loads', 'json.loads', (['self.domain_status'], {}), '(self.domain_status)\n', (3837, 3857), False, 'import json\n'), ((3262, 3282), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (3272, 3282), False, 'import json\n'), ((4257, 4277), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (4267, 4277), False, 'import json\n')]
|
"""
Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved
Licensed under the MIT No Attribution License (MIT-0) (the ‘License’). You may not use this file except in compliance
with the License. A copy of the License is located at
https://opensource.org/licenses/MIT-0
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH
THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
"""
Lambda function used as an AWS AppSync datasource to handle push notification message templates operations.
Message template is a feature in Amazon Pinpoint, so all requests are handled using the AWS Pinpoint SDK.
"""
import json
import boto3
from botocore.exceptions import ClientError
def handler(event, context):
    """
    AppSync datasource entry point: builds the message-template input from
    the event arguments and dispatches to the Pinpoint helper matching the
    requested operation (getMessage / createMessage / deleteMessage).
    """
    print('request: {}'.format(json.dumps(event, indent = 4)))
    pinpoint_client = boto3.client('pinpoint')
    arguments = event['arguments']
    message_input = {
        'template_name': arguments['template']
    }
    # The optional 'input' payload carries the template content fields.
    if 'input' in arguments:
        extra = arguments['input']
        for field in ('service', 'action', 'title', 'body'):
            message_input[field] = extra[field]
    dispatch = {
        'getMessage': get_message,
        'createMessage': create_message,
        'deleteMessage': delete_message
    }
    response = dispatch[event['operation']](pinpoint_client, message_input)
    print('response: {}'.format(json.dumps(response, indent = 4)))
    return response
def get_message(pinpoint_client, message_input):
    """
    Fetch the push notification message template named in `message_input`
    from Pinpoint and shape it into a message payload.

    Returns a MESSAGE_OK payload carrying the template's service, action,
    title and body, or a MESSAGE_ERROR payload if the template cannot be
    retrieved or holds no APNS/GCM configuration.
    """
    try:
        response_get_template = pinpoint_client.get_push_template(
            TemplateName = message_input['template_name']
        )
        push_template = response_get_template['PushNotificationTemplateResponse']
        # Prefer APNS when configured, fall back to GCM. Using .get() avoids
        # a KeyError when a channel key is absent; the original subscripting
        # also left `service` unbound (NameError) when both were falsy.
        if push_template.get('APNS'):
            service = 'APNS'
        elif push_template.get('GCM'):
            service = 'GCM'
        else:
            return create_error_payload(
                exception = 'TemplateError',
                message = 'Template has no APNS or GCM configuration',
                endpoint_id = ''
            )
        response = {
            'status': 'MESSAGE_OK',
            'message': {
                'service': service,
                'action': push_template[service]['Action'],
                'title': push_template[service]['Title'],
                'body': push_template[service]['Body']
            }
        }
    except ClientError as ex:
        response = create_error_payload(
            exception = 'ClientError',
            message = f'Unexpected error: {ex}',
            endpoint_id = ''
        )
    return response
def create_message(pinpoint_client, message_input):
    """
    Create a push notification message template in Pinpoint from the
    service/action/title/body carried in `message_input`.
    """
    template = message_input['template_name']
    service = message_input['service']
    # Pinpoint keys the template request by the channel (APNS/GCM) name.
    push_request = {
        service: {
            'Action': message_input['action'],
            'Title': message_input['title'],
            'Body': message_input['body']
        }
    }
    try:
        pinpoint_client.create_push_template(
            TemplateName = template,
            PushNotificationTemplateRequest = push_request
        )
        response = {
            'status': 'MESSAGE_CREATED',
            'message': f'Personalized {service} push message created for geofence {template}'
        }
    except ClientError as ex:
        response = create_error_payload(
            exception = 'ClientError',
            message = f'Unexpected error: {ex}',
            endpoint_id = ''
        )
    return response
def delete_message(pinpoint_client, message_input):
    """
    Delete the Pinpoint push-notification message template named in
    ``message_input['template_name']``.

    Args:
        pinpoint_client: Pinpoint client exposing ``delete_push_template``.
        message_input (dict): must contain 'template_name'.

    Returns:
        dict: a MESSAGE_DELETED status payload, or an error payload when
        the Pinpoint call fails with a ClientError.
    """
    try:
        template = message_input['template_name']
        pinpoint_client.delete_push_template(TemplateName = template)
        return {
            'status': 'MESSAGE_DELETED',
            'message': f'Personalized message deleted for geofence {template}'
        }
    except ClientError as ex:
        return create_error_payload(
            exception = 'ClientError',
            message = f'Unexpected error: {ex}',
            endpoint_id = ''
        )
def create_error_payload(exception, message, endpoint_id):
    """
    Build the standard error payload used by the message operations,
    logging the failure to stdout.

    Note: ``endpoint_id`` is accepted for call-site symmetry but is not
    currently included in the payload.
    """
    detail = f'{exception}: {message}'
    print(detail)
    return {
        'status': 'MESSAGE_ERROR',
        'message': detail
    }
|
[
"boto3.client",
"json.dumps"
] |
[((1809, 1833), 'boto3.client', 'boto3.client', (['"""pinpoint"""'], {}), "('pinpoint')\n", (1821, 1833), False, 'import boto3\n'), ((1754, 1781), 'json.dumps', 'json.dumps', (['event'], {'indent': '(4)'}), '(event, indent=4)\n', (1764, 1781), False, 'import json\n'), ((2491, 2521), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)'}), '(response, indent=4)\n', (2501, 2521), False, 'import json\n')]
|
#
# Copyright (c) 2021 Software AG, Darmstadt, Germany and/or its licensors
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Options"""
import logging
import os
from typing import Any
import functools
import click
from c8ylp.env import loadenv
from c8ylp.rest_client.c8yclient import CumulocityClient
def load_envfile(ctx: click.Context, _param: click.Parameter, value: Any):
    """Load environment variables from the file given as option value.

    Args:
        ctx (click.Context): Click context
        _param (click.Parameter): Click parameter
        value (Any): Parameter value (path to the env file)

    Returns:
        Any: The unchanged parameter value.
    """
    # Nothing to do without a value or during shell-completion parsing.
    if not value or ctx.resilient_parsing:
        return value
    click.echo(f"Loading env-file: {value}")
    is_regular_file = os.path.exists(value) and os.path.isfile(value)
    if is_regular_file:
        loadenv(value)
    else:
        logging.info("env file does not exist or is not a file: %s", value)
    return value
def deactivate_prompts(ctx: click.Context, _param: click.Parameter, value: Any):
    """Deactivate all prompts on the current command's options.

    Args:
        ctx (click.Context): Click Context
        _param (click.Parameter): Click Parameter
        value (Any): Parameter value

    Returns:
        Any: Parameter value
    """
    # Only act when the flag is set; otherwise leave prompts untouched.
    if not value:
        return value
    for option in ctx.command.params:
        if isinstance(option, click.Option) and option.prompt is not None:
            option.prompt = None
    return value
def validate_token(ctx: click.Context, _param, value) -> Any:
    """Validate a Cumulocity token against the configured host.

    An invalid or expired token is ignored (an empty string is returned)
    so that other credential options can take over.

    Args:
        ctx (Any): Click context
        _param (Any): Click param
        value (Any): Parameter value

    Returns:
        Any: The token, ``""`` when validation failed, or ``None`` when no
        value was given or parsing is resilient.
    """
    if not value or ctx.resilient_parsing:
        return None
    host = ctx.params.get("host")
    if not host:
        return value
    # NOTE: a falsy value already returned None above, so the original
    # second `if not value` guard was unreachable and has been removed.
    # NOTE(review): tuple values are passed through unvalidated —
    # presumably multi-value option input; confirm against click usage.
    if isinstance(value, tuple):
        return value
    client = CumulocityClient(hostname=host, token=value)
    try:
        client.validate_credentials()
        click.secho("Validating c8y token: ", nl=False)
        click.secho("OK", fg="green")
    except Exception:
        click.secho("Validating c8y token: ", nl=False)
        click.secho("EXPIRED/INVALID", fg="red")
        logging.info(
            "Token is no longer valid for host. The token will be ignored. host=%s",
            host,
        )
        return ""
    return value
# ---------------------------------------------------------------------------
# Reusable click option/argument decorators shared by the c8ylp commands.
# Each constant is a decorator; `common_options` below composes them.
# ---------------------------------------------------------------------------

# Cumulocity host: eager, non-prompting variant.
HOSTNAME = click.option(
    "--host",
    "host",
    is_eager=True,
    prompt=False,
    envvar=("C8Y_HOST", "C8Y_BASEURL", "C8Y_URL"),
    help="Cumulocity Hostname [required] [env var: C8Y_HOST]",
)
# Same option but prompting when the value is missing.
HOSTNAME_PROMPT = click.option(
    "--host",
    "host",
    is_eager=False,
    prompt=True,
    envvar=("C8Y_HOST", "C8Y_BASEURL", "C8Y_URL"),
    help="Cumulocity Hostname [required] [env var: C8Y_HOST]",
)
# Device selection: positional argument vs. optional flag variants.
ARG_DEVICE = click.argument(
    "device",
    nargs=1,
    required=True,
)
DEVICE = click.option(
    "--device",
    "-d",
    required=False,
    envvar="C8YLP_DEVICE",
    show_envvar=True,
    help="Device external identity",
)
EXTERNAL_IDENTITY_TYPE = click.option(
    "--external-type",
    envvar="C8YLP_EXTERNAL_TYPE",
    default="c8y_Serial",
    show_default=True,
    show_envvar=True,
    help="external Id Type",
)
REMOTE_ACCESS_TYPE = click.option(
    "--config",
    "-c",
    required=False,
    envvar="C8YLP_CONFIG",
    default="Passthrough",
    show_default=True,
    show_envvar=True,
    help="name of the C8Y Remote Access Configuration",
)
# Cumulocity credential options. Note: C8Y_TOKEN runs validate_token eagerly
# so an expired token is dropped before other options are processed.
C8Y_TENANT = click.option(
    "--tenant", "-t", envvar="C8Y_TENANT", help="Cumulocity tenant id", show_envvar=True
)
C8Y_USER = click.option(
    "--user",
    "-u",
    envvar=("C8Y_USER", "C8Y_USERNAME"),
    show_envvar=True,
    help="Cumulocity username",
)
C8Y_TOKEN = click.option(
    "--token",
    "-t",
    callback=validate_token,
    envvar="C8Y_TOKEN",
    is_eager=True,
    show_envvar=True,
    help="Cumulocity token",
)
C8Y_PASSWORD = click.option(
    "--password",
    "-p",
    envvar="C8Y_PASSWORD",
    prompt=False,
    hide_input=True,
    show_envvar=True,
    help="Cumulocity password",
)
C8Y_TFA_CODE = click.option(
    "--tfa-code",
    envvar="C8Y_TFA_CODE",
    show_envvar=True,
    help="TFA Code. Required when the 'TFA enabled' is enabled for a user",
)
# Local TCP port options: fixed default (2222) vs. random (0) variants.
PORT = click.option(
    "--port",
    type=click.IntRange(0, 65535),
    default=2222,
    envvar="C8YLP_PORT",
    show_envvar=True,
    show_default=True,
    help="TCP Port which should be opened. 0=Random port",
)
PORT_DEFAULT_RANDOM = click.option(
    "--port",
    type=click.IntRange(0, 65535),
    default=0,
    envvar="C8YLP_PORT",
    show_default=True,
    show_envvar=True,
    help="TCP Port which should be opened. 0=Random port",
)
PING_INTERVAL = click.option(
    "--ping-interval",
    type=int,
    default=0,
    show_default=True,
    show_envvar=True,
    envvar="C8YLP_PING_INTERVAL",
    help="Websocket ping interval in seconds. 0=disabled",
)
TCP_SIZE = click.option(
    "--tcp-size",
    envvar="C8YLP_TCP_SIZE",
    type=click.IntRange(1024, 8096 * 1024),
    default=4096,
    show_default=True,
    show_envvar=True,
    help="TCP Package Size",
)
TCP_TIMEOUT = click.option(
    "--tcp-timeout",
    envvar="C8YLP_TCP_TIMEOUT",
    default=0,
    show_default=True,
    show_envvar=True,
    help="Timeout in sec. for inactivity. Can be activated with values > 0",
)
LOGGING_VERBOSE = click.option(
    "--verbose",
    "-v",
    envvar="C8YLP_VERBOSE",
    is_flag=True,
    default=False,
    show_envvar=True,
    help="Print Debug Information into the Logs and Console when set",
)
STORE_TOKEN = click.option(
    "--store-token",
    "store_token",
    envvar="C8YLP_STORE_TOKEN",
    is_flag=True,
    default=True,
    show_envvar=True,
    help="Store the Cumulocity host, tenant and token to the env-file if a file is being used",
)
# Eager flag whose callback strips `prompt` from all other options.
DISABLE_PROMPT = click.option(
    "--disable-prompts",
    "-d",
    "disable_prompts",
    envvar="C8YLP_DISABLE_PROMPTS",
    default=False,
    is_eager=True,
    is_flag=True,
    expose_value=True,
    show_envvar=True,
    callback=deactivate_prompts,
)
SSL_IGNORE_VERIFY = click.option(
    "--ignore-ssl-validate",
    envvar="C8YLP_IGNORE_SSL_VALIDATE",
    is_flag=True,
    default=False,
    show_envvar=True,
    help="Ignore Validation for SSL Certificates while connecting to Websocket",
)
SERVER_RECONNECT_LIMIT = click.option(
    "--reconnects",
    envvar="C8YLP_RECONNECTS",
    type=click.IntRange(-1, 10),
    default=5,
    show_default=True,
    show_envvar=True,
    help="number of reconnects to the Cloud Remote Service. 0 for infinite reconnects",
)
SSH_USER = click.option(
    "--ssh-user",
    envvar="C8YLP_SSH_USER",
    type=str,
    required=True,
    prompt=True,
    show_envvar=True,
    help="Start an interactive ssh session with the given user",
)
ARG_SSH_COMMAND = click.argument("command", nargs=1, required=True)
ARG_SCRIPT = click.argument(
    "script", type=click.Path(resolve_path=True), nargs=1, required=True
)
# Env-file option: the variant below requires the file to exist; the
# *_OPTIONAL_EXISTS variant accepts a not-yet-existing path.
ENV_FILE = click.option(
    "--env-file",
    "env_file",
    envvar="C8YLP_ENV_FILE",
    is_eager=True,
    expose_value=True,
    show_envvar=True,
    type=click.Path(
        exists=True,
    ),
    callback=load_envfile,
    help="Environment file to load. Any settings loaded via this file will control other options",
)
ENV_FILE_OPTIONAL_EXISTS = click.option(
    "--env-file",
    "env_file",
    envvar="C8YLP_ENV_FILE",
    is_eager=True,
    expose_value=True,
    show_envvar=True,
    required=False,
    type=click.Path(
        exists=False,
    ),
    callback=load_envfile,
    help="Environment file to load. Any settings loaded via this file will control other options",
)
def common_options(f):
    """Apply the shared set of c8ylp CLI option decorators to *f*.

    Args:
        f: The click command function to decorate.

    Returns:
        The decorated function.
    """
    options = [
        ARG_DEVICE,
        HOSTNAME,
        C8Y_TENANT,
        C8Y_USER,
        C8Y_TOKEN,
        C8Y_PASSWORD,
        C8Y_TFA_CODE,
        ENV_FILE,
        EXTERNAL_IDENTITY_TYPE,
        REMOTE_ACCESS_TYPE,
        PORT_DEFAULT_RANDOM,
        PING_INTERVAL,
        TCP_SIZE,
        TCP_TIMEOUT,
        LOGGING_VERBOSE,
        SSL_IGNORE_VERIFY,
        STORE_TOKEN,
        DISABLE_PROMPT,
        SERVER_RECONNECT_LIMIT,
    ]
    # Decorators are applied innermost-first, so walk the list backwards
    # to keep the declared ordering on the resulting command.
    for decorate in reversed(options):
        f = decorate(f)
    return f
|
[
"click.argument",
"c8ylp.env.loadenv",
"click.option",
"os.path.exists",
"click.echo",
"logging.info",
"os.path.isfile",
"click.Path",
"c8ylp.rest_client.c8yclient.CumulocityClient",
"click.secho",
"click.IntRange"
] |
[((2964, 3140), 'click.option', 'click.option', (['"""--host"""', '"""host"""'], {'is_eager': '(True)', 'prompt': '(False)', 'envvar': "('C8Y_HOST', 'C8Y_BASEURL', 'C8Y_URL')", 'help': '"""Cumulocity Hostname [required] [env var: C8Y_HOST]"""'}), "('--host', 'host', is_eager=True, prompt=False, envvar=(\n 'C8Y_HOST', 'C8Y_BASEURL', 'C8Y_URL'), help=\n 'Cumulocity Hostname [required] [env var: C8Y_HOST]')\n", (2976, 3140), False, 'import click\n'), ((3177, 3353), 'click.option', 'click.option', (['"""--host"""', '"""host"""'], {'is_eager': '(False)', 'prompt': '(True)', 'envvar': "('C8Y_HOST', 'C8Y_BASEURL', 'C8Y_URL')", 'help': '"""Cumulocity Hostname [required] [env var: C8Y_HOST]"""'}), "('--host', 'host', is_eager=False, prompt=True, envvar=(\n 'C8Y_HOST', 'C8Y_BASEURL', 'C8Y_URL'), help=\n 'Cumulocity Hostname [required] [env var: C8Y_HOST]')\n", (3189, 3353), False, 'import click\n'), ((3386, 3434), 'click.argument', 'click.argument', (['"""device"""'], {'nargs': '(1)', 'required': '(True)'}), "('device', nargs=1, required=True)\n", (3400, 3434), False, 'import click\n'), ((3460, 3584), 'click.option', 'click.option', (['"""--device"""', '"""-d"""'], {'required': '(False)', 'envvar': '"""C8YLP_DEVICE"""', 'show_envvar': '(True)', 'help': '"""Device external identity"""'}), "('--device', '-d', required=False, envvar='C8YLP_DEVICE',\n show_envvar=True, help='Device external identity')\n", (3472, 3584), False, 'import click\n'), ((3634, 3784), 'click.option', 'click.option', (['"""--external-type"""'], {'envvar': '"""C8YLP_EXTERNAL_TYPE"""', 'default': '"""c8y_Serial"""', 'show_default': '(True)', 'show_envvar': '(True)', 'help': '"""external Id Type"""'}), "('--external-type', envvar='C8YLP_EXTERNAL_TYPE', default=\n 'c8y_Serial', show_default=True, show_envvar=True, help='external Id Type')\n", (3646, 3784), False, 'import click\n'), ((3829, 4019), 'click.option', 'click.option', (['"""--config"""', '"""-c"""'], {'required': '(False)', 'envvar': 
'"""C8YLP_CONFIG"""', 'default': '"""Passthrough"""', 'show_default': '(True)', 'show_envvar': '(True)', 'help': '"""name of the C8Y Remote Access Configuration"""'}), "('--config', '-c', required=False, envvar='C8YLP_CONFIG',\n default='Passthrough', show_default=True, show_envvar=True, help=\n 'name of the C8Y Remote Access Configuration')\n", (3841, 4019), False, 'import click\n'), ((4060, 4163), 'click.option', 'click.option', (['"""--tenant"""', '"""-t"""'], {'envvar': '"""C8Y_TENANT"""', 'help': '"""Cumulocity tenant id"""', 'show_envvar': '(True)'}), "('--tenant', '-t', envvar='C8Y_TENANT', help=\n 'Cumulocity tenant id', show_envvar=True)\n", (4072, 4163), False, 'import click\n'), ((4177, 4292), 'click.option', 'click.option', (['"""--user"""', '"""-u"""'], {'envvar': "('C8Y_USER', 'C8Y_USERNAME')", 'show_envvar': '(True)', 'help': '"""Cumulocity username"""'}), "('--user', '-u', envvar=('C8Y_USER', 'C8Y_USERNAME'),\n show_envvar=True, help='Cumulocity username')\n", (4189, 4292), False, 'import click\n'), ((4325, 4461), 'click.option', 'click.option', (['"""--token"""', '"""-t"""'], {'callback': 'validate_token', 'envvar': '"""C8Y_TOKEN"""', 'is_eager': '(True)', 'show_envvar': '(True)', 'help': '"""Cumulocity token"""'}), "('--token', '-t', callback=validate_token, envvar='C8Y_TOKEN',\n is_eager=True, show_envvar=True, help='Cumulocity token')\n", (4337, 4461), False, 'import click\n'), ((4505, 4641), 'click.option', 'click.option', (['"""--password"""', '"""-p"""'], {'envvar': '"""C8Y_PASSWORD"""', 'prompt': '(False)', 'hide_input': '(True)', 'show_envvar': '(True)', 'help': '"""Cumulocity password"""'}), "('--password', '-p', envvar='C8Y_PASSWORD', prompt=False,\n hide_input=True, show_envvar=True, help='Cumulocity password')\n", (4517, 4641), False, 'import click\n'), ((4685, 4829), 'click.option', 'click.option', (['"""--tfa-code"""'], {'envvar': '"""C8Y_TFA_CODE"""', 'show_envvar': '(True)', 'help': '"""TFA Code. 
Required when the \'TFA enabled\' is enabled for a user"""'}), '(\'--tfa-code\', envvar=\'C8Y_TFA_CODE\', show_envvar=True, help=\n "TFA Code. Required when the \'TFA enabled\' is enabled for a user")\n', (4697, 4829), False, 'import click\n'), ((5313, 5496), 'click.option', 'click.option', (['"""--ping-interval"""'], {'type': 'int', 'default': '(0)', 'show_default': '(True)', 'show_envvar': '(True)', 'envvar': '"""C8YLP_PING_INTERVAL"""', 'help': '"""Websocket ping interval in seconds. 0=disabled"""'}), "('--ping-interval', type=int, default=0, show_default=True,\n show_envvar=True, envvar='C8YLP_PING_INTERVAL', help=\n 'Websocket ping interval in seconds. 0=disabled')\n", (5325, 5496), False, 'import click\n'), ((5745, 5932), 'click.option', 'click.option', (['"""--tcp-timeout"""'], {'envvar': '"""C8YLP_TCP_TIMEOUT"""', 'default': '(0)', 'show_default': '(True)', 'show_envvar': '(True)', 'help': '"""Timeout in sec. for inactivity. Can be activated with values > 0"""'}), "('--tcp-timeout', envvar='C8YLP_TCP_TIMEOUT', default=0,\n show_default=True, show_envvar=True, help=\n 'Timeout in sec. for inactivity. 
Can be activated with values > 0')\n", (5757, 5932), False, 'import click\n'), ((5970, 6148), 'click.option', 'click.option', (['"""--verbose"""', '"""-v"""'], {'envvar': '"""C8YLP_VERBOSE"""', 'is_flag': '(True)', 'default': '(False)', 'show_envvar': '(True)', 'help': '"""Print Debug Information into the Logs and Console when set"""'}), "('--verbose', '-v', envvar='C8YLP_VERBOSE', is_flag=True,\n default=False, show_envvar=True, help=\n 'Print Debug Information into the Logs and Console when set')\n", (5982, 6148), False, 'import click\n'), ((6186, 6410), 'click.option', 'click.option', (['"""--store-token"""', '"""store_token"""'], {'envvar': '"""C8YLP_STORE_TOKEN"""', 'is_flag': '(True)', 'default': '(True)', 'show_envvar': '(True)', 'help': '"""Store the Cumulocity host, tenant and token to the env-file if a file is being used"""'}), "('--store-token', 'store_token', envvar='C8YLP_STORE_TOKEN',\n is_flag=True, default=True, show_envvar=True, help=\n 'Store the Cumulocity host, tenant and token to the env-file if a file is being used'\n )\n", (6198, 6410), False, 'import click\n'), ((6447, 6656), 'click.option', 'click.option', (['"""--disable-prompts"""', '"""-d"""', '"""disable_prompts"""'], {'envvar': '"""C8YLP_DISABLE_PROMPTS"""', 'default': '(False)', 'is_eager': '(True)', 'is_flag': '(True)', 'expose_value': '(True)', 'show_envvar': '(True)', 'callback': 'deactivate_prompts'}), "('--disable-prompts', '-d', 'disable_prompts', envvar=\n 'C8YLP_DISABLE_PROMPTS', default=False, is_eager=True, is_flag=True,\n expose_value=True, show_envvar=True, callback=deactivate_prompts)\n", (6459, 6656), False, 'import click\n'), ((6712, 6918), 'click.option', 'click.option', (['"""--ignore-ssl-validate"""'], {'envvar': '"""C8YLP_IGNORE_SSL_VALIDATE"""', 'is_flag': '(True)', 'default': '(False)', 'show_envvar': '(True)', 'help': '"""Ignore Validation for SSL Certificates while connecting to Websocket"""'}), "('--ignore-ssl-validate', envvar='C8YLP_IGNORE_SSL_VALIDATE',\n 
is_flag=True, default=False, show_envvar=True, help=\n 'Ignore Validation for SSL Certificates while connecting to Websocket')\n", (6724, 6918), False, 'import click\n'), ((7224, 7401), 'click.option', 'click.option', (['"""--ssh-user"""'], {'envvar': '"""C8YLP_SSH_USER"""', 'type': 'str', 'required': '(True)', 'prompt': '(True)', 'show_envvar': '(True)', 'help': '"""Start an interactive ssh session with the given user"""'}), "('--ssh-user', envvar='C8YLP_SSH_USER', type=str, required=True,\n prompt=True, show_envvar=True, help=\n 'Start an interactive ssh session with the given user')\n", (7236, 7401), False, 'import click\n'), ((7443, 7492), 'click.argument', 'click.argument', (['"""command"""'], {'nargs': '(1)', 'required': '(True)'}), "('command', nargs=1, required=True)\n", (7457, 7492), False, 'import click\n'), ((1190, 1230), 'click.echo', 'click.echo', (['f"""Loading env-file: {value}"""'], {}), "(f'Loading env-file: {value}')\n", (1200, 1230), False, 'import click\n'), ((2467, 2511), 'c8ylp.rest_client.c8yclient.CumulocityClient', 'CumulocityClient', ([], {'hostname': 'host', 'token': 'value'}), '(hostname=host, token=value)\n', (2483, 2511), False, 'from c8ylp.rest_client.c8yclient import CumulocityClient\n'), ((1238, 1259), 'os.path.exists', 'os.path.exists', (['value'], {}), '(value)\n', (1252, 1259), False, 'import os\n'), ((1264, 1285), 'os.path.isfile', 'os.path.isfile', (['value'], {}), '(value)\n', (1278, 1285), False, 'import os\n'), ((1295, 1309), 'c8ylp.env.loadenv', 'loadenv', (['value'], {}), '(value)\n', (1302, 1309), False, 'from c8ylp.env import loadenv\n'), ((1328, 1395), 'logging.info', 'logging.info', (['"""env file does not exist or is not a file: %s"""', 'value'], {}), "('env file does not exist or is not a file: %s', value)\n", (1340, 1395), False, 'import logging\n'), ((2568, 2615), 'click.secho', 'click.secho', (['"""Validating c8y token: """'], {'nl': '(False)'}), "('Validating c8y token: ', nl=False)\n", (2579, 2615), False, 
'import click\n'), ((2624, 2653), 'click.secho', 'click.secho', (['"""OK"""'], {'fg': '"""green"""'}), "('OK', fg='green')\n", (2635, 2653), False, 'import click\n'), ((4889, 4913), 'click.IntRange', 'click.IntRange', (['(0)', '(65535)'], {}), '(0, 65535)\n', (4903, 4913), False, 'import click\n'), ((5124, 5148), 'click.IntRange', 'click.IntRange', (['(0)', '(65535)'], {}), '(0, 65535)\n', (5138, 5148), False, 'import click\n'), ((5601, 5634), 'click.IntRange', 'click.IntRange', (['(1024)', '(8096 * 1024)'], {}), '(1024, 8096 * 1024)\n', (5615, 5634), False, 'import click\n'), ((7038, 7060), 'click.IntRange', 'click.IntRange', (['(-1)', '(10)'], {}), '(-1, 10)\n', (7052, 7060), False, 'import click\n'), ((7542, 7571), 'click.Path', 'click.Path', ([], {'resolve_path': '(True)'}), '(resolve_path=True)\n', (7552, 7571), False, 'import click\n'), ((7761, 7784), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (7771, 7784), False, 'import click\n'), ((8127, 8151), 'click.Path', 'click.Path', ([], {'exists': '(False)'}), '(exists=False)\n', (8137, 8151), False, 'import click\n'), ((2684, 2731), 'click.secho', 'click.secho', (['"""Validating c8y token: """'], {'nl': '(False)'}), "('Validating c8y token: ', nl=False)\n", (2695, 2731), False, 'import click\n'), ((2740, 2780), 'click.secho', 'click.secho', (['"""EXPIRED/INVALID"""'], {'fg': '"""red"""'}), "('EXPIRED/INVALID', fg='red')\n", (2751, 2780), False, 'import click\n'), ((2789, 2889), 'logging.info', 'logging.info', (['"""Token is no longer valid for host. The token will be ignored. host=%s"""', 'host'], {}), "(\n 'Token is no longer valid for host. The token will be ignored. host=%s',\n host)\n", (2801, 2889), False, 'import logging\n')]
|
import sys
import unittest
import numpy as np
import crocoddyl
import pinocchio
from crocoddyl.utils import (CoMPositionCostDerived, ControlCostDerived, FramePlacementCostDerived,
FrameTranslationCostDerived, FrameVelocityCostDerived, StateCostDerived)
class CostModelAbstractTestCase(unittest.TestCase):
    """Compare a native crocoddyl cost model against a Python reference.

    Subclasses set the four class attributes; each test then checks that
    COST and COST_DER agree on dimensions, cost value, residuals and
    derivatives at a random state/control pair.
    """
    ROBOT_MODEL = None  # pinocchio model shared by both cost models
    ROBOT_STATE = None  # crocoddyl state built on ROBOT_MODEL
    COST = None  # native crocoddyl cost model under test
    COST_DER = None  # equivalent Python-derived implementation (reference)
    def setUp(self):
        """Create data objects and precompute kinematics at a random state."""
        self.robot_data = self.ROBOT_MODEL.createData()
        self.x = self.ROBOT_STATE.rand()
        self.u = pinocchio.utils.rand(self.ROBOT_MODEL.nv)
        self.data = self.COST.createData(self.robot_data)
        self.data_der = self.COST_DER.createData(self.robot_data)
        nq, nv = self.ROBOT_MODEL.nq, self.ROBOT_MODEL.nv
        # Populate robot_data with the kinematic quantities (placements,
        # Jacobians, CoM Jacobian) that the cost models read during calc().
        pinocchio.forwardKinematics(self.ROBOT_MODEL, self.robot_data, self.x[:nq], self.x[nq:])
        pinocchio.computeForwardKinematicsDerivatives(self.ROBOT_MODEL, self.robot_data, self.x[:nq], self.x[nq:],
                                                      pinocchio.utils.zero(nv))
        pinocchio.computeJointJacobians(self.ROBOT_MODEL, self.robot_data, self.x[:nq])
        pinocchio.updateFramePlacements(self.ROBOT_MODEL, self.robot_data)
        pinocchio.jacobianCenterOfMass(self.ROBOT_MODEL, self.robot_data, self.x[:nq], False)
    def test_dimensions(self):
        """Both implementations must report identical problem dimensions."""
        self.assertEqual(self.COST.state.nx, self.COST_DER.state.nx, "Wrong nx.")
        self.assertEqual(self.COST.state.ndx, self.COST_DER.state.ndx, "Wrong ndx.")
        self.assertEqual(self.COST.nu, self.COST_DER.nu, "Wrong nu.")
        self.assertEqual(self.COST.state.nq, self.COST_DER.state.nq, "Wrong nq.")
        self.assertEqual(self.COST.state.nv, self.COST_DER.state.nv, "Wrong nv.")
        self.assertEqual(self.COST.activation.nr, self.COST_DER.activation.nr, "Wrong nr.")
    def test_calc(self):
        """Cost value and residual must match between the implementations."""
        # Run calc for both action models
        self.COST.calc(self.data, self.x, self.u)
        self.COST_DER.calc(self.data_der, self.x, self.u)
        # Checking the cost value and its residual
        self.assertAlmostEqual(self.data.cost, self.data_der.cost, 10, "Wrong cost value.")
        self.assertTrue(np.allclose(self.data.r, self.data_der.r, atol=1e-9), "Wrong cost residuals.")
    def test_calcDiff(self):
        """Cost derivatives (Lx, Lu, Lxx, Lxu, Luu) must match as well."""
        # Run calc for both action models
        self.COST.calcDiff(self.data, self.x, self.u)
        self.COST_DER.calcDiff(self.data_der, self.x, self.u)
        # Checking the cost value and its residual
        self.assertAlmostEqual(self.data.cost, self.data_der.cost, 10, "Wrong cost value.")
        self.assertTrue(np.allclose(self.data.r, self.data_der.r, atol=1e-9), "Wrong cost residuals.")
        # Checking the Jacobians and Hessians of the cost
        self.assertTrue(np.allclose(self.data.Lx, self.data_der.Lx, atol=1e-9), "Wrong Lx.")
        self.assertTrue(np.allclose(self.data.Lu, self.data_der.Lu, atol=1e-9), "Wrong Lu.")
        self.assertTrue(np.allclose(self.data.Lxx, self.data_der.Lxx, atol=1e-9), "Wrong Lxx.")
        self.assertTrue(np.allclose(self.data.Lxu, self.data_der.Lxu, atol=1e-9), "Wrong Lxu.")
        self.assertTrue(np.allclose(self.data.Luu, self.data_der.Luu, atol=1e-9), "Wrong Luu.")
class CostModelSumTestCase(unittest.TestCase):
    """Check that a single cost wrapped in a CostModelSum behaves the same.

    Subclasses set the class attributes; the tests compare COST evaluated
    directly against the same cost registered (with weight 1) in a
    crocoddyl.CostModelSum.
    """
    ROBOT_MODEL = None  # pinocchio model shared by both evaluations
    ROBOT_STATE = None  # crocoddyl state built on ROBOT_MODEL
    COST = None  # cost model evaluated directly and via the sum
    def setUp(self):
        """Create data objects and precompute kinematics at a random state."""
        self.robot_data = self.ROBOT_MODEL.createData()
        self.x = self.ROBOT_STATE.rand()
        self.u = pinocchio.utils.rand(self.ROBOT_MODEL.nv)
        # Wrap the cost in a sum with unit weight; both paths must agree.
        self.cost_sum = crocoddyl.CostModelSum(self.ROBOT_STATE)
        self.cost_sum.addCost('myCost', self.COST, 1.)
        self.data = self.COST.createData(self.robot_data)
        self.data_sum = self.cost_sum.createData(self.robot_data)
        nq, nv = self.ROBOT_MODEL.nq, self.ROBOT_MODEL.nv
        # Populate robot_data with the kinematic quantities the costs read.
        pinocchio.forwardKinematics(self.ROBOT_MODEL, self.robot_data, self.x[:nq], self.x[nq:])
        pinocchio.computeForwardKinematicsDerivatives(self.ROBOT_MODEL, self.robot_data, self.x[:nq], self.x[nq:],
                                                      pinocchio.utils.zero(nv))
        pinocchio.computeJointJacobians(self.ROBOT_MODEL, self.robot_data, self.x[:nq])
        pinocchio.updateFramePlacements(self.ROBOT_MODEL, self.robot_data)
        pinocchio.jacobianCenterOfMass(self.ROBOT_MODEL, self.robot_data, self.x[:nq], False)
    def test_dimensions(self):
        """The sum must report the same dimensions as the wrapped cost."""
        self.assertEqual(self.COST.state.nx, self.cost_sum.state.nx, "Wrong nx.")
        self.assertEqual(self.COST.state.ndx, self.cost_sum.state.ndx, "Wrong ndx.")
        self.assertEqual(self.COST.nu, self.cost_sum.nu, "Wrong nu.")
        self.assertEqual(self.COST.state.nq, self.cost_sum.state.nq, "Wrong nq.")
        self.assertEqual(self.COST.state.nv, self.cost_sum.state.nv, "Wrong nv.")
        self.assertEqual(self.COST.activation.nr, self.cost_sum.nr, "Wrong nr.")
    def test_calc(self):
        """Cost value and residual must match between cost and sum."""
        # Run calc for both action models
        self.COST.calc(self.data, self.x, self.u)
        self.cost_sum.calc(self.data_sum, self.x, self.u)
        # Checking the cost value and its residual
        self.assertAlmostEqual(self.data.cost, self.data_sum.cost, 10, "Wrong cost value.")
        self.assertTrue(np.allclose(self.data.r, self.data_sum.r, atol=1e-9), "Wrong cost residuals.")
    def test_calcDiff(self):
        """Cost derivatives must match between cost and sum."""
        # Run calc for both action models
        self.COST.calcDiff(self.data, self.x, self.u)
        self.cost_sum.calcDiff(self.data_sum, self.x, self.u)
        # Checking the cost value and its residual
        self.assertAlmostEqual(self.data.cost, self.data_sum.cost, 10, "Wrong cost value.")
        self.assertTrue(np.allclose(self.data.r, self.data_sum.r, atol=1e-9), "Wrong cost residuals.")
        # Checking the Jacobians and Hessians of the cost
        self.assertTrue(np.allclose(self.data.Lx, self.data_sum.Lx, atol=1e-9), "Wrong Lx.")
        self.assertTrue(np.allclose(self.data.Lu, self.data_sum.Lu, atol=1e-9), "Wrong Lu.")
        self.assertTrue(np.allclose(self.data.Lxx, self.data_sum.Lxx, atol=1e-9), "Wrong Lxx.")
        self.assertTrue(np.allclose(self.data.Lxu, self.data_sum.Lxu, atol=1e-9), "Wrong Lxu.")
        self.assertTrue(np.allclose(self.data.Luu, self.data_sum.Luu, atol=1e-9), "Wrong Luu.")
    def test_removeCost(self):
        """Removing the only registered cost must leave the sum empty."""
        self.cost_sum.removeCost("myCost")
        self.assertEqual(len(self.cost_sum.costs), 0, "The number of cost items should be zero")
# ---------------------------------------------------------------------------
# Concrete test fixtures: one *Test class per cost type (native vs. derived
# Python reference) and one *SumTest class per cost type (direct vs. wrapped
# in a CostModelSum). Each binds the abstract class attributes to a random
# humanoid sample model.
# ---------------------------------------------------------------------------
class StateCostTest(CostModelAbstractTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    COST = crocoddyl.CostModelState(ROBOT_STATE)
    COST_DER = StateCostDerived(ROBOT_STATE)
class StateCostSumTest(CostModelSumTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    COST = crocoddyl.CostModelState(ROBOT_STATE)
class ControlCostTest(CostModelAbstractTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    COST = crocoddyl.CostModelControl(ROBOT_STATE)
    COST_DER = ControlCostDerived(ROBOT_STATE)
class ControlCostSumTest(CostModelSumTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    COST = crocoddyl.CostModelControl(ROBOT_STATE)
class CoMPositionCostTest(CostModelAbstractTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    cref = pinocchio.utils.rand(3)  # random CoM reference position
    COST = crocoddyl.CostModelCoMPosition(ROBOT_STATE, cref)
    COST_DER = CoMPositionCostDerived(ROBOT_STATE, cref=cref)
class CoMPositionCostSumTest(CostModelSumTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    cref = pinocchio.utils.rand(3)  # random CoM reference position
    COST = crocoddyl.CostModelCoMPosition(ROBOT_STATE, cref)
class FramePlacementCostTest(CostModelAbstractTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    Mref = crocoddyl.FramePlacement(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.SE3.Random())
    COST = crocoddyl.CostModelFramePlacement(ROBOT_STATE, Mref)
    COST_DER = FramePlacementCostDerived(ROBOT_STATE, Mref=Mref)
class FramePlacementCostSumTest(CostModelSumTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    Mref = crocoddyl.FramePlacement(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.SE3.Random())
    COST = crocoddyl.CostModelFramePlacement(ROBOT_STATE, Mref)
class FrameTranslationCostTest(CostModelAbstractTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    xref = crocoddyl.FrameTranslation(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.utils.rand(3))
    COST = crocoddyl.CostModelFrameTranslation(ROBOT_STATE, xref)
    COST_DER = FrameTranslationCostDerived(ROBOT_STATE, xref=xref)
class FrameTranslationCostSumTest(CostModelSumTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    xref = crocoddyl.FrameTranslation(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.utils.rand(3))
    COST = crocoddyl.CostModelFrameTranslation(ROBOT_STATE, xref)
class FrameVelocityCostTest(CostModelAbstractTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    vref = crocoddyl.FrameMotion(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.Motion.Random())
    COST = crocoddyl.CostModelFrameVelocity(ROBOT_STATE, vref)
    COST_DER = FrameVelocityCostDerived(ROBOT_STATE, vref=vref)
class FrameVelocityCostSumTest(CostModelSumTestCase):
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    vref = crocoddyl.FrameMotion(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.Motion.Random())
    COST = crocoddyl.CostModelFrameVelocity(ROBOT_STATE, vref)
if __name__ == '__main__':
    # Collect every cost-model test case into a single suite, run it, and
    # surface the overall result through the process exit code.
    test_classes_to_run = [
        StateCostTest, StateCostSumTest, ControlCostTest, ControlCostSumTest, CoMPositionCostTest,
        CoMPositionCostSumTest, FramePlacementCostTest, FramePlacementCostSumTest, FrameTranslationCostTest,
        FrameTranslationCostSumTest, FrameVelocityCostTest, FrameVelocityCostSumTest
    ]
    loader = unittest.TestLoader()
    suites = [loader.loadTestsFromTestCase(case) for case in test_classes_to_run]
    runner = unittest.TextTestRunner()
    outcome = runner.run(unittest.TestSuite(suites))
    sys.exit(not outcome.wasSuccessful())
|
[
"numpy.allclose",
"pinocchio.forwardKinematics",
"unittest.TestLoader",
"pinocchio.jacobianCenterOfMass",
"crocoddyl.CostModelCoMPosition",
"crocoddyl.utils.FrameVelocityCostDerived",
"pinocchio.SE3.Random",
"crocoddyl.utils.FrameTranslationCostDerived",
"crocoddyl.CostModelState",
"pinocchio.computeJointJacobians",
"crocoddyl.CostModelSum",
"pinocchio.buildSampleModelHumanoidRandom",
"pinocchio.Motion.Random",
"crocoddyl.utils.FramePlacementCostDerived",
"crocoddyl.StateMultibody",
"crocoddyl.utils.StateCostDerived",
"crocoddyl.utils.ControlCostDerived",
"crocoddyl.utils.CoMPositionCostDerived",
"unittest.TestSuite",
"crocoddyl.CostModelFramePlacement",
"pinocchio.updateFramePlacements",
"unittest.TextTestRunner",
"crocoddyl.CostModelFrameTranslation",
"crocoddyl.CostModelControl",
"pinocchio.utils.zero",
"crocoddyl.CostModelFrameVelocity",
"pinocchio.utils.rand"
] |
[((6529, 6571), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (6569, 6571), False, 'import pinocchio\n'), ((6590, 6627), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (6614, 6627), False, 'import crocoddyl\n'), ((6640, 6677), 'crocoddyl.CostModelState', 'crocoddyl.CostModelState', (['ROBOT_STATE'], {}), '(ROBOT_STATE)\n', (6664, 6677), False, 'import crocoddyl\n'), ((6693, 6722), 'crocoddyl.utils.StateCostDerived', 'StateCostDerived', (['ROBOT_STATE'], {}), '(ROBOT_STATE)\n', (6709, 6722), False, 'from crocoddyl.utils import CoMPositionCostDerived, ControlCostDerived, FramePlacementCostDerived, FrameTranslationCostDerived, FrameVelocityCostDerived, StateCostDerived\n'), ((6789, 6831), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (6829, 6831), False, 'import pinocchio\n'), ((6850, 6887), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (6874, 6887), False, 'import crocoddyl\n'), ((6900, 6937), 'crocoddyl.CostModelState', 'crocoddyl.CostModelState', (['ROBOT_STATE'], {}), '(ROBOT_STATE)\n', (6924, 6937), False, 'import crocoddyl\n'), ((7008, 7050), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (7048, 7050), False, 'import pinocchio\n'), ((7069, 7106), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (7093, 7106), False, 'import crocoddyl\n'), ((7119, 7158), 'crocoddyl.CostModelControl', 'crocoddyl.CostModelControl', (['ROBOT_STATE'], {}), '(ROBOT_STATE)\n', (7145, 7158), False, 'import crocoddyl\n'), ((7174, 7205), 'crocoddyl.utils.ControlCostDerived', 'ControlCostDerived', (['ROBOT_STATE'], {}), '(ROBOT_STATE)\n', (7192, 7205), False, 'from crocoddyl.utils import CoMPositionCostDerived, ControlCostDerived, FramePlacementCostDerived, 
FrameTranslationCostDerived, FrameVelocityCostDerived, StateCostDerived\n'), ((7274, 7316), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (7314, 7316), False, 'import pinocchio\n'), ((7335, 7372), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (7359, 7372), False, 'import crocoddyl\n'), ((7385, 7424), 'crocoddyl.CostModelControl', 'crocoddyl.CostModelControl', (['ROBOT_STATE'], {}), '(ROBOT_STATE)\n', (7411, 7424), False, 'import crocoddyl\n'), ((7499, 7541), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (7539, 7541), False, 'import pinocchio\n'), ((7560, 7597), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (7584, 7597), False, 'import crocoddyl\n'), ((7610, 7633), 'pinocchio.utils.rand', 'pinocchio.utils.rand', (['(3)'], {}), '(3)\n', (7630, 7633), False, 'import pinocchio\n'), ((7645, 7694), 'crocoddyl.CostModelCoMPosition', 'crocoddyl.CostModelCoMPosition', (['ROBOT_STATE', 'cref'], {}), '(ROBOT_STATE, cref)\n', (7675, 7694), False, 'import crocoddyl\n'), ((7710, 7756), 'crocoddyl.utils.CoMPositionCostDerived', 'CoMPositionCostDerived', (['ROBOT_STATE'], {'cref': 'cref'}), '(ROBOT_STATE, cref=cref)\n', (7732, 7756), False, 'from crocoddyl.utils import CoMPositionCostDerived, ControlCostDerived, FramePlacementCostDerived, FrameTranslationCostDerived, FrameVelocityCostDerived, StateCostDerived\n'), ((7829, 7871), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (7869, 7871), False, 'import pinocchio\n'), ((7890, 7927), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (7914, 7927), False, 'import crocoddyl\n'), ((7940, 7963), 'pinocchio.utils.rand', 'pinocchio.utils.rand', (['(3)'], {}), '(3)\n', (7960, 7963), False, 'import pinocchio\n'), 
((7975, 8024), 'crocoddyl.CostModelCoMPosition', 'crocoddyl.CostModelCoMPosition', (['ROBOT_STATE', 'cref'], {}), '(ROBOT_STATE, cref)\n', (8005, 8024), False, 'import crocoddyl\n'), ((8102, 8144), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (8142, 8144), False, 'import pinocchio\n'), ((8163, 8200), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (8187, 8200), False, 'import crocoddyl\n'), ((8312, 8364), 'crocoddyl.CostModelFramePlacement', 'crocoddyl.CostModelFramePlacement', (['ROBOT_STATE', 'Mref'], {}), '(ROBOT_STATE, Mref)\n', (8345, 8364), False, 'import crocoddyl\n'), ((8380, 8429), 'crocoddyl.utils.FramePlacementCostDerived', 'FramePlacementCostDerived', (['ROBOT_STATE'], {'Mref': 'Mref'}), '(ROBOT_STATE, Mref=Mref)\n', (8405, 8429), False, 'from crocoddyl.utils import CoMPositionCostDerived, ControlCostDerived, FramePlacementCostDerived, FrameTranslationCostDerived, FrameVelocityCostDerived, StateCostDerived\n'), ((8505, 8547), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (8545, 8547), False, 'import pinocchio\n'), ((8566, 8603), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (8590, 8603), False, 'import crocoddyl\n'), ((8715, 8767), 'crocoddyl.CostModelFramePlacement', 'crocoddyl.CostModelFramePlacement', (['ROBOT_STATE', 'Mref'], {}), '(ROBOT_STATE, Mref)\n', (8748, 8767), False, 'import crocoddyl\n'), ((8847, 8889), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (8887, 8889), False, 'import pinocchio\n'), ((8908, 8945), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (8932, 8945), False, 'import crocoddyl\n'), ((9060, 9114), 'crocoddyl.CostModelFrameTranslation', 'crocoddyl.CostModelFrameTranslation', (['ROBOT_STATE', 'xref'], 
{}), '(ROBOT_STATE, xref)\n', (9095, 9114), False, 'import crocoddyl\n'), ((9130, 9181), 'crocoddyl.utils.FrameTranslationCostDerived', 'FrameTranslationCostDerived', (['ROBOT_STATE'], {'xref': 'xref'}), '(ROBOT_STATE, xref=xref)\n', (9157, 9181), False, 'from crocoddyl.utils import CoMPositionCostDerived, ControlCostDerived, FramePlacementCostDerived, FrameTranslationCostDerived, FrameVelocityCostDerived, StateCostDerived\n'), ((9259, 9301), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (9299, 9301), False, 'import pinocchio\n'), ((9320, 9357), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (9344, 9357), False, 'import crocoddyl\n'), ((9472, 9526), 'crocoddyl.CostModelFrameTranslation', 'crocoddyl.CostModelFrameTranslation', (['ROBOT_STATE', 'xref'], {}), '(ROBOT_STATE, xref)\n', (9507, 9526), False, 'import crocoddyl\n'), ((9603, 9645), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (9643, 9645), False, 'import pinocchio\n'), ((9664, 9701), 'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (9688, 9701), False, 'import crocoddyl\n'), ((9813, 9864), 'crocoddyl.CostModelFrameVelocity', 'crocoddyl.CostModelFrameVelocity', (['ROBOT_STATE', 'vref'], {}), '(ROBOT_STATE, vref)\n', (9845, 9864), False, 'import crocoddyl\n'), ((9880, 9928), 'crocoddyl.utils.FrameVelocityCostDerived', 'FrameVelocityCostDerived', (['ROBOT_STATE'], {'vref': 'vref'}), '(ROBOT_STATE, vref=vref)\n', (9904, 9928), False, 'from crocoddyl.utils import CoMPositionCostDerived, ControlCostDerived, FramePlacementCostDerived, FrameTranslationCostDerived, FrameVelocityCostDerived, StateCostDerived\n'), ((10003, 10045), 'pinocchio.buildSampleModelHumanoidRandom', 'pinocchio.buildSampleModelHumanoidRandom', ([], {}), '()\n', (10043, 10045), False, 'import pinocchio\n'), ((10064, 10101), 
'crocoddyl.StateMultibody', 'crocoddyl.StateMultibody', (['ROBOT_MODEL'], {}), '(ROBOT_MODEL)\n', (10088, 10101), False, 'import crocoddyl\n'), ((10213, 10264), 'crocoddyl.CostModelFrameVelocity', 'crocoddyl.CostModelFrameVelocity', (['ROBOT_STATE', 'vref'], {}), '(ROBOT_STATE, vref)\n', (10245, 10264), False, 'import crocoddyl\n'), ((10634, 10655), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (10653, 10655), False, 'import unittest\n'), ((10827, 10858), 'unittest.TestSuite', 'unittest.TestSuite', (['suites_list'], {}), '(suites_list)\n', (10845, 10858), False, 'import unittest\n'), ((10872, 10897), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (10895, 10897), False, 'import unittest\n'), ((556, 597), 'pinocchio.utils.rand', 'pinocchio.utils.rand', (['self.ROBOT_MODEL.nv'], {}), '(self.ROBOT_MODEL.nv)\n', (576, 597), False, 'import pinocchio\n'), ((790, 882), 'pinocchio.forwardKinematics', 'pinocchio.forwardKinematics', (['self.ROBOT_MODEL', 'self.robot_data', 'self.x[:nq]', 'self.x[nq:]'], {}), '(self.ROBOT_MODEL, self.robot_data, self.x[:nq],\n self.x[nq:])\n', (817, 882), False, 'import pinocchio\n'), ((1082, 1161), 'pinocchio.computeJointJacobians', 'pinocchio.computeJointJacobians', (['self.ROBOT_MODEL', 'self.robot_data', 'self.x[:nq]'], {}), '(self.ROBOT_MODEL, self.robot_data, self.x[:nq])\n', (1113, 1161), False, 'import pinocchio\n'), ((1170, 1236), 'pinocchio.updateFramePlacements', 'pinocchio.updateFramePlacements', (['self.ROBOT_MODEL', 'self.robot_data'], {}), '(self.ROBOT_MODEL, self.robot_data)\n', (1201, 1236), False, 'import pinocchio\n'), ((1245, 1335), 'pinocchio.jacobianCenterOfMass', 'pinocchio.jacobianCenterOfMass', (['self.ROBOT_MODEL', 'self.robot_data', 'self.x[:nq]', '(False)'], {}), '(self.ROBOT_MODEL, self.robot_data, self.x[:\n nq], False)\n', (1275, 1335), False, 'import pinocchio\n'), ((3491, 3532), 'pinocchio.utils.rand', 'pinocchio.utils.rand', (['self.ROBOT_MODEL.nv'], {}), 
'(self.ROBOT_MODEL.nv)\n', (3511, 3532), False, 'import pinocchio\n'), ((3558, 3598), 'crocoddyl.CostModelSum', 'crocoddyl.CostModelSum', (['self.ROBOT_STATE'], {}), '(self.ROBOT_STATE)\n', (3580, 3598), False, 'import crocoddyl\n'), ((3846, 3938), 'pinocchio.forwardKinematics', 'pinocchio.forwardKinematics', (['self.ROBOT_MODEL', 'self.robot_data', 'self.x[:nq]', 'self.x[nq:]'], {}), '(self.ROBOT_MODEL, self.robot_data, self.x[:nq],\n self.x[nq:])\n', (3873, 3938), False, 'import pinocchio\n'), ((4138, 4217), 'pinocchio.computeJointJacobians', 'pinocchio.computeJointJacobians', (['self.ROBOT_MODEL', 'self.robot_data', 'self.x[:nq]'], {}), '(self.ROBOT_MODEL, self.robot_data, self.x[:nq])\n', (4169, 4217), False, 'import pinocchio\n'), ((4226, 4292), 'pinocchio.updateFramePlacements', 'pinocchio.updateFramePlacements', (['self.ROBOT_MODEL', 'self.robot_data'], {}), '(self.ROBOT_MODEL, self.robot_data)\n', (4257, 4292), False, 'import pinocchio\n'), ((4301, 4391), 'pinocchio.jacobianCenterOfMass', 'pinocchio.jacobianCenterOfMass', (['self.ROBOT_MODEL', 'self.robot_data', 'self.x[:nq]', '(False)'], {}), '(self.ROBOT_MODEL, self.robot_data, self.x[:\n nq], False)\n', (4331, 4391), False, 'import pinocchio\n'), ((8277, 8299), 'pinocchio.SE3.Random', 'pinocchio.SE3.Random', ([], {}), '()\n', (8297, 8299), False, 'import pinocchio\n'), ((8680, 8702), 'pinocchio.SE3.Random', 'pinocchio.SE3.Random', ([], {}), '()\n', (8700, 8702), False, 'import pinocchio\n'), ((9024, 9047), 'pinocchio.utils.rand', 'pinocchio.utils.rand', (['(3)'], {}), '(3)\n', (9044, 9047), False, 'import pinocchio\n'), ((9436, 9459), 'pinocchio.utils.rand', 'pinocchio.utils.rand', (['(3)'], {}), '(3)\n', (9456, 9459), False, 'import pinocchio\n'), ((9775, 9800), 'pinocchio.Motion.Random', 'pinocchio.Motion.Random', ([], {}), '()\n', (9798, 9800), False, 'import pinocchio\n'), ((10175, 10200), 'pinocchio.Motion.Random', 'pinocchio.Motion.Random', ([], {}), '()\n', (10198, 10200), False, 'import 
pinocchio\n'), ((1048, 1072), 'pinocchio.utils.zero', 'pinocchio.utils.zero', (['nv'], {}), '(nv)\n', (1068, 1072), False, 'import pinocchio\n'), ((2199, 2252), 'numpy.allclose', 'np.allclose', (['self.data.r', 'self.data_der.r'], {'atol': '(1e-09)'}), '(self.data.r, self.data_der.r, atol=1e-09)\n', (2210, 2252), True, 'import numpy as np\n'), ((2633, 2686), 'numpy.allclose', 'np.allclose', (['self.data.r', 'self.data_der.r'], {'atol': '(1e-09)'}), '(self.data.r, self.data_der.r, atol=1e-09)\n', (2644, 2686), True, 'import numpy as np\n'), ((2794, 2849), 'numpy.allclose', 'np.allclose', (['self.data.Lx', 'self.data_der.Lx'], {'atol': '(1e-09)'}), '(self.data.Lx, self.data_der.Lx, atol=1e-09)\n', (2805, 2849), True, 'import numpy as np\n'), ((2887, 2942), 'numpy.allclose', 'np.allclose', (['self.data.Lu', 'self.data_der.Lu'], {'atol': '(1e-09)'}), '(self.data.Lu, self.data_der.Lu, atol=1e-09)\n', (2898, 2942), True, 'import numpy as np\n'), ((2980, 3037), 'numpy.allclose', 'np.allclose', (['self.data.Lxx', 'self.data_der.Lxx'], {'atol': '(1e-09)'}), '(self.data.Lxx, self.data_der.Lxx, atol=1e-09)\n', (2991, 3037), True, 'import numpy as np\n'), ((3076, 3133), 'numpy.allclose', 'np.allclose', (['self.data.Lxu', 'self.data_der.Lxu'], {'atol': '(1e-09)'}), '(self.data.Lxu, self.data_der.Lxu, atol=1e-09)\n', (3087, 3133), True, 'import numpy as np\n'), ((3172, 3229), 'numpy.allclose', 'np.allclose', (['self.data.Luu', 'self.data_der.Luu'], {'atol': '(1e-09)'}), '(self.data.Luu, self.data_der.Luu, atol=1e-09)\n', (3183, 3229), True, 'import numpy as np\n'), ((4104, 4128), 'pinocchio.utils.zero', 'pinocchio.utils.zero', (['nv'], {}), '(nv)\n', (4124, 4128), False, 'import pinocchio\n'), ((5244, 5297), 'numpy.allclose', 'np.allclose', (['self.data.r', 'self.data_sum.r'], {'atol': '(1e-09)'}), '(self.data.r, self.data_sum.r, atol=1e-09)\n', (5255, 5297), True, 'import numpy as np\n'), ((5678, 5731), 'numpy.allclose', 'np.allclose', (['self.data.r', 'self.data_sum.r'], 
{'atol': '(1e-09)'}), '(self.data.r, self.data_sum.r, atol=1e-09)\n', (5689, 5731), True, 'import numpy as np\n'), ((5839, 5894), 'numpy.allclose', 'np.allclose', (['self.data.Lx', 'self.data_sum.Lx'], {'atol': '(1e-09)'}), '(self.data.Lx, self.data_sum.Lx, atol=1e-09)\n', (5850, 5894), True, 'import numpy as np\n'), ((5932, 5987), 'numpy.allclose', 'np.allclose', (['self.data.Lu', 'self.data_sum.Lu'], {'atol': '(1e-09)'}), '(self.data.Lu, self.data_sum.Lu, atol=1e-09)\n', (5943, 5987), True, 'import numpy as np\n'), ((6025, 6082), 'numpy.allclose', 'np.allclose', (['self.data.Lxx', 'self.data_sum.Lxx'], {'atol': '(1e-09)'}), '(self.data.Lxx, self.data_sum.Lxx, atol=1e-09)\n', (6036, 6082), True, 'import numpy as np\n'), ((6121, 6178), 'numpy.allclose', 'np.allclose', (['self.data.Lxu', 'self.data_sum.Lxu'], {'atol': '(1e-09)'}), '(self.data.Lxu, self.data_sum.Lxu, atol=1e-09)\n', (6132, 6178), True, 'import numpy as np\n'), ((6217, 6274), 'numpy.allclose', 'np.allclose', (['self.data.Luu', 'self.data_sum.Luu'], {'atol': '(1e-09)'}), '(self.data.Luu, self.data_sum.Luu, atol=1e-09)\n', (6228, 6274), True, 'import numpy as np\n')]
|
from _mnncengine.cv import *
import _mnncengine.cv as _F
import MNN.numpy as _np
import MNN
def __to_int(x):
    # Cast to int32 only when needed; int32 inputs are returned untouched.
    return x if x.dtype == _np.int32 else x.astype(_np.int32)
def resize(src, dsize=None, fx=None, fy=None, interpolation=INTER_LINEAR,
           code=None, mean=None, norm=None):
    """Resize `src` to an explicit `dsize`, or by the scale factors `fx`/`fy`.

    Exactly one of `dsize` or the `fx`/`fy` pair must be supplied; the
    backend treats 0 entries as "unset".  `code` (a color-conversion code)
    is hashed before being forwarded, and `mean`/`norm` are passed through
    to the backend untouched.

    Raises:
        ValueError: if neither `dsize` nor `fx`/`fy` is given.
    """
    if dsize is None and fx is None and fy is None:
        # Fixed typo in the original message ("reisze").
        raise ValueError('resize must set dsize or fx,fy.')
    # None sentinels instead of mutable [] defaults (classic Python pitfall:
    # a [] default is shared between calls and can be mutated downstream).
    if mean is None:
        mean = []
    if norm is None:
        norm = []
    if dsize is None:
        dsize = [0, 0]
    if fx is None:
        fx = 0
    if fy is None:
        fy = 0
    if code is None:
        code = -1
    else:
        code = hash(code)
    return _F.resize(src, dsize, fx, fy, interpolation, code, mean, norm)
def copyTo(src, mask=None, dst=None):
    """Select `src` where `mask` is non-zero, `dst` elsewhere (zeros by default).

    Without a mask this is simply `src.copy()`.  The selection is carried
    out in int32 and the result is cast back to `src`'s original dtype.
    """
    if mask is None:
        return src.copy()
    origin_dtype = src.dtype
    # The masked select must run on integer data.
    src = __to_int(src)
    dst = _np.zeros_like(src) if dst is None else __to_int(dst)
    # Broadcast mask [h, w] -> [h, w, c] so it lines up with the channels.
    mask = _np.expand_dims(mask, -1)
    repeat = [1] * mask.ndim
    repeat[-1] = src.shape[-1]
    mask = _np.tile(mask, repeat)
    if mask.shape != src.shape:
        raise ValueError('mask [height, width] must equal to src [height, width].')
    mask = __to_int(mask)
    return _np.where(mask, src, dst).astype(origin_dtype)
def bitwise_and(src1, src2, dst=None, mask=None):
    """Element-wise AND of two arrays, computed in int32, cast back to src1's dtype."""
    result_dtype = src1.dtype
    res = _np.bitwise_and(__to_int(src1), __to_int(src2))
    # copyTo applies the optional mask/dst selection before the final cast.
    return copyTo(res, mask, dst).astype(result_dtype)
def bitwise_or(src1, src2, dst=None, mask=None):
    """Element-wise OR of two arrays, computed in int32, cast back to src1's dtype."""
    result_dtype = src1.dtype
    # Cast back before the mask/dst selection (mirrors the original ordering).
    res = _np.bitwise_or(__to_int(src1), __to_int(src2)).astype(result_dtype)
    return copyTo(res, mask, dst)
def bitwise_xor(src1, src2, dst=None, mask=None):
    """Element-wise XOR of two arrays, computed in int32, cast back to src1's dtype."""
    result_dtype = src1.dtype
    res = _np.bitwise_xor(__to_int(src1), __to_int(src2))
    # copyTo applies the optional mask/dst selection before the final cast.
    return copyTo(res, mask, dst).astype(result_dtype)
def hconcat(src):
    # Join the given images side by side (concatenate along axis 1, the width).
    return _np.concatenate(src, 1)
def vconcat(src):
    # Stack the given images top to bottom (concatenate along axis 0, the height).
    return _np.concatenate(src, 0)
def mean(src, mask=None):
if mask is not None:
src = copyTo(src, mask)
res = _np.mean(src, [0, 1])
if res.ndim == 0: size = 0
else: size = res.shape[0]
if size < 4:
res = _np.pad(res, [0, 4 - size])
return res
def flip(src, flipCode):
    """Mirror an image via a warpAffine call.

    flipCode < 0 mirrors both axes, flipCode == 0 mirrors vertically
    (y -> h-1-y), and any positive value mirrors horizontally (x -> w-1-x).
    """
    h, w, c = src.shape
    # Six coefficients look like the rows of a 2x3 affine matrix
    # (a, b, tx, c, d, ty) — TODO confirm against MNN.CVMatrix docs.
    if flipCode < 0:
        coeffs = [-1., 0., w-1., 0., -1., h-1.]
    elif flipCode == 0:
        coeffs = [1., 0., 0., 0., -1., h-1.]
    else:
        coeffs = [-1., 0., w-1., 0., 1., 0.]
    m = MNN.CVMatrix()
    m.write(coeffs)
    return warpAffine(src, m, [w, h])
ROTATE_90_CLOCKWISE = 0
ROTATE_180 = 1
ROTATE_90_COUNTERCLOCKWISE = 2
def rotate(src, rotateMode):
    """Rotate an image by a multiple of 90 degrees.

    Implemented as transpose + flip; an unrecognised mode returns the
    input unchanged.
    """
    if rotateMode == ROTATE_90_CLOCKWISE:
        # Swap H/W via transpose, then mirror horizontally to finish the turn.
        return flip(src.transpose([1, 0, 2]), 1)
    elif rotateMode == ROTATE_180:
        return flip(src, -1)
    elif rotateMode == ROTATE_90_COUNTERCLOCKWISE:
        return flip(src.transpose([1, 0, 2]), 0)
    else:
        return src
|
[
"MNN.numpy.pad",
"MNN.numpy.bitwise_xor",
"MNN.CVMatrix",
"MNN.numpy.concatenate",
"MNN.numpy.where",
"MNN.numpy.zeros_like",
"MNN.numpy.expand_dims",
"MNN.numpy.bitwise_or",
"MNN.numpy.mean",
"MNN.numpy.bitwise_and",
"MNN.numpy.tile",
"_mnncengine.cv.resize"
] |
[((583, 645), '_mnncengine.cv.resize', '_F.resize', (['src', 'dsize', 'fx', 'fy', 'interpolation', 'code', 'mean', 'norm'], {}), '(src, dsize, fx, fy, interpolation, code, mean, norm)\n', (592, 645), True, 'import _mnncengine.cv as _F\n'), ((922, 947), 'MNN.numpy.expand_dims', '_np.expand_dims', (['mask', '(-1)'], {}), '(mask, -1)\n', (937, 947), True, 'import MNN.numpy as _np\n'), ((1033, 1055), 'MNN.numpy.tile', '_np.tile', (['mask', 'repeat'], {}), '(mask, repeat)\n', (1041, 1055), True, 'import MNN.numpy as _np\n'), ((1411, 1438), 'MNN.numpy.bitwise_and', '_np.bitwise_and', (['src1', 'src2'], {}), '(src1, src2)\n', (1426, 1438), True, 'import MNN.numpy as _np\n'), ((1859, 1886), 'MNN.numpy.bitwise_xor', '_np.bitwise_xor', (['src1', 'src2'], {}), '(src1, src2)\n', (1874, 1886), True, 'import MNN.numpy as _np\n'), ((1971, 1994), 'MNN.numpy.concatenate', '_np.concatenate', (['src', '(1)'], {}), '(src, 1)\n', (1986, 1994), True, 'import MNN.numpy as _np\n'), ((2024, 2047), 'MNN.numpy.concatenate', '_np.concatenate', (['src', '(0)'], {}), '(src, 0)\n', (2039, 2047), True, 'import MNN.numpy as _np\n'), ((2141, 2162), 'MNN.numpy.mean', '_np.mean', (['src', '[0, 1]'], {}), '(src, [0, 1])\n', (2149, 2162), True, 'import MNN.numpy as _np\n'), ((2355, 2369), 'MNN.CVMatrix', 'MNN.CVMatrix', ([], {}), '()\n', (2367, 2369), False, 'import MNN\n'), ((830, 849), 'MNN.numpy.zeros_like', '_np.zeros_like', (['src'], {}), '(src)\n', (844, 849), True, 'import MNN.numpy as _np\n'), ((2255, 2282), 'MNN.numpy.pad', '_np.pad', (['res', '[0, 4 - size]'], {}), '(res, [0, 4 - size])\n', (2262, 2282), True, 'import MNN.numpy as _np\n'), ((1222, 1247), 'MNN.numpy.where', '_np.where', (['mask', 'src', 'dst'], {}), '(mask, src, dst)\n', (1231, 1247), True, 'import MNN.numpy as _np\n'), ((1635, 1661), 'MNN.numpy.bitwise_or', '_np.bitwise_or', (['src1', 'src2'], {}), '(src1, src2)\n', (1649, 1661), True, 'import MNN.numpy as _np\n')]
|
#
# Copyright (c) 2017, UT-BATTELLE, LLC
# All rights reserved.
#
# This software is released under the BSD license detailed
# in the LICENSE file in the top level a-prime directory
#
import numpy
from get_season_months_index import get_season_months_index
def get_days_in_season_months(begin_month, end_month):
    """Return the day count of each month in the season begin_month..end_month.

    Uses a fixed non-leap-year calendar (February = 28 days); the month
    selection comes from get_season_months_index.
    """
    month_lengths = numpy.array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])
    index_months, n_months_season = get_season_months_index(begin_month, end_month)
    return month_lengths[index_months]
|
[
"get_season_months_index.get_season_months_index",
"numpy.array"
] |
[((334, 395), 'numpy.array', 'numpy.array', (['[31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]'], {}), '([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])\n', (345, 395), False, 'import numpy\n'), ((433, 480), 'get_season_months_index.get_season_months_index', 'get_season_months_index', (['begin_month', 'end_month'], {}), '(begin_month, end_month)\n', (456, 480), False, 'from get_season_months_index import get_season_months_index\n')]
|
import discord
from discord.ext import commands
from discord.ext.commands import Cog
import config
import json
from helpers.checks import check_if_staff
from helpers.userlogs import get_userlog, set_userlog, userlog_event_types
class ModUserlog(Cog):
    """Staff commands for viewing and editing per-user moderation logs."""

    def __init__(self, bot):
        self.bot = bot

    def get_userlog_embed_for_id(self, uid: str, name: str, own: bool = False,
                                 event=""):
        """Build an embed listing the logged events for the given user id.

        `own` selects the self-service wording and hides issuer details;
        `event` narrows the listing to one event type (or a list of types)
        instead of the default warns/bans/kicks/mutes.
        """
        own_note = " Good for you!" if own else ""
        wanted_events = ["warns", "bans", "kicks", "mutes"]
        if event and not isinstance(event, list):
            wanted_events = [event]
        embed = discord.Embed(color=discord.Color.dark_red())
        embed.set_author(name=f"Userlog for {name}")
        userlog = get_userlog()
        if uid not in userlog:
            embed.description = f"There are none!{own_note} (no entry)"
            embed.color = discord.Color.green()
            return embed
        for event_type in wanted_events:
            if event_type in userlog[uid] and userlog[uid][event_type]:
                event_name = userlog_event_types[event_type]
                for idx, event in enumerate(userlog[uid][event_type]):
                    # Issuer info is only shown to staff, not to a user
                    # viewing their own log.
                    issuer = "" if own else f"Issuer: {event['issuer_name']} "\
                                            f"({event['issuer_id']})\n"
                    embed.add_field(name=f"{event_name} {idx + 1}: "
                                         f"{event['timestamp']}",
                                    value=issuer + f"Reason: {event['reason']}",
                                    inline=False)
        # The watch state footer is staff-only as well.
        if not own and "watch" in userlog[uid]:
            watch_state = "" if userlog[uid]["watch"] else "NOT "
            embed.set_footer(text=f"User is {watch_state}under watch.")
        if not embed.fields:
            embed.description = f"There are none!{own_note}"
            embed.color = discord.Color.green()
        return embed

    def clear_event_from_id(self, uid: str, event_type):
        """Remove all events of `event_type` for `uid`; return a status string."""
        userlog = get_userlog()
        if uid not in userlog:
            return f"<@{uid}> has no {event_type}!"
        event_count = len(userlog[uid][event_type])
        if not event_count:
            return f"<@{uid}> has no {event_type}!"
        userlog[uid][event_type] = []
        set_userlog(json.dumps(userlog))
        return f"<@{uid}> no longer has any {event_type}!"

    def delete_event_from_id(self, uid: str, idx: int, event_type):
        """Delete the `idx`-th (1-based) event of `event_type` for `uid`.

        Returns an embed describing the removed event on success, or an
        error string when there is nothing to delete at that index.
        """
        userlog = get_userlog()
        if uid not in userlog:
            return f"<@{uid}> has no {event_type}!"
        event_count = len(userlog[uid][event_type])
        if not event_count:
            return f"<@{uid}> has no {event_type}!"
        if idx > event_count:
            return "Index is higher than "\
                   f"count ({event_count})!"
        if idx < 1:
            return "Index is below 1!"
        event = userlog[uid][event_type][idx - 1]
        event_name = userlog_event_types[event_type]
        embed = discord.Embed(color=discord.Color.dark_red(),
                              title=f"{event_name} {idx} on "
                                    f"{event['timestamp']}",
                              description=f"Issuer: {event['issuer_name']}\n"
                                          f"Reason: {event['reason']}")
        del userlog[uid][event_type][idx - 1]
        set_userlog(json.dumps(userlog))
        return embed

    @commands.guild_only()
    @commands.check(check_if_staff)
    @commands.command(aliases=["events"])
    async def eventtypes(self, ctx):
        """Lists the available event types, staff only."""
        event_list = [f"{et} ({userlog_event_types[et]})" for et in
                      userlog_event_types]
        event_text = ("Available events:\n``` - " +
                      "\n - ".join(event_list) +
                      "```")
        await ctx.send(event_text)

    @commands.guild_only()
    @commands.check(check_if_staff)
    @commands.command(name="userlog",
                     aliases=["listwarns", "getuserlog", "listuserlog"])
    async def userlog_cmd(self, ctx, target: discord.Member, event=""):
        """Lists the userlog events for a user, staff only."""
        embed = self.get_userlog_embed_for_id(str(target.id), str(target),
                                              event=event)
        await ctx.send(embed=embed)

    @commands.guild_only()
    @commands.check(check_if_staff)
    @commands.command(aliases=["listnotes", "usernotes"])
    async def notes(self, ctx, target: discord.Member):
        """Lists the notes for a user, staff only."""
        embed = self.get_userlog_embed_for_id(str(target.id), str(target),
                                              event="notes")
        await ctx.send(embed=embed)

    @commands.guild_only()
    @commands.command(aliases=["mywarns"])
    async def myuserlog(self, ctx):
        """Lists your userlog events (warns etc)."""
        embed = self.get_userlog_embed_for_id(str(ctx.author.id),
                                              str(ctx.author), True)
        await ctx.send(embed=embed)

    @commands.guild_only()
    @commands.check(check_if_staff)
    @commands.command(aliases=["listwarnsid"])
    async def userlogid(self, ctx, target: int):
        """Lists the userlog events for a user by ID, staff only."""
        embed = self.get_userlog_embed_for_id(str(target), str(target))
        await ctx.send(embed=embed)

    @commands.guild_only()
    @commands.check(check_if_staff)
    @commands.command(aliases=["clearwarns"])
    async def clearevent(self, ctx, target: discord.Member,
                         event="warns"):
        """Clears all events of given type for a user, staff only."""
        log_channel = self.bot.get_channel(config.modlog_channel)
        msg = self.clear_event_from_id(str(target.id), event)
        # clean_content escapes mentions/markdown before echoing the name.
        safe_name = await commands.clean_content().convert(ctx, str(target))
        await ctx.send(msg)
        msg = f"🗑 **Cleared {event}**: {ctx.author.mention} cleared"\
              f" all {event} events of {target.mention} | "\
              f"{safe_name}"
        await log_channel.send(msg)

    @commands.guild_only()
    @commands.check(check_if_staff)
    @commands.command(aliases=["clearwarnsid"])
    async def cleareventid(self, ctx, target: int, event="warns"):
        """Clears all events of given type for a userid, staff only."""
        log_channel = self.bot.get_channel(config.modlog_channel)
        msg = self.clear_event_from_id(str(target), event)
        await ctx.send(msg)
        msg = f"🗑 **Cleared {event}**: {ctx.author.mention} cleared"\
              f" all {event} events of <@{target}> "
        await log_channel.send(msg)

    @commands.guild_only()
    @commands.check(check_if_staff)
    @commands.command(aliases=["delwarn"])
    async def delevent(self, ctx, target: discord.Member, idx: int,
                       event="warns"):
        """Removes a specific event from a user, staff only."""
        log_channel = self.bot.get_channel(config.modlog_channel)
        del_event = self.delete_event_from_id(str(target.id), idx, event)
        event_name = userlog_event_types[event].lower()
        # delete_event_from_id returns an Embed on success, or an error
        # string when there was nothing to delete.
        if isinstance(del_event, discord.Embed):
            await ctx.send(f"{target.mention} has a {event_name} removed!")
            safe_name = await commands.clean_content().convert(ctx, str(target))
            msg = f"🗑 **Deleted {event_name}**: "\
                  f"{ctx.author.mention} removed "\
                  f"{event_name} {idx} from {target.mention} | {safe_name}"
            await log_channel.send(msg, embed=del_event)
        else:
            await ctx.send(del_event)

    @commands.guild_only()
    @commands.check(check_if_staff)
    @commands.command(aliases=["delwarnid"])
    async def deleventid(self, ctx, target: int, idx: int, event="warns"):
        """Removes a specific event from a userid, staff only."""
        log_channel = self.bot.get_channel(config.modlog_channel)
        del_event = self.delete_event_from_id(str(target), idx, event)
        event_name = userlog_event_types[event].lower()
        # Same Embed-or-error-string contract as delevent above.
        if isinstance(del_event, discord.Embed):
            await ctx.send(f"<@{target}> has a {event_name} removed!")
            msg = f"🗑 **Deleted {event_name}**: "\
                  f"{ctx.author.mention} removed "\
                  f"{event_name} {idx} from <@{target}> "
            await log_channel.send(msg, embed=del_event)
        else:
            await ctx.send(del_event)

    @commands.guild_only()
    @commands.check(check_if_staff)
    @commands.command()
    async def userinfo(self, ctx, *, user: discord.Member):
        """Gets user info, staff only."""
        role = user.top_role.name
        # Break up "@everyone" so echoing it back cannot ping the role.
        if role == "@everyone":
            role = "@ everyone"
        event_types = ["warns", "bans", "kicks", "mutes", "notes"]
        embed = self.get_userlog_embed_for_id(str(user.id), str(user),
                                              event=event_types)
        user_name = await commands.clean_content().convert(ctx, user.name)
        display_name = await commands.clean_content().convert(ctx, user.display_name)
        await ctx.send(f"user = {user_name}\n"
                       f"id = {user.id}\n"
                       f"avatar = {user.avatar_url}\n"
                       f"bot = {user.bot}\n"
                       f"created_at = {user.created_at}\n"
                       f"display_name = {display_name}\n"
                       f"joined_at = {user.joined_at}\n"
                       f"activities = `{user.activities}`\n"
                       f"color = {user.colour}\n"
                       f"top_role = {role}\n",
                       embed=embed)
def setup(bot):
    # Entry point used by the bot's extension loader to register this cog.
    bot.add_cog(ModUserlog(bot))
|
[
"discord.ext.commands.command",
"discord.ext.commands.check",
"discord.Color.green",
"json.dumps",
"discord.ext.commands.clean_content",
"discord.Color.dark_red",
"discord.ext.commands.guild_only",
"helpers.userlogs.get_userlog"
] |
[((3450, 3471), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (3469, 3471), False, 'from discord.ext import commands\n'), ((3477, 3507), 'discord.ext.commands.check', 'commands.check', (['check_if_staff'], {}), '(check_if_staff)\n', (3491, 3507), False, 'from discord.ext import commands\n'), ((3513, 3549), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['events']"}), "(aliases=['events'])\n", (3529, 3549), False, 'from discord.ext import commands\n'), ((3928, 3949), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (3947, 3949), False, 'from discord.ext import commands\n'), ((3955, 3985), 'discord.ext.commands.check', 'commands.check', (['check_if_staff'], {}), '(check_if_staff)\n', (3969, 3985), False, 'from discord.ext import commands\n'), ((3991, 4079), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""userlog"""', 'aliases': "['listwarns', 'getuserlog', 'listuserlog']"}), "(name='userlog', aliases=['listwarns', 'getuserlog',\n 'listuserlog'])\n", (4007, 4079), False, 'from discord.ext import commands\n'), ((4409, 4430), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (4428, 4430), False, 'from discord.ext import commands\n'), ((4436, 4466), 'discord.ext.commands.check', 'commands.check', (['check_if_staff'], {}), '(check_if_staff)\n', (4450, 4466), False, 'from discord.ext import commands\n'), ((4472, 4524), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['listnotes', 'usernotes']"}), "(aliases=['listnotes', 'usernotes'])\n", (4488, 4524), False, 'from discord.ext import commands\n'), ((4813, 4834), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (4832, 4834), False, 'from discord.ext import commands\n'), ((4840, 4877), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['mywarns']"}), "(aliases=['mywarns'])\n", (4856, 4877), False, 'from discord.ext import 
commands\n'), ((5144, 5165), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (5163, 5165), False, 'from discord.ext import commands\n'), ((5171, 5201), 'discord.ext.commands.check', 'commands.check', (['check_if_staff'], {}), '(check_if_staff)\n', (5185, 5201), False, 'from discord.ext import commands\n'), ((5207, 5248), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['listwarnsid']"}), "(aliases=['listwarnsid'])\n", (5223, 5248), False, 'from discord.ext import commands\n'), ((5481, 5502), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (5500, 5502), False, 'from discord.ext import commands\n'), ((5508, 5538), 'discord.ext.commands.check', 'commands.check', (['check_if_staff'], {}), '(check_if_staff)\n', (5522, 5538), False, 'from discord.ext import commands\n'), ((5544, 5584), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['clearwarns']"}), "(aliases=['clearwarns'])\n", (5560, 5584), False, 'from discord.ext import commands\n'), ((6191, 6212), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (6210, 6212), False, 'from discord.ext import commands\n'), ((6218, 6248), 'discord.ext.commands.check', 'commands.check', (['check_if_staff'], {}), '(check_if_staff)\n', (6232, 6248), False, 'from discord.ext import commands\n'), ((6254, 6296), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['clearwarnsid']"}), "(aliases=['clearwarnsid'])\n", (6270, 6296), False, 'from discord.ext import commands\n'), ((6754, 6775), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (6773, 6775), False, 'from discord.ext import commands\n'), ((6781, 6811), 'discord.ext.commands.check', 'commands.check', (['check_if_staff'], {}), '(check_if_staff)\n', (6795, 6811), False, 'from discord.ext import commands\n'), ((6817, 6854), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['delwarn']"}), 
"(aliases=['delwarn'])\n", (6833, 6854), False, 'from discord.ext import commands\n'), ((7746, 7767), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (7765, 7767), False, 'from discord.ext import commands\n'), ((7773, 7803), 'discord.ext.commands.check', 'commands.check', (['check_if_staff'], {}), '(check_if_staff)\n', (7787, 7803), False, 'from discord.ext import commands\n'), ((7809, 7848), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['delwarnid']"}), "(aliases=['delwarnid'])\n", (7825, 7848), False, 'from discord.ext import commands\n'), ((8603, 8624), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (8622, 8624), False, 'from discord.ext import commands\n'), ((8630, 8660), 'discord.ext.commands.check', 'commands.check', (['check_if_staff'], {}), '(check_if_staff)\n', (8644, 8660), False, 'from discord.ext import commands\n'), ((8666, 8684), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (8682, 8684), False, 'from discord.ext import commands\n'), ((759, 772), 'helpers.userlogs.get_userlog', 'get_userlog', ([], {}), '()\n', (770, 772), False, 'from helpers.userlogs import get_userlog, set_userlog, userlog_event_types\n'), ((2037, 2050), 'helpers.userlogs.get_userlog', 'get_userlog', ([], {}), '()\n', (2048, 2050), False, 'from helpers.userlogs import get_userlog, set_userlog, userlog_event_types\n'), ((2491, 2504), 'helpers.userlogs.get_userlog', 'get_userlog', ([], {}), '()\n', (2502, 2504), False, 'from helpers.userlogs import get_userlog, set_userlog, userlog_event_types\n'), ((903, 924), 'discord.Color.green', 'discord.Color.green', ([], {}), '()\n', (922, 924), False, 'import discord\n'), ((1918, 1939), 'discord.Color.green', 'discord.Color.green', ([], {}), '()\n', (1937, 1939), False, 'import discord\n'), ((2324, 2343), 'json.dumps', 'json.dumps', (['userlog'], {}), '(userlog)\n', (2334, 2343), False, 'import json\n'), ((3402, 3421), 'json.dumps', 
'json.dumps', (['userlog'], {}), '(userlog)\n', (3412, 3421), False, 'import json\n'), ((662, 686), 'discord.Color.dark_red', 'discord.Color.dark_red', ([], {}), '()\n', (684, 686), False, 'import discord\n'), ((3037, 3061), 'discord.Color.dark_red', 'discord.Color.dark_red', ([], {}), '()\n', (3059, 3061), False, 'import discord\n'), ((5910, 5934), 'discord.ext.commands.clean_content', 'commands.clean_content', ([], {}), '()\n', (5932, 5934), False, 'from discord.ext import commands\n'), ((9116, 9140), 'discord.ext.commands.clean_content', 'commands.clean_content', ([], {}), '()\n', (9138, 9140), False, 'from discord.ext import commands\n'), ((9194, 9218), 'discord.ext.commands.clean_content', 'commands.clean_content', ([], {}), '()\n', (9216, 9218), False, 'from discord.ext import commands\n'), ((7401, 7425), 'discord.ext.commands.clean_content', 'commands.clean_content', ([], {}), '()\n', (7423, 7425), False, 'from discord.ext import commands\n')]
|
#!/usr/bin/env python3
"""Clean a raw CSV: drop fixed rows 0-2 and 16-18, promote row 3 to the header."""
import pandas as pd
import sys

source_path = sys.argv[1]
target_path = sys.argv[2]

frame = pd.read_csv(source_path, header=None)
# Discard the fixed unwanted row labels from the raw file.
frame = frame.drop([0, 1, 2, 16, 17, 18])
# The first surviving row (label 3) holds the column names; use it as the
# header and then drop it from the data.
frame.columns = frame.iloc[0]
frame = frame.reindex(frame.index.drop(3))
frame.to_csv(target_path, index=False)
|
[
"pandas.read_csv"
] |
[((120, 156), 'pandas.read_csv', 'pd.read_csv', (['input_file'], {'header': 'None'}), '(input_file, header=None)\n', (131, 156), True, 'import pandas as pd\n')]
|
import requests
import warnings
from .helper import String
from ..errors import RequestError
class Session(object):
    """Process-wide requests.Session wrapper with a shared response cache.

    The cache lives on the class itself, so every Session instance (and every
    import site) shares the same cached responses.

    Fixes applied:
    - check_cache no longer uses a bare ``except:`` (which also swallowed
      SystemExit/KeyboardInterrupt).
    - method() previously had ``finally: return web`` which silently swallowed
      the intended RequestError(4) on non-2xx responses; it now raises.
    - an unsupported HTTP verb now raises ValueError instead of crashing later
      with NameError on an unbound ``web``.
    """

    TIMEOUT = 10       # per-request timeout in seconds
    TIMEOUT_COUNT = 0  # consecutive timeouts seen; reset after a success

    def __new__(cls, *args, **kwargs):
        # Lazily create the shared class-level cache exactly once.
        if not hasattr(cls, "cache"):
            cls.cache = {}
        return super(Session, cls).__new__(cls)

    def __init__(self):
        self.session = requests.Session()
        # Large connection pool so many hosts/threads can reuse connections.
        adapter = requests.adapters.HTTPAdapter(pool_connections=600, pool_maxsize=600)
        self.session.mount("https://", adapter)

    @classmethod
    def put_in_cache(cls, url, response):
        """Store a response for this URL in the shared cache."""
        url = url.strip()
        cls.cache[url] = dict(count=1, response=response)

    @classmethod
    def clear_cache(cls):
        """ clear cache to prevent memory error"""
        del cls.cache
        cls.cache = {}

    @classmethod
    def check_cache(cls, url):
        """Checks if url already have a response.
        Stop from calling the request method more than once.
        Great for saving mobile data on mobile devices.

        Returns the cached response, or None on a miss.
        """
        url = url.strip()
        try:
            if url in cls.cache:
                cls.cache[url]["count"] += 1
                return cls.cache[url]["response"]
        except MemoryError:
            cls.clear_cache()
        except Exception:
            # Best-effort cache lookup: any other failure is treated as a miss.
            pass
        return None

    def method(self, method, url, bypass=None, **kwargs):
        """Perform an HTTP request ('get', 'post', 'put' or 'head').

        Non-POST requests are served from the cache when possible.
        Raises RequestError(1) for invalid URLs, (2) for repeated timeouts,
        (3) for any other transport failure, and (4) for non-2xx statuses.
        """
        method = String.lower(method)
        if method not in ("get", "post", "put", "head"):
            # Previously an unknown verb fell through and crashed with
            # NameError on the unbound `web`.
            raise ValueError("unsupported HTTP method: %s" % method)
        cache = self.check_cache(url)
        if cache and method != "post":
            return cache
        try:
            if method == "get":
                web = self.session.get(url, timeout=self.TIMEOUT, **kwargs)
            elif method == "post":
                web = self.session.post(url, timeout=self.TIMEOUT, **kwargs)
            elif method == "put":
                web = self.session.put(url, timeout=self.TIMEOUT, **kwargs)
            else:  # head
                web = self.session.head(url, timeout=self.TIMEOUT)
        except requests.exceptions.InvalidURL:
            raise RequestError(1)
        except requests.exceptions.Timeout:
            self.TIMEOUT_COUNT += 1
            if self.TIMEOUT_COUNT >= 3:
                print("\n")  # need for spacing
                warn_msg = "\nWarning: Please check your internet connection ! "
                warnings.warn(warn_msg)
                self.TIMEOUT_COUNT = 0
            raise RequestError(2)
        except Exception:
            raise RequestError(3)
        try:
            web.raise_for_status()
        except requests.exceptions.HTTPError:
            # Fix: the original `finally: return web` swallowed this error, so
            # a non-success status silently returned the response anyway.
            raise RequestError(4)
        self.put_in_cache(url.strip(), web)
        self.TIMEOUT_COUNT = 0
        return web
|
[
"requests.adapters.HTTPAdapter",
"warnings.warn",
"requests.Session"
] |
[((441, 459), 'requests.Session', 'requests.Session', ([], {}), '()\n', (457, 459), False, 'import requests\n'), ((510, 579), 'requests.adapters.HTTPAdapter', 'requests.adapters.HTTPAdapter', ([], {'pool_connections': '(600)', 'pool_maxsize': '(600)'}), '(pool_connections=600, pool_maxsize=600)\n', (539, 579), False, 'import requests\n'), ((2510, 2533), 'warnings.warn', 'warnings.warn', (['warn_msg'], {}), '(warn_msg)\n', (2523, 2533), False, 'import warnings\n')]
|
from calendar import monthrange
from datetime import datetime, timedelta, tzinfo, date
from google.appengine.ext import ndb
from pytz import timezone, UTC
from consts.event_type import EventType
from models.event import Event
# Fixed-offset US Eastern Standard Time zone (pytz static zone) used for
# scheduling Kickoff; note this never observes daylight saving time.
EST = timezone('EST')
class SeasonHelper:
    """ General season-information helper methods

    Fix: the static methods previously used ``date=datetime.now()`` /
    ``year=datetime.now().year`` as defaults, which Python evaluates ONCE at
    import time — a long-running process would forever use the stale import
    moment. Defaults are now None sentinels resolved per call; explicit
    arguments behave exactly as before.
    """

    @staticmethod
    def effective_season_year(date=None):
        """
        Given a date, find the "effective season" year for the date. If all official events have been
        played, it's effectively next season.
        """
        if date is None:
            date = datetime.now()
        effective_season_year = date.year
        last_event_end_date = None
        last_event_list = Event.query(
            Event.year == int(date.year),
            Event.event_type_enum.IN(EventType.SEASON_EVENT_TYPES)
        ).order(-Event.end_date).fetch(1, projection=[Event.end_date])
        if last_event_list:
            last_event = last_event_list[0]
            last_event_end_date = last_event.end_date
        if last_event_end_date is None:
            # No events for year - assume current year is effective season year
            return effective_season_year
        if date > last_event_end_date:
            # All events for current season have been played - effective season is next year
            return effective_season_year + 1
        return effective_season_year

    @staticmethod
    def is_kickoff_at_least_one_day_away(date=None, year=None):
        """
        Returns True if Kickoff for a given year is at least one day away from the current date.
        This will always be True if Kickoff for a given year happened before the current date.
        Ex: SeasonHelper.is_kickoff_at_least_one_day_away(1992) == True
        """
        if date is None:
            date = datetime.now(UTC)
        if year is None:
            year = datetime.now().year
        kickoff_date = SeasonHelper.kickoff_datetime_utc(year)
        return date >= (kickoff_date - timedelta(days=1))

    @staticmethod
    def kickoff_datetime_est(year=None):
        """ Computes the date of Kickoff for a given year. Kickoff is always the first Saturday in January after Jan 2nd. """
        if year is None:
            year = datetime.now().year
        jan_2nd = datetime(year, 1, 2, 10, 30, 00, tzinfo=EST)  # Start Kickoff at 10:30am EST
        days_ahead = 5 - jan_2nd.weekday()  # Saturday is 5
        # Kickoff won't occur *on* Jan 2nd if it's a Saturday - it'll be the next Saturday
        if days_ahead <= 0:
            days_ahead += 7
        return jan_2nd + timedelta(days=days_ahead)

    @staticmethod
    def kickoff_datetime_utc(year=None):
        """ Converts kickoff_date to a UTC datetime """
        if year is None:
            year = datetime.now().year
        return SeasonHelper.kickoff_datetime_est(year).astimezone(UTC)

    @staticmethod
    def first_event_datetime_utc(year=None):
        """ Computes day the first in-season event begins """
        if year is None:
            year = datetime.now().year
        from database import event_query
        events = event_query.EventListQuery(year).fetch()
        earliest_start = None
        # NOTE(review): this local shadows pytz.timezone and is assigned but
        # never returned — kept for behavioral parity; verify it is unneeded.
        timezone = None
        for event in events:
            if event.is_season_event and (earliest_start is None or event.start_date < earliest_start):
                earliest_start = event.start_date
                timezone = event.timezone_id
        return earliest_start
|
[
"database.event_query.EventListQuery",
"datetime.datetime",
"datetime.timedelta",
"pytz.timezone",
"datetime.datetime.now",
"models.event.Event.event_type_enum.IN"
] |
[((235, 250), 'pytz.timezone', 'timezone', (['"""EST"""'], {}), "('EST')\n", (243, 250), False, 'from pytz import timezone, UTC\n'), ((381, 395), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (393, 395), False, 'from datetime import datetime, timedelta, tzinfo, date\n'), ((1433, 1450), 'datetime.datetime.now', 'datetime.now', (['UTC'], {}), '(UTC)\n', (1445, 1450), False, 'from datetime import datetime, timedelta, tzinfo, date\n'), ((2107, 2150), 'datetime.datetime', 'datetime', (['year', '(1)', '(2)', '(10)', '(30)', '(0)'], {'tzinfo': 'EST'}), '(year, 1, 2, 10, 30, 0, tzinfo=EST)\n', (2115, 2150), False, 'from datetime import datetime, timedelta, tzinfo, date\n'), ((1457, 1471), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1469, 1471), False, 'from datetime import datetime, timedelta, tzinfo, date\n'), ((1941, 1955), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1953, 1955), False, 'from datetime import datetime, timedelta, tzinfo, date\n'), ((2416, 2442), 'datetime.timedelta', 'timedelta', ([], {'days': 'days_ahead'}), '(days=days_ahead)\n', (2425, 2442), False, 'from datetime import datetime, timedelta, tzinfo, date\n'), ((2496, 2510), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2508, 2510), False, 'from datetime import datetime, timedelta, tzinfo, date\n'), ((2702, 2716), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2714, 2716), False, 'from datetime import datetime, timedelta, tzinfo, date\n'), ((1869, 1886), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1878, 1886), False, 'from datetime import datetime, timedelta, tzinfo, date\n'), ((2844, 2876), 'database.event_query.EventListQuery', 'event_query.EventListQuery', (['year'], {}), '(year)\n', (2870, 2876), False, 'from database import event_query\n'), ((739, 793), 'models.event.Event.event_type_enum.IN', 'Event.event_type_enum.IN', (['EventType.SEASON_EVENT_TYPES'], {}), '(EventType.SEASON_EVENT_TYPES)\n', 
(763, 793), False, 'from models.event import Event\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from .resblocks import BasicBlock
from .attention import SEAttention
class SEFCResPRE(nn.Module):
    """1x1-conv residual tower with SE (squeeze-and-excitation) attention.

    ``args`` keys (all optional):
        input_channel  -- channels of the input feature map (default 441)
        out_channel    -- channels produced by the final 1x1 conv (default 10)
        droprate       -- dropout rate passed to each BasicBlock (default 0.2)
        hidden_channel -- width of the residual trunk (default 64)
        blocks         -- number of BasicBlocks in the trunk (default 22)
        reduction      -- SE channel-reduction ratio (default 4)
    """

    def __init__(self, args=None):
        super(SEFCResPRE, self).__init__()
        # Fix: `args={}` was a mutable default argument; use a None sentinel.
        # Passing a dict explicitly behaves exactly as before.
        args = {} if args is None else args
        in_channel, out_channel = args.get('input_channel', 441), args.get('out_channel', 10)
        droprate = args.get('droprate', 0.2)
        hidden_channel = args.get('hidden_channel', 64)
        blocks = int(args.get('blocks', 22))
        reduction_rate = args.get('reduction', 4)
        # 1x1 stem projects the input into the trunk width.
        self.conv1 = nn.Conv2d(in_channel, hidden_channel, kernel_size=1, bias=False)
        self.in1 = nn.InstanceNorm2d(hidden_channel)
        self.relu = nn.ReLU(inplace=True)
        layer = []
        for _ in range(blocks):
            layer.append(BasicBlock(
                hidden_channel, hidden_channel,
                droprate=droprate,
                attention=SEAttention(hidden_channel, reduction=reduction_rate)
            ))
        self.layer = nn.Sequential(*layer)
        # 1x1 head maps the trunk back to the requested output channels.
        self.final = nn.Conv2d(hidden_channel, out_channel, kernel_size=1)

    def forward(self, x):
        """Stem (conv -> InstanceNorm -> ReLU), residual trunk, 1x1 head."""
        x = self.conv1(x)
        x = self.in1(x)
        x = self.relu(x)
        x = self.layer(x)
        x = self.final(x)
        return x
|
[
"torch.nn.Conv2d",
"torch.nn.ReLU",
"torch.nn.InstanceNorm2d",
"torch.nn.Sequential"
] |
[((557, 621), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channel', 'hidden_channel'], {'kernel_size': '(1)', 'bias': '(False)'}), '(in_channel, hidden_channel, kernel_size=1, bias=False)\n', (566, 621), True, 'import torch.nn as nn\n'), ((645, 678), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['hidden_channel'], {}), '(hidden_channel)\n', (662, 678), True, 'import torch.nn as nn\n'), ((699, 720), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (706, 720), True, 'import torch.nn as nn\n'), ((1017, 1038), 'torch.nn.Sequential', 'nn.Sequential', (['*layer'], {}), '(*layer)\n', (1030, 1038), True, 'import torch.nn as nn\n'), ((1061, 1114), 'torch.nn.Conv2d', 'nn.Conv2d', (['hidden_channel', 'out_channel'], {'kernel_size': '(1)'}), '(hidden_channel, out_channel, kernel_size=1)\n', (1070, 1114), True, 'import torch.nn as nn\n')]
|
from socket import *
import _io
class Socket(socket):
    """Datagram socket with a tiny framing protocol.

    A message is sent as a ``b'\\x00'`` start marker, then payload chunks of at
    most ``bufsize`` bytes, then a ``b'\\xff'`` end marker.

    Fixes applied:
    - post(): the send loop had no break, so it looped forever and the
      terminator/return statement were unreachable.
    - receive(): once ``inmsg`` was True the ``b'\\xff'`` terminator was
      appended to the payload instead of ending the message, so receive()
      blocked forever; the terminator is now checked before payload chunks.

    NOTE(review): a payload chunk that happens to equal a marker byte still
    confuses the framing — protocol limitation, not addressed here.
    """

    def __init__(self, *args, **kwargs):
        # `bufsize` is our own keyword; remove it before delegating so the
        # base socket constructor never sees it.
        bufsize = kwargs.pop("bufsize", 1024)
        if bufsize < 10:
            raise ValueError("buffer too small")
        self.bufsize = bufsize
        super().__init__(*args, **kwargs)

    def receive(self):
        """Block until one framed message arrives; return (payload, addr)."""
        result = b""
        inmsg = False
        addr = None
        while True:
            m, addr = self.recvfrom(self.bufsize)
            if m == b'\x00':
                inmsg = True
            elif m == b'\xff':
                # End marker terminates the message (previously unreachable
                # once inmsg was True).
                break
            elif inmsg:
                result += m
        return result, addr

    def post(self, data, addr):
        """Send `data` to `addr` using the framing protocol; return len(data)."""
        buf = _io.BytesIO(data)
        self.sendto(b'\x00', addr)
        while True:
            chunk = buf.read(self.bufsize)
            if not chunk:
                # EOF — previously this loop never terminated.
                break
            self.sendto(chunk, addr)
        self.sendto(b'\xff', addr)
        return len(buf.getvalue())
|
[
"_io.BytesIO"
] |
[((789, 806), '_io.BytesIO', '_io.BytesIO', (['data'], {}), '(data)\n', (800, 806), False, 'import _io\n')]
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import division
import time
from datetime import datetime
import warnings
import numpy as np
import pandas as pd
from numpy import dot, exp
from numpy.linalg import norm, inv
from scipy.linalg import solve as spsolve
from scipy.integrate import trapz
import scipy.stats as stats
from lifelines.fitters import BaseFitter
from lifelines.statistics import chisq_test
from lifelines.utils import (survival_table_from_events, inv_normal_cdf, normalize,
significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times,
pass_for_numeric_dtypes_or_raise, check_low_var, coalesce,
check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning,
StepSizer, ConvergenceError, string_justify)
class CoxPHFitter(BaseFitter):
"""
This class implements fitting Cox's proportional hazard model:
h(t|x) = h_0(t)*exp(x'*beta)
Parameters:
alpha: the level in the confidence intervals.
tie_method: specify how the fitter should deal with ties. Currently only
'Efron' is available.
penalizer: Attach a L2 penalizer to the size of the coeffcients during regression. This improves
stability of the estimates and controls for high correlation between covariates.
For example, this shrinks the absolute value of beta_i. Recommended, even if a small value.
The penalty is 1/2 * penalizer * ||beta||^2.
strata: specify a list of columns to use in stratification. This is useful if a
catagorical covariate does not obey the proportional hazard assumption. This
is used similar to the `strata` expression in R.
See http://courses.washington.edu/b515/l17.pdf.
"""
def __init__(self, alpha=0.95, tie_method='Efron', penalizer=0.0, strata=None):
    """Create a Cox proportional-hazards fitter.

    alpha: confidence-interval level, must lie in (0, 1].
    tie_method: tie-handling scheme; only 'Efron' is implemented.
    penalizer: non-negative L2 penalty strength (1/2 * penalizer * ||beta||^2).
    strata: optional list of columns used for stratification.
    """
    # Fail fast on misconfiguration before storing anything.
    if not (0 < alpha <= 1.):
        raise ValueError('alpha parameter must be between 0 and 1.')
    if penalizer < 0:
        raise ValueError("penalizer parameter must be >= 0.")
    if tie_method != 'Efron':
        raise NotImplementedError("Only Efron is available atm.")

    self.strata = strata
    self.penalizer = penalizer
    self.tie_method = tie_method
    self.alpha = alpha
def fit(self, df, duration_col, event_col=None,
        show_progress=False, initial_beta=None,
        strata=None, step_size=None, weights_col=None,
        cluster_col=None, robust=False):
    """
    Fit the Cox Propertional Hazard model to a dataset. Tied survival times
    are handled using Efron's tie-method.

    Parameters:
      df: a Pandas dataframe with necessary columns `duration_col` and
         `event_col`, plus other covariates. `duration_col` refers to
         the lifetimes of the subjects. `event_col` refers to whether
         the 'death' events was observed: 1 if observed, 0 else (censored).
      duration_col: the column in dataframe that contains the subjects'
         lifetimes.
      event_col: the column in dataframe that contains the subjects' death
         observation. If left as None, assume all individuals are non-censored.
      weights_col: an optional column in the dataframe that denotes the weight per subject.
         This column is expelled and not used as a covariate, but as a weight in the
         final regression. Default weight is 1.
         This can be used for case-weights. For example, a weight of 2 means there were two subjects with
         identical observations.
         This can be used for sampling weights. In that case, use `robust=True` to get more accurate standard errors.
      show_progress: since the fitter is iterative, show convergence diagnostics.
      initial_beta: initialize the starting point of the iterative
         algorithm. Default is the zero vector.
      strata: specify a list of columns to use in stratification. This is useful if a
         catagorical covariate does not obey the proportional hazard assumption. This
         is used similar to the `strata` expression in R.
         See http://courses.washington.edu/b515/l17.pdf.
      step_size: set an initial step size for the fitting algorithm.
      robust: Compute the robust errors using the Huber sandwich estimator, aka Wei-Lin estimate. This does not handle
        ties, so if there are high number of ties, results may significantly differ. See
        "The Robust Inference for the Cox Proportional Hazards Model", Journal of the American Statistical Association, Vol. 84, No. 408 (Dec., 1989), pp. 1074- 1078
      cluster_col: specifies what column has unique identifers for clustering covariances. Using this forces the sandwich estimator (robust variance estimator) to
        be used.

    Returns:
        self, with additional properties: hazards_, confidence_intervals_, baseline_survival_, etc.
    """
    # Work on a copy; the caller's frame is never mutated.
    df = df.copy()

    # Sort on time — the Efron gradient routine assumes T-sorted data.
    df = df.sort_values(by=duration_col)

    self._time_fit_was_called = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") + ' UTC'
    self.duration_col = duration_col
    self.event_col = event_col
    self.robust = robust
    self.cluster_col = cluster_col
    self.weights_col = weights_col
    self._n_examples = df.shape[0]
    # Constructor-time strata wins only when fit() is not given one.
    self.strata = coalesce(strata, self.strata)
    if self.strata is not None:
        # Remember the original index so durations/events can be restored
        # after the strata columns become the index.
        original_index = df.index.copy()
        df = df.set_index(self.strata)

    # Extract time and event
    T = df[duration_col]
    del df[duration_col]
    if event_col is None:
        # No censoring information: treat every subject as observed.
        E = pd.Series(np.ones(df.shape[0]), index=df.index)
    else:
        E = df[event_col]
        del df[event_col]

    if weights_col:
        weights = df.pop(weights_col)
        if (weights.astype(int) != weights).any() and not self.robust:
            # Non-integer weights look like sampling/propensity scores, for
            # which the naive variance estimate is biased.
            warnings.warn("""It appears your weights are not integers, possibly propensity or sampling scores then?
It's important to know that the naive variance estimates of the coefficients are biased. Instead a) set `robust=True` in the call to `fit`, or b) use Monte Carlo to
estimate the variances. See paper "Variance estimation when using inverse probability of treatment weighting (IPTW) with survival analysis"
""", RuntimeWarning)
        if (weights <= 0).any():
            raise ValueError("values in weights_col must be positive.")
    else:
        # Default: every subject has unit weight.
        weights = pd.Series(np.ones((self._n_examples,)), index=df.index)

    if self.cluster_col:
        self._clusters = df.pop(self.cluster_col)

    self._check_values(df, T, E)
    df = df.astype(float)

    # save fitting data for later
    self.durations = T.copy()
    self.event_observed = E.copy()
    if self.strata is not None:
        self.durations.index = original_index
        self.event_observed.index = original_index
    self.event_observed = self.event_observed.astype(bool)

    # Covariates are standardized for the optimizer; coefficients are
    # un-standardized again below.
    self._norm_mean = df.mean(0)
    self._norm_std = df.std(0)

    E = E.astype(bool)

    hazards_ = self._newton_rhaphson(normalize(df, self._norm_mean, self._norm_std), T, E,
                                     weights=weights,
                                     initial_beta=initial_beta,
                                     show_progress=show_progress,
                                     step_size=step_size)

    # Rescale coefficients back to the original covariate scale.
    self.hazards_ = pd.DataFrame(hazards_.T, columns=df.columns, index=['coef']) / self._norm_std
    self.variance_matrix_ = -inv(self._hessian_) / np.outer(self._norm_std, self._norm_std)
    self.standard_errors_ = self._compute_standard_errors(normalize(df, self._norm_mean, self._norm_std), T, E, weights)
    self.confidence_intervals_ = self._compute_confidence_intervals()

    self.baseline_hazard_ = self._compute_baseline_hazards(df, T, E, weights)
    self.baseline_cumulative_hazard_ = self._compute_baseline_cumulative_hazard()
    self.baseline_survival_ = self._compute_baseline_survival()
    self._predicted_partial_hazards_ = self.predict_partial_hazard(df).values
    self._train_log_partial_hazard = self.predict_log_partial_hazard(self._norm_mean.to_frame().T)

    return self
def _newton_rhaphson(self, X, T, E, weights=None, initial_beta=None, step_size=None,
                     precision=10e-6, show_progress=True, max_steps=50):
    """
    Newton Rhaphson algorithm for fitting CPH model.

    Note that data is assumed to be sorted on T!

    Parameters:
        X: (n,d) Pandas DataFrame of observations.
        T: (n) Pandas Series representing observed durations.
        E: (n) Pandas Series representing death events.
        weights: (n) an iterable representing weights per observation.
        initial_beta: (1,d) numpy array of initial starting point for
                      NR algorithm. Default 0.
        step_size: float > 0.001 to determine a starting step size in NR algorithm.
        precision: the convergence halts if the norm of delta between
                 successive positions is less than epsilon.
        show_progress: since the fitter is iterative, show convergence
                 diagnostics.
        max_steps: the maximum number of interations of the Newton-Rhaphson algorithm.

    Returns:
        beta: (1,d) numpy array.
    """
    self.path = []
    assert precision <= 1., "precision must be less than or equal to 1."
    n, d = X.shape

    # make sure betas are correct size.
    if initial_beta is not None:
        assert initial_beta.shape == (d, 1)
        beta = initial_beta
    else:
        beta = np.zeros((d, 1))

    step_sizer = StepSizer(step_size)
    step_size = step_sizer.next()

    # Method of choice is just efron right now
    if self.tie_method == 'Efron':
        get_gradients = self._get_efron_values
    else:
        raise NotImplementedError("Only Efron is available.")

    i = 0
    converging = True
    ll, previous_ll = 0, 0
    start = time.time()

    while converging:
        self.path.append(beta.copy())
        i += 1
        if self.strata is None:
            h, g, ll = get_gradients(X.values, beta, T.values, E.values, weights.values)
        else:
            # Stratified fit: gradients/hessians/log-lik are additive over strata.
            g = np.zeros_like(beta).T
            h = np.zeros((beta.shape[0], beta.shape[0]))
            ll = 0
            for strata in np.unique(X.index):
                stratified_X, stratified_T, stratified_E, stratified_W = X.loc[[strata]], T.loc[[strata]], E.loc[[strata]], weights.loc[[strata]]
                _h, _g, _ll = get_gradients(stratified_X.values, beta, stratified_T.values, stratified_E.values, stratified_W.values)
                g += _g
                h += _h
                ll += _ll

        if self.penalizer > 0:
            # add the gradient and hessian of the l2 term
            g -= self.penalizer * beta.T
            h.flat[::d + 1] -= self.penalizer  # subtract on the diagonal only

        # reusing a piece to make g * inv(h) * g.T faster later
        try:
            inv_h_dot_g_T = spsolve(-h, g.T, sym_pos=True)
        except ValueError as e:
            if 'infs or NaNs' in str(e):
                raise ConvergenceError("""hessian or gradient contains nan or inf value(s). Convergence halted. Please see the following tips in the lifelines documentation:
https://lifelines.readthedocs.io/en/latest/Examples.html#problems-with-convergence-in-the-cox-proportional-hazard-model
""")
            else:
                # something else?
                raise e

        delta = step_size * inv_h_dot_g_T

        if np.any(np.isnan(delta)):
            raise ConvergenceError("""delta contains nan value(s). Convergence halted. Please see the following tips in the lifelines documentation:
https://lifelines.readthedocs.io/en/latest/Examples.html#problems-with-convergence-in-the-cox-proportional-hazard-model
""")

        # Save these as pending result
        hessian, gradient = h, g
        norm_delta = norm(delta)

        # reusing an above piece to make g * inv(h) * g.T faster.
        newton_decrement = g.dot(inv_h_dot_g_T)/2

        if show_progress:
            print("Iteration %d: norm_delta = %.5f, step_size = %.5f, ll = %.5f, newton_decrement = %.5f, seconds_since_start = %.1f" % (i, norm_delta, step_size, ll, newton_decrement, time.time() - start))

        # convergence criteria
        if norm_delta < precision:
            converging, completed = False, True
        elif previous_ll != 0 and abs(ll - previous_ll) / (-previous_ll) < 1e-09:
            # this is what R uses by default
            converging, completed = False, True
        elif newton_decrement < precision:
            converging, completed = False, True
        elif i >= max_steps:
            # 50 iterations steps with N-R is a lot.
            # Expected convergence is ~10 steps
            converging, completed = False, False
        elif step_size <= 0.00001:
            converging, completed = False, False
        elif abs(ll) < 0.0001 and norm_delta > 1.0:
            warnings.warn("The log-likelihood is getting suspciously close to 0 and the delta is still large. There may be complete separation in the dataset. This may result in incorrect inference of coefficients. \
See https://stats.idre.ucla.edu/other/mult-pkg/faq/general/faqwhat-is-complete-or-quasi-complete-separation-in-logisticprobit-regression-and-how-do-we-deal-with-them/ ", ConvergenceWarning)
            converging, completed = False, False

        step_size = step_sizer.update(norm_delta).next()

        beta += delta
        previous_ll = ll

    self._hessian_ = hessian
    self._score_ = gradient
    self._log_likelihood = ll

    if show_progress and completed:
        print("Convergence completed after %d iterations." % (i))
    if not completed:
        warnings.warn("Newton-Rhapson failed to converge sufficiently in %d steps." % max_steps, ConvergenceWarning)

    return beta
def _get_efron_values(self, X, beta, T, E, weights):
    """
    Calculates the first and second order vector differentials, with respect to beta.
    Note that X, T, E are assumed to be sorted on T!

    A good explaination for Efron. Consider three of five subjects who fail at the time.
    As it is not known a priori that who is the first to fail, so one-third of
    (φ1 + φ2 + φ3) is adjusted from sum_j^{5} φj after one fails. Similarly two-third
    of (φ1 + φ2 + φ3) is adjusted after first two individuals fail, etc.

    From https://cran.r-project.org/web/packages/survival/survival.pdf:
    "Setting all weights to 2 for instance will give the same coefficient estimate but halve the variance. When
    the Efron approximation for ties (default) is employed replication of the data will not give exactly the same coefficients as the
    weights option, and in this case the weighted fit is arguably the correct one."

    Parameters:
        X: (n,d) numpy array of observations.
        beta: (1, d) numpy array of coefficients.
        T: (n) numpy array representing observed durations.
        E: (n) numpy array representing death events.
        weights: (n) an array representing weights per observation.

    Returns:
        hessian: (d, d) numpy array,
        gradient: (1, d) numpy array
        log_likelihood: double
    """
    n, d = X.shape
    hessian = np.zeros((d, d))
    gradient = np.zeros((1, d))
    log_lik = 0

    # Init risk and tie sums to zero
    x_tie_sum = np.zeros((1, d))
    risk_phi, tie_phi = 0, 0
    risk_phi_x, tie_phi_x = np.zeros((1, d)), np.zeros((1, d))
    risk_phi_x_x, tie_phi_x_x = np.zeros((d, d)), np.zeros((d, d))

    # Init number of ties and weights
    weight_count = 0.0
    tie_count = 0

    # phi_j = w_j * exp(x_j . beta), computed once for all subjects.
    scores = weights[:, None] * exp(dot(X, beta))

    # Iterate backwards to utilize recursive relationship
    # (the risk set at time t is a suffix of the T-sorted data).
    for i in range(n - 1, -1, -1):
        # Doing it like this to preserve shape
        ti = T[i]
        ei = E[i]
        xi = X[i:i + 1]
        score = scores[i:i + 1]
        w = weights[i]

        # Calculate phi values
        phi_i = score
        phi_x_i = phi_i * xi
        phi_x_x_i = dot(xi.T, phi_x_i)

        # Calculate sums of Risk set
        risk_phi += phi_i
        risk_phi_x += phi_x_i
        risk_phi_x_x += phi_x_x_i

        # Calculate sums of Ties, if this is an event
        if ei:
            x_tie_sum += w * xi
            tie_phi += phi_i
            tie_phi_x += phi_x_i
            tie_phi_x_x += phi_x_x_i

            # Keep track of count
            tie_count += 1
            weight_count += w

        if i > 0 and T[i - 1] == ti:
            # There are more ties/members of the risk set
            continue
        elif tie_count == 0:
            # Only censored with current time, move on
            continue

        # There was atleast one event and no more ties remain. Time to sum.
        partial_gradient = np.zeros((1, d))
        weighted_average = weight_count / tie_count

        for l in range(tie_count):
            """
            A good explaination for Efron. Consider three of five subjects who fail at the time.
            As it is not known a priori that who is the first to fail, so one-third of
            (φ1 + φ2 + φ3) is adjusted from sum_j^{5} φj after one fails. Similarly two-third
            of (φ1 + φ2 + φ3) is adjusted after first two individuals fail, etc.
            """
            numer = (risk_phi_x - l * tie_phi_x / tie_count)
            denom = (risk_phi - l * tie_phi / tie_count)

            # Gradient
            partial_gradient += weighted_average * numer / denom
            # Hessian
            a1 = (risk_phi_x_x - l * tie_phi_x_x / tie_count) / denom
            # In case numer and denom both are really small numbers,
            # make sure to do division before multiplications
            a2 = dot(numer.T / denom, numer / denom)

            hessian -= weighted_average * (a1 - a2)
            log_lik -= weighted_average * np.log(denom[0][0])

        # Values outside tie sum
        gradient += x_tie_sum - partial_gradient
        log_lik += dot(x_tie_sum, beta)[0][0]

        # reset tie values
        tie_count = 0
        weight_count = 0.0
        x_tie_sum = np.zeros((1, d))
        tie_phi = 0
        tie_phi_x = np.zeros((1, d))
        tie_phi_x_x = np.zeros((d, d))

    return hessian, gradient, log_lik
def _compute_baseline_cumulative_hazard(self):
    """Running (cumulative) sum of the baseline hazard over event times."""
    baseline_hazard = self.baseline_hazard_
    return baseline_hazard.cumsum()
@staticmethod
def _check_values(df, T, E):
    """Validate training inputs before fitting.

    Delegates to the lifelines.utils checkers, which raise when covariates
    are non-numeric, any input contains NaN/inf, a covariate has ~zero
    variance, or the data exhibits complete separation. The call order is
    kept as-is: it determines which error a user sees first.
    """
    pass_for_numeric_dtypes_or_raise(df)
    check_nans_or_infs(T)
    check_nans_or_infs(E)
    check_nans_or_infs(df)
    check_low_var(df)
    check_complete_separation(df, E, T)
def _compute_confidence_intervals(self):
    """Normal-approximation CI for each coefficient: coef -/+ z * se(coef),
    at the level stored in self.alpha."""
    z = inv_normal_cdf((1. + self.alpha) / 2.)
    se = self.standard_errors_
    hazards = self.hazards_.values
    lower = hazards - z * se
    upper = hazards + z * se
    return pd.DataFrame(np.r_[lower, upper],
                        index=['lower-bound', 'upper-bound'],
                        columns=self.hazards_.columns)
def _compute_sandwich_estimator(self, X, T, E, weights):
    """Huber sandwich (robust) covariance estimate of the coefficients.

    Computes weighted score residuals (per stratum when `strata` is set,
    summed per cluster when `cluster_col` is set), then forms
    D' D with D = residuals . inv(-Hessian), rescaled by the
    normalization std used during fitting.
    """
    _, d = X.shape

    if self.strata is not None and self.cluster_col is not None:
        # TODO
        raise NotImplementedError("Providing clusters and strata is not implemented yet")

    if self.strata is not None:
        # Residuals are computed independently inside each stratum and
        # stacked row-wise.
        score_residuals = np.empty((0, d))
        for strata in np.unique(X.index):
            # TODO: use pandas .groupby
            stratified_X, stratified_T, stratified_E, stratified_W = X.loc[[strata]], T.loc[[strata]], E.loc[[strata]], weights.loc[[strata]]
            score_residuals = np.append(score_residuals,
                                        self._compute_residuals_within_strata(stratified_X.values, stratified_T.values, stratified_E.values, stratified_W.values) * stratified_W[:, None],
                                        axis=0)
    else:
        score_residuals = self._compute_residuals_within_strata(X.values, T.values, E.values, weights.values) * weights[:, None]

    if self.cluster_col:
        # Collapse residuals to one (weighted) row per cluster.
        score_residuals_ = np.empty((0, d))
        for cluster in np.unique(self._clusters):
            ix = self._clusters == cluster
            weights_ = weights.values[ix]
            score_residuals_ = np.append(score_residuals_,
                                         (score_residuals[ix, :] * weights_[:, None]).sum(0).reshape(1, d),
                                         axis=0)
        score_residuals = score_residuals_

    naive_var = inv(self._hessian_)
    delta_betas = score_residuals.dot(naive_var)
    sandwich_estimator = delta_betas.T.dot(delta_betas) / np.outer(self._norm_std, self._norm_std)
    return sandwich_estimator
def _compute_residuals_within_strata(self, X, T, E, weights):
    """Score residuals for one stratum (data assumed sorted on T).

    Returns an (n, d) array; row i is subject i's contribution to the
    score function, used by the sandwich variance estimator.
    """
    # https://www.stat.tamu.edu/~carroll/ftp/gk001.pdf
    # lin1989
    # https://www.ics.uci.edu/~dgillen/STAT255/Handouts/lecture10.pdf

    # TODO: doesn't handle ties.

    n, d = X.shape

    # we already unnormalized the betas in `fit`, so we need normalize them again since X is
    # normalized.
    beta = self.hazards_.values[0] * self._norm_std

    E = E.astype(int)
    score_residuals = np.zeros((n, d))

    phi_s = exp(dot(X, beta))

    # compute these within strata
    # need to store these histories, as we access them often
    # this is a reverse cumulative sum. See original code in https://github.com/CamDavidsonPilon/lifelines/pull/496/files#diff-81ee0759dbae0770e1a02cf17f4cfbb1R431
    risk_phi_x_history = (X * (weights * phi_s)[:, None])[::-1].cumsum(0)[::-1]
    risk_phi_history = (weights * phi_s)[::-1].cumsum()[::-1][:, None]

    # Iterate forwards
    for i in range(0, n):
        xi = X[i:i + 1]
        phi_i = phi_s[i]

        score = - phi_i * (
            (E[:i+1] * weights[:i+1] / risk_phi_history[:i+1].T).T  # this is constant-ish, and could be cached
            * (xi - risk_phi_x_history[:i+1] / risk_phi_history[:i+1])
        ).sum(0)

        if E[i]:
            # Event subjects additionally contribute their own deviation
            # from the risk-set average.
            score = score + (xi - risk_phi_x_history[i] / risk_phi_history[i])

        score_residuals[i, :] = score

    return score_residuals
def _compute_standard_errors(self, df, T, E, weights):
    """se(coef) per covariate, as a one-row DataFrame indexed 'se'.

    Uses the sandwich (robust) estimator when robust/cluster fitting was
    requested, otherwise the naive inverse-Hessian variances.
    """
    if self.robust or self.cluster_col:
        variances = self._compute_sandwich_estimator(df, T, E, weights).diagonal()  # / self._norm_std
    else:
        variances = self.variance_matrix_.diagonal()
    standard_errors = np.sqrt(variances)
    return pd.DataFrame(standard_errors[None, :],
                        index=['se'], columns=self.hazards_.columns)
def _compute_z_values(self):
    """Wald z-statistic for every covariate: coef / se(coef)."""
    coefficients = self.hazards_.loc['coef']
    standard_errors = self.standard_errors_.loc['se']
    return coefficients / standard_errors
def _compute_p_values(self):
    """Two-sided p-values from the chi-squared(1) survival function of z^2."""
    z_squared = self._compute_z_values() ** 2
    return stats.chi2.sf(z_squared, 1)
@property
def summary(self):
    """Summary statistics describing the fit.
    Set alpha property in the object before calling.

    Returns
    -------
    df : pd.DataFrame
        Contains columns coef, exp(coef), se(coef), z, p, lower, upper"""
    coefs = self.hazards_.loc['coef'].values
    ci = self.confidence_intervals_
    # Dict insertion order fixes the column order of the result.
    columns = {
        'coef': coefs,
        'exp(coef)': exp(coefs),
        'se(coef)': self.standard_errors_.loc['se'].values,
        'z': self._compute_z_values(),
        'p': self._compute_p_values(),
        'lower %.2f' % self.alpha: ci.loc['lower-bound'].values,
        'upper %.2f' % self.alpha: ci.loc['upper-bound'].values,
    }
    return pd.DataFrame(columns, index=self.hazards_.columns)
def print_summary(self):
    """
    Print summary statistics describing the fit.

    Output: fit metadata (columns, counts, log-likelihood, timestamp),
    the coefficient table with significance codes, the concordance index,
    and the likelihood-ratio test. Must be called after fit().
    """
    # Print information about data first
    justify = string_justify(18)  # pads labels to a fixed width
    print(self)
    print("{} = {}".format(justify('duration col'), self.duration_col))
    print("{} = {}".format(justify('event col'), self.event_col))
    if self.weights_col:
        print("{} = {}".format(justify('weights col'), self.weights_col))

    if self.cluster_col:
        print("{} = {}".format(justify('cluster col'), self.cluster_col))

    if self.robust or self.cluster_col:
        # cluster_col implies the robust (sandwich) variance estimator
        print("{} = {}".format(justify('robust variance'), True))

    if self.strata:
        print('{} = {}'.format(justify('strata'), self.strata))

    print('{} = {}'.format(justify('number of subjects'), self._n_examples))
    print('{} = {}'.format(justify('number of events'), self.event_observed.sum()))
    print('{} = {:.3f}'.format(justify('log-likelihood'), self._log_likelihood))
    print('{} = {}'.format(justify("time fit was run"), self._time_fit_was_called), end='\n\n')
    print('---')

    df = self.summary
    # Significance codes last
    df[''] = [significance_code(p) for p in df['p']]
    print(df.to_string(float_format=lambda f: '{:4.4f}'.format(f)))
    # Significance code explanation
    print('---')
    print(significance_codes_as_text(), end='\n\n')
    print("Concordance = {:.3f}".format(self.score_))
    print("Likelihood ratio test = {:.3f} on {} df, p={:.5f}".format(*self._compute_likelihood_ratio_test()))
    return
def _compute_likelihood_ratio_test(self):
    """Likelihood ratio test of the fitted model versus the trivial model.

    Fits a fresh ``CoxPHFitter`` with no covariates on the same (T, E)
    data and compares log-likelihoods. The statistic
    ``2*ll_alt - 2*ll_null`` is chi-squared with one degree of freedom
    per covariate in the full model.

    Returns
    -------
    (test_statistic, degrees_of_freedom, p_value)
    """
    # Conveniently, the class itself fits the no-covariate null model:
    # a frame holding only the event indicator and the durations.
    null_data = pd.DataFrame({'E': self.event_observed, 'T': self.durations})
    null_model = CoxPHFitter()
    null_model.fit(null_data, 'T', 'E', show_progress=False)

    ll_null = null_model._log_likelihood
    ll_alt = self._log_likelihood

    test_stat = 2 * ll_alt - 2 * ll_null
    degrees_freedom = self.hazards_.shape[1]
    _, p_value = chisq_test(test_stat, degrees_freedom=degrees_freedom, alpha=0.0)
    return test_stat, degrees_freedom, p_value
def predict_partial_hazard(self, X):
    r"""Return the partial hazard, :math:`\exp(\beta (X - \bar{X}_{train}))`, for each row of X.

    Parameters
    ----------
    X : (n, d) numpy array or DataFrame of covariates. If a DataFrame,
        columns can be in any order; if a numpy array, columns must be
        in the same order as the training data.

    Returns
    -------
    pd.DataFrame
        The partial hazard per individual — "partial" because the
        baseline hazard is not included.
    """
    # Raw docstring above: the original used "\exp{\beta ...}" in a normal
    # string, where "\e" is an invalid escape (SyntaxWarning on 3.12+) and
    # "\b" silently becomes a backspace control character.
    return exp(self.predict_log_partial_hazard(X))
def predict_log_partial_hazard(self, X):
    r"""
    X: a (n,d) covariate numpy array or DataFrame. If a DataFrame, columns
        can be in any order. If a numpy array, columns must be in the
        same order as the training data.
    This is equivalent to R's linear.predictors.
    Returns the log of the partial hazard for the individuals, partial since the
    baseline hazard is not included. Equal to \beta (X - mean{X_train})
    If X is a dataframe, the order of the columns do not matter. But
    if X is an array, then the column ordering is assumed to be the
    same as the training dataset.
    """
    hazard_names = self.hazards_.columns
    if isinstance(X, pd.DataFrame):
        # Reorder the DataFrame columns to match the training order.
        order = hazard_names
        X = X[order]
        pass_for_numeric_dtypes_or_raise(X)
    elif isinstance(X, pd.Series) and ((X.shape[0] == len(hazard_names) + 2) or (X.shape[0] == len(hazard_names))):
        # A Series of covariate values is treated as a single observation:
        # transpose to a one-row DataFrame, then reorder as above.
        # NOTE(review): the `+ 2` presumably tolerates a Series that still
        # carries the duration and event entries — confirm against callers.
        X = X.to_frame().T
        order = hazard_names
        X = X[order]
        pass_for_numeric_dtypes_or_raise(X)
    elif isinstance(X, pd.Series):
        # Any other Series is only valid for a single-covariate model:
        # each entry is then one observation of that lone covariate.
        assert len(hazard_names) == 1, 'Series not the correct arugment'
        X = pd.DataFrame(X)
        pass_for_numeric_dtypes_or_raise(X)
    X = X.astype(float)
    index = _get_index(X)
    # Center by the training means (scale factor 1, i.e. no std scaling),
    # so the result is beta * (X - mean(X_train)).
    X = normalize(X, self._norm_mean.values, 1)
    return pd.DataFrame(np.dot(X, self.hazards_.T), index=index)
def predict_log_hazard_relative_to_mean(self, X):
    r"""Return the log hazard relative to the hazard of the mean covariates.

    This mirrors the behaviour of R's ``predict.coxph``. Equal to
    :math:`\beta X - \beta \, mean(X_{train})`.

    X: a (n,d) covariate numpy array or DataFrame. If a DataFrame, columns
       can be in any order; if a numpy array, columns must be in the same
       order as the training data.
    """
    mean_log_partial_hazard = self._train_log_partial_hazard.squeeze()
    return self.predict_log_partial_hazard(X) - mean_log_partial_hazard
def predict_cumulative_hazard(self, X, times=None):
    """
    X: a (n,d) covariate numpy array or DataFrame. If a DataFrame, columns
        can be in any order. If a numpy array, columns must be in the
        same order as the training data.
    times: an iterable of increasing times to predict the cumulative hazard at. Default
        is the set of all durations (observed and unobserved). Uses a linear interpolation if
        points in time are not in the index.
    Returns the cumulative hazard of individuals.
    """
    if self.strata:
        # Stratified model: each stratum has its own baseline cumulative
        # hazard, so predict per stratum and outer-merge on the time index.
        cumulative_hazard_ = pd.DataFrame()
        for stratum, stratified_X in X.groupby(self.strata):
            try:
                c_0 = self.baseline_cumulative_hazard_[[stratum]]
            except KeyError:
                raise StatError("""The stratum %s was not found in the original training data. For example, try
the following on the original dataset, df: `df.groupby(%s).size()`. Expected is that %s is not present in the output.
""" % (stratum, self.strata, stratum))
            col = _get_index(stratified_X)
            v = self.predict_partial_hazard(stratified_X)
            # Outer product of the stratum baseline (rows = times) with the
            # subjects' partial hazards (columns = subjects).
            cumulative_hazard_ = cumulative_hazard_.merge(pd.DataFrame(np.dot(c_0, v.T), index=c_0.index, columns=col), how='outer', right_index=True, left_index=True)
    else:
        # Single baseline: cumulative hazard is baseline x partial hazard.
        c_0 = self.baseline_cumulative_hazard_
        v = self.predict_partial_hazard(X)
        col = _get_index(v)
        cumulative_hazard_ = pd.DataFrame(np.dot(c_0, v.T), columns=col, index=c_0.index)
    if times is not None:
        # non-linear interpolations can push the survival curves above 1 and below 0.
        return cumulative_hazard_.reindex(cumulative_hazard_.index.union(times)).interpolate("index").loc[times]
    else:
        return cumulative_hazard_
def predict_survival_function(self, X, times=None):
    """Return the estimated survival function, S(t) = exp(-H(t)), per subject.

    X: a (n,d) covariate numpy array or DataFrame. If a DataFrame, columns
       can be in any order; if a numpy array, columns must be in the same
       order as the training data.
    times: an iterable of increasing times at which to evaluate the
       survival function. Default is the set of all durations (observed
       and unobserved).
    """
    cumulative_hazard = self.predict_cumulative_hazard(X, times=times)
    return exp(-cumulative_hazard)
def predict_percentile(self, X, p=0.5):
    """Return the time at which each subject's survival curve crosses ``p``.

    By default (p=0.5) this is the median lifetime. If an individual's
    survival curve never crosses ``p``, the result is infinity.
    See http://stats.stackexchange.com/questions/102986/percentile-loss-functions

    X: a (n,d) covariate numpy array or DataFrame. If a DataFrame, columns
       can be in any order; if a numpy array, columns must be in the same
       order as the training data.
    """
    subjects = _get_index(X)
    survival_functions = self.predict_survival_function(X)[subjects]
    return qth_survival_times(p, survival_functions).T
def predict_median(self, X):
    """Return the median lifetime for each individual in X.

    If an individual's survival curve never crosses 0.5, the result is
    infinity. Delegates to ``predict_percentile`` with p=0.5.

    X: a (n,d) covariate numpy array or DataFrame. If a DataFrame, columns
       can be in any order; if a numpy array, columns must be in the same
       order as the training data.
    """
    return self.predict_percentile(X, 0.5)
def predict_expectation(self, X):
    r"""Estimate the expected lifetime, E[T], for each row of X.

    Uses the identity :math:`E[T] = \int_0^\infty P(T > t)\,dt
    = \int_0^\infty S(t)\,dt` and approximates the integral with the
    trapezoidal rule over the predicted survival function. If S(t) does
    not converge to 0, the true expectation is infinite and this value
    underestimates it.

    X: a (n,d) covariate numpy array or DataFrame. If a DataFrame, columns
       can be in any order; if a numpy array, columns must be in the same
       order as the training data.
    """
    subjects = _get_index(X)
    survival = self.predict_survival_function(X)[subjects]
    return pd.DataFrame(trapz(survival.values.T, survival.index), index=subjects)
def _compute_baseline_hazard(self, data, durations, event_observed, weights, name):
    """Estimate the discrete baseline hazard (Breslow-style estimator).

    Returns a single-column DataFrame, named ``name``, of the baseline
    hazard at each observed event time: observed events divided by the
    summed partial hazards of the subjects still at risk.
    """
    # https://stats.stackexchange.com/questions/46532/cox-baseline-hazard
    # Per-subject weighted partial hazards, tagged with their event time.
    ind_hazards = self.predict_partial_hazard(data) * weights[:, None]
    ind_hazards['event_at'] = durations.values
    # Risk-set denominator at each time: sum hazards per time, then a
    # reverse cumulative sum (descending sort) so each time accumulates
    # every subject with duration >= t.
    ind_hazards_summed_over_durations = ind_hazards.groupby('event_at')[0].sum().sort_index(ascending=False).cumsum()
    ind_hazards_summed_over_durations.name = 'hazards'
    event_table = survival_table_from_events(durations, event_observed, weights=weights)
    event_table = event_table.join(ind_hazards_summed_over_durations)
    # Baseline hazard = observed events / at-risk hazard mass; times with
    # no at-risk hazard produce NaN, coerced to 0.
    baseline_hazard = pd.DataFrame(event_table['observed'] / event_table['hazards'], columns=[name]).fillna(0)
    return baseline_hazard
def _compute_baseline_hazards(self, df, T, E, weights):
if self.strata:
index = self.durations.unique()
baseline_hazards_ = pd.DataFrame(index=index)
for stratum in df.index.unique():
baseline_hazards_ = baseline_hazards_.merge(
self._compute_baseline_hazard(data=df.loc[[stratum]], durations=T.loc[[stratum]], event_observed=E.loc[[stratum]], weights=weights.loc[[stratum]], name=stratum),
left_index=True,
right_index=True,
how='left')
return baseline_hazards_.fillna(0)
else:
return self._compute_baseline_hazard(data=df, durations=T, event_observed=E, weights=weights, name='baseline hazard')
def _compute_baseline_survival(self):
"""
Importantly, this agrees with what the KaplanMeierFitter produces. Ex:
from lifelines.datasets import load_rossi
from lifelines import CoxPHFitter, KaplanMeierFitter
rossi = load_rossi()
kmf = KaplanMeierFitter()
kmf.fit(rossi['week'], rossi['arrest'])
rossi2 = rossi[['week', 'arrest']].copy()
rossi2['var1'] = np.random.randn(432)
cph = CoxPHFitter()
cph.fit(rossi2, 'week', 'arrest')
ax = cph.baseline_survival_.plot()
kmf.plot(ax=ax)
"""
survival_df = exp(-self.baseline_cumulative_hazard_)
if self.strata is None:
survival_df.columns = ['baseline survival']
return survival_df
def plot(self, standardized=False, columns=None, **kwargs):
    """
    Produces a visual representation of the fitted coefficients, including their standard errors and magnitudes.

    Parameters:
      standardized: standardize each estimated coefficient and confidence interval
        endpoints by the standard error of the estimate.
      columns : list-like, default None; restrict the plot to this subset of covariates.
    Returns:
      ax: the matplotlib axis that can be edited.
    """
    from matplotlib import pyplot as plt
    # Reuse a caller-supplied axis, otherwise create a fresh figure/axis.
    ax = kwargs.get('ax', None) or plt.figure().add_subplot(111)
    if columns is not None:
        # Restrict the display to a user-supplied subset of covariates.
        yaxis_locations = range(len(columns))
        summary = self.summary.loc[columns]
        lower_bound = self.confidence_intervals_[columns].loc['lower-bound'].copy()
        upper_bound = self.confidence_intervals_[columns].loc['upper-bound'].copy()
        hazards = self.hazards_[columns].values[0].copy()
    else:
        yaxis_locations = range(len(self.hazards_.columns))
        summary = self.summary
        lower_bound = self.confidence_intervals_.loc['lower-bound'].copy()
        upper_bound = self.confidence_intervals_.loc['upper-bound'].copy()
        hazards = self.hazards_.values[0].copy()
    if standardized:
        # Express coefficients and CI endpoints in units of their standard error.
        se = summary['se(coef)']
        lower_bound /= se
        upper_bound /= se
        hazards /= se
    # Plot rows ordered by coefficient value: '|' markers for the CI
    # endpoints, 'o' for the point estimate, joined by a horizontal line.
    order = np.argsort(hazards)
    ax.scatter(upper_bound.values[order], yaxis_locations, marker='|', c='k')
    ax.scatter(lower_bound.values[order], yaxis_locations, marker='|', c='k')
    ax.scatter(hazards[order], yaxis_locations, marker='o', c='k')
    ax.hlines(yaxis_locations, lower_bound.values[order], upper_bound.values[order], color='k', lw=1)
    # y-tick labels: covariate name plus its significance code.
    tick_labels = [c + significance_code(p).strip() for (c, p) in summary['p'][order].iteritems()]
    plt.yticks(yaxis_locations, tick_labels)
    plt.xlabel("standardized coef" if standardized else "coef")
    return ax
def plot_covariate_groups(self, covariate, groups, **kwargs):
    """
    Compare the model's baseline survival curve against the predicted
    survival curves obtained by varying a single covariate over the
    values in ``groups``, holding every other covariate at its average
    value from the original dataset. Useful for visualising a single
    covariate's effect, all else held equal.

    Parameters:
      covariate: a string naming a covariate of the original dataset to vary.
      groups: an iterable of the values we wish the covariate to take on.
    Returns:
      ax: the matplotlib axis that can be edited.
    """
    from matplotlib import pyplot as plt

    if covariate not in self.hazards_.columns:
        raise KeyError('covariate `%s` is not present in the original dataset' % covariate)

    ax = kwargs.get('ax', None) or plt.figure().add_subplot(111)

    # One synthetic subject per group value: every covariate at its
    # training mean, except the one being varied.
    mean_covariates = self._norm_mean.to_frame().T
    X = pd.concat([mean_covariates] * len(groups))
    X.index = ['%s=%s' % (covariate, g) for g in groups]
    X[covariate] = groups

    self.predict_survival_function(X).plot(ax=ax)
    self.baseline_survival_.plot(ax=ax, ls='--')
    return ax
@property
def score_(self):
    """Concordance index of the fitted model, computed lazily.

    On first access, computes the concordance between the observed
    durations and the (negated) predicted partial hazards saved during
    fitting, caches the result, and releases the cached predictions.
    """
    if not hasattr(self, '_concordance_score_'):
        self._concordance_score_ = concordance_index(self.durations,
                                                    -self._predicted_partial_hazards_,
                                                    self.event_observed)
        # The stored predictions are only needed once; free the memory.
        del self._predicted_partial_hazards_
    return self._concordance_score_
|
[
"lifelines.utils.significance_codes_as_text",
"scipy.linalg.solve",
"lifelines.statistics.chisq_test",
"numpy.empty",
"numpy.ones",
"numpy.isnan",
"numpy.argsort",
"datetime.datetime.utcnow",
"matplotlib.pyplot.figure",
"numpy.linalg.norm",
"numpy.exp",
"lifelines.utils.normalize",
"numpy.unique",
"pandas.DataFrame",
"lifelines.utils.ConvergenceError",
"numpy.zeros_like",
"matplotlib.pyplot.yticks",
"lifelines.utils.string_justify",
"lifelines.utils.check_nans_or_infs",
"scipy.stats.chi2.sf",
"lifelines.utils.StatError",
"lifelines.utils.concordance_index",
"lifelines.utils.survival_table_from_events",
"numpy.linalg.inv",
"lifelines.utils.inv_normal_cdf",
"lifelines.utils.coalesce",
"lifelines.utils.check_low_var",
"numpy.dot",
"lifelines.utils.significance_code",
"lifelines.utils.StepSizer",
"lifelines.utils.pass_for_numeric_dtypes_or_raise",
"numpy.outer",
"numpy.log",
"numpy.zeros",
"lifelines.utils._get_index",
"time.time",
"lifelines.utils.check_complete_separation",
"scipy.integrate.trapz",
"warnings.warn",
"matplotlib.pyplot.xlabel"
] |
[((5481, 5510), 'lifelines.utils.coalesce', 'coalesce', (['strata', 'self.strata'], {}), '(strata, self.strata)\n', (5489, 5510), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((9996, 10016), 'lifelines.utils.StepSizer', 'StepSizer', (['step_size'], {}), '(step_size)\n', (10005, 10016), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((10365, 10376), 'time.time', 'time.time', ([], {}), '()\n', (10374, 10376), False, 'import time\n'), ((16033, 16049), 'numpy.zeros', 'np.zeros', (['(d, d)'], {}), '((d, d))\n', (16041, 16049), True, 'import numpy as np\n'), ((16069, 16085), 'numpy.zeros', 'np.zeros', (['(1, d)'], {}), '((1, d))\n', (16077, 16085), True, 'import numpy as np\n'), ((16168, 16184), 'numpy.zeros', 'np.zeros', (['(1, d)'], {}), '((1, d))\n', (16176, 16184), True, 'import numpy as np\n'), ((19508, 19544), 'lifelines.utils.pass_for_numeric_dtypes_or_raise', 'pass_for_numeric_dtypes_or_raise', (['df'], {}), '(df)\n', (19540, 19544), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((19553, 19574), 
'lifelines.utils.check_nans_or_infs', 'check_nans_or_infs', (['T'], {}), '(T)\n', (19571, 19574), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((19583, 19604), 'lifelines.utils.check_nans_or_infs', 'check_nans_or_infs', (['E'], {}), '(E)\n', (19601, 19604), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((19613, 19635), 'lifelines.utils.check_nans_or_infs', 'check_nans_or_infs', (['df'], {}), '(df)\n', (19631, 19635), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((19644, 19661), 'lifelines.utils.check_low_var', 'check_low_var', (['df'], {}), '(df)\n', (19657, 19661), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((19670, 19705), 'lifelines.utils.check_complete_separation', 
'check_complete_separation', (['df', 'E', 'T'], {}), '(df, E, T)\n', (19695, 19705), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((19769, 19809), 'lifelines.utils.inv_normal_cdf', 'inv_normal_cdf', (['((1.0 + self.alpha) / 2.0)'], {}), '((1.0 + self.alpha) / 2.0)\n', (19783, 19809), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((19897, 20036), 'pandas.DataFrame', 'pd.DataFrame', (['np.r_[hazards - alpha2 * se, hazards + alpha2 * se]'], {'index': "['lower-bound', 'upper-bound']", 'columns': 'self.hazards_.columns'}), "(np.r_[hazards - alpha2 * se, hazards + alpha2 * se], index=[\n 'lower-bound', 'upper-bound'], columns=self.hazards_.columns)\n", (19909, 20036), True, 'import pandas as pd\n'), ((21686, 21705), 'numpy.linalg.inv', 'inv', (['self._hessian_'], {}), '(self._hessian_)\n', (21689, 21705), False, 'from numpy.linalg import norm, inv\n'), ((22404, 22420), 'numpy.zeros', 'np.zeros', (['(n, d)'], {}), '((n, d))\n', (22412, 22420), True, 'import numpy as np\n'), ((23746, 23816), 'pandas.DataFrame', 'pd.DataFrame', (['se[None, :]'], {'index': "['se']", 'columns': 'self.hazards_.columns'}), "(se[None, :], index=['se'], columns=self.hazards_.columns)\n", (23758, 23816), True, 'import pandas as pd\n'), ((24063, 24082), 'scipy.stats.chi2.sf', 'stats.chi2.sf', (['U', '(1)'], {}), '(U, 1)\n', (24076, 24082), True, 
'import scipy.stats as stats\n'), ((24379, 24420), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'self.hazards_.columns'}), '(index=self.hazards_.columns)\n', (24391, 24420), True, 'import pandas as pd\n'), ((24501, 24538), 'numpy.exp', 'exp', (["self.hazards_.loc['coef'].values"], {}), "(self.hazards_.loc['coef'].values)\n", (24504, 24538), False, 'from numpy import dot, exp\n'), ((25065, 25083), 'lifelines.utils.string_justify', 'string_justify', (['(18)'], {}), '(18)\n', (25079, 25083), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((26911, 26972), 'pandas.DataFrame', 'pd.DataFrame', (["{'E': self.event_observed, 'T': self.durations}"], {}), "({'E': self.event_observed, 'T': self.durations})\n", (26923, 26972), True, 'import pandas as pd\n'), ((27267, 27332), 'lifelines.statistics.chisq_test', 'chisq_test', (['test_stat'], {'degrees_freedom': 'degrees_freedom', 'alpha': '(0.0)'}), '(test_stat, degrees_freedom=degrees_freedom, alpha=0.0)\n', (27277, 27332), False, 'from lifelines.statistics import chisq_test\n'), ((29403, 29416), 'lifelines.utils._get_index', '_get_index', (['X'], {}), '(X)\n', (29413, 29416), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((29430, 29469), 'lifelines.utils.normalize', 'normalize', (['X', 'self._norm_mean.values', '(1)'], {}), '(X, self._norm_mean.values, 1)\n', (29439, 29469), 
False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((33108, 33121), 'lifelines.utils._get_index', '_get_index', (['X'], {}), '(X)\n', (33118, 33121), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((34332, 34345), 'lifelines.utils._get_index', '_get_index', (['X'], {}), '(X)\n', (34342, 34345), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((34971, 35041), 'lifelines.utils.survival_table_from_events', 'survival_table_from_events', (['durations', 'event_observed'], {'weights': 'weights'}), '(durations, event_observed, weights=weights)\n', (34997, 35041), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((36666, 36704), 'numpy.exp', 'exp', (['(-self.baseline_cumulative_hazard_)'], {}), 
'(-self.baseline_cumulative_hazard_)\n', (36669, 36704), False, 'from numpy import dot, exp\n'), ((38301, 38320), 'numpy.argsort', 'np.argsort', (['hazards'], {}), '(hazards)\n', (38311, 38320), True, 'import numpy as np\n'), ((38774, 38814), 'matplotlib.pyplot.yticks', 'plt.yticks', (['yaxis_locations', 'tick_labels'], {}), '(yaxis_locations, tick_labels)\n', (38784, 38814), True, 'from matplotlib import pyplot as plt\n'), ((38823, 38882), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (["('standardized coef' if standardized else 'coef')"], {}), "('standardized coef' if standardized else 'coef')\n", (38833, 38882), True, 'from matplotlib import pyplot as plt\n'), ((7300, 7346), 'lifelines.utils.normalize', 'normalize', (['df', 'self._norm_mean', 'self._norm_std'], {}), '(df, self._norm_mean, self._norm_std)\n', (7309, 7346), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((7637, 7697), 'pandas.DataFrame', 'pd.DataFrame', (['hazards_.T'], {'columns': 'df.columns', 'index': "['coef']"}), "(hazards_.T, columns=df.columns, index=['coef'])\n", (7649, 7697), True, 'import pandas as pd\n'), ((7771, 7811), 'numpy.outer', 'np.outer', (['self._norm_std', 'self._norm_std'], {}), '(self._norm_std, self._norm_std)\n', (7779, 7811), True, 'import numpy as np\n'), ((7874, 7920), 'lifelines.utils.normalize', 'normalize', (['df', 'self._norm_mean', 'self._norm_std'], {}), '(df, self._norm_mean, self._norm_std)\n', (7883, 7920), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, 
coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((9957, 9973), 'numpy.zeros', 'np.zeros', (['(d, 1)'], {}), '((d, 1))\n', (9965, 9973), True, 'import numpy as np\n'), ((12448, 12459), 'numpy.linalg.norm', 'norm', (['delta'], {}), '(delta)\n', (12452, 12459), False, 'from numpy.linalg import norm, inv\n'), ((14414, 14526), 'warnings.warn', 'warnings.warn', (["('Newton-Rhapson failed to converge sufficiently in %d steps.' % max_steps)", 'ConvergenceWarning'], {}), "('Newton-Rhapson failed to converge sufficiently in %d steps.' %\n max_steps, ConvergenceWarning)\n", (14427, 14526), False, 'import warnings\n'), ((16250, 16266), 'numpy.zeros', 'np.zeros', (['(1, d)'], {}), '((1, d))\n', (16258, 16266), True, 'import numpy as np\n'), ((16268, 16284), 'numpy.zeros', 'np.zeros', (['(1, d)'], {}), '((1, d))\n', (16276, 16284), True, 'import numpy as np\n'), ((16321, 16337), 'numpy.zeros', 'np.zeros', (['(d, d)'], {}), '((d, d))\n', (16329, 16337), True, 'import numpy as np\n'), ((16339, 16355), 'numpy.zeros', 'np.zeros', (['(d, d)'], {}), '((d, d))\n', (16347, 16355), True, 'import numpy as np\n'), ((16906, 16924), 'numpy.dot', 'dot', (['xi.T', 'phi_x_i'], {}), '(xi.T, phi_x_i)\n', (16909, 16924), False, 'from numpy import dot, exp\n'), ((17756, 17772), 'numpy.zeros', 'np.zeros', (['(1, d)'], {}), '((1, d))\n', (17764, 17772), True, 'import numpy as np\n'), ((19183, 19199), 'numpy.zeros', 'np.zeros', (['(1, d)'], {}), '((1, d))\n', (19191, 19199), True, 'import numpy as np\n'), ((19248, 19264), 'numpy.zeros', 'np.zeros', (['(1, d)'], {}), '((1, d))\n', (19256, 19264), True, 'import numpy as np\n'), ((19291, 19307), 'numpy.zeros', 'np.zeros', (['(d, d)'], {}), '((d, d))\n', (19299, 19307), True, 'import numpy as np\n'), ((20459, 20475), 'numpy.empty', 'np.empty', (['(0, d)'], {}), '((0, d))\n', (20467, 20475), True, 'import numpy as np\n'), ((20502, 20520), 'numpy.unique', 'np.unique', 
(['X.index'], {}), '(X.index)\n', (20511, 20520), True, 'import numpy as np\n'), ((21227, 21243), 'numpy.empty', 'np.empty', (['(0, d)'], {}), '((0, d))\n', (21235, 21243), True, 'import numpy as np\n'), ((21271, 21296), 'numpy.unique', 'np.unique', (['self._clusters'], {}), '(self._clusters)\n', (21280, 21296), True, 'import numpy as np\n'), ((21821, 21861), 'numpy.outer', 'np.outer', (['self._norm_std', 'self._norm_std'], {}), '(self._norm_std, self._norm_std)\n', (21829, 21861), True, 'import numpy as np\n'), ((22442, 22454), 'numpy.dot', 'dot', (['X', 'beta'], {}), '(X, beta)\n', (22445, 22454), False, 'from numpy import dot, exp\n'), ((26129, 26149), 'lifelines.utils.significance_code', 'significance_code', (['p'], {}), '(p)\n', (26146, 26149), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((26315, 26343), 'lifelines.utils.significance_codes_as_text', 'significance_codes_as_text', ([], {}), '()\n', (26341, 26343), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((28869, 28904), 'lifelines.utils.pass_for_numeric_dtypes_or_raise', 'pass_for_numeric_dtypes_or_raise', (['X'], {}), '(X)\n', (28901, 28904), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, 
pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((29498, 29524), 'numpy.dot', 'np.dot', (['X', 'self.hazards_.T'], {}), '(X, self.hazards_.T)\n', (29504, 29524), True, 'import numpy as np\n'), ((30712, 30726), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (30724, 30726), True, 'import pandas as pd\n'), ((31600, 31613), 'lifelines.utils._get_index', '_get_index', (['v'], {}), '(v)\n', (31610, 31613), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((34430, 34456), 'scipy.integrate.trapz', 'trapz', (['v.values.T', 'v.index'], {}), '(v.values.T, v.index)\n', (34435, 34456), False, 'from scipy.integrate import trapz\n'), ((35425, 35450), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'index'}), '(index=index)\n', (35437, 35450), True, 'import pandas as pd\n'), ((40417, 40511), 'lifelines.utils.concordance_index', 'concordance_index', (['self.durations', '(-self._predicted_partial_hazards_)', 'self.event_observed'], {}), '(self.durations, -self._predicted_partial_hazards_, self.\n event_observed)\n', (40434, 40511), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((5783, 5803), 'numpy.ones', 'np.ones', (['df.shape[0]'], {}), '(df.shape[0])\n', (5790, 5803), True, 'import numpy as 
np\n'), ((6053, 6493), 'warnings.warn', 'warnings.warn', (['"""It appears your weights are not integers, possibly propensity or sampling scores then?\nIt\'s important to know that the naive variance estimates of the coefficients are biased. Instead a) set `robust=True` in the call to `fit`, or b) use Monte Carlo to\nestimate the variances. See paper "Variance estimation when using inverse probability of treatment weighting (IPTW) with survival analysis\\"\n"""', 'RuntimeWarning'], {}), '(\n """It appears your weights are not integers, possibly propensity or sampling scores then?\nIt\'s important to know that the naive variance estimates of the coefficients are biased. Instead a) set `robust=True` in the call to `fit`, or b) use Monte Carlo to\nestimate the variances. See paper "Variance estimation when using inverse probability of treatment weighting (IPTW) with survival analysis\\"\n"""\n , RuntimeWarning)\n', (6066, 6493), False, 'import warnings\n'), ((6643, 6671), 'numpy.ones', 'np.ones', (['(self._n_examples,)'], {}), '((self._n_examples,))\n', (6650, 6671), True, 'import numpy as np\n'), ((7749, 7768), 'numpy.linalg.inv', 'inv', (['self._hessian_'], {}), '(self._hessian_)\n', (7752, 7768), False, 'from numpy.linalg import norm, inv\n'), ((10674, 10714), 'numpy.zeros', 'np.zeros', (['(beta.shape[0], beta.shape[0])'], {}), '((beta.shape[0], beta.shape[0]))\n', (10682, 10714), True, 'import numpy as np\n'), ((10768, 10786), 'numpy.unique', 'np.unique', (['X.index'], {}), '(X.index)\n', (10777, 10786), True, 'import numpy as np\n'), ((11473, 11503), 'scipy.linalg.solve', 'spsolve', (['(-h)', 'g.T'], {'sym_pos': '(True)'}), '(-h, g.T, sym_pos=True)\n', (11480, 11503), True, 'from scipy.linalg import solve as spsolve\n'), ((12046, 12061), 'numpy.isnan', 'np.isnan', (['delta'], {}), '(delta)\n', (12054, 12061), True, 'import numpy as np\n'), ((12086, 12351), 'lifelines.utils.ConvergenceError', 'ConvergenceError', (['"""delta contains nan value(s). 
Convergence halted. Please see the following tips in the lifelines documentation:\nhttps://lifelines.readthedocs.io/en/latest/Examples.html#problems-with-convergence-in-the-cox-proportional-hazard-model\n"""'], {}), '(\n """delta contains nan value(s). Convergence halted. Please see the following tips in the lifelines documentation:\nhttps://lifelines.readthedocs.io/en/latest/Examples.html#problems-with-convergence-in-the-cox-proportional-hazard-model\n"""\n )\n', (12102, 12351), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((16487, 16499), 'numpy.dot', 'dot', (['X', 'beta'], {}), '(X, beta)\n', (16490, 16499), False, 'from numpy import dot, exp\n'), ((18768, 18803), 'numpy.dot', 'dot', (['(numer.T / denom)', '(numer / denom)'], {}), '(numer.T / denom, numer / denom)\n', (18771, 18803), False, 'from numpy import dot, exp\n'), ((29126, 29161), 'lifelines.utils.pass_for_numeric_dtypes_or_raise', 'pass_for_numeric_dtypes_or_raise', (['X'], {}), '(X)\n', (29158, 29161), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((31211, 31235), 'lifelines.utils._get_index', '_get_index', (['stratified_X'], {}), '(stratified_X)\n', (31221, 31235), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, 
pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((31660, 31676), 'numpy.dot', 'np.dot', (['c_0', 'v.T'], {}), '(c_0, v.T)\n', (31666, 31676), True, 'import numpy as np\n'), ((35142, 35220), 'pandas.DataFrame', 'pd.DataFrame', (["(event_table['observed'] / event_table['hazards'])"], {'columns': '[name]'}), "(event_table['observed'] / event_table['hazards'], columns=[name])\n", (35154, 35220), True, 'import pandas as pd\n'), ((5180, 5197), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (5195, 5197), False, 'from datetime import datetime\n'), ((10632, 10651), 'numpy.zeros_like', 'np.zeros_like', (['beta'], {}), '(beta)\n', (10645, 10651), True, 'import numpy as np\n'), ((18908, 18927), 'numpy.log', 'np.log', (['denom[0][0]'], {}), '(denom[0][0])\n', (18914, 18927), True, 'import numpy as np\n'), ((19043, 19063), 'numpy.dot', 'dot', (['x_tie_sum', 'beta'], {}), '(x_tie_sum, beta)\n', (19046, 19063), False, 'from numpy import dot, exp\n'), ((29294, 29309), 'pandas.DataFrame', 'pd.DataFrame', (['X'], {}), '(X)\n', (29306, 29309), True, 'import pandas as pd\n'), ((29322, 29357), 'lifelines.utils.pass_for_numeric_dtypes_or_raise', 'pass_for_numeric_dtypes_or_raise', (['X'], {}), '(X)\n', (29354, 29357), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((37412, 37424), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (37422, 37424), True, 'from matplotlib import pyplot as plt\n'), ((39897, 39909), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (39907, 39909), True, 'from 
matplotlib import pyplot as plt\n'), ((11611, 11897), 'lifelines.utils.ConvergenceError', 'ConvergenceError', (['"""hessian or gradient contains nan or inf value(s). Convergence halted. Please see the following tips in the lifelines documentation:\nhttps://lifelines.readthedocs.io/en/latest/Examples.html#problems-with-convergence-in-the-cox-proportional-hazard-model\n"""'], {}), '(\n """hessian or gradient contains nan or inf value(s). Convergence halted. Please see the following tips in the lifelines documentation:\nhttps://lifelines.readthedocs.io/en/latest/Examples.html#problems-with-convergence-in-the-cox-proportional-hazard-model\n"""\n )\n', (11627, 11897), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((30942, 31198), 'lifelines.utils.StatError', 'StatError', (['("""The stratum %s was not found in the original training data. For example, try\nthe following on the original dataset, df: `df.groupby(%s).size()`. Expected is that %s is not present in the output.\n"""\n % (stratum, self.strata, stratum))'], {}), '(\n """The stratum %s was not found in the original training data. For example, try\nthe following on the original dataset, df: `df.groupby(%s).size()`. 
Expected is that %s is not present in the output.\n"""\n % (stratum, self.strata, stratum))\n', (30951, 31198), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((31373, 31389), 'numpy.dot', 'np.dot', (['c_0', 'v.T'], {}), '(c_0, v.T)\n', (31379, 31389), True, 'import numpy as np\n'), ((38690, 38710), 'lifelines.utils.significance_code', 'significance_code', (['p'], {}), '(p)\n', (38707, 38710), False, 'from lifelines.utils import survival_table_from_events, inv_normal_cdf, normalize, significance_code, significance_codes_as_text, concordance_index, _get_index, qth_survival_times, pass_for_numeric_dtypes_or_raise, check_low_var, coalesce, check_complete_separation, check_nans_or_infs, StatError, ConvergenceWarning, StepSizer, ConvergenceError, string_justify\n'), ((12805, 12816), 'time.time', 'time.time', ([], {}), '()\n', (12814, 12816), False, 'import time\n'), ((13599, 14001), 'warnings.warn', 'warnings.warn', (['"""The log-likelihood is getting suspciously close to 0 and the delta is still large. There may be complete separation in the dataset. This may result in incorrect inference of coefficients. See https://stats.idre.ucla.edu/other/mult-pkg/faq/general/faqwhat-is-complete-or-quasi-complete-separation-in-logisticprobit-regression-and-how-do-we-deal-with-them/ """', 'ConvergenceWarning'], {}), "(\n 'The log-likelihood is getting suspciously close to 0 and the delta is still large. There may be complete separation in the dataset. This may result in incorrect inference of coefficients. 
See https://stats.idre.ucla.edu/other/mult-pkg/faq/general/faqwhat-is-complete-or-quasi-complete-separation-in-logisticprobit-regression-and-how-do-we-deal-with-them/ '\n , ConvergenceWarning)\n", (13612, 14001), False, 'import warnings\n')]
|
# Minimal JumpScale application lifecycle script.
from JumpScale import j
# Register/start an application context named "gendocs" with the framework
# (presumably sets up framework bookkeeping -- confirm against JumpScale docs).
j.application.start("gendocs")
# Shut the application context down again cleanly.
j.application.stop()
|
[
"JumpScale.j.application.stop",
"JumpScale.j.application.start"
] |
[((25, 55), 'JumpScale.j.application.start', 'j.application.start', (['"""gendocs"""'], {}), "('gendocs')\n", (44, 55), False, 'from JumpScale import j\n'), ((58, 78), 'JumpScale.j.application.stop', 'j.application.stop', ([], {}), '()\n', (76, 78), False, 'from JumpScale import j\n')]
|
"""pypi package setup."""
from __future__ import print_function
import codecs
from os import path
from setuptools import setup, find_packages
try:
import ROOT # pylint: disable=W0611
except ImportError:
print("ROOT is required by this library.")
DEPS = ['numpy', 'PyYAML>4.*', 'future', 'pylint']
HERE = path.abspath(path.dirname(__file__))
with codecs.open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='hepdata_lib',
version='0.2.7',
description='Library for getting your data into HEPData',
long_description=LONG_DESCRIPTION,
long_description_content_type='text/markdown',
url='https://github.com/HEPData/hepdata_lib',
author='<NAME>, <NAME>',
author_email='<EMAIL>',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
keywords='HEPData physics OpenData',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
zip_safe=False,
install_requires=DEPS,
setup_requires=['pytest-runner', 'pytest-cov'],
tests_require=['pytest'],
project_urls={
'Documentation': 'https://hepdata-lib.readthedocs.io',
'Bug Reports': 'https://github.com/HEPData/hepdata_lib/issues',
'Source': 'https://github.com/HEPData/hepdata_lib',
}, )
|
[
"os.path.dirname",
"os.path.join",
"setuptools.find_packages"
] |
[((328, 350), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (340, 350), False, 'from os import path\n'), ((370, 398), 'os.path.join', 'path.join', (['HERE', '"""README.md"""'], {}), "(HERE, 'README.md')\n", (379, 398), False, 'from os import path\n'), ((1083, 1134), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['contrib', 'docs', 'tests']"}), "(exclude=['contrib', 'docs', 'tests'])\n", (1096, 1134), False, 'from setuptools import setup, find_packages\n')]
|
from __future__ import absolute_import
from RecoTracker.IterativeTracking.LowPtQuadStep_cff import *
from .HIPixelTripletSeeds_cff import *
from .HIPixel3PrimTracks_cfi import *
# Mask the clusters already consumed by the previous iteration: the remover
# drops hits belonging to 'hiGlobalPrimTracks' that carry the 'hiInitialStep'
# highPurity quality, so this step seeds only from leftover clusters.
hiLowPtQuadStepClusters = cms.EDProducer("HITrackClusterRemover",
     clusterLessSolution = cms.bool(True),
     trajectories = cms.InputTag("hiGlobalPrimTracks"),
     overrideTrkQuals = cms.InputTag('hiInitialStepSelector','hiInitialStep'),
     TrackQuality = cms.string('highPurity'),
     minNumberOfLayersWithMeasBeforeFiltering = cms.int32(0),
     pixelClusters = cms.InputTag("siPixelClusters"),
     stripClusters = cms.InputTag("siStripClusters"),
     Common = cms.PSet(
         maxChi2 = cms.double(9.0),
     ),
     Strip = cms.PSet(
        #Yen-Jie's mod to preserve merged clusters
        maxSize = cms.uint32(2),
        maxChi2 = cms.double(9.0)
     )
)
# SEEDING LAYERS
# Using 4 layers layerlist
# Clone the pixel-quadruplet layer list, telling both barrel (BPix) and
# endcap (FPix) to skip the clusters masked above.
hiLowPtQuadStepSeedLayers = hiPixelLayerQuadruplets.clone(
    BPix = dict(skipClusters = cms.InputTag('hiLowPtQuadStepClusters')),
    FPix = dict(skipClusters = cms.InputTag('hiLowPtQuadStepClusters'))
)
# SEEDS
from RecoTracker.TkTrackingRegions.globalTrackingRegionWithVertices_cfi import globalTrackingRegionWithVertices as _globalTrackingRegionWithVertices
from RecoTracker.TkHitPairs.hitPairEDProducer_cfi import hitPairEDProducer as _hitPairEDProducer
from RecoPixelVertexing.PixelTriplets.pixelTripletHLTEDProducer_cfi import pixelTripletHLTEDProducer as _pixelTripletHLTEDProducer
from RecoPixelVertexing.PixelLowPtUtilities.ClusterShapeHitFilterESProducer_cfi import *
from RecoPixelVertexing.PixelLowPtUtilities.trackCleaner_cfi import *
from RecoPixelVertexing.PixelTrackFitting.pixelFitterByHelixProjections_cfi import *
from RecoHI.HiTracking.HIPixelTrackFilter_cff import *
from RecoHI.HiTracking.HITrackingRegionProducer_cfi import *
# Seeding region around the selected pixel vertices; inline comments note
# where the heavy-ion values differ from the pp defaults.
hiLowPtQuadStepTrackingRegions = _globalTrackingRegionWithVertices.clone(RegionPSet=dict(
    precise = True,
    useMultipleScattering = False,
    useFakeVertices = False,
    beamSpot = "offlineBeamSpot",
    useFixedError = True,
    nSigmaZ = 4.0,
    sigmaZVertex = 4.0,
    fixedError = 0.5,
    VertexCollection = "hiSelectedPixelVertex",
    ptMin = 0.3,#0.2 for pp
    useFoundVertices = True,
    originRadius = 0.02 #0.02 for pp
))
# Build hit doublets from the seed layers within the region above; the
# intermediate doublets are kept for the CA quadruplet producer below.
hiLowPtQuadStepTracksHitDoubletsCA = _hitPairEDProducer.clone(
    clusterCheck = "",
    seedingLayers = "hiLowPtQuadStepSeedLayers",
    trackingRegions = "hiLowPtQuadStepTrackingRegions",
    maxElement = 50000000,
    produceIntermediateHitDoublets = True,
    layerPairs = [0,1,2]
)
import RecoPixelVertexing.PixelLowPtUtilities.LowPtClusterShapeSeedComparitor_cfi
from RecoPixelVertexing.PixelTriplets.caHitQuadrupletEDProducer_cfi import caHitQuadrupletEDProducer as _caHitQuadrupletEDProducer
# Cellular-automaton quadruplet building from the doublets above, with a
# pt-dependent chi2 cut and the low-pT cluster-shape seed comparitor.
hiLowPtQuadStepTracksHitQuadrupletsCA = _caHitQuadrupletEDProducer.clone(
    doublets = "hiLowPtQuadStepTracksHitDoubletsCA",
    extraHitRPhitolerance = 0.0,
    SeedComparitorPSet = RecoPixelVertexing.PixelLowPtUtilities.LowPtClusterShapeSeedComparitor_cfi.LowPtClusterShapeSeedComparitor.clone(),
    maxChi2 = dict(
        pt1 = 0.7, pt2 = 2,
        value1 = 1000, value2 = 150,
    ),
    useBendingCorrection = True,
    fitFastCircle = True,
    fitFastCircleChi2Cut = True,
    CAThetaCut = 0.0017,
    CAPhiCut = 0.3,
)
# Impact-parameter/pt filter for the pixel tracks built from the quadruplets.
hiLowPtQuadStepPixelTracksFilter = hiFilter.clone(
    nSigmaTipMaxTolerance = 0,
    lipMax = 1.0,
    tipMax = 1.0,
    ptMin = 0.4, #seeding region is 0.3
)
import RecoPixelVertexing.PixelTrackFitting.pixelTracks_cfi as _mod
# Fit, filter and clean the CA quadruplets into pixel-only tracks.
hiLowPtQuadStepPixelTracks = _mod.pixelTracks.clone(
    passLabel = 'Pixel detached tracks with vertex constraint',
    # Ordered Hits
    SeedingHitSets = "hiLowPtQuadStepTracksHitQuadrupletsCA",
    # Fitter
    Fitter = "pixelFitterByHelixProjections",
    # Filter
    Filter = "hiLowPtQuadStepPixelTracksFilter",
    # Cleaner
    Cleaner = "trackCleaner"
)
import RecoPixelVertexing.PixelLowPtUtilities.TrackSeeds_cfi
# Turn the pixel tracks into trajectory seeds for the full CKF tracking.
hiLowPtQuadStepSeeds = RecoPixelVertexing.PixelLowPtUtilities.TrackSeeds_cfi.pixelTrackSeeds.clone(
        InputCollection = 'hiLowPtQuadStepPixelTracks'
)
# QUALITY CUTS DURING TRACK BUILDING
import TrackingTools.TrajectoryFiltering.TrajectoryFilter_cff
# Quality cuts applied while trajectories are being built.
hiLowPtQuadStepTrajectoryFilter = TrackingTools.TrajectoryFiltering.TrajectoryFilter_cff.CkfBaseTrajectoryFilter_block.clone(
    #maxLostHits = 1,
    minimumNumberOfHits = 3,#3 for pp
    minPt = 0.075,# 0.075 for pp
    #constantValueForLostHitsFractionFilter = 0.701
)
import TrackingTools.KalmanUpdators.Chi2MeasurementEstimator_cfi
# Hit-compatibility estimator used by the trajectory builder below.
hiLowPtQuadStepChi2Est = TrackingTools.KalmanUpdators.Chi2MeasurementEstimator_cfi.Chi2MeasurementEstimator.clone(
    ComponentName = 'hiLowPtQuadStepChi2Est',
    nSigma = 3.0,
    MaxChi2 = 9.0
)
# TRACK BUILDING
import RecoTracker.CkfPattern.GroupedCkfTrajectoryBuilder_cfi
# Grouped CKF trajectory builder wired to the filter/estimator above.
hiLowPtQuadStepTrajectoryBuilder = RecoTracker.CkfPattern.GroupedCkfTrajectoryBuilder_cfi.GroupedCkfTrajectoryBuilder.clone(
    trajectoryFilter = dict(refToPSet_ = 'hiLowPtQuadStepTrajectoryFilter'),
    maxCand = 4, # 4 for pp
    estimator = 'hiLowPtQuadStepChi2Est',
    maxDPhiForLooperReconstruction = 2.0, # 2.0 for pp
    # 0.63 GeV is the maximum pT for a charged particle to loop within the 1.1m radius
    # of the outermost Tracker barrel layer (B=3.8T)
    maxPtForLooperReconstruction = 0.7, # 0.7 for pp
    alwaysUseInvalidHits = False
)
# MAKING OF TRACK CANDIDATES
# Trajectory cleaner in default
import RecoTracker.CkfPattern.CkfTrackCandidates_cfi
# Build track candidates from the seeds, again skipping the masked clusters.
hiLowPtQuadStepTrackCandidates = RecoTracker.CkfPattern.CkfTrackCandidates_cfi.ckfTrackCandidates.clone(
    src = 'hiLowPtQuadStepSeeds',
    ### these two parameters are relevant only for the CachingSeedCleanerBySharedInput
    numHitsForSeedCleaner = 50,
    onlyPixelHitsForSeedCleaner = True,
    TrajectoryBuilderPSet = dict(refToPSet_ = 'hiLowPtQuadStepTrajectoryBuilder'),
    clustersToSkip = 'hiLowPtQuadStepClusters',
    doSeedingRegionRebuilding = True,
    useHitsSplitting = True
)
# TRACK FITTING
import RecoTracker.TrackProducer.TrackProducer_cfi
# Final fit of the candidates; tracks are labelled 'lowPtQuadStep'.
hiLowPtQuadStepTracks = RecoTracker.TrackProducer.TrackProducer_cfi.TrackProducer.clone(
    src = 'hiLowPtQuadStepTrackCandidates',
    AlgorithmName = 'lowPtQuadStep',
    Fitter = 'FlexibleKFFittingSmoother'
)
# Final selection
import RecoHI.HiTracking.hiMultiTrackSelector_cfi
# Final quality selection: chained loose -> tight -> highPurity selectors,
# the tighter ones using the MVA trained for this iteration.
hiLowPtQuadStepSelector = RecoHI.HiTracking.hiMultiTrackSelector_cfi.hiMultiTrackSelector.clone(
    src ='hiLowPtQuadStepTracks',
    useAnyMVA = True,
    GBRForestLabel = 'HIMVASelectorIter8',#FIXME MVA for new iteration
    GBRForestVars = ['chi2perdofperlayer', 'nhits', 'nlayers', 'eta'],
    trackSelectors = cms.VPSet(
       RecoHI.HiTracking.hiMultiTrackSelector_cfi.hiLooseMTS.clone(
           name = 'hiLowPtQuadStepLoose',
           applyAdaptedPVCuts = False,
           useMVA = False,
       ), #end of pset
       RecoHI.HiTracking.hiMultiTrackSelector_cfi.hiTightMTS.clone(
           name = 'hiLowPtQuadStepTight',
           preFilterName = 'hiLowPtQuadStepLoose',
           applyAdaptedPVCuts = False,
           useMVA = True,
           minMVA = -0.2
       ),
       RecoHI.HiTracking.hiMultiTrackSelector_cfi.hiHighpurityMTS.clone(
           name = 'hiLowPtQuadStep',
           preFilterName = 'hiLowPtQuadStepTight',
           applyAdaptedPVCuts = False,
           useMVA = True,
           minMVA = -0.09
       ),
    ) #end of vpset
) #end of clone
from Configuration.Eras.Modifier_trackingPhase1_cff import trackingPhase1
# Phase-1 tracker era: no MVA available, so disable it and replace the
# selector chain with cut-based versions of the same three selectors.
trackingPhase1.toModify(hiLowPtQuadStepSelector, useAnyMVA = False)
trackingPhase1.toModify(hiLowPtQuadStepSelector, trackSelectors = cms.VPSet(
    RecoHI.HiTracking.hiMultiTrackSelector_cfi.hiLooseMTS.clone(
        name = 'hiLowPtQuadStepLoose',
        applyAdaptedPVCuts = False,
        useMVA = False,
    ), #end of pset
    RecoHI.HiTracking.hiMultiTrackSelector_cfi.hiTightMTS.clone(
        name = 'hiLowPtQuadStepTight',
        preFilterName = 'hiLowPtQuadStepLoose',
        applyAdaptedPVCuts = False,
        useMVA = False,
        minMVA = -0.2
    ),
    RecoHI.HiTracking.hiMultiTrackSelector_cfi.hiHighpurityMTS.clone(
        name = 'hiLowPtQuadStep',
        preFilterName = 'hiLowPtQuadStepTight',
        applyAdaptedPVCuts = False,
        useMVA = False,
        minMVA = -0.09
    ),
    ) #end of vpset
)
import RecoTracker.FinalTrackSelectors.trackListMerger_cfi
# Attach the selected quality flags back onto the track collection.
hiLowPtQuadStepQual = RecoTracker.FinalTrackSelectors.trackListMerger_cfi.trackListMerger.clone(
    TrackProducers = ['hiLowPtQuadStepTracks'],
    hasSelector = [1],
    selectedTrackQuals = ["hiLowPtQuadStepSelector:hiLowPtQuadStep"],
    copyExtras = True,
    makeReKeyedSeeds = cms.untracked.bool(False),
    )
# Full task for this iteration, in dependency order.
hiLowPtQuadStepTask = cms.Task(hiLowPtQuadStepClusters,
                               hiLowPtQuadStepSeedLayers,
                               hiLowPtQuadStepTrackingRegions,
                               hiLowPtQuadStepTracksHitDoubletsCA,
                               hiLowPtQuadStepTracksHitQuadrupletsCA,
                               pixelFitterByHelixProjections,
                               hiLowPtQuadStepPixelTracksFilter,
                               hiLowPtQuadStepPixelTracks,
                               hiLowPtQuadStepSeeds,
                               hiLowPtQuadStepTrackCandidates,
                               hiLowPtQuadStepTracks,
                               hiLowPtQuadStepSelector,
                               hiLowPtQuadStepQual)
hiLowPtQuadStep = cms.Sequence(hiLowPtQuadStepTask)
|
[
"Configuration.Eras.Modifier_trackingPhase1_cff.trackingPhase1.toModify",
"RecoTracker.TkHitPairs.hitPairEDProducer_cfi.hitPairEDProducer.clone",
"RecoPixelVertexing.PixelTrackFitting.pixelTracks_cfi.pixelTracks.clone"
] |
[((2342, 2570), 'RecoTracker.TkHitPairs.hitPairEDProducer_cfi.hitPairEDProducer.clone', '_hitPairEDProducer.clone', ([], {'clusterCheck': '""""""', 'seedingLayers': '"""hiLowPtQuadStepSeedLayers"""', 'trackingRegions': '"""hiLowPtQuadStepTrackingRegions"""', 'maxElement': '(50000000)', 'produceIntermediateHitDoublets': '(True)', 'layerPairs': '[0, 1, 2]'}), "(clusterCheck='', seedingLayers=\n 'hiLowPtQuadStepSeedLayers', trackingRegions=\n 'hiLowPtQuadStepTrackingRegions', maxElement=50000000,\n produceIntermediateHitDoublets=True, layerPairs=[0, 1, 2])\n", (2366, 2570), True, 'from RecoTracker.TkHitPairs.hitPairEDProducer_cfi import hitPairEDProducer as _hitPairEDProducer\n'), ((3606, 3869), 'RecoPixelVertexing.PixelTrackFitting.pixelTracks_cfi.pixelTracks.clone', '_mod.pixelTracks.clone', ([], {'passLabel': '"""Pixel detached tracks with vertex constraint"""', 'SeedingHitSets': '"""hiLowPtQuadStepTracksHitQuadrupletsCA"""', 'Fitter': '"""pixelFitterByHelixProjections"""', 'Filter': '"""hiLowPtQuadStepPixelTracksFilter"""', 'Cleaner': '"""trackCleaner"""'}), "(passLabel=\n 'Pixel detached tracks with vertex constraint', SeedingHitSets=\n 'hiLowPtQuadStepTracksHitQuadrupletsCA', Fitter=\n 'pixelFitterByHelixProjections', Filter=\n 'hiLowPtQuadStepPixelTracksFilter', Cleaner='trackCleaner')\n", (3628, 3869), True, 'import RecoPixelVertexing.PixelTrackFitting.pixelTracks_cfi as _mod\n'), ((7583, 7648), 'Configuration.Eras.Modifier_trackingPhase1_cff.trackingPhase1.toModify', 'trackingPhase1.toModify', (['hiLowPtQuadStepSelector'], {'useAnyMVA': '(False)'}), '(hiLowPtQuadStepSelector, useAnyMVA=False)\n', (7606, 7648), False, 'from Configuration.Eras.Modifier_trackingPhase1_cff import trackingPhase1\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Example script to run shipdemo.py

shipdemo copies RAW files to a "collector" folder, simulating an echosounder
when collecting data. shipdemo is useful for testing the RapidKrill listening
routine.

Created on Thu Aug 15 12:40:32 2019
@author: <NAME>, British Antarctic Survey
"""
# -----------------------------------------------------------------------------
from shipdemo import shipdemo

# -----------------------------------------------------------------------------
# Interval, in seconds, between consecutive RAW-file copies.
#
# 10 seconds is convenient for a quick check of how the RapidKrill listening
# routine handles incoming files, but it is much faster than a real
# echosounder: an EK60 collecting multifrequency data down to 1000 m at a
# 2-second ping rate can take ~5 minutes to produce a 25 Mb RAW file, and
# RapidKrill needs ~1-2 minutes to read, process and report one such file.
# Rates below that will therefore pile files up in the "pending processing
# list" -- which is exactly what this demo lets you observe.
COPY_INTERVAL_SECONDS = 10

# -----------------------------------------------------------------------------
# Start feeding RAW files into the collector folder.
shipdemo(timerate=COPY_INTERVAL_SECONDS)
|
[
"shipdemo.shipdemo"
] |
[((1315, 1336), 'shipdemo.shipdemo', 'shipdemo', ([], {'timerate': 'tr'}), '(timerate=tr)\n', (1323, 1336), False, 'from shipdemo import shipdemo\n')]
|
#!/usr/bin/env python
# coding=utf-8
"""
pattern base class.
generates a test pattern.
history:
see git commits
todo:
~ all fine :-)
"""
# https://docs.python.org/2.7/howto/pyporting.html#division
from __future__ import division
import sys
import importlib
import os
import pkgutil
import collections
import array
import struct
import colorsys
import configdict
##########################################
# globals
# NOTE(review): this module-level list is never mutated in this file --
# load_all_submodules() binds a *local* of the same name, so the global
# stays empty. Confirm whether any caller relies on it before removing.
pattern_list = []
##########################################
# special functions
def _load_all_modules(path, names):
"""Load all modules in path.
usage:
# Load all modules in the current directory.
load_all_modules(__file__,__name__)
based on
http://stackoverflow.com/a/25459405/574981
from <NAME>
"""
module_names = []
# For each module in the current directory...
for importer, module_name, is_package in pkgutil.iter_modules(
[os.path.dirname(path)]
):
# print("importing:", names + '.' + module_name)
# Import the module.
importlib.import_module(names + '.' + module_name)
module_names.append(module_name)
return module_names
##########################################
# package init
# Load all modules in the current directory.
# load_all_modules(__file__, __name__)
def load_all_submodules():
    """Load all submodules in this directory.

    Returns the list of module names found next to this file. Note that
    the module-level ``pattern_list`` global is NOT updated (the original
    code only bound a local of that name).
    """
    return _load_all_modules(__file__, __name__)
##########################################
# functions
def map(value, in_low, in_high, out_low, out_high):
    """
    Linearly map *value* from [in_low, in_high] to [out_low, out_high].

    ((value - in_low) * (out_high - out_low)) / (in_high - in_low) + out_low

    No clamping and no interpolation function is applied; values outside
    the input range extrapolate linearly. Raises ZeroDivisionError when
    in_low == in_high, as before.

    NOTE: intentionally shadows the ``map`` builtin -- the name is part
    of this module's public API.

    based on http://arduino.cc/en/Reference/Map
    and http://stackoverflow.com/a/5650012/574981
    """
    scaled = (value - in_low) * (out_high - out_low)
    return scaled / (in_high - in_low) + out_low
def map_bound(value, in_low, in_high, out_low, out_high):
    """Map *value* to the output range, clamping at both ends.

    Inputs at or below in_low return out_low; at or above in_high return
    out_high; everything in between is mapped linearly.
    """
    if value <= in_low:
        return out_low
    if value >= in_high:
        return out_high
    # http://stackoverflow.com/a/5650012/574981
    return out_low + (
        (out_high - out_low) * (value - in_low) / (in_high - in_low)
    )
def map_01_to_8bit(value):
    """Map a 0.0-1.0 float to the 0-255 integer range (clamped)."""
    return int(map_bound(value, 0.0, 1.0, 0, 255))
def map_01_to_16bit(value):
    """Map value from 0-1 range to 0-65535 range.

    No clamping is applied: inputs outside [0, 1] scale proportionally
    (the clamping variant of the original implementation was left
    commented out and is preserved as-is here).
    """
    return int(65535 * value)
def map_16bit_to_01(value):
    """Map a 0-65535 value to the 0.0-1.0 float range (clamped)."""
    return map_bound(value, 0, 65535, 0.0, 1.0)
def map_16bit_to_8bit(value):
    """Map value from 0-65535 range to 0-255 range.

    Out-of-range input is clamped first; the conversion itself simply
    drops the low byte (value >> 8).
    """
    if 0 <= value < 65535:
        clamped = value
    else:
        clamped = min(max(value, 0), 65535)
    return clamped >> 8
def calculate_16bit_parts(value):
    """Split a 16 bit value into its (high_byte, low_byte) parts.

    Values outside 0..65535 are clamped into range first. The split is
    done with bit operations (equivalent to divmod(value, 256), but
    faster).
    """
    clamped = min(max(value, 0), 65535)
    return clamped >> 8, clamped & 255
def calculate_16bit_values(value, mode_16bit=False):
    """Split *value* into (high_byte, low_byte) via struct packing.

    *mode_16bit* is accepted for API compatibility but currently unused
    (the 8 bit branch of the original implementation was commented out).
    Values above 65535 are clamped; negative or non-integer input raises
    struct.error, exactly as before.
    """
    if value > 65535:
        value = 65535
    packed = struct.pack("<H", value)
    low_byte, high_byte = struct.unpack("<BB", packed)
    return high_byte, low_byte
def calculate_16bit_values_as_dict(value, mode_16bit=False):
    """
    Split *value* into its high and low byte parts.

    Returns them as a dict with keys 'high' and 'low'.
    """
    high_byte, low_byte = calculate_16bit_values(value, mode_16bit)
    return {'high': high_byte, 'low': low_byte}
def hsv_01_to_rgb_16bit(hue, saturation, value, mode_16bit):
    """
    Convert HSV (0-1 floats) to 16 bit RGB high/low byte dicts.

    Returns {'red': {...}, 'green': {...}, 'blue': {...}} where each
    channel dict comes from calculate_16bit_values_as_dict.
    """
    rgb = colorsys.hsv_to_rgb(hue, saturation, value)
    return {
        channel: calculate_16bit_values_as_dict(
            map_01_to_16bit(part),
            mode_16bit
        )
        for channel, part in zip(('red', 'green', 'blue'), rgb)
    }
##########################################
# classes
# Named (hb, lb) pair for the high/low bytes of a 16 bit value.
# NOTE(review): not referenced elsewhere in this file -- presumably used by
# pattern submodules; confirm before removing.
Value_16bit = collections.namedtuple('Value_16bit', ['hb', 'lb'])
class Pattern(object):
    """Base Pattern Class.

    Subclasses may declare ``config_defaults`` (a dict) as a class
    attribute; it is merged into the per-instance config on init.
    """

    # Default for subclasses that declare no defaults of their own.
    # BUGFIX: the original base class had no such attribute, so reading
    # ``self.config_defaults`` in __init__ raised AttributeError whenever
    # a subclass did not define it.
    config_defaults = None

    def __init__(self, config, config_global):
        """Init pattern.

        :param config: per-pattern configuration dict (extended in place
            with a copy of ``config_defaults``).
        :param config_global: shared configuration dict; must provide the
            keys read by :meth:`update_config` plus ``'value'``.
        """
        # merge config with defaults
        if not self.config_defaults:
            self.config_defaults = {}
        # extend config with defaults
        self.config = config
        configdict.extend_deep(self.config, self.config_defaults.copy())
        self.config_global = config_global
        # shared value presets ('off' / 'low' / 'high') from the global config
        self.values = config_global['value']
        self.update_config()

    def update_config(self):
        """Update all internal values from config_global."""
        self.channel_count = self.config_global['channel_count']
        self.pixel_count = self.config_global['pixel_count']
        self.pixel_index_max = self.pixel_count - 1
        self.repeat_count = self.config_global['repeat_count']
        self.repeat_snake = self.config_global['repeat_snake']
        self.update_interval = self.config_global['update_interval']
        self.mode_16bit = self.config_global['mode_16bit']
        self.color_channels = self.config_global['color_channels']
        self.color_channels_count = len(self.color_channels)
        # in 16 bit mode every color channel occupies two output channels
        if self.mode_16bit:
            self.color_channels_count = self.color_channels_count * 2
        self.total_channel_count = (
            self.pixel_count *
            self.color_channels_count
        )
        if self.repeat_count > 0:
            self.total_channel_count *= self.repeat_count

    def _calculate_16bit_values(self, value):
        """Calculate the low and high part representations of value."""
        return calculate_16bit_values(value, self.mode_16bit)

    def _hsv_01_to_rgb_16bit(self, hue, saturation, value):
        """Convert 0-1 HSV to 16 bit RGB byte dicts (honours mode_16bit)."""
        return hsv_01_to_rgb_16bit(hue, saturation, value, self.mode_16bit)

    def _calculate_step(self, universe):
        """Calculate a single output step.

        Returns a zero-filled ``array('B')`` of ``total_channel_count``
        bytes; subclasses are expected to fill it with meaningful data.
        """
        self.update_config()
        # prepare temp array
        data_output = array.array('B')
        data_output.append(0)
        # multiplying the one-element array is much faster than a for loop
        data_output *= self.total_channel_count
        return data_output
##########################################
if __name__ == '__main__':
    # Running this module directly only prints a short info banner.
    banner = 42 * '*'
    print(banner)
    print('Python Version: ' + sys.version)
    print(banner)
    print(__doc__)
    print(banner)
    print("This Module has no stand alone functionality.")
    print(banner)
##########################################
|
[
"importlib.import_module",
"colorsys.hsv_to_rgb",
"os.path.dirname",
"struct.pack",
"collections.namedtuple",
"array.array"
] |
[((6622, 6673), 'collections.namedtuple', 'collections.namedtuple', (['"""Value_16bit"""', "['hb', 'lb']"], {}), "('Value_16bit', ['hb', 'lb'])\n", (6644, 6673), False, 'import collections\n'), ((6122, 6165), 'colorsys.hsv_to_rgb', 'colorsys.hsv_to_rgb', (['hue', 'saturation', 'value'], {}), '(hue, saturation, value)\n', (6141, 6165), False, 'import colorsys\n'), ((1080, 1130), 'importlib.import_module', 'importlib.import_module', (["(names + '.' + module_name)"], {}), "(names + '.' + module_name)\n", (1103, 1130), False, 'import importlib\n'), ((5242, 5266), 'struct.pack', 'struct.pack', (['"""<H"""', 'value'], {}), "('<H', value)\n", (5253, 5266), False, 'import struct\n'), ((9671, 9687), 'array.array', 'array.array', (['"""B"""'], {}), "('B')\n", (9682, 9687), False, 'import array\n'), ((956, 977), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (971, 977), False, 'import os\n')]
|
import argparse

from pack import doPack
from extract import doExtract

# Command-line front end: builds the argument parser and dispatches to the
# "pack" or "extract" sub-command handler chosen by the user.
arg_parser = argparse.ArgumentParser()
sub_commands = arg_parser.add_subparsers(title="subcommands")

# --- "pack" sub-command --------------------------------------------------
pack_cmd = sub_commands.add_parser(
    "pack",
    help="compress and pack data into a .p8 cart",
    description="Reads assets from a text file, compresses them, and writes them to the cart data of the target .p8 cart, along with the code required to unpack those assets."
)
pack_cmd.add_argument("--no-hilbert", action="store_true", help="disable rectangle subdivision and Hilbert mapping")
pack_cmd.add_argument("--no-rle", action="store_true", help="disable RLE compression")
pack_cmd.add_argument("--no-huffman", action="store_true", help="disable Huffman coding")
pack_cmd.add_argument("--spare-music", action="store_true", help="do not write into the music and sfx areas")
pack_cmd.add_argument("--gfx-only", action="store_true", help="do not write anywhere but the gfx area")
pack_cmd.add_argument("--progressbar", action="store_true", help="add a progress bar to the loader (21 tokens)")
pack_cmd.add_argument("input", help="the text file to read assets from")
pack_cmd.add_argument("output", help="the .p8 cart to write the data to")
pack_cmd.set_defaults(func=doPack)

# --- "extract" sub-command -----------------------------------------------
extract_cmd = sub_commands.add_parser(
    "extract",
    help="extract data from a .p8 cart",
    description="Reads data from a .p8 cart and outputs it into a text file that can be read by the packer. Choose the cart data area to extract, \"soundtrack\" if you wish to combine the music and sfx areas into one asset, \"all\" if you wish to dump all cart data into one big asset, or \"bitmap\" if you wish to extract an image from the sprite sheet. Background color (black by default) will be cropped out and the width will be padded to an even amount of pixels."
)
extract_cmd.add_argument("source", choices=["bitmap", "soundtrack", "all", "gfx", "gff", "map", "sfx", "music"], help="the cart data area to extract")
extract_cmd.add_argument("input", help="the .p8 file to extract from")
extract_cmd.add_argument("output", help="the text file to output to")
extract_cmd.add_argument("assetname", help="a name for the asset")
extract_cmd.add_argument("--bgcolor", help="define background color for bitmap, default is 0", default="0")
extract_cmd.set_defaults(func=doExtract)

# Dispatch: run the chosen sub-command, or show help when none was given.
cli_args = arg_parser.parse_args()
if hasattr(cli_args, 'func'):
    cli_args.func(cli_args)
else:
    arg_parser.print_help()
|
[
"argparse.ArgumentParser"
] |
[((81, 106), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (104, 106), False, 'import argparse\n')]
|
""" imports & globals """
from azure.storage import CloudStorageAccount
from azure.storage.blob import BlockBlobService, Blob, ContentSettings
import datetime, os
from ast import literal_eval
from functools import wraps
import uuid
from mimetypes import guess_type, guess_extension, guess_all_extensions
""" helpers """
from azurestoragewrap.snippets import safe_cast, test_azurestorage_nameconventions
""" encryption """
from azurestoragewrap.encryption import (
KeyWrapper,
KeyResolver
)
""" custom Exceptions """
from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError
""" logging """
import logging
log = logging.getLogger('azurestoragewrap')
""" model base classes """
class StorageBlobModel(Blob):
    """Base class for user defined blob models.

    A subclass describes one blob container: its public class attributes
    (and their defaults) become typed blob metadata fields, while the blob
    payload itself is handled via fromfile/fromtext/tofile/totext.
    """
    # container to store instances in; empty -> lowercased class name
    _containername = ''
    # True -> container is registered with client side encryption
    _encrypt = False
    # format strings handed to safe_cast for date / datetime metadata
    _dateformat = ''
    _datetimeformat = ''

    def __init__(self, **kwargs):
        """Create a StorageBlobModel instance.

        Keyword args may contain 'name' (the blob name; defaults to a
        dash-less uuid4) plus any of the public class attributes. Values
        are cast to the type of the attribute's default and mirrored into
        self.metadata.
        """
        super().__init__()

        # determine blob configuration
        if self.__class__._containername == '':
            self._containername = self.__class__.__name__.lower()
        else:
            self._containername = self.__class__._containername
        self._encrypt = self.__class__._encrypt
        self._dateformat = self.__class__._dateformat
        self._datetimeformat = self.__class__._datetimeformat

        # blob name: fall back to a uuid4 without dashes when not given
        name = kwargs.get('name', None)
        if name is None:
            self.name = str(uuid.uuid4()).replace('-', '')
        else:
            self.name = str(name)

        # collect metadata from **kwargs
        metadataimage = {}
        for key, default in vars(self.__class__).items():
            # BUGFIX: the original tested "key in vars(Blob).items()", which
            # compares a str against (name, value) tuples and never matches;
            # membership in the mapping itself really skips Blob's attributes.
            if not key.startswith('_') and key != '' and key not in vars(Blob):
                if key in kwargs:
                    value = kwargs.get(key)
                    to_type = type(default)
                    if to_type is datetime.datetime:
                        value = safe_cast(value, to_type, default, self._datetimeformat)
                    elif to_type is datetime.date:
                        value = safe_cast(value, to_type, default, self._dateformat)
                    else:
                        value = safe_cast(value, to_type, default)
                    setattr(self, key, value)
                    metadataimage[key] = value
                else:
                    setattr(self, key, default)
                    metadataimage[key] = default

        # blob source filename
        self.filename = ''
        metadataimage['filename'] = ''

        # init metadata
        self.metadata = metadataimage

    def __instance_to_metadata__(self):
        """Serialize the public model attributes into self.metadata.

        Date/datetime attributes are formatted with the configured format
        strings; everything else is stored as-is.
        """
        image = {}
        image['filename'] = self.filename
        for key, default in vars(self.__class__).items():
            # see __init__: membership test fixed from vars(Blob).items()
            if not key.startswith('_') and key != '' and key not in vars(Blob):
                # BUGFIX: the original used two independent "if" statements
                # whose trailing "else" overwrote the formatted date value.
                # Check datetime before date (datetime is a date subclass)
                # in a single if/elif/else chain.
                if isinstance(default, datetime.datetime):
                    image[key] = safe_cast(getattr(self, key, default), str, dformat=self._datetimeformat)
                elif isinstance(default, datetime.date):
                    image[key] = safe_cast(getattr(self, key, default), str, dformat=self._dateformat)
                else:
                    image[key] = getattr(self, key, default)
        self.metadata = image

    def __mergeblob__(self, message):
        """Merge the vars of an azure Blob instance (incl. metadata) into this model."""
        if isinstance(message, Blob):
            # merge blob metadata vars
            for key, value in vars(message).items():
                if not value is None:
                    setattr(self, key, value)
                    if (key == 'metadata'):
                        for metakey, metavalue in message.metadata.items():
                            default = getattr(self, metakey, None)
                            if not default is None:
                                # BUGFIX: same if/if/else chain problem as in
                                # __instance_to_metadata__; additionally the
                                # datetime branch cast to datetime.date with the
                                # datetime format - cast to datetime.datetime.
                                if isinstance(default, datetime.datetime):
                                    setattr(self, metakey, safe_cast(metavalue, datetime.datetime, dformat=self._datetimeformat))
                                elif isinstance(default, datetime.date):
                                    setattr(self, metakey, safe_cast(metavalue, datetime.date, dformat=self._dateformat))
                                else:
                                    setattr(self, metakey, metavalue)

    def fromfile(self, path_to_file, mimetype=None):
        """
        load blob content from file in StorageBlobModel instance. Parameters are:
        - path_to_file (required): path to a local file
        - mimetype (optional): set a mimetype. azurestoragewrap will guess it if not given
        """
        if os.path.isfile(path_to_file):
            # Load file into self.content
            self.filename = os.path.basename(path_to_file)
            with open(path_to_file, "rb") as in_file:
                self.content = in_file.read()
            # guess mime-type when not given explicitly
            self.properties.content_settings = ContentSettings()
            if mimetype is None:
                mimetype = guess_type(path_to_file)
                if mimetype[0] is None:
                    mimetype = 'application/octet-stream'
                else:
                    if not mimetype[1] is None:
                        self.properties.content_settings.content_encoding = mimetype[1]
                    mimetype = mimetype[0]
            self.properties.content_settings.content_type = mimetype
        else:
            raise AzureStorageWrapException(self, 'Can not load blob content, because given path is not a local file')

    def fromtext(self, text, encoding='utf-8', mimetype='text/plain'):
        """
        set blob content from given text in StorageBlobModel instance. Parameters are:
        - text (required): the text content to store
        - encoding (optional): text encoding (default is utf-8)
        - mimetype (optional): content type to record (default is text/plain)
        """
        if isinstance(text, str):
            text = text.encode(encoding, 'ignore')
            # Load text into self.content
            self.content = bytes(text)
            self.properties.content_settings = ContentSettings(content_type=mimetype, content_encoding=encoding)
        else:
            raise AzureStorageWrapException(self, 'Can not load blob content, because given text is not from type string')

    def tofile(self, path_to_file, replace_file=False):
        """
        save blob content from StorageBlobModel instance to file in given path/file. Parameters are:
        - path_to_file (required): local path or file
        - replace_file (optional): overwrite an existing target file
        Returns the full path of the written file.
        """
        # create full path
        if os.path.isdir(path_to_file):
            if self.filename != '':
                path_to_file = os.path.join(path_to_file, self.filename)
            else:
                # guess extension from mimetype
                path_to_file = os.path.join(path_to_file, self.name + guess_extension(self.properties.content_settings.content_type))
        elif os.path.isfile(path_to_file):
            # check if given file extension fits to self.filename or mime type
            if self.filename != '':
                if os.path.splitext(self.filename)[1] != os.path.splitext(path_to_file)[1]:
                    raise AzureStorageWrapException(self, 'can not save blob to file because file extention {!s} does not fit to source file or mime type'.format(path_to_file))
            else:
                mimetype = guess_type(path_to_file)[0]
                # BUGFIX: the original compared the undefined name "mimtype"
                # (NameError at runtime); compare the guessed mimetype instead.
                if mimetype != self.properties.content_settings.content_type:
                    raise AzureStorageWrapException(self, 'can not save blob to file because file extention {!s} does not fit to source file or mime type'.format(path_to_file))
        else:
            raise AzureStorageWrapException(self, 'can not save blob to file because {!s} is not a dir nor a file'.format(path_to_file))
        # check if file exists (and replace or error)
        if os.path.isfile(path_to_file):
            if replace_file:
                os.remove(path_to_file)
            else:
                raise AzureStorageWrapException(self, 'can not save blob to file {!s} because file exists and replace_file is False'.format(path_to_file))
        # save blob content to the target file
        self.filename = os.path.basename(path_to_file)
        with open(path_to_file, "wb") as out_file:
            out_file.write(self.content)
        return path_to_file

    def totext(self) ->str:
        """
        return blob content from StorageBlobModel instance as a string.
        Requires content_encoding to be set (raises otherwise).
        """
        sreturn = ''
        if self.properties.content_settings.content_encoding is None:
            raise AzureStorageWrapException(self, 'can not convert blob {!s} to text because content_encoding is not given'.format(self.name))
        else:
            sreturn = self.content.decode(self.properties.content_settings.content_encoding, 'ignore')
        return sreturn

    def exists(self)->bool:
        """ returns a flag that indicates if the blob is in storage or not """
        # etag is only populated after a round-trip to the storage service
        if self.properties.etag is None:
            return None
        else:
            return True
""" wrapper classes """
class StorageBlobContext():
    """Initializes the repository with the specified settings dict.
    Required settings in config dict are:
    - AZURE_STORAGE_NAME
    - AZURE_STORAGE_KEY
    - AZURE_REQUIRE_ENCRYPTION
    - AZURE_KEY_IDENTIFIER
    - AZURE_SECRET_KEY
    - AZURE_STORAGE_IS_EMULATED
    """
    _account = None
    _account_name = ''
    _account_key = ''
    _is_emulated = False
    _modeldefinitions = []
    REGISTERED = True

    """ decorators """
    def get_modeldefinition(registered=False):
        """Decorator factory: resolve and inject the modeldefinition dict
        for the StorageBlobModel argument of the wrapped method.

        With registered=True the model must already be registered, otherwise
        ModelNotRegisteredError is raised.
        """
        def wrap(func):
            @wraps(func)
            def wrapper(self, storagemodel, modeldefinition=None, *args, **kwargs):
                # modeldefinition already determined by the caller
                if not modeldefinition is None:
                    return func(self, storagemodel, modeldefinition, *args, **kwargs)
                # find modeldefinition for the given StorageBlobModel
                if isinstance(storagemodel, StorageBlobModel):
                    definitionlist = [definition for definition in self._modeldefinitions if definition['modelname'] == storagemodel.__class__.__name__]
                else:
                    log.info('Argument is not an StorageBlobModel')
                    raise AzureStorageWrapException(storagemodel, "Argument is not an StorageBlobModel")
                if len(definitionlist) == 1:
                    modeldefinition = definitionlist[0]
                elif len(definitionlist) > 1:
                    raise ModelRegisteredMoreThanOnceError(storagemodel)
                if registered and (not isinstance(modeldefinition, dict)):
                    raise ModelNotRegisteredError(storagemodel)
                return func(self, storagemodel, modeldefinition, *args, **kwargs)
            return wrapper
        return wrap

    def __init__(self, **kwargs):
        """Parse the settings dict and create the CloudStorageAccount."""
        self._account_name = kwargs.get('AZURE_STORAGE_NAME', '')
        self._account_key = kwargs.get('AZURE_STORAGE_KEY', '')
        self._is_emulated = kwargs.get('AZURE_STORAGE_IS_EMULATED', False)
        self._key_identifier = kwargs.get('AZURE_KEY_IDENTIFIER', '')
        self._secret_key = kwargs.get('AZURE_SECRET_KEY', '')
        # account & service init
        if self._is_emulated:
            self._account = CloudStorageAccount(is_emulated=True)
        elif self._account_name != '' and self._account_key != '':
            self._account = CloudStorageAccount(self._account_name, self._account_key)
        else:
            # BUGFIX: the original raised the undefined name AzureException
            # (NameError); raise the package exception with a clear message.
            raise AzureStorageWrapException(msg='missing AZURE_STORAGE_NAME/AZURE_STORAGE_KEY and emulator not enabled')
        # registered models
        self._modeldefinitions = []

    def __create__(self, modeldefinition:dict) -> bool:
        """Create the container for the given modeldefinition (idempotent)."""
        if (not modeldefinition['blobservice'] is None):
            try:
                modeldefinition['blobservice'].create_container(modeldefinition['container'])
                return True
            except Exception as e:
                msg = 'failed to create {} with error {}'.format(modeldefinition['container'], e)
                raise AzureStorageWrapException(msg=msg)
        else:
            return True

    def __delete__(self, modeldefinition:dict) -> bool:
        """Delete the container for the given modeldefinition."""
        if (not modeldefinition['blobservice'] is None):
            try:
                modeldefinition['blobservice'].delete_container(modeldefinition['container'])
                return True
            except Exception as e:
                msg = 'failed to delete {} with error {}'.format(modeldefinition['container'], e)
                raise AzureStorageWrapException(msg=msg)
        else:
            return True

    @get_modeldefinition()
    def register_model(self, storagemodel:object, modeldefinition = None):
        """ set up an Blobservice for an StorageBlobModel in your Azure Storage Account
        Will create the Container if not exist!
        required Parameter is:
        - storagemodel: StorageBlobModel(Object)
        """
        if modeldefinition is None:
            # test if containername already exists
            if [model for model in self._modeldefinitions if model['container'] == storagemodel._containername]:
                raise NameConventionError(storagemodel._containername)
            # test if containername fits to azure naming rules
            if not test_azurestorage_nameconventions(storagemodel._containername, 'StorageBlobModel'):
                raise NameConventionError(storagemodel._containername)
            # now register model
            modeldefinition = {
                'modelname': storagemodel.__class__.__name__,
                'container': storagemodel._containername,
                'encrypt': storagemodel._encrypt,
                'blobservice': self._account.create_block_blob_service()
                }
            # encrypt blob service
            if modeldefinition['encrypt']:
                # Create the KEK used for encryption.
                # KeyWrapper is the provided sample implementation, but the user may use their own object as long as it implements the interface above.
                kek = KeyWrapper(self._key_identifier, self._secret_key) # Key identifier
                # Create the key resolver used for decryption.
                # KeyResolver is the provided sample implementation, but the user may use whatever implementation they choose so long as the function set on the service object behaves appropriately.
                key_resolver = KeyResolver()
                key_resolver.put_key(kek)
                # Set the require Encryption, KEK and key resolver on the service object.
                modeldefinition['blobservice'].require_encryption = True
                modeldefinition['blobservice'].key_encryption_key = kek
                # BUGFIX: the original assigned "key_resolver_funcion" (typo),
                # so the azure SDK never saw the resolver and decryption failed.
                modeldefinition['blobservice'].key_resolver_function = key_resolver.resolve_key
            self.__create__(modeldefinition)
            self._modeldefinitions.append(modeldefinition)
            log.info('model {} registered successfully. Models are {!s}.'.format(modeldefinition['modelname'], [model['modelname'] for model in self._modeldefinitions]))
        else:
            log.info('model {} already registered. Models are {!s}.'.format(modeldefinition['modelname'], [model['modelname'] for model in self._modeldefinitions]))

    @get_modeldefinition(REGISTERED)
    def unregister_model(self, storagemodel:object, modeldefinition = None, delete_blob=False):
        """ clear up an Blobservice for an StorageBlobModel in your Azure Storage Account
        Will delete the hole Container if delete_blob Flag is True!
        required Parameter is:
        - storagemodel: StorageBlobModel(Object)
        Optional Parameter is:
        - delete_blob: bool
        """
        # remove from modeldefinitions
        for i in range(len(self._modeldefinitions)):
            if self._modeldefinitions[i]['modelname'] == modeldefinition['modelname']:
                del self._modeldefinitions[i]
                break
        # delete container from storage if delete_blob == True
        if delete_blob:
            self.__delete__(modeldefinition)
        log.info('model {} unregistered successfully. Models are {!s}'.format(modeldefinition['modelname'], [model['modelname'] for model in self._modeldefinitions]))

    @get_modeldefinition(REGISTERED)
    def upload(self, storagemodel:object, modeldefinition = None):
        """ insert blob message into storage """
        if (storagemodel.content is None) or (storagemodel.properties.content_settings.content_type is None):
            # No content to upload
            raise AzureStorageWrapException(storagemodel, "StorageBlobModel does not contain content nor content settings")
        else:
            blobservice = modeldefinition['blobservice']
            container_name = modeldefinition['container']
            blob_name = storagemodel.name
            try:
                # refresh metadata
                storagemodel.__instance_to_metadata__()
                # upload bytes
                blobservice.create_blob_from_bytes(
                    container_name=container_name,
                    blob_name=blob_name,
                    blob=storagemodel.content,
                    metadata=storagemodel.metadata,
                    content_settings=storagemodel.properties.content_settings
                    )
                storagemodel.properties = blobservice.get_blob_properties(container_name=container_name, blob_name=blob_name).properties
            except Exception as e:
                msg = 'can not save blob in container {} because {!s}'.format(storagemodel._containername, e)
                raise AzureStorageWrapException(storagemodel, msg=msg)
        return storagemodel

    @get_modeldefinition(REGISTERED)
    def download(self, storagemodel:object, modeldefinition = None):
        """ load blob from storage into StorageBlobModelInstance """
        if (storagemodel.name is None):
            # No content to download
            raise AzureStorageWrapException(storagemodel, "StorageBlobModel does not contain content nor content settings")
        else:
            container_name = modeldefinition['container']
            blob_name = storagemodel.name
            try:
                if modeldefinition['blobservice'].exists(container_name, blob_name):
                    # download blob and merge it into the model instance
                    blob = modeldefinition['blobservice'].get_blob_to_bytes(
                        container_name=modeldefinition['container'],
                        blob_name=storagemodel.name
                        )
                    storagemodel.__mergeblob__(blob)
            except Exception as e:
                msg = 'can not load blob from container {} because {!s}'.format(storagemodel._containername, e)
                raise AzureStorageWrapException(storagemodel, msg=msg)
        return storagemodel

    @get_modeldefinition(REGISTERED)
    def delete(self, storagemodel:object, modeldefinition = None) -> bool:
        """ delete the blob from storage """
        deleted = False
        blobservice = modeldefinition['blobservice']
        container_name = modeldefinition['container']
        blob_name = storagemodel.name
        try:
            if blobservice.exists(container_name, blob_name):
                # delete (return value of delete_blob is not used)
                blobservice.delete_blob(container_name, blob_name)
                deleted = True
        except Exception as e:
            msg = 'can not delete blob {} from storage because {!s}'.format(blob_name, e)
            raise AzureStorageWrapException(storagemodel, msg=msg)
        return deleted

    @get_modeldefinition(REGISTERED)
    def exists(self, storagemodel:object, modeldefinition = None) -> bool:
        """ test if the blob exists in storage; merges the blob into the model when found """
        exists = False
        blobservice = modeldefinition['blobservice']
        container_name = modeldefinition['container']
        blob_name = storagemodel.name
        try:
            blobs = self.list(storagemodel, modeldefinition, where=storagemodel.name)
            if len(blobs) == 1:
                storagemodel.__mergeblob__(blobs[0])
                exists = True
        except Exception as e:
            msg = 'can not retireve blob {} from storage because {!s}'.format(blob_name, e)
            raise AzureStorageWrapException(storagemodel, msg=msg)
        return exists

    @get_modeldefinition(REGISTERED)
    def list(self, storagemodel:object, modeldefinition = None, where=None) ->list:
        """ list blob messages in container (optionally filtered by name prefix) """
        blobnames = []
        try:
            if where is None:
                generator = modeldefinition['blobservice'].list_blobs(modeldefinition['container'])
            else:
                generator = modeldefinition['blobservice'].list_blobs(modeldefinition['container'], prefix=where)
            for blob in generator:
                blobnames.append(blob)
        except Exception as e:
            msg = 'can not list blobs in container {} because {!s}'.format(storagemodel._containername, e)
            # BUGFIX: the original had "finally: return blobnames", whose return
            # silently swallowed this raise; return after the try instead.
            raise AzureStorageWrapException(storagemodel, msg=msg)
        return blobnames
|
[
"os.remove",
"azure.storage.blob.ContentSettings",
"os.path.isfile",
"os.path.join",
"mimetypes.guess_type",
"azurestoragewrap.exception.ModelRegisteredMoreThanOnceError",
"azurestoragewrap.encryption.KeyWrapper",
"azurestoragewrap.encryption.KeyResolver",
"azurestoragewrap.exception.AzureStorageWrapException",
"os.path.basename",
"azurestoragewrap.exception.NameConventionError",
"azure.storage.CloudStorageAccount",
"functools.wraps",
"azurestoragewrap.snippets.safe_cast",
"uuid.uuid4",
"azurestoragewrap.snippets.test_azurestorage_nameconventions",
"os.path.isdir",
"os.path.splitext",
"azurestoragewrap.exception.ModelNotRegisteredError",
"mimetypes.guess_extension",
"logging.getLogger"
] |
[((721, 758), 'logging.getLogger', 'logging.getLogger', (['"""azurestoragewrap"""'], {}), "('azurestoragewrap')\n", (738, 758), False, 'import logging\n'), ((5060, 5088), 'os.path.isfile', 'os.path.isfile', (['path_to_file'], {}), '(path_to_file)\n', (5074, 5088), False, 'import datetime, os\n'), ((7093, 7120), 'os.path.isdir', 'os.path.isdir', (['path_to_file'], {}), '(path_to_file)\n', (7106, 7120), False, 'import datetime, os\n'), ((8464, 8492), 'os.path.isfile', 'os.path.isfile', (['path_to_file'], {}), '(path_to_file)\n', (8478, 8492), False, 'import datetime, os\n'), ((8803, 8833), 'os.path.basename', 'os.path.basename', (['path_to_file'], {}), '(path_to_file)\n', (8819, 8833), False, 'import datetime, os\n'), ((5165, 5195), 'os.path.basename', 'os.path.basename', (['path_to_file'], {}), '(path_to_file)\n', (5181, 5195), False, 'import datetime, os\n'), ((5373, 5390), 'azure.storage.blob.ContentSettings', 'ContentSettings', ([], {}), '()\n', (5388, 5390), False, 'from azure.storage.blob import BlockBlobService, Blob, ContentSettings\n'), ((5892, 5996), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', (['self', '"""Can not load blob content, because given path is not a local file"""'], {}), "(self,\n 'Can not load blob content, because given path is not a local file')\n", (5917, 5996), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((6613, 6678), 'azure.storage.blob.ContentSettings', 'ContentSettings', ([], {'content_type': 'mimetype', 'content_encoding': 'encoding'}), '(content_type=mimetype, content_encoding=encoding)\n', (6628, 6678), False, 'from azure.storage.blob import BlockBlobService, Blob, ContentSettings\n'), ((6712, 6820), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', (['self', '"""Can not load blob content, because given text is not from type string"""'], {}), 
"(self,\n 'Can not load blob content, because given text is not from type string')\n", (6737, 6820), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((7458, 7486), 'os.path.isfile', 'os.path.isfile', (['path_to_file'], {}), '(path_to_file)\n', (7472, 7486), False, 'import datetime, os\n'), ((10294, 10305), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (10299, 10305), False, 'from functools import wraps\n'), ((12121, 12158), 'azure.storage.CloudStorageAccount', 'CloudStorageAccount', ([], {'is_emulated': '(True)'}), '(is_emulated=True)\n', (12140, 12158), False, 'from azure.storage import CloudStorageAccount\n'), ((17639, 17748), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', (['storagemodel', '"""StorageBlobModel does not contain content nor content settings"""'], {}), "(storagemodel,\n 'StorageBlobModel does not contain content nor content settings')\n", (17664, 17748), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((19116, 19225), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', (['storagemodel', '"""StorageBlobModel does not contain content nor content settings"""'], {}), "(storagemodel,\n 'StorageBlobModel does not contain content nor content settings')\n", (19141, 19225), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((5464, 5488), 'mimetypes.guess_type', 'guess_type', (['path_to_file'], {}), '(path_to_file)\n', (5474, 5488), False, 'from mimetypes import guess_type, guess_extension, guess_all_extensions\n'), ((7189, 7230), 'os.path.join', 'os.path.join', (['path_to_file', 'self.filename'], {}), '(path_to_file, self.filename)\n', (7201, 7230), False, 
'import datetime, os\n'), ((8539, 8562), 'os.remove', 'os.remove', (['path_to_file'], {}), '(path_to_file)\n', (8548, 8562), False, 'import datetime, os\n'), ((12255, 12313), 'azure.storage.CloudStorageAccount', 'CloudStorageAccount', (['self._account_name', 'self._account_key'], {}), '(self._account_name, self._account_key)\n', (12274, 12313), False, 'from azure.storage import CloudStorageAccount\n'), ((14019, 14067), 'azurestoragewrap.exception.NameConventionError', 'NameConventionError', (['storagemodel._containername'], {}), '(storagemodel._containername)\n', (14038, 14067), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((14157, 14243), 'azurestoragewrap.snippets.test_azurestorage_nameconventions', 'test_azurestorage_nameconventions', (['storagemodel._containername', '"""StorageBlobModel"""'], {}), "(storagemodel._containername,\n 'StorageBlobModel')\n", (14190, 14243), False, 'from azurestoragewrap.snippets import safe_cast, test_azurestorage_nameconventions\n'), ((14263, 14311), 'azurestoragewrap.exception.NameConventionError', 'NameConventionError', (['storagemodel._containername'], {}), '(storagemodel._containername)\n', (14282, 14311), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((14977, 15027), 'azurestoragewrap.encryption.KeyWrapper', 'KeyWrapper', (['self._key_identifier', 'self._secret_key'], {}), '(self._key_identifier, self._secret_key)\n', (14987, 15027), False, 'from azurestoragewrap.encryption import KeyWrapper, KeyResolver\n'), ((15340, 15353), 'azurestoragewrap.encryption.KeyResolver', 'KeyResolver', ([], {}), '()\n', (15351, 15353), False, 'from azurestoragewrap.encryption import KeyWrapper, KeyResolver\n'), ((20742, 20790), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', 
(['storagemodel'], {'msg': 'msg'}), '(storagemodel, msg=msg)\n', (20767, 20790), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((21516, 21564), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', (['storagemodel'], {'msg': 'msg'}), '(storagemodel, msg=msg)\n', (21541, 21564), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((22292, 22340), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', (['storagemodel'], {'msg': 'msg'}), '(storagemodel, msg=msg)\n', (22317, 22340), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((11017, 11095), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', (['storagemodel', '"""Argument is not an StorageBlobModel"""'], {}), "(storagemodel, 'Argument is not an StorageBlobModel')\n", (11042, 11095), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((11436, 11473), 'azurestoragewrap.exception.ModelNotRegisteredError', 'ModelNotRegisteredError', (['storagemodel'], {}), '(storagemodel)\n', (11459, 11473), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((12843, 12877), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', ([], {'msg': 'msg'}), '(msg=msg)\n', (12868, 12877), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((13339, 13373), 
'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', ([], {'msg': 'msg'}), '(msg=msg)\n', (13364, 13373), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((18755, 18803), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', (['storagemodel'], {'msg': 'msg'}), '(storagemodel, msg=msg)\n', (18780, 18803), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((19955, 20003), 'azurestoragewrap.exception.AzureStorageWrapException', 'AzureStorageWrapException', (['storagemodel'], {'msg': 'msg'}), '(storagemodel, msg=msg)\n', (19980, 20003), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((1678, 1690), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1688, 1690), False, 'import uuid\n'), ((2220, 2276), 'azurestoragewrap.snippets.safe_cast', 'safe_cast', (['value', 'to_type', 'default', 'self._datetimeformat'], {}), '(value, to_type, default, self._datetimeformat)\n', (2229, 2276), False, 'from azurestoragewrap.snippets import safe_cast, test_azurestorage_nameconventions\n'), ((7380, 7442), 'mimetypes.guess_extension', 'guess_extension', (['self.properties.content_settings.content_type'], {}), '(self.properties.content_settings.content_type)\n', (7395, 7442), False, 'from mimetypes import guess_type, guess_extension, guess_all_extensions\n'), ((7940, 7964), 'mimetypes.guess_type', 'guess_type', (['path_to_file'], {}), '(path_to_file)\n', (7950, 7964), False, 'from mimetypes import guess_type, guess_extension, guess_all_extensions\n'), ((11287, 11333), 'azurestoragewrap.exception.ModelRegisteredMoreThanOnceError', 'ModelRegisteredMoreThanOnceError', (['storagemodel'], {}), '(storagemodel)\n', 
(11319, 11333), False, 'from azurestoragewrap.exception import AzureStorageWrapException, NameConventionError, ModelNotRegisteredError, ModelRegisteredMoreThanOnceError\n'), ((2361, 2413), 'azurestoragewrap.snippets.safe_cast', 'safe_cast', (['value', 'to_type', 'default', 'self._dateformat'], {}), '(value, to_type, default, self._dateformat)\n', (2370, 2413), False, 'from azurestoragewrap.snippets import safe_cast, test_azurestorage_nameconventions\n'), ((2472, 2506), 'azurestoragewrap.snippets.safe_cast', 'safe_cast', (['value', 'to_type', 'default'], {}), '(value, to_type, default)\n', (2481, 2506), False, 'from azurestoragewrap.snippets import safe_cast, test_azurestorage_nameconventions\n'), ((7638, 7669), 'os.path.splitext', 'os.path.splitext', (['self.filename'], {}), '(self.filename)\n', (7654, 7669), False, 'import datetime, os\n'), ((7676, 7706), 'os.path.splitext', 'os.path.splitext', (['path_to_file'], {}), '(path_to_file)\n', (7692, 7706), False, 'import datetime, os\n'), ((4365, 4426), 'azurestoragewrap.snippets.safe_cast', 'safe_cast', (['metavalue', 'datetime.date'], {'dformat': 'self._dateformat'}), '(metavalue, datetime.date, dformat=self._dateformat)\n', (4374, 4426), False, 'from azurestoragewrap.snippets import safe_cast, test_azurestorage_nameconventions\n'), ((4562, 4627), 'azurestoragewrap.snippets.safe_cast', 'safe_cast', (['metavalue', 'datetime.date'], {'dformat': 'self._datetimeformat'}), '(metavalue, datetime.date, dformat=self._datetimeformat)\n', (4571, 4627), False, 'from azurestoragewrap.snippets import safe_cast, test_azurestorage_nameconventions\n')]
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: validate/v1/validate.proto
# plugin: python-betterproto
from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import List
import betterproto
from betterproto.grpc.grpclib_server import ServiceBase
class KnownRegex(betterproto.Enum):
    """WellKnownRegex contain some well-known patterns."""
    # Default (zero) value: no specific well-known pattern selected.
    KNOWN_REGEX_UNKNOWN = 0
    # HTTP header name as defined by RFC 7230.
    KNOWN_REGEX_HTTP_HEADER_NAME = 1
    # HTTP header value as defined by RFC 7230.
    KNOWN_REGEX_HTTP_HEADER_VALUE = 2
@dataclass(eq=False, repr=False)
class FieldRules(betterproto.Message):
    """
    FieldRules encapsulates the rules for each type of field. Depending on the
    field, the correct set should be used to ensure proper validations.

    All members declared with ``group="type"`` below map to a single proto
    ``oneof``: at most one of them is set on any given FieldRules instance.
    NOTE: several member names (float, bool, bytes, map, any) intentionally
    mirror the .proto field names and therefore shadow Python builtins inside
    this class body; this is generated code, do not rename them.
    """
    message: "MessageRules" = betterproto.message_field(17)
    # Scalar Field Types
    float: "FloatRules" = betterproto.message_field(1, group="type")
    double: "DoubleRules" = betterproto.message_field(2, group="type")
    int32: "Int32Rules" = betterproto.message_field(3, group="type")
    int64: "Int64Rules" = betterproto.message_field(4, group="type")
    uint32: "UInt32Rules" = betterproto.message_field(5, group="type")
    uint64: "UInt64Rules" = betterproto.message_field(6, group="type")
    sint32: "SInt32Rules" = betterproto.message_field(7, group="type")
    sint64: "SInt64Rules" = betterproto.message_field(8, group="type")
    fixed32: "Fixed32Rules" = betterproto.message_field(9, group="type")
    fixed64: "Fixed64Rules" = betterproto.message_field(10, group="type")
    sfixed32: "SFixed32Rules" = betterproto.message_field(11, group="type")
    sfixed64: "SFixed64Rules" = betterproto.message_field(12, group="type")
    bool: "BoolRules" = betterproto.message_field(13, group="type")
    string: "StringRules" = betterproto.message_field(14, group="type")
    bytes: "BytesRules" = betterproto.message_field(15, group="type")
    # Complex Field Types
    enum: "EnumRules" = betterproto.message_field(16, group="type")
    repeated: "RepeatedRules" = betterproto.message_field(18, group="type")
    map: "MapRules" = betterproto.message_field(19, group="type")
    # Well-Known Field Types
    any: "AnyRules" = betterproto.message_field(20, group="type")
    duration: "DurationRules" = betterproto.message_field(21, group="type")
    timestamp: "TimestampRules" = betterproto.message_field(22, group="type")
@dataclass(eq=False, repr=False)
class FloatRules(betterproto.Message):
    """FloatRules describes the constraints applied to `float` values"""
    # Const specifies that this field must be exactly the specified value
    const: float = betterproto.float_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: float = betterproto.float_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: float = betterproto.float_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: float = betterproto.float_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: float = betterproto.float_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[float] = betterproto.float_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[float] = betterproto.float_field(7)
@dataclass(eq=False, repr=False)
class DoubleRules(betterproto.Message):
    """DoubleRules describes the constraints applied to `double` values"""
    # Const specifies that this field must be exactly the specified value
    const: float = betterproto.double_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: float = betterproto.double_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: float = betterproto.double_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: float = betterproto.double_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: float = betterproto.double_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[float] = betterproto.double_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[float] = betterproto.double_field(7)
@dataclass(eq=False, repr=False)
class Int32Rules(betterproto.Message):
    """Int32Rules describes the constraints applied to `int32` values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.int32_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: int = betterproto.int32_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: int = betterproto.int32_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: int = betterproto.int32_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: int = betterproto.int32_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.int32_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.int32_field(7)
@dataclass(eq=False, repr=False)
class Int64Rules(betterproto.Message):
    """Int64Rules describes the constraints applied to `int64` values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.int64_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: int = betterproto.int64_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: int = betterproto.int64_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: int = betterproto.int64_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: int = betterproto.int64_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.int64_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.int64_field(7)
@dataclass(eq=False, repr=False)
class UInt32Rules(betterproto.Message):
    """UInt32Rules describes the constraints applied to `uint32` values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.uint32_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: int = betterproto.uint32_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: int = betterproto.uint32_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: int = betterproto.uint32_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: int = betterproto.uint32_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.uint32_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.uint32_field(7)
@dataclass(eq=False, repr=False)
class UInt64Rules(betterproto.Message):
    """UInt64Rules describes the constraints applied to `uint64` values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.uint64_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: int = betterproto.uint64_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: int = betterproto.uint64_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: int = betterproto.uint64_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: int = betterproto.uint64_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.uint64_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.uint64_field(7)
@dataclass(eq=False, repr=False)
class SInt32Rules(betterproto.Message):
    """SInt32Rules describes the constraints applied to `sint32` values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.sint32_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: int = betterproto.sint32_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: int = betterproto.sint32_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: int = betterproto.sint32_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: int = betterproto.sint32_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.sint32_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.sint32_field(7)
@dataclass(eq=False, repr=False)
class SInt64Rules(betterproto.Message):
    """SInt64Rules describes the constraints applied to `sint64` values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.sint64_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: int = betterproto.sint64_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: int = betterproto.sint64_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: int = betterproto.sint64_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: int = betterproto.sint64_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.sint64_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.sint64_field(7)
@dataclass(eq=False, repr=False)
class Fixed32Rules(betterproto.Message):
    """Fixed32Rules describes the constraints applied to `fixed32` values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.fixed32_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: int = betterproto.fixed32_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: int = betterproto.fixed32_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: int = betterproto.fixed32_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: int = betterproto.fixed32_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.fixed32_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.fixed32_field(7)
@dataclass(eq=False, repr=False)
class Fixed64Rules(betterproto.Message):
    """Fixed64Rules describes the constraints applied to `fixed64` values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.fixed64_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: int = betterproto.fixed64_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: int = betterproto.fixed64_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: int = betterproto.fixed64_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: int = betterproto.fixed64_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.fixed64_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.fixed64_field(7)
@dataclass(eq=False, repr=False)
class SFixed32Rules(betterproto.Message):
    """SFixed32Rules describes the constraints applied to `sfixed32` values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.sfixed32_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: int = betterproto.sfixed32_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: int = betterproto.sfixed32_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: int = betterproto.sfixed32_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: int = betterproto.sfixed32_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.sfixed32_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.sfixed32_field(7)
@dataclass(eq=False, repr=False)
class SFixed64Rules(betterproto.Message):
    """SFixed64Rules describes the constraints applied to `sfixed64` values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.sfixed64_field(1)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: int = betterproto.sfixed64_field(2)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: int = betterproto.sfixed64_field(3)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive. If the value of Gt is larger than a specified Lt or Lte, the
    # range is reversed.
    gt: int = betterproto.sfixed64_field(4)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive. If the value of Gte is larger than a specified
    # Lt or Lte, the range is reversed.
    gte: int = betterproto.sfixed64_field(5)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.sfixed64_field(6)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.sfixed64_field(7)
@dataclass(eq=False, repr=False)
class BoolRules(betterproto.Message):
    """BoolRules describes the constraints applied to `bool` values"""
    # Const specifies that this field must be exactly the specified value
    # (i.e. the field must always be True or always False).
    const: bool = betterproto.bool_field(1)
@dataclass(eq=False, repr=False)
class StringRules(betterproto.Message):
    """StringRules describe the constraints applied to `string` values

    The members declared with ``group="well_known"`` form a single proto
    ``oneof``: at most one well-known format check is set per rule.
    """
    # Const specifies that this field must be exactly the specified value
    const: str = betterproto.string_field(1)
    # Len specifies that this field must be the specified number of characters
    # (Unicode code points). Note that the number of characters may differ from
    # the number of bytes in the string.
    len: int = betterproto.uint64_field(19)
    # MinLen specifies that this field must be the specified number of characters
    # (Unicode code points) at a minimum. Note that the number of characters may
    # differ from the number of bytes in the string.
    min_len: int = betterproto.uint64_field(2)
    # MaxLen specifies that this field must be the specified number of characters
    # (Unicode code points) at a maximum. Note that the number of characters may
    # differ from the number of bytes in the string.
    max_len: int = betterproto.uint64_field(3)
    # LenBytes specifies that this field must be exactly the specified number
    # of bytes
    len_bytes: int = betterproto.uint64_field(20)
    # MinBytes specifies that this field must be the specified number of bytes at
    # a minimum
    min_bytes: int = betterproto.uint64_field(4)
    # MaxBytes specifies that this field must be the specified number of bytes at
    # a maximum
    max_bytes: int = betterproto.uint64_field(5)
    # Pattern specifies that this field must match against the specified regular
    # expression (RE2 syntax). The included expression should elide any
    # delimiters.
    pattern: str = betterproto.string_field(6)
    # Prefix specifies that this field must have the specified substring at the
    # beginning of the string.
    prefix: str = betterproto.string_field(7)
    # Suffix specifies that this field must have the specified substring at the
    # end of the string.
    suffix: str = betterproto.string_field(8)
    # Contains specifies that this field must have the specified substring
    # anywhere in the string.
    contains: str = betterproto.string_field(9)
    # NotContains specifies that this field cannot have the specified substring
    # anywhere in the string.
    not_contains: str = betterproto.string_field(23)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[str] = betterproto.string_field(10)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[str] = betterproto.string_field(11)
    # Email specifies that the field must be a valid email address as defined by
    # RFC 5322
    email: bool = betterproto.bool_field(12, group="well_known")
    # Hostname specifies that the field must be a valid hostname as defined by
    # RFC 1034. This constraint does not support internationalized domain names
    # (IDNs).
    hostname: bool = betterproto.bool_field(13, group="well_known")
    # Ip specifies that the field must be a valid IP (v4 or v6) address. Valid
    # IPv6 addresses should not include surrounding square brackets.
    ip: bool = betterproto.bool_field(14, group="well_known")
    # Ipv4 specifies that the field must be a valid IPv4 address.
    ipv4: bool = betterproto.bool_field(15, group="well_known")
    # Ipv6 specifies that the field must be a valid IPv6 address. Valid IPv6
    # addresses should not include surrounding square brackets.
    ipv6: bool = betterproto.bool_field(16, group="well_known")
    # Uri specifies that the field must be a valid, absolute URI as defined by
    # RFC 3986
    uri: bool = betterproto.bool_field(17, group="well_known")
    # UriRef specifies that the field must be a valid URI as defined by RFC 3986
    # and may be relative or absolute.
    uri_ref: bool = betterproto.bool_field(18, group="well_known")
    # Address specifies that the field must be either a valid hostname as defined
    # by RFC 1034 (which does not support internationalized domain names or
    # IDNs), or it can be a valid IP (v4 or v6).
    address: bool = betterproto.bool_field(21, group="well_known")
    # Uuid specifies that the field must be a valid UUID as defined by RFC 4122
    uuid: bool = betterproto.bool_field(22, group="well_known")
    # WellKnownRegex specifies a common well known pattern defined as a regex.
    well_known_regex: "KnownRegex" = betterproto.enum_field(24, group="well_known")
    # This applies to regexes HTTP_HEADER_NAME and HTTP_HEADER_VALUE to enable
    # strict header validation. By default, this is true, and HTTP header
    # validations are RFC-compliant. Setting to false will enable a looser
    # validations that only disallows \r\n\0 characters, which can be used to
    # bypass header matching rules.
    strict: bool = betterproto.bool_field(25)
@dataclass(eq=False, repr=False)
class BytesRules(betterproto.Message):
    """BytesRules describe the constraints applied to `bytes` values

    The members declared with ``group="well_known"`` form a single proto
    ``oneof``: at most one well-known format check is set per rule.
    """
    # Const specifies that this field must be exactly the specified value
    const: bytes = betterproto.bytes_field(1)
    # Len specifies that this field must be the specified number of bytes
    len: int = betterproto.uint64_field(13)
    # MinLen specifies that this field must be the specified number of bytes at a
    # minimum
    min_len: int = betterproto.uint64_field(2)
    # MaxLen specifies that this field must be the specified number of bytes at a
    # maximum
    max_len: int = betterproto.uint64_field(3)
    # Pattern specifies that this field must match against the specified regular
    # expression (RE2 syntax). The included expression should elide any
    # delimiters.
    pattern: str = betterproto.string_field(4)
    # Prefix specifies that this field must have the specified bytes at the
    # beginning of the string.
    prefix: bytes = betterproto.bytes_field(5)
    # Suffix specifies that this field must have the specified bytes at the end
    # of the string.
    suffix: bytes = betterproto.bytes_field(6)
    # Contains specifies that this field must have the specified bytes anywhere
    # in the string.
    contains: bytes = betterproto.bytes_field(7)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[bytes] = betterproto.bytes_field(8)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[bytes] = betterproto.bytes_field(9)
    # Ip specifies that the field must be a valid IP (v4 or v6) address in byte
    # format
    ip: bool = betterproto.bool_field(10, group="well_known")
    # Ipv4 specifies that the field must be a valid IPv4 address in byte format
    ipv4: bool = betterproto.bool_field(11, group="well_known")
    # Ipv6 specifies that the field must be a valid IPv6 address in byte format
    ipv6: bool = betterproto.bool_field(12, group="well_known")
@dataclass(eq=False, repr=False)
class EnumRules(betterproto.Message):
    """EnumRules describe the constraints applied to enum values"""
    # Const specifies that this field must be exactly the specified value
    const: int = betterproto.int32_field(1)
    # DefinedOnly specifies that this field must be only one of the defined
    # values for this enum, failing on any undefined value.
    defined_only: bool = betterproto.bool_field(2)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[int] = betterproto.int32_field(3)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[int] = betterproto.int32_field(4)
@dataclass(eq=False, repr=False)
class MessageRules(betterproto.Message):
    """
    MessageRules describe the constraints applied to embedded message values.
    For message-type fields, validation is performed recursively.
    """
    # Skip specifies that the validation rules of this field should not be
    # evaluated (i.e. the message's own rules are not applied recursively)
    skip: bool = betterproto.bool_field(1)
    # Required specifies that this field must be set
    required: bool = betterproto.bool_field(2)
@dataclass(eq=False, repr=False)
class RepeatedRules(betterproto.Message):
    """RepeatedRules describe the constraints applied to `repeated` values"""
    # MinItems specifies that this field must have the specified number of items
    # at a minimum
    min_items: int = betterproto.uint64_field(1)
    # MaxItems specifies that this field must have the specified number of items
    # at a maximum
    max_items: int = betterproto.uint64_field(2)
    # Unique specifies that all elements in this field must be unique. This
    # constraint is only applicable to scalar and enum types (messages are not
    # supported).
    unique: bool = betterproto.bool_field(3)
    # Items specifies the constraints to be applied to each item in the field.
    # Repeated message fields will still execute validation against each item
    # unless skip is specified here.
    items: "FieldRules" = betterproto.message_field(4)
@dataclass(eq=False, repr=False)
class MapRules(betterproto.Message):
    """MapRules describe the constraints applied to `map` values"""
    # MinPairs specifies that this field must have the specified number of KVs at
    # a minimum
    min_pairs: int = betterproto.uint64_field(1)
    # MaxPairs specifies that this field must have the specified number of KVs at
    # a maximum
    max_pairs: int = betterproto.uint64_field(2)
    # NoSparse specifies values in this field cannot be unset. This only applies
    # to map's with message value types.
    no_sparse: bool = betterproto.bool_field(3)
    # Keys specifies the constraints to be applied to each key in the field.
    keys: "FieldRules" = betterproto.message_field(4)
    # Values specifies the constraints to be applied to the value of each key in
    # the field. Message values will still have their validations evaluated
    # unless skip is specified here.
    values: "FieldRules" = betterproto.message_field(5)
@dataclass(eq=False, repr=False)
class AnyRules(betterproto.Message):
    """
    AnyRules describe constraints applied exclusively to the
    `google.protobuf.Any` well-known type
    """
    # Required specifies that this field must be set
    required: bool = betterproto.bool_field(1)
    # In specifies that this field's `type_url` must be equal to one of the
    # specified values.
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[str] = betterproto.string_field(2)
    # NotIn specifies that this field's `type_url` must not be equal to any of
    # the specified values.
    not_in: List[str] = betterproto.string_field(3)
@dataclass(eq=False, repr=False)
class DurationRules(betterproto.Message):
    """
    DurationRules describe the constraints applied exclusively to the
    `google.protobuf.Duration` well-known type
    """
    # Required specifies that this field must be set
    required: bool = betterproto.bool_field(1)
    # Const specifies that this field must be exactly the specified value
    const: timedelta = betterproto.message_field(2)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: timedelta = betterproto.message_field(3)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: timedelta = betterproto.message_field(4)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive
    gt: timedelta = betterproto.message_field(5)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive
    gte: timedelta = betterproto.message_field(6)
    # In specifies that this field must be equal to one of the specified values
    # (named in_ because `in` is a reserved keyword in Python)
    in_: List[timedelta] = betterproto.message_field(7)
    # NotIn specifies that this field cannot be equal to one of the specified
    # values
    not_in: List[timedelta] = betterproto.message_field(8)
@dataclass(eq=False, repr=False)
class TimestampRules(betterproto.Message):
    """
    TimestampRules describe the constraints applied exclusively to the
    `google.protobuf.Timestamp` well-known type
    """
    # Required specifies that this field must be set
    required: bool = betterproto.bool_field(1)
    # Const specifies that this field must be exactly the specified value
    const: datetime = betterproto.message_field(2)
    # Lt specifies that this field must be less than the specified value,
    # exclusive
    lt: datetime = betterproto.message_field(3)
    # Lte specifies that this field must be less than or equal to the specified
    # value, inclusive
    lte: datetime = betterproto.message_field(4)
    # Gt specifies that this field must be greater than the specified value,
    # exclusive
    gt: datetime = betterproto.message_field(5)
    # Gte specifies that this field must be greater than or equal to the
    # specified value, inclusive
    gte: datetime = betterproto.message_field(6)
    # LtNow specifies that this must be less than the current time. LtNow can
    # only be used with the Within rule.
    lt_now: bool = betterproto.bool_field(7)
    # GtNow specifies that this must be greater than the current time. GtNow can
    # only be used with the Within rule.
    gt_now: bool = betterproto.bool_field(8)
    # Within specifies that this field must be within this duration of the
    # current time. This constraint can be used alone or with the LtNow and GtNow
    # rules.
    within: timedelta = betterproto.message_field(9)
|
[
"betterproto.int32_field",
"betterproto.bytes_field",
"betterproto.bool_field",
"betterproto.sfixed64_field",
"betterproto.int64_field",
"betterproto.sfixed32_field",
"betterproto.string_field",
"betterproto.fixed64_field",
"betterproto.fixed32_field",
"betterproto.uint64_field",
"betterproto.message_field",
"betterproto.sint64_field",
"betterproto.uint32_field",
"betterproto.float_field",
"betterproto.sint32_field",
"betterproto.double_field",
"dataclasses.dataclass",
"betterproto.enum_field"
] |
[((600, 631), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (609, 631), False, 'from dataclasses import dataclass\n'), ((2483, 2514), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (2492, 2514), False, 'from dataclasses import dataclass\n'), ((3766, 3797), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (3775, 3797), False, 'from dataclasses import dataclass\n'), ((5059, 5090), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (5068, 5090), False, 'from dataclasses import dataclass\n'), ((6328, 6359), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (6337, 6359), False, 'from dataclasses import dataclass\n'), ((7597, 7628), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (7606, 7628), False, 'from dataclasses import dataclass\n'), ((8876, 8907), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (8885, 8907), False, 'from dataclasses import dataclass\n'), ((10155, 10186), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (10164, 10186), False, 'from dataclasses import dataclass\n'), ((11434, 11465), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (11443, 11465), False, 'from dataclasses import dataclass\n'), ((12713, 12744), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (12722, 12744), False, 'from dataclasses import dataclass\n'), ((14002, 14033), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', 
(14011, 14033), False, 'from dataclasses import dataclass\n'), ((15291, 15322), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (15300, 15322), False, 'from dataclasses import dataclass\n'), ((16590, 16621), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (16599, 16621), False, 'from dataclasses import dataclass\n'), ((17889, 17920), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (17898, 17920), False, 'from dataclasses import dataclass\n'), ((18152, 18183), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (18161, 18183), False, 'from dataclasses import dataclass\n'), ((23008, 23039), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (23017, 23039), False, 'from dataclasses import dataclass\n'), ((25065, 25096), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (25074, 25096), False, 'from dataclasses import dataclass\n'), ((25782, 25813), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (25791, 25813), False, 'from dataclasses import dataclass\n'), ((26253, 26284), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (26262, 26284), False, 'from dataclasses import dataclass\n'), ((27172, 27203), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (27181, 27203), False, 'from dataclasses import dataclass\n'), ((28158, 28189), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (28167, 28189), False, 'from dataclasses import dataclass\n'), ((28758, 28789), 
'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (28767, 28789), False, 'from dataclasses import dataclass\n'), ((30046, 30077), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)', 'repr': '(False)'}), '(eq=False, repr=False)\n', (30055, 30077), False, 'from dataclasses import dataclass\n'), ((869, 898), 'betterproto.message_field', 'betterproto.message_field', (['(17)'], {}), '(17)\n', (894, 898), False, 'import betterproto\n'), ((950, 992), 'betterproto.message_field', 'betterproto.message_field', (['(1)'], {'group': '"""type"""'}), "(1, group='type')\n", (975, 992), False, 'import betterproto\n'), ((1021, 1063), 'betterproto.message_field', 'betterproto.message_field', (['(2)'], {'group': '"""type"""'}), "(2, group='type')\n", (1046, 1063), False, 'import betterproto\n'), ((1090, 1132), 'betterproto.message_field', 'betterproto.message_field', (['(3)'], {'group': '"""type"""'}), "(3, group='type')\n", (1115, 1132), False, 'import betterproto\n'), ((1159, 1201), 'betterproto.message_field', 'betterproto.message_field', (['(4)'], {'group': '"""type"""'}), "(4, group='type')\n", (1184, 1201), False, 'import betterproto\n'), ((1230, 1272), 'betterproto.message_field', 'betterproto.message_field', (['(5)'], {'group': '"""type"""'}), "(5, group='type')\n", (1255, 1272), False, 'import betterproto\n'), ((1301, 1343), 'betterproto.message_field', 'betterproto.message_field', (['(6)'], {'group': '"""type"""'}), "(6, group='type')\n", (1326, 1343), False, 'import betterproto\n'), ((1372, 1414), 'betterproto.message_field', 'betterproto.message_field', (['(7)'], {'group': '"""type"""'}), "(7, group='type')\n", (1397, 1414), False, 'import betterproto\n'), ((1443, 1485), 'betterproto.message_field', 'betterproto.message_field', (['(8)'], {'group': '"""type"""'}), "(8, group='type')\n", (1468, 1485), False, 'import betterproto\n'), ((1516, 1558), 'betterproto.message_field', 'betterproto.message_field', 
(['(9)'], {'group': '"""type"""'}), "(9, group='type')\n", (1541, 1558), False, 'import betterproto\n'), ((1589, 1632), 'betterproto.message_field', 'betterproto.message_field', (['(10)'], {'group': '"""type"""'}), "(10, group='type')\n", (1614, 1632), False, 'import betterproto\n'), ((1665, 1708), 'betterproto.message_field', 'betterproto.message_field', (['(11)'], {'group': '"""type"""'}), "(11, group='type')\n", (1690, 1708), False, 'import betterproto\n'), ((1741, 1784), 'betterproto.message_field', 'betterproto.message_field', (['(12)'], {'group': '"""type"""'}), "(12, group='type')\n", (1766, 1784), False, 'import betterproto\n'), ((1809, 1852), 'betterproto.message_field', 'betterproto.message_field', (['(13)'], {'group': '"""type"""'}), "(13, group='type')\n", (1834, 1852), False, 'import betterproto\n'), ((1881, 1924), 'betterproto.message_field', 'betterproto.message_field', (['(14)'], {'group': '"""type"""'}), "(14, group='type')\n", (1906, 1924), False, 'import betterproto\n'), ((1951, 1994), 'betterproto.message_field', 'betterproto.message_field', (['(15)'], {'group': '"""type"""'}), "(15, group='type')\n", (1976, 1994), False, 'import betterproto\n'), ((2045, 2088), 'betterproto.message_field', 'betterproto.message_field', (['(16)'], {'group': '"""type"""'}), "(16, group='type')\n", (2070, 2088), False, 'import betterproto\n'), ((2121, 2164), 'betterproto.message_field', 'betterproto.message_field', (['(18)'], {'group': '"""type"""'}), "(18, group='type')\n", (2146, 2164), False, 'import betterproto\n'), ((2187, 2230), 'betterproto.message_field', 'betterproto.message_field', (['(19)'], {'group': '"""type"""'}), "(19, group='type')\n", (2212, 2230), False, 'import betterproto\n'), ((2282, 2325), 'betterproto.message_field', 'betterproto.message_field', (['(20)'], {'group': '"""type"""'}), "(20, group='type')\n", (2307, 2325), False, 'import betterproto\n'), ((2358, 2401), 'betterproto.message_field', 'betterproto.message_field', (['(21)'], {'group': 
'"""type"""'}), "(21, group='type')\n", (2383, 2401), False, 'import betterproto\n'), ((2436, 2479), 'betterproto.message_field', 'betterproto.message_field', (['(22)'], {'group': '"""type"""'}), "(22, group='type')\n", (2461, 2479), False, 'import betterproto\n'), ((2721, 2747), 'betterproto.float_field', 'betterproto.float_field', (['(1)'], {}), '(1)\n', (2744, 2747), False, 'import betterproto\n'), ((2854, 2880), 'betterproto.float_field', 'betterproto.float_field', (['(2)'], {}), '(2)\n', (2877, 2880), False, 'import betterproto\n'), ((3001, 3027), 'betterproto.float_field', 'betterproto.float_field', (['(3)'], {}), '(3)\n', (3024, 3027), False, 'import betterproto\n'), ((3224, 3250), 'betterproto.float_field', 'betterproto.float_field', (['(4)'], {}), '(4)\n', (3247, 3250), False, 'import betterproto\n'), ((3462, 3488), 'betterproto.float_field', 'betterproto.float_field', (['(5)'], {}), '(5)\n', (3485, 3488), False, 'import betterproto\n'), ((3592, 3618), 'betterproto.float_field', 'betterproto.float_field', (['(6)'], {}), '(6)\n', (3615, 3618), False, 'import betterproto\n'), ((3736, 3762), 'betterproto.float_field', 'betterproto.float_field', (['(7)'], {}), '(7)\n', (3759, 3762), False, 'import betterproto\n'), ((4007, 4034), 'betterproto.double_field', 'betterproto.double_field', (['(1)'], {}), '(1)\n', (4031, 4034), False, 'import betterproto\n'), ((4141, 4168), 'betterproto.double_field', 'betterproto.double_field', (['(2)'], {}), '(2)\n', (4165, 4168), False, 'import betterproto\n'), ((4289, 4316), 'betterproto.double_field', 'betterproto.double_field', (['(3)'], {}), '(3)\n', (4313, 4316), False, 'import betterproto\n'), ((4513, 4540), 'betterproto.double_field', 'betterproto.double_field', (['(4)'], {}), '(4)\n', (4537, 4540), False, 'import betterproto\n'), ((4752, 4779), 'betterproto.double_field', 'betterproto.double_field', (['(5)'], {}), '(5)\n', (4776, 4779), False, 'import betterproto\n'), ((4883, 4910), 'betterproto.double_field', 
'betterproto.double_field', (['(6)'], {}), '(6)\n', (4907, 4910), False, 'import betterproto\n'), ((5028, 5055), 'betterproto.double_field', 'betterproto.double_field', (['(7)'], {}), '(7)\n', (5052, 5055), False, 'import betterproto\n'), ((5295, 5321), 'betterproto.int32_field', 'betterproto.int32_field', (['(1)'], {}), '(1)\n', (5318, 5321), False, 'import betterproto\n'), ((5426, 5452), 'betterproto.int32_field', 'betterproto.int32_field', (['(2)'], {}), '(2)\n', (5449, 5452), False, 'import betterproto\n'), ((5571, 5597), 'betterproto.int32_field', 'betterproto.int32_field', (['(3)'], {}), '(3)\n', (5594, 5597), False, 'import betterproto\n'), ((5792, 5818), 'betterproto.int32_field', 'betterproto.int32_field', (['(4)'], {}), '(4)\n', (5815, 5818), False, 'import betterproto\n'), ((6028, 6054), 'betterproto.int32_field', 'betterproto.int32_field', (['(5)'], {}), '(5)\n', (6051, 6054), False, 'import betterproto\n'), ((6156, 6182), 'betterproto.int32_field', 'betterproto.int32_field', (['(6)'], {}), '(6)\n', (6179, 6182), False, 'import betterproto\n'), ((6298, 6324), 'betterproto.int32_field', 'betterproto.int32_field', (['(7)'], {}), '(7)\n', (6321, 6324), False, 'import betterproto\n'), ((6564, 6590), 'betterproto.int64_field', 'betterproto.int64_field', (['(1)'], {}), '(1)\n', (6587, 6590), False, 'import betterproto\n'), ((6695, 6721), 'betterproto.int64_field', 'betterproto.int64_field', (['(2)'], {}), '(2)\n', (6718, 6721), False, 'import betterproto\n'), ((6840, 6866), 'betterproto.int64_field', 'betterproto.int64_field', (['(3)'], {}), '(3)\n', (6863, 6866), False, 'import betterproto\n'), ((7061, 7087), 'betterproto.int64_field', 'betterproto.int64_field', (['(4)'], {}), '(4)\n', (7084, 7087), False, 'import betterproto\n'), ((7297, 7323), 'betterproto.int64_field', 'betterproto.int64_field', (['(5)'], {}), '(5)\n', (7320, 7323), False, 'import betterproto\n'), ((7425, 7451), 'betterproto.int64_field', 'betterproto.int64_field', (['(6)'], {}), '(6)\n', 
(7448, 7451), False, 'import betterproto\n'), ((7567, 7593), 'betterproto.int64_field', 'betterproto.int64_field', (['(7)'], {}), '(7)\n', (7590, 7593), False, 'import betterproto\n'), ((7836, 7863), 'betterproto.uint32_field', 'betterproto.uint32_field', (['(1)'], {}), '(1)\n', (7860, 7863), False, 'import betterproto\n'), ((7968, 7995), 'betterproto.uint32_field', 'betterproto.uint32_field', (['(2)'], {}), '(2)\n', (7992, 7995), False, 'import betterproto\n'), ((8114, 8141), 'betterproto.uint32_field', 'betterproto.uint32_field', (['(3)'], {}), '(3)\n', (8138, 8141), False, 'import betterproto\n'), ((8336, 8363), 'betterproto.uint32_field', 'betterproto.uint32_field', (['(4)'], {}), '(4)\n', (8360, 8363), False, 'import betterproto\n'), ((8573, 8600), 'betterproto.uint32_field', 'betterproto.uint32_field', (['(5)'], {}), '(5)\n', (8597, 8600), False, 'import betterproto\n'), ((8702, 8729), 'betterproto.uint32_field', 'betterproto.uint32_field', (['(6)'], {}), '(6)\n', (8726, 8729), False, 'import betterproto\n'), ((8845, 8872), 'betterproto.uint32_field', 'betterproto.uint32_field', (['(7)'], {}), '(7)\n', (8869, 8872), False, 'import betterproto\n'), ((9115, 9142), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(1)'], {}), '(1)\n', (9139, 9142), False, 'import betterproto\n'), ((9247, 9274), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(2)'], {}), '(2)\n', (9271, 9274), False, 'import betterproto\n'), ((9393, 9420), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(3)'], {}), '(3)\n', (9417, 9420), False, 'import betterproto\n'), ((9615, 9642), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(4)'], {}), '(4)\n', (9639, 9642), False, 'import betterproto\n'), ((9852, 9879), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(5)'], {}), '(5)\n', (9876, 9879), False, 'import betterproto\n'), ((9981, 10008), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(6)'], {}), '(6)\n', (10005, 10008), False, 
'import betterproto\n'), ((10124, 10151), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(7)'], {}), '(7)\n', (10148, 10151), False, 'import betterproto\n'), ((10394, 10421), 'betterproto.sint32_field', 'betterproto.sint32_field', (['(1)'], {}), '(1)\n', (10418, 10421), False, 'import betterproto\n'), ((10526, 10553), 'betterproto.sint32_field', 'betterproto.sint32_field', (['(2)'], {}), '(2)\n', (10550, 10553), False, 'import betterproto\n'), ((10672, 10699), 'betterproto.sint32_field', 'betterproto.sint32_field', (['(3)'], {}), '(3)\n', (10696, 10699), False, 'import betterproto\n'), ((10894, 10921), 'betterproto.sint32_field', 'betterproto.sint32_field', (['(4)'], {}), '(4)\n', (10918, 10921), False, 'import betterproto\n'), ((11131, 11158), 'betterproto.sint32_field', 'betterproto.sint32_field', (['(5)'], {}), '(5)\n', (11155, 11158), False, 'import betterproto\n'), ((11260, 11287), 'betterproto.sint32_field', 'betterproto.sint32_field', (['(6)'], {}), '(6)\n', (11284, 11287), False, 'import betterproto\n'), ((11403, 11430), 'betterproto.sint32_field', 'betterproto.sint32_field', (['(7)'], {}), '(7)\n', (11427, 11430), False, 'import betterproto\n'), ((11673, 11700), 'betterproto.sint64_field', 'betterproto.sint64_field', (['(1)'], {}), '(1)\n', (11697, 11700), False, 'import betterproto\n'), ((11805, 11832), 'betterproto.sint64_field', 'betterproto.sint64_field', (['(2)'], {}), '(2)\n', (11829, 11832), False, 'import betterproto\n'), ((11951, 11978), 'betterproto.sint64_field', 'betterproto.sint64_field', (['(3)'], {}), '(3)\n', (11975, 11978), False, 'import betterproto\n'), ((12173, 12200), 'betterproto.sint64_field', 'betterproto.sint64_field', (['(4)'], {}), '(4)\n', (12197, 12200), False, 'import betterproto\n'), ((12410, 12437), 'betterproto.sint64_field', 'betterproto.sint64_field', (['(5)'], {}), '(5)\n', (12434, 12437), False, 'import betterproto\n'), ((12539, 12566), 'betterproto.sint64_field', 'betterproto.sint64_field', (['(6)'], {}), 
'(6)\n', (12563, 12566), False, 'import betterproto\n'), ((12682, 12709), 'betterproto.sint64_field', 'betterproto.sint64_field', (['(7)'], {}), '(7)\n', (12706, 12709), False, 'import betterproto\n'), ((12955, 12983), 'betterproto.fixed32_field', 'betterproto.fixed32_field', (['(1)'], {}), '(1)\n', (12980, 12983), False, 'import betterproto\n'), ((13088, 13116), 'betterproto.fixed32_field', 'betterproto.fixed32_field', (['(2)'], {}), '(2)\n', (13113, 13116), False, 'import betterproto\n'), ((13235, 13263), 'betterproto.fixed32_field', 'betterproto.fixed32_field', (['(3)'], {}), '(3)\n', (13260, 13263), False, 'import betterproto\n'), ((13458, 13486), 'betterproto.fixed32_field', 'betterproto.fixed32_field', (['(4)'], {}), '(4)\n', (13483, 13486), False, 'import betterproto\n'), ((13696, 13724), 'betterproto.fixed32_field', 'betterproto.fixed32_field', (['(5)'], {}), '(5)\n', (13721, 13724), False, 'import betterproto\n'), ((13826, 13854), 'betterproto.fixed32_field', 'betterproto.fixed32_field', (['(6)'], {}), '(6)\n', (13851, 13854), False, 'import betterproto\n'), ((13970, 13998), 'betterproto.fixed32_field', 'betterproto.fixed32_field', (['(7)'], {}), '(7)\n', (13995, 13998), False, 'import betterproto\n'), ((14244, 14272), 'betterproto.fixed64_field', 'betterproto.fixed64_field', (['(1)'], {}), '(1)\n', (14269, 14272), False, 'import betterproto\n'), ((14377, 14405), 'betterproto.fixed64_field', 'betterproto.fixed64_field', (['(2)'], {}), '(2)\n', (14402, 14405), False, 'import betterproto\n'), ((14524, 14552), 'betterproto.fixed64_field', 'betterproto.fixed64_field', (['(3)'], {}), '(3)\n', (14549, 14552), False, 'import betterproto\n'), ((14747, 14775), 'betterproto.fixed64_field', 'betterproto.fixed64_field', (['(4)'], {}), '(4)\n', (14772, 14775), False, 'import betterproto\n'), ((14985, 15013), 'betterproto.fixed64_field', 'betterproto.fixed64_field', (['(5)'], {}), '(5)\n', (15010, 15013), False, 'import betterproto\n'), ((15115, 15143), 
'betterproto.fixed64_field', 'betterproto.fixed64_field', (['(6)'], {}), '(6)\n', (15140, 15143), False, 'import betterproto\n'), ((15259, 15287), 'betterproto.fixed64_field', 'betterproto.fixed64_field', (['(7)'], {}), '(7)\n', (15284, 15287), False, 'import betterproto\n'), ((15536, 15565), 'betterproto.sfixed32_field', 'betterproto.sfixed32_field', (['(1)'], {}), '(1)\n', (15562, 15565), False, 'import betterproto\n'), ((15670, 15699), 'betterproto.sfixed32_field', 'betterproto.sfixed32_field', (['(2)'], {}), '(2)\n', (15696, 15699), False, 'import betterproto\n'), ((15818, 15847), 'betterproto.sfixed32_field', 'betterproto.sfixed32_field', (['(3)'], {}), '(3)\n', (15844, 15847), False, 'import betterproto\n'), ((16042, 16071), 'betterproto.sfixed32_field', 'betterproto.sfixed32_field', (['(4)'], {}), '(4)\n', (16068, 16071), False, 'import betterproto\n'), ((16281, 16310), 'betterproto.sfixed32_field', 'betterproto.sfixed32_field', (['(5)'], {}), '(5)\n', (16307, 16310), False, 'import betterproto\n'), ((16412, 16441), 'betterproto.sfixed32_field', 'betterproto.sfixed32_field', (['(6)'], {}), '(6)\n', (16438, 16441), False, 'import betterproto\n'), ((16557, 16586), 'betterproto.sfixed32_field', 'betterproto.sfixed32_field', (['(7)'], {}), '(7)\n', (16583, 16586), False, 'import betterproto\n'), ((16835, 16864), 'betterproto.sfixed64_field', 'betterproto.sfixed64_field', (['(1)'], {}), '(1)\n', (16861, 16864), False, 'import betterproto\n'), ((16969, 16998), 'betterproto.sfixed64_field', 'betterproto.sfixed64_field', (['(2)'], {}), '(2)\n', (16995, 16998), False, 'import betterproto\n'), ((17117, 17146), 'betterproto.sfixed64_field', 'betterproto.sfixed64_field', (['(3)'], {}), '(3)\n', (17143, 17146), False, 'import betterproto\n'), ((17341, 17370), 'betterproto.sfixed64_field', 'betterproto.sfixed64_field', (['(4)'], {}), '(4)\n', (17367, 17370), False, 'import betterproto\n'), ((17580, 17609), 'betterproto.sfixed64_field', 'betterproto.sfixed64_field', 
(['(5)'], {}), '(5)\n', (17606, 17609), False, 'import betterproto\n'), ((17711, 17740), 'betterproto.sfixed64_field', 'betterproto.sfixed64_field', (['(6)'], {}), '(6)\n', (17737, 17740), False, 'import betterproto\n'), ((17856, 17885), 'betterproto.sfixed64_field', 'betterproto.sfixed64_field', (['(7)'], {}), '(7)\n', (17882, 17885), False, 'import betterproto\n'), ((18123, 18148), 'betterproto.bool_field', 'betterproto.bool_field', (['(1)'], {}), '(1)\n', (18145, 18148), False, 'import betterproto\n'), ((18390, 18417), 'betterproto.string_field', 'betterproto.string_field', (['(1)'], {}), '(1)\n', (18414, 18417), False, 'import betterproto\n'), ((18633, 18661), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(19)'], {}), '(19)\n', (18657, 18661), False, 'import betterproto\n'), ((18897, 18924), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(2)'], {}), '(2)\n', (18921, 18924), False, 'import betterproto\n'), ((19160, 19187), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(3)'], {}), '(3)\n', (19184, 19187), False, 'import betterproto\n'), ((19307, 19335), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(20)'], {}), '(20)\n', (19331, 19335), False, 'import betterproto\n'), ((19455, 19482), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(4)'], {}), '(4)\n', (19479, 19482), False, 'import betterproto\n'), ((19602, 19629), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(5)'], {}), '(5)\n', (19626, 19629), False, 'import betterproto\n'), ((19819, 19846), 'betterproto.string_field', 'betterproto.string_field', (['(6)'], {}), '(6)\n', (19843, 19846), False, 'import betterproto\n'), ((19976, 20003), 'betterproto.string_field', 'betterproto.string_field', (['(7)'], {}), '(7)\n', (20000, 20003), False, 'import betterproto\n'), ((20127, 20154), 'betterproto.string_field', 'betterproto.string_field', (['(8)'], {}), '(8)\n', (20151, 20154), False, 'import betterproto\n'), ((20280, 20307), 
'betterproto.string_field', 'betterproto.string_field', (['(9)'], {}), '(9)\n', (20304, 20307), False, 'import betterproto\n'), ((20442, 20470), 'betterproto.string_field', 'betterproto.string_field', (['(23)'], {}), '(23)\n', (20466, 20470), False, 'import betterproto\n'), ((20572, 20600), 'betterproto.string_field', 'betterproto.string_field', (['(10)'], {}), '(10)\n', (20596, 20600), False, 'import betterproto\n'), ((20716, 20744), 'betterproto.string_field', 'betterproto.string_field', (['(11)'], {}), '(11)\n', (20740, 20744), False, 'import betterproto\n'), ((20859, 20905), 'betterproto.bool_field', 'betterproto.bool_field', (['(12)'], {'group': '"""well_known"""'}), "(12, group='well_known')\n", (20881, 20905), False, 'import betterproto\n'), ((21100, 21146), 'betterproto.bool_field', 'betterproto.bool_field', (['(13)'], {'group': '"""well_known"""'}), "(13, group='well_known')\n", (21122, 21146), False, 'import betterproto\n'), ((21310, 21356), 'betterproto.bool_field', 'betterproto.bool_field', (['(14)'], {'group': '"""well_known"""'}), "(14, group='well_known')\n", (21332, 21356), False, 'import betterproto\n'), ((21440, 21486), 'betterproto.bool_field', 'betterproto.bool_field', (['(15)'], {'group': '"""well_known"""'}), "(15, group='well_known')\n", (21462, 21486), False, 'import betterproto\n'), ((21645, 21691), 'betterproto.bool_field', 'betterproto.bool_field', (['(16)'], {'group': '"""well_known"""'}), "(16, group='well_known')\n", (21667, 21691), False, 'import betterproto\n'), ((21802, 21848), 'betterproto.bool_field', 'betterproto.bool_field', (['(17)'], {'group': '"""well_known"""'}), "(17, group='well_known')\n", (21824, 21848), False, 'import betterproto\n'), ((21989, 22035), 'betterproto.bool_field', 'betterproto.bool_field', (['(18)'], {'group': '"""well_known"""'}), "(18, group='well_known')\n", (22011, 22035), False, 'import betterproto\n'), ((22263, 22309), 'betterproto.bool_field', 'betterproto.bool_field', (['(21)'], {'group': 
'"""well_known"""'}), "(21, group='well_known')\n", (22285, 22309), False, 'import betterproto\n'), ((22407, 22453), 'betterproto.bool_field', 'betterproto.bool_field', (['(22)'], {'group': '"""well_known"""'}), "(22, group='well_known')\n", (22429, 22453), False, 'import betterproto\n'), ((22570, 22616), 'betterproto.enum_field', 'betterproto.enum_field', (['(24)'], {'group': '"""well_known"""'}), "(24, group='well_known')\n", (22592, 22616), False, 'import betterproto\n'), ((22978, 23004), 'betterproto.bool_field', 'betterproto.bool_field', (['(25)'], {}), '(25)\n', (23000, 23004), False, 'import betterproto\n'), ((23245, 23271), 'betterproto.bytes_field', 'betterproto.bytes_field', (['(1)'], {}), '(1)\n', (23268, 23271), False, 'import betterproto\n'), ((23361, 23389), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(13)'], {}), '(13)\n', (23385, 23389), False, 'import betterproto\n'), ((23505, 23532), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(2)'], {}), '(2)\n', (23529, 23532), False, 'import betterproto\n'), ((23648, 23675), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(3)'], {}), '(3)\n', (23672, 23675), False, 'import betterproto\n'), ((23865, 23892), 'betterproto.string_field', 'betterproto.string_field', (['(4)'], {}), '(4)\n', (23889, 23892), False, 'import betterproto\n'), ((24020, 24046), 'betterproto.bytes_field', 'betterproto.bytes_field', (['(5)'], {}), '(5)\n', (24043, 24046), False, 'import betterproto\n'), ((24168, 24194), 'betterproto.bytes_field', 'betterproto.bytes_field', (['(6)'], {}), '(6)\n', (24191, 24194), False, 'import betterproto\n'), ((24318, 24344), 'betterproto.bytes_field', 'betterproto.bytes_field', (['(7)'], {}), '(7)\n', (24341, 24344), False, 'import betterproto\n'), ((24448, 24474), 'betterproto.bytes_field', 'betterproto.bytes_field', (['(8)'], {}), '(8)\n', (24471, 24474), False, 'import betterproto\n'), ((24592, 24618), 'betterproto.bytes_field', 'betterproto.bytes_field', 
(['(9)'], {}), '(9)\n', (24615, 24618), False, 'import betterproto\n'), ((24727, 24773), 'betterproto.bool_field', 'betterproto.bool_field', (['(10)'], {'group': '"""well_known"""'}), "(10, group='well_known')\n", (24749, 24773), False, 'import betterproto\n'), ((24871, 24917), 'betterproto.bool_field', 'betterproto.bool_field', (['(11)'], {'group': '"""well_known"""'}), "(11, group='well_known')\n", (24893, 24917), False, 'import betterproto\n'), ((25015, 25061), 'betterproto.bool_field', 'betterproto.bool_field', (['(12)'], {'group': '"""well_known"""'}), "(12, group='well_known')\n", (25037, 25061), False, 'import betterproto\n'), ((25295, 25321), 'betterproto.int32_field', 'betterproto.int32_field', (['(1)'], {}), '(1)\n', (25318, 25321), False, 'import betterproto\n'), ((25483, 25508), 'betterproto.bool_field', 'betterproto.bool_field', (['(2)'], {}), '(2)\n', (25505, 25508), False, 'import betterproto\n'), ((25610, 25636), 'betterproto.int32_field', 'betterproto.int32_field', (['(3)'], {}), '(3)\n', (25633, 25636), False, 'import betterproto\n'), ((25752, 25778), 'betterproto.int32_field', 'betterproto.int32_field', (['(4)'], {}), '(4)\n', (25775, 25778), False, 'import betterproto\n'), ((26124, 26149), 'betterproto.bool_field', 'betterproto.bool_field', (['(1)'], {}), '(1)\n', (26146, 26149), False, 'import betterproto\n'), ((26224, 26249), 'betterproto.bool_field', 'betterproto.bool_field', (['(2)'], {}), '(2)\n', (26246, 26249), False, 'import betterproto\n'), ((26527, 26554), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(1)'], {}), '(1)\n', (26551, 26554), False, 'import betterproto\n'), ((26676, 26703), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(2)'], {}), '(2)\n', (26700, 26703), False, 'import betterproto\n'), ((26895, 26920), 'betterproto.bool_field', 'betterproto.bool_field', (['(3)'], {}), '(3)\n', (26917, 26920), False, 'import betterproto\n'), ((27140, 27168), 'betterproto.message_field', 'betterproto.message_field', 
(['(4)'], {}), '(4)\n', (27165, 27168), False, 'import betterproto\n'), ((27429, 27456), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(1)'], {}), '(1)\n', (27453, 27456), False, 'import betterproto\n'), ((27576, 27603), 'betterproto.uint64_field', 'betterproto.uint64_field', (['(2)'], {}), '(2)\n', (27600, 27603), False, 'import betterproto\n'), ((27748, 27773), 'betterproto.bool_field', 'betterproto.bool_field', (['(3)'], {}), '(3)\n', (27770, 27773), False, 'import betterproto\n'), ((27876, 27904), 'betterproto.message_field', 'betterproto.message_field', (['(4)'], {}), '(4)\n', (27901, 27904), False, 'import betterproto\n'), ((28126, 28154), 'betterproto.message_field', 'betterproto.message_field', (['(5)'], {}), '(5)\n', (28151, 28154), False, 'import betterproto\n'), ((28421, 28446), 'betterproto.bool_field', 'betterproto.bool_field', (['(1)'], {}), '(1)\n', (28443, 28446), False, 'import betterproto\n'), ((28568, 28595), 'betterproto.string_field', 'betterproto.string_field', (['(2)'], {}), '(2)\n', (28592, 28595), False, 'import betterproto\n'), ((28727, 28754), 'betterproto.string_field', 'betterproto.string_field', (['(3)'], {}), '(3)\n', (28751, 28754), False, 'import betterproto\n'), ((29040, 29065), 'betterproto.bool_field', 'betterproto.bool_field', (['(1)'], {}), '(1)\n', (29062, 29065), False, 'import betterproto\n'), ((29163, 29191), 'betterproto.message_field', 'betterproto.message_field', (['(2)'], {}), '(2)\n', (29188, 29191), False, 'import betterproto\n'), ((29302, 29330), 'betterproto.message_field', 'betterproto.message_field', (['(3)'], {}), '(3)\n', (29327, 29330), False, 'import betterproto\n'), ((29442, 29470), 'betterproto.message_field', 'betterproto.message_field', (['(4)'], {}), '(4)\n', (29467, 29470), False, 'import betterproto\n'), ((29584, 29612), 'betterproto.message_field', 'betterproto.message_field', (['(5)'], {}), '(5)\n', (29609, 29612), False, 'import betterproto\n'), ((29728, 29756), 
'betterproto.message_field', 'betterproto.message_field', (['(6)'], {}), '(6)\n', (29753, 29756), False, 'import betterproto\n'), ((29864, 29892), 'betterproto.message_field', 'betterproto.message_field', (['(7)'], {}), '(7)\n', (29889, 29892), False, 'import betterproto\n'), ((30014, 30042), 'betterproto.message_field', 'betterproto.message_field', (['(8)'], {}), '(8)\n', (30039, 30042), False, 'import betterproto\n'), ((30331, 30356), 'betterproto.bool_field', 'betterproto.bool_field', (['(1)'], {}), '(1)\n', (30353, 30356), False, 'import betterproto\n'), ((30453, 30481), 'betterproto.message_field', 'betterproto.message_field', (['(2)'], {}), '(2)\n', (30478, 30481), False, 'import betterproto\n'), ((30591, 30619), 'betterproto.message_field', 'betterproto.message_field', (['(3)'], {}), '(3)\n', (30616, 30619), False, 'import betterproto\n'), ((30731, 30759), 'betterproto.message_field', 'betterproto.message_field', (['(4)'], {}), '(4)\n', (30756, 30759), False, 'import betterproto\n'), ((30872, 30900), 'betterproto.message_field', 'betterproto.message_field', (['(5)'], {}), '(5)\n', (30897, 30900), False, 'import betterproto\n'), ((31015, 31043), 'betterproto.message_field', 'betterproto.message_field', (['(6)'], {}), '(6)\n', (31040, 31043), False, 'import betterproto\n'), ((31182, 31207), 'betterproto.bool_field', 'betterproto.bool_field', (['(7)'], {}), '(7)\n', (31204, 31207), False, 'import betterproto\n'), ((31349, 31374), 'betterproto.bool_field', 'betterproto.bool_field', (['(8)'], {}), '(8)\n', (31371, 31374), False, 'import betterproto\n'), ((31569, 31597), 'betterproto.message_field', 'betterproto.message_field', (['(9)'], {}), '(9)\n', (31594, 31597), False, 'import betterproto\n')]
|
#-
# ==========================================================================
# Copyright 2015 Autodesk, Inc. All rights reserved.
#
# Use of this software is subject to the terms of the Autodesk
# license agreement provided at the time of installation or download,
# or which otherwise accompanies this software in either electronic
# or hard copy form.
# ==========================================================================
#+
import sys
import io
import pickle
import maya.api.OpenMaya as om
def maya_useNewAPI():
    """Signal to Maya that this plug-in uses the Python API 2.0.

    Maya only checks for the presence of this function when loading the
    plug-in; the body is irrelevant and the return value is ignored.
    """
##############################################################################
##
## Proxy data class implementation
##
##############################################################################
class blindDoubleData(om.MPxData):
    """Proxy data type that stores a single double as blind data.

    Maya serialises instances through the read*/write* overrides below:
    ASCII scene files store the value as plain text followed by a space,
    binary scene files store a pickled Python double.
    """

    # Unique run-time type id registered with Maya for this data class.
    s_id = om.MTypeId(0x80003)
    s_name = "blindDoubleData"
    fValue = 0  # class-level default; instances overwrite via setValue()

    def __init__(self):
        om.MPxData.__init__(self)

    @staticmethod
    def creator():
        """Factory handed to Maya's registerData()."""
        return blindDoubleData()

    def readASCII(self, args, lastParsedElement):
        """Parse one double from an ASCII scene argument list.

        Returns the index of the next element still to be parsed.
        """
        if len(args) == 0:
            return lastParsedElement
        self.fValue = args.asDouble(lastParsedElement)
        return lastParsedElement + 1

    def readBinary(self, istream, length):
        """Unpickle the stored double from a binary scene stream."""
        stream = io.BytesIO(istream)
        self.fValue = pickle.load(stream)
        # Report how many bytes were consumed.
        return stream.tell()

    def writeASCII(self, ostream):
        """Write the value (with a trailing separator space) as ASCII bytes."""
        ostream[:] = bytearray(str(self.fValue) + " ", "ascii")

    def writeBinary(self, ostream):
        """Pickle the value into the supplied binary stream."""
        stream = io.BytesIO()
        pickle.dump(self.fValue, stream)
        ostream[:] = stream.getvalue()

    def copy(self, other):
        self.fValue = other.fValue

    def typeId(self):
        return blindDoubleData.s_id

    def name(self):
        return blindDoubleData.s_name

    def setValue(self, newValue):
        self.fValue = newValue
##############################################################################
##
## Command class implementation
##
##############################################################################
class blindDoubleDataCmd(om.MPxCommand):
    """Command that attaches a blindDoubleData attribute to the selection.

    For every dependency node in the active selection list, the command
    adds a typed attribute named "blindDoubleData" (brief name "BDD"),
    stores the value 3.2 on it, then reads the value back through the
    plug to verify that the custom data round-trips.
    """

    s_name = "blindDoubleData"
    # Selection iterator captured in doIt() so redoIt() can replay the edit.
    iter = None

    def __init__(self):
        om.MPxCommand.__init__(self)

    @staticmethod
    def creator():
        """Factory handed to Maya's registerCommand()."""
        return blindDoubleDataCmd()

    def doIt(self, args):
        """Snapshot the active selection, then apply the edit via redoIt()."""
        sList = om.MGlobal.getActiveSelectionList()
        self.iter = om.MItSelectionList(sList, om.MFn.kInvalid)
        self.redoIt()

    def redoIt(self):
        """Add, populate and verify the blind-data attribute on each node."""
        # Iterate over all selected dependency nodes.
        while not self.iter.isDone():
            dependNode = self.iter.getDependNode()
            self.iter.next()
            fnDN = om.MFnDependencyNode(dependNode)
            fullName = "blindDoubleData"
            try:
                fnDN.findPlug(fullName, True)
                # Plug already exists: nothing to add on this node.
                continue
            except Exception:
                # findPlug raises when the attribute is absent; fall through
                # and create it.  (Narrowed from a bare `except:`, which would
                # also have swallowed SystemExit/KeyboardInterrupt.)
                pass
            # Create a new typed attribute backed by our custom data class.
            fnAttr = om.MFnTypedAttribute()
            briefName = "BDD"
            newAttr = fnAttr.create(fullName, briefName, blindDoubleData.s_id)
            # Add the new attribute to the current dependency node.
            fnDN.addAttribute(newAttr)
            # Use a plug to set and retrieve the value on the node.
            plug = om.MPlug(dependNode, newAttr)
            newData = blindDoubleData()
            newData.setValue(3.2)
            plug.setMPxData(newData)
            # Read the value back as an MObject and convert it to MPxData
            # to confirm the custom type survived the round trip.
            sData = plug.asMObject()
            pdFn = om.MFnPluginData(sData)
            data = pdFn.data()
            assert isinstance(data, blindDoubleData)

    def undoIt(self):
        # NOTE(review): the command reports itself undoable but performs no
        # undo work — the added attribute stays on the node.  Confirm this
        # is intentional (sample/demo behavior).
        return

    def isUndoable(self):
        return True
##############################################################################
##
## The following routines are used to register/unregister
## the command we are creating within Maya
##
##############################################################################
def initializePlugin(obj):
    """Register the blindDoubleData type and command with Maya.

    Called by Maya when the plug-in is loaded.  The data type is
    registered before the command that depends on it.  Failures are
    reported on stderr and re-raised so Maya aborts the load.
    """
    plugin = om.MFnPlugin(obj, "Autodesk", "3.0", "Any")
    try:
        plugin.registerData(blindDoubleData.s_name, blindDoubleData.s_id,
                            blindDoubleData.creator)
    except Exception:  # narrowed from bare except; diagnostic then re-raise
        sys.stderr.write("Failed to register data\n")
        raise
    try:
        plugin.registerCommand(blindDoubleDataCmd.s_name,
                               blindDoubleDataCmd.creator)
    except Exception:
        sys.stderr.write("Failed to register command\n")
        raise
def uninitializePlugin(obj):
    """Deregister the command and the data type when the plug-in is unloaded.

    Failures are reported on stderr and re-raised so Maya can flag the unload.
    """
    plugin = om.MFnPlugin(obj)
    try:
        plugin.deregisterCommand(blindDoubleDataCmd.s_name)
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C/SystemExit are not intercepted.
        sys.stderr.write("Failed to deregister command\n")
        raise
    try:
        plugin.deregisterData(blindDoubleData.s_id)
    except Exception:
        sys.stderr.write("Failed to deregister data\n")
        raise
|
[
"io.BytesIO",
"maya.api.OpenMaya.MPxCommand.__init__",
"maya.api.OpenMaya.MFnDependencyNode",
"maya.api.OpenMaya.MGlobal.getActiveSelectionList",
"maya.api.OpenMaya.MFnPlugin",
"maya.api.OpenMaya.MItSelectionList",
"maya.api.OpenMaya.MFnPluginData",
"maya.api.OpenMaya.MFnTypedAttribute",
"pickle.Pickler",
"maya.api.OpenMaya.MPlug",
"sys.stderr.write",
"maya.api.OpenMaya.MPxData.__init__",
"maya.api.OpenMaya.MTypeId",
"pickle.Unpickler"
] |
[((930, 948), 'maya.api.OpenMaya.MTypeId', 'om.MTypeId', (['(524291)'], {}), '(524291)\n', (940, 948), True, 'import maya.api.OpenMaya as om\n'), ((4095, 4138), 'maya.api.OpenMaya.MFnPlugin', 'om.MFnPlugin', (['obj', '"""Autodesk"""', '"""3.0"""', '"""Any"""'], {}), "(obj, 'Autodesk', '3.0', 'Any')\n", (4107, 4138), True, 'import maya.api.OpenMaya as om\n'), ((4498, 4515), 'maya.api.OpenMaya.MFnPlugin', 'om.MFnPlugin', (['obj'], {}), '(obj)\n', (4510, 4515), True, 'import maya.api.OpenMaya as om\n'), ((1016, 1041), 'maya.api.OpenMaya.MPxData.__init__', 'om.MPxData.__init__', (['self'], {}), '(self)\n', (1035, 1041), True, 'import maya.api.OpenMaya as om\n'), ((1342, 1361), 'io.BytesIO', 'io.BytesIO', (['istream'], {}), '(istream)\n', (1352, 1361), False, 'import io\n'), ((1373, 1398), 'pickle.Unpickler', 'pickle.Unpickler', (['rawData'], {}), '(rawData)\n', (1389, 1398), False, 'import pickle\n'), ((1615, 1627), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (1625, 1627), False, 'import io\n'), ((1639, 1662), 'pickle.Pickler', 'pickle.Pickler', (['rawData'], {}), '(rawData)\n', (1653, 1662), False, 'import pickle\n'), ((2242, 2270), 'maya.api.OpenMaya.MPxCommand.__init__', 'om.MPxCommand.__init__', (['self'], {}), '(self)\n', (2264, 2270), True, 'import maya.api.OpenMaya as om\n'), ((2367, 2402), 'maya.api.OpenMaya.MGlobal.getActiveSelectionList', 'om.MGlobal.getActiveSelectionList', ([], {}), '()\n', (2400, 2402), True, 'import maya.api.OpenMaya as om\n'), ((2417, 2460), 'maya.api.OpenMaya.MItSelectionList', 'om.MItSelectionList', (['sList', 'om.MFn.kInvalid'], {}), '(sList, om.MFn.kInvalid)\n', (2436, 2460), True, 'import maya.api.OpenMaya as om\n'), ((2734, 2766), 'maya.api.OpenMaya.MFnDependencyNode', 'om.MFnDependencyNode', (['dependNode'], {}), '(dependNode)\n', (2754, 2766), True, 'import maya.api.OpenMaya as om\n'), ((2973, 2995), 'maya.api.OpenMaya.MFnTypedAttribute', 'om.MFnTypedAttribute', ([], {}), '()\n', (2993, 2995), True, 'import maya.api.OpenMaya as 
om\n'), ((3263, 3292), 'maya.api.OpenMaya.MPlug', 'om.MPlug', (['dependNode', 'newAttr'], {}), '(dependNode, newAttr)\n', (3271, 3292), True, 'import maya.api.OpenMaya as om\n'), ((3632, 3655), 'maya.api.OpenMaya.MFnPluginData', 'om.MFnPluginData', (['sData'], {}), '(sData)\n', (3648, 3655), True, 'import maya.api.OpenMaya as om\n'), ((4249, 4294), 'sys.stderr.write', 'sys.stderr.write', (['"""Failed to register data\n"""'], {}), "('Failed to register data\\n')\n", (4265, 4294), False, 'import sys\n'), ((4401, 4449), 'sys.stderr.write', 'sys.stderr.write', (['"""Failed to register command\n"""'], {}), "('Failed to register command\\n')\n", (4417, 4449), False, 'import sys\n'), ((4587, 4637), 'sys.stderr.write', 'sys.stderr.write', (['"""Failed to deregister command\n"""'], {}), "('Failed to deregister command\\n')\n", (4603, 4637), False, 'import sys\n'), ((4710, 4757), 'sys.stderr.write', 'sys.stderr.write', (['"""Failed to deregister data\n"""'], {}), "('Failed to deregister data\\n')\n", (4726, 4757), False, 'import sys\n')]
|
import json
import statistics
import collections
import re
from qcg.pilotjob.utils.auxdir import find_report_files, find_log_files, find_rtimes_files, find_final_status_files
from qcg.pilotjob.utils.util import parse_datetime
from json import JSONDecodeError
from datetime import datetime, timedelta
class JobsReportStats:
@staticmethod
def from_workdir(workdir, verbose=False):
jobs_report_path = find_report_files(workdir)
if verbose:
print(f'found report files: {",".join(jobs_report_path)}')
log_files = find_log_files(workdir)
if verbose:
print(f'found log files: {",".join(log_files)}')
rt_files = find_rtimes_files(workdir)
if verbose:
print(f'found real time log files: {",".join(rt_files)}')
final_files = find_final_status_files(workdir)
if verbose:
print(f'found final status log files: {",".join(final_files)}')
return JobsReportStats(jobs_report_path, log_files, rt_files, final_files, verbose)
def __init__(self, report_files, log_files=None, rt_files=None, final_files=None, verbose=False):
"""
Analyze QCG-PJM execution.
Args:
report_files (list(str)) - list of paths to the report files
log_files (list(str)) - list of paths to the log files
"""
self.report_files = report_files
self.log_files = log_files or []
self.rt_files = rt_files or []
self.final_files = final_files or []
self.verbose = verbose
self.jstats = {}
self.gstats = {}
self.res = {}
self._analyze()
def has_realtime_stats(self):
return self.rt_files and all(self.jstats.get(attr) for attr in ['min_real_start',
'max_real_finish', 'total_real_time'])
def job_stats(self):
return self.jstats
def global_stats(self):
return self.gstats
def resources(self):
return self.res
def _analyze(self):
self._read_report_files(self.report_files)
if self.log_files:
self._parse_service_logs(self.log_files)
if self.rt_files:
self._parse_rt_logs(self.rt_files)
if self.final_files:
self._parse_final_files(self.final_files)
@staticmethod
def _parse_allocation(allocation):
nodes = {}
if allocation:
for node in allocation.split(','):
nodes[node[:node.index('[')]] = node[node.index('[') + 1:-1].split(':')
return nodes
    def _read_report_files(self, report_files):
        """
        Read QCG-PJM json report files.

        Each line of a report file is one JSON job entry.  For every job the
        state-transition timestamps (queued/scheduled/executing/finished) and
        the allocation are extracted into ``self.jstats['jobs']``; aggregate
        first/last moments and derived series statistics are stored on
        ``self.jstats`` as well.

        Args:
            report_files (list(str)) - list of paths to the QCG-PJM json report file

        Raises:
            Exception: when a report line is not valid JSON or lacks a
                required attribute
        """
        self.jstats = {'jobs': {}}
        min_queue, max_queue, min_start, max_finish = None, None, None, None
        if self.verbose:
            print(f'reading reports from {",".join(report_files)} files ...')
        for report_file in report_files:
            if self.verbose:
                print('parsing report file {} ...'.format(report_file))
            with open(report_file, 'r') as report_f:
                for line, entry in enumerate(report_f, 1):
                    try:
                        job_entry = json.loads(entry)
                    except JSONDecodeError as e:
                        raise Exception('wrong report "{}" file format: error in {} line: {}'.format(report_file, line, str(e)))
                    for attr in [ 'name', 'state', 'history', 'runtime' ]:
                        if not attr in job_entry:
                            raise Exception('wrong jobs.report {} file format: missing \'{}\' attribute'.format(report_file, attr))
                    # run time is reported as H:M:S with optional fractional part
                    rtime = None
                    if 'rtime' in job_entry['runtime']:
                        if '.' in job_entry['runtime']['rtime']:
                            rtime_t = datetime.strptime(job_entry['runtime']['rtime'], "%H:%M:%S.%f")
                        else:
                            rtime_t = datetime.strptime(job_entry['runtime']['rtime'], "%H:%M:%S")
                        rtime = timedelta(hours=rtime_t.hour, minutes=rtime_t.minute, seconds=rtime_t.second, microseconds=rtime_t.microsecond)
                    # find queued time
                    queued_state = list(filter(lambda st_en: st_en['state'] == 'QUEUED', job_entry['history']))
                    # find allocation creation time
                    schedule_state = list(filter(lambda st_en: st_en['state'] == 'SCHEDULED', job_entry['history']))
                    # find start executing time
                    exec_state = list(filter(lambda st_en: st_en['state'] == 'EXECUTING', job_entry['history']))
                    # find finish executing time
                    finish_state = list(filter(lambda st_en: st_en['state'] in ['SUCCEED','FAILED'], job_entry['history']))
                    # every reported job must have reached exactly one terminal state
                    assert len(finish_state) == 1, 'for job {} in line {}'.format(job_entry['name'], line)
                    job_nodes = None
                    allocation = job_entry.get('runtime', {}).get('allocation', None)
                    if allocation is not None:
                        job_nodes = JobsReportStats._parse_allocation(allocation)
                    # print(f'found allocation for job {job_entry["name"]}: {allocation}, after parsed: {job_nodes}')
                    queued_time = parse_datetime(queued_state[0]['date']) if queued_state else None
                    schedule_time = parse_datetime(schedule_state[0]['date']) if schedule_state else None
                    start_time = parse_datetime(exec_state[0]['date']) if exec_state else None
                    finish_time = parse_datetime(finish_state[0]['date']) if finish_state else None
                    self.jstats['jobs'][job_entry['name']] = {
                        'r_time': rtime,
                        'queue_time': queued_time,
                        'sched_time': schedule_time,
                        's_time': start_time,
                        'f_time': finish_time,
                        'name': job_entry['name'],
                        'nodes': job_nodes,
                        'pid': job_entry['runtime'].get('pid', None),
                        'pname': job_entry['runtime'].get('pname', None),
                        'runtime': job_entry['runtime'],
                        'history': job_entry['history'],
                        'state': job_entry['state'],
                        'messages': job_entry.get('messages'),
                    }
                    # track the overall first/last queue and start/finish moments
                    if queued_time:
                        if not min_queue or queued_time < min_queue:
                            min_queue = queued_time
                        if not max_queue or queued_time > max_queue:
                            max_queue = queued_time
                    if start_time:
                        if not min_start or start_time < min_start:
                            min_start = start_time
                        if not max_finish or finish_time > max_finish:
                            max_finish = finish_time
        self.jstats['first_queue'] = min_queue
        self.jstats['last_queue'] = max_queue
        self.jstats['queue_time'] = max_queue - min_queue if all((max_queue is not None, min_queue is not None)) else None
        self.jstats['first_start'] = min_start
        self.jstats['last_finish'] = max_finish
        self.jstats['execution_time'] = max_finish - min_start if all((max_finish is not None, min_start is not None)) else None
        self.jstats['total_time'] = max_finish - min_queue if all((max_finish is not None, min_queue is not None)) else None
        # derived series: per-job run times and scheduling->start latencies
        rtimes = [job['r_time'].total_seconds() for job in self.jstats['jobs'].values() if job['r_time']]
        launchtimes = [(job['s_time'] - job['sched_time']).total_seconds() for job in self.jstats['jobs'].values() if job['s_time'] and job['sched_time']]
        if rtimes:
            self.jstats['rstats'] = self._generate_series_stats(rtimes)
        if launchtimes:
            self.jstats['launchstats'] = self._generate_series_stats(launchtimes)
def _generate_series_stats(self, serie):
"""
Generate statistics about given data serie.
Args:
serie (float[]) - serie data
Return:
serie statistics in form of dictionary
"""
stats = {}
stats['max'] = max(serie)
stats['min'] = min(serie)
stats['mean'] = statistics.mean(serie)
stats['median'] = statistics.median(serie)
stats['median_lo'] = statistics.median_low(serie)
stats['median_hi'] = statistics.median_high(serie)
stats['stdev'] = statistics.stdev(serie)
stats['pstdev'] = statistics.pstdev(serie)
stats['var'] = statistics.variance(serie)
stats['pvar'] = statistics.pvariance(serie)
return stats
def _parse_service_logs(self, service_log_files):
res_regexp = re.compile('available resources: (\d+) \((\d+) used\) cores on (\d+) nodes')
gov_regexp = re.compile('starting governor manager ...')
resinfo_regexp = re.compile('selected (\w+) resources information')
self.res = {}
log_file = None
if len(service_log_files) > 1:
governor_logs = []
# find log of governor manager
for log_file in service_log_files:
with open(log_file, 'r') as l_f:
for line in l_f:
m = gov_regexp.search(line.strip())
if m:
# found governor log
governor_logs.append(log_file)
break
if len(governor_logs) != 1:
print('warning: can not find single governor log (found files: {})'.format(','.join(governor_logs)))
print('warning: selecting the first log file: {}'.format(service_log_files[0]))
log_file = service_log_files[0]
else:
log_file = governor_logs[0]
else:
if service_log_files:
log_file = service_log_files[0]
if self.verbose:
print('parsing log file {} ...'.format(log_file))
with open(log_file, 'r') as s_f:
for line in s_f:
m = res_regexp.search(line.strip())
if m:
self.res = { 'cores': int(m.group(1)) - int(m.group(2)), 'nodes': int(m.group(3)) }
if self.verbose:
print('found resources: {}'.format(str(self.res)))
break
    def _parse_rt_logs(self, rt_logs):
        """
        Merge real-time (application-side) start/finish moments into jstats.

        Each real-time log is a JSON file with an 'rt' mapping of job name to
        a dict with 's' (start) and 'f' (finish) timestamps.  Parsed moments
        are stored on the matching job entries as 'real_start'/'real_finish',
        and the overall min start / max finish / total span are recorded on
        ``self.jstats``.  Unreadable files produce a warning, not an error.

        Args:
            rt_logs (list(str)) - paths of the real-time log files
        """
        min_real_start = None
        max_real_finish = None
        rt_jobs = 0
        for rt_log in rt_logs:
            try:
                if self.verbose:
                    print(f'reading real time log file {rt_log} ...')
                with open(rt_log, 'rt') as rt_file:
                    node_rtimes = json.load(rt_file)
                    if 'rt' not in node_rtimes:
                        raise ValueError('wrong format - missing "rt" element')
                    rtimes = node_rtimes.get('rt', {})
                    for job_name, job_rtimes in rtimes.items():
                        # both start ('s') and finish ('f') must be present
                        if all((elem in job_rtimes for elem in ['s', 'f'])):
                            job_data = self.jstats.setdefault('jobs', {}).setdefault(job_name, {})
                            job_real_start = parse_datetime(job_rtimes.get('s'))
                            job_real_finish = parse_datetime(job_rtimes.get('f'))
                            job_data['real_start'] = job_real_start
                            job_data['real_finish'] = job_real_finish
                            # track overall earliest start / latest finish
                            if min_real_start is None or job_real_start < min_real_start:
                                min_real_start = job_real_start
                            if max_real_finish is None or job_real_finish > max_real_finish:
                                max_real_finish = job_real_finish
                            rt_jobs += 1
                        else:
                            if self.verbose:
                                print(f'warning: missing required elements for job {job_name} in rt log file {rt_log}')
            except Exception as exc:
                # best-effort: a broken file must not abort the whole analysis
                print(f'warning: can not read real time log file {rt_log}: {str(exc)}')
        if self.verbose:
            print(f'read {rt_jobs} jobs real time entries')
        if min_real_start is not None:
            self.jstats['min_real_start'] = min_real_start
        if max_real_finish is not None:
            self.jstats['max_real_finish'] = max_real_finish
        if all((min_real_start is not None, max_real_finish is not None)):
            self.jstats['total_real_time'] = (max_real_finish - min_real_start).total_seconds()
    def _parse_final_files(self, final_logs):
        """
        Aggregate final status (JSON) files into global statistics.

        Sums node/core/job/iteration counters across all files and records
        the earliest service start and the latest report generation moment.
        Results are stored in ``self.gstats``.  Unreadable files produce a
        warning, not an error.

        Args:
            final_logs (list(str)) - paths of the final status files
        """
        global_service_started = None
        global_service_finished = None
        global_nodes = 0
        global_cores = 0
        global_jobs = 0
        global_iterations = 0
        global_failed_jobs = 0
        global_failed_iterations = 0
        for final_log in final_logs:
            try:
                with open(final_log, 'rt') as final_file:
                    final_report = json.load(final_file)
                    # service life span - earliest start, latest report moment
                    if all(attr in final_report.get('System', {}) for attr in ['Started', 'Generated']):
                        service_started = parse_datetime(final_report['System']['Started'])
                        service_finished = parse_datetime(final_report['System']['Generated'])
                        if global_service_started is None or service_started < global_service_started:
                            global_service_started = service_started
                        if global_service_finished is None or service_finished > global_service_finished:
                            global_service_finished = service_finished
                    # missing sections simply contribute 0
                    global_nodes += final_report.get('Resources', {}).get('TotalNodes', 0)
                    global_cores += final_report.get('Resources', {}).get('TotalCores', 0)
                    global_jobs += final_report.get('JobStats', {}).get('TotalJobs', 0)
                    global_failed_jobs += final_report.get('JobStats', {}).get('FailedJobs', 0)
                    global_iterations += final_report.get('IterationStats', {}).get('TotalIterations', 0)
                    global_failed_iterations += final_report.get('IterationStats', {}).get('FailedIterations', 0)
            except Exception as ex:
                # best-effort: a broken file must not abort the whole analysis
                print(f'warning: failed to read final status log file {final_log}: {str(ex)}')
        if all((global_service_started is not None, global_service_finished is not None)):
            self.gstats['service_start'] = global_service_started
            self.gstats['service_finish'] = global_service_finished
        self.gstats['total_nodes'] = global_nodes
        self.gstats['total_cores'] = global_cores
        self.gstats['total_jobs'] = global_jobs
        self.gstats['failed_jobs'] = global_failed_jobs
        self.gstats['total_iterations'] = global_iterations
        self.gstats['failed_iterations'] = global_failed_iterations
def job_info(self, *job_ids):
return {job_id: self.jstats.get('jobs', {}).get(job_id) for job_id in job_ids}
def filter_jobs(self, filter_def):
for job_name, job_data in self.jstats.get('jobs', {}).items():
if filter_def(job_data):
yield job_data
def allocation_jobs(self, node_name, core_name):
jobs = []
for job_name, job_data in self.jstats.get('jobs', {}).items():
job_nodes = job_data.get('nodes')
if job_nodes and core_name in job_nodes.get(node_name, []):
jobs.append(job_data)
jobs.sort(key=lambda job: job['real_start'])
return jobs
def job_start_finish_launch_overheads(self, details=False):
total_start_overhead = 0
total_finish_overhead = 0
total_jobs = 0
total_real_runtime = 0
total_qcg_runtime = 0
total_job_overhead_per_runtime = 0
result = {}
for job_name, job_data in self.jstats['jobs'].items():
if all((elem in job_data for elem in ['real_start', 'real_finish', 's_time', 'f_time'])):
real_job_start = job_data['real_start']
real_job_finish = job_data['real_finish']
qcg_job_start = job_data['s_time']
qcg_job_finish = job_data['f_time']
start_overhead = (real_job_start - qcg_job_start).total_seconds()
finish_overhead = (qcg_job_finish - real_job_finish).total_seconds()
job_real_runtime = (real_job_finish - real_job_start).total_seconds()
job_qcg_runtime = (qcg_job_finish - qcg_job_start).total_seconds()
total_real_runtime += job_real_runtime
total_qcg_runtime += job_qcg_runtime
if details:
result.setdefault('jobs', {})[job_name] = {'start': start_overhead, 'finish': finish_overhead}
total_start_overhead += start_overhead
total_finish_overhead += finish_overhead
# print(f'job {job_name} overhead: {(start_overhead + finish_overhead)}')
# print(f'job {job_name} runtime: {job_real_runtime}')
# print(f'job {job_name} overhead per runtime %: {100.0 * ((start_overhead + finish_overhead) / job_real_runtime)}')
total_job_overhead_per_runtime += 100.0 * ((start_overhead + finish_overhead) / job_real_runtime)
total_jobs += 1
if self.verbose:
print('generated start/finish launch overheads for {total_jobs} jobs')
result['start'] = total_start_overhead
result['finish'] = total_finish_overhead
result['total'] = total_start_overhead + total_finish_overhead
result['job_start_avg'] = total_start_overhead/total_jobs if total_jobs else 0
result['job_finish_avg'] = total_finish_overhead/total_jobs if total_jobs else 0
result['job_avg'] = (total_start_overhead + total_finish_overhead)/total_jobs if total_jobs else 0
result['job_real_rt_avg'] = (total_real_runtime)/total_jobs if total_jobs else 0
result['job_qcg_rt_avg'] = (total_qcg_runtime)/total_jobs if total_jobs else 0
result['job_avg_per_rt'] = (total_job_overhead_per_runtime)/total_jobs if total_jobs else 0
result['analyzed_jobs'] = total_jobs
return result
    def _generate_gantt_dataframe(self, start_metric_name, finish_metric_name):
        """
        Build per-core gantt chart rows for all jobs with known allocation.

        Args:
            start_metric_name (str) - job attribute used as the bar start
                (e.g. 's_time' or 'real_start')
            finish_metric_name (str) - job attribute used as the bar end

        Return:
            dict with 'chart_data' (list of row dicts), totals and the
            display order of '<node>:<core>' labels
        """
        jobs_chart = []
        min_start = None
        max_finish = None
        # OrderedDict keeps nodes in first-seen order for the y-axis layout
        avail_nodes = collections.OrderedDict()
        total_jobs = 0
        for job_name, job_data in self.jstats.get('jobs', {}).items():
            # only jobs with an allocation and both timestamps can be drawn
            if all(job_data.get(elem) is not None for elem in ['nodes', start_metric_name, finish_metric_name]):
                total_jobs += 1
                if min_start is None or job_data.get(start_metric_name) < min_start:
                    min_start = job_data.get(start_metric_name)
                if max_finish is None or job_data.get(finish_metric_name) > max_finish:
                    max_finish = job_data.get(finish_metric_name)
                for node_name, cores in job_data.get('nodes', {}).items():
                    avail_nodes.setdefault(node_name, set()).update(cores)
                    # one chart row per core the job occupied
                    jobs_chart.extend([{'Job': job_name,
                                        'Start': str(job_data.get(start_metric_name)),
                                        'Finish': str(job_data.get(finish_metric_name)),
                                        'Core': f'{node_name}:{core}'} for core in cores])
        total_nodes = len(avail_nodes)
        total_cores = sum(len(cores) for _, cores in avail_nodes.items())
        total_seconds = (max_finish - min_start).total_seconds()
        # build '<node>:<core>' labels, cores sorted numerically
        # (a core id may look like '3&1' - sort by the part before '&')
        node_order = []
        while len(avail_nodes) > 0:
            node_name, cores = avail_nodes.popitem(last=False)
            node_order.extend([f'{node_name}:{core}' for core in sorted(cores, key=lambda c: int(c.split('&')[0]) if '&' in str(c) else int(c))])
        return {'chart_data': jobs_chart,
                'total_jobs': total_jobs,
                'total_nodes': total_nodes,
                'total_cores': total_cores,
                'total_seconds': total_seconds,
                'node_order': node_order}
    def _generate_gantt_gaps_dataframe(self, start_metric_name, finish_metric_name):
        """
        Build gantt rows for the *idle* periods (gaps) on every core.

        The inverse of _generate_gantt_dataframe: for each core, emit a 'gap'
        row before the first job, between consecutive jobs, and after the
        last job, relative to the overall scenario time span.

        Args:
            start_metric_name (str) - job attribute used as the bar start
            finish_metric_name (str) - job attribute used as the bar end

        Return:
            dict with 'chart_data' (list of gap row dicts), totals and the
            display order of '<node>:<core>' labels
        """
        gaps_chart = []
        min_start = None
        max_finish = None
        # OrderedDict keeps nodes in first-seen order for the y-axis layout
        avail_nodes = collections.OrderedDict()
        total_jobs = 0
        resource_nodes = {}
        # assign jobs to cores to compute time boundaries
        for job_name, job_data in self.jstats.get('jobs', {}).items():
            if all(job_data.get(elem) is not None for elem in ['nodes', start_metric_name, finish_metric_name]):
                total_jobs += 1
                if min_start is None or job_data.get(start_metric_name) < min_start:
                    min_start = job_data.get(start_metric_name)
                if max_finish is None or job_data.get(finish_metric_name) > max_finish:
                    max_finish = job_data.get(finish_metric_name)
                for node_name, cores in job_data.get('nodes', {}).items():
                    avail_nodes.setdefault(node_name, set()).update(cores)
                    for core in cores:
                        resource_nodes.setdefault(node_name, {}).setdefault(core, []).append(job_data)
        total_nodes = len(avail_nodes)
        total_cores = sum(len(cores) for _, cores in avail_nodes.items())
        total_seconds = (max_finish - min_start).total_seconds()
        # sort jobs in each of the core by the start time
        for node_name, cores in resource_nodes.items():
            for core_name, core_jobs in cores.items():
                core_jobs.sort(key=lambda job: job[start_metric_name])
                if core_jobs:
                    # gap before the first job on this core
                    if core_jobs[0][start_metric_name] != min_start:
                        gaps_chart.append({'Job': 'gap',
                                           'Start': min_start,
                                           'Finish': core_jobs[0][start_metric_name],
                                           'Core': f'{node_name}:{core_name}'})
                    # gaps between consecutive jobs
                    for job_nr in range(1,len(core_jobs)):
                        curr_job = core_jobs[job_nr]
                        prev_job = core_jobs[job_nr-1]
                        gaps_chart.append({'Job': 'gap',
                                           'Start': prev_job[finish_metric_name],
                                           'Finish': curr_job[start_metric_name],
                                           'Core': f'{node_name}:{core_name}'})
                    # gap after the last job on this core
                    if core_jobs[-1][finish_metric_name] != max_finish:
                        gaps_chart.append({'Job': 'gap',
                                           'Start': core_jobs[-1][finish_metric_name],
                                           'Finish': max_finish,
                                           'Core': f'{node_name}:{core_name}'})
                else:
                    # core had no jobs at all - one gap covering the whole span
                    gaps_chart.append({'Job': 'gap',
                                       'Start': min_start,
                                       'Finish': max_finish,
                                       'Core': f'{node_name}:{core_name}'})
        # build '<node>:<core>' labels, cores sorted numerically
        # (a core id may look like '3&1' - sort by the part before '&')
        node_order = []
        while len(avail_nodes) > 0:
            node_name, cores = avail_nodes.popitem(last=False)
            node_order.extend([f'{node_name}:{core}' for core in sorted(cores, key=lambda c: int(c.split('&')[0]) if '&' in str(c) else int(c))])
        return {'chart_data': gaps_chart,
                'total_jobs': total_jobs,
                'total_nodes': total_nodes,
                'total_cores': total_cores,
                'total_seconds': total_seconds,
                'node_order': node_order}
def gantt(self, output_file, real=True):
self._generate_gantt_chart(output_file, self._generate_gantt_dataframe, real=real)
def gantt_gaps(self, output_file, real=True):
self._generate_gantt_chart(output_file, self._generate_gantt_gaps_dataframe, real=real)
    def _generate_gantt_chart(self, output_file, dataframe_generator, real=True):
        """
        Render a gantt chart image from a dataframe generator.

        Args:
            output_file (str) - path of the image file to write
            dataframe_generator (callable) - one of the _generate_gantt_*
                methods; called with (start_metric_name, finish_metric_name)
            real (bool) - use application-side timestamps
                ('real_start'/'real_finish') instead of service-side ones

        Raises:
            ImportError: when plotly.express/pandas are not installed
        """
        # optional dependencies - imported lazily so the rest of the module
        # works without them
        try:
            import plotly.express as px
            import pandas as pd
        except ImportError:
            raise ImportError('To generate gantt chart the following packages must be installed: '
                              'plotly.express, pandas, kaleido')
        start_metric_name = 's_time'
        finish_metric_name = 'f_time'
        if real:
            start_metric_name = 'real_start'
            finish_metric_name = 'real_finish'
        chart_data = dataframe_generator(start_metric_name, finish_metric_name)
        if self.verbose:
            print(f'generated dataframes for {chart_data.get("total_jobs")} jobs on {chart_data.get("total_cores")} cores')
            print(f'total nodes {chart_data.get("total_nodes")}, total seconds {chart_data.get("total_seconds")}')
        # NOTE(review): these two locals are currently unused here
        min_start_moment = self.jstats.get('min_real_start')
        max_finish_moment = self.jstats.get('max_real_finish')
        # print(f'node order: {chart_data.get("node_order")}')
        df = pd.DataFrame(chart_data.get("chart_data"))
        fig = px.timeline(df, x_start='Start', x_end='Finish', y='Core', color='Job', category_orders={'Core': chart_data.get('node_order')})
        # scale image size with the scenario length and the number of cores
        fig.update_layout(
            autosize=False,
            width=int(chart_data.get('total_seconds', 1))*20,
            height=chart_data.get('total_cores', 1)*20,
            yaxis=dict(
                title_text="Cores",
                ticktext=chart_data.get("node_order"),
                # tickvals=[1, 2, 3, 4],
                tickmode="array"
            ))
        fig.write_image(output_file)
    def resource_usage(self, from_first_job=False, until_last_job=False, details=False):
        """
        Compute per-core utilization over the whole scenario.

        A core's unused time is the sum of: the wait before its first job,
        the gaps between its consecutive jobs, and (unless *until_last_job*)
        the tail after its last job.  Utilization is reported in percent of
        the scenario span.

        Args:
            from_first_job (bool) - measure from the first job start instead
                of the service start (also the fallback when service times
                are unknown)
            until_last_job (bool) - ignore the idle tail after each core's
                last job
            details (bool) - include per-core numbers under 'nodes'

        Return:
            dict with 'method', 'total_cores', 'avg_core_utilization' and
            optionally per-core details and all-core averages
        """
        resource_nodes = {}
        jobs = {}
        report = {}
        # assign jobs to cores
        for job_name, job_data in self.jstats.get('jobs', {}).items():
            if job_data.get('nodes') is not None and job_data.get('real_start') is not None:
                for node_name, cores in job_data.get('nodes', {}).items():
                    for core in cores:
                        resource_nodes.setdefault(node_name, {}).setdefault(core, []).append(job_data)
        report['method'] = 'from_service_start'
        # prefer the service life span; fall back to job-derived boundaries
        if not from_first_job and all((self.gstats.get('service_start'), self.gstats.get('service_finish'))):
            min_start_moment = self.gstats.get('service_start')
            max_finish_moment = self.gstats.get('service_finish')
        else:
            min_start_moment = self.jstats.get('min_real_start')
            max_finish_moment = self.jstats.get('max_real_finish')
            report['method'] = 'from_first_job_start'
        total_time = (max_finish_moment - min_start_moment).total_seconds()
        if self.verbose:
            print(f'total time seconds: {total_time}')
        total_core_utilization = 0
        total_cores = 0
        # sort jobs in each of the core by the start time
        for node_name, cores in resource_nodes.items():
            for core_name, core_jobs in cores.items():
                core_jobs.sort(key=lambda job: job['real_start'])
                core_initial_wait = 0
                core_injobs_wait = 0
                core_finish_wait = 0
                core_unused = 0
                if core_jobs:
                    # moment between total scenario start and first job
                    core_initial_wait = (core_jobs[0]['real_start'] - min_start_moment).total_seconds()
                    core_unused += core_initial_wait
                    core_injobs_wait = 0
                    for job_nr in range(1,len(core_jobs)):
                        curr_job = core_jobs[job_nr]
                        prev_job = core_jobs[job_nr-1]
                        # moments between current job start and last job finish
                        core_injobs_wait += (curr_job['real_start'] - prev_job['real_finish']).total_seconds()
                    core_unused += core_injobs_wait
                    # moment between last job finish and total scenario finish
                    if not until_last_job:
                        core_finish_wait = (max_finish_moment - core_jobs[-1]['real_finish']).total_seconds()
                        core_unused += core_finish_wait
                core_utilization = ((total_time - core_unused) / total_time) * 100
                total_core_utilization += core_utilization
                total_cores += 1
                if details:
                    report.setdefault('nodes', {}).setdefault(node_name, {})[core_name] = {
                        'unused': core_unused,
                        'utilization': core_utilization,
                        'initial_unused': core_initial_wait,
                        'injobs_unused': core_injobs_wait,
                        'finish_unused': core_finish_wait,
                    }
        report['total_cores'] = total_cores
        report['avg_core_utilization'] = total_core_utilization/total_cores if total_cores else 0
        # cores known to the service but never used by any job
        if self.gstats.get('total_cores') and total_cores != self.gstats['total_cores']:
            global_cores = self.gstats['total_cores']
            report['not_used_cores'] = global_cores - total_cores
            report['avg_all_cores_utilization'] = total_core_utilization/global_cores if global_cores else 0
        return report
    def _find_previous_latest_job_finish_on_resources(self, resource_nodes, job_data):
        """On resources allocated for job ``job_data``, find the last, previous job finish.

        Args:
            resource_nodes (dict): a mapping between 'nodes->cores->jobs' where jobs are sorted by the ``real_start`` attribute
            job_data (dict): job attributes (from ``self.jstats['jobs']``)

        Return:
            datetime - a moment when the last job finished, before the specified one, on job's resources.
        """
        max_finish_time = None
        if job_data.get('real_start'):
            job_start = job_data['real_start']
            for node_name, cores in job_data.get('nodes', {}).items():
                # find the last job that finished before 'job_start'
                for core_name in cores:
                    core_jobs = resource_nodes.get(node_name, {}).get(core_name, [])
                    # find first job which start time is >= `job_start`
                    next_job = next((position for position, curr_job_data in enumerate(core_jobs) if curr_job_data['real_start'] >= job_start), None)
                    # the job directly before it is this core's latest earlier job
                    if next_job is not None and next_job > 0:
                        prev_job = core_jobs[next_job - 1]
                        if max_finish_time is None or prev_job['real_finish'] > max_finish_time:
                            max_finish_time = prev_job['real_finish']
        return max_finish_time
    def efficiency(self, details=False):
        """
        Compute per-core utilization where each gap is measured from the
        latest previous job finish on the *whole* allocation of the next job
        (not only on this core), so multi-core jobs do not inflate idle time.

        Args:
            details (bool) - include per-core numbers under 'nodes'

        Return:
            dict with 'total_cores' and 'avg_core_utilization' (percent),
            optionally per-core details
        """
        resource_nodes = {}
        jobs = {}
        report = {}
        min_start_moment = self.jstats.get('min_real_start')
        max_finish_moment = self.jstats.get('max_real_finish')
        total_time = (max_finish_moment - min_start_moment).total_seconds()
        total_core_utilization = 0
        total_cores = 0
        # assign jobs to cores
        for job_name, job_data in self.jstats.get('jobs', {}).items():
            if job_data.get('nodes') is not None and job_data.get('real_start') is not None:
                for node_name, cores in job_data.get('nodes', {}).items():
                    for core in cores:
                        resource_nodes.setdefault(node_name, {}).setdefault(core, []).append(job_data)
        # sort jobs in each of the core by the start time
        for node_name, cores in resource_nodes.items():
            for core_name, core_jobs in cores.items():
                core_jobs.sort(key=lambda job: job['real_start'])
        # sort jobs in each of the core by the start time
        for node_name, cores in resource_nodes.items():
            for core_name, core_jobs in cores.items():
                core_unused = 0
                prev_job = None
                if core_jobs:
                    for core_job in core_jobs:
                        # moment between total scenario start and first job
                        prev_job_finish_time = self._find_previous_latest_job_finish_on_resources(resource_nodes, core_job)
                        if prev_job_finish_time:
                            core_unused += (core_job['real_start'] - prev_job_finish_time).total_seconds()
                        else:
                            # no earlier job anywhere on this job's allocation
                            if prev_job:
                                core_unused += (core_job['real_start'] - prev_job['real_finish']).total_seconds()
                            else:
                                core_unused += (core_job['real_start'] - min_start_moment).total_seconds()
                        prev_job = core_job
                core_utilization = ((total_time - core_unused) / total_time) * 100
                if details:
                    report.setdefault('nodes', {}).setdefault(node_name, {})[core_name] = {
                        'unused': core_unused,
                        'utilization': core_utilization
                    }
                # print(f'node {node_name}:{core_name}: utilization {core_utilization:.1f}%, unused {core_unused:.4f} of total {total_time:.4f}')
                total_core_utilization += core_utilization
                total_cores += 1
        report['total_cores'] = total_cores
        report['avg_core_utilization'] = total_core_utilization/total_cores if total_cores else 0
        return report
    def efficiency_core(self, dest_node_name, dest_core_name, details=False):
        """
        Diagnostic: print the utilization of a single node/core.

        Uses the same gap accounting as efficiency() but only for the given
        core, and prints the result to stdout (nothing is returned).

        Args:
            dest_node_name (str) - node to inspect
            dest_core_name (str) - core id on that node
            details (bool) - currently unused
        """
        resource_nodes = {}
        jobs = {}
        report = {}
        min_start_moment = self.jstats.get('min_real_start')
        max_finish_moment = self.jstats.get('max_real_finish')
        total_time = (max_finish_moment - min_start_moment).total_seconds()
        total_core_utilization = 0
        total_cores = 0
        # assign jobs to cores
        for job_name, job_data in self.jstats.get('jobs', {}).items():
            if job_data.get('nodes') is not None:
                for node_name, cores in job_data.get('nodes', {}).items():
                    for core in cores:
                        resource_nodes.setdefault(node_name, {}).setdefault(core, []).append(job_data)
        # sort jobs in each of the core by the start time
        for node_name, cores in resource_nodes.items():
            for core_name, core_jobs in cores.items():
                core_jobs.sort(key=lambda job: job['real_start'])
        # for job_name, job_data in self.jstats.get('jobs', {}).items():
        #     if all(elem in job_data for elem in ['real_start', 'real_finish']):
        #         prev_job_finish_time = self._find_previous_latest_job_finish_on_resources(resource_nodes, job_data)
        #         print(f'{job_name}: started {job_data.get("real_start")}-{job_data.get("real_finish")} previous latest job finish time {prev_job_finish_time}')
        # print(f'checking core {dest_core_name} on node {dest_node_name} ...')
        core_jobs = resource_nodes.get(dest_node_name, {}).get(dest_core_name, {})
        # print(f'found jobs {",".join([job["name"] for job in core_jobs])}')
        core_unused = 0
        prev_job = None
        if core_jobs:
            for core_job in core_jobs:
                # print(f'checking job {core_job["name"]} ...')
                # moment between total scenario start and first job
                prev_job_finish_time = self._find_previous_latest_job_finish_on_resources(resource_nodes, core_job)
                # print(f'found previous job latest finish time as {prev_job_finish_time}')
                if prev_job_finish_time:
                    core_unused += (core_job['real_start'] - prev_job_finish_time).total_seconds()
                else:
                    # no earlier job anywhere on this job's allocation
                    if prev_job:
                        core_unused += (core_job['real_start'] - prev_job['real_finish']).total_seconds()
                    else:
                        core_unused += (core_job['real_start'] - min_start_moment).total_seconds()
                prev_job = core_job
                # print(f'after job {core_job["name"]} core unused {core_unused:.5f} ...')
        core_utilization = ((total_time - core_unused) / total_time) * 100
        print(f'node {dest_node_name}:{dest_core_name}: utilization {core_utilization:.1f}%, unused {core_unused:.4f} of total {total_time:.4f}')
        total_core_utilization += core_utilization
        total_cores += 1
|
[
"qcg.pilotjob.utils.auxdir.find_log_files",
"statistics.pstdev",
"statistics.variance",
"qcg.pilotjob.utils.auxdir.find_rtimes_files",
"statistics.median_low",
"json.loads",
"statistics.pvariance",
"datetime.timedelta",
"statistics.median_high",
"statistics.median",
"statistics.stdev",
"datetime.datetime.strptime",
"statistics.mean",
"qcg.pilotjob.utils.util.parse_datetime",
"re.compile",
"qcg.pilotjob.utils.auxdir.find_final_status_files",
"json.load",
"qcg.pilotjob.utils.auxdir.find_report_files",
"collections.OrderedDict"
] |
[((419, 445), 'qcg.pilotjob.utils.auxdir.find_report_files', 'find_report_files', (['workdir'], {}), '(workdir)\n', (436, 445), False, 'from qcg.pilotjob.utils.auxdir import find_report_files, find_log_files, find_rtimes_files, find_final_status_files\n'), ((557, 580), 'qcg.pilotjob.utils.auxdir.find_log_files', 'find_log_files', (['workdir'], {}), '(workdir)\n', (571, 580), False, 'from qcg.pilotjob.utils.auxdir import find_report_files, find_log_files, find_rtimes_files, find_final_status_files\n'), ((681, 707), 'qcg.pilotjob.utils.auxdir.find_rtimes_files', 'find_rtimes_files', (['workdir'], {}), '(workdir)\n', (698, 707), False, 'from qcg.pilotjob.utils.auxdir import find_report_files, find_log_files, find_rtimes_files, find_final_status_files\n'), ((820, 852), 'qcg.pilotjob.utils.auxdir.find_final_status_files', 'find_final_status_files', (['workdir'], {}), '(workdir)\n', (843, 852), False, 'from qcg.pilotjob.utils.auxdir import find_report_files, find_log_files, find_rtimes_files, find_final_status_files\n'), ((8657, 8679), 'statistics.mean', 'statistics.mean', (['serie'], {}), '(serie)\n', (8672, 8679), False, 'import statistics\n'), ((8706, 8730), 'statistics.median', 'statistics.median', (['serie'], {}), '(serie)\n', (8723, 8730), False, 'import statistics\n'), ((8760, 8788), 'statistics.median_low', 'statistics.median_low', (['serie'], {}), '(serie)\n', (8781, 8788), False, 'import statistics\n'), ((8818, 8847), 'statistics.median_high', 'statistics.median_high', (['serie'], {}), '(serie)\n', (8840, 8847), False, 'import statistics\n'), ((8873, 8896), 'statistics.stdev', 'statistics.stdev', (['serie'], {}), '(serie)\n', (8889, 8896), False, 'import statistics\n'), ((8923, 8947), 'statistics.pstdev', 'statistics.pstdev', (['serie'], {}), '(serie)\n', (8940, 8947), False, 'import statistics\n'), ((8971, 8997), 'statistics.variance', 'statistics.variance', (['serie'], {}), '(serie)\n', (8990, 8997), False, 'import statistics\n'), ((9022, 9049), 
'statistics.pvariance', 'statistics.pvariance', (['serie'], {}), '(serie)\n', (9042, 9049), False, 'import statistics\n'), ((9147, 9233), 're.compile', 're.compile', (['"""available resources: (\\\\d+) \\\\((\\\\d+) used\\\\) cores on (\\\\d+) nodes"""'], {}), "(\n 'available resources: (\\\\d+) \\\\((\\\\d+) used\\\\) cores on (\\\\d+) nodes')\n", (9157, 9233), False, 'import re\n'), ((9245, 9288), 're.compile', 're.compile', (['"""starting governor manager ..."""'], {}), "('starting governor manager ...')\n", (9255, 9288), False, 'import re\n'), ((9314, 9365), 're.compile', 're.compile', (['"""selected (\\\\w+) resources information"""'], {}), "('selected (\\\\w+) resources information')\n", (9324, 9365), False, 'import re\n'), ((18907, 18932), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (18930, 18932), False, 'import collections\n'), ((20849, 20874), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (20872, 20874), False, 'import collections\n'), ((11188, 11206), 'json.load', 'json.load', (['rt_file'], {}), '(rt_file)\n', (11197, 11206), False, 'import json\n'), ((13445, 13466), 'json.load', 'json.load', (['final_file'], {}), '(final_file)\n', (13454, 13466), False, 'import json\n'), ((13607, 13656), 'qcg.pilotjob.utils.util.parse_datetime', 'parse_datetime', (["final_report['System']['Started']"], {}), "(final_report['System']['Started'])\n", (13621, 13656), False, 'from qcg.pilotjob.utils.util import parse_datetime\n'), ((13696, 13747), 'qcg.pilotjob.utils.util.parse_datetime', 'parse_datetime', (["final_report['System']['Generated']"], {}), "(final_report['System']['Generated'])\n", (13710, 13747), False, 'from qcg.pilotjob.utils.util import parse_datetime\n'), ((3394, 3411), 'json.loads', 'json.loads', (['entry'], {}), '(entry)\n', (3404, 3411), False, 'import json\n'), ((4266, 4382), 'datetime.timedelta', 'timedelta', ([], {'hours': 'rtime_t.hour', 'minutes': 'rtime_t.minute', 'seconds': 'rtime_t.second', 
'microseconds': 'rtime_t.microsecond'}), '(hours=rtime_t.hour, minutes=rtime_t.minute, seconds=rtime_t.\n second, microseconds=rtime_t.microsecond)\n', (4275, 4382), False, 'from datetime import datetime, timedelta\n'), ((5552, 5591), 'qcg.pilotjob.utils.util.parse_datetime', 'parse_datetime', (["queued_state[0]['date']"], {}), "(queued_state[0]['date'])\n", (5566, 5591), False, 'from qcg.pilotjob.utils.util import parse_datetime\n'), ((5654, 5695), 'qcg.pilotjob.utils.util.parse_datetime', 'parse_datetime', (["schedule_state[0]['date']"], {}), "(schedule_state[0]['date'])\n", (5668, 5695), False, 'from qcg.pilotjob.utils.util import parse_datetime\n'), ((5757, 5794), 'qcg.pilotjob.utils.util.parse_datetime', 'parse_datetime', (["exec_state[0]['date']"], {}), "(exec_state[0]['date'])\n", (5771, 5794), False, 'from qcg.pilotjob.utils.util import parse_datetime\n'), ((5853, 5892), 'qcg.pilotjob.utils.util.parse_datetime', 'parse_datetime', (["finish_state[0]['date']"], {}), "(finish_state[0]['date'])\n", (5867, 5892), False, 'from qcg.pilotjob.utils.util import parse_datetime\n'), ((4041, 4104), 'datetime.datetime.strptime', 'datetime.strptime', (["job_entry['runtime']['rtime']", '"""%H:%M:%S.%f"""'], {}), "(job_entry['runtime']['rtime'], '%H:%M:%S.%f')\n", (4058, 4104), False, 'from datetime import datetime, timedelta\n'), ((4173, 4233), 'datetime.datetime.strptime', 'datetime.strptime', (["job_entry['runtime']['rtime']", '"""%H:%M:%S"""'], {}), "(job_entry['runtime']['rtime'], '%H:%M:%S')\n", (4190, 4233), False, 'from datetime import datetime, timedelta\n')]
|
from typing import List, Optional, Tuple
from collections import defaultdict
from mp_api.core.client import BaseRester
from mp_api.routes.eos.models import EOSDoc
class EOSRester(BaseRester):
    """Rester wrapping the equations-of-state (EOS) API endpoint."""

    suffix = "eos"
    document_model = EOSDoc  # type: ignore
    primary_key = "task_id"

    def search_eos_docs(
        self,
        volumes: Optional[Tuple[float, float]] = None,
        energies: Optional[Tuple[float, float]] = None,
        sort_field: Optional[str] = None,
        ascending: Optional[bool] = None,
        num_chunks: Optional[int] = None,
        chunk_size: int = 1000,
        all_fields: bool = True,
        fields: Optional[List[str]] = None,
    ):
        """
        Query equations of state docs using a variety of search criteria.

        Arguments:
            volumes (Tuple[float,float]): Minimum and maximum volume in A³/atom to consider for EOS plot range.
            energies (Tuple[float,float]): Minimum and maximum energy in eV/atom to consider for EOS plot range.
            sort_field (str): Field used to sort results.
            ascending (bool): Whether sorting should be in ascending order.
            num_chunks (int): Maximum number of chunks of data to yield. None will yield all possible.
            chunk_size (int): Number of data entries per chunk.
            all_fields (bool): Whether to return all fields in the document. Defaults to True.
            fields (List[str]): List of fields in EOSDoc to return data for.
                Default is material_id only if all_fields is False.

        Returns:
            ([EOSDoc]) List of eos documents
        """
        criteria = defaultdict(dict)  # type: dict

        # Translate the (min, max) tuples into the flat query parameters the
        # endpoint expects.
        if volumes:
            criteria["volumes_min"] = volumes[0]
            criteria["volumes_max"] = volumes[1]

        if energies:
            criteria["energies_min"] = energies[0]
            criteria["energies_max"] = energies[1]

        if sort_field:
            criteria["sort_field"] = sort_field

        if ascending is not None:
            criteria["ascending"] = ascending

        # Drop parameters whose value is None so they are not sent at all.
        criteria = {key: val for key, val in criteria.items() if val is not None}

        return super().search(
            num_chunks=num_chunks,
            chunk_size=chunk_size,
            all_fields=all_fields,
            fields=fields,
            **criteria
        )
|
[
"collections.defaultdict"
] |
[((1651, 1668), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (1662, 1668), False, 'from collections import defaultdict\n')]
|
from django.db import models
# Create your models here.
class Moeda(models.Model):
    """A currency, identified by its three-character code (e.g. 'USD')."""
    # Three-character currency code; doubles as the primary key.
    codigo = models.CharField('Código', max_length=3, primary_key=True)
    # Human-readable currency name.
    moeda = models.CharField('Nome da Moeda', max_length=255)
    # Display symbol (e.g. '$', 'R$').
    simbolo = models.CharField('Símbolo', max_length=5)

    def __str__(self):
        # Represent the currency by its code.
        return self.codigo

    class Meta:
        verbose_name = 'Moeda'
        verbose_name_plural = 'Moedas'
class Cotacao(models.Model):
    """Exchange-rate quote of one currency against a base currency on a date."""
    # Quoted currency; PROTECT prevents deleting a Moeda that has quotes.
    moeda = models.ForeignKey(Moeda, on_delete=models.PROTECT, related_name='cotacao_moeda')
    # Base currency the quote is expressed against.
    base = models.ForeignKey(Moeda, on_delete=models.PROTECT, related_name='cotacao_base')
    # Date the quote refers to.
    data = models.DateField('Data')
    # Rate value; 20 decimal places within 25 total digits.
    cotacao = models.DecimalField('Cotação', decimal_places=20, max_digits=25)

    def __str__(self):
        return '%s - %s %s' % (self.data, self.moeda, self.cotacao)

    class Meta:
        verbose_name = 'Cotação'
        verbose_name_plural = 'Cotações'
        # At most one quote per (currency, base, date) triple.
        unique_together = (('moeda', 'base', 'data'),)
|
[
"django.db.models.CharField",
"django.db.models.DecimalField",
"django.db.models.ForeignKey",
"django.db.models.DateField"
] |
[((97, 155), 'django.db.models.CharField', 'models.CharField', (['"""Código"""'], {'max_length': '(3)', 'primary_key': '(True)'}), "('Código', max_length=3, primary_key=True)\n", (113, 155), False, 'from django.db import models\n'), ((168, 217), 'django.db.models.CharField', 'models.CharField', (['"""Nome da Moeda"""'], {'max_length': '(255)'}), "('Nome da Moeda', max_length=255)\n", (184, 217), False, 'from django.db import models\n'), ((232, 273), 'django.db.models.CharField', 'models.CharField', (['"""Símbolo"""'], {'max_length': '(5)'}), "('Símbolo', max_length=5)\n", (248, 273), False, 'from django.db import models\n'), ((454, 539), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Moeda'], {'on_delete': 'models.PROTECT', 'related_name': '"""cotacao_moeda"""'}), "(Moeda, on_delete=models.PROTECT, related_name='cotacao_moeda'\n )\n", (471, 539), False, 'from django.db import models\n'), ((546, 625), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Moeda'], {'on_delete': 'models.PROTECT', 'related_name': '"""cotacao_base"""'}), "(Moeda, on_delete=models.PROTECT, related_name='cotacao_base')\n", (563, 625), False, 'from django.db import models\n'), ((637, 661), 'django.db.models.DateField', 'models.DateField', (['"""Data"""'], {}), "('Data')\n", (653, 661), False, 'from django.db import models\n'), ((676, 740), 'django.db.models.DecimalField', 'models.DecimalField', (['"""Cotação"""'], {'decimal_places': '(20)', 'max_digits': '(25)'}), "('Cotação', decimal_places=20, max_digits=25)\n", (695, 740), False, 'from django.db import models\n')]
|
from torch.utils.data import Dataset
from scipy import ndimage
from .augmentation import augmentation
import skimage
import imageio
import numpy as np
import h5py
import os
import random
class NeuroDataset(Dataset):
    """Custom PyTorch Dataset for the nuclei dataset stored in an HDF5 file.

    Each HDF5 entry is an array whose channel 0 is the input image and whose
    remaining channels hold the targets; ``target_channels`` selects which of
    those channels form the label ``y``.
    """

    def __init__(self, data_path, phase='train', transform=False, target_channels="3"):
        """Custom PyTorch Dataset for nuclei dataset

        Parameters
        ----------
        data_path: str
            path to the nuclei dataset hdf5 file
        phase: str, optional
            phase this dataset is used for (train, val. test)
        transform: bool, optional
            when True, apply ``augmentation`` to each sample
        target_channels: str, optional
            channel index, or comma-separated channel indices, to use as target
        """
        self.data_path = data_path
        self.phase = phase
        self.transform = transform

        # Bug fix: the multi-channel branch referenced a misspelled name
        # (`targat_channels`), raising NameError whenever a comma-separated
        # list was passed.  Splitting handles both the single- and the
        # multi-channel form ("3".split(',') -> ["3"]).
        self.target_channels = [int(c) for c in target_channels.split(',')]
        self.target_dim = len(self.target_channels)

        # Only the key list is cached; the file is reopened per __getitem__.
        with h5py.File(self.data_path, "r") as h:
            self.data_names = list(h.keys())
        self.dim = 1

    def __len__(self):
        """Number of samples (one per HDF5 key)."""
        return len(self.data_names)

    def __getitem__(self, idx):
        """Load sample *idx*; returns (input, target) arrays."""
        with h5py.File(self.data_path, "r") as h:
            data = h[self.data_names[idx]][:]
        # Channel 0 is the input image; add an explicit channel axis.
        x = data[0]
        x = np.expand_dims(x, axis=0)
        # Selected channels form the target.
        y = data[self.target_channels]
        if self.transform:
            x, y = augmentation(x, y)
        return x, y
|
[
"h5py.File",
"numpy.expand_dims"
] |
[((1287, 1312), 'numpy.expand_dims', 'np.expand_dims', (['x'], {'axis': '(0)'}), '(x, axis=0)\n', (1301, 1312), True, 'import numpy as np\n'), ((951, 981), 'h5py.File', 'h5py.File', (['self.data_path', '"""r"""'], {}), "(self.data_path, 'r')\n", (960, 981), False, 'import h5py\n'), ((1172, 1202), 'h5py.File', 'h5py.File', (['self.data_path', '"""r"""'], {}), "(self.data_path, 'r')\n", (1181, 1202), False, 'import h5py\n')]
|
# configuration steps import
import subprocess
import traceback
from nedgeBlockerException import NedgeBlockerException
from steps.firewallCheck import FirewallCheck
from steps.baseConfigurationStep import BaseConfigurationStep
from steps.nedeployRCConfig import NedeployRCConfig
from steps.nedeployBashActivation import NedeployBashActivation
from steps.nedeployInstall import NedeployInstall
from steps.nedeployPrecheck import NedeployPrecheck
from steps.neadmRCConfig import NeadmRCConfig
from steps.neadmInitWait import NeadmInitWait
from steps.neadmSystemInit import NeadmSystemInit
from steps.neadmLicenseActivation import NeadmLicenseActivation
from steps.neadmOnlineNodesWait import NeadmOnlineNodesWait
from steps.neadmClusterCreation import NeadmClusterCreation
from steps.waitAuditService import WaitAuditService
from steps.waitNodeUUID import WaitNodeUUID
from steps.systemPreConfig import SystemPreConfig
from steps.sshConfig import SSHConfig
from steps.systemPostConfig import SystemPostConfig
class NedgeBaseConfigurator:
    """Runs an ordered list of configuration steps against an environment.

    Subclasses supply the step list; each step must be a
    ``BaseConfigurationStep``.  Blockers raised via ``NedgeBlockerException``
    are collected and exposed through :meth:`get_blockers`.
    """

    def __init__(self, environment=None, steps=None):
        # Avoid mutable default arguments: a shared default dict/list would
        # leak state between configurator instances.
        self.environment = {} if environment is None else environment
        self.steps = [] if steps is None else steps
        self.blockers = []

    def configure(self):
        """Execute every step in order.

        Returns:
            True when all steps completed successfully, False otherwise.
        """
        print('Configuration started')
        # reset blockers from any previous run
        self.blockers = []
        try:
            for step in self.steps:
                if isinstance(step, BaseConfigurationStep):
                    # configuration step virtual method
                    step.print_step_name()
                    step.process(self.environment)
                else:
                    # Fixed missing space between the two string fragments
                    # ("objectin" -> "object in").
                    print('WARNING: There is unknown object '
                          'in configuration steps!')
            return True
        except subprocess.CalledProcessError as cpe:
            # CalledProcessError has no `.message` attribute in Python 3;
            # str(cpe) gives the command/return-code description.
            print('Failed!\nMessage:\n{0}\nTrace:\n{1}\nOutput:\n{2}'
                  .format(cpe, traceback.format_exc(), cpe.output))
            return False
        except NedgeBlockerException as nbe:
            print('Got blocker configuration exception')
            print(nbe.blockers)
            self.blockers = nbe.blockers
            return False
        except Exception as e:
            # `.message` was removed in Python 3 -- format the exception itself.
            print("Nedge configuration failed. Terminating")
            print('{}'.format(e))
            print('Traceback in {}'.format(traceback.format_exc()))
            return False

    def get_blockers(self):
        """Return the blockers collected by the last configure() run."""
        return self.blockers
class NedgeNodeConfigurator(NedgeBaseConfigurator):
    """Configurator for a NexentaEdge data node."""

    _steps = [FirewallCheck(),
              SystemPreConfig(),
              SSHConfig(),
              NedeployRCConfig(),
              NedeployBashActivation(),
              NedeployPrecheck(),
              NedeployInstall(),
              WaitAuditService(),
              WaitNodeUUID(),
              SystemPostConfig()]

    def __init__(self, environment=None):
        # Avoid a shared mutable default dict -- this method mutates
        # `environment`, so a default `{}` would be shared across instances.
        environment = {} if environment is None else environment
        environment['node_type'] = 'data'
        NedgeBaseConfigurator.__init__(self, environment,
                                       NedgeNodeConfigurator._steps)
class NedgeGatewayConfigurator(NedgeBaseConfigurator):
    """Configurator for a NexentaEdge gateway node."""

    _steps = [FirewallCheck(),
              SystemPreConfig(),
              SSHConfig(),
              NedeployRCConfig(),
              NedeployBashActivation(),
              NedeployPrecheck(),
              NedeployInstall(),
              WaitAuditService(),
              WaitNodeUUID(),
              SystemPostConfig()]

    def __init__(self, environment=None):
        # Avoid a shared mutable default dict -- this method mutates
        # `environment`.
        environment = {} if environment is None else environment
        environment['node_type'] = 'gateway'
        # Bug fix: the original passed NedgeNodeConfigurator._steps (copy/paste),
        # silently ignoring this class's own `_steps` list.
        NedgeBaseConfigurator.__init__(self, environment,
                                       NedgeGatewayConfigurator._steps)
class NedgeMgmtConfigurator(NedgeBaseConfigurator):
    """Configurator for a NexentaEdge management node (full cluster setup)."""

    _steps = [
        FirewallCheck(),
        SystemPreConfig(),
        SSHConfig(),
        NedeployRCConfig(),
        NedeployBashActivation(),
        NedeployPrecheck(),
        NedeployInstall(),
        WaitAuditService(),
        NeadmRCConfig(),
        NeadmInitWait(),
        NeadmSystemInit(),
        WaitNodeUUID(),
        NeadmLicenseActivation(),
        NeadmOnlineNodesWait(),
        NeadmClusterCreation(),
        WaitNodeUUID(),
        SystemPostConfig()
    ]

    def __init__(self, environment=None):
        # Avoid a shared mutable default dict -- this method mutates
        # `environment`.
        environment = {} if environment is None else environment
        environment['node_type'] = 'mgmt'
        NedgeBaseConfigurator.__init__(self, environment,
                                       NedgeMgmtConfigurator._steps)
|
[
"steps.neadmSystemInit.NeadmSystemInit",
"steps.neadmLicenseActivation.NeadmLicenseActivation",
"steps.systemPreConfig.SystemPreConfig",
"steps.systemPostConfig.SystemPostConfig",
"steps.nedeployPrecheck.NedeployPrecheck",
"steps.waitAuditService.WaitAuditService",
"steps.waitNodeUUID.WaitNodeUUID",
"steps.neadmClusterCreation.NeadmClusterCreation",
"steps.nedeployBashActivation.NedeployBashActivation",
"steps.neadmRCConfig.NeadmRCConfig",
"steps.neadmInitWait.NeadmInitWait",
"steps.nedeployRCConfig.NedeployRCConfig",
"steps.sshConfig.SSHConfig",
"steps.neadmOnlineNodesWait.NeadmOnlineNodesWait",
"traceback.format_exc",
"steps.nedeployInstall.NedeployInstall",
"steps.firewallCheck.FirewallCheck"
] |
[((2498, 2513), 'steps.firewallCheck.FirewallCheck', 'FirewallCheck', ([], {}), '()\n', (2511, 2513), False, 'from steps.firewallCheck import FirewallCheck\n'), ((2529, 2546), 'steps.systemPreConfig.SystemPreConfig', 'SystemPreConfig', ([], {}), '()\n', (2544, 2546), False, 'from steps.systemPreConfig import SystemPreConfig\n'), ((2562, 2573), 'steps.sshConfig.SSHConfig', 'SSHConfig', ([], {}), '()\n', (2571, 2573), False, 'from steps.sshConfig import SSHConfig\n'), ((2589, 2607), 'steps.nedeployRCConfig.NedeployRCConfig', 'NedeployRCConfig', ([], {}), '()\n', (2605, 2607), False, 'from steps.nedeployRCConfig import NedeployRCConfig\n'), ((2623, 2647), 'steps.nedeployBashActivation.NedeployBashActivation', 'NedeployBashActivation', ([], {}), '()\n', (2645, 2647), False, 'from steps.nedeployBashActivation import NedeployBashActivation\n'), ((2663, 2681), 'steps.nedeployPrecheck.NedeployPrecheck', 'NedeployPrecheck', ([], {}), '()\n', (2679, 2681), False, 'from steps.nedeployPrecheck import NedeployPrecheck\n'), ((2697, 2714), 'steps.nedeployInstall.NedeployInstall', 'NedeployInstall', ([], {}), '()\n', (2712, 2714), False, 'from steps.nedeployInstall import NedeployInstall\n'), ((2730, 2748), 'steps.waitAuditService.WaitAuditService', 'WaitAuditService', ([], {}), '()\n', (2746, 2748), False, 'from steps.waitAuditService import WaitAuditService\n'), ((2764, 2778), 'steps.waitNodeUUID.WaitNodeUUID', 'WaitNodeUUID', ([], {}), '()\n', (2776, 2778), False, 'from steps.waitNodeUUID import WaitNodeUUID\n'), ((2794, 2812), 'steps.systemPostConfig.SystemPostConfig', 'SystemPostConfig', ([], {}), '()\n', (2810, 2812), False, 'from steps.systemPostConfig import SystemPostConfig\n'), ((3095, 3110), 'steps.firewallCheck.FirewallCheck', 'FirewallCheck', ([], {}), '()\n', (3108, 3110), False, 'from steps.firewallCheck import FirewallCheck\n'), ((3126, 3143), 'steps.systemPreConfig.SystemPreConfig', 'SystemPreConfig', ([], {}), '()\n', (3141, 3143), False, 'from 
steps.systemPreConfig import SystemPreConfig\n'), ((3159, 3170), 'steps.sshConfig.SSHConfig', 'SSHConfig', ([], {}), '()\n', (3168, 3170), False, 'from steps.sshConfig import SSHConfig\n'), ((3186, 3204), 'steps.nedeployRCConfig.NedeployRCConfig', 'NedeployRCConfig', ([], {}), '()\n', (3202, 3204), False, 'from steps.nedeployRCConfig import NedeployRCConfig\n'), ((3220, 3244), 'steps.nedeployBashActivation.NedeployBashActivation', 'NedeployBashActivation', ([], {}), '()\n', (3242, 3244), False, 'from steps.nedeployBashActivation import NedeployBashActivation\n'), ((3260, 3278), 'steps.nedeployPrecheck.NedeployPrecheck', 'NedeployPrecheck', ([], {}), '()\n', (3276, 3278), False, 'from steps.nedeployPrecheck import NedeployPrecheck\n'), ((3294, 3311), 'steps.nedeployInstall.NedeployInstall', 'NedeployInstall', ([], {}), '()\n', (3309, 3311), False, 'from steps.nedeployInstall import NedeployInstall\n'), ((3327, 3345), 'steps.waitAuditService.WaitAuditService', 'WaitAuditService', ([], {}), '()\n', (3343, 3345), False, 'from steps.waitAuditService import WaitAuditService\n'), ((3361, 3375), 'steps.waitNodeUUID.WaitNodeUUID', 'WaitNodeUUID', ([], {}), '()\n', (3373, 3375), False, 'from steps.waitNodeUUID import WaitNodeUUID\n'), ((3391, 3409), 'steps.systemPostConfig.SystemPostConfig', 'SystemPostConfig', ([], {}), '()\n', (3407, 3409), False, 'from steps.systemPostConfig import SystemPostConfig\n'), ((3701, 3716), 'steps.firewallCheck.FirewallCheck', 'FirewallCheck', ([], {}), '()\n', (3714, 3716), False, 'from steps.firewallCheck import FirewallCheck\n'), ((3726, 3743), 'steps.systemPreConfig.SystemPreConfig', 'SystemPreConfig', ([], {}), '()\n', (3741, 3743), False, 'from steps.systemPreConfig import SystemPreConfig\n'), ((3753, 3764), 'steps.sshConfig.SSHConfig', 'SSHConfig', ([], {}), '()\n', (3762, 3764), False, 'from steps.sshConfig import SSHConfig\n'), ((3774, 3792), 'steps.nedeployRCConfig.NedeployRCConfig', 'NedeployRCConfig', ([], {}), '()\n', (3790, 3792), 
False, 'from steps.nedeployRCConfig import NedeployRCConfig\n'), ((3802, 3826), 'steps.nedeployBashActivation.NedeployBashActivation', 'NedeployBashActivation', ([], {}), '()\n', (3824, 3826), False, 'from steps.nedeployBashActivation import NedeployBashActivation\n'), ((3836, 3854), 'steps.nedeployPrecheck.NedeployPrecheck', 'NedeployPrecheck', ([], {}), '()\n', (3852, 3854), False, 'from steps.nedeployPrecheck import NedeployPrecheck\n'), ((3864, 3881), 'steps.nedeployInstall.NedeployInstall', 'NedeployInstall', ([], {}), '()\n', (3879, 3881), False, 'from steps.nedeployInstall import NedeployInstall\n'), ((3891, 3909), 'steps.waitAuditService.WaitAuditService', 'WaitAuditService', ([], {}), '()\n', (3907, 3909), False, 'from steps.waitAuditService import WaitAuditService\n'), ((3919, 3934), 'steps.neadmRCConfig.NeadmRCConfig', 'NeadmRCConfig', ([], {}), '()\n', (3932, 3934), False, 'from steps.neadmRCConfig import NeadmRCConfig\n'), ((3944, 3959), 'steps.neadmInitWait.NeadmInitWait', 'NeadmInitWait', ([], {}), '()\n', (3957, 3959), False, 'from steps.neadmInitWait import NeadmInitWait\n'), ((3969, 3986), 'steps.neadmSystemInit.NeadmSystemInit', 'NeadmSystemInit', ([], {}), '()\n', (3984, 3986), False, 'from steps.neadmSystemInit import NeadmSystemInit\n'), ((3996, 4010), 'steps.waitNodeUUID.WaitNodeUUID', 'WaitNodeUUID', ([], {}), '()\n', (4008, 4010), False, 'from steps.waitNodeUUID import WaitNodeUUID\n'), ((4020, 4044), 'steps.neadmLicenseActivation.NeadmLicenseActivation', 'NeadmLicenseActivation', ([], {}), '()\n', (4042, 4044), False, 'from steps.neadmLicenseActivation import NeadmLicenseActivation\n'), ((4054, 4076), 'steps.neadmOnlineNodesWait.NeadmOnlineNodesWait', 'NeadmOnlineNodesWait', ([], {}), '()\n', (4074, 4076), False, 'from steps.neadmOnlineNodesWait import NeadmOnlineNodesWait\n'), ((4086, 4108), 'steps.neadmClusterCreation.NeadmClusterCreation', 'NeadmClusterCreation', ([], {}), '()\n', (4106, 4108), False, 'from steps.neadmClusterCreation 
import NeadmClusterCreation\n'), ((4118, 4132), 'steps.waitNodeUUID.WaitNodeUUID', 'WaitNodeUUID', ([], {}), '()\n', (4130, 4132), False, 'from steps.waitNodeUUID import WaitNodeUUID\n'), ((4142, 4160), 'steps.systemPostConfig.SystemPostConfig', 'SystemPostConfig', ([], {}), '()\n', (4158, 4160), False, 'from steps.systemPostConfig import SystemPostConfig\n'), ((1883, 1905), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1903, 1905), False, 'import traceback\n'), ((2322, 2344), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2342, 2344), False, 'import traceback\n')]
|
import logging
from vosekast_control.Log import LOGGER
from vosekast_control.connectors.RelayControl import RelayControl
from vosekast_control.utils.Msg import StatusMessage
from vosekast_control.connectors import MQTTConnection
class Valve:
    """A relay-driven valve (or switch) whose state changes are logged and
    published over MQTT."""

    # regulations
    BINARY = "BINARY"
    ANALOG = "ANALOG"

    # valve_types
    TWO_WAY = "TWO_WAY"
    THREE_WAY = "THREE_WAY"
    SWITCH = "SWITCH"

    # valve states
    UNKNOWN = "UNKNOWN"
    OPEN = "OPEN"
    CLOSED = "CLOSED"

    def __init__(
        self, vosekast, name, relay_port, valve_type, regulation,
    ):
        super().__init__()

        self.vosekast = vosekast
        self.name = name
        self._relay_port = relay_port
        self.valve_type = valve_type
        self.regulation = regulation
        self._state = self.UNKNOWN
        self.logger = logging.getLogger(LOGGER)
        # Going through the property triggers logging + MQTT publication.
        self.state = self.UNKNOWN

    # todo fix bounce
    def close(self):
        """
        function to close the valve or switch
        :return:
        """
        self.logger.info(f"Closing {self.name}")
        RelayControl.relays_off([self._relay_port])
        self.state = self.CLOSED

    def open(self):
        """
        open the valve
        :return:
        """
        self.logger.info(f"Opening {self.name}")
        RelayControl.relays_on([self._relay_port])
        self.state = self.OPEN

    @property
    def is_closed(self):
        return self.CLOSED == self.state

    @property
    def is_open(self):
        return self.OPEN == self.state

    @property
    def state(self):
        return self._state

    @state.setter
    def state(self, new_state):
        self._state = new_state
        self.logger.info(f"New state of valve {self.name} is: {new_state}")
        self.publish_state()

    def publish_state(self):
        """Publish the current valve state via MQTT."""
        message = StatusMessage("valve", self.name, self.state)
        MQTTConnection.publish_message(message)
|
[
"vosekast_control.connectors.RelayControl.RelayControl.relays_on",
"vosekast_control.connectors.RelayControl.RelayControl.relays_off",
"vosekast_control.utils.Msg.StatusMessage",
"logging.getLogger"
] |
[((830, 855), 'logging.getLogger', 'logging.getLogger', (['LOGGER'], {}), '(LOGGER)\n', (847, 855), False, 'import logging\n'), ((1086, 1129), 'vosekast_control.connectors.RelayControl.RelayControl.relays_off', 'RelayControl.relays_off', (['[self._relay_port]'], {}), '([self._relay_port])\n', (1109, 1129), False, 'from vosekast_control.connectors.RelayControl import RelayControl\n'), ((1313, 1355), 'vosekast_control.connectors.RelayControl.RelayControl.relays_on', 'RelayControl.relays_on', (['[self._relay_port]'], {}), '([self._relay_port])\n', (1335, 1355), False, 'from vosekast_control.connectors.RelayControl import RelayControl\n'), ((1865, 1910), 'vosekast_control.utils.Msg.StatusMessage', 'StatusMessage', (['"""valve"""', 'self.name', 'self.state'], {}), "('valve', self.name, self.state)\n", (1878, 1910), False, 'from vosekast_control.utils.Msg import StatusMessage\n')]
|
from collections import deque
def get_vision(_map: list, start_pos: tuple, n_row: int, n_col: int,
               vision_range: int = 3):
    """BFS outward from `start_pos`, collecting every cell within
    `vision_range` steps.

    Parameters
    ----------
    _map: list
        2D grid (n_row x n_col); cell value 2 marks food, 3 marks a monster.
        Other values are not interpreted here (movement is never blocked).
    start_pos: tuple
        (row, col) starting cell.
    n_row, n_col: int
        Grid dimensions.
    vision_range: int, optional
        Maximum BFS distance (default 3, matching the original behaviour).

    Returns
    -------
    (ans, foods, monster): visible cells in BFS order, plus the subsets of
    those cells containing food and monsters.
    """
    q = deque()
    visited = [[False] * n_col for _ in range(n_row)]
    dist = [[0] * n_col for _ in range(n_row)]
    q.append(start_pos)
    visited[start_pos[0]][start_pos[1]] = True
    ans = []
    foods = []
    monster = []
    # Neighbour offsets: right, left, down, up (row delta / col delta).
    distx = [0, 0, 1, -1]
    disty = [1, -1, 0, 0]

    def is_valid(row, col):
        # Bug fix: the original compared the row index against n_col and the
        # column index against n_row (wrong on non-square maps), and its body
        # read the enclosing loop variables instead of its own parameters.
        return 0 <= row < n_row and 0 <= col < n_col

    while q:
        top = q.popleft()
        ans.append(top)
        cell = _map[top[0]][top[1]]
        if cell == 2:
            foods.append(top)
        elif cell == 3:
            monster.append(top)
        for dx, dy in zip(distx, disty):
            x = top[0] + dx
            y = top[1] + dy
            if is_valid(x, y) and not visited[x][y]:
                dist[x][y] = dist[top[0]][top[1]] + 1
                if dist[x][y] > vision_range:
                    continue
                q.append((x, y))
                visited[x][y] = True
    return ans, foods, monster
if __name__ == '__main__':
    # Demo: run the BFS vision from the grid centre of a 15x15 map.
    # Cell values: 2 = food, 3 = monster (the only values get_vision reacts
    # to); 1s are present in the data but do not block movement here.
    start_pos = (7,7)
    # des_pos = (14,14)
    _map = [[1,1, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0],
    [1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0],
    [0, 2, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0],
    [0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 2, 0, 0, 0, 0],
    [1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0]]
    print(_map)
    # Prints (visible_cells, foods, monsters) for a 15x15 grid.
    print(get_vision(_map,start_pos,15,15))
|
[
"collections.deque"
] |
[((101, 108), 'collections.deque', 'deque', ([], {}), '()\n', (106, 108), False, 'from collections import deque\n')]
|
# -*- coding: utf-8 -*-
from model.group import Group
import allure
def test_add_group(app, db, json_groups, check_ui):
    """End-to-end check: adding a group extends the stored group list."""
    new_group = json_groups
    groups_before = given_group_list(db)
    create_group(app, new_group)
    groups_after = given_group_list(db)
    check_groups(app, groups_before, groups_after, new_group, check_ui)
@allure.step('Given a group list')
def given_group_list(db):
    """Fetch the current list of groups straight from the database."""
    groups = db.get_group_list()
    return groups
@allure.step('When I add the group "{group}" to the list')
def create_group(app, group):
    """Create the given group through the application UI."""
    app.group.create(group)
@allure.step('Then the new group list is equal to the old list with the added group')
def check_groups(app, old_groups, new_groups, group, check_ui):
    """Verify new_groups equals old_groups plus the added group (by id)."""
    old_groups.append(group)
    expected = sorted(old_groups, key=Group.id_or_max)
    actual = sorted(new_groups, key=Group.id_or_max)
    assert expected == actual
    if check_ui:
        assert new_groups == app.group.get_group_list()
|
[
"allure.step"
] |
[((317, 350), 'allure.step', 'allure.step', (['"""Given a group list"""'], {}), "('Given a group list')\n", (328, 350), False, 'import allure\n'), ((411, 468), 'allure.step', 'allure.step', (['"""When I add the group "{group}" to the list"""'], {}), '(\'When I add the group "{group}" to the list\')\n', (422, 468), False, 'import allure\n'), ((530, 619), 'allure.step', 'allure.step', (['"""Then the new group list is equal to the old list with the added group"""'], {}), "(\n 'Then the new group list is equal to the old list with the added group')\n", (541, 619), False, 'import allure\n')]
|
from Src.Model import RNN
from Src.DataPrep import DataPrep
import time
import math
import argparse
from sys import argv
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import torch
import torch.nn as nn
def time_since(s):
    """Format the time elapsed since timestamp *s* as '|MMM:SS|'."""
    elapsed = time.time() - s
    minutes, seconds = divmod(elapsed, 60)
    return f'|{int(minutes):3}:{seconds:02.0f}|'
def fit(model, criterion, category_tensor, line_tensor):
    """Run one example through *model* and apply a manual SGD step.

    Relies on the module-level learning rate ``lr``.
    Returns (final_output, loss_value).
    """
    hidden = model.init_hidden()
    model.zero_grad()

    # Feed the sequence one element at a time, carrying the hidden state.
    for step in range(line_tensor.size()[0]):
        output, hidden = model(line_tensor[step], hidden)

    loss = criterion(output, category_tensor)
    loss.backward()

    # Manual SGD update: p <- p - lr * grad
    for param in model.parameters():
        param.data.add_(param.grad.data, alpha=-lr)

    return output, loss.item()
def train(model, criterion, n_iters, print_every=5000):
    """Train *model* for n_iters single-sample iterations.

    Uses the module-level ``data`` object for sampling and stores the running
    average loss (per 1000 iterations) in the module-level ``all_losses``.
    Progress is printed every ``print_every`` iterations.
    """
    model.train()
    current_loss = 0
    # `all_losses` is consumed later by plot_loss().
    global all_losses
    all_losses = []
    start = time.time()
    # NOTE(review): `iter` shadows the builtin of the same name.
    for iter in range(1, n_iters + 1):
        category, line, category_tensor, line_tensor = data.random_training_sample()
        output, loss = fit(model, criterion, category_tensor, line_tensor)
        current_loss += loss
        if iter % print_every == 0:
            # Compare the top-1 prediction against the true category.
            __, guess = data.cat_from_output(output, n_predictions=1)[0]
            correct = '✓' if guess == category else f'✗ {category:10}'
            print(f'{iter:6} {iter / n_iters * 100:6.2f}% {time_since(start)} {loss:7.4f} {line:10} {guess:10} {correct}')
        if iter % 1000 == 0:
            # Record the average loss over the last 1000 iterations.
            all_losses.append(current_loss/ 1000)
            current_loss = 0
    model.eval()
def predict(model, line, top_n=3, prnt=True):
    """Classify *line*, optionally printing the top_n scored categories.

    Uses the module-level ``data`` object for tensor conversion and category
    names.  Returns the name of the top-1 predicted category.
    NOTE(review): the model is unconditionally switched back to train mode on
    exit, even if it was in eval mode before the call.
    """
    model.eval()
    if prnt:
        print(f'> {line}↴')
    line_tensor = data.line2tensor(line)

    # Inference only -- no gradients needed.
    with torch.no_grad():
        hidden = model.init_hidden()

        for i in range(line_tensor.size()[0]):
            output, hidden = model(line_tensor[i], hidden)

        # Top-n scores and their category indices along dim 1.
        topv, topi = output.topk(top_n, 1, True)

        for i in range(top_n):
            value = topv[0, i].item()
            category_index = topi[0, i].item()
            if prnt:
                print(f'{value:4.2f} {data.all_categories[category_index]}')
    model.train()
    return data.all_categories[topi[0, 0].item()]
def plot_loss():
    """Plot the running-average losses recorded by train()."""
    figure, axes = plt.subplots()
    axes.plot(all_losses)
    plt.show()
def plot_confusion(model, n=10000):
    """Estimate and display the model's confusion matrix over *n* random samples.

    Rows are true categories, columns are predicted categories; each row is
    normalized to sum to 1.  Uses the module-level ``data`` object.
    """
    confusion = torch.zeros(data.n_categories, data.n_categories)
    for i in range(n):
        category, line, __, __ = data.random_training_sample()
        output = predict(model, line, top_n=1, prnt=False)
        category_i = data.all_categories.index(category)
        output_i = data.all_categories.index(output)
        confusion[category_i, output_i] += 1
    # Normalize each row so entries are per-category prediction rates.
    for i in range(data.n_categories):
        confusion[i] /= confusion[i].sum()
    fig, ax = plt.subplots()
    cax = ax.matshow(confusion.numpy())
    fig.colorbar(cax)
    # Leading '' aligns the labels with matshow's tick positions.
    ax.set_xticklabels([''] + data.all_categories, rotation=90)
    ax.set_yticklabels([''] + data.all_categories)
    ax.xaxis.set_major_locator(ticker.MultipleLocator(1))
    ax.yaxis.set_major_locator(ticker.MultipleLocator(1))
    plt.show()
# Command-line interface.
# NOTE(review): parse_args() runs at import time, not only under __main__ --
# importing this module with unexpected sys.argv will fail or misbehave.
ap = argparse.ArgumentParser()
ap.add_argument('-t', '--train', action='store_true')
ap.add_argument('-p', '--plot', action='store_true')
ap.add_argument('-c', '--classify')
options = ap.parse_args()

if __name__ == '__main__':
    data = DataPrep()
    rnn = RNN(data.n_letters, 128, data.n_categories)
    if options.train:
        # Train from scratch and save the weights.
        criterion = nn.NLLLoss()
        lr = 0.005
        train(rnn, criterion, n_iters=100000)
        plot_loss()
        torch.save(rnn.state_dict(), 'rnn.pth')
    else:
        # Otherwise load previously saved weights.
        rnn.load_state_dict(torch.load('rnn.pth'))
    if options.plot:
        plot_confusion(rnn)
    if options.classify:
        # Classify the last command-line argument.
        predict(rnn, argv[-1], top_n=3, prnt=True)
|
[
"matplotlib.pyplot.show",
"argparse.ArgumentParser",
"Src.Model.RNN",
"torch.load",
"math.floor",
"time.time",
"Src.DataPrep.DataPrep",
"torch.nn.NLLLoss",
"torch.zeros",
"matplotlib.ticker.MultipleLocator",
"torch.no_grad",
"matplotlib.pyplot.subplots"
] |
[((3172, 3197), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3195, 3197), False, 'import argparse\n'), ((257, 268), 'time.time', 'time.time', ([], {}), '()\n', (266, 268), False, 'import time\n'), ((293, 311), 'math.floor', 'math.floor', (['(s / 60)'], {}), '(s / 60)\n', (303, 311), False, 'import math\n'), ((915, 926), 'time.time', 'time.time', ([], {}), '()\n', (924, 926), False, 'import time\n'), ((2278, 2292), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2290, 2292), True, 'import matplotlib.pyplot as plt\n'), ((2321, 2331), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2329, 2331), True, 'import matplotlib.pyplot as plt\n'), ((2386, 2435), 'torch.zeros', 'torch.zeros', (['data.n_categories', 'data.n_categories'], {}), '(data.n_categories, data.n_categories)\n', (2397, 2435), False, 'import torch\n'), ((2835, 2849), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2847, 2849), True, 'import matplotlib.pyplot as plt\n'), ((3151, 3161), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3159, 3161), True, 'import matplotlib.pyplot as plt\n'), ((3405, 3415), 'Src.DataPrep.DataPrep', 'DataPrep', ([], {}), '()\n', (3413, 3415), False, 'from Src.DataPrep import DataPrep\n'), ((3426, 3469), 'Src.Model.RNN', 'RNN', (['data.n_letters', '(128)', 'data.n_categories'], {}), '(data.n_letters, 128, data.n_categories)\n', (3429, 3469), False, 'from Src.Model import RNN\n'), ((1743, 1758), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1756, 1758), False, 'import torch\n'), ((3061, 3086), 'matplotlib.ticker.MultipleLocator', 'ticker.MultipleLocator', (['(1)'], {}), '(1)\n', (3083, 3086), True, 'import matplotlib.ticker as ticker\n'), ((3119, 3144), 'matplotlib.ticker.MultipleLocator', 'ticker.MultipleLocator', (['(1)'], {}), '(1)\n', (3141, 3144), True, 'import matplotlib.ticker as ticker\n'), ((3513, 3525), 'torch.nn.NLLLoss', 'nn.NLLLoss', ([], {}), '()\n', (3523, 3525), True, 'import 
torch.nn as nn\n'), ((3698, 3719), 'torch.load', 'torch.load', (['"""rnn.pth"""'], {}), "('rnn.pth')\n", (3708, 3719), False, 'import torch\n')]
|
# Copyright (c) 2018-2019 ISciences, LLC.
# All rights reserved.
#
# WSIM is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. You may
# obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from typing import Dict, List, Optional
from .actions import \
composite_anomalies, \
composite_indicator_adjusted, \
composite_indicator_return_periods, \
composite_indicators, \
compute_return_periods, \
correct_forecast, \
create_forcing_file,\
forcing_summary, \
result_summary, \
return_period_summary, \
run_lsm,\
standard_anomaly_summary, \
time_integrate
from .config_base import ConfigBase as Config
from .dates import get_lead_months
from .step import Step
def monthly_observed(config: Config, yearmon: str, meta_steps: Dict[str, Step]) -> List[Step]:
    """Build the workflow Steps that process observed data for one month.

    Args:
        config: Pipeline configuration (workspace, data sources, windows).
        yearmon: Target month in YYYYMM form.
        meta_steps: Named aggregate steps; composite steps produced here are
            registered under the 'all_*composites' keys.

    Returns:
        The list of Steps to run for this observed month.
    """
    print('Generating steps for', yearmon, 'observed data')

    steps = []

    # Skip if we would already have run this date as part of spinup
    if yearmon not in config.historical_yearmons():
        if config.should_run_lsm(yearmon):
            # Prepare the dataset for use (convert from GRIB to netCDF, compute pWetDays, etc.)
            steps += config.observed_data().prep_steps(yearmon=yearmon)

            # Combine forcing data for LSM run
            steps += create_forcing_file(config.workspace(), config.observed_data(), yearmon=yearmon)

        # Run the LSM
        steps += run_lsm(config.workspace(), config.static_data(), yearmon=yearmon)

        steps += config.result_postprocess_steps(yearmon=yearmon)

    # Do time integration
    for window in config.integration_windows():
        steps += time_integrate(config.workspace(), config.lsm_integrated_stats(), forcing = False, yearmon=yearmon, window=window)
        steps += time_integrate(config.workspace(), config.forcing_integrated_stats(), forcing = True, yearmon=yearmon, window=window)

    # Compute return periods (window 1 uses the raw monthly variables,
    # larger windows use the time-integrated variable names)
    for window in [1] + config.integration_windows():
        steps += compute_return_periods(config.workspace(),
                                        result_vars=config.lsm_rp_vars() if window == 1 else config.lsm_integrated_var_names(),
                                        forcing_vars=config.forcing_rp_vars() if window==1 else config.forcing_integrated_var_names(),
                                        state_vars=config.state_rp_vars() if window==1 else None,
                                        yearmon=yearmon,
                                        window=window)

    # Compute composite indicators
    for window in [1] + config.integration_windows():
        # Don't write composite steps for a window that extends back too early.
        # (historical_yearmons()[window-1] is the earliest month for which a
        # window-month integration is fully covered by history.)
        if yearmon >= config.historical_yearmons()[window-1]:
            composite_indicator_steps = composite_indicators(config.workspace(), window=window, yearmon=yearmon)
            steps += composite_indicator_steps
            meta_steps['all_composites'].require(composite_indicator_steps)
            if window == 1:
                meta_steps['all_monthly_composites'].require(composite_indicator_steps)

            if yearmon not in config.historical_yearmons():
                steps += composite_anomalies(config.workspace(), window=window, yearmon=yearmon)

            # Express composite anomalies in terms of a return period
            # (relative to historical composite anomalies)
            steps += composite_indicator_return_periods(config.workspace(), yearmon=yearmon, window=window)

            # Produce an "adjusted" composite based on the return periods
            # of the composite surface anomaly and composite deficit anomaly
            adjusted_indicator_steps = composite_indicator_adjusted(config.workspace(), yearmon=yearmon, window=window)
            steps += adjusted_indicator_steps
            meta_steps['all_adjusted_composites'].require(adjusted_indicator_steps)
            if window == 1:
                meta_steps['all_adjusted_monthly_composites'].require(adjusted_indicator_steps)

    return steps
def monthly_forecast(config: Config,
                     yearmon: str,
                     meta_steps: Dict[str, Step],
                     *, forecast_lag_hours: Optional[int] = None) -> List[Step]:
    """Build the workflow Steps that process forecast data for one month.

    For every (target month, model, ensemble member) combination this
    prepares/bias-corrects the forecast, runs the LSM, time-integrates and
    computes return periods; the per-target ensemble is then summarized and
    turned into composite indicators.

    Args:
        config: Pipeline configuration (models, targets, workspace, windows).
        yearmon: Issue month in YYYYMM form.
        meta_steps: Named aggregate steps updated with the steps created here.
        forecast_lag_hours: If given, only prepare forecasts generated at
            least this many hours ago (keyword-only).

    Returns:
        The list of Steps to run for this forecast month.

    Raises:
        ValueError: If the configuration provides no models or no forecast
            targets for ``yearmon``.
    """
    steps = []

    if not config.models():
        raise ValueError("Forecast requested for {} iteration but configuration specifies no models. "
                         "Did you want to use --forecasts none?".format(yearmon))

    if not config.forecast_targets(yearmon):
        raise ValueError("Forecast requested for {} iteration but configuration specifies no forecast targets. "
                         "Did you want to use --forecasts none?".format(yearmon))

    # Report up-front how many too-recent forecasts the lag filter will skip.
    for model in config.models():
        if forecast_lag_hours is not None:
            available = len(config.forecast_ensemble_members(model, yearmon, lag_hours=forecast_lag_hours))
            total = len(config.forecast_ensemble_members(model, yearmon))
            if total - available > 0:
                print('Omitting {} prep steps for {} forecasts generated after {}'.format(
                    model,
                    total-available,
                    (datetime.datetime.utcnow() - datetime.timedelta(hours=forecast_lag_hours)).strftime('%Y%m%d%H')))

    for target in config.forecast_targets(yearmon):
        lead_months = get_lead_months(yearmon, target)
        for model in config.models():
            print('Generating steps for', model, yearmon, 'forecast target', target)
            # Preparation steps respect the lag filter ...
            for member in config.forecast_ensemble_members(model, yearmon, lag_hours=forecast_lag_hours):
                if config.should_run_lsm(yearmon):
                    # Prepare the dataset for use (convert from GRIB to netCDF, etc.)
                    steps += meta_steps['prepare_forecasts'].require(
                        config.forecast_data(model).prep_steps(yearmon=yearmon, target=target, member=member))

                # Bias-correct the forecast
                steps += meta_steps['prepare_forecasts'].require(
                    correct_forecast(config.forecast_data(model), yearmon=yearmon, member=member, target=target, lead_months=lead_months))

                # Assemble forcing inputs for forecast
                steps += meta_steps['prepare_forecasts'].require(
                    create_forcing_file(config.workspace(), config.forecast_data(model),
                                        yearmon=yearmon, target=target, model=model, member=member))

            # ... while model runs and postprocessing cover every member.
            for member in config.forecast_ensemble_members(model, yearmon):
                if config.should_run_lsm(yearmon):
                    # Run LSM with forecast data
                    steps += run_lsm(config.workspace(), config.static_data(),
                                     yearmon=yearmon, target=target, model=model, member=member, lead_months=lead_months)

                steps += config.result_postprocess_steps(yearmon=yearmon, target=target, model=model, member=member)

                for window in config.integration_windows():
                    # Time integrate the results
                    steps += time_integrate(config.workspace(), config.lsm_integrated_stats(), forcing=False, yearmon=yearmon, window=window, model=model, member=member, target=target)
                    steps += time_integrate(config.workspace(), config.forcing_integrated_stats(), forcing=True, yearmon=yearmon, window=window, model=model, member=member, target=target)

                # Compute return periods
                for window in [1] + config.integration_windows():
                    steps += compute_return_periods(config.workspace(),
                                                    forcing_vars=config.forcing_rp_vars() if window==1 else config.forcing_integrated_var_names(),
                                                    result_vars=config.lsm_rp_vars() if window==1 else config.lsm_integrated_var_names(),
                                                    state_vars=config.state_rp_vars() if window==1 else None,
                                                    yearmon=yearmon,
                                                    window=window,
                                                    model=model,
                                                    target=target,
                                                    member=member)
        # Guard against accidental reuse of the loop variable below.
        del model

        for window in [1] + config.integration_windows():
            # Summarize forecast ensemble
            # TODO add individual model summaries
            steps += meta_steps['results_summaries'].require(
                result_summary(config, yearmon=yearmon, target=target, window=window))
            steps += meta_steps['forcing_summaries'].require(
                forcing_summary(config, yearmon=yearmon, target=target, window=window))
            steps += return_period_summary(config, yearmon=yearmon, target=target, window=window)
            steps += standard_anomaly_summary(config, yearmon=yearmon, target=target, window=window)

            # Generate composite indicators from summarized ensemble data
            steps += composite_anomalies(config.workspace(),
                                         window=window, yearmon=yearmon, target=target, quantile=50)
            composite_indicator_steps = composite_indicators(config.workspace(),
                                                             window=window, yearmon=yearmon, target=target, quantile=50)
            steps += composite_indicator_steps
            meta_steps['all_composites'].require(composite_indicator_steps)
            if window == 1:
                meta_steps['all_monthly_composites'].require(composite_indicator_steps)

            steps += composite_indicator_return_periods(config.workspace(),
                                                        yearmon=yearmon, window=window, target=target)
            adjusted_indicator_steps = composite_indicator_adjusted(config.workspace(),
                                                                    yearmon=yearmon, window=window, target=target)
            steps += adjusted_indicator_steps
            meta_steps['all_adjusted_composites'].require(adjusted_indicator_steps)
            if window == 1:
                meta_steps['all_adjusted_monthly_composites'].require(adjusted_indicator_steps)

    return steps
|
[
"datetime.datetime.utcnow",
"datetime.timedelta"
] |
[((5671, 5697), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (5695, 5697), False, 'import datetime\n'), ((5700, 5744), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': 'forecast_lag_hours'}), '(hours=forecast_lag_hours)\n', (5718, 5744), False, 'import datetime\n')]
|
# -*- coding: utf-8 -*-
import scrapy
import json
class ZhihuSpider(scrapy.Spider):
    """Spider that signs in to zhihu.com and pages through the top-story feed.

    Flow: start_requests -> post_login (scrape the _xsrf token from the
    sign-in page) -> after_login (email/password sign-in) -> parse (walk the
    feed API, yielding one item per entry and appending raw pages to
    ``zhihu.json``).
    """
    name = 'zhihu'
    allowed_domains = ['www.zhihu.com']
    start_urls = ['https://www.zhihu.com/']
    loginUrl = 'https://www.zhihu.com/#signin'
    siginUrl = 'https://www.zhihu.com/login/email'
    feedUrl = 'https://www.zhihu.com/api/v3/feed/topstory'
    nextFeedUrl = ''   # next-page URL returned by the feed API
    curFeedId = 0      # number of feed entries consumed so far

    custom_settings = {
        "COOKIES_ENABLED": True,
    }
    headers = {
        'Host':
        'www.zhihu.com',
        'Connection':
        'keep-alive',
        'Origin':
        'https://www.zhihu.com',
        'User-Agent':
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.101 Safari/537.36',
        'Content-Type':
        'application/x-www-form-urlencoded; charset=UTF-8',
        'Accept':
        'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'X-Requested-With':
        'XMLHttpRequest',
        'DNT':
        1,
        'Referer':
        'https://www.zhihu.com/',
        'Accept-Encoding':
        'gzip, deflate, br',
        'Accept-Language':
        'zh-CN,zh;q=0.8,en;q=0.6',
        'Upgrade-Insecure-Requests:':
        1,
    }
    cookies = {
        'd_c0':
        '"AHCAtu1iqAmPTped76X1ZdN0X_qAwhjdLUU=|1458699045"',
        '__utma':
        '51854390.1407411155.1458699046.1458699046.1458699046.1',
        '__utmv':
        '51854390.000--|3=entry_date=20160322=1',
        '_zap':
        '850897bb-cba4-4d0b-8653-fd65e7578ac2',
        'q_c1':
        'b7918ff9a5514d2981c30050c8c732e1|1502937247000|1491446589000',
        'aliyungf_tc':
        'AQAAACtKLW+lywEAOhSntJwFFTilwpwt',
        '_xsrf':
        'f3ab08fc68489f44ae77236555367c70',
        'r_cap_id':
        '"M2NjNDAwNTZmY2ExNDA3NzgzNjZkZDA1ODNjZWJkNjI=|1503458111|36984ab33f21997b742d97ace2e02043cbb0a76e"',
        'cap_id':
        '"ZTIxMmM5Yzg1MGJkNDcxNjgxYzZjMjNlYTg3OGE0Yzk=|1503457914|8dce8550bca28e427771a0e7e1fe1bafb6e170f6"',
    }

    def start_requests(self):
        """Open the sign-in page so post_login can harvest the xsrf token."""
        return [
            scrapy.http.FormRequest(
                self.loginUrl,
                headers=self.headers,
                cookies=self.cookies,
                meta={'cookiejar': 1},
                callback=self.post_login)
        ]

    def post_login(self, response):
        """Extract the _xsrf token from the page and submit the login form."""
        xsrf = response.css(
            'div.view-signin > form > input[name=_xsrf]::attr(value)'
        ).extract_first()
        self.headers['X-Xsrftoken'] = xsrf
        return [
            scrapy.http.FormRequest(
                self.siginUrl,
                method='POST',
                headers=self.headers,
                meta={'cookiejar': response.meta['cookiejar']},
                formdata={
                    '_xsrf': xsrf,
                    'captcha_type': 'cn',
                    'email': '<EMAIL>',
                    'password': '<PASSWORD>',
                },
                callback=self.after_login)
        ]

    def after_login(self, response):
        """On successful login, grab the z_c0 auth cookie and hit the feed."""
        jdict = json.loads(response.body)
        print('after_login', jdict)
        if jdict['r'] == 0:
            # NOTE(review): scrapy header values are bytes; splitting with
            # str separators here looks Python-2 era — confirm on Python 3.
            z_c0 = response.headers.getlist('Set-Cookie')[2].split(';')[
                0].split('=')[1]
            self.headers['authorization'] = 'Bearer ' + z_c0
            return scrapy.http.FormRequest(
                url=self.feedUrl,
                method='GET',
                meta={'cookiejar': response.meta['cookiejar']},
                headers=self.headers,
                formdata={
                    'action_feed': 'True',
                    'limit': '10',
                    'action': 'down',
                    'after_id': str(self.curFeedId),
                    'desktop': 'true'
                },
                callback=self.parse)
        else:
            print(jdict['error'])

    def parse(self, response):
        """Yield each feed entry and request the next page (up to 50 items)."""
        # response.body is bytes, so the dump file must be opened in binary
        # append mode ('ab'); text mode 'a' raises TypeError on Python 3.
        with open('zhihu.json', 'ab') as fd:
            fd.write(response.body)
        jdict = json.loads(response.body)
        jdatas = jdict['data']
        for entry in jdatas:
            entry['pid'] = entry['id']
            yield entry

        jpaging = jdict['paging']
        self.curFeedId += len(jdatas)
        if not jpaging['is_end'] and self.curFeedId < 50:
            self.nextFeedUrl = jpaging['next']
            yield self.next_request(response)

    def next_request(self, response):
        """Build the GET request for the next feed page."""
        return scrapy.http.FormRequest(
            url=self.nextFeedUrl,
            method='GET',
            meta={'cookiejar': response.meta['cookiejar']},
            headers=self.headers,
            callback=self.parse)
|
[
"json.loads",
"scrapy.http.FormRequest"
] |
[((3076, 3101), 'json.loads', 'json.loads', (['response.body'], {}), '(response.body)\n', (3086, 3101), False, 'import json\n'), ((4009, 4034), 'json.loads', 'json.loads', (['response.body'], {}), '(response.body)\n', (4019, 4034), False, 'import json\n'), ((4441, 4600), 'scrapy.http.FormRequest', 'scrapy.http.FormRequest', ([], {'url': 'self.nextFeedUrl', 'method': '"""GET"""', 'meta': "{'cookiejar': response.meta['cookiejar']}", 'headers': 'self.headers', 'callback': 'self.parse'}), "(url=self.nextFeedUrl, method='GET', meta={\n 'cookiejar': response.meta['cookiejar']}, headers=self.headers,\n callback=self.parse)\n", (4464, 4600), False, 'import scrapy\n'), ((2113, 2249), 'scrapy.http.FormRequest', 'scrapy.http.FormRequest', (['self.loginUrl'], {'headers': 'self.headers', 'cookies': 'self.cookies', 'meta': "{'cookiejar': 1}", 'callback': 'self.post_login'}), "(self.loginUrl, headers=self.headers, cookies=self.\n cookies, meta={'cookiejar': 1}, callback=self.post_login)\n", (2136, 2249), False, 'import scrapy\n'), ((2571, 2827), 'scrapy.http.FormRequest', 'scrapy.http.FormRequest', (['self.siginUrl'], {'method': '"""POST"""', 'headers': 'self.headers', 'meta': "{'cookiejar': response.meta['cookiejar']}", 'formdata': "{'_xsrf': xsrf, 'captcha_type': 'cn', 'email': '<EMAIL>', 'password':\n '<PASSWORD>'}", 'callback': 'self.after_login'}), "(self.siginUrl, method='POST', headers=self.headers,\n meta={'cookiejar': response.meta['cookiejar']}, formdata={'_xsrf': xsrf,\n 'captcha_type': 'cn', 'email': '<EMAIL>', 'password': '<PASSWORD>'},\n callback=self.after_login)\n", (2594, 2827), False, 'import scrapy\n')]
|
'''Code obtained from https://github.com/gitshanks/fer2013'''
# load json and create model
# Evaluate the saved FER2013 Keras model on the held-out test set and dump
# predicted/true labels for later confusion-matrix analysis.
from __future__ import division
from keras.models import Sequential
from keras.layers import Dense
from keras.models import model_from_json
import numpy
import os
import numpy as np

# Rebuild the network architecture from its JSON description.
json_file = open('web_app/keras_model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights("web_app/keras_model_weights.h5")
print("Loaded model from disk")

truey=[]
predy=[]
# x: test images, y: one-hot encoded true labels.
x = np.load('data/keras_modXtest.npy')
y = np.load('data/keras_modytest.npy')

yhat= loaded_model.predict(x)
yh = yhat.tolist()
yt = y.tolist()
count = 0

# Argmax by hand: the index of the max probability is the predicted class;
# the index of the max of the one-hot row is the true class.
for i in range(len(y)):
    yy = max(yh[i])
    yyt = max(yt[i])
    predy.append(yh[i].index(yy))
    truey.append(yt[i].index(yyt))
    if(yh[i].index(yy)== yt[i].index(yyt)):
        count+=1

# Percentage of test samples classified correctly.
acc = (count/len(y))*100

#saving values for confusion matrix and analysis
np.save('data/truey', truey)
np.save('data/predy', predy)
print("Predicted and true label values saved")
print("Accuracy on test set :"+str(acc)+"%")
|
[
"numpy.load",
"keras.models.model_from_json",
"numpy.save"
] |
[((394, 428), 'keras.models.model_from_json', 'model_from_json', (['loaded_model_json'], {}), '(loaded_model_json)\n', (409, 428), False, 'from keras.models import model_from_json\n'), ((574, 608), 'numpy.load', 'np.load', (['"""data/keras_modXtest.npy"""'], {}), "('data/keras_modXtest.npy')\n", (581, 608), True, 'import numpy as np\n'), ((613, 647), 'numpy.load', 'np.load', (['"""data/keras_modytest.npy"""'], {}), "('data/keras_modytest.npy')\n", (620, 647), True, 'import numpy as np\n'), ((996, 1024), 'numpy.save', 'np.save', (['"""data/truey"""', 'truey'], {}), "('data/truey', truey)\n", (1003, 1024), True, 'import numpy as np\n'), ((1025, 1053), 'numpy.save', 'np.save', (['"""data/predy"""', 'predy'], {}), "('data/predy', predy)\n", (1032, 1053), True, 'import numpy as np\n')]
|
import pandas as pd
import re
class Formatter:
"""Formatting Class.
This class contains method that are used for formatting the data fetched
from the pickle and sent to DataFilter class
"""
def get_projects_details(self, projects):
"""Method to process all details related to project
This method process all project details, that are present
in a list by looping through each project and formatting it.
Args:
projects (dict): A list of projects with different information
of project
Returns:
dict: A dict with keys of different keys corresponding to different
project related graphs, or metrics.
For each graph this format is used
{"count": {"x": "y"}, "list": {"x": "list"}}
"count" represent the count for the x_value and "list" represent
"Different values" for x_values
"""
if isinstance(projects, int):
return projects
projects_details = {}
# key id_type is used by frontend to add appropriate urls in frontend
project_acccess = self.dict_generator_overview(
projects, 'project_access', 'id', 'access')
project_acccess['id_type'] = 'project'
projects_details['Number of Projects'] = len(projects)
projects_details['Projects Visibility'] = project_acccess
return projects_details
    def get_subjects_details(self, subjects_data):
        """Build overview metrics and graph data for a list of subjects.

        Args:
            subjects_data (list): Subject records, each with ``age``, ``ID``,
                ``project``, ``handedness`` and ``gender`` fields.

        Returns:
            dict: One entry per metric/graph.  Graph entries use the format
            {"count": {"x": y}, "list": {"x": [ids]}}; subjects with a
            missing or implausible age are collected under "No Data".
        """
        if isinstance(subjects_data, int):
            # An int signals an upstream error code; forward it untouched.
            return subjects_data
        subjects_details = {}
        # Subject age information: keep ages in (0, 130), everything else
        # (empty or out of range) goes to the "No Data" bucket.
        age_list = []
        age_none = []
        for subject in subjects_data:
            if subject['age'] != '':
                if int(subject['age']) > 0 and int(subject['age']) < 130:
                    age_list.append([int(subject['age']), subject['ID']])
                else:
                    age_none.append(subject['ID'])
            else:
                age_none.append(subject['ID'])
        age_df = pd.DataFrame(age_list, columns=['age', 'count'])
        # Bucket ages into decade-wide bins (pd.cut is right-inclusive).
        age_df['age'] = pd.cut(
            x=age_df['age'],
            bins=[0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 130],
            labels=['0-10', '10-20', '20-30', '30-40', '40-50', '50-60',
                    '60-70', '70-80', '80-90', '90-100', 'Above_100'])
        age_ranged = age_df.groupby('age')['count'].apply(list)
        age_final_df = age_df.groupby('age').count()
        age_final_df['list'] = age_ranged
        age_range = age_final_df.to_dict()
        age_range['count'].update({'No Data': len(age_none)})
        age_range['list'].update({'No Data': age_none})
        age_range['id_type'] = 'subject'
        # Age end

        # Subject handedness information
        handedness = self.dict_generator_overview(
            subjects_data, 'handedness', 'ID', 'handedness')
        handedness['id_type'] = 'subject'

        # Subject gender information
        gender = self.dict_generator_overview(
            subjects_data, 'gender', 'ID', 'gender')
        gender['id_type'] = 'subject'

        # Subjects per project information
        subjects_per_project = self.dict_generator_per_view(
            subjects_data, 'project', 'ID', 'spp')
        subjects_per_project['id_type'] = 'subject'

        # Number of subjects information
        subjects_details['Number of Subjects'] = len(subjects_data)
        subjects_details['Subjects/Project'] = subjects_per_project
        subjects_details['Age Range'] = age_range
        subjects_details['Gender'] = gender
        subjects_details['Handedness'] = handedness
        return subjects_details
def get_experiments_details(self, experiments):
"""Method to process all details related to Experiment
This method process all experiment details, that are present
in a list by looping through each experiment and formatting it.
Args:
experiments (dict): A list of experiment with each
having its ID, Project ID, experiment type
Returns:
dict: A dict with keys of different keys corresponding to different
experiment related graphs, or metrics.
For each graph this format is used
{"count": {"x": "y"}, "list": {"x": "list"}}
"count" represent the count for the x_value and "list" represent
"Different values" for x_values
"""
if isinstance(experiments, int):
return experiments
experiments_details = {}
experiments_details['Number of Experiments'] = len(experiments)
# Experiments per project information
experiments_per_project = self.dict_generator_per_view(
experiments, 'project', 'ID', 'epp')
experiments_per_project['id_type'] = 'experiment'
# Experiments type information
experiment_type = self.dict_generator_overview(
experiments, 'xsiType', 'ID', 'xsiType')
experiment_type['id_type'] = 'experiment'
experiments_types_per_project = self.dict_generator_per_view_stacked(
experiments, 'project', 'xsiType', 'ID')
experiments_types_per_project['id_type'] = 'experiment'
prop_exp = self.proportion_graphs(
experiments, 'subject_ID', 'ID', 'Subjects with ', ' experiment')
prop_exp['id_type'] = 'subject'
experiments_details['Sessions types/Project'] =\
experiments_types_per_project
experiments_details['Experiment Types'] = experiment_type
experiments_details['Experiments/Project'] = experiments_per_project
experiments_details['Experiments Proportions'] = prop_exp
return experiments_details
def get_scans_details(self, scans):
"""Method to process all details related to scan
This method process all scan details, that are present
in a list by looping through each scan and formatting it.
Args:
scans (dict): A list of scans with each scan
having its project ID, scan ID, subject ID, experiment ID,
scan type and scan quality.
Returns:
dict: A dict with keys of different keys corresponding to different
scan related graphs, or metrics.
For each graph this format is used
{"count": {"x": "y"}, "list": {"x": "list"}}
"count" represent the count for the x_value and "list" represent
"Different values" for x_values
"""
if isinstance(scans, int):
return scans
scan_quality = self.dict_generator_overview(
scans, 'xnat:imagescandata/quality', 'ID',
'quality', 'xnat:imagescandata/id')
scan_quality['id_type'] = 'experiment'
# Scans type information
type_dict = self.dict_generator_overview(
scans, 'xnat:imagescandata/type',
'ID', 'type', 'xnat:imagescandata/id')
type_dict['id_type'] = 'experiment'
# Scans xsi type information
xsi_type_dict = self.dict_generator_overview(
scans, 'xsiType', 'ID', 'xsiType', 'xnat:imagescandata/id')
xsi_type_dict['id_type'] = 'experiment'
# Scans per project information
scans_per_project = self.dict_generator_overview(
scans, 'project', 'ID', 'spp', 'xnat:imagescandata/id')
scans_per_project['id_type'] = 'experiment'
prop_scan = self.proportion_graphs(
scans, 'xnat:imagesessiondata/subject_id',
'ID', 'Subjects with ', ' scans')
prop_scan['id_type'] = 'subject'
scans_details = {}
scans_details['Scans Quality'] = scan_quality
scans_details['Scan Types'] = type_dict
scans_details['XSI Scan Types'] = xsi_type_dict
scans_details['Scans/Project'] = scans_per_project
scans_details['Scans Proportions'] = prop_scan
scans_details['Number of Scans'] = len(scans)
return scans_details
def get_projects_details_specific(self, projects, name):
"""This project process list of all projects.
This generate list of projects that are visible to user and
the list of projects owned, collaborated or member.
Args:
projects (list): List of projects with there details
name (String): Name of the user
Returns:
list: List of projects which are visible to user.
"""
if projects is None:
return 1
project_list_owned_collab_member = []
for project in projects:
project_owner = project['project_owners']
project_collabs = project['project_collabs']
project_member = project['project_members']
user = name
if project_owner.find(user) != -1\
or project_collabs.find(user) != -1\
or project_member.find(user) != -1:
project_list_owned_collab_member.append(project['id'])
project_list_all = [project['id'] for project in projects]
list_data = {}
list_data['project_list'] = project_list_all
list_data['project_list_ow_co_me'] = project_list_owned_collab_member
return list_data
    def get_resources_details(
            self, resources=None, project_id=None):
        """Build resource-related graph data, optionally for one project.

        Args:
            resources (list, optional): Rows of
                ``[project, session, resource, label]``; ``None`` skips
                resource graphs entirely.
            project_id (str, optional): If given, restrict the output to this
                project (per-project view).

        Returns:
            dict/int/None: ``None`` when no resources were supplied, ``-1``
            when ``project_id`` matches no resources, otherwise a dict of
            graph data in the format
            {"count": {"x": y}, "list": {"x": [ids]}}.
        """
        if resources is None:
            return None
        df = pd.DataFrame(
            resources,
            columns=['project', 'session', 'resource', 'label'])
        if project_id is not None:  # Code for per project view
            try:
                df = df.groupby(['project']).get_group(project_id)
            except KeyError:
                return -1
        # Normalize repr-like '<Resource Object>' strings so they can be used
        # as plain labels.
        df['resource'] = df['resource'].map(
            lambda x: re.sub('<Resource Object>', 'Resource Object', str(x)))
        resource_pp = df[df['resource'] != 'No Data'][['project', 'resource']]
        session = df[df['resource'] != 'No Data']['session']
        # Prefix with the session so each resource id is unique per session.
        resource_pp['resource'] = session + '/' + resource_pp['resource']
        resource_pp = resource_pp.rename(columns={'resource': 'count'})
        resources_pp_df = resource_pp.groupby('project')['count'].apply(list)
        resource_pp = resource_pp.groupby('project').count()
        resource_pp['list'] = resources_pp_df
        resource_pp = resource_pp.to_dict()
        resource_pp['id_type'] = 'experiment'
        res_pp_no_data = df[
            df['resource'] == 'No Data'].groupby('project').count()
        # Creates 2 Dataframe
        # Data frame 1 have project which have resources
        # Data frame 2 have project which don't have resources
        # Subtract dataframe 1 from 2 if dataframe 2 have any project id
        # left then this means project id doesn't have any resource
        no_data_rpp = res_pp_no_data.index.difference(
            resources_pp_df.index).to_list()
        # Remove those project ID
        if len(no_data_rpp) != 0:
            no_data_update = {}
            for item in no_data_rpp:
                no_data_update[item] = 0
            resource_pp['count'].update(no_data_update)
        # Resource types
        resource_types = self.dict_generator_resources(df, 'label', 'session')
        resource_types['id_type'] = 'experiment'
        # NOTE(review): resource_type_ps duplicates resource_types and is
        # never returned — looks like dead code; confirm before removing.
        resource_type_ps = self.dict_generator_resources(
            df, 'label', 'session')
        resource_type_ps['id_type'] = 'experiment'
        # Code for number of experiments having common
        # number of resources for each project
        pro_exp_list = [[item[0], item[1]] for item in resources]
        pro_exp_df = pd.DataFrame(
            pro_exp_list, columns=['project', 'session'])
        # Create a Dataframe that have 3 columns where
        # 1st column: project_x will have projects
        # 2nd column: session will have session details
        # 3rd column: project_y will have count of resources
        pro_exp_count = pro_exp_df.groupby('session').count().reset_index()
        project_session = pro_exp_df.drop_duplicates(subset="session")
        resource_count_df = pd.merge(
            project_session, pro_exp_count, on='session')
        resource_count_df['project_y'] = resource_count_df[
            'project_y'].astype(str) + ' Resources/Session'
        # Send the above created data from to dict_generator_per_view_stacked
        # This will create the format required for stacked plot
        resource_count_dict = self.dict_generator_per_view_stacked(
            resource_count_df, 'project_x', 'project_y', 'session')
        resource_count_dict['id_type'] = 'experiment'
        return {
            'Resources/Project': resource_pp,
            'Resource Types': resource_types,
            'Session resource count/Project': resource_count_dict}
def proportion_graphs(self, data, property_x, property_y, prefix, suffix):
data_list = [[item[property_x], item[property_y]] for item in data]
# Create a data frame
df = pd.DataFrame(data_list, columns=['per_view', 'count'])
# Group by property property_x as per_view and count
df_proportion = df.groupby(
'per_view', as_index=False).count().groupby('count').count()
# Use count to group by property x
df_proportion['list'] = df.groupby(
'per_view', as_index=False).count().groupby(
'count')['per_view'].apply(list)
# Add prefix and suffix to count for easy understanding
# Eg. Number of subject with 1 experiments
# Here prefix is Number of subject with and suffix is experiments
# and count is 1
df_proportion.index = prefix + df_proportion.index.astype(str) + suffix
return df_proportion.rename(columns={'per_view': 'count'}).to_dict()
def dict_generator_resources(self, df, x_name, y_name):
"""Generate a dictionary from the data frame of resources
in the format required for graphs
Args:
df (Datafrmae): A dataframe of resources
x_name (Str): The name which will be on X axis of graph
y_name (Str): The name which will be on Y axis of graph
Returns:
Dict: For each graph this format is used
{"count": {"x": "y"}, "list": {"x": "list"}}
"""
data = df[df[y_name] != 'No Data'][[
x_name, y_name]]
data = data.rename(columns={y_name: 'count'})
data_df = data.groupby(
x_name)['count'].apply(list)
data = data.groupby(x_name).count()
data['list'] = data_df
data_dict = data.to_dict()
return data_dict
def dict_generator_overview(
self, data, property_x, property_y, x_new, extra=None):
"""Generate a dictionary from the data list of project, subjects,
experiments and scans in the format required for graphs.
Args:
data (list): List of projects or subjects or exp or scans
property_x (str): The name which will be on X axis of graph
property_y (str): The name which will be on Y axis of graph
x_new (str): The new name which will be shown on X axis of graph
extra (str, optional): Add another value to be concatenated
in x_axis, when click on graph occurs. Useful when
the x_axis values are not unique and by default will not
be used for concatenation.
Returns:
Dict: For each graph this format is used
{"count": {"x": "y"}, "list": {"x": "list"}}
"""
property_list = []
property_none = []
for item in data:
if item[property_x] != '':
if extra is None:
property_list.append([item[property_x], item[property_y]])
else:
property_list.append(
[
item[property_x],
item[property_y] + '/' + item[extra]])
else:
if extra is None:
property_none.append(item[property_y])
else:
property_none.append(item[property_y] + '/' + item[extra])
property_df = pd.DataFrame(
property_list, columns=[x_new, 'count'])
property_df_series = property_df.groupby(
x_new)['count'].apply(list)
property_final_df = property_df.groupby(x_new).count()
property_final_df['list'] = property_df_series
property_dict = property_final_df.to_dict()
if len(property_none) != 0:
property_dict['count'].update({'No Data': len(property_none)})
property_dict['list'].update({'No Data': property_none})
return property_dict
def dict_generator_per_view(
self, data, property_x, property_y, x_new):
"""Generate a dictionary from the data list of subjects,
experiments and scans in the format required for graphs.
The generated data is only for single project.
Args:
data (list): List of projects or subjects or exp or scans
property_x (str): The name which will be on X axis of graph
property_y (str): The name which will be on Y axis of graph
x_new (str): The new name which will be shown on X axis of graph
Returns:
Dict: For each graph this format is used
{"count": {"x": "y"}, "list": {"x": "list"}}
"""
per_list = [[item[property_x], item[property_y]] for item in data]
per_df = pd.DataFrame(per_list, columns=[x_new, 'count'])
per_df_series = per_df.groupby(x_new)['count'].apply(list)
per_df = per_df.groupby(x_new).count()
per_df['list'] = per_df_series
per_view = per_df.to_dict()
return per_view
def dict_generator_per_view_stacked(
self, data, property_x, property_y, property_z):
"""Generate dict format that is used by plotly for stacked graphs view,
data like project details, scan, experiments, subject as field
property_x and property_y are used to group by the pandas data frame
and both are used on x axis values while property_z is used on y axis.
Args:
data (list): List of data project, subject, scan and experiments
property_x (str): The name which will be on X axis of graph
property_y (str): The name which will be on X axis of graph
property_z (str): The name which will be on Y axis of graph
Returns:
dict:{count:{prop_x:{prop_y:prop_z_count}},
list:{prop_x:{prop_y:prop_z_list}}
}
"""
if isinstance(data, list):
per_list = [[
item[property_x], item[property_y],
item[property_z]] for item in data]
per_df = pd.DataFrame(
per_list, columns=[property_x, property_y, property_z])
else:
per_df = data
per_df_series = per_df.groupby(
[property_x, property_y])[property_z].apply(list)
per_df = per_df.groupby([property_x, property_y]).count()
per_df['list'] = per_df_series
dict_tupled = per_df.to_dict()
dict_output_list = {}
for item in dict_tupled['list']:
dict_output_list[item[0]] = {}
for item in dict_tupled['list']:
dict_output_list[
item[0]].update({item[1]: dict_tupled['list'][item]})
dict_output_count = {}
for item in dict_tupled[property_z]:
dict_output_count[item[0]] = {}
for item in dict_tupled[property_z]:
dict_output_count[
item[0]].update({item[1]: dict_tupled[property_z][item]})
return {'count': dict_output_count, 'list': dict_output_list}
class FormatterPP(Formatter):
    """Formatter for the per-project view.

    Filters the pickled XNAT data down to a single project and reuses the
    parent ``Formatter`` processing wherever possible.

    Args:
        Formatter (Formatter): Inherited formatter class.
        project_id (str): ID of the project whose data should be processed.
    """

    def __init__(self, project_id):
        # Only items belonging to this project are kept by the getters below.
        self.project_id = project_id

    def get_projects_details(self, projects):
        """Return display-ready details of the configured project.

        Args:
            projects (list): List of project dicts.

        Returns:
            dict: Project information formatted for display.
        """
        # Keep the (last) project whose id matches self.project_id.
        matched = {}
        for candidate in projects:
            if candidate['id'] == self.project_id:
                matched = candidate

        def tags(raw):
            # Multi-valued fields arrive as '<br/>'-joined strings.
            return raw.split('<br/>')

        def tags_or_dash(raw):
            # An empty first element means "no entries"; show a dash instead.
            values = raw.split('<br/>')
            return ['------'] if values[0] == '' else values

        return {
            'Owner(s)': tags(matched['project_owners']),
            'Collaborator(s)': tags_or_dash(matched['project_collabs']),
            'member(s)': tags_or_dash(matched['project_members']),
            'user(s)': tags_or_dash(matched['project_users']),
            'last_accessed(s)': tags(matched['project_last_access']),
            'insert_user(s)': matched['insert_user'],
            'insert_date': matched['insert_date'],
            'access': matched['project_access'],
            'name': matched['name'],
            'last_workflow': matched['project_last_workflow'],
        }

    def get_subjects_details(self, subjects):
        """Return subject graphs restricted to this project.

        Args:
            subjects (list): List of subject dicts.

        Returns:
            dict: For each graph, {"count": {"x": "y"}, "list": {"x": "list"}}.
        """
        project_subjects = [
            subject for subject in subjects
            if subject['project'] == self.project_id]
        # Reuse the parent-class processing on the filtered data.
        details = super().get_subjects_details(project_subjects)
        # The per-project view's counter already shows this figure.
        del details['Subjects/Project']
        return details

    def get_experiments_details(self, experiments_data):
        """Return experiment graphs restricted to this project.

        Args:
            experiments_data (list): List of experiment dicts.

        Returns:
            dict: For each graph, {"count": {"x": "y"}, "list": {"x": "list"}}.
        """
        project_experiments = [
            experiment for experiment in experiments_data
            if experiment['project'] == self.project_id]
        details = super().get_experiments_details(project_experiments)
        # The per-project view's counter already shows this figure.
        del details['Experiments/Project']
        # Add the experiments-per-subject breakdown for this project.
        per_subject = super().dict_generator_per_view(
            project_experiments, 'subject_ID', 'ID', 'eps')
        per_subject['id_type'] = 'experiment'
        details['Experiments/Subject'] = per_subject
        return details

    def get_scans_details(self, scans_data):
        """Return scan graphs restricted to this project.

        Args:
            scans_data (list): List of scan dicts.

        Returns:
            dict: For each graph, {"count": {"x": "y"}, "list": {"x": "list"}}.
        """
        project_scans = [
            scan for scan in scans_data
            if scan['project'] == self.project_id]
        details = super().get_scans_details(project_scans)
        # The per-project view's counter already shows this figure.
        del details['Scans/Project']
        # Add the scans-per-subject breakdown for this project.
        per_subject = super().dict_generator_overview(
            project_scans, 'xnat:imagesessiondata/subject_id',
            'ID', 'sps', 'xnat:imagescandata/id')
        per_subject['id_type'] = 'experiment'
        details['Scans/Subject'] = per_subject
        return details

    def get_resources_details(self, resources=None):
        """Return resource graphs restricted to this project.

        Args:
            resources (list, optional): List of resources; when omitted the
                resource graphs are skipped entirely.

        Returns:
            dict | int | None: None when resources is None; the parent
            class's int sentinel is passed through unchanged; otherwise the
            per-graph dicts with project-level plots removed.
        """
        if resources is None:
            return None
        details = super().get_resources_details(resources, self.project_id)
        if isinstance(details, int):
            return details
        # These plots duplicate the per-project counters; drop them if present.
        for redundant in ('Resources/Project',
                          'Session resource count/Project'):
            if redundant in details:
                del details[redundant]
        return details
|
[
"pandas.DataFrame",
"pandas.cut",
"pandas.merge"
] |
[((2786, 2834), 'pandas.DataFrame', 'pd.DataFrame', (['age_list'], {'columns': "['age', 'count']"}), "(age_list, columns=['age', 'count'])\n", (2798, 2834), True, 'import pandas as pd\n'), ((2860, 3060), 'pandas.cut', 'pd.cut', ([], {'x': "age_df['age']", 'bins': '[0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 130]', 'labels': "['0-10', '10-20', '20-30', '30-40', '40-50', '50-60', '60-70', '70-80',\n '80-90', '90-100', 'Above_100']"}), "(x=age_df['age'], bins=[0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, \n 130], labels=['0-10', '10-20', '20-30', '30-40', '40-50', '50-60',\n '60-70', '70-80', '80-90', '90-100', 'Above_100'])\n", (2866, 3060), True, 'import pandas as pd\n'), ((11008, 11084), 'pandas.DataFrame', 'pd.DataFrame', (['resources'], {'columns': "['project', 'session', 'resource', 'label']"}), "(resources, columns=['project', 'session', 'resource', 'label'])\n", (11020, 11084), True, 'import pandas as pd\n'), ((13224, 13282), 'pandas.DataFrame', 'pd.DataFrame', (['pro_exp_list'], {'columns': "['project', 'session']"}), "(pro_exp_list, columns=['project', 'session'])\n", (13236, 13282), True, 'import pandas as pd\n'), ((13695, 13749), 'pandas.merge', 'pd.merge', (['project_session', 'pro_exp_count'], {'on': '"""session"""'}), "(project_session, pro_exp_count, on='session')\n", (13703, 13749), True, 'import pandas as pd\n'), ((14595, 14649), 'pandas.DataFrame', 'pd.DataFrame', (['data_list'], {'columns': "['per_view', 'count']"}), "(data_list, columns=['per_view', 'count'])\n", (14607, 14649), True, 'import pandas as pd\n'), ((17853, 17906), 'pandas.DataFrame', 'pd.DataFrame', (['property_list'], {'columns': "[x_new, 'count']"}), "(property_list, columns=[x_new, 'count'])\n", (17865, 17906), True, 'import pandas as pd\n'), ((19211, 19259), 'pandas.DataFrame', 'pd.DataFrame', (['per_list'], {'columns': "[x_new, 'count']"}), "(per_list, columns=[x_new, 'count'])\n", (19223, 19259), True, 'import pandas as pd\n'), ((20528, 20596), 'pandas.DataFrame', 
'pd.DataFrame', (['per_list'], {'columns': '[property_x, property_y, property_z]'}), '(per_list, columns=[property_x, property_y, property_z])\n', (20540, 20596), True, 'import pandas as pd\n')]
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
RESTful endpoints for powering on/off/restarting VMs in vLab
"""
from setuptools import setup, find_packages


# Packaging metadata for the vLab power service.
setup(name="vlab-power-api",
      # NOTE(review): "<NAME>"/"<EMAIL>" are redaction placeholders; the
      # trailing comma inside the author string looks accidental — confirm.
      author="<NAME>,",
      author_email='<EMAIL>',
      version='2019.06.19',
      packages=find_packages(),
      include_package_data=True,
      # NOTE(review): 'package_files' is not a setuptools keyword (unknown
      # kwargs are ignored with at most a warning) — 'package_data' was likely
      # intended. The 'vlab_folder_api' key also doesn't match this package
      # ('vlab-power-api'); confirm against the folder-api sibling project.
      package_files={'vlab_folder_api' : ['app.ini']},
      description="vLab service for powering on/off/restart VMs",
      install_requires=['flask', 'ldap3', 'pyjwt', 'uwsgi', 'vlab-api-common',
                        'ujson', 'cryptography', 'vlab-inf-common', 'celery']
      )
|
[
"setuptools.find_packages"
] |
[((287, 302), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (300, 302), False, 'from setuptools import setup, find_packages\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the data range file-like object."""
import os
import unittest
from dfvfs.file_io import data_range_io
from dfvfs.file_io import os_file_io
from dfvfs.path import data_range_path_spec
from dfvfs.path import os_path_spec
from dfvfs.resolver import context
from tests.file_io import test_lib
class DataRangeTest(test_lib.SylogTestCase):
  """The unit test for the data range file-like object."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._resolver_context = context.Context()
    test_file = os.path.join(u'test_data', u'syslog')
    self._os_path_spec = os_path_spec.OSPathSpec(location=test_file)
    # Expose bytes [167, 167 + 1080) of the syslog test file as a data range.
    self._data_range_path_spec = data_range_path_spec.DataRangePathSpec(
        range_offset=167, range_size=1080, parent=self._os_path_spec)

  def testOpenCloseFileObject(self):
    """Test the open and close functionality using a file-like object."""
    os_file_object = os_file_io.OSFile(self._resolver_context)
    os_file_object.open(path_spec=self._os_path_spec)
    file_object = data_range_io.DataRange(
        self._resolver_context, file_object=os_file_object)
    file_object.open()
    self._TestGetSizeFileObject(file_object)
    file_object.close()
    os_file_object.close()

  def testSetRange(self):
    """Test the set data range functionality."""
    os_file_object = os_file_io.OSFile(self._resolver_context)
    os_file_object.open(path_spec=self._os_path_spec)
    file_object = data_range_io.DataRange(
        self._resolver_context, file_object=os_file_object)
    # The range is configured explicitly before open(); the reported size
    # must then equal the configured range size.
    file_object.SetRange(167, 1080)
    file_object.open()
    self.assertEqual(file_object.get_size(), 1080)
    file_object.close()
    os_file_object.close()

  # TODO: add some edge case testing here.

  def testOpenClosePathSpec(self):
    """Test the open and close functionality using a path specification."""
    file_object = data_range_io.DataRange(self._resolver_context)
    file_object.open(path_spec=self._data_range_path_spec)
    self.assertEqual(file_object.get_size(), 1080)
    file_object.close()

  def testSeek(self):
    """Test the seek functionality."""
    file_object = data_range_io.DataRange(self._resolver_context)
    file_object.open(path_spec=self._data_range_path_spec)
    self._TestSeekFileObject(file_object, base_offset=0)
    file_object.close()

  def testRead(self):
    """Test the read functionality."""
    file_object = data_range_io.DataRange(self._resolver_context)
    file_object.open(path_spec=self._data_range_path_spec)
    self._TestReadFileObject(file_object, base_offset=0)
    file_object.close()
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
  unittest.main()
|
[
"unittest.main",
"dfvfs.file_io.data_range_io.DataRange",
"dfvfs.resolver.context.Context",
"dfvfs.path.os_path_spec.OSPathSpec",
"dfvfs.path.data_range_path_spec.DataRangePathSpec",
"dfvfs.file_io.os_file_io.OSFile",
"os.path.join"
] |
[((2690, 2705), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2703, 2705), False, 'import unittest\n'), ((564, 581), 'dfvfs.resolver.context.Context', 'context.Context', ([], {}), '()\n', (579, 581), False, 'from dfvfs.resolver import context\n'), ((598, 635), 'os.path.join', 'os.path.join', (['u"""test_data"""', 'u"""syslog"""'], {}), "(u'test_data', u'syslog')\n", (610, 635), False, 'import os\n'), ((661, 704), 'dfvfs.path.os_path_spec.OSPathSpec', 'os_path_spec.OSPathSpec', ([], {'location': 'test_file'}), '(location=test_file)\n', (684, 704), False, 'from dfvfs.path import os_path_spec\n'), ((738, 842), 'dfvfs.path.data_range_path_spec.DataRangePathSpec', 'data_range_path_spec.DataRangePathSpec', ([], {'range_offset': '(167)', 'range_size': '(1080)', 'parent': 'self._os_path_spec'}), '(range_offset=167, range_size=1080,\n parent=self._os_path_spec)\n', (776, 842), False, 'from dfvfs.path import data_range_path_spec\n'), ((981, 1022), 'dfvfs.file_io.os_file_io.OSFile', 'os_file_io.OSFile', (['self._resolver_context'], {}), '(self._resolver_context)\n', (998, 1022), False, 'from dfvfs.file_io import os_file_io\n'), ((1095, 1170), 'dfvfs.file_io.data_range_io.DataRange', 'data_range_io.DataRange', (['self._resolver_context'], {'file_object': 'os_file_object'}), '(self._resolver_context, file_object=os_file_object)\n', (1118, 1170), False, 'from dfvfs.file_io import data_range_io\n'), ((1398, 1439), 'dfvfs.file_io.os_file_io.OSFile', 'os_file_io.OSFile', (['self._resolver_context'], {}), '(self._resolver_context)\n', (1415, 1439), False, 'from dfvfs.file_io import os_file_io\n'), ((1512, 1587), 'dfvfs.file_io.data_range_io.DataRange', 'data_range_io.DataRange', (['self._resolver_context'], {'file_object': 'os_file_object'}), '(self._resolver_context, file_object=os_file_object)\n', (1535, 1587), False, 'from dfvfs.file_io import data_range_io\n'), ((1935, 1982), 'dfvfs.file_io.data_range_io.DataRange', 'data_range_io.DataRange', (['self._resolver_context'], 
{}), '(self._resolver_context)\n', (1958, 1982), False, 'from dfvfs.file_io import data_range_io\n'), ((2199, 2246), 'dfvfs.file_io.data_range_io.DataRange', 'data_range_io.DataRange', (['self._resolver_context'], {}), '(self._resolver_context)\n', (2222, 2246), False, 'from dfvfs.file_io import data_range_io\n'), ((2469, 2516), 'dfvfs.file_io.data_range_io.DataRange', 'data_range_io.DataRange', (['self._resolver_context'], {}), '(self._resolver_context)\n', (2492, 2516), False, 'from dfvfs.file_io import data_range_io\n')]
|
import FWCore.ParameterSet.Config as cms
# the clients
from DQM.TrackingMonitor.ClientTrackEfficiencySTACosmicMuons_cff import *
from DQM.TrackingMonitor.ClientTrackEfficiencyTkTracks_cff import *
from DQMOffline.Muon.trackResidualsTest_cfi import *
from DQMOffline.Muon.muonRecoTest_cfi import *
from DQMOffline.Muon.muonTestSummary_cfi import *
from DQMOffline.Muon.muonTestSummaryCosmics_cfi import *
from DQMOffline.Muon.EfficencyPlotter_cfi import *
from DQMOffline.Muon.TriggerMatchEfficencyPlotter_cfi import *
from DQMServices.Core.DQMQualityTester import DQMQualityTester
muonSourcesQualityTests = DQMQualityTester(
prescaleFactor = cms.untracked.int32(1),
qtList = cms.untracked.FileInPath('DQMOffline/Muon/data/QualityTests1.xml')
)
muonClientsQualityTests = DQMQualityTester(
prescaleFactor = cms.untracked.int32(1),
qtList = cms.untracked.FileInPath('DQMOffline/Muon/data/QualityTests2.xml')
)
cosmicMuonQualityTests = cms.Sequence(ClientTrackEfficiencyTkTracks*
ClientTrackEfficiencySTACosmicMuons*
muonSourcesQualityTests*
muTrackResidualsTest*
muRecoTest*
muonClientsQualityTests*
muonCosmicTestSummary)
muonQualityTests = cms.Sequence(muonSourcesQualityTests*
muTrackResidualsTest*
effPlotter*
muRecoTest*
muonClientsQualityTests*
muonTestSummary)
muonQualityTests_miniAOD = cms.Sequence(muonSourcesQualityTests*
muTrackResidualsTest*
effPlotter_miniAOD*
muRecoTest*
muonClientsQualityTests*
muonTestSummary*
triggerMatchEffPlotterTightMiniAOD)
|
[
"FWCore.ParameterSet.Config.Sequence",
"FWCore.ParameterSet.Config.untracked.int32",
"FWCore.ParameterSet.Config.untracked.FileInPath"
] |
[((952, 1157), 'FWCore.ParameterSet.Config.Sequence', 'cms.Sequence', (['(ClientTrackEfficiencyTkTracks * ClientTrackEfficiencySTACosmicMuons *\n muonSourcesQualityTests * muTrackResidualsTest * muRecoTest *\n muonClientsQualityTests * muonCosmicTestSummary)'], {}), '(ClientTrackEfficiencyTkTracks *\n ClientTrackEfficiencySTACosmicMuons * muonSourcesQualityTests *\n muTrackResidualsTest * muRecoTest * muonClientsQualityTests *\n muonCosmicTestSummary)\n', (964, 1157), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1388, 1522), 'FWCore.ParameterSet.Config.Sequence', 'cms.Sequence', (['(muonSourcesQualityTests * muTrackResidualsTest * effPlotter * muRecoTest *\n muonClientsQualityTests * muonTestSummary)'], {}), '(muonSourcesQualityTests * muTrackResidualsTest * effPlotter *\n muRecoTest * muonClientsQualityTests * muonTestSummary)\n', (1400, 1522), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1702, 1885), 'FWCore.ParameterSet.Config.Sequence', 'cms.Sequence', (['(muonSourcesQualityTests * muTrackResidualsTest * effPlotter_miniAOD *\n muRecoTest * muonClientsQualityTests * muonTestSummary *\n triggerMatchEffPlotterTightMiniAOD)'], {}), '(muonSourcesQualityTests * muTrackResidualsTest *\n effPlotter_miniAOD * muRecoTest * muonClientsQualityTests *\n muonTestSummary * triggerMatchEffPlotterTightMiniAOD)\n', (1714, 1885), True, 'import FWCore.ParameterSet.Config as cms\n'), ((648, 670), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(1)'], {}), '(1)\n', (667, 670), True, 'import FWCore.ParameterSet.Config as cms\n'), ((685, 751), 'FWCore.ParameterSet.Config.untracked.FileInPath', 'cms.untracked.FileInPath', (['"""DQMOffline/Muon/data/QualityTests1.xml"""'], {}), "('DQMOffline/Muon/data/QualityTests1.xml')\n", (709, 751), True, 'import FWCore.ParameterSet.Config as cms\n'), ((820, 842), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(1)'], {}), '(1)\n', (839, 842), True, 'import 
FWCore.ParameterSet.Config as cms\n'), ((857, 923), 'FWCore.ParameterSet.Config.untracked.FileInPath', 'cms.untracked.FileInPath', (['"""DQMOffline/Muon/data/QualityTests2.xml"""'], {}), "('DQMOffline/Muon/data/QualityTests2.xml')\n", (881, 923), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class MassMailingListMerge(models.TransientModel):
    _name = 'mailing.list.merge'
    _description = 'Merge Mass Mailing List'

    # Lists to merge; pre-filled from the active records (see default_get).
    src_list_ids = fields.Many2many('mailing.list', string='Mailing Lists')
    # Target list; only meaningful when merge_options == 'existing', and
    # replaced by a freshly created list when merge_options == 'new'.
    dest_list_id = fields.Many2one('mailing.list', string='Destination Mailing List')
    merge_options = fields.Selection([
        ('new', 'Merge into a new mailing list'),
        ('existing', 'Merge into an existing mailing list'),
    ], 'Merge Option', required=True, default='new')
    # Name of the list created when merge_options == 'new'.
    new_list_name = fields.Char('New Mailing List Name')
    archive_src_lists = fields.Boolean('Archive source mailing lists', default=True)

    @api.model
    def default_get(self, fields):
        # NOTE(review): the 'fields' parameter shadows the odoo 'fields'
        # module imported above — harmless here since the module is not used
        # in this method, but worth keeping in mind.
        res = super(MassMailingListMerge, self).default_get(fields)
        # Pre-select the records the wizard was launched from; the first one
        # doubles as the default destination list.
        src_list_ids = self.env.context.get('active_ids')
        res.update({
            'src_list_ids': src_list_ids,
            'dest_list_id': src_list_ids and src_list_ids[0] or False,
        })
        return res

    def action_mailing_lists_merge(self):
        # Create the destination list on the fly when merging into a new one.
        if self.merge_options == 'new':
            self.dest_list_id = self.env['mailing.list'].create({
                'name': self.new_list_name,
            }).id
        self.dest_list_id.action_merge(self.src_list_ids, self.archive_src_lists)
        return self.dest_list_id
|
[
"odoo.fields.Selection",
"odoo.fields.Many2one",
"odoo.fields.Char",
"odoo.fields.Many2many",
"odoo.fields.Boolean"
] |
[((288, 344), 'odoo.fields.Many2many', 'fields.Many2many', (['"""mailing.list"""'], {'string': '"""Mailing Lists"""'}), "('mailing.list', string='Mailing Lists')\n", (304, 344), False, 'from odoo import api, fields, models\n'), ((364, 430), 'odoo.fields.Many2one', 'fields.Many2one', (['"""mailing.list"""'], {'string': '"""Destination Mailing List"""'}), "('mailing.list', string='Destination Mailing List')\n", (379, 430), False, 'from odoo import api, fields, models\n'), ((451, 618), 'odoo.fields.Selection', 'fields.Selection', (["[('new', 'Merge into a new mailing list'), ('existing',\n 'Merge into an existing mailing list')]", '"""Merge Option"""'], {'required': '(True)', 'default': '"""new"""'}), "([('new', 'Merge into a new mailing list'), ('existing',\n 'Merge into an existing mailing list')], 'Merge Option', required=True,\n default='new')\n", (467, 618), False, 'from odoo import api, fields, models\n'), ((654, 690), 'odoo.fields.Char', 'fields.Char', (['"""New Mailing List Name"""'], {}), "('New Mailing List Name')\n", (665, 690), False, 'from odoo import api, fields, models\n'), ((715, 775), 'odoo.fields.Boolean', 'fields.Boolean', (['"""Archive source mailing lists"""'], {'default': '(True)'}), "('Archive source mailing lists', default=True)\n", (729, 775), False, 'from odoo import api, fields, models\n')]
|
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import asyncio
import os
from unittest.mock import Mock, patch
from urllib.parse import urlparse
from azure.core.credentials import AccessToken
from azure.identity import KnownAuthorities
from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential
from azure.identity.aio._credentials.managed_identity import ImdsCredential, MsiCredential
from azure.identity._constants import EnvironmentVariables
import pytest
from helpers import async_validating_transport, mock_response, Request
from test_shared_cache_credential import build_aad_response, get_account_event, populated_cache
@pytest.mark.asyncio
async def test_default_credential_authority():
    """DefaultAzureCredential should pass ``authority`` through to the AAD
    credentials in its chain (environment, shared cache), managed identity
    should ignore it, and the default should be the public cloud authority.
    """
    authority = "authority.com"
    expected_access_token = "***"
    response = mock_response(
        json_payload={
            "access_token": expected_access_token,
            "expires_in": 0,
            "expires_on": 42,
            "not_before": 0,
            "resource": "scope",
            "token_type": "Bearer",
        }
    )

    async def exercise_credentials(authority_kwarg, expected_authority=None):
        expected_authority = expected_authority or authority_kwarg

        async def send(request, **_):
            # Every token request must target the expected authority host.
            url = urlparse(request.url)
            assert url.scheme == "https", "Unexpected scheme '{}'".format(url.scheme)
            assert url.netloc == expected_authority, "Expected authority '{}', actual was '{}'".format(
                expected_authority, url.netloc
            )
            return response

        # Async transport stub that answers every request with `response`.
        # Fix: replaces asyncio.coroutine(lambda *_, **__: response), which was
        # deprecated since Python 3.8 and removed in Python 3.11.
        async def send_response(*_, **__):
            return response

        # environment credential configured with client secret should respect authority
        environment = {
            EnvironmentVariables.AZURE_CLIENT_ID: "client_id",
            EnvironmentVariables.AZURE_CLIENT_SECRET: "secret",
            EnvironmentVariables.AZURE_TENANT_ID: "tenant_id",
        }
        with patch("os.environ", environment):
            transport = Mock(send=send)
            if authority_kwarg:
                credential = DefaultAzureCredential(authority=authority_kwarg, transport=transport)
            else:
                credential = DefaultAzureCredential(transport=transport)
            access_token, _ = await credential.get_token("scope")
            assert access_token == expected_access_token

        # managed identity credential should ignore authority
        with patch("os.environ", {EnvironmentVariables.MSI_ENDPOINT: "https://some.url"}):
            transport = Mock(send=send_response)
            if authority_kwarg:
                credential = DefaultAzureCredential(authority=authority_kwarg, transport=transport)
            else:
                credential = DefaultAzureCredential(transport=transport)
            access_token, _ = await credential.get_token("scope")
            assert access_token == expected_access_token

        # shared cache credential should respect authority
        upn = os.environ.get(EnvironmentVariables.AZURE_USERNAME, "spam@eggs")  # preferring environment values to
        tenant = os.environ.get(EnvironmentVariables.AZURE_TENANT_ID, "tenant")  # prevent failure during live runs
        account = get_account_event(username=upn, uid="guid", utid=tenant, authority=authority_kwarg)
        cache = populated_cache(account)
        with patch.object(SharedTokenCacheCredential, "supported"):
            credential = DefaultAzureCredential(_cache=cache, authority=authority_kwarg, transport=Mock(send=send))
            access_token, _ = await credential.get_token("scope")
            assert access_token == expected_access_token

    # all credentials not representing managed identities should use a
    # specified authority or default to public cloud
    await exercise_credentials("authority.com")
    await exercise_credentials(None, KnownAuthorities.AZURE_PUBLIC_CLOUD)
def test_exclude_options():
    """Each exclude_* keyword should remove exactly the matching
    credential class(es) from DefaultAzureCredential's chain and nothing
    else."""

    def assert_credentials_not_present(chain, *credential_classes):
        actual = {c.__class__ for c in chain.credentials}
        assert len(actual)

        # no unexpected credential is in the chain
        excluded = set(credential_classes)
        assert len(actual & excluded) == 0

        # only excluded credentials have been excluded from the default
        default = {c.__class__ for c in DefaultAzureCredential().credentials}
        assert actual <= default  # n.b. we know actual is non-empty
        assert default - actual <= excluded

    # with no environment variables set, ManagedIdentityCredential = ImdsCredential
    with patch("os.environ", {}):
        credential = DefaultAzureCredential(exclude_managed_identity_credential=True)
        assert_credentials_not_present(credential, ImdsCredential, MsiCredential)

    # with $MSI_ENDPOINT set, ManagedIdentityCredential = MsiCredential
    with patch("os.environ", {"MSI_ENDPOINT": "spam"}):
        credential = DefaultAzureCredential(exclude_managed_identity_credential=True)
        assert_credentials_not_present(credential, ImdsCredential, MsiCredential)

    # the shared-cache credential is only in the chain on platforms where
    # the cache is supported, so only test its exclusion there
    if SharedTokenCacheCredential.supported():
        credential = DefaultAzureCredential(exclude_shared_token_cache_credential=True)
        assert_credentials_not_present(credential, SharedTokenCacheCredential)
@pytest.mark.asyncio
async def test_shared_cache_tenant_id():
    """shared_cache_tenant_id should select which cached account is used and
    take precedence over $AZURE_TENANT_ID."""
    # NOTE(review): the "<PASSWORD>" fragments below look like redaction
    # artifacts from scrubbing this file; the token values only need to be
    # distinct from each other, which they still are.
    expected_access_token = "<PASSWORD>-access-token"
    refresh_token_a = "refresh-token-a"
    refresh_token_b = "<PASSWORD>"

    # The value of upn is arbitrary because this test verifies the credential's behavior given a specified
    # tenant. During a complete live test run, $AZURE_USERNAME will have a value which DefaultAzureCredential
    # should pass to SharedTokenCacheCredential. This test will fail if the mock accounts don't match that value.
    upn = os.environ.get(EnvironmentVariables.AZURE_USERNAME, "spam@eggs")

    tenant_a = "tenant-a"
    tenant_b = "tenant-b"

    # two cached accounts, same username, different tenants -> shared_cache_tenant_id should prevail
    account_a = get_account_event(username=upn, uid="another-guid", utid=tenant_a, refresh_token=refresh_token_a)
    account_b = get_account_event(username=upn, uid="more-guid", utid=tenant_b, refresh_token=refresh_token_b)
    cache = populated_cache(account_a, account_b)

    credential = get_credential_for_shared_cache_test(
        refresh_token_b, expected_access_token, cache, shared_cache_tenant_id=tenant_b
    )
    token = await credential.get_token("scope")
    assert token.token == expected_access_token

    # redundantly specifying shared_cache_username makes no difference
    credential = get_credential_for_shared_cache_test(
        refresh_token_b, expected_access_token, cache, shared_cache_tenant_id=tenant_b, shared_cache_username=upn
    )
    token = await credential.get_token("scope")
    assert token.token == expected_access_token

    # shared_cache_tenant_id should prevail over AZURE_TENANT_ID
    with patch("os.environ", {EnvironmentVariables.AZURE_TENANT_ID: tenant_a}):
        credential = get_credential_for_shared_cache_test(
            refresh_token_b, expected_access_token, cache, shared_cache_tenant_id=tenant_b
        )
        token = await credential.get_token("scope")
        assert token.token == expected_access_token

    # AZURE_TENANT_ID should be used when shared_cache_tenant_id isn't specified
    with patch("os.environ", {EnvironmentVariables.AZURE_TENANT_ID: tenant_b}):
        credential = get_credential_for_shared_cache_test(refresh_token_b, expected_access_token, cache)
        token = await credential.get_token("scope")
        assert token.token == expected_access_token
@pytest.mark.asyncio
async def test_shared_cache_username():
    """shared_cache_username should select which cached account is used and
    take precedence over $AZURE_USERNAME."""
    expected_access_token = "expected-access-token"
    refresh_token_a = "refresh-token-a"
    refresh_token_b = "refresh-token-b"
    # NOTE(review): the "<PASSWORD>" fragments below look like redaction
    # artifacts; the two usernames only need to be distinct, which they are.
    upn_a = "spam<PASSWORD>"
    upn_b = "eg<PASSWORD>"

    # The value of tenant_id is arbitrary because this test verifies the credential's behavior given a specified
    # username. During a complete live test run, $AZURE_TENANT_ID will have a value which DefaultAzureCredential
    # should pass to SharedTokenCacheCredential. This test will fail if the mock accounts don't match that value.
    tenant_id = os.environ.get(EnvironmentVariables.AZURE_TENANT_ID, "the-tenant")

    # two cached accounts, same tenant, different usernames -> shared_cache_username should prevail
    account_a = get_account_event(username=upn_a, uid="another-guid", utid=tenant_id, refresh_token=refresh_token_a)
    account_b = get_account_event(username=upn_b, uid="more-guid", utid=tenant_id, refresh_token=refresh_token_b)
    cache = populated_cache(account_a, account_b)

    credential = get_credential_for_shared_cache_test(
        refresh_token_a, expected_access_token, cache, shared_cache_username=upn_a
    )
    token = await credential.get_token("scope")
    assert token.token == expected_access_token

    # shared_cache_username should prevail over AZURE_USERNAME
    with patch("os.environ", {EnvironmentVariables.AZURE_USERNAME: upn_b}):
        credential = get_credential_for_shared_cache_test(
            refresh_token_a, expected_access_token, cache, shared_cache_username=upn_a
        )
        token = await credential.get_token("scope")
        assert token.token == expected_access_token

    # AZURE_USERNAME should be used when shared_cache_username isn't specified
    with patch("os.environ", {EnvironmentVariables.AZURE_USERNAME: upn_b}):
        credential = get_credential_for_shared_cache_test(refresh_token_b, expected_access_token, cache)
        token = await credential.get_token("scope")
        assert token.token == expected_access_token
def get_credential_for_shared_cache_test(expected_refresh_token, expected_access_token, cache, **kwargs):
    """Build a DefaultAzureCredential whose only usable credential is the
    shared-cache one, backed by the given mock *cache*.

    The validating transport raises if a token request doesn't carry
    *expected_refresh_token*, and answers with *expected_access_token*;
    extra **kwargs (e.g. shared_cache_tenant_id) are forwarded to
    DefaultAzureCredential.
    """
    exclude_other_credentials = {
        option: True for option in ("exclude_environment_credential", "exclude_managed_identity_credential")
    }
    # validating transport will raise if the shared cache credential isn't used, or selects the wrong refresh token
    transport = async_validating_transport(
        requests=[Request(required_data={"refresh_token": expected_refresh_token})],
        responses=[mock_response(json_payload=build_aad_response(access_token=expected_access_token))],
    )
    # this credential uses a mock shared cache, so it works on all platforms
    with patch.object(SharedTokenCacheCredential, "supported", lambda: True):
        return DefaultAzureCredential(_cache=cache, transport=transport, **exclude_other_credentials, **kwargs)
|
[
"unittest.mock.patch.object",
"helpers.mock_response",
"test_shared_cache_credential.get_account_event",
"asyncio.coroutine",
"test_shared_cache_credential.build_aad_response",
"test_shared_cache_credential.populated_cache",
"unittest.mock.Mock",
"os.environ.get",
"unittest.mock.patch",
"azure.identity.aio.SharedTokenCacheCredential.supported",
"azure.identity.aio.DefaultAzureCredential",
"helpers.Request",
"urllib.parse.urlparse"
] |
[((905, 1077), 'helpers.mock_response', 'mock_response', ([], {'json_payload': "{'access_token': expected_access_token, 'expires_in': 0, 'expires_on': 42,\n 'not_before': 0, 'resource': 'scope', 'token_type': 'Bearer'}"}), "(json_payload={'access_token': expected_access_token,\n 'expires_in': 0, 'expires_on': 42, 'not_before': 0, 'resource': 'scope',\n 'token_type': 'Bearer'})\n", (918, 1077), False, 'from helpers import async_validating_transport, mock_response, Request\n'), ((5148, 5186), 'azure.identity.aio.SharedTokenCacheCredential.supported', 'SharedTokenCacheCredential.supported', ([], {}), '()\n', (5184, 5186), False, 'from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential\n'), ((5890, 5954), 'os.environ.get', 'os.environ.get', (['EnvironmentVariables.AZURE_USERNAME', '"""spam@eggs"""'], {}), "(EnvironmentVariables.AZURE_USERNAME, 'spam@eggs')\n", (5904, 5954), False, 'import os\n'), ((6126, 6227), 'test_shared_cache_credential.get_account_event', 'get_account_event', ([], {'username': 'upn', 'uid': '"""another-guid"""', 'utid': 'tenant_a', 'refresh_token': 'refresh_token_a'}), "(username=upn, uid='another-guid', utid=tenant_a,\n refresh_token=refresh_token_a)\n", (6143, 6227), False, 'from test_shared_cache_credential import build_aad_response, get_account_event, populated_cache\n'), ((6240, 6338), 'test_shared_cache_credential.get_account_event', 'get_account_event', ([], {'username': 'upn', 'uid': '"""more-guid"""', 'utid': 'tenant_b', 'refresh_token': 'refresh_token_b'}), "(username=upn, uid='more-guid', utid=tenant_b,\n refresh_token=refresh_token_b)\n", (6257, 6338), False, 'from test_shared_cache_credential import build_aad_response, get_account_event, populated_cache\n'), ((6347, 6384), 'test_shared_cache_credential.populated_cache', 'populated_cache', (['account_a', 'account_b'], {}), '(account_a, account_b)\n', (6362, 6384), False, 'from test_shared_cache_credential import build_aad_response, get_account_event, 
populated_cache\n'), ((8346, 8412), 'os.environ.get', 'os.environ.get', (['EnvironmentVariables.AZURE_TENANT_ID', '"""the-tenant"""'], {}), "(EnvironmentVariables.AZURE_TENANT_ID, 'the-tenant')\n", (8360, 8412), False, 'import os\n'), ((8530, 8634), 'test_shared_cache_credential.get_account_event', 'get_account_event', ([], {'username': 'upn_a', 'uid': '"""another-guid"""', 'utid': 'tenant_id', 'refresh_token': 'refresh_token_a'}), "(username=upn_a, uid='another-guid', utid=tenant_id,\n refresh_token=refresh_token_a)\n", (8547, 8634), False, 'from test_shared_cache_credential import build_aad_response, get_account_event, populated_cache\n'), ((8647, 8748), 'test_shared_cache_credential.get_account_event', 'get_account_event', ([], {'username': 'upn_b', 'uid': '"""more-guid"""', 'utid': 'tenant_id', 'refresh_token': 'refresh_token_b'}), "(username=upn_b, uid='more-guid', utid=tenant_id,\n refresh_token=refresh_token_b)\n", (8664, 8748), False, 'from test_shared_cache_credential import build_aad_response, get_account_event, populated_cache\n'), ((8757, 8794), 'test_shared_cache_credential.populated_cache', 'populated_cache', (['account_a', 'account_b'], {}), '(account_a, account_b)\n', (8772, 8794), False, 'from test_shared_cache_credential import build_aad_response, get_account_event, populated_cache\n'), ((3071, 3135), 'os.environ.get', 'os.environ.get', (['EnvironmentVariables.AZURE_USERNAME', '"""spam@eggs"""'], {}), "(EnvironmentVariables.AZURE_USERNAME, 'spam@eggs')\n", (3085, 3135), False, 'import os\n'), ((3189, 3251), 'os.environ.get', 'os.environ.get', (['EnvironmentVariables.AZURE_TENANT_ID', '"""tenant"""'], {}), "(EnvironmentVariables.AZURE_TENANT_ID, 'tenant')\n", (3203, 3251), False, 'import os\n'), ((3306, 3394), 'test_shared_cache_credential.get_account_event', 'get_account_event', ([], {'username': 'upn', 'uid': '"""guid"""', 'utid': 'tenant', 'authority': 'authority_kwarg'}), "(username=upn, uid='guid', utid=tenant, authority=\n 
authority_kwarg)\n", (3323, 3394), False, 'from test_shared_cache_credential import build_aad_response, get_account_event, populated_cache\n'), ((3406, 3430), 'test_shared_cache_credential.populated_cache', 'populated_cache', (['account'], {}), '(account)\n', (3421, 3430), False, 'from test_shared_cache_credential import build_aad_response, get_account_event, populated_cache\n'), ((4650, 4673), 'unittest.mock.patch', 'patch', (['"""os.environ"""', '{}'], {}), "('os.environ', {})\n", (4655, 4673), False, 'from unittest.mock import Mock, patch\n'), ((4696, 4760), 'azure.identity.aio.DefaultAzureCredential', 'DefaultAzureCredential', ([], {'exclude_managed_identity_credential': '(True)'}), '(exclude_managed_identity_credential=True)\n', (4718, 4760), False, 'from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential\n'), ((4925, 4970), 'unittest.mock.patch', 'patch', (['"""os.environ"""', "{'MSI_ENDPOINT': 'spam'}"], {}), "('os.environ', {'MSI_ENDPOINT': 'spam'})\n", (4930, 4970), False, 'from unittest.mock import Mock, patch\n'), ((4993, 5057), 'azure.identity.aio.DefaultAzureCredential', 'DefaultAzureCredential', ([], {'exclude_managed_identity_credential': '(True)'}), '(exclude_managed_identity_credential=True)\n', (5015, 5057), False, 'from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential\n'), ((5209, 5275), 'azure.identity.aio.DefaultAzureCredential', 'DefaultAzureCredential', ([], {'exclude_shared_token_cache_credential': '(True)'}), '(exclude_shared_token_cache_credential=True)\n', (5231, 5275), False, 'from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential\n'), ((7048, 7117), 'unittest.mock.patch', 'patch', (['"""os.environ"""', '{EnvironmentVariables.AZURE_TENANT_ID: tenant_a}'], {}), "('os.environ', {EnvironmentVariables.AZURE_TENANT_ID: tenant_a})\n", (7053, 7117), False, 'from unittest.mock import Mock, patch\n'), ((7466, 7535), 'unittest.mock.patch', 'patch', 
(['"""os.environ"""', '{EnvironmentVariables.AZURE_TENANT_ID: tenant_b}'], {}), "('os.environ', {EnvironmentVariables.AZURE_TENANT_ID: tenant_b})\n", (7471, 7535), False, 'from unittest.mock import Mock, patch\n'), ((9109, 9174), 'unittest.mock.patch', 'patch', (['"""os.environ"""', '{EnvironmentVariables.AZURE_USERNAME: upn_b}'], {}), "('os.environ', {EnvironmentVariables.AZURE_USERNAME: upn_b})\n", (9114, 9174), False, 'from unittest.mock import Mock, patch\n'), ((9517, 9582), 'unittest.mock.patch', 'patch', (['"""os.environ"""', '{EnvironmentVariables.AZURE_USERNAME: upn_b}'], {}), "('os.environ', {EnvironmentVariables.AZURE_USERNAME: upn_b})\n", (9522, 9582), False, 'from unittest.mock import Mock, patch\n'), ((10485, 10553), 'unittest.mock.patch.object', 'patch.object', (['SharedTokenCacheCredential', '"""supported"""', '(lambda : True)'], {}), "(SharedTokenCacheCredential, 'supported', lambda : True)\n", (10497, 10553), False, 'from unittest.mock import Mock, patch\n'), ((10569, 10670), 'azure.identity.aio.DefaultAzureCredential', 'DefaultAzureCredential', ([], {'_cache': 'cache', 'transport': 'transport'}), '(_cache=cache, transport=transport, **\n exclude_other_credentials, **kwargs)\n', (10591, 10670), False, 'from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential\n'), ((1370, 1391), 'urllib.parse.urlparse', 'urlparse', (['request.url'], {}), '(request.url)\n', (1378, 1391), False, 'from urllib.parse import urlparse\n'), ((1997, 2029), 'unittest.mock.patch', 'patch', (['"""os.environ"""', 'environment'], {}), "('os.environ', environment)\n", (2002, 2029), False, 'from unittest.mock import Mock, patch\n'), ((2055, 2070), 'unittest.mock.Mock', 'Mock', ([], {'send': 'send'}), '(send=send)\n', (2059, 2070), False, 'from unittest.mock import Mock, patch\n'), ((2493, 2569), 'unittest.mock.patch', 'patch', (['"""os.environ"""', "{EnvironmentVariables.MSI_ENDPOINT: 'https://some.url'}"], {}), "('os.environ', 
{EnvironmentVariables.MSI_ENDPOINT: 'https://some.url'})\n", (2498, 2569), False, 'from unittest.mock import Mock, patch\n'), ((3444, 3497), 'unittest.mock.patch.object', 'patch.object', (['SharedTokenCacheCredential', '"""supported"""'], {}), "(SharedTokenCacheCredential, 'supported')\n", (3456, 3497), False, 'from unittest.mock import Mock, patch\n'), ((2132, 2202), 'azure.identity.aio.DefaultAzureCredential', 'DefaultAzureCredential', ([], {'authority': 'authority_kwarg', 'transport': 'transport'}), '(authority=authority_kwarg, transport=transport)\n', (2154, 2202), False, 'from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential\n'), ((2250, 2293), 'azure.identity.aio.DefaultAzureCredential', 'DefaultAzureCredential', ([], {'transport': 'transport'}), '(transport=transport)\n', (2272, 2293), False, 'from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential\n'), ((2712, 2782), 'azure.identity.aio.DefaultAzureCredential', 'DefaultAzureCredential', ([], {'authority': 'authority_kwarg', 'transport': 'transport'}), '(authority=authority_kwarg, transport=transport)\n', (2734, 2782), False, 'from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential\n'), ((2830, 2873), 'azure.identity.aio.DefaultAzureCredential', 'DefaultAzureCredential', ([], {'transport': 'transport'}), '(transport=transport)\n', (2852, 2873), False, 'from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential\n'), ((10221, 10285), 'helpers.Request', 'Request', ([], {'required_data': "{'refresh_token': expected_refresh_token}"}), "(required_data={'refresh_token': expected_refresh_token})\n", (10228, 10285), False, 'from helpers import async_validating_transport, mock_response, Request\n'), ((2605, 2649), 'asyncio.coroutine', 'asyncio.coroutine', (['(lambda *_, **__: response)'], {}), '(lambda *_, **__: response)\n', (2622, 2649), False, 'import asyncio\n'), ((3598, 3613), 'unittest.mock.Mock', 'Mock', ([], 
{'send': 'send'}), '(send=send)\n', (3602, 3613), False, 'from unittest.mock import Mock, patch\n'), ((4405, 4429), 'azure.identity.aio.DefaultAzureCredential', 'DefaultAzureCredential', ([], {}), '()\n', (4427, 4429), False, 'from azure.identity.aio import DefaultAzureCredential, SharedTokenCacheCredential\n'), ((10334, 10388), 'test_shared_cache_credential.build_aad_response', 'build_aad_response', ([], {'access_token': 'expected_access_token'}), '(access_token=expected_access_token)\n', (10352, 10388), False, 'from test_shared_cache_credential import build_aad_response, get_account_event, populated_cache\n')]
|
#!/usr/bin/env python3
"""Plot per-node subgoal scores on a network graph.

Loads a pickled graph/layout plus precomputed spectra and correlation
histories, then renders:
  * a Laplacian-eigenvector baseline score for embedding dims 1..4, and
  * one plot per alpha value in the sweep, using the learned correlations.
"""
import sys
sys.path.append("../")  # make the sibling plotlib package importable
import plotlib
import numpy
import pylab
import networkx
import pickle

# Graph structure and node positions produced by the generation step.
G, pos = pickle.load(open("graph.pickle", "rb"))
# alpha sweep values, measure history, and correlation history.
a_arr, m_hist, cor_hist = pickle.load(open("results.pickle", "rb"))

# NOTE(review): `e`, `sort_ind` and `m_hist` are loaded but unused below —
# presumably kept for interactive inspection; confirm before removing.
e = numpy.loadtxt("eigenval.csv", delimiter=",")
v = numpy.loadtxt("eigenvec.csv", delimiter=",")
group_id = numpy.loadtxt("group_id.csv", delimiter=",")
sort_ind = numpy.argsort(group_id)

# Row-normalized adjacency = random-walk transition matrix.
A = numpy.loadtxt("adjacency.csv", delimiter=",")
Dnorm = numpy.diag(numpy.sum(A, axis=1) ** -1)
prob = Dnorm @ A
P = v.shape[0]

# Baseline: score from the leading non-trivial Laplacian eigenvectors.
for dim in range(1, 5):
    x = v[:, 1:dim + 1]
    x = x / numpy.linalg.norm(x, axis=1, keepdims=True)
    r = numpy.zeros(P)
    for i in range(P):
        # Expected half-distance (1 - cosine similarity)/2 to neighbors,
        # weighted by transition probability.
        r[i] = numpy.sum(prob[i, :] * 0.5 * (1 - numpy.sum(x * x[i:i + 1, :], axis=1)))
    plotlib.plot_color_network_positive("subgoal_laplacian" + str(dim) + ".svg", G, pos, r)

# Learned correlations: one network plot per alpha value.
for a_ind in range(len(a_arr)):
    a = numpy.around(a_arr[a_ind], decimals=1)
    cor = cor_hist[a_ind]
    r = numpy.zeros(P)
    for i in range(P):
        r[i] = numpy.sum(prob[i, :] * 0.5 * (1 - cor[i, :]))
    plotlib.plot_color_network_positive("subgoal_network" + str(a_ind).zfill(2) + ".svg", G, pos, r, title=r"$\alpha$=" + str(a))
|
[
"sys.path.append",
"numpy.sum",
"numpy.zeros",
"numpy.argsort",
"numpy.around",
"numpy.linalg.norm",
"numpy.loadtxt"
] |
[((35, 57), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (50, 57), False, 'import sys\n'), ((254, 298), 'numpy.loadtxt', 'numpy.loadtxt', (['"""eigenval.csv"""'], {'delimiter': '""","""'}), "('eigenval.csv', delimiter=',')\n", (267, 298), False, 'import numpy\n'), ((301, 345), 'numpy.loadtxt', 'numpy.loadtxt', (['"""eigenvec.csv"""'], {'delimiter': '""","""'}), "('eigenvec.csv', delimiter=',')\n", (314, 345), False, 'import numpy\n'), ((355, 399), 'numpy.loadtxt', 'numpy.loadtxt', (['"""group_id.csv"""'], {'delimiter': '""","""'}), "('group_id.csv', delimiter=',')\n", (368, 399), False, 'import numpy\n'), ((410, 433), 'numpy.argsort', 'numpy.argsort', (['group_id'], {}), '(group_id)\n', (423, 433), False, 'import numpy\n'), ((437, 482), 'numpy.loadtxt', 'numpy.loadtxt', (['"""adjacency.csv"""'], {'delimiter': '""","""'}), "('adjacency.csv', delimiter=',')\n", (450, 482), False, 'import numpy\n'), ((649, 663), 'numpy.zeros', 'numpy.zeros', (['P'], {}), '(P)\n', (660, 663), False, 'import numpy\n'), ((884, 922), 'numpy.around', 'numpy.around', (['a_arr[a_ind]'], {'decimals': '(1)'}), '(a_arr[a_ind], decimals=1)\n', (896, 922), False, 'import numpy\n'), ((952, 966), 'numpy.zeros', 'numpy.zeros', (['P'], {}), '(P)\n', (963, 966), False, 'import numpy\n'), ((499, 519), 'numpy.sum', 'numpy.sum', (['A'], {'axis': '(1)'}), '(A, axis=1)\n', (508, 519), False, 'import numpy\n'), ((601, 644), 'numpy.linalg.norm', 'numpy.linalg.norm', (['x'], {'axis': '(1)', 'keepdims': '(True)'}), '(x, axis=1, keepdims=True)\n', (618, 644), False, 'import numpy\n'), ((1003, 1048), 'numpy.sum', 'numpy.sum', (['(prob[i, :] * 0.5 * (1 - cor[i, :]))'], {}), '(prob[i, :] * 0.5 * (1 - cor[i, :]))\n', (1012, 1048), False, 'import numpy\n'), ((727, 763), 'numpy.sum', 'numpy.sum', (['(x * x[i:i + 1, :])'], {'axis': '(1)'}), '(x * x[i:i + 1, :], axis=1)\n', (736, 763), False, 'import numpy\n')]
|
from flask_restful import reqparse, Resource, fields, marshal_with
from flask_jwt_extended import jwt_required, get_jwt_identity
from mini_gplus.daos.user import find_user
from mini_gplus.daos.post import dangerously_get_post
from mini_gplus.daos.comment import create_comment, dangerously_get_comment, delete_comment
from mini_gplus.daos.exceptions import UnauthorizedAccess
from .mention import check_mentioned_user_ids
from .users import user_fields
from .media import check_media_object_names, MediaUrls
from .s3 import delete_from_s3
# Maximum number of media objects a single comment may carry.
MaxCommentMediaCount = 1
# Request parser shared by all comment-creation endpoints below.
comment_parser = reqparse.RequestParser()
comment_parser.add_argument('content', type=str, required=True)
comment_parser.add_argument('mentioned_user_ids', type=str, action='append', default=[])
comment_parser.add_argument('media_object_names', type=str, action="append", default=[])
# Marshalling schema for a single comment (used for nested replies too).
nested_comment_fields = {
    'id': fields.String(attribute='eid'),
    'created_at_seconds': fields.Integer(attribute='created_at'),
    'author': fields.Nested(user_fields),
    'content': fields.String,
    'deleted': fields.Boolean,
    'media_urls': MediaUrls(attribute='media_list'),
}
# Top-level comment schema: same fields plus the list of nested replies.
comment_fields = dict({
    'comments': fields.List(fields.Nested(nested_comment_fields))
}, **nested_comment_fields)
class Comments(Resource):
    @jwt_required()
    @marshal_with(comment_fields)
    def post(self, post_id: str):
        """Create a top-level comment on the given post."""
        author = find_user(get_jwt_identity())
        parent_post = dangerously_get_post(post_id)
        args = comment_parser.parse_args()
        mentioned = check_mentioned_user_ids(args['mentioned_user_ids'])
        media = check_media_object_names(args['media_object_names'], MaxCommentMediaCount)
        new_comment = create_comment(
            author,
            content=args['content'],
            parent_post=parent_post,
            parent_comment=None,
            mentioned_users=mentioned,
            media_list=media
        )
        return new_comment, 201
class Comment(Resource):
    @jwt_required()
    def delete(self, post_id: str, comment_id: str):
        """Delete a top-level comment; only its author may do so."""
        requester = find_user(get_jwt_identity())
        parent_post = dangerously_get_post(post_id)
        target = dangerously_get_comment(comment_id, parent_post)
        if requester != target.author:
            raise UnauthorizedAccess()
        # Remove any attached media from S3 before deleting the comment.
        for media_name in target.media_list:
            delete_from_s3(media_name)
        removed = delete_comment(requester, comment_id, parent_post)
        return {'id': removed.eid}, 201
class NestedComments(Resource):
    @jwt_required()
    @marshal_with(nested_comment_fields)
    def post(self, post_id: str, comment_id: str):
        """Create a reply (second-level comment) under an existing comment."""
        replier = find_user(get_jwt_identity())
        parent_post = dangerously_get_post(post_id)
        parent_comment = dangerously_get_comment(comment_id, parent_post)
        # A comment absent from the post's comments2 list is itself a reply;
        # replying to it would exceed the two-level nesting limit.
        if not parent_post.comments2.filter(eid=parent_comment.eid):
            return {'msg': 'Cannot nest more than two levels of comment'}, 403
        args = comment_parser.parse_args()
        reply = create_comment(
            replier,
            content=args['content'],
            parent_post=parent_post,
            parent_comment=parent_comment,
            mentioned_users=check_mentioned_user_ids(args['mentioned_user_ids']),
            media_list=check_media_object_names(args['media_object_names'], MaxCommentMediaCount)
        )
        return reply, 201
class NestedComment(Resource):
    @jwt_required()
    def delete(self, post_id: str, comment_id: str, nested_comment_id: str):
        """Delete a nested comment (reply); only its author may do so."""
        requester = find_user(get_jwt_identity())
        parent_post = dangerously_get_post(post_id)
        reply = dangerously_get_comment(nested_comment_id, parent_post)
        if requester != reply.author:
            raise UnauthorizedAccess()
        # Drop attached media from S3 before removing the comment itself.
        for media_name in reply.media_list:
            delete_from_s3(media_name)
        removed = delete_comment(requester, nested_comment_id, parent_post)
        return {'id': removed.eid}, 201
|
[
"flask_restful.fields.String",
"flask_restful.fields.Nested",
"flask_restful.fields.Integer",
"flask_jwt_extended.get_jwt_identity",
"mini_gplus.daos.comment.dangerously_get_comment",
"mini_gplus.daos.comment.delete_comment",
"flask_restful.reqparse.RequestParser",
"flask_jwt_extended.jwt_required",
"mini_gplus.daos.user.find_user",
"mini_gplus.daos.exceptions.UnauthorizedAccess",
"mini_gplus.daos.post.dangerously_get_post",
"flask_restful.marshal_with"
] |
[((583, 607), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (605, 607), False, 'from flask_restful import reqparse, Resource, fields, marshal_with\n'), ((887, 917), 'flask_restful.fields.String', 'fields.String', ([], {'attribute': '"""eid"""'}), "(attribute='eid')\n", (900, 917), False, 'from flask_restful import reqparse, Resource, fields, marshal_with\n'), ((945, 983), 'flask_restful.fields.Integer', 'fields.Integer', ([], {'attribute': '"""created_at"""'}), "(attribute='created_at')\n", (959, 983), False, 'from flask_restful import reqparse, Resource, fields, marshal_with\n'), ((999, 1025), 'flask_restful.fields.Nested', 'fields.Nested', (['user_fields'], {}), '(user_fields)\n', (1012, 1025), False, 'from flask_restful import reqparse, Resource, fields, marshal_with\n'), ((1295, 1309), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (1307, 1309), False, 'from flask_jwt_extended import jwt_required, get_jwt_identity\n'), ((1315, 1343), 'flask_restful.marshal_with', 'marshal_with', (['comment_fields'], {}), '(comment_fields)\n', (1327, 1343), False, 'from flask_restful import reqparse, Resource, fields, marshal_with\n'), ((2004, 2018), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (2016, 2018), False, 'from flask_jwt_extended import jwt_required, get_jwt_identity\n'), ((2543, 2557), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (2555, 2557), False, 'from flask_jwt_extended import jwt_required, get_jwt_identity\n'), ((2563, 2598), 'flask_restful.marshal_with', 'marshal_with', (['nested_comment_fields'], {}), '(nested_comment_fields)\n', (2575, 2598), False, 'from flask_restful import reqparse, Resource, fields, marshal_with\n'), ((3499, 3513), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (3511, 3513), False, 'from flask_jwt_extended import jwt_required, get_jwt_identity\n'), ((1460, 1478), 'flask_jwt_extended.get_jwt_identity', 
'get_jwt_identity', ([], {}), '()\n', (1476, 1478), False, 'from flask_jwt_extended import jwt_required, get_jwt_identity\n'), ((1494, 1512), 'mini_gplus.daos.user.find_user', 'find_user', (['user_id'], {}), '(user_id)\n', (1503, 1512), False, 'from mini_gplus.daos.user import find_user\n'), ((1528, 1557), 'mini_gplus.daos.post.dangerously_get_post', 'dangerously_get_post', (['post_id'], {}), '(post_id)\n', (1548, 1557), False, 'from mini_gplus.daos.post import dangerously_get_post\n'), ((2090, 2108), 'flask_jwt_extended.get_jwt_identity', 'get_jwt_identity', ([], {}), '()\n', (2106, 2108), False, 'from flask_jwt_extended import jwt_required, get_jwt_identity\n'), ((2124, 2142), 'mini_gplus.daos.user.find_user', 'find_user', (['user_id'], {}), '(user_id)\n', (2133, 2142), False, 'from mini_gplus.daos.user import find_user\n'), ((2158, 2187), 'mini_gplus.daos.post.dangerously_get_post', 'dangerously_get_post', (['post_id'], {}), '(post_id)\n', (2178, 2187), False, 'from mini_gplus.daos.post import dangerously_get_post\n'), ((2207, 2248), 'mini_gplus.daos.comment.dangerously_get_comment', 'dangerously_get_comment', (['comment_id', 'post'], {}), '(comment_id, post)\n', (2230, 2248), False, 'from mini_gplus.daos.comment import create_comment, dangerously_get_comment, delete_comment\n'), ((2417, 2455), 'mini_gplus.daos.comment.delete_comment', 'delete_comment', (['user', 'comment_id', 'post'], {}), '(user, comment_id, post)\n', (2431, 2455), False, 'from mini_gplus.daos.comment import create_comment, dangerously_get_comment, delete_comment\n'), ((2738, 2756), 'flask_jwt_extended.get_jwt_identity', 'get_jwt_identity', ([], {}), '()\n', (2754, 2756), False, 'from flask_jwt_extended import jwt_required, get_jwt_identity\n'), ((2772, 2790), 'mini_gplus.daos.user.find_user', 'find_user', (['user_id'], {}), '(user_id)\n', (2781, 2790), False, 'from mini_gplus.daos.user import find_user\n'), ((2806, 2835), 'mini_gplus.daos.post.dangerously_get_post', 'dangerously_get_post', 
(['post_id'], {}), '(post_id)\n', (2826, 2835), False, 'from mini_gplus.daos.post import dangerously_get_post\n'), ((2854, 2895), 'mini_gplus.daos.comment.dangerously_get_comment', 'dangerously_get_comment', (['comment_id', 'post'], {}), '(comment_id, post)\n', (2877, 2895), False, 'from mini_gplus.daos.comment import create_comment, dangerously_get_comment, delete_comment\n'), ((3609, 3627), 'flask_jwt_extended.get_jwt_identity', 'get_jwt_identity', ([], {}), '()\n', (3625, 3627), False, 'from flask_jwt_extended import jwt_required, get_jwt_identity\n'), ((3643, 3661), 'mini_gplus.daos.user.find_user', 'find_user', (['user_id'], {}), '(user_id)\n', (3652, 3661), False, 'from mini_gplus.daos.user import find_user\n'), ((3677, 3706), 'mini_gplus.daos.post.dangerously_get_post', 'dangerously_get_post', (['post_id'], {}), '(post_id)\n', (3697, 3706), False, 'from mini_gplus.daos.post import dangerously_get_post\n'), ((3733, 3781), 'mini_gplus.daos.comment.dangerously_get_comment', 'dangerously_get_comment', (['nested_comment_id', 'post'], {}), '(nested_comment_id, post)\n', (3756, 3781), False, 'from mini_gplus.daos.comment import create_comment, dangerously_get_comment, delete_comment\n'), ((3971, 4016), 'mini_gplus.daos.comment.delete_comment', 'delete_comment', (['user', 'nested_comment_id', 'post'], {}), '(user, nested_comment_id, post)\n', (3985, 4016), False, 'from mini_gplus.daos.comment import create_comment, dangerously_get_comment, delete_comment\n'), ((1196, 1232), 'flask_restful.fields.Nested', 'fields.Nested', (['nested_comment_fields'], {}), '(nested_comment_fields)\n', (1209, 1232), False, 'from flask_restful import reqparse, Resource, fields, marshal_with\n'), ((2302, 2322), 'mini_gplus.daos.exceptions.UnauthorizedAccess', 'UnauthorizedAccess', ([], {}), '()\n', (2320, 2322), False, 'from mini_gplus.daos.exceptions import UnauthorizedAccess\n'), ((3842, 3862), 'mini_gplus.daos.exceptions.UnauthorizedAccess', 'UnauthorizedAccess', ([], {}), '()\n', 
(3860, 3862), False, 'from mini_gplus.daos.exceptions import UnauthorizedAccess\n')]
|
from typing import Tuple, List
from webdnn.graph import traverse
from webdnn.graph.graph import Graph
from webdnn.graph.operators.elementwise import Elementwise
from webdnn.graph.operators.fused_elementwise import FusedElementwise
from webdnn.graph.optimize_rule import OptimizeRule
from webdnn.util import flags
def _find_elementwise_sub_graph(graph: Graph) -> List[Graph]:
    """
    Find all sub graphs which are consisted of only elementwise operators
    For each sub graph, follow conditions are checked about all input variable :code:`x`.
    - :code:`x.output_from` is elementwise operator.
    - All operators in :code`x.input_to` are included in sub graph
    And if satisfied, `x.output_from` is merged into sub graph. If `x.output_from` is already merged into other sub graph, then two sub
    graph are merged into single sub graph.
    In follow examples, let all operators be elementwise.
    ex.1)
    ...code-block:: text
                      sub_graph                          sub_graph
                     +-------+                  +-------------------+
        +-{op1}-> v1-|-{op3}-|-> v3           +-|-{op1}-> v1 -{op3}-|-> v3
        |            +-------+                | +-------------------+
    -{op0}-> v0 -+                =>  -{op0}-> v0 -+
        |                                     |
        +-{op2}-> v2                          +-{op2}-> v2
    Considering :code:`v1`,
    - :code:`v1.output_from = op1` is elementwise operator.
    - :code:`v1.input_to` contains only :code:`op3`, which is included in sub graph.
    Therefore :code:`op1` is merged into sub graph, and :code:`v0` is registered as input variable.
    Considering :code:`v0`,
    - :code:`v0.output_from = op0` is elementwise operator.
    - :code:`v0.input_to` is :code:`op1` and :code:`op2`, and op2 is not included in sub graph
    Therefore :code:`op0` cannot be merged into sub graph.
    ex.2)
    ...code-block:: text
                                              +---------------------+
    -{op0}-> v0 -{op1}-> v1 -+        -{op0}-> v0 --|-{op1}-> v1 -+ |
                 +-------+   |                |     |             | |
               +-|-{op3}-|-v3       =>        +-|-------------+-{op3}-|-> v3
               | +-------+                      | +---------------------+
               |                                |
    -{op2}-> v2 -+                    -{op2}-> v2-+
    Considering :code:`v1`,
    - :code:`v1.output_from = op1` is elementwise operator.
    - :code:`v1.input_to` is only :code:`op3`, which is included in sub graph.
    Therefore :code:`op1` is merged into sub graph, and :code:`v0` is registered as input variable.
    ex.3)
    ...code-block:: text
                                          +-----------------------------------+
        +-{op1}-> v1 -+                   | +-{op1}-> v1 -+                   |
        |             | +-------+         | |             |                   |
    -{op0}-> v0 -+    +-|-{op3}-|-v3  =>  -|-{op0}-> v0 -+    +-{op3}-|-> v3
        |             | +-------+         | |             |                   |
        +-{op2}-> v2 -+                   | +-{op2}-> v2 -+                   |
                                          +-----------------------------------+
    Considering :code:`v1`,
    - :code:`v1.output_from = op1` is elementwise operator.
    - :code:`v1.input_to` contains only :code:`op3`, which is included in sub graph.
    Therefore :code:`op1` is merged into sub graph, and :code:`v0` is registered as input variable.
    Considering :code:`v2`,
    - :code:`v2.output_from = op2` is elementwise operator.
    - :code:`v2.input_to` contains only :code:`op3`, which is included in sub graph.
    Therefore :code:`op2` is also merged into sub graph.
    Considering :code:`v0`,
    - :code:`v0.output_from = op0` is elementwise operator.
    - :code:`v0.input_to` is :code:`op1` and :code`op2`, both are included in sub graph.
    Therefore :code:`op0` is also merged into sub graph.
    Returns:
        (list of :class:`~webdnn.graph.graph.Graph`): list of sub graphs
    """
    # Seed: every elementwise operator starts as its own single-op sub graph.
    queue = list(traverse.filter_nodes(traverse.listup_operators(graph), Elementwise))
    sub_graphs = {op: Graph(list(op.inputs.values()), list(op.outputs.values())) for op in queue}
    result = []
    # Fixed-point loop: keep growing each sub graph upstream until no
    # further producer can be absorbed, then move it to `result`.
    while len(queue) > 0:
        out_node = queue.pop()
        sub_graph = sub_graphs[out_node]
        flag_changed = False
        new_inputs = []
        for x in sub_graph.inputs:
            # Condition 1: x.output_from is elementwise operator
            if not isinstance(x.output_from, Elementwise):
                new_inputs.append(x)
                continue
            # Condition 2: All operators in x.input_to are included in sub graph
            if not _check_condition2(x, sub_graph):
                new_inputs.append(x)
                continue
            # Sub graph can be merged with x.output_from
            if x.output_from in queue:
                # Producer not processed yet: absorb its inputs and drop it
                # from the work queue.
                new_inputs.extend(sub_graphs[x.output_from].inputs)
                queue.remove(x.output_from)
                flag_changed = True
            elif x.output_from in result:
                # Producer was already finalized as its own sub graph:
                # un-finalize it and merge it into this one.
                result.remove(x.output_from)
                new_inputs.extend(sub_graphs[x.output_from].inputs)
                flag_changed = True
            else:
                new_inputs.extend(sub_graphs[x.output_from].inputs)
                flag_changed = True
        # De-duplicate the merged input variables.
        sub_graph.inputs = list(set(new_inputs))
        if flag_changed:
            # Inputs changed, so this sub graph must be re-examined.
            queue.append(out_node)
        else:
            result.append(out_node)
    # Keep only sub graphs that actually fuse two or more operators.
    return list(filter(lambda g: len(traverse.listup_operators(g)) >= 2, [sub_graphs[op] for op in result]))
def _check_condition2(v, sub_graph):
    """Return True iff every consumer of variable `v` lies inside `sub_graph`."""
    included_ops = traverse.listup_operators(sub_graph)
    return all(consumer in included_ops for consumer in v.input_to)
class ElementwiseKernelFusion(OptimizeRule):
    """Fuse connected elementwise sub graphs into FusedElementwise operators."""

    def flags(self):
        return [flags.optimize.OPTIMIZE, flags.optimize.ELEMENTWISE_KERNEL_FUSION]

    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        candidates = _find_elementwise_sub_graph(graph)
        if not candidates:
            return graph, False
        for candidate in candidates:
            # Constructing the operator rewires the graph in place.
            FusedElementwise(None, candidate)
        return graph, True
|
[
"webdnn.graph.traverse.listup_operators",
"webdnn.graph.operators.fused_elementwise.FusedElementwise"
] |
[((6157, 6193), 'webdnn.graph.traverse.listup_operators', 'traverse.listup_operators', (['sub_graph'], {}), '(sub_graph)\n', (6182, 6193), False, 'from webdnn.graph import traverse\n'), ((4544, 4576), 'webdnn.graph.traverse.listup_operators', 'traverse.listup_operators', (['graph'], {}), '(graph)\n', (4569, 4576), False, 'from webdnn.graph import traverse\n'), ((6707, 6740), 'webdnn.graph.operators.fused_elementwise.FusedElementwise', 'FusedElementwise', (['None', 'sub_graph'], {}), '(None, sub_graph)\n', (6723, 6740), False, 'from webdnn.graph.operators.fused_elementwise import FusedElementwise\n'), ((6036, 6064), 'webdnn.graph.traverse.listup_operators', 'traverse.listup_operators', (['g'], {}), '(g)\n', (6061, 6064), False, 'from webdnn.graph import traverse\n')]
|
import pickle
import shutil
import re
import hashlib
import base64
from datetime import datetime
import pandas as pd
def df_to_records(df: pd.DataFrame, dataset: str, drop_columns=None):
    """
    Convert dataframe to a list of record oriented dicts.

    Parameters
    ----------
    df : pd.DataFrame
        Input dataset.
    dataset : str
        Name of provider dataset.
    drop_columns : list, optional
        Which columns (if any) to drop. Defaults to no columns.

    Returns
    -------
    list
        List of row-wise dicts, each tagged with a "dataset" key.

    Raises
    ------
    ValueError
        If the input data already contains a "dataset" column, which
        would collide with the provider tag assigned here.
    """
    # None default avoids the shared-mutable-default-argument pitfall.
    if drop_columns is None:
        drop_columns = []
    if dataset == 'OXCGRT':
        # OXCGRT ships in wide format and needs special unpivoting.
        records = oxcgrt_records(df, dataset, drop_columns)
    else:
        records = df.to_dict(orient="records")
    # The "dataset" key is reserved for the provider name assigned below.
    # Check every record (the old code only inspected the first one and
    # raised a misleading ValueError on empty input).
    if any("dataset" in record for record in records):
        raise ValueError('Input dataset contains "dataset" column name.')
    for record in records:
        record["dataset"] = dataset
    return records
def write_records(records: list, dir: str, fn: str):
    """
    Write records to a pickle file.

    Parameters
    ----------
    records : list
        List of preprocessed records.
    dir : str
        Output directory.
    fn : str
        Output file name (currently unused; output is always written to
        "records.pickle" inside `dir`).

    Returns
    -------
    None

    Raises
    ------
    RuntimeError
        If the pickle cannot be written. The output directory is removed
        first so no partial output is left behind, and the original
        exception is chained as the cause.
    """
    out_path = dir + "/" + "records.pickle"
    try:
        print("Writing records.pickle...")
        # Context manager guarantees the file handle is closed (the old
        # code leaked the handle opened inline in pickle.dump()).
        with open(out_path, "wb") as f:
            pickle.dump(records, f)
    except Exception as e:
        shutil.rmtree(dir, ignore_errors=True)
        # The old code did `raise e("...")`, which *called* the exception
        # instance and raised a TypeError that masked the real failure.
        raise RuntimeError("Unable to write " + out_path) from e
def oxcgrt_records(ox: pd.DataFrame, dataset: str, drop_columns: list = None):
    """
    Convert OXCGRT data to a list of record dicts.

    This presents an additional challenge because of the wide format of
    the OXCGRT data: each row bundles many measure groups, which are split
    into one record per group by `get_measure_records`.

    Parameters
    ----------
    ox : pd.DataFrame
        Input OXCGRT data.
    dataset : str
        Name of provider dataset.
    drop_columns : list, optional
        Which columns (if any) to drop. Defaults to no columns.

    Returns
    -------
    list
        List of record dicts.
    """
    # None default avoids the shared-mutable-default-argument pitfall.
    if drop_columns is None:
        drop_columns = []
    full_value_names, value_names, stub_names = get_names(ox)
    # ID columns are everything that is not a measure column, minus drops.
    id_columns = [c for c in set(ox.columns).difference(full_value_names)
                  if c not in drop_columns]
    row_dicts = ox.to_dict(orient="records")
    # One list of measure records per row; flatten (empty rows vanish).
    nested = (get_measure_records(r, stub_names, id_columns, full_value_names)
              for r in row_dicts)
    return [record for row_records in nested for record in row_records]
def get_names(ox: pd.DataFrame):
    """
    Identify the columns that hold OXCGRT measure information.

    Measure columns carry a prefix such as "C1_" (a capital letter,
    digits, then an underscore) somewhere in their name.

    Parameters
    ----------
    ox : pd.DataFrame
        Input OXCGRT dataset.

    Returns
    -------
    full_value_names : list
        Every column containing measure information (values, flags, notes).
    value_names : list
        Only the measure value columns (flags and notes excluded).
    stub_names : list
        The measure prefixes (i.e. "C1"), one per value column.
    """
    stub_pattern = re.compile(r'[A-Z][0-9]+_')
    full_value_names = [name for name in ox.columns if stub_pattern.search(name)]
    value_names = [name for name in full_value_names
                   if 'Flag' not in name and 'Notes' not in name]
    stub_names = [name.split('_')[0] for name in value_names]
    return (full_value_names, value_names, stub_names)
def get_measure_records(combined_record, stub_names, id_columns, full_value_names):
    """
    Function to break rows into individual records by stub group.
    i.e. subset a row for only C4 records and other information, repeat for all possible measures.
    Also drops records where notes column is blank i.e. sum(notes columns) == 0.

    Parameters
    ----------
    combined_record : dict
        Dict of a single OXCGRT row.
    stub_names : list
        List of names of each stub group.
    id_columns : list
        List of columns to be retained as IDs.
    full_value_names : list
        List of full names of value columns.

    Returns
    -------
    list
        List of dicts containing all records extracted from a given row.
    """
    records = []
    for stub in stub_names:
        # NOTE(review): substring match — a stub like "C1" would also match
        # a hypothetical "C11_..." column; appears safe for OXCGRT's actual
        # stub set, but confirm if the naming scheme ever changes.
        stub_keys = [x for x in full_value_names if stub in x]
        keys = id_columns + stub_keys
        # NOTE(review): on a miss, flag_key/notes_key keep their value from
        # the previous loop iteration (or stay unbound on the first one);
        # the try/except blocks below silently absorb the consequences.
        try:
            flag_key = [x for x in stub_keys if '_Flag' in x][0]
        except Exception:
            pass
        try:
            notes_key = [x for x in stub_keys if '_Notes' in x][0]
        except Exception:
            pass
        # Keep only this stub's columns plus the ID columns.
        subset = {key: value for key, value in combined_record.items() if key in keys}
        # Pass record if notes are blank
        try:
            if sum([subset[notes_key]]) == 0:
                continue
        except Exception:
            pass
        # Normalize the flag column to a generic 'flag' key (0.0 if absent).
        try:
            subset['flag'] = subset.pop(flag_key)
        except Exception:
            subset['flag'] = 0.0
            pass
        # Normalize the notes column to a generic 'notes' key.
        try:
            subset['notes'] = subset.pop(notes_key)
        except Exception:
            pass
        #replace 0.0 in id columns with None
        for col in id_columns:
            if subset[col] == 0.0:
                subset[col] = None
        # Whatever key remains after removing IDs/flag/notes is the measure
        # value column; record both its value and its name.
        measure_key = list(set(list(subset.keys())).difference(set(id_columns + ['measure_name', 'flag', 'notes'])))
        subset['measure'] = subset.pop(measure_key[0])
        subset['measure_name'] = measure_key[0]
        records.append(subset)
    return(records)
def split_df_by_group(data: pd.DataFrame, group: str):
    """
    Break a dataframe into one slice per distinct value of *group*.

    Parameters
    ----------
    data : pd.DataFrame
        Input dataset.
    group : str
        Name of the column used to partition the rows.

    Returns
    -------
    dict
        Mapping of group value -> dataframe slice for that value.
    """
    grouped = data.groupby(group)
    return {name: grouped.get_group(name) for name in grouped.groups}
def filter_new_hashes(data: pd.DataFrame,
                      ingested_path: str,
                      date_now: str = None,
                      save_ingestion_hashes: bool = False) -> pd.DataFrame:
    """
    Filter records by the row-wise hashes of their content.
    Reduces the number of records that need to be processed from each dataset.
    Will not filter hashes that were ingested on the same day as the function is called.
    Parameters
    ----------
    data : pd.DataFrame
        Input data.
    ingested_path : str
        Path to ingested hash reference.
    date_now : str
        String of current date ('%Y_%m_%d'); defaults to today.
    save_ingestion_hashes: bool
        Should ingestion hashes be saved?
    Returns
    -------
    pd.DataFrame
        Filtered data.
    """
    # Compute the default at call time: a default-argument expression would be
    # evaluated once at import and go stale in a long-running process.
    if date_now is None:
        date_now = datetime.now().strftime('%Y_%m_%d')
    # Read the reference file for ingested hashes
    ingested_hash_ref = pd.read_csv(ingested_path)
    # Hashes processed today are excluded from the blocklist so a record can
    # still be re-processed within the same day. (The original computed this
    # filter but discarded the result.)
    previous_hashes = ingested_hash_ref.loc[
        ingested_hash_ref['date_processed'] != date_now, 'hash']
    # Define row-wise hashes for the input dataset
    data['_hash'] = get_row_hashes(data)
    # Keep only rows whose hash was not seen on a previous day. ``x in series``
    # tests the *index*, not the values, so use the vectorized ``isin``.
    # NOTE(review): get_row_hashes returns bytes while the CSV round-trips
    # strings — confirm the two representations match in the reference file.
    data = data.loc[~data['_hash'].isin(previous_hashes)]
    # Get the hashes that were just ingested
    new_hashes = pd.DataFrame({'hash': data['_hash'], 'date_processed': date_now})
    # Remove _hash column from new data
    data = data.drop(columns=['_hash'])
    # Combine previous hash ref with new hash ref
    ingested_hash_ref = pd.concat([ingested_hash_ref, new_hashes]).drop_duplicates()
    # Write combined hash ref to csv file if save_ingestion_hashes is True
    if save_ingestion_hashes:
        ingested_hash_ref.to_csv(ingested_path, index=False)
    return(data)
def get_row_hashes(data: pd.DataFrame) -> list:
    """
    Compute a base64-encoded SHA-1 digest for every row of a dataframe.

    Parameters
    ----------
    data : pd.DataFrame
        Input data.

    Returns
    -------
    list
        One base64-encoded digest (bytes) per row.
    """
    # Fold each row into one string by concatenating the str() of every cell.
    def _row_string(row):
        return ''.join(str(value) for value in tuple(row))

    row_strings = data.apply(_row_string, axis=1)
    # SHA-1 each row string and base64-encode the raw digest.
    return [
        base64.b64encode(hashlib.sha1(text.encode("UTF-8")).digest())
        for text in row_strings
    ]
|
[
"pandas.DataFrame",
"pandas.read_csv",
"re.findall",
"shutil.rmtree",
"datetime.datetime.now",
"pandas.concat"
] |
[((7076, 7102), 'pandas.read_csv', 'pd.read_csv', (['ingested_path'], {}), '(ingested_path)\n', (7087, 7102), True, 'import pandas as pd\n'), ((7556, 7621), 'pandas.DataFrame', 'pd.DataFrame', (["{'hash': data['_hash'], 'date_processed': date_now}"], {}), "({'hash': data['_hash'], 'date_processed': date_now})\n", (7568, 7621), True, 'import pandas as pd\n'), ((1526, 1544), 'shutil.rmtree', 'shutil.rmtree', (['dir'], {}), '(dir)\n', (1539, 1544), False, 'import shutil\n'), ((6311, 6325), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6323, 6325), False, 'from datetime import datetime\n'), ((7778, 7820), 'pandas.concat', 'pd.concat', (['[ingested_hash_ref, new_hashes]'], {}), '([ingested_hash_ref, new_hashes])\n', (7787, 7820), True, 'import pandas as pd\n'), ((3299, 3326), 're.findall', 're.findall', (['stub_exp', 'match'], {}), '(stub_exp, match)\n', (3309, 3326), False, 'import re\n')]
|
from unittest.mock import patch
from django.contrib.auth import authenticate
from django.http import HttpRequest
from django.test import override_settings
from django.urls import reverse
from axes.apps import AppConfig
from axes.models import AccessAttempt, AccessLog
from axes.tests.base import AxesTestCase
@patch('axes.apps.AppConfig.logging_initialized', False)
@patch('axes.apps.log')
class AppsTestCase(AxesTestCase):
    # Stacked ``patch`` decorators inject bottom-up: the mocked
    # ``axes.apps.log`` becomes the ``log`` argument of every test, and
    # ``logging_initialized`` is forced to False so initialize() logs again.
    def test_axes_config_log_re_entrant(self, log):
        """
        Test that initialize call count does not increase on repeat calls.
        """
        AppConfig.initialize()
        calls = log.info.call_count
        AppConfig.initialize()
        self.assertTrue(
            calls == log.info.call_count and calls > 0,
            'AxesConfig.initialize needs to be re-entrant',
        )
    @override_settings(AXES_VERBOSE=False)
    def test_axes_config_log_not_verbose(self, log):
        """Test that nothing is logged when AXES_VERBOSE is off."""
        AppConfig.initialize()
        self.assertFalse(log.info.called)
    @override_settings(AXES_ONLY_USER_FAILURES=True)
    def test_axes_config_log_user_only(self, log):
        """Test the startup log line for username-only blocking."""
        AppConfig.initialize()
        log.info.assert_called_with('AXES: blocking by username only.')
    @override_settings(AXES_ONLY_USER_FAILURES=False)
    def test_axes_config_log_ip_only(self, log):
        """Test the startup log line for IP-only blocking."""
        AppConfig.initialize()
        log.info.assert_called_with('AXES: blocking by IP only.')
    @override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
    def test_axes_config_log_user_ip(self, log):
        """Test the startup log line for username+IP blocking."""
        AppConfig.initialize()
        log.info.assert_called_with('AXES: blocking by combination of username and IP.')
class AccessLogTestCase(AxesTestCase):
    def test_access_log_on_logout(self):
        """
        Test a valid logout and make sure the logout_time is updated.
        """
        self.login(is_valid_username=True, is_valid_password=True)
        self.assertIsNone(AccessLog.objects.latest('id').logout_time)
        response = self.client.get(reverse('admin:logout'))
        self.assertContains(response, 'Logged out')
        self.assertIsNotNone(AccessLog.objects.latest('id').logout_time)
    def test_log_data_truncated(self):
        """
        Test that get_query_str properly truncates data to the max_length (default 1024).
        """
        # An impossibly large post dict
        extra_data = {'a' * x: x for x in range(1024)}
        self.login(**extra_data)
        self.assertEqual(
            len(AccessAttempt.objects.latest('id').post_data), 1024
        )
    @override_settings(AXES_DISABLE_ACCESS_LOG=True)
    def test_valid_logout_without_success_log(self):
        """
        Test that a valid logout does not generate an AccessLog when AXES_DISABLE_ACCESS_LOG is True.
        """
        AccessLog.objects.all().delete()
        response = self.login(is_valid_username=True, is_valid_password=True)
        response = self.client.get(reverse('admin:logout'))
        self.assertEqual(AccessLog.objects.all().count(), 0)
        self.assertContains(response, 'Logged out', html=True)
    @override_settings(AXES_DISABLE_ACCESS_LOG=True)
    def test_valid_login_without_success_log(self):
        """
        Test that a valid login does not generate an AccessLog when AXES_DISABLE_ACCESS_LOG is True.
        """
        AccessLog.objects.all().delete()
        response = self.login(is_valid_username=True, is_valid_password=True)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(AccessLog.objects.all().count(), 0)
    @override_settings(AXES_DISABLE_ACCESS_LOG=True)
    def test_valid_logout_without_log(self):
        """
        Test that a valid logout does not generate an AccessLog when AXES_DISABLE_ACCESS_LOG is True.
        """
        AccessLog.objects.all().delete()
        response = self.login(is_valid_username=True, is_valid_password=True)
        response = self.client.get(reverse('admin:logout'))
        self.assertEqual(AccessLog.objects.count(), 0)
        self.assertContains(response, 'Logged out', html=True)
    @override_settings(AXES_DISABLE_ACCESS_LOG=True)
    def test_non_valid_login_without_log(self):
        """
        Test that a non-valid login does not generate an AccessLog when AXES_DISABLE_ACCESS_LOG is True.
        """
        AccessLog.objects.all().delete()
        response = self.login(is_valid_username=True, is_valid_password=False)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(AccessLog.objects.all().count(), 0)
|
[
"axes.models.AccessLog.objects.latest",
"axes.apps.AppConfig.initialize",
"unittest.mock.patch",
"axes.models.AccessLog.objects.count",
"django.urls.reverse",
"axes.models.AccessLog.objects.all",
"axes.models.AccessAttempt.objects.latest",
"django.test.override_settings"
] |
[((314, 369), 'unittest.mock.patch', 'patch', (['"""axes.apps.AppConfig.logging_initialized"""', '(False)'], {}), "('axes.apps.AppConfig.logging_initialized', False)\n", (319, 369), False, 'from unittest.mock import patch\n'), ((371, 393), 'unittest.mock.patch', 'patch', (['"""axes.apps.log"""'], {}), "('axes.apps.log')\n", (376, 393), False, 'from unittest.mock import patch\n'), ((836, 873), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(False)'}), '(AXES_VERBOSE=False)\n', (853, 873), False, 'from django.test import override_settings\n'), ((1006, 1053), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_USER_FAILURES': '(True)'}), '(AXES_ONLY_USER_FAILURES=True)\n', (1023, 1053), False, 'from django.test import override_settings\n'), ((1214, 1262), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_USER_FAILURES': '(False)'}), '(AXES_ONLY_USER_FAILURES=False)\n', (1231, 1262), False, 'from django.test import override_settings\n'), ((1415, 1479), 'django.test.override_settings', 'override_settings', ([], {'AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP': '(True)'}), '(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)\n', (1432, 1479), False, 'from django.test import override_settings\n'), ((2543, 2590), 'django.test.override_settings', 'override_settings', ([], {'AXES_DISABLE_ACCESS_LOG': '(True)'}), '(AXES_DISABLE_ACCESS_LOG=True)\n', (2560, 2590), False, 'from django.test import override_settings\n'), ((2955, 3002), 'django.test.override_settings', 'override_settings', ([], {'AXES_DISABLE_ACCESS_LOG': '(True)'}), '(AXES_DISABLE_ACCESS_LOG=True)\n', (2972, 3002), False, 'from django.test import override_settings\n'), ((3424, 3471), 'django.test.override_settings', 'override_settings', ([], {'AXES_DISABLE_ACCESS_LOG': '(True)'}), '(AXES_DISABLE_ACCESS_LOG=True)\n', (3441, 3471), False, 'from django.test import override_settings\n'), ((3822, 3869), 'django.test.override_settings', 'override_settings', 
([], {'AXES_DISABLE_ACCESS_LOG': '(True)'}), '(AXES_DISABLE_ACCESS_LOG=True)\n', (3839, 3869), False, 'from django.test import override_settings\n'), ((588, 610), 'axes.apps.AppConfig.initialize', 'AppConfig.initialize', ([], {}), '()\n', (608, 610), False, 'from axes.apps import AppConfig\n'), ((656, 678), 'axes.apps.AppConfig.initialize', 'AppConfig.initialize', ([], {}), '()\n', (676, 678), False, 'from axes.apps import AppConfig\n'), ((935, 957), 'axes.apps.AppConfig.initialize', 'AppConfig.initialize', ([], {}), '()\n', (955, 957), False, 'from axes.apps import AppConfig\n'), ((1113, 1135), 'axes.apps.AppConfig.initialize', 'AppConfig.initialize', ([], {}), '()\n', (1133, 1135), False, 'from axes.apps import AppConfig\n'), ((1320, 1342), 'axes.apps.AppConfig.initialize', 'AppConfig.initialize', ([], {}), '()\n', (1340, 1342), False, 'from axes.apps import AppConfig\n'), ((1537, 1559), 'axes.apps.AppConfig.initialize', 'AppConfig.initialize', ([], {}), '()\n', (1557, 1559), False, 'from axes.apps import AppConfig\n'), ((1999, 2022), 'django.urls.reverse', 'reverse', (['"""admin:logout"""'], {}), "('admin:logout')\n", (2006, 2022), False, 'from django.urls import reverse\n'), ((2799, 2822), 'django.urls.reverse', 'reverse', (['"""admin:logout"""'], {}), "('admin:logout')\n", (2806, 2822), False, 'from django.urls import reverse\n'), ((3672, 3695), 'django.urls.reverse', 'reverse', (['"""admin:logout"""'], {}), "('admin:logout')\n", (3679, 3695), False, 'from django.urls import reverse\n'), ((3723, 3748), 'axes.models.AccessLog.objects.count', 'AccessLog.objects.count', ([], {}), '()\n', (3746, 3748), False, 'from axes.models import AccessAttempt, AccessLog\n'), ((1919, 1949), 'axes.models.AccessLog.objects.latest', 'AccessLog.objects.latest', (['"""id"""'], {}), "('id')\n", (1943, 1949), False, 'from axes.models import AccessAttempt, AccessLog\n'), ((2106, 2136), 'axes.models.AccessLog.objects.latest', 'AccessLog.objects.latest', (['"""id"""'], {}), "('id')\n", 
(2130, 2136), False, 'from axes.models import AccessAttempt, AccessLog\n'), ((2652, 2675), 'axes.models.AccessLog.objects.all', 'AccessLog.objects.all', ([], {}), '()\n', (2673, 2675), False, 'from axes.models import AccessAttempt, AccessLog\n'), ((3192, 3215), 'axes.models.AccessLog.objects.all', 'AccessLog.objects.all', ([], {}), '()\n', (3213, 3215), False, 'from axes.models import AccessAttempt, AccessLog\n'), ((3525, 3548), 'axes.models.AccessLog.objects.all', 'AccessLog.objects.all', ([], {}), '()\n', (3546, 3548), False, 'from axes.models import AccessAttempt, AccessLog\n'), ((4046, 4069), 'axes.models.AccessLog.objects.all', 'AccessLog.objects.all', ([], {}), '()\n', (4067, 4069), False, 'from axes.models import AccessAttempt, AccessLog\n'), ((2475, 2509), 'axes.models.AccessAttempt.objects.latest', 'AccessAttempt.objects.latest', (['"""id"""'], {}), "('id')\n", (2503, 2509), False, 'from axes.models import AccessAttempt, AccessLog\n'), ((2850, 2873), 'axes.models.AccessLog.objects.all', 'AccessLog.objects.all', ([], {}), '()\n', (2871, 2873), False, 'from axes.models import AccessAttempt, AccessLog\n'), ((3382, 3405), 'axes.models.AccessLog.objects.all', 'AccessLog.objects.all', ([], {}), '()\n', (3403, 3405), False, 'from axes.models import AccessAttempt, AccessLog\n'), ((4237, 4260), 'axes.models.AccessLog.objects.all', 'AccessLog.objects.all', ([], {}), '()\n', (4258, 4260), False, 'from axes.models import AccessAttempt, AccessLog\n')]
|
from django.contrib import admin
from django.contrib.admin.models import LogEntry
from django.utils.html import format_html
from django.utils.timesince import timesince
from tests.test_app.polls.models import Campaign, Cheese, AllFields
from .models import Poll, Choice, Vote
class ChoiceInline(admin.TabularInline):
    # Inline editor for Choice rows on the Poll change page; each inline row
    # links to its full Choice change form.
    model = Choice
    show_change_link = True
@admin.register(Poll)
class PollAdmin(admin.ModelAdmin):
    # Change-form layout.
    fieldsets = (("general", {"fields": ("owner",)}), ("other", {"fields": ("text", "pub_date", "active")}))
    raw_id_fields = ("owner",)
    # Changelist columns, filters and pagination.
    list_display = ("owner", "text", "pub_date", "active")
    list_display_links = ()
    list_filter = ("active", "owner")
    list_select_related = False
    list_per_page = 20
    list_max_show_all = 100
    list_editable = ("active",)
    search_fields = ("text", "owner__email")
    date_hierarchy = "pub_date"
    # Save-button behaviour on the change form.
    save_as = True
    save_as_continue = True
    save_on_top = True
    preserve_filters = True
    inlines = (ChoiceInline,)
    # Changelist actions display options.
    actions = []
    actions_on_top = True
    actions_on_bottom = True
    actions_selection_counter = True
@admin.register(Choice)
class ChoiceAdmin(admin.ModelAdmin):
    # Choice text is editable directly in the changelist; the poll is picked
    # via an autocomplete widget.
    list_display = ("poll", "choice_text")
    list_per_page = 20
    list_editable = ("choice_text",)
    autocomplete_fields = ("poll",)
@admin.register(Vote)
class VoteAdmin(admin.ModelAdmin):
    # Read-only style listing of who voted for what.
    list_display = ("user", "poll", "choice")
@admin.register(LogEntry)
class LogEntryAdmin(admin.ModelAdmin):
    """Browse Django's built-in admin audit log (LogEntry)."""

    list_display = ("user", "object", "action_flag", "change_message", "modified")
    readonly_fields = ["object", "modified"]
    search_fields = ("user__email",)
    date_hierarchy = "action_time"
    list_filter = ("action_flag", "content_type__model")
    list_per_page = 20

    def object(self, obj):
        """Render a link to the logged object's admin change page.

        The values are passed as ``format_html`` arguments so they are
        HTML-escaped; the previous ``str.format`` pre-interpolation bypassed
        escaping and could inject markup from ``object_repr``.
        """
        url = obj.get_admin_url()
        return format_html(
            '<a href="{}">{} [{}]</a>', url, obj.object_repr, obj.content_type.model
        )

    def modified(self, obj):
        """Human-readable age of the entry (e.g. "3 hours ago")."""
        if not obj.action_time:
            return "Never"
        return "{} ago".format(timesince(obj.action_time))

    # Sort the computed "modified" column by the underlying timestamp.
    modified.admin_order_field = "action_time"
@admin.register(Cheese)
class CheeseAdmin(admin.ModelAdmin):
    list_display = ("name", "stinky")
    # NOTE(review): ``is_editable`` is not a ModelAdmin option — this line has
    # no effect. It was probably meant to be ``list_editable``; confirm before
    # changing, since list_editable interacts with list_display_links.
    is_editable = ("name", "stinky")
@admin.register(Campaign)
class CampaignAdmin(admin.ModelAdmin):
    list_display = ("id", "promoter")
    # Autocomplete search matches the promoter's email or username.
    search_fields = ("promoter__email", "promoter__username")
    autocomplete_fields = (
        "polls",
        "promoter",
    )
@admin.register(AllFields)
class AllFieldsAdmin(admin.ModelAdmin):
    # Exercises every field type in the changelist: displayed, editable and
    # filterable column sets are near-identical on purpose (file fields are
    # excluded from editing/filtering).
    list_display = (
        "id",
        "char",
        "text",
        "slug",
        "email",
        "float",
        "decimal",
        "integer",
        "small_integer",
        "big_integer",
        "positive_integer",
        "boolean",
        "null_boolean",
        "file",
        "file_path",
        "date",
        "date_time",
        "time",
        "duration",
        "identifier",
        "generic_ip_address",
    )
    list_editable = (
        "char",
        "text",
        "slug",
        "email",
        "float",
        "decimal",
        "integer",
        "small_integer",
        "big_integer",
        "positive_integer",
        "boolean",
        "null_boolean",
        "date",
        "date_time",
        "time",
        "duration",
        "identifier",
        "generic_ip_address",
    )
    list_filter = (
        "char",
        "text",
        "slug",
        "email",
        "float",
        "decimal",
        "integer",
        "small_integer",
        "big_integer",
        "positive_integer",
        "boolean",
        "null_boolean",
        "date",
        "date_time",
        "time",
        "duration",
        "identifier",
        "generic_ip_address",
    )
    # Only the id links to the change form, so every other column can be
    # list_editable.
    list_display_links = ("id",)
    # Change-form layout grouped by field kind; file/other sections collapse.
    fieldsets = (
        ("char", {"fields": ("char", "text", "slug", "email",)}),
        ("number", {"fields": ("float", "decimal", "integer", "small_integer", "big_integer", "positive_integer",)}),
        ("boolean", {"fields": ("boolean", "null_boolean",)}),
        ("file", {"classes": ("collapse",), "fields": ("file", "file_path",)}),
        ("time", {"fields": ("date", "date_time", "time",)}),
        ("other", {"classes": ("collapse",), "fields": ("duration", "identifier", "generic_ip_address",)}),
    )
|
[
"django.utils.timesince.timesince",
"django.contrib.admin.register"
] |
[((370, 390), 'django.contrib.admin.register', 'admin.register', (['Poll'], {}), '(Poll)\n', (384, 390), False, 'from django.contrib import admin\n'), ((1124, 1146), 'django.contrib.admin.register', 'admin.register', (['Choice'], {}), '(Choice)\n', (1138, 1146), False, 'from django.contrib import admin\n'), ((1326, 1346), 'django.contrib.admin.register', 'admin.register', (['Vote'], {}), '(Vote)\n', (1340, 1346), False, 'from django.contrib import admin\n'), ((1431, 1455), 'django.contrib.admin.register', 'admin.register', (['LogEntry'], {}), '(LogEntry)\n', (1445, 1455), False, 'from django.contrib import admin\n'), ((2191, 2213), 'django.contrib.admin.register', 'admin.register', (['Cheese'], {}), '(Cheese)\n', (2205, 2213), False, 'from django.contrib import admin\n'), ((2329, 2353), 'django.contrib.admin.register', 'admin.register', (['Campaign'], {}), '(Campaign)\n', (2343, 2353), False, 'from django.contrib import admin\n'), ((2567, 2592), 'django.contrib.admin.register', 'admin.register', (['AllFields'], {}), '(AllFields)\n', (2581, 2592), False, 'from django.contrib import admin\n'), ((2112, 2138), 'django.utils.timesince.timesince', 'timesince', (['obj.action_time'], {}), '(obj.action_time)\n', (2121, 2138), False, 'from django.utils.timesince import timesince\n')]
|
import boto3
import logging
logger = logging.getLogger(__name__)


class Connection(object):
    """Thin factory for boto3 clients, resources and sessions."""

    def __init__(self, type, service=None, region='us-west-2', profile='default'):
        # Keep the requested connection parameters; the actual boto3 object is
        # only created when connect() is called.
        self.region = region
        self.connection_type = type
        self.service = service
        self.client = None
        self.resource = None
        self.profile = profile
        # Best-effort default session setup; failures are logged, not raised.
        try:
            boto3.setup_default_session(profile_name=self.profile)
        except Exception as e:
            logger.info("Problem setting default boto3 session: {}".format(e))

    def connect(self):
        """Create and return the boto3 object for the configured type."""
        kind = self.connection_type
        if kind is None:
            raise AttributeError(
                "Could not determine connect type. Set client or resource."
            )
        if kind == "client":
            self.client = boto3.client(self.service, region_name=self.region)
            return self.client
        if kind == "resource":
            self.resource = boto3.resource(self.service, region_name=self.region)
            return self.resource
        if kind == "session":
            # Prefer the named profile; fall back to the ambient credentials
            # (e.g. an instance role) when the profile is unavailable.
            try:
                session = boto3.Session(
                    region_name=self.region,
                    profile_name=self.profile
                )
                logger.info(
                    "Returning session for default profile."
                )
            except Exception as e:
                logger.info(
                    "We are likely running on AWS instance.: {}".format(e)
                )
                session = boto3.Session(
                    region_name=self.region
                )
            return session
        raise AttributeError(
            "Connection type is not supported."
        )
|
[
"boto3.Session",
"boto3.client",
"boto3.setup_default_session",
"boto3.resource",
"logging.getLogger"
] |
[((38, 65), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (55, 65), False, 'import logging\n'), ((385, 439), 'boto3.setup_default_session', 'boto3.setup_default_session', ([], {'profile_name': 'self.profile'}), '(profile_name=self.profile)\n', (412, 439), False, 'import boto3\n'), ((808, 859), 'boto3.client', 'boto3.client', (['self.service'], {'region_name': 'self.region'}), '(self.service, region_name=self.region)\n', (820, 859), False, 'import boto3\n'), ((1042, 1095), 'boto3.resource', 'boto3.resource', (['self.service'], {'region_name': 'self.region'}), '(self.service, region_name=self.region)\n', (1056, 1095), False, 'import boto3\n'), ((1303, 1368), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'self.region', 'profile_name': 'self.profile'}), '(region_name=self.region, profile_name=self.profile)\n', (1316, 1368), False, 'import boto3\n'), ((1718, 1756), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'self.region'}), '(region_name=self.region)\n', (1731, 1756), False, 'import boto3\n')]
|
"""
Code adapted from: https://github.com/NVIDIA/semantic-segmentation
Copyright 2020 Nvidia Corporation
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Custom Norm wrappers to enable sync BN, regular BN and for weight
initialization
"""
import re
import torch
import torch.nn as nn
from utils.config import cfg
from apex import amp
from runx.logx import logx
# Corner-alignment flag shared by every interpolate/resize helper below.
align_corners = cfg.MODEL.ALIGN_CORNERS
def Norm2d(in_channels, **kwargs):
    """
    Instantiate the normalization layer configured in ``cfg.MODEL.BNFUNC``.

    Keeping the constructor behind the config allows flexible switching
    between regular, sync or custom batch norm without touching model code.
    """
    bn_factory = getattr(cfg.MODEL, 'BNFUNC')
    return bn_factory(in_channels, **kwargs)
def initialize_weights(*models):
    """
    Initialize the weights of one or more models in-place.

    Conv/linear layers get Kaiming-normal weights and zeroed biases; the
    configured norm layers get unit weight and zero bias.
    """
    def _reset(module):
        # Per-module initialization rule.
        if isinstance(module, (nn.Conv2d, nn.Linear)):
            nn.init.kaiming_normal_(module.weight)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, cfg.MODEL.BNFUNC):
            module.weight.data.fill_(1)
            module.bias.data.zero_()

    for model in models:
        for module in model.modules():
            _reset(module)
@amp.float_function
def Upsample(x, size):
    """
    Bilinearly resize ``x`` to the given spatial ``size``; executed as a
    float function under apex AMP.
    """
    resized = nn.functional.interpolate(
        x, size=size, mode='bilinear', align_corners=align_corners)
    return resized
@amp.float_function
def Upsample2(x):
    """
    Bilinearly upsample ``x`` by a fixed factor of 2; executed as a float
    function under apex AMP.
    """
    resized = nn.functional.interpolate(
        x, scale_factor=2, mode='bilinear', align_corners=align_corners)
    return resized
def Down2x(x):
    """Bilinearly downsample ``x`` to half its spatial resolution."""
    return nn.functional.interpolate(
        x, scale_factor=0.5, mode='bilinear', align_corners=align_corners)
def Up15x(x):
    """Bilinearly upsample ``x`` by a factor of 1.5."""
    return nn.functional.interpolate(
        x, scale_factor=1.5, mode='bilinear', align_corners=align_corners)
def scale_as(x, y):
    '''
    Bilinearly resize ``x`` to the spatial size (H, W) of ``y``.
    '''
    target_size = (y.size(2), y.size(3))
    kwargs = {'size': target_size, 'mode': 'bilinear',
              'align_corners': align_corners}
    # recompute_scale_factor only exists on torch >= 1.5.
    if cfg.OPTIONS.TORCH_VERSION >= 1.5:
        kwargs['recompute_scale_factor'] = True
    return torch.nn.functional.interpolate(x, **kwargs)
def DownX(x, scale_factor):
    '''
    Bilinearly rescale ``x`` by ``scale_factor``.
    '''
    kwargs = {'scale_factor': scale_factor, 'mode': 'bilinear',
              'align_corners': align_corners}
    # recompute_scale_factor only exists on torch >= 1.5.
    if cfg.OPTIONS.TORCH_VERSION >= 1.5:
        kwargs['recompute_scale_factor'] = True
    return torch.nn.functional.interpolate(x, **kwargs)
def ResizeX(x, scale_factor):
    '''
    Bilinearly scale ``x`` by some factor ``scale_factor``.
    '''
    kwargs = {'scale_factor': scale_factor, 'mode': 'bilinear',
              'align_corners': align_corners}
    # recompute_scale_factor only exists on torch >= 1.5.
    if cfg.OPTIONS.TORCH_VERSION >= 1.5:
        kwargs['recompute_scale_factor'] = True
    return torch.nn.functional.interpolate(x, **kwargs)
|
[
"torch.nn.functional.interpolate",
"torch.nn.init.kaiming_normal_"
] |
[((2631, 2721), 'torch.nn.functional.interpolate', 'nn.functional.interpolate', (['x'], {'size': 'size', 'mode': '"""bilinear"""', 'align_corners': 'align_corners'}), "(x, size=size, mode='bilinear', align_corners=\n align_corners)\n", (2656, 2721), True, 'import torch.nn as nn\n'), ((2858, 2953), 'torch.nn.functional.interpolate', 'nn.functional.interpolate', (['x'], {'scale_factor': '(2)', 'mode': '"""bilinear"""', 'align_corners': 'align_corners'}), "(x, scale_factor=2, mode='bilinear', align_corners\n =align_corners)\n", (2883, 2953), True, 'import torch.nn as nn\n'), ((3014, 3116), 'torch.nn.functional.interpolate', 'torch.nn.functional.interpolate', (['x'], {'scale_factor': '(0.5)', 'mode': '"""bilinear"""', 'align_corners': 'align_corners'}), "(x, scale_factor=0.5, mode='bilinear',\n align_corners=align_corners)\n", (3045, 3116), False, 'import torch\n'), ((3149, 3251), 'torch.nn.functional.interpolate', 'torch.nn.functional.interpolate', (['x'], {'scale_factor': '(1.5)', 'mode': '"""bilinear"""', 'align_corners': 'align_corners'}), "(x, scale_factor=1.5, mode='bilinear',\n align_corners=align_corners)\n", (3180, 3251), False, 'import torch\n'), ((3424, 3550), 'torch.nn.functional.interpolate', 'torch.nn.functional.interpolate', (['x'], {'size': 'y_size', 'mode': '"""bilinear"""', 'align_corners': 'align_corners', 'recompute_scale_factor': '(True)'}), "(x, size=y_size, mode='bilinear',\n align_corners=align_corners, recompute_scale_factor=True)\n", (3455, 3550), False, 'import torch\n'), ((3601, 3698), 'torch.nn.functional.interpolate', 'torch.nn.functional.interpolate', (['x'], {'size': 'y_size', 'mode': '"""bilinear"""', 'align_corners': 'align_corners'}), "(x, size=y_size, mode='bilinear',\n align_corners=align_corners)\n", (3632, 3698), False, 'import torch\n'), ((3880, 4021), 'torch.nn.functional.interpolate', 'torch.nn.functional.interpolate', (['x'], {'scale_factor': 'scale_factor', 'mode': '"""bilinear"""', 'align_corners': 'align_corners', 
'recompute_scale_factor': '(True)'}), "(x, scale_factor=scale_factor, mode=\n 'bilinear', align_corners=align_corners, recompute_scale_factor=True)\n", (3911, 4021), False, 'import torch\n'), ((4071, 4183), 'torch.nn.functional.interpolate', 'torch.nn.functional.interpolate', (['x'], {'scale_factor': 'scale_factor', 'mode': '"""bilinear"""', 'align_corners': 'align_corners'}), "(x, scale_factor=scale_factor, mode=\n 'bilinear', align_corners=align_corners)\n", (4102, 4183), False, 'import torch\n'), ((4359, 4500), 'torch.nn.functional.interpolate', 'torch.nn.functional.interpolate', (['x'], {'scale_factor': 'scale_factor', 'mode': '"""bilinear"""', 'align_corners': 'align_corners', 'recompute_scale_factor': '(True)'}), "(x, scale_factor=scale_factor, mode=\n 'bilinear', align_corners=align_corners, recompute_scale_factor=True)\n", (4390, 4500), False, 'import torch\n'), ((4550, 4662), 'torch.nn.functional.interpolate', 'torch.nn.functional.interpolate', (['x'], {'scale_factor': 'scale_factor', 'mode': '"""bilinear"""', 'align_corners': 'align_corners'}), "(x, scale_factor=scale_factor, mode=\n 'bilinear', align_corners=align_corners)\n", (4581, 4662), False, 'import torch\n'), ((2254, 2292), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['module.weight'], {}), '(module.weight)\n', (2277, 2292), True, 'import torch.nn as nn\n')]
|
from .helpers import dualLP, optimalLP, optimalLPConstant
import numpy as np
def computeCostMinPoA(n, B, f, options=None):
    ''' Authors: <NAME>, <NAME> and <NAME>
    Copyright(c) 2020 <NAME>, <NAME>, <NAME>.
    All rights reserved. See LICENSE file in the project root for full license information.

    Compute the price-of-anarchy of atomic congestion games whose congestion
    functions are linear combinations of {b_1(x),...,b_m(x)}, with n players.

    Parameters
    ----------
    n : int
        Number of players.
    B : (m,n) ndarray
        Basis congestion functions defined for 'N = {1, 2, ..., n}'.
    f : (m,n) ndarray
        Player cost functions defined for 'N = {1, 2, ..., n}'.
    options : dict, optional
        Optimization options.

    Returns
    -------
    PoA : float
        Price-of-anarchy.
    '''
    if options is None:
        # Default to SciPy's linprog when no solver was supplied.
        try:
            from scipy.optimize import linprog
        except ImportError:
            raise RuntimeError(
                'No optimization options were specified, and SciPy is not installed.')
        options = {'solver': linprog, 'method': 'revised simplex'}
    # Zero-pad the value axis on both sides before transposing for the LP.
    padded_B = np.pad(B, pad_width=((0, 0), (1, 1)), mode='constant').T
    padded_f = np.pad(f, pad_width=((0, 0), (1, 1)), mode='constant').T
    x, _, exitFlag, output = dualLP(n, padded_B, padded_f, True, options)
    if exitFlag:
        raise RuntimeError(output)
    return 1. / x[1]
def computeWelfareMaxPoA(n, B, f, options=None):
    ''' Authors: <NAME>, <NAME> and <NAME>
    Copyright(c) 2020 <NAME>, <NAME>, <NAME>.
    All rights reserved. See LICENSE file in the project root for full license information.

    Compute the price-of-anarchy of atomic congestion games whose welfare
    functions are linear combinations of {b_1(x),...,b_m(x)} and whose utility
    functions are linear combinations of {f_1(x),...,f_m(x)}, with n players.

    Parameters
    ----------
    n : int
        Number of players.
    B : (m,n) ndarray
        Basis welfare functions defined for 'N = {1, 2, ..., n}'.
    f : (m,n) ndarray
        Player utility functions defined for 'N = {1, 2, ..., n}'.
    options : dict, optional
        Optimization options.

    Returns
    -------
    PoA : float
        Price-of-anarchy of optimal constant tolls.
    '''
    if options is None:
        # Default to SciPy's linprog when no solver was supplied.
        try:
            from scipy.optimize import linprog
        except ImportError:
            raise RuntimeError(
                'No optimization options were specified, and SciPy is not installed.')
        options = {'solver': linprog, 'method': 'revised simplex'}
    # Zero-pad the value axis on both sides before transposing for the LP.
    padded_B = np.pad(B, pad_width=((0, 0), (1, 1)), mode='constant').T
    padded_f = np.pad(f, pad_width=((0, 0), (1, 1)), mode='constant').T
    x, _, exitFlag, output = dualLP(n, padded_B, padded_f, False, options)
    if exitFlag:
        raise RuntimeError(output)
    return x[1]
def optimizeCostMinPoA(n, B, options=None):
    ''' Authors: <NAME>, <NAME> and <NAME>
    Copyright(c) 2020 <NAME>, <NAME>, <NAME>.
    All rights reserved. See LICENSE file in the project root for full license information.

    Optimize the price-of-anarchy of atomic congestion games with congestion
    functions obtained as linear combination of basis {b_1(x),...,b_m(x)},
    and n players.

    Parameters
    ----------
    n : int
        Number of players.
    B : (m,n) ndarray
        Basis cost functions defined for 'N = {1, 2, ..., n}'.
    options : dict, optional
        Optimization options.

    Returns
    -------
    OptPoA : float
        Price-of-anarchy of optimal constant tolls.
    Optf : (m,n) ndarray
        Functions used to generate optimal mechanism.
    '''
    if options is None:
        try:
            from scipy.optimize import linprog
            options = { 'solver' : linprog,
                        'method' : 'revised simplex' }
        except ImportError:
            msg = 'No optimization options were specified, and SciPy is not installed.'
            raise RuntimeError(msg)
    m = np.shape(B)[0]
    OptPoA = 0.
    # ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin ``float`` is the documented, equivalent replacement.
    Optf = np.zeros((m, n), dtype=float)
    # Solve one LP per basis function; the overall PoA is the worst case.
    for currentBasis in np.arange(m):
        w = B[currentBasis, :]
        x, _, exitFlag, output = optimalLP(n, np.pad(w, pad_width=1, mode='constant'), True, options)
        if exitFlag:
            raise RuntimeError(output)
        Optf[currentBasis, :] = x[0:n]
        currentPoA = 1./x[n]
        OptPoA = max(OptPoA, currentPoA)
    return [ OptPoA, Optf ]
def optimizeCostMinPoAConstant(n, B, options=None):
    ''' Authors: <NAME>, <NAME> and <NAME>
    Copyright(c) 2020 <NAME>, <NAME>, <NAME>.
    All rights reserved. See LICENSE file in the project root for full license information.
    Description
    -----------
    Optimizes the price-of-anarchy (using *constant* tolls) of atomic
    congestion games with congestion functions obtained as linear combination
    of basis {b_1(x),...,b_m(x)}, and n players.
    Parameters
    ----------
    n : int
        Number of players.
    B : (m,n) ndarray
        Basis congestion functions defined for 'N = {1, 2, ..., n}'.
    options : dict, optional
        Optimization options.
    Returns
    -------
    OptPoA : float
        Price-of-anarchy of optimal constant mechanism.
    OptTau : (m,) ndarray
        Values used to generate optimal constant mechanism.
    '''
    # Fall back to SciPy's linprog when no solver options were supplied.
    if options is None:
        try:
            from scipy.optimize import linprog
        except ImportError:
            raise RuntimeError(
                'No optimization options were specified, and SciPy is not installed.')
        options = {'solver': linprog,
                   'method': 'revised simplex'}
    m = np.shape(B)[0]
    # Zero-pad each basis row on both sides before handing it to the LP.
    padded = np.pad(B, pad_width=((0, 0), (1, 1)), mode='constant').T
    x, _, failed, output = optimalLPConstant(n, padded, True, options)
    if failed:
        raise RuntimeError(output)
    OptPoA = 1. / x[m + 1]
    OptTau = x[0:m] / x[m]
    return [OptPoA, OptTau]
def optimizeWelfareMaxPoA(n, B, options=None):
    ''' Authors: <NAME>, <NAME> and <NAME>
    Copyright(c) 2020 <NAME>, <NAME>, <NAME>.
    All rights reserved. See LICENSE file in the project root for full license information.
    Description
    -----------
    Optimizes the price-of-anarchy of atomic congestion games
    with welfare functions obtained as linear combination of basis
    {b_1(x),...,b_m(x)}, and n players.
    Parameters
    ----------
    n : int
        Number of players.
    B : (m,n) ndarray
        Resource welfare function defined for 'N = {1, 2, ..., n}'.
    options : dict, optional
        Choice of solver and options.
    Returns
    -------
    OptPoA : float
        Optimal price-of-anarchy.
    Optf : (m,n) ndarray
        Functions used to generate optimal mechanism.
    '''
    if options is None:
        try:
            from scipy.optimize import linprog
            options = {'solver': linprog,
                       'method': 'revised simplex'}
        except ImportError:
            msg = 'No optimization options were specified, and SciPy is not installed.'
            raise RuntimeError(msg)
    m = np.shape(B)[0]
    OptPoA = 0.
    # np.float was deprecated in NumPy 1.20 and removed in 1.24; it was
    # always an alias for the builtin float, which we use instead.
    Optf = np.zeros((m, n), dtype=float)
    # Solve one LP per basis function; the overall PoA is the worst case.
    for currentBasis in range(m):
        w = B[currentBasis, :]
        x, _, exitFlag, output = optimalLP(
            n, np.pad(w, pad_width=1, mode='constant'), False, options)
        if exitFlag:
            raise RuntimeError(output)
        Optf[currentBasis, :] = x[0:n]
        currentPoA = x[n]
        OptPoA = max(OptPoA, currentPoA)
    return [OptPoA, Optf]
|
[
"numpy.pad",
"numpy.shape",
"numpy.zeros",
"numpy.arange"
] |
[((4569, 4601), 'numpy.zeros', 'np.zeros', (['(m, n)'], {'dtype': 'np.float'}), '((m, n), dtype=np.float)\n', (4577, 4601), True, 'import numpy as np\n'), ((4628, 4640), 'numpy.arange', 'np.arange', (['m'], {}), '(m)\n', (4637, 4640), True, 'import numpy as np\n'), ((7957, 7989), 'numpy.zeros', 'np.zeros', (['(m, n)'], {'dtype': 'np.float'}), '((m, n), dtype=np.float)\n', (7965, 7989), True, 'import numpy as np\n'), ((8016, 8028), 'numpy.arange', 'np.arange', (['m'], {}), '(m)\n', (8025, 8028), True, 'import numpy as np\n'), ((1322, 1376), 'numpy.pad', 'np.pad', (['B'], {'pad_width': '((0, 0), (1, 1))', 'mode': '"""constant"""'}), "(B, pad_width=((0, 0), (1, 1)), mode='constant')\n", (1328, 1376), True, 'import numpy as np\n'), ((1388, 1442), 'numpy.pad', 'np.pad', (['f'], {'pad_width': '((0, 0), (1, 1))', 'mode': '"""constant"""'}), "(f, pad_width=((0, 0), (1, 1)), mode='constant')\n", (1394, 1442), True, 'import numpy as np\n'), ((2956, 3010), 'numpy.pad', 'np.pad', (['B'], {'pad_width': '((0, 0), (1, 1))', 'mode': '"""constant"""'}), "(B, pad_width=((0, 0), (1, 1)), mode='constant')\n", (2962, 3010), True, 'import numpy as np\n'), ((3022, 3076), 'numpy.pad', 'np.pad', (['f'], {'pad_width': '((0, 0), (1, 1))', 'mode': '"""constant"""'}), "(f, pad_width=((0, 0), (1, 1)), mode='constant')\n", (3028, 3076), True, 'import numpy as np\n'), ((4525, 4536), 'numpy.shape', 'np.shape', (['B'], {}), '(B)\n', (4533, 4536), True, 'import numpy as np\n'), ((6329, 6340), 'numpy.shape', 'np.shape', (['B'], {}), '(B)\n', (6337, 6340), True, 'import numpy as np\n'), ((6358, 6412), 'numpy.pad', 'np.pad', (['B'], {'pad_width': '((0, 0), (1, 1))', 'mode': '"""constant"""'}), "(B, pad_width=((0, 0), (1, 1)), mode='constant')\n", (6364, 6412), True, 'import numpy as np\n'), ((7913, 7924), 'numpy.shape', 'np.shape', (['B'], {}), '(B)\n', (7921, 7924), True, 'import numpy as np\n'), ((4727, 4766), 'numpy.pad', 'np.pad', (['w'], {'pad_width': '(1)', 'mode': '"""constant"""'}), "(w, 
pad_width=1, mode='constant')\n", (4733, 4766), True, 'import numpy as np\n'), ((8115, 8154), 'numpy.pad', 'np.pad', (['w'], {'pad_width': '(1)', 'mode': '"""constant"""'}), "(w, pad_width=1, mode='constant')\n", (8121, 8154), True, 'import numpy as np\n')]
|
import folium
import numpy as np
from folium.plugins import HeatMap, MarkerCluster
import pandas as pd
from math import sin, cos, acos, asin, atan2, radians, degrees
def plot_circle(lat, lon, radius, map=None, **kwargs):
    """
    Draw one or more damage-zone circles on a folium map
    (creating a new map instance if necessary).

    Parameters
    ----------
    lat: float
        latitude of circle centre (degrees)
    lon: float
        longitude of circle centre (degrees)
    radius: arraylike, float
        radius (or list of radii) of the circle(s) to plot (m)
    map: folium.Map
        existing map object

    Returns
    -------
    Folium map object

    Examples
    --------
    >>> import folium
    >>> armageddon.plot_circle(52.79, -2.95, 1e3, map=None)
    """
    # Accept a bare number as a one-element list of radii.
    if isinstance(radius, (int, float)):
        radius = [radius]
    # Create a map when none was supplied.
    if not map:
        map = folium.Map(location=[lat, lon], control_scale=True)
    # Zone colours: zone1 purple, zone2 red, zone3 orange, zone4 yellow.
    colors = ['#9370DB', '#DC143C', '#FF8000', '#FFFF00']
    # Draw the largest circle first so the smaller zones stay visible on
    # top; the sort keeps zone number = colour index + 1.
    for idx, rad in enumerate(sorted(radius, reverse=True)):
        circle = folium.Circle([lat, lon], rad, fill=True,
                               fillOpacity=1., color=colors[idx],
                               **kwargs)
        circle.add_to(map)
    return map
def latlon_to_xyz(lat, lon):
    """Convert latitude/longitude to a point on the unit sphere.

    The equatorial plane is the xy plane and the rotation axis is the
    z axis.

    Parameters
    ----------
    lat: float
        latitude (degrees)
    lon: float
        longitude (degrees)

    Returns
    -------
    tuple of float
        (x, y, z) coordinates on the unit sphere.
    """
    rlat = radians(lat)
    rlon = radians(lon)
    # cos(lat) is shared by both equatorial components.
    cos_lat = cos(rlat)
    return cos_lat * cos(rlon), cos_lat * sin(rlon), sin(rlat)
def xyz_to_latlon(x, y, z):
    """Convert a unit-sphere point back to latitude and longitude.

    Parameters
    ----------
    x: float
        x coordinate in the equatorial plane
    y: float
        y coordinate in the equatorial plane
    z: float
        z coordinate along the polar axis

    Returns
    -------
    tuple of float
        (latitude, longitude) in degrees.
    """
    rlat = asin(z)
    # Divide out cos(lat) so atan2 sees the in-plane direction only.
    scale = cos(rlat)
    return degrees(rlat), degrees(atan2(y / scale, x / scale))
def halfway_on_sphere(lat, lon, elat, elon, z):
    """
    Interpolate along the great circle between two points.

    For z = 0.5 the returned point is the halfway point of the route.

    Parameters
    ----------
    lat: float
        latitude of zero point
    lon: float
        longitude of zero point
    elat: float
        latitude of entry point
    elon: float
        longitude of entry point
    z: float
        interpolation fraction between zero point (0) and entry point (1).

    Returns
    -------
    list
        [latitude, longitude] of the interpolated point.
    """
    # Work in Cartesian coordinates on the unit sphere.
    p0 = latlon_to_xyz(lat, lon)
    p1 = latlon_to_xyz(elat, elon)
    # Central angle between the two points.
    theta = acos(sum(a * b for a, b in zip(p0, p1)))
    # Spherical linear interpolation weights.
    w0 = sin(theta * (1 - z)) / sin(theta)
    w1 = sin(theta * z) / sin(theta)
    out_lat, out_lon = xyz_to_latlon(
        *(a * w0 + b * w1 for a, b in zip(p0, p1)))
    return [out_lat, out_lon]
def plot_line(lat, lon, elat, elon, map=None, n=100):
    """
    Draw a black great-circle line from the zero point to the entry point.

    Parameters
    ----------
    lat: float
        latitude of zero point (degrees)
    lon: float
        longitude of zero point (degrees)
    elat: float
        latitude of entry point (degrees)
    elon: float
        longitude of entry point (degrees)
    n: int
        number of route divisions; default is 100.
    map: folium.Map
        existing map object

    Returns
    -------
    Folium map object

    Examples
    --------
    >>> plot_line(52.79, -2.95, 53.48, -2.24 , map=None)
    """
    if not map:
        map = folium.Map(location=[lat, lon], control_scale=True)
    # Sample n intermediate waypoints along the great-circle route.
    waypoints = [halfway_on_sphere(lat, lon, elat, elon, z=i / (n + 1))
                 for i in range(n)]
    # Full polyline: zero point, waypoints, entry point.
    points = [[lat, lon], *waypoints, [elat, elon]]
    folium.PolyLine(points,
                    color="black", weight=2.5, opacity=1).add_to(map)
    return map
def get_lat_long_of_postcodes(postcodes, sector=False):
    """
    Return (latitude, longitude) for a list of postcode units or sectors.

    Parameters
    ----------
    postcodes : list of lists
        list of postcode units or postcode sectors
    sector : bool, optional
        if true return locations for postcode sectors,
        otherwise postcode units

    Returns
    -------
    list of lists
        latitude/longitude pairs for the input postcode units or sectors

    Examples
    --------
    >>> get_lat_log_of_postcode([['SW7 2AZ','SW7 2BT','SW7 2BU','SW7 2DD']])
    >>> get_lat_log_of_postcode([['SW7 2']], True)
    """
    frame = pd.read_csv('./armageddon/resources/full_postcodes.csv')
    # Normalise spaces to underscores on both sides of the comparison.
    postcodes = [[unit.replace(" ", "_") for unit in group]
                 for group in postcodes]
    frame['Postcode'] = frame['Postcode'].str.replace(" ", "_")
    if sector:
        # Collapse units into sectors (first five characters) by averaging
        # the unit locations.
        frame = frame.groupby(
            frame['Postcode'].str.slice(stop=5),
            as_index=True).mean().reset_index()
    matches = frame[
        frame['Postcode'].isin(postcodes[0])][['Latitude', 'Longitude']]
    return matches.values.tolist()
def heat_map_layer(locations, weights, map=None, radius=25):
    """
    Add a heat-map layer to a folium map from
    a list of locations and a list of weights.

    Parameters
    ----------
    locations : list of lists
        list of latitude and longitude coordinates
        corresponding to postcode units or postcode sectors
    weights : list of lists, array-like
        list of weights to be plotted at locations

    Returns
    -------
    Folium map

    Examples
    --------
    >>> locations = get_lat_long_of_postcodes(postcodes, sector=False)
    >>> weights = [['10000', '20000', '30000', '40000']]
    >>> heat_map_layer(locations, weights, map = None, radius = 25)
    """
    # Centre the map on the average of the given locations.
    Avr_location = np.average(locations, axis=0)
    if not map:
        map = folium.Map(location=Avr_location, control_scale=True)
    # Build [lat, lon, weight] rows without mutating the caller's data.
    # (The previous list.copy() was shallow: the inner [lat, lon] lists
    # were shared, so appending the weight leaked into `locations`.)
    combo = [list(loc) + [float(w)] for loc, w in zip(locations, weights[0])]
    heat_map = HeatMap(combo, name=None, min_opacity=0.5,
                       max_zoom=18, radius=radius, blur=15, gradient=None,
                       overlay=False, control=True, show=True)
    heat_map.add_to(map)
    return map
def plot_marker(lat, lon, popup=None, map=None, **kwargs):
    """
    Drop a single marker on a map (creating a new folium map instance
    if necessary).

    Parameters
    ----------
    lat: float
        latitude of point to plot (degrees)
    lon: float
        longitude of point to plot (degrees)
    popup: str
        will plot a string label at the point
    map: folium.Map
        existing map object

    Returns
    -------
    Folium map object

    Examples
    --------
    >>> import folium
    >>> armageddon.plot_point(52.79, -2.95, 1e3, map=None)
    """
    # Only string popups are supported; silently drop anything else.
    if popup is not None and not isinstance(popup, str):
        popup = None
    if not map:
        map = folium.Map(location=[lat, lon], control_scale=True)
    marker = folium.map.Marker(location=[lat, lon], popup=popup,
                               tooltip=None, icon=None,
                               draggable=False, **kwargs)
    marker.add_to(map)
    return map
def plot_multiple_markers(locations, popups=None, map=None):
    """
    Add a cluster of markers to a folium map from a list of locations.

    Parameters
    ----------
    locations : list of lists
        list of latitude and longitude coordinates
        corresponding to postcode units or postcode sectors
    popups: list of str
        will plot a string label at each point
    map: folium.Map
        existing map object

    Returns
    -------
    Folium map

    Examples
    --------
    >>> locations = get_lat_long_of_postcodes(postcodes, sector=False)
    >>> plot_multiple_markers(locations, popups= None, map = None)
    """
    Avr_location = np.average(locations, axis=0)
    if not map:
        map = folium.Map(location=Avr_location, control_scale=True)
    # Attach the cluster but return the map itself: add_to() returns the
    # child layer, so the previous `map = MarkerCluster(...).add_to(map)`
    # returned the MarkerCluster instead of the documented folium map.
    MarkerCluster(locations=locations, popups=popups,
                  icons=None, name='Location Markers',
                  overlay=True, control=True,
                  show=True, icon_create_function=None,
                  options=None).add_to(map)
    return map
|
[
"folium.map.Marker",
"numpy.average",
"math.asin",
"math.atan2",
"pandas.read_csv",
"math.radians",
"folium.plugins.HeatMap",
"math.sin",
"folium.Circle",
"math.cos",
"folium.Map",
"folium.plugins.MarkerCluster",
"folium.PolyLine",
"math.degrees"
] |
[((2603, 2610), 'math.asin', 'asin', (['z'], {}), '(z)\n', (2607, 2610), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((2624, 2633), 'math.cos', 'cos', (['rlat'], {}), '(rlat)\n', (2627, 2633), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((5734, 5790), 'pandas.read_csv', 'pd.read_csv', (['"""./armageddon/resources/full_postcodes.csv"""'], {}), "('./armageddon/resources/full_postcodes.csv')\n", (5745, 5790), True, 'import pandas as pd\n'), ((7200, 7229), 'numpy.average', 'np.average', (['locations'], {'axis': '(0)'}), '(locations, axis=0)\n', (7210, 7229), True, 'import numpy as np\n'), ((7605, 7744), 'folium.plugins.HeatMap', 'HeatMap', (['combo'], {'name': 'None', 'min_opacity': '(0.5)', 'max_zoom': '(18)', 'radius': 'radius', 'blur': '(15)', 'gradient': 'None', 'overlay': '(False)', 'control': '(True)', 'show': '(True)'}), '(combo, name=None, min_opacity=0.5, max_zoom=18, radius=radius, blur\n =15, gradient=None, overlay=False, control=True, show=True)\n', (7612, 7744), False, 'from folium.plugins import HeatMap, MarkerCluster\n'), ((9429, 9458), 'numpy.average', 'np.average', (['locations'], {'axis': '(0)'}), '(locations, axis=0)\n', (9439, 9458), True, 'import numpy as np\n'), ((974, 1025), 'folium.Map', 'folium.Map', ([], {'location': '[lat, lon]', 'control_scale': '(True)'}), '(location=[lat, lon], control_scale=True)\n', (984, 1025), False, 'import folium\n'), ((2096, 2108), 'math.radians', 'radians', (['lat'], {}), '(lat)\n', (2103, 2108), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((2110, 2122), 'math.radians', 'radians', (['lon'], {}), '(lon)\n', (2117, 2122), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((2181, 2190), 'math.sin', 'sin', (['rlat'], {}), '(rlat)\n', (2184, 2190), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((2645, 2658), 'math.degrees', 'degrees', (['rlat'], {}), '(rlat)\n', (2652, 
2658), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((3507, 3527), 'math.sin', 'sin', (['(theta * (1 - z))'], {}), '(theta * (1 - z))\n', (3510, 3527), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((3530, 3540), 'math.sin', 'sin', (['theta'], {}), '(theta)\n', (3533, 3540), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((3550, 3564), 'math.sin', 'sin', (['(theta * z)'], {}), '(theta * z)\n', (3553, 3564), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((3567, 3577), 'math.sin', 'sin', (['theta'], {}), '(theta)\n', (3570, 3577), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((4499, 4550), 'folium.Map', 'folium.Map', ([], {'location': '[lat, lon]', 'control_scale': '(True)'}), '(location=[lat, lon], control_scale=True)\n', (4509, 4550), False, 'import folium\n'), ((7303, 7356), 'folium.Map', 'folium.Map', ([], {'location': 'Avr_location', 'control_scale': '(True)'}), '(location=Avr_location, control_scale=True)\n', (7313, 7356), False, 'import folium\n'), ((8523, 8574), 'folium.Map', 'folium.Map', ([], {'location': '[lat, lon]', 'control_scale': '(True)'}), '(location=[lat, lon], control_scale=True)\n', (8533, 8574), False, 'import folium\n'), ((9490, 9543), 'folium.Map', 'folium.Map', ([], {'location': 'Avr_location', 'control_scale': '(True)'}), '(location=Avr_location, control_scale=True)\n', (9500, 9543), False, 'import folium\n'), ((2135, 2144), 'math.cos', 'cos', (['rlat'], {}), '(rlat)\n', (2138, 2144), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((2147, 2156), 'math.cos', 'cos', (['rlon'], {}), '(rlon)\n', (2150, 2156), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((2158, 2167), 'math.cos', 'cos', (['rlat'], {}), '(rlat)\n', (2161, 2167), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((2170, 2179), 'math.sin', 'sin', 
(['rlon'], {}), '(rlon)\n', (2173, 2179), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((2668, 2697), 'math.atan2', 'atan2', (['(y / coslat)', '(x / coslat)'], {}), '(y / coslat, x / coslat)\n', (2673, 2697), False, 'from math import sin, cos, acos, asin, atan2, radians, degrees\n'), ((4922, 4983), 'folium.PolyLine', 'folium.PolyLine', (['points'], {'color': '"""black"""', 'weight': '(2.5)', 'opacity': '(1)'}), "(points, color='black', weight=2.5, opacity=1)\n", (4937, 4983), False, 'import folium\n'), ((8580, 8687), 'folium.map.Marker', 'folium.map.Marker', ([], {'location': '[lat, lon]', 'popup': 'popup', 'tooltip': 'None', 'icon': 'None', 'draggable': '(False)'}), '(location=[lat, lon], popup=popup, tooltip=None, icon=None,\n draggable=False, **kwargs)\n', (8597, 8687), False, 'import folium\n'), ((9555, 9730), 'folium.plugins.MarkerCluster', 'MarkerCluster', ([], {'locations': 'locations', 'popups': 'popups', 'icons': 'None', 'name': '"""Location Markers"""', 'overlay': '(True)', 'control': '(True)', 'show': '(True)', 'icon_create_function': 'None', 'options': 'None'}), "(locations=locations, popups=popups, icons=None, name=\n 'Location Markers', overlay=True, control=True, show=True,\n icon_create_function=None, options=None)\n", (9568, 9730), False, 'from folium.plugins import HeatMap, MarkerCluster\n'), ((1408, 1497), 'folium.Circle', 'folium.Circle', (['[lat, lon]', 'rad'], {'fill': '(True)', 'fillOpacity': '(1.0)', 'color': 'colors[i]'}), '([lat, lon], rad, fill=True, fillOpacity=1.0, color=colors[i],\n **kwargs)\n', (1421, 1497), False, 'import folium\n')]
|
#
# Copyright (c) 2021 Nitric Technologies Pty Ltd.
#
# This file is part of Nitric Python 3 SDK.
# See https://github.com/nitrictech/python-sdk for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
from dataclasses import dataclass
from typing import Union
from grpclib import GRPCError
from nitric.api.exception import exception_from_grpc_error
from nitric.utils import new_default_channel
from nitricapi.nitric.secret.v1 import SecretServiceStub, Secret as SecretMessage, SecretVersion as VersionMessage
class Secrets(object):
    """
    Nitric secrets management client.

    This client insulates application code from stack specific secrets managements services.
    """

    def __init__(self):
        """Construct a Nitric Storage Client."""
        self._channel = new_default_channel()
        self._secrets_stub = SecretServiceStub(channel=self._channel)

    def secret(self, name: str):
        """Return a reference to a secret container from the connected secrets management service."""
        return SecretContainer(_secrets=self, name=name)

    def __del__(self):
        # Release the underlying channel when this client is destroyed.
        channel = self._channel
        if channel is not None:
            channel.close()
def _secret_to_wire(secret: SecretContainer) -> SecretMessage:
    # Translate a SecretContainer reference into its gRPC wire message.
    message = SecretMessage(name=secret.name)
    return message
@dataclass(frozen=True)
class SecretContainer(object):
    """A reference to a secret container, used to store and retrieve secret versions."""

    _secrets: Secrets
    name: str

    async def put(self, value: Union[str, bytes]) -> SecretVersion:
        """
        Create a new secret version, making it the latest and storing the provided value.

        :param value: the secret value to store
        """
        # The service accepts bytes only; encode strings as UTF-8.
        payload = value.encode("utf-8") if isinstance(value, str) else value
        wire_secret = _secret_to_wire(self)
        try:
            response = await self._secrets._secrets_stub.put(secret=wire_secret, value=payload)
            return self.version(version=response.secret_version.version)
        except GRPCError as grpc_err:
            raise exception_from_grpc_error(grpc_err)

    def version(self, version: str):
        """
        Return a reference to a specific version of a secret.

        Can be used to retrieve the secret value associated with the version.
        """
        return SecretVersion(_secrets=self._secrets, secret=self, id=version)

    def latest(self):
        """
        Return a reference to the 'latest' secret version.

        Note: using 'access' on this reference may return different values between requests if a
        new version is created between access calls.
        """
        return self.version("latest")
def _secret_version_to_wire(version: SecretVersion) -> VersionMessage:
    # Translate a SecretVersion reference into its gRPC wire message.
    wire_secret = _secret_to_wire(version.secret)
    return VersionMessage(wire_secret, version=version.id)
@dataclass(frozen=True)
class SecretVersion(object):
    """A reference to a version of a secret, used to access the value of the version."""

    _secrets: Secrets
    secret: SecretContainer
    id: str

    async def access(self) -> SecretValue:
        """Return the value stored against this version of the secret."""
        wire_version = _secret_version_to_wire(self)
        try:
            response = await self._secrets._secrets_stub.access(secret_version=wire_version)
        except GRPCError as grpc_err:
            raise exception_from_grpc_error(grpc_err)
        # Pin the version id: 'latest' references resolve to the concrete
        # version the service answered with, so the returned value always
        # carries a fixed version id.
        resolved = response.secret_version.version
        if resolved == self.id:
            version_ref = self
        else:
            version_ref = SecretVersion(_secrets=self._secrets, secret=self.secret, id=resolved)
        return SecretValue(version=version_ref, value=response.value)
@dataclass(frozen=True)
class SecretValue(object):
    """Represents the value of a secret, tied to a specific version."""

    # The version containing this value. Never 'latest', always a specific version.
    version: SecretVersion
    value: bytes

    def __str__(self) -> str:
        return self.value.decode("utf-8")

    def __bytes__(self) -> bytes:
        return self.value

    def as_string(self):
        """Return the content of this secret value as a string."""
        return self.__str__()

    def as_bytes(self):
        """Return the content of this secret value."""
        return self.__bytes__()
|
[
"nitricapi.nitric.secret.v1.SecretServiceStub",
"nitric.utils.new_default_channel",
"nitricapi.nitric.secret.v1.Secret",
"nitric.api.exception.exception_from_grpc_error",
"dataclasses.dataclass"
] |
[((1877, 1899), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (1886, 1899), False, 'from dataclasses import dataclass\n'), ((3421, 3443), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (3430, 3443), False, 'from dataclasses import dataclass\n'), ((4525, 4547), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (4534, 4547), False, 'from dataclasses import dataclass\n'), ((1842, 1873), 'nitricapi.nitric.secret.v1.Secret', 'SecretMessage', ([], {'name': 'secret.name'}), '(name=secret.name)\n', (1855, 1873), True, 'from nitricapi.nitric.secret.v1 import SecretServiceStub, Secret as SecretMessage, SecretVersion as VersionMessage\n'), ((1327, 1348), 'nitric.utils.new_default_channel', 'new_default_channel', ([], {}), '()\n', (1346, 1348), False, 'from nitric.utils import new_default_channel\n'), ((1378, 1418), 'nitricapi.nitric.secret.v1.SecretServiceStub', 'SecretServiceStub', ([], {'channel': 'self._channel'}), '(channel=self._channel)\n', (1395, 1418), False, 'from nitricapi.nitric.secret.v1 import SecretServiceStub, Secret as SecretMessage, SecretVersion as VersionMessage\n'), ((2654, 2689), 'nitric.api.exception.exception_from_grpc_error', 'exception_from_grpc_error', (['grpc_err'], {}), '(grpc_err)\n', (2679, 2689), False, 'from nitric.api.exception import exception_from_grpc_error\n'), ((3964, 3999), 'nitric.api.exception.exception_from_grpc_error', 'exception_from_grpc_error', (['grpc_err'], {}), '(grpc_err)\n', (3989, 3999), False, 'from nitric.api.exception import exception_from_grpc_error\n')]
|
# Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest
def suite():
    """Collect the lib2to3 test cases into a single unittest suite."""
    loader = unittest.TestLoader()
    combined = unittest.TestSuite()
    for module in (test_fixers, test_pytree, test_util):
        combined.addTests(loader.loadTestsFromModule(module))
    return combined
def test_main():
    """Entry point used by the regression-test driver."""
    tests = suite()
    run_unittest(tests)
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    test_main()
|
[
"unittest.TestLoader",
"unittest.TestSuite"
] |
[((211, 231), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (229, 231), False, 'import unittest\n'), ((245, 266), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (264, 266), False, 'import unittest\n')]
|
# Generated by Django 2.0.1 on 2018-02-16 12:00
from django.db import migrations, models
import django.db.models.manager
class Migration(migrations.Migration):
    """Initial schema: creates the User, FacultyCourses and UserCourses tables."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='FacultyCourses',
            fields=[
                ('ufid', models.BigAutoField(primary_key=True, serialize=False)),
                ('course_id', models.BigIntegerField()),
                ('date_created', models.DateTimeField()),
                ('date_modified', models.DateTimeField()),
            ],
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('u_id', models.BigAutoField(primary_key=True, serialize=False)),
                ('first_name', models.CharField(max_length=20)),
                ('last_name', models.CharField(max_length=20)),
                ('email_id', models.EmailField(max_length=255, unique=True)),
                ('mobile_number', models.CharField(max_length=20)),
                ('role', models.IntegerField(choices=[(0, 'Admin'), (1, 'Faculty'), (2, 'Student')])),
                ('active', models.BooleanField(default=True)),
                ('staff', models.BooleanField(default=False)),
                ('admin', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(auto_now_add=True)),
                ('date_modified', models.DateTimeField(auto_now=True)),
            ],
            options={
                'abstract': False,
            },
            managers=[
                # NOTE(review): the manager is registered as 'object'
                # (singular); Django's conventional default manager name is
                # 'objects' — confirm this matches the custom user model.
                ('object', django.db.models.manager.Manager()),
            ],
        ),
        migrations.CreateModel(
            name='UserCourses',
            fields=[
                ('ucid', models.BigAutoField(primary_key=True, serialize=False)),
                ('course_id', models.BigIntegerField()),
                ('date_created', models.DateTimeField()),
                ('date_modified', models.DateTimeField()),
                # NOTE(review): on_delete expects a callable such as
                # django.db.models.deletion.CASCADE; the string 'cascade'
                # will presumably fail when a related User row is deleted —
                # verify against the model, and remember this migration may
                # already be applied before changing it.
                ('user', models.ForeignKey(on_delete='cascade', to='User.User')),
            ],
        ),
        migrations.AddField(
            model_name='facultycourses',
            name='user',
            # NOTE(review): same 'cascade' string concern as UserCourses.user.
            field=models.ForeignKey(on_delete='cascade', to='User.User'),
        ),
    ]
|
[
"django.db.models.BigIntegerField",
"django.db.models.ForeignKey",
"django.db.models.BigAutoField",
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.EmailField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((2430, 2484), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': '"""cascade"""', 'to': '"""User.User"""'}), "(on_delete='cascade', to='User.User')\n", (2447, 2484), False, 'from django.db import migrations, models\n'), ((344, 398), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (363, 398), False, 'from django.db import migrations, models\n'), ((431, 455), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {}), '()\n', (453, 455), False, 'from django.db import migrations, models\n'), ((491, 513), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (511, 513), False, 'from django.db import migrations, models\n'), ((550, 572), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (570, 572), False, 'from django.db import migrations, models\n'), ((708, 765), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'verbose_name': '"""password"""'}), "(max_length=128, verbose_name='password')\n", (724, 765), False, 'from django.db import migrations, models\n'), ((799, 869), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""last login"""'}), "(blank=True, null=True, verbose_name='last login')\n", (819, 869), False, 'from django.db import migrations, models\n'), ((897, 951), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (916, 951), False, 'from django.db import migrations, models\n'), ((985, 1016), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (1001, 1016), False, 'from django.db import migrations, models\n'), ((1049, 1080), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (1065, 
1080), False, 'from django.db import migrations, models\n'), ((1112, 1158), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(255)', 'unique': '(True)'}), '(max_length=255, unique=True)\n', (1129, 1158), False, 'from django.db import migrations, models\n'), ((1195, 1226), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (1211, 1226), False, 'from django.db import migrations, models\n'), ((1254, 1329), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(0, 'Admin'), (1, 'Faculty'), (2, 'Student')]"}), "(choices=[(0, 'Admin'), (1, 'Faculty'), (2, 'Student')])\n", (1273, 1329), False, 'from django.db import migrations, models\n'), ((1359, 1392), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1378, 1392), False, 'from django.db import migrations, models\n'), ((1421, 1455), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1440, 1455), False, 'from django.db import migrations, models\n'), ((1484, 1518), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1503, 1518), False, 'from django.db import migrations, models\n'), ((1554, 1593), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1574, 1593), False, 'from django.db import migrations, models\n'), ((1630, 1665), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1650, 1665), False, 'from django.db import migrations, models\n'), ((1978, 2032), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (1997, 2032), False, 'from django.db import migrations, models\n'), ((2065, 2089), 'django.db.models.BigIntegerField', 
'models.BigIntegerField', ([], {}), '()\n', (2087, 2089), False, 'from django.db import migrations, models\n'), ((2125, 2147), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (2145, 2147), False, 'from django.db import migrations, models\n'), ((2184, 2206), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (2204, 2206), False, 'from django.db import migrations, models\n'), ((2234, 2288), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': '"""cascade"""', 'to': '"""User.User"""'}), "(on_delete='cascade', to='User.User')\n", (2251, 2288), False, 'from django.db import migrations, models\n')]
|
import builtins
import datetime
import hashlib
import secrets
from db import *
from swagger_server.models.default_error import DefaultError as swg_DefaultError
from swagger_server.models.default_message import DefaultMessage as swg_DefaultMessage
from swagger_server.models.account_definition import AccountDefinition as swg_AccountDefinition
from swagger_server.models.account_definition_list import AccountDefinitionList as swg_AccountDefinitionList
from swagger_server.models.api_key import ApiKey as swg_ApiKey
from flask import request
PAGING_SIZE = 30
MAX_KEY_PER_ACCOUNT = 10
AUTH_HEADER_NAME = 'X-API-KEY'
def _render_account(db_account):
    """Convert a DbAccount row into a swagger AccountDefinition."""
    rendered_tags = {tag.key: tag.value for tag in db_account.tags}
    return swg_AccountDefinition(
        name=db_account.name,
        permission=db_account.permission,
        tag=rendered_tags,
        domains=db_account.domains,
        create_date=db_account.creation_date,
        last_modification_date=db_account.last_modification_date,
    )
def _access_deny(api_msg='access deny, invalid/missing API key',
                 log_msg='access deny, invalid/missing API key'):
    """Log an access denial and return an (error, 403) response tuple."""
    logger = builtins.CAS_CONTEXT['logger']
    logger.info(log_msg)
    error = swg_DefaultError(code='AccessDeny', message=api_msg)
    return error, 403
def _insuf_perm(api_msg='access deny, invalid/missing API key',
                log_msg='access deny, invalid/missing API key'):
    """Log an insufficient-permission denial and return an (error, 403) tuple."""
    logger = builtins.CAS_CONTEXT['logger']
    logger.info(log_msg)
    error = swg_DefaultError(code='InsufficientPermission', message=api_msg)
    return error, 403
def access_control():
    """Authenticate the request via its X-API-KEY header.

    Returns a dict with the matching account's 'id', 'perm' and 'name'
    when the header's SHA-512 digest matches a stored key hash, or None
    for a missing/unknown key.
    """
    api_key = request.headers.get(AUTH_HEADER_NAME)
    if api_key is None:
        return None
    secret_hash = hashlib.sha512(api_key.encode('utf-8')).hexdigest()
    session = builtins.CAS_CONTEXT['db_session']()
    key = session.query(DbApiKey).filter_by(secret_hash=secret_hash).first()
    if key is None:
        return None
    return {
        'id': key.account.id,
        'perm': key.account.permission,
        'name': key.account.name,
    }
def _check_perm(account, allowed_roles, action, checked_id=None,
                api_msg="access deny, not enough permission",
                log_msg="access deny, not enough permission for user '%s' to do '%s'"):
    """Check that `account` (from access_control()) may perform `action`.

    `allowed_roles` is the list of acceptable permission strings; when
    `checked_id` is given, the account's own name must also match it.
    Returns None when access is granted, otherwise an (error, 403) tuple
    ready to be returned from a handler.
    """
    if account is None:
        # Bug fix: _access_deny()/_insuf_perm() already return an
        # (error, 403) tuple; the original appended a second ", 403",
        # producing a nested ((error, 403), 403) that Flask can't render.
        return _access_deny()
    if account['perm'] not in allowed_roles \
            or (checked_id and checked_id != account['name']):
        return _insuf_perm(
            log_msg=log_msg % (account['name'], action),
            api_msg=api_msg,
        )
    return None
def _account_create(body):
    """Create a new account (admin-only) from the swagger payload `body`.

    Returns the rendered AccountDefinition on success, an (error, 400)
    tuple on invalid/duplicate data, or an (error, 403) tuple on
    insufficient permission.
    """
    account = access_control()
    perm = _check_perm(account, ['AdminWrite'], 'create account')
    if perm:
        return perm
    session = builtins.CAS_CONTEXT['db_session']()
    name = body.name
    permission = body.permission
    try:
        new_account = DbAccount(
            name=name,
            permission=permission,
            creation_date=datetime.datetime.now(),
            last_modification_date=datetime.datetime.now()
        )
        session.add(new_account)
        session.flush()
        session.refresh(new_account)
    except sqlalchemy.exc.IntegrityError as e:
        # Loose matching of the "not unique" constraint message; the
        # backend may spell it 'unique' or 'UNIQUE'.  Bug fix: the
        # original tested `not in ... or not in ...`, which was true for
        # almost every IntegrityError and mislabeled genuine duplicates.
        if 'unique' in str(e) or 'UNIQUE' in str(e):
            log_msg = 'Duplicate account name, account "%s" already exists' % name
        else:
            log_msg = 'Invalid/incomplete data in the payload'
        builtins.CAS_CONTEXT['logger'].info(log_msg)
        session.rollback()
        session.close()  # bug fix: session leaked on the error path
        return swg_DefaultError(code='InvalidInputData', message=log_msg), 400
    if body.tag is not None:
        for key in body.tag:
            tag = DbTagAccount(key=key, value=body.tag[key], account_id=new_account.id)
            session.add(tag)
    session.commit()
    session.refresh(new_account)
    ret = _render_account(new_account)
    session.close()
    return ret
def _account_delete(accountId):
    """Delete the account named `accountId` (admin-only).

    Returns a (message, 200) tuple on success or a (error, 404) tuple
    when the account does not exist.
    """
    caller = access_control()
    if caller is None:
        return _access_deny(log_msg="access deny on account delete")
    if caller['perm'] not in ['AdminWrite']:
        return _access_deny(
            log_msg="user '%s' (perm: %s) doesn't have permission to delete an account" % (caller['name'], caller['perm']),
            api_msg="deleting an account requires admin permission",
        )
    session = builtins.CAS_CONTEXT['db_session']()
    db_account = session.query(DbAccount).filter_by(name=accountId).first()
    # TODO handle cascading or error messages when certificates/domains/notifications are
    # still present
    if db_account is None:
        log_msg = "user '%s' doesn't exist, nothing to be done" % accountId
        builtins.CAS_CONTEXT['logger'].info(log_msg)
        ret = swg_DefaultError(message=log_msg)
        ret_code = 404
    else:
        for tag in db_account.tags:
            session.delete(tag)
        session.query(DbAccount).filter_by(name=accountId).delete()
        log_msg = "user '%s' deleted" % accountId
        builtins.CAS_CONTEXT['logger'].info(log_msg)
        ret = swg_DefaultMessage(message=log_msg)
        ret_code = 200
    session.commit()
    session.close()
    return ret, ret_code
def _account_get(accountId):
    """Return the account named `accountId`, or an error tuple.

    Admins (read or write) may read any account; a non-admin may only
    read its own account.
    """
    # Bug fix: the original read `account` without ever calling
    # access_control(), raising NameError on every request.
    account = access_control()
    if account is None:
        return _access_deny(log_msg="access deny on account get")
    is_admin = account['perm'] in ['AdminWrite', 'AdminRead']
    is_self = (account['name'] == accountId
               and account['perm'] in ['Read', 'Write', 'SelfRegisterDomain'])
    # Bug fix: the original condition denied admins and let everyone else
    # through; deny only callers that are neither admin nor the account owner.
    if not is_admin and not is_self:
        return _access_deny(
            log_msg="user '%s' (perm: %s) doesn't have permission to read an account" % (account['name'], account['perm']),
            api_msg="read an account requires admin (read) permission",
        )
    session = builtins.CAS_CONTEXT['db_session']()
    db_account = session.query(DbAccount).filter_by(name=accountId).first()
    if db_account is None:
        log_msg = "user '%s' doesn't exist" % accountId
        session.close()
        return swg_DefaultError(message=log_msg), 404
    ret = _render_account(db_account)
    session.close()
    if ret is None:
        log_msg = 'Account "%s" doesn\'t exist' % accountId
        builtins.CAS_CONTEXT['logger'].warning(log_msg)
        return swg_DefaultError(code='InvalidInputData', message=log_msg), 404
    return ret
def _account_list(next_id=None):
    """List accounts, PAGING_SIZE at a time, starting after id `next_id`.

    Returns an AccountDefinitionList whose `next_id` is the paging cursor
    for the next call, or None when there are no further results.
    """
    if next_id is None:
        next_id = 0
    session = builtins.CAS_CONTEXT['db_session']()
    accounts = session.query(DbAccount).filter(DbAccount.id > next_id).order_by(DbAccount.id).limit(PAGING_SIZE)
    rendered_accounts = []
    last_id = None
    for account in accounts:
        rendered_accounts.append(_render_account(account))
        last_id = account.id
    # A full page means there may be more rows: hand back the id of the
    # last rendered account as the cursor.  Bug fix: the original left the
    # incoming next_id untouched when the query returned no rows (its
    # `elif len(...) == 0` branch was dead code inside the loop), so
    # clients would loop forever on the same cursor.
    if len(rendered_accounts) == PAGING_SIZE:
        next_id = last_id
    else:
        next_id = None
    ret = swg_AccountDefinitionList()
    ret.list = rendered_accounts
    ret.next_id = next_id
    session.close()
    return ret
def _account_update(accountId, body):
    """Update name/permission/tags of account `accountId` from `body`.

    The tag set is replaced wholesale by the payload's tags.  Returns the
    rendered AccountDefinition, or an (error, 404) tuple for an unknown
    account.
    """
    session = builtins.CAS_CONTEXT['db_session']()
    account = session.query(DbAccount).filter_by(name=accountId).first()
    if account is None:
        log_msg = "user '%s' doesn't exist" % accountId
        session.close()  # bug fix: session leaked on the 404 path
        return swg_DefaultError(message=log_msg), 404
    if body.name:
        account.name = body.name
    if body.permission:
        account.permission = body.permission
    # Drop the existing tags before re-adding the payload's set.
    if account.tags is not None:
        for tag in account.tags:
            session.delete(tag)
    account.last_modification_date = datetime.datetime.now()
    # Bug fix: iterating body.tag unconditionally raised TypeError when
    # the payload carried no tag dict.
    if body.tag:
        for key in body.tag:
            tag = DbTagAccount(key=key, value=body.tag[key], account_id=account.id)
            session.add(tag)
    session.commit()
    session.refresh(account)
    ret = _render_account(account)
    session.close()
    return ret
def _render_apikey(key, fullkey=None):
    """Convert a DbApiKey row into a swagger ApiKey.

    The secret is masked using the stored prefix unless the clear key is
    supplied via `fullkey` (only available right after creation).
    """
    rendered = swg_ApiKey()
    if fullkey is None:
        rendered.secret = "%s-XXXXXXX" % key.secret_prefix
    else:
        rendered.secret = fullkey
    rendered.create_date = key.creation_date
    rendered.id = key.id
    rendered.last_modification_date = key.last_modification_date
    return rendered
def _apikey_create(accountId):
    """Create a new API key for account `accountId`.

    Only the SHA-512 digest of the key is persisted; the clear key is
    returned once to the caller and never stored.  Returns the rendered
    ApiKey, or an (error, 404/401) tuple for unknown account / key quota
    exceeded.
    """
    session = builtins.CAS_CONTEXT['db_session']()
    account = session.query(DbAccount).filter_by(name=accountId).first()
    if account is None:
        log_msg = "user '%s' doesn't exist" % accountId
        session.close()
        return swg_DefaultError(message=log_msg), 404
    # Bug fix: `>` let an account accumulate MAX_KEY_PER_ACCOUNT + 1 keys;
    # `>=` enforces the cap the error message advertises.
    if len(account.api_keys) >= MAX_KEY_PER_ACCOUNT:
        log_msg = "user '%s' already has the maximum number of keys (%d)" % (accountId, MAX_KEY_PER_ACCOUNT)
        session.close()
        return swg_DefaultError(message=log_msg), 401
    secret_prefix = secrets.token_hex(4)
    secret_main = secrets.token_urlsafe(32)
    api_key = "%s-%s" % (secret_prefix, secret_main)
    hashed_key = hashlib.sha512(api_key.encode('utf-8')).hexdigest()
    dbapi_key = DbApiKey(secret_hash=hashed_key,
                         creation_date=datetime.datetime.now(),
                         secret_prefix=secret_prefix,
                         account_id=account.id,
                         )
    session.add(dbapi_key)
    session.commit()
    ret = _render_apikey(dbapi_key, fullkey=api_key)
    session.close()
    return ret
def _apikey_delete(accountId, keyId):
    """Delete key `keyId` of account `accountId`.

    Returns a (message, 200) tuple on success, or an (error, 404) tuple
    for an unknown account or key.
    """
    session = builtins.CAS_CONTEXT['db_session']()
    # TODO: join in one query instead of two queries...
    try:
        internal_account_id = session.query(DbAccount).filter_by(name=accountId).first().id
    except AttributeError:
        # .first() returned None: unknown account.  Bug fix: the original
        # bare `except:` also swallowed KeyboardInterrupt and real DB errors.
        log_msg = "cannot delete key, user '%s' doesn't exist" % accountId
        builtins.CAS_CONTEXT['logger'].info(log_msg)
        session.close()
        return swg_DefaultError(code='InvalidInputData', message=log_msg), 404
    key = session.query(DbApiKey).filter_by(id=keyId, account_id=internal_account_id).first()
    if key is not None:
        log_msg = "key '%s' (key: '%s-XXXX') of user '%s' deleted" % (keyId, key.secret_prefix, accountId)
        ret = swg_DefaultMessage(message=log_msg)
        ret_code = 200
        session.delete(key)
        session.commit()
    else:
        log_msg = "cannot delete, key '%s' of user '%s' doesn't exist" % (keyId, accountId)
        ret_code = 404
        ret = swg_DefaultError(code='InvalidInputData', message=log_msg)
    builtins.CAS_CONTEXT['logger'].info(log_msg)
    session.close()
    return ret, ret_code
def _apikey_get(accountId, keyId):
    """Return key `keyId` of account `accountId` with its secret masked.

    Returns (rendered_key, 200) on success or (error, 404) for an
    unknown account or key.
    """
    session = builtins.CAS_CONTEXT['db_session']()
    # TODO: join in one query instead of two queries...
    try:
        internal_account_id = session.query(DbAccount).filter_by(name=accountId).first().id
    except AttributeError:
        # .first() returned None: unknown account.  Bug fixes: the original
        # bare `except:` swallowed everything, and its message said
        # "cannot delete key" (copy-paste from _apikey_delete).
        log_msg = "cannot get key, user '%s' doesn't exist" % accountId
        builtins.CAS_CONTEXT['logger'].info(log_msg)
        session.close()
        return swg_DefaultError(code='InvalidInputData', message=log_msg), 404
    key = session.query(DbApiKey).filter_by(id=keyId, account_id=internal_account_id).first()
    if key is None:
        ret_code = 404
        log_msg = "key '%s' of user '%s' doesn't exist" % (keyId, accountId)
        ret = swg_DefaultError(code='InvalidInputData', message=log_msg)
    else:
        ret_code = 200
        ret = _render_apikey(key)
        log_msg = "key '%s' of user '%s' queried" % (keyId, accountId)
    builtins.CAS_CONTEXT['logger'].info(log_msg)
    session.close()  # bug fix: the session was never closed on this path
    return ret, ret_code
def _apikey_list(accountId, next_id=None):
    """List all API keys of account `accountId` with masked secrets.

    `next_id` is accepted for interface symmetry but unused (the key list
    is small, capped at MAX_KEY_PER_ACCOUNT).  Returns a list of rendered
    keys, or an (error, 404) tuple for an unknown account.
    """
    session = builtins.CAS_CONTEXT['db_session']()
    account = session.query(DbAccount).filter_by(name=accountId).first()
    if account is None:
        log_msg = "cannot list keys, user '%s' doesn't exist" % accountId
        builtins.CAS_CONTEXT['logger'].info(log_msg)
        session.close()
        return swg_DefaultError(code='InvalidInputData', message=log_msg), 404
    ret = [_render_apikey(k) for k in account.api_keys]
    session.close()  # bug fix: the session was leaked on the success path
    return ret
def _certificate_create(domainName, body):
    """Create a certificate for `domainName`. Not implemented yet."""
    pass
def _certificate_delete(domainName, certificateId):
    """Delete certificate `certificateId` of `domainName`. Not implemented yet."""
    pass
def _certificate_get(domainName, certificateId):
    """Return certificate `certificateId` of `domainName`. Not implemented yet."""
    pass
def _certificate_list(domainName, next_id=None):
    """List certificates of `domainName`. Not implemented yet."""
    pass
def _domain_create(body):
    """Create a domain from the payload `body`. Not implemented yet."""
    pass
def _domain_delete(domainName):
    """Delete the domain `domainName`. Not implemented yet."""
    pass
def _domain_get(domainName):
    """Return the domain `domainName`. Not implemented yet."""
    pass
def _domain_list(next_id=None):
    """List domains (paged via `next_id`). Not implemented yet."""
    pass
def _notification_acknowledge(notificationId, body):
    """Acknowledge notification `notificationId`. Not implemented yet."""
    pass
def _notification_get(notificationId):
    """Return notification `notificationId`. Not implemented yet."""
    pass
def _notification_list(next_id=None):
    """List notifications (paged via `next_id`). Not implemented yet."""
    pass
|
[
"flask.request.headers.get",
"swagger_server.models.account_definition_list.AccountDefinitionList",
"secrets.token_hex",
"secrets.token_urlsafe",
"swagger_server.models.default_message.DefaultMessage",
"swagger_server.models.account_definition.AccountDefinition",
"swagger_server.models.api_key.ApiKey",
"swagger_server.models.default_error.DefaultError",
"datetime.datetime.now"
] |
[((744, 970), 'swagger_server.models.account_definition.AccountDefinition', 'swg_AccountDefinition', ([], {'name': 'db_account.name', 'permission': 'db_account.permission', 'tag': 'tags', 'domains': 'db_account.domains', 'create_date': 'db_account.creation_date', 'last_modification_date': 'db_account.last_modification_date'}), '(name=db_account.name, permission=db_account.\n permission, tag=tags, domains=db_account.domains, create_date=\n db_account.creation_date, last_modification_date=db_account.\n last_modification_date)\n', (765, 970), True, 'from swagger_server.models.account_definition import AccountDefinition as swg_AccountDefinition\n'), ((1544, 1581), 'flask.request.headers.get', 'request.headers.get', (['AUTH_HEADER_NAME'], {}), '(AUTH_HEADER_NAME)\n', (1563, 1581), False, 'from flask import request\n'), ((6894, 6921), 'swagger_server.models.account_definition_list.AccountDefinitionList', 'swg_AccountDefinitionList', ([], {}), '()\n', (6919, 6921), True, 'from swagger_server.models.account_definition_list import AccountDefinitionList as swg_AccountDefinitionList\n'), ((7568, 7591), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (7589, 7591), False, 'import datetime\n'), ((7892, 7904), 'swagger_server.models.api_key.ApiKey', 'swg_ApiKey', ([], {}), '()\n', (7902, 7904), True, 'from swagger_server.models.api_key import ApiKey as swg_ApiKey\n'), ((8736, 8756), 'secrets.token_hex', 'secrets.token_hex', (['(4)'], {}), '(4)\n', (8753, 8756), False, 'import secrets\n'), ((8775, 8800), 'secrets.token_urlsafe', 'secrets.token_urlsafe', (['(32)'], {}), '(32)\n', (8796, 8800), False, 'import secrets\n'), ((1195, 1247), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'code': '"""AccessDeny"""', 'message': 'api_msg'}), "(code='AccessDeny', message=api_msg)\n", (1211, 1247), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((1436, 1500), 
'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'code': '"""InsufficientPermission"""', 'message': 'api_msg'}), "(code='InsufficientPermission', message=api_msg)\n", (1452, 1500), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((4918, 4953), 'swagger_server.models.default_message.DefaultMessage', 'swg_DefaultMessage', ([], {'message': 'log_msg'}), '(message=log_msg)\n', (4936, 4953), True, 'from swagger_server.models.default_message import DefaultMessage as swg_DefaultMessage\n'), ((5130, 5163), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'message': 'log_msg'}), '(message=log_msg)\n', (5146, 5163), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((9974, 10009), 'swagger_server.models.default_message.DefaultMessage', 'swg_DefaultMessage', ([], {'message': 'log_msg'}), '(message=log_msg)\n', (9992, 10009), True, 'from swagger_server.models.default_message import DefaultMessage as swg_DefaultMessage\n'), ((10225, 10283), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'code': '"""InvalidInputData"""', 'message': 'log_msg'}), "(code='InvalidInputData', message=log_msg)\n", (10241, 10283), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((11093, 11151), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'code': '"""InvalidInputData"""', 'message': 'log_msg'}), "(code='InvalidInputData', message=log_msg)\n", (11109, 11151), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((6034, 6067), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'message': 'log_msg'}), '(message=log_msg)\n', (6050, 6067), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((6279, 6337), 
'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'code': '"""InvalidInputData"""', 'message': 'log_msg'}), "(code='InvalidInputData', message=log_msg)\n", (6295, 6337), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((7274, 7307), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'message': 'log_msg'}), '(message=log_msg)\n', (7290, 7307), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((8436, 8469), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'message': 'log_msg'}), '(message=log_msg)\n', (8452, 8469), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((8676, 8709), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'message': 'log_msg'}), '(message=log_msg)\n', (8692, 8709), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((8997, 9020), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (9018, 9020), False, 'import datetime\n'), ((11722, 11780), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'code': '"""InvalidInputData"""', 'message': 'log_msg'}), "(code='InvalidInputData', message=log_msg)\n", (11738, 11780), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((2971, 2994), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2992, 2994), False, 'import datetime\n'), ((3035, 3058), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3056, 3058), False, 'import datetime\n'), ((3577, 3635), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'code': '"""InvalidInputData"""', 'message': 'log_msg'}), "(code='InvalidInputData', message=log_msg)\n", (3593, 3635), True, 'from swagger_server.models.default_error import 
DefaultError as swg_DefaultError\n'), ((9671, 9729), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'code': '"""InvalidInputData"""', 'message': 'log_msg'}), "(code='InvalidInputData', message=log_msg)\n", (9687, 9729), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n'), ((10801, 10859), 'swagger_server.models.default_error.DefaultError', 'swg_DefaultError', ([], {'code': '"""InvalidInputData"""', 'message': 'log_msg'}), "(code='InvalidInputData', message=log_msg)\n", (10817, 10859), True, 'from swagger_server.models.default_error import DefaultError as swg_DefaultError\n')]
|