code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
#!/usr/bin/env python
# coding: utf-8
# import packages
import re
import pandas as pd
from bs4 import BeautifulSoup
from collections import defaultdict
# Function definitions
def extract_all_characters(soup):
    """
    Extract speaker/division markers from the XML of a play.

    Walks every <div> and <sp> tag and records the value of the
    'type' attribute (Act/Scene divisions) or, failing that, the
    'who' attribute (the name of the speaking character).  Adapt the
    tag and attribute names here to handle a different XML schema.

    Returns a single-column pandas DataFrame called 'names'.
    """
    markers = []
    for tag in soup.findAll(['div', 'sp']):
        attributes = tag.attrs
        if 'type' in attributes:
            markers.append(attributes['type'])
        elif 'who' in attributes:
            markers.append(attributes['who'])
    return pd.DataFrame(markers, columns=['names'])
def character_pairings_in(l):
    """
    Build "speaking in turn" pairings from an ordered sequence of names.

    Any NaN entries (introduced by pandas for missing values) are dropped
    first, then each remaining element is paired with its successor,
    yielding one tuple per adjacent pair of speakers.

    Parameters
    ----------
    l : iterable
        Ordered speaker / division markers.

    Returns
    -------
    list of tuple
        Consecutive (speaker, next_speaker) pairings.
    """
    # Drop pandas NaN placeholders; str(float('nan')) == 'nan'.
    speakers = [x for x in l if str(x) != 'nan']
    # Zip the sequence against itself shifted by one to get adjacent
    # pairs.  This replaces the original index-based loop, the dead
    # `len(row) > 1` filter (every row was already a 2-tuple), and the
    # redundant re-tupling that shadowed the parameter name.
    return list(zip(speakers, speakers[1:]))
def create_edgelist_from(pairs):
    """
    Create a weighted, undirected edgelist for "speaking-in-turn" pairs.

    Each unordered pair of distinct names is counted once per occurrence
    (orientation is normalised with a `<` comparison, so ("A", "B") and
    ("B", "A") increment the same edge).

    Parameters
    ----------
    pairs : iterable of tuple
        Speaker pairings, e.g. the output of `character_pairings_in`.

    Returns
    -------
    pandas.DataFrame
        Columns ["Source", "Type", "Target", "Weight"], ready for Gephi.
    """
    # Count each normalised (smaller, larger) pair.  Keying the dict on a
    # tuple avoids the original build-a-CSV-string-then-split round trip.
    edges = defaultdict(int)
    for people in pairs:
        for person_a in people:
            for person_b in people:
                if person_a < person_b:
                    edges[(person_a, person_b)] += 1
    # Materialise directly into the Gephi column layout.
    rows = [(a, "undirected", b, weight) for (a, b), weight in edges.items()]
    return pd.DataFrame(rows, columns=["Source", "Type", "Target", "Weight"])
if __name__=="__main__":
    # Plays identified by their standard abbreviations.
    targets = ["KL", "ado", "2h4", "wt"]
    for target in targets:
        # Read in the play and create a BeautifulSoup object.
        filename = f"/Users/au564346/Desktop/{target}.xml"
        with open(filename, 'r') as file:
            raw = file.read()
        soup = BeautifulSoup(raw, 'lxml')
        # Single-column DataFrame of speaker / division markers.
        character_list = extract_all_characters(soup)
        # BUGFIX: iterating a DataFrame yields its column labels ('names'),
        # not the extracted values, so the original call
        # character_pairings_in(character_list) always produced an empty
        # edgelist.  Pass the column's values instead.
        names = character_list['names'].tolist()
        edgelist_df = create_edgelist_from(character_pairings_in(names))
        print(edgelist_df)
        # Save to csv for downstream use (e.g. Gephi import).
        edgelist_df.to_csv(f"{target}.csv", sep=",", index=False, header=True)
| [
"bs4.BeautifulSoup",
"pandas.DataFrame",
"collections.defaultdict",
"pandas.DataFrame.from_dict"
] | [((1799, 1815), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (1810, 1815), False, 'from collections import defaultdict\n'), ((2068, 2113), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['edges'], {'orient': '"""index"""'}), "(edges, orient='index')\n", (2090, 2113), True, 'import pandas as pd\n'), ((2832, 2858), 'bs4.BeautifulSoup', 'BeautifulSoup', (['raw', '"""lxml"""'], {}), "(raw, 'lxml')\n", (2845, 2858), False, 'from bs4 import BeautifulSoup\n'), ((803, 842), 'pandas.DataFrame', 'pd.DataFrame', (['idList'], {'columns': "['names']"}), "(idList, columns=['names'])\n", (815, 842), True, 'import pandas as pd\n')] |
# MIT License
# Copyright (c) 2021 xadrianzetx
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import hashlib
import requests
from typing import Optional
from coral_deeplab.pretrained import MLModel, EdgeTPUModel
GOOGLE_DRIVE_EXPORT_URL = 'https://docs.google.com/uc?export=download'
def download_and_checksum_mlmodel(model: MLModel,
                                  dst: Optional[str] = None) -> str:
    """Downloads model from google drive and checks its md5sum.

    Arguments
    ---------
    model : MLModel
        One of `MLModel` options
        available in `cdl.pretrained` module.

    dst : str, default = None
        Model destination path. Saves to
        library dir if not specified.

    Returns
    -------
    filepath : str
        Path to downloaded model.

    Notes
    -----
    Downloaded file will be deleted and an exception raised
    if the md5sum check fails.
    """
    filename = model.value.get('filename')
    module_dir = os.path.dirname(os.path.realpath(__file__))
    filepath = os.path.join(module_dir if not dst else dst, filename)
    if os.path.isfile(filepath):
        # Already present on disk - reuse the cached copy.
        return filepath
    # BUGFIX: both messages below contained the literal text "(unknown)"
    # instead of interpolating the model filename.
    print(f'Attempting to download {filename}')
    origin = model.value.get('origin')
    response = requests.get(GOOGLE_DRIVE_EXPORT_URL, params={'id': origin})
    with open(filepath, 'wb') as file:
        checksum = hashlib.md5(response.content)
        file.write(response.content)
    if checksum.hexdigest() != model.value.get('checksum'):
        # Corrupt or incomplete download: remove it so a retry starts clean.
        os.remove(filepath)
        # NOTE(review): raising Warning as an exception is unusual, but it
        # is kept since existing callers may already catch it.
        raise Warning(f'md5sum failed for {filename} and file was deleted.')
    return filepath
def from_precompiled(model: EdgeTPUModel, dest: Optional[str] = None) -> str:
    """Return the path to a precompiled edgetpu model, downloading it
    (with checksum verification) if it is not cached yet.

    Arguments
    ---------
    model : EdgeTPUModel
        One of `EdgeTPUModel` options
        available in `cdl.pretrained` module.

    dest : str, default = None
        Model destination path. Saves to
        library dir if not specified.

    Returns
    -------
    model : str
        Path to downloaded model.

    Raises
    ------
    ValueError
        If `model` is not an `EdgeTPUModel` member.
    """
    if isinstance(model, EdgeTPUModel):
        return download_and_checksum_mlmodel(model, dest)
    raise ValueError('Incorrect model type specified. '
                     'Use one of cdl.pretrained.EdgeTPUModel')
| [
"hashlib.md5",
"os.path.join",
"requests.get",
"os.path.isfile",
"os.path.realpath",
"os.remove"
] | [((2031, 2085), 'os.path.join', 'os.path.join', (['(module_dir if not dst else dst)', 'filename'], {}), '(module_dir if not dst else dst, filename)\n', (2043, 2085), False, 'import os\n'), ((2094, 2118), 'os.path.isfile', 'os.path.isfile', (['filepath'], {}), '(filepath)\n', (2108, 2118), False, 'import os\n'), ((2247, 2307), 'requests.get', 'requests.get', (['GOOGLE_DRIVE_EXPORT_URL'], {'params': "{'id': origin}"}), "(GOOGLE_DRIVE_EXPORT_URL, params={'id': origin})\n", (2259, 2307), False, 'import requests\n'), ((1988, 2014), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2004, 2014), False, 'import os\n'), ((2367, 2396), 'hashlib.md5', 'hashlib.md5', (['response.content'], {}), '(response.content)\n', (2378, 2396), False, 'import hashlib\n'), ((2503, 2522), 'os.remove', 'os.remove', (['filepath'], {}), '(filepath)\n', (2512, 2522), False, 'import os\n')] |
# movie recommendation program
import pandas as pd
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.feature_extraction.text import CountVectorizer
# load data (expects a 'New.csv' with at least Actors/Director/Genre/Title
# columns and an 'id' column used later for lookups)
df = pd.read_csv('New.csv')
print(df.head(3))
# df['id']=df.index
# df.to_csv('New')
# get count of movies in the data set and the number of columns
# NOTE(review): the bare expression below computes df.shape but discards
# the result - wrap it in print() if the output is wanted.
df.shape
# create list of important columns for the recommendation engine
columns = ['Actors', 'Director', 'Genre', 'Title']
print(df[columns].head(3))
# NOTE(review): the null check below is also discarded; its result is
# never used to clean the data.
df[columns].isnull().values.any()
# create function to combine values of the important columns
def get_important_values(data):
    """Combine the key text columns of every row into one space-separated
    string ('Actors Director Genre Title'), for feeding a text vectoriser.

    `data` must be a DataFrame with those four columns and a default
    RangeIndex (rows are addressed by integer label).
    """
    combined = []
    for i in range(data.shape[0]):
        parts = (data['Actors'][i], data['Director'][i],
                 data['Genre'][i], data['Title'][i])
        combined.append(' '.join(parts))
    return combined
# Create a column to hold the combined strings
df['important_features'] = get_important_values(df)
# show data
print(df['important_features'].head(3))
# convert text to matrix of token counts
cm = CountVectorizer().fit_transform(df['important_features'])
# Get the cosine similarity matrix from the count matrix
cs = cosine_similarity(cm)
print(cs)
# NOTE(review): bare expression, result is discarded.
cs.shape
title = 'The Amazing Spider-Man'
# Row id of the reference movie; assumes 'id' values index into cs rows
# (i.e. ids were assigned from the default DataFrame index).
movie_id = df[df.Title == title]['id'].values[0]
# (movie_id, similarity) pairs for every movie against the reference.
scores = list(enumerate(cs[movie_id]))
# sort pairs by similarity, most similar first
sorted_scores = sorted(scores, key=lambda x: x[1], reverse=True)
# drop the first entry: it is the movie compared with itself
sorted_scores = sorted_scores[1:]
print(sorted_scores)
j = 0
print('The 7 most recommended movies to', title, 'are:\n')
# Print the 7 highest-scoring titles.
for item in sorted_scores:
    movie_title = df[df.id == item[0]]['Title'].values[0]
    print(j + 1, movie_title)
    j = j + 1
    if j > 6:
        break
| [
"sklearn.feature_extraction.text.CountVectorizer",
"sklearn.metrics.pairwise.cosine_similarity",
"pandas.read_csv"
] | [((205, 227), 'pandas.read_csv', 'pd.read_csv', (['"""New.csv"""'], {}), "('New.csv')\n", (216, 227), True, 'import pandas as pd\n'), ((1201, 1222), 'sklearn.metrics.pairwise.cosine_similarity', 'cosine_similarity', (['cm'], {}), '(cm)\n', (1218, 1222), False, 'from sklearn.metrics.pairwise import cosine_similarity\n'), ((1079, 1096), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {}), '()\n', (1094, 1096), False, 'from sklearn.feature_extraction.text import CountVectorizer\n')] |
from newspaper import Article

# Read the article URL from stdin.  For newspapers in other languages,
# pass the matching language code to Article().
x = input()
# BUGFIX: the original assigned the literal string "x" instead of the
# value read from stdin, so the user's URL was silently ignored.
url = x
toi_article = Article(url, language="en")  # en for English
# Download and parse the article.  Attributes such as .title are only
# populated after parse(), so the title is printed afterwards (the
# original printed it beforehand and always got an empty string).
toi_article.download()
toi_article.parse()
sent2 = toi_article.title
print(sent2)
# Further extraction is available after toi_article.nlp():
# .text, .summary, .keywords
# print(toi_article.keywords) | [
"newspaper.Article"
] | [((560, 587), 'newspaper.Article', 'Article', (['url'], {'language': '"""en"""'}), "(url, language='en')\n", (567, 587), False, 'from newspaper import Article\n')] |
from SEAL import SplineSpace, create_knots
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
# Spline degree and number of basis functions
p = 2
n = 10
# Knot vector on [0, 1] for the degree-p spline space
t = create_knots(0, 1, p, n)
S = SplineSpace(p, t)
# 3-D control points - one per basis function, so len(c) == n
c = [(0, 1, 0), (1, 2, 1), (1.5, 3, 2), (1.7, -1, 3), (1, -1.5, 4), (3, 3, 3), (4, 4, 3), (5, 2, 2), (6, 5, 4), (7, -1, 5)]
f = S(c)
# Evaluate the spline at the space's default parameter values
x = S.parameter_values()
y = f(x)
# Refined control polygon after 4 iterations (presumably subdivision /
# corner cutting - confirm against SEAL's visualize() documentation)
cp = f.visualize(iterations=4)
# Plot control polygon and evaluated curve together in 3-D
fig = plt.figure()
axs = Axes3D(fig)
axs.plot(*zip(*cp))
axs.plot(*zip(*y))
plt.show()
"SEAL.create_knots",
"matplotlib.pyplot.figure",
"SEAL.SplineSpace",
"mpl_toolkits.mplot3d.Axes3D",
"matplotlib.pyplot.show"
] | [((133, 157), 'SEAL.create_knots', 'create_knots', (['(0)', '(1)', 'p', 'n'], {}), '(0, 1, p, n)\n', (145, 157), False, 'from SEAL import SplineSpace, create_knots\n'), ((162, 179), 'SEAL.SplineSpace', 'SplineSpace', (['p', 't'], {}), '(p, t)\n', (173, 179), False, 'from SEAL import SplineSpace, create_knots\n'), ((386, 398), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (396, 398), True, 'import matplotlib.pyplot as plt\n'), ((405, 416), 'mpl_toolkits.mplot3d.Axes3D', 'Axes3D', (['fig'], {}), '(fig)\n', (411, 416), False, 'from mpl_toolkits.mplot3d import Axes3D\n'), ((457, 467), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (465, 467), True, 'import matplotlib.pyplot as plt\n')] |
# Demo / smoke test of tcga.utils helpers: each import is followed by a
# minimal usage example (asserts double as executable documentation).
import io
from pathlib import Path
from tcga.utils import unlist1
# Returns the object from a list
# iff the list is a singleton
assert 43 == unlist1([43])
# An iterable will be consumed:
assert 36 == unlist1((x ** 2) for x in [6])
# These fail with a ValueError:
# unlist1([])
# unlist1([1, 2])
from tcga.utils import relpath
# Returns the path relative to the script
from tcga.utils import mkdir
# A wrapper for Path.mkdir:
# path = mkdir(Path("/path/to/folder"))
from tcga.utils import first
# Returns the first element of an iterable
assert 'A' == first("ABCD")
from tcga.utils import at_most_n
# Lazy cut-off for iterables
print(list(at_most_n("ABCD", n=2)))
# ['A', 'B']
from tcga.utils import whatsmyname
def rose():
    # whatsmyname() reports the name of the enclosing function.
    print(whatsmyname())
# Prints the name of
# the function: rose
from tcga.utils import assert_exists
# If `file` is a filename or path then
# assert_exists(file)
# either raises FileNotFoundError
# or returns back `file`
from tcga.utils import md5
# Computes the md5 hash of a text stream chunkwise.
# Attempts to rewind the stream back using .tell()
print(md5(io.StringIO("I know that I shall meet my fate")))
# 06a118b2f090ed1b39a1d07efdaa5d78
from tcga.utils import from_iterable
# Wraps chain.from_iterable, i.e.
print(set(from_iterable([[1, 2, 5], [4, 5]])))
print(from_iterable([[1, 2, 5], [4, 5]], type=set))
# {1, 2, 4, 5}
from tcga.utils import minidict
# A minimalistic read-only dictionary
minidict({1: 'A', 2: 'B'})
from tcga.utils import seek_then_rewind
# Context manager for rewinding file descriptors
# (reads this very script: skips 2 bytes inside the context, then the
# descriptor is rewound so the full first line is read again)
with open(__file__, mode='r') as fd:
    with seek_then_rewind(fd, seek=2):
        print(fd.readline().strip())
    # port io
    print(fd.readline().strip())
    # import io
# import io
| [
"tcga.utils.from_iterable",
"tcga.utils.seek_then_rewind",
"tcga.utils.at_most_n",
"tcga.utils.minidict",
"tcga.utils.first",
"tcga.utils.whatsmyname",
"io.StringIO",
"tcga.utils.unlist1"
] | [((1448, 1478), 'tcga.utils.minidict', 'minidict', (["{(1): 'A', (2): 'B'}"], {}), "({(1): 'A', (2): 'B'})\n", (1456, 1478), False, 'from tcga.utils import minidict\n'), ((143, 156), 'tcga.utils.unlist1', 'unlist1', (['[43]'], {}), '([43])\n', (150, 156), False, 'from tcga.utils import unlist1\n'), ((202, 230), 'tcga.utils.unlist1', 'unlist1', (['(x ** 2 for x in [6])'], {}), '(x ** 2 for x in [6])\n', (209, 230), False, 'from tcga.utils import unlist1\n'), ((556, 569), 'tcga.utils.first', 'first', (['"""ABCD"""'], {}), "('ABCD')\n", (561, 569), False, 'from tcga.utils import first\n'), ((1316, 1360), 'tcga.utils.from_iterable', 'from_iterable', (['[[1, 2, 5], [4, 5]]'], {'type': 'set'}), '([[1, 2, 5], [4, 5]], type=set)\n', (1329, 1360), False, 'from tcga.utils import from_iterable\n'), ((644, 666), 'tcga.utils.at_most_n', 'at_most_n', (['"""ABCD"""'], {'n': '(2)'}), "('ABCD', n=2)\n", (653, 666), False, 'from tcga.utils import at_most_n\n'), ((740, 753), 'tcga.utils.whatsmyname', 'whatsmyname', ([], {}), '()\n', (751, 753), False, 'from tcga.utils import whatsmyname\n'), ((1106, 1153), 'io.StringIO', 'io.StringIO', (['"""I know that I shall meet my fate"""'], {}), "('I know that I shall meet my fate')\n", (1117, 1153), False, 'import io\n'), ((1273, 1307), 'tcga.utils.from_iterable', 'from_iterable', (['[[1, 2, 5], [4, 5]]'], {}), '([[1, 2, 5], [4, 5]])\n', (1286, 1307), False, 'from tcga.utils import from_iterable\n'), ((1611, 1639), 'tcga.utils.seek_then_rewind', 'seek_then_rewind', (['fd'], {'seek': '(2)'}), '(fd, seek=2)\n', (1627, 1639), False, 'from tcga.utils import seek_then_rewind\n')] |
import pandas as pd
import matplotlib.pyplot as plt

# Part 1 -- load the aggregated per-country COVID-19 data
df = pd.read_csv(
    'https://raw.githubusercontent.com/datasets/covid-19/\
master/data/countries-aggregated.csv', parse_dates=['Date'])
# Part 2 -- keep only the countries of interest
negara = ["Indonesia", "Malaysia", 'Singapore']
df = df[df["Country"].isin(negara)]
# Part 3 -- add total cases, fatality rate and cure rate columns
df['Cases'] = df.iloc[:, 2:5].sum(axis=1)
df['FRates'] = ((df['Deaths']/df['Cases'])*100).fillna(0)
df['CRates'] = ((df['Recovered']/df['Cases'])*100).fillna(0)

# One (Date x Country) pivot per metric to plot.
covid19 = df.pivot(index='Date', columns='Country', values='Confirmed')
cured = df.pivot(index='Date', columns='Country', values='Recovered')
death = df.pivot(index='Date', columns='Country', values='Deaths')
frate = df.pivot(index='Date', columns='Country', values='FRates')
crate = df.pivot(index='Date', columns='Country', values='CRates')

plt.style.use('fivethirtyeight')

SOURCE_TEXT = ('Source: https://github.com/datasets/'
               'covid-19/blob/master/data/countries-aggregated.csv')


def _plot_series(series, fignum, plot_title, ylabel, fname, as_rate,
                 footer_ys):
    """Render one pivoted metric as a line chart and save it as *fname*.

    series    : (Date x Country) DataFrame to plot
    fignum    : matplotlib figure number (values kept from the original)
    plot_title: chart title
    ylabel    : y-axis label
    fname     : output PNG filename
    as_rate   : True  -> annotate last value as a 2-decimal percentage,
                False -> annotate as an integer count
    footer_ys : (y_source, y_credit) data coordinates for the footer text
    """
    plt.figure(num=fignum, dpi=200)
    plot = series.plot(grid=False, fontsize=15, figsize=(12, 8),
                       linewidth=5, legend=False, ax=plt.gca())
    plot.grid(b=True, which='major', axis='y', ls='--', lw=.5, c='k',
              alpha=.3)
    plot.set_title(plot_title, fontweight='bold', loc='center')
    plot.set_xlabel('Dates')
    plot.set_ylabel(ylabel)
    # Label the latest value of each country next to its line.
    for country in negara:
        last = series[country].tail(1)
        if as_rate:
            y_pos = float(last)
            label = country + ": " + str(float("%.2f" % last)) + '%'
        else:
            y_pos = int(last)
            label = country + ": " + str(int(last))
        plot.text(x=series.index[-1], y=y_pos, s=label, fontsize=15)
    # Footer anchored to the confirmed-cases date axis (as in the original).
    plot.text(x=covid19.index[1], y=footer_ys[0], s=SOURCE_TEXT,
              fontsize=12, fontweight='bold')
    plot.text(x=covid19.index[1], y=footer_ys[1], s="by: GSK", fontsize=12,
              fontweight='bold')
    plt.savefig(fname, bbox_inches="tight")


# The five nearly identical plotting sections of the original are folded
# into _plot_series; the "Confimed" typo in the first title is fixed.
_plot_series(covid19, 1, "COVID-19 Confirmed Case", '# of Infected',
             'Infected Number.png', False, (-1850, -2100))
_plot_series(death, 2, "COVID-19 Total Death", '# of Death',
             'Death Number.png', False, (-160, -180))
_plot_series(cured, 5, "COVID-19 Total Recovered", '# of Recovered',
             'Cured Number.png', False, (-160, -180))
_plot_series(frate, 3, "COVID-19 Fatality Rate", 'Fatality Rate (%)',
             'Fatality Rate.png', True, (-2.5, -2.8))
_plot_series(crate, 4, "COVID-19 Recovered Rate", 'Recovered Rate (%)',
             'Recovered Rate.png', True, (-2.5, -2.8))
| [
"matplotlib.pyplot.savefig",
"pandas.read_csv",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.figure"
] | [((97, 232), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/datasets/covid-19/master/data/countries-aggregated.csv"""'], {'parse_dates': "['Date']"}), "(\n 'https://raw.githubusercontent.com/datasets/covid-19/master/data/countries-aggregated.csv'\n , parse_dates=['Date'])\n", (108, 232), True, 'import pandas as pd\n'), ((964, 996), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""fivethirtyeight"""'], {}), "('fivethirtyeight')\n", (977, 996), True, 'import matplotlib.pyplot as plt\n'), ((997, 1023), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'num': '(1)', 'dpi': '(200)'}), '(num=1, dpi=200)\n', (1007, 1023), True, 'import matplotlib.pyplot as plt\n'), ((1807, 1862), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""Infected Number.png"""'], {'bbox_inches': '"""tight"""'}), "('Infected Number.png', bbox_inches='tight')\n", (1818, 1862), True, 'import matplotlib.pyplot as plt\n'), ((1865, 1891), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'num': '(2)', 'dpi': '(200)'}), '(num=2, dpi=200)\n', (1875, 1891), True, 'import matplotlib.pyplot as plt\n'), ((2658, 2710), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""Death Number.png"""'], {'bbox_inches': '"""tight"""'}), "('Death Number.png', bbox_inches='tight')\n", (2669, 2710), True, 'import matplotlib.pyplot as plt\n'), ((2713, 2739), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'num': '(5)', 'dpi': '(200)'}), '(num=5, dpi=200)\n', (2723, 2739), True, 'import matplotlib.pyplot as plt\n'), ((3514, 3566), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""Cured Number.png"""'], {'bbox_inches': '"""tight"""'}), "('Cured Number.png', bbox_inches='tight')\n", (3525, 3566), True, 'import matplotlib.pyplot as plt\n'), ((3569, 3595), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'num': '(3)', 'dpi': '(200)'}), '(num=3, dpi=200)\n', (3579, 3595), True, 'import matplotlib.pyplot as plt\n'), ((4388, 4441), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""Fatality Rate.png"""'], 
{'bbox_inches': '"""tight"""'}), "('Fatality Rate.png', bbox_inches='tight')\n", (4399, 4441), True, 'import matplotlib.pyplot as plt\n'), ((4444, 4470), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'num': '(4)', 'dpi': '(200)'}), '(num=4, dpi=200)\n', (4454, 4470), True, 'import matplotlib.pyplot as plt\n'), ((5265, 5319), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""Recovered Rate.png"""'], {'bbox_inches': '"""tight"""'}), "('Recovered Rate.png', bbox_inches='tight')\n", (5276, 5319), True, 'import matplotlib.pyplot as plt\n'), ((1136, 1145), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1143, 1145), True, 'import matplotlib.pyplot as plt\n'), ((2000, 2009), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2007, 2009), True, 'import matplotlib.pyplot as plt\n'), ((2848, 2857), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2855, 2857), True, 'import matplotlib.pyplot as plt\n'), ((3704, 3713), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3711, 3713), True, 'import matplotlib.pyplot as plt\n'), ((4579, 4588), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4586, 4588), True, 'import matplotlib.pyplot as plt\n')] |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# *****************************************************************************/
# * Authors: <NAME>, <NAME>
# *****************************************************************************/
from __future__ import absolute_import, division, print_function, unicode_literals # , nested_scopes, generators, generator_stop, with_statement, annotations
import tensorflow as tf
from tensorflow import keras
import numpy as np
import os, json, random
import matplotlib.pyplot as plt
def main():
    """Train a small dense classifier on precomputed shapelet distance
    matrices (QDist_2.txt) and their labels (labels_1.txt), both JSON.

    Each distance entry is expected to be an 11x11 matrix; labels take
    one of 3 classes (softmax output width) -- TODO confirm with the
    producer of QDist_2.txt.
    """
    # BUGFIX: the original only *read* the files inside the existence
    # check but used `shapelets`/`labels` unconditionally afterwards,
    # raising NameError when the files were missing.  Abort early instead.
    if not (os.path.exists("QDist_2.txt") and os.path.exists("labels_1.txt")):
        print("QDist_2.txt and/or labels_1.txt not found - aborting.")
        return
    with open("QDist_2.txt") as jF:
        shapelets = json.load(jF)
    with open("labels_1.txt") as jF:
        labels = json.load(jF)
    # Collect the distance matrices in file order, then shuffle them
    # together with their labels so the pairing is preserved.
    dists = [shapelets[key] for key in shapelets]
    shuffArr = list(zip(dists, labels))
    random.shuffle(shuffArr)
    dists = np.array([i[0] for i in shuffArr])
    labels = np.array([i[1] for i in shuffArr])
    print(labels)
    # Hold out a single sample for prediction; train on the rest.
    test = np.array(dists[0:1])
    train = dists[1:]
    test_labels = np.array(labels[0:1])
    train_labels = labels[1:]
    print(train.shape)
    # 11x11 distance matrix -> two hidden relu layers -> 3-way softmax.
    model = keras.Sequential([
        keras.layers.Flatten(input_shape=(11, 11)),
        keras.layers.Dense(128, activation=tf.nn.relu),
        keras.layers.Dense(64, activation=tf.nn.relu),
        keras.layers.Dense(3, activation=tf.nn.softmax)
    ])
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    # Checkpoint the weights during training.
    checkpoint_path = "training_2/cp_1.ckpt"
    checkpoint_dir = os.path.dirname(checkpoint_path)
    cp_callback = tf.keras.callbacks.ModelCheckpoint(checkpoint_path,
                                                     save_weights_only=True,
                                                     verbose=1)
    model.fit(train, train_labels, epochs=50, callbacks=[cp_callback])
    # NOTE(review): evaluation runs on the full data set (which includes
    # the training samples), so the reported accuracy is optimistic.
    test_loss, test_acc = model.evaluate(dists, labels)
    print(test_labels)
    print('Test accuracy:', test_acc)
    predictions = model.predict(test)
    model.save('my_model_new_data.h5')


if __name__ == '__main__':
    main()
| [
"os.path.exists",
"random.shuffle",
"os.path.dirname",
"numpy.array",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.callbacks.ModelCheckpoint",
"json.load",
"tensorflow.keras.layers.Flatten"
] | [((703, 732), 'os.path.exists', 'os.path.exists', (['"""QDist_2.txt"""'], {}), "('QDist_2.txt')\n", (717, 732), False, 'import os, json, random\n'), ((737, 767), 'os.path.exists', 'os.path.exists', (['"""labels_1.txt"""'], {}), "('labels_1.txt')\n", (751, 767), False, 'import os, json, random\n'), ((1089, 1113), 'random.shuffle', 'random.shuffle', (['shuffArr'], {}), '(shuffArr)\n', (1103, 1113), False, 'import os, json, random\n'), ((1161, 1195), 'numpy.array', 'np.array', (['[i[0] for i in shuffArr]'], {}), '([i[0] for i in shuffArr])\n', (1169, 1195), True, 'import numpy as np\n'), ((1214, 1248), 'numpy.array', 'np.array', (['[i[1] for i in shuffArr]'], {}), '([i[1] for i in shuffArr])\n', (1222, 1248), True, 'import numpy as np\n'), ((1290, 1310), 'numpy.array', 'np.array', (['dists[0:1]'], {}), '(dists[0:1])\n', (1298, 1310), True, 'import numpy as np\n'), ((1361, 1382), 'numpy.array', 'np.array', (['labels[0:1]'], {}), '(labels[0:1])\n', (1369, 1382), True, 'import numpy as np\n'), ((2261, 2293), 'os.path.dirname', 'os.path.dirname', (['checkpoint_path'], {}), '(checkpoint_path)\n', (2276, 2293), False, 'import os, json, random\n'), ((2357, 2447), 'tensorflow.keras.callbacks.ModelCheckpoint', 'tf.keras.callbacks.ModelCheckpoint', (['checkpoint_path'], {'save_weights_only': '(True)', 'verbose': '(1)'}), '(checkpoint_path, save_weights_only=True,\n verbose=1)\n', (2391, 2447), True, 'import tensorflow as tf\n'), ((835, 848), 'json.load', 'json.load', (['jF'], {}), '(jF)\n', (844, 848), False, 'import os, json, random\n'), ((915, 928), 'json.load', 'json.load', (['jF'], {}), '(jF)\n', (924, 928), False, 'import os, json, random\n'), ((1794, 1836), 'tensorflow.keras.layers.Flatten', 'keras.layers.Flatten', ([], {'input_shape': '(11, 11)'}), '(input_shape=(11, 11))\n', (1814, 1836), False, 'from tensorflow import keras\n'), ((1851, 1897), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(128)'], {'activation': 'tf.nn.relu'}), '(128, 
activation=tf.nn.relu)\n', (1869, 1897), False, 'from tensorflow import keras\n'), ((1912, 1957), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(64)'], {'activation': 'tf.nn.relu'}), '(64, activation=tf.nn.relu)\n', (1930, 1957), False, 'from tensorflow import keras\n'), ((1972, 2019), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(3)'], {'activation': 'tf.nn.softmax'}), '(3, activation=tf.nn.softmax)\n', (1990, 2019), False, 'from tensorflow import keras\n')] |
from sql_gen.commands import CreateSQLTaskCommand
from sql_gen.database import Connector, EMDatabase
from emtask.database import addb
class RewireVerbSQLTask(object):
    """Builds the SQL task that repoints a verb at a new process
    descriptor path."""

    def create_rewire_verb_template(self, verb, new_path):
        """Render the 'rewire_verb.sql' template for *verb*, wiring it to
        *new_path*."""
        values = {
            "entity_def_id": verb._entity_keyname,
            "verb_name": verb._name,
            "new_pd_path": new_path,
        }
        self._create_sql("rewire_verb.sql", **values)

    def _create_sql(self, *args, **kwargs):
        # args[0] is the template name; every keyword argument becomes a
        # template substitution value.
        command = CreateSQLTaskCommand(
            template_name=args[0], run_once=True, template_values=dict(**kwargs)
        )
        command.run()
class VerbDB(object):
    """Lightweight record of a verb row, plus the query used to fetch all
    verbs wired to a given process-descriptor repository path."""

    def __init__(self, entity_keyname=None, name=None, repository_path=None):
        self._entity_keyname = entity_keyname
        self._name = name
        self._repository_path = repository_path

    def fetch(self, repository_path=None):
        """Query the admin DB for every verb whose process descriptor
        lives at *repository_path* and return them as VerbDB instances."""
        v_by_repo_path = (
            "SELECT ci.KEYNAME as ENTITY_KEYNAME, v.NAME, pd.REPOSITORY_PATH"
            " FROM CCADMIN_IDMAP ci , EVA_VERB v, EVA_PROCESS_DESC_REFERENCE pdr,"
            " EVA_PROCESS_DESCRIPTOR PD"
            " WHERE v.PROCESS_DESC_REF_ID = pdr.id"
            " AND pdr.PROCESS_DESCRIPTOR_ID = pd.id"
            " AND ci.KEYSET ='ED'"
            " AND ci.ID =v.ENTITY_DEF_ID"
            " AND pd.REPOSITORY_PATH ='{}';"
        )
        rows = addb().fetch(v_by_repo_path.format(repository_path))
        return self.convert_from_db_fetch(rows)

    def convert_from_db_fetch(self, table):
        """Map raw DB rows (dicts) onto VerbDB instances; each new
        instance inherits this object's repository path."""
        return [
            VerbDB(row["ENTITY_KEYNAME"], row["NAME"], self._repository_path)
            for row in table
        ]
| [
"emtask.database.addb",
"sql_gen.commands.CreateSQLTaskCommand"
] | [((506, 602), 'sql_gen.commands.CreateSQLTaskCommand', 'CreateSQLTaskCommand', ([], {'template_name': 'args[0]', 'run_once': '(True)', 'template_values': 'template_values'}), '(template_name=args[0], run_once=True, template_values=\n template_values)\n', (526, 602), False, 'from sql_gen.commands import CreateSQLTaskCommand\n'), ((1416, 1422), 'emtask.database.addb', 'addb', ([], {}), '()\n', (1420, 1422), False, 'from emtask.database import addb\n')] |
# - *- coding: utf- 8 - *-
from typing import Optional
import aiohttp
# Asynchronous session holder for outgoing HTTP requests.
class RequestsSession:
    """Lazily-created, shared aiohttp client session."""

    def __init__(self) -> None:
        # No session is opened until the first caller asks for one.
        self._session: Optional[aiohttp.ClientSession] = None

    async def get_session(self) -> aiohttp.ClientSession:
        """Return the shared ClientSession, creating it on first use."""
        if self._session is None:
            self._session = aiohttp.ClientSession()
        return self._session

    async def close(self) -> None:
        """Close the underlying session, if one was ever created."""
        if self._session is not None:
            await self._session.close()
| [
"aiohttp.ClientSession"
] | [((343, 366), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (364, 366), False, 'import aiohttp\n')] |
import inspect
from estimagic.optimization import bhhh
from estimagic.optimization import cyipopt_optimizers
from estimagic.optimization import fides_optimizers
from estimagic.optimization import nag_optimizers
from estimagic.optimization import neldermead
from estimagic.optimization import nlopt_optimizers
from estimagic.optimization import pounders
from estimagic.optimization import pygmo_optimizers
from estimagic.optimization import scipy_optimizers
from estimagic.optimization import tao_optimizers
# All optimizer modules whose functions should be collected into the registry.
MODULES = [
    cyipopt_optimizers,
    fides_optimizers,
    nag_optimizers,
    nlopt_optimizers,
    pygmo_optimizers,
    scipy_optimizers,
    tao_optimizers,
    bhhh,
    neldermead,
    pounders,
]

# Registry of every decorated algorithm, and the subset whose backend is installed.
ALL_ALGORITHMS = {}
AVAILABLE_ALGORITHMS = {}
for module in MODULES:
    for name, func in inspect.getmembers(module, inspect.isfunction):
        # Only functions marked by the algorithm decorator belong in the registry.
        if not hasattr(func, "_algorithm_info"):
            continue
        ALL_ALGORITHMS[name] = func
        if func._algorithm_info.is_available:
            AVAILABLE_ALGORITHMS[name] = func

# Algorithms that perform global (rather than local) optimization.
GLOBAL_ALGORITHMS = [
    "nlopt_direct",
    "nlopt_esch",
    "nlopt_isres",
    "nlopt_crs2_lm",
]
for name in AVAILABLE_ALGORITHMS:
    if name.startswith("pygmo"):
        GLOBAL_ALGORITHMS.append(name)
| [
"inspect.getmembers"
] | [((807, 853), 'inspect.getmembers', 'inspect.getmembers', (['module', 'inspect.isfunction'], {}), '(module, inspect.isfunction)\n', (825, 853), False, 'import inspect\n')] |
from base64 import b64encode
def get_token(custos_settings):
    """Build the HTTP basic-auth token for the Custos client.

    Joins ``CUSTOS_CLIENT_ID`` and ``CUSTOS_CLIENT_SEC`` with a colon,
    base64-encodes the pair and returns the result as a UTF-8 string.
    """
    credentials = "{}:{}".format(
        custos_settings.CUSTOS_CLIENT_ID, custos_settings.CUSTOS_CLIENT_SEC
    )
    return b64encode(credentials.encode("utf-8")).decode("utf-8")
| [
"base64.b64encode"
] | [((213, 233), 'base64.b64encode', 'b64encode', (['tokenByte'], {}), '(tokenByte)\n', (222, 233), False, 'from base64 import b64encode\n')] |
import sep
import numpy as np
import scarlet
from scarlet.wavelet import mad_wavelet, Starlet
from .utils import extract_obj, image_gaia_stars
from astropy.table import Table, Column
from astropy import units as u
from astropy.units import Quantity
from astropy.coordinates import SkyCoord
from kuaizi.mock import Data
def interpolate(data_lr, data_hr):
    ''' Interpolate low resolution data to high resolution
    Parameters
    ----------
    data_lr: Data
        low resolution Data
    data_hr: Data
        high resolution Data
    Result
    ------
    interp: numpy array
        the images in data_lr interpolated to the grid of data_hr
    '''
    # Build scarlet frames describing the geometry (WCS) and channels of each dataset.
    frame_lr = scarlet.Frame(data_lr.images.shape,
                             wcs=data_lr.wcs, channels=data_lr.channels)
    frame_hr = scarlet.Frame(data_hr.images.shape,
                             wcs=data_hr.wcs, channels=data_hr.channels)
    # Pixel coordinate grids for each frame.
    # NOTE(review): both axes use images.shape[1], which assumes square images
    # -- confirm against callers before relying on non-square inputs.
    coord_lr0 = (np.arange(data_lr.images.shape[1]), np.arange(
        data_lr.images.shape[1]))
    coord_hr = (np.arange(data_hr.images.shape[1]), np.arange(
        data_hr.images.shape[1]))
    # Express the low-resolution pixel grid in the high-resolution frame.
    coord_lr = scarlet.resampling.convert_coordinates(
        coord_lr0, frame_lr, frame_hr)
    interp = []
    for image in data_lr.images:
        # Sinc-interpolate each channel onto the high-resolution grid.
        interp.append(scarlet.interpolation.sinc_interp(
            image[None, :, :], coord_hr, coord_lr, angle=None)[0].T)
    return np.array(interp)
# Vanilla detection: SEP
def vanilla_detection(detect_image, mask=None, sigma=3, b=64, f=3, minarea=5, deblend_nthresh=30,
                      deblend_cont=0.001, sky_subtract=True, show_fig=True, **kwargs):
    '''
    Source detection using Source Extractor (actually SEP).
    Parameters
    ----------
    detect_image: 2-D numpy array
        image
    mask: numpy 2-D array
        image mask
    sigma: float
        detection threshold
    b: float
        box size
    f: float
        kernel size
    minarea: float
        minimum area for a source
    sky_subtract: bool
        whether subtract the estimated sky from the input image, then detect sources
    show_fig: bool
        whether plot a figure showing objects and segmentation map
    **kwargs: see `utils.extract_obj`.
    Result
    ------
    obj_cat: `astropy.table.Table` object
        catalog of detected sources (sorted by flux, brightest first)
    segmap: numpy array
        segmentation map (labels follow the flux-sorted catalog order)
    fig: `matplotlib.pyplot.figure` object
    '''
    result = extract_obj(
        detect_image,
        mask=mask,
        b=b,
        f=f,
        sigma=sigma,
        minarea=minarea,
        deblend_nthresh=deblend_nthresh,
        deblend_cont=deblend_cont,
        sky_subtract=sky_subtract,
        show_fig=show_fig,
        **kwargs)
    obj_cat = result[0]
    # Sort the catalog brightest-first and remember the permutation so the
    # segmentation-map labels can be remapped to the new order below.
    arg_ind = obj_cat.argsort('flux', reverse=True)
    obj_cat.sort('flux', reverse=True)
    obj_cat['index'] = np.arange(len(obj_cat))
    segmap = result[1]
    # Relabel the segmentation map to the flux-sorted order; the prepended -1
    # keeps background pixels (label 0) mapped to 0 after the final +1.
    segmap = np.append(-1, np.argsort(arg_ind))[segmap] + 1
    if show_fig is True:
        fig = result[2]
        return obj_cat, segmap, fig
    else:
        return obj_cat, segmap
def wavelet_detection(detect_image, mask=None, wavelet_lvl=4, low_freq_lvl=0, high_freq_lvl=1,
                      sigma=3, b=64, f=3, minarea=5, deblend_nthresh=30,
                      deblend_cont=0.001, sky_subtract=True, show_fig=True, **kwargs):
    '''
    Perform wavelet transform before detecting sources. This enable us to emphasize features with high frequency or low frequency.
    Parameters
    ----------
    detect_image: 2-D numpy array
        image
    mask: numpy 2-D array
        image mask
    wavelet_lvl: int
        the number of wavelet decompositions
    low_freq_lvl: int
        this parameter controls how much high-frequency features are wiped away.
        `low_freq_lvl=0` (default) keeps all scales; higher numbers zero out the finest scales.
    high_freq_lvl: int
        this parameter controls how much low-frequency features are wiped away. It should be smaller than `wavelet_lvl - 1`.
        `high_freq_lvl=0` means no low-freq features are wiped (equivalent to vanilla), higher number yields a image with less low-freq features.
    sigma: float
        detection threshold
    b: float
        box size
    f: float
        kernel size
    minarea: float
        minimum area for a source
    sky_subtract: bool
        whether subtract the estimated sky from the input image, then detect sources
    show_fig: bool
        whether plot a figure showing objects and segmentation map
    **kwargs: see `utils.extract_obj`.
    Result
    ------
    obj_cat: `astropy.table.Table` object
        catalog of detected sources
    segmap: numpy array
        segmentation map
    fig: `matplotlib.pyplot.figure` object
    '''
    Sw = Starlet(detect_image, lvl=wavelet_lvl)  # wavelet decomposition
    w = Sw.coefficients
    iw = Sw.image  # not used below; kept for reference/debugging
    if high_freq_lvl != 0:
        # Zero the coarser scales; coefficients run from high to low frequency.
        w[:, (high_freq_lvl):, :, :] = 0  # remove low frequency features
    # w: from high to low
    if low_freq_lvl != 0:
        w[:, :(low_freq_lvl), :, :] = 0  # remove high frequency features
    # image with high-frequency features highlighted
    high_freq_image = Starlet(coefficients=w).image[0]
    # Run the plain SEP-based detection on the filtered image.
    result = vanilla_detection(
        high_freq_image,
        mask=mask,
        sigma=sigma,
        b=b,
        f=f,
        minarea=minarea,
        deblend_nthresh=deblend_nthresh,
        deblend_cont=deblend_cont,
        sky_subtract=sky_subtract,
        show_fig=show_fig,
        **kwargs)
    if show_fig is True:
        obj_cat, segmap, fig = result
        return obj_cat, segmap, fig
    else:
        obj_cat, segmap = result
        return obj_cat, segmap
def makeCatalog(datas, mask=None, lvl=3, method='wavelet', convolve=False, conv_radius=5,
                match_gaia=True, show_fig=True, visual_gaia=True, **kwargs):
    ''' Creates a detection catalog by combining low and high resolution data.
    This function is used for detection before running scarlet.
    It is particularly useful for stellar crowded fields and for detecting high frequency features.
    Parameters
    ----------
    datas: array
        array of Data objects
    mask: numpy 2-D array
        image mask
    lvl: int
        detection lvl, i.e., sigma in SEP
    method: str
        Options:
            "wavelet" uses wavelet decomposition of images before combination, emphasizes high-frequency features
            "vanilla" directly detect objects using SEP
    convolve: bool
        whether to convolve the detection image with a Gaussian kernel before detection
    conv_radius: float
        radius (in pixels) of the Gaussian kernel used when `convolve` is True
    match_gaia: bool
        whether matching the detection catalog with Gaia dataset
    show_fig: bool
        whether show the detection catalog as a figure
    visual_gaia: bool
        whether mark Gaia stars in the figure
    kwargs:
        See the arguments of 'utils.extract_obj'.
    Returns
    -------
    obj_cat: `astropy.table.Table` object
        catalog of detected sources
    segmap: numpy array
        segmentation map
    bg_rms: array
        background level for each dataset
    '''
    if len(datas) == 1:
        # Single dataset: normalize each band to unit total flux before stacking.
        hr_images = datas[0].images / \
            np.sum(datas[0].images, axis=(1, 2))[:, None, None]
        # Detection image as the sum over all images
        detect_image = np.sum(hr_images, axis=0)
    else:
        data_lr, data_hr = datas
        # Create observations for each image
        # Interpolate low resolution to high resolution
        interp = interpolate(data_lr, data_hr)
        # Normalisation of the interpolate low res images
        interp = interp / np.sum(interp, axis=(1, 2))[:, None, None]
        # Normalisation of the high res data
        hr_images = data_hr.images / \
            np.sum(data_hr.images, axis=(1, 2))[:, None, None]
        # Detection image as the sum over all images
        detect_image = np.sum(interp, axis=0) + np.sum(hr_images, axis=0)
        detect_image *= np.sum(data_hr.images)
    # Collapse any remaining channel axis to a single 2-D detection image.
    if np.size(detect_image.shape) == 3:
        detect = detect_image.mean(axis=0)
    else:
        detect = detect_image
    if convolve:
        # NOTE(review): this import rebinds the name `convolve` (shadowing the
        # boolean parameter); harmless only because the truth test above runs first.
        from astropy.convolution import convolve, Box2DKernel, Gaussian2DKernel
        detect = convolve(detect.astype(float), Gaussian2DKernel(conv_radius))
    if method == 'wavelet':
        result = wavelet_detection(
            detect, mask=mask, sigma=lvl, show_fig=show_fig, **kwargs)
    else:
        result = vanilla_detection(
            detect, mask=mask, sigma=lvl, show_fig=show_fig, **kwargs)
    obj_cat = result[0]
    segmap = result[1]
    ## RA and Dec
    if len(datas) == 1:
        ra, dec = datas[0].wcs.wcs_pix2world(obj_cat['x'], obj_cat['y'], 0)
        obj_cat.add_columns([Column(data=ra, name='ra'),
                             Column(data=dec, name='dec')])
    else:
        ra_lr, dec_lr = data_lr.wcs.wcs_pix2world(
            obj_cat['x'], obj_cat['y'], 0)
        ra_hr, dec_hr = data_hr.wcs.wcs_pix2world(
            obj_cat['x'], obj_cat['y'], 0)
        obj_cat.add_columns(
            [Column(data=ra_lr, name='ra_lr'), Column(data=dec_lr, name='dec_lr')])
        # NOTE(review): the 'dec_hr' column is filled from `dec_lr` (not `dec_hr`);
        # this looks like a copy-paste bug -- confirm intended behavior before fixing.
        obj_cat.add_columns(
            [Column(data=ra_hr, name='ra_hr'), Column(data=dec_lr, name='dec_hr')])
    # Reorder columns
    colnames = obj_cat.colnames
    for item in ['dec', 'ra', 'y', 'x', 'index']:
        if item in colnames:
            colnames.remove(item)
            colnames.insert(0, item)
    obj_cat = obj_cat[colnames]
    obj_cat.add_column(
        Column(data=[None] * len(obj_cat), name='obj_type'), index=0)
    # Estimate a background RMS per dataset using the wavelet MAD.
    if len(datas) == 1:
        bg_rms = mad_wavelet(datas[0].images)
    else:
        bg_rms = []
        for data in datas:
            bg_rms.append(mad_wavelet(data.images))
    if match_gaia:
        obj_cat.add_column(
            Column(data=[None] * len(obj_cat), name='gaia_coord'))
        # Pixel scale (arcsec/pixel) is taken from the WCS header's PC2_2 term.
        if len(datas) == 1:
            w = datas[0].wcs
            pixel_scale = w.to_header()['PC2_2'] * 3600
        else:
            w = data_hr.wcs
            pixel_scale = w.to_header()['PC2_2'] * 3600
        # Retrieve GAIA catalog
        gaia_stars = image_gaia_stars(
            detect, w, pixel_scale=pixel_scale,
            verbose=True, visual=visual_gaia)
        # Cross-match with SExtractor catalog
        from astropy.coordinates import SkyCoord, match_coordinates_sky
        temp, dist, _ = match_coordinates_sky(SkyCoord(ra=gaia_stars['ra'], dec=gaia_stars['dec'], unit='deg'),
                                              SkyCoord(ra=obj_cat['ra'], dec=obj_cat['dec'], unit='deg'), nthneighbor=1)
        flag = dist < 5 * u.arcsec
        star_mag = gaia_stars['phot_g_mean_mag'].data
        psf_ind = temp[flag]
        star_mag = star_mag[flag]
        # Bright stars (G < 19) are marked ExtendedSource, faint ones PointSource.
        bright_star_flag = star_mag < 19.0
        obj_cat['obj_type'][psf_ind[bright_star_flag]
                            ] = scarlet.source.ExtendedSource
        obj_cat['obj_type'][psf_ind[~bright_star_flag]
                            ] = scarlet.source.PointSource
        # we also use the coordinates from Gaia for bright stars
        obj_cat['gaia_coord'][psf_ind] = np.array(
            gaia_stars[['ra', 'dec']])[flag]
        # Cross-match for a second time: to deal with splitted bright stars
        temp_cat = obj_cat.copy(copy_data=True)
        temp_cat.remove_rows(psf_ind)
        temp2, dist2, _ = match_coordinates_sky(SkyCoord(ra=gaia_stars['ra'], dec=gaia_stars['dec'], unit='deg'),
                                                SkyCoord(ra=temp_cat['ra'], dec=temp_cat['dec'], unit='deg'), nthneighbor=1)
        flag2 = dist2 < 1 * u.arcsec
        psf_ind2 = temp_cat[temp2[flag2]]['index'].data
        # we also use the coordinates from Gaia for bright stars
        obj_cat.remove_rows(psf_ind2)
        #obj_cat['gaia_coord'][psf_ind2] = np.array(gaia_stars[['ra', 'dec']])[flag2]
        #obj_cat['obj_type'][psf_ind2] = scarlet.source.PointSource
        print(f'# Matched {len(psf_ind)} stars from GAIA')
    # Re-number the catalog after any row removals above.
    obj_cat['index'] = np.arange(len(obj_cat))
    # Visualize the results
    if show_fig and match_gaia:
        from matplotlib.patches import Ellipse as mpl_ellip
        from .display import ORG, GRN
        fig = result[2]
        ax1 = fig.get_axes()[0]
        xlim = ax1.get_xlim()
        ylim = ax1.get_ylim()
        # Plot an ellipse for each object
        for star in gaia_stars[flag]:
            smask = mpl_ellip(
                xy=(star['x_pix'], star['y_pix']),
                width=(2.0 * star['rmask_arcsec'] / pixel_scale),
                height=(2.0 * star['rmask_arcsec'] / pixel_scale),
                angle=0.0)
            smask.set_facecolor(ORG(0.2))
            smask.set_edgecolor(ORG(1.0))
            smask.set_alpha(0.3)
            ax1.add_artist(smask)
        # Show stars
        ax1.scatter(
            gaia_stars['x_pix'],
            gaia_stars['y_pix'],
            color=GRN(1.0),
            s=100,
            alpha=0.9,
            marker='+')
        ax1.set_xlim(xlim)
        ax1.set_ylim(ylim)
    return obj_cat, segmap, bg_rms
| [
"scarlet.Frame",
"scarlet.interpolation.sinc_interp",
"numpy.size",
"astropy.coordinates.SkyCoord",
"numpy.argsort",
"scarlet.resampling.convert_coordinates",
"numpy.array",
"numpy.sum",
"scarlet.wavelet.mad_wavelet",
"astropy.table.Column",
"astropy.convolution.Gaussian2DKernel",
"scarlet.wav... | [((677, 756), 'scarlet.Frame', 'scarlet.Frame', (['data_lr.images.shape'], {'wcs': 'data_lr.wcs', 'channels': 'data_lr.channels'}), '(data_lr.images.shape, wcs=data_lr.wcs, channels=data_lr.channels)\n', (690, 756), False, 'import scarlet\n'), ((801, 880), 'scarlet.Frame', 'scarlet.Frame', (['data_hr.images.shape'], {'wcs': 'data_hr.wcs', 'channels': 'data_hr.channels'}), '(data_hr.images.shape, wcs=data_hr.wcs, channels=data_hr.channels)\n', (814, 880), False, 'import scarlet\n'), ((1121, 1190), 'scarlet.resampling.convert_coordinates', 'scarlet.resampling.convert_coordinates', (['coord_lr0', 'frame_lr', 'frame_hr'], {}), '(coord_lr0, frame_lr, frame_hr)\n', (1159, 1190), False, 'import scarlet\n'), ((1387, 1403), 'numpy.array', 'np.array', (['interp'], {}), '(interp)\n', (1395, 1403), True, 'import numpy as np\n'), ((4560, 4598), 'scarlet.wavelet.Starlet', 'Starlet', (['detect_image'], {'lvl': 'wavelet_lvl'}), '(detect_image, lvl=wavelet_lvl)\n', (4567, 4598), False, 'from scarlet.wavelet import mad_wavelet, Starlet\n'), ((928, 962), 'numpy.arange', 'np.arange', (['data_lr.images.shape[1]'], {}), '(data_lr.images.shape[1])\n', (937, 962), True, 'import numpy as np\n'), ((964, 998), 'numpy.arange', 'np.arange', (['data_lr.images.shape[1]'], {}), '(data_lr.images.shape[1])\n', (973, 998), True, 'import numpy as np\n'), ((1025, 1059), 'numpy.arange', 'np.arange', (['data_hr.images.shape[1]'], {}), '(data_hr.images.shape[1])\n', (1034, 1059), True, 'import numpy as np\n'), ((1061, 1095), 'numpy.arange', 'np.arange', (['data_hr.images.shape[1]'], {}), '(data_hr.images.shape[1])\n', (1070, 1095), True, 'import numpy as np\n'), ((6997, 7022), 'numpy.sum', 'np.sum', (['hr_images'], {'axis': '(0)'}), '(hr_images, axis=0)\n', (7003, 7022), True, 'import numpy as np\n'), ((7639, 7661), 'numpy.sum', 'np.sum', (['data_hr.images'], {}), '(data_hr.images)\n', (7645, 7661), True, 'import numpy as np\n'), ((7670, 7697), 'numpy.size', 'np.size', 
(['detect_image.shape'], {}), '(detect_image.shape)\n', (7677, 7697), True, 'import numpy as np\n'), ((9298, 9326), 'scarlet.wavelet.mad_wavelet', 'mad_wavelet', (['datas[0].images'], {}), '(datas[0].images)\n', (9309, 9326), False, 'from scarlet.wavelet import mad_wavelet, Starlet\n'), ((4975, 4998), 'scarlet.wavelet.Starlet', 'Starlet', ([], {'coefficients': 'w'}), '(coefficients=w)\n', (4982, 4998), False, 'from scarlet.wavelet import mad_wavelet, Starlet\n'), ((7564, 7586), 'numpy.sum', 'np.sum', (['interp'], {'axis': '(0)'}), '(interp, axis=0)\n', (7570, 7586), True, 'import numpy as np\n'), ((7589, 7614), 'numpy.sum', 'np.sum', (['hr_images'], {'axis': '(0)'}), '(hr_images, axis=0)\n', (7595, 7614), True, 'import numpy as np\n'), ((7933, 7962), 'astropy.convolution.Gaussian2DKernel', 'Gaussian2DKernel', (['conv_radius'], {}), '(conv_radius)\n', (7949, 7962), False, 'from astropy.convolution import convolve, Box2DKernel, Gaussian2DKernel\n'), ((10092, 10156), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': "gaia_stars['ra']", 'dec': "gaia_stars['dec']", 'unit': '"""deg"""'}), "(ra=gaia_stars['ra'], dec=gaia_stars['dec'], unit='deg')\n", (10100, 10156), False, 'from astropy.coordinates import SkyCoord, match_coordinates_sky\n'), ((10204, 10262), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': "obj_cat['ra']", 'dec': "obj_cat['dec']", 'unit': '"""deg"""'}), "(ra=obj_cat['ra'], dec=obj_cat['dec'], unit='deg')\n", (10212, 10262), False, 'from astropy.coordinates import SkyCoord, match_coordinates_sky\n'), ((10810, 10845), 'numpy.array', 'np.array', (["gaia_stars[['ra', 'dec']]"], {}), "(gaia_stars[['ra', 'dec']])\n", (10818, 10845), True, 'import numpy as np\n'), ((11076, 11140), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': "gaia_stars['ra']", 'dec': "gaia_stars['dec']", 'unit': '"""deg"""'}), "(ra=gaia_stars['ra'], dec=gaia_stars['dec'], unit='deg')\n", (11084, 11140), False, 'from astropy.coordinates import SkyCoord, 
match_coordinates_sky\n'), ((11190, 11250), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': "temp_cat['ra']", 'dec': "temp_cat['dec']", 'unit': '"""deg"""'}), "(ra=temp_cat['ra'], dec=temp_cat['dec'], unit='deg')\n", (11198, 11250), False, 'from astropy.coordinates import SkyCoord, match_coordinates_sky\n'), ((12100, 12261), 'matplotlib.patches.Ellipse', 'mpl_ellip', ([], {'xy': "(star['x_pix'], star['y_pix'])", 'width': "(2.0 * star['rmask_arcsec'] / pixel_scale)", 'height': "(2.0 * star['rmask_arcsec'] / pixel_scale)", 'angle': '(0.0)'}), "(xy=(star['x_pix'], star['y_pix']), width=2.0 * star[\n 'rmask_arcsec'] / pixel_scale, height=2.0 * star['rmask_arcsec'] /\n pixel_scale, angle=0.0)\n", (12109, 12261), True, 'from matplotlib.patches import Ellipse as mpl_ellip\n'), ((2913, 2932), 'numpy.argsort', 'np.argsort', (['arg_ind'], {}), '(arg_ind)\n', (2923, 2932), True, 'import numpy as np\n'), ((6869, 6905), 'numpy.sum', 'np.sum', (['datas[0].images'], {'axis': '(1, 2)'}), '(datas[0].images, axis=(1, 2))\n', (6875, 6905), True, 'import numpy as np\n'), ((7298, 7325), 'numpy.sum', 'np.sum', (['interp'], {'axis': '(1, 2)'}), '(interp, axis=(1, 2))\n', (7304, 7325), True, 'import numpy as np\n'), ((7437, 7472), 'numpy.sum', 'np.sum', (['data_hr.images'], {'axis': '(1, 2)'}), '(data_hr.images, axis=(1, 2))\n', (7443, 7472), True, 'import numpy as np\n'), ((8413, 8439), 'astropy.table.Column', 'Column', ([], {'data': 'ra', 'name': '"""ra"""'}), "(data=ra, name='ra')\n", (8419, 8439), False, 'from astropy.table import Table, Column\n'), ((8470, 8498), 'astropy.table.Column', 'Column', ([], {'data': 'dec', 'name': '"""dec"""'}), "(data=dec, name='dec')\n", (8476, 8498), False, 'from astropy.table import Table, Column\n'), ((8741, 8773), 'astropy.table.Column', 'Column', ([], {'data': 'ra_lr', 'name': '"""ra_lr"""'}), "(data=ra_lr, name='ra_lr')\n", (8747, 8773), False, 'from astropy.table import Table, Column\n'), ((8775, 8809), 'astropy.table.Column', 'Column', 
([], {'data': 'dec_lr', 'name': '"""dec_lr"""'}), "(data=dec_lr, name='dec_lr')\n", (8781, 8809), False, 'from astropy.table import Table, Column\n'), ((8854, 8886), 'astropy.table.Column', 'Column', ([], {'data': 'ra_hr', 'name': '"""ra_hr"""'}), "(data=ra_hr, name='ra_hr')\n", (8860, 8886), False, 'from astropy.table import Table, Column\n'), ((8888, 8922), 'astropy.table.Column', 'Column', ([], {'data': 'dec_lr', 'name': '"""dec_hr"""'}), "(data=dec_lr, name='dec_hr')\n", (8894, 8922), False, 'from astropy.table import Table, Column\n'), ((9410, 9434), 'scarlet.wavelet.mad_wavelet', 'mad_wavelet', (['data.images'], {}), '(data.images)\n', (9421, 9434), False, 'from scarlet.wavelet import mad_wavelet, Starlet\n'), ((1272, 1360), 'scarlet.interpolation.sinc_interp', 'scarlet.interpolation.sinc_interp', (['image[None, :, :]', 'coord_hr', 'coord_lr'], {'angle': 'None'}), '(image[None, :, :], coord_hr, coord_lr,\n angle=None)\n', (1305, 1360), False, 'import scarlet\n')] |
# coding=utf-8
from django.urls import path
from BroadviewCOSS import views
app_name = 'BroadviewCOSS'
# URL routes for the BroadviewCOSS app, grouped by feature area.
# Each area follows the same pattern: a list view plus AJAX-style
# validate/add/update/delete endpoints.
urlpatterns = [
    # Home page and authentication
    path('', views.index),
    path('index/', views.index, name='index'),
    path('login/', views.login, name='login'),
    path('logout/', views.logout, name='logout'),
    # Mainframe management
    path('mainframe/', views.mainframe, name='mainframe'),
    path('mainframe/validate', views.mainframe_ip_validate, name='mainframe-ip-validate'),
    path('mainframe/add', views.mainframe_add, name='add-mainframe'),
    path('mainframe/update', views.mainframe_update, name='update-mainframe'),
    path('mainframe/delete', views.mainframe_delete, name='delete-mainframe'),
    # Task management
    path('task/', views.task, name='task'),
    path('task/validate', views.task_name_validate, name='task-name-validate'),
    path('task/details', views.task_details, name='task-details'),
    path('task/add', views.task_add, name='add-task'),
    path('task/update', views.task_update, name='update-task'),
    path('task/delete', views.task_delete, name='delete-task'),
    # Task-run records (nested under task)
    path('task/taskrun/add', views.taskrun_add, name='add-taskrun'),
    path('task/taskrun/update', views.taskrun_update, name='update-taskrun'),
    path('task/taskrun/delete', views.taskrun_delete, name='delete-taskrun'),
    # Category management
    path('category/', views.category, name='category'),
    path('category/validate', views.category_name_validate, name='category-name-validate'),
    path('category/add', views.category_add, name='add-category'),
    path('category/update', views.category_update, name='update-category'),
    path('category/delete', views.category_delete, name='delete-category'),
    # User management
    path('user/', views.user, name='user'),
    path('user/add', views.user_add, name='add-user'),
    path('user/update', views.user_update, name='update-user'),
    path('user/delete', views.user_delete, name='delete-user'),
    path('user/validate', views.user_name_validate, name='user-name-validate'),
    # Role management
    path('role/', views.role, name='role'),
    path('role/add/', views.role_add, name='add-role'),
    path('role/update/', views.role_update, name='update-role'),
    path('role/delete', views.role_delete, name='delete-role'),
    path('role/validate', views.role_name_validate, name='role-name-validate'),
]
| [
"django.urls.path"
] | [((124, 145), 'django.urls.path', 'path', (['""""""', 'views.index'], {}), "('', views.index)\n", (128, 145), False, 'from django.urls import path\n'), ((151, 192), 'django.urls.path', 'path', (['"""index/"""', 'views.index'], {'name': '"""index"""'}), "('index/', views.index, name='index')\n", (155, 192), False, 'from django.urls import path\n'), ((198, 239), 'django.urls.path', 'path', (['"""login/"""', 'views.login'], {'name': '"""login"""'}), "('login/', views.login, name='login')\n", (202, 239), False, 'from django.urls import path\n'), ((245, 289), 'django.urls.path', 'path', (['"""logout/"""', 'views.logout'], {'name': '"""logout"""'}), "('logout/', views.logout, name='logout')\n", (249, 289), False, 'from django.urls import path\n'), ((295, 348), 'django.urls.path', 'path', (['"""mainframe/"""', 'views.mainframe'], {'name': '"""mainframe"""'}), "('mainframe/', views.mainframe, name='mainframe')\n", (299, 348), False, 'from django.urls import path\n'), ((354, 444), 'django.urls.path', 'path', (['"""mainframe/validate"""', 'views.mainframe_ip_validate'], {'name': '"""mainframe-ip-validate"""'}), "('mainframe/validate', views.mainframe_ip_validate, name=\n 'mainframe-ip-validate')\n", (358, 444), False, 'from django.urls import path\n'), ((445, 509), 'django.urls.path', 'path', (['"""mainframe/add"""', 'views.mainframe_add'], {'name': '"""add-mainframe"""'}), "('mainframe/add', views.mainframe_add, name='add-mainframe')\n", (449, 509), False, 'from django.urls import path\n'), ((515, 588), 'django.urls.path', 'path', (['"""mainframe/update"""', 'views.mainframe_update'], {'name': '"""update-mainframe"""'}), "('mainframe/update', views.mainframe_update, name='update-mainframe')\n", (519, 588), False, 'from django.urls import path\n'), ((594, 667), 'django.urls.path', 'path', (['"""mainframe/delete"""', 'views.mainframe_delete'], {'name': '"""delete-mainframe"""'}), "('mainframe/delete', views.mainframe_delete, name='delete-mainframe')\n", (598, 667), False, 
'from django.urls import path\n'), ((673, 711), 'django.urls.path', 'path', (['"""task/"""', 'views.task'], {'name': '"""task"""'}), "('task/', views.task, name='task')\n", (677, 711), False, 'from django.urls import path\n'), ((717, 791), 'django.urls.path', 'path', (['"""task/validate"""', 'views.task_name_validate'], {'name': '"""task-name-validate"""'}), "('task/validate', views.task_name_validate, name='task-name-validate')\n", (721, 791), False, 'from django.urls import path\n'), ((797, 858), 'django.urls.path', 'path', (['"""task/details"""', 'views.task_details'], {'name': '"""task-details"""'}), "('task/details', views.task_details, name='task-details')\n", (801, 858), False, 'from django.urls import path\n'), ((864, 913), 'django.urls.path', 'path', (['"""task/add"""', 'views.task_add'], {'name': '"""add-task"""'}), "('task/add', views.task_add, name='add-task')\n", (868, 913), False, 'from django.urls import path\n'), ((919, 977), 'django.urls.path', 'path', (['"""task/update"""', 'views.task_update'], {'name': '"""update-task"""'}), "('task/update', views.task_update, name='update-task')\n", (923, 977), False, 'from django.urls import path\n'), ((983, 1041), 'django.urls.path', 'path', (['"""task/delete"""', 'views.task_delete'], {'name': '"""delete-task"""'}), "('task/delete', views.task_delete, name='delete-task')\n", (987, 1041), False, 'from django.urls import path\n'), ((1047, 1110), 'django.urls.path', 'path', (['"""task/taskrun/add"""', 'views.taskrun_add'], {'name': '"""add-taskrun"""'}), "('task/taskrun/add', views.taskrun_add, name='add-taskrun')\n", (1051, 1110), False, 'from django.urls import path\n'), ((1116, 1188), 'django.urls.path', 'path', (['"""task/taskrun/update"""', 'views.taskrun_update'], {'name': '"""update-taskrun"""'}), "('task/taskrun/update', views.taskrun_update, name='update-taskrun')\n", (1120, 1188), False, 'from django.urls import path\n'), ((1194, 1266), 'django.urls.path', 'path', (['"""task/taskrun/delete"""', 
'views.taskrun_delete'], {'name': '"""delete-taskrun"""'}), "('task/taskrun/delete', views.taskrun_delete, name='delete-taskrun')\n", (1198, 1266), False, 'from django.urls import path\n'), ((1272, 1322), 'django.urls.path', 'path', (['"""category/"""', 'views.category'], {'name': '"""category"""'}), "('category/', views.category, name='category')\n", (1276, 1322), False, 'from django.urls import path\n'), ((1328, 1419), 'django.urls.path', 'path', (['"""category/validate"""', 'views.category_name_validate'], {'name': '"""category-name-validate"""'}), "('category/validate', views.category_name_validate, name=\n 'category-name-validate')\n", (1332, 1419), False, 'from django.urls import path\n'), ((1420, 1481), 'django.urls.path', 'path', (['"""category/add"""', 'views.category_add'], {'name': '"""add-category"""'}), "('category/add', views.category_add, name='add-category')\n", (1424, 1481), False, 'from django.urls import path\n'), ((1487, 1557), 'django.urls.path', 'path', (['"""category/update"""', 'views.category_update'], {'name': '"""update-category"""'}), "('category/update', views.category_update, name='update-category')\n", (1491, 1557), False, 'from django.urls import path\n'), ((1563, 1633), 'django.urls.path', 'path', (['"""category/delete"""', 'views.category_delete'], {'name': '"""delete-category"""'}), "('category/delete', views.category_delete, name='delete-category')\n", (1567, 1633), False, 'from django.urls import path\n'), ((1639, 1677), 'django.urls.path', 'path', (['"""user/"""', 'views.user'], {'name': '"""user"""'}), "('user/', views.user, name='user')\n", (1643, 1677), False, 'from django.urls import path\n'), ((1683, 1732), 'django.urls.path', 'path', (['"""user/add"""', 'views.user_add'], {'name': '"""add-user"""'}), "('user/add', views.user_add, name='add-user')\n", (1687, 1732), False, 'from django.urls import path\n'), ((1738, 1796), 'django.urls.path', 'path', (['"""user/update"""', 'views.user_update'], {'name': 
'"""update-user"""'}), "('user/update', views.user_update, name='update-user')\n", (1742, 1796), False, 'from django.urls import path\n'), ((1802, 1860), 'django.urls.path', 'path', (['"""user/delete"""', 'views.user_delete'], {'name': '"""delete-user"""'}), "('user/delete', views.user_delete, name='delete-user')\n", (1806, 1860), False, 'from django.urls import path\n'), ((1866, 1940), 'django.urls.path', 'path', (['"""user/validate"""', 'views.user_name_validate'], {'name': '"""user-name-validate"""'}), "('user/validate', views.user_name_validate, name='user-name-validate')\n", (1870, 1940), False, 'from django.urls import path\n'), ((1946, 1984), 'django.urls.path', 'path', (['"""role/"""', 'views.role'], {'name': '"""role"""'}), "('role/', views.role, name='role')\n", (1950, 1984), False, 'from django.urls import path\n'), ((1990, 2040), 'django.urls.path', 'path', (['"""role/add/"""', 'views.role_add'], {'name': '"""add-role"""'}), "('role/add/', views.role_add, name='add-role')\n", (1994, 2040), False, 'from django.urls import path\n'), ((2046, 2105), 'django.urls.path', 'path', (['"""role/update/"""', 'views.role_update'], {'name': '"""update-role"""'}), "('role/update/', views.role_update, name='update-role')\n", (2050, 2105), False, 'from django.urls import path\n'), ((2111, 2169), 'django.urls.path', 'path', (['"""role/delete"""', 'views.role_delete'], {'name': '"""delete-role"""'}), "('role/delete', views.role_delete, name='delete-role')\n", (2115, 2169), False, 'from django.urls import path\n'), ((2175, 2249), 'django.urls.path', 'path', (['"""role/validate"""', 'views.role_name_validate'], {'name': '"""role-name-validate"""'}), "('role/validate', views.role_name_validate, name='role-name-validate')\n", (2179, 2249), False, 'from django.urls import path\n')] |
import numpy as np
import cv2
from keras.layers import Input
from keras.models import Model
from keras.models import load_model
# Interactive generator: a trackbar controls parameter #7; the perceptron maps
# the parameter vector to a latent code, which the decoder turns into an image.
decoder = load_model('roses_decoder.h5')
perceptron = load_model('decoder-perceptron.h5')
path = 'dataset/rose'
# Load four sample parameter vectors from the dataset.
# NOTE(review): `id` shadows the builtin of the same name throughout this script.
id=25 # sample code
param0 = np.loadtxt(path+'{:04d}.txt'.format(id))
id=26 # sample code
param1 = np.loadtxt(path+'{:04d}.txt'.format(id))
id=2 # sample code
param2 = np.loadtxt(path+'{:04d}.txt'.format(id))
id=235 # sample code
param3 = np.loadtxt(path+'{:04d}.txt'.format(id))
param = np.copy(param0)
last_value = -1  # forces a first render on loop entry
cv2.namedWindow("generator")
cv2.imshow('generator',np.zeros((112*5,112*5),np.uint8))
value = int(param[7])
def update_value( *args ):
    # Trackbar callback: store the new slider position in the global `value`.
    global value
    print(args[0])
    value = float(args[0])
cv2.createTrackbar("value", "generator", value, 90, update_value)
id = 0  # counter for saved output files
while True:
    # Regenerate the preview only when the slider value changed.
    if last_value != value:
        last_value = value
        param[7] = value
        coded = perceptron.predict(param.reshape(1,-1))
        decoded = decoder.predict(coded)
        decoded = np.asarray(decoded[0]*255,np.uint8)
        decoded = cv2.resize(decoded,(112*5,112*5))
        cv2.imshow('generator',decoded)
    key = cv2.waitKey(10)
    if key == 27:
        # ESC quits the loop
        break
    elif key == ord('r') or key == ord('0'):
        # 'r'/'0'..'3' reset to one of the preset parameter vectors.
        param = np.copy(param0)
        update_value(int(param[7]))
    elif key == ord('1'):
        param = np.copy(param1)
        update_value(int(param[7]))
    elif key == ord('2'):
        param = np.copy(param2)
        update_value(int(param[7]))
    elif key == ord('3'):
        param = np.copy(param3)
        update_value(int(param[7]))
    elif key == ord('s'):
        # 's' saves the current image and its parameter vector.
        cv2.imwrite('generator/final'+str(id)+'.png',decoded)
        np.savetxt('generator/final'+str(id)+'.txt',param)
        id += 1
cv2.destroyAllWindows()
| [
"numpy.copy",
"keras.models.load_model",
"numpy.asarray",
"cv2.imshow",
"numpy.zeros",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.resize",
"cv2.createTrackbar",
"cv2.namedWindow"
] | [((139, 169), 'keras.models.load_model', 'load_model', (['"""roses_decoder.h5"""'], {}), "('roses_decoder.h5')\n", (149, 169), False, 'from keras.models import load_model\n'), ((183, 218), 'keras.models.load_model', 'load_model', (['"""decoder-perceptron.h5"""'], {}), "('decoder-perceptron.h5')\n", (193, 218), False, 'from keras.models import load_model\n'), ((532, 547), 'numpy.copy', 'np.copy', (['param0'], {}), '(param0)\n', (539, 547), True, 'import numpy as np\n'), ((566, 594), 'cv2.namedWindow', 'cv2.namedWindow', (['"""generator"""'], {}), "('generator')\n", (581, 594), False, 'import cv2\n'), ((767, 832), 'cv2.createTrackbar', 'cv2.createTrackbar', (['"""value"""', '"""generator"""', 'value', '(90)', 'update_value'], {}), "('value', 'generator', value, 90, update_value)\n", (785, 832), False, 'import cv2\n'), ((1803, 1826), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1824, 1826), False, 'import cv2\n'), ((618, 656), 'numpy.zeros', 'np.zeros', (['(112 * 5, 112 * 5)', 'np.uint8'], {}), '((112 * 5, 112 * 5), np.uint8)\n', (626, 656), True, 'import numpy as np\n'), ((1196, 1211), 'cv2.waitKey', 'cv2.waitKey', (['(10)'], {}), '(10)\n', (1207, 1211), False, 'import cv2\n'), ((1049, 1087), 'numpy.asarray', 'np.asarray', (['(decoded[0] * 255)', 'np.uint8'], {}), '(decoded[0] * 255, np.uint8)\n', (1059, 1087), True, 'import numpy as np\n'), ((1103, 1142), 'cv2.resize', 'cv2.resize', (['decoded', '(112 * 5, 112 * 5)'], {}), '(decoded, (112 * 5, 112 * 5))\n', (1113, 1142), False, 'import cv2\n'), ((1145, 1177), 'cv2.imshow', 'cv2.imshow', (['"""generator"""', 'decoded'], {}), "('generator', decoded)\n", (1155, 1177), False, 'import cv2\n'), ((1305, 1320), 'numpy.copy', 'np.copy', (['param0'], {}), '(param0)\n', (1312, 1320), True, 'import numpy as np\n'), ((1399, 1414), 'numpy.copy', 'np.copy', (['param1'], {}), '(param1)\n', (1406, 1414), True, 'import numpy as np\n'), ((1493, 1508), 'numpy.copy', 'np.copy', (['param2'], {}), '(param2)\n', 
(1500, 1508), True, 'import numpy as np\n'), ((1587, 1602), 'numpy.copy', 'np.copy', (['param3'], {}), '(param3)\n', (1594, 1602), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# distrib.py
import pandas as pd
import numpy as np
import scipy.integrate
import scipy.interpolate
import matplotlib.pyplot as plt
import matplotlib.font_manager as font_manager
from .utils import grouper
from .plotting import plotVertBar
def integrate(xvec, yvec):
    """Return the absolute area under the discrete curve *yvec* over *xvec*.

    Uses composite Simpson's rule. The absolute value makes the result
    independent of the ordering (ascending/descending) of *xvec*.
    """
    # SciPy renamed ``simps`` to ``simpson`` (the old alias was removed in
    # SciPy >= 1.14); prefer the new name, fall back for old installations.
    simpson = getattr(scipy.integrate, 'simpson', None)
    if simpson is None:
        simpson = scipy.integrate.simps
    return abs(simpson(yvec, x=xvec))
def normalizeDistrib(x, y, u=None):
    """Scale *y* (and optionally *u*) in place so the distribution has unit area.

    pandas Series inputs are converted to plain numpy arrays first.
    Returns the (possibly converted) x, y, u.
    """
    if isinstance(x, pd.Series):
        x = x.values
    if isinstance(y, pd.Series):
        y = y.values
    # total area before scaling; dividing by it normalizes to 1
    norm = integrate(x, y)
    y /= norm
    if u is not None:
        u /= norm
    return x, y, u
def area(xvec, yvec, showArea = True):
    """Return a short label string with the curve's area, or '' if disabled."""
    if not showArea:
        return ""
    return " $\int${:.3g}".format(integrate(xvec, yvec))
def findPeakRanges(x, y, tol=1e-16):
    """Return the locations of data/peaks above a base line as (start, end) index tuples.

    Assumes each peak touches the baseline before and after. For distributions.
    *tol*: Multiplied by y.max() to produce a threshold distinguishing
    noise/artifacts from peaks.
    Monotonously increasing/decreasing segments (unwanted artefacts) are skipped.
    """
    x = x.values if isinstance(x, pd.Series) else x
    y = y.values if isinstance(y, pd.Series) else y
    # look at all data above the threshold, get their array indices
    indices = np.where(y > tol * y.max())[0]
    if not len(indices):
        # fixed: previously indices[0] raised IndexError when nothing
        # exceeded the threshold (e.g. an all-zero distribution)
        return []
    # segmentation: look where continuous groups of indices start and end
    indexGroups = np.where(np.diff(indices) > 1)[0]
    ranges = []
    def appendPeakRange(start, end):
        # widen by one sample on each side, clamped to the array bounds
        start, end = max(start - 1, 0), min(end + 1, len(x) - 1)
        monotony = np.sign(np.diff(y[start:end + 1]))
        if not all(monotony == monotony[0]):
            # avoid monotonously increasing/decreasing peaks -> unwanted artefacts
            ranges.append((start, end))
    istart = indices[0]
    for idx in indexGroups:
        appendPeakRange(istart, indices[idx])  # add the new range to the list
        istart = indices[idx + 1]              # start new range
    appendPeakRange(istart, indices[-1])
    return ranges
def findLocalMinima(peakRanges, xarr, yarr, doPlot=False, verbose=False):
    """Identify local (non-zero) minima within given peak ranges and separate those
    bimodal ranges into monomodal ranges, thus splitting up the peak range if it
    contains maxima connected by non-zero minima. Returns a list of index tuples
    indicating the start and end of each peak. Uses 4th order spline fitting and
    its derivative for finding positions of local minima."""
    newRanges = []
    if doPlot:
        plt.figure(figsize=(15,5))
    for ip, (istart, iend) in enumerate(peakRanges):
        if verbose: print((istart, iend), xarr[istart], xarr[iend])
        if iend-istart < 5: # skip this, can't be fitted and no sub-peaks are likely
            newRanges.append((istart, iend))
            continue
        while yarr[istart] <= 0. and istart < iend:
            istart += 1 # exclude leading zero
        while yarr[iend] <= 0. and istart < iend:
            iend -= 1 # exclude trailing zero
        if istart == iend:
            continue
        if verbose: print((istart, iend))
        x, y = xarr[istart:iend+1], yarr[istart:iend+1]
        try:
            spline = scipy.interpolate.InterpolatedUnivariateSpline(x, y, k=4)
        except Exception:
            # fixed: was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt; keep range unsplit on fit failure
            print(f"Warning: Could not findLocalMinima() within {(istart, iend)}!")
            newRanges.append((istart, iend))
            continue
        deriv = spline.derivative()
        roots = deriv.roots()
        # get indices of roots and ignore any duplicate indices
        rootIdx = set(np.argmin(np.abs(xarr[:,np.newaxis]-roots[np.newaxis,:]), axis=0))
        rootIdx.add(istart); rootIdx.add(iend)
        rootIdx = sorted(rootIdx)
        if verbose: print((istart, iend), len(roots), roots, rootIdx)
        if doPlot:
            plt.subplot(1,len(peakRanges), ip+1)
            radGrid = np.linspace(x[0], x[-1], 200)
            plt.plot(x, y, label="data")
            plt.plot(radGrid, spline(radGrid), label="spline")
            plt.ylabel("data & spline approx.")
            handles1, labels1 = plt.gca().get_legend_handles_labels()
            [plotVertBar(plt, xarr[i], spline(radGrid).max(), color="blue", ls=":") for i in rootIdx]
            plt.gca().twinx()
            plt.plot(radGrid, deriv(radGrid), label="deriv. spline", color="green")
            plt.ylabel("1st derivative")
            handles2, labels2 = plt.gca().get_legend_handles_labels()
            plt.grid(); plt.legend(handles1+handles2, labels1+labels2)
        # every other root (extrema alternate min/max) bounds a monomodal peak
        peakBoundaries = rootIdx[::2]
        if verbose: print(peakBoundaries)
        newRanges += [tuple(peakBoundaries[i:i+2]) for i in range(len(peakBoundaries)-1)]
    if verbose: print(newRanges)
    return newRanges
def getLargestPeaks(peakRanges, xarr, yarr, count=1):
    """Return the *count* peak ranges with the largest integrated area, largest first."""
    def rangeArea(rng):
        lo, hi = rng[0], rng[1] + 1
        return integrate(xarr[lo:hi], yarr[lo:hi])
    ordered = sorted(peakRanges, key=rangeArea, reverse=True)
    return ordered[:count]
class Moments(dict):
    """Dict-based container for the moments (area, mean, var, skew, kurt)
    of a weighted distribution, with read-only property access."""

    @staticmethod
    def nthMoment(x, weights, n):
        """Calculates the nth moment of the given distribution weights."""
        # the mean is only needed (and defined) for n > 0 and non-zero weights
        center = np.average(x, weights=weights) if (n > 0 and sum(weights)) else 0.
        if n == 1:
            return center  # the mean
        var = np.sum(weights * (x - center) ** 2) / np.sum(weights) if n > 1 else 1.
        if n == 2:
            return var  # the variance
        # higher moments, normalized by var**n as in the original implementation
        return np.sum(weights * (x - center) ** n) / np.sum(weights) / var ** n

    @classmethod
    def fromData(cls, x, y):
        """Build a Moments instance from curve data (x, y)."""
        store = cls()
        store['area'] = integrate(x, y)
        for key, order in (('mean', 1), ('var', 2), ('skew', 3), ('kurt', 4)):
            store[key] = cls.nthMoment(x, y, order)
        return store

    @property
    def area(self):
        return self['area']

    @property
    def mean(self):
        return self['mean']

    @property
    def var(self):
        return self['var']

    @property
    def skew(self):
        return self['skew']

    @property
    def kurt(self):
        return self['kurt']

    def __str__(self):
        lines = ["{: <4s}: {: 9.2g}".format(key, self[key])
                 for key in ("area", "mean", "var", "skew", "kurt")]
        return "\n".join(lines)
def distrParFromDistrib(mean, var, N=1.):
    """Convert (mean, variance) moments into log-normal parameters (N, sigma, median).

    See SASfit manual, 6.4. Log-Normal distribution.
    Returned in the order used elsewhere for distrPar.
    """
    meanSq = mean ** 2
    median = meanSq / np.sqrt(var + meanSq)
    sigma = np.sqrt(np.log(meanSq / median ** 2))
    return N, sigma, median
class Distribution:
    """Holds a normalized size distribution (x, y, u) and its detected peaks.

    Peaks are found on construction and stored as (start, end) index tuples
    pointing into the x/y/u arrays. Relies on the module-level helpers
    normalizeDistrib(), findPeakRanges(), findLocalMinima(), getLargestPeaks(),
    integrate(), Moments, distrParFromDistrib() and the distrPar* formatters.
    """
    x, y, u = None, None, None  # abscissa, normalized ordinate and its uncertainty
    peaks = None # list of peak (start, end) indices pointing into x,y,u
    color = None  # plot color; expected to be set externally before plotting
    plotAxes, plotAxisIdx = None, 0  # optional axes bookkeeping for external plotting
    def __init__(self, xvec, yvec, uvec, maxPeakCount=None):
        """Normalize the given vectors and detect the peak ranges.

        *maxPeakCount*: if truthy, keep only that many largest peaks (by area).
        Accepts pandas Series or plain arrays.
        """
        xvec = xvec.values if isinstance(xvec, pd.Series) else xvec
        yvec = yvec.values if isinstance(yvec, pd.Series) else yvec
        uvec = uvec.values if isinstance(uvec, pd.Series) else uvec
        self.x, self.y, self.u = normalizeDistrib(xvec, yvec, uvec)
        self.peaks = findPeakRanges(self.x, self.y, tol=1e-6)
        # refine the peak ranges containing multiple maxima
        self.peaks = findLocalMinima(self.peaks, self.x, self.y)
        # Disabled legacy filter: for a given list of peaks (by start/end indices)
        # keep only those whose amount-to-uncertainty ratio stays below a maximum.
        #maxRatio = 1.5
        #self.peakRanges = [(istart, iend) for istart, iend in self.peakRanges
        #                    if maxRatio > 1/np.median(self.y[istart:iend+1]/self.u[istart:iend+1])]
        # Sort the peaks by area and use the largest (last) only, assuming monomodal distributions
        if maxPeakCount:
            self.peaks = getLargestPeaks(self.peaks, self.x, self.y, count=maxPeakCount)
    def peakData(self, peakRange):
        """Return the (x, y, u) slices covering the given (start, end) peak range."""
        return (self.x[peakRange[0]:peakRange[1]+1],
                self.y[peakRange[0]:peakRange[1]+1],
                self.u[peakRange[0]:peakRange[1]+1])
    def uncertRatioMedian(self, peakRange):
        """Return the median relative uncertainty (u/y) within the peak range."""
        _, y, u = self.peakData(peakRange)
        return 1./np.median(y/u)
    @staticmethod
    def getBarWidth(xvec):
        """Return per-point bar widths from grid spacing (first width duplicated)."""
        return np.concatenate((np.diff(xvec)[:1], np.diff(xvec)))
    def plotPeak(self, peakRange, moments, distrPar, showFullRange=False, ax=None):
        """Plot a single peak with its moments/parameters in the legend.

        *moments*/*distrPar*: (nominal, lower, upper) triples used to derive
        the uncertainty spread shown in the legend.
        *showFullRange*: Set the x range to cover the whole distribution instead of the peak only."""
        x, y, u = self.peakData(peakRange)
        if not ax:
            ax = plt.gca()
        mom, momLo, momHi = moments
        dp, dpLo, dpHi = distrPar
        #ax.plot(x, y, 'o', color=cls.color)
        lbl, fmt = [], "{: <7s} {: 9.2g} ±{: 9.2g}"
        for k in "area", "median", "var", "skew", "kurt":
            if k == "median":
                # median comes from the distribution parameters, not the moments
                lbl.append(fmt.format("median:", dp[-1], max(abs(dp[-1]-dpLo[-1]), abs(dpHi[-1]-dp[-1]))))
            else:
                lbl.append(fmt.format(k+':', mom[k], max(abs(mom[k]-momLo[k]), abs(momHi[k]-mom[k]))))
        lbl.append("LogNorm: "+distrParToText(dp)[0])
        ax.bar(x, y, width=self.getBarWidth(x), color=self.color, alpha=0.5, label="\n".join(lbl))
        # shaded band for the uncertainty, clipped at zero below
        ax.fill_between(x, np.maximum(0, y-u), y+u,
                        color='red', lw=0, alpha=0.1,
                        label=f"uncertainties (lvl: {self.uncertRatioMedian(peakRange):.3g})")
        if showFullRange:
            ax.set_xlim((self.x.min(), self.x.max()))
        ax.set_xlabel(f"Radius (m)")
        ax.legend(prop=font_manager.FontProperties(family='monospace')); ax.grid(True);
    def plot(self, ax, distPar, name=""):
        """Plot the complete distribution as loaded from file."""
        lbl = ("from file, " + name
                + area(self.x, self.y, showArea=True)
                +"\n"+distrParLatex(distPar[0]))
        ax.fill_between(self.x, self.y,
                #width=GenericResult.getBarWidth(self.x),
                color=self.color, alpha=0.5, label=lbl)
        #ax.errorbar(self.x, self.y, yerr=self.u, lw=lineWidth()*2, label=lbl)
        ax.fill_between(self.x, np.maximum(0, self.y-self.u), self.y+self.u,
                color='red', lw=0, alpha=0.1, label="uncertainties")
        ax.set_xlabel(f"Radius (m)")
        ax.legend(); ax.grid(); ax.set_xscale("log")
    def peakDistrPar(self, plotAxes=None, plotAxisStart=0, **plotPeakKwargs):
        """Compute (distrPar, moments) for each detected peak; optionally plot them.

        The lower/upper moment variants (from y-u and y+u) provide the
        uncertainty spread passed on to plotPeak().
        """
        distrPar = []
        moments = []
        for i, peakRange in enumerate(self.peaks): # for each peak
            x, y, u = self.peakData(peakRange)
            N = integrate(x, y)
            mom = Moments.fromData(x, y)
            momLo = Moments.fromData(x, np.maximum(0, y-u))
            momHi = Moments.fromData(x, y+u)
            dptmp = distrParFromDistrib(mom.mean, mom.var, N=N)
            dptmpLo = distrParFromDistrib(momLo.mean, momLo.var, N=N)
            dptmpHi = distrParFromDistrib(momHi.mean, momHi.var, N=N)
            distrPar.append(dptmp)
            moments.append(mom)
            if plotAxes is not None:
                plotPeakKwargs['ax'] = plotAxes[plotAxisStart+i]
                self.plotPeak(peakRange, (mom,momLo,momHi), (dptmp,dptmpLo,dptmpHi), **plotPeakKwargs)
        return distrPar, moments
def distrParToText(distrPar):
    """Format a flat sequence of log-normal parameter triples (N, sigma, median)
    as short text labels, one per triple; indexed labels when there are several."""
    numPars = 3
    multiModal = len(distrPar) > numPars
    fmt = ("R_{i}={:3.0f} s_{i}={:0.2f} N_{i}={:.3g}" if multiModal
           else "R={:3.0f} s={:0.2f} N={:.3g}")
    # median is reported in nm (hence the 1e9 factor)
    return [fmt.format(p[2]*1e9, p[1], p[0], i = i)
            for i, p in enumerate(grouper(distrPar, numPars))]
def distrParToFilename(distrPar, prefix=''):
    """Build a filename-safe string from the given distribution parameters."""
    parts = [prefix] + distrParToText(distrPar)
    return '_'.join(parts).replace(' ', '_')
def distrParLatex(distrPar, *kwargs):
    """Return the parameter text lines wrapped as LaTeX math, joined by newlines."""
    texts = distrParToText(distrPar)
    return "\n".join('$'+txt.replace(' ',r'\;')+'$' for txt in texts)
def distrParFromFilename(fn):
    """Parse distribution parameters (N, sigma, median) back out of a filename.

    Inverse of distrParToFilename(); expects '..._R=..._s=..._N=...' groups.
    """
    # fixed: itertools was used here but never imported at module level,
    # which raised NameError on every call; keep the import function-local
    # so this fix is self-contained.
    import itertools
    fn = fn.split('=')
    fn = [elem.lstrip('_') for elem in fn]
    fn = [(elem.split('_', maxsplit=1) if elem[0].isnumeric() else [elem]) for elem in fn]
    fn = list(itertools.chain(*fn))
    # median was written in nm, convert back to meters (1e-9)
    return list(itertools.chain(*[(float(grp[5]), float(grp[3]), float(grp[1])*1e-9)
                                      for grp in grouper(fn, 6)]))
def test():
    """Exercise the distrPar round-trip helpers with a mono- and a bimodal set."""
    cases = [
        (1, 0.2, 40e-9),                       # single log-normal mode
        (1, 0.2, 40e-9) + (1, 0.1, 10e-9),     # two modes concatenated
    ]
    for i, distrPar in enumerate(cases):
        if i:
            print()
        print("distrPar: ", list(grouper(distrPar, 3)))
        print("distrParToText:", distrParToText(distrPar))
        print("distrParLatex: ", distrParLatex(distrPar))
        print("distrParToFilename: ", distrParToFilename(distrPar))
        print("distrParFromFilename:", distrParFromFilename(distrParToFilename(distrPar)))
        print("distrParFromFilename:", distrParFromFilename(distrParToFilename(distrPar, "lognorm")))
| [
"numpy.abs",
"matplotlib.pyplot.grid",
"numpy.sqrt",
"numpy.median",
"matplotlib.pyplot.ylabel",
"numpy.average",
"matplotlib.pyplot.gca",
"matplotlib.font_manager.FontProperties",
"numpy.log",
"matplotlib.pyplot.plot",
"numpy.diff",
"numpy.sum",
"matplotlib.pyplot.figure",
"numpy.linspace... | [((2684, 2711), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 5)'}), '(figsize=(15, 5))\n', (2694, 2711), True, 'import matplotlib.pyplot as plt\n'), ((6911, 6935), 'numpy.sqrt', 'np.sqrt', (['(var + mean ** 2)'], {}), '(var + mean ** 2)\n', (6918, 6935), True, 'import numpy as np\n'), ((6954, 6985), 'numpy.log', 'np.log', (['(mean ** 2 / median ** 2)'], {}), '(mean ** 2 / median ** 2)\n', (6960, 6985), True, 'import numpy as np\n'), ((1678, 1703), 'numpy.diff', 'np.diff', (['y[start:end + 1]'], {}), '(y[start:end + 1])\n', (1685, 1703), True, 'import numpy as np\n'), ((4225, 4254), 'numpy.linspace', 'np.linspace', (['x[0]', 'x[-1]', '(200)'], {}), '(x[0], x[-1], 200)\n', (4236, 4254), True, 'import numpy as np\n'), ((4267, 4295), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], {'label': '"""data"""'}), "(x, y, label='data')\n", (4275, 4295), True, 'import matplotlib.pyplot as plt\n'), ((4372, 4407), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""data & spline approx."""'], {}), "('data & spline approx.')\n", (4382, 4407), True, 'import matplotlib.pyplot as plt\n'), ((4706, 4734), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""1st derivative"""'], {}), "('1st derivative')\n", (4716, 4734), True, 'import matplotlib.pyplot as plt\n'), ((4817, 4827), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (4825, 4827), True, 'import matplotlib.pyplot as plt\n'), ((4829, 4879), 'matplotlib.pyplot.legend', 'plt.legend', (['(handles1 + handles2)', '(labels1 + labels2)'], {}), '(handles1 + handles2, labels1 + labels2)\n', (4839, 4879), True, 'import matplotlib.pyplot as plt\n'), ((8706, 8722), 'numpy.median', 'np.median', (['(y / u)'], {}), '(y / u)\n', (8715, 8722), True, 'import numpy as np\n'), ((9102, 9111), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (9109, 9111), True, 'import matplotlib.pyplot as plt\n'), ((9775, 9795), 'numpy.maximum', 'np.maximum', (['(0)', '(y - u)'], {}), '(0, y - u)\n', (9785, 9795), True, 
'import numpy as np\n'), ((10658, 10688), 'numpy.maximum', 'np.maximum', (['(0)', '(self.y - self.u)'], {}), '(0, self.y - self.u)\n', (10668, 10688), True, 'import numpy as np\n'), ((1439, 1455), 'numpy.diff', 'np.diff', (['indices'], {}), '(indices)\n', (1446, 1455), True, 'import numpy as np\n'), ((3819, 3869), 'numpy.abs', 'np.abs', (['(xarr[:, np.newaxis] - roots[np.newaxis, :])'], {}), '(xarr[:, np.newaxis] - roots[np.newaxis, :])\n', (3825, 3869), True, 'import numpy as np\n'), ((5589, 5619), 'numpy.average', 'np.average', (['x'], {'weights': 'weights'}), '(x, weights=weights)\n', (5599, 5619), True, 'import numpy as np\n'), ((5818, 5853), 'numpy.sum', 'np.sum', (['(weights * (x - center) ** 2)'], {}), '(weights * (x - center) ** 2)\n', (5824, 5853), True, 'import numpy as np\n'), ((5850, 5865), 'numpy.sum', 'np.sum', (['weights'], {}), '(weights)\n', (5856, 5865), True, 'import numpy as np\n'), ((5938, 5973), 'numpy.sum', 'np.sum', (['(weights * (x - center) ** n)'], {}), '(weights * (x - center) ** n)\n', (5944, 5973), True, 'import numpy as np\n'), ((5970, 5985), 'numpy.sum', 'np.sum', (['weights'], {}), '(weights)\n', (5976, 5985), True, 'import numpy as np\n'), ((8817, 8830), 'numpy.diff', 'np.diff', (['xvec'], {}), '(xvec)\n', (8824, 8830), True, 'import numpy as np\n'), ((10089, 10136), 'matplotlib.font_manager.FontProperties', 'font_manager.FontProperties', ([], {'family': '"""monospace"""'}), "(family='monospace')\n", (10116, 10136), True, 'import matplotlib.font_manager as font_manager\n'), ((11219, 11239), 'numpy.maximum', 'np.maximum', (['(0)', '(y - u)'], {}), '(0, y - u)\n', (11229, 11239), True, 'import numpy as np\n'), ((4440, 4449), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4447, 4449), True, 'import matplotlib.pyplot as plt\n'), ((4592, 4601), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4599, 4601), True, 'import matplotlib.pyplot as plt\n'), ((4767, 4776), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4774, 
4776), True, 'import matplotlib.pyplot as plt\n'), ((8798, 8811), 'numpy.diff', 'np.diff', (['xvec'], {}), '(xvec)\n', (8805, 8811), True, 'import numpy as np\n')] |
import cryptoMath, sys
def getKey():
    """Prompt the user for the two numeric cipher keys and return them as ints."""
    print('What is the first key?')
    first = int(input())
    print('What is the second key?')
    second = int(input())
    return first, second
def encrypt(message, key_one, key_two):
    """Encrypt *message* with an affine cipher over space + a-z (27 symbols).

    A character at alphabet position p maps to symbols[(p*key_one + key_two) % 27].
    Characters outside the alphabet yield str.find() == -1 and are therefore
    treated like the last symbol.
    """
    # fixed: the alphabet was missing 'i' (only 26 symbols) while all arithmetic
    # is done modulo 27, so positions mapping to index 26 raised IndexError.
    symbols = ' abcdefghijklmnopqrstuvwxyz'
    new_message = ''
    for ch in message:
        position = symbols.find(ch)
        position = int(position * key_one) % 27
        position = (position + key_two) % 27
        new_message += symbols[position]
    return new_message
while True:
    # Re-read the plaintext every round so edits to the file are picked up.
    # fixed: use a context manager and read-only mode — the original opened
    # with 'r+' (needlessly demanding write access) and leaked the handle
    # if an exception occurred before close().
    with open('message.txt', 'r') as file:
        message = file.read()
    key_one, key_two = getKey()
    new_message = encrypt(message, key_one, key_two)
    print(new_message)
    print('press e to exit')
    pause = input()
    if pause == 'e':
        sys.exit()
| [
"sys.exit"
] | [((825, 835), 'sys.exit', 'sys.exit', ([], {}), '()\n', (833, 835), False, 'import cryptoMath, sys\n')] |
from django.contrib import admin
from keyvaluestore.models import KeyValueStore
@admin.register(KeyValueStore)
class KeyValueStoreAdmin(admin.ModelAdmin):
    """Admin configuration for KeyValueStore: show and search key/value columns."""
    list_display = ('key', 'value')
    search_fields = ('key', 'value')
| [
"django.contrib.admin.site.register"
] | [((202, 256), 'django.contrib.admin.site.register', 'admin.site.register', (['KeyValueStore', 'KeyValueStoreAdmin'], {}), '(KeyValueStore, KeyValueStoreAdmin)\n', (221, 256), False, 'from django.contrib import admin\n')] |
#!/bin/env python
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Get pull requests. """
import os
import time
import os.path
from github import Github
class PRChecker(object):
    """Checks whether the author of the current PR appears on the block list."""

    def __init__(self):
        # token comes from the environment; may be None for anonymous access
        self.github = Github(os.getenv('GITHUB_API_TOKEN'), timeout=60)
        self.repo = None

    def check(self):
        """Fail CI (exit 1) when the PR author is listed in block.txt."""
        filename = 'block.txt'
        pr_id = os.getenv('GIT_PR_ID')
        if not pr_id:
            print('No PR ID')
            exit(0)
        print(pr_id)
        if not os.path.isfile(filename):
            print('No author to check')
            exit(0)
        self.repo = self.github.get_repo('PaddlePaddle/Paddle')
        author = self.repo.get_pull(int(pr_id)).user.login
        with open(filename) as blockfile:
            blocked = {line.rstrip('\r\n') for line in blockfile}
        if author in blocked:
            print('{} has unit-test to be fixed, so CI failed.'.format(
                author))
            exit(1)
        exit(0)
if __name__ == '__main__':
    # instantiate and run the checker in one go
    PRChecker().check()
| [
"os.path.isfile",
"os.getenv"
] | [((987, 1009), 'os.getenv', 'os.getenv', (['"""GIT_PR_ID"""'], {}), "('GIT_PR_ID')\n", (996, 1009), False, 'import os\n'), ((824, 853), 'os.getenv', 'os.getenv', (['"""GITHUB_API_TOKEN"""'], {}), "('GITHUB_API_TOKEN')\n", (833, 853), False, 'import os\n'), ((1118, 1142), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (1132, 1142), False, 'import os\n')] |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import x2paddle
import os
import numpy as np
import paddle.fluid.core as core
import paddle.fluid as fluid
import onnx
from onnx import helper, onnx_pb
class PaddleOpMapper(object):
    """Translates a Paddle fluid ``Program`` into an ONNX model.

    Each supported fluid operator has a handler method named after the op type
    (e.g. ``conv2d``); ``convert()`` dispatches on ``op.type`` via ``getattr``.
    Handlers return one ONNX node, a list of nodes, or (for feed/fetch) a
    tensor value info.
    """

    def __init__(self):
        # Paddle variable dtype -> ONNX tensor dtype.
        # NOTE(review): the original dict listed VarType.INT16 twice (mapped to
        # INT16, then UINT16); the second entry won, so the effective mapping
        # INT16 -> UINT16 is kept here — confirm whether INT16 was intended.
        self.paddle_onnx_dtype_map = {
            core.VarDesc.VarType.FP32: onnx_pb.TensorProto.FLOAT,
            core.VarDesc.VarType.FP64: onnx_pb.TensorProto.DOUBLE,
            core.VarDesc.VarType.INT32: onnx_pb.TensorProto.INT32,
            core.VarDesc.VarType.INT16: onnx_pb.TensorProto.UINT16,
            core.VarDesc.VarType.INT64: onnx_pb.TensorProto.INT64,
            core.VarDesc.VarType.BOOL: onnx_pb.TensorProto.BOOL
        }
        # counts how often each generated name was requested, for unique suffixes
        self.name_counter = dict()

    def get_name(self, op_name, var_name):
        """Return a unique auxiliary tensor name 'p2o.<op>.<var>.<counter>'."""
        name = 'p2o.{}.{}'.format(op_name, var_name)
        if name not in self.name_counter:
            self.name_counter[name] = 0
        else:
            self.name_counter[name] += 1
        return name + '.{}'.format(self.name_counter[name])

    def make_constant_node(self, name, dtype, value=None):
        """Create an ONNX Constant node holding *value* (scalar, list or None)."""
        if isinstance(value, list):
            dims = (len(value), )
        elif value is None:
            dims = ()
            value = []
        else:
            dims = ()
            value = [value]
        tensor = helper.make_tensor(
            name=name, data_type=dtype, dims=dims, vals=value)
        node = helper.make_node(
            'Constant', inputs=[], outputs=[name], value=tensor)
        return node

    def conv2d(self, op, block):
        """Map fluid conv2d to ONNX Conv."""
        kernel_shape = block.var(op.input('Filter')[0]).shape
        node = helper.make_node(
            'Conv',
            inputs=op.input('Input') + op.input('Filter'),
            outputs=op.output('Output'),
            dilations=op.attr('dilations'),
            kernel_shape=kernel_shape[-2:],
            strides=op.attr('strides'),
            group=op.attr('groups'),
            # paddings are duplicated: ONNX wants begin- and end-pads
            pads=op.attr('paddings') + op.attr('paddings'))
        return node

    def relu(self, op, block):
        """Map fluid relu to ONNX Relu."""
        node = helper.make_node(
            'Relu', inputs=op.input('X'), outputs=op.output('Out'))
        return node

    def elementwise_add(self, op, block):
        """Map fluid elementwise_add to ONNX Add, reshaping Y for axis broadcast."""
        axis = op.attr('axis')
        x_shape = block.var(op.input('X')[0]).shape
        y_shape = block.var(op.input('Y')[0]).shape
        if len(y_shape) == 1 and axis == 1:
            # reshape the 1-D Y so it broadcasts along the requested axis
            shape_name = self.get_name(op.type, 'shape')
            shape_value = [1] * len(x_shape)
            shape_value[axis] = y_shape[0]
            shape_node = self.make_constant_node(
                shape_name, onnx_pb.TensorProto.INT64, shape_value)
            temp_value = self.get_name(op.type, 'temp')
            y_node = helper.make_node(
                'Reshape',
                inputs=[op.input('Y')[0], shape_name],
                outputs=[temp_value])
            node = helper.make_node(
                'Add',
                inputs=[op.input('X')[0], temp_value],
                outputs=op.output('Out'))
            return [shape_node, y_node, node]
        elif len(x_shape) == len(y_shape):
            node = helper.make_node(
                'Add',
                inputs=[op.input('X')[0], op.input('Y')[0]],
                outputs=op.output('Out'))
            return node
        else:
            # fixed: was the misspelled name 'Excpetion' (a NameError when raised)
            raise Exception("Unexpected situation happend in elementwise_add")

    def pool2d(self, op, block):
        """Map fluid pool2d to ONNX (Global)MaxPool / (Global)AveragePool."""
        pool_type = {
            'max': ('MaxPool', 'GlobalMaxPool'),
            'avg': ('AveragePool', 'GlobalAveragePool')
        }
        if op.attr('global_pooling'):
            node = helper.make_node(
                pool_type[op.attr('pooling_type')][1],
                inputs=op.input('X'),
                outputs=op.output('Out'),
            )
        else:
            node = helper.make_node(
                pool_type[op.attr('pooling_type')][0],
                inputs=op.input('X'),
                outputs=op.output('Out'),
                kernel_shape=op.attr('ksize'),
                strides=op.attr('strides'),
                pads=op.attr('paddings') + op.attr('paddings'))
        return node

    def softmax(self, op, block):
        """Map fluid softmax to ONNX Softmax."""
        node = helper.make_node(
            'Softmax',
            inputs=op.input('X'),
            outputs=op.output('Out'),
            axis=op.attr('axis'))
        return node

    def scale(self, op, block):
        """Map fluid scale (y = scale*x + bias) to Mul/Add, or Identity when trivial."""
        scale = op.attr('scale')
        bias = op.attr('bias')
        if math.fabs(scale - 1.0) < 1e-06 and math.fabs(bias - 0.0) < 1e-06:
            node = helper.make_node(
                'Identity', inputs=op.input('X'), outputs=op.output('Out'))
            return node
        else:
            scale_name = self.get_name(op.type, 'scale')
            bias_name = self.get_name(op.type, 'bias')
            scale_node = self.make_constant_node(
                scale_name, onnx_pb.TensorProto.FLOAT, scale)
            bias_node = self.make_constant_node(bias_name,
                                                onnx_pb.TensorProto.FLOAT, bias)
            temp_tensor_name = self.get_name(op.type, 'temporary')
            if op.attr('bias_after_scale'):
                node1 = helper.make_node(
                    'Mul',
                    inputs=[scale_name, op.input('X')[0]],
                    outputs=[temp_tensor_name])
                node2 = helper.make_node(
                    'Add',
                    inputs=[bias_name, temp_tensor_name],
                    outputs=op.output('Out'))
            else:
                # fixed: outputs must be a list of names, not a bare string
                node1 = helper.make_node(
                    'Add',
                    inputs=[bias_name, op.input('X')[0]],
                    outputs=[temp_tensor_name])
                # fixed: op.output('Out') is already a list; it was wrapped
                # in another list before
                node2 = helper.make_node(
                    'Mul',
                    inputs=[scale_name, temp_tensor_name],
                    outputs=op.output('Out'))
            return [scale_node, bias_node, node1, node2]

    def mul(self, op, block):
        """Map fluid mul to Flatten + MatMul + Reshape (restoring the out shape)."""
        x_shape = block.var(op.input('X')[0]).shape
        y_shape = block.var(op.input('Y')[0]).shape
        out_shape = list(block.var(op.output('Out')[0]).shape)
        x_num_col_dims = op.attr('x_num_col_dims')
        y_num_col_dims = op.attr('y_num_col_dims')
        flatten_x_name = 'flatten_{}'.format(op.input('X')[0])
        flatten_y_name = 'flatten_{}'.format(op.input('Y')[0])
        shape_name = 'temp_shape_{}'.format(op.output('Out')[0])
        temp_out_name = 'temp_{}'.format(op.output('Out')[0])
        flatten_x = helper.make_node(
            'Flatten',
            inputs=op.input('X'),
            outputs=[flatten_x_name],
            axis=x_num_col_dims)
        flatten_y = helper.make_node(
            'Flatten',
            inputs=op.input('Y'),
            outputs=[flatten_y_name],
            axis=y_num_col_dims)
        shape_node = self.make_constant_node(
            shape_name, onnx_pb.TensorProto.INT64, out_shape)
        node = helper.make_node(
            'MatMul',
            inputs=[flatten_x_name, flatten_y_name],
            outputs=[temp_out_name])
        reshape_out = helper.make_node(
            'Reshape',
            inputs=[temp_out_name, shape_name],
            outputs=op.output('Out'))
        return [flatten_x, flatten_y, shape_node, node, reshape_out]

    def batch_norm(self, op, block):
        """Map fluid batch_norm to ONNX BatchNormalization."""
        kwargs = {
            'epsilon': op.attr('epsilon'),
            'momentum': op.attr('momentum')
        }
        # ONNX input order: X, scale, B, mean, var
        inputs = op.input('X') + op.input('Scale') + op.input(
            'Bias') + op.input('Mean') + op.input('Variance')
        node = helper.make_node(
            'BatchNormalization',
            inputs=inputs,
            outputs=op.output('Y'),
            **kwargs)
        return node

    def concat(self, op, block):
        """Map fluid concat to ONNX Concat."""
        node = helper.make_node(
            'Concat',
            inputs=op.input('X'),
            outputs=op.output('Out'),
            axis=op.attr('axis'))
        return node

    def depthwise_conv2d(self, op, block):
        """Depthwise conv uses the same mapping as conv2d (groups attr covers it)."""
        return self.conv2d(op, block)

    def relu6(self, op, block):
        """Map fluid relu6 to ONNX Clip(0, threshold)."""
        min_name = self.get_name(op.type, 'min')
        max_name = self.get_name(op.type, 'max')
        min_node = self.make_constant_node(min_name, onnx_pb.TensorProto.FLOAT,
                                           0)
        max_node = self.make_constant_node(max_name, onnx_pb.TensorProto.FLOAT,
                                           op.attr('threshold'))
        node = helper.make_node(
            'Clip',
            inputs=[op.input('X')[0], min_name, max_name],
            outputs=op.output('Out'),
        )
        return [min_node, max_node, node]

    def shape(self, op, block):
        """Map fluid shape to ONNX Shape."""
        node = helper.make_node(
            'Shape', inputs=op.input('Input'), outputs=op.output('Out'))
        return node

    def split(self, op, block):
        """Map fluid split to ONNX Split (with or without explicit sections)."""
        sections = op.attr('sections')
        if len(sections) > 0:
            node = helper.make_node(
                'Split',
                inputs=op.input('X'),
                outputs=op.output('Out'),
                axis=op.attr('axis'),
                split=sections)
        else:
            node = helper.make_node(
                'Split',
                inputs=op.input('X'),
                outputs=op.output('Out'),
                axis=op.attr('axis'))
        # fixed: the no-sections branch previously fell through without
        # returning, so convert() appended None to the graph
        return node

    def slice(self, op, block):
        """Map fluid slice to ONNX Slice with starts/ends/axes as constant inputs."""
        axes = op.attr('axes')
        starts = op.attr('starts')
        ends = op.attr('ends')
        # fixed: these helpers are instance methods; they were called as bare
        # names (NameError at runtime)
        axes_name = self.get_name(op.type, 'axes')
        starts_name = self.get_name(op.type, 'starts')
        ends_name = self.get_name(op.type, 'ends')
        axes_node = self.make_constant_node(axes_name,
                                            onnx_pb.TensorProto.INT64, axes)
        starts_node = self.make_constant_node(starts_name,
                                              onnx_pb.TensorProto.INT64, starts)
        ends_node = self.make_constant_node(ends_name,
                                            onnx_pb.TensorProto.INT64, ends)
        node = helper.make_node(
            "Slice",
            inputs=[op.input('Input')[0], starts_name, ends_name, axes_name],
            outputs=op.output('Out'),
        )
        return [starts_node, ends_node, axes_node, node]

    def fill_constant(self, op, block):
        """Map fluid fill_constant to an ONNX Constant of the requested shape/value."""
        value = op.attr('value')
        dtype = op.attr('dtype')
        shape = op.attr('shape')
        value = np.ones(shape) * value
        # fixed: 'Out' is an output variable, not an attribute; op.attr('Out')
        # returned nothing useful
        out_name = op.output('Out')
        node = helper.make_node(
            'Constant',
            inputs=[],
            outputs=out_name,
            value=helper.make_tensor(
                name=out_name[0],
                data_type=self.paddle_onnx_dtype_map[dtype],
                dims=shape,
                vals=value.tolist()))
        return node

    def transpose2(self, op, block):
        """Map fluid transpose2 to ONNX Transpose."""
        node = helper.make_node(
            'Transpose',
            inputs=op.input('X'),
            outputs=op.output('Out'),
            perm=op.attr('perm'))
        return node

    def reshape2(self, op, block):
        """Map fluid reshape2 to Reshape; the target shape comes either from the
        'Shape' input tensor or from the 'shape' attribute (as a constant)."""
        input_names = op.input_names
        if 'Shape' in input_names and len(op.input('Shape')) > 0:
            node = helper.make_node(
                'Reshape',
                inputs=[op.input('X')[0],
                        op.input('Shape')[0]],
                outputs=op.output('Out'))
            return node
        # fixed: helper calls lacked 'self.' and the module alias was
        # misspelled 'onnxpb'; also removed an unreachable trailing return
        shape = op.attr('shape')
        shape_name = self.get_name(op.type, 'shape')
        shape_node = self.make_constant_node(shape_name,
                                             onnx_pb.TensorProto.INT64, shape)
        node = helper.make_node(
            'Reshape',
            inputs=[op.input('X')[0], shape_name],
            outputs=op.output('Out'))
        return [shape_node, node]

    def dropout(self, op, block):
        """Map fluid dropout at inference: Identity (upscale mode) or a Mul by
        (1 - dropout_prob) (downgrade mode)."""
        dropout_mode = op.attr('dropout_implementation')
        dropout_prob = op.attr('dropout_prob')
        if dropout_mode == 'upscale_in_train':
            node = helper.make_node(
                'Identity', inputs=op.input('X'), outputs=op.output('Out'))
            return node
        elif dropout_mode == 'downgrade_in_infer':
            scale_name = self.get_name(op.type, 'scale')
            scale_node = self.make_constant_node(
                scale_name, onnx_pb.TensorProto.FLOAT, 1 - dropout_prob)
            node = helper.make_node(
                "Mul",
                inputs=[op.input('X')[0], scale_name],
                outputs=op.output('Out'))
            return [scale_node, node]
        else:
            raise Exception("Unexpected situation happend")

    def reduce_mean(self, op, block):
        """Map fluid reduce_mean to ONNX ReduceMean."""
        node = helper.make_node(
            'ReduceMean',
            inputs=op.input('X'),
            outputs=op.output('Out'),
            axes=op.attr('axes'),
            keepdims=op.attr('keep_dim'))
        return node

    def nearest_interp(self, op, block):
        """Map fluid nearest_interp to ONNX Resize; the target size can come from
        the OutSize input, the Scale input, or the 'scale' attribute."""
        input_names = op.input_names
        if 'OutSize' in input_names and len(op.input('OutSize')) > 0:
            node = helper.make_node(
                'Resize',
                inputs=[op.input('X')[0], '',
                        op.input('OutSize')[0]],
                outputs=op.output('Out'))
        elif 'Scale' in input_names and len(op.input('Scale')) > 0:
            node = helper.make_node(
                'Resize',
                inputs=[op.input('X')[0],
                        op.input('Scale')[0]],
                outputs=op.output('Out'))
        else:
            out_shape = [op.attr('out_h'), op.attr('out_w')]
            scale = op.attr('scale')
            if out_shape.count(-1) > 0:
                scale_name = self.get_name(op.type, 'scale')
                scale_node = self.make_constant_node(
                    scale_name, onnx_pb.TensorProto.FLOAT, [1, 1, scale, scale])
                roi_name = self.get_name(op.type, 'roi')
                roi_node = self.make_constant_node(roi_name,
                                                   onnx_pb.TensorProto.FLOAT,
                                                   [1, 1, 1, 1, 1, 1, 1, 1])
                node = helper.make_node(
                    'Resize',
                    inputs=[op.input('X')[0], roi_name, scale_name],
                    outputs=op.output('Out'),
                    mode='nearest')
                return [scale_node, roi_node, node]
            else:
                # explicit out_h/out_w without -1 is not handled by this mapper
                raise Exception("Unexpected situation happend")
        return node

    def hard_sigmoid(self, op, block):
        """Map fluid hard_sigmoid to ONNX HardSigmoid (alpha=slope, beta=offset)."""
        slope = op.attr('slope')
        offset = op.attr('offset')
        node = helper.make_node(
            'HardSigmoid',
            inputs=op.input('X'),
            outputs=op.output('Out'),
            alpha=slope,
            beta=offset)
        return node

    def elementwise_mul(self, op, block):
        """Map fluid elementwise_mul to ONNX Mul, reshaping Y for axis broadcast."""
        axis = op.attr('axis')
        x_shape = block.var(op.input('X')[0]).shape
        y_shape = block.var(op.input('Y')[0]).shape
        if len(y_shape) == 1 and axis == 1:
            shape_name = self.get_name(op.type, 'shape')
            shape_value = [1] * len(x_shape)
            shape_value[axis] = y_shape[0]
            shape_node = self.make_constant_node(
                shape_name, onnx_pb.TensorProto.INT64, shape_value)
            temp_value = self.get_name(op.type, 'temp')
            y_node = helper.make_node(
                'Reshape',
                inputs=[op.input('Y')[0], shape_name],
                outputs=[temp_value])
            node = helper.make_node(
                'Mul',
                inputs=[op.input('X')[0], temp_value],
                outputs=op.output('Out'))
            return [shape_node, y_node, node]
        elif len(x_shape) == len(y_shape):
            node = helper.make_node(
                'Mul',
                inputs=[op.input('X')[0], op.input('Y')[0]],
                outputs=op.output('Out'))
            return node
        else:
            # fixed: was the misspelled name 'Excpetion'; also removed the
            # unreachable 'return node' that followed this raise
            raise Exception("Unexpected situation happend in elementwise_add")

    def feed(self, op, block):
        """Return the value info describing a feed (graph input) variable."""
        name = op.output('Out')[0]
        var = block.var(name)
        tensor_info = helper.make_tensor_value_info(
            name=name,
            shape=var.shape,
            elem_type=self.paddle_onnx_dtype_map[var.dtype])
        return tensor_info

    def fetch(self, op, block):
        """Return the value info describing a fetch (graph output) variable."""
        name = op.input('X')[0]
        var = block.var(name)
        tensor_info = helper.make_tensor_value_info(
            name=name,
            shape=var.shape,
            elem_type=self.paddle_onnx_dtype_map[var.dtype])
        return tensor_info

    def convert_weights(self, program):
        """Export every persistable variable of *program* as an ONNX Constant node."""
        var_names = program.global_block().vars
        nodes = list()
        for name in var_names:
            var = program.global_block().var(name)
            if name.endswith('feed') or name.endswith('fetch'):
                continue
            if not var.persistable:
                continue
            # read the trained weight values from the global scope
            weight = np.array(fluid.global_scope().find_var(name).get_tensor())
            tensor = helper.make_tensor(
                name=name,
                dims=var.shape,
                data_type=self.paddle_onnx_dtype_map[var.dtype],
                vals=weight.flatten().tolist())
            node = helper.make_node(
                'Constant', inputs=[], outputs=[name], value=tensor)
            nodes.append(node)
        return nodes

    def convert(self, program, save_dir):
        """Convert *program* to ONNX and write x2paddle_model.onnx into *save_dir*.

        Prints the unsupported op types and returns without saving if any
        operator has no handler method.
        """
        weight_nodes = self.convert_weights(program)
        op_nodes = list()
        input_nodes = list()
        output_nodes = list()
        unsupported_ops = set()
        for block in program.blocks:
            for op in block.ops:
                print('Translating op: {}'.format(op.type))
                if not hasattr(self, op.type):
                    unsupported_ops.add(op.type)
                    continue
                # once an unsupported op was seen, only keep collecting names
                if len(unsupported_ops) > 0:
                    continue
                node = getattr(self, op.type)(op, block)
                if op.type == 'feed':
                    input_nodes.append(node)
                elif op.type == 'fetch':
                    output_nodes.append(node)
                else:
                    if isinstance(node, list):
                        op_nodes = op_nodes + node
                    else:
                        op_nodes.append(node)
        if len(unsupported_ops) > 0:
            print("There's {} ops are not supported yet".format(
                len(unsupported_ops)))
            for op in unsupported_ops:
                print("=========== {} ===========".format(op))
            return
        graph = helper.make_graph(
            nodes=weight_nodes + op_nodes,
            name='onnx_model_from_paddle',
            initializer=[],
            inputs=input_nodes,
            outputs=output_nodes)
        model = helper.make_model(graph, producer_name='X2Paddle')
        onnx.checker.check_model(model)
        if not os.path.isdir(save_dir):
            os.makedirs(save_dir)
        with open(os.path.join(save_dir, 'x2paddle_model.onnx'), 'wb') as f:
            f.write(model.SerializeToString())
        print("Translated model saved in {}".format(
            os.path.join(save_dir, 'x2paddle_model.onnx')))
| [
"onnx.helper.make_graph",
"onnx.helper.make_node",
"numpy.ones",
"os.makedirs",
"paddle.fluid.global_scope",
"onnx.helper.make_tensor_value_info",
"os.path.join",
"onnx.helper.make_model",
"os.path.isdir",
"math.fabs",
"onnx.helper.make_tensor",
"onnx.checker.check_model"
] | [((1961, 2030), 'onnx.helper.make_tensor', 'helper.make_tensor', ([], {'name': 'name', 'data_type': 'dtype', 'dims': 'dims', 'vals': 'value'}), '(name=name, data_type=dtype, dims=dims, vals=value)\n', (1979, 2030), False, 'from onnx import helper, onnx_pb\n'), ((2059, 2128), 'onnx.helper.make_node', 'helper.make_node', (['"""Constant"""'], {'inputs': '[]', 'outputs': '[name]', 'value': 'tensor'}), "('Constant', inputs=[], outputs=[name], value=tensor)\n", (2075, 2128), False, 'from onnx import helper, onnx_pb\n'), ((7589, 7686), 'onnx.helper.make_node', 'helper.make_node', (['"""MatMul"""'], {'inputs': '[flatten_x_name, flatten_y_name]', 'outputs': '[temp_out_name]'}), "('MatMul', inputs=[flatten_x_name, flatten_y_name], outputs\n =[temp_out_name])\n", (7605, 7686), False, 'from onnx import helper, onnx_pb\n'), ((16740, 16851), 'onnx.helper.make_tensor_value_info', 'helper.make_tensor_value_info', ([], {'name': 'name', 'shape': 'var.shape', 'elem_type': 'self.paddle_onnx_dtype_map[var.dtype]'}), '(name=name, shape=var.shape, elem_type=self.\n paddle_onnx_dtype_map[var.dtype])\n', (16769, 16851), False, 'from onnx import helper, onnx_pb\n'), ((17028, 17139), 'onnx.helper.make_tensor_value_info', 'helper.make_tensor_value_info', ([], {'name': 'name', 'shape': 'var.shape', 'elem_type': 'self.paddle_onnx_dtype_map[var.dtype]'}), '(name=name, shape=var.shape, elem_type=self.\n paddle_onnx_dtype_map[var.dtype])\n', (17057, 17139), False, 'from onnx import helper, onnx_pb\n'), ((19237, 19384), 'onnx.helper.make_graph', 'helper.make_graph', ([], {'nodes': '(weight_nodes + op_nodes)', 'name': '"""onnx_model_from_paddle"""', 'initializer': '[]', 'inputs': 'input_nodes', 'outputs': 'output_nodes'}), "(nodes=weight_nodes + op_nodes, name=\n 'onnx_model_from_paddle', initializer=[], inputs=input_nodes, outputs=\n output_nodes)\n", (19254, 19384), False, 'from onnx import helper, onnx_pb\n'), ((19452, 19502), 'onnx.helper.make_model', 'helper.make_model', (['graph'], 
{'producer_name': '"""X2Paddle"""'}), "(graph, producer_name='X2Paddle')\n", (19469, 19502), False, 'from onnx import helper, onnx_pb\n'), ((19511, 19542), 'onnx.checker.check_model', 'onnx.checker.check_model', (['model'], {}), '(model)\n', (19535, 19542), False, 'import onnx\n'), ((10996, 11010), 'numpy.ones', 'np.ones', (['shape'], {}), '(shape)\n', (11003, 11010), True, 'import numpy as np\n'), ((17855, 17924), 'onnx.helper.make_node', 'helper.make_node', (['"""Constant"""'], {'inputs': '[]', 'outputs': '[name]', 'value': 'tensor'}), "('Constant', inputs=[], outputs=[name], value=tensor)\n", (17871, 17924), False, 'from onnx import helper, onnx_pb\n'), ((19559, 19582), 'os.path.isdir', 'os.path.isdir', (['save_dir'], {}), '(save_dir)\n', (19572, 19582), False, 'import os\n'), ((19596, 19617), 'os.makedirs', 'os.makedirs', (['save_dir'], {}), '(save_dir)\n', (19607, 19617), False, 'import os\n'), ((5116, 5138), 'math.fabs', 'math.fabs', (['(scale - 1.0)'], {}), '(scale - 1.0)\n', (5125, 5138), False, 'import math\n'), ((5151, 5172), 'math.fabs', 'math.fabs', (['(bias - 0.0)'], {}), '(bias - 0.0)\n', (5160, 5172), False, 'import math\n'), ((19636, 19681), 'os.path.join', 'os.path.join', (['save_dir', '"""x2paddle_model.onnx"""'], {}), "(save_dir, 'x2paddle_model.onnx')\n", (19648, 19681), False, 'import os\n'), ((19807, 19852), 'os.path.join', 'os.path.join', (['save_dir', '"""x2paddle_model.onnx"""'], {}), "(save_dir, 'x2paddle_model.onnx')\n", (19819, 19852), False, 'import os\n'), ((17573, 17593), 'paddle.fluid.global_scope', 'fluid.global_scope', ([], {}), '()\n', (17591, 17593), True, 'import paddle.fluid as fluid\n')] |
import unittest
import sys
sys.path.insert(1, '..')
import easy_gui
class TestGUI(easy_gui.EasyGUI):
    """Minimal window used to smoke-test easy_gui widget creation."""
    def __init__(self):
        self.geometry('500x400')
        self._build_test_section()
        self._build_output_section()

    def _build_test_section(self):
        # One button wired to a print callback, plus a label.
        self.add_section('test_section')
        section = self.sections['test_section']
        section.add_widget(type='button', text='Button1',
                           command_func=lambda e: print('Button1 working!'))
        section.add_widget(type='label', text="Here's an awesome label!")

    def _build_output_section(self):
        # Widget capturing stdout into the window.
        self.add_section('output_section')
        self.sections['output_section'].add_widget(type='stdout', height=20, width=40)
class TestEasyGUI(unittest.TestCase):
    """Smoke test: constructing the GUI must not raise."""
    def test_gui_creation(self):
        TestGUI()
        self.assertTrue(True)
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| [
"unittest.main",
"sys.path.insert"
] | [((27, 51), 'sys.path.insert', 'sys.path.insert', (['(1)', '""".."""'], {}), "(1, '..')\n", (42, 51), False, 'import sys\n'), ((723, 738), 'unittest.main', 'unittest.main', ([], {}), '()\n', (736, 738), False, 'import unittest\n')] |
# Copyright (C) 2021, Pyronear contributors.
# This program is licensed under the Apache License version 2.
# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.
import requests
from requests.models import Response
import logging
from urllib.parse import urljoin
import io
from typing import Dict, Any, Optional
from .exceptions import HTTPRequestException
# Public API of this module.
__all__ = ['Client']

# NOTE(review): calling basicConfig() at import time of a library module
# configures the root logger for the whole host application — confirm this
# is intended rather than leaving configuration to the application.
logging.basicConfig()

# Relative API route for every operation the client supports; each value is
# joined onto the API base URL at Client construction time.
ROUTES: Dict[str, str] = {
    "token": "/login/access-token",
    #################
    # DEVICES
    #################
    # Device-logged
    "heartbeat": "/devices/heartbeat",
    "update-my-location": "/devices/update-my-location",
    "get-my-device": "/devices/me",
    "update-my-hash": "/devices/hash",
    # User-logged
    "get-my-devices": "/devices/my-devices",
    #################
    # SITES
    #################
    "get-sites": "/sites",
    "no-alert-site": "/sites/no-alert",
    #################
    # EVENTS
    #################
    "create-event": "/events",
    "get-unacknowledged-events": "/events/unacknowledged",
    "get-past-events": "/events/past",
    "acknowledge-event": "/events/{event_id}/acknowledge",
    #################
    # INSTALLATIONS
    #################
    "get-site-devices": "/installations/site-devices/{site_id}",
    #################
    # MEDIA
    #################
    "create-media": "/media",
    "create-media-from-device": "/media/from-device",
    "upload-media": "/media/{media_id}/upload",
    "get-media-url": "/media/{media_id}/url",
    #################
    # ALERTS
    #################
    "send-alert": "/alerts",
    "send-alert-from-device": "/alerts/from-device",
    "get-alerts": "/alerts",
    "get-ongoing-alerts": "/alerts/ongoing",
}
class Client:
    """Client class to interact with the PyroNear API

    Args:
        api_url (str): url of the pyronear API
        credentials_login (str): login (e.g: username)
        credentials_password (str): password for that login
    """

    api: str
    routes: Dict[str, str]
    token: str

    def __init__(self, api_url: str, credentials_login: str, credentials_password: str) -> None:
        self.api = api_url
        # Prepend API url to each route
        self.routes = {k: urljoin(self.api, v) for k, v in ROUTES.items()}
        self.refresh_token(credentials_login, credentials_password)

    @property
    def headers(self) -> Dict[str, str]:
        """Authorization header built from the current access token."""
        return {"Authorization": f"Bearer {self.token}"}

    def refresh_token(self, login: str, password: str) -> None:
        """Obtain and store a fresh access token for the given credentials."""
        self.token = self._retrieve_token(login, password)

    def _retrieve_token(self, login: str, password: str) -> str:
        # Fix: credentials were previously interpolated into a raw body string
        # (f"username={login}&password={password}"), which broke whenever the
        # login or password contained characters such as '&', '=' or '%'.
        # Passing a dict lets requests form-encode (URL-escape) the fields and
        # set the application/x-www-form-urlencoded content type itself.
        response = requests.post(self.routes["token"],
                                 data={"username": login, "password": password},
                                 headers={"accept": "application/json"})
        if response.status_code == 200:
            return response.json()["access_token"]
        else:
            raise HTTPRequestException(response.status_code, response.text)

    # Device functions
    def heartbeat(self) -> Response:
        """Updates the last ping of the device

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "DEVICE_LOGIN", "MY_PWD")
            >>> response = api_client.heartbeat()

        Returns:
            HTTP response containing the update device info
        """
        return requests.put(self.routes["heartbeat"], headers=self.headers)

    def update_my_location(
        self,
        lat: Optional[float] = None,
        lon: Optional[float] = None,
        elevation: Optional[float] = None,
        yaw: Optional[float] = None,
        pitch: Optional[float] = None
    ) -> Response:
        """Updates the location of the device

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "DEVICE_LOGIN", "MY_PWD")
            >>> response = api_client.update_my_location(lat=10., lon=-5.45)

        Returns:
            HTTP response containing the update device info

        Raises:
            ValueError: when no location field at all is provided
        """
        # Only the fields explicitly provided are sent to the API.
        payload = {}
        if lat is not None:
            payload["lat"] = lat
        if lon is not None:
            payload["lon"] = lon
        if elevation is not None:
            payload["elevation"] = elevation
        if yaw is not None:
            payload["yaw"] = yaw
        if pitch is not None:
            payload["pitch"] = pitch
        if len(payload) == 0:
            # Fix: the two message halves were concatenated without a space.
            raise ValueError("At least one location information "
                             "(lat, lon, elevation, yaw, pitch) must be filled")
        return requests.put(self.routes["update-my-location"], headers=self.headers, json=payload)

    def create_event(self, lat: float, lon: float) -> Response:
        """Register an event (e.g wildfire).

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.create_event(lat=10., lon=-5.45)

        Args:
            lat: the latitude of the event
            lon: the longitude of the event

        Returns:
            HTTP response containing the created event
        """
        payload = {"lat": lat,
                   "lon": lon}
        return requests.post(self.routes["create-event"], headers=self.headers, json=payload)

    def create_no_alert_site(self, lat: float, lon: float, name: str, country: str,
                             geocode: str, group_id: Optional[int] = None) -> Response:
        """Create a site that is not supposed to generate alerts.

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.create_no_alert_site(lat=10., lon=-5.45, name="farm", country="FR", geocode="01")

        Args:
            lat: the latitude of the site
            lon: the longitude of the site
            name: the name of the site
            country: the country where the site is located
            geocode: the geocode of the site
            group_id: optional group the site belongs to

        Returns:
            HTTP response containing the created site
        """
        payload = {"lat": lat,
                   "lon": lon,
                   "name": name,
                   "country": country,
                   "geocode": geocode}
        if group_id is not None:
            payload["group_id"] = group_id
        return requests.post(self.routes["no-alert-site"], headers=self.headers, json=payload)

    def send_alert(
        self,
        lat: float,
        lon: float,
        device_id: int,
        azimuth: Optional[float] = None,
        event_id: Optional[int] = None,
        media_id: Optional[int] = None
    ) -> Response:
        """Raise an alert to the API.

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.send_alert(lat=10., lon=-5.45, device_id=3, azimuth=2.)

        Args:
            lat: the latitude of the alert
            lon: the longitude of the alert
            azimuth: the azimuth of the alert
            event_id: the ID of the event this alerts relates to
            device_id: the ID of the device that raised this alert
            media_id: optional media ID linked to this alert

        Returns:
            HTTP response containing the created alert
        """
        payload = {"lat": lat,
                   "lon": lon,
                   "event_id": event_id,
                   "device_id": device_id
                   }
        if isinstance(media_id, int):
            payload["media_id"] = media_id
        if isinstance(azimuth, float):
            payload["azimuth"] = azimuth
        return requests.post(self.routes["send-alert"], headers=self.headers, json=payload)

    def send_alert_from_device(
        self,
        lat: float,
        lon: float,
        azimuth: Optional[float] = None,
        event_id: Optional[int] = None,
        media_id: Optional[int] = None
    ) -> Response:
        """Raise an alert to the API from a device (no need to specify device ID).

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "DEVICE_LOGIN", "MY_PWD")
            >>> response = api_client.send_alert_from_device(lat=10., lon=-5.45)

        Args:
            lat: the latitude of the alert
            lon: the longitude of the alert
            azimuth: the azimuth of the alert
            event_id: the ID of the event this alerts relates to
            media_id: optional media ID linked to this alert

        Returns:
            HTTP response containing the created alert
        """
        payload = {"lat": lat,
                   "lon": lon,
                   "event_id": event_id
                   }
        if isinstance(media_id, int):
            payload["media_id"] = media_id
        if isinstance(azimuth, float):
            payload["azimuth"] = azimuth
        return requests.post(self.routes["send-alert-from-device"], headers=self.headers, json=payload)

    def create_media(self, device_id: int) -> Response:
        """Create a media entry

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.create_media(device_id=3)

        Args:
            device_id: ID of the device that created that media

        Returns:
            HTTP response containing the created media
        """
        return requests.post(self.routes["create-media"], headers=self.headers, json={"device_id": device_id})

    def create_media_from_device(self):
        """Create a media entry from a device (no need to specify device ID).

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "DEVICE_LOGIN", "MY_PWD")
            >>> response = api_client.create_media_from_device()

        Returns:
            HTTP response containing the created media
        """
        return requests.post(self.routes["create-media-from-device"], headers=self.headers, json={})

    def upload_media(self, media_id: int, media_data: bytes) -> Response:
        """Upload the media content

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> with open("path/to/my/file.ext", "rb") as f: data = f.read()
            >>> response = api_client.upload_media(media_id=1, media_data=data)

        Args:
            media_id: ID of the associated media entry
            media_data: byte data

        Returns:
            HTTP response containing the updated media
        """
        return requests.post(self.routes["upload-media"].format(media_id=media_id), headers=self.headers,
                             files={'file': io.BytesIO(media_data)})

    # User functions
    def get_my_devices(self) -> Response:
        """Get the devices who are owned by the logged user

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.get_my_devices()

        Returns:
            HTTP response containing the list of owned devices
        """
        return requests.get(self.routes["get-my-devices"], headers=self.headers)

    def get_sites(self) -> Response:
        """Get all the existing sites in the DB

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.get_sites()

        Returns:
            HTTP response containing the list of sites
        """
        return requests.get(self.routes["get-sites"], headers=self.headers)

    def get_all_alerts(self) -> Response:
        """Get all the existing alerts in the DB

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.get_all_alerts()

        Returns:
            HTTP response containing the list of all alerts
        """
        return requests.get(self.routes["get-alerts"], headers=self.headers)

    def get_ongoing_alerts(self) -> Response:
        """Get all the existing alerts in the DB that have the status 'start'

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.get_ongoing_alerts()

        Returns:
            HTTP response containing the list of all ongoing alerts
        """
        return requests.get(self.routes["get-ongoing-alerts"], headers=self.headers)

    def get_unacknowledged_events(self) -> Response:
        """Get all the existing events in the DB that have the field "is_acknowledged" set to `False`

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.get_unacknowledged_events()

        Returns:
            HTTP response containing the list of all events that haven't been acknowledged
        """
        return requests.get(self.routes["get-unacknowledged-events"], headers=self.headers)

    def acknowledge_event(self, event_id: int) -> Response:
        """Switch the `is_acknowledged` field value of the event to `True`

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.acknowledge_event(event_id=1)

        Args:
            event_id: ID of the associated event entry

        Returns:
            HTTP response containing the updated event
        """
        return requests.put(self.routes["acknowledge-event"].format(event_id=event_id), headers=self.headers)

    def get_site_devices(self, site_id: int) -> Response:
        """Fetch the devices that are installed on a specific site

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.get_site_devices(1)

        Args:
            site_id: the identifier of the site

        Returns:
            HTTP response containing the list of corresponding devices
        """
        return requests.get(self.routes["get-site-devices"].format(site_id=site_id), headers=self.headers)

    def get_media_url(self, media_id: int) -> Response:
        """Get the image as a URL

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.get_media_url(1)

        Args:
            media_id: the identifier of the media entry

        Returns:
            HTTP response containing the URL to the media content
        """
        return requests.get(self.routes["get-media-url"].format(media_id=media_id), headers=self.headers)

    def get_media_url_and_read(self, media_id: int) -> Response:
        """Get the image as a url and read it

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.get_media_url_and_read(1)

        Args:
            media_id: the identifier of the media entry

        Returns:
            HTTP response containing the media content
        """
        image_url = requests.get(self.routes["get-media-url"].format(media_id=media_id), headers=self.headers)
        return requests.get(image_url.json()['url'])

    def get_past_events(self) -> Response:
        """Get all past events

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.get_past_events()

        Returns:
            HTTP response containing the list of past events
        """
        return requests.get(self.routes["get-past-events"], headers=self.headers)

    def get_my_device(self) -> Response:
        """Get information about the current device

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.get_my_device()

        Returns:
            HTTP response containing the device information
        """
        return requests.get(self.routes["get-my-device"], headers=self.headers)

    def update_my_hash(self, software_hash: str) -> Response:
        """Updates the software hash of the current device

        Example::
            >>> from pyroclient import client
            >>> api_client = client.Client("http://pyronear-api.herokuapp.com", "MY_LOGIN", "MY_PWD")
            >>> response = api_client.update_my_hash()

        Returns:
            HTTP response containing the updated device information
        """
        payload = {"software_hash": software_hash}
        return requests.put(self.routes["update-my-hash"], headers=self.headers, json=payload)
| [
"logging.basicConfig",
"requests.post",
"io.BytesIO",
"requests.get",
"urllib.parse.urljoin",
"requests.put"
] | [((429, 450), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (448, 450), False, 'import logging\n'), ((2729, 2912), 'requests.post', 'requests.post', (["self.routes['token']"], {'data': 'f"""username={login}&password={password}"""', 'headers': "{'Content-Type': 'application/x-www-form-urlencoded', 'accept':\n 'application/json'}"}), "(self.routes['token'], data=\n f'username={login}&password={password}', headers={'Content-Type':\n 'application/x-www-form-urlencoded', 'accept': 'application/json'})\n", (2742, 2912), False, 'import requests\n'), ((3716, 3776), 'requests.put', 'requests.put', (["self.routes['heartbeat']"], {'headers': 'self.headers'}), "(self.routes['heartbeat'], headers=self.headers)\n", (3728, 3776), False, 'import requests\n'), ((4961, 5049), 'requests.put', 'requests.put', (["self.routes['update-my-location']"], {'headers': 'self.headers', 'json': 'payload'}), "(self.routes['update-my-location'], headers=self.headers, json=\n payload)\n", (4973, 5049), False, 'import requests\n'), ((5657, 5735), 'requests.post', 'requests.post', (["self.routes['create-event']"], {'headers': 'self.headers', 'json': 'payload'}), "(self.routes['create-event'], headers=self.headers, json=payload)\n", (5670, 5735), False, 'import requests\n'), ((6847, 6926), 'requests.post', 'requests.post', (["self.routes['no-alert-site']"], {'headers': 'self.headers', 'json': 'payload'}), "(self.routes['no-alert-site'], headers=self.headers, json=payload)\n", (6860, 6926), False, 'import requests\n'), ((8233, 8309), 'requests.post', 'requests.post', (["self.routes['send-alert']"], {'headers': 'self.headers', 'json': 'payload'}), "(self.routes['send-alert'], headers=self.headers, json=payload)\n", (8246, 8309), False, 'import requests\n'), ((9531, 9623), 'requests.post', 'requests.post', (["self.routes['send-alert-from-device']"], {'headers': 'self.headers', 'json': 'payload'}), "(self.routes['send-alert-from-device'], headers=self.headers,\n json=payload)\n", 
(9544, 9623), False, 'import requests\n'), ((10120, 10220), 'requests.post', 'requests.post', (["self.routes['create-media']"], {'headers': 'self.headers', 'json': "{'device_id': device_id}"}), "(self.routes['create-media'], headers=self.headers, json={\n 'device_id': device_id})\n", (10133, 10220), False, 'import requests\n'), ((10672, 10761), 'requests.post', 'requests.post', (["self.routes['create-media-from-device']"], {'headers': 'self.headers', 'json': '{}'}), "(self.routes['create-media-from-device'], headers=self.headers,\n json={})\n", (10685, 10761), False, 'import requests\n'), ((12012, 12077), 'requests.get', 'requests.get', (["self.routes['get-my-devices']"], {'headers': 'self.headers'}), "(self.routes['get-my-devices'], headers=self.headers)\n", (12024, 12077), False, 'import requests\n'), ((12481, 12541), 'requests.get', 'requests.get', (["self.routes['get-sites']"], {'headers': 'self.headers'}), "(self.routes['get-sites'], headers=self.headers)\n", (12493, 12541), False, 'import requests\n'), ((12961, 13022), 'requests.get', 'requests.get', (["self.routes['get-alerts']"], {'headers': 'self.headers'}), "(self.routes['get-alerts'], headers=self.headers)\n", (12973, 13022), False, 'import requests\n'), ((13488, 13557), 'requests.get', 'requests.get', (["self.routes['get-ongoing-alerts']"], {'headers': 'self.headers'}), "(self.routes['get-ongoing-alerts'], headers=self.headers)\n", (13500, 13557), False, 'import requests\n'), ((14083, 14159), 'requests.get', 'requests.get', (["self.routes['get-unacknowledged-events']"], {'headers': 'self.headers'}), "(self.routes['get-unacknowledged-events'], headers=self.headers)\n", (14095, 14159), False, 'import requests\n'), ((17073, 17139), 'requests.get', 'requests.get', (["self.routes['get-past-events']"], {'headers': 'self.headers'}), "(self.routes['get-past-events'], headers=self.headers)\n", (17085, 17139), False, 'import requests\n'), ((17560, 17624), 'requests.get', 'requests.get', 
(["self.routes['get-my-device']"], {'headers': 'self.headers'}), "(self.routes['get-my-device'], headers=self.headers)\n", (17572, 17624), False, 'import requests\n'), ((18134, 18213), 'requests.put', 'requests.put', (["self.routes['update-my-hash']"], {'headers': 'self.headers', 'json': 'payload'}), "(self.routes['update-my-hash'], headers=self.headers, json=payload)\n", (18146, 18213), False, 'import requests\n'), ((2290, 2310), 'urllib.parse.urljoin', 'urljoin', (['self.api', 'v'], {}), '(self.api, v)\n', (2297, 2310), False, 'from urllib.parse import urljoin\n'), ((11533, 11555), 'io.BytesIO', 'io.BytesIO', (['media_data'], {}), '(media_data)\n', (11543, 11555), False, 'import io\n')] |
import vtk
from paraview import simple
from .elevation import ElevationFilter
class SourceImage:
    """A vtkImageData grid exposed as a ParaView source with warped elevation."""

    def __init__(self, grid, terrainFile):
        self.zSpacing = 1
        self.eScale = 0
        self.proxiesToDelete = []

        # Build up the image grid from the provided extents.
        self.image = vtk.vtkImageData()
        origin, spacing, size = grid.Origin, grid.Spacing, grid.Size
        self.image.SetOrigin(origin[0], origin[1], origin[2])
        self.image.SetSpacing(spacing[0], spacing[1], spacing[2])
        # Dimensions are point counts, hence one more than the cell counts.
        self.image.SetDimensions(size[0] + 1, size[1] + 1, size[2] + 1)

        # Hand the grid to a ParaView trivial producer.
        self.paraviewProducer = simple.TrivialProducer()
        self.paraviewProducer.GetClientSideObject().SetOutput(self.image)

        self.elevationFilter = ElevationFilter(grid, terrainFile)
        self.addPointArray(self.elevationFilter.getArray())
        self.elevatedSource = self.elevationFilter.getFilter(self.paraviewProducer)
        self.proxiesToDelete = [self.elevatedSource, self.paraviewProducer]

    def addCellArray(self, data):
        """Attach *data* as a cell array and flag the source as modified."""
        self.image.GetCellData().AddArray(data)
        self.elevatedSource.MarkModified(self.elevatedSource)

    def addPointArray(self, data):
        """Attach *data* as a point array on the image grid."""
        self.image.GetPointData().AddArray(data)

    def getSource(self):
        """Return the elevation-warped ParaView source."""
        return self.elevatedSource

    def setZSpace(self, space):
        """Record *space* as the z-spacing factor, rescale the grid, return it."""
        self.zSpacing = space
        sx, sy, sz = self.image.GetSpacing()
        self.image.SetSpacing(sx, sy, sz * self.zSpacing)
        self.elevatedSource.MarkModified(self.elevatedSource)
        return self.zSpacing

    def setElevationScale(self, eScale):
        """Forward *eScale* to the elevation filter and return it."""
        self.eScale = eScale
        self.elevationFilter.setScale(eScale)
        self.elevatedSource.MarkModified(self.elevatedSource)
        return self.eScale

    def minElevation(self):
        """Lowest elevation value known to the elevation filter."""
        return self.elevationFilter.getMin()

    def __del__(self):
        # Release every ParaView proxy this instance created.
        for p in self.proxiesToDelete:
            simple.Delete(p)
| [
"paraview.simple.TrivialProducer",
"vtk.vtkImageData",
"paraview.simple.Delete"
] | [((279, 297), 'vtk.vtkImageData', 'vtk.vtkImageData', ([], {}), '()\n', (295, 297), False, 'import vtk\n'), ((632, 656), 'paraview.simple.TrivialProducer', 'simple.TrivialProducer', ([], {}), '()\n', (654, 656), False, 'from paraview import simple\n'), ((1998, 2018), 'paraview.simple.Delete', 'simple.Delete', (['proxy'], {}), '(proxy)\n', (2011, 2018), False, 'from paraview import simple\n')] |
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.forms import PasswordInput
from django.contrib.auth import authenticate
from account.models import Account
class RegistrationForm(UserCreationForm):
    """Sign-up form with Russian error messages and placeholder widgets."""
    email_errors = {
        'required': 'Обязательное поле.',
        'invalid': 'Проверьте введенные данные.',
        'unique': 'Такая почта уже зарегистрирована.'
    }
    username_errors = {
        'required': 'Обязательное поле.',
        'invalid': 'Проверьте введенные данные.',
        'unique': 'Имя пользователя уже занято.'
    }
    email = forms.EmailField(error_messages=email_errors, max_length=60, required=True,
                             widget=forms.TextInput(attrs={'placeholder': 'Email', }))
    username = forms.CharField(error_messages=username_errors, max_length=30, required=True,
                               widget=forms.TextInput(attrs={'placeholder': "Имя пользователя", 'autocomplete': 'off'}))

    class Meta:
        model = Account
        # Fix: the password entries of this list were corrupted placeholders.
        # UserCreationForm names its password pair 'password1'/'password2',
        # which is exactly what __init__ below styles.
        fields = [
            'email',
            'username',
            'password1',
            'password2'
        ]

    def __init__(self, *args, **kwargs):
        super(RegistrationForm, self).__init__(*args, **kwargs)
        # Restyle the auto-generated password widgets with placeholders.
        self.fields['password1'].widget = PasswordInput(attrs={'placeholder': 'Пароль', 'autocomplete': 'off'})
        self.fields['password2'].widget = PasswordInput(
            attrs={'placeholder': 'Подтвердите пароль', 'autocomplete': 'off'})
        self.fields['email'].widget.attrs.update({'autofocus': False})
class AccountAuthenticationForm(forms.ModelForm):
    """Login form validating email/password pairs against the Account model."""
    email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
    password = forms.CharField(required=True, widget=forms.PasswordInput(attrs={'placeholder': 'Пароль'}))

    class Meta:
        model = Account
        fields = ('email', 'password')

    def clean(self):
        """Raise a descriptive ValidationError when authentication fails."""
        if not self.is_valid():
            return
        email = self.cleaned_data['email']
        password = self.cleaned_data['password']
        if authenticate(email=email, password=password):
            return
        # Authentication failed: report the most specific reason we can find.
        if not Account.objects.filter(email=email).exists():
            raise forms.ValidationError("Указанная почта не была зарегистрирована")
        if Account.objects.filter(email=email, is_active=False).exists():
            raise forms.ValidationError("Ваша учетная запись была деактивирована")
        raise forms.ValidationError("Неправильно указана почта или пароль")
class AccountUpdateForm(forms.ModelForm):
    """Profile edit form enforcing email/username uniqueness across other accounts."""
    email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
    username = forms.CharField(required=True, widget=forms.TextInput(attrs={
        'placeholder': 'Имя пользователя'
    }))

    class Meta:
        model = Account
        fields = ('email', 'username')

    def clean_email(self):
        """Reject an email already used by a *different* account."""
        email = self.cleaned_data['email']
        try:
            # Fix: the query result was bound to an unused local; the lookup
            # is only needed for its DoesNotExist side effect.
            Account.objects.exclude(pk=self.instance.pk).get(email=email)
        except Account.DoesNotExist:
            return email
        raise forms.ValidationError("Такая электронная почта уже используется")

    def clean_username(self):
        """Reject a username already used by a *different* account."""
        username = self.cleaned_data['username']
        try:
            Account.objects.exclude(pk=self.instance.pk).get(username=username)
        except Account.DoesNotExist:
            return username
        raise forms.ValidationError("Такое имя пользователя уже используется")
| [
"django.contrib.auth.authenticate",
"account.models.Account.objects.exclude",
"django.forms.PasswordInput",
"account.models.Account.objects.filter",
"django.forms.ValidationError",
"django.forms.TextInput"
] | [((1290, 1359), 'django.forms.PasswordInput', 'PasswordInput', ([], {'attrs': "{'placeholder': 'Пароль', 'autocomplete': 'off'}"}), "(attrs={'placeholder': 'Пароль', 'autocomplete': 'off'})\n", (1303, 1359), False, 'from django.forms import PasswordInput\n'), ((1402, 1487), 'django.forms.PasswordInput', 'PasswordInput', ([], {'attrs': "{'placeholder': 'Подтвердите пароль', 'autocomplete': 'off'}"}), "(attrs={'placeholder': 'Подтвердите пароль', 'autocomplete':\n 'off'})\n", (1415, 1487), False, 'from django.forms import PasswordInput\n'), ((3229, 3294), 'django.forms.ValidationError', 'forms.ValidationError', (['"""Такая электронная почта уже используется"""'], {}), "('Такая электронная почта уже используется')\n", (3250, 3294), False, 'from django import forms\n'), ((3557, 3621), 'django.forms.ValidationError', 'forms.ValidationError', (['"""Такое имя пользователя уже используется"""'], {}), "('Такое имя пользователя уже используется')\n", (3578, 3621), False, 'from django import forms\n'), ((711, 758), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'Email'}"}), "(attrs={'placeholder': 'Email'})\n", (726, 758), False, 'from django import forms\n'), ((893, 978), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'Имя пользователя', 'autocomplete': 'off'}"}), "(attrs={'placeholder': 'Имя пользователя', 'autocomplete':\n 'off'})\n", (908, 978), False, 'from django import forms\n'), ((1671, 1718), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'Email'}"}), "(attrs={'placeholder': 'Email'})\n", (1686, 1718), False, 'from django import forms\n'), ((1773, 1825), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {'attrs': "{'placeholder': 'Пароль'}"}), "(attrs={'placeholder': 'Пароль'})\n", (1792, 1825), False, 'from django import forms\n'), ((2088, 2123), 'account.models.Account.objects.filter', 'Account.objects.filter', ([], {'email': 'email'}), '(email=email)\n', (2110, 
2123), False, 'from account.models import Account\n'), ((2158, 2210), 'account.models.Account.objects.filter', 'Account.objects.filter', ([], {'email': 'email', 'is_active': '(False)'}), '(email=email, is_active=False)\n', (2180, 2210), False, 'from account.models import Account\n'), ((2737, 2784), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'Email'}"}), "(attrs={'placeholder': 'Email'})\n", (2752, 2784), False, 'from django import forms\n'), ((2839, 2897), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'Имя пользователя'}"}), "(attrs={'placeholder': 'Имя пользователя'})\n", (2854, 2897), False, 'from django import forms\n'), ((2230, 2274), 'django.contrib.auth.authenticate', 'authenticate', ([], {'email': 'email', 'password': 'password'}), '(email=email, password=password)\n', (2242, 2274), False, 'from django.contrib.auth import authenticate\n'), ((2580, 2641), 'django.forms.ValidationError', 'forms.ValidationError', (['"""Неправильно указана почта или пароль"""'], {}), "('Неправильно указана почта или пароль')\n", (2601, 2641), False, 'from django import forms\n'), ((2352, 2417), 'django.forms.ValidationError', 'forms.ValidationError', (['"""Указанная почта не была зарегистрирована"""'], {}), "('Указанная почта не была зарегистрирована')\n", (2373, 2417), False, 'from django import forms\n'), ((2493, 2557), 'django.forms.ValidationError', 'forms.ValidationError', (['"""Ваша учетная запись была деактивирована"""'], {}), "('Ваша учетная запись была деактивирована')\n", (2514, 2557), False, 'from django import forms\n'), ((3091, 3135), 'account.models.Account.objects.exclude', 'Account.objects.exclude', ([], {'pk': 'self.instance.pk'}), '(pk=self.instance.pk)\n', (3114, 3135), False, 'from account.models import Account\n'), ((3410, 3454), 'account.models.Account.objects.exclude', 'Account.objects.exclude', ([], {'pk': 'self.instance.pk'}), '(pk=self.instance.pk)\n', (3433, 3454), False, 'from 
account.models import Account\n')] |
import logging
from homeassistant.core import HomeAssistant, State
from .service import ServiceExt
from .state import StateExt
class LightExt:
    """Convenience helpers for Home Assistant ``light`` entities."""

    _logger = logging.getLogger(__name__)

    # Raw state strings reported by Home Assistant for light entities.
    ON_STATE = "on"
    OFF_STATE = "off"
    UNKNOWN_STATE = ""

    @classmethod
    def turn_on(cls, hass: HomeAssistant, id, data=None, debug=False):
        """Call the ``light.turn_on`` service for entity *id*.

        data: optional service payload (brightness, color_temp, ...).
              Defaults to an empty payload. (Changed from a shared mutable
              ``{}`` default to ``None`` to avoid the mutable-default pitfall.)
        """
        ServiceExt.call_service(hass, "light", "turn_on",
                                service_data={} if data is None else data,
                                target={"entity_id": id}, debug=debug)

    @classmethod
    def turn_off(cls, hass: HomeAssistant, id, data=None, debug=False):
        """Call the ``light.turn_off`` service for entity *id* (see turn_on)."""
        ServiceExt.call_service(hass, "light", "turn_off",
                                service_data={} if data is None else data,
                                target={"entity_id": id}, debug=debug)

    @classmethod
    def get_std_attributes(cls, hass: HomeAssistant, id, debug=False):
        """Return ``(on_off, data)`` for the light entity *id*.

        on_off is the raw state string; data holds the subset of standard
        attributes (brightness, color_temp) that are present and not None.

        Raises:
            Exception: if no state object exists for *id*.
        """
        state = hass.states.get(id)
        if state is None:  # was `== None`; identity check is the Python idiom
            raise Exception(f"Exception, {id} state not existing")
        on_off = state.state
        data = {}
        # Only these are treated as "standard" here.
        # Candidates left out for now: rgb_color, rgbw_color, rgbww_color.
        for attr in ("brightness", "color_temp"):
            if state.attributes.get(attr) is not None:
                data[attr] = state.attributes[attr]
        return on_off, data
| [
"logging.getLogger"
] | [((170, 197), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (187, 197), False, 'import logging\n')] |
from django.contrib import admin

from .models import Grouper, Huser, MemberOf

# Expose the account models in the Django admin interface.
for _model in (Huser, Grouper, MemberOf):
    admin.site.register(_model)
| [
"django.contrib.admin.site.register"
] | [((110, 136), 'django.contrib.admin.site.register', 'admin.site.register', (['Huser'], {}), '(Huser)\n', (129, 136), False, 'from django.contrib import admin\n'), ((137, 165), 'django.contrib.admin.site.register', 'admin.site.register', (['Grouper'], {}), '(Grouper)\n', (156, 165), False, 'from django.contrib import admin\n'), ((166, 195), 'django.contrib.admin.site.register', 'admin.site.register', (['MemberOf'], {}), '(MemberOf)\n', (185, 195), False, 'from django.contrib import admin\n')] |
from typing import Optional, List
from thinc.types import Floats2d
from thinc.api import Model, with_cpu
from spacy.attrs import ID, ORTH, PREFIX, SUFFIX, SHAPE, LOWER
from spacy.util import registry
from spacy.tokens import Doc
# TODO: replace with registered layer after spacy v3.0.7
from spacy.ml import extract_ngrams
def TextCatCNN_v1(
    tok2vec: Model, exclusive_classes: bool, nO: Optional[int] = None
) -> Model[List[Doc], Floats2d]:
    """
    Build a simple CNN text classifier, given a token-to-vector model as inputs.
    If exclusive_classes=True, a softmax non-linearity is applied, so that the
    outputs sum to 1. If exclusive_classes=False, a logistic non-linearity
    is applied instead, so that outputs are in the range [0, 1].
    """

    def _layer(name):
        # Resolve layer constructors via the registry so no direct thinc
        # layer imports are needed.
        return registry.get("layers", name)

    chain = _layer("chain.v1")
    reduce_mean = _layer("reduce_mean.v1")
    Logistic = _layer("Logistic.v1")
    Softmax = _layer("Softmax.v1")
    Linear = _layer("Linear.v1")
    list2ragged = _layer("list2ragged.v1")

    with Model.define_operators({">>": chain}):
        # Pool token vectors into one mean vector per doc.
        pooled = tok2vec >> list2ragged() >> reduce_mean()
        if exclusive_classes:
            head = Softmax(nO=nO, nI=tok2vec.maybe_get_dim("nO"))
            model = pooled >> head
        else:
            head = Linear(nO=nO, nI=tok2vec.maybe_get_dim("nO"))
            model = pooled >> head >> Logistic()
        model.set_ref("output_layer", head)
        model.set_ref("tok2vec", tok2vec)
        model.set_dim("nO", nO)
        model.attrs["multi_label"] = not exclusive_classes
    return model
def TextCatBOW_v1(
    exclusive_classes: bool,
    ngram_size: int,
    no_output_layer: bool,
    nO: Optional[int] = None,
) -> Model[List[Doc], Floats2d]:
    """Build a bag-of-words (ngram) text classifier.

    The sparse linear layer runs on CPU via with_cpu; unless
    no_output_layer is True, a softmax (exclusive classes) or logistic
    (multi-label) output layer is appended.
    """
    # Resolve layers through the registry rather than importing directly.
    chain = registry.get("layers", "chain.v1")
    Logistic = registry.get("layers", "Logistic.v1")
    SparseLinear = registry.get("layers", "SparseLinear.v1")
    softmax_activation = registry.get("layers", "softmax_activation.v1")

    with Model.define_operators({">>": chain}):
        sparse_linear = SparseLinear(nO)
        bow = extract_ngrams(ngram_size, attr=ORTH) >> sparse_linear
        model = with_cpu(bow, bow.ops)
        if not no_output_layer:
            if exclusive_classes:
                output_layer = softmax_activation()
            else:
                output_layer = Logistic()
            model = model >> with_cpu(output_layer, output_layer.ops)
    # The sparse linear layer is the trainable core, so it is exposed as
    # the canonical "output_layer" reference.
    model.set_ref("output_layer", sparse_linear)
    model.attrs["multi_label"] = not exclusive_classes
    return model
def TextCatEnsemble_v1(
    width: int,
    embed_size: int,
    pretrained_vectors: Optional[bool],
    exclusive_classes: bool,
    ngram_size: int,
    window_size: int,
    conv_depth: int,
    dropout: Optional[float],
    nO: Optional[int] = None,
) -> Model:
    """Build an ensemble text classifier: a bag-of-words linear model
    combined (via `|` / concatenate) with a CNN-over-hash-embeddings model,
    followed by a shared output layer.

    NOTE(review): kept as-is from the original; the author's note below
    flags it as unverified.
    """
    # Don't document this yet, I'm not sure it's right.
    # All layer/architecture constructors are resolved through the registry,
    # so this function needs no direct thinc layer imports.
    HashEmbed = registry.get("layers", "HashEmbed.v1")
    FeatureExtractor = registry.get("layers", "spacy.FeatureExtractor.v1")
    Maxout = registry.get("layers", "Maxout.v1")
    StaticVectors = registry.get("layers", "spacy.StaticVectors.v1")
    Softmax = registry.get("layers", "Softmax.v1")
    Linear = registry.get("layers", "Linear.v1")
    ParametricAttention = registry.get("layers", "ParametricAttention.v1")
    Dropout = registry.get("layers", "Dropout.v1")
    Logistic = registry.get("layers", "Logistic.v1")
    build_bow_text_classifier = registry.get("architectures", "spacy.TextCatBOW.v1")
    list2ragged = registry.get("layers", "list2ragged.v1")
    chain = registry.get("layers", "chain.v1")
    concatenate = registry.get("layers", "concatenate.v1")
    clone = registry.get("layers", "clone.v1")
    reduce_sum = registry.get("layers", "reduce_sum.v1")
    with_array = registry.get("layers", "with_array.v1")
    uniqued = registry.get("layers", "uniqued.v1")
    residual = registry.get("layers", "residual.v1")
    expand_window = registry.get("layers", "expand_window.v1")
    # Token attributes extracted per token; cols.index(...) below maps each
    # HashEmbed to its column in the extracted feature table.
    cols = [ORTH, LOWER, PREFIX, SUFFIX, SHAPE, ID]
    # Operators: >> = chain, | = concatenate, ** = clone (repeat layer).
    with Model.define_operators({">>": chain, "|": concatenate, "**": clone}):
        # Hash embeddings for the word form and three sub-word features.
        lower = HashEmbed(
            nO=width, nV=embed_size, column=cols.index(LOWER), dropout=dropout, seed=10
        )
        prefix = HashEmbed(
            nO=width // 2,
            nV=embed_size,
            column=cols.index(PREFIX),
            dropout=dropout,
            seed=11,
        )
        suffix = HashEmbed(
            nO=width // 2,
            nV=embed_size,
            column=cols.index(SUFFIX),
            dropout=dropout,
            seed=12,
        )
        shape = HashEmbed(
            nO=width // 2,
            nV=embed_size,
            column=cols.index(SHAPE),
            dropout=dropout,
            seed=13,
        )
        # Total input width to the mixing Maxout = sum of the four embeds.
        width_nI = sum(layer.get_dim("nO") for layer in [lower, prefix, suffix, shape])
        # uniqued: compute each distinct ORTH only once per batch.
        trained_vectors = FeatureExtractor(cols) >> with_array(
            uniqued(
                (lower | prefix | suffix | shape)
                >> Maxout(nO=width, nI=width_nI, normalize=True),
                column=cols.index(ORTH),
            )
        )
        if pretrained_vectors:
            # Concatenate static (pretrained) vectors alongside the trained ones,
            # doubling the vector width fed into the CNN.
            static_vectors = StaticVectors(width)
            vector_layer = trained_vectors | static_vectors
            vectors_width = width * 2
        else:
            vector_layer = trained_vectors
            vectors_width = width
        # conv_depth residual convolution blocks (window expansion + Maxout).
        tok2vec = vector_layer >> with_array(
            Maxout(width, vectors_width, normalize=True)
            >> residual(
                (
                    expand_window(window_size=window_size)
                    >> Maxout(
                        nO=width, nI=width * ((window_size * 2) + 1), normalize=True
                    )
                )
            )
            ** conv_depth,
            pad=conv_depth,
        )
        # CNN branch: attention-weighted sum over tokens, then a linear head.
        cnn_model = (
            tok2vec
            >> list2ragged()
            >> ParametricAttention(width)
            >> reduce_sum()
            >> residual(Maxout(nO=width, nI=width))
            >> Linear(nO=nO, nI=width)
            >> Dropout(0.0)
        )
        # Linear (bag-of-words) branch; its own output layer is kept so it
        # can be exposed as this model's "output_layer" ref below.
        linear_model = build_bow_text_classifier(
            nO=nO,
            ngram_size=ngram_size,
            exclusive_classes=exclusive_classes,
            no_output_layer=False,
        )
        # Both branches emit nO scores, so the combined input is 2*nO wide.
        nO_double = nO * 2 if nO else None
        if exclusive_classes:
            output_layer = Softmax(nO=nO, nI=nO_double)
        else:
            output_layer = Linear(nO=nO, nI=nO_double) >> Dropout(0.0) >> Logistic()
        model = (linear_model | cnn_model) >> output_layer
        model.set_ref("tok2vec", tok2vec)
        if model.has_dim("nO") is not False:
            model.set_dim("nO", nO)
        model.set_ref("output_layer", linear_model.get_ref("output_layer"))
        model.attrs["multi_label"] = not exclusive_classes
    return model
| [
"spacy.util.registry.get",
"thinc.api.with_cpu",
"thinc.api.Model.define_operators",
"spacy.ml.extract_ngrams"
] | [((775, 809), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""chain.v1"""'], {}), "('layers', 'chain.v1')\n", (787, 809), False, 'from spacy.util import registry\n'), ((828, 868), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""reduce_mean.v1"""'], {}), "('layers', 'reduce_mean.v1')\n", (840, 868), False, 'from spacy.util import registry\n'), ((884, 921), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""Logistic.v1"""'], {}), "('layers', 'Logistic.v1')\n", (896, 921), False, 'from spacy.util import registry\n'), ((936, 972), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""Softmax.v1"""'], {}), "('layers', 'Softmax.v1')\n", (948, 972), False, 'from spacy.util import registry\n'), ((986, 1021), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""Linear.v1"""'], {}), "('layers', 'Linear.v1')\n", (998, 1021), False, 'from spacy.util import registry\n'), ((1040, 1080), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""list2ragged.v1"""'], {}), "('layers', 'list2ragged.v1')\n", (1052, 1080), False, 'from spacy.util import registry\n'), ((1968, 2002), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""chain.v1"""'], {}), "('layers', 'chain.v1')\n", (1980, 2002), False, 'from spacy.util import registry\n'), ((2018, 2055), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""Logistic.v1"""'], {}), "('layers', 'Logistic.v1')\n", (2030, 2055), False, 'from spacy.util import registry\n'), ((2075, 2116), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""SparseLinear.v1"""'], {}), "('layers', 'SparseLinear.v1')\n", (2087, 2116), False, 'from spacy.util import registry\n'), ((2142, 2189), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""softmax_activation.v1"""'], {}), "('layers', 'softmax_activation.v1')\n", (2154, 2189), False, 'from spacy.util import registry\n'), ((3042, 3080), 'spacy.util.registry.get', 'registry.get', 
(['"""layers"""', '"""HashEmbed.v1"""'], {}), "('layers', 'HashEmbed.v1')\n", (3054, 3080), False, 'from spacy.util import registry\n'), ((3104, 3155), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""spacy.FeatureExtractor.v1"""'], {}), "('layers', 'spacy.FeatureExtractor.v1')\n", (3116, 3155), False, 'from spacy.util import registry\n'), ((3169, 3204), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""Maxout.v1"""'], {}), "('layers', 'Maxout.v1')\n", (3181, 3204), False, 'from spacy.util import registry\n'), ((3225, 3273), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""spacy.StaticVectors.v1"""'], {}), "('layers', 'spacy.StaticVectors.v1')\n", (3237, 3273), False, 'from spacy.util import registry\n'), ((3288, 3324), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""Softmax.v1"""'], {}), "('layers', 'Softmax.v1')\n", (3300, 3324), False, 'from spacy.util import registry\n'), ((3338, 3373), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""Linear.v1"""'], {}), "('layers', 'Linear.v1')\n", (3350, 3373), False, 'from spacy.util import registry\n'), ((3400, 3448), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""ParametricAttention.v1"""'], {}), "('layers', 'ParametricAttention.v1')\n", (3412, 3448), False, 'from spacy.util import registry\n'), ((3463, 3499), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""Dropout.v1"""'], {}), "('layers', 'Dropout.v1')\n", (3475, 3499), False, 'from spacy.util import registry\n'), ((3515, 3552), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""Logistic.v1"""'], {}), "('layers', 'Logistic.v1')\n", (3527, 3552), False, 'from spacy.util import registry\n'), ((3585, 3637), 'spacy.util.registry.get', 'registry.get', (['"""architectures"""', '"""spacy.TextCatBOW.v1"""'], {}), "('architectures', 'spacy.TextCatBOW.v1')\n", (3597, 3637), False, 'from spacy.util import registry\n'), ((3656, 3696), 
'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""list2ragged.v1"""'], {}), "('layers', 'list2ragged.v1')\n", (3668, 3696), False, 'from spacy.util import registry\n'), ((3709, 3743), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""chain.v1"""'], {}), "('layers', 'chain.v1')\n", (3721, 3743), False, 'from spacy.util import registry\n'), ((3762, 3802), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""concatenate.v1"""'], {}), "('layers', 'concatenate.v1')\n", (3774, 3802), False, 'from spacy.util import registry\n'), ((3815, 3849), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""clone.v1"""'], {}), "('layers', 'clone.v1')\n", (3827, 3849), False, 'from spacy.util import registry\n'), ((3867, 3906), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""reduce_sum.v1"""'], {}), "('layers', 'reduce_sum.v1')\n", (3879, 3906), False, 'from spacy.util import registry\n'), ((3924, 3963), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""with_array.v1"""'], {}), "('layers', 'with_array.v1')\n", (3936, 3963), False, 'from spacy.util import registry\n'), ((3978, 4014), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""uniqued.v1"""'], {}), "('layers', 'uniqued.v1')\n", (3990, 4014), False, 'from spacy.util import registry\n'), ((4030, 4067), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""residual.v1"""'], {}), "('layers', 'residual.v1')\n", (4042, 4067), False, 'from spacy.util import registry\n'), ((4088, 4130), 'spacy.util.registry.get', 'registry.get', (['"""layers"""', '"""expand_window.v1"""'], {}), "('layers', 'expand_window.v1')\n", (4100, 4130), False, 'from spacy.util import registry\n'), ((1165, 1202), 'thinc.api.Model.define_operators', 'Model.define_operators', (["{'>>': chain}"], {}), "({'>>': chain})\n", (1187, 1202), False, 'from thinc.api import Model, with_cpu\n'), ((2200, 2237), 'thinc.api.Model.define_operators', 
'Model.define_operators', (["{'>>': chain}"], {}), "({'>>': chain})\n", (2222, 2237), False, 'from thinc.api import Model, with_cpu\n'), ((2367, 2393), 'thinc.api.with_cpu', 'with_cpu', (['model', 'model.ops'], {}), '(model, model.ops)\n', (2375, 2393), False, 'from thinc.api import Model, with_cpu\n'), ((4193, 4261), 'thinc.api.Model.define_operators', 'Model.define_operators', (["{'>>': chain, '|': concatenate, '**': clone}"], {}), "({'>>': chain, '|': concatenate, '**': clone})\n", (4215, 4261), False, 'from thinc.api import Model, with_cpu\n'), ((2296, 2333), 'spacy.ml.extract_ngrams', 'extract_ngrams', (['ngram_size'], {'attr': 'ORTH'}), '(ngram_size, attr=ORTH)\n', (2310, 2333), False, 'from spacy.ml import extract_ngrams\n'), ((2540, 2580), 'thinc.api.with_cpu', 'with_cpu', (['output_layer', 'output_layer.ops'], {}), '(output_layer, output_layer.ops)\n', (2548, 2580), False, 'from thinc.api import Model, with_cpu\n')] |
# /usr/bin/python
# encoding=utf-8
import os
import sys
import click
import functools
def singleton(cls):
    """Class decorator: make *cls* lazily instantiate at most one object.

    The first call's args/kwargs construct the instance; subsequent calls
    return the cached instance and ignore their arguments.
    """
    _instances = {}

    # wraps(cls, updated=[]) copies __name__/__doc__/__module__ from the
    # class onto the factory, so the decorated name keeps its metadata
    # (previously it showed up as a function called "inner").
    @functools.wraps(cls, updated=[])
    def inner(*args, **kwargs):
        if cls not in _instances:
            _instances[cls] = cls(*args, **kwargs)
        return _instances[cls]

    return inner
# class singleton(object):
# def __init__(self, cls):
# self._cls = cls
# self._instance = {}
# def __call__(self):
# if self._cls not in self._instance:
# self._instance[self._cls] = self._cls()
# return self._instance[self._cls]
# common
# Shared click context configuration: custom help flags plus per-command
# option defaults.
CONTEXT_SETTINGS = {
    'help_option_names': ['-h', '--help'],
    'default_map': {
        'cli': {'stage': 'pp'},
        'run': {'tag': 'common_tag'},
    },
}
# # offline
# CONTEXT_SETTINGS = dict(
# default_map={'runserver': {'port': 5000}}
# )
# # Apsara Stack
# CONTEXT_SETTINGS = dict(
# default_map={'runserver': {'port': 5000}}
# )
def stage(func):
    """Wrap an Operater stage method with start/success/failure reporting.

    Any exception is printed and converted into a False return value, so a
    failing stage does not abort the whole CLI run.
    """
    @functools.wraps(func)
    def inner(*args, **kwargs):
        try:
            # Announce the stage and its arguments before running it.
            click.secho('{} args: {}, kwargs: {} start !'.format(
                func.__name__, args, kwargs), fg=u'blue', bg=u'black', underline=True, bold=True)
            outcome = func(*args, **kwargs)
            print('{} success!'.format(func.__name__))
            return outcome
        except Exception as exc:
            # Deliberate best-effort: report and signal failure via False.
            print('{} failed! message: {}'.format(func.__name__, exc))
            return False
    return inner
@singleton
class Operater(object):
    """Singleton facade exposing the pipeline stages invoked by the CLI.

    Every stage_* / tool_* method is wrapped by @stage (start/success/failure
    reporting, returns False on error). All bodies here are placeholders.
    """
    def __init__(self, *args, **kwargs):
        import random
        # Random marker: since the class is a singleton, every caller sees
        # the same num value, which makes the sharing observable.
        self.num = random.random()
    # Pipeline stages (all placeholder bodies).
    @stage
    def stage_setup(self, *args, **kwargs):
        pass
    @stage
    def stage_init(self, *args, **kwargs):
        pass
    @stage
    def stage_migration(self, *args, **kwargs):
        pass
    @stage
    def stage_lock(self, *args, **kwargs):
        pass
    @stage
    def stage_run(self, *args, **kwargs):
        pass
    @stage
    def stage_parser(self, *args, **kwargs):
        pass
    @stage
    def stage_collect(self, *args, **kwargs):
        pass
    @stage
    def stage_upload(self, *args, **kwargs):
        pass
    @stage
    def stage_clean(self, *args, **kwargs):
        pass
    @stage
    def tool_status(self, *args, **kwargs):
        pass
def print_version(ctx, param, value):
    """Eager click callback for --version: print the banner and exit."""
    # resilient_parsing is set e.g. during shell completion — stay silent then.
    if value and not ctx.resilient_parsing:
        click.echo('Version 1.0, Powered by Ecs-Lab')
        ctx.exit()
class Product(object):
    """Mutable container for CLI-wide state (currently only the debug flag)."""

    def __init__(self, debug=False):
        # True when the user passed --debug on the command line.
        self.debug = debug
# Decorator that injects the shared Product instance into commands,
# creating one on demand if the group has not set it yet.
pass_product = click.make_pass_decorator(Product, ensure=True)
@click.group(invoke_without_command=True, context_settings=CONTEXT_SETTINGS)
@click.option('-v', '--version', is_flag=True, callback=print_version,
              expose_value=False, is_eager=True)
@click.option('-s', '--stage', type=click.STRING, default='all', help=u'兼容旧命令')
@click.option('-c', '--clean', type=click.BOOL, default=False, help=u'兼容旧命令, 测试结束释放vm')
@click.option('-m', '--mode', type=click.Choice(('simple', 'full')), default=u'full', help=u'兼容旧命令, 解析模式, 默认: full')
@click.option('--debug/--no-debug', default=False, help=u'开启debug模式,默认: --no-debug')
@click.pass_context
def cli(ctx, stage, clean, mode, debug):
    """
    欢迎使用CLI。 本工具由ECS-Lab提供。
    \b
    这是一套用于测试云主机的自动化测试框架,部分功能完善中,欢迎使用。这是一套用于测试云主机的自动化测试框架,部分功能完善中,欢迎使用这是一套用于测试云主机的自动化测试框架,部分功能完善中,欢迎使用这是一套用于测试云主机的自动化测试框架,部分功能完善中,欢迎使用
    """
    # NOTE: the docstring above is click's user-facing --help text (ECS-Lab
    # cloud-host benchmarking CLI welcome banner); it is runtime output and
    # therefore left untranslated.
    click.echo('prepare some thing')
    if ctx.invoked_subcommand is None:
        # Invoked bare (invoke_without_command=True): echo the
        # legacy-compatibility option values instead of running a subcommand.
        click.echo('stage = {}'.format(stage))
        click.echo('clean = {}'.format(clean))
        click.echo('I was invoked without subcommand')
    else:
        click.echo('I am about to invoke %s' % ctx.invoked_subcommand)
    # NOTE(review): ensure_object(dict) is immediately overridden by the
    # Product assignment below; subcommands receive the Product via
    # @pass_product / ctx.obj.
    ctx.ensure_object(dict)
    ctx.obj = Product(debug)
@cli.command(help=u'完整测试流程,setup -> init -> run -> parser -> collect -> upload')
@click.option('-t', '--tag', envvar='tag')
@click.option('-c', '--clean/--keep', default=False, help=u'测试结束是否释放vm, 默认: --keep')
@click.pass_context
def all(ctx, **kwargs):
    """Full pipeline: forwards to the setup, init, run and clean commands.

    NOTE(review): the help text lists parser/collect/upload steps which are
    not forwarded here directly — presumably `run` covers them via its
    defaults; confirm.
    """
    # ctx.forward(setup) and ctx.forward(init) and ctx.forward(run) and ctx.forward(clean)
    # Each stage swallows exceptions (see @stage, which returns False on
    # failure), so later stages still run even if an earlier one fails.
    ctx.forward(setup)
    ctx.forward(init)
    ctx.forward(run)
    ctx.forward(clean)
@cli.command(help=u'创建实例操作')
@click.option('-t', '--tag', envvar='tag')
@click.option('-c', '--clean/--keep', default=False, help=u'开vm失败是否释放vm, 默认: --keep')
def setup(**kwargs):
    """Instance-creation stage; delegates to Operater.stage_setup."""
    op = Operater()
    # Operater is a singleton; num is its random marker (same for all callers).
    click.echo(op.num)
    op.stage_setup(**kwargs)
@cli.command(help=u'vm迁移操作')
@click.option('-t', '--tag', envvar='tag')
# @click.option('-f', '--force/--no-force', default=False, help=u'是否强制迁移,默认值: False')
def migration(**kwargs):
    """VM-migration stage; delegates to Operater.stage_migration."""
    op = Operater()
    op.stage_migration(**kwargs)
@cli.command(help=u'锁nc操作')
@click.option('-t', '--tag', envvar='tag')
@click.option('-f', '--force/--no-force', envvar='force_lock', default=False, help=u'是否强制锁nc,锁定失败则任务终止,默认值: False')
@pass_product
def lock(repo, **kwargs):
    """NC-locking stage; delegates to Operater.stage_lock.

    repo is the shared Product injected by @pass_product; it is echoed
    here for debugging.
    """
    click.echo(repo.__dict__)
    click.echo(repo.debug)
    Operater().stage_lock(**kwargs)
@cli.command(help=u'初始化实例操作')
@click.option('-t', '--tag', envvar='tag')
@click.option('-f', '--force/--no-force', default=False, help=u'是否强制初始化,会重复初始化, 默认: False')
@click.option('-u', '--update/--no-update', default=False, help=u'是否更新代码,默认: False')
@click.option('-r', '--run/--no-run', default=False, help=u'初始化结束是否进行压测,默认: False')
@click.pass_context
def init(ctx, **kwargs):
    """Instance-initialisation stage; delegates to Operater.stage_init."""
    op = Operater()
    click.echo(op.num)
    op.stage_init(**kwargs)
    # NOTE(review): click ignores command return values, so this constant
    # False has no visible effect.
    return False
@cli.command(help=u'性能压测操作')
@click.option('-t', '--tag', envvar='tag')
@click.option('-p', '--parser', type=click.BOOL, default=True, help=u'是否同步解析, 默认: True')
@click.option('-l', '--loop', type=click.INT, default=1, help=u'循环跑几轮, 默认: 1')
@click.option('-u', '--upload', type=click.BOOL, default=True, help=u'是否同步上传结果, 默认: True')
@click.option('-n', '--new_task_id/--old_task_id', default=False, help=u'是否生成新到task_id, 默认: False')
@click.option('-f', '--force/--no-force', default=False, help=u'是否忽略case状态强制重跑, 默认: False')
@click.option('-c', '--clean/--keep', default=False, help=u'测试结束是否释放vm, 默认: --keep')
@click.option('-m', '--parallel/--common', default=False, help=u'是否是多vm并行压测, 默认: False')
@click.option('--case_type', type=click.Choice(('cpu', 'io', 'memory', 'network')), multiple=True, help=u'指定跑那些case_type到case,支持多选,默认全跑')
@click.option('-s', '--scenario_name', type=click.STRING, multiple=True, help=u'指定跑哪几个case,支持多选,默认全跑')
@click.pass_context
def run(ctx, **kwargs):
    """Benchmark-run stage; delegates to Operater.stage_run.

    NOTE(review): ctx is accepted via @click.pass_context but unused.
    """
    Operater().stage_run(**kwargs)
@cli.command(help=u'结果解析操作')
@click.option('-t', '--tag', envvar='tag')
@click.option('-r', '--remote/--local', default=False, help=u'是否在远端解析, 默认: False')
@click.option('-u', '--upload', type=click.BOOL, default=True, help=u'是否同步上传结果, 默认: True')
@click.option('-m', '--mode', type=click.Choice(('simple', 'full')), default=u'full', help=u'解析模式, 默认: full')
def parser(**kwargs):
    """Result-parsing stage; delegates to Operater.stage_parser."""
    Operater().stage_parser(**kwargs)
@cli.command(help=u'结果收集操作')
@click.option('-t', '--tag', envvar='tag')
@click.option('-r', '--result', type=click.BOOL, default=True, help=u'收集结果数据, 默认: True')
@click.option('-i', '--instance', type=click.BOOL, default=True, help=u'收集实例信息, 默认: True')
@click.option('-v', '--verbose', type=click.BOOL, default=False, help=u'详细信息, 默认: False')
def collect(**kwargs):
    """Result-collection stage; delegates to Operater.stage_collect."""
    Operater().stage_collect(**kwargs)
@cli.command(help=u'结果上传操作')
@click.option('-t', '--tag', envvar='tag')
@click.option('-b', '--backup', type=click.BOOL, default=True, help=u'结果备份到oss, 默认: True')
@click.option('-i', '--instance', type=click.BOOL, default=True, help=u'上传实例信息, 默认: True')
# BUG FIX: '--task' previously also declared the short flag '-t', colliding
# with '--tag'. Click maps a short flag to the last option registered with
# it, so '-t' on the command line set the BOOL 'task' instead of 'tag'.
# The short flag is dropped from '--task' to restore '-t' == '--tag'.
@click.option('--task', type=click.BOOL, default=True, help=u'上传task信息, 默认: True')
@click.option('-r', '--result', type=click.BOOL, default=True, help=u'上传结果信息, 默认: True')
@click.option('-s', '--status', type=click.BOOL, default=True, help=u'上传case状态信息, 默认: True')
def upload(**kwargs):
    """Result-upload stage; delegates to Operater.stage_upload."""
    Operater().stage_upload(**kwargs)
@cli.command(help=u'实例释放操作')
@click.option('-t', '--tag', envvar='tag')
@click.option('-n', '--unlock_nc', type=click.BOOL, default=True, help=u'是否解锁nc, 默认: True')
def clean(**kwargs):
    """Instance-release stage; delegates to Operater.stage_clean."""
    Operater().stage_clean(**kwargs)
@cli.command(help=u'各种状态检查')
@click.option('-t', '--tag', envvar='tag')
@click.option('-a', '--all', type=click.BOOL, default=True, help=u'全部状态')
@click.option('-c', '--case/--no-case', default=False, help=u'case状态')
@click.option('-i', '--instance/--no-instance', default=False, help=u'实例状态')
@click.option('-u', '--upload/--no-upload', default=False, help=u'上传状态')
@click.option('-p', '--parser/--no-parser', default=False, help=u'解析状态')
@click.option('-v', '--verbose/--no-verbose', default=False, help=u'显示详细信息')
def status(**kwargs):
    """Status checks (all/case/instance/upload/parser); delegates to
    Operater.tool_status."""
    Operater().tool_status(**kwargs)
@cli.command(help=u'nc 相关操作及信息展示')
@click.option('-t', '--tag', envvar='tag')
def nc(**kwargs):
    """Placeholder: NC (host node) operations and information display."""
    # TODO: consider making this interactive (prompt_toolkit could do it).
    # 1. Given an nc_id or nc_ip, show the VMs on that NC and the NC status
    #    (CPU, memory count, ...).
    # 2. Support actions such as migrating VMs and locking/unlocking the NC.
    pass
@cli.command(help=u'vm 操作及信息展示')
@click.option('-t', '--tag', envvar='tag')
def vm(**kwargs):
    """Placeholder: VM operations and information display (demo table only)."""
    # TODO: consider making this interactive (prompt_toolkit; prettytable
    # can render the tables).
    # 1. Given an instance_id or public IP, show the VM's info and status
    #    (CPU, memory count, ...).
    # 2. Support migrating the VM, locking/unlocking its NC, etc.
    # 3. Support attaching disks/NICs and managing/inspecting security groups.
    from prettytable import PrettyTable
    # Demo data only: illustrates what the eventual VM listing could look like.
    x = PrettyTable(["City name", "Area", "Population", "Annual Rainfall"])
    x.align["City name"] = "l" # Left align city names
    # One space between column edges and contents (default)
    x.padding_width = 1
    x.add_row(["Adelaide", 1295, 1158259, 600.5])
    x.add_row(["Brisbane", 5905, 1857594, 1146.4])
    x.add_row(["Darwin", 112, 120900, 1714.7])
    x.add_row(["Hobart", 1357, 205556, 619.5])
    x.add_row(["Sydney", 2058, 4336374, 1214.8])
    x.add_row(["Melbourne", 1566, 3806092, 646.9])
    x.add_row(["Perth", 5386, 1554769, 869.4])
    click.echo(x)
if __name__ == "__main__":
    # Entry point: launch the click group with a fresh, empty context object.
    cli(obj={})
| [
"prettytable.PrettyTable",
"click.Choice",
"click.make_pass_decorator",
"click.group",
"click.option",
"functools.wraps",
"click.echo",
"random.random"
] | [((2851, 2898), 'click.make_pass_decorator', 'click.make_pass_decorator', (['Product'], {'ensure': '(True)'}), '(Product, ensure=True)\n', (2876, 2898), False, 'import click\n'), ((2902, 2977), 'click.group', 'click.group', ([], {'invoke_without_command': '(True)', 'context_settings': 'CONTEXT_SETTINGS'}), '(invoke_without_command=True, context_settings=CONTEXT_SETTINGS)\n', (2913, 2977), False, 'import click\n'), ((2979, 3087), 'click.option', 'click.option', (['"""-v"""', '"""--version"""'], {'is_flag': '(True)', 'callback': 'print_version', 'expose_value': '(False)', 'is_eager': '(True)'}), "('-v', '--version', is_flag=True, callback=print_version,\n expose_value=False, is_eager=True)\n", (2991, 3087), False, 'import click\n'), ((3099, 3177), 'click.option', 'click.option', (['"""-s"""', '"""--stage"""'], {'type': 'click.STRING', 'default': '"""all"""', 'help': 'u"""兼容旧命令"""'}), "('-s', '--stage', type=click.STRING, default='all', help=u'兼容旧命令')\n", (3111, 3177), False, 'import click\n'), ((3179, 3270), 'click.option', 'click.option', (['"""-c"""', '"""--clean"""'], {'type': 'click.BOOL', 'default': '(False)', 'help': 'u"""兼容旧命令, 测试结束释放vm"""'}), "('-c', '--clean', type=click.BOOL, default=False, help=\n u'兼容旧命令, 测试结束释放vm')\n", (3191, 3270), False, 'import click\n'), ((3384, 3472), 'click.option', 'click.option', (['"""--debug/--no-debug"""'], {'default': '(False)', 'help': 'u"""开启debug模式,默认: --no-debug"""'}), "('--debug/--no-debug', default=False, help=\n u'开启debug模式,默认: --no-debug')\n", (3396, 3472), False, 'import click\n'), ((4162, 4203), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (4174, 4203), False, 'import click\n'), ((4205, 4293), 'click.option', 'click.option', (['"""-c"""', '"""--clean/--keep"""'], {'default': '(False)', 'help': 'u"""测试结束是否释放vm, 默认: --keep"""'}), "('-c', '--clean/--keep', default=False, help=\n u'测试结束是否释放vm, 默认: --keep')\n", (4217, 4293), False, 'import 
click\n'), ((4545, 4586), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (4557, 4586), False, 'import click\n'), ((4588, 4677), 'click.option', 'click.option', (['"""-c"""', '"""--clean/--keep"""'], {'default': '(False)', 'help': 'u"""开vm失败是否释放vm, 默认: --keep"""'}), "('-c', '--clean/--keep', default=False, help=\n u'开vm失败是否释放vm, 默认: --keep')\n", (4600, 4677), False, 'import click\n'), ((4798, 4839), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (4810, 4839), False, 'import click\n'), ((5035, 5076), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (5047, 5076), False, 'import click\n'), ((5078, 5196), 'click.option', 'click.option', (['"""-f"""', '"""--force/--no-force"""'], {'envvar': '"""force_lock"""', 'default': '(False)', 'help': 'u"""是否强制锁nc,锁定失败则任务终止,默认值: False"""'}), "('-f', '--force/--no-force', envvar='force_lock', default=False,\n help=u'是否强制锁nc,锁定失败则任务终止,默认值: False')\n", (5090, 5196), False, 'import click\n'), ((5359, 5400), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (5371, 5400), False, 'import click\n'), ((5402, 5497), 'click.option', 'click.option', (['"""-f"""', '"""--force/--no-force"""'], {'default': '(False)', 'help': 'u"""是否强制初始化,会重复初始化, 默认: False"""'}), "('-f', '--force/--no-force', default=False, help=\n u'是否强制初始化,会重复初始化, 默认: False')\n", (5414, 5497), False, 'import click\n'), ((5494, 5582), 'click.option', 'click.option', (['"""-u"""', '"""--update/--no-update"""'], {'default': '(False)', 'help': 'u"""是否更新代码,默认: False"""'}), "('-u', '--update/--no-update', default=False, help=\n u'是否更新代码,默认: False')\n", (5506, 5582), False, 'import click\n'), ((5579, 5666), 'click.option', 'click.option', (['"""-r"""', '"""--run/--no-run"""'], {'default': 
'(False)', 'help': 'u"""初始化结束是否进行压测,默认: False"""'}), "('-r', '--run/--no-run', default=False, help=\n u'初始化结束是否进行压测,默认: False')\n", (5591, 5666), False, 'import click\n'), ((5827, 5868), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (5839, 5868), False, 'import click\n'), ((5870, 5962), 'click.option', 'click.option', (['"""-p"""', '"""--parser"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""是否同步解析, 默认: True"""'}), "('-p', '--parser', type=click.BOOL, default=True, help=\n u'是否同步解析, 默认: True')\n", (5882, 5962), False, 'import click\n'), ((5959, 6036), 'click.option', 'click.option', (['"""-l"""', '"""--loop"""'], {'type': 'click.INT', 'default': '(1)', 'help': 'u"""循环跑几轮, 默认: 1"""'}), "('-l', '--loop', type=click.INT, default=1, help=u'循环跑几轮, 默认: 1')\n", (5971, 6036), False, 'import click\n'), ((6038, 6132), 'click.option', 'click.option', (['"""-u"""', '"""--upload"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""是否同步上传结果, 默认: True"""'}), "('-u', '--upload', type=click.BOOL, default=True, help=\n u'是否同步上传结果, 默认: True')\n", (6050, 6132), False, 'import click\n'), ((6129, 6232), 'click.option', 'click.option', (['"""-n"""', '"""--new_task_id/--old_task_id"""'], {'default': '(False)', 'help': 'u"""是否生成新到task_id, 默认: False"""'}), "('-n', '--new_task_id/--old_task_id', default=False, help=\n u'是否生成新到task_id, 默认: False')\n", (6141, 6232), False, 'import click\n'), ((6229, 6324), 'click.option', 'click.option', (['"""-f"""', '"""--force/--no-force"""'], {'default': '(False)', 'help': 'u"""是否忽略case状态强制重跑, 默认: False"""'}), "('-f', '--force/--no-force', default=False, help=\n u'是否忽略case状态强制重跑, 默认: False')\n", (6241, 6324), False, 'import click\n'), ((6321, 6409), 'click.option', 'click.option', (['"""-c"""', '"""--clean/--keep"""'], {'default': '(False)', 'help': 'u"""测试结束是否释放vm, 默认: --keep"""'}), "('-c', '--clean/--keep', default=False, help=\n u'测试结束是否释放vm, 默认: 
--keep')\n", (6333, 6409), False, 'import click\n'), ((6406, 6498), 'click.option', 'click.option', (['"""-m"""', '"""--parallel/--common"""'], {'default': '(False)', 'help': 'u"""是否是多vm并行压测, 默认: False"""'}), "('-m', '--parallel/--common', default=False, help=\n u'是否是多vm并行压测, 默认: False')\n", (6418, 6498), False, 'import click\n'), ((6633, 6738), 'click.option', 'click.option', (['"""-s"""', '"""--scenario_name"""'], {'type': 'click.STRING', 'multiple': '(True)', 'help': 'u"""指定跑哪几个case,支持多选,默认全跑"""'}), "('-s', '--scenario_name', type=click.STRING, multiple=True,\n help=u'指定跑哪几个case,支持多选,默认全跑')\n", (6645, 6738), False, 'import click\n'), ((6846, 6887), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (6858, 6887), False, 'import click\n'), ((6889, 6975), 'click.option', 'click.option', (['"""-r"""', '"""--remote/--local"""'], {'default': '(False)', 'help': 'u"""是否在远端解析, 默认: False"""'}), "('-r', '--remote/--local', default=False, help=\n u'是否在远端解析, 默认: False')\n", (6901, 6975), False, 'import click\n'), ((6972, 7066), 'click.option', 'click.option', (['"""-u"""', '"""--upload"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""是否同步上传结果, 默认: True"""'}), "('-u', '--upload', type=click.BOOL, default=True, help=\n u'是否同步上传结果, 默认: True')\n", (6984, 7066), False, 'import click\n'), ((7264, 7305), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (7276, 7305), False, 'import click\n'), ((7307, 7399), 'click.option', 'click.option', (['"""-r"""', '"""--result"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""收集结果数据, 默认: True"""'}), "('-r', '--result', type=click.BOOL, default=True, help=\n u'收集结果数据, 默认: True')\n", (7319, 7399), False, 'import click\n'), ((7396, 7490), 'click.option', 'click.option', (['"""-i"""', '"""--instance"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""收集实例信息, 默认: 
True"""'}), "('-i', '--instance', type=click.BOOL, default=True, help=\n u'收集实例信息, 默认: True')\n", (7408, 7490), False, 'import click\n'), ((7487, 7580), 'click.option', 'click.option', (['"""-v"""', '"""--verbose"""'], {'type': 'click.BOOL', 'default': '(False)', 'help': 'u"""详细信息, 默认: False"""'}), "('-v', '--verbose', type=click.BOOL, default=False, help=\n u'详细信息, 默认: False')\n", (7499, 7580), False, 'import click\n'), ((7670, 7711), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (7682, 7711), False, 'import click\n'), ((7713, 7807), 'click.option', 'click.option', (['"""-b"""', '"""--backup"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""结果备份到oss, 默认: True"""'}), "('-b', '--backup', type=click.BOOL, default=True, help=\n u'结果备份到oss, 默认: True')\n", (7725, 7807), False, 'import click\n'), ((7804, 7898), 'click.option', 'click.option', (['"""-i"""', '"""--instance"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""上传实例信息, 默认: True"""'}), "('-i', '--instance', type=click.BOOL, default=True, help=\n u'上传实例信息, 默认: True')\n", (7816, 7898), False, 'import click\n'), ((7895, 7987), 'click.option', 'click.option', (['"""-t"""', '"""--task"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""上传task信息, 默认: True"""'}), "('-t', '--task', type=click.BOOL, default=True, help=\n u'上传task信息, 默认: True')\n", (7907, 7987), False, 'import click\n'), ((7984, 8076), 'click.option', 'click.option', (['"""-r"""', '"""--result"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""上传结果信息, 默认: True"""'}), "('-r', '--result', type=click.BOOL, default=True, help=\n u'上传结果信息, 默认: True')\n", (7996, 8076), False, 'import click\n'), ((8073, 8169), 'click.option', 'click.option', (['"""-s"""', '"""--status"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""上传case状态信息, 默认: True"""'}), "('-s', '--status', type=click.BOOL, default=True, help=\n u'上传case状态信息, 默认: 
True')\n", (8085, 8169), False, 'import click\n'), ((8257, 8298), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (8269, 8298), False, 'import click\n'), ((8300, 8395), 'click.option', 'click.option', (['"""-n"""', '"""--unlock_nc"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""是否解锁nc, 默认: True"""'}), "('-n', '--unlock_nc', type=click.BOOL, default=True, help=\n u'是否解锁nc, 默认: True')\n", (8312, 8395), False, 'import click\n'), ((8481, 8522), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (8493, 8522), False, 'import click\n'), ((8524, 8596), 'click.option', 'click.option', (['"""-a"""', '"""--all"""'], {'type': 'click.BOOL', 'default': '(True)', 'help': 'u"""全部状态"""'}), "('-a', '--all', type=click.BOOL, default=True, help=u'全部状态')\n", (8536, 8596), False, 'import click\n'), ((8598, 8667), 'click.option', 'click.option', (['"""-c"""', '"""--case/--no-case"""'], {'default': '(False)', 'help': 'u"""case状态"""'}), "('-c', '--case/--no-case', default=False, help=u'case状态')\n", (8610, 8667), False, 'import click\n'), ((8669, 8744), 'click.option', 'click.option', (['"""-i"""', '"""--instance/--no-instance"""'], {'default': '(False)', 'help': 'u"""实例状态"""'}), "('-i', '--instance/--no-instance', default=False, help=u'实例状态')\n", (8681, 8744), False, 'import click\n'), ((8746, 8817), 'click.option', 'click.option', (['"""-u"""', '"""--upload/--no-upload"""'], {'default': '(False)', 'help': 'u"""上传状态"""'}), "('-u', '--upload/--no-upload', default=False, help=u'上传状态')\n", (8758, 8817), False, 'import click\n'), ((8819, 8890), 'click.option', 'click.option', (['"""-p"""', '"""--parser/--no-parser"""'], {'default': '(False)', 'help': 'u"""解析状态"""'}), "('-p', '--parser/--no-parser', default=False, help=u'解析状态')\n", (8831, 8890), False, 'import click\n'), ((8892, 8967), 'click.option', 'click.option', (['"""-v"""', 
'"""--verbose/--no-verbose"""'], {'default': '(False)', 'help': 'u"""显示详细信息"""'}), "('-v', '--verbose/--no-verbose', default=False, help=u'显示详细信息')\n", (8904, 8967), False, 'import click\n'), ((9065, 9106), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (9077, 9106), False, 'import click\n'), ((9325, 9366), 'click.option', 'click.option', (['"""-t"""', '"""--tag"""'], {'envvar': '"""tag"""'}), "('-t', '--tag', envvar='tag')\n", (9337, 9366), False, 'import click\n'), ((959, 980), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (974, 980), False, 'import functools\n'), ((2684, 2729), 'click.echo', 'click.echo', (['"""Version 1.0, Powered by Ecs-Lab"""'], {}), "('Version 1.0, Powered by Ecs-Lab')\n", (2694, 2729), False, 'import click\n'), ((3719, 3751), 'click.echo', 'click.echo', (['"""prepare some thing"""'], {}), "('prepare some thing')\n", (3729, 3751), False, 'import click\n'), ((4718, 4736), 'click.echo', 'click.echo', (['op.num'], {}), '(op.num)\n', (4728, 4736), False, 'import click\n'), ((5237, 5262), 'click.echo', 'click.echo', (['repo.__dict__'], {}), '(repo.__dict__)\n', (5247, 5262), False, 'import click\n'), ((5267, 5289), 'click.echo', 'click.echo', (['repo.debug'], {}), '(repo.debug)\n', (5277, 5289), False, 'import click\n'), ((5731, 5749), 'click.echo', 'click.echo', (['op.num'], {}), '(op.num)\n', (5741, 5749), False, 'import click\n'), ((9644, 9711), 'prettytable.PrettyTable', 'PrettyTable', (["['City name', 'Area', 'Population', 'Annual Rainfall']"], {}), "(['City name', 'Area', 'Population', 'Annual Rainfall'])\n", (9655, 9711), False, 'from prettytable import PrettyTable\n'), ((10198, 10211), 'click.echo', 'click.echo', (['x'], {}), '(x)\n', (10208, 10211), False, 'import click\n'), ((1872, 1887), 'random.random', 'random.random', ([], {}), '()\n', (1885, 1887), False, 'import random\n'), ((3893, 3939), 'click.echo', 'click.echo', (['"""I was invoked 
without subcommand"""'], {}), "('I was invoked without subcommand')\n", (3903, 3939), False, 'import click\n'), ((3958, 4020), 'click.echo', 'click.echo', (["('I am about to invoke %s' % ctx.invoked_subcommand)"], {}), "('I am about to invoke %s' % ctx.invoked_subcommand)\n", (3968, 4020), False, 'import click\n'), ((3301, 3333), 'click.Choice', 'click.Choice', (["('simple', 'full')"], {}), "(('simple', 'full'))\n", (3313, 3333), False, 'import click\n'), ((6528, 6576), 'click.Choice', 'click.Choice', (["('cpu', 'io', 'memory', 'network')"], {}), "(('cpu', 'io', 'memory', 'network'))\n", (6540, 6576), False, 'import click\n'), ((7097, 7129), 'click.Choice', 'click.Choice', (["('simple', 'full')"], {}), "(('simple', 'full'))\n", (7109, 7129), False, 'import click\n')] |
#!/usr/bin/env python
import argparse
import csv
import datetime
import os
import pytz
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
from stravalib.client import Client
from stravalib.model import Activity
from db import ChallengeSqlDB
from model2 import Run, User
DEFAULT_SCOPE = ["https://spreadsheets.google.com/feeds",
"https://www.googleapis.com/auth/drive"]
class ChallengeSpread():
def __init__(self, credentials_path):
self.credentials = ServiceAccountCredentials.from_json_keyfile_name(
credentials_path, DEFAULT_SCOPE)
self.gc = gspread.authorize(self.credentials)
def __get_sheet(self, spread_key, sheet_name):
spread_sheet = self.gc.open_by_key(spread_key)
return spread_sheet.worksheet(sheet_name)
def update_summary_run(self, spread_key, sheet_name, run_data):
print("Updating run spreadsheet: %s" % (sheet_name))
# element in run_data : [intania, distance]
worksheet = self.__get_sheet(spread_key, sheet_name)
cell_list = worksheet.range("A2:D%d" % (len(run_data) + 1))
for idx, cell in enumerate(cell_list):
i = int(idx / 4)
j = idx % 4
if j == 0:
print(run_data[i])
element = run_data[i][j]
cell.value = element
# Update in batch
worksheet.update_cells(cell_list, "USER_ENTERED")
def update_runner(self, spread_key, sheet_name, runner_data):
print("Updating runner spreadsheet: %s" % (sheet_name))
# element in runner_data : [no., displayname, intania]
worksheet = self.__get_sheet(spread_key, sheet_name)
cell_list = worksheet.range("A2:D%d" % (len(runner_data) + 1))
for idx, cell in enumerate(cell_list):
i = int(idx / 4)
j = idx % 4
if j == 0:
# Insert row number
element = i + 1
else:
element = runner_data[i][j - 1]
cell.value = element
# Update in batch
worksheet.update_cells(cell_list, "USER_ENTERED")
# Reuired environment
TIME_STRING_FORMAT = "%Y-%m-%d %H:%M:%S"
DEFAULT_OUTPUT_DIR = "./"
MYSQL_HOST = os.environ["MYSQL_HOST"]
MYSQL_USERNAME = os.environ["MYSQL_USERNAME"]
MYSQL_PASSWORD = os.environ["MYSQL_PASSWORD"]
MYSQL_DB_NAME = os.environ["MYSQL_DB_NAME"]
ChallengeSqlDB.init(MYSQL_HOST, MYSQL_USERNAME,
MYSQL_PASSWORD, MYSQL_DB_NAME)
def update_runner_spread_intania(challenge_spread, spread_key, sheet_name):
users = ChallengeSqlDB.get_all_intania_users()
runner_data = []
for user in users:
if user.clubs:
intania = user.clubs[0].intania
else:
intania = "N/A"
runner_data.append((user.first_name, user.last_name, intania))
challenge_spread.update_runner(spread_key, sheet_name, runner_data)
def update_run_spread_intania(challenge_spread, spread_key, sheet_name):
rows = ChallengeSqlDB.get_summary_intania_distance()
run_data = []
for row in rows:
# row.total_distance type is Decimal
run_data.append(
(row.intania, int(row.total_distance) /
1000.0, row.total_user, row.total_run)
)
challenge_spread.update_summary_run(spread_key, sheet_name, run_data)
def update_run_spread_ranger(challenge_spread, spread_key, sheet_name):
rows = ChallengeSqlDB.get_summary_ranger_distance()
run_data = []
for row in rows:
# row.total_distance type is Decimal
run_data.append(
(row.name, int(row.total_distance) /
1000.0, row.total_user, row.total_run)
)
challenge_spread.update_summary_run(spread_key, sheet_name, run_data)
def upload_reports(drive_cleint_config, token_path, folder_id, report_paths):
g_auth = GoogleAuth()
g_auth.LoadClientConfigFile(drive_cleint_config)
g_auth.LoadCredentialsFile(token_path)
drive = GoogleDrive(g_auth)
for report_path in report_paths:
with open(report_path, "r") as file:
title = os.path.basename(file.name)
file_drive = drive.CreateFile({
"title": title,
"parents": [{"kind": "drive#fileLink", "id": folder_id}]
})
file_drive.SetContentString(file.read())
file_drive.Upload()
print("Upload file: %s" % (title))
def gen_run_report(timestamp, report_path):
runs = ChallengeSqlDB.get_all_runs()
with open(report_path, "w", newline="") as csvfile:
fieldnames = ["timestamp",
"start_date",
"start_date_local",
"strava_id",
"name",
"distance",
"moving_time",
"elapsed_time",
"elev_high",
"elev_low",
"total_elevation_gain",
"manual",
"promo_comment",
"promo_multiplier",
"user_strava_id",
"user_first_name",
"user_last_name",
"user_intania",
"user_ranger",
"created_at"
]
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
n_run = 0
for run in runs:
row = vars(run)
row["timestamp"] = timestamp
row["start_date"] = row["start_date"].strftime(TIME_STRING_FORMAT)
row["start_date_local"] = row["start_date_local"].strftime(
TIME_STRING_FORMAT)
row["created_at"] = row["created_at"].strftime(TIME_STRING_FORMAT)
# Customise user info
user = run.user
row["user_strava_id"] = user.strava_id
row["user_first_name"] = user.first_name
row["user_last_name"] = user.last_name
if user.clubs:
row["user_intania"] = user.clubs[0].intania
else:
row["user_intania"] = ""
if user.registration and user.registration.foundation:
row["user_ranger"] = user.registration.foundation.name
else:
row["user_ranger"] = ""
# Filter only wanted fields
row = {key: row[key] for key in fieldnames if key in row}
writer.writerow(row)
n_run += 1
print("Total Runs:", n_run)
print("Generated report to", report_path)
def gen_runner_report(timestamp, report_path):
users = ChallengeSqlDB.get_all_users()
with open(report_path, "w", newline="") as csvfile:
fieldnames = ["timestamp", "id", "strava_id", "first_name",
"last_name", "intania", "ranger", "created_at"]
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
n_user = 0
for user in users:
row = vars(user)
row["timestamp"] = timestamp
row["created_at"] = row["created_at"].strftime(TIME_STRING_FORMAT)
# Customise intania and ranger fields
if user.clubs:
row["intania"] = user.clubs[0].intania
else:
row["intania"] = ""
if user.registration and user.registration.foundation:
row["ranger"] = user.registration.foundation.name
else:
row["ranger"] = ""
# Filter only wanted fields
row = {key: row[key] for key in fieldnames if key in row}
writer.writerow(row)
n_user += 1
print("Total Runners:", n_user)
print("Generated report to", report_path)
def main():
if args.time_zone:
tz = pytz.timezone(args.time_zone)
now = datetime.datetime.now(tz)
else:
now = datetime.datetime.now()
timestamp = now.strftime(TIME_STRING_FORMAT)
report_prefix = now.strftime("%Y%m%d_%H%M%S")
print("Report timestamp:", timestamp)
output_dir = args.output_dir
os.makedirs(output_dir, exist_ok=True)
runner_report_name = "%s_runner_report.csv" % (report_prefix)
run_report_name = "%s_run_report.csv" % (report_prefix)
runner_report_path = os.path.join(output_dir, runner_report_name)
gen_runner_report(timestamp, runner_report_path)
run_report_path = os.path.join(output_dir, run_report_name)
gen_run_report(timestamp, run_report_path)
if args.drive_cleint_config and args.drive_token and args.drive_folder_id:
print("GDrive config is set, uploading reports to Gdrive.")
upload_reports(args.drive_cleint_config, args.drive_token,
args.drive_folder_id, [runner_report_path, run_report_path])
if args.credentials:
print("GSpread credentials is set, uploading summary to spreadsheet.")
challenge_spread = ChallengeSpread(args.credentials)
if args.run_spread_key and args.intania_run_sheet_name:
update_run_spread_intania(
challenge_spread,
args.run_spread_key,
args.intania_run_sheet_name
)
if args.run_spread_key and args.ranger_run_sheet_name:
update_run_spread_ranger(
challenge_spread,
args.run_spread_key,
args.ranger_run_sheet_name
)
if args.runner_spread_key and args.intania_runner_sheet_name:
update_runner_spread_intania(
challenge_spread,
args.runner_spread_key,
args.intania_runner_sheet_name
)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--output-dir", help="Output directory for reports", action="store",
default=DEFAULT_OUTPUT_DIR, dest="output_dir", type=str)
parser.add_argument("--time-zone", help="Timezone for timestamp", action="store",
default=None, dest="time_zone", type=str)
parser.add_argument("--drive-client-config", help="GDrive client config file", action="store",
dest="drive_cleint_config", type=str)
parser.add_argument("--drive-token", help="GDrive access token file", action="store",
dest="drive_token", type=str)
parser.add_argument("--drive-folder-id", help="Destination folder id on GDrive", action="store",
dest="drive_folder_id", type=str)
# Spreadsheet config
parser.add_argument("--credentials", help="GSpread credentials", action="store",
dest="credentials", type=str)
parser.add_argument("--run-spread-key", help="Spreadsheet key for intania & ranger run summary", action="store",
dest="run_spread_key", type=str)
parser.add_argument("--intania-run-sheet-name", help="Worksheet name for intania run summary", action="store",
dest="intania_run_sheet_name", type=str)
parser.add_argument("--ranger-run-sheet-name", help="Worksheet name for ranger run summary", action="store",
dest="ranger_run_sheet_name", type=str)
parser.add_argument("--runner-spread-key", help="Spreadsheet key for runner summary", action="store",
dest="runner_spread_key", type=str)
parser.add_argument("--intania-runner-sheet-name", help="Worksheet name for runner summary", action="store",
dest="intania_runner_sheet_name", type=str)
args = parser.parse_args()
main()
| [
"csv.DictWriter",
"pytz.timezone",
"db.ChallengeSqlDB.get_all_intania_users",
"db.ChallengeSqlDB.get_all_runs",
"db.ChallengeSqlDB.get_all_users",
"os.makedirs",
"argparse.ArgumentParser",
"gspread.authorize",
"os.path.join",
"db.ChallengeSqlDB.get_summary_ranger_distance",
"pydrive.auth.GoogleA... | [((2492, 2570), 'db.ChallengeSqlDB.init', 'ChallengeSqlDB.init', (['MYSQL_HOST', 'MYSQL_USERNAME', 'MYSQL_PASSWORD', 'MYSQL_DB_NAME'], {}), '(MYSQL_HOST, MYSQL_USERNAME, MYSQL_PASSWORD, MYSQL_DB_NAME)\n', (2511, 2570), False, 'from db import ChallengeSqlDB\n'), ((2681, 2719), 'db.ChallengeSqlDB.get_all_intania_users', 'ChallengeSqlDB.get_all_intania_users', ([], {}), '()\n', (2717, 2719), False, 'from db import ChallengeSqlDB\n'), ((3103, 3148), 'db.ChallengeSqlDB.get_summary_intania_distance', 'ChallengeSqlDB.get_summary_intania_distance', ([], {}), '()\n', (3146, 3148), False, 'from db import ChallengeSqlDB\n'), ((3532, 3576), 'db.ChallengeSqlDB.get_summary_ranger_distance', 'ChallengeSqlDB.get_summary_ranger_distance', ([], {}), '()\n', (3574, 3576), False, 'from db import ChallengeSqlDB\n'), ((3964, 3976), 'pydrive.auth.GoogleAuth', 'GoogleAuth', ([], {}), '()\n', (3974, 3976), False, 'from pydrive.auth import GoogleAuth\n'), ((4085, 4104), 'pydrive.drive.GoogleDrive', 'GoogleDrive', (['g_auth'], {}), '(g_auth)\n', (4096, 4104), False, 'from pydrive.drive import GoogleDrive\n'), ((4589, 4618), 'db.ChallengeSqlDB.get_all_runs', 'ChallengeSqlDB.get_all_runs', ([], {}), '()\n', (4616, 4618), False, 'from db import ChallengeSqlDB\n'), ((6786, 6816), 'db.ChallengeSqlDB.get_all_users', 'ChallengeSqlDB.get_all_users', ([], {}), '()\n', (6814, 6816), False, 'from db import ChallengeSqlDB\n'), ((8275, 8313), 'os.makedirs', 'os.makedirs', (['output_dir'], {'exist_ok': '(True)'}), '(output_dir, exist_ok=True)\n', (8286, 8313), False, 'import os\n'), ((8466, 8510), 'os.path.join', 'os.path.join', (['output_dir', 'runner_report_name'], {}), '(output_dir, runner_report_name)\n', (8478, 8510), False, 'import os\n'), ((8586, 8627), 'os.path.join', 'os.path.join', (['output_dir', 'run_report_name'], {}), '(output_dir, run_report_name)\n', (8598, 8627), False, 'import os\n'), ((9893, 9918), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], 
{}), '()\n', (9916, 9918), False, 'import argparse\n'), ((591, 676), 'oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name', 'ServiceAccountCredentials.from_json_keyfile_name', (['credentials_path', 'DEFAULT_SCOPE'], {}), '(credentials_path,\n DEFAULT_SCOPE)\n', (639, 676), False, 'from oauth2client.service_account import ServiceAccountCredentials\n'), ((704, 739), 'gspread.authorize', 'gspread.authorize', (['self.credentials'], {}), '(self.credentials)\n', (721, 739), False, 'import gspread\n'), ((5462, 5508), 'csv.DictWriter', 'csv.DictWriter', (['csvfile'], {'fieldnames': 'fieldnames'}), '(csvfile, fieldnames=fieldnames)\n', (5476, 5508), False, 'import csv\n'), ((7029, 7075), 'csv.DictWriter', 'csv.DictWriter', (['csvfile'], {'fieldnames': 'fieldnames'}), '(csvfile, fieldnames=fieldnames)\n', (7043, 7075), False, 'import csv\n'), ((7977, 8006), 'pytz.timezone', 'pytz.timezone', (['args.time_zone'], {}), '(args.time_zone)\n', (7990, 8006), False, 'import pytz\n'), ((8021, 8046), 'datetime.datetime.now', 'datetime.datetime.now', (['tz'], {}), '(tz)\n', (8042, 8046), False, 'import datetime\n'), ((8071, 8094), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (8092, 8094), False, 'import datetime\n'), ((4208, 4235), 'os.path.basename', 'os.path.basename', (['file.name'], {}), '(file.name)\n', (4224, 4235), False, 'import os\n')] |
from functools import partial
from asciimatics import widgets
from asciimatics.event import KeyboardEvent
from asciimatics.screen import Screen
from asciimatics.widgets import Layout
from groklog.filter_manager import FilterManager
from groklog.process_node import ShellProcessIO
from groklog.ui.filter_viewer import FilterViewer
from groklog.ui.terminal import Terminal
from . import scene_names
from .base_app import BaseApp
class GrokLog(BaseApp):
def __init__(self, screen, filter_manager: FilterManager):
super().__init__(
screen,
screen.height,
screen.width,
can_scroll=False,
name="GrokLog",
title="GrokLog",
)
self.filter_manager = filter_manager
# Register all of the filter widgets
self._filter_widgets = {}
for filter in self.filter_manager:
self._register_filter(filter)
self.central_layout = Layout([100], fill_frame=True)
self.add_layout(self.central_layout)
self.view_filter(self.filter_manager.selected_filter)
# Create the Tab Layout and buttons for it
self.tab_layout = Layout([1, 0, 1, 1, 1, 1, 1])
self.add_layout(self.tab_layout)
self.create_tab_buttons()
self.fix()
def reset(self):
# After coming back from the AddFilter call, recreate the tab buttons to fill
# in any missing tabs.
for filter in self.filter_manager:
if filter not in self._filter_widgets:
self._register_filter(filter)
self.create_tab_buttons()
return super().reset()
def _register_filter(self, filter):
"""Create a widget for this filter and save it under self._filter_widgets"""
if filter in self._filter_widgets:
# This filter is already registered.
return
if isinstance(filter, ShellProcessIO):
widget = Terminal(
name="term",
shell=filter,
height=widgets.Widget.FILL_COLUMN,
)
else:
widget = FilterViewer(filter=filter, height=widgets.Widget.FILL_COLUMN)
self._filter_widgets[filter] = widget
def view_filter(self, filter):
"""Change the actively shown central widget"""
if self.scene is not None:
self.display_toast(f"Viewing {filter.name}: '{filter.command}'")
self.filter_manager.selected_filter = filter
# Replace the central layout widget
new_widget = self._filter_widgets[filter]
self.central_layout.clear_widgets()
self.central_layout.add_widget(new_widget)
self.central_layout.add_widget(widgets.Divider())
if isinstance(new_widget, Terminal):
# The terminal has a... hard time keeping stuff on the screen. This forces
# the terminal to re-subscribe and refresh the screen.
# TODO: Investigate why the terminal doesn't redraw it's screen correctly
new_widget.reset()
# This seems to put the widget into the update() loop
self.fix()
self.central_layout.focus(force_widget=new_widget)
self.screen.force_update(full_refresh=True)
def create_tab_buttons(self):
"""Create all of the tab buttons again"""
self.tab_layout.clear_widgets()
self.tab_layout.add_widget(
widgets.Button(
text="Add Filter",
on_click=partial(self.change_scene, scene_names.FILTER_CREATOR_SCENE),
),
column=0,
)
self.tab_layout.add_widget(widgets.VerticalDivider(), column=1)
for column, filter in enumerate(self.filter_manager, 2):
self.tab_layout.add_widget(
widgets.Button(
text=filter.name,
on_click=lambda filter=filter: self.view_filter(filter),
),
column=column,
)
self.fix()
def process_event(self, event):
if isinstance(event, KeyboardEvent):
if event.key_code in [Screen.ctrl("c")]:
# Catch Ctrl+C and pass it on to the sub shell
self.display_toast("Press Escape to close GrokLog!")
if (
self.filter_manager.selected_filter
is self.filter_manager.root_filter
):
self.filter_manager.root_filter.send_sigint()
return
return super().process_event(event)
| [
"asciimatics.widgets.Layout",
"asciimatics.widgets.Divider",
"groklog.ui.terminal.Terminal",
"asciimatics.screen.Screen.ctrl",
"groklog.ui.filter_viewer.FilterViewer",
"functools.partial",
"asciimatics.widgets.VerticalDivider"
] | [((956, 986), 'asciimatics.widgets.Layout', 'Layout', (['[100]'], {'fill_frame': '(True)'}), '([100], fill_frame=True)\n', (962, 986), False, 'from asciimatics.widgets import Layout\n'), ((1172, 1201), 'asciimatics.widgets.Layout', 'Layout', (['[1, 0, 1, 1, 1, 1, 1]'], {}), '([1, 0, 1, 1, 1, 1, 1])\n', (1178, 1201), False, 'from asciimatics.widgets import Layout\n'), ((1948, 2018), 'groklog.ui.terminal.Terminal', 'Terminal', ([], {'name': '"""term"""', 'shell': 'filter', 'height': 'widgets.Widget.FILL_COLUMN'}), "(name='term', shell=filter, height=widgets.Widget.FILL_COLUMN)\n", (1956, 2018), False, 'from groklog.ui.terminal import Terminal\n'), ((2117, 2179), 'groklog.ui.filter_viewer.FilterViewer', 'FilterViewer', ([], {'filter': 'filter', 'height': 'widgets.Widget.FILL_COLUMN'}), '(filter=filter, height=widgets.Widget.FILL_COLUMN)\n', (2129, 2179), False, 'from groklog.ui.filter_viewer import FilterViewer\n'), ((2713, 2730), 'asciimatics.widgets.Divider', 'widgets.Divider', ([], {}), '()\n', (2728, 2730), False, 'from asciimatics import widgets\n'), ((3635, 3660), 'asciimatics.widgets.VerticalDivider', 'widgets.VerticalDivider', ([], {}), '()\n', (3658, 3660), False, 'from asciimatics import widgets\n'), ((3491, 3551), 'functools.partial', 'partial', (['self.change_scene', 'scene_names.FILTER_CREATOR_SCENE'], {}), '(self.change_scene, scene_names.FILTER_CREATOR_SCENE)\n', (3498, 3551), False, 'from functools import partial\n'), ((4125, 4141), 'asciimatics.screen.Screen.ctrl', 'Screen.ctrl', (['"""c"""'], {}), "('c')\n", (4136, 4141), False, 'from asciimatics.screen import Screen\n')] |
from kairon import cli
import logging
if __name__ == "__main__":
logging.basicConfig(level="DEBUG")
cli()
| [
"logging.basicConfig",
"kairon.cli"
] | [((70, 104), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': '"""DEBUG"""'}), "(level='DEBUG')\n", (89, 104), False, 'import logging\n'), ((109, 114), 'kairon.cli', 'cli', ([], {}), '()\n', (112, 114), False, 'from kairon import cli\n')] |
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
with open("README.md") as f:
long_description = f.read()
with open("requirements.txt", "r") as f:
required = f.read().splitlines()
setup(
name="alepython",
description="Python Accumulated Local Effects (ALE) package.",
author="<NAME>",
author_email="<EMAIL>",
license="Apache 2",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/MaximeJumelle/alepython/",
install_requires=required,
extras_require={"test": ["pytest>=5.4", "pytest-cov>=2.8"]},
setup_requires=["setuptools-scm"],
python_requires=">=3.5",
use_scm_version=dict(write_to="src/alepython/_version.py"),
keywords="alepython",
package_dir={"": "src"},
packages=find_packages(where="src"),
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approched :: Apache 2",
"Operating System :: OS Independent",
],
)
| [
"setuptools.find_packages"
] | [((821, 847), 'setuptools.find_packages', 'find_packages', ([], {'where': '"""src"""'}), "(where='src')\n", (834, 847), False, 'from setuptools import find_packages, setup\n')] |
from http import HttpRequest, get_file_md5, HttpResponse
if __name__ == '__main__':
data = """GET /dir/test2.html HTTP/1.1
Host: detectportal.firefox.com
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:66.0) Gecko/20100101 Firefox/66.0
Accept: */*
Accept-Language: zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2
Accept-Encoding: gzip, deflate
Cache-Control: no-cache
Pragma: no-cache
Connection: close
Cookie: a=1; b=2
"""
postdata = 'POST /a.html?a=2&p=bb HTTP/1.1\r\nHost: 127.0.0.1\r\nUser-Agent: python-requests/2.22.0\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nContent-Length: 27\r\nContent-Type: application/x-www-form-urlencoded\r\n\r\nname=kk&password=<PASSWORD>'
data = data.replace('\n', '\r\n')
# data = data.replace('\n', '\r\n')
#
#
#
# obj = HttpRequest(data.encode())
# print(obj.version)
# print(obj.method)
# print(obj.request_args)
# print(obj.path)
# print(obj.post_data)
# print(obj.anchor)
# print(obj.cookie)
# print(get_file_md5("C:\\Users\\kk\\Desktop\\a.exe"))
try:
requ = HttpRequest(data.encode())
resp = HttpResponse(200, requ.path, 'c293bc4cf5fe9da8d94898e38d9b5726')
print(resp.get_socket_data().decode())
except:
resp = HttpResponse(400, '/400.html')
print(resp.get_socket_data().decode())
| [
"http.HttpResponse"
] | [((1178, 1242), 'http.HttpResponse', 'HttpResponse', (['(200)', 'requ.path', '"""c293bc4cf5fe9da8d94898e38d9b5726"""'], {}), "(200, requ.path, 'c293bc4cf5fe9da8d94898e38d9b5726')\n", (1190, 1242), False, 'from http import HttpRequest, get_file_md5, HttpResponse\n'), ((1317, 1347), 'http.HttpResponse', 'HttpResponse', (['(400)', '"""/400.html"""'], {}), "(400, '/400.html')\n", (1329, 1347), False, 'from http import HttpRequest, get_file_md5, HttpResponse\n')] |
from django.urls import path
from . import views
urlpatterns = [
path('owner/<int:owner_id>', views.index),
]
| [
"django.urls.path"
] | [((70, 111), 'django.urls.path', 'path', (['"""owner/<int:owner_id>"""', 'views.index'], {}), "('owner/<int:owner_id>', views.index)\n", (74, 111), False, 'from django.urls import path\n')] |
import sys
import pyshorteners
def shorten_url(url):
s = pyshorteners.Shortener()
short_url = s.tinyurl.short(url)
return short_url
def get_code(authorize_url):
sys.stderr.write("\x1b[2J\x1b[H")
short_url = shorten_url(authorize_url)
"""Show authorization URL and return the code the user wrote."""
message = "Check this link in your browser: " + short_url
sys.stderr.write("\n")
sys.stderr.write("\n")
sys.stderr.write("Youtube authentication required!\n")
sys.stderr.write(message + "\n")
try: input = raw_input #For Python2 compatability
except NameError:
#For Python3 on Windows compatability
try: from builtins import input as input
except ImportError: pass
return input("Enter verification code: ")
| [
"sys.stderr.write",
"pyshorteners.Shortener",
"builtins.input"
] | [((62, 86), 'pyshorteners.Shortener', 'pyshorteners.Shortener', ([], {}), '()\n', (84, 86), False, 'import pyshorteners\n'), ((179, 212), 'sys.stderr.write', 'sys.stderr.write', (['"""\x1b[2J\x1b[H"""'], {}), "('\\x1b[2J\\x1b[H')\n", (195, 212), False, 'import sys\n'), ((392, 414), 'sys.stderr.write', 'sys.stderr.write', (['"""\n"""'], {}), "('\\n')\n", (408, 414), False, 'import sys\n'), ((419, 441), 'sys.stderr.write', 'sys.stderr.write', (['"""\n"""'], {}), "('\\n')\n", (435, 441), False, 'import sys\n'), ((446, 500), 'sys.stderr.write', 'sys.stderr.write', (['"""Youtube authentication required!\n"""'], {}), "('Youtube authentication required!\\n')\n", (462, 500), False, 'import sys\n'), ((505, 537), 'sys.stderr.write', 'sys.stderr.write', (["(message + '\\n')"], {}), "(message + '\\n')\n", (521, 537), False, 'import sys\n'), ((755, 789), 'builtins.input', 'input', (['"""Enter verification code: """'], {}), "('Enter verification code: ')\n", (760, 789), True, 'from builtins import input as input\n')] |
from PyQt5 import QtGui, QtCore
from PyQt5.QtWidgets import QDialog
from src.custom_signals import CustomSignals
from src.qt_ui.dynamics_dialog_ui import Ui_DynamicsDialog
class DynamicsDialog(QDialog):
    """Dialog that collects two timing values for dynamic data collection.

    ``total_time`` is the overall limit that the whole data collection may
    take, and ``iteration_time`` is the pause between successive collection
    iterations.  The dialog's button box stays disabled until both fields
    hold valid numbers.
    """
    def __init__(self):
        super(DynamicsDialog, self).__init__()
        self._ui = Ui_DynamicsDialog()
        self._ui.setupUi(self)
        self._ui.buttonBox.setEnabled(False)
        self.signals = CustomSignals()
        self._ui.buttonBox.clicked.connect(self.button_clicked)
        # Accept only non-negative decimal numbers such as "3", "0.5" or ".25".
        self.regexp = QtCore.QRegExp('\\d*[\\.]?\\d+')
        self.validator = QtGui.QRegExpValidator(self.regexp)
        for edit in (self._ui.iterationTimeEdit, self._ui.totalTimeEdit):
            edit.setValidator(self.validator)
            edit.textChanged.connect(self.check_state)
            # Fire once so the button state reflects the initial contents.
            edit.textChanged.emit(edit.text())
    def get_values(self):
        """Return ``(iteration_time, total_time)``, or ``None`` when the
        iteration time exceeds the total time."""
        iteration_time = float(self._ui.iterationTimeEdit.text())
        total_time = float(self._ui.totalTimeEdit.text())
        return (iteration_time, total_time) if iteration_time <= total_time else None
    def button_clicked(self):
        """Emit the entered values (or ``None``) through the custom signal."""
        self.signals.send_data.emit(self.get_values())
    def check_state(self):
        """Enable the button box only while both inputs validate."""
        edits = (self._ui.iterationTimeEdit, self._ui.totalTimeEdit)
        states = [self.validator.validate(edit.text(), 0)[0] for edit in edits]
        ok = all(state == QtGui.QValidator.Acceptable for state in states)
        self._ui.buttonBox.setEnabled(ok)
    def closeEvent(self, event):
        """Report cancellation (``None``) when the dialog window is closed."""
        self.signals.send_data.emit(None)
        event.accept()
    def reject(self):
        self.close()
| [
"src.qt_ui.dynamics_dialog_ui.Ui_DynamicsDialog",
"PyQt5.QtCore.QRegExp",
"src.custom_signals.CustomSignals",
"PyQt5.QtGui.QRegExpValidator"
] | [((488, 507), 'src.qt_ui.dynamics_dialog_ui.Ui_DynamicsDialog', 'Ui_DynamicsDialog', ([], {}), '()\n', (505, 507), False, 'from src.qt_ui.dynamics_dialog_ui import Ui_DynamicsDialog\n'), ((608, 623), 'src.custom_signals.CustomSignals', 'CustomSignals', ([], {}), '()\n', (621, 623), False, 'from src.custom_signals import CustomSignals\n'), ((767, 799), 'PyQt5.QtCore.QRegExp', 'QtCore.QRegExp', (['"""\\\\d*[\\\\.]?\\\\d+"""'], {}), "('\\\\d*[\\\\.]?\\\\d+')\n", (781, 799), False, 'from PyQt5 import QtGui, QtCore\n'), ((825, 860), 'PyQt5.QtGui.QRegExpValidator', 'QtGui.QRegExpValidator', (['self.regexp'], {}), '(self.regexp)\n', (847, 860), False, 'from PyQt5 import QtGui, QtCore\n')] |
# Copyright 2017 The Tulsi Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for bazel_build_events.py."""
import io
import json
import unittest
import bazel_build_events
# Synthetic build-event identities used by the tests below.  Events are
# linked parent -> child -> grandchild through their 'children' lists.
ROOT_ID = {'foo': 'bar'}
CHILD_ID = {'foo': 'child'}
GRANDCHILD_ID = {'foo': 'grandchild'}
# Root event: carries stdout/stderr progress output plus a named file set,
# and declares CHILD_ID as its only child.
ROOT_EVENT_DICT = {
    'id': ROOT_ID,
    'children': [CHILD_ID],
    'progress': {
        'stdout': 'Hello',
        'stderr': 'World',
    },
    'namedSetOfFiles': {
        'files': [{'uri': 'file:///dir/file.txt'}],
    },
}
# Child event with stderr-only progress output.
CHILD_EVENT_DICT = {
    'id': CHILD_ID,
    'progress': {
        'stderr': 'Hello!',
    },
}
# Same as CHILD_EVENT_DICT but missing the 'id' field entirely.
CHILD_WITHOUT_ID_EVENT_DICT = {
    'progress': {
        'stderr': 'Hello!',
    },
}
# Child event that declares a grandchild of its own.
CHILD_EVENT_WITH_CHILD_DICT = {
    'id': CHILD_ID,
    'children': [{'foo': 'grandchild'}],
}
# Leaf event at the bottom of the parent/child chain.
GRANDCHILD_EVENT_DICT = {
    'id': GRANDCHILD_ID,
    'progress': {
        'stderr': 'Hello from the grandchild!',
    },
}
class TestFileLineReader(unittest.TestCase):
  """Tests for bazel_build_events._FileLineReader."""

  def testMultiLine(self):
    stream = io.StringIO()
    stream.write(u'First Line.\nSecond Line.\nThird Line.\n')
    stream.seek(0)
    reader = bazel_build_events._FileLineReader(stream)
    for expected in ('First Line.\n', 'Second Line.\n', 'Third Line.\n'):
      self.assertEqual(reader.check_for_changes(), expected)
    self.assertIsNone(reader.check_for_changes())

  def testLineRescans(self):
    stream = io.StringIO()
    reader = bazel_build_events._FileLineReader(stream)
    self.assertIsNone(reader.check_for_changes())
    # A partial line (no trailing newline) must not be reported yet.
    stream.write(u'Line')
    stream.seek(0)
    self.assertIsNone(reader.check_for_changes())
    # Complete the line; the reader should now return it in full.
    stream.seek(0, 2)
    partial_pos = stream.tell()
    stream.write(u'!\n')
    stream.seek(partial_pos)
    self.assertEqual(reader.check_for_changes(), 'Line!\n')
    self.assertIsNone(reader.check_for_changes())
class TestBazelBuildEvents(unittest.TestCase):
  """Tests for bazel_build_events.BazelBuildEvent."""

  def testBuildEventParsing(self):
    build_event = bazel_build_events.BazelBuildEvent(ROOT_EVENT_DICT)
    # stdout/stderr come from 'progress'; files are file:// URIs with the
    # scheme stripped.
    self.assertEqual(build_event.stdout, 'Hello')
    self.assertEqual(build_event.stderr, 'World')
    self.assertEqual(build_event.files, ['/dir/file.txt'])
class TestBazelBuildEventsWatcher(unittest.TestCase):
  """Tests for bazel_build_events.BazelBuildEventsWatcher."""

  def testWatcherBuildEvent(self):
    stream = io.StringIO()
    watcher = bazel_build_events.BazelBuildEventsWatcher(stream)
    stream.write(json.dumps(ROOT_EVENT_DICT) + u'\n')
    stream.seek(0)
    events = watcher.check_for_new_events()
    self.assertEqual(len(events), 1)
    event = events[0]
    self.assertEqual(event.stdout, 'Hello')
    self.assertEqual(event.stderr, 'World')
    self.assertEqual(event.files, ['/dir/file.txt'])
# Run the unit tests when this module is executed directly.
if __name__ == '__main__':
  unittest.main()
| [
"json.dumps",
"bazel_build_events._FileLineReader",
"bazel_build_events.BazelBuildEvent",
"unittest.main",
"io.StringIO",
"bazel_build_events.BazelBuildEventsWatcher"
] | [((3363, 3378), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3376, 3378), False, 'import unittest\n'), ((1533, 1546), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1544, 1546), False, 'import io\n'), ((1647, 1692), 'bazel_build_events._FileLineReader', 'bazel_build_events._FileLineReader', (['test_file'], {}), '(test_file)\n', (1681, 1692), False, 'import bazel_build_events\n'), ((1988, 2001), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1999, 2001), False, 'import io\n'), ((2015, 2060), 'bazel_build_events._FileLineReader', 'bazel_build_events._FileLineReader', (['test_file'], {}), '(test_file)\n', (2049, 2060), False, 'import bazel_build_events\n'), ((2578, 2624), 'bazel_build_events.BazelBuildEvent', 'bazel_build_events.BazelBuildEvent', (['event_dict'], {}), '(event_dict)\n', (2612, 2624), False, 'import bazel_build_events\n'), ((2892, 2905), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (2903, 2905), False, 'import io\n'), ((2920, 2973), 'bazel_build_events.BazelBuildEventsWatcher', 'bazel_build_events.BazelBuildEventsWatcher', (['test_file'], {}), '(test_file)\n', (2962, 2973), False, 'import bazel_build_events\n'), ((2994, 3021), 'json.dumps', 'json.dumps', (['ROOT_EVENT_DICT'], {}), '(ROOT_EVENT_DICT)\n', (3004, 3021), False, 'import json\n')] |
from django.db import models
from apps.metadata.songs.models import Song
from apps.metadata.users.models import User
class UserPreference(models.Model):
    """Per-user play statistics for a song.

    Each row records how many times ``user`` has played ``song``; the
    (user, song) pair is unique, so there is at most one row per
    combination.
    """
    # unique=False was passed explicitly before; it is the Field default,
    # so it is dropped here with no behavior change.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # NOTE(review): related_name='song' makes the reverse accessor on Song
    # be called "song", which is confusing — kept for backward compatibility.
    song = models.ForeignKey(Song, related_name='song', on_delete=models.CASCADE)
    play_count = models.IntegerField(default=0)
    class Meta:
        unique_together = (('user', 'song'),)
| [
"django.db.models.IntegerField",
"django.db.models.ForeignKey"
] | [((167, 230), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'unique': '(False)', 'on_delete': 'models.CASCADE'}), '(User, unique=False, on_delete=models.CASCADE)\n', (184, 230), False, 'from django.db import models\n'), ((242, 331), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Song'], {'unique': '(False)', 'related_name': '"""song"""', 'on_delete': 'models.CASCADE'}), "(Song, unique=False, related_name='song', on_delete=models\n .CASCADE)\n", (259, 331), False, 'from django.db import models\n'), ((344, 388), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'unique': '(False)'}), '(default=0, unique=False)\n', (363, 388), False, 'from django.db import models\n')] |
#
# PlotView.py -- base class for plot viewer
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import logging
import numpy as np
from ginga.misc import Callback, Settings
from ginga import AstroImage
from ginga.gw import Widgets
try:
from ginga.gw import Plot
from ginga.util import plots
have_mpl = True
except ImportError:
have_mpl = False
class PlotViewGw(Callback.Callbacks):
    """A Ginga viewer for displaying 2D plots using matplotlib.
    """
    # Name under which this viewer is registered, and the data object
    # types it accepts.
    vname = 'Ginga Plot'
    vtypes = [AstroImage.AstroImage]

    @classmethod
    def viewable(cls, dataobj):
        """Test whether `dataobj` is viewable by this viewer."""
        if not isinstance(dataobj, AstroImage.AstroImage):
            return False
        shp = list(dataobj.shape)
        # Only non-empty, strictly 1D data can be shown as a line plot.
        if 0 in shp or len(shp) != 1:
            return False
        return True

    def __init__(self, logger=None, settings=None):
        """Create the viewer.

        Parameters
        ----------
        logger : logging.Logger, optional
            Logger to use; a private one is created if not supplied.
        settings : ginga.misc.Settings.SettingGroup, optional
            Viewer settings; created with defaults if not supplied.
        """
        Callback.Callbacks.__init__(self)
        if logger is not None:
            self.logger = logger
        else:
            self.logger = logging.Logger('PlotView')
        # No data until set_dataobj() is called; initialized here so that
        # do_plot() is a clean no-op instead of raising AttributeError.
        self._dataobj = None
        # Create settings and set defaults
        if settings is None:
            settings = Settings.SettingGroup(logger=self.logger)
        self.settings = settings
        self.settings.add_defaults(plot_bg='white', show_marker=False,
                                   linewidth=1, linestyle='-',
                                   linecolor='blue', markersize=6,
                                   markerwidth=0.5, markercolor='red',
                                   markerstyle='o', file_suffix='.png')
        # for debugging
        self.name = str(self)
        if not have_mpl:
            raise ImportError('Install matplotlib to use this plugin')
        top = Widgets.VBox()
        top.set_border_width(4)
        self.line_plot = plots.Plot(logger=self.logger,
                                    width=400, height=400)
        bg = self.settings.get('plot_bg', 'white')
        # matplotlib 2.0 renamed the 'axisbg' keyword to 'facecolor'
        if plots.MPL_GE_2_0:
            kwargs = {'facecolor': bg}
        else:
            kwargs = {'axisbg': bg}
        self.line_plot.add_axis(**kwargs)
        self.plot_w = Plot.PlotWidget(self.line_plot)
        self.plot_w.resize(400, 400)
        # enable interactivity in the plot
        self.line_plot.connect_ui()
        self.line_plot.enable(zoom=True, pan=True)
        self.line_plot.add_callback('limits-set', self.limits_cb)
        ax = self.line_plot.ax
        ax.grid(True)
        top.add_widget(self.plot_w, stretch=1)
        # Layout spec for the control strip below the plot.
        captions = (('Log X', 'checkbutton', 'Log Y', 'checkbutton',
                     'Show Marker', 'checkbutton'),
                    ('X Low:', 'label', 'x_lo', 'entry',
                     'X High:', 'label', 'x_hi', 'entry',
                     'Reset X', 'button'),
                    ('Y Low:', 'label', 'y_lo', 'entry',
                     'Y High:', 'label', 'y_hi', 'entry',
                     'Reset Y', 'button'),
                    ('Save', 'button'))
        # for now...
        orientation = 'vertical'
        w, b = Widgets.build_info(captions, orientation=orientation)
        self.w = b
        top.add_widget(w, stretch=0)
        b.log_x.set_state(self.line_plot.logx)
        b.log_x.add_callback('activated', self.log_x_cb)
        b.log_x.set_tooltip('Plot X-axis in log scale')
        b.log_y.set_state(self.line_plot.logy)
        b.log_y.add_callback('activated', self.log_y_cb)
        b.log_y.set_tooltip('Plot Y-axis in log scale')
        b.x_lo.add_callback('activated', lambda w: self.set_xlim_cb())
        b.x_lo.set_tooltip('Set X lower limit')
        b.x_hi.add_callback('activated', lambda w: self.set_xlim_cb())
        b.x_hi.set_tooltip('Set X upper limit')
        b.y_lo.add_callback('activated', lambda w: self.set_ylim_cb())
        b.y_lo.set_tooltip('Set Y lower limit')
        b.y_hi.add_callback('activated', lambda w: self.set_ylim_cb())
        b.y_hi.set_tooltip('Set Y upper limit')
        b.reset_x.add_callback('activated', lambda w: self.reset_xlim_cb())
        b.reset_x.set_tooltip('Autoscale X limits')
        b.reset_y.add_callback('activated', lambda w: self.reset_ylim_cb())
        b.reset_y.set_tooltip('Autoscale Y limits')
        b.show_marker.set_state(self.settings.get('show_marker', False))
        b.show_marker.add_callback('activated', self.set_marker_cb)
        b.show_marker.set_tooltip('Mark data points')
        # Button to save plot
        self.save_plot = b.save
        self.save_plot.set_tooltip('Save table plot')
        self.save_plot.add_callback('activated', lambda w: self.save_cb())
        self.save_plot.set_enabled(False)
        self.widget = top
        # For callbacks
        for name in ['image-set']:
            self.enable_callback(name)

    def get_widget(self):
        """Return the top-level container widget for embedding."""
        return self.widget

    def get_settings(self):
        """Return this viewer's SettingGroup."""
        return self.settings

    def get_logger(self):
        """Return this viewer's logger."""
        return self.logger

    def clear(self):
        """Clear the viewer's container widget."""
        self.widget.clear()

    def initialize_channel(self, fv, channel):
        # no housekeeping to do (for now) on our part, just override to
        # suppress the logger warning
        pass

    def set_dataobj(self, dataobj):
        """Set `dataobj` as the data to plot and redraw.

        Raises
        ------
        ValueError
            If `dataobj` is not viewable by this viewer.
        """
        if not self.viewable(dataobj):
            raise ValueError("Can't display this data object")
        self._dataobj = dataobj
        self.do_plot(reset_xlimits=True, reset_ylimits=True)
        self.make_callback('image-set', dataobj)

    def get_dataobj(self):
        """Return the currently displayed data object (None if unset)."""
        return self._dataobj

    def clear_data(self):
        """Clear comboboxes and columns."""
        self.w.x_lo.set_text('')
        self.w.x_hi.set_text('')
        self.w.y_lo.set_text('')
        self.w.y_hi.set_text('')

    def clear_plot(self):
        """Clear plot display."""
        self.line_plot.clear()
        self.line_plot.draw()
        self.save_plot.set_enabled(False)

    def do_plot(self, reset_xlimits=True, reset_ylimits=True):
        """Simple line plot."""
        self.clear_plot()
        if self._dataobj is None:  # No data to plot
            return
        plt_kw = {
            'lw': self.settings.get('linewidth', 1),
            'ls': self.settings.get('linestyle', '-'),
            'color': self.settings.get('linecolor', 'blue'),
            'ms': self.settings.get('markersize', 6),
            'mew': self.settings.get('markerwidth', 0.5),
            'mfc': self.settings.get('markercolor', 'red')}
        plt_kw['mec'] = plt_kw['mfc']
        try:
            x_data, y_data = self.get_plot_data()
            marker = self.get_marker()
            self.line_plot.plot(
                x_data, y_data,
                xtitle=self.get_label('x'), ytitle=self.get_label('y'),
                marker=marker, **plt_kw)
            # When a reset is not requested, re-apply the user-entered limits.
            if not reset_xlimits:
                self.set_xlim_cb()
                self.set_xlimits_widgets()
            if not reset_ylimits:
                self.set_ylim_cb()
                self.set_ylimits_widgets()
        except Exception as e:
            self.logger.error(str(e))
        else:
            self.save_plot.set_enabled(True)

    def set_xlimits_widgets(self, set_min=True, set_max=True):
        """Populate axis limits GUI with current plot values."""
        xmin, xmax = self.line_plot.ax.get_xlim()
        if set_min:
            self.w.x_lo.set_text('{0}'.format(xmin))
        if set_max:
            self.w.x_hi.set_text('{0}'.format(xmax))

    def set_ylimits_widgets(self, set_min=True, set_max=True):
        """Populate axis limits GUI with current plot values."""
        ymin, ymax = self.line_plot.ax.get_ylim()
        if set_min:
            self.w.y_lo.set_text('{0}'.format(ymin))
        if set_max:
            self.w.y_hi.set_text('{0}'.format(ymax))

    def limits_cb(self, plot, dct):
        """Callback that is called when the limits are set by the
        plot object.
        """
        self.set_xlimits_widgets()
        self.set_ylimits_widgets()

    def get_plot_data(self):
        """Return (x, y) arrays to plot: the 1D data against its indices."""
        y_data = self._dataobj.get_data()
        x_data = np.arange(len(y_data))
        return x_data, y_data

    def get_marker(self):
        """Return the configured marker style, or None when markers are off."""
        _marker_type = self.settings.get('markerstyle', 'o')
        if not self.w.show_marker.get_state():
            _marker_type = None
        return _marker_type

    def get_label(self, axis):
        """Return plot label for the given axis ('x' or 'y')."""
        # Fall back to the axis name itself for any unknown axis; previously
        # an unknown axis raised UnboundLocalError.
        label = axis
        if axis == 'x':
            label = 'Index'
        if axis == 'y':
            label = 'Value'
        return label

    def log_x_cb(self, w, val):
        """Toggle linear/log scale for X-axis."""
        self.line_plot.logx = val
        self.do_plot()

    def log_y_cb(self, w, val):
        """Toggle linear/log scale for Y-axis."""
        self.line_plot.logy = val
        self.do_plot()

    def set_xlim_cb(self, redraw=True):
        """Set plot limit based on user values."""
        try:
            xmin = float(self.w.x_lo.get_text())
        except Exception:
            set_min = True
        else:
            set_min = False
        try:
            xmax = float(self.w.x_hi.get_text())
        except Exception:
            set_max = True
        else:
            set_max = False
        if set_min or set_max:
            # A blank/invalid entry: redraw and refill the offending
            # widget(s) from the current plot limits.
            self.line_plot.draw()
            self.set_xlimits_widgets(set_min=set_min, set_max=set_max)
        # Apply user limits only when *both* values parsed.  The previous
        # test `not (set_min and set_max)` entered this branch with exactly
        # one value parsed and raised UnboundLocalError on the other.
        if not (set_min or set_max):
            self.line_plot.ax.set_xlim(xmin, xmax)
            if redraw:
                self.line_plot.draw()

    def set_ylim_cb(self, redraw=True):
        """Set plot limit based on user values."""
        try:
            ymin = float(self.w.y_lo.get_text())
        except Exception:
            set_min = True
        else:
            set_min = False
        try:
            ymax = float(self.w.y_hi.get_text())
        except Exception:
            set_max = True
        else:
            set_max = False
        if set_min or set_max:
            # A blank/invalid entry: redraw and refill the offending
            # widget(s) from the current plot limits.
            self.line_plot.draw()
            self.set_ylimits_widgets(set_min=set_min, set_max=set_max)
        # Apply user limits only when *both* values parsed (see set_xlim_cb).
        if not (set_min or set_max):
            self.line_plot.ax.set_ylim(ymin, ymax)
            if redraw:
                self.line_plot.draw()

    def reset_xlim_cb(self):
        """Autoscale the X axis."""
        self.line_plot.autoscale('x')

    def reset_ylim_cb(self):
        """Autoscale the Y axis."""
        self.line_plot.autoscale('y')

    def set_marker_cb(self, w, val):
        """Toggle show/hide data point markers."""
        self.do_plot()

    def save_cb(self):
        """Save plot to file."""
        # This just defines the basename.
        # Extension has to be explicitly defined or things can get messy.
        w = Widgets.SaveDialog(title='Save plot')
        target = w.get_path()
        if target is None:
            # Save canceled
            return
        plot_ext = self.settings.get('file_suffix', '.png')
        if not target.endswith(plot_ext):
            target += plot_ext
        # TODO: This can be a user preference?
        fig_dpi = 100
        try:
            fig = self.line_plot.get_figure()
            fig.savefig(target, dpi=fig_dpi)
        except Exception as e:
            self.logger.error(str(e))
        else:
            self.logger.info('Table plot saved as {0}'.format(target))

    def __str__(self):
        return "PlotViewer"
| [
"ginga.gw.Widgets.build_info",
"ginga.gw.Widgets.SaveDialog",
"ginga.misc.Callback.Callbacks.__init__",
"ginga.util.plots.Plot",
"ginga.gw.Plot.PlotWidget",
"logging.Logger",
"ginga.misc.Settings.SettingGroup",
"ginga.gw.Widgets.VBox"
] | [((975, 1008), 'ginga.misc.Callback.Callbacks.__init__', 'Callback.Callbacks.__init__', (['self'], {}), '(self)\n', (1002, 1008), False, 'from ginga.misc import Callback, Settings\n'), ((1823, 1837), 'ginga.gw.Widgets.VBox', 'Widgets.VBox', ([], {}), '()\n', (1835, 1837), False, 'from ginga.gw import Widgets\n'), ((1896, 1949), 'ginga.util.plots.Plot', 'plots.Plot', ([], {'logger': 'self.logger', 'width': '(400)', 'height': '(400)'}), '(logger=self.logger, width=400, height=400)\n', (1906, 1949), False, 'from ginga.util import plots\n'), ((2219, 2250), 'ginga.gw.Plot.PlotWidget', 'Plot.PlotWidget', (['self.line_plot'], {}), '(self.line_plot)\n', (2234, 2250), False, 'from ginga.gw import Plot\n'), ((3134, 3187), 'ginga.gw.Widgets.build_info', 'Widgets.build_info', (['captions'], {'orientation': 'orientation'}), '(captions, orientation=orientation)\n', (3152, 3187), False, 'from ginga.gw import Widgets\n'), ((10747, 10784), 'ginga.gw.Widgets.SaveDialog', 'Widgets.SaveDialog', ([], {'title': '"""Save plot"""'}), "(title='Save plot')\n", (10765, 10784), False, 'from ginga.gw import Widgets\n'), ((1114, 1140), 'logging.Logger', 'logging.Logger', (['"""PlotView"""'], {}), "('PlotView')\n", (1128, 1140), False, 'import logging\n'), ((1237, 1278), 'ginga.misc.Settings.SettingGroup', 'Settings.SettingGroup', ([], {'logger': 'self.logger'}), '(logger=self.logger)\n', (1258, 1278), False, 'from ginga.misc import Callback, Settings\n')] |
"""
glucoseDataFrame.py
Creates a dataframe of glucose related statistics
in diabetics for predictive analysis.
"""
import sys
import os
import math
from datetime import *
from dateutil.parser import parse
import pandas as pd
import numpy as np
sys.path.append("..") # proper file path for importing local modules
from pythonScripts.jsonToCsv import convertToCsv
#-------CONSTANTS-------------
# mmol/L -> mg/dL conversion factor for glucose (molar mass based).
CONVERSION_FACTOR = 18.01559
#-------Dicts----------
# Each schedule dict below maps a *start hour of day* (as a string) to the
# value in effect from that hour until the next listed hour.
#basal rates (unit/hour)
BASAL = {
    "0" : .625,
    "2" : .650, #if hour equals 2, then also minute = 30 cause (2:30)
    "4" : .800,
    "8" : .725,
    "12" : .700,
    "14" : .250,
    "19" : .650
    }
#insulin sensitivity (mg/dL/unit)
SENSITIVITY = {
    "0" : 60,
    "6" : 70,
    "9" : 60,
    "12" : 60,
    "15" : 60
    }
#carb ratio (grams/unit)
CARB_RATIO = {
    "0" : 10,
    "6" : 5,
    "11" : 5.5, #if hour equals 11, then also minute = 30 cause (11:30)
    "14" : 6,
    "18" : 7,
    "21" : 9
    }
#----------------------
#-----------------------------
def convert_glucose(glucose_levels):
    """Convert the ``value`` column of *glucose_levels* from mmol/L to
    mg/dL, rounded to two decimal places."""
    values_mmol = glucose_levels.loc[:, 'value']
    return (values_mmol * CONVERSION_FACTOR).round(2)
def divide_timestamp(time_row):
    """Split each timestamp in *time_row* into calendar/clock components.

    Parameters
    ----------
    time_row : pandas.Series
        Series of timestamp strings (anything ``dateutil.parser.parse``
        accepts).

    Returns
    -------
    tuple of lists
        ``(months, days, weekdays, hours, minutes)``, one entry per
        timestamp, in the original order.
    """
    month_list = []
    day_list = []
    weekday_list = []
    hour_list = []
    minutes_list = []
    time_str = time_row.astype(str).values.tolist()
    for i in time_str:
        # Parse each timestamp once; the previous version re-parsed the
        # same string five times (once per component).
        stamp = parse(i)
        month_list.append(stamp.month)
        day_list.append(stamp.day)
        weekday_list.append(stamp.weekday())
        hour_list.append(stamp.hour)
        minutes_list.append(stamp.minute)
    return month_list, day_list, weekday_list, hour_list, minutes_list
def create_dataframe():
    """Creates dataframe for glucose analysis.

    Reads Dexcom/Tidepool glucose data (via convertToCsv) and a Medtronic
    CareLink export (filename prompted from the user), merges boluses and
    carb entries onto the nearest glucose reading, attaches the basal /
    sensitivity / carb-ratio schedules, and writes the result to
    csvData/csvOutData/OUTPUT_<input name>.csv.
    """
    #---get correct path to csv input file-----------
    path_to_input_csv = convertToCsv()
    current_file = os.path.basename(path_to_input_csv)
    print(f"Currently Reading File: {current_file}")
    # Interactive: name of the Medtronic CareLink export to merge in.
    care_link_file = input("\nEnter Medtronic File: ")
    #------------------------------------------------
    #----------Create data frame-------------------
    #get all data from csv
    gluc_level_data = pd.read_csv(path_to_input_csv)
    # remove rows that are NaN for value
    gluc_level_data = gluc_level_data[pd.notnull(gluc_level_data["value"])]
    #----------------------------------------------
    #---------------conversion mmol/L to mg/dL-----------------
    glu = convert_glucose(gluc_level_data)
    #----------------------------------------------------------
    #--------Save month, day, weekday, hour, minutes---------------
    timestamp = gluc_level_data.loc[:, 'time']
    saved_index = timestamp.index # save the index from this dataframe as variable index
    month_list, day_list, weekday_list, hour_list, minutes_list = divide_timestamp(timestamp)
    #convert the lists to dataframes while ensuring the index corresponds to the other dataframes
    monthdf = pd.DataFrame(np.array(month_list), index=saved_index)
    daydf = pd.DataFrame(np.array(day_list), index=saved_index)
    weekdaydf = pd.DataFrame(np.array(weekday_list), index=saved_index)
    hourdf = pd.DataFrame(np.array(hour_list), index=saved_index)
    minutesdf = pd.DataFrame(np.array(minutes_list), index=saved_index)
    #--------------------------------------------------------------
    #---------BOLUS OUTPUT---------------------------
    # CareLink exports carry 6 lines of report metadata before the header.
    path_to_care_link = os.path.join(os.getcwd(), "csvData", "csvInData")
    bolus_carb_csv = pd.read_csv(os.path.join(path_to_care_link, care_link_file), skiprows=6)
    bolus = bolus_carb_csv.loc[:, 'Bolus Volume Delivered (U)']
    date = bolus_carb_csv.loc[:, 'Date']
    time = bolus_carb_csv.loc[:, 'Time']
    carb = bolus_carb_csv.loc[:, 'BWZ Carb Input (grams)']
    bolus_carb_data = pd.concat([date, time, bolus, carb], axis=1, ignore_index=True)
    #remove row if NaN value in both columns 2&3 (bolus AND carb empty)
    bolus_carb_data = bolus_carb_data.dropna(subset=[2, 3], how='all')
    #get rid of last header row
    bolus_carb_data = bolus_carb_data.drop(bolus_carb_data.index[len(bolus_carb_data)-1])
    bolus_carb_data.columns = ["Date", "Time", "Bolus (U)", "Carb Input (grams)"]
    #-------------------------------------------------------------------------
    #--------Save month, day, hour, minutes---------------
    month_list_b = []
    day_list_b = []
    hour_list_b = []
    minutes_list_b = []
    date = bolus_carb_data.loc[:, 'Date']
    time = bolus_carb_data.loc[:, 'Time']
    index_bolus = date.index # save the index from this dataframe as variable index
    day_str = date.astype(str).values.tolist()
    time_str_b = time.astype(str).values.tolist()
    for j in time_str_b:
        time_whole = datetime.strptime(j, '%H:%M:%S')
        #hour component
        hour_list_b.append(time_whole.hour)
        #minute component
        minutes_list_b.append(time_whole.minute)
    for k in day_str:
        date_whole = datetime.strptime(k, '%Y/%m/%d')
        #month component
        month_list_b.append(date_whole.month)
        #day component
        day_list_b.append(date_whole.day)
    #convert the lists to dataframes while ensuring the index corresponds to the other dataframes
    monthdf_bolus = pd.DataFrame(np.array(month_list_b), index=index_bolus)
    daydf_bolus = pd.DataFrame(np.array(day_list_b), index=index_bolus)
    hourdf_bolus = pd.DataFrame(np.array(hour_list_b), index=index_bolus)
    minutesdf_bolus = pd.DataFrame(np.array(minutes_list_b), index=index_bolus)
    #concatenate all of these
    bolus_carb_final = pd.concat([bolus_carb_data, monthdf_bolus, daydf_bolus, hourdf_bolus, minutesdf_bolus], axis=1, ignore_index=True)
    bolus_carb_final.columns = ["Date", "Time", "Bolus (U)", "Carb Input (grams)", "Month", "Day", "Hour", "Minutes"]
    #--------------------------------------------------------------
    #--------Concatenate all of the dataframes into one dataframe----------------------------
    final = pd.concat([timestamp, glu, monthdf, daydf, weekdaydf, hourdf, minutesdf], axis=1, ignore_index=True) #concatenate the dataframe together
    #give columns names
    final.columns = ["TimeStamp", "Glucose (mg/dL)", "Month", "Day", "Weekday", "Hour", "Minutes"]
    #----------------------------------------------------------------------------------------
    #MERGE MEDTRONIC DATA WITH DEXCOM
    #----------------------------------------------------------------------------------------
    #make dataframe of NaN filled bolus and carb columns with indexes matching tidepool
    bolus_carbdf = pd.DataFrame(np.nan, index=saved_index, columns=["Bolus (U)", "Carb Input (grams)"])
    #match up the bolus insulin & carb intake from one csv
    # O(n*m) nested scan: for every Medtronic row find a Tidepool reading in
    # the same month/day/hour whose minutes are within 5 of it.
    for index_med, row_med in bolus_carb_final.iterrows(): #go through Medtronic Data
        mins_med = getattr(row_med, "Minutes")
        hrs_med = getattr(row_med, "Hour")
        day_med = getattr(row_med, "Day")
        month_med = getattr(row_med, "Month")
        bolus_med = getattr(row_med, "Bolus (U)")
        carb_med = getattr(row_med, "Carb Input (grams)")
        cur_smalls = -1   # index of the best Tidepool match so far (-1 = none)
        got_one = False   # True once a within-5-minutes match has been seen
        for index_tide, row_tide in final.iterrows(): #go through Tidepool Data
            mins_tide = getattr(row_tide, "Minutes")
            hrs_tide = getattr(row_tide, "Hour")
            day_tide = getattr(row_tide, "Day")
            month_tide = getattr(row_tide, "Month")
            #find closest time in Tidepool data to Medtronic data
            if month_tide == month_med and day_tide == day_med and hrs_tide == hrs_med:
                #time difference of medtronic time minus tidepool time
                # NOTE(review): this difference can be negative (Tidepool
                # reading later in the hour), which always passes <= 5;
                # possibly abs() was intended — confirm before changing.
                dif_time = mins_med - mins_tide
                if (dif_time) <= 5:
                    cur_smalls = index_tide
                if got_one:
                    break #get out of this inner loop as we found the time we wanted for this data
                if (dif_time) <= 5:
                    got_one = True
        #add bolus & carb info to bolusCarbdf
        if cur_smalls != -1:
            if not math.isnan(float(carb_med)):
                bolus_carbdf.loc[cur_smalls, 'Carb Input (grams)'] = carb_med
            if not math.isnan(float(bolus_med)):
                bolus_carbdf.loc[cur_smalls, 'Bolus (U)'] = bolus_med
    #--------Concatenate all of the bolusCarbdf dataframe with final dataframe---------------
    #concatenate the dataframes together
    almost_final = pd.concat([timestamp, glu, monthdf, daydf, weekdaydf, hourdf,
                              minutesdf, bolus_carbdf], axis=1, ignore_index=True)
    #give columns names
    almost_final.columns = ["TimeStamp", "Glucose (mg/dL)", "Month",
                            "Day", "Weekday", "Hour", "Minutes", "Bolus (U)",
                            "Carb Input (grams)"]
    #----------------------------------------------------------------------------------------
    #----------------------------------------------------------------------------------------
    #create initial csv OUTPUT
    path_base_name = os.path.basename(path_to_input_csv)
    output_file_name = "OUTPUT_" + path_base_name
    path_to_out_csv = os.path.join(os.getcwd(), "csvData", "csvOutData")
    output_file_path = os.path.join(path_to_out_csv, output_file_name)
    almost_final.to_csv(output_file_path) # return dataframes as a csv
    #----------------------------------------------------------------------------------------
    #----------------------------------------------------------------------------------------
    # Re-read the freshly written csv so rows gain the positional index used below.
    basal_sens_ratio_data = pd.read_csv(output_file_path)
    basal_list = []
    insulin_sens_list = []
    carb_ratio_list = []
    # Map each row's time of day onto the BASAL / SENSITIVITY / CARB_RATIO
    # schedules (keys are the start hours of each schedule segment).
    for index, row in basal_sens_ratio_data.iterrows():
        #for basal list
        if row['Hour'] >= 0 and row['Hour'] < 3:
            if row['Hour'] == 2 and row['Minutes'] < 30:
                basal_list.append(BASAL["0"])
            elif row['Hour'] == 2 and row['Minutes'] >= 30:
                basal_list.append(BASAL["2"])
            else:
                basal_list.append(BASAL["0"])
        elif row['Hour'] >= 3 and row['Hour'] < 4:
            basal_list.append(BASAL["2"])
        elif row['Hour'] >= 4 and row['Hour'] < 8:
            basal_list.append(BASAL["4"])
        elif row['Hour'] >= 8 and row['Hour'] < 12:
            basal_list.append(BASAL["8"])
        elif row['Hour'] >= 12 and row['Hour'] < 14:
            basal_list.append(BASAL["12"])
        elif row['Hour'] >= 14 and row['Hour'] < 19:
            basal_list.append(BASAL["14"])
        elif row['Hour'] >= 19 and row['Hour'] < 24:
            basal_list.append(BASAL["19"])
        #for insulin sensitivity list
        if row['Hour'] >= 0 and row['Hour'] < 6:
            insulin_sens_list.append(SENSITIVITY["0"])
        elif row['Hour'] >= 6 and row['Hour'] < 9:
            insulin_sens_list.append(SENSITIVITY["6"])
        elif row['Hour'] >= 9 and row['Hour'] < 12:
            insulin_sens_list.append(SENSITIVITY["9"])
        elif row['Hour'] >= 12 and row['Hour'] < 15:
            insulin_sens_list.append(SENSITIVITY["12"])
        elif row['Hour'] >= 15 and row['Hour'] < 24:
            insulin_sens_list.append(SENSITIVITY["15"])
        #for carb ratio list
        if row['Hour'] >= 0 and row['Hour'] < 6:
            carb_ratio_list.append(CARB_RATIO["0"])
        elif row['Hour'] >= 6 and row['Hour'] < 12:
            if row['Hour'] == 11 and row['Minutes'] < 30:
                carb_ratio_list.append(CARB_RATIO["6"])
            elif row['Hour'] == 11 and row['Minutes'] >= 30:
                carb_ratio_list.append(CARB_RATIO["11"])
            else:
                carb_ratio_list.append(CARB_RATIO["6"])
        elif row['Hour'] >= 12 and row['Hour'] < 14:
            carb_ratio_list.append(CARB_RATIO["11"])
        elif row['Hour'] >= 14 and row['Hour'] < 18:
            carb_ratio_list.append(CARB_RATIO["14"])
        elif row['Hour'] >= 18 and row['Hour'] < 21:
            carb_ratio_list.append(CARB_RATIO["18"])
        elif row['Hour'] >= 21 and row['Hour'] < 24:
            carb_ratio_list.append(CARB_RATIO["21"])
    #create dataframes from lists
    basaldf = pd.DataFrame(np.array(basal_list), index=saved_index) #like above set index to index
    insulindf = pd.DataFrame(np.array(insulin_sens_list), index=saved_index) #like above set index to index
    carbdf = pd.DataFrame(np.array(carb_ratio_list), index=saved_index) #like above set index to index
    #----------------------------------------------------------------------------------------
    #--------Concatenate the new dataframes into final dataframe----------------------------
    real_final = pd.concat([timestamp, glu, basaldf, insulindf, carbdf, monthdf, daydf, weekdaydf, hourdf, minutesdf, bolus_carbdf], axis=1, ignore_index=True) #concatenate the dataframe together
    #----------------------------------------------------------------------------------------
    #give columns names
    real_final.columns = ["TimeStamp", "Glucose (mg/dL)", "Basal Insulin (U/hr)",
                          "Insulin Sensitivity (mg/dL/U)","Carb Ratio (g/U)", "Month", "Day",
                          "Weekday", "Hour", "Minutes", "Bolus (U)", "Carb Input (grams)"]
    # Drop consecutive rows that repeat the same timestamp (duplicate readings).
    last_time = ""
    for index, row in real_final.iterrows():
        if row['TimeStamp'] == last_time:
            real_final = real_final.drop(index, axis=0)
        last_time = row['TimeStamp']
    '''
    #create final csv OUTPUT (rewrites the earlier csv file)
    header = ["TimeStamp", "Glucose (mg/dL)", "Basal Insulin (U/hr)","Insulin Sensitivity (mg/dL/U)","Carb Ratio (g/U)", "Month", "Day","Weekday", "Hour","Minutes","Bolus (U)", "Carb Input (grams)"]
    '''
    # Reverse to chronological order, then overwrite the earlier csv.
    real_final = real_final.reindex(index=real_final.index[::-1])
    real_final.to_csv(output_file_path) # return dataframes as a csv
def main():
    """Entry point: build and write the glucose analysis CSV."""
    create_dataframe()
if __name__ == '__main__':
    main()
| [
"dateutil.parser.parse",
"pandas.read_csv",
"pythonScripts.jsonToCsv.convertToCsv",
"os.path.join",
"os.getcwd",
"numpy.array",
"pandas.concat",
"os.path.basename",
"pandas.DataFrame",
"pandas.notnull",
"sys.path.append"
] | [((247, 268), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (262, 268), False, 'import sys\n'), ((2268, 2282), 'pythonScripts.jsonToCsv.convertToCsv', 'convertToCsv', ([], {}), '()\n', (2280, 2282), False, 'from pythonScripts.jsonToCsv import convertToCsv\n'), ((2302, 2337), 'os.path.basename', 'os.path.basename', (['path_to_input_csv'], {}), '(path_to_input_csv)\n', (2318, 2337), False, 'import os\n'), ((2602, 2632), 'pandas.read_csv', 'pd.read_csv', (['path_to_input_csv'], {}), '(path_to_input_csv)\n', (2613, 2632), True, 'import pandas as pd\n'), ((4234, 4297), 'pandas.concat', 'pd.concat', (['[date, time, bolus, carb]'], {'axis': '(1)', 'ignore_index': '(True)'}), '([date, time, bolus, carb], axis=1, ignore_index=True)\n', (4243, 4297), True, 'import pandas as pd\n'), ((5999, 6117), 'pandas.concat', 'pd.concat', (['[bolus_carb_data, monthdf_bolus, daydf_bolus, hourdf_bolus, minutesdf_bolus]'], {'axis': '(1)', 'ignore_index': '(True)'}), '([bolus_carb_data, monthdf_bolus, daydf_bolus, hourdf_bolus,\n minutesdf_bolus], axis=1, ignore_index=True)\n', (6008, 6117), True, 'import pandas as pd\n'), ((6408, 6512), 'pandas.concat', 'pd.concat', (['[timestamp, glu, monthdf, daydf, weekdaydf, hourdf, minutesdf]'], {'axis': '(1)', 'ignore_index': '(True)'}), '([timestamp, glu, monthdf, daydf, weekdaydf, hourdf, minutesdf],\n axis=1, ignore_index=True)\n', (6417, 6512), True, 'import pandas as pd\n'), ((7002, 7090), 'pandas.DataFrame', 'pd.DataFrame', (['np.nan'], {'index': 'saved_index', 'columns': "['Bolus (U)', 'Carb Input (grams)']"}), "(np.nan, index=saved_index, columns=['Bolus (U)',\n 'Carb Input (grams)'])\n", (7014, 7090), True, 'import pandas as pd\n'), ((8883, 9001), 'pandas.concat', 'pd.concat', (['[timestamp, glu, monthdf, daydf, weekdaydf, hourdf, minutesdf, bolus_carbdf]'], {'axis': '(1)', 'ignore_index': '(True)'}), '([timestamp, glu, monthdf, daydf, weekdaydf, hourdf, minutesdf,\n bolus_carbdf], axis=1, ignore_index=True)\n', 
(8892, 9001), True, 'import pandas as pd\n'), ((9493, 9528), 'os.path.basename', 'os.path.basename', (['path_to_input_csv'], {}), '(path_to_input_csv)\n', (9509, 9528), False, 'import os\n'), ((9675, 9722), 'os.path.join', 'os.path.join', (['path_to_out_csv', 'output_file_name'], {}), '(path_to_out_csv, output_file_name)\n', (9687, 9722), False, 'import os\n'), ((10016, 10045), 'pandas.read_csv', 'pd.read_csv', (['output_file_path'], {}), '(output_file_path)\n', (10027, 10045), True, 'import pandas as pd\n'), ((13132, 13278), 'pandas.concat', 'pd.concat', (['[timestamp, glu, basaldf, insulindf, carbdf, monthdf, daydf, weekdaydf,\n hourdf, minutesdf, bolus_carbdf]'], {'axis': '(1)', 'ignore_index': '(True)'}), '([timestamp, glu, basaldf, insulindf, carbdf, monthdf, daydf,\n weekdaydf, hourdf, minutesdf, bolus_carbdf], axis=1, ignore_index=True)\n', (13141, 13278), True, 'import pandas as pd\n'), ((2712, 2748), 'pandas.notnull', 'pd.notnull', (["gluc_level_data['value']"], {}), "(gluc_level_data['value'])\n", (2722, 2748), True, 'import pandas as pd\n'), ((3400, 3420), 'numpy.array', 'np.array', (['month_list'], {}), '(month_list)\n', (3408, 3420), True, 'import numpy as np\n'), ((3466, 3484), 'numpy.array', 'np.array', (['day_list'], {}), '(day_list)\n', (3474, 3484), True, 'import numpy as np\n'), ((3534, 3556), 'numpy.array', 'np.array', (['weekday_list'], {}), '(weekday_list)\n', (3542, 3556), True, 'import numpy as np\n'), ((3603, 3622), 'numpy.array', 'np.array', (['hour_list'], {}), '(hour_list)\n', (3611, 3622), True, 'import numpy as np\n'), ((3672, 3694), 'numpy.array', 'np.array', (['minutes_list'], {}), '(minutes_list)\n', (3680, 3694), True, 'import numpy as np\n'), ((3875, 3886), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3884, 3886), False, 'import os\n'), ((3945, 3992), 'os.path.join', 'os.path.join', (['path_to_care_link', 'care_link_file'], {}), '(path_to_care_link, care_link_file)\n', (3957, 3992), False, 'import os\n'), ((5676, 5698), 
'numpy.array', 'np.array', (['month_list_b'], {}), '(month_list_b)\n', (5684, 5698), True, 'import numpy as np\n'), ((5750, 5770), 'numpy.array', 'np.array', (['day_list_b'], {}), '(day_list_b)\n', (5758, 5770), True, 'import numpy as np\n'), ((5823, 5844), 'numpy.array', 'np.array', (['hour_list_b'], {}), '(hour_list_b)\n', (5831, 5844), True, 'import numpy as np\n'), ((5900, 5924), 'numpy.array', 'np.array', (['minutes_list_b'], {}), '(minutes_list_b)\n', (5908, 5924), True, 'import numpy as np\n'), ((9614, 9625), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (9623, 9625), False, 'import os\n'), ((12639, 12659), 'numpy.array', 'np.array', (['basal_list'], {}), '(basal_list)\n', (12647, 12659), True, 'import numpy as np\n'), ((12740, 12767), 'numpy.array', 'np.array', (['insulin_sens_list'], {}), '(insulin_sens_list)\n', (12748, 12767), True, 'import numpy as np\n'), ((12845, 12870), 'numpy.array', 'np.array', (['carb_ratio_list'], {}), '(carb_ratio_list)\n', (12853, 12870), True, 'import numpy as np\n'), ((1656, 1664), 'dateutil.parser.parse', 'parse', (['i'], {}), '(i)\n', (1661, 1664), False, 'from dateutil.parser import parse\n'), ((1736, 1744), 'dateutil.parser.parse', 'parse', (['i'], {}), '(i)\n', (1741, 1744), False, 'from dateutil.parser import parse\n'), ((1908, 1916), 'dateutil.parser.parse', 'parse', (['i'], {}), '(i)\n', (1913, 1916), False, 'from dateutil.parser import parse\n'), ((1991, 1999), 'dateutil.parser.parse', 'parse', (['i'], {}), '(i)\n', (1996, 1999), False, 'from dateutil.parser import parse\n'), ((1818, 1826), 'dateutil.parser.parse', 'parse', (['i'], {}), '(i)\n', (1823, 1826), False, 'from dateutil.parser import parse\n')] |
import json
from importlib import resources
# One JSON-encoded merge pair per line; materialize each pair as a tuple.
bpe_merges = [
    tuple(json.loads(line))
    for line in resources.open_text(__name__, 'bpe_merges.jsonl')
]
| [
"importlib.resources.open_text",
"json.loads"
] | [((75, 124), 'importlib.resources.open_text', 'resources.open_text', (['__name__', '"""bpe_merges.jsonl"""'], {}), "(__name__, 'bpe_merges.jsonl')\n", (94, 124), False, 'from importlib import resources\n'), ((154, 170), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (164, 170), False, 'import json\n')] |
import logging
from .sources import wcota
logger = logging.getLogger()
def load_brazillian_dataset(source):
    """Return the dataset object registered under *source*.

    Only the "wcota" source is currently supported; any other name
    raises ``ValueError``.
    """
    logger.info("Loading %s data", source)
    if source != "wcota":
        raise ValueError(f"Dataset Could not be Retrived: {source!r}, is it available?")
    return wcota.WCotaDataset()
| [
"logging.getLogger"
] | [((52, 71), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (69, 71), False, 'import logging\n')] |
# Copyright 2013-2021 The Salish Sea MEOPAR contributors
# and The University of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Produce a figure that shows a map of the Salish Sea with markers indicating
the risks of high water levels at the Point Atkinson, Victoria, Campbell River,
Nanaimo, and Cherry Point tide gauge locations.
The figure also shows wind vectors that indicate the average wind speed and
direction averaged over the 4 hours preceding the maximum sea surface height
at each location.
The figure is a thumbnail version of the figure produced by
:py:mod:`nowcast.figures.publish.storm_surge_alerts`.
It is intended primarily for use on the Salish Sea Storm Surge Information
Portal page https://salishsea.eos.ubc.ca/storm-surge/.
Testing notebook for this module is
https://nbviewer.jupyter.org/github/SalishSeaCast/SalishSeaNowcast/blob/main/notebooks/figures/publish/TestStormSurgeAlertsThumbnailModule.ipynb
"""
from collections import namedtuple
import arrow
import matplotlib.pyplot as plt
import numpy
from matplotlib import gridspec
from salishsea_tools import places, nc_tools, stormtools, unit_conversions, wind_tools
import nowcast.figures.website_theme
from nowcast.figures import shared
def make_figure(
    grids_15m,
    weather_path,
    coastline,
    tidal_predictions,
    figsize=(18, 20),
    theme=nowcast.figures.website_theme,
):
    """Assemble the storm surge alerts thumbnail figure.

    The figure is a Salish Sea map with high water level risk markers and
    4h average wind vectors at the tide gauge sites, plus three legend
    boxes explaining the marker colours.

    :arg dict grids_15m: Collection of 15m sea surface height datasets at
                         tide gauge locations, keyed by tide gauge station
                         name.

    :arg str weather_path: The directory where the weather forcing files
                           are stored.

    :arg coastline: Coastline dataset.
    :type coastline: :class:`mat.Dataset`

    :arg str tidal_predictions: Path to directory of tidal prediction
                                file.

    :arg 2-tuple figsize: Figure size (width, height) in inches.

    :arg theme: Module-like object that defines the style elements for the
                figure. See :py:mod:`nowcast.figures.website_theme` for an
                example.

    :returns: :py:class:`matplotlib.figure.Figure`
    """
    plot_data = _prep_plot_data(grids_15m, tidal_predictions, weather_path)
    fig, axes = _prep_fig_axes(figsize, theme)
    _plot_alerts_map(axes[0], coastline, plot_data, theme)
    # (legend axes, risk level key, label text) for the three legend boxes
    legend_specs = (
        (axes[1], None, "No flooding\nrisk"),
        (axes[2], "moderate risk", "Risk of\nhigh water"),
        (axes[3], "extreme risk", "Extreme risk\nof flooding"),
    )
    for legend_ax, risk_level, label in legend_specs:
        _plot_legend(legend_ax, risk_level, label, theme)
    return fig
def _prep_plot_data(grids_15m, tidal_predictions, weather_path):
    """Compute per-site plot inputs: max SSH, its time, storm surge risk
    level, and the wind averaged over the 4 hours before the SSH maximum.

    Returns a ``PlotData`` namedtuple whose fields (except ``ssh_ts``) are
    dicts keyed by tide gauge site name.
    """
    max_ssh, max_ssh_time, risk_levels = {}, {}, {}
    u_wind_4h_avg, v_wind_4h_avg, max_wind_avg = {}, {}, {}
    for name in places.TIDE_GAUGE_SITES:
        ssh_ts = nc_tools.ssh_timeseries_at_point(grids_15m[name], 0, 0, datetimes=True)
        ttide = shared.get_tides(name, tidal_predictions)
        max_ssh[name], max_ssh_time[name] = shared.find_ssh_max(name, ssh_ts, ttide)
        risk_levels[name] = stormtools.storm_surge_risk_level(
            name, max_ssh[name], ttide
        )
        # Wind averaged over the 4 hours *preceding* the SSH maximum (avg_hrs=-4)
        wind_avg = wind_tools.calc_wind_avg_at_point(
            arrow.get(max_ssh_time[name]),
            weather_path,
            places.PLACES[name]["wind grid ji"],
            avg_hrs=-4,
        )
        u_wind_4h_avg[name], v_wind_4h_avg[name] = wind_avg
        max_wind_avg[name], _ = wind_tools.wind_speed_dir(
            u_wind_4h_avg[name], v_wind_4h_avg[name]
        )
    plot_data = namedtuple(
        "PlotData",
        "ssh_ts, max_ssh, max_ssh_time, risk_levels, "
        "u_wind_4h_avg, v_wind_4h_avg, max_wind_avg",
    )
    # NOTE(review): ssh_ts holds the series from the *last* site iterated;
    # downstream code only reads plot_data.ssh_ts.time[0] (the common start
    # time), so this works, but confirm that is intentional.
    return plot_data(
        ssh_ts,
        max_ssh,
        max_ssh_time,
        risk_levels,
        u_wind_4h_avg,
        v_wind_4h_avg,
        max_wind_avg,
    )
def _prep_fig_axes(figsize, theme):
    """Create the figure, the map axes, and the three legend box axes."""
    fig = plt.figure(figsize=figsize, facecolor=theme.COLOURS["figure"]["facecolor"])
    gs = gridspec.GridSpec(2, 3, width_ratios=[1, 1, 1], height_ratios=[6, 1])
    gs.update(hspace=0.15, wspace=0.05)
    # Top row (all 3 columns) is the map; bottom row holds the legend boxes.
    ax_map = fig.add_subplot(gs[0, :])
    legend_axes = []
    for col in range(3):
        legend_ax = fig.add_subplot(gs[1, col])
        legend_ax.set_facecolor(theme.COLOURS["figure"]["facecolor"])
        legend_axes.append(legend_ax)
    return fig, (ax_map, *legend_axes)
def _plot_alerts_map(ax, coastline, plot_data, theme):
    """Draw the coastline map with risk markers and wind arrows at every
    tide gauge site, then apply labels and the wind legend.
    """
    shared.plot_map(ax, coastline)
    for name in places.TIDE_GAUGE_SITES:
        # Hide the marker entirely when there is no SSH value for the site.
        alpha = 0.3 if not numpy.isnan(plot_data.max_ssh[name]) else 0
        shared.plot_risk_level_marker(
            ax, name, plot_data.risk_levels[name], "o", 55, alpha, theme
        )
        site_lon, site_lat = places.PLACES[name]["lon lat"]
        shared.plot_wind_arrow(
            ax,
            site_lon,
            site_lat,
            plot_data.u_wind_4h_avg[name],
            plot_data.v_wind_4h_avg[name],
            theme,
        )
    # Format the axes and make it pretty
    _alerts_map_axis_labels(ax, plot_data.ssh_ts.time[0], theme)
    _alerts_map_wind_legend(ax, theme)
    _alerts_map_geo_labels(ax, theme)
def _alerts_map_axis_labels(ax, date_time, theme):
    """Set the title, the lon/lat axis labels, and the grid on the map axes."""
    ax.set_title(
        f"Marine and Atmospheric Conditions\n {date_time:%A, %B %d, %Y}",
        fontproperties=theme.FONTS["axes title large"],
        color=theme.COLOURS["text"]["axes title"],
    )
    # Both axis labels share the same font and colour.
    label_style = {
        "fontproperties": theme.FONTS["axis"],
        "color": theme.COLOURS["text"]["axis"],
    }
    ax.set_xlabel("Longitude [°E]", **label_style)
    ax.set_ylabel("Latitude [°N]", **label_style)
    ax.grid(axis="both")
    theme.set_axis_colors(ax)
def _alerts_map_wind_legend(ax, theme):
    """Draw the two reference wind arrows (m/s and knots) and the note that
    explains how the winds were averaged.
    """
    annotation_style = {
        "fontproperties": theme.FONTS["axes annotation"],
        "color": theme.COLOURS["text"]["axes annotation"],
    }
    # (arrow longitude, caption longitude, v component, caption text)
    reference_arrows = (
        (-122.5, -122.58, -5, "Reference: 5 m/s"),
        (-122.75, -122.83, unit_conversions.knots_mps(-5), "Reference: 5 knots"),
    )
    for arrow_lon, caption_lon, v_wind, caption in reference_arrows:
        shared.plot_wind_arrow(ax, arrow_lon, 50.65, 0, v_wind, theme)
        ax.text(caption_lon, 50.5, caption, rotation=90, **annotation_style)
    ax.text(
        -122.85,
        49.9,
        "Winds are 4 hour\n" "average before\n" "maximum water level",
        verticalalignment="top",
        bbox=theme.COLOURS["axes textbox"],
        **annotation_style,
    )
def _alerts_map_geo_labels(ax, theme):
    """Annotate the map with place name labels at hand-tuned offsets,
    rotations, and sizes relative to the locations in places.PLACES.
    """
    geo_labels = (
        # PLACES key, offset x, y, rotation, text size
        ("Pacific Ocean", 0, 0, 0, "left", "small"),
        ("Neah Bay", -0.04, -0.08, 0, "right", "large"),
        ("<NAME>", 0, 0, -18, "left", "small"),
        ("Puget Sound", 0, 0, -30, "left", "small"),
        ("Strait of Georgia", 0, 0, -20, "left", "small"),
        ("Victoria", -0.04, 0.04, 0, "right", "large"),
        ("<NAME>", 0.04, 0, 0, "left", "large"),
        ("<NAME>", 0.06, 0.16, 0, "left", "large"),
        ("Nanaimo", -0.04, 0, 0, "right", "large"),
        ("<NAME>", -0.04, -0.04, 0, "right", "large"),
        ("<NAME>", 0, 0, 0, "left", "small"),
        ("Washington State", 0, 0, 0, "left", "small"),
    )
    for place, dx, dy, rotation, justify, label_size in geo_labels:
        lon, lat = places.PLACES[place]["lon lat"]
        ax.text(
            lon + dx,
            lat + dy,
            place,
            rotation=rotation,
            horizontalalignment=justify,
            fontproperties=theme.FONTS[f"location label {label_size}"],
        )
def _plot_legend(ax, risk_level, text, theme):
    """Render one legend box: a coloured risk marker plus its label text."""
    colour = theme.COLOURS["storm surge risk levels"][risk_level]
    ax.set_xlim(0, 1)
    ax.set_ylim(0, 1)
    marker_style = dict(
        marker="o", markersize=70, markeredgewidth=2, color=colour, alpha=0.6
    )
    ax.plot(0.2, 0.45, **marker_style)
    # The "gold" matplotlib colour reads better as "yellow" in the label.
    if colour.lower() == "gold":
        colour_name = "yellow"
    else:
        colour_name = colour
    ax.text(
        0.4,
        0.2,
        f"{colour_name.title()}:\n{text}",
        transform=ax.transAxes,
        fontproperties=theme.FONTS["legend label large"],
        color=theme.COLOURS["text"]["risk level label"],
    )
    _legend_box_hide_frame(ax, theme)
def _legend_box_hide_frame(ax, theme):
    """Blend a legend box into the figure by hiding its ticks and frame."""
    ax.set_facecolor(theme.COLOURS["figure"]["facecolor"])
    for axis in (ax.xaxis, ax.yaxis):
        axis.set_visible(False)
    for spine in ax.spines.values():
        spine.set_visible(False)
| [
"collections.namedtuple",
"salishsea_tools.wind_tools.wind_speed_dir",
"salishsea_tools.stormtools.storm_surge_risk_level",
"nowcast.figures.shared.plot_risk_level_marker",
"nowcast.figures.shared.get_tides",
"nowcast.figures.shared.find_ssh_max",
"matplotlib.pyplot.figure",
"matplotlib.gridspec.GridS... | [((4396, 4517), 'collections.namedtuple', 'namedtuple', (['"""PlotData"""', '"""ssh_ts, max_ssh, max_ssh_time, risk_levels, u_wind_4h_avg, v_wind_4h_avg, max_wind_avg"""'], {}), "('PlotData',\n 'ssh_ts, max_ssh, max_ssh_time, risk_levels, u_wind_4h_avg, v_wind_4h_avg, max_wind_avg'\n )\n", (4406, 4517), False, 'from collections import namedtuple\n'), ((4763, 4838), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize', 'facecolor': "theme.COLOURS['figure']['facecolor']"}), "(figsize=figsize, facecolor=theme.COLOURS['figure']['facecolor'])\n", (4773, 4838), True, 'import matplotlib.pyplot as plt\n'), ((4848, 4917), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['(2)', '(3)'], {'width_ratios': '[1, 1, 1]', 'height_ratios': '[6, 1]'}), '(2, 3, width_ratios=[1, 1, 1], height_ratios=[6, 1])\n', (4865, 4917), False, 'from matplotlib import gridspec\n'), ((5470, 5500), 'nowcast.figures.shared.plot_map', 'shared.plot_map', (['ax', 'coastline'], {}), '(ax, coastline)\n', (5485, 5500), False, 'from nowcast.figures import shared\n'), ((6760, 6815), 'nowcast.figures.shared.plot_wind_arrow', 'shared.plot_wind_arrow', (['ax', '(-122.5)', '(50.65)', '(0)', '(-5)', 'theme'], {}), '(ax, -122.5, 50.65, 0, -5, theme)\n', (6782, 6815), False, 'from nowcast.figures import shared\n'), ((3665, 3736), 'salishsea_tools.nc_tools.ssh_timeseries_at_point', 'nc_tools.ssh_timeseries_at_point', (['grids_15m[name]', '(0)', '(0)'], {'datetimes': '(True)'}), '(grids_15m[name], 0, 0, datetimes=True)\n', (3697, 3736), False, 'from salishsea_tools import places, nc_tools, stormtools, unit_conversions, wind_tools\n'), ((3753, 3794), 'nowcast.figures.shared.get_tides', 'shared.get_tides', (['name', 'tidal_predictions'], {}), '(name, tidal_predictions)\n', (3769, 3794), False, 'from nowcast.figures import shared\n'), ((3839, 3879), 'nowcast.figures.shared.find_ssh_max', 'shared.find_ssh_max', (['name', 'ssh_ts', 'ttide'], {}), '(name, ssh_ts, 
ttide)\n', (3858, 3879), False, 'from nowcast.figures import shared\n'), ((3908, 3969), 'salishsea_tools.stormtools.storm_surge_risk_level', 'stormtools.storm_surge_risk_level', (['name', 'max_ssh[name]', 'ttide'], {}), '(name, max_ssh[name], ttide)\n', (3941, 3969), False, 'from salishsea_tools import places, nc_tools, stormtools, unit_conversions, wind_tools\n'), ((4290, 4357), 'salishsea_tools.wind_tools.wind_speed_dir', 'wind_tools.wind_speed_dir', (['u_wind_4h_avg[name]', 'v_wind_4h_avg[name]'], {}), '(u_wind_4h_avg[name], v_wind_4h_avg[name])\n', (4315, 4357), False, 'from salishsea_tools import places, nc_tools, stormtools, unit_conversions, wind_tools\n'), ((5617, 5713), 'nowcast.figures.shared.plot_risk_level_marker', 'shared.plot_risk_level_marker', (['ax', 'name', 'plot_data.risk_levels[name]', '"""o"""', '(55)', 'alpha', 'theme'], {}), "(ax, name, plot_data.risk_levels[name], 'o', \n 55, alpha, theme)\n", (5646, 5713), False, 'from nowcast.figures import shared\n'), ((5739, 5872), 'nowcast.figures.shared.plot_wind_arrow', 'shared.plot_wind_arrow', (['ax', "*places.PLACES[name]['lon lat']", 'plot_data.u_wind_4h_avg[name]', 'plot_data.v_wind_4h_avg[name]', 'theme'], {}), "(ax, *places.PLACES[name]['lon lat'], plot_data.\n u_wind_4h_avg[name], plot_data.v_wind_4h_avg[name], theme)\n", (5761, 5872), False, 'from nowcast.figures import shared\n'), ((7076, 7106), 'salishsea_tools.unit_conversions.knots_mps', 'unit_conversions.knots_mps', (['(-5)'], {}), '(-5)\n', (7102, 7106), False, 'from salishsea_tools import places, nc_tools, stormtools, unit_conversions, wind_tools\n'), ((4058, 4087), 'arrow.get', 'arrow.get', (['max_ssh_time[name]'], {}), '(max_ssh_time[name])\n', (4067, 4087), False, 'import arrow\n'), ((5563, 5599), 'numpy.isnan', 'numpy.isnan', (['plot_data.max_ssh[name]'], {}), '(plot_data.max_ssh[name])\n', (5574, 5599), False, 'import numpy\n')] |
import logging
from datetime import datetime, timedelta
logger = logging.getLogger(__name__)
def print_full_report(sessions):
if len(sessions) > 1:
for index, session in enumerate(sessions):
finish_time = session.finishTime or datetime.now()
logger.warn("")
logger.warn(f"SESSION #{index + 1}")
logger.warn(f"Start time: {session.startTime}")
logger.warn(f"Finish time: {finish_time}")
logger.warn(f"Duration: {finish_time - session.startTime}")
logger.warn(
f"Total interactions: {_stringify_interactions(session.totalInteractions)}"
)
logger.warn(
f"Successful interactions: {_stringify_interactions(session.successfulInteractions)}"
)
logger.warn(
f"Total followed: {_stringify_interactions(session.totalFollowed)}"
)
logger.warn(f"Total likes: {session.totalLikes}")
logger.warn(f"Total unfollowed: {session.totalUnfollowed}")
logger.warn(
f"Removed mass followers: {_stringify_removed_mass_followers(session.removedMassFollowers)}"
)
logger.warn("")
logger.warn("TOTAL")
completed_sessions = [session for session in sessions if session.is_finished()]
logger.warn(f"Completed sessions: {len(completed_sessions)}")
duration = timedelta(0)
for session in sessions:
finish_time = session.finishTime or datetime.now()
duration += finish_time - session.startTime
logger.warn(f"Total duration: {duration}")
total_interactions = {}
successful_interactions = {}
total_followed = {}
total_removed_mass_followers = []
for session in sessions:
for source, count in session.totalInteractions.items():
if total_interactions.get(source) is None:
total_interactions[source] = count
else:
total_interactions[source] += count
for source, count in session.successfulInteractions.items():
if successful_interactions.get(source) is None:
successful_interactions[source] = count
else:
successful_interactions[source] += count
for source, count in session.totalFollowed.items():
if total_followed.get(source) is None:
total_followed[source] = count
else:
total_followed[source] += count
for username in session.removedMassFollowers:
total_removed_mass_followers.append(username)
logger.warn(f"Total interactions: {_stringify_interactions(total_interactions)}")
logger.warn(
f"Successful interactions: {_stringify_interactions(successful_interactions)}"
)
logger.warn(f"Total followed : {_stringify_interactions(total_followed)}")
total_likes = sum(session.totalLikes for session in sessions)
logger.warn(f"Total likes: {total_likes}")
total_unfollowed = sum(session.totalUnfollowed for session in sessions)
logger.warn(f"Total unfollowed: {total_unfollowed} ")
logger.warn(
f"Removed mass followers: {_stringify_removed_mass_followers(total_removed_mass_followers)}"
)
def print_short_report(source, session_state):
    """Log a one-line progress summary for the current interaction source.

    :param source: name of the source currently being processed.
    :param session_state: session state object exposing totalLikes,
        totalFollowed (dict), and successfulInteractions (dict).
    """
    total_likes = session_state.totalLikes
    total_followed = sum(session_state.totalFollowed.values())
    interactions = session_state.successfulInteractions.get(source, 0)
    # logger.warning: logger.warn is a deprecated alias.
    logger.warning(
        f"Session progress: {total_likes} likes, {total_followed} followed, {interactions} successful interaction(s) for {source}"
    )
def _stringify_interactions(interactions):
if len(interactions) == 0:
return "0"
result = ""
for source, count in interactions.items():
result += str(count) + " for " + source + ", "
result = result[:-2]
return result
def _stringify_removed_mass_followers(removed_mass_followers):
if len(removed_mass_followers) == 0:
return "none"
else:
return "@" + ", @".join(removed_mass_followers)
| [
"logging.getLogger",
"datetime.datetime.now",
"datetime.timedelta"
] | [((66, 93), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (83, 93), False, 'import logging\n'), ((1423, 1435), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (1432, 1435), False, 'from datetime import datetime, timedelta\n'), ((1509, 1523), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1521, 1523), False, 'from datetime import datetime, timedelta\n'), ((254, 268), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (266, 268), False, 'from datetime import datetime, timedelta\n')] |
import json
from dataclasses import dataclass
from enum import *
from typing import List
import requests
class ConnectorError(Exception):
    """Raised for transport failures and server-reported errors."""
    pass
class ResponseType(Enum):
    """Type tag of a server response (mirrors the JSON "type" field)."""
    ERROR = auto()  # server failure; payload carries an "exception" message
    RESULT = auto()  # query produced a result payload (wrapped as Result)
    FORBIDDEN = auto()  # caller lacks the required permissions
    SUCCESS = auto()  # presumably success without a result payload -- not referenced here
    SYNTAX_ERROR = auto()  # the query could not be parsed
class ResultType(Enum):
    """Declared shape of a query result (see ``Result.resultType()``)."""
    RELATIONAL = auto()  # presumably a tabular/relational result -- confirm with server docs
    DOCUMENT = auto()  # presumably a document-style result -- confirm with server docs
    DEPRECATED = auto()  # used when the response declares no resultType
@dataclass
class User:
    """Credentials used to open a session."""
    name: str
    password: str


class Token(User):
    """Credentials backed by a static session token.

    The sentinel name "%TOKEN%" is checked by
    ``TokenConnection.closeSession`` to detect static token sessions.
    """
    name = "%TOKEN%"

    def __init__(self, token: str):
        # The token travels in the password field of the open-session call.
        self.password = token
class TableEntry:
    """Interface for a single entry (row/document) of a query result."""

    def get(self, field: str = None) -> ...:
        """Return the value of *field* (or the whole content)."""

    def is_present(self, field: str) -> bool:
        """Return True when *field* has a non-None value."""

    def is_null(self, field: str) -> bool:
        """Return True when *field* is None or the string "null"."""

    def creation(self) -> int:
        """Return the entry's creation timestamp."""


class SimpleTableEntry(TableEntry):
    """A regular entry: a field/value mapping plus a creation timestamp."""

    def __init__(self, content, creation):
        self.content = content
        # Bug fix: the original assigned to ``self.creation``, which
        # shadowed the ``creation()`` method and made it uncallable
        # (TypeError: 'int' object is not callable).
        self._creation = creation

    def get(self, field: str = None) -> ...:
        return self.content[field]

    def is_present(self, field) -> bool:
        return self.content[field] is not None

    def is_null(self, field: str) -> bool:
        return self.content[field] is None or self.content[field] == "null"

    def creation(self) -> int:
        return self._creation


class SingletonTableEntry(TableEntry):
    """An entry wrapping a single scalar value instead of a mapping."""

    def __init__(self, content):
        self.content = content

    def get(self, field: str = None) -> ...:
        # The field name is ignored; there is only one value.
        return self.content

    def is_present(self, field) -> bool:
        raise ConnectorError("Unsupported in singleton entry!")

    def is_null(self, field: str) -> bool:
        raise ConnectorError("Unsupported in singleton entry!")

    def creation(self) -> int:
        raise ConnectorError("Unsupported in singleton entry!")
@dataclass
class Response:
    """Generic server response: its type plus the raw decoded payload."""
    type: ResponseType
    response: ...
class SimpleResponse(Response):
    """Response parsed from a decoded JSON payload.

    When *exception* is True, error-type responses are converted into
    ``ConnectorError`` exceptions instead of being returned to the caller.
    """

    def __init__(self, response: ..., exception: bool):
        self.type = ResponseType[response["type"]]
        self.response = response
        if exception:
            # Bug fix: the original compared the *builtin* ``type`` to the
            # enum members, so none of these branches could ever trigger.
            if self.type == ResponseType.ERROR:
                raise ConnectorError(response["exception"])
            elif self.type == ResponseType.FORBIDDEN:
                raise ConnectorError("You don't have the permissions to do that!")
            elif self.type == ResponseType.SYNTAX_ERROR:
                raise ConnectorError("Unknown syntax!")
class ErrorResult(SimpleResponse):
    """A response of type ERROR; exposes the server exception message."""
    def exception(self) -> str:
        # Message accompanying a ResponseType.ERROR payload.
        return self.response["exception"]
class Result(SimpleResponse):
    """A query response carrying table entries."""

    def entries(self) -> List[TableEntry]:
        """Build TableEntry objects from the "result"/"answer" payload list."""
        results = self.response["result"] if "result" in self.response else self.response["answer"]
        entries: List[TableEntry] = []
        for result in results:
            if type(result) is str:
                # A bare string is a singleton (single-value) entry.
                entries.append(SingletonTableEntry(result))
            else:
                entries.append(SimpleTableEntry(result["content"], result["creation"]))
        return entries

    def structure(self) -> List[str]:
        """Return the column/field names of the result."""
        return self.response["structure"]

    def resultType(self) -> ResultType:
        """Return the declared result type, or DEPRECATED when absent.

        Bug fix: the original condition was inverted (``is None`` instead
        of ``is not None``), which evaluated ``ResultType[None]`` and
        crashed for responses without a resultType.
        """
        declared = self.response["resultType"]
        return ResultType[declared] if declared is not None else ResultType.DEPRECATED
class Connection:
    """Interface for a database connection."""
    def connect(self) -> None:
        """Establish the connection / open a session."""
        pass

    def disconnect(self) -> None:
        """Drop the connection state locally."""
        pass

    def closeSession(self) -> None:
        """Close the server-side session."""
        pass

    def connected(self) -> bool:
        """Return whether the connection is currently established."""
        pass

    def query(self, query: str, exception: bool = True) -> Response:
        """Execute *query* and return the parsed Response."""
        pass
class TokenConnection(Connection):
    """Connection that authenticates against an HTTP API and holds a session token."""
    def __init__(self, host: str, user: User, ignore_static_sessions: bool = True):
        # ignore_static_sessions: when the credentials are a static Token,
        # closeSession() only drops the local token instead of calling the API.
        self.host = host
        self.user = user
        self.ignore_static_sessions = ignore_static_sessions
        self.token = None

    @staticmethod
    def __send(host: str, payload) -> ...:
        """POST *payload* as JSON to *host* and return the decoded JSON reply."""
        try:
            return requests.post(host, json.dumps(payload)).json()
        except Exception as ex:
            raise ConnectorError(ex)

    def connect(self) -> None:
        """Open a session and store the returned token."""
        # Default to plain HTTP when no scheme was given.
        if not self.host.startswith("http://") and not self.host.startswith("https://"):
            self.host = "http://" + self.host
        try:
            self.token = \
                self.__send(f"{self.host}/api/v1/session/open",
                            {"user": self.user.name, "password": self.user.password})[
                    "result"][0]
        except Exception as ex:
            # NOTE(review): the original cause is swallowed here; consider
            # chaining with ``raise ... from ex``.
            raise ConnectorError("Connection failed!")

    def disconnect(self) -> None:
        """Forget the session token locally (no server call)."""
        self.token = None

    def closeSession(self) -> None:
        """Close the server-side session, unless it is a static token session."""
        if self.user.name == "%TOKEN%" and self.ignore_static_sessions:
            self.disconnect()
            return
        self.__send(f"{self.host}/api/v1/session/close", {"token": self.token})
        self.disconnect()

    def connected(self) -> bool:
        """Return True while a session token is held."""
        return self.token is not None

    def query(self, query: str, exception: bool = True) -> Response:
        """Run *query* and wrap the reply as Result/ErrorResult/SimpleResponse."""
        response = SimpleResponse(
            self.__send(f"{self.host}/api/v1/query", {"token": self.token, 'query': query}),
            exception)
        if response.type == ResponseType.RESULT:
            return Result(response.response, exception)
        elif response.type == ResponseType.ERROR:
            return ErrorResult(response.response, exception)
        else:
            return response
| [
"json.dumps"
] | [((3876, 3895), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (3886, 3895), False, 'import json\n')] |
# SDP: Subscription Data Protocol
import traceback
from asyncio import get_event_loop
import asyncio
import websockets
import json
from datetime import datetime
#import pytz
from rethinkdb import r
#from flatten_dict import flatten, unflatten
#from dotenv import load_dotenv
#import os
#load_dotenv()
#URI_DATABASE = os.getenv("URI_DATABASE")
async def get_connection():
    """Open a connection to the local RethinkDB server (default port 28015)."""
    return await r.connect("localhost", 28015)
# Registry of callable RPC methods, keyed by function name.
methods = {}


def method(f):
    """Register coroutine *f* as an RPC method under its function name."""

    async def helper(*args, **kwargs):
        return await f(*args, **kwargs)

    methods[f.__name__] = helper
    return helper
# Registry of subscription query factories, keyed by name.
subs = {}


def sub(f):
    """Register *f* as a subscription under its function name."""
    subs[f.__name__] = f
    return f


def sub_with_aliases(aliases):
    """Like :func:`sub`, but also register *f* under each name in *aliases*."""

    def decorator(f):
        for name in (f.__name__, *aliases):
            subs[name] = f
        return f

    return decorator
def check(attr, type):
    """Raise CheckError unless *attr* is an instance of *type*.

    Note: the parameter name ``type`` shadows the builtin inside this
    function; kept for backward compatibility with existing callers.
    """
    if not isinstance(attr, type):
        # Bug fix: the original built the message with ``attr + '...'``,
        # which raised TypeError (not CheckError) for non-string attrs.
        raise CheckError(f"{attr} is not of type {type}")
# Lists of hook callbacks keyed by event name.
# NOTE(review): nothing in this module appears to call these hooks yet.
hooks = {'before_insert': [],
         'before_update': []
         }
class MethodError(Exception):
    """Raised when an RPC method invocation fails."""
    pass


class CheckError(Exception):
    """Raised by check() when a value fails a type check."""
    pass
async def sdp(websocket, path):
    """Handle one websocket client speaking the Subscription Data Protocol.

    Dispatches incoming JSON messages: 'method' invokes a registered RPC
    method, 'sub' starts a RethinkDB changefeed that streams added /
    changed / removed events back to the client, and 'unsub' cancels that
    feed. All running feeds are cancelled when the connection ends.
    """
    async def watch(sub_id, query):
        # Stream changefeed events for one subscription to the client.
        connection = await get_connection()
        feed = await query.changes(include_states=True, include_initial=True).run(connection)
        while (await feed.fetch_next()):
            item = await feed.next()
            print(item)
            state = item.get('state')
            if state == 'ready':
                await send_ready(sub_id)
            elif state == 'initializing':
                await send_initializing(sub_id)
            else:
                # No old_val -> insert; no new_val -> delete; both -> update.
                if item.get('old_val') is None:
                    await send_added(sub_id, item['new_val'])
                elif item.get('new_val') is None:
                    await send_removed(sub_id, item['old_val']['id'])
                else:
                    await send_changed(sub_id, item['new_val'])

    async def send(data):
        # Serialize datetimes as EJSON-style {'$date': epoch_milliseconds}.
        def helper(x):
            if isinstance(x, datetime):
                return {'$date': x.timestamp()*1000}
            else:
                return x
        message = json.dumps(data, default=helper)
        await websocket.send(message)
    # One small sender per protocol message kind.
    async def send_result(id, result):
        await send({'msg': 'result', 'id': id, 'result': result})
    async def send_error(id, error):
        await send({'msg': 'error', 'id': id, 'error': error})
    async def send_added(sub_id, doc):
        await send({'msg': 'added', 'id': sub_id, 'doc': doc})
    async def send_changed(sub_id, doc):
        await send({'msg': 'changed', 'id': sub_id, 'doc': doc})
    async def send_removed(sub_id, doc_id):
        await send({'msg': 'removed', 'id': sub_id, 'doc_id': doc_id})
    async def send_ready(sub_id):
        await send({'msg': 'ready', 'id': sub_id})
    async def send_initializing(sub_id):
        await send({'msg': 'initializing', 'id': sub_id})
    async def send_nosub(sub_id, error):
        await send({'msg': 'nosub', 'id': sub_id, 'error': error})
    async def send_nomethod(method_id, error):
        await send({'msg': 'nomethod', 'id': method_id, 'error': error})
    global method
    # NOTE(review): this registers a per-connection 'login' closure in the
    # *global* methods registry; concurrent connections would overwrite each
    # other's login handler -- confirm this is intended.
    @method
    async def login(user):
        nonlocal user_id
        user_id = user
    registered_feeds = {}
    #feeds_with_observers = []
    # NOTE(review): hard-coded default user looks like a debugging leftover.
    user_id = '<EMAIL>' #None
    #remove_observer_from_item = {}
    try:
        async for msg in websocket:
            #if msg == 'stop':
            #    return
            # Revive {'$date': ms} objects back into datetime instances.
            def helper(dct):
                if '$date' in dct.keys():
                    d = datetime.utcfromtimestamp(dct['$date']/1000.0)
                    return d
                    #return d.replace(tzinfo=pytz.UTC)
                return dct
            data = json.loads(msg, object_hook=helper)
            print('>>>', data)
            try:
                message = data['msg']
                # NOTE(review): if data['msg'] raises KeyError, ``id`` is
                # still unbound in the except handler below.
                id = data['id']
                if message == 'method':
                    params = data['params']
                    method = data['method']
                    if method not in methods.keys():
                        await send_nomethod(id, 'method does not exist')
                    else:
                        try:
                            method = methods[method]
                            result = await method(user_id, **params)
                            await send_result(id, result)
                        except Exception as e:
                            await send_error(id, str(e))
                            traceback.print_tb(e.__traceback__)
                            print(e)
                elif message == 'sub':
                    #name = data['name']
                    params = data['params']
                    if id not in subs.keys():
                        await send_nosub(id, 'sub does not exist')
                    else:
                        # The sub factory builds the query; watch() streams it.
                        query = subs[id](user_id, **params)
                        #registered_feeds[id] = asyncio.create_task(watch(id, query))
                        registered_feeds[id] = get_event_loop().create_task(watch(id, query))
                        await send_ready(id)
                elif message == 'unsub':
                    if id in registered_feeds.keys():
                        feed = registered_feeds[id]
                        feed.cancel()
                        #if remove_observer_from_item.get(id):
                        #    for remove in remove_observer_from_item[id].values():
                        #        remove()
                        #    del remove_observer_from_item[id]
                        #del registered_feeds[id]
            except KeyError as e:
                await send_error(id, str(e))
                #
    finally:
        #for k in remove_observer_from_item.keys():
        #    for remove in remove_observer_from_item[k].values():
        #        remove()
        # Cancel any changefeed tasks still running for this connection.
        for feed in registered_feeds.values():
            print('cancelling feed')
            feed.cancel()
| [
"datetime.datetime.utcfromtimestamp",
"json.loads",
"json.dumps",
"traceback.print_tb",
"asyncio.get_event_loop",
"rethinkdb.r.connect"
] | [((391, 420), 'rethinkdb.r.connect', 'r.connect', (['"""localhost"""', '(28015)'], {}), "('localhost', 28015)\n", (400, 420), False, 'from rethinkdb import r\n'), ((2226, 2258), 'json.dumps', 'json.dumps', (['data'], {'default': 'helper'}), '(data, default=helper)\n', (2236, 2258), False, 'import json\n'), ((3858, 3893), 'json.loads', 'json.loads', (['msg'], {'object_hook': 'helper'}), '(msg, object_hook=helper)\n', (3868, 3893), False, 'import json\n'), ((3681, 3729), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (["(dct['$date'] / 1000.0)"], {}), "(dct['$date'] / 1000.0)\n", (3706, 3729), False, 'from datetime import datetime\n'), ((4630, 4665), 'traceback.print_tb', 'traceback.print_tb', (['e.__traceback__'], {}), '(e.__traceback__)\n', (4648, 4665), False, 'import traceback\n'), ((5157, 5173), 'asyncio.get_event_loop', 'get_event_loop', ([], {}), '()\n', (5171, 5173), False, 'from asyncio import get_event_loop\n')] |
# -*- coding: utf-8 -*-
# Copyright 2018 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The module plug-in providing the ``env`` command.
.. moduleauthor:: <NAME> <<EMAIL>>
:copyright: (c) Copyright 2018 by <NAME>.
:license: Apache License 2.0, see LICENSE.txt for full details.
"""
import fnmatch
import logging
import os
from collections import OrderedDict
from schema import And
from schema import Optional
from schema import Or
from schema import Schema
from schema import SchemaError
from deployer.rendering import render
from deployer.result import Result
from .plugin import Plugin
LOGGER = logging.getLogger(__name__)
class Env(Plugin):
    """Manage environment variables.

    Handles the ``env`` tag of a deployment node: ``set`` assigns
    variables (values rendered with the current context variables) and
    ``unset`` removes variables matching glob patterns.
    """

    TAG = 'env'

    # set: mapping of non-empty name -> non-empty value
    # unset: one glob pattern, or a non-empty list of glob patterns
    SCHEMA = {
        Optional('set'): {And(str, len): And(str, len)},
        Optional('unset'): And(len, Or(And(str, len), [And(str, len)])),
    }

    def __init__(self, node):
        """Ctor."""
        self.env_set = node['set'] if 'set' in node else {}
        if 'unset' in node:
            # Normalize a single pattern to a one-element list.
            if type(node['unset']) not in (list,):
                self.env_unset = [node['unset']]
            else:
                self.env_unset = node['unset']
        else:
            self.env_unset = []

    @staticmethod
    def valid(node):
        """Ensure node structure is valid."""
        # NOTE(review): requires an OrderedDict specifically, not any
        # mapping; presumably the document loader yields OrderedDicts.
        if type(node) is not OrderedDict:
            return False
        if Env.TAG not in node:
            return False
        try:
            Schema(Env.SCHEMA).validate(node[Env.TAG])
        except SchemaError:
            return False
        return True

    @staticmethod
    def build(node):
        """Build an Env node."""
        yield Env(node['env'])

    def execute(self, context):
        """Perform the plugin's task purpose."""
        # Iterate over a copy since entries are deleted from os.environ.
        for env in os.environ.copy():
            for pattern in self.env_unset:
                if fnmatch.fnmatchcase(env, pattern):
                    LOGGER.debug("Removing '%s' from system environment.", env)
                    try:
                        # os.unsetenv may be unavailable on some platforms.
                        os.unsetenv(env)
                    except AttributeError:  # noqa: no-cover
                        pass  # noqa: no-cover
                    del os.environ[env]
                else:
                    # NOTE(review): logs once per non-matching pattern for
                    # every variable -- possibly meant to log only once.
                    LOGGER.debug("Keeping '%s' present in the system environment.", env)
        for key, value in self.env_set.items():
            if context:
                value = render(value, **context.variables.last())
            LOGGER.debug("Setting '%s' to '%s', in the system environment.", key, value)
            os.putenv(key, value)
            os.environ[key] = value
        return Result(result='success')
| [
"logging.getLogger",
"schema.Optional",
"fnmatch.fnmatchcase",
"schema.Schema",
"os.putenv",
"os.environ.copy",
"deployer.result.Result",
"os.unsetenv",
"schema.And"
] | [((1126, 1153), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1143, 1153), False, 'import logging\n'), ((1256, 1271), 'schema.Optional', 'Optional', (['"""set"""'], {}), "('set')\n", (1264, 1271), False, 'from schema import Optional\n'), ((1313, 1330), 'schema.Optional', 'Optional', (['"""unset"""'], {}), "('unset')\n", (1321, 1330), False, 'from schema import Optional\n'), ((2295, 2312), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (2310, 2312), False, 'import os\n'), ((3201, 3225), 'deployer.result.Result', 'Result', ([], {'result': '"""success"""'}), "(result='success')\n", (3207, 3225), False, 'from deployer.result import Result\n'), ((1274, 1287), 'schema.And', 'And', (['str', 'len'], {}), '(str, len)\n', (1277, 1287), False, 'from schema import And\n'), ((1289, 1302), 'schema.And', 'And', (['str', 'len'], {}), '(str, len)\n', (1292, 1302), False, 'from schema import And\n'), ((3128, 3149), 'os.putenv', 'os.putenv', (['key', 'value'], {}), '(key, value)\n', (3137, 3149), False, 'import os\n'), ((1344, 1357), 'schema.And', 'And', (['str', 'len'], {}), '(str, len)\n', (1347, 1357), False, 'from schema import And\n'), ((2376, 2409), 'fnmatch.fnmatchcase', 'fnmatch.fnmatchcase', (['env', 'pattern'], {}), '(env, pattern)\n', (2395, 2409), False, 'import fnmatch\n'), ((1360, 1373), 'schema.And', 'And', (['str', 'len'], {}), '(str, len)\n', (1363, 1373), False, 'from schema import And\n'), ((1972, 1990), 'schema.Schema', 'Schema', (['Env.SCHEMA'], {}), '(Env.SCHEMA)\n', (1978, 1990), False, 'from schema import Schema\n'), ((2540, 2556), 'os.unsetenv', 'os.unsetenv', (['env'], {}), '(env)\n', (2551, 2556), False, 'import os\n')] |
#!/usr/bin/env python
import numpy as np
dim = 3

# Start from all-ones matrices and the vector b = [2, 3, 4].
A = np.ones((dim, dim))
B = A.copy()
b = np.arange(2, dim + 2, dtype=float)

print('A')
print(A)
print('b')
print(b)

# Scale each column of A by the matching entry of b ...
for col in range(dim):
    A[:, col] *= b[col]
print('% (1)')
print(A)

# ... which is exactly what row-wise broadcasting computes.
print('% (2)')
print(B * b)
| [
"numpy.empty",
"numpy.ones"
] | [((56, 81), 'numpy.ones', 'np.ones', ([], {'shape': '(dim, dim)'}), '(shape=(dim, dim))\n', (63, 81), True, 'import numpy as np\n'), ((99, 112), 'numpy.empty', 'np.empty', (['dim'], {}), '(dim)\n', (107, 112), True, 'import numpy as np\n')] |
from google.oauth2 import service_account
from googleapiclient.discovery import build
from os import path
from googleapiclient.http import MediaFileUpload
class Gdrive:
    """A class represent google drive.

    Thin wrapper around the Google Drive v3 API authenticated with a
    service account.  All lookups go through ``files().list`` with a
    ``q`` search string (see the Drive API search-query reference).
    """
    def __init__(self, credentials_filepath, scopes, download_dir, upload_dir):
        """
        :str credentials_filepath: The path to the service account json file.
        :str scopes: User-defined scopes to request during the
        authorization grant.
        :str download_path: The path to the download folder.
        :str upload_path: The path to the upload folder.
        """
        self.service = None  # Drive API client; built by connect() below
        self.credentials_filepath = credentials_filepath
        self.scopes = scopes
        self.download_dir = download_dir
        self.upload_dir = upload_dir
        self.connect()
    def connect(self):
        """
        Creates a Credentials instance from a service account json file
        and builds the Drive v3 service client into ``self.service``.
        :return: None
        """
        credentials = service_account.Credentials.from_service_account_file(
            self.credentials_filepath, scopes=self.scopes
        )
        self.service = build("drive", "v3", credentials=credentials)
    def check_does_folder_exist(self, folder_name):
        """
        Checking if folder with specified name exist.
        :str folder_name: Name of the folder which should be search.
        :return: True if folder found. Otherwise False.
        """
        # NOTE(review): this query uses 'name contains' (substring match),
        # unlike check_does_file_exist which tests exact equality ('name =').
        # Confirm the substring semantics are intended.
        folder_search = "mimeType = 'application/vnd.google-apps.folder'"
        results = (
            self.service.files()
            .list(
                fields="nextPageToken, files(id, name)",
                orderBy="createdTime",
                q=f"name contains '{folder_name}' and {folder_search}",
            )
            .execute()
        )
        item = results.get("files", [])
        if item:
            return True
        return False
    def check_does_file_exist(self, file_name):
        """
        Checking if file with specified name exist.
        :str file_name: Name of the file which should be search.
        :return: True if file found. Otherwise False.
        """
        # Excludes folders: anything that is not the folder mimeType is a file.
        file_search = "mimeType != 'application/vnd.google-apps.folder'"
        results = (
            self.service.files()
            .list(
                fields="nextPageToken, files(id, name)",
                orderBy="createdTime",
                q=f"name = '{file_name}' and {file_search}",
            )
            .execute()
        )
        item = results.get("files", [])
        if item:
            return True
        return False
    def find_folder(self, folder_name):
        """
        Searching for folder name.
        :str folder_name: Name of the folder which should be search.
        :return: id, name of the founded folder.
        """
        # NOTE(review): item[-1] picks the last hit (newest, given
        # orderBy="createdTime") and raises IndexError when nothing matches.
        folder_search = "mimeType = 'application/vnd.google-apps.folder'"
        results = (
            self.service.files()
            .list(
                fields="nextPageToken, files(id, name)",
                orderBy="createdTime",
                q=f"name = '{folder_name}' and {folder_search}",
            )
            .execute()
        )
        item = results.get("files", [])
        return item[-1]["id"], item[-1]["name"]
    def find_file(self, file_name):
        """
        Searching for file name.
        :str file_name: Name of the file which should be search.
        :return: id, name of the founded folder.
        """
        # NOTE(review): same as find_folder -- raises IndexError on no match.
        file_search = "mimeType != 'application/vnd.google-apps.folder'"
        results = (
            self.service.files()
            .list(
                fields="nextPageToken, files(id, name)",
                orderBy="createdTime",
                q=f"name = '{file_name}' and {file_search}",
            )
            .execute()
        )
        item = results.get("files", [])
        return item[-1]["id"], item[-1]["name"]
    def get_files_in_folder(self, folder_id):
        """
        Searching for all files sorted by createdTime inside folder.
        :str folder_id: : Id of the folder.
        :return: [{"id":"str", "name":"str"}]
        """
        file_search = "mimeType != 'application/vnd.google-apps.folder'"
        results = (
            self.service.files()
            .list(
                fields="nextPageToken, files(id, name)",
                orderBy="createdTime",
                q=f"'{folder_id}' in parents and {file_search}",
            )
            .execute()
        )
        item = results.get("files", [])
        return item
    def get_dirs_in_folder(self, folder_id):
        """
        Searching for all folders sorted by createdTime inside folder.
        :str folder_id: : Id of the folder.
        :return: [{"id":"str", "name":"str"}]
        """
        file_search = "mimeType = 'application/vnd.google-apps.folder'"
        results = (
            self.service.files()
            .list(
                fields="nextPageToken, files(id, name)",
                orderBy="createdTime",
                q=f"'{folder_id}' in parents and {file_search}",
            )
            .execute()
        )
        item = results.get("files", [])
        return item
    def download_file(self, file_id, file_name, download_path=None):
        """
        Download file by file_id.
        :str file_id: Id of the file.
        :str file_name: Name of the file.
        :str download_path: The path to the download folder. If None download path will be taken from self.download_dir
        :return: None
        """
        if download_path:
            path_dir = path.join(download_path, file_name)
        else:
            path_dir = path.join(self.download_dir, file_name)
        # Fetch the raw file content and write it to disk in binary mode.
        file_body = self.service.files().get_media(fileId=file_id).execute()
        with open(path_dir, "wb") as wer:
            wer.write(file_body)
    def create_folder(self, parent_id, folder_name):
        """
        Create folder inside parent folder.
        :str parent_id: Parent folder id.
        :str folder_name: Folder name.
        :return: id of the folder
        """
        file_metadata = {
            "name": folder_name,
            "mimeType": "application/vnd.google-apps.folder",
            "parents": [parent_id],
        }
        file = self.service.files().create(body=file_metadata, fields="id").execute()
        return file["id"]
    def upload_file(self, folder_id, file_name, upload_path=None):
        """
        Upload file to folder with folder_id.
        :str folder_id: Parent id.
        :str file_name: File name.
        :str upload_path: The path to the upload folder. If None upload path will be taken from self.upload_dir
        :return: id of the file
        """
        if upload_path:
            path_dir = path.join(upload_path, file_name)
        else:
            path_dir = path.join(self.upload_dir, file_name)
        file_metadata = {"name": file_name, "parents": [folder_id]}
        # Resumable upload: tolerates interruptions for larger files.
        media = MediaFileUpload(path_dir, resumable=True)
        file = (
            self.service.files()
            .create(body=file_metadata, media_body=media, fields="id")
            .execute()
        )
        return file["id"]
    # def delete_file(self, file_id):
    #     """
    #     delete file or folder by id
    #     :int file_id: Id of the file to be deleted
    #     :return: None
    #     """
    #     self.service.files().delete(fileId=file_id).execute()
| [
"os.path.join",
"googleapiclient.discovery.build",
"googleapiclient.http.MediaFileUpload",
"google.oauth2.service_account.Credentials.from_service_account_file"
] | [((995, 1100), 'google.oauth2.service_account.Credentials.from_service_account_file', 'service_account.Credentials.from_service_account_file', (['self.credentials_filepath'], {'scopes': 'self.scopes'}), '(self.\n credentials_filepath, scopes=self.scopes)\n', (1048, 1100), False, 'from google.oauth2 import service_account\n'), ((1141, 1186), 'googleapiclient.discovery.build', 'build', (['"""drive"""', '"""v3"""'], {'credentials': 'credentials'}), "('drive', 'v3', credentials=credentials)\n", (1146, 1186), False, 'from googleapiclient.discovery import build\n'), ((6968, 7009), 'googleapiclient.http.MediaFileUpload', 'MediaFileUpload', (['path_dir'], {'resumable': '(True)'}), '(path_dir, resumable=True)\n', (6983, 7009), False, 'from googleapiclient.http import MediaFileUpload\n'), ((5591, 5626), 'os.path.join', 'path.join', (['download_path', 'file_name'], {}), '(download_path, file_name)\n', (5600, 5626), False, 'from os import path\n'), ((5664, 5703), 'os.path.join', 'path.join', (['self.download_dir', 'file_name'], {}), '(self.download_dir, file_name)\n', (5673, 5703), False, 'from os import path\n'), ((6774, 6807), 'os.path.join', 'path.join', (['upload_path', 'file_name'], {}), '(upload_path, file_name)\n', (6783, 6807), False, 'from os import path\n'), ((6845, 6882), 'os.path.join', 'path.join', (['self.upload_dir', 'file_name'], {}), '(self.upload_dir, file_name)\n', (6854, 6882), False, 'from os import path\n')] |
#
# python_grabber
#
import cv2
import numpy as np
def save_image(filename, img):
    """Write *img* to disk at *filename* via cv2.imwrite."""
    cv2.imwrite(filename, img)
def sepia(img):
    """Apply the classic sepia colour transform to *img* via cv2.transform."""
    # Fixed 3x3 sepia weight matrix (float32, as cv2.transform expects).
    sepia_kernel = np.array(
        [
            [0.272, 0.534, 0.131],
            [0.349, 0.686, 0.168],
            [0.393, 0.769, 0.189],
        ],
        dtype=np.float32,
    )
    return cv2.transform(img, sepia_kernel)
def edge_preserving(img):
    """Apply OpenCV's edge-preserving smoothing filter to *img*."""
    return cv2.edgePreservingFilter(img)
def stylization(img):
    """Apply OpenCV's stylization (non-photorealistic) filter to *img*."""
    return cv2.stylization(img)
def pencil_sketch(img):
    """Return the second (colour) output of cv2.pencilSketch for *img*.

    cv2.pencilSketch returns a pair; as in the original code, the first
    element is discarded and only the second is returned.
    """
    return cv2.pencilSketch(img)[1]
| [
"cv2.imwrite",
"cv2.transform",
"cv2.pencilSketch",
"cv2.edgePreservingFilter",
"cv2.stylization",
"numpy.float32"
] | [((89, 115), 'cv2.imwrite', 'cv2.imwrite', (['filename', 'img'], {}), '(filename, img)\n', (100, 115), False, 'import cv2\n'), ((147, 233), 'numpy.float32', 'np.float32', (['[[0.272, 0.534, 0.131], [0.349, 0.686, 0.168], [0.393, 0.769, 0.189]]'], {}), '([[0.272, 0.534, 0.131], [0.349, 0.686, 0.168], [0.393, 0.769, \n 0.189]])\n', (157, 233), True, 'import numpy as np\n'), ((265, 291), 'cv2.transform', 'cv2.transform', (['img', 'kernel'], {}), '(img, kernel)\n', (278, 291), False, 'import cv2\n'), ((331, 360), 'cv2.edgePreservingFilter', 'cv2.edgePreservingFilter', (['img'], {}), '(img)\n', (355, 360), False, 'import cv2\n'), ((396, 416), 'cv2.stylization', 'cv2.stylization', (['img'], {}), '(img)\n', (411, 416), False, 'import cv2\n'), ((456, 477), 'cv2.pencilSketch', 'cv2.pencilSketch', (['img'], {}), '(img)\n', (472, 477), False, 'import cv2\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
def match_sxz(noun):
    """Match nouns ending in s, x or z (truthy Match object, else None)."""
    sibilant_ending = r"[sxz]$"
    return re.search(sibilant_ending, noun)
def apply_sxz(noun):
    """Pluralise a noun ending in s, x or z by appending 'es'.

    The original used ``re.sub('$', 'es', noun)``, an obscure way to
    append a suffix which also substitutes twice when the input ends in
    a newline ('$' matches both before a trailing newline and at the
    very end).  Plain concatenation is the straightforward equivalent.
    """
    return noun + 'es'
def match_h(noun):
    """Match nouns ending in 'h' not preceded by a/e/i/o/u/d/g/k/p/r/t."""
    h_ending = r"[^aeioudgkprt]h$"
    return re.search(h_ending, noun)
def apply_h(noun):
    """Pluralise a noun ending in ch/sh-style 'h' by appending 'es'.

    Replaces the original ``re.sub('$', 'es', noun)``: regex substitution
    is an obscure way to append a constant suffix and double-substitutes
    for newline-terminated input.  Simple concatenation is equivalent for
    the words this rule applies to.
    """
    return noun + 'es'
def match_y(noun):
    """Match nouns ending in a consonant followed by 'y'."""
    consonant_y_ending = r"[^aeiou]y$"
    return re.search(consonant_y_ending, noun)
def apply_y(noun):
    """Pluralise a consonant+y noun by replacing the trailing 'y' with 'ies'."""
    return re.sub(r"y$", "ies", noun)
def match_default(noun):
    """Fallback rule: matches every noun (always returns True)."""
    return True
def apply_default(noun):
    """Default pluralisation: append a plain 's'."""
    return f"{noun}s"
# Rule table: each entry pairs a predicate with the transformation to
# apply when that predicate matches.  Order matters: first match wins.
rules = (
    (match_sxz, apply_sxz),
    (match_h, apply_h),
    (match_y, apply_y),
    (match_default, apply_default),
)

def plural(noun):
    """Pluralise *noun* using the first matching rule from ``rules``."""
    for predicate, transform in rules:
        if predicate(noun):
            return transform(noun)

print(plural('boy'))
'''def plural(noun):
if match_sxz(noun):
return apply_sxz(noun)
if match_h(noun):
return apply_h(noun)
if match_y(noun):
return apply_y(noun)
if match_default(noun):
return apply_default(noun)
''' | [
"re.sub",
"re.search"
] | [((91, 116), 're.search', 're.search', (['"""[sxz]$"""', 'noun'], {}), "('[sxz]$', noun)\n", (100, 116), False, 'import re\n'), ((148, 171), 're.sub', 're.sub', (['"""$"""', '"""es"""', 'noun'], {}), "('$', 'es', noun)\n", (154, 171), False, 'import re\n'), ((200, 235), 're.search', 're.search', (['"""[^aeioudgkprt]h$"""', 'noun'], {}), "('[^aeioudgkprt]h$', noun)\n", (209, 235), False, 'import re\n'), ((265, 288), 're.sub', 're.sub', (['"""$"""', '"""es"""', 'noun'], {}), "('$', 'es', noun)\n", (271, 288), False, 'import re\n'), ((317, 346), 're.search', 're.search', (['"""[^aeiou]y$"""', 'noun'], {}), "('[^aeiou]y$', noun)\n", (326, 346), False, 'import re\n'), ((376, 401), 're.sub', 're.sub', (['"""y$"""', '"""ies"""', 'noun'], {}), "('y$', 'ies', noun)\n", (382, 401), False, 'import re\n')] |
# Extrahiert die Transaktionen aus dem Mempool
def getTxnsFromPool(MasterObj):
    """Drain all pending transactions from the node's mempool.

    Bug fix: the original removed items from ``MasterObj.mempool`` while
    iterating over it, which skips every other element and leaves half the
    pool behind.  This version snapshots the pool first, then empties it,
    so every transaction is returned exactly once.

    :param MasterObj: object exposing a ``mempool`` list of transactions.
    :return: list with every transaction that was in the mempool.
    """
    selected = list(MasterObj.mempool)
    MasterObj.mempool.clear()
    for txn in selected:
        print('Transaction {} selected'.format(txn.getTxHash()))
    return selected
# Gibt die Höhe aller Gebühren welche verwendet werden an
def getTransactionsTotalFee(CoinObj, *Transactions):
    """Sum the fees denominated in *CoinObj* across all given transactions.

    Only fee entries whose ``coin()`` equals *CoinObj* contribute; fees in
    other coins are ignored.  Returns 0 when no matching fee exists.
    """
    return sum(
        fee.get()
        for txn in Transactions
        for fee in txn.getFees()
        if fee.coin() == CoinObj
    )
def CryptoNightMinerFnc(q, block_root_hash, diff):
    """Proof-of-work search: find a nonce whose CryptoNight hash meets *diff*.

    Runs until a qualifying nonce is found, then puts a result dict
    ({'hash', 'nonce', 'hrate', 'trate'}) on *q* and returns.  Intended to
    run in a separate process (see CryptonightMiner.MineBlock).

    :param q: multiprocessing queue used to hand the result to the parent.
    :param block_root_hash: bytes -- root hash of the block being mined.
    :param diff: target difficulty; a hash qualifies when
        base_diff / int(hex_hash, 16) >= diff.
    """
    current_nonce = 0
    # Largest 256-bit value; also bounds the nonce search space below.
    base_diff = 2**256-1
    # NOTE(review): `time` is imported twice on this line -- harmless but redundant.
    import time, datetime, struct, binascii, pycryptonight, time
    started = time.time()
    hash_count = 0
    for n in range(base_diff):
        # Hash of (root hash || decimal nonce); the trailing args (4, 0, 1)
        # select the cn_slow_hash variant -- see pycryptonight docs, TODO confirm.
        hashy = pycryptonight.cn_slow_hash( bytes( block_root_hash + str(current_nonce).encode() ), 4, 0, 1)
        hex_hash = binascii.hexlify(hashy)
        hash_count += 1
        if base_diff / int(hex_hash, 16) >= diff:
            elapsed = time.time() - started
            # Average hash rate over the whole search (hashes per second).
            hr = int(int(hash_count) / int(elapsed))
            q.put({ 'hash' : hex_hash, 'nonce' : current_nonce, 'hrate' : hr, 'trate' : hash_count })
            return
        else: current_nonce += 1
## CryptonightMiner
class CryptonightMiner:
    """Threaded block producer: drains the mempool, assembles the coinbase
    UTXOs, mines candidate blocks via CryptoNight proof-of-work in a child
    process and appends the result to the chain.

    :param ChainObject: blockchain instance (mempool, coins, block access).
    :param UseCPPBinary: accepted but unused here -- presumably selects a
        native miner backend; TODO confirm.
    """
    def __init__(self,ChainObject,UseCPPBinary=False):
        self.chain = ChainObject
        self.shutdown = False      # set True to stop the mining loop
        self.miner_address = None  # reward/fee recipient; set in Start()
        self.running = False       # True while the worker thread is alive
        self.hrate = 0             # last measured hash rate
        # The miner worker thread is declared here
        import threading
        def miner_thread():
            from apollon.utxo import CoinbaseInUtxo, LagacyOutUtxo, createFeeInputUtxo
            from apollon.transaction import CoinbaseTransaction
            from apollon.block import BlockConstruct
            # Signal that the thread is running
            self.running = True
            # Capture the current miner address
            curr_miner = self.miner_address
            # Keep producing new blocks until shutdown is requested
            while not self.shutdown:
                # Take the open transactions out of the memory pool
                cur_transactions = getTxnsFromPool(self.chain)
                # Fetch the metadata of the latest block
                last_block_mdata = self.chain.getLastBlockMetaData(True)
                next_block_height = last_block_mdata['block_height'] + 1
                last_block_hash = last_block_mdata['block_hash']
                # Build the coinbase input/output UTXOs #TODO
                coinbase_utxo_pairs = list()
                for i in self.chain.getChainCoins():
                    # Does this coin pay a block reward at the next height?
                    has_reward = i.hasRewardForBlock(next_block_height)
                    # Total transaction fees denominated in this coin
                    transaction_total_fees = getTransactionsTotalFee(i, *cur_transactions)
                    # Case 1: both a block reward and transaction fees exist # TODO
                    if has_reward == True and transaction_total_fees != 0:
                        # Input UTXO for the block reward
                        reward_in_utxo = i.createNewRewardInputUtxo(next_block_height)
                        # Input UTXO for the collected fees
                        fee_in_utxo = createFeeInputUtxo(i, cur_transactions)
                        # Should part of the transaction fees be burned?
                        if i.minerForceBurnFee() == True:
                            # Determine how much must be burned
                            burn_value = i.calcMinerBurningAmountValue(fee_in_utxo.getCoinValue(True))
                            reciver_value_total = reward_in_utxo.getCoinValue(True) + (fee_in_utxo.getCoinValue(True) - burn_value)
                            # Two output UTXOs: miner payout and burned amount.
                            # NOTE(review): the burn output is also addressed to
                            # curr_miner -- confirm that is intended.
                            miner_outxo = LagacyOutUtxo(curr_miner, reciver_value_total, i, *[reward_in_utxo, fee_in_utxo])
                            burn_outxo = LagacyOutUtxo(curr_miner, burn_value, i, *[reward_in_utxo, fee_in_utxo])
                            # Append the UTXOs to the list
                            coinbase_utxo_pairs.append(reward_in_utxo)
                            coinbase_utxo_pairs.append(fee_in_utxo)
                            coinbase_utxo_pairs.append(miner_outxo)
                            coinbase_utxo_pairs.append(burn_outxo)
                        else:
                            # A single output UTXO: reward + all fees to the miner
                            miner_outxo = LagacyOutUtxo(curr_miner, reward_in_utxo.getCoinValue(True) + fee_in_utxo.getCoinValue(True), i, *[reward_in_utxo, fee_in_utxo])
                            # Append the UTXOs to the list
                            coinbase_utxo_pairs.append(reward_in_utxo)
                            coinbase_utxo_pairs.append(fee_in_utxo)
                            coinbase_utxo_pairs.append(miner_outxo)
                    # Case 2: the miner only receives the block reward
                    elif has_reward == True and transaction_total_fees == 0:
                        # Input UTXO for the block reward
                        reward_in_utxo = i.createNewRewardInputUtxo(next_block_height)
                        # Output UTXO paying the reward to the miner
                        reward_out_utxo = LagacyOutUtxo(curr_miner, reward_in_utxo.getCoinValue(True), i, reward_in_utxo)
                        # Append the UTXOs to the list
                        coinbase_utxo_pairs.append(reward_in_utxo)
                        coinbase_utxo_pairs.append(reward_out_utxo)
                    # Case 3: no block reward, only transaction fees
                    elif has_reward == False and transaction_total_fees != 0:
                        # NOTE(review): this branch is unfinished -- it only sets
                        # reward_in_utxo to None and creates no fee UTXOs, so a
                        # fee-only coin contributes nothing to the coinbase.
                        reward_in_utxo = None
                # Build the coinbase transaction from all collected UTXOs
                coinbase = CoinbaseTransaction(*coinbase_utxo_pairs, BlockNo=next_block_height)
                # Assemble the full transaction list (coinbase first)
                totalls = list(); totalls.append(coinbase); totalls = totalls + cur_transactions
                # Determine the difficulty for the next block
                cdiff = self.chain.getBlockDiff(next_block_height)
                # Create the block constructor
                from apollon.atime import ATimeString
                try: new_block = BlockConstruct(last_block_hash, next_block_height, curr_miner, ATimeString.now(), cdiff, *totalls)
                except Exception as E: raise Exception(E)
                # Verify the candidate block object and its transactions
                if new_block.isValidateObject() == True and new_block.validateBlockTransactions() == True:
                    # Start the actual mining
                    try: nwoblck = self.MineBlock(new_block, cdiff); print('New Blocke Mined: {} @ {} :: {}'.format(nwoblck.getHeight(), nwoblck.getBlockHash(), nwoblck.getBlockTimestampe()))
                    except Exception as E: raise Exception(E)
                    # Append the mined block to the chain
                    try: self.chain.addBlock(nwoblck)
                    except Exception as E: raise Exception(E)
                else: print('Invalid New Block, abort')
            # Signal that the thread has terminated
            self.running = False
        self.miner_thread = threading.Thread(target=miner_thread)
    # Starts the mining loop
    def Start(self, MinerAddress):
        """Start the worker thread mining to *MinerAddress*.

        :return: 0 if the thread reports running within ~200 ms, else 1.
        :raises Exception: if the miner is already running.
        """
        # Validate the address type
        from apollon.apollon_address import LagacyAddress, PhantomAddress
        assert isinstance(MinerAddress, LagacyAddress) or isinstance(MinerAddress, PhantomAddress)
        # Refuse to start twice
        if self.miner_address != None or self.running != False: raise Exception('Miner alrady running')
        # Try to start the miner
        print('Starting Miner')
        self.miner_address = MinerAddress
        self.miner_thread.start()
        # Poll up to 20 x 10 ms until the thread reports it is running
        import time
        for i in range(2*10):
            if self.running == True and self.miner_address is not None: print('Miner started'); return 0 # The miner was started
            time.sleep(0.01)
        # The miner could not be started
        print('Miner start, aborted')
        return 1
    # Returns the last measured hash rate
    def getHashRate(self): return self.hrate
    # Reports the mining status
    def Status(self):
        """:return: 0 = running, 2 = never started,
        1 = address set but thread not (or no longer) running."""
        if self.running == True: return 0
        elif self.running == False and self.miner_address is None: return 2
        else: return 1
    # Returns the block currently being mined
    def getUnminedBlock(self):
        # NOTE(review): not implemented yet -- always returns None.
        return
    # Mines the actual block
    def MineBlock(self, constructed_block, diff):
        """Run the proof-of-work search for *constructed_block* in a child process.

        :return: the resulting MinedBlock.
        :raises AssertionError: if the constructor or its transactions are invalid.
        :raises Exception: if the mined block's hashes do not match the result.
        """
        # Validate the block constructor type
        from apollon.block import BlockConstruct, MinedBlock
        assert isinstance(constructed_block, BlockConstruct)
        # The block must satisfy the blockchain rules
        assert constructed_block.isValidateObject() == True
        # All transactions must be admissible
        assert constructed_block.validateBlockTransactions() == True
        # Spawn the mining process (fresh interpreter via 'spawn')
        import multiprocessing as mp
        ctx = mp.get_context('spawn')
        q = ctx.Queue()
        # Wait for the miner's result
        p = ctx.Process(target=CryptoNightMinerFnc, args=(q, constructed_block.getRootHash(True), diff))
        p.start()
        resolv = q.get()
        p.terminate()
        # Build the mined block from the found nonce
        mined_block = MinedBlock.fromConstructWithNonce(constructed_block, resolv['nonce'])
        if mined_block.getRootHash(True) != constructed_block.getRootHash(True): raise Exception('Unkown error')
        if mined_block.getBlockHash(True) != resolv['hash']: raise Exception('Not same hash')
        # Store the measured hash rate
        self.hrate = resolv['hrate']
        # Return the mined block
        return mined_block
"apollon.block.MinedBlock.fromConstructWithNonce",
"apollon.transaction.CoinbaseTransaction",
"binascii.hexlify",
"apollon.utxo.createFeeInputUtxo",
"multiprocessing.get_context",
"apollon.utxo.LagacyOutUtxo",
"time.sleep",
"threading.Thread",
"time.time",
"apollon.atime.ATimeString.now"
] | [((710, 721), 'time.time', 'time.time', ([], {}), '()\n', (719, 721), False, 'import time\n'), ((904, 927), 'binascii.hexlify', 'binascii.hexlify', (['hashy'], {}), '(hashy)\n', (920, 927), False, 'import time, datetime, struct, binascii, pycryptonight, time\n'), ((8224, 8261), 'threading.Thread', 'threading.Thread', ([], {'target': 'miner_thread'}), '(target=miner_thread)\n', (8240, 8261), False, 'import threading\n'), ((10326, 10349), 'multiprocessing.get_context', 'mp.get_context', (['"""spawn"""'], {}), "('spawn')\n", (10340, 10349), True, 'import multiprocessing as mp\n'), ((10685, 10754), 'apollon.block.MinedBlock.fromConstructWithNonce', 'MinedBlock.fromConstructWithNonce', (['constructed_block', "resolv['nonce']"], {}), "(constructed_block, resolv['nonce'])\n", (10718, 10754), False, 'from apollon.block import BlockConstruct, MinedBlock\n'), ((9161, 9177), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (9171, 9177), False, 'import time\n'), ((1029, 1040), 'time.time', 'time.time', ([], {}), '()\n', (1038, 1040), False, 'import time\n'), ((6651, 6719), 'apollon.transaction.CoinbaseTransaction', 'CoinbaseTransaction', (['*coinbase_utxo_pairs'], {'BlockNo': 'next_block_height'}), '(*coinbase_utxo_pairs, BlockNo=next_block_height)\n', (6670, 6719), False, 'from apollon.transaction import CoinbaseTransaction\n'), ((3617, 3656), 'apollon.utxo.createFeeInputUtxo', 'createFeeInputUtxo', (['i', 'cur_transactions'], {}), '(i, cur_transactions)\n', (3635, 3656), False, 'from apollon.utxo import CoinbaseInUtxo, LagacyOutUtxo, createFeeInputUtxo\n'), ((7241, 7258), 'apollon.atime.ATimeString.now', 'ATimeString.now', ([], {}), '()\n', (7256, 7258), False, 'from apollon.atime import ATimeString\n'), ((4281, 4366), 'apollon.utxo.LagacyOutUtxo', 'LagacyOutUtxo', (['curr_miner', 'reciver_value_total', 'i', '*[reward_in_utxo, fee_in_utxo]'], {}), '(curr_miner, reciver_value_total, i, *[reward_in_utxo,\n fee_in_utxo])\n', (4294, 4366), False, 'from apollon.utxo 
import CoinbaseInUtxo, LagacyOutUtxo, createFeeInputUtxo\n'), ((4405, 4477), 'apollon.utxo.LagacyOutUtxo', 'LagacyOutUtxo', (['curr_miner', 'burn_value', 'i', '*[reward_in_utxo, fee_in_utxo]'], {}), '(curr_miner, burn_value, i, *[reward_in_utxo, fee_in_utxo])\n', (4418, 4477), False, 'from apollon.utxo import CoinbaseInUtxo, LagacyOutUtxo, createFeeInputUtxo\n')] |
from math import cos, sin, exp, pi
from scipy.special import roots_legendre
from typing import Callable as Call
class Integrator(object):
    """Nested two-dimensional numerical integrator.

    Each of the two 1-D quadratures is chosen independently through *fn*:
    a truthy entry selects Simpson's rule, a falsy one Gauss-Legendre.
    Calling the instance evaluates the built-in physical integrand for a
    given parameter *p* over the limits in *lm*.
    """

    def __init__(self, lm: list[list[float]], n: list[int], fn: list[int]):
        self.lm = lm  # integration limits: [[x_lo, x_hi], [y_lo, y_hi]]
        self.n = n    # node counts for the outer / inner quadrature
        self.f1 = Integrator.simpson if fn[0] else Integrator.gauss
        self.f2 = Integrator.simpson if fn[1] else Integrator.gauss

    def __call__(self, p: float) -> float:
        """Evaluate the double integral of the integrand for parameter *p*."""
        integrand = Integrator._integrand(p)

        def inner(x: float) -> float:
            # Inner quadrature over y at fixed x.
            return self.f2(lambda y: integrand(x, y),
                           self.lm[1][0], self.lm[1][1], self.n[1])

        # Outer quadrature over x.
        return self.f1(inner, self.lm[0][0], self.lm[0][1], self.n[0])

    @staticmethod
    def _integrand(p: float) -> Call[[float, float], float]:
        """Build the physical integrand f(x, y) for parameter *p*."""
        def t(x: float, y: float) -> float:
            return 2 * cos(x) / (1 - sin(x) ** 2 * cos(y) ** 2)

        def f(x: float, y: float) -> float:
            return 4 / pi * (1 - exp(-p * t(x, y))) * cos(x) * sin(x)

        return f

    @staticmethod
    def simpson(f: Call[[float], float], a: float, b: float,
                n: int) -> float:
        """Composite Simpson rule on [a, b] with *n* nodes (n odd, n >= 3)."""
        if n < 3 or n % 2 == 0:
            raise Exception("Sorry, wrong n value")
        h = (b - a) / (n - 1.0)
        x = a
        acc = 0.0
        for _ in range((n - 1) // 2):
            acc += f(x) + 4 * f(x + h) + f(x + 2 * h)
            x += 2 * h
        return acc * h / 3

    @staticmethod
    def gauss(f: Call[[float], float], a: float, b: float,
              n: int) -> float:
        """n-point Gauss-Legendre quadrature on [a, b]."""
        nodes, weights = roots_legendre(n)
        half = (b - a) / 2
        # Map a node from the reference interval [-1, 1] onto [a, b].
        to_ab = lambda t: (b + a) / 2 + half * t
        return sum(half * w * f(to_ab(x)) for x, w in zip(nodes, weights))
| [
"math.cos",
"math.sin",
"scipy.special.roots_legendre"
] | [((1651, 1668), 'scipy.special.roots_legendre', 'roots_legendre', (['n'], {}), '(n)\n', (1665, 1668), False, 'from scipy.special import roots_legendre\n'), ((1006, 1012), 'math.sin', 'sin', (['x'], {}), '(x)\n', (1009, 1012), False, 'from math import cos, sin, exp, pi\n'), ((893, 899), 'math.cos', 'cos', (['x'], {}), '(x)\n', (896, 899), False, 'from math import cos, sin, exp, pi\n'), ((997, 1003), 'math.cos', 'cos', (['x'], {}), '(x)\n', (1000, 1003), False, 'from math import cos, sin, exp, pi\n'), ((907, 913), 'math.sin', 'sin', (['x'], {}), '(x)\n', (910, 913), False, 'from math import cos, sin, exp, pi\n'), ((921, 927), 'math.cos', 'cos', (['y'], {}), '(y)\n', (924, 927), False, 'from math import cos, sin, exp, pi\n')] |
from collections import defaultdict
import itertools
from skorch.net import NeuralNet
from skorch.dataset import Dataset
import pandas as pd
from sklearn import preprocessing
from tqdm import tqdm
import more_itertools as mit
import numpy as np
import skorch
import torch
import logging
# Silence matplotlib's verbose DEBUG/INFO logging.
logging.getLogger('matplotlib').setLevel(logging.WARNING)
# Binary traffic labels used throughout this module: 0.0 = normal, 1.0 = attack.
NORMAL_TRAFFIC = 0.
ATTACK_TRAFFIC = 1.
class ContextCriterion():
    """Base class for scoring model output against targets.

    Subclasses implement :meth:`score`.  An instance may be called either
    with a precomputed model output plus targets, or with a skorch
    ``NeuralNet`` plus the dataset inputs/targets -- in the latter case the
    forward pass is executed here without gradients.
    """
    def score(self, model_output, target):
        raise NotImplementedError

    def __call__(self, p1, p2, p3=None):
        if not isinstance(p1, NeuralNet):
            # p1 = model output, p2 = dataset.y
            return self.score(p1, p2)
        # p1 = model, p2 = dataset.X, p3 = dataset.y
        with torch.no_grad():
            model_output = p1.forward(p2)
        return self.score(model_output, p3)
class WindowedDataGenerator():
    """Slice per-host time series into fixed-length context windows and turn
    them into a skorch-ready dataset.

    Columns whose name starts with "_" are treated as metadata (e.g.
    ``_host``, ``_isanomaly``) and excluded from the model channels.
    """
    def __init__(self, overlapping, context_len):
        # overlapping in [0, 1): fraction of window overlap between steps.
        self.overlapping = overlapping
        self.context_len = context_len
        # Step between window starts; at least 1 even for full overlap.
        self.window_stepsize = max(int(context_len * (1 - overlapping)), 1)
    def dataframe_windows(self, df):
        """Yield DataFrames of ``context_len`` consecutive rows of *df*.

        Trailing partial windows (containing None index padding) are dropped.
        """
        df_len = len(df)
        wnds = mit.windowed(range(df_len), self.context_len, step=self.window_stepsize)
        wnds = filter(lambda x: None not in x, wnds)
        wnds_values = map(lambda x: df.iloc[list(x)].reset_index(), wnds)
        return wnds_values
    def anomaly_metadata(self, context):
        """Return (anomaly_type, anomaly_perc) for a window.

        anomaly_perc is the fraction of rows flagged anomalous; anomaly_type
        is the label of the first anomalous row, or "none".
        """
        anomaly_perc = len(context[context["_isanomaly"] != "none"]) / self.context_len
        anomaly_type = "none"
        if anomaly_perc > 0:
            anomaly_type = context.loc[context["_isanomaly"] != "none", "_isanomaly"].iloc[0]
        return anomaly_type, anomaly_perc
    def generate_context(self, df: pd.DataFrame):
        """Window *df* per host and stack the results into numpy arrays.

        :return: dict with keys "context", "host", "anomaly_type", "anomaly_perc".
        """
        samples = defaultdict(list)
        channels = [c for c in df.columns if c[0] != "_"]
        logging.debug("Windowing time series for each host")
        host_ts = df.groupby(level=["_host"])
        for host, ts in tqdm(host_ts):
            windows = self.dataframe_windows(ts)
            for context in windows:
                anomaly_type, anomaly_perc = self.anomaly_metadata(context)
                samples["anomaly_type"].append(anomaly_type)
                samples["anomaly_perc"].append(anomaly_perc)
                ctxvalues = context[channels].values
                samples["context"].append(ctxvalues)
                samples["host"].append(host)
        samples = { k: np.stack(v) for k, v in samples.items() }
        return samples
    @staticmethod
    def alternate_merge(ll):
        # Interleave the sub-lists round-robin; truncates to the shortest
        # sub-list because zip stops at the shortest input.
        return list(itertools.chain(*zip(*ll)))
    def sk_dataset(self, context_dictionary):
        """Convert the windowed arrays into a skorch Dataset (moved to GPU if available)."""
        skdset = {}
        skdset["context"] = torch.Tensor(context_dictionary["context"])
        skdset["host"] = preprocessing.LabelEncoder().fit_transform(context_dictionary["host"])
        an_perc = context_dictionary["anomaly_perc"]
        # Any window with a nonzero anomaly fraction is labelled as attack.
        Y = np.where(an_perc==0, NORMAL_TRAFFIC, an_perc)
        Y = np.where(Y > 0, ATTACK_TRAFFIC, Y)
        Y = torch.Tensor(Y)
        return self.Dataset2GPU(Dataset(skdset, Y))
    def Dataset2GPU(self, dataset):
        """Move the dataset tensors to the GPU when CUDA is available."""
        if torch.cuda.is_available():
            dataset.X["context"] = dataset.X["context"].cuda()
            dataset.y = dataset.y.cuda()
        return dataset
    def __call__(self, df_collection, to_sk_dataset=True, filter_anomaly=True):
        """Window every DataFrame in *df_collection* and merge the results.

        :param filter_anomaly: when True, keep only windows with no anomalies.
        :param to_sk_dataset: when True, wrap the result in a skorch Dataset.
        """
        model_input = defaultdict(list)
        for df in df_collection:
            ctxs = self.generate_context(df)
            for k, v in ctxs.items():
                model_input[k].append(v)
        model_input = { k: np.array(self.alternate_merge(v)) for k, v in model_input.items() }
        if filter_anomaly:
            normal_mask = np.where(model_input["anomaly_perc"] == 0)[0]
            model_input = { k: x[normal_mask] for k, x in model_input.items() }
        if to_sk_dataset:
            return self.sk_dataset(model_input)
        return model_input
class WindowedAnomalyDetector(skorch.net.NeuralNet):
    """skorch NeuralNet that can evaluate its module per-timestep.

    ``pointwise``/``pointwise_anomaly``/``pointwise_embedding`` slide a
    fully-overlapping window over each host's time series and attach the
    module's output to every row (edges padded with *pad_with*).
    """
    def __init__(self, *args, wlen=None, **kwargs):
        self.pointwise_ctxs = None
        if wlen is not None:
            self.initialize_context(wlen)
        super(WindowedAnomalyDetector, self).__init__(*args, **kwargs)

    def initialize_context(self, wlen):
        """Set the window length and build a fully-overlapping (step 1) generator."""
        self.wlen = wlen
        self.pointwise_ctxs = WindowedDataGenerator(1., wlen)

    def fit(self, *args, **kwargs):
        # Infer the window length from the training contexts: dim 1 of the
        # context tensor is the window (time) dimension.
        wlen = args[0].X["context"].size(1)
        self.initialize_context(wlen)
        super().fit(*args, **kwargs)

    def pointwise_embedding(self, samples):
        """Per-row embeddings via the module's ``toembedding``; edges padded with NaN."""
        return self.pointwise(samples, self.module_.toembedding, "_embedding", pad_with=np.nan)

    def pointwise_anomaly(self, samples):
        """Per-row anomaly score via the module's ``context_anomaly``."""
        return self.pointwise(samples, self.module_.context_anomaly, "_y_hat")

    def pointwise(self, samples, fun, label, pad_with=0.):
        """Evaluate *fun* on every sliding window and write it to column *label*.

        Accepts one DataFrame or a list of them; returns the same shape.
        Bug fix: the original wrapped a single DataFrame into a list and then
        re-tested ``isinstance(samples, list)`` on the wrapped value, so a
        single input was always returned as a one-element list.  We remember
        whether the input was a single DataFrame and unwrap the result.

        :raises AttributeError: if the window length has not been initialized.
        """
        if self.pointwise_ctxs is None:
            raise AttributeError("Not fitted, missing context len")
        single_input = not isinstance(samples, list)
        if single_input:
            samples = [samples]
        halfwlen = int(self.wlen / 2)
        # NOTE(review): `aus` adjusts the right edge of the valid slice for
        # even/odd window lengths -- verify the off-by-one for odd wlen.
        aus = 1 if (halfwlen % 2 == 0) else 0
        ebs_sl = slice(halfwlen, -halfwlen + aus)
        res = [[] for _ in range(len(samples))]
        channels = [c for c in samples[0].columns if c[0] != "_"]
        for i, df in enumerate(samples):
            host_ts = df.groupby(level=["_host"])
            for _, host_df in host_ts:
                windows = self.pointwise_ctxs.dataframe_windows(host_df)
                ctx_batch = np.stack([ctx[channels].values for ctx in windows])
                def _aperc(ctx):
                    # fraction of anomalous rows inside one window
                    return self.pointwise_ctxs.anomaly_metadata(ctx)[1]
                windows = self.pointwise_ctxs.dataframe_windows(host_df)
                aperc = np.array([_aperc(ctx) for ctx in windows])
                vaperc = np.full((len(host_df), 1), pad_with).squeeze()
                vaperc[ebs_sl] = aperc
                host_df["_aperc"] = vaperc
                with torch.no_grad():
                    pred = fun(torch.tensor(ctx_batch))
                # Fix windowing padding with zeros (hope no anomaly)
                if len(pred.shape) == 1:
                    y_hat = np.full((len(host_df), 1), pad_with).squeeze()
                    y_hat[ebs_sl] = pred.numpy()
                else:
                    y_hat = np.full((len(host_df), pred.size(1)), pad_with)
                    y_hat[ebs_sl] = pred.numpy()
                y_hat = [np.nan if np.isnan(x).any() else x for x in list(y_hat)]
                host_df[label] = y_hat
                res[i].append(host_df)
            res[i] = pd.concat(res[i])
        if single_input:
            return res[0]
        return res
| [
"logging.getLogger",
"sklearn.preprocessing.LabelEncoder",
"logging.debug",
"numpy.where",
"tqdm.tqdm",
"torch.Tensor",
"numpy.stack",
"torch.tensor",
"torch.cuda.is_available",
"collections.defaultdict",
"numpy.isnan",
"skorch.dataset.Dataset",
"torch.no_grad",
"pandas.concat"
] | [((288, 319), 'logging.getLogger', 'logging.getLogger', (['"""matplotlib"""'], {}), "('matplotlib')\n", (305, 319), False, 'import logging\n'), ((1754, 1771), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1765, 1771), False, 'from collections import defaultdict\n'), ((1843, 1895), 'logging.debug', 'logging.debug', (['"""Windowing time series for each host"""'], {}), "('Windowing time series for each host')\n", (1856, 1895), False, 'import logging\n'), ((1966, 1979), 'tqdm.tqdm', 'tqdm', (['host_ts'], {}), '(host_ts)\n', (1970, 1979), False, 'from tqdm import tqdm\n'), ((2699, 2742), 'torch.Tensor', 'torch.Tensor', (["context_dictionary['context']"], {}), "(context_dictionary['context'])\n", (2711, 2742), False, 'import torch\n'), ((2904, 2951), 'numpy.where', 'np.where', (['(an_perc == 0)', 'NORMAL_TRAFFIC', 'an_perc'], {}), '(an_perc == 0, NORMAL_TRAFFIC, an_perc)\n', (2912, 2951), True, 'import numpy as np\n'), ((2962, 2996), 'numpy.where', 'np.where', (['(Y > 0)', 'ATTACK_TRAFFIC', 'Y'], {}), '(Y > 0, ATTACK_TRAFFIC, Y)\n', (2970, 2996), True, 'import numpy as np\n'), ((3009, 3024), 'torch.Tensor', 'torch.Tensor', (['Y'], {}), '(Y)\n', (3021, 3024), False, 'import torch\n'), ((3134, 3159), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3157, 3159), False, 'import torch\n'), ((3391, 3408), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3402, 3408), False, 'from collections import defaultdict\n'), ((2439, 2450), 'numpy.stack', 'np.stack', (['v'], {}), '(v)\n', (2447, 2450), True, 'import numpy as np\n'), ((3066, 3084), 'skorch.dataset.Dataset', 'Dataset', (['skdset', 'Y'], {}), '(skdset, Y)\n', (3073, 3084), False, 'from skorch.dataset import Dataset\n'), ((6653, 6670), 'pandas.concat', 'pd.concat', (['res[i]'], {}), '(res[i])\n', (6662, 6670), True, 'import pandas as pd\n'), ((629, 644), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (642, 644), False, 'import torch\n'), ((2768, 
2796), 'sklearn.preprocessing.LabelEncoder', 'preprocessing.LabelEncoder', ([], {}), '()\n', (2794, 2796), False, 'from sklearn import preprocessing\n'), ((3724, 3766), 'numpy.where', 'np.where', (["(model_input['anomaly_perc'] == 0)"], {}), "(model_input['anomaly_perc'] == 0)\n", (3732, 3766), True, 'import numpy as np\n'), ((5517, 5568), 'numpy.stack', 'np.stack', (['[ctx[channels].values for ctx in windows]'], {}), '([ctx[channels].values for ctx in windows])\n', (5525, 5568), True, 'import numpy as np\n'), ((5994, 6009), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6007, 6009), False, 'import torch\n'), ((6042, 6065), 'torch.tensor', 'torch.tensor', (['ctx_batch'], {}), '(ctx_batch)\n', (6054, 6065), False, 'import torch\n'), ((6489, 6500), 'numpy.isnan', 'np.isnan', (['x'], {}), '(x)\n', (6497, 6500), True, 'import numpy as np\n')] |
#
# Copyright (c) 2020 Xilinx, Inc. All rights reserved.
# SPDX-License-Identifier: MIT
#
# Bif file format
from roast.component.bif.generate import Header, Component, Block
# Test-case identity and pass criteria consumed by the test harness.
tcase_type = "copymem"
# Console string whose appearance marks a successful boot.
result_msg = "Boot PDI Load: Done"
# Load addresses checked after boot (hex strings).
id_load_list = ["0x1C000000"]
# Boot-header template; {static_images_path} is substituted later, while
# {{bh_auth_enable}} survives one .format() pass as {bh_auth_enable}.
bootheader = "image_config {{bh_auth_enable}}\n \
pskfile= {static_images_path}/pemfiles/TC_101_POS_Secure_RSA_SHA3_PPK0_BH_AES_NO_ENC_NA_TEST_PSK.pem\n \
sskfile= {static_images_path}/pemfiles/TC_101_POS_Secure_RSA_SHA3_PPK0_BH_AES_NO_ENC_NA_TEST_SSK.pem"
# Common parameter fragment enabling RSA authentication on each component.
common_enc_str = ", authentication=rsa"
# Declarative BIF layout: one Block per subsystem, each listing its
# components.  Placeholders like {plm_elf} are resolved by the generator.
bif = (
    Block(
        header=Header(name="pmc_subsys"),
        components=[
            Component(name="plm", params=["{common_enc_str}", "path={plm_elf}"]),
            Component(
                name="pmccdo", params=["file={topology_cdo}", "path={pmccdo_path}"]
            ),
        ],
    ),
    Block(
        header=Header(header="metaheader"),
        components=[
            Component(name="cdo", params=["{common_enc_str}", "path={lpd_data_cdo}"]),
        ],
    ),
    Block(
        header=Header(name="lpd_subsys"),
        components=[
            Component(name="cdo", params=["{common_enc_str}", "path={dap_cdo}"]),
            Component(name="cdo", params=["{common_enc_str}", "path={lpd_data_cdo}"]),
            Component(name="psm", params=["{common_enc_str}", "path={psm_elf}"]),
        ],
    ),
    Block(
        header=Header(name="pl_cfi_subsys"),
        components=[
            Component(name="cdo", params=["{common_enc_str}", "path={rcdo_cdo}"]),
            Component(name="cdo", params=["{common_enc_str}", "path={rnpi_cdo}"]),
        ],
    ),
    Block(
        header=Header(name="fpd_subsys"),
        components=[
            Component(name="cdo", params=["{common_enc_str}", "path={fpd_data_cdo}"]),
        ],
    ),
    Block(
        header=Header(name="subsystem", args="copy=0x400000, delay_load"),
        components=[
            Component(name="a72", params=["{common_enc_str}", "path={a72_ddr_elf}"]),
        ],
    ),
)
# Drop the builder names so only the data above is exported from this module.
del Header, Component, Block
| [
"roast.component.bif.generate.Component",
"roast.component.bif.generate.Header"
] | [((601, 626), 'roast.component.bif.generate.Header', 'Header', ([], {'name': '"""pmc_subsys"""'}), "(name='pmc_subsys')\n", (607, 626), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((897, 924), 'roast.component.bif.generate.Header', 'Header', ([], {'header': '"""metaheader"""'}), "(header='metaheader')\n", (903, 924), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1078, 1103), 'roast.component.bif.generate.Header', 'Header', ([], {'name': '"""lpd_subsys"""'}), "(name='lpd_subsys')\n", (1084, 1103), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1421, 1449), 'roast.component.bif.generate.Header', 'Header', ([], {'name': '"""pl_cfi_subsys"""'}), "(name='pl_cfi_subsys')\n", (1427, 1449), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1682, 1707), 'roast.component.bif.generate.Header', 'Header', ([], {'name': '"""fpd_subsys"""'}), "(name='fpd_subsys')\n", (1688, 1707), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1861, 1919), 'roast.component.bif.generate.Header', 'Header', ([], {'name': '"""subsystem"""', 'args': '"""copy=0x400000, delay_load"""'}), "(name='subsystem', args='copy=0x400000, delay_load')\n", (1867, 1919), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((661, 729), 'roast.component.bif.generate.Component', 'Component', ([], {'name': '"""plm"""', 'params': "['{common_enc_str}', 'path={plm_elf}']"}), "(name='plm', params=['{common_enc_str}', 'path={plm_elf}'])\n", (670, 729), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((743, 821), 'roast.component.bif.generate.Component', 'Component', ([], {'name': '"""pmccdo"""', 'params': "['file={topology_cdo}', 'path={pmccdo_path}']"}), "(name='pmccdo', params=['file={topology_cdo}', 'path={pmccdo_path}'])\n", (752, 821), False, 'from roast.component.bif.generate import Header, 
Component, Block\n'), ((959, 1032), 'roast.component.bif.generate.Component', 'Component', ([], {'name': '"""cdo"""', 'params': "['{common_enc_str}', 'path={lpd_data_cdo}']"}), "(name='cdo', params=['{common_enc_str}', 'path={lpd_data_cdo}'])\n", (968, 1032), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1138, 1206), 'roast.component.bif.generate.Component', 'Component', ([], {'name': '"""cdo"""', 'params': "['{common_enc_str}', 'path={dap_cdo}']"}), "(name='cdo', params=['{common_enc_str}', 'path={dap_cdo}'])\n", (1147, 1206), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1220, 1293), 'roast.component.bif.generate.Component', 'Component', ([], {'name': '"""cdo"""', 'params': "['{common_enc_str}', 'path={lpd_data_cdo}']"}), "(name='cdo', params=['{common_enc_str}', 'path={lpd_data_cdo}'])\n", (1229, 1293), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1307, 1375), 'roast.component.bif.generate.Component', 'Component', ([], {'name': '"""psm"""', 'params': "['{common_enc_str}', 'path={psm_elf}']"}), "(name='psm', params=['{common_enc_str}', 'path={psm_elf}'])\n", (1316, 1375), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1484, 1553), 'roast.component.bif.generate.Component', 'Component', ([], {'name': '"""cdo"""', 'params': "['{common_enc_str}', 'path={rcdo_cdo}']"}), "(name='cdo', params=['{common_enc_str}', 'path={rcdo_cdo}'])\n", (1493, 1553), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1567, 1636), 'roast.component.bif.generate.Component', 'Component', ([], {'name': '"""cdo"""', 'params': "['{common_enc_str}', 'path={rnpi_cdo}']"}), "(name='cdo', params=['{common_enc_str}', 'path={rnpi_cdo}'])\n", (1576, 1636), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1742, 1815), 'roast.component.bif.generate.Component', 'Component', ([], {'name': '"""cdo"""', 
'params': "['{common_enc_str}', 'path={fpd_data_cdo}']"}), "(name='cdo', params=['{common_enc_str}', 'path={fpd_data_cdo}'])\n", (1751, 1815), False, 'from roast.component.bif.generate import Header, Component, Block\n'), ((1954, 2026), 'roast.component.bif.generate.Component', 'Component', ([], {'name': '"""a72"""', 'params': "['{common_enc_str}', 'path={a72_ddr_elf}']"}), "(name='a72', params=['{common_enc_str}', 'path={a72_ddr_elf}'])\n", (1963, 2026), False, 'from roast.component.bif.generate import Header, Component, Block\n')] |
# /*
# * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# * SPDX-License-Identifier: MIT-0
# *
# * Permission is hereby granted, free of charge, to any person obtaining a copy of this
# * software and associated documentation files (the "Software"), to deal in the Software
# * without restriction, including without limitation the rights to use, copy, modify,
# * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# * permit persons to whom the Software is furnished to do so.
# *
# * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# */
from aws_cdk import core
import aws_cdk.aws_sns as sns
class SnsStack(core.Stack):
    """CDK stack providing the DMS failure alert SNS topic and its email
    subscription.

    Exposes ``sns_topic`` and ``sns_subscription`` as instance attributes
    for use by sibling stacks.
    """

    def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        # Topic that alert notifications are published to.
        self.sns_topic = sns.Topic(
            self,
            "SNSAlertEmailTopic",
            topic_name="dms-failure-alert-topic",
        )
        # Email endpoint subscribed to the topic above.
        self.sns_subscription = sns.Subscription(
            scope=self,
            id="StepFunctionsAlert_" + 'Email',
            protocol=sns.SubscriptionProtocol.EMAIL,
            endpoint="<EMAIL>",
            topic=self.sns_topic,
        )
| [
"aws_cdk.aws_sns.Topic",
"aws_cdk.aws_sns.Subscription"
] | [((1294, 1369), 'aws_cdk.aws_sns.Topic', 'sns.Topic', (['self', '"""SNSAlertEmailTopic"""'], {'topic_name': '"""dms-failure-alert-topic"""'}), "(self, 'SNSAlertEmailTopic', topic_name='dms-failure-alert-topic')\n", (1303, 1369), True, 'import aws_cdk.aws_sns as sns\n'), ((1437, 1589), 'aws_cdk.aws_sns.Subscription', 'sns.Subscription', ([], {'scope': 'self', 'id': "('StepFunctionsAlert_' + 'Email')", 'protocol': 'sns.SubscriptionProtocol.EMAIL', 'endpoint': '"""<EMAIL>"""', 'topic': 'self.sns_topic'}), "(scope=self, id='StepFunctionsAlert_' + 'Email', protocol=\n sns.SubscriptionProtocol.EMAIL, endpoint='<EMAIL>', topic=self.sns_topic)\n", (1453, 1589), True, 'import aws_cdk.aws_sns as sns\n')] |
'''https://practice.geeksforgeeks.org/problems/zigzag-tree-traversal/1
ZigZag Tree Traversal
Easy Accuracy: 49.78% Submissions: 50529 Points: 2
Given a Binary Tree. Find the Zig-Zag Level Order Traversal of the Binary Tree.
Example 1:
Input:
3
/ \
2 1
Output:
3 1 2
Example 2:
Input:
7
/ \
9 7
/ \ /
8 8 6
/ \
10 9
Output:
7 7 9 8 8 6 9 10
Your Task:
You don't need to read input or print anything. Your task is to complete the function zigZagTraversal() which takes the root node of the Binary Tree as its input and returns a list containing the node values as they appear in the Zig-Zag Level-Order Traversal of the Tree.
Expected Time Complexity: O(N).
Expected Auxiliary Space: O(N).
Constraints:
1 <= N <= 104'''
# User function Template for python3
'''
class Node:
def __init__(self,val):
self.data = val
self.left = None
self.right = None
'''
from collections import defaultdict
from collections import deque
class Solution:
    # Function to store the zig zag order traversal of tree in a list.
    def zigZagTraversal(self, root):
        """Return the zig-zag (spiral) level-order traversal of a binary tree.

        Levels alternate direction: the root level is read left-to-right,
        the next level right-to-left, and so on.

        Replaces the original sentinel-queue implementation, whose
        ``list.pop(0)`` made every dequeue O(n), with a deque-based sweep
        that processes one whole level at a time.

        Time: O(N).  Space: O(N) for the widest level.
        """
        if root is None:
            return []

        result = []
        level = deque([root])        # all nodes at the current depth
        left_to_right = True          # direction for the current depth

        while level:
            # Emit this level, reversed on alternating depths.
            values = [node.data for node in level]
            if not left_to_right:
                values.reverse()
            result.extend(values)

            # Gather the next depth in natural left-to-right order.
            nxt = deque()
            for node in level:
                if node.left:
                    nxt.append(node.left)
                if node.right:
                    nxt.append(node.right)
            level = nxt
            left_to_right = not left_to_right

        return result
# {
# Driver Code Starts
# Initial Template for Python 3
# contributed by RavinderSinghPB
class Node:
    """A single binary-tree node holding an integer payload."""

    def __init__(self, val):
        # Store the payload with empty child links; children are attached
        # later by the tree builder.
        self.data, self.left, self.right = val, None, None
# Function to Build Tree
def buildTree(s):
    """Construct a binary tree from a space-separated level-order string.

    Tokens are consumed breadth-first; the literal "N" marks a missing
    child.  Returns the root Node, or None for an empty/"N" input.
    """
    # Corner case: nothing to build.
    if len(s) == 0 or s[0] == "N":
        return None

    tokens = s.split()

    # Seed the tree and the BFS frontier with the root node.
    root = Node(int(tokens[0]))
    frontier = deque([root])

    pos = 1
    while frontier and pos < len(tokens):
        current = frontier.popleft()

        # Attach the left child, if present.
        if tokens[pos] != "N":
            current.left = Node(int(tokens[pos]))
            frontier.append(current.left)
        pos += 1

        if pos >= len(tokens):
            break

        # Attach the right child, if present.
        if tokens[pos] != "N":
            current.right = Node(int(tokens[pos]))
            frontier.append(current.right)
        pos += 1

    return root
if __name__ == '__main__':
    # First stdin line: number of test cases.
    t = int(input())
    for _ in range(0, t):
        # Each test case is one space-separated level-order tree description.
        s = input()
        root = buildTree(s)
        ob = Solution()
        res = ob.zigZagTraversal(root)
        # Print the traversal space-separated on a single line.
        for i in range(len(res)):
            print(res[i], end=" ")
        print()
# } Driver Code Ends
| [
"collections.deque"
] | [((2539, 2546), 'collections.deque', 'deque', ([], {}), '()\n', (2544, 2546), False, 'from collections import deque\n')] |
from dagster import check
from dagster.core.host_representation.external_data import ExternalPartitionData
from dagster.core.host_representation.handle import RepositoryHandle
from .utils import execute_unary_api_cli_command
def sync_get_external_partition(repository_handle, partition_set_name, partition_name):
    """Execute the `partition` CLI command for the handle's repository and
    return the resulting ExternalPartitionData."""
    from dagster.cli.api import PartitionApiCommandArgs

    check.inst_param(repository_handle, 'repository_handle', RepositoryHandle)
    check.str_param(partition_set_name, 'partition_set_name')
    check.str_param(partition_name, 'partition_name')

    origin = repository_handle.get_origin()
    command_args = PartitionApiCommandArgs(
        repository_origin=origin,
        partition_set_name=partition_set_name,
        partition_name=partition_name,
    )
    result = execute_unary_api_cli_command(
        origin.executable_path,
        'partition',
        command_args,
    )
    return check.inst(result, ExternalPartitionData)
| [
"dagster.cli.api.PartitionApiCommandArgs",
"dagster.check.inst_param",
"dagster.check.str_param"
] | [((377, 451), 'dagster.check.inst_param', 'check.inst_param', (['repository_handle', '"""repository_handle"""', 'RepositoryHandle'], {}), "(repository_handle, 'repository_handle', RepositoryHandle)\n", (393, 451), False, 'from dagster import check\n'), ((456, 513), 'dagster.check.str_param', 'check.str_param', (['partition_set_name', '"""partition_set_name"""'], {}), "(partition_set_name, 'partition_set_name')\n", (471, 513), False, 'from dagster import check\n'), ((518, 567), 'dagster.check.str_param', 'check.str_param', (['partition_name', '"""partition_name"""'], {}), "(partition_name, 'partition_name')\n", (533, 567), False, 'from dagster import check\n'), ((770, 904), 'dagster.cli.api.PartitionApiCommandArgs', 'PartitionApiCommandArgs', ([], {'repository_origin': 'repository_origin', 'partition_set_name': 'partition_set_name', 'partition_name': 'partition_name'}), '(repository_origin=repository_origin,\n partition_set_name=partition_set_name, partition_name=partition_name)\n', (793, 904), False, 'from dagster.cli.api import PartitionApiCommandArgs\n')] |
import contextlib
import os
import pathlib
import pytest
# Absolute path to the test fixture directory, resolved relative to this
# file so tests work regardless of the current working directory.
# (pathlib equivalent of the former os.path.join/dirname/realpath chain.)
FIXTURE_DIR = pathlib.Path(__file__).resolve().parent / 'fixtures'
@pytest.fixture
def fixture_file():
    """Fixture yielding a helper that joins path segments under FIXTURE_DIR."""
    def _build(*segments):
        return pathlib.Path(os.path.join(FIXTURE_DIR, *segments))
    return _build
@contextlib.contextmanager
def chdir(path: pathlib.Path):
    """Context manager: temporarily switch the working directory to *path*,
    restoring the previous one on exit even if the body raises."""
    previous = pathlib.Path.cwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous)
| [
"pathlib.Path.cwd",
"os.path.realpath",
"os.chdir",
"os.path.join"
] | [((396, 414), 'pathlib.Path.cwd', 'pathlib.Path.cwd', ([], {}), '()\n', (412, 414), False, 'import pathlib\n'), ((419, 433), 'os.chdir', 'os.chdir', (['path'], {}), '(path)\n', (427, 433), False, 'import os\n'), ((478, 495), 'os.chdir', 'os.chdir', (['curpath'], {}), '(curpath)\n', (486, 495), False, 'import os\n'), ((120, 146), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (136, 146), False, 'import os\n'), ((263, 295), 'os.path.join', 'os.path.join', (['FIXTURE_DIR', '*args'], {}), '(FIXTURE_DIR, *args)\n', (275, 295), False, 'import os\n')] |
#!/usr/bin/env python
# Script for running FSL motion correction
# It's run with the framewise displacement option thres 0.9 per Siegel et al. 2014 HBM
#
# ipython bold-fsl-pp-motcor.py
import numpy as np
import os
import glob
## Study directory
studydir = '/mnt/40TB-raid6/Experiments/FCTM_S/FCTM_S_Data/Analyses'
## Experiment phase being QA'd
phase = 'hab_B'
# Name html file to put all QA info together.
outhtml = studydir + '/group/bold_motion_QA_' + phase + '.html'
outpdf = studydir + '/group/bold_motion_QA_' + phase + '.pdf'
# QA file for flagging potential bad subjects.  Fixed: the original built
# this inside the loop and omitted the '/' before 'group', producing
# '.../Analysesgroup/...'.  The path is run-independent, so build it once.
out_bad_bold_list = studydir + '/group/subsgt20_vol_scrub-' + phase + '.txt'
## Find raw EPI nii files (5-digit subject directories starting with 1)
subdirs = glob.glob('%s/1[0-9][0-9][0-9][0-9]/func/B_hab_[1-9]_trrm.nii.gz' % (studydir))
for cur_bold in subdirs:
    print(cur_bold)
    # Directory holding this bold run
    curdir = os.path.dirname(cur_bold)
    # Strip .nii.gz (FSL doesn't want the file ext)
    cur_bold_no_nii = cur_bold[:-7]
    # Strip dir stuff to isolate phase and run
    fname = cur_bold_no_nii[len(curdir) + 1:]
    # Make dir for motion assessments
    if not os.path.isdir("%s/motion_assess" % (curdir)):
        os.system("mkdir %s/motion_assess" % (curdir))
    # Run fsl_motion_outliers with framewise displacement, threshold 0.9
    # (per Siegel et al. 2014 HBM)
    os.system("fsl_motion_outliers -i %s -o %s/motion_assess/confound-%s.txt --fd --thresh=0.9 -p %s/motion_assess/fd_plot-%s -v > %s/motion_assess/outlier_output-%s.txt" % (cur_bold_no_nii, curdir, fname, curdir, fname, curdir, fname))
    # If no confounds create blank confound for easier scripting later
    if not os.path.isfile("%s/motion_assess/confound-%s.txt" % (curdir, fname)):
        os.system("touch %s/motion_assess/confound-%s.txt" % (curdir, fname))
    # Put confound info into html file for review later on
    os.system("cat %s/motion_assess/outlier_output-%s.txt >> %s" % (curdir, fname, outhtml))
    os.system("echo '<p>=============<p>FD plot %s <br><IMG BORDER=0 SRC=%s/motion_assess/fd_plot-%s.png WIDTH=100%s></BODY></HTML>' >> %s" % (curdir, curdir, fname, '%', outhtml))
## Convert html to pdf
os.system("pandoc %s -o %s" % (outhtml, outpdf))
| [
"os.path.isfile",
"os.path.dirname",
"os.path.isdir",
"os.system",
"glob.glob"
] | [((663, 740), 'glob.glob', 'glob.glob', (["('%s/1[0-9][0-9][0-9][0-9]/func/B_hab_[1-9]_trrm.nii.gz' % studydir)"], {}), "('%s/1[0-9][0-9][0-9][0-9]/func/B_hab_[1-9]_trrm.nii.gz' % studydir)\n", (672, 740), False, 'import glob\n'), ((2147, 2195), 'os.system', 'os.system', (["('pandoc %s -o %s' % (outhtml, outpdf))"], {}), "('pandoc %s -o %s' % (outhtml, outpdf))\n", (2156, 2195), False, 'import os\n'), ((887, 912), 'os.path.dirname', 'os.path.dirname', (['cur_bold'], {}), '(cur_bold)\n', (902, 912), False, 'import os\n'), ((1358, 1600), 'os.system', 'os.system', (["('fsl_motion_outliers -i %s -o %s/motion_assess/confound-%s.txt --fd --thresh=0.9 -p %s/motion_assess/fd_plot-%s -v > %s/motion_assess/outlier_output-%s.txt'\n % (cur_bold_no_nii, curdir, fname, curdir, fname, curdir, fname))"], {}), "(\n 'fsl_motion_outliers -i %s -o %s/motion_assess/confound-%s.txt --fd --thresh=0.9 -p %s/motion_assess/fd_plot-%s -v > %s/motion_assess/outlier_output-%s.txt'\n % (cur_bold_no_nii, curdir, fname, curdir, fname, curdir, fname))\n", (1367, 1600), False, 'import os\n'), ((1861, 1953), 'os.system', 'os.system', (["('cat %s/motion_assess/outlier_output-%s.txt >> %s' % (curdir, fname, outhtml))"], {}), "('cat %s/motion_assess/outlier_output-%s.txt >> %s' % (curdir,\n fname, outhtml))\n", (1870, 1953), False, 'import os\n'), ((1948, 2134), 'os.system', 'os.system', (['("echo \'<p>=============<p>FD plot %s <br><IMG BORDER=0 SRC=%s/motion_assess/fd_plot-%s.png WIDTH=100%s></BODY></HTML>\' >> %s"\n % (curdir, curdir, fname, \'%\', outhtml))'], {}), '(\n "echo \'<p>=============<p>FD plot %s <br><IMG BORDER=0 SRC=%s/motion_assess/fd_plot-%s.png WIDTH=100%s></BODY></HTML>\' >> %s"\n % (curdir, curdir, fname, \'%\', outhtml))\n', (1957, 2134), False, 'import os\n'), ((1234, 1276), 'os.path.isdir', 'os.path.isdir', (["('%s/motion_assess' % curdir)"], {}), "('%s/motion_assess' % curdir)\n", (1247, 1276), False, 'import os\n'), ((1287, 1331), 'os.system', 'os.system', (["('mkdir 
%s/motion_assess' % curdir)"], {}), "('mkdir %s/motion_assess' % curdir)\n", (1296, 1331), False, 'import os\n'), ((1661, 1729), 'os.path.isfile', 'os.path.isfile', (["('%s/motion_assess/confound-%s.txt' % (curdir, fname))"], {}), "('%s/motion_assess/confound-%s.txt' % (curdir, fname))\n", (1675, 1729), False, 'import os\n'), ((1737, 1806), 'os.system', 'os.system', (["('touch %s/motion_assess/confound-%s.txt' % (curdir, fname))"], {}), "('touch %s/motion_assess/confound-%s.txt' % (curdir, fname))\n", (1746, 1806), False, 'import os\n')] |
# No shebang line, this module is meant to be imported
#
# Copyright 2015 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Configuration
=============
A small wrapper around :class:`pyfarm.core.config.Configuration`
that loads in the configuration files and provides backwards
compatibility for some environment variables.
"""
import os
from functools import partial
from pyfarm.core.config import (
Configuration as _Configuration, read_env_int, read_env, read_env_bool)
# WindowsError only exists on Windows builds of Python; alias it to OSError
# elsewhere so code catching WindowsError stays valid cross-platform.
try:
    WindowsError
except NameError: # pragma: no cover
    WindowsError = OSError
# Shorthand partials: read an env var without logging its value, and read
# a boolean env var that defaults to False when unset.
read_env_no_log = partial(read_env, log_result=False)
env_bool_false = partial(read_env_bool, default=False)
class Configuration(_Configuration):
    """
    The main configuration object for the master, models and
    scheduler. This will load in the configuration files and
    also handle any overrides present in the environment.

    :var ENVIRONMENT_OVERRIDES:
        A dictionary containing all environment variables
        we support as overrides. This set is mainly provided
        for backwards comparability purposes or for the rare case
        where an environment override would be preferred over a
        config.
    """
    # Maps a config key -> (environment variable name, loader function).
    # The loader parses/validates the raw environment string; *_no_log
    # variants avoid logging sensitive values such as keys and DB URIs.
    ENVIRONMENT_OVERRIDES = {
        "secret_key": ("PYFARM_SECRET_KEY", read_env_no_log),
        "autocreate_users": ("PYFARM_AUTOCREATE_USERS", read_env_bool),
        "default_job_delete_time": (
            "PYFARM_DEFAULT_JOB_DELETE_TIME", read_env_int),
        "base_url": (
            "PYFARM_BASE_URL", read_env),
        "login_disabled": ("PYFARM_LOGIN_DISABLED", read_env_bool),
        "pretty_json": ("PYFARM_JSON_PRETTY", read_env_bool),
        "echo_sql": ("PYFARM_SQL_ECHO", read_env_bool),
        "database": ("PYFARM_DATABASE_URI", read_env_no_log),
        "timestamp_format": ("PYFARM_TIMESTAMP_FORMAT", read_env),
        "allow_agents_from_loopback": (
            "PYFARM_DEV_ALLOW_AGENT_LOOPBACK_ADDRESSES", read_env_bool),
        "agent_updates_dir": ("PYFARM_AGENT_UPDATES_DIR", read_env),
        "agent_updates_webdir": ("PYFARM_AGENT_UPDATES_WEBDIR", read_env),
        "farm_name": ("PYFARM_FARM_NAME", read_env),
        "tasklogs_dir": ("PYFARM_LOGFILES_DIR", read_env),
        "dev_db_drop_all": (
            "PYFARM_DEV_APP_DB_DROP_ALL", env_bool_false),
        "dev_db_create_all": (
            "PYFARM_DEV_APP_DB_CREATE_ALL", env_bool_false),
        "instance_application": ("PYFARM_APP_INSTANCE", env_bool_false),
        "scheduler_broker": ("PYFARM_SCHEDULER_BROKER", read_env),
        "scheduler_lockfile_base": (
            "PYFARM_SCHEDULER_LOCKFILE_BASE", read_env),
        "transaction_retries": ("PYFARM_TRANSACTION_RETRIES", read_env_int),
        "agent_request_timeout": (
            "PYFARM_AGENT_REQUEST_TIMEOUT", read_env_int),
        "smtp_server": (
            "PYFARM_MAIL_SERVER", read_env),
        "from_email": (
            "PYFARM_FROM_ADDRESS", read_env)
    }
    def __init__(self): # pylint: disable=super-on-old-class
        # Load the base "pyfarm.master" configuration first; `loaded` tracks
        # which files contributed, converted to a set for cheap merging.
        super(Configuration, self).__init__("pyfarm.master")
        self.load()
        self.loaded = set(self.loaded)
        # Load model configuration
        models_config = _Configuration("pyfarm.models", version=self.version)
        models_config.load()
        self.update(models_config)
        self.loaded.update(models_config.loaded)
        # Load scheduler configuration
        sched_config = _Configuration("pyfarm.scheduler", version=self.version)
        sched_config.load()
        self.update(sched_config)
        self.loaded.update(sched_config.loaded)
        # Python 2/3 compatibility: prefer iteritems when it exists.
        try:
            items = self.ENVIRONMENT_OVERRIDES.iteritems
        except AttributeError:  # pragma: no cover
            items = self.ENVIRONMENT_OVERRIDES.items
        # Collect any environment overrides, parsing each value with its
        # registered loader; applied last so they win over file config.
        overrides = {}
        for config_var, (envvar, load_func) in items():
            if envvar in os.environ:
                overrides[config_var] = load_func(envvar)
        # Development convenience: bind the web app to all interfaces.
        if ("PYFARM_DEV_LISTEN_ON_WILDCARD" in os.environ
                and read_env_bool("PYFARM_DEV_LISTEN_ON_WILDCARD")):
            self.update(flask_listen_address="0.0.0.0")
        self.update(overrides)
# Module-level singleton: only construct `config` if it is not already
# defined (e.g. on a module reload), using the NameError probe idiom.
try:
    config
except NameError:  # pragma: no cover
    config = Configuration()
| [
"pyfarm.core.config.Configuration",
"functools.partial",
"pyfarm.core.config.read_env_bool"
] | [((1094, 1129), 'functools.partial', 'partial', (['read_env'], {'log_result': '(False)'}), '(read_env, log_result=False)\n', (1101, 1129), False, 'from functools import partial\n'), ((1147, 1184), 'functools.partial', 'partial', (['read_env_bool'], {'default': '(False)'}), '(read_env_bool, default=False)\n', (1154, 1184), False, 'from functools import partial\n'), ((3702, 3755), 'pyfarm.core.config.Configuration', '_Configuration', (['"""pyfarm.models"""'], {'version': 'self.version'}), "('pyfarm.models', version=self.version)\n", (3716, 3755), True, 'from pyfarm.core.config import Configuration as _Configuration, read_env_int, read_env, read_env_bool\n'), ((3932, 3988), 'pyfarm.core.config.Configuration', '_Configuration', (['"""pyfarm.scheduler"""'], {'version': 'self.version'}), "('pyfarm.scheduler', version=self.version)\n", (3946, 3988), True, 'from pyfarm.core.config import Configuration as _Configuration, read_env_int, read_env, read_env_bool\n'), ((4528, 4574), 'pyfarm.core.config.read_env_bool', 'read_env_bool', (['"""PYFARM_DEV_LISTEN_ON_WILDCARD"""'], {}), "('PYFARM_DEV_LISTEN_ON_WILDCARD')\n", (4541, 4574), False, 'from pyfarm.core.config import Configuration as _Configuration, read_env_int, read_env, read_env_bool\n')] |
#!/usr/bin/env python3
import os
import requests
import sys
from argparse import ArgumentParser
# Command-line interface.  Fixed: the implicit string concatenation below
# was missing a space, rendering the help text as
# "...informationfor your zipcode.".
parser = ArgumentParser(
    description=(
        'weather: Get the current weather information '
        'for your zipcode.'
    )
)
parser.add_argument(
    'zip',
    help='zip code to get the weather for'
)
parser.add_argument(
    '--country',
    '-c',
    default='NL',
    help='country zipcode belongs to, default is NL'
)
parser.add_argument(
    '--version',
    '-v',
    action='version',
    version='%(prog)s 1.0'
)
args = parser.parse_args()
# The OpenWeatherMap API key must be supplied via the environment.
api_key = os.getenv('OWM_API_KEY')
if not api_key:
    print("Error: no 'OWM_API_KEY' provided")
    sys.exit(1)
url = (
    f'http://api.openweathermap.org/data/2.5/weather?'
    f'zip={args.zip},{args.country}&appid={api_key}'
)
# Explicit timeout so the CLI fails instead of hanging forever when the
# weather provider is unreachable (requests has no default timeout).
req = requests.get(url, timeout=10)
if req.status_code != 200:
    print(f'Error: talking to weather provider: {req.status_code}')
    sys.exit(1)
# Print the raw JSON payload returned by the provider.
print(req.json())
| [
"requests.get",
"sys.exit",
"argparse.ArgumentParser",
"os.getenv"
] | [((108, 204), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""weather: Get the current weather informationfor your zipcode."""'}), "(description=\n 'weather: Get the current weather informationfor your zipcode.')\n", (122, 204), False, 'from argparse import ArgumentParser\n'), ((569, 593), 'os.getenv', 'os.getenv', (['"""OWM_API_KEY"""'], {}), "('OWM_API_KEY')\n", (578, 593), False, 'import os\n'), ((799, 816), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (811, 816), False, 'import requests\n'), ((661, 672), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (669, 672), False, 'import sys\n'), ((917, 928), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (925, 928), False, 'import sys\n')] |
import pathlib
import typing
import urllib.parse
# Directory containing this plugin module, used to anchor resource paths.
_PLUGIN_DIR = pathlib.Path(__file__).parent
# Public string paths to the plugin root and its resource subdirectories.
PLUGIN_DIR = str(_PLUGIN_DIR)
CONFIGS_DIR = str(_PLUGIN_DIR.joinpath('configs'))
SCRIPTS_DIR = str(_PLUGIN_DIR.joinpath('scripts'))
def scan_sql_directory(root: str) -> typing.List[pathlib.Path]:
    """Return the regular ``*.sql`` files directly inside *root*, sorted by name."""
    entries = sorted(pathlib.Path(root).iterdir())
    return [entry for entry in entries if entry.is_file() and entry.suffix == '.sql']
def connstr_replace_dbname(connstr: str, dbname: str) -> str:
    """Replace dbname in existing connection string."""
    # Key/value (libpq) form with a trailing empty "dbname=": append the name.
    if connstr.endswith(' dbname='):
        return connstr + dbname
    # URI form: swap the path component; urlunparse restores the leading '/'.
    if connstr.startswith('postgresql://'):
        parsed = urllib.parse.urlparse(connstr)
        # pylint: disable=protected-access
        return parsed._replace(path=dbname).geturl()
    raise RuntimeError(
        f'Unsupported PostgreSQL connection string format {connstr!r}',
    )
| [
"pathlib.Path"
] | [((64, 86), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (76, 86), False, 'import pathlib\n'), ((345, 363), 'pathlib.Path', 'pathlib.Path', (['root'], {}), '(root)\n', (357, 363), False, 'import pathlib\n')] |
from base64 import urlsafe_b64encode
import dash_core_components as dcc
import dash_bootstrap_components as dbc
import dash_html_components as html
from dash.dependencies import Input, Output
import dash_defer_js_import as dji
import dash_table as dt
from app import app, server
from pages import single_ticker, portfolio_metrics
# Markdown sources rendered by the router callback below: the README is
# the landing page, the getting-started guide backs the /docs route.
with open('./README.md', 'r') as f:
    readme = f.read()
with open('./documentation/gettingstarted.md', 'r') as f:
    docs = f.read()
# Top-level page skeleton: URL tracker, navbar, and a container that the
# router callback fills per route.
app.layout = dbc.Container(
    [
        # Tracks the browser URL so the callback can route on pathname.
        dcc.Location(id='url', refresh=False),
        dbc.NavbarSimple(
            children=[
                dbc.NavItem(
                    dbc.NavLink(
                        'Risk Dash Documentation',
                        href='/docs',
                    ),
                ),
                dbc.NavItem(
                    dbc.NavLink(
                        'Portfolio Dashboard',
                        href='/portfolio',
                    ),
                ),
                dbc.NavItem(
                    dbc.NavLink(
                        'Individual Security Dashboard',
                        href='/single',
                    ),
                ),
            ],
            brand='Risk Dash',
            brand_href='/',
            color='light',
            id='nav'
        ),
        # Placeholder populated by the get_layout callback.
        dbc.Container(id='page_content',fluid=True),
        # Hidden empty DataTable — presumably included so DataTable assets
        # load on every page; confirm before removing.
        html.Div(dt.DataTable(data=[{}]), style={'display' : 'none'}),
        # MathJax for LaTeX rendering in the markdown pages.
        dji.Import(src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.7/latest.js?config=TeX-AMS-MML_SVG")
    ],
    fluid=True
)
@app.callback(
    Output('page_content', 'children'),
    [Input('url', 'pathname')]
)
def get_layout(url):
    """Route the main page body based on the browser URL path.

    Returns the portfolio or single-ticker dashboard for their routes,
    the rendered docs for '/docs', and the README landing page for any
    other non-None path.  Returns None while the URL is not yet set.
    """
    # dcc.Location supplies None before the first navigation event.
    if url is None:
        return None
    if url == '/portfolio':
        return portfolio_metrics.layout
    if url == '/single':
        return single_ticker.layout
    if url == '/docs':
        return dcc.Markdown(docs)
    return dcc.Markdown(readme)
# Entry point for running the Dash app directly from the command line.
if __name__ == '__main__':
    print('Running')
app.run_server() | [
"dash_bootstrap_components.NavLink",
"dash.dependencies.Output",
"dash_core_components.Location",
"dash_bootstrap_components.Container",
"dash.dependencies.Input",
"dash_defer_js_import.Import",
"dash_core_components.Markdown",
"app.app.run_server",
"dash_table.DataTable"
] | [((1484, 1518), 'dash.dependencies.Output', 'Output', (['"""page_content"""', '"""children"""'], {}), "('page_content', 'children')\n", (1490, 1518), False, 'from dash.dependencies import Input, Output\n'), ((1920, 1936), 'app.app.run_server', 'app.run_server', ([], {}), '()\n', (1934, 1936), False, 'from app import app, server\n'), ((511, 548), 'dash_core_components.Location', 'dcc.Location', ([], {'id': '"""url"""', 'refresh': '(False)'}), "(id='url', refresh=False)\n", (523, 548), True, 'import dash_core_components as dcc\n'), ((1220, 1264), 'dash_bootstrap_components.Container', 'dbc.Container', ([], {'id': '"""page_content"""', 'fluid': '(True)'}), "(id='page_content', fluid=True)\n", (1233, 1264), True, 'import dash_bootstrap_components as dbc\n'), ((1336, 1449), 'dash_defer_js_import.Import', 'dji.Import', ([], {'src': '"""https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.7/latest.js?config=TeX-AMS-MML_SVG"""'}), "(src=\n 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.7/latest.js?config=TeX-AMS-MML_SVG'\n )\n", (1346, 1449), True, 'import dash_defer_js_import as dji\n'), ((1525, 1549), 'dash.dependencies.Input', 'Input', (['"""url"""', '"""pathname"""'], {}), "('url', 'pathname')\n", (1530, 1549), False, 'from dash.dependencies import Input, Output\n'), ((1278, 1301), 'dash_table.DataTable', 'dt.DataTable', ([], {'data': '[{}]'}), '(data=[{}])\n', (1290, 1301), True, 'import dash_table as dt\n'), ((1791, 1809), 'dash_core_components.Markdown', 'dcc.Markdown', (['docs'], {}), '(docs)\n', (1803, 1809), True, 'import dash_core_components as dcc\n'), ((1844, 1864), 'dash_core_components.Markdown', 'dcc.Markdown', (['readme'], {}), '(readme)\n', (1856, 1864), True, 'import dash_core_components as dcc\n'), ((632, 684), 'dash_bootstrap_components.NavLink', 'dbc.NavLink', (['"""Risk Dash Documentation"""'], {'href': '"""/docs"""'}), "('Risk Dash Documentation', href='/docs')\n", (643, 684), True, 'import dash_bootstrap_components as dbc\n'), ((801, 854), 
'dash_bootstrap_components.NavLink', 'dbc.NavLink', (['"""Portfolio Dashboard"""'], {'href': '"""/portfolio"""'}), "('Portfolio Dashboard', href='/portfolio')\n", (812, 854), True, 'import dash_bootstrap_components as dbc\n'), ((971, 1031), 'dash_bootstrap_components.NavLink', 'dbc.NavLink', (['"""Individual Security Dashboard"""'], {'href': '"""/single"""'}), "('Individual Security Dashboard', href='/single')\n", (982, 1031), True, 'import dash_bootstrap_components as dbc\n')] |
""" A container to store latest known prices for a list of symbols """
import logging
from datetime import datetime
from market.tick_data import TickData
class MarketData:
    """Keeps the most recently seen tick (price, volume, timestamp) per symbol."""

    def __init__(self):
        # symbol -> TickData holding the latest known values
        self.__recent_ticks__ = {}

    def add_last_price(self,
                       time: datetime,
                       symbol: str,
                       price: float,
                       volume: int):
        """Record the latest traded price and volume for *symbol*."""
        logging.debug("add last {0}, {1} - {2}/{3}".format(time, symbol,
                                                           price, volume))
        self.__recent_ticks__[symbol] = TickData(symbol=symbol, last_price=price,
                                                total_volume=volume, timestamp=time)

    def add_open_price(self,
                       time: datetime,
                       symbol: str,
                       price: float):
        """Record the opening price for *symbol*, creating its tick entry if needed."""
        logging.debug("add open {0}, {1} - {2}".format(time, symbol, price))
        tick = self.get_existing_tick_data(symbol)
        tick.open_price = price
        self.__recent_ticks__[symbol] = tick

    def get_existing_tick_data(self, symbol: str) -> TickData:
        """Return the stored tick for *symbol*, creating an empty one on first use."""
        tick = self.__recent_ticks__.get(symbol)
        if tick is None:
            tick = TickData(symbol=symbol)
            self.__recent_ticks__[symbol] = tick
        return tick

    def get_last_price(self, symbol: str) -> float:
        return self.__recent_ticks__[symbol].last_price

    def get_open_price(self, symbol: str) -> float:
        return self.__recent_ticks__[symbol].open_price

    def get_timestamp(self, symbol: str) -> datetime:
        return self.__recent_ticks__[symbol].timestamp

    def __str__(self):
        return str(self.__recent_ticks__)
| [
"market.tick_data.TickData"
] | [((584, 662), 'market.tick_data.TickData', 'TickData', ([], {'symbol': 'symbol', 'last_price': 'price', 'total_volume': 'volume', 'timestamp': 'time'}), '(symbol=symbol, last_price=price, total_volume=volume, timestamp=time)\n', (592, 662), False, 'from market.tick_data import TickData\n'), ((1261, 1284), 'market.tick_data.TickData', 'TickData', ([], {'symbol': 'symbol'}), '(symbol=symbol)\n', (1269, 1284), False, 'from market.tick_data import TickData\n')] |
""" This example shows, what a module for
providing dynamic weldx functions like parameter calculations, e.g. could look like """
import asdf
import weldx
import weldx.asdf
from weldx import Q_ # pint quantity from the weldx package
from weldx.asdf.extension import WeldxAsdfExtension, WeldxExtension
from weldx.welding.groove.iso_9692_1 import get_groove, IsoBaseGroove, _create_test_grooves
from weldx.core import TimeSeries as TS
from weldx.welding.processes import GmawProcess
def processor_desciption(display_name):
    """Decorator factory: attach ``display_name`` to the decorated function.

    The wrapped function itself is returned unchanged.
    """
    def _decorate(fn):
        fn.display_name = display_name
        return fn
    return _decorate
@processor_desciption(display_name="seam length addition")
def add_seam_length(file: asdf.AsdfFile) -> asdf.AsdfFile:
    """Return a copy of *file* with a 700 mm seam length on the workpiece."""
    # Work on a copy so the incoming ASDF file stays untouched.
    result = asdf.AsdfFile(file.tree)
    # NOTE(review): assumes tree["workpiece"] exists — in a real case, add error handling.
    result.tree["workpiece"]["length"] = Q_(700, "mm")
    return result
@processor_desciption(display_name="Process spray addition")
def add_process_spray(file: asdf.AsdfFile) -> asdf.AsdfFile:
    """Return a copy of *file* with a CLOOS "Quinto" spray-arc GMAW process attached.

    See https://weldx.readthedocs.io/en/latest/tutorials/GMAW_process.html
    """
    new_file = asdf.AsdfFile(file.tree)  # make a copy
    # Integers in Q_ fail upon ASDF reading, so every magnitude must be a float.
    # (Bug fix: ``characteristic`` previously used the integer 5.)
    params_spray = dict(
        wire_feedrate=Q_(10.0, "m/min"),
        voltage=TS(data=Q_([40.0, 20.0], "V"), time=Q_([0.0, 10.0], "s")),
        impedance=Q_(10.0, "percent"),
        characteristic=Q_(5.0, "V/A"),
    )
    process_spray = GmawProcess(
        "spray", "CLOOS", "Quinto", params_spray, tag="CLOOS/spray_arc"
    )
    new_file.tree["spray"] = process_spray
    return new_file
def add_pulse_ui(file: asdf.AsdfFile) -> asdf.AsdfFile:
    """Return a copy of *file* with a UI-modulated CLOOS pulse process attached."""
    result = asdf.AsdfFile(file.tree)
    pulse_parameters = {
        "wire_feedrate": Q_(10.0, "m/min"),
        "pulse_voltage": Q_(40.0, "V"),
        "pulse_duration": Q_(5.0, "ms"),
        "pulse_frequency": Q_(100.0, "Hz"),
        "base_current": Q_(60.0, "A"),
    }
    result.tree["pulse_UI"] = GmawProcess(
        "pulse",
        "CLOOS",
        "Quinto",
        pulse_parameters,
        tag="CLOOS/pulse",
        meta={"modulation": "UI"},
    )
    return result
if __name__ == "__main__":
    # Build a minimal file containing just one test groove as the workpiece.
    example_groove = list(_create_test_grooves().values())[0][0]
    file = asdf.AsdfFile({"workpiece": {"groove": example_groove}})
    # Apply each processor in turn and print the resulting YAML header.
    # (The three copy-pasted step-1/2/3 blocks were collapsed into one loop;
    # the printed output is identical.)
    for processor in (add_seam_length, add_process_spray, add_pulse_ui):
        file = processor(file)
        buffer = weldx.asdf.util.write_buffer(file.tree)
        print(weldx.asdf.util.get_yaml_header(buffer))
"weldx.welding.groove.iso_9692_1._create_test_grooves",
"weldx.asdf.util.write_buffer",
"asdf.AsdfFile",
"weldx.Q_",
"weldx.asdf.util.get_yaml_header",
"weldx.welding.processes.GmawProcess"
] | [((798, 822), 'asdf.AsdfFile', 'asdf.AsdfFile', (['file.tree'], {}), '(file.tree)\n', (811, 822), False, 'import asdf\n'), ((880, 893), 'weldx.Q_', 'Q_', (['(700)', '"""mm"""'], {}), "(700, 'mm')\n", (882, 893), False, 'from weldx import Q_\n'), ((1180, 1204), 'asdf.AsdfFile', 'asdf.AsdfFile', (['file.tree'], {}), '(file.tree)\n', (1193, 1204), False, 'import asdf\n'), ((1534, 1610), 'weldx.welding.processes.GmawProcess', 'GmawProcess', (['"""spray"""', '"""CLOOS"""', '"""Quinto"""', 'params_spray'], {'tag': '"""CLOOS/spray_arc"""'}), "('spray', 'CLOOS', 'Quinto', params_spray, tag='CLOOS/spray_arc')\n", (1545, 1610), False, 'from weldx.welding.processes import GmawProcess\n'), ((1758, 1782), 'asdf.AsdfFile', 'asdf.AsdfFile', (['file.tree'], {}), '(file.tree)\n', (1771, 1782), False, 'import asdf\n'), ((2027, 2130), 'weldx.welding.processes.GmawProcess', 'GmawProcess', (['"""pulse"""', '"""CLOOS"""', '"""Quinto"""', 'params_pulse'], {'tag': '"""CLOOS/pulse"""', 'meta': "{'modulation': 'UI'}"}), "('pulse', 'CLOOS', 'Quinto', params_pulse, tag='CLOOS/pulse',\n meta={'modulation': 'UI'})\n", (2038, 2130), False, 'from weldx.welding.processes import GmawProcess\n'), ((2406, 2425), 'asdf.AsdfFile', 'asdf.AsdfFile', (['tree'], {}), '(tree)\n', (2419, 2425), False, 'import asdf\n'), ((2493, 2539), 'weldx.asdf.util.write_buffer', 'weldx.asdf.util.write_buffer', (['file_step_1.tree'], {}), '(file_step_1.tree)\n', (2521, 2539), False, 'import weldx\n'), ((2570, 2621), 'weldx.asdf.util.get_yaml_header', 'weldx.asdf.util.get_yaml_header', (['file_step_1_buffer'], {}), '(file_step_1_buffer)\n', (2601, 2621), False, 'import weldx\n'), ((2733, 2779), 'weldx.asdf.util.write_buffer', 'weldx.asdf.util.write_buffer', (['file_step_2.tree'], {}), '(file_step_2.tree)\n', (2761, 2779), False, 'import weldx\n'), ((2810, 2861), 'weldx.asdf.util.get_yaml_header', 'weldx.asdf.util.get_yaml_header', (['file_step_2_buffer'], {}), '(file_step_2_buffer)\n', (2841, 2861), False, 'import 
weldx\n'), ((2968, 3014), 'weldx.asdf.util.write_buffer', 'weldx.asdf.util.write_buffer', (['file_step_3.tree'], {}), '(file_step_3.tree)\n', (2996, 3014), False, 'import weldx\n'), ((3045, 3096), 'weldx.asdf.util.get_yaml_header', 'weldx.asdf.util.get_yaml_header', (['file_step_3_buffer'], {}), '(file_step_3_buffer)\n', (3076, 3096), False, 'import weldx\n'), ((1343, 1360), 'weldx.Q_', 'Q_', (['(10.0)', '"""m/min"""'], {}), "(10.0, 'm/min')\n", (1345, 1360), False, 'from weldx import Q_\n'), ((1451, 1470), 'weldx.Q_', 'Q_', (['(10.0)', '"""percent"""'], {}), "(10.0, 'percent')\n", (1453, 1470), False, 'from weldx import Q_\n'), ((1495, 1507), 'weldx.Q_', 'Q_', (['(5)', '"""V/A"""'], {}), "(5, 'V/A')\n", (1497, 1507), False, 'from weldx import Q_\n'), ((1830, 1847), 'weldx.Q_', 'Q_', (['(10.0)', '"""m/min"""'], {}), "(10.0, 'm/min')\n", (1832, 1847), False, 'from weldx import Q_\n'), ((1871, 1884), 'weldx.Q_', 'Q_', (['(40.0)', '"""V"""'], {}), "(40.0, 'V')\n", (1873, 1884), False, 'from weldx import Q_\n'), ((1909, 1922), 'weldx.Q_', 'Q_', (['(5.0)', '"""ms"""'], {}), "(5.0, 'ms')\n", (1911, 1922), False, 'from weldx import Q_\n'), ((1948, 1963), 'weldx.Q_', 'Q_', (['(100.0)', '"""Hz"""'], {}), "(100.0, 'Hz')\n", (1950, 1963), False, 'from weldx import Q_\n'), ((1986, 1999), 'weldx.Q_', 'Q_', (['(60.0)', '"""A"""'], {}), "(60.0, 'A')\n", (1988, 1999), False, 'from weldx import Q_\n'), ((1386, 1407), 'weldx.Q_', 'Q_', (['[40.0, 20.0]', '"""V"""'], {}), "([40.0, 20.0], 'V')\n", (1388, 1407), False, 'from weldx import Q_\n'), ((1412, 1432), 'weldx.Q_', 'Q_', (['[0.0, 10.0]', '"""s"""'], {}), "([0.0, 10.0], 's')\n", (1414, 1432), False, 'from weldx import Q_\n'), ((2303, 2325), 'weldx.welding.groove.iso_9692_1._create_test_grooves', '_create_test_grooves', ([], {}), '()\n', (2323, 2325), False, 'from weldx.welding.groove.iso_9692_1 import get_groove, IsoBaseGroove, _create_test_grooves\n')] |
import os
import numpy as np
import torch
import matplotlib.pyplot as plt
import cv2
import rlkit.torch.sac.diayn
from .mode_actions_sampler import ModeActionSampler
from network import ModeDisentanglingNetwork
from env import DmControlEnvForPytorchBothObstype
class DisentanglingTester:
    """Renders one video per skill produced by a trained mode-disentangling model.

    For each skill the tester resets the environment, autoregressively samples
    mode-conditioned actions from the latent model and writes the pixel
    observations of the rollout to a numbered ``.avi`` file.
    """

    def __init__(self,
                 latent_model_path,
                 env,
                 seed,
                 video_dir,
                 ):
        # Latent model (loaded in eval mode; no training happens here)
        self.latent_model = torch.load(latent_model_path).eval()
        print("Model loaded")
        # Environment: both pixel and state observations are needed, because
        # actions are sampled from encoded state observations while the video
        # frames come from the pixel observations.
        self.env = env
        assert isinstance(self.env, DmControlEnvForPytorchBothObstype), \
            'Both observation types (pixel and state representantion are needed' \
            ' to create the test video. ' \
            'Take DmControlForPytorchBothObstype env-class'
        assert self.env.obs_type == 'pixels'
        self.action_repeat = self.env.action_repeat
        # Seed all random sources for reproducible rollouts
        torch.manual_seed(seed)
        np.random.seed(seed)
        self.env.seed(seed)
        # Device
        self.device = torch.device(
            "cuda" if torch.cuda.is_available() else "cpu"
        )
        # Directories
        self.video_dir = video_dir
        os.makedirs(video_dir, exist_ok=True)
        # Video writer state; one numbered "skillN" file per generated skill
        self.video_cnt = 0
        self.video_name = 'skill'
        self.video = None
        self._reset_video_writer(video_name=self.video_name + str(self.video_cnt))
        # Sampling mode conditioned actions from the latent model
        self.mode_action_sampler = ModeActionSampler(self.latent_model, device=self.device)
        # Counters: total env steps taken and episodes finished
        self.steps = 0
        self.episodes = 0

    def _create_video_name(self):
        # File stem for the next video, e.g. "skill3"
        return self.video_name + str(self.video_cnt)

    def _reset_video_writer(self, video_name):
        """Open a new .avi writer sized to the pixel observation; bump the counter."""
        video_path = os.path.join(self.video_dir, video_name)
        video_path += '.avi'
        # Observation appears to be channel-first; dims 1 and 2 give the frame
        # size (consistent with the transpose in _write_img_to_video).
        rows = self.env.observation_space.shape[1]
        cols = self.env.observation_space.shape[2]
        self.video = cv2.VideoWriter(video_path,
                                     cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'),
                                     25,
                                     (rows, cols),
                                     True)
        self.video_cnt += 1

    def _write_img_to_video(self, img):
        # (H, W, num_channels) seems to be needed by cvtColor
        if img.shape[0] == 3:
            img = img.transpose(1, 2, 0)
        bgr_img = cv2.cvtColor(img.astype(np.uint8), cv2.COLOR_RGB2BGR)
        self.video.write(bgr_img)

    def _save_video(self):
        # Flush and close the current video file
        self.video.release()

    def _get_state_obs_enc(self):
        """Encode the current state observation with the latent model's encoder."""
        state_obs = self.env.get_state_obs()
        with torch.no_grad():
            state_obs_enc = self.latent_model.encoder(
                torch.from_numpy(state_obs).unsqueeze(dim=0).float().to(self.device)
            )
        return state_obs_enc

    def generate_skill_autoregressive(self):
        """Roll out one episode, feeding encoded states back into the action sampler."""
        # Env reset
        pixel_obs = self.env.reset()
        state_obs = self._get_state_obs_enc()
        self._write_img_to_video(pixel_obs)
        # Counters
        self.episodes += 1
        episode_steps = 0
        # Done Flag
        done = False
        while not done:
            action = self.mode_action_sampler(state_obs)
            pixel_obs, _, done, _ = self.env.step(action.cpu().numpy())
            state_obs = self._get_state_obs_enc()
            # Each env.step() advances the simulation by action_repeat frames
            self.steps += self.action_repeat
            episode_steps += self.action_repeat
            self._write_img_to_video(pixel_obs)

    def run(self, num_skills=10):
        """Generate and save one video for each of *num_skills* skills."""
        for skill in range(num_skills):
            # Reset the sampler and open a fresh video file for this skill
            self.mode_action_sampler.reset()
            self._reset_video_writer(video_name=self._create_video_name())
            # Make video
            self.generate_skill_autoregressive()
            self._save_video()
| [
"torch.manual_seed",
"os.makedirs",
"torch.load",
"os.path.join",
"torch.from_numpy",
"torch.cuda.is_available",
"numpy.random.seed",
"cv2.VideoWriter_fourcc",
"torch.no_grad"
] | [((990, 1013), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (1007, 1013), False, 'import torch\n'), ((1022, 1042), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (1036, 1042), True, 'import numpy as np\n'), ((1260, 1297), 'os.makedirs', 'os.makedirs', (['video_dir'], {'exist_ok': '(True)'}), '(video_dir, exist_ok=True)\n', (1271, 1297), False, 'import os\n'), ((1867, 1907), 'os.path.join', 'os.path.join', (['self.video_dir', 'video_name'], {}), '(self.video_dir, video_name)\n', (1879, 1907), False, 'import os\n'), ((2125, 2167), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (['"""M"""', '"""J"""', '"""P"""', '"""G"""'], {}), "('M', 'J', 'P', 'G')\n", (2147, 2167), False, 'import cv2\n'), ((2765, 2780), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2778, 2780), False, 'import torch\n'), ((495, 524), 'torch.load', 'torch.load', (['latent_model_path'], {}), '(latent_model_path)\n', (505, 524), False, 'import torch\n'), ((1147, 1172), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1170, 1172), False, 'import torch\n'), ((2853, 2880), 'torch.from_numpy', 'torch.from_numpy', (['state_obs'], {}), '(state_obs)\n', (2869, 2880), False, 'import torch\n')] |
import day18.src as src
def test_part1():
    # Part 1 on the small example input (second arg presumably a size or
    # iteration parameter — confirm against day18.src).
    assert src.part1(src.TEST1_INPUT_FILE, 4) == 4
def test_part1_full():
    # Part 1 on the full puzzle input; 768 is the accepted answer.
    assert src.part1(src.FULL_INPUT_FILE) == 768
def test_part2():
    # Part 2 on its example input.
    assert src.part2(src.TEST2_INPUT_FILE, 5) == 17
def test_part2_full():
    # Part 2 on the full puzzle input; 781 is the accepted answer.
    assert src.part2(src.FULL_INPUT_FILE) == 781
| [
"day18.src.part2",
"day18.src.part1"
] | [((55, 89), 'day18.src.part1', 'src.part1', (['src.TEST1_INPUT_FILE', '(4)'], {}), '(src.TEST1_INPUT_FILE, 4)\n', (64, 89), True, 'import day18.src as src\n'), ((131, 161), 'day18.src.part1', 'src.part1', (['src.FULL_INPUT_FILE'], {}), '(src.FULL_INPUT_FILE)\n', (140, 161), True, 'import day18.src as src\n'), ((200, 234), 'day18.src.part2', 'src.part2', (['src.TEST2_INPUT_FILE', '(5)'], {}), '(src.TEST2_INPUT_FILE, 5)\n', (209, 234), True, 'import day18.src as src\n'), ((277, 307), 'day18.src.part2', 'src.part2', (['src.FULL_INPUT_FILE'], {}), '(src.FULL_INPUT_FILE)\n', (286, 307), True, 'import day18.src as src\n')] |
import subprocess
import time
import warnings

import requests
import pyfiglet
from colorama import init
from termcolor import colored

# colorama only needs a single init() call (it was previously re-initialised
# before every coloured print).
init()
print(colored(pyfiglet.figlet_format("Ping Trace"), 'green'))
print(colored(pyfiglet.figlet_format("By <NAME>", font="digital"), 'green'))

print(colored('Enter Website URL :', 'red'))
site = input()
url = 'https://' + site

# Certificate verification is disabled, so silence the resulting warning.
# NOTE(review): verify=False is a security risk — enable verification if possible.
# Also note: requests.get raises (and the script crashes) if the host is
# entirely unreachable, same as the original behaviour.
warnings.filterwarnings("ignore")
response = requests.get(url, verify=False)
print("URL : ", url)

# Run the system ping. The hostname comes straight from the user, so pass an
# argument list (no shell) to avoid command injection via os.system().
subprocess.run(["ping", site])
if response.status_code == 200:
    print(colored('Website is Reachable', 'green'))
else:
    print(colored('Website is not reachable', 'red'))

# Keep the console window open (script was presumably launched by double-click).
time.sleep(100000)
| [
"termcolor.colored",
"pyfiglet.figlet_format",
"requests.get",
"time.sleep",
"os.system",
"warnings.filterwarnings",
"colorama.init"
] | [((157, 193), 'pyfiglet.figlet_format', 'pyfiglet.figlet_format', (['"""Ping Trace"""'], {}), "('Ping Trace')\n", (179, 193), False, 'import pyfiglet\n'), ((205, 256), 'pyfiglet.figlet_format', 'pyfiglet.figlet_format', (['"""By <NAME>"""'], {'font': '"""digital"""'}), "('By <NAME>', font='digital')\n", (227, 256), False, 'import pyfiglet\n'), ((263, 269), 'colorama.init', 'init', ([], {}), '()\n', (267, 269), False, 'from colorama import init\n'), ((339, 345), 'colorama.init', 'init', ([], {}), '()\n', (343, 345), False, 'from colorama import init\n'), ((432, 465), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (455, 465), False, 'import warnings\n'), ((473, 504), 'requests.get', 'requests.get', (['cbd'], {'verify': '(False)'}), '(cbd, verify=False)\n', (485, 504), False, 'import requests\n'), ((762, 780), 'time.sleep', 'time.sleep', (['(100000)'], {}), '(100000)\n', (772, 780), False, 'import time\n'), ((277, 302), 'termcolor.colored', 'colored', (['result1', '"""green"""'], {}), "(result1, 'green')\n", (284, 302), False, 'from termcolor import colored\n'), ((311, 336), 'termcolor.colored', 'colored', (['result2', '"""green"""'], {}), "(result2, 'green')\n", (318, 336), False, 'from termcolor import colored\n'), ((353, 390), 'termcolor.colored', 'colored', (['"""Enter Website URL :"""', '"""red"""'], {}), "('Enter Website URL :', 'red')\n", (360, 390), False, 'from termcolor import colored\n'), ((570, 576), 'colorama.init', 'init', ([], {}), '()\n', (574, 576), False, 'from colorama import init\n'), ((582, 603), 'os.system', 'system', (["('ping ' + abc)"], {}), "('ping ' + abc)\n", (588, 603), False, 'from os import system\n'), ((669, 675), 'colorama.init', 'init', ([], {}), '()\n', (673, 675), False, 'from colorama import init\n'), ((681, 702), 'os.system', 'system', (["('ping ' + abc)"], {}), "('ping ' + abc)\n", (687, 702), False, 'from os import system\n'), ((615, 655), 'termcolor.colored', 'colored', 
(['"""Website is Reachable"""', '"""green"""'], {}), "('Website is Reachable', 'green')\n", (622, 655), False, 'from termcolor import colored\n'), ((714, 756), 'termcolor.colored', 'colored', (['"""Website is not reachable"""', '"""red"""'], {}), "('Website is not reachable', 'red')\n", (721, 756), False, 'from termcolor import colored\n')] |
import argparse
from pathlib import Path
from rabbit_context import RabbitContext
def publish_messages_from_json_file_path(queue_name: str, source_file_path: Path, destination_file_path: Path):
    """Publish every ``*.json`` file under *source_file_path* as one message each.

    NB: this exists to support the single (JSON) file model and should not be
    used with the new style (dump) files.
    """
    with RabbitContext(queue_name=queue_name) as rabbit:
        for json_file in source_file_path.rglob('*.json'):
            message_body = json_file.read_text()
            rabbit.publish_message(message_body, 'application/json')
            # Move the file out of the input directory once published.
            json_file.replace(destination_file_path / json_file.name)
def publish_messages_from_dump_files(queue_name: str, source_file_path: Path, destination_file_path: Path):
    """Publish each line of every ``*.dump`` file under *source_file_path* as a message.

    Files are moved to *destination_file_path* after all their lines are published.
    """
    with RabbitContext(queue_name=queue_name) as rabbit:
        for file_path in source_file_path.rglob('*.dump'):
            # Bug fix: the handle from file_path.open() was never closed;
            # a context manager releases it promptly.
            with file_path.open() as dump_file:
                for json_message in dump_file:
                    rabbit.publish_message(json_message, 'application/json')
            file_path.replace(destination_file_path.joinpath(file_path.name))
def parse_arguments():
    """Parse the command line: queue name, source dir, destination dir, mode flag."""
    arg_parser = argparse.ArgumentParser(
        description='Publish each file in a directory as a message to a rabbit queue')
    arg_parser.add_argument('queue_name', help='Name of queue to publish to', type=str)
    arg_parser.add_argument('source_file_path', help='Directory to read input files from', type=Path)
    arg_parser.add_argument('destination_file_path', help='Directory to move published input files to', type=Path)
    arg_parser.add_argument('--separate-files', help="Each message has its own (JSON) file [LEGACY]", required=False,
                            action='store_true')
    return arg_parser.parse_args()
def main():
    """Entry point: dispatch to the legacy (per-file) or dump-file publisher."""
    args = parse_arguments()
    publisher = (publish_messages_from_json_file_path
                 if args.separate_files
                 else publish_messages_from_dump_files)
    publisher(args.queue_name, args.source_file_path, args.destination_file_path)
if __name__ == '__main__':
main()
| [
"rabbit_context.RabbitContext",
"argparse.ArgumentParser"
] | [((1065, 1172), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Publish each file in a directory as a message to a rabbit queue"""'}), "(description=\n 'Publish each file in a directory as a message to a rabbit queue')\n", (1088, 1172), False, 'import argparse\n'), ((337, 373), 'rabbit_context.RabbitContext', 'RabbitContext', ([], {'queue_name': 'queue_name'}), '(queue_name=queue_name)\n', (350, 373), False, 'from rabbit_context import RabbitContext\n'), ((719, 755), 'rabbit_context.RabbitContext', 'RabbitContext', ([], {'queue_name': 'queue_name'}), '(queue_name=queue_name)\n', (732, 755), False, 'from rabbit_context import RabbitContext\n')] |
from dns_messages.utilities import decode_character_string
from typing import List
from .items import *
from .resource_record import ResourceRecord
from ...utilities.character_string_encoder_and_decoder import encode_character_string
class HINFO(ResourceRecord):
    """HINFO resource record: host CPU type and operating system."""

    def __init__(self, name: str, rr_class: RRClass, ttl: int, cpu: bytes, os: bytes):
        super().__init__(name=name, rr_class=rr_class, ttl=ttl)
        self.cpu = cpu  # CPU character-string, kept as raw bytes
        self.os = os    # OS character-string, kept as raw bytes

    def __str__(self) -> str:
        return f'HINFO[name={self.name}, cpu={self.cpu}, os={self.os}]'

    def get_RR_type(self) -> RRType:
        return RRType.HINFO

    @staticmethod
    def from_bytes(raw_bytes: bytes, raw_bits: List[int], byte_offset: int, rr_data_length: int, name: str, rr_class: RRClass, ttl: int) -> 'HINFO':
        """Parse the CPU and OS character-strings starting at *byte_offset*."""
        byte_offset, cpu = decode_character_string(raw_bytes=raw_bytes, raw_bits=raw_bits, byte_offset=byte_offset)
        _, os = decode_character_string(raw_bytes=raw_bytes, raw_bits=raw_bits, byte_offset=byte_offset)
        return HINFO(name=name, rr_class=rr_class, ttl=ttl, cpu=cpu, os=os)

    def _data_to_bytes(self) -> bytes:
        """Serialise as the concatenation of the two encoded character-strings."""
        cpu_bytes = encode_character_string(character_string=self.cpu)
        os_bytes = encode_character_string(character_string=self.os)
        return cpu_bytes + os_bytes
| [
"dns_messages.utilities.decode_character_string"
] | [((836, 929), 'dns_messages.utilities.decode_character_string', 'decode_character_string', ([], {'raw_bytes': 'raw_bytes', 'raw_bits': 'raw_bits', 'byte_offset': 'byte_offset'}), '(raw_bytes=raw_bytes, raw_bits=raw_bits, byte_offset\n =byte_offset)\n', (859, 929), False, 'from dns_messages.utilities import decode_character_string\n'), ((941, 1034), 'dns_messages.utilities.decode_character_string', 'decode_character_string', ([], {'raw_bytes': 'raw_bytes', 'raw_bits': 'raw_bits', 'byte_offset': 'byte_offset'}), '(raw_bytes=raw_bytes, raw_bits=raw_bits, byte_offset\n =byte_offset)\n', (964, 1034), False, 'from dns_messages.utilities import decode_character_string\n')] |
import unittest
from model.spent_time_records import WorkedDay, WorkedTask
class TestWorkedDay(unittest.TestCase):
def test_hours_normalization_one(self):
tasks = [
self.__workedTask('PZ--001.001', None, 'Scrum', '00:29:10'),
self.__workedTask('PZ--001.001', None, 'Infomeeting', '01:02:59'),
self.__workedTask('PZ--999.999', 'AP', 'SVF-1234', '05:01:00'),
]
day = WorkedDay(tasks)
day.normalize_hours()
self.assertEqual(day.tasks[0].hours, 0.5)
self.assertEqual(day.tasks[1].hours, 1)
self.assertEqual(day.tasks[2].hours, 5)
def test_total_duration_after_normalization(self):
tasks = [
self.__workedTask('PZ--102.102', 'AP', 'SVF-3798', '00:15:58'),
self.__workedTask('PA--300.001', 'AP', 'SVF-9940', '00:46:10'),
self.__workedTask('PA--300.001', 'AP', 'SVF-9740', '01:44:10'),
self.__workedTask('PA--300.001', 'AP', 'SVF-7387', '01:59:06'),
self.__workedTask('PZ--001.001', '', 'Retro', '01:00:05'),
self.__workedTask('PZ--001.001', '', 'Proxy meeting', '01:00:15'),
self.__workedTask('PZ--999.999', 'VS', 'SVF-6516', '00:40:47'),
self.__workedTask('PZ--001.001', '', 'Scrum', '00:30:06'),
]
day = WorkedDay(tasks)
day.normalize_hours()
self.assertEqual(day.total_hours(), 8)
def __get_hours(self, timestring: str):
from datetime import datetime
pt = datetime.strptime(timestring, '%H:%M:%S')
total_seconds = pt.second + pt.minute*60 + pt.hour*3600
return total_seconds / 3600
def __workedTask(self, wo: str, act: str, descr: str, h: str) -> dict:
return dict(
workorder = wo,
activity = act,
description = descr,
hours = self.__get_hours(h)
)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"model.spent_time_records.WorkedDay",
"datetime.datetime.strptime"
] | [((1927, 1942), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1940, 1942), False, 'import unittest\n'), ((432, 448), 'model.spent_time_records.WorkedDay', 'WorkedDay', (['tasks'], {}), '(tasks)\n', (441, 448), False, 'from model.spent_time_records import WorkedDay, WorkedTask\n'), ((1326, 1342), 'model.spent_time_records.WorkedDay', 'WorkedDay', (['tasks'], {}), '(tasks)\n', (1335, 1342), False, 'from model.spent_time_records import WorkedDay, WorkedTask\n'), ((1517, 1558), 'datetime.datetime.strptime', 'datetime.strptime', (['timestring', '"""%H:%M:%S"""'], {}), "(timestring, '%H:%M:%S')\n", (1534, 1558), False, 'from datetime import datetime\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
def get_requirements(name):
    """Read the file *name* and return its lines (newlines stripped) as a list."""
    with open(name) as requirements_file:
        contents = requirements_file.read()
    return contents.splitlines()
# Runtime dependencies (none at the moment).
install_requires = []
# Extras: test runner and coverage plugin.
tests_require = [
    'pytest',
    'pytest-cov',
]
# Extras: documentation toolchain.
docs_require = [
    'sphinx',
    'sphinx_rtd_theme',
]
# Extras: linting / packaging checks.
linting_requires = [
    'flake8',
    'black',
    'readme_renderer',
    'check-manifest',
    'docutils',
]
# Package metadata; sources live under src/ (the "src layout").
setup(
    name="fibonacci",
    version="0.1",
    author="<NAME>",
    author_email="<EMAIL>",
    url="https://github.com/longr/python_packaging_example",
    description="A simple example package.",
    packages=find_packages(where="src"),
    package_dir={"": "src"},
    install_requires=install_requires,
    tests_require=tests_require,
    extras_require={"testing": tests_require,
                    'docs': docs_require,
                    'linting': linting_requires},
)
| [
"setuptools.find_packages"
] | [((646, 672), 'setuptools.find_packages', 'find_packages', ([], {'where': '"""src"""'}), "(where='src')\n", (659, 672), False, 'from setuptools import setup, find_packages\n')] |
from pyspark.sql import SparkSession
if __name__ == "__main__":
    # Build (or reuse) a session with Python-side profiling and Arrow-based
    # pandas conversion enabled.
    spark = SparkSession \
        .builder \
        .config("spark.python.profile", "true") \
        .config("spark.sql.execution.arrow.enabled", "true") \
        .getOrCreate()
    # NOTE(review): "CustomDataSource" must be a data source registered on the
    # classpath — confirm against the project's packaging.
    df = spark.read.format("CustomDataSource").load()
    df.printSchema()
    # Convert to pandas (accelerated by the Arrow setting above).
    pdf = df.toPandas()
    # Dump the collected Python worker profiles to stdout.
    spark.sparkContext.show_profiles()
| [
"pyspark.sql.SparkSession.builder.config"
] | [((78, 137), 'pyspark.sql.SparkSession.builder.config', 'SparkSession.builder.config', (['"""spark.python.profile"""', '"""true"""'], {}), "('spark.python.profile', 'true')\n", (105, 137), False, 'from pyspark.sql import SparkSession\n')] |
import sys
# UCSC track-hub stanza template. output_hub_data() substitutes the placeholder
# tokens: composite_name, fw_name/rv_name, fw_url/rv_url and color_value
# (one composite holding a forward- and a reverse-strand bigWig track).
track_hub_text = """#############
track composite_name
compositeTrack on
type bigWig 0 1000
priority 9.5
visibility full
shortLabel composite_name
longLabel composite_name
track fw_name
parent composite_name
type bigWig 0 1000
visibility full
bigDataUrl fw_url
longLabel fw_name
shortLabel fw_name
negateValues off
color color_value
altColor color_value
alwaysZero on
autoScale on
maxHeightPixels 128:128:16
track rv_name
parent composite_name
type bigWig 0 1000
visibility full
bigDataUrl rv_url
longLabel rv_name
shortLabel rv_name
negateValues on
color color_value
altColor color_value
alwaysZero on
autoScale on
maxHeightPixels 128:128:16
"""
def output_hub_data(output_location, sequencing_files):
    """Print a filled-in track-hub stanza for every recognised bigwig file."""
    base_url = "http://pricenas.biochem.uiowa.edu/" + output_location + "/"
    # The two known file-naming conventions; anything else is skipped.
    known_suffixes = ("-dedup-FW-FJ616285.1.bw", "-dedup-FJ616285.1-FW.bw")
    for file in sequencing_files:
        suffix = next((s for s in known_suffixes if s in file), None)
        if suffix is None:
            continue
        # Colour depends on the organism encoded in the file name.
        color = "153,58,29" if "flavo" in file.lower() else "113,35,124"
        substitutions = (
            ("composite_name", file.replace(suffix, "")),
            ("fw_name", file.replace(suffix, "_FW")),
            ("rv_name", file.replace(suffix, "_RV")),
            ("fw_url", base_url + file),
            ("rv_url", base_url + file.replace("FW", "RV")),
            ("color_value", color),
        )
        output_data = track_hub_text
        for placeholder, value in substitutions:
            output_data = output_data.replace(placeholder, value)
        print(output_data)
def print_usage():
    """Print the command-line usage string for this script."""
    print("python3 bigwigs_to_track_hubs <output location on PriceNAS> <bigwigs (can use * operator)>")
def parse_args(args):
    """Split argv into (output_location, forward-strand bigwig files).

    Exits with usage information when fewer than two arguments are given.
    """
    if len(args) < 2:
        print_usage()
        sys.exit(1)
    output_location, *sequencing_files = args
    # Keep only forward-strand files; their reverse partners are derived later.
    forward_files = [name for name in sequencing_files
                     if "FW" in name and "RV" not in name]
    return output_location, forward_files
def main(args):
    """Entry point: parse the arguments and emit the track-hub stanzas."""
    output_hub_data(*parse_args(args))
if __name__ == '__main__':
main(sys.argv[1:])
| [
"sys.exit"
] | [((2033, 2044), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2041, 2044), False, 'import sys\n')] |
import unittest
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../../"))
import numpy as np
import tensorflow as tf
from sklearn.cross_validation import train_test_split
from sklearn.metrics import classification_report
from model.proposal.vector_rnn.model import VectoredRNN, VectoredRNNInterface
from model.proposal.vector_rnn.trainer import VectorRNNTrainer
from model.proposal.conscious_rnn.detector import ConsciousDetector
from model.dataset.dbd_reader import DbdReader
class TestVectorRNNByDbdData(unittest.TestCase):
    """Integration test: train a VectoredRNN on DBD dialog data, save, reload, decode."""

    # Paths are resolved in setUpClass relative to this test file.
    DATA_DIR = ""
    TARGET_PATH = ""
    VECTOR_DATA_PATH = ""
    TRAIN_DIR = ""
    Reader = None  # shared DbdReader instance, built once per test class
    vector_size = 200
    vocab_size = 2000
    # (source length, target length) bucket boundaries for the seq2seq batches
    buckets = [(5, 10), (10, 15), (20, 25), (40, 50)]
    @classmethod
    def setUpClass(cls):
        # development data for last year
        cls.DATA_DIR = os.path.join(os.path.dirname(__file__), "../../../data/dialog_log/2015/evaluation")
        cls.TARGET_PATH = os.path.join(os.path.dirname(__file__), "../../data/test_dbd_vector_rnn.txt")
        cls.VECTOR_DATA_PATH = os.path.join(os.path.dirname(__file__), "../../../run/vector_rnn/store/model_tokened.vec")
        cls.TRAIN_DIR = os.path.dirname(cls.TARGET_PATH) + "/training_vector_rnn"
        if not os.path.exists(cls.TRAIN_DIR):
            print("make training dir at {0}.".format(cls.TRAIN_DIR))
            os.makedirs(cls.TRAIN_DIR)
        cls.Reader = DbdReader(cls.DATA_DIR, cls.TARGET_PATH, max_vocabulary_size=cls.vocab_size, clear_when_exit=False)
        cls.Reader.init(cls.VECTOR_DATA_PATH)
    @classmethod
    def tearDownClass(cls):
        # Remove the intermediate files the reader generated in setUpClass.
        cls.Reader.remove_files()
        cls.Reader = None
    def make_model(self, user_vocab, system_vocab, size):
        """Build a single-layer VectoredRNN sized to the given vocabularies."""
        num_layers = 1
        model = VectoredRNN(
            source_vocab_size=len(user_vocab.vocab),
            target_vocab_size=len(system_vocab.vocab),
            size=size,
            num_layers=num_layers,
            name="vector_rnn"
        )
        return model
    def test_save_and_load_training(self):
        """Train briefly with checkpointing, then restore the model and decode samples."""
        batch_size = 8
        dataset, user_vocab, system_vocab = self.Reader.get_dataset()
        vocab_vectors = self.Reader.user_loader.load_vocab_vectors()
        labels = self.Reader.get_labels()
        # train with save
        print("## Execute training with save.")
        with tf.Graph().as_default() as train:
            model = self.make_model(user_vocab, system_vocab, len(vocab_vectors[0]))
            trainer = VectorRNNTrainer(model, self.buckets, batch_size, vocab_vectors, self.TRAIN_DIR)
            with tf.Session() as sess:
                trainer.set_optimizer(sess)
                for x in trainer.train(sess, dataset, labels, check_interval=10, max_iteration=100):
                    pass
        print("## Now, load from saved model")
        with tf.Graph().as_default() as prediction:
            # Turn a list of token ids back into a readable user utterance.
            decode_user = lambda s: " ".join([tf.compat.as_str(user_vocab.rev_vocab[i]) for i in s])
            model = self.make_model(user_vocab, system_vocab, len(vocab_vectors[0]))
            samples = np.random.randint(len(dataset), size=5)
            with tf.Session() as sess:
                model_if = VectoredRNNInterface(model, self.buckets, vocab_vectors, model_path=self.TRAIN_DIR)
                model_if.build(sess, predict=True)
                for s in samples:
                    pair = dataset[s]
                    output, _, _ = model_if.predict(sess, pair[0], pair[1])
                    text = model_if.decode(output, system_vocab.rev_vocab)
                    print("{0} -> {1}".format(decode_user(pair[0]), text))
if __name__ == '__main__':
unittest.main()
| [
"os.path.exists",
"tensorflow.Graph",
"tensorflow.compat.as_str",
"os.makedirs",
"tensorflow.Session",
"model.proposal.vector_rnn.model.VectoredRNNInterface",
"model.proposal.vector_rnn.trainer.VectorRNNTrainer",
"os.path.dirname",
"model.dataset.dbd_reader.DbdReader",
"unittest.main"
] | [((3692, 3707), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3705, 3707), False, 'import unittest\n'), ((66, 91), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (81, 91), False, 'import os\n'), ((1449, 1552), 'model.dataset.dbd_reader.DbdReader', 'DbdReader', (['cls.DATA_DIR', 'cls.TARGET_PATH'], {'max_vocabulary_size': 'cls.vocab_size', 'clear_when_exit': '(False)'}), '(cls.DATA_DIR, cls.TARGET_PATH, max_vocabulary_size=cls.vocab_size,\n clear_when_exit=False)\n', (1458, 1552), False, 'from model.dataset.dbd_reader import DbdReader\n'), ((885, 910), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (900, 910), False, 'import os\n'), ((995, 1020), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1010, 1020), False, 'import os\n'), ((1104, 1129), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1119, 1129), False, 'import os\n'), ((1206, 1238), 'os.path.dirname', 'os.path.dirname', (['cls.TARGET_PATH'], {}), '(cls.TARGET_PATH)\n', (1221, 1238), False, 'import os\n'), ((1280, 1309), 'os.path.exists', 'os.path.exists', (['cls.TRAIN_DIR'], {}), '(cls.TRAIN_DIR)\n', (1294, 1309), False, 'import os\n'), ((1392, 1418), 'os.makedirs', 'os.makedirs', (['cls.TRAIN_DIR'], {}), '(cls.TRAIN_DIR)\n', (1403, 1418), False, 'import os\n'), ((2522, 2607), 'model.proposal.vector_rnn.trainer.VectorRNNTrainer', 'VectorRNNTrainer', (['model', 'self.buckets', 'batch_size', 'vocab_vectors', 'self.TRAIN_DIR'], {}), '(model, self.buckets, batch_size, vocab_vectors, self.TRAIN_DIR\n )\n', (2538, 2607), False, 'from model.proposal.vector_rnn.trainer import VectorRNNTrainer\n'), ((2620, 2632), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (2630, 2632), True, 'import tensorflow as tf\n'), ((3177, 3189), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (3187, 3189), True, 'import tensorflow as tf\n'), ((3226, 3314), 
'model.proposal.vector_rnn.model.VectoredRNNInterface', 'VectoredRNNInterface', (['model', 'self.buckets', 'vocab_vectors'], {'model_path': 'self.TRAIN_DIR'}), '(model, self.buckets, vocab_vectors, model_path=self.\n TRAIN_DIR)\n', (3246, 3314), False, 'from model.proposal.vector_rnn.model import VectoredRNN, VectoredRNNInterface\n'), ((2381, 2391), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (2389, 2391), True, 'import tensorflow as tf\n'), ((2873, 2883), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (2881, 2883), True, 'import tensorflow as tf\n'), ((2958, 2999), 'tensorflow.compat.as_str', 'tf.compat.as_str', (['user_vocab.rev_vocab[i]'], {}), '(user_vocab.rev_vocab[i])\n', (2974, 2999), True, 'import tensorflow as tf\n')] |
#!/usr/bin/env python3
# Reads and logs the three AVR fuse bytes of the ATTiny_Daemon over I2C.
import sys
sys.path.append('/opt/attiny_daemon/') # add the path to our ATTiny module
import time
import smbus
import logging
from attiny_i2c import ATTiny
_time_const = 1.0 # used as a pause between i2c communications, the ATTiny is slow
_num_retries = 10 # the number of retries when reading from or writing to the ATTiny_Daemon
_i2c_address = 0x37 # the I2C address that is used for the ATTiny_Daemon
# set up logging
root_log = logging.getLogger()
root_log.setLevel("INFO")
# set up communication to the ATTiny_Daemon
bus = 1  # I2C bus number handed to the ATTiny wrapper
attiny = ATTiny(bus, _i2c_address, _time_const, _num_retries)
# access data: read each fuse byte and log it in hex
logging.info("Low fuse is " + hex(attiny.get_fuse_low()))
logging.info("High fuse is " + hex(attiny.get_fuse_high()))
logging.info("Extended fuse is " + hex(attiny.get_fuse_extended()))
| [
"logging.getLogger",
"sys.path.append",
"attiny_i2c.ATTiny"
] | [((37, 75), 'sys.path.append', 'sys.path.append', (['"""/opt/attiny_daemon/"""'], {}), "('/opt/attiny_daemon/')\n", (52, 75), False, 'import sys\n'), ((466, 485), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (483, 485), False, 'import logging\n'), ((574, 626), 'attiny_i2c.ATTiny', 'ATTiny', (['bus', '_i2c_address', '_time_const', '_num_retries'], {}), '(bus, _i2c_address, _time_const, _num_retries)\n', (580, 626), False, 'from attiny_i2c import ATTiny\n')] |
import pandas as pd
import urllib.request, io, csv
import os
import yfinance as yf
from datetime import datetime
def getStock(TickerList,TickerName,path,dataframe =None): #TickerListNeeds to be a list
    """Download (or incrementally refresh) price history for one ticker.

    The yfinance ticker symbol is derived from *path* by dropping its last
    nine characters (presumably a fixed-length file suffix -- TODO confirm).

    Parameters
    ----------
    TickerList, TickerName
        Unused; kept for signature compatibility with callers.
    path : str
        Output Excel path; also encodes the ticker symbol.
    dataframe : pandas.DataFrame, optional
        Previously saved history (newest row first). When given, only the
        gap since its newest date is fetched and prepended.

    Returns
    -------
    pandas.DataFrame
        Full history, newest row first, also written to *path*.
    """
    feed = yf.Ticker(path[:-9])
    if dataframe is None:
        # No prior data: pull the complete available history.
        history = feed.history(period="max")
        history = history.sort_index(axis=0, ascending=False)
    else:
        # Prior data given: fetch only from its newest date up to today.
        last_date = dataframe.index[0]
        today = datetime.today().strftime('%Y-%m-%d')
        fresh = feed.history(start=last_date, end=today)
        fresh = fresh.sort_index(axis=0, ascending=False)
        history = fresh.append(dataframe)
    history.to_excel(path)
    return history
| [
"datetime.datetime.today",
"yfinance.Ticker"
] | [((340, 360), 'yfinance.Ticker', 'yf.Ticker', (['path[:-9]'], {}), '(path[:-9])\n', (349, 360), True, 'import yfinance as yf\n'), ((508, 524), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (522, 524), False, 'from datetime import datetime\n')] |
#!/usr/bin/env python
import bz2
import gzip
import json
import optparse
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile
import urllib.error
import urllib.parse
import urllib.request
import zipfile
from ftplib import FTP
CHUNK_SIZE = 2**20 # 1mb
def cleanup_before_exit(tmp_dir):
    """Recursively delete the scratch directory *tmp_dir*.

    A falsy path or a directory that no longer exists is silently ignored.
    """
    if not tmp_dir:
        return
    if os.path.exists(tmp_dir):
        shutil.rmtree(tmp_dir)
def _get_files_in_ftp_path(ftp, path):
path_contents = []
ftp.retrlines('MLSD %s' % (path), path_contents.append)
return [line.split(';')[-1].lstrip() for line in path_contents]
def _get_stream_readers_for_tar(file_obj, tmp_dir):
fasta_tar = tarfile.open(fileobj=file_obj, mode='r:*')
return [fasta_tar.extractfile(member) for member in fasta_tar.getmembers()]
def _get_stream_readers_for_zip(file_obj, tmp_dir):
fasta_zip = zipfile.ZipFile(file_obj, 'r')
rval = []
for member in fasta_zip.namelist():
fasta_zip.extract(member, tmp_dir)
rval.append(open(os.path.join(tmp_dir, member), 'rb'))
return rval
def _get_stream_readers_for_gzip(file_obj, tmp_dir):
return [gzip.GzipFile(fileobj=file_obj, mode='rb')]
def _get_stream_readers_for_bz2(file_obj, tmp_dir):
return [bz2.BZ2File(file_obj.name, 'rb')]
def download_from_ncbi(data_manager_dict, params, target_directory,
                       database_id, database_name):
    """Download a FASTA archive from the NCBI BLAST FTP site, decompress it
    and build the DIAMOND database from its contents.

    The requested identifier (params['reference_source']['requested_identifier'])
    is probed under /blast/db/FASTA/ with every supported archive extension
    until a matching remote file is found.
    """
    NCBI_FTP_SERVER = 'ftp.ncbi.nlm.nih.gov'
    NCBI_DOWNLOAD_PATH = '/blast/db/FASTA/'
    # Supported archive extensions, each paired with the helper that opens it.
    COMPRESSED_EXTENSIONS = [('.tar.gz', _get_stream_readers_for_tar),
                             ('.tar.bz2', _get_stream_readers_for_tar),
                             ('.zip', _get_stream_readers_for_zip),
                             ('.gz', _get_stream_readers_for_gzip),
                             ('.bz2', _get_stream_readers_for_bz2)]
    ncbi_identifier = params['reference_source']['requested_identifier']
    ftp = FTP(NCBI_FTP_SERVER)
    ftp.login()  # anonymous login
    path_contents = _get_files_in_ftp_path(ftp, NCBI_DOWNLOAD_PATH)
    ncbi_file_name = None
    get_stream_reader = None
    ext = None
    # First matching extension wins; `ext` and `get_stream_reader` keep the
    # matching pair after the break and are reused below.
    for ext, get_stream_reader in COMPRESSED_EXTENSIONS:
        if "%s%s" % (ncbi_identifier, ext) in path_contents:
            ncbi_file_name = "%s%s%s" % (NCBI_DOWNLOAD_PATH, ncbi_identifier, ext)
            break
    if not ncbi_file_name:
        raise Exception('Unable to determine filename for NCBI database for %s: %s' % (ncbi_identifier, path_contents))
    tmp_dir = tempfile.mkdtemp(prefix='tmp-data-manager-ncbi-')
    ncbi_fasta_filename = os.path.join(tmp_dir, "%s%s" % (ncbi_identifier, ext))
    # fasta_base_filename = "%s.fa" % database_id
    # fasta_filename = os.path.join(target_directory, fasta_base_filename)
    # fasta_writer = open(fasta_filename, 'wb+')
    tmp_extract_dir = os.path.join(tmp_dir, 'extracted_fasta')
    os.mkdir(tmp_extract_dir)
    tmp_fasta = open(ncbi_fasta_filename, 'wb+')
    # Stream the archive to disk, then rewind so it can be decompressed.
    ftp.retrbinary('RETR %s' % ncbi_file_name, tmp_fasta.write)
    tmp_fasta.flush()
    tmp_fasta.seek(0)
    fasta_readers = get_stream_reader(tmp_fasta, tmp_extract_dir)
    data_table_entry = _stream_fasta_to_file(fasta_readers, target_directory, database_id, database_name, params)
    _add_data_table_entry(data_manager_dict, data_table_entry)
    for fasta_reader in fasta_readers:
        fasta_reader.close()
    tmp_fasta.close()
    cleanup_before_exit(tmp_dir)
def download_from_url(data_manager_dict, params, target_directory, database_id, database_name):
    """Fetch FASTA data from one or more user-supplied URLs (one per line)
    and build the DIAMOND database from the combined streams."""
    # TODO: we should automatically do decompression here
    raw_lines = params['reference_source']['user_url'].split('\n')
    urls = [line.strip() for line in raw_lines if line.strip()]
    fasta_reader = [urllib.request.urlopen(url) for url in urls]
    data_table_entry = _stream_fasta_to_file(fasta_reader, target_directory, database_id, database_name, params)
    _add_data_table_entry(data_manager_dict, data_table_entry)
def download_from_history(data_manager_dict, params, target_directory, database_id, database_name):
    """Build the DIAMOND database from FASTA dataset(s) selected from the
    user's Galaxy history."""
    # TODO: allow multiple FASTA input files
    input_filename = params['reference_source']['input_fasta']
    if isinstance(input_filename, list):
        fasta_reader = [open(name, 'rb') for name in input_filename]
    else:
        fasta_reader = open(input_filename, 'rb')
    data_table_entry = _stream_fasta_to_file(fasta_reader, target_directory, database_id, database_name, params)
    _add_data_table_entry(data_manager_dict, data_table_entry)
def copy_from_directory(data_manager_dict, params, target_directory, database_id, database_name):
    """Build the database from a FASTA file that already exists on the server.

    Depending on the tool parameters the file is either symlinked into the
    target directory or streamed through ``diamond makedb``.
    """
    input_filename = params['reference_source']['fasta_filename']
    create_symlink = params['reference_source']['create_symlink'] == 'create_symlink'
    if create_symlink:
        data_table_entry = _create_symlink(input_filename, target_directory, database_id, database_name)
    else:
        # Open in binary mode: _stream_fasta_to_file writes to a binary
        # handle, so a text-mode read would hand it str chunks. This also
        # matches download_from_history's behavior.
        if isinstance(input_filename, list):
            fasta_reader = [open(filename, 'rb') for filename in input_filename]
        else:
            fasta_reader = open(input_filename, 'rb')
        data_table_entry = _stream_fasta_to_file(fasta_reader, target_directory, database_id, database_name, params)
    _add_data_table_entry(data_manager_dict, data_table_entry)
def _add_data_table_entry(data_manager_dict, data_table_entry):
data_manager_dict['data_tables'] = data_manager_dict.get('data_tables', {})
data_manager_dict['data_tables']['diamond_database'] = data_manager_dict['data_tables'].get('diamond_database', [])
data_manager_dict['data_tables']['diamond_database'].append(data_table_entry)
return data_manager_dict
def _stream_fasta_to_file(fasta_stream, target_directory, database_id,
                          database_name, params, close_stream=True):
    """Concatenate one or more FASTA streams into a temporary file and run
    ``diamond makedb`` on it.

    Parameters
    ----------
    fasta_stream : file-like or list of file-like
        Open handle(s) yielding FASTA data. Bytes are expected; str chunks
        (text-mode handles) are encoded as UTF-8.
    target_directory : str
        Directory in which ``<database_id>.dmnd`` is created.
    database_id, database_name : str
        Identifier and display name for the resulting data-table entry.
    params : dict
        Tool parameters; ``params['tax_cond']`` controls how taxonomy files
        are passed to diamond.
    close_stream : bool
        Close each input handle after it has been consumed.

    Returns
    -------
    dict
        Data-table entry (value / name / db_path).

    Raises
    ------
    Exception
        If the NCBI prot.accession2taxid file cannot be found; exits the
        process (sys.exit) if diamond itself fails.
    """
    fasta_base_filename = "%s.fa" % database_id
    fasta_filename = os.path.join(target_directory, fasta_base_filename)
    temp_fasta = tempfile.NamedTemporaryFile(delete=False, suffix=".fasta")
    temp_fasta.close()
    fasta_writer = open(temp_fasta.name, 'wb+')
    if not isinstance(fasta_stream, list):
        fasta_stream = [fasta_stream]
    last_char = None
    for fh in fasta_stream:
        # Ensure a newline between concatenated inputs. Indexing bytes
        # yields ints, so compare against byte values and write bytes; the
        # previous code compared against str '\n' and wrote a str to a
        # binary handle, raising TypeError for multi-file input.
        if last_char not in (None, ord('\n'), ord('\r')):
            fasta_writer.write(b'\n')
        while True:
            data = fh.read(CHUNK_SIZE)
            if data:
                if isinstance(data, str):
                    # Tolerate text-mode handles.
                    data = data.encode('utf-8')
                fasta_writer.write(data)
                last_char = data[-1]
            else:
                break
        if close_stream:
            fh.close()
    fasta_writer.close()
    args = ['diamond', 'makedb',
            '--in', temp_fasta.name,
            '--db', fasta_filename]
    if params['tax_cond']['tax_select'] == "history":
        for i in ["taxonmap", "taxonnodes", "taxonnames"]:
            args.extend(['--' + i, params['tax_cond'][i]])
    elif params['tax_cond']['tax_select'] == "ncbi":
        ncbi_tax = params['tax_cond']['ncbi_tax']
        # Accept any of the known accession2taxid layouts, most specific first.
        for candidate in ('prot.accession2taxid.FULL.gz',
                          'prot.accession2taxid.FULL',
                          'prot.accession2taxid.gz',
                          'prot.accession2taxid'):
            taxonmap = os.path.join(ncbi_tax, candidate)
            if os.path.isfile(taxonmap):
                args.extend(['--taxonmap', taxonmap])
                break
        else:
            raise Exception('Unable to find prot.accession2taxid file in %s' % (ncbi_tax))
        args.extend(['--taxonnodes', os.path.join(ncbi_tax, 'nodes.dmp')])
        args.extend(['--taxonnames', os.path.join(ncbi_tax, 'names.dmp')])
    tmp_stderr = tempfile.NamedTemporaryFile(prefix="tmp-data-manager-diamond-database-builder-stderr")
    proc = subprocess.Popen(args=args, shell=False, cwd=target_directory,
                            stderr=tmp_stderr.fileno())
    return_code = proc.wait()
    if return_code:
        # Relay diamond's stderr to our stderr and abort with its exit code.
        tmp_stderr.flush()
        tmp_stderr.seek(0)
        print("Error building diamond database:", file=sys.stderr)
        while True:
            chunk = tmp_stderr.read(CHUNK_SIZE)
            if not chunk:
                break
            sys.stderr.write(chunk.decode('utf-8'))
        sys.exit(return_code)
    tmp_stderr.close()
    os.remove(temp_fasta.name)
    return dict(value=database_id, name=database_name,
                db_path="%s.dmnd" % fasta_base_filename)
def _create_symlink(input_filename, target_directory, database_id, database_name):
fasta_base_filename = "%s.fa" % database_id
fasta_filename = os.path.join(target_directory, fasta_base_filename)
os.symlink(input_filename, fasta_filename)
return dict(value=database_id, name=database_name, db_path=fasta_base_filename)
# Dispatch table: reference_source_selector value -> handler function.
REFERENCE_SOURCE_TO_DOWNLOAD = dict(ncbi=download_from_ncbi,
                                    url=download_from_url,
                                    history=download_from_history,
                                    directory=copy_from_directory)
def main():
    """Galaxy data-manager entry point.

    Reads the JSON parameter file named by the first CLI argument, fetches
    the requested FASTA data, builds the DIAMOND database in the output's
    extra_files_path, and writes the resulting data-table entries back into
    the same JSON file.
    """
    # Parse Command Line
    parser = optparse.OptionParser()
    parser.add_option('-d', '--dbkey_description', dest='dbkey_description',
                      action='store', type="string", default=None,
                      help='dbkey_description')
    (options, args) = parser.parse_args()
    filename = args[0]
    with open(filename) as fp:
        params = json.load(fp)
    target_directory = params['output_data'][0]['extra_files_path']
    os.mkdir(target_directory)
    data_manager_dict = {}
    param_dict = params['param_dict']
    database_id = param_dict['database_id']
    database_name = param_dict['database_name']
    if param_dict['tax_cond']['tax_select'] == "ncbi":
        # Second positional argument is the NCBI taxonomy dump directory.
        param_dict['tax_cond']['ncbi_tax'] = args[1]
    # Fetch the FASTA
    REFERENCE_SOURCE_TO_DOWNLOAD[param_dict['reference_source']['reference_source_selector']](data_manager_dict, param_dict, target_directory, database_id, database_name)
    # save info to json file
    open(filename, 'w').write(json.dumps(data_manager_dict, sort_keys=True))
if __name__ == "__main__":
    main()
| [
"os.path.exists",
"ftplib.FTP",
"tarfile.open",
"sys.exit",
"zipfile.ZipFile",
"json.dumps",
"os.path.join",
"os.symlink",
"optparse.OptionParser",
"shutil.rmtree",
"gzip.GzipFile",
"bz2.BZ2File",
"tempfile.mkdtemp",
"os.mkdir",
"tempfile.NamedTemporaryFile",
"json.load",
"os.remove"... | [((657, 699), 'tarfile.open', 'tarfile.open', ([], {'fileobj': 'file_obj', 'mode': '"""r:*"""'}), "(fileobj=file_obj, mode='r:*')\n", (669, 699), False, 'import tarfile\n'), ((850, 880), 'zipfile.ZipFile', 'zipfile.ZipFile', (['file_obj', '"""r"""'], {}), "(file_obj, 'r')\n", (865, 880), False, 'import zipfile\n'), ((1910, 1930), 'ftplib.FTP', 'FTP', (['NCBI_FTP_SERVER'], {}), '(NCBI_FTP_SERVER)\n', (1913, 1930), False, 'from ftplib import FTP\n'), ((2469, 2518), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""tmp-data-manager-ncbi-"""'}), "(prefix='tmp-data-manager-ncbi-')\n", (2485, 2518), False, 'import tempfile\n'), ((2545, 2599), 'os.path.join', 'os.path.join', (['tmp_dir', "('%s%s' % (ncbi_identifier, ext))"], {}), "(tmp_dir, '%s%s' % (ncbi_identifier, ext))\n", (2557, 2599), False, 'import os\n'), ((2798, 2838), 'os.path.join', 'os.path.join', (['tmp_dir', '"""extracted_fasta"""'], {}), "(tmp_dir, 'extracted_fasta')\n", (2810, 2838), False, 'import os\n'), ((2843, 2868), 'os.mkdir', 'os.mkdir', (['tmp_extract_dir'], {}), '(tmp_extract_dir)\n', (2851, 2868), False, 'import os\n'), ((5810, 5861), 'os.path.join', 'os.path.join', (['target_directory', 'fasta_base_filename'], {}), '(target_directory, fasta_base_filename)\n', (5822, 5861), False, 'import os\n'), ((5880, 5938), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)', 'suffix': '""".fasta"""'}), "(delete=False, suffix='.fasta')\n", (5907, 5938), False, 'import tempfile\n'), ((8179, 8270), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'prefix': '"""tmp-data-manager-diamond-database-builder-stderr"""'}), "(prefix=\n 'tmp-data-manager-diamond-database-builder-stderr')\n", (8206, 8270), False, 'import tempfile\n'), ((8792, 8818), 'os.remove', 'os.remove', (['temp_fasta.name'], {}), '(temp_fasta.name)\n', (8801, 8818), False, 'import os\n'), ((9085, 9136), 'os.path.join', 'os.path.join', (['target_directory', 
'fasta_base_filename'], {}), '(target_directory, fasta_base_filename)\n', (9097, 9136), False, 'import os\n'), ((9141, 9183), 'os.symlink', 'os.symlink', (['input_filename', 'fasta_filename'], {}), '(input_filename, fasta_filename)\n', (9151, 9183), False, 'import os\n'), ((9576, 9599), 'optparse.OptionParser', 'optparse.OptionParser', ([], {}), '()\n', (9597, 9599), False, 'import optparse\n'), ((9993, 10019), 'os.mkdir', 'os.mkdir', (['target_directory'], {}), '(target_directory)\n', (10001, 10019), False, 'import os\n'), ((339, 362), 'os.path.exists', 'os.path.exists', (['tmp_dir'], {}), '(tmp_dir)\n', (353, 362), False, 'import os\n'), ((372, 394), 'shutil.rmtree', 'shutil.rmtree', (['tmp_dir'], {}), '(tmp_dir)\n', (385, 394), False, 'import shutil\n'), ((1124, 1166), 'gzip.GzipFile', 'gzip.GzipFile', ([], {'fileobj': 'file_obj', 'mode': '"""rb"""'}), "(fileobj=file_obj, mode='rb')\n", (1137, 1166), False, 'import gzip\n'), ((1234, 1266), 'bz2.BZ2File', 'bz2.BZ2File', (['file_obj.name', '"""rb"""'], {}), "(file_obj.name, 'rb')\n", (1245, 1266), False, 'import bz2\n'), ((8743, 8764), 'sys.exit', 'sys.exit', (['return_code'], {}), '(return_code)\n', (8751, 8764), False, 'import sys\n'), ((9907, 9920), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (9916, 9920), False, 'import json\n'), ((10540, 10585), 'json.dumps', 'json.dumps', (['data_manager_dict'], {'sort_keys': '(True)'}), '(data_manager_dict, sort_keys=True)\n', (10550, 10585), False, 'import json\n'), ((1003, 1032), 'os.path.join', 'os.path.join', (['tmp_dir', 'member'], {}), '(tmp_dir, member)\n', (1015, 1032), False, 'import os\n'), ((6857, 6933), 'os.path.join', 'os.path.join', (["params['tax_cond']['ncbi_tax']", '"""prot.accession2taxid.FULL.gz"""'], {}), "(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.FULL.gz')\n", (6869, 6933), False, 'import os\n'), ((7107, 7180), 'os.path.join', 'os.path.join', (["params['tax_cond']['ncbi_tax']", '"""prot.accession2taxid.FULL"""'], {}), 
"(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.FULL')\n", (7119, 7180), False, 'import os\n'), ((7983, 8040), 'os.path.join', 'os.path.join', (["params['tax_cond']['ncbi_tax']", '"""nodes.dmp"""'], {}), "(params['tax_cond']['ncbi_tax'], 'nodes.dmp')\n", (7995, 8040), False, 'import os\n'), ((8101, 8158), 'os.path.join', 'os.path.join', (["params['tax_cond']['ncbi_tax']", '"""names.dmp"""'], {}), "(params['tax_cond']['ncbi_tax'], 'names.dmp')\n", (8113, 8158), False, 'import os\n'), ((7000, 7076), 'os.path.join', 'os.path.join', (["params['tax_cond']['ncbi_tax']", '"""prot.accession2taxid.FULL.gz"""'], {}), "(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.FULL.gz')\n", (7012, 7076), False, 'import os\n'), ((7351, 7422), 'os.path.join', 'os.path.join', (["params['tax_cond']['ncbi_tax']", '"""prot.accession2taxid.gz"""'], {}), "(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.gz')\n", (7363, 7422), False, 'import os\n'), ((7247, 7320), 'os.path.join', 'os.path.join', (["params['tax_cond']['ncbi_tax']", '"""prot.accession2taxid.FULL"""'], {}), "(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.FULL')\n", (7259, 7320), False, 'import os\n'), ((7591, 7659), 'os.path.join', 'os.path.join', (["params['tax_cond']['ncbi_tax']", '"""prot.accession2taxid"""'], {}), "(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid')\n", (7603, 7659), False, 'import os\n'), ((7489, 7560), 'os.path.join', 'os.path.join', (["params['tax_cond']['ncbi_tax']", '"""prot.accession2taxid.gz"""'], {}), "(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.gz')\n", (7501, 7560), False, 'import os\n'), ((7726, 7794), 'os.path.join', 'os.path.join', (["params['tax_cond']['ncbi_tax']", '"""prot.accession2taxid"""'], {}), "(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid')\n", (7738, 7794), False, 'import os\n')] |
import os
import shutil
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.client import Client
from django.utils import simplejson
from ocradmin.projects.models import Project
AJAX_HEADERS = {
"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"
}
class ProjectsTest(TestCase):
    """View tests for the OCR projects app (list/create/edit/delete).

    Relies on ``test_fixtures.json`` providing a project with pk=1, and on a
    throwaway user created in setUp() for authenticated requests.
    """
    fixtures = ["test_fixtures.json"]
    def setUp(self):
        """
        Setup OCR tests. Creates a test user and a logged-in client.
        """
        self.testuser = User.objects.create_user("test_user", "<EMAIL>", "<PASSWORD>")
        self.client = Client()
        self.client.login(username="test_user", password="<PASSWORD>")
    def tearDown(self):
        """
        Cleanup a test: remove the user created in setUp().
        """
        self.testuser.delete()
    def test_projects_view(self):
        """
        Test basic list view
        """
        self.assertEqual(self.client.get("/projects/").status_code, 200)
    def test_tag_filter(self):
        """
        Test filtering by tag.
        """
        r = self.client.get("/projects/list", {"tag": "test"})
        self.assertEqual(r.status_code, 200)
    def test_new_ajax_form(self):
        """
        Test requesting a new upload form via Ajax works.
        """
        # AJAX_HEADERS marks the request as XMLHttpRequest
        r = self.client.get("/projects/create/", {}, **AJAX_HEADERS)
        self.assertEqual(r.status_code, 200)
        # make sure there's a form in the results
        self.assertTrue(r.content.find("<fieldset") != -1)
    def test_create_project_ajax(self):
        """
        Test creating a new project from an uploaded file.
        """
        # we shouldn't have any projects in the DB yet. If
        # successful it'll redirect back to the list.
        before = Project.objects.count()
        r = self._create_test_project()
        self.assertEqual(r.status_code, 302)
        self.assertEqual(before + 1, Project.objects.count())
    def test_edit_project_view(self):
        """
        Test viewing the edit for (no Ajax).
        """
        r = self.client.get("/projects/edit/1/")
        self.assertEqual(r.status_code, 200)
    def test_edit_project_not_found(self):
        """
        Test viewing the edit form for a non-existant item.
        """
        r = self.client.get("/projects/edit/666666/")
        self.assertEqual(r.status_code, 404)
    def test_update_project(self):
        """
        Test the updating of the fixture project.
        """
        r = self._update_test_project()
        self.assertEqual(r.status_code, 302)
        project = Project.objects.get(pk=1)
        self.assertEqual(project.description, "")
    def test_confirm_delete(self):
        """
        Test checking if the user wants to delete a project.
        """
        r = self._create_test_project()
        project = Project.objects.get(pk=1)
        r = self.client.get("/projects/delete/1/")
        self.assertEqual(r.status_code, 200)
    def test_delete_project(self):
        """
        Test actually deleting a project.
        """
        r = self._create_test_project()
        before = Project.objects.count()
        r = self.client.post("/projects/delete/1/")
        self.assertEqual(r.status_code, 302)
        after = Project.objects.count()
        self.assertEqual(before, after + 1)
    def _create_test_project(self):
        """
        Insert a post test project view post.

        Posts step 1 of the two-step project creation wizard; field names
        are prefixed with the wizard step number.
        """
        return self.client.post(
            "/projects/create", {
                "0-name" : "Yet ANother test",
                "0-description" : "",
                "0-storage_backend" : "FedoraStorage",
                "0-tags" : "",
                "1-root" : "localhost:8080/fedora",
                "1-image_name" : "IMG",
                "1-password" : "<PASSWORD>",
                "1-namespace" : "yet-another-test",
                "1-transcript_name" : "TRANSCRIPT",
                "1-username" : "fedoraAdmin",
                "hash_0" : "89075382b10c271f10c479251fa68c057242ba40",
                "wizard_step" : "1",
            }
        )
    def _update_test_project(self):
        """
        Update the fixture project (pk=1) with new name/tags.
        """
        return self.client.post(
            "/projects/edit/1/",
            dict(
                name="Test Update Project",
                tags="test project update",
                storage_backend="FileSystemStorage",
                description="",
            ),
        )
| [
"django.test.client.Client",
"ocradmin.projects.models.Project.objects.get",
"ocradmin.projects.models.Project.objects.count",
"django.contrib.auth.models.User.objects.create_user"
] | [((511, 573), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""test_user"""', '"""<EMAIL>"""', '"""<PASSWORD>"""'], {}), "('test_user', '<EMAIL>', '<PASSWORD>')\n", (535, 573), False, 'from django.contrib.auth.models import User\n'), ((596, 604), 'django.test.client.Client', 'Client', ([], {}), '()\n', (602, 604), False, 'from django.test.client import Client\n'), ((1735, 1758), 'ocradmin.projects.models.Project.objects.count', 'Project.objects.count', ([], {}), '()\n', (1756, 1758), False, 'from ocradmin.projects.models import Project\n'), ((2548, 2573), 'ocradmin.projects.models.Project.objects.get', 'Project.objects.get', ([], {'pk': '(1)'}), '(pk=1)\n', (2567, 2573), False, 'from ocradmin.projects.models import Project\n'), ((2803, 2828), 'ocradmin.projects.models.Project.objects.get', 'Project.objects.get', ([], {'pk': '(1)'}), '(pk=1)\n', (2822, 2828), False, 'from ocradmin.projects.models import Project\n'), ((3084, 3107), 'ocradmin.projects.models.Project.objects.count', 'Project.objects.count', ([], {}), '()\n', (3105, 3107), False, 'from ocradmin.projects.models import Project\n'), ((3221, 3244), 'ocradmin.projects.models.Project.objects.count', 'Project.objects.count', ([], {}), '()\n', (3242, 3244), False, 'from ocradmin.projects.models import Project\n'), ((1881, 1904), 'ocradmin.projects.models.Project.objects.count', 'Project.objects.count', ([], {}), '()\n', (1902, 1904), False, 'from ocradmin.projects.models import Project\n')] |
from django.shortcuts import redirect
from django.utils.deprecation import MiddlewareMixin
class AutuMiddleWares(MiddlewareMixin):
    """Authentication gate: every request except the login/registration
    pages requires a logged-in user."""

    def process_request(self, request):
        """Return None to let the request through, or a redirect to the
        login page for anonymous users."""
        # Login and registration pages may be visited anonymously.
        if request.path in ('/login/', '/reg/'):
            return None
        # Anyone not authenticated is sent to the login page.
        if not request.user.is_authenticated:
            return redirect('/login/')
"django.shortcuts.redirect"
] | [((437, 456), 'django.shortcuts.redirect', 'redirect', (['"""/login/"""'], {}), "('/login/')\n", (445, 456), False, 'from django.shortcuts import redirect\n')] |
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import numpy as np
import matplotlib.pyplot as plt
from splane import pzmap, grpDelay, bodePlot, convert2SOS
from scipy import signal
# Esta es una liberia tomada de la comunidad [https://stackoverflow.com/questions/35304245/multiply-scipy-lti-transfer-functions?newreg=b12c460c179042b09ad75c2fb4297bc9]
from ltisys import *
# Filter synthesis script: builds a cascade of three sections from
# hand-computed pole data and plots the Bode response of T = T1 * T2 * T3.
e = .096
# Roots of the attenuation polynomial for ripple factor e
# (Chebyshev-style polynomial in w -- TODO confirm intended approximation).
a = np.roots([-256*e,0,-640*e,0,-560*e,0,-200*e,0,-25*e,0,1])
# Transfer-function coefficients (section Q factors and pole frequencies)
q_1 = 2.59
k = 1
q_2 = 4.3
w_2 = 1.025
q_3 = 1.12
w_3 = 0.703
# Build transfer function T1 in s: numerator s, denominator (s + q_1)
num_t1 = [1, 0]
den_t1 = [1, q_1]
T1 = ltimul(num_t1, den_t1);
# Build transfer function T2 in s: second-order section with w0 = w_2, Q = q_2
num_t2 = [1, 0, 0]
den_t2 = [1, 1 / (q_2 * w_2), 1/ (w_2**2)]
T2 = ltimul(num_t2, den_t2);
# Build transfer function T3 in s: second-order section with w0 = w_3, Q = q_3
num_t3 = [1, 0, 0]
den_t3 = [1, 1 / (q_3 * w_3), 1/ (w_3**2)]
T3 = ltimul(num_t3, den_t3);
# Overall cascade (ltimul supports transfer-function multiplication)
T = T1 * T2 * T3
#pzmap(T, 1);
#fig, ax = bodePlot(T1.to_ss(), 2);
#fig, ax = bodePlot(T2.to_ss(), 2);
fig, ax = bodePlot(T.to_ss(), 2);
#ax[0].set_xlim(1e-1,1e1)
#ax[0].set_ylim(-100, 10)
#ax[1].set_xlim(1e-1,1e1) | [
"numpy.roots"
] | [((420, 497), 'numpy.roots', 'np.roots', (['[-256 * e, 0, -640 * e, 0, -560 * e, 0, -200 * e, 0, -25 * e, 0, 1]'], {}), '([-256 * e, 0, -640 * e, 0, -560 * e, 0, -200 * e, 0, -25 * e, 0, 1])\n', (428, 497), True, 'import numpy as np\n')] |
"""Функции проверки статуса дивидендов"""
from urllib.error import URLError
import numpy as np
import pandas as pd
from local.dividends import comony_ru
from local.dividends import dohod_ru
from local.dividends import smart_lab_ru
from local.dividends.sqlite import DividendsDataManager
from local.dividends.sqlite import STATISTICS_START
from web.labels import DIVIDENDS
from web.labels import TICKER
DIVIDENDS_SOURCES = [dohod_ru.dividends_dohod,
comony_ru.dividends_conomy,
smart_lab_ru.dividends_smart_lab]
def smart_lab_status(tickers: tuple):
    """Report tickers whose local dividend data disagrees with smart-lab.

    Every row published on smart-lab is compared against the local dividend
    database; rows missing locally, or present with a different value, mark
    the ticker as stale.

    Parameters
    ----------
    tickers
        Main tickers whose data freshness should be checked.

    Returns
    -------
    tuple of list
        Element 0 - tickers from *tickers* without up-to-date local data.
        Element 1 - other smart-lab tickers without up-to-date local data.
    """
    published = smart_lab_ru.dividends_smart_lab()
    stale_main = []
    stale_other = []
    for row_number in range(len(published)):
        date = published.index[row_number]
        ticker = published.iloc[row_number][TICKER]
        payment = published.iloc[row_number][DIVIDENDS]
        local_series = DividendsDataManager(ticker).value
        if (date not in local_series.index) or (local_series[date] != payment):
            (stale_main if ticker in tickers else stale_other).append(ticker)
    return stale_main, stale_other
def dividends_status(ticker: str):
    """Compare the main dividend data for *ticker* with alternative sources.

    Updates the local data first, then compares it against every source in
    DIVIDENDS_SOURCES, printing and collecting a comparison table per source.

    Parameters
    ----------
    ticker
        Ticker symbol.

    Returns
    -------
    list
        One comparison DataFrame per successfully queried source; rows
        where the values match (np.isclose) get an empty STATUS, mismatches
        are marked 'ERROR'.
    """
    manager = DividendsDataManager(ticker)
    manager.update()
    local_df = manager.value
    comparisons = []
    for source in DIVIDENDS_SOURCES:
        print(f'\nСРАВНЕНИЕ ОСНОВНЫХ ДАННЫХ С {source.__name__}\n')
        try:
            source_df = source(ticker)
        except (IndexError, URLError) as err:
            # Source unavailable or has no data - report and move on.
            print(err.args[0])
            continue
        source_df = source_df[source_df.index >= pd.Timestamp(STATISTICS_START)]
        source_df.name = source.__name__
        compare_df = pd.concat([local_df, source_df], axis='columns')
        compare_df['STATUS'] = 'ERROR'
        matches = np.isclose(compare_df[ticker].values, compare_df[source.__name__].values)
        compare_df.loc[matches, 'STATUS'] = ''
        print(compare_df)
        comparisons.append(compare_df)
    return comparisons
if __name__ == '__main__':
dividends_status('ALRS')
| [
"numpy.isclose",
"local.dividends.sqlite.DividendsDataManager",
"local.dividends.smart_lab_ru.dividends_smart_lab",
"pandas.Timestamp",
"pandas.concat"
] | [((1072, 1106), 'local.dividends.smart_lab_ru.dividends_smart_lab', 'smart_lab_ru.dividends_smart_lab', ([], {}), '()\n', (1104, 1106), False, 'from local.dividends import smart_lab_ru\n'), ((1928, 1956), 'local.dividends.sqlite.DividendsDataManager', 'DividendsDataManager', (['ticker'], {}), '(ticker)\n', (1948, 1956), False, 'from local.dividends.sqlite import DividendsDataManager\n'), ((1280, 1308), 'local.dividends.sqlite.DividendsDataManager', 'DividendsDataManager', (['ticker'], {}), '(ticker)\n', (1300, 1308), False, 'from local.dividends.sqlite import DividendsDataManager\n'), ((2471, 2513), 'pandas.concat', 'pd.concat', (['[df, source_df]'], {'axis': '"""columns"""'}), "([df, source_df], axis='columns')\n", (2480, 2513), True, 'import pandas as pd\n'), ((2369, 2399), 'pandas.Timestamp', 'pd.Timestamp', (['STATISTICS_START'], {}), '(STATISTICS_START)\n', (2381, 2399), True, 'import pandas as pd\n'), ((2584, 2657), 'numpy.isclose', 'np.isclose', (['compare_df[ticker].values', 'compare_df[source.__name__].values'], {}), '(compare_df[ticker].values, compare_df[source.__name__].values)\n', (2594, 2657), True, 'import numpy as np\n')] |
import matplotlib.pyplot as plt
import numpy as np
import os
from scipy import stats
from transposonmapper.statistics import dataframe_from_pergenefile
def make_datafile(path_a,filelist_a,path_b,filelist_b):
    """Assemble absolute paths for the replicate files of both libraries.

    Parameters
    ----------
    path_a : str
        Directory holding the reference-library replicate files.
    filelist_a : list of str
        File names of the reference replicates (minimum two files).
    path_b : str
        Directory holding the experimental-library replicate files.
    filelist_b : list of str
        File names of the experimental replicates (minimum two files).

    Returns
    -------
    tuple of list
        Absolute paths of the reference and experimental library files.
    """
    def _resolve(folder, names):
        # Join each name onto its folder, asserting the file exists.
        resolved = []
        for name in names:
            full_path = os.path.join(folder, name)
            assert os.path.isfile(full_path), 'File not found at: %s' % full_path
            resolved.append(full_path)
        return resolved
    return _resolve(path_a, filelist_a), _resolve(path_b, filelist_b)
def info_from_datasets(datafiles_list_a,datafiles_list_b,variable,normalize):
    """Read the information contain in the datafiles for the volcano plot
    Parameters
    ----------
    datafiles_list_a : list of str
        List of the absolute paths of all the replicates from the
        reference library.
    datafiles_list_b : list of str
        List of the absolute paths of all the replicates from the
        experimental library.
    variable : str
        Magnitude indicating based on what to make the volcano plot.
        For example: tn_per_gene, read_per_gene or Nreadsperinsrt
    normalize : bool
        If True , If set to True, each gene is normalized based on
        the total count in each dataset (i.e. each file in filelist_)
    Returns
    -------
    variable_a_array : numpy.array
        Per-gene values, one column per replicate of library a.
    variable_b_array: numpy.array
        Per-gene values, one column per replicate of library b.
    volcano_df: pandas.core.frame.DataFrame
        Gene names only; the statistics columns are filled in later.
    tnread_gene_a: pandas.core.frame.DataFrame
        Per-gene data of the LAST replicate of library a (zero-replaced).
    tnread_gene_b: pandas.core.frame.DataFrame
        Per-gene data of the LAST replicate of library b (zero-replaced).
    """
    # Pseudocounts added to every gene so the later fold-change
    # computation never divides by zero.
    tn_per_gene_zeroreplace = 5 #Add 5 insertions to every gene
    read_per_gene_zeroreplace = 25 #Add 25 reads to every gene
    # norm_a = 0
    # norm_b = 0
    for count, datafile_a in enumerate(datafiles_list_a):
        tnread_gene_a = dataframe_from_pergenefile(datafile_a, verbose=False)
        if normalize == True:
            # Normalization factor = total count over all genes in this replicate.
            if variable == 'tn_per_gene':
                norm_a = sum(tnread_gene_a.tn_per_gene)#*10**-4
            elif variable == 'read_per_gene':
                norm_a = sum(tnread_gene_a.read_per_gene)#*10**-7
            elif variable == 'Nreadsperinsrt':
                norm_a = sum(tnread_gene_a.Nreadsperinsrt)
        #ADD A CONSTANT TO ALL VALUES TO PREVENT A ZERO DIVISION WHEN DETERMINING THE FOLD CHANGE.
        tnread_gene_a.tn_per_gene = tnread_gene_a.tn_per_gene + tn_per_gene_zeroreplace
        tnread_gene_a.read_per_gene = tnread_gene_a.read_per_gene + read_per_gene_zeroreplace
        tnread_gene_a.Nreadsperinsrt = tnread_gene_a.Nreadsperinsrt + (read_per_gene_zeroreplace/tn_per_gene_zeroreplace)
        if count == 0:
            volcano_df = tnread_gene_a[['gene_names']] #initialize new dataframe with gene_names
            if normalize == True:
                variable_a_array = np.divide(tnread_gene_a[[variable]].to_numpy(), norm_a) #create numpy array to store normalized data
            else:
                variable_a_array = tnread_gene_a[[variable]].to_numpy() #create numpy array to store raw data
        else:
            # Subsequent replicates become additional columns (axis=1).
            if normalize == True:
                variable_a_array = np.append(variable_a_array, np.divide(tnread_gene_a[[variable]].to_numpy(), norm_a), axis=1) #append normalized data
            else:
                variable_a_array = np.append(variable_a_array, tnread_gene_a[[variable]].to_numpy(), axis=1) #append raw data
    # Same procedure for the experimental library (library b).
    for count, datafile_b in enumerate(datafiles_list_b):
        tnread_gene_b = dataframe_from_pergenefile(datafile_b, verbose=False)
        if normalize == True:
            if variable == 'tn_per_gene':
                norm_b = sum(tnread_gene_b.tn_per_gene)#*10**-4
            elif variable == 'read_per_gene':
                norm_b = sum(tnread_gene_b.read_per_gene)#*10**-7
            elif variable == 'Nreadsperinsrt':
                norm_b = sum(tnread_gene_b.Nreadsperinsrt)
        #ADD A CONSTANT TO ALL VALUES TO PREVENT A ZERO DIVISION WHEN DETERMINING THE FOLD CHANGE.
        tnread_gene_b.tn_per_gene = tnread_gene_b.tn_per_gene + tn_per_gene_zeroreplace
        tnread_gene_b.read_per_gene = tnread_gene_b.read_per_gene + read_per_gene_zeroreplace
        tnread_gene_b.Nreadsperinsrt = tnread_gene_b.Nreadsperinsrt + (read_per_gene_zeroreplace/tn_per_gene_zeroreplace)
        if count == 0:
            if normalize == True:
                variable_b_array = np.divide(tnread_gene_b[[variable]].to_numpy(), norm_b)
            else:
                variable_b_array = tnread_gene_b[[variable]].to_numpy()
        else:
            if normalize == True:
                variable_b_array = np.append(variable_b_array, np.divide(tnread_gene_b[[variable]].to_numpy(), norm_b), axis=1)
            else:
                variable_b_array = np.append(variable_b_array, tnread_gene_b[[variable]].to_numpy(), axis=1)
    return variable_a_array,variable_b_array,volcano_df,tnread_gene_a,tnread_gene_b
def apply_stats(variable_a_array,variable_b_array,significance_threshold,volcano_df):
    """Compute the per-gene statistics for the volcano plot.

    For every gene (row) an independent two-sample t-test is performed
    between the replicate values of the two libraries, and the log2 fold
    change of the mean values is determined.

    Parameters
    ----------
    variable_a_array : array
        Per-gene values (# of insertions or reads) of the replicates of one library
    variable_b_array : array
        Per-gene values (# of insertions or reads) of the replicates of the other library
    significance_threshold : float
        It will use the default value in the volcano function which is 0.01
    volcano_df : pandas.DataFrame
        One row per gene; the result columns are added to this dataframe.

    Returns
    -------
    dataframe
        A dataframe containing all the info for the volcano plot:
        'fold_change', 't_statistic', 'p_value' (stored as -log10(p)) and
        'significance' columns.
    """
    n_genes = len(variable_a_array)
    tvals = [np.nan] * n_genes          # t statistics
    log_pvals = [np.nan] * n_genes      # -log10 transformed p-values
    significant = [False] * n_genes     # True where p-value beats the threshold
    fold_changes = [np.nan] * n_genes   # log2 fold changes
    log_threshold = -1 * np.log10(significance_threshold)
    for idx in range(n_genes):
        sample_a = variable_a_array[idx]
        sample_b = variable_b_array[idx]
        t_stat, p_val = stats.ttest_ind(sample_a, sample_b)  # T-test
        tvals[idx] = t_stat
        # Guard against p == 0, which cannot be fed into log10.
        log_pvals[idx] = -1 * np.log10(p_val) if p_val != 0 else 0
        if log_pvals[idx] > log_threshold:
            significant[idx] = True
        # DETERMINE FOLD CHANGE PER GENE
        mean_a = np.mean(sample_a)
        mean_b = np.mean(sample_b)
        if mean_a == 0 and mean_b == 0:
            fold_changes[idx] = 0
        else:
            fold_changes[idx] = np.log2(mean_a / mean_b)
    volcano_df['fold_change'] = fold_changes
    volcano_df['t_statistic'] = tvals
    volcano_df['p_value'] = log_pvals
    volcano_df['significance'] = significant
    return volcano_df
| [
"numpy.mean",
"numpy.log10",
"os.path.join",
"os.path.isfile",
"transposonmapper.statistics.dataframe_from_pergenefile",
"scipy.stats.ttest_ind"
] | [((1143, 1170), 'os.path.join', 'os.path.join', (['path_a', 'files'], {}), '(path_a, files)\n', (1155, 1170), False, 'import os\n'), ((1186, 1210), 'os.path.isfile', 'os.path.isfile', (['datafile'], {}), '(datafile)\n', (1200, 1210), False, 'import os\n'), ((1337, 1364), 'os.path.join', 'os.path.join', (['path_b', 'files'], {}), '(path_b, files)\n', (1349, 1364), False, 'import os\n'), ((1380, 1404), 'os.path.isfile', 'os.path.isfile', (['datafile'], {}), '(datafile)\n', (1394, 1404), False, 'import os\n'), ((2790, 2843), 'transposonmapper.statistics.dataframe_from_pergenefile', 'dataframe_from_pergenefile', (['datafile_a'], {'verbose': '(False)'}), '(datafile_a, verbose=False)\n', (2816, 2843), False, 'from transposonmapper.statistics import dataframe_from_pergenefile\n'), ((4450, 4503), 'transposonmapper.statistics.dataframe_from_pergenefile', 'dataframe_from_pergenefile', (['datafile_b'], {'verbose': '(False)'}), '(datafile_b, verbose=False)\n', (4476, 4503), False, 'from transposonmapper.statistics import dataframe_from_pergenefile\n'), ((7019, 7084), 'scipy.stats.ttest_ind', 'stats.ttest_ind', (['variable_a_array[count]', 'variable_b_array[count]'], {}), '(variable_a_array[count], variable_b_array[count])\n', (7034, 7084), False, 'from scipy import stats\n'), ((7248, 7270), 'numpy.log10', 'np.log10', (['ttest_val[1]'], {}), '(ttest_val[1])\n', (7256, 7270), True, 'import numpy as np\n'), ((7363, 7395), 'numpy.log10', 'np.log10', (['significance_threshold'], {}), '(significance_threshold)\n', (7371, 7395), True, 'import numpy as np\n'), ((7489, 7521), 'numpy.mean', 'np.mean', (['variable_b_array[count]'], {}), '(variable_b_array[count])\n', (7496, 7521), True, 'import numpy as np\n'), ((7531, 7563), 'numpy.mean', 'np.mean', (['variable_a_array[count]'], {}), '(variable_a_array[count])\n', (7538, 7563), True, 'import numpy as np\n'), ((7652, 7684), 'numpy.mean', 'np.mean', (['variable_a_array[count]'], {}), '(variable_a_array[count])\n', (7659, 7684), True, 
'import numpy as np\n'), ((7687, 7719), 'numpy.mean', 'np.mean', (['variable_b_array[count]'], {}), '(variable_b_array[count])\n', (7694, 7719), True, 'import numpy as np\n')] |
"""
See: `libfuturize.main`
"""
from __future__ import (absolute_import, print_function, unicode_literals)
import json
import logging
import optparse
import os
import shutil
import sys
from lib2to3 import refactor
from lib2to3.main import warn
import future.utils
from do_py import DataObject, R
from do_py.common.managed_list import ManagedList
from dominate import document
from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr
from future import __version__
from libfuturize.fixes import (lib2to3_fix_names_stage1,
lib2to3_fix_names_stage2,
libfuturize_fix_names_stage1,
libfuturize_fix_names_stage2)
from py2to3cov.data_model.diff_summary import DiffSummary
from py2to3cov.futurizer.refactoring_tool import FileRefactoringTool
from py2to3cov.mgmt.const import DIFF_DIR, RESULTS_DIR
# Package housing the libfuturize fixers.
# NOTE(review): only referenced by the commented-out get_all_fix_names()
# call inside futurize_code() -- appears otherwise unused in this module.
fixer_pkg = 'libfuturize.fixes'
class TestFailure(DataObject):
    """A single test failure: its type plus the associated message.

    Declarative `do_py` DataObject; `_restrictions` constrains both
    fields to strings (R.STR).
    """
    _restrictions = {
        'type': R.STR,
        'message': R.STR,
    }
class TestCase(DataObject):
    """A test case for one source file, optionally carrying a TestFailure.

    `failure` may be a TestFailure instance or None (test passed).

    NOTE(review): TestReport.to_xml() calls test_case.to_xml(), but no such
    method is defined on this class in the visible code -- confirm that the
    DataObject base class provides it.
    """
    _restrictions = {
        'file': R.STR,
        'failure': R(TestFailure, type(None))
    }
class TestReport(DataObject):
    """A collection of TestCase objects forming a JUnit-style test suite."""
    _restrictions = {
        'testsuite': ManagedList(TestCase)
    }
    def to_xml(self):
        """
        Serialize the suite as a ``<testsuite>`` XML fragment by
        concatenating each contained test case's ``to_xml()`` output.

        :rtype: str
        """
        return '<testsuite>%s</testsuite>' % ''.join(test_case.to_xml() for test_case in self.testsuite)
def futurize_code(args=None):
    """Run the futurize tool and emit 2/3 migration coverage reports.

    Parses futurize-style command line options, assembles the selected
    lib2to3/libfuturize fixer set, refactors the given files/directories via
    FileRefactoringTool, then writes an HTML summary plus a machine-readable
    JSON report under RESULTS_DIR (per-file diffs go to DIFF_DIR).

    Args:
        args: optional; a list of command line arguments. If omitted,
              sys.argv[1:] is used.

    Returns a suggested exit status (0, 1, 2).
    """
    # Set up option parser
    parser = optparse.OptionParser(usage="futurize [options] file|dir ...")
    parser.add_option("-V", "--version", action="store_true",
                      help="Report the version number of futurize")
    parser.add_option("-a", "--all-imports", action="store_true",
                      help="Add all __future__ and future imports to each module")
    parser.add_option("-1", "--stage1", action="store_true",
                      help="Modernize Python 2 code only; no compatibility with Python 3 (or dependency on ``future``)")
    parser.add_option("-2", "--stage2", action="store_true",
                      help="Take modernized (stage1) code and add a dependency on ``future`` to provide Py3 compatibility.")
    parser.add_option("-0", "--both-stages", action="store_true",
                      help="Apply both stages 1 and 2")
    parser.add_option("-u", "--unicode-literals", action="store_true",
                      help="Add ``from __future__ import unicode_literals`` to implicitly convert all unadorned string literals '' into unicode strings")
    parser.add_option("-f", "--fix", action="append", default=[],
                      help="Each FIX specifies a transformation; default: all.\nEither use '-f division -f metaclass' etc. or use the fully-qualified module name: '-f lib2to3.fixes.fix_types -f libfuturize.fixes.fix_unicode_keep_u'")
    parser.add_option("-j", "--processes", action="store", default=1,
                      type="int", help="Run 2to3 concurrently")
    parser.add_option("-x", "--nofix", action="append", default=[],
                      help="Prevent a fixer from being run.")
    parser.add_option("-l", "--list-fixes", action="store_true",
                      help="List available transformations")
    parser.add_option("-p", "--print-function", action="store_true",
                      help="Modify the grammar so that print() is a function")
    parser.add_option("-v", "--verbose", action="store_true",
                      help="More verbose logging")
    parser.add_option("--no-diffs", action="store_true",
                      help="Don't show diffs of the refactoring")
    parser.add_option("-w", "--write", action="store_true",
                      help="Write back modified files")
    parser.add_option("-n", "--nobackups", action="store_true", default=False,
                      help="Don't write backups for modified files.")
    parser.add_option("-o", "--output-dir", action="store", type="str",
                      default="", help="Put output files in this directory "
                      "instead of overwriting the input files.  Requires -n. "
                      "For Python >= 2.7 only.")
    parser.add_option("-W", "--write-unchanged-files", action="store_true",
                      help="Also write files even if no changes were required"
                      " (useful with --output-dir); implies -w.")
    parser.add_option("--add-suffix", action="store", type="str", default="",
                      help="Append this string to all output filenames."
                      " Requires -n if non-empty. For Python >= 2.7 only."
                      "ex: --add-suffix='3' will generate .py3 files.")
    # Parse command line arguments
    flags = {}
    refactor_stdin = False
    options, args = parser.parse_args(args)
    if options.write_unchanged_files:
        flags["write_unchanged_files"] = True
        if not options.write:
            warn("--write-unchanged-files/-W implies -w.")
        options.write = True
    # If we allowed these, the original files would be renamed to backup names
    # but not replaced.
    if options.output_dir and not options.nobackups:
        parser.error("Can't use --output-dir/-o without -n.")
    if options.add_suffix and not options.nobackups:
        parser.error("Can't use --add-suffix without -n.")
    if not options.write and options.no_diffs:
        warn("not writing files and not printing diffs; that's not very useful")
    if not options.write and options.nobackups:
        parser.error("Can't use -n without -w")
    if "-" in args:
        refactor_stdin = True
        if options.write:
            print("Can't write to stdin.", file=sys.stderr)
            return 2
    # Is this ever necessary?
    if options.print_function:
        flags["print_function"] = True
    # Set up logging handler
    level = logging.DEBUG if options.verbose else logging.INFO
    logging.basicConfig(format='%(name)s: %(message)s', level=level)
    logger = logging.getLogger('libfuturize.main')
    # Decide which refactoring stages run; default is both.
    if options.stage1 or options.stage2:
        assert options.both_stages is None
        options.both_stages = False
    else:
        options.both_stages = True
    avail_fixes = set()
    if options.stage1 or options.both_stages:
        avail_fixes.update(lib2to3_fix_names_stage1)
        avail_fixes.update(libfuturize_fix_names_stage1)
    if options.stage2 or options.both_stages:
        avail_fixes.update(lib2to3_fix_names_stage2)
        avail_fixes.update(libfuturize_fix_names_stage2)
    if options.unicode_literals:
        avail_fixes.add('libfuturize.fixes.fix_unicode_literals_import')
    if options.version:
        print(__version__)
        return 0
    if options.list_fixes:
        print("Available transformations for the -f/--fix option:")
        # for fixname in sorted(refactor.get_all_fix_names(fixer_pkg)):
        for fixname in sorted(avail_fixes):
            print(fixname)
        if not args:
            return 0
    if not args:
        print("At least one file or directory argument required.",
              file=sys.stderr)
        print("Use --help to show usage.", file=sys.stderr)
        return 2
    # Resolve -x/--nofix names (possibly unqualified) into full module names.
    unwanted_fixes = set()
    for fix in options.nofix:
        if ".fix_" in fix:
            unwanted_fixes.add(fix)
        else:
            # Infer the full module name for the fixer.
            # First ensure that no names clash (e.g.
            # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah):
            found = [f for f in avail_fixes
                     if f.endswith('fix_{0}'.format(fix))]
            if len(found) > 1:
                print("Ambiguous fixer name. Choose a fully qualified "
                      "module name instead from these:\n" +
                      "\n".join("  " + myf for myf in found),
                      file=sys.stderr)
                return 2
            elif len(found) == 0:
                print("Unknown fixer. Use --list-fixes or -l for a list.",
                      file=sys.stderr)
                return 2
            unwanted_fixes.add(found[0])
    extra_fixes = set()
    if options.all_imports:
        if options.stage1:
            prefix = 'libfuturize.fixes.'
            extra_fixes.add(prefix +
                            'fix_add__future__imports_except_unicode_literals')
        else:
            # In case the user hasn't run stage1 for some reason:
            prefix = 'libpasteurize.fixes.'
            extra_fixes.add(prefix + 'fix_add_all__future__imports')
            extra_fixes.add(prefix + 'fix_add_future_standard_library_import')
            extra_fixes.add(prefix + 'fix_add_all_future_builtins')
    # Resolve -f/--fix names the same way as --nofix above.
    explicit = set()
    if options.fix:
        all_present = False
        for fix in options.fix:
            if fix == 'all':
                all_present = True
            else:
                if ".fix_" in fix:
                    explicit.add(fix)
                else:
                    # Infer the full module name for the fixer.
                    # First ensure that no names clash (e.g.
                    # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah):
                    found = [f for f in avail_fixes
                             if f.endswith('fix_{0}'.format(fix))]
                    if len(found) > 1:
                        print("Ambiguous fixer name. Choose a fully qualified "
                              "module name instead from these:\n" +
                              "\n".join("  " + myf for myf in found),
                              file=sys.stderr)
                        return 2
                    elif len(found) == 0:
                        print("Unknown fixer. Use --list-fixes or -l for a list.",
                              file=sys.stderr)
                        return 2
                    explicit.add(found[0])
        if len(explicit & unwanted_fixes) > 0:
            print("Conflicting usage: the following fixers have been "
                  "simultaneously requested and disallowed:\n" +
                  "\n".join("  " + myf for myf in (explicit & unwanted_fixes)),
                  file=sys.stderr)
            return 2
        requested = avail_fixes.union(explicit) if all_present else explicit
    else:
        requested = avail_fixes.union(explicit)
    fixer_names = (requested | extra_fixes) - unwanted_fixes
    input_base_dir = os.path.commonprefix(args)
    if (input_base_dir and not input_base_dir.endswith(os.sep)
        and not os.path.isdir(input_base_dir)):
        # One or more similar names were passed, their directory is the base.
        # os.path.commonprefix() is ignorant of path elements, this corrects
        # for that weird API.
        input_base_dir = os.path.dirname(input_base_dir)
    # if options.output_dir:
    #     input_base_dir = input_base_dir.rstrip(os.sep)
    #     logger.info('Output in %r will mirror the input directory %r layout.',
    #                 options.output_dir, input_base_dir)
    # Initialize the refactoring tool
    if future.utils.PY26:
        extra_kwargs = {}
    else:
        extra_kwargs = {
            'append_suffix': options.add_suffix,
            'output_dir': options.output_dir,
            'input_base_dir': input_base_dir,
        }
    # Remove results directory (start every run from a clean slate).
    if os.path.isdir(RESULTS_DIR):
        shutil.rmtree(RESULTS_DIR)
    os.mkdir(RESULTS_DIR)
    if os.path.isdir(DIFF_DIR):
        shutil.rmtree(DIFF_DIR)
    os.mkdir(DIFF_DIR)
    # We override their RefactoringTool with `FileRefactoringTool`
    rt = FileRefactoringTool(
        sorted(fixer_names), flags, sorted(explicit),
        options.nobackups, not options.no_diffs,
        **extra_kwargs)
    # Refactor all files and directories passed as arguments
    if not rt.errors:
        if refactor_stdin:
            rt.refactor_stdin()
        else:
            try:
                rt.refactor(args, options.write, None,
                            options.processes)
            except refactor.MultiprocessingUnsupported:
                assert options.processes > 1
                print("Sorry, -j isn't supported on this platform.", file=sys.stderr)
                return 1
        rt.summarize()
    # This is our own custom html reporting.
    table_body = tbody()
    remove_line_count_total = 0
    with table_body:
        for file_name, file_summary in DiffSummary.list_all():
            with tr():
                td(a(file_name, href=file_summary.href))
                td(file_summary.add_line_count, style="text-align:right")
                td(file_summary.remove_line_count, style="text-align:right")
                td(file_summary.percent_coverage, style="text-align:right")
            remove_line_count_total += file_summary.remove_line_count
    with document(title='2/3 Summary') as doc:
        h1('2/3 Summary', style='padding: 0 40px;')
        p('Total lines that need to be removed:', style='padding: 0 40px;').add(b(remove_line_count_total))
        summary_table = table(width='100%', style="padding: 20px 40px; margin: 0 auto;")
        with summary_table.add(thead()):
            with tr():
                th('File Name', style="text-align:left")
                th('Add Lines', style="text-align:right")
                th('Remove Lines', style="text-align:right")
                th('Coverage %', style="text-align:right")
        summary_table.add(table_body)
    with open('{results_dir}/summary.html'.format(results_dir=RESULTS_DIR), 'w+') as summary_file:
        summary_file.write(doc.render())
    # Write a machine readable report that can be parsed later.
    json_report = {
        'summary': {
            'remove_line_count_total': remove_line_count_total
        },
        'files': [
            {
                'file_name': file_name,
                'add_line_count': file_summary.add_line_count,
                'remove_line_count': file_summary.remove_line_count,
                'percent_coverage': file_summary.percent_coverage,
            } for file_name, file_summary in DiffSummary.list_all()
        ]
    }
    # Use a context manager so the report file is flushed and closed
    # (the previous bare open() leaked the file handle).
    with open('{results_dir}/report.json'.format(results_dir=RESULTS_DIR), 'w+') as report_file:
        json.dump(json_report, report_file)
    # Return error status (0 if rt.errors is zero)
    return int(bool(rt.errors))
| [
"logging.getLogger",
"dominate.tags.h1",
"dominate.tags.th",
"dominate.tags.thead",
"dominate.tags.a",
"dominate.tags.table",
"dominate.tags.tbody",
"do_py.common.managed_list.ManagedList",
"os.path.isdir",
"os.mkdir",
"py2to3cov.data_model.diff_summary.DiffSummary.list_all",
"dominate.documen... | [((1795, 1857), 'optparse.OptionParser', 'optparse.OptionParser', ([], {'usage': '"""futurize [options] file|dir ..."""'}), "(usage='futurize [options] file|dir ...')\n", (1816, 1857), False, 'import optparse\n'), ((6270, 6334), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(name)s: %(message)s"""', 'level': 'level'}), "(format='%(name)s: %(message)s', level=level)\n", (6289, 6334), False, 'import logging\n'), ((6348, 6385), 'logging.getLogger', 'logging.getLogger', (['"""libfuturize.main"""'], {}), "('libfuturize.main')\n", (6365, 6385), False, 'import logging\n'), ((10759, 10785), 'os.path.commonprefix', 'os.path.commonprefix', (['args'], {}), '(args)\n', (10779, 10785), False, 'import os\n'), ((11689, 11715), 'os.path.isdir', 'os.path.isdir', (['RESULTS_DIR'], {}), '(RESULTS_DIR)\n', (11702, 11715), False, 'import os\n'), ((11756, 11777), 'os.mkdir', 'os.mkdir', (['RESULTS_DIR'], {}), '(RESULTS_DIR)\n', (11764, 11777), False, 'import os\n'), ((11786, 11809), 'os.path.isdir', 'os.path.isdir', (['DIFF_DIR'], {}), '(DIFF_DIR)\n', (11799, 11809), False, 'import os\n'), ((11847, 11865), 'os.mkdir', 'os.mkdir', (['DIFF_DIR'], {}), '(DIFF_DIR)\n', (11855, 11865), False, 'import os\n'), ((12665, 12672), 'dominate.tags.tbody', 'tbody', ([], {}), '()\n', (12670, 12672), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((1253, 1274), 'do_py.common.managed_list.ManagedList', 'ManagedList', (['TestCase'], {}), '(TestCase)\n', (1264, 1274), False, 'from do_py.common.managed_list import ManagedList\n'), ((5747, 5819), 'lib2to3.main.warn', 'warn', (['"""not writing files and not printing diffs; that\'s not very useful"""'], {}), '("not writing files and not printing diffs; that\'s not very useful")\n', (5751, 5819), False, 'from lib2to3.main import warn\n'), ((11111, 11142), 'os.path.dirname', 'os.path.dirname', (['input_base_dir'], {}), '(input_base_dir)\n', (11126, 11142), False, 'import os\n'), 
((11725, 11751), 'shutil.rmtree', 'shutil.rmtree', (['RESULTS_DIR'], {}), '(RESULTS_DIR)\n', (11738, 11751), False, 'import shutil\n'), ((11819, 11842), 'shutil.rmtree', 'shutil.rmtree', (['DIFF_DIR'], {}), '(DIFF_DIR)\n', (11832, 11842), False, 'import shutil\n'), ((12765, 12787), 'py2to3cov.data_model.diff_summary.DiffSummary.list_all', 'DiffSummary.list_all', ([], {}), '()\n', (12785, 12787), False, 'from py2to3cov.data_model.diff_summary import DiffSummary\n'), ((13180, 13209), 'dominate.document', 'document', ([], {'title': '"""2/3 Summary"""'}), "(title='2/3 Summary')\n", (13188, 13209), False, 'from dominate import document\n'), ((13226, 13269), 'dominate.tags.h1', 'h1', (['"""2/3 Summary"""'], {'style': '"""padding: 0 40px;"""'}), "('2/3 Summary', style='padding: 0 40px;')\n", (13228, 13269), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((13402, 13466), 'dominate.tags.table', 'table', ([], {'width': '"""100%"""', 'style': '"""padding: 20px 40px; margin: 0 auto;"""'}), "(width='100%', style='padding: 20px 40px; margin: 0 auto;')\n", (13407, 13466), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((5285, 5331), 'lib2to3.main.warn', 'warn', (['"""--write-unchanged-files/-W implies -w."""'], {}), "('--write-unchanged-files/-W implies -w.')\n", (5289, 5331), False, 'from lib2to3.main import warn\n'), ((10869, 10898), 'os.path.isdir', 'os.path.isdir', (['input_base_dir'], {}), '(input_base_dir)\n', (10882, 10898), False, 'import os\n'), ((13350, 13376), 'dominate.tags.b', 'b', (['remove_line_count_total'], {}), '(remove_line_count_total)\n', (13351, 13376), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((12806, 12810), 'dominate.tags.tr', 'tr', ([], {}), '()\n', (12808, 12810), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((12885, 12942), 'dominate.tags.td', 'td', (['file_summary.add_line_count'], {'style': 
'"""text-align:right"""'}), "(file_summary.add_line_count, style='text-align:right')\n", (12887, 12942), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((12959, 13019), 'dominate.tags.td', 'td', (['file_summary.remove_line_count'], {'style': '"""text-align:right"""'}), "(file_summary.remove_line_count, style='text-align:right')\n", (12961, 13019), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((13036, 13095), 'dominate.tags.td', 'td', (['file_summary.percent_coverage'], {'style': '"""text-align:right"""'}), "(file_summary.percent_coverage, style='text-align:right')\n", (13038, 13095), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((13278, 13345), 'dominate.tags.p', 'p', (['"""Total lines that need to be removed:"""'], {'style': '"""padding: 0 40px;"""'}), "('Total lines that need to be removed:', style='padding: 0 40px;')\n", (13279, 13345), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((13498, 13505), 'dominate.tags.thead', 'thead', ([], {}), '()\n', (13503, 13505), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((13525, 13529), 'dominate.tags.tr', 'tr', ([], {}), '()\n', (13527, 13529), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((13547, 13587), 'dominate.tags.th', 'th', (['"""File Name"""'], {'style': '"""text-align:left"""'}), "('File Name', style='text-align:left')\n", (13549, 13587), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((13604, 13645), 'dominate.tags.th', 'th', (['"""Add Lines"""'], {'style': '"""text-align:right"""'}), "('Add Lines', style='text-align:right')\n", (13606, 13645), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((13662, 13706), 'dominate.tags.th', 'th', (['"""Remove Lines"""'], {'style': '"""text-align:right"""'}), "('Remove Lines', 
style='text-align:right')\n", (13664, 13706), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((13723, 13765), 'dominate.tags.th', 'th', (['"""Coverage %"""'], {'style': '"""text-align:right"""'}), "('Coverage %', style='text-align:right')\n", (13725, 13765), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n'), ((14454, 14476), 'py2to3cov.data_model.diff_summary.DiffSummary.list_all', 'DiffSummary.list_all', ([], {}), '()\n', (14474, 14476), False, 'from py2to3cov.data_model.diff_summary import DiffSummary\n'), ((12831, 12867), 'dominate.tags.a', 'a', (['file_name'], {'href': 'file_summary.href'}), '(file_name, href=file_summary.href)\n', (12832, 12867), False, 'from dominate.tags import a, b, h1, p, table, tbody, td, th, thead, tr\n')] |
from xml.etree.ElementTree import ElementTree
import glob
import subprocess
import os
import codecs
import threading
import time
# Number of worker threads used for manifest feature collection.
taskParam_noThread = 10
# Input file listing the apk names to process, one per line.
taskParam_apklistfilename = './apklist.txt'
# Output file names for each manifest feature group (F1..F10):
# uses-permission, uses-configuration, uses-feature, uses-library,
# declared permissions, uses-sdk, activities, services, providers, receivers.
taskParam_resultfilename_F1 = 'F1_UPS_mani.txt'
taskParam_resultfilename_F2 = 'F2_UCS_mani.txt'
taskParam_resultfilename_F3 = 'F3_UFS_mani.txt'
taskParam_resultfilename_F4 = 'F4_ULS_mani.txt'
taskParam_resultfilename_F5 = 'F5_DPS_mani.txt'
taskParam_resultfilename_F6 = 'F6_USS_mani.txt'
taskParam_resultfilename_F7 = 'F7_ACT_mani.txt'
taskParam_resultfilename_F8 = 'F8_SERV_mani.txt'
taskParam_resultfilename_F9 = 'F9_PROV_mani.txt'
taskParam_resultfilename_F10 = 'F10_RECV_mani.txt'
# Directory holding the per-apk raw data (expects <apk>/<apk>_mani.xml).
taskParam_rawdatadirname = "./RawData(3000)/"
# Namespace prefix used by all android:* attributes in a binary-decoded
# AndroidManifest.xml.
_ANDROID_NS = '{http://schemas.android.com/apk/res/android}'


def _collect_sorted_names(root, tag):
    """Return the sorted set of android:name values of all <tag> children of root.

    Elements without an android:name attribute contribute the literal string
    'name' (kept for backward compatibility with the original encoding).
    """
    names = set()
    for element in root.findall(tag):
        names.add(element.attrib.get(_ANDROID_NS + 'name', 'name'))
    return sorted(names)


def _parse_intent_filters(component):
    """Parse all <intent-filter> children of an application component.

    Each filter becomes [actions, categories, datas] where every entry is
    either a sorted list of android:name values (missing names encode as
    'no_name') or the placeholder 'no_action'/'no_category'/'no_data'.
    Returns the list of parsed filters, or the string 'no_intent' when the
    component declares none.
    """
    filters = []
    for intent_element in component.findall('intent-filter'):
        parsed = []
        for tag, placeholder in (('action', 'no_action'),
                                 ('category', 'no_category'),
                                 ('data', 'no_data')):
            names = {child.attrib.get(_ANDROID_NS + 'name', 'no_name')
                     for child in intent_element.findall(tag)}
            parsed.append(sorted(names) if names else placeholder)
        filters.append(parsed)
    return filters if filters else 'no_intent'


def _parse_component(element):
    """Return [permission, intent-filters] for an <activity>/<service>/<receiver>."""
    permission = element.attrib.get(_ANDROID_NS + 'permission', 'no_permission')
    return [permission, _parse_intent_filters(element)]


def _parse_provider(element):
    """Return [permission, grant-uri-permissions, intent-filters] for a <provider>."""
    permission = element.attrib.get(_ANDROID_NS + 'permission', 'no_permission')
    grants = []
    for gup_element in element.findall('grant-uri-permission'):
        grants.append([gup_element.attrib.get(_ANDROID_NS + attr, 'no_name')
                       for attr in ('path', 'pathPattern', 'pathPrefix')])
    grants = grants if grants else 'no_grant-uri-permission'
    return [permission, grants, _parse_intent_filters(element)]


def extracManifFeatVect(filename):
    """Extract the ten manifest feature groups from an AndroidManifest XML file.

    Parameters
    ----------
    filename : str
        Path to the decoded AndroidManifest.xml of one apk.

    Returns
    -------
    list
        [uses-permissions, uses-configurations, uses-features, uses-libraries,
         declared-permissions, uses-sdks, activities, services, providers,
         receivers] -- the first six are sorted lists of names, the last four
        are lists of per-component descriptions.
    """
    tree = ElementTree()
    root = tree.parse(filename)
    # Simple name-set features.
    susesPermSet = _collect_sorted_names(root, 'uses-permission')
    susesConfSet = _collect_sorted_names(root, 'uses-configuration')
    susesFeatSet = _collect_sorted_names(root, 'uses-feature')
    susesLibSet = _collect_sorted_names(root, 'uses-library')
    susesSdkSet = _collect_sorted_names(root, 'uses-sdk')
    # Declared <permission> elements: stringified [name, group, protectionLevel]
    # triples with attribute names as fallbacks for missing attributes.
    declarePermSet = set()
    for dp_element in root.findall('permission'):
        declarePermSet.add(str([
            dp_element.attrib.get(_ANDROID_NS + 'name', 'name'),
            dp_element.attrib.get(_ANDROID_NS + 'permissionGroup', 'permissionGroup'),
            dp_element.attrib.get(_ANDROID_NS + 'protectionLevel', 'protectionLevel'),
        ]))
    sdeclarePermSet = sorted(declarePermSet)
    # Application components (all <application> elements are merged).
    ActivityList = []
    ServiceList = []
    ProviderList = []
    ReceiverList = []
    for app_element in root.findall('application'):
        ActivityList.extend(_parse_component(el) for el in app_element.findall('activity'))
        ServiceList.extend(_parse_component(el) for el in app_element.findall('service'))
        ProviderList.extend(_parse_provider(el) for el in app_element.findall('provider'))
        ReceiverList.extend(_parse_component(el) for el in app_element.findall('receiver'))
    return [susesPermSet, susesConfSet, susesFeatSet, susesLibSet,
            sdeclarePermSet, susesSdkSet, ActivityList, ServiceList,
            ProviderList, ReceiverList]
class CollectManifestFeatures(threading.Thread):
    """Worker thread that collects Android-manifest feature vectors.

    Each worker walks its own `apklist`, parses <apk>_mani.xml via
    `extracManifFeatVect`, and merges the per-category results into the
    shared sets supplied by the caller.  Each processed APK name is
    appended to this worker's log file `flog`.
    """
    def __init__(self, ups, ucs, ufs, uls, uss, dps, acts, servs, provs, recvs, apklist, locklist, flog):
        threading.Thread.__init__(self)
        # Shared result sets, one per manifest feature category
        # (names follow the return order of extracManifFeatVect).
        self.ups = ups      # uses-permission
        self.ucs = ucs      # uses-configuration
        self.ufs = ufs      # uses-feature
        self.uls = uls      # uses-library
        self.dps = dps      # declared permissions
        self.uss = uss      # uses-sdk
        self.acts = acts    # activities
        self.servs = servs  # services
        self.provs = provs  # providers
        self.recvs = recvs  # receivers
        # `locklist` is kept for interface compatibility; the shared sets
        # are currently updated without explicit locking.
        self.locklist = locklist
        self.apklist = apklist
        self.flog = flog
    def run(self):
        """Process every APK assigned to this worker."""
        for apk in self.apklist:
            print("stage 1: "+apk+' start')
            dirname = taskParam_rawdatadirname+apk
            manifile = dirname+'/'+apk+'_mani.xml'
            # Skip APKs whose manifest XML was never extracted.
            if not os.path.exists(manifile):
                continue
            xmlFeats = extracManifFeatVect(manifile)
            # Indices follow the return order of extracManifFeatVect.
            self.ups.update(xmlFeats[0])
            self.ucs.update(xmlFeats[1])
            self.ufs.update(xmlFeats[2])
            self.uls.update(xmlFeats[3])
            self.dps.update(xmlFeats[4])
            self.uss.update(xmlFeats[5])
            # Component descriptions are nested lists; stringify them so
            # they become hashable set members.
            for ai in xmlFeats[6]:
                self.acts.add(str(ai))
            for si in xmlFeats[7]:
                self.servs.add(str(si))
            for pi in xmlFeats[8]:
                self.provs.add(str(pi))
            for ri in xmlFeats[9]:
                self.recvs.add(str(ri))
            # Record the processed APK name in this worker's log file.
            self.flog.write(apk)
            self.flog.write('\n')
            self.flog.flush()
############################## main ##############################
# Read the APK list, one APK name per line.
with open(taskParam_apklistfilename, 'r') as fapklist:
    lines = fapklist.readlines()
noofapks = len(lines)
# Distribute the APKs round-robin over the worker threads.
# NOTE(review): because of the modulo below, only list indices
# 0..taskParam_noThread-1 ever receive work; the extra list at index
# taskParam_noThread stays empty (its thread simply exits) -- confirm
# whether the off-by-one range is intentional.
listOfapklist = list()
for m in range(0, taskParam_noThread+1):
    listOfapklist.append(list())
order = 0
for line in lines:
    order = order + 1
    listOfapklist[order % taskParam_noThread].append(line[:-1])
# Total (shared) feature sets, one per manifest feature category.
tUPset = set()    # uses-permission
tUCset = set()    # uses-configuration
tUFset = set()    # uses-feature
tULset = set()    # uses-library
tDPset = set()    # declared permissions
tUSset = set()    # uses-sdk
tACTset = set()   # activities
tSERVset = set()  # services
tPROVset = set()  # providers
tRECVset = set()  # receivers
# One progress-log file per thread.
loglist = list()
for m in range(0, taskParam_noThread+1):
    loglist.append(open('howmany' + str(m) + '.txt', 'w'))
# Thread creation (# of threads = taskParam_noThread+1).
locklist = list()
for m in range(0, taskParam_noThread+1):
    locklist.append(threading.Lock())
t = list()
for m in range(0, taskParam_noThread+1):
    # BUG FIX: the original passed tDPset and tUSset in the wrong positions
    # (the `uss` and `dps` parameters were swapped), which exchanged the
    # contents of the F5 and F6 result files.
    t.append(CollectManifestFeatures(tUPset, tUCset, tUFset, tULset,
                                     tUSset, tDPset, tACTset, tSERVset,
                                     tPROVset, tRECVset, listOfapklist[m],
                                     locklist, loglist[m]))
# Thread start.
for m in range(0, taskParam_noThread+1):
    t[m].start()
# Thread end.
for m in range(0, taskParam_noThread+1):
    t[m].join()
# Close the per-thread log files (they were leaked in the original).
for fhowmany in loglist:
    fhowmany.close()
def _dump_feature_set(path, feature_set):
    """Write every collected feature to *path*, one item per line."""
    with open(path, 'w') as fout:
        for item in feature_set:
            fout.write(item)
            fout.write('\n')
# Log all features: persist every category to its result file.
for _path, _fset in (
        (taskParam_resultfilename_F1, tUPset),
        (taskParam_resultfilename_F2, tUCset),
        (taskParam_resultfilename_F3, tUFset),
        (taskParam_resultfilename_F4, tULset),
        (taskParam_resultfilename_F5, tDPset),
        (taskParam_resultfilename_F6, tUSset),
        (taskParam_resultfilename_F7, tACTset),
        (taskParam_resultfilename_F8, tSERVset),
        (taskParam_resultfilename_F9, tPROVset),
        (taskParam_resultfilename_F10, tRECVset)):
    _dump_feature_set(_path, _fset)
############################## end ###############################
| [
"xml.etree.ElementTree.ElementTree",
"threading.Lock",
"os.path.exists",
"threading.Thread.__init__"
] | [((777, 790), 'xml.etree.ElementTree.ElementTree', 'ElementTree', ([], {}), '()\n', (788, 790), False, 'from xml.etree.ElementTree import ElementTree\n'), ((15137, 15168), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (15162, 15168), False, 'import threading\n'), ((17440, 17456), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (17454, 17456), False, 'import threading\n'), ((15721, 15745), 'os.path.exists', 'os.path.exists', (['manifile'], {}), '(manifile)\n', (15735, 15745), False, 'import os\n')] |
import logging
from typing import NamedTuple, Optional, Union
# Module-level logger used for parse diagnostics below.
LOG = logging.getLogger(__name__)
# Parsed ads.txt data record (spec section 3.4.2 THE RECORD): seller
# domain, publisher account id, relationship type, and the optional
# certification authority id.
AdsRecord = NamedTuple('AdsRecord', [
    ('supplier_domain', str),
    ('pub_id', str),
    ('supplier_relationship', str),
    ('cert_authority', Optional[str]),
])
# Parsed ads.txt variable row: the key/value pair around the first '='.
AdsVariable = NamedTuple('AdsVariable', [
    ('key', str),
    ('value', str),
])
def process_row(row: str) -> Union[AdsRecord, AdsVariable, None]:
    """Process an ads.txt row and return a parsed tuple.

    Args:
        row (str): Raw string from the crawler to be processed.

    Returns:
        Union[AdsRecord, AdsVariable, None]: ``AdsVariable`` for a
        ``key=value`` line, ``AdsRecord`` for a well-formed data record,
        ``None`` for comments and malformed rows.  This just follows the
        adstxt spec.
    """
    # Whole-line comment: nothing to parse.
    if row.startswith('#'):
        return None
    # If the row has a '#' that's probably an inline comment; drop it.
    if '#' in row:
        row = row.split('#')[0]
    # If a row contains an equals then it's a variable.
    # TODO: Remove this hack to get around bad switch contepts vars.
    if '=' in row and 'concepts' not in row:
        # The value can contain arbitrary text (including more '=' signs),
        # so split on the first '=' only; str.partition does exactly that
        # without the manual index scan.
        key, _, value = row.partition('=')
        return AdsVariable(key=key, value=value)
    # In this case it might be a record.
    # Filter out based upon 3.4.2 THE RECORD.
    LOG.debug('Processing a Record, %r', row)
    # Strip surrounding spaces and tabs before splitting on commas.
    clean_row = row.strip(' ').strip('\t')
    record_vals = clean_row.split(',')
    # A record needs at least domain, publisher id and relationship.
    if len(record_vals) < 3:
        LOG.debug('Bad record found, %r', row)
        return None
    # Domain names are case insensitive so lowercase.  Note that a bare
    # str.strip() already removes tabs as well as spaces, so the original
    # ``.strip().strip('\t')`` chain collapses to one call.
    supplier_domain = record_vals[0].lower().strip()
    # Pub ID shows this as a string or int; keep it as a string.
    pub_id = record_vals[1].strip()
    # This can only be one of two values, try and extract that.
    relationship = record_vals[2].lower().strip()
    if 'reseller' in relationship:
        supplier_relationship = 'reseller'
    elif 'direct' in relationship:
        supplier_relationship = 'direct'
    else:
        LOG.debug('Found a bad record; %s', row)
        return None
    # Cert authority is optional (exactly four fields when present).
    if len(record_vals) == 4:
        cert_authority = record_vals[3].strip()
    # mypy trips up on sqlalchemy, nullable=True so just ignore it.
    else:
        cert_authority = None  # type: ignore
    ret_val = AdsRecord(supplier_domain=supplier_domain,
                        pub_id=pub_id,
                        supplier_relationship=supplier_relationship,
                        cert_authority=cert_authority)
    LOG.debug('Returning record... %r', ret_val)
    return ret_val
| [
"logging.getLogger"
] | [((70, 97), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (87, 97), False, 'import logging\n')] |
from perception_msgs.msg import State
from geometry_msgs.msg import Point, Pose
def to_state_pose_msg_list(value_dict, filter_xyz_dict):
    """Convert detection outputs into parallel lists of ROS messages.

    Args:
        value_dict: mapping of signal id -> detection value (fed into the
            State message; assumed numeric -- TODO confirm against caller).
        filter_xyz_dict: mapping of signal id -> indexable (x, y, z)
            position, read only for signals with a non-zero value.

    Returns:
        (state_list, pose_list): one State and one Pose per entry of
        value_dict, in the same order.
    """
    states = []
    poses = []
    for sig, value in value_dict.items():
        msg = State()
        msg.type = 'cnn'
        msg.name = str(sig)
        msg.value = value
        states.append(msg)
        pose = Pose()
        # Position/orientation are only filled in for non-zero detections;
        # otherwise the Pose stays at its defaults.
        if msg.value != 0:
            xyz = filter_xyz_dict[sig]
            pose.position.x = xyz[0]
            pose.position.y = xyz[1]
            pose.position.z = xyz[2]
            # Identity orientation (no rotation).
            pose.orientation.x = 0
            pose.orientation.y = 0
            pose.orientation.z = 0
            pose.orientation.w = 1
        poses.append(pose)
    return states, poses
| [
"geometry_msgs.msg.Pose",
"perception_msgs.msg.State"
] | [((220, 227), 'perception_msgs.msg.State', 'State', ([], {}), '()\n', (225, 227), False, 'from perception_msgs.msg import State\n'), ((372, 378), 'geometry_msgs.msg.Pose', 'Pose', ([], {}), '()\n', (376, 378), False, 'from geometry_msgs.msg import Point, Pose\n')] |
# coding: utf-8
"""
Masking API
Schema for the Masking Engine API # noqa: E501
OpenAPI spec version: 5.1.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dxm.lib.masking_api.api_client import ApiClient
class ExecutionApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is supplied.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def cancel_execution(self, execution_id, **kwargs):  # noqa: E501
        """Cancel execution by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.cancel_execution(execution_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int execution_id: The ID of the execution to cancel (required)
        :param str expected_status: The expected status of the execution to cancel to prevent cancelling a queued job that has transitioned to a running state since the request was issued.
        :return: Execution
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Synchronous callers get the unwrapped response body; async callers
        # get the request thread from the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.cancel_execution_with_http_info(execution_id, **kwargs)  # noqa: E501
        else:
            (data) = self.cancel_execution_with_http_info(execution_id, **kwargs)  # noqa: E501
            return data

    def cancel_execution_with_http_info(self, execution_id, **kwargs):  # noqa: E501
        """Cancel execution by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.cancel_execution_with_http_info(execution_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int execution_id: The ID of the execution to cancel (required)
        :param str expected_status: The expected status of the execution to cancel to prevent cancelling a queued job that has transitioned to a running state since the request was issued.
        :return: Execution
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['execution_id', 'expected_status']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Gather keyword arguments and reject any the endpoint does not accept.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method cancel_execution" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'execution_id' is set
        if self.api_client.client_side_validation and ('execution_id' not in params or
                                                      params['execution_id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `execution_id` when calling `cancel_execution`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'execution_id' in params:
            path_params['executionId'] = params['execution_id']  # noqa: E501

        query_params = []
        if 'expected_status' in params:
            query_params.append(('expectedStatus', params['expected_status']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/executions/{executionId}/cancel', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Execution',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def create_execution(self, body, **kwargs):  # noqa: E501
        """Create execution  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_execution(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param Execution body: The execution to create (required)
        :return: Execution
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.create_execution_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.create_execution_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def create_execution_with_http_info(self, body, **kwargs):  # noqa: E501
        """Create execution  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_execution_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param Execution body: The execution to create (required)
        :return: Execution
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Gather keyword arguments and reject any the endpoint does not accept.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_execution" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in params or
                                                      params['body'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `body` when calling `create_execution`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/executions', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Execution',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_all_executions(self, **kwargs):  # noqa: E501
        """Get all executions  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_executions(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int job_id: The ID of the job to get all executions for
        :param int page_number: The page number for which to get executions. This will default to the first page if excluded
        :param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
        :return: ExecutionList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_all_executions_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_all_executions_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_all_executions_with_http_info(self, **kwargs):  # noqa: E501
        """Get all executions  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_executions_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int job_id: The ID of the job to get all executions for
        :param int page_number: The page number for which to get executions. This will default to the first page if excluded
        :param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
        :return: ExecutionList
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['job_id', 'page_number', 'page_size']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Gather keyword arguments and reject any the endpoint does not accept.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_executions" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'job_id' in params:
            query_params.append(('job_id', params['job_id']))  # noqa: E501
        if 'page_number' in params:
            query_params.append(('page_number', params['page_number']))  # noqa: E501
        if 'page_size' in params:
            query_params.append(('page_size', params['page_size']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/executions', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ExecutionList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_execution_by_id(self, execution_id, **kwargs):  # noqa: E501
        """Get execution by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_execution_by_id(execution_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int execution_id: The ID of the execution to get (required)
        :return: Execution
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_execution_by_id_with_http_info(execution_id, **kwargs)  # noqa: E501
        else:
            (data) = self.get_execution_by_id_with_http_info(execution_id, **kwargs)  # noqa: E501
            return data

    def get_execution_by_id_with_http_info(self, execution_id, **kwargs):  # noqa: E501
        """Get execution by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_execution_by_id_with_http_info(execution_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int execution_id: The ID of the execution to get (required)
        :return: Execution
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['execution_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Gather keyword arguments and reject any the endpoint does not accept.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_execution_by_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'execution_id' is set
        if self.api_client.client_side_validation and ('execution_id' not in params or
                                                      params['execution_id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `execution_id` when calling `get_execution_by_id`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'execution_id' in params:
            path_params['executionId'] = params['execution_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/executions/{executionId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Execution',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| [
"dxm.lib.masking_api.api_client.ApiClient",
"six.iteritems"
] | [((2992, 3023), 'six.iteritems', 'six.iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (3005, 3023), False, 'import six\n'), ((6782, 6813), 'six.iteritems', 'six.iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (6795, 6813), False, 'import six\n'), ((10905, 10936), 'six.iteritems', 'six.iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (10918, 10936), False, 'import six\n'), ((14499, 14530), 'six.iteritems', 'six.iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (14512, 14530), False, 'import six\n'), ((689, 700), 'dxm.lib.masking_api.api_client.ApiClient', 'ApiClient', ([], {}), '()\n', (698, 700), False, 'from dxm.lib.masking_api.api_client import ApiClient\n')] |
import streamlit as st
import streamlit.components.v1 as stc
from eda_app import run_eda_app
from ml_app import run_ml_app
html_temp = """
<div style="background-color:#3872fb;padding:10px;border-radius:10px">
<h1 style="color:white;text-align:center;">Early Stage DM Risk Data App </h1>
<h4 style="color:white;text-align:center;">Diabetes </h4>
</div>
"""
def main():
    """Render the Streamlit app and route the sidebar menu selection."""
    stc.html(html_temp)
    choice = st.sidebar.selectbox("Menu", ["Home", "EDA", "ML", "About"])
    if choice == "EDA":
        run_eda_app()
    elif choice == "ML":
        run_ml_app()
    elif choice == "Home":
        st.subheader("Home")
        # Landing-page description (tab-indented markdown kept verbatim).
        st.write("\n\t\t\t### Early Stage Diabetes Risk Predictor App\n\t\t\tThis dataset contains the sign and symptoms data of newly diabetic or would be diabetic patient.\n\t\t\t#### Datasource\n\t\t\t\t- https://archive.ics.uci.edu/ml/datasets/Early+stage+diabetes+risk+prediction+dataset.\n\t\t\t#### App Content\n\t\t\t\t- EDA Section: Exploratory Data Analysis of Data\n\t\t\t\t- ML Section: ML Predictor App\n\n\t\t\t")
    else:
        st.subheader("About")
        st.text("Learn Streamlit Course")
        st.text("Jesus Saves @JCharisTech")
        st.text("By <NAME>(JCharis)")

if __name__ == '__main__':
    main()
"eda_app.run_eda_app",
"ml_app.run_ml_app",
"streamlit.write",
"streamlit.text",
"streamlit.subheader",
"streamlit.sidebar.selectbox",
"streamlit.components.v1.html"
] | [((425, 444), 'streamlit.components.v1.html', 'stc.html', (['html_temp'], {}), '(html_temp)\n', (433, 444), True, 'import streamlit.components.v1 as stc\n'), ((492, 526), 'streamlit.sidebar.selectbox', 'st.sidebar.selectbox', (['"""Menu"""', 'menu'], {}), "('Menu', menu)\n", (512, 526), True, 'import streamlit as st\n'), ((551, 571), 'streamlit.subheader', 'st.subheader', (['"""Home"""'], {}), "('Home')\n", (563, 571), True, 'import streamlit as st\n'), ((574, 971), 'streamlit.write', 'st.write', (['"""\n\t\t\t### Early Stage Diabetes Risk Predictor App\n\t\t\tThis dataset contains the sign and symptoms data of newly diabetic or would be diabetic patient.\n\t\t\t#### Datasource\n\t\t\t\t- https://archive.ics.uci.edu/ml/datasets/Early+stage+diabetes+risk+prediction+dataset.\n\t\t\t#### App Content\n\t\t\t\t- EDA Section: Exploratory Data Analysis of Data\n\t\t\t\t- ML Section: ML Predictor App\n\n\t\t\t"""'], {}), '(\n """\n\t\t\t### Early Stage Diabetes Risk Predictor App\n\t\t\tThis dataset contains the sign and symptoms data of newly diabetic or would be diabetic patient.\n\t\t\t#### Datasource\n\t\t\t\t- https://archive.ics.uci.edu/ml/datasets/Early+stage+diabetes+risk+prediction+dataset.\n\t\t\t#### App Content\n\t\t\t\t- EDA Section: Exploratory Data Analysis of Data\n\t\t\t\t- ML Section: ML Predictor App\n\n\t\t\t"""\n )\n', (582, 971), True, 'import streamlit as st\n'), ((987, 1000), 'eda_app.run_eda_app', 'run_eda_app', ([], {}), '()\n', (998, 1000), False, 'from eda_app import run_eda_app\n'), ((1025, 1037), 'ml_app.run_ml_app', 'run_ml_app', ([], {}), '()\n', (1035, 1037), False, 'from ml_app import run_ml_app\n'), ((1047, 1068), 'streamlit.subheader', 'st.subheader', (['"""About"""'], {}), "('About')\n", (1059, 1068), True, 'import streamlit as st\n'), ((1071, 1104), 'streamlit.text', 'st.text', (['"""Learn Streamlit Course"""'], {}), "('Learn Streamlit Course')\n", (1078, 1104), True, 'import streamlit as st\n'), ((1107, 1142), 'streamlit.text', 
'st.text', (['"""Jesus Saves @JCharisTech"""'], {}), "('Jesus Saves @JCharisTech')\n", (1114, 1142), True, 'import streamlit as st\n'), ((1145, 1174), 'streamlit.text', 'st.text', (['"""By <NAME>(JCharis)"""'], {}), "('By <NAME>(JCharis)')\n", (1152, 1174), True, 'import streamlit as st\n')] |
# setup.py for the iclientpy JupyterHub extension package.
from __future__ import print_function
import os
from setuptools import setup, find_packages

# Absolute directory containing this setup.py.
here = os.path.dirname(os.path.abspath(__file__))
# NOTE(review): `node_root` and `is_repo` are computed but never used below;
# they look like leftovers from a jupyter-widget setup.py template.
node_root = os.path.join(here, 'js')
is_repo = os.path.exists(os.path.join(here, '.git'))
from distutils import log
log.set_verbosity(log.DEBUG)
log.info('setup.py entered')
log.info('$PATH=%s' % os.environ['PATH'])
LONG_DESCRIPTION = 'iclientpy ext jupyterhub'
# Execute _version.py in an isolated namespace to read __version__ without
# importing the (possibly not yet installed) package.
version_ns = {}
with open(os.path.join(here, 'iclientpyjupyterhubext', '_version.py')) as f:
    exec(f.read(), {}, version_ns)
setup_args = {
    'name': 'iclientpy.jupyterhub.ext',
    'version': version_ns['__version__'],
    'description': 'iclientpy for jupyterhub',
    'long_description': LONG_DESCRIPTION,
    'include_package_data': True,
    'install_requires': [
        'pamela>=0.3.0',
        'python_dateutil>=2.6.1',
        'tornado>=4.5.3',
        'jupyterhub>=0.8.1'
    ],
    'packages': find_packages(exclude=("*.test", "*.test.*", "test.*", "test")),
    'zip_safe': False,
    'author': 'supermap',
    'author_email': '<EMAIL>',
    'classifiers': [
        'Development Status :: 4 - Beta',
        'Framework :: IPython',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'Topic :: Multimedia :: Graphics',
        'Programming Language :: Python :: 3.6',
    ],
}
setup(**setup_args)
| [
"setuptools.find_packages",
"os.path.join",
"setuptools.setup",
"distutils.log.info",
"distutils.log.set_verbosity",
"os.path.abspath"
] | [((160, 184), 'os.path.join', 'os.path.join', (['here', '"""js"""'], {}), "(here, 'js')\n", (172, 184), False, 'import os\n'), ((271, 299), 'distutils.log.set_verbosity', 'log.set_verbosity', (['log.DEBUG'], {}), '(log.DEBUG)\n', (288, 299), False, 'from distutils import log\n'), ((301, 329), 'distutils.log.info', 'log.info', (['"""setup.py entered"""'], {}), "('setup.py entered')\n", (309, 329), False, 'from distutils import log\n'), ((331, 372), 'distutils.log.info', 'log.info', (["('$PATH=%s' % os.environ['PATH'])"], {}), "('$PATH=%s' % os.environ['PATH'])\n", (339, 372), False, 'from distutils import log\n'), ((1400, 1419), 'setuptools.setup', 'setup', ([], {}), '(**setup_args)\n', (1405, 1419), False, 'from setuptools import setup, find_packages\n'), ((120, 145), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (135, 145), False, 'import os\n'), ((211, 237), 'os.path.join', 'os.path.join', (['here', '""".git"""'], {}), "(here, '.git')\n", (223, 237), False, 'import os\n'), ((952, 1015), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "('*.test', '*.test.*', 'test.*', 'test')"}), "(exclude=('*.test', '*.test.*', 'test.*', 'test'))\n", (965, 1015), False, 'from setuptools import setup, find_packages\n'), ((452, 511), 'os.path.join', 'os.path.join', (['here', '"""iclientpyjupyterhubext"""', '"""_version.py"""'], {}), "(here, 'iclientpyjupyterhubext', '_version.py')\n", (464, 511), False, 'import os\n')] |
__version__ = '0.1.13'
import logging
# matplotlib logs verbosely at DEBUG/INFO; raise its level to WARNING so
# explorer output stays readable.
mpl_logger = logging.getLogger('matplotlib')
mpl_logger.setLevel(logging.WARNING)
from cartesian_explorer import lazy_imports
from cartesian_explorer.lib.lru_cache import lru_cache
from cartesian_explorer.lib.lru_cache_mproc import lru_cache as lru_cache_mproc
from cartesian_explorer.lib.dict_product import dict_product
from cartesian_explorer.Explorer import Explorer
def get_example_explorer():
    """ Create a demonstrative explorer.
    The explorer describes radioactive decay
    of Pb isotopes.
    Provides:
        Mass: mass left of isotope of type `isotope`
        Speed: current speed of decay at `time_sec`
    Requires:
        time_sec: time to calculate outputs at.
        isotope: type of isotope: one of "Pb186", "Pb187", "Pb188"
    """
    import numpy as np
    ex = Explorer()
    @ex.provider
    @ex.add_function(provides='Mass', requires=('time_sec', 'T'))
    def mass(time_sec, T):
        # Exponential decay: remaining mass fraction at time_sec.
        return np.exp(-T*time_sec)
    # NOTE(review): unlike `mass`, `speed` is not wrapped in @ex.provider --
    # confirm whether @ex.add_function alone registers it with the explorer.
    @ex.add_function(provides='Speed', requires=('time_sec', 'T'))
    def speed(time_sec, T):
        # Derivative of the mass curve (negative: mass is decreasing).
        return -T*np.exp(-T*time_sec)
    @ex.provider
    def T(isotope):
        # ln(2)/x has the form of a decay constant -- presumably x is the
        # isotope half-life; TODO confirm the units.
        if isotope == 'Pb186':
            return np.log(2)/4.82
        if isotope == 'Pb187':
            return np.log(2)/15.2
        if isotope == 'Pb188':
            return np.log(2)/35.2
        # Implicitly returns None for unsupported isotope names.
    return ex
| [
"logging.getLogger",
"numpy.exp",
"numpy.log",
"cartesian_explorer.Explorer.Explorer"
] | [((51, 82), 'logging.getLogger', 'logging.getLogger', (['"""matplotlib"""'], {}), "('matplotlib')\n", (68, 82), False, 'import logging\n'), ((837, 847), 'cartesian_explorer.Explorer.Explorer', 'Explorer', ([], {}), '()\n', (845, 847), False, 'from cartesian_explorer.Explorer import Explorer\n'), ((973, 994), 'numpy.exp', 'np.exp', (['(-T * time_sec)'], {}), '(-T * time_sec)\n', (979, 994), True, 'import numpy as np\n'), ((1107, 1128), 'numpy.exp', 'np.exp', (['(-T * time_sec)'], {}), '(-T * time_sec)\n', (1113, 1128), True, 'import numpy as np\n'), ((1215, 1224), 'numpy.log', 'np.log', (['(2)'], {}), '(2)\n', (1221, 1224), True, 'import numpy as np\n'), ((1280, 1289), 'numpy.log', 'np.log', (['(2)'], {}), '(2)\n', (1286, 1289), True, 'import numpy as np\n'), ((1345, 1354), 'numpy.log', 'np.log', (['(2)'], {}), '(2)\n', (1351, 1354), True, 'import numpy as np\n')] |
import torch
import argparse
import scipy
import numpy as np
import pickle
from deeprobust.graph.targeted_attack import Nettack
from deeprobust.graph.utils import *
from deeprobust.graph.data import Dataset
from deeprobust.graph.defense import *
from sklearn.preprocessing import normalize
from tqdm import tqdm
from scipy.sparse.linalg import eigs
from scipy.sparse import csgraph,lil_matrix
from scipy import spatial
# ---------------------------------------------------------------- arguments
parser = argparse.ArgumentParser()
parser.add_argument("--dataset", type = str, default = "cora",choices = ["cora","citeseer"],help="dataset")
# NOTE(review): argparse `type=bool` is a known pitfall -- bool("False") is
# True, so ANY non-empty value sets these flags.  Kept unchanged for CLI
# compatibility; consider `action="store_true"` instead.
parser.add_argument("--defense", type = bool, default = False, help="defense or not") # with --defense flag, the value of flag is true
parser.add_argument("--model", type = str, default = "GCN", choices= ["GCN","GAT","GIN"])
parser.add_argument("--debug", type = bool, default = True, choices= [True,False])
parser.add_argument("--seed", type = int, default = 29, help="Random Seed" )
parser.add_argument("--direct", action = "store_false", help = "direct attack / influence attack") # with --direct flag, the val of flag is false
args = parser.parse_args()
args.cuda = torch.cuda.is_available()
device = torch.device("cuda" if args.cuda else "cpu")
# Seed every RNG in play so attack runs are reproducible.
np.random.seed(args.seed)
torch.manual_seed(args.seed)
# Influence attack perturbs neighbours instead of the target node itself.
if args.direct:
    influencers = 0
else:
    influencers = 5
if args.cuda:
    # BUG FIX: the original seeded CUDA with `args.cuda` (a bool) instead of
    # the chosen random seed.
    torch.cuda.manual_seed(args.seed)
if args.debug:
    print('cuda :: {}\ndataset :: {}\nDefense Algo :: {}\nmodel :: {}\nDirect attack :: {}'.format(args.cuda, args.dataset, args.defense, args.model, args.direct))
# get data from deeprobust/Dataset
data = Dataset(root='/tmp/',name=args.dataset)
# adj matrix, features, labels
adj, features, labels = data.adj, data.features, data.labels
# train, validation, test index sets
idx_train, idx_val, idx_test = data.idx_train, data.idx_val, data.idx_test
# Symmetrise the adjacency matrix and clip doubled edges back to 1.
adj = adj + adj.T
adj[adj > 1] = 1
# Surrogate GCN used by Nettack to approximate the victim model
# (linearised: no ReLU, no bias).
surrogate = GCN(nfeat = features.shape[1], nclass = labels.max().item()+1, nhid = 16, dropout = 0, with_relu = False,
            with_bias = False, device = device).to(device)
# fit(features, adj, labels, ...): `patience` = early stopping on the
# validation set, `train_iters` = number of training epochs.
surrogate.fit(features, adj, labels, idx_train, idx_val, patience = 30, train_iters = 100)
# setup attack model (structure perturbations only)
target_node = 384 #1554
model = Nettack(surrogate, nnodes = adj.shape[0], attack_structure = True, attack_features = False, device = device).to(device)
# set defense
defense = args.defense
def main():
    """Attack `target_node` with a degree-sized perturbation budget, then
    print the spectral distance between the clean and perturbed graphs.

    Uses the module-level `adj`, `features`, `labels`, `model`, `args` and
    `influencers` set up above.
    """
    degrees = adj.sum(0).A1
    print('index ::', np.where(degrees == max(degrees)))
    # Budget the attack by the target's degree, as in the Nettack paper.
    per_num = int(degrees[target_node])
    if args.debug:
        print('degrees (# of perturbations) :: {}'.format(per_num))
    model.attack(features, adj, labels, target_node, per_num, direct = args.direct, n_influencers = influencers)
    m_adj = model.modified_adj
    m_features = model.modified_features
    # BUG FIX: SpectralDistance returns THREE values (distance, summed
    # eigenvalue difference, eigenvector difference matrix); the previous
    # two-name unpacking raised a ValueError here.
    S_Distance, eigv_dif, evec_dif = SpectralDistance(adj, m_adj)
    print(S_Distance)
def SpectralDistance(adj,m_adj):
#I = lil_matrix(np.eye(adj.shape[0]))
L_norm = csgraph.laplacian(adj)
L_norm_m = csgraph.laplacian(m_adj)
evals,evecs = np.linalg.eig(L_norm.todense())
evals = evals.real
#print(evals)
print(evecs.shape)
m_evals, m_evecs = np.linalg.eig(L_norm_m.todense())
m_evals = m_evals.real
evec_dif = evecs - m_evecs
print("Evec difference:")
print(evec_dif)
print("================")
#dif = (evals-m_evals)
dif2 = sum(m_evals)-sum(evals)
dif3 = np.linalg.norm(m_evals)-np.linalg.norm(evals)
#print(dif2)
#np.set_printoptions(threshold=np.inf)
#with open('Eigenvalus.log','a+') as f:
# print(dif2,file=f)
#print(dif,file=f)
#L_norm = csgraph.laplacian(np.diag(evals))
#L_norm_m = csgraph.laplacian(np.diag(m_evals))
#dif = (L_norm - L_norm_m)
#dif = (np.diag(evals)-np.diag(evals))
#print(np.linalg.norm(dif,axis=1))
dif1 = (np.diag(evals)-np.diag(m_evals))
"""
Dis here is the difference of eigenvalues:
"""
#d = evals - m_evals
#Dis = {dis:idx for idx,dis in enumerate(d) if dis>1}
#print(Dis)
S_Dis = np.linalg.norm(dif1)
#print(S_Dis)
#Dis = {d:idx for idx,d in enumerate(S_Dis) if d>=1}
#Dis = sorted(Dis,reverse=True)
#print(Dis)
#print(len(Dis))
#print(np.where(S_Dis == max(S_Dis)))
#dif = evals-m_evals
return S_Dis, dif2, evec_dif
"""
print("=======test on clean adj===================")
print("without defense :: ")
test(adj, features, target_node,defense_al=False)
print("with defense (with default setting):: ")
test(adj, features, target_node, defense_al = defense)
print("================ test on perturbed adj =================")
print("without defense ::")
test(m_adj, m_features, target_node,defense_al=False)
print("with defense (with default setting)::")
test(m_adj, m_features, target_node, defense_al = defense)
def test(adj, features, target, defense_al=False):
target_model = globals()[args.model](nfeat = features.shape[1], nhid = 16, nclass = labels.max().item()+1, dropout = 0.5, device = device)
target_model = target_model.to(device)
target_model.fit(features, adj, labels, idx_train, idx_val=idx_val, attention = defense_al)
target_model.eval()
_, output = target_model.test(idx_test=idx_test)
probs = torch.exp(output[[target_node]])[0]
print('probs: {}'.format(probs.detach().cpu().numpy()))
acc_test = accuracy(output[idx_test], labels[idx_test])
print('Test set accuracy:',
"accuracy = {:.4f}".format(acc_test.item()))
return acc_test.item()
"""
def multi_evecs():
    """Attack every selected node, collect the flattened eigenvector
    difference of each perturbed graph, and log pairwise angles between those
    difference vectors plus their mean/variance.

    Uses module-level globals (`adj`, `features`, `labels`, `surrogate`,
    `args`, `influencers`, `device`). Appends results to
    <dataset>_<model>_Directions_rf.log.
    """
    cnt = 0  # NOTE(review): unused here (copied from multi_test) -- confirm
    degrees = adj.sum(0).A1
    node_list = select_nodes(num_target=10)
    print(node_list)
    a = []  # flattened eigenvector-difference vector per attacked node
    angle = []  # NOTE(review): never populated -- dead variable
    num = len(node_list)
    def get_angle(x,y):
        # Angle between two vectors via normalized dot product; clip guards
        # against arccos domain errors from rounding.
        u1 = x/np.linalg.norm(x)
        u2 = y/np.linalg.norm(y)
        return np.arccos(np.clip(np.real(np.dot(u1, u2.T)), -1.0, 1.0))
    print('=== Attacking %s nodes respectively ===' % num)
    for target_node in tqdm(node_list):
        # Per-node budget = node degree.
        n_perturbations = int(degrees[target_node])
        if n_perturbations <1: # at least one perturbation
            continue
        # Fresh attack model for each target node.
        model = Nettack(surrogate, nnodes=adj.shape[0], attack_structure=True, attack_features=False, device=device)
        model = model.to(device)
        model.attack(features, adj, labels, target_node, n_perturbations, direct=args.direct, n_influencers = influencers, verbose=False)
        modified_adj = model.modified_adj
        modified_features = model.modified_features
        S_Dis, sum_eigv_dif, evec_dif = SpectralDistance(adj,modified_adj)
        a.append(evec_dif.flatten())
        if(len(a)==2):
            print('angle test:{}'.format(get_angle(a[0], a[1])))
    # All pairwise angles (including self-pairs, which are ~0).
    a_list = [get_angle(x,y) for x in a for y in a]
    mean = np.mean(a, axis=0)
    var = np.var(a, axis=0)
    # Print full arrays to the log, without numpy truncation.
    np.set_printoptions(threshold=np.inf)
    with open(args.dataset+'_'+args.model+'_Directions_rf.log','a+') as f:
        print('Angle:',file=f)
        print(a_list,file=f)
        print('Mean:{}, Var:{}'.format(mean, var),file=f)
def multi_test():
    """Attack each selected node with a degree-sized budget and append the
    spectral distance of every resulting perturbed graph to
    <dataset>_<model>_SpectralDistance_sum.log.

    Uses module-level globals (`adj`, `features`, `labels`, `surrogate`,
    `args`, `influencers`, `device`).
    """
    cnt = 0
    degrees = adj.sum(0).A1
    node_list = select_nodes(num_target=10)
    print(node_list)
    num = len(node_list)
    print('=== Attacking %s nodes respectively ===' % num)
    num_tar = 0
    for target_node in tqdm(node_list):
        # Per-node budget = node degree; skip isolated nodes.
        n_perturbations = int(degrees[target_node])
        if n_perturbations <1: # at least one perturbation
            continue
        model = Nettack(surrogate, nnodes=adj.shape[0], attack_structure=True, attack_features=False, device=device)
        model = model.to(device)
        model.attack(features, adj, labels, target_node, n_perturbations, direct=args.direct, n_influencers = influencers, verbose=False)
        modified_adj = model.modified_adj
        modified_features = model.modified_features
        # BUG FIX: SpectralDistance returns THREE values; the previous
        # two-name unpacking raised a ValueError on the first iteration.
        S_Dis, sum_eigv_dif, _evec_dif = SpectralDistance(adj, modified_adj)
        print(target_node,'::',S_Dis)
        with open(args.dataset+'_'+args.model+'_SpectralDistance_sum.log','a+') as f:
            print('Target Node: {}, S_Dis: {}, Eigv_dif: {}'.format(target_node,S_Dis,sum_eigv_dif),file=f)
    # Dead code below kept verbatim as in the original (no-op string literal).
    """
    acc = single_test(modified_adj, modified_features, target_node)
    if acc == 0:
        cnt += 1
    num_tar += 1
    with open(args.dataset+"_"+args.model+"_gsl.log","a+") as f:
        print('classification rate : %s' % (1-cnt/num_tar), '# of targets:',num_tar,file=f)
    print('classification rate : %s' % (1-cnt/num_tar), '# of targets:', num_tar)
    """
def single_test(adj, features, target_node):
    """Train a fresh classifier (args.model: GCN/GAT/GIN, optionally with the
    defense enabled) on the given graph and return 0/1 accuracy on
    `target_node`. Also appends the prediction to <dataset>_<model>_gsl.log.
    """
    # """defense models"""
    # classifier = globals()[args.defensemodel](nnodes=adj.shape[0], nfeat=features.shape[1], nhid=16,
    #                                     nclass=labels.max().item() + 1, dropout=0.5, device=device)
    # ''' test on GCN (poisoning attack), model could be GCN, GAT, GIN'''
    # The class is looked up by name in module globals (imported via
    # deeprobust.graph.defense star-import).
    classifier = globals()[args.model](nfeat=features.shape[1], nhid=16, nclass=labels.max().item() + 1, dropout=0.5, device=device)
    classifier = classifier.to(device)
    classifier.fit(features, adj, labels, idx_train,
                   idx_val=idx_val,
                   idx_test=idx_test,
                   verbose=False, attention=defense) #model_name=model_name
    classifier.eval()
    acc_overall, output = classifier.test(idx_test, ) #model_name=model_name
    # Output is assumed to be log-probabilities; exp() recovers probabilities.
    probs = torch.exp(output[[target_node]])
    acc_test, pred_y, true_y = accuracy_1(output[[target_node]], labels[target_node])
    with open(args.dataset+"_"+args.model+"_gsl.log","a+") as f:
        print('Defense: {}, target:{}, pred:{}, label: {}'.format(defense, target_node, pred_y.item(),true_y.item()),file=f)
    print('target:{}, pred:{}, label: {}'.format(target_node, pred_y.item(), true_y.item()))
    print('Pred probs', probs.data)
    return acc_test.item()
"""=======Basic Functions============="""
def select_nodes(num_target = 10):
'''
selecting nodes as reported in nettack paper:
(i) the 10 nodes with highest margin of classification, i.e. they are clearly correctly classified,
(ii) the 10 nodes with lowest margin (but still correctly classified) and
(iii) 20 more nodes randomly
'''
gcn = globals()[args.model](nfeat=features.shape[1],
nhid=16,
nclass=labels.max().item() + 1,
dropout=0.5, device=device)
gcn = gcn.to(device)
gcn.fit(features, adj, labels, idx_train, idx_test, verbose=True)
gcn.eval()
output = gcn.predict()
degrees = adj.sum(0).A1
margin_dict = {}
for idx in tqdm(idx_test):
margin = classification_margin(output[idx], labels[idx])
acc, _, _ = accuracy_1(output[[idx]], labels[idx])
if acc==0 or int(degrees[idx])<1: # only keep the correctly classified nodes
continue
"""check the outliers:"""
neighbours = list(adj.todense()[idx].nonzero()[1])
y = [labels[i] for i in neighbours]
node_y = labels[idx]
aa = node_y==y
outlier_score = 1- aa.sum()/len(aa)
if outlier_score >=0.5:
continue
margin_dict[idx] = margin
sorted_margins = sorted(margin_dict.items(), key=lambda x:x[1], reverse=True)
high = [x for x, y in sorted_margins[: num_target]]
low = [x for x, y in sorted_margins[-num_target: ]]
other = [x for x, y in sorted_margins[num_target: -num_target]]
other = np.random.choice(other, 2*num_target, replace=False).tolist()
return other + high + low
def accuracy_1(output, labels):
    """Accuracy of `output` predictions against `labels`.

    Args:
        output: tensor of per-class scores, shape (N, C).
        labels: tensor/sequence of N class indices, or a single scalar index.

    Returns:
        (accuracy, predicted_labels, labels_as_tensor)
    """
    try:
        num = len(labels)
    # BUG FIX: a bare `except:` swallowed every exception (including
    # KeyboardInterrupt); only a scalar label with no len() raises TypeError.
    except TypeError:
        num = 1
    if not isinstance(labels, torch.Tensor):
        labels = torch.LongTensor([labels])
    preds = output.max(1)[1].type_as(labels)
    correct = preds.eq(labels).double()
    correct = correct.sum()
    return correct/num, preds, labels
if __name__ == "__main__":
#main()
#multi_test()
multi_evecs()
| [
"torch.LongTensor",
"torch.exp",
"torch.cuda.is_available",
"deeprobust.graph.targeted_attack.Nettack",
"numpy.linalg.norm",
"numpy.mean",
"argparse.ArgumentParser",
"numpy.dot",
"numpy.random.seed",
"numpy.random.choice",
"numpy.set_printoptions",
"torch.device",
"torch.manual_seed",
"dee... | [((442, 467), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (465, 467), False, 'import argparse\n'), ((1148, 1173), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1171, 1173), False, 'import torch\n'), ((1183, 1227), 'torch.device', 'torch.device', (["('cuda' if args.cuda else 'cpu')"], {}), "('cuda' if args.cuda else 'cpu')\n", (1195, 1227), False, 'import torch\n'), ((1228, 1253), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (1242, 1253), True, 'import numpy as np\n'), ((1254, 1282), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (1271, 1282), False, 'import torch\n'), ((1621, 1661), 'deeprobust.graph.data.Dataset', 'Dataset', ([], {'root': '"""/tmp/"""', 'name': 'args.dataset'}), "(root='/tmp/', name=args.dataset)\n", (1628, 1661), False, 'from deeprobust.graph.data import Dataset\n'), ((1365, 1398), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['args.cuda'], {}), '(args.cuda)\n', (1387, 1398), False, 'import torch\n'), ((3400, 3422), 'scipy.sparse.csgraph.laplacian', 'csgraph.laplacian', (['adj'], {}), '(adj)\n', (3417, 3422), False, 'from scipy.sparse import csgraph, lil_matrix\n'), ((3438, 3462), 'scipy.sparse.csgraph.laplacian', 'csgraph.laplacian', (['m_adj'], {}), '(m_adj)\n', (3455, 3462), False, 'from scipy.sparse import csgraph, lil_matrix\n'), ((4534, 4554), 'numpy.linalg.norm', 'np.linalg.norm', (['dif1'], {}), '(dif1)\n', (4548, 4554), True, 'import numpy as np\n'), ((6470, 6485), 'tqdm.tqdm', 'tqdm', (['node_list'], {}), '(node_list)\n', (6474, 6485), False, 'from tqdm import tqdm\n'), ((7269, 7287), 'numpy.mean', 'np.mean', (['a'], {'axis': '(0)'}), '(a, axis=0)\n', (7276, 7287), True, 'import numpy as np\n'), ((7298, 7315), 'numpy.var', 'np.var', (['a'], {'axis': '(0)'}), '(a, axis=0)\n', (7304, 7315), True, 'import numpy as np\n'), ((7321, 7358), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'threshold': 
'np.inf'}), '(threshold=np.inf)\n', (7340, 7358), True, 'import numpy as np\n'), ((7801, 7816), 'tqdm.tqdm', 'tqdm', (['node_list'], {}), '(node_list)\n', (7805, 7816), False, 'from tqdm import tqdm\n'), ((9915, 9947), 'torch.exp', 'torch.exp', (['output[[target_node]]'], {}), '(output[[target_node]])\n', (9924, 9947), False, 'import torch\n'), ((11109, 11123), 'tqdm.tqdm', 'tqdm', (['idx_test'], {}), '(idx_test)\n', (11113, 11123), False, 'from tqdm import tqdm\n'), ((2396, 2500), 'deeprobust.graph.targeted_attack.Nettack', 'Nettack', (['surrogate'], {'nnodes': 'adj.shape[0]', 'attack_structure': '(True)', 'attack_features': '(False)', 'device': 'device'}), '(surrogate, nnodes=adj.shape[0], attack_structure=True,\n attack_features=False, device=device)\n', (2403, 2500), False, 'from deeprobust.graph.targeted_attack import Nettack\n'), ((3863, 3886), 'numpy.linalg.norm', 'np.linalg.norm', (['m_evals'], {}), '(m_evals)\n', (3877, 3886), True, 'import numpy as np\n'), ((3887, 3908), 'numpy.linalg.norm', 'np.linalg.norm', (['evals'], {}), '(evals)\n', (3901, 3908), True, 'import numpy as np\n'), ((4311, 4325), 'numpy.diag', 'np.diag', (['evals'], {}), '(evals)\n', (4318, 4325), True, 'import numpy as np\n'), ((4326, 4342), 'numpy.diag', 'np.diag', (['m_evals'], {}), '(m_evals)\n', (4333, 4342), True, 'import numpy as np\n'), ((6637, 6741), 'deeprobust.graph.targeted_attack.Nettack', 'Nettack', (['surrogate'], {'nnodes': 'adj.shape[0]', 'attack_structure': '(True)', 'attack_features': '(False)', 'device': 'device'}), '(surrogate, nnodes=adj.shape[0], attack_structure=True,\n attack_features=False, device=device)\n', (6644, 6741), False, 'from deeprobust.graph.targeted_attack import Nettack\n'), ((7968, 8072), 'deeprobust.graph.targeted_attack.Nettack', 'Nettack', (['surrogate'], {'nnodes': 'adj.shape[0]', 'attack_structure': '(True)', 'attack_features': '(False)', 'device': 'device'}), '(surrogate, nnodes=adj.shape[0], attack_structure=True,\n attack_features=False, 
device=device)\n', (7975, 8072), False, 'from deeprobust.graph.targeted_attack import Nettack\n'), ((12215, 12241), 'torch.LongTensor', 'torch.LongTensor', (['[labels]'], {}), '([labels])\n', (12231, 12241), False, 'import torch\n'), ((6264, 6281), 'numpy.linalg.norm', 'np.linalg.norm', (['x'], {}), '(x)\n', (6278, 6281), True, 'import numpy as np\n'), ((6297, 6314), 'numpy.linalg.norm', 'np.linalg.norm', (['y'], {}), '(y)\n', (6311, 6314), True, 'import numpy as np\n'), ((11950, 12004), 'numpy.random.choice', 'np.random.choice', (['other', '(2 * num_target)'], {'replace': '(False)'}), '(other, 2 * num_target, replace=False)\n', (11966, 12004), True, 'import numpy as np\n'), ((6356, 6372), 'numpy.dot', 'np.dot', (['u1', 'u2.T'], {}), '(u1, u2.T)\n', (6362, 6372), True, 'import numpy as np\n')] |
import argparse
import logging

log = logging.getLogger('flair')

def def_task(s):
    """Parse a ``TaskName:DataPath`` CLI argument into a (task, path) tuple.

    Raises:
        argparse.ArgumentTypeError: if `s` does not contain exactly one ':'.
    """
    try:
        task, path = s.split(':')
    # BUG FIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # only a wrong number of ':'-separated parts raises ValueError here.
    except ValueError:
        raise argparse.ArgumentTypeError('Task should be in format: TaskName:DataPath.')
    return task, path
# CLI for beam-search decoding with a tagger plus a tag language model.
parser = argparse.ArgumentParser(description='Beam search decoding for separately trained hybrid NER-LM model')
parser.add_argument('--task', type=def_task, required=True, help='Task and data path')
parser.add_argument('--tagger-model', required=True, help='Path to the tagger model')
parser.add_argument('--language-model', required=True, help='Path to the tag language model')
parser.add_argument('--lm-weight', type=float, default=0.2, help='Beam size')
parser.add_argument('--beam-size', type=int, default=-1, help='Beam size')
parser.add_argument('--batch-size', type=int, default=32, help='Batch size')
parser.add_argument('--no-interpolation', action='store_true', help='Do not interpolate CRF and RNN Tag LM scores')
args = parser.parse_args()
# Flair is imported only after argument parsing -- presumably so bad CLI args
# fail fast before the slow framework import; confirm before reordering.
# NOTE(review): `MyLanguageMode` and `evalute_beam_search` look misspelled but
# match the project's own module names -- verify against the flair fork in use.
from flair.models.language_model import MyLanguageMode
from flair.data_fetcher import NLPTaskDataFetcher
from flair.data import TaggedCorpus
from flair.models import SequenceTagger
from flair.models.sequence_tagger_model import evalute_beam_search
tagger = SequenceTagger.load_from_file(args.tagger_model, eval=True)
lm = MyLanguageMode.load_from_file(args.language_model)
task, path = args.task
# --beam-size -1 means "one beam per tag in the dictionary".
beam_size = len(tagger.tag_dictionary.item2idx) if args.beam_size == -1 else args.beam_size
batch_size = args.batch_size
log.info(f'Beam size {beam_size}')
log.info(f'Batch size {batch_size}')
log.info(f'LM weight: {args.lm_weight}')
if tagger.use_crf:
    log.info(f'Interpolate CRF and RNN Tag LM scores: {not args.no_interpolation}')
corpus: TaggedCorpus = NLPTaskDataFetcher.load_corpus(task, path)
metric, _ = evalute_beam_search(tagger, lm, corpus.test, args.lm_weight, beam_size, not args.no_interpolation, batch_size)
log.info('F1 score: ' + str(metric.micro_avg_f_score()))
| [
"logging.getLogger",
"flair.models.language_model.MyLanguageMode.load_from_file",
"argparse.ArgumentParser",
"flair.models.sequence_tagger_model.evalute_beam_search",
"flair.models.SequenceTagger.load_from_file",
"argparse.ArgumentTypeError",
"flair.data_fetcher.NLPTaskDataFetcher.load_corpus"
] | [((38, 64), 'logging.getLogger', 'logging.getLogger', (['"""flair"""'], {}), "('flair')\n", (55, 64), False, 'import logging\n'), ((261, 368), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Beam search decoding for separately trained hybrid NER-LM model"""'}), "(description=\n 'Beam search decoding for separately trained hybrid NER-LM model')\n", (284, 368), False, 'import argparse\n'), ((1266, 1325), 'flair.models.SequenceTagger.load_from_file', 'SequenceTagger.load_from_file', (['args.tagger_model'], {'eval': '(True)'}), '(args.tagger_model, eval=True)\n', (1295, 1325), False, 'from flair.models import SequenceTagger\n'), ((1331, 1381), 'flair.models.language_model.MyLanguageMode.load_from_file', 'MyLanguageMode.load_from_file', (['args.language_model'], {}), '(args.language_model)\n', (1360, 1381), False, 'from flair.models.language_model import MyLanguageMode\n'), ((1767, 1809), 'flair.data_fetcher.NLPTaskDataFetcher.load_corpus', 'NLPTaskDataFetcher.load_corpus', (['task', 'path'], {}), '(task, path)\n', (1797, 1809), False, 'from flair.data_fetcher import NLPTaskDataFetcher\n'), ((1824, 1938), 'flair.models.sequence_tagger_model.evalute_beam_search', 'evalute_beam_search', (['tagger', 'lm', 'corpus.test', 'args.lm_weight', 'beam_size', '(not args.no_interpolation)', 'batch_size'], {}), '(tagger, lm, corpus.test, args.lm_weight, beam_size, not\n args.no_interpolation, batch_size)\n', (1843, 1938), False, 'from flair.models.sequence_tagger_model import evalute_beam_search\n'), ((153, 227), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['"""Task should be in format: TaskName:DataPath."""'], {}), "('Task should be in format: TaskName:DataPath.')\n", (179, 227), False, 'import argparse\n')] |
from typing import Any, List
import httpx
from healthpy._http import _check, _is_json
class _Request:
    """One GET request issued through a short-lived httpx client."""

    def __init__(self, url: str, **args):
        # (connect, read) timeout defaults to (1, 5) seconds unless the
        # caller overrides it; all remaining keywords go to the client.
        timeout = args.pop("timeout", (1, 5))
        with httpx.Client(timeout=timeout, **args) as client:
            self.response = client.get(url)

    def is_error(self) -> bool:
        """True when the response carries an error status code."""
        return self.response.is_error

    def content(self) -> Any:
        """Decoded JSON body when the content type is JSON, raw text otherwise."""
        content_type = self.response.headers.get("content-type")
        if _is_json(content_type):
            return self.response.json()
        return self.response.text
def check(
    service_name: str,
    url: str,
    status_extracting: callable = None,
    failure_status: str = None,
    affected_endpoints: List[str] = None,
    additional_keys: dict = None,
    error_status_extracting: callable = None,
    **httpx_args,
) -> (str, dict):
    """Build a health "Checks object" for an external HTTP dependency.

    Based on https://inadarei.github.io/rfc-healthcheck/

    :param service_name: Name of the external service.
    :param url: Health-check URL of the external service.
    :param status_extracting: Callable mapping the JSON/text response to a
        status. Defaults to the healthcheck-RFC extraction.
    :param failure_status: Status to report when the check fails.
    :param affected_endpoints: Endpoints impacted when the dependency is down
        (default None).
    :param additional_keys: Extra user-defined keys to include in the checks.
    :param error_status_extracting: Callable mapping the JSON/text response to
        a status on error. Defaults to the healthcheck-RFC extraction or
        `failure_status`; may receive None when no response is available.
    :param httpx_args: Remaining keyword arguments forwarded to httpx.Client.
    :return: Tuple of (status string -- one of healthpy.*_status -- and the
        "Checks object").
    """
    return _check(
        service_name=service_name,
        url=url,
        request_class=_Request,
        status_extracting=status_extracting,
        failure_status=failure_status,
        affected_endpoints=affected_endpoints,
        additional_keys=additional_keys,
        error_status_extracting=error_status_extracting,
        **httpx_args,
    )
| [
"healthpy._http._check"
] | [((2000, 2281), 'healthpy._http._check', '_check', ([], {'service_name': 'service_name', 'url': 'url', 'request_class': '_Request', 'status_extracting': 'status_extracting', 'failure_status': 'failure_status', 'affected_endpoints': 'affected_endpoints', 'additional_keys': 'additional_keys', 'error_status_extracting': 'error_status_extracting'}), '(service_name=service_name, url=url, request_class=_Request,\n status_extracting=status_extracting, failure_status=failure_status,\n affected_endpoints=affected_endpoints, additional_keys=additional_keys,\n error_status_extracting=error_status_extracting, **httpx_args)\n', (2006, 2281), False, 'from healthpy._http import _check, _is_json\n')] |
# -*- coding: utf-8 -*-
# Author <NAME>
# mascot csv process v1.5
# Combine all Mascot result CSVs (F<number>.csv) from one folder into a
# single combined.csv, keeping only uniprot-format protein rows.
import os
import re
combine = []  # combined protein list: "<raw file name>,<protein row>" lines
dir_input = input('Please input folder path(e.g. D:\Study\Inbox): ')
print('input directory is: ', dir_input)
number_of_csv = 0  # how many Mascot files were read
for filename in os.listdir(dir_input):  # add list
    # Mascot exports are named F<number>.csv.
    if re.match('F\d+?\.csv', filename):  # re to find target mascot files
        number_of_csv += 1
        # BUG FIX: the input handle was opened but never closed; `with`
        # guarantees it is released even if reading fails.
        with open(dir_input + '\\' + filename) as csv_file:
            data = csv_file.read()
        protein_list = re.findall('\d+,.*?\n', data)
        # Raw-file name lives on the "Peak list data path" line (csv line 9).
        samplename = re.findall('(?<=Peak list data path,).*', data)[0]
        for i in protein_list:
            if re.findall('OS=', i):  # only uniprot format data (with 'OS=' string)
                combine.append(samplename + ',' + i)
# output to file
# BUG FIX: the output handle was never closed, so the final writes could stay
# unflushed at interpreter exit; `with` closes (and flushes) deterministically.
with open(dir_input + '\\combined.csv', 'w') as output:
    for i in combine:
        output.write(i)
print('\noutput to "combined.csv".\nread %d files, output %d lines'%(number_of_csv, len(combine)))
| [
"re.findall",
"os.listdir",
"re.match"
] | [((359, 380), 'os.listdir', 'os.listdir', (['dir_input'], {}), '(dir_input)\n', (369, 380), False, 'import os\n'), ((424, 458), 're.match', 're.match', (['"""F\\\\d+?\\\\.csv"""', 'filename'], {}), "('F\\\\d+?\\\\.csv', filename)\n", (432, 458), False, 'import re\n'), ((664, 694), 're.findall', 're.findall', (['"""\\\\d+,.*?\n"""', 'data'], {}), "('\\\\d+,.*?\\n', data)\n", (674, 694), False, 'import re\n'), ((718, 765), 're.findall', 're.findall', (['"""(?<=Peak list data path,).*"""', 'data'], {}), "('(?<=Peak list data path,).*', data)\n", (728, 765), False, 'import re\n'), ((902, 922), 're.findall', 're.findall', (['"""OS="""', 'i'], {}), "('OS=', i)\n", (912, 922), False, 'import re\n')] |
# Generated with FixedForceElongationMethod
#
from enum import Enum
from enum import auto
class FixedForceElongationMethod(Enum):
    """Ways of specifying a fixed-force elongation."""

    PRETENSION_LOCAL = auto()
    PRETENSION_GLOBAL = auto()
    BOTH_ENDS = auto()
    CONSTANT_TENSION_WINCH = auto()

    def label(self):
        """Return the human-readable label for this method."""
        labels = {
            FixedForceElongationMethod.PRETENSION_LOCAL: "Pretension and local direction",
            FixedForceElongationMethod.PRETENSION_GLOBAL: "Pretension and global direction",
            FixedForceElongationMethod.BOTH_ENDS: "Coordinates of both end points",
            FixedForceElongationMethod.CONSTANT_TENSION_WINCH: "Constant tension winch",
        }
        return labels.get(self)
"enum.auto"
] | [((166, 172), 'enum.auto', 'auto', ([], {}), '()\n', (170, 172), False, 'from enum import auto\n'), ((197, 203), 'enum.auto', 'auto', ([], {}), '()\n', (201, 203), False, 'from enum import auto\n'), ((220, 226), 'enum.auto', 'auto', ([], {}), '()\n', (224, 226), False, 'from enum import auto\n'), ((256, 262), 'enum.auto', 'auto', ([], {}), '()\n', (260, 262), False, 'from enum import auto\n')] |
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_VERSION_GL_4_4'
def _f( function ):
    # Wrap a generated stub in a platform GL entry point with error checking.
    return _p.createFunction( function,_p.PLATFORM.GL,'GL_VERSION_GL_4_4',error_checker=_errors._error_checker)
# OpenGL 4.4 enum constants (values mirror the Khronos GL registry).
GL_BUFFER_IMMUTABLE_STORAGE=_C('GL_BUFFER_IMMUTABLE_STORAGE',0x821F)
GL_BUFFER_STORAGE_FLAGS=_C('GL_BUFFER_STORAGE_FLAGS',0x8220)
GL_CLEAR_TEXTURE=_C('GL_CLEAR_TEXTURE',0x9365)
GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT=_C('GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT',0x00004000)
GL_CLIENT_STORAGE_BIT=_C('GL_CLIENT_STORAGE_BIT',0x0200)
GL_DYNAMIC_STORAGE_BIT=_C('GL_DYNAMIC_STORAGE_BIT',0x0100)
GL_LOCATION_COMPONENT=_C('GL_LOCATION_COMPONENT',0x934A)
GL_MAP_COHERENT_BIT=_C('GL_MAP_COHERENT_BIT',0x0080)
GL_MAP_PERSISTENT_BIT=_C('GL_MAP_PERSISTENT_BIT',0x0040)
GL_MAP_READ_BIT=_C('GL_MAP_READ_BIT',0x0001)
GL_MAP_WRITE_BIT=_C('GL_MAP_WRITE_BIT',0x0002)
GL_MAX_VERTEX_ATTRIB_STRIDE=_C('GL_MAX_VERTEX_ATTRIB_STRIDE',0x82E5)
GL_MIRROR_CLAMP_TO_EDGE=_C('GL_MIRROR_CLAMP_TO_EDGE',0x8743)
GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED=_C('GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED',0x8221)
GL_QUERY_BUFFER=_C('GL_QUERY_BUFFER',0x9192)
GL_QUERY_BUFFER_BARRIER_BIT=_C('GL_QUERY_BUFFER_BARRIER_BIT',0x00008000)
GL_QUERY_BUFFER_BINDING=_C('GL_QUERY_BUFFER_BINDING',0x9193)
GL_QUERY_RESULT_NO_WAIT=_C('GL_QUERY_RESULT_NO_WAIT',0x9194)
GL_STENCIL_INDEX=_C('GL_STENCIL_INDEX',0x1901)
GL_STENCIL_INDEX8=_C('GL_STENCIL_INDEX8',0x8D48)
GL_TEXTURE_BUFFER_BINDING=_C('GL_TEXTURE_BUFFER_BINDING',0x8C2A)
GL_TRANSFORM_FEEDBACK_BUFFER=_C('GL_TRANSFORM_FEEDBACK_BUFFER',0x8C8E)
GL_TRANSFORM_FEEDBACK_BUFFER_INDEX=_C('GL_TRANSFORM_FEEDBACK_BUFFER_INDEX',0x934B)
GL_TRANSFORM_FEEDBACK_BUFFER_STRIDE=_C('GL_TRANSFORM_FEEDBACK_BUFFER_STRIDE',0x934C)
GL_UNSIGNED_INT_10F_11F_11F_REV=_C('GL_UNSIGNED_INT_10F_11F_11F_REV',0x8C3B)
# OpenGL 4.4 entry points; the `pass` bodies are stubs -- _f/_p.types replace
# them with the real platform function at import time.
@_f
@_p.types(None,_cs.GLenum,_cs.GLuint,_cs.GLsizei,arrays.GLuintArray)
def glBindBuffersBase(target,first,count,buffers):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLuint,_cs.GLsizei,arrays.GLuintArray,ctypes.POINTER(_cs.GLintptr),ctypes.POINTER(_cs.GLsizeiptr))
def glBindBuffersRange(target,first,count,buffers,offsets,sizes):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLuintArray)
def glBindImageTextures(first,count,textures):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLuintArray)
def glBindSamplers(first,count,samplers):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLuintArray)
def glBindTextures(first,count,textures):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLuintArray,ctypes.POINTER(_cs.GLintptr),arrays.GLsizeiArray)
def glBindVertexBuffers(first,count,buffers,offsets,strides):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLsizeiptr,ctypes.c_void_p,_cs.GLbitfield)
def glBufferStorage(target,size,data,flags):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLint,_cs.GLenum,_cs.GLenum,ctypes.c_void_p)
def glClearTexImage(texture,level,format,type,data):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLint,_cs.GLint,_cs.GLint,_cs.GLint,_cs.GLsizei,_cs.GLsizei,_cs.GLsizei,_cs.GLenum,_cs.GLenum,ctypes.c_void_p)
def glClearTexSubImage(texture,level,xoffset,yoffset,zoffset,width,height,depth,format,type,data):pass
| [
"OpenGL.platform.types",
"OpenGL.constant.Constant",
"ctypes.POINTER",
"OpenGL.platform.createFunction"
] | [((517, 557), 'OpenGL.constant.Constant', '_C', (['"""GL_BUFFER_IMMUTABLE_STORAGE"""', '(33311)'], {}), "('GL_BUFFER_IMMUTABLE_STORAGE', 33311)\n", (519, 557), True, 'from OpenGL.constant import Constant as _C\n'), ((582, 618), 'OpenGL.constant.Constant', '_C', (['"""GL_BUFFER_STORAGE_FLAGS"""', '(33312)'], {}), "('GL_BUFFER_STORAGE_FLAGS', 33312)\n", (584, 618), True, 'from OpenGL.constant import Constant as _C\n'), ((636, 665), 'OpenGL.constant.Constant', '_C', (['"""GL_CLEAR_TEXTURE"""', '(37733)'], {}), "('GL_CLEAR_TEXTURE', 37733)\n", (638, 665), True, 'from OpenGL.constant import Constant as _C\n'), ((702, 750), 'OpenGL.constant.Constant', '_C', (['"""GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT"""', '(16384)'], {}), "('GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT', 16384)\n", (704, 750), True, 'from OpenGL.constant import Constant as _C\n'), ((777, 809), 'OpenGL.constant.Constant', '_C', (['"""GL_CLIENT_STORAGE_BIT"""', '(512)'], {}), "('GL_CLIENT_STORAGE_BIT', 512)\n", (779, 809), True, 'from OpenGL.constant import Constant as _C\n'), ((835, 868), 'OpenGL.constant.Constant', '_C', (['"""GL_DYNAMIC_STORAGE_BIT"""', '(256)'], {}), "('GL_DYNAMIC_STORAGE_BIT', 256)\n", (837, 868), True, 'from OpenGL.constant import Constant as _C\n'), ((893, 927), 'OpenGL.constant.Constant', '_C', (['"""GL_LOCATION_COMPONENT"""', '(37706)'], {}), "('GL_LOCATION_COMPONENT', 37706)\n", (895, 927), True, 'from OpenGL.constant import Constant as _C\n'), ((948, 978), 'OpenGL.constant.Constant', '_C', (['"""GL_MAP_COHERENT_BIT"""', '(128)'], {}), "('GL_MAP_COHERENT_BIT', 128)\n", (950, 978), True, 'from OpenGL.constant import Constant as _C\n'), ((1003, 1034), 'OpenGL.constant.Constant', '_C', (['"""GL_MAP_PERSISTENT_BIT"""', '(64)'], {}), "('GL_MAP_PERSISTENT_BIT', 64)\n", (1005, 1034), True, 'from OpenGL.constant import Constant as _C\n'), ((1054, 1078), 'OpenGL.constant.Constant', '_C', (['"""GL_MAP_READ_BIT"""', '(1)'], {}), "('GL_MAP_READ_BIT', 1)\n", (1056, 1078), True, 'from OpenGL.constant 
import Constant as _C\n'), ((1100, 1125), 'OpenGL.constant.Constant', '_C', (['"""GL_MAP_WRITE_BIT"""', '(2)'], {}), "('GL_MAP_WRITE_BIT', 2)\n", (1102, 1125), True, 'from OpenGL.constant import Constant as _C\n'), ((1158, 1198), 'OpenGL.constant.Constant', '_C', (['"""GL_MAX_VERTEX_ATTRIB_STRIDE"""', '(33509)'], {}), "('GL_MAX_VERTEX_ATTRIB_STRIDE', 33509)\n", (1160, 1198), True, 'from OpenGL.constant import Constant as _C\n'), ((1223, 1259), 'OpenGL.constant.Constant', '_C', (['"""GL_MIRROR_CLAMP_TO_EDGE"""', '(34627)'], {}), "('GL_MIRROR_CLAMP_TO_EDGE', 34627)\n", (1225, 1259), True, 'from OpenGL.constant import Constant as _C\n'), ((1303, 1358), 'OpenGL.constant.Constant', '_C', (['"""GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED"""', '(33313)'], {}), "('GL_PRIMITIVE_RESTART_FOR_PATCHES_SUPPORTED', 33313)\n", (1305, 1358), True, 'from OpenGL.constant import Constant as _C\n'), ((1375, 1403), 'OpenGL.constant.Constant', '_C', (['"""GL_QUERY_BUFFER"""', '(37266)'], {}), "('GL_QUERY_BUFFER', 37266)\n", (1377, 1403), True, 'from OpenGL.constant import Constant as _C\n'), ((1432, 1472), 'OpenGL.constant.Constant', '_C', (['"""GL_QUERY_BUFFER_BARRIER_BIT"""', '(32768)'], {}), "('GL_QUERY_BUFFER_BARRIER_BIT', 32768)\n", (1434, 1472), True, 'from OpenGL.constant import Constant as _C\n'), ((1501, 1537), 'OpenGL.constant.Constant', '_C', (['"""GL_QUERY_BUFFER_BINDING"""', '(37267)'], {}), "('GL_QUERY_BUFFER_BINDING', 37267)\n", (1503, 1537), True, 'from OpenGL.constant import Constant as _C\n'), ((1562, 1598), 'OpenGL.constant.Constant', '_C', (['"""GL_QUERY_RESULT_NO_WAIT"""', '(37268)'], {}), "('GL_QUERY_RESULT_NO_WAIT', 37268)\n", (1564, 1598), True, 'from OpenGL.constant import Constant as _C\n'), ((1616, 1644), 'OpenGL.constant.Constant', '_C', (['"""GL_STENCIL_INDEX"""', '(6401)'], {}), "('GL_STENCIL_INDEX', 6401)\n", (1618, 1644), True, 'from OpenGL.constant import Constant as _C\n'), ((1664, 1694), 'OpenGL.constant.Constant', '_C', (['"""GL_STENCIL_INDEX8"""', 
'(36168)'], {}), "('GL_STENCIL_INDEX8', 36168)\n", (1666, 1694), True, 'from OpenGL.constant import Constant as _C\n'), ((1721, 1759), 'OpenGL.constant.Constant', '_C', (['"""GL_TEXTURE_BUFFER_BINDING"""', '(35882)'], {}), "('GL_TEXTURE_BUFFER_BINDING', 35882)\n", (1723, 1759), True, 'from OpenGL.constant import Constant as _C\n'), ((1789, 1830), 'OpenGL.constant.Constant', '_C', (['"""GL_TRANSFORM_FEEDBACK_BUFFER"""', '(35982)'], {}), "('GL_TRANSFORM_FEEDBACK_BUFFER', 35982)\n", (1791, 1830), True, 'from OpenGL.constant import Constant as _C\n'), ((1866, 1913), 'OpenGL.constant.Constant', '_C', (['"""GL_TRANSFORM_FEEDBACK_BUFFER_INDEX"""', '(37707)'], {}), "('GL_TRANSFORM_FEEDBACK_BUFFER_INDEX', 37707)\n", (1868, 1913), True, 'from OpenGL.constant import Constant as _C\n'), ((1950, 1998), 'OpenGL.constant.Constant', '_C', (['"""GL_TRANSFORM_FEEDBACK_BUFFER_STRIDE"""', '(37708)'], {}), "('GL_TRANSFORM_FEEDBACK_BUFFER_STRIDE', 37708)\n", (1952, 1998), True, 'from OpenGL.constant import Constant as _C\n'), ((2031, 2075), 'OpenGL.constant.Constant', '_C', (['"""GL_UNSIGNED_INT_10F_11F_11F_REV"""', '(35899)'], {}), "('GL_UNSIGNED_INT_10F_11F_11F_REV', 35899)\n", (2033, 2075), True, 'from OpenGL.constant import Constant as _C\n'), ((2081, 2152), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLenum', '_cs.GLuint', '_cs.GLsizei', 'arrays.GLuintArray'], {}), '(None, _cs.GLenum, _cs.GLuint, _cs.GLsizei, arrays.GLuintArray)\n', (2089, 2152), True, 'from OpenGL import platform as _p, arrays\n'), ((2412, 2471), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLuint', '_cs.GLsizei', 'arrays.GLuintArray'], {}), '(None, _cs.GLuint, _cs.GLsizei, arrays.GLuintArray)\n', (2420, 2471), True, 'from OpenGL import platform as _p, arrays\n'), ((2525, 2584), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLuint', '_cs.GLsizei', 'arrays.GLuintArray'], {}), '(None, _cs.GLuint, _cs.GLsizei, arrays.GLuintArray)\n', (2533, 2584), True, 'from OpenGL import platform as _p, 
arrays\n'), ((2633, 2692), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLuint', '_cs.GLsizei', 'arrays.GLuintArray'], {}), '(None, _cs.GLuint, _cs.GLsizei, arrays.GLuintArray)\n', (2641, 2692), True, 'from OpenGL import platform as _p, arrays\n'), ((2918, 2993), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLenum', '_cs.GLsizeiptr', 'ctypes.c_void_p', '_cs.GLbitfield'], {}), '(None, _cs.GLenum, _cs.GLsizeiptr, ctypes.c_void_p, _cs.GLbitfield)\n', (2926, 2993), True, 'from OpenGL import platform as _p, arrays\n'), ((3044, 3122), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLuint', '_cs.GLint', '_cs.GLenum', '_cs.GLenum', 'ctypes.c_void_p'], {}), '(None, _cs.GLuint, _cs.GLint, _cs.GLenum, _cs.GLenum, ctypes.c_void_p)\n', (3052, 3122), True, 'from OpenGL import platform as _p, arrays\n'), ((3180, 3335), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLuint', '_cs.GLint', '_cs.GLint', '_cs.GLint', '_cs.GLint', '_cs.GLsizei', '_cs.GLsizei', '_cs.GLsizei', '_cs.GLenum', '_cs.GLenum', 'ctypes.c_void_p'], {}), '(None, _cs.GLuint, _cs.GLint, _cs.GLint, _cs.GLint, _cs.GLint, _cs.\n GLsizei, _cs.GLsizei, _cs.GLsizei, _cs.GLenum, _cs.GLenum, ctypes.c_void_p)\n', (3188, 3335), True, 'from OpenGL import platform as _p, arrays\n'), ((388, 494), 'OpenGL.platform.createFunction', '_p.createFunction', (['function', '_p.PLATFORM.GL', '"""GL_VERSION_GL_4_4"""'], {'error_checker': '_errors._error_checker'}), "(function, _p.PLATFORM.GL, 'GL_VERSION_GL_4_4',\n error_checker=_errors._error_checker)\n", (405, 494), True, 'from OpenGL import platform as _p, arrays\n'), ((2276, 2304), 'ctypes.POINTER', 'ctypes.POINTER', (['_cs.GLintptr'], {}), '(_cs.GLintptr)\n', (2290, 2304), False, 'import ctypes\n'), ((2305, 2335), 'ctypes.POINTER', 'ctypes.POINTER', (['_cs.GLsizeiptr'], {}), '(_cs.GLsizeiptr)\n', (2319, 2335), False, 'import ctypes\n'), ((2797, 2825), 'ctypes.POINTER', 'ctypes.POINTER', (['_cs.GLintptr'], {}), '(_cs.GLintptr)\n', (2811, 2825), False, 'import 
ctypes\n')] |
import numpy as np
import json
from os.path import join
from tqdm import tqdm
from scipy.optimize import least_squares
from pose_optimize.multiview_geo import reproject_error
DEBUG=False
def reproject_error_loss(p3d, p4, p6, cam_proj_4, cam_proj_6, num_kpt=23):
    '''
    Compute per-keypoint reprojection errors of 3D points in two camera views.

    Args:
        p3d: (num_kpt, 3) array of 3D keypoints.
        p4, p6: (num_kpt, 2) arrays of observed 2D keypoints in cameras 4 and 6.
        cam_proj_4, cam_proj_6: camera projection matrices (rows 0:3 used as [R|t]).
        num_kpt: number of keypoints per person.

    Return:
        kp4_e, kp6_e: per-keypoint Euclidean pixel errors, each of shape (num_kpt,).
    '''
    assert p3d.shape == (num_kpt, 3)
    assert p4.shape == (num_kpt, 2)
    assert p6.shape == (num_kpt, 2)

    def _project(proj):
        # Apply the [R|t] part of the projection, then perspective-divide by depth.
        homo = np.dot(proj[0:3, 0:3], p3d.T) + proj[0:3, 3].reshape([-1, 1])
        return (homo[0:2, :] / homo[2, :]).T

    kp4_e = np.linalg.norm(_project(cam_proj_4) - p4, axis=1)
    kp6_e = np.linalg.norm(_project(cam_proj_6) - p6, axis=1)
    return kp4_e, kp6_e
def reproject_error_loss_score(p3d, p4, p6, cam_proj_4, cam_proj_6, num_kpt=23):
    '''
    Confidence-weighted per-keypoint reprojection errors in two camera views.

    Args:
        p3d: (num_kpt, 3) array of 3D keypoints.
        p4, p6: (num_kpt, 3) arrays of [x, y, score] detections in cameras 4
            and 6; the score column scales each keypoint's pixel error.
        cam_proj_4, cam_proj_6: camera projection matrices (rows 0:3 used as [R|t]).
        num_kpt: number of keypoints per person.

    Return:
        kp4_e, kp6_e: score-weighted pixel errors, each of shape (num_kpt,).
    '''
    assert p3d.shape == (num_kpt, 3)
    assert p4.shape == (num_kpt, 3)
    assert p6.shape == (num_kpt, 3)

    def _project(proj):
        # Apply the [R|t] part of the projection, then perspective-divide by depth.
        homo = np.dot(proj[0:3, 0:3], p3d.T) + proj[0:3, 3].reshape([-1, 1])
        return (homo[0:2, :] / homo[2, :]).T

    kp4_e = p4[:, 2] * np.linalg.norm(_project(cam_proj_4) - p4[:, :2], axis=1)
    kp6_e = p6[:, 2] * np.linalg.norm(_project(cam_proj_6) - p6[:, :2], axis=1)
    return kp4_e, kp6_e
def optimze_loss_2d(p3d_faltten, p4, p6, cam_proj_4, cam_proj_6, num_kpt=23, lambda_reproj = 1):
    '''
    Residual vector with only the reprojection term (no shape prior).

    Args:
        p3d_faltten: flattened (num_kpt*3,) 3D keypoints — scipy's
            least_squares optimizes a flat parameter vector.
        p4, p6: (num_kpt, 2) observed 2D keypoints in cameras 4 and 6.
        cam_proj_4, cam_proj_6: camera projection matrices.
        num_kpt: number of keypoints per person.
        lambda_reproj: weight applied to both cameras' errors.

    Return:
        (2*num_kpt,) residual vector: camera-4 errors followed by camera-6 errors.
    '''
    l1 = lambda_reproj
    p3d = p3d_faltten.reshape([-1,3])
    # BUG FIX: forward num_kpt instead of hard-coding 23, so the loss also
    # works for skeletons with a different keypoint count.
    kp4_e, kp6_e = reproject_error_loss(p3d, p4, p6, cam_proj_4, cam_proj_6, num_kpt=num_kpt)
    return np.concatenate((l1*kp4_e, l1*kp6_e))
def shape_dis_loss(kpt_3d_array, median_bone, left_list, right_list, num_kpt=23):
    '''
    Absolute deviation of each bone's length from its prior median length.

    Args:
        kpt_3d_array: (num_kpt, 3) 3D keypoints.
        median_bone: dict mapping str(bone_index) -> prior median bone length.
        left_list, right_list: per-bone [start_idx, end_idx] joint-index pairs
            for the left/right side of the skeleton.
        num_kpt: number of keypoints per person.

    Return:
        left_error, right_error: (num_bone,) arrays of |length - median|.
    '''
    assert kpt_3d_array.shape == (num_kpt, 3)
    assert len(left_list) == len(right_list)
    assert len(left_list) == len(median_bone.keys())
    num_bone = len(left_list)
    # (Removed dead `left_error = []` / `right_error = []` that were
    # immediately overwritten by the zero arrays below.)
    left_error = np.zeros(num_bone)
    right_error = np.zeros(num_bone)
    for i in range(num_bone):
        prior = median_bone[str(i)]
        vec_l = kpt_3d_array[left_list[i][1], :] - kpt_3d_array[left_list[i][0], :]
        left_error[i] = abs(np.sqrt(np.dot(vec_l, vec_l)) - prior)
        vec_r = kpt_3d_array[right_list[i][1], :] - kpt_3d_array[right_list[i][0], :]
        right_error[i] = abs(np.sqrt(np.dot(vec_r, vec_r)) - prior)
    return left_error, right_error
def optimze_loss(p3d_faltten, p4, p6, cam_proj_4, cam_proj_6, left_list, right_list, median_bone, num_kpt=23, lambda_reproj = 0.1, lambda_shape=5.0):
    '''
    Full residual vector: score-weighted reprojection term plus shape prior.

    Args:
        p3d_faltten: flattened (num_kpt*3,) 3D keypoints — scipy's
            least_squares optimizes a flat parameter vector.
        p4, p6: (num_kpt, 3) [x, y, score] detections for cameras 4 and 6.
        cam_proj_4, cam_proj_6: camera projection matrices.
        left_list, right_list: per-bone [start_idx, end_idx] joint-index pairs.
        median_bone: dict mapping str(bone_index) -> prior median bone length.
        num_kpt: number of keypoints per person.
        lambda_reproj, lambda_shape: weights of the two residual terms.

    Return:
        (2*num_kpt + 2*num_bone,) concatenated residual vector.
    '''
    l1 = lambda_reproj
    l2 = lambda_shape
    p3d = p3d_faltten.reshape([-1,3])
    # BUG FIX: forward num_kpt instead of hard-coding 23, so the loss also
    # works for skeletons with a different keypoint count.
    kp4_e, kp6_e = reproject_error_loss_score(p3d, p4, p6, cam_proj_4, cam_proj_6, num_kpt=num_kpt)
    left_error, right_error = shape_dis_loss(p3d, median_bone, left_list, right_list, num_kpt=num_kpt)
    return np.concatenate((l1*kp4_e, l1*kp6_e, l2*left_error, l2*right_error))
def optimze_loss_no_score(p3d_faltten, p4, p6, cam_proj_4, cam_proj_6, left_list, right_list, median_bone, num_kpt=23, lambda_reproj = 0.1, lambda_shape=1.0):
    '''
    Full residual vector: unweighted reprojection term plus shape prior.

    Same as optimze_loss but uses reproject_error_loss (no per-keypoint
    confidence weighting), so p4/p6 are plain (num_kpt, 2) pixel coordinates.

    Args:
        p3d_faltten: flattened (num_kpt*3,) 3D keypoints.
        p4, p6: (num_kpt, 2) observed 2D keypoints in cameras 4 and 6.
        cam_proj_4, cam_proj_6: camera projection matrices.
        left_list, right_list: per-bone [start_idx, end_idx] joint-index pairs.
        median_bone: dict mapping str(bone_index) -> prior median bone length.
        num_kpt: number of keypoints per person.
        lambda_reproj, lambda_shape: weights of the two residual terms.

    Return:
        (2*num_kpt + 2*num_bone,) concatenated residual vector.
    '''
    l1 = lambda_reproj
    l2 = lambda_shape
    p3d = p3d_faltten.reshape([-1,3])
    # BUG FIX: forward num_kpt instead of hard-coding 23, so the loss also
    # works for skeletons with a different keypoint count.
    kp4_e, kp6_e = reproject_error_loss(p3d, p4, p6, cam_proj_4, cam_proj_6, num_kpt=num_kpt)
    left_error, right_error = shape_dis_loss(p3d, median_bone, left_list, right_list, num_kpt=num_kpt)
    return np.concatenate((l1*kp4_e, l1*kp6_e, l2*left_error, l2*right_error))
def centerize_keypoint(p1, p2, norm_dst):
    '''
    Shift two 3D points symmetrically about their midpoint so their distance
    becomes `norm_dst`, preserving the segment's direction and center.

    Args:
        p1, p2: (3,) endpoints of a bone segment.
        norm_dst: target length for the segment (callers pass prior median
            bone lengths from `median_bone`).

    Return:
        p1_shift, p2_shift: rescaled endpoints with
        ||p1_shift - p2_shift|| == norm_dst.
    '''
    assert p1.shape == (3,)
    assert p2.shape == (3,)
    p_center = (p1 + p2) / 2
    p_vec = p1 - p2
    p_dis = np.sqrt(np.dot(p_vec, p_vec))
    # BUG FIX: `norm_dst` was previously ignored, so every segment was
    # normalized to unit length instead of the requested prior length.
    half_offset = 0.5 * norm_dst * p_vec / p_dis
    p1_shift = p_center + half_offset
    p2_shift = p_center - half_offset
    return p1_shift, p2_shift
def shape_initialize(left_list, right_list, median_bone, kpt_3d_array, num_kpt=23):
    '''
    Initialize human joints 3D position from shape prior

    Re-anchors the torso first (bones 0-2: shoulder width, hip width,
    shoulder-to-hip) via centerize_keypoint, then propagates the remaining
    limb keypoints outward from those anchors. A bone whose observed/prior
    length ratio exceeds `ratio_outlier` is drawn back to `ratio_draw_back`
    times its prior length; otherwise the observed bone vector is kept.

    Args:
        left_list, right_list: per-bone [start_idx, end_idx] joint-index pairs.
        median_bone: dict mapping str(bone_index) -> prior median bone length.
        kpt_3d_array: (num_kpt, 3) triangulated 3D keypoints.
        num_kpt: number of keypoints per person.

    Return:
        kp_3d_new: (num_kpt, 3) re-initialized keypoints.
    '''
    assert kpt_3d_array.shape == (num_kpt,3)
    assert len(left_list) == len(right_list)
    assert len(left_list) == len(median_bone.keys())
    num_bone = len(left_list)
    # Per-bone observed/prior length ratios and raw bone vectors, computed
    # separately for the left and right side of the skeleton.
    left_ratio_list, right_ratio_list = [],[]
    vec_left_list, vec_right_list = [], []
    ratio_outlier = 1.5    # bones longer than 1.5x the prior are treated as outliers
    ratio_draw_back = 1.1  # outlier bones are shrunk to 1.1x the prior length
    for i in range(num_bone):
        bon_vec_left = kpt_3d_array[left_list[i][1],:] - kpt_3d_array[left_list[i][0],:]
        ratio_left = np.sqrt(np.dot(bon_vec_left, bon_vec_left))/ median_bone[str(i)]
        left_ratio_list += [ratio_left]
        vec_left_list += [bon_vec_left]
    for i in range(num_bone):
        bon_vec_right = kpt_3d_array[right_list[i][1],:] - kpt_3d_array[right_list[i][0],:]
        ratio_right = np.sqrt(np.dot(bon_vec_right, bon_vec_right))/median_bone[str(i)]
        right_ratio_list += [ratio_right]
        vec_right_list += [bon_vec_right]
    kp_3d_new = np.zeros(kpt_3d_array.shape)
    # Adjust Shoulder to hip
    # NOTE: bone index 2 is assumed to be the shoulder-to-hip segment on
    # each side — confirm against the shape-prior JSON if indices change.
    kp_3d_new[left_list[2][0], :], kp_3d_new[left_list[2][1], :] = centerize_keypoint(kpt_3d_array[left_list[2][0], :], kpt_3d_array[left_list[2][1], :] , median_bone["2"])
    kp_3d_new[right_list[2][0], :], kp_3d_new[right_list[2][1], :] = centerize_keypoint(kpt_3d_array[right_list[2][0], :], kpt_3d_array[right_list[2][1], :] , median_bone["2"])
    # Adjust shoulder and Hip pair
    sh_p = left_list[0]
    hi_p = left_list[1]
    kp_3d_new[sh_p[0]], kp_3d_new[sh_p[1]] = centerize_keypoint(kp_3d_new[sh_p[0]], kp_3d_new[sh_p[1]], median_bone["0"]) # shoulder
    kp_3d_new[hi_p[0]], kp_3d_new[hi_p[1]] = centerize_keypoint(kp_3d_new[hi_p[0]], kp_3d_new[hi_p[1]], median_bone["1"]) # hip
    # left part
    # Propagate limb joints outward from the already-anchored torso joints;
    # assumes left_list/right_list are ordered so each bone's start joint is
    # filled in before it is used — TODO confirm for new skeleton definitions.
    for i in range(2, num_bone):
        start_indx, end_indx = tuple(left_list[i])
        if left_ratio_list[i] < ratio_outlier:
            kp_3d_new[end_indx, :] = kp_3d_new[start_indx, :] + vec_left_list[i]
        else:
            # Outlier bone: keep its direction but shrink toward the prior length.
            kp_3d_new[end_indx, :] = kp_3d_new[start_indx, :] + vec_left_list[i]/left_ratio_list[i]*ratio_draw_back
    for i in range(2, num_bone):
        start_indx, end_indx = tuple(right_list[i])
        if right_ratio_list[i] < ratio_outlier:
            kp_3d_new[end_indx, :] = kp_3d_new[start_indx, :] + vec_right_list[i]
        else:
            kp_3d_new[end_indx, :] = kp_3d_new[start_indx, :] + vec_right_list[i]/right_ratio_list[i]*ratio_draw_back
    # left_error, right_error = loss_kpt_3d(kp_3d_new, median_bone, left_list, right_list)
    # print(left_error)
    # print(right_error)
    # print("OK")
    return kp_3d_new
def fintune_human_keypoint_2d(P4, P6, path4, path6, path3D, path_finetune=None):
    '''
    Refine per-person 3D keypoints by minimizing 2D reprojection error only.

    For every frame/person in the camera-4 detections, runs
    scipy.optimize.least_squares with optimze_loss_2d starting from the
    triangulated 3D points in path3D, then records the tuned 3D points and
    the per-camera reprojection errors.

    NOTE(review): the P4 and P6 parameters are never used — the projection
    matrices are read from the JSON at path3D instead.

    Args:
        P4, P6: unused (see note above).
        path4, path6: JSON files of per-frame, per-person 2D detections
            (homogeneous [x, y, score] rows) for cameras 4 and 6.
        path3D: JSON file with "P4"/"P6" projection matrices and initial "3D" points.
        path_finetune: optional output path; when given, the result dict is
            dumped there as JSON.

    Return:
        dict with keys "P4", "P6", "3D", "kp4_e", "kp6_e", all indexed by
        frame id then person id.
    '''
    with open(path3D,"r") as f:
        data_3d = json.load(f)
    with open(path4, "r") as f:
        data_dict4 = json.load(f)
    with open(path6, "r") as f:
        data_dict6 = json.load(f)
    # frame_id = next(iter(data_3d["3D"].keys()))
    # person_id = next(iter(data_3d["3D"][frame_id].keys()))
    # # frame_id = "000005"
    # # person_id = "000"
    cam_proj_4 = np.array(data_3d["P4"])
    cam_proj_6 = np.array(data_3d["P6"])
    data_3d_dict = {}
    data_3d_dict["P4"] = data_3d["P4"]
    data_3d_dict["P6"] = data_3d["P6"]
    data_3d_dict["3D"] = {}
    data_3d_dict["kp4_e"] = {}
    data_3d_dict["kp6_e"] = {}
    # Process frames in sorted order for deterministic output.
    frame_list = [k for k in data_dict4.keys()]
    frame_list.sort()
    for i, frame_id in enumerate(tqdm(frame_list)):
        frame_3d_dict = {}
        kp4_dict = {}
        kp6_dict = {}
        person_list = [k for k in data_dict4[frame_id].keys()]
        person_list.sort()
        for person_id in person_list:
            # least_squares expects a flat parameter vector.
            p3d_flatten = np.array(data_3d["3D"][frame_id][person_id]).ravel()
            p4_homo = np.array(data_dict4[frame_id][person_id]).reshape([-1,3])
            p6_homo = np.array(data_dict6[frame_id][person_id]).reshape([-1,3])
            # Drop the confidence column — this variant is unweighted.
            p4 = p4_homo[:,:2]
            p6 = p6_homo[:,:2]
            if DEBUG:
                loss_init = optimze_loss_2d(p3d_flatten, p4, p6, cam_proj_4, cam_proj_6)
                print("Initial error", str(np.sqrt(np.sum(np.square(loss_init)))) )
            res = least_squares(optimze_loss_2d, p3d_flatten, verbose=0, x_scale='jac', ftol=1e-4, method='trf',args=(p4, p6, cam_proj_4, cam_proj_6))
            if DEBUG:
                loss_final = res.fun
                print("Final error", str(np.sqrt(np.sum(np.square(loss_final)))) )
                loss_final = optimze_loss_2d(res.x, p4, p6, cam_proj_4, cam_proj_6)
                print("Final error", str(np.sqrt(np.sum(np.square(loss_final)))) )
            p3d_tune = res.x.reshape([-1,3])
            # Final per-keypoint reprojection errors for bookkeeping.
            kp4_recon, kp6_recon, kp4_e, kp6_e = reproject_error(p3d_tune, p4, p6, cam_proj_4, cam_proj_6)
            frame_3d_dict[person_id] = p3d_tune.tolist()
            kp4_dict[person_id] = kp4_e.tolist()
            kp6_dict[person_id] = kp6_e.tolist()
        data_3d_dict["3D"][frame_id] = frame_3d_dict
        data_3d_dict["kp4_e"][frame_id] = kp4_dict
        data_3d_dict["kp6_e"][frame_id] = kp6_dict
    if path_finetune is not None:
        with open(path_finetune, "w") as f:
            json.dump(data_3d_dict, f)
    return data_3d_dict
def finetune_human_3d(path_finetune_input, path4, path6, shape_prior_path, shape_prior_finetune_output, frame_list=None):
    '''
    Refine 3D keypoints with a score-weighted reprojection term plus a
    bone-length shape prior, writing the result to JSON.

    Each person's 3D pose is re-initialized from the shape prior
    (shape_initialize) and then optimized with scipy least_squares using
    optimze_loss.

    Args:
        path_finetune_input: JSON with "P4"/"P6" projection matrices and
            initial "3D" keypoints per frame/person.
        path4: data_C4.json — per-frame, per-person [x, y, score] detections (camera 4).
        path6: data_C6.json — same for camera 6.
        shape_prior_path: JSON with "left_list", "right_list", "median_bone".
        shape_prior_finetune_output: output JSON path for the tuned result.
        frame_list: optional subset of frame ids to process; every entry must
            exist in the camera-4 detections (asserted). Defaults to all
            frames, sorted.
    '''
    with open(path_finetune_input,"r") as f:
        data_3d = json.load(f)
    with open(path4, "r") as f:
        data_dict4 = json.load(f)
    with open(path6, "r") as f:
        data_dict6 = json.load(f)
    with open(shape_prior_path, 'r') as f:
        data_prior = json.load(f)
    left_list = data_prior["left_list"]
    right_list = data_prior["right_list"]
    median_bone = data_prior["median_bone"]
    cam_proj_4 = np.array(data_3d["P4"])
    cam_proj_6 = np.array(data_3d["P6"])
    data_3d_dict = {}
    data_3d_dict["P4"] = data_3d["P4"]
    data_3d_dict["P6"] = data_3d["P6"]
    data_3d_dict["3D"] = {}
    data_3d_dict["kp4_e"] = {}
    data_3d_dict["kp6_e"] = {}
    # Validate a user-supplied frame subset, or fall back to all frames sorted.
    if frame_list:
        for f in frame_list:
            if f not in data_dict4.keys():
                print("KEY ERROR!")
                assert 0
    else:
        frame_list = [k for k in data_dict4.keys()]
        frame_list.sort()
    for i, frame_id in enumerate(tqdm(frame_list)):
        frame_3d_dict = {}
        kp4_dict = {}
        kp6_dict = {}
        person_list = [k for k in data_dict4[frame_id].keys()]
        person_list.sort()
        for person_id in person_list:
            p3d = np.array(data_3d["3D"][frame_id][person_id]).reshape([-1,3])
            # Warm-start the optimizer from the shape-prior-consistent pose.
            p3d_init = shape_initialize(left_list, right_list, median_bone, p3d)
            p4_homo = np.array(data_dict4[frame_id][person_id]).reshape([-1,3])
            p6_homo = np.array(data_dict6[frame_id][person_id]).reshape([-1,3])
            # Keep the score column: optimze_loss weights errors by confidence.
            p4 = p4_homo
            p6 = p6_homo
            p3d_flatten = p3d_init.flatten()
            # loss_init = optimze_loss(p3d_flatten, p4, p6, cam_proj_4, cam_proj_6, left_list, right_list, median_bone)
            #print(np.linalg.norm(loss_init))
            res = least_squares(optimze_loss, p3d_flatten, verbose=0, x_scale='jac', ftol=1e-2, method='trf',args=(p4, p6, cam_proj_4, cam_proj_6, left_list, right_list, median_bone))
            p3d_tune = res.x.reshape([-1,3])
            # loss_res = optimze_loss(res.x, p4, p6, cam_proj_4, cam_proj_6, left_list, right_list, median_bone)
            # print(np.linalg.norm(loss_res))
            # Final per-keypoint errors use pixel coordinates only (no scores).
            kp4_recon, kp6_recon, kp4_e, kp6_e = reproject_error(p3d_tune, p4[:,:2], p6[:,:2], cam_proj_4, cam_proj_6)
            frame_3d_dict[person_id] = p3d_tune.tolist()
            kp4_dict[person_id] = kp4_e.tolist()
            kp6_dict[person_id] = kp6_e.tolist()
        data_3d_dict["3D"][frame_id] = frame_3d_dict
        data_3d_dict["kp4_e"][frame_id] = kp4_dict
        data_3d_dict["kp6_e"][frame_id] = kp6_dict
    with open(shape_prior_finetune_output, "w") as f:
        json.dump(data_3d_dict, f)
def finetune_human_3d_no_score(path_finetune_input, path4, path6, shape_prior_path, shape_prior_finetune_output, frame_list=None):
    '''
    Variant of finetune_human_3d that ignores detection confidences: uses
    optimze_loss_no_score on plain (x, y) keypoints.

    NOTE(review): this function contains apparent debug leftovers — it hard
    exits the whole process after 300 frames, runs the optimizer with
    verbose=2, and swallows all per-person errors with a bare `except`.
    Confirm before relying on it in production.

    Args:
        path_finetune_input: JSON with "P4"/"P6" projection matrices and
            initial "3D" keypoints per frame/person.
        path4: data_C4.json — per-frame, per-person detections (camera 4).
        path6: data_C6.json — same for camera 6.
        shape_prior_path: JSON with "left_list", "right_list", "median_bone".
        shape_prior_finetune_output: output JSON path for the tuned result.
        frame_list: optional subset of frame ids to process (asserted to
            exist in the camera-4 detections); defaults to all frames, sorted.
    '''
    with open(path_finetune_input,"r") as f:
        data_3d = json.load(f)
    with open(path4, "r") as f:
        data_dict4 = json.load(f)
    with open(path6, "r") as f:
        data_dict6 = json.load(f)
    with open(shape_prior_path, 'r') as f:
        data_prior = json.load(f)
    left_list = data_prior["left_list"]
    right_list = data_prior["right_list"]
    median_bone = data_prior["median_bone"]
    cam_proj_4 = np.array(data_3d["P4"])
    cam_proj_6 = np.array(data_3d["P6"])
    data_3d_dict = {}
    data_3d_dict["P4"] = data_3d["P4"]
    data_3d_dict["P6"] = data_3d["P6"]
    data_3d_dict["3D"] = {}
    data_3d_dict["kp4_e"] = {}
    data_3d_dict["kp6_e"] = {}
    # Validate a user-supplied frame subset, or fall back to all frames sorted.
    if frame_list:
        for f in frame_list:
            if f not in data_dict4.keys():
                print("KEY ERROR!")
                assert 0
    else:
        frame_list = [k for k in data_dict4.keys()]
        frame_list.sort()
    for i, frame_id in enumerate(tqdm(frame_list)):
        # NOTE(review): debug leftover — terminates the process after 300
        # frames, so later frames are never written to the output file.
        if i > 300:
            import sys
            sys.exit()
        frame_3d_dict = {}
        kp4_dict = {}
        kp6_dict = {}
        person_list = [k for k in data_dict4[frame_id].keys()]
        person_list.sort()
        for person_id in person_list:
            try:
                p3d = np.array(data_3d["3D"][frame_id][person_id]).reshape([-1,3])
                # Warm-start the optimizer from the shape-prior-consistent pose.
                p3d_init = shape_initialize(left_list, right_list, median_bone, p3d)
                p4_homo = np.array(data_dict4[frame_id][person_id]).reshape([-1,3])
                p6_homo = np.array(data_dict6[frame_id][person_id]).reshape([-1,3])
                # Drop the score column — this variant is unweighted.
                p4 = p4_homo[:,:2]
                p6 = p6_homo[:,:2]
                p3d_flatten = p3d_init.flatten()
                # loss_init = optimze_loss(p3d_flatten, p4, p6, cam_proj_4, cam_proj_6, left_list, right_list, median_bone)
                #print(np.linalg.norm(loss_init))
                res = least_squares(optimze_loss_no_score, p3d_flatten, verbose=2, x_scale='jac', ftol=1e-2, method='trf',args=(p4, p6, cam_proj_4, cam_proj_6, left_list, right_list, median_bone))
                p3d_tune = res.x.reshape([-1,3])
                # loss_res = optimze_loss(res.x, p4, p6, cam_proj_4, cam_proj_6, left_list, right_list, median_bone)
                # print(np.linalg.norm(loss_res))
                kp4_recon, kp6_recon, kp4_e, kp6_e = reproject_error(p3d_tune, p4[:,:2], p6[:,:2], cam_proj_4, cam_proj_6)
                frame_3d_dict[person_id] = p3d_tune.tolist()
                kp4_dict[person_id] = kp4_e.tolist()
                kp6_dict[person_id] = kp6_e.tolist()
            # NOTE(review): bare except silently drops the person from the
            # output; at minimum this should catch Exception and log details.
            except:
                print("Error")
        data_3d_dict["3D"][frame_id] = frame_3d_dict
        data_3d_dict["kp4_e"][frame_id] = kp4_dict
        data_3d_dict["kp6_e"][frame_id] = kp6_dict
    with open(shape_prior_finetune_output, "w") as f:
        json.dump(data_3d_dict, f)
| [
"scipy.optimize.least_squares",
"tqdm.tqdm",
"numpy.square",
"numpy.array",
"numpy.dot",
"numpy.zeros",
"pose_optimize.multiview_geo.reproject_error",
"numpy.concatenate",
"sys.exit",
"json.load",
"json.dump"
] | [((2154, 2194), 'numpy.concatenate', 'np.concatenate', (['(l1 * kp4_e, l1 * kp6_e)'], {}), '((l1 * kp4_e, l1 * kp6_e))\n', (2168, 2194), True, 'import numpy as np\n'), ((2567, 2585), 'numpy.zeros', 'np.zeros', (['num_bone'], {}), '(num_bone)\n', (2575, 2585), True, 'import numpy as np\n'), ((2604, 2622), 'numpy.zeros', 'np.zeros', (['num_bone'], {}), '(num_bone)\n', (2612, 2622), True, 'import numpy as np\n'), ((3642, 3717), 'numpy.concatenate', 'np.concatenate', (['(l1 * kp4_e, l1 * kp6_e, l2 * left_error, l2 * right_error)'], {}), '((l1 * kp4_e, l1 * kp6_e, l2 * left_error, l2 * right_error))\n', (3656, 3717), True, 'import numpy as np\n'), ((4201, 4276), 'numpy.concatenate', 'np.concatenate', (['(l1 * kp4_e, l1 * kp6_e, l2 * left_error, l2 * right_error)'], {}), '((l1 * kp4_e, l1 * kp6_e, l2 * left_error, l2 * right_error))\n', (4215, 4276), True, 'import numpy as np\n'), ((5689, 5717), 'numpy.zeros', 'np.zeros', (['kpt_3d_array.shape'], {}), '(kpt_3d_array.shape)\n', (5697, 5717), True, 'import numpy as np\n'), ((7810, 7833), 'numpy.array', 'np.array', (["data_3d['P4']"], {}), "(data_3d['P4'])\n", (7818, 7833), True, 'import numpy as np\n'), ((7851, 7874), 'numpy.array', 'np.array', (["data_3d['P6']"], {}), "(data_3d['P6'])\n", (7859, 7874), True, 'import numpy as np\n'), ((10745, 10768), 'numpy.array', 'np.array', (["data_3d['P4']"], {}), "(data_3d['P4'])\n", (10753, 10768), True, 'import numpy as np\n'), ((10786, 10809), 'numpy.array', 'np.array', (["data_3d['P6']"], {}), "(data_3d['P6'])\n", (10794, 10809), True, 'import numpy as np\n'), ((13757, 13780), 'numpy.array', 'np.array', (["data_3d['P4']"], {}), "(data_3d['P4'])\n", (13765, 13780), True, 'import numpy as np\n'), ((13798, 13821), 'numpy.array', 'np.array', (["data_3d['P6']"], {}), "(data_3d['P6'])\n", (13806, 13821), True, 'import numpy as np\n'), ((471, 506), 'numpy.dot', 'np.dot', (['cam_proj_4[0:3, 0:3]', 'p3d.T'], {}), '(cam_proj_4[0:3, 0:3], p3d.T)\n', (477, 506), True, 'import numpy as 
np\n'), ((557, 592), 'numpy.dot', 'np.dot', (['cam_proj_6[0:3, 0:3]', 'p3d.T'], {}), '(cam_proj_6[0:3, 0:3], p3d.T)\n', (563, 592), True, 'import numpy as np\n'), ((1286, 1321), 'numpy.dot', 'np.dot', (['cam_proj_4[0:3, 0:3]', 'p3d.T'], {}), '(cam_proj_4[0:3, 0:3], p3d.T)\n', (1292, 1321), True, 'import numpy as np\n'), ((1372, 1407), 'numpy.dot', 'np.dot', (['cam_proj_6[0:3, 0:3]', 'p3d.T'], {}), '(cam_proj_6[0:3, 0:3], p3d.T)\n', (1378, 1407), True, 'import numpy as np\n'), ((4476, 4496), 'numpy.dot', 'np.dot', (['p_vec', 'p_vec'], {}), '(p_vec, p_vec)\n', (4482, 4496), True, 'import numpy as np\n'), ((7475, 7487), 'json.load', 'json.load', (['f'], {}), '(f)\n', (7484, 7487), False, 'import json\n'), ((7541, 7553), 'json.load', 'json.load', (['f'], {}), '(f)\n', (7550, 7553), False, 'import json\n'), ((7607, 7619), 'json.load', 'json.load', (['f'], {}), '(f)\n', (7616, 7619), False, 'import json\n'), ((8170, 8186), 'tqdm.tqdm', 'tqdm', (['frame_list'], {}), '(frame_list)\n', (8174, 8186), False, 'from tqdm import tqdm\n'), ((10359, 10371), 'json.load', 'json.load', (['f'], {}), '(f)\n', (10368, 10371), False, 'import json\n'), ((10425, 10437), 'json.load', 'json.load', (['f'], {}), '(f)\n', (10434, 10437), False, 'import json\n'), ((10491, 10503), 'json.load', 'json.load', (['f'], {}), '(f)\n', (10500, 10503), False, 'import json\n'), ((10572, 10584), 'json.load', 'json.load', (['f'], {}), '(f)\n', (10581, 10584), False, 'import json\n'), ((11280, 11296), 'tqdm.tqdm', 'tqdm', (['frame_list'], {}), '(frame_list)\n', (11284, 11296), False, 'from tqdm import tqdm\n'), ((13005, 13031), 'json.dump', 'json.dump', (['data_3d_dict', 'f'], {}), '(data_3d_dict, f)\n', (13014, 13031), False, 'import json\n'), ((13371, 13383), 'json.load', 'json.load', (['f'], {}), '(f)\n', (13380, 13383), False, 'import json\n'), ((13437, 13449), 'json.load', 'json.load', (['f'], {}), '(f)\n', (13446, 13449), False, 'import json\n'), ((13503, 13515), 'json.load', 'json.load', (['f'], {}), 
'(f)\n', (13512, 13515), False, 'import json\n'), ((13584, 13596), 'json.load', 'json.load', (['f'], {}), '(f)\n', (13593, 13596), False, 'import json\n'), ((14292, 14308), 'tqdm.tqdm', 'tqdm', (['frame_list'], {}), '(frame_list)\n', (14296, 14308), False, 'from tqdm import tqdm\n'), ((16235, 16261), 'json.dump', 'json.dump', (['data_3d_dict', 'f'], {}), '(data_3d_dict, f)\n', (16244, 16261), False, 'import json\n'), ((862, 889), 'numpy.square', 'np.square', (['(kp4_recon.T - p4)'], {}), '(kp4_recon.T - p4)\n', (871, 889), True, 'import numpy as np\n'), ((927, 954), 'numpy.square', 'np.square', (['(kp6_recon.T - p6)'], {}), '(kp6_recon.T - p6)\n', (936, 954), True, 'import numpy as np\n'), ((8940, 9080), 'scipy.optimize.least_squares', 'least_squares', (['optimze_loss_2d', 'p3d_flatten'], {'verbose': '(0)', 'x_scale': '"""jac"""', 'ftol': '(0.0001)', 'method': '"""trf"""', 'args': '(p4, p6, cam_proj_4, cam_proj_6)'}), "(optimze_loss_2d, p3d_flatten, verbose=0, x_scale='jac', ftol=\n 0.0001, method='trf', args=(p4, p6, cam_proj_4, cam_proj_6))\n", (8953, 9080), False, 'from scipy.optimize import least_squares\n'), ((9504, 9561), 'pose_optimize.multiview_geo.reproject_error', 'reproject_error', (['p3d_tune', 'p4', 'p6', 'cam_proj_4', 'cam_proj_6'], {}), '(p3d_tune, p4, p6, cam_proj_4, cam_proj_6)\n', (9519, 9561), False, 'from pose_optimize.multiview_geo import reproject_error\n'), ((9977, 10003), 'json.dump', 'json.dump', (['data_3d_dict', 'f'], {}), '(data_3d_dict, f)\n', (9986, 10003), False, 'import json\n'), ((12111, 12286), 'scipy.optimize.least_squares', 'least_squares', (['optimze_loss', 'p3d_flatten'], {'verbose': '(0)', 'x_scale': '"""jac"""', 'ftol': '(0.01)', 'method': '"""trf"""', 'args': '(p4, p6, cam_proj_4, cam_proj_6, left_list, right_list, median_bone)'}), "(optimze_loss, p3d_flatten, verbose=0, x_scale='jac', ftol=\n 0.01, method='trf', args=(p4, p6, cam_proj_4, cam_proj_6, left_list,\n right_list, median_bone))\n", (12124, 12286), False, 'from 
scipy.optimize import least_squares\n'), ((12544, 12615), 'pose_optimize.multiview_geo.reproject_error', 'reproject_error', (['p3d_tune', 'p4[:, :2]', 'p6[:, :2]', 'cam_proj_4', 'cam_proj_6'], {}), '(p3d_tune, p4[:, :2], p6[:, :2], cam_proj_4, cam_proj_6)\n', (12559, 12615), False, 'from pose_optimize.multiview_geo import reproject_error\n'), ((14367, 14377), 'sys.exit', 'sys.exit', ([], {}), '()\n', (14375, 14377), False, 'import sys\n'), ((1685, 1719), 'numpy.square', 'np.square', (['(kp4_recon.T - p4[:, :2])'], {}), '(kp4_recon.T - p4[:, :2])\n', (1694, 1719), True, 'import numpy as np\n'), ((1764, 1798), 'numpy.square', 'np.square', (['(kp6_recon.T - p6[:, :2])'], {}), '(kp6_recon.T - p6[:, :2])\n', (1773, 1798), True, 'import numpy as np\n'), ((2773, 2807), 'numpy.dot', 'np.dot', (['bon_vec_left', 'bon_vec_left'], {}), '(bon_vec_left, bon_vec_left)\n', (2779, 2807), True, 'import numpy as np\n'), ((3006, 3042), 'numpy.dot', 'np.dot', (['bon_vec_right', 'bon_vec_right'], {}), '(bon_vec_right, bon_vec_right)\n', (3012, 3042), True, 'import numpy as np\n'), ((5237, 5271), 'numpy.dot', 'np.dot', (['bon_vec_left', 'bon_vec_left'], {}), '(bon_vec_left, bon_vec_left)\n', (5243, 5271), True, 'import numpy as np\n'), ((5526, 5562), 'numpy.dot', 'np.dot', (['bon_vec_right', 'bon_vec_right'], {}), '(bon_vec_right, bon_vec_right)\n', (5532, 5562), True, 'import numpy as np\n'), ((15258, 15441), 'scipy.optimize.least_squares', 'least_squares', (['optimze_loss_no_score', 'p3d_flatten'], {'verbose': '(2)', 'x_scale': '"""jac"""', 'ftol': '(0.01)', 'method': '"""trf"""', 'args': '(p4, p6, cam_proj_4, cam_proj_6, left_list, right_list, median_bone)'}), "(optimze_loss_no_score, p3d_flatten, verbose=2, x_scale='jac',\n ftol=0.01, method='trf', args=(p4, p6, cam_proj_4, cam_proj_6,\n left_list, right_list, median_bone))\n", (15271, 15441), False, 'from scipy.optimize import least_squares\n'), ((15720, 15791), 'pose_optimize.multiview_geo.reproject_error', 'reproject_error', 
(['p3d_tune', 'p4[:, :2]', 'p6[:, :2]', 'cam_proj_4', 'cam_proj_6'], {}), '(p3d_tune, p4[:, :2], p6[:, :2], cam_proj_4, cam_proj_6)\n', (15735, 15791), False, 'from pose_optimize.multiview_geo import reproject_error\n'), ((8424, 8468), 'numpy.array', 'np.array', (["data_3d['3D'][frame_id][person_id]"], {}), "(data_3d['3D'][frame_id][person_id])\n", (8432, 8468), True, 'import numpy as np\n'), ((8499, 8540), 'numpy.array', 'np.array', (['data_dict4[frame_id][person_id]'], {}), '(data_dict4[frame_id][person_id])\n', (8507, 8540), True, 'import numpy as np\n'), ((8579, 8620), 'numpy.array', 'np.array', (['data_dict6[frame_id][person_id]'], {}), '(data_dict6[frame_id][person_id])\n', (8587, 8620), True, 'import numpy as np\n'), ((11527, 11571), 'numpy.array', 'np.array', (["data_3d['3D'][frame_id][person_id]"], {}), "(data_3d['3D'][frame_id][person_id])\n", (11535, 11571), True, 'import numpy as np\n'), ((11692, 11733), 'numpy.array', 'np.array', (['data_dict4[frame_id][person_id]'], {}), '(data_dict4[frame_id][person_id])\n', (11700, 11733), True, 'import numpy as np\n'), ((11772, 11813), 'numpy.array', 'np.array', (['data_dict6[frame_id][person_id]'], {}), '(data_dict6[frame_id][person_id])\n', (11780, 11813), True, 'import numpy as np\n'), ((14626, 14670), 'numpy.array', 'np.array', (["data_3d['3D'][frame_id][person_id]"], {}), "(data_3d['3D'][frame_id][person_id])\n", (14634, 14670), True, 'import numpy as np\n'), ((14799, 14840), 'numpy.array', 'np.array', (['data_dict4[frame_id][person_id]'], {}), '(data_dict4[frame_id][person_id])\n', (14807, 14840), True, 'import numpy as np\n'), ((14883, 14924), 'numpy.array', 'np.array', (['data_dict6[frame_id][person_id]'], {}), '(data_dict6[frame_id][person_id])\n', (14891, 14924), True, 'import numpy as np\n'), ((8883, 8903), 'numpy.square', 'np.square', (['loss_init'], {}), '(loss_init)\n', (8892, 8903), True, 'import numpy as np\n'), ((9202, 9223), 'numpy.square', 'np.square', (['loss_final'], {}), '(loss_final)\n', 
(9211, 9223), True, 'import numpy as np\n'), ((9369, 9390), 'numpy.square', 'np.square', (['loss_final'], {}), '(loss_final)\n', (9378, 9390), True, 'import numpy as np\n')] |
import pseudopol.ppseudopol as p_pp
import numpy as np
import sys

# Upper bound for the subset sum, read from the first CLI argument.
max_val = int(sys.argv[1])

# Sample 5000 random 32-bit values in [1, 500000) as candidate elements.
values = list(np.random.randint(1, 500000, 5000, dtype=np.uint32))

print(p_pp.find_max_subsum(max_val, values))
| [
"numpy.random.randint",
"pseudopol.ppseudopol.find_max_subsum"
] | [((103, 154), 'numpy.random.randint', 'np.random.randint', (['(1)', '(500000)', '(5000)'], {'dtype': 'np.uint32'}), '(1, 500000, 5000, dtype=np.uint32)\n', (120, 154), True, 'import numpy as np\n'), ((160, 195), 'pseudopol.ppseudopol.find_max_subsum', 'p_pp.find_max_subsum', (['max_val', 'vals'], {}), '(max_val, vals)\n', (180, 195), True, 'import pseudopol.ppseudopol as p_pp\n')] |