code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
import pandas
import os
from subprocess import check_output
# Machine-specific WorldClim raster directory and the subdirectory where
# the stacked GeoTIFF outputs are written.
worldclim_dir = "/Users/mperry/data/worldclim"
OUTDIR = os.path.join(worldclim_dir, "stacked")
def make_stack(variable, period, month, rcp, paths):
    """Stack the given rasters into one compressed, tiled GeoTIFF.

    Returns the output path. The stack is only built when the target file
    does not already exist, so repeated runs skip finished work.
    """
    filename = "{}_{}_{}_{}_stack.tif".format(variable, period, month, rcp)
    outpath = os.path.join(OUTDIR, filename)
    if os.path.exists(outpath):
        return outpath
    # GDAL creation options for the `rio stack` call
    creation_opts = [
        "COMPRESS=DEFLATE",
        "PREDICTOR=2",
        "ZLEVEL=6",
        "TILED=YES",
        "BLOCKXSIZE=16",
        "BLOCKYSIZE=16",
    ]
    cmd = ["rio", "stack"]
    for opt in creation_opts:
        cmd += ["--co", opt]
    cmd += ["-o", outpath]
    cmd.extend(paths)
    check_output(cmd)
    return outpath
# Load dataframe in global scope (shared with main(); the code below reads
# columns 'variable', 'period', 'month', 'rcp' and 'path')
df = pandas.read_csv("../climate_data.csv")
def main():
    """Build stacks for the temp/precip variables and write an index CSV."""
    # import glob
    # for f in glob.glob(OUTDIR + "/*stack.tif"):
    #     os.remove(f)
    # Only precip and temperature variables
    wanted = df[(df['variable'] == 'tx') |
                (df['variable'] == 'tn') |
                (df['variable'] == 'pr')]
    grouped = wanted.groupby(['variable', 'period', 'month', 'rcp'])
    with open("../climate_stacks.csv", 'w') as fh:
        fh.write("variable,period,month,rcp,path\n")
        for key, record in grouped:
            if key[3] == '45':  # ignore rcp 4.5 for now
                continue
            print(key)
            outpath = make_stack(*key, paths=list(record['path']))
            fields = [str(part) for part in key]
            fh.write(','.join(fields) + "," + outpath + "\n")
            # flush after each row so progress survives a crash
            fh.flush()
if __name__ == "__main__":
    main()
|
[
"pandas.read_csv",
"subprocess.check_output",
"os.path.join",
"os.path.exists"
] |
[((117, 155), 'os.path.join', 'os.path.join', (['worldclim_dir', '"""stacked"""'], {}), "(worldclim_dir, 'stacked')\n", (129, 155), False, 'import os\n'), ((760, 798), 'pandas.read_csv', 'pandas.read_csv', (['"""../climate_data.csv"""'], {}), "('../climate_data.csv')\n", (775, 798), False, 'import pandas\n'), ((345, 368), 'os.path.exists', 'os.path.exists', (['outpath'], {}), '(outpath)\n', (359, 368), False, 'import os\n'), ((681, 700), 'subprocess.check_output', 'check_output', (['vargs'], {}), '(vargs)\n', (693, 700), False, 'from subprocess import check_output\n')]
|
#! /usr/bin/python
# See README.txt for information and build instructions.
import run_params_pb2
import sys
from google.protobuf.text_format import MessageToString, Merge
import argparse

# FIX: the first positional argument of ArgumentParser is `prog` (the program
# name shown in usage), not the description — pass description= explicitly.
arg_parser = argparse.ArgumentParser(
    description="Create a definitions file for run_params")
arg_parser.add_argument('output_file', help='Path of output file')
arguments = arg_parser.parse_args()

run_def = run_params_pb2.params()
run_def.data_params.use_delta = True
run_def.data_params.normalization = run_params_pb2.DataParams.CMVN
run_def.data_params.num_frames_per_pt = 15
run_def.batch_size = 128
run_def.max_bad_epochs = 10
run_def.max_epochs = 40

# Write the run definition to disk.
# FIX: previously wrote to sys.argv[1], silently ignoring the parsed argparse
# result, and opened the file in binary mode even though MessageToString
# returns text (a TypeError on Python 3). Use the parsed argument, text mode,
# and a context manager so the handle is always closed.
with open(arguments.output_file, "w") as f:
    f.write(MessageToString(run_def))
|
[
"run_params_pb2.params",
"google.protobuf.text_format.MessageToString",
"argparse.ArgumentParser"
] |
[((204, 271), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Create a definitions file for run_params"""'], {}), "('Create a definitions file for run_params')\n", (227, 271), False, 'import argparse\n'), ((386, 409), 'run_params_pb2.params', 'run_params_pb2.params', ([], {}), '()\n', (407, 409), False, 'import run_params_pb2\n'), ((716, 740), 'google.protobuf.text_format.MessageToString', 'MessageToString', (['run_def'], {}), '(run_def)\n', (731, 740), False, 'from google.protobuf.text_format import MessageToString, Merge\n')]
|
import pytest
from django.test import Client
from pollaris.app.models import SearchLog
def post_to_search_log(payload):
    """POST *payload* as JSON to the search-log endpoint; return the response."""
    client = Client()
    return client.post("/api/v1/search/log", payload, content_type="application/json")
@pytest.mark.django_db
def test_search_log_request_search_string():
    """A search-string-only payload is logged with its status and metadata."""
    payload = {
        "address_entered": {"search_string": "Needham, MA"},
        "heap_id": "1232435",
        "status": "INCOMPLETE_ADDRESS",
        "autocomplete_selected": True,
    }
    response = post_to_search_log(payload)
    assert response.status_code == 204

    search_logs = SearchLog.objects.all()
    assert search_logs.count() == 1
    log = search_logs.first()
    assert not log.success
    assert log.search_status == "INCOMPLETE_ADDRESS"
    assert log.heap_id == "1232435"
    # FIX (E712): assert truthiness instead of comparing with `== True`
    assert log.autocomplete_selected
    assert log.search_string == "Needham, MA"
    assert not log.city
@pytest.mark.django_db
def test_search_log_request_full_address():
    """A fully-structured address payload is logged field-by-field."""
    payload = {
        "address_entered": {
            "street_number": "3899",
            "street": "Grand Ave",
            "city": "Des Moines",
            "state": "IA",
            "zip5": "50312",
            "zip9": "503122807",
        },
        "heap_id": "asdfasdf",
        "status": "INCOMPLETE_ADDRESS",
        "autocomplete_selected": False,
    }
    response = post_to_search_log(payload)
    assert response.status_code == 204

    search_logs = SearchLog.objects.all()
    assert search_logs.count() == 1
    log = search_logs.first()
    assert not log.success
    assert log.search_status == "INCOMPLETE_ADDRESS"
    assert log.heap_id == "asdfasdf"
    # FIX (E712): assert falsiness instead of comparing with `== False`
    assert not log.autocomplete_selected
    assert log.street_number == "3899"
    assert log.street == "Grand Ave"
    assert log.city == "Des Moines"
    assert log.state_code == "IA"
    assert log.zip5 == "50312"
    assert log.zip9 == "503122807"
|
[
"pollaris.app.models.SearchLog.objects.all",
"django.test.Client"
] |
[((574, 597), 'pollaris.app.models.SearchLog.objects.all', 'SearchLog.objects.all', ([], {}), '()\n', (595, 597), False, 'from pollaris.app.models import SearchLog\n'), ((1450, 1473), 'pollaris.app.models.SearchLog.objects.all', 'SearchLog.objects.all', ([], {}), '()\n', (1471, 1473), False, 'from pollaris.app.models import SearchLog\n'), ((134, 142), 'django.test.Client', 'Client', ([], {}), '()\n', (140, 142), False, 'from django.test import Client\n')]
|
"""Main plotting module."""
import math
import locale
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
def init_params(german_labels=True, font_size=20, font_family='Carlito',
                pdf_padding=0.1, pdf_bbox='tight', pdf_fonttype=42,
                deact_warnings=True):
    """Initialize RC parameters for matplotlib plots."""
    if german_labels:
        # German month/weekday names on date axes
        locale.setlocale(locale.LC_TIME, 'de_DE.UTF-8')
    mpl.rcParams.update({
        'font.size': font_size,
        'font.family': font_family,
        'savefig.pad_inches': pdf_padding,
        'savefig.bbox': pdf_bbox,
        'hatch.linewidth': 2,
    })
    plt.rcParams['pdf.fonttype'] = pdf_fonttype
    if deact_warnings:
        mpl.rcParams.update({'figure.max_open_warning': 0})
def znes_colors(n=None):
    """Return dict with ZNES colors.
    Examples
    --------
    >>> znes_colors().keys() # doctest: +ELLIPSIS
    dict_keys(['darkblue', 'red', 'lightblue', 'orange', 'grey',...
    Original author: @ckaldemeyer
    """
    palette = {
        'darkblue': '#00395B',
        'red': '#B54036',
        'lightblue': '#74ADC0',
        'orange': '#EC6707',
        'grey': '#BFBFBF',
        'dimgrey': 'dimgrey',
        'lightgrey': 'lightgrey',
        'slategrey': 'slategrey',
        'darkgrey': '#A9A9A9'
    }
    # full palette when no count was requested
    if n is None:
        return palette
    if n > len(palette):
        raise IndexError('Number of requested colors is too big.')
    # first n entries, preserving insertion order
    wanted = list(palette)[:n]
    return {name: palette[name] for name in wanted}
def znes_colors_hatched(n, diff_colors=4):
    """Return list of dicts with ZNES colors with hatches.

    The first `diff_colors` entries are plain colors; later entries reuse
    the colors cyclically with increasingly dense hatch patterns.
    """
    palette = list(znes_colors().values())
    hatch_cycle = ['//', '\\\\', '////', '\\\\\\\\']
    params = []
    for idx in range(n):
        entry = {'color': palette[idx % diff_colors], 'edgecolor': 'w'}
        if idx >= diff_colors:
            # every full pass through the palette moves one step along
            # the hatch cycle
            entry['hatch'] = hatch_cycle[(math.floor(idx / diff_colors) - 1) % 4]
        params.append(entry)
    return params
def get_colors(nr_cols, **kwargs):
    """Get color parameters list of dictionaries.

    Priority: explicit 'colors' kwarg, then hatched ZNES palette when
    'hatches' is requested, else the plain ZNES palette.
    """
    if 'colors' in kwargs:
        return [{'color': c} for c in kwargs['colors']]
    if 'hatches' in kwargs:
        if 'diff_colors' in kwargs:
            return znes_colors_hatched(nr_cols,
                                       diff_colors=kwargs['diff_colors'])
        return znes_colors_hatched(nr_cols)
    return [{'color': c} for c in znes_colors(nr_cols).values()]
def create_multipage_pdf(file_name='plots.pdf', figs=None, dpi=300,
                         mute=False):
    """Save all open matplotlib figures into a multipage pdf-file.

    Examples
    --------
    >>> import pandas as pd
    >>> import numpy as np
    >>>
    >>> df1 = pd.DataFrame(np.random.randn(24, 2))
    >>> ax1 = df1.plot(kind='line')
    >>>
    >>> df2 = pd.DataFrame(np.random.randn(24, 2))
    >>> ax2 = df2.plot(kind='scatter', x=0, y=1)
    >>>
    >>> # mute is set to true to surpress writing a pdf file
    >>> create_multipage_pdf(file_name='plots.pdf', dpi=300, mute=True)
    False
    Original author: @ckaldemeyer
    """
    # NOTE(review): the `dpi` parameter is accepted but never forwarded to
    # savefig in the visible code — confirm whether that is intentional.
    if mute is True:
        # set return flag to false if no output is written
        flag = False
    else:
        pp = PdfPages(file_name)
        # default to every currently open figure
        if figs is None:
            figs = [plt.figure(n) for n in plt.get_fignums()]
        for fig in figs:
            fig.savefig(pp, format='pdf')
        pp.close()
        # close all existing figures
        for fig in figs:
            plt.close(fig)
        # set return flag
        flag = True
    return flag
def monthlyBar(data, figsize=[12, 5.5], legend_loc='best', legend=True,
               return_objs=False, **kwargs):
    """Create bar chart of sum of monthly unit commitment.

    Resamples *data* (a time-indexed DataFrame) to monthly sums scaled by
    1e3 and draws a stacked bar per month. Columns with a negative monthly
    mean stack below zero, positive ones above.

    Recognized kwargs: 'color_params' (pre-built color dicts), 'demand'
    (series overlaid as narrow light-grey bars), 'ylabel', 'xlabel',
    'title', 'suptitle', 'labels', plus anything accepted by get_colors().
    Returns (fig, ax) when return_objs is True, otherwise None.
    """
    monSum = data.resample('M').sum()/1e3
    # index becomes abbreviated month names ('Jan', 'Feb', ...)
    monSum.rename(index=lambda x: x.strftime('%b'), inplace=True)
    nr_cols = len(monSum.columns)
    if 'color_params' in kwargs:
        color_params = kwargs['color_params']
    else:
        color_params = get_colors(nr_cols, **kwargs)
    fig, ax = plt.subplots(figsize=figsize)
    # separate running baselines so positive and negative series each
    # stack on their own side of the x-axis
    pos_bottom = 0
    neg_bottom = 0
    for col, color_param in zip(monSum.columns, color_params):
        mean_val = monSum[col].mean()
        if mean_val >= 0:
            ax.bar(monSum.index, monSum[col],
                   bottom=pos_bottom, **color_param)
            pos_bottom += monSum[col]
        elif mean_val < 0:
            ax.bar(monSum.index, monSum[col],
                   bottom=neg_bottom, **color_param)
            neg_bottom += monSum[col]
    if 'demand' in kwargs:
        # overlay demand as slim light-grey bars in front of the stack
        monDemand = kwargs['demand'].resample('M').sum()/1e3
        monDemand.rename(index=lambda x: x.strftime('%b'), inplace=True)
        ax.bar(monSum.index, monDemand,
               width=0.25, color=znes_colors()['lightgrey'], alpha=0.75,
               linewidth=0)
    ax.grid(linestyle='--', which='major', axis='y')
    if 'ylabel' in kwargs:
        ax.set_ylabel(kwargs['ylabel'])
    else:
        ax.set_ylabel('Gesamtwärmemenge in GWh')
    if 'xlabel' in kwargs:
        ax.set_xlabel(kwargs['xlabel'])
    if 'title' in kwargs:
        ax.set_title(kwargs['title'])
    if 'suptitle' in kwargs:
        fig.suptitle(kwargs['suptitle'])
    if legend:
        if 'labels' in kwargs:
            labels = kwargs['labels']
        else:
            labels = monSum.columns.to_list()
        # 'outside right' / 'outside bottom' place the legend beyond the axes
        if legend_loc[:7] == 'outside':
            if legend_loc[8:] == 'right':
                ax.legend(labels=labels, loc='upper right',
                          bbox_to_anchor=(1.27, 1),
                          ncol=1)
            elif legend_loc[8:] == 'bottom':
                ax.legend(labels=labels, loc='lower left',
                          bbox_to_anchor=(0, -0.265),
                          ncol=nr_cols)
        else:
            ax.legend(labels=labels, loc=legend_loc)
    if return_objs:
        return fig, ax
def load_curve(data, figsize=[8, 5], linewidth=2.5, legend_loc='best', return_objs=False,
               **kwargs):
    """Plot the sorted (annual) load curves of units.

    Each column of *data* is sorted descending independently, turning the
    x-axis into a duration axis rather than a time axis.

    Recognized kwargs: 'ylabel', 'xlabel', 'title', 'suptitle', 'labels',
    plus anything accepted by get_colors(). Returns (fig, ax) when
    return_objs is True, otherwise None.
    """
    # sort every column descending; original time ordering is discarded
    data = data.apply(lambda x: x.sort_values(ascending=False).values)
    data.reset_index(drop=True, inplace=True)
    nr_cols = len(data.columns)
    color_params = get_colors(nr_cols, **kwargs)
    fig, ax = plt.subplots(figsize=figsize)
    for col, color_param in zip(data.columns, color_params):
        ax.plot(data[col], linewidth=linewidth, **color_param)
    ax.grid(linestyle='--')
    if 'ylabel' in kwargs:
        ax.set_ylabel(kwargs['ylabel'])
    else:
        ax.set_ylabel(r'Wärmestrom $\dot{Q}$ in MW')
    if 'xlabel' in kwargs:
        ax.set_xlabel(kwargs['xlabel'])
    else:
        ax.set_xlabel('Stunden')
    if 'title' in kwargs:
        ax.set_title(kwargs['title'])
    if 'suptitle' in kwargs:
        fig.suptitle(kwargs['suptitle'])
    if 'labels' in kwargs:
        labels = kwargs['labels']
    else:
        labels = data.columns.to_list()
    # 'outside right' / 'outside bottom' place the legend beyond the axes
    if legend_loc[:7] == 'outside':
        if legend_loc[8:] == 'right':
            ax.legend(labels=labels, loc='upper right',
                      bbox_to_anchor=(1.33, 1),
                      ncol=1)
        elif legend_loc[8:] == 'bottom':
            anchor = (0, -0.35)
            # wide legends wrap onto two rows and need extra space below
            if nr_cols > 4:
                nr_cols = round(nr_cols/2)
                anchor = (0, -0.45)
            ax.legend(labels=labels, loc='lower left',
                      bbox_to_anchor=anchor,
                      ncol=nr_cols)
    else:
        ax.legend(labels=labels, loc=legend_loc)
    if return_objs:
        return fig, ax
|
[
"matplotlib.backends.backend_pdf.PdfPages",
"matplotlib.pyplot.close",
"matplotlib.rcParams.update",
"math.floor",
"matplotlib.pyplot.figure",
"locale.setlocale",
"matplotlib.pyplot.get_fignums",
"matplotlib.pyplot.subplots"
] |
[((4561, 4590), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (4573, 4590), True, 'import matplotlib.pyplot as plt\n'), ((6819, 6848), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (6831, 6848), True, 'import matplotlib.pyplot as plt\n'), ((433, 480), 'locale.setlocale', 'locale.setlocale', (['locale.LC_TIME', '"""de_DE.UTF-8"""'], {}), "(locale.LC_TIME, 'de_DE.UTF-8')\n", (449, 480), False, 'import locale\n'), ((786, 837), 'matplotlib.rcParams.update', 'mpl.rcParams.update', (["{'figure.max_open_warning': 0}"], {}), "({'figure.max_open_warning': 0})\n", (805, 837), True, 'import matplotlib as mpl\n'), ((3732, 3751), 'matplotlib.backends.backend_pdf.PdfPages', 'PdfPages', (['file_name'], {}), '(file_name)\n', (3740, 3751), False, 'from matplotlib.backends.backend_pdf import PdfPages\n'), ((4000, 4014), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (4009, 4014), True, 'import matplotlib.pyplot as plt\n'), ((3797, 3810), 'matplotlib.pyplot.figure', 'plt.figure', (['n'], {}), '(n)\n', (3807, 3810), True, 'import matplotlib.pyplot as plt\n'), ((3820, 3837), 'matplotlib.pyplot.get_fignums', 'plt.get_fignums', ([], {}), '()\n', (3835, 3837), True, 'import matplotlib.pyplot as plt\n'), ((2160, 2187), 'math.floor', 'math.floor', (['(i / diff_colors)'], {}), '(i / diff_colors)\n', (2170, 2187), False, 'import math\n')]
|
#!/usr/bin/env python
# coding: utf-8
# ### Module Import
# In[4]:
from for_CL7_use import NbaPlayer

nba = NbaPlayer()
# (name, team, age) triples registered with the roster
players = [
    ('<NAME>', 'Los Angeles Lakers', 36),
    ('<NAME>', '<NAME>', 31),
    ('<NAME>', 'Phoenix Suns', 36),
    ('<NAME>', '<NAME>', 32),
    ('<NAME>', 'Golden State Warriors', 33),
    ('<NAME>', 'New Orleans Pelicans', 20),
]
for name, team, age in players:
    nba.add_player(name, team, age)
nba.compare_age('age')
|
[
"for_CL7_use.NbaPlayer"
] |
[((112, 123), 'for_CL7_use.NbaPlayer', 'NbaPlayer', ([], {}), '()\n', (121, 123), False, 'from for_CL7_use import NbaPlayer\n')]
|
import pandas as pd
import os
import numpy as np
def read_file(path):
    """Parse a Description.txt-style file into a dict of one-element lists.

    Lines 2-9 (0-indexed) of the file are read as ``key:value`` pairs; each
    value is wrapped in a single-element list so the result can be fed
    straight to ``pandas.DataFrame``. The name of the file's parent
    directory is stored under the 'shortname' key.
    """
    # FIX: the file handle was previously opened with open() and never
    # closed — use a context manager so it is always released.
    with open(path, 'r') as f:
        txt = f.read()
    lines = txt.split('\n')[2:10]
    out = dict()
    # NOTE(review): assumes a '/'-separated path — confirm callers never
    # pass Windows-style paths.
    out['shortname'] = path.split('/')[-2]
    for d in (line.split(':') for line in lines):
        out[d[0]] = [d[1]]
    return out
roots = ['1/1A0','1/1B0','1/1C0','1/1D0','1/1E0','1/1F0','1/1G0','1/1H0','1/1I0','1/1J0','1/1K0','1/1L0','1/1M0','1/1N0','1/1O0','1/1P0','1/1Q0','1/1R0','1/1S0','1/1T0','1/1U0','1/1V0','1/1W0','1/1X0','1/1Y0','1/1Z0']

# Read every synthetic description file into a single-row frame, then
# concatenate once at the end.
# FIX: DataFrame.append is deprecated and was removed in pandas 2.0 (it was
# also O(n^2) — one copy per call); collect frames and use pd.concat instead.
frames = []
for root in roots:
    path = os.path.join('RAW_Data','Synthetic',root,'Description.txt')
    frames.append(pd.DataFrame(read_file(path)))
df = pd.concat(frames)
print('generating spreadsheet')
df.to_excel('lit_appliances.xlsx')
|
[
"pandas.DataFrame",
"os.path.join"
] |
[((523, 589), 'os.path.join', 'os.path.join', (['"""RAW_Data"""', '"""Synthetic"""', 'roots[0]', '"""Description.txt"""'], {}), "('RAW_Data', 'Synthetic', roots[0], 'Description.txt')\n", (535, 589), False, 'import os\n'), ((615, 633), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (627, 633), True, 'import pandas as pd\n'), ((666, 728), 'os.path.join', 'os.path.join', (['"""RAW_Data"""', '"""Synthetic"""', 'root', '"""Description.txt"""'], {}), "('RAW_Data', 'Synthetic', root, 'Description.txt')\n", (678, 728), False, 'import os\n'), ((766, 784), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (778, 784), True, 'import pandas as pd\n')]
|
"""extend entity key length
Revision ID: 84683329b0a5
Revises: <KEY>
Create Date: 2019-02-20 07:00:21.047707
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '84683329b0a5'
down_revision = '<KEY>'
def upgrade():
    """Apply the schema change: drop unused columns and widen key columns.

    Widens match.entity_id and match.match_id from VARCHAR(64) to
    VARCHAR(128), and entity.id from VARCHAR(32) to VARCHAR(128), to
    accommodate longer entity keys.
    """
    # Both dropped columns are removed without a corresponding restore in
    # downgrade(), so their data is lost permanently.
    op.drop_column('document_record', 'sheet')
    op.drop_column('match', 'document_id')
    op.alter_column('match', 'entity_id',
                    existing_type=sa.VARCHAR(length=64),
                    type_=sa.String(length=128),
                    existing_nullable=False)
    op.alter_column('match', 'match_id',
                    existing_type=sa.VARCHAR(length=64),
                    type_=sa.String(length=128),
                    existing_nullable=False)
    op.alter_column('entity', 'id',
                    existing_type=sa.VARCHAR(length=32),
                    type_=sa.String(length=128),
                    existing_nullable=False)
def downgrade():
    # Intentionally a no-op: the columns dropped in upgrade() cannot have
    # their data restored, so this migration is irreversible. Downgrading
    # past this revision leaves the widened columns in place.
    pass
|
[
"sqlalchemy.String",
"alembic.op.drop_column",
"sqlalchemy.VARCHAR"
] |
[((275, 317), 'alembic.op.drop_column', 'op.drop_column', (['"""document_record"""', '"""sheet"""'], {}), "('document_record', 'sheet')\n", (289, 317), False, 'from alembic import op\n'), ((322, 360), 'alembic.op.drop_column', 'op.drop_column', (['"""match"""', '"""document_id"""'], {}), "('match', 'document_id')\n", (336, 360), False, 'from alembic import op\n'), ((437, 458), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {'length': '(64)'}), '(length=64)\n', (447, 458), True, 'import sqlalchemy as sa\n'), ((486, 507), 'sqlalchemy.String', 'sa.String', ([], {'length': '(128)'}), '(length=128)\n', (495, 507), True, 'import sqlalchemy as sa\n'), ((629, 650), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {'length': '(64)'}), '(length=64)\n', (639, 650), True, 'import sqlalchemy as sa\n'), ((678, 699), 'sqlalchemy.String', 'sa.String', ([], {'length': '(128)'}), '(length=128)\n', (687, 699), True, 'import sqlalchemy as sa\n'), ((816, 837), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {'length': '(32)'}), '(length=32)\n', (826, 837), True, 'import sqlalchemy as sa\n'), ((865, 886), 'sqlalchemy.String', 'sa.String', ([], {'length': '(128)'}), '(length=128)\n', (874, 886), True, 'import sqlalchemy as sa\n')]
|
# practice_challenge/study_part1.py
## Starting From Scratch
import os
import sqlite3

# Resolve the DB file relative to this module so the script works from any
# working directory.
DB_FILEPATH = os.path.join(os.path.dirname(__file__),
                           "..",
                           "practice_challenge",
                           "study_part1.sqlite3")
conn = sqlite3.connect(DB_FILEPATH)
cur = conn.cursor()

create_table = """
    CREATE TABLE IF NOT EXISTS
    STUDENTS (
        STUDENT TEXT NOT NULL,
        STUDIED BOOLEAN,
        GRADE INTEGER NOT NULL,
        AGE INTEGER NOT NULL,
        SEX TEXT NOT NULL
    );
"""
cur.execute(create_table)

# insert_data = """
#     INSERT INTO STUDENTS
#     (STUDENT, STUDIED, GRADE, AGE, SEX)
#     VALUES
#     ('Lion-O', 'True', 85, 24, 'Male'),
#     ('Cheetara', 'True', 95, 22, 'Female'),
#     ('Mumm-Ra', 'False', 65, 153, 'Male'),
#     ('Snarf', 'False', 70, 15, 'Male'),
#     ('Panthro', 'True', 80, 30, 'Male');
# """
# cur.execute(insert_data)
conn.commit()

# NOTE(review): with the INSERT block commented out the table may be empty,
# in which case avg(AGE) is NULL and the Females lookup below raises
# IndexError — confirm the data has been seeded before running.
avg_age = """
    SELECT avg(AGE)
    FROM STUDENTS;
"""
avg_age_result = cur.execute(avg_age).fetchall()
print("Average Age:", avg_age_result[0][0])

select_female = """
    SELECT STUDENT
    FROM STUDENTS
    WHERE SEX = 'Female'
"""
females = cur.execute(select_female).fetchall()
print("Females:", females[0][0])

alphabetical = """
    SELECT *
    FROM STUDENTS
    ORDER BY STUDENT
"""
alphabetical_results = cur.execute(alphabetical).fetchall()
print('')
# FIX: iterate the fetched rows instead of a hard-coded range(0, 5) so the
# loop does not IndexError when the table holds fewer than five rows.
for row in alphabetical_results:
    print(row[0], row[1], row[2], row[3], row[4])

# FIX: the connection was never closed — release it explicitly.
conn.close()
|
[
"os.path.dirname",
"sqlite3.connect"
] |
[((332, 360), 'sqlite3.connect', 'sqlite3.connect', (['DB_FILEPATH'], {}), '(DB_FILEPATH)\n', (347, 360), False, 'import sqlite3\n'), ((116, 141), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (131, 141), False, 'import os\n')]
|
import numpy as np
import bokeh.plotting as bp

# Render one full period of cos(x) as an interactive HTML line chart.
bp.output_file("bokeh1.html")
samples = np.linspace(0, 2 * np.pi, 1024)
values = np.cos(samples)
fig = bp.figure()
fig.line(samples, values)
bp.show(fig)
|
[
"bokeh.plotting.figure",
"bokeh.plotting.output_file",
"bokeh.plotting.show",
"numpy.cos",
"numpy.linspace"
] |
[((47, 76), 'bokeh.plotting.output_file', 'bp.output_file', (['"""bokeh1.html"""'], {}), "('bokeh1.html')\n", (61, 76), True, 'import bokeh.plotting as bp\n'), ((81, 112), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(1024)'], {}), '(0, 2 * np.pi, 1024)\n', (92, 112), True, 'import numpy as np\n'), ((117, 126), 'numpy.cos', 'np.cos', (['x'], {}), '(x)\n', (123, 126), True, 'import numpy as np\n'), ((133, 144), 'bokeh.plotting.figure', 'bp.figure', ([], {}), '()\n', (142, 144), True, 'import bokeh.plotting as bp\n'), ((161, 173), 'bokeh.plotting.show', 'bp.show', (['fig'], {}), '(fig)\n', (168, 173), True, 'import bokeh.plotting as bp\n')]
|
# =============================================================================
# Copyright (c) 2016, Cisco Systems, Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# ==============================================================================
from flask import jsonify
from flask import g
from common import get_custom_command_profile
from common import get_custom_command_profile_list
from common import create_or_update_custom_command_profile
from common import delete_custom_command_profile
from database import DBSession
from api_constants import RESPONSE_STATUS
from api_constants import RESPONSE_STATUS_MESSAGE
from api_constants import RESPONSE_ENVELOPE
from api_constants import APIStatus
from api_constants import HTTP_OK
from api_constants import HTTP_MULTI_STATUS_ERROR
from api_utils import validate_url_parameters
from api_utils import convert_json_request_to_list
from api_utils import validate_required_keys_in_dict
from api_utils import validate_acceptable_keys_in_dict
from api_utils import convert_value_to_list
from utils import is_empty
from utils import get_acceptable_string
# Acceptable JSON keys
KEY_PROFILE_NAME = 'profile_name'
KEY_COMMAND_LIST = 'command_list'
KEY_CREATED_BY = 'created_by'
def api_create_custom_command_profiles(request):
    """Create or update custom command profiles from a JSON list.

    POST:
    http://localhost:5000/api/v1/custom_command_profiles
    BODY:
        [{
          "profile_name": "Profile_1",
          "command_list": ["show inventory"]
         },{
          "profile_name": "Profile_2",
          "command_list": ["show platform"]
        }]

    Returns a per-item status envelope; HTTP 200 when every item succeeded,
    HTTP 207 (multi-status) when at least one item failed.
    """
    rows = []
    db_session = DBSession()
    error_found = False

    json_list = convert_json_request_to_list(request)
    for data in json_list:
        row = dict()
        try:
            validate_required_keys_in_dict(data, [KEY_PROFILE_NAME])

            profile_name = get_acceptable_string(data[KEY_PROFILE_NAME])
            row[KEY_PROFILE_NAME] = profile_name

            if profile_name is None or len(profile_name) == 0:
                raise ValueError("Invalid custom command profile name '{}'.".format(data[KEY_PROFILE_NAME]))

            validate_acceptable_keys_in_dict(data, [KEY_PROFILE_NAME, KEY_COMMAND_LIST])

            # commands are stored as a single comma-separated string
            command_list = convert_value_to_list(data, KEY_COMMAND_LIST)
            command_list = None if command_list is None else ','.join(command_list)

            custom_command_profile = get_custom_command_profile(db_session, profile_name)
            # when updating an existing profile without a new command list,
            # keep the commands it already has
            if custom_command_profile is not None and command_list is None:
                command_list = custom_command_profile.command_list

            create_or_update_custom_command_profile(db_session=db_session,
                                                    profile_name=profile_name,
                                                    command_list=command_list,
                                                    created_by=g.api_user.username,
                                                    custom_command_profile=custom_command_profile)
            row[RESPONSE_STATUS] = APIStatus.SUCCESS
        except Exception as e:
            row[RESPONSE_STATUS] = APIStatus.FAILED
            # FIX: Exception.message does not exist on Python 3 (and was
            # deprecated since Python 2.6) — str(e) works everywhere.
            row[RESPONSE_STATUS_MESSAGE] = str(e)
            error_found = True

        rows.append(row)

    return jsonify(**{RESPONSE_ENVELOPE: {'custom_command_profile_list': rows}}), \
        (HTTP_OK if not error_found else HTTP_MULTI_STATUS_ERROR)
def api_get_custom_command_profiles(request):
    """Return one or all custom command profiles.

    GET:
    http://localhost:5000/api/v1/custom_command_profiles
    http://localhost:5000/api/v1/custom_command_profiles?profile_name=Profile_1

    Raises ValueError when a requested profile_name does not exist.
    """
    validate_url_parameters(request, [KEY_PROFILE_NAME])

    rows = []
    # FIX: DBSession was referenced without being called, binding the
    # session factory itself instead of a session instance — the sibling
    # handlers in this module all call DBSession().
    db_session = DBSession()

    profile_name = request.args.get(KEY_PROFILE_NAME)
    if profile_name:
        ccp = get_custom_command_profile(db_session, profile_name)
        if ccp is None:
            raise ValueError("Custom command profile '{}' does not exist in the database.".format(profile_name))
        ccps = [ccp]
    else:
        ccps = get_custom_command_profile_list(db_session)

    for ccp in ccps:
        if ccp is not None:
            row = dict()
            row[KEY_PROFILE_NAME] = ccp.profile_name
            # stored as a comma-separated string; empty maps to []
            row[KEY_COMMAND_LIST] = [] if is_empty(ccp.command_list) else ccp.command_list.split(',')
            row[KEY_CREATED_BY] = ccp.created_by
            rows.append(row)

    return jsonify(**{RESPONSE_ENVELOPE: {'custom_command_profile_list': rows}})
def api_delete_custom_command_profile(profile_name):
    """Delete the named custom command profile and report success."""
    session = DBSession()
    delete_custom_command_profile(session, profile_name)
    payload = {KEY_PROFILE_NAME: profile_name, RESPONSE_STATUS: APIStatus.SUCCESS}
    return jsonify(**{RESPONSE_ENVELOPE: payload})
|
[
"common.get_custom_command_profile_list",
"database.DBSession",
"utils.get_acceptable_string",
"api_utils.convert_value_to_list",
"utils.is_empty",
"common.delete_custom_command_profile",
"api_utils.validate_url_parameters",
"common.create_or_update_custom_command_profile",
"api_utils.convert_json_request_to_list",
"flask.jsonify",
"api_utils.validate_required_keys_in_dict",
"api_utils.validate_acceptable_keys_in_dict",
"common.get_custom_command_profile"
] |
[((2822, 2833), 'database.DBSession', 'DBSession', ([], {}), '()\n', (2831, 2833), False, 'from database import DBSession\n'), ((2875, 2912), 'api_utils.convert_json_request_to_list', 'convert_json_request_to_list', (['request'], {}), '(request)\n', (2903, 2912), False, 'from api_utils import convert_json_request_to_list\n'), ((4861, 4913), 'api_utils.validate_url_parameters', 'validate_url_parameters', (['request', '[KEY_PROFILE_NAME]'], {}), '(request, [KEY_PROFILE_NAME])\n', (4884, 4913), False, 'from api_utils import validate_url_parameters\n'), ((5647, 5716), 'flask.jsonify', 'jsonify', ([], {}), "(**{RESPONSE_ENVELOPE: {'custom_command_profile_list': rows}})\n", (5654, 5716), False, 'from flask import jsonify\n'), ((5789, 5800), 'database.DBSession', 'DBSession', ([], {}), '()\n', (5798, 5800), False, 'from database import DBSession\n'), ((5806, 5861), 'common.delete_custom_command_profile', 'delete_custom_command_profile', (['db_session', 'profile_name'], {}), '(db_session, profile_name)\n', (5835, 5861), False, 'from common import delete_custom_command_profile\n'), ((5874, 5978), 'flask.jsonify', 'jsonify', ([], {}), '(**{RESPONSE_ENVELOPE: {KEY_PROFILE_NAME: profile_name,\n RESPONSE_STATUS: APIStatus.SUCCESS}})\n', (5881, 5978), False, 'from flask import jsonify\n'), ((4498, 4567), 'flask.jsonify', 'jsonify', ([], {}), "(**{RESPONSE_ENVELOPE: {'custom_command_profile_list': rows}})\n", (4505, 4567), False, 'from flask import jsonify\n'), ((5046, 5098), 'common.get_custom_command_profile', 'get_custom_command_profile', (['db_session', 'profile_name'], {}), '(db_session, profile_name)\n', (5072, 5098), False, 'from common import get_custom_command_profile\n'), ((5283, 5326), 'common.get_custom_command_profile_list', 'get_custom_command_profile_list', (['db_session'], {}), '(db_session)\n', (5314, 5326), False, 'from common import get_custom_command_profile_list\n'), ((2987, 3043), 'api_utils.validate_required_keys_in_dict', 'validate_required_keys_in_dict', 
(['data', '[KEY_PROFILE_NAME]'], {}), '(data, [KEY_PROFILE_NAME])\n', (3017, 3043), False, 'from api_utils import validate_required_keys_in_dict\n'), ((3072, 3117), 'utils.get_acceptable_string', 'get_acceptable_string', (['data[KEY_PROFILE_NAME]'], {}), '(data[KEY_PROFILE_NAME])\n', (3093, 3117), False, 'from utils import get_acceptable_string\n'), ((3353, 3429), 'api_utils.validate_acceptable_keys_in_dict', 'validate_acceptable_keys_in_dict', (['data', '[KEY_PROFILE_NAME, KEY_COMMAND_LIST]'], {}), '(data, [KEY_PROFILE_NAME, KEY_COMMAND_LIST])\n', (3385, 3429), False, 'from api_utils import validate_acceptable_keys_in_dict\n'), ((3458, 3503), 'api_utils.convert_value_to_list', 'convert_value_to_list', (['data', 'KEY_COMMAND_LIST'], {}), '(data, KEY_COMMAND_LIST)\n', (3479, 3503), False, 'from api_utils import convert_value_to_list\n'), ((3626, 3678), 'common.get_custom_command_profile', 'get_custom_command_profile', (['db_session', 'profile_name'], {}), '(db_session, profile_name)\n', (3652, 3678), False, 'from common import get_custom_command_profile\n'), ((3835, 4040), 'common.create_or_update_custom_command_profile', 'create_or_update_custom_command_profile', ([], {'db_session': 'db_session', 'profile_name': 'profile_name', 'command_list': 'command_list', 'created_by': 'g.api_user.username', 'custom_command_profile': 'custom_command_profile'}), '(db_session=db_session, profile_name\n =profile_name, command_list=command_list, created_by=g.api_user.\n username, custom_command_profile=custom_command_profile)\n', (3874, 4040), False, 'from common import create_or_update_custom_command_profile\n'), ((5497, 5523), 'utils.is_empty', 'is_empty', (['ccp.command_list'], {}), '(ccp.command_list)\n', (5505, 5523), False, 'from utils import is_empty\n')]
|
import copy
from typing import Iterator
import torch
from scores import ContentScore, StyleScore
class StyleTransfer:
    """Neural style transfer driver.

    Optimizes a copy of the content image so that intermediate conv
    activations match content targets and style targets (as defined by the
    project's ContentScore/StyleScore modules).
    """

    @staticmethod
    def inject_scoring(base, content, style, content_layers, style_layers):
        # Rebuild `base` layer by layer, inserting scoring modules right
        # after the conv layers whose 1-based conv index was requested.
        model = torch.nn.Sequential()
        c_i = 0  # running count of Conv2d layers seen so far
        for name, layer in copy.deepcopy(base).named_children():
            if hasattr(layer, "inplace"):
                # in-place ops would overwrite activations that the
                # scoring modules still need
                layer.inplace = False
            model.add_module(name, layer)
            if isinstance(layer, torch.nn.Conv2d):
                c_i += 1
            else:
                continue
            if c_i in content_layers:
                # target = activations of the content image at this depth
                target = model(content).detach()
                model.add_module(f"content_score_{c_i}", ContentScore(target))
            if c_i in style_layers:
                # target = activations of the style image at this depth
                target = model(style).detach()
                model.add_module(f"style_score_{c_i}", StyleScore(target))

        def is_score(layer):
            return isinstance(layer, (ContentScore, StyleScore))

        # Trim layers after the last scoring module — they cannot affect
        # any score. NOTE(review): max() raises ValueError when both layer
        # sets are empty — confirm callers always request at least one.
        last_i = max(i for i, l in enumerate(model.children()) if is_score(l))
        model = model[:(last_i + 1)]
        return model

    def __call__(self,
                 base: torch.nn.Module,
                 content: torch.Tensor,
                 style: torch.Tensor,
                 content_layers: set[int],
                 style_layers: set[int],
                 style_weight=1e6) -> Iterator[torch.Tensor]:
        """Yield progressively stylized images.

        Infinite generator — the caller decides when to stop iterating.
        """
        x = content.clone()
        model = self.inject_scoring(base, content, style, content_layers, style_layers)
        optimizer = torch.optim.Adam([x.requires_grad_()], lr=0.03)
        # collect the scoring modules once so the per-step loss lookup
        # is cheap
        content_layers = [l for l in model.children() if isinstance(l, ContentScore)]
        style_layers = [l for l in model.children() if isinstance(l, StyleScore)]
        while True:
            # forward pass updates each scoring module's .score
            model(x)
            content_loss = torch.stack([l.score for l in content_layers]).mean()
            style_loss = torch.stack([l.score for l in style_layers]).mean()
            loss = style_loss * style_weight + content_loss
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            with torch.no_grad():
                # clamp pixel values back into the valid [0, 1] range
                x.clip_(0, 1)
            yield x
|
[
"copy.deepcopy",
"torch.stack",
"torch.nn.Sequential",
"scores.ContentScore",
"torch.no_grad",
"scores.StyleScore"
] |
[((233, 254), 'torch.nn.Sequential', 'torch.nn.Sequential', ([], {}), '()\n', (252, 254), False, 'import torch\n'), ((299, 318), 'copy.deepcopy', 'copy.deepcopy', (['base'], {}), '(base)\n', (312, 318), False, 'import copy\n'), ((2149, 2164), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2162, 2164), False, 'import torch\n'), ((724, 744), 'scores.ContentScore', 'ContentScore', (['target'], {}), '(target)\n', (736, 744), False, 'from scores import ContentScore, StyleScore\n'), ((885, 903), 'scores.StyleScore', 'StyleScore', (['target'], {}), '(target)\n', (895, 903), False, 'from scores import ContentScore, StyleScore\n'), ((1850, 1896), 'torch.stack', 'torch.stack', (['[l.score for l in content_layers]'], {}), '([l.score for l in content_layers])\n', (1861, 1896), False, 'import torch\n'), ((1929, 1973), 'torch.stack', 'torch.stack', (['[l.score for l in style_layers]'], {}), '([l.score for l in style_layers])\n', (1940, 1973), False, 'import torch\n')]
|
# This technical data was produced for the U. S. Government under Contract No. W15P7T-13-C-F600, and
# is subject to the Rights in Technical Data-Noncommercial Items clause at DFARS 252.227-7013 (FEB 2012)
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.utils import override_settings
from django.test.client import Client
from geoevents.core.tests import R3TestCaseMixin
from geoevents.notes.models import Note
@override_settings(AUTHENTICATION_BACKENDS=('django.contrib.auth.backends.ModelBackend',))
class SimpleTest(R3TestCaseMixin, TestCase):
fixtures = ['maps.json']
def setUp(self):
super(SimpleTest, self).setUp()
self.note = Note.objects.create(title='Test note',
content='This is a test note.',
public=True,
owner=self.admin_user,
content_object=self.admin_user,
)
def test_notes_view_notes(self):
'''Test if the notes page renders.'''
c = Client()
response = c.get(reverse('notes-view-notes'))
self.failUnlessEqual(response.status_code, 200)
def test_note_view_note(self):
'''Test if an individual note view renders.'''
c = Client()
response = c.get(reverse('notes-view-note', args=[self.note.id]))
self.failUnlessEqual(response.status_code, 200)
def test_notes_manage_note(self):
'''Test if the add new note view works.'''
c = Client()
c.login(username='admin', password='<PASSWORD>')
response = c.get(reverse('notes-manage-note'))
self.failUnlessEqual(response.status_code, 200)
def test_perms_notes_manage_note(self):
'''Test if the add new note view returns 302.'''
c = Client()
c.login(username='non_admin', password='<PASSWORD>')
response = c.get(reverse('notes-manage-note'))
self.failUnlessEqual(response.status_code, 302)
def test_notes_manage_note_id(self):
'''Test if the add new note view works.'''
c = Client()
c.login(username='admin', password='<PASSWORD>')
response = c.get(reverse('notes-manage-note-id', args=[self.note.id]))
self.failUnlessEqual(response.status_code, 200)
def test_perms_notes_manage_note_id(self):
'''Test if the add new note view returns 302.'''
c = Client()
c.login(username='non_admin', password='<PASSWORD>')
response = c.get(reverse('notes-manage-note-id', args=[self.note.id]))
self.failUnlessEqual(response.status_code, 302)
def test_notes_delete_note_id(self):
'''Test if the add new note view works.'''
c = Client()
c.login(username='admin', password='<PASSWORD>')
response = c.get(reverse('notes-delete-note-id', args=[self.note.id]))
self.failUnlessEqual(response.status_code, 200)
def test_perms_notes_delete_note_id(self):
'''Test if the delete note view returns 302.'''
c = Client()
c.login(username='non_admin', password='<PASSWORD>')
response = c.get(reverse('notes-delete-note-id', args=[self.note.id]))
self.failUnlessEqual(response.status_code, 302)
def test_perms_notes_manage_model_id(self):
'''Test if the manage view returns a 302 when model and object_id are provided.'''
c = Client()
c.login(username='non_admin', password='<PASSWORD>')
response = c.get(reverse('notes-manage-note-model-id', kwargs={'model': 'user', 'pk': 1}))
self.failUnlessEqual(response.status_code, 302)
c.logout()
c.login(username='admin', password='<PASSWORD>')
response = c.get(reverse('notes-manage-note-model-id', kwargs={'model': 'user', 'pk': 1}))
self.failUnlessEqual(response.status_code, 200)
def test_note_create_view(self):
'''Tests the notes create view.'''
c = Client()
c.login(username='admin', password='<PASSWORD>')
table_name = 'user'
user_table = ContentType.objects.get(model=table_name).id
object_id = 1
data = {'title': 'Testing',
'content': 'This is a test',
'public': True,
'content_type': user_table,
'object_id': object_id,
}
response = c.post(reverse('notes-manage-note'), data=data, follow=True)
self.failUnlessEqual(response.status_code, 200)
n = Note.objects.get(title='Testing')
self.assertEqual(n.content_type.id, user_table)
self.assertEqual(n.object_id, object_id)
n.delete()
for key in ['content_type', 'object_id']:
data.pop(key)
response = c.post(reverse('notes-manage-note-model-id', kwargs={'model': table_name, 'pk': object_id}),
data=data, follow=True)
self.failUnlessEqual(response.status_code, 200)
n = Note.objects.get(title='Testing')
self.assertEqual(n.content_type.id, user_table)
self.assertEqual(n.object_id, object_id)
def tearDown(self):
self.admin_user.delete()
self.non_admin_user.delete()
self.note.delete()
|
[
"django.core.urlresolvers.reverse",
"django.contrib.contenttypes.models.ContentType.objects.get",
"geoevents.notes.models.Note.objects.create",
"django.test.client.Client",
"django.test.utils.override_settings",
"geoevents.notes.models.Note.objects.get"
] |
[((566, 660), 'django.test.utils.override_settings', 'override_settings', ([], {'AUTHENTICATION_BACKENDS': "('django.contrib.auth.backends.ModelBackend',)"}), "(AUTHENTICATION_BACKENDS=(\n 'django.contrib.auth.backends.ModelBackend',))\n", (583, 660), False, 'from django.test.utils import override_settings\n'), ((813, 955), 'geoevents.notes.models.Note.objects.create', 'Note.objects.create', ([], {'title': '"""Test note"""', 'content': '"""This is a test note."""', 'public': '(True)', 'owner': 'self.admin_user', 'content_object': 'self.admin_user'}), "(title='Test note', content='This is a test note.',\n public=True, owner=self.admin_user, content_object=self.admin_user)\n", (832, 955), False, 'from geoevents.notes.models import Note\n'), ((1218, 1226), 'django.test.client.Client', 'Client', ([], {}), '()\n', (1224, 1226), False, 'from django.test.client import Client\n'), ((1440, 1448), 'django.test.client.Client', 'Client', ([], {}), '()\n', (1446, 1448), False, 'from django.test.client import Client\n'), ((1681, 1689), 'django.test.client.Client', 'Client', ([], {}), '()\n', (1687, 1689), False, 'from django.test.client import Client\n'), ((1972, 1980), 'django.test.client.Client', 'Client', ([], {}), '()\n', (1978, 1980), False, 'from django.test.client import Client\n'), ((2258, 2266), 'django.test.client.Client', 'Client', ([], {}), '()\n', (2264, 2266), False, 'from django.test.client import Client\n'), ((2576, 2584), 'django.test.client.Client', 'Client', ([], {}), '()\n', (2582, 2584), False, 'from django.test.client import Client\n'), ((2886, 2894), 'django.test.client.Client', 'Client', ([], {}), '()\n', (2892, 2894), False, 'from django.test.client import Client\n'), ((3203, 3211), 'django.test.client.Client', 'Client', ([], {}), '()\n', (3209, 3211), False, 'from django.test.client import Client\n'), ((3560, 3568), 'django.test.client.Client', 'Client', ([], {}), '()\n', (3566, 3568), False, 'from django.test.client import Client\n'), ((4110, 4118), 
'django.test.client.Client', 'Client', ([], {}), '()\n', (4116, 4118), False, 'from django.test.client import Client\n'), ((4648, 4681), 'geoevents.notes.models.Note.objects.get', 'Note.objects.get', ([], {'title': '"""Testing"""'}), "(title='Testing')\n", (4664, 4681), False, 'from geoevents.notes.models import Note\n'), ((5115, 5148), 'geoevents.notes.models.Note.objects.get', 'Note.objects.get', ([], {'title': '"""Testing"""'}), "(title='Testing')\n", (5131, 5148), False, 'from geoevents.notes.models import Note\n'), ((1252, 1279), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-view-notes"""'], {}), "('notes-view-notes')\n", (1259, 1279), False, 'from django.core.urlresolvers import reverse\n'), ((1474, 1521), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-view-note"""'], {'args': '[self.note.id]'}), "('notes-view-note', args=[self.note.id])\n", (1481, 1521), False, 'from django.core.urlresolvers import reverse\n'), ((1772, 1800), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-manage-note"""'], {}), "('notes-manage-note')\n", (1779, 1800), False, 'from django.core.urlresolvers import reverse\n'), ((2067, 2095), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-manage-note"""'], {}), "('notes-manage-note')\n", (2074, 2095), False, 'from django.core.urlresolvers import reverse\n'), ((2349, 2401), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-manage-note-id"""'], {'args': '[self.note.id]'}), "('notes-manage-note-id', args=[self.note.id])\n", (2356, 2401), False, 'from django.core.urlresolvers import reverse\n'), ((2671, 2723), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-manage-note-id"""'], {'args': '[self.note.id]'}), "('notes-manage-note-id', args=[self.note.id])\n", (2678, 2723), False, 'from django.core.urlresolvers import reverse\n'), ((2977, 3029), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-delete-note-id"""'], {'args': '[self.note.id]'}), "('notes-delete-note-id', 
args=[self.note.id])\n", (2984, 3029), False, 'from django.core.urlresolvers import reverse\n'), ((3298, 3350), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-delete-note-id"""'], {'args': '[self.note.id]'}), "('notes-delete-note-id', args=[self.note.id])\n", (3305, 3350), False, 'from django.core.urlresolvers import reverse\n'), ((3655, 3727), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-manage-note-model-id"""'], {'kwargs': "{'model': 'user', 'pk': 1}"}), "('notes-manage-note-model-id', kwargs={'model': 'user', 'pk': 1})\n", (3662, 3727), False, 'from django.core.urlresolvers import reverse\n'), ((3887, 3959), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-manage-note-model-id"""'], {'kwargs': "{'model': 'user', 'pk': 1}"}), "('notes-manage-note-model-id', kwargs={'model': 'user', 'pk': 1})\n", (3894, 3959), False, 'from django.core.urlresolvers import reverse\n'), ((4225, 4266), 'django.contrib.contenttypes.models.ContentType.objects.get', 'ContentType.objects.get', ([], {'model': 'table_name'}), '(model=table_name)\n', (4248, 4266), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((4525, 4553), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-manage-note"""'], {}), "('notes-manage-note')\n", (4532, 4553), False, 'from django.core.urlresolvers import reverse\n'), ((4911, 4999), 'django.core.urlresolvers.reverse', 'reverse', (['"""notes-manage-note-model-id"""'], {'kwargs': "{'model': table_name, 'pk': object_id}"}), "('notes-manage-note-model-id', kwargs={'model': table_name, 'pk':\n object_id})\n", (4918, 4999), False, 'from django.core.urlresolvers import reverse\n')]
|
"""The library of functions used by py-subscribers."""
from requests import get
def get_repos(api, user):
"""Get list of all repos for a given user.
Parameters
----------
api : str
The API's fully-qualified domain name.
user : str
The user of interest.
The parameters help to form the API endpoint.
Returns
-------
repos : list
The repos for the user of interest.
"""
repos = []
endpoint = "{}/users/{}/repos".format(api, user)
response = get(endpoint)
response_body = response.json()
for element in response_body:
repo = element["name"]
repos.append(repo)
return repos
def get_subscribers_for_one_repo(api, user, repo):
"""Get list of all subscribers for a given repo.
Parameters
----------
api : str
The API's fully-qualified domain name.
user : str
The user of interest.
repos : str
The repo of interest.
The parameters help to form the API endpoint.
Returns
-------
subscribers : dict
The subscribers for the repo of interest.
"""
subscribers = {}
endpoint = "{}/repos/{}/{}/subscribers".format(api, user, repo)
response = get(endpoint)
response_body = response.json()
subscribers[repo] = response_body
return subscribers
def get_subscribers_for_all_repos(api, user, repos):
"""Get list of all subscribers for a given list of repos.
Parameters
----------
api : str
The API's fully-qualified domain name.
user : str
The user of interest.
repos : list
The repos of interest.
The parameters help to form the API endpoint.
Returns
-------
subscribers : dict
The subscribers for the repos of interest.
"""
subscribers = {}
for repo in repos:
endpoint = "{}/repos/{}/{}/subscribers".format(api, user, repo)
response = get(endpoint)
response_body = response.json()
subscribers[repo] = response_body
return subscribers
def write_results(user=None, repo=None, subscribers=None):
"""Write results to disk.
Parameters
---------
user : str
The user of interest.
repo : str, optional
The repo of interest.
subscribers : str in JSON format
The subscribers associated with the user and repo.
The parameters help to form the filename.
Returns
-------
N/A
"""
if (user is not None) and (repo is not None):
filename = "subscribers-{0}-{1}.json".format(user, repo)
if (user is not None) and (repo is None):
filename = "subscribers-{0}.json".format(user)
with open(filename, "w") as output:
output.write(subscribers)
|
[
"requests.get"
] |
[((525, 538), 'requests.get', 'get', (['endpoint'], {}), '(endpoint)\n', (528, 538), False, 'from requests import get\n'), ((1240, 1253), 'requests.get', 'get', (['endpoint'], {}), '(endpoint)\n', (1243, 1253), False, 'from requests import get\n'), ((1951, 1964), 'requests.get', 'get', (['endpoint'], {}), '(endpoint)\n', (1954, 1964), False, 'from requests import get\n')]
|
import solver
print(solver.solve(10001))
|
[
"solver.solve"
] |
[((21, 40), 'solver.solve', 'solver.solve', (['(10001)'], {}), '(10001)\n', (33, 40), False, 'import solver\n')]
|
import pdb
import torch
import numpy as np
import time
from tools.utils import Progbar,AverageMeter
from matplotlib import pyplot as plt
from scipy.integrate import simps
def predict_set(nets, dataloader, runtime_params):
run_type = runtime_params['run_type']
#net = net.eval()
progbar = Progbar(len(dataloader.dataset), stateful_metrics=['run-type'])
batch_time = AverageMeter()
names = []
pred_landmarks = np.array([])
gt_landmarks = np.array([])
with torch.no_grad():
for i, (landmarks, imgs, img_paths) in enumerate(dataloader):
s_time = time.time()
imgs = imgs.cuda()
names.extend(img_paths)
net = nets[0]
if 'half' in runtime_params.values():
output = net(imgs.half())
else:
output = net(imgs)
output = output.cpu().numpy()
pred_landmarks = np.concatenate((pred_landmarks,output),axis=0)
gt_landmarks = np.concatenate((gt_landmarks,landmarks.data.numpy()),axis=0)
progbar.add(imgs.size(0), values=[('run-type', run_type)]) # ,('batch_time', batch_time.val)])
batch_time.update(time.time() - s_time)
if runtime_params['debug'] and i:
break
pred_landmarks = pred_landmarks.reshape((-1,28,2))
gt_landmarks = gt_landmarks.reshape((-1,28,2))
assert gt_landmarks.shape == pred_landmarks.shape
return gt_landmarks, gt_landmarks, names
def dist(gtLandmark, dist_type='centers', left_pt=0, right_pt=8, num_eye_pts=8):
if dist_type=='centers':
normDist = np.linalg.norm(np.mean(gtLandmark[left_pt:left_pt+num_eye_pts], axis=0) -
np.mean(gtLandmark[right_pt:right_pt+num_eye_pts], axis=0))
elif dist_type=='corners':
normDist = np.linalg.norm(gtLandmark[left_pt] - gtLandmark[right_pt+num_eye_pts/2])
elif dist_type=='diagonal':
height, width = np.max(gtLandmark, axis=0) - np.min(gtLandmark, axis=0)
normDist = np.sqrt(width**2 + height**2)
return normDist
def landmark_error(gtLandmarks, predict_Landmarks, dist_type='centers', show_results=False, verbose=False):
norm_errors = []
errors = []
for i in range(len(gtLandmarks)):
norm_dist = dist(gtLandmarks[i], dist_type=dist_type)
error = np.mean(np.sqrt(np.sum((gtLandmarks[i] - predict_Landmarks[i])**2, axis=1)))
norm_error = error/norm_dist
errors.append(error)
norm_errors.append(norm_error)
if verbose:
print('{0}: {1}'.format(i, error))
if verbose:
print("Image idxs sorted by error")
print(np.argsort(errors))
avg_error = np.mean(errors)
avg_norm_error = np.mean(norm_errors)
print("Average error: {0}".format(avg_error))
print("Average norm error: {0}".format(avg_norm_error))
return norm_errors, errors
def auc_error(errors, failure_threshold=0.03, step=0.0001, save_path='', showCurve=True):
nErrors = len(errors)
xAxis = list(np.arange(0., failure_threshold+step, step))
ced = [float(np.count_nonzero([errors <= x])) / nErrors for x in xAxis]
auc = simps(ced, x=xAxis) / failure_threshold
failure_rate = 1. - ced[-1]
print("AUC @ {0}: {1}".format(failure_threshold, auc))
print("Failure rate: {0}".format(failure_rate))
if showCurve:
plt.plot(xAxis, ced)
plt.savefig(save_path)
return auc, failure_rate
def evaluate(gt_landmarks, landmarks,th,save_path):
gt_landmarks = gt_landmarks.permute((1,0,2)).cpu().numpy()
landmarks = landmarks.permute((1, 0, 2)).cpu().numpy()
norm_errors, errors = landmark_error(gt_landmarks,landmarks)
auc, failure_rate = auc_error(errors,th,save_path=save_path)
return {'auc':auc,'failure_rate':failure_rate,"errors":errors}
|
[
"matplotlib.pyplot.savefig",
"numpy.sum",
"numpy.concatenate",
"matplotlib.pyplot.plot",
"numpy.count_nonzero",
"time.time",
"numpy.argsort",
"numpy.max",
"numpy.mean",
"numpy.array",
"numpy.arange",
"numpy.linalg.norm",
"numpy.min",
"torch.no_grad",
"scipy.integrate.simps",
"tools.utils.AverageMeter",
"numpy.sqrt"
] |
[((383, 397), 'tools.utils.AverageMeter', 'AverageMeter', ([], {}), '()\n', (395, 397), False, 'from tools.utils import Progbar, AverageMeter\n'), ((434, 446), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (442, 446), True, 'import numpy as np\n'), ((466, 478), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (474, 478), True, 'import numpy as np\n'), ((2713, 2728), 'numpy.mean', 'np.mean', (['errors'], {}), '(errors)\n', (2720, 2728), True, 'import numpy as np\n'), ((2750, 2770), 'numpy.mean', 'np.mean', (['norm_errors'], {}), '(norm_errors)\n', (2757, 2770), True, 'import numpy as np\n'), ((488, 503), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (501, 503), False, 'import torch\n'), ((3046, 3092), 'numpy.arange', 'np.arange', (['(0.0)', '(failure_threshold + step)', 'step'], {}), '(0.0, failure_threshold + step, step)\n', (3055, 3092), True, 'import numpy as np\n'), ((3177, 3196), 'scipy.integrate.simps', 'simps', (['ced'], {'x': 'xAxis'}), '(ced, x=xAxis)\n', (3182, 3196), False, 'from scipy.integrate import simps\n'), ((3387, 3407), 'matplotlib.pyplot.plot', 'plt.plot', (['xAxis', 'ced'], {}), '(xAxis, ced)\n', (3395, 3407), True, 'from matplotlib import pyplot as plt\n'), ((3416, 3438), 'matplotlib.pyplot.savefig', 'plt.savefig', (['save_path'], {}), '(save_path)\n', (3427, 3438), True, 'from matplotlib import pyplot as plt\n'), ((596, 607), 'time.time', 'time.time', ([], {}), '()\n', (605, 607), False, 'import time\n'), ((919, 967), 'numpy.concatenate', 'np.concatenate', (['(pred_landmarks, output)'], {'axis': '(0)'}), '((pred_landmarks, output), axis=0)\n', (933, 967), True, 'import numpy as np\n'), ((1837, 1913), 'numpy.linalg.norm', 'np.linalg.norm', (['(gtLandmark[left_pt] - gtLandmark[right_pt + num_eye_pts / 2])'], {}), '(gtLandmark[left_pt] - gtLandmark[right_pt + num_eye_pts / 2])\n', (1851, 1913), True, 'import numpy as np\n'), ((2677, 2695), 'numpy.argsort', 'np.argsort', (['errors'], {}), '(errors)\n', (2687, 2695), True, 'import 
numpy as np\n'), ((1634, 1692), 'numpy.mean', 'np.mean', (['gtLandmark[left_pt:left_pt + num_eye_pts]'], {'axis': '(0)'}), '(gtLandmark[left_pt:left_pt + num_eye_pts], axis=0)\n', (1641, 1692), True, 'import numpy as np\n'), ((1727, 1787), 'numpy.mean', 'np.mean', (['gtLandmark[right_pt:right_pt + num_eye_pts]'], {'axis': '(0)'}), '(gtLandmark[right_pt:right_pt + num_eye_pts], axis=0)\n', (1734, 1787), True, 'import numpy as np\n'), ((2041, 2074), 'numpy.sqrt', 'np.sqrt', (['(width ** 2 + height ** 2)'], {}), '(width ** 2 + height ** 2)\n', (2048, 2074), True, 'import numpy as np\n'), ((2369, 2429), 'numpy.sum', 'np.sum', (['((gtLandmarks[i] - predict_Landmarks[i]) ** 2)'], {'axis': '(1)'}), '((gtLandmarks[i] - predict_Landmarks[i]) ** 2, axis=1)\n', (2375, 2429), True, 'import numpy as np\n'), ((3108, 3139), 'numpy.count_nonzero', 'np.count_nonzero', (['[errors <= x]'], {}), '([errors <= x])\n', (3124, 3139), True, 'import numpy as np\n'), ((1192, 1203), 'time.time', 'time.time', ([], {}), '()\n', (1201, 1203), False, 'import time\n'), ((1966, 1992), 'numpy.max', 'np.max', (['gtLandmark'], {'axis': '(0)'}), '(gtLandmark, axis=0)\n', (1972, 1992), True, 'import numpy as np\n'), ((1995, 2021), 'numpy.min', 'np.min', (['gtLandmark'], {'axis': '(0)'}), '(gtLandmark, axis=0)\n', (2001, 2021), True, 'import numpy as np\n')]
|
from celery import shared_task
from celery_progress.backend import ProgressRecorder
from time import sleep
@shared_task(bind=True)
def go_to_sleep(self, duration):
progress_recorder = ProgressRecorder(self)
for i in range(100):
sleep(duration)
progress_recorder.set_progress(i + 1, 100, f'On iteration {i}')
return 'Done'
|
[
"celery_progress.backend.ProgressRecorder",
"celery.shared_task",
"time.sleep"
] |
[((110, 132), 'celery.shared_task', 'shared_task', ([], {'bind': '(True)'}), '(bind=True)\n', (121, 132), False, 'from celery import shared_task\n'), ((190, 212), 'celery_progress.backend.ProgressRecorder', 'ProgressRecorder', (['self'], {}), '(self)\n', (206, 212), False, 'from celery_progress.backend import ProgressRecorder\n'), ((246, 261), 'time.sleep', 'sleep', (['duration'], {}), '(duration)\n', (251, 261), False, 'from time import sleep\n')]
|
# -- encoding: UTF-8 --
from datetime import time
import babel.dates as dates
import pytest
@pytest.mark.parametrize("locale, time, expected_period_id", [
("de", time(7, 42), "morning1"), # (from, before)
("de", time(3, 11), "night1"), # (after, before)
("fi", time(0), "midnight"), # (at)
("en_US", time(12), "noon"), # (at)
("agq", time(10), "am"), # no periods defined
("agq", time(22), "pm"), # no periods defined
("am", time(14), "afternoon1"), # (before, after)
])
def test_day_period_rules(locale, time, expected_period_id):
assert dates.get_period_id(time, locale=locale) == expected_period_id
|
[
"datetime.time",
"babel.dates.get_period_id"
] |
[((581, 621), 'babel.dates.get_period_id', 'dates.get_period_id', (['time'], {'locale': 'locale'}), '(time, locale=locale)\n', (600, 621), True, 'import babel.dates as dates\n'), ((169, 180), 'datetime.time', 'time', (['(7)', '(42)'], {}), '(7, 42)\n', (173, 180), False, 'from datetime import time\n'), ((224, 235), 'datetime.time', 'time', (['(3)', '(11)'], {}), '(3, 11)\n', (228, 235), False, 'from datetime import time\n'), ((278, 285), 'datetime.time', 'time', (['(0)'], {}), '(0)\n', (282, 285), False, 'from datetime import time\n'), ((322, 330), 'datetime.time', 'time', (['(12)'], {}), '(12)\n', (326, 330), False, 'from datetime import time\n'), ((361, 369), 'datetime.time', 'time', (['(10)'], {}), '(10)\n', (365, 369), False, 'from datetime import time\n'), ((412, 420), 'datetime.time', 'time', (['(22)'], {}), '(22)\n', (416, 420), False, 'from datetime import time\n'), ((462, 470), 'datetime.time', 'time', (['(14)'], {}), '(14)\n', (466, 470), False, 'from datetime import time\n')]
|
from django.urls import path
from reserva.core.views.name_application import check, done, pay, send, success
app_name = "name_application"
urlpatterns = [
path("check", check, name="check"),
path("send", send, name="send"),
path("pay", pay, name="pay"),
path("done", done, name="done"),
path("success", success, name="success"),
]
|
[
"django.urls.path"
] |
[((162, 196), 'django.urls.path', 'path', (['"""check"""', 'check'], {'name': '"""check"""'}), "('check', check, name='check')\n", (166, 196), False, 'from django.urls import path\n'), ((202, 233), 'django.urls.path', 'path', (['"""send"""', 'send'], {'name': '"""send"""'}), "('send', send, name='send')\n", (206, 233), False, 'from django.urls import path\n'), ((239, 267), 'django.urls.path', 'path', (['"""pay"""', 'pay'], {'name': '"""pay"""'}), "('pay', pay, name='pay')\n", (243, 267), False, 'from django.urls import path\n'), ((273, 304), 'django.urls.path', 'path', (['"""done"""', 'done'], {'name': '"""done"""'}), "('done', done, name='done')\n", (277, 304), False, 'from django.urls import path\n'), ((310, 350), 'django.urls.path', 'path', (['"""success"""', 'success'], {'name': '"""success"""'}), "('success', success, name='success')\n", (314, 350), False, 'from django.urls import path\n')]
|
import time
import cv2
import sys
from rasp import camera_detect
from rasp import client_socket
if __name__ == "__main__":
image_detect = camera_detect.run(sys.argv[1])
index = 1
try:
while True:
img = next(image_detect)
print('{}> image detected'.format(index), flush=True)
cv2.imshow('{}> Detecting Image'.format(index), img)
cv2.waitKey(0)
cv2.destroyAllWindows()
print('{}> send img to server....'.format(index), flush=True)
label = client_socket.run('127.0.0.1', 9000, img)
print('{}> received label...'.format(index), flush=True)
print('{}> label = {}'.format(index, label))
print('-------------------------------', flush=True)
time.sleep(1)
index = index + 1
except StopIteration:
pass
finally:
del image_detect
|
[
"rasp.client_socket.run",
"cv2.waitKey",
"time.sleep",
"rasp.camera_detect.run",
"cv2.destroyAllWindows"
] |
[((144, 174), 'rasp.camera_detect.run', 'camera_detect.run', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (161, 174), False, 'from rasp import camera_detect\n'), ((399, 413), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (410, 413), False, 'import cv2\n'), ((426, 449), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (447, 449), False, 'import cv2\n'), ((545, 586), 'rasp.client_socket.run', 'client_socket.run', (['"""127.0.0.1"""', '(9000)', 'img'], {}), "('127.0.0.1', 9000, img)\n", (562, 586), False, 'from rasp import client_socket\n'), ((790, 803), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (800, 803), False, 'import time\n')]
|
# Copyright 2016-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
from __future__ import print_function
from awsglue.context import GlueContext
from hive_metastore_migration import *
CONNECTION_TYPE_NAME = 'com.amazonaws.services.glue.connections.DataCatalogConnection'
def transform_catalog_to_df(dyf):
return dyf.toDF()
def datacatalog_migrate_to_s3(databases, tables, partitions, output_path):
# load
databases.write.format('json').mode('overwrite').save(output_path + 'databases')
tables.write.format('json').mode('overwrite').save(output_path + 'tables')
partitions.write.format('json').mode('overwrite').save(output_path + 'partitions')
# apply hard-coded schema on dataframes, ensure schema is consistent for transformations
def change_schemas(sql_context, databases, tables, partitions):
databases = sql_context.read.json(databases.toJSON(), schema=DATACATALOG_DATABASE_SCHEMA)
tables = sql_context.read.json(tables.toJSON(), schema=DATACATALOG_TABLE_SCHEMA)
partitions = sql_context.read.json(partitions.toJSON(), schema=DATACATALOG_PARTITION_SCHEMA)
return (databases, tables, partitions)
def datacatalog_migrate_to_hive_metastore(sc, sql_context, databases, tables, partitions, connection):
hive_metastore = HiveMetastore(connection, sql_context)
transform_databases_tables_partitions(sc, sql_context, hive_metastore, databases, tables, partitions)
hive_metastore.export_to_metastore()
def read_databases_from_catalog(sql_context, glue_context, datacatalog_name, database_arr, region):
databases = None
tables = None
partitions = None
for database in database_arr:
dyf = glue_context.create_dynamic_frame.from_options(
connection_type=CONNECTION_TYPE_NAME,
connection_options={'catalog.name': datacatalog_name,
'catalog.database': database,
'catalog.region': region})
df = transform_catalog_to_df(dyf)
# filter into databases, tables, and partitions
dc_databases_no_schema = df.where('type = "database"')
dc_tables_no_schema = df.where('type = "table"')
dc_partitions_no_schema = df.where('type = "partition"')
# apply schema to dataframes
(dc_databases, dc_tables, dc_partitions) = \
change_schemas(sql_context, dc_databases_no_schema, dc_tables_no_schema, dc_partitions_no_schema)
(a_databases, a_tables, a_partitions) = \
transform_items_to_item(dc_databases=dc_databases, dc_tables=dc_tables, dc_partitions=dc_partitions)
databases = databases.union(a_databases) if databases else a_databases
tables = tables.union(a_tables) if tables else a_tables
partitions = partitions.union(a_partitions) if partitions else a_partitions
return (databases, tables, partitions)
def main():
to_s3 = 'to-s3'
to_jdbc = 'to-jdbc'
parser = argparse.ArgumentParser(prog=sys.argv[0])
parser.add_argument('-m', '--mode', required=True, choices=[to_s3, to_jdbc], help='Choose to migrate from datacatalog to s3 or to metastore')
parser.add_argument('--database-names', required=True, help='Semicolon-separated list of names of database in Datacatalog to export')
parser.add_argument('-o', '--output-path', required=False, help='Output path, either local directory or S3 path')
parser.add_argument('-c', '--connection-name', required=False, help='Glue Connection name for Hive metastore JDBC connection')
parser.add_argument('-R', '--region', required=False, help='AWS region of source Glue DataCatalog, default to "us-east-1"')
options = get_options(parser, sys.argv)
if options['mode'] == to_s3:
validate_options_in_mode(
options=options, mode=to_s3,
required_options=['output_path'],
not_allowed_options=['connection_name']
)
elif options['mode'] == to_jdbc:
validate_options_in_mode(
options=options, mode=to_jdbc,
required_options=['connection_name'],
not_allowed_options=['output_path']
)
else:
raise AssertionError('unknown mode ' + options['mode'])
validate_aws_regions(options['region'])
# spark env
(conf, sc, sql_context) = get_spark_env()
glue_context = GlueContext(sc)
# extract from datacatalog reader
database_arr = options['database_names'].split(';')
(databases, tables, partitions) = read_databases_from_catalog(
sql_context=sql_context,
glue_context=glue_context,
datacatalog_name='datacatalog',
database_arr=database_arr,
region=options.get('region') or 'us-east-1'
)
if options['mode'] == to_s3:
output_path = get_output_dir(options['output_path'])
datacatalog_migrate_to_s3(
databases=databases,
tables=tables,
partitions=partitions,
output_path=output_path
)
elif options['mode'] == to_jdbc:
connection_name = options['connection_name']
datacatalog_migrate_to_hive_metastore(
sc=sc,
sql_context=sql_context,
databases=databases,
tables=tables,
partitions=partitions,
connection=glue_context.extract_jdbc_conf(connection_name)
)
if __name__ == '__main__':
main()
|
[
"awsglue.context.GlueContext"
] |
[((4386, 4401), 'awsglue.context.GlueContext', 'GlueContext', (['sc'], {}), '(sc)\n', (4397, 4401), False, 'from awsglue.context import GlueContext\n')]
|
import csv
from players.models import Joueur
def run():
file = open('new_data.csv')
reader = csv.reader(file)
Joueur.objects.all().delete()
for row in reader:
print(row)
player, is_created = Joueur.objects.get_or_create(nom=row[1], prenom=row[2], adresse=row[3], code_postal=row[4],
intra_extra=row[5], age=row[6], naissance=row[7], licence=row[8], genre=row[9], taille=row[10], prix=row[11], categorie=row[12])
|
[
"players.models.Joueur.objects.all",
"csv.reader",
"players.models.Joueur.objects.get_or_create"
] |
[((102, 118), 'csv.reader', 'csv.reader', (['file'], {}), '(file)\n', (112, 118), False, 'import csv\n'), ((227, 460), 'players.models.Joueur.objects.get_or_create', 'Joueur.objects.get_or_create', ([], {'nom': 'row[1]', 'prenom': 'row[2]', 'adresse': 'row[3]', 'code_postal': 'row[4]', 'intra_extra': 'row[5]', 'age': 'row[6]', 'naissance': 'row[7]', 'licence': 'row[8]', 'genre': 'row[9]', 'taille': 'row[10]', 'prix': 'row[11]', 'categorie': 'row[12]'}), '(nom=row[1], prenom=row[2], adresse=row[3],\n code_postal=row[4], intra_extra=row[5], age=row[6], naissance=row[7],\n licence=row[8], genre=row[9], taille=row[10], prix=row[11], categorie=\n row[12])\n', (255, 460), False, 'from players.models import Joueur\n'), ((124, 144), 'players.models.Joueur.objects.all', 'Joueur.objects.all', ([], {}), '()\n', (142, 144), False, 'from players.models import Joueur\n')]
|
from django.urls import path
from django.views.generic import TemplateView
from . import views
app_name = 'todo'
urlpatterns = [
path('', views.getHome, name='home'),
path('about/', views.AboutView.as_view(), name='about'),
path('task/', views.PriorityTaskView.as_view(), name='task'),
path('add/', views.TaskAddView.as_view(), name='add'),
path('edit/<int:pk>/', views.TaskEditView.as_view(), name='edit'),
path('delete/<int:pk>/', views.TaskDeleteView.as_view(), name='delete'),
path('complete/<int:pk>/', views.completeTask, name='complete'),
path('signup/', views.CreateUserView.as_view(), name='signup'),
path('task-list/', views.TaskListView.as_view(), name='task-list'),
path('api/task/priority', views.PriorityGraphView.as_view(), name='api-priority-data'),
path('analytics/priority', TemplateView.as_view(template_name='analytics/task_priority.html')),
]
|
[
"django.views.generic.TemplateView.as_view",
"django.urls.path"
] |
[((134, 170), 'django.urls.path', 'path', (['""""""', 'views.getHome'], {'name': '"""home"""'}), "('', views.getHome, name='home')\n", (138, 170), False, 'from django.urls import path\n'), ((510, 573), 'django.urls.path', 'path', (['"""complete/<int:pk>/"""', 'views.completeTask'], {'name': '"""complete"""'}), "('complete/<int:pk>/', views.completeTask, name='complete')\n", (514, 573), False, 'from django.urls import path\n'), ((838, 904), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""analytics/task_priority.html"""'}), "(template_name='analytics/task_priority.html')\n", (858, 904), False, 'from django.views.generic import TemplateView\n')]
|
import pytest
import crystalball
@pytest.mark.parametrize('json_text', [
'{"foo": "bar"}', # Normal JSON
'[{"foo": "bar"}]', # Object within a list
'{"baz": {"foo": "bar"},}', # Object within another
])
def test_finds_json_keys(json_text):
"""
Values can be extracted from JSON-like documents
"""
cb = crystalball.parse(json_text)
assert cb.first('foo') == 'bar'
|
[
"pytest.mark.parametrize",
"crystalball.parse"
] |
[((37, 145), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""json_text"""', '[\'{"foo": "bar"}\', \'[{"foo": "bar"}]\', \'{"baz": {"foo": "bar"},}\']'], {}), '(\'json_text\', [\'{"foo": "bar"}\', \'[{"foo": "bar"}]\',\n \'{"baz": {"foo": "bar"},}\'])\n', (60, 145), False, 'import pytest\n'), ((336, 364), 'crystalball.parse', 'crystalball.parse', (['json_text'], {}), '(json_text)\n', (353, 364), False, 'import crystalball\n')]
|
########################################
# MIT License
#
# Copyright (c) 2020 <NAME>
########################################
'''
Define the data_types.for the variables to use in the package.
'''
import ctypes
import numpy as np
__all__ = []
# Types for numpy.ndarray objects
cpu_float = np.float64 # double
cpu_complex = np.complex128 # complex double
cpu_int = np.int32 # int
# unsigned integer (char does not seem to be allowed in CUDA), and int16 is too small (must not be equal to cpu_int)
cpu_bool = np.uint32
cpu_real_bool = np.bool # bool (not allowed in PyOpenCL)
def array_float(*args, **kwargs):
return np.array(*args, dtype=cpu_float, **kwargs)
def array_int(*args, **kwargs):
return np.array(*args, dtype=cpu_int, **kwargs)
def empty_float(*args, **kwargs):
return np.empty(*args, dtype=cpu_float, **kwargs)
def empty_int(*args, **kwargs):
return np.empty(*args, dtype=cpu_int, **kwargs)
def fromiter_float(i):
return np.fromiter(i, dtype=cpu_float)
def fromiter_int(i):
return np.fromiter(i, dtype=cpu_int)
def full_float(n, f):
return np.full(n, f, dtype=cpu_float)
def full_int(n, i):
return np.full(n, i, dtype=cpu_int)
# Expose ctypes objects
c_double = ctypes.c_double
c_double_p = ctypes.POINTER(c_double)
c_int = ctypes.c_int
c_int_p = ctypes.POINTER(c_int)
py_object = ctypes.py_object
# Functions to deal with ctypes and numpy value types
def as_c_double(*args):
'''
Transform arguments to a :mod:`ctypes` "double".
'''
if len(args) == 1:
return c_double(*args)
else:
return tuple(c_double(a) for a in args)
def as_double(*args):
'''
Transform arguments to a :mod:`numpy` "double".
'''
if len(args) == 1:
return cpu_float(*args)
else:
return tuple(cpu_float(a) for a in args)
def data_as_c_double(*args):
'''
Transform arguments to a :mod:`ctypes` "double*".
'''
if len(args) == 1:
return args[0].ctypes.data_as(c_double_p)
else:
return tuple(a.ctypes.data_as(c_double_p) for a in args)
def as_c_integer(*args):
'''
Transform arguments to a :mode:`ctypes` "int".
'''
if len(args) == 1:
return c_int(*args)
else:
return tuple(c_int(a) for a in args)
def as_integer(*args):
'''
Transform arguments to a :mod:`numpy` "integral" type.
'''
if len(args) == 1:
return cpu_int(*args)
else:
return tuple(cpu_int(a) for a in args)
def data_as_c_int(*args):
'''
Transform arguments to a :mod:`ctypes` "int*".
'''
if len(args) == 1:
return args[0].ctypes.data_as(c_int_p)
else:
return tuple(a.ctypes.data_as(c_int_p) for a in args)
def as_py_object(*args):
'''
Transform arguments to a :mod:`ctypes` "PyObject".
'''
if len(args) == 1:
return py_object(*args)
else:
return tuple(py_object(a) for a in args)
|
[
"numpy.full",
"numpy.empty",
"numpy.array",
"numpy.fromiter",
"ctypes.POINTER"
] |
[((1258, 1282), 'ctypes.POINTER', 'ctypes.POINTER', (['c_double'], {}), '(c_double)\n', (1272, 1282), False, 'import ctypes\n'), ((1314, 1335), 'ctypes.POINTER', 'ctypes.POINTER', (['c_int'], {}), '(c_int)\n', (1328, 1335), False, 'import ctypes\n'), ((627, 669), 'numpy.array', 'np.array', (['*args'], {'dtype': 'cpu_float'}), '(*args, dtype=cpu_float, **kwargs)\n', (635, 669), True, 'import numpy as np\n'), ((715, 755), 'numpy.array', 'np.array', (['*args'], {'dtype': 'cpu_int'}), '(*args, dtype=cpu_int, **kwargs)\n', (723, 755), True, 'import numpy as np\n'), ((803, 845), 'numpy.empty', 'np.empty', (['*args'], {'dtype': 'cpu_float'}), '(*args, dtype=cpu_float, **kwargs)\n', (811, 845), True, 'import numpy as np\n'), ((891, 931), 'numpy.empty', 'np.empty', (['*args'], {'dtype': 'cpu_int'}), '(*args, dtype=cpu_int, **kwargs)\n', (899, 931), True, 'import numpy as np\n'), ((968, 999), 'numpy.fromiter', 'np.fromiter', (['i'], {'dtype': 'cpu_float'}), '(i, dtype=cpu_float)\n', (979, 999), True, 'import numpy as np\n'), ((1034, 1063), 'numpy.fromiter', 'np.fromiter', (['i'], {'dtype': 'cpu_int'}), '(i, dtype=cpu_int)\n', (1045, 1063), True, 'import numpy as np\n'), ((1099, 1129), 'numpy.full', 'np.full', (['n', 'f'], {'dtype': 'cpu_float'}), '(n, f, dtype=cpu_float)\n', (1106, 1129), True, 'import numpy as np\n'), ((1163, 1191), 'numpy.full', 'np.full', (['n', 'i'], {'dtype': 'cpu_int'}), '(n, i, dtype=cpu_int)\n', (1170, 1191), True, 'import numpy as np\n')]
|
import pickle
from bs4 import BeautifulSoup
soup = BeautifulSoup(open('data/html/四角号码检字表(勘误版).htm', encoding='gb18030'))
section = soup.body.div
print(section['class'])
# get printable text, instead of using element for more quickly
raw_data = section.text
data_list = raw_data.split()
print(len(data_list))
clean_data_list = [i for i in data_list if i.startswith('*')]
print(len(clean_data_list))
print('-' * 20)
dict_data = {}
duplicate_coding_char = []
for item in clean_data_list:
try:
raw_code, chars = item.split(':')
assert raw_code.startswith('*')
assert len(chars)
assert ' ' not in chars
code = raw_code[1:]
char_list = [i for i in chars]
assert len(char_list)
for char in char_list:
if char in dict_data:
duplicate_coding_char.append(char)
dict_data[char] = code
print((code, char_list))
except:
print('*' * 20)
print(item)
raise
with open('data.pkl', 'wb') as fd:
pickle.dump(dict_data, fd)
print('{} has at least two different coding'.format(duplicate_coding_char))
|
[
"pickle.dump"
] |
[((1044, 1070), 'pickle.dump', 'pickle.dump', (['dict_data', 'fd'], {}), '(dict_data, fd)\n', (1055, 1070), False, 'import pickle\n')]
|
from setuptools import setup, find_packages
requires = ['requests==2.18.1']
setup(name='pywordlist',
version='0.0.2',
description='Random phrase generator. https://xkcd.com/936/',
url='http://github.com/f0rkz/pywordlist',
author='f0rkz',
author_email='<EMAIL>',
license='MIT',
packages=['pywordlist'],
install_requires=[requires],
zip_safe=True)
|
[
"setuptools.setup"
] |
[((78, 367), 'setuptools.setup', 'setup', ([], {'name': '"""pywordlist"""', 'version': '"""0.0.2"""', 'description': '"""Random phrase generator. https://xkcd.com/936/"""', 'url': '"""http://github.com/f0rkz/pywordlist"""', 'author': '"""f0rkz"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'packages': "['pywordlist']", 'install_requires': '[requires]', 'zip_safe': '(True)'}), "(name='pywordlist', version='0.0.2', description=\n 'Random phrase generator. https://xkcd.com/936/', url=\n 'http://github.com/f0rkz/pywordlist', author='f0rkz', author_email=\n '<EMAIL>', license='MIT', packages=['pywordlist'], install_requires=[\n requires], zip_safe=True)\n", (83, 367), False, 'from setuptools import setup, find_packages\n')]
|
# Copyright (C) 2020 <NAME>.
# All rights reserved.
import asyncio
import glob
import os
import subprocess
import time
import requests
from bs4 import BeautifulSoup
from hachoir.metadata import extractMetadata
from hachoir.parser import createParser
from telethon import events
from telethon.errors import (
MessageEmptyError,
MessageNotModifiedError,
MessageTooLongError,
)
from telethon.errors.rpcerrorlist import YouBlockedUserError
from telethon.tl.types import DocumentAttributeVideo
from userbot import CMD_HELP, bot
from userbot.events import register
from userbot.utils import progress
# For song module
def get_readable_time(seconds: int) -> str:
count = 0
up_time = ""
time_list = []
time_suffix_list = ["s", "m", "h", "days"]
while count < 4:
count += 1
if count < 3:
remainder, result = divmod(seconds, 60)
else:
remainder, result = divmod(seconds, 24)
if seconds == 0 and remainder == 0:
break
time_list.append(int(result))
seconds = int(remainder)
for x in range(len(time_list)):
time_list[x] = str(time_list[x]) + time_suffix_list[x]
if len(time_list) == 4:
up_time += time_list.pop() + ", "
time_list.reverse()
up_time += ":".join(time_list)
return up_time
def getmusic(get, DEFAULT_AUDIO_QUALITY):
search = get
headers = {'User-Agent': 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'}
html = requests.get('https://www.youtube.com/results?search_query='+search, headers=headers).text
soup = BeautifulSoup(html, 'html.parser')
for link in soup.find_all('a'):
if '/watch?v=' in link.get('href'):
# May change when Youtube Website may get updated in the future.
video_link = link.get('href')
break
video_link = 'http://www.youtube.com/'+video_link
command = ('youtube-dl --extract-audio --audio-format mp3 --audio-quality ' +DEFAULT_AUDIO_QUALITY + ' ' + video_link)
os.system(command)
# For getvideosong
def getmusicvideo(cat):
search = cat
headers = {
"User-Agent": "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
}
html = requests.get(
"https://www.youtube.com/results?search_query=" +
search,
headers=headers).text
soup = BeautifulSoup(html, "html.parser")
for link in soup.find_all("a"):
if "/watch?v=" in link.get("href"):
# May change when Youtube Website may get updated in the future.
video_link = link.get("href")
break
video_link = "http://www.youtube.com/" + video_link
command = 'youtube-dl -f "[filesize<20M]" ' + video_link
os.system(command)
@register(outgoing=True, pattern=r"^\.song (.*)")
async def _(event):
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
reply = await event.get_reply_message()
if event.pattern_match.group(1):
query = event.pattern_match.group(1)
await event.edit("`Wait..! I am finding your song..`")
elif reply.message:
query = reply.message
await event.edit("`Wait..! I am finding your song..`")
else:
await event.edit("`What I am Supposed to find?`")
return
getmusic(str(query),"320k")
l = glob.glob("*.mp3")
loa = l[0]
await event.edit("`Yeah.. Uploading your song..`")
c_time = time.time()
await event.client.send_file(
event.chat_id,
loa,
force_document=True,
allow_cache=False,
caption=query,
reply_to=reply_to_id,
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, event, c_time, "[UPLOAD]", loa)
),
)
await event.delete()
os.system("rm -rf *.mp3")
subprocess.check_output("rm -rf *.mp3",shell=True)
@register(outgoing=True, pattern=r"^\.vsong(?: |$)(.*)")
async def _(event):
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
reply = await event.get_reply_message()
if event.pattern_match.group(1):
query = event.pattern_match.group(1)
await event.edit("`Wait..! I am finding your videosong..`")
elif reply.message:
query = reply.message
await event.edit("`Wait..! I am finding your videosong..`")
else:
await event.edit("`What I am Supposed to find?`")
return
getmusicvideo(query)
l = glob.glob(("*.mp4")) + glob.glob(("*.mkv")) + glob.glob(("*.webm"))
if l:
await event.edit("`Yeah..! i found something..`")
else:
await event.edit(f"Sorry..! i can't find anything with `{query}`")
loa = l[0]
metadata = extractMetadata(createParser(loa))
duration = 0
width = 0
height = 0
if metadata.has("duration"):
duration = metadata.get("duration").seconds
if metadata.has("width"):
width = metadata.get("width")
if metadata.has("height"):
height = metadata.get("height")
await event.edit("`Uploading video.. Please wait..`")
c_time = time.time()
await event.client.send_file(
event.chat_id,
loa,
force_document=True,
allow_cache=False,
caption=query,
supports_streaming=True,
reply_to=reply_to_id,
attributes=[
DocumentAttributeVideo(
duration=duration,
w=width,
h=height,
round_message=False,
supports_streaming=True,
)
],
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, event, c_time, "[UPLOAD]", loa)
),
)
await event.delete()
os.system("rm -rf *.mkv")
os.system("rm -rf *.mp4")
os.system("rm -rf *.webm")
@register(outgoing=True, pattern=r"^\.smd(?: |$)(.*)")
async def _(event):
if event.fwd_from:
return
link = event.pattern_match.group(1)
chat = "@SpotifyMusicDownloaderBot"
await event.edit("```Getting Your Music```")
async with bot.conversation(chat) as conv:
await asyncio.sleep(2)
await event.edit("`Downloading music taking some times, Stay Tuned.....`")
try:
response = conv.wait_event(
events.NewMessage(incoming=True, from_users=752979930)
)
await bot.send_message(chat, link)
respond = await response
await bot.send_read_acknowledge(conv.chat_id)
except YouBlockedUserError:
await event.reply(
"```Please unblock @SpotifyMusicDownloaderBot and try again```"
)
return
await event.delete()
await bot.forward_messages(event.chat_id, respond.message)
await bot.send_read_acknowledge(event.chat_id)
@register(outgoing=True, pattern=r"^\.net(?: |$)(.*)")
async def _(event):
if event.fwd_from:
return
song = event.pattern_match.group(1)
chat = "@WooMaiBot"
link = f"/netease {song}"
await event.edit("```Getting Your Music```")
async with bot.conversation(chat) as conv:
await asyncio.sleep(2)
await event.edit("`Downloading...Please wait`")
try:
msg = await conv.send_message(link)
response = await conv.get_response()
respond = await conv.get_response()
""" - don't spam notif - """
await bot.send_read_acknowledge(conv.chat_id)
except YouBlockedUserError:
await event.reply("```Please unblock @WooMaiBot and try again```")
return
await event.edit("`Sending Your Music...`")
await asyncio.sleep(3)
await bot.send_file(event.chat_id, respond)
await event.client.delete_messages(conv.chat_id,
[msg.id, response.id, respond.id])
await event.delete()
@register(outgoing=True, pattern="^\.sdd(?: |$)(.*)")
async def _(event):
if event.fwd_from:
return
d_link = event.pattern_match.group(1)
if ".com" not in d_link:
await event.edit("` I need a link to download something pro.`**(._.)**")
else:
await event.edit("**Initiating Download!**")
chat = "@MusicHuntersBot"
async with bot.conversation(chat) as conv:
try:
msg_start = await conv.send_message("/start")
response = await conv.get_response()
msg = await conv.send_message(d_link)
details = await conv.get_response()
song = await conv.get_response()
""" - don't spam notif - """
await bot.send_read_acknowledge(conv.chat_id)
except YouBlockedUserError:
await event.edit("**Error:** `unblock` @MusicHuntersBot `and retry!`")
return
await bot.send_file(event.chat_id, song, caption=details.text)
await event.client.delete_messages(
conv.chat_id, [msg_start.id, response.id, msg.id, details.id, song.id]
)
await event.delete()
CMD_HELP.update({
"getmusic":
">`.song` **Artist - Song Title**"
"\nUsage: Finding and uploading song."
"\n\n>`.vsong` **Artist - Song Title**"
"\nUsage: Finding and uploading videoclip."
"\n\n>`.net` **<Artist - Song Title>**"
"\nUsage: Download music with @WooMaiBot"
"\n\n>`.sdd` **<Spotify/Deezer Link>**"
"\nUsage: Download Spotify/Deezer Music with @MusicHuntersBot"
"\n\n>`.smd` **<Artist - Song Title>**"
"\nUsage: Download Spotify Music with @SpotifyMusicDownloaderBot"
"\n\n>`.deezload` **<spotify/deezer link> <Format>**"
"\nUsage: Download music from deezer."
"\n__Format=__ `FLAC`, `MP3_320`, `MP3_256`, `MP3_128`."
})
|
[
"glob.glob",
"userbot.utils.progress",
"telethon.events.NewMessage",
"telethon.tl.types.DocumentAttributeVideo",
"hachoir.parser.createParser",
"userbot.bot.send_read_acknowledge",
"requests.get",
"userbot.bot.conversation",
"asyncio.get_event_loop",
"asyncio.sleep",
"userbot.events.register",
"subprocess.check_output",
"os.system",
"userbot.bot.forward_messages",
"userbot.CMD_HELP.update",
"bs4.BeautifulSoup",
"userbot.bot.send_message",
"userbot.bot.send_file",
"time.time"
] |
[((2748, 2796), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^\\\\.song (.*)"""'}), "(outgoing=True, pattern='^\\\\.song (.*)')\n", (2756, 2796), False, 'from userbot.events import register\n'), ((3915, 3970), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^\\\\.vsong(?: |$)(.*)"""'}), "(outgoing=True, pattern='^\\\\.vsong(?: |$)(.*)')\n", (3923, 3970), False, 'from userbot.events import register\n'), ((5901, 5954), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^\\\\.smd(?: |$)(.*)"""'}), "(outgoing=True, pattern='^\\\\.smd(?: |$)(.*)')\n", (5909, 5954), False, 'from userbot.events import register\n'), ((6918, 6971), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^\\\\.net(?: |$)(.*)"""'}), "(outgoing=True, pattern='^\\\\.net(?: |$)(.*)')\n", (6926, 6971), False, 'from userbot.events import register\n'), ((8020, 8073), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^\\\\.sdd(?: |$)(.*)"""'}), "(outgoing=True, pattern='^\\\\.sdd(?: |$)(.*)')\n", (8028, 8073), False, 'from userbot.events import register\n'), ((9162, 9751), 'userbot.CMD_HELP.update', 'CMD_HELP.update', (['{\'getmusic\':\n """>`.song` **Artist - Song Title**\nUsage: Finding and uploading song.\n\n>`.vsong` **Artist - Song Title**\nUsage: Finding and uploading videoclip.\n\n>`.net` **<Artist - Song Title>**\nUsage: Download music with @WooMaiBot\n\n>`.sdd` **<Spotify/Deezer Link>**\nUsage: Download Spotify/Deezer Music with @MusicHuntersBot\n\n>`.smd` **<Artist - Song Title>**\nUsage: Download Spotify Music with @SpotifyMusicDownloaderBot\n\n>`.deezload` **<spotify/deezer link> <Format>**\nUsage: Download music from deezer.\n__Format=__ `FLAC`, `MP3_320`, `MP3_256`, `MP3_128`."""\n }'], {}), '({\'getmusic\':\n """>`.song` **Artist - Song Title**\nUsage: Finding and uploading song.\n\n>`.vsong` **Artist - Song Title**\nUsage: 
Finding and uploading videoclip.\n\n>`.net` **<Artist - Song Title>**\nUsage: Download music with @WooMaiBot\n\n>`.sdd` **<Spotify/Deezer Link>**\nUsage: Download Spotify/Deezer Music with @MusicHuntersBot\n\n>`.smd` **<Artist - Song Title>**\nUsage: Download Spotify Music with @SpotifyMusicDownloaderBot\n\n>`.deezload` **<spotify/deezer link> <Format>**\nUsage: Download music from deezer.\n__Format=__ `FLAC`, `MP3_320`, `MP3_256`, `MP3_128`."""\n })\n', (9177, 9751), False, 'from userbot import CMD_HELP, bot\n'), ((1602, 1636), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (1615, 1636), False, 'from bs4 import BeautifulSoup\n'), ((2013, 2031), 'os.system', 'os.system', (['command'], {}), '(command)\n', (2022, 2031), False, 'import os\n'), ((2353, 2387), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (2366, 2387), False, 'from bs4 import BeautifulSoup\n'), ((2726, 2744), 'os.system', 'os.system', (['command'], {}), '(command)\n', (2735, 2744), False, 'import os\n'), ((3356, 3374), 'glob.glob', 'glob.glob', (['"""*.mp3"""'], {}), "('*.mp3')\n", (3365, 3374), False, 'import glob\n'), ((3458, 3469), 'time.time', 'time.time', ([], {}), '()\n', (3467, 3469), False, 'import time\n'), ((3831, 3856), 'os.system', 'os.system', (['"""rm -rf *.mp3"""'], {}), "('rm -rf *.mp3')\n", (3840, 3856), False, 'import os\n'), ((3861, 3912), 'subprocess.check_output', 'subprocess.check_output', (['"""rm -rf *.mp3"""'], {'shell': '(True)'}), "('rm -rf *.mp3', shell=True)\n", (3884, 3912), False, 'import subprocess\n'), ((5159, 5170), 'time.time', 'time.time', ([], {}), '()\n', (5168, 5170), False, 'import time\n'), ((5811, 5836), 'os.system', 'os.system', (['"""rm -rf *.mkv"""'], {}), "('rm -rf *.mkv')\n", (5820, 5836), False, 'import os\n'), ((5841, 5866), 'os.system', 'os.system', (['"""rm -rf *.mp4"""'], {}), "('rm -rf *.mp4')\n", (5850, 5866), False, 'import os\n'), ((5871, 
5897), 'os.system', 'os.system', (['"""rm -rf *.webm"""'], {}), "('rm -rf *.webm')\n", (5880, 5897), False, 'import os\n'), ((1502, 1593), 'requests.get', 'requests.get', (["('https://www.youtube.com/results?search_query=' + search)"], {'headers': 'headers'}), "('https://www.youtube.com/results?search_query=' + search,\n headers=headers)\n", (1514, 1593), False, 'import requests\n'), ((2224, 2315), 'requests.get', 'requests.get', (["('https://www.youtube.com/results?search_query=' + search)"], {'headers': 'headers'}), "('https://www.youtube.com/results?search_query=' + search,\n headers=headers)\n", (2236, 2315), False, 'import requests\n'), ((4578, 4597), 'glob.glob', 'glob.glob', (['"""*.webm"""'], {}), "('*.webm')\n", (4587, 4597), False, 'import glob\n'), ((4799, 4816), 'hachoir.parser.createParser', 'createParser', (['loa'], {}), '(loa)\n', (4811, 4816), False, 'from hachoir.parser import createParser\n'), ((6157, 6179), 'userbot.bot.conversation', 'bot.conversation', (['chat'], {}), '(chat)\n', (6173, 6179), False, 'from userbot import CMD_HELP, bot\n'), ((7188, 7210), 'userbot.bot.conversation', 'bot.conversation', (['chat'], {}), '(chat)\n', (7204, 7210), False, 'from userbot import CMD_HELP, bot\n'), ((8391, 8413), 'userbot.bot.conversation', 'bot.conversation', (['chat'], {}), '(chat)\n', (8407, 8413), False, 'from userbot import CMD_HELP, bot\n'), ((4532, 4550), 'glob.glob', 'glob.glob', (['"""*.mp4"""'], {}), "('*.mp4')\n", (4541, 4550), False, 'import glob\n'), ((4555, 4573), 'glob.glob', 'glob.glob', (['"""*.mkv"""'], {}), "('*.mkv')\n", (4564, 4573), False, 'import glob\n'), ((6203, 6219), 'asyncio.sleep', 'asyncio.sleep', (['(2)'], {}), '(2)\n', (6216, 6219), False, 'import asyncio\n'), ((6807, 6859), 'userbot.bot.forward_messages', 'bot.forward_messages', (['event.chat_id', 'respond.message'], {}), '(event.chat_id, respond.message)\n', (6827, 6859), False, 'from userbot import CMD_HELP, bot\n'), ((6874, 6914), 'userbot.bot.send_read_acknowledge', 
'bot.send_read_acknowledge', (['event.chat_id'], {}), '(event.chat_id)\n', (6899, 6914), False, 'from userbot import CMD_HELP, bot\n'), ((7236, 7252), 'asyncio.sleep', 'asyncio.sleep', (['(2)'], {}), '(2)\n', (7249, 7252), False, 'import asyncio\n'), ((7790, 7806), 'asyncio.sleep', 'asyncio.sleep', (['(3)'], {}), '(3)\n', (7803, 7806), False, 'import asyncio\n'), ((7823, 7860), 'userbot.bot.send_file', 'bot.send_file', (['event.chat_id', 'respond'], {}), '(event.chat_id, respond)\n', (7836, 7860), False, 'from userbot import CMD_HELP, bot\n'), ((8937, 8993), 'userbot.bot.send_file', 'bot.send_file', (['event.chat_id', 'song'], {'caption': 'details.text'}), '(event.chat_id, song, caption=details.text)\n', (8950, 8993), False, 'from userbot import CMD_HELP, bot\n'), ((6373, 6427), 'telethon.events.NewMessage', 'events.NewMessage', ([], {'incoming': '(True)', 'from_users': '(752979930)'}), '(incoming=True, from_users=752979930)\n', (6390, 6427), False, 'from telethon import events\n'), ((6460, 6488), 'userbot.bot.send_message', 'bot.send_message', (['chat', 'link'], {}), '(chat, link)\n', (6476, 6488), False, 'from userbot import CMD_HELP, bot\n'), ((6544, 6583), 'userbot.bot.send_read_acknowledge', 'bot.send_read_acknowledge', (['conv.chat_id'], {}), '(conv.chat_id)\n', (6569, 6583), False, 'from userbot import CMD_HELP, bot\n'), ((7540, 7579), 'userbot.bot.send_read_acknowledge', 'bot.send_read_acknowledge', (['conv.chat_id'], {}), '(conv.chat_id)\n', (7565, 7579), False, 'from userbot import CMD_HELP, bot\n'), ((8745, 8784), 'userbot.bot.send_read_acknowledge', 'bot.send_read_acknowledge', (['conv.chat_id'], {}), '(conv.chat_id)\n', (8770, 8784), False, 'from userbot import CMD_HELP, bot\n'), ((5416, 5527), 'telethon.tl.types.DocumentAttributeVideo', 'DocumentAttributeVideo', ([], {'duration': 'duration', 'w': 'width', 'h': 'height', 'round_message': '(False)', 'supports_streaming': '(True)'}), '(duration=duration, w=width, h=height, round_message=\n False, 
supports_streaming=True)\n', (5438, 5527), False, 'from telethon.tl.types import DocumentAttributeVideo\n'), ((3738, 3784), 'userbot.utils.progress', 'progress', (['d', 't', 'event', 'c_time', '"""[UPLOAD]"""', 'loa'], {}), "(d, t, event, c_time, '[UPLOAD]', loa)\n", (3746, 3784), False, 'from userbot.utils import progress\n'), ((5718, 5764), 'userbot.utils.progress', 'progress', (['d', 't', 'event', 'c_time', '"""[UPLOAD]"""', 'loa'], {}), "(d, t, event, c_time, '[UPLOAD]', loa)\n", (5726, 5764), False, 'from userbot.utils import progress\n'), ((3688, 3712), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (3710, 3712), False, 'import asyncio\n'), ((5668, 5692), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (5690, 5692), False, 'import asyncio\n')]
|
#
# Copyright (c) 2020 by <NAME>, Ryazan State Radio Engineering University.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet
from ryu.lib.packet import arp
from ryu.lib.packet import icmp
class IpTableEntry:
def __init__(self, adjacent_dpid, mac_address, ip_address):
self.adjacent_dpid = adjacent_dpid
self.mac_address = mac_address
self.ip_address = ip_address
class MacDetector(app_manager.RyuApp):
OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]
def __init__(self, *args, **kwargs):
super(MacDetector, self).__init__(*args, **kwargs)
self.ip_table = {}
@set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
def switch_features_handler(self, ev):
datapath = ev.msg.datapath
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
# install the table-miss flow entry.
match = parser.OFPMatch()
actions = [
parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,
ofproto.OFPCML_NO_BUFFER)
]
self.add_flow(datapath, 0, match, actions)
self.logger.debug('datapath %016x registered', datapath.id)
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
msg = ev.msg
datapath = msg.datapath
in_port = msg.match['in_port']
pkt = packet.Packet(msg.data)
eth_pkt = pkt.get_protocol(ethernet.ethernet)
icmp_pkt = pkt.get_protocol(icmp.icmp)
arp_pkt = pkt.get_protocol(arp.arp)
if arp_pkt:
self._handle_arp(arp_pkt, eth_pkt, datapath, in_port)
def _handle_arp(self, arp_pkt, eth_pkt, datapath, in_port):
if arp_pkt.opcode != arp.ARP_REQUEST:
return
dst_mac = arp_pkt.dst_mac
src_mac = arp_pkt.src_mac
dst_ip = arp_pkt.dst_ip
src_ip = arp_pkt.src_ip
self.logger.debug(
"ARP packet from %016x; src_mac=%s dst_mac=%s, src_ip=%s, dst_ip=%s",
datapath.id, src_mac, dst_mac, src_ip, dst_ip)
if arp_pkt.src_ip not in self.ip_table:
self.ip_table[src_ip] = IpTableEntry(datapath.id, src_mac, src_ip)
if arp_pkt.dst_ip in self.ip_table:
host = self.ip_table[arp_pkt.dst_ip]
self.reply_arp(datapath, in_port, host, arp_pkt, eth_pkt)
return
def build_arp_replay_pkt(self, ethertype, src_mac, dst_mac, src_ip,
dst_ip):
pkt = packet.Packet()
pkt.add_protocol(
ethernet.ethernet(ethertype=ethertype, dst=dst_mac, src=src_mac))
pkt.add_protocol(
arp.arp(opcode=arp.ARP_REPLY,
src_mac=src_mac,
src_ip=src_ip,
dst_mac=dst_mac,
dst_ip=dst_ip))
return pkt
def reply_arp(self, datapath, in_port, host, arp_pkt, eth_pkt):
arp_reply_pkt = self.build_arp_replay_pkt(eth_pkt.ethertype,
host.mac_address,
arp_pkt.src_mac,
host.ip_address,
arp_pkt.src_ip)
self.send_packet(datapath, in_port, arp_reply_pkt)
def send_packet(self, datapath, out_port, pkt):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
pkt.serialize()
data = pkt.data
actions = [parser.OFPActionOutput(port=out_port)]
out = parser.OFPPacketOut(datapath=datapath,
buffer_id=ofproto.OFP_NO_BUFFER,
in_port=ofproto.OFPP_CONTROLLER,
actions=actions,
data=data)
datapath.send_msg(out)
def add_flow(self, datapath, priority, match, actions):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
inst = [
parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS, actions)
]
mod = parser.OFPFlowMod(datapath=datapath,
priority=priority,
match=match,
instructions=inst)
datapath.send_msg(mod)
|
[
"ryu.lib.packet.packet.Packet",
"ryu.controller.handler.set_ev_cls",
"ryu.lib.packet.ethernet.ethernet",
"ryu.lib.packet.arp.arp"
] |
[((1918, 1981), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPSwitchFeatures', 'CONFIG_DISPATCHER'], {}), '(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)\n', (1928, 1981), False, 'from ryu.controller.handler import set_ev_cls\n'), ((2493, 2548), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPPacketIn', 'MAIN_DISPATCHER'], {}), '(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n', (2503, 2548), False, 'from ryu.controller.handler import set_ev_cls\n'), ((2694, 2717), 'ryu.lib.packet.packet.Packet', 'packet.Packet', (['msg.data'], {}), '(msg.data)\n', (2707, 2717), False, 'from ryu.lib.packet import packet\n'), ((3816, 3831), 'ryu.lib.packet.packet.Packet', 'packet.Packet', ([], {}), '()\n', (3829, 3831), False, 'from ryu.lib.packet import packet\n'), ((3870, 3934), 'ryu.lib.packet.ethernet.ethernet', 'ethernet.ethernet', ([], {'ethertype': 'ethertype', 'dst': 'dst_mac', 'src': 'src_mac'}), '(ethertype=ethertype, dst=dst_mac, src=src_mac)\n', (3887, 3934), False, 'from ryu.lib.packet import ethernet\n'), ((3974, 4072), 'ryu.lib.packet.arp.arp', 'arp.arp', ([], {'opcode': 'arp.ARP_REPLY', 'src_mac': 'src_mac', 'src_ip': 'src_ip', 'dst_mac': 'dst_mac', 'dst_ip': 'dst_ip'}), '(opcode=arp.ARP_REPLY, src_mac=src_mac, src_ip=src_ip, dst_mac=\n dst_mac, dst_ip=dst_ip)\n', (3981, 4072), False, 'from ryu.lib.packet import arp\n')]
|
# bezier_curve()生成贝塞尔曲线坐标。
import numpy as np
from skimage.draw import bezier_curve
img = np.zeros((10, 10), dtype=np.uint8)
print(img[1, 5])
rr, cc = bezier_curve(1, 5, 5, -2, 8, 8, 2)
img[rr, cc] = 1
print(img)
print(img[1, 5])
print(img[5, -2])
print(img[8, 8])
|
[
"numpy.zeros",
"skimage.draw.bezier_curve"
] |
[((90, 124), 'numpy.zeros', 'np.zeros', (['(10, 10)'], {'dtype': 'np.uint8'}), '((10, 10), dtype=np.uint8)\n', (98, 124), True, 'import numpy as np\n'), ((151, 185), 'skimage.draw.bezier_curve', 'bezier_curve', (['(1)', '(5)', '(5)', '(-2)', '(8)', '(8)', '(2)'], {}), '(1, 5, 5, -2, 8, 8, 2)\n', (163, 185), False, 'from skimage.draw import bezier_curve\n')]
|
# coding:utf-8
import getpass
import os
import re
import tempfile
import time
from datetime import datetime as dt
import requests_cache
from errors import EtherscanIoException
def _bool(text: str):
"""Convert str to bool"""
if text.lower() in ["0", "false", "none", "null", "n/a", ""]:
return False
return True
def to_snake_case(name):
if name == "timeStamp":
return "timestamp"
if name == "txreceipt_status":
return "tx_receipt_status"
name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
name = re.sub("__([A-Z])", r"_\1", name)
name = re.sub("([a-z0-9])([A-Z])", r"\1_\2", name)
return name.lower()
def _convert(source):
converted = {}
for key, value in source.items():
key = to_snake_case(key)
if key.startswith("is_") or key.endswith("_status"):
converted[key] = _bool(value)
continue
if value == "":
converted[key] = None
continue
if value.isdigit():
converted[key] = int(value)
continue
converted[key] = value
return source
def shared(func):
shared.instances = {}
def getinstance(self, *args, **kwargs):
if func.__name__ not in shared.instances:
shared.instances[func.__name__] = func(self, *args, **kwargs)
return shared.instances[func.__name__]
return getinstance
def single_excercise(func):
# pylint: disable=protected-access
def inner(self, *args, **kwargs):
if not hasattr(self, "_instances"):
setattr(self, "_instances", {})
if func.__name__ not in self._instances:
self._instances[func.__name__] = func(self, *args, **kwargs)
return self._instances[func.__name__]
# pylint: enable=protected-access
return inner
class BaseClient:
def __init__(
self,
api_key=None,
network=None,
cache_backend="sqlite",
cache_expire_after=5,
):
# API URL
self._api_url = "https://api.etherscan.io/api"
# API Key
self._api_key = api_key
# Network
if network:
if network not in ["ropsten", "kovan", "rinkeby"]:
raise Exception(
"network could only be None(mainnet) /ropsten/kovan/rinkeby"
)
self._api_url = "https://api-{network}.etherscan.io/api".format(
network=network
)
# params
self._reset_params()
# session & cache
self._cache_backend = cache_backend
self._cache_expire_after = cache_expire_after
self._rate_count = None
@property
@shared
def api_key(self):
if self._api_key:
os.environ["ETHERSCAN_KEY"] = self._api_key
return self._api_key
self._api_key = os.getenv("ETHERSCAN_KEY")
if not self._api_key:
self._api_key = getpass.getpass("Input etherscan key: ")
os.environ["ETHERSCAN_KEY"] = self._api_key
return self._api_key
@property
@shared
def cache_name(self):
return os.path.join(tempfile.gettempdir(), "etherscan_cache")
@property
@shared
def session(self):
session = requests_cache.core.CachedSession(
cache_name=self.cache_name,
backend=self._cache_backend,
expire_after=self._cache_expire_after,
)
session.headers.update(
{
"User-agent": "etherscan - python wrapper "
"around etherscan.io (github.com/neoctobers/etherscan)"
}
)
return session
def _req(self):
response = self.session.post(url=self._api_url, data=self._params).json()
self._reset_params()
if response["status"] == "0":
print("--- Etherscan.io Message ---", response["message"])
return response["result"]
def _reset_params(self):
self._params = {
"apikey": self.api_key,
}
class Accounts(BaseClient):
def _reset_params(self):
self._params = {"apikey": self.api_key, "module": "account"}
def get_eth_balance(self, address: str):
"""Get ETH balance by address."""
self._params["action"] = "balance"
self._params["address"] = address
return int(self._req())
def get_eth_balances(self, addresses: list):
"""Get ETH balances by addresses list."""
self._params["action"] = "balancemulti"
self._params["address"] = ",".join(addresses)
balances = {}
for row in self._req():
balances[row["account"]] = int(row["balance"])
return balances
def get_transactions_by_address(
self,
address: str,
start_block: int = 0,
end_block: int = 999999999,
page: int = 1,
limit: int = 10000,
sort: str = "asc",
): # pylint: disable=too-many-arguments
"""Get transactions by address."""
self._params["action"] = "txlist"
self._params["address"] = address
self._params["startblock"] = start_block
self._params["endblock"] = end_block
self._params["page"] = page
self._params["offset"] = limit
self._params["sort"] = sort
response = self._req()
transactions = []
for transaction in response:
transactions.append(_convert(transaction))
return transactions
def get_internal_transactions_by_address(
self,
address: str,
start_block: int = 0,
end_block: int = 999999999,
page: int = 1,
limit: int = 10000,
sort: str = "asc",
): # pylint: disable=too-many-arguments
"""Get transactions by address."""
self._params["action"] = "txlistinternal"
self._params["address"] = address
self._params["startblock"] = start_block
self._params["endblock"] = end_block
self._params["page"] = page
self._params["offset"] = limit
self._params["sort"] = sort
response = self._req()
transactions = []
for transaction in response:
transactions.append(_convert(transaction))
return transactions
def get_token_transactions(
self,
contract_address: str = None,
address: str = None,
start_block: int = 0,
end_block: int = 999999999,
page: int = 1,
limit: int = 10000,
sort: str = "asc",
): # pylint: disable=too-many-arguments
"""Get ERC20 token transactions by contract address."""
if contract_address is None and address is None:
raise EtherscanIoException(
"Param `contract_address` and `address` cannot be None at the same time."
)
self._params["action"] = "tokentx"
if contract_address:
self._params["contractaddress"] = contract_address
if address:
self._params["address"] = address
self._params["startblock"] = start_block
self._params["endblock"] = end_block
self._params["page"] = page
self._params["offset"] = limit
self._params["sort"] = sort
response = self._req()
token_transactions = []
for transaction in response:
token_transactions.append(_convert(transaction))
return token_transactions
class Contracts(BaseClient):
def _reset_params(self):
self._params = {"apikey": self.api_key, "module": "contract"}
def get_abi(self, address):
self._params["action"] = "getabi"
self._params["address"] = address
return self._req()
def get_source_code(self, address):
self._params["action"] = "getsourcecode"
self._params["address"] = address
return self._req()
class Transactions(BaseClient):
def _reset_params(self):
self._params = {"apikey": self.api_key, "module": "transaction"}
def get_tx_receipt_status(self, tx_hash):
self._params["action"] = "gettxreceiptstatus"
self._params["txhash"] = tx_hash
return self._req()
class Blocks(BaseClient):
def _reset_params(self):
self._params = {"apikey": self.api_key, "module": "block"}
def get_block_countdown(self, block_no):
self._params["action"] = "getblockcountdown"
self._params["blockno"] = block_no
return self._req()
def get_block_no_by_time(self, timestamp, closest="before"):
if closest not in ["before", "after"]:
raise ValueError(f"Something went wrong: {closest}")
self._params["action"] = "getblocknobytime"
self._params["timestamp"] = timestamp
self._params["closest"] = closest
return int(self._req())
@property
def latest_block(self):
return self.get_block_no_by_time(int(time.time()))
class Logs(BaseClient):
def _reset_params(self):
self._params = {"apikey": self.api_key, "module": "logs"}
def get_logs(self, from_block, to_block, address, topic):
self._params["action"] = "getlogs"
self._params["fromBlock"] = from_block
self._params["toBlock"] = to_block
self._params["address"] = address
self._params["topic0"] = topic
return self._req()
class GethParityProxy(BaseClient):
def _reset_params(self):
self._params = {"apikey": self.api_key, "module": "proxy"}
def get_block_number(self):
"""Get latest block number."""
self._params["action"] = "eth_blockNumber"
return int(self._req(), 16)
def get_block_by_number(self, block_number, boolean=True):
"""Get block by number."""
self._params["action"] = "eth_getBlockByNumber"
self._params["tag"] = hex(block_number)
self._params["boolean"] = boolean
return self._req()
def get_uncle_by_block_number_and_index(self, block_number):
self._params["action"] = "eth_getUncleByBlockNumberAndIndex"
self._params["tag"] = hex(block_number)
return self._req()
def get_block_transaction_count_by_number(self, block_number):
self._params["action"] = "eth_getBlockTransactionCountByNumber"
self._params["tag"] = hex(block_number)
return self._req()
def get_transaction_by_hash(self, tx_hash):
self._params["action"] = "eth_getTransactionByHash"
self._params["txhash"] = tx_hash
return self._req()
def get_transaction_by_block_number_and_index(self, block_number, index):
self._params["action"] = "eth_getTransactionByBlockNumberAndIndex"
self._params["tag"] = hex(block_number)
self._params["index"] = hex(index)
return self._req()
def get_transaction_count(self, address, tag):
if tag not in ["earlist", "pending", "latest"]:
raise ValueError(f"Something went wrong: {tag}")
self._params["action"] = "eth_getTransactionCount"
self._params["address"] = address
self._params["tag"] = tag
return self._req()
def send_raw_transaction(self, hex_):
self._params["action"] = "eth_sendRawTransaction"
self._params["hex"] = hex(hex_)
return self._req()
def get_transaction_receipt(self, tx_hash):
self._params["action"] = "eth_getTransactionReceipt"
self._params["txhash"] = tx_hash
return self._req()
def call(self, to_, data, tag):
if tag not in ["earlist", "pending", "latest"]:
raise ValueError(f"Something went wrong: {tag}")
self._params["action"] = "eth_call"
self._params["to"] = to_
self._params["data"] = data
self._params["tag"] = tag
return self._req()
def get_code(self, address, tag):
if tag not in ["earlist", "pending", "latest"]:
raise ValueError(f"Something went wrong: {tag}")
self._params["action"] = "eth_getCode"
self._params["address"] = address
self._params["tag"] = tag
return self._req()
def get_storage_at(self, position, tag):
if tag not in ["earlist", "pending", "latest"]:
raise ValueError(f"Something went wrong: {tag}")
self._params["action"] = "eth_getStorageAt"
self._params["position"] = hex(position)
self._params["tag"] = tag
return self._req()
def get_gas_price(self):
"""Get gas price."""
self._params["action"] = "eth_gasPrice"
return int(self._req(), 16)
def estimate_gas(self):
"""Get gas price."""
self._params["action"] = "eth_estimateGas"
raise NotImplementedError()
class Tokens(BaseClient):
def _reset_params(self):
self._params = {"apikey": self.api_key, "module": "token"}
class GasTracker(BaseClient):
def _reset_params(self):
self._params = {"apikey": self.api_key, "module": "gas"}
class Stats(BaseClient):
def _reset_params(self):
self._params = {"apikey": self.api_key, "module": "stats"}
def get_eth_price(self):
"""Get ETH price."""
self._params["action"] = "ethprice"
response = self._req()
return {
"ethbtc": float(response["ethbtc"]),
"ethbtc_timestamp": int(response["ethbtc_timestamp"]),
"ethusd": float(response["ethusd"]),
"ethusd_timestamp": int(response["ethbtc_timestamp"]),
}
def get_eth_supply(self):
self._params["action"] = "ethsupply"
return int(self._req())
class Client(BaseClient):
def __init__(self, *args, **kwargs):
super().__init__(self, *args, **kwargs)
self._start_time = None
@property
@single_excercise
def accounts(self):
return Accounts()
@property
@single_excercise
def contracts(self):
return Contracts()
@property
@single_excercise
def transactions(self):
return Transactions()
@property
@single_excercise
def blocks(self):
return Blocks()
@property
@single_excercise
def logs(self):
return Logs()
@property
@single_excercise
def geth_parity_proxy(self):
return GethParityProxy()
@property
@single_excercise
def tokens(self):
return Tokens()
@property
@single_excercise
def gas_tracker(self):
return GasTracker()
@property
@single_excercise
def stats(self):
return Stats()
def get_transaction_history_by_address(self, address, start=None, end=None):
form = "%m/%d/%Y"
if start:
self._start_time = start
if not self._start_time:
raise ValueError("Something went wrong")
start_timestamp = int(dt.timestamp(dt.strptime(self._start_time, form)))
if end:
end_timestamp = int(dt.timestamp(dt.strptime(end, form)))
else:
end_timestamp = int(time.time())
end = dt.strftime(dt.fromtimestamp(end_timestamp), form)
if start_timestamp > end_timestamp:
return
twenty_four_hours = 60 * 60 * 24
transactions = self.accounts.get_transactions_by_address(
address,
start_block=self.blocks.get_block_no_by_time(
end_timestamp - twenty_four_hours
),
end_block=self.blocks.get_block_no_by_time(end_timestamp),
)
yield transactions
end_timestamp -= twenty_four_hours
end = dt.strftime(dt.fromtimestamp(end_timestamp), form)
self.get_transaction_history_by_address(address, end=end)
|
[
"requests_cache.core.CachedSession",
"getpass.getpass",
"errors.EtherscanIoException",
"tempfile.gettempdir",
"time.time",
"datetime.datetime.strptime",
"datetime.datetime.fromtimestamp",
"re.sub",
"os.getenv"
] |
[((497, 540), 're.sub', 're.sub', (['"""(.)([A-Z][a-z]+)"""', '"""\\\\1_\\\\2"""', 'name'], {}), "('(.)([A-Z][a-z]+)', '\\\\1_\\\\2', name)\n", (503, 540), False, 'import re\n'), ((551, 584), 're.sub', 're.sub', (['"""__([A-Z])"""', '"""_\\\\1"""', 'name'], {}), "('__([A-Z])', '_\\\\1', name)\n", (557, 584), False, 'import re\n'), ((596, 640), 're.sub', 're.sub', (['"""([a-z0-9])([A-Z])"""', '"""\\\\1_\\\\2"""', 'name'], {}), "('([a-z0-9])([A-Z])', '\\\\1_\\\\2', name)\n", (602, 640), False, 'import re\n'), ((2865, 2891), 'os.getenv', 'os.getenv', (['"""ETHERSCAN_KEY"""'], {}), "('ETHERSCAN_KEY')\n", (2874, 2891), False, 'import os\n'), ((3267, 3401), 'requests_cache.core.CachedSession', 'requests_cache.core.CachedSession', ([], {'cache_name': 'self.cache_name', 'backend': 'self._cache_backend', 'expire_after': 'self._cache_expire_after'}), '(cache_name=self.cache_name, backend=self.\n _cache_backend, expire_after=self._cache_expire_after)\n', (3300, 3401), False, 'import requests_cache\n'), ((2950, 2990), 'getpass.getpass', 'getpass.getpass', (['"""Input etherscan key: """'], {}), "('Input etherscan key: ')\n", (2965, 2990), False, 'import getpass\n'), ((3157, 3178), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (3176, 3178), False, 'import tempfile\n'), ((6732, 6832), 'errors.EtherscanIoException', 'EtherscanIoException', (['"""Param `contract_address` and `address` cannot be None at the same time."""'], {}), "(\n 'Param `contract_address` and `address` cannot be None at the same time.')\n", (6752, 6832), False, 'from errors import EtherscanIoException\n'), ((15595, 15626), 'datetime.datetime.fromtimestamp', 'dt.fromtimestamp', (['end_timestamp'], {}), '(end_timestamp)\n', (15611, 15626), True, 'from datetime import datetime as dt\n'), ((8935, 8946), 'time.time', 'time.time', ([], {}), '()\n', (8944, 8946), False, 'import time\n'), ((14847, 14882), 'datetime.datetime.strptime', 'dt.strptime', (['self._start_time', 'form'], {}), 
'(self._start_time, form)\n', (14858, 14882), True, 'from datetime import datetime as dt\n'), ((15018, 15029), 'time.time', 'time.time', ([], {}), '()\n', (15027, 15029), False, 'import time\n'), ((15061, 15092), 'datetime.datetime.fromtimestamp', 'dt.fromtimestamp', (['end_timestamp'], {}), '(end_timestamp)\n', (15077, 15092), True, 'from datetime import datetime as dt\n'), ((14947, 14969), 'datetime.datetime.strptime', 'dt.strptime', (['end', 'form'], {}), '(end, form)\n', (14958, 14969), True, 'from datetime import datetime as dt\n')]
|
import boto.provider
from boto.compat import urllib
qsa_of_interest = ['acl', 'cors', 'defaultObjectAcl', 'location', 'logging',
'partNumber', 'policy', 'requestPayment', 'torrent',
'versioning', 'versionId', 'versions', 'website',
'uploads', 'uploadId', 'response-content-type',
'response-content-language', 'response-expires',
'response-cache-control', 'response-content-disposition',
'response-content-encoding', 'delete', 'lifecycle',
'tagging', 'restore',
# storageClass is a QSA for buckets in Google Cloud Storage.
# (StorageClass is associated to individual keys in S3, but
# having it listed here should cause no problems because
# GET bucket?storageClass is not part of the S3 API.)
'storageClass',
# websiteConfig is a QSA for buckets in Google Cloud
# Storage.
'websiteConfig',
# compose is a QSA for objects in Google Cloud Storage.
'compose']
def unquote_v(nv):
if len(nv) == 1:
return nv
else:
return (nv[0], urllib.parse.unquote(nv[1]))
def canonical_string(method, path, headers, expires=None,
provider=None):
"""
Generates the aws canonical string for the given parameters
"""
if not provider:
provider = boto.provider.get_default()
interesting_headers = {}
for key in headers:
lk = key.lower()
if headers[key] is not None and \
(lk in ['content-md5', 'content-type', 'date'] or
lk.startswith(provider.header_prefix)):
interesting_headers[lk] = str(headers[key]).strip()
# these keys get empty strings if they don't exist
if 'content-type' not in interesting_headers:
interesting_headers['content-type'] = ''
if 'content-md5' not in interesting_headers:
interesting_headers['content-md5'] = ''
# just in case someone used this. it's not necessary in this lib.
if provider.date_header in interesting_headers:
interesting_headers['date'] = ''
# if you're using expires for query string auth, then it trumps date
# (and provider.date_header)
if expires:
interesting_headers['date'] = str(expires)
sorted_header_keys = sorted(interesting_headers.keys())
buf = "%s\n" % method
for key in sorted_header_keys:
val = interesting_headers[key]
if key.startswith(provider.header_prefix):
buf += "%s:%s\n" % (key, val)
else:
buf += "%s\n" % val
# don't include anything after the first ? in the resource...
# unless it is one of the QSA of interest, defined above
t = path.split('?')
buf += t[0]
if len(t) > 1:
qsa = t[1].split('&')
qsa = [a.split('=', 1) for a in qsa]
qsa = [unquote_v(a) for a in qsa if a[0] in qsa_of_interest]
if len(qsa) > 0:
qsa.sort(key=lambda x: x[0])
qsa = ['='.join(a) for a in qsa]
buf += '?'
buf += '&'.join(qsa)
return buf
def merge_meta(headers, metadata, provider=None):
if not provider:
provider = boto.provider.get_default()
metadata_prefix = provider.metadata_prefix
final_headers = headers.copy()
for k in metadata.keys():
if k.lower() in boto.s3.key.Key.base_user_settable_fields:
final_headers[k] = metadata[k]
else:
final_headers[metadata_prefix + k] = metadata[k]
return final_headers
def get_aws_metadata(headers, provider=None):
if not provider:
provider = boto.provider.get_default()
metadata_prefix = provider.metadata_prefix
metadata = {}
for hkey in headers.keys():
if hkey.lower().startswith(metadata_prefix):
val = urllib.parse.unquote(headers[hkey])
if isinstance(val, bytes):
try:
val = val.decode('utf-8')
except UnicodeDecodeError:
# Just leave the value as-is
pass
metadata[hkey[len(metadata_prefix):]] = val
del headers[hkey]
return metadata
|
[
"boto.compat.urllib.parse.unquote"
] |
[((1275, 1302), 'boto.compat.urllib.parse.unquote', 'urllib.parse.unquote', (['nv[1]'], {}), '(nv[1])\n', (1295, 1302), False, 'from boto.compat import urllib\n'), ((3990, 4025), 'boto.compat.urllib.parse.unquote', 'urllib.parse.unquote', (['headers[hkey]'], {}), '(headers[hkey])\n', (4010, 4025), False, 'from boto.compat import urllib\n')]
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Pocket PiAP
# ......................................................................
# Copyright (c) 2017-2020, <NAME>
# ......................................................................
# Licensed under MIT (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# ......................................................................
# http://www.github.com/reactive-firewall/PiAP-python-tools/LICENSE.rst
# ......................................................................
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ......................................................................
# try:
# from . import config as config
# except Exception:
# import config as config
try:
import sys
if sys.__name__ is None:
raise ImportError("[CWE-758] Could not import the sys.")
except Exception as err:
raise ImportError(err)
try:
if 'os' not in sys.modules:
import os
else: # pragma: no branch
os = sys.modules["""os"""]
except Exception:
raise ImportError("[CWE-758] Could not import the os. We're like in the matrix!")
try:
if 'argparse' not in sys.modules:
import argparse as argparse
else: # pragma: no branch
argparse = sys.modules["""argparse"""]
except Exception:
raise ImportError("[CWE-758] We could not import argparse.")
try:
if str("book") in __file__:
__sys_path__ = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if __sys_path__ not in sys.path:
sys.path.insert(0, __sys_path__)
except Exception as importErr:
print(str(importErr))
print(str(importErr.args))
importErr = None
del importErr
raise ImportError("Failed to import " + str(__file__))
try:
if 'piaplib' not in sys.modules:
raise ImportError("Pocket Book failed to import.") # import piaplib as piaplib
piaplib = sys.modules["""piaplib"""]
except Exception:
raise ImportError("[CWE-758] Could not import the piaplib. We're in need of a fix! ABORT.")
try:
if str("piaplib.pku.utils") not in sys.modules:
from piaplib.pku import utils as utils
else:
utils = sys.modules[str("piaplib.pku.utils")]
except Exception:
try:
import piaplib.pku.utils as utils
except Exception as err:
raise ImportError(err, "Error Importing piaplib.pku.utils")
try:
if str("piaplib.pku.remediation") not in sys.modules:
from piaplib.pku import remediation as remediation
else:
remediation = sys.modules[str("piaplib.pku.remediation")]
except Exception:
try:
import piaplib.pku.remediation as remediation
except Exception as err:
raise ImportError(err, "Error Importing remediation")
try:
if str("piaplib.book.logs.logs") not in sys.modules:
from .logs import logs as logs
else:
logs = sys.modules[str("piaplib.book.logs.logs")]
except Exception:
try:
import logs.logs as logs
except Exception as err:
raise ImportError(err, "Error Importing logs")
try:
if utils.__name__ is None:
raise ImportError("Failed to open PKU Utils")
if remediation.__name__ is None:
raise ImportError("Failed to open PKU Remediation")
if logs.__name__ is None:
raise ImportError("Failed to open Pocket LogBook")
except Exception as importErr:
print(str(importErr))
print(str(importErr.args))
importErr = None
del importErr
raise ImportError("Failed to import " + str(__file__))
exit(255)
__prog__ = """piaplib.book.version"""
"""The name of this PiAPLib tool is pocket version"""
@remediation.error_handling
def getKeyringVersion(verbose=False):
"""returns the keyring version."""
try:
from piaplib import keyring
if keyring.__name__ is False:
raise NotImplementedError("[CWE-758] Failed to import keyring")
except Exception:
import piaplib.keyring
try:
from keyring import clarify as clarify
except Exception:
import piaplib.keyring.clarify as clarify
import piaplib.keyring.__main__
keyring_version = str(
"{name} {version}"
).format(
name=str(piaplib.keyring.__main__.__prog__),
version=str(piaplib.__version__)
)
if verbose:
if clarify.hasBackendCommand():
keyring_version = str(
"Pocket Keyring: {version}\nBackend Cryptographic Library: {backend}\n" +
"Cryptographic Algorithm: {algo}"
).format(
version=str(keyring_version),
backend=str(clarify.getBackendCommand()),
algo=str(clarify.getAlgoForOS())
)
return keyring_version
@remediation.error_handling
def getPythonVersion(verbose=False):
"""returns which version of python is this"""
python_version = str(
"Python {major}.{minor}"
).format(
major=str(sys.version_info[0]),
minor=str(sys.version_info[1])
)
if verbose:
python_version = str(
"Python: {version}\n{flags}\n{copyright}\nBackend Python Library: {backend}"
).format(
version=str(sys.version),
flags=str(sys.flags),
copyright=str(sys.copyright),
backend=str(sys.executable)
)
return python_version
@remediation.error_handling
def getOSVersion(*args, **kwargs):
"""returns which version of the platform this is"""
return str("Platform: {}").format(str(sys.platform))
@remediation.error_handling
def getVersion(verbose=False):
"""Returns the piaplib version."""
piaplib_version = str("piaplib: {}").format(str(piaplib.__version__))
if verbose:
piaplib_version = str(
"{version}\n{python}\n{os}"
).format(
version=str(piaplib_version),
python=str(getPythonVersion(verbose)),
os=str(getOSVersion(verbose))
)
return piaplib_version
VERSION_UNITS = {
u'all': getVersion,
u'keyring': getKeyringVersion,
u'python': getPythonVersion,
u'os': getOSVersion
}
""" The Pocket Book Unit actions.
None - the piaplib version.
keyring - the keyring version.
python - which version of python is this.
os - which platform is this.
"""
@remediation.error_handling
def getRunVersion(tool, verbose_mode=False):
"""Handler for checking versions."""
if tool is None:
return getVersion(verbose_mode)
theResult = None
if tool in VERSION_UNITS.keys():
try:
theResult = VERSION_UNITS[tool](verbose_mode)
except Exception:
theResult = str("{} {}").format(str(__prog__), str(piaplib.__version__))
return theResult
@remediation.error_handling
def generateParser(calling_parser_group):
"""Parses the CLI arguments."""
if calling_parser_group is None:
parser = argparse.ArgumentParser(
prog=__prog__,
description='Handles PiAP pocket version reports',
epilog="PiAP Book Controller for version tools."
)
else:
parser = calling_parser_group.add_parser(
str(__prog__).split(".")[-1], help="PiAP Book Controller for version tools."
)
parser.add_argument(
nargs='?',
dest='version_unit',
choices=VERSION_UNITS.keys(),
default=u'all',
help='The pocket version option.'
)
parser = utils._handleVerbosityArgs(parser, default=False)
parser = utils._handleVersionArgs(parser)
if calling_parser_group is None:
calling_parser_group = parser
return calling_parser_group
@remediation.error_handling
def parseArgs(arguments=None):
"""Parses the CLI arguments."""
parser = generateParser(None)
return parser.parse_known_args(arguments)
@remediation.bug_handling
def main(argv=None):
"""The Main Event makes no sense to logs yet."""
try:
args, extra = parseArgs(argv)
del extra
output = str(getRunVersion(args.version_unit, args.verbose_mode))
if __name__ in u'__main__':
print(str(output))
return 0
else:
print(str(output))
return output
except Exception as err:
logs.log(str(type(err)), "Critical")
logs.log(str(err), "Critical")
logs.log(str(err.args), "Critical")
return 3
if __name__ in u'__main__':
exitcode = 255
try:
exitcode = main(sys.argv[1:])
except Exception:
exitcode = 3
exit(exitcode)
# vcgencmd get_config int
|
[
"piaplib.keyring.clarify.getAlgoForOS",
"piaplib.pku.utils._handleVersionArgs",
"argparse.ArgumentParser",
"os.path.dirname",
"sys.path.insert",
"piaplib.keyring.clarify.hasBackendCommand",
"piaplib.keyring.clarify.getBackendCommand",
"piaplib.pku.utils._handleVerbosityArgs"
] |
[((7011, 7060), 'piaplib.pku.utils._handleVerbosityArgs', 'utils._handleVerbosityArgs', (['parser'], {'default': '(False)'}), '(parser, default=False)\n', (7037, 7060), True, 'import piaplib.pku.utils as utils\n'), ((7071, 7103), 'piaplib.pku.utils._handleVersionArgs', 'utils._handleVersionArgs', (['parser'], {}), '(parser)\n', (7095, 7103), True, 'import piaplib.pku.utils as utils\n'), ((4324, 4351), 'piaplib.keyring.clarify.hasBackendCommand', 'clarify.hasBackendCommand', ([], {}), '()\n', (4349, 4351), True, 'import piaplib.keyring.clarify as clarify\n'), ((6566, 6715), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '__prog__', 'description': '"""Handles PiAP pocket version reports"""', 'epilog': '"""PiAP Book Controller for version tools."""'}), "(prog=__prog__, description=\n 'Handles PiAP pocket version reports', epilog=\n 'PiAP Book Controller for version tools.')\n", (6589, 6715), True, 'import argparse as argparse\n'), ((1821, 1853), 'sys.path.insert', 'sys.path.insert', (['(0)', '__sys_path__'], {}), '(0, __sys_path__)\n', (1836, 1853), False, 'import sys\n'), ((1749, 1774), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1764, 1774), False, 'import os\n'), ((4558, 4585), 'piaplib.keyring.clarify.getBackendCommand', 'clarify.getBackendCommand', ([], {}), '()\n', (4583, 4585), True, 'import piaplib.keyring.clarify as clarify\n'), ((4601, 4623), 'piaplib.keyring.clarify.getAlgoForOS', 'clarify.getAlgoForOS', ([], {}), '()\n', (4621, 4623), True, 'import piaplib.keyring.clarify as clarify\n')]
|
# passed
from wrapper.client import Client
api_server='testnet'
private_keys=['<KEY>',
'<KEY>']
addresses=['GDJJB2QA7XZSBHCGIRUAIUC5RJ6MJGV2DG5THNNJOTUQWXEDKXLVFZMI',
'GCKEZY2FW2MLANNJTQZIKFISGP2MZ57MZWMGECG3WD2FJHEQAGAA7ZUA']
# key_pairs=[]
# for key in private_keys:
# key_pair=Keypair.from_seed(seed=key)
# key_pairs.append(key_pair)
#
# issuer_priv=private_keys[0]
# distributor_priv=private_keys[1]
#
# issuer=key_pairs[0].address().decode()
# distributor=key_pairs[1].address().decode()
#
#
# # sdk.issue_asset(issuer_priv,distributor_priv,'eth',100,'testnet')
#
# builder = Builder(secret=distributor_priv, network='testnet')
# builder.append_payment_op(destination=issuer, amount=(84000000-1), asset_type='ltc', asset_issuer=issuer)
# builder.sign()
# result = builder.submit()
client=Client(private_key=private_keys[0],api_server=api_server)
client.pay_to(addresses[1],123.45)
client.pay_to(addresses[1],543.21,asset_code='ltc', asset_issuer=addresses[0] )
|
[
"wrapper.client.Client"
] |
[((833, 891), 'wrapper.client.Client', 'Client', ([], {'private_key': 'private_keys[0]', 'api_server': 'api_server'}), '(private_key=private_keys[0], api_server=api_server)\n', (839, 891), False, 'from wrapper.client import Client\n')]
|
import numpy as np
import cv2
import imutils
import os
import time
# This function calculates the distance between the center of two individuals in a
# single frame of the video
def Check(a, b):
dist = (((a[0] - b[0]) ** 2) + (a[1] - b[1]) ** 2) ** 0.5
calibration = (a[1] + b[1]) / 2
if 0 < dist < 0.25 * calibration:
return True
else:
return False
# This function joins the path components, reads the network model stored in Darknet,
# and gets all the layers of the network model to only store the indexes of layers with
# unconnected outputs
def Setup(yolo):
global net, ln, LABELS
weights = os.path.sep.join([yolo, "/users/anshsahny/darknet/yolov3.weights"])
config = os.path.sep.join([yolo, "/users/anshsahny/darknet/cfg/yolov3.cfg"])
labelsPath = os.path.sep.join([yolo, "/users/anshsahny/darknet/data/coco.names"])
LABELS = open(labelsPath).read().strip().split("\n")
net = cv2.dnn.readNetFromDarknet(config, weights)
ln = net.getLayerNames()
ln = [ln[i[0] - 1] for i in net.getUnconnectedOutLayers()]
# This function processes each frame of the video with the "Check" function between
# every individual and returns in to the main function
def ImageProcess(image):
    """Run YOLO person detection on one frame and annotate it.

    Each detected person gets a red box when Check() says they are too close
    to someone else, green otherwise, with red lines joining offending pairs.
    The annotated frame is stored in the module-level `processedImg`.
    Relies on the globals populated by Setup(): net, ln, LABELS.
    """
    global processedImg
    (H, W) = (None, None)
    frame = image.copy()
    if W is None or H is None:
        (H, W) = frame.shape[:2]
    # YOLOv3 expects a 416x416 RGB blob with pixel values scaled to [0, 1].
    blob = cv2.dnn.blobFromImage(frame, 1 / 255.0, (416, 416), swapRB=True, crop=False)
    net.setInput(blob)
    starttime = time.time()
    layerOutputs = net.forward(ln)
    stoptime = time.time()
    print("Video is Getting Processed at {:.4f} seconds per frame".format((stoptime - starttime)))
    confidences = []
    outline = []
    for output in layerOutputs:
        for detection in output:
            # detection layout: [cx, cy, w, h, objectness, class scores...]
            scores = detection[5:]
            maxi_class = np.argmax(scores)
            confidence = scores[maxi_class]
            # Keep only confident "person" detections.
            if LABELS[maxi_class] == "person":
                if confidence > 0.5:
                    # Box coordinates are normalised; scale back to pixels.
                    box = detection[0:4] * np.array([W, H, W, H])
                    (centerX, centerY, width, height) = box.astype("int")
                    x = int(centerX - (width / 2))
                    y = int(centerY - (height / 2))
                    outline.append([x, y, int(width), int(height)])
                    confidences.append(float(confidence))
    # Non-maximum suppression drops overlapping duplicate boxes.
    box_line = cv2.dnn.NMSBoxes(outline, confidences, 0.5, 0.3)
    if len(box_line) > 0:
        flat_box = box_line.flatten()
        pairs = []
        center = []
        status = []
        # Center point of every surviving box; status flags start as "safe".
        for i in flat_box:
            (x, y) = (outline[i][0], outline[i][1])
            (w, h) = (outline[i][2], outline[i][3])
            center.append([int(x + w / 2), int(y + h / 2)])
            status.append(False)
        # Pairwise proximity test; Check() excludes a point against itself.
        for i in range(len(center)):
            for j in range(len(center)):
                close = Check(center[i], center[j])
                if close:
                    pairs.append([center[i], center[j]])
                    status[i] = True
                    status[j] = True
        index = 0
        for i in flat_box:
            (x, y) = (outline[i][0], outline[i][1])
            (w, h) = (outline[i][2], outline[i][3])
            if status[index] == True:
                # Red box: too close to at least one other person.
                cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 150), 2)
            elif status[index] == False:
                # Green box: safe distance.
                cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
            index += 1
        # Join every offending pair with a red line.
        for h in pairs:
            cv2.line(frame, tuple(h[0]), tuple(h[1]), (0, 0, 255), 2)
    processedImg = frame.copy()
# --- Script entry point: read the input video frame by frame, run detection
# --- on (roughly) every other frame and write the annotated output video.
create = None
frameno = 0
filename = "input.gif"
yolo = ""
opname = "output.mp4"
cap = cv2.VideoCapture(filename)
# Main function where input video is broken into single frames and performs all the
# functions above, creates output frame and combines it to create an output video
time1 = time.time()
while (True):
    ret, frame = cap.read()
    if not ret:
        break
    current_img = frame.copy()
    current_img = imutils.resize(current_img, width=480)
    video = current_img.shape  # NOTE(review): `video` is never used afterwards
    frameno += 1
    # Detection runs on the first frame and every even-numbered frame;
    # skipped frames re-use the previous processedImg below.
    if (frameno % 2 == 0 or frameno == 1):
        # NOTE(review): Setup() reloads the entire YOLO network on every
        # processed frame — hoisting it out of the loop would be far faster.
        Setup(yolo)
        ImageProcess(current_img)
        Frame = processedImg
        # Lazily create the writer once the output frame size is known.
        if create is None:
            fourcc = cv2.VideoWriter_fourcc(*'XVID')
            create = cv2.VideoWriter(opname, fourcc, 30, (Frame.shape[1], Frame.shape[0]), True)
    create.write(Frame)
    # Press 's' to stop early.
    if cv2.waitKey(1) & 0xFF == ord('s'):
        break
time2 = time.time()
print("Completed. Total Time Taken: {} minutes".format((time2 - time1) / 60))
cap.release()
cv2.destroyAllWindows()
|
[
"cv2.dnn.NMSBoxes",
"cv2.VideoWriter_fourcc",
"numpy.argmax",
"cv2.waitKey",
"cv2.dnn.blobFromImage",
"cv2.dnn.readNetFromDarknet",
"time.time",
"cv2.VideoCapture",
"cv2.rectangle",
"numpy.array",
"cv2.VideoWriter",
"imutils.resize",
"cv2.destroyAllWindows",
"os.path.sep.join"
] |
[((3685, 3711), 'cv2.VideoCapture', 'cv2.VideoCapture', (['filename'], {}), '(filename)\n', (3701, 3711), False, 'import cv2\n'), ((3888, 3899), 'time.time', 'time.time', ([], {}), '()\n', (3897, 3899), False, 'import time\n'), ((4501, 4512), 'time.time', 'time.time', ([], {}), '()\n', (4510, 4512), False, 'import time\n'), ((4606, 4629), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (4627, 4629), False, 'import cv2\n'), ((640, 707), 'os.path.sep.join', 'os.path.sep.join', (["[yolo, '/users/anshsahny/darknet/yolov3.weights']"], {}), "([yolo, '/users/anshsahny/darknet/yolov3.weights'])\n", (656, 707), False, 'import os\n'), ((721, 788), 'os.path.sep.join', 'os.path.sep.join', (["[yolo, '/users/anshsahny/darknet/cfg/yolov3.cfg']"], {}), "([yolo, '/users/anshsahny/darknet/cfg/yolov3.cfg'])\n", (737, 788), False, 'import os\n'), ((806, 874), 'os.path.sep.join', 'os.path.sep.join', (["[yolo, '/users/anshsahny/darknet/data/coco.names']"], {}), "([yolo, '/users/anshsahny/darknet/data/coco.names'])\n", (822, 874), False, 'import os\n'), ((942, 985), 'cv2.dnn.readNetFromDarknet', 'cv2.dnn.readNetFromDarknet', (['config', 'weights'], {}), '(config, weights)\n', (968, 985), False, 'import cv2\n'), ((1394, 1470), 'cv2.dnn.blobFromImage', 'cv2.dnn.blobFromImage', (['frame', '(1 / 255.0)', '(416, 416)'], {'swapRB': '(True)', 'crop': '(False)'}), '(frame, 1 / 255.0, (416, 416), swapRB=True, crop=False)\n', (1415, 1470), False, 'import cv2\n'), ((1510, 1521), 'time.time', 'time.time', ([], {}), '()\n', (1519, 1521), False, 'import time\n'), ((1572, 1583), 'time.time', 'time.time', ([], {}), '()\n', (1581, 1583), False, 'import time\n'), ((2378, 2426), 'cv2.dnn.NMSBoxes', 'cv2.dnn.NMSBoxes', (['outline', 'confidences', '(0.5)', '(0.3)'], {}), '(outline, confidences, 0.5, 0.3)\n', (2394, 2426), False, 'import cv2\n'), ((4022, 4060), 'imutils.resize', 'imutils.resize', (['current_img'], {'width': '(480)'}), '(current_img, width=480)\n', (4036, 4060), False, 
'import imutils\n'), ((1847, 1864), 'numpy.argmax', 'np.argmax', (['scores'], {}), '(scores)\n', (1856, 1864), True, 'import numpy as np\n'), ((4284, 4315), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'XVID'"], {}), "(*'XVID')\n", (4306, 4315), False, 'import cv2\n'), ((4337, 4412), 'cv2.VideoWriter', 'cv2.VideoWriter', (['opname', 'fourcc', '(30)', '(Frame.shape[1], Frame.shape[0])', '(True)'], {}), '(opname, fourcc, 30, (Frame.shape[1], Frame.shape[0]), True)\n', (4352, 4412), False, 'import cv2\n'), ((4444, 4458), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (4455, 4458), False, 'import cv2\n'), ((3268, 3328), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', '(0, 0, 150)', '(2)'], {}), '(frame, (x, y), (x + w, y + h), (0, 0, 150), 2)\n', (3281, 3328), False, 'import cv2\n'), ((3386, 3446), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', '(0, 255, 0)', '(2)'], {}), '(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)\n', (3399, 3446), False, 'import cv2\n'), ((2036, 2058), 'numpy.array', 'np.array', (['[W, H, W, H]'], {}), '([W, H, W, H])\n', (2044, 2058), True, 'import numpy as np\n')]
|
import configparser
import getopt
import os
import sys
from enum import Enum

from le_utils.constants import content_kinds,file_formats, format_presets, licenses, exercises, languages
from pressurecooker.encodings import get_base64_encoding
from ricecooker.classes import nodes, questions, files
from ricecooker.classes.licenses import get_license
from ricecooker.exceptions import InvalidFormatException, UnknownContentKindError, UnknownFileTypeError, UnknownQuestionTypeError, raise_for_invalid_channel
class FileTypes(Enum):
    """ Enum containing all file types Ricecooker can have

        Members (see FILE_TYPE_MAPPING below for the extension mapping):
            AUDIO_FILE: mp3 files
            THUMBNAIL: png, jpg, or jpeg files
            DOCUMENT_FILE: pdf files
            VIDEO_FILE: mp4 files
            SUBTITLE_FILE: vtt files
            HTML_ZIP_FILE: html5 app archives
            YOUTUBE_VIDEO_FILE / WEB_VIDEO_FILE: videos fetched from a URL
            BASE64_FILE: images supplied as base64-encoded data
    """
    AUDIO_FILE = 0
    THUMBNAIL = 1
    DOCUMENT_FILE = 2
    VIDEO_FILE = 3
    YOUTUBE_VIDEO_FILE = 4
    VECTORIZED_VIDEO_FILE = 5
    VIDEO_THUMBNAIL = 6
    YOUTUBE_VIDEO_THUMBNAIL_FILE = 7
    HTML_ZIP_FILE = 8
    SUBTITLE_FILE = 9
    TILED_THUMBNAIL_FILE = 10
    UNIVERSAL_SUBS_SUBTITLE_FILE = 11
    BASE64_FILE = 12
    WEB_VIDEO_FILE = 13
# Maps each content kind to the file extensions it accepts and the FileTypes
# member each extension is treated as.  Image extensions (png/jpg/jpeg) act
# as thumbnails for every kind.  Used by guess_file_type() below.
FILE_TYPE_MAPPING = {
    content_kinds.AUDIO : {
        file_formats.MP3 : FileTypes.AUDIO_FILE,
        file_formats.PNG : FileTypes.THUMBNAIL,
        file_formats.JPG : FileTypes.THUMBNAIL,
        file_formats.JPEG : FileTypes.THUMBNAIL,
    },
    content_kinds.DOCUMENT : {
        file_formats.PDF : FileTypes.DOCUMENT_FILE,
        file_formats.PNG : FileTypes.THUMBNAIL,
        file_formats.JPG : FileTypes.THUMBNAIL,
        file_formats.JPEG : FileTypes.THUMBNAIL,
    },
    content_kinds.HTML5 : {
        file_formats.HTML5 : FileTypes.HTML_ZIP_FILE,
        file_formats.PNG : FileTypes.THUMBNAIL,
        file_formats.JPG : FileTypes.THUMBNAIL,
        file_formats.JPEG : FileTypes.THUMBNAIL,
    },
    content_kinds.VIDEO : {
        file_formats.MP4 : FileTypes.VIDEO_FILE,
        file_formats.VTT : FileTypes.SUBTITLE_FILE,
        file_formats.PNG : FileTypes.THUMBNAIL,
        file_formats.JPG : FileTypes.THUMBNAIL,
        file_formats.JPEG : FileTypes.THUMBNAIL,
    },
    content_kinds.EXERCISE : {
        file_formats.PNG : FileTypes.THUMBNAIL,
        file_formats.JPG : FileTypes.THUMBNAIL,
        file_formats.JPEG : FileTypes.THUMBNAIL,
    },
}
def guess_file_type(kind, filepath=None, youtube_id=None, web_url=None, encoding=None):
    """Determine which FileTypes member a piece of content corresponds to.

    Exactly one of youtube_id / web_url / encoding / filepath is expected:
    the first three map directly to a dedicated file type, otherwise the
    filepath's extension is looked up in FILE_TYPE_MAPPING for the given
    content kind.

    Returns the matching FileTypes member, or None when nothing matches.
    """
    if youtube_id:
        return FileTypes.YOUTUBE_VIDEO_FILE
    if web_url:
        return FileTypes.WEB_VIDEO_FILE
    if encoding:
        return FileTypes.BASE64_FILE
    extension = os.path.splitext(filepath)[1][1:].lower()
    return FILE_TYPE_MAPPING.get(kind, {}).get(extension)
def guess_content_kind(path=None, web_video_data=None, questions=None):
    """ guess_content_kind: determines what kind the content is
        Args:
            path (str): file path or URL associated with the content (optional)
            web_video_data: youtube id or web url of a video (optional)
            questions (list): assessment questions attached to the content (optional)
        Returns: a le_utils content_kinds constant indicating the node's kind
        Raises: InvalidFormatException when path has an unrecognized extension
    """
    # If there are any questions, return exercise
    if questions and len(questions) > 0:
        return content_kinds.EXERCISE
    # See if any files match a content kind
    if path:
        # Extension of the last path component, lowercased.
        ext = path.rsplit('/', 1)[-1].split(".")[-1].lower()
        if ext in content_kinds.MAPPING:
            return content_kinds.MAPPING[ext]
        # NOTE(review): make sure InvalidFormatException is imported from
        # ricecooker.exceptions — it is used here but may not be in scope.
        raise InvalidFormatException("Invalid file type: Allowed formats are {0}".format([key for key, value in content_kinds.MAPPING.items()]))
    elif web_video_data:
        return content_kinds.VIDEO
    else:
        # No path, no video data, no questions: treat as a folder/topic.
        return content_kinds.TOPIC
def construct_channel(**kwargs):
    """Build, populate and validate the channel.

    Reads the module-level ``channeldata`` dict for channel metadata and
    ``SAMPLE_TREE`` for the content hierarchy.  Extra keyword arguments are
    accepted and ignored (presumably run options forwarded by the ricecooker
    CLI — confirm against the chef entry-point contract).

    Returns the validated ChannelNode.
    """
    metadata = channeldata
    channel = nodes.ChannelNode(
        source_domain=metadata["domain"],
        source_id=metadata["source_id"],
        title=metadata["title"],
        thumbnail=metadata.get("thumbnail", None),
    )
    _build_tree(channel, SAMPLE_TREE)
    raise_for_invalid_channel(channel)
    return channel
def _build_tree(node, sourcetree):
    """Recursively convert raw source-node dicts into ricecooker nodes.

    Args:
        node: parent ricecooker node the children get attached to.
        sourcetree (list): raw child dicts (id/title/files/questions/...).
    Returns: the parent ``node`` with its subtree attached.
    Children whose kind cannot be determined are silently skipped.
    """
    for child_source_node in sourcetree:
        try:
            # The first attached file (if any) drives kind detection.
            main_file = child_source_node['files'][0] if 'files' in child_source_node else {}
            kind = guess_content_kind(path=main_file.get('path'), web_video_data=main_file.get('youtube_id') or main_file.get('web_url'), questions=child_source_node.get("questions"))
        except UnknownContentKindError:
            continue
        if kind == content_kinds.TOPIC:
            # Topics recurse into their own children.
            child_node = nodes.TopicNode(
                source_id=child_source_node["id"],
                title=child_source_node["title"],
                author=child_source_node.get("author"),
                description=child_source_node.get("description"),
                thumbnail=child_source_node.get("thumbnail"),
            )
            node.add_child(child_node)
            source_tree_children = child_source_node.get("children", [])
            _build_tree(child_node, source_tree_children)
        elif kind == content_kinds.VIDEO:
            child_node = nodes.VideoNode(
                source_id=child_source_node["id"],
                title=child_source_node["title"],
                license=child_source_node.get("license"),
                author=child_source_node.get("author"),
                description=child_source_node.get("description"),
                derive_thumbnail=True, # video-specific data
                thumbnail=child_source_node.get('thumbnail'),
            )
            add_files(child_node, child_source_node.get("files") or [])
            node.add_child(child_node)
        elif kind == content_kinds.AUDIO:
            child_node = nodes.AudioNode(
                source_id=child_source_node["id"],
                title=child_source_node["title"],
                license=child_source_node.get("license"),
                author=child_source_node.get("author"),
                description=child_source_node.get("description"),
                thumbnail=child_source_node.get("thumbnail"),
            )
            add_files(child_node, child_source_node.get("files") or [])
            node.add_child(child_node)
        elif kind == content_kinds.DOCUMENT:
            child_node = nodes.DocumentNode(
                source_id=child_source_node["id"],
                title=child_source_node["title"],
                license=child_source_node.get("license"),
                author=child_source_node.get("author"),
                description=child_source_node.get("description"),
                thumbnail=child_source_node.get("thumbnail"),
            )
            add_files(child_node, child_source_node.get("files") or [])
            node.add_child(child_node)
        elif kind == content_kinds.EXERCISE:
            child_node = nodes.ExerciseNode(
                source_id=child_source_node["id"],
                title=child_source_node["title"],
                license=child_source_node.get("license"),
                author=child_source_node.get("author"),
                description=child_source_node.get("description"),
                exercise_data={}, # Just set to default
                thumbnail=child_source_node.get("thumbnail"),
            )
            add_files(child_node, child_source_node.get("files") or [])
            # Exercises additionally carry assessment questions.
            for q in child_source_node.get("questions"):
                question = create_question(q)
                child_node.add_question(question)
            node.add_child(child_node)
        elif kind == content_kinds.HTML5:
            child_node = nodes.HTML5AppNode(
                source_id=child_source_node["id"],
                title=child_source_node["title"],
                license=child_source_node.get("license"),
                author=child_source_node.get("author"),
                description=child_source_node.get("description"),
                thumbnail=child_source_node.get("thumbnail"),
            )
            add_files(child_node, child_source_node.get("files") or [])
            node.add_child(child_node)
        else: # unknown content file format
            continue
    return node
def add_files(node, file_list):
    """Attach every file dict in ``file_list`` to ``node``.

    Each dict is classified with guess_file_type() (using the node's kind
    plus whichever of path/youtube_id/web_url/encoding the dict carries) and
    wrapped in the matching ricecooker file class.
    Raises UnknownFileTypeError for an unrecognized combination.
    """
    for f in file_list:
        file_type = guess_file_type(node.kind, filepath=f.get('path'), youtube_id=f.get('youtube_id'), web_url=f.get('web_url'), encoding=f.get('encoding'))
        if file_type == FileTypes.AUDIO_FILE:
            node.add_file(files.AudioFile(path=f['path'], language=f.get('language')))
        elif file_type == FileTypes.THUMBNAIL:
            node.add_file(files.ThumbnailFile(path=f['path']))
        elif file_type == FileTypes.DOCUMENT_FILE:
            node.add_file(files.DocumentFile(path=f['path'], language=f.get('language')))
        elif file_type == FileTypes.HTML_ZIP_FILE:
            node.add_file(files.HTMLZipFile(path=f['path'], language=f.get('language')))
        elif file_type == FileTypes.VIDEO_FILE:
            node.add_file(files.VideoFile(path=f['path'], language=f.get('language'), ffmpeg_settings=f.get('ffmpeg_settings')))
        elif file_type == FileTypes.SUBTITLE_FILE:
            # Subtitles require an explicit language (no .get fallback).
            node.add_file(files.SubtitleFile(path=f['path'], language=f['language']))
        elif file_type == FileTypes.BASE64_FILE:
            node.add_file(files.Base64ImageFile(encoding=f['encoding']))
        elif file_type == FileTypes.WEB_VIDEO_FILE:
            node.add_file(files.WebVideoFile(web_url=f['web_url'], high_resolution=f.get('high_resolution')))
        elif file_type == FileTypes.YOUTUBE_VIDEO_FILE:
            node.add_file(files.YouTubeVideoFile(youtube_id=f['youtube_id'], high_resolution=f.get('high_resolution')))
        else:
            raise UnknownFileTypeError("Unrecognized file type '{0}'".format(f['path']))
def create_question(raw_question):
    """Convert a raw question dict into the matching ricecooker question.

    Dispatch is driven by ``raw_question["type"]``; each branch forwards the
    fields that question class expects.  Raises UnknownQuestionTypeError for
    any type outside the supported set.
    """
    question_type = raw_question["type"]
    if question_type == exercises.MULTIPLE_SELECTION:
        return questions.MultipleSelectQuestion(
            id=raw_question["id"],
            question=raw_question["question"],
            correct_answers=raw_question["correct_answers"],
            all_answers=raw_question["all_answers"],
            hints=raw_question.get("hints"),
        )
    elif question_type == exercises.SINGLE_SELECTION:
        return questions.SingleSelectQuestion(
            id=raw_question["id"],
            question=raw_question["question"],
            correct_answer=raw_question["correct_answer"],
            all_answers=raw_question["all_answers"],
            hints=raw_question.get("hints"),
        )
    elif question_type == exercises.INPUT_QUESTION:
        return questions.InputQuestion(
            id=raw_question["id"],
            question=raw_question["question"],
            answers=raw_question["answers"],
            hints=raw_question.get("hints"),
        )
    elif question_type == exercises.FREE_RESPONSE:
        return questions.FreeResponseQuestion(
            id=raw_question["id"],
            question=raw_question["question"],
            hints=raw_question.get("hints"),
        )
    elif question_type == exercises.PERSEUS_QUESTION:
        return questions.PerseusQuestion(
            id=raw_question["id"],
            raw_data=raw_question["item_data"],
            source_url="https://www.google.com/",
        )
    raise UnknownQuestionTypeError("Unrecognized question type '{0}': accepted types are {1}".format(raw_question["type"], [key for key, value in exercises.question_choices]))
|
[
"le_utils.constants.content_kinds.MAPPING.items",
"ricecooker.exceptions.raise_for_invalid_channel",
"ricecooker.classes.files.ThumbnailFile",
"ricecooker.classes.files.SubtitleFile",
"os.path.splitext",
"ricecooker.classes.files.Base64ImageFile",
"ricecooker.classes.questions.PerseusQuestion"
] |
[((4046, 4080), 'ricecooker.exceptions.raise_for_invalid_channel', 'raise_for_invalid_channel', (['channel'], {}), '(channel)\n', (4071, 4080), False, 'from ricecooker.exceptions import UnknownContentKindError, UnknownFileTypeError, UnknownQuestionTypeError, raise_for_invalid_channel\n'), ((11199, 11326), 'ricecooker.classes.questions.PerseusQuestion', 'questions.PerseusQuestion', ([], {'id': "raw_question['id']", 'raw_data': "raw_question['item_data']", 'source_url': '"""https://www.google.com/"""'}), "(id=raw_question['id'], raw_data=raw_question[\n 'item_data'], source_url='https://www.google.com/')\n", (11224, 11326), False, 'from ricecooker.classes import nodes, questions, files\n'), ((8658, 8693), 'ricecooker.classes.files.ThumbnailFile', 'files.ThumbnailFile', ([], {'path': "f['path']"}), "(path=f['path'])\n", (8677, 8693), False, 'from ricecooker.classes import nodes, questions, files\n'), ((3612, 3641), 'le_utils.constants.content_kinds.MAPPING.items', 'content_kinds.MAPPING.items', ([], {}), '()\n', (3639, 3641), False, 'from le_utils.constants import content_kinds, file_formats, format_presets, licenses, exercises, languages\n'), ((2717, 2743), 'os.path.splitext', 'os.path.splitext', (['filepath'], {}), '(filepath)\n', (2733, 2743), False, 'import os\n'), ((9230, 9288), 'ricecooker.classes.files.SubtitleFile', 'files.SubtitleFile', ([], {'path': "f['path']", 'language': "f['language']"}), "(path=f['path'], language=f['language'])\n", (9248, 9288), False, 'from ricecooker.classes import nodes, questions, files\n'), ((9365, 9410), 'ricecooker.classes.files.Base64ImageFile', 'files.Base64ImageFile', ([], {'encoding': "f['encoding']"}), "(encoding=f['encoding'])\n", (9386, 9410), False, 'from ricecooker.classes import nodes, questions, files\n')]
|
from discord import Role, TextChannel, Permissions, Embed, Color
from discord.ext import commands
from discord.utils import get
from datetime import datetime
from sqlite3 import connect
class Setup(commands.Cog, name='Setup'):
    """
    Cog gathering the commands used to configure the bot.
    """
    def __init__(self, bot):
        # Keep a reference to the bot for channel/guild lookups in listeners.
        self.bot = bot
    @commands.command(brief='!setup [verif/mute/logs/temp] [@role/#channel]', description='Définir un role pour "verif" ou "mute"')
    @commands.has_permissions(administrator=True)
    async def setup(self, ctx, mtype: str, setup_data):
        """Persist the role/channel configured for ``mtype`` in the setup table.

        mtype: 'verif'/'mute' expect a role mention, 'temp'/'logs' expect a
        channel mention; setup_data is the raw mention string.
        """
        if mtype.lower() in ['verif', 'mute']:
            # Role mentions look like <@&123>; strip the markup to get the id.
            mod = get(ctx.guild.roles, id=int(setup_data.strip('<@&>')))
        elif mtype.lower() in ['temp', 'logs']:
            # Channel mentions look like <#123>.
            mod = get(ctx.guild.channels, id=int(setup_data.strip('<#>')))
        else:
            embed = Embed(title='❌ Oups ! Il y a une erreur :', description='Choix invalide ! (verif/mute/logs/temp)', color=0xe74c3c)
            await ctx.send(embed=embed); return
        with connect('data.db') as conn:
            c = conn.cursor()
            c.execute('SELECT * FROM setup WHERE Guild_ID=?', (ctx.guild.id,))
            if c.fetchone() is None:
                # First configuration for this guild: create its row.
                # NOTE(review): mtype interpolated into SQL — safe only because
                # it was validated against a fixed whitelist above.
                c.execute(f'INSERT INTO setup (Guild_ID, {mtype.capitalize()}) VALUES (?, ?)', (ctx.guild.id, mod.id))
            else:
                c.execute(f'UPDATE setup SET {mtype.capitalize()}=? WHERE Guild_ID=?', (mod.id, ctx.guild.id))
            conn.commit()
        embed = (Embed(description=f'{ctx.author.mention} a défini {mod.mention} pour "{mtype}"', color=0xa84300)
                 .set_author(name=f'{ctx.author} a modifié "{mtype}"', icon_url=ctx.author.avatar_url))
        await ctx.send(embed=embed)
    @commands.command(hidden=True)
    @commands.has_permissions(manage_messages=True)
    async def regles(self, ctx):
        """Post the server-rules embed and seed it with the ✅ reaction that
        on_raw_reaction_add uses for member verification."""
        rules = {
            '👍 Règle n°1': "Respect mutuel ! Pour un chat sympa et bienveillant, pas d'insultes ou de méchancetés",
            '🗳️ Règle n°2': "C'est un serveur dédié à @E - Wizard#3217. Pas de sujets politiques, religieux et pas de racisme, de harcèlement ou de contenu offensif.",
            '🔕 Règle n°3': "Pas de spam ou de mentions abusives. Pour éviter d'avoir un chat qui ressemble à rien, évitez les abus.",
            '👦 Règle n°4': "Ayez un avatar et un pseudo approprié (family-friendly)",
            '🔒 Règle n°5': "Ne partagez pas vos informations personnelles ! Protégez votre intimité et celle des autres.",
            '💛 Règle n°6': "Utilisez votre bon sens. Ne faites pas aux autres ce que vous ne voudriez pas qu'on vous fasse.",
            '💬 Règle n°7': "Évitez la pub ! Vous pouvez partager vos projets dans #vos-projects.",
            '🙏 Règle n°8': "Pas de mandiage de role. C'est juste une perte de temps et ça ne marchera jamais.",
            '📑 Règle n°9': "Repectez les [Guidelines de la Communauté Discord](https://discord.com/guidelines) et les [Conditions d'utilisation](https://discord.com/terms).",
        }
        embed = Embed(title="📃 Règles du serveur:", description='Appuie sur ✅ après avoir lu les règles :',color=0xa84300)
        for key, value in rules.items():
            embed.add_field(name=key, value=f"{value}\n", inline=False)
        # Replace the invoking command message with the rules embed.
        await ctx.message.delete()
        msg = await ctx.send(embed=embed)
        await msg.add_reaction('✅')
    @commands.command(brief='!roles', description='Setup a role menu')
    @commands.has_permissions(manage_roles=True)
    async def roles(self, ctx):
        """Open the interactive role-menu setup listing every assignable role."""
        def check(role: Role):
            # True for roles that must NOT be offered: anything carrying
            # moderation powers, bot-managed roles and @everyone.
            r = role.permissions
            return (r.manage_messages or r.administrator or r.manage_roles or r.ban_members
                or r.kick_members or r.manage_guild or r.manage_nicknames or r.manage_channels
                or r.mute_members or r.deafen_members or r.move_members or r.manage_emojis
                or role.managed or role==role.guild.default_role)
        roles = [role for role in ctx.guild.roles if not check(role)]
        embed = Embed(title='🔧 Menu de setup de roles', description='Réagis au message pour définir un emoji au role sélectionné.\n Tu peux aussi appuyer sur ❌ pour supprimer le role du menu.', color=0x11806a)
        for i, role in enumerate(roles):
            # The first field starts selected; '>> name <<' marks selection.
            embed.add_field(name=role.name if i else f'>> {role.name} <<' , value="Pas d'émoji défini", inline=False)
        embed.set_footer(text=f"{embed.fields[0].name.strip('>< ')} • Pas d'émoji défini • Appuie sur 🔧 quand tu as fini")
        msg = await ctx.send(embed=embed)
        for reaction in ['⏪', '⏩', '❌', '🔧']:
            await msg.add_reaction(reaction)
    @commands.Cog.listener()
    async def on_raw_reaction_add(self, payload):
        """Reaction dispatcher: ✅ verifies members against the rules embed,
        reactions on the 🗺️ role menu grant roles, and ⏪/⏩/❌/🔧 drive the
        admin-only role-menu setup."""
        if payload.member.bot: return
        member = payload.member
        guild = member.guild
        emoji = payload.emoji
        channel = self.bot.get_channel(payload.channel_id)
        message = await channel.fetch_message(payload.message_id)
        reaction = get(message.reactions, emoji=emoji.name)
        embed = message.embeds[0]
        if emoji.name == '✅':
            # Rules accepted: grant the configured verification role.
            with connect('data.db') as conn:
                c = conn.cursor()
                c.execute('SELECT Verif FROM setup WHERE Guild_ID=?', (guild.id,))
                role = get(guild.roles, id=c.fetchone()[0])
                if not role in member.roles:
                    await member.add_roles(role)
            return
        if '🗺️ Menu des roles' == embed.title:
            # Published role menu: map the emoji back to its role name
            # (field values look like "<emoji> : `name`").
            for field in embed.fields:
                if emoji.name in field.value:
                    role = get(guild.roles, name=field.value.split(' : ')[1].strip('`'))
                    await member.add_roles(role)
            return
        # Everything below edits the setup menu and is admin-only.
        if not payload.member.guild_permissions.administrator: return
        for i, field in enumerate(embed.fields):
            # Only the field wrapped in '>> <<' is the current selection.
            if '>>' not in field.name:
                continue
            if emoji.name in ['⏪', '⏩', '❌']:
                field_nb, name = len(embed.fields)-1, field.name.strip('<> ')
                # Move the selection marker left/right (wrapping around), or
                # pick the neighbour when deleting the current field.
                index = field_nb if (i==0 and emoji.name in ['⏪', '❌']) else (0 if (i==field_nb and emoji.name in ['⏩', '❌']) else (i-1 if emoji.name=='⏪' else i+1))
                switch = embed.fields[index]
                # NOTE(review): `emoji!='❌'` compares a PartialEmoji with a
                # str, which is presumably always True — `emoji.name` was
                # probably intended; confirm the ❌ path behaves as expected.
                embed.set_field_at(index if emoji!='❌' else i, name=f'>> {switch.name} <<', value=switch.value, inline=False)
                embed.set_footer(text=f"{switch.name} • {switch.value} • Appuie sur 🔧 quand tu as fini")
                if emoji.name == '❌':
                    # NOTE(review): `role` is looked up here but never used.
                    role = get(guild.roles, name=name)
                    embed.remove_field(i)
                else:
                    embed.set_field_at(i, name=name, value=field.value, inline=False)
            elif emoji.name == '🔧':
                # Confirm: publish the final role menu if every role has an emoji.
                emojis = [field.value.strip("Émoji → Pas d'émoji défini") for field in embed.fields]
                if '' in emojis:
                    await channel.send('Vous devez définir tous les emojis avant de confirmer !', delete_after=5)
                    return
                role_embed = Embed(title='🗺️ Menu des roles', color=0xf1c40f)
                for emoji, field in zip(emojis, embed.fields):
                    name = field.name.strip('<> ')
                    role_embed.add_field(name='\u200b', value=f"{emoji} : `{name}`", inline=False)
                # Drop the setup message and its predecessor, then publish.
                await channel.purge(limit=2)
                msg = await channel.send(embed=role_embed)
                for emoji in emojis:
                    await msg.add_reaction(emoji)
            else:
                # Any other reaction assigns that emoji to the selected role.
                embed.set_field_at(i, name=embed.fields[i].name, value=f'Émoji → {emoji}')
        try:
            await message.edit(embed=embed)
            await reaction.remove(member)
        except:
            pass
    @commands.Cog.listener()
    async def on_raw_reaction_remove(self, payload):
        """Remove the matching role when a member un-reacts on the role menu."""
        guild = await self.bot.fetch_guild(payload.guild_id)
        member = await guild.fetch_member(payload.user_id)
        if member.bot: return
        emoji = payload.emoji
        channel = self.bot.get_channel(payload.channel_id)
        message = await channel.fetch_message(payload.message_id)
        embed = message.embeds[0]
        if '🗺️ Menu des roles' == embed.title:
            for field in embed.fields:
                if emoji.name in field.value:
                    role = get(guild.roles, name=field.value.split(' : ')[1].strip('`'))
                    await member.remove_roles(role)
    @commands.Cog.listener()
    async def on_guild_join(self, guild):
        """Provision the per-guild tables/rows and announce the join."""
        with connect('data.db') as conn:
            c = conn.cursor()
            c.execute(f'CREATE TABLE IF NOT EXISTS "{guild.id}" (User_ID INTEGER, Warns TEXT, Mute INTEGER, Verif INTEGER, Temp INTEGER)')
            c.execute("INSERT INTO logs (ID, State) VALUES (?, ?)", (guild.id, 0))
            conn.commit()
        # Hard-coded announcement channel (on the developer's own server).
        channel = await self.bot.fetch_channel(747480897426817095)
        embed = (Embed(color=0xf1c40f, timestamp=datetime.now())
                 .add_field(name='👥 Membres', value=f'{guild.member_count} members')
                 .add_field(name='🌍 Région', value=str(guild.region).capitalize())
                 .add_field(name='🗝️ Owner', value=guild.owner)
                 .set_author(name=f'''J'ai rejoint "{guild.name}"''', icon_url=guild.icon_url))
        await channel.send(embed=embed)
    @commands.command(hidden=True)
    @commands.is_owner()
    async def update_db(self, ctx):
        """Owner-only maintenance: backfill tables and rows for every guild."""
        with connect('data.db') as conn:
            c = conn.cursor()
            for guild in self.bot.guilds:
                c.execute(f'CREATE TABLE IF NOT EXISTS "{guild.id}" (User_ID INTEGER, Warns TEXT, Mute INTEGER, Verif INTEGER, Temp INTEGER)')
                c.execute("SELECT * FROM logs WHERE ID=?", (guild.id,))
                if c.fetchone() is None:
                    c.execute("INSERT INTO logs (ID, State) VALUES (?, ?)", (guild.id, 0))
                c.execute("SELECT * FROM setup WHERE Guild_ID=?", (guild.id,))
                if c.fetchone() is None:
                    c.execute("INSERT INTO setup (Guild_ID, Verif, Mute, Logs, Temp) VALUES (?, ?, ?, ?, ?)", (guild.id, None, None, None, None))
            conn.commit()
        await ctx.message.delete()
        await ctx.send('Base de donnée mise à jour', delete_after=5)
def setup(bot):
    """discord.py extension entry point: attach the Setup cog to *bot*."""
    cog = Setup(bot)
    bot.add_cog(cog)
|
[
"discord.utils.get",
"discord.ext.commands.command",
"discord.Embed",
"discord.ext.commands.has_permissions",
"datetime.datetime.now",
"discord.ext.commands.Cog.listener",
"sqlite3.connect",
"discord.ext.commands.is_owner"
] |
[((367, 497), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""!setup [verif/mute/logs/temp] [@role/#channel]"""', 'description': '"""Définir un role pour "verif" ou "mute\\""""'}), '(brief=\'!setup [verif/mute/logs/temp] [@role/#channel]\',\n description=\'Définir un role pour "verif" ou "mute"\')\n', (383, 497), False, 'from discord.ext import commands\n'), ((499, 543), 'discord.ext.commands.has_permissions', 'commands.has_permissions', ([], {'administrator': '(True)'}), '(administrator=True)\n', (523, 543), False, 'from discord.ext import commands\n'), ((1761, 1790), 'discord.ext.commands.command', 'commands.command', ([], {'hidden': '(True)'}), '(hidden=True)\n', (1777, 1790), False, 'from discord.ext import commands\n'), ((1796, 1842), 'discord.ext.commands.has_permissions', 'commands.has_permissions', ([], {'manage_messages': '(True)'}), '(manage_messages=True)\n', (1820, 1842), False, 'from discord.ext import commands\n'), ((3398, 3463), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""!roles"""', 'description': '"""Setup a role menu"""'}), "(brief='!roles', description='Setup a role menu')\n", (3414, 3463), False, 'from discord.ext import commands\n'), ((3469, 3512), 'discord.ext.commands.has_permissions', 'commands.has_permissions', ([], {'manage_roles': '(True)'}), '(manage_roles=True)\n', (3493, 3512), False, 'from discord.ext import commands\n'), ((4666, 4689), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (4687, 4689), False, 'from discord.ext import commands\n'), ((7804, 7827), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (7825, 7827), False, 'from discord.ext import commands\n'), ((8499, 8522), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (8520, 8522), False, 'from discord.ext import commands\n'), ((9390, 9419), 'discord.ext.commands.command', 'commands.command', ([], {'hidden': '(True)'}), 
'(hidden=True)\n', (9406, 9419), False, 'from discord.ext import commands\n'), ((9425, 9444), 'discord.ext.commands.is_owner', 'commands.is_owner', ([], {}), '()\n', (9442, 9444), False, 'from discord.ext import commands\n'), ((3059, 3171), 'discord.Embed', 'Embed', ([], {'title': '"""📃 Règles du serveur:"""', 'description': '"""Appuie sur ✅ après avoir lu les règles :"""', 'color': '(11027200)'}), "(title='📃 Règles du serveur:', description=\n 'Appuie sur ✅ après avoir lu les règles :', color=11027200)\n", (3064, 3171), False, 'from discord import Role, TextChannel, Permissions, Embed, Color\n'), ((4051, 4256), 'discord.Embed', 'Embed', ([], {'title': '"""🔧 Menu de setup de roles"""', 'description': '"""Réagis au message pour définir un emoji au role sélectionné.\n Tu peux aussi appuyer sur ❌ pour supprimer le role du menu."""', 'color': '(1146986)'}), '(title=\'🔧 Menu de setup de roles\', description=\n """Réagis au message pour définir un emoji au role sélectionné.\n Tu peux aussi appuyer sur ❌ pour supprimer le role du menu."""\n , color=1146986)\n', (4056, 4256), False, 'from discord import Role, TextChannel, Permissions, Embed, Color\n'), ((5013, 5053), 'discord.utils.get', 'get', (['message.reactions'], {'emoji': 'emoji.name'}), '(message.reactions, emoji=emoji.name)\n', (5016, 5053), False, 'from discord.utils import get\n'), ((1053, 1071), 'sqlite3.connect', 'connect', (['"""data.db"""'], {}), "('data.db')\n", (1060, 1071), False, 'from sqlite3 import connect\n'), ((8578, 8596), 'sqlite3.connect', 'connect', (['"""data.db"""'], {}), "('data.db')\n", (8585, 8596), False, 'from sqlite3 import connect\n'), ((9494, 9512), 'sqlite3.connect', 'connect', (['"""data.db"""'], {}), "('data.db')\n", (9501, 9512), False, 'from sqlite3 import connect\n'), ((877, 996), 'discord.Embed', 'Embed', ([], {'title': '"""❌ Oups ! Il y a une erreur :"""', 'description': '"""Choix invalide ! (verif/mute/logs/temp)"""', 'color': '(15158332)'}), "(title='❌ Oups ! 
Il y a une erreur :', description=\n 'Choix invalide ! (verif/mute/logs/temp)', color=15158332)\n", (882, 996), False, 'from discord import Role, TextChannel, Permissions, Embed, Color\n'), ((1518, 1624), 'discord.Embed', 'Embed', ([], {'description': 'f"""{ctx.author.mention} a défini {mod.mention} pour "{mtype}\\""""', 'color': '(11027200)'}), '(description=\n f\'{ctx.author.mention} a défini {mod.mention} pour "{mtype}"\', color=\n 11027200)\n', (1523, 1624), False, 'from discord import Role, TextChannel, Permissions, Embed, Color\n'), ((5144, 5162), 'sqlite3.connect', 'connect', (['"""data.db"""'], {}), "('data.db')\n", (5151, 5162), False, 'from sqlite3 import connect\n'), ((6566, 6593), 'discord.utils.get', 'get', (['guild.roles'], {'name': 'name'}), '(guild.roles, name=name)\n', (6569, 6593), False, 'from discord.utils import get\n'), ((7084, 7132), 'discord.Embed', 'Embed', ([], {'title': '"""🗺️ Menu des roles"""', 'color': '(15844367)'}), "(title='🗺️ Menu des roles', color=15844367)\n", (7089, 7132), False, 'from discord import Role, TextChannel, Permissions, Embed, Color\n'), ((9000, 9014), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9012, 9014), False, 'from datetime import datetime\n')]
|
from django.test import TestCase
from django.urls import reverse
from freezegun import freeze_time
from rest_framework.test import APIClient
from rest_framework import status
from .utils import (
create_user,
create_friend_request,
create_friend,
create_message,
)
from core.models import Message, Friend, User
# Endpoint URLs resolved once at import time.  The manage (detail) route
# needs a pk, so only its route name is stored and it is reversed per test.
CREATE_MESSAGE_URL = reverse('api:message_create')
LIST_MESSAGE_URL = reverse('api:message_list')
MANAGE_MESSAGE_URL = 'api:message_manage'
class TestPublicMessageAPI(TestCase):
    """Tests for the public API for the Message model.

    Every Message endpoint must reject anonymous (unauthenticated)
    requests with HTTP 401.
    """
    def setUp(self) -> None:
        """Sets up the APIClient for the tests and creates users"""
        self.user_one = create_user(
            email='<EMAIL>',
            password='<PASSWORD>',
            username='test_username_one',
        )
        self.user_two = create_user(
            email='<EMAIL>',
            password='<PASSWORD>',
            username='test_username_two',
        )
        # Deliberately left unauthenticated.
        self.client = APIClient()
    def test_create_message_unauthorized(self) -> None:
        """
        Tests what happens if a Message is created by an anonymous User
        """
        payload = {
            'content': 'text',
            'to_user': self.user_one,
            'from_user': self.user_two,
        }
        response = self.client.post(CREATE_MESSAGE_URL, payload)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_list_message_unauthorized(self) -> None:
        """
        Tests what happens if Messages are listed by an anonymous User
        """
        # Bug fix: this test previously issued GET against CREATE_MESSAGE_URL,
        # so the list endpoint was never exercised.
        response = self.client.get(LIST_MESSAGE_URL)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_manage_message_unauthorized(self) -> None:
        """
        Tests what happens if a Message is managed by an anonymous User
        """
        response = self.client.get(
            reverse(MANAGE_MESSAGE_URL, kwargs={'pk': 1})
        )
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class TestPrivateMessageAPI(TestCase):
    """Tests for the private API for the Message model.

    ``user_one`` is authenticated in ``setUp``; ``user_two`` is the peer
    the messages are exchanged with.
    """
    def setUp(self) -> None:
        """Sets up the APIClient for the tests and creates users"""
        self.user_one = create_user(
            email='<EMAIL>',
            password='<PASSWORD>',
            username='test_username_one',
        )
        self.user_two = create_user(
            email='<EMAIL>',
            password='<PASSWORD>',
            username='test_username_two',
        )
        self.client = APIClient()
        # All requests below are issued as user_one.
        self.client.force_authenticate(self.user_one)
    def test_create_message_successfully(self) -> None:
        """Tests if Message is created successfully"""
        # Messaging requires an accepted friendship between the two users.
        create_friend_request(
            from_user=self.user_two, to_user=self.user_one, is_accepted=True
        )
        create_friend(user=self.user_two, friend_of=self.user_one)
        payload = {
            'content': 'text',
            'to_user': self.user_one.pk,
            'from_user': self.user_two.pk,
        }
        response = self.client.post(CREATE_MESSAGE_URL, payload)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_create_message_users_are_not_friends(self) -> None:
        """Tests that creating a Message fails when the users are not friends"""
        payload = {
            'content': 'text',
            'to_user': self.user_one,
            'from_user': self.user_two,
        }
        response = self.client.post(CREATE_MESSAGE_URL, payload)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_list_message_successful(self) -> None:
        """Tests if Message is listed successfully"""
        create_friend_request(
            from_user=self.user_two, to_user=self.user_one, is_accepted=True
        )
        create_friend(user=self.user_two, friend_of=self.user_one)
        first_message = create_message(
            content='first message',
            to_user=self.user_two,
            from_user=self.user_one,
        )
        second_message = create_message(
            content='second message',
            to_user=self.user_one,
            from_user=self.user_two,
        )
        data = {'friend_pk': self.user_two.pk}
        response = self.client.get(LIST_MESSAGE_URL, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 2)
        # Messages come back newest first.
        self.assertEqual(response.data[0]['content'], second_message.content)
        self.assertEqual(response.data[1]['content'], first_message.content)
    def test_list_message_no_friend_pk_provided(self) -> None:
        """
        Tests what happens when no friend_pk is
        provided when calling the list view
        """
        create_friend_request(
            from_user=self.user_two, to_user=self.user_one, is_accepted=True
        )
        create_friend(user=self.user_two, friend_of=self.user_one)
        create_message(
            content='message', to_user=self.user_two, from_user=self.user_one
        )
        response = self.client.get(LIST_MESSAGE_URL)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_list_message_friend_does_not_exist(self) -> None:
        """
        Tests what happens if User's friend doesn't exist
        """
        create_friend_request(
            from_user=self.user_two, to_user=self.user_one, is_accepted=True
        )
        create_friend(user=self.user_two, friend_of=self.user_one)
        create_message(
            content='message', to_user=self.user_two, from_user=self.user_one
        )
        # Remove the friend so the pk in the query no longer resolves.
        User.objects.filter(pk=self.user_two.pk).delete()
        data = {'friend_pk': self.user_two.pk}
        response = self.client.get(LIST_MESSAGE_URL, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
    def test_list_message_users_are_not_friends(self) -> None:
        """
        Tests what happens if users aren't friends and the list view is called
        """
        create_friend_request(
            from_user=self.user_two, to_user=self.user_one, is_accepted=True
        )
        create_friend(user=self.user_two, friend_of=self.user_one)
        create_message(
            content='message', to_user=self.user_two, from_user=self.user_one
        )
        # Sever the friendship; listing should then 404.
        Friend.objects.filter(
            user=self.user_two, friend_of=self.user_one
        ).delete()
        data = {'friend_pk': self.user_two.pk}
        response = self.client.get(LIST_MESSAGE_URL, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
    def test_retrieve_message_which_does_not_exist(self) -> None:
        """
        Tests what happens if a User tries to
        retrieve a Message which doesn't exist
        """
        response = self.client.get(
            reverse(MANAGE_MESSAGE_URL, kwargs={'pk': 1})
        )
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
    def test_manage_message_forbidden(self) -> None:
        """
        Tests what happens if User's trying to
        manage Message not meant for them
        """
        user_three = create_user(
            email='<EMAIL>',
            password='<PASSWORD>',
            username='test_username_three',
        )
        # The message is between user_two and user_three; the authenticated
        # user_one is not a participant.
        create_friend_request(
            from_user=self.user_two, to_user=user_three, is_accepted=True
        )
        create_friend(user=self.user_two, friend_of=user_three)
        message = create_message(
            content='message', to_user=self.user_two, from_user=user_three
        )
        response = self.client.get(
            reverse(MANAGE_MESSAGE_URL, kwargs={'pk': message.pk})
        )
        error_message = b'You don\'t have permission to manage this Message'
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertIn(error_message, response.content)
    @freeze_time('2020-09-25 17:17:17')
    def test_retrieve_message_successful(self) -> None:
        """
        Tests what happens if a Message is retrieved successfully
        """
        create_friend_request(
            from_user=self.user_two, to_user=self.user_one, is_accepted=True
        )
        create_friend(user=self.user_two, friend_of=self.user_one)
        message = create_message(
            content='message', to_user=self.user_two, from_user=self.user_one
        )
        response = self.client.get(
            reverse(MANAGE_MESSAGE_URL, kwargs={'pk': message.pk})
        )
        # sent_on is deterministic thanks to the frozen clock above.
        expected_result = {
            'content': 'message',
            'to_user': self.user_two.pk,
            'from_user': self.user_one.pk,
            'is_new': True,
            'sent_on': '2020-09-25T17:17:17Z',
        }
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data, expected_result)
    def test_update_message_successful(self) -> None:
        """Tests if a Message is updated successfully"""
        create_friend_request(
            from_user=self.user_two, to_user=self.user_one, is_accepted=True
        )
        create_friend(user=self.user_two, friend_of=self.user_one)
        message = create_message(
            content='message', to_user=self.user_two, from_user=self.user_one
        )
        payload = {'content': 'new message', 'is_new': False}
        response = self.client.patch(
            reverse(MANAGE_MESSAGE_URL, kwargs={'pk': message.pk}), payload
        )
        message.refresh_from_db()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(message.content, 'new message')
        self.assertFalse(message.is_new)
    def test_delete_message_successful(self) -> None:
        """Tests if a Message is deleted successfully"""
        create_friend_request(
            from_user=self.user_two, to_user=self.user_one, is_accepted=True
        )
        create_friend(user=self.user_two, friend_of=self.user_one)
        message = create_message(
            content='message', to_user=self.user_two, from_user=self.user_one
        )
        response = self.client.delete(
            reverse(MANAGE_MESSAGE_URL, kwargs={'pk': message.pk})
        )
        message_exists = Message.objects.filter(pk=message.pk).exists()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertFalse(message_exists)
|
[
"core.models.Friend.objects.filter",
"django.urls.reverse",
"core.models.User.objects.filter",
"core.models.Message.objects.filter",
"freezegun.freeze_time",
"rest_framework.test.APIClient"
] |
[((352, 381), 'django.urls.reverse', 'reverse', (['"""api:message_create"""'], {}), "('api:message_create')\n", (359, 381), False, 'from django.urls import reverse\n'), ((401, 428), 'django.urls.reverse', 'reverse', (['"""api:message_list"""'], {}), "('api:message_list')\n", (408, 428), False, 'from django.urls import reverse\n'), ((7980, 8014), 'freezegun.freeze_time', 'freeze_time', (['"""2020-09-25 17:17:17"""'], {}), "('2020-09-25 17:17:17')\n", (7991, 8014), False, 'from freezegun import freeze_time\n'), ((995, 1006), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (1004, 1006), False, 'from rest_framework.test import APIClient\n'), ((2589, 2600), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (2598, 2600), False, 'from rest_framework.test import APIClient\n'), ((1929, 1974), 'django.urls.reverse', 'reverse', (['MANAGE_MESSAGE_URL'], {'kwargs': "{'pk': 1}"}), "(MANAGE_MESSAGE_URL, kwargs={'pk': 1})\n", (1936, 1974), False, 'from django.urls import reverse\n'), ((6905, 6950), 'django.urls.reverse', 'reverse', (['MANAGE_MESSAGE_URL'], {'kwargs': "{'pk': 1}"}), "(MANAGE_MESSAGE_URL, kwargs={'pk': 1})\n", (6912, 6950), False, 'from django.urls import reverse\n'), ((7702, 7756), 'django.urls.reverse', 'reverse', (['MANAGE_MESSAGE_URL'], {'kwargs': "{'pk': message.pk}"}), "(MANAGE_MESSAGE_URL, kwargs={'pk': message.pk})\n", (7709, 7756), False, 'from django.urls import reverse\n'), ((8517, 8571), 'django.urls.reverse', 'reverse', (['MANAGE_MESSAGE_URL'], {'kwargs': "{'pk': message.pk}"}), "(MANAGE_MESSAGE_URL, kwargs={'pk': message.pk})\n", (8524, 8571), False, 'from django.urls import reverse\n'), ((9470, 9524), 'django.urls.reverse', 'reverse', (['MANAGE_MESSAGE_URL'], {'kwargs': "{'pk': message.pk}"}), "(MANAGE_MESSAGE_URL, kwargs={'pk': message.pk})\n", (9477, 9524), False, 'from django.urls import reverse\n'), ((10215, 10269), 'django.urls.reverse', 'reverse', (['MANAGE_MESSAGE_URL'], {'kwargs': "{'pk': message.pk}"}), 
"(MANAGE_MESSAGE_URL, kwargs={'pk': message.pk})\n", (10222, 10269), False, 'from django.urls import reverse\n'), ((5689, 5729), 'core.models.User.objects.filter', 'User.objects.filter', ([], {'pk': 'self.user_two.pk'}), '(pk=self.user_two.pk)\n', (5708, 5729), False, 'from core.models import Message, Friend, User\n'), ((6393, 6459), 'core.models.Friend.objects.filter', 'Friend.objects.filter', ([], {'user': 'self.user_two', 'friend_of': 'self.user_one'}), '(user=self.user_two, friend_of=self.user_one)\n', (6414, 6459), False, 'from core.models import Message, Friend, User\n'), ((10305, 10342), 'core.models.Message.objects.filter', 'Message.objects.filter', ([], {'pk': 'message.pk'}), '(pk=message.pk)\n', (10327, 10342), False, 'from core.models import Message, Friend, User\n')]
|
"""
Transformer 实现
Author: <NAME>
Date: 2021/3/7
REF: http://nlp.seas.harvard.edu/2018/04/03/attention.html
"""
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import copy
import math
from torch.autograd import Variable
from torch.nn.modules.container import Sequential
from torch.nn.modules.normalization import LayerNorm
class EncoderDecoder(nn.Module):
    """Standard encoder-decoder architecture: encode the source sequence,
    then decode the target conditioned on the encoder memory."""
    def __init__(self, encoder, decoder, src_embed, tgt_embed, generator):
        super(EncoderDecoder, self).__init__()
        self.encoder = encoder
        self.decoder = decoder
        self.src_embed = src_embed
        self.tgt_embed = tgt_embed
        self.generator = generator

    def forward(self, src, tgt, src_mask, tgt_mask):
        """Run the full encode/decode pass over masked src/tgt batches."""
        return self.decode(self.encode(src, src_mask), src_mask, tgt, tgt_mask)

    def encode(self, src, src_mask):
        return self.encoder(self.src_embed(src), src_mask)

    def decode(self, memory, src_mask, tgt, tgt_mask):
        # Bug fix: tgt_mask was previously dropped, so the decoder never
        # received its (causal) target mask.
        return self.decoder(self.tgt_embed(tgt), memory, src_mask, tgt_mask)
class Generator(nn.Module):
    """Final projection from model dimension to vocabulary log-probabilities."""
    def __init__(self, d_model, vocab):
        super(Generator, self).__init__()
        self.proj = nn.Linear(d_model, vocab)

    def forward(self, X):
        logits = self.proj(X)
        return F.log_softmax(logits, dim=-1)
def clones(module, N):
    """Return an ``nn.ModuleList`` holding N independent deep copies of *module*."""
    return nn.ModuleList(copy.deepcopy(module) for _ in range(N))
class Encoder(nn.Module):
    """Stack of N identical encoder layers followed by a final LayerNorm."""
    def __init__(self, layer, N):
        super(Encoder, self).__init__()
        self.layers = clones(layer, N)
        self.norm = LayerNorm(layer.size)

    def forward(self, X, mask):
        """Feed the (masked) input through every layer, then normalise."""
        for sublayer in self.layers:
            X = sublayer(X, mask)
        return self.norm(X)
class LayerNorm(nn.Module):
    """Layer normalisation with learnable per-feature gain (a_2) and bias (b_2)."""
    def __init__(self, features, eps=1e-6):
        super(LayerNorm, self).__init__()
        self.a_2 = nn.Parameter(torch.ones(features))
        self.b_2 = nn.Parameter(torch.zeros(features))
        self.eps = eps

    def forward(self, X):
        mu = X.mean(-1, keepdim=True)
        sigma = X.std(-1, keepdim=True)
        # eps keeps the division finite when a row is (near-)constant.
        return self.a_2 * (X - mu) / (sigma + self.eps) + self.b_2
class SubLayerConnection(nn.Module):
    """Residual connection wrapped around a pre-norm sublayer:
    computes ``x + dropout(sublayer(norm(x)))``."""
    def __init__(self, size, dropout):
        """
        Param
        -----
        :size    feature dimension handed to the LayerNorm
        :dropout dropout probability applied to the sublayer output
        """
        super(SubLayerConnection, self).__init__()
        self.norm = LayerNorm(size)
        self.dropout = nn.Dropout(dropout)

    def forward(self, X, sublayer):
        residual = X
        return residual + self.dropout(sublayer(self.norm(X)))
class EncoderLayer(nn.Module):
    """One encoder layer: self-attention then a feed-forward net, each
    wrapped in a residual SubLayerConnection."""
    def __init__(self, size, self_attn, feed_forward, dropout):
        """
        Param
        -----
        :size          feature dimension of the layer
        :self_attn     multi-head self-attention module
        :feed_forward  position-wise feed-forward module
        :dropout       dropout probability for both residual wrappers
        """
        super(EncoderLayer, self).__init__()
        self.self_attn = self_attn
        self.feed_forward = feed_forward
        self.sublayer = clones(SubLayerConnection(size, dropout), 2)
        self.size = size

    def forward(self, X, mask):
        def attend(x):
            return self.self_attn(x, x, x, mask)

        X = self.sublayer[0](X, attend)
        return self.sublayer[1](X, self.feed_forward)
class Decoder(nn.Module):
    """Stack of N identical decoder layers followed by a final LayerNorm."""
    def __init__(self, layer, N):
        super(Decoder, self).__init__()
        self.layers = clones(layer, N)
        self.norm = LayerNorm(layer.size)

    def forward(self, X, memory, src_mask, tgt_mask):
        for sublayer in self.layers:
            X = sublayer(X, memory, src_mask, tgt_mask)
        return self.norm(X)
class DecoderLayer(nn.Module):
    """One decoder layer: masked self-attention, source attention over the
    encoder memory, then a feed-forward net, each with a residual wrapper."""
    def __init__(self, size, self_attn, src_attn, feed_forward, dropout):
        super(DecoderLayer, self).__init__()
        self.size = size
        self.self_attn = self_attn
        self.src_attn = src_attn
        self.feed_forward = feed_forward
        self.sublayer = clones(SubLayerConnection(size, dropout), 3)

    def forward(self, X, memory, src_mask, tgt_mask):
        def attend_self(x):
            return self.self_attn(x, x, x, tgt_mask)

        def attend_src(x):
            return self.src_attn(x, memory, memory, src_mask)

        X = self.sublayer[0](X, attend_self)
        X = self.sublayer[1](X, attend_src)
        return self.sublayer[2](X, self.feed_forward)
def subsequent_mask(size):
    """Boolean (1, size, size) mask that is True on and below the diagonal,
    i.e. each position may attend only to itself and earlier positions."""
    shape = (1, size, size)
    upper = np.triu(np.ones(shape), k=1).astype('uint8')
    return torch.from_numpy(upper) == 0
def attention(query, key, value, mask=None, dropout=None):
    """Scaled dot-product attention.

    Positions where ``mask == 0`` are suppressed before the softmax.
    Returns ``(attended values, attention weights)``.
    """
    d_k = query.size(-1)
    # Bug fix: d_k is a plain int and torch.sqrt() only accepts tensors,
    # so the scaling factor must come from math.sqrt.
    scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k)
    if mask is not None:
        # -100 is negative enough that masked positions get ~zero weight
        # after the softmax.
        scores = scores.masked_fill(mask == 0, -100)
    p_attn = F.softmax(scores, dim=-1)
    if dropout is not None:
        p_attn = dropout(p_attn)
    return torch.matmul(p_attn, value), p_attn
class MultiHeadAttention(nn.Module):
    """Multi-head scaled dot-product attention: h heads of width d_model // h."""
    def __init__(self, h, d_model, dropout=0.1):
        super(MultiHeadAttention, self).__init__()
        assert d_model % h == 0
        self.d_k = d_model // h
        self.h = h
        # Four projections: one each for query/key/value plus the output.
        self.linears = clones(nn.Linear(d_model, d_model), 4)
        self.attn = None
        self.dropout = nn.Dropout(p=dropout)

    def forward(self, query, key, value, mask=None):
        if mask is not None:
            # The same mask is broadcast over every head.
            mask = mask.unsqueeze(1)
        nbatches = query.size(0)

        def split_heads(linear, x):
            # (batch, seq, d_model) -> (batch, h, seq, d_k)
            return linear(x).view(nbatches, -1, self.h, self.d_k).transpose(1, 2)

        query = split_heads(self.linears[0], query)
        key = split_heads(self.linears[1], key)
        value = split_heads(self.linears[2], value)
        x, self.attn = attention(query, key, value, mask=mask, dropout=self.dropout)
        # Re-merge the heads and apply the output projection.
        x = x.transpose(1, 2).contiguous().view(nbatches, -1, self.h * self.d_k)
        return self.linears[-1](x)
class PositionwiseFeedForward(nn.Module):
    """Two-layer position-wise MLP: d_model -> d_ff -> d_model with ReLU."""
    def __init__(self, d_model, d_ff, dropout=0.1):
        super(PositionwiseFeedForward, self).__init__()
        self.w_1 = nn.Linear(d_model, d_ff)
        self.w_2 = nn.Linear(d_ff, d_model)
        self.dropout = nn.Dropout(dropout)

    def forward(self, X):
        hidden = F.relu(self.w_1(X))
        return self.w_2(self.dropout(hidden))
class Embeddings(nn.Module):
    """Token embedding lookup scaled by sqrt(d_model)."""
    def __init__(self, d_model, vocab):
        super(Embeddings, self).__init__()
        self.lut = nn.Embedding(vocab, d_model)
        self.d_model = d_model

    def forward(self, X):
        # Bug fix: self.d_model is a plain int and torch.sqrt() requires a
        # tensor, so use math.sqrt for the scaling factor.
        return self.lut(X) * math.sqrt(self.d_model)
class PositionalEncoding(nn.Module):
    """Sinusoidal positional encoding added to the input embeddings.

    Param
    -----
    :d_model  model (encoding) dimension
    :dropout  dropout applied after the encoding is added
    :max_len  longest supported sequence length
    """
    def __init__(self, d_model, dropout, max_len=5000):
        super(PositionalEncoding, self).__init__()
        self.dropout = nn.Dropout(p=dropout)
        encoding = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len).unsqueeze(1)  # (max_len, 1)
        # Geometric frequency schedule, one entry per even channel: (d_model/2,)
        div_term = torch.exp(torch.arange(0, d_model, 2) * -(math.log(10000.0) / d_model))
        encoding[:, 0::2] = torch.sin(position * div_term)
        encoding[:, 1::2] = torch.cos(position * div_term)
        encoding = encoding.unsqueeze(0)  # (1, max_len, d_model)
        # Buffer: saved with the module but not a trainable parameter.
        self.register_buffer('pe', encoding)

    def forward(self, X):
        X = X + Variable(self.pe[:, :X.size(1)], requires_grad=False)
        return self.dropout(X)
# Visual smoke-test: plot four channels (4..7) of the sinusoidal positional
# encoding over the first 100 positions.
import matplotlib.pyplot as plt
plt.figure(figsize=(15, 5))
pe = PositionalEncoding(20, 0)  # d_model=20, no dropout
y = pe.forward(Variable(torch.zeros(1, 100, 20)))
plt.plot(np.arange(100), y[0,:,4:8].data.numpy())
plt.show()
def make_model(src_vocab, tgt_vocab, N=6, d_model=512, d_ff=2048, h=8, dropout=0.1):
    """Build a full Transformer.

    Param
    -----
    :src_vocab  source vocabulary size
    :tgt_vocab  target vocabulary size
    :N          number of encoder/decoder layers
    :d_model    model dimension
    :d_ff       feed-forward hidden dimension
    :h          number of attention heads
    :dropout    dropout probability
    """
    c = copy.deepcopy
    attn = MultiHeadAttention(h, d_model)
    ff = PositionwiseFeedForward(d_model, d_ff, dropout)
    position = PositionalEncoding(d_model, dropout)
    encoder = Encoder(EncoderLayer(d_model, c(attn), c(ff), dropout), N)
    decoder = Decoder(DecoderLayer(d_model, c(attn), c(attn), c(ff), dropout), N)
    src_embed = nn.Sequential(Embeddings(d_model, src_vocab), c(position))
    tgt_embed = nn.Sequential(Embeddings(d_model, tgt_vocab), c(position))
    model = EncoderDecoder(encoder, decoder, src_embed, tgt_embed,
                           Generator(d_model, tgt_vocab))
    # Glorot initialisation for every weight matrix (biases are left as-is).
    for p in model.parameters():
        if p.dim() > 1:
            nn.init.xavier_uniform_(p)
    return model
|
[
"torch.nn.Dropout",
"torch.sqrt",
"torch.nn.Embedding",
"numpy.ones",
"torch.cos",
"matplotlib.pyplot.figure",
"numpy.arange",
"torch.arange",
"torch.ones",
"torch.nn.Linear",
"torch.zeros",
"math.log",
"torch.matmul",
"torch.nn.modules.normalization.LayerNorm",
"copy.deepcopy",
"matplotlib.pyplot.show",
"torch.nn.init.xavier_uniform_",
"torch.from_numpy",
"torch.nn.functional.softmax",
"torch.sin"
] |
[((7195, 7222), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 5)'}), '(figsize=(15, 5))\n', (7205, 7222), True, 'import matplotlib.pyplot as plt\n'), ((7354, 7364), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7362, 7364), True, 'import matplotlib.pyplot as plt\n'), ((4623, 4648), 'torch.nn.functional.softmax', 'F.softmax', (['scores'], {'dim': '(-1)'}), '(scores, dim=-1)\n', (4632, 4648), True, 'import torch.nn.functional as F\n'), ((7313, 7327), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (7322, 7327), True, 'import numpy as np\n'), ((1178, 1203), 'torch.nn.Linear', 'nn.Linear', (['d_model', 'vocab'], {}), '(d_model, vocab)\n', (1187, 1203), True, 'import torch.nn as nn\n'), ((1578, 1599), 'torch.nn.modules.normalization.LayerNorm', 'LayerNorm', (['layer.size'], {}), '(layer.size)\n', (1587, 1599), False, 'from torch.nn.modules.normalization import LayerNorm\n'), ((2404, 2419), 'torch.nn.modules.normalization.LayerNorm', 'LayerNorm', (['size'], {}), '(size)\n', (2413, 2419), False, 'from torch.nn.modules.normalization import LayerNorm\n'), ((2443, 2462), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (2453, 2462), True, 'import torch.nn as nn\n'), ((3349, 3370), 'torch.nn.modules.normalization.LayerNorm', 'LayerNorm', (['layer.size'], {}), '(layer.size)\n', (3358, 3370), False, 'from torch.nn.modules.normalization import LayerNorm\n'), ((4334, 4367), 'torch.from_numpy', 'torch.from_numpy', (['subsequent_mask'], {}), '(subsequent_mask)\n', (4350, 4367), False, 'import torch\n'), ((4516, 4531), 'torch.sqrt', 'torch.sqrt', (['d_k'], {}), '(d_k)\n', (4526, 4531), False, 'import torch\n'), ((4721, 4748), 'torch.matmul', 'torch.matmul', (['p_attn', 'value'], {}), '(p_attn, value)\n', (4733, 4748), False, 'import torch\n'), ((5089, 5110), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': 'dropout'}), '(p=dropout)\n', (5099, 5110), True, 'import torch.nn as nn\n'), ((5779, 5803), 'torch.nn.Linear', 'nn.Linear', 
(['d_model', 'd_ff'], {}), '(d_model, d_ff)\n', (5788, 5803), True, 'import torch.nn as nn\n'), ((5823, 5847), 'torch.nn.Linear', 'nn.Linear', (['d_ff', 'd_model'], {}), '(d_ff, d_model)\n', (5832, 5847), True, 'import torch.nn as nn\n'), ((5871, 5890), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (5881, 5890), True, 'import torch.nn as nn\n'), ((6114, 6142), 'torch.nn.Embedding', 'nn.Embedding', (['vocab', 'd_model'], {}), '(vocab, d_model)\n', (6126, 6142), True, 'import torch.nn as nn\n'), ((6565, 6586), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': 'dropout'}), '(p=dropout)\n', (6575, 6586), True, 'import torch.nn as nn\n'), ((6601, 6630), 'torch.zeros', 'torch.zeros', (['max_len', 'd_model'], {}), '(max_len, d_model)\n', (6612, 6630), False, 'import torch\n'), ((6846, 6876), 'torch.sin', 'torch.sin', (['(position * div_term)'], {}), '(position * div_term)\n', (6855, 6876), False, 'import torch\n'), ((6899, 6929), 'torch.cos', 'torch.cos', (['(position * div_term)'], {}), '(position * div_term)\n', (6908, 6929), False, 'import torch\n'), ((7278, 7301), 'torch.zeros', 'torch.zeros', (['(1)', '(100)', '(20)'], {}), '(1, 100, 20)\n', (7289, 7301), False, 'import torch\n'), ((1360, 1381), 'copy.deepcopy', 'copy.deepcopy', (['module'], {}), '(module)\n', (1373, 1381), False, 'import copy\n'), ((1878, 1898), 'torch.ones', 'torch.ones', (['features'], {}), '(features)\n', (1888, 1898), False, 'import torch\n'), ((1932, 1953), 'torch.zeros', 'torch.zeros', (['features'], {}), '(features)\n', (1943, 1953), False, 'import torch\n'), ((5009, 5036), 'torch.nn.Linear', 'nn.Linear', (['d_model', 'd_model'], {}), '(d_model, d_model)\n', (5018, 5036), True, 'import torch.nn as nn\n'), ((6234, 6258), 'torch.sqrt', 'torch.sqrt', (['self.d_model'], {}), '(self.d_model)\n', (6244, 6258), False, 'import torch\n'), ((8186, 8212), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['p'], {}), '(p)\n', (8209, 8212), True, 'import torch.nn as nn\n'), 
((4279, 4298), 'numpy.ones', 'np.ones', (['attn_shape'], {}), '(attn_shape)\n', (4286, 4298), True, 'import numpy as np\n'), ((6650, 6674), 'torch.arange', 'torch.arange', (['(0)', 'max_len'], {}), '(0, max_len)\n', (6662, 6674), False, 'import torch\n'), ((6740, 6767), 'torch.arange', 'torch.arange', (['(0)', 'd_model', '(2)'], {}), '(0, d_model, 2)\n', (6752, 6767), False, 'import torch\n'), ((6772, 6789), 'math.log', 'math.log', (['(10000.0)'], {}), '(10000.0)\n', (6780, 6789), False, 'import math\n')]
|
# Generated by Django 3.1.6 on 2021-03-05 03:48
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the free-form 'netint' field to InstanceModel.
    dependencies = [
        ('infra', '0006_networkinterfacemodel_subnetid'),
    ]
    operations = [
        migrations.AddField(
            model_name='instancemodel',
            name='netint',
            # Optional: nullable and blankable so existing rows need no value.
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
    ]
|
[
"django.db.models.CharField"
] |
[((366, 421), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)'}), '(blank=True, max_length=100, null=True)\n', (382, 421), False, 'from django.db import migrations, models\n')]
|
from setuptools import setup, find_packages
# Read the long description up front with an explicit encoding; the context
# manager guarantees the file handle is closed (the previous bare
# open('README.md').read() leaked it until garbage collection).
with open('README.md', encoding='utf-8') as readme:
    long_description = readme.read()

setup(
    name='django-inlineobjects',
    packages=find_packages(),
    version='2.0.3',
    description='A reusable Django application used to insert content objects into other pieces of content.',
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/pigmonkey/django-inlineobjects',
    classifiers=[
        "Programming Language :: Python",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Development Status :: 5 - Production/Stable",
        "Framework :: Django",
    ],
    long_description=long_description,
    long_description_content_type='text/markdown',
    include_package_data=True,
    zip_safe=False,
    install_requires=['beautifulsoup4'],
)
|
[
"setuptools.find_packages"
] |
[((98, 113), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (111, 113), False, 'from setuptools import setup, find_packages\n')]
|
from django import forms
from django.contrib.auth import authenticate, get_user_model
from django.contrib.auth.forms import AuthenticationForm
from authtools import forms as authforms
# Resolve the active user model once so both forms bind to the same class.
User = get_user_model()
class LoginForm(AuthenticationForm):
    """Login form that uses the e-mail address as the username field."""
    # Both widgets carry placeholder text instead of separate labels.
    username = forms.CharField(
        required=True,
        widget=forms.EmailInput(attrs={'placeholder': 'E-mail'})
    )
    password = forms.CharField(
        required=True,
        widget=forms.PasswordInput(attrs={'placeholder': 'Password'})
    )
    class Meta:
        model = User
        fields = ('username', 'password')
class SignUpForm(authforms.UserCreationForm):
    """Registration form: e-mail plus a password and its confirmation."""
    email = forms.CharField(
        required=True,
        widget=forms.EmailInput(attrs={'placeholder': 'E-mail'})
    )
    password1 = forms.CharField(
        required=True,
        widget=forms.PasswordInput(attrs={'placeholder': 'Password'})
    )
    # Confirmation field; authtools' UserCreationForm checks it matches.
    password2 = forms.CharField(
        required=True,
        widget=forms.PasswordInput(attrs={'placeholder': 'Verify password'})
    )
    class Meta:
        model = User
        fields = ('email',)
|
[
"django.forms.EmailInput",
"django.contrib.auth.get_user_model",
"django.forms.PasswordInput"
] |
[((194, 210), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (208, 210), False, 'from django.contrib.auth import authenticate, get_user_model\n'), ((320, 369), 'django.forms.EmailInput', 'forms.EmailInput', ([], {'attrs': "{'placeholder': 'E-mail'}"}), "(attrs={'placeholder': 'E-mail'})\n", (336, 369), False, 'from django import forms\n'), ((446, 500), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {'attrs': "{'placeholder': 'Password'}"}), "(attrs={'placeholder': 'Password'})\n", (465, 500), False, 'from django import forms\n'), ((703, 752), 'django.forms.EmailInput', 'forms.EmailInput', ([], {'attrs': "{'placeholder': 'E-mail'}"}), "(attrs={'placeholder': 'E-mail'})\n", (719, 752), False, 'from django import forms\n'), ((830, 884), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {'attrs': "{'placeholder': 'Password'}"}), "(attrs={'placeholder': 'Password'})\n", (849, 884), False, 'from django import forms\n'), ((962, 1023), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {'attrs': "{'placeholder': 'Verify password'}"}), "(attrs={'placeholder': 'Verify password'})\n", (981, 1023), False, 'from django import forms\n')]
|
import plotly.express as px
import pandas as pd

# Concept hierarchy of patient-identifiable data: each entry names its parent
# concept and the root has an empty parent.  The value column must be numeric
# for plotly to size the sunburst sectors (strings were previously used).
data = dict(
    concept=["root concept", "name", "forename", "surname", "initials",
             "contact details", "address", "address line", "postcode",
             "telephone", "email", "identification num", "passport ",
             "driving license", "nat insurance", "healthcare identifier",
             "nhs num", "hosp num", "emerg dep num", "lab num", "gmc num",
             "date", "date of birth", "url"],
    parent=["", "root concept", "name", "name", "name", "root concept",
            "contact details", "address", "address", "contact details",
            "contact details", "root concept", "identification num",
            "identification num", "identification num", "root concept",
            "healthcare identifier", "healthcare identifier",
            "healthcare identifier", "healthcare identifier",
            "healthcare identifier", "root concept", "date",
            "contact details"],
    # Fixed: values were strings ("1000", ...) and the label "postcoce" above
    # was a typo for "postcode".
    value=[0, 1000, 1100, 1200, 1300, 2000, 2100, 2110, 2120, 2200, 2300,
           2000, 2410, 2420, 2430, 3000, 3100, 3200, 3300, 3400, 3500,
           4000, 4100, 2300],
)

fig = px.sunburst(
    data,
    names='concept',
    parents='parent',
    values='value',
)
fig.update_traces(textfont_color="black")
fig.show()
|
[
"plotly.express.sunburst"
] |
[((968, 1036), 'plotly.express.sunburst', 'px.sunburst', (['data'], {'names': '"""concept"""', 'parents': '"""parent"""', 'values': '"""value"""'}), "(data, names='concept', parents='parent', values='value')\n", (979, 1036), True, 'import plotly.express as px\n')]
|
# -*- coding: utf-8 -*-
"""
@author: <NAME>, <NAME>, <NAME>, <NAME>
"""
# This is our main program file that displays the menu for the user to use
import helpers as h
import search_colleges as sc
import average_stats as avg_stat
import recommender_helper as r_helper
import bestcolleges_helper as bs_helper
import fetch_all_data as data_fetcher
def college_helper():
    """Top-level interactive menu; dispatches on the user's choice and
    keeps recursing until the user picks Exit."""
    print("Welcome to college helper!")
    print("Please select the following prompt:")
    print("1. See recommended college based on my preferences")
    print("2. See top level stats about colleges")
    print("3. Browse careers")
    print("4. Search colleges")
    print("5. Help")
    print("6. Refresh all data - Takes 15 mins!")
    print("7. Exit")
    choice = h.get_input(7)
    # Options whose handlers manage their own return to the menu.
    simple_actions = {
        1: get_recommendation,
        2: view_general_data,
        3: browse_careers,
        5: help_message,
    }
    if choice in simple_actions:
        simple_actions[choice]()
    elif choice == 4:
        sc.search_colleges_wrapper()
        print("\n==========================")
        college_helper()
    elif choice == 6:
        data_fetcher.refresh_all_data()
        college_helper()
    else:
        h.exitMessage(choice)
def get_recommendation():
    """Collect the user's preferences and print matching colleges,
    then return to the main menu."""
    print("Please input your preferences:")
    print("What state would you prefer to study in: (ex. LA, PA) ")
    state = h.get_states()
    print("Your SAT Score:")
    sat = h.get_input(1600)
    print("How much are you ready to pay for college: (in numbers)")
    budget = h.get_input(1000000)
    r_helper.view_recommendations(state, sat, budget)
    print("\n==========================")
    college_helper()
def view_general_data():
    """Menu of aggregate state-level statistics; recurses after each view
    until the user goes back to the main menu."""
    print("==========================")
    print("Viewing college by filters:")
    print("1. View the average stats of all states")
    print("2. View ROI by states")
    print("3. View total 4 year costs by states")
    print("4. View average loan amount by states")
    print("5. Go back to menu")
    choice = h.get_input(5)
    # Map each stats option to its average_stats helper; option 5 falls
    # through to the main menu.
    actions = {
        1: avg_stat.get_average_stats,
        2: avg_stat.compute_roi_and_draw_map,
        3: avg_stat.compute_cost_and_draw_map,
        4: avg_stat.compute_loan_and_draw_map,
    }
    if choice in actions:
        actions[choice]()
        view_general_data()
    else:
        print("\n==========================")
        college_helper()
def browse_careers():
    """Career browser: list every career or look one up by name, so the
    user can see how to pursue careers they may not know about."""
    print(
        """How do you want to browse careers? (choose an option)
        1. Show all careers:
        2. View career information by name
        3. Exit"""
    )
    selection = h.get_input(3)
    if selection == 1:
        bs_helper.view_all_careers()
        browse_careers()
    elif selection == 2:
        print("Career Name:")
        career_name = input()
        bs_helper.view_career_info_by_name(career_name)
        browse_careers()
    else:
        print("\n==========================")
        college_helper()
# Displays the helper string for the client
def help_message():
    """Print a short description of the app, then return to the main menu."""
    print("==========================")
    print("College helper is good to help you find colleges!")
    print(
        """
    You can navigate through the menu and browse useful information!
    The information would be valuable for you to find the college that matches the best with your preferences!
    """
    )
    print("==========================")
    college_helper()
# Where everything begins!  Entry point: launch the interactive menu when run
# as a script (not when imported).
if __name__ == "__main__":
    college_helper()
|
[
"recommender_helper.view_recommendations",
"bestcolleges_helper.view_all_careers",
"bestcolleges_helper.view_career_info_by_name",
"helpers.get_input",
"average_stats.compute_loan_and_draw_map",
"fetch_all_data.refresh_all_data",
"helpers.get_states",
"average_stats.compute_cost_and_draw_map",
"average_stats.get_average_stats",
"search_colleges.search_colleges_wrapper",
"helpers.exitMessage",
"average_stats.compute_roi_and_draw_map"
] |
[((740, 754), 'helpers.get_input', 'h.get_input', (['(7)'], {}), '(7)\n', (751, 754), True, 'import helpers as h\n'), ((1406, 1420), 'helpers.get_states', 'h.get_states', ([], {}), '()\n', (1418, 1420), True, 'import helpers as h\n'), ((1467, 1484), 'helpers.get_input', 'h.get_input', (['(1600)'], {}), '(1600)\n', (1478, 1484), True, 'import helpers as h\n'), ((1579, 1599), 'helpers.get_input', 'h.get_input', (['(1000000)'], {}), '(1000000)\n', (1590, 1599), True, 'import helpers as h\n'), ((1605, 1681), 'recommender_helper.view_recommendations', 'r_helper.view_recommendations', (['preferred_state', 'sat_score', 'total_4_year_cost'], {}), '(preferred_state, sat_score, total_4_year_cost)\n', (1634, 1681), True, 'import recommender_helper as r_helper\n'), ((2159, 2173), 'helpers.get_input', 'h.get_input', (['(5)'], {}), '(5)\n', (2170, 2173), True, 'import helpers as h\n'), ((2949, 2963), 'helpers.get_input', 'h.get_input', (['(3)'], {}), '(3)\n', (2960, 2963), True, 'import helpers as h\n'), ((2198, 2226), 'average_stats.get_average_stats', 'avg_stat.get_average_stats', ([], {}), '()\n', (2224, 2226), True, 'import average_stats as avg_stat\n'), ((2996, 3024), 'bestcolleges_helper.view_all_careers', 'bs_helper.view_all_careers', ([], {}), '()\n', (3022, 3024), True, 'import bestcolleges_helper as bs_helper\n'), ((2280, 2315), 'average_stats.compute_roi_and_draw_map', 'avg_stat.compute_roi_and_draw_map', ([], {}), '()\n', (2313, 2315), True, 'import average_stats as avg_stat\n'), ((3143, 3189), 'bestcolleges_helper.view_career_info_by_name', 'bs_helper.view_career_info_by_name', (['user_input'], {}), '(user_input)\n', (3177, 3189), True, 'import bestcolleges_helper as bs_helper\n'), ((2369, 2405), 'average_stats.compute_cost_and_draw_map', 'avg_stat.compute_cost_and_draw_map', ([], {}), '()\n', (2403, 2405), True, 'import average_stats as avg_stat\n'), ((912, 940), 'search_colleges.search_colleges_wrapper', 'sc.search_colleges_wrapper', ([], {}), '()\n', (938, 940), 
True, 'import search_colleges as sc\n'), ((2459, 2495), 'average_stats.compute_loan_and_draw_map', 'avg_stat.compute_loan_and_draw_map', ([], {}), '()\n', (2493, 2495), True, 'import average_stats as avg_stat\n'), ((1077, 1108), 'fetch_all_data.refresh_all_data', 'data_fetcher.refresh_all_data', ([], {}), '()\n', (1106, 1108), True, 'import fetch_all_data as data_fetcher\n'), ((1152, 1168), 'helpers.exitMessage', 'h.exitMessage', (['x'], {}), '(x)\n', (1165, 1168), True, 'import helpers as h\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Test various building blocks
"""
import pytest
import mframework._mframework as mf
import mframework._utils as mfu
from mframework import log as log
log.on
def test_log():
log.off
log.on
def test_version():
from mframework.__version__ import __version__
print(__version__)
def test_is_primitive_type():
assert mf.is_primitve_type(bool)
assert mf.is_primitve_type(int)
class MyType:
pass
assert not mf.is_primitve_type(MyType)
def test_check_type():
assert mf.check_type('x', 11, int) is None
assert mf.check_type('y', 11.2, float) is None
assert mf.check_type('z', 'abc', str) is None
with pytest.raises(TypeError):
mf.check_type('x', 55.5, int)
def test_DictLike():
d = mfu.DictLike()
d['x'] = 2
d['y'] = 3
d['z'] = 77
assert str(d) == "{'x': 2, 'y': 3, 'z': 77}"
assert repr(d) == "DictLike({'x': 2, 'y': 3, 'z': 77})"
assert len(d) == 3
assert d['x'] == 2
assert d.pop('x')
with pytest.raises(KeyError):
print(d['x'])
def test_SpecDict():
d = mf.SpecDict()
d['spec1'] = mf.SpecEntry(int)
d['spec2'] = mf.SpecEntry(bool)
# Cannot redefine a specification
with pytest.raises(KeyError):
d['spec2'] = mf.SpecEntry(str)
# Value must be SpecEntry
with pytest.raises(ValueError):
d['spec3'] = 3
assert d['spec1'].schema == int
assert d['spec2'].schema == bool
def test_ItemDict():
d = mf.ItemDict()
assert d['x'] == {}
d['y'] = 1
d['y'] = 2
d['y'] = 3
assert len(d['y']) == 3
assert list(d['y'].values()) == [1, 2, 3]
# Test assigning UIDs
d.assign_uid('y', 'special_entry')
assert 3 == d.get_by_uid('y', 'special_entry')
d.assign_uid('y', 'another_special_entry', 1)
assert 2 == d.get_by_uid('y', 'another_special_entry')
# ----- Test -----
d = mf.ItemDict()
d['a'] = 'one'
d['a'] = 'two'
d['a'] = 'three'
assert d['a'] == {0: 'one', 1: 'two', 2: 'three'}
d.assign_uid('a', 'myUID', 1)
# Cannot assign same UID twice
with pytest.raises(KeyError):
d.assign_uid('a', 'myUID', 1)
assert d.get_by_uid('a', 'myUID') == 'two'
d.assign_uid('a', 'myUID2')
exp_uids = ('myUID', 'myUID2')
exp_values = ('two', 'three')
for i, (uid, value) in enumerate(d.iter_uids('a')):
assert exp_uids[i] == uid
assert exp_values[i] == value
def test_SpecEntry():
s = mf.SpecEntry(schema=int, required=0, max_items=1, doc='abc')
assert s.schema is int
assert s.max_items == 1
assert s.singleton is True
assert s.required == 0
assert s.doc == 'abc'
with pytest.raises(TypeError):
s.max_items = 'FALSE'
with pytest.raises(TypeError):
s.doc = 123
|
[
"mframework._mframework.SpecDict",
"mframework._mframework.ItemDict",
"mframework._utils.DictLike",
"mframework._mframework.is_primitve_type",
"mframework._mframework.check_type",
"pytest.raises",
"mframework._mframework.SpecEntry"
] |
[((390, 415), 'mframework._mframework.is_primitve_type', 'mf.is_primitve_type', (['bool'], {}), '(bool)\n', (409, 415), True, 'import mframework._mframework as mf\n'), ((427, 451), 'mframework._mframework.is_primitve_type', 'mf.is_primitve_type', (['int'], {}), '(int)\n', (446, 451), True, 'import mframework._mframework as mf\n'), ((806, 820), 'mframework._utils.DictLike', 'mfu.DictLike', ([], {}), '()\n', (818, 820), True, 'import mframework._utils as mfu\n'), ((1134, 1147), 'mframework._mframework.SpecDict', 'mf.SpecDict', ([], {}), '()\n', (1145, 1147), True, 'import mframework._mframework as mf\n'), ((1165, 1182), 'mframework._mframework.SpecEntry', 'mf.SpecEntry', (['int'], {}), '(int)\n', (1177, 1182), True, 'import mframework._mframework as mf\n'), ((1200, 1218), 'mframework._mframework.SpecEntry', 'mf.SpecEntry', (['bool'], {}), '(bool)\n', (1212, 1218), True, 'import mframework._mframework as mf\n'), ((1526, 1539), 'mframework._mframework.ItemDict', 'mf.ItemDict', ([], {}), '()\n', (1537, 1539), True, 'import mframework._mframework as mf\n'), ((1943, 1956), 'mframework._mframework.ItemDict', 'mf.ItemDict', ([], {}), '()\n', (1954, 1956), True, 'import mframework._mframework as mf\n'), ((2522, 2582), 'mframework._mframework.SpecEntry', 'mf.SpecEntry', ([], {'schema': 'int', 'required': '(0)', 'max_items': '(1)', 'doc': '"""abc"""'}), "(schema=int, required=0, max_items=1, doc='abc')\n", (2534, 2582), True, 'import mframework._mframework as mf\n'), ((500, 527), 'mframework._mframework.is_primitve_type', 'mf.is_primitve_type', (['MyType'], {}), '(MyType)\n', (519, 527), True, 'import mframework._mframework as mf\n'), ((564, 591), 'mframework._mframework.check_type', 'mf.check_type', (['"""x"""', '(11)', 'int'], {}), "('x', 11, int)\n", (577, 591), True, 'import mframework._mframework as mf\n'), ((611, 642), 'mframework._mframework.check_type', 'mf.check_type', (['"""y"""', '(11.2)', 'float'], {}), "('y', 11.2, float)\n", (624, 642), True, 'import 
mframework._mframework as mf\n'), ((662, 692), 'mframework._mframework.check_type', 'mf.check_type', (['"""z"""', '"""abc"""', 'str'], {}), "('z', 'abc', str)\n", (675, 692), True, 'import mframework._mframework as mf\n'), ((711, 735), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (724, 735), False, 'import pytest\n'), ((745, 774), 'mframework._mframework.check_type', 'mf.check_type', (['"""x"""', '(55.5)', 'int'], {}), "('x', 55.5, int)\n", (758, 774), True, 'import mframework._mframework as mf\n'), ((1056, 1079), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (1069, 1079), False, 'import pytest\n'), ((1267, 1290), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (1280, 1290), False, 'import pytest\n'), ((1313, 1330), 'mframework._mframework.SpecEntry', 'mf.SpecEntry', (['str'], {}), '(str)\n', (1325, 1330), True, 'import mframework._mframework as mf\n'), ((1371, 1396), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1384, 1396), False, 'import pytest\n'), ((2150, 2173), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (2163, 2173), False, 'import pytest\n'), ((2732, 2756), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2745, 2756), False, 'import pytest\n'), ((2798, 2822), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2811, 2822), False, 'import pytest\n')]
|
#!/usr/bin/env python
#
# Copyright (C) 2015 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import stat
import yaml
from managesf.services import base
# from managesf.services import exceptions as exc
from managesf.services.gerrit import utils
logger = logging.getLogger(__name__)
class SFGerritReviewManager(base.CodeReviewManager):
def get(self, **kwargs):
client = self.plugin.get_client()
return client.get_open_changes()
def propose_test_definition(self, project_name, requester):
config_git = utils.GerritRepo('config', self.plugin._full_conf)
config_git.clone()
job_file = os.path.join(config_git.infos['localcopy_path'],
'jobs', 'projects.yaml')
zuul_file = os.path.join(config_git.infos['localcopy_path'],
'zuul', 'projects.yaml')
unit_test = '%s-unit-tests' % project_name
with open(job_file, 'r') as fd:
job_yaml = yaml.load(fd)
projects = [x['project']['name'] for x in job_yaml
if x.get('project')]
if project_name not in projects:
msg = '[%s] Adding project %s to the jobs definition file'
logger.debug(msg % (self.plugin.service_name, project_name))
with open(job_file, 'w') as fd:
project = {'project':
{'name': project_name,
'jobs': ['{name}-unit-tests', ],
'node': 'master'}}
job_yaml.append(project)
fd.write(yaml.safe_dump(job_yaml))
with open(zuul_file, 'r') as fd:
zuul_yaml = yaml.load(fd)
projects = [x['name'] for x in zuul_yaml['projects']]
if project_name not in projects:
msg = '[%s] Adding project %s to the zuul pipeline file'
logger.debug(msg % (self.plugin.service_name, project_name))
with open(zuul_file, 'w') as fd:
project = {'name': project_name,
'check': [unit_test, ],
'gate': [unit_test, ]}
zuul_yaml['projects'].append(project)
fd.write(yaml.safe_dump(zuul_yaml))
config_git.review_changes(
'%s proposes initial test definition for project %s' %
(requester, project_name))
def propose_test_scripts(self, project_name, requester):
test_script_template = '''#!/bin/bash
echo "Modify this script to run your project's unit tests."
exit 0;'''
project_git = utils.GerritRepo(project_name, self.plugin._full_conf)
project_git.clone()
project_git.add_file('run_tests.sh', test_script_template)
os.chmod(os.path.join(project_git.infos['localcopy_path'],
'run_tests.sh'), stat.S_IRWXU)
msg = '[%s] submitting template test review on %s on behalf of %s'
logger.debug(msg % (self.plugin.service_name, project_name, requester))
project_git.review_changes(
'%s proposes initial test scripts for project %s' %
(requester, project_name))
|
[
"managesf.services.gerrit.utils.GerritRepo",
"yaml.load",
"yaml.safe_dump",
"os.path.join",
"logging.getLogger"
] |
[((800, 827), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (817, 827), False, 'import logging\n'), ((1081, 1131), 'managesf.services.gerrit.utils.GerritRepo', 'utils.GerritRepo', (['"""config"""', 'self.plugin._full_conf'], {}), "('config', self.plugin._full_conf)\n", (1097, 1131), False, 'from managesf.services.gerrit import utils\n'), ((1178, 1251), 'os.path.join', 'os.path.join', (["config_git.infos['localcopy_path']", '"""jobs"""', '"""projects.yaml"""'], {}), "(config_git.infos['localcopy_path'], 'jobs', 'projects.yaml')\n", (1190, 1251), False, 'import os\n'), ((1304, 1377), 'os.path.join', 'os.path.join', (["config_git.infos['localcopy_path']", '"""zuul"""', '"""projects.yaml"""'], {}), "(config_git.infos['localcopy_path'], 'zuul', 'projects.yaml')\n", (1316, 1377), False, 'import os\n'), ((3139, 3193), 'managesf.services.gerrit.utils.GerritRepo', 'utils.GerritRepo', (['project_name', 'self.plugin._full_conf'], {}), '(project_name, self.plugin._full_conf)\n', (3155, 3193), False, 'from managesf.services.gerrit import utils\n'), ((1526, 1539), 'yaml.load', 'yaml.load', (['fd'], {}), '(fd)\n', (1535, 1539), False, 'import yaml\n'), ((2231, 2244), 'yaml.load', 'yaml.load', (['fd'], {}), '(fd)\n', (2240, 2244), False, 'import yaml\n'), ((3306, 3371), 'os.path.join', 'os.path.join', (["project_git.infos['localcopy_path']", '"""run_tests.sh"""'], {}), "(project_git.infos['localcopy_path'], 'run_tests.sh')\n", (3318, 3371), False, 'import os\n'), ((2139, 2163), 'yaml.safe_dump', 'yaml.safe_dump', (['job_yaml'], {}), '(job_yaml)\n', (2153, 2163), False, 'import yaml\n'), ((2769, 2794), 'yaml.safe_dump', 'yaml.safe_dump', (['zuul_yaml'], {}), '(zuul_yaml)\n', (2783, 2794), False, 'import yaml\n')]
|
from django import forms
from Online_Library.web_app.models import Profile, Book
class CreateBookForm(forms.ModelForm):
class Meta:
model=Book
fields=('title','description','image','type')
widgets={
'description':forms.Textarea(
attrs={
'rows':3,
}
)
}
class EditBookForm(forms.ModelForm):
class Meta:
model=Book
fields=('title','description','image','type')
widgets={
'description':forms.Textarea(
attrs={
'rows':3,
}
)
}
class DeleteBookForm(forms.ModelForm):
def save(self, commit=True):
self.instance.delete()
return self.instance
class Meta:
model=Book
fields= ('title','description','image','type')
class CreateProfileForm(forms.ModelForm):
class Meta:
model=Profile
fields=('first_name','last_name','image_url')
labels={
'first_name':'<NAME>',
'last_name':'<NAME>',
'image_url':'Image URL',
}
class EditProfileForm(forms.ModelForm):
class Meta:
model=Profile
fields = ('first_name', 'last_name', 'image_url')
labels = {
'first_name': 'First Name',
'last_name': '<NAME>',
'image_url': 'Image URL',
}
class DeleteProfileForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for _, field in self.fields.items():
field.widget.attrs['disabled'] = 'disabled'
field.required = False
def save(self, commit=True):
Book.objects.all().delete()
self.instance.delete()
return self.instance
class Meta:
model=Profile
fields = ('first_name', 'last_name', 'image_url')
|
[
"Online_Library.web_app.models.Book.objects.all",
"django.forms.Textarea"
] |
[((257, 290), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'rows': 3}"}), "(attrs={'rows': 3})\n", (271, 290), False, 'from django import forms\n'), ((539, 572), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'rows': 3}"}), "(attrs={'rows': 3})\n", (553, 572), False, 'from django import forms\n'), ((1724, 1742), 'Online_Library.web_app.models.Book.objects.all', 'Book.objects.all', ([], {}), '()\n', (1740, 1742), False, 'from Online_Library.web_app.models import Profile, Book\n')]
|
import json
from .column import Column
class JSON(Column):
def from_backend(self, value):
if type(value) == list or type(value) == dict:
return value
if not value:
return None
try:
return json.loads(value)
except json.JSONDecodeError:
return None
def to_backend(self, data):
if self.name in data:
data[self.name] = json.dumps(data[self.name]) if data[self.name] else ''
return data
|
[
"json.loads",
"json.dumps"
] |
[((254, 271), 'json.loads', 'json.loads', (['value'], {}), '(value)\n', (264, 271), False, 'import json\n'), ((426, 453), 'json.dumps', 'json.dumps', (['data[self.name]'], {}), '(data[self.name])\n', (436, 453), False, 'import json\n')]
|
import matplotlib.pyplot as plt
import numpy as np
import sys
from environment import MountainCar
def get_state(mode, state):
if mode == 'raw':
s = np.zeros((2, 1))
else:
s = np.zeros((2048, 1))
for k in state:
s[k] = state[k]
return s
def get_q(s, w, b):
return w.T @ s + b
def train_q_learning(mode,
episodes,
max_episode_len,
epsilon,
gamma,
lr):
n_S = 2048
n_A = 3
if mode == 'raw':
n_S = 2
w = np.zeros((n_S, n_A))
b = 0
returns = []
car = MountainCar(mode)
for i in range(episodes):
s = get_state(mode, car.reset())
R = 0.
g = 1.
for t in range(max_episode_len):
assert s.shape[0] <= n_S
isGreedy = np.random.uniform(0, 1, 1) >= epsilon
if isGreedy:
a = int(np.argmax(get_q(s, w, b)))
else:
a = int(np.random.randint(0, n_A, 1))
state, r, done = car.step(a)
s_ = get_state(mode, state)
R += (r * g)
# g *= gamma
q_sa = float(get_q(s, w, b)[a])
# if done:
# max_q_s_ = 0
# else:
max_q_s_ = float(np.max(get_q(s_, w, b)))
td = q_sa - (r + gamma * max_q_s_)
w[:, a] = w[:, a] - lr * td * s.flatten()
b = b - lr * td
s = s_
if done:
break
returns.append(R)
return np.array(returns), w, b
def write_array_to_file(filepath, array):
"""Write a numpy matrix to a file.
Args:
filepath (str): File path.
array (ndarray): Numpy 1D array.
"""
assert len(array.shape) < 2
out_str = '\n'.join(array.astype('U'))
with open(filepath, 'w') as f:
f.write(out_str)
print('Array written to {0} successfully!'.format(filepath))
def moving_average(a, n=3) :
ret = np.cumsum(a, dtype=float)
ret[n:] = ret[n:] - ret[:-n]
return ret[n - 1:] / n
if __name__ == "__main__":
assert len(sys.argv) == 1 + 8
mode = sys.argv[1]
weight_out = sys.argv[2]
returns_out = sys.argv[3]
episodes = int(sys.argv[4])
max_episode_len = int(sys.argv[5])
epsilon = float(sys.argv[6])
gamma = float(sys.argv[7])
lr = float(sys.argv[8])
print(f'{mode = }')
print(f'{weight_out = }')
print(f'{returns_out = }')
print(f'{episodes = }')
print(f'{max_episode_len = }')
print(f'{epsilon = }')
print(f'{gamma = }')
print(f'{lr = }')
returns, w, b = train_q_learning(mode,
episodes,
max_episode_len,
epsilon,
gamma,
lr)
weights = np.concatenate((np.array([b]), w.flatten()))
write_array_to_file(weight_out, weights)
write_array_to_file(returns_out, returns)
plt.plot(list(range(1, episodes + 1)), returns, 'ro-', label='returns')
plt.plot(list(range(25, episodes + 1)), moving_average(returns, 25), 'bo-', label='rolling_mean')
plt.title('Tile')
plt.legend()
plt.show()
|
[
"matplotlib.pyplot.title",
"numpy.random.uniform",
"matplotlib.pyplot.show",
"environment.MountainCar",
"matplotlib.pyplot.legend",
"numpy.zeros",
"numpy.cumsum",
"numpy.random.randint",
"numpy.array"
] |
[((618, 638), 'numpy.zeros', 'np.zeros', (['(n_S, n_A)'], {}), '((n_S, n_A))\n', (626, 638), True, 'import numpy as np\n'), ((683, 700), 'environment.MountainCar', 'MountainCar', (['mode'], {}), '(mode)\n', (694, 700), False, 'from environment import MountainCar\n'), ((2142, 2167), 'numpy.cumsum', 'np.cumsum', (['a'], {'dtype': 'float'}), '(a, dtype=float)\n', (2151, 2167), True, 'import numpy as np\n'), ((3409, 3426), 'matplotlib.pyplot.title', 'plt.title', (['"""Tile"""'], {}), "('Tile')\n", (3418, 3426), True, 'import matplotlib.pyplot as plt\n'), ((3432, 3444), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3442, 3444), True, 'import matplotlib.pyplot as plt\n'), ((3450, 3460), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3458, 3460), True, 'import matplotlib.pyplot as plt\n'), ((172, 188), 'numpy.zeros', 'np.zeros', (['(2, 1)'], {}), '((2, 1))\n', (180, 188), True, 'import numpy as np\n'), ((213, 232), 'numpy.zeros', 'np.zeros', (['(2048, 1)'], {}), '((2048, 1))\n', (221, 232), True, 'import numpy as np\n'), ((1677, 1694), 'numpy.array', 'np.array', (['returns'], {}), '(returns)\n', (1685, 1694), True, 'import numpy as np\n'), ((3098, 3111), 'numpy.array', 'np.array', (['[b]'], {}), '([b])\n', (3106, 3111), True, 'import numpy as np\n'), ((918, 944), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', '(1)'], {}), '(0, 1, 1)\n', (935, 944), True, 'import numpy as np\n'), ((1080, 1108), 'numpy.random.randint', 'np.random.randint', (['(0)', 'n_A', '(1)'], {}), '(0, n_A, 1)\n', (1097, 1108), True, 'import numpy as np\n')]
|
# Generated by Django 3.2.7 on 2021-09-14 03:41
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0032_unit_reveal_at_level'),
]
operations = [
migrations.RemoveField(
model_name='unit',
name='reveal_at_level',
),
]
|
[
"django.db.migrations.RemoveField"
] |
[((226, 291), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""unit"""', 'name': '"""reveal_at_level"""'}), "(model_name='unit', name='reveal_at_level')\n", (248, 291), False, 'from django.db import migrations\n')]
|
import argparse
import json
import os
import tensorflow as tf
from trainer import model
if __name__ == '__main__':
parser = argparse.ArgumentParser()
# Required arguments
parser.add_argument('--data_bucket_name',
help='GCS location of the training images',
required=True)
parser.add_argument('--checkpoint_path',
help='GCS location to save model weights to',
required=True)
# Optional arguments
parser.add_argument('--job-dir',
help='This model ignores this field, but it is required by gcloud',
default='junk')
parser.add_argument('--resolution',
help='Final resolution of the output images',
type=int,
default=128)
parser.add_argument('--batch_size',
help='Training batch size',
type=int,
default=64)
parser.add_argument('--latent_size',
help='Dimension of the latent space the generator '
'input is sampled from',
type=int,
default=None)
parser.add_argument('--fmap_base',
help='Base value for number of feature maps.',
type=int,
default=8192)
parser.add_argument('--fmap_max',
help='Max value for number of feature maps.',
type=int,
default=512)
parser.add_argument('--fmap_decay',
help='Decay value for number of feature maps.',
type=float,
default=1.0)
parser.add_argument('--normalize_latents',
help='Toggles normalizing the latent vector for the '
'generator',
type=bool,
default=True)
parser.add_argument('--use_wscale',
help='Toggles weight scaling',
type=bool,
default=True)
parser.add_argument('--use_pixel_norm',
help='Toggles pixelwise normalization in convolutional '
'layers',
type=bool,
default=True)
parser.add_argument('--use_leaky_relu',
help='Toggles using leaky ReLU activation',
type=bool,
default=True)
parser.add_argument('--num_channels',
help='Number of output channels',
type=int,
default=3)
parser.add_argument('--mbstd_group_size',
help='Minibatch standard deviation size',
type=int,
default=4)
parser.add_argument('--learning_rate',
help='Optimizer learning rate',
type=float,
default=0.001)
parser.add_argument('--learning_rate_decay',
help='Learning rate decay rate',
type=float,
default=0.8)
parser.add_argument('--gradient_weight',
help='Gradient penalty loss term weight',
type=float,
default=10.0)
parser.add_argument('--D_repeat',
help='Train batches for the critic per generator '
'training batch',
type=int,
default=1)
parser.add_argument('--kimage_4x4',
help='Number of training images for 4x4 resolution',
type=int,
default=1000)
parser.add_argument('--kimage',
help='Number of training images for resolutions < 128',
type=int,
default=2000)
parser.add_argument('--kimage_large',
help='Number of training images for resolutions >= 128',
type=int,
default=4000)
parser.add_argument('--data_filename',
help='Name of the .zip file with the data in GCP',
type=str,
default='celeba.zip')
parser.add_argument('--print_every_n_batches',
help='Logs progress every N training batches',
type=int,
default=100)
parser.add_argument('--save_every_n_batches',
help='Saves model every N training batches',
type=int,
default=1000)
parser.add_argument('--start_from_resolution',
help='Load previously trained weights at this resolution',
type=int,
default=None)
parser.add_argument('--previous_weights_path',
help='Path to the weights trained for a smaller resolution',
type=str,
default=None)
args = parser.parse_args()
arguments = args.__dict__
arguments.pop("job_dir", None)
arguments.pop("job-dir", None)
model.train(**arguments)
|
[
"argparse.ArgumentParser",
"trainer.model.train"
] |
[((129, 154), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (152, 154), False, 'import argparse\n'), ((5231, 5255), 'trainer.model.train', 'model.train', ([], {}), '(**arguments)\n', (5242, 5255), False, 'from trainer import model\n')]
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_MESAX_texture_stack'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GL,'GL_MESAX_texture_stack',error_checker=_errors._error_checker)
GL_PROXY_TEXTURE_1D_STACK_MESAX=_C('GL_PROXY_TEXTURE_1D_STACK_MESAX',0x875B)
GL_PROXY_TEXTURE_2D_STACK_MESAX=_C('GL_PROXY_TEXTURE_2D_STACK_MESAX',0x875C)
GL_TEXTURE_1D_STACK_BINDING_MESAX=_C('GL_TEXTURE_1D_STACK_BINDING_MESAX',0x875D)
GL_TEXTURE_1D_STACK_MESAX=_C('GL_TEXTURE_1D_STACK_MESAX',0x8759)
GL_TEXTURE_2D_STACK_BINDING_MESAX=_C('GL_TEXTURE_2D_STACK_BINDING_MESAX',0x875E)
GL_TEXTURE_2D_STACK_MESAX=_C('GL_TEXTURE_2D_STACK_MESAX',0x875A)
|
[
"OpenGL.constant.Constant",
"OpenGL.platform.createFunction"
] |
[((544, 588), 'OpenGL.constant.Constant', '_C', (['"""GL_PROXY_TEXTURE_1D_STACK_MESAX"""', '(34651)'], {}), "('GL_PROXY_TEXTURE_1D_STACK_MESAX', 34651)\n", (546, 588), True, 'from OpenGL.constant import Constant as _C\n'), ((622, 666), 'OpenGL.constant.Constant', '_C', (['"""GL_PROXY_TEXTURE_2D_STACK_MESAX"""', '(34652)'], {}), "('GL_PROXY_TEXTURE_2D_STACK_MESAX', 34652)\n", (624, 666), True, 'from OpenGL.constant import Constant as _C\n'), ((702, 748), 'OpenGL.constant.Constant', '_C', (['"""GL_TEXTURE_1D_STACK_BINDING_MESAX"""', '(34653)'], {}), "('GL_TEXTURE_1D_STACK_BINDING_MESAX', 34653)\n", (704, 748), True, 'from OpenGL.constant import Constant as _C\n'), ((776, 814), 'OpenGL.constant.Constant', '_C', (['"""GL_TEXTURE_1D_STACK_MESAX"""', '(34649)'], {}), "('GL_TEXTURE_1D_STACK_MESAX', 34649)\n", (778, 814), True, 'from OpenGL.constant import Constant as _C\n'), ((850, 896), 'OpenGL.constant.Constant', '_C', (['"""GL_TEXTURE_2D_STACK_BINDING_MESAX"""', '(34654)'], {}), "('GL_TEXTURE_2D_STACK_BINDING_MESAX', 34654)\n", (852, 896), True, 'from OpenGL.constant import Constant as _C\n'), ((924, 962), 'OpenGL.constant.Constant', '_C', (['"""GL_TEXTURE_2D_STACK_MESAX"""', '(34650)'], {}), "('GL_TEXTURE_2D_STACK_MESAX', 34650)\n", (926, 962), True, 'from OpenGL.constant import Constant as _C\n'), ((405, 516), 'OpenGL.platform.createFunction', '_p.createFunction', (['function', '_p.PLATFORM.GL', '"""GL_MESAX_texture_stack"""'], {'error_checker': '_errors._error_checker'}), "(function, _p.PLATFORM.GL, 'GL_MESAX_texture_stack',\n error_checker=_errors._error_checker)\n", (422, 516), True, 'from OpenGL import platform as _p, arrays\n')]
|
# - * - coding: utf - 8 - * -
"""
This module tests the functions in dam_tol.py.
"""
__version__ = '1.0'
__author__ = '<NAME>'
import sys
import pytest
import numpy as np
sys.path.append(r'C:\LAYLA')
from src.LAYLA_V02.constraints import Constraints
from src.guidelines.dam_tol import is_dam_tol
@pytest.mark.parametrize(
"stack, constraints, expect", [
(np.array([45, 0, -45]), Constraints(dam_tol=True, dam_tol_rule=1), True),
(np.array([45, 0, 0]), Constraints(dam_tol=True, dam_tol_rule=1), False),
(np.array([45, -45, 0, 45, -45]), Constraints(dam_tol=True, dam_tol_rule=2), True),
(np.array([45, -45, 0, 90, -45]), Constraints(dam_tol=True, dam_tol_rule=2), False),
])
def test_is_dam_tol(stack, constraints, expect):
output = is_dam_tol(stack, constraints)
assert output == expect
|
[
"sys.path.append",
"src.LAYLA_V02.constraints.Constraints",
"numpy.array",
"src.guidelines.dam_tol.is_dam_tol"
] |
[((176, 204), 'sys.path.append', 'sys.path.append', (['"""C:\\\\LAYLA"""'], {}), "('C:\\\\LAYLA')\n", (191, 204), False, 'import sys\n'), ((788, 818), 'src.guidelines.dam_tol.is_dam_tol', 'is_dam_tol', (['stack', 'constraints'], {}), '(stack, constraints)\n', (798, 818), False, 'from src.guidelines.dam_tol import is_dam_tol\n'), ((373, 395), 'numpy.array', 'np.array', (['[45, 0, -45]'], {}), '([45, 0, -45])\n', (381, 395), True, 'import numpy as np\n'), ((397, 438), 'src.LAYLA_V02.constraints.Constraints', 'Constraints', ([], {'dam_tol': '(True)', 'dam_tol_rule': '(1)'}), '(dam_tol=True, dam_tol_rule=1)\n', (408, 438), False, 'from src.LAYLA_V02.constraints import Constraints\n'), ((456, 476), 'numpy.array', 'np.array', (['[45, 0, 0]'], {}), '([45, 0, 0])\n', (464, 476), True, 'import numpy as np\n'), ((478, 519), 'src.LAYLA_V02.constraints.Constraints', 'Constraints', ([], {'dam_tol': '(True)', 'dam_tol_rule': '(1)'}), '(dam_tol=True, dam_tol_rule=1)\n', (489, 519), False, 'from src.LAYLA_V02.constraints import Constraints\n'), ((538, 569), 'numpy.array', 'np.array', (['[45, -45, 0, 45, -45]'], {}), '([45, -45, 0, 45, -45])\n', (546, 569), True, 'import numpy as np\n'), ((571, 612), 'src.LAYLA_V02.constraints.Constraints', 'Constraints', ([], {'dam_tol': '(True)', 'dam_tol_rule': '(2)'}), '(dam_tol=True, dam_tol_rule=2)\n', (582, 612), False, 'from src.LAYLA_V02.constraints import Constraints\n'), ((630, 661), 'numpy.array', 'np.array', (['[45, -45, 0, 90, -45]'], {}), '([45, -45, 0, 90, -45])\n', (638, 661), True, 'import numpy as np\n'), ((663, 704), 'src.LAYLA_V02.constraints.Constraints', 'Constraints', ([], {'dam_tol': '(True)', 'dam_tol_rule': '(2)'}), '(dam_tol=True, dam_tol_rule=2)\n', (674, 704), False, 'from src.LAYLA_V02.constraints import Constraints\n')]
|
from unittest import TestCase
from deferrable.backend.base import BackendFactory
class TestBackendFactory(TestCase):
def test_queue_name_with_None(self):
self.assertEqual('deferrable', BackendFactory._queue_name(None))
def test_queue_name_with_group(self):
self.assertEqual('deferrable_test', BackendFactory._queue_name('test'))
class TestBackend(TestCase):
pass
|
[
"deferrable.backend.base.BackendFactory._queue_name"
] |
[((199, 231), 'deferrable.backend.base.BackendFactory._queue_name', 'BackendFactory._queue_name', (['None'], {}), '(None)\n', (225, 231), False, 'from deferrable.backend.base import BackendFactory\n'), ((320, 354), 'deferrable.backend.base.BackendFactory._queue_name', 'BackendFactory._queue_name', (['"""test"""'], {}), "('test')\n", (346, 354), False, 'from deferrable.backend.base import BackendFactory\n')]
|
import numpy as np
import torch
import logging
logger = logging.getLogger(__name__)
class Learner(object):
def __init__(self, config):
self.config = config
self.npr = np.random.RandomState(config.seed)
self.calibrate = config.learner.calibrate
self.semi_supervised = config.learner.semi_supervised
self.risk_thres = config.learner.risk_thres
self.use_cuda = torch.cuda.is_available()
self.early_stop_scope = config.learner.early_stop_scope
self.prototype_as_val = config.learner.prototype_as_val
def save_state(self):
raise NotImplementedError
def load_state(self):
raise NotImplementedError
def fit_and_predict(self, features, prototype_targets, belief, n_annotation, ground_truth):
raise NotImplementedError
class DummyLearner(Learner):
def __init__(self, config):
Learner.__init__(self, config)
logger.info('No learner is used')
def save_state(self):
pass
def load_state(self, state):
pass
def fit_and_predict(self, features, prototype_targets, belief, n_annotation, ground_truth):
return None
def get_learner_class(config):
from .nn_learner import LinearNNLearner
if config.learner.algo == 'dummy':
return DummyLearner
elif config.learner.algo == 'mlp':
return LinearNNLearner
else:
raise ValueError
|
[
"torch.cuda.is_available",
"numpy.random.RandomState",
"logging.getLogger"
] |
[((57, 84), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (74, 84), False, 'import logging\n'), ((190, 224), 'numpy.random.RandomState', 'np.random.RandomState', (['config.seed'], {}), '(config.seed)\n', (211, 224), True, 'import numpy as np\n'), ((413, 438), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (436, 438), False, 'import torch\n')]
|
'''
Beginning with just a plot
Let's get started on the Gapminder app. Your job is to make the ColumnDataSource object, prepare the plot, and add circles for Life expectancy vs Fertility. You'll also set x and y ranges for the axes.
As in the previous chapter, the DataCamp environment executes the bokeh serve command to run the app for you. When you hit 'Submit Answer', you'll see in the IPython Shell that bokeh serve script.py gets called to run the app. This is something to keep in mind when you are creating your own interactive visualizations outside of the DataCamp environment.
INSTRUCTIONS
100XP
Make a ColumnDataSource object called source with 'x', 'y', 'country', 'pop' and 'region' keys. The Pandas selections are provided for you.
Save the minimum and maximum values of the life expectancy column data.life as ymin and ymax. As a guide, you can refer to the way we saved the minimum and maximum values of the fertility column data.fertility as xmin and xmax.
Create a plot called plot() by specifying the title, setting plot_height to 400, plot_width to 700, and adding the x_range and y_range parameters.
Add circle glyphs to the plot. Specify an fill_alpha of 0.8 and source=source.
'''
# Import the necessary modules
from bokeh.io import curdoc
from bokeh.models import ColumnDataSource
from bokeh.plotting import figure
# Make the ColumnDataSource: source
source = ColumnDataSource(data={
'x' : data.loc[1970].fertility,
'y' : data.loc[1970].life,
'country' : data.loc[1970].Country,
'pop' : (data.loc[1970].population / 20000000) + 2,
'region' : data.loc[1970].region,
})
# Save the minimum and maximum values of the fertility column: xmin, xmax
xmin, xmax = min(data.fertility), max(data.fertility)
# Save the minimum and maximum values of the life expectancy column: ymin, ymax
ymin, ymax = min(data.life), max(data.life)
# Create the figure: plot
plot = figure(title='Gapminder Data for 1970', plot_height=400, plot_width=700, x_range=(xmin, xmax), y_range=(ymin, ymax))
# Add circle glyphs to the plot
plot.circle(x='x', y='y', fill_alpha=0.8, source=source)
# Set the x-axis label
plot.xaxis.axis_label ='Fertility (children per woman)'
# Set the y-axis label
plot.yaxis.axis_label = 'Life Expectancy (years)'
# Add the plot to the current document and add a title
curdoc().add_root(plot)
curdoc().title = 'Gapminder'
|
[
"bokeh.models.ColumnDataSource",
"bokeh.io.curdoc",
"bokeh.plotting.figure"
] |
[((1390, 1597), 'bokeh.models.ColumnDataSource', 'ColumnDataSource', ([], {'data': "{'x': data.loc[1970].fertility, 'y': data.loc[1970].life, 'country': data.\n loc[1970].Country, 'pop': data.loc[1970].population / 20000000 + 2,\n 'region': data.loc[1970].region}"}), "(data={'x': data.loc[1970].fertility, 'y': data.loc[1970].\n life, 'country': data.loc[1970].Country, 'pop': data.loc[1970].\n population / 20000000 + 2, 'region': data.loc[1970].region})\n", (1406, 1597), False, 'from bokeh.models import ColumnDataSource\n'), ((1933, 2053), 'bokeh.plotting.figure', 'figure', ([], {'title': '"""Gapminder Data for 1970"""', 'plot_height': '(400)', 'plot_width': '(700)', 'x_range': '(xmin, xmax)', 'y_range': '(ymin, ymax)'}), "(title='Gapminder Data for 1970', plot_height=400, plot_width=700,\n x_range=(xmin, xmax), y_range=(ymin, ymax))\n", (1939, 2053), False, 'from bokeh.plotting import figure\n'), ((2374, 2382), 'bokeh.io.curdoc', 'curdoc', ([], {}), '()\n', (2380, 2382), False, 'from bokeh.io import curdoc\n'), ((2350, 2358), 'bokeh.io.curdoc', 'curdoc', ([], {}), '()\n', (2356, 2358), False, 'from bokeh.io import curdoc\n')]
|
"""Queries, the Q in CQRS."""
from typing import Any, AsyncGenerator, Dict, Generic, Tuple, TypeVar
from abc import ABCMeta, abstractmethod
from . import schema
from .context import Context
# pylint: disable=invalid-name
_T_Context = TypeVar("_T_Context", bound=Context)
_T_Result = TypeVar("_T_Result", bound=schema.SchemaBase)
# pylint: enable=invalid-name
class SubscriptionDefinitionError(Exception):
"""An exception caused by an invalid subscription class definition."""
def __init__(self, query_name: str, problem: str):
"""Initialize a new SubscriptionDefinitionError."""
self.query_name = query_name
self.problem = problem
super().__init__(f"{self.query_name}: {self.problem}")
class SubscriptionMeta(ABCMeta, schema.SchemaMeta):
"""The metaclass all subscription classes must have."""
__subscription_mixin__: bool
Result: schema.SchemaMeta
def __new__(
cls, name: str, bases: Tuple[type, ...], attrs: Dict[str, Any], **extra
):
"""Construct a new Subscription class."""
attrs.setdefault("__subscription_mixin__", extra.pop("mixin", False))
mewcls = super().__new__(cls, name, bases, attrs)
return mewcls
def __init__(
cls, name: str, bases: Tuple[type, ...], attrs: Dict[str, Any], **_extra
):
"""Initialize and validate a newly created Subscription class."""
super().__init__(name, bases, attrs)
if not cls.__subscription_mixin__:
SubscriptionMeta._validate_subscribe(cls)
SubscriptionMeta._validate_result(cls)
@staticmethod
def _validate_result(the_cls: type):
result_class = getattr(the_cls, "Result", None)
if result_class is None:
raise SubscriptionDefinitionError(
the_cls.__name__, "Result must be defined"
)
if not isinstance(result_class, schema.SchemaMeta):
raise SubscriptionDefinitionError(
the_cls.__name__,
f"Result must be a type with a schema (have {type(result_class)})",
)
@staticmethod
def _validate_subscribe(the_cls: type):
subscribe = getattr(the_cls, "subscribe", None)
if subscribe is None or getattr(subscribe, "__isabstractmethod__", False):
raise SubscriptionDefinitionError(
the_cls.__name__, "subscribe method must be defined"
)
if not callable(subscribe):
raise SubscriptionDefinitionError(
the_cls.__name__, "subscribe must be callable"
)
class SubscriptionBase(
Generic[_T_Context, _T_Result], schema.SchemaBase, metaclass=SubscriptionMeta
):
"""The class that all subscriptions must subclass.
Each subclass must specify a `Result` member which points to some class
where `isinstance(Result, SchemaMeta)` is `True`
If a subclass specifies a member `__subscription_mixin__` with a value of `True`,
that class can serve as a common base class for multiple subscriptions.
"""
__subscription_mixin__ = True
@abstractmethod
def subscribe(self, context: _T_Context) -> AsyncGenerator[_T_Result, None]:
"""Generate the result items for this subscription."""
...
|
[
"typing.TypeVar"
] |
[((237, 273), 'typing.TypeVar', 'TypeVar', (['"""_T_Context"""'], {'bound': 'Context'}), "('_T_Context', bound=Context)\n", (244, 273), False, 'from typing import Any, AsyncGenerator, Dict, Generic, Tuple, TypeVar\n'), ((286, 331), 'typing.TypeVar', 'TypeVar', (['"""_T_Result"""'], {'bound': 'schema.SchemaBase'}), "('_T_Result', bound=schema.SchemaBase)\n", (293, 331), False, 'from typing import Any, AsyncGenerator, Dict, Generic, Tuple, TypeVar\n')]
|
import json
from nltk.stem import SnowballStemmer
MIN_WORD_LEN = 4
WORDS_IN_KEY = 3
stemmer = SnowballStemmer('russian')
raw = open('nalkod.json', 'rt')
vocab = {}
corpus = json.load(raw)
def phrases(sentence, n=2):
words = sentence.split(' ')
phrases = []
first = 0
last = min(n, len(words))
while last <= len(words):
phrases.append(words[first:last])
first += 1
last += 1
return phrases
def prune(token):
letters = [char for char in token.lower() if char in 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя-']
return ''.join(letters)
def check_length(words):
result = True
for word in words:
result &= len(word) >= MIN_WORD_LEN
return result
def normalize(phrase):
normalized = [stemmer.stem(prune(word)) for word in phrase]
long_enough = check_length(normalized)
return (' '.join(normalized), long_enough)
i = 0
print('codex loaded, generating vocab, n = ', WORDS_IN_KEY)
for article in corpus:
j = 0
for clause in article['clauses']:
bag = phrases(clause['text'], WORDS_IN_KEY)
for phrase in bag:
root, long_enough = normalize(phrase)
if long_enough:
if root in vocab:
vocab[root].append([i, j])
else:
vocab[root] = [[i, j]]
j += 1
i += 1
print('vocab completed, saving')
# словарь сформирован
dump = open('vocab_nalkod%d.json' % (WORDS_IN_KEY), 'wt')
raw = str(vocab).replace("'", '"')
dump.write(raw)
dump.flush()
dump.close()
print('done')
|
[
"nltk.stem.SnowballStemmer",
"json.load"
] |
[((101, 127), 'nltk.stem.SnowballStemmer', 'SnowballStemmer', (['"""russian"""'], {}), "('russian')\n", (116, 127), False, 'from nltk.stem import SnowballStemmer\n'), ((202, 216), 'json.load', 'json.load', (['raw'], {}), '(raw)\n', (211, 216), False, 'import json\n')]
|
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail
class EmailClient:
def send_email(self, api_key, from_email, to_email, subject, body):
message = Mail(
from_email=from_email,
to_emails=to_email,
subject=subject,
html_content=body)
sendgrid_client = SendGridAPIClient(api_key=api_key)
sendgrid_client.send(message)
|
[
"sendgrid.SendGridAPIClient",
"sendgrid.helpers.mail.Mail"
] |
[((190, 277), 'sendgrid.helpers.mail.Mail', 'Mail', ([], {'from_email': 'from_email', 'to_emails': 'to_email', 'subject': 'subject', 'html_content': 'body'}), '(from_email=from_email, to_emails=to_email, subject=subject,\n html_content=body)\n', (194, 277), False, 'from sendgrid.helpers.mail import Mail\n'), ((349, 383), 'sendgrid.SendGridAPIClient', 'SendGridAPIClient', ([], {'api_key': 'api_key'}), '(api_key=api_key)\n', (366, 383), False, 'from sendgrid import SendGridAPIClient\n')]
|
import nltk
from nltk import FreqDist
from nltk import word_tokenize
from nltk.corpus import stopwords
import re
import json
class Tweet_Print():
""" This file should take in Twitter stream data from python.json, then
manipulate it using NLTK. Current version tokenizes tweets, prints
frequency distribution (with exclusion of stopwords currently in testing),
and plots it as a cumulative frequency distribution (in testing).
"""
def tweet_print(tweet_data):
# 'tokenized_tweets.txt' below, for example
tt_txtfile = tweet_data
# Initialize this list so that it may be appended below
json_data=[]
with open('python.json', 'r') as f:
# Allows for JSON manipulation using Python
for line in f:
append_data = json_data.append(json.loads(line))
for item in json_data:
tweets = item['text']
# Open tokenized_tweets.txt
with open(tt_txtfile, 'a') as new:
# Tokenize tweets
tokenized_tweets = nltk.word_tokenize(tweets)
print(tokenized_tweets)
# Write tweets to file, each on a new line
write_tweets = new.write(str(tokenized_tweets) + "\n")
print(write_tweets)
# Converts tweets to format that may be manipulated by NLTK
tweets_nltk = nltk.Text(tokenized_tweets)
# Creates a frequency distribution of tweets_nltk
tweets_fdist = FreqDist(tweets_nltk)
# Ignores samples that consist of only stopwords or punctuation (in testing)
stopwords = nltk.corpus.stopwords.words('english')
punctuation = ['!', '.', ',', '@', '&', '(', ')']
print([sample for sample in tokenized_tweets if sample not in stopwords or punctuation])
# Plots a cumulative frequency distribution of words in tweets (in testing)
tweets_cfd = tweets_fdist.plot(10, cumulative=True)
tweet_print('tokenized_tweets.txt')
|
[
"json.loads",
"nltk.corpus.stopwords.words",
"nltk.Text",
"nltk.FreqDist",
"nltk.word_tokenize"
] |
[((1469, 1496), 'nltk.Text', 'nltk.Text', (['tokenized_tweets'], {}), '(tokenized_tweets)\n', (1478, 1496), False, 'import nltk\n'), ((1578, 1599), 'nltk.FreqDist', 'FreqDist', (['tweets_nltk'], {}), '(tweets_nltk)\n', (1586, 1599), False, 'from nltk import FreqDist\n'), ((1705, 1743), 'nltk.corpus.stopwords.words', 'nltk.corpus.stopwords.words', (['"""english"""'], {}), "('english')\n", (1732, 1743), False, 'import nltk\n'), ((827, 843), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (837, 843), False, 'import json\n'), ((1114, 1140), 'nltk.word_tokenize', 'nltk.word_tokenize', (['tweets'], {}), '(tweets)\n', (1132, 1140), False, 'import nltk\n')]
|
from typing import Optional, Callable
import os
import torch
from torch import nn
from torch.nn import functional as F
from torch.utils.data import Dataset, DataLoader
from torch.optim.lr_scheduler import ReduceLROnPlateau
from torch.serialization import save
import numpy as np
import pandas as pd
from glog import logger
import joblib as jl
from sklearn.impute import SimpleImputer
from sklearn.feature_selection import VarianceThreshold
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import make_pipeline
from fire import Fire
class Baseline(nn.Module):
def __init__(self, n_features, inner_size=1024):
super().__init__()
self.features = nn.Linear(n_features, inner_size)
self.model = nn.Sequential(
nn.LeakyReLU(),
nn.Dropout(p=.5),
nn.Linear(inner_size, 14),
)
def forward(self, x):
x = self.features(x)
return self.model(x)
class PlasticcDataset(Dataset):
def __init__(self, x_data: np.array, y_data: np.array, folds: tuple):
data = zip(x_data, y_data)
self.data = [x for i, x in enumerate(data) if i % 5 in folds]
logger.info(f'There are {len(self.data)} records in the dataset')
self.features_shape = x_data.shape
def __len__(self):
return len(self.data)
def __getitem__(self, item):
return self.data[item]
def map_classes(y_full):
classes = sorted(list(set(y_full)))
mapping = {}
for i, y in enumerate(classes):
mapping[y] = i
logger.info(f'Mapping is {mapping}')
return np.array([mapping[y] for y in y_full])
def read_train():
data = pd.read_csv('data/processed_training.csv', engine='c', sep=';')
data.pop('object_id')
y_full = data.pop('target').values
x_full = data.values.astype('float32')
x_full[np.isnan(x_full)] = 0
x_full[np.isinf(x_full)] = 0
return x_full, y_full
def prepare_data():
if os.path.exists('data/train.bin'):
return jl.load('data/train.bin')
x_full, y_full = read_train()
imputer = SimpleImputer()
vt = VarianceThreshold(threshold=.0001)
pipeline = make_pipeline(imputer, vt, StandardScaler())
x_full = pipeline.fit_transform(x_full)
jl.dump(pipeline, 'preprocess.bin')
y_full = map_classes(y_full)
x_full = x_full.astype('float32')
jl.dump((x_full, y_full), 'data/train.bin')
return x_full, y_full
def make_dataloaders():
x_full, y_full = prepare_data()
train = PlasticcDataset(x_data=x_full, y_data=y_full, folds=(0, 1, 2, 3))
val = PlasticcDataset(x_data=x_full, y_data=y_full, folds=(4,))
shared_params = {'batch_size': 2048, 'shuffle': True}
train = DataLoader(train, drop_last=True, **shared_params)
val = DataLoader(val, drop_last=False, **shared_params)
return train, val
class Trainer:
def __init__(self,
model: nn.Module,
train: DataLoader,
val: DataLoader,
epochs: int = 500,
optimizer: Optional[torch.optim.Optimizer] = None,
loss_fn: Optional[Callable] = None,
scheduler: Optional[ReduceLROnPlateau] = None,
reg_lambda: float = .00002,
reg_norm: int = 1,
device: str = 'cuda:0',
checkpoint: str = './model.pt',
):
self.epochs = epochs
self.model = model.to(device)
self.device = device
self.train = train
self.val = val
self.optimizer = optimizer if optimizer is not None else torch.optim.Adam(model.parameters(), lr=1e-3)
self.scheduler = scheduler if scheduler is not None else ReduceLROnPlateau(optimizer=self.optimizer,
verbose=True)
self.loss_fn = loss_fn if loss_fn is not None else F.cross_entropy
self.reg_lambda = reg_lambda
self.reg_norm = reg_norm
self.current_metric = -np.inf
self.last_improvement = 0
self.checkpoint = checkpoint
def fit_one_epoch(self, n_epoch):
self.model.train(True)
losses, reg_losses = [], []
for i, (x, y) in enumerate(self.train):
x, y = x.to(self.device), y.to(self.device)
self.optimizer.zero_grad()
outputs = self.model(x)
loss = self.loss_fn(outputs, y)
losses.append(loss.item())
for param in self.model.model.parameters():
loss += self.reg_lambda * torch.norm(param, p=self.reg_norm)
reg_loss = loss.item()
reg_losses.append(reg_loss)
loss.backward()
nn.utils.clip_grad_norm_(self.model.parameters(), 1)
self.optimizer.step()
self.model.train(False)
val_losses = []
y_pred_acc, y_true_acc = [], []
with torch.no_grad():
for i, (x, y) in enumerate(self.val):
x, y = x.to(self.device), y.to(self.device)
outputs = self.model(x)
loss = self.loss_fn(outputs, y)
val_losses.append(loss.item())
y_pred_acc.append(outputs.detach().cpu().numpy())
y_true_acc.append(y.detach().cpu().numpy())
train_loss = np.mean(losses)
train_reg_loss = np.mean(reg_losses)
val_loss = np.mean(val_losses)
msg = f'Epoch {n_epoch}: train loss is {train_loss:.5f} (raw), {train_reg_loss:.5f} (reg); val loss is {val_loss:.5f}'
logger.info(msg)
self.scheduler.step(metrics=val_loss, epoch=n_epoch)
y_true_acc, y_pred_acc = map(np.vstack, (y_true_acc, y_pred_acc))
metric = self.evaluate(y_pred=y_pred_acc, y_true=y_true_acc)
if metric > self.current_metric:
self.current_metric = metric
self.last_improvement = n_epoch
save(self.model, f=self.checkpoint)
logger.info(f'Best model has been saved at {n_epoch}, accuracy is {metric:.4f}')
return train_loss, val_loss, metric
def evaluate(self, y_pred, y_true):
return (y_pred.argmax(-1) == y_true).sum() / y_true.shape[-1]
def fit(self):
for i in range(self.epochs):
self.fit_one_epoch(i)
def fit(inner_size=1024, **kwargs):
train, val = make_dataloaders()
trainer = Trainer(model=Baseline(n_features=train.dataset.features_shape[1],
inner_size=inner_size,
),
train=train,
val=val,
loss_fn=F.cross_entropy,
**kwargs
)
trainer.fit()
if __name__ == '__main__':
Fire(fit)
|
[
"torch.nn.Dropout",
"sklearn.preprocessing.StandardScaler",
"pandas.read_csv",
"joblib.dump",
"numpy.isnan",
"numpy.mean",
"torch.no_grad",
"sklearn.impute.SimpleImputer",
"torch.utils.data.DataLoader",
"os.path.exists",
"torch.optim.lr_scheduler.ReduceLROnPlateau",
"torch.nn.Linear",
"sklearn.feature_selection.VarianceThreshold",
"torch.norm",
"numpy.isinf",
"torch.nn.LeakyReLU",
"glog.logger.info",
"fire.Fire",
"numpy.array",
"joblib.load",
"torch.serialization.save"
] |
[((1547, 1583), 'glog.logger.info', 'logger.info', (['f"""Mapping is {mapping}"""'], {}), "(f'Mapping is {mapping}')\n", (1558, 1583), False, 'from glog import logger\n'), ((1595, 1633), 'numpy.array', 'np.array', (['[mapping[y] for y in y_full]'], {}), '([mapping[y] for y in y_full])\n', (1603, 1633), True, 'import numpy as np\n'), ((1665, 1728), 'pandas.read_csv', 'pd.read_csv', (['"""data/processed_training.csv"""'], {'engine': '"""c"""', 'sep': '""";"""'}), "('data/processed_training.csv', engine='c', sep=';')\n", (1676, 1728), True, 'import pandas as pd\n'), ((1958, 1990), 'os.path.exists', 'os.path.exists', (['"""data/train.bin"""'], {}), "('data/train.bin')\n", (1972, 1990), False, 'import os\n'), ((2082, 2097), 'sklearn.impute.SimpleImputer', 'SimpleImputer', ([], {}), '()\n', (2095, 2097), False, 'from sklearn.impute import SimpleImputer\n'), ((2107, 2142), 'sklearn.feature_selection.VarianceThreshold', 'VarianceThreshold', ([], {'threshold': '(0.0001)'}), '(threshold=0.0001)\n', (2124, 2142), False, 'from sklearn.feature_selection import VarianceThreshold\n'), ((2251, 2286), 'joblib.dump', 'jl.dump', (['pipeline', '"""preprocess.bin"""'], {}), "(pipeline, 'preprocess.bin')\n", (2258, 2286), True, 'import joblib as jl\n'), ((2363, 2406), 'joblib.dump', 'jl.dump', (['(x_full, y_full)', '"""data/train.bin"""'], {}), "((x_full, y_full), 'data/train.bin')\n", (2370, 2406), True, 'import joblib as jl\n'), ((2715, 2765), 'torch.utils.data.DataLoader', 'DataLoader', (['train'], {'drop_last': '(True)'}), '(train, drop_last=True, **shared_params)\n', (2725, 2765), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((2776, 2825), 'torch.utils.data.DataLoader', 'DataLoader', (['val'], {'drop_last': '(False)'}), '(val, drop_last=False, **shared_params)\n', (2786, 2825), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((6802, 6811), 'fire.Fire', 'Fire', (['fit'], {}), '(fit)\n', (6806, 6811), False, 'from fire import Fire\n'), ((688, 721), 
'torch.nn.Linear', 'nn.Linear', (['n_features', 'inner_size'], {}), '(n_features, inner_size)\n', (697, 721), False, 'from torch import nn\n'), ((1848, 1864), 'numpy.isnan', 'np.isnan', (['x_full'], {}), '(x_full)\n', (1856, 1864), True, 'import numpy as np\n'), ((1881, 1897), 'numpy.isinf', 'np.isinf', (['x_full'], {}), '(x_full)\n', (1889, 1897), True, 'import numpy as np\n'), ((2007, 2032), 'joblib.load', 'jl.load', (['"""data/train.bin"""'], {}), "('data/train.bin')\n", (2014, 2032), True, 'import joblib as jl\n'), ((2184, 2200), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (2198, 2200), False, 'from sklearn.preprocessing import StandardScaler\n'), ((5355, 5370), 'numpy.mean', 'np.mean', (['losses'], {}), '(losses)\n', (5362, 5370), True, 'import numpy as np\n'), ((5396, 5415), 'numpy.mean', 'np.mean', (['reg_losses'], {}), '(reg_losses)\n', (5403, 5415), True, 'import numpy as np\n'), ((5435, 5454), 'numpy.mean', 'np.mean', (['val_losses'], {}), '(val_losses)\n', (5442, 5454), True, 'import numpy as np\n'), ((5590, 5606), 'glog.logger.info', 'logger.info', (['msg'], {}), '(msg)\n', (5601, 5606), False, 'from glog import logger\n'), ((770, 784), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {}), '()\n', (782, 784), False, 'from torch import nn\n'), ((798, 815), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)'}), '(p=0.5)\n', (808, 815), False, 'from torch import nn\n'), ((828, 853), 'torch.nn.Linear', 'nn.Linear', (['inner_size', '(14)'], {}), '(inner_size, 14)\n', (837, 853), False, 'from torch import nn\n'), ((3727, 3784), 'torch.optim.lr_scheduler.ReduceLROnPlateau', 'ReduceLROnPlateau', ([], {'optimizer': 'self.optimizer', 'verbose': '(True)'}), '(optimizer=self.optimizer, verbose=True)\n', (3744, 3784), False, 'from torch.optim.lr_scheduler import ReduceLROnPlateau\n'), ((4943, 4958), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4956, 4958), False, 'import torch\n'), ((5952, 5987), 'torch.serialization.save', 'save', 
(['self.model'], {'f': 'self.checkpoint'}), '(self.model, f=self.checkpoint)\n', (5956, 5987), False, 'from torch.serialization import save\n'), ((6000, 6085), 'glog.logger.info', 'logger.info', (['f"""Best model has been saved at {n_epoch}, accuracy is {metric:.4f}"""'], {}), "(f'Best model has been saved at {n_epoch}, accuracy is {metric:.4f}'\n )\n", (6011, 6085), False, 'from glog import logger\n'), ((4591, 4625), 'torch.norm', 'torch.norm', (['param'], {'p': 'self.reg_norm'}), '(param, p=self.reg_norm)\n', (4601, 4625), False, 'import torch\n')]
|
import torch
import torch.nn as nn
class Model(object):
def __init__(self, model_config, data_config, is_train=True):
super(Model, self).__init__()
trj_model = None
if model_config['CAMERA_EMBDDING']:
extrinsic_dim = model_config['EXTRINSIC_DIM']
embedd_dim = model_config['EMBEDD_DIM']
else:
extrinsic_dim = 0
embedd_dim = 0
if model_config['MODEL'] == 'RIE':
from lib.model.rie import RIEModel
filter_widths = [int(x) for x in model_config['ARCHITECTURE'].split(',')]
num_joints_in = model_config['NUM_KPTS']
num_joints_out = model_config['NUM_KPTS']
from lib.model.rie import RIEModel
pos_model = RIEModel(num_joints_in, model_config['INPUT_DIM'],
num_joints_out,
filter_widths=filter_widths, causal=model_config['CAUSAL'],
dropout=model_config['DROPOUT'],
channels=model_config['CHANNELS'], latten_features=model_config['LATENT_FEATURES_DIM'],
dense=model_config['DENSE'], is_train=is_train,
Optimize1f=not model_config['DISABLE_OPTIMIZATIONS'], stage=model_config['STAGE'],
extrinsic_dim=extrinsic_dim,
embedd_dim=embedd_dim)
if model_config['TRAJECTORY_MODEL']:
from lib.model.rie import RIETrajectoryModel
trj_model = RIETrajectoryModel(num_joints_in, model_config['INPUT_DIM'],
num_joints_out,
filter_widths=filter_widths, causal=model_config['CAUSAL'],
dropout=model_config['DROPOUT'],
channels=model_config['CHANNELS'],
latten_features=model_config['LATENT_FEATURES_DIM'],
dense=model_config['DENSE'], is_train=is_train,
Optimize1f=not model_config['DISABLE_OPTIMIZATIONS'],
stage=model_config['STAGE'],
extrinsic_dim=extrinsic_dim,
embedd_dim=embedd_dim)
else:
raise ValueError('Unrecognized mdoel {}'.format(model_config['MODEL']))
if torch.cuda.is_available():
pos_model = nn.DataParallel(pos_model).cuda()
trj_model = nn.DataParallel(trj_model).cuda() if not trj_model is None else None
self.pos_model = pos_model
self.trj_model = trj_model
def get_pos_model(self):
return self.pos_model
def get_trj_model(self):
return self.trj_model
|
[
"lib.model.rie.RIETrajectoryModel",
"torch.nn.DataParallel",
"torch.cuda.is_available",
"lib.model.rie.RIEModel"
] |
[((2625, 2650), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2648, 2650), False, 'import torch\n'), ((770, 1228), 'lib.model.rie.RIEModel', 'RIEModel', (['num_joints_in', "model_config['INPUT_DIM']", 'num_joints_out'], {'filter_widths': 'filter_widths', 'causal': "model_config['CAUSAL']", 'dropout': "model_config['DROPOUT']", 'channels': "model_config['CHANNELS']", 'latten_features': "model_config['LATENT_FEATURES_DIM']", 'dense': "model_config['DENSE']", 'is_train': 'is_train', 'Optimize1f': "(not model_config['DISABLE_OPTIMIZATIONS'])", 'stage': "model_config['STAGE']", 'extrinsic_dim': 'extrinsic_dim', 'embedd_dim': 'embedd_dim'}), "(num_joints_in, model_config['INPUT_DIM'], num_joints_out,\n filter_widths=filter_widths, causal=model_config['CAUSAL'], dropout=\n model_config['DROPOUT'], channels=model_config['CHANNELS'],\n latten_features=model_config['LATENT_FEATURES_DIM'], dense=model_config\n ['DENSE'], is_train=is_train, Optimize1f=not model_config[\n 'DISABLE_OPTIMIZATIONS'], stage=model_config['STAGE'], extrinsic_dim=\n extrinsic_dim, embedd_dim=embedd_dim)\n", (778, 1228), False, 'from lib.model.rie import RIEModel\n'), ((1603, 2071), 'lib.model.rie.RIETrajectoryModel', 'RIETrajectoryModel', (['num_joints_in', "model_config['INPUT_DIM']", 'num_joints_out'], {'filter_widths': 'filter_widths', 'causal': "model_config['CAUSAL']", 'dropout': "model_config['DROPOUT']", 'channels': "model_config['CHANNELS']", 'latten_features': "model_config['LATENT_FEATURES_DIM']", 'dense': "model_config['DENSE']", 'is_train': 'is_train', 'Optimize1f': "(not model_config['DISABLE_OPTIMIZATIONS'])", 'stage': "model_config['STAGE']", 'extrinsic_dim': 'extrinsic_dim', 'embedd_dim': 'embedd_dim'}), "(num_joints_in, model_config['INPUT_DIM'], num_joints_out,\n filter_widths=filter_widths, causal=model_config['CAUSAL'], dropout=\n model_config['DROPOUT'], channels=model_config['CHANNELS'],\n latten_features=model_config['LATENT_FEATURES_DIM'], 
dense=model_config\n ['DENSE'], is_train=is_train, Optimize1f=not model_config[\n 'DISABLE_OPTIMIZATIONS'], stage=model_config['STAGE'], extrinsic_dim=\n extrinsic_dim, embedd_dim=embedd_dim)\n", (1621, 2071), False, 'from lib.model.rie import RIETrajectoryModel\n'), ((2676, 2702), 'torch.nn.DataParallel', 'nn.DataParallel', (['pos_model'], {}), '(pos_model)\n', (2691, 2702), True, 'import torch.nn as nn\n'), ((2734, 2760), 'torch.nn.DataParallel', 'nn.DataParallel', (['trj_model'], {}), '(trj_model)\n', (2749, 2760), True, 'import torch.nn as nn\n')]
|
#!/usr/bin/python
import argparse
import os
import re
import sys
import cv2
from lib.diagManager import DiagManager
from lib.roadManager import RoadManager
from lib.cameraCal import CameraCal
from matplotlib import pyplot as plt
# process_road_image handles rendering a single image through the pipeline
def process_road_image(img, roadMgr, diagMgr, scrType=0):
# Run the functions
roadMgr.findLanes(img)
roadMgr.findVehicles()
# debug/diagnostics requested
if False:
#if debug:
# offset for text rendering overlay
offset = 0
color = (192, 192, 0)
# default - full diagnostics
if scrType & 5 == 5:
diagScreen = diagMgr.projectionHD()
offset = 30
if scrType & 4 == 4:
diagScreen = diagMgr.projectionHD()
offset = 30
elif scrType & 3 == 3:
diagScreen = diagMgr.fullDiag()
offset = 30
elif scrType & 3 == 2:
diagScreen = diagMgr.projectionDiag()
offset = 30
elif scrType & 3 == 1:
diagScreen = diagMgr.filterDiag()
offset = 30
color = (192, 192, 192)
if scrType & 8 == 8:
diagScreen = diagMgr.textOverlay(diagScreen, offset=offset, color=color)
result = diagScreen
else:
# if scrType & 8 == 8:
# roadMgr.drawLaneStats()
result = roadMgr.final
#result = roadMgr.projMgr.curImgRoad
return result
# process_image handles rendering a single image through the pipeline
# within the moviepy video rendering context
def process_image(image):
result = process_road_image(image, roadMgr, diagMgr)
current_frame = (int(roadMgr.curFrame)+startFrame)
im_name = './test_videos_out/images/' + '%06d.png' % current_frame
cv2.imwrite(im_name, cv2.cvtColor(result, cv2.COLOR_RGB2BGR))
return result
# our main CLI code. use --help to get full options list
if __name__ == "__main__":
global roadMgr
global diagMgr
global debug
global scrType
global startFrame
# initialize argparse to parse the CLI
usage = 'python %(prog)s [options] infilename outfilename'
desc = 'DiWu\'s Udacity SDC Project 5: Vehicle Detection and Tracking'
diagHelp = 'display diagnostics: [0=off], 1=filter, 2=proj 3=full '
diagHelp += '4=projHD,complete 5=projHD,sentinal'
defaultInput = 'project_video.mp4'
inputHelp = 'input image or video file to process'
defaultOutput = 'project_video_out.mp4'
outputHelp = 'output image or video file'
# set default - final/no diagnostics
parser = argparse.ArgumentParser(prog='main.py', usage=usage, description=desc)
parser.add_argument('--scrType', type=int, default=0, help=diagHelp)
parser.add_argument('--notext', action='store_true', default=False, help='do not render text overlay')
#parser.add_argument('--infilename', type=str, default='./test_images/test6.jpg', help=inputHelp)
parser.add_argument('--infilename', type=str, default='./test_videos/project_video.mp4', help=inputHelp)
args = parser.parse_args()
file_dir = '/'.join(args.infilename.split('/')[:-1])
out_file_dir = file_dir + '_out'
if not os.path.exists(out_file_dir):
os.mkdir(out_file_dir)
args.outfilename = os.path.join(out_file_dir, args.infilename.split('/')[-1])
debug = True
videopattern = re.compile("^.+\.mp4$")
imagepattern = re.compile("^.+\.(jpg|jpeg|JPG|png|PNG)$")
image = None
videoin = None
# set up pipeline processing options
pleaseCheck = "Please check and try again."
pleaseRemove = "Please remove and try again."
invalidExt = "Invalid %s filename extension for output. %s"
validImageExt = "Must end with one of [jpg,jpeg,JPG,png,PNG]"
validVideoExt = "Must end with '.mp4'"
# if video - set up in/out videos
if videopattern.match(args.infilename):
if videopattern.match(args.outfilename):
if not os.path.exists(args.infilename):
print("Video input file: %s does not exist. %s" % (args.infilename, pleaseCheck))
sys.exit(1)
else:
videoin = args.infilename
videoout = args.outfilename
valid = True
else:
print(invalidExt % ("video", validVideoExt))
sys.exit(3)
# if image - set up image processing options
elif imagepattern.match(args.infilename):
if imagepattern.match(args.outfilename):
if not os.path.exists(args.infilename):
print("Image input file: %s does not exist. %s" % (args.infilename, pleaseCheck))
sys.exit(4)
else:
image = cv2.cvtColor(cv2.imread(args.infilename), cv2.COLOR_BGR2RGB)
valid = True
else:
print(invalidExt % ("image", validImageExt))
sys.exit(6)
# set up diagnostic pipeline options if requested
if valid:
# initialization
# load or perform camera calibrations
camCal = CameraCal('camera_cal', 'camera_cal/calibrationdata.p')
# override camCal image size
if image is not None:
camCal.setImageSize(image.shape)
# initialize road manager and its managed pipeline components/modules
roadMgr = RoadManager(camCal)
# initialize diag manager and its managed diagnostics components
diagMgr = DiagManager(roadMgr)
# Image only?
if image is not None:
print("image processing %s..." % args.infilename)
imageout = process_image(image)
cv2.imwrite(args.outfilename, cv2.cvtColor(imageout, cv2.COLOR_RGB2BGR))
print("done image processing %s..." % args.infilename)
# Full video pipeline
elif videoin is not None and videoout is not None:
print("video processing %s..." % videoin)
cap = cv2.VideoCapture(videoin)
# Set the start frame number,
# see: https://stackoverflow.com/questions/11420748/setting-camera-parameters-in-opencv-python
startFrame = 200
cap.set(1, startFrame)
while cap.isOpened():
ret, frame = cap.read()
image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
process_image(image)
cap.release()
cv2.destroyAllWindows()
print("done video processing %s..." % videoin)
else:
print("error detected. exiting.")
|
[
"os.mkdir",
"argparse.ArgumentParser",
"cv2.cvtColor",
"os.path.exists",
"cv2.VideoCapture",
"cv2.imread",
"lib.diagManager.DiagManager",
"lib.cameraCal.CameraCal",
"lib.roadManager.RoadManager",
"cv2.destroyAllWindows",
"sys.exit",
"re.compile"
] |
[((2636, 2706), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""main.py"""', 'usage': 'usage', 'description': 'desc'}), "(prog='main.py', usage=usage, description=desc)\n", (2659, 2706), False, 'import argparse\n'), ((3417, 3441), 're.compile', 're.compile', (['"""^.+\\\\.mp4$"""'], {}), "('^.+\\\\.mp4$')\n", (3427, 3441), False, 'import re\n'), ((3460, 3503), 're.compile', 're.compile', (['"""^.+\\\\.(jpg|jpeg|JPG|png|PNG)$"""'], {}), "('^.+\\\\.(jpg|jpeg|JPG|png|PNG)$')\n", (3470, 3503), False, 'import re\n'), ((1846, 1885), 'cv2.cvtColor', 'cv2.cvtColor', (['result', 'cv2.COLOR_RGB2BGR'], {}), '(result, cv2.COLOR_RGB2BGR)\n', (1858, 1885), False, 'import cv2\n'), ((3236, 3264), 'os.path.exists', 'os.path.exists', (['out_file_dir'], {}), '(out_file_dir)\n', (3250, 3264), False, 'import os\n'), ((3274, 3296), 'os.mkdir', 'os.mkdir', (['out_file_dir'], {}), '(out_file_dir)\n', (3282, 3296), False, 'import os\n'), ((5098, 5153), 'lib.cameraCal.CameraCal', 'CameraCal', (['"""camera_cal"""', '"""camera_cal/calibrationdata.p"""'], {}), "('camera_cal', 'camera_cal/calibrationdata.p')\n", (5107, 5153), False, 'from lib.cameraCal import CameraCal\n'), ((5364, 5383), 'lib.roadManager.RoadManager', 'RoadManager', (['camCal'], {}), '(camCal)\n', (5375, 5383), False, 'from lib.roadManager import RoadManager\n'), ((5476, 5496), 'lib.diagManager.DiagManager', 'DiagManager', (['roadMgr'], {}), '(roadMgr)\n', (5487, 5496), False, 'from lib.diagManager import DiagManager\n'), ((4379, 4390), 'sys.exit', 'sys.exit', (['(3)'], {}), '(3)\n', (4387, 4390), False, 'import sys\n'), ((4004, 4035), 'os.path.exists', 'os.path.exists', (['args.infilename'], {}), '(args.infilename)\n', (4018, 4035), False, 'import os\n'), ((4151, 4162), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4159, 4162), False, 'import sys\n'), ((4929, 4940), 'sys.exit', 'sys.exit', (['(6)'], {}), '(6)\n', (4937, 4940), False, 'import sys\n'), ((5698, 5739), 'cv2.cvtColor', 'cv2.cvtColor', 
(['imageout', 'cv2.COLOR_RGB2BGR'], {}), '(imageout, cv2.COLOR_RGB2BGR)\n', (5710, 5739), False, 'import cv2\n'), ((5970, 5995), 'cv2.VideoCapture', 'cv2.VideoCapture', (['videoin'], {}), '(videoin)\n', (5986, 5995), False, 'import cv2\n'), ((6421, 6444), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (6442, 6444), False, 'import cv2\n'), ((4555, 4586), 'os.path.exists', 'os.path.exists', (['args.infilename'], {}), '(args.infilename)\n', (4569, 4586), False, 'import os\n'), ((4702, 4713), 'sys.exit', 'sys.exit', (['(4)'], {}), '(4)\n', (4710, 4713), False, 'import sys\n'), ((6307, 6345), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (6319, 6345), False, 'import cv2\n'), ((4769, 4796), 'cv2.imread', 'cv2.imread', (['args.infilename'], {}), '(args.infilename)\n', (4779, 4796), False, 'import cv2\n')]
|
#!/usr/bin/env python
import datetime
import getopt
import inspect
import json
import os
import platform
import sys
import time
import threading
from Global import __MULTIPROCESSING__
__version__ = "0.1"
if __MULTIPROCESSING__:
import multiprocessing
from multiprocessing import Queue
from multiprocessing import Array
else:
if sys.version_info[0] < 3:
from Queue import Queue
else:
from queue import Queue
import Logger
starttime = datetime.datetime.now().strftime("%b %d %Y %H:%M:%S")
#-----------------------
class myApp(object):
    """Application driver.

    Wires up the 'Two' and 'Three' workers (separate processes when
    __MULTIPROCESSING__ is set, otherwise threads sharing in-process
    queues) and pumps messages between them from the main loop.
    """
    logger = None     # module logger (bound in main)
    logconfig = None  # shared logging configuration (bound in main)
    pTwo = None       # Worker Two thread/process
    pThr = None       # Worker Thr thread/process
    def main(self, argv):
        """Entry point: parse command-line arguments, then initialize and run.

        argv -- argument list without the program name (i.e. sys.argv[1:]).
        """
        self.logger = Logger.logging.getLogger(__name__)
        self.logconfig = Logger.logconfig
        self.logger.info("Start time: " + starttime)
        # parse command line arguments
        try:
            opts, args = getopt.getopt(argv, "h", ["help"])
        except getopt.GetoptError as e:
            self.logger.exception(str(e))
            self.usage()
            return
        for opt, arg in opts:
            if opt in ("-h", "--help"):
                self.usage()
                return
            else:
                self.usage()
                return
        # initilize and run
        self.initilize()
        self.start()
    #-----------------------
    def usage(self):
        """Print command-line usage.

        BUG FIX: main() calls self.usage() on -h/--help and on a bad
        option, but the original class never defined it, so those paths
        raised AttributeError instead of printing help.
        """
        print("usage: " + os.path.basename(sys.argv[0]) + " [-h|--help]")
    #-----------------------
    def initilize(self):
        """Log platform details, create the message queues, and construct
        the Two/Three workers.  (Method name spelling kept as-is for
        backward compatibility with any external callers.)"""
        try:
            # identify platform
            self.logger.info("------------------------------")
            self.logger.info("  machine: " + platform.machine())
            self.logger.info("  version: " + platform.version())
            self.logger.info(" platform: " + platform.platform())
            self.logger.info("   system: " + platform.system())
            self.logger.info("processor: " + platform.processor())
            if __MULTIPROCESSING__:
                self.logger.info("    cores: " + str(multiprocessing.cpu_count()))
            self.logger.info("    nodes: " + platform.node())
            self.logger.info("PythonImp: " + platform.python_implementation())
            self.logger.info("PythonVer: " + platform.python_version())
            self.logger.info("starttime: " + starttime)
            self.logger.info("scriptver: " + __version__)
            self.logger.info("------------------------------")
            # initialize queues: process-safe queues when workers are
            # separate processes, plain queues when they are threads
            if __MULTIPROCESSING__:
                self.qOne = multiprocessing.Queue()
                self.qTwo = multiprocessing.Queue()
                self.qThr = multiprocessing.Queue()
            else:
                self.qOne = Queue()
                self.qTwo = Queue()
                self.qThr = Queue()
            # initialize 'two' process
            try:
                import Two
                self.pTwo = Two.Two(self.logger, self.logconfig, self.qOne, self.qTwo, self.qThr)
            except Exception as e:
                self.logger.exception(e)
                print( "Two Initialization Error: " + str(e) )
            # initialize 'three' process
            try:
                import Three
                self.pThr = Three.Three(self.logger, self.logconfig, self.qOne, self.qTwo, self.qThr)
            except Exception as e:
                self.logger.exception(e)
                print( "Three Initialization Error: " + str(e) )
            # Queue for main process; bound put() methods for posting to workers
            self.getMsgQue = self.qOne
            self.putMsgTwo = self.qTwo.put
            self.putMsgThr = self.qThr.put
            self.RUNNING = True
        except Exception as e:
            self.logger.exception(e)
    #-----------------------
    def start(self):
        """Start both workers, then run the main message loop until
        interrupted or until stop() clears self.RUNNING."""
        try:
            # start two
            self.pTwo.start()
            # start three
            self.pThr.start()
            simpleCnt = 0
            while self.RUNNING:
                try:
                    #-----------------------
                    # process main
                    if (not self.getMsgQue.empty()):
                        msg = self.getMsgQue.get()
                        # BUG FIX: was str(self.msg) -- 'msg' is a local
                        # variable, so the first received message raised
                        # AttributeError here.
                        self.logger.debug('Main : ' + str(msg))
                        if (msg != None):
                            event = msg['event']
                            data = msg['data']  # renamed from 'type' (shadowed the builtin)
                    else:
                        # BUG FIX: the original slept only after a None
                        # message; an empty queue busy-waited at 100% CPU.
                        time.sleep(.2)
                    simpleCnt += 1
                    if (simpleCnt % 6):
                        msgOne = { 'event' : 'print',
                                   'data' : ['Hello from Main', 'two', 3, 4, 'V', 'VI', 'VII', 8, 'nine']}
                        self.putMsgTwo( msgOne )
                    if (simpleCnt > 30):
                        simpleCnt = 0
                except (KeyboardInterrupt, SystemExit):
                    self.logger.info("Interrupted")
                    self.stop()
                except Exception as e:
                    self.logger.exception(str(e))
                    self.stop()
        except Exception as e:
            self.logger.exception(str(e))
            self.stop()
        finally:
            self.logger.info("Exiting")
    #-----------------------
    def stop(self):
        """Stop and join both workers, then end the main loop."""
        # stop processes
        if(self.pTwo != None):
            self.pTwo.stop()
        if(self.pThr != None):
            self.pThr.stop()
        if(self.pTwo != None):
            self.pTwo.join()
        if(self.pThr != None):
            self.pThr.join()
        self.RUNNING = False
if __name__== '__main__':
    # Run the application with the command-line arguments (minus program name).
    myApp().main(sys.argv[1:])
|
[
"Logger.logging.getLogger",
"platform.processor",
"platform.python_implementation",
"platform.python_version",
"platform.node",
"getopt.getopt",
"platform.platform",
"platform.system",
"Two.Two",
"time.sleep",
"multiprocessing.Queue",
"Three.Three",
"platform.version",
"platform.machine",
"datetime.datetime.now",
"queue.Queue",
"multiprocessing.cpu_count"
] |
[((476, 499), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (497, 499), False, 'import datetime\n'), ((759, 793), 'Logger.logging.getLogger', 'Logger.logging.getLogger', (['__name__'], {}), '(__name__)\n', (783, 793), False, 'import Logger\n'), ((968, 1002), 'getopt.getopt', 'getopt.getopt', (['argv', '"""h"""', "['help']"], {}), "(argv, 'h', ['help'])\n", (981, 1002), False, 'import getopt\n'), ((2492, 2515), 'multiprocessing.Queue', 'multiprocessing.Queue', ([], {}), '()\n', (2513, 2515), False, 'import multiprocessing\n'), ((2544, 2567), 'multiprocessing.Queue', 'multiprocessing.Queue', ([], {}), '()\n', (2565, 2567), False, 'import multiprocessing\n'), ((2596, 2619), 'multiprocessing.Queue', 'multiprocessing.Queue', ([], {}), '()\n', (2617, 2619), False, 'import multiprocessing\n'), ((2666, 2673), 'queue.Queue', 'Queue', ([], {}), '()\n', (2671, 2673), False, 'from queue import Queue\n'), ((2702, 2709), 'queue.Queue', 'Queue', ([], {}), '()\n', (2707, 2709), False, 'from queue import Queue\n'), ((2738, 2745), 'queue.Queue', 'Queue', ([], {}), '()\n', (2743, 2745), False, 'from queue import Queue\n'), ((2858, 2927), 'Two.Two', 'Two.Two', (['self.logger', 'self.logconfig', 'self.qOne', 'self.qTwo', 'self.qThr'], {}), '(self.logger, self.logconfig, self.qOne, self.qTwo, self.qThr)\n', (2865, 2927), False, 'import Two\n'), ((3183, 3256), 'Three.Three', 'Three.Three', (['self.logger', 'self.logconfig', 'self.qOne', 'self.qTwo', 'self.qThr'], {}), '(self.logger, self.logconfig, self.qOne, self.qTwo, self.qThr)\n', (3194, 3256), False, 'import Three\n'), ((1604, 1622), 'platform.machine', 'platform.machine', ([], {}), '()\n', (1620, 1622), False, 'import platform\n'), ((1669, 1687), 'platform.version', 'platform.version', ([], {}), '()\n', (1685, 1687), False, 'import platform\n'), ((1734, 1753), 'platform.platform', 'platform.platform', ([], {}), '()\n', (1751, 1753), False, 'import platform\n'), ((1800, 1817), 'platform.system', 
'platform.system', ([], {}), '()\n', (1815, 1817), False, 'import platform\n'), ((1864, 1884), 'platform.processor', 'platform.processor', ([], {}), '()\n', (1882, 1884), False, 'import platform\n'), ((2050, 2065), 'platform.node', 'platform.node', ([], {}), '()\n', (2063, 2065), False, 'import platform\n'), ((2112, 2144), 'platform.python_implementation', 'platform.python_implementation', ([], {}), '()\n', (2142, 2144), False, 'import platform\n'), ((2191, 2216), 'platform.python_version', 'platform.python_version', ([], {}), '()\n', (2214, 2216), False, 'import platform\n'), ((4364, 4379), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (4374, 4379), False, 'import time\n'), ((1975, 2002), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (2000, 2002), False, 'import multiprocessing\n')]
|
import os
import uuid
from datetime import datetime, timedelta
import mock
import pytz
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
from django.urls import reverse
from utils.widget import quill
from wiki import modelgetters
from wiki.models import wikipage, wikisection
from wiki.models.wikipage import Keywords, WikiPage
from wiki.models.wikisection import WikiSection
class WikiModelGettersTestCase(TestCase):
    """Tests for wiki.modelgetters: listing all wiki pages and fetching a
    single page with its sections, including permission and error paths."""
    def setUp(self):
        """Create two users (a superuser and a regular user), two wiki
        pages owned by the superuser, and three sections on the first
        page (section index 1 has no creator/updater)."""
        self.firstUser = User(is_superuser=True, username='test1', password='<PASSWORD>', email='<EMAIL>', first_name='testname1', last_name='testlast2')
        self.secondUser = User(is_superuser=False, username='test2', password='<PASSWORD>', email='<EMAIL>', first_name='testname2', last_name='testlast2')
        self.firstUser.save()
        self.secondUser.save()
        self.wikiuuid = [uuid.uuid4(), uuid.uuid4(), uuid.uuid4(), uuid.uuid4()]
        self.wikistext = ['{"ops":[{"insert":"123123\\n"}]}', 'text', None]
        self.wikisuuid = [uuid.uuid4(), uuid.uuid4(), uuid.uuid4(), uuid.uuid4(), uuid.uuid4()]
        self.wikipath = 'wiki'
        self.createdtime = datetime.now(pytz.utc)
        self.wikiPages = []
        for i in range(2):
            self.wikiPages.append(WikiPage(unid=self.wikiuuid[i], createdon=self.createdtime, updatedon=self.createdtime, createdby=self.firstUser, updatedby=self.secondUser, title='testpage'+str(i+1)))
            self.wikiPages[i].save()
            # Re-stamp timestamps after the first save (auto_now fields
            # would otherwise overwrite them), staggering pages by 1 hour.
            self.wikiPages[i].createdon=self.createdtime + timedelta(hours=i)
            self.wikiPages[i].updatedon=self.createdtime + timedelta(hours=i)
            self.wikiPages[i].save()
        self.wikiSections = []
        for i in range(3):
            self.wikiSections.append(WikiSection(unid=self.wikisuuid[i], createdon=self.createdtime, updatedon=self.createdtime, createdby=self.firstUser, updatedby=self.secondUser, title='testsec'+str(i+1), pageorder=i+1, text=self.wikistext[i], wikipage=self.wikiPages[0]))
            self.wikiSections[i].save()
            self.wikiSections[i].createdon=self.createdtime + timedelta(hours=i)
            self.wikiSections[i].updatedon=self.createdtime + timedelta(hours=i)
            # Second section deliberately has no creator/updater to cover
            # the anonymous-author case.
            if i==1:
                self.wikiSections[1].createdby = None
                self.wikiSections[1].updatedby = None
            self.wikiSections[i].save()
    def test_form_all_wiki_pages_data_success(self):
        """Superuser sees all pages, newest first."""
        self.wikiPages.reverse()
        self.assertListEqual(list(modelgetters.form_all_wiki_pages_data(self.firstUser)['wiki_pages']), self.wikiPages)
    def test_form_all_wiki_pages_data_fail_permission(self):
        """Regular user sees no pages."""
        self.assertListEqual(list(modelgetters.form_all_wiki_pages_data(self.secondUser)['wiki_pages']), [])
    def test_form_all_wiki_pages_data_failed(self):
        """A database error makes the getter return None instead of raising."""
        with mock.patch('wiki.models.wikipage.WikiPage.objects.all') as failmock:
            failmock.side_effect = Exception("random error")
            self.assertIsNone(modelgetters.form_all_wiki_pages_data(self.firstUser))
    def test_form_get_one_wiki_page_data_success_3(self):
        """Superuser gets the page plus all three of its sections."""
        result = modelgetters.get_one_wiki_page_data(self.wikiPages[0].unid, self.firstUser)
        self.assertEqual(result['wiki_page'], self.wikiPages[0])
        self.assertListEqual(list(result['wiki_page_sections']), self.wikiSections)
    def test_form_get_one_wiki_page_data_success_2_common_knowledge(self):
        """Regular user sees only sections flagged as common knowledge."""
        self.wikiSections[0].commonknowledge = True
        self.wikiSections[0].save()
        self.wikiSections[1].commonknowledge = True
        self.wikiSections[1].save()
        result = modelgetters.get_one_wiki_page_data(self.wikiPages[0].unid, self.secondUser)
        self.assertEqual(result['wiki_page'], self.wikiPages[0])
        self.assertListEqual(list(result['wiki_page_sections']), self.wikiSections[0:2])
    def test_form_get_one_wiki_page_data_fail_permission(self):
        """Regular user with no common-knowledge sections gets None."""
        result = modelgetters.get_one_wiki_page_data(self.wikiPages[0].unid, self.secondUser)
        self.assertIsNone(result)
    def test_form_get_one_wiki_page_data_success_0(self):
        """A page without sections returns an empty section list."""
        result = modelgetters.get_one_wiki_page_data(self.wikiPages[1].unid, self.firstUser)
        self.assertEqual(result['wiki_page'], self.wikiPages[1])
        self.assertListEqual(list(result['wiki_page_sections']), [])
    def test_form_get_one_wiki_page_data_fail_no_page(self):
        """An unknown page id returns None."""
        self.assertIsNone(modelgetters.get_one_wiki_page_data(uuid.uuid4(), self.firstUser))
    def test_form_get_one_wiki_page_data_fail_error(self):
        """A database error makes the getter return None instead of raising."""
        with mock.patch('wiki.models.wikipage.WikiPage.objects.get') as failmock:
            failmock.side_effect = Exception("random error")
            self.assertIsNone(modelgetters.get_one_wiki_page_data(self.wikiPages[0].unid, self.firstUser))
|
[
"wiki.modelgetters.get_one_wiki_page_data",
"uuid.uuid4",
"wiki.modelgetters.form_all_wiki_pages_data",
"django.contrib.auth.models.User",
"mock.patch",
"datetime.timedelta",
"datetime.datetime.now"
] |
[((527, 660), 'django.contrib.auth.models.User', 'User', ([], {'is_superuser': '(True)', 'username': '"""test1"""', 'password': '"""<PASSWORD>"""', 'email': '"""<EMAIL>"""', 'first_name': '"""testname1"""', 'last_name': '"""testlast2"""'}), "(is_superuser=True, username='test1', password='<PASSWORD>', email=\n '<EMAIL>', first_name='testname1', last_name='testlast2')\n", (531, 660), False, 'from django.contrib.auth.models import User\n'), ((682, 816), 'django.contrib.auth.models.User', 'User', ([], {'is_superuser': '(False)', 'username': '"""test2"""', 'password': '"""<PASSWORD>"""', 'email': '"""<EMAIL>"""', 'first_name': '"""testname2"""', 'last_name': '"""testlast2"""'}), "(is_superuser=False, username='test2', password='<PASSWORD>', email=\n '<EMAIL>', first_name='testname2', last_name='testlast2')\n", (686, 816), False, 'from django.contrib.auth.models import User\n'), ((1184, 1206), 'datetime.datetime.now', 'datetime.now', (['pytz.utc'], {}), '(pytz.utc)\n', (1196, 1206), False, 'from datetime import datetime, timedelta\n'), ((3139, 3214), 'wiki.modelgetters.get_one_wiki_page_data', 'modelgetters.get_one_wiki_page_data', (['self.wikiPages[0].unid', 'self.firstUser'], {}), '(self.wikiPages[0].unid, self.firstUser)\n', (3174, 3214), False, 'from wiki import modelgetters\n'), ((3637, 3713), 'wiki.modelgetters.get_one_wiki_page_data', 'modelgetters.get_one_wiki_page_data', (['self.wikiPages[0].unid', 'self.secondUser'], {}), '(self.wikiPages[0].unid, self.secondUser)\n', (3672, 3713), False, 'from wiki import modelgetters\n'), ((3954, 4030), 'wiki.modelgetters.get_one_wiki_page_data', 'modelgetters.get_one_wiki_page_data', (['self.wikiPages[0].unid', 'self.secondUser'], {}), '(self.wikiPages[0].unid, self.secondUser)\n', (3989, 4030), False, 'from wiki import modelgetters\n'), ((4149, 4224), 'wiki.modelgetters.get_one_wiki_page_data', 'modelgetters.get_one_wiki_page_data', (['self.wikiPages[1].unid', 'self.firstUser'], {}), '(self.wikiPages[1].unid, 
self.firstUser)\n', (4184, 4224), False, 'from wiki import modelgetters\n'), ((898, 910), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (908, 910), False, 'import uuid\n'), ((912, 924), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (922, 924), False, 'import uuid\n'), ((926, 938), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (936, 938), False, 'import uuid\n'), ((940, 952), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (950, 952), False, 'import uuid\n'), ((1056, 1068), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1066, 1068), False, 'import uuid\n'), ((1070, 1082), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1080, 1082), False, 'import uuid\n'), ((1084, 1096), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1094, 1096), False, 'import uuid\n'), ((1098, 1110), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1108, 1110), False, 'import uuid\n'), ((1112, 1124), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1122, 1124), False, 'import uuid\n'), ((2844, 2899), 'mock.patch', 'mock.patch', (['"""wiki.models.wikipage.WikiPage.objects.all"""'], {}), "('wiki.models.wikipage.WikiPage.objects.all')\n", (2854, 2899), False, 'import mock\n'), ((4587, 4642), 'mock.patch', 'mock.patch', (['"""wiki.models.wikipage.WikiPage.objects.get"""'], {}), "('wiki.models.wikipage.WikiPage.objects.get')\n", (4597, 4642), False, 'import mock\n'), ((1561, 1579), 'datetime.timedelta', 'timedelta', ([], {'hours': 'i'}), '(hours=i)\n', (1570, 1579), False, 'from datetime import datetime, timedelta\n'), ((1639, 1657), 'datetime.timedelta', 'timedelta', ([], {'hours': 'i'}), '(hours=i)\n', (1648, 1657), False, 'from datetime import datetime, timedelta\n'), ((2131, 2149), 'datetime.timedelta', 'timedelta', ([], {'hours': 'i'}), '(hours=i)\n', (2140, 2149), False, 'from datetime import datetime, timedelta\n'), ((2212, 2230), 'datetime.timedelta', 'timedelta', ([], {'hours': 'i'}), '(hours=i)\n', (2221, 2230), False, 'from datetime import datetime, timedelta\n'), ((3004, 3057), 
'wiki.modelgetters.form_all_wiki_pages_data', 'modelgetters.form_all_wiki_pages_data', (['self.firstUser'], {}), '(self.firstUser)\n', (3041, 3057), False, 'from wiki import modelgetters\n'), ((4483, 4495), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4493, 4495), False, 'import uuid\n'), ((4747, 4822), 'wiki.modelgetters.get_one_wiki_page_data', 'modelgetters.get_one_wiki_page_data', (['self.wikiPages[0].unid', 'self.firstUser'], {}), '(self.wikiPages[0].unid, self.firstUser)\n', (4782, 4822), False, 'from wiki import modelgetters\n'), ((2521, 2574), 'wiki.modelgetters.form_all_wiki_pages_data', 'modelgetters.form_all_wiki_pages_data', (['self.firstUser'], {}), '(self.firstUser)\n', (2558, 2574), False, 'from wiki import modelgetters\n'), ((2703, 2757), 'wiki.modelgetters.form_all_wiki_pages_data', 'modelgetters.form_all_wiki_pages_data', (['self.secondUser'], {}), '(self.secondUser)\n', (2740, 2757), False, 'from wiki import modelgetters\n')]
|
import HSCIC as hs
import tensorflow as tf
from tensorflow import keras
from keras import layers
import requests
requests.packages.urllib3.disable_warnings()  # silence urllib3 warnings for unverified HTTPS
import ssl
# Monkey-patch ssl to skip certificate verification so HTTPS downloads
# (e.g. pretrained weights) work behind proxies or broken cert chains.
# NOTE(review): this disables TLS verification process-wide.
try:
    _create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
    # Older Python builds lack _create_unverified_context; keep defaults.
    pass
else:
    ssl._create_default_https_context = _create_unverified_https_context
class Model(tf.keras.Model) :
    """Variational encoder/decoder with an HSCIC regularizer.

    Maps a 2-D input through a Gaussian latent (2-D) to a scalar output.
    The training loss is a weighted sum of the supervised model loss, the
    KL divergence of the latent, and an HSCIC conditional-independence
    penalty between the prediction and inputs[:, 1] given inputs[:, 0].
    """
    def __init__(self,
                 model_loss_weight = 1.0,
                 kl_loss_weight = 1.0,
                 hscic_loss_weight = 1.0):
        """Build encoder/decoder and set the three loss weights."""
        super(Model, self).__init__()
        # define the encoder
        encoder_inputs = keras.Input(shape=(2,))
        x = layers.Dense(200, activation="relu")(encoder_inputs)
        x = layers.Dense(1000, activation="relu")(x)
        x = layers.Dense(1000, activation="relu")(x)
        x = layers.Dense(1000, activation="relu")(x)
        x = layers.Dense(200, activation="relu")(x)
        latent_mean = layers.Dense(2, name="mean")(x)
        latent_log_var = layers.Dense(2, name="log_var")(x)
        # reparameterization trick: sample latent from (mean, log_var)
        latent_sample = self.gaussian_sample([latent_mean, latent_log_var])
        self.encoder = keras.Model(encoder_inputs,
                                 [latent_mean, latent_log_var, latent_sample],
                                 name="encoder")
        # define the decoder
        self.decoder = keras.Sequential(
            [
                layers.Dense(200,activation="relu"),
                layers.Dense(200,activation="relu"),
                layers.Dense(200,activation="relu"),
                layers.Dense(200,activation="relu"),
                layers.Dense(1),
            ]
        )
        # define additional losses
        self.HSCIC = hs.HSCIC()
        # weights for loss functions
        self.model_loss_weight = model_loss_weight
        self.kl_loss_weight = kl_loss_weight
        self.hscic_loss_weight = hscic_loss_weight
        # define loss trackers
        self.model_loss_tracker = keras.metrics.Mean(name="model_loss")
        self.kl_loss_tracker = keras.metrics.Mean(name="kl_loss")
        self.hscic_loss_tracker = keras.metrics.Mean(name="hscic_loss")
        self.total_loss_tracker = keras.metrics.Mean(name="total_loss")
    @property
    def metrics(self):
        # Listing the trackers here lets Keras reset them between epochs.
        return [
            self.model_loss_tracker,
            self.kl_loss_tracker,
            self.hscic_loss_tracker,
            self.total_loss_tracker
        ]
    # sample from Gaussian distribution (reparameterization trick)
    def gaussian_sample(self, inputs):
        mean, log_var = inputs
        batch = tf.shape(mean)[0]
        dim = tf.shape(mean)[1]
        eps = tf.keras.backend.random_normal(shape=(batch, dim))
        return mean + tf.exp(0.5 * log_var) * eps
    # call function for the model: encode, then decode the latent sample
    def call(self, inputs):
        _, _, latent_sample = self.encoder(inputs)
        output = self.decoder(latent_sample)
        return output
    # training step to run on each minibatch
    def train_step(self, data):
        # train the model
        inputs, y_train = data
        with tf.GradientTape() as tape:
            # train the model and get loss
            latent_mean, latent_log_var, latent_sample = self.encoder(inputs)
            y_pred = self.decoder(latent_sample)
            # get model loss (compiled supervised loss)
            model_loss = self.loss(y_train, y_pred)
            # get HSCIC loss
            hscic_loss = self.HSCIC(Y = y_pred,
                                    A = inputs[:, 1],
                                    X = inputs[:, 0],
                                    H = latent_sample)
            # get KL loss (analytic KL of diagonal Gaussian vs N(0, I))
            kl_loss = -0.5 * (1 + latent_log_var - tf.math.square(latent_mean) - tf.math.exp(latent_log_var))
            kl_loss = tf.reduce_mean(tf.reduce_sum(kl_loss, axis=1))
            # get weighted total loss
            total_loss = self.model_loss_weight * model_loss
            total_loss += self.kl_loss_weight * kl_loss
            total_loss += self.hscic_loss_weight * hscic_loss
        # update parameters
        grads = tape.gradient(total_loss, self.trainable_weights)
        self.optimizer.apply_gradients(zip(grads, self.trainable_weights))
        # update trackers
        self.model_loss_tracker.update_state(model_loss)
        self.kl_loss_tracker.update_state(kl_loss)
        self.hscic_loss_tracker.update_state(hscic_loss)
        self.total_loss_tracker.update_state(total_loss)
        return {
            "model_loss" : self.model_loss_tracker.result(),
            "kl_loss" : self.kl_loss_tracker.result(),
            "hscic_loss" : self.hscic_loss_tracker.result(),
            "total_loss" : self.total_loss_tracker.result()
        }
|
[
"requests.packages.urllib3.disable_warnings",
"tensorflow.reduce_sum",
"tensorflow.keras.metrics.Mean",
"tensorflow.keras.Input",
"tensorflow.keras.backend.random_normal",
"tensorflow.keras.Model",
"tensorflow.shape",
"keras.layers.Dense",
"tensorflow.exp",
"HSCIC.HSCIC",
"tensorflow.math.exp",
"tensorflow.math.square",
"tensorflow.GradientTape"
] |
[((114, 158), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {}), '()\n', (156, 158), False, 'import requests\n'), ((630, 653), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '(2,)'}), '(shape=(2,))\n', (641, 653), False, 'from tensorflow import keras\n'), ((1150, 1243), 'tensorflow.keras.Model', 'keras.Model', (['encoder_inputs', '[latent_mean, latent_log_var, latent_sample]'], {'name': '"""encoder"""'}), "(encoder_inputs, [latent_mean, latent_log_var, latent_sample],\n name='encoder')\n", (1161, 1243), False, 'from tensorflow import keras\n'), ((1721, 1731), 'HSCIC.HSCIC', 'hs.HSCIC', ([], {}), '()\n', (1729, 1731), True, 'import HSCIC as hs\n'), ((1986, 2023), 'tensorflow.keras.metrics.Mean', 'keras.metrics.Mean', ([], {'name': '"""model_loss"""'}), "(name='model_loss')\n", (2004, 2023), False, 'from tensorflow import keras\n'), ((2058, 2092), 'tensorflow.keras.metrics.Mean', 'keras.metrics.Mean', ([], {'name': '"""kl_loss"""'}), "(name='kl_loss')\n", (2076, 2092), False, 'from tensorflow import keras\n'), ((2127, 2164), 'tensorflow.keras.metrics.Mean', 'keras.metrics.Mean', ([], {'name': '"""hscic_loss"""'}), "(name='hscic_loss')\n", (2145, 2164), False, 'from tensorflow import keras\n'), ((2199, 2236), 'tensorflow.keras.metrics.Mean', 'keras.metrics.Mean', ([], {'name': '"""total_loss"""'}), "(name='total_loss')\n", (2217, 2236), False, 'from tensorflow import keras\n'), ((2640, 2690), 'tensorflow.keras.backend.random_normal', 'tf.keras.backend.random_normal', ([], {'shape': '(batch, dim)'}), '(shape=(batch, dim))\n', (2670, 2690), True, 'import tensorflow as tf\n'), ((667, 703), 'keras.layers.Dense', 'layers.Dense', (['(200)'], {'activation': '"""relu"""'}), "(200, activation='relu')\n", (679, 703), False, 'from keras import layers\n'), ((732, 769), 'keras.layers.Dense', 'layers.Dense', (['(1000)'], {'activation': '"""relu"""'}), "(1000, activation='relu')\n", (744, 769), False, 'from keras import 
layers\n'), ((785, 822), 'keras.layers.Dense', 'layers.Dense', (['(1000)'], {'activation': '"""relu"""'}), "(1000, activation='relu')\n", (797, 822), False, 'from keras import layers\n'), ((838, 875), 'keras.layers.Dense', 'layers.Dense', (['(1000)'], {'activation': '"""relu"""'}), "(1000, activation='relu')\n", (850, 875), False, 'from keras import layers\n'), ((891, 927), 'keras.layers.Dense', 'layers.Dense', (['(200)'], {'activation': '"""relu"""'}), "(200, activation='relu')\n", (903, 927), False, 'from keras import layers\n'), ((957, 985), 'keras.layers.Dense', 'layers.Dense', (['(2)'], {'name': '"""mean"""'}), "(2, name='mean')\n", (969, 985), False, 'from keras import layers\n'), ((1014, 1045), 'keras.layers.Dense', 'layers.Dense', (['(2)'], {'name': '"""log_var"""'}), "(2, name='log_var')\n", (1026, 1045), False, 'from keras import layers\n'), ((2572, 2586), 'tensorflow.shape', 'tf.shape', (['mean'], {}), '(mean)\n', (2580, 2586), True, 'import tensorflow as tf\n'), ((2606, 2620), 'tensorflow.shape', 'tf.shape', (['mean'], {}), '(mean)\n', (2614, 2620), True, 'import tensorflow as tf\n'), ((3072, 3089), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (3087, 3089), True, 'import tensorflow as tf\n'), ((1411, 1447), 'keras.layers.Dense', 'layers.Dense', (['(200)'], {'activation': '"""relu"""'}), "(200, activation='relu')\n", (1423, 1447), False, 'from keras import layers\n'), ((1464, 1500), 'keras.layers.Dense', 'layers.Dense', (['(200)'], {'activation': '"""relu"""'}), "(200, activation='relu')\n", (1476, 1500), False, 'from keras import layers\n'), ((1517, 1553), 'keras.layers.Dense', 'layers.Dense', (['(200)'], {'activation': '"""relu"""'}), "(200, activation='relu')\n", (1529, 1553), False, 'from keras import layers\n'), ((1570, 1606), 'keras.layers.Dense', 'layers.Dense', (['(200)'], {'activation': '"""relu"""'}), "(200, activation='relu')\n", (1582, 1606), False, 'from keras import layers\n'), ((1623, 1638), 'keras.layers.Dense', 
'layers.Dense', (['(1)'], {}), '(1)\n', (1635, 1638), False, 'from keras import layers\n'), ((2713, 2734), 'tensorflow.exp', 'tf.exp', (['(0.5 * log_var)'], {}), '(0.5 * log_var)\n', (2719, 2734), True, 'import tensorflow as tf\n'), ((3768, 3798), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['kl_loss'], {'axis': '(1)'}), '(kl_loss, axis=1)\n', (3781, 3798), True, 'import tensorflow as tf\n'), ((3702, 3729), 'tensorflow.math.exp', 'tf.math.exp', (['latent_log_var'], {}), '(latent_log_var)\n', (3713, 3729), True, 'import tensorflow as tf\n'), ((3672, 3699), 'tensorflow.math.square', 'tf.math.square', (['latent_mean'], {}), '(latent_mean)\n', (3686, 3699), True, 'import tensorflow as tf\n')]
|
import argparse
import os
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import PIL
import scipy.io
import scipy.misc
import tensorflow as tf
from keras import backend as K
from keras.layers import Conv2D, Input, Lambda
from keras.models import Model, load_model
from matplotlib.pyplot import imshow
from yad2k.models.keras_yolo import (preprocess_true_boxes, yolo_body,
yolo_boxes_to_corners, yolo_head,
yolo_loss)
from yolo_utils import (draw_boxes, generate_colors, preprocess_image,
read_anchors, read_classes, scale_boxes)
IMG_SZ = [416., 416., 3.]
class YOLO:
    """YOLO detector wrapper.

    Loads a pretrained Keras model plus class/anchor metadata and builds
    the TF (v1, graph-mode) tensors that turn raw model output into
    score-filtered, NMS-pruned boxes/scores/classes.
    """
    def __init__(self):
        self.model = load_model("model_data/yolo.h5")
        self.class_names = read_classes("model_data/coco_classes.txt")
        self.anchors = read_anchors("model_data/yolo_anchors.txt")
        self._create_placeholders()
        self._evaluate_output()
        # Final detection tensors; boxes are rescaled to a 720x1280 image.
        self.scores, self.boxes, self.classes = self._yolo_eval(self.outputs,
                                                                (720., 1280.))
    def _create_placeholders(self):
        # NOTE(review): self.X is created but never fed below -- inference
        # runs through self.model.input instead.  Confirm before removing.
        with tf.name_scope('placeholders'):
            self.X = tf.placeholder(tf.float32, shape=[None, *IMG_SZ], name='X')
    def _evaluate_output(self):
        # Decode raw model output into (confidence, xy, wh, class_probs).
        with tf.name_scope('output'):
            self.outputs = yolo_head(self.model.output, self.anchors,
                                    len(self.class_names))
    def _yolo_filter_boxes(self, box_confidence, boxes, box_class_probs,
                        threshold=.6):
        '''
        Keep only boxes whose best class score exceeds `threshold`.

        box_confidence : bs x 19 x 19 x anchors x 1
        boxes          : bs x 19 x 19 x anchors x 4
        box_class_probs: bs x 19 x 19 x anchors x #classes
        threshold      : float

        Returns flattened (scores, boxes, classes) for surviving boxes.
        '''
        # bs x 19 x 19 x anchors x #classes
        box_scores = box_confidence * box_class_probs
        # bs x 19 x 19 x anchors
        box_classes = K.argmax(box_scores, axis=-1)
        box_class_scores = K.max(box_scores, axis=-1)
        # bs x 19 x 19 x anchors
        mask = box_class_scores > threshold
        scores = tf.boolean_mask(box_class_scores, mask)
        boxes = tf.boolean_mask(boxes, mask)
        classes = tf.boolean_mask(box_classes, mask)
        return scores, boxes, classes
    def _yolo_non_max_suppression(self, scores, boxes, classes, max_boxes=10,
                                iou_threshold=0.5):
        # Prune overlapping boxes with TF NMS, keeping at most max_boxes.
        max_boxes_tensor = K.variable(max_boxes, dtype='int32')
        K.get_session().run(tf.variables_initializer([max_boxes_tensor]))
        nms_indices = tf.image.non_max_suppression(boxes, scores, max_boxes,
                                                  iou_threshold)
        scores = K.gather(scores, nms_indices)
        boxes = K.gather(boxes, nms_indices)
        classes = K.gather(classes, nms_indices)
        return scores, boxes, classes
    def _yolo_eval(self, yolo_outputs, image_shape, max_boxes=10,
                score_threshold=0.6, iou_threshold=0.5):
        # Full post-processing: corners -> score filter -> rescale to
        # image_shape -> non-max suppression.
        box_confidence, box_xy, box_wh, box_class_probs = yolo_outputs
        boxes = yolo_boxes_to_corners(box_xy, box_wh)
        scores, boxes, classes = self._yolo_filter_boxes(box_confidence, boxes,
                                                        box_class_probs,
                                                        score_threshold)
        boxes = scale_boxes(boxes, image_shape)
        scores, boxes, classes = self._yolo_non_max_suppression(scores, boxes,
                                                               classes, max_boxes,
                                                               iou_threshold)
        return scores, boxes, classes
def predict():
    """Run the YOLO detector on images/test.jpg, draw the detections,
    save the annotated copy under out/, and display it."""
    session = K.get_session()
    detector = YOLO()
    image_file = 'test.jpg'
    # Model input size (height, width) taken from the module constant.
    target_size = [int(d) for d in IMG_SZ[:2]]
    image, image_data = preprocess_image("images/" + image_file,
            model_image_size=target_size)
    fetches = [detector.scores, detector.boxes, detector.classes]
    feeds = {detector.model.input: image_data, K.learning_phase(): 0}
    out_scores, out_boxes, out_classes = session.run(fetches, feed_dict=feeds)
    print('Found {} boxes for {}'.format(len(out_boxes), image_file))
    box_colors = generate_colors(detector.class_names)
    draw_boxes(image, out_scores, out_boxes, out_classes,
            detector.class_names, box_colors)
    out_path = os.path.join("out", image_file)
    image.save(out_path, quality=90)
    imshow(scipy.misc.imread(out_path))
if __name__ == '__main__':
    # Script entry point: run the single-image detection demo.
    predict()
|
[
"keras.models.load_model",
"yolo_utils.scale_boxes",
"tensorflow.variables_initializer",
"yolo_utils.generate_colors",
"os.path.join",
"keras.backend.argmax",
"matplotlib.pyplot.imshow",
"tensorflow.placeholder",
"yad2k.models.keras_yolo.yolo_boxes_to_corners",
"yolo_utils.read_classes",
"tensorflow.name_scope",
"yolo_utils.read_anchors",
"keras.backend.max",
"keras.backend.learning_phase",
"keras.backend.gather",
"yolo_utils.draw_boxes",
"tensorflow.image.non_max_suppression",
"keras.backend.get_session",
"tensorflow.boolean_mask",
"keras.backend.variable"
] |
[((3564, 3579), 'keras.backend.get_session', 'K.get_session', ([], {}), '()\n', (3577, 3579), True, 'from keras import backend as K\n'), ((4038, 4071), 'yolo_utils.generate_colors', 'generate_colors', (['yolo.class_names'], {}), '(yolo.class_names)\n', (4053, 4071), False, 'from yolo_utils import draw_boxes, generate_colors, preprocess_image, read_anchors, read_classes, scale_boxes\n'), ((4074, 4153), 'yolo_utils.draw_boxes', 'draw_boxes', (['image', 'out_scores', 'out_boxes', 'out_classes', 'yolo.class_names', 'colors'], {}), '(image, out_scores, out_boxes, out_classes, yolo.class_names, colors)\n', (4084, 4153), False, 'from yolo_utils import draw_boxes, generate_colors, preprocess_image, read_anchors, read_classes, scale_boxes\n'), ((4295, 4315), 'matplotlib.pyplot.imshow', 'imshow', (['output_image'], {}), '(output_image)\n', (4301, 4315), False, 'from matplotlib.pyplot import imshow\n'), ((732, 764), 'keras.models.load_model', 'load_model', (['"""model_data/yolo.h5"""'], {}), "('model_data/yolo.h5')\n", (742, 764), False, 'from keras.models import Model, load_model\n'), ((788, 831), 'yolo_utils.read_classes', 'read_classes', (['"""model_data/coco_classes.txt"""'], {}), "('model_data/coco_classes.txt')\n", (800, 831), False, 'from yolo_utils import draw_boxes, generate_colors, preprocess_image, read_anchors, read_classes, scale_boxes\n'), ((851, 894), 'yolo_utils.read_anchors', 'read_anchors', (['"""model_data/yolo_anchors.txt"""'], {}), "('model_data/yolo_anchors.txt')\n", (863, 894), False, 'from yolo_utils import draw_boxes, generate_colors, preprocess_image, read_anchors, read_classes, scale_boxes\n'), ((1880, 1909), 'keras.backend.argmax', 'K.argmax', (['box_scores'], {'axis': '(-1)'}), '(box_scores, axis=-1)\n', (1888, 1909), True, 'from keras import backend as K\n'), ((1933, 1959), 'keras.backend.max', 'K.max', (['box_scores'], {'axis': '(-1)'}), '(box_scores, axis=-1)\n', (1938, 1959), True, 'from keras import backend as K\n'), ((2044, 2083), 
'tensorflow.boolean_mask', 'tf.boolean_mask', (['box_class_scores', 'mask'], {}), '(box_class_scores, mask)\n', (2059, 2083), True, 'import tensorflow as tf\n'), ((2096, 2124), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['boxes', 'mask'], {}), '(boxes, mask)\n', (2111, 2124), True, 'import tensorflow as tf\n'), ((2139, 2173), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['box_classes', 'mask'], {}), '(box_classes, mask)\n', (2154, 2173), True, 'import tensorflow as tf\n'), ((2362, 2398), 'keras.backend.variable', 'K.variable', (['max_boxes'], {'dtype': '"""int32"""'}), "(max_boxes, dtype='int32')\n", (2372, 2398), True, 'from keras import backend as K\n'), ((2488, 2557), 'tensorflow.image.non_max_suppression', 'tf.image.non_max_suppression', (['boxes', 'scores', 'max_boxes', 'iou_threshold'], {}), '(boxes, scores, max_boxes, iou_threshold)\n', (2516, 2557), True, 'import tensorflow as tf\n'), ((2619, 2648), 'keras.backend.gather', 'K.gather', (['scores', 'nms_indices'], {}), '(scores, nms_indices)\n', (2627, 2648), True, 'from keras import backend as K\n'), ((2661, 2689), 'keras.backend.gather', 'K.gather', (['boxes', 'nms_indices'], {}), '(boxes, nms_indices)\n', (2669, 2689), True, 'from keras import backend as K\n'), ((2704, 2734), 'keras.backend.gather', 'K.gather', (['classes', 'nms_indices'], {}), '(classes, nms_indices)\n', (2712, 2734), True, 'from keras import backend as K\n'), ((2973, 3010), 'yad2k.models.keras_yolo.yolo_boxes_to_corners', 'yolo_boxes_to_corners', (['box_xy', 'box_wh'], {}), '(box_xy, box_wh)\n', (2994, 3010), False, 'from yad2k.models.keras_yolo import preprocess_true_boxes, yolo_body, yolo_boxes_to_corners, yolo_head, yolo_loss\n'), ((3241, 3272), 'yolo_utils.scale_boxes', 'scale_boxes', (['boxes', 'image_shape'], {}), '(boxes, image_shape)\n', (3252, 3272), False, 'from yolo_utils import draw_boxes, generate_colors, preprocess_image, read_anchors, read_classes, scale_boxes\n'), ((4180, 4211), 'os.path.join', 'os.path.join', 
(['"""out"""', 'image_file'], {}), "('out', image_file)\n", (4192, 4211), False, 'import os\n'), ((4260, 4291), 'os.path.join', 'os.path.join', (['"""out"""', 'image_file'], {}), "('out', image_file)\n", (4272, 4291), False, 'import os\n'), ((1148, 1177), 'tensorflow.name_scope', 'tf.name_scope', (['"""placeholders"""'], {}), "('placeholders')\n", (1161, 1177), True, 'import tensorflow as tf\n'), ((1194, 1253), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[None, *IMG_SZ]', 'name': '"""X"""'}), "(tf.float32, shape=[None, *IMG_SZ], name='X')\n", (1208, 1253), True, 'import tensorflow as tf\n'), ((1294, 1317), 'tensorflow.name_scope', 'tf.name_scope', (['"""output"""'], {}), "('output')\n", (1307, 1317), True, 'import tensorflow as tf\n'), ((2423, 2467), 'tensorflow.variables_initializer', 'tf.variables_initializer', (['[max_boxes_tensor]'], {}), '([max_boxes_tensor])\n', (2447, 2467), True, 'import tensorflow as tf\n'), ((2403, 2418), 'keras.backend.get_session', 'K.get_session', ([], {}), '()\n', (2416, 2418), True, 'from keras import backend as K\n'), ((3935, 3953), 'keras.backend.learning_phase', 'K.learning_phase', ([], {}), '()\n', (3951, 3953), True, 'from keras import backend as K\n')]
|
# -*- coding: utf-8 -*-
#@+leo-ver=5-thin
#@+node:ekr.20210926044012.1: * @file ../unittests/test_doctests.py
#@@first
"""Run all doctests."""
import doctest
import glob
import os
import unittest
from leo.core import leoGlobals as g
# Directory containing this unittest file, and the leo package root one level up.
unittest_dir = os.path.dirname(__file__)
leo_dir = os.path.abspath(os.path.join(unittest_dir, '..'))
#@+others # Define a function containing a doctest.
#@+node:ekr.20210926053601.1: ** factorial (test_dectests.py)
def factorial(n):
    # Modified from https://docs.python.org/3/library/doctest.html
    # Must import factorial. See: stackoverflow.com/questions/65066002
    """Return the factorial of n, an exact integer >= 0.
    >>> from leo.unittests.test_doctests import factorial
    >>> [factorial(n) for n in range(6)]
    [1, 1, 2, 6, 24, 120]
    >>> factorial(30)
    265252859812191058636308480000000
    >>> factorial(-1)
    Traceback (most recent call last):
        ...
    ValueError: n must be >= 0
    Factorials of floats are OK, but the float must be an exact integer:
    >>> factorial(30.1)
    Traceback (most recent call last):
        ...
    ValueError: n must be exact integer
    >>> factorial(30.0)
    265252859812191058636308480000000
    It must also not be ridiculously large:
    >>> factorial(1e100)
    Traceback (most recent call last):
        ...
    OverflowError: n too large
    """ # Blank line above is required.
    import math
    # Validate the argument before multiplying anything.
    if not n >= 0:
        raise ValueError("n must be >= 0")
    if math.floor(n) != n:
        raise ValueError("n must be exact integer")
    if n+1 == n:  # catch a value like 1e300
        raise OverflowError("n too large")
    # n is an exact integer at this point, so range() covers 2..n inclusive.
    result = 1
    for factor in range(2, math.floor(n) + 1):
        result *= factor
    return result
#@-others
class TestDocTests(unittest.TestCase):  # No need to be a subclass of leoTest2.LeoUnitTest.

    def test_all_doctests(self):
        """Run every doctest found under leo/core, leo/plugins and leo/unittests."""
        fails_list = []  # Files whose doctests failed.
        files_list = []  # Files containing at least one doctest.
        n = 0  # Total number of doctests found.
        for module in ('core', 'plugins', 'unittests'):
            module_path = os.path.join(leo_dir, module)
            self.assertTrue(os.path.exists(module_path), msg=repr(module_path))
            pattern = os.path.join(module_path, '**', '*.py')
            candidates = [
                z for z in glob.glob(pattern, recursive=True)
                if not z.endswith('__init__.py')
            ]
            for f in candidates:
                # Exclude two problematic files.
                if 'dtest.py' in f or 'javascript.py' in f:
                    continue
                fails, count = doctest.testfile(f)
                n += count
                if count:
                    files_list.append(f)
                if fails:
                    fails_list.append(f)
                    print(f"{fails} failures in {g.shortFileName(f)}")
        self.assertEqual(fails_list, [])
        if 0:
            g.trace(f"{n} doctests found in {len(files_list)} file{g.plural(len(files_list))}")
            g.printObj(files_list, tag="files containing any doctest")
            g.printObj(fails_list, tag="files containing a failed doctest")
#@-leo
|
[
"leo.core.leoGlobals.printObj",
"os.path.dirname",
"math.floor",
"os.path.exists",
"doctest.testfile",
"leo.core.leoGlobals.shortFileName",
"glob.glob",
"os.path.join"
] |
[((249, 274), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (264, 274), False, 'import os\n'), ((301, 333), 'os.path.join', 'os.path.join', (['unittest_dir', '""".."""'], {}), "(unittest_dir, '..')\n", (313, 333), False, 'import os\n'), ((1489, 1502), 'math.floor', 'math.floor', (['n'], {}), '(n)\n', (1499, 1502), False, 'import math\n'), ((2149, 2178), 'os.path.join', 'os.path.join', (['leo_dir', 'module'], {}), '(leo_dir, module)\n', (2161, 2178), False, 'import os\n'), ((2278, 2317), 'os.path.join', 'os.path.join', (['module_path', '"""**"""', '"""*.py"""'], {}), "(module_path, '**', '*.py')\n", (2290, 2317), False, 'import os\n'), ((2338, 2369), 'glob.glob', 'glob.glob', (['path'], {'recursive': '(True)'}), '(path, recursive=True)\n', (2347, 2369), False, 'import glob\n'), ((3058, 3116), 'leo.core.leoGlobals.printObj', 'g.printObj', (['files_list'], {'tag': '"""files containing any doctest"""'}), "(files_list, tag='files containing any doctest')\n", (3068, 3116), True, 'from leo.core import leoGlobals as g\n'), ((3129, 3192), 'leo.core.leoGlobals.printObj', 'g.printObj', (['fails_list'], {'tag': '"""files containing a failed doctest"""'}), "(fails_list, tag='files containing a failed doctest')\n", (3139, 3192), True, 'from leo.core import leoGlobals as g\n'), ((2207, 2234), 'os.path.exists', 'os.path.exists', (['module_path'], {}), '(module_path)\n', (2221, 2234), False, 'import os\n'), ((2639, 2658), 'doctest.testfile', 'doctest.testfile', (['f'], {}), '(f)\n', (2655, 2658), False, 'import doctest\n'), ((2869, 2887), 'leo.core.leoGlobals.shortFileName', 'g.shortFileName', (['f'], {}), '(f)\n', (2884, 2887), True, 'from leo.core import leoGlobals as g\n')]
|
import logging
from abc import abstractmethod, ABCMeta
class AbstractJob(metaclass=ABCMeta):
    '''AbstractJob provides common functionality for jobs that execute various sets of tasks on Spark.'''

    def __init__(self, sc, spark, gc, config):
        """Store the shared execution context for the job.

        :param sc: Spark context (passed through to try_execute).
        :param spark: Spark session (passed through to try_execute).
        :param gc: additional context object (passed through to try_execute).
        :param config: job configuration (passed through to try_execute).
        """
        logging.basicConfig(level=logging.INFO)
        # One logger per concrete job class, named after the subclass.
        self.logger = logging.getLogger(self.__class__.__name__)
        self.sc = sc
        self.spark = spark
        self.gc = gc
        self.config = config

    @abstractmethod
    def try_execute(self, sc, spark, gc, config):
        """Run the actual job body; concrete subclasses implement this."""
        pass

    def execute(self):
        """Run try_execute with the stored context, logging start, completion and failures."""
        # Fix: pass the class name directly; the previous one-element tuple made
        # %s render the tuple repr, e.g. "Executing job ('MyJob',)".
        self.logger.info("Executing job %s", self.__class__.__name__)
        try:
            self.try_execute(self.sc, self.spark, self.gc, self.config)
            # Fix: successful completion is not an error -- log it at INFO level.
            self.logger.info("Executing job %s completed", self.__class__.__name__)
        except Exception as exception:
            # unexpected error, should retry
            self.logger.error("Error while executing job %s", self.__class__.__name__, exc_info=exception)
|
[
"logging.getLogger",
"logging.basicConfig"
] |
[((257, 296), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (276, 296), False, 'import logging\n'), ((319, 361), 'logging.getLogger', 'logging.getLogger', (['self.__class__.__name__'], {}), '(self.__class__.__name__)\n', (336, 361), False, 'import logging\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-02-14 20:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename camelCase model fields to snake_case (PEP 8 / Django convention)."""

    dependencies = [
        ('core', '0010_class_remarks'),
    ]

    # Pure renames: no schema type changes, only column renames on the
    # Class, Lesson and Student models.
    operations = [
        migrations.RenameField(
            model_name='class',
            old_name='lastPaid',
            new_name='last_paid',
        ),
        migrations.RenameField(
            model_name='lesson',
            old_name='attendingStudents',
            new_name='attending_students',
        ),
        migrations.RenameField(
            model_name='lesson',
            old_name='inClass',
            new_name='in_class',
        ),
        migrations.RenameField(
            model_name='student',
            old_name='firstName',
            new_name='first_name',
        ),
        migrations.RenameField(
            model_name='student',
            old_name='inClass',
            new_name='in_class',
        ),
        migrations.RenameField(
            model_name='student',
            old_name='lastName',
            new_name='last_name',
        ),
    ]
|
[
"django.db.migrations.RenameField"
] |
[((281, 371), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""class"""', 'old_name': '"""lastPaid"""', 'new_name': '"""last_paid"""'}), "(model_name='class', old_name='lastPaid', new_name=\n 'last_paid')\n", (303, 371), False, 'from django.db import migrations\n'), ((423, 531), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""lesson"""', 'old_name': '"""attendingStudents"""', 'new_name': '"""attending_students"""'}), "(model_name='lesson', old_name='attendingStudents',\n new_name='attending_students')\n", (445, 531), False, 'from django.db import migrations\n'), ((584, 673), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""lesson"""', 'old_name': '"""inClass"""', 'new_name': '"""in_class"""'}), "(model_name='lesson', old_name='inClass', new_name=\n 'in_class')\n", (606, 673), False, 'from django.db import migrations\n'), ((725, 819), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""student"""', 'old_name': '"""firstName"""', 'new_name': '"""first_name"""'}), "(model_name='student', old_name='firstName', new_name\n ='first_name')\n", (747, 819), False, 'from django.db import migrations\n'), ((871, 961), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""student"""', 'old_name': '"""inClass"""', 'new_name': '"""in_class"""'}), "(model_name='student', old_name='inClass', new_name=\n 'in_class')\n", (893, 961), False, 'from django.db import migrations\n'), ((1013, 1105), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""student"""', 'old_name': '"""lastName"""', 'new_name': '"""last_name"""'}), "(model_name='student', old_name='lastName', new_name=\n 'last_name')\n", (1035, 1105), False, 'from django.db import migrations\n')]
|
import logging
from textnn.utils import json
from pathlib import Path
from typing import Iterable, Tuple, Union, Generator
from textnn.dataset import KerasModelTrainingProgram
from textnn.utils import read_text_file_lines as read_lines, skip, FixedLengthIterable
def yelp_star_rating_generator(data_file: Path, trim_text: int = None) -> Generator[Tuple[str, float], None, None]:
    """
    Generate text to star-rating tuples from a review.json file.
    :param data_file: the tsv file containing the reviews
    :param trim_text: trim the text attribute to the given number of characters
    :return: an iterable over the reviews from `data_file`
    """
    def parse(raw_line: str) -> Tuple[str, float]:
        review = json.loads(raw_line)
        text = review["text"]
        if trim_text:
            text = text[:trim_text]
        return text, float(review["stars"])

    return (parse(raw_line) for raw_line in read_lines(file_path=data_file))
def yelp_binary_review_generator(data_file: Path, trim_text: int = None, label_3_stars_as=None,
                                 ) -> Generator[Tuple[str, int], None, None]:
    """
    Generate text to binary-label tuples from a review.json file.
    :param data_file: the tsv file containing the reviews
    :param trim_text: trim the text attribute to the given number of characters
    :param label_3_stars_as: specify the binary label for 3-star reviews (if `None`, 3-star reviews are going to be
    ignored)
    :return: an iterable over the reviews from `data_file`
    """
    def relabel(stars):
        # 3-star reviews get the configured label (None means: drop the review).
        if stars == 3.0:
            return label_3_stars_as
        return 1 if stars > 3.0 else 0

    labeled = ((text, relabel(stars))
               for text, stars in yelp_star_rating_generator(data_file=data_file, trim_text=trim_text))
    # drop reviews whose label is None
    # noinspection PyTypeChecker
    return (pair for pair in labeled if pair[1] is not None)
class YelpReviewClassifier(KerasModelTrainingProgram):
    """Keras training program for binary sentiment classification of YELP reviews."""

    def __init__(self, data_file, vocabulary_size: int = 4096, max_text_length: int = 64,
                 pad_beginning: bool = True, use_start_end_indicators: bool = True,
                 embeddings: Union[int, str, Path] = 16, update_embeddings: bool = True,
                 layer_definitions: str = None,
                 batch_size: int = 1024, num_epochs: int = 100,
                 learning_rate: float = 0.001, learning_decay: float = 0.,
                 shuffle_training_data: Union[int, bool] = 113,
                 log_config: bool = True,
                 ):
        """
        Initialize a new YELP review experiment.
        :param data_file: the file containing the YELP review data (`review.json`)
        :param vocabulary_size: size of the input vocabulary
        :param max_text_length: the maximum amount of token to consider during sequence encoding
        :param pad_beginning: if True, add padding at start and end of an encoded token sequence
        :param use_start_end_indicators: if True, use reserved indicator token `<START>` and `<END>` during token
        sequence encoding
        :param embeddings: either the size of the embedding layer or the path to a vector file containing pretrained
        embeddings
        :param update_embeddings: if False, the embedding layer will not be updated during training (i.e., for
        pretrained embeddings)
        :param layer_definitions: additional layer definitions downstream the embeddings
        :param batch_size: Number of samples per gradient update
        :param learning_rate: Learning rate
        :param learning_decay: Learning decay
        :param num_epochs: Number of epochs to train the model. An epoch is an iteration over the entire `x` and `y`
        data provided.
        :param shuffle_training_data: shuffle the training to avoid problems in input order (e.g., if data is sorted by
        label). If `shuffle_data=False`, the data will not be shuffled, if `shuffle_data=True`, the data will be
        shuffled randomly, if `shuffle_data` is an integer, this value will be used as seed for the random function.
        :param log_config: if True a the config of this instance is printed after setup
        """
        # Accept plain strings as well as Path objects.
        if not isinstance(data_file, Path):
            data_file = Path(data_file)
        assert data_file.exists(), f"Unable to find specified dataset in '{data_file}'!"
        # All remaining configuration is handled by the base training program.
        super().__init__(
            base_folder=data_file.parent,
            vocabulary_size=vocabulary_size, max_text_length=max_text_length,
            pad_beginning=pad_beginning, use_start_end_indicators=use_start_end_indicators,
            embeddings=embeddings, update_embeddings=update_embeddings,
            layer_definitions=layer_definitions,
            batch_size=batch_size, num_epochs=num_epochs,
            learning_rate=learning_rate, learning_decay=learning_decay, shuffle_training_data=shuffle_training_data,
        )
        self._data_file = data_file
        # Number of leading reviews reserved for the test set (see _get_data).
        self._test_set_skip = 1000
        if log_config:
            logging.info(f"{self.__class__.__name__}-configuration:\n{self.config}")

    def _get_data(self, test_set: bool) -> Iterable[Tuple[str, int]]:
        """Return the (lazily generated) train or test portion of the review data.

        The first `_test_set_skip` reviews form the test set; training data skips
        them, test data takes only them (via positive/negative `at_start` skip).
        """
        def gen_source():
            if not test_set:
                start_at = self._test_set_skip
            else:
                start_at = -self._test_set_skip
            # NOTE(review): texts are trimmed to 10 chars per token slot here;
            # _max_text_length is presumably set by the base class -- confirm.
            return skip(yelp_binary_review_generator(self._data_file, trim_text=self._max_text_length*10),
                        at_start=start_at)
        return FixedLengthIterable(gen_source=gen_source)
|
[
"textnn.utils.FixedLengthIterable",
"textnn.utils.read_text_file_lines",
"textnn.utils.json.loads",
"logging.info",
"pathlib.Path"
] |
[((737, 753), 'textnn.utils.json.loads', 'json.loads', (['line'], {}), '(line)\n', (747, 753), False, 'from textnn.utils import json\n'), ((5662, 5704), 'textnn.utils.FixedLengthIterable', 'FixedLengthIterable', ([], {'gen_source': 'gen_source'}), '(gen_source=gen_source)\n', (5681, 5704), False, 'from textnn.utils import read_text_file_lines as read_lines, skip, FixedLengthIterable\n'), ((897, 928), 'textnn.utils.read_text_file_lines', 'read_lines', ([], {'file_path': 'data_file'}), '(file_path=data_file)\n', (907, 928), True, 'from textnn.utils import read_text_file_lines as read_lines, skip, FixedLengthIterable\n'), ((4427, 4442), 'pathlib.Path', 'Path', (['data_file'], {}), '(data_file)\n', (4431, 4442), False, 'from pathlib import Path\n'), ((5183, 5258), 'logging.info', 'logging.info', (['f"""{self.__class__.__name__}-configuration:\n{self.config}"""'], {}), '(f"""{self.__class__.__name__}-configuration:\n{self.config}""")\n', (5195, 5258), False, 'import logging\n')]
|
import numpy as np
import sympy
from enum import Enum
from detmodel.hit import Hit
from detmodel.signal import Signal, Segment
from detmodel.muon import Muon
from detmodel import util
class DetType(Enum):
    """Enumeration of the supported detector technologies."""
    MM = 'mm'
    MDT = 'mdt'
    STGC = 'stgc'

    def asint(self):
        """Return a stable integer code for this detector type."""
        if self is DetType.MM:
            return 0
        if self is DetType.MDT:
            return 1
        if self is DetType.STGC:
            return 2
class Plane:
    """A single detector plane at fixed z: builds its readout segmentation,
    collects muon and noise hits, and converts them into readout signals.

    Behavior depends on seeded numpy RNG call order and on mutable per-event
    state (`self.hits`, per-segment signal flags), so statement order matters.
    """
    ## planes are aligned in z
    ## tilt only on x segmentation so far
    ## width_x: geometrical width of detector in x
    ## width_y: geometrical width of detector in y
    ## width_t: time window (in BCs = 25ns) to integrate signal
    ## n_?_seg: number of allowed segments in each coordinate
    ## segment size is then width_?/n_?_seg
    def __init__(self, type, z, width_x=10, width_y=10, width_t=10,
                 n_x_seg=10, n_y_seg=0, n_t_seg=10,
                 x_res=0, y_res=0, z_res=0, t_res=0,
                 tilt=0, offset=0, max_hits=0, sig_eff=0):
        ## type
        self.p_type = DetType(type)
        ## geometry
        self.z = z
        self.point = sympy.Point3D(0,0,z,evaluate=False)
        self.plane = sympy.Plane(self.point, normal_vector=(0,0,1))
        ## noise info
        self.noise_rate = 0
        self.noise_type = 'constant'
        ## detector plane tilt and offset
        self.tilt = tilt
        self.offset = offset
        self.max_hits = max_hits
        self.sig_eff = sig_eff
        ## detector geometrical boundaries, assuming squares now
        self.sizes = {
            'x': width_x, 'y': width_y,
            't': width_t
        }
        ## detector spatial segmentation in x and y,
        ## and timing segmentation in t
        ## Note: if you have a tilt in x, you need to increase the range
        ## of the segmentation to ensure full coverage
        tilt_width_x_min = -0.5*width_x
        tilt_width_x_max = 0.5*width_x
        if abs(self.tilt) > 0:
            # Widen the x range by the projection of the tilt over the full y extent.
            tilt_dx = width_y*np.abs( np.tan(tilt) )
            tilt_width_x_max = 0.5*width_x + 0.5*tilt_dx
            tilt_width_x_min = -0.5*width_x - 0.5*tilt_dx
        # Segment EDGES: n_seg segments need n_seg+1 boundaries.
        self.segmentations = {
            'x': np.linspace( tilt_width_x_min+self.offset, tilt_width_x_max+self.offset, n_x_seg+1 ),
            'y': np.linspace( -0.5*width_y, 0.5*width_y, n_y_seg+1 ),
            't': np.linspace( -0.5*width_t, 0.5*width_t, n_t_seg+1 )
        }
        # Midpoints of consecutive edges = segment centers.
        self.seg_mids = {}
        for coord in self.segmentations:
            if len(self.segmentations[coord]) > 1:
                self.seg_mids[coord] = 0.5*(self.segmentations[coord][:-1] + self.segmentations[coord][1:])
            else:
                self.seg_mids[coord] = self.segmentations[coord]
        ## plane segmentation lines (centers)
        # x readout lines follow the tilt; y lines are horizontal.
        self.seg_lines = {'x':[], 'y':[]}
        for this_x_center in self.seg_mids['x']:
            this_p1 = sympy.Point3D(this_x_center, 0, self.z, evaluate=False)
            this_p2 = sympy.Point3D(this_x_center + 0.5*width_y*np.tan(tilt), 0.5*width_y, self.z, evaluate=False)
            self.seg_lines['x'].append(Segment( sympy.Line3D(this_p1, this_p2), coord='x', z=self.z ))
        for this_y_center in self.seg_mids['y']:
            this_p1 = sympy.Point3D(0, this_y_center, self.z, evaluate=False)
            this_p2 = sympy.Point3D(0.5*width_x, this_y_center, self.z, evaluate=False)
            self.seg_lines['y'].append(Segment( sympy.Line3D(this_p1, this_p2), coord='y', z=self.z ))
        ## keeping position resolution as 0 for now
        ## timing resolution of 5 BCs
        ## Resolution smearings are applied to muon only, since noise is random
        self.resolutions = {
            'x': x_res, 'y': y_res,
            'z': z_res, 't': t_res
        }
        ## raw hits
        self.hits = []

    def get_edge(self, edge):
        """Return a sympy.Line3D along the requested plane edge or mid-line,
        or -1 (with a printed message) if `edge` is not recognized."""
        # x
        #  __|__ y
        #    |
        # top and bottom below refer to this orientation
        if 'right' in edge:
            return sympy.Line3D( sympy.Point3D(-0.5*self.sizes['x'], 0.5*self.sizes['y'], self.z, evaluate=False),
                                 sympy.Point3D( 0.5*self.sizes['x'], 0.5*self.sizes['y'], self.z, evaluate=False) )
        elif 'left' in edge:
            return sympy.Line3D( sympy.Point3D(-0.5*self.sizes['x'], -0.5*self.sizes['y'], self.z, evaluate=False),
                                 sympy.Point3D( 0.5*self.sizes['x'], -0.5*self.sizes['y'], self.z, evaluate=False) )
        elif 'bottom' in edge:
            return sympy.Line3D( sympy.Point3D(-0.5*self.sizes['x'], -0.5*self.sizes['y'], self.z, evaluate=False),
                                 sympy.Point3D(-0.5*self.sizes['x'], 0.5*self.sizes['y'], self.z, evaluate=False) )
        elif 'top' in edge:
            return sympy.Line3D( sympy.Point3D( 0.5*self.sizes['x'], -0.5*self.sizes['y'], self.z, evaluate=False),
                                 sympy.Point3D( 0.5*self.sizes['x'], 0.5*self.sizes['y'], self.z, evaluate=False) )
        elif 'midx' in edge:
            return sympy.Line3D( sympy.Point3D( 0, 0, self.z, evaluate=False),
                                 sympy.Point3D( 1, 0, self.z, evaluate=False) )
        elif 'midy' in edge:
            return sympy.Line3D( sympy.Point3D( 0, 0, self.z, evaluate=False),
                                 sympy.Point3D( 0, 1, self.z, evaluate=False) )
        else:
            print('Must specify: right, left, bottom, top, midx or midy')
            return -1

    def clear_hits(self):
        """Reset per-event state: drop all hits and clear segment signal flags."""
        self.hits = []
        for slx in self.seg_lines['x']:
            slx.reset()
        for sly in self.seg_lines['y']:
            sly.reset()

    def smear(self, pos, coord):
        """Gaussian-smear `pos` with the configured resolution for `coord`
        ('x', 'y', 'z' or 't'); returns -99 for an unknown coordinate."""
        ## smear muon hit position and time
        if coord not in self.resolutions:
            print('Could not understand coordinate, must be x y z or t, but received', coord)
            return -99
        if self.resolutions[coord] > 0:
            return np.random.normal(pos, self.resolutions[coord])
        else:
            return pos

    def pass_muon(self, muon, randseed=42):
        """Propagate a muon through this plane; append a Hit on success.

        Returns 1 if a hit was recorded, 0 if the muon missed (efficiency,
        fiducial/time cuts, or MDT tube radius), -1 on a geometry error.
        """
        # Seed depends on plane z so planes get independent but reproducible draws.
        np.random.seed(int(randseed + 10*(self.z)))
        ## apply signal efficiency
        if self.sig_eff > 0:
            rnd_number_eff = np.random.uniform(0.0, 1.0)
            if rnd_number_eff > self.sig_eff:
                return 0 ## missed muon signal
        ## find intersection of muon and detector plane
        pmu_intersect = self.plane.intersection(muon.line)
        if len(pmu_intersect) == 0 or len(pmu_intersect) > 1:
            print("There should always be one and only one muon-plane intersection. What's happening?")
            print(pmu_intersect)
            return -1
        intersection_point = pmu_intersect[0]
        mu_ip_x = self.smear(float(intersection_point.x), 'x')
        mu_ip_y = self.smear(float(intersection_point.y), 'y')
        mu_ip_z = self.smear(float(intersection_point.z), 'z')
        mu_ip_t = self.smear(muon.time, 't')
        ## if muon is outside the detector fiducial volume
        ## or outside the time window, return 0
        if np.abs(mu_ip_x) > 0.5*self.sizes['x']:
            return 0
        if np.abs(mu_ip_y) > 0.5*self.sizes['y']:
            return 0
        if np.abs(mu_ip_t) > 0.5*self.sizes['t']:
            return 0
        # To compute the drift radius (for MDT detector), need to find detector element (i.e. wire)
        # for which this muon has the smallest distance of closest approach to the wire
        # Caveat: the calculation below assumes tubes are exactly vertical
        mu_ix = -9999
        mu_rdrift = 9999.
        if self.p_type == DetType.MDT:
            # Distance of closest approach computed in the x-z projection.
            for islx, slx in enumerate(self.seg_lines['x']):
                wirepos = sympy.Point(slx.line.p1.x, slx.line.p1.z, evaluate=False)
                muonpos1 = sympy.Point(muon.line.p1.x, muon.line.p1.z, evaluate=False)
                muonpos2 = sympy.Point(muon.line.p2.x, muon.line.p2.z, evaluate=False)
                muonline = sympy.Line(muonpos1, muonpos2)
                rdrift = muonline.distance(wirepos)
                if rdrift.evalf() < mu_rdrift:
                    mu_ix = islx
                    mu_rdrift = rdrift.evalf()
            # do not record hit if closest wire further than tube radius
            if mu_rdrift > 0.5*self.sizes['x']/len(self.seg_lines['x']):
                return 0
        muhit = Hit(mu_ip_x,
                    mu_ip_y,
                    mu_ip_z,
                    mu_ip_t,
                    mu_ix,
                    mu_rdrift,
                    True)
        self.hits.append(muhit)
        return 1

    def set_noise(self, noise_rate, noise_type='constant'):
        """Configure the per-strip noise rate (Hz) and noise model name."""
        self.noise_rate = noise_rate
        self.noise_type = noise_type

    def add_noise(self, noise_scale, override_n_noise_per_plane=-1, randseed=42):
        '''
        p_width_t is the time window in which to integrate the signal (in nano seconds)
        therefore, the number of noise hits is:
        noise_scale * noise_rate per strip (Hz) * number of strips * p_width_t (ns) * 1e-9
        '''
        if 'constant' not in self.noise_type:
            print('Only support constant noise now')
            return -1
        if self.sizes['t'] < 1e-15:
            print('Time integration width must be larger than 0')
            return -1
        n_noise_init = noise_scale * (len(self.segmentations['x']) -1) \
                       * self.noise_rate * self.sizes['t'] * 1e-9
        if override_n_noise_per_plane > 0:
            n_noise_init = override_n_noise_per_plane
        np.random.seed(int(randseed + (self.z)))
        # Poisson-fluctuate the expected count, then draw uniform positions/times.
        n_noise = np.random.poisson(n_noise_init)
        noise_x = np.random.uniform(-0.5*self.sizes['x'], 0.5*self.sizes['x'], int(n_noise))
        noise_y = np.random.uniform(-0.5*self.sizes['y'], 0.5*self.sizes['y'], int(n_noise))
        noise_z = self.z*np.ones(int(n_noise))
        noise_t = np.random.uniform(-0.5*self.sizes['t'], 0.5*self.sizes['t'], int(n_noise))
        noise_r = np.random.uniform(0.0, 0.5*self.sizes['x']/len(self.seg_lines['x']), int(n_noise))
        for inoise in range(int(n_noise)):
            # find detector element (segment) closest to each noise hit along x, as needed for MDT
            noise_ix = np.argmin( [ np.abs(noise_x[inoise]-xseg.line.p1.x) for xseg in self.seg_lines['x'] ] )
            noise_hit = Hit(noise_x[inoise],
                            noise_y[inoise],
                            noise_z[inoise],
                            noise_t[inoise],
                            noise_ix,
                            noise_r[inoise],
                            False)
            self.hits.append(noise_hit)

    def find_signal(self, this_hit):
        """Map a raw hit onto its nearest x/y readout segments and promote it
        to a Signal if those segments are still free; returns the signal info
        dict, or None if the hit was absorbed into an existing signal."""
        ## find which detector segment this hit has activated
        hit_distancex_seg = None
        hit_hash_ix = -10
        hit_distancey_seg = None
        hit_hash_iy = -10
        if self.p_type == DetType.MDT: # association between hit and detector element (segment) already done according to rdrift
            hit_hash_ix = this_hit.seg_ix
        else:
            # if no tilt in this plane or the hit is in the middle of detector element along y,
            # speed up finding of nearest element
            #hit_hash_ix = np.argmin( [ xseg.line.distance(this_hit.point()) for xseg in self.seg_lines['x'] ] )
            hit_hash_ix = np.argmin( [ util.distpoint2line(xseg, this_hit) for xseg in self.seg_lines['x'] ] )
            #hit_hash_iy = np.argmin( [ yseg.line.distance(this_hit.point()) for yseg in self.seg_lines['y'] ] )
            hit_hash_iy = np.argmin( [ util.distpoint2line(yseg, this_hit) for yseg in self.seg_lines['y'] ] )
        ## if segment already has signal, skip (but set to muon if new signal is from muon)
        if self.seg_lines['x'][hit_hash_ix].is_sig == False or \
           self.seg_lines['y'][hit_hash_iy].is_sig == False:
            if self.p_type == DetType.STGC and this_hit.rdrift < -9998.: # do not promote as signal
                return None
            isig = Signal( hash_seg_line_x=hit_hash_ix, hash_seg_line_y=hit_hash_iy,
                            x=this_hit.x, y=this_hit.y, z=this_hit.z,
                            time=this_hit.time, seg_ix=this_hit.seg_ix, rdrift=this_hit.rdrift,
                            is_muon=this_hit.is_muon )
            self.seg_lines['x'][hit_hash_ix].add_signal(isig)
            self.seg_lines['y'][hit_hash_iy].add_signal(isig)
            return isig.get_info_wrt_plane(self, display=False)
        else:
            # Both segments occupied: keep the existing signal, but flag it as
            # muon-induced if this later hit came from the muon.
            if this_hit.is_muon:
                if self.seg_lines['x'][hit_hash_ix].is_sig and \
                   self.seg_lines['y'][hit_hash_iy].is_sig:
                    self.seg_lines['x'][hit_hash_ix].sig.is_muon = True
                    self.seg_lines['y'][hit_hash_iy].sig.is_muon = True
        return None

    def hit_processor(self, summary=False):
        """Turn the collected raw hits into readout signals.

        Returns (signal_matrix, property_names) as a numpy array plus key list,
        or None if no signal was produced.
        """
        ## decide on overlapping hits
        ## sorting hits by which one arrived first
        # MDT: smallest drift radius wins; otherwise earliest hit wins.
        if self.p_type == DetType.MDT:
            self.hits.sort(key=lambda hit: hit.rdrift)
        else:
            self.hits.sort(key=lambda hit: hit.time)
        ## apply additional position smearing by combining muon and noise hits if sTGC plane
        if len(self.hits) > 1 and self.p_type == DetType.STGC:
            self.combine_hits(False)
        out_signals = []
        if summary:
            print("Total number of hits:", len(self.hits) )
        for ihit in self.hits:
            isig_info = self.find_signal(ihit)
            if isig_info is not None:
                out_signals.append(isig_info)
            if self.max_hits > 0 and len(out_signals) == self.max_hits:
                break
        n_sigs = len(out_signals)
        if n_sigs < 1:
            return None
        # All signal info dicts share the same keys, so the matrix is rectangular.
        n_props = len(out_signals[0])
        sig_matrix = np.zeros( (n_sigs, n_props) )
        for ns in range(n_sigs):
            sig_matrix[ns][:] = list( out_signals[ns].values() )
        return (sig_matrix, list(out_signals[ns].keys()) )

    def combine_hits(self, summary=False):
        """Merge same-plane hits for sTGC: the muon hit's x becomes a weighted
        average with nearby noise hits, and those noise hits are flagged
        (rdrift = -9999.) so they are not promoted to signals later."""
        ## Combine hits in the same plane into one hit
        ## For the time being, only do so if a muon hit exists
        ## Background noise hit positions are averaged with muon hit position but with a reduced weight
        imu = -1
        sumx = 0.
        sumw = 0.
        ibkg = []
        list_seg_ix = [] # store detector segment with hits
        for ihit, hit in enumerate(self.hits):
            hit_ix = np.argmin( [ util.distpoint2line(xseg, hit) for xseg in self.seg_lines['x'] ] )
            # Here we rely on the hits being ordered to avoid multiple hits on same detector segment
            if hit_ix in list_seg_ix:
                continue
            list_seg_ix.append(hit_ix)
            if hit.is_muon:
                imu = ihit
                weight = 1.0
            else: # background noise hit
                ibkg.append(ihit)
                weight = 0.2
            sumx += weight*hit.x
            sumw += weight
        ## Update x position of muon hit (if one exists)
        if imu >= 0:
            self.hits[imu].x = sumx/sumw
        ## Flag background noise hits
        for i in ibkg:
            self.hits[i].rdrift = -9999. # use as flag not to promote hit as signal
        return None

    def return_signal(self, summary=False):
        """Public entry point: process hits and return the signal matrix (or None)."""
        return self.hit_processor(summary)
|
[
"detmodel.util.distpoint2line",
"numpy.random.uniform",
"numpy.abs",
"detmodel.hit.Hit",
"sympy.Line3D",
"sympy.Point",
"numpy.zeros",
"sympy.Plane",
"sympy.Line",
"numpy.tan",
"detmodel.signal.Signal",
"numpy.random.poisson",
"numpy.linspace",
"numpy.random.normal",
"sympy.Point3D"
] |
[((1130, 1168), 'sympy.Point3D', 'sympy.Point3D', (['(0)', '(0)', 'z'], {'evaluate': '(False)'}), '(0, 0, z, evaluate=False)\n', (1143, 1168), False, 'import sympy\n'), ((1187, 1235), 'sympy.Plane', 'sympy.Plane', (['self.point'], {'normal_vector': '(0, 0, 1)'}), '(self.point, normal_vector=(0, 0, 1))\n', (1198, 1235), False, 'import sympy\n'), ((8626, 8689), 'detmodel.hit.Hit', 'Hit', (['mu_ip_x', 'mu_ip_y', 'mu_ip_z', 'mu_ip_t', 'mu_ix', 'mu_rdrift', '(True)'], {}), '(mu_ip_x, mu_ip_y, mu_ip_z, mu_ip_t, mu_ix, mu_rdrift, True)\n', (8629, 8689), False, 'from detmodel.hit import Hit\n'), ((9915, 9946), 'numpy.random.poisson', 'np.random.poisson', (['n_noise_init'], {}), '(n_noise_init)\n', (9932, 9946), True, 'import numpy as np\n'), ((14362, 14389), 'numpy.zeros', 'np.zeros', (['(n_sigs, n_props)'], {}), '((n_sigs, n_props))\n', (14370, 14389), True, 'import numpy as np\n'), ((2191, 2283), 'numpy.linspace', 'np.linspace', (['(tilt_width_x_min + self.offset)', '(tilt_width_x_max + self.offset)', '(n_x_seg + 1)'], {}), '(tilt_width_x_min + self.offset, tilt_width_x_max + self.offset,\n n_x_seg + 1)\n', (2202, 2283), True, 'import numpy as np\n'), ((2290, 2345), 'numpy.linspace', 'np.linspace', (['(-0.5 * width_y)', '(0.5 * width_y)', '(n_y_seg + 1)'], {}), '(-0.5 * width_y, 0.5 * width_y, n_y_seg + 1)\n', (2301, 2345), True, 'import numpy as np\n'), ((2356, 2411), 'numpy.linspace', 'np.linspace', (['(-0.5 * width_t)', '(0.5 * width_t)', '(n_t_seg + 1)'], {}), '(-0.5 * width_t, 0.5 * width_t, n_t_seg + 1)\n', (2367, 2411), True, 'import numpy as np\n'), ((2906, 2961), 'sympy.Point3D', 'sympy.Point3D', (['this_x_center', '(0)', 'self.z'], {'evaluate': '(False)'}), '(this_x_center, 0, self.z, evaluate=False)\n', (2919, 2961), False, 'import sympy\n'), ((3264, 3319), 'sympy.Point3D', 'sympy.Point3D', (['(0)', 'this_y_center', 'self.z'], {'evaluate': '(False)'}), '(0, this_y_center, self.z, evaluate=False)\n', (3277, 3319), False, 'import sympy\n'), ((3342, 3409), 
'sympy.Point3D', 'sympy.Point3D', (['(0.5 * width_x)', 'this_y_center', 'self.z'], {'evaluate': '(False)'}), '(0.5 * width_x, this_y_center, self.z, evaluate=False)\n', (3355, 3409), False, 'import sympy\n'), ((6173, 6219), 'numpy.random.normal', 'np.random.normal', (['pos', 'self.resolutions[coord]'], {}), '(pos, self.resolutions[coord])\n', (6189, 6219), True, 'import numpy as np\n'), ((6448, 6475), 'numpy.random.uniform', 'np.random.uniform', (['(0.0)', '(1.0)'], {}), '(0.0, 1.0)\n', (6465, 6475), True, 'import numpy as np\n'), ((7315, 7330), 'numpy.abs', 'np.abs', (['mu_ip_x'], {}), '(mu_ip_x)\n', (7321, 7330), True, 'import numpy as np\n'), ((7386, 7401), 'numpy.abs', 'np.abs', (['mu_ip_y'], {}), '(mu_ip_y)\n', (7392, 7401), True, 'import numpy as np\n'), ((7457, 7472), 'numpy.abs', 'np.abs', (['mu_ip_t'], {}), '(mu_ip_t)\n', (7463, 7472), True, 'import numpy as np\n'), ((10654, 10763), 'detmodel.hit.Hit', 'Hit', (['noise_x[inoise]', 'noise_y[inoise]', 'noise_z[inoise]', 'noise_t[inoise]', 'noise_ix', 'noise_r[inoise]', '(False)'], {}), '(noise_x[inoise], noise_y[inoise], noise_z[inoise], noise_t[inoise],\n noise_ix, noise_r[inoise], False)\n', (10657, 10763), False, 'from detmodel.hit import Hit\n'), ((12423, 12633), 'detmodel.signal.Signal', 'Signal', ([], {'hash_seg_line_x': 'hit_hash_ix', 'hash_seg_line_y': 'hit_hash_iy', 'x': 'this_hit.x', 'y': 'this_hit.y', 'z': 'this_hit.z', 'time': 'this_hit.time', 'seg_ix': 'this_hit.seg_ix', 'rdrift': 'this_hit.rdrift', 'is_muon': 'this_hit.is_muon'}), '(hash_seg_line_x=hit_hash_ix, hash_seg_line_y=hit_hash_iy, x=this_hit\n .x, y=this_hit.y, z=this_hit.z, time=this_hit.time, seg_ix=this_hit.\n seg_ix, rdrift=this_hit.rdrift, is_muon=this_hit.is_muon)\n', (12429, 12633), False, 'from detmodel.signal import Signal, Segment\n'), ((4083, 4171), 'sympy.Point3D', 'sympy.Point3D', (["(-0.5 * self.sizes['x'])", "(0.5 * self.sizes['y'])", 'self.z'], {'evaluate': '(False)'}), "(-0.5 * self.sizes['x'], 0.5 * self.sizes['y'], 
self.z,\n evaluate=False)\n", (4096, 4171), False, 'import sympy\n'), ((4216, 4303), 'sympy.Point3D', 'sympy.Point3D', (["(0.5 * self.sizes['x'])", "(0.5 * self.sizes['y'])", 'self.z'], {'evaluate': '(False)'}), "(0.5 * self.sizes['x'], 0.5 * self.sizes['y'], self.z,\n evaluate=False)\n", (4229, 4303), False, 'import sympy\n'), ((7956, 8013), 'sympy.Point', 'sympy.Point', (['slx.line.p1.x', 'slx.line.p1.z'], {'evaluate': '(False)'}), '(slx.line.p1.x, slx.line.p1.z, evaluate=False)\n', (7967, 8013), False, 'import sympy\n'), ((8041, 8100), 'sympy.Point', 'sympy.Point', (['muon.line.p1.x', 'muon.line.p1.z'], {'evaluate': '(False)'}), '(muon.line.p1.x, muon.line.p1.z, evaluate=False)\n', (8052, 8100), False, 'import sympy\n'), ((8128, 8187), 'sympy.Point', 'sympy.Point', (['muon.line.p2.x', 'muon.line.p2.z'], {'evaluate': '(False)'}), '(muon.line.p2.x, muon.line.p2.z, evaluate=False)\n', (8139, 8187), False, 'import sympy\n'), ((8215, 8245), 'sympy.Line', 'sympy.Line', (['muonpos1', 'muonpos2'], {}), '(muonpos1, muonpos2)\n', (8225, 8245), False, 'import sympy\n'), ((11935, 11970), 'detmodel.util.distpoint2line', 'util.distpoint2line', (['yseg', 'this_hit'], {}), '(yseg, this_hit)\n', (11954, 11970), False, 'from detmodel import util\n'), ((2016, 2028), 'numpy.tan', 'np.tan', (['tilt'], {}), '(tilt)\n', (2022, 2028), True, 'import numpy as np\n'), ((3125, 3155), 'sympy.Line3D', 'sympy.Line3D', (['this_p1', 'this_p2'], {}), '(this_p1, this_p2)\n', (3137, 3155), False, 'import sympy\n'), ((3456, 3486), 'sympy.Line3D', 'sympy.Line3D', (['this_p1', 'this_p2'], {}), '(this_p1, this_p2)\n', (3468, 3486), False, 'import sympy\n'), ((4362, 4451), 'sympy.Point3D', 'sympy.Point3D', (["(-0.5 * self.sizes['x'])", "(-0.5 * self.sizes['y'])", 'self.z'], {'evaluate': '(False)'}), "(-0.5 * self.sizes['x'], -0.5 * self.sizes['y'], self.z,\n evaluate=False)\n", (4375, 4451), False, 'import sympy\n'), ((4495, 4583), 'sympy.Point3D', 'sympy.Point3D', (["(0.5 * self.sizes['x'])", "(-0.5 * 
self.sizes['y'])", 'self.z'], {'evaluate': '(False)'}), "(0.5 * self.sizes['x'], -0.5 * self.sizes['y'], self.z,\n evaluate=False)\n", (4508, 4583), False, 'import sympy\n'), ((10554, 10594), 'numpy.abs', 'np.abs', (['(noise_x[inoise] - xseg.line.p1.x)'], {}), '(noise_x[inoise] - xseg.line.p1.x)\n', (10560, 10594), True, 'import numpy as np\n'), ((11705, 11740), 'detmodel.util.distpoint2line', 'util.distpoint2line', (['xseg', 'this_hit'], {}), '(xseg, this_hit)\n', (11724, 11740), False, 'from detmodel import util\n'), ((15079, 15109), 'detmodel.util.distpoint2line', 'util.distpoint2line', (['xseg', 'hit'], {}), '(xseg, hit)\n', (15098, 15109), False, 'from detmodel import util\n'), ((3026, 3038), 'numpy.tan', 'np.tan', (['tilt'], {}), '(tilt)\n', (3032, 3038), True, 'import numpy as np\n'), ((4643, 4732), 'sympy.Point3D', 'sympy.Point3D', (["(-0.5 * self.sizes['x'])", "(-0.5 * self.sizes['y'])", 'self.z'], {'evaluate': '(False)'}), "(-0.5 * self.sizes['x'], -0.5 * self.sizes['y'], self.z,\n evaluate=False)\n", (4656, 4732), False, 'import sympy\n'), ((4776, 4864), 'sympy.Point3D', 'sympy.Point3D', (["(-0.5 * self.sizes['x'])", "(0.5 * self.sizes['y'])", 'self.z'], {'evaluate': '(False)'}), "(-0.5 * self.sizes['x'], 0.5 * self.sizes['y'], self.z,\n evaluate=False)\n", (4789, 4864), False, 'import sympy\n'), ((4921, 5009), 'sympy.Point3D', 'sympy.Point3D', (["(0.5 * self.sizes['x'])", "(-0.5 * self.sizes['y'])", 'self.z'], {'evaluate': '(False)'}), "(0.5 * self.sizes['x'], -0.5 * self.sizes['y'], self.z,\n evaluate=False)\n", (4934, 5009), False, 'import sympy\n'), ((5054, 5141), 'sympy.Point3D', 'sympy.Point3D', (["(0.5 * self.sizes['x'])", "(0.5 * self.sizes['y'])", 'self.z'], {'evaluate': '(False)'}), "(0.5 * self.sizes['x'], 0.5 * self.sizes['y'], self.z,\n evaluate=False)\n", (5067, 5141), False, 'import sympy\n'), ((5200, 5243), 'sympy.Point3D', 'sympy.Point3D', (['(0)', '(0)', 'self.z'], {'evaluate': '(False)'}), '(0, 0, self.z, evaluate=False)\n', (5213, 
5243), False, 'import sympy\n'), ((5296, 5339), 'sympy.Point3D', 'sympy.Point3D', (['(1)', '(0)', 'self.z'], {'evaluate': '(False)'}), '(1, 0, self.z, evaluate=False)\n', (5309, 5339), False, 'import sympy\n'), ((5405, 5448), 'sympy.Point3D', 'sympy.Point3D', (['(0)', '(0)', 'self.z'], {'evaluate': '(False)'}), '(0, 0, self.z, evaluate=False)\n', (5418, 5448), False, 'import sympy\n'), ((5501, 5544), 'sympy.Point3D', 'sympy.Point3D', (['(0)', '(1)', 'self.z'], {'evaluate': '(False)'}), '(0, 1, self.z, evaluate=False)\n', (5514, 5544), False, 'import sympy\n')]
|
# **********************************************************************************************************************
#
# brief: simple script to plot runtimes
#
# author: <NAME>
# date: 14.08.2020
#
# **********************************************************************************************************************
import os
import sys
import random
# Root directory of the project
ROOT_DIR = os.path.abspath("../../")
print(ROOT_DIR)
# Import Mask RCNN from the repository checkout rather than any installed copy.
sys.path.append(ROOT_DIR) # To find local version of the library
from mrcnn import utils
from mrcnn import visualize
import mrcnn.model as modellib
from samples.sun import sunrgb, sund3, sunrgbd, sunrgbd_fusenet
# Location of the resized SUN RGB-D dataset and of the trained weight files.
SUN_DIR = "D:/Data/sun_rgbd/resized/"
MODEL_DIR = os.path.join(ROOT_DIR, "logs/")
print(MODEL_DIR)
model_file_rgb = os.path.join(MODEL_DIR, "weights/mask_rcnn_sunrgb_0050.h5")
model_file_d3 = os.path.join(MODEL_DIR, "weights/mask_rcnn_sund3_0050.h5")
model_file_rgbd = os.path.join(MODEL_DIR, "weights/mask_rcnn_sunrgbd_0050.h5")
model_file_rgbd_fusenet = os.path.join(MODEL_DIR, "weights/mask_rcnn_sunrgbd_fusenet_0050.h5")
# Inference config for the RGB-only model (batch size 1: one image per detect call).
config_rgb = sunrgb.SunRGBConfig()
config_rgb.BACKBONE = "resnet50"
config_rgb.DROPOUT_RATE = -1  # NOTE(review): -1 presumably disables dropout -- confirm against config class
config_rgb.DETECTION_MIN_CONFIDENCE = 0.6
config_rgb.TRAIN_ROIS_PER_IMAGE = 100
config_rgb.BATCH_SIZE = 1
config_rgb.IMAGES_PER_GPU = 1
# Inference config for the depth-only (D3) model.
config_d3 = sund3.SunD3Config()
config_d3.BACKBONE = "resnet50"
config_d3.DROPOUT_RATE = -1
config_d3.DETECTION_MIN_CONFIDENCE = 0.7
config_d3.TRAIN_ROIS_PER_IMAGE = 200
config_d3.BATCH_SIZE = 1
config_d3.IMAGES_PER_GPU = 1
# Inference config for the RGB-D model.
config_rgbd = sunrgbd.SunRGBDConfig()
config_rgbd.BACKBONE = "resnet50"
config_rgbd.DROPOUT_RATE = -1
config_rgbd.DETECTION_MIN_CONFIDENCE = 0.8
config_rgbd.TRAIN_ROIS_PER_IMAGE = 50
config_rgbd.BATCH_SIZE = 1
config_rgbd.IMAGES_PER_GPU = 1
# Inference config for the FuseNet-style RGB-D model.
config_rgbd_fusenet = sunrgbd_fusenet.SunRGBDFusenetConfig()
config_rgbd_fusenet.DETECTION_MIN_CONFIDENCE = 0.8
config_rgbd_fusenet.TRAIN_ROIS_PER_IMAGE = 50
config_rgbd_fusenet.BATCH_SIZE = 1
config_rgbd_fusenet.IMAGES_PER_GPU = 1
config_rgbd_fusenet.NUM_FILTERS = [32, 32, 64, 128, 256]
# Load the same test split ("split/test13") once per input modality so the
# per-image comparison below can address all datasets by the same image_id.
# NOTE(review): this assumes image ids are aligned across the four datasets -- confirm.
dataset_rgb = sunrgb.SunRGBDataset()
dataset_rgb.load_sun_rgb(SUN_DIR, "split/test13")
dataset_rgb.prepare()
dataset_d3 = sund3.SunD3Dataset()
dataset_d3.load_sun_d3(SUN_DIR, "split/test13")
dataset_d3.prepare()
dataset_rgbd = sunrgbd.SunRGBDDataset()
dataset_rgbd.load_sun_rgbd(SUN_DIR, "split/test13")
dataset_rgbd.prepare()
dataset_rgbd_fusenet = sunrgbd_fusenet.SunRGBDFusenetDataset()
dataset_rgbd_fusenet.load_sun_rgbd_fusenet(SUN_DIR, "split/test13")
dataset_rgbd_fusenet.prepare()
# Build one inference-mode model per modality and load its trained weights.
model_rgb = modellib.MaskRCNN(mode="inference", config=config_rgb, model_dir=MODEL_DIR)
model_rgb.load_weights(model_file_rgb, by_name=True)
model_d3 = modellib.MaskRCNN(mode="inference", config=config_d3, model_dir=MODEL_DIR)
model_d3.load_weights(model_file_d3, by_name=True)
model_rgbd = modellib.MaskRCNN(mode="inference", config=config_rgbd, model_dir=MODEL_DIR)
model_rgbd.load_weights(model_file_rgbd, by_name=True)
model_rgbd_fusenet = modellib.MaskRCNN(mode="inference", config=config_rgbd_fusenet, model_dir=MODEL_DIR)
model_rgbd_fusenet.load_weights(model_file_rgbd_fusenet, by_name=True)
def plot_inference(model, dataset, image_id, rgb_image):
    """Run one model on a dataset image and draw its detections over rgb_image.

    The image fed to the model comes from `dataset` (so each model sees its own
    modality), while the drawing is always done on the RGB image for comparison.
    Returns whatever visualize.display_instances returns.
    """
    modality_image = dataset.load_image(image_id)
    detections = model.detect([modality_image], verbose=1)
    best = detections[0]
    return visualize.display_instances(
        rgb_image,
        best['rois'], best['masks'], best['class_ids'],
        dataset.class_names, best['scores'])
def plot_sun_rgb():
    """Save ground truth vs. per-model detections for 10 random test images.

    For each sampled image id, draws the ground-truth instances and the
    detections of the four models (RGB, D3, RGB-D, RGB-D FuseNet) on the RGB
    image and writes each figure to an "inference_<id>_<model>.png" file.
    """
    for img_id in random.choices(dataset_rgb.image_ids, k=10):
        print(img_id)
        rgb = dataset_rgb.load_image(img_id)
        gt_mask, gt_class_ids = dataset_rgb.load_mask(img_id)
        gt_bbox = utils.extract_bboxes(gt_mask)
        _, gt_figure = visualize.display_instances(
            rgb, gt_bbox, gt_mask, gt_class_ids, dataset_rgb.class_names)
        # Insertion order matters: figures are saved in this order below.
        figures = {
            "ground_truth": gt_figure,
            "sun_rgb": plot_inference(model_rgb, dataset_rgb, img_id, rgb)[1],
            "sun_d3": plot_inference(model_d3, dataset_d3, img_id, rgb)[1],
            "sun_rgbd": plot_inference(model_rgbd, dataset_rgbd, img_id, rgb)[1],
            "sun_rgbd_fusenet": plot_inference(
                model_rgbd_fusenet, dataset_rgbd_fusenet, img_id, rgb)[1],
        }
        for suffix, figure in figures.items():
            figure.savefig("inference_" + str(img_id) + "_" + suffix + ".png")
# Script entry point: render and save the comparison figures.
plot_sun_rgb()
|
[
"sys.path.append",
"os.path.abspath",
"samples.sun.sund3.SunD3Config",
"samples.sun.sunrgb.SunRGBDataset",
"samples.sun.sunrgbd.SunRGBDDataset",
"samples.sun.sunrgbd.SunRGBDConfig",
"random.choices",
"samples.sun.sund3.SunD3Dataset",
"mrcnn.utils.extract_bboxes",
"samples.sun.sunrgbd_fusenet.SunRGBDFusenetConfig",
"samples.sun.sunrgb.SunRGBConfig",
"samples.sun.sunrgbd_fusenet.SunRGBDFusenetDataset",
"mrcnn.visualize.display_instances",
"mrcnn.model.MaskRCNN",
"os.path.join"
] |
[((414, 439), 'os.path.abspath', 'os.path.abspath', (['"""../../"""'], {}), "('../../')\n", (429, 439), False, 'import os\n'), ((475, 500), 'sys.path.append', 'sys.path.append', (['ROOT_DIR'], {}), '(ROOT_DIR)\n', (490, 500), False, 'import sys\n'), ((740, 771), 'os.path.join', 'os.path.join', (['ROOT_DIR', '"""logs/"""'], {}), "(ROOT_DIR, 'logs/')\n", (752, 771), False, 'import os\n'), ((807, 866), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""weights/mask_rcnn_sunrgb_0050.h5"""'], {}), "(MODEL_DIR, 'weights/mask_rcnn_sunrgb_0050.h5')\n", (819, 866), False, 'import os\n'), ((883, 941), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""weights/mask_rcnn_sund3_0050.h5"""'], {}), "(MODEL_DIR, 'weights/mask_rcnn_sund3_0050.h5')\n", (895, 941), False, 'import os\n'), ((960, 1020), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""weights/mask_rcnn_sunrgbd_0050.h5"""'], {}), "(MODEL_DIR, 'weights/mask_rcnn_sunrgbd_0050.h5')\n", (972, 1020), False, 'import os\n'), ((1047, 1115), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""weights/mask_rcnn_sunrgbd_fusenet_0050.h5"""'], {}), "(MODEL_DIR, 'weights/mask_rcnn_sunrgbd_fusenet_0050.h5')\n", (1059, 1115), False, 'import os\n'), ((1130, 1151), 'samples.sun.sunrgb.SunRGBConfig', 'sunrgb.SunRGBConfig', ([], {}), '()\n', (1149, 1151), False, 'from samples.sun import sunrgb, sund3, sunrgbd, sunrgbd_fusenet\n'), ((1363, 1382), 'samples.sun.sund3.SunD3Config', 'sund3.SunD3Config', ([], {}), '()\n', (1380, 1382), False, 'from samples.sun import sunrgb, sund3, sunrgbd, sunrgbd_fusenet\n'), ((1590, 1613), 'samples.sun.sunrgbd.SunRGBDConfig', 'sunrgbd.SunRGBDConfig', ([], {}), '()\n', (1611, 1613), False, 'from samples.sun import sunrgb, sund3, sunrgbd, sunrgbd_fusenet\n'), ((1840, 1878), 'samples.sun.sunrgbd_fusenet.SunRGBDFusenetConfig', 'sunrgbd_fusenet.SunRGBDFusenetConfig', ([], {}), '()\n', (1876, 1878), False, 'from samples.sun import sunrgb, sund3, sunrgbd, sunrgbd_fusenet\n'), ((2122, 2144), 
'samples.sun.sunrgb.SunRGBDataset', 'sunrgb.SunRGBDataset', ([], {}), '()\n', (2142, 2144), False, 'from samples.sun import sunrgb, sund3, sunrgbd, sunrgbd_fusenet\n'), ((2230, 2250), 'samples.sun.sund3.SunD3Dataset', 'sund3.SunD3Dataset', ([], {}), '()\n', (2248, 2250), False, 'from samples.sun import sunrgb, sund3, sunrgbd, sunrgbd_fusenet\n'), ((2335, 2359), 'samples.sun.sunrgbd.SunRGBDDataset', 'sunrgbd.SunRGBDDataset', ([], {}), '()\n', (2357, 2359), False, 'from samples.sun import sunrgb, sund3, sunrgbd, sunrgbd_fusenet\n'), ((2458, 2497), 'samples.sun.sunrgbd_fusenet.SunRGBDFusenetDataset', 'sunrgbd_fusenet.SunRGBDFusenetDataset', ([], {}), '()\n', (2495, 2497), False, 'from samples.sun import sunrgb, sund3, sunrgbd, sunrgbd_fusenet\n'), ((2610, 2685), 'mrcnn.model.MaskRCNN', 'modellib.MaskRCNN', ([], {'mode': '"""inference"""', 'config': 'config_rgb', 'model_dir': 'MODEL_DIR'}), "(mode='inference', config=config_rgb, model_dir=MODEL_DIR)\n", (2627, 2685), True, 'import mrcnn.model as modellib\n'), ((2751, 2825), 'mrcnn.model.MaskRCNN', 'modellib.MaskRCNN', ([], {'mode': '"""inference"""', 'config': 'config_d3', 'model_dir': 'MODEL_DIR'}), "(mode='inference', config=config_d3, model_dir=MODEL_DIR)\n", (2768, 2825), True, 'import mrcnn.model as modellib\n'), ((2891, 2967), 'mrcnn.model.MaskRCNN', 'modellib.MaskRCNN', ([], {'mode': '"""inference"""', 'config': 'config_rgbd', 'model_dir': 'MODEL_DIR'}), "(mode='inference', config=config_rgbd, model_dir=MODEL_DIR)\n", (2908, 2967), True, 'import mrcnn.model as modellib\n'), ((3045, 3134), 'mrcnn.model.MaskRCNN', 'modellib.MaskRCNN', ([], {'mode': '"""inference"""', 'config': 'config_rgbd_fusenet', 'model_dir': 'MODEL_DIR'}), "(mode='inference', config=config_rgbd_fusenet, model_dir=\n MODEL_DIR)\n", (3062, 3134), True, 'import mrcnn.model as modellib\n'), ((3379, 3495), 'mrcnn.visualize.display_instances', 'visualize.display_instances', (['rgb_image', "r['rois']", "r['masks']", "r['class_ids']", 
'dataset.class_names', "r['scores']"], {}), "(rgb_image, r['rois'], r['masks'], r['class_ids'\n ], dataset.class_names, r['scores'])\n", (3406, 3495), False, 'from mrcnn import visualize\n'), ((3561, 3604), 'random.choices', 'random.choices', (['dataset_rgb.image_ids'], {'k': '(10)'}), '(dataset_rgb.image_ids, k=10)\n', (3575, 3604), False, 'import random\n'), ((3784, 3810), 'mrcnn.utils.extract_bboxes', 'utils.extract_bboxes', (['mask'], {}), '(mask)\n', (3804, 3810), False, 'from mrcnn import utils\n'), ((3837, 3924), 'mrcnn.visualize.display_instances', 'visualize.display_instances', (['image', 'bbox', 'mask', 'class_ids', 'dataset_rgb.class_names'], {}), '(image, bbox, mask, class_ids, dataset_rgb.\n class_names)\n', (3864, 3924), False, 'from mrcnn import visualize\n')]
|
# coding:utf-8
from LxBasic import bscMtdCore
from LxCore.config import appConfig
#
class MaConfig(object):
    """Core Maya constants: path separators, node/port type names and defaults."""
    # Separators used in Maya DAG paths, set names, namespaces and attribute paths.
    DEF_mya_node_pathsep = '|'
    DEF_mya_set_separator = '>'
    DEF_mya_node_namespace_pathsep = ':'
    DEF_mya_node_port_pathsep = '.'
    # Node type names.
    DEF_mya_type_transform = 'transform'
    DEF_mya_type_shading_engine = 'shadingEngine'
    DEF_mya_type_mesh = 'mesh'
    DEF_mya_type_assembly_reference = 'assemblyReference'
    DEF_mya_type_assembly_definition = 'assemblyDefinition'
    #
    DEF_mya_type_group_id = 'groupId'
    DEF_mya_type_set = 'set'
    #
    DEF_mya_type_light = 'light'
    # Common attribute/port names.
    DEF_mya_portname_message = 'message'
    DEF_mya_portname_inst_obj_groups = 'instObjGroups'
    DEF_mya_portname_dag_set_members = 'dagSetMembers'
    # Mesh component key prefixes (vertex / edge / face).
    DEF_mya_key_mesh_vertex = 'vtx'
    DEF_mya_key_mesh_edge = 'e'
    DEF_mya_key_mesh_face = 'f'
    # Row-major 4x4 identity matrix.
    # NOTE(review): mutable class-level list shared by all users -- treat as read-only.
    DEF_mya_default_matrix = [1.0, .0, .0, .0, .0, 1.0, .0, .0, .0, .0, 1.0, .0, .0, .0, .0, 1.0]
class Cfg_M2(object):
    """Empty placeholder configuration class (reserved for future use)."""
class MaAssemblyConfig(object):
    """Empty placeholder for Maya scene-assembly configuration."""
class MaPlugConfig(object):
    """Names of Maya plug-ins this pipeline interacts with."""
    MaPlugName_AlembicExport = 'AbcExport'
    MaPlugName_GpuCache = 'gpuCache'
    MaPlugName_Arnold = 'mtoa'
    MaPlugName_Yeti = 'pgYetiMaya'
    # Node type registered by the Yeti plug-in (same string as the plug-in name).
    MaNodeType_Plug_Yeti = 'pgYetiMaya'
class MaUnitConfig(object):
    """Maya scene-unit keys, default values, and display-label lookups."""
    # Keys used when querying/setting scene units.
    MaUnit_Key_Time = 'time'
    MaUnit_Key_Angle = 'angle'
    MaUnit_Key_Linear = 'linear'
    # Default unit tokens.
    MaUnit_DefaultValue_Time = 'film'
    MaUnit_DefaultValue_Angle = 'degree'
    MaUnit_DefaultValue_Linear = 'cm'
    # Unit token -> human-readable label.
    MaUnit_UiDic_Time = {
        '12fps': '12 Fps',
        'game': '15 Fps',
        '16fps': '16 Fps',
        'film': '24 Fps',
        'pal': '25 Fps',
        'ntsc': '30 Fps',
        'show': '48 Fps',
        'palf': '50 Fps',
        'ntscf': '60 Fps'
    }
    MaUnit_UiDic_Angle = {
        'deg': 'Degree',
        'rad': 'Radian'
    }
    MaUnit_UiDic_Linear = {
        'mm': 'Millimeter',
        'cm': 'Centimeter',
        'm': 'Meter',
        'km': 'Kilometer',
        'in': 'Inch',
        'ft': 'Foot',
        'yd': 'Yard',
        'mi': 'Mile'
    }
    @classmethod
    def _toViewTimeUnit(cls, unit):
        """Return the display label for a time unit, 'N/a' if unknown."""
        try:
            return cls.MaUnit_UiDic_Time[unit]
        except KeyError:
            return 'N/a'
    @classmethod
    def _toViewAngleUnit(cls, unit):
        """Return the display label for an angle unit, 'N/a' if unknown."""
        try:
            return cls.MaUnit_UiDic_Angle[unit]
        except KeyError:
            return 'N/a'
    @classmethod
    def _toViewLinearUnit(cls, unit):
        """Return the display label for a linear unit, 'N/a' if unknown."""
        try:
            return cls.MaUnit_UiDic_Linear[unit]
        except KeyError:
            return 'N/a'
class MaRenderConfig(object):
    """Renderer identifiers and the globals nodes each renderer depends on."""
    # Renderer identifiers.
    MaRenderer_Arnold = 'arnold'
    MaRenderer_Software = 'mayaSoftware'
    MaRenderer_Hardware = 'mayaHardware'
    MaRenderer_Hardware2 = 'mayaHardware2'
    # Arnold default render pass.
    MaArnold_DefaultRenderPass = 'beauty'
    # Software-renderer globals nodes.
    MaNode_DefaultRenderGlobals = 'defaultRenderGlobals'
    MaNode_DefaultResolution = 'defaultResolution'
    MaNode_DefaultRenderQuality = 'defaultRenderQuality'
    # Hardware-renderer globals nodes.
    MaNode_DefaultHardwareRenderGlobals = 'defaultHardwareRenderGlobals'
    MaNode_HardwareRenderGlobals = 'hardwareRenderGlobals'
    MaNode_HardwareRenderingGlobals = 'hardwareRenderingGlobals'
    # Arnold globals nodes.
    MaNode_DefaultArnoldRenderOptions = 'defaultArnoldRenderOptions'
    MaNode_DefaultArnoldDisplayDriver = 'defaultArnoldDisplayDriver'
    MaNode_DefaultArnoldFilter = 'defaultArnoldFilter'
    MaNode_DefaultArnoldDriver = 'defaultArnoldDriver'
    # Per-renderer node lists (the globals nodes relevant to each renderer).
    MaRender_Software_Node_Lis = [
        MaNode_DefaultRenderGlobals,
        MaNode_DefaultResolution,
        #
        MaNode_DefaultRenderQuality
    ]
    MaRender_Hardware_Node_Lis = [
        MaNode_DefaultRenderGlobals,
        MaNode_DefaultResolution,
        #
        MaNode_DefaultHardwareRenderGlobals,
        MaNode_HardwareRenderGlobals
    ]
    MaRender_Hardware2_Node_Lis = [
        MaNode_DefaultRenderGlobals,
        MaNode_DefaultResolution,
        #
        MaNode_DefaultHardwareRenderGlobals,
        MaNode_HardwareRenderingGlobals
    ]
    MaRender_Arnold_Node_Lis = [
        MaNode_DefaultRenderGlobals,
        MaNode_DefaultResolution,
        #
        MaNode_DefaultArnoldRenderOptions,
        MaNode_DefaultArnoldDisplayDriver,
        MaNode_DefaultArnoldFilter,
        MaNode_DefaultArnoldDriver
    ]
class MaUiConfig(object):
    """Names of built-in Maya UI windows and panels."""
    MaUiName_MainWindow = 'MayaWindow'
    # Main editor panes.
    MaUiName_MainControl = 'MainPane'
    MaUiName_OutlinerControl = 'Outliner'
    MaUiName_AttributeControl = 'AttributeEditor'
class MaNodeAttributeConfig(appConfig.LxAttributeConfig):
    """Attribute-related constants layered on the shared LxAttributeConfig base."""
    # Attribute names excluded when collecting shader attributes.
    MaAttrNameLis_ShaderExcept = [
        'computedFileTextureNamePattern',
        'expression'
    ]
    # Maya attribute types whose values can be read generically.
    MaAttrTypeLis_Readable = [
        'bool',
        'byte',
        'enum',
        'string',
        'short',
        'float',
        'double',
        'time',
        'doubleLinear',
        'doubleAngle',
        'matrix',
        'long',
        'lightData',
        'addr',
        'fltMatrix',
        'char',
        'floatAngle',
        'floatLinear'
    ]
    # Types for which there is no meaningful default value to compare against.
    MaAttrTypeLis_NonDefaultValue = [
        'string'
    ]
    # Attribute-name aliasing (internal name -> public name).
    MaAttrNameDic_Convert = {
        'internalExpression': 'expression'
    }
    MaAttrName_Visible = 'visibility'
MaAttrName_Visible = 'visibility'
class MaNodeConfig(appConfig.LxNodeConfig):
    """Maya node configuration; inherits everything from appConfig.LxNodeConfig."""
class MaLightNodeConfig(object):
    """Constants and helpers for Maya light-linking and shadow-linking."""
    # Node types excluded from the default light set.
    MaNodeTypeLis_LightDefaultSet_Except = [
        'aiLightDecay'
    ]
    MaNodeName_LightLink = 'lightLinker1'
    MaNodeName_DefaultLightSet = 'defaultLightSet'
    # Attribute triplets on the lightLinker node: (link, light, object).
    MaAttrNameLis_LightLink = ['link', 'light', 'object']
    MaAttrNameLis_LightLink_Ignore = ['ignore', 'lightIgnored', 'objectIgnored']
    # Same triplets for shadow linking.
    MaAttrNameLis_ShadowLink = ['shadowLink', 'shadowLight', 'shadowObject']
    MaAttrNameLis_ShadowLink_Ignore = ['shadowIgnore', 'shadowLightIgnored', 'shadowObjectIgnored']
    # Link-attribute name -> its [light-side, object-side] child attribute names.
    MaAttrNameDic_LightLink = {
        'link': ['light', 'object'],
        'shadowLink': ['shadowLight', 'shadowObject'],
        'ignore': ['lightIgnored', 'objectIgnored'],
        'shadowIgnore': ['shadowLightIgnored', 'shadowObjectIgnored']
    }
    # NOTE(review): mutable class-level dict shared by all users -- confirm intended.
    MaAttrPrevNameDic = {}
    @classmethod
    def maAttrPrettifyNameDic_lightLink(cls):
        # Ordered mapping of link attribute names to UI display labels.
        return bscMtdCore.orderedDict(
            [
                ('light', 'Light(s)'),
                ('object', 'Object(s)'),
                #
                ('defaultLightSet', 'Default Set(s)'),
                #
                ('link', 'Light Link(s)'),
                ('shadowLink', 'Shadow Link(s)'),
                ('ignore', 'Light Ignore(s)'),
                ('shadowIgnore', 'Shadow Ignore(s)')
            ]
        )
class MaNodeGraphConfig(object):
    """Empty placeholder for node-graph configuration."""
class MaYetiPlugConfig(object):
    """Import-type identifiers for the Yeti fur/feather plug-in."""
    MaYetiImportType_Geometry = 'yetiGeometry'
    MaYetiImportType_Groom = 'yetiGroom'
    MaYetiImportType_Guide = 'yetiGuide'
    MaYetiImportType_Feather = 'yetiFeather'
    # All supported import types, in display order.
    MaYetiImportTypeLis = [
        MaYetiImportType_Geometry,
        MaYetiImportType_Groom,
        MaYetiImportType_Guide,
        MaYetiImportType_Feather
    ]
class MaArnoldPlugConfig(object):
    """Empty placeholder for Arnold plug-in configuration."""
|
[
"LxBasic.bscMtdCore.orderedDict"
] |
[((6088, 6344), 'LxBasic.bscMtdCore.orderedDict', 'bscMtdCore.orderedDict', (["[('light', 'Light(s)'), ('object', 'Object(s)'), ('defaultLightSet',\n 'Default Set(s)'), ('link', 'Light Link(s)'), ('shadowLink',\n 'Shadow Link(s)'), ('ignore', 'Light Ignore(s)'), ('shadowIgnore',\n 'Shadow Ignore(s)')]"], {}), "([('light', 'Light(s)'), ('object', 'Object(s)'), (\n 'defaultLightSet', 'Default Set(s)'), ('link', 'Light Link(s)'), (\n 'shadowLink', 'Shadow Link(s)'), ('ignore', 'Light Ignore(s)'), (\n 'shadowIgnore', 'Shadow Ignore(s)')])\n", (6110, 6344), False, 'from LxBasic import bscMtdCore\n')]
|
import os
import sys
import pickle # see if useful
import numpy as np
import cv2
import constants
import video_interpreter as vi
import to_nodes
def generate_ctm_dataset(activity):
    """Build and persist a node dataset for the given activity. Not implemented.

    Planned flow: extract frames (frame-by-frame, or more realistically in
    batches such as a 30-second recording), convert them to nodes via
    to_nodes.f2n (e.g. map(to_nodes.f2n, frames)), then save the extracted
    nodes, possibly through a dedicated helper.
    """
    pass
if __name__ == '__main__':
    # Script entry point: create the expected directory layout.
    constants.initialize_directories()
|
[
"constants.initialize_directories"
] |
[((598, 632), 'constants.initialize_directories', 'constants.initialize_directories', ([], {}), '()\n', (630, 632), False, 'import constants\n')]
|
from os import environ
# Must be set before pygame is imported to silence its startup banner.
environ['PYGAME_HIDE_SUPPORT_PROMPT'] = '1'
# For `debug` support on Mac, we need to preload tkinter
from designer.system import setup_debug_mode
setup_debug_mode()
# Actually import all dependencies
import pygame
from designer.core.director import *
from designer.core.event import *
from designer.helpers import *
from designer.animation import *
from designer.utilities.easings import *
from designer.objects import *
from designer.colors import *
from designer.positioning import *
from designer.keyboard import *
from designer.mouse import *
from designer.movement import *
# Singleton director instance; None until a game/window is started.
GLOBAL_DIRECTOR: Director = None
# Public API of the designer package, grouped by feature area; controls what
# `from designer import *` exposes.
__all__ = [
    'circle', 'ellipse',
    'arc', 'line',
    'rectangle',
    'text',
    'shape', 'lines', 'pen',
    'background_image',
    'image', 'emoji',
    'group',
    'draw',
    # Window information
    'set_window_color', 'get_window_color',
    'set_window_size',
    'get_height', 'get_window_height',
    'get_width', 'get_window_width',
    # Events
    'when', 'starting', 'updating', 'typing', 'clicking',
    'start', 'debug',
    'stop',
    'pause',
    'colliding', 'colliding_with_mouse',
    'destroy',
    'DesignerObject',
    # Positioning
    'above', 'below',
    # Director stuff
    'get_director',
    # Window stuff
    'set_window_title', 'get_window_title', 'set_window_image',
    # Keyboard stuff
    'get_keyboard_repeat', 'set_keyboard_repeat',
    'get_keyboard_delay', 'set_keyboard_delay',
    'get_keyboard_interval', 'set_keyboard_interval',
    'enable_keyboard_repeating', 'disable_keyboard_repeating',
    # Mouse stuff
    'get_mouse_cursor', 'set_mouse_cursor',
    'get_mouse_visible', 'set_mouse_visible',
    'get_mouse_position', 'set_mouse_position',
    'get_mouse_x', 'get_mouse_y',
    # Animations
    'Animation', 'linear_animation', 'sequence_animation',
    'glide_around',
    'glide_right',
    'glide_left',
    'glide_up',
    'glide_down',
    'glide_in_degrees',
    'spin',
    # Easings
    'Linear', 'Iterate',
    # Music
    'play_sound',
    'play_music', 'background_music', 'pause_music', 'set_music_volume', 'is_music_playing',
    'get_music_volume', 'stop_music', 'rewind_music', 'continue_music', 'set_music_position', 'get_music_position',
    # Movement
    'move_forward', 'move_backward', 'turn_left', 'turn_right', 'go_to', 'go_to_xy', 'go_to_mouse',
    'point_towards', 'point_towards_mouse', 'point_in_direction', 'change_xy', 'change_x', 'change_y', 'set_x', 'set_y',
    'get_angle', 'get_x', 'get_y',
    'flip_x', 'flip_y', 'set_flip_x', 'set_flip_y', 'set_scale', 'set_scale_x', 'set_scale_y', 'set_background_image',
    'get_scale', 'get_scale_x', 'get_scale_y', 'get_visible', 'get_flip_x', 'get_flip_y', 'show', 'hide',
    'grow', 'grow_x', 'grow_y', 'shrink',
    'move_to_x', 'move_to_y', 'move_to', 'move_to_mouse', 'move_to_xy',
    'set_visible', 'change_scale',
    # Emoji specific
    'get_emoji_name', 'set_emoji_name'
]
|
[
"designer.system.setup_debug_mode"
] |
[((171, 189), 'designer.system.setup_debug_mode', 'setup_debug_mode', ([], {}), '()\n', (187, 189), False, 'from designer.system import setup_debug_mode\n')]
|
# Generated by Django 2.0.7 on 2018-08-06 08:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: updates admin display names and shrinks the content field."""
    dependencies = [
        ('tft', '0013_auto_20180806_1606'),
    ]
    operations = [
        # Metadata-only: set verbose names for the Shortcut model.
        migrations.AlterModelOptions(
            name='shortcut',
            options={'verbose_name': '快捷标题', 'verbose_name_plural': '快捷标题'},
        ),
        # Metadata-only: set verbose names for the ShortcutContent model.
        migrations.AlterModelOptions(
            name='shortcutcontent',
            options={'verbose_name': '快捷内容', 'verbose_name_plural': '快捷内容'},
        ),
        # Schema change: ShortcutContent.content becomes CharField(max_length=30).
        migrations.AlterField(
            model_name='shortcutcontent',
            name='content',
            field=models.CharField(max_length=30, verbose_name='内容'),
        ),
    ]
|
[
"django.db.models.CharField",
"django.db.migrations.AlterModelOptions"
] |
[((231, 345), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""shortcut"""', 'options': "{'verbose_name': '快捷标题', 'verbose_name_plural': '快捷标题'}"}), "(name='shortcut', options={'verbose_name':\n '快捷标题', 'verbose_name_plural': '快捷标题'})\n", (259, 345), False, 'from django.db import migrations, models\n'), ((386, 508), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""shortcutcontent"""', 'options': "{'verbose_name': '快捷内容', 'verbose_name_plural': '快捷内容'}"}), "(name='shortcutcontent', options={\n 'verbose_name': '快捷内容', 'verbose_name_plural': '快捷内容'})\n", (414, 508), False, 'from django.db import migrations, models\n'), ((659, 709), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'verbose_name': '"""内容"""'}), "(max_length=30, verbose_name='内容')\n", (675, 709), False, 'from django.db import migrations, models\n')]
|
from rest_framework import serializers
from django.db import transaction
from delivery.services import create_delivery_config, create_pick_period_line
from product.services import create_default_group_by_shop
from shop.constant import ShopStatus
from shop.services import (
create_shop,
create_pay_channel,
create_shop_mini_program_qcode,
create_shop_reject_reason_by_shop_id,
create_shop_creator_history_realname,
)
from staff.services import create_super_admin_staff
from user.serializers import UserSerializer, operatorSerializer
from wsc_django.utils.constant import DateFormat
from config.services import (
create_receipt_by_shop,
create_share_setup,
create_some_config_by_shop_id,
create_msg_notify_by_shop_id,
)
from wsc_django.utils.validators import (
mobile_validator,
shop_verify_status_validator,
shop_verify_type_validator,
shop_status_validator,
)
class ShopCreateSerializer(serializers.Serializer):
    """Serializer for creating a shop from the super-admin backend.

    Creating a shop also provisions its default data (QR code, receipt,
    delivery config, product group, staff, share setup, notifications) inside
    a single database transaction.
    """
    id = serializers.IntegerField(read_only=True, label="商铺id")
    shop_code = serializers.CharField(read_only=True, label="商铺code")
    shop_name = serializers.CharField(required=True, max_length=128, label="商铺名称")
    shop_img = serializers.CharField(required=True, max_length=300, label="商铺logo")
    shop_province = serializers.CharField(required=True, label="商铺省份编号")
    shop_city = serializers.CharField(required=True, label="商铺城市编号")
    shop_county = serializers.CharField(required=True, label="商铺区编号")
    shop_address = serializers.CharField(required=True, max_length=100, label="详细地址")
    description = serializers.CharField(required=True, max_length=200, label="商铺描述")
    inviter_phone = serializers.CharField(required=False, validators=[mobile_validator], label="推荐人手机号")
    realname = serializers.CharField(required=False, label="历史真实姓名")
    def create(self, validated_data):
        user = self.context['user']
        # Historical real name supplied with the application (not a shop field).
        history_realname = validated_data.pop("realname", None)
        with transaction.atomic():
            # Create a savepoint so all provisioning succeeds or none of it does.
            save_id = transaction.savepoint()
            try:
                # Create the shop itself.
                shop = create_shop(validated_data, user)
                # Create the shop's mini-program QR code.
                create_shop_mini_program_qcode(shop.shop_code)
                # Create the receipt.
                create_receipt_by_shop(shop.id)
                # Create the default delivery config with three pickup windows.
                delivery_config = create_delivery_config(shop.id)
                create_pick_period_line(delivery_config, "12:00", "13:00")
                create_pick_period_line(delivery_config, "17:00", "18:00")
                create_pick_period_line(delivery_config, "21:00", "22:00")
                # Create the default product group.
                create_default_group_by_shop(shop)
                # Register the shop creator as the super-admin staff member.
                create_super_admin_staff(shop, shop.super_admin)
                # Create the shop share setup.
                create_share_setup(shop.id, shop.shop_name)
                # Create miscellaneous configuration.
                create_some_config_by_shop_id(shop.id)
                # Create the default message-notification configuration.
                create_msg_notify_by_shop_id(shop.id)
                # Store the applicant's historical real name, if provided.
                if history_realname:
                    create_shop_creator_history_realname(shop.id, history_realname)
            except Exception as e:
                print(e)
                # Roll back to the savepoint, then re-raise for the caller.
                transaction.savepoint_rollback(save_id)
                raise
            # Release the savepoint (the atomic block commits on exit).
            transaction.savepoint_commit(save_id)
        return shop
return shop
class SuperShopSerializer(serializers.Serializer):
    """Read-only serializer for shop detail in the super-admin backend."""
    shop_id = serializers.IntegerField(read_only=True, source="id", label="商铺id")
    shop_name = serializers.CharField(label="商铺名称")
    shop_img = serializers.CharField(label="商铺logo")
    shop_province = serializers.CharField(label="商铺省份编号")
    shop_city = serializers.CharField(label="商铺城市编号")
    shop_county = serializers.CharField(label="商铺区编号")
    shop_address = serializers.CharField(label="详细地址")
    description = serializers.CharField(label="商铺描述")
    create_time = serializers.DateTimeField(label="商铺创建时间")
    shop_status = serializers.IntegerField(source="status", label="商铺状态")
    # Nested user info for the shop creator and the super admin.
    create_user_data = UserSerializer(read_only=True, label="商铺创建人信息")
    super_admin_data = UserSerializer(label="超管信息")
class SuperShopListSerializer(serializers.Serializer):
    """Serializer for shop rows in the super-admin shop list."""
    shop_id = serializers.IntegerField(read_only=True, source="id", label="商铺id")
    shop_name = serializers.CharField(label="商铺名称")
    shop_img = serializers.CharField(label="商铺logo")
    product_species_count = serializers.IntegerField(label="商铺货品种类数量")
    # Whether the requesting user is this shop's super admin (int flag).
    is_super_admin = serializers.IntegerField(label="该用户是否为该店的超级管理员")
    shop_status = serializers.IntegerField(source="status", label="商铺状态")
    cerify_active = serializers.IntegerField(label="商铺是否认证")
    pay_active = serializers.IntegerField(label="商铺是否开通支付")
    shop_verify_content = serializers.CharField(label="商铺认证内容")
class AdminShopSerializer(serializers.Serializer):
    """Serializer for shop info in the shop-admin backend."""
    shop_id = serializers.IntegerField(read_only=True, source="id", label="商铺id")
    shop_name = serializers.CharField(label="商铺名称")
    shop_img = serializers.CharField(label="商铺logo")
    shop_phone = serializers.CharField(label="商铺联系电话")
    shop_status = serializers.IntegerField(source="status", label="商铺状态")
    shop_province = serializers.CharField(label="商铺省份编号")
    shop_city = serializers.CharField(label="商铺城市编号")
    shop_county = serializers.CharField(label="商铺区编号")
    shop_address = serializers.CharField(label="详细地址")
    shop_code = serializers.CharField(label="商铺编号")
    cerify_active = serializers.IntegerField(label="商铺是否认证")
    shop_verify_type = serializers.IntegerField(label="商铺认证类型")
    pay_active = serializers.IntegerField(label="商铺是否开通支付")
    shop_verify_content = serializers.CharField(label="商铺认证内容")
    create_time = serializers.DateTimeField(format=DateFormat.TIME, label="商铺创建时间")
    create_user = UserSerializer(read_only=True, label="商铺创建人信息")
class MallShopSerializer(serializers.Serializer):
    """Serializer for shop info shown on the customer-facing mall side."""
    shop_name = serializers.CharField(label="商铺名称")
    shop_code = serializers.CharField(label="商铺编号")
    shop_img = serializers.CharField(label="商铺logo")
    shop_province = serializers.CharField(label="商铺省份编号")
    shop_city = serializers.CharField(label="商铺城市编号")
    shop_county = serializers.CharField(label="商铺区编号")
    shop_address = serializers.CharField(label="详细地址")
    shop_phone = serializers.CharField(label="商铺联系电话")
class SuperShopStatusSerializer(serializers.Serializer):
    """Serializer for reviewing a shop's status in the super-admin backend.

    Only `shop_status` (and, on rejection, `reject_reason`) is writable;
    everything else is read-only review context.
    """
    shop_id = serializers.IntegerField(read_only=True, source="id", label="商铺id")
    shop_name = serializers.CharField(read_only=True, label="商铺名称")
    shop_img = serializers.CharField(read_only=True, label="商铺logo")
    shop_address = serializers.CharField(read_only=True, label="详细地址")
    shop_province = serializers.CharField(read_only=True, label="商铺省份编号")
    shop_city = serializers.CharField(read_only=True, label="商铺城市编号")
    shop_county = serializers.CharField(read_only=True, label="商铺区编号")
    shop_status = serializers.IntegerField(
        required=True, source="status", validators=[shop_status_validator], label="商铺状态"
    )
    create_time = serializers.DateTimeField(read_only=True, format=DateFormat.TIME, label="商铺创建时间")
    creator = UserSerializer(read_only=True, label="商铺创建者")
    operate_time = serializers.DateTimeField(read_only=True, source="update_at", format=DateFormat.TIME, label="操作时间")
    operator = operatorSerializer(read_only=True, label="审核操作人")
    reject_reason = serializers.CharField(required=False, default='', label="拒绝理由")
    description = serializers.CharField(read_only=True, label="商铺描述")
    inviter_phone = serializers.CharField(read_only=True, label="推荐人手机号")
    current_realname = serializers.CharField(read_only=True, label="创建时的用户真实姓名")
    def update(self, instance, validated_data):
        """Apply the reviewed status; record a reject reason when rejected."""
        shop_status = validated_data["status"]
        instance.status = shop_status
        if shop_status == ShopStatus.REJECTED:
            # `reject_reason` defaults to '' so the key is always present here.
            create_shop_reject_reason_by_shop_id(instance.id, validated_data['reject_reason'])
        instance.save()
        return instance
class SuperShopVerifySerializer(serializers.Serializer):
    """Serializer for updating a shop's verification state (super-admin)."""
    shop_id = serializers.IntegerField(source='id', read_only=True, label="商铺id")
    verify_status = serializers.IntegerField(
        write_only=True, required=True, validators=[shop_verify_status_validator], label="商铺认证状态"
    )
    verify_type = serializers.IntegerField(
        write_only=True, required=True, validators=[shop_verify_type_validator], label="商铺认证类型,个人/企业"
    )
    verify_content = serializers.CharField(
        write_only=True, min_length=0, max_length=200, required=True, label="认证内容"
    )
    def update(self, instance, validated_data):
        """Copy the verification fields onto the shop instance and persist it."""
        cerify_active = validated_data["verify_status"]
        verify_type = validated_data["verify_type"]
        verify_content = validated_data["verify_content"]
        instance.cerify_active = cerify_active
        instance.shop_verify_type = verify_type
        instance.shop_verify_content = verify_content
        instance.save()
        return instance
class ShopPayChannelSerializer(serializers.Serializer):
    """Serializer for creating a shop's payment channel (super-admin)."""
    smerchant_no = serializers.CharField(label="商户号")
    terminal_id1 = serializers.CharField(label="终端号1")
    access_token = serializers.CharField(label="扫呗access_token")
    channel_type = serializers.IntegerField(label="支付渠道, 1:利楚, 2:建行")
    def create(self, validated_data):
        """Create the pay-channel record for the shop supplied via context."""
        shop = self.context["shop"]
        shop_pay_channel = create_pay_channel(validated_data, shop.id)
        return shop_pay_channel
|
[
"django.db.transaction.savepoint_rollback",
"shop.services.create_shop_mini_program_qcode",
"config.services.create_share_setup",
"shop.services.create_shop_creator_history_realname",
"rest_framework.serializers.IntegerField",
"shop.services.create_shop",
"django.db.transaction.atomic",
"user.serializers.UserSerializer",
"config.services.create_msg_notify_by_shop_id",
"delivery.services.create_delivery_config",
"shop.services.create_shop_reject_reason_by_shop_id",
"delivery.services.create_pick_period_line",
"product.services.create_default_group_by_shop",
"shop.services.create_pay_channel",
"staff.services.create_super_admin_staff",
"rest_framework.serializers.DateTimeField",
"config.services.create_receipt_by_shop",
"django.db.transaction.savepoint",
"rest_framework.serializers.CharField",
"config.services.create_some_config_by_shop_id",
"user.serializers.operatorSerializer",
"django.db.transaction.savepoint_commit"
] |
[((1002, 1056), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)', 'label': '"""商铺id"""'}), "(read_only=True, label='商铺id')\n", (1026, 1056), False, 'from rest_framework import serializers\n'), ((1073, 1126), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)', 'label': '"""商铺code"""'}), "(read_only=True, label='商铺code')\n", (1094, 1126), False, 'from rest_framework import serializers\n'), ((1143, 1209), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)', 'max_length': '(128)', 'label': '"""商铺名称"""'}), "(required=True, max_length=128, label='商铺名称')\n", (1164, 1209), False, 'from rest_framework import serializers\n'), ((1225, 1293), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)', 'max_length': '(300)', 'label': '"""商铺logo"""'}), "(required=True, max_length=300, label='商铺logo')\n", (1246, 1293), False, 'from rest_framework import serializers\n'), ((1314, 1366), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)', 'label': '"""商铺省份编号"""'}), "(required=True, label='商铺省份编号')\n", (1335, 1366), False, 'from rest_framework import serializers\n'), ((1383, 1435), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)', 'label': '"""商铺城市编号"""'}), "(required=True, label='商铺城市编号')\n", (1404, 1435), False, 'from rest_framework import serializers\n'), ((1454, 1505), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)', 'label': '"""商铺区编号"""'}), "(required=True, label='商铺区编号')\n", (1475, 1505), False, 'from rest_framework import serializers\n'), ((1525, 1591), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)', 'max_length': '(100)', 'label': '"""详细地址"""'}), "(required=True, max_length=100, label='详细地址')\n", (1546, 1591), False, 'from rest_framework 
import serializers\n'), ((1610, 1676), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)', 'max_length': '(200)', 'label': '"""商铺描述"""'}), "(required=True, max_length=200, label='商铺描述')\n", (1631, 1676), False, 'from rest_framework import serializers\n'), ((1697, 1786), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(False)', 'validators': '[mobile_validator]', 'label': '"""推荐人手机号"""'}), "(required=False, validators=[mobile_validator], label=\n '推荐人手机号')\n", (1718, 1786), False, 'from rest_framework import serializers\n'), ((1797, 1850), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(False)', 'label': '"""历史真实姓名"""'}), "(required=False, label='历史真实姓名')\n", (1818, 1850), False, 'from rest_framework import serializers\n'), ((3615, 3682), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)', 'source': '"""id"""', 'label': '"""商铺id"""'}), "(read_only=True, source='id', label='商铺id')\n", (3639, 3682), False, 'from rest_framework import serializers\n'), ((3699, 3734), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺名称"""'}), "(label='商铺名称')\n", (3720, 3734), False, 'from rest_framework import serializers\n'), ((3750, 3787), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺logo"""'}), "(label='商铺logo')\n", (3771, 3787), False, 'from rest_framework import serializers\n'), ((3808, 3845), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺省份编号"""'}), "(label='商铺省份编号')\n", (3829, 3845), False, 'from rest_framework import serializers\n'), ((3862, 3899), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺城市编号"""'}), "(label='商铺城市编号')\n", (3883, 3899), False, 'from rest_framework import serializers\n'), ((3918, 3954), 'rest_framework.serializers.CharField', 
'serializers.CharField', ([], {'label': '"""商铺区编号"""'}), "(label='商铺区编号')\n", (3939, 3954), False, 'from rest_framework import serializers\n'), ((3974, 4009), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""详细地址"""'}), "(label='详细地址')\n", (3995, 4009), False, 'from rest_framework import serializers\n'), ((4028, 4063), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺描述"""'}), "(label='商铺描述')\n", (4049, 4063), False, 'from rest_framework import serializers\n'), ((4082, 4123), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {'label': '"""商铺创建时间"""'}), "(label='商铺创建时间')\n", (4107, 4123), False, 'from rest_framework import serializers\n'), ((4142, 4197), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'source': '"""status"""', 'label': '"""商铺状态"""'}), "(source='status', label='商铺状态')\n", (4166, 4197), False, 'from rest_framework import serializers\n'), ((4221, 4268), 'user.serializers.UserSerializer', 'UserSerializer', ([], {'read_only': '(True)', 'label': '"""商铺创建人信息"""'}), "(read_only=True, label='商铺创建人信息')\n", (4235, 4268), False, 'from user.serializers import UserSerializer, operatorSerializer\n'), ((4292, 4320), 'user.serializers.UserSerializer', 'UserSerializer', ([], {'label': '"""超管信息"""'}), "(label='超管信息')\n", (4306, 4320), False, 'from user.serializers import UserSerializer, operatorSerializer\n'), ((4416, 4483), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)', 'source': '"""id"""', 'label': '"""商铺id"""'}), "(read_only=True, source='id', label='商铺id')\n", (4440, 4483), False, 'from rest_framework import serializers\n'), ((4500, 4535), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺名称"""'}), "(label='商铺名称')\n", (4521, 4535), False, 'from rest_framework import serializers\n'), ((4551, 4588), 'rest_framework.serializers.CharField', 
'serializers.CharField', ([], {'label': '"""商铺logo"""'}), "(label='商铺logo')\n", (4572, 4588), False, 'from rest_framework import serializers\n'), ((4617, 4659), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'label': '"""商铺货品种类数量"""'}), "(label='商铺货品种类数量')\n", (4641, 4659), False, 'from rest_framework import serializers\n'), ((4681, 4729), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'label': '"""该用户是否为该店的超级管理员"""'}), "(label='该用户是否为该店的超级管理员')\n", (4705, 4729), False, 'from rest_framework import serializers\n'), ((4748, 4803), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'source': '"""status"""', 'label': '"""商铺状态"""'}), "(source='status', label='商铺状态')\n", (4772, 4803), False, 'from rest_framework import serializers\n'), ((4824, 4864), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'label': '"""商铺是否认证"""'}), "(label='商铺是否认证')\n", (4848, 4864), False, 'from rest_framework import serializers\n'), ((4882, 4924), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'label': '"""商铺是否开通支付"""'}), "(label='商铺是否开通支付')\n", (4906, 4924), False, 'from rest_framework import serializers\n'), ((4951, 4988), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺认证内容"""'}), "(label='商铺认证内容')\n", (4972, 4988), False, 'from rest_framework import serializers\n'), ((5079, 5146), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)', 'source': '"""id"""', 'label': '"""商铺id"""'}), "(read_only=True, source='id', label='商铺id')\n", (5103, 5146), False, 'from rest_framework import serializers\n'), ((5163, 5198), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺名称"""'}), "(label='商铺名称')\n", (5184, 5198), False, 'from rest_framework import serializers\n'), ((5214, 5251), 'rest_framework.serializers.CharField', 'serializers.CharField', 
([], {'label': '"""商铺logo"""'}), "(label='商铺logo')\n", (5235, 5251), False, 'from rest_framework import serializers\n'), ((5269, 5306), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺联系电话"""'}), "(label='商铺联系电话')\n", (5290, 5306), False, 'from rest_framework import serializers\n'), ((5325, 5380), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'source': '"""status"""', 'label': '"""商铺状态"""'}), "(source='status', label='商铺状态')\n", (5349, 5380), False, 'from rest_framework import serializers\n'), ((5401, 5438), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺省份编号"""'}), "(label='商铺省份编号')\n", (5422, 5438), False, 'from rest_framework import serializers\n'), ((5455, 5492), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺城市编号"""'}), "(label='商铺城市编号')\n", (5476, 5492), False, 'from rest_framework import serializers\n'), ((5511, 5547), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺区编号"""'}), "(label='商铺区编号')\n", (5532, 5547), False, 'from rest_framework import serializers\n'), ((5567, 5602), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""详细地址"""'}), "(label='详细地址')\n", (5588, 5602), False, 'from rest_framework import serializers\n'), ((5619, 5654), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺编号"""'}), "(label='商铺编号')\n", (5640, 5654), False, 'from rest_framework import serializers\n'), ((5675, 5715), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'label': '"""商铺是否认证"""'}), "(label='商铺是否认证')\n", (5699, 5715), False, 'from rest_framework import serializers\n'), ((5739, 5779), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'label': '"""商铺认证类型"""'}), "(label='商铺认证类型')\n", (5763, 5779), False, 'from rest_framework import serializers\n'), ((5797, 5839), 
'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'label': '"""商铺是否开通支付"""'}), "(label='商铺是否开通支付')\n", (5821, 5839), False, 'from rest_framework import serializers\n'), ((5866, 5903), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺认证内容"""'}), "(label='商铺认证内容')\n", (5887, 5903), False, 'from rest_framework import serializers\n'), ((5922, 5987), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {'format': 'DateFormat.TIME', 'label': '"""商铺创建时间"""'}), "(format=DateFormat.TIME, label='商铺创建时间')\n", (5947, 5987), False, 'from rest_framework import serializers\n'), ((6006, 6053), 'user.serializers.UserSerializer', 'UserSerializer', ([], {'read_only': '(True)', 'label': '"""商铺创建人信息"""'}), "(read_only=True, label='商铺创建人信息')\n", (6020, 6053), False, 'from user.serializers import UserSerializer, operatorSerializer\n'), ((6146, 6181), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺名称"""'}), "(label='商铺名称')\n", (6167, 6181), False, 'from rest_framework import serializers\n'), ((6198, 6233), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺编号"""'}), "(label='商铺编号')\n", (6219, 6233), False, 'from rest_framework import serializers\n'), ((6249, 6286), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺logo"""'}), "(label='商铺logo')\n", (6270, 6286), False, 'from rest_framework import serializers\n'), ((6307, 6344), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺省份编号"""'}), "(label='商铺省份编号')\n", (6328, 6344), False, 'from rest_framework import serializers\n'), ((6361, 6398), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺城市编号"""'}), "(label='商铺城市编号')\n", (6382, 6398), False, 'from rest_framework import serializers\n'), ((6417, 6453), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], 
{'label': '"""商铺区编号"""'}), "(label='商铺区编号')\n", (6438, 6453), False, 'from rest_framework import serializers\n'), ((6473, 6508), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""详细地址"""'}), "(label='详细地址')\n", (6494, 6508), False, 'from rest_framework import serializers\n'), ((6526, 6563), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商铺联系电话"""'}), "(label='商铺联系电话')\n", (6547, 6563), False, 'from rest_framework import serializers\n'), ((6656, 6723), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)', 'source': '"""id"""', 'label': '"""商铺id"""'}), "(read_only=True, source='id', label='商铺id')\n", (6680, 6723), False, 'from rest_framework import serializers\n'), ((6740, 6791), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)', 'label': '"""商铺名称"""'}), "(read_only=True, label='商铺名称')\n", (6761, 6791), False, 'from rest_framework import serializers\n'), ((6807, 6860), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)', 'label': '"""商铺logo"""'}), "(read_only=True, label='商铺logo')\n", (6828, 6860), False, 'from rest_framework import serializers\n'), ((6880, 6931), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)', 'label': '"""详细地址"""'}), "(read_only=True, label='详细地址')\n", (6901, 6931), False, 'from rest_framework import serializers\n'), ((6952, 7005), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)', 'label': '"""商铺省份编号"""'}), "(read_only=True, label='商铺省份编号')\n", (6973, 7005), False, 'from rest_framework import serializers\n'), ((7022, 7075), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)', 'label': '"""商铺城市编号"""'}), "(read_only=True, label='商铺城市编号')\n", (7043, 7075), False, 'from rest_framework import serializers\n'), ((7094, 7146), 
'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)', 'label': '"""商铺区编号"""'}), "(read_only=True, label='商铺区编号')\n", (7115, 7146), False, 'from rest_framework import serializers\n'), ((7165, 7276), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'required': '(True)', 'source': '"""status"""', 'validators': '[shop_status_validator]', 'label': '"""商铺状态"""'}), "(required=True, source='status', validators=[\n shop_status_validator], label='商铺状态')\n", (7189, 7276), False, 'from rest_framework import serializers\n'), ((7304, 7390), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {'read_only': '(True)', 'format': 'DateFormat.TIME', 'label': '"""商铺创建时间"""'}), "(read_only=True, format=DateFormat.TIME, label=\n '商铺创建时间')\n", (7329, 7390), False, 'from rest_framework import serializers\n'), ((7400, 7445), 'user.serializers.UserSerializer', 'UserSerializer', ([], {'read_only': '(True)', 'label': '"""商铺创建者"""'}), "(read_only=True, label='商铺创建者')\n", (7414, 7445), False, 'from user.serializers import UserSerializer, operatorSerializer\n'), ((7465, 7569), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {'read_only': '(True)', 'source': '"""update_at"""', 'format': 'DateFormat.TIME', 'label': '"""操作时间"""'}), "(read_only=True, source='update_at', format=\n DateFormat.TIME, label='操作时间')\n", (7490, 7569), False, 'from rest_framework import serializers\n'), ((7580, 7629), 'user.serializers.operatorSerializer', 'operatorSerializer', ([], {'read_only': '(True)', 'label': '"""审核操作人"""'}), "(read_only=True, label='审核操作人')\n", (7598, 7629), False, 'from user.serializers import UserSerializer, operatorSerializer\n'), ((7650, 7713), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(False)', 'default': '""""""', 'label': '"""拒绝理由"""'}), "(required=False, default='', label='拒绝理由')\n", (7671, 7713), False, 'from rest_framework 
import serializers\n'), ((7732, 7783), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)', 'label': '"""商铺描述"""'}), "(read_only=True, label='商铺描述')\n", (7753, 7783), False, 'from rest_framework import serializers\n'), ((7804, 7857), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)', 'label': '"""推荐人手机号"""'}), "(read_only=True, label='推荐人手机号')\n", (7825, 7857), False, 'from rest_framework import serializers\n'), ((7881, 7938), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)', 'label': '"""创建时的用户真实姓名"""'}), "(read_only=True, label='创建时的用户真实姓名')\n", (7902, 7938), False, 'from rest_framework import serializers\n'), ((8357, 8424), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'source': '"""id"""', 'read_only': '(True)', 'label': '"""商铺id"""'}), "(source='id', read_only=True, label='商铺id')\n", (8381, 8424), False, 'from rest_framework import serializers\n'), ((8445, 8565), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'write_only': '(True)', 'required': '(True)', 'validators': '[shop_verify_status_validator]', 'label': '"""商铺认证状态"""'}), "(write_only=True, required=True, validators=[\n shop_verify_status_validator], label='商铺认证状态')\n", (8469, 8565), False, 'from rest_framework import serializers\n'), ((8593, 8717), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'write_only': '(True)', 'required': '(True)', 'validators': '[shop_verify_type_validator]', 'label': '"""商铺认证类型,个人/企业"""'}), "(write_only=True, required=True, validators=[\n shop_verify_type_validator], label='商铺认证类型,个人/企业')\n", (8617, 8717), False, 'from rest_framework import serializers\n'), ((8748, 8849), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'write_only': '(True)', 'min_length': '(0)', 'max_length': '(200)', 'required': '(True)', 'label': '"""认证内容"""'}), 
"(write_only=True, min_length=0, max_length=200,\n required=True, label='认证内容')\n", (8769, 8849), False, 'from rest_framework import serializers\n'), ((9373, 9407), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""商户号"""'}), "(label='商户号')\n", (9394, 9407), False, 'from rest_framework import serializers\n'), ((9427, 9462), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""终端号1"""'}), "(label='终端号1')\n", (9448, 9462), False, 'from rest_framework import serializers\n'), ((9482, 9527), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""扫呗access_token"""'}), "(label='扫呗access_token')\n", (9503, 9527), False, 'from rest_framework import serializers\n'), ((9547, 9597), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'label': '"""支付渠道, 1:利楚, 2:建行"""'}), "(label='支付渠道, 1:利楚, 2:建行')\n", (9571, 9597), False, 'from rest_framework import serializers\n'), ((9700, 9743), 'shop.services.create_pay_channel', 'create_pay_channel', (['validated_data', 'shop.id'], {}), '(validated_data, shop.id)\n', (9718, 9743), False, 'from shop.services import create_shop, create_pay_channel, create_shop_mini_program_qcode, create_shop_reject_reason_by_shop_id, create_shop_creator_history_realname\n'), ((2024, 2044), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (2042, 2044), False, 'from django.db import transaction\n'), ((2090, 2113), 'django.db.transaction.savepoint', 'transaction.savepoint', ([], {}), '()\n', (2111, 2113), False, 'from django.db import transaction\n'), ((3467, 3504), 'django.db.transaction.savepoint_commit', 'transaction.savepoint_commit', (['save_id'], {}), '(save_id)\n', (3495, 3504), False, 'from django.db import transaction\n'), ((8132, 8219), 'shop.services.create_shop_reject_reason_by_shop_id', 'create_shop_reject_reason_by_shop_id', (['instance.id', "validated_data['reject_reason']"], {}), "(instance.id, 
validated_data[\n 'reject_reason'])\n", (8168, 8219), False, 'from shop.services import create_shop, create_pay_channel, create_shop_mini_program_qcode, create_shop_reject_reason_by_shop_id, create_shop_creator_history_realname\n'), ((2177, 2210), 'shop.services.create_shop', 'create_shop', (['validated_data', 'user'], {}), '(validated_data, user)\n', (2188, 2210), False, 'from shop.services import create_shop, create_pay_channel, create_shop_mini_program_qcode, create_shop_reject_reason_by_shop_id, create_shop_creator_history_realname\n'), ((2254, 2300), 'shop.services.create_shop_mini_program_qcode', 'create_shop_mini_program_qcode', (['shop.shop_code'], {}), '(shop.shop_code)\n', (2284, 2300), False, 'from shop.services import create_shop, create_pay_channel, create_shop_mini_program_qcode, create_shop_reject_reason_by_shop_id, create_shop_creator_history_realname\n'), ((2340, 2371), 'config.services.create_receipt_by_shop', 'create_receipt_by_shop', (['shop.id'], {}), '(shop.id)\n', (2362, 2371), False, 'from config.services import create_receipt_by_shop, create_share_setup, create_some_config_by_shop_id, create_msg_notify_by_shop_id\n'), ((2433, 2464), 'delivery.services.create_delivery_config', 'create_delivery_config', (['shop.id'], {}), '(shop.id)\n', (2455, 2464), False, 'from delivery.services import create_delivery_config, create_pick_period_line\n'), ((2481, 2539), 'delivery.services.create_pick_period_line', 'create_pick_period_line', (['delivery_config', '"""12:00"""', '"""13:00"""'], {}), "(delivery_config, '12:00', '13:00')\n", (2504, 2539), False, 'from delivery.services import create_delivery_config, create_pick_period_line\n'), ((2556, 2614), 'delivery.services.create_pick_period_line', 'create_pick_period_line', (['delivery_config', '"""17:00"""', '"""18:00"""'], {}), "(delivery_config, '17:00', '18:00')\n", (2579, 2614), False, 'from delivery.services import create_delivery_config, create_pick_period_line\n'), ((2631, 2689), 
'delivery.services.create_pick_period_line', 'create_pick_period_line', (['delivery_config', '"""21:00"""', '"""22:00"""'], {}), "(delivery_config, '21:00', '22:00')\n", (2654, 2689), False, 'from delivery.services import create_delivery_config, create_pick_period_line\n'), ((2733, 2767), 'product.services.create_default_group_by_shop', 'create_default_group_by_shop', (['shop'], {}), '(shop)\n', (2761, 2767), False, 'from product.services import create_default_group_by_shop\n'), ((2819, 2867), 'staff.services.create_super_admin_staff', 'create_super_admin_staff', (['shop', 'shop.super_admin'], {}), '(shop, shop.super_admin)\n', (2843, 2867), False, 'from staff.services import create_super_admin_staff\n'), ((2911, 2954), 'config.services.create_share_setup', 'create_share_setup', (['shop.id', 'shop.shop_name'], {}), '(shop.id, shop.shop_name)\n', (2929, 2954), False, 'from config.services import create_receipt_by_shop, create_share_setup, create_some_config_by_shop_id, create_msg_notify_by_shop_id\n'), ((2999, 3037), 'config.services.create_some_config_by_shop_id', 'create_some_config_by_shop_id', (['shop.id'], {}), '(shop.id)\n', (3028, 3037), False, 'from config.services import create_receipt_by_shop, create_share_setup, create_some_config_by_shop_id, create_msg_notify_by_shop_id\n'), ((3083, 3120), 'config.services.create_msg_notify_by_shop_id', 'create_msg_notify_by_shop_id', (['shop.id'], {}), '(shop.id)\n', (3111, 3120), False, 'from config.services import create_receipt_by_shop, create_share_setup, create_some_config_by_shop_id, create_msg_notify_by_shop_id\n'), ((3209, 3272), 'shop.services.create_shop_creator_history_realname', 'create_shop_creator_history_realname', (['shop.id', 'history_realname'], {}), '(shop.id, history_realname)\n', (3245, 3272), False, 'from shop.services import create_shop, create_pay_channel, create_shop_mini_program_qcode, create_shop_reject_reason_by_shop_id, create_shop_creator_history_realname\n'), ((3374, 3413), 
'django.db.transaction.savepoint_rollback', 'transaction.savepoint_rollback', (['save_id'], {}), '(save_id)\n', (3404, 3413), False, 'from django.db import transaction\n')]
|
"""An agent that makes random decisions using a TensorFlow policy."
This agent creates and uses a new randomly initialized
TensorFlow NN policy for each step but doesn't do any
learning.
"""
import agentos
from tensorflow import keras
import numpy as np
class Policy:
    """Randomly initialized two-layer feed-forward policy.

    A fresh, untrained network is built on construction; the agent never
    updates its weights.
    """

    def __init__(self):
        hidden = keras.layers.Dense(
            4, activation="relu", input_shape=(4,), dtype="float64"
        )
        head = keras.layers.Dense(1, activation="sigmoid", dtype="float64")
        self.nn = keras.Sequential([hidden, head])

    def compute_action(self, obs):
        """Map a 4-dim observation to a binary action (0 or 1)."""
        batched = np.array(obs)[np.newaxis]
        probability = self.nn(batched).numpy()[0][0]
        return int(round(probability))
class RandomTFAgent(agentos.Agent):
    """Agent that evaluates a freshly initialized random policy each step.

    No learning takes place: every call to ``advance`` samples a brand-new
    randomly initialized network and records the episode return.
    """

    def _init(self):
        # Episode returns accumulated across calls to advance().
        self.ret_vals = []

    def advance(self):
        """Run one rollout with a new random policy and record its return."""
        ret = sum(self.evaluate_policy(Policy(), max_steps=2000))
        self.ret_vals.append(ret)

    def __del__(self):
        # Guard: max()/np.median() raise (and np.mean() yields nan with a
        # warning) on an empty list, which happened whenever the agent was
        # destroyed before any rollout ran.
        if not self.ret_vals:
            print("Agent done! No rollouts were run.")
            return
        print(
            f"Agent done!\n"
            f"Num rollouts: {len(self.ret_vals)}\n"
            f"Avg return: {np.mean(self.ret_vals)}\n"
            f"Max return: {max(self.ret_vals)}\n"
            f"Median return: {np.median(self.ret_vals)}\n"
        )
if __name__ == "__main__":
    # gym is imported lazily so it is only required when running as a script.
    from gym.envs.classic_control import CartPoleEnv
    # Run 5 iterations, each evaluating a fresh random policy on CartPole.
    agentos.run_agent(RandomTFAgent, CartPoleEnv, max_iters=5)
|
[
"tensorflow.keras.layers.Dense",
"numpy.median",
"numpy.mean",
"numpy.array",
"agentos.run_agent"
] |
[((1280, 1338), 'agentos.run_agent', 'agentos.run_agent', (['RandomTFAgent', 'CartPoleEnv'], {'max_iters': '(5)'}), '(RandomTFAgent, CartPoleEnv, max_iters=5)\n', (1297, 1338), False, 'import agentos\n'), ((361, 436), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(4)'], {'activation': '"""relu"""', 'input_shape': '(4,)', 'dtype': '"""float64"""'}), "(4, activation='relu', input_shape=(4,), dtype='float64')\n", (379, 436), False, 'from tensorflow import keras\n'), ((492, 552), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {'activation': '"""sigmoid"""', 'dtype': '"""float64"""'}), "(1, activation='sigmoid', dtype='float64')\n", (510, 552), False, 'from tensorflow import keras\n'), ((1047, 1069), 'numpy.mean', 'np.mean', (['self.ret_vals'], {}), '(self.ret_vals)\n', (1054, 1069), True, 'import numpy as np\n'), ((1154, 1178), 'numpy.median', 'np.median', (['self.ret_vals'], {}), '(self.ret_vals)\n', (1163, 1178), True, 'import numpy as np\n'), ((647, 660), 'numpy.array', 'np.array', (['obs'], {}), '(obs)\n', (655, 660), True, 'import numpy as np\n')]
|
# Generated by Django 2.2.4 on 2019-11-07 16:46
import apps.documents.models
import apps.simple_history
import apps.utils
from django.db import migrations, models
import django.db.models.deletion
import enumfields.fields
import shipchain_common.utils
import simple_history.models
import uuid
class Migration(migrations.Migration):
    """Order documents newest-first and add a history table for Document.

    ``HistoricalDocument`` mirrors the Document fields and adds the
    bookkeeping columns used by django-simple-history (history_id,
    history_date, history_type, ...).
    """

    dependencies = [
        ('shipments', '0005_use_related_name_for_historical_relation'),
        ('documents', '0001_initial'),
    ]
    operations = [
        # Default queryset ordering: most recently created documents first.
        migrations.AlterModelOptions(
            name='document',
            options={'ordering': ('-created_at',)},
        ),
        migrations.CreateModel(
            name='HistoricalDocument',
            fields=[
                # Snapshot copies of Document's fields. Note `id` is only
                # db-indexed here, not the primary key — many history rows
                # share one document id.
                ('id', models.CharField(db_index=True, default=shipchain_common.utils.random_id, max_length=36)),
                ('name', models.CharField(max_length=36)),
                ('description', models.CharField(blank=True, max_length=250, null=True)),
                ('owner_id', models.CharField(max_length=36)),
                ('document_type', enumfields.fields.EnumIntegerField(default=0, enum=apps.documents.models.DocumentType)),
                ('file_type', enumfields.fields.EnumIntegerField(default=0, enum=apps.documents.models.FileType)),
                ('upload_status', enumfields.fields.EnumIntegerField(default=0, enum=apps.utils.UploadStatus)),
                ('updated_at', models.DateTimeField(blank=True, editable=False)),
                ('created_at', models.DateTimeField(blank=True, editable=False)),
                # simple-history bookkeeping columns; history_id is the PK.
                ('history_id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.CharField(blank=True, max_length=36, null=True)),
                # db_constraint=False: history rows must survive deletion of
                # the referenced shipment history record.
                ('shipment', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='shipments.HistoricalShipment')),
            ],
            options={
                'verbose_name': 'historical document',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model, apps.simple_history.HistoricalChangesMixin),
        ),
    ]
|
[
"django.db.models.UUIDField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.migrations.AlterModelOptions",
"django.db.models.DateTimeField"
] |
[((501, 591), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""document"""', 'options': "{'ordering': ('-created_at',)}"}), "(name='document', options={'ordering': (\n '-created_at',)})\n", (529, 591), False, 'from django.db import migrations, models\n'), ((738, 830), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'default': 'shipchain_common.utils.random_id', 'max_length': '(36)'}), '(db_index=True, default=shipchain_common.utils.random_id,\n max_length=36)\n', (754, 830), False, 'from django.db import migrations, models\n'), ((854, 885), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(36)'}), '(max_length=36)\n', (870, 885), False, 'from django.db import migrations, models\n'), ((920, 975), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(250)', 'null': '(True)'}), '(blank=True, max_length=250, null=True)\n', (936, 975), False, 'from django.db import migrations, models\n'), ((1007, 1038), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(36)'}), '(max_length=36)\n', (1023, 1038), False, 'from django.db import migrations, models\n'), ((1422, 1470), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'editable': '(False)'}), '(blank=True, editable=False)\n', (1442, 1470), False, 'from django.db import migrations, models\n'), ((1504, 1552), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'editable': '(False)'}), '(blank=True, editable=False)\n', (1524, 1552), False, 'from django.db import migrations, models\n'), ((1586, 1677), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (1602, 1677), False, 'from django.db import migrations, models\n'), ((1709, 1731), 
'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (1729, 1731), False, 'from django.db import migrations, models\n'), ((1776, 1819), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(True)'}), '(max_length=100, null=True)\n', (1792, 1819), False, 'from django.db import migrations, models\n'), ((1855, 1953), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')]", 'max_length': '(1)'}), "(choices=[('+', 'Created'), ('~', 'Changed'), ('-',\n 'Deleted')], max_length=1)\n", (1871, 1953), False, 'from django.db import migrations, models\n'), ((1985, 2039), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(36)', 'null': '(True)'}), '(blank=True, max_length=36, null=True)\n', (2001, 2039), False, 'from django.db import migrations, models\n'), ((2071, 2220), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'db_constraint': '(False)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'to': '"""shipments.HistoricalShipment"""'}), "(blank=True, db_constraint=False, null=True, on_delete=\n django.db.models.deletion.DO_NOTHING, to='shipments.HistoricalShipment')\n", (2088, 2220), False, 'from django.db import migrations, models\n')]
|
# --*-- coding=utf-8 --*--
import io
from conf.config import SAVEFILE
def savefile(res, path=None):
    """Append *res* to a UTF-8 text file.

    Args:
        res: A ``str`` (written followed by a newline), a ``list`` of
            strings (written via ``writelines`` followed by one trailing
            newline), or any other object (its string form is written
            verbatim, without a newline, matching the original behavior).
        path: Destination file; defaults to the configured ``SAVEFILE``.
    """
    target = SAVEFILE if path is None else path
    # Text-mode stream with explicit encoding: write str, never bytes.
    # (The previous version used the Python-2-only name ``basestring`` and
    # wrote encoded bytes to a text stream, both of which fail on Python 3.
    # The context variable also shadowed the function name.)
    with io.open(target, mode='a+', encoding='utf-8') as fh:
        if isinstance(res, str):
            fh.write(res + u"\n")
            fh.flush()
        elif isinstance(res, list):
            fh.writelines(res)
            fh.write(u"\n")
            fh.flush()
        else:
            fh.write(u"%s" % (res,))
            fh.flush()
if __name__ == '__main__':
    # Smoke test: append a three-element list, then a single-element list,
    # to the configured SAVEFILE.
    savefile(['1', '2', '3'])
    savefile(["seiee"])
|
[
"io.open"
] |
[((102, 148), 'io.open', 'io.open', (['SAVEFILE'], {'mode': '"""a+"""', 'encoding': '"""utf-8"""'}), "(SAVEFILE, mode='a+', encoding='utf-8')\n", (109, 148), False, 'import io\n')]
|
import hashlib
import uuid
from datetime import datetime
from skygear.container import SkygearContainer
from skygear.models import Record, RecordID, Reference
from skygear.options import options as skyoptions
from skygear.transmitter.encoding import serialize_record
from skygear.utils.context import current_user_id
class Receipt:
    """A delivery/read receipt linking one user to one message."""

    def __init__(self, user_id: str, message_id: str):
        if not isinstance(user_id, str):
            raise ValueError('user_id is not str')
        if not isinstance(message_id, str):
            raise ValueError('message_id is not str')
        record_id = RecordID(
            'receipt', Receipt.consistent_id(user_id, message_id)
        )
        data = {
            'user_id': Reference(RecordID('user', user_id)),
            'message_id': Reference(RecordID('message', message_id)),
        }
        self.record = Record(record_id, user_id, None, data=data)

    @classmethod
    def consistent_id(cls, user_id: str, message_id: str) -> str:
        """Derive a deterministic UUID string from message id + user id."""
        digest = hashlib.sha256(bytes(message_id + user_id, 'utf8')).digest()
        return str(uuid.UUID(bytes=digest[0:16]))

    def mark_as_delivered(self) -> None:
        """Stamp the receipt with the current UTC delivery time."""
        self.record['delivered_at'] = datetime.utcnow()

    def mark_as_read(self) -> None:
        """Stamp the receipt with the current UTC read time."""
        self.record['read_at'] = datetime.utcnow()
class ReceiptCollection(list):
    """A list of Receipt objects that can be persisted in one batch."""

    def save(self) -> None:
        """Persist every receipt in a single atomic record:save call.

        A no-op when the collection is empty.
        """
        if not self:
            return

        payload = []
        for receipt in self:
            payload.append(serialize_record(receipt.record))

        container = SkygearContainer(
            api_key=skyoptions.masterkey, user_id=current_user_id()
        )
        container.send_action('record:save', {
            'database_id': '_public',
            'records': payload,
            'atomic': True
        })
def create_delivered_receipts(
user_id: str,
message_ids: [str]
) -> ReceiptCollection:
"""
This is a helper function to create a collection of delivered receipts.
"""
receipts = ReceiptCollection()
for message_id in message_ids:
receipt = Receipt(user_id, message_id)
receipt.mark_as_delivered()
receipts.append(receipt)
return receipts
def create_read_receipts(
user_id: str,
message_ids: [str]
) -> ReceiptCollection:
"""
This is a helper function to create a collection of read receipts.
"""
receipts = ReceiptCollection()
for message_id in message_ids:
receipt = Receipt(user_id, message_id)
receipt.mark_as_delivered() # message that is read is also delivered
receipt.mark_as_read()
receipts.append(receipt)
return receipts
|
[
"datetime.datetime.utcnow",
"skygear.utils.context.current_user_id",
"skygear.transmitter.encoding.serialize_record",
"skygear.models.RecordID"
] |
[((1213, 1230), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1228, 1230), False, 'from datetime import datetime\n'), ((1301, 1318), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1316, 1318), False, 'from datetime import datetime\n'), ((1730, 1762), 'skygear.transmitter.encoding.serialize_record', 'serialize_record', (['receipt.record'], {}), '(receipt.record)\n', (1746, 1762), False, 'from skygear.transmitter.encoding import serialize_record\n'), ((1918, 1935), 'skygear.utils.context.current_user_id', 'current_user_id', ([], {}), '()\n', (1933, 1935), False, 'from skygear.utils.context import current_user_id\n'), ((782, 807), 'skygear.models.RecordID', 'RecordID', (['"""user"""', 'user_id'], {}), "('user', user_id)\n", (790, 807), False, 'from skygear.models import Record, RecordID, Reference\n'), ((850, 881), 'skygear.models.RecordID', 'RecordID', (['"""message"""', 'message_id'], {}), "('message', message_id)\n", (858, 881), False, 'from skygear.models import Record, RecordID, Reference\n')]
|
#!/usr/bin/env python
"""
Downloads Google's AudioSet dataset locally
"""
import argparse
import atexit
import collections
import csv
import logging.handlers
import multiprocessing as mp
import os
import random
import shutil
import sys
import traceback as tb
import urllib.request
from functools import partial
import multiprocessing_logging
import pafy
from errors import SubprocessError, FfmpegValidationError, FfmpegIncorrectDurationError
from log import init_file_logger, init_console_logger
from utils import run_command, is_url, get_filename, \
get_subset_name, get_media_filename, HTTP_ERR_PATTERN
from validation import validate_audio, validate_video
LOGGER = logging.getLogger('audiosetdl')
LOGGER.setLevel(logging.DEBUG)
EVAL_URL = 'http://storage.googleapis.com/us_audioset/youtube_corpus/v1/csv/eval_segments.csv'
BALANCED_TRAIN_URL = 'http://storage.googleapis.com/us_audioset/youtube_corpus/v1/csv/balanced_train_segments.csv'
UNBALANCED_TRAIN_URL = 'http://storage.googleapis.com/us_audioset/youtube_corpus/v1/csv/unbalanced_train_segments.csv'
def parse_arguments():
"""
Parse arguments from the command line
Returns:
args: Argument dictionary
(Type: dict[str, str])
"""
parser = argparse.ArgumentParser(description='Download AudioSet data locally')
parser.add_argument('-f',
'--ffmpeg',
dest='ffmpeg_path',
action='store',
type=str,
default='./bin/ffmpeg/ffmpeg',
help='Path to ffmpeg executable')
parser.add_argument('-fp',
'--ffprobe',
dest='ffprobe_path',
action='store',
type=str,
default='./bin/ffmpeg/ffprobe',
help='Path to ffprobe executable')
parser.add_argument('-e',
'--eval',
dest='eval_segments_path',
action='store',
type=str,
default=EVAL_URL,
help='Path to evaluation segments file')
parser.add_argument('-b',
'--balanced-train',
dest='balanced_train_segments_path',
action='store',
type=str,
default=BALANCED_TRAIN_URL,
help='Path to balanced train segments file')
parser.add_argument('-u',
'--unbalanced-train',
dest='unbalanced_train_segments_path',
action='store',
type=str,
default=UNBALANCED_TRAIN_URL,
help='Path to unbalanced train segments file')
parser.add_argument('-ac',
'--audio-codec',
dest='audio_codec',
action='store',
type=str,
default='flac',
help='Name of audio codec used by ffmpeg to encode output audio')
parser.add_argument('-asr',
'--audio-sample-rate',
dest='audio_sample_rate',
action='store',
type=int,
default=48000,
help='Target audio sample rate (in Hz)')
parser.add_argument('-abd',
'--audio-bit-depth',
dest='audio_bit_depth',
action='store',
type=int,
default=16,
help='Target audio sample bit depth')
parser.add_argument('-vc',
'--video-codec',
dest='video_codec',
action='store',
type=str,
default='h264',
help='Name of video codec used by ffmpeg to encode output audio')
parser.add_argument('-af',
'--audio-format',
dest='audio_format',
action='store',
type=str,
default='flac',
help='Name of audio format used by ffmpeg for output audio')
parser.add_argument('-vf',
'--video-format',
dest='video_format',
action='store',
type=str,
default='mp4',
help='Name of video format used by ffmpeg for output video')
parser.add_argument('-vm',
'--video-mode',
dest='video_mode',
action='store',
type=str,
default='bestvideoaudio',
help="Name of the method in which video is downloaded. " \
"'bestvideo' obtains the best quality video that " \
"does not contain an audio stream. 'bestvideoaudio' " \
"obtains the best quality video that contains an " \
"audio stream. 'bestvideowithaudio' obtains the " \
"best quality video without an audio stream and " \
" merges it with audio stream")
parser.add_argument('-vfr',
'--video-frame-rate',
dest='video_frame_rate',
action='store',
type=int,
default=30,
help='Target video frame rate (in fps)')
parser.add_argument('-nr',
'--num-retries',
dest='num_retries',
action='store',
type=int,
default=10,
help='Number of retries when ffmpeg encounters an HTTP' \
'issue, which could be to unpredictable network behavior')
parser.add_argument('-n',
'--num-workers',
dest='num_workers',
action='store',
type=int,
default=4,
help='Number of multiprocessing workers used to download videos')
parser.add_argument('-nl',
'--no-logging',
dest='disable_logging',
action='store_true',
default=False,
help='Disables logging if flag enabled')
parser.add_argument('-lp',
'--log-path',
dest='log_path',
action='store',
default=None,
help='Path to log file generated by this script. ' \
'By default, the path is "./audiosetdl.log".')
parser.add_argument('-v',
'--verbose',
dest='verbose',
action='store_true',
default=False,
help='Prints verbose info to stdout')
parser.add_argument('--data_dir',
action='store',
type=str,
help='Path to directory where AudioSet data will be stored')
return vars(parser.parse_args())
def ffmpeg(ffmpeg_path, input_path, output_path, input_args=None,
output_args=None, log_level='error', num_retries=10,
validation_callback=None, validation_args=None):
"""
Transform an input file using `ffmpeg`
Args:
ffmpeg_path: Path to ffmpeg executable
(Type: str)
input_path: Path/URL to input file(s)
(Type: str or iterable)
output_path: Path/URL to output file
(Type: str)
input_args: Options/flags for input files
(Type: list[str])
output_args: Options/flags for output files
(Type: list[str])
log_level: ffmpeg logging level
(Type: str)
num_retries: Number of retries if ffmpeg encounters an HTTP issue
(Type: int)
"""
if type(input_path) == str:
inputs = ['-i', input_path]
elif isinstance(input_path, collections.Iterable):
inputs = []
for path in input_path:
inputs.append('-i')
inputs.append(path)
else:
error_msg = '"input_path" must be a str or an iterable, but got type {}'
raise ValueError(error_msg.format(str(type(input_path))))
if not input_args:
input_args = []
if not output_args:
output_args = []
last_err = None
for attempt in range(num_retries):
try:
args = [ffmpeg_path] + input_args + inputs + output_args + [output_path, '-loglevel', log_level]
run_command(args)
# Validate if a callback was passed in
# if validation_callback is not None:
# validation_args = validation_args or {}
# validation_callback(output_path, **validation_args)
break
except SubprocessError as e:
last_err = e
stderr = e.cmd_stderr.rstrip()
if stderr.endswith('already exists. Exiting.'):
LOGGER.info('ffmpeg output file "{}" already exists.'.format(output_path))
break
elif HTTP_ERR_PATTERN.match(stderr):
# Retry if we got a 4XX or 5XX, in case it was just a network issue
continue
LOGGER.error(str(e) + '. Retrying...')
if os.path.exists(output_path):
os.remove(output_path)
except FfmpegIncorrectDurationError as e:
last_err = e
if attempt < num_retries - 1 and os.path.exists(output_path):
os.remove(output_path)
# If the duration of the output audio is different, alter the
# duration argument to account for this difference and try again
duration_diff = e.target_duration - e.actual_duration
try:
duration_idx = input_args.index('-t') + 1
input_args[duration_idx] = str(float(input_args[duration_idx]) + duration_diff)
except ValueError:
duration_idx = output_args.index('-t') + 1
output_args[duration_idx] = str(float(output_args[duration_idx]) + duration_diff)
LOGGER.warning(str(e) +'; Retrying...')
continue
except FfmpegValidationError as e:
last_err = e
if attempt < num_retries - 1 and os.path.exists(output_path):
os.remove(output_path)
# Retry if the output did not validate
LOGGER.info('ffmpeg output file "{}" did not validate: {}. Retrying...'.format(output_path, e))
continue
else:
error_msg = 'Maximum number of retries ({}) reached. Could not obtain inputs at {}. Error: {}'
LOGGER.error(error_msg.format(num_retries, input_path, str(last_err)))
def download_yt_video(ytid, ts_start, ts_end, output_dir, ffmpeg_path, ffprobe_path,
audio_codec='flac', audio_format='flac',
audio_sample_rate=48000, audio_bit_depth=16,
video_codec='h264', video_format='mp4',
video_mode='bestvideoaudio', video_frame_rate=30,
num_retries=10):
"""
Download a Youtube video (with the audio and video separated).
The audio will be saved in <output_dir>/audio and the video will be saved in
<output_dir>/video.
The output filename is of the format:
<YouTube ID>_<start time in ms>_<end time in ms>.<extension>
Args:
ytid: Youtube ID string
(Type: str)
ts_start: Segment start time (in seconds)
(Type: float)
ts_start: Segment end time (in seconds)
(Type: float)
output_dir: Output directory where video will be saved
(Type: str)
ffmpeg_path: Path to ffmpeg executable
(Type: str)
ffprobe_path: Path to ffprobe executable
(Type: str)
Keyword Args:
audio_codec: Name of audio codec used by ffmpeg to encode
output audio
(Type: str)
audio_format: Name of audio container format used for output audio
(Type: str)
audio_sample_rate: Target audio sample rate (in Hz)
(Type: int)
audio_bit_depth: Target audio sample bit depth
(Type: int)
video_codec: Name of video codec used by ffmpeg to encode
output video
(Type: str)
video_format: Name of video container format used for output video
(Type: str)
video_mode: Name of the method in which video is downloaded.
'bestvideo' obtains the best quality video that does not
contain an audio stream. 'bestvideoaudio' obtains the
best quality video that contains an audio stream.
'bestvideowithaudio' obtains the best quality video
without an audio stream and merges it with audio stream.
(Type: bool)
video_frame_rate: Target video frame rate (in fps)
(Type: int)
num_retries: Number of attempts to download and process an audio
or video file with ffmpeg
(Type: int)
Returns:
video_filepath: Filepath to video file
(Type: str)
audio_filepath: Filepath to audio file
(Type: str)
"""
# Compute some things from the segment boundaries
duration = ts_end - ts_start
# Make the output format and video URL
# Output format is in the format:
# <YouTube ID>_<start time in ms>_<end time in ms>.<extension>
media_filename = get_media_filename(ytid, ts_start, ts_end)
video_filepath = os.path.join(output_dir, 'video', media_filename + '.' + video_format)
audio_filepath = os.path.join(output_dir, 'audio', media_filename + '.' + audio_format)
video_page_url = 'https://www.youtube.com/watch?v={}'.format(ytid)
# Get the direct URLs to the videos with best audio and with best video (with audio)
video = pafy.new(video_page_url)
video_duration = video.length
end_past_video_end = False
if ts_end > video_duration:
warn_msg = "End time for segment ({} - {}) of video {} extends past end of video (length {} sec)"
LOGGER.warning(warn_msg.format(ts_start, ts_end, ytid, video_duration))
duration = video_duration - ts_start
ts_end = ts_start + duration
end_past_video_end = True
if video_mode in ('bestvideo', 'bestvideowithaudio'):
best_video = video.getbestvideo()
# If there isn't a video only option, go with best video with audio
if best_video is None:
best_video = video.getbest()
elif video_mode in ('bestvideoaudio', 'bestvideoaudionoaudio'):
best_video = video.getbest()
else:
raise ValueError('Invalid video mode: {}'.format(video_mode))
best_audio = video.getbestaudio()
best_video_url = best_video.url
best_audio_url = best_audio.url
audio_info = {
'sample_rate': audio_sample_rate,
'channels': 2,
'bitrate': audio_bit_depth,
'encoding': audio_codec.upper(),
'duration': duration
}
video_info = {
"r_frame_rate": "{}/1".format(video_frame_rate),
"avg_frame_rate": "{}/1".format(video_frame_rate),
'codec_name': video_codec.lower(),
'duration': duration
}
# Download the audio
audio_input_args = ['-n', '-ss', str(ts_start)]
audio_output_args = ['-t', str(duration),
'-ar', str(audio_sample_rate),
'-vn',
'-ac', str(audio_info['channels']),
'-sample_fmt', 's{}'.format(audio_bit_depth),
'-f', audio_format,
'-acodec', audio_codec]
ffmpeg(ffmpeg_path, best_audio_url, audio_filepath,
input_args=audio_input_args, output_args=audio_output_args,
num_retries=num_retries, validation_callback=validate_audio,
validation_args={'audio_info': audio_info,
'end_past_video_end': end_past_video_end})
if video_mode != 'bestvideowithaudio':
# Download the video
video_input_args = ['-n', '-ss', str(ts_start)]
video_output_args = ['-t', str(duration),
'-f', video_format,
'-r', str(video_frame_rate),
'-vcodec', video_codec]
# Suppress audio stream if we don't want to audio in the video
if video_mode in ('bestvideo', 'bestvideoaudionoaudio'):
video_output_args.append('-an')
ffmpeg(ffmpeg_path, best_video_url, video_filepath,
input_args=video_input_args, output_args=video_output_args,
num_retries=num_retries, validation_callback=validate_video,
validation_args={'ffprobe_path': ffprobe_path,
'video_info': video_info,
'end_past_video_end': end_past_video_end})
else:
# Download the best quality video, in lossless encoding
if video_codec != 'h264':
error_msg = 'Not currently supporting merging of best quality video with video for codec: {}'
raise NotImplementedError(error_msg.format(video_codec))
video_input_args = ['-n', '-ss', str(ts_start)]
video_output_args = ['-t', str(duration),
'-f', video_format,
'-crf', '0',
'-preset', 'medium',
'-r', str(video_frame_rate),
'-an',
'-vcodec', video_codec]
ffmpeg(ffmpeg_path, best_video_url, video_filepath,
input_args=video_input_args, output_args=video_output_args,
num_retries=num_retries)
# Merge the best lossless video with the lossless audio, and compress
merge_video_filepath = os.path.splitext(video_filepath)[0] \
+ '_merge.' + video_format
video_input_args = ['-n']
video_output_args = ['-f', video_format,
'-r', str(video_frame_rate),
'-vcodec', video_codec,
'-acodec', 'aac',
'-ar', str(audio_sample_rate),
'-ac', str(audio_info['channels']),
'-strict', 'experimental']
ffmpeg(ffmpeg_path, [video_filepath, audio_filepath], merge_video_filepath,
input_args=video_input_args, output_args=video_output_args,
num_retries=num_retries, validation_callback=validate_video,
validation_args={'ffprobe_path': ffprobe_path,
'video_info': video_info,
'end_past_video_end': end_past_video_end})
# Remove the original video file and replace with the merged version
if os.path.exists(merge_video_filepath):
os.remove(video_filepath)
shutil.move(merge_video_filepath, video_filepath)
else:
error_msg = 'Cannot find merged video for {} ({} - {}) at {}'
LOGGER.error(error_msg.format(ytid, ts_start, ts_end, merge_video_filepath))
LOGGER.info('Downloaded video {} ({} - {})'.format(ytid, ts_start, ts_end))
return video_filepath, audio_filepath
def segment_mp_worker(ytid, ts_start, ts_end, data_dir, ffmpeg_path,
ffprobe_path, **ffmpeg_cfg):
"""
Pool worker that downloads video segments.o
Wraps around the download_yt_video function to catch errors and log them.
Args:
ytid: Youtube ID string
(Type: str)
ts_start: Segment start time (in seconds)
(Type: float)
ts_end: Segment end time (in seconds)
(Type: float)
data_dir: Directory where videos will be saved
(Type: str)
ffmpeg_path: Path to ffmpeg executable
(Type: str)
ffprobe_path: Path to ffprobe executable
(Type: str)
Keyword Args:
**ffmpeg_cfg: Configuration for audio and video
downloading and decoding done by ffmpeg
(Type: dict[str, *])
"""
LOGGER.info('Attempting to download video {} ({} - {})'.format(ytid, ts_start, ts_end))
# Download the video
try:
download_yt_video(ytid, ts_start, ts_end, data_dir, ffmpeg_path,
ffprobe_path, **ffmpeg_cfg)
except SubprocessError as e:
err_msg = 'Error while downloading video {}: {}; {}'.format(ytid, e, tb.format_exc())
LOGGER.error(err_msg)
except Exception as e:
err_msg = 'Error while processing video {}: {}; {}'.format(ytid, e, tb.format_exc())
LOGGER.error(err_msg)
def init_subset_data_dir(dataset_dir, subset_name):
"""
Creates the data directories for the given subset
Args:
dataset_dir: Path to dataset directory
(Type: str)
subset_name: Name of subset
(Type: str)
Returns:
data_dir: Path to subset data dir
(Type: str)
"""
# Derive audio and video directory names for this subset
data_dir = os.path.join(dataset_dir, 'data', subset_name)
audio_dir = os.path.join(data_dir, 'audio')
video_dir = os.path.join(data_dir, 'video')
os.makedirs(audio_dir, exist_ok=True)
os.makedirs(video_dir, exist_ok=True)
return data_dir
def download_subset_file(subset_url, dataset_dir):
"""
Download a subset segments file from the given url to the given directory.
Args:
subset_url: URL to subset segments file
(Type: str)
dataset_dir: Dataset directory where subset segment file will be stored
(Type: str)
Returns:
subset_path: Path to subset segments file
(Type: str)
"""
# Get filename of the subset file
subset_filename = get_filename(subset_url)
subset_name = get_subset_name(subset_url)
subset_path = os.path.join(dataset_dir, subset_filename)
os.makedirs(dataset_dir, exist_ok=True)
# Open subset file as a CSV
if not os.path.exists(subset_path):
LOGGER.info('Downloading subset file for "{}"'.format(subset_name))
with open(subset_path, 'w') as f:
subset_data = urllib.request.urlopen(subset_url).read().decode()
f.write(subset_data)
return subset_path
def download_subset_videos(subset_path, data_dir, ffmpeg_path, ffprobe_path,
num_workers, **ffmpeg_cfg):
"""
Download subset segment file and videos
Args:
subset_path: Path to subset segments file
(Type: str)
data_dir: Directory where dataset files will be saved
(Type: str)
ffmpeg_path: Path to ffmpeg executable
(Type: str)
ffprobe_path: Path to ffprobe executable
(Type: str)
num_workers: Number of multiprocessing workers used to download videos
(Type: int)
Keyword Args:
**ffmpeg_cfg: Configuration for audio and video
downloading and decoding done by ffmpeg
(Type: dict[str, *])
"""
subset_name = get_subset_name(subset_path)
LOGGER.info('Starting download jobs for subset "{}"'.format(subset_name))
with open(subset_path, 'r') as f:
subset_data = csv.reader(f)
# Set up multiprocessing pool
pool = mp.Pool(num_workers)
try:
for row_idx, row in enumerate(subset_data):
# Skip commented lines
if row[0][0] == '#':
continue
ytid, ts_start, ts_end = row[0], float(row[1]), float(row[2])
# Skip files that already have been downloaded
media_filename = get_media_filename(ytid, ts_start, ts_end)
video_filepath = os.path.join(data_dir, 'video', media_filename + '.' + ffmpeg_cfg.get('video_format', 'mp4'))
audio_filepath = os.path.join(data_dir, 'audio', media_filename + '.' + ffmpeg_cfg.get('audio_format', 'flac'))
if os.path.exists(video_filepath) and os.path.exists(audio_filepath):
info_msg = 'Already downloaded video {} ({} - {}). Skipping.'
LOGGER.info(info_msg.format(ytid, ts_start, ts_end))
continue
worker_args = [ytid, ts_start, ts_end, data_dir, ffmpeg_path, ffprobe_path]
pool.apply_async(partial(segment_mp_worker, **ffmpeg_cfg), worker_args)
# Run serially
#segment_mp_worker(*worker_args, **ffmpeg_cfg)
except csv.Error as e:
err_msg = 'Encountered error in {} at line {}: {}'
LOGGER.error(err_msg)
sys.exit(err_msg.format(subset_path, row_idx+1, e))
except KeyboardInterrupt:
LOGGER.info("Forcing exit.")
exit()
finally:
try:
pool.close()
pool.join()
except KeyboardInterrupt:
LOGGER.info("Forcing exit.")
exit()
LOGGER.info('Finished download jobs for subset "{}"'.format(subset_name))
def download_random_subset_files(subset_url, dataset_dir, ffmpeg_path, ffprobe_path,
num_workers, max_videos=None, **ffmpeg_cfg):
"""
Download a a random subset (of size `max_videos`) of subset segment file and videos
Args:
subset_path: Path to subset segments file
(Type: str)
dataset_dir: Directory where dataset files will be saved
(Type: str)
ffmpeg_path: Path to ffmpeg executable
(Type: str)
ffprobe_path: Path to ffprobe executable
(Type: str)
num_workers: Number of multiprocessing workers used to download videos
(Type: int)
Keyword Args:
max_videos: Maximum number of videos to download in this subset. If
None, download all files in this subset.
(Type int or None)
**ffmpeg_cfg: Configuration for audio and video
downloading and decoding done by ffmpeg
(Type: dict[str, *])
"""
# FIXME: This code is outdated and shouldn't be used
# Validate max_videos
if max_videos is not None and (max_videos < 1 or type(max_videos) != int):
err_msg = 'max_videos must be a positive integer, or None'
LOGGER.error(err_msg)
raise ValueError(err_msg)
# Get filename of the subset file
subset_filename = get_filename(subset_url)
subset_name = get_subset_name(subset_url)
subset_path = os.path.join(dataset_dir, subset_filename)
data_dir = init_subset_data_dir(dataset_dir, subset_name)
# Open subset file as a CSV
if not os.path.exists(subset_path):
LOGGER.info('Downloading subset file for "{}"'.format(subset_name))
with open(subset_path, 'w') as f:
subset_data = urllib.request.urlopen(subset_url).read().decode()
f.write(subset_data)
subset_data = []
LOGGER.info('Starting download jobs for random subset (of size {}) of subset "{}"'.format(max_videos, subset_name))
with open(subset_path, 'r') as f:
subset_data_reader = csv.reader(f)
try:
for row_idx, row in enumerate(subset_data_reader):
# Skip commented lines
if row[0][0] == '#':
continue
subset_data.append(row[:3])
except csv.Error as e:
err_msg = 'Encountered error in {} at line {}: {}'
LOGGER.error(err_msg)
sys.exit(err_msg.format(subset_filename, row_idx+1, e))
# Shuffle data
random.shuffle(subset_data)
# Set up multiprocessing pool
pool = mp.Pool(num_workers)
try:
for idx, row in enumerate(subset_data):
worker_args = [row[0], float(row[1]), float(row[2]), data_dir, ffmpeg_path, ffprobe_path]
pool.apply_async(partial(segment_mp_worker, **ffmpeg_cfg), worker_args)
# Run serially
#segment_mp_worker(*worker_args, **ffmpeg_cfg)
if max_videos is not None:
if idx + 1 >= max_videos:
info_msg = 'Reached maximum ({}) for subset {}'
LOGGER.info(info_msg.format(max_videos, subset_name))
break
except KeyboardInterrupt:
LOGGER.info("Forcing exit.")
exit()
finally:
try:
pool.close()
pool.join()
except KeyboardInterrupt:
LOGGER.info("Forcing exit.")
exit()
LOGGER.info('Finished download jobs for subset "{}"'.format(subset_name))
def download_subset(subset_path, dataset_dir, ffmpeg_path, ffprobe_path,
num_workers, **ffmpeg_cfg):
"""
Download all files for a subset, including the segment file, and the audio and video files.
Args:
subset_path: Path to subset segments file
(Type: str)
dataset_dir: Path to dataset directory where files are saved
(Type: str)
ffmpeg_path: Path to ffmpeg executable
(Type: str)
ffprobe_path: Path to ffprobe executable
(Type: str)
num_workers: Number of workers to download and process videos
(Type: int)
Keyword Args:
**ffmpeg_cfg: Configuration for audio and video
downloading and decoding done by ffmpeg
(Type: dict[str, *])
Returns:
"""
if is_url(subset_path):
subset_path = download_subset_file(subset_path, dataset_dir)
subset_name = get_subset_name(subset_path)
data_dir = init_subset_data_dir(dataset_dir, subset_name)
download_subset_videos(subset_path, data_dir, ffmpeg_path, ffprobe_path,
num_workers, **ffmpeg_cfg)
def download_audioset(data_dir, ffmpeg_path, ffprobe_path, eval_segments_path,
balanced_train_segments_path, unbalanced_train_segments_path,
disable_logging=False, verbose=False, num_workers=4,
log_path=None, **ffmpeg_cfg):
"""
Download AudioSet files
Args:
data_dir: Directory where dataset files will
be saved
(Type: str)
ffmpeg_path: Path to ffmpeg executable
(Type: str)
ffprobe_path: Path to ffprobe executable
(Type: str)
eval_segments_path: Path to evaluation segments file
(Type: str)
balanced_train_segments_path: Path to balanced train segments file
(Type: str)
unbalanced_train_segments_path: Path to unbalanced train segments file
(Type: str)
Keyword Args:
disable_logging: Disables logging to a file if True
(Type: bool)
verbose: Prints verbose information to stdout
if True
(Type: bool)
num_workers: Number of multiprocessing workers used
to download videos
(Type: int)
log_path: Path where log file will be saved. If
None, saved to './audiosetdl.log'
(Type: str or None)
**ffmpeg_cfg: Configuration for audio and video
downloading and decoding done by ffmpeg
(Type: dict[str, *])
"""
init_console_logger(LOGGER, verbose=verbose)
if not disable_logging:
init_file_logger(LOGGER, log_path=log_path)
multiprocessing_logging.install_mp_handler()
LOGGER.debug('Initialized logging.')
#download_subset(eval_segments_path, data_dir, ffmpeg_path, ffprobe_path,
# num_workers, **ffmpeg_cfg)
download_subset(balanced_train_segments_path, data_dir, ffmpeg_path, ffprobe_path,
num_workers, **ffmpeg_cfg)
#download_subset(unbalanced_train_segments_path, data_dir, ffmpeg_path, ffprobe_path,
# num_workers, **ffmpeg_cfg)
if __name__ == '__main__':
# TODO: Handle killing of ffmpeg (https://stackoverflow.com/questions/6488275/terminal-text-becomes-invisible-after-terminating-subprocess)
# so we don't have to use this hack
atexit.register(lambda: os.system('stty sane') if sys.stdin.isatty() else None)
download_audioset(**parse_arguments())
|
[
"os.remove",
"csv.reader",
"argparse.ArgumentParser",
"log.init_console_logger",
"random.shuffle",
"utils.get_filename",
"os.path.join",
"sys.stdin.isatty",
"utils.HTTP_ERR_PATTERN.match",
"utils.get_subset_name",
"pafy.new",
"os.path.exists",
"multiprocessing_logging.install_mp_handler",
"traceback.format_exc",
"utils.run_command",
"functools.partial",
"os.system",
"multiprocessing.Pool",
"utils.get_media_filename",
"os.makedirs",
"utils.is_url",
"log.init_file_logger",
"os.path.splitext",
"shutil.move"
] |
[((1252, 1321), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Download AudioSet data locally"""'}), "(description='Download AudioSet data locally')\n", (1275, 1321), False, 'import argparse\n'), ((14755, 14797), 'utils.get_media_filename', 'get_media_filename', (['ytid', 'ts_start', 'ts_end'], {}), '(ytid, ts_start, ts_end)\n', (14773, 14797), False, 'from utils import run_command, is_url, get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((14819, 14889), 'os.path.join', 'os.path.join', (['output_dir', '"""video"""', "(media_filename + '.' + video_format)"], {}), "(output_dir, 'video', media_filename + '.' + video_format)\n", (14831, 14889), False, 'import os\n'), ((14911, 14981), 'os.path.join', 'os.path.join', (['output_dir', '"""audio"""', "(media_filename + '.' + audio_format)"], {}), "(output_dir, 'audio', media_filename + '.' + audio_format)\n", (14923, 14981), False, 'import os\n'), ((15156, 15180), 'pafy.new', 'pafy.new', (['video_page_url'], {}), '(video_page_url)\n', (15164, 15180), False, 'import pafy\n'), ((22662, 22708), 'os.path.join', 'os.path.join', (['dataset_dir', '"""data"""', 'subset_name'], {}), "(dataset_dir, 'data', subset_name)\n", (22674, 22708), False, 'import os\n'), ((22725, 22756), 'os.path.join', 'os.path.join', (['data_dir', '"""audio"""'], {}), "(data_dir, 'audio')\n", (22737, 22756), False, 'import os\n'), ((22773, 22804), 'os.path.join', 'os.path.join', (['data_dir', '"""video"""'], {}), "(data_dir, 'video')\n", (22785, 22804), False, 'import os\n'), ((22809, 22846), 'os.makedirs', 'os.makedirs', (['audio_dir'], {'exist_ok': '(True)'}), '(audio_dir, exist_ok=True)\n', (22820, 22846), False, 'import os\n'), ((22851, 22888), 'os.makedirs', 'os.makedirs', (['video_dir'], {'exist_ok': '(True)'}), '(video_dir, exist_ok=True)\n', (22862, 22888), False, 'import os\n'), ((23429, 23453), 'utils.get_filename', 'get_filename', (['subset_url'], {}), '(subset_url)\n', (23441, 23453), 
False, 'from utils import run_command, is_url, get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((23472, 23499), 'utils.get_subset_name', 'get_subset_name', (['subset_url'], {}), '(subset_url)\n', (23487, 23499), False, 'from utils import run_command, is_url, get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((23518, 23560), 'os.path.join', 'os.path.join', (['dataset_dir', 'subset_filename'], {}), '(dataset_dir, subset_filename)\n', (23530, 23560), False, 'import os\n'), ((23566, 23605), 'os.makedirs', 'os.makedirs', (['dataset_dir'], {'exist_ok': '(True)'}), '(dataset_dir, exist_ok=True)\n', (23577, 23605), False, 'import os\n'), ((24815, 24843), 'utils.get_subset_name', 'get_subset_name', (['subset_path'], {}), '(subset_path)\n', (24830, 24843), False, 'from utils import run_command, is_url, get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((28309, 28333), 'utils.get_filename', 'get_filename', (['subset_url'], {}), '(subset_url)\n', (28321, 28333), False, 'from utils import run_command, is_url, get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((28352, 28379), 'utils.get_subset_name', 'get_subset_name', (['subset_url'], {}), '(subset_url)\n', (28367, 28379), False, 'from utils import run_command, is_url, get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((28398, 28440), 'os.path.join', 'os.path.join', (['dataset_dir', 'subset_filename'], {}), '(dataset_dir, subset_filename)\n', (28410, 28440), False, 'import os\n'), ((29473, 29500), 'random.shuffle', 'random.shuffle', (['subset_data'], {}), '(subset_data)\n', (29487, 29500), False, 'import random\n'), ((29547, 29567), 'multiprocessing.Pool', 'mp.Pool', (['num_workers'], {}), '(num_workers)\n', (29554, 29567), True, 'import multiprocessing as mp\n'), ((31462, 31481), 'utils.is_url', 'is_url', (['subset_path'], {}), '(subset_path)\n', (31468, 31481), False, 'from utils import run_command, is_url, 
get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((31571, 31599), 'utils.get_subset_name', 'get_subset_name', (['subset_path'], {}), '(subset_path)\n', (31586, 31599), False, 'from utils import run_command, is_url, get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((33894, 33938), 'log.init_console_logger', 'init_console_logger', (['LOGGER'], {'verbose': 'verbose'}), '(LOGGER, verbose=verbose)\n', (33913, 33938), False, 'from log import init_file_logger, init_console_logger\n'), ((34023, 34067), 'multiprocessing_logging.install_mp_handler', 'multiprocessing_logging.install_mp_handler', ([], {}), '()\n', (34065, 34067), False, 'import multiprocessing_logging\n'), ((20222, 20258), 'os.path.exists', 'os.path.exists', (['merge_video_filepath'], {}), '(merge_video_filepath)\n', (20236, 20258), False, 'import os\n'), ((23650, 23677), 'os.path.exists', 'os.path.exists', (['subset_path'], {}), '(subset_path)\n', (23664, 23677), False, 'import os\n'), ((24983, 24996), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (24993, 24996), False, 'import csv\n'), ((25051, 25071), 'multiprocessing.Pool', 'mp.Pool', (['num_workers'], {}), '(num_workers)\n', (25058, 25071), True, 'import multiprocessing as mp\n'), ((28547, 28574), 'os.path.exists', 'os.path.exists', (['subset_path'], {}), '(subset_path)\n', (28561, 28574), False, 'import os\n'), ((29013, 29026), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (29023, 29026), False, 'import csv\n'), ((33975, 34018), 'log.init_file_logger', 'init_file_logger', (['LOGGER'], {'log_path': 'log_path'}), '(LOGGER, log_path=log_path)\n', (33991, 34018), False, 'from log import init_file_logger, init_console_logger\n'), ((9266, 9283), 'utils.run_command', 'run_command', (['args'], {}), '(args)\n', (9277, 9283), False, 'from utils import run_command, is_url, get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((20272, 20297), 'os.remove', 'os.remove', (['video_filepath'], 
{}), '(video_filepath)\n', (20281, 20297), False, 'import os\n'), ((20310, 20359), 'shutil.move', 'shutil.move', (['merge_video_filepath', 'video_filepath'], {}), '(merge_video_filepath, video_filepath)\n', (20321, 20359), False, 'import shutil\n'), ((10035, 10062), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (10049, 10062), False, 'import os\n'), ((22012, 22027), 'traceback.format_exc', 'tb.format_exc', ([], {}), '()\n', (22025, 22027), True, 'import traceback as tb\n'), ((22162, 22177), 'traceback.format_exc', 'tb.format_exc', ([], {}), '()\n', (22175, 22177), True, 'import traceback as tb\n'), ((25421, 25463), 'utils.get_media_filename', 'get_media_filename', (['ytid', 'ts_start', 'ts_end'], {}), '(ytid, ts_start, ts_end)\n', (25439, 25463), False, 'from utils import run_command, is_url, get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((29756, 29796), 'functools.partial', 'partial', (['segment_mp_worker'], {}), '(segment_mp_worker, **ffmpeg_cfg)\n', (29763, 29796), False, 'from functools import partial\n'), ((34782, 34800), 'sys.stdin.isatty', 'sys.stdin.isatty', ([], {}), '()\n', (34798, 34800), False, 'import sys\n'), ((34756, 34778), 'os.system', 'os.system', (['"""stty sane"""'], {}), "('stty sane')\n", (34765, 34778), False, 'import os\n'), ((9827, 9857), 'utils.HTTP_ERR_PATTERN.match', 'HTTP_ERR_PATTERN.match', (['stderr'], {}), '(stderr)\n', (9849, 9857), False, 'from utils import run_command, is_url, get_filename, get_subset_name, get_media_filename, HTTP_ERR_PATTERN\n'), ((10080, 10102), 'os.remove', 'os.remove', (['output_path'], {}), '(output_path)\n', (10089, 10102), False, 'import os\n'), ((10224, 10251), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (10238, 10251), False, 'import os\n'), ((10269, 10291), 'os.remove', 'os.remove', (['output_path'], {}), '(output_path)\n', (10278, 10291), False, 'import os\n'), ((11056, 11083), 'os.path.exists', 
'os.path.exists', (['output_path'], {}), '(output_path)\n', (11070, 11083), False, 'import os\n'), ((11101, 11123), 'os.remove', 'os.remove', (['output_path'], {}), '(output_path)\n', (11110, 11123), False, 'import os\n'), ((19184, 19216), 'os.path.splitext', 'os.path.splitext', (['video_filepath'], {}), '(video_filepath)\n', (19200, 19216), False, 'import os\n'), ((25738, 25768), 'os.path.exists', 'os.path.exists', (['video_filepath'], {}), '(video_filepath)\n', (25752, 25768), False, 'import os\n'), ((25773, 25803), 'os.path.exists', 'os.path.exists', (['audio_filepath'], {}), '(audio_filepath)\n', (25787, 25803), False, 'import os\n'), ((26115, 26155), 'functools.partial', 'partial', (['segment_mp_worker'], {}), '(segment_mp_worker, **ffmpeg_cfg)\n', (26122, 26155), False, 'from functools import partial\n')]
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import time
import Image
import ImageDraw
import ImageFont
from Adafruit_LED_Backpack import Matrix8x8
def print_8x8(image):
display.clear()
display.set_image(image)
display.write_display()
a = u'동해물과 백두산이 마르고 닳도록 하느님이 보우하사 '
display = Matrix8x8.Matrix8x8()
display.begin()
image = Image.new('1', (1250,8))
draw = ImageDraw.Draw(image)
font = ImageFont.truetype('AppleGothic.ttf', 8)
draw.text((0, 0), a, font=font, fill=255)
w, h = draw.textsize(a, font=font)
while True:
for x in range(0,w-1):
b = image.crop((x, 0, x+8, 8))
print_8x8(b)
time.sleep(0.02)
|
[
"ImageDraw.Draw",
"Image.new",
"ImageFont.truetype",
"time.sleep",
"Adafruit_LED_Backpack.Matrix8x8.Matrix8x8"
] |
[((306, 327), 'Adafruit_LED_Backpack.Matrix8x8.Matrix8x8', 'Matrix8x8.Matrix8x8', ([], {}), '()\n', (325, 327), False, 'from Adafruit_LED_Backpack import Matrix8x8\n'), ((352, 377), 'Image.new', 'Image.new', (['"""1"""', '(1250, 8)'], {}), "('1', (1250, 8))\n", (361, 377), False, 'import Image\n'), ((384, 405), 'ImageDraw.Draw', 'ImageDraw.Draw', (['image'], {}), '(image)\n', (398, 405), False, 'import ImageDraw\n'), ((414, 454), 'ImageFont.truetype', 'ImageFont.truetype', (['"""AppleGothic.ttf"""', '(8)'], {}), "('AppleGothic.ttf', 8)\n", (432, 454), False, 'import ImageFont\n'), ((648, 664), 'time.sleep', 'time.sleep', (['(0.02)'], {}), '(0.02)\n', (658, 664), False, 'import time\n')]
|
import pyrealsense as pyrs
pyrs.start()
import datetime
import scipy.misc
class CameraCalibration(Sensor):
def __init__(self, dataStore, pattern, length=100):
## this is a trick to disable the recording later on
self.calibDone = False
self.cnt = -1
self.dataStore = dataStore
self.pattern = pattern
self.length = length
def _datetime_now():
datetime_format = "%Y-%m-%d_%H:%M:%S:%f"
return datetime.datetime.now().strftime(datetime_format)
self.capture_function = {
'd' : pyrs.get_depth,
'c' : pyrs.get_colour,
'v' : pyrs.get_pointcloud,
't' : _datetime_now,
}
def _prepare_nframes(self, session, length):
dataMap = {
'c' : ((480, 640, 3), 'u1'), # colour map
'd' : ((480, 640), 'i2'), # depth map
't': ((), 'S26'), # sample time
}
self.dataStore.create_group(session)
datagrp = self.dataStore[session]
datagrp.attrs.create('length', length)
for k,v in dataMap.items():
datagrp.create_dataset(k, (length,)+v[0], dtype=v[1])
def start(self):
pattern = scipy.misc.imread(self.pattern)
self.dataStore.create_dataset('pattern', data=pattern)
self._prepare_nframes('calib/before', self.length)
rec = RecordSensors('calib/before', self.length, [self])
rec.run()
def prepare_nframes(self, session, length): pass
def record_frame(self, session, frame):
if self.calibDone == False:
self.cnt += 1
if self.cnt < self.length:
for key in 'cdt':
self.dataStore[session][key][frame] = self.capture_function[key]()
else:
calibDone = True
class Camera(Sensor):
def __init__(self, types, dataStore):
self.types = types
self.dataStore = dataStore
def _datetime_now():
datetime_format = "%Y-%m-%d_%H:%M:%S:%f"
return datetime.datetime.now().strftime(datetime_format)
self.capture_function = {
'd' : pyrs.get_depth,
'c' : pyrs.get_colour,
'v' : pyrs.get_pointcloud,
't' : _datetime_now,
}
def start(self): pass
def prepare_nframes(self, session, length):
dataMap = {
'd' : ((480, 640), 'i2'), # depth map
't': ((), 'S26'), # sample time
}
self.dataStore.create_group(session)
datagrp = self.dataStore[session]
datagrp.attrs.create('length', length)
for k,v in dataMap.items():
datagrp.create_dataset(k, (length,)+v[0], dtype=v[1])
def record_frame(self, session, frame):
for key in self.types:
self.dataStore[session][key][frame] = self.capture_function[key]()
|
[
"pyrealsense.start",
"datetime.datetime.now"
] |
[((28, 40), 'pyrealsense.start', 'pyrs.start', ([], {}), '()\n', (38, 40), True, 'import pyrealsense as pyrs\n'), ((479, 502), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (500, 502), False, 'import datetime\n'), ((2085, 2108), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2106, 2108), False, 'import datetime\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-07 15:55
from __future__ import unicode_literals
import re
from django.core.validators import RegexValidator
from django.db import migrations, models
from django.conf import settings
import django.utils.timezone
script_regex = re.compile(r'(<script[\sa-zA-Z0-9"\';-_=/]*>([\s\S]+?)</script>)')
def migrate_script_tags_to_javascript(apps, schema_editor):
TestList = apps.get_model('qa', 'TestList')
TestListCycle = apps.get_model('qa', 'TestListCycle')
for tl in TestList.objects.all():
if tl.description and script_regex.search(tl.description) is not None:
js = script_regex.search(tl.description).groups(0)[1]
js_tags = script_regex.search(tl.description).groups(0)[0]
tl.javascript = js
tl.description = tl.description.replace(js_tags, '')
tl.save()
for tlc in TestListCycle.objects.all():
if tlc.description and script_regex.search(tlc.description) is not None:
js = script_regex.search(tlc.description).groups(0)[1]
js_tags = script_regex.search(tlc.description).groups(0)[0]
tlc.javascript = js
tlc.description = tlc.description.replace(js_tags, '')
tlc.save()
def migrate_test_list_instance_comments(apps, schema):
TestListInstance = apps.get_model('qa', 'TestListInstance')
ContentType = apps.get_model('contenttypes', 'ContentType')
Comment = apps.get_model('django_comments', 'Comment')
tli_ct = ContentType.objects.get_for_model(TestListInstance)
tli_qs = TestListInstance.objects.filter(comment__isnull=False).exclude(comment='')
for tli in tli_qs:
comment = Comment(
content_type=tli_ct,
object_pk=tli.id,
comment=tli.comment,
submit_date=tli.created,
site_id=getattr(settings, "SITE_ID", 1),
)
comment.save()
class Migration(migrations.Migration):
dependencies = [
('qa', '0001_initial'),
('django_comments', '0003_add_submit_date_index'),
]
operations = [
migrations.AlterField(
model_name='autoreviewrule',
name='pass_fail',
field=models.CharField(choices=[('not_done', 'Not Done'), ('ok', 'OK'), ('tolerance', 'Tolerance'), ('action', 'Action'), ('no_tol', 'No Tol Set')], max_length=15, unique=True),
),
migrations.AlterField(
model_name='reference',
name='type',
field=models.CharField(choices=[('numerical', 'Numerical'), ('boolean', 'Yes / No')], default='numerical', max_length=15),
),
migrations.AlterField(
model_name='test',
name='chart_visibility',
field=models.BooleanField(default=True, verbose_name='Test item visible in charts?'),
),
migrations.AlterField(
model_name='test',
name='display_image',
field=models.BooleanField(default=False, help_text='Image uploads only: Show uploaded images under the testlist', verbose_name='Display image'),
),
migrations.AlterField(
model_name='test',
name='slug',
field=models.SlugField(help_text='A short variable name consisting of alphanumeric characters and underscores for this test (to be used in composite calculations). ', max_length=128, verbose_name='Macro name'),
),
migrations.AlterField(
model_name='test',
name='type',
field=models.CharField(choices=[('boolean', 'Boolean'), ('simple', 'Simple Numerical'), ('multchoice', 'Multiple Choice'), ('constant', 'Constant'), ('composite', 'Composite'), ('string', 'String'), ('scomposite', 'String Composite'), ('upload', 'File Upload')], default='simple', help_text='Indicate if this test is a Boolean,Simple Numerical,Multiple Choice,Constant,Composite,String,String Composite,File Upload', max_length=10),
),
migrations.AlterField(
model_name='testinstance',
name='pass_fail',
field=models.CharField(choices=[('not_done', 'Not Done'), ('ok', 'OK'), ('tolerance', 'Tolerance'), ('action', 'Action'), ('no_tol', 'No Tol Set')], db_index=True, editable=False, max_length=20),
),
migrations.AlterField(
model_name='testinstance',
name='work_completed',
field=models.DateTimeField(db_index=True, default=django.utils.timezone.now, help_text='Format DD-MM-YY hh:mm (hh:mm is 24h time e.g. 31-05-12 14:30)'),
),
migrations.AlterField(
model_name='testlist',
name='warning_message',
field=models.CharField(default='Do not treat', help_text='Message given when a test value is out of tolerance', max_length=255),
),
migrations.AlterField(
model_name='testlistcycle',
name='day_option_text',
field=models.CharField(choices=[('day', 'Day'), ('tlname', 'Test List Name')], default='day', max_length=8),
),
migrations.AlterField(
model_name='testlistcycle',
name='drop_down_label',
field=models.CharField(default='Choose Day', max_length=128),
),
migrations.AlterField(
model_name='tolerance',
name='type',
field=models.CharField(choices=[('absolute', 'Absolute'), ('percent', 'Percentage'), ('multchoice', 'Multiple Choice')], help_text='Select whether this will be an absolute or relative tolerance criteria', max_length=20),
),
migrations.AddField(
model_name='testlist',
name='javascript',
field=models.TextField(blank=True, help_text='Any extra javascript to run when loading perform page', null=True),
),
migrations.AddField(
model_name='testlistcycle',
name='javascript',
field=models.TextField(blank=True, help_text='Any extra javascript to run when loading perform page', null=True),
),
migrations.AddField(
model_name='testlistinstance',
name='due_date',
field=models.DateTimeField(blank=True, help_text='When was this session due when it was performed', null=True),
),
migrations.RunPython(migrate_script_tags_to_javascript),
migrations.AddField(
model_name='unittestcollection',
name='name',
field=models.CharField(db_index=True, default='', editable=False, max_length=255),
),
migrations.AlterField(
model_name='unittestcollection',
name='frequency',
field=models.ForeignKey(blank=True, help_text='Frequency with which this test list is to be performed', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='unittestcollections', to='qa.Frequency'),
),
migrations.RunPython(migrate_test_list_instance_comments),
migrations.RemoveField(
model_name='testlistinstance',
name='comment',
),
]
|
[
"django.db.migrations.RunPython",
"django.db.models.TextField",
"django.db.migrations.RemoveField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.SlugField",
"django.db.models.DateTimeField",
"re.compile"
] |
[((294, 364), 're.compile', 're.compile', (['"""(<script[\\\\sa-zA-Z0-9"\\\\\';-_=/]*>([\\\\s\\\\S]+?)</script>)"""'], {}), '(\'(<script[\\\\sa-zA-Z0-9"\\\\\\\';-_=/]*>([\\\\s\\\\S]+?)</script>)\')\n', (304, 364), False, 'import re\n'), ((6352, 6407), 'django.db.migrations.RunPython', 'migrations.RunPython', (['migrate_script_tags_to_javascript'], {}), '(migrate_script_tags_to_javascript)\n', (6372, 6407), False, 'from django.db import migrations, models\n'), ((6967, 7024), 'django.db.migrations.RunPython', 'migrations.RunPython', (['migrate_test_list_instance_comments'], {}), '(migrate_test_list_instance_comments)\n', (6987, 7024), False, 'from django.db import migrations, models\n'), ((7034, 7103), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""testlistinstance"""', 'name': '"""comment"""'}), "(model_name='testlistinstance', name='comment')\n", (7056, 7103), False, 'from django.db import migrations, models\n'), ((2261, 2440), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('not_done', 'Not Done'), ('ok', 'OK'), ('tolerance', 'Tolerance'), (\n 'action', 'Action'), ('no_tol', 'No Tol Set')]", 'max_length': '(15)', 'unique': '(True)'}), "(choices=[('not_done', 'Not Done'), ('ok', 'OK'), (\n 'tolerance', 'Tolerance'), ('action', 'Action'), ('no_tol',\n 'No Tol Set')], max_length=15, unique=True)\n", (2277, 2440), False, 'from django.db import migrations, models\n'), ((2554, 2673), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('numerical', 'Numerical'), ('boolean', 'Yes / No')]", 'default': '"""numerical"""', 'max_length': '(15)'}), "(choices=[('numerical', 'Numerical'), ('boolean',\n 'Yes / No')], default='numerical', max_length=15)\n", (2570, 2673), False, 'from django.db import migrations, models\n'), ((2799, 2877), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""Test item visible in charts?"""'}), "(default=True, 
verbose_name='Test item visible in charts?')\n", (2818, 2877), False, 'from django.db import migrations, models\n'), ((3004, 3150), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""Image uploads only: Show uploaded images under the testlist"""', 'verbose_name': '"""Display image"""'}), "(default=False, help_text=\n 'Image uploads only: Show uploaded images under the testlist',\n verbose_name='Display image')\n", (3023, 3150), False, 'from django.db import migrations, models\n'), ((3259, 3472), 'django.db.models.SlugField', 'models.SlugField', ([], {'help_text': '"""A short variable name consisting of alphanumeric characters and underscores for this test (to be used in composite calculations). """', 'max_length': '(128)', 'verbose_name': '"""Macro name"""'}), "(help_text=\n 'A short variable name consisting of alphanumeric characters and underscores for this test (to be used in composite calculations). '\n , max_length=128, verbose_name='Macro name')\n", (3275, 3472), False, 'from django.db import migrations, models\n'), ((3580, 4033), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('boolean', 'Boolean'), ('simple', 'Simple Numerical'), ('multchoice',\n 'Multiple Choice'), ('constant', 'Constant'), ('composite', 'Composite'\n ), ('string', 'String'), ('scomposite', 'String Composite'), ('upload',\n 'File Upload')]", 'default': '"""simple"""', 'help_text': '"""Indicate if this test is a Boolean,Simple Numerical,Multiple Choice,Constant,Composite,String,String Composite,File Upload"""', 'max_length': '(10)'}), "(choices=[('boolean', 'Boolean'), ('simple',\n 'Simple Numerical'), ('multchoice', 'Multiple Choice'), ('constant',\n 'Constant'), ('composite', 'Composite'), ('string', 'String'), (\n 'scomposite', 'String Composite'), ('upload', 'File Upload')], default=\n 'simple', help_text=\n 'Indicate if this test is a Boolean,Simple Numerical,Multiple Choice,Constant,Composite,String,String 
Composite,File Upload'\n , max_length=10)\n", (3596, 4033), False, 'from django.db import migrations, models\n'), ((4136, 4333), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('not_done', 'Not Done'), ('ok', 'OK'), ('tolerance', 'Tolerance'), (\n 'action', 'Action'), ('no_tol', 'No Tol Set')]", 'db_index': '(True)', 'editable': '(False)', 'max_length': '(20)'}), "(choices=[('not_done', 'Not Done'), ('ok', 'OK'), (\n 'tolerance', 'Tolerance'), ('action', 'Action'), ('no_tol',\n 'No Tol Set')], db_index=True, editable=False, max_length=20)\n", (4152, 4333), False, 'from django.db import migrations, models\n'), ((4460, 4609), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'db_index': '(True)', 'default': 'django.utils.timezone.now', 'help_text': '"""Format DD-MM-YY hh:mm (hh:mm is 24h time e.g. 31-05-12 14:30)"""'}), "(db_index=True, default=django.utils.timezone.now,\n help_text='Format DD-MM-YY hh:mm (hh:mm is 24h time e.g. 31-05-12 14:30)')\n", (4480, 4609), False, 'from django.db import migrations, models\n'), ((4738, 4864), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Do not treat"""', 'help_text': '"""Message given when a test value is out of tolerance"""', 'max_length': '(255)'}), "(default='Do not treat', help_text=\n 'Message given when a test value is out of tolerance', max_length=255)\n", (4754, 4864), False, 'from django.db import migrations, models\n'), ((4997, 5102), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('day', 'Day'), ('tlname', 'Test List Name')]", 'default': '"""day"""', 'max_length': '(8)'}), "(choices=[('day', 'Day'), ('tlname', 'Test List Name')],\n default='day', max_length=8)\n", (5013, 5102), False, 'from django.db import migrations, models\n'), ((5236, 5290), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Choose Day"""', 'max_length': '(128)'}), "(default='Choose Day', max_length=128)\n", (5252, 5290), False, 'from django.db 
import migrations, models\n'), ((5413, 5639), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('absolute', 'Absolute'), ('percent', 'Percentage'), ('multchoice',\n 'Multiple Choice')]", 'help_text': '"""Select whether this will be an absolute or relative tolerance criteria"""', 'max_length': '(20)'}), "(choices=[('absolute', 'Absolute'), ('percent',\n 'Percentage'), ('multchoice', 'Multiple Choice')], help_text=\n 'Select whether this will be an absolute or relative tolerance criteria',\n max_length=20)\n", (5429, 5639), False, 'from django.db import migrations, models\n'), ((5752, 5863), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'help_text': '"""Any extra javascript to run when loading perform page"""', 'null': '(True)'}), "(blank=True, help_text=\n 'Any extra javascript to run when loading perform page', null=True)\n", (5768, 5863), False, 'from django.db import migrations, models\n'), ((5989, 6100), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'help_text': '"""Any extra javascript to run when loading perform page"""', 'null': '(True)'}), "(blank=True, help_text=\n 'Any extra javascript to run when loading perform page', null=True)\n", (6005, 6100), False, 'from django.db import migrations, models\n'), ((6227, 6336), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'help_text': '"""When was this session due when it was performed"""', 'null': '(True)'}), "(blank=True, help_text=\n 'When was this session due when it was performed', null=True)\n", (6247, 6336), False, 'from django.db import migrations, models\n'), ((6526, 6601), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'default': '""""""', 'editable': '(False)', 'max_length': '(255)'}), "(db_index=True, default='', editable=False, max_length=255)\n", (6542, 6601), False, 'from django.db import migrations, models\n'), ((6738, 6960), 'django.db.models.ForeignKey', 
'models.ForeignKey', ([], {'blank': '(True)', 'help_text': '"""Frequency with which this test list is to be performed"""', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""unittestcollections"""', 'to': '"""qa.Frequency"""'}), "(blank=True, help_text=\n 'Frequency with which this test list is to be performed', null=True,\n on_delete=django.db.models.deletion.CASCADE, related_name=\n 'unittestcollections', to='qa.Frequency')\n", (6755, 6960), False, 'from django.db import migrations, models\n')]
|
from aiogram import types
from aiogram.dispatcher.filters import Command
from aiogram.dispatcher.filters.builtin import CommandHelp
from loader import dp, db
from utils.misc import rate_limit
@rate_limit(5, 'help')
@dp.message_handler(CommandHelp())
async def bot_help(message: types.Message):
text = [
'Список команд: ',
'/start - Почати діалог',
'/help - Отримати довідку',
# '/clear - Застрягли в меню - це допоможе',
# '/count - Кількість користувачів сервісу',
]
await message.answer('\n'.join(text))
@dp.message_handler(Command("stat_info"))
async def bot_help(message: types.Message):
parcels_created = await db.get_parcels_created()
parcels_sent = await db.get_parcels_sent()
parcels_received = await db.get_parcels_received()
door_not_opened = await db.get_door_not_opened()
await message.answer(f"<b>Відправлено:</b> {parcels_sent}\n"
f"<b>Отримано:</b> {parcels_received}\n"
f"<b>Створено:</b> {parcels_created}\n"
f"<b>Отримано(Повторне відкриття комірки):</b> {door_not_opened} \n"
f"<b>Всього:</b> {parcels_sent + parcels_received}")
|
[
"utils.misc.rate_limit",
"aiogram.dispatcher.filters.Command",
"aiogram.dispatcher.filters.builtin.CommandHelp",
"loader.db.get_parcels_received",
"loader.db.get_parcels_created",
"loader.db.get_parcels_sent",
"loader.db.get_door_not_opened"
] |
[((195, 216), 'utils.misc.rate_limit', 'rate_limit', (['(5)', '"""help"""'], {}), "(5, 'help')\n", (205, 216), False, 'from utils.misc import rate_limit\n'), ((237, 250), 'aiogram.dispatcher.filters.builtin.CommandHelp', 'CommandHelp', ([], {}), '()\n', (248, 250), False, 'from aiogram.dispatcher.filters.builtin import CommandHelp\n'), ((584, 604), 'aiogram.dispatcher.filters.Command', 'Command', (['"""stat_info"""'], {}), "('stat_info')\n", (591, 604), False, 'from aiogram.dispatcher.filters import Command\n'), ((678, 702), 'loader.db.get_parcels_created', 'db.get_parcels_created', ([], {}), '()\n', (700, 702), False, 'from loader import dp, db\n'), ((728, 749), 'loader.db.get_parcels_sent', 'db.get_parcels_sent', ([], {}), '()\n', (747, 749), False, 'from loader import dp, db\n'), ((779, 804), 'loader.db.get_parcels_received', 'db.get_parcels_received', ([], {}), '()\n', (802, 804), False, 'from loader import dp, db\n'), ((833, 857), 'loader.db.get_door_not_opened', 'db.get_door_not_opened', ([], {}), '()\n', (855, 857), False, 'from loader import dp, db\n')]
|
from typing import OrderedDict
import ruamel
from ruamel.yaml import YAML
from ruamel.yaml.representer import RoundTripRepresenter
import sys
import os
yaml=YAML(pure=True) # default, if not specfied, is 'rt' (round-trip)
class MyRepresenter(RoundTripRepresenter):
pass
ruamel.yaml.add_representer(OrderedDict, MyRepresenter.represent_dict,
representer=MyRepresenter)
YAML.Representer = MyRepresenter
with open(r'skeletons/cve.yml', 'r') as skel_file:
skel = yaml.load(skel_file)
skel_keys = list(skel.keys())
keys_to_keep = "fixes vccs CVE".split()
for entry in os.scandir(r'cves/'):
with open(entry.path, 'r') as f:
h = yaml.load(f)
for key in skel_keys:
if key not in keys_to_keep:
h[key] = skel[key]
with open(entry.path, 'w') as f:
print(yaml.dump(h, f))
|
[
"ruamel.yaml.add_representer",
"os.scandir",
"ruamel.yaml.YAML"
] |
[((158, 173), 'ruamel.yaml.YAML', 'YAML', ([], {'pure': '(True)'}), '(pure=True)\n', (162, 173), False, 'from ruamel.yaml import YAML\n'), ((279, 380), 'ruamel.yaml.add_representer', 'ruamel.yaml.add_representer', (['OrderedDict', 'MyRepresenter.represent_dict'], {'representer': 'MyRepresenter'}), '(OrderedDict, MyRepresenter.represent_dict,\n representer=MyRepresenter)\n', (306, 380), False, 'import ruamel\n'), ((608, 627), 'os.scandir', 'os.scandir', (['"""cves/"""'], {}), "('cves/')\n", (618, 627), False, 'import os\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
## For Testing Matrix2vec on dataset MNIST
## PCA, Kernel PCA, ISOMAP, NMDS, LLE, LE
# import tensorflow as ts
import logging
import os.path
import sys
import multiprocessing
import numpy as np
import argparse
import scipy.io
import datetime
import matrix2vec
from sklearn import datasets as ds
from sklearn.datasets import load_digits
from sklearn.manifold import LocallyLinearEmbedding
# from keras.datasets import mnist
from sklearn.manifold import SpectralEmbedding
from sklearn.decomposition import PCA
from sklearn.manifold import MDS, Isomap
from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import MinMaxScaler
from sklearn import svm, metrics
from sklearn.preprocessing import scale
#import cPickle as pickle
import pickle
from scipy import misc
import matplotlib.image as mpimg
def unpickle(file):
with open(file, 'rb') as fo:
dict = pickle.load(fo)
return np.array(dict['data']), np.array(dict['labels'])
def load_data(path):
x_train, y_train = ds.load_svmlight_file(path)
x_train.todense()
return x_train,y_train
def read_data(data_file):
import gzip
f = gzip.open(data_file, "rb")
# train, val, test = pickle.load(f)
train, val, test = pickle.load(f, encoding='bytes')
f.close()
train_x = train[0]
train_y = train[1]
test_x = test[0]
test_y = test[1]
return train_x, train_y, test_x, test_y
def resizeSVHDShape(matrix):
svhd = np.zeros((5000,3072))
[rows, cols] = svhd.shape
for r in range(rows):
for c in range(cols):
svhd[r][c]=matrix[(c%1024)/32][(c%1024)%32][c/1024][r]
return svhd
if __name__ == "__main__":
#args = parse_args()
# CoilData = scipy.io.loadmat("D:/NSFC/project/data/coil20/COIL20.mat") # Loading coil20.mat
# coil_x = CoilData['X']
# coil_y = CoilData['Y']
# x_train = coil_x
# y_train = []
# for item in coil_y:
# y_train.append(item[0])
# print("Load the COIL20 dataset finished...")
# SVHDData = scipy.io.loadmat("D:/NSFC/project/data/SVHN/train_32x32.mat") # Loading SVHN
# svhd_x = SVHDData['X']
# x_train = resizeSVHDShape(svhd_x)
# svhd_y = SVHDData['y']
# y_train = []
# for item in svhd_y:
# y_train.append(item[0])
# print("Load dataset finished...")
#data,label =load_data("D:/NSFC/project/data/movie/train.bow")
# data, label = load_data(args.input)
x_train, y_train, x_test, y_test = read_data("D:/NSFC/project/data/MNIST/origData/mnistpklgz/mnist.pkl.gz")
print("Load dataset MNIST finished...")
program = os.path.basename(sys.argv[0])
logger = logging.getLogger(program)
x_train=x_train[0:5000, :]
y_train = y_train[0:5000]
print(x_train.shape)
print(x_train)
models = []
emb_size=64
num_neighbors=32
for emb_size in (32,64):
print("********************* emb_size="+str(emb_size)+" ***************")
models=[]
models.append(LocallyLinearEmbedding(n_neighbors=num_neighbors,n_components=emb_size,n_jobs=multiprocessing.cpu_count()))
models.append(SpectralEmbedding(n_neighbors=num_neighbors,n_components=emb_size,n_jobs=multiprocessing.cpu_count()))
models.append(PCA(n_components=emb_size))
models.append(MDS(n_components=emb_size,n_jobs=multiprocessing.cpu_count()))
models.append(Isomap(n_neighbors=num_neighbors, n_components=emb_size, n_jobs=multiprocessing.cpu_count()))
models.append('matrix2vec')
model_names = ['lle', 'le', 'pca', 'MDS', 'ISOMAP', 'matrix2vec'] # names corresponding to model
for index, embedding in enumerate(models):
print('Start running model '+model_names[index]+"...")
start = datetime.datetime.now()
X_transformed= np.zeros((x_train.shape[0],emb_size))
if(index<=4):
# X_transformed = embedding.fit_transform(x_train)
X_transformed = embedding.fit_transform(x_train)
else:
X_transformed=matrix2vec.matrix2vec(x_train,emb_size,topk=5,num_iter=10)
end = datetime.datetime.now()
#scale
X_transformed=scale(X_transformed)
print('Model '+model_names[index]+' Finished in '+str(end-start)+" s.")
#Using KNN classifier to test the result with cross_validation
knn = KNeighborsClassifier()
param = {"n_neighbors": [1, 3, 5, 7, 11]} # 构造一些参数的值进行搜索 (字典类型,可以有多个参数)
gc = GridSearchCV(knn, param_grid=param, cv=4)
gc.fit(X_transformed, y_train)
knn = gc.best_estimator_
print("The best parameter: n_neighbors=" + str(knn.n_neighbors))
scores = cross_val_score(knn, X_transformed, y_train, cv=4)
print("交叉验证Accuracy: ", scores)
print("Accuracy: %0.4f (+/- %0.4f)" % (scores.mean(), scores.std() * 2))
|
[
"sklearn.model_selection.GridSearchCV",
"gzip.open",
"sklearn.preprocessing.scale",
"sklearn.model_selection.cross_val_score",
"numpy.zeros",
"sklearn.neighbors.KNeighborsClassifier",
"pickle.load",
"numpy.array",
"sklearn.datasets.load_svmlight_file",
"sklearn.decomposition.PCA",
"matrix2vec.matrix2vec",
"datetime.datetime.now",
"logging.getLogger",
"multiprocessing.cpu_count"
] |
[((1143, 1170), 'sklearn.datasets.load_svmlight_file', 'ds.load_svmlight_file', (['path'], {}), '(path)\n', (1164, 1170), True, 'from sklearn import datasets as ds\n'), ((1273, 1299), 'gzip.open', 'gzip.open', (['data_file', '"""rb"""'], {}), "(data_file, 'rb')\n", (1282, 1299), False, 'import gzip\n'), ((1363, 1395), 'pickle.load', 'pickle.load', (['f'], {'encoding': '"""bytes"""'}), "(f, encoding='bytes')\n", (1374, 1395), False, 'import pickle\n'), ((1583, 1605), 'numpy.zeros', 'np.zeros', (['(5000, 3072)'], {}), '((5000, 3072))\n', (1591, 1605), True, 'import numpy as np\n'), ((2776, 2802), 'logging.getLogger', 'logging.getLogger', (['program'], {}), '(program)\n', (2793, 2802), False, 'import logging\n'), ((1022, 1037), 'pickle.load', 'pickle.load', (['fo'], {}), '(fo)\n', (1033, 1037), False, 'import pickle\n'), ((1049, 1071), 'numpy.array', 'np.array', (["dict['data']"], {}), "(dict['data'])\n", (1057, 1071), True, 'import numpy as np\n'), ((1073, 1097), 'numpy.array', 'np.array', (["dict['labels']"], {}), "(dict['labels'])\n", (1081, 1097), True, 'import numpy as np\n'), ((3372, 3398), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': 'emb_size'}), '(n_components=emb_size)\n', (3375, 3398), False, 'from sklearn.decomposition import PCA\n'), ((3884, 3907), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3905, 3907), False, 'import datetime\n'), ((3935, 3973), 'numpy.zeros', 'np.zeros', (['(x_train.shape[0], emb_size)'], {}), '((x_train.shape[0], emb_size))\n', (3943, 3973), True, 'import numpy as np\n'), ((4257, 4280), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4278, 4280), False, 'import datetime\n'), ((4327, 4347), 'sklearn.preprocessing.scale', 'scale', (['X_transformed'], {}), '(X_transformed)\n', (4332, 4347), False, 'from sklearn.preprocessing import scale\n'), ((4540, 4562), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {}), '()\n', (4560, 4562), False, 'from 
sklearn.neighbors import KNeighborsClassifier\n'), ((4665, 4706), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['knn'], {'param_grid': 'param', 'cv': '(4)'}), '(knn, param_grid=param, cv=4)\n', (4677, 4706), False, 'from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score\n'), ((4885, 4935), 'sklearn.model_selection.cross_val_score', 'cross_val_score', (['knn', 'X_transformed', 'y_train'], {'cv': '(4)'}), '(knn, X_transformed, y_train, cv=4)\n', (4900, 4935), False, 'from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score\n'), ((4180, 4241), 'matrix2vec.matrix2vec', 'matrix2vec.matrix2vec', (['x_train', 'emb_size'], {'topk': '(5)', 'num_iter': '(10)'}), '(x_train, emb_size, topk=5, num_iter=10)\n', (4201, 4241), False, 'import matrix2vec\n'), ((3195, 3222), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (3220, 3222), False, 'import multiprocessing\n'), ((3320, 3347), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (3345, 3347), False, 'import multiprocessing\n'), ((3455, 3482), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (3480, 3482), False, 'import multiprocessing\n'), ((3571, 3598), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (3596, 3598), False, 'import multiprocessing\n')]
|
# first motor test
import pin
import time
# load the configuration from config1.json
pin.load("config1.json")
print("foreward")
pin.Out("in1",1)
pin.Out("in2",0)
pin.Out("ena",1)
time.sleep(3)
print("backward")
pin.Out("in1",0)
pin.Out("in2",1)
pin.Out("ena",1)
time.sleep(3)
pin.Out("ena",0)
print("stop")
# reset the GPIOs
pin.cleanup()
|
[
"pin.load",
"pin.Out",
"pin.cleanup",
"time.sleep"
] |
[((86, 110), 'pin.load', 'pin.load', (['"""config1.json"""'], {}), "('config1.json')\n", (94, 110), False, 'import pin\n'), ((130, 147), 'pin.Out', 'pin.Out', (['"""in1"""', '(1)'], {}), "('in1', 1)\n", (137, 147), False, 'import pin\n'), ((147, 164), 'pin.Out', 'pin.Out', (['"""in2"""', '(0)'], {}), "('in2', 0)\n", (154, 164), False, 'import pin\n'), ((164, 181), 'pin.Out', 'pin.Out', (['"""ena"""', '(1)'], {}), "('ena', 1)\n", (171, 181), False, 'import pin\n'), ((182, 195), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (192, 195), False, 'import time\n'), ((215, 232), 'pin.Out', 'pin.Out', (['"""in1"""', '(0)'], {}), "('in1', 0)\n", (222, 232), False, 'import pin\n'), ((232, 249), 'pin.Out', 'pin.Out', (['"""in2"""', '(1)'], {}), "('in2', 1)\n", (239, 249), False, 'import pin\n'), ((249, 266), 'pin.Out', 'pin.Out', (['"""ena"""', '(1)'], {}), "('ena', 1)\n", (256, 266), False, 'import pin\n'), ((267, 280), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (277, 280), False, 'import time\n'), ((281, 298), 'pin.Out', 'pin.Out', (['"""ena"""', '(0)'], {}), "('ena', 0)\n", (288, 298), False, 'import pin\n'), ((331, 344), 'pin.cleanup', 'pin.cleanup', ([], {}), '()\n', (342, 344), False, 'import pin\n')]
|
import collections
import datetime
import importlib
import math
import os.path
import pprint
import random
import subprocess
import sys
import git
import numpy as np
import sacred
import tensorboardX
import torch
import tqdm
def git_revision():
return subprocess.check_output("git rev-parse --short HEAD".split()).strip().decode("utf-8")
def cat_last_dim(x, with_expand=False):
"""
Concatenates a sequence of tensors along the last dimension, if with_expand==True it expands/broadcasts the tensors
as necessary for concatenation.
:param x: sequence of tensors of the same number of dimensions.
:param with_expand: boolean deciding whether to expand/broadcast the tensors as necessary for concatenation.
:return: concatenated tensor
"""
assert all(len(i.shape) == 2 for i in x)
if not with_expand:
return torch.cat(x, dim=-1)
shape_to_expand = np.array(tuple(tuple(i.shape[:-1]) for i in x)).max(axis=0).tolist()
return cat_last_dim(tuple(i.expand(*shape_to_expand, -1) for i in x), with_expand=False)
def flatten(l):
"""
Flattens the list l.
:param l: list
:return: flattened list.
"""
return [item for sublist in l for item in sublist]
def iterate_minibatches(batchsize, data):
assert len(data) > 0
assert isinstance(data, collections.abc.Mapping)
for start_idx in range(0, len(data[tuple(data.keys())[0]]) - batchsize + 1, batchsize):
excerpt = slice(start_idx, start_idx + batchsize)
yield {k: v[excerpt] for k, v in data.items()}
def experiment_name(args):
"""
Returns the experiment name.
"""
if args.problem_name in ['tail_integral_1d', 'tail_integral_5d']:
return f'{args.problem_name}_{args.q1_or_q2}'
elif args.problem_name == 'cancer':
return f'{args.problem_name}_{args.factor}_{args.q1_or_q2}'
else:
raise Exception(f'Unknown problem_name: {args.problem_name}.')
def get_model(args):
"""
Imports appropriate module for the model.
"""
if args.problem_name in ['tail_integral_1d', 'tail_integral_5d']:
return importlib.import_module(f'amci.tail_integral.model')
elif args.problem_name == 'cancer':
return importlib.import_module(f'amci.cancer.model')
else:
raise Exception(f'Unknown problem_name: {args.problem_name}.')
def sacred_main_helper(train_func, args, _run):
"""
Helper function
"""
for k, v in args.__dict__.items():
if type(v) == sacred.config.custom_containers.ReadOnlyList:
setattr(args, k, list(v))
args.experiment_name = experiment_name(args)
# Handle git repo related aspects
repo = git.Repo('../..')
args.git_clean = not repo.is_dirty()
if not args.git_clean and args.require_clean_repo:
raise RuntimeError("The repo is not clean, change require_clean_repo flag if you want to"
"run the code with a dirty repo.")
args.git_commit = git_revision()
# Handle CUDA config
args.cuda = not args.no_cuda and torch.cuda.is_available()
torch.set_default_tensor_type('torch.cuda.FloatTensor' if args.cuda else 'torch.FloatTensor')
args.device = 'cuda' if args.cuda else 'cpu'
# Set random seeds
random.seed(args.seed)
torch.manual_seed(args.seed)
torch.cuda.manual_seed_all(args.seed)
# Sort out paths
args.runid = datetime.datetime.now().strftime("%y%m%d_%H%M_%f") + '_' + args.git_commit
if args.logs_root is None:
# os.path.dirname(sys.argv[0]) should point to amci/src/amci
args.logs_root = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0]), '../../logs'))
args.output_folder = os.path.join(args.logs_root, f'{args.experiment_name}_{args.runid}')
os.mkdir(args.output_folder)
_run.info['output_folder'] = os.path.abspath(args.output_folder)
_run.info['args'] = args.__dict__
# Tensorboard
_writer = tensorboardX.SummaryWriter(os.path.join(args.output_folder, 'tensorboard.file'))
_writer.add_text('hyperparameters', pprint.pformat(args.__dict__))
with open(os.path.join(args.output_folder, 'config.txt'), 'wt') as out:
pprint.pprint(args.__dict__, stream=out)
pprint.pprint(args.__dict__)
return train_func(args, _run, _writer)
def validate_checkpoint(checkpoint_filepath):
if not (os.path.isfile(checkpoint_filepath) and os.access(checkpoint_filepath, os.R_OK)):
raise Exception(f'Cannot access the checkpoint, it doesnt exist or dont have the right permissions: '
f'{checkpoint_filepath}.')
def validate_checkpoints(args):
validate_checkpoint(args.checkpoint_q1)
validate_checkpoint(args.checkpoint_q2)
def dict_of_lists_to_list_of_dicts(dict_of_lists):
any_key_of_the_dict = list(dict_of_lists.keys())[0]
list_of_dicts = [{k: v[i:i + 1] for k, v in dict_of_lists.items()}
for i in range(len(dict_of_lists[any_key_of_the_dict]))]
return list_of_dicts
def cuda_config(args):
args.cuda = not args.no_cuda and torch.cuda.is_available()
args.device = 'cuda' if args.cuda else 'cpu'
torch.set_default_tensor_type('torch.cuda.FloatTensor' if args.cuda else 'torch.FloatTensor')
return args
def generate_samples_for_evaluation(number_of_samples_total, number_of_samples_gpu_capacity, ys_thetas, model,
device='cpu'):
data_dicts_q1, data_dicts_q2 = [], []
for y_theta in tqdm.tqdm(dict_of_lists_to_list_of_dicts(ys_thetas)):
data_dicts_q1_temp = []
data_dicts_q2_temp = []
number_of_samples_left = number_of_samples_total
for _ in tqdm.tqdm(range(int(math.ceil(number_of_samples_total/number_of_samples_gpu_capacity))),
disable=False):
# returns a dict
data_dict_q1, data_dict_q2 = model.samples_for_evaluation(
min(number_of_samples_left, number_of_samples_gpu_capacity),
y_theta=y_theta, device=device)
data_dicts_q1_temp.append(data_dict_q1)
data_dicts_q2_temp.append(data_dict_q2)
number_of_samples_left -= number_of_samples_gpu_capacity
data_dicts_q1.append({k: torch.cat(tuple(d[k] for d in data_dicts_q1_temp), dim=0) for k in data_dicts_q1_temp[0].keys()})
data_dicts_q2.append({k: torch.cat(tuple(d[k] for d in data_dicts_q2_temp), dim=0) for k in data_dicts_q2_temp[0].keys()})
# lists of dicts
data_dict_q1, data_dict_q2 = map(
lambda data_dicts: {k: torch.stack(tuple(d[k] for d in data_dicts), dim=0) for k in data_dicts[0].keys()},
(data_dicts_q1, data_dicts_q2))
return data_dict_q1, data_dict_q2
def get_flow_hyper_net(hidden_units_per_layer, parameters_nelement, in_dim, number_of_layers=2):
"""
Returns an MLP neural net used as the hyper-network for the flows.
"""
H = hidden_units_per_layer
layers = [torch.nn.Linear(in_dim, H), torch.nn.ReLU()]
layers += flatten([[torch.nn.Linear(H, H), torch.nn.ReLU()] for _ in range(number_of_layers)])
layers += [torch.nn.Linear(H, parameters_nelement),]
flow_hyper_net = torch.nn.Sequential(*layers)
for param in flow_hyper_net.parameters():
torch.nn.init.uniform_(param, a=-0.01, b=0.01)
# Maybe in more complicated cases the hypernetwork could benefit from a more sophisticated initialization strategy,
# e.g. like or inspired by Chang et al., Principled Weight Initialization for Hypernetworks, ICLR 2020
# https://openreview.net/forum?id=H1lma24tPB
# It could also use normalizing the input to mean 0, std 1, e.g. by utilizing the knowledge about the prior
# distributions over the variables we're conditioning over. That would partially alleviate the need for such
# "hand-crafted" (chosen the magnitude heuristically) initialization like here.
return flow_hyper_net
def load_proposals_from_checkpoints(object, factors, model_parameters=tuple()):
"""
Loads checkpoints for the proposals.
Checks the consistency of the model parameters between different checkpoints loaded.
Note: Training and Evaluation should inherit from some common abstract base class.
:param object: instantiation of the Evaluation class
:param factors: collection of factor names to be loaded
:param model_parameters: collection of model parameter names
:return: model parameters consistent for all the checkpoints
"""
checkpoints_args_dicts = []
for factor in factors:
checkpoint = torch.load(object.args.__dict__[f'checkpoint_{factor}'], map_location=object.args.device)
object.__setattr__(factor, object.get_proposal_model(checkpoint[1]))
object.__getattribute__(factor).load_state_dict(checkpoint[0])
checkpoints_args_dicts.append(checkpoint[-2].__dict__)
# checks the consistency of the model parameters between different checkpoints loaded
for k in model_parameters:
v = checkpoints_args_dicts[0][k]
for checkpoints_args_dict in checkpoints_args_dicts[1:]:
assert checkpoints_args_dict[k] == v
return {k: checkpoints_args_dicts[0][k] for k in model_parameters}
def load_ys_thetas_and_groundtruths(self, dataset_size):
"""
Loads ys_thetas and ground truth values from the file as per run configuration.
Note: Training and Evaluation should inherit from some common abstract base class.
"""
filepath = os.path.join(self.args.logs_root,
f'ground_truths_{self.args.problem_name}_{str(self.args.ground_truth_samples)}')
if not (os.path.isfile(filepath) and os.access(filepath, os.R_OK)):
raise Exception(f'Cannot access the groundtruths file {filepath}, '
f'it doesnt exist or you dont have the right permissions. '
f'You can generate the groundtruths file using ground_truth.py script.')
ground_truths_dict = torch.load(filepath)
print(f'Loading ground truths from :{filepath}')
# check that the ground truth was generated for the same model parameter values as for
for k, v in self.loaded_checkpoint_parameters.items():
assert ground_truths_dict['args'].__dict__[k] == v
if tuple(ground_truths_dict['ys_thetas'].values())[0].shape[0] != dataset_size:
raise Exception(f'The number of datapoints in file {filepath} ({ground_truths_dict["ys_thetas"].shape[0]})'
f'does not match the argument dataset_size ({dataset_size}).')
return ground_truths_dict['ys_thetas'], ground_truths_dict['estimate']
|
[
"pprint.pformat",
"torch.nn.ReLU",
"importlib.import_module",
"torch.nn.Sequential",
"math.ceil",
"torch.manual_seed",
"torch.load",
"torch.nn.init.uniform_",
"git.Repo",
"torch.set_default_tensor_type",
"torch.cat",
"torch.cuda.manual_seed_all",
"random.seed",
"pprint.pprint",
"torch.cuda.is_available",
"torch.nn.Linear",
"datetime.datetime.now"
] |
[((2682, 2699), 'git.Repo', 'git.Repo', (['"""../.."""'], {}), "('../..')\n", (2690, 2699), False, 'import git\n'), ((3086, 3183), 'torch.set_default_tensor_type', 'torch.set_default_tensor_type', (["('torch.cuda.FloatTensor' if args.cuda else 'torch.FloatTensor')"], {}), "('torch.cuda.FloatTensor' if args.cuda else\n 'torch.FloatTensor')\n", (3115, 3183), False, 'import torch\n'), ((3257, 3279), 'random.seed', 'random.seed', (['args.seed'], {}), '(args.seed)\n', (3268, 3279), False, 'import random\n'), ((3284, 3312), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (3301, 3312), False, 'import torch\n'), ((3317, 3354), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['args.seed'], {}), '(args.seed)\n', (3343, 3354), False, 'import torch\n'), ((4219, 4247), 'pprint.pprint', 'pprint.pprint', (['args.__dict__'], {}), '(args.__dict__)\n', (4232, 4247), False, 'import pprint\n'), ((5143, 5240), 'torch.set_default_tensor_type', 'torch.set_default_tensor_type', (["('torch.cuda.FloatTensor' if args.cuda else 'torch.FloatTensor')"], {}), "('torch.cuda.FloatTensor' if args.cuda else\n 'torch.FloatTensor')\n", (5172, 5240), False, 'import torch\n'), ((7177, 7205), 'torch.nn.Sequential', 'torch.nn.Sequential', (['*layers'], {}), '(*layers)\n', (7196, 7205), False, 'import torch\n'), ((9974, 9994), 'torch.load', 'torch.load', (['filepath'], {}), '(filepath)\n', (9984, 9994), False, 'import torch\n'), ((858, 878), 'torch.cat', 'torch.cat', (['x'], {'dim': '(-1)'}), '(x, dim=-1)\n', (867, 878), False, 'import torch\n'), ((2116, 2168), 'importlib.import_module', 'importlib.import_module', (['f"""amci.tail_integral.model"""'], {}), "(f'amci.tail_integral.model')\n", (2139, 2168), False, 'import importlib\n'), ((3056, 3081), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3079, 3081), False, 'import torch\n'), ((4057, 4086), 'pprint.pformat', 'pprint.pformat', (['args.__dict__'], {}), '(args.__dict__)\n', (4071, 
4086), False, 'import pprint\n'), ((4173, 4213), 'pprint.pprint', 'pprint.pprint', (['args.__dict__'], {'stream': 'out'}), '(args.__dict__, stream=out)\n', (4186, 4213), False, 'import pprint\n'), ((5064, 5089), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (5087, 5089), False, 'import torch\n'), ((6954, 6980), 'torch.nn.Linear', 'torch.nn.Linear', (['in_dim', 'H'], {}), '(in_dim, H)\n', (6969, 6980), False, 'import torch\n'), ((6982, 6997), 'torch.nn.ReLU', 'torch.nn.ReLU', ([], {}), '()\n', (6995, 6997), False, 'import torch\n'), ((7113, 7152), 'torch.nn.Linear', 'torch.nn.Linear', (['H', 'parameters_nelement'], {}), '(H, parameters_nelement)\n', (7128, 7152), False, 'import torch\n'), ((7261, 7307), 'torch.nn.init.uniform_', 'torch.nn.init.uniform_', (['param'], {'a': '(-0.01)', 'b': '(0.01)'}), '(param, a=-0.01, b=0.01)\n', (7283, 7307), False, 'import torch\n'), ((8567, 8661), 'torch.load', 'torch.load', (["object.args.__dict__[f'checkpoint_{factor}']"], {'map_location': 'object.args.device'}), "(object.args.__dict__[f'checkpoint_{factor}'], map_location=\n object.args.device)\n", (8577, 8661), False, 'import torch\n'), ((2224, 2269), 'importlib.import_module', 'importlib.import_module', (['f"""amci.cancer.model"""'], {}), "(f'amci.cancer.model')\n", (2247, 2269), False, 'import importlib\n'), ((7023, 7044), 'torch.nn.Linear', 'torch.nn.Linear', (['H', 'H'], {}), '(H, H)\n', (7038, 7044), False, 'import torch\n'), ((7046, 7061), 'torch.nn.ReLU', 'torch.nn.ReLU', ([], {}), '()\n', (7059, 7061), False, 'import torch\n'), ((3394, 3417), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3415, 3417), False, 'import datetime\n'), ((5693, 5760), 'math.ceil', 'math.ceil', (['(number_of_samples_total / number_of_samples_gpu_capacity)'], {}), '(number_of_samples_total / number_of_samples_gpu_capacity)\n', (5702, 5760), False, 'import math\n')]
|
from django.shortcuts import render , redirect
from django.contrib.auth import authenticate,login, get_user_model
from django.http import HttpResponse
from django.contrib.auth.views import LogoutView
from django.utils.http import is_safe_url
from .forms import LoginForm,RegisterForm,GuestForm
from .models import GuestEmail
# Create your views here.
def guest_register_view(request):
form = GuestForm(request.POST or None)
context={
"form":form
}
next_=request.GET.get('next')
next_post=request.POST.get('next')
redirect_path=next_ or next_post or None
if form.is_valid():
email=form.cleaned_data.get("email")
new_guest_email=GuestEmail.objects.create(email=email)
request.session['guest_email_id']=new_guest_email.id
if is_safe_url(redirect_path,request.get_host()):
return redirect(redirect_path)
else:
return redirect("/register")
return redirect("/register/")
def login_page(request):
login_form = LoginForm(request.POST or None)
context={
"form":login_form
}
next_=request.GET.get('next')
next_post=request.POST.get('next')
redirect_path=next_ or next_post or None
print(request,"hello")
if login_form.is_valid():
username=login_form.cleaned_data.get("username")
password=login_form.cleaned_data.get("password")
user=authenticate(request,username=username,password=password)
if user is not None:
login(request,user)
try:
del request.session['guest_email_id']
except:
pass
if is_safe_url(redirect_path,request.get_host()):
return redirect(redirect_path)
else:
return redirect("/")
else:
print("Error")
return render(request,"accounts/login.html",context)
User = get_user_model()
def register_page(request):
form = RegisterForm(request.POST or None)
context={
"form":form
}
if form.is_valid():
username = form.cleaned_data.get("username")
password = form.cleaned_data.get("password")
email = form.cleaned_data.get("email")
new_user=User.objects.create_user(username,email,password)
return render(request,"accounts/register.html",context)
|
[
"django.shortcuts.redirect",
"django.contrib.auth.get_user_model",
"django.contrib.auth.authenticate",
"django.shortcuts.render",
"django.contrib.auth.login"
] |
[((1659, 1675), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1673, 1675), False, 'from django.contrib.auth import authenticate, login, get_user_model\n'), ((872, 894), 'django.shortcuts.redirect', 'redirect', (['"""/register/"""'], {}), "('/register/')\n", (880, 894), False, 'from django.shortcuts import render, redirect\n'), ((1605, 1652), 'django.shortcuts.render', 'render', (['request', '"""accounts/login.html"""', 'context'], {}), "(request, 'accounts/login.html', context)\n", (1611, 1652), False, 'from django.shortcuts import render, redirect\n'), ((2000, 2050), 'django.shortcuts.render', 'render', (['request', '"""accounts/register.html"""', 'context'], {}), "(request, 'accounts/register.html', context)\n", (2006, 2050), False, 'from django.shortcuts import render, redirect\n'), ((1274, 1333), 'django.contrib.auth.authenticate', 'authenticate', (['request'], {'username': 'username', 'password': 'password'}), '(request, username=username, password=password)\n', (1286, 1333), False, 'from django.contrib.auth import authenticate, login, get_user_model\n'), ((796, 819), 'django.shortcuts.redirect', 'redirect', (['redirect_path'], {}), '(redirect_path)\n', (804, 819), False, 'from django.shortcuts import render, redirect\n'), ((839, 860), 'django.shortcuts.redirect', 'redirect', (['"""/register"""'], {}), "('/register')\n", (847, 860), False, 'from django.shortcuts import render, redirect\n'), ((1358, 1378), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (1363, 1378), False, 'from django.contrib.auth import authenticate, login, get_user_model\n'), ((1512, 1535), 'django.shortcuts.redirect', 'redirect', (['redirect_path'], {}), '(redirect_path)\n', (1520, 1535), False, 'from django.shortcuts import render, redirect\n'), ((1557, 1570), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (1565, 1570), False, 'from django.shortcuts import render, redirect\n')]
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
bl_info = {
'name': 'Shapekey pinning',
'author': '<NAME>',
'version': '0.1',
'blender': (2, 5, 6),
"api": 34786,
"location": "View3D > UI panel > Shapekey pinning",
"description": "Shapekey pinning",
"warning": "",
"category": "3D View"}
import bpy, os, mathutils
from mathutils import *
from bpy.props import *
# class VIEW3D_OT_ResetExpressionsButton(bpy.types.Operator):
#
class VIEW3D_OT_ResetExpressionsButton(bpy.types.Operator):
bl_idname = "shapepin.reset_expressions"
bl_label = "Reset expressions"
def execute(self, context):
keys = context.object.data.shape_keys
if keys:
for block in keys.key_blocks:
block.value = 0.0
return{'FINISHED'}
#
# class VIEW3D_OT_KeyExpressionButton(bpy.types.Operator):
#
class VIEW3D_OT_KeyExpressionsButton(bpy.types.Operator):
bl_idname = "shapepin.key_expressions"
bl_label = "Key"
def execute(self, context):
keys = context.object.data.shape_keys
keyAll = context.scene.keyAll
if keys:
keylist = findActiveFcurves(keys.animation_data)
frame = context.scene.frame_current
for block in keys.key_blocks:
if (keyAll or (block.name in keylist)):
block.keyframe_insert("value", index=-1, frame=frame)
return{'FINISHED'}
def findActiveFcurves(adata):
if adata:
action = adata.action
else:
return []
if action:
keylist = []
for fcu in action.fcurves:
words = fcu.data_path.split('"')
keylist.append(words[1])
return keylist
return []
#
# class VIEW3D_OT_PinExpressionButton(bpy.types.Operator):
#
class VIEW3D_OT_PinExpressionButton(bpy.types.Operator):
bl_idname = "shapepin.pin_expression"
bl_label = "Pin"
expression = bpy.props.StringProperty()
def execute(self, context):
keys = context.object.data.shape_keys
keyAll = context.scene.keyAll
if keys:
frame = context.scene.frame_current
for block in keys.key_blocks:
oldvalue = block.value
block.value = 1.0 if block.name == self.expression else 0.0
if (context.tool_settings.use_keyframe_insert_auto and
(keyAll or (block.value > 0.01) or (abs(block.value-oldvalue) > 0.01))):
block.keyframe_insert("value", index=-1, frame=frame)
return{'FINISHED'}
#
# class ExpressionsPanel(bpy.types.Panel):
#
class ExpressionsPanel(bpy.types.Panel):
bl_label = "Pin shapekeys"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
@classmethod
def poll(cls, context):
return context.object and (context.object.type == 'MESH')
def draw(self, context):
layout = self.layout
layout.label(text="Expressions")
layout.operator("shapepin.reset_expressions")
layout.prop(context.scene, "keyAll")
layout.operator("shapepin.key_expressions")
layout.separator()
keys = context.object.data.shape_keys
if keys:
for block in keys.key_blocks:
row = layout.split(0.75)
row.prop(block, 'value', text=block.name)
row.operator("shapepin.pin_expression", text="Pin").expression = block.name
return
###################################################################################
#
# initialize and register
#
###################################################################################
def init():
bpy.types.Scene.keyAll = BoolProperty(
name="Key all",
description="Set keys for all shapes",
default=False)
def register():
init()
bpy.utils.register_module(__name__)
pass
def unregister():
bpy.utils.unregister_module(__name__)
pass
if __name__ == "__main__":
register()
|
[
"bpy.utils.unregister_module",
"bpy.utils.register_module",
"bpy.props.StringProperty"
] |
[((2815, 2841), 'bpy.props.StringProperty', 'bpy.props.StringProperty', ([], {}), '()\n', (2839, 2841), False, 'import bpy, os, mathutils\n'), ((4775, 4810), 'bpy.utils.register_module', 'bpy.utils.register_module', (['__name__'], {}), '(__name__)\n', (4800, 4810), False, 'import bpy, os, mathutils\n'), ((4847, 4884), 'bpy.utils.unregister_module', 'bpy.utils.unregister_module', (['__name__'], {}), '(__name__)\n', (4874, 4884), False, 'import bpy, os, mathutils\n')]
|
#!/usr/bin/env python
import logging
import time
import redis
import uwatch2lib
from acme_notifications.instance import settings
log = logging.getLogger(__name__)
def main():
logging.basicConfig(level=logging.DEBUG, format="%(levelname)-8s %(message)s")
log.info("Starting forwarding from Redis to BLE")
while True:
try:
msg_str = pop_msg()
forward_to_watch(msg_str)
except KeyboardInterrupt:
return
except Exception:
log.exception("")
time.sleep(settings.REDIS_RETRY_DELAY)
def pop_msg():
"""Pop the next message from the Redis queue"""
while True:
try:
log.debug("Waiting for new message on Redis queue")
with redis.Redis() as r:
msg_key, msg_bytes = r.blpop("msg")
except Exception as e:
log.error(f"Pop from Redis failed. Retrying.")
log.error(f" Error: {e}")
time.sleep(settings.REDIS_RETRY_DELAY)
else:
return msg_bytes.decode("utf-8")
def forward_to_watch(msg_str):
while True:
try:
with uwatch2lib.Uwatch2(settings.UWATCH2_MAC) as watch:
watch.send_message(msg_str)
except Exception as e:
log.error(f"Forward to watch failed. Retrying.")
log.error(f" Error: {e}")
log.error(f" Msg: {msg_str}")
time.sleep(settings.RETRY_DELAY_SEC)
else:
log.error(f"Forwarded to watch: {msg_str}")
break
if __name__ == "__main__":
main()
|
[
"redis.Redis",
"uwatch2lib.Uwatch2",
"logging.basicConfig",
"time.sleep",
"logging.getLogger"
] |
[((139, 166), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (156, 166), False, 'import logging\n'), ((185, 263), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(levelname)-8s %(message)s"""'}), "(level=logging.DEBUG, format='%(levelname)-8s %(message)s')\n", (204, 263), False, 'import logging\n'), ((538, 576), 'time.sleep', 'time.sleep', (['settings.REDIS_RETRY_DELAY'], {}), '(settings.REDIS_RETRY_DELAY)\n', (548, 576), False, 'import time\n'), ((756, 769), 'redis.Redis', 'redis.Redis', ([], {}), '()\n', (767, 769), False, 'import redis\n'), ((969, 1007), 'time.sleep', 'time.sleep', (['settings.REDIS_RETRY_DELAY'], {}), '(settings.REDIS_RETRY_DELAY)\n', (979, 1007), False, 'import time\n'), ((1146, 1186), 'uwatch2lib.Uwatch2', 'uwatch2lib.Uwatch2', (['settings.UWATCH2_MAC'], {}), '(settings.UWATCH2_MAC)\n', (1164, 1186), False, 'import uwatch2lib\n'), ((1427, 1463), 'time.sleep', 'time.sleep', (['settings.RETRY_DELAY_SEC'], {}), '(settings.RETRY_DELAY_SEC)\n', (1437, 1463), False, 'import time\n')]
|
import os, sys
import pandas as pd
import numpy as np
import integer_polyomino.scripts.graph_topo as gt
def genome_metric_preprocess(df):
iso_groups(df)
df['Iso_index'] = df['Iso_index'].astype(np.uint8)
df['original'] = df['original'].astype('category')
df['pIDs'] = df['pIDs'].apply(lambda x: str(eval(x))).astype('category')
df['diversity'] = df['diversity'].astype(np.uint16)
df['complex_diversity'] = df['complex_diversity'].astype(np.uint16)
df['neutral_weight'] = df['neutral_weight'].astype('category')
# df['frequencies'] = df['frequencies'].apply(lambda x: np.array(eval(x), dtype=np.uint8))
metrics = ['srobustness','irobustness','evolvability', 'robust_evolvability', 'complex_evolvability', 'rare','unbound']
for metric in metrics:
df[metric] = df[metric].astype(np.float16)
def iso_groups(df):
df['Iso_index'] = df['diversity'].astype(np.int16)
for pID in df.pIDs.unique():
small_df = df[df.pIDs == pID]
partition = gt.PartitionPhenotype(list(map(eval, small_df.original.unique())))
df['Iso_index'].update(small_df['original'].apply(lambda x: partition[str(eval(x))]))
def set_metric_preprocess(df):
# df['diversity_tracker'] = df['diversity_tracker'].apply(lambda x: np.array(eval(x), dtype=np.int16))
df['analysed'] = df['analysed'].astype(np.int16)
df['misclassified'] = df['misclassified'].astype(np.int16)
df['diversity'] = df['diversity'].astype(np.int16)
# df['originals'] = df['originals'].apply(lambda x: list(eval(x)))
df['neutral_size'] = df['neutral_size'].astype(np.int32)
metrics = ['srobustness', 'irobustness', 'evolvability', 'robust_evolvability', 'complex_evolvability', 'rare','unbound', 'complex_diversity']
for metric in metrics:
df[metric] = df[metric].astype(np.float16)
def write_to_hdf(file_path, files, store, kind, overwrite):
for file_name in files:
if((overwrite) or not(('/' + file_name[:-4]) in store.keys())):
df = pd.read_csv(os.path.join(file_path, file_name), sep=' ')
if(kind == 'set'):
set_metric_preprocess(df)
elif(kind == 'genome'):
genome_metric_preprocess(df)
store.append(file_name[:-4], df, format='table', data_columns=True)
|
[
"os.path.join"
] |
[((2024, 2058), 'os.path.join', 'os.path.join', (['file_path', 'file_name'], {}), '(file_path, file_name)\n', (2036, 2058), False, 'import os, sys\n')]
|
import os
import numpy as np
import logging
log = logging.getLogger('data_utils')
def resample_ants(nii_file, nii_file_newres, new_res=(1.37, 1.37, 10, 1)):
'''
Call ANTs to resample an image to a given resolution and save a new resampled file.
:param nii_file: the path of the input file
:param nii_file_newres: the path of the output file
:param new_res: the pixel resolution to resample
'''
print('Resampling %s at resolution %s to file %s' % (nii_file, str(new_res), nii_file_newres))
os.system('~/bin/ants/bin/ResampleImage %d %s %s %s' %
(len(new_res), nii_file, nii_file_newres, 'x'.join([str(r) for r in new_res])))
def normalise(array, min_value, max_value):
array = (max_value - min_value) * (array - float(array.min())) / (array.max() - array.min()) + min_value
assert array.max() == max_value and array.min() == min_value
return array
def crop_same(image_list, mask_list, size=(None, None), mode='equal', pad_mode='constant'):
'''
Crop the data in the image and mask lists, so that they have the same size.
:param image_list: a list of images. Each element should be 4-dimensional, (sl,h,w,chn)
:param mask_list: a list of masks. Each element should be 4-dimensional, (sl,h,w,chn)
:param size: dimensions to crop the images to.
:param mode: can be one of [equal, left, right]. Denotes where to crop pixels from. Defaults to middle.
:param pad_mode: can be one of ['edge', 'constant']. 'edge' pads using the values of the edge pixels,
'constant' pads with a constant value
:return: the modified arrays
'''
min_w = np.min([m.shape[1] for m in mask_list]) if size[0] is None else size[0]
min_h = np.min([m.shape[2] for m in mask_list]) if size[1] is None else size[1]
# log.debug('Resizing list1 of size %s to size %s' % (str(image_list[0].shape), str((min_w, min_h))))
# log.debug('Resizing list2 of size %s to size %s' % (str(mask_list[0].shape), str((min_w, min_h))))
img_result, msk_result = [], []
for i in range(len(mask_list)):
im = image_list[i]
m = mask_list[i]
if m.shape[1] > min_w:
m = _crop(m, 1, min_w, mode)
if im.shape[1] > min_w:
im = _crop(im, 1, min_w, mode)
if m.shape[1] < min_w:
m = _pad(m, 1, min_w, pad_mode)
if im.shape[1] < min_w:
im = _pad(im, 1, min_w, pad_mode)
if m.shape[2] > min_h:
m = _crop(m, 2, min_h, mode)
if im.shape[2] > min_h:
im = _crop(im, 2, min_h, mode)
if m.shape[2] < min_h:
m = _pad(m, 2, min_h, pad_mode)
if im.shape[2] < min_h:
im = _pad(im, 2, min_h, pad_mode)
img_result.append(im)
msk_result.append(m)
return img_result, msk_result
def _crop(image, dim, nb_pixels, mode):
diff = image.shape[dim] - nb_pixels
if mode == 'equal':
l = int(np.ceil(diff / 2))
r = image.shape[dim] - l
elif mode == 'right':
l = 0
r = nb_pixels
elif mode == 'left':
l = diff
r = image.shape[dim]
else:
raise 'Unexpected mode: %s. Expected to be one of [equal, left, right].' % mode
if dim == 1:
return image[:, l:r, :, :]
elif dim == 2:
return image[:, :, l:r, :]
else:
return None
def _pad(image, dim, nb_pixels, mode):
diff = nb_pixels - image.shape[dim]
l = int(diff / 2)
r = int(diff - l)
if dim == 1:
pad_width = ((0, 0), (l, r), (0, 0), (0, 0))
elif dim == 2:
pad_width = ((0, 0), (0, 0), (l, r), (0, 0))
else:
return None
if mode == 'edge':
new_image = np.pad(image, pad_width, 'edge')
elif mode == 'constant':
new_image = np.pad(image, pad_width, 'constant', constant_values=0)
else:
raise Exception('Invalid pad mode: ' + mode)
return new_image
def sample(data, nb_samples, seed=-1):
    """Draw `nb_samples` distinct elements from `data`, uniformly at random.

    :param seed: when > -1, seeds numpy's global RNG first so the draw is
        reproducible.
    :return: a numpy array of the chosen elements (without replacement).
    """
    if seed > -1:
        np.random.seed(seed)
    picks = np.random.choice(len(data), size=nb_samples, replace=False)
    chosen = [data[j] for j in picks]
    return np.array(chosen)
def generator(batch, mode, *x):
    """Yield batches of size `batch` from the arrays in `x`, forever.

    :param batch: number of leading-axis entries per yielded batch
    :param mode: 'overflow' or 'no_overflow' -- forwarded to generate(), which
        decides what happens when a batch would run past the end of the data.
    :param x: one or more arrays batched along their first axis; image-shaped
        inputs must match the first array on every dimension except the last,
        other inputs only on the first dimension.
    """
    assert mode in ['overflow', 'no_overflow']
    imshape = x[0].shape
    for ar in x:
        # case where all inputs are images
        if len(ar.shape) == len(imshape):
            assert ar.shape[:-1] == imshape[:-1], str(ar.shape) + ' vs ' + str(imshape)
        # case where inputs might be arrays of different dimensions
        else:
            assert ar.shape[0] == imshape[0], str(ar.shape) + ' vs ' + str(imshape)
    start = 0
    while 1:
        if isempty(*x):  # if the arrays are empty do not process and yield empty arrays
            log.info('Empty inputs. Return empty arrays')
            res = []
            for ar in x:
                res.append(np.empty(shape=ar.shape))
            if len(res) > 1:
                yield res
            else:
                yield res[0]
        else:
            # generate() advances the cursor and may reshuffle the arrays in place
            start, ims = generate(start, batch, mode, *x)
            # unwrap a single-array batch so the caller receives the array itself
            if len(ims) == 1:
                yield ims[0]
            else:
                yield ims
def isempty(*x):
    """Return True when every supplied array has a zero-length first axis
    (vacuously True when no arrays are given)."""
    return not any(ar.shape[0] > 0 for ar in x)
def generate(start, batch, mode, *images):
    """Take the next batch of entries from each array in `images`, starting at `start`.

    Returns (new_start, result) where result holds one copied slice per input
    array.  NOTE: this function shuffles the input arrays IN PLACE
    (`ar[:] = ar[index]`, same permutation for all of them) whenever the cursor
    wraps past the end, so the arrays stay aligned across epochs.

    :param mode: 'no_overflow' yields whatever remains at the end of an epoch
        (possibly fewer than `batch` entries) before reshuffling; any other
        value wraps around, topping the batch up with entries from the freshly
        reshuffled beginning so every batch has exactly `batch` entries.
    """
    result = []
    if mode == 'no_overflow':
        for ar in images:
            # `+ 0` forces a copy so the caller's batch survives the in-place shuffle below
            result.append(ar[start:start + batch] + 0)
        start += batch
        if start >= len(images[0]):
            # epoch finished: reshuffle every array with the same permutation
            index = np.array(range(len(images[0])))
            np.random.shuffle(index)
            for ar in images:
                ar[:] = ar[index]  # shuffle array
            start = 0
        return start, result
    if start + batch <= len(images[0]):
        for ar in images:
            # `+ 0` forces a copy, detaching the batch from the shared buffer
            result.append(ar[start:start + batch] + 0)
        start += batch
        return start, result
    else:
        # shuffle images
        index = np.array(range(len(images[0])))
        np.random.shuffle(index)
        extra = batch + start - len(images[0])  # extra images to use from the beginning
        for ar in images:
            ims = ar[start:] + 0  # last images of array
            ar[:] = ar[index]  # shuffle array
            if extra > 0:
                result.append(np.concatenate([ims, ar[0:extra]], axis=0))
        return extra, result
|
[
"numpy.pad",
"numpy.random.seed",
"numpy.random.shuffle",
"numpy.ceil",
"numpy.concatenate",
"numpy.empty",
"numpy.min",
"numpy.array",
"logging.getLogger"
] |
[((50, 81), 'logging.getLogger', 'logging.getLogger', (['"""data_utils"""'], {}), "('data_utils')\n", (67, 81), False, 'import logging\n'), ((4156, 4188), 'numpy.array', 'np.array', (['[data[i] for i in idx]'], {}), '([data[i] for i in idx])\n', (4164, 4188), True, 'import numpy as np\n'), ((1692, 1731), 'numpy.min', 'np.min', (['[m.shape[1] for m in mask_list]'], {}), '([m.shape[1] for m in mask_list])\n', (1698, 1731), True, 'import numpy as np\n'), ((1776, 1815), 'numpy.min', 'np.min', (['[m.shape[2] for m in mask_list]'], {}), '([m.shape[2] for m in mask_list])\n', (1782, 1815), True, 'import numpy as np\n'), ((3764, 3796), 'numpy.pad', 'np.pad', (['image', 'pad_width', '"""edge"""'], {}), "(image, pad_width, 'edge')\n", (3770, 3796), True, 'import numpy as np\n'), ((4054, 4074), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (4068, 4074), True, 'import numpy as np\n'), ((6027, 6051), 'numpy.random.shuffle', 'np.random.shuffle', (['index'], {}), '(index)\n', (6044, 6051), True, 'import numpy as np\n'), ((3003, 3020), 'numpy.ceil', 'np.ceil', (['(diff / 2)'], {}), '(diff / 2)\n', (3010, 3020), True, 'import numpy as np\n'), ((3846, 3901), 'numpy.pad', 'np.pad', (['image', 'pad_width', '"""constant"""'], {'constant_values': '(0)'}), "(image, pad_width, 'constant', constant_values=0)\n", (3852, 3901), True, 'import numpy as np\n'), ((5604, 5628), 'numpy.random.shuffle', 'np.random.shuffle', (['index'], {}), '(index)\n', (5621, 5628), True, 'import numpy as np\n'), ((4899, 4923), 'numpy.empty', 'np.empty', ([], {'shape': 'ar.shape'}), '(shape=ar.shape)\n', (4907, 4923), True, 'import numpy as np\n'), ((6331, 6373), 'numpy.concatenate', 'np.concatenate', (['[ims, ar[0:extra]]'], {'axis': '(0)'}), '([ims, ar[0:extra]], axis=0)\n', (6345, 6373), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time    : 2021/10/12 6:00 PM
# @Author  : <NAME>
# @File    : setup.py.py
# @Software: PyCharm
import setuptools
# Use the project README as the long description shown on the package index page.
with open('README.md', 'r', encoding='utf-8') as fh:
    long_description = fh.read()
setuptools.setup(
    name='whrtest',
    version='0.0.1',
    author='<NAME>',
    author_email='<EMAIL>',
    description='Oh! whr',
    long_description=long_description,
    url='https://github.com/BigGoby/BigPig',
    packages=setuptools.find_packages(),  # auto-discover every package directory
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
    ],
)
|
[
"setuptools.find_packages"
] |
[((479, 505), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (503, 505), False, 'import setuptools\n')]
|
from mmdet.apis import init_detector, inference_detector, show_result_pyplot
import mmcv
import cv2
import numpy as np
import time
import sys
import glob
import os
from datetime import datetime
def process_video_crcnn(frame_offset, frame_count, config_file, checkpoint_file, video_path):
    """
    Run Cascade R-CNN detection over a slice of a video and log the boxes.

    frame_offset: skipping this many frames
    frame_count: run detection on this many frames
    config_file / checkpoint_file: mmdetection model config and weights paths
    video_path: input video readable by mmcv.VideoReader

    Writes one "frame,x1,y1,x2,y2,score" line per box to ./demo/dump/det.txt
    and periodically dumps annotated frames to ./demo/dump/.
    """
    f_number = 0
    frame_offset = int(frame_offset)
    frame_count = int(frame_count)
    video = mmcv.VideoReader(video_path)
    model = init_detector(config_file, checkpoint_file, device='cuda:0')
    # run inference at the native video resolution rather than the config default
    model.cfg.data.test.pipeline[1]['img_scale'] = video.resolution
    print('[config] img_scale: {}'.format(model.cfg.data.test.pipeline[1]['img_scale']))
    print('[config] score threshold: {}'.format(model.cfg.test_cfg['rcnn']['score_thr']))
    print('[config] iou threshold: {}'.format(model.cfg.test_cfg['rcnn']['nms']['iou_threshold']))
    print('[config] rpn nms threshold: {}'.format(model.cfg.test_cfg['rpn']['nms_thr']))
    now = datetime.now()
    date_time = now.strftime("%m%d%Y_%H%M%S")  # NOTE(review): computed but never used
    log_filename = './demo/dump/det.txt'
    log_file = open(log_filename, 'w')
    start_process = time.time()
    slice_start = 0 if frame_offset == 0 else frame_offset-1
    slice_end = frame_offset+frame_count
    print('[DBG] processing frames from {} - {}'.format(range(slice_start,slice_end)[0], range(slice_start,slice_end)[-1]))
    # boxes of the previous frame, reused to fill gaps when a frame yields nothing
    last_boxes = []
    for index in range(slice_start,slice_end):
        frame = video[index]
        f_number = f_number + 1
        if frame is None:
            print('[DBG] Empty frame received!')
            break
        start_time = time.time()
        result = inference_detector(model, frame)
        end_time = time.time()
        bbox_result, _ = result, None
        # flatten the per-class box lists into one (N, 5) array of [x1, y1, x2, y2, score]
        bboxes = np.vstack(bbox_result)
        labels = [np.full(bbox.shape[0], i, dtype=np.int32) for i, bbox in enumerate(bbox_result)]
        labels = np.concatenate(labels)
        # assumes class index 1 is the class of interest here, whereas
        # process_jpg_crcnn filters on 0 -- TODO confirm which indexing is correct
        if len(bboxes) == 0 or (len(bboxes) == 1 and labels[0] != 1):
            if len(last_boxes) == 0:
                print('[DBG] both current & previous detection lists for frame %d are empty' % (f_number))
                # emit a fixed placeholder box with a low score so the output has no frame gaps
                log_file.write(str(f_number)+","+str(100.0)+","+str(100.0)+","+str(135.0)+","+str(228.0)+","+str(0.1) + "\n")
            else:
                print('[DBG] received empty detection list for frame %d copying boxes from previous frame' % (f_number))
                for i in range(len(last_boxes)):
                    box = last_boxes[i]
                    d = (box[0], box[1], box[2], box[3], box[4])
                    # cv2.rectangle(frame, (int(d[0]), int(d[1])), (int(d[2]), int(d[3])), (255,0,0), 2)
                    log_file.write(str(f_number)+","+str(d[0])+","+str(d[1])+","+str(d[2])+","+str(d[3])+","+str(d[4]) + "\n")
        else:
            for i in range(len(bboxes)):
                # bb [816.4531 265.64264 832.7383 311.08356 0.99859136]
                bb = bboxes[i]
                if labels[i] != 1:
                    continue
                d = (bb[0], bb[1], bb[2], bb[3], bb[4])
                # skip degenerate boxes with non-positive width or height
                if (d[2]-d[0]) <= 0. or (d[3]-d[1]) <= 0.:
                    print ('[DBG] wrong size of a box at frame: %d' % (f_number))
                    continue
                cv2.rectangle(frame, (int(d[0]), int(d[1])), (int(d[2]), int(d[3])), (255,0,0), 2)
                log_file.write(str(f_number)+","+str(d[0])+","+str(d[1])+","+str(d[2])+","+str(d[3])+","+str(d[4]) + "\n")
        # remember this frame's raw (label-unfiltered) boxes for gap filling
        last_boxes = bboxes.copy()
        if f_number == 1 or f_number % 300 == 0:
            end_process = time.time()
            print('[DBG][{}/{}] frame inference time: {} {}, elapsed time: {} {}'.format(f_number+slice_start, slice_end-1, end_time-start_time, '.s', (end_process-start_process), '.s'))
        if f_number == 1 or f_number % 3000 == 0:
            dump_path = "./demo/dump/dump-%06d.jpg" % (f_number)
            cv2.imwrite(dump_path, frame)
            # flush and fsync so progress survives a crash of a long-running job
            log_file.flush()
            os.fsync(log_file.fileno())
    print('[DBG] detection complete!')
    log_file.close()
def process_jpg_crcnn(config_file, checkpoint_file, image_dir):
    """
    Run Cascade R-CNN detection over every *.jpg in image_dir (sorted order).

    config_file / checkpoint_file: mmdetection model config and weights paths
    image_dir: directory containing the frame images

    Writes one "frame,x1,y1,x2,y2" line per box to ./demo/dump/det.txt and
    periodically dumps annotated frames to ./demo/dump/.
    """
    model = init_detector(config_file, checkpoint_file, device='cuda:0')
    now = datetime.now()
    date_time = now.strftime("%m%d%Y_%H%M%S")  # NOTE(review): computed but never used
    log_filename = './demo/dump/det.txt'
    log_file = open(log_filename, 'w')
    start_process = time.time()
    #dsort_img_path = '/home/dmitriy.khvan/dsort-gcp/bepro-data/data/img1'
    frame_count = len(glob.glob(os.path.join(image_dir,'*.jpg')))
    for num, filename in enumerate(sorted(glob.glob(os.path.join(image_dir,'*.jpg')))):
        f_number = num + 1
        frame = cv2.imread(filename)
        if frame is None:
            break
        start_time = time.time()
        result = inference_detector(model, frame)
        end_time = time.time()
        bbox_result, segm_result = result, None
        # flatten the per-class box lists into one (N, 5) array of [x1, y1, x2, y2, score]
        bboxes = np.vstack(bbox_result)
        labels = [np.full(bbox.shape[0], i, dtype=np.int32) for i, bbox in enumerate(bbox_result)]
        labels = np.concatenate(labels)
        for i in range(len(bboxes)):
            bb = bboxes[i]
            # assumes class index 0 is the class of interest here, whereas
            # process_video_crcnn filters on 1 -- TODO confirm which is correct
            if labels[i] != 0: continue
            d = (int(bb[0]), int(bb[1]), int(bb[2]), int(bb[3]))
            cv2.rectangle(frame, (d[0], d[1]), (d[2], d[3]), (255,0,0), 2)
            log_file.write(str(f_number)+","+str(d[0])+","+str(d[1])+","+str(d[2])+","+str(d[3]) + "\n")
        if f_number == 1 or f_number % 500 == 0:
            end_process = time.time()
            print('[DBG][{}/{}] frame inference time: {} {}, elapsed time: {} {}'.format(f_number, frame_count, end_time-start_time, '.s', (end_process-start_process), '.s'))
        if f_number == 1 or f_number % 1000 == 0:
            dump_path = "./demo/dump/dump-%06d.jpg" % (f_number)
            cv2.imwrite(dump_path, frame)
            # flush and fsync so progress survives a crash mid-run
            log_file.flush()
            os.fsync(log_file.fileno())
    print('[DBG] detection complete!')
    log_file.close()
if __name__ == '__main__':
    # CLI: <data_dir> <config_file> <checkpoint_file> <frame_offset> <frame_count>
    data_dir = sys.argv[1]
    config_file = sys.argv[2]
    checkpoint_file = sys.argv[3]
    frame_offset = sys.argv[4]
    frame_count = sys.argv[5]
    # python demo/mmdetection_demo.py PVO4R8Dh-trim.mp4 configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_bepro.py checkpoint/crcnn_r50_bepro_stitch.pth 0 87150 /home/dmitriy.khvan/tmp/
    process_video_crcnn(frame_offset, frame_count, config_file, checkpoint_file, data_dir)
|
[
"numpy.full",
"numpy.concatenate",
"os.path.join",
"cv2.imwrite",
"mmdet.apis.init_detector",
"mmdet.apis.inference_detector",
"mmcv.VideoReader",
"time.time",
"cv2.imread",
"cv2.rectangle",
"datetime.datetime.now",
"numpy.vstack"
] |
[((504, 532), 'mmcv.VideoReader', 'mmcv.VideoReader', (['video_path'], {}), '(video_path)\n', (520, 532), False, 'import mmcv\n'), ((545, 605), 'mmdet.apis.init_detector', 'init_detector', (['config_file', 'checkpoint_file'], {'device': '"""cuda:0"""'}), "(config_file, checkpoint_file, device='cuda:0')\n", (558, 605), False, 'from mmdet.apis import init_detector, inference_detector, show_result_pyplot\n'), ((1057, 1071), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1069, 1071), False, 'from datetime import datetime\n'), ((1220, 1231), 'time.time', 'time.time', ([], {}), '()\n', (1229, 1231), False, 'import time\n'), ((4273, 4333), 'mmdet.apis.init_detector', 'init_detector', (['config_file', 'checkpoint_file'], {'device': '"""cuda:0"""'}), "(config_file, checkpoint_file, device='cuda:0')\n", (4286, 4333), False, 'from mmdet.apis import init_detector, inference_detector, show_result_pyplot\n'), ((4345, 4359), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4357, 4359), False, 'from datetime import datetime\n'), ((4508, 4519), 'time.time', 'time.time', ([], {}), '()\n', (4517, 4519), False, 'import time\n'), ((1718, 1729), 'time.time', 'time.time', ([], {}), '()\n', (1727, 1729), False, 'import time\n'), ((1747, 1779), 'mmdet.apis.inference_detector', 'inference_detector', (['model', 'frame'], {}), '(model, frame)\n', (1765, 1779), False, 'from mmdet.apis import init_detector, inference_detector, show_result_pyplot\n'), ((1799, 1810), 'time.time', 'time.time', ([], {}), '()\n', (1808, 1810), False, 'import time\n'), ((1875, 1897), 'numpy.vstack', 'np.vstack', (['bbox_result'], {}), '(bbox_result)\n', (1884, 1897), True, 'import numpy as np\n'), ((2015, 2037), 'numpy.concatenate', 'np.concatenate', (['labels'], {}), '(labels)\n', (2029, 2037), True, 'import numpy as np\n'), ((4798, 4818), 'cv2.imread', 'cv2.imread', (['filename'], {}), '(filename)\n', (4808, 4818), False, 'import cv2\n'), ((4890, 4901), 'time.time', 'time.time', ([], {}), 
'()\n', (4899, 4901), False, 'import time\n'), ((4919, 4951), 'mmdet.apis.inference_detector', 'inference_detector', (['model', 'frame'], {}), '(model, frame)\n', (4937, 4951), False, 'from mmdet.apis import init_detector, inference_detector, show_result_pyplot\n'), ((4971, 4982), 'time.time', 'time.time', ([], {}), '()\n', (4980, 4982), False, 'import time\n'), ((5057, 5079), 'numpy.vstack', 'np.vstack', (['bbox_result'], {}), '(bbox_result)\n', (5066, 5079), True, 'import numpy as np\n'), ((5197, 5219), 'numpy.concatenate', 'np.concatenate', (['labels'], {}), '(labels)\n', (5211, 5219), True, 'import numpy as np\n'), ((1917, 1958), 'numpy.full', 'np.full', (['bbox.shape[0]', 'i'], {'dtype': 'np.int32'}), '(bbox.shape[0], i, dtype=np.int32)\n', (1924, 1958), True, 'import numpy as np\n'), ((3709, 3720), 'time.time', 'time.time', ([], {}), '()\n', (3718, 3720), False, 'import time\n'), ((4036, 4065), 'cv2.imwrite', 'cv2.imwrite', (['dump_path', 'frame'], {}), '(dump_path, frame)\n', (4047, 4065), False, 'import cv2\n'), ((4628, 4660), 'os.path.join', 'os.path.join', (['image_dir', '"""*.jpg"""'], {}), "(image_dir, '*.jpg')\n", (4640, 4660), False, 'import os\n'), ((5099, 5140), 'numpy.full', 'np.full', (['bbox.shape[0]', 'i'], {'dtype': 'np.int32'}), '(bbox.shape[0], i, dtype=np.int32)\n', (5106, 5140), True, 'import numpy as np\n'), ((5403, 5467), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(d[0], d[1])', '(d[2], d[3])', '(255, 0, 0)', '(2)'], {}), '(frame, (d[0], d[1]), (d[2], d[3]), (255, 0, 0), 2)\n', (5416, 5467), False, 'import cv2\n'), ((5647, 5658), 'time.time', 'time.time', ([], {}), '()\n', (5656, 5658), False, 'import time\n'), ((5974, 6003), 'cv2.imwrite', 'cv2.imwrite', (['dump_path', 'frame'], {}), '(dump_path, frame)\n', (5985, 6003), False, 'import cv2\n'), ((4719, 4751), 'os.path.join', 'os.path.join', (['image_dir', '"""*.jpg"""'], {}), "(image_dir, '*.jpg')\n", (4731, 4751), False, 'import os\n')]
|
import logging
import io, os
import sys
import threading
import time
from collections import deque
from os.path import expanduser
import PIL
from PIL import Image, ImageFile
from cobiv.modules.core.entity import Entity
# Let PIL load partially-written/truncated image files instead of raising.
ImageFile.LOAD_TRUNCATED_IMAGES = True
class ThumbLoader(Entity):
    """Background thumbnail generator.

    File ids are queued with append(); a worker thread (run()) pops them and
    writes PNG thumbnails of at most `cell_size` pixels on the longest side
    into `thumb_path`.
    """
    logger = logging.getLogger(__name__)

    def __init__(self):
        super(ThumbLoader, self).__init__()
        self.to_cache = deque()  # pending (file_id, filename, repo_key, file_type) tuples
        self.container = None
        self.thread = None
        self.thread_alive = True
        self.cell_size = 120
        self.thumb_path = None
        self.queue_empty = True
        self.session = self.lookup("session", "Entity")

    def ready(self):
        super(ThumbLoader, self).ready()
        self.cell_size = int(self.get_config_value('image_size', 120))
        self.thumb_path = self.get_config_value('path')
        # drop cached thumbnails whenever the underlying file content changes
        self.get_app().register_event_observer('on_file_content_change', self.delete_thumbnail)

    def build_yaml_config(self, config):
        """Provide the default configuration section for this entity."""
        config[self.get_name()] = {
            'image_size': 120,
            'path': os.path.join(expanduser('~'), '.cobiv', 'thumbnails')
        }
        return config

    def get_name(self=None):
        return "thumbloader"

    def stop(self):
        """Ask the worker thread to finish and wait for it."""
        self.thread_alive = False
        self.thread.join()

    def restart(self):
        """(Re)start the worker thread, stopping any live one first."""
        # BUG FIX: the original tested `self.thread.is_alive` without calling it,
        # which is always truthy (a bound method); call it so a dead thread is
        # not flagged for shutdown again.
        if self.thread is not None and self.thread.is_alive():
            self.thread_alive = False
            self.thread.join()
        self.thread = threading.Thread(target=self.run)
        self.thread.start()

    def get_fullpath_from_file_id(self, file_id):
        """Return the thumbnail path for a file id: <thumb_path>/<id>.png."""
        return os.path.join(self.thumb_path, str(file_id) + '.png')

    def run(self):
        """Worker loop: create missing thumbnails for queued files until stopped."""
        self.thread_alive = True
        try:
            while self.thread_alive:
                if not self.queue_empty:
                    try:
                        file_id, filename, repo_key, file_type = self.to_cache.popleft()
                        if file_type == 'book':
                            pass  # books get no single-image thumbnail
                        else:
                            thumb_filename = self.get_fullpath_from_file_id(file_id)
                            if not os.path.exists(thumb_filename):
                                self.create_thumbnail_data(repo_key, filename, self.cell_size, thumb_filename)
                            time.sleep(0.5)
                    except IndexError:
                        # the deque ran dry; idle until append() flips the flag again
                        self.queue_empty = True
                time.sleep(2.0)  # deliberately slow cadence: this is a background task
        except KeyboardInterrupt:
            pass

    def append(self, *items):
        """Queue (file_id, filename, repo_key, file_type) tuples for thumbnailing."""
        for item in items:
            self.to_cache.append(item)
        self.queue_empty = False

    def clear_cache(self):
        """Drop all pending thumbnail requests."""
        self.to_cache.clear()

    def get_filename_caption(self, filename):
        """Return the basename, shortened to at most 15 chars with an ellipsis."""
        name = os.path.basename(filename)
        if len(name) > 12:
            name = name[:5] + "..." + name[-7:]
        return name

    def delete_thumbnail(self, *items):
        """Remove the cached thumbnail of each given file id, ignoring missing files."""
        for file_id in items:
            try:
                os.remove(self.get_fullpath_from_file_id(file_id))
            # BUG FIX: `WindowsError` is undefined on non-Windows Python 3 and
            # would itself raise NameError there; OSError covers both platforms
            # (WindowsError is an alias of OSError where it exists).
            except OSError:
                pass

    def create_thumbnail_data(self, repo_key, filename, size, destination):
        """Build a PNG thumbnail (longest side == size) and return its path.

        Falls back to the bundled 'image_corrupt' icon path when the image
        cannot be parsed at all.
        """
        self.logger.debug("creating thumbnail for " + filename)
        file_fs = self.session.get_filesystem(repo_key)
        data = file_fs.getbytes(filename)
        img = Image.open(io.BytesIO(data))
        try:
            img.load()
        except SyntaxError as e:
            # PIL raises SyntaxError on some unparsable files; use the stock icon
            path = os.path.dirname(sys.argv[0])
            destination = os.path.join(path, "resources", "icons", "image_corrupt.png")
            self.logger.error("Failed to read default thumbnail at : " + destination)
            self.logger.error(e, exc_info=True)
            return destination
        except Exception:
            # best effort: keep whatever pixels were decoded
            # (ImageFile.LOAD_TRUNCATED_IMAGES is enabled at module level)
            pass
        if img.size[1] > img.size[0]:
            # portrait: fix the height, scale the width proportionally
            baseheight = size
            hpercent = (baseheight / float(img.size[1]))
            wsize = int((float(img.size[0]) * float(hpercent)))
            hsize = size
        else:
            # landscape / square: fix the width, scale the height proportionally
            basewidth = size
            wpercent = (basewidth / float(img.size[0]))
            hsize = int((float(img.size[1]) * float(wpercent)))
            wsize = size
        # LANCZOS is the same filter as the ANTIALIAS alias removed in Pillow 10.
        img = img.resize((wsize, hsize), PIL.Image.LANCZOS)
        img.convert('RGB').save(destination, format='PNG', optimize=True)
        return destination
|
[
"threading.Thread",
"io.BytesIO",
"os.path.join",
"os.path.basename",
"os.path.dirname",
"os.path.exists",
"logging.getLogger",
"time.sleep",
"os.path.expanduser",
"collections.deque"
] |
[((303, 330), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (320, 330), False, 'import logging\n'), ((424, 431), 'collections.deque', 'deque', ([], {}), '()\n', (429, 431), False, 'from collections import deque\n'), ((1488, 1521), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.run'}), '(target=self.run)\n', (1504, 1521), False, 'import threading\n'), ((2782, 2808), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (2798, 2808), False, 'import io, os\n'), ((3378, 3394), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (3388, 3394), False, 'import io, os\n'), ((1098, 1113), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (1108, 1113), False, 'from os.path import expanduser\n'), ((3484, 3512), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (3499, 3512), False, 'import io, os\n'), ((3539, 3600), 'os.path.join', 'os.path.join', (['path', '"""resources"""', '"""icons"""', '"""image_corrupt.png"""'], {}), "(path, 'resources', 'icons', 'image_corrupt.png')\n", (3551, 3600), False, 'import io, os\n'), ((2459, 2474), 'time.sleep', 'time.sleep', (['(2.0)'], {}), '(2.0)\n', (2469, 2474), False, 'import time\n'), ((2157, 2187), 'os.path.exists', 'os.path.exists', (['thumb_filename'], {}), '(thumb_filename)\n', (2171, 2187), False, 'import io, os\n'), ((2332, 2347), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2342, 2347), False, 'import time\n')]
|
#!/usr/bin/env python
# Seed per-order nuisance parameters (Chebyshev coefficients and covariance
# hyperparameters) from the final sample of a previous MCMC run.
import argparse
parser = argparse.ArgumentParser(description="Use the last runs to set the nuisance Chebyshev and covariance parameters.")
parser.add_argument("rundir", help="The relative path to the output directory containing the samples.")
args = parser.parse_args()
# NOTE(review): imports deliberately come after argument parsing -- presumably
# so `--help` responds without paying Starfish's import cost; confirm before moving.
import Starfish
from Starfish.model import PhiParam
from Starfish import utils
# Determine all of the orders we will be fitting
spectra = Starfish.data["files"]
orders = Starfish.data["orders"]
for spectrum_id in range(len(spectra)):
    for order in orders:
        npoly = Starfish.config["cheb_degree"]
        if order == orders[-1]:
            # Use cheb degree - 1 for the last order
            npoly -= 1
        fname_phi = Starfish.specfmt.format(spectrum_id, order) + "phi.json"
        phi = PhiParam.load(fname_phi)
        fname_mc = args.rundir + "/" + Starfish.specfmt.format(spectrum_id, order) + "/mc.hdf5"
        flatchain = utils.h5read(fname_mc)
        # take the last sample of the flattened chain as the new parameter values;
        # sample layout: [cheb_0..cheb_{npoly-1}, sigAmp, logAmp, l]
        pars = flatchain[-1,:]
        phi.cheb = pars[:npoly]
        phi.sigAmp = float(pars[npoly])
        phi.logAmp = float(pars[npoly + 1])
        phi.l = float(pars[npoly + 2])
        phi.save()
|
[
"Starfish.utils.h5read",
"Starfish.model.PhiParam.load",
"Starfish.specfmt.format",
"argparse.ArgumentParser"
] |
[((49, 172), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Use the last runs to set the nuisance Chebyshev and covariance parameters."""'}), "(description=\n 'Use the last runs to set the nuisance Chebyshev and covariance parameters.'\n )\n", (72, 172), False, 'import argparse\n'), ((806, 830), 'Starfish.model.PhiParam.load', 'PhiParam.load', (['fname_phi'], {}), '(fname_phi)\n', (819, 830), False, 'from Starfish.model import PhiParam\n'), ((948, 970), 'Starfish.utils.h5read', 'utils.h5read', (['fname_mc'], {}), '(fname_mc)\n', (960, 970), False, 'from Starfish import utils\n'), ((735, 778), 'Starfish.specfmt.format', 'Starfish.specfmt.format', (['spectrum_id', 'order'], {}), '(spectrum_id, order)\n', (758, 778), False, 'import Starfish\n'), ((871, 914), 'Starfish.specfmt.format', 'Starfish.specfmt.format', (['spectrum_id', 'order'], {}), '(spectrum_id, order)\n', (894, 914), False, 'import Starfish\n')]
|
import numpy as np
def add_noise(a):
    """Add uniform [0, 1) noise to a 2-D array; any other array is returned unchanged."""
    if len(a.shape) != 2:
        return a
    noise = np.random.rand(*a.shape)
    return a + noise
def dot_product(a, b):
    """Matrix-vector product of an (m, n) array with an (n,) vector.

    Returns the string "Incompatible dimensions" instead of raising when the
    shapes do not line up.
    """
    compatible = (
        len(a.shape) == 2
        and len(b.shape) == 1
        and a.shape[1] == b.shape[0]
    )
    if not compatible:
        return "Incompatible dimensions"
    return a.dot(b)
if __name__ == "__main__":
    # Smoke test: identity matrix times a ones vector must give back the vector.
    dim = 200
    a = np.eye(dim)
    b = np.ones((dim,))
    res1 = add_noise(a)
    res2 = dot_product(a, b)
    print(res2)
    # BUG FIX: the original `assert res2.all() == b.all()` reduced both sides to
    # a single truthy scalar (True == True) and could never fail; compare the
    # arrays element-wise instead.
    assert np.array_equal(res2, b)
|
[
"numpy.random.rand",
"numpy.eye",
"numpy.ones"
] |
[((391, 402), 'numpy.eye', 'np.eye', (['dim'], {}), '(dim)\n', (397, 402), True, 'import numpy as np\n'), ((411, 426), 'numpy.ones', 'np.ones', (['(dim,)'], {}), '((dim,))\n', (418, 426), True, 'import numpy as np\n'), ((76, 114), 'numpy.random.rand', 'np.random.rand', (['a.shape[0]', 'a.shape[1]'], {}), '(a.shape[0], a.shape[1])\n', (90, 114), True, 'import numpy as np\n')]
|