Dataset columns (⌀ = nullable):
- hexsha: string, length 40
- size: int64, 4 to 996k
- ext: string, 8 classes
- lang: string, 1 class
- max_stars_repo_path: string, length 4 to 245
- max_stars_repo_name: string, length 6 to 130
- max_stars_repo_head_hexsha: string, length 40
- max_stars_repo_licenses: list, length 1 to 10
- max_stars_count: int64, 1 to 191k ⌀
- max_stars_repo_stars_event_min_datetime: string, length 24 ⌀
- max_stars_repo_stars_event_max_datetime: string, length 24 ⌀
- max_issues_repo_path: string, length 4 to 245
- max_issues_repo_name: string, length 6 to 130
- max_issues_repo_head_hexsha: string, length 40
- max_issues_repo_licenses: list, length 1 to 10
- max_issues_count: int64, 1 to 67k ⌀
- max_issues_repo_issues_event_min_datetime: string, length 24 ⌀
- max_issues_repo_issues_event_max_datetime: string, length 24 ⌀
- max_forks_repo_path: string, length 4 to 245
- max_forks_repo_name: string, length 6 to 130
- max_forks_repo_head_hexsha: string, length 40
- max_forks_repo_licenses: list, length 1 to 10
- max_forks_count: int64, 1 to 105k ⌀
- max_forks_repo_forks_event_min_datetime: string, length 24 ⌀
- max_forks_repo_forks_event_max_datetime: string, length 24 ⌀
- content: string, length 4 to 996k
- avg_line_length: float64, 1.33 to 58.2k
- max_line_length: int64, 2 to 323k
- alphanum_fraction: float64, 0 to 0.97
- content_no_comment: string, length 0 to 946k
- is_comment_constant_removed: bool, 2 classes
- is_sharp_comment_removed: bool, 1 class
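A quick sketch of how these columns might be consumed, assuming the rows are exposed as a Hugging Face `datasets` table (the dataset path below is hypothetical):

from datasets import load_dataset

# Hypothetical dataset path; substitute the real repository name.
ds = load_dataset("example-org/python-files-with-stars", split="train", streaming=True)

# Keep Python files whose best-starred copy has at least 5 stars and whose
# comment-stripped body is non-empty; nullable (⌀) columns arrive as None.
for row in ds:
    stars = row["max_stars_count"]
    if stars is not None and stars >= 5 and row["content_no_comment"]:
        print(row["max_stars_repo_name"], row["max_stars_repo_path"], stars)
        break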
hexsha: 7908d337d6152662d0f863c174c8395f105d01a4 | size: 8,404 | ext: py | lang: Python
max_stars: path swift/proxy/controllers/account.py | repo fvennetier/swift | head 127bf9707c383ac737fce9f243bcc8f6655910df | licenses ["Apache-2.0"] | count 3 | events 2020-08-10T14:13:49.000Z – 2021-08-08T10:21:31.000Z
max_issues: path swift/proxy/controllers/account.py | repo fvennetier/swift | head 127bf9707c383ac737fce9f243bcc8f6655910df | licenses ["Apache-2.0"] | count 4 | events 2018-01-04T14:47:30.000Z – 2021-05-07T10:57:24.000Z
max_forks: path swift/proxy/controllers/account.py | repo fvennetier/swift | head 127bf9707c383ac737fce9f243bcc8f6655910df | licenses ["Apache-2.0"] | count 2 | events 2017-11-16T14:56:15.000Z – 2017-11-16T14:57:54.000Z
content:
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six.moves.urllib.parse import unquote
from swift import gettext_ as _
from swift.account.utils import account_listing_response
from swift.common.middleware.acl import parse_acl, format_acl
from swift.common.utils import public
from swift.common.constraints import check_metadata
from swift.common.http import HTTP_NOT_FOUND, HTTP_GONE
from swift.proxy.controllers.base import Controller, clear_info_cache, \
set_info_cache
from swift.common.middleware import listing_formats
from swift.common.swob import HTTPBadRequest, HTTPMethodNotAllowed
from swift.common.request_helpers import get_sys_meta_prefix
class AccountController(Controller):
"""WSGI controller for account requests"""
server_type = 'Account'
def __init__(self, app, account_name, **kwargs):
super(AccountController, self).__init__(app)
self.account_name = unquote(account_name)
if not self.app.allow_account_management:
self.allowed_methods.remove('PUT')
self.allowed_methods.remove('DELETE')
def add_acls_from_sys_metadata(self, resp):
if resp.environ['REQUEST_METHOD'] in ('HEAD', 'GET', 'PUT', 'POST'):
prefix = get_sys_meta_prefix('account') + 'core-'
name = 'access-control'
(extname, intname) = ('x-account-' + name, prefix + name)
acl_dict = parse_acl(version=2, data=resp.headers.pop(intname))
if acl_dict: # treat empty dict as empty header
resp.headers[extname] = format_acl(
version=2, acl_dict=acl_dict)
def GETorHEAD(self, req):
"""Handler for HTTP GET/HEAD requests."""
length_limit = self.get_name_length_limit()
if len(self.account_name) > length_limit:
resp = HTTPBadRequest(request=req)
resp.body = b'Account name length of %d longer than %d' % \
(len(self.account_name), length_limit)
# Don't cache this. We know the account doesn't exist because
# the name is bad; we don't need to cache that because it's
# really cheap to recompute.
return resp
partition = self.app.account_ring.get_part(self.account_name)
concurrency = self.app.account_ring.replica_count \
if self.app.get_policy_options(None).concurrent_gets else 1
node_iter = self.app.iter_nodes(self.app.account_ring, partition)
params = req.params
params['format'] = 'json'
req.params = params
resp = self.GETorHEAD_base(
req, _('Account'), node_iter, partition,
req.swift_entity_path.rstrip('/'), concurrency)
if resp.status_int == HTTP_NOT_FOUND:
if resp.headers.get('X-Account-Status', '').lower() == 'deleted':
resp.status = HTTP_GONE
elif self.app.account_autocreate:
# This is kind of a lie; we pretend like the account is
# there, but it's not. We'll create it as soon as something
# tries to write to it, but we don't need databases on disk
# to tell us that nothing's there.
#
# We set a header so that certain consumers can tell it's a
# fake listing. The important one is the PUT of a container
# to an autocreate account; the proxy checks to see if the
# account exists before actually performing the PUT and
# creates the account if necessary. If we feed it a perfect
# lie, it'll just try to create the container without
# creating the account, and that'll fail.
resp = account_listing_response(
self.account_name, req,
listing_formats.get_listing_content_type(req))
resp.headers['X-Backend-Fake-Account-Listing'] = 'yes'
# Cache this. We just made a request to a storage node and got
# up-to-date information for the account.
resp.headers['X-Backend-Recheck-Account-Existence'] = str(
self.app.recheck_account_existence)
set_info_cache(self.app, req.environ, self.account_name, None, resp)
if req.environ.get('swift_owner'):
self.add_acls_from_sys_metadata(resp)
else:
for header in self.app.swift_owner_headers:
resp.headers.pop(header, None)
return resp
@public
def PUT(self, req):
"""HTTP PUT request handler."""
if not self.app.allow_account_management:
return HTTPMethodNotAllowed(
request=req,
headers={'Allow': ', '.join(self.allowed_methods)})
error_response = check_metadata(req, 'account')
if error_response:
return error_response
length_limit = self.get_name_length_limit()
if len(self.account_name) > length_limit:
resp = HTTPBadRequest(request=req)
resp.body = b'Account name length of %d longer than %d' % \
(len(self.account_name), length_limit)
return resp
account_partition, accounts = \
self.app.account_ring.get_nodes(self.account_name)
headers = self.generate_request_headers(req, transfer=True)
clear_info_cache(self.app, req.environ, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'PUT',
req.swift_entity_path, [headers] * len(accounts))
self.add_acls_from_sys_metadata(resp)
return resp
@public
def POST(self, req):
"""HTTP POST request handler."""
length_limit = self.get_name_length_limit()
if len(self.account_name) > length_limit:
resp = HTTPBadRequest(request=req)
resp.body = b'Account name length of %d longer than %d' % \
(len(self.account_name), length_limit)
return resp
error_response = check_metadata(req, 'account')
if error_response:
return error_response
account_partition, accounts = \
self.app.account_ring.get_nodes(self.account_name)
headers = self.generate_request_headers(req, transfer=True)
clear_info_cache(self.app, req.environ, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'POST',
req.swift_entity_path, [headers] * len(accounts))
if resp.status_int == HTTP_NOT_FOUND and self.app.account_autocreate:
self.autocreate_account(req, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'POST',
req.swift_entity_path, [headers] * len(accounts))
self.add_acls_from_sys_metadata(resp)
return resp
@public
def DELETE(self, req):
"""HTTP DELETE request handler."""
# Extra safety in case someone typos a query string for an
# account-level DELETE request that was really meant to be caught by
# some middleware.
if req.query_string:
return HTTPBadRequest(request=req)
if not self.app.allow_account_management:
return HTTPMethodNotAllowed(
request=req,
headers={'Allow': ', '.join(self.allowed_methods)})
account_partition, accounts = \
self.app.account_ring.get_nodes(self.account_name)
headers = self.generate_request_headers(req)
clear_info_cache(self.app, req.environ, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'DELETE',
req.swift_entity_path, [headers] * len(accounts))
return resp
avg_line_length: 46.175824 | max_line_length: 77 | alphanum_fraction: 0.642789
content_no_comment:
from six.moves.urllib.parse import unquote
from swift import gettext_ as _
from swift.account.utils import account_listing_response
from swift.common.middleware.acl import parse_acl, format_acl
from swift.common.utils import public
from swift.common.constraints import check_metadata
from swift.common.http import HTTP_NOT_FOUND, HTTP_GONE
from swift.proxy.controllers.base import Controller, clear_info_cache, \
set_info_cache
from swift.common.middleware import listing_formats
from swift.common.swob import HTTPBadRequest, HTTPMethodNotAllowed
from swift.common.request_helpers import get_sys_meta_prefix
class AccountController(Controller):
server_type = 'Account'
def __init__(self, app, account_name, **kwargs):
super(AccountController, self).__init__(app)
self.account_name = unquote(account_name)
if not self.app.allow_account_management:
self.allowed_methods.remove('PUT')
self.allowed_methods.remove('DELETE')
def add_acls_from_sys_metadata(self, resp):
if resp.environ['REQUEST_METHOD'] in ('HEAD', 'GET', 'PUT', 'POST'):
prefix = get_sys_meta_prefix('account') + 'core-'
name = 'access-control'
(extname, intname) = ('x-account-' + name, prefix + name)
acl_dict = parse_acl(version=2, data=resp.headers.pop(intname))
if acl_dict:
resp.headers[extname] = format_acl(
version=2, acl_dict=acl_dict)
def GETorHEAD(self, req):
length_limit = self.get_name_length_limit()
if len(self.account_name) > length_limit:
resp = HTTPBadRequest(request=req)
resp.body = b'Account name length of %d longer than %d' % \
(len(self.account_name), length_limit)
return resp
partition = self.app.account_ring.get_part(self.account_name)
concurrency = self.app.account_ring.replica_count \
if self.app.get_policy_options(None).concurrent_gets else 1
node_iter = self.app.iter_nodes(self.app.account_ring, partition)
params = req.params
params['format'] = 'json'
req.params = params
resp = self.GETorHEAD_base(
req, _('Account'), node_iter, partition,
req.swift_entity_path.rstrip('/'), concurrency)
if resp.status_int == HTTP_NOT_FOUND:
if resp.headers.get('X-Account-Status', '').lower() == 'deleted':
resp.status = HTTP_GONE
elif self.app.account_autocreate:
# to tell us that nothing's there.
# fake listing. The important one is the PUT of a container
# to an autocreate account; the proxy checks to see if the
# account exists before actually performing the PUT and
# creates the account if necessary. If we feed it a perfect
# lie, it'll just try to create the container without
resp = account_listing_response(
self.account_name, req,
listing_formats.get_listing_content_type(req))
resp.headers['X-Backend-Fake-Account-Listing'] = 'yes'
# Cache this. We just made a request to a storage node and got
# up-to-date information for the account.
resp.headers['X-Backend-Recheck-Account-Existence'] = str(
self.app.recheck_account_existence)
set_info_cache(self.app, req.environ, self.account_name, None, resp)
if req.environ.get('swift_owner'):
self.add_acls_from_sys_metadata(resp)
else:
for header in self.app.swift_owner_headers:
resp.headers.pop(header, None)
return resp
@public
def PUT(self, req):
if not self.app.allow_account_management:
return HTTPMethodNotAllowed(
request=req,
headers={'Allow': ', '.join(self.allowed_methods)})
error_response = check_metadata(req, 'account')
if error_response:
return error_response
length_limit = self.get_name_length_limit()
if len(self.account_name) > length_limit:
resp = HTTPBadRequest(request=req)
resp.body = b'Account name length of %d longer than %d' % \
(len(self.account_name), length_limit)
return resp
account_partition, accounts = \
self.app.account_ring.get_nodes(self.account_name)
headers = self.generate_request_headers(req, transfer=True)
clear_info_cache(self.app, req.environ, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'PUT',
req.swift_entity_path, [headers] * len(accounts))
self.add_acls_from_sys_metadata(resp)
return resp
@public
def POST(self, req):
length_limit = self.get_name_length_limit()
if len(self.account_name) > length_limit:
resp = HTTPBadRequest(request=req)
resp.body = b'Account name length of %d longer than %d' % \
(len(self.account_name), length_limit)
return resp
error_response = check_metadata(req, 'account')
if error_response:
return error_response
account_partition, accounts = \
self.app.account_ring.get_nodes(self.account_name)
headers = self.generate_request_headers(req, transfer=True)
clear_info_cache(self.app, req.environ, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'POST',
req.swift_entity_path, [headers] * len(accounts))
if resp.status_int == HTTP_NOT_FOUND and self.app.account_autocreate:
self.autocreate_account(req, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'POST',
req.swift_entity_path, [headers] * len(accounts))
self.add_acls_from_sys_metadata(resp)
return resp
@public
def DELETE(self, req):
# Extra safety in case someone typos a query string for an
# account-level DELETE request that was really meant to be caught by
# some middleware.
if req.query_string:
return HTTPBadRequest(request=req)
if not self.app.allow_account_management:
return HTTPMethodNotAllowed(
request=req,
headers={'Allow': ', '.join(self.allowed_methods)})
account_partition, accounts = \
self.app.account_ring.get_nodes(self.account_name)
headers = self.generate_request_headers(req)
clear_info_cache(self.app, req.environ, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'DELETE',
req.swift_entity_path, [headers] * len(accounts))
return resp
is_comment_constant_removed: true | is_sharp_comment_removed: true
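The sysmeta round-trip in add_acls_from_sys_metadata above (pop the internal sysmeta header, re-expose it externally) can be illustrated without Swift itself; a minimal self-contained sketch, assuming the version-2 ACL format is the JSON dict that parse_acl/format_acl exchange:

import json

# Assumed expansion of get_sys_meta_prefix('account') + 'core-' + 'access-control'.
SYSMETA_ACL = "x-account-sysmeta-core-access-control"

def translate_acl_headers(headers):
    """Pop the internal ACL sysmeta header; if it parses to a non-empty
    dict, expose it as the external x-account-access-control header."""
    raw = headers.pop(SYSMETA_ACL, None)
    acl_dict = json.loads(raw) if raw else None   # stand-in for parse_acl(version=2, data=raw)
    if acl_dict:                                  # an empty dict is treated like a missing header
        headers["x-account-access-control"] = json.dumps(acl_dict)  # stand-in for format_acl(version=2, ...)
    return headers

print(translate_acl_headers({SYSMETA_ACL: '{"read-only": ["bob"]}'}))
# {'x-account-access-control': '{"read-only": ["bob"]}'}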
hexsha: 7908d35c4dbf2724356c58e5f3a0f424313b4a87 | size: 4,177 | ext: py | lang: Python
max_stars: path scripts/plotting/plot_lfes.py | repo acumb/LatticeDNAOrigami | head 0f2522286adc9815865d4abfc55f546da40e606b | licenses ["MIT"] | count 5 | events 2016-04-10T21:21:52.000Z – 2018-07-21T15:33:07.000Z
max_issues: path scripts/plotting/plot_lfes.py | repo acumb/LatticeDNAOrigami | head 0f2522286adc9815865d4abfc55f546da40e606b | licenses ["MIT"] | count 1 | events 2017-06-22T17:31:37.000Z – 2017-06-22T17:31:37.000Z
max_forks: path scripts/plotting/plot_lfes.py | repo acumb/LatticeDNAOrigami | head 0f2522286adc9815865d4abfc55f546da40e606b | licenses ["MIT"] | count 1 | events 2019-04-30T03:11:19.000Z – 2019-04-30T03:11:19.000Z
content:
#!/usr/bin/python
"""Plot LFEs of given order parameter."""
import argparse
import sys
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib import gridspec
from matplotlib.ticker import MaxNLocator
import numpy as np
import pandas as pd
from matplotlibstyles import styles
from matplotlibstyles import plotutils
def main():
args = parse_args()
f = setup_figure()
gs = gridspec.GridSpec(1, 1, f)
ax = f.add_subplot(gs[0, 0])
if args.post_lfes is None:
args.post_lfes = ['' for i in range(len(args.systems))]
plot_figure(f, ax, vars(args))
setup_axis(ax, args.tag)
#set_labels(ax)
save_figure(f, args.plot_filebase)
def setup_figure():
styles.set_default_style()
figsize = (plotutils.cm_to_inches(10), plotutils.cm_to_inches(7))
return plt.figure(figsize=figsize, dpi=300, constrained_layout=True)
def plot_figure(f, ax, args):
systems = args['systems']
varis = args['varis']
input_dir = args['input_dir']
tag = args['tag']
post_lfes = args['post_lfes']
stacking_enes = args['stacking_enes']
if stacking_enes is not None:
stacking_enes = [abs(e) for e in stacking_enes]
cmap = plotutils.create_truncated_colormap(
0.2, 0.8, name='plasma')
#mappable = plotutils.create_linear_mappable(
# cmap, abs(stacking_enes[0]), abs(stacking_enes[-1]))
#colors = [mappable.to_rgba(abs(e)) for e in stacking_enes]
increment = stacking_enes[1] - stacking_enes[0]
cmap, norm, colors = plotutils.create_segmented_colormap(cmap, stacking_enes, increment)
else:
cmap = cm.get_cmap('tab10')
colors = [cmap(i) for i in range(len(systems))]
for i in range(len(systems)):
system = systems[i]
vari = varis[i]
post_lfe = post_lfes[i]
if post_lfe != '':
post_lfe = '-' + post_lfe
inp_filebase = f'{input_dir}/{system}-{vari}_lfes{post_lfe}-{tag}'
lfes = pd.read_csv(f'{inp_filebase}.aves', sep=' ', index_col=0)
lfe_stds = pd.read_csv(f'{inp_filebase}.stds', sep=' ', index_col=0)
temp = lfes.columns[0]
lfes = lfes[temp]
lfes = lfes - lfes[0]
lfe_stds = lfe_stds[temp]
label = f'{system}-{vari}'
ax.errorbar(lfes.index, lfes, yerr=lfe_stds, marker='o', label=label,
color=colors[i])
if stacking_enes is not None:
label = r'$-U_\text{stack} / \SI{1000}{\kb\kelvin}$'
tick_labels = [f'${e/1000:.1f}$' for e in stacking_enes]
plotutils.plot_segmented_colorbar(
f, ax, cmap, norm, label, tick_labels, 'horizontal')
def setup_axis(ax, ylabel=None, xlabel=None, ylim_top=None, xlim_right=None):
ax.xaxis.set_major_locator(MaxNLocator(integer=True))
ax.set_ylabel(ylabel)
ax.set_xlabel(xlabel)
ax.set_ylim(top=ylim_top)
ax.set_xlim(right=xlim_right)
def set_labels(ax):
plt.legend()
def save_figure(f, plot_filebase):
#f.savefig(plot_filebase + '.pgf', transparent=True)
f.savefig(plot_filebase + '.pdf', transparent=True)
f.savefig(plot_filebase + '.png', transparent=True)
def parse_args():
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
'input_dir',
type=str,
help='Input directory')
parser.add_argument(
'plot_filebase',
type=str,
help='Plots directory')
parser.add_argument(
'tag',
type=str,
help='OP tag')
parser.add_argument(
'--systems',
nargs='+',
type=str,
help='Systems')
parser.add_argument(
'--varis',
nargs='+',
type=str,
help='Simulation variants')
parser.add_argument(
'--post_lfes',
nargs='+',
type=str,
help='Filename additions after lfes, if any')
parser.add_argument(
'--stacking_enes',
nargs='+',
type=float,
help='Stacking energies (for colormap)')
return parser.parse_args()
if __name__ == '__main__':
main()
avg_line_length: 28.414966 | max_line_length: 96 | alphanum_fraction: 0.625808
content_no_comment:
import argparse
import sys
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib import gridspec
from matplotlib.ticker import MaxNLocator
import numpy as np
import pandas as pd
from matplotlibstyles import styles
from matplotlibstyles import plotutils
def main():
args = parse_args()
f = setup_figure()
gs = gridspec.GridSpec(1, 1, f)
ax = f.add_subplot(gs[0, 0])
if args.post_lfes is None:
args.post_lfes = ['' for i in range(len(args.systems))]
plot_figure(f, ax, vars(args))
setup_axis(ax, args.tag)
save_figure(f, args.plot_filebase)
def setup_figure():
styles.set_default_style()
figsize = (plotutils.cm_to_inches(10), plotutils.cm_to_inches(7))
return plt.figure(figsize=figsize, dpi=300, constrained_layout=True)
def plot_figure(f, ax, args):
systems = args['systems']
varis = args['varis']
input_dir = args['input_dir']
tag = args['tag']
post_lfes = args['post_lfes']
stacking_enes = args['stacking_enes']
if stacking_enes is not None:
stacking_enes = [abs(e) for e in stacking_enes]
cmap = plotutils.create_truncated_colormap(
0.2, 0.8, name='plasma')
increment = stacking_enes[1] - stacking_enes[0]
cmap, norm, colors = plotutils.create_segmented_colormap(cmap, stacking_enes, increment)
else:
cmap = cm.get_cmap('tab10')
colors = [cmap(i) for i in range(len(systems))]
for i in range(len(systems)):
system = systems[i]
vari = varis[i]
post_lfe = post_lfes[i]
if post_lfe != '':
post_lfe = '-' + post_lfe
inp_filebase = f'{input_dir}/{system}-{vari}_lfes{post_lfe}-{tag}'
lfes = pd.read_csv(f'{inp_filebase}.aves', sep=' ', index_col=0)
lfe_stds = pd.read_csv(f'{inp_filebase}.stds', sep=' ', index_col=0)
temp = lfes.columns[0]
lfes = lfes[temp]
lfes = lfes - lfes[0]
lfe_stds = lfe_stds[temp]
label = f'{system}-{vari}'
ax.errorbar(lfes.index, lfes, yerr=lfe_stds, marker='o', label=label,
color=colors[i])
if stacking_enes is not None:
label = r'$-U_\text{stack} / \SI{1000}{\kb\kelvin}$'
tick_labels = [f'${e/1000:.1f}$' for e in stacking_enes]
plotutils.plot_segmented_colorbar(
f, ax, cmap, norm, label, tick_labels, 'horizontal')
def setup_axis(ax, ylabel=None, xlabel=None, ylim_top=None, xlim_right=None):
ax.xaxis.set_major_locator(MaxNLocator(integer=True))
ax.set_ylabel(ylabel)
ax.set_xlabel(xlabel)
ax.set_ylim(top=ylim_top)
ax.set_xlim(right=xlim_right)
def set_labels(ax):
plt.legend()
def save_figure(f, plot_filebase):
f.savefig(plot_filebase + '.pdf', transparent=True)
f.savefig(plot_filebase + '.png', transparent=True)
def parse_args():
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
'input_dir',
type=str,
help='Input directory')
parser.add_argument(
'plot_filebase',
type=str,
help='Plots directory')
parser.add_argument(
'tag',
type=str,
help='OP tag')
parser.add_argument(
'--systems',
nargs='+',
type=str,
help='Systems')
parser.add_argument(
'--varis',
nargs='+',
type=str,
help='Simulation variants')
parser.add_argument(
'--post_lfes',
nargs='+',
type=str,
help='Filename additions after lfes, if any')
parser.add_argument(
'--stacking_enes',
nargs='+',
type=float,
help='Stacking energies (for colormap)')
return parser.parse_args()
if __name__ == '__main__':
main()
is_comment_constant_removed: true | is_sharp_comment_removed: true
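plot_figure above assumes space-separated .aves/.stds files with the order parameter in the first column and one column per temperature, then shifts each LFE curve to zero at its first bin. A toy check of that read path (made-up numbers), as a sketch:

import io
import pandas as pd

# Stand-in for a <system>-<vari>_lfes-<tag>.aves file.
toy = io.StringIO("op 330\n0 0.5\n1 1.5\n2 2.5\n")
lfes = pd.read_csv(toy, sep=' ', index_col=0)

temp = lfes.columns[0]   # first (here only) temperature column, '330'
lfes = lfes[temp]
lfes = lfes - lfes[0]    # zero the curve at the first bin, as plot_figure does
print(lfes.tolist())     # [0.0, 1.0, 2.0]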
hexsha: 7908d48d572968773d17eac5920a57118da9966f | size: 663 | ext: py | lang: Python
max_stars: path car_rental/car_rental/users/tests/test_drf_urls.py | repo TheHaRyPL/Car-rental | head 21eb7b4c66bef5d6377dcfcc1006f7742b4339f5 | licenses ["MIT"] | count null | events null
max_issues: path car_rental/car_rental/users/tests/test_drf_urls.py | repo TheHaRyPL/Car-rental | head 21eb7b4c66bef5d6377dcfcc1006f7742b4339f5 | licenses ["MIT"] | count null | events null
max_forks: path car_rental/car_rental/users/tests/test_drf_urls.py | repo TheHaRyPL/Car-rental | head 21eb7b4c66bef5d6377dcfcc1006f7742b4339f5 | licenses ["MIT"] | count null | events null
content:
import pytest
from django.urls import resolve, reverse
from car_rental.users.models import User
pytestmark = pytest.mark.django_db
def test_user_detail(user: User):
assert (
reverse("api:user-detail", kwargs={"username": user.username})
== f"/api/users/{user.username}/"
)
assert resolve(f"/api/users/{user.username}/").view_name == "api:user-detail"
def test_user_list():
assert reverse("api:user-list") == "/api/users/"
assert resolve("/api/users/").view_name == "api:user-list"
def test_user_me():
assert reverse("api:user-me") == "/api/users/me/"
assert resolve("/api/users/me/").view_name == "api:user-me"
avg_line_length: 26.52 | max_line_length: 81 | alphanum_fraction: 0.669683
content_no_comment:
import pytest
from django.urls import resolve, reverse
from car_rental.users.models import User
pytestmark = pytest.mark.django_db
def test_user_detail(user: User):
assert (
reverse("api:user-detail", kwargs={"username": user.username})
== f"/api/users/{user.username}/"
)
assert resolve(f"/api/users/{user.username}/").view_name == "api:user-detail"
def test_user_list():
assert reverse("api:user-list") == "/api/users/"
assert resolve("/api/users/").view_name == "api:user-list"
def test_user_me():
assert reverse("api:user-me") == "/api/users/me/"
assert resolve("/api/users/me/").view_name == "api:user-me"
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 7908d4968ae6b535b2977d6f888376196da01990 | size: 12,318 | ext: py | lang: Python
max_stars: path examples/minkunet.py | repo zarzarj/MinkowskiEngine | head 1c1c09d23bd2147fa41cae25fa8837290c2bd07b | licenses ["MIT"] | count 1 | events 2022-01-17T13:06:05.000Z – 2022-01-17T13:06:05.000Z
max_issues: path examples/minkunet.py | repo zarzarj/MinkowskiEngine | head 1c1c09d23bd2147fa41cae25fa8837290c2bd07b | licenses ["MIT"] | count null | events null
max_forks: path examples/minkunet.py | repo zarzarj/MinkowskiEngine | head 1c1c09d23bd2147fa41cae25fa8837290c2bd07b | licenses ["MIT"] | count null | events null
content:
# Copyright (c) Chris Choy (chrischoy@ai.stanford.edu).
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Please cite "4D Spatio-Temporal ConvNets: Minkowski Convolutional Neural
# Networks", CVPR'19 (https://arxiv.org/abs/1904.08755) if you use any part
# of the code.
import torch
import torch.nn as nn
from torch.optim import SGD
import MinkowskiEngine as ME
from MinkowskiEngine.modules.resnet_block import BasicBlock, Bottleneck
from examples.resnet import ResNetBase
class BasicBlockShallow(nn.Module):
expansion = 1
def __init__(self,
inplanes,
planes,
stride=1,
dilation=1,
downsample=None,
bn_momentum=0.1,
dimension=-1):
super(BasicBlockShallow, self).__init__()
assert dimension > 0
self.conv1 = ME.MinkowskiConvolution(
inplanes, planes, kernel_size=1, stride=stride, dilation=dilation, dimension=dimension)
self.norm1 = ME.MinkowskiBatchNorm(planes, momentum=bn_momentum)
self.conv2 = ME.MinkowskiConvolution(
planes, planes, kernel_size=1, stride=1, dilation=dilation, dimension=dimension)
self.norm2 = ME.MinkowskiBatchNorm(planes, momentum=bn_momentum)
self.relu = ME.MinkowskiReLU(inplace=True)
self.downsample = downsample
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.norm1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.norm2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class MinkUNetBase(ResNetBase):
BLOCK = None
PLANES = None
DILATIONS = (1, 1, 1, 1, 1, 1, 1, 1)
LAYERS = (2, 2, 2, 2, 2, 2, 2, 2)
PLANES = (32, 64, 128, 256, 256, 128, 96, 96)
INIT_DIM = 32
OUT_TENSOR_STRIDE = 1
# To use the model, must call initialize_coords before forward pass.
# Once data is processed, call clear to reset the model before calling
# initialize_coords
def __init__(self, in_channels=3, out_channels=20, bn_momentum=0.1, D=3, **kwargs):
self.bn_momentum=bn_momentum
for name, value in kwargs.items():
if name != "self":
try:
setattr(self, name, value)
except Exception:
print(name, value)
ResNetBase.__init__(self, in_channels, out_channels, D)
def network_initialization(self, in_channels, out_channels, D):
# Output of the first conv concated to conv6
self.inplanes = self.INIT_DIM
self.conv0p1s1 = ME.MinkowskiConvolution(
in_channels, self.inplanes, kernel_size=5, dimension=D)
self.bn0 = ME.MinkowskiBatchNorm(self.inplanes, momentum=self.bn_momentum)
self.conv1p1s2 = ME.MinkowskiConvolution(
self.inplanes, self.inplanes, kernel_size=2, stride=2, dimension=D)
self.bn1 = ME.MinkowskiBatchNorm(self.inplanes, momentum=self.bn_momentum)
self.block1 = self._make_layer(self.BLOCK, self.PLANES[0],
self.LAYERS[0])
self.conv2p2s2 = ME.MinkowskiConvolution(
self.inplanes, self.inplanes, kernel_size=2, stride=2, dimension=D)
self.bn2 = ME.MinkowskiBatchNorm(self.inplanes, momentum=self.bn_momentum)
self.block2 = self._make_layer(self.BLOCK, self.PLANES[1],
self.LAYERS[1])
self.conv3p4s2 = ME.MinkowskiConvolution(
self.inplanes, self.inplanes, kernel_size=2, stride=2, dimension=D)
self.bn3 = ME.MinkowskiBatchNorm(self.inplanes, momentum=self.bn_momentum)
self.block3 = self._make_layer(self.BLOCK, self.PLANES[2],
self.LAYERS[2])
self.conv4p8s2 = ME.MinkowskiConvolution(
self.inplanes, self.inplanes, kernel_size=2, stride=2, dimension=D)
self.bn4 = ME.MinkowskiBatchNorm(self.inplanes, momentum=self.bn_momentum)
self.block4 = self._make_layer(self.BLOCK, self.PLANES[3],
self.LAYERS[3])
self.convtr4p16s2 = ME.MinkowskiConvolutionTranspose(
self.inplanes, self.PLANES[4], kernel_size=2, stride=2, dimension=D)
self.bntr4 = ME.MinkowskiBatchNorm(self.PLANES[4], momentum=self.bn_momentum)
self.inplanes = self.PLANES[4] + self.PLANES[2] * self.BLOCK.expansion
self.block5 = self._make_layer(self.BLOCK, self.PLANES[4],
self.LAYERS[4])
self.convtr5p8s2 = ME.MinkowskiConvolutionTranspose(
self.inplanes, self.PLANES[5], kernel_size=2, stride=2, dimension=D)
self.bntr5 = ME.MinkowskiBatchNorm(self.PLANES[5], momentum=self.bn_momentum)
self.inplanes = self.PLANES[5] + self.PLANES[1] * self.BLOCK.expansion
self.block6 = self._make_layer(self.BLOCK, self.PLANES[5],
self.LAYERS[5])
self.convtr6p4s2 = ME.MinkowskiConvolutionTranspose(
self.inplanes, self.PLANES[6], kernel_size=2, stride=2, dimension=D)
self.bntr6 = ME.MinkowskiBatchNorm(self.PLANES[6], momentum=self.bn_momentum)
self.inplanes = self.PLANES[6] + self.PLANES[0] * self.BLOCK.expansion
self.block7 = self._make_layer(self.BLOCK, self.PLANES[6],
self.LAYERS[6])
self.convtr7p2s2 = ME.MinkowskiConvolutionTranspose(
self.inplanes, self.PLANES[7], kernel_size=2, stride=2, dimension=D)
self.bntr7 = ME.MinkowskiBatchNorm(self.PLANES[7], momentum=self.bn_momentum)
self.inplanes = self.PLANES[7] + self.INIT_DIM
self.block8 = self._make_layer(self.BLOCK, self.PLANES[7],
self.LAYERS[7])
self.final = ME.MinkowskiConvolution(
self.PLANES[7] * self.BLOCK.expansion,
out_channels,
kernel_size=1,
bias=True,
dimension=D)
self.relu = ME.MinkowskiReLU(inplace=True)
def forward(self, in_dict, return_feats=False):
# print(in_dict['feats'].shape, in_dict['coords'].shape)
if self.quantization_mode == 'average':
quantization_mode=ME.SparseTensorQuantizationMode.UNWEIGHTED_AVERAGE
elif self.quantization_mode == 'random':
quantization_mode=ME.SparseTensorQuantizationMode.RANDOM_SUBSAMPLE
in_field = ME.TensorField(
features=in_dict['feats'],
coordinates=in_dict['coords'],
quantization_mode=quantization_mode,
# minkowski_algorithm=ME.MinkowskiAlgorithm.SPEED_OPTIMIZED,
minkowski_algorithm=ME.MinkowskiAlgorithm.MEMORY_EFFICIENT,
device=in_dict['feats'].device,
)
# print(in_field.device)
# x = ME.SparseTensor(in_dict['feats'], in_dict['coords'])
# print(in_field)
# print(in_dict['feats'].shape)
x = in_field.sparse()
out = self.conv0p1s1(x)
# print(out.coordinates.shape)
out = self.bn0(out)
out_p1 = self.relu(out)
out = self.conv1p1s2(out_p1)
# print(out.coordinates.shape)
out = self.bn1(out)
out = self.relu(out)
out_b1p2 = self.block1(out)
out = self.conv2p2s2(out_b1p2)
# print(out.coordinates.shape)
out = self.bn2(out)
out = self.relu(out)
out_b2p4 = self.block2(out)
out = self.conv3p4s2(out_b2p4)
# print(out.coordinates.shape)
out = self.bn3(out)
out = self.relu(out)
out_b3p8 = self.block3(out)
# tensor_stride=16
out = self.conv4p8s2(out_b3p8)
# print(out.coordinates.shape)
out = self.bn4(out)
out = self.relu(out)
out = self.block4(out)
# tensor_stride=8
out = self.convtr4p16s2(out)
out = self.bntr4(out)
out = self.relu(out)
out = ME.cat(out, out_b3p8)
out = self.block5(out)
# tensor_stride=4
out = self.convtr5p8s2(out)
out = self.bntr5(out)
out = self.relu(out)
out = ME.cat(out, out_b2p4)
out = self.block6(out)
# tensor_stride=2
out = self.convtr6p4s2(out)
out = self.bntr6(out)
out = self.relu(out)
out = ME.cat(out, out_b1p2)
out = self.block7(out)
# tensor_stride=1
out = self.convtr7p2s2(out)
out = self.bntr7(out)
out = self.relu(out)
out = ME.cat(out, out_p1)
out_feats = self.block8(out)
out = self.final(out_feats)
# if in_dict['rand_shift'] is not None:
# coords = []
# for i in range(len(in_dict['rand_shift'])):
# coords.append( out.coordinates_at(i) - in_dict['rand_shift'][i])
# feats = out.decomposed_features
# else:
# coords, feats = out.decomposed_coordinates_and_features
feats = out.slice(in_field).F
# feats = out.F
# feats = torch.cat(feats, axis=0)
if return_feats:
# return feats, out_feats, in_field
return feats, out_feats.slice(in_field).F
return feats
@staticmethod
def add_argparse_args(parent_parser):
parser = parent_parser.add_argument_group("MinkUNet")
parser.add_argument("--quantization_mode", type=str, default='average')
# parser.add_argument("--out_channels", type=int, default=32)
return parent_parser
def convert_sync_batchnorm(self):
self = ME.MinkowskiSyncBatchNorm.convert_sync_batchnorm(self)
class MinkUNet14(MinkUNetBase):
BLOCK = BasicBlock
LAYERS = (1, 1, 1, 1, 1, 1, 1, 1)
class MinkUNet18(MinkUNetBase):
BLOCK = BasicBlock
LAYERS = (2, 2, 2, 2, 2, 2, 2, 2)
class MinkUNet34(MinkUNetBase):
BLOCK = BasicBlock
LAYERS = (2, 3, 4, 6, 2, 2, 2, 2)
class MinkUNet34Shallow(MinkUNetBase):
BLOCK = BasicBlockShallow
LAYERS = (2, 3, 4, 6, 2, 2, 2, 2)
class MinkUNet50(MinkUNetBase):
BLOCK = Bottleneck
LAYERS = (2, 3, 4, 6, 2, 2, 2, 2)
class MinkUNet101(MinkUNetBase):
BLOCK = Bottleneck
LAYERS = (2, 3, 4, 23, 2, 2, 2, 2)
class MinkUNet14A(MinkUNet14):
PLANES = (32, 64, 128, 256, 128, 128, 96, 96)
class MinkUNet14B(MinkUNet14):
PLANES = (32, 64, 128, 256, 128, 128, 128, 128)
class MinkUNet14C(MinkUNet14):
PLANES = (32, 64, 128, 256, 192, 192, 128, 128)
class MinkUNet14D(MinkUNet14):
PLANES = (32, 64, 128, 256, 384, 384, 384, 384)
class MinkUNet18A(MinkUNet18):
PLANES = (32, 64, 128, 256, 128, 128, 96, 96)
class MinkUNet18B(MinkUNet18):
PLANES = (32, 64, 128, 256, 128, 128, 128, 128)
class MinkUNet18D(MinkUNet18):
PLANES = (32, 64, 128, 256, 384, 384, 384, 384)
class MinkUNet34A(MinkUNet34):
PLANES = (32, 64, 128, 256, 256, 128, 64, 64)
class MinkUNet34B(MinkUNet34):
PLANES = (32, 64, 128, 256, 256, 128, 64, 32)
class MinkUNet34C(MinkUNet34):
PLANES = (32, 64, 128, 256, 256, 128, 96, 96)
class MinkUNet34CShallow(MinkUNet34Shallow):
PLANES = (32, 64, 128, 256, 256, 128, 96, 96)
avg_line_length: 35.194286 | max_line_length: 99 | alphanum_fraction: 0.624696
content_no_comment:
# Networks", CVPR'19 (https://arxiv.org/abs/1904.08755) if you use any part
# of the code.
import torch
import torch.nn as nn
from torch.optim import SGD
import MinkowskiEngine as ME
from MinkowskiEngine.modules.resnet_block import BasicBlock, Bottleneck
from examples.resnet import ResNetBase
class BasicBlockShallow(nn.Module):
expansion = 1
def __init__(self,
inplanes,
planes,
stride=1,
dilation=1,
downsample=None,
bn_momentum=0.1,
dimension=-1):
super(BasicBlockShallow, self).__init__()
assert dimension > 0
self.conv1 = ME.MinkowskiConvolution(
inplanes, planes, kernel_size=1, stride=stride, dilation=dilation, dimension=dimension)
self.norm1 = ME.MinkowskiBatchNorm(planes, momentum=bn_momentum)
self.conv2 = ME.MinkowskiConvolution(
planes, planes, kernel_size=1, stride=1, dilation=dilation, dimension=dimension)
self.norm2 = ME.MinkowskiBatchNorm(planes, momentum=bn_momentum)
self.relu = ME.MinkowskiReLU(inplace=True)
self.downsample = downsample
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.norm1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.norm2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class MinkUNetBase(ResNetBase):
BLOCK = None
PLANES = None
DILATIONS = (1, 1, 1, 1, 1, 1, 1, 1)
LAYERS = (2, 2, 2, 2, 2, 2, 2, 2)
PLANES = (32, 64, 128, 256, 256, 128, 96, 96)
INIT_DIM = 32
OUT_TENSOR_STRIDE = 1
# To use the model, must call initialize_coords before forward pass.
# Once data is processed, call clear to reset the model before calling
# initialize_coords
def __init__(self, in_channels=3, out_channels=20, bn_momentum=0.1, D=3, **kwargs):
self.bn_momentum=bn_momentum
for name, value in kwargs.items():
if name != "self":
try:
setattr(self, name, value)
except Exception:
print(name, value)
ResNetBase.__init__(self, in_channels, out_channels, D)
def network_initialization(self, in_channels, out_channels, D):
# Output of the first conv concated to conv6
self.inplanes = self.INIT_DIM
self.conv0p1s1 = ME.MinkowskiConvolution(
in_channels, self.inplanes, kernel_size=5, dimension=D)
self.bn0 = ME.MinkowskiBatchNorm(self.inplanes, momentum=self.bn_momentum)
self.conv1p1s2 = ME.MinkowskiConvolution(
self.inplanes, self.inplanes, kernel_size=2, stride=2, dimension=D)
self.bn1 = ME.MinkowskiBatchNorm(self.inplanes, momentum=self.bn_momentum)
self.block1 = self._make_layer(self.BLOCK, self.PLANES[0],
self.LAYERS[0])
self.conv2p2s2 = ME.MinkowskiConvolution(
self.inplanes, self.inplanes, kernel_size=2, stride=2, dimension=D)
self.bn2 = ME.MinkowskiBatchNorm(self.inplanes, momentum=self.bn_momentum)
self.block2 = self._make_layer(self.BLOCK, self.PLANES[1],
self.LAYERS[1])
self.conv3p4s2 = ME.MinkowskiConvolution(
self.inplanes, self.inplanes, kernel_size=2, stride=2, dimension=D)
self.bn3 = ME.MinkowskiBatchNorm(self.inplanes, momentum=self.bn_momentum)
self.block3 = self._make_layer(self.BLOCK, self.PLANES[2],
self.LAYERS[2])
self.conv4p8s2 = ME.MinkowskiConvolution(
self.inplanes, self.inplanes, kernel_size=2, stride=2, dimension=D)
self.bn4 = ME.MinkowskiBatchNorm(self.inplanes, momentum=self.bn_momentum)
self.block4 = self._make_layer(self.BLOCK, self.PLANES[3],
self.LAYERS[3])
self.convtr4p16s2 = ME.MinkowskiConvolutionTranspose(
self.inplanes, self.PLANES[4], kernel_size=2, stride=2, dimension=D)
self.bntr4 = ME.MinkowskiBatchNorm(self.PLANES[4], momentum=self.bn_momentum)
self.inplanes = self.PLANES[4] + self.PLANES[2] * self.BLOCK.expansion
self.block5 = self._make_layer(self.BLOCK, self.PLANES[4],
self.LAYERS[4])
self.convtr5p8s2 = ME.MinkowskiConvolutionTranspose(
self.inplanes, self.PLANES[5], kernel_size=2, stride=2, dimension=D)
self.bntr5 = ME.MinkowskiBatchNorm(self.PLANES[5], momentum=self.bn_momentum)
self.inplanes = self.PLANES[5] + self.PLANES[1] * self.BLOCK.expansion
self.block6 = self._make_layer(self.BLOCK, self.PLANES[5],
self.LAYERS[5])
self.convtr6p4s2 = ME.MinkowskiConvolutionTranspose(
self.inplanes, self.PLANES[6], kernel_size=2, stride=2, dimension=D)
self.bntr6 = ME.MinkowskiBatchNorm(self.PLANES[6], momentum=self.bn_momentum)
self.inplanes = self.PLANES[6] + self.PLANES[0] * self.BLOCK.expansion
self.block7 = self._make_layer(self.BLOCK, self.PLANES[6],
self.LAYERS[6])
self.convtr7p2s2 = ME.MinkowskiConvolutionTranspose(
self.inplanes, self.PLANES[7], kernel_size=2, stride=2, dimension=D)
self.bntr7 = ME.MinkowskiBatchNorm(self.PLANES[7], momentum=self.bn_momentum)
self.inplanes = self.PLANES[7] + self.INIT_DIM
self.block8 = self._make_layer(self.BLOCK, self.PLANES[7],
self.LAYERS[7])
self.final = ME.MinkowskiConvolution(
self.PLANES[7] * self.BLOCK.expansion,
out_channels,
kernel_size=1,
bias=True,
dimension=D)
self.relu = ME.MinkowskiReLU(inplace=True)
def forward(self, in_dict, return_feats=False):
# print(in_dict['feats'].shape, in_dict['coords'].shape)
if self.quantization_mode == 'average':
quantization_mode=ME.SparseTensorQuantizationMode.UNWEIGHTED_AVERAGE
elif self.quantization_mode == 'random':
quantization_mode=ME.SparseTensorQuantizationMode.RANDOM_SUBSAMPLE
in_field = ME.TensorField(
features=in_dict['feats'],
coordinates=in_dict['coords'],
quantization_mode=quantization_mode,
# minkowski_algorithm=ME.MinkowskiAlgorithm.SPEED_OPTIMIZED,
minkowski_algorithm=ME.MinkowskiAlgorithm.MEMORY_EFFICIENT,
device=in_dict['feats'].device,
)
# print(in_field.device)
# x = ME.SparseTensor(in_dict['feats'], in_dict['coords'])
# print(in_field)
# print(in_dict['feats'].shape)
x = in_field.sparse()
out = self.conv0p1s1(x)
# print(out.coordinates.shape)
out = self.bn0(out)
out_p1 = self.relu(out)
out = self.conv1p1s2(out_p1)
# print(out.coordinates.shape)
out = self.bn1(out)
out = self.relu(out)
out_b1p2 = self.block1(out)
out = self.conv2p2s2(out_b1p2)
# print(out.coordinates.shape)
out = self.bn2(out)
out = self.relu(out)
out_b2p4 = self.block2(out)
out = self.conv3p4s2(out_b2p4)
# print(out.coordinates.shape)
out = self.bn3(out)
out = self.relu(out)
out_b3p8 = self.block3(out)
# tensor_stride=16
out = self.conv4p8s2(out_b3p8)
# print(out.coordinates.shape)
out = self.bn4(out)
out = self.relu(out)
out = self.block4(out)
# tensor_stride=8
out = self.convtr4p16s2(out)
out = self.bntr4(out)
out = self.relu(out)
out = ME.cat(out, out_b3p8)
out = self.block5(out)
# tensor_stride=4
out = self.convtr5p8s2(out)
out = self.bntr5(out)
out = self.relu(out)
out = ME.cat(out, out_b2p4)
out = self.block6(out)
# tensor_stride=2
out = self.convtr6p4s2(out)
out = self.bntr6(out)
out = self.relu(out)
out = ME.cat(out, out_b1p2)
out = self.block7(out)
# tensor_stride=1
out = self.convtr7p2s2(out)
out = self.bntr7(out)
out = self.relu(out)
out = ME.cat(out, out_p1)
out_feats = self.block8(out)
out = self.final(out_feats)
# if in_dict['rand_shift'] is not None:
# coords = []
# for i in range(len(in_dict['rand_shift'])):
# coords.append( out.coordinates_at(i) - in_dict['rand_shift'][i])
# feats = out.decomposed_features
# else:
# coords, feats = out.decomposed_coordinates_and_features
feats = out.slice(in_field).F
# feats = out.F
# feats = torch.cat(feats, axis=0)
if return_feats:
# return feats, out_feats, in_field
return feats, out_feats.slice(in_field).F
return feats
@staticmethod
def add_argparse_args(parent_parser):
parser = parent_parser.add_argument_group("MinkUNet")
parser.add_argument("--quantization_mode", type=str, default='average')
# parser.add_argument("--out_channels", type=int, default=32)
return parent_parser
def convert_sync_batchnorm(self):
self = ME.MinkowskiSyncBatchNorm.convert_sync_batchnorm(self)
class MinkUNet14(MinkUNetBase):
BLOCK = BasicBlock
LAYERS = (1, 1, 1, 1, 1, 1, 1, 1)
class MinkUNet18(MinkUNetBase):
BLOCK = BasicBlock
LAYERS = (2, 2, 2, 2, 2, 2, 2, 2)
class MinkUNet34(MinkUNetBase):
BLOCK = BasicBlock
LAYERS = (2, 3, 4, 6, 2, 2, 2, 2)
class MinkUNet34Shallow(MinkUNetBase):
BLOCK = BasicBlockShallow
LAYERS = (2, 3, 4, 6, 2, 2, 2, 2)
class MinkUNet50(MinkUNetBase):
BLOCK = Bottleneck
LAYERS = (2, 3, 4, 6, 2, 2, 2, 2)
class MinkUNet101(MinkUNetBase):
BLOCK = Bottleneck
LAYERS = (2, 3, 4, 23, 2, 2, 2, 2)
class MinkUNet14A(MinkUNet14):
PLANES = (32, 64, 128, 256, 128, 128, 96, 96)
class MinkUNet14B(MinkUNet14):
PLANES = (32, 64, 128, 256, 128, 128, 128, 128)
class MinkUNet14C(MinkUNet14):
PLANES = (32, 64, 128, 256, 192, 192, 128, 128)
class MinkUNet14D(MinkUNet14):
PLANES = (32, 64, 128, 256, 384, 384, 384, 384)
class MinkUNet18A(MinkUNet18):
PLANES = (32, 64, 128, 256, 128, 128, 96, 96)
class MinkUNet18B(MinkUNet18):
PLANES = (32, 64, 128, 256, 128, 128, 128, 128)
class MinkUNet18D(MinkUNet18):
PLANES = (32, 64, 128, 256, 384, 384, 384, 384)
class MinkUNet34A(MinkUNet34):
PLANES = (32, 64, 128, 256, 256, 128, 64, 64)
class MinkUNet34B(MinkUNet34):
PLANES = (32, 64, 128, 256, 256, 128, 64, 32)
class MinkUNet34C(MinkUNet34):
PLANES = (32, 64, 128, 256, 256, 128, 96, 96)
class MinkUNet34CShallow(MinkUNet34Shallow):
PLANES = (32, 64, 128, 256, 256, 128, 96, 96)
is_comment_constant_removed: true | is_sharp_comment_removed: true
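The inplanes arithmetic in network_initialization above encodes the U-Net skips: after each ME.cat, the next decoder block sees the transposed-conv output channels plus the matching encoder channels (scaled by the block's expansion). A plain-Python check of that bookkeeping for the BasicBlock variants (expansion 1) with the default PLANES:

PLANES = (32, 64, 128, 256, 256, 128, 96, 96)
INIT_DIM = 32
expansion = 1  # BasicBlock.expansion; Bottleneck would use 4

# Encoder skips feeding the four ME.cat calls: block3, block2, block1, stem.
skips = [PLANES[2] * expansion, PLANES[1] * expansion, PLANES[0] * expansion, INIT_DIM]

for dec, skip in zip(range(4, 8), skips):
    print(f"block{dec + 1} input channels: {PLANES[dec]} (convtr) + {skip} (skip) = {PLANES[dec] + skip}")
# block5: 256 + 128 = 384
# block6: 128 + 64  = 192
# block7: 96 + 32   = 128
# block8: 96 + 32   = 128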
hexsha: 7908d49b0bbc0c0c66a33b21167659dc4f383409 | size: 1,465 | ext: py | lang: Python
max_stars: path tests/sentry/logging/test_handler.py | repo learninto/sentry | head 4f9f564841498b3af49c1677d6b61f3e47b01923 | licenses ["BSD-3-Clause"] | count 1 | events 2019-10-17T17:46:16.000Z – 2019-10-17T17:46:16.000Z
max_issues: path tests/sentry/logging/test_handler.py | repo learninto/sentry | head 4f9f564841498b3af49c1677d6b61f3e47b01923 | licenses ["BSD-3-Clause"] | count null | events null
max_forks: path tests/sentry/logging/test_handler.py | repo learninto/sentry | head 4f9f564841498b3af49c1677d6b61f3e47b01923 | licenses ["BSD-3-Clause"] | count null | events null
content:
from __future__ import absolute_import
import pytest
import logging
import mock
from sentry.logging.handlers import StructLogHandler
@pytest.fixture
def handler():
return StructLogHandler()
@pytest.fixture
def logger():
return mock.MagicMock()
def make_logrecord(**extra):
kwargs = dict(
name="name",
level=logging.INFO,
pathname="pathname",
lineno=10,
msg="msg",
args=None,
exc_info=None,
)
kwargs.update(extra or {})
return logging.LogRecord(**kwargs)
@pytest.mark.parametrize(
"record,out",
(
({}, {}),
({"msg": "%s", "args": (1,)}, {"event": "%s", "positional_args": (1,)}),
({"args": ({"a": 1},)}, {"positional_args": ({"a": 1},)}),
({"exc_info": True}, {"exc_info": True}),
),
)
def test_emit(record, out, handler, logger):
record = make_logrecord(**record)
handler.emit(record, logger=logger)
expected = dict(level=logging.INFO, event="msg", name="name")
expected.update(out)
logger.log.assert_called_once_with(**expected)
@mock.patch("sentry.logging.handlers.metrics")
def test_log_to_metric(metrics):
logger = logging.getLogger("django.request")
logger.warn("CSRF problem")
metrics.incr.assert_called_once_with("django.request.csrf_problem", skip_internal=False)
metrics.reset_mock()
logger.warn("Some other problem we don't care about")
assert metrics.incr.call_count == 0
avg_line_length: 24.016393 | max_line_length: 92 | alphanum_fraction: 0.640273
content_no_comment:
from __future__ import absolute_import
import pytest
import logging
import mock
from sentry.logging.handlers import StructLogHandler
@pytest.fixture
def handler():
return StructLogHandler()
@pytest.fixture
def logger():
return mock.MagicMock()
def make_logrecord(**extra):
kwargs = dict(
name="name",
level=logging.INFO,
pathname="pathname",
lineno=10,
msg="msg",
args=None,
exc_info=None,
)
kwargs.update(extra or {})
return logging.LogRecord(**kwargs)
@pytest.mark.parametrize(
"record,out",
(
({}, {}),
({"msg": "%s", "args": (1,)}, {"event": "%s", "positional_args": (1,)}),
({"args": ({"a": 1},)}, {"positional_args": ({"a": 1},)}),
({"exc_info": True}, {"exc_info": True}),
),
)
def test_emit(record, out, handler, logger):
record = make_logrecord(**record)
handler.emit(record, logger=logger)
expected = dict(level=logging.INFO, event="msg", name="name")
expected.update(out)
logger.log.assert_called_once_with(**expected)
@mock.patch("sentry.logging.handlers.metrics")
def test_log_to_metric(metrics):
logger = logging.getLogger("django.request")
logger.warn("CSRF problem")
metrics.incr.assert_called_once_with("django.request.csrf_problem", skip_internal=False)
metrics.reset_mock()
logger.warn("Some other problem we don't care about")
assert metrics.incr.call_count == 0
is_comment_constant_removed: true | is_sharp_comment_removed: true
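The parametrized cases above pin down a simple field mapping: msg becomes event, args become positional_args, name and level pass through, and exc_info rides along. Written out as a stand-alone translation (a sketch of the mapping the test asserts, not Sentry's actual handler code):

import logging

def to_structlog_kwargs(record):
    # Baseline mapping checked by test_emit.
    kwargs = {"level": record.levelno, "event": record.msg, "name": record.name}
    if record.args:
        kwargs["positional_args"] = record.args  # '%s'-style args stay unformatted
    if record.exc_info:
        kwargs["exc_info"] = record.exc_info
    return kwargs

rec = logging.LogRecord(name="name", level=logging.INFO, pathname="pathname",
                        lineno=10, msg="%s", args=(1,), exc_info=None)
print(to_structlog_kwargs(rec))
# {'level': 20, 'event': '%s', 'name': 'name', 'positional_args': (1,)}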
hexsha: 7908d4cc766fa19b9dc718d41898eca8af92e658 | size: 2,607 | ext: py | lang: Python
max_stars: path telechat/__init__.py | repo Sefank/telechat | head 8439f1565d363ffb18c44a647fc6440e2f3b6b41 | licenses ["MIT"] | count null | events null
max_issues: path telechat/__init__.py | repo Sefank/telechat | head 8439f1565d363ffb18c44a647fc6440e2f3b6b41 | licenses ["MIT"] | count null | events null
max_forks: path telechat/__init__.py | repo Sefank/telechat | head 8439f1565d363ffb18c44a647fc6440e2f3b6b41 | licenses ["MIT"] | count null | events null
content:
import os
import click
from flask import Flask, render_template
from flask_wtf.csrf import CSRFError
from telechat.extensions import db, login_manager, csrf, moment
from telechat.blueprints.auth import auth_bp
from telechat.blueprints.chat import chat_bp
from telechat.blueprints.admin import admin_bp
from telechat.blueprints.oauth import oauth_bp
from telechat.settings import config
from telechat.models import User, Message
def register_extensions(app: Flask):
"""注册需要的扩展程序包到 Flask 程序实例 app 中"""
db.init_app(app) # 数据库 ORM
login_manager.init_app(app) # 登录状态管理
csrf.init_app(app) # CSRF 令牌管理
moment.init_app(app) # 时间格式化管理
def register_blueprints(app: Flask):
"""注册需要的蓝图程序包到 Flask 程序实例 app 中"""
app.register_blueprint(auth_bp)
app.register_blueprint(oauth_bp)
app.register_blueprint(chat_bp)
app.register_blueprint(admin_bp)
def register_errors(app: Flask):
"""注册需要的错误处理程序包到 Flask 程序实例 app 中"""
@app.errorhandler(400) # Bad Request 客户端请求的语法错误,服务器无法理解
def bad_request(e):
return render_template('error.html', description=e.description, code=e.code), 400
@app.errorhandler(404) # Not Found 服务器无法根据客户端的请求找到资源(网页)
def page_not_found(e):
return render_template('error.html', description=e.description, code=e.code), 404
@app.errorhandler(500) # Internal Server Error 服务器内部错误,无法完成请求
def internal_server_error(e):
return render_template('error.html', description="服务器内部错误,无法完成请求!", code="500"), 500
@app.errorhandler(CSRFError) # CSRF 验证失败
def csrf_error_handle(e):
return render_template('error.html', description=e.description, code=e.code), 400
def register_commands(app: Flask):
"""注册需要的CLI命令程序包到 Flask 程序实例 app 中"""
@app.cli.command()
@click.option('--drop', is_flag=True, help="创建之前销毁数据库")
def initdb(drop: bool):
"""初始化数据库结构"""
if drop:
# 确认删除
pass
pass
@app.cli.command()
@click.option('--num', default=300, help="消息数量,默认为300")
def forge(num: int):
"""生成虚拟数据"""
pass
def create_app(config_name=None):
"""程序工厂:创建 Flask 程序,加载配置,注册扩展、蓝图等程序包"""
# 从环境变量载入配置环境名称
if config_name is None:
config_name = os.getenv('FLASK_CONFIG', 'development')
# 创建 Flask 程序实例,程序名称为 telechat
app = Flask('telechat')
# 载入相应的配置
app.config.from_object(config[config_name])
# 注册程序包
register_extensions(app) # 扩展
register_blueprints(app) # 蓝图
register_errors(app) # 错误处理
register_commands(app) # CLI命令
# 返回已配置好的 Flask 程序实例
return app
avg_line_length: 28.336957 | max_line_length: 92 | alphanum_fraction: 0.693901
content_no_comment:
import os
import click
from flask import Flask, render_template
from flask_wtf.csrf import CSRFError
from telechat.extensions import db, login_manager, csrf, moment
from telechat.blueprints.auth import auth_bp
from telechat.blueprints.chat import chat_bp
from telechat.blueprints.admin import admin_bp
from telechat.blueprints.oauth import oauth_bp
from telechat.settings import config
from telechat.models import User, Message
def register_extensions(app: Flask):
db.init_app(app)
login_manager.init_app(app)
csrf.init_app(app)
moment.init_app(app)
def register_blueprints(app: Flask):
app.register_blueprint(auth_bp)
app.register_blueprint(oauth_bp)
app.register_blueprint(chat_bp)
app.register_blueprint(admin_bp)
def register_errors(app: Flask):
@app.errorhandler(400)
def bad_request(e):
return render_template('error.html', description=e.description, code=e.code), 400
@app.errorhandler(404)
def page_not_found(e):
return render_template('error.html', description=e.description, code=e.code), 404
@app.errorhandler(500)
def internal_server_error(e):
return render_template('error.html', description="服务器内部错误,无法完成请求!", code="500"), 500
@app.errorhandler(CSRFError)
def csrf_error_handle(e):
return render_template('error.html', description=e.description, code=e.code), 400
def register_commands(app: Flask):
@app.cli.command()
@click.option('--drop', is_flag=True, help="创建之前销毁数据库")
def initdb(drop: bool):
if drop:
pass
pass
@app.cli.command()
@click.option('--num', default=300, help="消息数量,默认为300")
def forge(num: int):
pass
def create_app(config_name=None):
if config_name is None:
config_name = os.getenv('FLASK_CONFIG', 'development')
app = Flask('telechat')
app.config.from_object(config[config_name])
register_extensions(app)
register_blueprints(app)
register_errors(app)
register_commands(app)
return app
is_comment_constant_removed: true | is_sharp_comment_removed: true
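Typical use of the factory above (a sketch; the available config names come from telechat.settings, which is not shown here):

from telechat import create_app

# Pass a name explicitly, or omit it to fall back to FLASK_CONFIG, then 'development'.
app = create_app("development")

if __name__ == "__main__":
    app.run(debug=True)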
hexsha: 7908d5b20e263d111b51de7124bd201f44c00f47 | size: 2,073 | ext: py | lang: Python
max_stars: path resources/mgltools_x86_64Linux2_1.5.6/MGLToolsPckgs/Volume/Operators/clip.py | repo J-E-J-S/aaRS-Pipeline | head 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | licenses ["MIT"] | count 8 | events 2021-12-14T21:30:01.000Z – 2022-02-14T11:30:03.000Z
max_issues: path resources/mgltools_x86_64Linux2_1.5.6/MGLToolsPckgs/Volume/Operators/clip.py | repo J-E-J-S/aaRS-Pipeline | head 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | licenses ["MIT"] | count null | events null
max_forks: path resources/mgltools_x86_64Linux2_1.5.6/MGLToolsPckgs/Volume/Operators/clip.py | repo J-E-J-S/aaRS-Pipeline | head 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | licenses ["MIT"] | count null | events null
content:
from DejaVu.IndexedPolygons import IndexedPolygons
from Volume.Grid3D import Grid3D
class ClipMeshWithMask:
"""Clip method of this class takes a mesh i.e. IndexedPolgons and
selects all vertices which fall onto voxel witha true value in a mask grid.
It returns a new IndexedPolygons geometry with the triangles for which
3 vertices are selected.
"""
def __init__(self):
pass
def clip(self, mesh, grid):
assert isinstance(mesh, IndexedPolygons)
assert isinstance(grid, Grid3D)
origin = grid.getOriginReal()
stepSize = grid.getStepSizeReal()
dx, dy, dz = grid.dimensions
vertices = mesh.vertexSet.vertices.array
triangles = mesh.faceSet.faces.array
# compute the voxel on which each vertex falls
# array of indiced into grid for the vertices
vertInd = ((vertices-origin)/stepSize).astype('i')
# select the vertices on voxels that have a value True
selVert = []
vertEquiv = {}
numVertSel = 0
nvert = 0
data = grid.data
for i,j,k in vertInd:
if i>=0 and i<dx:
if j>=0 and j<dy:
if k>=0 and k<dz:
if data[i,j,k]:
selVert.append( vertices[nvert] )
vertEquiv[nvert] = numVertSel
numVertSel += 1
nvert += 1
# build the set of faces whose 3 vertices were all selected,
# remapping each original vertex index to its index in selVert
selFaces = []
for i,j,k in triangles:
nbvs = 0
v1 = vertEquiv.get(i, None)
if v1 is not None: nbvs +=1
v2 = vertEquiv.get(j, None)
if v2 is not None: nbvs +=1
v3 = vertEquiv.get(k, None)
if v3 is not None: nbvs +=1
if nbvs == 3:
selFaces.append( (v1,v2,v3) )
clippedGeom = IndexedPolygons(mesh.name+'_clipped', vertices=selVert,
faces=selFaces)
return clippedGeom
avg_line_length: 33.435484 | max_line_length: 77 | alphanum_fraction: 0.549445
content_no_comment:
from DejaVu.IndexedPolygons import IndexedPolygons
from Volume.Grid3D import Grid3D
class ClipMeshWithMask:
def __init__(self):
pass
def clip(self, mesh, grid):
assert isinstance(mesh, IndexedPolygons)
assert isinstance(grid, Grid3D)
origin = grid.getOriginReal()
stepSize = grid.getStepSizeReal()
dx, dy, dz = grid.dimensions
vertices = mesh.vertexSet.vertices.array
triangles = mesh.faceSet.faces.array
vertInd = ((vertices-origin)/stepSize).astype('i')
selVert = []
vertEquiv = {}
numVertSel = 0
nvert = 0
data = grid.data
for i,j,k in vertInd:
if i>=0 and i<dx:
if j>=0 and j<dy:
if k>=0 and k<dz:
if data[i,j,k]:
selVert.append( vertices[nvert] )
vertEquiv[nvert] = numVertSel
numVertSel += 1
nvert += 1
selFaces = []
for i,j,k in triangles:
nbvs = 0
v1 = vertEquiv.get(i, None)
if v1 is not None: nbvs +=1
v2 = vertEquiv.get(j, None)
if v2 is not None: nbvs +=1
v3 = vertEquiv.get(k, None)
if v3 is not None: nbvs +=1
if nbvs == 3:
selFaces.append( (v1,v2,v3) )
clippedGeom = IndexedPolygons(mesh.name+'_clipped', vertices=selVert,
faces=selFaces)
return clippedGeom
is_comment_constant_removed: true | is_sharp_comment_removed: true
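The heart of clip() above is the vertex-to-voxel mapping ((vertices - origin) / stepSize).astype('i'), followed by a bounds check and a mask lookup. A small numpy check with made-up grid parameters:

import numpy as np

origin = np.zeros(3)                  # made-up grid origin
stepSize = np.array([0.5, 0.5, 0.5])  # made-up voxel edge lengths
mask = np.zeros((4, 4, 4), dtype=bool)
mask[2, 1, 0] = True                  # only one voxel is "kept"

vertices = np.array([[1.2, 0.6, 0.1],   # lands in voxel (2, 1, 0) -> selected
                     [1.9, 1.9, 1.9]])  # lands in voxel (3, 3, 3) -> rejected

vertInd = ((vertices - origin) / stepSize).astype('i')  # same expression as clip()
for n, (i, j, k) in enumerate(vertInd.tolist()):
    inside = 0 <= i < 4 and 0 <= j < 4 and 0 <= k < 4
    print(n, (i, j, k), bool(mask[i, j, k]) if inside else False)
# 0 (2, 1, 0) True
# 1 (3, 3, 3) False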
hexsha: 7908d661dc9f49748aba23e48f0244f72f2bdfa7 | size: 21,846 | ext: py | lang: Python
max_stars: path etl/parsers/etw/Microsoft_Windows_ShellCommon_StartLayoutPopulation.py | repo IMULMUL/etl-parser | head 76b7c046866ce0469cd129ee3f7bb3799b34e271 | licenses ["Apache-2.0"] | count 104 | events 2020-03-04T14:31:31.000Z – 2022-03-28T02:59:36.000Z
max_issues: path etl/parsers/etw/Microsoft_Windows_ShellCommon_StartLayoutPopulation.py | repo IMULMUL/etl-parser | head 76b7c046866ce0469cd129ee3f7bb3799b34e271 | licenses ["Apache-2.0"] | count 7 | events 2020-04-20T09:18:39.000Z – 2022-03-19T17:06:19.000Z
max_forks: path etl/parsers/etw/Microsoft_Windows_ShellCommon_StartLayoutPopulation.py | repo IMULMUL/etl-parser | head 76b7c046866ce0469cd129ee3f7bb3799b34e271 | licenses ["Apache-2.0"] | count 16 | events 2020-03-05T18:55:59.000Z – 2022-03-01T10:19:28.000Z
content:
# -*- coding: utf-8 -*-
"""
Microsoft-Windows-ShellCommon-StartLayoutPopulation
GUID : 97ca8142-10b1-4baa-9fbb-70a7d11231c3
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1_0(Etw):
pattern = Struct(
"collectionName" / WString,
"initializationReason" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=3, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_3_0(Etw):
pattern = Struct(
"layoutSelectionSerializedString" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=5, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_5_0(Etw):
pattern = Struct(
"TaskHResultValue" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=7, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_7_0(Etw):
pattern = Struct(
"layoutProviderName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=8, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_8_0(Etw):
pattern = Struct(
"layoutProviderName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=11, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_11_0(Etw):
pattern = Struct(
"layoutProviderName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=12, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_12_0(Etw):
pattern = Struct(
"layoutProviderName" / WString,
"HResultValue" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=15, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_15_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=16, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_16_0(Etw):
pattern = Struct(
"tileIdentifier" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=17, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_17_0(Etw):
pattern = Struct(
"tileIdentifier" / WString,
"failureDetails" / CString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=18, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_18_0(Etw):
pattern = Struct(
"TaskHResultValue" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=19, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_19_0(Etw):
pattern = Struct(
"tileData" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=21, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_21_0(Etw):
pattern = Struct(
"failureDetails" / CString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=22, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_22_0(Etw):
pattern = Struct(
"TaskHResultValue" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=23, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_23_0(Etw):
pattern = Struct(
"tileIdentifier" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=28, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_28_0(Etw):
pattern = Struct(
"tileIdentifier" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=29, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_29_0(Etw):
pattern = Struct(
"tileAumid" / WString,
"appSize" / Int64ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=30, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_30_0(Etw):
pattern = Struct(
"tileAumid" / WString,
"appSize" / Int64ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=31, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_31_0(Etw):
pattern = Struct(
"appSize" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=32, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_32_0(Etw):
pattern = Struct(
"tileIdentifier" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=33, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_33_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=35, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_35_0(Etw):
pattern = Struct(
"TaskHResultValue" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=38, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_38_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=39, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_39_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=41, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_41_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=42, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_42_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=45, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_45_0(Etw):
pattern = Struct(
"containerName" / WString,
"containerXPosition" / Int32ul,
"containerYPosition" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=46, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_46_0(Etw):
pattern = Struct(
"containerName" / WString,
"containerXPosition" / Int32ul,
"containerYPosition" / Int32ul,
"failureDetails" / CString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=49, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_49_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=52, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_52_0(Etw):
pattern = Struct(
"tileData" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=53, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_53_0(Etw):
pattern = Struct(
"tileIdentifier" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=54, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_54_0(Etw):
pattern = Struct(
"groupData" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=55, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_55_0(Etw):
pattern = Struct(
"groupData" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=56, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_56_0(Etw):
pattern = Struct(
"containerName" / WString,
"containerXPosition" / Int32ul,
"containerYPosition" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=57, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_57_0(Etw):
pattern = Struct(
"containerName" / WString,
"containerXPosition" / Int32ul,
"containerYPosition" / Int32ul,
"failureDetails" / CString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=58, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_58_0(Etw):
pattern = Struct(
"TaskHResultValue" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=60, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_60_0(Etw):
pattern = Struct(
"TaskHResultValue" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=62, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_62_0(Etw):
pattern = Struct(
"TaskHResultValue" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=63, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_63_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=64, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_64_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=65, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_65_0(Etw):
pattern = Struct(
"value1" / WString,
"value2" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1002, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1002_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1004, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1004_0(Etw):
pattern = Struct(
"itemId" / WString,
"itemName" / WString,
"groupCount" / Int32ul,
"tileCount" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1005, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1005_0(Etw):
pattern = Struct(
"itemId" / WString,
"itemName" / WString,
"groupCount" / Int32ul,
"tileCount" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1100, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1100_0(Etw):
pattern = Struct(
"itemId" / Guid,
"containerId" / Guid,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1101, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1101_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid,
"containerId" / Guid,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1102, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1102_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid,
"containerId" / Guid,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1103, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1103_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid,
"containerId" / Guid,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1104, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1104_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid,
"containerId" / Guid,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1105, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1105_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid,
"containerId" / Guid,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1106, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1106_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1107, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1107_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1200, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1200_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1202, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1202_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1203, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1203_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1204, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1204_0(Etw):
pattern = Struct(
"itemId" / Guid,
"containerId" / Guid,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1205, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1205_0(Etw):
pattern = Struct(
"itemId" / Guid,
"containerId" / Guid,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1206, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1206_0(Etw):
pattern = Struct(
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1207, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1207_0(Etw):
pattern = Struct(
"itemId" / Guid,
"containerId" / Guid,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1208, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1208_0(Etw):
pattern = Struct(
"itemId" / Guid,
"containerId" / Guid,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1209, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1209_0(Etw):
pattern = Struct(
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1250, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1250_0(Etw):
pattern = Struct(
"savedVersion" / Int64ul,
"itemId" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1252, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1252_0(Etw):
pattern = Struct(
"savedVersion" / Int64ul,
"itemId" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1253, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1253_0(Etw):
pattern = Struct(
"savedVersion" / Int64ul,
"itemId" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1300, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1300_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1301, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1301_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1303, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1303_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1400, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1400_0(Etw):
pattern = Struct(
"tileIdentifier" / WString,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1401, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1401_0(Etw):
pattern = Struct(
"tileIdentifier" / WString,
"collectionName" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1404, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1404_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1405, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1405_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1900, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1900_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1902, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1902_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1903, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1903_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1904, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1904_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1905, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1905_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=1906, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_1906_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2101, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2101_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2102, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2102_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid,
"savedVersion" / Int64ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2103, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2103_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2110, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2110_0(Etw):
pattern = Struct(
"itemName" / WString,
"size" / Int64ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2111, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2111_0(Etw):
pattern = Struct(
"itemName" / WString,
"size" / Int64ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2112, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2112_0(Etw):
pattern = Struct(
"itemName" / WString,
"size" / Int64ul,
"savedVersion" / Int64ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2150, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2150_0(Etw):
pattern = Struct(
"itemName" / WString,
"itemId" / Guid
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2151, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2151_0(Etw):
pattern = Struct(
"value" / WString
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2152, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2152_0(Etw):
pattern = Struct(
"packageFamilyName" / WString,
"InstallState" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2153, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2153_0(Etw):
pattern = Struct(
"packageFamilyName" / WString,
"InstallState" / Int32ul
)
@declare(guid=guid("97ca8142-10b1-4baa-9fbb-70a7d11231c3"), event_id=2154, version=0)
class Microsoft_Windows_ShellCommon_StartLayoutPopulation_2154_0(Etw):
pattern = Struct(
"value" / WString
)
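

# A minimal decoding sketch under one stated assumption: WString in etl.utils
# reads a null-terminated UTF-16-LE string, the usual encoding for ETW string
# payloads. With that stand-in, event 1 of this provider parses as follows.
def _parse_demo():
    from construct import Struct, Int32ul, CString
    WString = CString("utf_16_le")   # stand-in for etl.utils.WString (assumed)
    pattern = Struct(
        "collectionName" / WString,
        "initializationReason" / Int32ul,
    )
    payload = ("StartLayout".encode("utf_16_le") + b"\x00\x00"
               + (2).to_bytes(4, "little"))
    event = pattern.parse(payload)
    assert event.collectionName == "StartLayout"
    assert event.initializationReason == 2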


# --- test/test_pymbolic.py
# --- from sv2518/pymbolic (MIT)
__copyright__ = "Copyright (C) 2009-2013 Andreas Kloeckner"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import pymbolic.primitives as prim
import pytest
from pymbolic import parse
from pytools.lex import ParseError
from pymbolic.mapper import IdentityMapper
from functools import reduce
# {{{ utilities
def assert_parsed_same_as_python(expr_str):
    # make sure the expression string is a single line
expr_str, = expr_str.split("\n")
from pymbolic.interop.ast import ASTToPymbolic
import ast
ast2p = ASTToPymbolic()
try:
expr_parsed_by_python = ast2p(ast.parse(expr_str).body[0].value)
except SyntaxError:
with pytest.raises(ParseError):
parse(expr_str)
else:
expr_parsed_by_pymbolic = parse(expr_str)
assert expr_parsed_by_python == expr_parsed_by_pymbolic
def assert_parse_roundtrip(expr_str):
expr = parse(expr_str)
from pymbolic.mapper.stringifier import StringifyMapper
strified = StringifyMapper()(expr)
assert strified == expr_str, (strified, expr_str)
# }}}
def test_integer_power():
from pymbolic.algorithm import integer_power
for base, expn in [
(17, 5),
(17, 2**10),
(13, 20),
(13, 1343),
]:
assert base**expn == integer_power(base, expn)
def test_expand():
from pymbolic import var, expand
x = var("x")
u = (x+1)**5
expand(u)
def test_substitute():
from pymbolic import parse, substitute, evaluate
u = parse("5+x.min**2")
xmin = parse("x.min")
assert evaluate(substitute(u, {xmin: 25})) == 630
def test_no_comparison():
from pymbolic import parse
x = parse("17+3*x")
y = parse("12-5*y")
def expect_typeerror(f):
try:
f()
except TypeError:
pass
else:
raise AssertionError
expect_typeerror(lambda: x < y)
expect_typeerror(lambda: x <= y)
expect_typeerror(lambda: x > y)
expect_typeerror(lambda: x >= y)
def test_structure_preservation():
x = prim.Sum((5, 7))
from pymbolic.mapper import IdentityMapper
x2 = IdentityMapper()(x)
assert x == x2
def test_sympy_interaction():
pytest.importorskip("sympy")
import sympy as sp
x, y = sp.symbols("x y")
f = sp.Function("f")
s1_expr = 1/f(x/sp.sqrt(x**2+y**2)).diff(x, 5) # pylint:disable=not-callable
from pymbolic.interop.sympy import (
SympyToPymbolicMapper,
PymbolicToSympyMapper)
s2p = SympyToPymbolicMapper()
p2s = PymbolicToSympyMapper()
p1_expr = s2p(s1_expr)
s2_expr = p2s(p1_expr)
assert sp.ratsimp(s1_expr - s2_expr) == 0
p2_expr = s2p(s2_expr)
s3_expr = p2s(p2_expr)
assert sp.ratsimp(s1_expr - s3_expr) == 0
# {{{ fft
def test_fft_with_floats():
numpy = pytest.importorskip("numpy")
import numpy.linalg as la
from pymbolic.algorithm import fft, ifft
for n in [2**i for i in range(4, 10)]+[17, 12, 948]:
a = numpy.random.rand(n) + 1j*numpy.random.rand(n)
f_a = fft(a)
a2 = ifft(f_a)
assert la.norm(a-a2) < 1e-10
f_a_numpy = numpy.fft.fft(a)
assert la.norm(f_a-f_a_numpy) < 1e-10
class NearZeroKiller(IdentityMapper):
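    """Snap real/imaginary parts of complex constants below 1e-15 to zero."""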
def map_constant(self, expr):
if isinstance(expr, complex):
r = expr.real
i = expr.imag
if abs(r) < 1e-15:
r = 0
if abs(i) < 1e-15:
i = 0
return complex(r, i)
else:
return expr
def test_fft():
numpy = pytest.importorskip("numpy")
from pymbolic import var
from pymbolic.algorithm import fft, sym_fft
vars = numpy.array([var(chr(97+i)) for i in range(16)], dtype=object)
print(vars)
print(fft(vars))
traced_fft = sym_fft(vars)
from pymbolic.mapper.stringifier import PREC_NONE
from pymbolic.mapper.c_code import CCodeMapper
ccm = CCodeMapper()
code = [ccm(tfi, PREC_NONE) for tfi in traced_fft]
for cse_name, cse_str in enumerate(ccm.cse_name_list):
print(f"{cse_name} = {cse_str}")
for i, line in enumerate(code):
print("result[%d] = %s" % (i, line))
# }}}
def test_sparse_multiply():
numpy = pytest.importorskip("numpy")
pytest.importorskip("scipy")
import scipy.sparse as ss
la = numpy.linalg
mat = numpy.random.randn(10, 10)
s_mat = ss.csr_matrix(mat)
vec = numpy.random.randn(10)
mat_vec = s_mat*vec
from pymbolic.algorithm import csr_matrix_multiply
mat_vec_2 = csr_matrix_multiply(s_mat, vec)
assert la.norm(mat_vec-mat_vec_2) < 1e-14
# {{{ parser
def test_parser():
from pymbolic import parse
parse("(2*a[1]*b[1]+2*a[0]*b[0])*(hankel_1(-1,sqrt(a[1]**2+a[0]**2)*k) "
"-hankel_1(1,sqrt(a[1]**2+a[0]**2)*k))*k /(4*sqrt(a[1]**2+a[0]**2)) "
"+hankel_1(0,sqrt(a[1]**2+a[0]**2)*k)")
print(repr(parse("d4knl0")))
print(repr(parse("0.")))
print(repr(parse("0.e1")))
assert parse("0.e1") == 0
assert parse("1e-12") == 1e-12
print(repr(parse("a >= 1")))
print(repr(parse("a <= 1")))
print(repr(parse(":")))
print(repr(parse("1:")))
print(repr(parse(":2")))
print(repr(parse("1:2")))
print(repr(parse("::")))
print(repr(parse("1::")))
print(repr(parse(":1:")))
print(repr(parse("::1")))
print(repr(parse("3::1")))
print(repr(parse(":5:1")))
print(repr(parse("3:5:1")))
assert_parse_roundtrip("()")
assert_parse_roundtrip("(3,)")
assert_parse_roundtrip("[x + 3, 3, 5]")
assert_parse_roundtrip("[]")
assert_parse_roundtrip("[x]")
assert_parse_roundtrip("g[i, k] + 2.0*h[i, k]")
parse("g[i,k]+(+2.0)*h[i, k]")
print(repr(parse("a - b - c")))
print(repr(parse("-a - -b - -c")))
print(repr(parse("- - - a - - - - b - - - - - c")))
print(repr(parse("~(a ^ b)")))
print(repr(parse("(a | b) | ~(~a & ~b)")))
print(repr(parse("3 << 1")))
print(repr(parse("1 >> 3")))
print(parse("3::1"))
assert parse("e1") == prim.Variable("e1")
assert parse("d1") == prim.Variable("d1")
from pymbolic import variables
f, x, y, z = variables("f x y z")
assert parse("f((x,y),z)") == f((x, y), z)
assert parse("f((x,),z)") == f((x,), z)
assert parse("f(x,(y,z),z)") == f(x, (y, z), z)
assert parse("f(x,(y,z),z, name=15)") == f(x, (y, z), z, name=15)
assert parse("f(x,(y,z),z, name=15, name2=17)") == f(
x, (y, z), z, name=15, name2=17)
assert_parsed_same_as_python("5+i if i>=0 else (0 if i<-1 else 10)")
assert_parsed_same_as_python("0 if 1 if 2 else 3 else 4")
assert_parsed_same_as_python("0 if (1 if 2 else 3) else 4")
assert_parsed_same_as_python("(2, 3,)")
with pytest.deprecated_call():
parse("1+if(0, 1, 2)")
# }}}
def test_mappers():
from pymbolic import variables
f, x, y, z = variables("f x y z")
for expr in [
f(x, (y, z), name=z**2)
]:
from pymbolic.mapper import WalkMapper
from pymbolic.mapper.dependency import DependencyMapper
str(expr)
IdentityMapper()(expr)
WalkMapper()(expr)
DependencyMapper()(expr)
def test_func_dep_consistency():
from pymbolic import var
from pymbolic.mapper.dependency import DependencyMapper
f = var("f")
x = var("x")
dep_map = DependencyMapper(include_calls="descend_args")
assert dep_map(f(x)) == {x}
assert dep_map(f(x=x)) == {x}
def test_conditions():
from pymbolic import var
x = var("x")
y = var("y")
assert str(x.eq(y).and_(x.le(5))) == "x == y and x <= 5"
def test_graphviz():
from pymbolic import parse
expr = parse("(2*a[1]*b[1]+2*a[0]*b[0])*(hankel_1(-1,sqrt(a[1]**2+a[0]**2)*k) "
"-hankel_1(1,sqrt(a[1]**2+a[0]**2)*k))*k /(4*sqrt(a[1]**2+a[0]**2)) "
"+hankel_1(0,sqrt(a[1]**2+a[0]**2)*k)")
from pymbolic.mapper.graphviz import GraphvizMapper
gvm = GraphvizMapper()
gvm(expr)
print(gvm.get_dot_code())
# {{{ geometric algebra
@pytest.mark.parametrize("dims", [2, 3, 4, 5])
# START_GA_TEST
def test_geometric_algebra(dims):
pytest.importorskip("numpy")
import numpy as np
from pymbolic.geometric_algebra import MultiVector as MV # noqa
vec1 = MV(np.random.randn(dims))
vec2 = MV(np.random.randn(dims))
vec3 = MV(np.random.randn(dims))
vec4 = MV(np.random.randn(dims))
vec5 = MV(np.random.randn(dims))
# Fundamental identity
assert ((vec1 ^ vec2) + (vec1 | vec2)).close_to(vec1*vec2)
# Antisymmetry
assert (vec1 ^ vec2 ^ vec3).close_to(- vec2 ^ vec1 ^ vec3)
vecs = [vec1, vec2, vec3, vec4, vec5]
if len(vecs) > dims:
from operator import xor as outer
assert reduce(outer, vecs).close_to(0)
assert (vec1.inv()*vec1).close_to(1)
assert (vec1*vec1.inv()).close_to(1)
assert ((1/vec1)*vec1).close_to(1)
assert (vec1/vec1).close_to(1)
for a, b, c in [
(vec1, vec2, vec3),
(vec1*vec2, vec3, vec4),
(vec1, vec2*vec3, vec4),
(vec1, vec2, vec3*vec4),
(vec1, vec2, vec3*vec4*vec5),
(vec1, vec2*vec1, vec3*vec4*vec5),
]:
# Associativity
assert ((a*b)*c).close_to(a*(b*c))
assert ((a ^ b) ^ c).close_to(a ^ (b ^ c))
# The inner product is not associative.
# scalar product
assert ((c*b).project(0)) .close_to(b.scalar_product(c))
assert ((c.rev()*b).project(0)) .close_to(b.rev().scalar_product(c))
assert ((b.rev()*b).project(0)) .close_to(b.norm_squared())
assert b.norm_squared() >= 0
assert c.norm_squared() >= 0
# Cauchy's inequality
assert b.scalar_product(c) <= abs(b)*abs(c) + 1e-13
# contractions
# (3.18) in [DFM]
assert abs(b.scalar_product(a ^ c) - (b >> a).scalar_product(c)) < 1e-13
# duality, (3.20) in [DFM]
assert ((a ^ b) << c) .close_to(a << (b << c))
# two definitions of the dual agree: (1.2.26) in [HS]
# and (sec 3.5.3) in [DFW]
assert (c << c.I.rev()).close_to(c | c.I.rev())
# inverse
for div in list(b.gen_blades()) + [vec1, vec1.I]:
assert (div.inv()*div).close_to(1)
assert (div*div.inv()).close_to(1)
assert ((1/div)*div).close_to(1)
assert (div/div).close_to(1)
assert ((c/div)*div).close_to(c)
assert ((c*div)/div).close_to(c)
# reverse properties (Sec 2.9.5 [DFM])
assert c.rev().rev() == c
assert (b ^ c).rev() .close_to(c.rev() ^ b.rev())
# dual properties
# (1.2.26) in [HS]
assert c.dual() .close_to(c | c.I.rev())
assert c.dual() .close_to(c*c.I.rev())
# involution properties (Sec 2.9.5 DFW)
assert c.invol().invol() == c
assert (b ^ c).invol() .close_to(b.invol() ^ c.invol())
# commutator properties
# Jacobi identity (1.1.56c) in [HS] or (8.2) in [DFW]
assert (a.x(b.x(c)) + b.x(c.x(a)) + c.x(a.x(b))).close_to(0)
# (1.57) in [HS]
assert a.x(b*c) .close_to(a.x(b)*c + b*a.x(c))
# END_GA_TEST
# }}}
def test_ast_interop():
src = """
def f():
xx = 3*y + z * (12 if x < 13 else 13)
yy = f(x, y=y)
"""
import ast
mod = ast.parse(src.replace("\n ", "\n"))
print(ast.dump(mod))
from pymbolic.interop.ast import ASTToPymbolic
ast2p = ASTToPymbolic()
for f in mod.body:
if not isinstance(f, ast.FunctionDef):
continue
for stmt in f.body:
if not isinstance(stmt, ast.Assign):
continue
lhs, = stmt.targets
lhs = ast2p(lhs)
rhs = ast2p(stmt.value)
print(lhs, rhs)
def test_compile():
from pymbolic import parse, compile
code = compile(parse("x ** y"), ["x", "y"])
assert code(2, 5) == 32
# Test pickling of compiled code.
import pickle
code = pickle.loads(pickle.dumps(code))
assert code(3, 3) == 27
def test_unifier():
from pymbolic import var
from pymbolic.mapper.unifier import UnidirectionalUnifier
a, b, c, d, e, f = [var(s) for s in "abcdef"]
def match_found(records, eqns):
for record in records:
if eqns <= set(record.equations):
return True
return False
recs = UnidirectionalUnifier("abc")(a+b*c, d+e*f)
assert len(recs) == 2
assert match_found(recs, {(a, d), (b, e), (c, f)})
assert match_found(recs, {(a, d), (b, f), (c, e)})
recs = UnidirectionalUnifier("abc")(a+b, d+e+f)
assert len(recs) == 6
assert match_found(recs, {(a, d), (b, e+f)})
assert match_found(recs, {(a, e), (b, d+f)})
assert match_found(recs, {(a, f), (b, d+e)})
assert match_found(recs, {(b, d), (a, e+f)})
assert match_found(recs, {(b, e), (a, d+f)})
assert match_found(recs, {(b, f), (a, d+e)})
vals = [var("v" + str(i)) for i in range(100)]
recs = UnidirectionalUnifier("a")(sum(vals[1:]) + a, sum(vals))
assert len(recs) == 1
assert match_found(recs, {(a, var("v0"))})
recs = UnidirectionalUnifier("abc")(a+b+c, d+e)
assert len(recs) == 0
recs = UnidirectionalUnifier("abc")(f(a+b, f(a+c)), f(b+c, f(b+d)))
assert len(recs) == 1
assert match_found(recs, {(a, b), (b, c), (c, d)})
def test_long_sympy_mapping():
sp = pytest.importorskip("sympy")
from pymbolic.interop.sympy import SympyToPymbolicMapper
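    # 10**20 does not fit in 64 bits; make sure arbitrary-precision integers
    # survive the sympy -> pymbolic conversion.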
SympyToPymbolicMapper()(sp.sympify(int(10**20)))
SympyToPymbolicMapper()(sp.sympify(int(10)))
def test_stringifier_preserve_shift_order():
for expr in [
parse("(a << b) >> 2"),
parse("a << (b >> 2)")
]:
assert parse(str(expr)) == expr
LATEX_TEMPLATE = r"""\documentclass{article}
\usepackage{amsmath}
\begin{document}
%s
\end{document}"""
def test_latex_mapper():
from pymbolic import parse
from pymbolic.mapper.stringifier import LaTeXMapper, StringifyMapper
tm = LaTeXMapper()
sm = StringifyMapper()
equations = []
def add(expr):
# Add an equation to the list of tests.
equations.append(r"\[{}\] % from: {}".format(tm(expr), sm(expr)))
add(parse("a * b + c"))
add(parse("f(a,b,c)"))
add(parse("a ** b ** c"))
add(parse("(a | b) ^ ~c"))
add(parse("a << b"))
add(parse("a >> b"))
add(parse("a[i,j,k]"))
add(parse("a[1:3]"))
add(parse("a // b"))
add(parse("not (a or b) and c"))
add(parse("(a % b) % c"))
add(parse("(a >= b) or (b <= c)"))
add(prim.Min((1,)) + prim.Max((1, 2)))
add(prim.Substitution(prim.Variable("x") ** 2, ("x",), (2,)))
add(prim.Derivative(parse("x**2"), ("x",)))
# Run LaTeX and ensure the file compiles.
import os
import tempfile
import subprocess
import shutil
latex_dir = tempfile.mkdtemp("pymbolic")
try:
tex_file_path = os.path.join(latex_dir, "input.tex")
with open(tex_file_path, "w") as tex_file:
contents = LATEX_TEMPLATE % "\n".join(equations)
tex_file.write(contents)
try:
subprocess.check_output(
["latex",
"-interaction=nonstopmode",
"-output-directory=%s" % latex_dir,
tex_file_path],
universal_newlines=True)
        except FileNotFoundError:
            pytest.skip("latex command not found")
except subprocess.CalledProcessError as err:
raise AssertionError(str(err.output))
finally:
shutil.rmtree(latex_dir)
def test_flop_counter():
x = prim.Variable("x")
y = prim.Variable("y")
z = prim.Variable("z")
subexpr = prim.CommonSubexpression(3 * (x**2 + y + z))
expr = 3*subexpr + subexpr
from pymbolic.mapper.flop_counter import FlopCounter, CSEAwareFlopCounter
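    # subexpr costs 4 flops (1 power, 2 additions, 1 multiply); the outer
    # 3*subexpr + subexpr adds 2 more. The plain counter charges the shared
    # subexpression twice (4*2 + 2), the CSE-aware counter only once (4 + 2).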
assert FlopCounter()(expr) == 4 * 2 + 2
assert CSEAwareFlopCounter()(expr) == 4 + 2
def test_make_sym_vector():
numpy = pytest.importorskip("numpy")
from pymbolic.primitives import make_sym_vector
assert len(make_sym_vector("vec", 2)) == 2
assert len(make_sym_vector("vec", numpy.int32(2))) == 2
assert len(make_sym_vector("vec", [1, 2, 3])) == 3
def test_multiplicative_stringify_preserves_association():
for inner in ["*", " / ", " // ", " % "]:
for outer in ["*", " / ", " // ", " % "]:
if outer == inner:
continue
assert_parse_roundtrip(f"x{outer}(y{inner}z)")
assert_parse_roundtrip(f"(y{inner}z){outer}x")
assert_parse_roundtrip("(-1)*(((-1)*x) / 5)")
def test_differentiator_flags_for_nonsmooth_and_discontinuous():
import pymbolic.functions as pf
from pymbolic.mapper.differentiator import differentiate
x = prim.Variable("x")
with pytest.raises(ValueError):
differentiate(pf.fabs(x), x)
result = differentiate(pf.fabs(x), x, allowed_nonsmoothness="continuous")
assert result == pf.sign(x)
with pytest.raises(ValueError):
differentiate(pf.sign(x), x)
result = differentiate(pf.sign(x), x, allowed_nonsmoothness="discontinuous")
assert result == 0
def test_np_bool_handling():
from pymbolic.mapper.evaluator import evaluate
numpy = pytest.importorskip("numpy")
expr = prim.LogicalNot(numpy.bool_(False))
assert evaluate(expr) is True
if __name__ == "__main__":
import sys
if len(sys.argv) > 1:
exec(sys.argv[1])
else:
from pytest import main
main([__file__])
# vim: fdm=marker
| 27.831361
| 83
| 0.590837
|
__copyright__ = "Copyright (C) 2009-2013 Andreas Kloeckner"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import pymbolic.primitives as prim
import pytest
from pymbolic import parse
from pytools.lex import ParseError
from pymbolic.mapper import IdentityMapper
try:
reduce
except NameError:
from functools import reduce
def assert_parsed_same_as_python(expr_str):
expr_str, = expr_str.split("\n")
from pymbolic.interop.ast import ASTToPymbolic
import ast
ast2p = ASTToPymbolic()
try:
expr_parsed_by_python = ast2p(ast.parse(expr_str).body[0].value)
except SyntaxError:
with pytest.raises(ParseError):
parse(expr_str)
else:
expr_parsed_by_pymbolic = parse(expr_str)
assert expr_parsed_by_python == expr_parsed_by_pymbolic
def assert_parse_roundtrip(expr_str):
expr = parse(expr_str)
from pymbolic.mapper.stringifier import StringifyMapper
strified = StringifyMapper()(expr)
assert strified == expr_str, (strified, expr_str)
def test_integer_power():
from pymbolic.algorithm import integer_power
for base, expn in [
(17, 5),
(17, 2**10),
(13, 20),
(13, 1343),
]:
assert base**expn == integer_power(base, expn)
def test_expand():
from pymbolic import var, expand
x = var("x")
u = (x+1)**5
expand(u)
def test_substitute():
from pymbolic import parse, substitute, evaluate
u = parse("5+x.min**2")
xmin = parse("x.min")
assert evaluate(substitute(u, {xmin: 25})) == 630
def test_no_comparison():
from pymbolic import parse
x = parse("17+3*x")
y = parse("12-5*y")
def expect_typeerror(f):
try:
f()
except TypeError:
pass
else:
raise AssertionError
expect_typeerror(lambda: x < y)
expect_typeerror(lambda: x <= y)
expect_typeerror(lambda: x > y)
expect_typeerror(lambda: x >= y)
def test_structure_preservation():
x = prim.Sum((5, 7))
from pymbolic.mapper import IdentityMapper
x2 = IdentityMapper()(x)
assert x == x2
def test_sympy_interaction():
pytest.importorskip("sympy")
import sympy as sp
x, y = sp.symbols("x y")
f = sp.Function("f")
s1_expr = 1/f(x/sp.sqrt(x**2+y**2)).diff(x, 5)
from pymbolic.interop.sympy import (
SympyToPymbolicMapper,
PymbolicToSympyMapper)
s2p = SympyToPymbolicMapper()
p2s = PymbolicToSympyMapper()
p1_expr = s2p(s1_expr)
s2_expr = p2s(p1_expr)
assert sp.ratsimp(s1_expr - s2_expr) == 0
p2_expr = s2p(s2_expr)
s3_expr = p2s(p2_expr)
assert sp.ratsimp(s1_expr - s3_expr) == 0
def test_fft_with_floats():
numpy = pytest.importorskip("numpy")
import numpy.linalg as la
from pymbolic.algorithm import fft, ifft
for n in [2**i for i in range(4, 10)]+[17, 12, 948]:
a = numpy.random.rand(n) + 1j*numpy.random.rand(n)
f_a = fft(a)
a2 = ifft(f_a)
assert la.norm(a-a2) < 1e-10
f_a_numpy = numpy.fft.fft(a)
assert la.norm(f_a-f_a_numpy) < 1e-10
class NearZeroKiller(IdentityMapper):
def map_constant(self, expr):
if isinstance(expr, complex):
r = expr.real
i = expr.imag
if abs(r) < 1e-15:
r = 0
if abs(i) < 1e-15:
i = 0
return complex(r, i)
else:
return expr
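# A minimal usage sketch (not part of the original test suite): mapper
# instances are applied by calling them on an expression. The expression
# below is hypothetical; the tiny 1e-17 real part gets snapped to zero.
def _demo_near_zero_killer():
    expr = prim.Sum((1e-17 + 2j, prim.Variable("x")))
    print(NearZeroKiller()(expr))  # the constant becomes exactly 2j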
def test_fft():
numpy = pytest.importorskip("numpy")
from pymbolic import var
from pymbolic.algorithm import fft, sym_fft
vars = numpy.array([var(chr(97+i)) for i in range(16)], dtype=object)
print(vars)
print(fft(vars))
traced_fft = sym_fft(vars)
from pymbolic.mapper.stringifier import PREC_NONE
from pymbolic.mapper.c_code import CCodeMapper
ccm = CCodeMapper()
code = [ccm(tfi, PREC_NONE) for tfi in traced_fft]
for cse_name, cse_str in enumerate(ccm.cse_name_list):
print(f"{cse_name} = {cse_str}")
for i, line in enumerate(code):
print("result[%d] = %s" % (i, line))
def test_sparse_multiply():
numpy = pytest.importorskip("numpy")
pytest.importorskip("scipy")
import scipy.sparse as ss
la = numpy.linalg
mat = numpy.random.randn(10, 10)
s_mat = ss.csr_matrix(mat)
vec = numpy.random.randn(10)
mat_vec = s_mat*vec
from pymbolic.algorithm import csr_matrix_multiply
mat_vec_2 = csr_matrix_multiply(s_mat, vec)
assert la.norm(mat_vec-mat_vec_2) < 1e-14
def test_parser():
from pymbolic import parse
parse("(2*a[1]*b[1]+2*a[0]*b[0])*(hankel_1(-1,sqrt(a[1]**2+a[0]**2)*k) "
"-hankel_1(1,sqrt(a[1]**2+a[0]**2)*k))*k /(4*sqrt(a[1]**2+a[0]**2)) "
"+hankel_1(0,sqrt(a[1]**2+a[0]**2)*k)")
print(repr(parse("d4knl0")))
print(repr(parse("0.")))
print(repr(parse("0.e1")))
assert parse("0.e1") == 0
assert parse("1e-12") == 1e-12
print(repr(parse("a >= 1")))
print(repr(parse("a <= 1")))
print(repr(parse(":")))
print(repr(parse("1:")))
print(repr(parse(":2")))
print(repr(parse("1:2")))
print(repr(parse("::")))
print(repr(parse("1::")))
print(repr(parse(":1:")))
print(repr(parse("::1")))
print(repr(parse("3::1")))
print(repr(parse(":5:1")))
print(repr(parse("3:5:1")))
assert_parse_roundtrip("()")
assert_parse_roundtrip("(3,)")
assert_parse_roundtrip("[x + 3, 3, 5]")
assert_parse_roundtrip("[]")
assert_parse_roundtrip("[x]")
assert_parse_roundtrip("g[i, k] + 2.0*h[i, k]")
parse("g[i,k]+(+2.0)*h[i, k]")
print(repr(parse("a - b - c")))
print(repr(parse("-a - -b - -c")))
print(repr(parse("- - - a - - - - b - - - - - c")))
print(repr(parse("~(a ^ b)")))
print(repr(parse("(a | b) | ~(~a & ~b)")))
print(repr(parse("3 << 1")))
print(repr(parse("1 >> 3")))
print(parse("3::1"))
assert parse("e1") == prim.Variable("e1")
assert parse("d1") == prim.Variable("d1")
from pymbolic import variables
f, x, y, z = variables("f x y z")
assert parse("f((x,y),z)") == f((x, y), z)
assert parse("f((x,),z)") == f((x,), z)
assert parse("f(x,(y,z),z)") == f(x, (y, z), z)
assert parse("f(x,(y,z),z, name=15)") == f(x, (y, z), z, name=15)
assert parse("f(x,(y,z),z, name=15, name2=17)") == f(
x, (y, z), z, name=15, name2=17)
assert_parsed_same_as_python("5+i if i>=0 else (0 if i<-1 else 10)")
assert_parsed_same_as_python("0 if 1 if 2 else 3 else 4")
assert_parsed_same_as_python("0 if (1 if 2 else 3) else 4")
assert_parsed_same_as_python("(2, 3,)")
with pytest.deprecated_call():
parse("1+if(0, 1, 2)")
def test_mappers():
from pymbolic import variables
f, x, y, z = variables("f x y z")
for expr in [
f(x, (y, z), name=z**2)
]:
from pymbolic.mapper import WalkMapper
from pymbolic.mapper.dependency import DependencyMapper
str(expr)
IdentityMapper()(expr)
WalkMapper()(expr)
DependencyMapper()(expr)
def test_func_dep_consistency():
from pymbolic import var
from pymbolic.mapper.dependency import DependencyMapper
f = var("f")
x = var("x")
dep_map = DependencyMapper(include_calls="descend_args")
assert dep_map(f(x)) == {x}
assert dep_map(f(x=x)) == {x}
def test_conditions():
from pymbolic import var
x = var("x")
y = var("y")
assert str(x.eq(y).and_(x.le(5))) == "x == y and x <= 5"
def test_graphviz():
from pymbolic import parse
expr = parse("(2*a[1]*b[1]+2*a[0]*b[0])*(hankel_1(-1,sqrt(a[1]**2+a[0]**2)*k) "
"-hankel_1(1,sqrt(a[1]**2+a[0]**2)*k))*k /(4*sqrt(a[1]**2+a[0]**2)) "
"+hankel_1(0,sqrt(a[1]**2+a[0]**2)*k)")
from pymbolic.mapper.graphviz import GraphvizMapper
gvm = GraphvizMapper()
gvm(expr)
print(gvm.get_dot_code())
@pytest.mark.parametrize("dims", [2, 3, 4, 5])
def test_geometric_algebra(dims):
pytest.importorskip("numpy")
import numpy as np
from pymbolic.geometric_algebra import MultiVector as MV
vec1 = MV(np.random.randn(dims))
vec2 = MV(np.random.randn(dims))
vec3 = MV(np.random.randn(dims))
vec4 = MV(np.random.randn(dims))
vec5 = MV(np.random.randn(dims))
assert ((vec1 ^ vec2) + (vec1 | vec2)).close_to(vec1*vec2)
assert (vec1 ^ vec2 ^ vec3).close_to(- vec2 ^ vec1 ^ vec3)
vecs = [vec1, vec2, vec3, vec4, vec5]
if len(vecs) > dims:
from operator import xor as outer
assert reduce(outer, vecs).close_to(0)
assert (vec1.inv()*vec1).close_to(1)
assert (vec1*vec1.inv()).close_to(1)
assert ((1/vec1)*vec1).close_to(1)
assert (vec1/vec1).close_to(1)
for a, b, c in [
(vec1, vec2, vec3),
(vec1*vec2, vec3, vec4),
(vec1, vec2*vec3, vec4),
(vec1, vec2, vec3*vec4),
(vec1, vec2, vec3*vec4*vec5),
(vec1, vec2*vec1, vec3*vec4*vec5),
]:
assert ((a*b)*c).close_to(a*(b*c))
assert ((a ^ b) ^ c).close_to(a ^ (b ^ c))
assert ((c*b).project(0)) .close_to(b.scalar_product(c))
assert ((c.rev()*b).project(0)) .close_to(b.rev().scalar_product(c))
assert ((b.rev()*b).project(0)) .close_to(b.norm_squared())
assert b.norm_squared() >= 0
assert c.norm_squared() >= 0
assert b.scalar_product(c) <= abs(b)*abs(c) + 1e-13
# contractions
# (3.18) in [DFM]
assert abs(b.scalar_product(a ^ c) - (b >> a).scalar_product(c)) < 1e-13
# duality, (3.20) in [DFM]
assert ((a ^ b) << c) .close_to(a << (b << c))
# two definitions of the dual agree: (1.2.26) in [HS]
# and (sec 3.5.3) in [DFW]
assert (c << c.I.rev()).close_to(c | c.I.rev())
# inverse
for div in list(b.gen_blades()) + [vec1, vec1.I]:
assert (div.inv()*div).close_to(1)
assert (div*div.inv()).close_to(1)
assert ((1/div)*div).close_to(1)
assert (div/div).close_to(1)
assert ((c/div)*div).close_to(c)
assert ((c*div)/div).close_to(c)
# reverse properties (Sec 2.9.5 [DFM])
assert c.rev().rev() == c
assert (b ^ c).rev() .close_to(c.rev() ^ b.rev())
# dual properties
# (1.2.26) in [HS]
assert c.dual() .close_to(c | c.I.rev())
assert c.dual() .close_to(c*c.I.rev())
# involution properties (Sec 2.9.5 DFW)
assert c.invol().invol() == c
assert (b ^ c).invol() .close_to(b.invol() ^ c.invol())
# commutator properties
# Jacobi identity (1.1.56c) in [HS] or (8.2) in [DFW]
assert (a.x(b.x(c)) + b.x(c.x(a)) + c.x(a.x(b))).close_to(0)
# (1.57) in [HS]
assert a.x(b*c) .close_to(a.x(b)*c + b*a.x(c))
# END_GA_TEST
# }}}
def test_ast_interop():
src = """
def f():
xx = 3*y + z * (12 if x < 13 else 13)
yy = f(x, y=y)
"""
import ast
mod = ast.parse(src.replace("\n ", "\n"))
print(ast.dump(mod))
from pymbolic.interop.ast import ASTToPymbolic
ast2p = ASTToPymbolic()
for f in mod.body:
if not isinstance(f, ast.FunctionDef):
continue
for stmt in f.body:
if not isinstance(stmt, ast.Assign):
continue
lhs, = stmt.targets
lhs = ast2p(lhs)
rhs = ast2p(stmt.value)
print(lhs, rhs)
def test_compile():
from pymbolic import parse, compile
code = compile(parse("x ** y"), ["x", "y"])
assert code(2, 5) == 32
# Test pickling of compiled code.
import pickle
code = pickle.loads(pickle.dumps(code))
assert code(3, 3) == 27
def test_unifier():
from pymbolic import var
from pymbolic.mapper.unifier import UnidirectionalUnifier
a, b, c, d, e, f = [var(s) for s in "abcdef"]
def match_found(records, eqns):
for record in records:
if eqns <= set(record.equations):
return True
return False
recs = UnidirectionalUnifier("abc")(a+b*c, d+e*f)
assert len(recs) == 2
assert match_found(recs, {(a, d), (b, e), (c, f)})
assert match_found(recs, {(a, d), (b, f), (c, e)})
recs = UnidirectionalUnifier("abc")(a+b, d+e+f)
assert len(recs) == 6
assert match_found(recs, {(a, d), (b, e+f)})
assert match_found(recs, {(a, e), (b, d+f)})
assert match_found(recs, {(a, f), (b, d+e)})
assert match_found(recs, {(b, d), (a, e+f)})
assert match_found(recs, {(b, e), (a, d+f)})
assert match_found(recs, {(b, f), (a, d+e)})
vals = [var("v" + str(i)) for i in range(100)]
recs = UnidirectionalUnifier("a")(sum(vals[1:]) + a, sum(vals))
assert len(recs) == 1
assert match_found(recs, {(a, var("v0"))})
recs = UnidirectionalUnifier("abc")(a+b+c, d+e)
assert len(recs) == 0
recs = UnidirectionalUnifier("abc")(f(a+b, f(a+c)), f(b+c, f(b+d)))
assert len(recs) == 1
assert match_found(recs, {(a, b), (b, c), (c, d)})
def test_long_sympy_mapping():
sp = pytest.importorskip("sympy")
from pymbolic.interop.sympy import SympyToPymbolicMapper
SympyToPymbolicMapper()(sp.sympify(int(10**20)))
SympyToPymbolicMapper()(sp.sympify(int(10)))
def test_stringifier_preserve_shift_order():
for expr in [
parse("(a << b) >> 2"),
parse("a << (b >> 2)")
]:
assert parse(str(expr)) == expr
LATEX_TEMPLATE = r"""\documentclass{article}
\usepackage{amsmath}
\begin{document}
%s
\end{document}"""
def test_latex_mapper():
from pymbolic import parse
from pymbolic.mapper.stringifier import LaTeXMapper, StringifyMapper
tm = LaTeXMapper()
sm = StringifyMapper()
equations = []
def add(expr):
# Add an equation to the list of tests.
equations.append(r"\[{}\] % from: {}".format(tm(expr), sm(expr)))
add(parse("a * b + c"))
add(parse("f(a,b,c)"))
add(parse("a ** b ** c"))
add(parse("(a | b) ^ ~c"))
add(parse("a << b"))
add(parse("a >> b"))
add(parse("a[i,j,k]"))
add(parse("a[1:3]"))
add(parse("a // b"))
add(parse("not (a or b) and c"))
add(parse("(a % b) % c"))
add(parse("(a >= b) or (b <= c)"))
add(prim.Min((1,)) + prim.Max((1, 2)))
add(prim.Substitution(prim.Variable("x") ** 2, ("x",), (2,)))
add(prim.Derivative(parse("x**2"), ("x",)))
# Run LaTeX and ensure the file compiles.
import os
import tempfile
import subprocess
import shutil
latex_dir = tempfile.mkdtemp("pymbolic")
try:
tex_file_path = os.path.join(latex_dir, "input.tex")
with open(tex_file_path, "w") as tex_file:
contents = LATEX_TEMPLATE % "\n".join(equations)
tex_file.write(contents)
try:
subprocess.check_output(
["latex",
"-interaction=nonstopmode",
"-output-directory=%s" % latex_dir,
tex_file_path],
universal_newlines=True)
except OSError: # FIXME: Should be FileNotFoundError on Py3
pytest.skip("latex command not found")
except subprocess.CalledProcessError as err:
raise AssertionError(str(err.output))
finally:
shutil.rmtree(latex_dir)
def test_flop_counter():
x = prim.Variable("x")
y = prim.Variable("y")
z = prim.Variable("z")
subexpr = prim.CommonSubexpression(3 * (x**2 + y + z))
expr = 3*subexpr + subexpr
from pymbolic.mapper.flop_counter import FlopCounter, CSEAwareFlopCounter
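    # Explanatory note (not in the original): subexpr holds 4 flops (the
    # power x**2, two additions, one multiply by 3) and expr adds one more
    # multiply and one addition. FlopCounter expands the CSE and counts its
    # 4 flops twice; CSEAwareFlopCounter counts them only once.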
assert FlopCounter()(expr) == 4 * 2 + 2
assert CSEAwareFlopCounter()(expr) == 4 + 2
def test_make_sym_vector():
numpy = pytest.importorskip("numpy")
from pymbolic.primitives import make_sym_vector
assert len(make_sym_vector("vec", 2)) == 2
assert len(make_sym_vector("vec", numpy.int32(2))) == 2
assert len(make_sym_vector("vec", [1, 2, 3])) == 3
def test_multiplicative_stringify_preserves_association():
for inner in ["*", " / ", " // ", " % "]:
for outer in ["*", " / ", " // ", " % "]:
if outer == inner:
continue
assert_parse_roundtrip(f"x{outer}(y{inner}z)")
assert_parse_roundtrip(f"(y{inner}z){outer}x")
assert_parse_roundtrip("(-1)*(((-1)*x) / 5)")
def test_differentiator_flags_for_nonsmooth_and_discontinuous():
import pymbolic.functions as pf
from pymbolic.mapper.differentiator import differentiate
x = prim.Variable("x")
with pytest.raises(ValueError):
differentiate(pf.fabs(x), x)
result = differentiate(pf.fabs(x), x, allowed_nonsmoothness="continuous")
assert result == pf.sign(x)
with pytest.raises(ValueError):
differentiate(pf.sign(x), x)
result = differentiate(pf.sign(x), x, allowed_nonsmoothness="discontinuous")
assert result == 0
def test_np_bool_handling():
from pymbolic.mapper.evaluator import evaluate
numpy = pytest.importorskip("numpy")
expr = prim.LogicalNot(numpy.bool_(False))
assert evaluate(expr) is True
if __name__ == "__main__":
import sys
if len(sys.argv) > 1:
exec(sys.argv[1])
else:
from pytest import main
main([__file__])
# vim: fdm=marker
| true
| true
|
7908d746ff18a71c46571423dc0b91c0ad61e883
| 1,906
|
py
|
Python
|
netdata/workers/json_storage.py
|
mincode/netdata
|
4369a3bfb473509eff92083e03f214d5b75f6074
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
netdata/workers/json_storage.py
|
mincode/netdata
|
4369a3bfb473509eff92083e03f214d5b75f6074
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
netdata/workers/json_storage.py
|
mincode/netdata
|
4369a3bfb473509eff92083e03f214d5b75f6074
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import os
import json
__author__ = 'Manfred Minimair <manfred@minimair.org>'
class JSONStorage:
"""
File storage for a dictionary.
"""
file = '' # file name of storage file
data = None # data dict
indent = ' ' # indent prefix for pretty printing json files
def __init__(self, path, name):
"""
        Initialize.
:param path: path to the storage file;
            empty means the current directory.
:param name: file name, json file; may include a path.
"""
if path:
os.makedirs(path, exist_ok=True)
self.file = os.path.normpath(os.path.join(path, name))
try:
with open(self.file) as data_file:
self.data = json.load(data_file)
except FileNotFoundError:
self.data = dict()
self.dump()
def dump(self):
"""
Dump data into storage file.
"""
with open(self.file, 'w') as out_file:
json.dump(self.data, out_file, indent=self.indent)
def get(self, item):
"""
Get stored item.
:param item: name, string, of item to get.
:return: stored item; raises a KeyError if item does not exist.
"""
return self.data[item]
def set(self, item, value):
"""
Set item's value; causes the data to be dumped into the storage file.
:param item: name, string of item to set.
:param value: value to set.
"""
self.data[item] = value
self.dump()
def __getattr__(self, item):
"""
        Get a stored item via dot-notation if it is not defined as a
        class member.
        :param item: name, string of an item compatible
            with Python class member naming.
        :return: value of item.
"""
if item in self.data:
return self.data[item]
else:
            raise AttributeError(item)
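# A minimal usage sketch (assumed, not part of the original module; the
# file name and keys are hypothetical):
#
#     store = JSONStorage('', 'settings.json')  # '' -> current directory
#     store.set('theme', 'dark')                # persists via dump()
#     assert store.get('theme') == 'dark'
#     assert store.theme == 'dark'              # dot access via __getattr__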
| 28.029412
| 77
| 0.559811
|
import os
import json
__author__ = 'Manfred Minimair <manfred@minimair.org>'
class JSONStorage:
file = ''
data = None
indent = ' '
def __init__(self, path, name):
if path:
os.makedirs(path, exist_ok=True)
self.file = os.path.normpath(os.path.join(path, name))
try:
with open(self.file) as data_file:
self.data = json.load(data_file)
except FileNotFoundError:
self.data = dict()
self.dump()
def dump(self):
with open(self.file, 'w') as out_file:
json.dump(self.data, out_file, indent=self.indent)
def get(self, item):
return self.data[item]
def set(self, item, value):
self.data[item] = value
self.dump()
def __getattr__(self, item):
if item in self.data:
return self.data[item]
else:
            raise AttributeError(item)
| true
| true
|
7908d88ba569ad63d3dfe13890f364ae53088ea0
| 912
|
py
|
Python
|
tests/unit/test_advanced_collectible.py
|
Sam44323/nft-mix-opensea
|
05f85412d6e088094ea18d2b0372578cbab2ebc4
|
[
"MIT"
] | null | null | null |
tests/unit/test_advanced_collectible.py
|
Sam44323/nft-mix-opensea
|
05f85412d6e088094ea18d2b0372578cbab2ebc4
|
[
"MIT"
] | null | null | null |
tests/unit/test_advanced_collectible.py
|
Sam44323/nft-mix-opensea
|
05f85412d6e088094ea18d2b0372578cbab2ebc4
|
[
"MIT"
] | null | null | null |
from brownie import AdvancedCollectible, network
import pytest
from scripts.advanced_collectible.deploy_and_create import deploy_and_create, get_contract
from scripts.utils.helpful_scripts import LOCAL_BLOCKCHAIN_ENVIRONMENTS, get_account
def test_can_create_advanced_collectible():
if network.show_active() not in LOCAL_BLOCKCHAIN_ENVIRONMENTS:
pytest.skip("Only for local testing")
advanced_collectible, creation_transaction = deploy_and_create()
# getting the requestId value from the requestedCollectible event
requestId = creation_transaction.events["requestedCollectible"]["requestId"]
randomNumber = 777
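    # Illustrative note: callBackWithRandomness makes the mocked VRF
    # coordinator fulfil the pending randomness request locally, so the
    # assertions below need no live oracle.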
get_contract("vrf_coordinator").callBackWithRandomness(
requestId, randomNumber, advanced_collectible.address, {"from": get_account()})
assert advanced_collectible.tokenCounter() == 1
assert advanced_collectible.tokenIdToBreed(0) == randomNumber % 3
| 48
| 90
| 0.804825
|
from brownie import AdvancedCollectible, network
import pytest
from scripts.advanced_collectible.deploy_and_create import deploy_and_create, get_contract
from scripts.utils.helpful_scripts import LOCAL_BLOCKCHAIN_ENVIRONMENTS, get_account
def test_can_create_advanced_collectible():
if network.show_active() not in LOCAL_BLOCKCHAIN_ENVIRONMENTS:
pytest.skip("Only for local testing")
advanced_collectible, creation_transaction = deploy_and_create()
requestId = creation_transaction.events["requestedCollectible"]["requestId"]
randomNumber = 777
get_contract("vrf_coordinator").callBackWithRandomness(
requestId, randomNumber, advanced_collectible.address, {"from": get_account()})
assert advanced_collectible.tokenCounter() == 1
assert advanced_collectible.tokenIdToBreed(0) == randomNumber % 3
| true
| true
|
7908d8b8118a8f8da1175e4e0304b7e98d832205
| 7,037
|
py
|
Python
|
lib/galaxy/files/uris.py
|
itisAliRH/galaxy
|
b3b693ea0788f773442c8481472a87f43ccb10d7
|
[
"CC-BY-3.0"
] | null | null | null |
lib/galaxy/files/uris.py
|
itisAliRH/galaxy
|
b3b693ea0788f773442c8481472a87f43ccb10d7
|
[
"CC-BY-3.0"
] | 6
|
2021-11-11T20:57:49.000Z
|
2021-12-10T15:30:33.000Z
|
lib/galaxy/files/uris.py
|
itisAliRH/galaxy
|
b3b693ea0788f773442c8481472a87f43ccb10d7
|
[
"CC-BY-3.0"
] | null | null | null |
import base64
import ipaddress
import logging
import os
import socket
import tempfile
import urllib.request
from typing import (
List,
Optional,
TYPE_CHECKING,
Union,
)
from urllib.parse import urlparse
from galaxy.exceptions import (
AdminRequiredException,
ConfigDoesNotAllowException,
)
from galaxy.util import (
DEFAULT_SOCKET_TIMEOUT,
get_charset_from_http_headers,
stream_to_open_named_file,
unicodify,
)
if TYPE_CHECKING:
from galaxy.files import ConfiguredFileSources
log = logging.getLogger(__name__)
def stream_url_to_str(
path: str, file_sources: Optional["ConfiguredFileSources"] = None, prefix: str = "gx_file_stream"
) -> str:
tmp_file = stream_url_to_file(path, file_sources=file_sources, prefix=prefix)
try:
with open(tmp_file, "r") as f:
return f.read()
finally:
os.remove(tmp_file)
def stream_url_to_file(
path: str, file_sources: Optional["ConfiguredFileSources"] = None, prefix: str = "gx_file_stream"
) -> str:
temp_name: str
if file_sources and file_sources.looks_like_uri(path):
file_source_path = file_sources.get_file_source_path(path)
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as temp:
temp_name = temp.name
file_source_path.file_source.realize_to(file_source_path.path, temp_name)
elif path.startswith("base64://"):
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as temp:
temp_name = temp.name
temp.write(base64.b64decode(path[len("base64://") :]))
temp.flush()
else:
page = urllib.request.urlopen(path, timeout=DEFAULT_SOCKET_TIMEOUT) # page will be .close()ed in stream_to_file
temp_name = stream_to_file(page, prefix=prefix, source_encoding=get_charset_from_http_headers(page.headers))
return temp_name
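# A small usage sketch (assumed inputs, not part of the original module):
#
#     uri = "base64://" + base64.b64encode(b"hello").decode()
#     tmp = stream_url_to_file(uri)
#     open(tmp).read()  # -> 'hello'; the caller owns cleanup of tmp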
def stream_to_file(stream, suffix="", prefix="", dir=None, text=False, **kwd):
"""Writes a stream to a temporary file, returns the temporary file's name"""
fd, temp_name = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text)
return stream_to_open_named_file(stream, fd, temp_name, **kwd)
IpAddressT = Union[ipaddress.IPv4Address, ipaddress.IPv6Address]
IpNetworkT = Union[ipaddress.IPv4Network, ipaddress.IPv6Network]
IpAllowedListEntryT = Union[IpAddressT, IpNetworkT]
def validate_uri_access(uri: str, is_admin: bool, ip_allowlist: List[IpAllowedListEntryT]) -> None:
"""Perform uniform checks on supplied URIs.
- Prevent access to local IPs not found in ip_allowlist.
- Don't allow non-admins to access file:// URIs.
"""
validate_non_local(uri, ip_allowlist)
if not is_admin and uri.lstrip().startswith("file://"):
raise AdminRequiredException()
def validate_non_local(uri: str, ip_allowlist: List[IpAllowedListEntryT]) -> str:
# If it doesn't look like a URL, ignore it.
if not (uri.lstrip().startswith("http://") or uri.lstrip().startswith("https://")):
return uri
# Strip leading whitespace before passing url to urlparse()
url = uri.lstrip()
# Extract hostname component
parsed_url = urlparse(url).netloc
# If credentials are in this URL, we need to strip those.
if parsed_url.count("@") > 0:
# credentials.
parsed_url = parsed_url[parsed_url.rindex("@") + 1 :]
# Percent encoded colons and other characters will not be resolved as such
# so we don't have to either.
# Sometimes the netloc will contain the port which is not desired, so we
# need to extract that.
port = None
# However, it could ALSO be an IPv6 address they've supplied.
if ":" in parsed_url:
        # IPv6 addresses always contain at least two colons
if parsed_url.count(":") >= 2:
# Since IPv6 already use colons extensively, they wrap it in
# brackets when there is a port, e.g. http://[2001:db8:1f70::999:de8:7648:6e8]:100/
# However if it ends with a ']' then there is no port after it and
# they've wrapped it in brackets just for fun.
if "]" in parsed_url and not parsed_url.endswith("]"):
# If this +1 throws a range error, we don't care, their url
# shouldn't end with a colon.
idx = parsed_url.rindex(":")
# We parse as an int and let this fail ungracefully if parsing
# fails because we desire to fail closed rather than open.
port = int(parsed_url[idx + 1 :])
parsed_url = parsed_url[:idx]
else:
# Plain ipv6 without port
pass
else:
# This should finally be ipv4 with port. It cannot be IPv6 as that
# was caught by earlier cases, and it cannot be due to credentials.
idx = parsed_url.rindex(":")
port = int(parsed_url[idx + 1 :])
parsed_url = parsed_url[:idx]
# safe to log out, no credentials/request path, just an IP + port
log.debug("parsed url, port: %s : %s", parsed_url, port)
# Call getaddrinfo to resolve hostname into tuples containing IPs.
addrinfo = socket.getaddrinfo(parsed_url, port)
    # Get the IP addresses that this entry resolves to (uniquely).
    # We drop:
    #   family:   it will resolve to AF_INET or AF_INET6; getaddrinfo(3)
    #             doesn't even mention AF_UNIX.
    #   socktype: we don't care if it is a stream/dgram/raw protocol.
    #   protocol: we don't care if it is TCP or UDP.
addrinfo_results = {info[4][0] for info in addrinfo}
# There may be multiple (e.g. IPv4 + IPv6 or DNS round robin). Any one of these
# could resolve to a local addresses (and could be returned by chance),
# therefore we must check them all.
for raw_ip in addrinfo_results:
# Convert to an IP object so we can tell if it is in private space.
ip = ipaddress.ip_address(unicodify(raw_ip))
# If this is a private address
if ip.is_private:
results = []
# If this IP is not anywhere in the allowlist
for allowlisted in ip_allowlist:
# If it's an IP address range (rather than a single one...)
if isinstance(allowlisted, (ipaddress.IPv4Network, ipaddress.IPv6Network)):
results.append(ip in allowlisted)
else:
results.append(ip == allowlisted)
if any(results):
# If we had any True, then THIS (and ONLY THIS) IP address that
# that specific DNS entry resolved to is in allowlisted and
# safe to access. But we cannot exit here, we must ensure that
# all IPs that that DNS entry resolves to are likewise safe.
pass
else:
# Otherwise, we deny access.
                raise ConfigDoesNotAllowException("Access to this address is not permitted by server configuration")
return url
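# A hedged usage sketch (not part of the original module; the allowlist and
# URIs are hypothetical):
#
#     allowlist = [ipaddress.ip_address("127.0.0.1")]
#     # passes: resolves to a private address that is explicitly allowlisted
#     validate_uri_access("http://127.0.0.1:8080/x", True, allowlist)
#     # raises AdminRequiredException: non-admins may not use file:// URIs
#     validate_uri_access("file:///etc/passwd", False, allowlist)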
| 41.639053
| 120
| 0.656672
|
import base64
import ipaddress
import logging
import os
import socket
import tempfile
import urllib.request
from typing import (
List,
Optional,
TYPE_CHECKING,
Union,
)
from urllib.parse import urlparse
from galaxy.exceptions import (
AdminRequiredException,
ConfigDoesNotAllowException,
)
from galaxy.util import (
DEFAULT_SOCKET_TIMEOUT,
get_charset_from_http_headers,
stream_to_open_named_file,
unicodify,
)
if TYPE_CHECKING:
from galaxy.files import ConfiguredFileSources
log = logging.getLogger(__name__)
def stream_url_to_str(
path: str, file_sources: Optional["ConfiguredFileSources"] = None, prefix: str = "gx_file_stream"
) -> str:
tmp_file = stream_url_to_file(path, file_sources=file_sources, prefix=prefix)
try:
with open(tmp_file, "r") as f:
return f.read()
finally:
os.remove(tmp_file)
def stream_url_to_file(
path: str, file_sources: Optional["ConfiguredFileSources"] = None, prefix: str = "gx_file_stream"
) -> str:
temp_name: str
if file_sources and file_sources.looks_like_uri(path):
file_source_path = file_sources.get_file_source_path(path)
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as temp:
temp_name = temp.name
file_source_path.file_source.realize_to(file_source_path.path, temp_name)
elif path.startswith("base64://"):
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as temp:
temp_name = temp.name
temp.write(base64.b64decode(path[len("base64://") :]))
temp.flush()
else:
page = urllib.request.urlopen(path, timeout=DEFAULT_SOCKET_TIMEOUT)
temp_name = stream_to_file(page, prefix=prefix, source_encoding=get_charset_from_http_headers(page.headers))
return temp_name
def stream_to_file(stream, suffix="", prefix="", dir=None, text=False, **kwd):
fd, temp_name = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text)
return stream_to_open_named_file(stream, fd, temp_name, **kwd)
IpAddressT = Union[ipaddress.IPv4Address, ipaddress.IPv6Address]
IpNetworkT = Union[ipaddress.IPv4Network, ipaddress.IPv6Network]
IpAllowedListEntryT = Union[IpAddressT, IpNetworkT]
def validate_uri_access(uri: str, is_admin: bool, ip_allowlist: List[IpAllowedListEntryT]) -> None:
validate_non_local(uri, ip_allowlist)
if not is_admin and uri.lstrip().startswith("file://"):
raise AdminRequiredException()
def validate_non_local(uri: str, ip_allowlist: List[IpAllowedListEntryT]) -> str:
if not (uri.lstrip().startswith("http://") or uri.lstrip().startswith("https://")):
return uri
# Strip leading whitespace before passing url to urlparse()
url = uri.lstrip()
# Extract hostname component
parsed_url = urlparse(url).netloc
# If credentials are in this URL, we need to strip those.
if parsed_url.count("@") > 0:
# credentials.
parsed_url = parsed_url[parsed_url.rindex("@") + 1 :]
# Percent encoded colons and other characters will not be resolved as such
# so we don't have to either.
port = None
if ":" in parsed_url:
        # IPv6 addresses always contain at least two colons
if parsed_url.count(":") >= 2:
# Since IPv6 already use colons extensively, they wrap it in
# brackets when there is a port, e.g. http://[2001:db8:1f70::999:de8:7648:6e8]:100/
# However if it ends with a ']' then there is no port after it and
# they've wrapped it in brackets just for fun.
if "]" in parsed_url and not parsed_url.endswith("]"):
                # Their URL shouldn't end with a colon.
idx = parsed_url.rindex(":")
port = int(parsed_url[idx + 1 :])
parsed_url = parsed_url[:idx]
else:
pass
else:
idx = parsed_url.rindex(":")
port = int(parsed_url[idx + 1 :])
parsed_url = parsed_url[:idx]
log.debug("parsed url, port: %s : %s", parsed_url, port)
addrinfo = socket.getaddrinfo(parsed_url, port)
    # socktype: we don't care if it is a stream/dgram/raw protocol.
addrinfo_results = {info[4][0] for info in addrinfo}
# There may be multiple (e.g. IPv4 + IPv6 or DNS round robin). Any one of these
# could resolve to a local addresses (and could be returned by chance),
# therefore we must check them all.
for raw_ip in addrinfo_results:
# Convert to an IP object so we can tell if it is in private space.
ip = ipaddress.ip_address(unicodify(raw_ip))
# If this is a private address
if ip.is_private:
results = []
# If this IP is not anywhere in the allowlist
for allowlisted in ip_allowlist:
# If it's an IP address range (rather than a single one...)
if isinstance(allowlisted, (ipaddress.IPv4Network, ipaddress.IPv6Network)):
results.append(ip in allowlisted)
else:
results.append(ip == allowlisted)
if any(results):
pass
else:
                raise ConfigDoesNotAllowException("Access to this address is not permitted by server configuration")
return url
| true
| true
|
7908d98db986c3b603109c9af73e58b1ea91cf9b
| 4,059
|
py
|
Python
|
tools/chrome_proxy/webdriver/variations_combinations.py
|
sarang-apps/darshan_browser
|
173649bb8a7c656dc60784d19e7bb73e07c20daa
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/chrome_proxy/webdriver/variations_combinations.py
|
sarang-apps/darshan_browser
|
173649bb8a7c656dc60784d19e7bb73e07c20daa
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/chrome_proxy/webdriver/variations_combinations.py
|
sarang-apps/darshan_browser
|
173649bb8a7c656dc60784d19e7bb73e07c20daa
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2021-01-05T23:43:46.000Z
|
2021-01-07T23:36:34.000Z
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import io
import os
import platform
import sys
import time
import unittest
import common
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
os.pardir, 'tools', 'variations'))
import fieldtrial_util
test_blacklist = [
# These tests set their own field trials and should be ignored.
'quic.Quic.testCheckPageWithQuicProxy',
'quic.Quic.testCheckPageWithQuicProxyTransaction',
'smoke.Smoke.testCheckPageWithHoldback',
]
def GetExperimentArgs():
"""Returns a list of arguments with all tested field trials.
  This function is a simple wrapper around the variation team's fieldtrial_util
script that generates command line arguments to test Chromium field trials.
Returns:
an array of command line arguments to pass to chrome
"""
config_path = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
os.pardir, 'testing', 'variations', 'fieldtrial_testing_config.json')
my_platform = ''
if common.ParseFlags().android:
my_platform = 'android'
elif platform.system().lower() == 'linux':
my_platform = 'linux'
elif platform.system().lower() == 'windows':
my_platform = 'windows'
elif platform.system().lower() == 'darwin':
my_platform = 'mac'
else:
raise Exception('unknown platform!')
return fieldtrial_util.GenerateArgs(config_path, my_platform)
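# For reference (illustrative only; real values depend on the checked-in
# fieldtrial_testing_config.json), the generated list looks roughly like:
#   ['--force-fieldtrials=SomeTrial/SomeGroup',
#    '--enable-features=SomeFeature<SomeTrial', ...]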
def GenerateTestSuites():
"""A generator function that yields non-blacklisted tests to run.
This function yields test suites each with a single test case whose id is not
blacklisted in the array at the top of this file.
Yields:
non-blacklisted test suites to run
"""
loader = unittest.TestLoader()
for test_suite in loader.discover(os.path.dirname(__file__), pattern='*.py'):
for test_case in test_suite:
for test_method in test_case:
if test_method.id() not in test_blacklist:
ts = unittest.TestSuite()
ts.addTest(test_method)
yield (ts, test_method.id())
def ParseFlagsWithExtraBrowserArgs(extra_args):
"""Generates a function to override common.ParseFlags.
The returned function will honor everything in the original ParseFlags(), but
adds on additional browser_args.
Args:
    extra_args: The extra browser arguments to add.
Returns:
A function to override common.ParseFlags with additional browser_args.
"""
original_flags = common.ParseFlags()
def AddExtraBrowserArgs():
original_flags.browser_args = ((original_flags.browser_args if
original_flags.browser_args else '') + ' ' + extra_args)
return original_flags
return AddExtraBrowserArgs
def main():
"""Runs all non-blacklisted tests against Chromium field trials.
  This script runs all chrome proxy integration tests that haven't been
blacklisted against the field trial testing configuration used by Chromium
perf bots.
"""
flags = common.ParseFlags()
experiment_args = ' '.join(GetExperimentArgs())
common.ParseFlags = ParseFlagsWithExtraBrowserArgs(experiment_args)
# Each test is wrapped in its own test suite so results can be evaluated
# individually.
for test_suite, test_id in GenerateTestSuites():
buf = io.BytesIO()
sys.stdout.write('%s... ' % test_id)
sys.stdout.flush()
testRunner = unittest.runner.TextTestRunner(stream=buf, verbosity=2,
buffer=(not flags.disable_buffer))
result = testRunner.run(test_suite)
if result.wasSuccessful():
print('ok')
else:
print('failed')
print(buf.getvalue())
print('To repeat this test, run: ')
print("%s %s %s --test_filter=%s --browser_args='%s'" % (
sys.executable,
os.path.join(os.path.dirname(__file__), 'run_all_tests.py'), ' '.join(
sys.argv[1:]), '.'.join(test_id.split('.')[1:]), experiment_args))
if flags.failfast:
return
if __name__ == '__main__':
main()
| 33.545455
| 80
| 0.716433
|
from __future__ import print_function
import io
import os
import platform
import sys
import time
import unittest
import common
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
os.pardir, 'tools', 'variations'))
import fieldtrial_util
test_blacklist = [
'quic.Quic.testCheckPageWithQuicProxy',
'quic.Quic.testCheckPageWithQuicProxyTransaction',
'smoke.Smoke.testCheckPageWithHoldback',
]
def GetExperimentArgs():
config_path = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
os.pardir, 'testing', 'variations', 'fieldtrial_testing_config.json')
my_platform = ''
if common.ParseFlags().android:
my_platform = 'android'
elif platform.system().lower() == 'linux':
my_platform = 'linux'
elif platform.system().lower() == 'windows':
my_platform = 'windows'
elif platform.system().lower() == 'darwin':
my_platform = 'mac'
else:
raise Exception('unknown platform!')
return fieldtrial_util.GenerateArgs(config_path, my_platform)
def GenerateTestSuites():
loader = unittest.TestLoader()
for test_suite in loader.discover(os.path.dirname(__file__), pattern='*.py'):
for test_case in test_suite:
for test_method in test_case:
if test_method.id() not in test_blacklist:
ts = unittest.TestSuite()
ts.addTest(test_method)
yield (ts, test_method.id())
def ParseFlagsWithExtraBrowserArgs(extra_args):
original_flags = common.ParseFlags()
def AddExtraBrowserArgs():
original_flags.browser_args = ((original_flags.browser_args if
original_flags.browser_args else '') + ' ' + extra_args)
return original_flags
return AddExtraBrowserArgs
def main():
flags = common.ParseFlags()
experiment_args = ' '.join(GetExperimentArgs())
common.ParseFlags = ParseFlagsWithExtraBrowserArgs(experiment_args)
for test_suite, test_id in GenerateTestSuites():
buf = io.BytesIO()
sys.stdout.write('%s... ' % test_id)
sys.stdout.flush()
testRunner = unittest.runner.TextTestRunner(stream=buf, verbosity=2,
buffer=(not flags.disable_buffer))
result = testRunner.run(test_suite)
if result.wasSuccessful():
print('ok')
else:
print('failed')
print(buf.getvalue())
print('To repeat this test, run: ')
print("%s %s %s --test_filter=%s --browser_args='%s'" % (
sys.executable,
os.path.join(os.path.dirname(__file__), 'run_all_tests.py'), ' '.join(
sys.argv[1:]), '.'.join(test_id.split('.')[1:]), experiment_args))
if flags.failfast:
return
if __name__ == '__main__':
main()
| true
| true
|
7908d99819f24bfd0b83b16febaeb5d0fe85ff40
| 90
|
py
|
Python
|
python/p020.py
|
Martin-Gong/euler
|
dc29cb99c0e5f9916428de624edc375d9d5b4543
|
[
"MIT"
] | null | null | null |
python/p020.py
|
Martin-Gong/euler
|
dc29cb99c0e5f9916428de624edc375d9d5b4543
|
[
"MIT"
] | null | null | null |
python/p020.py
|
Martin-Gong/euler
|
dc29cb99c0e5f9916428de624edc375d9d5b4543
|
[
"MIT"
] | null | null | null |
# 20
num = 1
for i in range(100):
num *= i + 1
print(sum(int(n) for n in str(num)))
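# Cross-check sketch (not in the original solution): the standard library
# yields the same digit sum for 100!.
#   import math
#   sum(int(d) for d in str(math.factorial(100)))  # -> 648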
| 11.25
| 36
| 0.544444
|
num = 1
for i in range(100):
num *= i + 1
print(sum(int(n) for n in str(num)))
| true
| true
|
7908d9fe1ccbca93979cef5a519dac2aede81f9f
| 11,585
|
py
|
Python
|
cfltools/depreciated/getuniqueip.py
|
bradley-evans/cfltools
|
940014313063c97875a2fe1085cbfe392cb3ec44
|
[
"MIT"
] | 8
|
2018-07-26T02:32:33.000Z
|
2022-02-18T00:55:32.000Z
|
cfltools/depreciated/getuniqueip.py
|
bradley-evans/cfltools
|
940014313063c97875a2fe1085cbfe392cb3ec44
|
[
"MIT"
] | 3
|
2018-07-23T17:13:45.000Z
|
2018-07-31T19:57:43.000Z
|
cfltools/depreciated/getuniqueip.py
|
bradley-evans/cfltools
|
940014313063c97875a2fe1085cbfe392cb3ec44
|
[
"MIT"
] | 1
|
2019-10-06T23:20:17.000Z
|
2019-10-06T23:20:17.000Z
|
import csv # csv reader functions
from collections import Counter  # count uniques in a file quickly, O(n)
from decimal import Decimal # just to show decimals with lower precision
# Global Variables #
from cfltools.settings import APPFOLDER
class IpAddress:
def __init__(self, ip, numOccurances):
self.ip = ip
self.numOccurances = numOccurances
self.startTime = float('inf')
self.endTime = float('-inf')
def findTimeColumn(row):
"""Dynamically determine which column of a log file contains dates.
Parameters:
row: A row of a logfile
Returns:
        iterator: the index of the column that contains a valid date
            string, or None if no column parses as a date.
"""
import dateparser
iterator = 0
for item in row:
if item.isdigit():
# This is a hacky way of avoiding integers from
# being detected as date/time information
iterator += 1
continue
this = dateparser.parse(item)
if this:
return iterator
iterator += 1
return None
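# Illustrative example (hypothetical row): given
#     ['42', '2021-03-01 12:00:00', '10.0.0.1']
# the isdigit() guard skips '42', dateparser accepts index 1, and the
# function returns 1.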
def findIpColumn(row):
import re
iterator = 0
# What's below are two regular expressions that pattern match to IP
# addresses. I tried using a library for this (netaddr) but that
# started matching to long integers that happened to have the right
# bits for an IP address.
ipv4_address = re.compile("""
^(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])$
""", re.VERBOSE)
ipv6_address = re.compile("""
^(?:(?:[0-9A-Fa-f]{1,4}:)
{6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|
(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|::
(?:[0-9A-Fa-f]{1,4}:)
{5}(?:[0-9A-Fa-f]{1,4}:
[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::
(?:[0-9A-Fa-f]{1,4}:)
{4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|
(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|
(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::
(?:[0-9A-Fa-f]{1,4}:)
{3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|
(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|
(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::
(?:[0-9A-Fa-f]{1,4}:)
{2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|
(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|
(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::
[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:
[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|
(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::
(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|
(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|
(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::
[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:)
{,6}[0-9A-Fa-f]{1,4})?::)$
""", re.VERBOSE) # and that's how you regex IPv6
for item in row:
ipv4_check = ipv4_address.match(item)
ipv6_check = ipv6_address.match(item)
if ipv4_check or ipv6_check:
return iterator
iterator = iterator + 1
print("Could not find a column containing IP addresses!")
print("Error in getuniqueip.py, findIpColumn()")
exit(1)
def scrapeIPs(filename):
"""Scrapes all IP addresses from a logfile.
"""
# Encoding must be UTF-8 to allow for international chars
file = open(filename, encoding='utf-8')
logfile_reader = csv.reader(file) # csv reader class
# Put all of the IP addresses into one list. #
print('Getting the size of the logfile....\n')
# Count the number of rows so we can track progress later.
logsize = sum(1 for row in logfile_reader)
# Determine which row contains an IP address.
file.seek(0)
next(logfile_reader)
row = next(logfile_reader)
ip_column = findIpColumn(row)
file.seek(0) # Return to the top of the csv.
next(logfile_reader) # Skip the header row.
print('Processing ' + str(logsize) + ' entries.')
iterator = 0
all_ip_address = []
for entry in logfile_reader:
try:
# For each entry, we will append that entry's IP address to
# a list of all the IPs. We'll return that list later.
entry_ip_address = entry[ip_column]
all_ip_address.append(entry_ip_address)
iterator = iterator + 1
if iterator % 1000 == 0:
percentDone = round(Decimal((iterator / logsize) * 100), 2)
string = 'Currently: Scraping all IPs from file. Entry ' + \
str(iterator) + ' of ' + str(logsize) + \
' Percent Done: ' + str(percentDone) + '%.'
print(string, end='\r')
except UserWarning:
print('\n* * * Invalid entry detected on line ' + str(iterator) +
'.')
iterator = iterator + 1
print('Line data: ')
print('Using column {} for IP address.'.format(ip_column))
print('Data from that column, for this entry, '
'was {}.'.format(entry[ip_column]))
print(entry)
print('\n')
return all_ip_address
def getUniqueIps(all_ip_address):
# Run Counter() on the complete list of IPs. #
iterator = 0
counted_ip_address = Counter(all_ip_address)
unique_ip_address = []
print('=== Creating list of unique IPs. ===')
logsize = len(counted_ip_address)
for address in counted_ip_address:
try:
# Create a new IpAddress() object for each discovered
# IP. Store the address and the counts for its appearance
# in that object.
this_addr = address
this_count = counted_ip_address[address]
newIpAddress = IpAddress(this_addr, this_count)
unique_ip_address.append(newIpAddress)
iterator = iterator + 1
if (iterator % 1000) == 0:
percentDone = round(Decimal((iterator / logsize) * 100), 2)
string = 'Currently: Creating Unique IP List. Entry ' + \
str(iterator) + ' of ' + str(logsize) + \
' Percent Done: ' + str(percentDone) + '%.'
print(string, end='\r')
except UserWarning:
print('\nError creating IP address object!')
print('Crash data:')
print('\tThe address line was:')
print(address)
    # Sort the list by most frequently occurring IP. #
percentDone = 100
string = 'Currently: Generating report. Entry ' + str(iterator) + \
' of ' + str(logsize) + ' Percent Done: ' + str(percentDone) + \
'%.'
print(string, '\n')
unique_ip_address.sort(key=lambda x: x.numOccurances, reverse=True)
return unique_ip_address
def sendUniqueToDatabase(unique_ip_address, APPFOLDER, incident_id, conn):
print(APPFOLDER)
c = conn.cursor()
for ip in unique_ip_address:
c.execute("""
INSERT INTO ipaddrs(ip,number_occurances,incident_id,
start_time,end_time)
VALUES(?,?,?,?,?)
""", (ip.ip, ip.numOccurances, incident_id, ip.startTime,
ip.endTime))
conn.commit()
def getTimerange(filename, unique_ip_address):
"""Naive method to determine the time range during which an IP
address appears in a logfile.
    This is sort of hacky. I'm using dateparser to process fairly arbitrary
text input strings for dates from logs, converting those into POSIX
dates and times, and then comparing that to a simple integer stored
in the object to establish our range.
Parameters:
filename: The logfile we are examining in this job.
unique_ip_address: A list of IpAddress() objects.
Returns:
        unique_ip_address: A list of unique IpAddress()
objects with dates included.
"""
import csv
import dateparser
print('Determining date/time ranges for each unique IP...')
file = open(filename, 'r', encoding='utf-8')
logfile_reader = csv.reader(file)
next(logfile_reader)
row = next(logfile_reader)
ip_column = findIpColumn(row)
time_column = findTimeColumn(row)
file.seek(0)
next(logfile_reader)
# TODO: get this runtime under O(n^2)
for ip in unique_ip_address:
file.seek(0)
for entry in logfile_reader:
if ip.ip == entry[ip_column]:
entry_time = dateparser.parse(entry[time_column],
settings={'TIMEZONE': 'UTC',
'RETURN_AS_TIMEZONE_AWARE': True
}).timestamp()
if ip.startTime > entry_time:
ip.startTime = entry_time
if ip.endTime < entry_time:
ip.endTime = entry_time
return unique_ip_address
def run(filename, incident_id, seen):
import configparser
config = configparser.ConfigParser()
config.read(APPFOLDER + '/cfltools.ini')
all_ip_address = scrapeIPs(filename)
unique_ip_address = getUniqueIps(all_ip_address)
unique_ip_address = getTimerange(filename, unique_ip_address)
if not seen:
import sqlite3
db_connection = sqlite3.connect(config['USER']['db_loc'])
print('Adding to database located at {}...'.format(config['USER']['db_loc']))
sendUniqueToDatabase(unique_ip_address, APPFOLDER, incident_id, db_connection)
db_connection.close()
else:
print('File was already added to database. Skipping database export.')
def main():
pass
if __name__ == "__main__":
main()
| 41.375
| 86
| 0.489167
|
import csv
from collections import Counter
from decimal import Decimal
from cfltools.settings import APPFOLDER
class IpAddress:
def __init__(self, ip, numOccurances):
self.ip = ip
self.numOccurances = numOccurances
self.startTime = float('inf')
self.endTime = float('-inf')
def findTimeColumn(row):
import dateparser
iterator = 0
for item in row:
if item.isdigit():
iterator += 1
continue
this = dateparser.parse(item)
if this:
return iterator
iterator += 1
return None
def findIpColumn(row):
import re
iterator = 0
    # Two regular expressions that pattern match to IP addresses. I tried
    # using a library for this (netaddr) but that started matching to long
    # integers that happened to have the right bits for an IP address.
ipv4_address = re.compile("""
^(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])$
""", re.VERBOSE)
ipv6_address = re.compile("""
^(?:(?:[0-9A-Fa-f]{1,4}:)
{6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|
(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|::
(?:[0-9A-Fa-f]{1,4}:)
{5}(?:[0-9A-Fa-f]{1,4}:
[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::
(?:[0-9A-Fa-f]{1,4}:)
{4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|
(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|
(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::
(?:[0-9A-Fa-f]{1,4}:)
{3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|
(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|
(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::
(?:[0-9A-Fa-f]{1,4}:)
{2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|
(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|
(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::
[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:
[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|
(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::
(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|
(?:(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5])\\.)
{3}(?:[0-9]|[1-9][0-9]|1[0-9]
{2}|2[0-4][0-9]|25[0-5]))|
(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::
[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:)
{,6}[0-9A-Fa-f]{1,4})?::)$
""", re.VERBOSE) # and that's how you regex IPv6
for item in row:
ipv4_check = ipv4_address.match(item)
ipv6_check = ipv6_address.match(item)
if ipv4_check or ipv6_check:
return iterator
iterator = iterator + 1
print("Could not find a column containing IP addresses!")
print("Error in getuniqueip.py, findIpColumn()")
exit(1)
def scrapeIPs(filename):
file = open(filename, encoding='utf-8')
logfile_reader = csv.reader(file)
print('Getting the size of the logfile....\n')
logsize = sum(1 for row in logfile_reader)
file.seek(0)
next(logfile_reader)
row = next(logfile_reader)
ip_column = findIpColumn(row)
file.seek(0)
next(logfile_reader)
print('Processing ' + str(logsize) + ' entries.')
iterator = 0
all_ip_address = []
for entry in logfile_reader:
try:
            # Append each entry's IP address to a list; we'll return it later.
entry_ip_address = entry[ip_column]
all_ip_address.append(entry_ip_address)
iterator = iterator + 1
if iterator % 1000 == 0:
percentDone = round(Decimal((iterator / logsize) * 100), 2)
string = 'Currently: Scraping all IPs from file. Entry ' + \
str(iterator) + ' of ' + str(logsize) + \
' Percent Done: ' + str(percentDone) + '%.'
print(string, end='\r')
except UserWarning:
print('\n* * * Invalid entry detected on line ' + str(iterator) +
'.')
iterator = iterator + 1
print('Line data: ')
print('Using column {} for IP address.'.format(ip_column))
print('Data from that column, for this entry, '
'was {}.'.format(entry[ip_column]))
print(entry)
print('\n')
return all_ip_address
def getUniqueIps(all_ip_address):
iterator = 0
counted_ip_address = Counter(all_ip_address)
unique_ip_address = []
print('=== Creating list of unique IPs. ===')
logsize = len(counted_ip_address)
for address in counted_ip_address:
try:
this_addr = address
this_count = counted_ip_address[address]
newIpAddress = IpAddress(this_addr, this_count)
unique_ip_address.append(newIpAddress)
iterator = iterator + 1
if (iterator % 1000) == 0:
percentDone = round(Decimal((iterator / logsize) * 100), 2)
string = 'Currently: Creating Unique IP List. Entry ' + \
str(iterator) + ' of ' + str(logsize) + \
' Percent Done: ' + str(percentDone) + '%.'
print(string, end='\r')
except UserWarning:
print('\nError creating IP address object!')
print('Crash data:')
print('\tThe address line was:')
print(address)
percentDone = 100
string = 'Currently: Generating report. Entry ' + str(iterator) + \
' of ' + str(logsize) + ' Percent Done: ' + str(percentDone) + \
'%.'
print(string, '\n')
unique_ip_address.sort(key=lambda x: x.numOccurances, reverse=True)
return unique_ip_address
def sendUniqueToDatabase(unique_ip_address, APPFOLDER, incident_id, conn):
print(APPFOLDER)
c = conn.cursor()
for ip in unique_ip_address:
c.execute("""
INSERT INTO ipaddrs(ip,number_occurances,incident_id,
start_time,end_time)
VALUES(?,?,?,?,?)
""", (ip.ip, ip.numOccurances, incident_id, ip.startTime,
ip.endTime))
conn.commit()
def getTimerange(filename, unique_ip_address):
import csv
import dateparser
print('Determining date/time ranges for each unique IP...')
file = open(filename, 'r', encoding='utf-8')
logfile_reader = csv.reader(file)
next(logfile_reader)
row = next(logfile_reader)
ip_column = findIpColumn(row)
time_column = findTimeColumn(row)
file.seek(0)
next(logfile_reader)
for ip in unique_ip_address:
file.seek(0)
for entry in logfile_reader:
if ip.ip == entry[ip_column]:
entry_time = dateparser.parse(entry[time_column],
settings={'TIMEZONE': 'UTC',
'RETURN_AS_TIMEZONE_AWARE': True
}).timestamp()
if ip.startTime > entry_time:
ip.startTime = entry_time
if ip.endTime < entry_time:
ip.endTime = entry_time
return unique_ip_address
def run(filename, incident_id, seen):
import configparser
config = configparser.ConfigParser()
config.read(APPFOLDER + '/cfltools.ini')
all_ip_address = scrapeIPs(filename)
unique_ip_address = getUniqueIps(all_ip_address)
unique_ip_address = getTimerange(filename, unique_ip_address)
if not seen:
import sqlite3
db_connection = sqlite3.connect(config['USER']['db_loc'])
print('Adding to database located at {}...'.format(config['USER']['db_loc']))
sendUniqueToDatabase(unique_ip_address, APPFOLDER, incident_id, db_connection)
db_connection.close()
else:
print('File was already added to database. Skipping database export.')
def main():
pass
if __name__ == "__main__":
main()
| true
| true
|
7908da2e7d3605d288fb3f68c65c0ddb0ec54557
| 437
|
py
|
Python
|
plotly/validators/streamtube/_name.py
|
gnestor/plotly.py
|
a8ae062795ddbf9867b8578fe6d9e244948c15ff
|
[
"MIT"
] | 12
|
2020-04-18T18:10:22.000Z
|
2021-12-06T10:11:15.000Z
|
plotly/validators/streamtube/_name.py
|
Vesauza/plotly.py
|
e53e626d59495d440341751f60aeff73ff365c28
|
[
"MIT"
] | 27
|
2020-04-28T21:23:12.000Z
|
2021-06-25T15:36:38.000Z
|
plotly/validators/streamtube/_name.py
|
Vesauza/plotly.py
|
e53e626d59495d440341751f60aeff73ff365c28
|
[
"MIT"
] | 6
|
2020-04-18T23:07:08.000Z
|
2021-11-18T07:53:06.000Z
|
import _plotly_utils.basevalidators
class NameValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(self, plotly_name='name', parent_name='streamtube', **kwargs):
super(NameValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'style'),
role=kwargs.pop('role', 'info'),
**kwargs
)
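# Usage sketch (assumed; plotly normally instantiates validators from its
# generated figure classes):
#
#     v = NameValidator()
#     v.validate_coerce("tube-1")  # -> 'tube-1'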
| 31.214286
| 79
| 0.647597
|
import _plotly_utils.basevalidators
class NameValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(self, plotly_name='name', parent_name='streamtube', **kwargs):
super(NameValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'style'),
role=kwargs.pop('role', 'info'),
**kwargs
)
| true
| true
|
7908da3a9de13aa8b47d74510aff12c78f3cf400
| 1,156
|
py
|
Python
|
checkov/terraform/checks/resource/aws/S3BucketObjectLock.py
|
jamesholland-uk/checkov
|
d73fd4bd7096d48ab3434a92a177bcc55605460a
|
[
"Apache-2.0"
] | 1
|
2021-02-13T15:24:42.000Z
|
2021-02-13T15:24:42.000Z
|
checkov/terraform/checks/resource/aws/S3BucketObjectLock.py
|
jamesholland-uk/checkov
|
d73fd4bd7096d48ab3434a92a177bcc55605460a
|
[
"Apache-2.0"
] | 7
|
2021-04-12T06:54:07.000Z
|
2022-03-21T14:04:14.000Z
|
checkov/terraform/checks/resource/aws/S3BucketObjectLock.py
|
jamesholland-uk/checkov
|
d73fd4bd7096d48ab3434a92a177bcc55605460a
|
[
"Apache-2.0"
] | 1
|
2021-12-16T03:09:55.000Z
|
2021-12-16T03:09:55.000Z
|
from typing import Dict, List, Any
from checkov.common.models.enums import CheckCategories, CheckResult
from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceCheck
class S3BucketObjectLock(BaseResourceCheck):
def __init__(self) -> None:
name = "Ensure that S3 bucket has lock configuration enabled by default"
id = "CKV_AWS_143"
supported_resources = ["aws_s3_bucket"]
categories = [CheckCategories.GENERAL_SECURITY]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf: Dict[str, List[Any]]) -> CheckResult:
lock_conf = conf.get("object_lock_configuration")
if lock_conf and lock_conf[0]:
lock_enabled = lock_conf[0].get("object_lock_enabled")
if lock_enabled in ["Enabled", ["Enabled"]]:
return CheckResult.PASSED
return CheckResult.FAILED
return CheckResult.UNKNOWN
def get_evaluated_keys(self) -> List[str]:
return ["object_lock_configuration/[0]/object_lock_enabled"]
check = S3BucketObjectLock()
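# A minimal sketch (hypothetical conf dict, shaped the way Checkov's
# Terraform parser wraps attribute values in lists):
#
#     conf = {"object_lock_configuration": [{"object_lock_enabled": ["Enabled"]}]}
#     check.scan_resource_conf(conf)  # -> CheckResult.PASSED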
| 38.533333
| 106
| 0.712803
|
from typing import Dict, List, Any
from checkov.common.models.enums import CheckCategories, CheckResult
from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceCheck
class S3BucketObjectLock(BaseResourceCheck):
def __init__(self) -> None:
name = "Ensure that S3 bucket has lock configuration enabled by default"
id = "CKV_AWS_143"
supported_resources = ["aws_s3_bucket"]
categories = [CheckCategories.GENERAL_SECURITY]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf: Dict[str, List[Any]]) -> CheckResult:
lock_conf = conf.get("object_lock_configuration")
if lock_conf and lock_conf[0]:
lock_enabled = lock_conf[0].get("object_lock_enabled")
if lock_enabled in ["Enabled", ["Enabled"]]:
return CheckResult.PASSED
return CheckResult.FAILED
return CheckResult.UNKNOWN
def get_evaluated_keys(self) -> List[str]:
return ["object_lock_configuration/[0]/object_lock_enabled"]
check = S3BucketObjectLock()
| true
| true
|
7908daefc745db57da1e75a03a505539fc39727a
| 4,708
|
py
|
Python
|
opts.py
|
yucornetto/CAKES
|
a1f87694028e1fae45c993129fe2e7f034a977cc
|
[
"MIT"
] | 8
|
2020-12-15T03:05:13.000Z
|
2021-06-07T08:42:56.000Z
|
opts.py
|
yucornetto/CAKES
|
a1f87694028e1fae45c993129fe2e7f034a977cc
|
[
"MIT"
] | null | null | null |
opts.py
|
yucornetto/CAKES
|
a1f87694028e1fae45c993129fe2e7f034a977cc
|
[
"MIT"
] | 1
|
2021-01-04T08:07:53.000Z
|
2021-01-04T08:07:53.000Z
|
import argparse
parser = argparse.ArgumentParser(description="PyTorch implementation of action recognition models")
parser.add_argument('--dataset', type=str, choices=['somethingv1','somethingv2','diving48'],
default = 'somethingv1')
parser.add_argument('--root_path', type = str, default = '../',
help = 'root path to video dataset folders')
parser.add_argument('--store_name', type=str, default="")
# ========================= Model Configs ==========================
parser.add_argument('--type', type=str, default="GST",choices=['GST','R3D','S3D', 'I3D'],
help = 'type of temporal models, currently support GST,Res3D and S3D')
parser.add_argument('--arch', type=str, default="resnet50",choices=['resnet50','resnet101'],
help = 'backbone networks, currently only support resnet')
parser.add_argument('--num_segments', type=int, default=8)
parser.add_argument('--alpha', type=int, default=4, help = 'spatial temporal split for output channels')
parser.add_argument('--beta', type=int, default=2, choices=[1,2], help = 'channel splits for input channels, 1 for GST-Large and 2 for GST')
# ========================= Learning Configs ==========================
parser.add_argument('--epochs', default=70, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('-b', '--batch-size', default=24, type=int,
metavar='N', help='mini-batch size')
parser.add_argument('--lr', '--learning-rate', default=0.01, type=float,
metavar='LR', help='initial learning rate')
parser.add_argument('--lr_steps', default=[50, 60], type=float, nargs="+",
metavar='LRSteps', help='epochs to decay learning rate by 10')
parser.add_argument('--dropout', '--dp', default=0.3, type=float,
metavar='dp', help='dropout ratio')
parser.add_argument('--warm', default=5, type=float, help='warm up epochs')
# ========================= Optimizer Configs ==========================
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--weight_decay', '--wd', default=3e-4, type=float,
metavar='W', help='weight decay (default: 3e-4)')
parser.add_argument('--clip-gradient', '--gd', default=20, type=float,
metavar='W', help='gradient norm clipping (default: 20)')
# ========================= Monitor Configs ==========================
parser.add_argument('--print-freq', '-p', default=20, type=int,
metavar='N', help='print frequency (default: 20)')
parser.add_argument('--eval-freq', '-ef', default=1, type=int,
metavar='N', help='evaluation frequency (default: 1)')
# ========================= Runtime Configs ==========================
parser.add_argument('-j', '--workers', default=16, type=int, metavar='N',
help='number of data loading workers (default: 16)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('--checkpoint_dir',type=str, required=True,
help = 'folder to restore checkpoint and training log')
# ========================= Added by Qihang ==========================
parser.add_argument('--op_code', type=str, default="conv3d", help='op code to use')
parser.add_argument('--sparsity-regularization', '-sr', dest='sr', action='store_true',
help='train with channel sparsity regularization')
parser.add_argument('--s', type=float, default=0.0001,
help='scale sparse rate (default: 0.0001)')
parser.add_argument('--conv_config', type=str, default='',
help='conv config')
parser.add_argument('--search', action='store_true', default=False,
help='search mode')
parser.add_argument('--prune', action='store_true', default=False,
help='prune after training')
parser.add_argument('--prune_model_path', type=str, default='',
help='model to prune')
parser.add_argument('--reweight', action='store_true', default=False,
help='reweight the prune factor')
parser.add_argument('--finetune', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
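# --- Editor's sketch (hypothetical usage, not part of the original file) ---
# Shows how the parser above is consumed; '--checkpoint_dir' is the only
# required flag, and './ckpt' is a placeholder path.
if __name__ == "__main__":
    args = parser.parse_args(['--checkpoint_dir', './ckpt'])
    print(args.dataset, args.lr, args.num_segments)  # somethingv1 0.01 8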
| 62.773333
| 140
| 0.594095
|
import argparse
parser = argparse.ArgumentParser(description="PyTorch implementation of action recognition models")
parser.add_argument('--dataset', type=str, choices=['somethingv1','somethingv2','diving48'],
default = 'somethingv1')
parser.add_argument('--root_path', type = str, default = '../',
help = 'root path to video dataset folders')
parser.add_argument('--store_name', type=str, default="")
parser.add_argument('--type', type=str, default="GST",choices=['GST','R3D','S3D', 'I3D'],
help = 'type of temporal models, currently support GST,Res3D and S3D')
parser.add_argument('--arch', type=str, default="resnet50",choices=['resnet50','resnet101'],
help = 'backbone networks, currently only support resnet')
parser.add_argument('--num_segments', type=int, default=8)
parser.add_argument('--alpha', type=int, default=4, help = 'spatial temporal split for output channels')
parser.add_argument('--beta', type=int, default=2, choices=[1,2], help = 'channel splits for input channels, 1 for GST-Large and 2 for GST')
parser.add_argument('--epochs', default=70, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('-b', '--batch-size', default=24, type=int,
metavar='N', help='mini-batch size')
parser.add_argument('--lr', '--learning-rate', default=0.01, type=float,
metavar='LR', help='initial learning rate')
parser.add_argument('--lr_steps', default=[50, 60], type=float, nargs="+",
metavar='LRSteps', help='epochs to decay learning rate by 10')
parser.add_argument('--dropout', '--dp', default=0.3, type=float,
metavar='dp', help='dropout ratio')
parser.add_argument('--warm', default=5, type=float, help='warm up epochs')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--weight_decay', '--wd', default=3e-4, type=float,
metavar='W', help='weight decay (default: 3e-4)')
parser.add_argument('--clip-gradient', '--gd', default=20, type=float,
metavar='W', help='gradient norm clipping (default: 20)')
parser.add_argument('--print-freq', '-p', default=20, type=int,
metavar='N', help='print frequency (default: 20)')
parser.add_argument('--eval-freq', '-ef', default=1, type=int,
metavar='N', help='evaluation frequency (default: 1)')
parser.add_argument('-j', '--workers', default=16, type=int, metavar='N',
help='number of data loading workers (default: 16)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('--checkpoint_dir',type=str, required=True,
help = 'folder to restore checkpoint and training log')
parser.add_argument('--op_code', type=str, default="conv3d", help='op code to use')
parser.add_argument('--sparsity-regularization', '-sr', dest='sr', action='store_true',
help='train with channel sparsity regularization')
parser.add_argument('--s', type=float, default=0.0001,
help='scale sparse rate (default: 0.0001)')
parser.add_argument('--conv_config', type=str, default='',
help='conv config')
parser.add_argument('--search', action='store_true', default=False,
help='search mode')
parser.add_argument('--prune', action='store_true', default=False,
help='prune after training')
parser.add_argument('--prune_model_path', type=str, default='',
help='model to prune')
parser.add_argument('--reweight', action='store_true', default=False,
help='reweight the prune factor')
parser.add_argument('--finetune', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
| true
| true
|
7908dcae58f72e537451fda85c471be83bee3ac4
| 1,042
|
py
|
Python
|
cride/utils/models.py
|
Jonulo/dejango-adv
|
29bdec2f3a4c2cf9d9ff154e5c1bedf4006e479a
|
[
"MIT"
] | null | null | null |
cride/utils/models.py
|
Jonulo/dejango-adv
|
29bdec2f3a4c2cf9d9ff154e5c1bedf4006e479a
|
[
"MIT"
] | null | null | null |
cride/utils/models.py
|
Jonulo/dejango-adv
|
29bdec2f3a4c2cf9d9ff154e5c1bedf4006e479a
|
[
"MIT"
] | null | null | null |
"""Django models utilities."""
#Django
from django.db import models
class CRideModel(models.Model):
"""
    CRideModel acts as an abstract base class from
    which every other model in the project will inherit.
    This class provides every table with the following
attributes:
+ created (Datetime): store the datetime the object was created
+ modified (Datetime): store the last datetime the object was modified
"""
created = models.DateTimeField(
'created at',
        auto_now_add=True,  # set the creation timestamp automatically on first save
help_text='Date time on which the object was created.'
)
modified = models.DateTimeField(
'modified at',
        auto_now=True,  # refresh the timestamp automatically on every save
help_text='Date time on which the object was last modified.'
)
class Meta:
"""Meta options."""
abstract = True
        # Default query configuration inherited by every child model
get_latest_by = 'created'
ordering = ['-created', '-modified']
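# --- Editor's sketch (hypothetical, not part of this repo) ---
# Example of a concrete model inheriting CRideModel's timestamps; the 'Ride'
# name and 'title' field are assumptions for illustration only.
class Ride(CRideModel):
    """Hypothetical concrete model with automatic created/modified fields."""
    title = models.CharField(max_length=100)

    class Meta(CRideModel.Meta):
        # Django resets abstract to False for children, so Ride gets its own
        # table while keeping the parent's ordering and get_latest_by.
        abstract = False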
| 29.771429
| 78
| 0.649712
|
from django.db import models
class CRideModel(models.Model):
created = models.DateTimeField(
'created at',
auto_now_add=True,
help_text='Date time on which the object was created.'
)
modified = models.DateTimeField(
'modified at',
auto_now=True,
help_text='Date time on which the object was last modified.'
)
class Meta:
abstract = True
get_latest_by = 'created'
ordering = ['-created', '-modified']
| true
| true
|
7908dd28cb376d21480645ce25ee9573cdeda8a6
| 382
|
py
|
Python
|
web/node_modules/weblas/test/data/binary_matrix.py
|
egornagornov3tf4k/Zeta36y
|
4502be4c86195b0aa5184c45d6f221b34daee7a8
|
[
"MIT"
] | 13
|
2017-12-29T12:16:36.000Z
|
2022-01-22T21:23:00.000Z
|
web/node_modules/weblas/test/data/binary_matrix.py
|
egornagornov3tf4k/Zeta36y
|
4502be4c86195b0aa5184c45d6f221b34daee7a8
|
[
"MIT"
] | null | null | null |
web/node_modules/weblas/test/data/binary_matrix.py
|
egornagornov3tf4k/Zeta36y
|
4502be4c86195b0aa5184c45d6f221b34daee7a8
|
[
"MIT"
] | 6
|
2018-02-21T02:11:10.000Z
|
2022-01-22T21:23:47.000Z
|
#!/usr/bin/env python
"""Create two randomly generated matrices, of the specified sizes and write them
to JSON files.
"""
import json
import numpy as np
def read(path):
with open(path, 'rb') as f:
matrix = np.fromfile(f, dtype=np.float32)
return matrix
def write(path, matrix):
with open(path, 'wb') as f:
        f.write(matrix.astype(np.float32).tobytes())  # tostring() is deprecated in NumPy
return matrix
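if __name__ == '__main__':
    # Editor's sketch (hypothetical): round-trip a random float32 matrix
    # through the helpers above; 'matrix.bin' is a placeholder path.
    a = np.random.rand(4, 4).astype(np.float32)
    write('matrix.bin', a)
    b = read('matrix.bin').reshape(4, 4)
    assert np.allclose(a, b)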
| 16.608696
| 80
| 0.704188
|
import json
import numpy as np
def read(path):
with open(path, 'rb') as f:
matrix = np.fromfile(f, dtype=np.float32)
return matrix
def write(path, matrix):
with open(path, 'wb') as f:
        f.write(matrix.astype(np.float32).tobytes())
return matrix
| true
| true
|
7908dd7b845bff1879f13e4eec61ac675b3a410d
| 2,723
|
py
|
Python
|
gate/mixin.py
|
n-serrette/wedding-website
|
9086edee97b8c99beb2da7f833663ef15520b46a
|
[
"MIT"
] | null | null | null |
gate/mixin.py
|
n-serrette/wedding-website
|
9086edee97b8c99beb2da7f833663ef15520b46a
|
[
"MIT"
] | null | null | null |
gate/mixin.py
|
n-serrette/wedding-website
|
9086edee97b8c99beb2da7f833663ef15520b46a
|
[
"MIT"
] | null | null | null |
from urllib.parse import urlparse
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.shortcuts import resolve_url
from gate.views import redirect_to_gate
from gate import REDIRECT_FIELD_NAME
class GateLockMixin:
gate_url = None
permission_denied_message = ''
raise_exception = False
redirect_field_name = REDIRECT_FIELD_NAME
def get_gate_url(self):
"""
Override this method to override the gate_url attribute.
"""
gate_url = self.gate_url or settings.GATE_URL
if not gate_url:
raise ImproperlyConfigured(
'{0} is missing the gate_url attribute. Define {0}.gate_url, settings.GATE_URL, or override '
'{0}.get_gate_url().'.format(self.__class__.__name__)
)
return str(gate_url)
def get_permission_denied_message(self):
"""
Override this method to override the permission_denied_message attribute.
"""
return self.permission_denied_message
def get_redirect_field_name(self):
"""
Override this method to override the redirect_field_name attribute.
"""
return self.redirect_field_name
def handle_no_permission(self):
if self.raise_exception:
raise PermissionDenied(self.get_permission_denied_message())
path = self.request.build_absolute_uri()
resolved_gate_url = resolve_url(self.get_gate_url())
# If the gate url is the same scheme and net location then use the
# path as the "next" url.
gate_scheme, gate_netloc = urlparse(resolved_gate_url)[:2]
current_scheme, current_netloc = urlparse(path)[:2]
if (
(not gate_scheme or gate_scheme == current_scheme) and
(not gate_netloc or gate_netloc == current_netloc)
):
path = self.request.get_full_path()
return redirect_to_gate(
path,
resolved_gate_url,
self.get_redirect_field_name(),
)
def lock_test_func(self, key):
raise NotImplementedError(
'{} is missing the implementation of the test_func() method.'.format(self.__class__.__name__)
)
def get_lock_test_func(self):
"""
Override this method to use a different test_func method.
"""
return self.lock_test_func
def dispatch(self, request, *args, **kwargs):
key = request.session.get('gate_key', None)
key_test_result = self.get_lock_test_func()(key)
if not key_test_result:
return self.handle_no_permission()
return super().dispatch(request, *args, **kwargs)
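# --- Editor's sketch (hypothetical, not part of this repo) ---
# Example subclass showing the intended use: implement lock_test_func against
# the session's gate key. TemplateView, 'gated.html' and SECRET_GATE_KEY are
# assumptions for illustration only.
from django.views.generic import TemplateView

class GatedPageView(GateLockMixin, TemplateView):
    template_name = 'gated.html'

    def lock_test_func(self, key):
        # Grant access only when the session key matches the configured key.
        return key is not None and key == getattr(settings, 'SECRET_GATE_KEY', None)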
| 34.468354
| 109
| 0.658465
|
from urllib.parse import urlparse
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.shortcuts import resolve_url
from gate.views import redirect_to_gate
from gate import REDIRECT_FIELD_NAME
class GateLockMixin:
gate_url = None
permission_denied_message = ''
raise_exception = False
redirect_field_name = REDIRECT_FIELD_NAME
def get_gate_url(self):
gate_url = self.gate_url or settings.GATE_URL
if not gate_url:
raise ImproperlyConfigured(
'{0} is missing the gate_url attribute. Define {0}.gate_url, settings.GATE_URL, or override '
'{0}.get_gate_url().'.format(self.__class__.__name__)
)
return str(gate_url)
def get_permission_denied_message(self):
return self.permission_denied_message
def get_redirect_field_name(self):
return self.redirect_field_name
def handle_no_permission(self):
if self.raise_exception:
raise PermissionDenied(self.get_permission_denied_message())
path = self.request.build_absolute_uri()
resolved_gate_url = resolve_url(self.get_gate_url())
gate_scheme, gate_netloc = urlparse(resolved_gate_url)[:2]
current_scheme, current_netloc = urlparse(path)[:2]
if (
(not gate_scheme or gate_scheme == current_scheme) and
(not gate_netloc or gate_netloc == current_netloc)
):
path = self.request.get_full_path()
return redirect_to_gate(
path,
resolved_gate_url,
self.get_redirect_field_name(),
)
def lock_test_func(self, key):
raise NotImplementedError(
'{} is missing the implementation of the test_func() method.'.format(self.__class__.__name__)
)
def get_lock_test_func(self):
return self.lock_test_func
def dispatch(self, request, *args, **kwargs):
key = request.session.get('gate_key', None)
key_test_result = self.get_lock_test_func()(key)
if not key_test_result:
return self.handle_no_permission()
return super().dispatch(request, *args, **kwargs)
| true
| true
|
7908deb22f18ae2f3b153c75fdd6b8808525f189
| 2,622
|
py
|
Python
|
Trees/BinaryTreePL.py
|
eferroni/Data-Structure-and-Algorithms
|
bad33d4619884e03c85e13f604db753af5b543a0
|
[
"MIT"
] | null | null | null |
Trees/BinaryTreePL.py
|
eferroni/Data-Structure-and-Algorithms
|
bad33d4619884e03c85e13f604db753af5b543a0
|
[
"MIT"
] | null | null | null |
Trees/BinaryTreePL.py
|
eferroni/Data-Structure-and-Algorithms
|
bad33d4619884e03c85e13f604db753af5b543a0
|
[
"MIT"
] | null | null | null |
import QueueLinkedList as queue
"""
n1
/\
n2 n3
/\ /\
n4 n5 n6 n7
"""
class BinaryTree:
def __init__(self, size) -> None:
self.customList = size * [None]
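        # index 0 is left unused: the node stored at index i has its children
        # at indices 2*i and 2*i + 1 (1-based heap layout)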
self.lastUsedIndex = 0
self.maxSize = size
    def insertNode(self, value):
if self.lastUsedIndex + 1 == self.maxSize:
return "Full"
self.customList[self.lastUsedIndex + 1] = value
self.lastUsedIndex += 1
return "Inserted"
def searchNode(self, value):
if value in self.customList:
return "Success"
return "Not found"
def preOrderTraversal(self, index):
# root -> left -> right
if index > self.lastUsedIndex:
return
print(self.customList[index])
self.preOrderTraversal(index * 2)
self.preOrderTraversal(index * 2 + 1)
def inOrderTraversal(self, index):
# left -> root -> right
if index > self.lastUsedIndex:
return
self.inOrderTraversal(index * 2)
print(self.customList[index])
self.inOrderTraversal(index * 2 + 1)
def postOrderTraversal(self, index):
# left -> right -> root
if index > self.lastUsedIndex:
return
self.postOrderTraversal(index * 2)
self.postOrderTraversal(index * 2 + 1)
print(self.customList[index])
def levelOrderTraversal(self, index):
for i in range(index, self.lastUsedIndex + 1):
print(self.customList[i])
def deleteNode(self, value):
if self.lastUsedIndex == 0:
return "Nothing to delete"
for i in range(1, self.lastUsedIndex + 1):
if self.customList[i] == value:
self.customList[i] = self.customList[self.lastUsedIndex]
self.customList[self.lastUsedIndex] = None
self.lastUsedIndex -= 1
return "Deleted"
def deleteTree(self):
self.customList = None
return "Deleted"
newBT = BinaryTree(8)
print(newBT.inserNode("N1"))
print(newBT.inserNode("N2"))
print(newBT.inserNode("N3"))
print(newBT.inserNode("N4"))
print(newBT.inserNode("N5"))
print(newBT.inserNode("N6"))
print(newBT.inserNode("N7"))
print(newBT.inserNode("N8"))
print(newBT.searchNode("N1"))
print(newBT.searchNode("N8"))
print("preOrderTraversal")
newBT.preOrderTraversal(1)
print("inOrderTraversal")
newBT.inOrderTraversal(1)
print("postOrderTraversal")
newBT.postOrderTraversal(1)
print("levelOrderTraversal")
newBT.levelOrderTraversal(1)
print(newBT.deleteNode("N4"))
newBT.levelOrderTraversal(1)
print(newBT.deleteTree())
| 25.705882
| 72
| 0.621663
|
import QueueLinkedList as queue
class BinaryTree:
def __init__(self, size) -> None:
self.customList = size * [None]
self.lastUsedIndex = 0
self.maxSize = size
    def insertNode(self, value):
if self.lastUsedIndex + 1 == self.maxSize:
return "Full"
self.customList[self.lastUsedIndex + 1] = value
self.lastUsedIndex += 1
return "Inserted"
def searchNode(self, value):
if value in self.customList:
return "Success"
return "Not found"
def preOrderTraversal(self, index):
if index > self.lastUsedIndex:
return
print(self.customList[index])
self.preOrderTraversal(index * 2)
self.preOrderTraversal(index * 2 + 1)
def inOrderTraversal(self, index):
if index > self.lastUsedIndex:
return
self.inOrderTraversal(index * 2)
print(self.customList[index])
self.inOrderTraversal(index * 2 + 1)
def postOrderTraversal(self, index):
if index > self.lastUsedIndex:
return
self.postOrderTraversal(index * 2)
self.postOrderTraversal(index * 2 + 1)
print(self.customList[index])
def levelOrderTraversal(self, index):
for i in range(index, self.lastUsedIndex + 1):
print(self.customList[i])
def deleteNode(self, value):
if self.lastUsedIndex == 0:
return "Nothing to delete"
for i in range(1, self.lastUsedIndex + 1):
if self.customList[i] == value:
self.customList[i] = self.customList[self.lastUsedIndex]
self.customList[self.lastUsedIndex] = None
self.lastUsedIndex -= 1
return "Deleted"
def deleteTree(self):
self.customList = None
return "Deleted"
newBT = BinaryTree(8)
print(newBT.inserNode("N1"))
print(newBT.inserNode("N2"))
print(newBT.inserNode("N3"))
print(newBT.inserNode("N4"))
print(newBT.inserNode("N5"))
print(newBT.inserNode("N6"))
print(newBT.inserNode("N7"))
print(newBT.inserNode("N8"))
print(newBT.searchNode("N1"))
print(newBT.searchNode("N8"))
print("preOrderTraversal")
newBT.preOrderTraversal(1)
print("inOrderTraversal")
newBT.inOrderTraversal(1)
print("postOrderTraversal")
newBT.postOrderTraversal(1)
print("levelOrderTraversal")
newBT.levelOrderTraversal(1)
print(newBT.deleteNode("N4"))
newBT.levelOrderTraversal(1)
print(newBT.deleteTree())
| true
| true
|
7908deb2cec3d0a93231706775a7f0c1ea2e97e5
| 994
|
py
|
Python
|
test/test_check_invalid_value_IATA_code.py
|
Sergei-Soldatov/IFD.A
|
5ec99de7a0059e1ae3551e4df89c461332246f60
|
[
"Apache-2.0"
] | null | null | null |
test/test_check_invalid_value_IATA_code.py
|
Sergei-Soldatov/IFD.A
|
5ec99de7a0059e1ae3551e4df89c461332246f60
|
[
"Apache-2.0"
] | null | null | null |
test/test_check_invalid_value_IATA_code.py
|
Sergei-Soldatov/IFD.A
|
5ec99de7a0059e1ae3551e4df89c461332246f60
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# IFD.A-8 :: Version: 1 :: Check entering an invalid value in the "IATA code" field for airport selection
# Step 1
def test_check_invalid_value_IATA_to_select_airport(app):
app.session.enter_login(username="test")
app.session.enter_password(password="1245")
app.airport.open_form_add_airport()
app.airport.enter_IATA_code(iata_cod="QWE")
app.airport.search_airport_by_parameter()
app.airport.message_no_airports()
app.airport.exit_from_the_add_airport_form()
app.session.logout()
# IFD.A-8 :: Version: 1 :: Check entering an invalid value in the "IATA code" field for airport selection
# Step 2
def test_check_invalid_characters_in_IATA_code(app):
app.session.enter_login(username="test")
app.session.enter_password(password="1245")
app.airport.open_form_add_airport()
app.airport.enter_IATA_code(iata_cod="!№;%:?*")
app.airport.wait_massege_no_airport()
app.airport.exit_from_the_add_airport_form()
app.session.logout()
| 41.416667
| 100
| 0.751509
|
def test_check_invalid_value_IATA_to_select_airport(app):
app.session.enter_login(username="test")
app.session.enter_password(password="1245")
app.airport.open_form_add_airport()
app.airport.enter_IATA_code(iata_cod="QWE")
app.airport.search_airport_by_parameter()
app.airport.message_no_airports()
app.airport.exit_from_the_add_airport_form()
app.session.logout()
def test_check_invalid_characters_in_IATA_code(app):
app.session.enter_login(username="test")
app.session.enter_password(password="1245")
app.airport.open_form_add_airport()
app.airport.enter_IATA_code(iata_cod="!№;%:?*")
app.airport.wait_massege_no_airport()
app.airport.exit_from_the_add_airport_form()
app.session.logout()
| true
| true
|
7908e1416c956529d1e7d2760160bb600aff2d8c
| 6,798
|
py
|
Python
|
bolinette/data/mapping/mapper.py
|
TheCaptainCat/flasque
|
d42deb57572084f513202a32c460186700ce8e0b
|
[
"MIT"
] | 3
|
2019-10-25T12:21:28.000Z
|
2020-09-11T13:43:32.000Z
|
bolinette/data/mapping/mapper.py
|
TheCaptainCat/bolinette
|
d42deb57572084f513202a32c460186700ce8e0b
|
[
"MIT"
] | null | null | null |
bolinette/data/mapping/mapper.py
|
TheCaptainCat/bolinette
|
d42deb57572084f513202a32c460186700ce8e0b
|
[
"MIT"
] | null | null | null |
from typing import Literal
from bolinette import data, types
from bolinette.data import mapping
from bolinette.exceptions import InternalError
from bolinette.utils.functions import getattr_, hasattr_, invoke
class Mapper:
def __init__(self):
self._payloads: dict[str, dict[str, mapping.Definition]] = {}
self._responses: dict[str, dict[str, mapping.Definition]] = {}
@staticmethod
def _get_def(
collection: dict[str, dict[str, mapping.Definition]], model_name: str, key: str
) -> "mapping.Definition":
m = collection.get(model_name)
if m is None:
raise InternalError(f"mapping.unknown_model:{model_name}")
d = m.get(key)
if d is None:
raise InternalError(f"mapping.unknown_definition:{model_name}.{key}")
return d
def payload(self, model_name: str, key: str):
return self._get_def(self._payloads, model_name, key)
@property
def payloads(self):
for model_name in self._payloads:
for key in self._payloads[model_name]:
yield model_name, key, self._payloads[model_name][key]
def response(self, model_name: str, key: str):
return self._get_def(self._responses, model_name, key)
@property
def responses(self):
for model_name in self._responses:
for key in self._responses[model_name]:
yield model_name, key, self._responses[model_name][key]
def _extract_defs(
self,
model: "data.Model",
model_cls: type["data.Model"],
collection: Literal["payloads", "responses"],
merge_defs: Literal["ignore", "append", "overwrite"],
):
defs = {}
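        # Walk parent models first so that child definitions can override or
        # extend (depending on merge_defs) whatever they inherit.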
for parent in model_cls.__bases__:
if issubclass(parent, data.Model) and parent != data.Model:
for _key, _def in self._extract_defs(
model, parent, collection, merge_defs
).items():
defs[_key] = _def
def_func = getattr(model_cls, collection)
if hasattr_(def_func, "__func__"):
def_func = def_func.__func__
def_gen = def_func(model)
if def_gen is None:
return defs
new_defs = list(def_gen)
for _def in new_defs:
if isinstance(_def, list):
model_key = "default"
payload = _def
else:
model_key, payload = _def
if model_key in defs:
if merge_defs == "append":
for _param in payload:
defs[model_key].append(_param)
elif merge_defs == "overwrite":
defs[model_key] = payload
else:
defs[model_key] = payload
return defs
def register(self, model: "data.Model"):
def create_defs(collection, attr_name: Literal["payloads", "responses"]):
defs = self._extract_defs(
model, type(model), attr_name, model.__blnt__.merge_defs
)
for model_key, payload in defs.items():
definition = mapping.Definition(model.__blnt__.name, model_key)
for field in payload:
definition.fields.append(field)
if definition.model_name not in collection:
collection[definition.model_name] = {}
collection[definition.model_name][definition.model_key] = definition
create_defs(self._payloads, "payloads")
create_defs(self._responses, "responses")
def marshall(
self,
definition,
entity,
*,
skip_none=False,
as_list=False,
use_foreign_key=False,
):
if entity is None:
return None
if as_list:
return [
self.marshall(
definition,
e,
skip_none=skip_none,
as_list=False,
use_foreign_key=use_foreign_key,
)
for e in entity
]
values = {}
for field in definition.fields:
self._marshall_object(values, field, entity, skip_none, use_foreign_key)
return values
def _marshall_object(
self, values, field, entity, skip_none: bool, use_foreign_key: bool
):
if isinstance(field, mapping.Field):
self._marshall_field(values, field, entity, skip_none)
elif isinstance(field, mapping.Reference) and use_foreign_key:
values[field.foreign_key] = getattr_(entity, field.foreign_key, None)
elif isinstance(field, mapping.Definition):
self._marshall_definition(values, field, entity, skip_none, use_foreign_key)
elif isinstance(field, mapping.List):
self._marshall_list(values, field, entity, skip_none, use_foreign_key)
@staticmethod
def _marshall_field(values, field: "mapping.Field", entity, skip_none: bool):
if field.function is not None:
value = field.function(entity)
else:
value = getattr_(entity, field.key, None)
if field.formatting is not None:
value = field.formatting(value)
if not skip_none or value is not None:
values[field.name] = value
def _marshall_definition(
self,
values,
definition: "mapping.Definition",
entity,
skip_none: bool,
use_foreign_key: bool,
):
d = self.response(definition.model_name, definition.model_key)
attr = None
if definition.function and callable(definition.function):
attr = definition.function(entity)
elif hasattr_(entity, definition.name):
attr = getattr_(entity, definition.name, None)
values[definition.name] = self.marshall(
d, attr, skip_none=skip_none, as_list=False, use_foreign_key=use_foreign_key
)
def _marshall_list(
self,
values,
field: "mapping.List",
entity,
skip_none: bool,
use_foreign_key: bool,
):
if field.function and callable(field.function):
e_list = invoke(field.function, entity)
else:
e_list = getattr_(entity, field.name, None)
elem = field.element
if isinstance(elem, types.db.DataType):
values[field.name] = [e for e in e_list]
elif isinstance(elem, mapping.Definition):
d = self.response(elem.model_name, elem.model_key)
values[field.name] = self.marshall(
d,
e_list,
skip_none=skip_none,
as_list=True,
use_foreign_key=use_foreign_key,
)
| 35.968254
| 88
| 0.582966
|
from typing import Literal
from bolinette import data, types
from bolinette.data import mapping
from bolinette.exceptions import InternalError
from bolinette.utils.functions import getattr_, hasattr_, invoke
class Mapper:
def __init__(self):
self._payloads: dict[str, dict[str, mapping.Definition]] = {}
self._responses: dict[str, dict[str, mapping.Definition]] = {}
@staticmethod
def _get_def(
collection: dict[str, dict[str, mapping.Definition]], model_name: str, key: str
) -> "mapping.Definition":
m = collection.get(model_name)
if m is None:
raise InternalError(f"mapping.unknown_model:{model_name}")
d = m.get(key)
if d is None:
raise InternalError(f"mapping.unknown_definition:{model_name}.{key}")
return d
def payload(self, model_name: str, key: str):
return self._get_def(self._payloads, model_name, key)
@property
def payloads(self):
for model_name in self._payloads:
for key in self._payloads[model_name]:
yield model_name, key, self._payloads[model_name][key]
def response(self, model_name: str, key: str):
return self._get_def(self._responses, model_name, key)
@property
def responses(self):
for model_name in self._responses:
for key in self._responses[model_name]:
yield model_name, key, self._responses[model_name][key]
def _extract_defs(
self,
model: "data.Model",
model_cls: type["data.Model"],
collection: Literal["payloads", "responses"],
merge_defs: Literal["ignore", "append", "overwrite"],
):
defs = {}
for parent in model_cls.__bases__:
if issubclass(parent, data.Model) and parent != data.Model:
for _key, _def in self._extract_defs(
model, parent, collection, merge_defs
).items():
defs[_key] = _def
def_func = getattr(model_cls, collection)
if hasattr_(def_func, "__func__"):
def_func = def_func.__func__
def_gen = def_func(model)
if def_gen is None:
return defs
new_defs = list(def_gen)
for _def in new_defs:
if isinstance(_def, list):
model_key = "default"
payload = _def
else:
model_key, payload = _def
if model_key in defs:
if merge_defs == "append":
for _param in payload:
defs[model_key].append(_param)
elif merge_defs == "overwrite":
defs[model_key] = payload
else:
defs[model_key] = payload
return defs
def register(self, model: "data.Model"):
def create_defs(collection, attr_name: Literal["payloads", "responses"]):
defs = self._extract_defs(
model, type(model), attr_name, model.__blnt__.merge_defs
)
for model_key, payload in defs.items():
definition = mapping.Definition(model.__blnt__.name, model_key)
for field in payload:
definition.fields.append(field)
if definition.model_name not in collection:
collection[definition.model_name] = {}
collection[definition.model_name][definition.model_key] = definition
create_defs(self._payloads, "payloads")
create_defs(self._responses, "responses")
def marshall(
self,
definition,
entity,
*,
skip_none=False,
as_list=False,
use_foreign_key=False,
):
if entity is None:
return None
if as_list:
return [
self.marshall(
definition,
e,
skip_none=skip_none,
as_list=False,
use_foreign_key=use_foreign_key,
)
for e in entity
]
values = {}
for field in definition.fields:
self._marshall_object(values, field, entity, skip_none, use_foreign_key)
return values
def _marshall_object(
self, values, field, entity, skip_none: bool, use_foreign_key: bool
):
if isinstance(field, mapping.Field):
self._marshall_field(values, field, entity, skip_none)
elif isinstance(field, mapping.Reference) and use_foreign_key:
values[field.foreign_key] = getattr_(entity, field.foreign_key, None)
elif isinstance(field, mapping.Definition):
self._marshall_definition(values, field, entity, skip_none, use_foreign_key)
elif isinstance(field, mapping.List):
self._marshall_list(values, field, entity, skip_none, use_foreign_key)
@staticmethod
def _marshall_field(values, field: "mapping.Field", entity, skip_none: bool):
if field.function is not None:
value = field.function(entity)
else:
value = getattr_(entity, field.key, None)
if field.formatting is not None:
value = field.formatting(value)
if not skip_none or value is not None:
values[field.name] = value
def _marshall_definition(
self,
values,
definition: "mapping.Definition",
entity,
skip_none: bool,
use_foreign_key: bool,
):
d = self.response(definition.model_name, definition.model_key)
attr = None
if definition.function and callable(definition.function):
attr = definition.function(entity)
elif hasattr_(entity, definition.name):
attr = getattr_(entity, definition.name, None)
values[definition.name] = self.marshall(
d, attr, skip_none=skip_none, as_list=False, use_foreign_key=use_foreign_key
)
def _marshall_list(
self,
values,
field: "mapping.List",
entity,
skip_none: bool,
use_foreign_key: bool,
):
if field.function and callable(field.function):
e_list = invoke(field.function, entity)
else:
e_list = getattr_(entity, field.name, None)
elem = field.element
if isinstance(elem, types.db.DataType):
values[field.name] = [e for e in e_list]
elif isinstance(elem, mapping.Definition):
d = self.response(elem.model_name, elem.model_key)
values[field.name] = self.marshall(
d,
e_list,
skip_none=skip_none,
as_list=True,
use_foreign_key=use_foreign_key,
)
| true
| true
|
7908e22e579eb3be116fc20dca793d18b300fcc5
| 7,015
|
py
|
Python
|
tests/components/sensor/test_canary.py
|
dannyqwertz/home-assistant
|
688bdc6532e514afbdc8efd1f574a7b5c9e8d280
|
[
"Apache-2.0"
] | 4
|
2019-01-10T14:47:54.000Z
|
2021-04-22T02:06:27.000Z
|
tests/components/sensor/test_canary.py
|
au190/home-assistant
|
e87ecbd5007acad7468d7118d02b21f6d783c8bc
|
[
"Apache-2.0"
] | 6
|
2021-02-08T21:02:40.000Z
|
2022-03-12T00:52:16.000Z
|
tests/components/sensor/test_canary.py
|
au190/home-assistant
|
e87ecbd5007acad7468d7118d02b21f6d783c8bc
|
[
"Apache-2.0"
] | 3
|
2018-08-29T19:26:20.000Z
|
2020-01-19T11:58:22.000Z
|
"""The tests for the Canary sensor platform."""
import copy
import unittest
from unittest.mock import Mock
from homeassistant.components.canary import DATA_CANARY
from homeassistant.components.sensor import canary
from homeassistant.components.sensor.canary import CanarySensor, \
SENSOR_TYPES, ATTR_AIR_QUALITY, STATE_AIR_QUALITY_NORMAL, \
STATE_AIR_QUALITY_ABNORMAL, STATE_AIR_QUALITY_VERY_ABNORMAL
from tests.common import (get_test_home_assistant)
from tests.components.test_canary import mock_device, mock_location
VALID_CONFIG = {
"canary": {
"username": "foo@bar.org",
"password": "bar",
}
}
class TestCanarySensorSetup(unittest.TestCase):
"""Test the Canary platform."""
DEVICES = []
def add_entities(self, devices, action):
"""Mock add devices."""
for device in devices:
self.DEVICES.append(device)
def setUp(self):
"""Initialize values for this testcase class."""
self.hass = get_test_home_assistant()
self.config = copy.deepcopy(VALID_CONFIG)
def tearDown(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_sensors(self):
"""Test the sensor setup."""
online_device_at_home = mock_device(20, "Dining Room", True, "Canary")
offline_device_at_home = mock_device(21, "Front Yard", False, "Canary")
online_device_at_work = mock_device(22, "Office", True, "Canary")
self.hass.data[DATA_CANARY] = Mock()
self.hass.data[DATA_CANARY].locations = [
mock_location("Home", True, devices=[online_device_at_home,
offline_device_at_home]),
mock_location("Work", True, devices=[online_device_at_work]),
]
canary.setup_platform(self.hass, self.config, self.add_entities, None)
assert 6 == len(self.DEVICES)
def test_temperature_sensor(self):
"""Test temperature sensor with fahrenheit."""
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home", False)
data = Mock()
data.get_reading.return_value = 21.1234
sensor = CanarySensor(data, SENSOR_TYPES[0], location, device)
sensor.update()
assert "Home Family Room Temperature" == sensor.name
assert "°C" == sensor.unit_of_measurement
assert 21.12 == sensor.state
assert "mdi:thermometer" == sensor.icon
def test_temperature_sensor_with_none_sensor_value(self):
"""Test temperature sensor with fahrenheit."""
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home", False)
data = Mock()
data.get_reading.return_value = None
sensor = CanarySensor(data, SENSOR_TYPES[0], location, device)
sensor.update()
assert sensor.state is None
def test_humidity_sensor(self):
"""Test humidity sensor."""
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = 50.4567
sensor = CanarySensor(data, SENSOR_TYPES[1], location, device)
sensor.update()
assert "Home Family Room Humidity" == sensor.name
assert "%" == sensor.unit_of_measurement
assert 50.46 == sensor.state
assert "mdi:water-percent" == sensor.icon
def test_air_quality_sensor_with_very_abnormal_reading(self):
"""Test air quality sensor."""
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = 0.4
sensor = CanarySensor(data, SENSOR_TYPES[2], location, device)
sensor.update()
assert "Home Family Room Air Quality" == sensor.name
assert sensor.unit_of_measurement is None
assert 0.4 == sensor.state
assert "mdi:weather-windy" == sensor.icon
air_quality = sensor.device_state_attributes[ATTR_AIR_QUALITY]
assert STATE_AIR_QUALITY_VERY_ABNORMAL == air_quality
def test_air_quality_sensor_with_abnormal_reading(self):
"""Test air quality sensor."""
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = 0.59
sensor = CanarySensor(data, SENSOR_TYPES[2], location, device)
sensor.update()
assert "Home Family Room Air Quality" == sensor.name
assert sensor.unit_of_measurement is None
assert 0.59 == sensor.state
assert "mdi:weather-windy" == sensor.icon
air_quality = sensor.device_state_attributes[ATTR_AIR_QUALITY]
assert STATE_AIR_QUALITY_ABNORMAL == air_quality
def test_air_quality_sensor_with_normal_reading(self):
"""Test air quality sensor."""
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = 1.0
sensor = CanarySensor(data, SENSOR_TYPES[2], location, device)
sensor.update()
assert "Home Family Room Air Quality" == sensor.name
assert sensor.unit_of_measurement is None
assert 1.0 == sensor.state
assert "mdi:weather-windy" == sensor.icon
air_quality = sensor.device_state_attributes[ATTR_AIR_QUALITY]
assert STATE_AIR_QUALITY_NORMAL == air_quality
def test_air_quality_sensor_with_none_sensor_value(self):
"""Test air quality sensor."""
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = None
sensor = CanarySensor(data, SENSOR_TYPES[2], location, device)
sensor.update()
assert sensor.state is None
assert sensor.device_state_attributes is None
def test_battery_sensor(self):
"""Test battery sensor."""
device = mock_device(10, "Family Room", "Canary Flex")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = 70.4567
sensor = CanarySensor(data, SENSOR_TYPES[4], location, device)
sensor.update()
assert "Home Family Room Battery" == sensor.name
assert "%" == sensor.unit_of_measurement
assert 70.46 == sensor.state
assert "mdi:battery-70" == sensor.icon
def test_wifi_sensor(self):
"""Test battery sensor."""
device = mock_device(10, "Family Room", "Canary Flex")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = -57
sensor = CanarySensor(data, SENSOR_TYPES[3], location, device)
sensor.update()
assert "Home Family Room Wifi" == sensor.name
assert "dBm" == sensor.unit_of_measurement
assert -57 == sensor.state
assert "mdi:wifi" == sensor.icon
| 34.219512
| 79
| 0.648753
|
import copy
import unittest
from unittest.mock import Mock
from homeassistant.components.canary import DATA_CANARY
from homeassistant.components.sensor import canary
from homeassistant.components.sensor.canary import CanarySensor, \
SENSOR_TYPES, ATTR_AIR_QUALITY, STATE_AIR_QUALITY_NORMAL, \
STATE_AIR_QUALITY_ABNORMAL, STATE_AIR_QUALITY_VERY_ABNORMAL
from tests.common import (get_test_home_assistant)
from tests.components.test_canary import mock_device, mock_location
VALID_CONFIG = {
"canary": {
"username": "foo@bar.org",
"password": "bar",
}
}
class TestCanarySensorSetup(unittest.TestCase):
DEVICES = []
def add_entities(self, devices, action):
for device in devices:
self.DEVICES.append(device)
def setUp(self):
self.hass = get_test_home_assistant()
self.config = copy.deepcopy(VALID_CONFIG)
def tearDown(self):
self.hass.stop()
def test_setup_sensors(self):
online_device_at_home = mock_device(20, "Dining Room", True, "Canary")
offline_device_at_home = mock_device(21, "Front Yard", False, "Canary")
online_device_at_work = mock_device(22, "Office", True, "Canary")
self.hass.data[DATA_CANARY] = Mock()
self.hass.data[DATA_CANARY].locations = [
mock_location("Home", True, devices=[online_device_at_home,
offline_device_at_home]),
mock_location("Work", True, devices=[online_device_at_work]),
]
canary.setup_platform(self.hass, self.config, self.add_entities, None)
assert 6 == len(self.DEVICES)
def test_temperature_sensor(self):
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home", False)
data = Mock()
data.get_reading.return_value = 21.1234
sensor = CanarySensor(data, SENSOR_TYPES[0], location, device)
sensor.update()
assert "Home Family Room Temperature" == sensor.name
assert "°C" == sensor.unit_of_measurement
assert 21.12 == sensor.state
assert "mdi:thermometer" == sensor.icon
def test_temperature_sensor_with_none_sensor_value(self):
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home", False)
data = Mock()
data.get_reading.return_value = None
sensor = CanarySensor(data, SENSOR_TYPES[0], location, device)
sensor.update()
assert sensor.state is None
def test_humidity_sensor(self):
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = 50.4567
sensor = CanarySensor(data, SENSOR_TYPES[1], location, device)
sensor.update()
assert "Home Family Room Humidity" == sensor.name
assert "%" == sensor.unit_of_measurement
assert 50.46 == sensor.state
assert "mdi:water-percent" == sensor.icon
def test_air_quality_sensor_with_very_abnormal_reading(self):
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = 0.4
sensor = CanarySensor(data, SENSOR_TYPES[2], location, device)
sensor.update()
assert "Home Family Room Air Quality" == sensor.name
assert sensor.unit_of_measurement is None
assert 0.4 == sensor.state
assert "mdi:weather-windy" == sensor.icon
air_quality = sensor.device_state_attributes[ATTR_AIR_QUALITY]
assert STATE_AIR_QUALITY_VERY_ABNORMAL == air_quality
def test_air_quality_sensor_with_abnormal_reading(self):
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = 0.59
sensor = CanarySensor(data, SENSOR_TYPES[2], location, device)
sensor.update()
assert "Home Family Room Air Quality" == sensor.name
assert sensor.unit_of_measurement is None
assert 0.59 == sensor.state
assert "mdi:weather-windy" == sensor.icon
air_quality = sensor.device_state_attributes[ATTR_AIR_QUALITY]
assert STATE_AIR_QUALITY_ABNORMAL == air_quality
def test_air_quality_sensor_with_normal_reading(self):
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = 1.0
sensor = CanarySensor(data, SENSOR_TYPES[2], location, device)
sensor.update()
assert "Home Family Room Air Quality" == sensor.name
assert sensor.unit_of_measurement is None
assert 1.0 == sensor.state
assert "mdi:weather-windy" == sensor.icon
air_quality = sensor.device_state_attributes[ATTR_AIR_QUALITY]
assert STATE_AIR_QUALITY_NORMAL == air_quality
def test_air_quality_sensor_with_none_sensor_value(self):
device = mock_device(10, "Family Room", "Canary")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = None
sensor = CanarySensor(data, SENSOR_TYPES[2], location, device)
sensor.update()
assert sensor.state is None
assert sensor.device_state_attributes is None
def test_battery_sensor(self):
device = mock_device(10, "Family Room", "Canary Flex")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = 70.4567
sensor = CanarySensor(data, SENSOR_TYPES[4], location, device)
sensor.update()
assert "Home Family Room Battery" == sensor.name
assert "%" == sensor.unit_of_measurement
assert 70.46 == sensor.state
assert "mdi:battery-70" == sensor.icon
def test_wifi_sensor(self):
device = mock_device(10, "Family Room", "Canary Flex")
location = mock_location("Home")
data = Mock()
data.get_reading.return_value = -57
sensor = CanarySensor(data, SENSOR_TYPES[3], location, device)
sensor.update()
assert "Home Family Room Wifi" == sensor.name
assert "dBm" == sensor.unit_of_measurement
assert -57 == sensor.state
assert "mdi:wifi" == sensor.icon
| true
| true
|
7908e22f51452c20a3bfb329a57192bddb17bbda
| 6,409
|
py
|
Python
|
core/models/nddr_net.py
|
WZzhaoyi/MTLNAS
|
c04fcce1437eef306a41a6a224551be99d88f9a3
|
[
"Apache-2.0"
] | 86
|
2020-04-04T03:37:33.000Z
|
2022-03-13T07:36:24.000Z
|
core/models/nddr_net.py
|
hengxyz/MTLNAS
|
c04fcce1437eef306a41a6a224551be99d88f9a3
|
[
"Apache-2.0"
] | 6
|
2020-04-05T15:09:15.000Z
|
2021-06-08T21:12:35.000Z
|
core/models/nddr_net.py
|
hengxyz/MTLNAS
|
c04fcce1437eef306a41a6a224551be99d88f9a3
|
[
"Apache-2.0"
] | 15
|
2020-04-07T03:27:34.000Z
|
2021-12-22T09:13:27.000Z
|
import numpy as np
from time import sleep
import torch
import torch.nn as nn
import torch.nn.functional as F
from core.models.common_layers import batch_norm, get_nddr
from core.tasks import get_tasks
from core.utils import AttrDict
from core.utils.losses import poly
class SingleTaskNet(nn.Module):
def __init__(self, cfg, net1, net2):
super(SingleTaskNet, self).__init__()
self.cfg = cfg
self.net1 = net1
self.net2 = net2
assert len(net1.stages) == len(net2.stages)
self.task1, self.task2 = get_tasks(cfg)
self.num_stages = len(net1.stages)
self._step = 0
def step(self):
self._step += 1
def loss(self, x, labels):
label_1, label_2 = labels
result = self.forward(x)
result.loss1 = self.task1.loss(result.out1, label_1)
result.loss2 = self.task2.loss(result.out2, label_2)
result.loss = result.loss1 + self.cfg.TRAIN.TASK2_FACTOR * result.loss2
return result
def forward(self, x):
N, C, H, W = x.size()
y = x.clone()
x = self.net1.base(x)
y = self.net2.base(y)
for stage_id in range(self.num_stages):
x = self.net1.stages[stage_id](x)
y = self.net2.stages[stage_id](y)
x = self.net1.head(x)
y = self.net2.head(y)
return AttrDict({'out1': x, 'out2': y})
class SharedFeatureNet(nn.Module):
def __init__(self, cfg, net1, net2):
super(SharedFeatureNet, self).__init__()
self.cfg = cfg
self.net1 = net1
self.net2 = net2
assert len(net1.stages) == len(net2.stages)
self.task1, self.task2 = get_tasks(cfg)
self.num_stages = len(net1.stages)
self._step = 0
def step(self):
self._step += 1
def loss(self, x, labels):
label_1, label_2 = labels
result = self.forward(x)
result.loss1 = self.task1.loss(result.out1, label_1)
result.loss2 = self.task2.loss(result.out2, label_2)
result.loss = result.loss1 + self.cfg.TRAIN.TASK2_FACTOR * result.loss2
return result
def forward(self, x):
x = self.net1.base(x)
for stage_id in range(self.num_stages):
x = self.net1.stages[stage_id](x)
out1 = self.net1.head(x)
out2 = self.net2.head(x)
return AttrDict({'out1': out1, 'out2': out2})
class NDDRNet(nn.Module):
def __init__(self, cfg, net1, net2):
super(NDDRNet, self).__init__()
self.cfg = cfg
self.net1 = net1
self.net2 = net2
assert len(net1.stages) == len(net2.stages)
self.task1, self.task2 = get_tasks(cfg)
self.num_stages = len(net1.stages)
nddrs = []
total_channels = 0
for stage_id in range(self.num_stages):
out_channels = net1.stages[stage_id].out_channels
assert out_channels == net2.stages[stage_id].out_channels
if stage_id in cfg.TRAIN.AUX_LAYERS:
total_channels += out_channels
nddr = get_nddr(cfg, out_channels, out_channels)
nddrs.append(nddr)
nddrs = nn.ModuleList(nddrs)
self.aux = cfg.TRAIN.AUX
if self.aux:
print("Using shortcut")
self.aux_conv1 = nn.Sequential(
nn.Conv2d(total_channels, 256, kernel_size=3, padding=1, bias=False),
batch_norm(256, eps=1e-03, momentum=cfg.MODEL.BATCH_NORM_MOMENTUM),
nn.ReLU(inplace=True),
nn.Dropout2d(p=0.5),
nn.Conv2d(256, cfg.MODEL.NET1_CLASSES, kernel_size=1)
)
self.aux_conv2 = nn.Sequential(
nn.Conv2d(total_channels, 256, kernel_size=3, padding=1, bias=False),
batch_norm(256, eps=1e-03, momentum=cfg.MODEL.BATCH_NORM_MOMENTUM),
nn.ReLU(inplace=True),
nn.Dropout2d(p=0.5),
nn.Conv2d(256, cfg.MODEL.NET2_CLASSES, kernel_size=1)
)
self.nddrs = nn.ModuleDict({
'nddrs': nddrs,
})
self._step = 0
def step(self):
self._step += 1
def loss(self, x, labels):
label_1, label_2 = labels
result = self.forward(x)
result.loss1 = self.task1.loss(result.out1, label_1)
result.loss2 = self.task2.loss(result.out2, label_2)
result.loss = result.loss1 + self.cfg.TRAIN.TASK2_FACTOR * result.loss2
if self.aux:
result.aux_loss1 = self.task1.loss(result.aux1, label_1)
result.aux_loss2 = self.task2.loss(result.aux2, label_2)
result.aux_loss = result.aux_loss1 + self.cfg.TRAIN.TASK2_FACTOR * result.aux_loss2
result.aux_weight = poly(start=self.cfg.TRAIN.AUX_WEIGHT, end=0.,
steps=self._step, total_steps=self.cfg.TRAIN.STEPS,
period=self.cfg.TRAIN.AUX_PERIOD,
power=1.)
result.loss += result.aux_weight * result.aux_loss
return result
def forward(self, x):
N, C, H, W = x.size()
y = x.clone()
x = self.net1.base(x)
y = self.net2.base(y)
xs, ys = [], []
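        # At each stage the two task-specific streams are fused by that
        # stage's NDDR layer; intermediate features are kept for the aux heads.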
for stage_id in range(self.num_stages):
x = self.net1.stages[stage_id](x)
y = self.net2.stages[stage_id](y)
if isinstance(x, list):
x[0], y[0] = self.nddrs['nddrs'][stage_id](x[0], y[0])
else:
x, y = self.nddrs['nddrs'][stage_id](x, y)
if self.aux and self.training and stage_id in self.cfg.TRAIN.AUX_LAYERS:
xs.append(x)
ys.append(y)
x = self.net1.head(x)
y = self.net2.head(y)
result = AttrDict({'out1': x, 'out2': y})
if self.aux and self.training:
_, _, h, w = x.size()
aux_x = torch.cat([F.interpolate(_x, (h, w), mode='bilinear', align_corners=True) for _x in xs[:-1]] + [xs[-1]],
dim=1)
aux_y = torch.cat([F.interpolate(_y, (h, w), mode='bilinear', align_corners=True) for _y in ys[:-1]] + [ys[-1]],
dim=1)
result.aux1 = self.aux_conv1(aux_x)
result.aux2 = self.aux_conv2(aux_y)
return result
| 36.414773
| 124
| 0.559058
|
import numpy as np
from time import sleep
import torch
import torch.nn as nn
import torch.nn.functional as F
from core.models.common_layers import batch_norm, get_nddr
from core.tasks import get_tasks
from core.utils import AttrDict
from core.utils.losses import poly
class SingleTaskNet(nn.Module):
def __init__(self, cfg, net1, net2):
super(SingleTaskNet, self).__init__()
self.cfg = cfg
self.net1 = net1
self.net2 = net2
assert len(net1.stages) == len(net2.stages)
self.task1, self.task2 = get_tasks(cfg)
self.num_stages = len(net1.stages)
self._step = 0
def step(self):
self._step += 1
def loss(self, x, labels):
label_1, label_2 = labels
result = self.forward(x)
result.loss1 = self.task1.loss(result.out1, label_1)
result.loss2 = self.task2.loss(result.out2, label_2)
result.loss = result.loss1 + self.cfg.TRAIN.TASK2_FACTOR * result.loss2
return result
def forward(self, x):
N, C, H, W = x.size()
y = x.clone()
x = self.net1.base(x)
y = self.net2.base(y)
for stage_id in range(self.num_stages):
x = self.net1.stages[stage_id](x)
y = self.net2.stages[stage_id](y)
x = self.net1.head(x)
y = self.net2.head(y)
return AttrDict({'out1': x, 'out2': y})
class SharedFeatureNet(nn.Module):
def __init__(self, cfg, net1, net2):
super(SharedFeatureNet, self).__init__()
self.cfg = cfg
self.net1 = net1
self.net2 = net2
assert len(net1.stages) == len(net2.stages)
self.task1, self.task2 = get_tasks(cfg)
self.num_stages = len(net1.stages)
self._step = 0
def step(self):
self._step += 1
def loss(self, x, labels):
label_1, label_2 = labels
result = self.forward(x)
result.loss1 = self.task1.loss(result.out1, label_1)
result.loss2 = self.task2.loss(result.out2, label_2)
result.loss = result.loss1 + self.cfg.TRAIN.TASK2_FACTOR * result.loss2
return result
def forward(self, x):
x = self.net1.base(x)
for stage_id in range(self.num_stages):
x = self.net1.stages[stage_id](x)
out1 = self.net1.head(x)
out2 = self.net2.head(x)
return AttrDict({'out1': out1, 'out2': out2})
class NDDRNet(nn.Module):
def __init__(self, cfg, net1, net2):
super(NDDRNet, self).__init__()
self.cfg = cfg
self.net1 = net1
self.net2 = net2
assert len(net1.stages) == len(net2.stages)
self.task1, self.task2 = get_tasks(cfg)
self.num_stages = len(net1.stages)
nddrs = []
total_channels = 0
for stage_id in range(self.num_stages):
out_channels = net1.stages[stage_id].out_channels
assert out_channels == net2.stages[stage_id].out_channels
if stage_id in cfg.TRAIN.AUX_LAYERS:
total_channels += out_channels
nddr = get_nddr(cfg, out_channels, out_channels)
nddrs.append(nddr)
nddrs = nn.ModuleList(nddrs)
self.aux = cfg.TRAIN.AUX
if self.aux:
print("Using shortcut")
self.aux_conv1 = nn.Sequential(
nn.Conv2d(total_channels, 256, kernel_size=3, padding=1, bias=False),
batch_norm(256, eps=1e-03, momentum=cfg.MODEL.BATCH_NORM_MOMENTUM),
nn.ReLU(inplace=True),
nn.Dropout2d(p=0.5),
nn.Conv2d(256, cfg.MODEL.NET1_CLASSES, kernel_size=1)
)
self.aux_conv2 = nn.Sequential(
nn.Conv2d(total_channels, 256, kernel_size=3, padding=1, bias=False),
batch_norm(256, eps=1e-03, momentum=cfg.MODEL.BATCH_NORM_MOMENTUM),
nn.ReLU(inplace=True),
nn.Dropout2d(p=0.5),
nn.Conv2d(256, cfg.MODEL.NET2_CLASSES, kernel_size=1)
)
self.nddrs = nn.ModuleDict({
'nddrs': nddrs,
})
self._step = 0
def step(self):
self._step += 1
def loss(self, x, labels):
label_1, label_2 = labels
result = self.forward(x)
result.loss1 = self.task1.loss(result.out1, label_1)
result.loss2 = self.task2.loss(result.out2, label_2)
result.loss = result.loss1 + self.cfg.TRAIN.TASK2_FACTOR * result.loss2
if self.aux:
result.aux_loss1 = self.task1.loss(result.aux1, label_1)
result.aux_loss2 = self.task2.loss(result.aux2, label_2)
result.aux_loss = result.aux_loss1 + self.cfg.TRAIN.TASK2_FACTOR * result.aux_loss2
result.aux_weight = poly(start=self.cfg.TRAIN.AUX_WEIGHT, end=0.,
steps=self._step, total_steps=self.cfg.TRAIN.STEPS,
period=self.cfg.TRAIN.AUX_PERIOD,
power=1.)
result.loss += result.aux_weight * result.aux_loss
return result
def forward(self, x):
N, C, H, W = x.size()
y = x.clone()
x = self.net1.base(x)
y = self.net2.base(y)
xs, ys = [], []
for stage_id in range(self.num_stages):
x = self.net1.stages[stage_id](x)
y = self.net2.stages[stage_id](y)
if isinstance(x, list):
x[0], y[0] = self.nddrs['nddrs'][stage_id](x[0], y[0])
else:
x, y = self.nddrs['nddrs'][stage_id](x, y)
if self.aux and self.training and stage_id in self.cfg.TRAIN.AUX_LAYERS:
xs.append(x)
ys.append(y)
x = self.net1.head(x)
y = self.net2.head(y)
result = AttrDict({'out1': x, 'out2': y})
if self.aux and self.training:
_, _, h, w = x.size()
aux_x = torch.cat([F.interpolate(_x, (h, w), mode='bilinear', align_corners=True) for _x in xs[:-1]] + [xs[-1]],
dim=1)
aux_y = torch.cat([F.interpolate(_y, (h, w), mode='bilinear', align_corners=True) for _y in ys[:-1]] + [ys[-1]],
dim=1)
result.aux1 = self.aux_conv1(aux_x)
result.aux2 = self.aux_conv2(aux_y)
return result
| true
| true
|
7908e2d4ea654159fc7e3d60596282dd1dfd6428
| 6,383
|
py
|
Python
|
word2vec/estimators/word2vec.py
|
akb89/word2vec
|
0533e6c1b5ee02d2523bc18359423f94651f7805
|
[
"MIT"
] | 14
|
2019-06-19T15:26:07.000Z
|
2021-11-13T20:15:58.000Z
|
word2vec/estimators/word2vec.py
|
akb89/word2vec
|
0533e6c1b5ee02d2523bc18359423f94651f7805
|
[
"MIT"
] | 9
|
2019-05-03T09:50:32.000Z
|
2022-02-10T00:16:56.000Z
|
word2vec/estimators/word2vec.py
|
akb89/word2vec
|
0533e6c1b5ee02d2523bc18359423f94651f7805
|
[
"MIT"
] | 4
|
2019-12-06T09:49:48.000Z
|
2022-01-19T08:56:32.000Z
|
"""A word2vec implementation using Tensorflow and estimators."""
import os
from collections import defaultdict
import logging
import tensorflow as tf
# from tensorflow.python import debug as tf_debug # pylint: disable=E0611
import word2vec.utils.datasets as datasets_utils
import word2vec.models.word2vec as w2v_model
from word2vec.evaluation.men import MEN
logger = logging.getLogger(__name__)
__all__ = ('Word2Vec',)
class Word2Vec():
"""Tensorflow implementation of Word2vec."""
def __init__(self):
"""Initialize vocab dictionaries."""
self._words = []
self._counts = []
self._total_count = 0
@property
def vocab_size(self):
"""Return the number of items in vocabulary.
Since we use len(word_freq_dict) as the default index for UKN in
the index_table, we have to add 1 to the length
"""
return len(self._words) + 1
def build_vocab(self, data_filepath, vocab_filepath, min_count):
"""Create vocabulary-related data."""
logger.info('Building vocabulary from file {}'.format(data_filepath))
logger.info('Loading word counts...')
if self.vocab_size > 1:
logger.warning('This instance of W2V\'s vocabulary does not seem '
'to be empty. Erasing previously stored vocab...')
self._words, self._counts, self._total_count = [], [], 0
word_count_dict = defaultdict(int)
with open(data_filepath, 'r') as data_stream:
for line in data_stream:
for word in line.strip().split():
word_count_dict[word] += 1
logger.info('Saving word frequencies to file: {}'
.format(vocab_filepath))
with open(vocab_filepath, 'w') as vocab_stream:
# words need to be sorted in decreasing frequency to be able
# to rely on the default tf.nn.log_uniform_candidate_sampler
# later on in the tf.nn.nce_loss
for word, count in sorted(word_count_dict.items(),
key=lambda x: x[1], reverse=True):
print('{}\t{}'.format(word, count), file=vocab_stream)
if count >= min_count:
self._words.append(word)
self._counts.append(count)
self._total_count += count
def load_vocab(self, vocab_filepath, min_count):
"""Load a previously saved vocabulary file."""
logger.info('Loading word counts from file {}'.format(vocab_filepath))
self._words, self._counts, self._total_count = [], [], 0
with open(vocab_filepath, 'r', encoding='UTF-8') as vocab_stream:
for line in vocab_stream:
word_count = line.strip().split('\t', 1)
word, count = word_count[0], int(word_count[1])
if count >= min_count:
self._words.append(word)
self._counts.append(count)
self._total_count += count
logger.info('Done loading word counts')
# pylint: disable=R0914,W0613
def train(self, train_mode, training_data_filepath, model_dirpath,
batch_size, embedding_size, num_neg_samples,
learning_rate, window_size, num_epochs, sampling_rate,
p_num_threads, t_num_threads, shuffling_buffer_size,
save_summary_steps, save_checkpoints_steps, keep_checkpoint_max,
log_step_count_steps, debug, debug_port, xla):
"""Train Word2Vec."""
if self.vocab_size == 1:
raise Exception('You need to build or load a vocabulary before '
'training word2vec')
if train_mode not in ('cbow', 'skipgram'):
raise Exception('Unsupported train_mode \'{}\''.format(train_mode))
sess_config = tf.compat.v1.ConfigProto(log_device_placement=True)
sess_config.intra_op_parallelism_threads = t_num_threads
sess_config.inter_op_parallelism_threads = t_num_threads
# if xla:
# sess_config.graph_options.optimizer_options.global_jit_level = \
# tf.OptimizerOptions.ON_1 # JIT compilation on GPU
run_config = tf.estimator.RunConfig(
session_config=sess_config, save_summary_steps=save_summary_steps,
save_checkpoints_steps=save_checkpoints_steps,
keep_checkpoint_max=keep_checkpoint_max,
log_step_count_steps=log_step_count_steps)
estimator = tf.estimator.Estimator(
model_fn=w2v_model.model,
model_dir=model_dirpath,
config=run_config,
params={
'mode': train_mode,
'vocab_size': self.vocab_size,
'batch_size': batch_size,
'embedding_size': embedding_size,
'num_neg_samples': num_neg_samples,
'learning_rate': learning_rate,
'words': self._words,
'p_num_threads': p_num_threads,
'xla': xla,
'men': MEN(os.path.join(
os.path.dirname(os.path.dirname(__file__)),
'resources', 'MEN_dataset_natural_form_full'))
})
# waiting for v2 fix in tf.summary.FileWriter:
tf.compat.v1.disable_eager_execution()
if debug:
raise Exception('Unsupported parameter: waiting for the TF team '
'to release v2 equivalents for TensorBoardDebugHook')
# hooks = [tf.estimator.ProfilerHook(
# save_steps=save_summary_steps, show_dataflow=True,
# show_memory=True, output_dir=model_dirpath),
# tf_debug.TensorBoardDebugHook('localhost:{}'
# .format(debug_port))]
# else:
hooks = [tf.estimator.ProfilerHook(
save_steps=save_summary_steps, show_dataflow=True,
show_memory=True, output_dir=model_dirpath)]
estimator.train(
input_fn=lambda: datasets_utils.get_w2v_train_dataset(
training_data_filepath, train_mode, self._words, self._counts,
self._total_count, window_size, sampling_rate, batch_size,
num_epochs, p_num_threads, shuffling_buffer_size),
hooks=hooks)
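A hedged end-to-end usage sketch of the class above; the paths and every hyperparameter value are illustrative, not taken from the repo:

w2v = Word2Vec()
w2v.build_vocab('corpus.txt', 'vocab.tsv', min_count=5)  # hypothetical paths
w2v.train(train_mode='cbow', training_data_filepath='corpus.txt',
          model_dirpath='models', batch_size=128, embedding_size=300,
          num_neg_samples=5, learning_rate=0.025, window_size=5,
          num_epochs=5, sampling_rate=1e-5, p_num_threads=4,
          t_num_threads=4, shuffling_buffer_size=10000,
          save_summary_steps=100, save_checkpoints_steps=1000,
          keep_checkpoint_max=3, log_step_count_steps=100,
          debug=False, debug_port=None, xla=False)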
| 44.950704
| 81
| 0.608491
|
import os
from collections import defaultdict
import logging
import tensorflow as tf
import word2vec.utils.datasets as datasets_utils
import word2vec.models.word2vec as w2v_model
from word2vec.evaluation.men import MEN
logger = logging.getLogger(__name__)
__all__ = ('Word2Vec',)
class Word2Vec():
def __init__(self):
self._words = []
self._counts = []
self._total_count = 0
@property
def vocab_size(self):
return len(self._words) + 1
def build_vocab(self, data_filepath, vocab_filepath, min_count):
logger.info('Building vocabulary from file {}'.format(data_filepath))
logger.info('Loading word counts...')
if self.vocab_size > 1:
logger.warning('This instance of W2V\'s vocabulary does not seem '
'to be empty. Erasing previously stored vocab...')
self._words, self._counts, self._total_count = [], [], 0
word_count_dict = defaultdict(int)
with open(data_filepath, 'r') as data_stream:
for line in data_stream:
for word in line.strip().split():
word_count_dict[word] += 1
logger.info('Saving word frequencies to file: {}'
.format(vocab_filepath))
with open(vocab_filepath, 'w') as vocab_stream:
# words need to be sorted in decreasing frequency to be able
# to rely on the default tf.nn.log_uniform_candidate_sampler
# later on in the tf.nn.nce_loss
for word, count in sorted(word_count_dict.items(),
key=lambda x: x[1], reverse=True):
print('{}\t{}'.format(word, count), file=vocab_stream)
if count >= min_count:
self._words.append(word)
self._counts.append(count)
self._total_count += count
def load_vocab(self, vocab_filepath, min_count):
logger.info('Loading word counts from file {}'.format(vocab_filepath))
self._words, self._counts, self._total_count = [], [], 0
with open(vocab_filepath, 'r', encoding='UTF-8') as vocab_stream:
for line in vocab_stream:
word_count = line.strip().split('\t', 1)
word, count = word_count[0], int(word_count[1])
if count >= min_count:
self._words.append(word)
self._counts.append(count)
self._total_count += count
logger.info('Done loading word counts')
# pylint: disable=R0914,W0613
def train(self, train_mode, training_data_filepath, model_dirpath,
batch_size, embedding_size, num_neg_samples,
learning_rate, window_size, num_epochs, sampling_rate,
p_num_threads, t_num_threads, shuffling_buffer_size,
save_summary_steps, save_checkpoints_steps, keep_checkpoint_max,
log_step_count_steps, debug, debug_port, xla):
if self.vocab_size == 1:
raise Exception('You need to build or load a vocabulary before '
'training word2vec')
if train_mode not in ('cbow', 'skipgram'):
raise Exception('Unsupported train_mode \'{}\''.format(train_mode))
sess_config = tf.compat.v1.ConfigProto(log_device_placement=True)
sess_config.intra_op_parallelism_threads = t_num_threads
sess_config.inter_op_parallelism_threads = t_num_threads
# if xla:
# sess_config.graph_options.optimizer_options.global_jit_level = \
# tf.OptimizerOptions.ON_1 # JIT compilation on GPU
run_config = tf.estimator.RunConfig(
session_config=sess_config, save_summary_steps=save_summary_steps,
save_checkpoints_steps=save_checkpoints_steps,
keep_checkpoint_max=keep_checkpoint_max,
log_step_count_steps=log_step_count_steps)
estimator = tf.estimator.Estimator(
model_fn=w2v_model.model,
model_dir=model_dirpath,
config=run_config,
params={
'mode': train_mode,
'vocab_size': self.vocab_size,
'batch_size': batch_size,
'embedding_size': embedding_size,
'num_neg_samples': num_neg_samples,
'learning_rate': learning_rate,
'words': self._words,
'p_num_threads': p_num_threads,
'xla': xla,
'men': MEN(os.path.join(
os.path.dirname(os.path.dirname(__file__)),
'resources', 'MEN_dataset_natural_form_full'))
})
# waiting for v2 fix in tf.summary.FileWriter:
tf.compat.v1.disable_eager_execution()
if debug:
raise Exception('Unsupported parameter: waiting for the TF team '
'to release v2 equivalents for TensorBoardDebugHook')
# hooks = [tf.estimator.ProfilerHook(
# save_steps=save_summary_steps, show_dataflow=True,
# show_memory=True, output_dir=model_dirpath),
# tf_debug.TensorBoardDebugHook('localhost:{}'
# .format(debug_port))]
# else:
hooks = [tf.estimator.ProfilerHook(
save_steps=save_summary_steps, show_dataflow=True,
show_memory=True, output_dir=model_dirpath)]
estimator.train(
input_fn=lambda: datasets_utils.get_w2v_train_dataset(
training_data_filepath, train_mode, self._words, self._counts,
self._total_count, window_size, sampling_rate, batch_size,
num_epochs, p_num_threads, shuffling_buffer_size),
hooks=hooks)
| true
| true
|
7908e30251196b26a18072bb4b63cce247904208
| 10,205
|
py
|
Python
|
conans/client/rest/uploader_downloader.py
|
xaqq/conan
|
ab0870336550b7521da71595c6babf42d5690f7b
|
[
"MIT"
] | null | null | null |
conans/client/rest/uploader_downloader.py
|
xaqq/conan
|
ab0870336550b7521da71595c6babf42d5690f7b
|
[
"MIT"
] | 1
|
2018-06-01T09:34:49.000Z
|
2018-06-01T13:51:07.000Z
|
conans/client/rest/uploader_downloader.py
|
xaqq/conan
|
ab0870336550b7521da71595c6babf42d5690f7b
|
[
"MIT"
] | null | null | null |
import os
import time
import traceback
from conans.client.tools.files import human_size
from conans.errors import AuthenticationException, ConanConnectionError, ConanException, \
    ForbiddenException, NotFoundException
from conans.util.files import mkdir, save_append, sha1sum, to_file_bytes
from conans.util.log import logger
from conans.util.tracer import log_download
class Uploader(object):
def __init__(self, requester, output, verify, chunk_size=1000):
self.chunk_size = chunk_size
self.output = output
self.requester = requester
self.verify = verify
def upload(self, url, abs_path, auth=None, dedup=False, retry=1, retry_wait=0, headers=None):
# Send always the header with the Sha1
headers = headers or {}
headers["X-Checksum-Sha1"] = sha1sum(abs_path)
if dedup:
dedup_headers = {"X-Checksum-Deploy": "true"}
if headers:
dedup_headers.update(headers)
response = self.requester.put(url, data="", verify=self.verify, headers=dedup_headers,
auth=auth)
if response.status_code == 403:
if auth.token is None:
raise AuthenticationException(response.content)
raise ForbiddenException(response.content)
if response.status_code == 201: # Artifactory returns 201 if the file is there
return response
self.output.info("")
# Actual transfer of the real content
it = load_in_chunks(abs_path, self.chunk_size)
# Now it is a chunked read file
file_size = os.stat(abs_path).st_size
it = upload_with_progress(file_size, it, self.chunk_size, self.output)
# Now it will print progress in each iteration
iterable_to_file = IterableToFileAdapter(it, file_size)
# Now it is prepared to work with request
ret = call_with_retry(self.output, retry, retry_wait, self._upload_file, url,
data=iterable_to_file, headers=headers, auth=auth)
return ret
def _upload_file(self, url, data, headers, auth):
try:
response = self.requester.put(url, data=data, verify=self.verify,
headers=headers, auth=auth)
if response.status_code == 403:
if auth.token is None:
raise AuthenticationException(response.content)
raise ForbiddenException(response.content)
except ConanException:
raise
except Exception as exc:
raise ConanException(exc)
return response
class IterableToFileAdapter(object):
def __init__(self, iterable, total_size):
self.iterator = iter(iterable)
self.total_size = total_size
def read(self, size=-1): # @UnusedVariable
return next(self.iterator, b'')
def __len__(self):
return self.total_size
def __iter__(self):
return self.iterator.__iter__()
class upload_with_progress(object):
def __init__(self, totalsize, iterator, chunk_size, output):
self.totalsize = totalsize
self.output = output
self.chunk_size = chunk_size
self.aprox_chunks = self.totalsize * 1.0 / chunk_size
self.groups = iterator
def __iter__(self):
last_progress = None
for index, chunk in enumerate(self.groups):
if self.aprox_chunks == 0:
index = self.aprox_chunks
units = progress_units(index, self.aprox_chunks)
progress = human_readable_progress(index * self.chunk_size, self.totalsize)
            if last_progress != units: # Avoid screen refresh if nothing has changed
print_progress(self.output, units, progress)
last_progress = units
yield chunk
progress = human_readable_progress(self.totalsize, self.totalsize)
print_progress(self.output, progress_units(100, 100), progress)
def __len__(self):
return self.totalsize
def load_in_chunks(path, chunk_size=1024):
"""Lazy function (generator) to read a file piece by piece.
Default chunk size: 1k."""
with open(path, 'rb') as file_object:
while True:
data = file_object.read(chunk_size)
if not data:
break
yield data
class Downloader(object):
def __init__(self, requester, output, verify, chunk_size=1000):
self.chunk_size = chunk_size
self.output = output
self.requester = requester
self.verify = verify
def download(self, url, file_path=None, auth=None, retry=3, retry_wait=0, overwrite=False,
headers=None):
if file_path and not os.path.isabs(file_path):
file_path = os.path.abspath(file_path)
if file_path and os.path.exists(file_path):
if overwrite:
if self.output:
self.output.warn("file '%s' already exists, overwriting" % file_path)
else:
                # Should not happen: better to raise; the destination folder
                # should probably have been removed beforehand
raise ConanException("Error, the file to download already exists: '%s'" % file_path)
return call_with_retry(self.output, retry, retry_wait, self._download_file, url, auth,
headers, file_path)
def _download_file(self, url, auth, headers, file_path):
t1 = time.time()
try:
response = self.requester.get(url, stream=True, verify=self.verify, auth=auth,
headers=headers)
except Exception as exc:
raise ConanException("Error downloading file %s: '%s'" % (url, exc))
if not response.ok:
if response.status_code == 404:
raise NotFoundException("Not found: %s" % url)
elif response.status_code == 401:
raise AuthenticationException()
raise ConanException("Error %d downloading file %s" % (response.status_code, url))
try:
logger.debug("DOWNLOAD: %s" % url)
data = self._download_data(response, file_path)
duration = time.time() - t1
log_download(url, duration)
return data
except Exception as e:
logger.debug(e.__class__)
logger.debug(traceback.format_exc())
# If this part failed, it means problems with the connection to server
raise ConanConnectionError("Download failed, check server, possibly try again\n%s"
% str(e))
def _download_data(self, response, file_path):
ret = bytearray()
total_length = response.headers.get('content-length')
if total_length is None: # no content length header
if not file_path:
ret += response.content
else:
if self.output:
total_length = len(response.content)
progress = human_readable_progress(total_length, total_length)
print_progress(self.output, 50, progress)
save_append(file_path, response.content)
else:
total_length = int(total_length)
encoding = response.headers.get('content-encoding')
gzip = (encoding == "gzip")
# chunked can be a problem: https://www.greenbytes.de/tech/webdav/rfc2616.html#rfc.section.4.4
# It will not send content-length or should be ignored
def download_chunks(file_handler=None, ret_buffer=None):
"""Write to a buffer or to a file handler"""
chunk_size = 1024 if not file_path else 1024 * 100
download_size = 0
last_progress = None
for data in response.iter_content(chunk_size):
download_size += len(data)
if ret_buffer is not None:
ret_buffer.extend(data)
if file_handler is not None:
file_handler.write(to_file_bytes(data))
if self.output:
units = progress_units(download_size, total_length)
progress = human_readable_progress(download_size, total_length)
                    if last_progress != units: # Avoid screen refresh if nothing has changed
print_progress(self.output, units, progress)
last_progress = units
return download_size
if file_path:
mkdir(os.path.dirname(file_path))
with open(file_path, 'wb') as handle:
dl_size = download_chunks(file_handler=handle)
else:
dl_size = download_chunks(ret_buffer=ret)
response.close()
if dl_size != total_length and not gzip:
raise ConanException("Transfer interrupted before "
"complete: %s < %s" % (dl_size, total_length))
if not file_path:
return bytes(ret)
else:
return
def progress_units(progress, total):
if total == 0:
return 0
return min(50, int(50 * progress / total))
def human_readable_progress(bytes_transferred, total_bytes):
return "%s/%s" % (human_size(bytes_transferred), human_size(total_bytes))
def print_progress(output, units, progress=""):
if output.is_terminal:
output.rewrite_line("[%s%s] %s" % ('=' * units, ' ' * (50 - units), progress))
def call_with_retry(out, retry, retry_wait, method, *args, **kwargs):
for counter in range(retry):
try:
return method(*args, **kwargs)
except NotFoundException:
raise
except ConanException as exc:
if counter == (retry - 1):
raise
else:
if out:
out.error(exc)
out.info("Waiting %d seconds to retry..." % retry_wait)
time.sleep(retry_wait)
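A hedged usage sketch of `call_with_retry` above; `_flaky` is a made-up stand-in for an upload or download attempt that fails transiently:

def _flaky():
    raise ConanException("transient server error")

try:
    # retry 3 times with no wait and no output object (both out calls are guarded)
    call_with_retry(None, 3, 0, _flaky)
except ConanException:
    pass  # re-raised only once the final attempt has failed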
| 38.655303
| 106
| 0.590691
|
import os
import time
import traceback
from conans.client.tools.files import human_size
from conans.errors import AuthenticationException, ConanConnectionError, ConanException, \
    ForbiddenException, NotFoundException
from conans.util.files import mkdir, save_append, sha1sum, to_file_bytes
from conans.util.log import logger
from conans.util.tracer import log_download
class Uploader(object):
def __init__(self, requester, output, verify, chunk_size=1000):
self.chunk_size = chunk_size
self.output = output
self.requester = requester
self.verify = verify
def upload(self, url, abs_path, auth=None, dedup=False, retry=1, retry_wait=0, headers=None):
headers = headers or {}
headers["X-Checksum-Sha1"] = sha1sum(abs_path)
if dedup:
dedup_headers = {"X-Checksum-Deploy": "true"}
if headers:
dedup_headers.update(headers)
response = self.requester.put(url, data="", verify=self.verify, headers=dedup_headers,
auth=auth)
if response.status_code == 403:
if auth.token is None:
raise AuthenticationException(response.content)
raise ForbiddenException(response.content)
if response.status_code == 201:
return response
self.output.info("")
it = load_in_chunks(abs_path, self.chunk_size)
file_size = os.stat(abs_path).st_size
it = upload_with_progress(file_size, it, self.chunk_size, self.output)
iterable_to_file = IterableToFileAdapter(it, file_size)
ret = call_with_retry(self.output, retry, retry_wait, self._upload_file, url,
data=iterable_to_file, headers=headers, auth=auth)
return ret
def _upload_file(self, url, data, headers, auth):
try:
response = self.requester.put(url, data=data, verify=self.verify,
headers=headers, auth=auth)
if response.status_code == 403:
if auth.token is None:
raise AuthenticationException(response.content)
raise ForbiddenException(response.content)
except ConanException:
raise
except Exception as exc:
raise ConanException(exc)
return response
class IterableToFileAdapter(object):
def __init__(self, iterable, total_size):
self.iterator = iter(iterable)
self.total_size = total_size
def read(self, size=-1):
return next(self.iterator, b'')
def __len__(self):
return self.total_size
def __iter__(self):
return self.iterator.__iter__()
class upload_with_progress(object):
def __init__(self, totalsize, iterator, chunk_size, output):
self.totalsize = totalsize
self.output = output
self.chunk_size = chunk_size
self.aprox_chunks = self.totalsize * 1.0 / chunk_size
self.groups = iterator
def __iter__(self):
last_progress = None
for index, chunk in enumerate(self.groups):
if self.aprox_chunks == 0:
index = self.aprox_chunks
units = progress_units(index, self.aprox_chunks)
progress = human_readable_progress(index * self.chunk_size, self.totalsize)
if last_progress != units:
print_progress(self.output, units, progress)
last_progress = units
yield chunk
progress = human_readable_progress(self.totalsize, self.totalsize)
print_progress(self.output, progress_units(100, 100), progress)
def __len__(self):
return self.totalsize
def load_in_chunks(path, chunk_size=1024):
with open(path, 'rb') as file_object:
while True:
data = file_object.read(chunk_size)
if not data:
break
yield data
class Downloader(object):
def __init__(self, requester, output, verify, chunk_size=1000):
self.chunk_size = chunk_size
self.output = output
self.requester = requester
self.verify = verify
def download(self, url, file_path=None, auth=None, retry=3, retry_wait=0, overwrite=False,
headers=None):
if file_path and not os.path.isabs(file_path):
file_path = os.path.abspath(file_path)
if file_path and os.path.exists(file_path):
if overwrite:
if self.output:
self.output.warn("file '%s' already exists, overwriting" % file_path)
else:
raise ConanException("Error, the file to download already exists: '%s'" % file_path)
return call_with_retry(self.output, retry, retry_wait, self._download_file, url, auth,
headers, file_path)
def _download_file(self, url, auth, headers, file_path):
t1 = time.time()
try:
response = self.requester.get(url, stream=True, verify=self.verify, auth=auth,
headers=headers)
except Exception as exc:
raise ConanException("Error downloading file %s: '%s'" % (url, exc))
if not response.ok:
if response.status_code == 404:
raise NotFoundException("Not found: %s" % url)
elif response.status_code == 401:
raise AuthenticationException()
raise ConanException("Error %d downloading file %s" % (response.status_code, url))
try:
logger.debug("DOWNLOAD: %s" % url)
data = self._download_data(response, file_path)
duration = time.time() - t1
log_download(url, duration)
return data
except Exception as e:
logger.debug(e.__class__)
logger.debug(traceback.format_exc())
raise ConanConnectionError("Download failed, check server, possibly try again\n%s"
% str(e))
def _download_data(self, response, file_path):
ret = bytearray()
total_length = response.headers.get('content-length')
if total_length is None:
if not file_path:
ret += response.content
else:
if self.output:
total_length = len(response.content)
progress = human_readable_progress(total_length, total_length)
print_progress(self.output, 50, progress)
save_append(file_path, response.content)
else:
total_length = int(total_length)
encoding = response.headers.get('content-encoding')
gzip = (encoding == "gzip")
def download_chunks(file_handler=None, ret_buffer=None):
"""Write to a buffer or to a file handler"""
chunk_size = 1024 if not file_path else 1024 * 100
download_size = 0
last_progress = None
for data in response.iter_content(chunk_size):
download_size += len(data)
if ret_buffer is not None:
ret_buffer.extend(data)
if file_handler is not None:
file_handler.write(to_file_bytes(data))
if self.output:
units = progress_units(download_size, total_length)
progress = human_readable_progress(download_size, total_length)
if last_progress != units:
print_progress(self.output, units, progress)
last_progress = units
return download_size
if file_path:
mkdir(os.path.dirname(file_path))
with open(file_path, 'wb') as handle:
dl_size = download_chunks(file_handler=handle)
else:
dl_size = download_chunks(ret_buffer=ret)
response.close()
if dl_size != total_length and not gzip:
raise ConanException("Transfer interrupted before "
"complete: %s < %s" % (dl_size, total_length))
if not file_path:
return bytes(ret)
else:
return
def progress_units(progress, total):
if total == 0:
return 0
return min(50, int(50 * progress / total))
def human_readable_progress(bytes_transferred, total_bytes):
return "%s/%s" % (human_size(bytes_transferred), human_size(total_bytes))
def print_progress(output, units, progress=""):
if output.is_terminal:
output.rewrite_line("[%s%s] %s" % ('=' * units, ' ' * (50 - units), progress))
def call_with_retry(out, retry, retry_wait, method, *args, **kwargs):
for counter in range(retry):
try:
return method(*args, **kwargs)
except NotFoundException:
raise
except ConanException as exc:
if counter == (retry - 1):
raise
else:
if out:
out.error(exc)
out.info("Waiting %d seconds to retry..." % retry_wait)
time.sleep(retry_wait)
| true
| true
|
7908e52b5acc8f196064f27d3b05b84effb3e4c0
| 1,251
|
py
|
Python
|
paddlemm/models/retrieval/layers/contrastive.py
|
njustkmg/PaddleMM
|
92ae66d6e27c7a666820bc7baf8fd8fa2bd74aa5
|
[
"Apache-2.0"
] | 42
|
2022-01-05T13:49:48.000Z
|
2022-03-30T20:20:18.000Z
|
paddlemm/models/retrieval/layers/contrastive.py
|
njustkmg/PaddleMM
|
92ae66d6e27c7a666820bc7baf8fd8fa2bd74aa5
|
[
"Apache-2.0"
] | null | null | null |
paddlemm/models/retrieval/layers/contrastive.py
|
njustkmg/PaddleMM
|
92ae66d6e27c7a666820bc7baf8fd8fa2bd74aa5
|
[
"Apache-2.0"
] | 5
|
2022-01-19T00:27:24.000Z
|
2022-03-23T08:29:50.000Z
|
import paddle
import paddle.nn as nn
class ContrastiveLoss(nn.Layer):
"""
Compute contrastive loss
"""
def __init__(self, margin=0, max_violation=False):
super(ContrastiveLoss, self).__init__()
self.margin = margin
self.max_violation = max_violation
def forward(self, scores):
# compute image-sentence score matrix
diag_idx = [[i, i] for i in range(len(scores))]
diagonal = paddle.gather_nd(scores, paddle.to_tensor(diag_idx)).unsqueeze(1)
d1 = diagonal.expand_as(scores)
d2 = paddle.transpose(d1, (1,0)).expand_as(scores)
# compare every diagonal score to scores in its column
# caption retrieval
cost_s = (self.margin + scores - d1).clip(min=0)
# compare every diagonal score to scores in its row
# image retrieval
cost_im = (self.margin + scores - d2).clip(min=0)
# clear diagonals
mask = paddle.eye(scores.shape[0]) < .5
cost_s = cost_s * mask
cost_im = cost_im * mask
# keep the maximum violating negative for each query
if self.max_violation:
cost_s = cost_s.max(1)
cost_im = cost_im.max(0)
return cost_s.sum() + cost_im.sum()
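A minimal usage sketch, assuming `scores` is a (batch x batch) image-caption similarity matrix whose diagonal holds the matched pairs; the values here are random and purely illustrative:

loss_fn = ContrastiveLoss(margin=0.2, max_violation=True)
scores = paddle.rand([8, 8])  # stand-in for real similarity scores
loss = loss_fn(scores)        # scalar hinge loss over both retrieval directions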
| 32.921053
| 84
| 0.620304
|
import paddle
import paddle.nn as nn
class ContrastiveLoss(nn.Layer):
def __init__(self, margin=0, max_violation=False):
super(ContrastiveLoss, self).__init__()
self.margin = margin
self.max_violation = max_violation
def forward(self, scores):
diag_idx = [[i, i] for i in range(len(scores))]
diagonal = paddle.gather_nd(scores, paddle.to_tensor(diag_idx)).unsqueeze(1)
d1 = diagonal.expand_as(scores)
d2 = paddle.transpose(d1, (1,0)).expand_as(scores)
cost_s = (self.margin + scores - d1).clip(min=0)
cost_im = (self.margin + scores - d2).clip(min=0)
mask = paddle.eye(scores.shape[0]) < .5
cost_s = cost_s * mask
cost_im = cost_im * mask
if self.max_violation:
cost_s = cost_s.max(1)
cost_im = cost_im.max(0)
return cost_s.sum() + cost_im.sum()
| true
| true
|
7908e53a9b546b8e543c022f21a7efebaae89a17
| 740
|
py
|
Python
|
networkit/test/test_reachability.py
|
angriman/network
|
3a4c5fd32eb2be8d5b34eaee17f8fe4e6e141894
|
[
"MIT"
] | 366
|
2019-06-27T18:48:18.000Z
|
2022-03-29T08:36:49.000Z
|
networkit/test/test_reachability.py
|
angriman/network
|
3a4c5fd32eb2be8d5b34eaee17f8fe4e6e141894
|
[
"MIT"
] | 387
|
2019-06-24T11:30:39.000Z
|
2022-03-31T10:37:28.000Z
|
networkit/test/test_reachability.py
|
angriman/network
|
3a4c5fd32eb2be8d5b34eaee17f8fe4e6e141894
|
[
"MIT"
] | 131
|
2019-07-04T15:40:13.000Z
|
2022-03-29T12:34:23.000Z
|
#!/usr/bin/env python3
import unittest
import networkit as nk
class TestReachability(unittest.TestCase):
def testReachableNodes(self):
for directed in [False, True]:
for exact in [False, True]:
g = nk.generators.ErdosRenyiGenerator(100, 0.01, directed).generate()
rn = nk.reachability.ReachableNodes(g, exact).run()
for u in g.iterNodes():
reached = []
nk.traversal.Traversal.BFSfrom(g, u, lambda v, _: reached.append(v))
if exact:
self.assertEqual(rn.numberOfReachableNodes(u), len(reached))
else:
self.assertLessEqual(rn.numberOfReachableNodesLB(u), len(reached))
self.assertGreaterEqual(rn.numberOfReachableNodesUB(u), len(reached))
if __name__ == "__main__":
unittest.main()
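For reference, the same API exercised outside the test harness; a hedged standalone sketch (graph size and parameters are illustrative):

import networkit as nk

g = nk.generators.ErdosRenyiGenerator(100, 0.01, True).generate()
rn = nk.reachability.ReachableNodes(g, True).run()  # exact counts
print(rn.numberOfReachableNodes(0))  # size of the reachable set from node 0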
| 30.833333
| 75
| 0.709459
|
import unittest
import networkit as nk
class TestReachability(unittest.TestCase):
def testReachableNodes(self):
for directed in [False, True]:
for exact in [False, True]:
g = nk.generators.ErdosRenyiGenerator(100, 0.01, directed).generate()
rn = nk.reachability.ReachableNodes(g, exact).run()
for u in g.iterNodes():
reached = []
nk.traversal.Traversal.BFSfrom(g, u, lambda v, _: reached.append(v))
if exact:
self.assertEqual(rn.numberOfReachableNodes(u), len(reached))
else:
self.assertLessEqual(rn.numberOfReachableNodesLB(u), len(reached))
self.assertGreaterEqual(rn.numberOfReachableNodesUB(u), len(reached))
if __name__ == "__main__":
unittest.main()
| true
| true
|
7908e56570449d11614316ad38365ff90b93d140
| 401
|
py
|
Python
|
setup.py
|
Frederick-S/wtouch
|
4d6b2fefca8eb0b14b56ca9c5e1c060473093609
|
[
"MIT"
] | null | null | null |
setup.py
|
Frederick-S/wtouch
|
4d6b2fefca8eb0b14b56ca9c5e1c060473093609
|
[
"MIT"
] | null | null | null |
setup.py
|
Frederick-S/wtouch
|
4d6b2fefca8eb0b14b56ca9c5e1c060473093609
|
[
"MIT"
] | null | null | null |
from setuptools import setup, find_packages
setup(
name='wtouch',
version='0.0.1',
description='Create a file in current folder.',
url='https://github.com/Frederick-S/wtouch',
packages=find_packages(exclude=['tests']),
entry_points={
'console_scripts': [
'wtouch = wtouch.main:main'
]
},
include_package_data=True,
test_suite="tests"
)
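The `console_scripts` entry above requires `wtouch/main.py` to expose a `main` callable; a hypothetical minimal shape of that module (the actual implementation may differ):

# wtouch/main.py -- hypothetical sketch
import sys

def main():
    # touch-like behavior: create each named file in the current folder
    for name in sys.argv[1:]:
        open(name, 'a').close()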
| 23.588235
| 51
| 0.628429
|
from setuptools import setup, find_packages
setup(
name='wtouch',
version='0.0.1',
description='Create a file in current folder.',
url='https://github.com/Frederick-S/wtouch',
packages=find_packages(exclude=['tests']),
entry_points={
'console_scripts': [
'wtouch = wtouch.main:main'
]
},
include_package_data=True,
test_suite="tests"
)
| true
| true
|
7908e6043af0fbdf0d7188b4db6c8a049ae2a209
| 748
|
py
|
Python
|
test/unit/rules/resources/properties/test_onlyone.py
|
duartemendes/cfn-python-lint
|
b0c599773761cf3d6a3be27b6ad96c2b76d9b266
|
[
"MIT-0"
] | null | null | null |
test/unit/rules/resources/properties/test_onlyone.py
|
duartemendes/cfn-python-lint
|
b0c599773761cf3d6a3be27b6ad96c2b76d9b266
|
[
"MIT-0"
] | null | null | null |
test/unit/rules/resources/properties/test_onlyone.py
|
duartemendes/cfn-python-lint
|
b0c599773761cf3d6a3be27b6ad96c2b76d9b266
|
[
"MIT-0"
] | null | null | null |
"""
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""
from test.unit.rules import BaseRuleTestCase
from cfnlint.rules.resources.properties.OnlyOne import OnlyOne # pylint: disable=E0401
class TestPropertyOnlyOne(BaseRuleTestCase):
"""Test OnlyOne Property Configuration"""
def setUp(self):
"""Setup"""
super(TestPropertyOnlyOne, self).setUp()
self.collection.register(OnlyOne())
def test_file_positive(self):
"""Test Positive"""
self.helper_file_positive()
def test_file_negative(self):
"""Test failure"""
self.helper_file_negative(
'test/fixtures/templates/bad/resources/properties/onlyone.yaml', 5)
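A hedged sketch of what `helper_file_negative(path, 5)` is assumed to assert (the real helper lives in `test.unit.rules`; `run_file` below is a hypothetical stand-in):

def helper_file_negative_sketch(testcase, template_path, expected_count):
    matches = testcase.run_file(template_path)  # hypothetical lint call
    assert len(matches) == expected_count       # here: exactly 5 OnlyOne violations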
| 29.92
| 87
| 0.696524
|
from test.unit.rules import BaseRuleTestCase
from cfnlint.rules.resources.properties.OnlyOne import OnlyOne
class TestPropertyOnlyOne(BaseRuleTestCase):
def setUp(self):
super(TestPropertyOnlyOne, self).setUp()
self.collection.register(OnlyOne())
def test_file_positive(self):
self.helper_file_positive()
def test_file_negative(self):
self.helper_file_negative(
'test/fixtures/templates/bad/resources/properties/onlyone.yaml', 5)
| true
| true
|
7908e63e0376665719f7c607907de5ac3a07dd99
| 8,249
|
py
|
Python
|
eggs/mercurial-2.2.3-py2.7-linux-x86_64-ucs4.egg/mercurial/dagutil.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | null | null | null |
eggs/mercurial-2.2.3-py2.7-linux-x86_64-ucs4.egg/mercurial/dagutil.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | null | null | null |
eggs/mercurial-2.2.3-py2.7-linux-x86_64-ucs4.egg/mercurial/dagutil.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | 1
|
2021-12-16T23:31:37.000Z
|
2021-12-16T23:31:37.000Z
|
# dagutil.py - dag utilities for mercurial
#
# Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
# and Peter Arrenbrecht <peter@arrenbrecht.ch>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import nullrev
from i18n import _
class basedag(object):
'''generic interface for DAGs
terms:
"ix" (short for index) identifies a nodes internally,
"id" identifies one externally.
All params are ixs unless explicitly suffixed otherwise.
Pluralized params are lists or sets.
'''
def __init__(self):
self._inverse = None
def nodeset(self):
'''set of all node idxs'''
raise NotImplementedError()
def heads(self):
'''list of head ixs'''
raise NotImplementedError()
def parents(self, ix):
'''list of parents ixs of ix'''
raise NotImplementedError()
def inverse(self):
'''inverse DAG, where parents becomes children, etc.'''
raise NotImplementedError()
def ancestorset(self, starts, stops=None):
'''
set of all ancestors of starts (incl), but stop walk at stops (excl)
'''
raise NotImplementedError()
def descendantset(self, starts, stops=None):
'''
set of all descendants of starts (incl), but stop walk at stops (excl)
'''
return self.inverse().ancestorset(starts, stops)
def headsetofconnecteds(self, ixs):
'''
subset of connected list of ixs so that no node has a descendant in it
By "connected list" we mean that if an ancestor and a descendant are in
the list, then so is at least one path connecting them.
'''
raise NotImplementedError()
def externalize(self, ix):
        '''return the node id for the given ix'''
return self._externalize(ix)
def externalizeall(self, ixs):
        '''return a list (or set, if given a set) of node ids'''
ids = self._externalizeall(ixs)
if isinstance(ixs, set):
return set(ids)
return list(ids)
def internalize(self, id):
        '''return the node ix for the given id'''
return self._internalize(id)
def internalizeall(self, ids, filterunknown=False):
        '''return a list (or set, if given a set) of node ixs'''
ixs = self._internalizeall(ids, filterunknown)
if isinstance(ids, set):
return set(ixs)
return list(ixs)
class genericdag(basedag):
'''generic implementations for DAGs'''
def ancestorset(self, starts, stops=None):
stops = stops and set(stops) or set()
seen = set()
pending = list(starts)
while pending:
n = pending.pop()
if n not in seen and n not in stops:
seen.add(n)
pending.extend(self.parents(n))
return seen
def headsetofconnecteds(self, ixs):
hds = set(ixs)
if not hds:
return hds
for n in ixs:
for p in self.parents(n):
hds.discard(p)
assert hds
return hds
class revlogbaseddag(basedag):
'''generic dag interface to a revlog'''
def __init__(self, revlog, nodeset):
basedag.__init__(self)
self._revlog = revlog
self._heads = None
self._nodeset = nodeset
def nodeset(self):
return self._nodeset
def heads(self):
if self._heads is None:
self._heads = self._getheads()
return self._heads
def _externalize(self, ix):
return self._revlog.index[ix][7]
def _externalizeall(self, ixs):
idx = self._revlog.index
return [idx[i][7] for i in ixs]
def _internalize(self, id):
ix = self._revlog.rev(id)
if ix == nullrev:
raise LookupError(id, self._revlog.indexfile, _('nullid'))
return ix
def _internalizeall(self, ids, filterunknown):
rl = self._revlog
if filterunknown:
return [r for r in map(rl.nodemap.get, ids)
if r is not None and r != nullrev]
return map(self._internalize, ids)
class revlogdag(revlogbaseddag):
'''dag interface to a revlog'''
def __init__(self, revlog):
revlogbaseddag.__init__(self, revlog, set(xrange(len(revlog))))
def _getheads(self):
return [r for r in self._revlog.headrevs() if r != nullrev]
def parents(self, ix):
rlog = self._revlog
idx = rlog.index
revdata = idx[ix]
prev = revdata[5]
if prev != nullrev:
prev2 = revdata[6]
if prev2 == nullrev:
return [prev]
return [prev, prev2]
prev2 = revdata[6]
if prev2 != nullrev:
return [prev2]
return []
def inverse(self):
if self._inverse is None:
self._inverse = inverserevlogdag(self)
return self._inverse
def ancestorset(self, starts, stops=None):
rlog = self._revlog
idx = rlog.index
stops = stops and set(stops) or set()
seen = set()
pending = list(starts)
while pending:
rev = pending.pop()
if rev not in seen and rev not in stops:
seen.add(rev)
revdata = idx[rev]
for i in [5, 6]:
prev = revdata[i]
if prev != nullrev:
pending.append(prev)
return seen
def headsetofconnecteds(self, ixs):
if not ixs:
return set()
rlog = self._revlog
idx = rlog.index
headrevs = set(ixs)
for rev in ixs:
revdata = idx[rev]
for i in [5, 6]:
prev = revdata[i]
if prev != nullrev:
headrevs.discard(prev)
assert headrevs
return headrevs
def linearize(self, ixs):
'''linearize and topologically sort a list of revisions
The linearization process tries to create long runs of revs where
a child rev comes immediately after its first parent. This is done by
visiting the heads of the given revs in inverse topological order,
and for each visited rev, visiting its second parent, then its first
parent, then adding the rev itself to the output list.
'''
sorted = []
visit = list(self.headsetofconnecteds(ixs))
visit.sort(reverse=True)
finished = set()
while visit:
cur = visit.pop()
if cur < 0:
cur = -cur - 1
if cur not in finished:
sorted.append(cur)
finished.add(cur)
else:
visit.append(-cur - 1)
visit += [p for p in self.parents(cur)
if p in ixs and p not in finished]
assert len(sorted) == len(ixs)
return sorted
class inverserevlogdag(revlogbaseddag, genericdag):
'''inverse of an existing revlog dag; see revlogdag.inverse()'''
def __init__(self, orig):
revlogbaseddag.__init__(self, orig._revlog, orig._nodeset)
self._orig = orig
self._children = {}
self._roots = []
self._walkfrom = len(self._revlog) - 1
def _walkto(self, walkto):
rev = self._walkfrom
cs = self._children
roots = self._roots
idx = self._revlog.index
while rev >= walkto:
data = idx[rev]
isroot = True
for prev in [data[5], data[6]]: # parent revs
if prev != nullrev:
cs.setdefault(prev, []).append(rev)
isroot = False
if isroot:
roots.append(rev)
rev -= 1
self._walkfrom = rev
def _getheads(self):
self._walkto(nullrev)
return self._roots
def parents(self, ix):
if ix is None:
return []
if ix <= self._walkfrom:
self._walkto(ix)
return self._children.get(ix, [])
def inverse(self):
return self._orig
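A hedged illustration of the generic walk: a tiny in-memory DAG built on `genericdag`, just to exercise `ancestorset()`; the parent mapping is made up:

class dictdag(genericdag):
    '''toy DAG backed by a plain parents mapping'''
    def __init__(self, parentmap):
        genericdag.__init__(self)
        self._parentmap = parentmap
    def parents(self, ix):
        return self._parentmap.get(ix, [])

d = dictdag({1: [0], 2: [1], 3: [1]})
assert d.ancestorset([3]) == set([0, 1, 3])               # walks 3 -> 1 -> 0
assert d.headsetofconnecteds(set([0, 1, 3])) == set([3])  # 3 has no descendant here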
| 29.672662
| 79
| 0.564432
|
from node import nullrev
from i18n import _
class basedag(object):
def __init__(self):
self._inverse = None
def nodeset(self):
raise NotImplementedError()
def heads(self):
raise NotImplementedError()
def parents(self, ix):
raise NotImplementedError()
def inverse(self):
raise NotImplementedError()
def ancestorset(self, starts, stops=None):
raise NotImplementedError()
def descendantset(self, starts, stops=None):
return self.inverse().ancestorset(starts, stops)
def headsetofconnecteds(self, ixs):
raise NotImplementedError()
def externalize(self, ix):
return self._externalize(ix)
def externalizeall(self, ixs):
ids = self._externalizeall(ixs)
if isinstance(ixs, set):
return set(ids)
return list(ids)
def internalize(self, id):
return self._internalize(id)
def internalizeall(self, ids, filterunknown=False):
ixs = self._internalizeall(ids, filterunknown)
if isinstance(ids, set):
return set(ixs)
return list(ixs)
class genericdag(basedag):
def ancestorset(self, starts, stops=None):
stops = stops and set(stops) or set()
seen = set()
pending = list(starts)
while pending:
n = pending.pop()
if n not in seen and n not in stops:
seen.add(n)
pending.extend(self.parents(n))
return seen
def headsetofconnecteds(self, ixs):
hds = set(ixs)
if not hds:
return hds
for n in ixs:
for p in self.parents(n):
hds.discard(p)
assert hds
return hds
class revlogbaseddag(basedag):
def __init__(self, revlog, nodeset):
basedag.__init__(self)
self._revlog = revlog
self._heads = None
self._nodeset = nodeset
def nodeset(self):
return self._nodeset
def heads(self):
if self._heads is None:
self._heads = self._getheads()
return self._heads
def _externalize(self, ix):
return self._revlog.index[ix][7]
def _externalizeall(self, ixs):
idx = self._revlog.index
return [idx[i][7] for i in ixs]
def _internalize(self, id):
ix = self._revlog.rev(id)
if ix == nullrev:
raise LookupError(id, self._revlog.indexfile, _('nullid'))
return ix
def _internalizeall(self, ids, filterunknown):
rl = self._revlog
if filterunknown:
return [r for r in map(rl.nodemap.get, ids)
if r is not None and r != nullrev]
return map(self._internalize, ids)
class revlogdag(revlogbaseddag):
def __init__(self, revlog):
revlogbaseddag.__init__(self, revlog, set(xrange(len(revlog))))
def _getheads(self):
return [r for r in self._revlog.headrevs() if r != nullrev]
def parents(self, ix):
rlog = self._revlog
idx = rlog.index
revdata = idx[ix]
prev = revdata[5]
if prev != nullrev:
prev2 = revdata[6]
if prev2 == nullrev:
return [prev]
return [prev, prev2]
prev2 = revdata[6]
if prev2 != nullrev:
return [prev2]
return []
def inverse(self):
if self._inverse is None:
self._inverse = inverserevlogdag(self)
return self._inverse
def ancestorset(self, starts, stops=None):
rlog = self._revlog
idx = rlog.index
stops = stops and set(stops) or set()
seen = set()
pending = list(starts)
while pending:
rev = pending.pop()
if rev not in seen and rev not in stops:
seen.add(rev)
revdata = idx[rev]
for i in [5, 6]:
prev = revdata[i]
if prev != nullrev:
pending.append(prev)
return seen
def headsetofconnecteds(self, ixs):
if not ixs:
return set()
rlog = self._revlog
idx = rlog.index
headrevs = set(ixs)
for rev in ixs:
revdata = idx[rev]
for i in [5, 6]:
prev = revdata[i]
if prev != nullrev:
headrevs.discard(prev)
assert headrevs
return headrevs
def linearize(self, ixs):
sorted = []
visit = list(self.headsetofconnecteds(ixs))
visit.sort(reverse=True)
finished = set()
while visit:
cur = visit.pop()
if cur < 0:
cur = -cur - 1
if cur not in finished:
sorted.append(cur)
finished.add(cur)
else:
visit.append(-cur - 1)
visit += [p for p in self.parents(cur)
if p in ixs and p not in finished]
assert len(sorted) == len(ixs)
return sorted
class inverserevlogdag(revlogbaseddag, genericdag):
def __init__(self, orig):
revlogbaseddag.__init__(self, orig._revlog, orig._nodeset)
self._orig = orig
self._children = {}
self._roots = []
self._walkfrom = len(self._revlog) - 1
def _walkto(self, walkto):
rev = self._walkfrom
cs = self._children
roots = self._roots
idx = self._revlog.index
while rev >= walkto:
data = idx[rev]
isroot = True
for prev in [data[5], data[6]]:
if prev != nullrev:
cs.setdefault(prev, []).append(rev)
isroot = False
if isroot:
roots.append(rev)
rev -= 1
self._walkfrom = rev
def _getheads(self):
self._walkto(nullrev)
return self._roots
def parents(self, ix):
if ix is None:
return []
if ix <= self._walkfrom:
self._walkto(ix)
return self._children.get(ix, [])
def inverse(self):
return self._orig
| true
| true
|
7908e73eb5b7b886b228430c6d2786bb4d94665f
| 5,456
|
py
|
Python
|
src/test/resources/expected/neuron/hhcell/main_script.py
|
openworm/org.geppetto.model.neuroml
|
50afef112ecd4acf5b7e938784dac51eaebb09ea
|
[
"MIT"
] | 4
|
2015-03-04T18:18:08.000Z
|
2021-03-06T16:29:48.000Z
|
src/test/resources/expected/neuron/hhcell/main_script.py
|
openworm/org.geppetto.model.neuroml
|
50afef112ecd4acf5b7e938784dac51eaebb09ea
|
[
"MIT"
] | 46
|
2015-01-06T17:23:53.000Z
|
2021-03-24T17:06:18.000Z
|
src/test/resources/expected/neuron/hhcell/main_script.py
|
openworm/org.geppetto.model.neuroml
|
50afef112ecd4acf5b7e938784dac51eaebb09ea
|
[
"MIT"
] | 13
|
2015-02-04T13:41:49.000Z
|
2019-02-15T21:46:18.000Z
|
'''
Neuron simulator export for:
Components:
net1 (Type: network)
sim1 (Type: Simulation: length=1.0 (SI time) step=5.0E-5 (SI time))
hhcell (Type: cell)
passive (Type: ionChannelPassive: conductance=1.0E-11 (SI conductance))
na (Type: ionChannelHH: conductance=1.0E-11 (SI conductance))
k (Type: ionChannelHH: conductance=1.0E-11 (SI conductance))
pulseGen1 (Type: pulseGenerator: delay=0.0 (SI time) duration=1.0E8 (SI time) amplitude=8.000000000000001E-11 (SI current))
This NEURON file has been generated by org.neuroml.export (see https://github.com/NeuroML/org.neuroml.export)
org.neuroml.export v1.5.3
org.neuroml.model v1.5.3
jLEMS v0.9.9.0
'''
import neuron
import time
import hashlib
h = neuron.h
h.load_file("stdlib.hoc")
h.load_file("stdgui.hoc")
h("objref p")
h("p = new PythonObject()")
class NeuronSimulation():
def __init__(self, tstop, dt, seed=123456789):
print("\n Starting simulation in NEURON of %sms generated from NeuroML2 model...\n"%tstop)
self.seed = seed
self.randoms = []
self.next_global_id = 0 # Used in Random123 classes for elements using random(), etc.
self.next_spiking_input_id = 0 # Used in Random123 classes for elements using random(), etc.
'''
Adding simulation Component(id=sim1 type=Simulation) of network/component: net1 (Type: network)
'''
# ###################### Population: hhpop
print("Population hhpop contains 1 instance(s) of component: hhcell of type: cell")
h.load_file("hhcell.hoc")
a_hhpop = []
h("{ n_hhpop = 1 }")
h("objectvar a_hhpop[n_hhpop]")
for i in range(int(h.n_hhpop)):
h("a_hhpop[%i] = new hhcell()"%i)
h("access a_hhpop[%i].soma"%i)
self.next_global_id+=1
h("proc initialiseV_hhpop() { for i = 0, n_hhpop-1 { a_hhpop[i].set_initial_v() } }")
h("objref fih_hhpop")
h('{fih_hhpop = new FInitializeHandler(0, "initialiseV_hhpop()")}')
h("proc initialiseIons_hhpop() { for i = 0, n_hhpop-1 { a_hhpop[i].set_initial_ion_properties() } }")
h("objref fih_ion_hhpop")
h('{fih_ion_hhpop = new FInitializeHandler(1, "initialiseIons_hhpop()")}')
# Adding single input: Component(id=null type=explicitInput)
h("objref explicitInput_pulseGen1a_hhpop0_soma")
h("a_hhpop[0].soma { explicitInput_pulseGen1a_hhpop0_soma = new pulseGen1(0.5) } ")
trec = h.Vector()
trec.record(h._ref_t)
h.tstop = tstop
h.dt = dt
h.steps_per_ms = 1/h.dt
# ###################### File to save: time.dat (time)
# Column: time
h(' objectvar v_time ')
h(' { v_time = new Vector() } ')
h(' { v_time.record(&t) } ')
h.v_time.resize((h.tstop * h.steps_per_ms) + 1)
self.initialized = False
self.sim_end = -1 # will be overwritten
def run(self):
self.initialized = True
sim_start = time.time()
print("Running a simulation of %sms (dt = %sms; seed=%s)" % (h.tstop, h.dt, self.seed))
h.run()
self.sim_end = time.time()
sim_time = self.sim_end - sim_start
print("Finished NEURON simulation in %f seconds (%f mins)..."%(sim_time, sim_time/60.0))
self.save_results()
def advance(self):
if not self.initialized:
h.finitialize()
self.initialized = True
h.fadvance()
###############################################################################
# Hash function to use in generation of random value
# This is copied from NetPyNE: https://github.com/Neurosim-lab/netpyne/blob/master/netpyne/simFuncs.py
###############################################################################
def _id32 (self,obj):
        return int(hashlib.md5(obj).hexdigest()[0:8],16) # convert the first 8 chars of the md5 hash (base 16) to an int
###############################################################################
# Initialize the stim randomizer
# This is copied from NetPyNE: https://github.com/Neurosim-lab/netpyne/blob/master/netpyne/simFuncs.py
###############################################################################
def _init_stim_randomizer(self,rand, stimType, gid, seed):
#print("INIT STIM %s; %s; %s; %s"%(rand, stimType, gid, seed))
rand.Random123(self._id32(stimType), gid, seed)
def save_results(self):
print("Saving results at t=%s..."%h.t)
if self.sim_end < 0: self.sim_end = time.time()
# ###################### File to save: time.dat (time)
py_v_time = [ t/1000 for t in h.v_time.to_python() ] # Convert to Python list for speed...
f_time_f2 = open('time.dat', 'w')
num_points = len(py_v_time) # Simulation may have been stopped before tstop...
for i in range(num_points):
            f_time_f2.write('%f\n'% py_v_time[i]) # Save in SI units, one value per line...
f_time_f2.close()
print("Saved data to: time.dat")
save_end = time.time()
save_time = save_end - self.sim_end
print("Finished saving results in %f seconds"%(save_time))
print("Done")
quit()
if __name__ == '__main__':
ns = NeuronSimulation(tstop=1000.0, dt=0.049999997, seed=123456789)
ns.run()
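Besides the `run()` driver under `__main__`, the class exposes `advance()` for incremental stepping; a hedged alternative driver (the tstop/dt values are illustrative):

ns = NeuronSimulation(tstop=10.0, dt=0.05, seed=123456789)
while h.t < h.tstop:
    ns.advance()       # finitialize on the first call, then single fadvance steps
ns.save_results()      # writes time.dat and quits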
| 31.72093
| 128
| 0.566166
|
import neuron
import time
import hashlib
h = neuron.h
h.load_file("stdlib.hoc")
h.load_file("stdgui.hoc")
h("objref p")
h("p = new PythonObject()")
class NeuronSimulation():
def __init__(self, tstop, dt, seed=123456789):
print("\n Starting simulation in NEURON of %sms generated from NeuroML2 model...\n"%tstop)
self.seed = seed
self.randoms = []
self.next_global_id = 0
self.next_spiking_input_id = 0
")
h("objref fih_ion_hhpop")
h('{fih_ion_hhpop = new FInitializeHandler(1, "initialiseIons_hhpop()")}')
h("objref explicitInput_pulseGen1a_hhpop0_soma")
h("a_hhpop[0].soma { explicitInput_pulseGen1a_hhpop0_soma = new pulseGen1(0.5) } ")
trec = h.Vector()
trec.record(h._ref_t)
h.tstop = tstop
h.dt = dt
h.steps_per_ms = 1/h.dt
| true
| true
|
7908e7e6b04137ecf4709441a1b6aa39615103e8
| 1,208
|
py
|
Python
|
Programs/env_lda.py
|
mikepackard415/Scientific-Environmental-Discourse
|
f8d08734f7c2ce98e088479ac7b58c7b348c0401
|
[
"MIT"
] | null | null | null |
Programs/env_lda.py
|
mikepackard415/Scientific-Environmental-Discourse
|
f8d08734f7c2ce98e088479ac7b58c7b348c0401
|
[
"MIT"
] | null | null | null |
Programs/env_lda.py
|
mikepackard415/Scientific-Environmental-Discourse
|
f8d08734f7c2ce98e088479ac7b58c7b348c0401
|
[
"MIT"
] | null | null | null |
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from lda import LDA
def learn_topics(texts, topicnum):
    # Get vocabulary and word counts. Keep the top 10,000 most frequent
    # lowercase unigrams (scikit-learn's default token pattern: two or more
    # alphanumeric characters, with punctuation treated as separators).
print("Vectorizing...")
CVzer = CountVectorizer(max_features=10000,
lowercase=True)
doc_vcnts = CVzer.fit_transform(texts)
vocabulary = CVzer.get_feature_names()
    # Learn topics. The refresh argument controls print frequency.
print("LDA")
lda_model = LDA(topicnum, n_iter=4000, refresh=500)
doc_topic = lda_model.fit_transform(doc_vcnts)
topic_word = lda_model.topic_word_
return doc_topic, topic_word, vocabulary
print("Reading data...")
env = pd.read_csv('../Data/Environmental Discourse/env_processed.csv', index_col=0)
env = env[~env.text_processed.isna()]
doc_topic, topic_word, vocabulary = learn_topics(env.text_processed, 100)
print(doc_topic[0,:])
for i in range(100):
env['topic_{}'.format(i)] = doc_topic[:, i]
env.to_csv('../Data/Environmental Discourse/env_lda.csv')
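A hedged follow-up sketch for inspecting the fit: print the top-10 words per topic (numpy is assumed to be available, since lda returns numpy arrays):

import numpy as np

vocab = np.array(vocabulary)
for k, dist in enumerate(topic_word):
    top_words = vocab[np.argsort(dist)[-10:][::-1]]
    print('Topic {}: {}'.format(k, ' '.join(top_words)))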
| 34.514286
| 84
| 0.701987
|
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from lda import LDA
def learn_topics(texts, topicnum):
print("Vectorizing...")
CVzer = CountVectorizer(max_features=10000,
lowercase=True)
doc_vcnts = CVzer.fit_transform(texts)
vocabulary = CVzer.get_feature_names()
print("LDA")
lda_model = LDA(topicnum, n_iter=4000, refresh=500)
doc_topic = lda_model.fit_transform(doc_vcnts)
topic_word = lda_model.topic_word_
return doc_topic, topic_word, vocabulary
print("Reading data...")
env = pd.read_csv('../Data/Environmental Discourse/env_processed.csv', index_col=0)
env = env[~env.text_processed.isna()]
doc_topic, topic_word, vocabulary = learn_topics(env.text_processed, 100)
print(doc_topic[0,:])
for i in range(100):
env['topic_{}'.format(i)] = doc_topic[:, i]
env.to_csv('../Data/Environmental Discourse/env_lda.csv')
| true
| true
|
7908e8ae137ab9e85eff92ca84e83834e0215fda
| 5,047
|
py
|
Python
|
fate-manager/hyperion/entity/types.py
|
cold-code/FATE-Cloud
|
60de124a084624d46a9c2c9c8ca2a3dd4968d65d
|
[
"Apache-2.0"
] | null | null | null |
fate-manager/hyperion/entity/types.py
|
cold-code/FATE-Cloud
|
60de124a084624d46a9c2c9c8ca2a3dd4968d65d
|
[
"Apache-2.0"
] | null | null | null |
fate-manager/hyperion/entity/types.py
|
cold-code/FATE-Cloud
|
60de124a084624d46a9c2c9c8ca2a3dd4968d65d
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2020 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class BaseStatus(object):
@classmethod
def status_list(cls):
return [cls.__dict__[k] for k in cls.__dict__.keys() if not callable(getattr(cls, k)) and not k.startswith("__")]
@classmethod
def contains(cls, status):
return status in cls.status_list()
class BaseStateTransitionRule(object):
RULES = {}
@classmethod
def if_pass(cls, src_status, dest_status):
if src_status not in cls.RULES:
return False
if dest_status not in cls.RULES[src_status]:
return False
else:
return True
class StatusSet(BaseStatus):
WAITING = 'waiting'
READY = 'ready'
RUNNING = "running"
CANCELED = "canceled"
TIMEOUT = "timeout"
FAILED = "failed"
SUCCESS = "success"
SKIPPED = "skipped"
@classmethod
def get_level(cls, status):
return dict(zip(cls.status_list(), range(len(cls.status_list())))).get(status, None)
class JobStatus(BaseStatus):
WAITING = StatusSet.WAITING
READY = StatusSet.READY
RUNNING = StatusSet.RUNNING
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
SUCCESS = StatusSet.SUCCESS
class StateTransitionRule(BaseStateTransitionRule):
RULES = {
StatusSet.WAITING: [StatusSet.READY, StatusSet.RUNNING, StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.READY: [StatusSet.WAITING, StatusSet.RUNNING, StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED],
StatusSet.RUNNING: [StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.CANCELED: [StatusSet.WAITING],
StatusSet.TIMEOUT: [StatusSet.FAILED, StatusSet.SUCCESS, StatusSet.WAITING],
StatusSet.FAILED: [StatusSet.WAITING],
StatusSet.SUCCESS: [StatusSet.WAITING],
}
class PlayStatus(BaseStatus):
WAITING = StatusSet.WAITING
READY = StatusSet.READY
RUNNING = StatusSet.RUNNING
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
SUCCESS = StatusSet.SUCCESS
class StateTransitionRule(BaseStateTransitionRule):
RULES = {
StatusSet.WAITING: [StatusSet.READY, StatusSet.RUNNING, StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.READY: [StatusSet.RUNNING, StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED],
StatusSet.RUNNING: [StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.CANCELED: [StatusSet.WAITING],
StatusSet.TIMEOUT: [StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.FAILED: [StatusSet.WAITING],
StatusSet.SUCCESS: [],
}
class TaskStatus(BaseStatus):
WAITING = StatusSet.WAITING
RUNNING = StatusSet.RUNNING
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
SUCCESS = StatusSet.SUCCESS
SKIPPED = StatusSet.SKIPPED
class StateTransitionRule(BaseStateTransitionRule):
RULES = {
StatusSet.WAITING: [StatusSet.RUNNING, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS, StatusSet.SKIPPED],
StatusSet.RUNNING: [StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS, StatusSet.SKIPPED],
StatusSet.CANCELED: [StatusSet.WAITING],
StatusSet.TIMEOUT: [StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.FAILED: [],
StatusSet.SUCCESS: [],
StatusSet.SKIPPED: []
}
class EndStatus(BaseStatus):
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
COMPLETE = StatusSet.SUCCESS
SKIPPED = StatusSet.SKIPPED
class StandbyStatus(BaseStatus):
WAITING = StatusSet.WAITING
READY = StatusSet.READY
class OngoingStatus(BaseStatus):
WAITING = StatusSet.WAITING
READY = StatusSet.READY
RUNNING = StatusSet.RUNNING
class InterruptStatus(BaseStatus):
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
class NoneKillStatus(BaseStatus):
WAITING = StatusSet.WAITING
READY = StatusSet.READY
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
SUCCESS = StatusSet.SUCCESS
SKIPPED = StatusSet.SKIPPED
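A short usage sketch of the machinery above (the status pairs are chosen for illustration):

assert JobStatus.StateTransitionRule.if_pass(StatusSet.WAITING, StatusSet.RUNNING)
assert not JobStatus.StateTransitionRule.if_pass(StatusSet.SUCCESS, StatusSet.FAILED)
assert EndStatus.contains(StatusSet.CANCELED)
print(StatusSet.get_level(StatusSet.RUNNING))  # ordinal position in status_list()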
| 33.423841
| 144
| 0.69467
|
class BaseStatus(object):
@classmethod
def status_list(cls):
return [cls.__dict__[k] for k in cls.__dict__.keys() if not callable(getattr(cls, k)) and not k.startswith("__")]
@classmethod
def contains(cls, status):
return status in cls.status_list()
class BaseStateTransitionRule(object):
RULES = {}
@classmethod
def if_pass(cls, src_status, dest_status):
if src_status not in cls.RULES:
return False
if dest_status not in cls.RULES[src_status]:
return False
else:
return True
class StatusSet(BaseStatus):
WAITING = 'waiting'
READY = 'ready'
RUNNING = "running"
CANCELED = "canceled"
TIMEOUT = "timeout"
FAILED = "failed"
SUCCESS = "success"
SKIPPED = "skipped"
@classmethod
def get_level(cls, status):
return dict(zip(cls.status_list(), range(len(cls.status_list())))).get(status, None)
class JobStatus(BaseStatus):
WAITING = StatusSet.WAITING
READY = StatusSet.READY
RUNNING = StatusSet.RUNNING
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
SUCCESS = StatusSet.SUCCESS
class StateTransitionRule(BaseStateTransitionRule):
RULES = {
StatusSet.WAITING: [StatusSet.READY, StatusSet.RUNNING, StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.READY: [StatusSet.WAITING, StatusSet.RUNNING, StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED],
StatusSet.RUNNING: [StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.CANCELED: [StatusSet.WAITING],
StatusSet.TIMEOUT: [StatusSet.FAILED, StatusSet.SUCCESS, StatusSet.WAITING],
StatusSet.FAILED: [StatusSet.WAITING],
StatusSet.SUCCESS: [StatusSet.WAITING],
}
class PlayStatus(BaseStatus):
WAITING = StatusSet.WAITING
READY = StatusSet.READY
RUNNING = StatusSet.RUNNING
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
SUCCESS = StatusSet.SUCCESS
class StateTransitionRule(BaseStateTransitionRule):
RULES = {
StatusSet.WAITING: [StatusSet.READY, StatusSet.RUNNING, StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.READY: [StatusSet.RUNNING, StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED],
StatusSet.RUNNING: [StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.CANCELED: [StatusSet.WAITING],
StatusSet.TIMEOUT: [StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.FAILED: [StatusSet.WAITING],
StatusSet.SUCCESS: [],
}
class TaskStatus(BaseStatus):
WAITING = StatusSet.WAITING
RUNNING = StatusSet.RUNNING
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
SUCCESS = StatusSet.SUCCESS
SKIPPED = StatusSet.SKIPPED
class StateTransitionRule(BaseStateTransitionRule):
RULES = {
StatusSet.WAITING: [StatusSet.RUNNING, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS, StatusSet.SKIPPED],
StatusSet.RUNNING: [StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, StatusSet.SUCCESS, StatusSet.SKIPPED],
StatusSet.CANCELED: [StatusSet.WAITING],
StatusSet.TIMEOUT: [StatusSet.FAILED, StatusSet.SUCCESS],
StatusSet.FAILED: [],
StatusSet.SUCCESS: [],
StatusSet.SKIPPED: []
}
class EndStatus(BaseStatus):
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
COMPLETE = StatusSet.SUCCESS
SKIPPED = StatusSet.SKIPPED
class StandbyStatus(BaseStatus):
WAITING = StatusSet.WAITING
READY = StatusSet.READY
class OngoingStatus(BaseStatus):
WAITING = StatusSet.WAITING
READY = StatusSet.READY
RUNNING = StatusSet.RUNNING
class InterruptStatus(BaseStatus):
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
class NoneKillStatus(BaseStatus):
WAITING = StatusSet.WAITING
READY = StatusSet.READY
CANCELED = StatusSet.CANCELED
TIMEOUT = StatusSet.TIMEOUT
FAILED = StatusSet.FAILED
SUCCESS = StatusSet.SUCCESS
SKIPPED = StatusSet.SKIPPED
| true
| true
|
7908e94da7c76cb8d0aef0a9b2f0abe8549e4d02
| 1,527
|
py
|
Python
|
pet_mk_viii/UnitTest/push_button.py
|
Pet-Series/Pet-Mk-VII
|
4f14d8af46d6a3f4d9838028a6ac0d3c37695ab2
|
[
"MIT"
] | null | null | null |
pet_mk_viii/UnitTest/push_button.py
|
Pet-Series/Pet-Mk-VII
|
4f14d8af46d6a3f4d9838028a6ac0d3c37695ab2
|
[
"MIT"
] | 1
|
2022-03-30T20:40:19.000Z
|
2022-03-30T20:40:19.000Z
|
pet_mk_viii/UnitTest/push_button.py
|
Pet-Series/Pet-Mk-VIII
|
4f14d8af46d6a3f4d9838028a6ac0d3c37695ab2
|
[
"MIT"
] | null | null | null |
import RPi.GPIO as GPIO
from time import sleep
import sys
# Set warnings off (optional)
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
# Set Button and LED pins
JOYSTICK_BUTTON = 12
MAIN_SWITCH = 22
LED = 6
class button():
'''
A simple Push-Button class
'''
def __init__(self, pin, pud_up_down):
print("'def __init__(self," + str(pin)+ "): '")
GPIO.setup(pin, GPIO.IN, pull_up_down=pud_up_down)
GPIO.setup(LED,GPIO.OUT)
GPIO.add_event_detect(pin, GPIO.BOTH, callback=self.push_button_callback, bouncetime=300)
# GPIO.add_event_detect(pin, GPIO.FALLING, callback=self.release_button_callback, bouncetime=300)
def push_button_callback(self, channel):
print(channel)
sleep(0.1)
if GPIO.input(channel):
print("Rising edge detected on " + str(channel) )
GPIO.output(LED,GPIO.HIGH)
else:
print("Falling edge detected on " + str(channel) )
GPIO.output(LED,GPIO.LOW)
def main(args=None):
main_switch = button(MAIN_SWITCH, GPIO.PUD_DOWN)
joystick_button = button(JOYSTICK_BUTTON, GPIO.PUD_UP)
try:
while True:
print(".")
sleep(5)
except KeyboardInterrupt:
print("LedLightNode **** 💀 Ctrl-C detected...")
finally:
print("LedLightNode **** 🪦 Ending... ")
print( str(sys.exc_info()[1]) ) # Needs `import sys`
# Time to clean up stuff!
GPIO.cleanup()
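# A hardware-free sketch (illustration only) of the same edge-dispatch logic
# used in push_button_callback above; read_pin is a hypothetical stand-in for
# GPIO.input so the branch can be exercised without a Raspberry Pi.
def classify_edge(read_pin, channel):
    # High level after the debounce sleep -> rising edge, otherwise falling.
    return "rising" if read_pin(channel) else "falling"

assert classify_edge(lambda ch: 1, JOYSTICK_BUTTON) == "rising"
assert classify_edge(lambda ch: 0, MAIN_SWITCH) == "falling"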
if __name__ == "__main__":
main()
| 27.763636
| 104
| 0.618206
|
import RPi.GPIO as GPIO
from time import sleep
import sys
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
JOYSTICK_BUTTON = 12
MAIN_SWITCH = 22
LED = 6
class button():
def __init__(self, pin, pud_up_down):
print("'def __init__(self," + str(pin)+ "): '")
GPIO.setup(pin, GPIO.IN, pull_up_down=pud_up_down)
GPIO.setup(LED,GPIO.OUT)
GPIO.add_event_detect(pin, GPIO.BOTH, callback=self.push_button_callback, bouncetime=300)
def push_button_callback(self, channel):
print(channel)
sleep(0.1)
if GPIO.input(channel):
print("Rising edge detected on " + str(channel) )
GPIO.output(LED,GPIO.HIGH)
else:
print("Falling edge detected on " + str(channel) )
GPIO.output(LED,GPIO.LOW)
def main(args=None):
main_switch = button(MAIN_SWITCH, GPIO.PUD_DOWN)
joystick_button = button(JOYSTICK_BUTTON, GPIO.PUD_UP)
try:
while True:
print(".")
sleep(5)
except KeyboardInterrupt:
print("LedLightNode **** 💀 Ctrl-C detected...")
finally:
print("LedLightNode **** 🪦 Ending... ")
print( str(sys.exc_info()[1]) )
GPIO.cleanup()
if __name__ == "__main__":
main()
| true
| true
|
7908e96224c11c7a1c29ab81251541832b7ef41c
| 3,287
|
py
|
Python
|
src/covid_model_seiir_pipeline/lib/utilities.py
|
yukgu/covid-model-seiir-pipeline
|
3433034d3f089938e7993b6321d570365bdf62db
|
[
"BSD-3-Clause"
] | 23
|
2020-05-25T00:20:32.000Z
|
2022-01-18T10:32:09.000Z
|
src/covid_model_seiir_pipeline/lib/utilities.py
|
yukgu/covid-model-seiir-pipeline
|
3433034d3f089938e7993b6321d570365bdf62db
|
[
"BSD-3-Clause"
] | 15
|
2020-06-15T16:34:22.000Z
|
2021-08-15T22:11:37.000Z
|
src/covid_model_seiir_pipeline/lib/utilities.py
|
yukgu/covid-model-seiir-pipeline
|
3433034d3f089938e7993b6321d570365bdf62db
|
[
"BSD-3-Clause"
] | 11
|
2020-05-24T21:57:29.000Z
|
2021-09-07T18:21:15.000Z
|
from __future__ import annotations
import abc
from dataclasses import asdict as asdict_, fields, is_dataclass
from pathlib import Path
from typing import Dict, Union, Tuple
from pprint import pformat
from covid_shared import ihme_deps
import numpy as np
import pandas as pd
import yaml
class YamlIOMixin:
"""Mixin for reading and writing yaml files."""
@staticmethod
def _coerce_path(path: Union[str, Path]) -> Path:
path = Path(path)
if path.suffix not in ['.yaml', '.yml']:
raise ValueError('Path must point to a yaml file. '
f'You provided {str(path)}')
return path
@classmethod
def _load(cls, path: Union[str, Path]):
path = cls._coerce_path(path)
with path.open() as f:
data = yaml.full_load(f)
return data
@classmethod
def _dump(cls, data, path: Union[str, Path]) -> None:
path = cls._coerce_path(path)
with path.open('w') as f:
yaml.dump(data, f, sort_keys=False)
class Specification(YamlIOMixin):
"""Generic class for pipeline stage specifications."""
@classmethod
def from_path(cls, specification_path: Union[str, Path]) -> Specification:
"""Builds the specification from a file path."""
spec_dict = cls._load(specification_path)
return cls.from_dict(spec_dict)
@classmethod
def from_dict(cls, spec_dict: Dict) -> Specification:
"""Builds the specification from a dictionary."""
args = cls.parse_spec_dict(spec_dict)
return cls(*args)
@classmethod
@abc.abstractmethod
def parse_spec_dict(cls, specification: Dict) -> Tuple:
"""Parses a dict representation of the specification into init args."""
raise NotImplementedError
@abc.abstractmethod
def to_dict(self) -> Dict:
"""Coerce the specification to a dict."""
raise NotImplementedError
def dump(self, path: Union[str, Path]) -> None:
"""Writes this specification to a file."""
data = self.to_dict()
self._dump(data, path)
def __repr__(self):
return f'{self.__class__.__name__}(\n{pformat(self.to_dict())}\n)'
def asdict(data_class) -> Dict:
"""Type coerce items for easy serialization"""
data = asdict_(data_class)
out = {}
for k, v in data.items():
if isinstance(v, tuple):
out[k] = list(v)
elif isinstance(v, np.ndarray):
out[k] = v.tolist()
else:
out[k] = v
return out
def filter_to_spec_fields(spec_dict: dict, specification):
if is_dataclass(specification):
return {
k: v for k, v in spec_dict.items()
if k in [f.name for f in fields(specification)]
}
else:
return spec_dict
def load_location_hierarchy(location_set_version_id: int = None,
location_file: Path = None, **kwargs):
assert ((location_set_version_id and not location_file)
or (not location_set_version_id and location_file))
if location_set_version_id:
return ihme_deps.get_location_hierarchy_by_version(
location_set_version_id=location_set_version_id,
)
else:
return pd.read_csv(location_file)
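# A minimal sketch of a concrete stage specification; DummySpec and its single
# `name` field are hypothetical and not part of the pipeline.
class DummySpec(Specification):
    def __init__(self, name: str):
        self.name = name

    @classmethod
    def parse_spec_dict(cls, specification: Dict) -> Tuple:
        # The tuple order must match __init__'s positional arguments.
        return (specification['name'],)

    def to_dict(self) -> Dict:
        return {'name': self.name}


if __name__ == '__main__':
    spec = DummySpec.from_dict({'name': 'regression'})
    assert spec.to_dict() == {'name': 'regression'}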
| 29.612613
| 79
| 0.634621
|
from __future__ import annotations
import abc
from dataclasses import asdict as asdict_, fields, is_dataclass
from pathlib import Path
from typing import Dict, Union, Tuple
from pprint import pformat
from covid_shared import ihme_deps
import numpy as np
import pandas as pd
import yaml
class YamlIOMixin:
@staticmethod
def _coerce_path(path: Union[str, Path]) -> Path:
path = Path(path)
if path.suffix not in ['.yaml', '.yml']:
raise ValueError('Path must point to a yaml file. '
f'You provided {str(path)}')
return path
@classmethod
def _load(cls, path: Union[str, Path]):
path = cls._coerce_path(path)
with path.open() as f:
data = yaml.full_load(f)
return data
@classmethod
def _dump(cls, data, path: Union[str, Path]) -> None:
path = cls._coerce_path(path)
with path.open('w') as f:
yaml.dump(data, f, sort_keys=False)
class Specification(YamlIOMixin):
@classmethod
def from_path(cls, specification_path: Union[str, Path]) -> Specification:
spec_dict = cls._load(specification_path)
return cls.from_dict(spec_dict)
@classmethod
def from_dict(cls, spec_dict: Dict) -> Specification:
args = cls.parse_spec_dict(spec_dict)
return cls(*args)
@classmethod
@abc.abstractmethod
def parse_spec_dict(cls, specification: Dict) -> Tuple:
raise NotImplementedError
@abc.abstractmethod
def to_dict(self) -> Dict:
raise NotImplementedError
def dump(self, path: Union[str, Path]) -> None:
data = self.to_dict()
self._dump(data, path)
def __repr__(self):
return f'{self.__class__.__name__}(\n{pformat(self.to_dict())}\n)'
def asdict(data_class) -> Dict:
data = asdict_(data_class)
out = {}
for k, v in data.items():
if isinstance(v, tuple):
out[k] = list(v)
elif isinstance(v, np.ndarray):
out[k] = v.tolist()
else:
out[k] = v
return out
def filter_to_spec_fields(spec_dict: dict, specification):
if is_dataclass(specification):
return {
k: v for k, v in spec_dict.items()
if k in [f.name for f in fields(specification)]
}
else:
return spec_dict
def load_location_hierarchy(location_set_version_id: int = None,
location_file: Path = None, **kwargs):
assert ((location_set_version_id and not location_file)
or (not location_set_version_id and location_file))
if location_set_version_id:
return ihme_deps.get_location_hierarchy_by_version(
location_set_version_id=location_set_version_id,
)
else:
return pd.read_csv(location_file)
| true
| true
|
7908ea2c9f6d7e854bd9299e0904e32c5cbc531b
| 5,965
|
py
|
Python
|
episode-7/main.py
|
abhra2020-smart/python-minecraft-clone
|
08e186763ad778bc2a47c55fa30732f64d36f4e7
|
[
"MIT"
] | null | null | null |
episode-7/main.py
|
abhra2020-smart/python-minecraft-clone
|
08e186763ad778bc2a47c55fa30732f64d36f4e7
|
[
"MIT"
] | null | null | null |
episode-7/main.py
|
abhra2020-smart/python-minecraft-clone
|
08e186763ad778bc2a47c55fa30732f64d36f4e7
|
[
"MIT"
] | null | null | null |
import math
import ctypes
import pyglet
pyglet.options["shadow_window"] = False
pyglet.options["debug_gl"] = False
import pyglet.gl as gl
import matrix
import shader
import camera
import block_type
import texture_manager
class Window(pyglet.window.Window):
def __init__(self, **args):
super().__init__(**args)
# create blocks
self.texture_manager = texture_manager.Texture_manager(16, 16, 256)
self.cobblestone = block_type.Block_type(self.texture_manager, "cobblestone", {"all": "cobblestone"})
self.grass = block_type.Block_type(self.texture_manager, "grass", {"top": "grass", "bottom": "dirt", "sides": "grass_side"})
self.dirt = block_type.Block_type(self.texture_manager, "dirt", {"all": "dirt"})
self.stone = block_type.Block_type(self.texture_manager, "stone", {"all": "stone"})
self.sand = block_type.Block_type(self.texture_manager, "sand", {"all": "sand"})
self.planks = block_type.Block_type(self.texture_manager, "planks", {"all": "planks"})
self.log = block_type.Block_type(self.texture_manager, "log", {"top": "log_top", "bottom": "log_top", "sides": "log_side"})
self.texture_manager.generate_mipmaps()
# create vertex array object
self.vao = gl.GLuint(0)
gl.glGenVertexArrays(1, ctypes.byref(self.vao))
gl.glBindVertexArray(self.vao)
# create vertex position vbo
self.vertex_position_vbo = gl.GLuint(0)
gl.glGenBuffers(1, ctypes.byref(self.vertex_position_vbo))
gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.vertex_position_vbo)
gl.glBufferData(
gl.GL_ARRAY_BUFFER,
ctypes.sizeof(gl.GLfloat * len(self.grass.vertex_positions)),
(gl.GLfloat * len(self.grass.vertex_positions)) (*self.grass.vertex_positions),
gl.GL_STATIC_DRAW)
gl.glVertexAttribPointer(0, 3, gl.GL_FLOAT, gl.GL_FALSE, 0, 0)
gl.glEnableVertexAttribArray(0)
# create tex coord vbo
self.tex_coord_vbo = gl.GLuint(0)
gl.glGenBuffers(1, ctypes.byref(self.tex_coord_vbo))
gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.tex_coord_vbo)
gl.glBufferData(
gl.GL_ARRAY_BUFFER,
ctypes.sizeof(gl.GLfloat * len(self.grass.tex_coords)),
(gl.GLfloat * len(self.grass.tex_coords)) (*self.grass.tex_coords),
gl.GL_STATIC_DRAW)
gl.glVertexAttribPointer(1, 3, gl.GL_FLOAT, gl.GL_FALSE, 0, 0)
gl.glEnableVertexAttribArray(1)
# create shading value vbo
self.shading_value_vbo = gl.GLuint(0)
gl.glGenBuffers(1, ctypes.byref(self.shading_value_vbo))
gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.shading_value_vbo)
gl.glBufferData(
gl.GL_ARRAY_BUFFER,
ctypes.sizeof(gl.GLfloat * len(self.grass.shading_values)),
(gl.GLfloat * len(self.grass.shading_values)) (*self.grass.shading_values),
gl.GL_STATIC_DRAW)
gl.glVertexAttribPointer(2, 1, gl.GL_FLOAT, gl.GL_FALSE, 0, 0)
gl.glEnableVertexAttribArray(2)
# create index buffer object
self.ibo = gl.GLuint(0)
gl.glGenBuffers(1, self.ibo)
gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, self.ibo)
gl.glBufferData(
gl.GL_ELEMENT_ARRAY_BUFFER,
ctypes.sizeof(gl.GLuint * len(self.grass.indices)),
(gl.GLuint * len(self.grass.indices)) (*self.grass.indices),
gl.GL_STATIC_DRAW)
# create shader
self.shader = shader.Shader("vert.glsl", "frag.glsl")
self.shader_sampler_location = self.shader.find_uniform(b"texture_array_sampler")
self.shader.use()
# pyglet stuff
pyglet.clock.schedule_interval(self.update, 1.0 / 60)
self.mouse_captured = False
# camera stuff
self.camera = camera.Camera(self.shader, self.width, self.height)
def update(self, delta_time):
if not self.mouse_captured:
self.camera.input = [0, 0, 0]
self.camera.update_camera(delta_time)
def on_draw(self):
self.camera.update_matrices()
# bind textures
gl.glActiveTexture(gl.GL_TEXTURE0)
gl.glBindTexture(gl.GL_TEXTURE_2D_ARRAY, self.texture_manager.texture_array)
gl.glUniform1i(self.shader_sampler_location, 0)
# draw stuff
gl.glEnable(gl.GL_DEPTH_TEST)
gl.glClearColor(0.0, 0.0, 0.0, 1.0)
self.clear()
gl.glDrawElements(
gl.GL_TRIANGLES,
len(self.grass.indices),
gl.GL_UNSIGNED_INT,
None)
# input functions
def on_resize(self, width, height):
print(f"Resize {width} * {height}")
gl.glViewport(0, 0, width, height)
self.camera.width = width
self.camera.height = height
def on_mouse_press(self, x, y, button, modifiers):
self.mouse_captured = not self.mouse_captured
self.set_exclusive_mouse(self.mouse_captured)
def on_mouse_motion(self, x, y, delta_x, delta_y):
if self.mouse_captured:
sensitivity = 0.004
self.camera.rotation[0] -= delta_x * sensitivity
self.camera.rotation[1] += delta_y * sensitivity
self.camera.rotation[1] = max(-math.tau / 4, min(math.tau / 4, self.camera.rotation[1]))
def on_key_press(self, key, modifiers):
if not self.mouse_captured:
return
if key == pyglet.window.key.D: self.camera.input[0] += 1
elif key == pyglet.window.key.A: self.camera.input[0] -= 1
elif key == pyglet.window.key.W: self.camera.input[2] += 1
elif key == pyglet.window.key.S: self.camera.input[2] -= 1
elif key == pyglet.window.key.SPACE : self.camera.input[1] += 1
elif key == pyglet.window.key.LSHIFT: self.camera.input[1] -= 1
def on_key_release(self, key, modifiers):
if not self.mouse_captured:
return
if key == pyglet.window.key.D: self.camera.input[0] -= 1
elif key == pyglet.window.key.A: self.camera.input[0] += 1
elif key == pyglet.window.key.W: self.camera.input[2] -= 1
elif key == pyglet.window.key.S: self.camera.input[2] += 1
elif key == pyglet.window.key.SPACE : self.camera.input[1] -= 1
elif key == pyglet.window.key.LSHIFT: self.camera.input[1] += 1
class Game:
def __init__(self):
self.config = gl.Config(major_version = 3, depth_size = 16)
self.window = Window(config = self.config, width = 800, height = 600, caption = "Minecraft clone", resizable = True, vsync = False)
def run(self):
pyglet.app.run()
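# A windowing-free sketch (illustration only) of the input-accumulation pattern
# used in on_key_press/on_key_release above: pressing adds to an axis, releasing
# subtracts, so opposite keys cancel instead of sticking.
def _demo_input_accumulation():
    input_vec = [0, 0, 0]
    input_vec[0] += 1  # D pressed
    input_vec[0] -= 1  # A pressed while D is still held -> net zero
    assert input_vec[0] == 0
    input_vec[0] += 1  # A released, so its contribution is undone
    assert input_vec[0] == 1  # moving right again

_demo_input_accumulation()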
if __name__ == "__main__":
game = Game()
game.run()
| 30.126263
| 133
| 0.718692
|
import math
import ctypes
import pyglet
pyglet.options["shadow_window"] = False
pyglet.options["debug_gl"] = False
import pyglet.gl as gl
import matrix
import shader
import camera
import block_type
import texture_manager
class Window(pyglet.window.Window):
def __init__(self, **args):
super().__init__(**args)
self.texture_manager = texture_manager.Texture_manager(16, 16, 256)
self.cobblestone = block_type.Block_type(self.texture_manager, "cobblestone", {"all": "cobblestone"})
self.grass = block_type.Block_type(self.texture_manager, "grass", {"top": "grass", "bottom": "dirt", "sides": "grass_side"})
self.dirt = block_type.Block_type(self.texture_manager, "dirt", {"all": "dirt"})
self.stone = block_type.Block_type(self.texture_manager, "stone", {"all": "stone"})
self.sand = block_type.Block_type(self.texture_manager, "sand", {"all": "sand"})
self.planks = block_type.Block_type(self.texture_manager, "planks", {"all": "planks"})
self.log = block_type.Block_type(self.texture_manager, "log", {"top": "log_top", "bottom": "log_top", "sides": "log_side"})
self.texture_manager.generate_mipmaps()
self.vao = gl.GLuint(0)
gl.glGenVertexArrays(1, ctypes.byref(self.vao))
gl.glBindVertexArray(self.vao)
self.vertex_position_vbo = gl.GLuint(0)
gl.glGenBuffers(1, ctypes.byref(self.vertex_position_vbo))
gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.vertex_position_vbo)
gl.glBufferData(
gl.GL_ARRAY_BUFFER,
ctypes.sizeof(gl.GLfloat * len(self.grass.vertex_positions)),
(gl.GLfloat * len(self.grass.vertex_positions)) (*self.grass.vertex_positions),
gl.GL_STATIC_DRAW)
gl.glVertexAttribPointer(0, 3, gl.GL_FLOAT, gl.GL_FALSE, 0, 0)
gl.glEnableVertexAttribArray(0)
self.tex_coord_vbo = gl.GLuint(0)
gl.glGenBuffers(1, ctypes.byref(self.tex_coord_vbo))
gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.tex_coord_vbo)
gl.glBufferData(
gl.GL_ARRAY_BUFFER,
ctypes.sizeof(gl.GLfloat * len(self.grass.tex_coords)),
(gl.GLfloat * len(self.grass.tex_coords)) (*self.grass.tex_coords),
gl.GL_STATIC_DRAW)
gl.glVertexAttribPointer(1, 3, gl.GL_FLOAT, gl.GL_FALSE, 0, 0)
gl.glEnableVertexAttribArray(1)
self.shading_value_vbo = gl.GLuint(0)
gl.glGenBuffers(1, ctypes.byref(self.shading_value_vbo))
gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.shading_value_vbo)
gl.glBufferData(
gl.GL_ARRAY_BUFFER,
ctypes.sizeof(gl.GLfloat * len(self.grass.shading_values)),
(gl.GLfloat * len(self.grass.shading_values)) (*self.grass.shading_values),
gl.GL_STATIC_DRAW)
gl.glVertexAttribPointer(2, 1, gl.GL_FLOAT, gl.GL_FALSE, 0, 0)
gl.glEnableVertexAttribArray(2)
self.ibo = gl.GLuint(0)
gl.glGenBuffers(1, self.ibo)
gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, self.ibo)
gl.glBufferData(
gl.GL_ELEMENT_ARRAY_BUFFER,
ctypes.sizeof(gl.GLuint * len(self.grass.indices)),
(gl.GLuint * len(self.grass.indices)) (*self.grass.indices),
gl.GL_STATIC_DRAW)
self.shader = shader.Shader("vert.glsl", "frag.glsl")
self.shader_sampler_location = self.shader.find_uniform(b"texture_array_sampler")
self.shader.use()
pyglet.clock.schedule_interval(self.update, 1.0 / 60)
self.mouse_captured = False
self.camera = camera.Camera(self.shader, self.width, self.height)
def update(self, delta_time):
if not self.mouse_captured:
self.camera.input = [0, 0, 0]
self.camera.update_camera(delta_time)
def on_draw(self):
self.camera.update_matrices()
gl.glActiveTexture(gl.GL_TEXTURE0)
gl.glBindTexture(gl.GL_TEXTURE_2D_ARRAY, self.texture_manager.texture_array)
gl.glUniform1i(self.shader_sampler_location, 0)
gl.glEnable(gl.GL_DEPTH_TEST)
gl.glClearColor(0.0, 0.0, 0.0, 1.0)
self.clear()
gl.glDrawElements(
gl.GL_TRIANGLES,
len(self.grass.indices),
gl.GL_UNSIGNED_INT,
None)
def on_resize(self, width, height):
print(f"Resize {width} * {height}")
gl.glViewport(0, 0, width, height)
self.camera.width = width
self.camera.height = height
def on_mouse_press(self, x, y, button, modifiers):
self.mouse_captured = not self.mouse_captured
self.set_exclusive_mouse(self.mouse_captured)
def on_mouse_motion(self, x, y, delta_x, delta_y):
if self.mouse_captured:
sensitivity = 0.004
self.camera.rotation[0] -= delta_x * sensitivity
self.camera.rotation[1] += delta_y * sensitivity
self.camera.rotation[1] = max(-math.tau / 4, min(math.tau / 4, self.camera.rotation[1]))
def on_key_press(self, key, modifiers):
if not self.mouse_captured:
return
if key == pyglet.window.key.D: self.camera.input[0] += 1
elif key == pyglet.window.key.A: self.camera.input[0] -= 1
elif key == pyglet.window.key.W: self.camera.input[2] += 1
elif key == pyglet.window.key.S: self.camera.input[2] -= 1
elif key == pyglet.window.key.SPACE : self.camera.input[1] += 1
elif key == pyglet.window.key.LSHIFT: self.camera.input[1] -= 1
def on_key_release(self, key, modifiers):
if not self.mouse_captured:
return
if key == pyglet.window.key.D: self.camera.input[0] -= 1
elif key == pyglet.window.key.A: self.camera.input[0] += 1
elif key == pyglet.window.key.W: self.camera.input[2] -= 1
elif key == pyglet.window.key.S: self.camera.input[2] += 1
elif key == pyglet.window.key.SPACE : self.camera.input[1] -= 1
elif key == pyglet.window.key.LSHIFT: self.camera.input[1] += 1
class Game:
def __init__(self):
self.config = gl.Config(major_version = 3, depth_size = 16)
self.window = Window(config = self.config, width = 800, height = 600, caption = "Minecraft clone", resizable = True, vsync = False)
def run(self):
pyglet.app.run()
if __name__ == "__main__":
game = Game()
game.run()
| true
| true
|
7908eac94588f0c35d6443c95e4a889dcdd92644
| 945
|
py
|
Python
|
pkgs/sdk-pkg/src/genie/libs/sdk/apis/tests/iosxe/interface/get/get_interface_mac_address/test_api_get_interface_mac_address.py
|
CiscoTestAutomation/genielibs
|
becee8a1a85f4973e00859e3244e2c8fe45a394c
|
[
"Apache-2.0"
] | 94
|
2018-04-30T20:29:15.000Z
|
2022-03-29T13:40:31.000Z
|
pkgs/sdk-pkg/src/genie/libs/sdk/apis/tests/iosxe/interface/get/get_interface_mac_address/test_api_get_interface_mac_address.py
|
CiscoTestAutomation/genielibs
|
becee8a1a85f4973e00859e3244e2c8fe45a394c
|
[
"Apache-2.0"
] | 67
|
2018-12-06T21:08:09.000Z
|
2022-03-29T18:00:46.000Z
|
pkgs/sdk-pkg/src/genie/libs/sdk/apis/tests/iosxe/interface/get/get_interface_mac_address/test_api_get_interface_mac_address.py
|
CiscoTestAutomation/genielibs
|
becee8a1a85f4973e00859e3244e2c8fe45a394c
|
[
"Apache-2.0"
] | 49
|
2018-06-29T18:59:03.000Z
|
2022-03-10T02:07:59.000Z
|
import unittest
from pyats.topology import loader
from genie.libs.sdk.apis.iosxe.interface.get import get_interface_mac_address
class TestGetInterfaceMacAddress(unittest.TestCase):
@classmethod
def setUpClass(self):
testbed = """
devices:
R1_xe:
connections:
defaults:
class: unicon.Unicon
a:
command: mock_device_cli --os iosxe --mock_data_dir mock_data --state connect
protocol: unknown
os: iosxe
platform: iosxe
type: CSR1000v
"""
self.testbed = loader.load(testbed)
self.device = self.testbed.devices['R1_xe']
self.device.connect()
def test_get_interface_mac_address(self):
result = get_interface_mac_address(self.device, 'GigabitEthernet1')
expected_output = '5e01.4000.0000'
self.assertEqual(result, expected_output)
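# A minimal sketch for running this test module directly; it assumes the
# mock_data directory referenced by the mocked 'a' connection is present.
if __name__ == '__main__':
    unittest.main()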
| 30.483871
| 93
| 0.621164
|
import unittest
from pyats.topology import loader
from genie.libs.sdk.apis.iosxe.interface.get import get_interface_mac_address
class TestGetInterfaceMacAddress(unittest.TestCase):
@classmethod
def setUpClass(self):
testbed = """
devices:
R1_xe:
connections:
defaults:
class: unicon.Unicon
a:
command: mock_device_cli --os iosxe --mock_data_dir mock_data --state connect
protocol: unknown
os: iosxe
platform: iosxe
type: CSR1000v
"""
self.testbed = loader.load(testbed)
self.device = self.testbed.devices['R1_xe']
self.device.connect()
def test_get_interface_mac_address(self):
result = get_interface_mac_address(self.device, 'GigabitEthernet1')
expected_output = '5e01.4000.0000'
self.assertEqual(result, expected_output)
| true
| true
|
7908eb5c747d1b302729d9401e1781c65d594dd4
| 30,985
|
py
|
Python
|
pycalc/MAVProxy/modules/mavproxy_cmdlong.py
|
joakimzhang/python-electron
|
79bc174a14c5286ca739bb7d8ce6522fdc6e9e80
|
[
"CC0-1.0"
] | null | null | null |
pycalc/MAVProxy/modules/mavproxy_cmdlong.py
|
joakimzhang/python-electron
|
79bc174a14c5286ca739bb7d8ce6522fdc6e9e80
|
[
"CC0-1.0"
] | 8
|
2021-01-28T19:26:22.000Z
|
2022-03-24T18:07:24.000Z
|
pycalc/MAVProxy/modules/mavproxy_cmdlong.py
|
joakimzhang/python-electron
|
79bc174a14c5286ca739bb7d8ce6522fdc6e9e80
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python
'''command long'''
import threading
import time, os
import math
from pymavlink import mavutil
from MAVProxy.modules.lib import mp_module
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import matplotlib.pyplot as plt
from threading import Thread
import mpl_toolkits.mplot3d.axes3d as p3
import matplotlib.animation as animation
class CmdlongModule(mp_module.MPModule):
def __init__(self, mpstate):
super(CmdlongModule, self).__init__(mpstate, "cmdlong")
self.add_command('setspeed', self.cmd_do_change_speed, "do_change_speed")
self.add_command('setyaw', self.cmd_condition_yaw, "condition_yaw")
self.add_command('offboard', self.offboard_mode, "offboard")
self.add_command('p_mode', self.position_mode, "p_mode")
self.add_command('m_mode', self.manual_mode, "m_mode")
self.add_command('a_mode', self.altitude_mode, "a_mode")
self.add_command('takeoff2', self.cmd_takeoff_2, "takeoff2")
self.add_command('takeoff3', self.takeoff_3, "takeoff3")
self.add_command('music',self.music,"music")
self.add_command('land2', self.land_2, "land2")
self.add_command('fly', self.fly, "fly")
self.add_command('x', self.x, "x")
self.add_command('y', self.y, "y")
self.add_command('z', self.z, "z")
self.add_command('h', self.h, "h")
self.add_command('yaw', self.yaw, "yaw")
self.add_command('takeoff', self.cmd_takeoff, "takeoff")
self.add_command('velocity', self.cmd_velocity, "velocity")
self.add_command('position', self.cmd_position, "position")
self.add_command('st', self.start_position_thread, "start_position_thread")
self.add_command('attitude', self.cmd_attitude, "attitude")
self.add_command('cammsg', self.cmd_cammsg, "cammsg")
self.add_command('camctrlmsg', self.cmd_camctrlmsg, "camctrlmsg")
self.add_command('posvel', self.cmd_posvel, "posvel")
self.add_command('parachute', self.cmd_parachute, "parachute",
['<enable|disable|release>'])
self.add_command('long', self.cmd_long, "execute mavlink long command",
self.cmd_long_commands())
self.dis_max = 0
self.dis_min = 100
self.dis_diff = self.dis_max - self.dis_min
self.svo_x_max = 0
self.svo_x_min = 0
self.svo_y_max = 0
self.svo_y_min = 0
self.x_diff = self.svo_x_max - self.svo_x_min
self.y_diff = self.svo_y_max - self.svo_y_min
self.list_x = []
self.list_y = []
self.list_z = []
self.svo_x = 0
self.svo_y = 0
self.svo_z = 0
#thread_obj = Thread(target = self.show_svo_2d)
#thread_obj = Thread(target = self.show_svo)
#thread_obj.setDaemon(True)
#thread_obj.start()
def cmd_long_commands(self):
atts = dir(mavutil.mavlink)
atts = filter( lambda x : x.lower().startswith("mav_cmd"), atts)
ret = []
for att in atts:
ret.append(att)
ret.append(str(att[8:]))
return ret
def cmd_takeoff(self, args):
'''take off'''
if ( len(args) != 1):
print("Usage: takeoff ALTITUDE_IN_METERS")
return
if (len(args) == 1):
altitude = float(args[0])
print("Take Off started")
self.master.mav.command_long_send(
self.settings.target_system, # target_system
mavutil.mavlink.MAV_COMP_ID_SYSTEM_CONTROL, # target_component
mavutil.mavlink.MAV_CMD_NAV_TAKEOFF, # command
0, # confirmation
0, # param1
0, # param2
0, # param3
0, # param4
0, # param5
0, # param6
altitude) # param7
def cmd_parachute(self, args):
'''parachute control'''
usage = "Usage: parachute <enable|disable|release>"
if len(args) != 1:
print(usage)
return
cmds = {
'enable' : mavutil.mavlink.PARACHUTE_ENABLE,
'disable' : mavutil.mavlink.PARACHUTE_DISABLE,
'release' : mavutil.mavlink.PARACHUTE_RELEASE
}
if not args[0] in cmds:
print(usage)
return
cmd = cmds[args[0]]
self.master.mav.command_long_send(
self.settings.target_system, # target_system
0, # target_component
mavutil.mavlink.MAV_CMD_DO_PARACHUTE,
0,
cmd,
0, 0, 0, 0, 0, 0)
def cmd_camctrlmsg(self, args):
'''camctrlmsg'''
print("Sent DIGICAM_CONFIGURE CMD_LONG")
self.master.mav.command_long_send(
self.settings.target_system, # target_system
0, # target_component
mavutil.mavlink.MAV_CMD_DO_DIGICAM_CONFIGURE, # command
0, # confirmation
10, # param1
20, # param2
30, # param3
40, # param4
50, # param5
60, # param6
70) # param7
def cmd_cammsg(self, args):
'''cammsg'''
print("Sent DIGICAM_CONTROL CMD_LONG")
self.master.mav.command_long_send(
self.settings.target_system, # target_system
0, # target_component
mavutil.mavlink.MAV_CMD_DO_DIGICAM_CONTROL, # command
0, # confirmation
10, # param1
20, # param2
30, # param3
40, # param4
50, # param5
60, # param6
70) # param7
def cmd_do_change_speed(self, args):
'''speed value'''
if ( len(args) != 1):
print("Usage: speed SPEED_VALUE")
return
if (len(args) == 1):
speed = float(args[0])
print("SPEED %s" % (str(speed)))
self.master.mav.command_long_send(
self.settings.target_system, # target_system
mavutil.mavlink.MAV_COMP_ID_SYSTEM_CONTROL, # target_component
mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED, # command
0, # confirmation
0, # param1
speed, # param2 (Speed value)
0, # param3
0, # param4
0, # param5
0, # param6
0) # param7
def cmd_condition_yaw(self, args):
'''yaw angle angular_speed angle_mode'''
if ( len(args) != 3):
print("Usage: yaw ANGLE ANGULAR_SPEED MODE:[0 absolute / 1 relative]")
return
if (len(args) == 3):
angle = float(args[0])
angular_speed = float(args[1])
angle_mode = float(args[2])
print("ANGLE %s" % (str(angle)))
self.master.mav.command_long_send(
self.settings.target_system, # target_system
mavutil.mavlink.MAV_COMP_ID_SYSTEM_CONTROL, # target_component
mavutil.mavlink.MAV_CMD_CONDITION_YAW, # command
0, # confirmation
angle, # param1 (angle value)
angular_speed, # param2 (angular speed value)
0, # param3
angle_mode, # param4 (mode: 0->absolute / 1->relative)
0, # param5
0, # param6
0) # param7
def cmd_velocity(self, args):
'''velocity x-ms y-ms z-ms'''
if (len(args) != 3):
print("Usage: velocity x y z (m/s)")
return
if (len(args) == 3):
x_mps = float(args[0])
y_mps = float(args[1])
z_mps = float(args[2])
print("x:%f, y:%f, z:%f" % (x_mps, y_mps, z_mps))
self.master.mav.set_position_target_local_ned_send(
0, # system time in milliseconds
1, # target system
0, # target component
8, # coordinate frame MAV_FRAME_BODY_NED
455, # type mask (vel only)
0, 0, 0, # position x,y,z
x_mps, y_mps, z_mps, # velocity x,y,z
0, 0, 0, # accel x,y,z
0, 0) # yaw, yaw rate
def mavlink_packet(self, msg):
type = msg.get_type()
if type == 'DISTANCE_SENSOR':
#print "distance find\n"
#print isinstance(msg,subclass)
#print msg.current_distance
#print msg.__class__
#self.console.set_status('distance','distance %s' % msg.current_distance)
#print msg.current_distance
if self.dis_max < msg.current_distance:
self.dis_max = msg.current_distance
if self.dis_min > msg.current_distance:
self.dis_min = msg.current_distance
self.dis_diff = self.dis_max - self.dis_min
#self.msg.current_distance =
if type == 'SVO_POSITION_RAW':
#self.svo_x = msg.position_x
#self.svo_y = msg.position_y
#self.svo_z = msg.position_z
if self.svo_x_max < msg.position_x:
self.svo_x_max = msg.position_x
if self.svo_x_min > msg.position_x:
self.svo_x_min = msg.position_x
if self.svo_y_max < msg.position_y:
self.svo_y_max = msg.position_y
if self.svo_y_min > msg.position_y:
self.svo_y_min = msg.position_y
self.x_diff = self.svo_x_max - self.svo_x_min
self.y_diff = self.svo_y_max - self.svo_y_min
#print self.dis_max
#print self.dis_min
elif type == 'LOCAL_POSITION_NED':
self.console.set_status('position_ned_x','position_x %s' % msg.x)
self.svo_x = msg.x
#print type(self.svo_x)
#self.console.set_status('position_ned_y','position_y %s' % msg.y)
self.svo_y = msg.y
#print (svo_y)
#self.console.set_status('position_ned_z','position_ned %s' % msg.z)
self.svo_z = msg.z
def show_svo_2d(self):
fig = plt.figure()
#self.ax = p3.Axes3D(fig)
self.ax = fig.add_subplot(1, 1, 1)
num = 0
self.ax.set_xlabel('X')
self.ax.set_ylabel('Y')
self.ax.set_title('2D Test')
self.ax.set_xlim([-1, 1])
self.ax.set_ylim([-1, 1])
self.num = 0
#self.lineData = self.ax.scatter(1, 1, c = 'b', marker = '.')
self.lineData, = self.ax.plot([],[])
line_ani = animation.FuncAnimation(fig, self.update_lines_2d,self.Gen_RandLine_2d,
interval=100, blit=False)
plt.show()
def show_svo(self):
fig = plt.figure()
#self.ax = p3.Axes3D(fig)
self.ax = fig.add_subplot(1, 1, 1, projection="3d")
num = 0
self.ax.set_xlabel('X')
num = 0
self.ax.set_xlabel('X')
self.ax.set_xlim3d([-1.0, 1.0])
self.ax.set_ylabel('Y')
self.ax.set_ylim3d([-1.0, 1.0])
self.ax.set_zlabel('Z')
self.ax.set_zlim3d([-1.0, 1.0])
self.ax.set_title('3D Test')
self.num = 0
#line_ani = animation.FuncAnimation(fig, self.update_lines,self.Gen_RandLine,
# interval=10, blit=False)
self.lineData = self.ax.scatter([1], [1], [1], c = 'b', marker = '.')
line_ani = animation.FuncAnimation(fig, self.update_lines,self.Gen_RandLine,
interval=10, blit=False)
plt.show()
def data_stream(self):
pass
def Gen_RandLine_2d(self):
if len(self.list_x)<200:
self.list_x.append(self.svo_x)
self.list_y.append(self.svo_y)
self.list_z.append(self.svo_z)
else:
self.list_x.append(self.svo_x)
self.list_x = self.list_x[1:]
self.list_y.append(self.svo_y)
self.list_y = self.list_y[1:]
self.list_z.append(self.svo_z)
self.list_z = self.list_z[1:]
#for i in range(2):
#list_x = self.svo_x
#list_y = self.svo_y
#self.list_z.append(float(self.svo_z))
lineData = [self.list_x,self.list_y]
#lineData = [list_x,list_y]
#print type(list_x)
#print lineData
#time.sleep(0.02)
#self.ax.set_zlim(min(data[2]), max(data[2]))
#lineData = [self.list_x,self.list_y,self.list_z]
yield lineData
def update_lines_2d(self,data):
#print "data",data
#lineData = self.ax.scatter(data[0], data[1], data[2], c = 'b', marker = '.')
#self.lineData.set_data([(data[0], data[1])])
self.lineData.set_xdata(data[0])
self.lineData.set_ydata(data[1])
self.num = self.num + 1
#self.ax.set_xlim(min(data[0]), max(data[0]))
#self.ax.set_ylim(min(data[1]), max(data[1]))
if self.num == 100:
#self.ax.cla()
#print self.num
self.num = 0
self.ax.set_xlim(min(data[0])-1, max(data[0])+1)
self.ax.set_ylim(min(data[1])-1, max(data[1])+1)
return self.lineData,
def Gen_RandLine(self):
'''
if len(self.list_x)<70:
self.list_x.append(self.svo_x)
self.list_y.append(self.svo_y)
self.list_z.append(self.svo_z)
else:
self.list_x.append(self.svo_x)
self.list_x = self.list_x[1:]
self.list_y.append(self.svo_y)
self.list_y = self.list_y[1:]
self.list_z.append(self.svo_z)
self.list_z = self.list_z[1:]
'''
#for i in range(2):
list_x = self.svo_x
list_y = self.svo_y
list_z = self.svo_z
#self.list_x.append(float(self.svo_x))
#self.list_y.append(float(self.svo_y))
#self.list_z.append(float(self.svo_z))
#lineData = [self.list_x,self.list_y,self.list_z]
lineData = [[list_x],[list_y],[list_z]]
#print type(list_x)
#print lineData
#self.ax.set_xlim(min(data[0]), max(data[0]))
#self.ax.set_ylim(min(data[1]), max(data[1]))
#self.ax.set_zlim(min(data[2]), max(data[2]))
#lineData = [self.list_x,self.list_y,self.list_z]
yield lineData
def update_lines(self,data):
#print "data",data
#lineData = self.ax.scatter(data[0], data[1], data[2], c = 'b', marker = '.')
self.lineData.set_offsets([(data[0], data[1])])
#self.lineData.set_data([data[0], data[1]])
self.lineData.set_3d_properties([data[2]], "z")
self.num = self.num + 1
if self.num == 200:
#self.ax.cla()
#print self.num
self.num = 0
self.ax.set_xlabel('X')
#self.ax.set_xlim3d([-1.0, 1.0])
self.ax.set_ylabel('Y')
#self.ax.set_ylim3d([-1.0, 1.0])
self.ax.set_zlabel('Z')
#self.ax.set_zlim3d([-1.0, 1.0])
self.ax.set_title('3D Test')
print "xdiff",self.x_diff
print "ydiff",self.y_diff
#lineData = ax.scatter(data[0], data[1], data[2], c = 'b', marker = '.')
#plt.pause(0.01)
#ax = p3.Axes3D(fig)
return self.lineData
def position_mode(self,args):
print "position mode!!!!!!!!!!!!!!!!!"
self.list_x = []
self.list_y = []
self.list_z = []
#self.start_position_thread(1)
time.sleep(0.5)
#self.master.set_mode(221,6,0)
self.master.set_mode(129,3,0)
self.dis_max = 0
self.dis_min = 100
self.svo_x_max = 0
self.svo_x_min = 0
self.svo_y_max = 0
self.svo_y_min = 0
self.x_diff = self.svo_x_max - self.svo_x_min
self.y_diff = self.svo_y_max - self.svo_y_min
def manual_mode(self,args):
print "manual mode!!!!!!!!!!!!!!!!!"
print self.master.__class__
#self.start_position_thread(1)
#time.sleep(0.5)
#self.master.set_mode(221,6,0)
self.master.set_mode(129,1,0)
self.v_z = float(args[0])
def altitude_mode(self,args):
print "altitude mode!!!!!!!!!!!!!!!!!"
#self.start_position_thread(1)
#time.sleep(0.5)
#self.master.set_mode(221,6,0)
self.master.set_mode(129,2,0)
#self.v_z = float(370)
#self.dis_max = 0
#self.dis_min = 100
def offboard_mode(self,args):
print "offboard!!!!!!!!!!!!!!!!!"
#self.cmd_position_2(1)
self.start_offboard_thread(1)
time.sleep(0.5)
self.master.set_mode(221,6,0)
#self.master.set_mode(1,3,0)
def cmd_takeoff_2(self, args):
'''position z-m'''
if (len(args) != 1):
print("Usage: position z (meters)")
return
if (len(args) == 1):
# x_m = float(0)
# y_m = float(0)
z_m = float(args[0])
# print("x:%f, y:%f, z:%f" % (x_m, y_m, z_m))
self.master.mav.set_position_target_local_ned_send(
0, # system time in milliseconds
1, # target system
0, # target component
8, # coordinate frame MAV_FRAME_BODY_NED
5571, # type mask (pos only)
0, 0, z_m, # position x,y,z
0, 0, 0, # velocity x,y,z
0, 0, 0, # accel x,y,z
0, 0) # yaw, yaw rate
def takeoff_3(self,args):
self.type_mask = 5571
#self.type_mask = 3576
self.x_m = float(0)
self.y_m = float(0)
self.z_m = float(1.5)
self.v_x = float(0)
self.v_y = float(0)
self.v_z = float(0)
#self.cmd_position([1,1,1])
def music(self,args):
self.master.mav.command_long_send(
self.settings.target_system, # target_system
1, # target_component
0, # command
1, # confirmation
0, # param1
0, # param2 (Speed value)
0, # param3
0, # param4
0, # param5
0, # param6
0) # param7
print self.settings.target_system
print mavutil.mavlink.MAV_COMP_ID_SYSTEM_CONTROL
def land_2(self,args):
self.type_mask = 9671
self.v_x = float(0)
self.v_y = float(0)
self.v_z = float(0)
#def h(self,args):
# self.type_mask = 1479
# self.v_x = float(0)
# self.v_y = float(0)
# self.v_z = float(0)
def x(self,args):
#print self.master.flightmode
self.type_mask = 1479
if self.master.flightmode == "POSCTL":
#print self.master
self.v_x = float(args[0])*0.5
elif self.master.flightmode == "ALTCTL":
#print self.master
self.v_x = float(args[0])*1
elif self.master.flightmode == "MANUAL":
#print self.master
self.v_x = float(args[0])*1
#self.v_z = -4
self.button = 1
def y(self,args):
self.type_mask = 1479
if self.master.flightmode == "POSCTL":
self.v_y = float(args[0])*0.5
elif self.master.flightmode == "ALTCTL":
self.v_y = float(args[0])*1
elif self.master.flightmode == "MANUAL":
self.v_y = float(args[0])*1
#self.v_z = -4
self.button = 1
def z(self,args):
self.type_mask = 1479
#self.v_z = float(args[0])
if self.master.flightmode == "POSCTL":
self.v_z = self.v_z + int(args[0])
elif self.master.flightmode == "ALTCTL":
self.v_z = self.v_z + int(args[0])
elif self.master.flightmode == "MANUAL":
self.v_z = self.v_z + int(args[0])*0.1
self.button = 1
def yaw(self,args):
self.type_mask = 1479
#self.yaw_rate = float(float(args[0])*(math.pi/6.0))
self.yaw_rate = float(args[0])*1.5
self.button = 1
#time.sleep(0.5)
#self.yaw_rate = float(0)
def h(self,args):
self.type_mask = 1479
self.v_x = float(0)
self.v_y = float(0)
if self.master.flightmode == "POSCTL":
self.v_z = float(args[0])
elif self.master.flightmode == "ALTCTL":
self.v_z = float(args[0])
elif self.master.flightmode == "MANUAL":
pass
self.yaw_rate = float(0)
self.button = 0
def fly(self,args):
self.type_mask = 1479
self.v_x = float(1)
time.sleep(2)
self.v_x = float(0)
self.v_y = float(1)
time.sleep(2)
self.v_y = float(0)
self.v_x = float(-1)
time.sleep(2)
self.v_x = float(0)
self.v_y = float(-1)
time.sleep(2)
self.v_y = float(0)
def start_position_thread(self,args):
thread_obj = threading.Thread(target=self._cmd_position_2)
thread_obj.setDaemon(True)
thread_obj.start()
#pass
def start_offboard_thread(self,args):
thread_obj = threading.Thread(target=self._cmd_position_2_offboard)
thread_obj.start()
def _cmd_position_2_offboard(self):
'''position x-m y-m z-m'''
#if (len(args) != 3):
# print("Usage: position x y z (meters)")
# return
#if (len(args) == 3):
self.type_mask = 17863
self.x_m = float(0)
self.y_m = float(0)
self.z_m = float(0)
self.v_x = float(0)
self.v_y = float(0)
self.v_z = float(0)
self.yaw_rate = float(0)
#print("x:%f, y:%f, z:%f" % (x_m, y_m, z_m))
while 1:
time.sleep(0.05)
#print "type_mask:%s\n" % self.type_mask
#print "v_x:%s\n" % self.v_x
#print "v_y:%s\n" % self.v_y
#print "v_z:%s\n" % self.v_z
#print "z_m:%s\n" % self.z_m
#print "send idle"
self.master.mav.set_position_target_local_ned_send(
0, # system time in milliseconds
1, # target system
0, # target component
8, # coordinate frame MAV_FRAME_BODY_NED
self.type_mask, # type mask (pos only) 42707
self.x_m, self.y_m, self.z_m, # position x,y,z
self.v_x, self.v_y, self.v_z, # velocity x,y,z
0, 0, 0, # accel x,y,z
0, self.yaw_rate) # yaw, yaw rate
def _cmd_position_2(self):
print "position2"
'''position x-m y-m z-m'''
#if (len(args) != 3):
# print("Usage: position x y z (meters)")
# return
#if (len(args) == 3):
#self.type_mask = 17863
#self.x_m = float(0)
#self.y_m = float(0)
#self.z_m = float(0)
self.v_x = 0
self.v_y = 0
self.v_z = 0
self.yaw_rate = 0
self.button = 0
#print("x:%f, y:%f, z:%f" % (x_m, y_m, z_m))
i = 0
while 1:
time.sleep(0.05)
#print "type_mask:%s\n" % self.type_mask
#print "v_x:%s\n" % self.v_x
#print "v_y:%s\n" % self.v_y
#print "v_z:%s\n" % self.v_z
#print "z_m:%s\n" % self.z_m
#print "send idle"
self.master.mav.manual_control_send(self.master.target_system,
self.v_x, self.v_y,
self.v_z, self.yaw_rate,
self.button)
i = i + 1
if 0:
#if i == 100:
print "x",(int(self.v_x))
print "y",(int(self.v_y))
print "z",(int(self.v_z))
print "yaw",(int(self.yaw_rate))
print "dis_diff",(self.dis_diff)
print "x_diff",(self.x_diff)
print "y_diff",(self.y_diff)
print "button",self.button
print "target",(self.master.target_system)
i = 0
def cmd_position3(self, args):
'''position x-m y-m z-m'''
if (len(args) != 3):
print("Usage: position x y z (meters)")
return
if (len(args) == 3):
x_m = float(args[0])
y_m = float(args[1])
z_m = float(args[2])
print("x:%f, y:%f, z:%f" % (x_m, y_m, z_m))
self.master.mav.set_position_target_local_ned_send(
0, # system time in milliseconds
1, # target system
0, # target component
8, # coordinate frame MAV_FRAME_BODY_NED
1479, # type mask (pos only)
0, 0, 0,# position x,y,z
x_m, y_m, z_m, # velocity x,y,z
0, 0, 0, # accel x,y,z
0, 0) # yaw, yaw rate
def cmd_position(self, args):
'''position x-m y-m z-m'''
if (len(args) != 3):
print("Usage: position x y z (meters)")
return
if (len(args) == 3):
x_m = float(args[0])
y_m = float(args[1])
z_m = float(args[2])
print("x:%f, y:%f, z:%f" % (x_m, y_m, z_m))
self.master.mav.set_position_target_local_ned_send(
0, # system time in milliseconds
1, # target system
0, # target component
8, # coordinate frame MAV_FRAME_BODY_NED
3576, # type mask (pos only)
x_m, y_m, z_m, # position x,y,z
0, 0, 0, # velocity x,y,z
0, 0, 0, # accel x,y,z
0, 0) # yaw, yaw rate
def cmd_attitude(self, args):
'''attitude q0 q1 q2 q3 thrust'''
if len(args) != 5:
print("Usage: attitude q0 q1 q2 q3 thrust (0~1)")
return
if len(args) == 5:
q0 = float(args[0])
q1 = float(args[1])
q2 = float(args[2])
q3 = float(args[3])
thrust = float(args[4])
att_target = [q0, q1, q2, q3]
print("q0:%.3f, q1:%.3f, q2:%.3f q3:%.3f thrust:%.2f" % (q0, q1, q2, q3, thrust))
self.master.mav.set_attitude_target_send(
0, # system time in milliseconds
1, # target system
0, # target component
63, # type mask (ignore all except attitude + thrust)
att_target, # quaternion attitude
0, # body roll rate
0, # body pitch rate
0, # body yaw rate
thrust) # thrust
def cmd_posvel(self, args):
'''posvel mapclick vN vE vD'''
ignoremask = 511
latlon = None
try:
latlon = self.module('map').click_position
except Exception:
pass
if latlon is None:
print "set latlon to zeros"
latlon = [0, 0]
else:
ignoremask = ignoremask & 504
print "found latlon", ignoremask
vN = 0
vE = 0
vD = 0
if (len(args) == 3):
vN = float(args[0])
vE = float(args[1])
vD = float(args[2])
ignoremask = ignoremask & 455
print "ignoremask",ignoremask
print latlon
self.master.mav.set_position_target_global_int_send(
0, # system time in ms
1, # target system
0, # target component
mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT,
ignoremask, # ignore
int(latlon[0] * 1e7),
int(latlon[1] * 1e7),
10,
vN, vE, vD, # velocity
0, 0, 0, # accel x,y,z
0, 0) # yaw, yaw rate
def cmd_long(self, args):
'''execute supplied command long'''
if len(args) < 1:
print("Usage: long <command> [arg1] [arg2]...")
return
command = None
if args[0].isdigit():
command = int(args[0])
else:
try:
command = eval("mavutil.mavlink." + args[0])
except AttributeError as e:
try:
command = eval("mavutil.mavlink.MAV_CMD_" + args[0])
except AttributeError as e:
pass
if command is None:
print("Unknown command long ({0})".format(args[0]))
return
floating_args = [ float(x) for x in args[1:] ]
while len(floating_args) < 7:
floating_args.append(float(0))
self.master.mav.command_long_send(self.settings.target_system,
self.settings.target_component,
command,
0,
*floating_args)
def init(mpstate):
'''initialise module'''
return CmdlongModule(mpstate)
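# A small decoder sketch (not part of MAVProxy) for the POSITION_TARGET
# type_mask values used above. Per the MAVLink convention a set bit means
# "ignore this field"; bit 9 is really the force flag, treated uniformly
# here for brevity.
TYPEMASK_FIELDS = ['x', 'y', 'z', 'vx', 'vy', 'vz', 'ax', 'ay', 'az',
                   'force', 'yaw', 'yaw_rate']

def used_fields(mask):
    # A field is acted on when its ignore bit is clear.
    return [f for i, f in enumerate(TYPEMASK_FIELDS) if not mask & (1 << i)]

if __name__ == '__main__':
    print(used_fields(455))   # velocity, yaw and yaw rate, as in cmd_velocity
    print(used_fields(3576))  # position (plus the force bit), as in cmd_position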
| 36.367371
| 93
| 0.487333
|
'''command long'''
import threading
import time, os
import math
from pymavlink import mavutil
from MAVProxy.modules.lib import mp_module
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import matplotlib.pyplot as plt
from threading import Thread
import mpl_toolkits.mplot3d.axes3d as p3
import matplotlib.animation as animation
class CmdlongModule(mp_module.MPModule):
def __init__(self, mpstate):
super(CmdlongModule, self).__init__(mpstate, "cmdlong")
self.add_command('setspeed', self.cmd_do_change_speed, "do_change_speed")
self.add_command('setyaw', self.cmd_condition_yaw, "condition_yaw")
self.add_command('offboard', self.offboard_mode, "offboard")
self.add_command('p_mode', self.position_mode, "p_mode")
self.add_command('m_mode', self.manual_mode, "m_mode")
self.add_command('a_mode', self.altitude_mode, "a_mode")
self.add_command('takeoff2', self.cmd_takeoff_2, "takeoff2")
self.add_command('takeoff3', self.takeoff_3, "takeoff3")
self.add_command('music',self.music,"music")
self.add_command('land2', self.land_2, "land2")
self.add_command('fly', self.fly, "fly")
self.add_command('x', self.x, "x")
self.add_command('y', self.y, "y")
self.add_command('z', self.z, "z")
self.add_command('h', self.h, "h")
self.add_command('yaw', self.yaw, "yaw")
self.add_command('takeoff', self.cmd_takeoff, "takeoff")
self.add_command('velocity', self.cmd_velocity, "velocity")
self.add_command('position', self.cmd_position, "position")
self.add_command('st', self.start_position_thread, "start_position_thread")
self.add_command('attitude', self.cmd_attitude, "attitude")
self.add_command('cammsg', self.cmd_cammsg, "cammsg")
self.add_command('camctrlmsg', self.cmd_camctrlmsg, "camctrlmsg")
self.add_command('posvel', self.cmd_posvel, "posvel")
self.add_command('parachute', self.cmd_parachute, "parachute",
['<enable|disable|release>'])
self.add_command('long', self.cmd_long, "execute mavlink long command",
self.cmd_long_commands())
self.dis_max = 0
self.dis_min = 100
self.dis_diff = self.dis_max - self.dis_min
self.svo_x_max = 0
self.svo_x_min = 0
self.svo_y_max = 0
self.svo_y_min = 0
self.x_diff = self.svo_x_max - self.svo_x_min
self.y_diff = self.svo_y_max - self.svo_y_min
self.list_x = []
self.list_y = []
self.list_z = []
self.svo_x = 0
self.svo_y = 0
self.svo_z = 0
def cmd_long_commands(self):
atts = dir(mavutil.mavlink)
atts = filter( lambda x : x.lower().startswith("mav_cmd"), atts)
ret = []
for att in atts:
ret.append(att)
ret.append(str(att[8:]))
return ret
def cmd_takeoff(self, args):
'''take off'''
if ( len(args) != 1):
print("Usage: takeoff ALTITUDE_IN_METERS")
return
if (len(args) == 1):
altitude = float(args[0])
print("Take Off started")
self.master.mav.command_long_send(
self.settings.target_system,
mavutil.mavlink.MAV_COMP_ID_SYSTEM_CONTROL,
mavutil.mavlink.MAV_CMD_NAV_TAKEOFF,
0,
0,
0,
0,
0,
0,
0,
altitude)
def cmd_parachute(self, args):
'''parachute control'''
usage = "Usage: parachute <enable|disable|release>"
if len(args) != 1:
print(usage)
return
cmds = {
'enable' : mavutil.mavlink.PARACHUTE_ENABLE,
'disable' : mavutil.mavlink.PARACHUTE_DISABLE,
'release' : mavutil.mavlink.PARACHUTE_RELEASE
}
if not args[0] in cmds:
print(usage)
return
cmd = cmds[args[0]]
self.master.mav.command_long_send(
self.settings.target_system,
0,
mavutil.mavlink.MAV_CMD_DO_PARACHUTE,
0,
cmd,
0, 0, 0, 0, 0, 0)
def cmd_camctrlmsg(self, args):
'''camctrlmsg'''
print("Sent DIGICAM_CONFIGURE CMD_LONG")
self.master.mav.command_long_send(
self.settings.target_system,
0,
mavutil.mavlink.MAV_CMD_DO_DIGICAM_CONFIGURE,
0,
10,
20,
30,
40,
50,
60,
70)
def cmd_cammsg(self, args):
'''cammsg'''
print("Sent DIGICAM_CONTROL CMD_LONG")
self.master.mav.command_long_send(
self.settings.target_system,
0,
mavutil.mavlink.MAV_CMD_DO_DIGICAM_CONTROL,
0,
10,
20,
30,
40,
50,
60,
70)
def cmd_do_change_speed(self, args):
'''speed value'''
if ( len(args) != 1):
print("Usage: speed SPEED_VALUE")
return
if (len(args) == 1):
speed = float(args[0])
print("SPEED %s" % (str(speed)))
self.master.mav.command_long_send(
self.settings.target_system,
mavutil.mavlink.MAV_COMP_ID_SYSTEM_CONTROL,
mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED,
0,
0,
speed,
0,
0,
0,
0,
0)
def cmd_condition_yaw(self, args):
'''yaw angle angular_speed angle_mode'''
if ( len(args) != 3):
print("Usage: yaw ANGLE ANGULAR_SPEED MODE:[0 absolute / 1 relative]")
return
if (len(args) == 3):
angle = float(args[0])
angular_speed = float(args[1])
angle_mode = float(args[2])
print("ANGLE %s" % (str(angle)))
self.master.mav.command_long_send(
self.settings.target_system,
mavutil.mavlink.MAV_COMP_ID_SYSTEM_CONTROL,
mavutil.mavlink.MAV_CMD_CONDITION_YAW,
0,
angle,
angular_speed,
0,
angle_mode,
0,
0,
0)
def cmd_velocity(self, args):
'''velocity x-ms y-ms z-ms'''
if (len(args) != 3):
print("Usage: velocity x y z (m/s)")
return
if (len(args) == 3):
x_mps = float(args[0])
y_mps = float(args[1])
z_mps = float(args[2])
print("x:%f, y:%f, z:%f" % (x_mps, y_mps, z_mps))
self.master.mav.set_position_target_local_ned_send(
0,
1,
0,
8,
455,
0, 0, 0,
x_mps, y_mps, z_mps,
0, 0, 0,
0, 0)
def mavlink_packet(self, msg):
type = msg.get_type()
if type == 'DISTANCE_SENSOR':
if self.dis_max < msg.current_distance:
self.dis_max = msg.current_distance
if self.dis_min > msg.current_distance:
self.dis_min = msg.current_distance
self.dis_diff = self.dis_max - self.dis_min
if type == 'SVO_POSITION_RAW':
if self.svo_x_max < msg.position_x:
self.svo_x_max = msg.position_x
if self.svo_x_min > msg.position_x:
self.svo_x_min = msg.position_x
if self.svo_y_max < msg.position_y:
self.svo_y_max = msg.position_y
if self.svo_y_min > msg.position_y:
self.svo_y_min = msg.position_y
self.x_diff = self.svo_x_max - self.svo_x_min
self.y_diff = self.svo_y_max - self.svo_y_min
elif type == 'LOCAL_POSITION_NED':
self.console.set_status('position_ned_x','position_x %s' % msg.x)
self.svo_x = msg.x
self.svo_y = msg.y
self.svo_z = msg.z
def show_svo_2d(self):
fig = plt.figure()
self.ax = fig.add_subplot(1, 1, 1)
num = 0
self.ax.set_xlabel('X')
self.ax.set_ylabel('Y')
self.ax.set_title('2D Test')
self.ax.set_xlim([-1, 1])
self.ax.set_ylim([-1, 1])
self.num = 0
self.lineData, = self.ax.plot([],[])
line_ani = animation.FuncAnimation(fig, self.update_lines_2d,self.Gen_RandLine_2d,
interval=100, blit=False)
plt.show()
def show_svo(self):
fig = plt.figure()
self.ax = fig.add_subplot(1, 1, 1, projection="3d")
num = 0
self.ax.set_xlabel('X')
num = 0
self.ax.set_xlabel('X')
self.ax.set_xlim3d([-1.0, 1.0])
self.ax.set_ylabel('Y')
self.ax.set_ylim3d([-1.0, 1.0])
self.ax.set_zlabel('Z')
self.ax.set_zlim3d([-1.0, 1.0])
self.ax.set_title('3D Test')
self.num = 0
self.lineData = self.ax.scatter([1], [1], [1], c = 'b', marker = '.')
line_ani = animation.FuncAnimation(fig, self.update_lines,self.Gen_RandLine,
interval=10, blit=False)
plt.show()
def data_stream(self):
pass
def Gen_RandLine_2d(self):
if len(self.list_x)<200:
self.list_x.append(self.svo_x)
self.list_y.append(self.svo_y)
self.list_z.append(self.svo_z)
else:
self.list_x.append(self.svo_x)
self.list_x = self.list_x[1:]
self.list_y.append(self.svo_y)
self.list_y = self.list_y[1:]
self.list_z.append(self.svo_z)
self.list_z = self.list_z[1:]
lineData = [self.list_x,self.list_y]
yield lineData
def update_lines_2d(self,data):
self.lineData.set_xdata(data[0])
self.lineData.set_ydata(data[1])
self.num = self.num + 1
if self.num == 100:
self.num = 0
self.ax.set_xlim(min(data[0])-1, max(data[0])+1)
self.ax.set_ylim(min(data[1])-1, max(data[1])+1)
return self.lineData,
def Gen_RandLine(self):
'''
if len(self.list_x)<70:
self.list_x.append(self.svo_x)
self.list_y.append(self.svo_y)
self.list_z.append(self.svo_z)
else:
self.list_x.append(self.svo_x)
self.list_x = self.list_x[1:]
self.list_y.append(self.svo_y)
self.list_y = self.list_y[1:]
self.list_z.append(self.svo_z)
self.list_z = self.list_z[1:]
'''
list_x = self.svo_x
list_y = self.svo_y
list_z = self.svo_z
lineData = [[list_x],[list_y],[list_z]]
yield lineData
def update_lines(self,data):
self.lineData.set_offsets([(data[0], data[1])])
self.lineData.set_3d_properties([data[2]], "z")
self.num = self.num + 1
if self.num == 200:
self.num = 0
self.ax.set_xlabel('X')
self.ax.set_ylabel('Y')
self.ax.set_zlabel('Z')
self.ax.set_title('3D Test')
print "xdiff",self.x_diff
print "ydiff",self.y_diff
return self.lineData
def position_mode(self,args):
print "position mode!!!!!!!!!!!!!!!!!"
self.list_x = []
self.list_y = []
self.list_z = []
time.sleep(0.5)
self.master.set_mode(129,3,0)
self.dis_max = 0
self.dis_min = 100
self.svo_x_max = 0
self.svo_x_min = 0
self.svo_y_max = 0
self.svo_y_min = 0
self.x_diff = self.svo_x_max - self.svo_x_min
self.y_diff = self.svo_y_max - self.svo_y_min
def manual_mode(self,args):
print "manual mode!!!!!!!!!!!!!!!!!"
print self.master.__class__
self.master.set_mode(129,1,0)
self.v_z = float(args[0])
def altitude_mode(self,args):
print "altitude mode!!!!!!!!!!!!!!!!!"
self.master.set_mode(129,2,0)
def offboard_mode(self,args):
print "offboard!!!!!!!!!!!!!!!!!"
self.start_offboard_thread(1)
time.sleep(0.5)
self.master.set_mode(221,6,0)
def cmd_takeoff_2(self, args):
'''position z-m'''
if (len(args) != 1):
print("Usage: position z (meters)")
return
if (len(args) == 1):
z_m = float(args[0])
self.master.mav.set_position_target_local_ned_send(
0,
1,
0,
8,
5571,
0, 0, z_m,
0, 0, 0,
0, 0, 0,
0, 0)
def takeoff_3(self,args):
self.type_mask = 5571
self.x_m = float(0)
self.y_m = float(0)
self.z_m = float(1.5)
self.v_x = float(0)
self.v_y = float(0)
self.v_z = float(0)
def music(self,args):
self.master.mav.command_long_send(
self.settings.target_system,
1,
0,
1,
0,
0,
0,
0,
0,
0,
0)
print self.settings.target_system
print mavutil.mavlink.MAV_COMP_ID_SYSTEM_CONTROL
def land_2(self,args):
self.type_mask = 9671
self.v_x = float(0)
self.v_y = float(0)
self.v_z = float(0)
def x(self,args):
self.type_mask = 1479
if self.master.flightmode == "POSCTL":
self.v_x = float(args[0])*0.5
elif self.master.flightmode == "ALTCTL":
self.v_x = float(args[0])*1
elif self.master.flightmode == "MANUAL":
self.v_x = float(args[0])*1
self.button = 1
def y(self,args):
self.type_mask = 1479
if self.master.flightmode == "POSCTL":
self.v_y = float(args[0])*0.5
elif self.master.flightmode == "ALTCTL":
self.v_y = float(args[0])*1
elif self.master.flightmode == "MANUAL":
self.v_y = float(args[0])*1
self.button = 1
def z(self,args):
self.type_mask = 1479
if self.master.flightmode == "POSCTL":
self.v_z = self.v_z + int(args[0])
elif self.master.flightmode == "ALTCTL":
self.v_z = self.v_z + int(args[0])
elif self.master.flightmode == "MANUAL":
self.v_z = self.v_z + int(args[0])*0.1
self.button = 1
def yaw(self,args):
self.type_mask = 1479
self.yaw_rate = float(args[0])*1.5
self.button = 1
def h(self,args):
self.type_mask = 1479
self.v_x = float(0)
self.v_y = float(0)
if self.master.flightmode == "POSCTL":
self.v_z = float(args[0])
elif self.master.flightmode == "ALTCTL":
self.v_z = float(args[0])
elif self.master.flightmode == "MANUAL":
pass
self.yaw_rate = float(0)
self.button = 0
def fly(self,args):
self.type_mask = 1479
self.v_x = float(1)
time.sleep(2)
self.v_x = float(0)
self.v_y = float(1)
time.sleep(2)
self.v_y = float(0)
self.v_x = float(-1)
time.sleep(2)
self.v_x = float(0)
self.v_y = float(-1)
time.sleep(2)
self.v_y = float(0)
def start_position_thread(self,args):
thread_obj = threading.Thread(target=self._cmd_position_2)
thread_obj.setDaemon(True)
thread_obj.start()
def start_offboard_thread(self,args):
thread_obj = threading.Thread(target=self._cmd_position_2_offboard)
thread_obj.start()
def _cmd_position_2_offboard(self):
'''position x-m y-m z-m'''
self.type_mask = 17863
self.x_m = float(0)
self.y_m = float(0)
self.z_m = float(0)
self.v_x = float(0)
self.v_y = float(0)
self.v_z = float(0)
self.yaw_rate = float(0)
while 1:
time.sleep(0.05)
self.master.mav.set_position_target_local_ned_send(
0,
1,
0,
8,
self.type_mask,
self.x_m, self.y_m, self.z_m,
self.v_x, self.v_y, self.v_z,
0, 0, 0,
0, self.yaw_rate)
    def _cmd_position_2(self):
        '''position x-m y-m z-m'''
        print("position2")
self.v_x = 0
self.v_y = 0
self.v_z = 0
self.yaw_rate = 0
self.button = 0
i = 0
while 1:
time.sleep(0.05)
self.master.mav.manual_control_send(self.master.target_system,
self.v_x, self.v_y,
self.v_z, self.yaw_rate,
self.button)
i = i + 1
            if False:  # debug printout, disabled by default
                print("x", int(self.v_x))
                print("y", int(self.v_y))
                print("z", int(self.v_z))
                print("yaw", int(self.yaw_rate))
                print("dis_diff", self.dis_diff)
                print("x_diff", self.x_diff)
                print("y_diff", self.y_diff)
                print("button", self.button)
                print("target", self.master.target_system)
                i = 0
def cmd_position3(self, args):
'''position x-m y-m z-m'''
if (len(args) != 3):
print("Usage: position x y z (meters)")
return
if (len(args) == 3):
x_m = float(args[0])
y_m = float(args[1])
z_m = float(args[2])
print("x:%f, y:%f, z:%f" % (x_m, y_m, z_m))
self.master.mav.set_position_target_local_ned_send(
0,
1,
0,
8,
1479,
0, 0, 0,
x_m, y_m, z_m,
0, 0, 0,
0, 0)
def cmd_position(self, args):
'''position x-m y-m z-m'''
if (len(args) != 3):
print("Usage: position x y z (meters)")
return
if (len(args) == 3):
x_m = float(args[0])
y_m = float(args[1])
z_m = float(args[2])
print("x:%f, y:%f, z:%f" % (x_m, y_m, z_m))
self.master.mav.set_position_target_local_ned_send(
0,
1,
0,
8,
3576,
x_m, y_m, z_m,
0, 0, 0,
0, 0, 0,
0, 0)
def cmd_attitude(self, args):
'''attitude q0 q1 q2 q3 thrust'''
if len(args) != 5:
print("Usage: attitude q0 q1 q2 q3 thrust (0~1)")
return
if len(args) == 5:
q0 = float(args[0])
q1 = float(args[1])
q2 = float(args[2])
q3 = float(args[3])
thrust = float(args[4])
att_target = [q0, q1, q2, q3]
print("q0:%.3f, q1:%.3f, q2:%.3f q3:%.3f thrust:%.2f" % (q0, q1, q2, q3, thrust))
self.master.mav.set_attitude_target_send(
0,
1,
0,
63,
att_target,
0,
0,
0,
thrust)
def cmd_posvel(self, args):
'''posvel mapclick vN vE vD'''
ignoremask = 511
latlon = None
try:
latlon = self.module('map').click_position
except Exception:
pass
if latlon is None:
print "set latlon to zeros"
latlon = [0, 0]
else:
ignoremask = ignoremask & 504
print "found latlon", ignoremask
vN = 0
vE = 0
vD = 0
if (len(args) == 3):
vN = float(args[0])
vE = float(args[1])
vD = float(args[2])
ignoremask = ignoremask & 455
print "ignoremask",ignoremask
print latlon
self.master.mav.set_position_target_global_int_send(
0,
1,
0,
mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT,
ignoremask,
int(latlon[0] * 1e7),
int(latlon[1] * 1e7),
10,
vN, vE, vD,
0, 0, 0,
0, 0)
def cmd_long(self, args):
'''execute supplied command long'''
if len(args) < 1:
print("Usage: long <command> [arg1] [arg2]...")
return
command = None
if args[0].isdigit():
command = int(args[0])
else:
            try:
                command = getattr(mavutil.mavlink, args[0])
            except AttributeError:
                try:
                    command = getattr(mavutil.mavlink, "MAV_CMD_" + args[0])
                except AttributeError:
                    pass
if command is None:
print("Unknown command long ({0})".format(args[0]))
return
        floating_args = [float(x) for x in args[1:]]
while len(floating_args) < 7:
floating_args.append(float(0))
self.master.mav.command_long_send(self.settings.target_system,
self.settings.target_component,
command,
0,
*floating_args)
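# Usage sketch for cmd_long (not part of the module): the command may be given
# by name or opcode ("long COMPONENT_ARM_DISARM 1" and "long 400 1" resolve to
# the same thing), and the parameters are zero-padded to the seven floats that
# COMMAND_LONG requires:
def pad_params(args, n=7):
    params = [float(x) for x in args]
    return (params + [0.0] * n)[:n]

# pad_params(["1"]) -> [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]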
def init(mpstate):
'''initialise module'''
return CmdlongModule(mpstate)
| false
| true
|
7908ec075a8b14b62076c5abbb6d278914aac0b6
| 1,743
|
py
|
Python
|
apps/jd_app/models.py
|
fengjinqi/linjuanbang
|
8cdc4e81df73ccd737ac547da7f2c7dca545862a
|
[
"MIT"
] | 5
|
2019-10-30T01:16:30.000Z
|
2020-06-14T03:32:19.000Z
|
apps/jd_app/models.py
|
fengjinqi/linjuanbang
|
8cdc4e81df73ccd737ac547da7f2c7dca545862a
|
[
"MIT"
] | 2
|
2020-10-12T07:12:48.000Z
|
2021-06-02T03:15:47.000Z
|
apps/jd_app/models.py
|
fengjinqi/linjuanbang
|
8cdc4e81df73ccd737ac547da7f2c7dca545862a
|
[
"MIT"
] | 3
|
2019-12-06T17:33:49.000Z
|
2021-03-01T13:24:22.000Z
|
from datetime import datetime
from django.db import models
# Create your models here.
class JD(models.Model):
appkey = models.CharField(max_length=100,verbose_name='appkey')
secret = models.CharField(max_length=100,verbose_name='secret')
add_time = models.DateTimeField(default=datetime.now,verbose_name='添加时间')
def __str__(self):
return self.appkey
class Meta:
verbose_name = '配置'
verbose_name_plural = verbose_name
"""
1-好券商品,
2-超级大卖场,
10-9.9专区,
22-热销爆品,
24-数码家电,
25-超市,
26-母婴玩具,
27-家具日用,
28-美妆穿搭,
29-医药保健,
30-图书文具,
31-今日必推,
32-王牌好货
"""
class Category(models.Model):
CHOOSE = (
('1','导航'),
('2','九宫格'),
)
pid = models.CharField(max_length=10,verbose_name='分类id')
name = models.CharField(max_length=20,verbose_name='分类名')
sort = models.IntegerField(verbose_name='排序',default=0)
type = models.CharField(max_length=10,choices=CHOOSE,verbose_name='显示',default='1')
add_time = models.DateTimeField(default=datetime.now,verbose_name='添加时间')
def __str__(self):
return self.name
class Meta:
verbose_name = '类别'
verbose_name_plural = verbose_name
class Banner(models.Model):
title = models.CharField(max_length=100,verbose_name='活动名称')
url = models.TextField(verbose_name='跳转地址')
img = models.URLField(verbose_name='图片地址',default='')
start_time = models.DateField(default=datetime.now,verbose_name='活动开始时间')
end_time = models.DateField(default=datetime.now,verbose_name='活动结束时间')
add_time = models.DateTimeField(default=datetime.now, verbose_name='添加时间')
def __str__(self):
return self.title
class Meta:
verbose_name = '活动'
verbose_name_plural = verbose_name
| 24.9
| 87
| 0.690189
|
from datetime import datetime
from django.db import models
class JD(models.Model):
appkey = models.CharField(max_length=100,verbose_name='appkey')
secret = models.CharField(max_length=100,verbose_name='secret')
add_time = models.DateTimeField(default=datetime.now,verbose_name='添加时间')
def __str__(self):
return self.appkey
class Meta:
verbose_name = '配置'
verbose_name_plural = verbose_name
class Category(models.Model):
CHOOSE = (
('1','导航'),
('2','九宫格'),
)
pid = models.CharField(max_length=10,verbose_name='分类id')
name = models.CharField(max_length=20,verbose_name='分类名')
sort = models.IntegerField(verbose_name='排序',default=0)
type = models.CharField(max_length=10,choices=CHOOSE,verbose_name='显示',default='1')
add_time = models.DateTimeField(default=datetime.now,verbose_name='添加时间')
def __str__(self):
return self.name
class Meta:
verbose_name = '类别'
verbose_name_plural = verbose_name
class Banner(models.Model):
title = models.CharField(max_length=100,verbose_name='活动名称')
url = models.TextField(verbose_name='跳转地址')
img = models.URLField(verbose_name='图片地址',default='')
start_time = models.DateField(default=datetime.now,verbose_name='活动开始时间')
end_time = models.DateField(default=datetime.now,verbose_name='活动结束时间')
add_time = models.DateTimeField(default=datetime.now, verbose_name='添加时间')
def __str__(self):
return self.title
class Meta:
verbose_name = '活动'
verbose_name_plural = verbose_name
| true
| true
|
7908edc30bcf26d9931d6ebc78acb1c00e718f42
| 3,298
|
py
|
Python
|
src/commercetools/testing/auth.py
|
jeroenubbink/commercetools-python-sdk
|
ee27768d6fdde3e12618059891d1d4f75dd61390
|
[
"MIT"
] | 15
|
2018-11-02T14:35:52.000Z
|
2022-03-16T07:51:44.000Z
|
src/commercetools/testing/auth.py
|
jeroenubbink/commercetools-python-sdk
|
ee27768d6fdde3e12618059891d1d4f75dd61390
|
[
"MIT"
] | 84
|
2018-11-02T12:50:32.000Z
|
2022-03-22T01:25:54.000Z
|
src/commercetools/testing/auth.py
|
jeroenubbink/commercetools-python-sdk
|
ee27768d6fdde3e12618059891d1d4f75dd61390
|
[
"MIT"
] | 13
|
2019-01-03T09:16:50.000Z
|
2022-02-15T18:37:19.000Z
|
import base64
import typing
import uuid
from urllib.parse import parse_qs
from commercetools.testing.abstract import BaseBackend
from commercetools.testing.utils import create_commercetools_response
class AuthModel:
def __init__(self):
        self.tokens: typing.List[dict] = []  # token payloads are dicts, not strings
def add_token(self, token):
self.tokens.append(token)
def is_valid(self, client_id, client_secret):
return True
class AuthBackend(BaseBackend):
path_prefix = r"/oauth/(?P<path>.*)"
hostnames = ["auth.sphere.io", "localhost"]
model_class = AuthModel
def __init__(self, *args, **kwargs):
self._expire_time = 172800
super().__init__()
def set_expire_time(self, value):
self._expire_time = value
@property
def url_prefix(self):
return r"/oauth/(?P<path>.*)"
def urls(self):
return [("token", "POST", self.token), ("introspect", "POST", self.introspect)]
def _get_api_client_credentials(
self, request
) -> typing.Tuple[typing.Optional[str], typing.Optional[str]]:
params = parse_qs(request.body)
client_id = None
client_secret = None
if request.headers.get("Authorization"):
auth_type, auth_info = request.headers["Authorization"].split()
if auth_type == "Basic":
                client_id, client_secret = base64.b64decode(auth_info).decode("utf-8").split(":")
elif params.get("client_id") and params.get("client_secret"):
client_id = params.get("client_id")
client_secret = params.get("client_secret")
return client_id, client_secret
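    # Usage sketch (placeholder credentials): one way to build an
    # Authorization header that this parser accepts
    #   creds = base64.b64encode(b"client-id:client-secret").decode("ascii")
    #   request.headers["Authorization"] = "Basic " + creds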
def token(self, request):
client_id, client_secret = self._get_api_client_credentials(request)
if not client_id or not client_secret:
response = create_commercetools_response(request, status_code=401)
return response
if self.model.is_valid(client_id, client_secret):
params = parse_qs(request.body)
scope = params.get("scope", "manage_project:todo")
token = {
"access_token": str(uuid.uuid4()),
"expires_in": self._expire_time,
"scope": scope,
"token_type": "Bearer",
}
self.model.add_token(token)
response = create_commercetools_response(request, json=token)
return response
def introspect(self, request):
client_id, client_secret = self._get_api_client_credentials(request)
if not client_id or not client_secret:
response = create_commercetools_response(request, status_code=401)
return response
if self.model.is_valid(client_id, client_secret):
token = request.qs.get("token", [None])[0]
stored_tokens = [
token_object.get("access_token") for token_object in self.model.tokens
]
if token in stored_tokens:
status = {
"active": True,
"scope": "manage_project:todo",
"exp": self._expire_time,
}
else:
status = {"active": False}
response = create_commercetools_response(request, json=status)
return response
| 34
| 87
| 0.61037
|
import base64
import typing
import uuid
from urllib.parse import parse_qs
from commercetools.testing.abstract import BaseBackend
from commercetools.testing.utils import create_commercetools_response
class AuthModel:
def __init__(self):
        self.tokens: typing.List[dict] = []
def add_token(self, token):
self.tokens.append(token)
def is_valid(self, client_id, client_secret):
return True
class AuthBackend(BaseBackend):
path_prefix = r"/oauth/(?P<path>.*)"
hostnames = ["auth.sphere.io", "localhost"]
model_class = AuthModel
def __init__(self, *args, **kwargs):
self._expire_time = 172800
super().__init__()
def set_expire_time(self, value):
self._expire_time = value
@property
def url_prefix(self):
return r"/oauth/(?P<path>.*)"
def urls(self):
return [("token", "POST", self.token), ("introspect", "POST", self.introspect)]
def _get_api_client_credentials(
self, request
) -> typing.Tuple[typing.Optional[str], typing.Optional[str]]:
params = parse_qs(request.body)
client_id = None
client_secret = None
if request.headers.get("Authorization"):
auth_type, auth_info = request.headers["Authorization"].split()
if auth_type == "Basic":
                client_id, client_secret = base64.b64decode(auth_info).decode("utf-8").split(":")
elif params.get("client_id") and params.get("client_secret"):
client_id = params.get("client_id")
client_secret = params.get("client_secret")
return client_id, client_secret
def token(self, request):
client_id, client_secret = self._get_api_client_credentials(request)
if not client_id or not client_secret:
response = create_commercetools_response(request, status_code=401)
return response
if self.model.is_valid(client_id, client_secret):
params = parse_qs(request.body)
scope = params.get("scope", "manage_project:todo")
token = {
"access_token": str(uuid.uuid4()),
"expires_in": self._expire_time,
"scope": scope,
"token_type": "Bearer",
}
self.model.add_token(token)
response = create_commercetools_response(request, json=token)
return response
def introspect(self, request):
client_id, client_secret = self._get_api_client_credentials(request)
if not client_id or not client_secret:
response = create_commercetools_response(request, status_code=401)
return response
if self.model.is_valid(client_id, client_secret):
token = request.qs.get("token", [None])[0]
stored_tokens = [
token_object.get("access_token") for token_object in self.model.tokens
]
if token in stored_tokens:
status = {
"active": True,
"scope": "manage_project:todo",
"exp": self._expire_time,
}
else:
status = {"active": False}
response = create_commercetools_response(request, json=status)
return response
| true
| true
|
7908ee6a8bd2987044d87e1114158c744b4cf8f0
| 616
|
py
|
Python
|
partners/urls.py
|
IndraTeja/uno-cpi
|
b9e635d4273613602b6c57a0c70d82d3df5c77ca
|
[
"MIT"
] | null | null | null |
partners/urls.py
|
IndraTeja/uno-cpi
|
b9e635d4273613602b6c57a0c70d82d3df5c77ca
|
[
"MIT"
] | 7
|
2020-02-11T23:38:26.000Z
|
2022-01-13T01:02:45.000Z
|
partners/urls.py
|
Goutham2591/mav-cpi
|
3f7985d2a7bf57c08a1ff65bc14a8e60c3d0464b
|
[
"MIT"
] | null | null | null |
from django.urls import path
from . import views
app_name = 'partners'
urlpatterns = [
path('registerCampusPartner/', views.registerCampusPartner, name='registerCampusPartner'),
path('registerCommunityPartner/', views.registerCommunityPartner, name='registerCommunityPartner'),
path('profile/userprofile/', views.userProfile, name='userprofile'),
path('profile/userprofileupdate/', views.userProfileUpdate,name='userprofileupdate'),
path('profile/orgprofile/', views.orgProfile, name='orgprofile'),
path('profile/orgprofileupdate/', views.orgProfileUpdate, name='orgprofileupdate'),
]
| 47.384615
| 104
| 0.761364
|
from django.urls import path
from . import views
app_name = 'partners'
urlpatterns = [
path('registerCampusPartner/', views.registerCampusPartner, name='registerCampusPartner'),
path('registerCommunityPartner/', views.registerCommunityPartner, name='registerCommunityPartner'),
path('profile/userprofile/', views.userProfile, name='userprofile'),
path('profile/userprofileupdate/', views.userProfileUpdate,name='userprofileupdate'),
path('profile/orgprofile/', views.orgProfile, name='orgprofile'),
path('profile/orgprofileupdate/', views.orgProfileUpdate, name='orgprofileupdate'),
]
| true
| true
|
7908eea552959b49b7b10164847eb7dc74028999
| 5,430
|
py
|
Python
|
example/usermanagement/serve_flask_marshmallow.py
|
raphj/hapic
|
b169ee901005bbe535e27ec878a051c2c1226e43
|
[
"MIT"
] | 20
|
2017-10-13T11:23:33.000Z
|
2021-12-09T12:42:06.000Z
|
example/usermanagement/serve_flask_marshmallow.py
|
raphj/hapic
|
b169ee901005bbe535e27ec878a051c2c1226e43
|
[
"MIT"
] | 130
|
2017-10-10T15:09:13.000Z
|
2021-12-30T10:36:08.000Z
|
example/usermanagement/serve_flask_marshmallow.py
|
raphj/hapic
|
b169ee901005bbe535e27ec878a051c2c1226e43
|
[
"MIT"
] | 7
|
2017-10-17T07:24:42.000Z
|
2021-09-16T14:33:17.000Z
|
# -*- coding: utf-8 -*-
from datetime import datetime
import json
import time
import flask
from example.usermanagement.schema_marshmallow import AboutSchema
from example.usermanagement.schema_marshmallow import NoContentSchema
from example.usermanagement.schema_marshmallow import UserAvatarSchema
from example.usermanagement.schema_marshmallow import UserDigestSchema
from example.usermanagement.schema_marshmallow import UserIdPathSchema
from example.usermanagement.schema_marshmallow import UserSchema
from example.usermanagement.userlib import User
from example.usermanagement.userlib import UserAvatarNotFound
from example.usermanagement.userlib import UserLib
from example.usermanagement.userlib import UserNotFound
from hapic import Hapic
from hapic import MarshmallowProcessor
from hapic.data import HapicData
from hapic.data import HapicFile
from hapic.error.marshmallow import MarshmallowDefaultErrorBuilder
from hapic.ext.flask import FlaskContext
try: # Python 3.5+
from http import HTTPStatus
except ImportError:
from http import client as HTTPStatus
hapic = Hapic()
hapic.set_processor_class(MarshmallowProcessor)
class FlaskController(object):
@hapic.with_api_doc()
@hapic.output_body(AboutSchema())
def about(self):
"""
This endpoint allow to check that the API is running. This description
is generated from the docstring of the method.
"""
return {"version": "1.2.3", "datetime": datetime.now()}
@hapic.with_api_doc()
@hapic.output_body(UserDigestSchema(many=True))
def get_users(self):
"""
Obtain users list.
"""
return UserLib().get_users()
@hapic.with_api_doc()
@hapic.handle_exception(UserNotFound, HTTPStatus.NOT_FOUND)
@hapic.input_path(UserIdPathSchema())
@hapic.output_body(UserSchema())
def get_user(self, id, hapic_data: HapicData):
"""
Return a user taken from the list or return a 404
"""
return UserLib().get_user(int(hapic_data.path["id"]))
@hapic.with_api_doc()
# TODO - G.M - 2017-12-5 - Support input_forms ?
# TODO - G.M - 2017-12-5 - Support exclude, only ?
@hapic.input_body(UserSchema(exclude=("id",)))
@hapic.output_body(UserSchema())
def add_user(self, hapic_data: HapicData):
"""
Add a user to the list
"""
new_user = User(**hapic_data.body)
return UserLib().add_user(new_user)
@hapic.with_api_doc()
@hapic.handle_exception(UserNotFound, HTTPStatus.NOT_FOUND)
@hapic.output_body(NoContentSchema(), default_http_code=204)
@hapic.input_path(UserIdPathSchema())
def del_user(self, id, hapic_data: HapicData):
UserLib().del_user(int(hapic_data.path["id"]))
return NoContentSchema()
@hapic.with_api_doc()
@hapic.handle_exception(UserNotFound, HTTPStatus.NOT_FOUND)
@hapic.handle_exception(UserAvatarNotFound, HTTPStatus.NOT_FOUND)
@hapic.input_path(UserIdPathSchema())
@hapic.output_file(["image/png"])
def get_user_avatar(self, id, hapic_data: HapicData):
return HapicFile(
file_path=UserLib().get_user_avatar_path(user_id=(int(hapic_data.path["id"])))
)
@hapic.with_api_doc()
@hapic.handle_exception(UserNotFound, HTTPStatus.NOT_FOUND)
@hapic.handle_exception(UserAvatarNotFound, HTTPStatus.BAD_REQUEST)
@hapic.input_path(UserIdPathSchema())
@hapic.input_files(UserAvatarSchema())
@hapic.output_body(NoContentSchema(), default_http_code=204)
def update_user_avatar(self, id, hapic_data: HapicData):
UserLib().update_user_avatar(
user_id=int(hapic_data.path["id"]), avatar=hapic_data.files["avatar"]
)
def bind(self, app: flask.Flask):
app.add_url_rule("/about", view_func=self.about)
app.add_url_rule("/users/", view_func=self.get_users)
app.add_url_rule("/users/<id>", view_func=self.get_user)
app.add_url_rule("/users/", view_func=self.add_user, methods=["POST"])
app.add_url_rule("/users/<id>", view_func=self.del_user, methods=["DELETE"]) # nopep8
app.add_url_rule(
"/users/<id>/avatar", view_func=self.get_user_avatar, methods=["GET"]
) # nopep8
app.add_url_rule("/users/<id>/avatar", view_func=self.update_user_avatar, methods=["PUT"])
if __name__ == "__main__":
app = flask.Flask(__name__)
controllers = FlaskController()
controllers.bind(app)
hapic.set_context(FlaskContext(app, default_error_builder=MarshmallowDefaultErrorBuilder()))
print("")
print("")
print("GENERATING OPENAPI DOCUMENTATION")
doc_title = "Demo API documentation"
doc_description = (
"This documentation has been generated from "
"code. You can see it using swagger: "
"http://editor2.swagger.io/"
)
hapic.add_documentation_view("/doc/", doc_title, doc_description)
openapi_file_name = "api-documentation.json"
with open(openapi_file_name, "w") as openapi_file_handle:
openapi_file_handle.write(
json.dumps(hapic.generate_doc(title=doc_title, description=doc_description))
)
print("Documentation generated in {}".format(openapi_file_name))
time.sleep(1)
print("")
print("")
print("RUNNING FLASK SERVER NOW")
print("DOCUMENTATION AVAILABLE AT /doc/")
# Run app
app.run(host="127.0.0.1", port=8082, debug=True)
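# Smoke-test sketch (assumes the server above is running on 127.0.0.1:8082):
#   curl http://127.0.0.1:8082/about   # JSON rendered through AboutSchema
#   curl http://127.0.0.1:8082/doc/    # generated OpenAPI documentation view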
| 36.689189
| 98
| 0.707182
|
from datetime import datetime
import json
import time
import flask
from example.usermanagement.schema_marshmallow import AboutSchema
from example.usermanagement.schema_marshmallow import NoContentSchema
from example.usermanagement.schema_marshmallow import UserAvatarSchema
from example.usermanagement.schema_marshmallow import UserDigestSchema
from example.usermanagement.schema_marshmallow import UserIdPathSchema
from example.usermanagement.schema_marshmallow import UserSchema
from example.usermanagement.userlib import User
from example.usermanagement.userlib import UserAvatarNotFound
from example.usermanagement.userlib import UserLib
from example.usermanagement.userlib import UserNotFound
from hapic import Hapic
from hapic import MarshmallowProcessor
from hapic.data import HapicData
from hapic.data import HapicFile
from hapic.error.marshmallow import MarshmallowDefaultErrorBuilder
from hapic.ext.flask import FlaskContext
try:
from http import HTTPStatus
except ImportError:
from http import client as HTTPStatus
hapic = Hapic()
hapic.set_processor_class(MarshmallowProcessor)
class FlaskController(object):
@hapic.with_api_doc()
@hapic.output_body(AboutSchema())
def about(self):
return {"version": "1.2.3", "datetime": datetime.now()}
@hapic.with_api_doc()
@hapic.output_body(UserDigestSchema(many=True))
def get_users(self):
return UserLib().get_users()
@hapic.with_api_doc()
@hapic.handle_exception(UserNotFound, HTTPStatus.NOT_FOUND)
@hapic.input_path(UserIdPathSchema())
@hapic.output_body(UserSchema())
def get_user(self, id, hapic_data: HapicData):
return UserLib().get_user(int(hapic_data.path["id"]))
@hapic.with_api_doc()
@hapic.input_body(UserSchema(exclude=("id",)))
@hapic.output_body(UserSchema())
def add_user(self, hapic_data: HapicData):
new_user = User(**hapic_data.body)
return UserLib().add_user(new_user)
@hapic.with_api_doc()
@hapic.handle_exception(UserNotFound, HTTPStatus.NOT_FOUND)
@hapic.output_body(NoContentSchema(), default_http_code=204)
@hapic.input_path(UserIdPathSchema())
def del_user(self, id, hapic_data: HapicData):
UserLib().del_user(int(hapic_data.path["id"]))
return NoContentSchema()
@hapic.with_api_doc()
@hapic.handle_exception(UserNotFound, HTTPStatus.NOT_FOUND)
@hapic.handle_exception(UserAvatarNotFound, HTTPStatus.NOT_FOUND)
@hapic.input_path(UserIdPathSchema())
@hapic.output_file(["image/png"])
def get_user_avatar(self, id, hapic_data: HapicData):
return HapicFile(
file_path=UserLib().get_user_avatar_path(user_id=(int(hapic_data.path["id"])))
)
@hapic.with_api_doc()
@hapic.handle_exception(UserNotFound, HTTPStatus.NOT_FOUND)
@hapic.handle_exception(UserAvatarNotFound, HTTPStatus.BAD_REQUEST)
@hapic.input_path(UserIdPathSchema())
@hapic.input_files(UserAvatarSchema())
@hapic.output_body(NoContentSchema(), default_http_code=204)
def update_user_avatar(self, id, hapic_data: HapicData):
UserLib().update_user_avatar(
user_id=int(hapic_data.path["id"]), avatar=hapic_data.files["avatar"]
)
def bind(self, app: flask.Flask):
app.add_url_rule("/about", view_func=self.about)
app.add_url_rule("/users/", view_func=self.get_users)
app.add_url_rule("/users/<id>", view_func=self.get_user)
app.add_url_rule("/users/", view_func=self.add_user, methods=["POST"])
app.add_url_rule("/users/<id>", view_func=self.del_user, methods=["DELETE"])
app.add_url_rule(
"/users/<id>/avatar", view_func=self.get_user_avatar, methods=["GET"]
)
app.add_url_rule("/users/<id>/avatar", view_func=self.update_user_avatar, methods=["PUT"])
if __name__ == "__main__":
app = flask.Flask(__name__)
controllers = FlaskController()
controllers.bind(app)
hapic.set_context(FlaskContext(app, default_error_builder=MarshmallowDefaultErrorBuilder()))
print("")
print("")
print("GENERATING OPENAPI DOCUMENTATION")
doc_title = "Demo API documentation"
doc_description = (
"This documentation has been generated from "
"code. You can see it using swagger: "
"http://editor2.swagger.io/"
)
hapic.add_documentation_view("/doc/", doc_title, doc_description)
openapi_file_name = "api-documentation.json"
with open(openapi_file_name, "w") as openapi_file_handle:
openapi_file_handle.write(
json.dumps(hapic.generate_doc(title=doc_title, description=doc_description))
)
print("Documentation generated in {}".format(openapi_file_name))
time.sleep(1)
print("")
print("")
print("RUNNING FLASK SERVER NOW")
print("DOCUMENTATION AVAILABLE AT /doc/")
app.run(host="127.0.0.1", port=8082, debug=True)
| true
| true
|
7908f006c711ecbda5d511f15234b90e66530ec4
| 3,098
|
py
|
Python
|
test.py
|
Leo-xxx/flappy
|
bf3d09ecf8ad2a7a35d7513d3d6eb39bca9c1922
|
[
"MIT"
] | 192
|
2019-02-28T05:50:59.000Z
|
2022-03-21T07:23:32.000Z
|
test.py
|
yangluliang/flappy
|
746cd33d8b56f09b71a308ce041150d8eb61344b
|
[
"MIT"
] | 6
|
2019-05-18T09:19:31.000Z
|
2020-11-15T01:13:07.000Z
|
test.py
|
yangluliang/flappy
|
746cd33d8b56f09b71a308ce041150d8eb61344b
|
[
"MIT"
] | 52
|
2019-03-07T03:15:23.000Z
|
2022-03-03T06:17:28.000Z
|
########################## FWMAV Simulation #########################
# Version 0.3
# Fan Fei Feb 2019
# Direct motor driven flapping wing MAV simulation
#######################################################################
import gym
import flappy
from stable_baselines.common.policies import MlpPolicy
from stable_baselines.common.vec_env import DummyVecEnv
from stable_baselines.common.vec_env import SubprocVecEnv
from stable_baselines.common import set_global_seeds
from flappy.envs.fwmav.controllers.arc_xy_arc_z import ARCController
from flappy.envs.fwmav.controllers.pid_controller import PIDController
import time
import argparse
import importlib
import numpy as np
def make_env(env_id, rank, seed=0, random_init = True, randomize_sim = True, phantom_sensor = False):
def _init():
env = gym.make(env_id)
env.config(random_init, randomize_sim, phantom_sensor)
if rank == 0:
env.enable_visualization()
env.enable_print()
env.seed(seed + rank)
return env
# set_global_seeds(seed)
return _init
class LazyModel:
def __init__(self,env,model_type):
self.action_lb = env.action_lb
self.action_ub = env.action_ub
self.observation_bound = env.observation_bound
if model_type == 'PID':
self.policy = PIDController(env.sim.dt_c)
elif model_type == 'ARC':
self.policy = ARCController(env.sim.dt_c)
        else:
            raise ValueError('Unknown model type: {}'.format(model_type))
def predict(self, obs):
action = self.policy.get_action(obs[0]*self.observation_bound)
# scale action from [action_lb, action_ub] to [-1,1]
# since baseline does not support asymmetric action space
normalized_action = (action-self.action_lb)/(self.action_ub - self.action_lb)*2 - 1
action = np.array([normalized_action])
return action, None
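# Sanity-check sketch (bounds are illustrative) for the scaling used in
# LazyModel.predict, which maps [action_lb, action_ub] onto [-1, 1]:
def _normalize(action, lb, ub):
    return (action - lb) / (ub - lb) * 2 - 1

assert _normalize(-1.0, -1.0, 3.0) == -1.0  # lower bound maps to -1
assert _normalize(3.0, -1.0, 3.0) == 1.0    # upper bound maps to +1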
def main(args):
env_id = 'fwmav_hover-v0'
env = DummyVecEnv([make_env(env_id, 0, random_init = args.rand_init, randomize_sim = args.rand_dynamics, phantom_sensor = args.phantom_sensor)])
if args.model_type != 'PID' and args.model_type != 'ARC':
try:
model_cls = getattr(
importlib.import_module('stable_baselines'), args.model_type)
except AttributeError:
print(args.model_type, "Error: wrong model type")
return
        try:
            model = model_cls.load(args.model_path)
        except Exception:
            print(args.model_path, "Error: wrong model path")
            return
else:
model = LazyModel(env.envs[0],args.model_type)
obs = env.reset()
while True:
if env.envs[0].is_sim_on == False:
env.envs[0].gui.cv.wait()
elif env.envs[0].is_sim_on:
action, _ = model.predict(obs)
obs, rewards, done, info = env.step(action)
# if done:
# obs = env.reset()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--model_type', required=True)
parser.add_argument('--model_path')
parser.add_argument(
'--policy_type', const='MlpPolicy', default='MlpPolicy', nargs='?')
parser.add_argument('--rand_init', action='store_true', default=False)
parser.add_argument('--rand_dynamics', action='store_true', default=False)
parser.add_argument('--phantom_sensor', action='store_true', default=False)
args = parser.parse_args()
main(args)
| 31.612245
| 145
| 0.714009
| true
| true
|
|
7908f041090e0ad93a81737cfc2b38afecb21b6a
| 592
|
py
|
Python
|
offpcc/domains/dmc_cartpole_su.py
|
zhihanyang2022/CleanRL
|
dfeb9aa992032b63bab9df0dc08ded334ceda546
|
[
"MIT"
] | 27
|
2021-10-10T17:15:42.000Z
|
2022-03-02T02:38:44.000Z
|
offpcc/domains/dmc_cartpole_su.py
|
zhihanyang2022/CleanRL
|
dfeb9aa992032b63bab9df0dc08ded334ceda546
|
[
"MIT"
] | 1
|
2021-06-08T12:37:12.000Z
|
2021-06-08T12:37:12.000Z
|
offpcc/domains/dmc_cartpole_su.py
|
zhihanyang2022/CleanRL
|
dfeb9aa992032b63bab9df0dc08ded334ceda546
|
[
"MIT"
] | 3
|
2021-11-18T11:15:02.000Z
|
2022-03-09T12:51:35.000Z
|
import dmc2gym
from domains.wrappers import ConcatObs
def mdp():
return dmc2gym.make(domain_name="cartpole", task_name="swingup", keys_to_exclude=[], frame_skip=5, track_prev_action=False)
def p():
return dmc2gym.make(domain_name="cartpole", task_name="swingup", keys_to_exclude=['velocity'], frame_skip=5, track_prev_action=False)
def va():
return dmc2gym.make(domain_name="cartpole", task_name="swingup", keys_to_exclude=['position'], frame_skip=5, track_prev_action=True)
def p_concat5():
return ConcatObs(p(), 5)
def va_concat10():
return ConcatObs(va(), 10)
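# Usage sketch (behaviour inferred from the names and the ConcatObs wrapper):
# p_concat5() stacks the last five position-only observations so a memoryless
# policy can estimate velocity from finite differences, e.g.
#   env = p_concat5()
#   obs = env.reset()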
| 26.909091
| 137
| 0.741554
|
import dmc2gym
from domains.wrappers import ConcatObs
def mdp():
return dmc2gym.make(domain_name="cartpole", task_name="swingup", keys_to_exclude=[], frame_skip=5, track_prev_action=False)
def p():
return dmc2gym.make(domain_name="cartpole", task_name="swingup", keys_to_exclude=['velocity'], frame_skip=5, track_prev_action=False)
def va():
return dmc2gym.make(domain_name="cartpole", task_name="swingup", keys_to_exclude=['position'], frame_skip=5, track_prev_action=True)
def p_concat5():
return ConcatObs(p(), 5)
def va_concat10():
return ConcatObs(va(), 10)
| true
| true
|
7908f0aae83b8012b04742235aed97f8a6ab12bf
| 777
|
py
|
Python
|
Exercicios em python/ex69.py
|
GabrielSantos25/Python
|
208eec0144587aa4e0aa7fa00da29ffa0478eac8
|
[
"MIT"
] | null | null | null |
Exercicios em python/ex69.py
|
GabrielSantos25/Python
|
208eec0144587aa4e0aa7fa00da29ffa0478eac8
|
[
"MIT"
] | null | null | null |
Exercicios em python/ex69.py
|
GabrielSantos25/Python
|
208eec0144587aa4e0aa7fa00da29ffa0478eac8
|
[
"MIT"
] | null | null | null |
print('-'*20)
print('CADASTRE UMA PESSOA')
print('-'*20)
total = totalm = totalf = 0
while True:
idade = int(input('Idade: '))
if idade >= 18:
total += 1
sexo = ' '
while sexo not in 'MF':
sexo = str(input('Sexo: [M/F]')).strip().upper()[0]
        # observations: count every man, but women only when under 20
if sexo == 'M':
totalm += 1
if sexo == 'F' and idade < 20:
totalf +=1
resposta = ' '
while resposta not in 'SN':
resposta = str(input('Quer continuar? [S/N]')).upper().strip()[0]
if resposta == 'N':
break
print('===== FIM DO PROGRAMA =====')
print(f'Total de pessoas com mais de 18 anos: {total}')
print(f'Ao todo temos {totalm} homens cadastrados')
print(f'E temos {totalf} mulher com menos de 20 anos')
| 25.9
| 73
| 0.537967
|
print('-'*20)
print('CADASTRE UMA PESSOA')
print('-'*20)
total = totalm = totalf = 0
while True:
idade = int(input('Idade: '))
if idade >= 18:
total += 1
sexo = ' '
while sexo not in 'MF':
sexo = str(input('Sexo: [M/F]')).strip().upper()[0]
if sexo == 'M':
totalm += 1
if sexo == 'F' and idade < 20:
totalf +=1
resposta = ' '
while resposta not in 'SN':
resposta = str(input('Quer continuar? [S/N]')).upper().strip()[0]
if resposta == 'N':
break
print('===== FIM DO PROGRAMA =====')
print(f'Total de pessoas com mais de 18 anos: {total}')
print(f'Ao todo temos {totalm} homens cadastrados')
print(f'E temos {totalf} mulher com menos de 20 anos')
| true
| true
|
7908f0be09c001faf60d38698ab97836651a2937
| 1,241
|
py
|
Python
|
QuoteEngine/Ingestor.py
|
1ayham1/Data_Science-MemesGenerator
|
71f64960122e8b32acf84cc71dbcec4fac90b0d1
|
[
"MIT"
] | null | null | null |
QuoteEngine/Ingestor.py
|
1ayham1/Data_Science-MemesGenerator
|
71f64960122e8b32acf84cc71dbcec4fac90b0d1
|
[
"MIT"
] | null | null | null |
QuoteEngine/Ingestor.py
|
1ayham1/Data_Science-MemesGenerator
|
71f64960122e8b32acf84cc71dbcec4fac90b0d1
|
[
"MIT"
] | null | null | null |
"""Define family of algorithms and make them interchangeable
The algorithms vary independetly from the clients using it.
This class implements to IngestorInterface and dynamically invoke
a suitable algorithm (strategy.algorithm()), through parse()
abstract method. i.e. it is independent of how an algorithm
is implemented.
That means, the behavior can be changed without breaking the classes
that use it, and the classes can switch between behaviors by changing
the specific implementation used without requiring any
significant code changes.
"""
from typing import List
from .IngestorInterface import IngestorInterface
from .QuoteModel import QuoteModel
from .CSVImporter import CSVImporter
from .PDFImporter import PDFImporter
from .DocxImporter import DocxImporter
from .TXTImporter import TXTImporter
class Ingestor(IngestorInterface):
"""Define family of algorithms & dynamically invoke the one of interest"""
importer_classes = [CSVImporter, PDFImporter, DocxImporter, TXTImporter]
@classmethod
def parse(cls, path: str) -> List[QuoteModel]:
for importer in cls.importer_classes:
if importer.can_ingest(path):
return importer.parse(path)
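    # Usage sketch (path illustrative): the caller never names a concrete
    # importer; the first strategy whose can_ingest(path) returns True wins
    #   quotes = Ingestor.parse('./_data/quotes.csv')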
| 34.472222
| 79
| 0.759871
|
from typing import List
from .IngestorInterface import IngestorInterface
from .QuoteModel import QuoteModel
from .CSVImporter import CSVImporter
from .PDFImporter import PDFImporter
from .DocxImporter import DocxImporter
from .TXTImporter import TXTImporter
class Ingestor(IngestorInterface):
importer_classes = [CSVImporter, PDFImporter, DocxImporter, TXTImporter]
@classmethod
def parse(cls, path: str) -> List[QuoteModel]:
for importer in cls.importer_classes:
if importer.can_ingest(path):
return importer.parse(path)
| true
| true
|
7908f0d2e966cd5b8a3b2952f22d9cdd3ff7b664
| 2,368
|
py
|
Python
|
invenio_app_ils/circulation/jsonresolvers/loan.py
|
equadon/invenio-app-ils
|
42ba282968d0aa28fb1bfc71d0709685165aaec4
|
[
"MIT"
] | null | null | null |
invenio_app_ils/circulation/jsonresolvers/loan.py
|
equadon/invenio-app-ils
|
42ba282968d0aa28fb1bfc71d0709685165aaec4
|
[
"MIT"
] | null | null | null |
invenio_app_ils/circulation/jsonresolvers/loan.py
|
equadon/invenio-app-ils
|
42ba282968d0aa28fb1bfc71d0709685165aaec4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019-2020 CERN.
#
# invenio-app-ils is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Circulation Loan resolvers."""
from invenio_circulation.proxies import current_circulation
from invenio_pidstore.errors import PIDDeletedError
from invenio_app_ils.circulation.utils import resolve_item_from_loan
from invenio_app_ils.jsonresolver.api import \
get_field_value_for_record as get_field_value
from invenio_app_ils.jsonresolver.api import get_pid_or_default, pick
from invenio_app_ils.proxies import current_app_ils
from invenio_app_ils.records.resolver.resolver import get_patron
def item_resolver(loan_pid):
"""Resolve an Item given a Loan PID."""
Loan = current_circulation.loan_record_cls
loan = Loan.get_record_by_pid(loan_pid)
if not loan.get("item_pid"):
return {}
try:
# can resolve to an Item or BorrowingRequest
item = resolve_item_from_loan(loan["item_pid"])
except PIDDeletedError:
item = {}
else:
item = pick(
item,
"barcode", # not set in BorrowingRequest
"description",
"document_pid",
"medium", # not set in BorrowingRequest
"pid",
)
return item
@get_pid_or_default(default_value=dict())
def loan_patron_resolver(loan_pid):
"""Resolve a Patron given a Loan PID."""
Loan = current_circulation.loan_record_cls
try:
patron_pid = get_field_value(Loan, loan_pid, "patron_pid")
except KeyError:
return {}
return get_patron(patron_pid)
@get_pid_or_default(default_value=dict())
def document_resolver(loan_pid):
"""Resolve a Document given a Loan PID."""
Loan = current_circulation.loan_record_cls
try:
document_pid = get_field_value(Loan, loan_pid, "document_pid")
except KeyError:
return {}
Document = current_app_ils.document_record_cls
try:
document = Document.get_record_by_pid(document_pid)
except PIDDeletedError:
obj = {}
else:
obj = pick(
document,
"authors",
"edition",
"document_type",
"pid",
"title",
# TODO: add the imprint year here
)
return obj
| 28.190476
| 76
| 0.668074
|
from invenio_circulation.proxies import current_circulation
from invenio_pidstore.errors import PIDDeletedError
from invenio_app_ils.circulation.utils import resolve_item_from_loan
from invenio_app_ils.jsonresolver.api import \
get_field_value_for_record as get_field_value
from invenio_app_ils.jsonresolver.api import get_pid_or_default, pick
from invenio_app_ils.proxies import current_app_ils
from invenio_app_ils.records.resolver.resolver import get_patron
def item_resolver(loan_pid):
Loan = current_circulation.loan_record_cls
loan = Loan.get_record_by_pid(loan_pid)
if not loan.get("item_pid"):
return {}
try:
item = resolve_item_from_loan(loan["item_pid"])
except PIDDeletedError:
item = {}
else:
item = pick(
item,
"barcode",
"description",
"document_pid",
"medium",
"pid",
)
return item
@get_pid_or_default(default_value=dict())
def loan_patron_resolver(loan_pid):
Loan = current_circulation.loan_record_cls
try:
patron_pid = get_field_value(Loan, loan_pid, "patron_pid")
except KeyError:
return {}
return get_patron(patron_pid)
@get_pid_or_default(default_value=dict())
def document_resolver(loan_pid):
Loan = current_circulation.loan_record_cls
try:
document_pid = get_field_value(Loan, loan_pid, "document_pid")
except KeyError:
return {}
Document = current_app_ils.document_record_cls
try:
document = Document.get_record_by_pid(document_pid)
except PIDDeletedError:
obj = {}
else:
obj = pick(
document,
"authors",
"edition",
"document_type",
"pid",
"title",
)
return obj
| true
| true
|
7908f1126071b8be0fdbd134d770ce9c20ce7b27
| 1,127
|
py
|
Python
|
ReinforcementLearning/Bandit/EpsilonGreedy.py
|
MitI-7/MachineLearning
|
6450e2a9260ae70cb75cd2f195729143fe427431
|
[
"MIT"
] | null | null | null |
ReinforcementLearning/Bandit/EpsilonGreedy.py
|
MitI-7/MachineLearning
|
6450e2a9260ae70cb75cd2f195729143fe427431
|
[
"MIT"
] | null | null | null |
ReinforcementLearning/Bandit/EpsilonGreedy.py
|
MitI-7/MachineLearning
|
6450e2a9260ae70cb75cd2f195729143fe427431
|
[
"MIT"
] | null | null | null |
import random
import math
import numpy as np
from typing import List
class EpsilonGreedy:
def __init__(self, epsilon: float, counts: List[int], values: List[float]):
assert epsilon is None or 0.0 <= epsilon <= 1.0
self.epsilon = epsilon
self.counts = counts
self.values = values
def initialize(self, n_arms):
self.counts = [0] * n_arms
self.values = [0.0] * n_arms
def select_arm(self):
epsilon = self.epsilon
if epsilon is None:
t = sum(self.counts) + 1
epsilon = 1 / math.log(t + 0.0000001)
        # exploit: pick the best-known arm
if random.random() > epsilon:
return np.argmax(self.values)
        # explore: pick a random arm
else:
return random.randrange(len(self.values))
def update(self, chosen_arm, reward):
self.counts[chosen_arm] += 1
n = self.counts[chosen_arm]
value = self.values[chosen_arm]
self.values[chosen_arm] = ((n - 1) / float(n)) * value + (1 / float(n)) * reward # online average
def __str__(self):
return "EpsilonGreedy(epsilon={0})".format(self.epsilon)
| 28.175
| 108
| 0.584738
|
import random
import math
import numpy as np
from typing import List
class EpsilonGreedy:
def __init__(self, epsilon: float, counts: List[int], values: List[float]):
assert epsilon is None or 0.0 <= epsilon <= 1.0
self.epsilon = epsilon
self.counts = counts
self.values = values
def initialize(self, n_arms):
self.counts = [0] * n_arms
self.values = [0.0] * n_arms
def select_arm(self):
epsilon = self.epsilon
if epsilon is None:
t = sum(self.counts) + 1
epsilon = 1 / math.log(t + 0.0000001)
if random.random() > epsilon:
return np.argmax(self.values)
else:
return random.randrange(len(self.values))
def update(self, chosen_arm, reward):
self.counts[chosen_arm] += 1
n = self.counts[chosen_arm]
value = self.values[chosen_arm]
self.values[chosen_arm] = ((n - 1) / float(n)) * value + (1 / float(n)) * reward
def __str__(self):
return "EpsilonGreedy(epsilon={0})".format(self.epsilon)
| true
| true
|
7908f1cccbd446c855fab8653d88ad1c9a2bd027
| 569
|
py
|
Python
|
venv/Lib/site-packages/skimage/conftest.py
|
997Yi/Flask-web
|
6b5e5d274bfa25fbd3db5af02723a5671f1e901d
|
[
"MIT"
] | 2
|
2020-08-25T13:55:00.000Z
|
2020-08-25T16:36:03.000Z
|
venv/Lib/site-packages/skimage/conftest.py
|
997Yi/Flask-web
|
6b5e5d274bfa25fbd3db5af02723a5671f1e901d
|
[
"MIT"
] | 1
|
2020-04-25T20:36:07.000Z
|
2020-04-25T20:36:07.000Z
|
site-packages/skimage/conftest.py
|
Wristlebane/Pyto
|
901ac307b68486d8289105c159ca702318bea5b0
|
[
"MIT"
] | 1
|
2020-07-30T10:30:48.000Z
|
2020-07-30T10:30:48.000Z
|
# Use legacy numpy printing. This fix is made to keep doctests functional.
# For more info, see https://github.com/scikit-image/scikit-image/pull/2935 .
# TODO: remove this workaround once minimal required numpy is set to 1.14.0
from distutils.version import LooseVersion as Version
import numpy as np
if Version(np.__version__) >= Version('1.14'):
np.set_printoptions(legacy='1.13')
# List of files that pytest should ignore
collect_ignore = ["io/_plugins",]
try:
import visvis
except ImportError:
collect_ignore.append("measure/mc_meta/visual_test.py")
| 35.5625
| 77
| 0.760984
|
from distutils.version import LooseVersion as Version
import numpy as np
if Version(np.__version__) >= Version('1.14'):
np.set_printoptions(legacy='1.13')
collect_ignore = ["io/_plugins",]
try:
import visvis
except ImportError:
collect_ignore.append("measure/mc_meta/visual_test.py")
| true
| true
|
7908f1fc56f5ecaaa2415bd1b8957b5348177620
| 1,811
|
py
|
Python
|
exaslct_src/lib/export_container_tasks_creator.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | null | null | null |
exaslct_src/lib/export_container_tasks_creator.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | 1
|
2019-05-06T07:36:11.000Z
|
2019-05-06T07:36:11.000Z
|
exaslct_src/lib/export_container_tasks_creator.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | 1
|
2019-05-03T08:49:29.000Z
|
2019-05-03T08:49:29.000Z
|
from typing import Dict
from exaslct_src.lib.export_container_task import ExportContainerTask
from exaslct_src.lib.data.required_task_info import RequiredTaskInfo
from exaslct_src.lib.docker.docker_create_image_task import DockerCreateImageTask
class ExportContainerTasksCreator():
    def __init__(self, export_path: str, release_name: str):
self.release_name = release_name
self.export_path = export_path
def create_export_tasks_for_flavors(
self, build_tasks: Dict[str, Dict[str, DockerCreateImageTask]]):
return {flavor_path: self.create_export_tasks(flavor_path, build_task)
for flavor_path, build_task in build_tasks.items()}
def create_export_tasks(self, flavor_path: str,
build_tasks: Dict[str, DockerCreateImageTask]):
return {release_type: self.create_export_task(release_type, flavor_path, build_task)
for release_type, build_task in build_tasks.items()}
def create_export_task(self, release_type: str, flavor_path: str,
build_task: DockerCreateImageTask):
required_task_info = self.create_required_task_info(build_task)
return \
ExportContainerTask(
required_task_info_json=required_task_info.to_json(indent=None),
export_path=self.export_path,
release_name=self.release_name,
release_type=release_type,
flavor_path=flavor_path)
def create_required_task_info(self, build_task):
required_task_info = \
RequiredTaskInfo(module_name=build_task.__module__,
class_name=build_task.__class__.__name__,
params=build_task.param_kwargs)
return required_task_info
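    # Shape sketch: create_export_tasks_for_flavors returns a nested mapping
    #   {flavor_path: {release_type: ExportContainerTask, ...}, ...}
    # mirroring the structure of the build_tasks argument it receives.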
| 45.275
| 92
| 0.692435
|
from typing import Dict
from exaslct_src.lib.export_container_task import ExportContainerTask
from exaslct_src.lib.data.required_task_info import RequiredTaskInfo
from exaslct_src.lib.docker.docker_create_image_task import DockerCreateImageTask
class ExportContainerTasksCreator():
    def __init__(self, export_path: str, release_name: str):
self.release_name = release_name
self.export_path = export_path
def create_export_tasks_for_flavors(
self, build_tasks: Dict[str, Dict[str, DockerCreateImageTask]]):
return {flavor_path: self.create_export_tasks(flavor_path, build_task)
for flavor_path, build_task in build_tasks.items()}
def create_export_tasks(self, flavor_path: str,
build_tasks: Dict[str, DockerCreateImageTask]):
return {release_type: self.create_export_task(release_type, flavor_path, build_task)
for release_type, build_task in build_tasks.items()}
def create_export_task(self, release_type: str, flavor_path: str,
build_task: DockerCreateImageTask):
required_task_info = self.create_required_task_info(build_task)
return \
ExportContainerTask(
required_task_info_json=required_task_info.to_json(indent=None),
export_path=self.export_path,
release_name=self.release_name,
release_type=release_type,
flavor_path=flavor_path)
def create_required_task_info(self, build_task):
required_task_info = \
RequiredTaskInfo(module_name=build_task.__module__,
class_name=build_task.__class__.__name__,
params=build_task.param_kwargs)
return required_task_info
| true
| true
|
7908f38a2f366f8efa26aa93d4185f78abc6b682
| 145
|
py
|
Python
|
apache/util_script.py
|
GrahamDumpleton-abandoned/apswigpy
|
b821b94a78ceed5b8991f7c345aeeadca3729a90
|
[
"Apache-2.0"
] | null | null | null |
apache/util_script.py
|
GrahamDumpleton-abandoned/apswigpy
|
b821b94a78ceed5b8991f7c345aeeadca3729a90
|
[
"Apache-2.0"
] | null | null | null |
apache/util_script.py
|
GrahamDumpleton-abandoned/apswigpy
|
b821b94a78ceed5b8991f7c345aeeadca3729a90
|
[
"Apache-2.0"
] | null | null | null |
import apache
if apache.version == (2, 2):
from apache22.util_script import *
else:
raise RuntimeError('Apache version not supported.')
| 20.714286
| 55
| 0.717241
|
import apache
if apache.version == (2, 2):
from apache22.util_script import *
else:
raise RuntimeError('Apache version not supported.')
| true
| true
|
7908f69fd6f5e627e54b697a55756de54ed44c2e
| 28,367
|
py
|
Python
|
code/venv/lib/python3.6/site-packages/pgadmin4/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/__init__.py
|
jhkuang11/UniTrade
|
5f68b853926e167936b58c8543b8f95ebd6f5211
|
[
"MIT"
] | null | null | null |
code/venv/lib/python3.6/site-packages/pgadmin4/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/__init__.py
|
jhkuang11/UniTrade
|
5f68b853926e167936b58c8543b8f95ebd6f5211
|
[
"MIT"
] | 10
|
2020-06-05T19:42:26.000Z
|
2022-03-11T23:38:35.000Z
|
code/venv/lib/python3.6/site-packages/pgadmin4/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/__init__.py
|
jhkuang11/UniTrade
|
5f68b853926e167936b58c8543b8f95ebd6f5211
|
[
"MIT"
] | null | null | null |
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2017, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Implements the Check Constraint Module."""
import simplejson as json
from functools import wraps
import pgadmin.browser.server_groups.servers.databases as database
from flask import render_template, make_response, request, jsonify
from flask_babel import gettext as _
from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.browser.server_groups.servers.databases.schemas.tables.constraints.type \
import ConstraintRegistry
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
class CheckConstraintModule(CollectionNodeModule):
"""
class CheckConstraintModule(CollectionNodeModule):
This class represents The Check Constraint Module.
Methods:
-------
* __init__(*args, **kwargs)
- Initialize the Check Constraint Module.
* get_nodes(gid, sid, did, scid)
- Generate the Check Constraint collection node.
* node_inode(gid, sid, did, scid)
- Returns Check Constraint node as leaf node.
* script_load()
- Load the module script for the Check Constraint, when any of the
Check node is initialized.
"""
NODE_TYPE = 'check_constraints'
COLLECTION_LABEL = _("Check Constraints")
def __init__(self, *args, **kwargs):
super(CheckConstraintModule, self).__init__(*args, **kwargs)
self.min_ver = None
self.max_ver = None
def get_nodes(self, gid, sid, did, scid, doid):
"""
Generate the Check Constraint collection node.
"""
yield self.generate_browser_collection_node(doid)
@property
def node_inode(self):
"""
Returns Check Constraint node as leaf node.
"""
return False
@property
def script_load(self):
"""
Load the module script for the Check Constraint, when any of the
Check node is initialized.
"""
return database.DatabaseModule.NODE_TYPE
@property
def module_use_template_javascript(self):
"""
Returns whether Jinja2 template is used for generating the javascript
module.
"""
return False
@property
def csssnippets(self):
"""
Returns a snippet of css to include in the page
"""
return [
render_template(
"check_constraint/css/check_constraint.css",
node_type=self.node_type
)
]
blueprint = CheckConstraintModule(__name__)
class CheckConstraintView(PGChildNodeView):
"""
class CheckConstraintView(PGChildNodeView):
This class inherits PGChildNodeView to get the different routes for
the module.
The class is responsible to Create, Read, Update and Delete operations for
the Check Constraint.
Methods:
-------
* module_js():
    - Load JS file (check_constraint.js) for this module.
* check_precondition(f):
- Works as a decorator.
- Checks database connection status.
- Attach connection object and template path.
* list(gid, sid, did, scid, doid):
- List the Check Constraints.
* nodes(gid, sid, did, scid):
- Returns all the Check Constraints to generate Nodes in the browser.
* properties(gid, sid, did, scid, doid):
- Returns the Check Constraint properties.
* create(gid, sid, did, scid):
- Creates a new Check Constraint object.
* update(gid, sid, did, scid, doid):
- Updates the Check Constraint object.
* delete(gid, sid, did, scid, doid):
- Drops the Check Constraint object.
* sql(gid, sid, did, scid, doid=None):
- Returns the SQL for the Check Constraint object.
* msql(gid, sid, did, scid, doid=None):
- Returns the modified SQL.
* get_sql(gid, sid, data, scid, tid=None):
      - Generates the SQL statements to create/update the Check Constraint
        object.
* dependents(gid, sid, did, scid, tid, cid):
- Returns the dependents for the Check Constraint object.
* dependencies(gid, sid, did, scid, tid, cid):
- Returns the dependencies for the Check Constraint object.
* validate_check_constraint(gid, sid, did, scid, tid, cid):
- Validate check constraint.
"""
node_type = blueprint.node_type
parent_ids = [
{'type': 'int', 'id': 'gid'},
{'type': 'int', 'id': 'sid'},
{'type': 'int', 'id': 'did'},
{'type': 'int', 'id': 'scid'},
{'type': 'int', 'id': 'tid'}
]
ids = [
{'type': 'int', 'id': 'cid'}
]
operations = dict({
'obj': [
{'get': 'properties', 'delete': 'delete', 'put': 'update'},
{'get': 'list', 'post': 'create'}
],
'delete': [{'delete': 'delete'}],
'children': [{'get': 'children'}],
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
'sql': [{'get': 'sql'}],
'msql': [{'get': 'msql'}, {'get': 'msql'}],
'stats': [{'get': 'statistics'}],
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}],
'module.js': [{}, {}, {'get': 'module_js'}],
'validate': [{'get': 'validate_check_constraint'}],
})
def module_js(self):
"""
Load JS file (check_constraint.js) for this module.
"""
return make_response(
render_template(
"check_constraint/js/check_constraint.js",
_=_
),
200, {'Content-Type': 'application/x-javascript'}
)
def check_precondition(f):
"""
Works as a decorator.
Checks database connection status.
Attach connection object and template path.
"""
@wraps(f)
def wrap(*args, **kwargs):
self = args[0]
driver = get_driver(PG_DEFAULT_DRIVER)
self.manager = driver.connection_manager(kwargs['sid'])
self.conn = self.manager.connection(did=kwargs['did'])
self.qtIdent = driver.qtIdent
# Set the template path for the SQL scripts
self.template_path = 'check_constraint/sql/#{0}#'.format(self.manager.version)
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),
tid=kwargs['tid'])
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
self.schema = rset['rows'][0]['schema']
self.table = rset['rows'][0]['table']
return f(*args, **kwargs)
return wrap
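    # Flow sketch: each decorated handler below effectively runs
    #   connect (manager/conn) -> resolve schema and table for kwargs['tid']
    #   -> original view
    # so the views can rely on self.conn, self.template_path, self.schema and
    # self.table being populated.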
def end_transaction(self):
"""
End database transaction.
Returns:
"""
SQL = "END;"
self.conn.execute_scalar(SQL)
@check_precondition
def list(self, gid, sid, did, scid, tid, cid=None):
"""
List the Check Constraints.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Id
"""
try:
res = self.get_node_list(gid, sid, did, scid, tid, cid)
return ajax_response(
response=res,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
def get_node_list(self, gid, sid, did, scid, tid, cid=None):
"""
This function returns all check constraints
nodes within that collection as a list.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
      cid: Check constraint ID
Returns:
"""
driver = get_driver(PG_DEFAULT_DRIVER)
self.manager = driver.connection_manager(sid)
self.conn = self.manager.connection(did=did)
self.qtIdent = driver.qtIdent
# Set the template path for the SQL scripts
self.template_path = 'check_constraint/sql/#{0}#'.format(self.manager.version)
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),
tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
self.schema = rset['rows'][0]['schema']
self.table = rset['rows'][0]['table']
SQL = render_template("/".join([self.template_path, 'properties.sql']),
tid=tid)
status, res = self.conn.execute_dict(SQL)
return res['rows']
@check_precondition
def node(self, gid, sid, did, scid, tid, cid):
"""
Returns all the Check Constraints.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check constraint Id.
"""
SQL = render_template("/".join([self.template_path,
'nodes.sql']),
cid=cid)
status, rset = self.conn.execute_2darray(SQL)
if len(rset['rows']) == 0:
return gone(_("""Could not find the check constraint."""))
if "convalidated" in rset['rows'][0] and rset['rows'][0]["convalidated"]:
icon = "icon-check_constraints_bad"
valid = False
else:
icon = "icon-check_constraints"
valid = True
res = self.blueprint.generate_browser_node(
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon=icon,
valid=valid
)
return make_json_response(
data=res,
status=200
)
@check_precondition
def nodes(self, gid, sid, did, scid, tid):
"""
Returns all the Check Constraints.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check constraint Id.
"""
res = []
SQL = render_template("/".join([self.template_path,
'nodes.sql']),
tid=tid)
status, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']:
if "convalidated" in row and row["convalidated"]:
icon = "icon-check_constraints_bad"
valid = False
else:
icon = "icon-check_constraints"
valid = True
res.append(
self.blueprint.generate_browser_node(
row['oid'],
tid,
row['name'],
icon=icon,
valid=valid
))
return make_json_response(
data=res,
status=200
)
def get_nodes(self, gid, sid, did, scid, tid, cid=None):
"""
        This function returns all check constraint nodes as a list.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Check constraint ID
Returns:
"""
driver = get_driver(PG_DEFAULT_DRIVER)
self.manager = driver.connection_manager(sid)
self.conn = self.manager.connection(did=did)
self.qtIdent = driver.qtIdent
# Set the template path for the SQL scripts
self.template_path = 'check_constraint/sql/#{0}#'.format(self.manager.version)
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),
tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
self.schema = rset['rows'][0]['schema']
self.table = rset['rows'][0]['table']
res = []
SQL = render_template("/".join([self.template_path,
'nodes.sql']),
tid=tid)
        status, rset = self.conn.execute_2darray(SQL)
        if not status:
            return internal_server_error(errormsg=rset)
for row in rset['rows']:
if "convalidated" in row and row["convalidated"]:
icon = "icon-check_constraints_bad"
valid = False
else:
icon = "icon-check_constraints"
valid = True
res.append(
self.blueprint.generate_browser_node(
row['oid'],
tid,
row['name'],
icon=icon,
valid=valid
))
return res
@check_precondition
def properties(self, gid, sid, did, scid, tid, cid):
"""
Returns the Check Constraints property.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
            tid: Table Id
cid: Check Constraint Id
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, cid=cid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
_("Could not find the object on the server.")
)
data = res['rows'][0]
return ajax_response(
response=data,
status=200
)
@check_precondition
def create(self, gid, sid, did, scid, tid, cid=None):
"""
        This function will create a check constraint.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Check constraint ID
Returns:
"""
required_args = ['consrc']
data = request.form if request.form else json.loads(
request.data, encoding='utf-8'
)
for k, v in data.items():
try:
data[k] = json.loads(v, encoding='utf-8')
except (ValueError, TypeError, KeyError):
data[k] = v
for arg in required_args:
if arg not in data or data[arg] == '':
return make_json_response(
status=400,
success=0,
                    errormsg=_(
                        "Could not find the required parameter (%s)."
                    ) % arg
)
data['schema'] = self.schema
data['table'] = self.table
try:
if 'name' not in data or data['name'] == "":
SQL = "BEGIN;"
# Start transaction.
status, res = self.conn.execute_scalar(SQL)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
# The below SQL will execute CREATE DDL only
SQL = render_template(
"/".join([self.template_path, 'create.sql']),
data=data
)
status, msg = self.conn.execute_scalar(SQL)
if not status:
self.end_transaction()
return internal_server_error(errormsg=msg)
if 'name' not in data or data['name'] == "":
sql = render_template(
"/".join([self.template_path,
'get_oid_with_transaction.sql'],
),
tid=tid)
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
self.end_transaction()
data['name'] = res['rows'][0]['name']
else:
sql = render_template("/".join([self.template_path, 'get_oid.sql']),
tid=tid,
name=data['name'])
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
if "convalidated" in res['rows'][0] and res['rows'][0]["convalidated"]:
icon = "icon-check_constraints_bad"
valid = False
else:
icon = "icon-check_constraints"
valid = True
return jsonify(
node=self.blueprint.generate_browser_node(
res['rows'][0]['oid'],
tid,
data['name'],
icon=icon,
valid=valid
)
)
except Exception as e:
self.end_transaction()
return make_json_response(
status=400,
success=0,
                errormsg=str(e)
)
@check_precondition
def delete(self, gid, sid, did, scid, tid, cid):
"""
Drops the Check Constraint object.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
            tid: Table Id
cid: Check Constraint Id
"""
try:
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, cid=cid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
if not res['rows']:
return make_json_response(
success=0,
errormsg=_(
'Error: Object not found.'
),
info=_(
'The specified check constraint could not be found.\n'
)
)
data = res['rows'][0]
SQL = render_template("/".join([self.template_path,
'delete.sql']),
data=data)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=_("Check Constraint dropped."),
data={
'id': tid,
'scid': scid,
'sid': sid,
'gid': gid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def update(self, gid, sid, did, scid, tid, cid):
"""
Updates the Check Constraint object.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
data = request.form if request.form else json.loads(
request.data, encoding='utf-8'
)
try:
data['schema'] = self.schema
data['table'] = self.table
SQL, name = self.get_sql(gid, sid, data, scid, tid, cid)
if not SQL:
return name
SQL = SQL.strip('\n').strip(' ')
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path, 'get_name.sql']),
cid=cid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
if "convalidated" in res['rows'][0] and res['rows'][0]["convalidated"]:
icon = 'icon-check_constraints_bad'
valid = False
else:
icon = 'icon-check_constraints'
valid = True
return jsonify(
node=self.blueprint.generate_browser_node(
cid,
tid,
name,
icon=icon,
valid=valid
)
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def sql(self, gid, sid, did, scid, tid, cid=None):
"""
Returns the SQL for the Check Constraint object.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, cid=cid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
_("Could not find the object on the server.")
)
data = res['rows'][0]
data['schema'] = self.schema
data['table'] = self.table
SQL = render_template("/".join([self.template_path,
'create.sql']),
data=data)
sql_header = u"-- Constraint: {0}\n\n-- ".format(data['name'])
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
data=data)
sql_header += "\n"
SQL = sql_header + SQL
return ajax_response(response=SQL)
@check_precondition
def msql(self, gid, sid, did, scid, tid, cid=None):
"""
Returns the modified SQL.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
Returns:
Check Constraint object in json format.
"""
data = {}
for k, v in request.args.items():
try:
data[k] = json.loads(v, encoding='utf-8')
except ValueError:
data[k] = v
data['schema'] = self.schema
data['table'] = self.table
try:
sql, name = self.get_sql(gid, sid, data, scid, tid, cid)
if not sql:
return name
sql = sql.strip('\n').strip(' ')
if sql == '':
sql = "--modified SQL"
return make_json_response(
data=sql,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
def get_sql(self, gid, sid, data, scid, tid, cid=None):
"""
Generates the SQL statements to create/update the Check Constraint.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
if cid is not None:
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, cid=cid)
status, res = self.conn.execute_dict(SQL)
if not status:
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return False, gone(
_("Could not find the object on the server.")
)
old_data = res['rows'][0]
required_args = ['name']
for arg in required_args:
if arg not in data:
data[arg] = old_data[arg]
SQL = render_template(
"/".join([self.template_path, 'update.sql']),
data=data, o_data=old_data, conn=self.conn
)
else:
required_args = ['consrc']
            for arg in required_args:
                if arg not in data:
                    return _('-- definition incomplete'), ''
                elif isinstance(data[arg], list) and len(data[arg]) < 1:
                    return _('-- definition incomplete'), ''
SQL = render_template("/".join([self.template_path,
'create.sql']),
data=data)
        # 'name' is guaranteed in the update branch (filled from old_data
        # above); unnamed create previews fall back to an empty string.
        return SQL, data.get('name', '')
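    # Sketch of get_sql's contract (values illustrative):
    #   cid is None  -> data must carry 'consrc'; create.sql is rendered
    #   cid is given -> missing keys are merged in from properties.sql and
    #                   update.sql is rendered
    # Either way a (SQL, name) pair comes back, so callers can unpack:
    #   sql, name = self.get_sql(gid, sid, data, scid, tid, cid)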
@check_precondition
def dependents(self, gid, sid, did, scid, tid, cid):
"""
This function get the dependents and return ajax response
for the Check Constraint node.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
dependents_result = self.get_dependents(self.conn, cid)
return ajax_response(
response=dependents_result,
status=200
)
@check_precondition
def dependencies(self, gid, sid, did, scid, tid, cid):
"""
This function get the dependencies and return ajax response
for the Check Constraint node.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
dependencies_result = self.get_dependencies(self.conn, cid)
return ajax_response(
response=dependencies_result,
status=200
)
@check_precondition
def validate_check_constraint(self, gid, sid, did, scid, tid, cid):
"""
Validate check constraint.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
Returns:
"""
data = {}
try:
data['schema'] = self.schema
data['table'] = self.table
sql = render_template("/".join([self.template_path, 'get_name.sql']), cid=cid)
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)
data['name'] = res
sql = render_template("/".join([self.template_path, 'validate.sql']), data=data)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=_("Check constraint updated."),
data={
'id': cid,
'tid': tid,
'scid': scid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
constraint = ConstraintRegistry(
'check_constraint', CheckConstraintModule, CheckConstraintView
)
CheckConstraintView.register_node_view(blueprint)
| 30.934569
| 92
| 0.501251
|
| true
| true
|
7908f716a9071b582abe39f0e366ba440e8de357
| 710
|
gyp
|
Python
|
test/copies/src/copies-sourceless-shared-lib.gyp
|
chlorm-forks/gyp
|
a8921fcaab1a18c8cf7e4ab09ceb940e336918ec
|
[
"BSD-3-Clause"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
test/copies/src/copies-sourceless-shared-lib.gyp
|
chlorm-forks/gyp
|
a8921fcaab1a18c8cf7e4ab09ceb940e336918ec
|
[
"BSD-3-Clause"
] | 1,432
|
2017-06-21T04:08:48.000Z
|
2020-08-25T16:21:15.000Z
|
test/copies/src/copies-sourceless-shared-lib.gyp
|
chlorm-forks/gyp
|
a8921fcaab1a18c8cf7e4ab09ceb940e336918ec
|
[
"BSD-3-Clause"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
# Copyright (c) 2015 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'mylib',
'type': 'static_library',
'sources': [ 'foo.c' ],
},
{
'target_name': 'mysolib',
'type': 'shared_library',
'dependencies': [ 'mylib' ],
'copies': [
{
'destination': '<(PRODUCT_DIR)/copies-out',
'files': [ 'file1' ],
},
],
# link.exe gets confused by sourceless shared libraries and needs this
# to become unconfused.
'msvs_settings': { 'VCLinkerTool': { 'TargetMachine': '1', }, },
},
],
}
| 25.357143
| 76
| 0.540845
|
| true
| true
|
7908f7ec5839a4deb692ebb3693747826a3a5a18
| 10,364
|
py
|
Python
|
tools/create_makefile.py
|
mehrdad-shokri/phxpaxos
|
c6c8806c3497cabe6e74dc09aa4487b6fd2057f7
|
[
"OpenSSL"
] | 2,025
|
2017-08-02T09:32:00.000Z
|
2022-03-30T05:16:30.000Z
|
tools/create_makefile.py
|
mehrdad-shokri/phxpaxos
|
c6c8806c3497cabe6e74dc09aa4487b6fd2057f7
|
[
"OpenSSL"
] | 130
|
2017-08-02T11:05:48.000Z
|
2022-01-29T04:24:48.000Z
|
tools/create_makefile.py
|
mehrdad-shokri/phxpaxos
|
c6c8806c3497cabe6e74dc09aa4487b6fd2057f7
|
[
"OpenSSL"
] | 580
|
2017-08-02T15:31:36.000Z
|
2022-03-16T14:09:01.000Z
|
import sys
import os
import pwd
import getpass
from build_comm import *
base_path=""
include_makefile=[]
makefile=[]
obj_list=[]
clean_dir=[]
lib_path="$(SRC_BASE_PATH)/.lib"
ext_lib_path="$(SRC_BASE_PATH)/.lib/extlib"
sbin_path="$(SRC_BASE_PATH)/.sbin"
def writefile(write_file, text):
write_file.append(text + "\n")
def Uniq(u_list, check_uniq=0):
if( check_uniq == 0 ):
for item in u_list:
while(u_list.count(item) > 1):
u_list.remove(item)
else:
res_list = []
for item in u_list:
if( res_list.count(item) == 0 ):
res_list.append( item )
u_list = res_list
return u_list
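# Illustrative behaviour of Uniq (values made up; the import path assumes
# this script is importable as a module):
#
#   from create_makefile import Uniq
#   Uniq(["a", "b", "a", "c"])      # in-place dedup   -> ['b', 'a', 'c']
#   Uniq(["a", "b", "a", "c"], 1)   # first-seen order -> ['a', 'b', 'c']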
res_list={}
makefile=None
makefile_name = "Makefile"
def GetSourceTagFromDeps(path, lib_name, tag_name, check_uniq=0):
define_name = GetLableName(lib_name, tag_name)
lib_define_name = GetLableName(lib_name, "LIB")
res = []
if(path.find("third_party") >=0):
return res
if((path,lib_name,tag_name) in res_list):
return res_list[ (path,lib_name,tag_name) ]
lib_path = base_path + "/" + path + "/" + include_makefile_name
makefile_file = open(lib_path, "r")
try:
lines = makefile_file.readlines()
for line in lines:
values=line.split('=')
find_local_res = []
find_global_res = []
if(values[0] == define_name):
value = values[1].replace('\n', '').split(' ')
for obj in value:
if(len(obj) > 0):
if(tag_name == "OBJ"):
find_local_res.append("%s/%s" % (path, obj))
elif(tag_name == "LIB"):
if(len(obj.split(':')) == 1):
find_local_res.append(obj)
else:
find_local_res.append(obj)
if(values[0] == lib_define_name):
res_inc = values[1].replace('\n','')
dep_lib_list = res_inc.split(' ')
for dep_lib in dep_lib_list:
if(len(dep_lib.split(':')) > 1):
deps_path = dep_lib.split(':')[0]
deps_lib_name = dep_lib.split(':')[1]
if(deps_path == ""):
deps_path = path
find_global_res+=GetSourceTagFromDeps(deps_path, deps_lib_name, tag_name, check_uniq)
if( check_uniq == 0 ):
res+= find_local_res
res+= find_global_res
else:
res+= find_global_res
res+= find_local_res
finally:
makefile_file.close()
if( tag_name == "FULL_LIB_DEPS_PATH" ):
res.append( "$(SRC_BASE_PATH)/%s" % path )
res=Uniq(res, check_uniq)
res_list[ (path,lib_name,tag_name) ] = res
return res
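# Illustrative parse of one <NAME>_LIB dependency entry, matching the
# split(':') handling above (path and name are made up):
#
#   dep_lib = "comm/logger:logger"        # "<relative path>:<library name>"
#   deps_path = dep_lib.split(':')[0]     # -> "comm/logger"
#   deps_lib_name = dep_lib.split(':')[1] # -> "logger"
#   # an empty path part means "same directory as the current makefile"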
def PrintComm(path, target_name, lib_name):
inc_res=GetSourceTagFromDeps(path, lib_name, "INCS")
cppflags_res=GetSourceTagFromDeps(path, lib_name, "EXTRA_CPPFLAGS")
full_lib_path_res=GetSourceTagFromDeps(path, lib_name, "FULL_LIB_DEPS_PATH", 1)
    # filter a copy: removing from a list while iterating over it skips
    # elements, and the loop variable would shadow the 'path' parameter
    full_lib_path_res = [p for p in full_lib_path_res
                         if p.find("third_party") == -1]
obj_name = "%s_%s" % (lib_name.upper(), "SRC")
inc_name = "%s_%s" % (lib_name.upper(), "INCS")
full_lib_path_name = "%s_%s" % (lib_name.upper(), "FULL_LIB_PATH")
extra_cpp_flag_name = "%s_%s" % (lib_name.upper(), "EXTRA_CPPFLAGS")
makefile.write("%s=$(%s)\n" % (obj_name, GetLableName(lib_name, "OBJ")))
makefile.write("%s=$(sort %s)\n" % (inc_name, ' '.join(inc_res)))
makefile.write("%s=%s\n" % (full_lib_path_name, ' '.join(full_lib_path_res)))
makefile.write("%s=%s\n\n" % (extra_cpp_flag_name,' '.join(cppflags_res)))
makefile.write("CPPFLAGS+=$(patsubst %%,-I%%, $(%s))\n" % inc_name)
makefile.write("CPPFLAGS+=$(%s)\n\n" % extra_cpp_flag_name)
return (obj_name,inc_name, full_lib_path_name, extra_cpp_flag_name)
def PrintReferenceDIR(target_name, direct_inc_name):
makefile.write("%s_dir:$(%s)\n" % (target_name, direct_inc_name))
makefile.write("\t@for dir in $^;\\\n")
makefile.write("\tdo \\\n")
makefile.write("\tcurrent_dir=`readlink $$dir -m`;\\\n");
makefile.write("\tpwd_dir=`pwd`;\\\n");
makefile.write("\tpwd_dir=`readlink $$pwd_dir -m`;\\\n");
makefile.write("\tif ([ \"$$current_dir\" != \"$$pwd_dir\" ]); then \\\n");
makefile.write("\tmake -C $$dir;\\\n");
makefile.write("\tfi;\\\n");
makefile.write("\tdone\n\n");
def PrintLib(path, target_name, lib_name, export = False):
(obj_name,inc_name, direct_inc_name, extra_cpp_flag_name) = PrintComm(path, target_name, lib_name)
if(export == True):
makefile.write("lib_%s: %s_dir %s/lib%s.a %s/lib%s.a\n\n" % (target_name, target_name, lib_path, target_name, ext_lib_path, target_name))
clean_dir.append( "%s/lib%s.a" % (lib_path, target_name))
clean_dir.append( "%s/lib%s.a" % (ext_lib_path, target_name))
PrintReferenceDIR(target_name, direct_inc_name)
src_name = GetLableName(lib_name, "LIB_OBJ")
makefile.write("%s/lib%s.a: $(%s)\n" % (lib_path, target_name, obj_name))
makefile.write("\tar -cvq $@ $(%s)\n\n" % (obj_name))
src_res=GetSourceTagFromDeps(path, lib_name, "OBJ")
makefile.write("%s=$(patsubst %%, $(SRC_BASE_PATH)/%%, %s)\n" % (src_name, ' '.join(src_res)))
makefile.write("%s/lib%s.a: $(%s)\n" % (ext_lib_path, target_name, src_name))
makefile.write("\tar -cvq $@ $(%s)\n\n" % (src_name))
else:
makefile.write("lib_%s:%s/lib%s.a\n\n" % (target_name, lib_path, target_name))
clean_dir.append( "%s/lib%s.a" % (lib_path, target_name))
makefile.write("%s/lib%s.a: $(%s)\n" % (lib_path, target_name, obj_name))
makefile.write("\tar -cvq $@ $(%s)\n\n" % (obj_name))
def PrintBin(path, target_name, lib_name):
(obj_name,inc_name, direct_inc_name, extra_cpp_flag_name) = PrintComm(path, target_name, lib_name)
link_name = "%s_%s" % (lib_name.upper(), "LINK")
sys_lib_name = "%s_%s" % (lib_name.upper(), "SYS_LIB")
link_res=GetSourceTagFromDeps(path, lib_name, "LIB")
sys_lib_res=GetSourceTagFromDeps(path, lib_name, "SYS_LIB")
makefile.write("%s=%s\n" % (link_name, ' '.join(link_res)))
makefile.write("%s=%s\n" % (sys_lib_name,' '.join(sys_lib_res)))
flag_key = GetLableName(lib_name, "FLAGS")
makefile.write("%s+=$(LDFLAGS)\n\n" % flag_key)
makefile.write("%s+=$(patsubst %%,-l%%, $(%s))\n" % (flag_key, link_name))
makefile.write("%s+=$(%s)\n" % (flag_key, sys_lib_name))
makefile.write("%s_bin:%s_dir %s\n\n" % (target_name, target_name, target_name))
PrintReferenceDIR(target_name, direct_inc_name)
makefile.write("%s:$(%s)\n" % (target_name, obj_name))
makefile.write("\t$(CXX) $^ -o $@ $(%s)\n" % flag_key)
makefile.write("\tcp $@ %s/\n\n" % sbin_path)
clean_dir.append("%s" % target_name )
clean_dir.append("%s/%s" % (sbin_path, target_name))
def GetSubDirList(path):
sub_dir_list=[]
if( os.path.exists("%s/src_list" % path ) ):
sub_dir_file=open("%s/src_list" % path)
lines = sub_dir_file.readlines()
for sub_dir in lines:
sub_dir_list.append(sub_dir.strip())
sub_dir_file.close()
else:
sub_dir=os.listdir(path)
for dir in sub_dir:
if( os.path.isdir( "%s/%s" % ( path, dir ) ) and dir[0] != "." ):
sub_dir_list.append( dir )
return sub_dir_list
def PrintMakeAllSubDir(dir_list, clean_lib=False):
if( len(dir_list) > 0 ):
makefile.write( "SUBDIRS=%s\n\n" % ' '.join(dir_list) )
makefile.write( ".PHONY:sub_dir\n" );
makefile.write( "sub_dir:$(SUBDIRS)\n" );
makefile.write("\t@for sub_dir in $^; do \\\n");
makefile.write("\tmake -C $$sub_dir; \\\n");
makefile.write("\tdone\n\n");
makefile.write( ".PHONY:clean\n" )
makefile.write( "clean:$(SUBDIRS)\n" )
makefile.write("\t@for sub_dir in $^; do \\\n")
makefile.write("\tmake -C $$sub_dir clean;\\\n")
makefile.write("\tdone\n")
if(clean_lib):
makefile.write("\trm -f %s/*.a %s/*.a %s/*.a $(SRC_BASE_PATH)/lib/*.a\n" % (lib_path, ext_lib_path, sbin_path) );
makefile.write("\trm -rf *.o *.pb.* %s " % ' '.join(clean_dir));
else:
makefile.write("clean:\n")
if(clean_lib):
makefile.write("\trm -f %s/*.a %s/*.a %s/*.a $(SRC_BASE_PATH)/lib/*.a\n" % (lib_path, ext_lib_path, sbin_path) );
makefile.write("\trm -rf *.o *.pb.* %s " % ' '.join(clean_dir));
def Process(path, library_list, elibrary_list, binary_list):
for lib in library_list:
if(len(lib) == 0):
continue
PrintLib(path, lib, lib)
for lib in elibrary_list:
if(len(lib) == 0):
continue
PrintLib(path, lib, lib, True)
for lib in binary_list:
if(len(lib) == 0):
continue
PrintBin(path, lib, lib)
def CreateMakeFile(path):
global makefile
global base_path
target_list = []
library_list = []
elibrary_list = []
binary_list = []
lib_count = 0
makefile_define_path = "%s/%s" % (path, include_makefile_name)
makefile_path = "%s/%s" % (path, makefile_name)
makefile = open(makefile_path, "w");
makefile.write("SRC_BASE_PATH=%s\n\n" % base_path)
if(os.path.exists(makefile_define_path)):
define_makefile_file = open(makefile_define_path)
try:
lines = define_makefile_file.readlines()
for line in lines:
args = line.split('=')
if(len(args) > 1 and args[0] == "allobject"):
target_list = str.strip(args[1]).split(' ')
for target in target_list:
if(len(target) == 0):
continue
lib_count = lib_count+1
if(target[0:3] == "lib"):
library_list.append(target[3:-2])
elif(target[0:4] == "elib"):
elibrary_list.append(target[4:-2])
else:
binary_list.append(target)
        except Exception as e:
            print("failed to parse %s: %s" % (makefile_define_path, e))
finally:
define_makefile_file.close()
makefile.write("all:");
if( lib_count > 0 ):
for lib in library_list:
makefile.write("lib_%s " %(lib));
for lib in elibrary_list:
makefile.write("lib_%s " %(lib));
for lib in binary_list:
makefile.write("%s_bin " %(lib));
else:
makefile.write("sub_dir");
makefile.write("\n\n");
makefile.write("include $(SRC_BASE_PATH)/makefile.mk\n\n" )
if( lib_count > 0 ):
makefile.write("include %s\n" % include_makefile_name)
Process(path[len(base_path):], library_list, elibrary_list, binary_list)
sub_dir_list = GetSubDirList(path)
PrintMakeAllSubDir(sub_dir_list, path==base_path)
for target in elibrary_list:
makefile.write("lib%s.a %s/lib%s.a " % (target, lib_path,target));
for target in binary_list:
makefile.write("%s " % (target));
makefile.write("\n\n");
else:
sub_dir_list = GetSubDirList(path)
PrintMakeAllSubDir(sub_dir_list, path==base_path)
makefile.close()
current_dir=os.path.abspath(".")
if(__name__ == '__main__'):
base_path = sys.argv[1]
current_path = sys.argv[2]
if(current_path[0:len(base_path)] != base_path):
print("path error, base %s, current %s" % (base_path, current_path[0:len(base_path)]))
exit(0)
CreateMakeFile(current_path)
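# Typical invocation (paths are illustrative):
#   python tools/create_makefile.py /path/to/src /path/to/src/module
# which writes /path/to/src/module/Makefile with SRC_BASE_PATH=/path/to/src.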
| 31.02994
| 139
| 0.661521
|
| true
| true
|
7908f8598d94e85f7bd651d199599fbd62fdb5f7
| 623
|
py
|
Python
|
tests/wrangled_test.py
|
zack-murray/lambdata-zmurray
|
5c41af2f2159cc081b86077a8b24d9920609e08a
|
[
"MIT"
] | null | null | null |
tests/wrangled_test.py
|
zack-murray/lambdata-zmurray
|
5c41af2f2159cc081b86077a8b24d9920609e08a
|
[
"MIT"
] | null | null | null |
tests/wrangled_test.py
|
zack-murray/lambdata-zmurray
|
5c41af2f2159cc081b86077a8b24d9920609e08a
|
[
"MIT"
] | 1
|
2020-05-06T01:47:15.000Z
|
2020-05-06T01:47:15.000Z
|
import unittest
from my_lambdata.assignment1 import WrangledFrame
class TestWrangledFrame(unittest.TestCase):
def test_add_state_names(self):
        wf = WrangledFrame({"abbrev": ["CA", "CO", "CT", "DC", "TX"]})
        wf.add_state_names()
# ensure there is a "name" column
self.assertEqual(list(wf.columns), ['abbrev', 'name'])
# ensure the values of WF are specific classes/values
# (string, "California")
self.assertEqual(wf["name"][0], "California")
self.assertEqual(wf["abbrev"][0], "CA")
if __name__ == '__main__':
unittest.main()
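# To run this test from the repository root (module path assumed):
#   python -m unittest tests.wrangled_test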
| 27.086957
| 70
| 0.627608
|
| true
| true
|
7908fa0ca084705fd59f6d525bd1cd830f4d5cb5
| 652
|
py
|
Python
|
bandit/insecureCode.py
|
mir-dhaka/coding_playground
|
f20138e404c27e008b2902d4be2e9f9d4c25b11f
|
[
"Apache-2.0"
] | 2
|
2019-05-23T06:05:20.000Z
|
2019-11-17T01:35:45.000Z
|
bandit/insecureCode.py
|
mir-dhaka/coding_playground
|
f20138e404c27e008b2902d4be2e9f9d4c25b11f
|
[
"Apache-2.0"
] | 2
|
2020-08-29T12:29:11.000Z
|
2020-08-29T12:30:14.000Z
|
bandit/insecureCode.py
|
mir-dhaka/coding_playground
|
f20138e404c27e008b2902d4be2e9f9d4c25b11f
|
[
"Apache-2.0"
] | 2
|
2020-07-18T17:07:36.000Z
|
2021-12-06T02:21:15.000Z
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2018 damian <damian@C-DZ-E5500>
#
# Distributed under terms of the MIT license.
"""
"""
import subprocess
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import yaml
# Intentionally insecure patterns, kept for bandit to flag:
def read_file(filename):
    with open(filename) as f:
        # bandit B506: yaml.load() without an explicit Loader is unsafe
        data = yaml.load(f.read())
def run_command(cmd):
    # bandit B602: shell=True exposes the command to shell injection
    return subprocess.check_call(cmd, shell=True)
db = create_engine('sqlite:///somedatabase')
Session = sessionmaker(bind=db)
def get_user(uid):
    session = Session()
    # bandit B608: string-formatted SQL is open to injection
    query = "select * from user where id='%s'" % uid
    return session.execute(query)
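# For contrast, safe counterparts to the patterns above -- a sketch only,
# not part of the bandit fixture (the *_safe names are made up):
from sqlalchemy import text

def read_file_safe(filename):
    with open(filename) as f:
        # safe_load refuses to construct arbitrary Python objects
        return yaml.safe_load(f.read())

def run_command_safe(cmd_args):
    # pass an argv list and drop shell=True to avoid shell injection
    return subprocess.check_call(cmd_args)

def get_user_safe(uid):
    session = Session()
    # bound parameters keep user input out of the SQL text
    return session.execute(text("select * from user where id = :uid"),
                           {"uid": uid})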
| 19.757576
| 52
| 0.691718
|
| true
| true
|
7908fb3364f621064e1037dcfd71331b065775e4
| 935
|
py
|
Python
|
llvm/test/MC/COFF/bigobj.py
|
medismailben/llvm-project
|
e334a839032fe500c3bba22bf976ab7af13ce1c1
|
[
"Apache-2.0"
] | 4,812
|
2015-01-02T19:38:10.000Z
|
2022-03-27T12:42:24.000Z
|
llvm/test/MC/COFF/bigobj.py
|
medismailben/llvm-project
|
e334a839032fe500c3bba22bf976ab7af13ce1c1
|
[
"Apache-2.0"
] | 127
|
2015-12-03T21:42:53.000Z
|
2019-11-21T14:34:20.000Z
|
llvm/test/MC/COFF/bigobj.py
|
medismailben/llvm-project
|
e334a839032fe500c3bba22bf976ab7af13ce1c1
|
[
"Apache-2.0"
] | 2,543
|
2015-01-01T11:18:36.000Z
|
2022-03-22T21:32:36.000Z
|
# RUN: python %s | llvm-mc -filetype=obj -triple i686-pc-win32 - | llvm-readobj -h | FileCheck %s
from __future__ import print_function
# This test checks that the COFF object emitter can produce objects with
# more than 65279 sections.
# While we only generate 65277 sections, an implicit .text, .data and .bss will
# also be emitted. This brings the total to 65280.
num_sections = 65277
# CHECK: ImageFileHeader {
# CHECK-NEXT: Machine: IMAGE_FILE_MACHINE_I386
# CHECK-NEXT: SectionCount: 65280
# CHECK-NEXT: TimeDateStamp: {{[0-9]+}}
# CHECK-NEXT: PointerToSymbolTable: 0x{{[0-9A-F]+}}
# CHECK-NEXT: SymbolCount: 195837
# CHECK-NEXT: OptionalHeaderSize: 0
# CHECK-NEXT: Characteristics [ (0x0)
# CHECK-NEXT: ]
# CHECK-NEXT: }
for i in range(0, num_sections):
print(""" .section .bss,"bw",discard,_b%d
.globl _b%d # @b%d
_b%d:
.byte 0 # 0x0
""" % (i, i, i, i))
| 32.241379
| 97
| 0.656684
|
| true
| true
|
7908fc415dfc1cd7d22cd6fb320f53e39b4e70d5
| 3,401
|
py
|
Python
|
jenkins_client.py
|
hummerstudio/jenkinsclient
|
d8cbdab84d837a45c644a67e1072778f41bb89a5
|
[
"MulanPSL-1.0"
] | 1
|
2021-10-05T16:34:05.000Z
|
2021-10-05T16:34:05.000Z
|
jenkins_client.py
|
hummerstudio/jenkinsclient
|
d8cbdab84d837a45c644a67e1072778f41bb89a5
|
[
"MulanPSL-1.0"
] | 1
|
2020-10-16T07:56:44.000Z
|
2021-05-16T06:14:23.000Z
|
jenkins_client.py
|
hummerstudio/jenkinsclient
|
d8cbdab84d837a45c644a67e1072778f41bb89a5
|
[
"MulanPSL-1.0"
] | 1
|
2021-10-05T16:34:14.000Z
|
2021-10-05T16:34:14.000Z
|
"""
Copyright (c) 2020 Tang Ming
jenkinsclient is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
import locale
import os
import fire
from jenkinsclient.core import Core
from jenkinsclient.credentials import Credentials
from jenkinsclient.executor import Executor
from jenkinsclient.build import Build
from jenkinsclient.config import Config
from jenkinsclient.job import Job
from jenkinsclient.node import Node
from jenkinsclient.plugin import Plugin
from jenkinsclient import jenkins_server
from jenkinsclient.queue import Queue
class JenkinsClient(object):
"""A powerful cross-platform Jenkins command line client"""
def __init__(self):
self.build = Build()
self.config = Config()
self.core = Core()
self.cred = Credentials()
self.executor = Executor()
self.job = Job()
self.node = Node()
self.plugin = Plugin()
self.queue = Queue()
def app(self):
"""app mode, operating jenkins in a window """
try:
__import__('webview')
except ModuleNotFoundError:
            # "APP mode is experimental; the pywebview module is required
            # and will be installed for you automatically."
            print('APP模式为实验性功能,需要使用pywebview模块,将自动为你安装')
            return_code = os.system('pip3 install --quiet pywebview')
            if return_code != 0:
                print('自动安装pywebview失败!')  # "automatic pywebview install failed!"
                exit(1)
            else:
                print('自动安装pywebview成功')  # "automatic pywebview install succeeded"
import webview
url = jenkins_server.get_blue_url()
webview.create_window('Jenkins', url=url, width=1024, height=768, confirm_close=True, text_select=True)
webview.start()
def creds(self):
"""List Credentials"""
return Credentials().ls()
def jobs(self):
"""List jobs"""
return Job().ls()
def nodes(self):
"""List nodes"""
return Node().ls()
def plugins(self):
"""List plugins"""
return Plugin().ls()
def queues(self):
"""List queues"""
return Queue().ls()
def version(self):
"""Display Jenkins server version"""
server = jenkins_server.get_jenkins_server(type='jenkinsapi')
version = server.version
return 'Jenkins server version: %s' % version
def whoami(self):
"""
Display who am i
"""
server = jenkins_server.get_jenkins_server()
try:
i = server.get_whoami()
        except Exception:
            # "operation failed: could not connect to the server"
            return '操作失败:连接服务器失败'
return i['fullName']
# Localized (zh_CN) help strings; these deliberately stay in Chinese so
# the CLI help renders in the user's locale.
if locale.getdefaultlocale()[0] == 'zh_CN':
JenkinsClient.__doc__ = '功能强大的跨平台Jenkins命令行客户端'
JenkinsClient.app.__doc__ = 'APP模式——在独立窗口中操作Jenkins'
JenkinsClient.creds.__doc__ = '显示凭据列表'
JenkinsClient.jobs.__doc__ = '显示任务列表'
JenkinsClient.nodes.__doc__ = '显示节点列表'
JenkinsClient.plugins.__doc__ = '显示插件列表'
JenkinsClient.queues.__doc__ = '显示队列'
JenkinsClient.version.__doc__ = '显示Jenkins服务器版本号'
JenkinsClient.whoami.__doc__ = '显示当前用户'
def main():
fire.Fire(JenkinsClient)
if __name__ == '__main__':
main()
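# How fire.Fire maps the class onto the CLI -- direct equivalents of the
# shell commands (the installed entry-point name is an assumption):
#   jenkinsclient version   ->  JenkinsClient().version()
#   jenkinsclient jobs      ->  JenkinsClient().jobs()
#   jenkinsclient job <cmd> ->  dispatches into the Job() component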
| 29.833333
| 201
| 0.648339
|
| true
| true
|
7908fcfdc28bc6f848ba082bddc54e43af6b8532
| 20,002
|
py
|
Python
|
mutation_origin/cli.py
|
Phuong-Le/mutationorigin
|
fad00fca3c1073637ede2c6948f5278a030971dc
|
[
"BSD-3-Clause"
] | 1
|
2020-01-10T02:46:06.000Z
|
2020-01-10T02:46:06.000Z
|
mutation_origin/cli.py
|
Phuong-Le/mutationorigin
|
fad00fca3c1073637ede2c6948f5278a030971dc
|
[
"BSD-3-Clause"
] | 1
|
2019-10-16T01:12:45.000Z
|
2019-10-16T01:12:45.000Z
|
mutation_origin/cli.py
|
Phuong-Le/mutationorigin
|
fad00fca3c1073637ede2c6948f5278a030971dc
|
[
"BSD-3-Clause"
] | 3
|
2020-02-10T02:00:16.000Z
|
2021-01-14T02:14:03.000Z
|
"""command line interface for mutation_origin"""
import os
import time
import pickle
from collections import defaultdict
import click
from tqdm import tqdm
import pandas
from numpy import log
from numpy.random import seed as np_seed
from scitrack import CachingLogger
from sklearn.model_selection import train_test_split
from mutation_origin.opt import (_seed, _feature_dim, _enu_path,
_germline_path, _output_path, _flank_size,
_train_size, _enu_ratio,
_numreps, _label_col, _proximal, _usegc,
_training_path, _c_values, _penalty_options,
_n_jobs, _classifier_path, _data_path,
_predictions_path, _alpha_options,
_overwrite, _verbose, _class_prior,
_strategy, _score)
from mutation_origin.preprocess import data_to_numeric
from mutation_origin.encoder import (get_scaler, inverse_transform_response,
transform_response)
from mutation_origin.classify import (logistic_regression, one_class_svm,
predict_origin, naive_bayes, xgboost)
from mutation_origin.util import (dump_json, load_predictions,
get_basename, get_classifier_label,
get_enu_germline_sizes, iter_indices,
load_classifier, open_)
from mutation_origin.postprocess import measure_performance
__author__ = "Gavin Huttley"
__copyright__ = "Copyright 2014, Gavin Huttley"
__credits__ = ["Yicheng Zhu", "Cheng Soon Ong", "Gavin Huttley"]
__license__ = "BSD"
__version__ = "0.3"
__maintainer__ = "Gavin Huttley"
__email__ = "Gavin.Huttley@anu.edu.au"
__status__ = "Development"
LOGGER = CachingLogger()
@click.group()
def main():
"""mutori -- for building and applying classifiers of mutation origin"""
pass
@main.command()
@_seed
@_enu_path
@_germline_path
@_output_path
@_train_size
@_enu_ratio
@_numreps
@_overwrite
def sample_data(enu_path, germline_path, output_path, seed,
train_size,
enu_ratio, numreps, overwrite):
"""creates train/test sample data"""
if seed is None:
seed = int(time.time())
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
# set the random number seed
np_seed(seed)
start_time = time.time()
os.makedirs(output_path, exist_ok=True)
logfile_path = os.path.join(output_path, "logs/data_sampling.log")
if os.path.exists(logfile_path) and not overwrite:
click.secho(f"Exists: {logfile_path}! use overwrite to force.",
fg='red')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(enu_path)
LOGGER.input_file(germline_path)
enu = pandas.read_csv(enu_path, sep="\t", header=0)
germline = pandas.read_csv(germline_path, sep="\t", header=0)
train_size = train_size // 2
test_size = train_size
train_enu_ratio, test_enu_ratio = enu_ratio
enu_train_size, germ_train_size = get_enu_germline_sizes(train_size,
train_enu_ratio)
enu_test_size, germ_test_size = get_enu_germline_sizes(test_size,
test_enu_ratio)
assert min(enu_train_size, germ_train_size,
enu_test_size, germ_test_size) > 0
if (2 * train_size > enu.shape[0] or
2 * train_size > germline.shape[0]):
print(f"ENU data set size: {enu.shape[0]}")
print(f"Germline data set size: {germline.shape[0]}")
print(f"Train set size: {train_size}")
raise ValueError("2 x train size exceeds"
" size of training data source(s)")
for rep in range(numreps):
test_outpath = os.path.join(output_path, f"test-{rep}.tsv.gz")
train_outpath = os.path.join(output_path, f"train-{rep}.tsv.gz")
enu_training, enu_testing = train_test_split(
enu,
test_size=enu_test_size,
train_size=enu_train_size)
germ_training, germ_testing = train_test_split(
germline,
test_size=germ_test_size,
train_size=germ_train_size)
if any(map(lambda x: x.shape[0] == 0,
[enu_training, enu_testing, germ_training, germ_testing])):
raise RuntimeError("screw up in creating test/train set")
# concat the data frames
testing = pandas.concat([enu_testing, germ_testing])
training = pandas.concat([enu_training, germ_training])
# write out, separately, the ENU and Germline data for train and test
testing.to_csv(test_outpath, index=False,
sep="\t", compression='gzip')
training.to_csv(train_outpath, index=False,
sep="\t", compression='gzip')
LOGGER.output_file(test_outpath)
LOGGER.output_file(train_outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
@main.command()
@_training_path
@_output_path
@_label_col
@_seed
@_score
@_flank_size
@_feature_dim
@_proximal
@_usegc
@_c_values
@_penalty_options
@_n_jobs
@_overwrite
@_verbose
def lr_train(training_path, output_path, label_col, seed, scoring,
flank_size, feature_dim, proximal,
usegc, c_values, penalty_options, n_jobs, overwrite, verbose):
"""logistic regression training, validation, dumps optimal model"""
if not seed:
seed = int(time.time())
np_seed(seed)
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
os.makedirs(output_path, exist_ok=True)
basename = get_basename(training_path)
outpath = os.path.join(output_path, f"{basename}-classifier-lr.pkl.gz")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"use overwrite to force.",
fg='green')
return
logfile_path = os.path.join(output_path,
f"logs/{basename}-training-lr.log")
LOGGER.log_file_path = logfile_path
LOGGER.input_file(training_path)
start_time = time.time()
_, resp, feat, n_dims, names = data_to_numeric(training_path,
label_col, flank_size,
feature_dim, proximal,
usegc)
if usegc:
# we need to scale the data
scaler = get_scaler(feat)
feat = scaler.transform(feat)
classifier = logistic_regression(feat, resp, seed, scoring,
c_values,
penalty_options.split(","), n_jobs)
betas = dict(zip(names, classifier.best_estimator_.coef_.tolist()[0]))
result = dict(classifier=classifier.best_estimator_, betas=betas,
scoring=scoring)
result['feature_params'] = dict(feature_dim=feature_dim,
flank_size=flank_size, proximal=proximal,
usegc=usegc)
if usegc:
result['scaler'] = scaler
    with open_(outpath, 'wb') as clf_file:
pickle.dump(result, clf_file)
LOGGER.output_file(outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
@main.command()
@_training_path
@_output_path
@_label_col
@_seed
@_score
@_flank_size
@_feature_dim
@_proximal
@_usegc
@_alpha_options
@_class_prior
@_n_jobs
@_overwrite
@_verbose
def nb_train(training_path, output_path, label_col, seed, scoring,
flank_size, feature_dim, proximal,
usegc, alpha_options, class_prior, n_jobs, overwrite, verbose):
"""Naive Bayes training, validation, dumps optimal model"""
if not seed:
seed = int(time.time())
np_seed(seed)
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
os.makedirs(output_path, exist_ok=True)
basename = get_basename(training_path)
outpath = os.path.join(output_path, f"{basename}-classifier-nb.pkl.gz")
logfile_path = os.path.join(output_path,
f"logs/{basename}-training-nb.log")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"use overwrite to force.",
fg='green')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(training_path)
start_time = time.time()
if class_prior is not None:
class_labels = list(class_prior)
encoded = transform_response(class_labels)
ordered = sorted(zip(encoded, class_labels))
class_prior = [class_prior[l] for _, l in ordered]
_, resp, feat, n_dims, names = data_to_numeric(training_path,
label_col, flank_size,
feature_dim, proximal,
usegc)
if usegc:
# we need to scale the data
scaler = get_scaler(feat)
feat = scaler.transform(feat)
classifier = naive_bayes(feat, resp, seed, alpha_options, scoring,
class_prior=class_prior, n_jobs=n_jobs)
betas = dict(zip(names, classifier.best_estimator_.coef_.tolist()[0]))
result = dict(classifier=classifier.best_estimator_, betas=betas,
scoring=scoring)
result['feature_params'] = dict(feature_dim=feature_dim,
flank_size=flank_size, proximal=proximal,
usegc=usegc)
if usegc:
result['scaler'] = scaler
with open_(outpath, 'wb') as clf_file:
pickle.dump(result, clf_file)
LOGGER.output_file(outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
@main.command()
@_training_path
@_output_path
@_label_col
@_seed
@_flank_size
@_feature_dim
@_proximal
@_usegc
@_strategy
@_n_jobs
@_overwrite
@_verbose
def xgboost_train(training_path, output_path, label_col, seed,
flank_size, feature_dim, proximal,
usegc, strategy, n_jobs, overwrite, verbose):
"""Naive Bayes training, validation, dumps optimal model"""
if not seed:
seed = int(time.time())
np_seed(seed)
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
os.makedirs(output_path, exist_ok=True)
basename = get_basename(training_path)
outpath = os.path.join(output_path, f"{basename}-classifier-xgb.pkl.gz")
logfile_path = os.path.join(output_path,
f"logs/{basename}-training-xgb.log")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"use overwrite to force.",
fg='green')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(training_path)
start_time = time.time()
_, resp, feat, n_dims, names = data_to_numeric(training_path,
label_col, flank_size,
feature_dim, proximal,
usegc)
    # recode responses so the -1 class becomes 0 (xgboost expects {0, 1})
resp = [v if v > 0 else 0 for v in resp]
if usegc:
# we need to scale the data
scaler = get_scaler(feat)
feat = scaler.transform(feat)
classifier = xgboost(feat, resp, seed, strategy, n_jobs, verbose)
result = dict(classifier=classifier)
result['feature_params'] = dict(feature_dim=feature_dim,
flank_size=flank_size, proximal=proximal,
usegc=usegc)
if usegc:
result['scaler'] = scaler
    with open_(outpath, 'wb') as clf_file:
pickle.dump(result, clf_file)
LOGGER.output_file(outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
@main.command()
@_training_path
@_output_path
@_label_col
@_seed
@_flank_size
@_feature_dim
@_proximal
@_usegc
@_overwrite
@_verbose
def ocs_train(training_path, output_path, label_col, seed,
flank_size, feature_dim, proximal, usegc, overwrite, verbose):
"""one-class svm training for outlier detection"""
    if seed is None:
        seed = int(time.time())
    np_seed(seed)
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
start_time = time.time()
os.makedirs(output_path, exist_ok=True)
basename = get_basename(training_path)
outpath = os.path.join(output_path, f"{basename}-classifier-ocs.pkl.gz")
logfile_path = os.path.join(output_path,
f"logs/{basename}-training-ocs.log")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"use overwrite to force.",
fg='green')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(training_path)
start_time = time.time()
_, _, feat, n_dims, names = data_to_numeric(training_path,
label_col, flank_size,
feature_dim, proximal,
usegc=usegc,
one_class='g')
classifier = one_class_svm(feat, seed)
result = dict(classifier=classifier)
result['feature_params'] = dict(feature_dim=feature_dim,
flank_size=flank_size, proximal=proximal,
usegc=usegc)
    with open_(outpath, 'wb') as clf_file:
pickle.dump(result, clf_file)
LOGGER.output_file(outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
@main.command()
@_classifier_path
@_data_path
@_output_path
@_label_col
@_class_prior
@_overwrite
@_verbose
def predict(classifier_path, data_path, output_path, label_col, class_prior,
overwrite, verbose):
"""predict labels for data"""
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
classifier, feature_params, scaler = load_classifier(classifier_path)
class_label = get_classifier_label(classifier)
if class_prior is not None and class_label == 'lr':
# https://stats.stackexchange.com/questions/117592/logistic-regression-prior-correction-at-test-time
# based on above and King and Zeng, we adjust the intercept term such
# that it is incremented by ln(p(1) / p(-1)) where p(1) is the prior
# of a 1 label, p(-1)=1-p(1)
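        # e.g. (hypothetical priors) with class_prior {'e': 0.25, 'g': 0.75}
        # and 'e' encoded as the -1 class, the intercept is shifted by
        # log(0.75 / 0.25) ~= 1.0986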
class_labels = list(class_prior)
encoded = transform_response(class_labels)
ordered = sorted(zip(encoded, class_labels))
if 'e' in ordered[0]:
adj = log(class_prior['g'] / class_prior['e'])
else:
adj = log(class_prior['e'] / class_prior['g'])
classifier.intercept_ += adj
basename_class = get_basename(classifier_path)
basename_data = get_basename(data_path)
basename = f"{basename_class}-{basename_data}"
outpath = os.path.join(
output_path,
f"{basename}-predicted-{class_label}.json.gz")
os.makedirs(output_path, exist_ok=True)
logfile_path = os.path.join(output_path,
f"logs/{basename}-predict-{class_label}.log")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"use overwrite to force.",
fg='green')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(classifier_path)
LOGGER.input_file(data_path)
start_time = time.time()
    # NB and XGBoost expose predict_proba rather than decision_function
if class_label in ("nb", "xgb"):
classifier.decision_function = classifier.predict_proba
fulldata = pandas.read_csv(data_path, sep='\t')
result = {}
result['feature_params'] = feature_params
result['classifier_label'] = class_label
result['classifier_path'] = classifier_path
result['predictions'] = defaultdict(list)
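    # predict in blocks of 2000 rows to bound memory use; tqdm tracks progress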
total = fulldata.shape[0] // 2000
pbar = tqdm(iter_indices(
fulldata.shape[0], block_size=2000), ncols=80, total=total)
for indices in pbar:
data = fulldata.iloc[indices]
ids, resp, feat, n_dims, names = data_to_numeric(data,
label_col=label_col,
**feature_params)
if scaler:
feat = scaler.transform(feat)
predictions, scores = predict_origin(classifier, feat)
if class_label in ("nb", "xgb"):
            # each `score` is the probability of belonging to each class;
            # keep only the probability of the positive (second) class
scores = scores[:, 1].tolist()
elif class_label == 'ocs':
scores = scores[:, 0].tolist()
predictions = inverse_transform_response(predictions)
result['predictions']['varid'].extend(list(ids))
result['predictions']['predicted'].extend(list(predictions))
result['predictions']['scores'].extend(list(scores))
dump_json(outpath, result)
LOGGER.output_file(outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
# def performance -> produces summary stats on trained classifiers
# requires input data and the predicted results
@main.command()
@_data_path
@_predictions_path
@_output_path
@_label_col
@_overwrite
@_verbose
def performance(data_path, predictions_path, output_path, label_col,
overwrite, verbose):
"""produce measures of classifier performance"""
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
    if not (data_path and predictions_path):
        click.secho("Need both the data and predictions paths!", fg="red")
        exit()
basename_train = get_basename(data_path)
basename_pred = get_basename(predictions_path)
basename = f"{basename_train}-{basename_pred}"
outpath = os.path.join(
output_path,
f"{basename}-performance.json.gz")
logfile_path = os.path.join(output_path,
f"logs/{basename}-performance.log")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"Use overwrite to force.",
fg='green')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(data_path)
LOGGER.input_file(predictions_path)
orig = pandas.read_csv(data_path, sep="\t")
predicted, feature_params, classifier_path, label =\
load_predictions(predictions_path)
result = measure_performance(orig, predicted,
label_col)
result["feature_params"] = feature_params
result["classifier_path"] = classifier_path
result["classifier_label"] = label
    dump_json(outpath, result)
    LOGGER.output_file(outpath)
    LOGGER.shutdown()
if __name__ == "__main__":
main()
| 35.40177
| 108
| 0.606739
|
import os
import time
import pickle
from collections import defaultdict
import click
from tqdm import tqdm
import pandas
from numpy import log
from numpy.random import seed as np_seed
from scitrack import CachingLogger
from sklearn.model_selection import train_test_split
from mutation_origin.opt import (_seed, _feature_dim, _enu_path,
_germline_path, _output_path, _flank_size,
_train_size, _enu_ratio,
_numreps, _label_col, _proximal, _usegc,
_training_path, _c_values, _penalty_options,
_n_jobs, _classifier_path, _data_path,
_predictions_path, _alpha_options,
_overwrite, _verbose, _class_prior,
_strategy, _score)
from mutation_origin.preprocess import data_to_numeric
from mutation_origin.encoder import (get_scaler, inverse_transform_response,
transform_response)
from mutation_origin.classify import (logistic_regression, one_class_svm,
predict_origin, naive_bayes, xgboost)
from mutation_origin.util import (dump_json, load_predictions,
get_basename, get_classifier_label,
get_enu_germline_sizes, iter_indices,
load_classifier, open_)
from mutation_origin.postprocess import measure_performance
__author__ = "Gavin Huttley"
__copyright__ = "Copyright 2014, Gavin Huttley"
__credits__ = ["Yicheng Zhu", "Cheng Soon Ong", "Gavin Huttley"]
__license__ = "BSD"
__version__ = "0.3"
__maintainer__ = "Gavin Huttley"
__email__ = "Gavin.Huttley@anu.edu.au"
__status__ = "Development"
LOGGER = CachingLogger()
@click.group()
def main():
pass
@main.command()
@_seed
@_enu_path
@_germline_path
@_output_path
@_train_size
@_enu_ratio
@_numreps
@_overwrite
def sample_data(enu_path, germline_path, output_path, seed,
train_size,
enu_ratio, numreps, overwrite):
if seed is None:
seed = int(time.time())
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
np_seed(seed)
start_time = time.time()
os.makedirs(output_path, exist_ok=True)
logfile_path = os.path.join(output_path, "logs/data_sampling.log")
if os.path.exists(logfile_path) and not overwrite:
click.secho(f"Exists: {logfile_path}! use overwrite to force.",
fg='red')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(enu_path)
LOGGER.input_file(germline_path)
enu = pandas.read_csv(enu_path, sep="\t", header=0)
germline = pandas.read_csv(germline_path, sep="\t", header=0)
train_size = train_size // 2
test_size = train_size
train_enu_ratio, test_enu_ratio = enu_ratio
enu_train_size, germ_train_size = get_enu_germline_sizes(train_size,
train_enu_ratio)
enu_test_size, germ_test_size = get_enu_germline_sizes(test_size,
test_enu_ratio)
assert min(enu_train_size, germ_train_size,
enu_test_size, germ_test_size) > 0
if (2 * train_size > enu.shape[0] or
2 * train_size > germline.shape[0]):
print(f"ENU data set size: {enu.shape[0]}")
print(f"Germline data set size: {germline.shape[0]}")
print(f"Train set size: {train_size}")
raise ValueError("2 x train size exceeds"
" size of training data source(s)")
for rep in range(numreps):
test_outpath = os.path.join(output_path, f"test-{rep}.tsv.gz")
train_outpath = os.path.join(output_path, f"train-{rep}.tsv.gz")
enu_training, enu_testing = train_test_split(
enu,
test_size=enu_test_size,
train_size=enu_train_size)
germ_training, germ_testing = train_test_split(
germline,
test_size=germ_test_size,
train_size=germ_train_size)
if any(map(lambda x: x.shape[0] == 0,
[enu_training, enu_testing, germ_training, germ_testing])):
raise RuntimeError("screw up in creating test/train set")
testing = pandas.concat([enu_testing, germ_testing])
training = pandas.concat([enu_training, germ_training])
testing.to_csv(test_outpath, index=False,
sep="\t", compression='gzip')
training.to_csv(train_outpath, index=False,
sep="\t", compression='gzip')
LOGGER.output_file(test_outpath)
LOGGER.output_file(train_outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
@main.command()
@_training_path
@_output_path
@_label_col
@_seed
@_score
@_flank_size
@_feature_dim
@_proximal
@_usegc
@_c_values
@_penalty_options
@_n_jobs
@_overwrite
@_verbose
def lr_train(training_path, output_path, label_col, seed, scoring,
flank_size, feature_dim, proximal,
usegc, c_values, penalty_options, n_jobs, overwrite, verbose):
if not seed:
seed = int(time.time())
np_seed(seed)
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
os.makedirs(output_path, exist_ok=True)
basename = get_basename(training_path)
outpath = os.path.join(output_path, f"{basename}-classifier-lr.pkl.gz")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"use overwrite to force.",
fg='green')
return
logfile_path = os.path.join(output_path,
f"logs/{basename}-training-lr.log")
LOGGER.log_file_path = logfile_path
LOGGER.input_file(training_path)
start_time = time.time()
_, resp, feat, n_dims, names = data_to_numeric(training_path,
label_col, flank_size,
feature_dim, proximal,
usegc)
if usegc:
scaler = get_scaler(feat)
feat = scaler.transform(feat)
classifier = logistic_regression(feat, resp, seed, scoring,
c_values,
penalty_options.split(","), n_jobs)
betas = dict(zip(names, classifier.best_estimator_.coef_.tolist()[0]))
result = dict(classifier=classifier.best_estimator_, betas=betas,
scoring=scoring)
result['feature_params'] = dict(feature_dim=feature_dim,
flank_size=flank_size, proximal=proximal,
usegc=usegc)
if usegc:
result['scaler'] = scaler
    with open_(outpath, 'wb') as clf_file:
pickle.dump(result, clf_file)
LOGGER.output_file(outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
@main.command()
@_training_path
@_output_path
@_label_col
@_seed
@_score
@_flank_size
@_feature_dim
@_proximal
@_usegc
@_alpha_options
@_class_prior
@_n_jobs
@_overwrite
@_verbose
def nb_train(training_path, output_path, label_col, seed, scoring,
flank_size, feature_dim, proximal,
usegc, alpha_options, class_prior, n_jobs, overwrite, verbose):
if not seed:
seed = int(time.time())
np_seed(seed)
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
os.makedirs(output_path, exist_ok=True)
basename = get_basename(training_path)
outpath = os.path.join(output_path, f"{basename}-classifier-nb.pkl.gz")
logfile_path = os.path.join(output_path,
f"logs/{basename}-training-nb.log")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"use overwrite to force.",
fg='green')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(training_path)
start_time = time.time()
if class_prior is not None:
class_labels = list(class_prior)
encoded = transform_response(class_labels)
ordered = sorted(zip(encoded, class_labels))
class_prior = [class_prior[l] for _, l in ordered]
_, resp, feat, n_dims, names = data_to_numeric(training_path,
label_col, flank_size,
feature_dim, proximal,
usegc)
if usegc:
scaler = get_scaler(feat)
feat = scaler.transform(feat)
classifier = naive_bayes(feat, resp, seed, alpha_options, scoring,
class_prior=class_prior, n_jobs=n_jobs)
betas = dict(zip(names, classifier.best_estimator_.coef_.tolist()[0]))
result = dict(classifier=classifier.best_estimator_, betas=betas,
scoring=scoring)
result['feature_params'] = dict(feature_dim=feature_dim,
flank_size=flank_size, proximal=proximal,
usegc=usegc)
if usegc:
result['scaler'] = scaler
with open_(outpath, 'wb') as clf_file:
pickle.dump(result, clf_file)
LOGGER.output_file(outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
@main.command()
@_training_path
@_output_path
@_label_col
@_seed
@_flank_size
@_feature_dim
@_proximal
@_usegc
@_strategy
@_n_jobs
@_overwrite
@_verbose
def xgboost_train(training_path, output_path, label_col, seed,
flank_size, feature_dim, proximal,
usegc, strategy, n_jobs, overwrite, verbose):
if not seed:
seed = int(time.time())
np_seed(seed)
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
os.makedirs(output_path, exist_ok=True)
basename = get_basename(training_path)
outpath = os.path.join(output_path, f"{basename}-classifier-xgb.pkl.gz")
logfile_path = os.path.join(output_path,
f"logs/{basename}-training-xgb.log")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"use overwrite to force.",
fg='green')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(training_path)
start_time = time.time()
_, resp, feat, n_dims, names = data_to_numeric(training_path,
label_col, flank_size,
feature_dim, proximal,
usegc)
resp = [v if v > 0 else 0 for v in resp]
if usegc:
scaler = get_scaler(feat)
feat = scaler.transform(feat)
classifier = xgboost(feat, resp, seed, strategy, n_jobs, verbose)
result = dict(classifier=classifier)
result['feature_params'] = dict(feature_dim=feature_dim,
flank_size=flank_size, proximal=proximal,
usegc=usegc)
if usegc:
result['scaler'] = scaler
    with open_(outpath, 'wb') as clf_file:
pickle.dump(result, clf_file)
LOGGER.output_file(outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
@main.command()
@_training_path
@_output_path
@_label_col
@_seed
@_flank_size
@_feature_dim
@_proximal
@_usegc
@_overwrite
@_verbose
def ocs_train(training_path, output_path, label_col, seed,
flank_size, feature_dim, proximal, usegc, overwrite, verbose):
    if seed is None:
        seed = int(time.time())
    np_seed(seed)
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
start_time = time.time()
os.makedirs(output_path, exist_ok=True)
basename = get_basename(training_path)
outpath = os.path.join(output_path, f"{basename}-classifier-ocs.pkl.gz")
logfile_path = os.path.join(output_path,
f"logs/{basename}-training-ocs.log")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"use overwrite to force.",
fg='green')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(training_path)
start_time = time.time()
_, _, feat, n_dims, names = data_to_numeric(training_path,
label_col, flank_size,
feature_dim, proximal,
usegc=usegc,
one_class='g')
classifier = one_class_svm(feat, seed)
result = dict(classifier=classifier)
result['feature_params'] = dict(feature_dim=feature_dim,
flank_size=flank_size, proximal=proximal,
usegc=usegc)
    with open_(outpath, 'wb') as clf_file:
pickle.dump(result, clf_file)
LOGGER.output_file(outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
@main.command()
@_classifier_path
@_data_path
@_output_path
@_label_col
@_class_prior
@_overwrite
@_verbose
def predict(classifier_path, data_path, output_path, label_col, class_prior,
overwrite, verbose):
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
classifier, feature_params, scaler = load_classifier(classifier_path)
class_label = get_classifier_label(classifier)
if class_prior is not None and class_label == 'lr':
class_labels = list(class_prior)
encoded = transform_response(class_labels)
ordered = sorted(zip(encoded, class_labels))
if 'e' in ordered[0]:
adj = log(class_prior['g'] / class_prior['e'])
else:
adj = log(class_prior['e'] / class_prior['g'])
classifier.intercept_ += adj
basename_class = get_basename(classifier_path)
basename_data = get_basename(data_path)
basename = f"{basename_class}-{basename_data}"
outpath = os.path.join(
output_path,
f"{basename}-predicted-{class_label}.json.gz")
os.makedirs(output_path, exist_ok=True)
logfile_path = os.path.join(output_path,
f"logs/{basename}-predict-{class_label}.log")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"use overwrite to force.",
fg='green')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(classifier_path)
LOGGER.input_file(data_path)
start_time = time.time()
if class_label in ("nb", "xgb"):
classifier.decision_function = classifier.predict_proba
fulldata = pandas.read_csv(data_path, sep='\t')
result = {}
result['feature_params'] = feature_params
result['classifier_label'] = class_label
result['classifier_path'] = classifier_path
result['predictions'] = defaultdict(list)
total = fulldata.shape[0] // 2000
pbar = tqdm(iter_indices(
fulldata.shape[0], block_size=2000), ncols=80, total=total)
for indices in pbar:
data = fulldata.iloc[indices]
ids, resp, feat, n_dims, names = data_to_numeric(data,
label_col=label_col,
**feature_params)
if scaler:
feat = scaler.transform(feat)
predictions, scores = predict_origin(classifier, feat)
if class_label in ("nb", "xgb"):
            # keep only the probability of the positive (second) class
scores = scores[:, 1].tolist()
elif class_label == 'ocs':
scores = scores[:, 0].tolist()
predictions = inverse_transform_response(predictions)
result['predictions']['varid'].extend(list(ids))
result['predictions']['predicted'].extend(list(predictions))
result['predictions']['scores'].extend(list(scores))
dump_json(outpath, result)
LOGGER.output_file(outpath)
duration = time.time() - start_time
LOGGER.log_message("%.2f" % (duration / 60.),
label="run duration (minutes)")
LOGGER.shutdown()
# def performance -> produces summary stats on trained classifiers
# requires input data and the predicted results
@main.command()
@_data_path
@_predictions_path
@_output_path
@_label_col
@_overwrite
@_verbose
def performance(data_path, predictions_path, output_path, label_col,
overwrite, verbose):
LOGGER.log_args()
LOGGER.log_versions(['sklearn', 'numpy'])
    if not (data_path and predictions_path):
        click.secho("Need both the data and predictions paths!", fg="red")
        exit()
basename_train = get_basename(data_path)
basename_pred = get_basename(predictions_path)
basename = f"{basename_train}-{basename_pred}"
outpath = os.path.join(
output_path,
f"{basename}-performance.json.gz")
logfile_path = os.path.join(output_path,
f"logs/{basename}-performance.log")
if os.path.exists(outpath) and not overwrite:
if verbose > 1:
click.secho(f"Skipping. {outpath} exists. "
"Use overwrite to force.",
fg='green')
return
LOGGER.log_file_path = logfile_path
LOGGER.input_file(data_path)
LOGGER.input_file(predictions_path)
orig = pandas.read_csv(data_path, sep="\t")
predicted, feature_params, classifier_path, label =\
load_predictions(predictions_path)
result = measure_performance(orig, predicted,
label_col)
result["feature_params"] = feature_params
result["classifier_path"] = classifier_path
result["classifier_label"] = label
    dump_json(outpath, result)
    LOGGER.output_file(outpath)
    LOGGER.shutdown()
if __name__ == "__main__":
main()
| true
| true
|
7908fdd54a96c094be872a6f7be7d3858c5af6b3
| 7,750
|
py
|
Python
|
main.py
|
papaispicolo/CarNDT3-SemanticSegmentation
|
c1940c01769cbf03d7c28c3a72946e4bd9682d59
|
[
"MIT"
] | null | null | null |
main.py
|
papaispicolo/CarNDT3-SemanticSegmentation
|
c1940c01769cbf03d7c28c3a72946e4bd9682d59
|
[
"MIT"
] | null | null | null |
main.py
|
papaispicolo/CarNDT3-SemanticSegmentation
|
c1940c01769cbf03d7c28c3a72946e4bd9682d59
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import os.path
import tensorflow as tf
import helper
import warnings
from distutils.version import LooseVersion
import project_tests as tests
# Check TensorFlow Version
assert LooseVersion(tf.__version__) >= LooseVersion('1.0'), 'Please use TensorFlow version 1.0 or newer. You are using {}'.format(tf.__version__)
print('TensorFlow Version: {}'.format(tf.__version__))
# Check for a GPU
if not tf.test.gpu_device_name():
warnings.warn('No GPU found. Please use a GPU to train your neural network.')
else:
print('Default GPU Device: {}'.format(tf.test.gpu_device_name()))
def load_vgg(sess, vgg_path):
"""
Load Pretrained VGG Model into TensorFlow.
:param sess: TensorFlow Session
:param vgg_path: Path to vgg folder, containing "variables/" and "saved_model.pb"
:return: Tuple of Tensors from VGG model (image_input, keep_prob, layer3_out, layer4_out, layer7_out)
"""
# TODO: Implement function
# Use tf.saved_model.loader.load to load the model and weights
vgg_tag = 'vgg16'
tf.saved_model.loader.load(sess, [vgg_tag], vgg_path)
vgg_input_tensor_name = 'image_input:0'
vgg_keep_prob_tensor_name = 'keep_prob:0'
vgg_layer3_out_tensor_name = 'layer3_out:0'
vgg_layer4_out_tensor_name = 'layer4_out:0'
vgg_layer7_out_tensor_name = 'layer7_out:0'
graph = tf.get_default_graph()
input_img = graph.get_tensor_by_name(vgg_input_tensor_name)
prob = graph.get_tensor_by_name(vgg_keep_prob_tensor_name)
layer3_o = graph.get_tensor_by_name(vgg_layer3_out_tensor_name)
layer4_o = graph.get_tensor_by_name(vgg_layer4_out_tensor_name)
layer7_o = graph.get_tensor_by_name(vgg_layer7_out_tensor_name)
return input_img, prob, layer3_o, layer4_o, layer7_o
tests.test_load_vgg(load_vgg, tf)
def layers(vgg_layer3_out, vgg_layer4_out, vgg_layer7_out, num_classes):
"""
Create the layers for a fully convolutional network. Build skip-layers using the vgg layers.
:param vgg_layer3_out: TF Tensor for VGG Layer 3 output
:param vgg_layer4_out: TF Tensor for VGG Layer 4 output
:param vgg_layer7_out: TF Tensor for VGG Layer 7 output
:param num_classes: Number of classes to classify
:return: The Tensor for the last layer of output
"""
# TODO: Implement function
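    # Decoder (FCN-8s style): two 2x transpose convolutions with skip
    # connections from VGG pool4 and pool3, then a final 8x upsample
    # (32x total), undoing VGG16's 32x downsampling so the output mask
    # matches the input resolution.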
# 1x1 convolution layer with road / not-road features only
conv_1by1_l7 = tf.layers.conv2d(vgg_layer7_out, num_classes, 1, padding='SAME',
kernel_regularizer=tf.contrib.layers.l2_regularizer(1e-3))
# upscaling size/ add features
output = tf.layers.conv2d_transpose(conv_1by1_l7, 512, 4, strides=(2,2), padding='SAME',
kernel_regularizer=tf.contrib.layers.l2_regularizer(1e-3))
# skip connections / add to upscaled output
output = tf.add(output, vgg_layer4_out)
# upscaling size/ reduce features
output = tf.layers.conv2d_transpose(output, 256, 4, strides=(2,2), padding='SAME',
kernel_regularizer=tf.contrib.layers.l2_regularizer(1e-3))
# skip connections / add to upscaled output
output = tf.add(output, vgg_layer3_out)
# upscaling size/ reduce features to road OR not-road
output = tf.layers.conv2d_transpose(output, num_classes, 16, strides=(8,8), padding='SAME',
kernel_regularizer=tf.contrib.layers.l2_regularizer(1e-3), name='nn_final_output')
return output
tests.test_layers(layers)
def optimize(nn_last_layer, correct_label, learning_rate, num_classes):
"""
Build the TensorFLow loss and optimizer operations.
:param nn_last_layer: TF Tensor of the last layer in the neural network
:param correct_label: TF Placeholder for the correct label image
:param learning_rate: TF Placeholder for the learning rate
:param num_classes: Number of classes to classify
:return: Tuple of (logits, train_op, cross_entropy_loss)
"""
# TODO: Implement function
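    # flatten the 4D conv output to (num_pixels, num_classes) so a
    # standard per-pixel softmax cross-entropy can be applied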
logits = tf.reshape(nn_last_layer, (-1, num_classes))
# add loss function
cross_entropy_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=correct_label))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
# training_op
training_operation = optimizer.minimize(cross_entropy_loss)
return logits, training_operation, cross_entropy_loss
tests.test_optimize(optimize)
def train_nn(sess, epochs, batch_size, get_batches_fn, train_op, cross_entropy_loss, input_image,
correct_label, keep_prob, learning_rate):
"""
Train neural network and print out the loss during training.
:param sess: TF Session
:param epochs: Number of epochs
:param batch_size: Batch size
:param get_batches_fn: Function to get batches of training data. Call using get_batches_fn(batch_size)
:param train_op: TF Operation to train the neural network
:param cross_entropy_loss: TF Tensor for the amount of loss
:param input_image: TF Placeholder for input images
:param correct_label: TF Placeholder for label images
:param keep_prob: TF Placeholder for dropout keep probability
:param learning_rate: TF Placeholder for learning rate
"""
# TODO: Implement function
# initialize global variables
sess.run(tf.global_variables_initializer())
# going through the batches of images i.e. epoch
for epoch in range(epochs):
for (input_img, gt_img) in get_batches_fn(batch_size):
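            # fixed hyperparameters: dropout keep probability 0.7 and
            # Adam learning rate 5e-4, fed with every batch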
_, loss = sess.run([train_op, cross_entropy_loss], feed_dict={input_image: input_img,
correct_label: gt_img,
keep_prob: 0.7,
learning_rate: 5e-04})
print("Loss of {} at epoch {}/{}".format(loss, epoch, epochs))
tests.test_train_nn(train_nn)
def run():
num_classes = 2
image_shape = (160, 576) # KITTI dataset uses 160x576 images
data_dir = './data'
runs_dir = './runs'
tests.test_for_kitti_dataset(data_dir)
# Download pretrained vgg model
helper.maybe_download_pretrained_vgg(data_dir)
# OPTIONAL: Train and Inference on the cityscapes dataset instead of the Kitti dataset.
# You'll need a GPU with at least 10 teraFLOPS to train on.
# https://www.cityscapes-dataset.com/
epochs = 20
batch_size = 5
with tf.Session() as sess:
# Path to vgg model
vgg_path = os.path.join(data_dir, 'vgg')
# Create function to get batches
get_batches_fn = helper.gen_batch_function(os.path.join(data_dir, 'data_road/training'), image_shape)
# OPTIONAL: Augment Images for better results
# https://datascience.stackexchange.com/questions/5224/how-to-prepare-augment-images-for-neural-network
correct_label = tf.placeholder(tf.int32)
learning_rate = tf.placeholder(tf.float32)
# TODO: Build NN using load_vgg, layers, and optimize function
input_img, keep_prob, layer3_o, layer4_o, layer7_o = load_vgg(sess, vgg_path)
layer_output = layers(layer3_o, layer4_o, layer7_o, num_classes)
logits, train_op, cross_entropy_loss = optimize(layer_output, correct_label, learning_rate, num_classes)
# TODO: Train NN using the train_nn function
train_nn(sess, epochs, batch_size, get_batches_fn, train_op, cross_entropy_loss, input_img,
correct_label, keep_prob, learning_rate)
# TODO: Save inference data using helper.save_inference_samples
helper.save_inference_samples(runs_dir, data_dir, sess, image_shape, logits, keep_prob, input_img)
# OPTIONAL: Apply the trained model to a video
if __name__ == '__main__':
run()
| 41.891892
| 146
| 0.711613
|
import os.path
import tensorflow as tf
import helper
import warnings
from distutils.version import LooseVersion
import project_tests as tests
assert LooseVersion(tf.__version__) >= LooseVersion('1.0'), 'Please use TensorFlow version 1.0 or newer. You are using {}'.format(tf.__version__)
print('TensorFlow Version: {}'.format(tf.__version__))
if not tf.test.gpu_device_name():
warnings.warn('No GPU found. Please use a GPU to train your neural network.')
else:
print('Default GPU Device: {}'.format(tf.test.gpu_device_name()))
def load_vgg(sess, vgg_path):
vgg_tag = 'vgg16'
tf.saved_model.loader.load(sess, [vgg_tag], vgg_path)
vgg_input_tensor_name = 'image_input:0'
vgg_keep_prob_tensor_name = 'keep_prob:0'
vgg_layer3_out_tensor_name = 'layer3_out:0'
vgg_layer4_out_tensor_name = 'layer4_out:0'
vgg_layer7_out_tensor_name = 'layer7_out:0'
graph = tf.get_default_graph()
input_img = graph.get_tensor_by_name(vgg_input_tensor_name)
prob = graph.get_tensor_by_name(vgg_keep_prob_tensor_name)
layer3_o = graph.get_tensor_by_name(vgg_layer3_out_tensor_name)
layer4_o = graph.get_tensor_by_name(vgg_layer4_out_tensor_name)
layer7_o = graph.get_tensor_by_name(vgg_layer7_out_tensor_name)
return input_img, prob, layer3_o, layer4_o, layer7_o
tests.test_load_vgg(load_vgg, tf)
def layers(vgg_layer3_out, vgg_layer4_out, vgg_layer7_out, num_classes):
conv_1by1_l7 = tf.layers.conv2d(vgg_layer7_out, num_classes, 1, padding='SAME',
kernel_regularizer=tf.contrib.layers.l2_regularizer(1e-3))
output = tf.layers.conv2d_transpose(conv_1by1_l7, 512, 4, strides=(2,2), padding='SAME',
kernel_regularizer=tf.contrib.layers.l2_regularizer(1e-3))
output = tf.add(output, vgg_layer4_out)
output = tf.layers.conv2d_transpose(output, 256, 4, strides=(2,2), padding='SAME',
kernel_regularizer=tf.contrib.layers.l2_regularizer(1e-3))
output = tf.add(output, vgg_layer3_out)
output = tf.layers.conv2d_transpose(output, num_classes, 16, strides=(8,8), padding='SAME',
kernel_regularizer=tf.contrib.layers.l2_regularizer(1e-3), name='nn_final_output')
return output
tests.test_layers(layers)
def optimize(nn_last_layer, correct_label, learning_rate, num_classes):
logits = tf.reshape(nn_last_layer, (-1, num_classes))
cross_entropy_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=correct_label))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
training_operation = optimizer.minimize(cross_entropy_loss)
return logits, training_operation, cross_entropy_loss
tests.test_optimize(optimize)
def train_nn(sess, epochs, batch_size, get_batches_fn, train_op, cross_entropy_loss, input_image,
correct_label, keep_prob, learning_rate):
sess.run(tf.global_variables_initializer())
for epoch in range(epochs):
for (input_img, gt_img) in get_batches_fn(batch_size):
_, loss = sess.run([train_op, cross_entropy_loss], feed_dict={input_image: input_img,
correct_label: gt_img,
keep_prob: 0.7,
learning_rate: 5e-04})
print("Loss of {} at epoch {}/{}".format(loss, epoch, epochs))
tests.test_train_nn(train_nn)
def run():
num_classes = 2
image_shape = (160, 576)
data_dir = './data'
runs_dir = './runs'
tests.test_for_kitti_dataset(data_dir)
helper.maybe_download_pretrained_vgg(data_dir)
# https://www.cityscapes-dataset.com/
epochs = 20
batch_size = 5
with tf.Session() as sess:
# Path to vgg model
vgg_path = os.path.join(data_dir, 'vgg')
# Create function to get batches
get_batches_fn = helper.gen_batch_function(os.path.join(data_dir, 'data_road/training'), image_shape)
# OPTIONAL: Augment Images for better results
# https://datascience.stackexchange.com/questions/5224/how-to-prepare-augment-images-for-neural-network
correct_label = tf.placeholder(tf.int32)
learning_rate = tf.placeholder(tf.float32)
# TODO: Build NN using load_vgg, layers, and optimize function
input_img, keep_prob, layer3_o, layer4_o, layer7_o = load_vgg(sess, vgg_path)
layer_output = layers(layer3_o, layer4_o, layer7_o, num_classes)
logits, train_op, cross_entropy_loss = optimize(layer_output, correct_label, learning_rate, num_classes)
# TODO: Train NN using the train_nn function
train_nn(sess, epochs, batch_size, get_batches_fn, train_op, cross_entropy_loss, input_img,
correct_label, keep_prob, learning_rate)
# TODO: Save inference data using helper.save_inference_samples
helper.save_inference_samples(runs_dir, data_dir, sess, image_shape, logits, keep_prob, input_img)
# OPTIONAL: Apply the trained model to a video
if __name__ == '__main__':
run()
| true
| true
|
7908fe19f061ef977f21b52d2782a3887c88c794
| 35,017
|
py
|
Python
|
template/parser.py
|
lmr/Template-Toolkit-Python
|
3b1affc79c2f650b057956b0dbf6e0cb51515999
|
[
"Artistic-2.0"
] | 2
|
2021-05-07T08:50:06.000Z
|
2021-06-12T15:48:23.000Z
|
template/parser.py
|
lmr/Template-Toolkit-Python
|
3b1affc79c2f650b057956b0dbf6e0cb51515999
|
[
"Artistic-2.0"
] | 1
|
2018-11-29T08:49:42.000Z
|
2018-11-29T08:49:42.000Z
|
template/parser.py
|
lmr/Template-Toolkit-Python
|
3b1affc79c2f650b057956b0dbf6e0cb51515999
|
[
"Artistic-2.0"
] | null | null | null |
#
# The Template-Python distribution is Copyright (C) Sean McAfee 2007-2008,
# derived from the Perl Template Toolkit Copyright (C) 1996-2007 Andy
# Wardley. All Rights Reserved.
#
# The file "LICENSE" at the top level of this source distribution describes
# the terms under which this file may be distributed.
#
import collections
import re
import sys
from template import util
from template.constants import *
from template.directive import Directive
from template.grammar import Grammar
from template.util import TemplateException
"""
template.parser - LALR(1) parser for compiling template documents
SYNOPSIS
import template.parser
parser = template.parser.Parser(config)
template = parser.parse(text)
DESCRIPTION
The template.parser module implements a LALR(1) parser and associated
methods for parsing template documents into Python code.
PUBLIC METHODS
__init__(params)
The constructor initializes a new template.parser.Parser object. A
dictionary may be supplied as a parameter to provide configuration
values. These may include:
* START_TAG, END_TAG
The START_TAG and END_TAG options are used to specify character
sequences or regular expressions that mark the start and end of a
template directive. The default values for START_TAG and END_TAG are
'[%' and '%]' respectively, giving us the familiar directive style:
[% example %]
Any Python regex characters can be used and therefore should be
escaped (or use the re.escape function) if they are intended to
represent literal characters.
parser = template.parser.Parser({
'START_TAG': re.escape('<+'),
'END_TAG': re.escape('+>'),
})
example:
<+ INCLUDE foobar +>
The TAGS directive can also be used to set the START_TAG and END_TAG values
on a per-template file basis.
[% TAGS <+ +> %]
* TAG_STYLE
The TAG_STYLE option can be used to set both START_TAG and END_TAG
according to pre-defined tag styles.
parser = template.parser.Parser({
'TAG_STYLE': 'star',
})
Available styles are:
template [% ... %] (default)
template1 [% ... %] or %% ... %% (TT version 1)
metatext %% ... %% (Text::MetaText)
star [* ... *] (TT alternate)
php <? ... ?> (PHP)
asp <% ... %> (ASP)
mason <% ... > (HTML::Mason)
html <!-- ... --> (HTML comments)
Any values specified for START_TAG and/or END_TAG will over-ride those
defined by a TAG_STYLE.
The TAGS directive may also be used to set a TAG_STYLE
[% TAGS html %]
<!-- INCLUDE header -->
* PRE_CHOMP, POST_CHOMP
Anything outside a directive tag is considered plain text and is
generally passed through unaltered (but see the INTERPOLATE option).
This includes all whitespace and newlines characters surrounding
directive tags. Directives that don't generate any output will leave
gaps in the output document.
Example:
Foo
[% a = 10 %]
Bar
Output:
Foo
Bar
The PRE_CHOMP and POST_CHOMP options can help to clean up some of this
extraneous whitespace. Both are disabled by default.
parser = template.parser.Parser({
'PRE_CHOMP': 1,
'POST_CHOMP': 1,
})
With PRE_CHOMP set to 1, the newline and whitespace preceding a
directive at the start of a line will be deleted. This has the effect
of concatenating a line that starts with a directive onto the end of
the previous line.
    Foo <--------------.
|
,---(PRE_CHOMP)----'
|
`-- [% a = 10 %] --.
|
,---(POST_CHOMP)---'
|
    `-> Bar
With POST_CHOMP set to 1, any whitespace after a directive up to and
including the newline will be deleted. This has the effect of joining
a line that ends with a directive onto the start of the next line.
If PRE_CHOMP or POST_CHOMP is set to 2, all whitespace including any
number of newline will be removed and replaced with a single space.
This is useful for HTML, where (usually) a contiguous block of
whitespace is rendered the same as a single space.
With PRE_CHOMP or POST_CHOMP set to 3, all adjacent whitespace
(including newlines) will be removed entirely.
These values are defined as CHOMP_NONE, CHOMP_ONE, CHOMP_COLLAPSE and
CHOMP_GREEDY constants in the template.constants module. CHOMP_ALL
is also defined as an alias for CHOMP_ONE to provide backwards
compatibility with earlier versions of the Template Toolkit.
Additionally the chomp tag modifiers listed below may also be used for
the PRE_CHOMP and POST_CHOMP configuration.
tt = template.Template({
'PRE_CHOMP': '~',
'POST_CHOMP': '-',
})
PRE_CHOMP and POST_CHOMP can be activated for individual directives by
placing a '-' immediately at the start and/or end of the directive.
[% FOREACH user IN userlist %]
[%- user -%]
[% END %]
This has the same effect as CHOMP_ONE in removing all whitespace
before or after the directive up to and including the newline. The
template will be processed as if written:
[% FOREACH user IN userlist %][% user %][% END %]
To remove all whitespace including any number of newlines, use the '~'
character instead.
[% FOREACH user IN userlist %]
[%~ user ~%]
[% END %]
To collapse all whitespace to a single space, use the '=' character.
[% FOREACH user IN userlist %]
[%= user =%]
[% END %]
Here the template is processed as if written:
[% FOREACH user IN userlist %] [% user %] [% END %]
If you have PRE_CHOMP or POST_CHOMP set as configuration options then
you can use '+' to disable any chomping options (i.e. leave the
whitespace intact) on a per-directive basis.
[% FOREACH user = userlist %]
User: [% user +%]
[% END %]
With POST_CHOMP set to CHOMP_ONE, the above example would be parsed as
if written:
[% FOREACH user = userlist %]User: [% user %]
[% END %]
For reference, the PRE_CHOMP and POST_CHOMP configuration options may be set to any of the following:
Constant Value Tag Modifier
----------------------------------
CHOMP_NONE 0 +
CHOMP_ONE 1 -
CHOMP_COLLAPSE 2 =
CHOMP_GREEDY 3 ~
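
For example (a sketch -- any of the three equivalent forms selects the
same behaviour):

    parser = template.parser.Parser({
        'PRE_CHOMP': CHOMP_COLLAPSE,   # equivalently 2 or '='
    })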
* INTERPOLATE
The INTERPOLATE flag, when set to any true value will cause variable
references in plain text (i.e. not surrounded by START_TAG and
END_TAG) to be recognised and interpolated accordingly.
parser = template.parser.Parser({
'INTERPOLATE': 1,
})
Variables should be prefixed by a '$' to identify them. Curly braces
can be used in the familiar Perl/shell style to explicitly scope the
variable name where required.
# INTERPOLATE => 0
<a href="http://[% server %]/[% help %]">
<img src="[% images %]/help.gif"></a>
[% myorg.name %]
# INTERPOLATE => 1
<a href="http://$server/$help">
<img src="$images/help.gif"></a>
$myorg.name
# explicit scoping with { }
<img src="$images/${icon.next}.gif">
Note that a limitation in Perl's regex engine restricts the maximum
length of an interpolated template to around 32 kilobytes or possibly
less. Files that exceed this limit in size will typically cause Perl
to dump core with a segmentation fault. If you routinely process
templates of this size then you should disable INTERPOLATE or split
the templates in several smaller files or blocks which can then be
joined backed together via PROCESS or INCLUDE.
It is unknown whether this limitation is shared by the Python regex
engine.
* ANYCASE
By default, directive keywords should be expressed in UPPER CASE. The
ANYCASE option can be set to allow directive keywords to be specified
in any case.
# ANYCASE => 0 (default)
[% INCLUDE foobar %] # OK
[% include foobar %] # ERROR
[% include = 10 %] # OK, 'include' is a variable
# ANYCASE => 1
[% INCLUDE foobar %] # OK
[% include foobar %] # OK
[% include = 10 %] # ERROR, 'include' is reserved word
One side-effect of enabling ANYCASE is that you cannot use a variable
of the same name as a reserved word, regardless of case. The reserved
words are currently:
GET CALL SET DEFAULT INSERT INCLUDE PROCESS WRAPPER
IF UNLESS ELSE ELSIF FOR FOREACH WHILE SWITCH CASE
USE PLUGIN FILTER MACRO PYTHON RAWPYTHON BLOCK META
TRY THROW CATCH FINAL NEXT LAST BREAK RETURN STOP
CLEAR TO STEP AND OR NOT MOD DIV END
The only lower case reserved words that cannot be used for variables,
regardless of the ANYCASE option, are the operators:
and or not mod div
* V1DOLLAR
In version 1 of the Template Toolkit, an optional leading '$' could be placed
on any template variable and would be silently ignored.
# VERSION 1
[% $foo %] === [% foo %]
[% $hash.$key %] === [% hash.key %]
To interpolate a variable value the '${' ... '}' construct was used.
Typically, one would do this to index into a hash array when the key
value was stored in a variable.
example:
vars = {
        'users': {
'aba': { 'name': 'Alan Aardvark', ... },
'abw': { 'name': 'Andy Wardley', ... },
...
},
'uid': 'aba',
...
}
template.process('user/home.html', vars)
'user/home.html':
[% user = users.${uid} %] # users.aba
Name: [% user.name %] # Alan Aardvark
This was inconsistent with double quoted strings and also the
INTERPOLATE mode, where a leading '$' in text was enough to indicate a
variable for interpolation, and the additional curly braces were used
to delimit variable names where necessary. Note that this use is
consistent with UNIX and Perl conventions, among others.
# double quoted string interpolation
[% name = "$title ${user.name}" %]
# INTERPOLATE = 1
<img src="$images/help.gif"></a>
<img src="$images/${icon.next}.gif">
For version 2, these inconsistencies have been removed and the syntax
clarified. A leading '$' on a variable is now used exclusively to
indicate that the variable name should be interpolated
(e.g. substituted for its value) before being used. The earlier example
from version 1:
# VERSION 1
[% user = users.${uid} %]
Name: [% user.name %]
can now be simplified in version 2 as:
# VERSION 2
[% user = users.$uid %]
Name: [% user.name %]
The leading dollar is no longer ignored and has the same effect of
interpolation as '${' ... '}' in version 1. The curly braces may
still be used to explicitly scope the interpolated variable name
where necessary.
e.g.
[% user = users.${me.id} %]
Name: [% user.name %]
The rule applies for all variables, both within directives and in
plain text if processed with the INTERPOLATE option. This means that
you should no longer (if you ever did) add a leading '$' to a variable
inside a directive, unless you explicitly want it to be interpolated.
One obvious side-effect is that any version 1 templates with variables
using a leading '$' will no longer be processed as expected. Given
the following variable definitions,
[% foo = 'bar'
bar = 'baz'
%]
version 1 would interpret the following as:
# VERSION 1
[% $foo %] => [% GET foo %] => bar
whereas version 2 interprets it as:
# VERSION 2
[% $foo %] => [% GET $foo %] => [% GET bar %] => baz
In version 1, the '$' is ignored and the value for the variable 'foo'
is retrieved and printed. In version 2, the variable '$foo' is first
interpolated to give the variable name 'bar' whose value is then
retrieved and printed.
The use of the optional '$' has never been strongly recommended, but
to assist in backwards compatibility with any version 1 templates that
may rely on this "feature", the V1DOLLAR option can be set to 1
(default: 0) to revert the behaviour and have leading '$' characters
ignored.
    parser = template.parser.Parser({
        'V1DOLLAR': 1,
    })
* GRAMMAR
The GRAMMAR configuration item can be used to specify an alternate
grammar for the parser. This allows a modified or entirely new
template language to be constructed and used by the Template Toolkit.
Source templates are compiled to Python code by the template.parser
module using the template.grammar module (by default) to define the
language structure and semantics. Compiled templates are thus
inherently "compatible" with each other and there is nothing to prevent
any number of different template languages being compiled and used within
the same Template Toolkit processing environment (other than the usual
time and memory constraints).
The template.grammar file is constructed from a YACC like grammar
(using Parse::YAPP) and a skeleton module template. These files are
provided, along with a small script to rebuild the grammar, in the
'parser' sub-directory of the distribution. You don't have to know or
worry about these unless you want to hack on the template language or
define your own variant. There is a README file in the same directory
which provides some small guidance but it is assumed that you know
what you're doing if you venture herein. If you grok LALR parsers,
then you should find it comfortably familiar.
By default, an instance of the default template.grammar.Grammar will
be created and used automatically if a GRAMMAR item isn't specified.
import myorg.template.grammar
parser = template.parser.Parser({
'GRAMMAR': myorg.template.grammar.Grammar(),
})
* DEBUG
The DEBUG option can be used to enable various debugging features of
the Template::Parser module.
from template.constants import *
tt = template.Template({
'DEBUG': DEBUG_PARSER | DEBUG_DIRS,
})
The DEBUG value can include any of the following. Multiple values
should be combined using the logical OR operator, '|'.
** DEBUG_PARSER
This flag causes the Parser to generate debugging messages that show
the Python code generated by parsing and compiling each template.
** DEBUG_DIRS
This option causes the Template Toolkit to generate comments
indicating the source file, line and original text of each directive
in the template. These comments are embedded in the template output
using the format defined in the DEBUG_FORMAT configuration item, or a
simple default format if unspecified.
For example, the following template fragment:
    Hello
    World
would generate this output:
    ## input text line 1 : Hello ##
Hello
## input text line 2 : World ##
World
parse(text)
The parse() method parses the text passed in the first parameter and
returns a dictionary of data defining the compiled representation of
the template text, suitable for passing to the
template.document.Document constructor.
Example:
data = parser.parse(text)
The data dictionary returned contains a BLOCK item containing the
compiled Python code for the template, a DEFBLOCKS item containing a
dictionary of sub-template BLOCKs defined within in the template, and
a METADATA item containing a dictionary of metadata values defined in
META tags.
"""
CONTINUE = 0
ACCEPT = 1
ERROR = 2
ABORT = 3
TAG_STYLE = {
"default": (r"\[%", r"%\]"),
"template1": (r"[[%]%", r"%[]%]"),
"metatext": (r"%%", r"%%"),
"html": (r"<!--", r"-->"),
"mason": (r"<%", r">"),
"asp": (r"<%", r"%>"),
"php": (r"<\?", r"\?>"),
"star": (r"\[\*", r"\*\]"),
}
TAG_STYLE["template"] = TAG_STYLE["tt2"] = TAG_STYLE["default"]
DEFAULT_STYLE = {
"START_TAG": TAG_STYLE["default"][0],
"END_TAG": TAG_STYLE["default"][1],
"ANYCASE": 0,
"INTERPOLATE": 0,
"PRE_CHOMP": 0,
"POST_CHOMP": 0,
"V1DOLLAR": 0,
"EVAL_PYTHON": 0,
}
ESCAPE = {"n": "\n", "r": "\r", "t": "\t"}
CHOMP_FLAGS = r"[-=~+]"
CHOMP_ALL = str(CHOMP_ALL)
CHOMP_COLLAPSE = str(CHOMP_COLLAPSE)
CHOMP_GREEDY = str(CHOMP_GREEDY)
CHOMP_NONE = str(CHOMP_NONE)
CHOMP_CONST = {
"-": CHOMP_ALL,
"=": CHOMP_COLLAPSE,
"~": CHOMP_GREEDY,
"+": CHOMP_NONE
}
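# PRE_CHOMP handlers operate on the text *preceding* a directive tag:
# CHOMP_ALL strips back to and including the preceding newline,
# CHOMP_COLLAPSE collapses trailing whitespace to a single space, and
# CHOMP_GREEDY removes all trailing whitespace.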
PRE_CHOMP = {
CHOMP_ALL: lambda x: re.sub(r"(\n|^)[^\S\n]*\Z", "", x),
CHOMP_COLLAPSE: lambda x: re.sub(r"\s+\Z", " ", x),
CHOMP_GREEDY: lambda x: re.sub(r"\s+\Z", "", x),
CHOMP_NONE: lambda x: x,
}
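# postchomp builds the complementary handlers for the text *following* a
# tag; each returns the stripped text plus an updated count of the
# newlines removed, so template line numbers stay accurate.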
def postchomp(regex, prefix):
regex = re.compile(regex)
def strip(text, postlines):
match = regex.match(text)
if match:
text = prefix + text[match.end():]
postlines += match.group().count("\n")
return text, postlines
return strip
POST_CHOMP = {
CHOMP_ALL: postchomp(r"[^\S\n]*\n", ""),
CHOMP_COLLAPSE: postchomp(r"\s+", " "),
CHOMP_GREEDY: postchomp(r"\s+", ""),
CHOMP_NONE: lambda x, y: (x, y),
}
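# Translate any tag-modifier characters ('-', '=', '~', '+') in a chomp
# specification into their numeric CHOMP_* string constants.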
def Chomp(x):
return re.sub(r"[-=~+]", lambda m: CHOMP_CONST[m.group()], str(x))
GRAMMAR = re.compile(r"""
# strip out any comments
(\#[^\n]*)
|
# a quoted string matches in $3
(["']) # $2 - opening quote, ' or "
( # $3 - quoted text buffer
(?: # repeat group (no backreference)
\\\\ # an escaped backslash
| # ...or...
    \\\2 # an escaped quote \" or \' (match $2)
| # ...or...
. # any other character
| \n
)*? # non-greedy repeat
) # end of $3
\2 # match opening quote
|
# an unquoted number matches in $4
(-? \d+ (?: \. \d+ )?) # numbers
|
# filename matches in $5
( /? \w+ (?: (?: /|::? ) \w* )+ | /\w+ )
|
# an identifier matches in $6
(\w+)
|
# an unquoted word or symbol matches in $7
( [(){}\[\]:;,/\\] # misc parentheses and symbols
| -> # arrow operator (for future?)
| [-+*] # math operations
| \${? # dollar with optional left brace
| => # like "="
| [=!<>]?= | [!<>] # equality tests
| &&? | \|\|? # boolean ops
| \.\.? # n..n sequence
| \S+ # something unquoted
) # end of $7
""", re.VERBOSE)
QUOTED_STRING = re.compile(r"""
( (?: \\. | [^\$] ){1,3000} ) # escaped or non-'$' character [$1]
|
( \$ (?: # embedded variable [$2]
(?: \{ ([^\}]*) \} ) # ${ ... } [$3]
|
([\w\.]+) # $word [$4]
)
)
""", re.VERBOSE)
class Error(Exception):
"""A trivial local exception class."""
pass
class Parser:
"""This module implements a LALR(1) parser and assocated support
methods to parse template documents into the appropriate "compiled"
format.
"""
def __init__(self, param):
self.start_tag = param.get("START_TAG") or DEFAULT_STYLE["START_TAG"]
self.end_tag = param.get("END_TAG") or DEFAULT_STYLE["END_TAG"]
self.tag_style = param.get("TAG_STYLE", "default")
self.anycase = param.get("ANYCASE", False)
self.interpolate = param.get("INTERPOLATE", False)
self.pre_chomp = param.get("PRE_CHOMP", CHOMP_NONE)
self.post_chomp = param.get("POST_CHOMP", CHOMP_NONE)
self.v1dollar = param.get("V1DOLLAR", False)
self.eval_python = param.get("EVAL_PYTHON", False)
self.file_info = param.get("FILE_INFO", 1)
self.grammar = param.get("GRAMMAR", Grammar())
self.factory = param.get("FACTORY", Directive)
self.fileinfo = []
self.defblocks = []
self.defblock_stack = []
self.infor = 0
self.inwhile = 0
self.style = []
        # Build a FACTORY object to include any NAMESPACE definitions,
        # but only if FACTORY is callable (i.e. not already an instance).
if isinstance(self.factory, collections.Callable):
self.factory = self.factory(param)
self.lextable = self.grammar.lextable
self.states = self.grammar.states
self.rules = self.grammar.rules
self.new_style(param)
self.tokenize = (
((1,), self._comment),
((2, 3), self._string),
((4,), self._number),
((5,), self._filename),
((6,), self._identifier),
((7,), self._word),
)
def new_style(self, config):
"""Install a new (stacked) parser style.
This feature is currently experimental but should mimic the
previous behaviour with regard to TAG_STYLE, START_TAG, END_TAG,
etc.
"""
if self.style:
style = self.style[-1]
else:
style = DEFAULT_STYLE
style = style.copy()
tagstyle = config.get("TAG_STYLE")
if tagstyle:
tags = TAG_STYLE.get(tagstyle)
if tags is None:
raise Error("Invalid tag style: %s" % tagstyle)
start, end = tags
config["START_TAG"] = config.get("START_TAG", start)
config["END_TAG"] = config.get("END_TAG", end)
for key in DEFAULT_STYLE.keys():
value = config.get(key)
if value is not None:
style[key] = value
self.style.append(style)
return style
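    # Illustrative note (assumption): calling new_style({"TAG_STYLE": "asp"})
    # resolves through TAG_STYLE to START_TAG r"<%" and END_TAG r"%>", so
    # subsequent split_text() calls recognise <% ... %> directives instead of
    # the default [% ... %] style.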
def old_style(self):
"""Pop the current parser style and revert to the previous one.
See new_style(). ** experimental **
"""
if len(self.style) <= 1:
raise Error("only 1 parser style remaining")
self.style.pop()
return self.style[-1]
def location(self):
"""Return Python comment indicating current parser file and line."""
if not self.file_info:
return "\n"
line = self.line
info = self.fileinfo[-1]
file = info and (info.path or info.name) or "(unknown template)"
line = re.sub(r"-.*", "", str(line)) # might be 'n-n'
return '#line %s "%s"\n' % (line, file)
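    # Example (hypothetical file name): with FILE_INFO enabled, a template
    # whose fileinfo name is "header.tt" parsed at line 42 makes location()
    # return the string '#line 42 "header.tt"\n'.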
def parse(self, text, info=None):
"""Parses the text string, text, and returns a dictionary
representing the compiled template block(s) as Python code, in the
format expected by template.document.
"""
self.defblock = {}
self.metadata = {}
tokens = self.split_text(text)
if tokens is None:
return None
self.fileinfo.append(info)
block = self._parse(tokens, info)
self.fileinfo.pop()
if block:
return {"BLOCK": block,
"DEFBLOCKS": self.defblock,
"METADATA": self.metadata}
else:
return None
def split_text(self, text):
"""Split input template text into directives and raw text chunks."""
tokens = []
line = 1
style = self.style[-1]
def make_splitter(delims):
return re.compile(r"(?s)(.*?)%s(.*?)%s" % delims)
splitter = make_splitter((style["START_TAG"], style["END_TAG"]))
while True:
match = splitter.match(text)
if not match:
break
text = text[match.end():]
pre, dir = match.group(1), match.group(2)
prelines = pre.count("\n")
dirlines = dir.count("\n")
postlines = 0
if dir.startswith("#"):
# comment out entire directive except for any end chomp flag
match = re.search(CHOMP_FLAGS + "$", dir)
if match:
dir = match.group()
else:
dir = ""
else:
# PRE_CHOMP: process whitespace before tag
match = re.match(r"(%s)?\s*" % CHOMP_FLAGS, dir)
chomp = Chomp(match and match.group(1) or style["PRE_CHOMP"])
if match:
dir = dir[match.end():]
pre = PRE_CHOMP[chomp](pre)
# POST_CHOMP: process whitespace after tag
match = re.search(r"\s*(%s)?\s*$" % CHOMP_FLAGS, dir)
chomp = Chomp(match and match.group(1) or style["POST_CHOMP"])
if match:
dir = dir[:match.start()]
text, postlines = POST_CHOMP[chomp](text, postlines)
if pre:
if style["INTERPOLATE"]:
tokens.append([pre, line, 'ITEXT'])
else:
tokens.extend(["TEXT", pre])
line += prelines
if dir:
# The TAGS directive is a compile-time switch.
match = re.match(r"(?i)TAGS\s+(.*)", dir)
if match:
tags = re.split(r"\s+", match.group(1))
if len(tags) > 1:
splitter = make_splitter(tuple(re.escape(x) for x in tags[:2]))
elif tags[0] in TAG_STYLE:
splitter = make_splitter(TAG_STYLE[tags[0]])
else:
sys.stderr.write("Invalid TAGS style: %s" % tags[0])
else:
if dirlines > 0:
line_range = "%d-%d" % (line, line + dirlines)
else:
line_range = str(line)
tokens.append([dir, line_range, self.tokenise_directive(dir)])
line += dirlines + postlines
if text:
if style["INTERPOLATE"]:
tokens.append([text, line, "ITEXT"])
else:
tokens.extend(["TEXT", text])
return tokens
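    # Illustrative note (assumption): with the default [% ... %] tags and
    # INTERPOLATE off, split_text("Hello [% name %]!") yields roughly
    #   ["TEXT", "Hello ", ["name", "1", ["IDENT", "name"]], "TEXT", "!"]
    # i.e. raw text as TEXT/chunk pairs and each directive as a
    # [text, line, token-list] triple built by tokenise_directive().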
def _comment(self, token):
"""Tokenizes a comment."""
return ()
def _string(self, quote, token):
"""Tokenizes a string."""
if quote == '"':
if re.search(r"[$\\]", token):
# unescape " and \ but leave \$ escaped so that
# interpolate_text() doesn't incorrectly treat it
# as a variable reference
token = re.sub(r'\\([\\"])', r'\1', token)
token = re.sub(r'\\([^$nrt])', r'\1', token)
token = re.sub(r'\\([nrt])', lambda m: ESCAPE[m.group(1)], token)
return ['"', '"'] + self.interpolate_text(token) + ['"', '"']
else:
return "LITERAL", "scalar(%r)" % token
else:
# Remove escaped single quotes and backslashes:
token = re.sub(r"\\(.)", lambda m: m.group(m.group(1) in "'\\"), token)
return "LITERAL", "scalar(%r)" % token
def _number(self, token):
"""Tokenizes a number."""
return "NUMBER", "scalar(%s)" % token
def _filename(self, token):
"""Tokenizes a filename."""
return "FILENAME", token
def _identifier(self, token):
"""Tokenizes an identifier."""
if self.anycase:
uctoken = token.upper()
else:
uctoken = token
toktype = self.lextable.get(uctoken)
if toktype is not None:
return toktype, uctoken
else:
return "IDENT", token
def _word(self, token):
"""Tokenizes an unquoted word or symbol ."""
return self.lextable.get(token, "UNQUOTED"), token
def tokenise_directive(self, dirtext):
"""Called by the private _parse() method when it encounters a
DIRECTIVE token in the list provided by the split_text() or
interpolate_text() methods.
The method splits the directive into individual tokens as
recognised by the parser grammar (see template.grammar for
details). It constructs a list of tokens each represented by 2
elements, as per split_text() et al. The first element contains
the token type, the second the token itself.
The method tokenises the string using a complex (but fast) regex.
For a deeper understanding of the regex magic at work here, see
Jeffrey Friedl's excellent book "Mastering Regular Expressions",
from O'Reilly, ISBN 1-56592-257-3
Returns the list of chunks (each one being 2 elements) identified
in the directive text.
"""
tokens = []
for match in GRAMMAR.finditer(dirtext):
for indices, method in self.tokenize:
if match.group(indices[0]):
tokens.extend(method(*list(map(match.group, indices))))
break
return tokens
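    # Illustrative note (token names are assumptions taken from the grammar's
    # lextable): tokenise_directive("SET x = 10") would emit a flat list that
    # alternates token type and value, roughly
    #   ["SET", "SET", "IDENT", "x", "ASSIGN", "=", "NUMBER", "scalar(10)"]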
def _parse(self, tokens, info):
"""Parses the list of input tokens passed by reference and returns
an object which contains the compiled representation of the
template.
This is the main parser DFA loop. See embedded comments for
further details.
"""
self.grammar.install_factory(self.factory)
stack = [[0, None]] # DFA stack
coderet = None
token = None
in_string = False
in_python = False
status = CONTINUE
lhs = None
text = None
self.line = 0
self.file = info and info.name
self.inpython = 0
value = None
while True:
stateno = stack[-1][0]
state = self.states[stateno]
# see if any lookaheads exist for the current state
if "ACTIONS" in state:
# get next token and expand any directives (ie. token is a
# list) onto the front of the token list
while token is None and tokens:
token = tokens.pop(0)
if isinstance(token, (list, tuple)):
text, self.line, token = util.unpack(token, 3)
if isinstance(token, (list, tuple)):
tokens[:0] = token + [";", ";"]
token = None # force redo
elif token == "ITEXT":
if in_python:
# don't perform interpolation in PYTHON blocks
token = "TEXT"
value = text
else:
tokens[:0] = self.interpolate_text(text, self.line)
token = None # force redo
else:
# toggle string flag to indicate if we're crossing
# a string boundary
if token == '"':
in_string = not in_string
value = tokens and tokens.pop(0) or None
if token is None:
token = ""
# get the next state for the current lookahead token
lookup = state["ACTIONS"].get(token)
if lookup:
action = lookup
else:
action = state.get("DEFAULT")
else:
# no lookahead assertions
action = state.get("DEFAULT")
# ERROR: no ACTION
if action is None:
break
# shift (positive ACTION)
if action > 0:
stack.append([action, value])
token = value = None
else:
# reduce (negative ACTION)
lhs, len_, code = self.rules[-action]
# no action implies ACCEPTance
if not action:
status = ACCEPT
# use dummy sub if code ref doesn't exist
if not code:
code = lambda *arg: len(arg) >= 2 and arg[1] or None
if len_ > 0:
codevars = [x[1] for x in stack[-len_:]]
else:
codevars = []
try:
coderet = code(self, *codevars)
except TemplateException as e:
self._parse_error(str(e), info.name)
# reduce stack by len_
if len_ > 0:
stack[-len_:] = []
# ACCEPT
if status == ACCEPT:
return coderet
elif status == ABORT:
return None
elif status == ERROR:
break
stack.append([self.states[stack[-1][0]].get("GOTOS", {}).get(lhs),
coderet])
# ERROR
if value is None:
self._parse_error("unexpected end of input", info.name)
elif value == ";":
self._parse_error("unexpected end of directive", info.name, text)
else:
self._parse_error("unexpected token (%s)" %
util.unscalar_lex(value), info.name, text)
def _parse_error(self, msg, name, text=None):
"""Method used to handle errors encountered during the parse process
in the _parse() method.
"""
line = self.line or "unknown"
if text is not None:
msg += "\n [%% %s %%]" % text
raise TemplateException("parse", "%s line %s: %s" % (name, line, msg))
def define_block(self, name, block):
"""Called by the parser 'defblock' rule when a BLOCK definition is
encountered in the template.
The name of the block is passed in the first parameter and a
reference to the compiled block is passed in the second. This
method stores the block in the self.defblock dictionary which has
been initialised by parse() and will later be used by the same
method to call the store() method on the calling cache to define
the block "externally".
"""
if self.defblock is None:
return None
self.defblock[name] = block
return None
def push_defblock(self):
self.defblock_stack.append(self.defblock)
self.defblock = {}
def pop_defblock(self):
if not self.defblock_stack:
return self.defblock
block = self.defblock
self.defblock = self.defblock_stack.pop(0)
return block
def add_metadata(self, setlist):
setlist = [util.unscalar_lex(x) for x in setlist]
if self.metadata is not None:
for key, value in util.chop(setlist, 2):
self.metadata[key] = value
return None
def interpolate_text(self, text, line=0):
"""Examines text looking for any variable references embedded
like $this or like ${ this }.
"""
tokens = []
for match in QUOTED_STRING.finditer(text):
pre = match.group(1)
var = match.group(3) or match.group(4)
dir = match.group(2)
# preceding text
if pre:
line += pre.count("\n")
tokens.extend(("TEXT", pre.replace("\\$", "$")))
# variable reference
if var:
line += dir.count("\n")
tokens.append([dir, line, self.tokenise_directive(var)])
# other '$' reference - treated as text
elif dir:
line += dir.count("\n")
tokens.extend(("TEXT", dir))
return tokens
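    # Illustrative note (assumption): interpolate_text("Hi $name!") returns
    # roughly ["TEXT", "Hi ", ["$name", 0, ["IDENT", "name"]], "TEXT", "!"],
    # mirroring the token layout produced by split_text() above.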
| 32.818182
| 101
| 0.58583
|
7908fe75e07930f352f3b769db6fb5dc1058566b
| 89,512
|
py
|
Python
|
dataactcore/scripts/pull_fpds_data.py
|
RonSherfey/data-act-broker-backend
|
d287abda2cac06dd479ecf0127e789cb8e59387d
|
[
"CC0-1.0"
] | null | null | null |
dataactcore/scripts/pull_fpds_data.py
|
RonSherfey/data-act-broker-backend
|
d287abda2cac06dd479ecf0127e789cb8e59387d
|
[
"CC0-1.0"
] | 3
|
2021-08-22T11:47:45.000Z
|
2022-03-29T22:06:49.000Z
|
dataactcore/scripts/pull_fpds_data.py
|
RonSherfey/data-act-broker-backend
|
d287abda2cac06dd479ecf0127e789cb8e59387d
|
[
"CC0-1.0"
] | null | null | null |
import boto3
import logging
import argparse
import requests
import xmltodict
import asyncio
import datetime
import time
import re
import json
import math
from sqlalchemy import func
from dateutil.relativedelta import relativedelta
from requests.exceptions import ConnectionError, ReadTimeout
from urllib3.exceptions import ReadTimeoutError
from dataactcore.logging import configure_logging
from dataactcore.config import CONFIG_BROKER
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.exc import IntegrityError
from dataactcore.interfaces.db import GlobalDB
from dataactcore.models.domainModels import SubTierAgency, CountryCode, States, CountyCode, Zips, DUNS
from dataactcore.models.stagingModels import DetachedAwardProcurement
from dataactcore.models.jobModels import FPDSUpdate
from dataactcore.utils.business_categories import get_business_categories
from dataactcore.models.jobModels import Submission # noqa
from dataactcore.models.userModel import User # noqa
from dataactvalidator.health_check import create_app
from dataactvalidator.filestreaming.csvLocalWriter import CsvLocalWriter
feed_url = "https://www.fpds.gov/ezsearch/FEEDS/ATOM?FEEDNAME=PUBLIC&templateName=1.5.2&q="
delete_url = "https://www.fpds.gov/ezsearch/FEEDS/ATOM?FEEDNAME=DELETED&templateName=1.5.2&q="
country_code_map = {'USA': 'US', 'ASM': 'AS', 'GUM': 'GU', 'MNP': 'MP', 'PRI': 'PR', 'VIR': 'VI', 'FSM': 'FM',
'MHL': 'MH', 'PLW': 'PW', 'XBK': 'UM', 'XHO': 'UM', 'XJV': 'UM', 'XJA': 'UM', 'XKR': 'UM',
'XPL': 'UM', 'XMW': 'UM', 'XWK': 'UM'}
FPDS_NAMESPACES = {'http://www.fpdsng.com/FPDS': None,
'http://www.w3.org/2005/Atom': None,
'https://www.fpds.gov/FPDS': None}
# Used for asyncio get requests against the ATOM feed
MAX_ENTRIES = 10
MAX_REQUESTS_AT_ONCE = 100
logger = logging.getLogger(__name__)
logging.getLogger("requests").setLevel(logging.WARNING)
def list_data(data):
    """ Wrap a lone xmltodict dict in a list so callers can iterate consistently """
    if isinstance(data, dict):
        # make a list so it's consistent
        data = [data, ]
    return data
def extract_text(data_val):
    """ Pull the text content out of an xmltodict value and strip whitespace """
    if type(data_val) is not str:
        data_val = data_val['#text']
    # If it's now a string, we want to strip it
    if type(data_val) is str:
        data_val = data_val.strip()
    return data_val
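# Illustrative note: extract_text() accepts both plain strings and xmltodict
# attribute dicts, so extract_text(' foo ') and
# extract_text({'#text': ' foo ', '@description': 'bar'}) both return 'foo'.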
def is_valid_zip(zip_code):
    """ Check whether a zip code is in a valid US 5- or 9-digit format """
    if re.match(r'^\d{5}(-?\d{4})?$', zip_code):
        return True
    return False
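# Illustrative note: the pattern accepts 5- and 9-digit US zips with or without
# a hyphen, so is_valid_zip('12345'), is_valid_zip('12345-6789') and
# is_valid_zip('123456789') are all truthy, while is_valid_zip('1234') is not.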
def get_county_by_zip(sess, zip_code):
    """ Look up the county code for a zip code, preferring a full 9-digit match """
    # if the zip code is not a valid US zip, toss the entire zip
if not is_valid_zip(zip_code):
return None
zip_data = None
# if we have a 9 digit code, grab the first match for 9 digit zips
if len(zip_code) > 5:
zip_data = sess.query(Zips).filter_by(zip5=zip_code[:5], zip_last4=zip_code[-4:]).first()
# if it's not 9 digits or we found no results from the 9 digit we received
if not zip_data:
zip_data = sess.query(Zips).filter_by(zip5=zip_code[:5]).first()
# if we found results at any point, return the county code from it
if zip_data:
return zip_data.county_number
return None
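# Illustrative note (hypothetical zip): for '20001-0001' the lookup first tries
# an exact Zips match on zip5='20001' and zip_last4='0001', then falls back to
# any row with zip5='20001', and returns None only if both lookups miss.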
def award_id_values(data, obj):
""" Get values from the awardID level of the xml """
value_map = {'modNumber': 'award_modification_amendme',
'transactionNumber': 'transaction_number',
'PIID': 'piid',
'agencyID': 'agency_id'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['awardContractID'][key])
except (KeyError, TypeError):
obj[value] = None
value_map = {'agencyID': 'referenced_idv_agency_iden',
'modNumber': 'referenced_idv_modificatio',
'PIID': 'parent_award_id'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['referencedIDVID'][key])
except (KeyError, TypeError):
obj[value] = None
# get agencyID name
try:
obj['referenced_idv_agency_desc'] = extract_text(data['referencedIDVID']['agencyID']['@name'])
except (KeyError, TypeError):
obj['referenced_idv_agency_desc'] = None
return obj
def contract_id_values(data, obj):
""" Get values from the contractID level of the xml """
value_map = {'modNumber': 'award_modification_amendme',
'PIID': 'piid',
'agencyID': 'agency_id'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['IDVID'][key])
except (KeyError, TypeError):
obj[value] = None
value_map = {'agencyID': 'referenced_idv_agency_iden',
'modNumber': 'referenced_idv_modificatio',
'PIID': 'parent_award_id'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['referencedIDVID'][key])
except (KeyError, TypeError):
obj[value] = None
# get agencyID name
try:
obj['referenced_idv_agency_desc'] = extract_text(data['referencedIDVID']['agencyID']['@name'])
except (KeyError, TypeError):
obj['referenced_idv_agency_desc'] = None
return obj
def competition_values(data, obj):
""" Get values from the competition level of the xml """
value_map = {'A76Action': 'a_76_fair_act_action',
'commercialItemAcquisitionProcedures': 'commercial_item_acquisitio',
'commercialItemTestProgram': 'commercial_item_test_progr',
'evaluatedPreference': 'evaluated_preference',
'extentCompeted': 'extent_competed',
'fedBizOpps': 'fed_biz_opps',
'localAreaSetAside': 'local_area_set_aside',
'numberOfOffersReceived': 'number_of_offers_received',
'priceEvaluationPercentDifference': 'price_evaluation_adjustmen',
'reasonNotCompeted': 'other_than_full_and_open_c',
'research': 'research',
'smallBusinessCompetitivenessDemonstrationProgram': 'small_business_competitive',
'solicitationProcedures': 'solicitation_procedures',
'statutoryExceptionToFairOpportunity': 'fair_opportunity_limited_s',
'typeOfSetAside': 'type_set_aside'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'A76Action': 'a_76_fair_act_action_desc',
'commercialItemAcquisitionProcedures': 'commercial_item_acqui_desc',
'commercialItemTestProgram': 'commercial_item_test_desc',
'evaluatedPreference': 'evaluated_preference_desc',
'extentCompeted': 'extent_compete_description',
'fedBizOpps': 'fed_biz_opps_description',
'localAreaSetAside': 'local_area_set_aside_desc',
'reasonNotCompeted': 'other_than_full_and_o_desc',
'research': 'research_description',
'solicitationProcedures': 'solicitation_procedur_desc',
'statutoryExceptionToFairOpportunity': 'fair_opportunity_limi_desc',
'typeOfSetAside': 'type_set_aside_description'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
return obj
def contract_data_values(data, obj, atom_type):
""" Get values from the contractData level of the xml """
value_map = {'consolidatedContract': 'consolidated_contract',
'contingencyHumanitarianPeacekeepingOperation': 'contingency_humanitarian_o',
'contractFinancing': 'contract_financing',
'costAccountingStandardsClause': 'cost_accounting_standards',
'costOrPricingData': 'cost_or_pricing_data',
'descriptionOfContractRequirement': 'award_description',
'GFE-GFP': 'government_furnished_prope',
'inherentlyGovernmentalFunction': 'inherently_government_func',
'majorProgramCode': 'major_program',
'multiYearContract': 'multi_year_contract',
'nationalInterestActionCode': 'national_interest_action',
'numberOfActions': 'number_of_actions',
'performanceBasedServiceContract': 'performance_based_service',
'programAcronym': 'program_acronym',
'purchaseCardAsPaymentMethod': 'purchase_card_as_payment_m',
'reasonForModification': 'action_type',
'referencedIDVMultipleOrSingle': 'referenced_mult_or_single',
'referencedIDVType': 'referenced_idv_type',
'seaTransportation': 'sea_transportation',
'solicitationID': 'solicitation_identifier',
'typeOfContractPricing': 'type_of_contract_pricing',
'typeOfIDC': 'type_of_idc',
'undefinitizedAction': 'undefinitized_action'}
if atom_type == "award":
value_map['contractActionType'] = 'contract_award_type'
else:
value_map['contractActionType'] = 'idv_type'
value_map['multipleOrSingleAwardIDC'] = 'multiple_or_single_award_i'
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'consolidatedContract': 'consolidated_contract_desc',
'contingencyHumanitarianPeacekeepingOperation': 'contingency_humanitar_desc',
'contractFinancing': 'contract_financing_descrip',
'costAccountingStandardsClause': 'cost_accounting_stand_desc',
'costOrPricingData': 'cost_or_pricing_data_desc',
'GFE-GFP': 'government_furnished_desc',
'inherentlyGovernmentalFunction': 'inherently_government_desc',
'multiYearContract': 'multi_year_contract_desc',
'nationalInterestActionCode': 'national_interest_desc',
'performanceBasedServiceContract': 'performance_based_se_desc',
'purchaseCardAsPaymentMethod': 'purchase_card_as_paym_desc',
'reasonForModification': 'action_type_description',
'referencedIDVMultipleOrSingle': 'referenced_mult_or_si_desc',
'referencedIDVType': 'referenced_idv_type_desc',
'seaTransportation': 'sea_transportation_desc',
'typeOfContractPricing': 'type_of_contract_pric_desc',
'typeOfIDC': 'type_of_idc_description',
'undefinitizedAction': 'undefinitized_action_desc'}
if atom_type == "award":
value_map['contractActionType'] = 'contract_award_type_desc'
else:
value_map['contractActionType'] = 'idv_type_description'
value_map['multipleOrSingleAwardIDC'] = 'multiple_or_single_aw_desc'
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
return obj
def dollar_values_values(data, obj):
""" Get values from the dollarValues level of the xml """
value_map = {'baseAndAllOptionsValue': 'base_and_all_options_value',
'baseAndExercisedOptionsValue': 'base_exercised_options_val',
'obligatedAmount': 'federal_action_obligation'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
return obj
def total_dollar_values_values(data, obj):
""" Get values from the totalDollarValues level of the xml """
value_map = {'totalBaseAndAllOptionsValue': 'potential_total_value_awar',
'totalBaseAndExercisedOptionsValue': 'current_total_value_award',
'totalObligatedAmount': 'total_obligated_amount'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
return obj
def legislative_mandates_values(data, obj):
""" Get values from the legislativeMandates level of the xml """
value_map = {'ClingerCohenAct': 'clinger_cohen_act_planning',
'constructionWageRateRequirements': 'construction_wage_rate_req',
'interagencyContractingAuthority': 'interagency_contracting_au',
'otherStatutoryAuthority': 'other_statutory_authority',
'laborStandards': 'labor_standards',
'materialsSuppliesArticlesEquipment': 'materials_supplies_article'}
additional_reporting = None
try:
ar_dicts = data['listOfAdditionalReportingValues']['additionalReportingValue']
except (KeyError, TypeError):
ar_dicts = None
if ar_dicts:
# if there is only one dict, convert it to a list of one dict
if isinstance(ar_dicts, dict):
ar_dicts = [ar_dicts]
ars = []
for ar_dict in ar_dicts:
ar_value = extract_text(ar_dict)
try:
ar_desc = extract_text(ar_dict['@description'])
except (KeyError, TypeError):
ar_desc = None
ar_str = ar_value if ar_desc is None else '{}: {}'.format(ar_value, ar_desc)
ars.append(ar_str)
additional_reporting = '; '.join(ars)
obj['additional_reporting'] = additional_reporting
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'ClingerCohenAct': 'clinger_cohen_act_pla_desc',
'constructionWageRateRequirements': 'construction_wage_rat_desc',
'interagencyContractingAuthority': 'interagency_contract_desc',
'laborStandards': 'labor_standards_descrip',
'materialsSuppliesArticlesEquipment': 'materials_supplies_descrip'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
return obj
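# Illustrative note (hypothetical values): two additionalReportingValue entries
# such as {'#text': 'A', '@description': 'First'} and {'#text': 'B'} are folded
# into additional_reporting == 'A: First; B' by the join logic above.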
def place_of_performance_values(data, obj):
""" Get values from the placeOfPerformance level of the xml """
value_map = {'placeOfPerformanceCongressionalDistrict': 'place_of_performance_congr',
'placeOfPerformanceZIPCode': 'place_of_performance_zip4a'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# placeOfPerformanceName
try:
obj['place_of_perform_city_name'] = extract_text(data['placeOfPerformanceZIPCode']['@city'])
except (KeyError, TypeError):
obj['place_of_perform_city_name'] = None
# placeOfPerformanceName
try:
obj['place_of_perform_county_na'] = extract_text(data['placeOfPerformanceZIPCode']['@county'])
except (KeyError, TypeError):
obj['place_of_perform_county_na'] = None
# within placeOfPerformance, the principalPlaceOfPerformance sub-level
value_map = {'stateCode': 'place_of_performance_state',
'countryCode': 'place_of_perform_country_c'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['principalPlaceOfPerformance'][key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'countryCode': 'place_of_perf_country_desc',
'stateCode': 'place_of_perfor_state_desc'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['principalPlaceOfPerformance'][key]['@name'])
except (KeyError, TypeError):
obj[value] = None
return obj
def product_or_service_information_values(data, obj):
""" Get values from the productOrServiceInformation level of the xml """
value_map = {'claimantProgramCode': 'dod_claimant_program_code',
'contractBundling': 'contract_bundling',
'countryOfOrigin': 'country_of_product_or_serv',
'informationTechnologyCommercialItemCategory': 'information_technology_com',
'manufacturingOrganizationType': 'domestic_or_foreign_entity',
'placeOfManufacture': 'place_of_manufacture',
'principalNAICSCode': 'naics',
'productOrServiceCode': 'product_or_service_code',
'recoveredMaterialClauses': 'recovered_materials_sustai',
'systemEquipmentCode': 'program_system_or_equipmen',
'useOfEPADesignatedProducts': 'epa_designated_product'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'claimantProgramCode': 'dod_claimant_prog_cod_desc',
'contractBundling': 'contract_bundling_descrip',
'informationTechnologyCommercialItemCategory': 'information_technolog_desc',
'manufacturingOrganizationType': 'domestic_or_foreign_e_desc',
'placeOfManufacture': 'place_of_manufacture_desc',
'principalNAICSCode': 'naics_description',
'productOrServiceCode': 'product_or_service_co_desc',
'recoveredMaterialClauses': 'recovered_materials_s_desc',
'systemEquipmentCode': 'program_system_or_equ_desc',
'useOfEPADesignatedProducts': 'epa_designated_produc_desc'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
# get country of origin name
try:
obj['country_of_product_or_desc'] = extract_text(data['countryOfOrigin']['@name'])
except (KeyError, TypeError):
obj['country_of_product_or_desc'] = None
return obj
def purchaser_information_values(data, obj):
""" Get values from the purchaserInformation level of the xml """
value_map = {'contractingOfficeAgencyID': 'awarding_sub_tier_agency_c',
'contractingOfficeID': 'awarding_office_code',
'foreignFunding': 'foreign_funding',
'fundingRequestingAgencyID': 'funding_sub_tier_agency_co',
'fundingRequestingOfficeID': 'funding_office_code'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'foreignFunding': 'foreign_funding_desc'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
# name values associated with certain values in purchaserInformation
value_map = {'contractingOfficeAgencyID': 'awarding_sub_tier_agency_n',
'contractingOfficeID': 'awarding_office_name',
'fundingRequestingAgencyID': 'funding_sub_tier_agency_na',
'fundingRequestingOfficeID': 'funding_office_name'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@name'])
except (KeyError, TypeError):
obj[value] = None
return obj
def relevant_contract_dates_values(data, obj):
""" Get values from the relevantContractDates level of the xml """
value_map = {'currentCompletionDate': 'period_of_performance_curr',
'effectiveDate': 'period_of_performance_star',
'lastDateToOrder': 'ordering_period_end_date',
'signedDate': 'action_date',
'ultimateCompletionDate': 'period_of_perf_potential_e'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
return obj
def vendor_values(data, obj):
""" Get values from the vendor level of the xml """
# base vendor level
value_map = {'CCRException': 'sam_exception',
'contractingOfficerBusinessSizeDetermination': 'contracting_officers_deter'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'CCRException': 'sam_exception_description',
'contractingOfficerBusinessSizeDetermination': 'contracting_officers_desc'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
# vendorHeader sub-level
value_map = {'vendorAlternateName': 'vendor_alternate_name',
'vendorDoingAsBusinessName': 'vendor_doing_as_business_n',
'vendorEnabled': 'vendor_enabled',
'vendorLegalOrganizationName': 'vendor_legal_org_name',
'vendorName': 'awardee_or_recipient_legal'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorHeader'][key])
except (KeyError, TypeError):
obj[value] = None
# make sure key exists before passing it
try:
data['vendorSiteDetails']
except KeyError:
data['vendorSiteDetails'] = {}
# vendorSiteDetails sub-level (there are a lot so it gets its own function)
obj = vendor_site_details_values(data['vendorSiteDetails'], obj)
return obj
def vendor_site_details_values(data, obj):
""" Get values from the vendorSiteDetails level of the xml (sub-level of vendor) """
# base vendorSiteDetails level
value_map = {'divisionName': 'division_name',
'divisionNumberOrOfficeCode': 'division_number_or_office',
'vendorAlternateSiteCode': 'vendor_alternate_site_code',
'vendorSiteCode': 'vendor_site_code'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# typeOfEducationalEntity sub-level
value_map = {'is1862LandGrantCollege': 'c1862_land_grant_college',
'is1890LandGrantCollege': 'c1890_land_grant_college',
'is1994LandGrantCollege': 'c1994_land_grant_college',
'isAlaskanNativeServicingInstitution': 'alaskan_native_servicing_i',
'isHistoricallyBlackCollegeOrUniversity': 'historically_black_college',
'isMinorityInstitution': 'minority_institution',
'isNativeHawaiianServicingInstitution': 'native_hawaiian_servicing',
'isPrivateUniversityOrCollege': 'private_university_or_coll',
'isSchoolOfForestry': 'school_of_forestry',
'isStateControlledInstitutionofHigherLearning': 'state_controlled_instituti',
'isTribalCollege': 'tribal_college',
'isVeterinaryCollege': 'veterinary_college'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['typeOfEducationalEntity'][key])
except (KeyError, TypeError):
obj[value] = None
# typeOfGovernmentEntity sub-level
value_map = {'isAirportAuthority': 'airport_authority',
'isCouncilOfGovernments': 'council_of_governments',
'isHousingAuthoritiesPublicOrTribal': 'housing_authorities_public',
'isInterstateEntity': 'interstate_entity',
'isPlanningCommission': 'planning_commission',
'isPortAuthority': 'port_authority',
'isTransitAuthority': 'transit_authority'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['typeOfGovernmentEntity'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorBusinessTypes sub-level
value_map = {'isCommunityDevelopedCorporationOwnedFirm': 'community_developed_corpor',
'isForeignGovernment': 'foreign_government',
'isLaborSurplusAreaFirm': 'labor_surplus_area_firm',
'isStateGovernment': 'us_state_government',
'isTribalGovernment': 'us_tribal_government'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorBusinessTypes'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorBusinessTypes > businessOrOrganizationType sub-level
value_map = {'isCorporateEntityNotTaxExempt': 'corporate_entity_not_tax_e',
'isCorporateEntityTaxExempt': 'corporate_entity_tax_exemp',
'isInternationalOrganization': 'international_organization',
'isPartnershipOrLimitedLiabilityPartnership': 'partnership_or_limited_lia',
'isSmallAgriculturalCooperative': 'small_agricultural_coopera',
'isSolePropreitorship': 'sole_proprietorship',
'isUSGovernmentEntity': 'us_government_entity'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorBusinessTypes']['businessOrOrganizationType'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorBusinessTypes > federalGovernment sub-level
value_map = {'isFederalGovernment': 'us_federal_government',
'isFederalGovernmentAgency': 'federal_agency',
'isFederallyFundedResearchAndDevelopmentCorp': 'federally_funded_research'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorBusinessTypes']['federalGovernment'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorBusinessTypes > localGovernment sub-level
value_map = {'isCityLocalGovernment': 'city_local_government',
'isCountyLocalGovernment': 'county_local_government',
'isInterMunicipalLocalGovernment': 'inter_municipal_local_gove',
'isLocalGovernment': 'us_local_government',
'isLocalGovernmentOwned': 'local_government_owned',
'isMunicipalityLocalGovernment': 'municipality_local_governm',
'isSchoolDistrictLocalGovernment': 'school_district_local_gove',
'isTownshipLocalGovernment': 'township_local_government'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorBusinessTypes']['localGovernment'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorCertifications sub-level
value_map = {'isDOTCertifiedDisadvantagedBusinessEnterprise': 'dot_certified_disadvantage',
'isSBACertified8AJointVenture': 'sba_certified_8_a_joint_ve',
'isSBACertified8AProgramParticipant': 'c8a_program_participant',
'isSBACertifiedHUBZone': 'historically_underutilized',
'isSBACertifiedSmallDisadvantagedBusiness': 'small_disadvantaged_busine',
'isSelfCertifiedSmallDisadvantagedBusiness': 'self_certified_small_disad'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorCertifications'][key])
except (KeyError, TypeError):
obj[value] = None
# entityIdentifiers sub-level
try:
obj['cage_code'] = extract_text(data['entityIdentifiers']['cageCode'])
except (KeyError, TypeError):
obj['cage_code'] = None
# entityIdentifiers > vendorDUNSInformation sub-level
value_map = {'DUNSNumber': 'awardee_or_recipient_uniqu',
'globalParentDUNSName': 'ultimate_parent_legal_enti',
'globalParentDUNSNumber': 'ultimate_parent_unique_ide'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['entityIdentifiers']['vendorDUNSInformation'][key])
except (KeyError, TypeError):
obj[value] = None
# entityIdentifiers > vendorUEIInformation sub-level
value_map = {'UEI': 'awardee_or_recipient_uei',
'ultimateParentUEI': 'ultimate_parent_uei'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['entityIdentifiers']['vendorUEIInformation'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorLineOfBusiness sub-level
value_map = {'isCommunityDevelopmentCorporation': 'community_development_corp',
'isDomesticShelter': 'domestic_shelter',
'isEducationalInstitution': 'educational_institution',
'isFoundation': 'foundation',
'isHispanicServicingInstitution': 'hispanic_servicing_institu',
'isHospital': 'hospital_flag',
'isManufacturerOfGoods': 'manufacturer_of_goods',
'isVeterinaryHospital': 'veterinary_hospital'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorLineOfBusiness'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorLocation sub-level
value_map = {'city': 'legal_entity_city_name',
'congressionalDistrictCode': 'legal_entity_congressional',
'countryCode': 'legal_entity_country_code',
'faxNo': 'vendor_fax_number',
'phoneNo': 'vendor_phone_number',
'streetAddress': 'legal_entity_address_line1',
'streetAddress2': 'legal_entity_address_line2',
'streetAddress3': 'legal_entity_address_line3',
'vendorLocationDisabledFlag': 'vendor_location_disabled_f',
'ZIPCode': 'legal_entity_zip4'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorLocation'][key])
except (KeyError, TypeError):
obj[value] = None
# differentiating between US and foreign states
key = 'legal_entity_state_code'
if obj['legal_entity_country_code'] not in country_code_map:
key = 'legal_entity_state_descrip'
# need to set this even if we're not going to have a code because we need to access it later
obj['legal_entity_state_code'] = None
# if it is in the USA, grab the description for the state
else:
try:
obj['legal_entity_state_descrip'] = extract_text(data['vendorLocation']['state']['@name'])
except (KeyError, TypeError):
obj['legal_entity_state_descrip'] = None
try:
obj[key] = extract_text(data['vendorLocation']['state'])
except (KeyError, TypeError):
obj[key] = None
# getting the name associated with the country code
try:
obj['legal_entity_country_name'] = extract_text(data['vendorLocation']['countryCode']['@name'])
except (KeyError, TypeError):
obj['legal_entity_country_name'] = None
# vendorOrganizationFactors sub-level
value_map = {'isForeignOwnedAndLocated': 'foreign_owned_and_located',
'isLimitedLiabilityCorporation': 'limited_liability_corporat',
'isShelteredWorkshop': 'the_ability_one_program',
'isSubchapterSCorporation': 'subchapter_s_corporation',
'organizationalType': 'organizational_type'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorOrganizationFactors'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorOrganizationFactors > profitStructure sub-level
value_map = {'isForProfitOrganization': 'for_profit_organization',
'isNonprofitOrganization': 'nonprofit_organization',
'isOtherNotForProfitOrganization': 'other_not_for_profit_organ'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorOrganizationFactors']['profitStructure'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorRelationshipWithFederalGovernment sub-level
value_map = {'receivesContracts': 'contracts',
'receivesContractsAndGrants': 'receives_contracts_and_gra',
'receivesGrants': 'grants'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorRelationshipWithFederalGovernment'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorSocioEconomicIndicators sub-level
value_map = {'isAlaskanNativeOwnedCorporationOrFirm': 'alaskan_native_owned_corpo',
'isAmericanIndianOwned': 'american_indian_owned_busi',
'isEconomicallyDisadvantagedWomenOwnedSmallBusiness': 'economically_disadvantaged',
'isIndianTribe': 'indian_tribe_federally_rec',
'isJointVentureEconomicallyDisadvantagedWomenOwnedSmallBusiness': 'joint_venture_economically',
'isJointVentureWomenOwnedSmallBusiness': 'joint_venture_women_owned',
'isNativeHawaiianOwnedOrganizationOrFirm': 'native_hawaiian_owned_busi',
'isServiceRelatedDisabledVeteranOwnedBusiness': 'service_disabled_veteran_o',
'isTriballyOwnedFirm': 'tribally_owned_business',
'isVerySmallBusiness': 'emerging_small_business',
'isVeteranOwned': 'veteran_owned_business',
'isWomenOwned': 'woman_owned_business',
'isWomenOwnedSmallBusiness': 'women_owned_small_business'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorSocioEconomicIndicators'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorSocioEconomicIndicators > minorityOwned sub-level
value_map = {'isAsianPacificAmericanOwnedBusiness': 'asian_pacific_american_own',
'isBlackAmericanOwnedBusiness': 'black_american_owned_busin',
'isHispanicAmericanOwnedBusiness': 'hispanic_american_owned_bu',
'isMinorityOwned': 'minority_owned_business',
'isNativeAmericanOwnedBusiness': 'native_american_owned_busi',
'isOtherMinorityOwned': 'other_minority_owned_busin',
'isSubContinentAsianAmericanOwnedBusiness': 'subcontinent_asian_asian_i'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorSocioEconomicIndicators']['minorityOwned'][key])
except (KeyError, TypeError):
obj[value] = None
return obj
def generic_values(data, obj):
""" Get values from the genericTags level of the xml """
generic_strings_value_map = {'genericString01': 'solicitation_date'}
for key, value in generic_strings_value_map.items():
try:
obj[value] = extract_text(data['genericStrings'][key])
except (KeyError, TypeError):
obj[value] = None
return obj
def calculate_ppop_fields(obj, sess, county_by_name, county_by_code, state_code_list, country_list):
""" calculate values that aren't in any feed (or haven't been provided properly) for place of performance """
# only do any of these calculation if the country code is in the list of US territories
if obj['place_of_perform_country_c'] in country_code_map:
# If it's in the list but not USA, find its state code in the list and put that in the state code spot, get
# the state name, then replace country code and country description with USA and UNITED STATES respectively
if obj['place_of_perform_country_c'] != 'USA':
obj['place_of_performance_state'] = country_code_map[obj['place_of_perform_country_c']]
if obj['place_of_performance_state'] in state_code_list:
obj['place_of_perfor_state_desc'] = state_code_list[obj['place_of_performance_state']]
obj['place_of_perform_country_c'] = 'USA'
obj['place_of_perf_country_desc'] = 'UNITED STATES'
# derive state name if we don't have it
if obj['place_of_performance_state'] and not obj['place_of_perfor_state_desc']\
and obj['place_of_performance_state'] in state_code_list:
obj['place_of_perfor_state_desc'] = state_code_list[obj['place_of_performance_state']]
# calculate place of performance county code
if obj['place_of_perform_county_na'] and obj['place_of_performance_state']:
state = obj['place_of_performance_state']
county_name = obj['place_of_perform_county_na']
# make sure they gave us a valid state and then check if it's in our lookup
if state in county_by_name and county_name in county_by_name[state]:
obj['place_of_perform_county_co'] = county_by_name[state][county_name]
# if accessing the county code by state code and county name didn't work, try by zip4a if we have it
if not obj['place_of_perform_county_co'] and obj['place_of_performance_zip4a']:
obj['place_of_perform_county_co'] = get_county_by_zip(sess, obj['place_of_performance_zip4a'])
# if we didn't have a county name but got the county code, we can grab the name
if not obj['place_of_perform_county_na'] and obj['place_of_performance_state'] in county_by_code\
and obj['place_of_perform_county_co'] in county_by_code[obj['place_of_performance_state']]:
obj['place_of_perform_county_na'] =\
county_by_code[obj['place_of_performance_state']][obj['place_of_perform_county_co']]
# if we have content in the zip code and it's in a valid US format, split it into 5 and 4 digit
if obj['place_of_performance_zip4a'] and is_valid_zip(obj['place_of_performance_zip4a']):
obj['place_of_performance_zip5'] = obj['place_of_performance_zip4a'][:5]
if len(obj['place_of_performance_zip4a']) > 5:
obj['place_of_perform_zip_last4'] = obj['place_of_performance_zip4a'][-4:]
# if there is any country code (checked outside function) but not a country name, try to get the country name
if not obj['place_of_perf_country_desc'] and obj['place_of_perform_country_c'] in country_list:
obj['place_of_perf_country_desc'] = country_list[obj['place_of_perform_country_c']]
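# Illustrative note: for a record with place_of_perform_country_c == 'GUM'
# (Guam), the logic above moves 'GU' from country_code_map into
# place_of_performance_state and rewrites the country fields to
# 'USA'/'UNITED STATES', so US territories are treated as states downstream.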
def calculate_legal_entity_fields(obj, sess, county_by_code, state_code_list, country_list):
""" calculate values that aren't in any feed (or haven't been provided properly) for legal entity """
# do legal entity derivations only if legal entity country code is in a US territory of any kind
if obj['legal_entity_country_code'] in country_code_map:
# if it's in the list but not USA, find its state code in the list and put that in the state code spot, get
# the state name, then replace country code and country description with USA and UNITED STATES respectively
if obj['legal_entity_country_code'] != 'USA':
obj['legal_entity_state_code'] = country_code_map[obj['legal_entity_country_code']]
if obj['legal_entity_state_code'] in state_code_list:
obj['legal_entity_state_descrip'] = state_code_list[obj['legal_entity_state_code']]
obj['legal_entity_country_code'] = 'USA'
obj['legal_entity_country_name'] = 'UNITED STATES'
# derive state name if we don't have it
if obj['legal_entity_state_code'] and not obj['legal_entity_state_descrip']\
and obj['legal_entity_state_code'] in state_code_list:
obj['legal_entity_state_descrip'] = state_code_list[obj['legal_entity_state_code']]
# calculate legal entity county code and split zip when possible
if obj['legal_entity_zip4'] and is_valid_zip(obj['legal_entity_zip4']):
obj['legal_entity_county_code'] = get_county_by_zip(sess, obj['legal_entity_zip4'])
# if we have a county code and a state code, we can try to get the county name
if obj['legal_entity_county_code'] and obj['legal_entity_state_code']:
county_code = obj['legal_entity_county_code']
state = obj['legal_entity_state_code']
# make sure they gave us a valid state and then check if it's in our lookup
if state in county_by_code and county_code in county_by_code[state]:
obj['legal_entity_county_name'] = county_by_code[state][county_code]
obj['legal_entity_zip5'] = obj['legal_entity_zip4'][:5]
if len(obj['legal_entity_zip4']) > 5:
obj['legal_entity_zip_last4'] = obj['legal_entity_zip4'][-4:]
# if there is any country code (checked outside function) but not a country name, try to get the country name
if not obj['legal_entity_country_name'] and obj['legal_entity_country_code'] in country_list:
obj['legal_entity_country_name'] = country_list[obj['legal_entity_country_code']]
def calculate_remaining_fields(obj, sess, sub_tier_list, county_by_name, county_by_code, state_code_list, country_list,
exec_comp_dict, atom_type):
""" Calculate values that aren't in any feed but can be calculated.
Args:
obj: a dictionary containing the details we need to derive from and to
sess: the database connection
sub_tier_list: a dictionary containing all the sub tier agency information keyed by sub tier agency code
county_by_name: a dictionary containing all county codes, keyed by state and county name
county_by_code: a dictionary containing all county names, keyed by state and county code
state_code_list: a dictionary containing all state names, keyed by state code
country_list: a dictionary containing all country names, keyed by country code
exec_comp_dict: a dictionary containing all the data for Executive Compensation data keyed by DUNS number
atom_type: a string indicating whether the atom feed being checked is 'award' or 'IDV'
Returns:
the object originally passed in with newly-calculated values added
"""
# we want to null out all the calculated columns in case this is an update to the records
obj['awarding_agency_code'] = None
obj['awarding_agency_name'] = None
obj['funding_agency_code'] = None
obj['funding_agency_name'] = None
obj['place_of_perform_county_co'] = None
obj['legal_entity_county_code'] = None
obj['legal_entity_county_name'] = None
obj['detached_award_proc_unique'] = None
# calculate awarding agency codes/names based on awarding sub tier agency codes
if obj['awarding_sub_tier_agency_c']:
try:
sub_tier_agency = sub_tier_list[obj['awarding_sub_tier_agency_c']]
use_frec = sub_tier_agency.is_frec
agency_data = sub_tier_agency.frec if use_frec else sub_tier_agency.cgac
obj['awarding_agency_code'] = agency_data.frec_code if use_frec else agency_data.cgac_code
obj['awarding_agency_name'] = agency_data.agency_name
except KeyError:
logger.info('WARNING: MissingSubtierCGAC: The awarding sub-tier cgac_code: %s does not exist in cgac table.'
' The FPDS-provided awarding sub-tier agency name (if given) for this cgac_code is %s. '
'The award has been loaded with awarding_agency_code 999.',
obj['awarding_sub_tier_agency_c'], obj['awarding_sub_tier_agency_n'])
obj['awarding_agency_code'] = '999'
obj['awarding_agency_name'] = None
# calculate funding agency codes/names based on funding sub tier agency codes
if obj['funding_sub_tier_agency_co']:
try:
sub_tier_agency = sub_tier_list[obj['funding_sub_tier_agency_co']]
use_frec = sub_tier_agency.is_frec
agency_data = sub_tier_agency.frec if use_frec else sub_tier_agency.cgac
obj['funding_agency_code'] = agency_data.frec_code if use_frec else agency_data.cgac_code
obj['funding_agency_name'] = agency_data.agency_name
except KeyError:
logger.info('WARNING: MissingSubtierCGAC: The funding sub-tier cgac_code: %s does not exist in cgac table. '
'The FPDS-provided funding sub-tier agency name (if given) for this cgac_code is %s. '
'The award has been loaded with funding_agency_code 999.',
obj['funding_sub_tier_agency_co'], obj['funding_sub_tier_agency_na'])
obj['funding_agency_code'] = '999'
obj['funding_agency_name'] = None
# do place of performance calculations only if we have SOME country code
if obj['place_of_perform_country_c']:
calculate_ppop_fields(obj, sess, county_by_name, county_by_code, state_code_list, country_list)
# do legal entity calculations only if we have SOME country code
if obj['legal_entity_country_code']:
calculate_legal_entity_fields(obj, sess, county_by_code, state_code_list, country_list)
# calculate business categories
obj['business_categories'] = get_business_categories(row=obj, data_type='fpds')
# Calculate executive compensation data for the entry.
    if obj['awardee_or_recipient_uniqu'] and obj['awardee_or_recipient_uniqu'] in exec_comp_dict:
exec_comp = exec_comp_dict[obj['awardee_or_recipient_uniqu']]
for i in range(1, 6):
obj['high_comp_officer{}_full_na'.format(i)] = exec_comp['officer{}_name'.format(i)]
obj['high_comp_officer{}_amount'.format(i)] = exec_comp['officer{}_amt'.format(i)]
else:
# Need to make sure they're null in case this is updating and the DUNS has changed somehow
for i in range(1, 6):
obj['high_comp_officer{}_full_na'.format(i)] = None
obj['high_comp_officer{}_amount'.format(i)] = None
# calculate unique award key
if atom_type == 'award':
unique_award_string_list = ['CONT_AWD']
key_list = ['piid', 'agency_id', 'parent_award_id', 'referenced_idv_agency_iden']
else:
unique_award_string_list = ['CONT_IDV']
key_list = ['piid', 'agency_id']
for item in key_list:
# Get the value in the object or, if the key doesn't exist or value is None, set it to "-none-"
unique_award_string_list.append(obj.get(item) or '-none-')
obj['unique_award_key'] = '_'.join(unique_award_string_list).upper()
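    # e.g. CONT_AWD_<piid>_<agency_id>_<parent_award_id>_<referenced_idv_agency_iden>, '-none-' filling any gaps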
# calculate unique key
key_list = ['agency_id', 'referenced_idv_agency_iden', 'piid', 'award_modification_amendme', 'parent_award_id',
'transaction_number']
idv_list = ['agency_id', 'piid', 'award_modification_amendme']
unique_string = ""
for item in key_list:
if len(unique_string) > 0:
unique_string += "_"
if atom_type == 'award' or item in idv_list:
# Get the value in the object or, if the key doesn't exist or value is None, set it to "-none-"
unique_string += obj.get(item) or '-none-'
else:
unique_string += '-none-'
# The order of the unique key is agency_id, referenced_idv_agency_iden, piid, award_modification_amendme,
# parent_award_id, transaction_number
obj['detached_award_proc_unique'] = unique_string
return obj
def process_data(data, sess, atom_type, sub_tier_list, county_by_name, county_by_code, state_code_list, country_list,
exec_comp_dict):
""" Process the data coming in.
Args:
data: an object containing the data gathered from the feed
sess: the database connection
atom_type: a string indicating whether the atom feed being checked is 'award' or 'IDV'
sub_tier_list: a dictionary containing all the sub tier agency information keyed by sub tier agency code
county_by_name: a dictionary containing all county codes, keyed by state and county name
county_by_code: a dictionary containing all county names, keyed by state and county code
state_code_list: a dictionary containing all state names, keyed by state code
country_list: a dictionary containing all country names, keyed by country code
exec_comp_dict: a dictionary containing all the data for Executive Compensation data keyed by DUNS number
Returns:
An object containing the processed and calculated data.
"""
obj = {}
if atom_type == "award":
# make sure key exists before passing it
try:
data['awardID']
except KeyError:
data['awardID'] = {}
obj = award_id_values(data['awardID'], obj)
else:
# transaction_number is a part of the unique identifier, set it to None
obj['transaction_number'] = None
# make sure key exists before passing it
try:
data['contractID']
except KeyError:
data['contractID'] = {}
obj = contract_id_values(data['contractID'], obj)
# make sure key exists before passing it
try:
data['competition']
except KeyError:
data['competition'] = {}
obj = competition_values(data['competition'], obj)
# make sure key exists before passing it
try:
data['contractData']
except KeyError:
data['contractData'] = {}
obj = contract_data_values(data['contractData'], obj, atom_type)
# make sure key exists before passing it
try:
data['dollarValues']
except KeyError:
data['dollarValues'] = {}
obj = dollar_values_values(data['dollarValues'], obj)
# make sure key exists before passing it
try:
data['totalDollarValues']
except KeyError:
data['totalDollarValues'] = {}
obj = total_dollar_values_values(data['totalDollarValues'], obj)
if atom_type == "award":
# make sure key exists before passing it
try:
data['placeOfPerformance']
except KeyError:
data['placeOfPerformance'] = {}
obj = place_of_performance_values(data['placeOfPerformance'], obj)
    else:
        # fill these values with None so the existence checks in the county calculations don't fail
obj['place_of_perform_county_na'] = None
obj['place_of_performance_state'] = None
obj['place_of_perfor_state_desc'] = None
obj['place_of_performance_zip4a'] = None
obj['place_of_perform_country_c'] = None
obj['place_of_perf_country_desc'] = None
# make sure key exists before passing it
try:
data['legislativeMandates']
except KeyError:
data['legislativeMandates'] = {}
obj = legislative_mandates_values(data['legislativeMandates'], obj)
try:
obj['subcontracting_plan'] = extract_text(data['preferencePrograms']['subcontractPlan'])
except (KeyError, TypeError):
obj['subcontracting_plan'] = None
try:
obj['subcontracting_plan_desc'] = extract_text(data['preferencePrograms']['subcontractPlan']['@description'])
except (KeyError, TypeError):
obj['subcontracting_plan_desc'] = None
# make sure key exists before passing it
try:
data['productOrServiceInformation']
except KeyError:
data['productOrServiceInformation'] = {}
obj = product_or_service_information_values(data['productOrServiceInformation'], obj)
# make sure key exists before passing it
try:
data['purchaserInformation']
except KeyError:
data['purchaserInformation'] = {}
obj = purchaser_information_values(data['purchaserInformation'], obj)
# make sure key exists before passing it
try:
data['relevantContractDates']
except KeyError:
data['relevantContractDates'] = {}
obj = relevant_contract_dates_values(data['relevantContractDates'], obj)
# make sure key exists before passing it
try:
data['vendor']
except KeyError:
data['vendor'] = {}
obj = vendor_values(data['vendor'], obj)
# make sure key exists before passing it
try:
data['genericTags']
except KeyError:
data['genericTags'] = {}
obj = generic_values(data['genericTags'], obj)
obj = calculate_remaining_fields(obj, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, atom_type)
try:
obj['last_modified'] = extract_text(data['transactionInformation']['lastModifiedDate'])
except (KeyError, TypeError):
obj['last_modified'] = None
try:
obj['initial_report_date'] = extract_text(data['transactionInformation']['createdDate'])
except (KeyError, TypeError):
obj['initial_report_date'] = None
obj['pulled_from'] = atom_type
# clear out potentially excel-breaking whitespace from specific fields
free_fields = ["award_description", "vendor_doing_as_business_n", "legal_entity_address_line1",
"legal_entity_address_line2", "legal_entity_address_line3", "ultimate_parent_legal_enti",
"awardee_or_recipient_legal", "other_statutory_authority"]
for field in free_fields:
if obj[field]:
            obj[field] = re.sub(r'\s', ' ', obj[field])
return obj
def process_delete_data(data, atom_type):
""" process the delete feed data coming in """
unique_string = ""
# order of unique constraints in string: agency_id, referenced_idv_agency_iden, piid, award_modification_amendme,
# parent_award_id, transaction_number
# get all values that make up unique key
if atom_type == "award":
try:
unique_string += extract_text(data['awardID']['awardContractID']['agencyID'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['awardID']['referencedIDVID']['agencyID'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['awardID']['awardContractID']['PIID'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['awardID']['awardContractID']['modNumber'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['awardID']['referencedIDVID']['PIID'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['awardID']['awardContractID']['transactionNumber'])
except (KeyError, TypeError):
unique_string += "-none-"
else:
try:
unique_string += extract_text(data['contractID']['IDVID']['agencyID'])
except (KeyError, TypeError):
unique_string += "-none-"
# referenced_idv_agency_iden not used in IDV identifier, just set it to "-none-"
unique_string += "_-none-_"
try:
unique_string += extract_text(data['contractID']['IDVID']['PIID'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['contractID']['IDVID']['modNumber'])
except (KeyError, TypeError):
unique_string += "-none-"
# parent_award_id not used in IDV identifier and transaction_number not in IDV feed, just set them to "-none-"
unique_string += "_-none-_-none-"
return unique_string
def create_processed_data_list(data, contract_type, sess, sub_tier_list, county_by_name, county_by_code,
state_code_list, country_list, exec_comp_dict):
""" Create a list of processed data
Args:
            data: an object containing the data gathered from the feed
            contract_type: a string indicating whether the atom feed being checked is 'award' or 'IDV'
            sess: the database connection
sub_tier_list: a dictionary containing all the sub tier agency information keyed by sub tier agency code
county_by_name: a dictionary containing all county codes, keyed by state and county name
county_by_code: a dictionary containing all county names, keyed by state and county code
state_code_list: a dictionary containing all state names, keyed by state code
country_list: a dictionary containing all country names, keyed by country code
exec_comp_dict: a dictionary containing all the data for Executive Compensation data keyed by DUNS number
Returns:
A list containing the processed and calculated data.
"""
data_list = []
for value in data:
tmp_obj = process_data(value['content'][contract_type], sess, atom_type=contract_type,
sub_tier_list=sub_tier_list, county_by_name=county_by_name,
county_by_code=county_by_code, state_code_list=state_code_list,
country_list=country_list, exec_comp_dict=exec_comp_dict)
data_list.append(tmp_obj)
return data_list
def add_processed_data_list(data, sess):
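    """ Attempt a bulk insert of the processed FPDS data; on a duplicate key, fall back to upserting each row.

        Args:
            data: a list of dictionaries of processed FPDS data
            sess: the database connection
    """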
try:
sess.bulk_save_objects([DetachedAwardProcurement(**fpds_data) for fpds_data in data])
sess.commit()
except IntegrityError:
sess.rollback()
logger.error("Attempted to insert duplicate FPDS data. Inserting each row in batch individually.")
for fpds_obj in data:
insert_statement = insert(DetachedAwardProcurement).values(**fpds_obj).\
on_conflict_do_update(index_elements=['detached_award_proc_unique'], set_=fpds_obj)
sess.execute(insert_statement)
sess.commit()
def process_and_add(data, contract_type, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, now, threaded=False):
""" Start the processing for data and add it to the DB.
Args:
data: an object containing the data gathered from the feed
contract_type: a string indicating whether the atom feed being checked is 'award' or 'IDV'
sess: the database connection
sub_tier_list: a dictionary containing all the sub tier agency information keyed by sub tier agency code
county_by_name: a dictionary containing all county codes, keyed by state and county name
county_by_code: a dictionary containing all county names, keyed by state and county code
state_code_list: a dictionary containing all state names, keyed by state code
country_list: a dictionary containing all country names, keyed by country code
exec_comp_dict: a dictionary containing all the data for Executive Compensation data keyed by DUNS number
now: a timestamp indicating the time to set the updated_at to
threaded: a boolean indicating whether the process is running as a thread or not
"""
if threaded:
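        # upsert each record with a single ON CONFLICT statement; safer when multiple threads insert at once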
for value in data:
tmp_obj = process_data(value['content'][contract_type], sess, atom_type=contract_type,
sub_tier_list=sub_tier_list, county_by_name=county_by_name,
county_by_code=county_by_code, state_code_list=state_code_list,
country_list=country_list, exec_comp_dict=exec_comp_dict)
tmp_obj['updated_at'] = now
insert_statement = insert(DetachedAwardProcurement).values(**tmp_obj).\
on_conflict_do_update(index_elements=['detached_award_proc_unique'], set_=tmp_obj)
sess.execute(insert_statement)
else:
for value in data:
tmp_obj = process_data(value['content'][contract_type], sess, atom_type=contract_type,
sub_tier_list=sub_tier_list, county_by_name=county_by_name,
county_by_code=county_by_code, state_code_list=state_code_list,
country_list=country_list, exec_comp_dict=exec_comp_dict)
try:
statement = insert(DetachedAwardProcurement).values(**tmp_obj)
sess.execute(statement)
sess.commit()
except IntegrityError:
sess.rollback()
tmp_obj['updated_at'] = now
sess.query(DetachedAwardProcurement).\
filter_by(detached_award_proc_unique=tmp_obj['detached_award_proc_unique']).\
update(tmp_obj, synchronize_session=False)
sess.commit()
def get_with_exception_hand(url_string, expect_entries=True):
""" Retrieve data from FPDS, allow for multiple retries and timeouts """
exception_retries = -1
retry_sleep_times = [5, 30, 60, 180, 300, 360, 420, 480, 540, 600]
request_timeout = 60
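    # each retry sleeps for the next duration in the list and widens the request timeout by a minute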
while exception_retries < len(retry_sleep_times):
try:
resp = requests.get(url_string, timeout=request_timeout)
if expect_entries:
# we should always expect entries, otherwise we shouldn't be calling it
resp_dict = xmltodict.parse(resp.text, process_namespaces=True, namespaces=FPDS_NAMESPACES)
len(list_data(resp_dict['feed']['entry']))
break
except (ConnectionResetError, ReadTimeoutError, ConnectionError, ReadTimeout, KeyError) as e:
exception_retries += 1
request_timeout += 60
if exception_retries < len(retry_sleep_times):
logger.info('Connection exception. Sleeping {}s and then retrying with a max wait of {}s...'
.format(retry_sleep_times[exception_retries], request_timeout))
time.sleep(retry_sleep_times[exception_retries])
else:
logger.info('Connection to FPDS feed lost, maximum retry attempts exceeded.')
raise e
return resp
def get_total_expected_records(base_url):
""" Retrieve the total number of expected records based on the last paginated URL """
# get a single call so we can find the last page
initial_request = get_with_exception_hand(base_url, expect_entries=False)
initial_request_xml = xmltodict.parse(initial_request.text, process_namespaces=True, namespaces=FPDS_NAMESPACES)
# retrieve all URLs
try:
urls_list = list_data(initial_request_xml['feed']['link'])
except KeyError:
urls_list = []
# retrieve the "last" URL from the list
final_request_url = None
for url in urls_list:
if url['@rel'] == 'last':
final_request_url = url['@href']
continue
    # if there is no "last" URL, everything is on this single page, so count the entries directly
    if not final_request_url:
        try:
            return len(list_data(initial_request_xml['feed']['entry']))
        except KeyError:
            return 0
    # retrieve the starting offset of the last page from its URL
    final_request_count = int(final_request_url.split('&start=')[-1])
# retrieve the last page of data
final_request = get_with_exception_hand(final_request_url)
final_request_xml = xmltodict.parse(final_request.text, process_namespaces=True, namespaces=FPDS_NAMESPACES)
try:
entries_list = list_data(final_request_xml['feed']['entry'])
except KeyError:
raise Exception("Initial count failed, no entries in last page of request.")
return final_request_count + len(entries_list)
def get_data(contract_type, award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, last_run=None, threaded=False, start_date=None, end_date=None, metrics=None,
specific_params=None):
""" Get the data from the atom feed based on contract/award type and the last time the script was run.
Args:
contract_type: a string indicating whether the atom feed being checked is 'award' or 'IDV'
award_type: a string indicating what the award type of the feed being checked is
now: a timestamp indicating the time to set the updated_at to
sess: the database connection
sub_tier_list: a dictionary containing all the sub tier agency information keyed by sub tier agency code
county_by_name: a dictionary containing all county codes, keyed by state and county name
county_by_code: a dictionary containing all county names, keyed by state and county code
state_code_list: a dictionary containing all state names, keyed by state code
country_list: a dictionary containing all country names, keyed by country code
exec_comp_dict: a dictionary containing all the data for Executive Compensation data keyed by DUNS number
last_run: a date indicating the last time the pull was run
threaded: a boolean indicating whether the process is running as a thread or not
start_date: a date indicating the first date to pull from (must be provided with end_date)
end_date: a date indicating the last date to pull from (must be provided with start_date)
metrics: a dictionary to gather metrics for the script in
specific_params: a string containing a specific set of params to run the query with (used for outside
scripts that need to run a data load)
"""
if not metrics:
metrics = {}
data = []
yesterday = now - datetime.timedelta(days=1)
utcnow = datetime.datetime.utcnow()
# If a specific set of params was provided, use that
if specific_params:
params = specific_params
# if a date that the script was last successfully run is not provided, get all data
elif not last_run:
params = 'SIGNED_DATE:[2016/10/01,' + yesterday.strftime('%Y/%m/%d') + '] '
metrics['start_date'] = '2016/10/01'
metrics['end_date'] = yesterday.strftime('%Y/%m/%d')
# if a date that the script was last successfully run is provided, get data since that date
else:
last_run_date = last_run - relativedelta(days=1)
params = 'LAST_MOD_DATE:[' + last_run_date.strftime('%Y/%m/%d') + ',' + yesterday.strftime('%Y/%m/%d') + '] '
metrics['start_date'] = last_run_date.strftime('%Y/%m/%d')
metrics['end_date'] = yesterday.strftime('%Y/%m/%d')
if start_date and end_date:
params = 'LAST_MOD_DATE:[' + start_date + ',' + end_date + '] '
metrics['start_date'] = start_date
metrics['end_date'] = end_date
base_url = feed_url + params + 'CONTRACT_TYPE:"' + contract_type.upper() + '" AWARD_TYPE:"' + award_type + '"'
logger.info('Starting get feed: %s', base_url)
# retrieve the total count of expected records for this pull
total_expected_records = get_total_expected_records(base_url)
logger.info('{} record(s) expected from this feed'.format(total_expected_records))
entries_processed = 0
while True:
# pull in the next MAX_ENTRIES * REQUESTS_AT_ONCE until we get anything less than the MAX_ENTRIES
async def atom_async_get(entries_already_processed, total_expected_records):
response_list = []
loop = asyncio.get_event_loop()
requests_at_once = MAX_REQUESTS_AT_ONCE
if total_expected_records - entries_already_processed < (MAX_REQUESTS_AT_ONCE * MAX_ENTRIES):
                # request one extra page to confirm nothing was added since we got the expected count
requests_at_once = math.ceil((total_expected_records - entries_already_processed) / MAX_ENTRIES) + 1
futures = [
loop.run_in_executor(
None,
get_with_exception_hand,
base_url + "&start=" + str(entries_already_processed + (start_offset * MAX_ENTRIES)),
total_expected_records > entries_already_processed + (start_offset * MAX_ENTRIES)
)
for start_offset in range(requests_at_once)
]
for response in await asyncio.gather(*futures):
response_list.append(response.text)
return response_list
# End async get requests def
loop = asyncio.get_event_loop()
full_response = loop.run_until_complete(atom_async_get(entries_processed, total_expected_records))
for next_resp in full_response:
response_dict = xmltodict.parse(next_resp, process_namespaces=True, namespaces=FPDS_NAMESPACES)
try:
entries_per_response = list_data(response_dict['feed']['entry'])
except KeyError:
continue
if last_run or specific_params:
for entry in entries_per_response:
data.append(entry)
entries_processed += 1
else:
data.extend(create_processed_data_list(entries_per_response, contract_type, sess, sub_tier_list,
county_by_name, county_by_code, state_code_list, country_list,
exec_comp_dict))
entries_processed += len(entries_per_response)
if entries_processed > total_expected_records:
# Find entries that don't have FPDS content and print them all
for next_resp in full_response:
response_dict = xmltodict.parse(next_resp, process_namespaces=True, namespaces=FPDS_NAMESPACES)
try:
list_data(response_dict['feed']['entry'])
except KeyError:
logger.info(response_dict)
continue
raise Exception("Total number of expected records has changed\nExpected: {}\nRetrieved so far: {}"
.format(total_expected_records, len(data)))
if data:
        # Log which one we're on so we can keep track of how far we are, insert into DB every ~1,000 lines
logger.info("Retrieved %s lines of get %s: %s feed, writing next %s to DB",
entries_processed, contract_type, award_type, len(data))
if last_run or specific_params:
process_and_add(data, contract_type, sess, sub_tier_list, county_by_name, county_by_code,
state_code_list, country_list, exec_comp_dict, utcnow, threaded)
else:
add_processed_data_list(data, sess)
logger.info("Successfully inserted %s lines of get %s: %s feed, continuing feed retrieval",
len(data), contract_type, award_type)
# if we got less than the full set of records, we can stop calling the feed
if len(data) < (MAX_ENTRIES * MAX_REQUESTS_AT_ONCE):
# ensure we loaded the number of records we expected to, otherwise we'll need to reload
if entries_processed != total_expected_records:
raise Exception("Records retrieved != Total expected records\nExpected: {}\nRetrieved: {}"
.format(total_expected_records, entries_processed))
else:
if 'records_received' not in metrics:
metrics['records_received'] = total_expected_records
else:
metrics['records_received'] += total_expected_records
break
else:
data = []
logger.info("Total entries in %s: %s feed: %s", contract_type, award_type, entries_processed)
logger.info("Processed %s: %s data", contract_type, award_type)
def get_delete_data(contract_type, now, sess, last_run, start_date=None, end_date=None, metrics=None):
""" Get data from the delete feed """
if not metrics:
metrics = {}
data = []
yesterday = now - datetime.timedelta(days=1)
last_run_date = last_run - relativedelta(days=1)
params = 'LAST_MOD_DATE:[' + last_run_date.strftime('%Y/%m/%d') + ',' + yesterday.strftime('%Y/%m/%d') + '] '
if start_date and end_date:
params = 'LAST_MOD_DATE:[' + start_date + ',' + end_date + '] '
    # If only the delete feed is being run, the dates haven't been set by another part of the script yet,
    # so set them here.
    if not metrics.get('start_date'):
        metrics['start_date'] = start_date
    if not metrics.get('end_date'):
        metrics['end_date'] = end_date
base_url = delete_url + params + 'CONTRACT_TYPE:"' + contract_type.upper() + '"'
logger.info('Starting delete feed: %s', base_url)
# retrieve the total count of expected records for this pull
total_expected_records = get_total_expected_records(base_url)
logger.info('{} record(s) expected from this feed'.format(total_expected_records))
processed_deletions = 0
while True:
        # retrieve the next page of the delete feed, reusing the retry/backoff logic from the get feed
        resp = get_with_exception_hand(base_url + '&start=' + str(processed_deletions), expect_entries=False)
        resp_data = xmltodict.parse(resp.text, process_namespaces=True, namespaces=FPDS_NAMESPACES)
# only list the data if there's data to list
try:
listed_data = list_data(resp_data['feed']['entry'])
except KeyError:
listed_data = []
        if processed_deletions > total_expected_records:
            raise Exception("Total number of expected records has changed\nExpected: {}\nRetrieved so far: {}"
                            .format(total_expected_records, processed_deletions))
for ld in listed_data:
data.append(ld)
processed_deletions += 1
# Every 100 lines, log which one we're on so we can keep track of how far we are
if processed_deletions % 100 == 0:
logger.info("On line %s of %s delete feed", str(processed_deletions), contract_type)
# if we got less than the full set of records we can stop calling the feed
        if len(listed_data) < MAX_ENTRIES:
# ensure we loaded the number of records we expected to, otherwise we'll need to reload
if processed_deletions != total_expected_records:
raise Exception("Records retrieved != Total expected records\nExpected: {}\nRetrieved: {}"
.format(total_expected_records, len(listed_data)))
else:
if 'deletes_received' not in metrics:
metrics['deletes_received'] = total_expected_records
else:
metrics['deletes_received'] += total_expected_records
break
else:
listed_data = []
logger.info("Total entries in %s delete feed: %s", contract_type, str(processed_deletions))
delete_list = []
delete_dict = {}
for value in data:
# get last modified date
last_modified = value['content'][contract_type]['transactionInformation']['lastModifiedDate']
unique_string = process_delete_data(value['content'][contract_type], atom_type=contract_type)
existing_item = sess.query(DetachedAwardProcurement).\
filter_by(detached_award_proc_unique=unique_string).one_or_none()
if existing_item:
# only add to delete list if the last modified date is later than the existing entry's last modified date
if last_modified > existing_item.last_modified:
delete_list.append(existing_item.detached_award_procurement_id)
delete_dict[existing_item.detached_award_procurement_id] = existing_item.detached_award_proc_unique
# only need to delete values if there's something to delete
if delete_list:
if 'records_deleted' not in metrics:
metrics['records_deleted'] = len(delete_list)
else:
metrics['records_deleted'] += len(delete_list)
sess.query(DetachedAwardProcurement).\
filter(DetachedAwardProcurement.detached_award_procurement_id.in_(delete_list)).\
delete(synchronize_session=False)
    # write the deleted records to a CSV, either in S3 or locally depending on the config
seconds = int((datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds())
file_name = now.strftime('%m-%d-%Y') + "_delete_records_" + contract_type + "_" + str(seconds) + ".csv"
metrics['deleted_{}_records_file'.format(contract_type).lower()] = file_name
headers = ["detached_award_procurement_id", "detached_award_proc_unique"]
if CONFIG_BROKER["use_aws"]:
s3client = boto3.client('s3', region_name=CONFIG_BROKER['aws_region'])
# add headers
contents = bytes((",".join(headers) + "\n").encode())
for key, value in delete_dict.items():
contents += bytes('{},{}\n'.format(key, value).encode())
s3client.put_object(Bucket=CONFIG_BROKER['fpds_delete_bucket'], Key=file_name, Body=contents)
else:
with CsvLocalWriter(file_name, headers) as writer:
for key, value in delete_dict.items():
writer.write([key, value])
writer.finish_batch()
def create_lookups(sess):
""" Create the lookups used for FPDS derivations.
Args:
sess: connection to database
Returns:
Dictionaries of sub tier agencies by code, country names by code, county names by state code + county
code, county codes by state code + county name, state name by code, and executive compensation data by
DUNS number
"""
# get and create list of sub tier agencies
sub_tiers = sess.query(SubTierAgency).all()
sub_tier_list = {}
for sub_tier in sub_tiers:
sub_tier_list[sub_tier.sub_tier_agency_code] = sub_tier
# get and create list of country code -> country name mappings.
countries = sess.query(CountryCode).all()
country_list = {}
for country in countries:
country_list[country.country_code] = country.country_name
# get and create list of state code -> state name mappings. Prime the county lists with state codes
county_by_name = {}
county_by_code = {}
state_code_list = {}
state_codes = sess.query(States.state_code, func.upper(States.state_name).label('state_name')).all()
for state_code in state_codes:
county_by_name[state_code.state_code] = {}
county_by_code[state_code.state_code] = {}
state_code_list[state_code.state_code] = state_code.state_name
# Fill the county lists with data (code -> name mappings and name -> code mappings)
county_codes = sess.query(CountyCode.county_number, CountyCode.state_code,
func.upper(CountyCode.county_name).label('county_name')).all()
for county_code in county_codes:
# we don't want any "(CA)" endings, so strip those
county_name = county_code.county_name.replace(' (CA)', '').strip()
# we want all the counties in our by-code lookup because we'd be using this table anyway for derivations
county_by_code[county_code.state_code][county_code.county_number] = county_name
# if the county name has only letters/spaces then we want it in our by-name lookup, the rest have the potential
# to be different from the FPDS feed
        if re.match(r'^[A-Z\s]+$', county_code.county_name):
county_by_name[county_code.state_code][county_name] = county_code.county_number
# get and create list of duns -> exec comp data mappings
exec_comp_dict = {}
duns_list = sess.query(DUNS).filter(DUNS.high_comp_officer1_full_na.isnot(None)).all()
for duns in duns_list:
exec_comp_dict[duns.awardee_or_recipient_uniqu] = \
{'officer1_name': duns.high_comp_officer1_full_na, 'officer1_amt': duns.high_comp_officer1_amount,
'officer2_name': duns.high_comp_officer2_full_na, 'officer2_amt': duns.high_comp_officer2_amount,
'officer3_name': duns.high_comp_officer3_full_na, 'officer3_amt': duns.high_comp_officer3_amount,
'officer4_name': duns.high_comp_officer4_full_na, 'officer4_amt': duns.high_comp_officer4_amount,
'officer5_name': duns.high_comp_officer5_full_na, 'officer5_amt': duns.high_comp_officer5_amount}
del duns_list
return sub_tier_list, country_list, state_code_list, county_by_name, county_by_code, exec_comp_dict
def main():
sess = GlobalDB.db().session
now = datetime.datetime.now()
parser = argparse.ArgumentParser(description='Pull data from the FPDS Atom Feed.')
parser.add_argument('-a', '--all', help='Clear out the database and get historical data', action='store_true')
parser.add_argument('-l', '--latest', help='Get by last_mod_date stored in DB', action='store_true')
parser.add_argument('-d', '--delivery', help='Used in conjunction with -a to indicate delivery order feed',
action='store_true')
parser.add_argument('-o', '--other',
help='Used in conjunction with -a to indicate all feeds other than delivery order',
action='store_true')
parser.add_argument('-da', '--dates', help='Used in conjunction with -l to specify dates to gather updates from.'
'Should have 2 arguments, first and last day, formatted YYYY/mm/dd',
nargs=2, type=str)
parser.add_argument('-del', '--delete', help='Used to only run the delete feed. First argument must be "both", '
'"idv", or "award". The second and third arguments must be the first '
'and last day to run the feeds for, formatted YYYY/mm/dd',
nargs=3, type=str)
args = parser.parse_args()
award_types_award = ["BPA Call", "Definitive Contract", "Purchase Order", "Delivery Order"]
award_types_idv = ["GWAC", "BOA", "BPA", "FSS", "IDC"]
metrics_json = {
'script_name': 'pull_fpds_data.py',
'start_time': str(now),
'records_received': 0,
'deletes_received': 0,
'records_deleted': 0,
'deleted_award_records_file': '',
'deleted_idv_records_file': '',
'start_date': '',
'end_date': ''
}
sub_tier_list, country_list, state_code_list, county_by_name, county_by_code, exec_comp_dict = create_lookups(sess)
if args.all:
if (not args.delivery and not args.other) or (args.delivery and args.other):
logger.error("When using the -a flag, please include either -d or -o "
"(but not both) to indicate which feeds to read in")
raise ValueError("When using the -a flag, please include either -d or -o "
"(but not both) to indicate which feeds to read in")
logger.info("Starting at: %s", str(datetime.datetime.now()))
if args.other:
for award_type in award_types_idv:
get_data("IDV", award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, metrics=metrics_json)
for award_type in award_types_award:
if award_type != "Delivery Order":
get_data("award", award_type, now, sess, sub_tier_list, county_by_name, county_by_code,
state_code_list, country_list, exec_comp_dict, metrics=metrics_json)
elif args.delivery:
get_data("award", "Delivery Order", now, sess, sub_tier_list, county_by_name, county_by_code,
state_code_list, country_list, exec_comp_dict, metrics=metrics_json)
last_update = sess.query(FPDSUpdate).one_or_none()
if last_update:
sess.query(FPDSUpdate).update({"update_date": now}, synchronize_session=False)
else:
sess.add(FPDSUpdate(update_date=now))
sess.commit()
logger.info("Ending at: %s", str(datetime.datetime.now()))
elif args.latest:
logger.info("Starting at: %s", str(datetime.datetime.now()))
last_update_obj = sess.query(FPDSUpdate).one_or_none()
# update_date can't be null because it's being used as the PK for the table, so it can only exist or
# there are no rows in the table. If there are no rows, act like it's an "add all"
if not last_update_obj:
logger.error(
"No last_update date present, please run the script with the -a flag to generate an initial dataset")
raise ValueError(
"No last_update date present, please run the script with the -a flag to generate an initial dataset")
last_update = last_update_obj.update_date
start_date = None
end_date = None
if args.dates:
start_date = args.dates[0]
end_date = args.dates[1]
for award_type in award_types_idv:
get_data("IDV", award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, last_update, start_date=start_date, end_date=end_date,
metrics=metrics_json)
for award_type in award_types_award:
get_data("award", award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, last_update, start_date=start_date, end_date=end_date,
metrics=metrics_json)
# We also need to process the delete feed
get_delete_data("IDV", now, sess, last_update, start_date, end_date, metrics=metrics_json)
get_delete_data("award", now, sess, last_update, start_date, end_date, metrics=metrics_json)
if not start_date and not end_date:
sess.query(FPDSUpdate).update({"update_date": now}, synchronize_session=False)
sess.commit()
logger.info("Ending at: %s", str(datetime.datetime.now()))
elif args.delete:
del_type = args.delete[0]
if del_type == 'award':
del_awards = True
del_idvs = False
elif del_type == 'idv':
del_awards = False
del_idvs = True
elif del_type == 'both':
del_awards = True
del_idvs = True
else:
logger.error("Delete argument must be \"idv\", \"award\", or \"both\"")
raise ValueError("Delete argument must be \"idv\", \"award\", or \"both\"")
if del_idvs:
get_delete_data("IDV", now, sess, now, args.delete[1], args.delete[2], metrics=metrics_json)
if del_awards:
get_delete_data("award", now, sess, now, args.delete[1], args.delete[2], metrics=metrics_json)
sess.commit()
metrics_json['duration'] = str(datetime.datetime.now() - now)
with open('pull_fpds_data_metrics.json', 'w+') as metrics_file:
json.dump(metrics_json, metrics_file)
# TODO add a correct start date for "all" so we don't get ALL the data or too little of the data
# TODO fine-tune indexing
if __name__ == '__main__':
with create_app().app_context():
configure_logging()
main()
| 46.57232
| 120
| 0.652259
|
import boto3
import logging
import argparse
import requests
import xmltodict
import asyncio
import datetime
import time
import re
import json
import math
from sqlalchemy import func
from dateutil.relativedelta import relativedelta
from requests.exceptions import ConnectionError, ReadTimeout
from urllib3.exceptions import ReadTimeoutError
from dataactcore.logging import configure_logging
from dataactcore.config import CONFIG_BROKER
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.exc import IntegrityError
from dataactcore.interfaces.db import GlobalDB
from dataactcore.models.domainModels import SubTierAgency, CountryCode, States, CountyCode, Zips, DUNS
from dataactcore.models.stagingModels import DetachedAwardProcurement
from dataactcore.models.jobModels import FPDSUpdate
from dataactcore.utils.business_categories import get_business_categories
from dataactcore.models.jobModels import Submission
from dataactcore.models.userModel import User
from dataactvalidator.health_check import create_app
from dataactvalidator.filestreaming.csvLocalWriter import CsvLocalWriter
feed_url = "https://www.fpds.gov/ezsearch/FEEDS/ATOM?FEEDNAME=PUBLIC&templateName=1.5.2&q="
delete_url = "https://www.fpds.gov/ezsearch/FEEDS/ATOM?FEEDNAME=DELETED&templateName=1.5.2&q="
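# search criteria are appended to these base URLs, e.g.
# LAST_MOD_DATE:[2019/01/01,2019/01/02] CONTRACT_TYPE:"AWARD" AWARD_TYPE:"BPA Call" (dates shown are illustrative)

# US territories and minor outlying islands have their own ISO country codes in FPDS; map them to state codes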
country_code_map = {'USA': 'US', 'ASM': 'AS', 'GUM': 'GU', 'MNP': 'MP', 'PRI': 'PR', 'VIR': 'VI', 'FSM': 'FM',
'MHL': 'MH', 'PLW': 'PW', 'XBK': 'UM', 'XHO': 'UM', 'XJV': 'UM', 'XJA': 'UM', 'XKR': 'UM',
'XPL': 'UM', 'XMW': 'UM', 'XWK': 'UM'}
FPDS_NAMESPACES = {'http://www.fpdsng.com/FPDS': None,
'http://www.w3.org/2005/Atom': None,
'https://www.fpds.gov/FPDS': None}
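# the FPDS atom feed returns at most 10 entries per page; we fan out up to 100 page requests at a time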
MAX_ENTRIES = 10
MAX_REQUESTS_AT_ONCE = 100
logger = logging.getLogger(__name__)
logging.getLogger("requests").setLevel(logging.WARNING)
def list_data(data):
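    # xmltodict returns a dict for a single repeated element and a list for several; normalize to a list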
if isinstance(data, dict):
data = [data, ]
return data
def extract_text(data_val):
    # xmltodict stores an element's text under the '#text' key when the element also carries attributes
    if type(data_val) is not str:
        data_val = data_val['#text']
    # If it's now a string, we want to strip it
    if type(data_val) is str:
        data_val = data_val.strip()
    return data_val
def is_valid_zip(zip_code):
    # valid US zips are 5 digits with an optional, possibly hyphenated, 4-digit extension
    if re.match(r'^\d{5}(-?\d{4})?$', zip_code):
        return True
    return False
def get_county_by_zip(sess, zip_code):
if not is_valid_zip(zip_code):
return None
zip_data = None
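    # prefer an exact zip9 match when we have the +4 digits, then fall back to the first zip5 match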
if len(zip_code) > 5:
zip_data = sess.query(Zips).filter_by(zip5=zip_code[:5], zip_last4=zip_code[-4:]).first()
if not zip_data:
zip_data = sess.query(Zips).filter_by(zip5=zip_code[:5]).first()
# if we found results at any point, return the county code from it
if zip_data:
return zip_data.county_number
return None
def award_id_values(data, obj):
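    """ Map the awardID section of the feed (award contract IDs and referenced IDV IDs) onto our column names. """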
value_map = {'modNumber': 'award_modification_amendme',
'transactionNumber': 'transaction_number',
'PIID': 'piid',
'agencyID': 'agency_id'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['awardContractID'][key])
except (KeyError, TypeError):
obj[value] = None
value_map = {'agencyID': 'referenced_idv_agency_iden',
'modNumber': 'referenced_idv_modificatio',
'PIID': 'parent_award_id'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['referencedIDVID'][key])
except (KeyError, TypeError):
obj[value] = None
# get agencyID name
try:
obj['referenced_idv_agency_desc'] = extract_text(data['referencedIDVID']['agencyID']['@name'])
except (KeyError, TypeError):
obj['referenced_idv_agency_desc'] = None
return obj
def contract_id_values(data, obj):
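    """ Map the contractID section of the IDV feed (IDV IDs and referenced IDV IDs) onto our column names. """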
value_map = {'modNumber': 'award_modification_amendme',
'PIID': 'piid',
'agencyID': 'agency_id'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['IDVID'][key])
except (KeyError, TypeError):
obj[value] = None
value_map = {'agencyID': 'referenced_idv_agency_iden',
'modNumber': 'referenced_idv_modificatio',
'PIID': 'parent_award_id'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['referencedIDVID'][key])
except (KeyError, TypeError):
obj[value] = None
# get agencyID name
try:
obj['referenced_idv_agency_desc'] = extract_text(data['referencedIDVID']['agencyID']['@name'])
except (KeyError, TypeError):
obj['referenced_idv_agency_desc'] = None
return obj
def competition_values(data, obj):
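    """ Map the competition section of the feed, including the @description attributes, onto our column names. """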
value_map = {'A76Action': 'a_76_fair_act_action',
'commercialItemAcquisitionProcedures': 'commercial_item_acquisitio',
'commercialItemTestProgram': 'commercial_item_test_progr',
'evaluatedPreference': 'evaluated_preference',
'extentCompeted': 'extent_competed',
'fedBizOpps': 'fed_biz_opps',
'localAreaSetAside': 'local_area_set_aside',
'numberOfOffersReceived': 'number_of_offers_received',
'priceEvaluationPercentDifference': 'price_evaluation_adjustmen',
'reasonNotCompeted': 'other_than_full_and_open_c',
'research': 'research',
'smallBusinessCompetitivenessDemonstrationProgram': 'small_business_competitive',
'solicitationProcedures': 'solicitation_procedures',
'statutoryExceptionToFairOpportunity': 'fair_opportunity_limited_s',
'typeOfSetAside': 'type_set_aside'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'A76Action': 'a_76_fair_act_action_desc',
'commercialItemAcquisitionProcedures': 'commercial_item_acqui_desc',
'commercialItemTestProgram': 'commercial_item_test_desc',
'evaluatedPreference': 'evaluated_preference_desc',
'extentCompeted': 'extent_compete_description',
'fedBizOpps': 'fed_biz_opps_description',
'localAreaSetAside': 'local_area_set_aside_desc',
'reasonNotCompeted': 'other_than_full_and_o_desc',
'research': 'research_description',
'solicitationProcedures': 'solicitation_procedur_desc',
'statutoryExceptionToFairOpportunity': 'fair_opportunity_limi_desc',
'typeOfSetAside': 'type_set_aside_description'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
return obj
def contract_data_values(data, obj, atom_type):
value_map = {'consolidatedContract': 'consolidated_contract',
'contingencyHumanitarianPeacekeepingOperation': 'contingency_humanitarian_o',
'contractFinancing': 'contract_financing',
'costAccountingStandardsClause': 'cost_accounting_standards',
'costOrPricingData': 'cost_or_pricing_data',
'descriptionOfContractRequirement': 'award_description',
'GFE-GFP': 'government_furnished_prope',
'inherentlyGovernmentalFunction': 'inherently_government_func',
'majorProgramCode': 'major_program',
'multiYearContract': 'multi_year_contract',
'nationalInterestActionCode': 'national_interest_action',
'numberOfActions': 'number_of_actions',
'performanceBasedServiceContract': 'performance_based_service',
'programAcronym': 'program_acronym',
'purchaseCardAsPaymentMethod': 'purchase_card_as_payment_m',
'reasonForModification': 'action_type',
'referencedIDVMultipleOrSingle': 'referenced_mult_or_single',
'referencedIDVType': 'referenced_idv_type',
'seaTransportation': 'sea_transportation',
'solicitationID': 'solicitation_identifier',
'typeOfContractPricing': 'type_of_contract_pricing',
'typeOfIDC': 'type_of_idc',
'undefinitizedAction': 'undefinitized_action'}
if atom_type == "award":
value_map['contractActionType'] = 'contract_award_type'
else:
value_map['contractActionType'] = 'idv_type'
value_map['multipleOrSingleAwardIDC'] = 'multiple_or_single_award_i'
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'consolidatedContract': 'consolidated_contract_desc',
'contingencyHumanitarianPeacekeepingOperation': 'contingency_humanitar_desc',
'contractFinancing': 'contract_financing_descrip',
'costAccountingStandardsClause': 'cost_accounting_stand_desc',
'costOrPricingData': 'cost_or_pricing_data_desc',
'GFE-GFP': 'government_furnished_desc',
'inherentlyGovernmentalFunction': 'inherently_government_desc',
'multiYearContract': 'multi_year_contract_desc',
'nationalInterestActionCode': 'national_interest_desc',
'performanceBasedServiceContract': 'performance_based_se_desc',
'purchaseCardAsPaymentMethod': 'purchase_card_as_paym_desc',
'reasonForModification': 'action_type_description',
'referencedIDVMultipleOrSingle': 'referenced_mult_or_si_desc',
'referencedIDVType': 'referenced_idv_type_desc',
'seaTransportation': 'sea_transportation_desc',
'typeOfContractPricing': 'type_of_contract_pric_desc',
'typeOfIDC': 'type_of_idc_description',
'undefinitizedAction': 'undefinitized_action_desc'}
if atom_type == "award":
value_map['contractActionType'] = 'contract_award_type_desc'
else:
value_map['contractActionType'] = 'idv_type_description'
value_map['multipleOrSingleAwardIDC'] = 'multiple_or_single_aw_desc'
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
return obj
def dollar_values_values(data, obj):
value_map = {'baseAndAllOptionsValue': 'base_and_all_options_value',
'baseAndExercisedOptionsValue': 'base_exercised_options_val',
'obligatedAmount': 'federal_action_obligation'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
return obj
def total_dollar_values_values(data, obj):
value_map = {'totalBaseAndAllOptionsValue': 'potential_total_value_awar',
'totalBaseAndExercisedOptionsValue': 'current_total_value_award',
'totalObligatedAmount': 'total_obligated_amount'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
return obj
def legislative_mandates_values(data, obj):
value_map = {'ClingerCohenAct': 'clinger_cohen_act_planning',
'constructionWageRateRequirements': 'construction_wage_rate_req',
'interagencyContractingAuthority': 'interagency_contracting_au',
'otherStatutoryAuthority': 'other_statutory_authority',
'laborStandards': 'labor_standards',
'materialsSuppliesArticlesEquipment': 'materials_supplies_article'}
additional_reporting = None
try:
ar_dicts = data['listOfAdditionalReportingValues']['additionalReportingValue']
except (KeyError, TypeError):
ar_dicts = None
if ar_dicts:
# if there is only one dict, convert it to a list of one dict
if isinstance(ar_dicts, dict):
ar_dicts = [ar_dicts]
ars = []
for ar_dict in ar_dicts:
ar_value = extract_text(ar_dict)
try:
ar_desc = extract_text(ar_dict['@description'])
except (KeyError, TypeError):
ar_desc = None
ar_str = ar_value if ar_desc is None else '{}: {}'.format(ar_value, ar_desc)
ars.append(ar_str)
additional_reporting = '; '.join(ars)
obj['additional_reporting'] = additional_reporting
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'ClingerCohenAct': 'clinger_cohen_act_pla_desc',
'constructionWageRateRequirements': 'construction_wage_rat_desc',
'interagencyContractingAuthority': 'interagency_contract_desc',
'laborStandards': 'labor_standards_descrip',
'materialsSuppliesArticlesEquipment': 'materials_supplies_descrip'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
return obj
def place_of_performance_values(data, obj):
value_map = {'placeOfPerformanceCongressionalDistrict': 'place_of_performance_congr',
'placeOfPerformanceZIPCode': 'place_of_performance_zip4a'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
    # city name comes from the @city attribute of placeOfPerformanceZIPCode
try:
obj['place_of_perform_city_name'] = extract_text(data['placeOfPerformanceZIPCode']['@city'])
except (KeyError, TypeError):
obj['place_of_perform_city_name'] = None
    # county name comes from the @county attribute of placeOfPerformanceZIPCode
try:
obj['place_of_perform_county_na'] = extract_text(data['placeOfPerformanceZIPCode']['@county'])
except (KeyError, TypeError):
obj['place_of_perform_county_na'] = None
# within placeOfPerformance, the principalPlaceOfPerformance sub-level
value_map = {'stateCode': 'place_of_performance_state',
'countryCode': 'place_of_perform_country_c'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['principalPlaceOfPerformance'][key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'countryCode': 'place_of_perf_country_desc',
'stateCode': 'place_of_perfor_state_desc'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['principalPlaceOfPerformance'][key]['@name'])
except (KeyError, TypeError):
obj[value] = None
return obj
def product_or_service_information_values(data, obj):
value_map = {'claimantProgramCode': 'dod_claimant_program_code',
'contractBundling': 'contract_bundling',
'countryOfOrigin': 'country_of_product_or_serv',
'informationTechnologyCommercialItemCategory': 'information_technology_com',
'manufacturingOrganizationType': 'domestic_or_foreign_entity',
'placeOfManufacture': 'place_of_manufacture',
'principalNAICSCode': 'naics',
'productOrServiceCode': 'product_or_service_code',
'recoveredMaterialClauses': 'recovered_materials_sustai',
'systemEquipmentCode': 'program_system_or_equipmen',
'useOfEPADesignatedProducts': 'epa_designated_product'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'claimantProgramCode': 'dod_claimant_prog_cod_desc',
'contractBundling': 'contract_bundling_descrip',
'informationTechnologyCommercialItemCategory': 'information_technolog_desc',
'manufacturingOrganizationType': 'domestic_or_foreign_e_desc',
'placeOfManufacture': 'place_of_manufacture_desc',
'principalNAICSCode': 'naics_description',
'productOrServiceCode': 'product_or_service_co_desc',
'recoveredMaterialClauses': 'recovered_materials_s_desc',
'systemEquipmentCode': 'program_system_or_equ_desc',
'useOfEPADesignatedProducts': 'epa_designated_produc_desc'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
# get country of origin name
try:
obj['country_of_product_or_desc'] = extract_text(data['countryOfOrigin']['@name'])
except (KeyError, TypeError):
obj['country_of_product_or_desc'] = None
return obj
def purchaser_information_values(data, obj):
value_map = {'contractingOfficeAgencyID': 'awarding_sub_tier_agency_c',
'contractingOfficeID': 'awarding_office_code',
'foreignFunding': 'foreign_funding',
'fundingRequestingAgencyID': 'funding_sub_tier_agency_co',
'fundingRequestingOfficeID': 'funding_office_code'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'foreignFunding': 'foreign_funding_desc'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
# name values associated with certain values in purchaserInformation
value_map = {'contractingOfficeAgencyID': 'awarding_sub_tier_agency_n',
'contractingOfficeID': 'awarding_office_name',
'fundingRequestingAgencyID': 'funding_sub_tier_agency_na',
'fundingRequestingOfficeID': 'funding_office_name'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@name'])
except (KeyError, TypeError):
obj[value] = None
return obj
def relevant_contract_dates_values(data, obj):
value_map = {'currentCompletionDate': 'period_of_performance_curr',
'effectiveDate': 'period_of_performance_star',
'lastDateToOrder': 'ordering_period_end_date',
'signedDate': 'action_date',
'ultimateCompletionDate': 'period_of_perf_potential_e'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
return obj
def vendor_values(data, obj):
# base vendor level
value_map = {'CCRException': 'sam_exception',
'contractingOfficerBusinessSizeDetermination': 'contracting_officers_deter'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# get descriptions for things in the value map
value_map = {'CCRException': 'sam_exception_description',
'contractingOfficerBusinessSizeDetermination': 'contracting_officers_desc'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key]['@description'])
except (KeyError, TypeError):
obj[value] = None
# vendorHeader sub-level
value_map = {'vendorAlternateName': 'vendor_alternate_name',
'vendorDoingAsBusinessName': 'vendor_doing_as_business_n',
'vendorEnabled': 'vendor_enabled',
'vendorLegalOrganizationName': 'vendor_legal_org_name',
'vendorName': 'awardee_or_recipient_legal'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorHeader'][key])
except (KeyError, TypeError):
obj[value] = None
# make sure key exists before passing it
try:
data['vendorSiteDetails']
except KeyError:
data['vendorSiteDetails'] = {}
# vendorSiteDetails sub-level (there are a lot so it gets its own function)
obj = vendor_site_details_values(data['vendorSiteDetails'], obj)
return obj
def vendor_site_details_values(data, obj):
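    """ Map the vendorSiteDetails section of the vendor data onto our column names (split out due to its size). """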
# base vendorSiteDetails level
value_map = {'divisionName': 'division_name',
'divisionNumberOrOfficeCode': 'division_number_or_office',
'vendorAlternateSiteCode': 'vendor_alternate_site_code',
'vendorSiteCode': 'vendor_site_code'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data[key])
except (KeyError, TypeError):
obj[value] = None
# typeOfEducationalEntity sub-level
value_map = {'is1862LandGrantCollege': 'c1862_land_grant_college',
'is1890LandGrantCollege': 'c1890_land_grant_college',
'is1994LandGrantCollege': 'c1994_land_grant_college',
'isAlaskanNativeServicingInstitution': 'alaskan_native_servicing_i',
'isHistoricallyBlackCollegeOrUniversity': 'historically_black_college',
'isMinorityInstitution': 'minority_institution',
'isNativeHawaiianServicingInstitution': 'native_hawaiian_servicing',
'isPrivateUniversityOrCollege': 'private_university_or_coll',
'isSchoolOfForestry': 'school_of_forestry',
'isStateControlledInstitutionofHigherLearning': 'state_controlled_instituti',
'isTribalCollege': 'tribal_college',
'isVeterinaryCollege': 'veterinary_college'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['typeOfEducationalEntity'][key])
except (KeyError, TypeError):
obj[value] = None
# typeOfGovernmentEntity sub-level
value_map = {'isAirportAuthority': 'airport_authority',
'isCouncilOfGovernments': 'council_of_governments',
'isHousingAuthoritiesPublicOrTribal': 'housing_authorities_public',
'isInterstateEntity': 'interstate_entity',
'isPlanningCommission': 'planning_commission',
'isPortAuthority': 'port_authority',
'isTransitAuthority': 'transit_authority'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['typeOfGovernmentEntity'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorBusinessTypes sub-level
value_map = {'isCommunityDevelopedCorporationOwnedFirm': 'community_developed_corpor',
'isForeignGovernment': 'foreign_government',
'isLaborSurplusAreaFirm': 'labor_surplus_area_firm',
'isStateGovernment': 'us_state_government',
'isTribalGovernment': 'us_tribal_government'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorBusinessTypes'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorBusinessTypes > businessOrOrganizationType sub-level
value_map = {'isCorporateEntityNotTaxExempt': 'corporate_entity_not_tax_e',
'isCorporateEntityTaxExempt': 'corporate_entity_tax_exemp',
'isInternationalOrganization': 'international_organization',
'isPartnershipOrLimitedLiabilityPartnership': 'partnership_or_limited_lia',
'isSmallAgriculturalCooperative': 'small_agricultural_coopera',
'isSolePropreitorship': 'sole_proprietorship',
'isUSGovernmentEntity': 'us_government_entity'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorBusinessTypes']['businessOrOrganizationType'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorBusinessTypes > federalGovernment sub-level
value_map = {'isFederalGovernment': 'us_federal_government',
'isFederalGovernmentAgency': 'federal_agency',
'isFederallyFundedResearchAndDevelopmentCorp': 'federally_funded_research'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorBusinessTypes']['federalGovernment'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorBusinessTypes > localGovernment sub-level
value_map = {'isCityLocalGovernment': 'city_local_government',
'isCountyLocalGovernment': 'county_local_government',
'isInterMunicipalLocalGovernment': 'inter_municipal_local_gove',
'isLocalGovernment': 'us_local_government',
'isLocalGovernmentOwned': 'local_government_owned',
'isMunicipalityLocalGovernment': 'municipality_local_governm',
'isSchoolDistrictLocalGovernment': 'school_district_local_gove',
'isTownshipLocalGovernment': 'township_local_government'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorBusinessTypes']['localGovernment'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorCertifications sub-level
value_map = {'isDOTCertifiedDisadvantagedBusinessEnterprise': 'dot_certified_disadvantage',
'isSBACertified8AJointVenture': 'sba_certified_8_a_joint_ve',
'isSBACertified8AProgramParticipant': 'c8a_program_participant',
'isSBACertifiedHUBZone': 'historically_underutilized',
'isSBACertifiedSmallDisadvantagedBusiness': 'small_disadvantaged_busine',
'isSelfCertifiedSmallDisadvantagedBusiness': 'self_certified_small_disad'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorCertifications'][key])
except (KeyError, TypeError):
obj[value] = None
# entityIdentifiers sub-level
try:
obj['cage_code'] = extract_text(data['entityIdentifiers']['cageCode'])
except (KeyError, TypeError):
obj['cage_code'] = None
# entityIdentifiers > vendorDUNSInformation sub-level
value_map = {'DUNSNumber': 'awardee_or_recipient_uniqu',
'globalParentDUNSName': 'ultimate_parent_legal_enti',
'globalParentDUNSNumber': 'ultimate_parent_unique_ide'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['entityIdentifiers']['vendorDUNSInformation'][key])
except (KeyError, TypeError):
obj[value] = None
# entityIdentifiers > vendorUEIInformation sub-level
value_map = {'UEI': 'awardee_or_recipient_uei',
'ultimateParentUEI': 'ultimate_parent_uei'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['entityIdentifiers']['vendorUEIInformation'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorLineOfBusiness sub-level
value_map = {'isCommunityDevelopmentCorporation': 'community_development_corp',
'isDomesticShelter': 'domestic_shelter',
'isEducationalInstitution': 'educational_institution',
'isFoundation': 'foundation',
'isHispanicServicingInstitution': 'hispanic_servicing_institu',
'isHospital': 'hospital_flag',
'isManufacturerOfGoods': 'manufacturer_of_goods',
'isVeterinaryHospital': 'veterinary_hospital'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorLineOfBusiness'][key])
except (KeyError, TypeError):
obj[value] = None
# vendorLocation sub-level
value_map = {'city': 'legal_entity_city_name',
'congressionalDistrictCode': 'legal_entity_congressional',
'countryCode': 'legal_entity_country_code',
'faxNo': 'vendor_fax_number',
'phoneNo': 'vendor_phone_number',
'streetAddress': 'legal_entity_address_line1',
'streetAddress2': 'legal_entity_address_line2',
'streetAddress3': 'legal_entity_address_line3',
'vendorLocationDisabledFlag': 'vendor_location_disabled_f',
'ZIPCode': 'legal_entity_zip4'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorLocation'][key])
except (KeyError, TypeError):
obj[value] = None
# differentiating between US and foreign states
key = 'legal_entity_state_code'
if obj['legal_entity_country_code'] not in country_code_map:
key = 'legal_entity_state_descrip'
        # need to set this key even when there's no code to store because we access it again later
obj['legal_entity_state_code'] = None
else:
try:
obj['legal_entity_state_descrip'] = extract_text(data['vendorLocation']['state']['@name'])
except (KeyError, TypeError):
obj['legal_entity_state_descrip'] = None
try:
obj[key] = extract_text(data['vendorLocation']['state'])
except (KeyError, TypeError):
obj[key] = None
try:
obj['legal_entity_country_name'] = extract_text(data['vendorLocation']['countryCode']['@name'])
except (KeyError, TypeError):
obj['legal_entity_country_name'] = None
value_map = {'isForeignOwnedAndLocated': 'foreign_owned_and_located',
'isLimitedLiabilityCorporation': 'limited_liability_corporat',
'isShelteredWorkshop': 'the_ability_one_program',
'isSubchapterSCorporation': 'subchapter_s_corporation',
'organizationalType': 'organizational_type'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorOrganizationFactors'][key])
except (KeyError, TypeError):
obj[value] = None
value_map = {'isForProfitOrganization': 'for_profit_organization',
'isNonprofitOrganization': 'nonprofit_organization',
'isOtherNotForProfitOrganization': 'other_not_for_profit_organ'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorOrganizationFactors']['profitStructure'][key])
except (KeyError, TypeError):
obj[value] = None
value_map = {'receivesContracts': 'contracts',
'receivesContractsAndGrants': 'receives_contracts_and_gra',
'receivesGrants': 'grants'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorRelationshipWithFederalGovernment'][key])
except (KeyError, TypeError):
obj[value] = None
value_map = {'isAlaskanNativeOwnedCorporationOrFirm': 'alaskan_native_owned_corpo',
'isAmericanIndianOwned': 'american_indian_owned_busi',
'isEconomicallyDisadvantagedWomenOwnedSmallBusiness': 'economically_disadvantaged',
'isIndianTribe': 'indian_tribe_federally_rec',
'isJointVentureEconomicallyDisadvantagedWomenOwnedSmallBusiness': 'joint_venture_economically',
'isJointVentureWomenOwnedSmallBusiness': 'joint_venture_women_owned',
'isNativeHawaiianOwnedOrganizationOrFirm': 'native_hawaiian_owned_busi',
'isServiceRelatedDisabledVeteranOwnedBusiness': 'service_disabled_veteran_o',
'isTriballyOwnedFirm': 'tribally_owned_business',
'isVerySmallBusiness': 'emerging_small_business',
'isVeteranOwned': 'veteran_owned_business',
'isWomenOwned': 'woman_owned_business',
'isWomenOwnedSmallBusiness': 'women_owned_small_business'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorSocioEconomicIndicators'][key])
except (KeyError, TypeError):
obj[value] = None
value_map = {'isAsianPacificAmericanOwnedBusiness': 'asian_pacific_american_own',
'isBlackAmericanOwnedBusiness': 'black_american_owned_busin',
'isHispanicAmericanOwnedBusiness': 'hispanic_american_owned_bu',
'isMinorityOwned': 'minority_owned_business',
'isNativeAmericanOwnedBusiness': 'native_american_owned_busi',
'isOtherMinorityOwned': 'other_minority_owned_busin',
'isSubContinentAsianAmericanOwnedBusiness': 'subcontinent_asian_asian_i'}
for key, value in value_map.items():
try:
obj[value] = extract_text(data['vendorSocioEconomicIndicators']['minorityOwned'][key])
except (KeyError, TypeError):
obj[value] = None
return obj
def generic_values(data, obj):
generic_strings_value_map = {'genericString01': 'solicitation_date'}
for key, value in generic_strings_value_map.items():
try:
obj[value] = extract_text(data['genericStrings'][key])
except (KeyError, TypeError):
obj[value] = None
return obj
def calculate_ppop_fields(obj, sess, county_by_name, county_by_code, state_code_list, country_list):
if obj['place_of_perform_country_c'] in country_code_map:
        # if the country code is a US territory code other than USA, map it to its state code and derive
        # the state name, then replace country code and country description with USA and UNITED STATES respectively
if obj['place_of_perform_country_c'] != 'USA':
obj['place_of_performance_state'] = country_code_map[obj['place_of_perform_country_c']]
if obj['place_of_performance_state'] in state_code_list:
obj['place_of_perfor_state_desc'] = state_code_list[obj['place_of_performance_state']]
obj['place_of_perform_country_c'] = 'USA'
obj['place_of_perf_country_desc'] = 'UNITED STATES'
# derive state name if we don't have it
if obj['place_of_performance_state'] and not obj['place_of_perfor_state_desc']\
and obj['place_of_performance_state'] in state_code_list:
obj['place_of_perfor_state_desc'] = state_code_list[obj['place_of_performance_state']]
if obj['place_of_perform_county_na'] and obj['place_of_performance_state']:
state = obj['place_of_performance_state']
county_name = obj['place_of_perform_county_na']
if state in county_by_name and county_name in county_by_name[state]:
obj['place_of_perform_county_co'] = county_by_name[state][county_name]
# if accessing the county code by state code and county name didn't work, try by zip4a if we have it
if not obj['place_of_perform_county_co'] and obj['place_of_performance_zip4a']:
obj['place_of_perform_county_co'] = get_county_by_zip(sess, obj['place_of_performance_zip4a'])
if not obj['place_of_perform_county_na'] and obj['place_of_performance_state'] in county_by_code\
and obj['place_of_perform_county_co'] in county_by_code[obj['place_of_performance_state']]:
obj['place_of_perform_county_na'] =\
county_by_code[obj['place_of_performance_state']][obj['place_of_perform_county_co']]
# if we have content in the zip code and it's in a valid US format, split it into 5 and 4 digit
if obj['place_of_performance_zip4a'] and is_valid_zip(obj['place_of_performance_zip4a']):
obj['place_of_performance_zip5'] = obj['place_of_performance_zip4a'][:5]
if len(obj['place_of_performance_zip4a']) > 5:
obj['place_of_perform_zip_last4'] = obj['place_of_performance_zip4a'][-4:]
if not obj['place_of_perf_country_desc'] and obj['place_of_perform_country_c'] in country_list:
obj['place_of_perf_country_desc'] = country_list[obj['place_of_perform_country_c']]
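# Concrete example of the ZIP handling above (hypothetical value): a place_of_performance_zip4a
# of '221021234' that passes the module's is_valid_zip check is split into
# place_of_performance_zip5 = '22102' and place_of_perform_zip_last4 = '1234'; a plain 5-digit
# ZIP sets only the zip5 field.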
def calculate_legal_entity_fields(obj, sess, county_by_code, state_code_list, country_list):
if obj['legal_entity_country_code'] in country_code_map:
        # if the country code is a US territory code other than USA, map it to its state code and derive
        # the state name, then replace country code and country name with USA and UNITED STATES respectively
if obj['legal_entity_country_code'] != 'USA':
obj['legal_entity_state_code'] = country_code_map[obj['legal_entity_country_code']]
if obj['legal_entity_state_code'] in state_code_list:
obj['legal_entity_state_descrip'] = state_code_list[obj['legal_entity_state_code']]
obj['legal_entity_country_code'] = 'USA'
obj['legal_entity_country_name'] = 'UNITED STATES'
# derive state name if we don't have it
if obj['legal_entity_state_code'] and not obj['legal_entity_state_descrip']\
and obj['legal_entity_state_code'] in state_code_list:
obj['legal_entity_state_descrip'] = state_code_list[obj['legal_entity_state_code']]
if obj['legal_entity_zip4'] and is_valid_zip(obj['legal_entity_zip4']):
obj['legal_entity_county_code'] = get_county_by_zip(sess, obj['legal_entity_zip4'])
if obj['legal_entity_county_code'] and obj['legal_entity_state_code']:
county_code = obj['legal_entity_county_code']
state = obj['legal_entity_state_code']
if state in county_by_code and county_code in county_by_code[state]:
obj['legal_entity_county_name'] = county_by_code[state][county_code]
obj['legal_entity_zip5'] = obj['legal_entity_zip4'][:5]
if len(obj['legal_entity_zip4']) > 5:
obj['legal_entity_zip_last4'] = obj['legal_entity_zip4'][-4:]
# if there is any country code (checked outside function) but not a country name, try to get the country name
if not obj['legal_entity_country_name'] and obj['legal_entity_country_code'] in country_list:
obj['legal_entity_country_name'] = country_list[obj['legal_entity_country_code']]
def calculate_remaining_fields(obj, sess, sub_tier_list, county_by_name, county_by_code, state_code_list, country_list,
exec_comp_dict, atom_type):
# we want to null out all the calculated columns in case this is an update to the records
obj['awarding_agency_code'] = None
obj['awarding_agency_name'] = None
obj['funding_agency_code'] = None
obj['funding_agency_name'] = None
obj['place_of_perform_county_co'] = None
obj['legal_entity_county_code'] = None
obj['legal_entity_county_name'] = None
obj['detached_award_proc_unique'] = None
# calculate awarding agency codes/names based on awarding sub tier agency codes
if obj['awarding_sub_tier_agency_c']:
try:
sub_tier_agency = sub_tier_list[obj['awarding_sub_tier_agency_c']]
use_frec = sub_tier_agency.is_frec
agency_data = sub_tier_agency.frec if use_frec else sub_tier_agency.cgac
obj['awarding_agency_code'] = agency_data.frec_code if use_frec else agency_data.cgac_code
obj['awarding_agency_name'] = agency_data.agency_name
except KeyError:
logger.info('WARNING: MissingSubtierCGAC: The awarding sub-tier cgac_code: %s does not exist in cgac table.'
' The FPDS-provided awarding sub-tier agency name (if given) for this cgac_code is %s. '
'The award has been loaded with awarding_agency_code 999.',
obj['awarding_sub_tier_agency_c'], obj['awarding_sub_tier_agency_n'])
obj['awarding_agency_code'] = '999'
obj['awarding_agency_name'] = None
# calculate funding agency codes/names based on funding sub tier agency codes
if obj['funding_sub_tier_agency_co']:
try:
sub_tier_agency = sub_tier_list[obj['funding_sub_tier_agency_co']]
use_frec = sub_tier_agency.is_frec
agency_data = sub_tier_agency.frec if use_frec else sub_tier_agency.cgac
obj['funding_agency_code'] = agency_data.frec_code if use_frec else agency_data.cgac_code
obj['funding_agency_name'] = agency_data.agency_name
except KeyError:
logger.info('WARNING: MissingSubtierCGAC: The funding sub-tier cgac_code: %s does not exist in cgac table. '
'The FPDS-provided funding sub-tier agency name (if given) for this cgac_code is %s. '
'The award has been loaded with funding_agency_code 999.',
obj['funding_sub_tier_agency_co'], obj['funding_sub_tier_agency_na'])
obj['funding_agency_code'] = '999'
obj['funding_agency_name'] = None
# do place of performance calculations only if we have SOME country code
if obj['place_of_perform_country_c']:
calculate_ppop_fields(obj, sess, county_by_name, county_by_code, state_code_list, country_list)
# do legal entity calculations only if we have SOME country code
if obj['legal_entity_country_code']:
calculate_legal_entity_fields(obj, sess, county_by_code, state_code_list, country_list)
# calculate business categories
obj['business_categories'] = get_business_categories(row=obj, data_type='fpds')
# Calculate executive compensation data for the entry.
if obj['awardee_or_recipient_uniqu'] and obj['awardee_or_recipient_uniqu'] in exec_comp_dict.keys():
exec_comp = exec_comp_dict[obj['awardee_or_recipient_uniqu']]
for i in range(1, 6):
obj['high_comp_officer{}_full_na'.format(i)] = exec_comp['officer{}_name'.format(i)]
obj['high_comp_officer{}_amount'.format(i)] = exec_comp['officer{}_amt'.format(i)]
else:
# Need to make sure they're null in case this is updating and the DUNS has changed somehow
for i in range(1, 6):
obj['high_comp_officer{}_full_na'.format(i)] = None
obj['high_comp_officer{}_amount'.format(i)] = None
if atom_type == 'award':
unique_award_string_list = ['CONT_AWD']
key_list = ['piid', 'agency_id', 'parent_award_id', 'referenced_idv_agency_iden']
else:
unique_award_string_list = ['CONT_IDV']
key_list = ['piid', 'agency_id']
for item in key_list:
unique_award_string_list.append(obj.get(item) or '-none-')
obj['unique_award_key'] = '_'.join(unique_award_string_list).upper()
# calculate unique key
key_list = ['agency_id', 'referenced_idv_agency_iden', 'piid', 'award_modification_amendme', 'parent_award_id',
'transaction_number']
idv_list = ['agency_id', 'piid', 'award_modification_amendme']
unique_string = ""
for item in key_list:
if len(unique_string) > 0:
unique_string += "_"
if atom_type == 'award' or item in idv_list:
# Get the value in the object or, if the key doesn't exist or value is None, set it to "-none-"
unique_string += obj.get(item) or '-none-'
else:
unique_string += '-none-'
obj['detached_award_proc_unique'] = unique_string
return obj
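# Shape of the derived keys, using hypothetical values: an award row with agency_id='9700',
# referenced_idv_agency_iden='4730', piid='W91247', award_modification_amendme='0',
# parent_award_id='GS00T99' and no transaction_number comes out as
#     unique_award_key           = 'CONT_AWD_W91247_9700_GS00T99_4730'
#     detached_award_proc_unique = '9700_4730_W91247_0_GS00T99_-none-'
# (an IDV row starts its award key with 'CONT_IDV' and only appends piid and agency_id, and it
# hard-codes '-none-' for the procurement-key parts outside its idv_list).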
def process_data(data, sess, atom_type, sub_tier_list, county_by_name, county_by_code, state_code_list, country_list,
exec_comp_dict):
obj = {}
if atom_type == "award":
try:
data['awardID']
except KeyError:
data['awardID'] = {}
obj = award_id_values(data['awardID'], obj)
else:
obj['transaction_number'] = None
try:
data['contractID']
except KeyError:
data['contractID'] = {}
obj = contract_id_values(data['contractID'], obj)
try:
data['competition']
except KeyError:
data['competition'] = {}
obj = competition_values(data['competition'], obj)
try:
data['contractData']
except KeyError:
data['contractData'] = {}
obj = contract_data_values(data['contractData'], obj, atom_type)
try:
data['dollarValues']
except KeyError:
data['dollarValues'] = {}
obj = dollar_values_values(data['dollarValues'], obj)
try:
data['totalDollarValues']
except KeyError:
data['totalDollarValues'] = {}
obj = total_dollar_values_values(data['totalDollarValues'], obj)
if atom_type == "award":
try:
data['placeOfPerformance']
except KeyError:
data['placeOfPerformance'] = {}
obj = place_of_performance_values(data['placeOfPerformance'], obj)
else:
obj['place_of_perform_county_na'] = None
obj['place_of_performance_state'] = None
obj['place_of_perfor_state_desc'] = None
obj['place_of_performance_zip4a'] = None
obj['place_of_perform_country_c'] = None
obj['place_of_perf_country_desc'] = None
# make sure key exists before passing it
try:
data['legislativeMandates']
except KeyError:
data['legislativeMandates'] = {}
obj = legislative_mandates_values(data['legislativeMandates'], obj)
try:
obj['subcontracting_plan'] = extract_text(data['preferencePrograms']['subcontractPlan'])
except (KeyError, TypeError):
obj['subcontracting_plan'] = None
try:
obj['subcontracting_plan_desc'] = extract_text(data['preferencePrograms']['subcontractPlan']['@description'])
except (KeyError, TypeError):
obj['subcontracting_plan_desc'] = None
# make sure key exists before passing it
try:
data['productOrServiceInformation']
except KeyError:
data['productOrServiceInformation'] = {}
obj = product_or_service_information_values(data['productOrServiceInformation'], obj)
# make sure key exists before passing it
try:
data['purchaserInformation']
except KeyError:
data['purchaserInformation'] = {}
obj = purchaser_information_values(data['purchaserInformation'], obj)
# make sure key exists before passing it
try:
data['relevantContractDates']
except KeyError:
data['relevantContractDates'] = {}
obj = relevant_contract_dates_values(data['relevantContractDates'], obj)
# make sure key exists before passing it
try:
data['vendor']
except KeyError:
data['vendor'] = {}
obj = vendor_values(data['vendor'], obj)
# make sure key exists before passing it
try:
data['genericTags']
except KeyError:
data['genericTags'] = {}
obj = generic_values(data['genericTags'], obj)
obj = calculate_remaining_fields(obj, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, atom_type)
try:
obj['last_modified'] = extract_text(data['transactionInformation']['lastModifiedDate'])
except (KeyError, TypeError):
obj['last_modified'] = None
try:
obj['initial_report_date'] = extract_text(data['transactionInformation']['createdDate'])
except (KeyError, TypeError):
obj['initial_report_date'] = None
obj['pulled_from'] = atom_type
# clear out potentially excel-breaking whitespace from specific fields
free_fields = ["award_description", "vendor_doing_as_business_n", "legal_entity_address_line1",
"legal_entity_address_line2", "legal_entity_address_line3", "ultimate_parent_legal_enti",
"awardee_or_recipient_legal", "other_statutory_authority"]
for field in free_fields:
if obj[field]:
                obj[field] = re.sub(r'\s', ' ', obj[field])
return obj
def process_delete_data(data, atom_type):
unique_string = ""
# order of unique constraints in string: agency_id, referenced_idv_agency_iden, piid, award_modification_amendme,
# parent_award_id, transaction_number
# get all values that make up unique key
if atom_type == "award":
try:
unique_string += extract_text(data['awardID']['awardContractID']['agencyID'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['awardID']['referencedIDVID']['agencyID'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['awardID']['awardContractID']['PIID'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['awardID']['awardContractID']['modNumber'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['awardID']['referencedIDVID']['PIID'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['awardID']['awardContractID']['transactionNumber'])
except (KeyError, TypeError):
unique_string += "-none-"
else:
try:
unique_string += extract_text(data['contractID']['IDVID']['agencyID'])
except (KeyError, TypeError):
unique_string += "-none-"
# referenced_idv_agency_iden not used in IDV identifier, just set it to "-none-"
unique_string += "_-none-_"
try:
unique_string += extract_text(data['contractID']['IDVID']['PIID'])
except (KeyError, TypeError):
unique_string += "-none-"
unique_string += "_"
try:
unique_string += extract_text(data['contractID']['IDVID']['modNumber'])
except (KeyError, TypeError):
unique_string += "-none-"
# parent_award_id not used in IDV identifier and transaction_number not in IDV feed, just set them to "-none-"
unique_string += "_-none-_-none-"
return unique_string
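# The string built here must match the detached_award_proc_unique derived in
# calculate_remaining_fields exactly, since get_delete_data uses it to look up the stored row;
# for the hypothetical award in the example above it would again be
# '9700_4730_W91247_0_GS00T99_-none-'.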
def create_processed_data_list(data, contract_type, sess, sub_tier_list, county_by_name, county_by_code,
state_code_list, country_list, exec_comp_dict):
data_list = []
for value in data:
tmp_obj = process_data(value['content'][contract_type], sess, atom_type=contract_type,
sub_tier_list=sub_tier_list, county_by_name=county_by_name,
county_by_code=county_by_code, state_code_list=state_code_list,
country_list=country_list, exec_comp_dict=exec_comp_dict)
data_list.append(tmp_obj)
return data_list
def add_processed_data_list(data, sess):
try:
sess.bulk_save_objects([DetachedAwardProcurement(**fpds_data) for fpds_data in data])
sess.commit()
except IntegrityError:
sess.rollback()
logger.error("Attempted to insert duplicate FPDS data. Inserting each row in batch individually.")
for fpds_obj in data:
insert_statement = insert(DetachedAwardProcurement).values(**fpds_obj).\
on_conflict_do_update(index_elements=['detached_award_proc_unique'], set_=fpds_obj)
sess.execute(insert_statement)
sess.commit()
def process_and_add(data, contract_type, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, now, threaded=False):
if threaded:
for value in data:
tmp_obj = process_data(value['content'][contract_type], sess, atom_type=contract_type,
sub_tier_list=sub_tier_list, county_by_name=county_by_name,
county_by_code=county_by_code, state_code_list=state_code_list,
country_list=country_list, exec_comp_dict=exec_comp_dict)
tmp_obj['updated_at'] = now
insert_statement = insert(DetachedAwardProcurement).values(**tmp_obj).\
on_conflict_do_update(index_elements=['detached_award_proc_unique'], set_=tmp_obj)
sess.execute(insert_statement)
else:
for value in data:
tmp_obj = process_data(value['content'][contract_type], sess, atom_type=contract_type,
sub_tier_list=sub_tier_list, county_by_name=county_by_name,
county_by_code=county_by_code, state_code_list=state_code_list,
country_list=country_list, exec_comp_dict=exec_comp_dict)
try:
statement = insert(DetachedAwardProcurement).values(**tmp_obj)
sess.execute(statement)
sess.commit()
except IntegrityError:
sess.rollback()
tmp_obj['updated_at'] = now
sess.query(DetachedAwardProcurement).\
filter_by(detached_award_proc_unique=tmp_obj['detached_award_proc_unique']).\
update(tmp_obj, synchronize_session=False)
sess.commit()
def get_with_exception_hand(url_string, expect_entries=True):
exception_retries = -1
retry_sleep_times = [5, 30, 60, 180, 300, 360, 420, 480, 540, 600]
request_timeout = 60
while exception_retries < len(retry_sleep_times):
try:
resp = requests.get(url_string, timeout=request_timeout)
if expect_entries:
# we should always expect entries, otherwise we shouldn't be calling it
resp_dict = xmltodict.parse(resp.text, process_namespaces=True, namespaces=FPDS_NAMESPACES)
len(list_data(resp_dict['feed']['entry']))
break
except (ConnectionResetError, ReadTimeoutError, ConnectionError, ReadTimeout, KeyError) as e:
exception_retries += 1
request_timeout += 60
if exception_retries < len(retry_sleep_times):
logger.info('Connection exception. Sleeping {}s and then retrying with a max wait of {}s...'
.format(retry_sleep_times[exception_retries], request_timeout))
time.sleep(retry_sleep_times[exception_retries])
else:
logger.info('Connection to FPDS feed lost, maximum retry attempts exceeded.')
raise e
return resp
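# Worst case for get_with_exception_hand: 11 attempts with the per-request timeout growing from
# 60s to 660s, sleeping 5+30+60+180+300+360+420+480+540+600 = 2975 seconds (~50 minutes) in
# total before the final exception is re-raised to the caller.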
def get_total_expected_records(base_url):
initial_request = get_with_exception_hand(base_url, expect_entries=False)
initial_request_xml = xmltodict.parse(initial_request.text, process_namespaces=True, namespaces=FPDS_NAMESPACES)
try:
urls_list = list_data(initial_request_xml['feed']['link'])
except KeyError:
urls_list = []
final_request_url = None
for url in urls_list:
if url['@rel'] == 'last':
final_request_url = url['@href']
continue
if not final_request_url:
try:
return len(list_data(initial_request_xml['feed']['entry']))
except KeyError:
return 0
final_request_count = int(final_request_url.split('&start=')[-1])
final_request = get_with_exception_hand(final_request_url)
final_request_xml = xmltodict.parse(final_request.text, process_namespaces=True, namespaces=FPDS_NAMESPACES)
try:
entries_list = list_data(final_request_xml['feed']['entry'])
except KeyError:
raise Exception("Initial count failed, no entries in last page of request.")
return final_request_count + len(entries_list)
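# Count math above, with hypothetical numbers: if the feed's 'last' link ends in '&start=970'
# and that final page parses to 10 entries, the total expected record count is 970 + 10 = 980.
# When no 'last' link exists the whole feed fits on one page, so the count is simply the number
# of entries in the initial response (or 0 if there are none).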
def get_data(contract_type, award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, last_run=None, threaded=False, start_date=None, end_date=None, metrics=None,
specific_params=None):
if not metrics:
metrics = {}
data = []
yesterday = now - datetime.timedelta(days=1)
utcnow = datetime.datetime.utcnow()
if specific_params:
params = specific_params
elif not last_run:
params = 'SIGNED_DATE:[2016/10/01,' + yesterday.strftime('%Y/%m/%d') + '] '
metrics['start_date'] = '2016/10/01'
metrics['end_date'] = yesterday.strftime('%Y/%m/%d')
else:
last_run_date = last_run - relativedelta(days=1)
params = 'LAST_MOD_DATE:[' + last_run_date.strftime('%Y/%m/%d') + ',' + yesterday.strftime('%Y/%m/%d') + '] '
metrics['start_date'] = last_run_date.strftime('%Y/%m/%d')
metrics['end_date'] = yesterday.strftime('%Y/%m/%d')
if start_date and end_date:
params = 'LAST_MOD_DATE:[' + start_date + ',' + end_date + '] '
metrics['start_date'] = start_date
metrics['end_date'] = end_date
base_url = feed_url + params + 'CONTRACT_TYPE:"' + contract_type.upper() + '" AWARD_TYPE:"' + award_type + '"'
logger.info('Starting get feed: %s', base_url)
total_expected_records = get_total_expected_records(base_url)
logger.info('{} record(s) expected from this feed'.format(total_expected_records))
entries_processed = 0
while True:
async def atom_async_get(entries_already_processed, total_expected_records):
response_list = []
loop = asyncio.get_event_loop()
requests_at_once = MAX_REQUESTS_AT_ONCE
if total_expected_records - entries_already_processed < (MAX_REQUESTS_AT_ONCE * MAX_ENTRIES):
requests_at_once = math.ceil((total_expected_records - entries_already_processed) / MAX_ENTRIES) + 1
futures = [
loop.run_in_executor(
None,
get_with_exception_hand,
base_url + "&start=" + str(entries_already_processed + (start_offset * MAX_ENTRIES)),
total_expected_records > entries_already_processed + (start_offset * MAX_ENTRIES)
)
for start_offset in range(requests_at_once)
]
for response in await asyncio.gather(*futures):
response_list.append(response.text)
return response_list
# End async get requests def
loop = asyncio.get_event_loop()
full_response = loop.run_until_complete(atom_async_get(entries_processed, total_expected_records))
for next_resp in full_response:
response_dict = xmltodict.parse(next_resp, process_namespaces=True, namespaces=FPDS_NAMESPACES)
try:
entries_per_response = list_data(response_dict['feed']['entry'])
except KeyError:
continue
if last_run or specific_params:
for entry in entries_per_response:
data.append(entry)
entries_processed += 1
else:
data.extend(create_processed_data_list(entries_per_response, contract_type, sess, sub_tier_list,
county_by_name, county_by_code, state_code_list, country_list,
exec_comp_dict))
entries_processed += len(entries_per_response)
if entries_processed > total_expected_records:
# Find entries that don't have FPDS content and print them all
for next_resp in full_response:
response_dict = xmltodict.parse(next_resp, process_namespaces=True, namespaces=FPDS_NAMESPACES)
try:
list_data(response_dict['feed']['entry'])
except KeyError:
logger.info(response_dict)
continue
raise Exception("Total number of expected records has changed\nExpected: {}\nRetrieved so far: {}"
.format(total_expected_records, len(data)))
if data:
logger.info("Retrieved %s lines of get %s: %s feed, writing next %s to DB",
entries_processed, contract_type, award_type, len(data))
if last_run or specific_params:
process_and_add(data, contract_type, sess, sub_tier_list, county_by_name, county_by_code,
state_code_list, country_list, exec_comp_dict, utcnow, threaded)
else:
add_processed_data_list(data, sess)
logger.info("Successfully inserted %s lines of get %s: %s feed, continuing feed retrieval",
len(data), contract_type, award_type)
# if we got less than the full set of records, we can stop calling the feed
if len(data) < (MAX_ENTRIES * MAX_REQUESTS_AT_ONCE):
# ensure we loaded the number of records we expected to, otherwise we'll need to reload
if entries_processed != total_expected_records:
raise Exception("Records retrieved != Total expected records\nExpected: {}\nRetrieved: {}"
.format(total_expected_records, entries_processed))
else:
if 'records_received' not in metrics:
metrics['records_received'] = total_expected_records
else:
metrics['records_received'] += total_expected_records
break
else:
data = []
logger.info("Total entries in %s: %s feed: %s", contract_type, award_type, entries_processed)
logger.info("Processed %s: %s data", contract_type, award_type)
def get_delete_data(contract_type, now, sess, last_run, start_date=None, end_date=None, metrics=None):
if not metrics:
metrics = {}
data = []
yesterday = now - datetime.timedelta(days=1)
last_run_date = last_run - relativedelta(days=1)
params = 'LAST_MOD_DATE:[' + last_run_date.strftime('%Y/%m/%d') + ',' + yesterday.strftime('%Y/%m/%d') + '] '
if start_date and end_date:
params = 'LAST_MOD_DATE:[' + start_date + ',' + end_date + '] '
        # when the delete feed is run on its own, the date range won't have been stored in the metrics dict
        # already, so this is the only place it needs to get set (.get avoids a KeyError when the caller
        # passed no metrics dict)
        if not metrics.get('start_date'):
            metrics['start_date'] = start_date
        if not metrics.get('end_date'):
            metrics['end_date'] = end_date
base_url = delete_url + params + 'CONTRACT_TYPE:"' + contract_type.upper() + '"'
logger.info('Starting delete feed: %s', base_url)
# retrieve the total count of expected records for this pull
total_expected_records = get_total_expected_records(base_url)
logger.info('{} record(s) expected from this feed'.format(total_expected_records))
processed_deletions = 0
    while True:
        exception_retries = -1
        retry_sleep_times = [5, 30, 60, 180, 300, 360, 420, 480, 540, 600]
        request_timeout = 60
        # retry the request until it succeeds or the attempts run out; without this inner loop a
        # single failed request would fall through with resp_data unbound
        while True:
            try:
                resp = requests.get(base_url + '&start=' + str(processed_deletions), timeout=request_timeout)
                resp_data = xmltodict.parse(resp.text, process_namespaces=True, namespaces=FPDS_NAMESPACES)
                break
            except (ConnectionResetError, ReadTimeoutError, ConnectionError, ReadTimeout) as e:
                exception_retries += 1
                request_timeout += 60
                if exception_retries < len(retry_sleep_times):
                    logger.info('Connection exception caught. Sleeping {}s and then retrying with a max wait of '
                                '{}s...'.format(retry_sleep_times[exception_retries], request_timeout))
                    time.sleep(retry_sleep_times[exception_retries])
                else:
                    logger.info('Connection to FPDS feed lost, maximum retry attempts exceeded.')
                    raise e
# only list the data if there's data to list
try:
listed_data = list_data(resp_data['feed']['entry'])
except KeyError:
listed_data = []
        if processed_deletions > total_expected_records:
            raise Exception("Total number of expected records has changed\nExpected: {}\nRetrieved so far: {}"
                            .format(total_expected_records, processed_deletions))
for ld in listed_data:
data.append(ld)
processed_deletions += 1
if processed_deletions % 100 == 0:
logger.info("On line %s of %s delete feed", str(processed_deletions), contract_type)
# if we got less than the full set of records we can stop calling the feed
if len(listed_data) < 10:
# ensure we loaded the number of records we expected to, otherwise we'll need to reload
            if processed_deletions != total_expected_records:
                raise Exception("Records retrieved != Total expected records\nExpected: {}\nRetrieved: {}"
                                .format(total_expected_records, processed_deletions))
else:
if 'deletes_received' not in metrics:
metrics['deletes_received'] = total_expected_records
else:
metrics['deletes_received'] += total_expected_records
break
else:
listed_data = []
logger.info("Total entries in %s delete feed: %s", contract_type, str(processed_deletions))
delete_list = []
delete_dict = {}
for value in data:
last_modified = value['content'][contract_type]['transactionInformation']['lastModifiedDate']
unique_string = process_delete_data(value['content'][contract_type], atom_type=contract_type)
existing_item = sess.query(DetachedAwardProcurement).\
filter_by(detached_award_proc_unique=unique_string).one_or_none()
if existing_item:
if last_modified > existing_item.last_modified:
delete_list.append(existing_item.detached_award_procurement_id)
delete_dict[existing_item.detached_award_procurement_id] = existing_item.detached_award_proc_unique
# only need to delete values if there's something to delete
if delete_list:
if 'records_deleted' not in metrics:
metrics['records_deleted'] = len(delete_list)
else:
metrics['records_deleted'] += len(delete_list)
sess.query(DetachedAwardProcurement).\
filter(DetachedAwardProcurement.detached_award_procurement_id.in_(delete_list)).\
delete(synchronize_session=False)
seconds = int((datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds())
file_name = now.strftime('%m-%d-%Y') + "_delete_records_" + contract_type + "_" + str(seconds) + ".csv"
metrics['deleted_{}_records_file'.format(contract_type).lower()] = file_name
headers = ["detached_award_procurement_id", "detached_award_proc_unique"]
if CONFIG_BROKER["use_aws"]:
s3client = boto3.client('s3', region_name=CONFIG_BROKER['aws_region'])
contents = bytes((",".join(headers) + "\n").encode())
for key, value in delete_dict.items():
contents += bytes('{},{}\n'.format(key, value).encode())
s3client.put_object(Bucket=CONFIG_BROKER['fpds_delete_bucket'], Key=file_name, Body=contents)
else:
with CsvLocalWriter(file_name, headers) as writer:
for key, value in delete_dict.items():
writer.write([key, value])
writer.finish_batch()
def create_lookups(sess):
sub_tiers = sess.query(SubTierAgency).all()
sub_tier_list = {}
for sub_tier in sub_tiers:
sub_tier_list[sub_tier.sub_tier_agency_code] = sub_tier
countries = sess.query(CountryCode).all()
country_list = {}
for country in countries:
country_list[country.country_code] = country.country_name
county_by_name = {}
county_by_code = {}
state_code_list = {}
state_codes = sess.query(States.state_code, func.upper(States.state_name).label('state_name')).all()
for state_code in state_codes:
county_by_name[state_code.state_code] = {}
county_by_code[state_code.state_code] = {}
state_code_list[state_code.state_code] = state_code.state_name
county_codes = sess.query(CountyCode.county_number, CountyCode.state_code,
func.upper(CountyCode.county_name).label('county_name')).all()
for county_code in county_codes:
county_name = county_code.county_name.replace(' (CA)', '').strip()
# we want all the counties in our by-code lookup because we'd be using this table anyway for derivations
county_by_code[county_code.state_code][county_code.county_number] = county_name
        if re.match(r'^[A-Z\s]+$', county_code.county_name):
county_by_name[county_code.state_code][county_name] = county_code.county_number
exec_comp_dict = {}
duns_list = sess.query(DUNS).filter(DUNS.high_comp_officer1_full_na.isnot(None)).all()
for duns in duns_list:
exec_comp_dict[duns.awardee_or_recipient_uniqu] = \
{'officer1_name': duns.high_comp_officer1_full_na, 'officer1_amt': duns.high_comp_officer1_amount,
'officer2_name': duns.high_comp_officer2_full_na, 'officer2_amt': duns.high_comp_officer2_amount,
'officer3_name': duns.high_comp_officer3_full_na, 'officer3_amt': duns.high_comp_officer3_amount,
'officer4_name': duns.high_comp_officer4_full_na, 'officer4_amt': duns.high_comp_officer4_amount,
'officer5_name': duns.high_comp_officer5_full_na, 'officer5_amt': duns.high_comp_officer5_amount}
del duns_list
return sub_tier_list, country_list, state_code_list, county_by_name, county_by_code, exec_comp_dict
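# Shapes of the returned lookups, with hypothetical rows: sub_tier_list maps '9700' to its
# SubTierAgency ORM object, country_list maps 'CAN' -> 'CANADA', state_code_list maps
# 'VA' -> 'VIRGINIA', county_by_name['VA']['FAIRFAX'] -> '059' and county_by_code['VA']['059']
# -> 'FAIRFAX' invert each other, and exec_comp_dict maps a DUNS number to its five highly
# compensated officer name/amount pairs.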
def main():
sess = GlobalDB.db().session
now = datetime.datetime.now()
parser = argparse.ArgumentParser(description='Pull data from the FPDS Atom Feed.')
parser.add_argument('-a', '--all', help='Clear out the database and get historical data', action='store_true')
parser.add_argument('-l', '--latest', help='Get by last_mod_date stored in DB', action='store_true')
parser.add_argument('-d', '--delivery', help='Used in conjunction with -a to indicate delivery order feed',
action='store_true')
parser.add_argument('-o', '--other',
help='Used in conjunction with -a to indicate all feeds other than delivery order',
action='store_true')
    parser.add_argument('-da', '--dates', help='Used in conjunction with -l to specify dates to gather updates from. '
                                               'Should have 2 arguments, first and last day, formatted YYYY/mm/dd',
                        nargs=2, type=str)
parser.add_argument('-del', '--delete', help='Used to only run the delete feed. First argument must be "both", '
'"idv", or "award". The second and third arguments must be the first '
'and last day to run the feeds for, formatted YYYY/mm/dd',
nargs=3, type=str)
args = parser.parse_args()
award_types_award = ["BPA Call", "Definitive Contract", "Purchase Order", "Delivery Order"]
award_types_idv = ["GWAC", "BOA", "BPA", "FSS", "IDC"]
metrics_json = {
'script_name': 'pull_fpds_data.py',
'start_time': str(now),
'records_received': 0,
'deletes_received': 0,
'records_deleted': 0,
'deleted_award_records_file': '',
'deleted_idv_records_file': '',
'start_date': '',
'end_date': ''
}
sub_tier_list, country_list, state_code_list, county_by_name, county_by_code, exec_comp_dict = create_lookups(sess)
if args.all:
if (not args.delivery and not args.other) or (args.delivery and args.other):
logger.error("When using the -a flag, please include either -d or -o "
"(but not both) to indicate which feeds to read in")
raise ValueError("When using the -a flag, please include either -d or -o "
"(but not both) to indicate which feeds to read in")
logger.info("Starting at: %s", str(datetime.datetime.now()))
if args.other:
for award_type in award_types_idv:
get_data("IDV", award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, metrics=metrics_json)
for award_type in award_types_award:
if award_type != "Delivery Order":
get_data("award", award_type, now, sess, sub_tier_list, county_by_name, county_by_code,
state_code_list, country_list, exec_comp_dict, metrics=metrics_json)
elif args.delivery:
get_data("award", "Delivery Order", now, sess, sub_tier_list, county_by_name, county_by_code,
state_code_list, country_list, exec_comp_dict, metrics=metrics_json)
last_update = sess.query(FPDSUpdate).one_or_none()
if last_update:
sess.query(FPDSUpdate).update({"update_date": now}, synchronize_session=False)
else:
sess.add(FPDSUpdate(update_date=now))
sess.commit()
logger.info("Ending at: %s", str(datetime.datetime.now()))
elif args.latest:
logger.info("Starting at: %s", str(datetime.datetime.now()))
last_update_obj = sess.query(FPDSUpdate).one_or_none()
if not last_update_obj:
logger.error(
"No last_update date present, please run the script with the -a flag to generate an initial dataset")
raise ValueError(
"No last_update date present, please run the script with the -a flag to generate an initial dataset")
last_update = last_update_obj.update_date
start_date = None
end_date = None
if args.dates:
start_date = args.dates[0]
end_date = args.dates[1]
for award_type in award_types_idv:
get_data("IDV", award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, last_update, start_date=start_date, end_date=end_date,
metrics=metrics_json)
for award_type in award_types_award:
get_data("award", award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, last_update, start_date=start_date, end_date=end_date,
metrics=metrics_json)
# We also need to process the delete feed
get_delete_data("IDV", now, sess, last_update, start_date, end_date, metrics=metrics_json)
get_delete_data("award", now, sess, last_update, start_date, end_date, metrics=metrics_json)
if not start_date and not end_date:
sess.query(FPDSUpdate).update({"update_date": now}, synchronize_session=False)
sess.commit()
logger.info("Ending at: %s", str(datetime.datetime.now()))
elif args.delete:
del_type = args.delete[0]
if del_type == 'award':
del_awards = True
del_idvs = False
elif del_type == 'idv':
del_awards = False
del_idvs = True
elif del_type == 'both':
del_awards = True
del_idvs = True
else:
logger.error("Delete argument must be \"idv\", \"award\", or \"both\"")
raise ValueError("Delete argument must be \"idv\", \"award\", or \"both\"")
if del_idvs:
get_delete_data("IDV", now, sess, now, args.delete[1], args.delete[2], metrics=metrics_json)
if del_awards:
get_delete_data("award", now, sess, now, args.delete[1], args.delete[2], metrics=metrics_json)
sess.commit()
metrics_json['duration'] = str(datetime.datetime.now() - now)
with open('pull_fpds_data_metrics.json', 'w+') as metrics_file:
json.dump(metrics_json, metrics_file)
# TODO add a correct start date for "all" so we don't get ALL the data or too little of the data
if __name__ == '__main__':
with create_app().app_context():
configure_logging()
main()
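# Typical invocations, per the argparse flags defined in main() above:
#   python pull_fpds_data.py -a -o                        # historical load, every feed except delivery orders
#   python pull_fpds_data.py -a -d                        # historical load, delivery order feed only
#   python pull_fpds_data.py -l                           # incremental load since the stored last_update date
#   python pull_fpds_data.py -l -da 2021/01/01 2021/01/31 # incremental load for an explicit date range
#   python pull_fpds_data.py -del both 2021/01/01 2021/01/31  # delete feeds only, for a date range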
| true
| true
|
7908fe7fbee791b88a73632579a09a78b09348d4
| 2,724
|
py
|
Python
|
aliyun-python-sdk-vpc/aliyunsdkvpc/request/v20160428/ModifyEipAddressAttributeRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-vpc/aliyunsdkvpc/request/v20160428/ModifyEipAddressAttributeRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-vpc/aliyunsdkvpc/request/v20160428/ModifyEipAddressAttributeRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class ModifyEipAddressAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'ModifyEipAddressAttribute','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_Description(self):
return self.get_query_params().get('Description')
def set_Description(self,Description):
self.add_query_param('Description',Description)
def get_AllocationId(self):
return self.get_query_params().get('AllocationId')
def set_AllocationId(self,AllocationId):
self.add_query_param('AllocationId',AllocationId)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_Bandwidth(self):
return self.get_query_params().get('Bandwidth')
def set_Bandwidth(self,Bandwidth):
self.add_query_param('Bandwidth',Bandwidth)
def get_OwnerAccount(self):
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self,OwnerAccount):
self.add_query_param('OwnerAccount',OwnerAccount)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_Name(self):
return self.get_query_params().get('Name')
def set_Name(self,Name):
self.add_query_param('Name',Name)
| 34.05
| 84
| 0.769457
|
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class ModifyEipAddressAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'ModifyEipAddressAttribute','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_Description(self):
return self.get_query_params().get('Description')
def set_Description(self,Description):
self.add_query_param('Description',Description)
def get_AllocationId(self):
return self.get_query_params().get('AllocationId')
def set_AllocationId(self,AllocationId):
self.add_query_param('AllocationId',AllocationId)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_Bandwidth(self):
return self.get_query_params().get('Bandwidth')
def set_Bandwidth(self,Bandwidth):
self.add_query_param('Bandwidth',Bandwidth)
def get_OwnerAccount(self):
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self,OwnerAccount):
self.add_query_param('OwnerAccount',OwnerAccount)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_Name(self):
return self.get_query_params().get('Name')
def set_Name(self,Name):
self.add_query_param('Name',Name)
| true
| true
|
7908fed372287d58f9cf742f2a75006852ac4e38
| 5,177
|
py
|
Python
|
btDown.py
|
tianxiaxi/iDown
|
f958871f6476ea660da26adc4648a679259d7624
|
[
"MIT"
] | 2
|
2021-07-26T16:03:54.000Z
|
2021-11-17T19:42:01.000Z
|
btDown.py
|
tianxiaxi/iDown
|
f958871f6476ea660da26adc4648a679259d7624
|
[
"MIT"
] | 1
|
2018-10-09T01:55:21.000Z
|
2018-10-09T06:18:27.000Z
|
btDown.py
|
tianxiaxi/iDown
|
f958871f6476ea660da26adc4648a679259d7624
|
[
"MIT"
] | 2
|
2020-04-10T13:28:55.000Z
|
2021-07-26T16:03:55.000Z
|
#!/usr/bin/env python3
#-*- coding: utf-8 -*-
'''btDown.py - Download resource for HTTP/HTTPS/FTP/Thunder/Magnet/BT
Usage: python3 btDown.py <url> [path]
Required:
    url     HTTP/HTTPS/FTP/Thunder/Magnet/BT downloading URL
Optional:
path The store path for the downloaded file
Notice: Python3 required for btDown.py
Author: zanran (wayne@zanran.me)
CreatedAt: Mon Oct 8 21:27:28 CST 2018
'''
import os
import sys
import requests
import time
import re
import enum
import base64
from urllib import parse, request
def usage(err=None):
if err:
print(err)
print(__doc__)
sys.exit(0)
@enum.unique
class PROTROL_TYEP(enum.Enum):
UNKNOWN = 0
    HTTP = 1        # HTTP/HTTPS download
    FTP = 2         # FTP download
    BT = 3          # BT (torrent file) download
    MAGNET = 4      # magnet link download
    THUNDER = 5     # Thunder (Xunlei) download
class BtDown(object):
def __init__(self, url, path = None):
self.origin_url = url
self.dest_path = path
def detectProtrolType(self, url):
bt_type = PROTROL_TYEP.UNKNOWN
if (re.match('^ftp://', url, re.IGNORECASE)):
bt_type = PROTROL_TYEP.FTP
elif (re.match('^thunder://', url, re.IGNORECASE)):
bt_type = PROTROL_TYEP.THUNDER
elif (re.match('^magnet:?', url, re.IGNORECASE)):
bt_type = PROTROL_TYEP.MAGNET
elif (re.search(r'\.torrent$', url, re.IGNORECASE)):
bt_type = PROTROL_TYEP.BT
# http/https detect must be after torrent
elif (re.match('^https?://', url, re.IGNORECASE)):
bt_type = PROTROL_TYEP.HTTP
return bt_type
def _parserThunderUrl(self, url):
        thunder_url = re.sub('^thunder://', '', url, flags=re.IGNORECASE)
normalize_url = base64.b64decode(thunder_url).decode()
normalize_url = re.sub('^AA', '', normalize_url)
normalize_url = re.sub('ZZ$', '', normalize_url)
return normalize_url
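    # Thunder links wrap the real URL as base64("AA" + url + "ZZ"). A doctest-style round trip
    # with a hypothetical address:
    #
    #     >>> import base64
    #     >>> base64.b64encode(b'AAhttp://example.com/a.zipZZ').decode()
    #     'QUFodHRwOi8vZXhhbXBsZS5jb20vYS56aXBaWg=='
    #     >>> BtDown('x')._parserThunderUrl('thunder://QUFodHRwOi8vZXhhbXBsZS5jb20vYS56aXBaWg==')
    #     'http://example.com/a.zip'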
def _parserMagnetUrl(self, url):
return ''
def parseUrlProtrol(self, url):
normalize_url = url
bt_type = self.detectProtrolType(url)
if bt_type in [PROTROL_TYEP.THUNDER]:
normalize_url = self._parserThunderUrl(url)
elif bt_type in [PROTROL_TYEP.MAGNET]:
normalize_url = self._parserMagnetUrl(url)
elif bt_type in [PROTROL_TYEP.BT]:
            raise Exception('BT (torrent) is not supported yet!')
return normalize_url
def getTitle(self, url):
title = 'unnamed_file'
bt_type = self.detectProtrolType(url)
if bt_type in [PROTROL_TYEP.HTTP, PROTROL_TYEP.FTP]:
last_slash = url.rfind('/')
if last_slash != -1:
title = url[last_slash + 1:].strip()
if title.count('%') > 1:
title = parse.unquote(title)
return title
def _showDownloadProgress(self, file, percent):
base_file = os.path.basename(file)
if(percent > 100):
percent = 100
message = '\r Downloading %s ...... %2.f%%' % (base_file, percent)
print(message, end='')
return
def _download_http(self, url, dest_file):
res = requests.get(url, stream=True)
max_file_bytes = int(res.headers['Content-Length'])
chunk_size = 1024*1024*4
downloaded_size = 0
f = open(dest_file, 'wb')
for data in res.iter_content(chunk_size):
downloaded_size += len(data)
percent = downloaded_size / max_file_bytes * 100
self._showDownloadProgress(dest_file, percent)
f.write(data)
f.close()
def _download_ftp(self, url, dest_file):
def _report(blocknum, blocksize, totalsize):
if not totalsize:
return
percent = 100.0 * blocknum * blocksize / totalsize
self._showDownloadProgress(dest_file, percent)
url = parse.quote(url, safe=':/@')
request.urlretrieve(url, dest_file, _report)
def download(self):
print('Start downloading %s' % self.origin_url)
normalize_url = self.parseUrlProtrol(self.origin_url)
print('Parse real url %s' % normalize_url)
title = self.getTitle(normalize_url)
dest_file = title
if self.dest_path:
if not os.path.exists(self.dest_path):
os.makedirs(self.dest_path)
dest_file = os.path.join(self.dest_path, title)
if os.path.exists(dest_file):
os.remove(dest_file)
bt_type = self.detectProtrolType(normalize_url)
if bt_type in [PROTROL_TYEP.HTTP]:
self._download_http(normalize_url, dest_file)
elif bt_type in [PROTROL_TYEP.FTP]:
self._download_ftp(normalize_url, dest_file)
else:
            raise Exception('Unknown protocol type detected!')
print('\nSaved file: %s' % dest_file)
return
def main():
if len(sys.argv) not in [2, 3]:
usage()
url = sys.argv[1]
path = None
if len(sys.argv) > 2:
path = sys.argv[2]
bt = BtDown(url, path)
bt.download()
print('------------------ Well done ------------------')
if __name__ == '__main__':
main()
| 31.760736
| 75
| 0.596871
|
import os
import sys
import requests
import time
import re
import enum
import base64
from urllib import parse, request
def usage(err=None):
if err:
print(err)
print(__doc__)
sys.exit(0)
@enum.unique
class PROTROL_TYEP(enum.Enum):
UNKNOWN = 0
HTTP = 1
FTP = 2
BT = 3
MAGNET = 4
THUNDER = 5
class BtDown(object):
def __init__(self, url, path = None):
self.origin_url = url
self.dest_path = path
def detectProtrolType(self, url):
bt_type = PROTROL_TYEP.UNKNOWN
if (re.match('^ftp://', url, re.IGNORECASE)):
bt_type = PROTROL_TYEP.FTP
elif (re.match('^thunder://', url, re.IGNORECASE)):
bt_type = PROTROL_TYEP.THUNDER
elif (re.match('^magnet:?', url, re.IGNORECASE)):
bt_type = PROTROL_TYEP.MAGNET
elif (re.search(r'\.torrent$', url, re.IGNORECASE)):
bt_type = PROTROL_TYEP.BT
elif (re.match('^https?://', url, re.IGNORECASE)):
bt_type = PROTROL_TYEP.HTTP
return bt_type
def _parserThunderUrl(self, url):
        thunder_url = re.sub('^thunder://', '', url, flags=re.IGNORECASE)
normalize_url = base64.b64decode(thunder_url).decode()
normalize_url = re.sub('^AA', '', normalize_url)
normalize_url = re.sub('ZZ$', '', normalize_url)
return normalize_url
def _parserMagnetUrl(self, url):
return ''
def parseUrlProtrol(self, url):
normalize_url = url
bt_type = self.detectProtrolType(url)
if bt_type in [PROTROL_TYEP.THUNDER]:
normalize_url = self._parserThunderUrl(url)
elif bt_type in [PROTROL_TYEP.MAGNET]:
normalize_url = self._parserMagnetUrl(url)
        elif bt_type in [PROTROL_TYEP.BT]:
            raise Exception('BT (torrent) is not supported yet!')
return normalize_url
def getTitle(self, url):
title = 'unnamed_file'
bt_type = self.detectProtrolType(url)
if bt_type in [PROTROL_TYEP.HTTP, PROTROL_TYEP.FTP]:
last_slash = url.rfind('/')
if last_slash != -1:
title = url[last_slash + 1:].strip()
if title.count('%') > 1:
title = parse.unquote(title)
return title
    def _showDownloadProgress(self, file, percent):
        base_file = os.path.basename(file)
        if percent > 100:
            percent = 100
        message = '\r Downloading %s ...... %.2f%%' % (base_file, percent)
        print(message, end='')
    def _download_http(self, url, dest_file):
        res = requests.get(url, stream=True)
        res.raise_for_status()
        # Content-Length may be missing on chunked responses
        max_file_bytes = int(res.headers.get('Content-Length', 0))
        chunk_size = 1024 * 1024 * 4
        downloaded_size = 0
        with open(dest_file, 'wb') as f:
            for data in res.iter_content(chunk_size):
                downloaded_size += len(data)
                if max_file_bytes:
                    percent = downloaded_size / max_file_bytes * 100
                    self._showDownloadProgress(dest_file, percent)
                f.write(data)
def _download_ftp(self, url, dest_file):
def _report(blocknum, blocksize, totalsize):
if not totalsize:
return
percent = 100.0 * blocknum * blocksize / totalsize
self._showDownloadProgress(dest_file, percent)
url = parse.quote(url, safe=':/@')
request.urlretrieve(url, dest_file, _report)
def download(self):
print('Start downloading %s' % self.origin_url)
normalize_url = self.parseUrlProtrol(self.origin_url)
        print('Parsed real url %s' % normalize_url)
title = self.getTitle(normalize_url)
dest_file = title
if self.dest_path:
if not os.path.exists(self.dest_path):
os.makedirs(self.dest_path)
dest_file = os.path.join(self.dest_path, title)
if os.path.exists(dest_file):
os.remove(dest_file)
bt_type = self.detectProtrolType(normalize_url)
if bt_type in [PROTROL_TYEP.HTTP]:
self._download_http(normalize_url, dest_file)
elif bt_type in [PROTROL_TYEP.FTP]:
self._download_ftp(normalize_url, dest_file)
        else:
            raise Exception('Unknown protocol type detected!')
        print('\nSaved file: %s' % dest_file)
def main():
if len(sys.argv) not in [2, 3]:
usage()
url = sys.argv[1]
path = None
if len(sys.argv) > 2:
path = sys.argv[2]
bt = BtDown(url, path)
bt.download()
print('------------------ Well done ------------------')
if __name__ == '__main__':
main()
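The thunder:// scheme handled by _parserThunderUrl above is plain base64 over 'AA' + url + 'ZZ'. A round-trip check under that assumption; the target URL is made up:

import base64
import re

plain = 'http://example.com/file.zip'  # hypothetical target URL
thunder = 'thunder://' + base64.b64encode(('AA' + plain + 'ZZ').encode()).decode()
decoded = base64.b64decode(re.sub('^thunder://', '', thunder, flags=re.IGNORECASE)).decode()
assert re.sub('ZZ$', '', re.sub('^AA', '', decoded)) == plain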
| true
| true
|
7908ff135c22d7c8e2ea5483b3b23182ff9ab182
| 615
|
py
|
Python
|
Python-codes-CeV/40-Average.py
|
engcristian/Python
|
726a53e9499fd5d0594572298e59e318f98e2d36
|
[
"MIT"
] | 1
|
2021-02-22T03:53:23.000Z
|
2021-02-22T03:53:23.000Z
|
Python-codes-CeV/40-Average.py
|
engcristian/Python
|
726a53e9499fd5d0594572298e59e318f98e2d36
|
[
"MIT"
] | null | null | null |
Python-codes-CeV/40-Average.py
|
engcristian/Python
|
726a53e9499fd5d0594572298e59e318f98e2d36
|
[
"MIT"
] | null | null | null |
'''Ask for a student's two grades and report the result based on their average:
    average >= 7      -> Approved
    5 <= average < 7  -> Recovery
    average < 5       -> Failed
'''
g1 = float(input("Inform the student's first grade: "))
g2 = float(input("Inform the student's second grade: "))
average = (g1 + g2)/2  # arithmetic mean of the two grades
if average >= 7:
    print(f"Student with average \033[35m{average}\033[m: \033[32mAPPROVED\033[m")
elif 5 <= average < 7:
    print(f"Student with average \033[35m{average}\033[m: \033[33mRECOVERY\033[m")
else:
    print(f"Student with average \033[35m{average}\033[m: \033[31mFAILED\033[m")
| 38.4375
| 82
| 0.687805
|
g1 = float(input("Inform the student's first grade: "))
g2 = float(input("Inform the student's second grade: "))
average = (g1 + g2)/2
if average >= 7:
    print(f"Student with average \033[35m{average}\033[m: \033[32mAPPROVED\033[m")
elif 5 <= average < 7:
    print(f"Student with average \033[35m{average}\033[m: \033[33mRECOVERY\033[m")
else:
    print(f"Student with average \033[35m{average}\033[m: \033[31mFAILED\033[m")
| true
| true
|
79090001c6f55f22f5669e721a69995553e7c9bb
| 1,046
|
py
|
Python
|
PyUtilities/hdf5_stress_range.py
|
MingAtUWA/SimpleMPM2
|
7a1d7c257c621123d85a0630e93d42ae25c70fb4
|
[
"MIT"
] | null | null | null |
PyUtilities/hdf5_stress_range.py
|
MingAtUWA/SimpleMPM2
|
7a1d7c257c621123d85a0630e93d42ae25c70fb4
|
[
"MIT"
] | null | null | null |
PyUtilities/hdf5_stress_range.py
|
MingAtUWA/SimpleMPM2
|
7a1d7c257c621123d85a0630e93d42ae25c70fb4
|
[
"MIT"
] | null | null | null |
import numpy as np
import h5py as py
import matplotlib.pyplot as plt
import sys
hdf5_file = py.File("..\\Build\\TestsWithGL\\t2d_mpm_chm_t_bar_conference_restart.hdf5", "r")
frame_id = 0
th_grp = hdf5_file['TimeHistory']['penetration']
pcl_dset = th_grp['frame_%d' % frame_id]['ParticleData']
pcl_num = pcl_dset.attrs['pcl_num']
print(pcl_num)
pcl_stress = np.zeros([pcl_num, 4])
p_min_id = 0
p_min = sys.float_info.max  # start at the largest float so the first particle lowers it; float_info.min is the smallest positive normal
p_max_id = 0
p_max = -sys.float_info.max
for pcl_id in range(pcl_num):
pcl_data = pcl_dset[pcl_id]
pcl_stress[pcl_id][0] = pcl_data['s11']
pcl_stress[pcl_id][1] = pcl_data['s22']
pcl_stress[pcl_id][2] = pcl_data['s12']
pcl_stress[pcl_id][3] = pcl_data['p']
#p = pcl_stress[pcl_id][3]
p = (pcl_stress[pcl_id][0] + pcl_stress[pcl_id][1] + pcl_stress[pcl_id][2]) / 3.0
    if p < p_min:
        p_min = p
        p_min_id = pcl_id
    if p > p_max:
        p_max = p
        p_max_id = pcl_id
print("p min: %f pcl %d\np max: %f pcl %d" % (p_min, p_min_id, p_max, p_max_id))
hdf5_file.close()
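Since pcl_stress already holds every component, the min/max search above can also be done with vectorized numpy reductions. A sketch using the same mean-stress formula as the loop, p = (s11 + s22 + s12) / 3, reusing the pcl_stress array built above:

import numpy as np

# pcl_stress = np.random.randn(100, 4)  # synthetic stand-in for testing
p_all = pcl_stress[:, 0:3].sum(axis=1) / 3.0  # columns s11, s22, s12
p_min_id = int(np.argmin(p_all))
p_max_id = int(np.argmax(p_all))
print("p min: %f pcl %d\np max: %f pcl %d"
      % (p_all[p_min_id], p_min_id, p_all[p_max_id], p_max_id))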
| 29.055556
| 93
| 0.664436
|
import numpy as np
import h5py as py
import matplotlib.pyplot as plt
import sys
hdf5_file = py.File("..\\Build\\TestsWithGL\\t2d_mpm_chm_t_bar_conference_restart.hdf5", "r")
frame_id = 0
th_grp = hdf5_file['TimeHistory']['penetration']
pcl_dset = th_grp['frame_%d' % frame_id]['ParticleData']
pcl_num = pcl_dset.attrs['pcl_num']
print(pcl_num)
pcl_stress = np.zeros([pcl_num, 4])
p_min_id = 0
p_min = sys.float_info.max
p_max_id = 0
p_max = -sys.float_info.max
for pcl_id in range(pcl_num):
pcl_data = pcl_dset[pcl_id]
pcl_stress[pcl_id][0] = pcl_data['s11']
pcl_stress[pcl_id][1] = pcl_data['s22']
pcl_stress[pcl_id][2] = pcl_data['s12']
pcl_stress[pcl_id][3] = pcl_data['p']
p = (pcl_stress[pcl_id][0] + pcl_stress[pcl_id][1] + pcl_stress[pcl_id][2]) / 3.0
    if p < p_min:
        p_min = p
        p_min_id = pcl_id
    if p > p_max:
        p_max = p
        p_max_id = pcl_id
print("p min: %f pcl %d\np max: %f pcl %d" % (p_min, p_min_id, p_max, p_max_id))
hdf5_file.close()
| true
| true
|
7909001e6a21fbc44e84258b061f8c6d4d90019b
| 74,026
|
py
|
Python
|
python/ccxt/async_support/bitvavo.py
|
DoctorSlimm/ccxt
|
8f19512dfc5dac159eaeb465c98226c00252a9b6
|
[
"MIT"
] | 1
|
2021-11-16T15:45:34.000Z
|
2021-11-16T15:45:34.000Z
|
python/ccxt/async_support/bitvavo.py
|
DoctorSlimm/ccxt
|
8f19512dfc5dac159eaeb465c98226c00252a9b6
|
[
"MIT"
] | null | null | null |
python/ccxt/async_support/bitvavo.py
|
DoctorSlimm/ccxt
|
8f19512dfc5dac159eaeb465c98226c00252a9b6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import AccountSuspended
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import OnMaintenance
from ccxt.base.decimal_to_precision import ROUND
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.decimal_to_precision import DECIMAL_PLACES
from ccxt.base.decimal_to_precision import SIGNIFICANT_DIGITS
class bitvavo(Exchange):
def describe(self):
return self.deep_extend(super(bitvavo, self).describe(), {
'id': 'bitvavo',
'name': 'Bitvavo',
'countries': ['NL'], # Netherlands
'rateLimit': 60, # 1000 requests per minute
'version': 'v2',
'certified': True,
'pro': True,
'has': {
'CORS': None,
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'addMargin': False,
'cancelAllOrders': True,
'cancelOrder': True,
'createOrder': True,
'createReduceOnlyOrder': False,
'createStopLimitOrder': True,
'createStopMarketOrder': True,
'createStopOrder': True,
'editOrder': True,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchBorrowRatesPerSymbol': False,
'fetchCurrencies': True,
'fetchDepositAddress': True,
'fetchDeposits': True,
'fetchFundingHistory': False,
'fetchFundingRate': False,
'fetchFundingRateHistory': False,
'fetchFundingRates': False,
'fetchIndexOHLCV': False,
'fetchLeverage': False,
'fetchLeverageTiers': False,
'fetchMarkets': True,
'fetchMarkOHLCV': False,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenInterestHistory': False,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchPosition': False,
'fetchPositions': False,
'fetchPositionsRisk': False,
'fetchPremiumIndexOHLCV': False,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTradingFee': False,
'fetchTradingFees': True,
'fetchTransfer': False,
'fetchTransfers': False,
'fetchWithdrawals': True,
'reduceMargin': False,
'setLeverage': False,
'setMarginMode': False,
'setPositionMode': False,
'transfer': False,
'withdraw': True,
},
'timeframes': {
'1m': '1m',
'5m': '5m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'2h': '2h',
'4h': '4h',
'6h': '6h',
'8h': '8h',
'12h': '12h',
'1d': '1d',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/169202626-bd130fc5-fcf9-41bb-8d97-6093225c73cd.jpg',
'api': {
'public': 'https://api.bitvavo.com',
'private': 'https://api.bitvavo.com',
},
'www': 'https://bitvavo.com/',
'doc': 'https://docs.bitvavo.com/',
'fees': 'https://bitvavo.com/en/fees',
'referral': 'https://bitvavo.com/?a=24F34952F7',
},
'api': {
'public': {
'get': {
'time': 1,
'markets': 1,
'assets': 1,
'{market}/book': 1,
'{market}/trades': 5,
'{market}/candles': 1,
'ticker/price': 1,
'ticker/book': 1,
'ticker/24h': {'cost': 1, 'noMarket': 25},
},
},
'private': {
'get': {
'account': 1,
'order': 1,
'orders': 5,
'ordersOpen': {'cost': 1, 'noMarket': 25},
'trades': 5,
'balance': 5,
'deposit': 1,
'depositHistory': 5,
'withdrawalHistory': 5,
},
'post': {
'order': 1,
'withdrawal': 1,
},
'put': {
'order': 1,
},
'delete': {
'order': 1,
'orders': 1,
},
},
},
'fees': {
'trading': {
'tierBased': True,
'percentage': True,
'taker': self.parse_number('0.0025'),
'maker': self.parse_number('0.002'),
'tiers': {
'taker': [
[self.parse_number('0'), self.parse_number('0.0025')],
[self.parse_number('100000'), self.parse_number('0.0020')],
[self.parse_number('250000'), self.parse_number('0.0016')],
[self.parse_number('500000'), self.parse_number('0.0012')],
[self.parse_number('1000000'), self.parse_number('0.0010')],
[self.parse_number('2500000'), self.parse_number('0.0008')],
[self.parse_number('5000000'), self.parse_number('0.0006')],
[self.parse_number('10000000'), self.parse_number('0.0005')],
[self.parse_number('25000000'), self.parse_number('0.0004')],
],
'maker': [
[self.parse_number('0'), self.parse_number('0.0015')],
[self.parse_number('100000'), self.parse_number('0.0010')],
[self.parse_number('250000'), self.parse_number('0.0008')],
[self.parse_number('500000'), self.parse_number('0.0006')],
[self.parse_number('1000000'), self.parse_number('0.0005')],
[self.parse_number('2500000'), self.parse_number('0.0004')],
[self.parse_number('5000000'), self.parse_number('0.0004')],
[self.parse_number('10000000'), self.parse_number('0.0003')],
[self.parse_number('25000000'), self.parse_number('0.0003')],
],
},
},
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
},
'exceptions': {
'exact': {
'101': ExchangeError, # Unknown error. Operation may or may not have succeeded.
'102': BadRequest, # Invalid JSON.
'103': RateLimitExceeded, # You have been rate limited. Please observe the Bitvavo-Ratelimit-AllowAt header to see when you can send requests again. Failure to respect self limit will result in an IP ban. The default value is 1000 weighted requests per minute. Please contact support if you wish to increase self limit.
'104': RateLimitExceeded, # You have been rate limited by the number of new orders. The default value is 100 new orders per second or 100.000 new orders per day. Please update existing orders instead of cancelling and creating orders. Please contact support if you wish to increase self limit.
'105': PermissionDenied, # Your IP or API key has been banned for not respecting the rate limit. The ban expires at ${expiryInMs}.
'107': ExchangeNotAvailable, # The matching engine is overloaded. Please wait 500ms and resubmit your order.
'108': ExchangeNotAvailable, # The matching engine could not process your order in time. Please consider increasing the access window or resubmit your order.
'109': ExchangeNotAvailable, # The matching engine did not respond in time. Operation may or may not have succeeded.
'110': BadRequest, # Invalid endpoint. Please check url and HTTP method.
'200': BadRequest, # ${param} url parameter is not supported. Please note that parameters are case-sensitive and use body parameters for PUT and POST requests.
'201': BadRequest, # ${param} body parameter is not supported. Please note that parameters are case-sensitive and use url parameters for GET and DELETE requests.
'202': BadRequest, # ${param} order parameter is not supported. Please note that certain parameters are only allowed for market or limit orders.
'203': BadSymbol, # {"errorCode":203,"error":"symbol parameter is required."}
'204': BadRequest, # ${param} parameter is not supported.
'205': BadRequest, # ${param} parameter is invalid.
'206': BadRequest, # Use either ${paramA} or ${paramB}. The usage of both parameters at the same time is not supported.
'210': InvalidOrder, # Amount exceeds the maximum allowed amount(1000000000).
'211': InvalidOrder, # Price exceeds the maximum allowed amount(100000000000).
'212': InvalidOrder, # Amount is below the minimum allowed amount for self asset.
'213': InvalidOrder, # Price is below the minimum allowed amount(0.000000000000001).
'214': InvalidOrder, # Price is too detailed
'215': InvalidOrder, # Price is too detailed. A maximum of 15 digits behind the decimal point are allowed.
'216': InsufficientFunds, # {"errorCode":216,"error":"You do not have sufficient balance to complete self operation."}
'217': InvalidOrder, # {"errorCode":217,"error":"Minimum order size in quote currency is 5 EUR or 0.001 BTC."}
'230': ExchangeError, # The order is rejected by the matching engine.
'231': ExchangeError, # The order is rejected by the matching engine. TimeInForce must be GTC when markets are paused.
'232': BadRequest, # You must change at least one of amount, amountRemaining, price, timeInForce, selfTradePrevention or postOnly.
'233': InvalidOrder, # {"errorCode":233,"error":"Order must be active(status new or partiallyFilled) to allow updating/cancelling."}
'234': InvalidOrder, # Market orders cannot be updated.
'235': ExchangeError, # You can only have 100 open orders on each book.
'236': BadRequest, # You can only update amount or amountRemaining, not both.
'240': OrderNotFound, # {"errorCode":240,"error":"No order found. Please be aware that simultaneously updating the same order may return self error."}
'300': AuthenticationError, # Authentication is required for self endpoint.
'301': AuthenticationError, # {"errorCode":301,"error":"API Key must be of length 64."}
'302': AuthenticationError, # Timestamp is invalid. This must be a timestamp in ms. See Bitvavo-Access-Timestamp header or timestamp parameter for websocket.
'303': AuthenticationError, # Window must be between 100 and 60000 ms.
'304': AuthenticationError, # Request was not received within acceptable window(default 30s, or custom with Bitvavo-Access-Window header) of Bitvavo-Access-Timestamp header(or timestamp parameter for websocket).
# '304': AuthenticationError, # Authentication is required for self endpoint.
'305': AuthenticationError, # {"errorCode":305,"error":"No active API key found."}
'306': AuthenticationError, # No active API key found. Please ensure that you have confirmed the API key by e-mail.
'307': PermissionDenied, # This key does not allow access from self IP.
'308': AuthenticationError, # {"errorCode":308,"error":"The signature length is invalid(HMAC-SHA256 should return a 64 length hexadecimal string)."}
'309': AuthenticationError, # {"errorCode":309,"error":"The signature is invalid."}
'310': PermissionDenied, # This key does not allow trading actions.
'311': PermissionDenied, # This key does not allow showing account information.
'312': PermissionDenied, # This key does not allow withdrawal of funds.
'315': BadRequest, # Websocket connections may not be used in a browser. Please use REST requests for self.
'317': AccountSuspended, # This account is locked. Please contact support.
'400': ExchangeError, # Unknown error. Please contact support with a copy of your request.
'401': ExchangeError, # Deposits for self asset are not available at self time.
                    '402': PermissionDenied,  # You need to verify your identity before you can deposit and withdraw digital assets.
'403': PermissionDenied, # You need to verify your phone number before you can deposit and withdraw digital assets.
'404': OnMaintenance, # Could not complete self operation, because our node cannot be reached. Possibly under maintenance.
'405': ExchangeError, # You cannot withdraw digital assets during a cooldown period. This is the result of newly added bank accounts.
'406': BadRequest, # {"errorCode":406,"error":"Your withdrawal is too small."}
'407': ExchangeError, # Internal transfer is not possible.
'408': InsufficientFunds, # {"errorCode":408,"error":"You do not have sufficient balance to complete self operation."}
'409': InvalidAddress, # {"errorCode":409,"error":"This is not a verified bank account."}
'410': ExchangeError, # Withdrawals for self asset are not available at self time.
'411': BadRequest, # You can not transfer assets to yourself.
'412': InvalidAddress, # {"errorCode":412,"error":"eth_address_invalid."}
'413': InvalidAddress, # This address violates the whitelist.
'414': ExchangeError, # You cannot withdraw assets within 2 minutes of logging in.
},
'broad': {
'start parameter is invalid': BadRequest, # {"errorCode":205,"error":"start parameter is invalid."}
'symbol parameter is invalid': BadSymbol, # {"errorCode":205,"error":"symbol parameter is invalid."}
'amount parameter is invalid': InvalidOrder, # {"errorCode":205,"error":"amount parameter is invalid."}
'orderId parameter is invalid': InvalidOrder, # {"errorCode":205,"error":"orderId parameter is invalid."}
},
},
'options': {
'BITVAVO-ACCESS-WINDOW': 10000, # default 10 sec
'fetchCurrencies': {
'expires': 1000, # 1 second
},
},
'precisionMode': SIGNIFICANT_DIGITS,
'commonCurrencies': {
'MIOTA': 'IOTA', # https://github.com/ccxt/ccxt/issues/7487
},
})
def currency_to_precision(self, code, fee, networkCode=None):
return self.decimal_to_precision(fee, 0, self.currencies[code]['precision'])
def amount_to_precision(self, symbol, amount):
# https://docs.bitfinex.com/docs/introduction#amount-precision
# The amount field allows up to 8 decimals.
# Anything exceeding self will be rounded to the 8th decimal.
return self.decimal_to_precision(amount, TRUNCATE, self.markets[symbol]['precision']['amount'], DECIMAL_PLACES)
def price_to_precision(self, symbol, price):
price = self.decimal_to_precision(price, ROUND, self.markets[symbol]['precision']['price'], self.precisionMode)
# https://docs.bitfinex.com/docs/introduction#price-precision
# The precision level of all trading prices is based on significant figures.
# All pairs on Bitfinex use up to 5 significant digits and up to 8 decimals(e.g. 1.2345, 123.45, 1234.5, 0.00012345).
# Prices submit with a precision larger than 5 will be cut by the API.
return self.decimal_to_precision(price, TRUNCATE, 8, DECIMAL_PLACES)
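    # Worked example (illustrative numbers, not taken from the API): with a
    # market pricePrecision of 5 significant digits, 8095.3456 rounds to
    # 8095.3 and 1.2345678 rounds to 1.2346 in the first pass; the second
    # pass only truncates values that still carry more than 8 decimals.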
async def fetch_time(self, params={}):
"""
fetches the current integer timestamp in milliseconds from the exchange server
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns int: the current integer timestamp in milliseconds from the exchange server
"""
response = await self.publicGetTime(params)
#
# {"time": 1590379519148}
#
return self.safe_integer(response, 'time')
async def fetch_markets(self, params={}):
"""
retrieves data on all markets for bitvavo
:param dict params: extra parameters specific to the exchange api endpoint
:returns [dict]: an array of objects representing market data
"""
response = await self.publicGetMarkets(params)
currencies = await self.fetch_currencies_from_cache(params)
currenciesById = self.index_by(currencies, 'symbol')
#
# [
# {
# "market":"ADA-BTC",
# "status":"trading", # "trading" "halted" "auction"
# "base":"ADA",
# "quote":"BTC",
# "pricePrecision":5,
# "minOrderInBaseAsset":"100",
# "minOrderInQuoteAsset":"0.001",
# "orderTypes": ["market", "limit"]
# }
# ]
#
result = []
for i in range(0, len(response)):
market = response[i]
id = self.safe_string(market, 'market')
baseId = self.safe_string(market, 'base')
quoteId = self.safe_string(market, 'quote')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
status = self.safe_string(market, 'status')
baseCurrency = self.safe_value(currenciesById, baseId)
amountPrecision = None
if baseCurrency is not None:
amountPrecision = self.safe_integer(baseCurrency, 'decimals', 8)
result.append({
'id': id,
'symbol': base + '/' + quote,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'active': (status == 'trading'),
'contract': False,
'linear': None,
'inverse': None,
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': amountPrecision,
'price': self.safe_integer(market, 'pricePrecision'),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.safe_number(market, 'minOrderInBaseAsset'),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': self.safe_number(market, 'minOrderInQuoteAsset'),
'max': None,
},
},
'info': market,
})
return result
async def fetch_currencies_from_cache(self, params={}):
        # this method is now redundant
        # currencies are now fetched before markets
options = self.safe_value(self.options, 'fetchCurrencies', {})
timestamp = self.safe_integer(options, 'timestamp')
expires = self.safe_integer(options, 'expires', 1000)
now = self.milliseconds()
if (timestamp is None) or ((now - timestamp) > expires):
response = await self.publicGetAssets(params)
self.options['fetchCurrencies'] = self.extend(options, {
'response': response,
'timestamp': now,
})
return self.safe_value(self.options['fetchCurrencies'], 'response')
async def fetch_currencies(self, params={}):
"""
fetches all available currencies on an exchange
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: an associative dictionary of currencies
"""
response = await self.fetch_currencies_from_cache(params)
#
# [
# {
# "symbol":"ADA",
# "name":"Cardano",
# "decimals":6,
# "depositFee":"0",
# "depositConfirmations":15,
# "depositStatus":"OK", # "OK", "MAINTENANCE", "DELISTED"
# "withdrawalFee":"0.2",
# "withdrawalMinAmount":"0.2",
# "withdrawalStatus":"OK", # "OK", "MAINTENANCE", "DELISTED"
# "networks": ["Mainnet"], # "ETH", "NEO", "ONT", "SEPA", "VET"
# "message":"",
# },
# ]
#
result = {}
for i in range(0, len(response)):
currency = response[i]
id = self.safe_string(currency, 'symbol')
code = self.safe_currency_code(id)
depositStatus = self.safe_value(currency, 'depositStatus')
deposit = (depositStatus == 'OK')
withdrawalStatus = self.safe_value(currency, 'withdrawalStatus')
withdrawal = (withdrawalStatus == 'OK')
active = deposit and withdrawal
name = self.safe_string(currency, 'name')
precision = self.safe_integer(currency, 'decimals', 8)
result[code] = {
'id': id,
'info': currency,
'code': code,
'name': name,
'active': active,
'deposit': deposit,
'withdraw': withdrawal,
'fee': self.safe_number(currency, 'withdrawalFee'),
'precision': precision,
'limits': {
'amount': {
'min': None,
'max': None,
},
'withdraw': {
'min': self.safe_number(currency, 'withdrawalMinAmount'),
'max': None,
},
},
}
return result
async def fetch_ticker(self, symbol, params={}):
"""
fetches a price ticker, a statistical calculation with the information calculated over the past 24 hours for a specific market
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: a `ticker structure <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
}
response = await self.publicGetTicker24h(self.extend(request, params))
#
# {
# "market":"ETH-BTC",
# "open":"0.022578",
# "high":"0.023019",
# "low":"0.022573",
# "last":"0.023019",
# "volume":"25.16366324",
# "volumeQuote":"0.57333305",
# "bid":"0.023039",
# "bidSize":"0.53500578",
# "ask":"0.023041",
# "askSize":"0.47859202",
# "timestamp":1590381666900
# }
#
return self.parse_ticker(response, market)
def parse_ticker(self, ticker, market=None):
#
# fetchTicker
#
# {
# "market":"ETH-BTC",
# "open":"0.022578",
# "high":"0.023019",
# "low":"0.022573",
# "last":"0.023019",
# "volume":"25.16366324",
# "volumeQuote":"0.57333305",
# "bid":"0.023039",
# "bidSize":"0.53500578",
# "ask":"0.023041",
# "askSize":"0.47859202",
# "timestamp":1590381666900
# }
#
marketId = self.safe_string(ticker, 'market')
symbol = self.safe_symbol(marketId, market, '-')
timestamp = self.safe_integer(ticker, 'timestamp')
last = self.safe_string(ticker, 'last')
baseVolume = self.safe_string(ticker, 'volume')
quoteVolume = self.safe_string(ticker, 'volumeQuote')
open = self.safe_string(ticker, 'open')
return self.safe_ticker({
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_string(ticker, 'high'),
'low': self.safe_string(ticker, 'low'),
'bid': self.safe_string(ticker, 'bid'),
'bidVolume': self.safe_string(ticker, 'bidSize'),
'ask': self.safe_string(ticker, 'ask'),
'askVolume': self.safe_string(ticker, 'askSize'),
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None, # previous day close
'change': None,
'percentage': None,
'average': None,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}, market)
async def fetch_tickers(self, symbols=None, params={}):
"""
fetches price tickers for multiple markets, statistical calculations with the information calculated over the past 24 hours each market
:param [str]|None symbols: unified symbols of the markets to fetch the ticker for, all market tickers are returned if not assigned
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: an array of `ticker structures <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
await self.load_markets()
response = await self.publicGetTicker24h(params)
#
# [
# {
# "market":"ADA-BTC",
# "open":"0.0000059595",
# "high":"0.0000059765",
# "low":"0.0000059595",
# "last":"0.0000059765",
# "volume":"2923.172",
# "volumeQuote":"0.01743483",
# "bid":"0.0000059515",
# "bidSize":"1117.630919",
# "ask":"0.0000059585",
# "askSize":"809.999739",
# "timestamp":1590382266324
# }
# ]
#
return self.parse_tickers(response, symbols)
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
"""
get the list of most recent trades for a particular symbol
:param str symbol: unified symbol of the market to fetch trades for
:param int|None since: timestamp in ms of the earliest trade to fetch
:param int|None limit: the maximum amount of trades to fetch
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
# 'limit': 500, # default 500, max 1000
# 'start': since,
# 'end': self.milliseconds(),
# 'tradeIdFrom': '57b1159b-6bf5-4cde-9e2c-6bd6a5678baf',
# 'tradeIdTo': '57b1159b-6bf5-4cde-9e2c-6bd6a5678baf',
}
if limit is not None:
request['limit'] = limit
if since is not None:
request['start'] = since
response = await self.publicGetMarketTrades(self.extend(request, params))
#
# [
# {
# "id":"94154c98-6e8b-4e33-92a8-74e33fc05650",
# "timestamp":1590382761859,
# "amount":"0.06026079",
# "price":"8095.3",
# "side":"buy"
# }
# ]
#
return self.parse_trades(response, market, since, limit)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "id":"94154c98-6e8b-4e33-92a8-74e33fc05650",
# "timestamp":1590382761859,
# "amount":"0.06026079",
# "price":"8095.3",
# "side":"buy"
# }
#
# createOrder, fetchOpenOrders, fetchOrders, editOrder(private)
#
# {
# "id":"b0c86aa5-6ed3-4a2d-ba3a-be9a964220f4",
# "timestamp":1590505649245,
# "amount":"0.249825",
# "price":"183.49",
# "taker":true,
# "fee":"0.12038925",
# "feeCurrency":"EUR",
# "settled":true
# }
#
# fetchMyTrades(private)
#
# {
# "id":"b0c86aa5-6ed3-4a2d-ba3a-be9a964220f4",
# "orderId":"af76d6ce-9f7c-4006-b715-bb5d430652d0",
# "timestamp":1590505649245,
# "market":"ETH-EUR",
# "side":"sell",
# "amount":"0.249825",
# "price":"183.49",
# "taker":true,
# "fee":"0.12038925",
# "feeCurrency":"EUR",
# "settled":true
# }
#
# watchMyTrades(private)
#
# {
# event: 'fill',
# timestamp: 1590964470132,
# market: 'ETH-EUR',
# orderId: '85d082e1-eda4-4209-9580-248281a29a9a',
# fillId: '861d2da5-aa93-475c-8d9a-dce431bd4211',
# side: 'sell',
# amount: '0.1',
# price: '211.46',
# taker: True,
# fee: '0.056',
# feeCurrency: 'EUR'
# }
#
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string(trade, 'amount')
timestamp = self.safe_integer(trade, 'timestamp')
side = self.safe_string(trade, 'side')
id = self.safe_string_2(trade, 'id', 'fillId')
marketId = self.safe_string(trade, 'market')
symbol = self.safe_symbol(marketId, market, '-')
taker = self.safe_value(trade, 'taker')
takerOrMaker = None
if taker is not None:
takerOrMaker = 'taker' if taker else 'maker'
feeCostString = self.safe_string(trade, 'fee')
fee = None
if feeCostString is not None:
feeCurrencyId = self.safe_string(trade, 'feeCurrency')
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
fee = {
'cost': feeCostString,
'currency': feeCurrencyCode,
}
orderId = self.safe_string(trade, 'orderId')
return self.safe_trade({
'info': trade,
'id': id,
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'order': orderId,
'type': None,
'side': side,
'takerOrMaker': takerOrMaker,
'price': priceString,
'amount': amountString,
'cost': None,
'fee': fee,
}, market)
async def fetch_trading_fees(self, params={}):
"""
fetch the trading fees for multiple markets
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: a dictionary of `fee structures <https://docs.ccxt.com/en/latest/manual.html#fee-structure>` indexed by market symbols
"""
await self.load_markets()
response = await self.privateGetAccount(params)
#
# {
# "fees": {
# "taker": "0.0025",
# "maker": "0.0015",
# "volume": "10000.00"
# }
# }
#
fees = self.safe_value(response, 'fees')
maker = self.safe_number(fees, 'maker')
taker = self.safe_number(fees, 'taker')
result = {}
for i in range(0, len(self.symbols)):
symbol = self.symbols[i]
result[symbol] = {
'info': response,
'symbol': symbol,
'maker': maker,
'taker': taker,
'percentage': True,
'tierBased': True,
}
return result
async def fetch_order_book(self, symbol, limit=None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
:param str symbol: unified symbol of the market to fetch the order book for
:param int|None limit: the maximum amount of order book entries to return
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: A dictionary of `order book structures <https://docs.ccxt.com/en/latest/manual.html#order-book-structure>` indexed by market symbols
"""
await self.load_markets()
request = {
'market': self.market_id(symbol),
}
if limit is not None:
request['depth'] = limit
response = await self.publicGetMarketBook(self.extend(request, params))
#
# {
# "market":"BTC-EUR",
# "nonce":35883831,
# "bids":[
# ["8097.4","0.6229099"],
# ["8097.2","0.64151283"],
# ["8097.1","0.24966294"],
# ],
# "asks":[
# ["8097.5","1.36916911"],
# ["8098.8","0.33462248"],
# ["8099.3","1.12908646"],
# ]
# }
#
orderbook = self.parse_order_book(response, symbol)
orderbook['nonce'] = self.safe_integer(response, 'nonce')
return orderbook
def parse_ohlcv(self, ohlcv, market=None):
#
# [
# 1590383700000,
# "8088.5",
# "8088.5",
# "8088.5",
# "8088.5",
# "0.04788623"
# ]
#
return [
self.safe_integer(ohlcv, 0),
self.safe_number(ohlcv, 1),
self.safe_number(ohlcv, 2),
self.safe_number(ohlcv, 3),
self.safe_number(ohlcv, 4),
self.safe_number(ohlcv, 5),
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
"""
fetches historical candlestick data containing the open, high, low, and close price, and the volume of a market
:param str symbol: unified symbol of the market to fetch OHLCV data for
:param str timeframe: the length of time each candle represents
:param int|None since: timestamp in ms of the earliest candle to fetch
:param int|None limit: the maximum amount of candles to fetch
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns [[int]]: A list of candles ordered as timestamp, open, high, low, close, volume
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
'interval': self.timeframes[timeframe],
# 'limit': 1440, # default 1440, max 1440
# 'start': since,
# 'end': self.milliseconds(),
}
if since is not None:
# https://github.com/ccxt/ccxt/issues/9227
duration = self.parse_timeframe(timeframe)
request['start'] = since
if limit is None:
limit = 1440
request['end'] = self.sum(since, limit * duration * 1000)
if limit is not None:
request['limit'] = limit # default 1440, max 1440
response = await self.publicGetMarketCandles(self.extend(request, params))
#
# [
# [1590383700000,"8088.5","8088.5","8088.5","8088.5","0.04788623"],
# [1590383580000,"8091.3","8091.5","8091.3","8091.5","0.04931221"],
# [1590383520000,"8090.3","8092.7","8090.3","8092.5","0.04001286"],
# ]
#
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def parse_balance(self, response):
result = {
'info': response,
'timestamp': None,
'datetime': None,
}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'symbol')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_string(balance, 'available')
account['used'] = self.safe_string(balance, 'inOrder')
result[code] = account
return self.safe_balance(result)
async def fetch_balance(self, params={}):
"""
query for balance and get the amount of funds available for trading or funds locked in orders
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: a `balance structure <https://docs.ccxt.com/en/latest/manual.html?#balance-structure>`
"""
await self.load_markets()
response = await self.privateGetBalance(params)
#
# [
# {
# "symbol": "BTC",
# "available": "1.57593193",
# "inOrder": "0.74832374"
# }
# ]
#
return self.parse_balance(response)
async def fetch_deposit_address(self, code, params={}):
"""
        fetch the deposit address for a currency associated with this account
:param str code: unified currency code
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: an `address structure <https://docs.ccxt.com/en/latest/manual.html#address-structure>`
"""
await self.load_markets()
currency = self.currency(code)
request = {
'symbol': currency['id'],
}
response = await self.privateGetDeposit(self.extend(request, params))
#
# {
# "address": "0x449889e3234514c45d57f7c5a571feba0c7ad567",
# "paymentId": "10002653"
# }
#
address = self.safe_string(response, 'address')
tag = self.safe_string(response, 'paymentId')
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'network': None,
'info': response,
}
async def create_order(self, symbol, type, side, amount, price=None, params={}):
"""
create a trade order
:param str symbol: unified symbol of the market to create an order in
:param str type: 'market' or 'limit'
:param str side: 'buy' or 'sell'
:param float amount: how much of currency you want to trade in units of base currency
        :param float price: the price at which the order is to be fulfilled, in units of the quote currency, ignored in market orders
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: an `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
'side': side,
'orderType': type, # 'market', 'limit', 'stopLoss', 'stopLossLimit', 'takeProfit', 'takeProfitLimit'
# 'amount': self.amount_to_precision(symbol, amount),
# 'price': self.price_to_precision(symbol, price),
# 'amountQuote': self.cost_to_precision(symbol, cost),
# 'timeInForce': 'GTC', # 'GTC', 'IOC', 'FOK'
# 'selfTradePrevention': 'decrementAndCancel', # 'decrementAndCancel', 'cancelOldest', 'cancelNewest', 'cancelBoth'
# 'postOnly': False,
# 'disableMarketProtection': False, # don't cancel if the next fill price is 10% worse than the best fill price
# 'responseRequired': True, # False is faster
}
isStopLimit = (type == 'stopLossLimit') or (type == 'takeProfitLimit')
isStopMarket = (type == 'stopLoss') or (type == 'takeProfit')
if type == 'market':
cost = None
if price is not None:
cost = amount * price
else:
cost = self.safe_number_2(params, 'cost', 'amountQuote')
if cost is not None:
precision = market['precision']['price']
request['amountQuote'] = self.decimal_to_precision(cost, TRUNCATE, precision, self.precisionMode)
else:
request['amount'] = self.amount_to_precision(symbol, amount)
params = self.omit(params, ['cost', 'amountQuote'])
elif type == 'limit':
request['price'] = self.price_to_precision(symbol, price)
request['amount'] = self.amount_to_precision(symbol, amount)
elif isStopMarket or isStopLimit:
stopPrice = self.safe_number_2(params, 'stopPrice', 'triggerAmount')
if stopPrice is None:
if isStopLimit:
raise ArgumentsRequired(self.id + ' createOrder() requires a stopPrice parameter for a ' + type + ' order')
elif isStopMarket:
if price is None:
raise ArgumentsRequired(self.id + ' createOrder() requires a price argument or a stopPrice parameter for a ' + type + ' order')
else:
stopPrice = price
if isStopLimit:
request['price'] = self.price_to_precision(symbol, price)
params = self.omit(params, ['stopPrice', 'triggerAmount'])
request['triggerAmount'] = self.price_to_precision(symbol, stopPrice)
request['triggerType'] = 'price'
request['amount'] = self.amount_to_precision(symbol, amount)
response = await self.privatePostOrder(self.extend(request, params))
#
# {
# "orderId":"af76d6ce-9f7c-4006-b715-bb5d430652d0",
# "market":"ETH-EUR",
# "created":1590505649241,
# "updated":1590505649241,
# "status":"filled",
# "side":"sell",
# "orderType":"market",
# "amount":"0.249825",
# "amountRemaining":"0",
# "onHold":"0",
# "onHoldCurrency":"ETH",
# "filledAmount":"0.249825",
# "filledAmountQuote":"45.84038925",
# "feePaid":"0.12038925",
# "feeCurrency":"EUR",
# "fills":[
# {
# "id":"b0c86aa5-6ed3-4a2d-ba3a-be9a964220f4",
# "timestamp":1590505649245,
# "amount":"0.249825",
# "price":"183.49",
# "taker":true,
# "fee":"0.12038925",
# "feeCurrency":"EUR",
# "settled":true
# }
# ],
# "selfTradePrevention":"decrementAndCancel",
# "visible":false,
# "disableMarketProtection":false
# }
#
return self.parse_order(response, market)
async def edit_order(self, id, symbol, type, side, amount=None, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {}
amountRemaining = self.safe_number(params, 'amountRemaining')
params = self.omit(params, 'amountRemaining')
if price is not None:
request['price'] = self.price_to_precision(symbol, price)
if amount is not None:
request['amount'] = self.amount_to_precision(symbol, amount)
if amountRemaining is not None:
request['amountRemaining'] = self.amount_to_precision(symbol, amountRemaining)
request = self.extend(request, params)
if request:
request['orderId'] = id
request['market'] = market['id']
response = await self.privatePutOrder(self.extend(request, params))
return self.parse_order(response, market)
else:
raise ArgumentsRequired(self.id + ' editOrder() requires an amount argument, or a price argument, or non-empty params')
async def cancel_order(self, id, symbol=None, params={}):
"""
cancels an open order
:param str id: order id
:param str symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'orderId': id,
'market': market['id'],
}
response = await self.privateDeleteOrder(self.extend(request, params))
#
# {
# "orderId": "2e7ce7fc-44e2-4d80-a4a7-d079c4750b61"
# }
#
return self.parse_order(response, market)
async def cancel_all_orders(self, symbol=None, params={}):
"""
cancel all open orders
        :param str|None symbol: unified market symbol, only orders in the market of this symbol are cancelled when symbol is not None
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
request = {}
market = None
if symbol is not None:
market = self.market(symbol)
request['market'] = market['id']
response = await self.privateDeleteOrders(self.extend(request, params))
#
# [
# {
# "orderId": "1be6d0df-d5dc-4b53-a250-3376f3b393e6"
# }
# ]
#
return self.parse_orders(response, market)
async def fetch_order(self, id, symbol=None, params={}):
"""
fetches information on an order made by the user
:param str symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'orderId': id,
'market': market['id'],
}
response = await self.privateGetOrder(self.extend(request, params))
#
# {
# "orderId":"af76d6ce-9f7c-4006-b715-bb5d430652d0",
# "market":"ETH-EUR",
# "created":1590505649241,
# "updated":1590505649241,
# "status":"filled",
# "side":"sell",
# "orderType":"market",
# "amount":"0.249825",
# "amountRemaining":"0",
# "onHold":"0",
# "onHoldCurrency":"ETH",
# "filledAmount":"0.249825",
# "filledAmountQuote":"45.84038925",
# "feePaid":"0.12038925",
# "feeCurrency":"EUR",
# "fills":[
# {
# "id":"b0c86aa5-6ed3-4a2d-ba3a-be9a964220f4",
# "timestamp":1590505649245,
# "amount":"0.249825",
# "price":"183.49",
# "taker":true,
# "fee":"0.12038925",
# "feeCurrency":"EUR",
# "settled":true
# }
# ],
# "selfTradePrevention":"decrementAndCancel",
# "visible":false,
# "disableMarketProtection":false
# }
#
return self.parse_order(response, market)
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
# 'limit': 500,
# 'start': since,
# 'end': self.milliseconds(),
# 'orderIdFrom': 'af76d6ce-9f7c-4006-b715-bb5d430652d0',
# 'orderIdTo': 'af76d6ce-9f7c-4006-b715-bb5d430652d0',
}
if since is not None:
request['start'] = since
if limit is not None:
request['limit'] = limit # default 500, max 1000
response = await self.privateGetOrders(self.extend(request, params))
#
# [
# {
# "orderId":"af76d6ce-9f7c-4006-b715-bb5d430652d0",
# "market":"ETH-EUR",
# "created":1590505649241,
# "updated":1590505649241,
# "status":"filled",
# "side":"sell",
# "orderType":"market",
# "amount":"0.249825",
# "amountRemaining":"0",
# "onHold":"0",
# "onHoldCurrency":"ETH",
# "filledAmount":"0.249825",
# "filledAmountQuote":"45.84038925",
# "feePaid":"0.12038925",
# "feeCurrency":"EUR",
# "fills":[
# {
# "id":"b0c86aa5-6ed3-4a2d-ba3a-be9a964220f4",
# "timestamp":1590505649245,
# "amount":"0.249825",
# "price":"183.49",
# "taker":true,
# "fee":"0.12038925",
# "feeCurrency":"EUR",
# "settled":true
# }
# ],
# "selfTradePrevention":"decrementAndCancel",
# "visible":false,
# "disableMarketProtection":false
# }
# ]
#
return self.parse_orders(response, market, since, limit)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all unfilled currently open orders
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch open orders for
:param int|None limit: the maximum number of open orders structures to retrieve
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
request = {
# 'market': market['id'], # rate limit 25 without a market, 1 with market specified
}
market = None
if symbol is not None:
market = self.market(symbol)
request['market'] = market['id']
response = await self.privateGetOrdersOpen(self.extend(request, params))
#
# [
# {
# "orderId":"af76d6ce-9f7c-4006-b715-bb5d430652d0",
# "market":"ETH-EUR",
# "created":1590505649241,
# "updated":1590505649241,
# "status":"filled",
# "side":"sell",
# "orderType":"market",
# "amount":"0.249825",
# "amountRemaining":"0",
# "onHold":"0",
# "onHoldCurrency":"ETH",
# "filledAmount":"0.249825",
# "filledAmountQuote":"45.84038925",
# "feePaid":"0.12038925",
# "feeCurrency":"EUR",
# "fills":[
# {
# "id":"b0c86aa5-6ed3-4a2d-ba3a-be9a964220f4",
# "timestamp":1590505649245,
# "amount":"0.249825",
# "price":"183.49",
# "taker":true,
# "fee":"0.12038925",
# "feeCurrency":"EUR",
# "settled":true
# }
# ],
# "selfTradePrevention":"decrementAndCancel",
# "visible":false,
# "disableMarketProtection":false
# }
# ]
#
return self.parse_orders(response, market, since, limit)
def parse_order_status(self, status):
statuses = {
'new': 'open',
'canceled': 'canceled',
'canceledAuction': 'canceled',
'canceledSelfTradePrevention': 'canceled',
'canceledIOC': 'canceled',
'canceledFOK': 'canceled',
'canceledMarketProtection': 'canceled',
'canceledPostOnly': 'canceled',
'filled': 'closed',
'partiallyFilled': 'open',
'expired': 'canceled',
'rejected': 'canceled',
'awaitingTrigger': 'open', # https://github.com/ccxt/ccxt/issues/8489
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# cancelOrder, cancelAllOrders
#
# {
# "orderId": "2e7ce7fc-44e2-4d80-a4a7-d079c4750b61"
# }
#
# createOrder, fetchOrder, fetchOpenOrders, fetchOrders, editOrder
#
# {
# "orderId":"af76d6ce-9f7c-4006-b715-bb5d430652d0",
# "market":"ETH-EUR",
# "created":1590505649241,
# "updated":1590505649241,
# "status":"filled",
# "side":"sell",
# "orderType":"market",
# "amount":"0.249825",
# "amountRemaining":"0",
# "price": "183.49", # limit orders only
# "onHold":"0",
# "onHoldCurrency":"ETH",
# "filledAmount":"0.249825",
# "filledAmountQuote":"45.84038925",
# "feePaid":"0.12038925",
# "feeCurrency":"EUR",
# "fills":[
# {
# "id":"b0c86aa5-6ed3-4a2d-ba3a-be9a964220f4",
# "timestamp":1590505649245,
# "amount":"0.249825",
# "price":"183.49",
# "taker":true,
# "fee":"0.12038925",
# "feeCurrency":"EUR",
# "settled":true
# }
# ],
# "selfTradePrevention":"decrementAndCancel",
# "visible":false,
# "disableMarketProtection":false
# "timeInForce": "GTC",
# "postOnly": True,
# }
#
id = self.safe_string(order, 'orderId')
timestamp = self.safe_integer(order, 'created')
marketId = self.safe_string(order, 'market')
market = self.safe_market(marketId, market, '-')
symbol = market['symbol']
status = self.parse_order_status(self.safe_string(order, 'status'))
side = self.safe_string(order, 'side')
type = self.safe_string(order, 'orderType')
price = self.safe_string(order, 'price')
amount = self.safe_string(order, 'amount')
remaining = self.safe_string(order, 'amountRemaining')
filled = self.safe_string(order, 'filledAmount')
cost = self.safe_string(order, 'filledAmountQuote')
fee = None
feeCost = self.safe_number(order, 'feePaid')
if feeCost is not None:
feeCurrencyId = self.safe_string(order, 'feeCurrency')
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
}
rawTrades = self.safe_value(order, 'fills', [])
timeInForce = self.safe_string(order, 'timeInForce')
postOnly = self.safe_value(order, 'postOnly')
# https://github.com/ccxt/ccxt/issues/8489
stopPrice = self.safe_number(order, 'triggerPrice')
return self.safe_order({
'info': order,
'id': id,
'clientOrderId': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'postOnly': postOnly,
'side': side,
'price': price,
'stopPrice': stopPrice,
'amount': amount,
'cost': cost,
'average': None,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': fee,
'trades': rawTrades,
}, market)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all trades made by the user
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch trades for
:param int|None limit: the maximum number of trades structures to retrieve
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html#trade-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
# 'limit': 500,
# 'start': since,
# 'end': self.milliseconds(),
# 'tradeIdFrom': 'af76d6ce-9f7c-4006-b715-bb5d430652d0',
# 'tradeIdTo': 'af76d6ce-9f7c-4006-b715-bb5d430652d0',
}
if since is not None:
request['start'] = since
if limit is not None:
request['limit'] = limit # default 500, max 1000
response = await self.privateGetTrades(self.extend(request, params))
#
# [
# {
# "id":"b0c86aa5-6ed3-4a2d-ba3a-be9a964220f4",
# "orderId":"af76d6ce-9f7c-4006-b715-bb5d430652d0",
# "timestamp":1590505649245,
# "market":"ETH-EUR",
# "side":"sell",
# "amount":"0.249825",
# "price":"183.49",
# "taker":true,
# "fee":"0.12038925",
# "feeCurrency":"EUR",
# "settled":true
# }
# ]
#
return self.parse_trades(response, market, since, limit)
async def withdraw(self, code, amount, address, tag=None, params={}):
"""
make a withdrawal
:param str code: unified currency code
:param float amount: the amount to withdraw
:param str address: the address to withdraw to
:param str|None tag:
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns dict: a `transaction structure <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
tag, params = self.handle_withdraw_tag_and_params(tag, params)
self.check_address(address)
await self.load_markets()
currency = self.currency(code)
request = {
'symbol': currency['id'],
'amount': self.currency_to_precision(code, amount),
'address': address, # address or IBAN
# 'internal': False, # transfer to another Bitvavo user address, no fees
# 'addWithdrawalFee': False, # True = add the fee on top, otherwise the fee is subtracted from the amount
}
if tag is not None:
request['paymentId'] = tag
response = await self.privatePostWithdrawal(self.extend(request, params))
#
# {
# "success": True,
# "symbol": "BTC",
# "amount": "1.5"
# }
#
return self.parse_transaction(response, currency)
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
"""
fetch all withdrawals made from an account
:param str|None code: unified currency code
:param int|None since: the earliest time in ms to fetch withdrawals for
:param int|None limit: the maximum number of withdrawals structures to retrieve
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns [dict]: a list of `transaction structures <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
await self.load_markets()
request = {
# 'symbol': currency['id'],
# 'limit': 500, # default 500, max 1000
# 'start': since,
# 'end': self.milliseconds(),
}
currency = None
if code is not None:
currency = self.currency(code)
request['symbol'] = currency['id']
if since is not None:
request['start'] = since
if limit is not None:
request['limit'] = limit # default 500, max 1000
response = await self.privateGetWithdrawalHistory(self.extend(request, params))
#
# [
# {
# "timestamp":1590531212000,
# "symbol":"ETH",
# "amount":"0.091",
# "fee":"0.009",
# "status":"awaiting_bitvavo_inspection",
# "address":"0xe42b309f1eE9F0cbf7f54CcF3bc2159eBfA6735b",
# "paymentId": "10002653",
# "txId": "927b3ea50c5bb52c6854152d305dfa1e27fc01d10464cf10825d96d69d235eb3",
# }
# ]
#
return self.parse_transactions(response, currency, since, limit, {'type': 'withdrawal'})
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
"""
fetch all deposits made to an account
:param str|None code: unified currency code
:param int|None since: the earliest time in ms to fetch deposits for
:param int|None limit: the maximum number of deposits structures to retrieve
:param dict params: extra parameters specific to the bitvavo api endpoint
:returns [dict]: a list of `transaction structures <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
await self.load_markets()
request = {
# 'symbol': currency['id'],
# 'limit': 500, # default 500, max 1000
# 'start': since,
# 'end': self.milliseconds(),
}
currency = None
if code is not None:
currency = self.currency(code)
request['symbol'] = currency['id']
if since is not None:
request['start'] = since
if limit is not None:
request['limit'] = limit # default 500, max 1000
response = await self.privateGetDepositHistory(self.extend(request, params))
#
# [
# {
# "timestamp":1590492401000,
# "symbol":"ETH",
# "amount":"0.249825",
# "fee":"0",
# "status":"completed",
# "txId":"0x5167b473fd37811f9ef22364c3d54726a859ef9d98934b3a1e11d7baa8d2c2e2"
# }
# ]
#
return self.parse_transactions(response, currency, since, limit, {'type': 'deposit'})
def parse_transaction_status(self, status):
statuses = {
'awaiting_processing': 'pending',
'awaiting_email_confirmation': 'pending',
'awaiting_bitvavo_inspection': 'pending',
'approved': 'pending',
'sending': 'pending',
'in_mempool': 'pending',
'processed': 'pending',
'completed': 'ok',
'canceled': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# withdraw
#
# {
# "success": True,
# "symbol": "BTC",
# "amount": "1.5"
# }
#
# fetchWithdrawals
#
# {
# "timestamp": 1542967486256,
# "symbol": "BTC",
# "amount": "0.99994",
# "address": "BitcoinAddress",
# "paymentId": "10002653",
# "txId": "927b3ea50c5bb52c6854152d305dfa1e27fc01d10464cf10825d96d69d235eb3",
# "fee": "0.00006",
# "status": "awaiting_processing"
# }
#
# fetchDeposits
#
# {
# "timestamp":1590492401000,
# "symbol":"ETH",
# "amount":"0.249825",
# "fee":"0",
# "status":"completed",
# "txId":"0x5167b473fd37811f9ef22364c3d54726a859ef9d98934b3a1e11d7baa8d2c2e2"
# }
#
id = None
timestamp = self.safe_integer(transaction, 'timestamp')
currencyId = self.safe_string(transaction, 'symbol')
code = self.safe_currency_code(currencyId, currency)
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
amount = self.safe_number(transaction, 'amount')
address = self.safe_string(transaction, 'address')
txid = self.safe_string(transaction, 'txId')
fee = None
feeCost = self.safe_number(transaction, 'fee')
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': code,
}
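        # The payload shapes documented above show that withdraw()/fetchWithdrawals
        # responses carry a 'success' or 'address' key while deposit rows do not;
        # the membership test below relies on that to infer the transaction type.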
type = None
if ('success' in transaction) or ('address' in transaction):
type = 'withdrawal'
else:
type = 'deposit'
tag = self.safe_string(transaction, 'paymentId')
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'addressFrom': None,
'address': address,
'addressTo': address,
'tagFrom': None,
'tag': tag,
'tagTo': tag,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': None,
'fee': fee,
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
query = self.omit(params, self.extract_params(path))
url = '/' + self.version + '/' + self.implode_params(path, params)
getOrDelete = (method == 'GET') or (method == 'DELETE')
if getOrDelete:
if query:
url += '?' + self.urlencode(query)
if api == 'private':
self.check_required_credentials()
payload = ''
if not getOrDelete:
if query:
body = self.json(query)
payload = body
timestamp = str(self.milliseconds())
auth = timestamp + method + url + payload
signature = self.hmac(self.encode(auth), self.encode(self.secret))
accessWindow = self.safe_string(self.options, 'BITVAVO-ACCESS-WINDOW', '10000')
headers = {
'BITVAVO-ACCESS-KEY': self.apiKey,
'BITVAVO-ACCESS-SIGNATURE': signature,
'BITVAVO-ACCESS-TIMESTAMP': timestamp,
'BITVAVO-ACCESS-WINDOW': accessWindow,
}
if not getOrDelete:
headers['Content-Type'] = 'application/json'
url = self.urls['api'][api] + url
return {'url': url, 'method': method, 'body': body, 'headers': headers}
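    # Signing sketch for the private API handled above: the pre-image is
    # timestamp + method + url + payload, HMAC-SHA256-signed and sent as a hex
    # digest, e.g. (illustrative values only, not a real request):
    #
    #     auth = '1590505649241' + 'GET' + '/v2/account' + ''
    #     signature = hmac.new(secret, auth.encode(), hashlib.sha256).hexdigest()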
def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return # fallback to default error handler
#
# {"errorCode":308,"error":"The signature length is invalid(HMAC-SHA256 should return a 64 length hexadecimal string)."}
# {"errorCode":203,"error":"symbol parameter is required."}
# {"errorCode":205,"error":"symbol parameter is invalid."}
#
errorCode = self.safe_string(response, 'errorCode')
error = self.safe_string(response, 'error')
if errorCode is not None:
feedback = self.id + ' ' + body
self.throw_broadly_matched_exception(self.exceptions['broad'], error, feedback)
self.throw_exactly_matched_exception(self.exceptions['exact'], errorCode, feedback)
raise ExchangeError(feedback) # unknown message
def calculate_rate_limiter_cost(self, api, method, path, params, config={}, context={}):
if ('noMarket' in config) and not ('market' in params):
return config['noMarket']
return self.safe_value(config, 'cost', 1)
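    # Example: 'ticker/24h' is configured as {'cost': 1, 'noMarket': 25} in
    # describe(), so calling it without a 'market' param is charged 25
    # rate-limit units by the helper above.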
avg_line_length: 45.082826 | max_line_length: 340 | alphanum_fraction: 0.514184
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 7909007417512d15c6c39b097333e348548af6ea
size: 586 | ext: py | lang: Python
path: submissions/submissions_22.py
repo: arnabbiswas1/k_tab_sept_roc_auc_binary_classification_KFold @ 7a5a91e52d460fd25133b76d5241462a4aedc474
licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null
import os
import pandas as pd
COMPETITION_NAME = "tabular-playground-series-sep-2021"
SUBMISSION_DIR = "."
SUBMISSION_FILE = "sub_blending_1_my_rank_004-2o-lightgbm-colsample_81830_my_ranking_81790_0926_1918.csv"
SUBMISSION_MESSAGE = '"004-2o-lightgbm-colsample-tps-sep-2021 + stacking_lgb_xbg_cat_imputer_no_imputer"'
df = pd.read_csv(f"{SUBMISSION_DIR}/{SUBMISSION_FILE}")
print(df.head())
submission_string = f"kaggle competitions submit {COMPETITION_NAME} -f {SUBMISSION_DIR}/{SUBMISSION_FILE} -m {SUBMISSION_MESSAGE}"
print(submission_string)
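# The assembled command printed above expands to (illustrative; values come
# straight from the constants defined in this script):
#   kaggle competitions submit tabular-playground-series-sep-2021 -f ./sub_blending_1_my_rank_004-2o-lightgbm-colsample_81830_my_ranking_81790_0926_1918.csv -m "004-2o-lightgbm-colsample-tps-sep-2021 + stacking_lgb_xbg_cat_imputer_no_imputer"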
os.system(submission_string)
avg_line_length: 29.3 | max_line_length: 130 | alphanum_fraction: 0.812287
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 7909036782a088913328994be9d41fb8ab88ca3e
size: 3684 | ext: py | lang: Python
path: collectd-smartmon.py
repo: nlm/collectd-smartmon @ 7fbbf1a8074dff59dfead8df5069c811563c9aff
licenses: ["MIT"]
stars: null | issues: null | forks: null
#!/usr/bin/env python
from __future__ import print_function, absolute_import
import re
import subprocess
import os
import time
import argparse
import sys
class SmartDevice(object):
smartcmdfmt = ['sudo', 'smartctl', '-f', 'brief', '-A', '/dev/{dev}']
def __init__(self, dev):
self.dev = dev
self.attrcmd = [x.format(dev=dev) for x in self.smartcmdfmt]
def attributes(self):
try:
out = subprocess.check_output(self.attrcmd)
except (OSError, subprocess.CalledProcessError) as err:
print('Error running command: {0}'.format(err), file=sys.stderr)
return
for line in out.split("\n"):
            # raw string literals so '\s' and '\d' are regex escapes,
            # not (deprecated) string escapes
            res = re.match(r'\s*(?P<id>\d+)\s+(?P<name>[\w-]+)\s+'
                           r'(?P<flags>[POSRCK-]{6})\s+'
                           r'(?P<value>\d+)\s+(?P<worst>\d+)\s+'
                           r'(?P<thres>\d+)\s+(?P<fail>[\w-]+)\s+'
                           r'(?P<raw_value>\d+)', line)
if not res:
continue
yield res.groupdict()
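# Illustrative 'smartctl -f brief -A' data row that attributes() is meant to
# parse (columns: ID#, NAME, FLAGS, VALUE, WORST, THRESH, FAIL, RAW_VALUE):
#   1 Raw_Read_Error_Rate     POSR-K   100   100   062    -    0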
def dev_exists(dev):
return os.path.exists('/dev/{0}'.format(dev))
def get_filelist(dirname, pattern):
return [f for f in os.listdir(dirname) if re.match(pattern, f)]
def expand_devices(devlist):
expanded_devlist = []
for dev in devlist:
if dev == 'autodetect':
expanded_devlist.extend(get_filelist('/dev', r'^sd[a-z]+$'))
else:
expanded_devlist.append(dev)
return sorted(list(set(expanded_devlist)))
def smartmon_loop(devices, hostname, interval):
while True:
for dev in devices:
if dev_exists(dev):
for attr in SmartDevice(dev).attributes():
print('PUTVAL "{hostname}/smart-{dev}'
'/absolute-{attr_id:03d}_{attr_name}"'
' interval={interval:d} N:{value:d}'
.format(hostname=hostname, dev=dev,
attr_id=int(attr['id']),
attr_name=attr.get('name'),
interval=int(interval),
value=int(attr['raw_value'])))
time.sleep(interval)
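# Each attribute is emitted in collectd's plain-text exec-plugin protocol,
# e.g. (illustrative values):
#   PUTVAL "host.example.com/smart-sda/absolute-005_Reallocated_Sector_Ct" interval=300 N:0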
def main():
parser = argparse.ArgumentParser()
parser.add_argument('dev', nargs='*',
help='devices to check (default: autodetect)')
parser.add_argument('-H', '--hostname', type=str,
help='override hostname provided by collectd',
default=os.environ.get('COLLECTD_HOSTNAME'))
parser.add_argument('-i', '--interval', type=int,
help='override interval provided by collectd',
default=int(float(os.environ.get('COLLECTD_INTERVAL', 300))))
parser.add_argument('-c', '--dont-check-devs',
action='store_true', default=False,
help='do not check devices existence at startup')
args = parser.parse_args()
hostname = (args.hostname
or subprocess.check_output(['hostname', '-f']).strip())
if len(hostname) == 0:
parser.error('unable to detect hostname')
interval = max(args.interval, 5)
if len(args.dev) == 0:
devices = expand_devices(['autodetect'])
else:
devices = expand_devices(args.dev)
if not args.dont_check_devs:
for dev in devices:
if not dev_exists(dev):
parser.error('device "/dev/{0}" does not exist'.format(dev))
try:
smartmon_loop(devices, hostname, interval)
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
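# Example collectd exec-plugin style invocation (illustrative hostname/interval):
#   COLLECTD_HOSTNAME=host.example.com COLLECTD_INTERVAL=300 ./collectd-smartmon.py sda sdb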
avg_line_length: 35.76699 | max_line_length: 85 | alphanum_fraction: 0.540717
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 7909039deafd00c16d2abe8b5ae9232446976ad3
size: 8913 | ext: py | lang: Python
path: custom/icds/management/commands/populate_mother_name.py
repo: tobiasmcnulty/commcare-hq @ 234aa1fba98a96de1b625bbd70b2066fc877eed1
licenses: ["BSD-3-Clause"]
stars: 1 (2020-07-14T13:00:23.000Z to 2020-07-14T13:00:23.000Z) | issues: null | forks: null
import csv
import six
import sys
import time
from datetime import (
datetime,
date,
timedelta,
)
from xml.etree import cElementTree as ElementTree
from django.core.management.base import BaseCommand
from corehq.apps.users.util import SYSTEM_USER_ID
from corehq.form_processor.backends.sql.dbaccessors import CaseAccessorSQL, CaseReindexAccessor
from corehq.form_processor.exceptions import CaseNotFound
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.apps.locations.models import SQLLocation
from corehq.apps.hqcase.utils import submit_case_blocks
from corehq.form_processor.backends.sql.dbaccessors import iter_all_rows
from casexml.apps.case.mock import CaseBlock
DOMAIN = "icds-cas"
CASE_TYPE = "person"
CUT_OFF_AGE_IN_YEARS = 6
date_today = date.today()
CUT_OFF_DOB = str(date_today.replace(year=date_today.year - CUT_OFF_AGE_IN_YEARS))
DOB_PROPERTY = "dob"
MOTHER_NAME_PROPERTY = "mother_name"
MOTHER_INDEX_IDENTIFIER = "mother"
CASE_ITERATION_COUNT = 10000
MAX_RESCUE_EXCEPTIONS_ON_UPDATE = 5
CSV_HEADERS = ['Case ID', 'Mother Case ID', 'Mother Name']
class Command(BaseCommand):
help = """
Iterate person cases updated in last 100 days (3 months with buffer) in a single partition,
Find the ones which are
- not deleted
- not belonging to test locations,
- with age less than 6 years using dob case property,
- if there is related mother case, populate mother_name case property with it's name
Returns two lists of case ids, the ones updated and the ones that could not be updated
"""
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.db_alias = None
self.log_progress = False
self.test_locations = None
self.case_accessor = CaseAccessors(DOMAIN)
def add_arguments(self, parser):
parser.add_argument('db_alias')
parser.add_argument(
'--log',
action='store_true',
dest='log_progress',
default=False,
help="log progress"
)
def handle(self, db_alias, log_progress, **options):
self.db_alias = db_alias
self.log_progress = log_progress
self.test_locations = find_test_awc_locations()
filename = self._find_case_ids_without_mother_name()
if self.log_progress:
print('starting update now for cases')
self._update_cases(filename)
def _find_case_ids_without_mother_name(self):
start_date = date.today() - timedelta(days=100)
reindex_accessor = CaseReindexAccessor(
domain=DOMAIN,
case_type=CASE_TYPE, limit_db_aliases=[self.db_alias],
start_date=start_date
)
filename = 'cases_without_mother_name_part_%s_%s.csv' % (self.db_alias, datetime.utcnow())
cases_with_no_mother_name_filename = 'cases_with_no_mother_name_part_%s_%s.csv' % (
self.db_alias, datetime.utcnow())
with open(filename, 'w') as output:
with open(cases_with_no_mother_name_filename, 'w') as no_mother_name_file:
cases_iterated = 0
writer = csv.writer(output)
writer.writerow(CSV_HEADERS)
no_mother_name_writer = csv.writer(no_mother_name_file)
no_mother_name_writer.writerow(['Case ID'])
if self.log_progress:
print('iterating now')
for case in iter_all_rows(reindex_accessor):
if self.log_progress and cases_iterated % CASE_ITERATION_COUNT == 0:
print("cases iterated: %s" % cases_iterated)
cases_iterated += 1
if self._case_needs_to_be_updated(case):
mother_case_id, mother_name = self._find_mother_case_id_and_name(case)
if mother_case_id and mother_name:
writer.writerow([case.case_id, mother_case_id, mother_name])
else:
no_mother_name_writer.writerow([case.case_id])
return filename
def _find_mother_case_id_and_name(self, case):
mother_case_ids = [i.referenced_id for i in CaseAccessorSQL.get_indices(DOMAIN, case.case_id)
if i.identifier == MOTHER_INDEX_IDENTIFIER]
if len(mother_case_ids) == 1:
try:
mother_case = self.case_accessor.get_case(mother_case_ids[0])
except CaseNotFound:
pass
else:
return mother_case.case_id, mother_case.name
return None, None
def _case_needs_to_be_updated(self, case):
if case.deleted:
return False
assert case.type == CASE_TYPE
if bool(case.owner_id) and case.owner_id in self.test_locations:
return False
dob = case.get_case_property(DOB_PROPERTY)
if dob and dob > CUT_OFF_DOB and not case.get_case_property(MOTHER_NAME_PROPERTY):
return True
return False
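    # Note: dob values and CUT_OFF_DOB are ISO-8601 date strings, so the
    # lexicographic comparison in _case_needs_to_be_updated() above matches
    # chronological order.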
def _update_cases(self, filename):
exceptions_raised = 0
updates = {} # case id: mother name
counter = 0
with open(filename, 'r') as _input:
reader = csv.DictReader(_input)
with open('cases_without_mother_name_part_%s_updated.csv' % self.db_alias, 'w') as output:
writer = csv.writer(output)
writer.writerow(['Case ID', 'Mother Name'])
for row in reader:
updates[row['Case ID']] = row['Mother Name']
counter += 1
if counter > 0 and counter % 100 == 0:
case_ids = self._reassured_case_ids_to_update(list(updates.keys()))
skip_ids = updates.keys() - case_ids
for case_id in skip_ids:
updates.pop(case_id)
for case_id, mother_name in updates.items():
writer.writerow([case_id, mother_name])
exceptions_raised = self._submit_update_form(updates, exceptions_raised)
if self.log_progress:
print("cases updated: %s" % counter)
updates = {}
counter = 0
# update the pending batch
for case_id, mother_name in updates.items():
writer.writerow([case_id, mother_name])
exceptions_raised = self._submit_update_form(updates, exceptions_raised)
def _submit_update_form(self, updates, exceptions_raised):
update_case_blocks = self.create_case_blocks(updates)
if not update_case_blocks:
return exceptions_raised
for attempt in range(MAX_RESCUE_EXCEPTIONS_ON_UPDATE):
try:
submit_case_blocks(update_case_blocks, DOMAIN, user_id=SYSTEM_USER_ID)
except Exception as e:
exc = sys.exc_info()
exceptions_raised += 1
if self.log_progress:
print("rescuing exception %s %s" % (exceptions_raised, str(e)))
if exceptions_raised > MAX_RESCUE_EXCEPTIONS_ON_UPDATE:
six.reraise(*exc)
else:
time.sleep(60) # wait for 1 min before trying again
else:
break
return exceptions_raised
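    # _submit_update_form() retries transient submission failures with a 60s
    # pause between attempts and re-raises once more than
    # MAX_RESCUE_EXCEPTIONS_ON_UPDATE exceptions have accumulated across calls.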
def create_case_blocks(self, updates):
case_blocks = []
for case_id, mother_name in updates.items():
case_block = CaseBlock.deprecated_init(case_id,
update={MOTHER_NAME_PROPERTY: mother_name},
user_id=SYSTEM_USER_ID)
case_block = ElementTree.tostring(case_block.as_xml()).decode('utf-8')
case_blocks.append(case_block)
return case_blocks
def _reassured_case_ids_to_update(self, case_ids):
# reconfirm the cases before updating to avoid removing updates in between
# fetching case ids and updating
invalid_cases = self.case_accessor.get_cases(case_ids)
case_ids_list = set()
for invalid_case in invalid_cases:
if self._case_needs_to_be_updated(invalid_case):
case_ids_list.add(invalid_case.case_id)
return case_ids_list
def find_test_awc_locations():
test_locations = set()
for location in SQLLocation.active_objects.filter(location_type__code='state', domain=DOMAIN):
if location.metadata.get('is_test_location') == 'test':
test_locations.update(
location.get_descendants(include_self=True).
filter(location_type__code='awc').values_list('location_id', flat=True)
)
return test_locations
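# Typical invocation (hypothetical database alias shown):
#   python manage.py populate_mother_name p1 --log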
avg_line_length: 41.84507 | max_line_length: 102 | alphanum_fraction: 0.62392
import csv
import six
import sys
import time
from datetime import (
datetime,
date,
timedelta,
)
from xml.etree import cElementTree as ElementTree
from django.core.management.base import BaseCommand
from corehq.apps.users.util import SYSTEM_USER_ID
from corehq.form_processor.backends.sql.dbaccessors import CaseAccessorSQL, CaseReindexAccessor
from corehq.form_processor.exceptions import CaseNotFound
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.apps.locations.models import SQLLocation
from corehq.apps.hqcase.utils import submit_case_blocks
from corehq.form_processor.backends.sql.dbaccessors import iter_all_rows
from casexml.apps.case.mock import CaseBlock
DOMAIN = "icds-cas"
CASE_TYPE = "person"
CUT_OFF_AGE_IN_YEARS = 6
date_today = date.today()
CUT_OFF_DOB = str(date_today.replace(year=date_today.year - CUT_OFF_AGE_IN_YEARS))
DOB_PROPERTY = "dob"
MOTHER_NAME_PROPERTY = "mother_name"
MOTHER_INDEX_IDENTIFIER = "mother"
CASE_ITERATION_COUNT = 10000
MAX_RESCUE_EXCEPTIONS_ON_UPDATE = 5
CSV_HEADERS = ['Case ID', 'Mother Case ID', 'Mother Name']
class Command(BaseCommand):
help = """
Iterate person cases updated in last 100 days (3 months with buffer) in a single partition,
Find the ones which are
- not deleted
- not belonging to test locations,
- with age less than 6 years using dob case property,
- if there is related mother case, populate mother_name case property with it's name
Returns two lists of case ids, the ones updated and the ones that could not be updated
"""
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.db_alias = None
self.log_progress = False
self.test_locations = None
self.case_accessor = CaseAccessors(DOMAIN)
def add_arguments(self, parser):
parser.add_argument('db_alias')
parser.add_argument(
'--log',
action='store_true',
dest='log_progress',
default=False,
help="log progress"
)
def handle(self, db_alias, log_progress, **options):
self.db_alias = db_alias
self.log_progress = log_progress
self.test_locations = find_test_awc_locations()
filename = self._find_case_ids_without_mother_name()
if self.log_progress:
print('starting update now for cases')
self._update_cases(filename)
def _find_case_ids_without_mother_name(self):
start_date = date.today() - timedelta(days=100)
reindex_accessor = CaseReindexAccessor(
domain=DOMAIN,
case_type=CASE_TYPE, limit_db_aliases=[self.db_alias],
start_date=start_date
)
filename = 'cases_without_mother_name_part_%s_%s.csv' % (self.db_alias, datetime.utcnow())
cases_with_no_mother_name_filename = 'cases_with_no_mother_name_part_%s_%s.csv' % (
self.db_alias, datetime.utcnow())
with open(filename, 'w') as output:
with open(cases_with_no_mother_name_filename, 'w') as no_mother_name_file:
cases_iterated = 0
writer = csv.writer(output)
writer.writerow(CSV_HEADERS)
no_mother_name_writer = csv.writer(no_mother_name_file)
no_mother_name_writer.writerow(['Case ID'])
if self.log_progress:
print('iterating now')
for case in iter_all_rows(reindex_accessor):
if self.log_progress and cases_iterated % CASE_ITERATION_COUNT == 0:
print("cases iterated: %s" % cases_iterated)
cases_iterated += 1
if self._case_needs_to_be_updated(case):
mother_case_id, mother_name = self._find_mother_case_id_and_name(case)
if mother_case_id and mother_name:
writer.writerow([case.case_id, mother_case_id, mother_name])
else:
no_mother_name_writer.writerow([case.case_id])
return filename
def _find_mother_case_id_and_name(self, case):
mother_case_ids = [i.referenced_id for i in CaseAccessorSQL.get_indices(DOMAIN, case.case_id)
if i.identifier == MOTHER_INDEX_IDENTIFIER]
if len(mother_case_ids) == 1:
try:
mother_case = self.case_accessor.get_case(mother_case_ids[0])
except CaseNotFound:
pass
else:
return mother_case.case_id, mother_case.name
return None, None
def _case_needs_to_be_updated(self, case):
if case.deleted:
return False
assert case.type == CASE_TYPE
if bool(case.owner_id) and case.owner_id in self.test_locations:
return False
dob = case.get_case_property(DOB_PROPERTY)
if dob and dob > CUT_OFF_DOB and not case.get_case_property(MOTHER_NAME_PROPERTY):
return True
return False
def _update_cases(self, filename):
exceptions_raised = 0
updates = {} # case id: mother name
counter = 0
with open(filename, 'r') as _input:
reader = csv.DictReader(_input)
with open('cases_without_mother_name_part_%s_updated.csv' % self.db_alias, 'w') as output:
writer = csv.writer(output)
writer.writerow(['Case ID', 'Mother Name'])
for row in reader:
updates[row['Case ID']] = row['Mother Name']
counter += 1
if counter > 0 and counter % 100 == 0:
case_ids = self._reassured_case_ids_to_update(list(updates.keys()))
skip_ids = updates.keys() - case_ids
for case_id in skip_ids:
updates.pop(case_id)
for case_id, mother_name in updates.items():
writer.writerow([case_id, mother_name])
exceptions_raised = self._submit_update_form(updates, exceptions_raised)
if self.log_progress:
print("cases updated: %s" % counter)
updates = {}
counter = 0
# update the pending batch
for case_id, mother_name in updates.items():
writer.writerow([case_id, mother_name])
exceptions_raised = self._submit_update_form(updates, exceptions_raised)
def _submit_update_form(self, updates, exceptions_raised):
update_case_blocks = self.create_case_blocks(updates)
if not update_case_blocks:
return exceptions_raised
for attempt in range(MAX_RESCUE_EXCEPTIONS_ON_UPDATE):
try:
submit_case_blocks(update_case_blocks, DOMAIN, user_id=SYSTEM_USER_ID)
except Exception as e:
exc = sys.exc_info()
exceptions_raised += 1
if self.log_progress:
print("rescuing exception %s %s" % (exceptions_raised, str(e)))
if exceptions_raised > MAX_RESCUE_EXCEPTIONS_ON_UPDATE:
six.reraise(*exc)
else:
time.sleep(60) # wait for 1 min before trying again
else:
break
return exceptions_raised
def create_case_blocks(self, updates):
case_blocks = []
for case_id, mother_name in updates.items():
case_block = CaseBlock.deprecated_init(case_id,
update={MOTHER_NAME_PROPERTY: mother_name},
user_id=SYSTEM_USER_ID)
case_block = ElementTree.tostring(case_block.as_xml()).decode('utf-8')
case_blocks.append(case_block)
return case_blocks
def _reassured_case_ids_to_update(self, case_ids):
        # reconfirm the cases right before updating, to avoid applying stale
        # updates to cases that changed between fetching the ids and submitting
invalid_cases = self.case_accessor.get_cases(case_ids)
case_ids_list = set()
for invalid_case in invalid_cases:
if self._case_needs_to_be_updated(invalid_case):
case_ids_list.add(invalid_case.case_id)
return case_ids_list
def find_test_awc_locations():
test_locations = set()
for location in SQLLocation.active_objects.filter(location_type__code='state', domain=DOMAIN):
if location.metadata.get('is_test_location') == 'test':
test_locations.update(
location.get_descendants(include_self=True).
filter(location_type__code='awc').values_list('location_id', flat=True)
)
return test_locations
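A minimal invocation sketch for the command above, assuming it is registered as a Django management command (the command name used here is hypothetical, since the module's real file name is not shown):
from django.core.management import call_command
# process partition 'p1' with progress logging; this writes the CSVs and then
# submits the case updates in batches of 100
call_command('populate_mother_name', 'p1', log_progress=True)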
| true
| true
|
7909043c829cf1680c1db567491ee4d952463ffa
| 667
|
py
|
Python
|
dcodex/migrations/0035_auto_20210216_0331.py
|
rbturnbull/dcodex
|
85ac729aa9d2cfd0540738efc7d2ce5913c97351
|
[
"Apache-2.0"
] | 5
|
2020-12-19T18:21:56.000Z
|
2021-11-03T01:09:42.000Z
|
dcodex/migrations/0035_auto_20210216_0331.py
|
rbturnbull/dcodex
|
85ac729aa9d2cfd0540738efc7d2ce5913c97351
|
[
"Apache-2.0"
] | 6
|
2021-04-04T12:08:27.000Z
|
2022-03-12T01:07:00.000Z
|
dcodex/migrations/0035_auto_20210216_0331.py
|
rbturnbull/dcodex
|
85ac729aa9d2cfd0540738efc7d2ce5913c97351
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.1.3 on 2021-02-16 11:31
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("imagedeck", "0009_auto_20201122_2300"),
("dcodex", "0034_auto_20201215_0315"),
]
operations = [
migrations.AlterField(
model_name="manuscript",
name="imagedeck",
field=models.ForeignKey(
blank=True,
default=None,
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
to="imagedeck.deckbase",
),
),
]
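For reference, a sketch of what on_delete=SET_DEFAULT buys here: deleting the referenced deck resets manuscript.imagedeck to the field default (None) instead of cascading the delete. The import paths below are assumptions based on the app labels in the migration:
from imagedeck.models import DeckBase   # assumed module layout
from dcodex.models import Manuscript    # assumed module layout
deck = DeckBase.objects.create()
ms = Manuscript.objects.create(imagedeck=deck)
deck.delete()
ms.refresh_from_db()
assert ms.imagedeck is None  # reset to default=None; the manuscript row survives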
| 24.703704
| 64
| 0.562219
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("imagedeck", "0009_auto_20201122_2300"),
("dcodex", "0034_auto_20201215_0315"),
]
operations = [
migrations.AlterField(
model_name="manuscript",
name="imagedeck",
field=models.ForeignKey(
blank=True,
default=None,
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
to="imagedeck.deckbase",
),
),
]
| true
| true
|
790904fc423a70595a615822cd209d3fec2f81a0
| 788
|
py
|
Python
|
conanfile.py
|
demo-ci-conan/libB
|
16059c79804bd773a4de75728cf24408db4c6149
|
[
"MIT"
] | null | null | null |
conanfile.py
|
demo-ci-conan/libB
|
16059c79804bd773a4de75728cf24408db4c6149
|
[
"MIT"
] | null | null | null |
conanfile.py
|
demo-ci-conan/libB
|
16059c79804bd773a4de75728cf24408db4c6149
|
[
"MIT"
] | null | null | null |
from conans import ConanFile, CMake
class LibB(ConanFile):
name = "libB"
version = "0.0"
settings = "os", "arch", "compiler", "build_type"
options = {"shared": [True, False]}
default_options = {"shared": False}
generators = "cmake"
scm = {"type": "git",
"url": "auto",
"revision": "auto"}
exports_sources = "LICENSE" # to avoid build info bug
def requirements(self):
self.requires("libA/[>=0.0]@demo/testing")
self.requires("libF/0.0@demo/testing")
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
cmake.install()
def package(self):
self.copy("LICENSE", dst="licenses")
def package_info(self):
self.cpp_info.libs = ["libB",]
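A consumer recipe would pull this package in the same way libB pulls libA above; a minimal sketch (the libC name is illustrative):
from conans import ConanFile
class LibC(ConanFile):
    name = "libC"
    version = "0.0"
    def requirements(self):
        # exact pin, matching the style of the libF requirement above
        self.requires("libB/0.0@demo/testing")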
| 23.176471
| 57
| 0.568528
|
from conans import ConanFile, CMake
class LibB(ConanFile):
name = "libB"
version = "0.0"
settings = "os", "arch", "compiler", "build_type"
options = {"shared": [True, False]}
default_options = {"shared": False}
generators = "cmake"
scm = {"type": "git",
"url": "auto",
"revision": "auto"}
exports_sources = "LICENSE"
def requirements(self):
self.requires("libA/[>=0.0]@demo/testing")
self.requires("libF/0.0@demo/testing")
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
cmake.install()
def package(self):
self.copy("LICENSE", dst="licenses")
def package_info(self):
self.cpp_info.libs = ["libB",]
| true
| true
|
79090565ec3a1c5532ff5245c32e5148e4906b95
| 1,490
|
py
|
Python
|
dashboard/entities/Devices.py
|
Hexagoons/GUI-Arduino-Weather-Station
|
a4966ea765dbec61b0da6cb1c2efc30a43fa8d0d
|
[
"MIT"
] | null | null | null |
dashboard/entities/Devices.py
|
Hexagoons/GUI-Arduino-Weather-Station
|
a4966ea765dbec61b0da6cb1c2efc30a43fa8d0d
|
[
"MIT"
] | null | null | null |
dashboard/entities/Devices.py
|
Hexagoons/GUI-Arduino-Weather-Station
|
a4966ea765dbec61b0da6cb1c2efc30a43fa8d0d
|
[
"MIT"
] | null | null | null |
import tkinter as tk
from tkinter import ttk
import json
from dashboard.entities.InputField import InputField
from dashboard.entities.StatusField import StatusField
class Devices(ttk.Frame):
"""
Devices Frame for Settings
"""
def __init__(self, parent, settings):
"""
        Constructs a Devices frame
:param parent: Parent Frame
:param settings: settings class
"""
self.settings = settings
ttk.Frame.__init__(self, parent, relief="raised", borderwidth=2)
self.content = ttk.Frame(self, borderwidth=2)
self.content.pack(expand=True, fill=tk.X, side='top', anchor='n')
self.devices = []
label1 = tk.Label(self.content, text="Apparaten", font=("Verdana", 14), relief="groove")
label1.pack(expand=True, fill=tk.X, side='top')
self.render_devices()
def render_devices(self):
        # Remove current sidebar buttons
for frame in self.devices:
frame.pack_forget()
# Add sidebar buttons based on json
self.settings.load_devices()
for serial_number, data in self.settings.devices.items():
self.build_device(serial_number, data)
def build_device(self, serial_number, data):
button = ttk.Button(self.content, text=data["Name"], width=15,
command=lambda: self.settings.show_view(serial_number, self))
button.pack(fill=tk.X, pady=2)
self.devices.append(button)
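A rough usage sketch with a stub settings object; the attributes below (load_devices, devices, show_view) are inferred from the calls the class makes, not taken from the real settings class:
import tkinter as tk
class StubSettings:
    devices = {'SN-001': {'Name': 'Weather station'}}
    def load_devices(self):
        pass  # the real class would re-read the devices JSON here
    def show_view(self, serial_number, frame):
        print('selected', serial_number)
root = tk.Tk()
Devices(root, StubSettings()).pack(fill=tk.BOTH, expand=True)
root.mainloop()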
| 33.111111
| 96
| 0.641611
|
import tkinter as tk
from tkinter import ttk
import json
from dashboard.entities.InputField import InputField
from dashboard.entities.StatusField import StatusField
class Devices(ttk.Frame):
def __init__(self, parent, settings):
self.settings = settings
ttk.Frame.__init__(self, parent, relief="raised", borderwidth=2)
self.content = ttk.Frame(self, borderwidth=2)
self.content.pack(expand=True, fill=tk.X, side='top', anchor='n')
self.devices = []
label1 = tk.Label(self.content, text="Apparaten", font=("Verdana", 14), relief="groove")
label1.pack(expand=True, fill=tk.X, side='top')
self.render_devices()
def render_devices(self):
for frame in self.devices:
frame.pack_forget()
self.settings.load_devices()
for serial_number, data in self.settings.devices.items():
self.build_device(serial_number, data)
def build_device(self, serial_number, data):
button = ttk.Button(self.content, text=data["Name"], width=15,
command=lambda: self.settings.show_view(serial_number, self))
button.pack(fill=tk.X, pady=2)
self.devices.append(button)
| true
| true
|
790908c900d3cb9ed9674bb1af267a72a156b11f
| 8,127
|
py
|
Python
|
upconvert/parser/geda_commands.py
|
lehaianh1986/schematic-file-converter
|
ed67274511a5b0e1b378e4e0fd3943ec8a189f43
|
[
"Apache-2.0"
] | null | null | null |
upconvert/parser/geda_commands.py
|
lehaianh1986/schematic-file-converter
|
ed67274511a5b0e1b378e4e0fd3943ec8a189f43
|
[
"Apache-2.0"
] | null | null | null |
upconvert/parser/geda_commands.py
|
lehaianh1986/schematic-file-converter
|
ed67274511a5b0e1b378e4e0fd3943ec8a189f43
|
[
"Apache-2.0"
] | null | null | null |
class GEDAColor:
""" Enumeration of gEDA colors """
BACKGROUND_COLOR = 0
PIN_COLOR = 1
NET_ENDPOINT_COLOR = 2
GRAPHIC_COLOR = 3
NET_COLOR = 4
ATTRIBUTE_COLOR = 5
LOGIC_BUBBLE_COLOR = 6
DOTS_GRID_COLOR = 7
DETACHED_ATTRIBUTE_COLOR = 8
TEXT_COLOR = 9
BUS_COLOR = 10
SELECT_COLOR = 11
BOUNDINGBOX_COLOR = 12
ZOOM_BOX_COLOR = 13
STROKE_COLOR = 14
LOCK_COLOR = 15
class GEDAParameter(object):
TYPE = ''
def __init__(self, name, datatype=int, default=None):
self._name = name
self.datatype = datatype
self.default = default
@property
def name(self):
if self.TYPE:
return "%s_%s" % (self.TYPE, self._name)
return self._name
class GEDAStyleParameter(GEDAParameter):
""" Style parameter """
TYPE = 'style'
class GEDAExtraParameter(GEDAParameter):
""" Extra parameter """
TYPE = 'extra'
class GEDACommand(object):
""" Command """
TYPE = None
PARAMETERS = ()
EXTRA_PARAMETERS = ()
def parameters(self):
return self.PARAMETERS + self.EXTRA_PARAMETERS
def get_style_keywords(self):
style_type = GEDAStyleParameter.TYPE
return [p.name for p in self.PARAMETERS
if p.name.startswith(style_type)]
def update_default_kwargs(self, **kwargs):
default_kwargs = {}
for parameter in self.parameters():
default_kwargs[parameter.name] = parameter.default
default_kwargs.update(kwargs)
return default_kwargs
def generate_command(self, **kwargs):
kwargs = self.update_default_kwargs(**kwargs)
command = [self.TYPE]
for parameter in self.PARAMETERS:
command.append("%%(%s)s" % parameter.name)
return [" ".join(command) % kwargs]
class GEDALineCommand(GEDACommand):
""" Line command """
TYPE = 'L'
PARAMETERS = (
GEDAParameter('x1'),
GEDAParameter('y1'),
GEDAParameter('x2'),
GEDAParameter('y2'),
GEDAStyleParameter('color', default=GEDAColor.GRAPHIC_COLOR),
GEDAStyleParameter('width', default=10),
GEDAStyleParameter('capstyle', default=0),
GEDAStyleParameter('dashstyle', default=0),
GEDAStyleParameter('dashlength', default=-1),
GEDAStyleParameter('dashspace', default=-1),
)
class GEDABoxCommand(GEDACommand):
""" Box command """
TYPE = "B"
PARAMETERS = (
GEDAParameter('x'),
GEDAParameter('y'),
GEDAParameter('width'),
GEDAParameter('height'),
GEDAStyleParameter('color', default=GEDAColor.GRAPHIC_COLOR),
GEDAStyleParameter('width', default=10),
GEDAStyleParameter('capstyle', default=0),
GEDAStyleParameter('dashstyle', default=0),
GEDAStyleParameter('dashlength', default=-1),
GEDAStyleParameter('dashspace', default=-1),
GEDAStyleParameter('filltype', default=0),
GEDAStyleParameter('fillwidth', default=-1),
GEDAStyleParameter('angle1', default=-1),
GEDAStyleParameter('pitch1', default=-1),
GEDAStyleParameter('angle2', default=-1),
GEDAStyleParameter('pitch2', default=-1),
)
class GEDACircleCommand(GEDACommand):
""" Circle command """
TYPE = 'V'
PARAMETERS = (
GEDAParameter('x'),
GEDAParameter('y'),
GEDAParameter('radius'),
GEDAStyleParameter('color', default=GEDAColor.GRAPHIC_COLOR),
GEDAStyleParameter('width', default=10),
GEDAStyleParameter('capstyle', default=0),
GEDAStyleParameter('dashstyle', default=0),
GEDAStyleParameter('dashlength', default=-1),
GEDAStyleParameter('dashspace', default=-1),
GEDAStyleParameter('filltype', default=0),
GEDAStyleParameter('fillwidth', default=-1),
GEDAStyleParameter('angle1', default=-1),
GEDAStyleParameter('pitch1', default=-1),
GEDAStyleParameter('angle2', default=-1),
GEDAStyleParameter('pitch2', default=-1),
)
class GEDAArcCommand(GEDACommand):
""" Arc command """
TYPE = 'A'
PARAMETERS = (
GEDAParameter('x'),
GEDAParameter('y'),
GEDAParameter('radius'),
GEDAParameter('startangle'),
GEDAParameter('sweepangle'),
GEDAStyleParameter('color', default=GEDAColor.GRAPHIC_COLOR),
GEDAStyleParameter('width', default=10),
GEDAStyleParameter('capstyle', default=0),
GEDAStyleParameter('dashstyle', default=0),
GEDAStyleParameter('dashlength', default=-1),
GEDAStyleParameter('dashspace', default=-1),
)
class GEDATextCommand(GEDACommand):
""" Text command """
TYPE = 'T'
PARAMETERS = (
GEDAParameter('x'),
GEDAParameter('y'),
GEDAStyleParameter('color', default=GEDAColor.TEXT_COLOR),
# GEDAStyleParameter('size', default=10),
GEDAParameter('size'),
GEDAParameter('visibility', default=1),
GEDAParameter('show_name_value', default=1),
GEDAParameter('angle', default=0),
GEDAParameter('alignment', default=0),
GEDAParameter('num_lines', default=1),
)
class GEDASegmentCommand(GEDACommand):
""" Segment command """
TYPE = 'N'
PARAMETERS = (
GEDAParameter('x1'),
GEDAParameter('y1'),
GEDAParameter('x2'),
GEDAParameter('y2'),
GEDAStyleParameter('color', default=GEDAColor.NET_COLOR),
)
class GEDAPinCommand(GEDACommand):
""" Pin command """
TYPE = 'P'
PARAMETERS = (
GEDAParameter('x1'),
GEDAParameter('y1'),
GEDAParameter('x2'),
GEDAParameter('y2'),
GEDAStyleParameter('color', default=GEDAColor.PIN_COLOR),
# pin type is always 0
GEDAStyleParameter('pintype', default=0),
# first point is active/connected pin
GEDAParameter('whichend', default=0),
)
class GEDAComponentCommand(GEDACommand):
""" Component command """
TYPE = 'C'
PARAMETERS = (
GEDAParameter('x'),
GEDAParameter('y'),
# GEDAParameter('selectable', default=0),
GEDAParameter('selectable', default=1),
GEDAParameter('angle'),
GEDAParameter('mirror'),
GEDAParameter('basename', datatype=str),
)
class GEDAPathCommand(GEDACommand):
""" Path command """
TYPE = "H"
PARAMETERS = (
GEDAStyleParameter('color', default=GEDAColor.GRAPHIC_COLOR),
GEDAStyleParameter('width', default=10),
GEDAStyleParameter('capstyle', default=0),
GEDAStyleParameter('dashstyle', default=0),
GEDAStyleParameter('dashlength', default=-1),
GEDAStyleParameter('dashspace', default=-1),
GEDAStyleParameter('filltype', default=0),
GEDAStyleParameter('fillwidth', default=-1),
GEDAStyleParameter('angle1', default=-1),
GEDAStyleParameter('pitch1', default=-1),
GEDAStyleParameter('angle2', default=-1),
GEDAStyleParameter('pitch2', default=-1),
GEDAParameter('num_lines'),
)
    EXTRA_PARAMETERS = (
GEDAExtraParameter('id'),
)
class GEDAVersionCommand(GEDACommand):
""" Version command """
TYPE = 'v'
PARAMETERS = (
GEDAParameter('version'),
GEDAParameter('fileformat_version'),
)
class GEDABusCommand(GEDACommand):
""" Bus command """
TYPE = 'U'
PARAMETERS = (
GEDAParameter('x1'),
GEDAParameter('y1'),
GEDAParameter('x2'),
GEDAParameter('y2'),
GEDAStyleParameter('color', default=GEDAColor.BUS_COLOR),
GEDAParameter('ripperdir', default=0),
)
class GEDAPictureCommand(GEDACommand):
""" Picture command """
TYPE = 'G'
PARAMETERS = ()
class GEDAEmbeddedEnvironmentCommand(GEDACommand):
""" Embeded command """
TYPE = '['
PARAMETERS = ()
class GEDAAttributeEnvironmentCommand(GEDACommand):
""" Attribute environment command """
TYPE = '{'
PARAMETERS = ()
class GEDACommand(GEDACommand):
""" Command """
TYPE = 'U'
PARAMETERS = ()
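A quick sketch of how these classes render gEDA file lines; supplying only the coordinates lets the style defaults fill in the rest:
line = GEDALineCommand()
print(line.generate_command(x1=0, y1=0, x2=400, y2=400))
# -> ['L 0 0 400 400 3 10 0 0 -1 -1']
#    type, the four coordinates, then color (GRAPHIC_COLOR == 3), width 10,
#    capstyle 0, dashstyle 0, dashlength -1 and dashspace -1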
| 28.921708
| 69
| 0.621632
|
class GEDAColor:
BACKGROUND_COLOR = 0
PIN_COLOR = 1
NET_ENDPOINT_COLOR = 2
GRAPHIC_COLOR = 3
NET_COLOR = 4
ATTRIBUTE_COLOR = 5
LOGIC_BUBBLE_COLOR = 6
DOTS_GRID_COLOR = 7
DETACHED_ATTRIBUTE_COLOR = 8
TEXT_COLOR = 9
BUS_COLOR = 10
SELECT_COLOR = 11
BOUNDINGBOX_COLOR = 12
ZOOM_BOX_COLOR = 13
STROKE_COLOR = 14
LOCK_COLOR = 15
class GEDAParameter(object):
TYPE = ''
def __init__(self, name, datatype=int, default=None):
self._name = name
self.datatype = datatype
self.default = default
@property
def name(self):
if self.TYPE:
return "%s_%s" % (self.TYPE, self._name)
return self._name
class GEDAStyleParameter(GEDAParameter):
TYPE = 'style'
class GEDAExtraParameter(GEDAParameter):
TYPE = 'extra'
class GEDACommand(object):
TYPE = None
PARAMETERS = ()
EXTRA_PARAMETERS = ()
def parameters(self):
return self.PARAMETERS + self.EXTRA_PARAMETERS
def get_style_keywords(self):
style_type = GEDAStyleParameter.TYPE
return [p.name for p in self.PARAMETERS
if p.name.startswith(style_type)]
def update_default_kwargs(self, **kwargs):
default_kwargs = {}
for parameter in self.parameters():
default_kwargs[parameter.name] = parameter.default
default_kwargs.update(kwargs)
return default_kwargs
def generate_command(self, **kwargs):
kwargs = self.update_default_kwargs(**kwargs)
command = [self.TYPE]
for parameter in self.PARAMETERS:
command.append("%%(%s)s" % parameter.name)
return [" ".join(command) % kwargs]
class GEDALineCommand(GEDACommand):
TYPE = 'L'
PARAMETERS = (
GEDAParameter('x1'),
GEDAParameter('y1'),
GEDAParameter('x2'),
GEDAParameter('y2'),
GEDAStyleParameter('color', default=GEDAColor.GRAPHIC_COLOR),
GEDAStyleParameter('width', default=10),
GEDAStyleParameter('capstyle', default=0),
GEDAStyleParameter('dashstyle', default=0),
GEDAStyleParameter('dashlength', default=-1),
GEDAStyleParameter('dashspace', default=-1),
)
class GEDABoxCommand(GEDACommand):
TYPE = "B"
PARAMETERS = (
GEDAParameter('x'),
GEDAParameter('y'),
GEDAParameter('width'),
GEDAParameter('height'),
GEDAStyleParameter('color', default=GEDAColor.GRAPHIC_COLOR),
GEDAStyleParameter('width', default=10),
GEDAStyleParameter('capstyle', default=0),
GEDAStyleParameter('dashstyle', default=0),
GEDAStyleParameter('dashlength', default=-1),
GEDAStyleParameter('dashspace', default=-1),
GEDAStyleParameter('filltype', default=0),
GEDAStyleParameter('fillwidth', default=-1),
GEDAStyleParameter('angle1', default=-1),
GEDAStyleParameter('pitch1', default=-1),
GEDAStyleParameter('angle2', default=-1),
GEDAStyleParameter('pitch2', default=-1),
)
class GEDACircleCommand(GEDACommand):
TYPE = 'V'
PARAMETERS = (
GEDAParameter('x'),
GEDAParameter('y'),
GEDAParameter('radius'),
GEDAStyleParameter('color', default=GEDAColor.GRAPHIC_COLOR),
GEDAStyleParameter('width', default=10),
GEDAStyleParameter('capstyle', default=0),
GEDAStyleParameter('dashstyle', default=0),
GEDAStyleParameter('dashlength', default=-1),
GEDAStyleParameter('dashspace', default=-1),
GEDAStyleParameter('filltype', default=0),
GEDAStyleParameter('fillwidth', default=-1),
GEDAStyleParameter('angle1', default=-1),
GEDAStyleParameter('pitch1', default=-1),
GEDAStyleParameter('angle2', default=-1),
GEDAStyleParameter('pitch2', default=-1),
)
class GEDAArcCommand(GEDACommand):
TYPE = 'A'
PARAMETERS = (
GEDAParameter('x'),
GEDAParameter('y'),
GEDAParameter('radius'),
GEDAParameter('startangle'),
GEDAParameter('sweepangle'),
GEDAStyleParameter('color', default=GEDAColor.GRAPHIC_COLOR),
GEDAStyleParameter('width', default=10),
GEDAStyleParameter('capstyle', default=0),
GEDAStyleParameter('dashstyle', default=0),
GEDAStyleParameter('dashlength', default=-1),
GEDAStyleParameter('dashspace', default=-1),
)
class GEDATextCommand(GEDACommand):
TYPE = 'T'
PARAMETERS = (
GEDAParameter('x'),
GEDAParameter('y'),
GEDAStyleParameter('color', default=GEDAColor.TEXT_COLOR),
GEDAParameter('size'),
GEDAParameter('visibility', default=1),
GEDAParameter('show_name_value', default=1),
GEDAParameter('angle', default=0),
GEDAParameter('alignment', default=0),
GEDAParameter('num_lines', default=1),
)
class GEDASegmentCommand(GEDACommand):
TYPE = 'N'
PARAMETERS = (
GEDAParameter('x1'),
GEDAParameter('y1'),
GEDAParameter('x2'),
GEDAParameter('y2'),
GEDAStyleParameter('color', default=GEDAColor.NET_COLOR),
)
class GEDAPinCommand(GEDACommand):
TYPE = 'P'
PARAMETERS = (
GEDAParameter('x1'),
GEDAParameter('y1'),
GEDAParameter('x2'),
GEDAParameter('y2'),
GEDAStyleParameter('color', default=GEDAColor.PIN_COLOR),
GEDAStyleParameter('pintype', default=0),
GEDAParameter('whichend', default=0),
)
class GEDAComponentCommand(GEDACommand):
TYPE = 'C'
PARAMETERS = (
GEDAParameter('x'),
GEDAParameter('y'),
GEDAParameter('selectable', default=1),
GEDAParameter('angle'),
GEDAParameter('mirror'),
GEDAParameter('basename', datatype=str),
)
class GEDAPathCommand(GEDACommand):
TYPE = "H"
PARAMETERS = (
GEDAStyleParameter('color', default=GEDAColor.GRAPHIC_COLOR),
GEDAStyleParameter('width', default=10),
GEDAStyleParameter('capstyle', default=0),
GEDAStyleParameter('dashstyle', default=0),
GEDAStyleParameter('dashlength', default=-1),
GEDAStyleParameter('dashspace', default=-1),
GEDAStyleParameter('filltype', default=0),
GEDAStyleParameter('fillwidth', default=-1),
GEDAStyleParameter('angle1', default=-1),
GEDAStyleParameter('pitch1', default=-1),
GEDAStyleParameter('angle2', default=-1),
GEDAStyleParameter('pitch2', default=-1),
GEDAParameter('num_lines'),
)
    EXTRA_PARAMETERS = (
GEDAExtraParameter('id'),
)
class GEDAVersionCommand(GEDACommand):
TYPE = 'v'
PARAMETERS = (
GEDAParameter('version'),
GEDAParameter('fileformat_version'),
)
class GEDABusCommand(GEDACommand):
TYPE = 'U'
PARAMETERS = (
GEDAParameter('x1'),
GEDAParameter('y1'),
GEDAParameter('x2'),
GEDAParameter('y2'),
GEDAStyleParameter('color', default=GEDAColor.BUS_COLOR),
GEDAParameter('ripperdir', default=0),
)
class GEDAPictureCommand(GEDACommand):
TYPE = 'G'
PARAMETERS = ()
class GEDAEmbeddedEnvironmentCommand(GEDACommand):
TYPE = '['
PARAMETERS = ()
class GEDAAttributeEnvironmentCommand(GEDACommand):
TYPE = '{'
PARAMETERS = ()
class GEDACommand(GEDACommand):
TYPE = 'U'
PARAMETERS = ()
| true
| true
|
79090a5fd1a3a33a089c5fbe3390777081cf79d7
| 2,642
|
py
|
Python
|
pywren_ibm_cloud/libs/ibm_cloudfunctions/iam.py
|
class-euproject/lithops
|
acf381817673c29db0e9e143001029357890a39b
|
[
"Apache-2.0"
] | 1
|
2020-08-04T08:16:31.000Z
|
2020-08-04T08:16:31.000Z
|
pywren_ibm_cloud/libs/ibm_cloudfunctions/iam.py
|
class-euproject/lithops
|
acf381817673c29db0e9e143001029357890a39b
|
[
"Apache-2.0"
] | null | null | null |
pywren_ibm_cloud/libs/ibm_cloudfunctions/iam.py
|
class-euproject/lithops
|
acf381817673c29db0e9e143001029357890a39b
|
[
"Apache-2.0"
] | null | null | null |
#
# (C) Copyright IBM Corp. 2019
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import requests
from urllib.parse import urlencode
logger = logging.getLogger(__name__)
class IBMIAMClient:
def __init__(self, iam_config, cf_endpoint, cf_namespace):
self.iam_api_key = iam_config.get('api_key', None)
self.iam_auth_endpoint = iam_config['ibm_auth_endpoint']
self.cf_endpoint = cf_endpoint
self.cf_namespace = cf_namespace
def get_iam_token(self):
data = urlencode({'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': self.iam_api_key})
headers = {
'content-type': 'application/x-www-form-urlencoded',
'Accept': 'application/json'
}
res = requests.post(self.iam_auth_endpoint, data=data, headers=headers)
if res.status_code != 200:
raise RuntimeError("Error: http code {} while retrieving IAM token for API key.".format(res.status_code))
bearer_response = res.json()
bearer_token = bearer_response['access_token']
logger.debug(bearer_token)
return bearer_token
def get_function_namespace_id(self, iam_token):
logger.debug("Getting name space id for {}".format(self.cf_namespace))
headers = {
'content-type': 'application/json',
'Accept': 'application/json',
'Authorization': iam_token
}
url = '/'.join([self.cf_endpoint, 'api', 'v1', 'namespaces'])
res = requests.get(url, headers=headers)
if res.status_code != 200:
raise RuntimeError("Error: http code {} while listing namespaces.".format(res.status_code))
namespaces = res.json()
for current_namespace in namespaces['namespaces']:
if 'name' in current_namespace and current_namespace['name'] == self.cf_namespace:
logger.debug("Found name space id {} for {}".format(current_namespace['id'], self.cf_namespace))
return current_namespace['id']
raise Exception("No IBM Cloud Functions namespace \"{}\" found.".format(self.cf_namespace))
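A minimal usage sketch; the endpoint and namespace values are illustrative placeholders:
iam_config = {
    'api_key': '<IAM_API_KEY>',  # placeholder
    'ibm_auth_endpoint': 'https://iam.cloud.ibm.com/identity/token',
}
client = IBMIAMClient(iam_config,
                      cf_endpoint='https://us-south.functions.cloud.ibm.com',
                      cf_namespace='my-namespace')
token = client.get_iam_token()
# get_function_namespace_id sends the Authorization header through as-is, so
# the caller decides whether to pass a 'Bearer '-prefixed value
namespace_id = client.get_function_namespace_id(token)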
| 38.289855
| 117
| 0.671461
|
import logging
import requests
from urllib.parse import urlencode
logger = logging.getLogger(__name__)
class IBMIAMClient:
def __init__(self, iam_config, cf_endpoint, cf_namespace):
self.iam_api_key = iam_config.get('api_key', None)
self.iam_auth_endpoint = iam_config['ibm_auth_endpoint']
self.cf_endpoint = cf_endpoint
self.cf_namespace = cf_namespace
def get_iam_token(self):
data = urlencode({'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': self.iam_api_key})
headers = {
'content-type': 'application/x-www-form-urlencoded',
'Accept': 'application/json'
}
res = requests.post(self.iam_auth_endpoint, data=data, headers=headers)
if res.status_code != 200:
raise RuntimeError("Error: http code {} while retrieving IAM token for API key.".format(res.status_code))
bearer_response = res.json()
bearer_token = bearer_response['access_token']
logger.debug(bearer_token)
return bearer_token
def get_function_namespace_id(self, iam_token):
logger.debug("Getting name space id for {}".format(self.cf_namespace))
headers = {
'content-type': 'application/json',
'Accept': 'application/json',
'Authorization': iam_token
}
url = '/'.join([self.cf_endpoint, 'api', 'v1', 'namespaces'])
res = requests.get(url, headers=headers)
if res.status_code != 200:
raise RuntimeError("Error: http code {} while listing namespaces.".format(res.status_code))
namespaces = res.json()
for current_namespace in namespaces['namespaces']:
if 'name' in current_namespace and current_namespace['name'] == self.cf_namespace:
logger.debug("Found name space id {} for {}".format(current_namespace['id'], self.cf_namespace))
return current_namespace['id']
raise Exception("No IBM Cloud Functions namespace \"{}\" found.".format(self.cf_namespace))
| true
| true
|
79090ae7f5b2eb168df9aaf66cbdcea43e9ae8dd
| 3,759
|
py
|
Python
|
infoblox_netmri/api/broker/broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
from infoblox_netmri.utils.utils import locate, to_snake
from infoblox_netmri.api.exceptions.netmri_exceptions import NotImplementedException
class Broker(object):
""" Base class for broker instances, provides methods for API requests.
And return responces wrapped with specific class
:param client: InfobloxNetMRI client
"""
controller = None
def __init__(self, client):
self.client = client
def api_request(self, method_name, params):
""" Make api request and return single wrapped object
:param method_name: name of API methods
:param params: dict-wrapped params for specific API call
"""
data = self.client.api_request(method_name, params)
if isinstance(data, dict) and len(data) > 1:
for x in data.keys():
data[x] = self._get_return_object_type(data.get(x))
return data
class_name = to_snake(self.__class__.__name__.replace("Broker", ""))
if class_name in data:
result_name = class_name
else:
result_name = method_name.split('/')[-1]
if result_name not in data:
return data
return self._get_return_object_type(data.get(result_name))
# See NETMRI-31545
def api_mixed_request(self, method_name, params):
""" Make api request and download a file and return
JSON response or request status dictionary
:param method_name: name of API methods
:param params: dict-wrapped params for specific API call
"""
data = self.client.api_request(method_name, params, downloadable=True)
class_name = to_snake(self.__class__.__name__.replace("Broker", ""))
if class_name in data:
result_name = class_name
else:
result_name = method_name.split('/')[-1]
if result_name not in data:
return data
return self._get_return_object_type(data.get(result_name))
def api_list_request(self, method_name, params):
""" Make api request and return list of wrapped objects
:param method_name: name of API methods
:param params: dict-wrapped params for specific API call
"""
data = self.client.api_request(method_name, params)
if not data:
return None
try:
return [self._get_return_object_type(x) for x in data[self.controller]]
except KeyError:
print("Sorry, this method will be implemented in the\
future versions of NetMRI")
raise NotImplementedException(self.controller, method_name)
def _get_method_fullname(self, method):
""" Returns full API method name using controller name
**Input**
:param method: method name
:return: full API path
"""
return "{}/{}".format(self.controller, method)
def _get_return_object_type(self, data):
""" Returns wrapped response which inherits from RemoteModel class
:param data: API responce data
:return: RemoteModel child class
"""
if not data or type(data) != dict:
return data
class_name = data.get("_class")
obj_class = locate(self._get_remote_class_name(class_name))
return obj_class(data, self.client)
def _get_remote_class_name(self, name):
""" Generate full path to specific RemoteModel instance
:param name: name of model
:return: full path for model
"""
return "infoblox_netmri.api.remote.models.{pckg}_remote.{name}Remote".format(
pckg=to_snake(name),
name=name
)
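A sketch of how a concrete broker builds on this base class; the DeviceBroker name and its index method are illustrative, not taken from the real NetMRI API surface:
class DeviceBroker(Broker):
    controller = 'devices'
    def index(self, **kwargs):
        # resolves to the 'devices/index' API method and returns a list of
        # wrapped remote models via api_list_request
        return self.api_list_request(self._get_method_fullname('index'), kwargs)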
| 34.172727
| 85
| 0.623304
|
from infoblox_netmri.utils.utils import locate, to_snake
from infoblox_netmri.api.exceptions.netmri_exceptions import NotImplementedException
class Broker(object):
controller = None
def __init__(self, client):
self.client = client
def api_request(self, method_name, params):
data = self.client.api_request(method_name, params)
if isinstance(data, dict) and len(data) > 1:
for x in data.keys():
data[x] = self._get_return_object_type(data.get(x))
return data
class_name = to_snake(self.__class__.__name__.replace("Broker", ""))
if class_name in data:
result_name = class_name
else:
result_name = method_name.split('/')[-1]
if result_name not in data:
return data
return self._get_return_object_type(data.get(result_name))
def api_mixed_request(self, method_name, params):
data = self.client.api_request(method_name, params, downloadable=True)
class_name = to_snake(self.__class__.__name__.replace("Broker", ""))
if class_name in data:
result_name = class_name
else:
result_name = method_name.split('/')[-1]
if result_name not in data:
return data
return self._get_return_object_type(data.get(result_name))
def api_list_request(self, method_name, params):
data = self.client.api_request(method_name, params)
if not data:
return None
try:
return [self._get_return_object_type(x) for x in data[self.controller]]
except KeyError:
print("Sorry, this method will be implemented in the\
future versions of NetMRI")
raise NotImplementedException(self.controller, method_name)
def _get_method_fullname(self, method):
return "{}/{}".format(self.controller, method)
def _get_return_object_type(self, data):
if not data or type(data) != dict:
return data
class_name = data.get("_class")
obj_class = locate(self._get_remote_class_name(class_name))
return obj_class(data, self.client)
def _get_remote_class_name(self, name):
return "infoblox_netmri.api.remote.models.{pckg}_remote.{name}Remote".format(
pckg=to_snake(name),
name=name
)
| true
| true
|
79090e5a83eb25559b684d75ae6fb2a9c884e5e9
| 4,627
|
py
|
Python
|
test/functional/p2p_node_network_limited.py
|
VaderCoinProject/vadercoin
|
b513c794b014d40e5aad281dd1f54845c46d216c
|
[
"MIT"
] | null | null | null |
test/functional/p2p_node_network_limited.py
|
VaderCoinProject/vadercoin
|
b513c794b014d40e5aad281dd1f54845c46d216c
|
[
"MIT"
] | null | null | null |
test/functional/p2p_node_network_limited.py
|
VaderCoinProject/vadercoin
|
b513c794b014d40e5aad281dd1f54845c46d216c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2017-2020 The Vadercoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests NODE_NETWORK_LIMITED.
Tests that a node configured with -prune=550 signals NODE_NETWORK_LIMITED correctly
and that it responds to getdata requests for blocks correctly:
- send a block within 288 + 2 of the tip
- disconnect peers who request blocks older than that."""
from test_framework.messages import CInv, MSG_BLOCK, msg_getdata, msg_verack, NODE_NETWORK_LIMITED, NODE_WITNESS
from test_framework.p2p import P2PInterface
from test_framework.test_framework import VadercoinTestFramework
from test_framework.util import (
assert_equal,
)
class P2PIgnoreInv(P2PInterface):
firstAddrnServices = 0
def on_inv(self, message):
# The node will send us invs for other blocks. Ignore them.
pass
def on_addr(self, message):
self.firstAddrnServices = message.addrs[0].nServices
def wait_for_addr(self, timeout=5):
test_function = lambda: self.last_message.get("addr")
self.wait_until(test_function, timeout=timeout)
def send_getdata_for_block(self, blockhash):
getdata_request = msg_getdata()
getdata_request.inv.append(CInv(MSG_BLOCK, int(blockhash, 16)))
self.send_message(getdata_request)
class NodeNetworkLimitedTest(VadercoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [['-prune=550', '-addrmantest'], [], []]
def disconnect_all(self):
self.disconnect_nodes(0, 1)
self.disconnect_nodes(0, 2)
self.disconnect_nodes(1, 2)
def setup_network(self):
self.add_nodes(self.num_nodes, self.extra_args)
self.start_nodes()
def run_test(self):
node = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
expected_services = NODE_WITNESS | NODE_NETWORK_LIMITED
self.log.info("Check that node has signalled expected services.")
assert_equal(node.nServices, expected_services)
self.log.info("Check that the localservices is as expected.")
assert_equal(int(self.nodes[0].getnetworkinfo()['localservices'], 16), expected_services)
self.log.info("Mine enough blocks to reach the NODE_NETWORK_LIMITED range.")
self.connect_nodes(0, 1)
blocks = self.nodes[1].generatetoaddress(292, self.nodes[1].get_deterministic_priv_key().address)
self.sync_blocks([self.nodes[0], self.nodes[1]])
self.log.info("Make sure we can max retrieve block at tip-288.")
node.send_getdata_for_block(blocks[1]) # last block in valid range
node.wait_for_block(int(blocks[1], 16), timeout=3)
self.log.info("Requesting block at height 2 (tip-289) must fail (ignored).")
node.send_getdata_for_block(blocks[0]) # first block outside of the 288+2 limit
node.wait_for_disconnect(5)
self.log.info("Check local address relay, do a fresh connection.")
self.nodes[0].disconnect_p2ps()
node1 = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
node1.send_message(msg_verack())
node1.wait_for_addr()
        # must relay address with NODE_NETWORK_LIMITED
assert_equal(node1.firstAddrnServices, expected_services)
self.nodes[0].disconnect_p2ps()
# connect unsynced node 2 with pruned NODE_NETWORK_LIMITED peer
# because node 2 is in IBD and node 0 is a NODE_NETWORK_LIMITED peer, sync must not be possible
self.connect_nodes(0, 2)
try:
self.sync_blocks([self.nodes[0], self.nodes[2]], timeout=5)
except:
pass
# node2 must remain at height 0
assert_equal(self.nodes[2].getblockheader(self.nodes[2].getbestblockhash())['height'], 0)
# now connect also to node 1 (non pruned)
self.connect_nodes(1, 2)
# sync must be possible
self.sync_blocks()
# disconnect all peers
self.disconnect_all()
# mine 10 blocks on node 0 (pruned node)
self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address)
        # connect node1 (non-pruned) with node0 (pruned) and check that they can sync
self.connect_nodes(0, 1)
# sync must be possible, node 1 is no longer in IBD and should therefore connect to node 0 (NODE_NETWORK_LIMITED)
self.sync_blocks([self.nodes[0], self.nodes[1]])
if __name__ == '__main__':
NodeNetworkLimitedTest().main()
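The heights exercised above line up with the 288 + 2 block serving window; a small arithmetic sketch makes the boundary explicit:
tip = 292                 # blocks mined on top of the clean chain
window = 288 + 2          # NODE_NETWORK_LIMITED depth plus slack
assert tip - 2 <= window  # blocks[1] (height 2) is still served
assert tip - 1 > window   # blocks[0] (height 1) gets the peer disconnected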
| 40.587719
| 121
| 0.694619
|
from test_framework.messages import CInv, MSG_BLOCK, msg_getdata, msg_verack, NODE_NETWORK_LIMITED, NODE_WITNESS
from test_framework.p2p import P2PInterface
from test_framework.test_framework import VadercoinTestFramework
from test_framework.util import (
assert_equal,
)
class P2PIgnoreInv(P2PInterface):
firstAddrnServices = 0
def on_inv(self, message):
pass
def on_addr(self, message):
self.firstAddrnServices = message.addrs[0].nServices
def wait_for_addr(self, timeout=5):
test_function = lambda: self.last_message.get("addr")
self.wait_until(test_function, timeout=timeout)
def send_getdata_for_block(self, blockhash):
getdata_request = msg_getdata()
getdata_request.inv.append(CInv(MSG_BLOCK, int(blockhash, 16)))
self.send_message(getdata_request)
class NodeNetworkLimitedTest(VadercoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [['-prune=550', '-addrmantest'], [], []]
def disconnect_all(self):
self.disconnect_nodes(0, 1)
self.disconnect_nodes(0, 2)
self.disconnect_nodes(1, 2)
def setup_network(self):
self.add_nodes(self.num_nodes, self.extra_args)
self.start_nodes()
def run_test(self):
node = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
expected_services = NODE_WITNESS | NODE_NETWORK_LIMITED
self.log.info("Check that node has signalled expected services.")
assert_equal(node.nServices, expected_services)
self.log.info("Check that the localservices is as expected.")
assert_equal(int(self.nodes[0].getnetworkinfo()['localservices'], 16), expected_services)
self.log.info("Mine enough blocks to reach the NODE_NETWORK_LIMITED range.")
self.connect_nodes(0, 1)
blocks = self.nodes[1].generatetoaddress(292, self.nodes[1].get_deterministic_priv_key().address)
self.sync_blocks([self.nodes[0], self.nodes[1]])
self.log.info("Make sure we can max retrieve block at tip-288.")
node.send_getdata_for_block(blocks[1])
node.wait_for_block(int(blocks[1], 16), timeout=3)
self.log.info("Requesting block at height 2 (tip-289) must fail (ignored).")
node.send_getdata_for_block(blocks[0])
node.wait_for_disconnect(5)
self.log.info("Check local address relay, do a fresh connection.")
self.nodes[0].disconnect_p2ps()
node1 = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
node1.send_message(msg_verack())
node1.wait_for_addr()
assert_equal(node1.firstAddrnServices, expected_services)
self.nodes[0].disconnect_p2ps()
self.connect_nodes(0, 2)
try:
self.sync_blocks([self.nodes[0], self.nodes[2]], timeout=5)
except:
pass
assert_equal(self.nodes[2].getblockheader(self.nodes[2].getbestblockhash())['height'], 0)
self.connect_nodes(1, 2)
self.sync_blocks()
self.disconnect_all()
self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address)
self.connect_nodes(0, 1)
self.sync_blocks([self.nodes[0], self.nodes[1]])
if __name__ == '__main__':
NodeNetworkLimitedTest().main()
| true
| true
|
79090ed4fae9ab29304bd432c2797964675df5ec
| 13,148
|
py
|
Python
|
kivy/uix/image.py
|
eman1can/kivy
|
58bd0dcaf73d7a167a4df53879bfb18e5a005277
|
[
"MIT"
] | null | null | null |
kivy/uix/image.py
|
eman1can/kivy
|
58bd0dcaf73d7a167a4df53879bfb18e5a005277
|
[
"MIT"
] | null | null | null |
kivy/uix/image.py
|
eman1can/kivy
|
58bd0dcaf73d7a167a4df53879bfb18e5a005277
|
[
"MIT"
] | null | null | null |
'''
Image
=====
The :class:`Image` widget is used to display an image::
Example in python::
wimg = Image(source='mylogo.png')
Kv Example::
Image:
source: 'mylogo.png'
size: self.texture_size
Asynchronous Loading
--------------------
To load an image asynchronously (for example from an external webserver), use
the :class:`AsyncImage` subclass::
aimg = AsyncImage(source='http://mywebsite.com/logo.png')
This can be useful as it prevents your application from waiting until the image
is loaded. If you want to display large images or retrieve them from URLs,
using :class:`AsyncImage` will allow these resources to be retrieved on a
background thread without blocking your application.
Alignment
---------
By default, the image is centered and fits inside the widget bounding box.
If you don't want that, you can set `allow_stretch` to True and `keep_ratio`
to False.
You can also inherit from Image and create your own style. For example, if you
want your image to be greater than the size of your widget, you could do::
class FullImage(Image):
pass
And in your kivy language file::
<-FullImage>:
canvas:
Color:
rgb: (1, 1, 1)
Rectangle:
texture: self.texture
size: self.width + 20, self.height + 20
pos: self.x - 10, self.y - 10
'''
__all__ = ('Image', 'AsyncImage')
from kivy.uix.widget import Widget
from kivy.core.image import Image as CoreImage
from kivy.resources import resource_find
from kivy.properties import StringProperty, ObjectProperty, ListProperty, \
AliasProperty, BooleanProperty, NumericProperty, ColorProperty
from kivy.logger import Logger
# delayed imports
Loader = None
class Image(Widget):
'''Image class, see module documentation for more information.
'''
source = StringProperty(None)
'''Filename / source of your image.
:attr:`source` is a :class:`~kivy.properties.StringProperty` and
defaults to None.
'''
texture = ObjectProperty(None, allownone=True)
'''Texture object of the image. The texture represents the original, loaded
image texture. It is stretched and positioned during rendering according to
the :attr:`allow_stretch` and :attr:`keep_ratio` properties.
Depending on how the texture was created, the value will be a
:class:`~kivy.graphics.texture.Texture` or a
:class:`~kivy.graphics.texture.TextureRegion` object.
:attr:`texture` is an :class:`~kivy.properties.ObjectProperty` and defaults
to None.
'''
texture_size = ListProperty([0, 0])
'''Texture size of the image. This represents the original, loaded image
texture size.
.. warning::
The texture size is set after the texture property. So if you listen to
the change on :attr:`texture`, the property texture_size will not be
up-to-date. Use self.texture.size instead.
'''
def get_image_ratio(self):
if self.texture:
return self.texture.width / float(self.texture.height)
return 1.
mipmap = BooleanProperty(False)
'''Indicate if you want OpenGL mipmapping to be applied to the texture.
Read :ref:`mipmap` for more information.
.. versionadded:: 1.0.7
:attr:`mipmap` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
'''
image_ratio = AliasProperty(get_image_ratio, bind=('texture',), cache=True)
    '''Ratio of the image (width / float(height)).
:attr:`image_ratio` is an :class:`~kivy.properties.AliasProperty` and is
read-only.
'''
color = ColorProperty([1, 1, 1, 1])
'''Image color, in the format (r, g, b, a). This attribute can be used to
'tint' an image. Be careful: if the source image is not gray/white, the
color will not really work as expected.
.. versionadded:: 1.0.6
:attr:`color` is a :class:`~kivy.properties.ColorProperty` and defaults to
[1, 1, 1, 1].
.. versionchanged:: 2.0.0
Changed from :class:`~kivy.properties.ListProperty` to
:class:`~kivy.properties.ColorProperty`.
'''
allow_stretch = BooleanProperty(False)
'''If True, the normalized image size will be maximized to fit in the image
box. Otherwise, if the box is too tall, the image will not be
stretched more than 1:1 pixels.
.. versionadded:: 1.0.7
:attr:`allow_stretch` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False.
'''
keep_ratio = BooleanProperty(True)
'''If False along with allow_stretch being True, the normalized image
size will be maximized to fit in the image box and ignores the aspect
ratio of the image.
Otherwise, if the box is too tall, the image will not be stretched more
than 1:1 pixels.
.. versionadded:: 1.0.8
:attr:`keep_ratio` is a :class:`~kivy.properties.BooleanProperty` and
defaults to True.
'''
keep_data = BooleanProperty(False)
'''If True, the underlying _coreimage will store the raw image data.
This is useful when performing pixel based collision detection.
.. versionadded:: 1.3.0
:attr:`keep_data` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False.
'''
anim_delay = NumericProperty(.25)
'''Delay the animation if the image is sequenced (like an animated gif).
If anim_delay is set to -1, the animation will be stopped.
.. versionadded:: 1.0.8
:attr:`anim_delay` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0.25 (4 FPS).
'''
anim_loop = NumericProperty(0)
'''Number of loops to play then stop animating. 0 means keep animating.
.. versionadded:: 1.9.0
:attr:`anim_loop` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0.
'''
nocache = BooleanProperty(False)
'''If this property is set True, the image will not be added to the
internal cache. The cache will simply ignore any calls trying to
append the core image.
.. versionadded:: 1.6.0
:attr:`nocache` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
'''
def get_norm_image_size(self):
if not self.texture:
return list(self.size)
ratio = self.image_ratio
w, h = self.size
tw, th = self.texture.size
# ensure that the width is always maximized to the container width
if self.allow_stretch:
if not self.keep_ratio:
return [w, h]
iw = w
else:
iw = min(w, tw)
# calculate the appropriate height
ih = iw / ratio
        # if the height is too high, take the height of the container
        # and calculate the appropriate width. no need to test further. :)
if ih > h:
if self.allow_stretch:
ih = h
else:
ih = min(h, th)
iw = ih * ratio
return [iw, ih]
norm_image_size = AliasProperty(get_norm_image_size, bind=('texture', 'size', 'allow_stretch', 'image_ratio', 'keep_ratio'), cache=True)
'''Normalized image size within the widget box.
This size will always fit the widget size and will preserve the image
ratio.
:attr:`norm_image_size` is an :class:`~kivy.properties.AliasProperty` and
is read-only.
'''
def __init__(self, **kwargs):
self._coreimage = None
self._loops = 0
update = self.texture_update
fbind = self.fbind
fbind('source', update)
fbind('mipmap', update)
super().__init__(**kwargs)
def texture_update(self, *largs):
self.set_texture_from_resource(self.source)
def set_texture_from_resource(self, resource):
if not resource:
self._clear_core_image()
return
source = resource_find(resource)
if not source:
Logger.error('Image: Not found <%s>' % resource)
self._clear_core_image()
return
if self._coreimage:
self._coreimage.unbind(on_texture=self._on_tex_change)
try:
self._coreimage = image = CoreImage(
source,
mipmap=self.mipmap,
anim_delay=self.anim_delay,
keep_data=self.keep_data,
nocache=self.nocache
)
except Exception:
Logger.error('Image: Error loading <%s>' % resource)
self._clear_core_image()
image = self._coreimage
if image:
image.bind(on_texture=self._on_tex_change)
self.texture = image.texture
def on_anim_delay(self, instance, value):
if self._coreimage is None:
return
self._coreimage.anim_delay = value
if value < 0:
self._coreimage.anim_reset(False)
def on_texture(self, instance, value):
self.texture_size = value.size if value else [0, 0]
def _clear_core_image(self):
if self._coreimage:
self._coreimage.unbind(on_texture=self._on_tex_change)
self.texture = None
self._coreimage = None
self._loops = 0
def _on_tex_change(self, *largs):
# update texture from core image
self.texture = self._coreimage.texture
ci = self._coreimage
if self.anim_loop and ci._anim_index == len(ci._image.textures) - 1:
self._loops += 1
if self.anim_loop == self._loops:
ci.anim_reset(False)
self._loops = 0
def reload(self):
'''Reload image from disk. This facilitates re-loading of
images from disk in case the image content changes.
.. versionadded:: 1.3.0
Usage::
im = Image(source = '1.jpg')
# -- do something --
im.reload()
# image will be re-loaded from disk
'''
self.remove_from_cache()
old_source = self.source
self.source = ''
self.source = old_source
def remove_from_cache(self):
'''Remove image from cache.
.. versionadded:: 2.0.0
'''
if self._coreimage:
self._coreimage.remove_from_cache()
def on_nocache(self, *args):
if self.nocache:
self.remove_from_cache()
if self._coreimage:
self._coreimage._nocache = True
class AsyncImage(Image):
'''Asynchronous Image class. See the module documentation for more
information.
.. note::
The AsyncImage is a specialized form of the Image class. You may
want to refer to the :mod:`~kivy.loader` documentation and in
particular, the :class:`~kivy.loader.ProxyImage` for more detail
on how to handle events around asynchronous image loading.
.. note::
AsyncImage currently does not support properties
:attr:`anim_loop` and :attr:`mipmap` and setting those properties will
have no effect.
'''
__events__ = ('on_error', 'on_load')
def __init__(self, **kwargs):
self._found_source = None
self._coreimage = None
global Loader
if not Loader:
from kivy.loader import Loader
self.fbind('source', self._load_source)
super().__init__(**kwargs)
def _load_source(self, *args):
source = self.source
if not source:
self._clear_core_image()
return
if not self.is_uri(source):
source = resource_find(source)
if not source:
Logger.error('AsyncImage: Not found <%s>' % self.source)
self._clear_core_image()
return
self._found_source = source
self._coreimage = image = Loader.image(
source,
nocache=self.nocache,
mipmap=self.mipmap,
anim_delay=self.anim_delay
)
image.bind(
on_load=self._on_source_load,
on_error=self._on_source_error,
on_texture=self._on_tex_change
)
self.texture = image.texture
def _on_source_load(self, value):
image = self._coreimage.image
if not image:
return
self.texture = image.texture
self.dispatch('on_load')
def _on_source_error(self, instance, error=None):
self.dispatch('on_error', error)
def on_error(self, error):
pass
def on_load(self, *args):
pass
def is_uri(self, filename):
proto = filename.split('://', 1)[0]
return proto in ('http', 'https', 'ftp', 'smb', 'S3')
def _clear_core_image(self):
if self._coreimage:
self._coreimage.unbind(on_load=self._on_source_load)
super()._clear_core_image()
self._found_source = None
def _on_tex_change(self, *largs):
if self._coreimage:
self.texture = self._coreimage.texture
def texture_update(self, *largs):
pass
def remove_from_cache(self):
if self._found_source:
Loader.remove_from_cache(self._found_source)
super().remove_from_cache()
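A plain-number sketch of the letterboxing math in get_norm_image_size, for the default allow_stretch=False, keep_ratio=True case:
tw, th = 400, 200            # texture size
w, h = 300, 300              # widget size
ratio = tw / float(th)       # 2.0
iw = min(w, tw)              # 300: width capped at the widget width
ih = iw / ratio              # 150.0: height follows from the image ratio
assert (iw, ih) == (300, 150.0)  # scaled down with the aspect ratio preserved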
| 30.435185
| 140
| 0.625114
|
__all__ = ('Image', 'AsyncImage')
from kivy.uix.widget import Widget
from kivy.core.image import Image as CoreImage
from kivy.resources import resource_find
from kivy.properties import StringProperty, ObjectProperty, ListProperty, \
AliasProperty, BooleanProperty, NumericProperty, ColorProperty
from kivy.logger import Logger
Loader = None
class Image(Widget):
source = StringProperty(None)
texture = ObjectProperty(None, allownone=True)
texture_size = ListProperty([0, 0])
def get_image_ratio(self):
if self.texture:
return self.texture.width / float(self.texture.height)
return 1.
mipmap = BooleanProperty(False)
image_ratio = AliasProperty(get_image_ratio, bind=('texture',), cache=True)
color = ColorProperty([1, 1, 1, 1])
allow_stretch = BooleanProperty(False)
keep_ratio = BooleanProperty(True)
keep_data = BooleanProperty(False)
anim_delay = NumericProperty(.25)
anim_loop = NumericProperty(0)
nocache = BooleanProperty(False)
def get_norm_image_size(self):
if not self.texture:
return list(self.size)
ratio = self.image_ratio
w, h = self.size
tw, th = self.texture.size
if self.allow_stretch:
if not self.keep_ratio:
return [w, h]
iw = w
else:
iw = min(w, tw)
ih = iw / ratio
if ih > h:
if self.allow_stretch:
ih = h
else:
ih = min(h, th)
iw = ih * ratio
return [iw, ih]
norm_image_size = AliasProperty(get_norm_image_size, bind=('texture', 'size', 'allow_stretch', 'image_ratio', 'keep_ratio'), cache=True)
def __init__(self, **kwargs):
self._coreimage = None
self._loops = 0
update = self.texture_update
fbind = self.fbind
fbind('source', update)
fbind('mipmap', update)
super().__init__(**kwargs)
def texture_update(self, *largs):
self.set_texture_from_resource(self.source)
def set_texture_from_resource(self, resource):
if not resource:
self._clear_core_image()
return
source = resource_find(resource)
if not source:
Logger.error('Image: Not found <%s>' % resource)
self._clear_core_image()
return
if self._coreimage:
self._coreimage.unbind(on_texture=self._on_tex_change)
try:
self._coreimage = image = CoreImage(
source,
mipmap=self.mipmap,
anim_delay=self.anim_delay,
keep_data=self.keep_data,
nocache=self.nocache
)
except Exception:
Logger.error('Image: Error loading <%s>' % resource)
self._clear_core_image()
image = self._coreimage
if image:
image.bind(on_texture=self._on_tex_change)
self.texture = image.texture
def on_anim_delay(self, instance, value):
if self._coreimage is None:
return
self._coreimage.anim_delay = value
if value < 0:
self._coreimage.anim_reset(False)
def on_texture(self, instance, value):
self.texture_size = value.size if value else [0, 0]
def _clear_core_image(self):
if self._coreimage:
self._coreimage.unbind(on_texture=self._on_tex_change)
self.texture = None
self._coreimage = None
self._loops = 0
def _on_tex_change(self, *largs):
self.texture = self._coreimage.texture
ci = self._coreimage
if self.anim_loop and ci._anim_index == len(ci._image.textures) - 1:
self._loops += 1
if self.anim_loop == self._loops:
ci.anim_reset(False)
self._loops = 0
def reload(self):
self.remove_from_cache()
old_source = self.source
self.source = ''
self.source = old_source
def remove_from_cache(self):
if self._coreimage:
self._coreimage.remove_from_cache()
def on_nocache(self, *args):
if self.nocache:
self.remove_from_cache()
if self._coreimage:
self._coreimage._nocache = True
class AsyncImage(Image):
__events__ = ('on_error', 'on_load')
def __init__(self, **kwargs):
self._found_source = None
self._coreimage = None
global Loader
if not Loader:
from kivy.loader import Loader
self.fbind('source', self._load_source)
super().__init__(**kwargs)
def _load_source(self, *args):
source = self.source
if not source:
self._clear_core_image()
return
if not self.is_uri(source):
source = resource_find(source)
if not source:
Logger.error('AsyncImage: Not found <%s>' % self.source)
self._clear_core_image()
return
self._found_source = source
self._coreimage = image = Loader.image(
source,
nocache=self.nocache,
mipmap=self.mipmap,
anim_delay=self.anim_delay
)
image.bind(
on_load=self._on_source_load,
on_error=self._on_source_error,
on_texture=self._on_tex_change
)
self.texture = image.texture
def _on_source_load(self, value):
image = self._coreimage.image
if not image:
return
self.texture = image.texture
self.dispatch('on_load')
def _on_source_error(self, instance, error=None):
self.dispatch('on_error', error)
def on_error(self, error):
pass
def on_load(self, *args):
pass
def is_uri(self, filename):
proto = filename.split('://', 1)[0]
return proto in ('http', 'https', 'ftp', 'smb', 'S3')
def _clear_core_image(self):
if self._coreimage:
self._coreimage.unbind(on_load=self._on_source_load)
super()._clear_core_image()
self._found_source = None
def _on_tex_change(self, *largs):
if self._coreimage:
self.texture = self._coreimage.texture
def texture_update(self, *largs):
pass
def remove_from_cache(self):
if self._found_source:
Loader.remove_from_cache(self._found_source)
super().remove_from_cache()
| true
| true
|
79090eef96959629c484b5871518c33b54fcbf81
| 8,323
|
py
|
Python
|
beta/trim_primers.py
|
ArthurDondi/cDNA_Cupcake
|
528b9593b0ad166ac720be7c5c07a968730a2ce2
|
[
"BSD-3-Clause-Clear"
] | 205
|
2016-07-13T06:26:20.000Z
|
2022-03-03T06:29:43.000Z
|
beta/trim_primers.py
|
ArthurDondi/cDNA_Cupcake
|
528b9593b0ad166ac720be7c5c07a968730a2ce2
|
[
"BSD-3-Clause-Clear"
] | 186
|
2017-02-22T22:46:46.000Z
|
2022-03-23T16:16:15.000Z
|
beta/trim_primers.py
|
ArthurDondi/cDNA_Cupcake
|
528b9593b0ad166ac720be7c5c07a968730a2ce2
|
[
"BSD-3-Clause-Clear"
] | 93
|
2016-08-31T02:24:52.000Z
|
2022-02-24T14:01:27.000Z
|
"""
Experimental code for trimming primers & polyA tails from high-error-rate long reads
"""
import os, sys, pdb
from csv import DictWriter
from collections import namedtuple
from multiprocessing import Process
from Bio.Seq import Seq
from Bio import SeqIO
import parasail
ScoreTuple = namedtuple('ScoreTuple', ['score5', 'end5', 'score3', 'end3', 'endA'])
# for ONT using Clontech
#SEQ_5P = 'AAGCAGTGGTATCAACGCAGAGTACATGGGG'
#SEQ_3P_REV = 'GTATCAACGCAGAGTAC'
ISOSEQ_5P = 'GCAATGAAGTCGCAGGGTTGGG'
ISOSEQ_3P = 'GTACTCTGCGTTGATACCACTGCTT'
#SEQ_5P = 'GCAATGAAGTCGCAGGGTTGGGG'
#SEQ_5P = 'CAGGAAACAGCTATGACC'
#SEQ_3P_REV = 'AAGCAGTGGTATCAACGCAGAGTAC'
#SEQ_3P_REV = 'ACTGGCCGTCGTTTTAC'
MINSCORE_5P = 20
MINSCORE_3P = 20
MIN_A_LEN = 20
SCOREMAT = parasail.matrix_create("ACGT", 2, -5)
def trim5p3p_helper(r, seq_5p, seq_3p_rev):
"""
    Search for the 5' primer in the first 100 bp of the read and the 3' primer in the last 100 bp (scanned on the reverse complement)
"""
s1 = str(r.seq[:100])
s2 = str(r.reverse_complement().seq[:100])
o1 = parasail.sg_qx_trace(s1, seq_5p, 3, 1, SCOREMAT)
o2 = parasail.sg_qe_db_trace(s2, seq_3p_rev, 3, 1, SCOREMAT)
lenA = None
if o2.score >= MINSCORE_3P:
lenA = trimA(s2[o2.end_query + 1:])
if MIN_A_LEN == 0:
end3 = len(r.seq) - o2.end_query - 1
return ScoreTuple(score5=o1.score, end5=o1.end_query, score3=o2.score, end3=end3, endA=end3)
elif lenA is not None:
end3 = len(r.seq) - o2.end_query - 1
endA = end3 - lenA + 1
return ScoreTuple(score5=o1.score, end5=o1.end_query, score3=o2.score, end3=end3, endA=endA)
else:
end3 = len(r.seq) - o2.end_query - 1
return ScoreTuple(score5=o1.score, end5=o1.end_query, score3=o2.score, end3=end3, endA=end3)
def trimA(rev_seq):
if len(rev_seq) == 0:
return None
n_rev_seq = len(rev_seq)
mismatch = 0
i = 0
while mismatch < 2 and i < n_rev_seq:
if rev_seq[i]!='T':
mismatch += 1
i += 1
i -= 1
if i >= MIN_A_LEN:
return i
else:
return None
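# A quick, illustrative self-check of trimA (not part of the original script;
# the sequences are made-up examples evaluated with the default MIN_A_LEN of
# 20). trimA scans the reverse-complemented read for a leading run of 'T'
# (the polyA tail as seen from the 3' end), tolerating at most one non-T
# base, and returns the index of the last scanned position -- so a clean run
# of N T's yields N - 1.
def _demo_trimA():
    assert trimA('T' * 25) == 24                    # clean 25-nt tail
    assert trimA('T' * 10 + 'C' + 'T' * 14) == 24   # one mismatch tolerated
    assert trimA('T' * 5) is None                   # too short to count as a tail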
def trim5p3p_multithreaded(fastq_filename, output_prefix, seq_5p, seq_3p_rev, chunks):
    # (despite the name, work is fanned out to worker processes, not threads)
    # first figure out how many records there are
    with open(fastq_filename) as f:
        num_lines = sum(1 for _ in f)
num_records = num_lines // 4
chunk_size = (num_records//chunks) + (num_records%chunks>0)
print("{0} has {1} records, {2} per chunk".format(fastq_filename, num_records, chunk_size))
pools = []
records = []
count = 0
i = 1
for r in SeqIO.parse(open(fastq_filename), 'fastq'):
count += 1
records.append(r)
if count >= chunk_size:
p = Process(target=trim5p3p, args=(records, output_prefix+'.'+str(i), seq_5p, seq_3p_rev))
p.start()
print("Starting worker {0}...".format(i))
pools.append(p)
records = []
count = 0
i += 1
p = Process(target=trim5p3p, args=(records, output_prefix + '.' + str(i), seq_5p, seq_3p_rev))
p.start()
print("Starting worker {0}...".format(i))
pools.append(p)
for p in pools:
p.join()
# now combine all the files
f_FL = open(output_prefix+'.fl.fasta', 'w')
f_FL_clips = open(output_prefix+'.fl.clips', 'w')
f_nFL = open(output_prefix+'.nfl.fasta', 'w')
f_csv = open(output_prefix+'.csv', 'w')
for j in range(1, i+1):
p = output_prefix + '.' + str(j)
with open(p + '.fl.fasta') as h:
f_FL.write(h.read())
print("writing {0} into {1}...".format(h.name, f_FL.name))
with open(p + '.fl.clips') as h:
f_FL_clips.write(h.read())
print("writing {0} into {1}...".format(h.name, f_FL_clips.name))
with open(p + '.nfl.fasta') as h:
f_nFL.write(h.read())
print("writing {0} into {1}...".format(h.name, f_nFL.name))
with open(p + '.csv') as h:
f_csv.write(h.read())
print("writing {0} into {1}...".format(h.name, f_csv.name))
os.remove(p + '.fl.fasta')
os.remove(p + '.fl.clips')
os.remove(p + '.nfl.fasta')
os.remove(p + '.csv')
f_csv.close()
f_FL.close()
f_FL_clips.close()
f_nFL.close()
def trim5p3p(records, output_prefix, seq_5p, seq_3p_rev):
f_FL = open(output_prefix+'.fl.fasta', 'w')
f_FL_clips = open(output_prefix+'.fl.clips', 'w')
f_nFL = open(output_prefix+'.nfl.fasta', 'w')
f_csv = open(output_prefix+'.csv', 'w')
writer = DictWriter(f_csv, fieldnames=['id', 'end5', 'end3', 'endA', 'strand'])
writer.writeheader()
for r in records:
r2 = r.reverse_complement()
r2.id = r.id
t1 = trim5p3p_helper(r, seq_5p, seq_3p_rev)
t2 = trim5p3p_helper(r2, seq_5p, seq_3p_rev)
is_fl_flag1 = t1.score5 >= MINSCORE_5P and t1.score3 >= MINSCORE_3P and (MIN_A_LEN == 0 or t1.endA!=t1.end3)
is_fl_flag2 = t2.score5 >= MINSCORE_5P and t2.score3 >= MINSCORE_3P and (MIN_A_LEN == 0 or t2.endA!=t2.end3)
if is_fl_flag1:
if is_fl_flag2:
if t1.score5+t1.score3 > t2.score5+t2.score3:
strand = '+'
else:
strand = '-'
else: # pick t1
strand = '+'
elif is_fl_flag2:
strand = '-'
else:
strand = 'NA'
info = {'id': r.id,
'end5': 'NA',
'end3': 'NA',
'endA': 'NA',
'strand': 'NA'}
if strand == '+':
info['strand'] = '+'
info['end5'] = t1.end5
info['end3'] = t1.end3
info['endA'] = t1.endA
f_FL.write(">{0}\n{1}\n".format(r.id, r.seq[t1.end5:t1.endA]))
f_FL_clips.write(">{0}_5p strand:+ score:{1}\n{2}\n".format(r.id, t1.score5, r.seq[:t1.end5]))
f_FL_clips.write(">{0}_3p strand:+ score:{1}\n{2}\n".format(r.id, t1.score3, r.seq[t1.endA:]))
elif strand == '-':
info['strand'] = '-'
info['end5'] = t2.end5
info['end3'] = t2.end3
info['endA'] = t2.endA
f_FL.write(">{0}\n{1}\n".format(r2.id, r2.seq[t2.end5:t2.endA]))
f_FL_clips.write(">{0}_5p strand:- score:{1}\n{2}\n".format(r.id, t2.score5, r2.seq[:t2.end5]))
f_FL_clips.write(">{0}_3p strand:- score:{1}\n{2}\n".format(r.id, t2.score3, r2.seq[t2.endA:]))
else:
            # non-FL read: still trim primers/polyA away, using the better-scoring orientation
if t1.score5+t1.score3 > t2.score5+t2.score3:
f_nFL.write(">{0} strand:+?\n{1}\n".format(r.id, r.seq[t1.end5:t1.endA]))
else:
f_nFL.write(">{0} strand:-?\n{1}\n".format(r2.id, r2.seq[t2.end5:t2.endA]))
writer.writerow(info)
f_csv.close()
f_FL.close()
f_FL_clips.close()
f_nFL.close()
if __name__ == "__main__":
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument("fastq_filename")
parser.add_argument("output_prefix")
parser.add_argument("-p", "--primer_fasta", default=None, help="Primer fasta file (if not given, use IsoSeq defaults)")
parser.add_argument("-n", "--chunks", default=10, type=int, help="Number of chunks (CPUs) to use, default 10")
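    # Expected --primer_fasta layout (an illustrative sketch; the sequences
    # shown are just the Iso-Seq defaults from above): exactly two entries,
    # named 5p and 3p, in that order:
    #   >5p
    #   GCAATGAAGTCGCAGGGTTGGG
    #   >3p
    #   GTACTCTGCGTTGATACCACTGCTT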
args = parser.parse_args()
if args.primer_fasta is None:
seq_5p = ISOSEQ_5P
seq_3p = ISOSEQ_3P
print(f"Using Iso-Seq default 5' primer sequence: {seq_5p}")
print(f"Using Iso-Seq default 3' primer sequence: {seq_3p}")
else:
reader = SeqIO.parse(open(args.primer_fasta), 'fasta')
r = next(reader)
        if r.id != '5p':  # SeqIO records expose the FASTA header name as .id
print("ERROR: the first entry in {0} should be >5p! Abort!".format(args.primer_fasta))
sys.exit(-1)
seq_5p = str(r.seq)
r = next(reader)
        if r.id != '3p':
print("ERROR: the second entry in {0} should be >3p! Abort!".format(args.primer_fasta))
sys.exit(-1)
seq_3p = str(r.seq)
print(f"Reading in 5' primer sequence: {seq_5p}")
print(f"Reading in 3' primer sequence: {seq_3p}")
seq_3p_rev = str(Seq(seq_3p).reverse_complement())
trim5p3p_multithreaded(args.fastq_filename, args.output_prefix, seq_5p, seq_3p_rev, args.chunks)
| 36.344978
| 123
| 0.58104
|
import os, sys, pdb
from csv import DictWriter
from collections import namedtuple
from multiprocessing import Process
from Bio.Seq import Seq
from Bio import SeqIO
import parasail
ScoreTuple = namedtuple('ScoreTuple', ['score5', 'end5', 'score3', 'end3', 'endA'])
ISOSEQ_5P = 'GCAATGAAGTCGCAGGGTTGGG'
ISOSEQ_3P = 'GTACTCTGCGTTGATACCACTGCTT'
MINSCORE_5P = 20
MINSCORE_3P = 20
MIN_A_LEN = 20
SCOREMAT = parasail.matrix_create("ACGT", 2, -5)
def trim5p3p_helper(r, seq_5p, seq_3p_rev):
s1 = str(r.seq[:100])
s2 = str(r.reverse_complement().seq[:100])
o1 = parasail.sg_qx_trace(s1, seq_5p, 3, 1, SCOREMAT)
o2 = parasail.sg_qe_db_trace(s2, seq_3p_rev, 3, 1, SCOREMAT)
lenA = None
if o2.score >= MINSCORE_3P:
lenA = trimA(s2[o2.end_query + 1:])
if MIN_A_LEN == 0:
end3 = len(r.seq) - o2.end_query - 1
return ScoreTuple(score5=o1.score, end5=o1.end_query, score3=o2.score, end3=end3, endA=end3)
elif lenA is not None:
end3 = len(r.seq) - o2.end_query - 1
endA = end3 - lenA + 1
return ScoreTuple(score5=o1.score, end5=o1.end_query, score3=o2.score, end3=end3, endA=endA)
else:
end3 = len(r.seq) - o2.end_query - 1
return ScoreTuple(score5=o1.score, end5=o1.end_query, score3=o2.score, end3=end3, endA=end3)
def trimA(rev_seq):
if len(rev_seq) == 0:
return None
n_rev_seq = len(rev_seq)
mismatch = 0
i = 0
while mismatch < 2 and i < n_rev_seq:
if rev_seq[i]!='T':
mismatch += 1
i += 1
i -= 1
if i >= MIN_A_LEN:
return i
else:
return None
def trim5p3p_multithreaded(fastq_filename, output_prefix, seq_5p, seq_3p_rev, chunks):
    with open(fastq_filename) as f:
        num_lines = sum(1 for _ in f)
num_records = num_lines // 4
chunk_size = (num_records//chunks) + (num_records%chunks>0)
print("{0} has {1} records, {2} per chunk".format(fastq_filename, num_records, chunk_size))
pools = []
records = []
count = 0
i = 1
for r in SeqIO.parse(open(fastq_filename), 'fastq'):
count += 1
records.append(r)
if count >= chunk_size:
p = Process(target=trim5p3p, args=(records, output_prefix+'.'+str(i), seq_5p, seq_3p_rev))
p.start()
print("Starting worker {0}...".format(i))
pools.append(p)
records = []
count = 0
i += 1
p = Process(target=trim5p3p, args=(records, output_prefix + '.' + str(i), seq_5p, seq_3p_rev))
p.start()
print("Starting worker {0}...".format(i))
pools.append(p)
for p in pools:
p.join()
f_FL = open(output_prefix+'.fl.fasta', 'w')
f_FL_clips = open(output_prefix+'.fl.clips', 'w')
f_nFL = open(output_prefix+'.nfl.fasta', 'w')
f_csv = open(output_prefix+'.csv', 'w')
for j in range(1, i+1):
p = output_prefix + '.' + str(j)
with open(p + '.fl.fasta') as h:
f_FL.write(h.read())
print("writing {0} into {1}...".format(h.name, f_FL.name))
with open(p + '.fl.clips') as h:
f_FL_clips.write(h.read())
print("writing {0} into {1}...".format(h.name, f_FL_clips.name))
with open(p + '.nfl.fasta') as h:
f_nFL.write(h.read())
print("writing {0} into {1}...".format(h.name, f_nFL.name))
with open(p + '.csv') as h:
f_csv.write(h.read())
print("writing {0} into {1}...".format(h.name, f_csv.name))
os.remove(p + '.fl.fasta')
os.remove(p + '.fl.clips')
os.remove(p + '.nfl.fasta')
os.remove(p + '.csv')
f_csv.close()
f_FL.close()
f_FL_clips.close()
f_nFL.close()
def trim5p3p(records, output_prefix, seq_5p, seq_3p_rev):
f_FL = open(output_prefix+'.fl.fasta', 'w')
f_FL_clips = open(output_prefix+'.fl.clips', 'w')
f_nFL = open(output_prefix+'.nfl.fasta', 'w')
f_csv = open(output_prefix+'.csv', 'w')
writer = DictWriter(f_csv, fieldnames=['id', 'end5', 'end3', 'endA', 'strand'])
writer.writeheader()
for r in records:
r2 = r.reverse_complement()
r2.id = r.id
t1 = trim5p3p_helper(r, seq_5p, seq_3p_rev)
t2 = trim5p3p_helper(r2, seq_5p, seq_3p_rev)
is_fl_flag1 = t1.score5 >= MINSCORE_5P and t1.score3 >= MINSCORE_3P and (MIN_A_LEN == 0 or t1.endA!=t1.end3)
is_fl_flag2 = t2.score5 >= MINSCORE_5P and t2.score3 >= MINSCORE_3P and (MIN_A_LEN == 0 or t2.endA!=t2.end3)
if is_fl_flag1:
if is_fl_flag2:
if t1.score5+t1.score3 > t2.score5+t2.score3:
strand = '+'
else:
strand = '-'
else:
strand = '+'
elif is_fl_flag2:
strand = '-'
else:
strand = 'NA'
info = {'id': r.id,
'end5': 'NA',
'end3': 'NA',
'endA': 'NA',
'strand': 'NA'}
if strand == '+':
info['strand'] = '+'
info['end5'] = t1.end5
info['end3'] = t1.end3
info['endA'] = t1.endA
f_FL.write(">{0}\n{1}\n".format(r.id, r.seq[t1.end5:t1.endA]))
f_FL_clips.write(">{0}_5p strand:+ score:{1}\n{2}\n".format(r.id, t1.score5, r.seq[:t1.end5]))
f_FL_clips.write(">{0}_3p strand:+ score:{1}\n{2}\n".format(r.id, t1.score3, r.seq[t1.endA:]))
elif strand == '-':
info['strand'] = '-'
info['end5'] = t2.end5
info['end3'] = t2.end3
info['endA'] = t2.endA
f_FL.write(">{0}\n{1}\n".format(r2.id, r2.seq[t2.end5:t2.endA]))
f_FL_clips.write(">{0}_5p strand:- score:{1}\n{2}\n".format(r.id, t2.score5, r2.seq[:t2.end5]))
f_FL_clips.write(">{0}_3p strand:- score:{1}\n{2}\n".format(r.id, t2.score3, r2.seq[t2.endA:]))
else:
if t1.score5+t1.score3 > t2.score5+t2.score3:
f_nFL.write(">{0} strand:+?\n{1}\n".format(r.id, r.seq[t1.end5:t1.endA]))
else:
f_nFL.write(">{0} strand:-?\n{1}\n".format(r2.id, r2.seq[t2.end5:t2.endA]))
writer.writerow(info)
f_csv.close()
f_FL.close()
f_FL_clips.close()
f_nFL.close()
if __name__ == "__main__":
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument("fastq_filename")
parser.add_argument("output_prefix")
parser.add_argument("-p", "--primer_fasta", default=None, help="Primer fasta file (if not given, use IsoSeq defaults)")
parser.add_argument("-n", "--chunks", default=10, type=int, help="Number of chunks (CPUs) to use, default 10")
args = parser.parse_args()
if args.primer_fasta is None:
seq_5p = ISOSEQ_5P
seq_3p = ISOSEQ_3P
print(f"Using Iso-Seq default 5' primer sequence: {seq_5p}")
print(f"Using Iso-Seq default 3' primer sequence: {seq_3p}")
else:
reader = SeqIO.parse(open(args.primer_fasta), 'fasta')
r = next(reader)
        if r.id != '5p':
print("ERROR: the first entry in {0} should be >5p! Abort!".format(args.primer_fasta))
sys.exit(-1)
seq_5p = str(r.seq)
r = next(reader)
        if r.id != '3p':
print("ERROR: the second entry in {0} should be >3p! Abort!".format(args.primer_fasta))
sys.exit(-1)
seq_3p = str(r.seq)
print(f"Reading in 5' primer sequence: {seq_5p}")
print(f"Reading in 3' primer sequence: {seq_3p}")
seq_3p_rev = str(Seq(seq_3p).reverse_complement())
trim5p3p_multithreaded(args.fastq_filename, args.output_prefix, seq_5p, seq_3p_rev, args.chunks)
| true
| true
|
79090fbe9b8b68ec68479f9a113253ecc8047899
| 2,523
|
py
|
Python
|
chapter4_serving_patterns/prep_pred_pattern/src/proto/prediction_service_pb2_grpc.py
|
sudabon/ml-system-in-actions
|
4fb1b3e53e4ed7c36e82f7d4b5570e3959aef525
|
[
"MIT"
] | 133
|
2021-04-24T09:57:58.000Z
|
2022-03-29T06:23:56.000Z
|
chapter4_serving_patterns/prep_pred_pattern/src/proto/prediction_service_pb2_grpc.py
|
sudabon/ml-system-in-actions
|
4fb1b3e53e4ed7c36e82f7d4b5570e3959aef525
|
[
"MIT"
] | 6
|
2021-04-24T09:58:40.000Z
|
2021-11-19T04:04:40.000Z
|
chapter4_serving_patterns/prep_pred_pattern/src/proto/prediction_service_pb2_grpc.py
|
sudabon/ml-system-in-actions
|
4fb1b3e53e4ed7c36e82f7d4b5570e3959aef525
|
[
"MIT"
] | 40
|
2021-05-21T23:32:40.000Z
|
2022-03-30T00:33:08.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import src.proto.predict_pb2 as predict__pb2
class PredictionServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Predict = channel.unary_unary(
"/onnxruntime.server.PredictionService/Predict",
request_serializer=predict__pb2.PredictRequest.SerializeToString,
response_deserializer=predict__pb2.PredictResponse.FromString,
)
class PredictionServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def Predict(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def add_PredictionServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
"Predict": grpc.unary_unary_rpc_method_handler(
servicer.Predict,
request_deserializer=predict__pb2.PredictRequest.FromString,
response_serializer=predict__pb2.PredictResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler("onnxruntime.server.PredictionService", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class PredictionService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def Predict(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/onnxruntime.server.PredictionService/Predict",
predict__pb2.PredictRequest.SerializeToString,
predict__pb2.PredictResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
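# A minimal client-side usage sketch (an illustrative assumption, not part of
# the generated module; the server address is a placeholder and the request
# fields depend on what predict.proto defines):
def _example_predict_call():
    channel = grpc.insecure_channel("localhost:50051")
    stub = PredictionServiceStub(channel)
    request = predict__pb2.PredictRequest()  # populate inputs as required
    return stub.Predict(request, timeout=10.0)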
| 32.766234
| 119
| 0.67063
|
import grpc
import src.proto.predict_pb2 as predict__pb2
class PredictionServiceStub(object):
def __init__(self, channel):
self.Predict = channel.unary_unary(
"/onnxruntime.server.PredictionService/Predict",
request_serializer=predict__pb2.PredictRequest.SerializeToString,
response_deserializer=predict__pb2.PredictResponse.FromString,
)
class PredictionServiceServicer(object):
def Predict(self, request, context):
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def add_PredictionServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
"Predict": grpc.unary_unary_rpc_method_handler(
servicer.Predict,
request_deserializer=predict__pb2.PredictRequest.FromString,
response_serializer=predict__pb2.PredictResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler("onnxruntime.server.PredictionService", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class PredictionService(object):
@staticmethod
def Predict(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/onnxruntime.server.PredictionService/Predict",
predict__pb2.PredictRequest.SerializeToString,
predict__pb2.PredictResponse.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
| true
| true
|
79090ff89d1a5582585b1d7794f226b636dd9b02
| 20,150
|
py
|
Python
|
sleepyenv/lib/python2.7/site-packages/Flask_Admin-1.2.0-py2.7.egg/flask_admin/contrib/mongoengine/view.py
|
hexlism/css_platform
|
0c80cb314e7e3ecf73de2feec5349c04c0dd581b
|
[
"Apache-2.0"
] | null | null | null |
sleepyenv/lib/python2.7/site-packages/Flask_Admin-1.2.0-py2.7.egg/flask_admin/contrib/mongoengine/view.py
|
hexlism/css_platform
|
0c80cb314e7e3ecf73de2feec5349c04c0dd581b
|
[
"Apache-2.0"
] | null | null | null |
sleepyenv/lib/python2.7/site-packages/Flask_Admin-1.2.0-py2.7.egg/flask_admin/contrib/mongoengine/view.py
|
hexlism/css_platform
|
0c80cb314e7e3ecf73de2feec5349c04c0dd581b
|
[
"Apache-2.0"
] | null | null | null |
import logging
from flask import request, flash, abort, Response
from flask_admin import expose
from flask_admin.babel import gettext, ngettext, lazy_gettext
from flask_admin.model import BaseModelView
from flask_admin.model.form import wrap_fields_in_fieldlist
from flask_admin.model.fields import ListEditableFieldList
from flask_admin._compat import iteritems, string_types
import mongoengine
import gridfs
from mongoengine.connection import get_db
from bson.objectid import ObjectId
from flask_admin.actions import action
from .filters import FilterConverter, BaseMongoEngineFilter
from .form import get_form, CustomModelConverter
from .typefmt import DEFAULT_FORMATTERS
from .tools import parse_like_term
from .helpers import format_error
from .ajax import process_ajax_references, create_ajax_loader
from .subdoc import convert_subdocuments
# Set up logger
log = logging.getLogger("flask-admin.mongo")
SORTABLE_FIELDS = set((
mongoengine.StringField,
mongoengine.IntField,
mongoengine.FloatField,
mongoengine.BooleanField,
mongoengine.DateTimeField,
mongoengine.ComplexDateTimeField,
mongoengine.ObjectIdField,
mongoengine.DecimalField,
mongoengine.ReferenceField,
mongoengine.EmailField,
mongoengine.UUIDField,
mongoengine.URLField
))
class ModelView(BaseModelView):
"""
MongoEngine model scaffolding.
"""
column_filters = None
"""
Collection of the column filters.
Can contain either field names or instances of
:class:`flask_admin.contrib.mongoengine.filters.BaseFilter`
classes.
For example::
class MyModelView(BaseModelView):
column_filters = ('user', 'email')
or::
class MyModelView(BaseModelView):
column_filters = (BooleanEqualFilter(User.name, 'Name'))
"""
model_form_converter = CustomModelConverter
"""
Model form conversion class. Use this to implement custom
field conversion logic.
Custom class should be derived from the
`flask_admin.contrib.mongoengine.form.CustomModelConverter`.
For example::
class MyModelConverter(AdminModelConverter):
pass
class MyAdminView(ModelView):
model_form_converter = MyModelConverter
"""
object_id_converter = ObjectId
"""
Mongodb ``_id`` value conversion function. Default is `bson.ObjectId`.
    Use this if you are using a String, Binary, or other non-ObjectId type as the document id.
For example::
class MyModelView(BaseModelView):
object_id_converter = int
or::
class MyModelView(BaseModelView):
object_id_converter = str
"""
filter_converter = FilterConverter()
"""
Field to filter converter.
Override this attribute to use a non-default converter.
"""
column_type_formatters = DEFAULT_FORMATTERS
"""
Customized type formatters for MongoEngine backend
"""
allowed_search_types = (mongoengine.StringField,
mongoengine.URLField,
mongoengine.EmailField)
"""
List of allowed search field types.
"""
form_subdocuments = None
"""
Subdocument configuration options.
    This field accepts a dictionary, where each key is a field name and each value is either a dictionary or an
    instance of `flask_admin.contrib.EmbeddedForm`.
Consider following example::
class Comment(db.EmbeddedDocument):
name = db.StringField(max_length=20, required=True)
value = db.StringField(max_length=20)
class Post(db.Document):
text = db.StringField(max_length=30)
data = db.EmbeddedDocumentField(Comment)
class MyAdmin(ModelView):
form_subdocuments = {
'data': {
'form_columns': ('name',)
}
}
    In this example, the `Post` model has a child `Comment` subdocument. When generating the form for the embedded
    `Comment` document, Flask-Admin will only create the `name` field.
It is also possible to use class-based embedded document configuration::
class CommentEmbed(EmbeddedForm):
form_columns = ('name',)
class MyAdmin(ModelView):
form_subdocuments = {
'data': CommentEmbed()
}
Arbitrary depth nesting is supported::
class SomeEmbed(EmbeddedForm):
form_excluded_columns = ('test',)
class CommentEmbed(EmbeddedForm):
form_columns = ('name',)
form_subdocuments = {
'inner': SomeEmbed()
}
class MyAdmin(ModelView):
form_subdocuments = {
'data': CommentEmbed()
}
    There's also support for forms embedded into a `ListField`. All you have
    to do is create a nested rule with `None` as the name. Even though it
    is slightly confusing, that's how Flask-MongoEngine creates
    form fields embedded into a ListField::
class Comment(db.EmbeddedDocument):
name = db.StringField(max_length=20, required=True)
value = db.StringField(max_length=20)
class Post(db.Document):
text = db.StringField(max_length=30)
data = db.ListField(db.EmbeddedDocumentField(Comment))
class MyAdmin(ModelView):
form_subdocuments = {
'data': {
'form_subdocuments': {
None: {
'form_columns': ('name',)
}
}
}
}
"""
def __init__(self, model, name=None,
category=None, endpoint=None, url=None, static_folder=None,
menu_class_name=None, menu_icon_type=None, menu_icon_value=None):
"""
Constructor
:param model:
Model class
:param name:
Display name
:param category:
Display category
:param endpoint:
Endpoint
:param url:
Custom URL
:param menu_class_name:
Optional class name for the menu item.
:param menu_icon_type:
Optional icon. Possible icon types:
- `flask_admin.consts.ICON_TYPE_GLYPH` - Bootstrap glyph icon
- `flask_admin.consts.ICON_TYPE_FONT_AWESOME` - Font Awesome icon
- `flask_admin.consts.ICON_TYPE_IMAGE` - Image relative to Flask static directory
- `flask_admin.consts.ICON_TYPE_IMAGE_URL` - Image with full URL
:param menu_icon_value:
Icon glyph name or URL, depending on `menu_icon_type` setting
"""
self._search_fields = []
super(ModelView, self).__init__(model, name, category, endpoint, url, static_folder,
menu_class_name=menu_class_name,
menu_icon_type=menu_icon_type,
menu_icon_value=menu_icon_value)
self._primary_key = self.scaffold_pk()
def _refresh_cache(self):
"""
Refresh cache.
"""
# Process subdocuments
if self.form_subdocuments is None:
self.form_subdocuments = {}
self._form_subdocuments = convert_subdocuments(self.form_subdocuments)
# Cache other properties
super(ModelView, self)._refresh_cache()
def _process_ajax_references(self):
"""
AJAX endpoint is exposed by top-level admin view class, but
subdocuments might have AJAX references too.
This method will recursively go over subdocument configuration
and will precompute AJAX references for them ensuring that
subdocuments can also use AJAX to populate their ReferenceFields.
"""
references = super(ModelView, self)._process_ajax_references()
return process_ajax_references(references, self)
def _get_model_fields(self, model=None):
"""
Inspect model and return list of model fields
:param model:
Model to inspect
"""
if model is None:
model = self.model
return sorted(iteritems(model._fields), key=lambda n: n[1].creation_counter)
def scaffold_pk(self):
# MongoEngine models have predefined 'id' as a key
return 'id'
def get_pk_value(self, model):
"""
Return the primary key value from the model instance
:param model:
Model instance
"""
return model.pk
def scaffold_list_columns(self):
"""
Scaffold list columns
"""
columns = []
for n, f in self._get_model_fields():
# Verify type
field_class = type(f)
if (field_class == mongoengine.ListField and
isinstance(f.field, mongoengine.EmbeddedDocumentField)):
continue
if field_class == mongoengine.EmbeddedDocumentField:
continue
if self.column_display_pk or field_class != mongoengine.ObjectIdField:
columns.append(n)
return columns
def scaffold_sortable_columns(self):
"""
Return a dictionary of sortable columns (name, field)
"""
columns = {}
for n, f in self._get_model_fields():
if type(f) in SORTABLE_FIELDS:
if self.column_display_pk or type(f) != mongoengine.ObjectIdField:
columns[n] = f
return columns
def init_search(self):
"""
Init search
"""
if self.column_searchable_list:
for p in self.column_searchable_list:
if isinstance(p, string_types):
p = self.model._fields.get(p)
if p is None:
raise Exception('Invalid search field')
field_type = type(p)
# Check type
if (field_type not in self.allowed_search_types):
raise Exception('Can only search on text columns. ' +
'Failed to setup search for "%s"' % p)
self._search_fields.append(p)
return bool(self._search_fields)
def scaffold_filters(self, name):
"""
Return filter object(s) for the field
:param name:
Either field name or field instance
"""
if isinstance(name, string_types):
attr = self.model._fields.get(name)
else:
attr = name
if attr is None:
raise Exception('Failed to find field for filter: %s' % name)
# Find name
visible_name = None
if not isinstance(name, string_types):
visible_name = self.get_column_name(attr.name)
if not visible_name:
visible_name = self.get_column_name(name)
# Convert filter
type_name = type(attr).__name__
flt = self.filter_converter.convert(type_name,
attr,
visible_name)
return flt
def is_valid_filter(self, filter):
"""
Validate if the provided filter is a valid MongoEngine filter
:param filter:
Filter object
"""
return isinstance(filter, BaseMongoEngineFilter)
def scaffold_form(self):
"""
Create form from the model.
"""
form_class = get_form(self.model,
self.model_form_converter(self),
base_class=self.form_base_class,
only=self.form_columns,
exclude=self.form_excluded_columns,
field_args=self.form_args,
extra_fields=self.form_extra_fields)
return form_class
def scaffold_list_form(self, custom_fieldlist=ListEditableFieldList,
validators=None):
"""
Create form for the `index_view` using only the columns from
`self.column_editable_list`.
:param validators:
`form_args` dict with only validators
{'name': {'validators': [required()]}}
:param custom_fieldlist:
A WTForm FieldList class. By default, `ListEditableFieldList`.
"""
form_class = get_form(self.model,
self.model_form_converter(self),
base_class=self.form_base_class,
only=self.column_editable_list,
field_args=validators)
return wrap_fields_in_fieldlist(self.form_base_class,
form_class,
custom_fieldlist)
# AJAX foreignkey support
def _create_ajax_loader(self, name, opts):
return create_ajax_loader(self.model, name, name, opts)
def get_query(self):
"""
Returns the QuerySet for this view. By default, it returns all the
objects for the current model.
"""
return self.model.objects
def _search(self, query, search_term):
        # TODO: Unfortunately, MongoEngine contains a bug which
        # prevents running complex Q queries and, as a result,
        # Flask-Admin does not support per-word searching like
        # other backends do
op, term = parse_like_term(search_term)
criteria = None
for field in self._search_fields:
flt = {'%s__%s' % (field.name, op): term}
q = mongoengine.Q(**flt)
if criteria is None:
criteria = q
else:
criteria |= q
return query.filter(criteria)
def get_list(self, page, sort_column, sort_desc, search, filters,
execute=True):
"""
Get list of objects from MongoEngine
:param page:
Page number
:param sort_column:
Sort column
:param sort_desc:
Sort descending
:param search:
Search criteria
:param filters:
List of applied filters
:param execute:
Run query immediately or not
"""
query = self.get_query()
# Filters
if self._filters:
for flt, flt_name, value in filters:
f = self._filters[flt]
query = f.apply(query, f.clean(value))
# Search
if self._search_supported and search:
query = self._search(query, search)
# Get count
count = query.count() if not self.simple_list_pager else None
# Sorting
if sort_column:
query = query.order_by('%s%s' % ('-' if sort_desc else '', sort_column))
else:
order = self._get_default_order()
if order:
query = query.order_by('%s%s' % ('-' if order[1] else '', order[0]))
# Pagination
if page is not None:
query = query.skip(page * self.page_size)
query = query.limit(self.page_size)
if execute:
query = query.all()
return count, query
def get_one(self, id):
"""
Return a single model instance by its ID
:param id:
Model ID
"""
try:
return self.get_query().filter(pk=id).first()
except mongoengine.ValidationError as ex:
flash(gettext('Failed to get model. %(error)s',
error=format_error(ex)),
'error')
return None
def create_model(self, form):
"""
Create model helper
:param form:
Form instance
"""
try:
model = self.model()
form.populate_obj(model)
self._on_model_change(form, model, True)
model.save()
except Exception as ex:
if not self.handle_view_exception(ex):
flash(gettext('Failed to create record. %(error)s',
error=format_error(ex)),
'error')
log.exception('Failed to create record.')
return False
else:
self.after_model_change(form, model, True)
return model
def update_model(self, form, model):
"""
Update model helper
:param form:
Form instance
:param model:
Model instance to update
"""
try:
form.populate_obj(model)
self._on_model_change(form, model, False)
model.save()
except Exception as ex:
if not self.handle_view_exception(ex):
flash(gettext('Failed to update record. %(error)s',
error=format_error(ex)),
'error')
log.exception('Failed to update record.')
return False
else:
self.after_model_change(form, model, False)
return True
def delete_model(self, model):
"""
Delete model helper
:param model:
Model instance
"""
try:
self.on_model_delete(model)
model.delete()
except Exception as ex:
if not self.handle_view_exception(ex):
flash(gettext('Failed to delete record. %(error)s',
error=format_error(ex)),
'error')
log.exception('Failed to delete record.')
return False
else:
self.after_model_delete(model)
return True
# FileField access API
@expose('/api/file/')
def api_file_view(self):
pk = request.args.get('id')
coll = request.args.get('coll')
db = request.args.get('db', 'default')
if not pk or not coll or not db:
abort(404)
fs = gridfs.GridFS(get_db(db), coll)
data = fs.get(self.object_id_converter(pk))
if not data:
abort(404)
return Response(data.read(),
content_type=data.content_type,
headers={
'Content-Length': data.length
})
# Default model actions
def is_action_allowed(self, name):
# Check delete action permission
if name == 'delete' and not self.can_delete:
return False
return super(ModelView, self).is_action_allowed(name)
@action('delete',
lazy_gettext('Delete'),
lazy_gettext('Are you sure you want to delete selected records?'))
def action_delete(self, ids):
try:
count = 0
all_ids = [self.object_id_converter(pk) for pk in ids]
for obj in self.get_query().in_bulk(all_ids).values():
count += self.delete_model(obj)
flash(ngettext('Record was successfully deleted.',
'%(count)s records were successfully deleted.',
count,
count=count))
except Exception as ex:
if not self.handle_view_exception(ex):
flash(gettext('Failed to delete records. %(error)s', error=str(ex)),
'error')
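# A minimal registration sketch (an illustrative assumption, not part of
# Flask-Admin itself): `admin` is a flask_admin.Admin instance and
# `SomeDocument` is a mongoengine.Document subclass with a `name` field.
def _example_admin_registration(admin, SomeDocument):
    class SomeDocumentView(ModelView):
        column_searchable_list = ('name',)
    admin.add_view(SomeDocumentView(SomeDocument, name='Some documents'))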
| 31.095679
| 114
| 0.551663
|
import logging
from flask import request, flash, abort, Response
from flask_admin import expose
from flask_admin.babel import gettext, ngettext, lazy_gettext
from flask_admin.model import BaseModelView
from flask_admin.model.form import wrap_fields_in_fieldlist
from flask_admin.model.fields import ListEditableFieldList
from flask_admin._compat import iteritems, string_types
import mongoengine
import gridfs
from mongoengine.connection import get_db
from bson.objectid import ObjectId
from flask_admin.actions import action
from .filters import FilterConverter, BaseMongoEngineFilter
from .form import get_form, CustomModelConverter
from .typefmt import DEFAULT_FORMATTERS
from .tools import parse_like_term
from .helpers import format_error
from .ajax import process_ajax_references, create_ajax_loader
from .subdoc import convert_subdocuments
log = logging.getLogger("flask-admin.mongo")
SORTABLE_FIELDS = set((
mongoengine.StringField,
mongoengine.IntField,
mongoengine.FloatField,
mongoengine.BooleanField,
mongoengine.DateTimeField,
mongoengine.ComplexDateTimeField,
mongoengine.ObjectIdField,
mongoengine.DecimalField,
mongoengine.ReferenceField,
mongoengine.EmailField,
mongoengine.UUIDField,
mongoengine.URLField
))
class ModelView(BaseModelView):
column_filters = None
model_form_converter = CustomModelConverter
object_id_converter = ObjectId
filter_converter = FilterConverter()
column_type_formatters = DEFAULT_FORMATTERS
allowed_search_types = (mongoengine.StringField,
mongoengine.URLField,
mongoengine.EmailField)
form_subdocuments = None
def __init__(self, model, name=None,
category=None, endpoint=None, url=None, static_folder=None,
menu_class_name=None, menu_icon_type=None, menu_icon_value=None):
self._search_fields = []
super(ModelView, self).__init__(model, name, category, endpoint, url, static_folder,
menu_class_name=menu_class_name,
menu_icon_type=menu_icon_type,
menu_icon_value=menu_icon_value)
self._primary_key = self.scaffold_pk()
def _refresh_cache(self):
if self.form_subdocuments is None:
self.form_subdocuments = {}
self._form_subdocuments = convert_subdocuments(self.form_subdocuments)
super(ModelView, self)._refresh_cache()
def _process_ajax_references(self):
references = super(ModelView, self)._process_ajax_references()
return process_ajax_references(references, self)
def _get_model_fields(self, model=None):
if model is None:
model = self.model
return sorted(iteritems(model._fields), key=lambda n: n[1].creation_counter)
def scaffold_pk(self):
return 'id'
def get_pk_value(self, model):
return model.pk
def scaffold_list_columns(self):
columns = []
for n, f in self._get_model_fields():
field_class = type(f)
if (field_class == mongoengine.ListField and
isinstance(f.field, mongoengine.EmbeddedDocumentField)):
continue
if field_class == mongoengine.EmbeddedDocumentField:
continue
if self.column_display_pk or field_class != mongoengine.ObjectIdField:
columns.append(n)
return columns
def scaffold_sortable_columns(self):
columns = {}
for n, f in self._get_model_fields():
if type(f) in SORTABLE_FIELDS:
if self.column_display_pk or type(f) != mongoengine.ObjectIdField:
columns[n] = f
return columns
def init_search(self):
if self.column_searchable_list:
for p in self.column_searchable_list:
if isinstance(p, string_types):
p = self.model._fields.get(p)
if p is None:
raise Exception('Invalid search field')
field_type = type(p)
if (field_type not in self.allowed_search_types):
raise Exception('Can only search on text columns. ' +
'Failed to setup search for "%s"' % p)
self._search_fields.append(p)
return bool(self._search_fields)
def scaffold_filters(self, name):
if isinstance(name, string_types):
attr = self.model._fields.get(name)
else:
attr = name
if attr is None:
raise Exception('Failed to find field for filter: %s' % name)
visible_name = None
if not isinstance(name, string_types):
visible_name = self.get_column_name(attr.name)
if not visible_name:
visible_name = self.get_column_name(name)
type_name = type(attr).__name__
flt = self.filter_converter.convert(type_name,
attr,
visible_name)
return flt
def is_valid_filter(self, filter):
return isinstance(filter, BaseMongoEngineFilter)
def scaffold_form(self):
form_class = get_form(self.model,
self.model_form_converter(self),
base_class=self.form_base_class,
only=self.form_columns,
exclude=self.form_excluded_columns,
field_args=self.form_args,
extra_fields=self.form_extra_fields)
return form_class
def scaffold_list_form(self, custom_fieldlist=ListEditableFieldList,
validators=None):
form_class = get_form(self.model,
self.model_form_converter(self),
base_class=self.form_base_class,
only=self.column_editable_list,
field_args=validators)
return wrap_fields_in_fieldlist(self.form_base_class,
form_class,
custom_fieldlist)
def _create_ajax_loader(self, name, opts):
return create_ajax_loader(self.model, name, name, opts)
def get_query(self):
return self.model.objects
def _search(self, query, search_term):
op, term = parse_like_term(search_term)
criteria = None
for field in self._search_fields:
flt = {'%s__%s' % (field.name, op): term}
q = mongoengine.Q(**flt)
if criteria is None:
criteria = q
else:
criteria |= q
return query.filter(criteria)
def get_list(self, page, sort_column, sort_desc, search, filters,
execute=True):
query = self.get_query()
if self._filters:
for flt, flt_name, value in filters:
f = self._filters[flt]
query = f.apply(query, f.clean(value))
if self._search_supported and search:
query = self._search(query, search)
count = query.count() if not self.simple_list_pager else None
if sort_column:
query = query.order_by('%s%s' % ('-' if sort_desc else '', sort_column))
else:
order = self._get_default_order()
if order:
query = query.order_by('%s%s' % ('-' if order[1] else '', order[0]))
if page is not None:
query = query.skip(page * self.page_size)
query = query.limit(self.page_size)
if execute:
query = query.all()
return count, query
def get_one(self, id):
try:
return self.get_query().filter(pk=id).first()
except mongoengine.ValidationError as ex:
flash(gettext('Failed to get model. %(error)s',
error=format_error(ex)),
'error')
return None
def create_model(self, form):
try:
model = self.model()
form.populate_obj(model)
self._on_model_change(form, model, True)
model.save()
except Exception as ex:
if not self.handle_view_exception(ex):
flash(gettext('Failed to create record. %(error)s',
error=format_error(ex)),
'error')
log.exception('Failed to create record.')
return False
else:
self.after_model_change(form, model, True)
return model
def update_model(self, form, model):
try:
form.populate_obj(model)
self._on_model_change(form, model, False)
model.save()
except Exception as ex:
if not self.handle_view_exception(ex):
flash(gettext('Failed to update record. %(error)s',
error=format_error(ex)),
'error')
log.exception('Failed to update record.')
return False
else:
self.after_model_change(form, model, False)
return True
def delete_model(self, model):
try:
self.on_model_delete(model)
model.delete()
except Exception as ex:
if not self.handle_view_exception(ex):
flash(gettext('Failed to delete record. %(error)s',
error=format_error(ex)),
'error')
log.exception('Failed to delete record.')
return False
else:
self.after_model_delete(model)
return True
@expose('/api/file/')
def api_file_view(self):
pk = request.args.get('id')
coll = request.args.get('coll')
db = request.args.get('db', 'default')
if not pk or not coll or not db:
abort(404)
fs = gridfs.GridFS(get_db(db), coll)
data = fs.get(self.object_id_converter(pk))
if not data:
abort(404)
return Response(data.read(),
content_type=data.content_type,
headers={
'Content-Length': data.length
})
def is_action_allowed(self, name):
if name == 'delete' and not self.can_delete:
return False
return super(ModelView, self).is_action_allowed(name)
@action('delete',
lazy_gettext('Delete'),
lazy_gettext('Are you sure you want to delete selected records?'))
def action_delete(self, ids):
try:
count = 0
all_ids = [self.object_id_converter(pk) for pk in ids]
for obj in self.get_query().in_bulk(all_ids).values():
count += self.delete_model(obj)
flash(ngettext('Record was successfully deleted.',
'%(count)s records were successfully deleted.',
count,
count=count))
except Exception as ex:
if not self.handle_view_exception(ex):
flash(gettext('Failed to delete records. %(error)s', error=str(ex)),
'error')
| true
| true
|
79091091c4e06730ed2243a54682e0bb34a550b5
| 329
|
py
|
Python
|
courier/management/commands/clear_old_admin_logs.py
|
HelloMelanieC/FiveUp
|
ab97d311f163b09146fe330e4360d8e75d769f95
|
[
"MIT"
] | 12
|
2017-09-10T01:43:42.000Z
|
2020-09-20T01:17:20.000Z
|
courier/management/commands/clear_old_admin_logs.py
|
HelloMelanieC/FiveUp
|
ab97d311f163b09146fe330e4360d8e75d769f95
|
[
"MIT"
] | 22
|
2016-12-26T21:46:10.000Z
|
2022-02-10T08:01:52.000Z
|
courier/management/commands/clear_old_admin_logs.py
|
HelloMelanieC/FiveUp
|
ab97d311f163b09146fe330e4360d8e75d769f95
|
[
"MIT"
] | 4
|
2017-08-24T16:01:37.000Z
|
2019-02-14T23:50:17.000Z
|
from django.core.management.base import BaseCommand
from django.contrib.admin.models import LogEntry
def clear_old_admin_logs():
logs = LogEntry.objects.all()
    # LogEntry's default ordering is newest-first, so this keeps the 2000
    # most recent entries and deletes the rest.
    for entry in list(logs[2000:]):
        entry.delete()
class Command(BaseCommand):
def handle(self, *args, **options):
clear_old_admin_logs()
| 21.933333
| 51
| 0.705167
|
from django.core.management.base import BaseCommand
from django.contrib.admin.models import LogEntry
def clear_old_admin_logs():
logs = LogEntry.objects.all()
    for entry in list(logs[2000:]):
        entry.delete()
class Command(BaseCommand):
def handle(self, *args, **options):
clear_old_admin_logs()
| true
| true
|
790910f6dbfe1ac36189de9e30ba162e48df428e
| 2,579
|
py
|
Python
|
scripts/camera_publisher_node.py
|
Alexandre-Bonneau/uwds3_perception
|
21529f63b3b3d2ad5e30eefece2d75378ae7651f
|
[
"MIT"
] | null | null | null |
scripts/camera_publisher_node.py
|
Alexandre-Bonneau/uwds3_perception
|
21529f63b3b3d2ad5e30eefece2d75378ae7651f
|
[
"MIT"
] | null | null | null |
scripts/camera_publisher_node.py
|
Alexandre-Bonneau/uwds3_perception
|
21529f63b3b3d2ad5e30eefece2d75378ae7651f
|
[
"MIT"
] | 1
|
2020-02-07T15:54:03.000Z
|
2020-02-07T15:54:03.000Z
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import os
import rospy
import cv2
import sensor_msgs
import numpy as np
from cv_bridge import CvBridge
class CameraPublisher(object):
""" """
def __init__(self):
"""Default constructor"""
self.rgb_image_topic = rospy.get_param("~rgb_image_topic", "/camera/rgb/image_raw")
self.camera_publisher = rospy.Publisher(self.rgb_image_topic, sensor_msgs.msg.Image, queue_size=1)
self.camera_pub_frequency = rospy.get_param("~camera_pub_frequency", 20)
self.bridge = CvBridge()
self.camera_info_topic = rospy.get_param("~camera_info_topic", "/camera/rgb/camera_info")
self.camera_info = sensor_msgs.msg.CameraInfo()
self.camera_info_publisher = rospy.Publisher(self.camera_info_topic, sensor_msgs.msg.CameraInfo, queue_size=1)
self.camera_frame_id = rospy.get_param("~camera_frame_id", "camera_link")
self.camera_info.header.frame_id = self.camera_frame_id
self.capture = cv2.VideoCapture(0)
ok, frame = self.capture.read()
        # OpenCV frames are shaped (rows, cols, channels) == (height, width, channels)
        height, width, _ = frame.shape
        # Approximate pinhole intrinsics: focal length ~ image width in pixels,
        # principal point at the image center
        focal_length = width
        center = (width/2, height/2)
camera_matrix = np.array([[focal_length, 0, center[0]],
[0, focal_length, center[1]],
[0, 0, 1]], dtype="double")
P_matrix = np.array([[focal_length, 0, center[0], 0],
[0, focal_length, center[1], 0],
[0, 0, 1, 0]], dtype="double")
dist_coeffs = np.zeros((4, 1))
self.camera_info.D = list(dist_coeffs)
self.camera_info.K = list(camera_matrix.flatten())
self.camera_info.P = list(P_matrix.flatten())
self.timer = rospy.Timer(rospy.Duration(1.0/self.camera_pub_frequency), self.timer_callback)
rospy.loginfo("Camera publisher ready !")
while not rospy.is_shutdown():
rospy.spin()
self.capture.release()
def timer_callback(self, event):
ok, frame = self.capture.read()
if ok:
bgr_image_msg = self.bridge.cv2_to_imgmsg(frame, "bgr8")
bgr_image_msg.header.stamp = rospy.Time().now()
self.camera_info.header = bgr_image_msg.header
bgr_image_msg.header.frame_id = self.camera_frame_id
self.camera_publisher.publish(bgr_image_msg)
self.camera_info_publisher.publish(self.camera_info)
if __name__ == '__main__':
rospy.init_node("camera_publisher", anonymous=False)
c = CameraPublisher()
| 36.842857
| 118
| 0.633579
|
import os
import rospy
import cv2
import sensor_msgs
import numpy as np
from cv_bridge import CvBridge
class CameraPublisher(object):
def __init__(self):
self.rgb_image_topic = rospy.get_param("~rgb_image_topic", "/camera/rgb/image_raw")
self.camera_publisher = rospy.Publisher(self.rgb_image_topic, sensor_msgs.msg.Image, queue_size=1)
self.camera_pub_frequency = rospy.get_param("~camera_pub_frequency", 20)
self.bridge = CvBridge()
self.camera_info_topic = rospy.get_param("~camera_info_topic", "/camera/rgb/camera_info")
self.camera_info = sensor_msgs.msg.CameraInfo()
self.camera_info_publisher = rospy.Publisher(self.camera_info_topic, sensor_msgs.msg.CameraInfo, queue_size=1)
self.camera_frame_id = rospy.get_param("~camera_frame_id", "camera_link")
self.camera_info.header.frame_id = self.camera_frame_id
self.capture = cv2.VideoCapture(0)
ok, frame = self.capture.read()
        height, width, _ = frame.shape
        focal_length = width
        center = (width/2, height/2)
camera_matrix = np.array([[focal_length, 0, center[0]],
[0, focal_length, center[1]],
[0, 0, 1]], dtype="double")
P_matrix = np.array([[focal_length, 0, center[0], 0],
[0, focal_length, center[1], 0],
[0, 0, 1, 0]], dtype="double")
dist_coeffs = np.zeros((4, 1))
self.camera_info.D = list(dist_coeffs)
self.camera_info.K = list(camera_matrix.flatten())
self.camera_info.P = list(P_matrix.flatten())
self.timer = rospy.Timer(rospy.Duration(1.0/self.camera_pub_frequency), self.timer_callback)
rospy.loginfo("Camera publisher ready !")
while not rospy.is_shutdown():
rospy.spin()
self.capture.release()
def timer_callback(self, event):
ok, frame = self.capture.read()
if ok:
bgr_image_msg = self.bridge.cv2_to_imgmsg(frame, "bgr8")
bgr_image_msg.header.stamp = rospy.Time().now()
self.camera_info.header = bgr_image_msg.header
bgr_image_msg.header.frame_id = self.camera_frame_id
self.camera_publisher.publish(bgr_image_msg)
self.camera_info_publisher.publish(self.camera_info)
if __name__ == '__main__':
rospy.init_node("camera_publisher", anonymous=False)
c = CameraPublisher()
| true
| true
|
7909113b4715f07283c948e8bdf136b70c8c9250
| 4,077
|
py
|
Python
|
pde/tools/parse_duration.py
|
lmenou/py-pde
|
3899cba0481657ea7b3d5c05e318d0b851bbe8cd
|
[
"MIT"
] | null | null | null |
pde/tools/parse_duration.py
|
lmenou/py-pde
|
3899cba0481657ea7b3d5c05e318d0b851bbe8cd
|
[
"MIT"
] | null | null | null |
pde/tools/parse_duration.py
|
lmenou/py-pde
|
3899cba0481657ea7b3d5c05e318d0b851bbe8cd
|
[
"MIT"
] | null | null | null |
"""
Parsing time durations from strings
This module provides a function that parses time durations from strings. It has
been copied from the django software, which comes with the following notes:
Copyright (c) Django Software Foundation and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Django nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import datetime
import re
standard_duration_re = re.compile(
r"^"
r"(?:(?P<days>-?\d+) (days?, )?)?"
r"((?:(?P<hours>-?\d+):)(?=\d+:\d+))?"
r"(?:(?P<minutes>-?\d+):)?"
r"(?P<seconds>-?\d+)"
r"(?:\.(?P<microseconds>\d{1,6})\d{0,6})?"
r"$"
)
# Support the sections of ISO 8601 date representation that are accepted by
# timedelta
iso8601_duration_re = re.compile(
r"^(?P<sign>[-+]?)"
r"P"
r"(?:(?P<days>\d+(.\d+)?)D)?"
r"(?:T"
r"(?:(?P<hours>\d+(.\d+)?)H)?"
r"(?:(?P<minutes>\d+(.\d+)?)M)?"
r"(?:(?P<seconds>\d+(.\d+)?)S)?"
r")?"
r"$"
)
# Support PostgreSQL's day-time interval format, e.g. "3 days 04:05:06". The
# year-month and mixed intervals cannot be converted to a timedelta and thus
# aren't accepted.
postgres_interval_re = re.compile(
r"^"
r"(?:(?P<days>-?\d+) (days? ?))?"
r"(?:(?P<sign>[-+])?"
r"(?P<hours>\d+):"
r"(?P<minutes>\d\d):"
r"(?P<seconds>\d\d)"
r"(?:\.(?P<microseconds>\d{1,6}))?"
r")?$"
)
def parse_duration(value: str) -> datetime.timedelta:
"""Parse a duration string and return a datetime.timedelta.
Args:
value (str): A time duration given as text. The preferred format for
durations is '%d %H:%M:%S.%f'. This function also supports ISO 8601
representation and PostgreSQL's day-time interval format.
Returns:
datetime.timedelta: An instance representing the duration.
"""
match = (
standard_duration_re.match(value)
or iso8601_duration_re.match(value)
or postgres_interval_re.match(value)
)
if match:
kw = match.groupdict()
days = datetime.timedelta(float(kw.pop("days", 0) or 0))
sign = -1 if kw.pop("sign", "+") == "-" else 1
if kw.get("microseconds"):
kw["microseconds"] = kw["microseconds"].ljust(6, "0")
if (
kw.get("seconds")
and kw.get("microseconds")
and kw["seconds"].startswith("-")
):
kw["microseconds"] = "-" + kw["microseconds"]
kw = {k: float(v) for k, v in kw.items() if v is not None}
return days + sign * datetime.timedelta(**kw) # type: ignore
else:
raise ValueError(f"The time duration {value} cannot be parsed.")
__all__ = ["parse_duration"]
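if __name__ == "__main__":
    # Illustrative examples, not part of the original module: the same
    # three-day duration expressed in each of the supported formats.
    print(parse_duration("3 04:05:06.000007"))        # standard '%d %H:%M:%S.%f'
    print(parse_duration("P3DT4H5M6.000007S"))        # ISO 8601
    print(parse_duration("3 days 04:05:06.000007"))   # PostgreSQL day-time interval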
| 36.079646
| 80
| 0.651459
|
import datetime
import re
standard_duration_re = re.compile(
r"^"
r"(?:(?P<days>-?\d+) (days?, )?)?"
r"((?:(?P<hours>-?\d+):)(?=\d+:\d+))?"
r"(?:(?P<minutes>-?\d+):)?"
r"(?P<seconds>-?\d+)"
r"(?:\.(?P<microseconds>\d{1,6})\d{0,6})?"
r"$"
)
iso8601_duration_re = re.compile(
r"^(?P<sign>[-+]?)"
r"P"
r"(?:(?P<days>\d+(.\d+)?)D)?"
r"(?:T"
r"(?:(?P<hours>\d+(.\d+)?)H)?"
r"(?:(?P<minutes>\d+(.\d+)?)M)?"
r"(?:(?P<seconds>\d+(.\d+)?)S)?"
r")?"
r"$"
)
# year-month and mixed intervals cannot be converted to a timedelta and thus
# aren't accepted.
postgres_interval_re = re.compile(
r"^"
r"(?:(?P<days>-?\d+) (days? ?))?"
r"(?:(?P<sign>[-+])?"
r"(?P<hours>\d+):"
r"(?P<minutes>\d\d):"
r"(?P<seconds>\d\d)"
r"(?:\.(?P<microseconds>\d{1,6}))?"
r")?$"
)
def parse_duration(value: str) -> datetime.timedelta:
match = (
standard_duration_re.match(value)
or iso8601_duration_re.match(value)
or postgres_interval_re.match(value)
)
if match:
kw = match.groupdict()
days = datetime.timedelta(float(kw.pop("days", 0) or 0))
sign = -1 if kw.pop("sign", "+") == "-" else 1
if kw.get("microseconds"):
kw["microseconds"] = kw["microseconds"].ljust(6, "0")
if (
kw.get("seconds")
and kw.get("microseconds")
and kw["seconds"].startswith("-")
):
kw["microseconds"] = "-" + kw["microseconds"]
kw = {k: float(v) for k, v in kw.items() if v is not None}
return days + sign * datetime.timedelta(**kw)
else:
raise ValueError(f"The time duration {value} cannot be parsed.")
__all__ = ["parse_duration"]
| true
| true
|
790911bb0668c537a7d71a71d91876a2a82c231f
| 1,206
|
py
|
Python
|
pommerman/NN/neural_net.py
|
MaxU11/playground
|
240182f88836e860fc144a82e98c0f4028294334
|
[
"Apache-2.0"
] | null | null | null |
pommerman/NN/neural_net.py
|
MaxU11/playground
|
240182f88836e860fc144a82e98c0f4028294334
|
[
"Apache-2.0"
] | null | null | null |
pommerman/NN/neural_net.py
|
MaxU11/playground
|
240182f88836e860fc144a82e98c0f4028294334
|
[
"Apache-2.0"
] | null | null | null |
class NeuralNet():
def __init__(self, game):
pass
def train(self, examples):
"""
This function trains the neural network with examples obtained from
self-play.
Input:
examples: a list of training examples, where each example is of form
(board, pi, v). pi is the MCTS informed policy vector for
the given board, and v is its value. The examples has
board in its canonical form.
"""
pass
def predict(self, board):
"""
Input:
board: current board in its canonical form.
Returns:
pi: a policy vector for the current board- a numpy array of length
game.getActionSize
v: a float in [-1,1] that gives the value of the current board
"""
pass
def save_checkpoint(self, folder, filename):
"""
Saves the current neural network (with its parameters) in
folder/filename
"""
pass
def load_checkpoint(self, folder, filename):
"""
Loads parameters of the neural network from folder/filename
"""
pass
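# A minimal illustrative subclass (an assumption, not part of the original
# interface): a uniform-policy baseline that shows the (pi, v) shapes
# promised by predict(), assuming the game object exposes getActionSize()
# as referenced in the docstring above.
class UniformNet(NeuralNet):
    def __init__(self, game):
        import numpy as np  # local import keeps the sketch self-contained
        self._np = np
        self.action_size = game.getActionSize()
    def predict(self, board):
        pi = self._np.full(self.action_size, 1.0 / self.action_size)
        return pi, 0.0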
| 30.15
| 80
| 0.554726
|
class NeuralNet():
def __init__(self, game):
pass
def train(self, examples):
pass
def predict(self, board):
pass
def save_checkpoint(self, folder, filename):
pass
def load_checkpoint(self, folder, filename):
pass
| true
| true
|
790912ae9300aaef288f70c7804b874ce94c60ea
| 35
|
py
|
Python
|
plot_utils/__init__.py
|
IntelLabs/causality-lab
|
6ee7d4325257a38666558f8b7e310581a709d256
|
[
"Apache-2.0"
] | 18
|
2021-12-08T09:06:52.000Z
|
2022-03-07T01:08:47.000Z
|
plot_utils/__init__.py
|
IntelLabs/causality-lab
|
6ee7d4325257a38666558f8b7e310581a709d256
|
[
"Apache-2.0"
] | null | null | null |
plot_utils/__init__.py
|
IntelLabs/causality-lab
|
6ee7d4325257a38666558f8b7e310581a709d256
|
[
"Apache-2.0"
] | null | null | null |
from .draw_graph import draw_graph
| 17.5
| 34
| 0.857143
|
from .draw_graph import draw_graph
| true
| true
|
790913f4fe7b4920a7ab123f0f5c8acb5971d779
| 243
|
py
|
Python
|
pytest_selenium_enhancer/plugin.py
|
popescunsergiu/pytest-selenium-enhancer
|
9966604d5c44621b2ac707fbec278bed7771594a
|
[
"MIT"
] | 2
|
2021-01-20T02:38:31.000Z
|
2021-10-01T11:51:14.000Z
|
pytest_selenium_enhancer/plugin.py
|
popescunsergiu/pytest-selenium-enhancer
|
9966604d5c44621b2ac707fbec278bed7771594a
|
[
"MIT"
] | null | null | null |
pytest_selenium_enhancer/plugin.py
|
popescunsergiu/pytest-selenium-enhancer
|
9966604d5c44621b2ac707fbec278bed7771594a
|
[
"MIT"
] | null | null | null |
"""Pytest plugin entry point. Used for any fixtures needed."""
import pytest
from .pytest_selenium_enhancer import add_custom_commands
@pytest.fixture(scope='session')
def selenium_patcher():
"""Add custom ."""
add_custom_commands()
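# Illustrative usage sketch (assumptions: pytest injects this session-scoped
# fixture by name, and the suite provides some WebDriver fixture, here
# called `selenium`):
#
#   def test_example(selenium_patcher, selenium):
#       selenium.get("https://example.com")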
| 24.3
| 62
| 0.753086
|
import pytest
from .pytest_selenium_enhancer import add_custom_commands
@pytest.fixture(scope='session')
def selenium_patcher():
add_custom_commands()
| true
| true
|
7909141e062b1594972a31affbd61618b4c51153
| 15,584
|
py
|
Python
|
mailchimp_marketing_asyncio/models/add_list_members1.py
|
john-parton/mailchimp-asyncio
|
3865ca0867bec8f537dc1e3256aa3a160c00f8a2
|
[
"Apache-2.0"
] | null | null | null |
mailchimp_marketing_asyncio/models/add_list_members1.py
|
john-parton/mailchimp-asyncio
|
3865ca0867bec8f537dc1e3256aa3a160c00f8a2
|
[
"Apache-2.0"
] | null | null | null |
mailchimp_marketing_asyncio/models/add_list_members1.py
|
john-parton/mailchimp-asyncio
|
3865ca0867bec8f537dc1e3256aa3a160c00f8a2
|
[
"Apache-2.0"
] | 1
|
2022-03-09T14:52:22.000Z
|
2022-03-09T14:52:22.000Z
|
# coding: utf-8
"""
Mailchimp Marketing API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 3.0.74
Contact: apihelp@mailchimp.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class AddListMembers1(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'email_address': 'str',
'email_type': 'str',
'status': 'str',
'merge_fields': 'dict(str, object)',
'interests': 'dict(str, bool)',
'language': 'str',
'vip': 'bool',
'location': 'Location',
'marketing_permissions': 'list[MarketingPermission1]',
'ip_signup': 'str',
'timestamp_signup': 'datetime',
'ip_opt': 'str',
'timestamp_opt': 'datetime',
'tags': 'list[str]'
}
attribute_map = {
'email_address': 'email_address',
'email_type': 'email_type',
'status': 'status',
'merge_fields': 'merge_fields',
'interests': 'interests',
'language': 'language',
'vip': 'vip',
'location': 'location',
'marketing_permissions': 'marketing_permissions',
'ip_signup': 'ip_signup',
'timestamp_signup': 'timestamp_signup',
'ip_opt': 'ip_opt',
'timestamp_opt': 'timestamp_opt',
'tags': 'tags'
}
def __init__(self, email_address=None, email_type=None, status=None, merge_fields=None, interests=None, language=None, vip=None, location=None, marketing_permissions=None, ip_signup=None, timestamp_signup=None, ip_opt=None, timestamp_opt=None, tags=None): # noqa: E501
"""AddListMembers1 - a model defined in Swagger""" # noqa: E501
self._email_address = None
self._email_type = None
self._status = None
self._merge_fields = None
self._interests = None
self._language = None
self._vip = None
self._location = None
self._marketing_permissions = None
self._ip_signup = None
self._timestamp_signup = None
self._ip_opt = None
self._timestamp_opt = None
self._tags = None
self.discriminator = None
self.email_address = email_address
if email_type is not None:
self.email_type = email_type
self.status = status
if merge_fields is not None:
self.merge_fields = merge_fields
if interests is not None:
self.interests = interests
if language is not None:
self.language = language
if vip is not None:
self.vip = vip
if location is not None:
self.location = location
if marketing_permissions is not None:
self.marketing_permissions = marketing_permissions
if ip_signup is not None:
self.ip_signup = ip_signup
if timestamp_signup is not None:
self.timestamp_signup = timestamp_signup
if ip_opt is not None:
self.ip_opt = ip_opt
if timestamp_opt is not None:
self.timestamp_opt = timestamp_opt
if tags is not None:
self.tags = tags
@property
def email_address(self):
"""Gets the email_address of this AddListMembers1. # noqa: E501
Email address for a subscriber. # noqa: E501
:return: The email_address of this AddListMembers1. # noqa: E501
:rtype: str
"""
return self._email_address
@email_address.setter
def email_address(self, email_address):
"""Sets the email_address of this AddListMembers1.
Email address for a subscriber. # noqa: E501
:param email_address: The email_address of this AddListMembers1. # noqa: E501
:type: str
"""
if email_address is None:
raise ValueError("Invalid value for `email_address`, must not be `None`") # noqa: E501
self._email_address = email_address
@property
def email_type(self):
"""Gets the email_type of this AddListMembers1. # noqa: E501
Type of email this member asked to get ('html' or 'text'). # noqa: E501
:return: The email_type of this AddListMembers1. # noqa: E501
:rtype: str
"""
return self._email_type
@email_type.setter
def email_type(self, email_type):
"""Sets the email_type of this AddListMembers1.
Type of email this member asked to get ('html' or 'text'). # noqa: E501
:param email_type: The email_type of this AddListMembers1. # noqa: E501
:type: str
"""
self._email_type = email_type
@property
def status(self):
"""Gets the status of this AddListMembers1. # noqa: E501
Subscriber's current status. # noqa: E501
:return: The status of this AddListMembers1. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this AddListMembers1.
Subscriber's current status. # noqa: E501
:param status: The status of this AddListMembers1. # noqa: E501
:type: str
"""
if status is None:
raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501
allowed_values = ["subscribed", "unsubscribed", "cleaned", "pending", "transactional"] # noqa: E501
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}" # noqa: E501
.format(status, allowed_values)
)
self._status = status
@property
def merge_fields(self):
"""Gets the merge_fields of this AddListMembers1. # noqa: E501
A dictionary of merge fields where the keys are the merge tags. See the [Merge Fields documentation](https://mailchimp.com/developer/marketing/docs/merge-fields/#structure) for more about the structure. # noqa: E501
:return: The merge_fields of this AddListMembers1. # noqa: E501
:rtype: dict(str, object)
"""
return self._merge_fields
@merge_fields.setter
def merge_fields(self, merge_fields):
"""Sets the merge_fields of this AddListMembers1.
A dictionary of merge fields where the keys are the merge tags. See the [Merge Fields documentation](https://mailchimp.com/developer/marketing/docs/merge-fields/#structure) for more about the structure. # noqa: E501
:param merge_fields: The merge_fields of this AddListMembers1. # noqa: E501
:type: dict(str, object)
"""
self._merge_fields = merge_fields
@property
def interests(self):
"""Gets the interests of this AddListMembers1. # noqa: E501
The key of this object's properties is the ID of the interest in question. # noqa: E501
:return: The interests of this AddListMembers1. # noqa: E501
:rtype: dict(str, bool)
"""
return self._interests
@interests.setter
def interests(self, interests):
"""Sets the interests of this AddListMembers1.
The key of this object's properties is the ID of the interest in question. # noqa: E501
:param interests: The interests of this AddListMembers1. # noqa: E501
:type: dict(str, bool)
"""
self._interests = interests
@property
def language(self):
"""Gets the language of this AddListMembers1. # noqa: E501
If set/detected, the [subscriber's language](https://mailchimp.com/help/view-and-edit-contact-languages/). # noqa: E501
:return: The language of this AddListMembers1. # noqa: E501
:rtype: str
"""
return self._language
@language.setter
def language(self, language):
"""Sets the language of this AddListMembers1.
If set/detected, the [subscriber's language](https://mailchimp.com/help/view-and-edit-contact-languages/). # noqa: E501
:param language: The language of this AddListMembers1. # noqa: E501
:type: str
"""
self._language = language
@property
def vip(self):
"""Gets the vip of this AddListMembers1. # noqa: E501
[VIP status](https://mailchimp.com/help/designate-and-send-to-vip-contacts/) for subscriber. # noqa: E501
:return: The vip of this AddListMembers1. # noqa: E501
:rtype: bool
"""
return self._vip
@vip.setter
def vip(self, vip):
"""Sets the vip of this AddListMembers1.
[VIP status](https://mailchimp.com/help/designate-and-send-to-vip-contacts/) for subscriber. # noqa: E501
:param vip: The vip of this AddListMembers1. # noqa: E501
:type: bool
"""
self._vip = vip
@property
def location(self):
"""Gets the location of this AddListMembers1. # noqa: E501
:return: The location of this AddListMembers1. # noqa: E501
:rtype: Location
"""
return self._location
@location.setter
def location(self, location):
"""Sets the location of this AddListMembers1.
:param location: The location of this AddListMembers1. # noqa: E501
:type: Location
"""
self._location = location
@property
def marketing_permissions(self):
"""Gets the marketing_permissions of this AddListMembers1. # noqa: E501
The marketing permissions for the subscriber. # noqa: E501
:return: The marketing_permissions of this AddListMembers1. # noqa: E501
:rtype: list[MarketingPermission1]
"""
return self._marketing_permissions
@marketing_permissions.setter
def marketing_permissions(self, marketing_permissions):
"""Sets the marketing_permissions of this AddListMembers1.
The marketing permissions for the subscriber. # noqa: E501
:param marketing_permissions: The marketing_permissions of this AddListMembers1. # noqa: E501
:type: list[MarketingPermission1]
"""
self._marketing_permissions = marketing_permissions
@property
def ip_signup(self):
"""Gets the ip_signup of this AddListMembers1. # noqa: E501
IP address the subscriber signed up from. # noqa: E501
:return: The ip_signup of this AddListMembers1. # noqa: E501
:rtype: str
"""
return self._ip_signup
@ip_signup.setter
def ip_signup(self, ip_signup):
"""Sets the ip_signup of this AddListMembers1.
IP address the subscriber signed up from. # noqa: E501
:param ip_signup: The ip_signup of this AddListMembers1. # noqa: E501
:type: str
"""
self._ip_signup = ip_signup
@property
def timestamp_signup(self):
"""Gets the timestamp_signup of this AddListMembers1. # noqa: E501
The date and time the subscriber signed up for the list in ISO 8601 format. # noqa: E501
:return: The timestamp_signup of this AddListMembers1. # noqa: E501
:rtype: datetime
"""
return self._timestamp_signup
@timestamp_signup.setter
def timestamp_signup(self, timestamp_signup):
"""Sets the timestamp_signup of this AddListMembers1.
The date and time the subscriber signed up for the list in ISO 8601 format. # noqa: E501
:param timestamp_signup: The timestamp_signup of this AddListMembers1. # noqa: E501
:type: datetime
"""
self._timestamp_signup = timestamp_signup
@property
def ip_opt(self):
"""Gets the ip_opt of this AddListMembers1. # noqa: E501
The IP address the subscriber used to confirm their opt-in status. # noqa: E501
:return: The ip_opt of this AddListMembers1. # noqa: E501
:rtype: str
"""
return self._ip_opt
@ip_opt.setter
def ip_opt(self, ip_opt):
"""Sets the ip_opt of this AddListMembers1.
The IP address the subscriber used to confirm their opt-in status. # noqa: E501
:param ip_opt: The ip_opt of this AddListMembers1. # noqa: E501
:type: str
"""
self._ip_opt = ip_opt
@property
def timestamp_opt(self):
"""Gets the timestamp_opt of this AddListMembers1. # noqa: E501
        The date and time the subscriber confirmed their opt-in status in ISO 8601 format. # noqa: E501
:return: The timestamp_opt of this AddListMembers1. # noqa: E501
:rtype: datetime
"""
return self._timestamp_opt
@timestamp_opt.setter
def timestamp_opt(self, timestamp_opt):
"""Sets the timestamp_opt of this AddListMembers1.
        The date and time the subscriber confirmed their opt-in status in ISO 8601 format. # noqa: E501
:param timestamp_opt: The timestamp_opt of this AddListMembers1. # noqa: E501
:type: datetime
"""
self._timestamp_opt = timestamp_opt
@property
def tags(self):
"""Gets the tags of this AddListMembers1. # noqa: E501
The tags that are associated with a member. # noqa: E501
:return: The tags of this AddListMembers1. # noqa: E501
:rtype: list[str]
"""
return self._tags
@tags.setter
def tags(self, tags):
"""Sets the tags of this AddListMembers1.
The tags that are associated with a member. # noqa: E501
:param tags: The tags of this AddListMembers1. # noqa: E501
:type: list[str]
"""
self._tags = tags
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AddListMembers1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AddListMembers1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
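# Usage sketch (illustrative, not generated code). email_address and status
# are the only values the setters require; status must be one of the
# allowed_values checked above.
if __name__ == '__main__':
    member = AddListMembers1(
        email_address='user@example.com',
        status='pending',
        tags=['newsletter'],
    )
    print(member.to_dict())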
| 31.934426
| 273
| 0.61345
|
import pprint
import re
import six
class AddListMembers1(object):
swagger_types = {
'email_address': 'str',
'email_type': 'str',
'status': 'str',
'merge_fields': 'dict(str, object)',
'interests': 'dict(str, bool)',
'language': 'str',
'vip': 'bool',
'location': 'Location',
'marketing_permissions': 'list[MarketingPermission1]',
'ip_signup': 'str',
'timestamp_signup': 'datetime',
'ip_opt': 'str',
'timestamp_opt': 'datetime',
'tags': 'list[str]'
}
attribute_map = {
'email_address': 'email_address',
'email_type': 'email_type',
'status': 'status',
'merge_fields': 'merge_fields',
'interests': 'interests',
'language': 'language',
'vip': 'vip',
'location': 'location',
'marketing_permissions': 'marketing_permissions',
'ip_signup': 'ip_signup',
'timestamp_signup': 'timestamp_signup',
'ip_opt': 'ip_opt',
'timestamp_opt': 'timestamp_opt',
'tags': 'tags'
}
def __init__(self, email_address=None, email_type=None, status=None, merge_fields=None, interests=None, language=None, vip=None, location=None, marketing_permissions=None, ip_signup=None, timestamp_signup=None, ip_opt=None, timestamp_opt=None, tags=None):
self._email_address = None
self._email_type = None
self._status = None
self._merge_fields = None
self._interests = None
self._language = None
self._vip = None
self._location = None
self._marketing_permissions = None
self._ip_signup = None
self._timestamp_signup = None
self._ip_opt = None
self._timestamp_opt = None
self._tags = None
self.discriminator = None
self.email_address = email_address
if email_type is not None:
self.email_type = email_type
self.status = status
if merge_fields is not None:
self.merge_fields = merge_fields
if interests is not None:
self.interests = interests
if language is not None:
self.language = language
if vip is not None:
self.vip = vip
if location is not None:
self.location = location
if marketing_permissions is not None:
self.marketing_permissions = marketing_permissions
if ip_signup is not None:
self.ip_signup = ip_signup
if timestamp_signup is not None:
self.timestamp_signup = timestamp_signup
if ip_opt is not None:
self.ip_opt = ip_opt
if timestamp_opt is not None:
self.timestamp_opt = timestamp_opt
if tags is not None:
self.tags = tags
@property
def email_address(self):
return self._email_address
@email_address.setter
def email_address(self, email_address):
if email_address is None:
raise ValueError("Invalid value for `email_address`, must not be `None`")
self._email_address = email_address
@property
def email_type(self):
return self._email_type
@email_type.setter
def email_type(self, email_type):
self._email_type = email_type
@property
def status(self):
return self._status
@status.setter
def status(self, status):
if status is None:
raise ValueError("Invalid value for `status`, must not be `None`")
allowed_values = ["subscribed", "unsubscribed", "cleaned", "pending", "transactional"]
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}"
.format(status, allowed_values)
)
self._status = status
@property
def merge_fields(self):
return self._merge_fields
@merge_fields.setter
def merge_fields(self, merge_fields):
self._merge_fields = merge_fields
@property
def interests(self):
return self._interests
@interests.setter
def interests(self, interests):
self._interests = interests
@property
def language(self):
return self._language
@language.setter
def language(self, language):
self._language = language
@property
def vip(self):
return self._vip
@vip.setter
def vip(self, vip):
self._vip = vip
@property
def location(self):
return self._location
@location.setter
def location(self, location):
self._location = location
@property
def marketing_permissions(self):
return self._marketing_permissions
@marketing_permissions.setter
def marketing_permissions(self, marketing_permissions):
self._marketing_permissions = marketing_permissions
@property
def ip_signup(self):
return self._ip_signup
@ip_signup.setter
def ip_signup(self, ip_signup):
self._ip_signup = ip_signup
@property
def timestamp_signup(self):
return self._timestamp_signup
@timestamp_signup.setter
def timestamp_signup(self, timestamp_signup):
self._timestamp_signup = timestamp_signup
@property
def ip_opt(self):
return self._ip_opt
@ip_opt.setter
def ip_opt(self, ip_opt):
self._ip_opt = ip_opt
@property
def timestamp_opt(self):
return self._timestamp_opt
@timestamp_opt.setter
def timestamp_opt(self, timestamp_opt):
self._timestamp_opt = timestamp_opt
@property
def tags(self):
return self._tags
@tags.setter
def tags(self, tags):
self._tags = tags
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AddListMembers1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, AddListMembers1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true
| true
|
790914c6fc3b1b230cc4b756155b1ead618c0045
| 14,321
|
py
|
Python
|
edgedb/transaction.py
|
mkniewallner/edgedb-python
|
2086b866d3c87c215eecf644b2393ddd857457e0
|
[
"Apache-2.0"
] | 214
|
2019-01-19T03:56:10.000Z
|
2022-03-31T01:37:33.000Z
|
edgedb/transaction.py
|
mkniewallner/edgedb-python
|
2086b866d3c87c215eecf644b2393ddd857457e0
|
[
"Apache-2.0"
] | 120
|
2019-03-19T23:01:52.000Z
|
2022-03-14T08:41:27.000Z
|
edgedb/transaction.py
|
mkniewallner/edgedb-python
|
2086b866d3c87c215eecf644b2393ddd857457e0
|
[
"Apache-2.0"
] | 24
|
2019-04-29T22:41:10.000Z
|
2021-11-15T00:28:01.000Z
|
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2016-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import enum
import typing
from . import abstract
from . import base_con
from . import enums
from . import errors
from . import options
from .datatypes import datatypes
from .protocol import protocol
__all__ = ('Transaction', 'AsyncIOTransaction')
class TransactionState(enum.Enum):
NEW = 0
STARTED = 1
COMMITTED = 2
ROLLEDBACK = 3
FAILED = 4
class BaseTransaction:
__slots__ = (
'_connection',
'_connection_inner',
'_connection_impl',
'_pool',
'_options',
'_state',
'_managed',
)
def __init__(self, owner, options: options.TransactionOptions):
if isinstance(owner, base_con.BaseConnection):
self._connection = owner
self._connection_inner = owner._inner
self._pool = None
else:
self._connection = None
self._connection_inner = None
self._pool = owner
self._connection_impl = None
self._options = options
self._state = TransactionState.NEW
self._managed = False
def is_active(self) -> bool:
return self._state is TransactionState.STARTED
def __check_state_base(self, opname):
if self._state is TransactionState.COMMITTED:
raise errors.InterfaceError(
'cannot {}; the transaction is already committed'.format(
opname))
if self._state is TransactionState.ROLLEDBACK:
raise errors.InterfaceError(
'cannot {}; the transaction is already rolled back'.format(
opname))
if self._state is TransactionState.FAILED:
raise errors.InterfaceError(
'cannot {}; the transaction is in error state'.format(
opname))
def __check_state(self, opname):
if self._state is not TransactionState.STARTED:
if self._state is TransactionState.NEW:
raise errors.InterfaceError(
'cannot {}; the transaction is not yet started'.format(
opname))
self.__check_state_base(opname)
def _make_start_query(self):
self.__check_state_base('start')
if self._state is TransactionState.STARTED:
raise errors.InterfaceError(
'cannot start; the transaction is already started')
return self._options.start_transaction_query()
def _make_commit_query(self):
self.__check_state('commit')
return 'COMMIT;'
def _make_rollback_query(self):
self.__check_state('rollback')
return 'ROLLBACK;'
def _borrow(self):
inner = self._connection_inner
if inner._borrowed_for:
raise base_con.borrow_error(inner._borrowed_for)
inner._borrowed_for = base_con.BorrowReason.TRANSACTION
def _maybe_return(self):
if self._connection_inner is not None:
self._connection_inner._borrowed_for = None
def __repr__(self):
attrs = []
attrs.append('state:{}'.format(self._state.name.lower()))
attrs.append(repr(self._options))
if self.__class__.__module__.startswith('edgedb.'):
mod = 'edgedb'
else:
mod = self.__class__.__module__
return '<{}.{} {} {:#x}>'.format(
mod, self.__class__.__name__, ' '.join(attrs), id(self))
class BaseAsyncIOTransaction(BaseTransaction, abstract.AsyncIOExecutor):
__slots__ = ()
async def _start(self, single_connect=False) -> None:
query = self._make_start_query()
if self._pool is not None:
self._connection = await self._pool._acquire()
self._connection_inner = self._connection._inner
inner = self._connection_inner
if not inner._impl or inner._impl.is_closed():
await self._connection._reconnect(single_attempt=single_connect)
self._connection_impl = self._connection._inner._impl
try:
await self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.STARTED
async def _commit(self):
try:
query = self._make_commit_query()
try:
await self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.COMMITTED
finally:
self._maybe_return()
if self._pool is not None:
await self._pool._release(self._connection)
async def _rollback(self):
try:
query = self._make_rollback_query()
try:
await self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.ROLLEDBACK
finally:
self._maybe_return()
if self._pool is not None:
await self._pool._release(self._connection)
async def _ensure_transaction(self):
pass
async def query(self, query: str, *args, **kwargs) -> datatypes.Set:
await self._ensure_transaction()
con = self._connection_inner
result, _ = await self._connection_impl._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
io_format=protocol.IoFormat.BINARY,
)
return result
async def query_single(self, query: str, *args, **kwargs) -> typing.Any:
await self._ensure_transaction()
con = self._connection_inner
result, _ = await self._connection_impl._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
expect_one=True,
io_format=protocol.IoFormat.BINARY,
)
return result
async def query_json(self, query: str, *args, **kwargs) -> str:
await self._ensure_transaction()
con = self._connection_inner
result, _ = await self._connection_impl._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
io_format=protocol.IoFormat.JSON,
)
return result
async def query_single_json(self, query: str, *args, **kwargs) -> str:
await self._ensure_transaction()
con = self._connection_inner
result, _ = await self._connection_impl._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
expect_one=True,
io_format=protocol.IoFormat.JSON,
)
return result
async def execute(self, query: str) -> None:
"""Execute an EdgeQL command (or commands).
Example:
.. code-block:: pycon
>>> await con.execute('''
... CREATE TYPE MyType { CREATE PROPERTY a -> int64 };
... FOR x IN {100, 200, 300} UNION INSERT MyType { a := x };
... ''')
"""
await self._ensure_transaction()
await self._connection_impl._protocol.simple_query(
query, enums.Capability.EXECUTE)
class AsyncIOTransaction(BaseAsyncIOTransaction):
__slots__ = ()
async def __aenter__(self):
if self._managed:
raise errors.InterfaceError(
'cannot enter context: already in an `async with` block')
self._managed = True
await self.start()
return self
async def __aexit__(self, extype, ex, tb):
try:
if extype is not None:
await self._rollback()
else:
await self._commit()
finally:
self._managed = False
async def start(self) -> None:
"""Enter the transaction or savepoint block."""
await self._start()
self._borrow()
async def commit(self) -> None:
"""Exit the transaction or savepoint block and commit changes."""
if self._managed:
raise errors.InterfaceError(
'cannot manually commit from within an `async with` block')
await self._commit()
async def rollback(self) -> None:
"""Exit the transaction or savepoint block and rollback changes."""
if self._managed:
raise errors.InterfaceError(
'cannot manually rollback from within an `async with` block')
await self._rollback()
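# Usage sketch (illustrative, not part of this module). It assumes a
# connection object whose .transaction() returns an AsyncIOTransaction; the
# `async with` form commits on a clean exit and rolls back if the block
# raises.
async def _example_bump_visits(conn):
    async with conn.transaction() as tx:
        await tx.execute('UPDATE User SET { visits := .visits + 1 };')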
class BaseBlockingIOTransaction(BaseTransaction, abstract.Executor):
__slots__ = ()
def _start(self, single_connect=False) -> None:
query = self._make_start_query()
# no pools supported for blocking con
inner = self._connection_inner
if not inner._impl or inner._impl.is_closed():
self._connection._reconnect(single_attempt=single_connect)
self._connection_inner = self._connection._inner
self._connection_impl = self._connection_inner._impl
try:
self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.STARTED
def _commit(self):
try:
query = self._make_commit_query()
try:
self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.COMMITTED
finally:
self._maybe_return()
def _rollback(self):
try:
query = self._make_rollback_query()
try:
self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.ROLLEDBACK
finally:
self._maybe_return()
def _ensure_transaction(self):
pass
def query(self, query: str, *args, **kwargs) -> datatypes.Set:
self._ensure_transaction()
con = self._connection_inner
return self._connection_impl._protocol.sync_execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
io_format=protocol.IoFormat.BINARY,
)
def query_single(self, query: str, *args, **kwargs) -> typing.Any:
self._ensure_transaction()
con = self._connection_inner
return self._connection_impl._protocol.sync_execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
expect_one=True,
io_format=protocol.IoFormat.BINARY,
)
def query_json(self, query: str, *args, **kwargs) -> str:
self._ensure_transaction()
con = self._connection_inner
return self._connection_impl._protocol.sync_execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
io_format=protocol.IoFormat.JSON,
)
def query_single_json(self, query: str, *args, **kwargs) -> str:
self._ensure_transaction()
con = self._connection_inner
return self._connection_impl._protocol.sync_execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
expect_one=True,
io_format=protocol.IoFormat.JSON,
)
def execute(self, query: str) -> None:
self._ensure_transaction()
self._connection_impl._protocol.sync_simple_query(
query, enums.Capability.EXECUTE)
class Transaction(BaseBlockingIOTransaction):
def __enter__(self):
if self._managed:
raise errors.InterfaceError(
'cannot enter context: already in a `with` block')
self._managed = True
self.start()
return self
def __exit__(self, extype, ex, tb):
try:
if extype is not None:
self._rollback()
else:
self._commit()
finally:
self._managed = False
def start(self) -> None:
"""Enter the transaction or savepoint block."""
self._start()
self._borrow()
def commit(self) -> None:
"""Exit the transaction or savepoint block and commit changes."""
if self._managed:
raise errors.InterfaceError(
'cannot manually commit from within a `with` block')
self._commit()
def rollback(self) -> None:
"""Exit the transaction or savepoint block and rollback changes."""
if self._managed:
raise errors.InterfaceError(
'cannot manually rollback from within a `with` block')
self._rollback()
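# Usage sketch (illustrative, not part of this module). The blocking API
# mirrors the async one: `with` commits on success and rolls back on an
# exception. The .transaction() accessor is an assumption.
def _example_bump_visits_sync(conn):
    with conn.transaction() as tx:
        tx.execute('UPDATE User SET { visits := .visits + 1 };')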
| 32.696347
| 77
| 0.601215
|
import enum
import typing
from . import abstract
from . import base_con
from . import enums
from . import errors
from . import options
from .datatypes import datatypes
from .protocol import protocol
__all__ = ('Transaction', 'AsyncIOTransaction')
class TransactionState(enum.Enum):
NEW = 0
STARTED = 1
COMMITTED = 2
ROLLEDBACK = 3
FAILED = 4
class BaseTransaction:
__slots__ = (
'_connection',
'_connection_inner',
'_connection_impl',
'_pool',
'_options',
'_state',
'_managed',
)
def __init__(self, owner, options: options.TransactionOptions):
if isinstance(owner, base_con.BaseConnection):
self._connection = owner
self._connection_inner = owner._inner
self._pool = None
else:
self._connection = None
self._connection_inner = None
self._pool = owner
self._connection_impl = None
self._options = options
self._state = TransactionState.NEW
self._managed = False
def is_active(self) -> bool:
return self._state is TransactionState.STARTED
def __check_state_base(self, opname):
if self._state is TransactionState.COMMITTED:
raise errors.InterfaceError(
'cannot {}; the transaction is already committed'.format(
opname))
if self._state is TransactionState.ROLLEDBACK:
raise errors.InterfaceError(
'cannot {}; the transaction is already rolled back'.format(
opname))
if self._state is TransactionState.FAILED:
raise errors.InterfaceError(
'cannot {}; the transaction is in error state'.format(
opname))
def __check_state(self, opname):
if self._state is not TransactionState.STARTED:
if self._state is TransactionState.NEW:
raise errors.InterfaceError(
'cannot {}; the transaction is not yet started'.format(
opname))
self.__check_state_base(opname)
def _make_start_query(self):
self.__check_state_base('start')
if self._state is TransactionState.STARTED:
raise errors.InterfaceError(
'cannot start; the transaction is already started')
return self._options.start_transaction_query()
def _make_commit_query(self):
self.__check_state('commit')
return 'COMMIT;'
def _make_rollback_query(self):
self.__check_state('rollback')
return 'ROLLBACK;'
def _borrow(self):
inner = self._connection_inner
if inner._borrowed_for:
raise base_con.borrow_error(inner._borrowed_for)
inner._borrowed_for = base_con.BorrowReason.TRANSACTION
def _maybe_return(self):
if self._connection_inner is not None:
self._connection_inner._borrowed_for = None
def __repr__(self):
attrs = []
attrs.append('state:{}'.format(self._state.name.lower()))
attrs.append(repr(self._options))
if self.__class__.__module__.startswith('edgedb.'):
mod = 'edgedb'
else:
mod = self.__class__.__module__
return '<{}.{} {} {:#x}>'.format(
mod, self.__class__.__name__, ' '.join(attrs), id(self))
class BaseAsyncIOTransaction(BaseTransaction, abstract.AsyncIOExecutor):
__slots__ = ()
async def _start(self, single_connect=False) -> None:
query = self._make_start_query()
if self._pool is not None:
self._connection = await self._pool._acquire()
self._connection_inner = self._connection._inner
inner = self._connection_inner
if not inner._impl or inner._impl.is_closed():
await self._connection._reconnect(single_attempt=single_connect)
self._connection_impl = self._connection._inner._impl
try:
await self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.STARTED
async def _commit(self):
try:
query = self._make_commit_query()
try:
await self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.COMMITTED
finally:
self._maybe_return()
if self._pool is not None:
await self._pool._release(self._connection)
async def _rollback(self):
try:
query = self._make_rollback_query()
try:
await self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.ROLLEDBACK
finally:
self._maybe_return()
if self._pool is not None:
await self._pool._release(self._connection)
async def _ensure_transaction(self):
pass
async def query(self, query: str, *args, **kwargs) -> datatypes.Set:
await self._ensure_transaction()
con = self._connection_inner
result, _ = await self._connection_impl._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
io_format=protocol.IoFormat.BINARY,
)
return result
async def query_single(self, query: str, *args, **kwargs) -> typing.Any:
await self._ensure_transaction()
con = self._connection_inner
result, _ = await self._connection_impl._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
expect_one=True,
io_format=protocol.IoFormat.BINARY,
)
return result
async def query_json(self, query: str, *args, **kwargs) -> str:
await self._ensure_transaction()
con = self._connection_inner
result, _ = await self._connection_impl._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
io_format=protocol.IoFormat.JSON,
)
return result
async def query_single_json(self, query: str, *args, **kwargs) -> str:
await self._ensure_transaction()
con = self._connection_inner
result, _ = await self._connection_impl._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
expect_one=True,
io_format=protocol.IoFormat.JSON,
)
return result
async def execute(self, query: str) -> None:
await self._ensure_transaction()
await self._connection_impl._protocol.simple_query(
query, enums.Capability.EXECUTE)
class AsyncIOTransaction(BaseAsyncIOTransaction):
__slots__ = ()
async def __aenter__(self):
if self._managed:
raise errors.InterfaceError(
'cannot enter context: already in an `async with` block')
self._managed = True
await self.start()
return self
async def __aexit__(self, extype, ex, tb):
try:
if extype is not None:
await self._rollback()
else:
await self._commit()
finally:
self._managed = False
async def start(self) -> None:
await self._start()
self._borrow()
async def commit(self) -> None:
if self._managed:
raise errors.InterfaceError(
'cannot manually commit from within an `async with` block')
await self._commit()
async def rollback(self) -> None:
if self._managed:
raise errors.InterfaceError(
'cannot manually rollback from within an `async with` block')
await self._rollback()
class BaseBlockingIOTransaction(BaseTransaction, abstract.Executor):
__slots__ = ()
def _start(self, single_connect=False) -> None:
query = self._make_start_query()
inner = self._connection_inner
if not inner._impl or inner._impl.is_closed():
self._connection._reconnect(single_attempt=single_connect)
self._connection_inner = self._connection._inner
self._connection_impl = self._connection_inner._impl
try:
self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.STARTED
def _commit(self):
try:
query = self._make_commit_query()
try:
self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.COMMITTED
finally:
self._maybe_return()
def _rollback(self):
try:
query = self._make_rollback_query()
try:
self._connection_impl.privileged_execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.ROLLEDBACK
finally:
self._maybe_return()
def _ensure_transaction(self):
pass
def query(self, query: str, *args, **kwargs) -> datatypes.Set:
self._ensure_transaction()
con = self._connection_inner
return self._connection_impl._protocol.sync_execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
io_format=protocol.IoFormat.BINARY,
)
def query_single(self, query: str, *args, **kwargs) -> typing.Any:
self._ensure_transaction()
con = self._connection_inner
return self._connection_impl._protocol.sync_execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
expect_one=True,
io_format=protocol.IoFormat.BINARY,
)
def query_json(self, query: str, *args, **kwargs) -> str:
self._ensure_transaction()
con = self._connection_inner
return self._connection_impl._protocol.sync_execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
io_format=protocol.IoFormat.JSON,
)
def query_single_json(self, query: str, *args, **kwargs) -> str:
self._ensure_transaction()
con = self._connection_inner
return self._connection_impl._protocol.sync_execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=con._codecs_registry,
qc=con._query_cache,
expect_one=True,
io_format=protocol.IoFormat.JSON,
)
def execute(self, query: str) -> None:
self._ensure_transaction()
self._connection_impl._protocol.sync_simple_query(
query, enums.Capability.EXECUTE)
class Transaction(BaseBlockingIOTransaction):
def __enter__(self):
if self._managed:
raise errors.InterfaceError(
'cannot enter context: already in a `with` block')
self._managed = True
self.start()
return self
def __exit__(self, extype, ex, tb):
try:
if extype is not None:
self._rollback()
else:
self._commit()
finally:
self._managed = False
def start(self) -> None:
self._start()
self._borrow()
def commit(self) -> None:
if self._managed:
raise errors.InterfaceError(
'cannot manually commit from within a `with` block')
self._commit()
def rollback(self) -> None:
if self._managed:
raise errors.InterfaceError(
'cannot manually rollback from within a `with` block')
self._rollback()
| true
| true
|
7909157e975064cc5d3fcee7b606dbc848876a6b
| 2,646
|
py
|
Python
|
src/abaqus/BoundaryCondition/DisplacementBaseMotionBCState.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | 7
|
2022-01-21T09:15:45.000Z
|
2022-02-15T09:31:58.000Z
|
src/abaqus/BoundaryCondition/DisplacementBaseMotionBCState.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
src/abaqus/BoundaryCondition/DisplacementBaseMotionBCState.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
from abaqusConstants import *
from .BoundaryConditionState import BoundaryConditionState
class DisplacementBaseMotionBCState(BoundaryConditionState):
"""The DisplacementBaseMotionBCState object stores the propagating data for a velocity base
motion boundary condition in a step. One instance of this object is created internally
by the DisplacementBaseMotionBC object for each step. The instance is also deleted
internally by the DisplacementBaseMotionBC object.
The DisplacementBaseMotionBCState object has no constructor or methods.
The DisplacementBaseMotionBCState object is derived from the BoundaryConditionState
object.
Attributes
----------
amplitudeState: SymbolicConstant
A SymbolicConstant specifying the propagation state of the amplitude reference. Possible
values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
status: SymbolicConstant
A SymbolicConstant specifying the propagation state of the :py:class:`~abaqus.BoundaryCondition.BoundaryConditionState.BoundaryConditionState` object. Possible values are:
NOT_YET_ACTIVE
CREATED
PROPAGATED
MODIFIED
DEACTIVATED
NO_LONGER_ACTIVE
TYPE_NOT_APPLICABLE
INSTANCE_NOT_APPLICABLE
PROPAGATED_FROM_BASE_STATE
MODIFIED_FROM_BASE_STATE
DEACTIVATED_FROM_BASE_STATE
BUILT_INTO_MODES
amplitude: str
A String specifying the name of the amplitude reference. The String is empty if the
boundary condition has no amplitude reference.
Notes
-----
This object can be accessed by:
.. code-block:: python
import load
mdb.models[name].steps[name].boundaryConditionStates[name]
The corresponding analysis keywords are:
- BASE MOTION
"""
# A SymbolicConstant specifying the propagation state of the amplitude reference. Possible
# values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
amplitudeState: SymbolicConstant = None
# A SymbolicConstant specifying the propagation state of the BoundaryConditionState object. Possible values are:
# NOT_YET_ACTIVE
# CREATED
# PROPAGATED
# MODIFIED
# DEACTIVATED
# NO_LONGER_ACTIVE
# TYPE_NOT_APPLICABLE
# INSTANCE_NOT_APPLICABLE
# PROPAGATED_FROM_BASE_STATE
# MODIFIED_FROM_BASE_STATE
# DEACTIVATED_FROM_BASE_STATE
# BUILT_INTO_MODES
status: SymbolicConstant = None
# A String specifying the name of the amplitude reference. The String is empty if the
# boundary condition has no amplitude reference.
amplitude: str = ''
| 35.756757
| 179
| 0.733182
|
from abaqusConstants import *
from .BoundaryConditionState import BoundaryConditionState
class DisplacementBaseMotionBCState(BoundaryConditionState):
amplitudeState: SymbolicConstant = None
status: SymbolicConstant = None
amplitude: str = ''
| true
| true
|
790915af616b625666da4042e9338d6c6ec67436
| 4,391
|
py
|
Python
|
tensorflow/python/ops/data_flow_grad.py
|
devsangwoo/tensor
|
066592c9f9cdf4acdd1b9b104766271133e9088e
|
[
"Apache-2.0"
] | 1
|
2020-01-12T14:38:34.000Z
|
2020-01-12T14:38:34.000Z
|
tensorflow/python/ops/data_flow_grad.py
|
devsangwoo/tensor
|
066592c9f9cdf4acdd1b9b104766271133e9088e
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/python/ops/data_flow_grad.py
|
devsangwoo/tensor
|
066592c9f9cdf4acdd1b9b104766271133e9088e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for operators defined in data_flow_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
@ops.RegisterGradient("DynamicPartition")
def _DynamicPartitionGrads(op, *grads):
"""Gradients for DynamicPartition."""
data = op.inputs[0]
indices = op.inputs[1]
num_partitions = op.get_attr("num_partitions")
prefix_shape = array_ops.shape(indices)
original_indices = array_ops.reshape(
math_ops.range(math_ops.reduce_prod(prefix_shape)), prefix_shape)
partitioned_indices = data_flow_ops.dynamic_partition(
original_indices, indices, num_partitions)
reconstructed = data_flow_ops.parallel_dynamic_stitch(partitioned_indices,
grads)
reconstructed = array_ops.reshape(reconstructed, array_ops.shape(data))
return [reconstructed, None]
@ops.RegisterGradient("DynamicStitch")
@ops.RegisterGradient("ParallelDynamicStitch")
def _DynamicStitchGrads(op, grad):
"""Gradients for DynamicStitch and ParallelDynamicStitch."""
num_values = len(op.inputs) // 2
indices_grad = [None] * num_values
def AsInt32(x):
return (x if op.inputs[0].dtype == dtypes.int32 else
math_ops.cast(x, dtypes.int32))
inputs = [AsInt32(op.inputs[i]) for i in xrange(num_values)]
if isinstance(grad, ops.IndexedSlices):
output_shape = array_ops.shape(op.outputs[0])
output_rows = output_shape[0]
grad = math_ops.unsorted_segment_sum(grad.values, grad.indices, output_rows)
values_grad = [array_ops.gather(grad, inp) for inp in inputs]
return indices_grad + values_grad
ops.NotDifferentiable("Queue")
ops.NotDifferentiable("QueueEnqueue")
ops.NotDifferentiable("QueueEnqueueMany")
ops.NotDifferentiable("QueueDequeue")
ops.NotDifferentiable("QueueDequeueMany")
ops.NotDifferentiable("QueueDequeueUpTo")
ops.NotDifferentiable("QueueClose")
ops.NotDifferentiable("QueueSize")
ops.NotDifferentiable("Stack")
ops.NotDifferentiable("StackPush")
ops.NotDifferentiable("StackPop")
ops.NotDifferentiable("StackClose")
ops.NotDifferentiable("GetSessionHandle")
ops.NotDifferentiable("GetSessionHandleV2")
ops.NotDifferentiable("GetSessionTensor")
ops.NotDifferentiable("DeleteSessionTensor")
| 36.289256
| 80
| 0.751765
|
"""Gradients for operators defined in data_flow_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import xrange
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
@ops.RegisterGradient("DynamicPartition")
def _DynamicPartitionGrads(op, *grads):
"""Gradients for DynamicPartition."""
data = op.inputs[0]
indices = op.inputs[1]
num_partitions = op.get_attr("num_partitions")
prefix_shape = array_ops.shape(indices)
original_indices = array_ops.reshape(
math_ops.range(math_ops.reduce_prod(prefix_shape)), prefix_shape)
partitioned_indices = data_flow_ops.dynamic_partition(
original_indices, indices, num_partitions)
reconstructed = data_flow_ops.parallel_dynamic_stitch(partitioned_indices,
grads)
reconstructed = array_ops.reshape(reconstructed, array_ops.shape(data))
return [reconstructed, None]
@ops.RegisterGradient("DynamicStitch")
@ops.RegisterGradient("ParallelDynamicStitch")
def _DynamicStitchGrads(op, grad):
"""Gradients for DynamicStitch and ParallelDynamicStitch."""
num_values = len(op.inputs) // 2
indices_grad = [None] * num_values
def AsInt32(x):
return (x if op.inputs[0].dtype == dtypes.int32 else
math_ops.cast(x, dtypes.int32))
inputs = [AsInt32(op.inputs[i]) for i in xrange(num_values)]
if isinstance(grad, ops.IndexedSlices):
output_shape = array_ops.shape(op.outputs[0])
output_rows = output_shape[0]
grad = math_ops.unsorted_segment_sum(grad.values, grad.indices, output_rows)
values_grad = [array_ops.gather(grad, inp) for inp in inputs]
return indices_grad + values_grad
ops.NotDifferentiable("Queue")
ops.NotDifferentiable("QueueEnqueue")
ops.NotDifferentiable("QueueEnqueueMany")
ops.NotDifferentiable("QueueDequeue")
ops.NotDifferentiable("QueueDequeueMany")
ops.NotDifferentiable("QueueDequeueUpTo")
ops.NotDifferentiable("QueueClose")
ops.NotDifferentiable("QueueSize")
ops.NotDifferentiable("Stack")
ops.NotDifferentiable("StackPush")
ops.NotDifferentiable("StackPop")
ops.NotDifferentiable("StackClose")
ops.NotDifferentiable("GetSessionHandle")
ops.NotDifferentiable("GetSessionHandleV2")
ops.NotDifferentiable("GetSessionTensor")
ops.NotDifferentiable("DeleteSessionTensor")
| false
| true
|
790915ec8056e8dd88be493b2112f4eb4e3539c8
| 2,653
|
py
|
Python
|
ssd_mobilenetv2/BoundingBoxes.py
|
videetparekh/model-zoo-models
|
431d6e8f04c343a2e6dbc140e1b060cc5f0a089d
|
[
"Apache-2.0"
] | null | null | null |
ssd_mobilenetv2/BoundingBoxes.py
|
videetparekh/model-zoo-models
|
431d6e8f04c343a2e6dbc140e1b060cc5f0a089d
|
[
"Apache-2.0"
] | null | null | null |
ssd_mobilenetv2/BoundingBoxes.py
|
videetparekh/model-zoo-models
|
431d6e8f04c343a2e6dbc140e1b060cc5f0a089d
|
[
"Apache-2.0"
] | null | null | null |
from BoundingBox import *
from eval_utils import *
class BoundingBoxes:
def __init__(self):
self._boundingBoxes = []
def addBoundingBox(self, bb):
self._boundingBoxes.append(bb)
def removeBoundingBox(self, _boundingBox):
for d in self._boundingBoxes:
if BoundingBox.compare(d, _boundingBox):
                self._boundingBoxes.remove(d)  # remove by value; `del` expects an index
return
def removeAllBoundingBoxes(self):
self._boundingBoxes = []
def getBoundingBoxes(self):
return self._boundingBoxes
def getBoundingBoxByClass(self, classId):
boundingBoxes = []
for d in self._boundingBoxes:
if d.getClassId() == classId: # get only specified bounding box type
boundingBoxes.append(d)
return boundingBoxes
def getClasses(self):
classes = []
for d in self._boundingBoxes:
c = d.getClassId()
if c not in classes:
classes.append(c)
return classes
def getBoundingBoxesByType(self, bbType):
# get only specified bb type
return [d for d in self._boundingBoxes if d.getBBType() == bbType]
def getBoundingBoxesByImageName(self, imageName):
# get only specified bb type
return [d for d in self._boundingBoxes if d.getImageName() == imageName]
def count(self, bbType=None):
if bbType is None: # Return all bounding boxes
return len(self._boundingBoxes)
count = 0
for d in self._boundingBoxes:
if d.getBBType() == bbType: # get only specified bb type
count += 1
return count
def clone(self):
newBoundingBoxes = BoundingBoxes()
for d in self._boundingBoxes:
det = BoundingBox.clone(d)
newBoundingBoxes.addBoundingBox(det)
return newBoundingBoxes
def drawAllBoundingBoxes(self, image, imageName):
bbxes = self.getBoundingBoxesByImageName(imageName)
for bb in bbxes:
if bb.getBBType() == BBType.GroundTruth: # if ground truth
image = add_bb_into_image(image, bb, color=(0, 255, 0)) # green
else: # if detection
image = add_bb_into_image(image, bb, color=(255, 0, 0)) # red
return image
# def drawAllBoundingBoxes(self, image):
# for gt in self.getBoundingBoxesByType(BBType.GroundTruth):
# image = add_bb_into_image(image, gt ,color=(0,255,0))
# for det in self.getBoundingBoxesByType(BBType.Detected):
# image = add_bb_into_image(image, det ,color=(255,0,0))
# return image
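# Usage sketch (illustrative, not part of the original file). BoundingBox
# construction happens elsewhere, so only the container API is shown; BBType
# is assumed to come from the wildcard imports above.
def summarize(boxes):
    print('ground truth:', boxes.count(BBType.GroundTruth))
    print('detections:', boxes.count(BBType.Detected))
    print('classes seen:', boxes.getClasses())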
| 34.012821
| 81
| 0.614776
|
from BoundingBox import *
from eval_utils import *
class BoundingBoxes:
def __init__(self):
self._boundingBoxes = []
def addBoundingBox(self, bb):
self._boundingBoxes.append(bb)
def removeBoundingBox(self, _boundingBox):
for d in self._boundingBoxes:
if BoundingBox.compare(d, _boundingBox):
                self._boundingBoxes.remove(d)
return
def removeAllBoundingBoxes(self):
self._boundingBoxes = []
def getBoundingBoxes(self):
return self._boundingBoxes
def getBoundingBoxByClass(self, classId):
boundingBoxes = []
for d in self._boundingBoxes:
if d.getClassId() == classId:
boundingBoxes.append(d)
return boundingBoxes
def getClasses(self):
classes = []
for d in self._boundingBoxes:
c = d.getClassId()
if c not in classes:
classes.append(c)
return classes
def getBoundingBoxesByType(self, bbType):
return [d for d in self._boundingBoxes if d.getBBType() == bbType]
def getBoundingBoxesByImageName(self, imageName):
return [d for d in self._boundingBoxes if d.getImageName() == imageName]
def count(self, bbType=None):
if bbType is None:
return len(self._boundingBoxes)
count = 0
for d in self._boundingBoxes:
if d.getBBType() == bbType:
count += 1
return count
def clone(self):
newBoundingBoxes = BoundingBoxes()
for d in self._boundingBoxes:
det = BoundingBox.clone(d)
newBoundingBoxes.addBoundingBox(det)
return newBoundingBoxes
def drawAllBoundingBoxes(self, image, imageName):
bbxes = self.getBoundingBoxesByImageName(imageName)
for bb in bbxes:
if bb.getBBType() == BBType.GroundTruth:
image = add_bb_into_image(image, bb, color=(0, 255, 0))
else:
image = add_bb_into_image(image, bb, color=(255, 0, 0))
return image
| true
| true
|
790915feecd00d4818aeaa33269fedf6ba8e231d
| 779
|
py
|
Python
|
DNA.py
|
bharathgs/warcode
|
b810ab6722c9d6a9aa3dc443dece682479ca9ed4
|
[
"MIT"
] | 5
|
2018-06-02T07:54:41.000Z
|
2022-02-07T19:37:17.000Z
|
DNA.py
|
Thearakim/warcode
|
b810ab6722c9d6a9aa3dc443dece682479ca9ed4
|
[
"MIT"
] | 1
|
2017-08-06T19:19:55.000Z
|
2017-08-06T19:19:55.000Z
|
DNA.py
|
bharathgs/Codewars
|
b810ab6722c9d6a9aa3dc443dece682479ca9ed4
|
[
"MIT"
] | 6
|
2018-05-10T23:11:32.000Z
|
2022-03-03T13:24:27.000Z
|
'''Deoxyribonucleic acid (DNA) is a chemical found in the nucleus
of cells and carries the "instructions" for the development and functioning of living organisms.
If you want to know more: http://en.wikipedia.org/wiki/DNA
In DNA strings, the symbols "A" and "T" are complements of each other,
as are "C" and "G". You are given one side of the DNA
(a string, except for Haskell); you need to return the other, complementary side.
The DNA strand is never empty and is never absent entirely (again, except for Haskell).'''
#ATTGC >>>> TAACG
def DNA_strand(dna):
Dna_dict = {'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G'}
    return ''.join(Dna_dict[letter] for letter in dna if letter in Dna_dict)
'''also we can simply do: return dna.translate(str.maketrans('ATGC', 'TACG'))'''
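# Quick checks (illustrative, not part of the original kata solution): the
# dict-based function and the str.translate one-liner agree on the example.
assert DNA_strand('ATTGC') == 'TAACG'
assert 'ATTGC'.translate(str.maketrans('ATGC', 'TACG')) == 'TAACG'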
| 41
| 96
| 0.698331
|
def DNA_strand(dna):
Dna_dict = {'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G'}
    return ''.join(Dna_dict[letter] for letter in dna if letter in Dna_dict)
| true
| true
|
79091623a8a682cbdb929e2f50e5725f793d8b44
| 23,970
|
py
|
Python
|
pw_watch/py/pw_watch/watch.py
|
mspang/pigweed
|
89ff5f98f38b1ff7a1ff0633c590479e9b592a14
|
[
"Apache-2.0"
] | null | null | null |
pw_watch/py/pw_watch/watch.py
|
mspang/pigweed
|
89ff5f98f38b1ff7a1ff0633c590479e9b592a14
|
[
"Apache-2.0"
] | 1
|
2021-06-18T13:54:41.000Z
|
2021-06-18T13:54:41.000Z
|
pw_watch/py/pw_watch/watch.py
|
mspang/pigweed
|
89ff5f98f38b1ff7a1ff0633c590479e9b592a14
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Watch files for changes and rebuild.
pw watch runs Ninja in a build directory when source files change. It works with
any Ninja project (GN or CMake).
Usage examples:
# Find a build directory and build the default target
pw watch
# Find a build directory and build the stm32f429i target
pw watch python.lint stm32f429i
# Build pw_run_tests.modules in the out/cmake directory
pw watch -C out/cmake pw_run_tests.modules
# Build the default target in out/ and pw_apps in out/cmake
pw watch -C out -C out/cmake pw_apps
# Find a directory and build python.tests, and build pw_apps in out/cmake
pw watch python.tests -C out/cmake pw_apps
"""
import argparse
from dataclasses import dataclass
import logging
import os
from pathlib import Path
import shlex
import subprocess
import sys
import threading
from typing import (Iterable, List, NamedTuple, NoReturn, Optional, Sequence,
Tuple)
from watchdog.events import FileSystemEventHandler # type: ignore[import]
from watchdog.observers import Observer # type: ignore[import]
import pw_cli.branding
import pw_cli.color
import pw_cli.env
import pw_cli.plugins
from pw_watch.debounce import DebouncedFunction, Debouncer
_COLOR = pw_cli.color.colors()
_LOG = logging.getLogger(__name__)
_ERRNO_INOTIFY_LIMIT_REACHED = 28
# Suppress events under 'fsevents', generated by watchdog on every file
# event on MacOS.
# TODO(b/182281481): Fix file ignoring, rather than just suppressing logs
_FSEVENTS_LOG = logging.getLogger('fsevents')
_FSEVENTS_LOG.setLevel(logging.WARNING)
_PASS_MESSAGE = """
██████╗ █████╗ ███████╗███████╗██╗
██╔══██╗██╔══██╗██╔════╝██╔════╝██║
██████╔╝███████║███████╗███████╗██║
██╔═══╝ ██╔══██║╚════██║╚════██║╚═╝
██║ ██║ ██║███████║███████║██╗
╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚═╝
"""
# Pick a font that is visually distinct from the "PASS" banner to ensure
# that readers can't mistake one state for the other.
_FAIL_MESSAGE = """
▄██████▒░▄▄▄ ██▓ ░██▓
▓█▓ ░▒████▄ ▓██▒ ░▓██▒
▒████▒ ░▒█▀ ▀█▄ ▒██▒ ▒██░
░▓█▒ ░░██▄▄▄▄██ ░██░ ▒██░
░▒█░ ▓█ ▓██▒░██░░ ████████▒
▒█░ ▒▒ ▓▒█░░▓ ░ ▒░▓ ░
░▒ ▒ ▒▒ ░ ▒ ░░ ░ ▒ ░
░ ░ ░ ▒ ▒ ░ ░ ░
░ ░ ░ ░ ░
"""
# TODO(keir): Figure out a better strategy for exiting. The problem with the
# watcher is that doing a "clean exit" is slow. However, by directly exiting,
# we remove the possibility of the wrapper script doing anything on exit.
def _die(*args) -> NoReturn:
_LOG.critical(*args)
sys.exit(1)
class WatchCharset(NamedTuple):
slug_ok: str
slug_fail: str
_ASCII_CHARSET = WatchCharset(_COLOR.green('OK '), _COLOR.red('FAIL'))
_EMOJI_CHARSET = WatchCharset('✔️ ', '💥')
@dataclass(frozen=True)
class BuildCommand:
build_dir: Path
targets: Tuple[str, ...] = ()
def args(self) -> Tuple[str, ...]:
return (str(self.build_dir), *self.targets)
def __str__(self) -> str:
return ' '.join(shlex.quote(arg) for arg in self.args())
def git_ignored(file: Path) -> bool:
"""Returns true if this file is in a Git repo and ignored by that repo.
Returns true for ignored files that were manually added to a repo.
"""
file = file.resolve()
directory = file.parent
# Run the Git command from file's parent so that the correct repo is used.
while True:
try:
returncode = subprocess.run(
['git', 'check-ignore', '--quiet', '--no-index', file],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
cwd=directory).returncode
            # git check-ignore exits 0 when the path is ignored and 128 on a
            # fatal error (e.g. the path is not inside a Git repository).
            return returncode in (0, 128)
except FileNotFoundError:
# If the directory no longer exists, try parent directories until
# an existing directory is found or all directories have been
# checked. This approach makes it possible to check if a deleted
# path is ignored in the repo it was originally created in.
if directory == directory.parent:
return False
directory = directory.parent
class PigweedBuildWatcher(FileSystemEventHandler, DebouncedFunction):
"""Process filesystem events and launch builds if necessary."""
def __init__(
self,
patterns: Sequence[str] = (),
ignore_patterns: Sequence[str] = (),
build_commands: Sequence[BuildCommand] = (),
charset: WatchCharset = _ASCII_CHARSET,
restart: bool = True,
):
super().__init__()
self.patterns = patterns
self.ignore_patterns = ignore_patterns
self.build_commands = build_commands
self.charset: WatchCharset = charset
self.restart_on_changes = restart
self._current_build: subprocess.Popen
self.debouncer = Debouncer(self)
# Track state of a build. These need to be members instead of locals
# due to the split between dispatch(), run(), and on_complete().
self.matching_path: Optional[Path] = None
self.builds_succeeded: List[bool] = []
self.wait_for_keypress_thread = threading.Thread(
None, self._wait_for_enter)
self.wait_for_keypress_thread.start()
def _wait_for_enter(self) -> NoReturn:
try:
while True:
_ = input()
self._current_build.kill()
self.debouncer.press('Manual build requested...')
# Ctrl-C on Unix generates KeyboardInterrupt
# Ctrl-Z on Windows generates EOFError
except (KeyboardInterrupt, EOFError):
_exit_due_to_interrupt()
def _path_matches(self, path: Path) -> bool:
"""Returns true if path matches according to the watcher patterns"""
return (not any(path.match(x) for x in self.ignore_patterns)
and any(path.match(x) for x in self.patterns))
def dispatch(self, event) -> None:
# There isn't any point in triggering builds on new directory creation.
# It's the creation or modification of files that indicate something
# meaningful enough changed for a build.
if event.is_directory:
return
# Collect paths of interest from the event.
paths: List[str] = []
if hasattr(event, 'dest_path'):
paths.append(os.fsdecode(event.dest_path))
if event.src_path:
paths.append(os.fsdecode(event.src_path))
for raw_path in paths:
_LOG.debug('File event: %s', raw_path)
# Check whether Git cares about any of these paths.
for path in (Path(p).resolve() for p in paths):
if not git_ignored(path) and self._path_matches(path):
self._handle_matched_event(path)
return
def _handle_matched_event(self, matching_path: Path) -> None:
if self.matching_path is None:
self.matching_path = matching_path
self.debouncer.press(
f'File change detected: {os.path.relpath(matching_path)}')
# Implementation of DebouncedFunction.run()
#
# Note: This will run on the timer thread created by the Debouncer, rather
# than on the main thread that's watching file events. This enables the
# watcher to continue receiving file change events during a build.
def run(self) -> None:
"""Run all the builds in serial and capture pass/fail for each."""
# Clear the screen and show a banner indicating the build is starting.
print('\033c', end='') # TODO(pwbug/38): Not Windows compatible.
print(pw_cli.branding.banner())
print(
_COLOR.green(
' Watching for changes. Ctrl-C to exit; enter to rebuild'))
print()
_LOG.info('Change detected: %s', self.matching_path)
self.builds_succeeded = []
num_builds = len(self.build_commands)
_LOG.info('Starting build with %d directories', num_builds)
env = os.environ.copy()
# Force colors in Pigweed subcommands run through the watcher.
env['PW_USE_COLOR'] = '1'
for i, cmd in enumerate(self.build_commands, 1):
_LOG.info('[%d/%d] Starting build: %s', i, num_builds, cmd)
# Run the build. Put a blank before/after for visual separation.
print()
self._current_build = subprocess.Popen(
['ninja', '-C', *cmd.args()], env=env)
returncode = self._current_build.wait()
print()
build_ok = (returncode == 0)
if build_ok:
level = logging.INFO
tag = '(OK)'
else:
level = logging.ERROR
tag = '(FAIL)'
_LOG.log(level, '[%d/%d] Finished build: %s %s', i, num_builds,
cmd, tag)
self.builds_succeeded.append(build_ok)
# Implementation of DebouncedFunction.cancel()
def cancel(self) -> bool:
if self.restart_on_changes:
self._current_build.kill()
return True
return False
# Implementation of DebouncedFunction.run()
def on_complete(self, cancelled: bool = False) -> None:
# First, use the standard logging facilities to report build status.
if cancelled:
_LOG.error('Finished; build was interrupted')
elif all(self.builds_succeeded):
_LOG.info('Finished; all successful')
else:
_LOG.info('Finished; some builds failed')
# Then, show a more distinct colored banner.
if not cancelled:
# Write out build summary table so you can tell which builds passed
# and which builds failed.
print()
print(' .------------------------------------')
print(' |')
for (succeeded, cmd) in zip(self.builds_succeeded,
self.build_commands):
slug = (self.charset.slug_ok
if succeeded else self.charset.slug_fail)
print(f' | {slug} {cmd}')
print(' |')
print(" '------------------------------------")
else:
# Build was interrupted.
print()
print(' .------------------------------------')
print(' |')
print(' | ', self.charset.slug_fail, '- interrupted')
print(' |')
print(" '------------------------------------")
# Show a large color banner so it is obvious what the overall result is.
if all(self.builds_succeeded) and not cancelled:
print(_COLOR.green(_PASS_MESSAGE))
else:
print(_COLOR.red(_FAIL_MESSAGE))
self.matching_path = None
# Implementation of DebouncedFunction.on_keyboard_interrupt()
def on_keyboard_interrupt(self) -> NoReturn:
_exit_due_to_interrupt()
_WATCH_PATTERN_DELIMITER = ','
_WATCH_PATTERNS = (
'*.bloaty',
'*.c',
'*.cc',
'*.css',
'*.cpp',
'*.cmake',
'CMakeLists.txt',
'*.gn',
'*.gni',
'*.go',
'*.h',
'*.hpp',
'*.ld',
'*.md',
'*.options',
'*.proto',
'*.py',
'*.rst',
)
def add_parser_arguments(parser: argparse.ArgumentParser) -> None:
"""Sets up an argument parser for pw watch."""
parser.add_argument('--patterns',
help=(_WATCH_PATTERN_DELIMITER +
'-delimited list of globs to '
'watch to trigger recompile'),
default=_WATCH_PATTERN_DELIMITER.join(_WATCH_PATTERNS))
parser.add_argument('--ignore_patterns',
dest='ignore_patterns_string',
help=(_WATCH_PATTERN_DELIMITER +
'-delimited list of globs to '
'ignore events from'))
parser.add_argument('--exclude_list',
nargs='+',
type=Path,
help='directories to ignore during pw watch',
default=[])
parser.add_argument('--no-restart',
dest='restart',
action='store_false',
help='do not restart ongoing builds if files change')
parser.add_argument(
'default_build_targets',
nargs='*',
metavar='target',
default=[],
help=('Automatically locate a build directory and build these '
'targets. For example, `host docs` searches for a Ninja '
'build directory (starting with out/) and builds the '
'`host` and `docs` targets. To specify one or more '
'directories, use the -C / --build_directory option.'))
parser.add_argument(
'-C',
'--build_directory',
dest='build_directories',
nargs='+',
action='append',
default=[],
metavar=('directory', 'target'),
help=('Specify a build directory and optionally targets to '
'build. `pw watch -C out tgt` is equivalent to `ninja '
'-C out tgt`'))
def _exit(code: int) -> NoReturn:
# Note: The "proper" way to exit is via observer.stop(), then
# running a join. However it's slower, so just exit immediately.
#
# Additionally, since there are several threads in the watcher, the usual
# sys.exit approach doesn't work. Instead, run the low level exit which
# kills all threads.
os._exit(code) # pylint: disable=protected-access
def _exit_due_to_interrupt() -> NoReturn:
# To keep the log lines aligned with each other in the presence of
# a '^C' from the keyboard interrupt, add a newline before the log.
print()
print()
_LOG.info('Got Ctrl-C; exiting...')
_exit(0)
def _exit_due_to_inotify_limit():
    # Show information and a suggested command when the inotify limit is
    # reached (OSError).
    _LOG.error('Inotify limit reached: if you are on Linux, run this in '
               'your terminal to temporarily increase the inotify limit. \n')
    print(
        _COLOR.green('  sudo sysctl fs.inotify.max_user_watches='
                     '$NEW_LIMIT$\n'))
    print('  Replace $NEW_LIMIT$ with an integer, '
          'e.g., 1000 should be enough.')
_exit(0)
def _exit_due_to_pigweed_not_installed():
# Show information and suggested commands when pigweed environment variable
# not found.
_LOG.error('Environment variable $PW_ROOT not defined or is defined '
'outside the current directory.')
_LOG.error('Did you forget to activate the Pigweed environment? '
'Try source ./activate.sh')
_LOG.error('Did you forget to install the Pigweed environment? '
'Try source ./bootstrap.sh')
_exit(1)
# Go over each directory inside of the current directory.
# If it is not on the path of elements in directories_to_exclude, add
# (directory, True) to subdirectories_to_watch and later recursively call
# Observer() on them.
# Otherwise add (directory, False) to subdirectories_to_watch and later call
# Observer() with recursion=False.
def minimal_watch_directories(to_watch: Path, to_exclude: Iterable[Path]):
"""Determine which subdirectory to watch recursively"""
try:
to_watch = Path(to_watch)
except TypeError:
assert False, "Please watch one directory at a time."
# Reformat to_exclude.
directories_to_exclude: List[Path] = [
to_watch.joinpath(directory_to_exclude)
for directory_to_exclude in to_exclude
if to_watch.joinpath(directory_to_exclude).is_dir()
]
# Split the relative path of directories_to_exclude (compared to to_watch),
# and generate all parent paths needed to be watched without recursion.
exclude_dir_parents = {to_watch}
for directory_to_exclude in directories_to_exclude:
parts = list(
Path(directory_to_exclude).relative_to(to_watch).parts)[:-1]
dir_tmp = to_watch
for part in parts:
dir_tmp = Path(dir_tmp, part)
exclude_dir_parents.add(dir_tmp)
# Go over all layers of directory. Append those that are the parents of
# directories_to_exclude to the list with recursion==False, and others
# with recursion==True.
for directory in exclude_dir_parents:
dir_path = Path(directory)
yield dir_path, False
for item in Path(directory).iterdir():
if (item.is_dir() and item not in exclude_dir_parents
and item not in directories_to_exclude):
yield item, True
def get_common_excludes() -> List[Path]:
"""Find commonly excluded directories, and return them as a [Path]"""
exclude_list: List[Path] = []
typical_ignored_directories: List[str] = [
'.environment', # Legacy bootstrap-created CIPD and Python venv.
'.presubmit', # Presubmit-created CIPD and Python venv.
'.git', # Pigweed's git repo.
'.mypy_cache', # Python static analyzer.
'.cargo', # Rust package manager.
'environment', # Bootstrap-created CIPD and Python venv.
'out', # Typical build directory.
]
# Preset exclude list for Pigweed's upstream directories.
pw_root_dir = Path(os.environ['PW_ROOT'])
exclude_list.extend(pw_root_dir / ignored_directory
for ignored_directory in typical_ignored_directories)
# Preset exclude for common downstream project structures.
#
# If watch is invoked outside of the Pigweed root, exclude common
# directories.
pw_project_root_dir = Path(os.environ['PW_PROJECT_ROOT'])
if pw_project_root_dir != pw_root_dir:
exclude_list.extend(
pw_project_root_dir / ignored_directory
for ignored_directory in typical_ignored_directories)
# Check for and warn about legacy directories.
legacy_directories = [
'.cipd', # Legacy CIPD location.
'.python3-venv', # Legacy Python venv location.
]
found_legacy = False
for legacy_directory in legacy_directories:
full_legacy_directory = pw_root_dir / legacy_directory
if full_legacy_directory.is_dir():
_LOG.warning('Legacy environment directory found: %s',
str(full_legacy_directory))
exclude_list.append(full_legacy_directory)
found_legacy = True
if found_legacy:
_LOG.warning('Found legacy environment directory(s); these '
'should be deleted')
return exclude_list
def _find_build_dir(default_build_dir: Path = Path('out')) -> Optional[Path]:
"""Searches for a build directory, returning the first it finds."""
# Give priority to out/, then something under out/.
if default_build_dir.joinpath('build.ninja').exists():
return default_build_dir
for path in default_build_dir.glob('**/build.ninja'):
return path.parent
for path in Path.cwd().glob('**/build.ninja'):
return path.parent
return None
def watch(default_build_targets: List[str], build_directories: List[str],
patterns: str, ignore_patterns_string: str, exclude_list: List[Path],
restart: bool):
"""Watches files and runs Ninja commands when they change."""
_LOG.info('Starting Pigweed build watcher')
# Get pigweed directory information from environment variable PW_ROOT.
    if os.environ.get('PW_ROOT') is None:
_exit_due_to_pigweed_not_installed()
pw_root = Path(os.environ['PW_ROOT']).resolve()
if Path.cwd().resolve() not in [pw_root, *pw_root.parents]:
_exit_due_to_pigweed_not_installed()
# Preset exclude list for pigweed directory.
exclude_list += get_common_excludes()
build_commands = [
BuildCommand(Path(build_dir[0]), tuple(build_dir[1:]))
for build_dir in build_directories
]
# If no build directory was specified, search the tree for a build.ninja.
if default_build_targets or not build_directories:
build_dir = _find_build_dir()
# Make sure we found something; if not, bail.
if build_dir is None:
_die("No build dirs found. Did you forget to run 'gn gen out'?")
build_commands.append(
BuildCommand(build_dir, tuple(default_build_targets)))
# Verify that the build output directories exist.
for i, build_target in enumerate(build_commands, 1):
if not build_target.build_dir.is_dir():
_die("Build directory doesn't exist: %s", build_target)
else:
_LOG.info('Will build [%d/%d]: %s', i, len(build_commands),
build_target)
_LOG.debug('Patterns: %s', patterns)
# Try to make a short display path for the watched directory that has
# "$HOME" instead of the full home directory. This is nice for users
# who have deeply nested home directories.
path_to_log = str(Path().resolve()).replace(str(Path.home()), '$HOME')
    # Parse the user-specified ignore patterns.
ignore_patterns = (ignore_patterns_string.split(_WATCH_PATTERN_DELIMITER)
if ignore_patterns_string else [])
env = pw_cli.env.pigweed_environment()
if env.PW_EMOJI:
charset = _EMOJI_CHARSET
else:
charset = _ASCII_CHARSET
event_handler = PigweedBuildWatcher(
patterns=patterns.split(_WATCH_PATTERN_DELIMITER),
ignore_patterns=ignore_patterns,
build_commands=build_commands,
charset=charset,
restart=restart,
)
try:
        # It can take a while to configure the filesystem watcher, so have the
# message reflect that with the "...". Run inside the try: to
# gracefully handle the user Ctrl-C'ing out during startup.
_LOG.info('Attaching filesystem watcher to %s/...', path_to_log)
# Observe changes for all files in the root directory. Whether the
# directory should be observed recursively or not is determined by the
# second element in subdirectories_to_watch.
observers = []
for path, rec in minimal_watch_directories(Path.cwd(), exclude_list):
observer = Observer()
observer.schedule(
event_handler,
str(path),
recursive=rec,
)
observer.start()
observers.append(observer)
event_handler.debouncer.press('Triggering initial build...')
for observer in observers:
while observer.is_alive():
observer.join(1)
# Ctrl-C on Unix generates KeyboardInterrupt
# Ctrl-Z on Windows generates EOFError
except (KeyboardInterrupt, EOFError):
_exit_due_to_interrupt()
except OSError as err:
if err.args[0] == _ERRNO_INOTIFY_LIMIT_REACHED:
_exit_due_to_inotify_limit()
else:
raise err
_LOG.critical('Should never get here')
observer.join()
def main() -> None:
"""Watch files for changes and rebuild."""
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
add_parser_arguments(parser)
watch(**vars(parser.parse_args()))
if __name__ == '__main__':
main()
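# --- Illustrative sketch (an addition, not part of the original module) ---
# Shows how minimal_watch_directories() splits a tree into recursive and
# non-recursive watches; excluding 'out' is an assumed example.
def _demo_minimal_watch_directories() -> None:
    for path, recursive in minimal_watch_directories(Path.cwd(), [Path('out')]):
        # Parents of excluded directories are yielded with recursive=False so
        # the excluded subtree is never attached to an Observer.
        print(path, 'recursive' if recursive else 'non-recursive')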
| 36.208459
| 80
| 0.617063
|
import argparse
from dataclasses import dataclass
import logging
import os
from pathlib import Path
import shlex
import subprocess
import sys
import threading
from typing import (Iterable, List, NamedTuple, NoReturn, Optional, Sequence,
Tuple)
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
import pw_cli.branding
import pw_cli.color
import pw_cli.env
import pw_cli.plugins
from pw_watch.debounce import DebouncedFunction, Debouncer
_COLOR = pw_cli.color.colors()
_LOG = logging.getLogger(__name__)
_ERRNO_INOTIFY_LIMIT_REACHED = 28
_FSEVENTS_LOG = logging.getLogger('fsevents')
_FSEVENTS_LOG.setLevel(logging.WARNING)
_PASS_MESSAGE = """
██████╗ █████╗ ███████╗███████╗██╗
██╔══██╗██╔══██╗██╔════╝██╔════╝██║
██████╔╝███████║███████╗███████╗██║
██╔═══╝ ██╔══██║╚════██║╚════██║╚═╝
██║ ██║ ██║███████║███████║██╗
╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚═╝
"""
# possibly mistake the difference between the two states.
_FAIL_MESSAGE = """
▄██████▒░▄▄▄ ██▓ ░██▓
▓█▓ ░▒████▄ ▓██▒ ░▓██▒
▒████▒ ░▒█▀ ▀█▄ ▒██▒ ▒██░
░▓█▒ ░░██▄▄▄▄██ ░██░ ▒██░
░▒█░ ▓█ ▓██▒░██░░ ████████▒
▒█░ ▒▒ ▓▒█░░▓ ░ ▒░▓ ░
░▒ ▒ ▒▒ ░ ▒ ░░ ░ ▒ ░
░ ░ ░ ▒ ▒ ░ ░ ░
░ ░ ░ ░ ░
"""
# TODO(keir): Figure out a better strategy for exiting. The problem with the
# watcher is that doing a "clean exit" is slow. However, by directly exiting,
# we remove the possibility of the wrapper script doing anything on exit.
def _die(*args) -> NoReturn:
_LOG.critical(*args)
sys.exit(1)
class WatchCharset(NamedTuple):
slug_ok: str
slug_fail: str
_ASCII_CHARSET = WatchCharset(_COLOR.green('OK '), _COLOR.red('FAIL'))
_EMOJI_CHARSET = WatchCharset('✔️ ', '💥')
@dataclass(frozen=True)
class BuildCommand:
build_dir: Path
targets: Tuple[str, ...] = ()
def args(self) -> Tuple[str, ...]:
return (str(self.build_dir), *self.targets)
def __str__(self) -> str:
return ' '.join(shlex.quote(arg) for arg in self.args())
def git_ignored(file: Path) -> bool:
file = file.resolve()
directory = file.parent
# Run the Git command from file's parent so that the correct repo is used.
while True:
try:
returncode = subprocess.run(
['git', 'check-ignore', '--quiet', '--no-index', file],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
cwd=directory).returncode
return returncode in (0, 128)
except FileNotFoundError:
if directory == directory.parent:
return False
directory = directory.parent
class PigweedBuildWatcher(FileSystemEventHandler, DebouncedFunction):
def __init__(
self,
patterns: Sequence[str] = (),
ignore_patterns: Sequence[str] = (),
build_commands: Sequence[BuildCommand] = (),
charset: WatchCharset = _ASCII_CHARSET,
restart: bool = True,
):
super().__init__()
self.patterns = patterns
self.ignore_patterns = ignore_patterns
self.build_commands = build_commands
self.charset: WatchCharset = charset
self.restart_on_changes = restart
self._current_build: subprocess.Popen
self.debouncer = Debouncer(self)
self.matching_path: Optional[Path] = None
self.builds_succeeded: List[bool] = []
self.wait_for_keypress_thread = threading.Thread(
None, self._wait_for_enter)
self.wait_for_keypress_thread.start()
def _wait_for_enter(self) -> NoReturn:
try:
while True:
_ = input()
self._current_build.kill()
self.debouncer.press('Manual build requested...')
except (KeyboardInterrupt, EOFError):
_exit_due_to_interrupt()
def _path_matches(self, path: Path) -> bool:
return (not any(path.match(x) for x in self.ignore_patterns)
and any(path.match(x) for x in self.patterns))
def dispatch(self, event) -> None:
# It's the creation or modification of files that indicate something
if event.is_directory:
return
paths: List[str] = []
if hasattr(event, 'dest_path'):
paths.append(os.fsdecode(event.dest_path))
if event.src_path:
paths.append(os.fsdecode(event.src_path))
for raw_path in paths:
_LOG.debug('File event: %s', raw_path)
for path in (Path(p).resolve() for p in paths):
if not git_ignored(path) and self._path_matches(path):
self._handle_matched_event(path)
return
def _handle_matched_event(self, matching_path: Path) -> None:
if self.matching_path is None:
self.matching_path = matching_path
self.debouncer.press(
f'File change detected: {os.path.relpath(matching_path)}')
# watcher to continue receiving file change events during a build.
def run(self) -> None:
# Clear the screen and show a banner indicating the build is starting.
print('\033c', end='') # TODO(pwbug/38): Not Windows compatible.
print(pw_cli.branding.banner())
print(
_COLOR.green(
' Watching for changes. Ctrl-C to exit; enter to rebuild'))
print()
_LOG.info('Change detected: %s', self.matching_path)
self.builds_succeeded = []
num_builds = len(self.build_commands)
_LOG.info('Starting build with %d directories', num_builds)
env = os.environ.copy()
# Force colors in Pigweed subcommands run through the watcher.
env['PW_USE_COLOR'] = '1'
for i, cmd in enumerate(self.build_commands, 1):
_LOG.info('[%d/%d] Starting build: %s', i, num_builds, cmd)
# Run the build. Put a blank before/after for visual separation.
print()
self._current_build = subprocess.Popen(
['ninja', '-C', *cmd.args()], env=env)
returncode = self._current_build.wait()
print()
build_ok = (returncode == 0)
if build_ok:
level = logging.INFO
tag = '(OK)'
else:
level = logging.ERROR
tag = '(FAIL)'
_LOG.log(level, '[%d/%d] Finished build: %s %s', i, num_builds,
cmd, tag)
self.builds_succeeded.append(build_ok)
# Implementation of DebouncedFunction.cancel()
def cancel(self) -> bool:
if self.restart_on_changes:
self._current_build.kill()
return True
return False
# Implementation of DebouncedFunction.run()
def on_complete(self, cancelled: bool = False) -> None:
# First, use the standard logging facilities to report build status.
if cancelled:
_LOG.error('Finished; build was interrupted')
elif all(self.builds_succeeded):
_LOG.info('Finished; all successful')
else:
_LOG.info('Finished; some builds failed')
# Then, show a more distinct colored banner.
if not cancelled:
# Write out build summary table so you can tell which builds passed
# and which builds failed.
print()
print(' .------------------------------------')
print(' |')
for (succeeded, cmd) in zip(self.builds_succeeded,
self.build_commands):
slug = (self.charset.slug_ok
if succeeded else self.charset.slug_fail)
print(f' | {slug} {cmd}')
print(' |')
print(" '------------------------------------")
else:
print()
print(' .------------------------------------')
print(' |')
print(' | ', self.charset.slug_fail, '- interrupted')
print(' |')
print(" '------------------------------------")
# Show a large color banner so it is obvious what the overall result is.
if all(self.builds_succeeded) and not cancelled:
print(_COLOR.green(_PASS_MESSAGE))
else:
print(_COLOR.red(_FAIL_MESSAGE))
self.matching_path = None
# Implementation of DebouncedFunction.on_keyboard_interrupt()
def on_keyboard_interrupt(self) -> NoReturn:
_exit_due_to_interrupt()
_WATCH_PATTERN_DELIMITER = ','
_WATCH_PATTERNS = (
'*.bloaty',
'*.c',
'*.cc',
'*.css',
'*.cpp',
'*.cmake',
'CMakeLists.txt',
'*.gn',
'*.gni',
'*.go',
'*.h',
'*.hpp',
'*.ld',
'*.md',
'*.options',
'*.proto',
'*.py',
'*.rst',
)
def add_parser_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument('--patterns',
help=(_WATCH_PATTERN_DELIMITER +
'-delimited list of globs to '
'watch to trigger recompile'),
default=_WATCH_PATTERN_DELIMITER.join(_WATCH_PATTERNS))
parser.add_argument('--ignore_patterns',
dest='ignore_patterns_string',
help=(_WATCH_PATTERN_DELIMITER +
'-delimited list of globs to '
'ignore events from'))
parser.add_argument('--exclude_list',
nargs='+',
type=Path,
help='directories to ignore during pw watch',
default=[])
parser.add_argument('--no-restart',
dest='restart',
action='store_false',
help='do not restart ongoing builds if files change')
parser.add_argument(
'default_build_targets',
nargs='*',
metavar='target',
default=[],
help=('Automatically locate a build directory and build these '
'targets. For example, `host docs` searches for a Ninja '
'build directory (starting with out/) and builds the '
'`host` and `docs` targets. To specify one or more '
'directories, use the -C / --build_directory option.'))
parser.add_argument(
'-C',
'--build_directory',
dest='build_directories',
nargs='+',
action='append',
default=[],
metavar=('directory', 'target'),
help=('Specify a build directory and optionally targets to '
'build. `pw watch -C out tgt` is equivalent to `ninja '
'-C out tgt`'))
def _exit(code: int) -> NoReturn:
# Note: The "proper" way to exit is via observer.stop(), then
# running a join. However it's slower, so just exit immediately.
# kills all threads.
os._exit(code) # pylint: disable=protected-access
def _exit_due_to_interrupt() -> NoReturn:
# To keep the log lines aligned with each other in the presence of
# a '^C' from the keyboard interrupt, add a newline before the log.
print()
print()
_LOG.info('Got Ctrl-C; exiting...')
_exit(0)
def _exit_due_to_inotify_limit():
    # Show information and a suggested command when the inotify limit is
    # reached (OSError).
    _LOG.error('Inotify limit reached: if you are on Linux, run this in '
               'your terminal to temporarily increase the inotify limit. \n')
    print(
        _COLOR.green('  sudo sysctl fs.inotify.max_user_watches='
                     '$NEW_LIMIT$\n'))
    print('  Replace $NEW_LIMIT$ with an integer, '
          'e.g., 1000 should be enough.')
_exit(0)
def _exit_due_to_pigweed_not_installed():
# Show information and suggested commands when pigweed environment variable
# not found.
_LOG.error('Environment variable $PW_ROOT not defined or is defined '
'outside the current directory.')
_LOG.error('Did you forget to activate the Pigweed environment? '
'Try source ./activate.sh')
_LOG.error('Did you forget to install the Pigweed environment? '
'Try source ./bootstrap.sh')
_exit(1)
# Go over each directory inside of the current directory.
# If it is not on the path of elements in directories_to_exclude, add
# (directory, True) to subdirectories_to_watch and later recursively call
# Observer() on them.
# Otherwise add (directory, False) to subdirectories_to_watch and later call
# Observer() with recursion=False.
def minimal_watch_directories(to_watch: Path, to_exclude: Iterable[Path]):
try:
to_watch = Path(to_watch)
except TypeError:
assert False, "Please watch one directory at a time."
# Reformat to_exclude.
directories_to_exclude: List[Path] = [
to_watch.joinpath(directory_to_exclude)
for directory_to_exclude in to_exclude
if to_watch.joinpath(directory_to_exclude).is_dir()
]
# Split the relative path of directories_to_exclude (compared to to_watch),
# and generate all parent paths needed to be watched without recursion.
exclude_dir_parents = {to_watch}
for directory_to_exclude in directories_to_exclude:
parts = list(
Path(directory_to_exclude).relative_to(to_watch).parts)[:-1]
dir_tmp = to_watch
for part in parts:
dir_tmp = Path(dir_tmp, part)
exclude_dir_parents.add(dir_tmp)
# Go over all layers of directory. Append those that are the parents of
# directories_to_exclude to the list with recursion==False, and others
# with recursion==True.
for directory in exclude_dir_parents:
dir_path = Path(directory)
yield dir_path, False
for item in Path(directory).iterdir():
if (item.is_dir() and item not in exclude_dir_parents
and item not in directories_to_exclude):
yield item, True
def get_common_excludes() -> List[Path]:
exclude_list: List[Path] = []
typical_ignored_directories: List[str] = [
'.environment', # Legacy bootstrap-created CIPD and Python venv.
'.presubmit', # Presubmit-created CIPD and Python venv.
'.git', # Pigweed's git repo.
'.mypy_cache',
'.cargo',
'environment',
'out',
]
pw_root_dir = Path(os.environ['PW_ROOT'])
exclude_list.extend(pw_root_dir / ignored_directory
for ignored_directory in typical_ignored_directories)
# Preset exclude for common downstream project structures.
#
# If watch is invoked outside of the Pigweed root, exclude common
# directories.
pw_project_root_dir = Path(os.environ['PW_PROJECT_ROOT'])
if pw_project_root_dir != pw_root_dir:
exclude_list.extend(
pw_project_root_dir / ignored_directory
for ignored_directory in typical_ignored_directories)
# Check for and warn about legacy directories.
legacy_directories = [
'.cipd', # Legacy CIPD location.
'.python3-venv', # Legacy Python venv location.
]
found_legacy = False
for legacy_directory in legacy_directories:
full_legacy_directory = pw_root_dir / legacy_directory
if full_legacy_directory.is_dir():
_LOG.warning('Legacy environment directory found: %s',
str(full_legacy_directory))
exclude_list.append(full_legacy_directory)
found_legacy = True
if found_legacy:
_LOG.warning('Found legacy environment directory(s); these '
'should be deleted')
return exclude_list
def _find_build_dir(default_build_dir: Path = Path('out')) -> Optional[Path]:
# Give priority to out/, then something under out/.
if default_build_dir.joinpath('build.ninja').exists():
return default_build_dir
for path in default_build_dir.glob('**/build.ninja'):
return path.parent
for path in Path.cwd().glob('**/build.ninja'):
return path.parent
return None
def watch(default_build_targets: List[str], build_directories: List[str],
patterns: str, ignore_patterns_string: str, exclude_list: List[Path],
restart: bool):
_LOG.info('Starting Pigweed build watcher')
# Get pigweed directory information from environment variable PW_ROOT.
    if os.environ.get('PW_ROOT') is None:
_exit_due_to_pigweed_not_installed()
pw_root = Path(os.environ['PW_ROOT']).resolve()
if Path.cwd().resolve() not in [pw_root, *pw_root.parents]:
_exit_due_to_pigweed_not_installed()
# Preset exclude list for pigweed directory.
exclude_list += get_common_excludes()
build_commands = [
BuildCommand(Path(build_dir[0]), tuple(build_dir[1:]))
for build_dir in build_directories
]
# If no build directory was specified, search the tree for a build.ninja.
if default_build_targets or not build_directories:
build_dir = _find_build_dir()
# Make sure we found something; if not, bail.
if build_dir is None:
_die("No build dirs found. Did you forget to run 'gn gen out'?")
build_commands.append(
BuildCommand(build_dir, tuple(default_build_targets)))
# Verify that the build output directories exist.
for i, build_target in enumerate(build_commands, 1):
if not build_target.build_dir.is_dir():
_die("Build directory doesn't exist: %s", build_target)
else:
_LOG.info('Will build [%d/%d]: %s', i, len(build_commands),
build_target)
_LOG.debug('Patterns: %s', patterns)
path_to_log = str(Path().resolve()).replace(str(Path.home()), '$HOME')
ignore_patterns = (ignore_patterns_string.split(_WATCH_PATTERN_DELIMITER)
if ignore_patterns_string else [])
env = pw_cli.env.pigweed_environment()
if env.PW_EMOJI:
charset = _EMOJI_CHARSET
else:
charset = _ASCII_CHARSET
event_handler = PigweedBuildWatcher(
patterns=patterns.split(_WATCH_PATTERN_DELIMITER),
ignore_patterns=ignore_patterns,
build_commands=build_commands,
charset=charset,
restart=restart,
)
try:
_LOG.info('Attaching filesystem watcher to %s/...', path_to_log)
# Observe changes for all files in the root directory. Whether the
# directory should be observed recursively or not is determined by the
# second element in subdirectories_to_watch.
observers = []
for path, rec in minimal_watch_directories(Path.cwd(), exclude_list):
observer = Observer()
observer.schedule(
event_handler,
str(path),
recursive=rec,
)
observer.start()
observers.append(observer)
event_handler.debouncer.press('Triggering initial build...')
for observer in observers:
while observer.is_alive():
observer.join(1)
# Ctrl-C on Unix generates KeyboardInterrupt
# Ctrl-Z on Windows generates EOFError
except (KeyboardInterrupt, EOFError):
_exit_due_to_interrupt()
except OSError as err:
if err.args[0] == _ERRNO_INOTIFY_LIMIT_REACHED:
_exit_due_to_inotify_limit()
else:
raise err
_LOG.critical('Should never get here')
observer.join()
def main() -> None:
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
add_parser_arguments(parser)
watch(**vars(parser.parse_args()))
if __name__ == '__main__':
main()
| true
| true
|
790917e35510609617691935a3f39c34c1af58af
| 1,886
|
py
|
Python
|
onconet/models/mirai_full.py
|
harrivle/Mirai
|
ea2d4839f1f8b9f881798b819b2192ce2795bd5d
|
[
"MIT"
] | 37
|
2021-01-28T06:00:34.000Z
|
2022-03-29T21:14:12.000Z
|
onconet/models/mirai_full.py
|
NkwamPhilip/Mirai
|
70413de690da36c5878e2e6006711476e166bb1d
|
[
"MIT"
] | null | null | null |
onconet/models/mirai_full.py
|
NkwamPhilip/Mirai
|
70413de690da36c5878e2e6006711476e166bb1d
|
[
"MIT"
] | 14
|
2021-02-02T09:42:18.000Z
|
2022-03-23T00:36:41.000Z
|
from onconet.models.factory import load_model, RegisterModel, get_model_by_name
import math
import torch
import torch.nn as nn
import pdb
import numpy as np
@RegisterModel("mirai_full")
class MiraiFull(nn.Module):
def __init__(self, args):
super(MiraiFull, self).__init__()
self.args = args
if args.img_encoder_snapshot is not None:
self.image_encoder = load_model(args.img_encoder_snapshot, args, do_wrap_model=False)
else:
self.image_encoder = get_model_by_name('custom_resnet', False, args)
if hasattr(self.args, "freeze_image_encoder") and self.args.freeze_image_encoder:
for param in self.image_encoder.parameters():
param.requires_grad = False
self.image_repr_dim = self.image_encoder._model.args.img_only_dim
if args.transformer_snapshot is not None:
self.transformer = load_model(args.transformer_snapshot, args, do_wrap_model=False)
else:
args.precomputed_hidden_dim = self.image_repr_dim
self.transformer = get_model_by_name('transformer', False, args)
args.img_only_dim = self.transformer.args.transfomer_hidden_dim
def forward(self, x, risk_factors=None, batch=None):
B, C, N, H, W = x.size()
x = x.transpose(1,2).contiguous().view(B*N, C, H, W)
risk_factors_per_img = (lambda N, risk_factors: [factor.expand( [N, *factor.size()]).contiguous().view([-1, factor.size()[-1]]).contiguous() for factor in risk_factors])(N, risk_factors) if risk_factors is not None else None
_, img_x, _ = self.image_encoder(x, risk_factors_per_img, batch)
img_x = img_x.view(B, N, -1)
img_x = img_x[:,:,: self.image_repr_dim]
logit, transformer_hidden, activ_dict = self.transformer(img_x, risk_factors, batch)
return logit, transformer_hidden, activ_dict
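# --- Illustrative sketch (an addition; all sizes are made-up examples) ---
# The model expects a 5-D tensor of (batch, channels, num_views, height, width):
#
#   x = torch.randn(2, 3, 4, 224, 224)   # B=2, C=3, N=4 views of 224x224
#   logit, hidden, activ = model(x)      # model: an already-constructed MiraiFull
#
# Internally the views are folded into the batch for the image encoder and
# unfolded back to (B, N, feature_dim) before the transformer.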
| 47.15
| 233
| 0.68929
|
from onconet.models.factory import load_model, RegisterModel, get_model_by_name
import math
import torch
import torch.nn as nn
import pdb
import numpy as np
@RegisterModel("mirai_full")
class MiraiFull(nn.Module):
def __init__(self, args):
super(MiraiFull, self).__init__()
self.args = args
if args.img_encoder_snapshot is not None:
self.image_encoder = load_model(args.img_encoder_snapshot, args, do_wrap_model=False)
else:
self.image_encoder = get_model_by_name('custom_resnet', False, args)
if hasattr(self.args, "freeze_image_encoder") and self.args.freeze_image_encoder:
for param in self.image_encoder.parameters():
param.requires_grad = False
self.image_repr_dim = self.image_encoder._model.args.img_only_dim
if args.transformer_snapshot is not None:
self.transformer = load_model(args.transformer_snapshot, args, do_wrap_model=False)
else:
args.precomputed_hidden_dim = self.image_repr_dim
self.transformer = get_model_by_name('transformer', False, args)
args.img_only_dim = self.transformer.args.transfomer_hidden_dim
def forward(self, x, risk_factors=None, batch=None):
B, C, N, H, W = x.size()
x = x.transpose(1,2).contiguous().view(B*N, C, H, W)
risk_factors_per_img = (lambda N, risk_factors: [factor.expand( [N, *factor.size()]).contiguous().view([-1, factor.size()[-1]]).contiguous() for factor in risk_factors])(N, risk_factors) if risk_factors is not None else None
_, img_x, _ = self.image_encoder(x, risk_factors_per_img, batch)
img_x = img_x.view(B, N, -1)
img_x = img_x[:,:,: self.image_repr_dim]
logit, transformer_hidden, activ_dict = self.transformer(img_x, risk_factors, batch)
return logit, transformer_hidden, activ_dict
| true
| true
|
79091b625d1a649330301222b1198dd838d24029
| 5,701
|
py
|
Python
|
xview/models/dirichletEstimation.py
|
ethz-asl/modular_semantic_segmentation
|
7c950f24df11540a7ddae4ff806d5b31934a3210
|
[
"BSD-3-Clause"
] | 20
|
2018-08-01T15:02:59.000Z
|
2021-04-19T07:22:17.000Z
|
xview/models/dirichletEstimation.py
|
davesean/modular_semantic_segmentation
|
5f9e34243915b862e8fef5e6195f1e29f4cebf50
|
[
"BSD-3-Clause"
] | null | null | null |
xview/models/dirichletEstimation.py
|
davesean/modular_semantic_segmentation
|
5f9e34243915b862e8fef5e6195f1e29f4cebf50
|
[
"BSD-3-Clause"
] | 9
|
2018-08-01T15:03:03.000Z
|
2019-12-17T05:12:48.000Z
|
#!/usr/bin/python
#
# A library for finding the optimal dirichlet prior from counts
# By: Max Sklar
# @maxsklar
# https://github.com/maxsklar
# Copyright 2013 Max Sklar
import math
import logging
import random
import scipy.special as mathExtra
import scipy
import numpy as np
def digamma(x): return mathExtra.psi(x)
def trigamma(x): return mathExtra.polygamma(1, x)
# Find the "sufficient statistic" for a group of multinomials.
# Essentially, it's the average of the log probabilities
def getSufficientStatistic(multinomials):
N = len(multinomials)
K = len(multinomials[0])
retVal = [0]*K
for m in multinomials:
for k in range(0, K):
retVal[k] += math.log(m[k])
for k in range(0, K): retVal[k] /= N
return retVal
# Find the log probability of the data for a given dirichlet
# This is equal to the log probability of the data, up to a linear transform
def logProbForMultinomials(alphas, ss, delta):
alpha_sum = np.sum(alphas)
retVal = mathExtra.gammaln(alpha_sum)
retVal -= np.sum(mathExtra.gammaln(alphas))
retVal += np.sum(np.multiply(alphas, ss))
retVal -= delta * np.square(alphas).sum()
return retVal
#Gives the derivative of the log probability with respect to the priors. This will be used to adjust the loss
def getGradientForMultinomials(alphas, ss, delta):
K = len(alphas)
C = digamma(sum(alphas)) # - DELTA * sum(alphas)
retVal = [C]*K
for k in range(0, K):
retVal[k] += ss[k] - digamma(alphas[k]) - 2 * delta * alphas[k]
return retVal
#The hessian is actually the sum of two matrices: a diagonal matrix and a constant-value matrix.
#We'll write two functions to get both
def priorHessianConst(alphas, ss, delta): return -trigamma(sum(alphas)) + 2 * delta
def priorHessianDiag(alphas, ss): return [trigamma(a) for a in alphas]
# Compute the next step to try, following eq. 18 of
# http://research.microsoft.com/en-us/um/people/minka/papers/dirichlet/minka-dirichlet.pdf
def getPredictedStep(hConst, hDiag, gradient):
K = len(gradient)
numSum = 0.0
for i in range(0, K):
numSum += gradient[i] / hDiag[i]
denSum = 0.0
for i in range(0, K): denSum += 1.0 / hDiag[i]
b = numSum / ((1.0/hConst) + denSum)
retVal = [0]*K
for i in range(0, K): retVal[i] = (b - gradient[i]) / hDiag[i]
return retVal
# Uses the diagonal hessian on the log-alpha values
def getPredictedStepAlt(hConst, hDiag, gradient, alphas):
K = len(gradient)
Z = 0
for k in range(0, K):
Z += alphas[k] / (gradient[k] - alphas[k]*hDiag[k])
Z *= hConst
Ss = [0]*K
for k in range(0, K):
Ss[k] = 1.0 / (gradient[k] - alphas[k]*hDiag[k]) / (1 + Z)
S = sum(Ss)
retVal = [0]*K
for i in range(0, K):
retVal[i] = gradient[i] / (gradient[i] - alphas[i]*hDiag[i]) * (1 - hConst * alphas[i] * S)
return retVal
#Total loss is the negative log probability of the data under the trial priors
def getTotalLoss(trialPriors, ss, delta):
return -1*logProbForMultinomials(trialPriors, ss, delta)
def predictStepUsingHessian(gradient, priors, ss, delta):
totalHConst = priorHessianConst(priors, ss, delta)
totalHDiag = priorHessianDiag(priors, ss)
return getPredictedStep(totalHConst, totalHDiag, gradient)
def predictStepLogSpace(gradient, priors, ss, delta):
totalHConst = priorHessianConst(priors, ss, delta)
totalHDiag = priorHessianDiag(priors, ss)
return getPredictedStepAlt(totalHConst, totalHDiag, gradient, priors)
# Returns the loss for the trial priors (infinite if any alpha is non-positive)
def testTrialPriors(trialPriors, ss, delta):
for alpha in trialPriors:
if alpha <= 0:
return float("inf")
return getTotalLoss(trialPriors, ss, delta)
def sqVectorSize(v):
s = 0
for i in range(0, len(v)): s += v[i] ** 2
return s
def findDirichletPriors(ss, initAlphas, max_iter=1000, delta=1e-2):
priors = initAlphas
# Let the learning begin!!
#Only step in a positive direction, get the current best loss.
currentLoss = getTotalLoss(priors, ss, delta)
gradientToleranceSq = 2 ** -20
learnRateTolerance = 2 ** -10
count = 0
while(count < max_iter):
count += 1
#Get the data for taking steps
gradient = getGradientForMultinomials(priors, ss, delta)
gradientSize = sqVectorSize(gradient)
#print(count, "Loss: ", currentLoss, ", Priors: ", priors, ", Gradient Size: ", gradientSize, gradient)
if (gradientSize < gradientToleranceSq):
#print("Converged with small gradient")
return priors
trialStep = predictStepUsingHessian(gradient, priors, ss, delta)
#First, try the second order method
trialPriors = [0]*len(priors)
for i in range(0, len(priors)): trialPriors[i] = priors[i] + trialStep[i]
loss = testTrialPriors(trialPriors, ss, delta)
if loss < currentLoss:
currentLoss = loss
priors = trialPriors
continue
    #Second, try the log-space step, and keep it if it improves the loss
    trialStep = predictStepLogSpace(gradient, priors, ss, delta)
    trialPriors = [0]*len(priors)
    for i in range(0, len(priors)): trialPriors[i] = priors[i] * math.exp(trialStep[i])
    loss = testTrialPriors(trialPriors, ss, delta)
    if loss < currentLoss:
      currentLoss = loss
      priors = trialPriors
      continue
    #Step in the direction of the gradient until there is a loss improvement
    loss = 10000000
    learnRate = 1.0
while loss > currentLoss:
learnRate *= 0.9
trialPriors = [0]*len(priors)
for i in range(0, len(priors)): trialPriors[i] = priors[i] + gradient[i]*learnRate
loss = testTrialPriors(trialPriors, ss, delta)
if (learnRate < learnRateTolerance):
#print("Converged with small learn rate")
return priors
currentLoss = loss
priors = trialPriors
#print("Reached max iterations")
return priors
def findDirichletPriorsFromMultinomials(multinomials, initAlphas):
ss = getSufficientStatistic(multinomials)
return findDirichletPriors(ss, initAlphas)
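# --- Illustrative usage sketch (an addition, not part of the library) ---
# Fit a Dirichlet prior to a few multinomial distributions; the numbers
# below are made up for the example.
def _demoFindDirichletPrior():
  multinomials = [[0.6, 0.3, 0.1],
                  [0.5, 0.4, 0.1],
                  [0.7, 0.2, 0.1]]
  alphas = findDirichletPriorsFromMultinomials(multinomials, [1.0, 1.0, 1.0])
  print('Estimated Dirichlet alphas:', alphas)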
| 30.164021
| 107
| 0.695141
|
import math
import logging
import random
import scipy.special as mathExtra
import scipy
import numpy as np
def digamma(x): return mathExtra.psi(x)
def trigamma(x): return mathExtra.polygamma(1, x)
def getSufficientStatistic(multinomials):
N = len(multinomials)
K = len(multinomials[0])
retVal = [0]*K
for m in multinomials:
for k in range(0, K):
retVal[k] += math.log(m[k])
for k in range(0, K): retVal[k] /= N
return retVal
# Find the log probability of the data for a given dirichlet
# This is equal to the log probability of the data, up to a linear transform
def logProbForMultinomials(alphas, ss, delta):
alpha_sum = np.sum(alphas)
retVal = mathExtra.gammaln(alpha_sum)
retVal -= np.sum(mathExtra.gammaln(alphas))
retVal += np.sum(np.multiply(alphas, ss))
retVal -= delta * np.square(alphas).sum()
return retVal
#Gives the derivative of the log probability with respect to the priors. This will be used to adjust the loss
def getGradientForMultinomials(alphas, ss, delta):
K = len(alphas)
C = digamma(sum(alphas)) # - DELTA * sum(alphas)
retVal = [C]*K
for k in range(0, K):
retVal[k] += ss[k] - digamma(alphas[k]) - 2 * delta * alphas[k]
return retVal
#The hessian is actually the sum of two matrices: a diagonal matrix and a constant-value matrix.
#We'll write two functions to get both
def priorHessianConst(alphas, ss, delta): return -trigamma(sum(alphas)) + 2 * delta
def priorHessianDiag(alphas, ss): return [trigamma(a) for a in alphas]
def getPredictedStep(hConst, hDiag, gradient):
K = len(gradient)
numSum = 0.0
for i in range(0, K):
numSum += gradient[i] / hDiag[i]
denSum = 0.0
for i in range(0, K): denSum += 1.0 / hDiag[i]
b = numSum / ((1.0/hConst) + denSum)
retVal = [0]*K
for i in range(0, K): retVal[i] = (b - gradient[i]) / hDiag[i]
return retVal
def getPredictedStepAlt(hConst, hDiag, gradient, alphas):
K = len(gradient)
Z = 0
for k in range(0, K):
Z += alphas[k] / (gradient[k] - alphas[k]*hDiag[k])
Z *= hConst
Ss = [0]*K
for k in range(0, K):
Ss[k] = 1.0 / (gradient[k] - alphas[k]*hDiag[k]) / (1 + Z)
S = sum(Ss)
retVal = [0]*K
for i in range(0, K):
retVal[i] = gradient[i] / (gradient[i] - alphas[i]*hDiag[i]) * (1 - hConst * alphas[i] * S)
return retVal
def getTotalLoss(trialPriors, ss, delta):
return -1*logProbForMultinomials(trialPriors, ss, delta)
def predictStepUsingHessian(gradient, priors, ss, delta):
totalHConst = priorHessianConst(priors, ss, delta)
totalHDiag = priorHessianDiag(priors, ss)
return getPredictedStep(totalHConst, totalHDiag, gradient)
def predictStepLogSpace(gradient, priors, ss, delta):
totalHConst = priorHessianConst(priors, ss, delta)
totalHDiag = priorHessianDiag(priors, ss)
return getPredictedStepAlt(totalHConst, totalHDiag, gradient, priors)
# Returns the loss for the trial priors (infinite if any alpha is non-positive)
def testTrialPriors(trialPriors, ss, delta):
for alpha in trialPriors:
if alpha <= 0:
return float("inf")
return getTotalLoss(trialPriors, ss, delta)
def sqVectorSize(v):
s = 0
for i in range(0, len(v)): s += v[i] ** 2
return s
def findDirichletPriors(ss, initAlphas, max_iter=1000, delta=1e-2):
priors = initAlphas
currentLoss = getTotalLoss(priors, ss, delta)
gradientToleranceSq = 2 ** -20
learnRateTolerance = 2 ** -10
count = 0
while(count < max_iter):
count += 1
gradient = getGradientForMultinomials(priors, ss, delta)
gradientSize = sqVectorSize(gradient)
if (gradientSize < gradientToleranceSq):
return priors
trialStep = predictStepUsingHessian(gradient, priors, ss, delta)
trialPriors = [0]*len(priors)
for i in range(0, len(priors)): trialPriors[i] = priors[i] + trialStep[i]
loss = testTrialPriors(trialPriors, ss, delta)
if loss < currentLoss:
currentLoss = loss
priors = trialPriors
continue
    trialStep = predictStepLogSpace(gradient, priors, ss, delta)
    trialPriors = [0]*len(priors)
    for i in range(0, len(priors)): trialPriors[i] = priors[i] * math.exp(trialStep[i])
    loss = testTrialPriors(trialPriors, ss, delta)
    if loss < currentLoss:
      currentLoss = loss
      priors = trialPriors
      continue
    loss = 10000000
    learnRate = 1.0
while loss > currentLoss:
learnRate *= 0.9
trialPriors = [0]*len(priors)
for i in range(0, len(priors)): trialPriors[i] = priors[i] + gradient[i]*learnRate
loss = testTrialPriors(trialPriors, ss, delta)
if (learnRate < learnRateTolerance):
return priors
currentLoss = loss
priors = trialPriors
return priors
def findDirichletPriorsFromMultinomials(multinomials, initAlphas):
ss = getSufficientStatistic(multinomials)
return findDirichletPriors(ss, initAlphas)
| true
| true
|
79091ba7fa4a904460858a2d54c32390dcc3491e
| 2,179
|
py
|
Python
|
test/functional/beerchain_gas_limit.py
|
beerchainproject/beerchain
|
85a2701aff25358bbf2da6651d407a8d63ea2be8
|
[
"MIT"
] | null | null | null |
test/functional/beerchain_gas_limit.py
|
beerchainproject/beerchain
|
85a2701aff25358bbf2da6651d407a8d63ea2be8
|
[
"MIT"
] | null | null | null |
test/functional/beerchain_gas_limit.py
|
beerchainproject/beerchain
|
85a2701aff25358bbf2da6651d407a8d63ea2be8
|
[
"MIT"
] | 2
|
2020-09-15T08:11:39.000Z
|
2021-03-22T21:56:28.000Z
|
#!/usr/bin/env python3
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.beerchain import *
from test_framework.beerchainconfig import *
from test_framework.blocktools import *
import time
NUM_OUTPUTS = 1000
class BeerchainGasLimit(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-staking=1"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.node = self.nodes[0]
self.node.generate(100+COINBASE_MATURITY)
tx = CTransaction()
"""
contract Test {
function() {
while(msg.gas > 0) {}
}
}
"""
contract_address = self.node.createcontract("60606040523415600e57600080fd5b5b605080601c6000396000f30060606040525b3415600f57600080fd5b60225b5b60005a1115601f576013565b5b565b0000a165627a7a72305820efcd4d663aac9e7a94b44502e712d9eb63cd640efe3aebf9e79210ab63ea6ff60029")['address']
self.node.generate(1)
        # Create a tx with NUM_OUTPUTS (1000) outputs, each with a gas stipend of 5*10^8, calling the contract.
tx = CTransaction()
tx.vin = [make_vin(self.node, NUM_OUTPUTS*5*COIN)]
tx.vout = [CTxOut(0, CScript([b"\x04", int(5*COIN), BEERCHAIN_MIN_GAS_PRICE, b"\x00", bytes.fromhex(contract_address), OP_CALL])) for i in range(NUM_OUTPUTS)]
tx.rehash()
signed_tx_hex = self.node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex']
# We may want to reject transactions which exceed the gas limit outright.
try:
self.node.sendrawtransaction(signed_tx_hex)
except:
pass
print("Tx size", len(signed_tx_hex))
t = time.time()
self.node.generate(1)
execution_time = time.time() - t
print('execution time:', execution_time, 'seconds')
assert(execution_time < 60)
if __name__ == '__main__':
BeerchainGasLimit().main()
| 35.721311
| 282
| 0.677375
|
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.beerchain import *
from test_framework.beerchainconfig import *
from test_framework.blocktools import *
import time
NUM_OUTPUTS = 1000
class BeerchainGasLimit(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-staking=1"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.node = self.nodes[0]
self.node.generate(100+COINBASE_MATURITY)
tx = CTransaction()
contract_address = self.node.createcontract("60606040523415600e57600080fd5b5b605080601c6000396000f30060606040525b3415600f57600080fd5b60225b5b60005a1115601f576013565b5b565b0000a165627a7a72305820efcd4d663aac9e7a94b44502e712d9eb63cd640efe3aebf9e79210ab63ea6ff60029")['address']
self.node.generate(1)
tx = CTransaction()
tx.vin = [make_vin(self.node, NUM_OUTPUTS*5*COIN)]
tx.vout = [CTxOut(0, CScript([b"\x04", int(5*COIN), BEERCHAIN_MIN_GAS_PRICE, b"\x00", bytes.fromhex(contract_address), OP_CALL])) for i in range(NUM_OUTPUTS)]
tx.rehash()
signed_tx_hex = self.node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex']
try:
self.node.sendrawtransaction(signed_tx_hex)
except:
pass
print("Tx size", len(signed_tx_hex))
t = time.time()
self.node.generate(1)
execution_time = time.time() - t
print('execution time:', execution_time, 'seconds')
assert(execution_time < 60)
if __name__ == '__main__':
BeerchainGasLimit().main()
| true
| true
|