text string | size int64 | token_count int64 |
|---|---|---|
import amqp
import datetime
import email.utils
import gocept.amqprun
import gocept.amqprun.interfaces
import gocept.amqprun.main
import gocept.amqprun.worker
import logging
import os
import pkg_resources
import plone.testing
import plone.testing.zca
import signal
import string
import subprocess
import sys
import tempfile
import time
import unittest
class ZCASandbox(plone.testing.Layer):
    """Test layer that isolates the ZCA global component registry per test.

    A fresh global registry is pushed before every test and popped again
    afterwards, so component registrations made by one test never leak
    into the next.
    """

    defaultBases = [plone.testing.zca.LAYER_CLEANUP]

    def testSetUp(self):
        # Shadow the global component registry for the duration of one test.
        plone.testing.zca.pushGlobalRegistry()

    def testTearDown(self):
        # Drop the per-test registry, restoring the previous one.
        plone.testing.zca.popGlobalRegistry()


# Shared layer instance; other layers in this module build on it.
ZCA_LAYER = ZCASandbox()
class QueueLayer(plone.testing.Layer):
    """Test layer providing a RabbitMQ connection on a disposable vhost.

    Connection settings come from ``AMQP_*`` environment variables; when no
    virtual host is configured, a unique one is created via ``rabbitmqctl``
    and deleted again on tear-down.
    """

    defaultBases = [ZCA_LAYER]

    # Broker administration command; may require sudo depending on setup.
    RABBITMQCTL_COMMAND = os.environ.get(
        'AMQP_RABBITMQCTL', 'sudo rabbitmqctl')

    def setUp(self):
        self['amqp-hostname'] = os.environ.get('AMQP_HOSTNAME', 'localhost')
        self['amqp-username'] = os.environ.get('AMQP_USERNAME', 'guest')
        self['amqp-password'] = os.environ.get('AMQP_PASSWORD', 'guest')
        self['amqp-virtualhost'] = os.environ.get('AMQP_VIRTUALHOST', None)
        if self['amqp-virtualhost'] is None:
            # No vhost configured: create a unique, disposable one.
            self['amqp-virtualhost'] = '/test.%f' % time.time()
            self.rabbitmqctl('add_vhost %s' % self['amqp-virtualhost'])
            self.rabbitmqctl(
                'set_permissions -p %s %s ".*" ".*" ".*"'
                % (self['amqp-virtualhost'], self['amqp-username']))
            # Remember ownership so tearDown only deletes vhosts we created.
            self['amqp-virtualhost-created'] = True
        self['amqp-connection'] = amqp.Connection(
            host=self['amqp-hostname'],
            userid=self['amqp-username'],
            password=self['amqp-password'],
            virtual_host=self['amqp-virtualhost'])
        self['amqp-connection'].connect()
        self['amqp-channel'] = self['amqp-connection'].channel()

    def tearDown(self):
        self['amqp-channel'].close()
        del self['amqp-channel']
        self['amqp-connection'].close()
        del self['amqp-connection']
        if 'amqp-virtualhost-created' in self:
            # Only remove vhosts this layer created itself.
            self.rabbitmqctl('delete_vhost %s' % self['amqp-virtualhost'])
            del self['amqp-virtualhost-created']

    def rabbitmqctl(self, parameter):
        """Run ``rabbitmqctl <parameter>`` and raise if it reports an error.

        NOTE(review): the command line is built by string interpolation and
        run with shell=True — acceptable for trusted test settings, but do
        not feed it untrusted input.
        """
        command = f'{self.RABBITMQCTL_COMMAND} {parameter}'
        # LANG=C keeps the tool's output parseable regardless of locale.
        stdout = subprocess.check_output(
            'LANG=C %s' % command, stderr=subprocess.STDOUT, shell=True)
        if b'Error' in stdout:
            raise RuntimeError(
                f'{command} failed:\n{stdout}')  # pragma: no cover


QUEUE_LAYER = QueueLayer()
class QueueTestCase(unittest.TestCase):
    """Base class for tests talking to the RabbitMQ test vhost.

    Provides a per-test queue-name prefix, a declared receive queue, and
    helpers to publish messages and wait for responses.  Every queue
    obtained through get_queue_name() is deleted again in tearDown().
    """

    layer = QUEUE_LAYER

    def setUp(self):
        super().setUp()
        # Unique prefix so parallel test runs cannot clash on queue names.
        self._queue_prefix = 'test.%f.' % time.time()
        self._queues = []
        self.connection = self.layer['amqp-connection']
        self.channel = self.layer['amqp-channel']
        # get_queue_name() already records the queue for tearDown cleanup;
        # the previous explicit second append made tearDown try to delete
        # the receive queue twice.
        self.receive_queue = self.get_queue_name('receive')
        self.channel.queue_declare(queue=self.receive_queue)

    def tearDown(self):
        for queue_name in self._queues:
            # NOTE: we seem to need a new channel for each delete;
            # trying to use self.channel for all queues results in its
            # closing after the first delete
            with self.connection.channel() as channel:
                channel.queue_delete(queue_name)
        super().tearDown()

    def get_queue_name(self, suffix):
        """Return a unique queue name and register it for cleanup."""
        queue_name = self._queue_prefix + suffix
        self._queues.append(queue_name)
        return queue_name

    def send_message(self, body, routing_key='', headers=None, **kw):
        """Publish ``body`` to the amq.topic exchange.

        A timestamp and message id are filled in automatically; extra
        keyword arguments are passed through to amqp.Message.
        """
        self.channel.basic_publish(
            amqp.Message(
                body,
                timestamp=int(
                    time.mktime(datetime.datetime.now().timetuple())),
                application_headers=headers or {},
                msgid=email.utils.make_msgid('gocept.amqprun.test'),
                **kw),
            'amq.topic', routing_key=routing_key)
        # Give the broker a moment to route the message.
        time.sleep(0.1)

    def expect_message_on(self, routing_key):
        """Bind the receive queue so messages for routing_key arrive there."""
        self.channel.queue_bind(
            self.receive_queue, 'amq.topic', routing_key=routing_key)

    # BBB
    expect_response_on = expect_message_on

    def wait_for_message(self, timeout=10):
        """Wait for a response on `self.receive_queue`.

        timeout ... wait for n seconds.

        Raises RuntimeError when nothing arrives within the timeout.
        """
        for i in range(timeout):
            message = self.channel.basic_get(self.receive_queue, no_ack=True)
            if message:
                break
            time.sleep(1)
        else:
            raise RuntimeError('No message received')
        return message

    def create_server(self, **kw):
        """Create a gocept.amqprun Server wired to the layer's broker."""
        import gocept.amqprun.server
        params = dict(hostname=self.layer['amqp-hostname'],
                      username=self.layer['amqp-username'],
                      password=self.layer['amqp-password'],
                      virtual_host=self.layer['amqp-virtualhost'])
        # XXX not DRY, the default value is declared in Server.__init__()
        setup_handlers = kw.pop('setup_handlers', True)
        params.update(kw)
        return gocept.amqprun.server.Server(
            params, setup_handlers=setup_handlers)
class MainTestCase(QueueTestCase):
    """Base class for end-to-end tests that run a fully configured server.

    Adds helpers to generate config/ZCML files from package templates and
    to run the server either in-process or in a subprocess.
    """

    def setUp(self):
        super().setUp()
        # Isolate ZCA registrations made while loading the configuration.
        plone.testing.zca.pushGlobalRegistry()

    def tearDown(self):
        super().tearDown()
        plone.testing.zca.popGlobalRegistry()
        # heuristic to avoid accreting more and more debug log output handlers
        if logging.root.handlers:
            handler = logging.root.handlers[-1]
            if isinstance(handler, logging.StreamHandler):
                logging.root.handlers.pop()

    def start_server(self):
        """Create a server from the generated config file and connect it."""
        self.server = gocept.amqprun.main.create_configured_server(
            self.config.name)
        self.server.connect()

    def start_server_in_subprocess(self, *args, **kwargs):
        """Run ``<module>.main(config, *args)`` in a child Python process.

        Writes a small launcher script that replicates the current
        sys.path; the child's pid is kept in self.pid and its combined
        stdout/stderr is captured in self.stdout.
        """
        script = tempfile.NamedTemporaryFile(
            mode='w', suffix='.py', encoding='utf-8')
        module = kwargs.pop('module', 'gocept.amqprun.main')
        config = [self.config.name]
        config.extend(args)
        # %(config)r renders the tuple, producing a main(...) call literal.
        script.write("""
import sys
sys.path[:] = %(path)r
import %(module)s
%(module)s.main%(config)r
""" % dict(path=sys.path, config=tuple(config), module=module))
        script.flush()
        self.stdout = tempfile.TemporaryFile(mode='w+', encoding='utf-8')
        process = subprocess.Popen(
            [sys.executable, script.name],
            stdout=self.stdout, stderr=subprocess.STDOUT)
        # Give the child a moment to start before the test proceeds.
        time.sleep(1)
        self.pid = process.pid

    def stop_server_in_subprocess(self):
        """Terminate the child via SIGINT and wait for it to exit."""
        os.kill(self.pid, signal.SIGINT)
        self.wait_for_subprocess_exit()
        self.pid = None

    def wait_for_subprocess_exit(self, timeout=30):
        """Poll for child exit; SIGKILL and fail the test on timeout.

        Returns the wait status once the child has exited.
        NOTE(review): ``timeout`` counts iterations, not seconds — 30
        iterations at 0.5 s each is roughly 15 s; confirm that is intended.
        """
        for i in range(timeout):
            pid, status = os.waitpid(self.pid, os.WNOHANG)
            if (pid, status) != (0, 0):
                return status
            time.sleep(0.5)
        else:  # pragma: no cover
            os.kill(self.pid, signal.SIGKILL)
            self.stdout.seek(0)
            self.fail('Child process did not exit\n' + self.stdout.read())

    def make_config(self, package, name, mapping=None):
        """Generate config + ZCML temp files from templates in ``package``.

        ``name`` selects ``<name>.zcml`` / ``<name>.conf``; ``mapping``
        supplies extra template substitutions.  Returns the config file
        name (kept on self.config so the temp file stays alive).
        """
        zcml_base = string.Template(
            # pkg_resources.resource_string actually provides bytes *sigh*
            str(pkg_resources.resource_string(package, '%s.zcml' % name),
                'utf8'))
        self.zcml = tempfile.NamedTemporaryFile()
        # NOTE(review): substitute(mapping) with mapping=None only works
        # when the ZCML template contains no placeholders — confirm.
        self.zcml.write(zcml_base.substitute(mapping).encode('utf8'))
        self.zcml.flush()
        sub = dict(
            site_zcml=self.zcml.name,
            amqp_hostname=self.layer['amqp-hostname'],
            amqp_username=self.layer['amqp-username'],
            amqp_password=self.layer['amqp-password'],
            amqp_virtualhost=self.layer['amqp-virtualhost'],
        )
        if mapping:
            sub.update(mapping)
        base = string.Template(
            # pkg_resources.resource_string actually provides bytes *sigh*
            str(pkg_resources.resource_string(package, '%s.conf' % name),
                'utf8'))
        self.config = tempfile.NamedTemporaryFile()
        self.config.write(base.substitute(sub).encode('utf8'))
        self.config.flush()
        return self.config.name
| 8,344 | 2,635 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 2 16:17:42 2020

Copyright 2020 by Hadrien Montanelli.

Demo: compare learnpy's sample autocorrelation function against the
statsmodels reference implementation on AR(1) and MA(1) example series.
"""
# %% Imports.

# Standard library imports:
import matplotlib.pyplot as plt
import numpy as np
from statsmodels.tsa.stattools import acf as acf2

# Learnpy imports:
from learnpy.misc import csv_to_array
from learnpy.timeseries import acf

# %% Examples.

# AR(1)-type time series:
series = csv_to_array('../../datasets/time_series_ar1.csv')
sample_acf = acf(series)
# Lag 0 (always 1) is skipped in the plots.
plt.plot(sample_acf[1:], '-')
# NOTE(review): statsmodels' acf returns nlags+1 values (lag 0 included);
# confirm learnpy's acf yields the same length, otherwise the norm below
# raises a shape mismatch.
sample_acf_statsmodels = acf2(series, nlags=len(series), fft=False)
plt.plot(sample_acf_statsmodels[1:], '--')
error = np.linalg.norm(sample_acf - sample_acf_statsmodels)
print('Error: ', error)  # compare with statsmodels' acf

# MA(1)-type time series:
series = csv_to_array('../../datasets/time_series_ma1.csv')
sample_acf = acf(series)
plt.figure()
plt.plot(sample_acf[1:], '-')
sample_acf_statsmodels = acf2(series, nlags=len(series), fft=False)
plt.plot(sample_acf_statsmodels[1:], '--')
error = np.linalg.norm(sample_acf - sample_acf_statsmodels)
print('Error: ', error)  # compare with statsmodels' acf
from django.http import HttpResponse
import logging
import elasticapm
logger = logging.getLogger('mysite')


def show(request):
    """Django view: respond with the value returned by the traced bar()."""
    return HttpResponse(bar())
@elasticapm.capture_span()
def bar():
    """Recorded as an APM span; calls extra() to produce a nested span."""
    extra()
    return "bar"
@elasticapm.capture_span()
def extra():
    """Innermost traced helper; exists only to create a child span."""
    return "extra"
| 289 | 97 |
#! /usr/bin/env python3
'''
Problem 14 - Project Euler
http://projecteuler.net/index.php?section=problems&id=014
'''
# Memo table: chains[n] == number of terms in the Collatz chain starting at n.
chains = {}


def getCollatzChainLength(n):
    """Return the number of terms in the Collatz sequence starting at n.

    Chain lengths are memoised in the module-level ``chains`` dict; every
    new value encountered on the way down is recorded there as well.
    """
    visited = []
    cached = 0
    while n != 1:
        if n in chains:
            # Tail already known: stop walking and reuse the stored length.
            cached = chains[n]
            break
        visited.append(n)
        n = n // 2 if n % 2 == 0 else 3 * n + 1
    else:
        # Reached 1 without hitting the cache; 1 itself counts as a term.
        visited.append(1)
    # Memoise every newly visited value: its chain length is the number of
    # remaining terms in `visited` plus the cached tail length.
    for offset, value in enumerate(visited):
        chains.setdefault(value, len(visited) - offset + cached)
    return len(visited) + cached
if __name__ == '__main__':
    # Project Euler 14: find the starting number below one million that
    # produces the longest Collatz chain.  Builds (start, length) pairs for
    # every candidate and picks the pair with the largest length.
    print(max([(x, getCollatzChainLength(x)) for x in range(1, 1_000_000)],
              key=(lambda x: x[1])))
| 772 | 275 |
from tkinter import *
class ButtonGlac(Button):
    """Tk button pre-styled with the application's blue "Glac" colour scheme."""

    def __init__(self, master=None):
        super().__init__(master)
        # Collect the shared look-and-feel in one place, then apply it.
        style = {
            'bd': 1,
            'bg': '#49708D',
            'fg': 'white',
            'font': ('Aharoni', '10', 'bold'),
            'activebackground': '#278ab9',
            'activeforeground': "lightgray",
        }
        self.configure(**style)
| 354 | 115 |
from Test import Test, Test as test
'''
In a small town the population is p0 = 1000 at the beginning of a year. The population regularly increases by 2 percent per year and moreover 50 new inhabitants per year come to live in the town. How many years does the town need to see its population greater or equal to p = 1200 inhabitants?
At the end of the first year there will be:
1000 + 1000 * 0.02 + 50 => 1070 inhabitants
At the end of the 2nd year there will be:
1070 + 1070 * 0.02 + 50 => 1141 inhabitants (number of inhabitants is an integer)
At the end of the 3rd year there will be:
1141 + 1141 * 0.02 + 50 => 1213
It will need 3 entire years.
More generally given parameters:
p0, percent, aug (inhabitants coming or leaving each year), p (population to surpass)
the function nb_year should return n number of entire years needed to get a population greater or equal to p.
aug is an integer, percent a positive or null number, p0 and p are positive integers (> 0)
Examples:
nb_year(1500, 5, 100, 5000) -> 15
nb_year(1500000, 2.5, 10000, 2000000) -> 10
Note: Don't forget to convert the percent parameter as a percentage in the body of your function: if the parameter percent is 2 you have to convert it to 0.02.
'''
def nb_year(p0, percent, aug, p):
    """Return the number of whole years until the population reaches p.

    Each year the population grows by ``percent`` percent plus ``aug``
    extra inhabitants.  Per the problem statement the population is a
    whole number of inhabitants, so the running total is truncated to an
    integer after every year (the previous version accumulated fractional
    inhabitants, which can drift from the specified result).
    """
    years = 0
    while p0 < p:
        # int() drops the fractional inhabitant created by the percentage.
        p0 = int(p0 + p0 * percent / 100 + aug)
        years += 1
    return years
# Sanity checks from the kata examples (`Test` is the kata's helper module).
# A stray extraction artifact ("| 1,558 | 616 |") fused onto the last line
# made it a syntax error; it is removed here.
Test.assert_equals(nb_year(1500, 5, 100, 5000), 15)
Test.assert_equals(nb_year(1500000, 2.5, 10000, 2000000), 10)
Test.assert_equals(nb_year(1500000, 0.25, 1000, 2000000), 94)
# rabbitdnssec.py -- DNSSEC management through a RabbitMQ cluster
#
# These routines can be used somewhat generally within a cluster of
# DNSSEC signers as we are using at SURFnet.
#
# From: Rick van Rein <rick@openfortress.nl>
import sys
import socket
import time
import os.path
import importlib
import ssl
import json
import syslog
import atexit
import configparser
import pika
import pika.spec
import pika.credentials
# Setup configuration, such as settings and application name
#
homedir = os.path.expanduser ('~')
appdir = homedir + '/ods-amqp'
# The program name doubles as the config-section name for this application.
appname = os.path.basename (sys.argv [0])
appcfg = configparser.ConfigParser ()
# A per-user config is read first; the system-wide OpenDNSSEC config can
# override it when both files exist.
appcfg.read ([appdir + '/config', '/etc/opendnssec/ods-amqp.config'])

# Recreate the prefix from sys.argv [0] and add it to $PATH
#
prefix = os.path.dirname (sys.argv [0])
os.environ ['PATH'] = prefix + ':' + os.environ.get ('PATH')
# Open syslog, using standard settings
#
def cleanup_syslog ():
    """atexit hook: log program termination and close the syslog channel."""
    syslog.syslog (syslog.LOG_INFO, 'Program exiting')
    syslog.closelog ()

# Also log to stderr when attached to a terminal; always include the PID.
syslog.openlog (appname,
        (syslog.LOG_PERROR if sys.stderr.isatty () else 0) |
        syslog.LOG_PID,
        syslog.LOG_USER)
syslog.syslog (syslog.LOG_INFO, 'Program starting')
atexit.register (cleanup_syslog)
# Setup the RabbitMQ client
#
# The short host name identifies this node within the signer/backup cluster.
this_machine = socket.gethostname ().split ('.') [0]
this_port = int (appcfg ['rabbitmq'] ['port'])
vhost = appcfg ['rabbitmq'] ['vhost']
signer_cluster = appcfg ['rabbitmq'] ['signer_cluster']
signer_machines = appcfg ['rabbitmq'] ['signer_machines'].split ()
backup_machines = appcfg ['rabbitmq'] ['backup_machines'].split ()
plugindir = appcfg ['rabbitmq'] ['plugindir']
ca_certs = appcfg ['rabbitmq'] ['ca_certs']
backend = appcfg ['rabbitmq'] ['backend']
#
# This host must be a known cluster member, and there must be at least two
# signer machines configured.
assert ((this_machine in signer_machines) or (this_machine in backup_machines))
assert (len (signer_machines) >= 2)

# Setup for TLS
#
wrap_tls = True
conf_tls = {
        # Client-side TLS 1.2, with per-host certificate and key files.
        'ssl_version': ssl.PROTOCOL_TLSv1_2,
        'ca_certs': ca_certs,
        'certfile': appdir + '/ssl/certs/' + this_machine + '.pem',
        'keyfile': appdir + '/ssl/private/' + this_machine + '.pem',
        'server_side': False,
}

# Setup PKCS #11
#
pkcs11_libfile = appcfg ['pkcs11'] ['libfile']
pkcs11_token_label = appcfg ['pkcs11'] ['token_label']
pkcs11_pinfile_path = appcfg ['pkcs11'] ['pinfile']
pkcs11_curve_name = appcfg ['pkcs11'] ['curve_name']
# Send messages at various levels to syslog
#
def log_debug (msg, *args):
for a in args:
msg = msg + ' ' + unicode (str (a), 'utf-8')
msg = msg.encode ('ascii', 'replace')
syslog.syslog (syslog.LOG_DEBUG, msg)
def log_info (msg, *args):
for a in args:
msg = msg + ' ' + unicode (str (a), 'utf-8')
# msg = msg % tuple (map (str, args))
msg = msg.encode ('ascii', 'replace')
syslog.syslog (syslog.LOG_INFO, msg)
def log_notice (msg, *args):
for a in args:
msg = msg + ' ' + unicode (str (a), 'utf-8')
# msg = msg % tuple (map (str, args))
msg = msg.encode ('ascii', 'replace')
syslog.syslog (syslog.LOG_NOTICE, msg)
def log_warning (msg, *args):
for a in args:
msg = msg + ' ' + unicode (str (a), 'utf-8')
# msg = msg % tuple (map (str, args))
msg = msg.encode ('ascii', 'replace')
syslog.syslog (syslog.LOG_WARNING, msg)
def log_error (msg, *args):
for a in args:
msg = msg + ' ' + unicode (str (a), 'utf-8')
# msg = msg % tuple (map (str, args))
msg = msg.encode ('ascii', 'replace')
syslog.syslog (syslog.LOG_ERR, msg)
def log_critical (msg, *args):
for a in args:
msg = msg + ' ' + unicode (str (a), 'utf-8')
# msg = msg % tuple (map (str, args))
msg = msg.encode ('ascii', 'replace')
syslog.syslog (syslog.LOG_CRIT, msg)
# Return the name of a queue on the current machine (prefix by hostname)
#
def my_queue (queue):
    """Return the given queue name prefixed with this machine's short name."""
    return '{}_{}'.format (this_machine, queue)
# Return the name of an exchange on the current machine (prefix by hostname)
#
def my_exchange (exchange='signer'):
    """Return the exchange name (default 'signer') prefixed with this machine."""
    return '{}_{}'.format (this_machine, exchange)
# Return configuration dict for the current app from config section [APPNAME]
# (Use ovr_appname to override the application name to something else)
#
def my_config (ovr_appname=None):
    """Return the configuration section for this application.

    The section name defaults to the program name, overridable via
    ovr_appname.  The 'accounts' section holds passwords and must never
    be handed out through this accessor.
    """
    global appcfg, appname
    assert (ovr_appname != 'accounts')
    section = appname if ovr_appname is None else ovr_appname
    return appcfg [section]
# Return the backend module name used for signing DNS zone data.
#
def my_backend ():
    """Return the backend name configured under [rabbitmq] backend."""
    return backend
# Return the plugin directory for this program.
#
def my_plugindir (ovr_appname=None):
    """Return the plugin subdirectory for this (or the named) application."""
    return '/'.join ([plugindir, ovr_appname or appname])
# Return the backend module used for signing DNS zone data.
# By default, a possible loading location is the plugin directory's
# subdirectory named by sys.argv [0], but ovr_appname can be used to
# override this default name for the application subdirectory under
# the plugin directory.
#
def my_backendmod (modname_prefix, modname_postfix='', ovr_appname=None):
    """Import and return the backend module for signing DNS zone data.

    The module name is modname_prefix + backend + modname_postfix, looked
    up in the application's plugin directory.  The plugin directory is
    appended to sys.path only for the duration of the import; previously a
    failing import left the extra sys.path entry behind.
    """
    sys.path.append (my_plugindir (ovr_appname=ovr_appname))
    try:
        return importlib.import_module (
                modname_prefix + backend + modname_postfix )
    finally:
        # Always restore sys.path, even when the import raises.
        sys.path.pop ()
# Retrieve a PlainCredentials object based on the current appname.
# Overrides exist for appname and username.
#
def my_credentials (ovr_appname=None, ovr_username=None):
    """Build pika.PlainCredentials for the selected application/user.

    The username defaults to the application's configured 'username'
    setting; the matching password is always taken from [accounts].
    """
    global appcfg, appname
    if ovr_username is not None:
        username = ovr_username
    else:
        username = appcfg [ovr_appname or appname] ['username']
    password = appcfg ['accounts'] [username]
    return pika.PlainCredentials (username, password)
# Retrieve a ConnectionParameters object.  This is based on settings
# in the [rabbitmq] configuration section, which applies to all appnames
# under this UNIX account, except for the credentials which can be
# supplied here as a parameter, and may well be derived with
# my_credentials().
#
def my_connectionparameters (my_creds, host=this_machine, port=this_port, **params):
    """Return pika.ConnectionParameters for this cluster's TLS endpoint.

    NOTE(review): the ssl=/ssl_options= keyword style matches pika 0.x;
    pika 1.x replaced them with a single SSLOptions object — confirm the
    pinned pika version before upgrading.
    """
    return pika.ConnectionParameters (
            host,
            port,
            virtual_host=vhost,
            ssl=wrap_tls,
            ssl_options=conf_tls,
            credentials=my_creds,
            **params)
# Construct a BasicProperties object, based on standard available
# information and optional headers.  There are options for overriding
# the username.
#
def my_basicproperties (headers=None, ovr_appname=None, ovr_username=None):
    """Return pika BasicProperties stamped with time, user and cluster id."""
    return pika.spec.BasicProperties (
            timestamp=time.time (),
            # user_id is taken from the app's config section unless
            # overridden; presumably it must match the AMQP login — verify.
            user_id=(ovr_username or appcfg [
                    ovr_appname or appname] ['username']),
            cluster_id=signer_cluster,
            headers=headers)
def pkcs11_pin ():
    """Load the PKCS #11 PIN from the OpenDNSSEC configuration.

    The PIN file is now closed deterministically via a context manager
    instead of relying on garbage collection to release the handle.
    """
    with open (pkcs11_pinfile_path) as pinfile:
        return pinfile.read ().strip ()
def pkcs11_pinfile ():
    """Return the path of the PKCS #11 PIN file configured for OpenDNSSEC."""
    return pkcs11_pinfile_path
class MessageCollector (object):
    """MessageCollector synchronously loads at least one message,
    but more when they are immediately available.  This helps
    to speed up operations when work accumulates and batch-mode
    operation is possible.  At the same time, it does not slow
    down operations when messages drip in one at a time.

    This is probably best combined with transactions, as in

        chan.tx_select ()
        clx = MessageCollector (chan)
        clx.collect ()
        ...
        for m in clx.messages ():
            ...inner.loop...
        ...
        if ...we are happy...:
            clx.ack ()
        else:
            clx.nack ()
        chan.tx_commit ()
    """

    def __init__ (self, chan, queue=None):
        # chan: expected to be a pika blocking channel.
        # queue: default queue name used by collect()/more_to_collect().
        self.chan = chan
        self.queue = queue
        self.msgtags = []   # delivery tags awaiting ack()/nack()
        self.msglist = []   # collected message bodies, same order as tags
        self.gotempty = False

    def messages (self):
        """Return the list of messages collected.
        """
        return self.msglist

    def count (self):
        """Return the number of messages collected.
        """
        return len (self.msglist)

    def ack (self):
        """Send a basic_ack() on all collected messages.
        """
        for tag in self.msgtags:
            self.chan.basic_ack (delivery_tag=tag)
        self.msgtags = []

    def nack (self, requeue=True):
        """Send a basic_nack() on all collected messages.
        """
        for tag in self.msgtags:
            self.chan.basic_nack (delivery_tag=tag, requeue=requeue)
        self.msgtags = []

    def more_to_collect (self):
        """Call this to see if we should proceed; it means that
        we collected at least one message, and nothing more
        is available for immediate processing.
        """
        # A passive queue_declare does not modify the queue; it is only
        # used here to learn the current message count.
        qhdl = self.chan.queue_declare (queue=self.queue, passive=True)
        return len (self.msglist) == 0 or qhdl.method.message_count > 0

    def collect (self, queue=None):
        """Collect at least one message; if more can be collected
        without waiting, then do so.  This method is not
        re-entrant.  The queue defaults to the value that was
        optionally set when this object was instantiated.
        """
        regcb = False       # NOTE(review): set but never used
        self.empty = False
        tout = None         # NOTE(review): assigned but no longer consumed
        while self.more_to_collect ():
            # basic_get on a blocking channel returns (None, None, None)
            # for an empty queue, or (mth, props, body) otherwise.
            (mth,props,body) = self.chan.basic_get (
                    queue=(queue or self.queue))
            if type (mth) != pika.spec.Basic.GetOk:
                # Queue unexpectedly empty: back off and retry.
                # NOTE(review): this blocks the caller for a full minute.
                time.sleep (60)
                continue
            self.msgtags.append (mth.delivery_tag)
            self.msglist.append (body)
            # The next looping is not blocking
            tout = 10
        pass # nothing more to collect

    def callback_GetEmpty (self, frame):
        """Take note that no messages are currently available.
        """
        self.gotempty = True

    def callback_GetOk (self, chan, mth, props, body):
        """Take note of a new message.  Store its delivery_tag
        for future use with self.ack() or self.nack().
        """
        self.msgtags.append (mth.delivery_tag)
        self.msglist.append (body)
def open_client_connection (username=None, hostname='localhost'):
    """Return a connection as an AMQP client, with the given
    username.  A password is determined locally.  When
    no username is provided, guest / guest will be used.
    The default host to connect to is localhost, but
    another value may be passed in.

    The returned value is a connection, to be used as in

        cnx = open_client_connection (...)
        chan = cnx.channel ()
        ...
        cnx.close ()

    Exceptions that might be raised include

        pika.exceptions.AMQPChannelError
        pika.exceptions.AMQPError

    See amqp_client_channel() for a "with" form.
    """
    if username is not None:
        # Passwords live in the [accounts] section of the configuration.
        password = appcfg ['accounts'] [username]
        creds = pika.PlainCredentials (username, password)
    else:
        # Construct ConnectionParameters for guest / guest
        creds = None
    cnxparm = pika.ConnectionParameters (
            host=hostname,
            port=this_port,
            virtual_host=vhost,
            ssl=wrap_tls,
            ssl_options=conf_tls,
            credentials=creds
    )
    cnx = pika.BlockingConnection (cnxparm)
    return cnx
class amqp_client_channel ():
    """Use this class in the "with" form:

        with amqp_client_channel (...) as chan:
            chan.basic_publish (...)

    Set username to login in another way than guest / guest.
    Set hostname to connect to another host than localhost.
    Set transactional to request transactional behaviour.
    Any AMQP exceptions will be caught, logged and fatally exited.

    In the transactional variety, the channel is setup accordingly
    and calls to tx_commit() and/or tx_rollback() are supported.
    When normally ending the "with" clause, any remaining work will
    be committed, and any failure to that end will be reported along
    with the AMQP exceptions.  When the "with" clause is left early
    due to an exception, then the transaction will be rolled back.
    """

    def __init__ (self, username=None, hostname='localhost', transactional=False):
        self.username = username
        self.hostname = hostname
        self.transact = transactional

    def __enter__ (self):
        # Open connection + channel; switch to transaction mode on request.
        self.cnx = open_client_connection (self.username, self.hostname)
        self.chan = self.cnx.channel ()
        if self.transact:
            self.chan.tx_select ()
        return self.chan

    def __exit__ (self, typ, val, tbk):
        txfail = False
        if self.transact:
            if val is not None:
                # The "with" body raised: undo any pending publishes.
                self.chan.tx_rollback ()
            else:
                frame_method = self.chan.tx_commit ()
                txfail = type (frame_method.method) != pika.spec.Tx.CommitOk
        self.cnx.close ()
        # AMQP failures are considered fatal for the calling program.
        if isinstance (val, pika.exceptions.AMQPChannelError):
            log_error ('AMQP Channel Error:', val)
            sys.exit (1)
        if isinstance (val, pika.exceptions.AMQPConnectionError):
            log_error ('AMQP Connection Error:', val)
            sys.exit (1)
        if isinstance (val, pika.exceptions.AMQPError):
            log_error ('AMQP Error:', val)
            sys.exit (1)
        if self.transact:
            if txfail:
                log_error ('AMQP Transaction Failure')
                sys.exit (1)
| 13,593 | 4,977 |
#!/usr/bin/env python
""" about - show system information
License: 3-clause BSD (see https://opensource.org/licenses/BSD-3-Clause)
Author: Hubert Tournier
"""
import getopt
import getpass
import locale
import logging
import os
import platform
import re
import shutil
import socket
import string
import sys
import sysconfig
import unicodedata
# Version string used by the what(1) and ident(1) commands:
ID = "@(#) $Id: about - show system information v1.1.2 (September 25, 2021) by Hubert Tournier $"
# Unix dependencies:
try:
import pwd
import grp
except ModuleNotFoundError:
pass
# Optional dependency upon py-cpuinfo
# Use "pip install py-cpuinfo" to install
try:
import cpuinfo
except ModuleNotFoundError:
pass
# Default parameters. Can be superseded by command line options
parameters = {
    # Each flag selects one information category for display; all start
    # off and are switched on by the corresponding command line options.
    "Environment": False,
    "Hardware": False,
    "Operating System": False,
    "Python": False,
    "System": False,
    "User": False,
}
################################################################################
def initialize_debugging(program_name):
    """Configure console logging; debug output stays muted until enabled."""
    logging.basicConfig(
        format=program_name + ": %(levelname)s: %(message)s",
        level=logging.DEBUG,
    )
    # Silence INFO and below; -d re-enables them via logging.disable(NOTSET).
    logging.disable(logging.INFO)
################################################################################
def display_help():
    """Print the usage banner and option summary to standard output."""
    for line in (
        "",
        "usage: about [-d|--debug] [-h|--help|-?] [-v|--version] [-a|--all]",
        " [-E|--env|--environment] [-H|--hw|--hardware] [-O|--os|--operating]",
        " [-P|--py|--python] [-S|--sys|--system] [-U|--user] [--]",
        " ---------------------- ---------------------------------------------",
        " -a|--all Same as -SUHOEP",
        " -E|--env|--environment Show information about the environment",
        " -H|--hw|--hardware Show information about the hardware",
        " -O|--os|--operating Show information about the Operating System",
        " -P|--py|--python Show information about Python",
        " -S|--sys|--system Show information about the system",
        " -U|--user Show information about the user",
        " -d|--debug Enable debug mode",
        " -h|--help|-? Print usage and this help message and exit",
        " -v|--version Print version and exit",
        " -- Options processing terminator",
        "",
    ):
        print(line)
################################################################################
def process_command_line():
    """Parse sys.argv into the global ``parameters`` dict.

    Returns the positional arguments remaining after option processing.
    Exits the process directly for --help, --version, syntax errors and
    the easter-egg options.
    """
    # pylint: disable=C0103
    global parameters
    # pylint: enable=C0103

    try:
        # option letters followed by : expect an argument
        # same for option strings followed by =
        options, remaining_arguments = getopt.getopt(
            sys.argv[1:],
            "adhvHOSEPU?",
            [
                "all",
                "debug",
                "env",
                "environment",
                "everything",
                "hardware",
                "help",
                "hw",
                "life",
                "operating",
                "os",
                "py",
                "python",
                "sys",
                "system",
                "universe",
                "user",
                "version",
            ],
        )
    except getopt.GetoptError as error:
        logging.critical("Syntax error: %s", error)
        display_help()
        sys.exit(1)

    for option, _ in options:

        if option in ("-a", "--all"):
            parameters["Environment"] = True
            parameters["Hardware"] = True
            parameters["Operating System"] = True
            parameters["Python"] = True
            parameters["System"] = True
            parameters["User"] = True

        elif option in ("-E", "--env", "--environment"):
            parameters["Environment"] = True

        elif option in ("-H", "--hw", "--hardware"):
            parameters["Hardware"] = True

        elif option in ("-O", "--os", "--operating"):
            parameters["Operating System"] = True

        elif option in ("-P", "--py", "--python"):
            parameters["Python"] = True

        elif option in ("-S", "--sys", "--system"):
            parameters["System"] = True

        elif option in ("-U", "--user"):
            parameters["User"] = True

        elif option in ("--life", "--universe"):
            # Easter egg: the answer to life, the universe and everything.
            print("42!")
            sys.exit(42)

        elif option == "--everything":
            # Easter egg.
            print("Mamma mia!")
            sys.exit(88)

        elif option in ("-d", "--debug"):
            # Re-enable the DEBUG/INFO records muted by initialize_debugging().
            logging.disable(logging.NOTSET)

        elif option in ("-h", "--help", "-?"):
            display_help()
            sys.exit(0)

        elif option in ("-v", "--version"):
            # Strip the what(1)/ident(1) markers from the ID string; the
            # string is assembled piecewise so ident does not match it here.
            print(ID.replace("@(" + "#)" + " $" + "Id" + ": ", "").replace(" $", ""))
            sys.exit(0)

    logging.debug("process_commandline(): parameters:")
    logging.debug(parameters)
    logging.debug("process_commandline(): remaining_arguments:")
    logging.debug(remaining_arguments)

    return remaining_arguments
################################################################################
def printm(first_line, results):
    """Print *results* between delimiter lines, labelled by *first_line*.

    Lists print one element per line, dicts print key=value pairs, and
    anything else is printed verbatim.
    """
    print(first_line + ":")
    print(">>>>>>>>>>")
    if isinstance(results, dict):
        for key, value in results.items():
            print("{}={}".format(key, value))
    elif isinstance(results, list):
        for element in results:
            print(element)
    else:
        print(results)
    print("<<<<<<<<<<")
################################################################################
# Possible values derived from https://hg.python.org/cpython/file/3.5/Lib/platform.py
def sys_type():
    """Classify the host OS: 'Unix' for Unix-likes, else platform.system()."""
    unix_like = {
        "FreeBSD",
        "NetBSD",
        "OpenBSD",
        "Linux",
        "Darwin",
        "MacOS X Server",
        "Solaris",
    }
    system = platform.system()
    return "Unix" if system in unix_like else system
################################################################################
def grep(filename, pattern):
    """Return the stripped lines of *filename* whose start matches *pattern*."""
    # re.match anchors at the beginning of each line, like the original.
    matcher = re.compile(pattern).match
    with open(filename) as file:
        return [line.strip() for line in file if matcher(line)]
################################################################################
def about_local_system():
    """Show information about the local system (host names and network)."""
    if parameters["System"]:
        print("[System]")
        if sys_type() == "Unix":
            # os.uname() only exists on Unix platforms.
            print("os.uname().nodename={}".format(os.uname().nodename))
        hostname = socket.gethostname()
        print("socket.gethostname()={}".format(hostname))
        print("socket.getfqdn()={}".format(socket.getfqdn()))
        # NOTE(review): gethostbyname(_ex) may block or raise when DNS is
        # misconfigured for the local host name — confirm acceptable here.
        print(
            "socket.gethostbyname('{}')={}".format(
                hostname, socket.gethostbyname(hostname)
            )
        )
        print(
            "socket.gethostbyname_ex('{}')={}".format(
                hostname, socket.gethostbyname_ex(hostname)
            )
        )
        print()

        print("[System/Network]")
        print("socket.if_nameindex()={}".format(socket.if_nameindex()))
        print("socket.getdefaulttimeout()={}".format(socket.getdefaulttimeout()))
        print("socket.has_dualstack_ipv6()={}".format(socket.has_dualstack_ipv6()))
        print()
################################################################################
def about_user():
    """Show information about the current user and process identities."""
    if parameters["User"]:
        print("[User]")
        user = getpass.getuser()
        print("getpass.getuser()={}".format(user))
        print("os.getlogin()={}".format(os.getlogin()))
        if sys_type() == "Unix":
            # pwd/grp are only imported successfully on Unix (see top of file).
            print('pwd.getpwnam("{}")={}'.format(user, pwd.getpwnam(user)))
            print("os.getgroups()={}".format(os.getgroups()))
            for group_id in os.getgroups():
                print("grp.getgrgid({})={}".format(group_id, grp.getgrgid(group_id)))
        elif sys_type() == "Windows":
            # NOTE(review): os.environ["X"] raises KeyError when X is unset;
            # these truthiness checks only guard against empty values — a
            # missing variable would crash. Confirm whether .get() was meant.
            if os.environ["USERNAME"]:
                print('os.environ["USERNAME"]={}'.format(os.environ["USERNAME"]))
            if os.environ["USERPROFILE"]:
                print('os.environ["USERPROFILE"]={}'.format(os.environ["USERPROFILE"]))
            if os.environ["USERDOMAIN"]:
                print('os.environ["USERDOMAIN"]={}'.format(os.environ["USERDOMAIN"]))
            if os.environ["USERDOMAIN_ROAMINGPROFILE"]:
                print(
                    'os.environ["USERDOMAIN_ROAMINGPROFILE"]={}'.format(
                        os.environ["USERDOMAIN_ROAMINGPROFILE"]
                    )
                )
            if os.environ["HOME"]:
                print('os.environ["HOME"]={}'.format(os.environ["HOME"]))
            if os.environ["HOMEDRIVE"]:
                print('os.environ["HOMEDRIVE"]={}'.format(os.environ["HOMEDRIVE"]))
            if os.environ["HOMEPATH"]:
                print('os.environ["HOMEPATH"]={}'.format(os.environ["HOMEPATH"]))
        print()

        print("[User/Process]")
        if sys_type() == "Unix":
            # Real, effective and saved uid/gid of this process.
            print("os.getuid()={}".format(os.getuid()))
            print("os.getgid()={}".format(os.getgid()))
            print("os.geteuid()={}".format(os.geteuid()))
            print("os.getegid()={}".format(os.getegid()))
            print("os.getresuid()={}".format(os.getresuid()))
            print("os.getresgid()={}".format(os.getresgid()))
        print()

        print("[Process]")
        pid = os.getpid()
        print("os.getpid()={}".format(pid))
        print("os.getppid()={}".format(os.getppid()))
        if sys_type() == "Unix":
            print("os.getpgid({})={}".format(pid, os.getpgid(pid)))
            print("os.getpgrp()={}".format(os.getpgrp()))
            print(
                "os.getpriority(os.PRIO_PROCESS, 0)={}".format(
                    os.getpriority(os.PRIO_PROCESS, 0)
                )
            )
            print(
                "os.getpriority(os.PRIO_PGRP, 0)={}".format(
                    os.getpriority(os.PRIO_PGRP, 0)
                )
            )
            print(
                "os.getpriority(os.PRIO_USER, 0)={}".format(
                    os.getpriority(os.PRIO_USER, 0)
                )
            )
        print()
################################################################################
def about_hardware():
    """Show information about the hardware (CPU, architecture, disks)."""
    if parameters["Hardware"]:
        print("[Hardware]")
        if sys_type() == "Unix":
            print("os.uname().machine={}".format(os.uname().machine))
        print("platform.machine()={}".format(platform.machine()))
        print("platform.processor()={}".format(platform.processor()))
        print("os.cpu_count()={}".format(os.cpu_count()))
        print("sys.byteorder={}".format(sys.byteorder))
        if platform.system() == "FreeBSD":
            printm(
                "/var/run/dmesg.boot scan",
                grep("/var/run/dmesg.boot", "^(CPU: |FreeBSD/SMP: |real memory =)"),
            )
        elif sys_type() == "Windows":
            # os.environ.get() avoids a KeyError when a variable is not set;
            # the previous os.environ["X"] lookup crashed the whole report
            # on machines where any of these variables were missing.
            for variable in (
                "NUMBER_OF_PROCESSORS",
                "PROCESSOR_ARCHITECTURE",
                "PROCESSOR_IDENTIFIER",
                "PROCESSOR_LEVEL",
                "PROCESSOR_REVISION",
            ):
                value = os.environ.get(variable)
                if value:
                    print('os.environ["{}"]={}'.format(variable, value))
        print()
        print("[Hardware/cpuinfo optional module]")
        try:
            # cpuinfo is an optional third-party module; NameError means it
            # was never imported, so print installation instructions instead.
            for key, value in cpuinfo.get_cpu_info().items():
                print("{}: {}".format(key, value))
        except NameError:
            print("# For more detailed (and portable) CPU information do:")
            print("# pip install py-cpuinfo ; cpuinfo")
        print()
        print("[Hardware/Disk usage]")
        if sys_type() == "Unix":
            if os.path.exists("/etc/fstab"):
                with open("/etc/fstab", "r") as file:
                    for line in file.readlines():
                        line = line.strip()
                        # Skip comments AND blank lines: a blank line made
                        # line.split() return [] and fields[1] raised an
                        # IndexError in the original code.
                        if not line or line.startswith("#"):
                            continue
                        fields = line.split()
                        # A usable fstab entry needs at least a device and a
                        # mount point; "none" mount points (e.g. swap) skipped.
                        if len(fields) < 2 or fields[1] == "none":
                            continue
                        print(
                            "File system={} Mount point={}".format(
                                fields[0], fields[1]
                            )
                        )
                        try:
                            print(
                                ' shutil.disk_usage("{}")={}'.format(
                                    fields[1], shutil.disk_usage(fields[1])
                                )
                            )
                        except OSError:
                            # Stale fstab entries can reference unmounted or
                            # missing paths; report the mount point but skip
                            # the usage instead of crashing.
                            pass
        elif sys_type() == "Windows":
            for letter in string.ascii_uppercase:
                drive = letter + ":\\"
                if os.path.exists(drive):
                    print(
                        ' shutil.disk_usage("{}")={}'.format(
                            drive, shutil.disk_usage(drive)
                        )
                    )
        print()
################################################################################
def about_operating_system():
    """Show information about the operating system"""
    if parameters["Operating System"]:
        print("[Operating system]")
        # Several overlapping APIs report OS identity; print them all so the
        # user can compare what each one returns on this platform.
        print("os.name={}".format(os.name))
        print("platform.system()={}".format(platform.system()))
        print("platform.release()={}".format(platform.release()))
        print("sys.platform={}".format(sys.platform))
        print("sysconfig.get_platform()={}".format(sysconfig.get_platform()))
        print("platform.platform()={}".format(platform.platform()))
        print("platform.version()={}".format(platform.version()))
        print("platform.uname()={}".format(platform.uname()))
        if sys_type() == "Unix":
            # os.uname() only exists on Unix-like systems.
            print("os.uname().sysname={}".format(os.uname().sysname))
            print("os.uname().release={}".format(os.uname().release))
            print("os.uname().version={}".format(os.uname().version))
        elif sys_type() == "Windows":
            print("sys.getwindowsversion()={}".format(sys.getwindowsversion()))
            print("platform.win32_ver()={}".format(platform.win32_ver()))
            print("platform.win32_edition()={}".format(platform.win32_edition()))
            print("platform.win32_is_iot()={}".format(platform.win32_is_iot()))
        print()
        if sys_type() == "Unix":
            print("[Operating system/Configuration]")
            # Dump every confstr/sysconf value; individual names may be
            # unsupported on this OS, hence the per-name OSError handling.
            for name in os.confstr_names:
                try:
                    print("os.confstr('{}')={}".format(name, os.confstr(name)))
                except OSError as error:
                    print("os.confstr('{}')={}".format(name, "Error: " + str(error)))
            for name in os.sysconf_names:
                try:
                    print("os.sysconf('{}')={}".format(name, os.sysconf(name)))
                except OSError as error:
                    print("os.sysconf('{}')={}".format(name, "Error: " + str(error)))
            print()
        # Path/portability constants differ between platforms (separators,
        # null device, line endings, default executable search path).
        print("[Operating system/Portability]")
        print("os.curdir={}".format(os.curdir))
        print("os.pardir={}".format(os.pardir))
        print("os.sep={}".format(os.sep))
        print("os.altsep={}".format(os.altsep))
        print("os.extsep={}".format(os.extsep))
        print("os.pathsep={}".format(os.pathsep))
        print("os.defpath={}".format(os.defpath))
        print("os.devnull={}".format(os.devnull))
        print("os.linesep={}".format(os.linesep))
################################################################################
def about_environment():
    """Show information about the environment (cwd, variables, locale)."""
    if parameters["Environment"]:
        print("[Environment]")
        print("os.getcwd()={}".format(os.getcwd()))
        printm("dict(os.environ)", dict(os.environ))
        print("os.supports_bytes_environ={}".format(os.supports_bytes_environ))
        print("shutil.get_terminal_size()={}".format(shutil.get_terminal_size()))
        print("sys.prefix={}".format(sys.prefix))
        if sys_type() == "Unix":
            print("os.getloadavg()={}".format(os.getloadavg()))
        print()

        print("[Environment/Locale]")
        print("locale.getlocale()={}".format(locale.getlocale()))
        printm("locale.localeconv()", locale.localeconv())
        print()
        print("locale.getlocale(locale.LC_CTYPE)={}".format(locale.getlocale(locale.LC_CTYPE)))
        try:
            # BUG FIX: the label previously said locale.getlocale(...) while
            # the value actually came from locale.nl_langinfo(...).
            print("locale.nl_langinfo(locale.CODESET)={}".format(locale.nl_langinfo(locale.CODESET)))
        except Exception:
            # nl_langinfo is unavailable on some platforms (e.g. Windows).
            pass
        print("locale.getdefaultlocale()={}".format(locale.getdefaultlocale()))
        print("locale.getpreferredencoding()={}".format(locale.getpreferredencoding()))
        print("locale.getlocale(locale.LC_COLLATE)={}".format(locale.getlocale(locale.LC_COLLATE)))
        # Note: the original code also attempted locale.getlocale(locale.CHAR_MAX),
        # which always raises (CHAR_MAX is not an LC_* category) and was silently
        # swallowed by a bare except; that dead call has been removed.
        print()

        def _print_langinfo(names):
            """Print one 'locale.nl_langinfo(locale.X)=value' line per constant name."""
            for name in names:
                print("locale.nl_langinfo(locale.{})={}".format(
                    name, locale.nl_langinfo(getattr(locale, name))))

        try:
            # Time formatting information.
            print("locale.getlocale(locale.LC_TIME)={}".format(locale.getlocale(locale.LC_TIME)))
            _print_langinfo(
                ["D_T_FMT", "D_FMT", "T_FMT", "T_FMT_AMPM"]
                + ["DAY_{}".format(i) for i in range(1, 8)]
                + ["ABDAY_{}".format(i) for i in range(1, 8)]
                + ["MON_{}".format(i) for i in range(1, 13)]
                + ["ABMON_{}".format(i) for i in range(1, 13)]
                + ["ERA", "ERA_D_T_FMT", "ERA_D_FMT", "ERA_T_FMT"]
            )
            print()
            # Messages (yes/no expressions).
            print("locale.getlocale(locale.LC_MESSAGES)={}".format(locale.getlocale(locale.LC_MESSAGES)))
            _print_langinfo(["YESEXPR", "NOEXPR"])
            print()
            # Monetary formatting.
            print("locale.getlocale(locale.LC_MONETARY)={}".format(locale.getlocale(locale.LC_MONETARY)))
            _print_langinfo(["CRNCYSTR"])
            print()
            # Numeric formatting.
            print("locale.getlocale(locale.LC_NUMERIC)={}".format(locale.getlocale(locale.LC_NUMERIC)))
            _print_langinfo(["RADIXCHAR", "THOUSEP", "ALT_DIGITS"])
            print()
        except Exception:
            # nl_langinfo and several LC_*/langinfo constants are missing on
            # some platforms; like the original code, locale details are
            # best-effort only and silently skipped when unsupported.
            pass
################################################################################
def about_python():
    """Show information about the Python installation."""
    if parameters["Python"]:
        print("[Python]")
        print(
            "sysconfig.get_python_version()={}".format(sysconfig.get_python_version())
        )
        if sys_type() == "Windows":
            print("sys.winver={}".format(sys.winver))
        printm("sys.version", sys.version)
        print("sys.version_info={}".format(sys.version_info))
        print("sys.hexversion={}".format(sys.hexversion))
        print("sys.implementation={}".format(sys.implementation))
        print("platform.python_build()={}".format(platform.python_build()))
        print("platform.python_branch()={}".format(platform.python_branch()))
        print(
            "platform.python_implementation()={}".format(
                platform.python_implementation()
            )
        )
        print("platform.python_revision()={}".format(platform.python_revision()))
        print("platform.python_version()={}".format(platform.python_version()))
        print(
            "platform.python_version_tuple()={}".format(platform.python_version_tuple())
        )
        printm("sys.copyright", sys.copyright)
        print()
        print("[Python/Config]")
        print("sys.base_prefix={}".format(sys.base_prefix))
        print("sys.executable={}".format(sys.executable))
        print("sys.flags={}".format(sys.flags))
        printm("sys.builtin_module_names", sys.builtin_module_names)
        printm("sys.modules", sys.modules)
        print("sys.path={}".format(sys.path))
        # BUG FIX: platform.python_version_tuple() returns *strings*, so the
        # previous check `python_version[0] == 3` compared "3" to the int 3
        # and was always False -- the platlibdir line never printed.
        # sys.version_info compares correctly against an int tuple.
        if sys.version_info >= (3, 9):
            printm("sys.platlibdir", sys.platlibdir)  # Python 3.9+
        print("sys.getrecursionlimit()={}".format(sys.getrecursionlimit()))
        print("sys.getswitchinterval()={}".format(sys.getswitchinterval()))
        print("sys.thread_info={}".format(sys.thread_info))
        print("platform.python_compiler()={}".format(platform.python_compiler()))
        if sys_type() == "Unix":
            print("platform.libc_ver()={}".format(platform.libc_ver()))
        print("sys.api_version={}".format(sys.api_version))
        print()
        print("[Python/Math]")
        print("sys.int_info={}".format(sys.int_info))
        print("sys.maxsize={}".format(sys.maxsize))
        print("sys.float_info={}".format(sys.float_info))
        print()
        print("[Python/Unicode]")
        print("sys.getdefaultencoding()={}".format(sys.getdefaultencoding()))
        print("sys.getfilesystemencoding()={}".format(sys.getfilesystemencoding()))
        print("unicodedata.unidata_version={}".format(unicodedata.unidata_version))
        print("sys.maxunicode={}".format(sys.maxunicode))
        print()
################################################################################
def main():
    """Program's entry point: parse options, then print each selected section."""
    program_name = os.path.basename(sys.argv[0])
    initialize_debugging(program_name)
    process_command_line()
    # Nothing selected at all: remind the user and show usage.
    if True not in parameters.values():
        logging.warning("Please select something to show:")
        display_help()
        sys.exit(0)
    # Each section checks its own parameters[...] flag internally.
    for show_section in (
        about_local_system,
        about_user,
        about_hardware,
        about_operating_system,
        about_environment,
        about_python,
    ):
        show_section()
    sys.exit(0)
# Run main() only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
| 27,538 | 8,260 |
import spira.all as spira
class Resistor(spira.PCell):
    """Parametric cell for a rectangular shunt resistor on process layer R1."""
    # Defaults come from the SPiRA rule deck (RDD) minimum design rules.
    width = spira.NumberParameter(default=spira.RDD.R1.MIN_WIDTH, doc='Width of the shunt resistance.')
    length = spira.NumberParameter(default=spira.RDD.R1.MIN_LENGTH, doc='Length of the shunt resistance.')
    def validate_parameters(self):
        """Reject geometries where the width exceeds the length."""
        if self.width > self.length:
            raise ValueError('`Width` cannot be larger than `length`.')
        return True
    def create_elements(self, elems):
        """Add the resistor body: a length x width box centered at the origin."""
        elems += spira.Box(alias='ply1', width=self.length, height=self.width, center=(0,0), layer=spira.RDD.PLAYER.R1.METAL)
        return elems
    def create_ports(self, ports):
        """Expose two box edges (E1/E3 — presumably opposite ends, TODO confirm) as ports P1 and P2."""
        # Process symbol will automatically be added to the port name.
        ports += self.elements['ply1'].ports['E1_R1'].copy(name='P1')
        ports += self.elements['ply1'].ports['E3_R1'].copy(name='P2')
        return ports
# Quick manual check: instantiate the cell and write it out as a GDSII file.
if __name__ == '__main__':
    D = Resistor()
    D.gdsii_output(name='Resistor')
| 975 | 327 |
def parse_selection(selection,syntaxis="mdtraj"):
    """Expand convenience keywords ("dna", "rna", "ion", "cosolutes") in an
    atom-selection string into explicit `resname` clauses.

    Only the "mdtraj" syntax is handled; any other value of `syntaxis`
    returns the selection string unchanged.
    """
    from molmodmt.topology import ion_residues
    if syntaxis=="mdtraj":
        # NOTE(review): str.replace works on raw substrings, so the "ion"
        # keyword would also rewrite words that merely contain "ion" —
        # consider word-boundary matching; TODO confirm intended behavior.
        selection=selection.replace("dna","(resname DA DG DC DT DI)")
        selection=selection.replace("rna","(resname A G C U I)")
        selection=selection.replace("ion",'(resname '+' '.join(['"'+str(ii)+'"' for ii in ion_residues])+')')
        # NOTE(review): "cosolutes" expands to the *ion* residue list — this
        # looks like a copy-paste slip; verify against molmodmt.topology.
        selection=selection.replace("cosolutes",'(resname '+' '.join(['"'+str(ii)+'"' for ii in ion_residues])+')')
    return selection
| 511 | 171 |
from collections import defaultdict, namedtuple
from datetime import datetime
import importlib
import logging
import re
import os
import pytz
def get_delivery_report(sender, recipient, timestamp):
    """Look up a mail delivery report via the configured reporter class.

    Returns None when no 'delivery_reporter' is configured.
    """
    from django.conf import settings

    # The reporter is configured as a dotted path, e.g. "pkg.module.Class".
    reporter_path = settings.FROIDE_CONFIG.get('delivery_reporter', None)
    if not reporter_path:
        return
    module_path, class_name = reporter_path.rsplit('.', 1)
    reporter_klass = getattr(importlib.import_module(module_path), class_name)
    reporter = reporter_klass(time_zone=settings.TIME_ZONE)
    return reporter.find(sender, recipient, timestamp)
# Result record: raw matching log text, seconds between the send timestamp and
# the log entry, final postfix delivery status, and the extracted message id.
DeliveryReport = namedtuple('DeliveryReport', ['log', 'time_diff',
                            'status', 'message_id'])
class PostfixDeliveryReporter(object):
    """Parse postfix mail logs to find the delivery report for one message.

    The *_RE class attributes are str.format templates that get filled with
    the sender/recipient/queue id before being compiled to regexes.
    """
    SENDER_RE = r'\s(?P<mail_id>\w+): from=<{sender}'
    MESSAGE_ID_RE = r'{mail_id}: message-id=<(?P<message_id>[^>]+)>'
    ALL_RE = r' {mail_id}: '
    RECIPIENT_RE = r'{mail_id}: to=<{recipient}'
    STATUS_RE = re.compile(r'status=(\w+)')
    TIMESTAMP_RE = re.compile(r'\w{3}\s+\d+\s+\d+:\d+:\d+')
    TIME_PARSE_STR = '%b %d %H:%M:%S'
    # Current log first, then the most recent rotated log.
    LOG_FILES = [
        '/var/log/mail.log',
        '/var/log/mail.log.1'
    ]

    def __init__(self, time_zone=None):
        # Syslog timestamps carry no zone info; they are localized to this zone.
        self.timezone = pytz.timezone(time_zone)

    def find(self, sender, recipient, timestamp):
        """Search the known log files; return the first DeliveryReport found, or None."""
        for filename in self.LOG_FILES:
            if not os.path.exists(filename):
                continue
            try:
                with open(filename) as fp:
                    result = self.search_log(fp, sender, recipient, timestamp)
                    if result:
                        return result
            except IOError as e:
                # Unreadable log file (e.g. permissions): log and try the next one.
                logging.exception(e)
                pass

    def search_log(self, fp, sender, recipient, timestamp):
        """Two-pass scan of one log file; return the best-matching report or None."""
        # Pass 1: collect every queue id ("mail_id") postfix assigned to this sender.
        sender_re = re.compile(self.SENDER_RE.format(sender=sender))
        mail_ids = set()
        for line in fp:
            match = sender_re.search(line)
            if match:
                mail_ids.add(match.group('mail_id'))
        fp.seek(0)
        # Pass 2: gather all log lines belonging to each queue id.
        # (zip over the same set twice is safe: iteration order is stable
        # while the set is not mutated.)
        mail_id_res = [re.compile(self.ALL_RE.format(mail_id=mail_id))
                       for mail_id in mail_ids]
        lines = defaultdict(list)
        for line in fp:
            for mail_id, mail_id_re in zip(mail_ids, mail_id_res):
                if mail_id_re.search(line) is not None:
                    lines[mail_id].append(line)
        candidates = []
        for mail_id in mail_ids:
            candidate = self.extract(
                lines[mail_id], mail_id, sender_re, recipient, timestamp)
            if candidate is not None:
                candidates.append(candidate)
        if not candidates:
            return None
        if len(candidates) == 1:
            return candidates[0]
        # Several matches: pick the one closest in time to the send timestamp.
        candidates = sorted(candidates, key=lambda x: abs(x.time_diff))
        return candidates[0]

    def extract(self, lines, mail_id, sender_re, recipient, timestamp):
        """Build a DeliveryReport from one queue id's log lines.

        Returns None when the lines do not mention the recipient or predate
        the send timestamp.
        """
        text = ''.join(lines)
        recipient_re = re.compile(self.RECIPIENT_RE.format(
            mail_id=mail_id, recipient=recipient))
        match = recipient_re.search(text)
        if match is None:
            return
        log_timestamp = self.get_timestamp(text, timestamp)
        time_diff = (log_timestamp - timestamp).total_seconds()
        if time_diff < -5:
            # Log can't be before sending timestamp, allow for some overlap
            return
        message_id_re = re.compile(self.MESSAGE_ID_RE.format(mail_id=mail_id))
        match = self.STATUS_RE.findall(text)
        status = None
        if match:
            # find last status
            status = match[-1]
        match = message_id_re.search(text)
        message_id = None
        if match:
            message_id = match.group('message_id')
        return DeliveryReport(text, time_diff, status, message_id)

    def get_timestamp(self, text, timestamp):
        """Parse the first syslog timestamp in *text* as a timezone-aware datetime."""
        match = self.TIMESTAMP_RE.search(text)
        date_str = match.group(0)
        # Syslog timestamps omit the year; borrow it from the send timestamp.
        date = datetime.strptime(date_str, self.TIME_PARSE_STR)
        date = date.replace(year=timestamp.year)
        return self.timezone.localize(date)
| 4,173 | 1,302 |
# Basic example that sums a small list of numbers on the GPU.
import numpy as np
import pycuda.autoinit  # importing this initializes the CUDA driver/context
from pycuda import gpuarray
from pycuda.scan import InclusiveScanKernel

seq = np.array([1, 2, 3, 4], dtype=np.int32)
seq_gpu = gpuarray.to_gpu(seq)  # copy the host array into device memory
# An inclusive scan with the "a+b" operator computes running (prefix) sums.
sum_gpu = InclusiveScanKernel(np.int32, "a+b")
print(sum_gpu(seq_gpu).get())  # device result copied back: [1 3 6 10]
print(np.cumsum(seq))          # CPU reference computation for comparison
| 352 | 146 |
import nextcord
from nextcord.ext import commands
class Admin(commands.Cog):
    """Moderation commands: bulk message deletion, kick, ban, and unban."""

    def __init__(self, client):
        self.client = client

    @commands.command(help= "delete messages in bulk", aliases=["purge", "c"])
    @commands.has_permissions(manage_messages=True)
    async def clear(self, ctx, amount = 5):
        """Delete `amount` messages (+1 to include the invoking command)."""
        await ctx.channel.purge(limit = amount + 1)
        await ctx.send(f"{amount} messages deleted" , delete_after = 5)

    @commands.command(help= "kick a member", aliases=["k"])
    @commands.has_permissions(kick_members=True)
    async def kick(self, ctx, member: nextcord.Member, *, reason=None):
        """Kick a member, DM-ing them the reason when their DMs are open."""
        if member == ctx.author:
            await ctx.send("you can't kick yourself")
            return
        # BUG FIX: kick exactly once.  The old bare `except:` around the
        # whole block re-issued member.kick() whenever the DM failed
        # (closed DMs), calling kick a second time on an already-removed
        # member and swallowing every other error.
        await member.kick(reason=reason)
        try:
            await member.send(f"```\nyou were kicked from {ctx.guild.name}\nreason={reason}\n```")
        except nextcord.HTTPException:
            # Member has DMs disabled; the kick already succeeded.
            pass
        await ctx.send(f"```\n{member} was kicked by {ctx.author.name}\nreason={reason}\n```")

    @commands.command(help= "ban a member", aliases=["b"])
    @commands.has_permissions(ban_members=True)
    async def ban(self, ctx, member: nextcord.Member, *, reason=None):
        """Ban a member, DM-ing them the reason when their DMs are open."""
        if member == ctx.author:
            await ctx.send("you can't ban yourself")
            return
        # Same fix as kick(): ban once, treat a failed DM as non-fatal.
        await member.ban(reason=reason)
        try:
            await member.send(f"```\nyou were banned from {ctx.guild.name}\nreason={reason}\n```")
        except nextcord.HTTPException:
            pass
        await ctx.send(f"```\n{member} was banned by {ctx.author.name}\nreason={reason}\n```")

    @commands.command(help = "see how many ppl you banned")
    @commands.has_permissions(ban_members=True)
    async def bans(self, ctx):
        """Show the guild's ban entries, or a friendly note when there are none."""
        # assumes guild.bans() returns a list (pre-2.x API) — TODO confirm
        banned = await ctx.guild.bans()
        if not banned:
            await ctx.send("no bans :)")
        else:
            await ctx.send(f"```py\n{banned}\n```")

    @commands.command(help="unban a member")
    @commands.has_permissions(ban_members=True)
    async def unban(self, ctx, *, member):
        """Unban a user given as "name#discriminator"."""
        banned = await ctx.guild.bans()
        # partition() instead of split(): input without a "#" no longer
        # raises ValueError, it simply matches nothing.
        member_name, _, member_discrim = member.partition("#")
        for ban_entry in banned:
            user = ban_entry.user
            if (user.name, user.discriminator) == (member_name, member_discrim):
                await ctx.guild.unban(user)
                await ctx.send(f"```\n{user.name}#{user.discriminator} was unbanned by {ctx.author.name}\n```")
                return
def setup(client):
    """Extension entry point: register the Admin cog with the bot."""
    client.add_cog(Admin(client))
import logging
import time
from contextlib import contextmanager
import numpy as np
import pandas as pd
import scipy.stats
from openml import datasets, runs
from sklearn.model_selection import train_test_split
# Module-level logger for the dashboard helpers; DEBUG so timing lines show up.
logger = logging.getLogger("dashboard")
logger.setLevel(logging.DEBUG)
def get_run_df(run_id: int):
    """Fetch an OpenML run and build its evaluations DataFrame.

    Per-fold metric values are collapsed into "mean ± std" strings; extra
    rows list the run's output files and task type.  The frame is cached
    to cache/run<id>.pkl.  Returns (run, df).
    """
    run = runs.get_run(int(run_id), ignore_cache=True)
    df = pd.DataFrame(run.fold_evaluations.items(), columns=["evaluations", "results"])
    # Evaluations table
    result_list = []
    result_string = []
    for result in df["results"]:
        k_folds = list(result[0].values())
        mean = str(np.round(np.mean(np.array(k_folds)), 3))
        std = str(np.round(np.std(np.array(k_folds)), 3))
        result_list.append(k_folds)
        result_string.append(mean + " \u00B1 " + std)
    df.drop(["results"], axis=1, inplace=True)
    df["results"] = result_list
    df["values"] = result_string
    # Add some more rows indicating output prediction file name
    df2 = pd.DataFrame(run.output_files.items(), columns=["evaluations", "results"])
    df2["values"] = ""  # (was assigned twice in the original; once suffices)
    df3 = pd.DataFrame(
        {"task_type": run.task_type}.items(), columns=["evaluations", "results"]
    )
    # BUG FIX: DataFrame.append() was removed in pandas 2.0; pd.concat is the
    # supported equivalent.  df3 has no "values" column, so those cells
    # become NaN exactly as with the old append behaviour.
    df = pd.concat([df, df2, df3])
    df.to_pickle("cache/run" + str(run_id) + ".pkl")
    return run, df
def clean_dataset(df):
    """Drop columns that are at least 80% null, then fill remaining NaNs
    with each column's first modal value."""
    keep_mask = df.isnull().mean() < 0.8
    trimmed = df.loc[:, keep_mask]
    # .mode() returns a frame; its first row holds one fill value per column.
    return trimmed.fillna(trimmed.mode().iloc[0])
def get_metadata(data_id: int):
    """Fetch an OpenML dataset description and summarize its attributes.

    Returns (meta_features, dataset, dataset_name) where meta_features has
    one row per attribute (capped at 1000 rows), target rows sorted first.
    """
    data = datasets.get_dataset(data_id, download_data=False)
    features = pd.DataFrame(
        [vars(data.features[i]) for i in range(0, len(data.features))]
    )
    is_target = [
        "true" if name == data.default_target_attribute else "false"
        for name in features["name"]
    ]
    features["Target"] = is_target
    # Extract #categories
    size = [
        str(len(value)) if value is not None else " "
        for value in features["nominal_values"]
    ]
    # FIX: direct assignment instead of chained .replace(..., inplace=True) —
    # inplace on an intermediate Series is deprecated (pandas 2.x) and may
    # not write back to the frame.
    features["nominal_values"] = features["nominal_values"].replace({None: " "})
    features["# categories"] = size
    # choose features to be displayed; .copy() makes the later in-place
    # rename/sort safe (no SettingWithCopyWarning on a column slice).
    meta_features = features[
        ["name", "data_type", "number_missing_values", "# categories", "Target"]
    ].copy()
    meta_features.rename(
        columns={
            "name": "Attribute",
            "data_type": "DataType",
            "number_missing_values": "Missing values",
        },
        inplace=True,
    )
    meta_features.sort_values(by="Target", ascending=False, inplace=True)
    # Cap very wide datasets so the dashboard table stays responsive.
    if meta_features.shape[0] > 1000:
        meta_features = meta_features[:1000]
    return meta_features, data, (vars(data)["name"])
def get_data_metadata(data_id):
    """Download the dataset and get metadata

    Large datasets (>= 50000 rows) are cleaned and subsampled (stratified on
    the target when possible) before caching to cache/df<id>.pkl.

    :param data_id: ID of the OpenML dataset
    :return: (df, meta_features, numerical_features, nominal_features)
    """
    # Get data in pandas df format
    import time

    start = time.time()
    meta_features, data, _ = get_metadata(data_id)
    x, y, categorical, attribute_names = data.get_data()
    df = pd.DataFrame(x, columns=attribute_names)
    if x.shape[0] < 50000:
        # Small enough to cache as-is.
        df.to_pickle("cache/df" + str(data_id) + ".pkl")
    else:
        # create a subsample of data for large datasets
        try:
            target_feat = meta_features[meta_features["Target"] == "true"][
                "Attribute"
            ].values[0]
        except IndexError:
            # Dataset has no attribute marked as target.
            target_feat = None
            pass
        if x.shape[0] >= 50000 and target_feat:
            df = clean_dataset(df)
            # Sample fraction shrinks as the dataset grows.
            if x.shape[0] < 100000:
                sample_size = 0.5
            elif 100000 <= x.shape[0] < 500000:
                sample_size = 0.25
            elif 500000 <= x.shape[0] < 1e6:
                sample_size = 0.1
            else:
                sample_size = 0.05
            x = df.drop(target_feat, axis=1)
            y = df[target_feat]
            try:
                # Stratified split keeps the target distribution in the sample.
                X_train, X_test, y_train, y_test = train_test_split(
                    x, y, stratify=y, test_size=sample_size
                )
            except ValueError:
                # Stratification fails for continuous/rare targets; fall back.
                X_train, X_test, y_train, y_test = train_test_split(
                    x, y, stratify=None, test_size=sample_size
                )
            # Keep only the sampled rows, with the target column re-attached.
            x = X_test
            x[target_feat] = y_test
            df = pd.DataFrame(x, columns=attribute_names)
            df.to_pickle("cache/df" + str(data_id) + ".pkl")
        else:
            df.to_pickle("cache/df" + str(data_id) + ".pkl")
        # Drop metadata rows for columns removed by clean_dataset().
        meta_features = meta_features[
            meta_features["Attribute"].isin(pd.Series(df.columns))
        ]
    # Add entropy
    numerical_features = list(
        meta_features["Attribute"][meta_features["DataType"] == "numeric"]
    )
    nominal_features = list(
        meta_features["Attribute"][meta_features["DataType"] == "nominal"]
    )
    entropy = []
    for column in meta_features["Attribute"]:
        if column in nominal_features:
            count = df[column].value_counts()
            ent = round(scipy.stats.entropy(count), 2)
            entropy.append(ent)
        else:
            # Entropy is only computed for nominal columns.
            entropy.append(" ")
    meta_features["Entropy"] = entropy
    meta_features["Target"].replace({"false": " "}, inplace=True)
    end = time.time()
    logger.debug("time taken download data and find entropy " + str(end - start))
    return df, meta_features, numerical_features, nominal_features
def get_highest_rank(df, leaderboard):
    """Attach a "Top Score" column (best score per uploader) to the leaderboard.

    Note: sorts `df` by upload time *in place*.  Only the first occurrence of
    each setup_id counts, except that a user's first-ever row always counts.
    """
    df.sort_values(by=["upload_time"], inplace=True)
    seen_scores = []
    seen_setups = []
    best_by_user = {}
    for _, row in df.iterrows():
        uploader = row["uploader_name"]
        is_new_user = uploader not in best_by_user
        # Skip repeated setups from an already-known uploader.
        if row["setup_id"] in seen_setups and not is_new_user:
            continue
        seen_setups.append(row["setup_id"])
        score = row["value"]
        if is_new_user or score not in seen_scores:
            seen_scores.append(score)
            seen_scores.sort(reverse=True)
        if is_new_user or best_by_user[uploader] < score:
            best_by_user[uploader] = score
    # dict preserves insertion order: one entry per uploader, first-seen first.
    leaderboard["Top Score"] = list(best_by_user.values())
    return leaderboard
def splitDataFrameList(df, target_column):
    """Explode `target_column` (whose cells hold lists) into one row per element.

    Values in the other columns are duplicated across the newly created rows.
    Returns a new DataFrame; `df` is not modified.
    """
    exploded_rows = []
    for _, row in df.iterrows():
        for element in row[target_column]:
            record = row.to_dict()
            record[target_column] = element
            exploded_rows.append(record)
    return pd.DataFrame(exploded_rows)
@contextmanager
def print_duration(name: str):
    """Context manager that prints the wall-clock duration of its body.

    Nothing is printed if the body raises (no try/finally, by design).
    """
    started_at = time.time()
    yield
    elapsed = time.time() - started_at
    print(f"{name}: {elapsed:.3f}s")
def bin_numeric(df, column_name, output_name):
    """Bin `column_name` into 1000 intervals.

    Stores a cleaned interval label ("left - right") in `output_name` and the
    interval's left edge in a "bin" column, then sorts the frame in place by
    that edge.  Returns the (mutated) frame.
    """
    df[output_name] = pd.cut(df[column_name], 1000).astype(str)
    # Left edge of the "(a, b]" interval string, used as the sort key.
    cat = df[output_name].str.extract(r"\((.*),", expand=False).astype(float)
    df["bin"] = pd.Series(cat)
    df.sort_values(by="bin", inplace=True)
    # BUG FIX: regex=False makes these literal replacements.  "(" is not a
    # valid regular expression, so the old default (regex=True on
    # pandas < 2.0) raised re.error here.
    df[output_name] = df[output_name].str.replace(",", " -", regex=False)
    df[output_name] = df[output_name].str.replace("(", "", regex=False)
    df[output_name] = df[output_name].str.replace("]", "", regex=False)
    return df
| 7,862 | 2,534 |
"""Copyright (c) Facebook, Inc. and its affiliates.
All rights reserved.
This source code is licensed under the license found in the
LICENSE file in the root directory of this source tree.
Portions of the source code are from the OLTR project which
notice below and in LICENSE in the root directory of
this source tree.
Copyright (c) 2019, Zhongqi Miao
All rights reserved.
"""
import torch.nn as nn
def create_loss():
    """Return a standard softmax cross-entropy classification criterion."""
    print('Loading Softmax Loss.')
    criterion = nn.CrossEntropyLoss()
    return criterion
| 494 | 156 |
def answer():
    """Return the fixed answer value, 42."""
    result = 42
    return result
| 28 | 11 |
from rest_framework import serializers
from .models import DoctorClinic
class DoctorClinicSerializer(serializers.ModelSerializer):
    """DRF serializer for DoctorClinic records: the owning user, the doctor's
    identity/specialty, and the clinic's full address."""
    class Meta:
        model = DoctorClinic
        # Explicit field whitelist; tuple order controls output ordering.
        fields = (
            'id',
            'user',
            'doctor_name',
            'specialty',
            'clinic_name',
            'clinic_street',
            'clinic_city',
            'clinic_state',
            'clinic_country',
            'clinic_zipcode',
            'doctor_id'
        )
import os
import requests
from flask import current_app
from jose import jwt, exceptions
from benwaonline.cache import cache
from benwaonline.exceptions import BenwaOnlineAuthError
ALGORITHMS = ['RS256']
def verify_token(token):
    """Decode and verify a JWT against the JWKS; return its claims payload.

    Raises BenwaOnlineAuthError (or re-raises the ExpiredSignatureError)
    via the handle_* helpers when verification fails.
    """
    unverified_header = jwt.get_unverified_header(token)
    # Pick the JWKS key whose "kid" matches the token header (None if no match).
    rsa_key = match_key_id(unverified_header)
    try:
        payload = jwt.decode(
            token,
            rsa_key,
            algorithms=ALGORITHMS,
            audience=current_app.config['API_AUDIENCE'],
            issuer=current_app.config['ISSUER']
        )
    except jwt.ExpiredSignatureError as err:
        handle_expired_signature(err)
    except jwt.JWTClaimsError as err:
        handle_claims(err)
    except exceptions.JWTError as err:
        handle_jwt(err)
    except Exception as err:
        handle_non_jwt()
    # Reached only when decoding succeeded: every handle_* helper raises,
    # so `payload` is always bound here.
    return payload
def match_key_id(unverified_header):
    """Checks if the RSA key id given in the header exists in the JWKS.

    Returns the matching RSA key dict, or None when no key id matches.
    """
    jwks = get_jwks()
    wanted_kid = unverified_header["kid"]
    matching_keys = (
        rsa_from_jwks(key)
        for key in jwks["keys"]
        if key["kid"] == wanted_kid
    )
    # First match wins; None when the generator is empty.
    return next(matching_keys, None)
def rsa_from_jwks(key):
    """Project a JWKS entry down to the fields needed for RSA verification."""
    wanted_fields = ("kty", "kid", "use", "n", "e")
    return {name: key[name] for name in wanted_fields}
def handle_claims(error):
    """Translate an invalid-claims error into a 401 BenwaOnlineAuthError."""
    message = '{0}'.format(error)
    raise BenwaOnlineAuthError(
        detail=message,
        title='invalid claim',
        status=401
    )
def handle_expired_signature(err):
    """Handles tokens with expired signatures."""
    # Re-raised unchanged — presumably so callers can trigger a token
    # refresh on this specific error type; TODO confirm with call sites.
    raise err
def handle_jwt(error):
    """Translate a generic JWT error into a 401 BenwaOnlineAuthError."""
    message = '{0}'.format(error)
    raise BenwaOnlineAuthError(
        detail=message,
        title='invalid signature',
        status=401
    )
def handle_non_jwt():
    """Raise a generic parse failure for anything that is not a JWT error."""
    error = BenwaOnlineAuthError(
        title='invalid header',
        detail='unable to parse authentication token'
    )
    raise error
@cache.cached(timeout=48 * 3600, key_prefix='jwks')
def get_jwks():
    """Fetch the JSON Web Key Set from the auth server (cached for 48 hours)."""
    try:
        msg = 'JWKS not cached - requesting from {}'.format(current_app.config['JWKS_URL'])
        current_app.logger.debug(msg)
        # Short timeout so a down auth server doesn't hang requests.
        jwksurl = requests.get(current_app.config['JWKS_URL'], timeout=5)
    except requests.exceptions.Timeout:
        raise BenwaOnlineAuthError(
            title='JWKS Request Timed Out',
            detail='the authentication server is unavailable, or another issue has occured',
            status=500
        )
    return jwksurl.json()
def has_scope(scope, token):
    """Return True if `scope` appears in the token's space-separated scope claim.

    Claims are read *without* signature verification, so this must only be
    used on tokens that have already been verified.
    """
    unverified_claims = jwt.get_unverified_claims(token)
    # .get with a default: a token without a scope claim simply has no
    # scopes instead of raising KeyError.
    token_scopes = unverified_claims.get('scope', '').split()
    # Direct membership test instead of `True if ... else False`.
    return scope in token_scopes
def refresh_token_request(client, refresh_token):
    """POST an OAuth2 refresh_token grant and return the parsed JSON reply."""
    # Standard OAuth2 refresh-grant form body.
    data = {
        'grant_type': 'refresh_token',
        'refresh_token': refresh_token,
        'client_id': client.consumer_key,
        'client_secret': client.consumer_secret
    }
    msg = 'Attempting to refresh token at {}'.format(client.base_url + client.access_token_url)
    current_app.logger.debug(msg)
    resp = requests.post(client.base_url + client.access_token_url, data=data)
    # Note: no status-code check — a non-2xx reply is still .json()-parsed.
    return resp.json()
| 3,266 | 1,075 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import time
import getopt
import shlex, subprocess, tempfile
def _fresh_book_paths():
    """Return (bookDir, pdfPath) where bookDir is reserved but not yet created."""
    tmp_dir = tempfile.mkdtemp() + "/"
    os.rmdir(tmp_dir)  # decapod-genpdf expects to create the directory itself
    return tmp_dir, tmp_dir + "out.pdf"

def _run_test(tests, description, expected, cmd):
    """Run one decapod-genpdf invocation and record its outcome in *tests*."""
    print("========== Running test %02d ==========" % (len(tests) + 1))
    actual = subprocess.call(cmd)
    tests.append((description, actual, expected, cmd))

def main(sysargv):
    """Exercise ./decapod-genpdf.py with a series of option combinations.

    Prints a report of every test's actual vs. expected exit code, then a
    second list restricted to mismatches (with the offending command).
    The original copy-pasted 17 test stanzas are folded into the
    `_run_test` / `_fresh_book_paths` helpers and a data table.
    """
    tests = []  # (description, actual return code, expected return code, command)
    fn = "../data/test-image.jpg"
    tmp_dir, out_fn = _fresh_book_paths()

    _run_test(tests, "Testing w/o options. Should return help message.", 0,
              ["./decapod-genpdf.py"])
    _run_test(tests, "Testing -h option. Should return help message.", 0,
              ["./decapod-genpdf.py", "-h"])
    # "fn" (the literal string) is a deliberately wrong input file name.
    _run_test(tests, "Testing with wrong file name options.", 2,
              ["./decapod-genpdf.py", "-d", tmp_dir, "-p", out_fn, "fn"])
    os.rmdir(tmp_dir)  # remove the directory left behind by the failed run
    _run_test(tests, "Testing with minimal options.", 0,
              ["./decapod-genpdf.py", "-d", tmp_dir, "-p", out_fn, fn])
    # Re-using the same directory must fail: it already holds a book.
    _run_test(tests, "Testing with existing book directory.", 1,
              ["./decapod-genpdf.py", "-d", tmp_dir, "-p", out_fn, fn])

    # Each geometry/resolution/type case runs in a fresh book directory.
    option_cases = [
        ("Testing with different width and height.", 0, ["-W", "1", "-H", "1"]),
        ("Testing with negative width and height.", 1, ["-W", "-1", "-H", "-1"]),
        ("Testing with quadratic page size.", 0, ["-W", "10.0", "-H", "10.0"]),
        ("Testing with very wide page width.", 0, ["-W", "100.0", "-H", "10.0"]),
        ("Testing with very high page height.", 0, ["-W", "10.0", "-H", "100.0"]),
        ("Testing with negative resolution.", 0, ["-r", "-300", "-t", "2"]),
        ("Testing with zero resolution.", 0, ["-r", "0", "-t", "2"]),
        ("Testing with small resolution (10 dpi).", 0, ["-r", "10", "-t", "2"]),
        ("Testing with high resolution (600 dpi).", 0, ["-r", "600", "-t", "2"]),
        ("Testing with PDF output type 3.", 0, ["-r", "300", "-t", "3"]),
    ]
    for description, expected, extra in option_cases:
        tmp_dir, out_fn = _fresh_book_paths()
        _run_test(tests, description, expected,
                  ["./decapod-genpdf.py", "-d", tmp_dir, "-p", out_fn] + extra + [fn])

    tmp_dir, out_fn = _fresh_book_paths()
    _run_test(tests, "Testing with -b bookFile as input.", 0,
              ["./decapod-genpdf.py", "-d", tmp_dir, "-p", out_fn, "-b", fn])
    tmp_dir, out_fn = _fresh_book_paths()
    _run_test(tests, "Testing with multiple input files.", 0,
              ["./decapod-genpdf.py", "-d", tmp_dir, "-p", out_fn, fn, fn, fn])

    print("========== REPORT ==========")
    print("Test\tis exp. details")
    for i, (description, actual, expected, cmd) in enumerate(tests):
        print("%d\t[%d] [%d] %s" % (i + 1, actual, expected, description))
    print("========== ERRORS ==========")
    print("Test\tis exp. details command")
    for i, (description, actual, expected, cmd) in enumerate(tests):
        if actual != expected:
            print("%d\t[%d] [%d] %s\t%s" % (i + 1, actual, expected, description, cmd))
# Script entry point: run the whole test suite, then exit with status 0.
if __name__ == "__main__":
    main(sys.argv)
    sys.exit(0)
| 8,235 | 3,187 |
"""Kernel implementation.
This module implements the kernel used in mulearn.
"""
import numpy as np
class Kernel:
    """Base kernel class."""

    def __init__(self):
        """Create an instance of :class:`Kernel`."""
        # Subclasses backed by a precomputed Gram matrix override these.
        self.precomputed = False
        self.kernel_computations = None

    def compute(self, arg_1, arg_2):
        """Compute the kernel value, given two arguments.

        :param arg_1: First kernel argument.
        :type arg_1: Object
        :param arg_2: Second kernel argument.
        :type arg_2: Object
        :raises: NotImplementedError (:class:`Kernel` is abstract)
        :returns: `float` -- kernel value.
        """
        raise NotImplementedError(
            'The base class does not implement the `compute` method')

    def __str__(self):
        """Return the string representation of a kernel."""
        return self.__repr__()

    def __eq__(self, other):
        """Check kernel equality w.r.t. other objects."""
        # Identity comparison of the types: instances of different
        # subclasses never compare equal, even across an inheritance chain.
        return type(self) is type(other)

    def __ne__(self, other):
        """Check kernel inequality w.r.t. other objects."""
        return not self == other

    @staticmethod
    def __bool__():
        """Check if a kernel is non-null (kernels are always truthy).

        Renamed from the Python 2 `__nonzero__`, which Python 3 ignores.
        """
        return True

    def __hash__(self):
        """Generate hashcode for a kernel."""
        return hash(self.__repr__())

    @classmethod
    def get_default(cls):
        """Return the default kernel.

        :returns: `LinearKernel()` -- the default kernel.
        """
        return LinearKernel()
class LinearKernel(Kernel):
    """Linear kernel class."""

    def compute(self, arg_1, arg_2):
        r"""Compute the kernel value.

        The value $k(x_1, x_2)$ of a linear kernel is the dot product
        $x_1 \cdot x_2 = \sum_{i=1}^n (x_1)_i (x_2)_i$, where $n$ is the
        common dimension of $x_1$ and $x_2$.

        :param arg_1: First kernel argument.
        :type arg_1: iterable of `float`
        :param arg_2: Second kernel argument.
        :type arg_2: iterable of `float`
        :returns: `float` -- kernel value.
        """
        dot_product = np.dot(arg_1, arg_2)
        return float(dot_product)

    def __repr__(self):
        """Return the python representation of the kernel."""
        return 'LinearKernel()'
class PolynomialKernel(Kernel):
    """Polynomial kernel class."""

    def __init__(self, degree):
        r"""Create an instance of `PolynomialKernel`.

        :param degree: degree of the polynomial kernel.
        :type degree: `int`
        :raises: ValueError if `degree` is not an integer or if it has a
          negative value.
        """
        super().__init__()
        # Guard clause instead of if/else: reject anything that is not a
        # strictly positive integer.
        if not (degree > 0 and isinstance(degree, int)):
            raise ValueError(f"{degree} is not usable as a polynomial degree")
        self.degree = degree

    def compute(self, arg_1, arg_2):
        r"""Compute the kernel value.

        The value $k(x_1, x_2)$ of a polynomial kernel is the quantity
        $(x_1 \cdot x_2 + 1)^d$, $d$ being the polynomial degree of the
        kernel.

        :param arg_1: First kernel argument.
        :type arg_1: iterable of `float`
        :param arg_2: Second kernel argument.
        :type arg_2: iterable of `float`
        :returns: `float` -- kernel value.
        """
        shifted_dot = np.dot(arg_1, arg_2) + 1
        return float(shifted_dot ** self.degree)

    def __repr__(self):
        """Return the python representation of the kernel."""
        return f"PolynomialKernel({self.degree})"
class HomogeneousPolynomialKernel(PolynomialKernel):
    """Homogeneous polynomial kernel class."""

    def __init__(self, degree):
        r"""Create an instance of `HomogeneousPolynomialKernel`.

        :param degree: degree of the polynomial kernel.
        :type degree: `int`
        :raises: ValueError if `degree` is not an integer or if it has a
          negative value.
        """
        # Degree validation is inherited from PolynomialKernel.
        super().__init__(degree)

    def compute(self, arg_1, arg_2):
        r"""Compute the kernel value.

        The value $k(x_1, x_2)$ of a homogeneous polynomial kernel is the
        quantity $(x_1 \cdot x_2)^d$, $d$ being the polynomial degree of
        the kernel (no +1 shift, unlike the plain polynomial kernel).

        :param arg_1: First kernel argument.
        :type arg_1: iterable of `float`
        :param arg_2: Second kernel argument.
        :type arg_2: iterable of `float`
        :returns: `float` -- kernel value.
        """
        dot_product = np.dot(arg_1, arg_2)
        return float(dot_product ** self.degree)

    def __repr__(self):
        """Return the python representation of the kernel."""
        return f"HomogeneousPolynomialKernel({self.degree})"
class GaussianKernel(Kernel):
    """Gaussian kernel class."""

    default_sigma = 1

    def __init__(self, sigma=default_sigma):
        r"""Create an instance of `GaussianKernel`.

        :param sigma: gaussian standard deviation, defaults to 1.
        :type sigma: `float`
        :raises: ValueError if `sigma` has a negative value.
        """
        super().__init__()
        # Guard clause: a standard deviation must be strictly positive.
        if not sigma > 0:
            raise ValueError(f'{sigma} is not usable '
                             'as a gaussian standard deviation')
        self.sigma = sigma

    def compute(self, arg_1, arg_2):
        r"""Compute the kernel value.

        The value $k(x_1, x_2)$ of a gaussian kernel is the quantity
        $\mathrm e^{-\frac{||x_1 - x_2||^2}{2 \sigma^2}}$, $\sigma$ being
        the kernel standard deviation.

        :param arg_1: First kernel argument.
        :type arg_1: iterable of `float`
        :param arg_2: Second kernel argument.
        :type arg_2: iterable of `float`
        :returns: `float` -- kernel value.
        """
        squared_distance = np.linalg.norm(np.array(arg_1) - np.array(arg_2)) ** 2
        exponent = -1. * squared_distance / (2 * self.sigma ** 2)
        return float(np.exp(exponent))

    def __repr__(self):
        """Return the python representation of the kernel."""
        if self.sigma != self.default_sigma:
            return f"GaussianKernel(sigma={self.sigma})"
        return "GaussianKernel()"
class HyperbolicKernel(Kernel):
    """Hyperbolic kernel class."""

    default_scale = 1
    default_offset = 0

    def __init__(self, scale=default_scale, offset=default_offset):
        r"""Create an instance of `HyperbolicKernel`.

        :param scale: scale constant, defaults to 1.
        :type scale: `float`
        :param offset: offset constant, defaults to 0.
        :type offset: `float`
        """
        super().__init__()
        self.scale, self.offset = scale, offset

    def compute(self, arg_1, arg_2):
        r"""Compute the kernel value.

        The value $k(x_1, x_2)$ of a hyperbolic kernel is the quantity
        $\tanh(\alpha x_1 \cdot x_2 + \beta)$, $\alpha$ and $\beta$ being
        the scale and offset parameters, respectively.

        :param arg_1: First kernel argument.
        :type arg_1: iterable of `float`
        :param arg_2: Second kernel argument.
        :type arg_2: iterable of `float`
        :returns: `float` -- kernel value.
        """
        raw_dot = np.dot(np.array(arg_1), np.array(arg_2))
        return float(np.tanh(self.scale * raw_dot + self.offset))

    def __repr__(self):
        """Return the python representation of the kernel."""
        # List only the parameters that differ from their defaults.
        non_default = []
        if self.scale != self.default_scale:
            non_default.append(f"scale={self.scale}")
        if self.offset != self.default_offset:
            non_default.append(f"offset={self.offset}")
        return "HyperbolicKernel(" + ", ".join(non_default) + ")"
class PrecomputedKernel(Kernel):
    """Precomputed kernel class."""

    def __init__(self, kernel_computations):
        r"""Create an instance of `PrecomputedKernel`.

        :param kernel_computations: kernel computations.
        :type kernel_computations: square matrix of float elements
        :raises: ValueError if `kernel_computations` is not a square
          bidimensional array.
        """
        super().__init__()
        self.precomputed = True
        try:
            n_rows, n_cols = np.array(kernel_computations).shape
        except ValueError:
            raise ValueError('The supplied matrix is not array-like ')
        if n_rows != n_cols:
            raise ValueError('The supplied matrix is not square')
        self.kernel_computations = kernel_computations

    def compute(self, arg_1, arg_2):
        r"""Compute the kernel value.

        The value of a precomputed kernel is retrieved according to the
        indices of the corresponding objects. Note that each index should
        be enclosed within an iterable in order to be compatible with
        sklearn.

        :param arg_1: First kernel argument.
        :type arg_1: iterable of `float`
        :param arg_2: Second kernel argument.
        :type arg_2: iterable of `float`
        :returns: `float` -- kernel value.
        """
        row, col = arg_1[0], arg_2[0]
        return float(self.kernel_computations[row][col])

    def __repr__(self):
        """Return the python representation of the kernel."""
        return f"PrecomputedKernel({self.kernel_computations})"
| 9,242 | 2,747 |
# -*- coding: utf-8 -*-
# Name: config.py
# Authors: Stephan Meighen-Berger
# Config file for the ITM package.
from typing import Dict, Any
import yaml
# Default settings for the ITM package, grouped by section.
_baseconfig: Dict[str, Any] = {
    ###########################################################################
    # General inputs
    ###########################################################################
    "general": {
        # Random state seed
        "random state seed": 1337,
    },
    "pdf": {
        # name of the pdf
        # Currently supported:
        #   - "normal": Normal distribution
        "name": "normal",
    },
    "sample": {
        # name of the underlying pdf
        # Currently supported:
        #   - "normal": Normal distribution
        "pdf": {
            "name": "normal",
            "mean": 10.,
            "sd": 0.5,
        },
        # Number of samples
        "sample size": int(1e5),
        # Subset size
        "subset size": 15,
    },
}
class ConfigClass(dict):
    """ The configuration class. This is used
    by the package for all parameter settings. If something goes wrong
    its usually here.

    Parameters
    ----------
    config : dic
        The config dictionary

    Returns
    -------
    None
    """

    def __init__(self, *args, **kwargs):
        # Behaves exactly like a dict; kept explicit as an extension point.
        super().__init__(*args, **kwargs)

    # TODO: Update this
    def from_yaml(self, yaml_file: str) -> None:
        """ Update config with yaml file

        Parameters
        ----------
        yaml_file : str
            path to yaml file

        Returns
        -------
        None
        """
        # Use a context manager so the file handle is closed promptly;
        # the previous `yaml.load(open(...))` leaked the descriptor.
        with open(yaml_file) as stream:
            yaml_config = yaml.load(stream, Loader=yaml.SafeLoader)
        self.update(yaml_config)

    # TODO: Update this
    def from_dict(self, user_dict: Dict[Any, Any]) -> None:
        """ Creates a config from dictionary

        Parameters
        ----------
        user_dict : dic
            The user dictionary

        Returns
        -------
        None
        """
        self.update(user_dict)
config = ConfigClass(_baseconfig)
| 2,057 | 580 |
"""
If you use this code, please cite one of the SynthSeg papers:
https://github.com/BBillot/SynthSeg/blob/master/bibtex.bib
Copyright 2020 Benjamin Billot
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is
distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing permissions and limitations under the
License.
"""
"""This script enables to launch predictions with SynthSeg from the terminal."""
# print information
print('\n')
print('SynthSeg prediction')
print('\n')

# python imports
import os
import sys
from argparse import ArgumentParser

# add main folder to python path and import ./SynthSeg/predict.py
# (three dirname() calls: this script lives two directory levels below the
# repository root)
synthseg_home = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
sys.path.append(synthseg_home)
from SynthSeg.predict import predict

# parse arguments
parser = ArgumentParser()

# input/outputs
parser.add_argument("--i", type=str, dest='path_images',
                    help="Image(s) to segment. Can be a path to an image or to a folder.")
parser.add_argument("--o", type=str, dest="path_segmentations",
                    help="Segmentation output(s). Must be a folder if --i designates a folder.")
parser.add_argument("--post", type=str, default=None, dest="path_posteriors",
                    help="(optional) Posteriors output(s). Must be a folder if --i designates a folder.")
parser.add_argument("--resample", type=str, default=None, dest="path_resampled",
                    help="(optional) Resampled image(s). Must be a folder if --i designates a folder.")
parser.add_argument("--vol", type=str, default=None, dest="path_volumes",
                    help="(optional) Output CSV file with volumes for all structures and subjects.")

# parameters
parser.add_argument("--crop", nargs='+', type=int, default=192, dest="cropping",
                    help="(optional) Size of 3D patches to analyse. Default is 192.")
parser.add_argument("--threads", type=int, default=1, dest="threads",
                    help="(optional) Number of cores to be used. Default is 1.")
parser.add_argument("--cpu", action="store_true", help="(optional) Enforce running with CPU rather than GPU.")

# parse commandline
# args is a plain dict so it can be passed to predict(**args) after the
# CLI-only keys ('cpu', 'threads') are popped below.
args = vars(parser.parse_args())

# enforce CPU processing if necessary
if args['cpu']:
    print('using CPU, hiding all CUDA_VISIBLE_DEVICES')
    os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
del args['cpu']

# limit the number of threads to be used if running on CPU
# NOTE: tensorflow is deliberately imported only here, after
# CUDA_VISIBLE_DEVICES has been set, so the env var takes effect before TF
# initialises its devices.
import tensorflow as tf
tf.config.threading.set_intra_op_parallelism_threads(args['threads'])
del args['threads']

# default parameters
args['segmentation_labels'] = os.path.join(synthseg_home, 'data/labels_classes_priors/segmentation_labels.npy')
args['n_neutral_labels'] = 18
args['segmentation_label_names'] = os.path.join(synthseg_home, 'data/labels_classes_priors/segmentation_names.npy')
args['topology_classes'] = os.path.join(synthseg_home, 'data/labels_classes_priors/topological_classes.npy')
args['path_model'] = os.path.join(synthseg_home, 'models/SynthSeg.h5')
# pad to the analysed patch size ('--crop' value)
args['padding'] = args['cropping']

# call predict
predict(**args)
| 3,430 | 1,060 |
from __future__ import annotations
from typing import *
from .utils import encode_int, encode_varint, decode_int, decode_varint, ensure_stream, base58, hash256
from .script import Script
from io import BytesIO
from .keys import PublicKey
from .ecdsa import Signature, validate_signature
def get_tx_idx(wallet, prev_tx):
    """Return the index of the first output of *prev_tx* whose script_pubkey
    contains the public-key hash encoded in *wallet*, or None if no output
    matches.

    :param wallet: base58-encoded wallet address.
    :param prev_tx: Tx whose outputs are scanned.
    :returns: int index, or None.
    """
    pkhash = base58.decode(wallet)  # get the pkhash of the wallet
    for idx, out in enumerate(prev_tx.outputs):
        # Non-int script commands are data pushes (candidate pkhashes);
        # the previous filter/lambda list-of-bools dance reduced to a
        # plain membership test.
        data_pushes = [cmd for cmd in out["script_pubkey"].commands
                       if type(cmd) is not int]
        if pkhash in data_pushes:
            return idx
    return None
def validate_tx(utxo: Tx,
                tx: Tx,
                message: bytes,
                public_key: PublicKey) -> bool:
    """
    -----------
    Verify a p2pkh Transaction
    -----------
    """
    spent_idx = tx.inputs[0]["prev_idx"]  # index of the UTXO spent
    unlocking = tx.inputs[0]["script_sig"].commands  # <der_sig> <sec_pubkey>
    locking = utxo.outputs[spent_idx]["script_pubkey"].commands  # "locking" script of UTXO
    input_amt = utxo.outputs[spent_idx]["amount"]  # UTXO amount
    output_amt = sum(out["amount"] for out in tx.outputs)

    # ensure no new bitcoins are created
    if output_amt > input_amt:
        return False

    # p2pkh: the sender's public-key hash sits at position 2 of the
    # locking script
    expected_pkhash = public_key.encode(compressed=True, hash_160=True)
    if expected_pkhash != locking[2]:  # ==OP_EQUALVERIFY
        return False

    sig = Signature.decode(unlocking[0])
    pk = PublicKey.decode(unlocking[1])
    if not validate_signature(p=pk, message=message, sig=sig):  # ==OP_CHECKSIG
        return False

    # To do: hook into UTXO set to check if Tx is unspent
    return True
class Tx(object):
    """
    Object Representing a Bitcoin Transaction
    """
    def __init__(self,
                 version: int,
                 inputs: List[dict],
                 outputs: List[dict],
                 locktime: int = 0):
        # version: transaction format version, serialized as a 4-byte LE int
        # inputs: dicts with keys "prev_tx", "prev_idx", "script_sig", "seq"
        # outputs: dicts with keys "amount", "script_pubkey"
        # locktime: earliest time/height the tx is valid (0 = immediately)
        self.version = version
        self.inputs = inputs
        self.outputs = outputs
        self.locktime = locktime

    def __repr__(self):
        """Return a human-readable multi-line summary of the transaction."""
        s = f"Version: {self.version}\nNum Inputs: {len(self.inputs)}\nInputs:\n"""
        for i in self.inputs:
            s += f'{i["prev_tx"].hex()} - {i["script_sig"]}\n'
            s += f'Index: {i["prev_idx"]}\n'
        s += f"Num Outputs: {len(self.outputs)}\nOutputs:\n"
        for o in self.outputs:
            s += f'{o["amount"]} SAT - {o["script_pubkey"]}\n'
        s += f'Locktime: {self.locktime}'
        return s

    def encode(self, sig_idx: int = -1):
        """Serialize the transaction to raw bytes.

        When sig_idx >= 0 the encoding is the signing preimage for input
        sig_idx (other inputs get empty scripts and the 4-byte SIGHASH_ALL
        flag is appended).
        """
        #version
        out = [encode_int(self.version, 4)] #4 byte little-endian
        #encode inputs
        out += [encode_varint(len(self.inputs))]
        out += [self.encode_inputs(sig_idx=sig_idx)]
        #encode outputs
        out += [encode_varint(len(self.outputs))]
        out += [self.encode_outputs()]
        #locktime and SIGHASH
        out += [encode_int(self.locktime, 4)]
        out += [encode_int(1, 4) if sig_idx != -1 else b""] #SIGHASH_ALL
        return b"".join(out)

    def encode_inputs(self, sig_idx: int = -1):
        """
        prev_tx is encoded to be little endian
        prev_idx, seq are 4 byte little endian encoded integers
        script_sig uses Script encoding
        """
        out = []
        for idx in range(len(self.inputs)):
            inp = self.inputs[idx]
            # When building a signing preimage (sig_idx >= 0), only the
            # input being signed keeps its script; the rest are emptied.
            if sig_idx == -1 or sig_idx == idx:
                script_sig = inp["script_sig"].encode()
            else:
                script_sig = Script([]).encode()
            out += [
                inp["prev_tx"][::-1], #reverse bytes
                encode_int(inp["prev_idx"], 4),
                script_sig,
                encode_int(inp["seq"], 4)
            ]
        return b"".join(out)

    def encode_outputs(self):
        """Serialize all outputs: 8-byte LE amount followed by the script."""
        out = []
        for o in self.outputs:
            encoded = [
                encode_int(o["amount"], 8),
                o["script_pubkey"].encode()
            ]
            out += encoded
        return b"".join(out)

    def get_id(self):
        """Return the transaction id: double-SHA256 of the encoding,
        byte-reversed and hex-encoded."""
        return hash256(self.encode())[::-1].hex() #little-endian, hexadecimal

    @classmethod
    def decode(cls, b: Union[bytes, BytesIO]) -> Tx:
        """
        Decodes the raw bytes of a transaction into a Tx object
        """
        b = ensure_stream(b)
        segwit, witness = False, []
        version = decode_int(b, 4)
        num_inputs = decode_varint(b)
        # A zero input count signals the segwit marker/flag bytes.
        if num_inputs == 0:
            assert b.read(1) == b"\x01" #segwit marker -- need to read one more
            num_inputs = decode_varint(b)
            segwit = True
        inputs = []
        for n in range(num_inputs):
            prev_tx = b.read(32)[::-1] #little to big endian
            prev_idx = decode_int(b, 4)
            script_sig = Script.decode(b)
            seq = decode_int(b, 4)
            inputs.append({"prev_tx": prev_tx,
                           "prev_idx": prev_idx,
                           "script_sig": script_sig,
                           "seq": seq})
        num_outputs = decode_varint(b)
        outputs = []
        for n in range(num_outputs):
            amt = decode_int(b, 8)
            script_pubkey = Script.decode(b)
            outputs.append({"amount": amt,
                           "script_pubkey": script_pubkey})
        # Witness data follows the outputs: one stack of items per input.
        if segwit:
            for i in inputs:
                num_items = decode_varint(b)
                items = []
                for _ in range(num_items):
                    item_len = decode_varint(b)
                    if item_len == 0:
                        items.append(0)
                    else:
                        items.append(b.read(item_len))
                witness.append(items)
        locktime = decode_int(b, 4)
        # NOTE(review): the decoded witness is discarded here — confirm
        # whether the constructor should eventually receive it.
        return cls(version, inputs, outputs, locktime) #can include segwit, witness here
| 6,238 | 1,977 |
from typing import Any
import discord
from discord.ext import commands
from ..mofupoints import incrementEmbedCounter
async def sendEmbed(
    ctx: commands.Context, url: str, localImageFile: discord.File = None, **kwargs: Any
) -> None:
    """Send *url* as a gold-colored image embed, falling back to a plain
    message when the bot lacks embed permission in the channel."""
    print(url)
    # Contexts created by user commands carry an author; credit the embed
    # to them.
    if hasattr(ctx, "author"):
        incrementEmbedCounter(ctx.author)
    image_embed = discord.Embed(color=discord.Colour.gold(), **kwargs)
    image_embed.set_image(url=url)
    try:
        await ctx.send(embed=image_embed, file=localImageFile)
    except discord.Forbidden:  # we don't have permission to send embed
        await ctx.send(url, file=localImageFile)
| 637 | 216 |
# Module for iTHOR env set up and simple navigation
from ai2thor.controller import Controller
from termcolor import colored
from dijkstar import Graph, find_path
from lib.params import SIM_WINDOW_HEIGHT, SIM_WINDOW_WIDTH, VISBILITY_DISTANCE, FIELD_OF_VIEW
import matplotlib.pyplot as plt
import numpy as np
import time, copy, sys
class Agent_Sim():
    """Wrapper around an ai2thor Controller: scene selection, pose queries,
    and unit move/rotate actions on a discrete grid."""

    def __init__(self, scene_type='Kitchen', scene_num=1, scene_name=None, grid_size=0.25, rotation_step=10, sleep_time=0.05, ToggleMapView=False):
        # scene_type/scene_num select a FloorPlan unless scene_name is given
        # explicitly; grid_size is the translation step in meters and
        # rotation_step the turn increment in degrees.
        self._scene_type = scene_type
        self._scene_num = scene_num
        self._grid_size = grid_size
        self._rotation_step = rotation_step
        self._sleep_time = sleep_time
        # Kitchens: FloorPlan1 - FloorPlan30
        # Living rooms: FloorPlan201 - FloorPlan230
        # Bedrooms: FloorPlan301 - FloorPlan330
        # Bathrooms: FloorPLan401 - FloorPlan430
        # NOTE(review): unlike the scene_type branch below, an out-of-range
        # scene_num only prints an error and continues — confirm whether a
        # sys.exit(1) is intended here as well.
        if (scene_num<1) or (scene_num>30):
            sys.stderr.write(colored('ERROR: ','red')
                             + "Expect scene_num within [1,30] while get '{}'\n".format(scene_num))
        if scene_type == 'Kitchen':
            add_on = 0
        elif scene_type == 'Living room':
            add_on = 200
        elif scene_type == 'Bedroom':
            add_on = 300
        elif scene_type == 'Bathroom':
            add_on = 400
        else:
            sys.stderr.write(colored('ERROR: ','red')
                             + "Expect scene_type 'Kitchen', 'Living room', 'Bedroom' or 'Bathroom' while get '{}'\n".format(scene_type))
            sys.exit(1)
        if scene_name is None:
            self._scene_name = 'FloorPlan' + str(add_on + self._scene_num)
        else:
            self._scene_name = scene_name
        self._controller = Controller(scene=self._scene_name, gridSize=self._grid_size, visibilityDistance=VISBILITY_DISTANCE, fieldOfView=FIELD_OF_VIEW)
        self._controller.step('ChangeResolution', x=SIM_WINDOW_WIDTH, y=SIM_WINDOW_HEIGHT) # Change simulation window size
        if ToggleMapView: # Top view of the map to see the objets layout. issue: no SG can be enerated
            self._controller.step({"action": "ToggleMapView"})
        self._event = self._controller.step('Pass')
        self._start_time = time.time()
        self._action_type = {'MOVE_FORWARD': 1, 'STAY_IDLE' :2, 'TURN_RIGHT' :3, 'TURN_LEFT': 4}

    def update_event(self):
        """Refresh the cached event with a no-op 'Pass' step."""
        self._event = self._controller.step('Pass')

    def get_agent_position(self):
        """Return the agent position dict (keys 'x', 'y', 'z')."""
        self.update_event()
        return self._event.metadata['agent']['position']

    def get_agent_rotation(self):
        """Return the agent rotation dict (keys 'x', 'y', 'z', degrees)."""
        self.update_event()
        return self._event.metadata['agent']['rotation']

    def get_reachable_coordinate(self):
        """Return the list of reachable grid positions in the scene."""
        self._event = self._controller.step(action='GetReachablePositions')
        return self._event.metadata['actionReturn']

    def get_object(self):
        """Return metadata for all objects in the scene."""
        self.update_event()
        return self._event.metadata['objects']

    def unit_move(self):
        """Move one grid step forward; returns the action label."""
        self._event = self._controller.step(action='MoveAhead')
        return 'MOVE_FORWARD'

    def unit_rotate(self, degree):
        """Rotate in place by *degree* (positive = right); returns the
        action label, or None for rotations under 2 degrees."""
        if np.abs(degree) < 2:
            print(colored('INFO: ','blue') + 'Robot rotate for {} degree which is less than 2 deg'.format(degree))
            return None
        # Normalize to (-180, 180] so the turn takes the short way round.
        degree_corrected = degree
        while degree_corrected > 180:
            degree_corrected -= 360
        while degree_corrected < -180:
            degree_corrected += 360
        # NOTE(review): the direction test below uses the raw `degree`, not
        # `degree_corrected`; for inputs outside [-180, 180] the chosen turn
        # direction and the normalized magnitude disagree. Callers in
        # move_towards always pass normalized angles, so confirm whether
        # this should read `if degree_corrected > 0:`.
        if degree > 0:
            self._event = self._controller.step(action='RotateRight', degrees=np.abs(degree_corrected))
            return 'TURN_RIGHT'
        else:
            self._event = self._controller.step(action='RotateLeft', degrees=np.abs(degree_corrected))
            return 'TURN_LEFT'

    # Assume goal is {'position': position, 'rotation': rotation} where position and rotation are dict or list
    def move_towards(self, goal):
        """Turn to face the goal position (one grid cell away), then take a
        single forward step."""
        self.update_event()
        agent_position = self.get_agent_position()
        agent_rotation = self.get_agent_rotation()
        agent_position = list(agent_position.values())
        agent_rotation = list(agent_rotation.values())
        goal_position = goal['position']
        goal_rotation = goal['rotation']
        if isinstance(goal_position, dict):
            goal_position = list(goal_position.values())
            goal_rotation = list(goal_rotation.values())
        # Heading (deg) from agent to goal in the x-z ground plane.
        heading_angle = np.arctan2((goal_position[0] - agent_position[0]), (goal_position[2] - agent_position[2])) * 180 / np.pi
        heading_angle_list = copy.deepcopy(agent_rotation)
        heading_angle_list[1] = heading_angle
        position_error = list(map(lambda x, y: np.abs(x - y), goal_position, agent_position))
        rotation_error = list(map(lambda x, y: x - y, heading_angle_list, agent_rotation))
        rotation_error_abs = list(map(lambda x: np.abs(x), rotation_error))
        # Largest-magnitude rotation component, normalized to (-180, 180].
        rotation_error_corrected = rotation_error[rotation_error_abs.index(max(rotation_error_abs))]
        while rotation_error_corrected > 180:
            rotation_error_corrected -= 360
        while rotation_error_corrected < -180:
            rotation_error_corrected += 360
        # Refuse moves that are not a single adjacent grid step.
        if np.linalg.norm(np.array(position_error)) > self._grid_size * 1.10:
            sys.stderr.write(colored('ERROR: ','red')
                             + 'Moving step {} greater than grid size {}'.format(position_error, self._grid_size))
            sys.exit(1)
        elif np.linalg.norm(np.array(position_error)) < self._grid_size * 0.10:
            sys.stderr.write(colored('ERROR: ','red')
                             + 'Moving distance {} too small'.format(position_error))
            sys.exit(1)
        # Turn in rotation_step increments, then a final partial turn for
        # the remainder, then step forward.
        rotate_steps = round(np.abs(rotation_error_corrected / self._rotation_step))
        for _ in range(rotate_steps):
            time.sleep(self._sleep_time)
            action = self.unit_rotate(self._rotation_step * np.sign(rotation_error_corrected))
        action = self.unit_rotate((rotation_error_corrected - rotate_steps * self._rotation_step * np.sign(rotation_error_corrected)))
        time.sleep(self._sleep_time)
        action = self.unit_move()
class Dumb_Navigetor():
    """Grid-graph navigator over an agent simulator's reachable coordinates.

    Points closer than ~one grid step become graph neighbours; navigation
    walks the agent along a shortest path computed with Graph/find_path.

    NOTE(review): the class name spelling ("Navigetor") is kept for
    backward compatibility with existing callers.
    """

    def __init__(self, agent_sim):
        self._map = {}         # point index -> list of neighbouring point indices
        self._point_list = []  # point index -> coordinate values (list)
        self._grid_size = agent_sim._grid_size
        self._point_num = 0
        self._agent_sim = agent_sim
        self._starting_point = self._agent_sim.get_agent_position()
        self._coordinate_dict = self._agent_sim.get_reachable_coordinate()
        self._map_searched = [True] * len(self._coordinate_dict)
        self._build_map()

    def _build_map(self):
        """Build the adjacency map over all reachable coordinates."""
        self._point_list.append(list(self._starting_point.values()))
        self._map[self._point_num] = []
        self._map_searched[self._point_num] = True
        self._point_num += 1
        for point_adding in self._coordinate_dict:
            if self._starting_point == point_adding:
                continue
            self._point_list.append(list(point_adding.values()))
            self._point_num += 1
            self._map[self._point_num - 1] = []
            # Link the new point with every earlier point that lies within
            # one grid step (3% tolerance for floating-point jitter).
            for point_added_index in range(self._point_num - 1):
                point_added = self._point_list[point_added_index]
                distance = np.linalg.norm(np.array(list(map(lambda x, y: x - y, point_added, self._point_list[self._point_num - 1]))))
                if distance < self._grid_size + 0.03 * self._grid_size:
                    self._map[self._point_num - 1].append(point_added_index)
                    self._map[point_added_index].append(self._point_num - 1)
        return

    # Assume goal_position is dict
    def dumb_navigate(self, goal_position, server=None, comfirmed=None):
        """Navigate the agent to goal_position (a coordinate dict).

        When ``server``/``comfirmed`` are given this acts as a server node:
        after each step it sends the currently visible objects and busy-waits
        until the client sets ``comfirmed``.
        """
        print(colored('Dumb navigate to: {}','cyan').format(goal_position))
        graph = Graph()
        nav_starting_point = self._agent_sim.get_agent_position()
        print(nav_starting_point)
        nav_starting_point = list(nav_starting_point.values())
        # BUG FIX: both indices must default to None; previously they were
        # only assigned inside the loops/branch below, so a miss raised
        # NameError instead of reaching the 'No matching point' error path.
        nav_starting_point_index = None
        goal_point_index = None
        for point in self._point_list:
            if np.linalg.norm(np.array(list(map(lambda x, y: x - y, point, nav_starting_point)))) < 0.25 * self._grid_size:
                nav_starting_point_index = self._point_list.index(point)
                break
        if isinstance(goal_position, dict):
            goal_point = list(goal_position.values())
            for point in self._point_list:
                if np.linalg.norm(np.array(list(map(lambda x, y: x - y, point, goal_point)))) < 0.25 * self._grid_size:
                    goal_point_index = self._point_list.index(point)
                    break
        if goal_point_index is None or nav_starting_point_index is None:
            sys.stderr.write(colored('ERROR: ','red') + 'No matching point in map' + '\n')
            return
        connected_point_index = self._map[goal_point_index]
        nearest_reachable_index = None
        goal_in_existing_map = False
        if self._map_searched[goal_point_index]:
            nearest_reachable_index = goal_point_index
            goal_in_existing_map = True
        else:
            # Goal not yet searched: fall back to a searched neighbour.
            for index in connected_point_index:
                if self._map_searched[index]:
                    nearest_reachable_index = index
                    break
        if nearest_reachable_index is None:
            sys.stderr.write(colored('ERROR: ','red') + 'Can not reach the point by existing map' + '\n')
            return
        # Build the traversable graph from searched points only.
        for index in range(len(self._map)):
            for connected_index in range(len(self._map[index])):
                if self._map_searched[self._map[index][connected_index]]:
                    graph.add_edge(index, self._map[index][connected_index], 1)
        result = find_path(graph, nav_starting_point_index, nearest_reachable_index)
        path = result.nodes
        for mid_point_index in range(1, len(path)):
            # This navigator serve as a server node if server is not None
            if server is not None:
                objs = [obj for obj in self._agent_sim._event.metadata['objects'] if obj['visible']]
                server.send(objs)
                print(colored('Server: ','cyan') + 'Sent Data from navigator at mid_point_index {}'.format(mid_point_index))
                while True: # Waiting for client to confirm
                    if comfirmed.value:
                        break
                comfirmed.value = 0 # Turn off the switch
            # Action
            mid_point_pose = {'position': [], 'rotation': []}
            mid_point_pose['position'] = copy.deepcopy(self._point_list[path[mid_point_index]])
            mid_point_pose['rotation'] = [0, 0, 0]
            self._agent_sim.move_towards(mid_point_pose)
        # Terminate the service by sending 'END'
        if server is not None:
            server.send('END')
            print(colored('Server: ','cyan') + 'END')
        if not goal_in_existing_map:
            # Goal itself was unsearched: take the last step and mark it searched.
            self._agent_sim.move_towards({'position': copy.deepcopy(self._point_list[goal_point_index]), 'rotation': [0, 0, 0]})
            self._map_searched[goal_point_index] = True
        return
| 11,449 | 3,579 |
# Bar chart of the five countries with the most tweets.
tweets_by_country = tweets['country'].value_counts()
fig, ax = plt.subplots(figsize=(20, 10))
ax.tick_params(axis='x', labelsize=15)
ax.tick_params(axis='y', labelsize=15)
# BUG FIX: str.decode() does not exist on Python 3 strings; the former
# .decode('utf-8') calls were Python-2 no-ops on these ASCII labels,
# so they are dropped without changing the rendered text.
ax.set_xlabel('Paises', fontsize=20)
ax.set_ylabel('Numero de tweets', fontsize=20)
ax.set_title('Top 5 Paises', fontsize=20, fontweight='bold')
tweets_by_country[:5].plot(ax=ax, kind='bar', color='lightskyblue')
plt.grid()
| 445 | 188 |
# 486A - Calculating Function
# http://codeforces.com/problemset/problem/486/A
# f(n) = -1 + 2 - 3 + ... ± n: even n yields n/2, odd n yields -(n + 1)/2.
n = int(input())
if n % 2 == 0:
    ans = n // 2
else:
    ans = -((n + 1) // 2)
print(ans)
| 152 | 70 |
from typing import Optional
from abeja.common.api_client import BaseAPIClient
from abeja.notebook.types import InstanceType, ImageType, NotebookType
class APIClient(BaseAPIClient):
    """A Low-Level client for Notebook API
    .. code-block:: python
       from abeja.notebook import APIClient
       api_client = APIClient()
    """

    def create_notebook(
            self,
            organization_id: str,
            job_definition_name: str,
            instance_type: Optional[str] = None,
            image: Optional[str] = None,
            notebook_type: Optional[str] = None) -> dict:
        """create a notebook.
        API reference: POST /organizations/{organization_id}/training/definitions/{job_definition_name}/notebooks
        Request Syntax:
            .. code-block:: python
                organization_id = "1410000000000"
                job_definition_name = "test_job_definition"
                instance_type = 'cpu-1'
                image = 'abeja-inc/all-cpu:19.10'
                notebook_type = 'lab'
                response = api_client.create_notebook(
                    organization_id, job_definition_name,
                    instance_type, image, notebook_type
                )
        Params:
            - **organization_id** (str): organization id
            - **job_definition_name** (str): training job definition name
            - **instance_type** (str): **[optional]** instance type (ex. cpu-1)
            - **image** (str): **[optional]** runtime environment (ex. abeja-inc/all-cpu:19.10)
            - **notebook_type** (str): **[optional]** notebook type (notebook or lab)
        Return type:
            dict
        Returns:
            Response Syntax:
                .. code-block:: python
                    {
                        "job_definition_id": "1234567890123",
                        "training_notebook_id": "1410000000000",
                        "name": "notebook-3",
                        "description": None,
                        "status": "Pending",
                        "status_message": None,
                        "instance_type": "cpu-1",
                        "image": "abeja-inc/all-cpu:18.10",
                        "creator": {
                            "updated_at": "2018-01-04T03:02:12Z",
                            "role": "admin",
                            "is_registered": True,
                            "id": "1122334455660",
                            "email": "test@abeja.asia",
                            "display_name": None,
                            "created_at": "2017-05-26T01:38:46Z"
                        },
                        "created_at": "2018-06-07T04:42:34.913644Z",
                        "modified_at": "2018-06-07T04:42:34.913726Z"
                    }
        Raises:
            - NotFound
            - BadRequest
            - Unauthorized: Authentication failed
            - InternalServerError
        """
        params = {}
        # Each optional value is included only when it passes enum validation;
        # to_enum() raises/returns falsy for unknown values.
        if instance_type is not None and InstanceType.to_enum(instance_type):
            params['instance_type'] = instance_type
        if image is not None and ImageType.to_enum(image):
            params['image'] = image
        if notebook_type is not None and NotebookType.to_enum(notebook_type):
            params['notebook_type'] = notebook_type
        path = '/organizations/{}/training/definitions/{}/notebooks'.format(
            organization_id, job_definition_name)
        return self._connection.api_request(
            method='POST', path=path, json=params)

    def get_notebooks(
            self,
            organization_id: str,
            job_definition_name: str) -> dict:
        """get notebooks.
        API reference: GET /organizations/{organization_id}/training/definitions/{job_definition_name}/notebooks
        Request Syntax:
            .. code-block:: python
                organization_id = "1410000000000"
                job_definition_name = "test_job_definition"
                response = api_client.get_notebooks(
                    organization_id, job_definition_name
                )
        Params:
            - **organization_id** (str): organization id
            - **job_definition_name** (str): training job definition name
        Return type:
            dict
        Returns:
            Response Syntax:
                .. code-block:: python
                    {
                        "total": 1,
                        "offset": 0,
                        "limit": 10,
                        "entries": [
                            {
                                "job_definition_id": "1234567890123",
                                "training_notebook_id": "1410000000000",
                                "name": "notebook-3",
                                "description": None,
                                "status": "Pending",
                                "status_message": None,
                                "instance_type": "cpu-1",
                                "image": "abeja-inc/all-cpu:18.10",
                                "creator": {
                                    "updated_at": "2018-01-04T03:02:12Z",
                                    "role": "admin",
                                    "is_registered": True,
                                    "id": "1122334455660",
                                    "email": "test@abeja.asia",
                                    "display_name": None,
                                    "created_at": "2017-05-26T01:38:46Z"
                                },
                                "created_at": "2018-06-07T04:42:34.913644Z",
                                "modified_at": "2018-06-07T04:42:34.913726Z"
                            }
                        ]
                    }
        Raises:
            - NotFound
            - Unauthorized: Authentication failed
            - InternalServerError
        """
        path = '/organizations/{}/training/definitions/{}/notebooks'.format(
            organization_id, job_definition_name)
        return self._connection.api_request(method='GET', path=path)

    def get_notebook(
            self,
            organization_id: str,
            job_definition_name: str,
            notebook_id: Optional[str] = None) -> dict:
        # FIX: annotation was `notebook_id: str=None` — a None default needs
        # Optional[str]; the default itself is kept for backward compatibility.
        """get a notebook.
        API reference: GET /organizations/{organization_id}/training/definitions/{job_definition_name}/notebooks/{notebook_id}
        Request Syntax:
            .. code-block:: python
                organization_id = "1410000000000"
                job_definition_name = "test_job_definition"
                notebook_id = "1230000000000"
                response = api_client.get_notebook(
                    organization_id, job_definition_name, notebook_id
                )
        Params:
            - **organization_id** (str): organization id
            - **job_definition_name** (str): training job definition name
            - **notebook_id** (str): notebook id
        Return type:
            dict
        Returns:
            Response Syntax:
                .. code-block:: python
                    {
                        "job_definition_id": "1234567890123",
                        "training_notebook_id": "1410000000000",
                        "name": "notebook-3",
                        "description": None,
                        "status": "Pending",
                        "status_message": None,
                        "instance_type": "cpu-1",
                        "image": "abeja-inc/all-cpu:18.10",
                        "creator": {
                            "updated_at": "2018-01-04T03:02:12Z",
                            "role": "admin",
                            "is_registered": True,
                            "id": "1122334455660",
                            "email": "test@abeja.asia",
                            "display_name": None,
                            "created_at": "2017-05-26T01:38:46Z"
                        },
                        "created_at": "2018-06-07T04:42:34.913644Z",
                        "modified_at": "2018-06-07T04:42:34.913726Z"
                    }
        Raises:
            - NotFound
            - Unauthorized: Authentication failed
            - InternalServerError
        """
        path = '/organizations/{}/training/definitions/{}/notebooks/{}'.format(
            organization_id, job_definition_name, notebook_id)
        return self._connection.api_request(method='GET', path=path)

    def update_notebook(
            self,
            organization_id: str,
            job_definition_name: str,
            notebook_id: str,
            instance_type: Optional[str] = None,
            image: Optional[str] = None,
            notebook_type: Optional[str] = None) -> dict:
        """update a notebook.
        API reference: PUT /organizations/{organization_id}/training/definitions/{job_definition_name}/notebooks/{notebook_id}
        Request Syntax:
            .. code-block:: python
                organization_id = "1410000000000"
                job_definition_name = "test_job_definition"
                notebook_id = "1230000000000"
                instance_type = 'cpu-1'
                image = 'abeja-inc/all-cpu:19.10'
                response = api_client.update_notebook(
                    organization_id, job_definition_name, notebook_id,
                    instance_type=instance_type, image=image
                )
        Params:
            - **organization_id** (str): organization id
            - **job_definition_name** (str): training job definition name
            - **notebook_id** (str): notebook id
            - **instance_type** (str): **[optional]** instance type (ex. cpu-1)
            - **image** (str): **[optional]** runtime environment (ex. abeja-inc/all-cpu:19.10)
            - **notebook_type** (str): **[optional]** notebook type (notebook or lab)
        Return type:
            dict
        Returns:
            Response Syntax:
                .. code-block:: python
                    {
                        "job_definition_id": "1234567890123",
                        "training_notebook_id": 0,
                        "name": "notebook-3",
                        "description": None,
                        "status": "Pending",
                        "status_message": None,
                        "instance_type": "cpu-1",
                        "image": "abeja-inc/all-cpu:18.10",
                        "creator": {
                            "updated_at": "2018-01-04T03:02:12Z",
                            "role": "admin",
                            "is_registered": True,
                            "id": "1122334455660",
                            "email": "test@abeja.asia",
                            "display_name": None,
                            "created_at": "2017-05-26T01:38:46Z"
                        },
                        "created_at": "2018-06-07T04:42:34.913644Z",
                        "modified_at": "2018-06-07T04:42:34.913726Z"
                    }
        Raises:
            - NotFound
            - BadRequest
            - Unauthorized: Authentication failed
            - InternalServerError
        """
        params = {}
        # Same enum-validated filtering as create_notebook.
        if instance_type is not None and InstanceType.to_enum(instance_type):
            params['instance_type'] = instance_type
        if image is not None and ImageType.to_enum(image):
            params['image'] = image
        if notebook_type is not None and NotebookType.to_enum(notebook_type):
            params['notebook_type'] = notebook_type
        path = '/organizations/{}/training/definitions/{}/notebooks/{}'.format(
            organization_id, job_definition_name, notebook_id)
        return self._connection.api_request(
            method='PUT', path=path, json=params)

    def delete_notebook(
            self,
            organization_id: str,
            job_definition_name: str,
            notebook_id: str) -> dict:
        """delete a notebook.
        API reference: DELETE /organizations/{organization_id}/training/definitions/{job_definition_name}/notebooks/{notebook_id}
        Request Syntax:
            .. code-block:: python
                organization_id = "1410000000000"
                job_definition_name = "test_job_definition"
                notebook_id = "1230000000000"
                response = api_client.delete_notebook(
                    organization_id, job_definition_name, notebook_id
                )
        Params:
            - **organization_id** (str): organization id
            - **job_definition_name** (str): training job definition name
            - **notebook_id** (str): notebook id
        Return type:
            dict
        Returns:
            Response Syntax:
                .. code-block:: python
                    {
                        "value": {
                            "message": "1111111111111 deleted"
                        }
                    }
        Raises:
            - NotFound
            - Unauthorized: Authentication failed
            - InternalServerError
        """
        path = '/organizations/{}/training/definitions/{}/notebooks/{}'.format(
            organization_id, job_definition_name, notebook_id)
        return self._connection.api_request(method='DELETE', path=path)

    def start_notebook(
            self,
            organization_id: str,
            job_definition_name: str,
            notebook_id: str,
            notebook_type: Optional[str] = None) -> dict:
        """start a notebook.
        API reference: POST /organizations/{organization_id}/training/definitions/{job_definition_name}/notebooks/{notebook_id}/start
        Request Syntax:
            .. code-block:: python
                organization_id = "1410000000000"
                job_definition_name = "test_job_definition"
                notebook_id = "1230000000000"
                response = api_client.start_notebook(
                    organization_id, job_definition_name, notebook_id
                )
        Params:
            - **organization_id** (str): organization id
            - **job_definition_name** (str): training job definition name
            - **notebook_id** (str): notebook id
            - **notebook_type** (str): **[optional]** notebook type (notebook or lab)
        Return type:
            dict
        Returns:
            Response Syntax:
                .. code-block:: python
                    {
                        "job_definition_id": "1234567890123",
                        "training_notebook_id": 0,
                        "name": "notebook-3",
                        "description": None,
                        "status": "Pending",
                        "status_message": None,
                        "instance_type": "cpu-1",
                        "image": "abeja-inc/all-cpu:18.10",
                        "creator": {
                            "updated_at": "2018-01-04T03:02:12Z",
                            "role": "admin",
                            "is_registered": True,
                            "id": "1122334455660",
                            "email": "test@abeja.asia",
                            "display_name": None,
                            "created_at": "2017-05-26T01:38:46Z"
                        },
                        "created_at": "2018-06-07T04:42:34.913644Z",
                        "modified_at": "2018-06-07T04:42:34.913726Z"
                    }
        Raises:
            - NotFound
            - Unauthorized: Authentication failed
            - InternalServerError
        """
        params = {}
        if notebook_type is not None and NotebookType.to_enum(notebook_type):
            params['notebook_type'] = notebook_type
        path = '/organizations/{}/training/definitions/{}/notebooks/{}/start'.format(
            organization_id, job_definition_name, notebook_id)
        return self._connection.api_request(
            method='POST', path=path, json=params)

    def stop_notebook(
            self,
            organization_id: str,
            job_definition_name: str,
            notebook_id: str) -> dict:
        """stop a notebook.
        API reference: POST /organizations/{organization_id}/training/definitions/{job_definition_name}/notebooks/{notebook_id}/stop
        Request Syntax:
            .. code-block:: python
                organization_id = "1410000000000"
                job_definition_name = "test_job_definition"
                notebook_id = "1230000000000"
                response = api_client.stop_notebook(
                    organization_id, job_definition_name, notebook_id
                )
        Params:
            - **organization_id** (str): organization id
            - **job_definition_name** (str): training job definition name
            - **notebook_id** (str): notebook id
        Return type:
            dict
        Returns:
            Response Syntax:
                .. code-block:: python
                    {
                        "job_definition_id": "1234567890123",
                        "training_notebook_id": 0,
                        "name": "notebook-3",
                        "description": None,
                        "status": "Pending",
                        "status_message": None,
                        "instance_type": "cpu-1",
                        "image": "abeja-inc/all-cpu:18.10",
                        "creator": {
                            "updated_at": "2018-01-04T03:02:12Z",
                            "role": "admin",
                            "is_registered": True,
                            "id": "1122334455660",
                            "email": "test@abeja.asia",
                            "display_name": None,
                            "created_at": "2017-05-26T01:38:46Z"
                        },
                        "created_at": "2018-06-07T04:42:34.913644Z",
                        "modified_at": "2018-06-07T04:42:34.913726Z"
                    }
        Raises:
            - NotFound
            - Unauthorized: Authentication failed
            - InternalServerError
        """
        # The stop endpoint takes no body; an empty JSON object is sent.
        path = '/organizations/{}/training/definitions/{}/notebooks/{}/stop'.format(
            organization_id, job_definition_name, notebook_id)
        return self._connection.api_request(method='POST', path=path, json={})

    def get_notebook_recent_logs(
            self,
            organization_id: str,
            job_definition_name: str,
            notebook_id: str,
            next_forward_token: Optional[str] = None,
            next_backward_token: Optional[str] = None,
    ) -> dict:
        """get recent logs of the notebook.
        API reference: GET /organizations/{organization_id}/training/definitions/{job_definition_name}/notebooks/{notebook_id}/recentlogs
        Request Syntax:
            .. code-block:: python
                organization_id = "1410000000000"
                job_definition_name = "test_job_definition"
                notebook_id = "1230000000000"
                response = api_client.get_notebook_recent_logs(
                    organization_id, job_definition_name, notebook_id
                )
        Params:
            - **organization_id** (str): organization id
            - **job_definition_name** (str): training job definition name
            - **notebook_id** (str): notebook id
            - **next_forward_token** (str): **[optional]** token for the next page of logs
            - **next_backward_token** (str): **[optional]** token for the next previous of logs
        Return type:
            dict
        Returns:
            Response Syntax:
                .. code-block:: python
                    {
                        "events": [
                            {
                                "message": "start executing model with abeja-runtime-python36 (version: 0.X.X)",
                                "timestamp": "2019-10-16T00:00:00.000Z"
                            }
                        ],
                        "next_backward_token": "...",
                        "next_forward_token": "..."
                    }
        Raises:
            - NotFound
            - Unauthorized: Authentication failed
            - InternalServerError
        """
        params = {}
        if next_forward_token:
            params['next_forward_token'] = next_forward_token
        if next_backward_token:
            params['next_backward_token'] = next_backward_token
        path = '/organizations/{}/training/definitions/{}/notebooks/{}/recentlogs'.format(
            organization_id, job_definition_name, notebook_id)
        return self._connection.api_request(
            method='GET', path=path, params=params)
| 20,935 | 5,960 |
import json
from io import BytesIO
from textwrap import dedent
from django.test import tag
from mock import ANY
from anymail.inbound import AnymailInboundMessage
from anymail.signals import AnymailInboundEvent
from anymail.webhooks.sendgrid import SendGridInboundWebhookView
from .utils import dedent_bytes, sample_image_content, sample_email_content
from .webhook_cases import WebhookTestCase
@tag('sendgrid')
class SendgridInboundTestCase(WebhookTestCase):
    """Tests for Anymail's SendGrid Inbound Parse webhook handling.

    Each test POSTs a (sanitized) SendGrid-style multipart/form payload to
    the inbound endpoint and checks the resulting AnymailInboundEvent /
    AnymailInboundMessage the handler receives.
    """

    def test_inbound_basics(self):
        """Default (non-raw) payload: headers, addresses, bodies, envelope,
        and spam fields are all mapped onto the parsed inbound message."""
        raw_event = {
            'headers': dedent("""\
                Received: from mail.example.org by mx987654321.sendgrid.net ...
                Received: by mail.example.org for <test@inbound.example.com> ...
                DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=example.org; ...
                MIME-Version: 1.0
                Received: by 10.10.1.71 with HTTP; Wed, 11 Oct 2017 18:31:04 -0700 (PDT)
                From: "Displayed From" <from+test@example.org>
                Date: Wed, 11 Oct 2017 18:31:04 -0700
                Message-ID: <CAEPk3R+4Zr@mail.example.org>
                Subject: Test subject
                To: "Test Inbound" <test@inbound.example.com>, other@example.com
                Cc: cc@example.com
                Content-Type: multipart/mixed; boundary="94eb2c115edcf35387055b61f849"
                """),
            'from': 'Displayed From <from+test@example.org>',
            'to': 'Test Inbound <test@inbound.example.com>, other@example.com',
            'subject': "Test subject",
            'text': "Test body plain",
            'html': "<div>Test body html</div>",
            'attachments': "0",
            'charsets': '{"to":"UTF-8","html":"UTF-8","subject":"UTF-8","from":"UTF-8","text":"UTF-8"}',
            'envelope': '{"to":["test@inbound.example.com"],"from":"envelope-from@example.org"}',
            'sender_ip': "10.10.1.71",
            'dkim': "{@example.org : pass}",  # yep, SendGrid uses not-exactly-json for this field
            'SPF': "pass",
            'spam_score': "1.7",
            'spam_report': 'Spam detection software, running on the system "mx987654321.sendgrid.net", '
                           'has identified this incoming email as possible spam...',
        }
        response = self.client.post('/anymail/sendgrid/inbound/', data=raw_event)
        self.assertEqual(response.status_code, 200)
        kwargs = self.assert_handler_called_once_with(self.inbound_handler, sender=SendGridInboundWebhookView,
                                                      event=ANY, esp_name='SendGrid')
        # AnymailInboundEvent
        event = kwargs['event']
        self.assertIsInstance(event, AnymailInboundEvent)
        self.assertEqual(event.event_type, 'inbound')
        self.assertIsNone(event.timestamp)
        self.assertIsNone(event.event_id)
        self.assertIsInstance(event.message, AnymailInboundMessage)
        self.assertEqual(event.esp_event.POST.dict(), raw_event)  # esp_event is a Django HttpRequest
        # AnymailInboundMessage - convenience properties
        message = event.message
        self.assertEqual(message.from_email.display_name, 'Displayed From')
        self.assertEqual(message.from_email.addr_spec, 'from+test@example.org')
        self.assertEqual([str(e) for e in message.to],
                         ['Test Inbound <test@inbound.example.com>', 'other@example.com'])
        self.assertEqual([str(e) for e in message.cc],
                         ['cc@example.com'])
        self.assertEqual(message.subject, 'Test subject')
        self.assertEqual(message.date.isoformat(" "), "2017-10-11 18:31:04-07:00")
        self.assertEqual(message.text, 'Test body plain')
        self.assertEqual(message.html, '<div>Test body html</div>')
        self.assertEqual(message.envelope_sender, 'envelope-from@example.org')
        self.assertEqual(message.envelope_recipient, 'test@inbound.example.com')
        self.assertIsNone(message.stripped_text)
        self.assertIsNone(message.stripped_html)
        self.assertIsNone(message.spam_detected)  # SendGrid doesn't give a simple yes/no; check the score yourself
        self.assertEqual(message.spam_score, 1.7)
        # AnymailInboundMessage - other headers
        self.assertEqual(message['Message-ID'], "<CAEPk3R+4Zr@mail.example.org>")
        self.assertEqual(message.get_all('Received'), [
            "from mail.example.org by mx987654321.sendgrid.net ...",
            "by mail.example.org for <test@inbound.example.com> ...",
            "by 10.10.1.71 with HTTP; Wed, 11 Oct 2017 18:31:04 -0700 (PDT)",
        ])

    def test_attachments(self):
        """Regular, inline (content-id) and message/rfc822 attachments are
        extracted from the numbered attachmentN form fields."""
        att1 = BytesIO('test attachment'.encode('utf-8'))
        att1.name = 'test.txt'
        image_content = sample_image_content()
        att2 = BytesIO(image_content)
        att2.name = 'image.png'
        email_content = sample_email_content()
        att3 = BytesIO(email_content)
        att3.content_type = 'message/rfc822; charset="us-ascii"'
        raw_event = {
            'headers': '',
            'attachments': '3',
            'attachment-info': json.dumps({
                "attachment3": {"filename": "", "name": "", "charset": "US-ASCII", "type": "message/rfc822"},
                "attachment2": {"filename": "image.png", "name": "image.png", "type": "image/png",
                                "content-id": "abc123"},
                "attachment1": {"filename": "test.txt", "name": "test.txt", "type": "text/plain"},
            }),
            'content-ids': '{"abc123": "attachment2"}',
            'attachment1': att1,
            'attachment2': att2,  # inline
            'attachment3': att3,
        }
        response = self.client.post('/anymail/sendgrid/inbound/', data=raw_event)
        self.assertEqual(response.status_code, 200)
        kwargs = self.assert_handler_called_once_with(self.inbound_handler, sender=SendGridInboundWebhookView,
                                                      event=ANY, esp_name='SendGrid')
        event = kwargs['event']
        message = event.message
        attachments = message.attachments  # AnymailInboundMessage convenience accessor
        self.assertEqual(len(attachments), 2)
        self.assertEqual(attachments[0].get_filename(), 'test.txt')
        self.assertEqual(attachments[0].get_content_type(), 'text/plain')
        self.assertEqual(attachments[0].get_content_text(), 'test attachment')
        self.assertEqual(attachments[1].get_content_type(), 'message/rfc822')
        self.assertEqualIgnoringHeaderFolding(attachments[1].get_content_bytes(), email_content)
        inlines = message.inline_attachments
        self.assertEqual(len(inlines), 1)
        inline = inlines['abc123']
        self.assertEqual(inline.get_filename(), 'image.png')
        self.assertEqual(inline.get_content_type(), 'image/png')
        self.assertEqual(inline.get_content_bytes(), image_content)

    def test_inbound_mime(self):
        """Raw-MIME payload (the full message in the 'email' field) is parsed
        with both alternative parts decoded from quoted-printable."""
        # SendGrid has an option to send the full, raw MIME message
        raw_event = {
            'email': dedent("""\
                From: A tester <test@example.org>
                Date: Thu, 12 Oct 2017 18:03:30 -0700
                Message-ID: <CAEPk3RKEx@mail.example.org>
                Subject: Raw MIME test
                To: test@inbound.example.com
                MIME-Version: 1.0
                Content-Type: multipart/alternative; boundary="94eb2c05e174adb140055b6339c5"
                --94eb2c05e174adb140055b6339c5
                Content-Type: text/plain; charset="UTF-8"
                Content-Transfer-Encoding: quoted-printable
                It's a body=E2=80=A6
                --94eb2c05e174adb140055b6339c5
                Content-Type: text/html; charset="UTF-8"
                Content-Transfer-Encoding: quoted-printable
                <div dir=3D"ltr">It's a body=E2=80=A6</div>
                --94eb2c05e174adb140055b6339c5--
                """),
            'from': 'A tester <test@example.org>',
            'to': 'test@inbound.example.com',
            'subject': "Raw MIME test",
            'charsets': '{"to":"UTF-8","subject":"UTF-8","from":"UTF-8"}',
            'envelope': '{"to":["test@inbound.example.com"],"from":"envelope-from@example.org"}',
            'sender_ip': "10.10.1.71",
            'dkim': "{@example.org : pass}",  # yep, SendGrid uses not-exactly-json for this field
            'SPF': "pass",
            'spam_score': "1.7",
            'spam_report': 'Spam detection software, running on the system "mx987654321.sendgrid.net", '
                           'has identified this incoming email as possible spam...',
        }
        response = self.client.post('/anymail/sendgrid/inbound/', data=raw_event)
        self.assertEqual(response.status_code, 200)
        kwargs = self.assert_handler_called_once_with(self.inbound_handler, sender=SendGridInboundWebhookView,
                                                      event=ANY, esp_name='SendGrid')
        event = kwargs['event']
        message = event.message
        self.assertEqual(message.envelope_sender, 'envelope-from@example.org')
        self.assertEqual(message.envelope_recipient, 'test@inbound.example.com')
        self.assertEqual(message.subject, 'Raw MIME test')
        self.assertEqual(message.text, "It's a body\N{HORIZONTAL ELLIPSIS}\n")
        self.assertEqual(message.html, """<div dir="ltr">It's a body\N{HORIZONTAL ELLIPSIS}</div>\n""")

    def test_inbound_charsets(self):
        """Per-field charsets declared in the 'charsets' field are honored
        when decoding the raw multipart form data."""
        # Captured (sanitized) from actual SendGrid inbound webhook payload 7/2020,
        # using a test message constructed with a variety of charsets:
        raw_post = dedent_bytes(b"""\
            --xYzZY
            Content-Disposition: form-data; name="headers"
            Date: Fri, 24 Jul 2020 16:43:46 UTC
            To: =?utf-8?q?R=C3=A9cipiendaire_pr=C3=A9cieux?= <inbound@sg.example.com>
            From: =?utf-8?q?Op=C3=A9rateur?= de test <sender@example.com>
            Subject: =?cp850?q?Como_usted_pidi=A2?=
            --xYzZY
            Content-Disposition: form-data; name="subject"
            Como usted pidi\xa2
            --xYzZY
            Content-Disposition: form-data; name="to"
            R\xc3\xa9cipiendaire pr\xc3\xa9cieux <inbound@sg.example.com>
            --xYzZY
            Content-Disposition: form-data; name="html"
            <p>\xbfEsto se ve como esperabas?</p>
            --xYzZY
            Content-Disposition: form-data; name="from"
            Op\xc3\xa9rateur de test <sender@example.com>
            --xYzZY
            Content-Disposition: form-data; name="text"
            Test the ESP\x92s inbound charset handling\x85
            --xYzZY
            Content-Disposition: form-data; name="charsets"
            {"to":"UTF-8","cc":"UTF-8","html":"iso-8859-1","subject":"cp850","from":"UTF-8","text":"windows-1252"}
            --xYzZY--
            """).replace(b"\n", b"\r\n")
        response = self.client.post('/anymail/sendgrid/inbound/', data=raw_post,
                                    content_type="multipart/form-data; boundary=xYzZY")
        self.assertEqual(response.status_code, 200)
        kwargs = self.assert_handler_called_once_with(self.inbound_handler, sender=SendGridInboundWebhookView,
                                                      event=ANY, esp_name='SendGrid')
        event = kwargs['event']
        message = event.message
        self.assertEqual(message.from_email.display_name, "Opérateur de test")
        self.assertEqual(message.from_email.addr_spec, "sender@example.com")
        self.assertEqual(len(message.to), 1)
        self.assertEqual(message.to[0].display_name, "Récipiendaire précieux")
        self.assertEqual(message.to[0].addr_spec, "inbound@sg.example.com")
        self.assertEqual(message.subject, "Como usted pidió")
        self.assertEqual(message.text, "Test the ESP’s inbound charset handling…")
        self.assertEqual(message.html, "<p>¿Esto se ve como esperabas?</p>")
| 12,033 | 3,939 |
# coding=utf-8
from random import shuffle

# Shuffle the word list and persist the randomized order.
with open("../data/words.txt") as src:
    words = src.read().splitlines()
shuffle(words)
with open("../data/random_words.txt", "w") as dst:
    dst.writelines("%s\n" % word for word in words)
import numpy as np
import math
def time_elapse_parser(time_elapsed):
    """Format a duration in seconds (e.g. time.time() - start) as 'H:MM:SS'."""
    minutes, seconds = divmod(time_elapsed, 60)
    hours, minutes = divmod(minutes, 60)
    # int() truncates toward zero, matching the old '%d' formatting of floats.
    return '{:d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds))
| 281 | 122 |
from commandChanVim import urwidView
from Commands.SystemCommands import systemCommands
from Frames.reddit.indexFrame import RedditIndexFrame
from Frames.fchan.indexFrame import IndexFrame
from Frames.defaultFrame import DefaultFrame
from customeTypes import SITE
import pytest
@pytest.fixture
def view():
    """Provide a fresh urwidView instance (test mode) for each test."""
    return urwidView(True)
# (command string, board names expected in the FCHAN boards config) pairs.
test_boards = [
    ('add 4chan /r9k/', ['/r9k/']),
    ('add 4chan /r9k/ /s4s/', ['/r9k/', '/s4s/'])
]
@pytest.mark.parametrize("test_input, expected", test_boards)
def test_addChan(test_input, expected, view):
    """Boards named in an 'add 4chan' command end up in the FCHAN board list."""
    systemCommands(test_input, view)
    boards = view.cfg.deep_get(SITE.FCHAN, 'boards')
    assert all(board in boards for board in expected)
# 'set' command strings: 3 tokens set a flat key, 4 tokens set a nested key.
test_set = [
    ('set test ahoy'),
    ('set REDDIT username test')
]
@pytest.mark.parametrize("test_input", test_set)
def test_setCommand(test_input, view):
    """A 'set' command writes a flat key (3 tokens) or a nested key (4 tokens)."""
    systemCommands(test_input, view)
    tokens = test_input.split()
    if len(tokens) == 3:
        assert view.cfg.get(tokens[1]) == tokens[2]
    else:
        assert view.cfg.deep_get(tokens[1], tokens[2]) == tokens[3]
# (command string, subreddit names expected in the REDDIT boards config) pairs.
test_subs = [
    ('add reddit linuxgaming', ['linuxgaming']),
    ('add reddit linuxgaming sysadmin', ['linuxgaming', 'sysadmin'])
]
@pytest.mark.parametrize("test_input, expected", test_subs)
def test_addReddit(test_input, expected, view):
    """Subreddits named in an 'add reddit' command end up in the REDDIT board list."""
    systemCommands(test_input, view)
    boards = view.cfg.deep_get(SITE.REDDIT, 'boards')
    assert all(sub in boards for sub in expected)
# (command string, [site enum, expected frame class]) pairs; an unrecognized
# command leaves the default frame in place.
test_views = [
    ('view reddit', [SITE.REDDIT, RedditIndexFrame]),
    ('view 4chan', [SITE.FCHAN, IndexFrame]),
    ('view too long', [None, DefaultFrame])
]
@pytest.mark.parametrize("test_input, expected", test_views)
def test_view(test_input, expected, view):
    """A 'view <site>' command swaps the focused frame to the matching class."""
    systemCommands(test_input, view)
    frame = view.currFocusView.frame
    assert type(frame) == expected[1]
| 1,839 | 659 |
#%%
import numpy as np
from itertools import repeat
from itertools import starmap
from scipy.stats import norm
class ABCer:
    """Approximate Bayesian Computation (ABC-SMC style) parameter fitter.

    Evolves a population of parameter particles over several generations,
    keeping the quarter of particles whose simulated output best matches
    ``observations`` and perturbing them to propose the next generation.
    """
    def __init__(self, iterations, particles, observations):
        # iterations: number of ABC generations; particles: population size;
        # observations: data array the model output is compared against.
        self.iterations = iterations
        self.particles = particles
        self.observations = observations
    def initialize_model(self, model):
        """Register the forward model: a callable mapping a parameter vector
        to simulated observations."""
        self.model = model
    def initialize_parameters(self, paras):
        """Set (and return) the initial parameter vector template."""
        self.parameters = paras
        return self.parameters
    def normalized_norm(self, x):
        """Relative error per particle, scaled to [0, 1] by the worst particle.

        # assumes x is shaped (particles, len(observations)) so the norm is
        # taken across the observation axis — TODO confirm at call sites.
        """
        diff_norm = np.linalg.norm(x / self.observations - 1, axis=1)
        max_err = np.nanmax(diff_norm)  # nanmax: ignore NaN-producing particles
        return diff_norm / max_err
    def purterbation(self, index, weight, para):
        """Resample surviving particles and perturb them with a Gaussian kernel.

        (Method name spelling — "perturbation" — kept for compatibility.)
        index: indices of fit particles; weight: previous-generation weights;
        para: previous-generation particle values. Returns the new normalized
        weights and the proposed particle values.
        """
        para_last_iteration = para[index]
        # Renormalize the weights of the surviving particles.
        weight_update = weight[index] / sum(weight[index])
        mean_para_last = np.sum(para_last_iteration * weight_update)
        var_para_last = np.sum(
            (para_last_iteration - mean_para_last)**2 * weight_update)
        # Weighted resampling of surviving particle indices.
        sample_index = np.random.choice(index, self.particles, p=weight_update)
        mean_sample_para = para[sample_index]
        # Gaussian perturbation kernel with twice the weighted variance.
        propose_para = np.random.normal(mean_sample_para,
                                        np.sqrt(2 * var_para_last))
        # NOTE(review): relies on `index` being sorted (np.where output) so
        # searchsorted maps each sampled index back to its weight slot.
        evolve_weight = weight_update[index.searchsorted(sample_index)]
        evolve_weight_denominator = np.sum(evolve_weight * norm.pdf(
            propose_para, mean_sample_para, np.sqrt(2 * var_para_last)))
        evolve_weight_numerator = norm.pdf(propose_para, mean_para_last,
                                           np.sqrt(2 * var_para_last))
        evolve_weight = evolve_weight_numerator / evolve_weight_denominator
        evolve_weight = evolve_weight / sum(evolve_weight)
        return evolve_weight, propose_para
    def ABC(self, prior_paras):
        """Run the ABC loop; prior_paras is a flat [low, high, low, high, ...]
        uniform-prior bound list, two entries per parameter.

        Returns a dict mapping parameter index -> (iterations+1, particles)
        array of particle values, plus a 'fitness' entry; returns None (after
        printing a usage message) when prior_paras has the wrong length.
        """
        # initialize the first iteration
        number_parameters = len(self.parameters)
        if len(prior_paras) != number_parameters * 2:
            return print(
                "Provide the corresponding length of the prior information of the parameters!"
            )
        para_each_iteration = np.tile(self.parameters, (self.particles, 1))
        for i in range(number_parameters):
            para_each_iteration[:, i] = np.random.uniform(
                prior_paras[2 * i], prior_paras[2 * i + 1],
                para_each_iteration.shape[0])
        # store parameter evolution
        disct_parameters = dict.fromkeys(range(number_parameters), [])
        for key, value in disct_parameters.items():
            l = np.zeros(shape=(self.iterations + 1, self.particles))
            l[0,:] = para_each_iteration[:,key]
            disct_parameters[key] = l
        # fitness
        fitness = np.zeros(shape=(self.iterations, self.particles))
        # weights
        disct_parameter_weights = dict.fromkeys(range(number_parameters), [])
        for key, value in disct_parameter_weights.items():
            l = np.zeros(self.particles)
            l.fill(1 / self.particles)
            disct_parameter_weights[key] = l
        for g in range(self.iterations):
            # Simulate every particle and score it against the observations.
            packed_para = [[para_each_iteration[i, :]]
                           for i in range(para_each_iteration.shape[0])]
            simulation_each_iter_list = list(starmap(self.model, packed_para))
            distance = self.normalized_norm(simulation_each_iter_list)
            fitness[g, :] = 1 - distance
            q5 = np.argsort(
                fitness[g, :])[-int(self.particles // 4)] # best 25%
            fit_index = np.where(fitness[g, :] > fitness[g, q5])[0]
            # NOTE(review): this progress line assumes exactly two parameters
            # (it prints columns 0 and 1 only).
            print('Mean estimates: parameters: %.3e ; %.3e ' %
                  (np.mean(para_each_iteration[fit_index, 0]),
                   np.mean(para_each_iteration[fit_index, 1])))
            for i in range(number_parameters):
                disct_parameter_weights[i], disct_parameters[i][
                    g + 1, :] = self.purterbation(fit_index,
                                                  disct_parameter_weights[i],
                                                  disct_parameters[i][g, :])
                para_each_iteration[:, i] = disct_parameters[i][g+1,:]
        disct_parameters['fitness'] = fitness
        # np.save(output, para_data)
        return disct_parameters
# test
#%%
if __name__ == '__main__':
    import matplotlib.pyplot as plt
    # Example
    # Demo: fit a two-parameter exponential-growth model to sample counts.
    def model_test(para, time_survey=np.arange(18)):
        # time_survey = np.arange(18)
        # y(t) = para[0] * exp(para[1] * t)
        y = para[0] * np.exp(para[1] * time_survey)
        return y
    y = model_test([1, 2])
    observations=np.array([1.0, 7.0,10.0,24.0,38.0,82.0,128.0,188.0,265.0,321.0,382.0,503.0,614.0,804.0,959.0,1135.0,1413.0,1705.0])
    time = np.arange(len(observations))
    # 100 generations, 10000 particles; uniform priors [0,1] and [1,2].
    test_ABC = ABCer(100, 10000, observations=observations)
    test_ABC.initialize_model(model_test)
    test_ABC.initialize_parameters([0.0, 1.0])
    test_list = test_ABC.ABC(prior_paras=[0.0, 1.0, 1.0, 2.0])
    # %%
    plt.plot(time,observations, 'o')
    para_inferred = []
    # Point estimates taken from generation 20 of the particle history.
    para_inferred.append(np.mean(test_list[0][20,:]))
    para_inferred.append(np.mean(test_list[1][20,:]))
    extend_time = np.arange(21)
    # Extrapolate the fitted model three steps past the observed range.
    y_inferred = model_test(para_inferred, np.arange(21))
    plt.plot(extend_time,y_inferred,'x',color = 'r')
    # %%
| 5,312 | 1,777 |
#!/usr/bin/python
# This file is used to test vertical operations
# e.g.
# llvm_add_2 is an invalid operation since llc/lli raise exceptions on SelectionDAG
import config
import os
# (fw, ir_func) pairs whose bitcode failed under lli are collected here.
failed = []
def change_makefile_source(sfrom, sto):
    # Swap the SOURCE variable in the Makefile in place via sed; returns
    # the os.system exit status.
    comm = "sed -i 's/SOURCE = {sfrom}/SOURCE = {sto}/' Makefile".format(
        sfrom=sfrom, sto=sto)
    return os.system(comm)
def claim_make_fail(fw, ir_func):
    # Report a `make` failure for this (fw, ir_func) combination.
    print "Make failed for {ir_func}::{fw}".format(fw=fw, ir_func=ir_func)
def claim_lli_fail(fw, ir_func):
    # Report an lli failure and remember the failing pair for the
    # summary printed at the end of the run.
    print "lli failed for {ir_func}::{fw}".format(fw=fw, ir_func=ir_func)
    failed.append((fw, ir_func))
def generate_teseter_cpp(fw, ir_func):
    # Emit tester.cpp from the minimal C++ template, instantiated for
    # this IR function/width combination.
    with open('tester.cpp', 'w') as tester:
        code = config.minimal_test_cpp.format(
            llvm_func=config.get_llvm_func(fw, ir_func))
        tester.write(code)
# Prepare Makefile
change_makefile_source('playground', 'tester')
# Build and run every non-banned (IR op, width) combination.
for ir_func in config.vertical_ir_set:
    for fw in config.fw_set:
        if (fw, ir_func) in config.banned_vertical_fw_ir_pairs:
            continue
        generate_teseter_cpp(fw, ir_func)
        if os.system("make with_ir_header >/dev/null") != 0:
            claim_make_fail(fw, ir_func)
            continue
        # Run the produced bitcode; lli stderr accumulates in lli.log.
        if os.system("lli optimized.bc >/dev/null 2>>lli.log") != 0:
            claim_lli_fail(fw, ir_func)
# Get back Makefile
change_makefile_source('tester', 'playground')
print "----------------------------All that failed---------------------------------"
print failed
# [(2, 'add'), (4, 'add'), (2, 'sub'), (4, 'sub'), (2, 'mul'), (4, 'mul'),
# (128, 'mul'), (2, 'and'), (4, 'and'), (2, 'or'), (4, 'or'), (2, 'xor'),
# (4, 'xor'), (2, 'icmp eq'), (4, 'icmp eq'), (2, 'icmp sgt'), (4, 'icmp sgt'),
# (2, 'icmp ugt'), (4, 'icmp ugt'), (2, 'icmp slt'), (4, 'icmp slt'), (2, 'icmp ult'),
# (4, 'icmp ult'), (2, 'shl'), (4, 'shl'), (64, 'shl'), (128, 'shl'), (2, 'lshr'),
# (4, 'lshr'), (64, 'lshr'), (128, 'lshr'), (2, 'ashr'), (4, 'ashr'), (128, 'ashr')] | 1,892 | 836 |
from django.conf.urls import patterns, url
from views import *
# Share-management URL routes (repos, links, folders, permissions).
# NOTE(review): django.conf.urls.patterns() was deprecated in Django 1.8
# and removed in 1.10 -- this module targets an older Django release.
urlpatterns = patterns('',
    url(r'^$', list_shared_repos, name='share_admin'),
    url(r'^links/$', list_shared_links, name='list_shared_links'),
    url(r'^folders/$', list_priv_shared_folders, name='list_priv_shared_folders'),
    url(r'^add/$', share_repo, name='share_repo'),
    url(r'^remove/$', repo_remove_share, name='repo_remove_share'),
    url(r'^ajax/link/remove/$', ajax_remove_shared_link, name='ajax_remove_shared_link'),
    url(r'^link/send/$', send_shared_link, name='send_shared_link'),
    url(r'^link/save/$', save_shared_link, name='save_shared_link'),
    url(r'^ajax/upload_link/remove/$', ajax_remove_shared_upload_link, name='ajax_remove_shared_upload_link'),
    url(r'^upload_link/send/$', send_shared_upload_link, name='send_shared_upload_link'),
    url(r'^permission_admin/$', share_permission_admin, name='share_permission_admin'),
    url(r'^ajax/repo_remove_share/$', ajax_repo_remove_share, name='ajax_repo_remove_share'),
    url(r'^ajax/get-download-link/$', ajax_get_download_link, name='ajax_get_download_link'),
    url(r'^ajax/get-upload-link/$', ajax_get_upload_link, name='ajax_get_upload_link'),
    url(r'^ajax/private-share-dir/$', ajax_private_share_dir, name='ajax_private_share_dir'),
    url(r'^ajax/get-link-audit-code/$', ajax_get_link_audit_code, name='ajax_get_link_audit_code'),
)
| 1,405 | 509 |
from pylagrit import PyLaGriT
# Build a 3x3x3 connected hex mesh on the unit cube, print its status,
# attach boundary facesets and export everything to ExodusII.
l = PyLaGriT()
m = l.create()
m.createpts_xyz((3,3,3),(0.,0.,0.),(1.,1.,1.),rz_switch=[1,1,1],connect=True)
m.status ()
m.status (brief=True)
fs = m.create_boundary_facesets(base_name='faceset_bounds')
m.dump_exo('cube.exo',facesets=fs.values())
| 276 | 128 |
from app import db
from app.models.group import Group
from app.models.page import Page, PageReadPermission
def get_read_permission_groups_by_page(page):
    """Return all groups holding read permission on *page*."""
    query = (
        db.session.query(Group)
        .join(PageReadPermission)
        .filter(PageReadPermission.page == page)
    )
    return query.all()
def add_page_group_read_permission(page, added_groups):
    """Grant read permission on *page* to every group in *added_groups*."""
    new_rows = []
    for group in added_groups:
        new_rows.append(PageReadPermission(page=page, group=group))
    db.session.add_all(new_rows)
    db.session.commit()
def delete_page_group_read_permission(page, removed_groups):
    """Revoke read permission on *page* from every group in *removed_groups*."""
    removed_ids = [group.id for group in removed_groups]
    query = db.session.query(PageReadPermission).filter(
        PageReadPermission.page == page,
        PageReadPermission.group_id.in_(removed_ids),
    )
    query.delete(synchronize_session='fetch')
    db.session.commit()
def get_page_by_path(path):
    """Return the first Page whose path equals *path*, or None."""
    query = db.session.query(Page).filter(Page.path == path)
    return query.first()
def delete_page(page):
    """Delete *page* and commit the transaction."""
    session = db.session
    session.delete(page)
    session.commit()
def delete_read_page_permission(page):
    """Remove every read-permission row attached to *page*."""
    query = db.session.query(PageReadPermission).filter(
        PageReadPermission.page_id == page.id
    )
    query.delete(synchronize_session='fetch')
    db.session.commit()
| 1,188 | 368 |
# (c) 2014-2015, Tomas Babej
| 29 | 22 |
# Demonstrate two str case transformations: swapcase() inverts the case
# of every letter; title() capitalizes the first letter of each word.
inp = "AjaY Kumar"
out = inp.swapcase()
print(out)
inp = "AjaY kumAR"
out = inp.title()
print(out)
import os
import sys
import h5py
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as ani
from cmap import *
from tensorflow.keras.models import load_model
# ----------------------------------------------------------------------
# Require the five positional CLI arguments before doing any work.
if len(sys.argv) < 6:
    print('Usage: %s pulse t0 t1 dt vmax' % sys.argv[0])
    print('Example: %s 92213 48.0 54.0 0.01 1.0' % sys.argv[0])
    # sys.exit(1) instead of the site-injected exit(): reports a nonzero
    # status on a usage error and works even under `python -S`.
    sys.exit(1)
# ----------------------------------------------------------------------
# Parse the five CLI arguments.
pulse = int(sys.argv[1])
print('pulse:', pulse)
t0 = float(sys.argv[2])
print('t0:', t0)
t1 = float(sys.argv[3])
print('t1:', t1)
dt = float(sys.argv[4])
print('dt:', dt)
# Number of decimals in dt -- reused to format times in titles/filenames.
digits = len(str(dt).split('.')[-1])
vmax = float(sys.argv[5])
print('vmax:', vmax)
fps = 15
# ----------------------------------------------------------------------
# Load the tomographic reconstruction for this pulse; negative values
# are clipped to zero and the scale converted to MW/m3 (divide by 1e6).
fname = 'bolo_data.h5'
print('Reading:', fname)
f = h5py.File(fname, 'r')
g = f[str(pulse)]
tomo = np.clip(g['tomo'][:], 0., None)/1e6
tomo_t = g['tomo_t'][:]
print('%-10s %-10s %-20s %-10s' % (pulse, 'tomo', tomo.shape, tomo.dtype))
print('%-10s %-10s %-20s %-10s' % (pulse, 'tomo_t', tomo_t.shape, tomo_t.dtype))
f.close()
# ----------------------------------------------------------------------
# Clamp the requested window to the data's available time range.
if t0 < tomo_t[0]:
    t0 = tomo_t[0]
    print('t0:', t0, '(overwrite)')
if t1 > tomo_t[-1]:
    t1 = tomo_t[-1]
    print('t1:', t1, '(overwrite)')
# ----------------------------------------------------------------------
# Pick, for each requested instant, the nearest available frame.
frames = []
frames_t = []
for t in np.arange(t0, t1, dt):
    i = np.argmin(np.fabs(tomo_t - t))
    frames.append(tomo[i])
    frames_t.append(tomo_t[i])
frames = np.array(frames)
frames_t = np.array(frames_t)
print('%-10s %-10s %-20s %-10s' % (pulse, 'frames', frames.shape, frames.dtype))
print('%-10s %-10s %-20s %-10s' % (pulse, 'frames_t', frames_t.shape, frames_t.dtype))
# ----------------------------------------------------------------------
path = 'movies'
if not os.path.exists(path):
    os.makedirs(path)
# ----------------------------------------------------------------------
fontsize = 'small'
# Plot extent in machine coordinates (m); the 0.02 offsets presumably
# pad to pixel edges -- TODO confirm against the reconstruction grid.
R0 = 1.708 - 2*0.02
R1 = 3.988 + 3*0.02
Z0 = -1.77 - 2*0.02
Z1 = +2.13 + 2*0.02
im = plt.imshow(frames[0], cmap=get_cmap(),
                vmin=0., vmax=vmax,
                extent=[R0, R1, Z0, Z1],
                interpolation='bilinear',
                animated=True)
# Colorbar: values above vmax are saturated, hence the '>=' last label.
ticks = np.linspace(0., vmax, num=5)
labels = ['%.2f' % t for t in ticks]
labels[-1] = r'$\geq$' + labels[-1]
cb = plt.colorbar(im, fraction=0.26, ticks=ticks)
cb.ax.set_yticklabels(labels, fontsize=fontsize)
cb.ax.set_ylabel('MW/m3', fontsize=fontsize)
fig = plt.gcf()
ax = plt.gca()
title = 'Pulse %s t=%.*fs' % (pulse, digits, frames_t[0])
ax.set_title(title, fontsize=fontsize)
ax.tick_params(labelsize=fontsize)
ax.set_xlabel('R (m)', fontsize=fontsize)
ax.set_ylabel('Z (m)', fontsize=fontsize)
ax.set_xlim([R0, R1])
ax.set_ylim([Z0, Z1])
plt.setp(ax.spines.values(), linewidth=0.1)
plt.tight_layout()
def animate(k):
    # Per-frame callback: update the title timestamp and the image data.
    title = 'Pulse %s t=%.*fs' % (pulse, digits, frames_t[k])
    ax.set_title(title, fontsize=fontsize)
    im.set_data(frames[k])
animation = ani.FuncAnimation(fig, animate, frames=range(frames.shape[0]))
# Output name encodes pulse number and the actual start/end times used.
fname = '%s/%s_%.*f_%.*f.mp4' % (path, pulse, digits, frames_t[0], digits, frames_t[-1])
print('Writing:', fname)
animation.save(fname, fps=fps, extra_args=['-vcodec', 'libx264'])
| 3,388 | 1,426 |
# Finding the odd numbers in a list
def encontra_impares(lista, lista_impares=None):
    """Return the odd numbers of *lista*, in their original order.

    Recursive implementation: *lista_impares* is the accumulator shared
    by the recursive calls; external callers normally omit it.
    """
    # None sentinel instead of a mutable default list, which would be
    # shared between top-level calls; `is None` instead of `== None`.
    if lista_impares is None:
        lista_impares = []
    if not lista:
        # Nothing (more) to scan -- return what was accumulated
        # (the empty list on a top-level empty call, as before).
        return lista_impares
    if lista[0] % 2 != 0:
        # append() replaces the roundabout extend([x]).
        lista_impares.append(lista[0])
    encontra_impares(lista[1:], lista_impares)
    return lista_impares
| 353 | 129 |
# -*- coding: utf-8 -*-
from github import Github
from github.Issue import Issue
import argparse
MD_HEAD = """## Gitblog
My personal blog using issues and GitHub Action
"""
# GitHub login of the blog owner; only issues authored by this user are
# rendered into the README.
ME_GITHUB_NAME = "gatsby101"
# How many issues are listed per label before the rest collapse into a
# <details> block.
ANCHOR_NUMBER = 5
# Issues carrying any of these labels appear in the TOP section.
TOP_ISSUES_LABELS = [
    "Top",
]
def isMe(issue):
    """Return True when *issue* was authored by the blog owner."""
    author = issue.user.login
    return author == ME_GITHUB_NAME
def format_time(time):
    """Render a datetime (or its string form) as 'YYYY-MM-DD'."""
    text = str(time)
    return text[:10]
def login(token):
    """Authenticate against the GitHub API with a personal access token."""
    return Github(token)
def get_repo(user: Github, repo: str):
    """Fetch the repository object named *repo* (e.g. 'owner/name')."""
    return user.get_repo(repo)
def get_top_issues(repo):
    """Return the issues carrying any of the TOP_ISSUES_LABELS labels."""
    return repo.get_issues(labels=TOP_ISSUES_LABELS)
def get_repo_labels(repo):
    """Return the repository's labels as a concrete list."""
    # list(...) replaces the identity comprehension [l for l in ...],
    # which also used the builtin-ambiguous name `l`.
    return list(repo.get_labels())
def get_issues_from_label(repo, label):
    """Return all issues of *repo* tagged with *label*."""
    return repo.get_issues(labels=(label,))
def add_issue_info(issue, md):
    """Append one '- [title](url)--YYYY-MM-DD' bullet line to *md*."""
    created = format_time(issue.created_at)
    md.write(f"- [{issue.title}]({issue.html_url})--{created}\n")
def add_md_top(repo, md):
    """Append the 'TOP' section (owner-authored issues with top labels)."""
    if not TOP_ISSUES_LABELS:
        # No top labels configured -> no section at all.
        return
    top_issues = get_top_issues(repo)
    with open(md, "a+", encoding="utf-8") as md:
        md.write("## TOP\n")
        for issue in top_issues:
            if isMe(issue):
                add_issue_info(issue, md)
def add_md_recent(repo, md):
    """Append a section with the repo's five newest owner-authored issues."""
    new_five_issues = repo.get_issues()[:5]
    with open(md, "a+", encoding="utf-8") as md:
        md.write("## Recently updated\n")
        for issue in new_five_issues:
            if isMe(issue):
                add_issue_info(issue, md)
def add_md_header(md):
    """(Re)create the README file, writing only the static MD_HEAD banner."""
    with open(md, "w", encoding="utf-8") as md:
        md.write(MD_HEAD)
def add_md_label(repo, md):
    """Append one README section per repository label.

    Issues are listed newest-first; after ANCHOR_NUMBER entries the
    remainder is wrapped in a collapsed <details> block.
    """
    labels = get_repo_labels(repo)
    with open(md, "a+", encoding="utf-8") as md:
        for label in labels:
            # we don't need add top label again
            if label.name in TOP_ISSUES_LABELS:
                continue
            issues = get_issues_from_label(repo, label)
            if issues.totalCount:
                md.write("## " + label.name + "\n")
            issues = sorted(issues, key=lambda x: x.created_at, reverse=True)
            # i counts only the owner's issues actually written.
            i = 0
            for issue in issues:
                if not issue:
                    continue
                if isMe(issue):
                    if i == ANCHOR_NUMBER:
                        # Start the collapsed overflow area.
                        md.write("<details><summary>More</summary>\n")
                        md.write("\n")
                    add_issue_info(issue, md)
                    i += 1
            if i > ANCHOR_NUMBER:
                md.write("</details>\n")
                md.write("\n")
def main(token):
    """Regenerate README.md from the blog repository's issues."""
    user = login(token)
    repo = get_repo(user, "gatsby101/gitblog")
    # Header first (truncates the file), then the TOP, recent and
    # per-label sections are appended in order.
    # (A stray `get_top_issues(repo)` call whose result was discarded
    # has been removed -- dead code.)
    add_md_header("README.md")
    add_md_top(repo, "README.md")
    add_md_recent(repo, "README.md")
    add_md_label(repo, "README.md")
if __name__ == "__main__":
    # CLI entry point: a GitHub token is the single positional argument.
    parser = argparse.ArgumentParser()
    parser.add_argument("github_token", help="github_token")
    options = parser.parse_args()
    main(options.github_token)
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 15 22:15:28 2021
@author: ANalundasan
For: OMSBA 5067, Lab 3
"""
import numpy as np
import matplotlib.pyplot as plt
import math
#################### STEP 1 - KNN Classifier #################################
# Toy dataset: column 0 is a row ID, columns 1-4 are categorical
# features, column 5 is the class label (values 1-3).
data = np.array([ [1, 1,1,1,1, 3],
                  [2, 1,1,1,2, 2],
                  [3, 1,1,2,1, 3],
                  [4, 1,1,2,2, 1],
                  [5, 1,2,1,1, 3],
                  [6, 1,2,1,2, 2],
                  [7, 1,2,2,1, 3],
                  [8, 1,2,2,2, 1],
                  [9, 2,1,1,1, 3],
                  [10, 2,1,1,2, 2],
                  [11, 2,1,2,1, 3],
                  [12, 2,1,2,2, 1],
                  [13, 2,2,1,1, 3],
                  [14, 2,2,1,2, 2],
                  [15, 2,2,2,1, 3],
                  [16, 2,2,2,2, 3],
                  [17, 3,1,1,1, 3],
                  [18, 3,1,1,2, 3],
                  [19, 3,1,2,1, 3],
                  [20, 3,1,2,2, 1],
                  [21, 3,2,1,1, 3],
                  [22, 3,2,1,2, 2],
                  [23, 3,2,2,1, 3],
                  [24, 3,2,2,2, 3]])
# 4 columns in the middle for Features
trainX = data[:, 1:5]
# first 19 rows for training data
# NOTE(review): trainY keeps ALL columns (ID + features + label), not
# just the label column, and trainX uses all 24 rows -- inconsistent
# with the 19/5 train/test split described; confirm intent.
trainY = data[0:19, :]
# last 5 rows for test data
testX = data[19:24, :]
# Demonstration of the three distance metrics between the first two
# training points. (The original loops referenced undefined names
# `ai`/`bi`/`something`, reset the accumulator on every pass, and called
# the nonexistent math.max -- it crashed with NameError/AttributeError.)
a = trainX[0].astype(float)
b = trainX[1].astype(float)
# L1: Manhattan Distance -- sum of absolute coordinate differences.
distance_L1 = np.sum(np.abs(a - b))
print('Manhattan distance is: ', distance_L1)
# L2: Euclidean Distance -- square root of the summed squared differences.
distance_L2 = np.sum((a - b) ** 2)
print('Euclidean distance is: ', math.sqrt(distance_L2))
# L-infinity: Chebyshev Distance -- largest absolute coordinate difference.
distance_Linf = np.max(np.abs(a - b))
print('Chebyshev distance is: ', distance_Linf)
def myKNN(trainX, trainY, testX, distance, K):
    """K-nearest-neighbour classifier.

    trainX   <- training input features, shape (n_train, n_features)
    trainY   <- training labels, length n_train
    testX    <- test input features, shape (n_test, n_features)
    distance <- distance metric: 1 (Manhattan), 2 (Euclidean),
                3 (Chebyshev)
    K        <- number of neighbours that vote

    Returns an array with one predicted label per row of testX.
    (The original body ignored its arguments, re-sliced the module-level
    `data` array and returned None -- an unfinished stub.)
    """
    trainX = np.asarray(trainX, dtype=float)
    trainY = np.asarray(trainY)
    testX = np.asarray(testX, dtype=float)
    predictions = []
    for x in testX:
        diff = trainX - x
        if distance == 1:
            dists = np.abs(diff).sum(axis=1)
        elif distance == 2:
            dists = np.sqrt((diff ** 2).sum(axis=1))
        else:  # 3 -> Chebyshev
            dists = np.abs(diff).max(axis=1)
        # Stable sort keeps distance ties deterministic (earlier training
        # rows win), then the K nearest labels vote by majority.
        nearest = trainY[np.argsort(dists, kind='stable')[:K]]
        labels, counts = np.unique(nearest, return_counts=True)
        predictions.append(labels[np.argmax(counts)])
    return np.asarray(predictions)
#################### STEP 2 - Decision Tree Toy Example ######################
# from sklearn.tree import DecisionTreeClassifier
# X = [[0, 0], [1, 1], [0, 1], [2, 2]]
# Y = [0, 1, 0, 1]
# clf = DecisionTreeClassifier()
# clf = clf.fit(X, Y)
# clf.predict([[1, 2]])
################# STEP 3 - Decision Tree With Larger Dataset #################
| 2,687 | 1,206 |
from __future__ import unicode_literals
from django.core.management.base import BaseCommand, CommandError
from gge_proxy_manager.methods import clean_duplicate_alliances, clean_duplicate_castles, clean_duplicate_players
from gge_proxy_manager.models import Castle, Player, Alliance
from django.utils.timezone import now, timedelta
# from pprint import pprint
class Command(BaseCommand):
    # Housekeeping command (Python 2): dedupes castles/players/alliances,
    # then purges any row not updated within the last 30 days.
    args = '<object object ...>'
    #help = 'Help text goes here'
    def handle(self, *args, **options):
        print "Clean duplicate castles..."
        clean_duplicate_castles()
        print "Clean duplicate players..."
        clean_duplicate_players()
        print "Clean duplicate alliances..."
        clean_duplicate_alliances()
        # Anything untouched for 30 days is considered stale.
        too_old = now() - timedelta(days=30)
        print "Clean old castles..."
        # NOTE(review): the delete-inside-list-comprehension idiom issues
        # one DELETE per row just to count them; queryset .delete() would
        # be cheaper.
        print "...removed %d castles." % len([c.delete() for c in Castle.objects.filter(updated__lte=too_old)])
        print "Clean old players..."
        print "...removed %d players." % len([p.delete() for p in Player.objects.filter(updated__lte=too_old)])
        print "Clean old alliances..."
        print "...removed %d alliances." % len([a.delete() for a in Alliance.objects.filter(updated__lte=too_old)])
# Generated by Django 2.2.9 on 2020-03-02 10:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Event table (title,
    # date/time window, venue address fields, description and an Action
    # Network signup link). Do not edit applied migrations by hand.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('date', models.DateField()),
                ('start_time', models.TimeField()),
                ('end_time', models.TimeField(blank=True, null=True)),
                ('address', models.CharField(max_length=30)),
                ('city', models.CharField(max_length=30)),
                ('state', models.CharField(max_length=2)),
                ('zip', models.IntegerField()),
                ('description', models.TextField()),
                ('actionnetwork_link', models.URLField()),
            ],
        ),
    ]
| 995 | 274 |
# __init__.py
from myfirst2021.studentclass import Student,Specialstudent
| 78 | 27 |
import os
import pathlib
import sqlite3
from bs4 import BeautifulSoup
import pymysql.cursors
from datetime import datetime
# Open the MySQL connection to the tcxs_store database.
conexao = pymysql.connect(host = 'localhost',
                          user = 'root',
                          password = '',
                          db = 'tcxs_store',
                          charset = 'utf8mb4',
                          cursorclass = pymysql.cursors.DictCursor)
# Initial variables: parse the locally saved listing page.
dados = open('base.html', 'r', encoding="utf-8").read()
dados= BeautifulSoup(dados, 'html5lib')
key_titulo = dados.find_all('h2', {'class':'titulo_jogo'})
key_desc = dados.find_all('p', {'class':'textoJogo'})
key_contentid = dados.find_all('a', href=True)
key_imagem = dados.find_all('img',{'class':'caixa_imagem'})
key_links = dados.find_all('a', href=True)
# Game titles: strip the h2 markup plus quotes and colons.
# NOTE(review): this string-split scraping is brittle -- any markup
# change silently shifts the fields; .text/attribute access would be
# safer.
titulos = []
for titulo in key_titulo:
    titulo = str(titulo).split('"titulo_jogo">')[1].split('</h')[0].replace("'","").replace('</h2>','').replace(':','')
    titulos.append(titulo)
    #print(titulo)
# Game descriptions: the <p class="textoJogo"> bodies.
descricoes = []
for desc in key_desc:
    desc = str(desc).split('textoJogo">')[1].replace('</p>','')
    descricoes.append(desc)
    #print(desc)
# Content IDs: taken from the .pkg file name in each download href;
# navigation links listed in `invalidar` are skipped.
ids = []
invalidar = ['index.php','psp.php','ps1.php','ps2.php','ps3.php','emuladores.php','https://tcxsproject.com.br/doadores/','https://tcxsproject.com.br/dev/ps3xploit.com/']
for id in key_contentid:
    id = id['href']
    if id in invalidar:
        pass
    else:
        try:
            id = id.split('/')[5].split('.pkg')[0]
            ids.append(id)
            #print(id)
        except:
            # href without the expected path depth: placeholder value.
            id = 'FALTA CONTENT_ID'
            ids.append(id)
            #print(id)
# Cover images: file name after the 'ps3/' path segment.
imagens = []
for imagem in key_imagem:
    imagem = str(imagem).split('ps3/')[1].split('"/>')[0].replace('" width="170','')
    imagens.append(imagem)
    print(imagem)
# Download links: every non-navigation href.
links = []
invalidar = ['index.php','psp.php','ps1.php','ps2.php','ps3.php','emuladores.php','https://tcxsproject.com.br/doadores/','https://tcxsproject.com.br/dev/ps3xploit.com/']
for link in key_links:
    link = link['href']
    if link in invalidar:
        #print(f'Pulando o {link}')
        pass
    else:
        links.append(link)
        #print(f'gravando o {link}')
print(len(titulos), len(descricoes), len(imagens), len(links))
dicionario_jogos = list(zip(list(titulos), list(imagens), list(links)))#--
#print(dicionario_jogos)
# Timestamp recorded in the `cadastro` column.
now = datetime.now()
hoje = now.strftime('%Y-%m-%d %H:%M:%S')
# Persist the scraped game into `playstation_ps3`, padding the 30 link
# columns with '---' when fewer links were found.
#
# This single branch replaces sixteen nearly identical copy-pasted
# `if len(links) == N:` blocks (N from 30 down to 15), whose only
# difference was how many trailing columns were '---'. Counts of 14 and
# below are still handled by the legacy branches that follow this block.
if 15 <= len(links) <= 30:
    print(f'==== {len(links)} LINKS ENCONTRADOS ======')
    print(f'Titulo: {titulos[0]}')
    print(f'Descrição: {descricoes[0]}')
    print(f'ContentID: {ids[0]}')
    print(f'Link:{links[0:]}')
    # Pad the link list out to exactly 30 entries, as the old branches did.
    padded = list(links[:30]) + ['---'] * (30 - len(links))
    colunas = (['titulo', 'descricao', 'content_id', 'imagem', 'cadastro']
               + ['link%d' % n for n in range(1, 31)])
    valores = [titulos[0], descricoes[0], ids[0], imagens[0], hoje] + padded
    sql = 'INSERT INTO playstation_ps3 (%s) VALUES (%s)' % (
        ', '.join(colunas), ', '.join(['%s'] * len(colunas)))
    with conexao.cursor() as cursor:
        # Parameterized query: the original interpolated scraped text
        # straight into the SQL string, which broke on quotes and was
        # open to SQL injection.
        cursor.execute(sql, valores)
    conexao.commit()
    conexao.close()
if len(links) == 14:
print('==== 14 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','{links[4]}','{links[5]}','{links[6]}',
'{links[7]}','{links[8]}','{links[9]}','{links[10]}','{links[11]}','{links[12]}','{links[13]}',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 13:
print('==== 13 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','{links[4]}','{links[5]}','{links[6]}',
'{links[7]}','{links[8]}','{links[9]}','{links[10]}','{links[11]}','{links[12]}','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 12:
print('==== 12 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','{links[4]}','{links[5]}','{links[6]}',
'{links[7]}','{links[8]}','{links[9]}','{links[10]}','{links[11]}','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 11:
print('==== 11 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','{links[4]}','{links[5]}','{links[6]}',
'{links[7]}','{links[8]}','{links[9]}','{links[10]}','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 10:
print('==== 10 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','{links[4]}','{links[5]}','{links[6]}',
'{links[7]}','{links[8]}','{links[9]}','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 9:
print('==== 9 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','{links[4]}','{links[5]}','{links[6]}',
'{links[7]}','{links[8]}','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 8:
print('==== 8 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','{links[4]}','{links[5]}','{links[6]}',
'{links[7]}','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 7:
print('==== 7 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','{links[4]}','{links[5]}','{links[6]}',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 6:
print('==== 6 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','{links[4]}','{links[5]}','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 5:
print('==== 5 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','{links[4]}','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 4:
print('==== 4 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','{links[3]}','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 3:
print('==== 3 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','{links[2]}','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
if len(links) == 2:
print('==== 2 LINKS ENCONTRADOS ======')
print(f'Titulo: {titulos[0]}')
print(f'Descrição: {descricoes[0]}')
print(f'ContentID: {ids[0]}')
print(f'Link:{links[0:]}')
with conexao.cursor() as cursor:
tabela = f"""INSERT INTO playstation_ps3 (titulo,descricao,content_id,imagem,cadastro,
link1,link2,link3,link4,link5,link6,link7,link8,link9,link10,link11,link12,link13,link14,
link15,link16,link17,link18,link19,link20,link21,link22,link23,link24,link25,link26,link27,
link28,link29,link30) VALUES ('{titulos[0]}','{descricoes[0]}','{ids[0]}','{imagens[0]}','{hoje}',
'{links[0]}','{links[1]}','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---','---','---','---','---','---',
'---','---') """
cursor.execute(tabela)
conexao.commit()
conexao.close()
| 33,775 | 14,648 |
import json
def create_secret_data_from_config(config, server_ca, client_cert, client_key):
    """Assemble the secret payload for a MySQL client configuration.

    Produces a dict with the raw JSON config, an equivalent my.cnf-style
    ``[client]`` section, and the three PEM materials passed in.

    Args:
        config: mapping that must provide non-None values for host, user,
            password, db, ssl-ca, ssl-cert, ssl-key and ssl-mode.
        server_ca: server CA certificate (PEM text).
        client_cert: client certificate (PEM text).
        client_key: client private key (PEM text).

    Returns:
        dict mapping secret file names to their string contents.
    """
    required = ('host', 'user', 'password', 'db', 'ssl-ca', 'ssl-cert',
                'ssl-key', 'ssl-mode')
    assert all(config.get(key) is not None for key in required), config.keys()
    cnf_lines = [
        '[client]',
        f'host={config["host"]}',
        f'user={config["user"]}',
        f'password="{config["password"]}"',
        f'database={config["db"]}',
        f'ssl-ca={config["ssl-ca"]}',
        f'ssl-cert={config["ssl-cert"]}',
        f'ssl-key={config["ssl-key"]}',
        f'ssl-mode={config["ssl-mode"]}',
    ]
    return {
        'sql-config.json': json.dumps(config),
        'sql-config.cnf': '\n'.join(cnf_lines) + '\n',
        'server-ca.pem': server_ca,
        'client-cert.pem': client_cert,
        'client-key.pem': client_key,
    }
| 792 | 277 |
# Copyright(c) 2019-2020 Intel Corporation All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# pylint: disable-all
import csv
import numpy as np
import os
class MCGCWeightsGen:
    """Convert MC/GC synapse weight CSV dumps into dense numpy matrices.

    Reads two CSV files (rows of ``coreIdx,gcIdx,mcIdx,weight,delay``) and
    saves three .npy files in the current working directory:

    * ``e2iWgtMat.npy`` -- excitatory (GC -> MC) weights indexed as
      ``[delay - minDelay, global gcIdx, global mcIdx]``.
    * ``i2eWgtMat.npy`` / ``i2eDlyMat.npy`` -- inhibitory (MC -> GC) weights
      and delays indexed as ``[box, core, mc, gc]``, where box 0 holds
      positive weights and box 1 the rest.

    A ``ValueError`` is raised when two CSV rows map to the same matrix cell.
    """

    def __init__(self, inhToExcWgtsFile="inhToExcWgtsFile.txt",
                 excToInhWgtsFile="excToInhWgtsFile.txt",
                 numCores=72, numGCPerCore=46, numMCPerCore=1, numDelays=4,
                 minDelay=16):
        # Input files are resolved relative to this module, not the cwd.
        dir_path = os.path.dirname(os.path.abspath(__file__))
        self.i2eWgtsFile = os.path.join(dir_path, inhToExcWgtsFile)
        self.e2iWgtsFile = os.path.join(dir_path, excToInhWgtsFile)
        self.numCores = numCores
        self.numGCPerCore = numGCPerCore
        self.numMCPerCore = numMCPerCore
        self.numDelays = numDelays
        self.minDelay = minDelay
        # Conversion runs eagerly at construction time.
        self.saveWgtsE2I()
        self.saveWgtsAndDelaysI2E()

    @property
    def numGC(self):
        """Total granule-cell count across all cores."""
        return self.numCores * self.numGCPerCore

    @property
    def numMC(self):
        """Total mitral-cell count across all cores."""
        return self.numCores * self.numMCPerCore

    def saveWgtsE2I(self):
        """Read the excitatory CSV and save the dense weight matrix.

        Raises:
            ValueError: if a matrix cell would be written twice.
        """
        e2iWgtMat = np.zeros((self.numDelays, self.numGC, self.numMC))
        print(e2iWgtMat.shape)
        with open(self.e2iWgtsFile) as e2iFile:
            for row in csv.reader(e2iFile, delimiter=','):
                coreIdx, gcIdx, mcIdx, wgt, dly = (int(item) for item in row)
                # Convert per-core indices into global matrix indices.
                gcIdx = coreIdx * self.numGCPerCore + gcIdx
                mcIdx = self.numMCPerCore * mcIdx
                dlyIdx = dly - self.minDelay
                if e2iWgtMat[dlyIdx, gcIdx, mcIdx] != 0:
                    raise ValueError("Duplicate weights")
                if wgt != 0:
                    e2iWgtMat[dlyIdx, gcIdx, mcIdx] = wgt
        np.save("e2iWgtMat", e2iWgtMat)

    def saveWgtsAndDelaysI2E(self):
        """Read the inhibitory CSV and save weight and delay matrices.

        Raises:
            ValueError: if a matrix cell would be written twice.
        """
        i2eWgtMat = np.zeros((2, self.numCores, self.numMCPerCore,
                              self.numGCPerCore))
        i2eDlyMat = np.zeros(i2eWgtMat.shape)
        print(i2eWgtMat.shape)
        with open(self.i2eWgtsFile) as i2eFile:
            for row in csv.reader(i2eFile, delimiter=','):
                coreIdx, gcIdx, mcIdx, wgt, dly = (int(item) for item in row)
                # Box 0 stores positive weights, box 1 everything else.
                boxIdx = 0 if wgt > 0 else 1
                if i2eWgtMat[boxIdx, coreIdx, mcIdx, gcIdx] != 0:
                    # BUG FIX: the duplicate report previously indexed the
                    # matrix without boxIdx, printing a whole sub-array
                    # instead of the conflicting scalar weight.
                    raise ValueError("Duplicate weights for core, gc , mc",
                                     coreIdx, gcIdx, mcIdx,
                                     i2eWgtMat[boxIdx, coreIdx, mcIdx, gcIdx])
                if wgt != 0:
                    i2eWgtMat[boxIdx, coreIdx, mcIdx, gcIdx] = wgt
                    i2eDlyMat[boxIdx, coreIdx, mcIdx, gcIdx] = dly
        np.save("i2eWgtMat", i2eWgtMat)
        np.save("i2eDlyMat", i2eDlyMat)
# Script entry point: constructing the generator eagerly reads both CSV
# inputs and writes the three .npy outputs (see MCGCWeightsGen.__init__).
if __name__ == '__main__':
    wgen = MCGCWeightsGen()
| 4,440 | 1,654 |
from aiohttp.web_urldispatcher import View
from server.models import User, Bike, Reservation
from server.models.user import UserType
from server.permissions.permission import RoutePermissionError, Permission
from server.service.access.users import get_user
from server.service.verify_token import verify_token, TokenVerificationError
class UserIsAdmin(Permission):
    """Asserts that a given user is an admin."""

    async def __call__(self, view: View, user: User = None, **kwargs):
        """Raise RoutePermissionError unless the request carries an admin's token.

        When the supplied token does not belong to ``user`` (an admin is
        fetching someone else's details), the caller's own record is looked
        up from the token before the type check.
        """
        if user is None:
            raise RoutePermissionError("User does not exist.")
        if "token" not in view.request:
            raise RoutePermissionError("No admin firebase jwt was included in the Authorization header.")
        if view.request["token"] != user.firebase_id:
            # an admin is fetching a user's details; we need to get the admin's details
            user = await get_user(firebase_id=view.request["token"])
        # Readability fix: the original condition was the double negative
        # `not user.type is not UserType.USER`, which reduces to exactly
        # `user.type is UserType.USER`. Plain users are rejected; any
        # elevated type passes.
        # NOTE(review): this check is applied on both the matching- and
        # mismatching-token paths — confirm against the original nesting.
        if user is None or user.type is UserType.USER:
            raise RoutePermissionError("The supplied token doesn't have admin rights.")

    @property
    def openapi_security(self):
        """OpenAPI security requirement advertised for this permission."""
        return [{"FirebaseToken": ["admin"]}]
class UserOwnsReservation(Permission):
    """Assert that a user owns the given reservation."""

    async def __call__(self, view: View, user: User = None, reservation: Reservation = None, **kwargs):
        """Reject the request when the reservation belongs to a different user."""
        if reservation.user_id != user.id:
            raise RoutePermissionError("The supplied token did not make this reservation.")
class UserIsRentingBike(Permission):
    """Asserts that the given user is renting the given bike."""

    async def __call__(self, view: View, user: User = None, bike: Bike = None, **kwargs):
        """Reject the request unless the rental manager shows an active rental."""
        has_active_rental = view.rental_manager.is_renting(user.id, bike.id)
        if not has_active_rental:
            raise RoutePermissionError("The supplied token does not have an active rental for this bike.")

    @property
    def openapi_security(self):
        """OpenAPI security requirement advertised for this permission."""
        return [{"FirebaseToken": ["renting_user"]}]
class UserMatchesToken(Permission):
    """Asserts that the given user matches the firebase id."""

    async def __call__(self, view: View, user: User = None, **kwargs):
        """Reject the request unless its token's firebase id is the user's own."""
        if "token" not in view.request:
            raise RoutePermissionError("No firebase jwt was included in the Authorization header.")
        token = view.request["token"]
        if user.firebase_id != token:
            raise RoutePermissionError("The supplied token doesn't have access to this resource.")

    @property
    def openapi_security(self):
        """OpenAPI security requirement advertised for this permission."""
        return [{"FirebaseToken": ["user"]}]
class UserCanPay(Permission):
    """Asserts that the given user has payment details on their account."""

    async def __call__(self, view: View, user: User = None, **kwargs):
        """Reject the request when the user has no payment method on file."""
        if user.can_pay:
            return
        raise RoutePermissionError("User does not have any payment details associated with their account.")

    @property
    def openapi_security(self):
        """OpenAPI security requirement advertised for this permission."""
        return [{"FirebaseToken": ["user"]}]
class ValidToken(Permission):
    """Asserts that the request has a valid firebase token."""

    async def __call__(self, view: View, **kwargs):
        """Verify the request's firebase token and cache it on the request."""
        try:
            verified = verify_token(view.request)
        except TokenVerificationError as error:
            raise RoutePermissionError(error.message)
        view.request["token"] = verified
| 3,363 | 897 |
import os
from setuptools import setup
def readme():
    """Load README.md for the long description; fall back to an empty string.

    IOError is caught (rather than OSError) because this setup.py still
    declares Python 2.7 support, where the two are distinct types.
    """
    try:
        with open("README.md") as handle:
            contents = handle.read()
    except IOError:
        contents = ""
    return contents
# Distribution metadata for the lang-trans transliteration package.
setup(
    name = "lang-trans",
    version = "0.6.0",
    author = "Abdelkrime Aries",
    author_email = "kariminfo0@gmail.com",
    description = ("Python transliteration library"),
    license = "Apache-2.0",
    keywords = "transliteration nlp languages romanization",
    url = "https://github.com/kariminf/lang-trans",
    packages=["lang_trans"],
    # README.md contents, or "" when the file is unreadable (see readme()).
    long_description=readme(),
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.6",
        "Topic :: Software Development :: Build Tools"
    ],
)
| 930 | 289 |
def removeElement(self, nums, val):
    """Remove every occurrence of val from nums in place.

    Surviving elements are compacted (in their original order) into the
    front of the list; trailing entries past the returned count are left
    unchanged.

    :type nums: List[int]
    :type val: int
    :rtype: int  (number of elements kept)
    """
    write = 0
    # Reads always stay at or ahead of writes, so iterating the list while
    # overwriting its prefix is safe.
    for element in nums:
        if element != val:
            nums[write] = element
            write += 1
    return write
| 315 | 101 |
import random

# Seed from OS entropy, then write 10000 rows of 50 random integers in
# [0, 1000]; each value is followed by one space (matching the original
# output format exactly), then a newline ends the row.
random.seed()
with open('datafile2.dat', 'w') as handle:
    for _ in range(10000):
        row = ''.join(str(random.randint(0, 1000)) + ' ' for _ in range(50))
        handle.write(row + '\n')
| 199 | 80 |
#!/usr/bin/env python
# Turn a two-column CSV into a javascript object.
import os, sys, csv
import argparse
import doctest
import string
def escape(escapee):
    """ Return a js-safe string.

    Uses the str.replace method instead of the string.replace module
    function, which was removed in Python 3; behavior is identical on
    Python 2.

    >>> print escape('Escape "THIS"')
    Escape \\"THIS\\"
    """
    return escapee.replace('"', '\\"')
def main(args):
    """ Loop through each filename, read the CSV and return a js object.
    >>> args = build_parser(['--verbose', 'csv/test.csv'])
    >>> print args.files[0]
    ['csv/test.csv']
    >>> main(args)
    Namespace(files=[['csv/test.csv']], verbose=True)
    matcher.lookup = {"Peyton Manning": "http://www.denverpost.com/peyton-manning"};
    """
    # NOTE: Python 2 only — uses `print` statements throughout.
    if args.verbose:
        print args
    for item in args.files[0]:
        c = "matcher.lookup = {"
        # NOTE(review): the file handle is never closed; a `with` block
        # would be safer but is left untouched here.
        f = open('%s' % item, 'rt')
        reader = csv.reader(f)
        for row in reader:
            # Skip the keys
            if reader.line_num == 1:
                continue
            c += '"%s": "%s",' % (escape(row[0]), escape(row[1]))
        # Delete the comma, which will be the last character
        # NOTE(review): on a header-only CSV no comma was added, so this
        # strips the opening "{" instead — confirm inputs always have rows.
        c = c[:-1]
        c += "};"
        # Send to STDOUT
        print c
def build_parser(args):
    """ A method to make arg parsing testable.
    >>> args = build_parser(['--verbose'])
    >>> print args.verbose
    True
    >>> print args.files[0]
    []
    """
    parser = argparse.ArgumentParser(
        usage='', description='Handle the options.', epilog='')
    parser.add_argument("-v", "--verbose", dest="verbose",
                        default=False, action="store_true")
    parser.add_argument("files", action="append", nargs="*")
    return parser.parse_args(args)
# Script entry point: parse argv, optionally run the module doctests
# (verbose mode only), then perform the CSV-to-JS conversion.
if __name__ == '__main__':
    args = build_parser(sys.argv[1:])
    if args.verbose:
        doctest.testmod(verbose=args.verbose)
    main(args)
| 1,914 | 601 |
import distutils.core
# Minimal distribution metadata for the py_starter package.
distutils.core.setup(
    name='py_starter',
    version='1.0',
    description='The very beginning of python code.',
    # BUG FIX: `author` was the bare name AChao, which raises NameError at
    # run time; it must be a string literal.
    author='AChao',
    # to be continued.
)
"""Test module for MFE class output details."""
import pytest
import sklearn.tree
import pymfe._internal as _internal
from pymfe.mfe import MFE
from tests.utils import load_xy
GNAME = "mfe-output-details"
class TestOutput:
    """TestClass dedicated to test MFE output details."""

    def test_output_lengths_1(self):
        """extract() must return exactly one name per value."""
        X, y = load_xy(0)
        values, names = MFE().fit(X=X.values, y=y.values).extract()
        assert len(values) == len(names)

    @pytest.mark.parametrize("dt_id, measure_time", [
        (0, "total"),
        (0, "total_summ"),
        (0, "avg"),
        (0, "avg_summ"),
        (2, "total"),
        (2, "total_summ"),
        (2, "avg"),
        (2, "avg_summ"),
    ])
    def test_output_lengths_2(self, dt_id, measure_time):
        """With time measurement enabled, values, names and times align."""
        X, y = load_xy(dt_id)
        extractor = MFE(measure_time=measure_time).fit(X=X.values, y=y.values)
        values, names, times = extractor.extract()
        assert len(values) == len(names) == len(times)

    def test_output_lengths_3(self):
        """Same pairing invariant with summary functions disabled."""
        X, y = load_xy(0)
        values, names = MFE(summary=None).fit(X=X.values, y=y.values).extract()
        assert len(values) == len(names)

    @pytest.mark.parametrize("dt_id, measure_time", [
        (0, "total"),
        (0, "total_summ"),
        (0, "avg"),
        (0, "avg_summ"),
        (2, "total"),
        (2, "total_summ"),
        (2, "avg"),
        (2, "avg_summ"),
    ])
    def test_output_lengths_4(self, dt_id, measure_time):
        """Alignment invariant with timing on and summaries disabled."""
        X, y = load_xy(dt_id)
        extractor = MFE(summary=None, measure_time=measure_time).fit(
            X=X.values, y=y.values)
        values, names, times = extractor.extract()
        assert len(values) == len(names) == len(times)

    def test_verbosity_2(self, capsys):
        """verbose=0 must print nothing at all."""
        X, y = load_xy(0)
        MFE().fit(X=X.values, y=y.values).extract(verbose=0)
        assert capsys.readouterr().out == ""

    @pytest.mark.parametrize("verbosity, msg_expected", [
        (0, False),
        (1, True),
        (2, True),
    ])
    def test_verbosity_3(self, verbosity, msg_expected, capsys):
        """Nonzero verbosity must produce console output."""
        X, y = load_xy(0)
        MFE().fit(X=X.values, y=y.values).extract(verbose=verbosity)
        out = capsys.readouterr().out
        assert out or not msg_expected

    @pytest.mark.parametrize("verbosity, msg_expected", [
        (0, False),
        (1, True),
    ])
    def test_verbosity_with_confidence(self, verbosity, msg_expected, capsys):
        """extract_with_confidence() honours the verbose flag."""
        X, y = load_xy(2)
        MFE().fit(X.values, y.values).extract_with_confidence(verbose=verbosity)
        out = capsys.readouterr().out
        assert bool(out) == msg_expected

    @pytest.mark.parametrize("verbosity, msg_expected", [
        (0, False),
        (1, True),
    ])
    def test_verbosity_from_model(self, verbosity, msg_expected, capsys):
        """extract_from_model() honours the verbose flag."""
        X, y = load_xy(2)
        model = sklearn.tree.DecisionTreeClassifier().fit(X.values, y.values)
        MFE().extract_from_model(model, verbose=verbosity)
        out = capsys.readouterr().out
        assert bool(out) == msg_expected
| 3,231 | 1,166 |
from django.views.generic.base import TemplateView
class PrivacyPolicyView(TemplateView):
    """Static page that renders the privacy-policy template."""
    template_name = 'legal/privacy_policy.html'
class TermsOfServiceView(TemplateView):
    """Static page that renders the terms-of-service template."""
    template_name = 'legal/terms_of_service.html'
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.forms import ModelForm
from models import SCUser,Post
from django import forms
class modifyUser(ModelForm):
    """ModelForm exposing the editable profile fields of SCUser."""
    class Meta:
        model = SCUser
        fields = ['first_name','last_name','username','email','birth_date','password','address','user_bio','profile_image']
class insertFile(ModelForm):
    """ModelForm covering the file and content fields of Post."""
    class Meta:
        model = Post
        fields = ['file','content']
| 436 | 128 |
#
# PySNMP MIB module CISCO-IP-NW-DISCOVERY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-IP-NW-DISCOVERY-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:02:10 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
FcNameId, = mibBuilder.importSymbols("CISCO-ST-TC", "FcNameId")
InterfaceIndex, InterfaceIndexOrZero = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex", "InterfaceIndexOrZero")
InetAddressType, InetAddress, InetPortNumber = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress", "InetPortNumber")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
ModuleIdentity, IpAddress, NotificationType, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Gauge32, Counter64, MibIdentifier, TimeTicks, Bits, ObjectIdentity, Integer32, iso = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "IpAddress", "NotificationType", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Gauge32", "Counter64", "MibIdentifier", "TimeTicks", "Bits", "ObjectIdentity", "Integer32", "iso")
TruthValue, DisplayString, TextualConvention, TestAndIncr = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "DisplayString", "TextualConvention", "TestAndIncr")
# --- MODULE-IDENTITY: CISCO-IP-NW-DISCOVERY-MIB rooted at 1.3.6.1.4.1.9.9.434 ---
ciscoIpNetworkDiscoveryMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 434))
ciscoIpNetworkDiscoveryMIB.setRevisions(('2006-10-03 00:00', '2005-08-09 00:00',))
# setRevisionsDescriptions only exists in newer pysnmp builds, hence the version guard.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: ciscoIpNetworkDiscoveryMIB.setRevisionsDescriptions(('Updated with correct ciscoMgmt number for MODULE-IDENTITY.', 'Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoIpNetworkDiscoveryMIB.setLastUpdated('200610030000Z')
if mibBuilder.loadTexts: ciscoIpNetworkDiscoveryMIB.setOrganization('Cisco Systems Inc.')
if mibBuilder.loadTexts: ciscoIpNetworkDiscoveryMIB.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553 -NETS E-mail: cs-san@cisco.com')
if mibBuilder.loadTexts: ciscoIpNetworkDiscoveryMIB.setDescription("MIB to provide the information about the disjoint IP networks connected to the various gigabit ethernet interfaces in the 'Fabric' and objects to discover the same. It is possible that multiple disjoint IP networks may terminate on a single fibre channel switch in a Fabric. This MIB provides ability to initiate, configure and show discovery results of the IP Networks in the fabric. GLOSSARY: Fabric - The set of physically connected fibre channel switches. WWN - World Wide Name.")
# --- Top-level OID subtrees: notifications (.0), objects (.1), conformance (.2) ---
cIpNetworkDiscoveryMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 434, 0))
cIpNetworkDiscoveryMIBObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 434, 1))
cIpNetworkDiscoveryConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 434, 2))
cIpNetworkDiscoveryConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1))
cIpNetworkDiscoveryInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2))
# --- Configuration scalars (cIpNetworkDiscoveryConfig subtree) ---
cIpNetworkAutomaticDiscovery = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 1), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cIpNetworkAutomaticDiscovery.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkAutomaticDiscovery.setDescription("This object indicates whether the automatic discovery to find the reachability on the IP networks is enabled or not in the system. If automatic discovery is not enabled in the system then the discovery operation can be initiated only manually by the user. The value of this object set to 'true' indicates the automatic discovery for IP network is enabled in the system. The value of this object set to 'false' indicates the automatic discovery for IP network is not enabled in the system.")
cIpNetworkDiscoveryDelay = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5184000)).clone(10)).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: cIpNetworkDiscoveryDelay.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkDiscoveryDelay.setDescription("This object indicates the time duration between the state change of an IP interface and the start of discovery operation of interface, if Automatic discovery is configured as 'true' in the cIpNetworkAutomaticDiscovery object. The value '0' indicates no delay.")
cIpNetworkDiscoveryTypeSpinLock = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 3), TestAndIncr()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cIpNetworkDiscoveryTypeSpinLock.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkDiscoveryTypeSpinLock.setDescription("This object is used to facilitate modification of cIpNetworkDiscoverType, cIpNetworkDiscoverPort objects by multiple managers. The procedure for modifying the above two objects (only if the 'cIpNetworkDiscoveryType' is set to tcp/udp) are as follows : 1. Determine the new values for cIpNetworkDiscoverType, cIpNetworkDiscoverPort objects. 2. Retrieve the value of 'cIpNetworkDiscoveryTypeSpinLock' object. 3. Set the value of 'cIpNetworkDiscoveryTypeSpinLock' object to the retrieved value, and (in the same PDU) the value of 'cIpNetworkDiscoveryType' to the new value. If the set fails for the 'cIpNetworkDiscoveryTypeSpinLock' object, go back to step 2. 4. Set the value of 'cIpNetworkDiscoveryTypeSpinLock' object to the retrieved value incremented by one, and (in the same PDU) the value of 'cIpNetworkDiscoveryPort' object to the new value. Note that if it's possible to include 'cIpNetworkDiscoveryPort' object in the (Set Request) PDU of an earlier step, then step 4 can be eliminated.")
cIpNetworkDiscoveryType = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("icmp", 1), ("tcp", 2), ("udp", 3))).clone('icmp')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cIpNetworkDiscoveryType.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkDiscoveryType.setDescription('This object indicates type of discovery packets to be sent for carrying out the IP network discovery.')
cIpNetworkDiscoveryPort = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 5), InetPortNumber()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cIpNetworkDiscoveryPort.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkDiscoveryPort.setDescription("This object indicates TCP/UDP port used for of IP network discovery operation, if the cIpNetworkDiscoveryType object is set to 'tcp' or 'udp'.")
cIpNetworkDiscoverySpinLock = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 6), TestAndIncr()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cIpNetworkDiscoverySpinLock.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkDiscoverySpinLock.setDescription("This object is used to facilitate modification of cIpNetworkGigEIfIndexToDiscover, cIpNetworkInetAddrTypeToDiscover, cIpNetworkGigEInetAddrToDiscover and cIpNetworkDiscoveryCommand objects by multiple managers. The procedure for modifying the above four objects are as follows: 1. For interface specific discovery, determine the new values for cIpNwGigEIfIndexToDiscover, cIpNetworkInetAddrTypeToDiscover, cIpNetworkGigEInetAddrToDiscover objects. 2. Retrieve the value of 'cIpNetworkDiscoverySpinLock' object. 3. Set the value of 'cIpNetworkDiscoverySpinLock' object to the retrieved value, and (in the same PDU) the value of 'cIpNetworkGigEIfIndexToDiscover' to the new value. If the set fails for the 'cIpNetworkDiscoverySpinLock' object, go back to step 2. 4. Set the value of 'cIpNetworkDiscoverySpinLock' object to the retrieved value incremented by one, and (in the same PDU) the value of 'cIpNetworkInetAddrTypeToDiscover' object to the new value. 5. Set the value of 'cIpNetworkDiscoverySpinLock' object to the retrieved value incremented by two, and (in the same PDU) the value of 'cIpNetworkGigEInetAddrToDiscover' object to the new value. If the set fails for 'cIpNetworkDiscoverySpinLock' object go back to step 2. 6. Set the value of 'cIpNetworkDiscoverySpinLock' object to the retrieved value incremented by four, and (in the same PDU) the value of 'cIpNetworkDiscoveryCommand' object to 'interfaceSpecific'. If the set fails for 'cIpNetworkDiscoverySpinLock' object go back to step 2. Note that if it's possible to include 'cIpNetworkGigEInetAddrToDiscover' and/or 'cIpNetworkDiscoveryCommand' object in the (Set Request) PDU of an earlier step, then step 4 (or) 5 (or) 6 can be eliminated.")
cIpNetworkGigEIfIndexToDiscover = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 7), InterfaceIndexOrZero()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cIpNetworkGigEIfIndexToDiscover.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkGigEIfIndexToDiscover.setDescription("This object represents the 'ifIndex' of the specified gigabit ethernet interface for which the IP network discovery needs to be done incrementally. This object MUST be set to a valid value before or concurrently with setting the value of the 'cIpNetworkDiscoveryCommand' object to 'interfaceSpecific'.")
cIpNetworkInetAddrTypeToDiscover = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 8), InetAddressType().clone('ipv4')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cIpNetworkInetAddrTypeToDiscover.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkInetAddrTypeToDiscover.setDescription("This object represents the 'InetAddressType' of the address of the Gigabit ethernet port on which to perform the discovery. This object MUST be set to a valid value before or concurrently with setting the value of the 'cIpNetworkDiscoveryCommand' object to 'interfaceSpecific'.")
cIpNetworkGigEInetAddrToDiscover = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 9), InetAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cIpNetworkGigEInetAddrToDiscover.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkGigEInetAddrToDiscover.setDescription("This object represents the 'InetAddress' of the specified gigabit ethernet interface specified in the 'cIpNetworkGigEIfIndexToDiscover' for which the IP network discovery needs to be done incrementally. This object MUST be set to a valid value before or concurrently with setting the value of the 'cIpNetworkDiscoveryCommand' object to 'interfaceSpecific'.")
cIpNetworkDiscoveryCommand = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("all", 1), ("noOp", 2), ("interfaceSpecific", 3))).clone('noOp')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cIpNetworkDiscoveryCommand.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkDiscoveryCommand.setDescription("The IP network discovery command to be executed. Command Remarks all Run IP network discovery for all the gigabit ethernet interfaces in the fabric. noOp no operation will be performed. interfaceSpecific Run IP network discovery incrementally for the gigabit ethernet interface specified in the 'cIpNetworkGigEInterfaceToDiscover', 'cIpNetworkGigEInetAddrToDiscover' and 'cIpNetworkGigESwitchWWNToDiscover' objects. The value of this object when read is always 'noOp'.")
cIpNetworkDiscoveryCmdStatus = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("success", 1), ("none", 2), ("inProgress", 3), ("noGigEInterfaceIndexSpecified", 4), ("noGigEInetAddrSpecified", 5), ("noGigESwitchWWNSpecified", 6), ("invalidGigEInterfaceIndex", 7), ("invalidGigEInetAddrType", 8), ("invalidGigEInetAddr", 9), ("generalFailure", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cIpNetworkDiscoveryCmdStatus.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkDiscoveryCmdStatus.setDescription("The status of the last IP network discovery command operation. success (1) - IP network discovery operation completed successfully. none (2) - no IP network discovery operation is performed. inProgress (3) - discovery operation is in progress. noGigEInterfaceIndexSpecified (4) - 'ifIndex' of the gigabit eithernet interface is not specified, i.e., the value of 'cIpNetworkGigEInterfaceToDiscover' object is set to '0'. noGigEInetAddrSpecified (5) - InetAddress of the gigabit eithernet interface is not specified, i.e., the value of 'cIpNetworkpGigEInetAddrToDiscover' object is set to '0'. noGigESwitchWWNSpecified (6) - The Switch WWN of the gigabit eithernet interface is not specified, i.e., the value of 'cIpNetworkGigESwitchWWNToDiscover' object is set to '0'. invalidGigEInterfaceIndex (7) - 'ifIndex' of the gigabit ethernet interface specified in the 'cIpNetworkGigEInterfaceToDiscover' object is not valid. invalidGigEInetAddrType (8) - 'InetAddrType' of the gigabit ethernet interface specified in the 'cIpNetworkGigEInetAddrType' object is not valid. invalidGigEInetAddr (9) - 'InetAddr' of the gigabit ethernet interface specified in the 'cIpNetworkGigEInetAddr' object is not valid. generalFailure (10)- General IP network discovery operation Failure.")
# --- Discovery result tables (cIpNetworkDiscoveryInfo subtree) ---
cIpNetworkTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2, 1), )
if mibBuilder.loadTexts: cIpNetworkTable.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkTable.setDescription("Represents the discovered list of all IP networks connected to the gigabit ethernet interfaces in the fabric. The entries in this table are populated by the discovery process started either by the auto discovery mechanism or triggered by the user by the setting the cIpNetworkDiscoveryCommand object to 'all' or 'interfaceSpecific'.")
cIpNetworkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2, 1, 1), ).setIndexNames((0, "CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkIndex"))
if mibBuilder.loadTexts: cIpNetworkEntry.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkEntry.setDescription('An entry represents the information about an IP network connected to the gigabit ethernet interface in the fabric.')
cIpNetworkIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: cIpNetworkIndex.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkIndex.setDescription('An arbitrary integer value, greater than zero which uniquely identifies the IP Network discovered in the fabric.')
cIpNetworkSwitchWWN = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2, 1, 1, 2), FcNameId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cIpNetworkSwitchWWN.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkSwitchWWN.setDescription('This object represents the WWN of the switch which has created the corresponding entry in this table.')
cIpNetworkInterfaceTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2, 2), )
if mibBuilder.loadTexts: cIpNetworkInterfaceTable.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkInterfaceTable.setDescription('A table consisting of entries for all the gigabit ethernet interfaces, which are connected to the various IP networks in the Fabric. The members of the IP networks are dynamically discovered by the discovery process.')
cIpNetworkInterfaceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2, 2, 1), ).setIndexNames((0, "CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkIndex"), (0, "CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkGigEPortSwitchWWN"), (0, "CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkGigEPortIfIndex"), (0, "CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkGigEPortInetAddrType"), (0, "CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkGigEPortInetAddr"))
if mibBuilder.loadTexts: cIpNetworkInterfaceEntry.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkInterfaceEntry.setDescription("An entry (conceptual row) in the 'cIpNetworkInterfaceTable'. Each row represents a gigabit ethernet interface in the fabric and its IP network.")
cIpNetworkGigEPortSwitchWWN = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2, 2, 1, 1), FcNameId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8))
if mibBuilder.loadTexts: cIpNetworkGigEPortSwitchWWN.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkGigEPortSwitchWWN.setDescription("This object represents the World Wide Name (WWN) of the associated switch in the fabric, of the gigabit ethernet interface mentioned in corresponding instance of 'cIpNetworkGigEPortIfIndex' object.")
cIpNetworkGigEPortIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2, 2, 1, 2), InterfaceIndex())
if mibBuilder.loadTexts: cIpNetworkGigEPortIfIndex.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkGigEPortIfIndex.setDescription("This object represents the 'ifIndex' of the gigabit ethernet interface in a switch in the fabric. The World Wide Name (WWN) of the switch is mentioned in the corresponding instance of 'cIpNetworkGigEPortSwitchWWN'. The gigabit ethernet interface is connected to the IP network represented by the value of the corresponding instance of 'cIpNetworkIndex'.")
cIpNetworkGigEPortInetAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2, 2, 1, 3), InetAddressType())
if mibBuilder.loadTexts: cIpNetworkGigEPortInetAddrType.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkGigEPortInetAddrType.setDescription('This object represents the InetAddressType of the gigabit ethernet interface.')
cIpNetworkGigEPortInetAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 434, 1, 2, 2, 1, 4), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cIpNetworkGigEPortInetAddr.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkGigEPortInetAddr.setDescription("This object represents the InetAddress of the gigabit ethernet interface mentioned in the corresponding instance 'cIpNetworkGigEPortIfIndex' object. The address is of the type indicated by 'cIpNetworkGigEPortInetAddrType'. The gigabit ethernet interface is connected to the IP network represented by the value of the corresponding instance of 'cIpNetworkIndex'.")
# --- Conformance: compliance statement and object groups ---
cIpNetworkDiscoverCompliance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 434, 2, 1))
cIpNetworkDiscoveryMIBComp = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 434, 2, 1, 1)).setObjects(("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkDiscoveryInfoGroup"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkDiscoveryCfgGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cIpNetworkDiscoveryMIBComp = cIpNetworkDiscoveryMIBComp.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkDiscoveryMIBComp.setDescription('The compliance statement for entities which implement the CISCO-IP-NW-DISCOVERY-MIB.')
cIpNetworkDiscoveryMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 434, 2, 2))
cIpNetworkDiscoveryInfoGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 434, 2, 2, 1)).setObjects(("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkSwitchWWN"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkGigEPortInetAddr"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cIpNetworkDiscoveryInfoGroup = cIpNetworkDiscoveryInfoGroup.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkDiscoveryInfoGroup.setDescription('A Collection of objects for the IP networks discovered in the fabric and its associated gigabit ethernet interfaces details.')
cIpNetworkDiscoveryCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 434, 2, 2, 2)).setObjects(("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkAutomaticDiscovery"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkDiscoveryDelay"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkDiscoveryTypeSpinLock"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkDiscoveryType"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkDiscoveryPort"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkDiscoverySpinLock"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkGigEIfIndexToDiscover"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkInetAddrTypeToDiscover"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkGigEInetAddrToDiscover"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkDiscoveryCommand"), ("CISCO-IP-NW-DISCOVERY-MIB", "cIpNetworkDiscoveryCmdStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cIpNetworkDiscoveryCfgGroup = cIpNetworkDiscoveryCfgGroup.setStatus('current')
if mibBuilder.loadTexts: cIpNetworkDiscoveryCfgGroup.setDescription('A Collection of objects to be configured, for doing the IP network discovery.')
# Export every defined symbol so other MIB modules can import them by name.
mibBuilder.exportSymbols("CISCO-IP-NW-DISCOVERY-MIB", cIpNetworkGigEPortIfIndex=cIpNetworkGigEPortIfIndex, PYSNMP_MODULE_ID=ciscoIpNetworkDiscoveryMIB, cIpNetworkDiscoveryCommand=cIpNetworkDiscoveryCommand, cIpNetworkDiscoveryConfig=cIpNetworkDiscoveryConfig, cIpNetworkDiscoveryMIBComp=cIpNetworkDiscoveryMIBComp, cIpNetworkDiscoveryPort=cIpNetworkDiscoveryPort, cIpNetworkDiscoverCompliance=cIpNetworkDiscoverCompliance, cIpNetworkGigEPortInetAddr=cIpNetworkGigEPortInetAddr, cIpNetworkDiscoveryCmdStatus=cIpNetworkDiscoveryCmdStatus, cIpNetworkDiscoveryMIBObjs=cIpNetworkDiscoveryMIBObjs, cIpNetworkDiscoveryConform=cIpNetworkDiscoveryConform, cIpNetworkGigEPortInetAddrType=cIpNetworkGigEPortInetAddrType, cIpNetworkEntry=cIpNetworkEntry, cIpNetworkInterfaceEntry=cIpNetworkInterfaceEntry, cIpNetworkDiscoveryMIBGroups=cIpNetworkDiscoveryMIBGroups, cIpNetworkGigEPortSwitchWWN=cIpNetworkGigEPortSwitchWWN, cIpNetworkDiscoveryType=cIpNetworkDiscoveryType, cIpNetworkAutomaticDiscovery=cIpNetworkAutomaticDiscovery, ciscoIpNetworkDiscoveryMIB=ciscoIpNetworkDiscoveryMIB, cIpNetworkInetAddrTypeToDiscover=cIpNetworkInetAddrTypeToDiscover, cIpNetworkGigEInetAddrToDiscover=cIpNetworkGigEInetAddrToDiscover, cIpNetworkDiscoveryCfgGroup=cIpNetworkDiscoveryCfgGroup, cIpNetworkInterfaceTable=cIpNetworkInterfaceTable, cIpNetworkDiscoveryDelay=cIpNetworkDiscoveryDelay, cIpNetworkDiscoveryMIBNotifs=cIpNetworkDiscoveryMIBNotifs, cIpNetworkGigEIfIndexToDiscover=cIpNetworkGigEIfIndexToDiscover, cIpNetworkDiscoveryInfo=cIpNetworkDiscoveryInfo, cIpNetworkIndex=cIpNetworkIndex, cIpNetworkDiscoveryTypeSpinLock=cIpNetworkDiscoveryTypeSpinLock, cIpNetworkTable=cIpNetworkTable, cIpNetworkDiscoverySpinLock=cIpNetworkDiscoverySpinLock, cIpNetworkSwitchWWN=cIpNetworkSwitchWWN, cIpNetworkDiscoveryInfoGroup=cIpNetworkDiscoveryInfoGroup)
#
# Copyright 2020 Logical Clocks AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from hsfs.core import query, feature_group_base_engine
class FeatureGroupBase:
    """Common behavior shared by feature group classes.

    Provides metadata operations (delete, tagging) and query-building helpers
    over the group's features. Subclasses are expected to provide attributes
    such as ``_feature_store_name``, ``_feature_store_id`` and ``_features``
    used by the query helpers below.
    """

    def __init__(self, featurestore_id):
        # Engine handling backend REST calls for base operations (tags, ...).
        self._feature_group_base_engine = feature_group_base_engine.FeatureGroupBaseEngine(
            featurestore_id
        )

    def delete(self):
        """Drop the entire feature group along with its feature data.

        !!! danger "Potentially dangerous operation"
            This operation drops all metadata associated with **this version** of the
            feature group **and** all the feature data in offline and online storage
            associated with it.

        # Raises
            `RestAPIError`.
        """
        # NOTE(review): uses `_feature_group_engine`, not the
        # `_feature_group_base_engine` set in `__init__` -- presumably an
        # attribute provided by subclasses; confirm it is always defined.
        self._feature_group_engine.delete(self)

    def select_all(self):
        """Select all features in the feature group and return a query object.

        The query can be used to construct joins of feature groups or create a
        training dataset immediately.

        # Returns
            `Query`. A query object with all features of the feature group.
        """
        return query.Query(
            self._feature_store_name, self._feature_store_id, self, self._features
        )

    def select(self, features=None):
        """Select a subset of features of the feature group and return a query object.

        The query can be used to construct joins of feature groups or create a training
        dataset with a subset of features of the feature group.

        # Arguments
            features: list, optional. A list of `Feature` objects or feature names as
                strings to be selected. Defaults to `None`, which selects with an
                empty feature list.

        # Returns
            `Query`: A query object with the selected features of the feature group.
        """
        # Avoid the mutable-default-argument pitfall: normalize None to a
        # fresh empty list instead of sharing one list across calls.
        if features is None:
            features = []
        return query.Query(
            self._feature_store_name, self._feature_store_id, self, features
        )

    def add_tag(self, name: str, value: str = None):
        """Attach a name/value tag to a feature group.

        A tag can consist of a name only or a name/value pair. Tag names are
        unique identifiers.

        # Arguments
            name: Name of the tag to be added.
            value: Value of the tag to be added, defaults to `None`.

        # Raises
            `RestAPIError`.
        """
        self._feature_group_base_engine.add_tag(self, name, value)

    def delete_tag(self, name: str):
        """Delete a tag from a feature group.

        Tag names are unique identifiers.

        # Arguments
            name: Name of the tag to be removed.

        # Raises
            `RestAPIError`.
        """
        self._feature_group_base_engine.delete_tag(self, name)

    def get_tag(self, name: str = None):
        """Get the tags of a feature group.

        Tag names are unique identifiers. Returns all tags if no tag name is
        specified.

        # Arguments
            name: Name of the tag to get, defaults to `None`.

        # Returns
            `list[Tag]`. List of tags as name/value pairs.

        # Raises
            `RestAPIError`.
        """
        return self._feature_group_base_engine.get_tags(self, name)
| 3,701 | 980 |
# python libs
import copy
# rasmus libs
from rasmus import stats
from rasmus import util
# rasmus bio libs
from compbio import fasta
from compbio import regionlib
import compbio.regionlib
from compbio.regionlib import iter_chrom
# graphics libs
from summon.core import *
from summon import shapes
import summon
# globals
gene_label_types = [False, "fix", "scale", "vertical"]
#
# Bindings
#
BINDINGS_HELP = """
==============================
Synteny Visualization Keys
==============================
c toggle controls
l toggle gene labels
"""
def invcolor(c, alpha = 1):
    """Return the RGB complement of color *c* with the given *alpha*."""
    red, green, blue = c[0], c[1], c[2]
    return color(1 - red, 1 - green, 1 - blue, alpha)
# Palette of 25 semi-transparent colors (alpha .6) used to distinguish tracks:
# a coarse walk around the hue wheel followed by grey and darker tones.
light_colors = [color(1, 0, 0, .6),
                color(1, .2, 0, .6),
                color(1, .5, 0, .6),
                color(1, 1, 0, .6),
                color(.5, 1, 0, .6),
                color(.2, 1, 0, .6),
                color(0, 1, 0, .6),
                color(0, 1, .2, .6),
                color(0, 1, .5, .6),
                color(0, 1, 1, .6),
                color(0, .5, 1, .6),
                color(0, .2, 1, .6),
                color(0, 0, 1, .6),
                color(.2, 0, 1, .6),
                color(.5, 0, 1, .6),
                color(1, 0, 1, .6),
                color(1, 0, .5, .6),
                color(1, 0, .2, .6),
                color(.5, .5, .5, .6),
                color(.5, 0, 0, .6),
                color(.5, .5, 0, .6),
                color(0, .5, 0, .6),
                color(0 , .5, .5, .6),
                color(0, 0, .5, .6),
                color(.5, 0, .5, .6)
                ]
def draw_gene(vis, gene, direction, col):
    """Draw one gene as an arrow-shaped polygon with a line on its left edge."""
    length = gene.length()
    height = 1
    steep = .1
    # where the arrow head starts tapering; clamp so it never goes negative
    mid = max(length - (height * steep), 0)
    arrow = polygon(col,
                    0, 0,
                    mid, 0,
                    length, height/2,
                    mid, height,
                    0, height)
    left_edge = lines(0, 0, 0, height)
    return group(arrow, left_edge)
def draw_gene_box(vis, gene, direction, col):
    """Draw one gene as a plain rectangle with a line on its left edge."""
    width = gene.length()
    height = 1
    box = quads(col,
                0, 0,
                width, 0,
                width, height,
                0, height)
    left_edge = lines(0, 0, 0, height)
    return group(box, left_edge)
# determine which row gene is in (for color)
def gene_color_alt_species(vis, gene, eff_dir):
    """Pick a gene color from vis.colors based on effective strand direction.

    NOTE(review): `order` is hard-coded to 0, so the `gene2_*` branches below
    are currently unreachable -- looks like a placeholder for per-row color
    alternation; confirm before simplifying.
    """
    order = 0
    if eff_dir == 1:
        # forward strand
        if order % 2 == 0:
            col = vis.colors['gene_pos']
        else:
            col = vis.colors['gene2_pos']
    else:
        # reverse strand
        if order % 2 == 0:
            col = vis.colors['gene_neg']
        else:
            col = vis.colors['gene2_neg']
    return col
def effective_dir(direction, geneDirection):
    """Return 1 when both orientations agree (relative to +1), else -1."""
    same_orientation = (direction == 1) == (geneDirection == 1)
    return 1 if same_orientation else -1
#
# main visualization
#
class Frag:
    """Represents a fragment of DNA placed at an (x, y) drawing position."""

    def __init__(self, genome=None, chrom=None, start=0, end=0, strand=0,
                 x=0, y=0):
        # placement on the drawing canvas
        self.x = x
        self.y = y
        # genomic coordinates (inclusive on both ends)
        self.genome = genome
        self.chrom = chrom
        self.start = start
        self.end = end
        self.direction = strand
        # genes assigned to this fragment during layout
        self.genes = []

    def length(self):
        """Number of bases spanned, counting both endpoints."""
        return 1 + self.end - self.start
class Layout (object):
    """Placement of a drawn element: position (x, y) plus an orientation."""

    def __init__(self, x, y, orient):
        self.x, self.y, self.orient = x, y, orient
class SyntenyVisBase:
    def __init__(self, genomes, chroms, regions, blocks, orths,
                 rootid=None,
                 winsize=(800, 400),
                 # graphics
                 gene_label=lambda x: x.data["ID"],
                 draw_gene=draw_gene_box,
                 gene_color=gene_color_alt_species,
                 # misc options
                 fat_matches = True,
                 use_controls = False,
                 min_frag = 0,
                 show_gene_labels = False,
                 font_size = 8,
                 # layout
                 genome_sep = 3,
                 max_genome_sep = 6,
                 frag_sep = 1.5,
                 # colors
                 color_genome_div = color(0, 0, 0, .5),
                 color_gene_pos = color(1, .6, 0, .95),
                 color_gene_neg = color(1, .6, 0, .95),
                 color_gene2_pos = color(1, .2, 0, .95),
                 color_gene2_neg = color(1, .2, 0, .95),
                 color_matches = color(.8, .8, 1, .8),
                 color_arrow = color(1, .8, 0, .5),
                 color_frag = color(0, 0, 0, .8),
                 color_blocks = [color(.8,.8,1,.5)]):
        """Set up the synteny visualization state (no window is opened yet).

        genomes/chroms/regions/blocks/orths describe the data to draw;
        the remaining keyword arguments tune drawing callbacks, layout
        spacing and colors.  NOTE(review): the color defaults are evaluated
        once at definition time and shared across instances -- fine as long
        as color objects are treated as immutable.
        """
        # window/summon state; created lazily in show()
        self.win = None
        self.winsize = winsize
        self.rootid = rootid
        self.ref_genome = None
        self.frags = set()
        # summon group ids for controls, marks and labels drawn later
        self.controlids = []
        self.markids = []
        self.labelids = []
        self.groupid = 0
        self.visid = None
        # drawing callbacks
        self.gene_label = gene_label
        self.draw_gene = draw_gene
        self.gene_color = gene_color
        self.font_size = font_size
        self.region2frags = {}
        self.region_layout = {}
        self.genomes = genomes
        self.chroms = chroms
        # (species, seqname) -> chromosome record
        self.chroms_lookup = dict(((x.species, x.seqname), x) for x in chroms)
        self.db = regionlib.RegionDb(regions)
        self.blocks = blocks
        # filter orths for only regions we have in our db
        self.orths = [[x for x in orth
                       if self.db.has_region(x)]
                      for orth in orths]
        # make ortholog lookup
        self.orth_lookup = {}
        for orth in self.orths:
            for region in orth:
                self.orth_lookup[region] = orth
        # options
        self.fat_matches = fat_matches
        self.use_controls = use_controls
        self.min_frag = min_frag
        self.show_gene_labels = show_gene_labels
        # layout
        self.genome_sep = genome_sep
        self.max_genome_sep = max_genome_sep
        self.frag_sep = frag_sep
        # named color table used by the draw_* helpers
        self.colors = {
            "genome_div" : color_genome_div,
            "gene_pos" : color_gene_pos,
            "gene_neg" : color_gene_neg,
            "gene2_pos" : color_gene2_pos,
            "gene2_neg" : color_gene2_neg,
            "matches" : color_matches,
            "arrow" : color_arrow,
            "frag" : color_frag,
            "blocks" : color_blocks
            }
def show(self):
if self.win == None or not self.win.is_open():
self.win = summon.Window("synteny")
if self.rootid == None:
self.rootid = self.win.get_root()
if self.visid == None:
self.win.set_size(* self.winsize)
self.win.set_bgcolor(1,1,1)
self.win.set_binding(input_key("c"), self.toggle_controls)
self.win.set_binding(input_key("l"), self.toggle_labels)
self.visid = self.win.insert_group(self.rootid, group())
def home(self):
self.win.set_visible(*(self.win.get_root().get_bounding() + ("exact",)))
self.win.zoom(.9, .9)
def clear_drawing(self):
self.frags = set()
self.region_layout = {}
self.win.remove_group(self.visid)
self.clear_marks()
self.visid = self.win.insert_group(self.rootid, group())
def draw(self, genome, chrom, start, end, direction=1):
self.show()
self.clear_drawing()
self.win.insert_group(self.visid,
self.draw_chrom(genome, chrom,
start, end, direction=direction))
self.show_controls(self.use_controls)
'''
def drawAll(self, refGenomeName):
self.show()
self.clear_drawing()
y = 0
chroms = self.matching.genomes[refGenomeName].chroms.values()
chroms.sort(lambda a,b: cmp(b.size, a.size))
for chrom in chroms:
util.tic("drawing chrom %s" % chrom.name)
self.win.insert_group(self.visid, group(translate(0, y,
self.draw_chrom(refGenomeName, chrom.name, 0, chrom.size))))
self.show_controls(self.conf["use-controls"])
util.toc()
y -= self.conf['max-genome-sep'] * (len(self.matching.genomes) + 1)
'''
    def draw_chrom(self, genome_name, chrom_name, start, end, direction=1):
        """Draw the synteny for a region of a chromosome.

        Sets the reference genome, computes the fragment layout, and returns
        the assembled drawing group (also stored in self.groupid by
        draw_placed).
        """
        self.ref_genome = genome_name
        self.layout_frags(genome_name, chrom_name, start, end, direction)
        return self.draw_placed()
def layout_frags(self, genome_name, chrom_name, start, end, direction=1):
ref_chrom = self.chroms_lookup[(genome_name, chrom_name)]
# setup genome display order
order = {}
for i, genome in enumerate(self.genomes):
order[genome] = i
# swap the genome with order 0 and the reference genome
j = order[self.ref_genome]
order[self.genomes[0]] = j
order[self.ref_genome] = 0
# init reference fragment
ref_frag = Frag(genome=genome_name,
chrom=chrom_name,
start=max(start, 0),
end=min(end, ref_chrom.end),
strand=direction,
x=max(start,0),
y=0)
self.frags.add(ref_frag)
self.layout_frag_contents(ref_frag)
# find all synteny blocks in this region
# sort blocks by appearance in ref_chrom
blocks = list(self.filter_blocks(self.blocks, ref_chrom, start, end))
def blocksort(a):
if a[1] == 0:
starta = a[0].region1.start
else:
starta = a[0].region2.start
blocks.sort(key=blocksort)
# make lookup for genes to block and block to fragment
block_lookup = {}
frag_lookup = {}
for block, flip in blocks:
if flip == 0:
other = block.region2
else:
other = block.region1
frag = Frag()
frag.genome = other.species
frag.chrom = other.seqname
frag_lookup[block] = frag
for gene2 in iter_chrom(self.db.get_regions(frag.genome,
frag.chrom),
other.start, other.end):
block_lookup[gene2] = block
self.block_lookup = block_lookup
# find all genes that will be drawn
# walk along ref_chrom and store drawn genes into fragments
refLookup = {}
for gene in iter_chrom(self.db.get_regions(genome_name, chrom_name),
start, end):
for name2 in self.orth_lookup.get(gene.data["ID"], []):
gene2 = self.db.get_region(name2)
if gene2 in block_lookup:
frag_lookup[block_lookup[gene2]].genes.append(gene2)
refLookup[gene2] = gene
self.refLookup = refLookup
# determine fragment dimensions
for frag in frag_lookup.itervalues():
if len(frag.genes) == 0:
frag.x = None
continue
frag.genes.sort(key=lambda a: a.start)
# set fragment start and end
frag.start = frag.genes[0].start
frag.end = frag.genes[-1].end
# find fragment direction
vote = 0
last = None
for gene2 in frag.genes:
pos = refLookup[gene2].start
if last != None and pos != last:
if last < pos:
vote += 1
else:
vote -= 1
last = pos
if vote > 0:
frag.direction = direction
else:
frag.direction = -direction
# find fragment x-coordinate
diffs = []
for gene2 in frag.genes:
if direction == 1:
offset1 = refLookup[gene2].start - ref_frag.start
else:
offset1 = ref_frag.end - refLookup[gene2].end
if frag.direction == 1:
offset2 = gene2.start - frag.start
else:
offset2 = frag.end - gene2.end
diffs.append(offset2 - offset1)
frag.x = ref_frag.x - stats.median(diffs)
# place blocks
fragY = util.Dict(default=-self.genome_sep)
for block, flip in blocks:
frag = frag_lookup[block]
otherGenome = frag.genome
if frag.x == None:
# fragment could not be placed
continue
frag.y = fragY[otherGenome] - \
((order[otherGenome] - 1) *
self.max_genome_sep)
# re-get all genes between those coordinates
#frag.genes = list(iter_chrom(self.db.get_regions(frag.genome,
# frag.chrom),
# frag.start, frag.end))
# store and lyaout frag
self.frags.add(frag)
self.layout_frag_contents(frag)
# stagger fragments
fragY[otherGenome] -= self.frag_sep
if fragY[otherGenome] < -self.max_genome_sep:
fragY[otherGenome] = -self.genome_sep
    def filter_blocks(self, blocks, ref_chrom, start, end):
        """Yield (block, side) for blocks overlapping [start, end] on ref_chrom.

        side is 0 when the block's region1 lies on the reference chromosome
        and 1 when region2 does.  Blocks touching chromosomes not present in
        self.chroms_lookup are skipped entirely.
        """
        for block in blocks:
            if ((block.region1.species,
                 block.region1.seqname) not in self.chroms_lookup or
                (block.region2.species,
                 block.region2.seqname) not in self.chroms_lookup):
                continue
            # NOTE(review): only seqname is compared against ref_chrom here,
            # not the species -- assumes chromosome names are unique across
            # genomes; confirm.
            if (block.region1.seqname == ref_chrom.seqname and
                util.overlap(block.region1.start,
                             block.region1.end,
                             start, end)):
                yield (block, 0)
            elif (block.region2.seqname == ref_chrom.seqname and
                  util.overlap(block.region2.start,
                               block.region2.end,
                               start, end)):
                yield (block, 1)
    def layout_frag_contents(self, frag):
        """Layout the contents of a fragment.

        Assigns a Layout (x, y, orientation) to every gene in the fragment's
        coordinate range and records which fragment each gene belongs to.
        """
        for gene in iter_chrom(self.db.get_regions(frag.genome,
                                                   frag.chrom),
                               frag.start, frag.end):
            # mirror the x coordinate when the fragment is drawn reversed
            if frag.direction == 1:
                x = frag.x + gene.start - frag.start
            else:
                x = frag.x + frag.end - gene.end
            self.region_layout[gene.data["ID"]] = Layout(x, frag.y, frag.direction)
            self.region2frags[gene.data["ID"]] = frag
    def draw_placed(self):
        """Build (and remember) the drawing group for all placed fragments.

        Draws fragment backbones, then genes at their layout positions, then
        ortholog match ribbons.  Stores the assembled group in self.groupid
        and returns it.
        """
        vis = []
        util.tic("create draw code")
        # draw frags
        for frag in self.frags:
            vis.append(self.frag_widget(frag))
        # draw genes
        for reg, l in self.region_layout.iteritems():
            vis.append(translate(l.x, l.y,
                                 self.gene_widget(self.db.get_region(reg))))
        # draw matches; 'drawn' is shared so a ribbon is emitted only once
        drawn = set()
        for frag in self.frags:
            vis.append(self.draw_matches(frag.genome, frag.chrom,
                                         frag.start, frag.end, drawn))
        util.toc()
        self.groupid = group(*vis)
        return self.groupid
    def draw_matches(self, sp, chrom, start, end, drawn=None):
        """Draw ortholog match ribbons for genes in a chromosome region.

        For each gene, its laid-out orthologs are grouped into display rows
        (by layout y) and connected row-to-row with a line (and a filled
        quad when self.fat_matches).  'drawn' is a shared set of
        (bottom, top) gene-name pairs used to avoid drawing a ribbon twice.
        """
        vis = []
        if drawn is None:
            drawn = set()
        # build list of matches in order of drawing
        for gene in iter_chrom(self.db.get_regions(sp, chrom), start, end):
            # need to sort matches by genome order so that mult-genome synteny
            # is drawn top-down
            # get orthologs
            genes2 = [x for x in self.orth_lookup.get(gene.data["ID"], [])
                      if x in self.region_layout]
            if len(genes2) == 0:
                continue
            rows = util.groupby(lambda x: self.region_layout[x].y, genes2)
            keys = util.sort(rows.keys(), reverse=True)
            rows = util.mget(rows, keys)
            l = self.region_layout
            for i in range(1, len(rows)):
                for botGene in rows[i]:
                    gene1 = self.db.get_region(botGene)
                    for topGene in rows[i-1]:
                        if (botGene, topGene) in drawn:
                            continue
                        drawn.add((botGene, topGene))
                        gene2 = self.db.get_region(topGene)
                        # ribbon corners: top edge spans the top gene,
                        # bottom edge spans the bottom gene (one row lower)
                        y1 = l[topGene].y
                        y2 = l[botGene].y + 1
                        x1 = l[topGene].x
                        x2 = l[topGene].x + gene2.length()
                        x3 = l[botGene].x + gene1.length()
                        x4 = l[botGene].x
                        if self.fat_matches:
                            vis.append(quads(
                                self.colors["matches"],
                                x1, y1,
                                x2, y1,
                                x3, y2,
                                x4, y2))
                        vis.append(lines(self.colors["matches"],
                                         x1, y1,
                                         x4, y2))
        return group(* vis)
    def frag_widget(self, frag):
        '''
        def arrow(direction, width, height, func):
            return group(
                triangles(conf['color-arrow'],
                          0, height/2,
                          direction * width, 0,
                          0, height/-2),
                hotspot("click",
                        0, height/-2,
                        direction * width, height/2,
                        func))
        def leftArrowFunc():
            if frag.direction == 1:
                frag.x = x - min(frag.start, conf['frag-extend'])
            else:
                frag.x = x - min(frag.chrom.size - frag.end,
                                 conf['frag-extend'])
            if frag.direction == 1:
                frag.start = max(0, frag.start - conf['frag-extend'])
            else:
                frag.end = min(frag.chrom.size, frag.end + conf['frag-extend'])
            self.assignFragPos(conf, frag)
            self.redraw()
        def rightArrowFunc():
            if frag.direction == 1:
                frag.end = min(frag.chrom.size, frag.end + conf['frag-extend'])
            else:
                frag.start = max(0, frag.start - conf['frag-extend'])
            self.assignFragPos(conf, frag)
            self.redraw()
        '''
        # NOTE: the triple-quoted block above is disabled arrow-control code
        # kept for reference (it occupies the docstring position).
        # Build the backbone line and click hotspot for one fragment.
        # calculate coordinates from drawing
        x = frag.x
        y = frag.y
        x2 = x + frag.length()
        mid = y + .5
        top = y + 1
        vis = []
        # backbone
        vis.append(lines(self.colors['frag'],
                         x, mid, x2, mid,
                         x, y, x, top,
                         x2, y, x2, top))
        # hotspot
        vis.append(hotspot("click", x, y, x2, top,
                           lambda: self.frag_click(frag)))
        # controls
        '''
        if True: # conf['use-controls']:
            # build controls
            controls = group(
                # left arrow
                translate(x + conf['arrow-width'] / -2, y + conf['gene-size']/2,
                          arrow(-1, conf['arrow-width'], conf['arrow-height'],
                                leftArrowFunc)),
                # right arrow
                translate(x2 + conf['arrow-width'] / 2, y + conf['gene-size']/2,
                          arrow(1, conf['arrow-width'], conf['arrow-height'],
                                rightArrowFunc)))
            # add controls to controls list
            self.controlids.append(controls)
            # add controls to vis
            vis.append(controls)
        '''
        return group(* vis)
#
# gene functions
#
    def gene_widget(self, gene):
        """Build the clickable drawing for one gene at the local origin.

        The caller translates the group to the gene's layout position.
        Zero-length genes yield an empty group.
        """
        def func():
            self.gene_click(gene)
        # combine the fragment's orientation with the gene's own strand
        eff_dir = effective_dir(self.region_layout[gene.data["ID"]].orient,
                                gene.strand)
        length = gene.length()
        if length == 0:
            return group()
        # NOTE(review): gene_color/draw_gene are called with an explicit
        # 'self' argument -- they appear to be plain callables stored on the
        # instance (plug-in style) rather than bound methods; confirm.
        col = self.gene_color(self, gene, eff_dir)
        if eff_dir == 1:
            g = self.draw_gene(self, gene, eff_dir, col)
        else:
            # reversed genes are drawn flipped about their far end
            g = translate(length, 0,
                          flip(0, 1, self.draw_gene(self, gene, eff_dir, col)))
        return group(g, hotspot("click", 0, 0, length, 1, func),
                     self.draw_gene_label(gene))
    def draw_gene_label(self, gene):
        """Build the text label for a gene per self.show_gene_labels.

        Modes: "scale" (text clipped inside the gene box), "fix" (fixed
        text), "vertical" (rotated 90 degrees), "main_only" (vertical labels
        on the reference genome only); any other mode draws no label.
        """
        name = self.gene_label(gene)
        length = gene.length()
        if self.show_gene_labels == "scale":
            label = group(color(0, 0, 0),
                          text_clip(name, 0, 0, length, 1,
                                    5, 20, "middle", "center"))
        elif self.show_gene_labels == "fix":
            label = group(color(0, 0, 0),
                          text(name, 0, 0, length, 1,
                               "middle", "center"))
        elif self.show_gene_labels == "vertical":
            # reference-genome labels get effectively unbounded height
            if gene.species == self.ref_genome:
                top = 10000000
            else:
                top = self.max_genome_sep
            label = rotate(90, color(0, 0, 0),
                           text_clip(name, 0, -100000,
                                     top, 0,
                                     self.font_size, self.font_size,
                                     "left", "top"))
        elif self.show_gene_labels == 'main_only':
            if gene.species == self.ref_genome:
                label = rotate(90, color(0, 0, 0),
                               text_clip(name, 0, -100000,
                                         10000000, 0,
                                         self.font_size, self.font_size,
                                         "left", "top"))
            else:
                label = group()
        else:
            label = group()
        return label
    def gene_click(self, gene):
        """Default gene click handler: print the gene's details to stdout."""
        self.print_gene(gene)
    def frag_click(self, frag):
        """Print the genomic coordinate under a mouse click on a fragment."""
        x, y = self.win.get_mouse_pos("world")
        # map the screen x back to a chromosome position, honoring the
        # fragment's orientation
        if frag.direction == 1:
            pos = int(frag.start + x - frag.x)
        else:
            pos = int(frag.end - (x - frag.x))
        print "%s:%s:%s" % (frag.genome, frag.chrom,
                            util.int2pretty(pos))
    def print_gene(self, gene):
        """Print a gene's ID, location, length, and data attributes."""
        print "%s %s:%s:%s-%s (%s)" % (
            gene.data["ID"], gene.species,
            gene.seqname,
            util.int2pretty(gene.start),
            util.int2pretty(gene.end),
            util.int2pretty(gene.length()))
        print ";".join("%s=%s" % (a, b) for a,b in gene.data.items())
        print
    def redraw(self):
        """Rebuild the drawing in place, replacing the previous group.

        NOTE: draw_placed() reassigns self.groupid, so the old group id must
        be read (as the first argument) before draw_placed() runs -- Python's
        left-to-right argument evaluation guarantees that here.  Do not
        hoist the draw_placed() call above the groupid read.
        """
        if self.groupid != 0:
            self.win.replace_group(self.groupid, self.draw_placed())
        else:
            self.groupid = self.win.add_group(self.draw_placed())
        self.show_controls()
def get_gene_coords(self, gene):
l = self.region_layout
name = gene.data["ID"]
return (l[name].x, l[name].y,
l[name].x + gene.end - gene.start,
l[name].y + 1)
def find(self, name):
try:
region = self.db.get_region(name)
if region in self.region_layout:
self.win.set_visible(* self.get_gene_coords(region))
else:
print "gene '%s' is not shown" % name
except KeyError:
print "cannot find gene '%s'" % name
    def mark(self, name, shape="box", col=color(0, 0, 1)):
        """Highlight the named gene with a marking shape, if it is drawn.

        The mark's group id is appended to self.markids so it can later be
        hidden (show_marks) or removed (clear_marks).
        NOTE(review): the 'col' default is evaluated once at definition
        time and shared across calls -- confirm color objects are immutable.
        """
        if not (name in self.region_layout): #placedGenes):
            print "gene '%s' is not shown" % name
            return
        gene = self.db.get_region(name)
        coords = self.get_gene_coords(gene)
        gid = self.win.add_group(self.draw_marking(shape, col, coords[1],
                                                   coords[0], coords[2]))
        self.markids.append(gid)
    def show_marks(self, visible):
        """Show or hide every mark group created by mark()."""
        for gid in self.markids:
            self.win.show_group(gid, visible)
def clear_marks(self):
for gid in self.markids:
self.win.remove_group(gid)
self.markids = []
def show_controls(self, visible = None):
if visible == None:
visible = self.use_controls
for gid in self.controlids:
self.win.show_group(gid, visible)
#===================================================================
# regions
#
'''
def add_regions(self, regions, shape=None, col=None, height=None):
for region in regions:
# set default visualizatios attributes
if "shape" not in region.data:
if shape == None:
region.data["shape"] = "fill"
else:
region.data["shape"] = shape
if "color" not in region.data:
if col == None:
region.data["color"] = color(0,1,0)
else:
region.data["color"] = col
else:
if isinstance(region.data["color"], str):
region.data["color"] = eval(region.data["color"])
else:
region.data["color"] = region.data["color"]
if "height" not in region.data:
if height == None:
region.data["height"] = 1.0
else:
region.data["height"] = height
else:
if isinstance(region.data["height"], str):
region.data["height"] = float(region.data["height"])
else:
region.data["height"] = region.data["height"]
# ensure species is specified
assert "species" in region.data
# force stand to +1 or -1
if region.strand not in [1, -1]:
region.strand = 1
chrom = self.matching.genomes[region.data["species"]].chroms[region.seqname]
self.regions[chrom].append(region)
for lst in self.regions.itervalues():
lst.sort(key=lambda x: x.start)
'''
# TODO: use regions as markings
def draw_marking(self, shape, col, y, x1, x2, direction=1, height=1.0):
mid = y + .5
y1 = mid - height / 2.0
y2 = mid + height / 2.0
if shape == "box":
return group(col, shapes.box(x1, y1, x2, y2, fill=False))
elif shape == "half_box":
if direction == 1:
return group(col, shapes.box(x1, mid, x2, y2, fill=False))
else:
return group(col, shapes.box(x1, y1, x2, mid, fill=False))
elif shape == "fill":
return group(col, quads(
x1, y1,
x1, y2,
x2, y2,
x2, y1), lines(x1, y1, x1, y2))
elif shape == "half_fill":
if direction == 1:
return group(col, quads(
x1, mid,
x1, y2,
x2, y2,
x2, mid), lines(x1, mid, x1, y2))
else:
return group(col, quads(
x1, y1,
x1, mid,
x2, mid,
x2, y1), lines(x1, y1, x1, mid))
elif shape == "cross":
return group(lines(col,
x1, y1,
x2, y2,
x1, y2,
x2, y1))
elif shape == "flag":
x = min(x1, x2)
return group(lines(col,
x, y1, x, 6))
elif shape == "hash":
return group(col, lines(x1, y1, x1, y2))
elif shape == "half_hash":
if direction == 1:
return group(col, lines(x1, mid, x1, y2))
else:
return group(col, lines(x1, y1, x1, mid))
else:
raise "unknown shape '%s'" % shape
'''
def drawMark(self, genome, chrom, start, end, strand=1, shape="box", col=color(0,0,1)):
y, x1, x2 = self.getRegionDrawCoords(genome, chrom, start, end)
if y == None:
print "region not shown"
else:
if x1 < x2:
direction = strand
else:
direction = -strand
self.drawMarking(shape, col, y, x1, x2, direction)
def getRegionDrawCoords(self, genome, chrom, start, end):
"""Returns (y, x1, x2) or (None, None, None)"""
chrom = self.matching.genomes[genome].chroms[chrom]
gene1, gene2 = self.matching.findNearGenes(chrom, start)
gene3, gene4 = self.matching.findNearGenes(chrom, end)
frags = []
for gene in [gene1, gene2, gene3, gene4]:
if gene in self.region_layout: #placedGenes:
frags.append(self.region2frag[gene]) #gene.frag)
for frag in frags:
if util.overlap(start, end, frag.start, frag.end):
if frag.direction == 1:
return frag.y, \
start - frag.start + frag.x, \
end - frag.start + frag.x
else:
return frag.y, \
frag.x + frag.end - start, \
frag.x + frag.end - end
return None, None, None
'''
def toggle_controls(self):
self.use_controls = not self.use_controls
self.show_controls(self.use_controls)
def toggle_labels(self):
i = gene_label_types.index(self.show_gene_labels)
i = (i + 1) % len(gene_label_types)
self.show_gene_labels = gene_label_types[i]
self.redraw()
#=============================================================================
# global variables
markColor = color(0,0,1)  # default color used when marking genes
context = 1e6  # bp of flanking context shown around a gene -- TODO confirm units
selgene = None  # most recently clicked gene (set by SyntenyVis.gene_click)
selgenes = []  # history of clicked genes
genes = {}  # gene-name lookup; populated elsewhere -- TODO confirm
##################################################################
# Classes and functions
#
class SyntenyVis (SyntenyVisBase):
def __init__(self, genomes, chroms, regions, blocks, orths,
**options):
SyntenyVisBase.__init__(self, genomes, chroms,
regions, blocks, orths, **options)
self.click_mode = "gene"
self.selgenes = []
self.seqs = fasta.FastaDict()
def show(self):
SyntenyVisBase.show(self)
self.win.set_binding(input_key("g"), self.press("gene"))
self.win.set_binding(input_key("v"), self.press("view"))
self.win.set_binding(input_key("s"), self.press("sequence"))
self.win.set_binding(input_key("a"), self.press("align"))
self.win.set_binding(input_key("d"), self.clear_selgenes)
self.win.set_binding(input_key("w"), self.align_selgenes)
self.win.set_binding(input_key("e"), self.print_selgenes)
def gene_click(self, gene):
global selgene
selgene = gene
selgenes.append(gene)
if self.click_mode == "gene":
self.print_gene(gene)
elif self.click_mode == "view":
drawGene(gene.name, context)
elif self.click_mode == "sequence":
if gene.name in self.seqs:
print
print "%s\n%s" % (gene.name, self.seqs[gene.name])
print
else:
print "%s has no sequence" % gene.name
'''
elif self.click_mode == "align":
if gene.name not in lookup:
print "gene %s has no matches" % gene.name
return
orth = filter(lambda x: x in self.genes,
self.comps[self.lookup[gene.name]])
seqs2 = util.subdict(self.seqs, self.comp)
aln = muscle.muscle(seqs2)
keys = aln.keys()
for key in keys:
if not hasattr(self.genes[key], "y"):
self.genes[key].y = -1e1000
keys.sort(lambda a,b: cmp(self.genes[b].y, self.genes[a].y))
alignlib.printAlign(aln, order=keys)
'''
# add key bindings
def press(self, mode):
def func():
print "mode is '%s'" % mode
self.click_mode = mode
return func
def clear_selgenes(self):
self.selgenes[:] = []
print "selgenes cleared"
def align_selgenes(self):
self.align(* self.selgenes)
def print_selgenes(self):
print self.selgenes
def align(self, * names):
if len(names) == 0:
print "nothing to align"
# get names from genes if they are not strings
if type(names[0]) != str:
names = [i.name for i in names]
seqs2 = util.subdict(self.seqs, names)
aln = muscle.muscle(seqs2)
muscle.printAlign(aln)
#=============================================================================
'''
def draw(genome, chrom, start, end):
util.tic("draw")
vis.draw(genome, chrom, start, end)
util.toc()
def drawGene(geneName, context = context):
gene = genes[geneName]
draw(gene.chrom.genome.name, gene.chrom.name,
gene.start - context, gene.end + context)
vis.mark(geneName, shape="box", col=markColor)
def drawAll(genome):
vis.drawAll(genome)
def mark(shape="box", col=util.blue):
names = []
while True:
line = sys.stdin.readline().rstrip()
if line == "": break
names.append(line)
markGenes(names, shape, col)
def markGenes(names, shape="box", col=util.blue):
for name in names:
vis.mark(name, shape, col)
def markHoles(shape="box", col=util.blue):
genes2 = filter(lambda x: len(x.matches) == 0, genes.values())
names = [x.name for x in genes2]
markGenes(names, "box", col)
def find(name):
return vis.find(name)
printscreen = lambda *args, **kargs: svg.printScreen(vis.win, *args, **kargs)
def read_fasta(filename):
vis.seqs.update(fasta.read_fasta(env.findFile(f)))
def readAllSeqs():
util.tic("read sequences")
for genome in m.getGenomeOrder():
try:
seqfile = env.findFile("%s.fasta" % genome)
util.tic("reading '%s'" % seqfile)
vis.seqs.read(seqfile)
util.toc()
except:
util.log("cannot read fasta '%s.fasta'" % genome)
util.toc()
#
# Visualize ortholog sets across the genome
#
def visparts(parts, refGenome, outdir):
for i in xrange(len(parts)):
util.log("visualizing part %d" % i)
# find the biggest group of gene from the reference genome from the same
# chromosome
part = parts[i]
refs = filter(lambda gene: genes[gene].chrom.genome.name == refGenome,
part)
if len(refs) == 0:
continue
counts = util.hist_dict([genes[ref].chrom for ref in refs])
keys = counts.keys()
keys.sort(lambda a,b: cmp(counts[b], counts[a]))
refChrom = keys[0]
refgenes = filter(lambda gene: genes[gene].chrom == refChrom, part)
start = min([genes[gene].start for gene in refgenes]) - context
end = max([genes[gene].end for gene in refgenes]) + context
draw(refGenome, refChrom.name, start, end)
markGenes(part, markColor, "box")
vis.win.set_visible(start, 2*conf["gene-size"], end,
-conf["max-genome-sep"] * len(m.genomes))
# output svg
svg.printScreen(param["visual"][-1] + "/synteny%d.svg" % i)
# conversion
#os.system("convert %s %s" % (
# param["visual"][-1] + "/synteny%d.svg" % i,
# param["visual"][-1] + "/synteny%d.png" % i))
svglib.svg2pdf(param["visual"][-1] + "/synteny%d.svg" % i)
os.remove(param["visual"][-1] + "/synteny%d.svg" % i)
#
# Visualize windows across the genome
#
def viswindows(refGenome, windowSize, windowStep, outdir):
chroms = m.genomes[refGenome].chroms.values()
chroms.sort(lambda a,b: cmp(a.name, b.name))
indexfile = file(param["chromWindows"][-1], "w")
print >>indexfile, "##version:1.0"
print >>indexfile, "##types:string\tstring\tstring\tint\tint\tstring"
print >>indexfile, "file\tgenome\tchrom\tstart\tend\tgenes"
for chrom in chroms:
i = 0
for start in xrange(0, int(chrom.size), windowStep):
end = start + windowSize
draw(refGenome, chrom.name, start, end)
#visgroup = vis.draw(conf, m, refGenome, chrom.name, start, end)
vis.win.set_visible(start, 10*conf["gene-size"], end,
-conf["max-genome-sep"] * len(m.genomes))
filename = ("%s_%s_%s-%s.svg" %
(refGenome, chrom.name,
util.int2pretty(int(start)),
util.int2pretty(int(end))))
# figure out which genes are in view
visgenes = filter(lambda gene:
start < gene.x < end, vis.placedGenes.keys())
# record in lookup filename->genes
#pdffile = os.path.split(filename)[1].replace(".svg", ".pdf")
#lookup.append([pdffile] + [x.name for x in visgenes])
indexfile.write("\t".join([filename,
refGenome,
chrom.name,
str(int(start)),
str(int(end)),
",".join([x.name for x in visgenes])])
+"\n")
# output svg
svg.printScreen(vis.win, outdir + "/" + filename) #, visgroup)
# conversion
#grid.execute("svg2pdf.py %s -r" % filename)
i += 1
indexfile.close()
'''
"""
def placeSyntenyBlock(self, conf, block, refGenome, refChrom,
start, end, direction, y):
if refGenome == block.genome2:
otherGenome = block.genome1
otherChrom = block.chrom1
else:
otherGenome = block.genome2
otherChrom = block.chrom2
# find gene index of first gene in block
low, startIndex = util.binsearch(refChrom.genes, start-1,
lambda a,b: cmp(a.start, b))
if startIndex == None:
# quit if no gene is found
return
# interpoloate start, end, and direction to other chrom
vote = 0
last = None
genes2 = {}
for gene in GeneIter(refChrom.genes, start, end, startIndex):
matches = self.findGeneSynteny(block, gene)
if len(matches) > 0:
pos = stats.mean(map(lambda x: x.start, matches))
if last != None and pos != last:
if last < pos:
vote += 1
else:
vote -= 1
last = pos
for gene2 in matches:
genes2[gene2] = 1
genes2 = genes2.keys()
# quit if no matches are found
if len(genes2) == 0:
return
otherStart = min(map(lambda x: x.start, genes2))
otherEnd = max(map(lambda x: x.end, genes2))
if vote > 0:
otherDir = direction
else:
otherDir = -direction
# create frament
frag = Frag()
frag.chrom = otherChrom
frag.start = otherStart
frag.end = otherEnd
frag.direction = otherDir
frag.x = self.findFragX(conf, block, startIndex, refChrom,
start, end, direction, otherChrom, otherStart,
otherEnd, otherDir)
frag.y = y - ((self.order[otherGenome.name] - 1) *
conf['max-genome-sep'])
# store frag
self.frags[frag] = True
# assign genes (x,y)-coords
self.assignFragPos(conf, frag)
def findOtherDir(self, conf, block, refChrom, refIndex, start, end,
direction, otherChrom, otherStart, otherEnd):
last = 0
vote = 0
# for every increase in refChrom is there a general dec or inc?
for gene in GeneIter(refChrom.genes, start, end, refIndex):
genes2 = self.findGeneSynteny(block, gene)
if len(genes2) > 0:
pos = stats.mean(map(lambda x: x.start, genes2))
if last < pos:
vote += 1
else:
vote -= 1
last = pos
# return dir based on majority vote
if vote > 0:
return direction
else:
return -direction
def findFragX(self, conf, block, index1,
chrom1, start1, end1, direction1,
chrom2, start2, end2, direction2):
diffs = []
initStart = chrom1.genes[index1].start
initX = chrom1.genes[index1].x
# find the average difference between start of frag and 'start' of gene
for gene in GeneIter(chrom1.genes, start1, end1, index1):
diff1 = gene.start - initStart
for gene2 in self.findGeneSynteny(block, gene):
diff2 = None
if direction2 == 1:
diff2 = gene2.start - start2
else:
diff2 = end2 - gene2.end
diffs.append(diff2 - diff1)
if len(diffs) == 0:
return None
else:
return initX - stats.mean(diffs)
def findGeneSynteny(self, block, gene):
genes2 = []
if gene in self.matching.complookup:
for gene2 in self.matching.comps[self.matching.complookup[gene]]:
# find the genes that match in this block
# not on same chrom, and with block start and end
if gene2.chrom != gene.chrom:
if block == None:
genes2.append(gene2)
elif gene2.chrom == block.chrom1:
if gene2.start >= block.start1 and \
gene2.end <= block.end1:
genes2.append(gene2)
elif gene2.chrom == block.chrom2:
if gene2.start >= block.start2 and \
gene2.end <= block.end2:
genes2.append(gene2)
return genes2
"""
| 44,776 | 13,494 |
from celery import shared_task
from django.core.mail import send_mail
from datetime import datetime
from time import sleep
# Start the worker process from inside the visual-option-chain project directory: celery -A visual-option-chain worker -l info -E
@shared_task
def send_email_task(subject, message, from_email, recipient_list):
    """Celery task: deliver an email through Django's send_mail.

    fail_silently=False means SMTP errors propagate and the task fails
    visibly instead of being swallowed.
    """
    mail_kwargs = {
        "subject": subject,
        "message": message,
        "from_email": from_email,
        "recipient_list": recipient_list,
        "fail_silently": False,
    }
    send_mail(**mail_kwargs)
    return None
@shared_task
def display_time():
    """Celery task: print the current wall-clock time; always returns True."""
    now = datetime.now()
    print("The time is %s :" % str(now))
    return True
| 620 | 183 |
from flask import Flask, Blueprint
from .settings import Setting
from .routes import main
from .line2telegram.routes import l2tg
def create_app():
    """Application factory: build, configure, and return the Flask app."""
    flask_app = Flask(__name__)
    flask_app.config.from_object(Setting)
    # main routes at the root, line2telegram routes under /l2tg
    flask_app.register_blueprint(main)
    flask_app.register_blueprint(l2tg, url_prefix='/l2tg')
    return flask_app
| 315 | 108 |
"""
Given a csv or txt file and a Tensorflow 1.15 SavedModel file, run image classification on the urls
and write the predicted label and confidence back to the file
"""
import argparse
import os
from io import BytesIO
import requests
import pandas as pd
from csv import writer as csv_writer
from tqdm import tqdm
from model.model import ImageClassification
from PIL import Image
from concurrent.futures import ThreadPoolExecutor
def predict_dataset(filepath, model_dir, url_col=None, progress_hook=None):
    """
    Given a file with urls to images, predict the given SavedModel on each image and write the
    label and confidence to a '<name>_predictions.csv' file next to the input.

    :param filepath: path to a valid txt, csv, or xlsx file with image urls to download.
    :param model_dir: path to the Lobe Tensorflow SavedModel export.
    :param url_col: if this is a csv/xlsx, the column header name for the urls to download.
    :param progress_hook: an optional function that will be run with progress_hook(currentProgress, totalProgress) when progress updates.
    """
    print(f"Predicting {filepath}")
    filepath = os.path.abspath(filepath)
    filename, ext = _name_and_extension(filepath)
    # read the file
    # if this a .txt file, don't treat the first row as a header. Otherwise, use the first row for header column names.
    if ext != '.xlsx':
        csv = pd.read_csv(filepath, header=None if ext == '.txt' else 0)
    else:
        csv = pd.read_excel(filepath, header=0)
    if ext in ['.csv', '.xlsx'] and not url_col:
        raise ValueError("Please specify an image url column for the csv.")
    url_col_idx = 0
    if url_col:
        try:
            url_col_idx = list(csv.columns).index(url_col)
        except ValueError:
            raise ValueError(f"Image url column {url_col} not found in csv headers {csv.columns}")
    num_items = len(csv)
    print(f"Predicting {num_items} items...")
    # load the model
    print("Loading model...")
    model = ImageClassification(model_dir=model_dir)
    model.load()
    print("Model loaded!")
    # create our output csv
    fname, ext = os.path.splitext(filepath)
    out_file = f"{fname}_predictions.csv"
    with open(out_file, 'w', encoding="utf-8", newline='') as f:
        writer = csv_writer(f)
        # our header names from the pandas columns
        writer.writerow([*[str(col) if not pd.isna(col) else '' for col in csv.columns], 'label', 'confidence'])
        with tqdm(total=num_items) as pbar:
            with ThreadPoolExecutor() as executor:
                # make our prediction jobs (futures keep the input row order)
                model_futures = [
                    executor.submit(predict_image, url=row[url_col_idx], model=model, row=row)
                    for row in csv.itertuples(index=False)
                ]
                # BUGFIX: write results through the already-open writer
                # instead of re-opening the file in append mode per row (the
                # two open handles raced each other's buffered writes), and
                # report progress with the completed-future count (the old
                # code reused the stale index from the submit loop).
                for done, future in enumerate(model_futures, start=1):
                    label, confidence, row = future.result()
                    writer.writerow([*[str(col) if not pd.isna(col) else '' for col in row], label, confidence])
                    pbar.update(1)
                    if progress_hook:
                        progress_hook(done, num_items)
def predict_image(url, model, row):
    """Download one image and classify it; return (label, confidence, row).

    Best-effort: any failure (network, decode, prediction) leaves label and
    confidence as empty strings so one bad url doesn't abort the dataset.
    """
    label = ''
    confidence = ''
    try:
        response = requests.get(url, timeout=30)
        if response.ok:
            image = Image.open(BytesIO(response.content))
            predictions = model.predict(image)
            # pick the highest-confidence prediction
            label, confidence = max(predictions, key=lambda pair: pair[1])
    except Exception:
        # deliberate swallow: see docstring
        pass
    return label, confidence, row
def _name_and_extension(filepath):
# returns a tuple of the filename and the extension, ignoring any other prefixes in the filepath
# raises if not a file
fpath = os.path.abspath(filepath)
if not os.path.isfile(fpath):
raise ValueError(f"File {filepath} doesn't exist.")
filename = os.path.split(fpath)[-1]
name, ext = os.path.splitext(filename)
return name, str.lower(ext)
def _valid_file(filepath):
    """Return filepath unchanged if it exists with a supported extension.

    Raises ValueError (via _name_and_extension or directly) otherwise.
    """
    valid_extensions = ['.txt', '.csv', '.xlsx']
    _, extension = _name_and_extension(filepath)
    if extension not in valid_extensions:
        raise ValueError(f"File {filepath} doesn't have one of the valid extensions: {valid_extensions}")
    return filepath
if __name__ == '__main__':
    # CLI entry point: classify every image url in the input file.
    parser = argparse.ArgumentParser(description='Label an image dataset from csv or txt file.')
    parser.add_argument('file', help='Path to your csv or txt file.')
    parser.add_argument('model_dir', help='Path to your SavedModel from Lobe.')
    parser.add_argument('--url', help='If this is a csv with column headers, the column that contains the image urls to download.')
    args = parser.parse_args()
    predict_dataset(filepath=args.file, model_dir=args.model_dir, url_col=args.url)
| 4,631 | 1,570 |
import click
import matplotlib.pyplot as plt
import numpy as np
from loguru import logger
from matplotlib.collections import LineCollection
number_to_degrees = {
k: (np.cos(np.pi * 2 / 10 * k), np.sin(np.pi * 2 / 10 * k))
for k in range(10)
}
@click.command()
@click.option(
    "--seed", "-s", default=0, help="Seed for random number generation."
)
@click.option(
    "--length",
    "-l",
    default=10_000,
    help="Number of random numbers to generate.",
)
@click.option(
    "--cmap", "-c", default="RdYlGn", help="The matplotlib colour map to use."
)
@click.option(
    "--outfile",
    default=None,
    help="The outfile to save the image to (default None displays the image).",
)
@click.option(
    "--dpi",
    default=None,
    type=int,
    help="The DPI for the output image -- set for high-quality images if using a format like PNG.",
)
@click.option(
    "--linewidth",
    default=1.0,
    help="The width of the plotted lines -- consider decreasing for large lengths.",
)
def run(seed, length, cmap, outfile=None, dpi=None, linewidth=1.0):
    """Draw a colored walk from a random digit sequence.

    Each digit 0-9 maps (via number_to_degrees) to a unit step at angle
    k * 36 degrees; the path of 'length' steps is rendered as a line
    collection colored along 'cmap', then shown or saved to 'outfile'.
    """
    logger.info(f"Running with seed = {seed}")
    np.random.seed(seed)
    logger.info("Generating sequence of random numbers")
    seq = np.random.randint(0, 10, length)
    logger.info("Generating line coordinates")
    current_point = (0, 0)
    lines = []
    # build one unit-length segment per digit, chained end to end
    for number in seq:
        previous_x, previous_y = current_point
        delta_x, delta_y = number_to_degrees[number]
        new_point = [previous_x + delta_x, previous_y + delta_y]
        lines.append([current_point, new_point])
        current_point = new_point
    logger.info("Generating colours for lines")
    color_map = plt.get_cmap(cmap)
    # one colour per segment, sweeping linearly through the colormap
    colors = [color_map(i) for i in np.linspace(0, 1, length)]
    logger.info("Creating line collection")
    line_collection = LineCollection(
        lines, colors=colors, capstyle="round", linewidth=linewidth
    )
    # Plot
    logger.info("Plotting")
    fig = plt.figure()
    ax1 = fig.add_subplot(1, 1, 1)
    ax1.add_collection(line_collection)
    ax1.autoscale()
    plt.gca().set_aspect("equal", adjustable="box")
    plt.axis("off")
    plt.tight_layout()
    # save (honoring dpi for raster formats) or display interactively
    if outfile and outfile.endswith(".png") and dpi:
        plt.savefig(outfile, dpi=dpi)
    elif outfile:
        plt.savefig(outfile)
    else:
        plt.show()
    logger.success("Complete!")
if __name__ == "__main__":
run()
| 2,403 | 830 |
import datetime
import logging
logger = logging.getLogger("utils.reddit")
subreddit = None
over_threshold = []
def load_subreddit(_subreddit):
    """Remember the praw subreddit instance for later fetch_posts() calls."""
    global subreddit
    subreddit = _subreddit
def fetch_posts(_limit, score_threshold=100, max_age_seconds=86400):
    """Fetch today's top posts and keep those above a score threshold.

    :param _limit: number of top posts to request from reddit.
    :param score_threshold: minimum score a post needs to be kept
        (default 100, matching the previous hard-coded value).
    :param max_age_seconds: maximum post age in seconds (default one day).
    :return: the module-level over_threshold list of
        (id, score, created_utc, False, author_name, url, title) tuples,
        or None when fetching fails.
    """
    try:
        logger.info(f"Fetching top {_limit} posts from {subreddit.display_name}")
        top = subreddit.top("day", limit=_limit)
        over_threshold.clear()
        count = 0
        total = 0
        now = int(datetime.datetime.now().timestamp())
        for post in top:
            if post is None:
                # praw occasionally yields None entries; skip them
                logger.info("None post encountered from praw")
                continue
            # deleted accounts have no author object
            if not post.author:
                name = '[deleted]'
            else:
                name = post.author.name
            _post = (post.id, post.score, int(post.created_utc), False, name,
                     post.url, post.title)
            total += 1
            if _post[1] >= score_threshold and now - _post[2] < max_age_seconds:
                over_threshold.append(_post)
                count += 1
        logger.info(f'{count} posts found over {score_threshold} upvotes out of {total} posts')
        logger.info(f'Current time is {now}')
    except Exception as exception:
        logger.exception(exception)
        return None
    return over_threshold
| 1,277 | 384 |
#from scikits.talkbox.features import mfcc
import scipy.io.wavfile
import numpy as np
import sys
import os
import glob
from utils1 import GENRE_DIR, GENRE_LIST
from python_speech_features import mfcc
#from librosa.feature import mfcc
# Given a wavfile, computes mfcc and saves mfcc data
def create_ceps(wavfile):
    """Compute MFCC features for *wavfile* and save them next to it.

    Reads the wav file, computes the MFCC matrix (frames x coefficients),
    sanitizes non-finite values, and delegates to write_ceps() which stores
    the array as ``<basename>.ceps.npy``.

    :param wavfile: path to a .wav file.
    """
    sampling_rate, song_array = scipy.io.wavfile.read(wavfile)
    print(sampling_rate)
    ceps = mfcc(song_array)
    print(ceps.shape)
    # Replace NaN and infinite values (produced e.g. by silent frames) so
    # downstream classifiers never see non-finite inputs.
    ceps[np.isnan(ceps)] = 0
    ceps[np.isinf(ceps)] = 0
    write_ceps(ceps, wavfile)
# Saves mfcc data
def write_ceps(ceps, wavfile):
    """Persist MFCC data beside the source wav file (np.save appends '.npy')."""
    stem = os.path.splitext(wavfile)[0]
    np.save(stem + ".ceps", ceps)
def main():
    """Walk every genre directory under GENRE_DIR and build .ceps files for all wavs."""
    for label, genre in enumerate(GENRE_LIST):
        for genre_dir in glob.glob(os.path.join(GENRE_DIR, genre)):
            for entry in os.listdir(genre_dir):
                if not entry.endswith("wav"):
                    continue
                create_ceps(os.path.join(GENRE_DIR, genre, entry))


if __name__ == "__main__":
    main()
#!/usr/bin/python3
# -*- coding:utf-8 -*-
# Project: http://cloudedbats.org
# Copyright (c) 2018 Arnold Andreasson
# License: MIT License (see LICENSE.txt or http://opensource.org/licenses/mit).
import re
import metadata4bats
class MetadataWavefileWurb(metadata4bats.MetadataWavefile):
    """Metadata extractor for sound files recorded by CloudedBats-WURB detectors."""

    def __init__(self, dir_path, file_name=None):
        """Delegate to the base wavefile reader, then reset derived fields."""
        super().__init__(dir_path, file_name)
        self.clear()

    def clear(self):
        """Reset all extracted metadata fields."""
        super().clear()

    def extract_metadata(self):
        """ Used to extract file name parts from sound files created by CloudedBats-WURB.
            Format: <recorder-id>_<time>_<position>_<rec-type>_<comments>.wav
            Example: wurb1_20170611T005215+0200_N57.6548E12.6711_TE384_Mdau-in-tandem.wav
        """
        super().extract_metadata()
        # Extract parts based on format.
        parts = self.file_path.stem.split('_')
        # Assume non-WURB until the name matches the expected pattern.
        self.set_field('wurb_format', False)
        if self.file_path.suffix.lower() not in ['.wav']:
            return
        # A WURB name needs at least 4 parts and a rec-type like 'TE384'/'FS384'.
        if len(parts) < 4:
            return
        rec_type = parts[3]
        if (len(rec_type) < 4) or (rec_type[0:2] not in ['TE', 'FS']):
            return
        # NOTE(review): 'wurb_format' is never flipped to True even for matching
        # files; this line was already commented out in the original. Left as-is;
        # confirm whether any consumer relies on that field.
        # self.set_field('wurb_format', True)
        # Detector id.
        if len(parts) > 0:
            self.set_field('rec_detector_id', parts[0])
        # Datetime in ISO format. Best-effort: a bad value is silently skipped.
        if len(parts) > 1:
            try:
                self.set_field('rec_datetime_local', parts[1])
            except Exception:
                pass
        # Latitude/longitude.
        if len(parts) > 2:
            latlong_str = parts[2].upper()
            self.set_field('rec_latlong_str', latlong_str)
            # Extract lat-DD and long-DD. AttributeError: missing N/S or E/W
            # marker; ValueError: non-numeric degrees. Either skips the field.
            try:
                ns_start = re.search(r'[NS]', latlong_str).span(0)[0]
                ew_start = re.search(r'[EW]', latlong_str).span(0)[0]
                latitude_dd = float(latlong_str[ns_start+1:ew_start])
                longitude_dd = float(latlong_str[ew_start+1:])
                if latlong_str[ns_start] == 'S':
                    latitude_dd *= -1.0
                if latlong_str[ew_start] == 'W':
                    longitude_dd *= -1.0
                self.set_field('rec_latitude_dd', latitude_dd)
                self.set_field('rec_longitude_dd', longitude_dd)
            except (AttributeError, ValueError):
                pass
        # Framerates. The numeric suffix is in kHz (e.g. 'TE384' -> 384 kHz).
        if len(parts) > 3:
            self.set_field('rec_type', parts[3])
            try:
                frame_rate = float(parts[3][2:])
                self.set_field('rec_frame_rate_hz', str(round(frame_rate * 1000.0)))
                if parts[3][0:2] == 'TE':
                    self.set_field('rec_is_te', True)  # TE, Time Expanded.
                    self.set_field('rec_file_framerate_hz', str(round(frame_rate * 100.0)))
                else:
                    self.set_field('rec_is_te', False)  # FS, Full Scan.
                    self.set_field('rec_file_framerate_hz', str(round(frame_rate * 1000.0)))
            except (ValueError, Exception):
                pass
        # Comments. All parts above index 4.
        if len(parts) > 4:
            self.set_field('rec_comment', '_'.join(parts[4:]))
# Manual smoke test: parse a sample WURB file name and dump the extracted metadata.
if __name__ == '__main__':
    """ """
    import pprint
    print('CloudedBats - metadata - test')
    mw = MetadataWavefileWurb('/home/arnold/Desktop/batfiles',
                              'WURB2_20180908T212225+0200_N57.6627E12.6393_TE384_Myotis.wav')
    mw.extract_metadata()
    # print('Length: ', len(mw.get_wave_data()))
    print('Metadata: ')
    pp = pprint.PrettyPrinter(indent=1)
    pp.pprint(mw.metadata)
| 3,879 | 1,329 |
from auto_schema.schema_change import SchemaChange
# Copy this file and make adjustments
# Set to None or 0 to skip downtiming
# Hours of alert downtime to schedule per host while the change runs.
downtime_hours = 6
# Phabricator task tracking this schema change.
ticket = 'T298565'
# Columns of the `user` table to convert, mapped to their target definitions.
fields = {
    'user_newpass_time': 'BINARY(14) DEFAULT NULL',
    'user_email_authenticated': 'BINARY(14) DEFAULT NULL',
    'user_email_token': 'BINARY(32) DEFAULT NULL',
    'user_email_token_expires': 'BINARY(14) DEFAULT NULL',
    'user_touched': 'BINARY(14) NOT NULL',
    'user_token': 'BINARY(32) DEFAULT \'\' NOT NULL',
    'user_registration': 'BINARY(14) DEFAULT NULL'
}
# Set this to false if you don't want to run on all dbs
# In that case, you have to specify the db in the command and check function.
all_dbs = True
# DO NOT FORGET to set the right port if it's not 3306
# Use None instead of [] to get all direct replicas of master of active dc
replicas = None
section = 's7'
# The check function must return True if the schema change is already applied
# or not needed, and False otherwise.
# Run one SchemaChange per column. The loop variable is bound as a default
# argument of check() so each closure keeps its own column name even if
# SchemaChange stores the callable and invokes it after the loop advances
# (the original closure late-bound `field`, which is fragile).
for field in fields:
    def check(db, field=field):
        """Return True if the schema change is applied or not needed, False otherwise."""
        query_res = db.run_sql('desc user;')
        if not query_res:
            # Dry run
            return True
        # Definition text of this column as reported by DESC.
        field_def = query_res.split(field)[1].split('\n')[0]
        return 'varbinary' not in field_def.lower()
    schema_change = SchemaChange(
        replicas=replicas,
        section=section,
        all_dbs=all_dbs,
        check=check,
        command='ALTER TABLE /*_*/user CHANGE {} {} {};'.format(field, field, fields[field]),
        ticket=ticket,
        downtime_hours=downtime_hours
    )
    schema_change.run()
| 1,551 | 528 |
import geosoft.gxpy as gxpy
# a GX context is required, and must be assigned to a variable that persists through the life of execution.
gxc = gxpy.gx.GXpy()
# gid is a property of the context that holds the user's Geosoft ID. Say hello.
print('Hello {}'.format(gxc.gid))
| 274 | 95 |
# -*- coding: utf-8 -*-
from pupylib.PupyModule import *
import os
__class_name__="LoadPackageModule"
ROOT=os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
def package_completer(text, line, begidx, endidx):
    """Tab-completion helper: list package names under the packages/ trees
    that start with *text*.

    Best-effort: if listing a directory fails partway through, the matches
    collected so far are still returned (the original returned None on error,
    which completion frameworks do not expect).
    """
    matches = []
    try:
        for p in ["packages/all", "packages/linux/all/", "packages/windows/all", "packages/windows/x86", "packages/windows/amd64", "packages/android"]:
            for pkg in os.listdir(os.path.join(ROOT, p)):
                # Strip the module extension to get the importable name.
                if pkg.endswith(".py"):
                    pkg = pkg[:-3]
                elif pkg.endswith((".pyc", ".pyd")):
                    pkg = pkg[:-4]
                if pkg not in matches and pkg.startswith(text):
                    matches.append(pkg)
    except Exception as e:
        # A completer must never raise inside the interactive shell.
        print(e)
    return matches
@config(cat="manage", compat="all")
class LoadPackageModule(PupyModule):
    """ Load a python package onto a remote client. Packages files must be placed in one of the pupy/packages/<os>/<arch>/ repository """

    def init_argparse(self):
        # NOTE: the class docstring above is shown as the command description,
        # so its text must not change.
        parser = PupyArgumentParser(prog="load_package", description=self.__doc__)
        parser.add_argument('-f', '--force', action='store_true', help='force package to reload even if it has already been loaded')
        parser.add_argument('-d', '--dll', action='store_true', help='load a dll instead')
        parser.add_argument('package', completer=package_completer, help='package name (example: psutil, scapy, ...)')
        self.arg_parser = parser

    def run(self, args):
        # DLL and python-package loading report success/failure the same way.
        if args.dll:
            if self.client.load_dll(args.package):
                self.success("dll loaded !")
            else:
                self.error("the dll was already loaded")
            return
        if self.client.load_package(args.package, force=args.force):
            self.success("package loaded !")
        else:
            self.error("package is already loaded !")
| 1,901 | 577 |
# -*- coding: utf-8 -*-
# Copyright 2013-2016 Hewlett Packard Enterprise Development LP
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Pyramid views for Eucalyptus and AWS instances
"""
import base64
import re
import simplejson as json
from itertools import chain
from M2Crypto import RSA
from operator import attrgetter
from urllib2 import HTTPError, URLError
from boto.exception import BotoServerError
from boto.s3.key import Key
from boto.ec2.bundleinstance import BundleInstanceTask
from boto.ec2.networkinterface import NetworkInterfaceCollection, NetworkInterfaceSpecification
from pyramid.httpexceptions import HTTPNotFound, HTTPFound
from pyramid.view import view_config
from ..constants.cloudwatch import (
MONITORING_DURATION_CHOICES, METRIC_TITLE_MAPPING, STATISTIC_CHOICES, GRANULARITY_CHOICES,
DURATION_GRANULARITY_CHOICES_MAPPING
)
from ..constants.instances import INSTANCE_MONITORING_CHARTS_LIST
from ..forms.buckets import CreateBucketForm
from ..forms.images import ImagesFiltersForm
from ..forms.instances import (
InstanceForm, AttachVolumeForm, DetachVolumeForm, LaunchInstanceForm, LaunchMoreInstancesForm,
RebootInstanceForm, StartInstanceForm, StopInstanceForm, TerminateInstanceForm, InstanceCreateImageForm,
InstancesFiltersForm, InstanceTypeForm, InstanceMonitoringForm,
AssociateIpToInstanceForm, DisassociateIpFromInstanceForm)
from ..forms import ChoicesManager, GenerateFileForm
from ..forms.keypairs import KeyPairForm
from ..forms.securitygroups import SecurityGroupForm
from ..i18n import _
from ..models import Notification
from ..models.alarms import Alarm
from ..views import BaseView, LandingPageView, TaggedItemView, BlockDeviceMappingItemView, JSONResponse
from ..views.images import ImageView
from ..views.roles import RoleView
from ..views.securitygroups import SecurityGroupsView
from . import boto_error_handler
from . import guess_mimetype_from_buffer
from ..layout import __version__ as curr_version
class BaseInstanceView(BaseView):
    """Base class for instance-related views"""

    def __init__(self, request):
        super(BaseInstanceView, self).__init__(request)
        # EC2 and VPC connections shared by all instance views.
        self.conn = self.get_connection()
        self.vpc_conn = self.get_connection(conn_type='vpc')
        self.is_vpc_supported = BaseView.is_vpc_supported(request)

    def get_instance(self, instance_id=None, reservations=None):
        """Fetch one instance (default id from the route matchdict).

        Annotates it with reservation_id, owner_id, platform, vpc_name and
        instance_profile_id. Returns None when not found or on BotoServerError.
        """
        instance_id = instance_id or self.request.matchdict.get('id')
        if instance_id:
            try:
                reservations_list = reservations or self.conn.get_all_reservations(instance_ids=[instance_id])
                reservation = reservations_list[0] if reservations_list else None
                if reservation:
                    instance = reservation.instances[0]
                    instance.reservation_id = reservation.id
                    instance.owner_id = reservation.owner_id
                    # boto reports platform only for Windows; default to linux.
                    if instance.platform is None:
                        instance.platform = _(u"linux")
                    if instance.vpc_id:
                        vpc = self.vpc_conn.get_all_vpcs(vpc_ids=[instance.vpc_id])[0]
                        instance.vpc_name = TaggedItemView.get_display_name(vpc, escapebraces=True)
                    else:
                        instance.vpc_name = ''
                    instance.instance_profile_id = None
                    if instance.instance_profile is not None and len(instance.instance_profile.keys()) > 0:
                        instance.instance_profile_id = instance.instance_profile.keys()[0]
                    return instance
            except BotoServerError:
                # Treat lookup failures as "not found".
                pass
        return None

    def get_image(self, instance=None, image_id=None):
        """Fetch the image for *instance* (or an explicit/request-supplied image_id).

        Returns the image annotated with platform_name, or None.
        """
        image_id = instance.image_id if instance else image_id
        if image_id is None:
            image_id = self.request.matchdict.get('image_id') or self.request.params.get('image_id')
        if self.conn and image_id:
            try:
                image = self.conn.get_image(image_id)
                if image:
                    platform = ImageView.get_platform(image)
                    image.platform_name = ImageView.get_platform_name(platform)
                    return image
            except BotoServerError:
                pass
        return None

    def get_security_groups(self):
        """Return all security groups, or [] without a connection."""
        if self.conn:
            with boto_error_handler(self.request, self.location):
                return self.conn.get_all_security_groups()
        return []

    def get_securitygroups_rules(self, securitygroups):
        """Map security group id -> combined inbound (and, for VPC, outbound) rules."""
        rules_dict = {}
        for security_group in securitygroups:
            rules = SecurityGroupsView.get_rules(security_group.rules)
            # Egress rules only exist for VPC security groups.
            if security_group.vpc_id is not None:
                rules_egress = SecurityGroupsView.get_rules(security_group.rules_egress, rule_type='outbound')
                rules = rules + rules_egress
            rules_dict[security_group.id] = rules
        return rules_dict

    def get_ip_address(self, ip_address):
        """Return the elastic IP object for *ip_address*, or [] when not found."""
        ip_addresses = self.conn.get_all_addresses(addresses=[ip_address]) if self.conn else []
        return ip_addresses[0] if ip_addresses else []

    def get_vpc_subnet_display(self, subnet_id, vpc_subnet_list=None):
        """Return 'cidr (subnet-id)' for display; uses the prefetched list when given."""
        if self.vpc_conn and subnet_id:
            cidr_block = ''
            if vpc_subnet_list:
                for vpc in vpc_subnet_list:
                    if vpc.id == subnet_id:
                        cidr_block = vpc.cidr_block
            else:
                with boto_error_handler(self.request):
                    vpc_subnet = self.vpc_conn.get_all_subnets(subnet_ids=[subnet_id])
                    if vpc_subnet and vpc_subnet[0].cidr_block:
                        cidr_block = vpc_subnet[0].cidr_block
            if cidr_block:
                return u"{0} ({1})".format(cidr_block, subnet_id)
        return ''

    def get_monitoring_state(self, instance=None):
        """Return the cloud-appropriate monitoring state label for *instance*."""
        if not instance:
            return False
        if self.cloud_type == 'euca':
            return instance.monitoring_state.capitalize()
        if self.cloud_type == 'aws':
            return _(u'Detailed') if instance.monitoring_state == 'enabled' else _(u'Basic')

    def get_monitoring_tab_title(self, instance=None):
        """Return the monitoring tab title (AWS distinguishes detailed vs basic)."""
        if self.cloud_type == 'euca':
            return _(u'Monitoring')
        if self.cloud_type == 'aws':
            return _(u'Detailed Monitoring') if instance.monitoring_state == 'enabled' else _(u'Basic Monitoring')
class InstancesView(LandingPageView, BaseInstanceView):
    """Landing page for instances plus the POST handlers for bulk
    start/stop/reboot/terminate and elastic-IP (dis)association actions."""

    def __init__(self, request):
        super(InstancesView, self).__init__(request)
        self.title_parts = [_(u'Instances')]
        self.initial_sort_key = '-launch_time'
        self.prefix = '/instances'
        self.json_items_endpoint = self.get_json_endpoint('instances_json')
        self.location = self.get_redirect_location('instances')
        # One form instance per action; each validates its own CSRF token.
        self.start_form = StartInstanceForm(self.request, formdata=self.request.params or None)
        self.stop_form = StopInstanceForm(self.request, formdata=self.request.params or None)
        self.reboot_form = RebootInstanceForm(self.request, formdata=self.request.params or None)
        self.terminate_form = TerminateInstanceForm(self.request, formdata=self.request.params or None)
        with boto_error_handler(self.request, self.location):
            # Needs a connection to populate the IP choices, hence the handler.
            self.associate_ip_form = AssociateIpToInstanceForm(
                self.request, conn=self.conn, formdata=self.request.params or None)
        self.disassociate_ip_form = DisassociateIpFromInstanceForm(self.request, formdata=self.request.params or None)
        self.enable_smart_table = True  # Enables sortable tables via macros.pt
        controller_options_json = BaseView.escape_json(json.dumps({
            'addresses_json_items_endpoint': self.request.route_path('ipaddresses_json'),
            'roles_json_items_endpoint': self.request.route_path('instances_roles_json'),
            'cloud_type': self.cloud_type,
        }))
        self.render_dict = dict(
            prefix=self.prefix,
            initial_sort_key=self.initial_sort_key,
            start_form=self.start_form,
            stop_form=self.stop_form,
            reboot_form=self.reboot_form,
            terminate_form=self.terminate_form,
            associate_ip_form=self.associate_ip_form,
            disassociate_ip_form=self.disassociate_ip_form,
            is_vpc_supported=self.is_vpc_supported,
            controller_options_json=controller_options_json,
        )

    @view_config(route_name='instances', renderer='../templates/instances/instances.pt')
    def instances_landing(self):
        """Render the instances landing page with its filter and sort options."""
        filter_keys = [
            'id', 'name', 'image_id', 'instance_type', 'ip_address', 'key_name', 'placement',
            'root_device', 'security_groups', 'state', 'tags', 'roles', 'vpc_id', 'subnet_id']
        # filter_keys are passed to client-side filtering in search box
        self.filter_keys = filter_keys
        # sort_keys are passed to sorting drop-down
        self.sort_keys = [
            dict(key='launch_time', name=_(u'Launch time: Oldest to Newest')),
            dict(key='-launch_time', name=_(u'Launch time: Newest to Oldest')),
            dict(key='id', name=_(u'Instance ID')),
            dict(key='name', name=_(u'Instance name: A to Z')),
            dict(key='-name', name=_(u'Instance name: Z to A')),
            dict(key='placement', name=_(u'Availability zone')),
            dict(key='key_name', name=_(u'Key pair')),
        ]
        autoscale_conn = self.get_connection(conn_type='autoscale')
        iam_conn = None
        if BaseView.has_role_access(self.request):
            iam_conn = self.get_connection(conn_type='iam')
        vpc_conn = self.get_connection(conn_type='vpc')
        filters_form = InstancesFiltersForm(
            self.request, ec2_conn=self.conn, autoscale_conn=autoscale_conn,
            iam_conn=iam_conn, vpc_conn=vpc_conn,
            cloud_type=self.cloud_type, formdata=self.request.params or None)
        search_facets = filters_form.facets
        # Hide facets the current user/cloud cannot use.
        if not BaseView.has_role_access(self.request):
            del filters_form.roles
        if not self.is_vpc_supported:
            del filters_form.vpc_id
            del filters_form.subnet_id
        self.render_dict.update(dict(
            filter_keys=self.filter_keys,
            search_facets=BaseView.escape_json(json.dumps(search_facets)),
            sort_keys=self.sort_keys,
            json_items_endpoint=self.json_items_endpoint,
            filters_form=filters_form,
        ))
        return self.render_dict

    @view_config(route_name='instances_start', request_method='POST')
    def instances_start(self):
        """Start one or more instances (comma-separated 'instance_id' param)."""
        instance_id_param = self.request.params.get('instance_id')
        instance_ids = [instance_id.strip() for instance_id in instance_id_param.split(',')]
        if self.start_form.validate():
            with boto_error_handler(self.request, self.location):
                self.log_request(_(u"Starting instances {0}").format(instance_id_param))
                # Can only start an instance if it has a volume attached
                started = self.conn.start_instances(instance_ids=instance_ids)
                if len(instance_ids) == 1:
                    msg = _(u'Successfully sent start instance request. It may take a moment to start the instance.')
                else:
                    prefix = _(u'Successfully sent request to start the following instances:')
                    msg = u'{0} {1}'.format(prefix, ', '.join(instance_ids))
                if started:
                    self.request.session.flash(msg, queue=Notification.SUCCESS)
                else:
                    msg = _(u'Unable to start instances')
                    self.request.session.flash(msg, queue=Notification.ERROR)
        else:
            msg = _(u'Unable to start instance')
            self.request.session.flash(msg, queue=Notification.ERROR)
        return HTTPFound(location=self.location)

    @view_config(route_name='instances_stop', request_method='POST')
    def instances_stop(self):
        """Stop one or more instances (comma-separated 'instance_id' param)."""
        instance_id_param = self.request.params.get('instance_id')
        instance_ids = [instance_id.strip() for instance_id in instance_id_param.split(',')]
        if self.stop_form.validate():
            self.log_request(_(u"Stopping instance(s) {0}").format(instance_id_param))
            with boto_error_handler(self.request, self.location):
                stopped = self.conn.stop_instances(instance_ids=instance_ids)
                if len(instance_ids) == 1:
                    msg = _(u'Successfully sent stop instance request. It may take a moment to stop the instance.')
                else:
                    prefix = _(u'Successfully sent request to stop the following instances:')
                    msg = u'{0} {1}'.format(prefix, ', '.join(instance_ids))
                if stopped:
                    self.request.session.flash(msg, queue=Notification.SUCCESS)
                else:
                    msg = _(u'Unable to stop the instance(s).')
                    self.request.session.flash(msg, queue=Notification.ERROR)
        else:
            msg = _(u'Unable to stop instance')
            self.request.session.flash(msg, queue=Notification.ERROR)
        return HTTPFound(location=self.location)

    @view_config(route_name='instances_reboot', request_method='POST')
    def instances_reboot(self):
        """Reboot one or more instances (comma-separated 'instance_id' param)."""
        instance_id_param = self.request.params.get('instance_id')
        instance_ids = [instance_id.strip() for instance_id in instance_id_param.split(',')]
        if self.reboot_form.validate():
            with boto_error_handler(self.request, self.location):
                self.log_request(_(u"Rebooting instance(s) {0}").format(instance_id_param))
                rebooted = self.conn.reboot_instances(instance_ids=instance_ids)
                if len(instance_ids) == 1:
                    msg = _(u'Successfully sent reboot request. It may take a moment to reboot the instance.')
                else:
                    prefix = _(u'Successfully sent request to reboot the following instances:')
                    msg = u'{0} {1}'.format(prefix, ', '.join(instance_ids))
                if rebooted:
                    self.request.session.flash(msg, queue=Notification.SUCCESS)
                else:
                    msg = _(u'Unable to reboot the instance.')
                    self.request.session.flash(msg, queue=Notification.ERROR)
        else:
            msg = _(u'Unable to reboot instance(s)')
            self.request.session.flash(msg, queue=Notification.ERROR)
        return HTTPFound(location=self.location)

    @view_config(route_name='instances_terminate', request_method='POST')
    def instances_terminate(self):
        """Terminate one or more instances; returns JSON for XHR callers."""
        instance_id_param = self.request.params.get('instance_id')
        instance_ids = [instance_id.strip() for instance_id in instance_id_param.split(',')]
        if self.terminate_form.validate():
            with boto_error_handler(self.request, self.location):
                self.log_request(_(u"Terminating instance {0}").format(instance_id_param))
                self.conn.terminate_instances(instance_ids=instance_ids)
                if len(instance_ids) == 1:
                    msg = _(
                        u'Successfully sent terminate request. It may take a moment to shut down the instance(s).')
                else:
                    prefix = _(u'Successfully sent request to terminate the following instances:')
                    msg = u'{0} {1}'.format(prefix, ', '.join(instance_ids))
                if self.request.is_xhr:
                    return JSONResponse(status=200, message=msg)
                else:
                    self.request.session.flash(msg, queue=Notification.SUCCESS)
        else:
            msg = _(u'Unable to terminate instance(s)')
            self.request.session.flash(msg, queue=Notification.ERROR)
        return HTTPFound(location=self.location)

    @view_config(route_name='instances_associate', request_method='POST')
    def instances_associate_ip_address(self):
        """Associate an elastic IP with an instance (VPC path uses allocation_id)."""
        instance_id = self.request.params.get('instance_id')
        if self.associate_ip_form.validate():
            with boto_error_handler(self.request, self.location):
                new_ip = self.request.params.get('ip_address')
                self.log_request(_(u"Associating IP {0} with instances {1}").format(new_ip, instance_id))
                address = self.get_ip_address(new_ip)
                # VPC addresses must be associated by allocation id.
                if address and address.allocation_id:
                    self.conn.associate_address(instance_id, new_ip, allocation_id=address.allocation_id)
                else:
                    self.conn.associate_address(instance_id, new_ip)
                msg = _(u'Successfully associated the IP to the instance.')
                self.request.session.flash(msg, queue=Notification.SUCCESS)
                return HTTPFound(location=self.location)
        msg = _(u'Failed to associate the IP address to the instance.')
        self.request.session.flash(msg, queue=Notification.ERROR)
        return HTTPFound(location=self.location)

    @view_config(route_name='instances_disassociate', request_method='POST')
    def instances_disassociate_ip_address(self):
        """Disassociate one or more elastic IPs (comma-separated 'ip_address' param)."""
        if self.disassociate_ip_form.validate():
            ip_address_param = self.request.params.get('ip_address')
            ip_addresses = [ip_address.strip() for ip_address in ip_address_param.split(',')]
            with boto_error_handler(self.request, self.location):
                for ip_address in ip_addresses:
                    self.log_request(_(u"Disassociating IP {0}").format(ip_address))
                    address = self.get_ip_address(ip_address)
                    # VPC addresses must be disassociated by association id.
                    if address and address.association_id:
                        self.conn.disassociate_address(ip_address, association_id=address.association_id)
                    else:
                        self.conn.disassociate_address(ip_address)
                if len(ip_addresses) == 1:
                    msg = _(u'Successfully disassociated the IP from the instance.')
                else:
                    prefix = _(u'Successfully sent request to disassociate the follow IP addresses:')
                    msg = u'{0} {1}'.format(prefix, ', '.join(ip_addresses))
                self.request.session.flash(msg, queue=Notification.SUCCESS)
                return HTTPFound(location=self.location)
        msg = _(u'Failed to disassociate the IP address from the instance.')
        self.request.session.flash(msg, queue=Notification.ERROR)
        return HTTPFound(location=self.location)
class InstancesJsonView(LandingPageView, BaseInstanceView):
    """JSON endpoints backing the instances landing page (item list and roles)."""

    def __init__(self, request):
        super(InstancesJsonView, self).__init__(request)
        self.conn = self.get_connection()
        self.vpc_conn = self.get_connection(conn_type='vpc')
        self.cw_conn = self.get_connection(conn_type='cloudwatch')
        # Prefetched once; get_vpc_by_id() searches this list.
        self.vpcs = self.get_all_vpcs()

    @view_config(route_name='instances_json', renderer='json', request_method='POST')
    def instances_json(self):
        """Return the filtered instance list as JSON dicts for the smart table."""
        if not (self.is_csrf_valid()):
            return JSONResponse(status=400, message="missing CSRF token")
        # Prefetch related collections to avoid per-instance API calls below.
        vpc_subnets = self.vpc_conn.get_all_subnets()
        keypairs = self.get_all_keypairs()
        security_groups = self.get_all_security_groups()
        # Get alarms for instances and build a list of instance ids to optimize alarm status fetch
        alarms = [alarm for alarm in self.cw_conn.describe_alarms() if 'InstanceId' in alarm.dimensions]
        alarm_resource_ids = set(list(
            chain.from_iterable([chain.from_iterable(alarm.dimensions.values()) for alarm in alarms])
        ))
        instances = []
        filters = {}
        # Translate request params into server-side EC2 filters.
        availability_zone_param = self.request.params.getall('availability_zone')
        if availability_zone_param:
            filters.update({'availability-zone': availability_zone_param})
        instance_state_param = self.request.params.getall('state')
        if instance_state_param:
            filters.update({'instance-state-name': instance_state_param})
        instance_type_param = self.request.params.getall('instance_type')
        if instance_type_param:
            filters.update({'instance-type': instance_type_param})
        security_group_param = self.request.params.getall('security_group')
        if security_group_param:
            filters.update({'group-name': [self.unescape_braces(sg) for sg in security_group_param]})
        root_device_type_param = self.request.params.getall('root_device_type')
        if root_device_type_param:
            filters.update({'root-device-type': root_device_type_param})
        # Don't filter by these request params in Python, as they're included in the "filters" params sent to the CLC
        # Note: the choices are from attributes in InstancesFiltersForm
        ignore_params = [
            'availability_zone', 'instance_type', 'state', 'security_group',
            'scaling_group', 'root_device_type', 'roles']
        filtered_items = self.filter_items(self.get_items(filters=filters), ignore=ignore_params)
        # Scaling group and role filters have no EC2 filter; apply in Python.
        if self.request.params.get('scaling_group'):
            filtered_items = self.filter_by_scaling_group(filtered_items)
        if self.request.params.get('roles'):
            filtered_items = self.filter_by_roles(filtered_items)
        transitional_states = ['pending', 'stopping', 'shutting-down']
        elastic_ips = [ip.public_ip for ip in self.conn.get_all_addresses()]
        for instance in filtered_items:
            is_transitional = instance.state in transitional_states
            security_groups_array = sorted({
                'name': group.name,
                'id': group.id,
                'rules_count': self.get_security_group_rules_count_by_id(security_groups, group.id)
            } for group in instance.groups)
            security_group_names = [group.name for group in instance.groups]  # Needed for sortable tables
            if instance.platform is None:
                instance.platform = _(u"linux")
            has_elastic_ip = instance.ip_address in elastic_ips
            exists_key = True if self.get_keypair_by_name(keypairs, instance.key_name) else False
            sortable_ip = self.get_sortable_ip(instance.ip_address)
            alarm_status = ''
            if instance.id in alarm_resource_ids:
                alarm_status = Alarm.get_resource_alarm_status(instance.id, alarms)
            vpc_subnet_display = self.get_vpc_subnet_display(
                instance.subnet_id, vpc_subnet_list=vpc_subnets) if instance.subnet_id else ''
            sortable_subnet_zone = "{0}{1}{2}".format(vpc_subnet_display, instance.vpc_name, instance.placement)
            instances.append(dict(
                id=instance.id,
                name=TaggedItemView.get_display_name(instance, escapebraces=False),
                instance_type=instance.instance_type,
                image_id=instance.image_id,
                ip_address=instance.ip_address,
                sortable_ip=sortable_ip,
                has_elastic_ip=has_elastic_ip,
                public_dns_name=instance.public_dns_name,
                launch_time=instance.launch_time,
                availability_zone=instance.placement,
                platform=instance.platform,
                root_device_type=instance.root_device_type,
                security_groups=security_groups_array,
                sortable_secgroups=','.join(security_group_names),
                sortable_subnet_zone=sortable_subnet_zone,
                key_name=instance.key_name,
                exists_key=exists_key,
                vpc_name=instance.vpc_name,
                subnet_id=instance.subnet_id if instance.subnet_id else None,
                vpc_subnet_display=vpc_subnet_display,
                status=instance.state,
                alarm_status=alarm_status,
                tags=TaggedItemView.get_tags_display(instance.tags),
                transitional=is_transitional,
                running_create=True if instance.tags.get('ec_bundling') else False,
                scaling_group=instance.tags.get('aws:autoscaling:groupName')
            ))
        # Resolve image names in one batched call, then attach to each row.
        image_ids = [i['image_id'] for i in instances]
        image_ids = list(set(image_ids))
        images = self.conn.get_all_images(filters={'image-id': image_ids})
        for instance in instances:
            image = self.get_image_by_id(images, instance['image_id'])
            image_name = None
            if image:
                image_name = u'{0}{1}'.format(
                    image.name if image.name else image.id,
                    u' ({0})'.format(image.id) if image.name else ''
                )
            instance['image_name'] = image_name
        return dict(results=instances)

    @view_config(route_name='instances_roles_json', renderer='json', request_method='GET')
    def instances_roles_json(self):
        """Map instance id -> IAM role name for instances with a profile."""
        instances = {}
        iam_conn = self.get_connection(conn_type='iam')
        result = iam_conn.list_instance_profiles()
        instance_profiles_list = result.list_instance_profiles_response.list_instance_profiles_result.instance_profiles
        for item in self.get_items():
            if item.instance_profile:
                # Profile name is the last segment of the ARN.
                arn = item.instance_profile['arn']
                profile_name = arn[(arn.rindex('/') + 1):]
                # look up profile in list
                for profile in instance_profiles_list:
                    if profile.instance_profile_name == profile_name:
                        instances[item.id] = profile.roles.role_name
        return dict(results=instances)

    def get_items(self, filters=None):
        """Return all instances (flattened from reservations), annotated with vpc_name."""
        if self.conn:
            instances = []
            with boto_error_handler(self.request):
                for reservation in self.conn.get_all_reservations(filters=filters):
                    for instance in reservation.instances:
                        if instance.vpc_id:
                            vpc = self.get_vpc_by_id(instance.vpc_id)
                            instance.vpc_name = TaggedItemView.get_display_name(vpc)
                        else:
                            instance.vpc_name = ''
                        instances.append(instance)
            return instances
        return []

    def get_all_vpcs(self):
        """Return all VPCs, or [] without a VPC connection."""
        return self.vpc_conn.get_all_vpcs() if self.vpc_conn else []

    def get_vpc_by_id(self, vpc_id):
        """Find a VPC in the prefetched list by id (None when absent)."""
        for vpc in self.vpcs:
            if vpc_id == vpc.id:
                return vpc

    def get_all_keypairs(self):
        """Return all key pairs, or [] without a connection."""
        return self.conn.get_all_key_pairs() if self.conn else []

    def get_keypair_by_name(self, keypairs, keypair_name):
        """Find a key pair by name in the prefetched list (None when absent)."""
        for keypair in keypairs:
            if keypair_name == keypair.name:
                return keypair

    def get_all_security_groups(self):
        """Return all security groups, or [] without a connection."""
        return self.conn.get_all_security_groups() if self.conn else []

    def get_security_group_by_id(self, security_groups, id):
        """Find a security group by id in the prefetched list (None when absent)."""
        for sgroup in security_groups:
            if sgroup.id == id:
                return sgroup

    def get_security_group_rules_count_by_id(self, security_groups, id):
        """Return the rule count for a group id, or None when the group is unknown."""
        sgroup = self.get_security_group_by_id(security_groups, id)
        if sgroup:
            return len(sgroup.rules)
        return None

    @staticmethod
    def get_image_by_id(images, image_id):
        """Find an image by id in the prefetched list (None when absent)."""
        if images:
            for image in images:
                if image.id == image_id:
                    return image
        return None

    @staticmethod
    def get_sortable_ip(ip_address):
        """Encode a dotted-quad IP as an integer so rows sort numerically (0 when unset)."""
        if not ip_address:
            return 0
        return long("".join(["{0:08b}".format(int(num)) for num in ip_address.split('.')]), 2)

    def filter_by_scaling_group(self, items):
        """Keep only instances tagged with one of the requested scaling groups."""
        filtered_items = []
        for item in items:
            autoscaling_tag = item.tags.get('aws:autoscaling:groupName')
            if autoscaling_tag:
                for scaling_group in self.request.params.getall('scaling_group'):
                    if autoscaling_tag == self.unescape_braces(scaling_group):
                        filtered_items.append(item)
        return filtered_items

    def filter_by_roles(self, items):
        """Keep only instances whose profile matches one of the requested roles."""
        iam_conn = self.get_connection(conn_type="iam")
        filtered_items = []
        profiles = []
        for role in self.request.params.getall('roles'):
            instance_profiles_list = iam_conn.list_instance_profiles(
                path_prefix='/' + role).list_instance_profiles_response.list_instance_profiles_result.instance_profiles
            for profile in instance_profiles_list:
                profiles.append(profile.instance_profile_id)
        for item in items:
            if len(item.instance_profile) > 0 and item.instance_profile['id'] in profiles:
                filtered_items.append(item)
        return filtered_items
class InstanceView(TaggedItemView, BaseInstanceView):
    """Detail page for a single instance.

    Wires the GET detail view plus POST actions: update, start, stop, reboot,
    terminate, elastic-IP (dis)association, and Windows password retrieval.
    Fix in this revision: corrected the 'ramidisk' typo in the update log message.
    """
    VIEW_TEMPLATE = '../templates/instances/instance_view.pt'

    def __init__(self, request, instance=None, **kwargs):
        super(InstanceView, self).__init__(request, **kwargs)
        self.title_parts = [_(u'Instance'), request.matchdict.get('id'), _(u'General')]
        self.conn = self.get_connection()
        self.iam_conn = None
        if BaseView.has_role_access(request):
            self.iam_conn = self.get_connection(conn_type="iam")
        self.instance = instance or self.get_instance()
        self.image = self.get_image(self.instance)
        self.scaling_group = self.get_scaling_group()
        self.instance_form = InstanceForm(
            self.request, instance=self.instance, conn=self.conn, formdata=self.request.params or None)
        self.start_form = StartInstanceForm(self.request, formdata=self.request.params or None)
        self.stop_form = StopInstanceForm(self.request, formdata=self.request.params or None)
        self.reboot_form = RebootInstanceForm(self.request, formdata=self.request.params or None)
        self.terminate_form = TerminateInstanceForm(self.request, formdata=self.request.params or None)
        self.associate_ip_form = AssociateIpToInstanceForm(
            self.request, conn=self.conn, instance=self.instance, formdata=self.request.params or None)
        self.disassociate_ip_form = DisassociateIpFromInstanceForm(self.request, formdata=self.request.params or None)
        self.tagged_obj = self.instance
        self.location = self.get_redirect_location()
        self.instance_name = TaggedItemView.get_display_name(self.instance)
        # NOTE(review): sorting plain dicts relies on Python 2 comparison
        # semantics and raises TypeError on Python 3 — confirm before porting.
        self.security_groups_array = sorted(
            {'name': group.name, 'id': group.id} for group in self.instance.groups) if self.instance else []
        self.security_group_list = self.get_security_group_list()
        self.security_group_list_string = ','.join(
            [sgroup['id'] for sgroup in self.security_group_list]) if self.security_group_list else ''
        self.instance_keypair = self.instance.key_name if self.instance else ''
        self.has_elastic_ip = self.check_has_elastic_ip(self.instance.ip_address) if self.instance else False
        self.role = None
        if BaseView.has_role_access(request) and self.instance and self.instance.instance_profile:
            # Derive the profile name from the trailing segment of its ARN.
            arn = self.instance.instance_profile['arn']
            profile_name = arn[(arn.rindex('/') + 1):]
            inst_profile = self.iam_conn.get_instance_profile(profile_name)
            self.role = inst_profile.roles.member.role_name if inst_profile.roles else None
        self.running_create = False
        if self.instance:
            # An 'ec_bundling' tag marks an image-creation (bundle) in progress.
            self.running_create = True if self.instance.tags.get('ec_bundling') else False
        self.render_dict = dict(
            instance=self.instance,
            instance_name=self.instance_name,
            instance_security_groups=self.security_group_list_string,
            instance_keypair=self.instance_keypair,
            instance_monitoring_state=self.get_monitoring_state(self.instance),
            monitoring_tab_title=self.get_monitoring_tab_title(self.instance),
            security_group_list=self.security_group_list,
            image=self.image,
            scaling_group=self.scaling_group,
            instance_form=self.instance_form,
            start_form=self.start_form,
            stop_form=self.stop_form,
            reboot_form=self.reboot_form,
            terminate_form=self.terminate_form,
            associate_ip_form=self.associate_ip_form,
            disassociate_ip_form=self.disassociate_ip_form,
            has_elastic_ip=self.has_elastic_ip,
            vpc_subnet_display=self.get_vpc_subnet_display(self.instance.subnet_id) if self.instance else None,
            role=self.role,
            running_create=self.running_create,
            controller_options_json=self.get_controller_options_json(),
            is_vpc_supported=self.is_vpc_supported,
        )

    @view_config(route_name='instance_view', renderer=VIEW_TEMPLATE, request_method='GET')
    def instance_view(self):
        """Render the instance detail page; 404 when the instance doesn't exist."""
        if self.instance is None:
            raise HTTPNotFound()
        return self.render_dict

    @view_config(route_name='instance_update', renderer=VIEW_TEMPLATE, request_method='POST')
    def instance_update(self):
        """Handle the edit form: tags always; type/user data/kernel/ramdisk only when stopped."""
        if self.instance and self.instance_form.validate():
            with boto_error_handler(self.request, self.location):
                # Update tags
                self.update_tags()
                # Save Name tag
                name = self.request.params.get('name', '')
                self.update_name_tag(name)
                # Update stopped instance
                if self.instance.state == 'stopped':
                    instance_type = self.request.params.get('instance_type')
                    kernel = self.request.params.get('kernel')
                    ramdisk = self.request.params.get('ramdisk')
                    # Fixed typo in log message: 'ramidisk' -> 'ramdisk'
                    self.log_request(_(u"Updating instance {0} (type={1}, kernel={2}, ramdisk={3})").format(
                        self.instance.id, instance_type, kernel, ramdisk))
                    if self.instance.instance_type != instance_type:
                        self.conn.modify_instance_attribute(self.instance.id, 'instanceType', instance_type)
                    user_data = self.get_user_data()
                    if user_data:
                        self.conn.modify_instance_attribute(self.instance.id, 'userData', base64.b64encode(user_data))
                    if kernel != '' and self.instance.kernel != kernel:
                        self.conn.modify_instance_attribute(self.instance.id, 'kernel', kernel)
                    if ramdisk != '' and self.instance.ramdisk != ramdisk:
                        self.conn.modify_instance_attribute(self.instance.id, 'ramdisk', ramdisk)
                # Start instance if desired
                if self.request.params.get('start_later'):
                    self.log_request(_(u"Starting instance {0}").format(self.instance.id))
                    self.instance.start()
                msg = _(u'Successfully modified instance')
                self.request.session.flash(msg, queue=Notification.SUCCESS)
                return HTTPFound(location=self.location)
        return self.render_dict

    @view_config(route_name='instance_start', renderer=VIEW_TEMPLATE, request_method='POST')
    def instance_start(self):
        """Send a start request for the instance."""
        if self.instance and self.start_form.validate():
            with boto_error_handler(self.request, self.location):
                self.log_request(_(u"Starting instance {0}").format(self.instance.id))
                # Can only start an instance if it has a volume attached
                self.instance.start()
                msg = _(u'Successfully sent start instance request. It may take a moment to start the instance.')
                self.request.session.flash(msg, queue=Notification.SUCCESS)
                return HTTPFound(location=self.location)
        return self.render_dict

    @view_config(route_name='instance_stop', renderer=VIEW_TEMPLATE, request_method='POST')
    def instance_stop(self):
        """Send a stop request; silently skipped for non-EBS instances."""
        if self.instance and self.stop_form.validate():
            # Only EBS-backed instances can be stopped
            if self.instance.root_device_type == 'ebs':
                with boto_error_handler(self.request, self.location):
                    self.log_request(_(u"Stopping instance {0}").format(self.instance.id))
                    self.instance.stop()
                    msg = _(u'Successfully sent stop instance request. It may take a moment to stop the instance.')
                    self.request.session.flash(msg, queue=Notification.SUCCESS)
                    return HTTPFound(location=self.location)
        return self.render_dict

    @view_config(route_name='instance_reboot', renderer=VIEW_TEMPLATE, request_method='POST')
    def instance_reboot(self):
        """Send a reboot request; flashes an error if the API reports failure."""
        location = self.request.route_path('instance_view', id=self.instance.id)
        if self.instance and self.reboot_form.validate():
            with boto_error_handler(self.request, self.location):
                self.log_request(_(u"Rebooting instance {0}").format(self.instance.id))
                rebooted = self.instance.reboot()
                msg = _(u'Successfully sent reboot request. It may take a moment to reboot the instance.')
                self.request.session.flash(msg, queue=Notification.SUCCESS)
                if not rebooted:
                    msg = _(u'Unable to reboot the instance.')
                    self.request.session.flash(msg, queue=Notification.ERROR)
                return HTTPFound(location=location)
        return self.render_dict

    @view_config(route_name='instance_terminate', renderer=VIEW_TEMPLATE, request_method='POST')
    def instance_terminate(self):
        """Send a terminate request for the instance."""
        if self.instance and self.terminate_form.validate():
            with boto_error_handler(self.request, self.location):
                self.log_request(_(u"Terminating instance {0}").format(self.instance.id))
                self.instance.terminate()
                msg = _(
                    u'Successfully sent terminate instance request. It may take a moment to shut down the instance.')
                self.request.session.flash(msg, queue=Notification.SUCCESS)
                return HTTPFound(location=self.location)
        return self.render_dict

    @view_config(route_name='instance_get_password', request_method='POST', renderer='json')
    def instance_get_password(self):
        """Decrypt and return the instance's password using the supplied private key."""
        if not self.is_csrf_valid():
            return JSONResponse(status=400, message="missing CSRF token")
        instance_id = self.request.matchdict.get('id')
        with boto_error_handler(self.request, self.location):
            try:
                passwd_data = self.conn.get_password_data(instance_id)
                priv_key_string = self.request.params.get('key')
                priv_key_string = base64.b64decode(priv_key_string)
                user_priv_key = RSA.load_key_string(priv_key_string)
                string_to_decrypt = base64.b64decode(passwd_data)
                ret = user_priv_key.private_decrypt(string_to_decrypt, RSA.pkcs1_padding)
                return dict(results=dict(instance=instance_id, password=ret))
            except RSA.RSAError:  # likely, bad key
                return JSONResponse(status=400, message=_(
                    u"There was a problem with the key, please try again, verifying the correct private key is used."))

    @view_config(route_name='instance_associate', renderer=VIEW_TEMPLATE, request_method='POST')
    def instance_associate_ip_address(self):
        """Associate an elastic IP with the instance (VPC-aware via allocation_id)."""
        if self.instance and self.associate_ip_form.validate():
            with boto_error_handler(self.request, self.location):
                new_ip = self.request.params.get('ip_address')
                address = self.get_ip_address(new_ip)
                if address and address.allocation_id:
                    self.conn.associate_address(self.instance.id, new_ip, allocation_id=address.allocation_id)
                else:
                    self.conn.associate_address(self.instance.id, new_ip)
                msg = _(u'Successfully associated the IP to the instance.')
                self.request.session.flash(msg, queue=Notification.SUCCESS)
                return HTTPFound(location=self.location)
        return self.render_dict

    @view_config(route_name='instance_disassociate', renderer=VIEW_TEMPLATE, request_method='POST')
    def instance_disassociate_ip_address(self):
        """Disassociate an elastic IP from the instance (VPC-aware via association_id)."""
        if self.disassociate_ip_form.validate():
            with boto_error_handler(self.request, self.location):
                ip_address = self.request.params.get('ip_address')
                ip_addresses = self.conn.get_all_addresses(addresses=[ip_address])
                elastic_ip = ip_addresses[0] if ip_addresses else None
                if elastic_ip and elastic_ip.association_id:
                    self.conn.disassociate_address(elastic_ip.public_ip, association_id=elastic_ip.association_id)
                else:
                    # NOTE(review): elastic_ip may be None here (address lookup
                    # returned nothing), which raises AttributeError — confirm
                    # the desired handling for an unknown address.
                    self.conn.disassociate_address(elastic_ip.public_ip)
                msg = _(u'Successfully disassociated the IP from the instance.')
                self.request.session.flash(msg, queue=Notification.SUCCESS)
                return HTTPFound(location=self.location)
        return self.render_dict

    def get_scaling_group(self):
        """Return the instance's autoscaling group name tag, or None."""
        if self.instance:
            return self.instance.tags.get('aws:autoscaling:groupName')
        return None

    def get_security_group_list(self):
        """Return a list of dicts (id/name/rules/rule_count) for the instance's groups."""
        security_group_list = []
        rules_dict = self.get_securitygroups_rules(self.get_security_groups())
        if self.instance:
            instance_groups = self.instance.groups
            if instance_groups:
                for sgroup in instance_groups:
                    rules = rules_dict[sgroup.id]
                    sgroup_dict = {}
                    sgroup_dict['id'] = sgroup.id
                    sgroup_dict['name'] = sgroup.name
                    sgroup_dict['rules'] = rules
                    sgroup_dict['rule_count'] = len(rules)
                    security_group_list.append(sgroup_dict)
        return security_group_list

    def get_redirect_location(self):
        """Return the instance detail URL, or '' when there is no instance."""
        if self.instance:
            return self.request.route_path('instance_view', id=self.instance.id)
        return ''

    def disassociate_ip_address(self, ip_address=None):
        """Best-effort disassociation of the given elastic IP from this instance."""
        ip_addresses = self.conn.get_all_addresses(addresses=[ip_address])
        elastic_ip = ip_addresses[0] if ip_addresses else None
        if elastic_ip:
            self.log_request(_(u"Disassociating ip {0} from instance {1}").format(ip_address, self.instance.id))
            elastic_ip.disassociate()

    def check_has_elastic_ip(self, ip_address):
        """Return True when ip_address matches one of the account's elastic IPs."""
        has_elastic_ip = False
        elastic_ips = self.conn.get_all_addresses() if self.conn else []
        if ip_address is not None:
            for ip in elastic_ips:
                if ip_address == ip.public_ip:
                    has_elastic_ip = True
        return has_elastic_ip

    def get_controller_options_json(self):
        """Return escaped JSON options consumed by the page's Angular controller."""
        if not self.instance:
            return ''
        return BaseView.escape_json(json.dumps({
            'instance_state_json_url': self.request.route_path('instance_state_json', id=self.instance.id),
            'instance_userdata_json_url': self.request.route_path('instance_userdata_json', id=self.instance.id),
            'instance_ip_address_json_url': self.request.route_path('instance_ip_address_json', id=self.instance.id),
            'instance_console_json_url': self.request.route_path('instance_console_output_json', id=self.instance.id),
            'instance_state': self.instance.state,
            'instance_id': self.instance.id,
            'instance_ip_address': self.instance.ip_address,
            'instance_public_dns': self.instance.public_dns_name,
            'instance_platform': self.instance.platform,
            'instance_security_groups': self.security_groups_array,
            'instance_keypair': self.instance_keypair,
            'has_elastic_ip': self.has_elastic_ip,
        }))
class InstanceStateView(BaseInstanceView):
    """JSON endpoints exposing live data for a single instance.

    Fixes in this revision: corrected copy-pasted docstrings, replaced the
    ``dict.keys()`` membership test with ``in``, and skipped the address
    lookup when no IP is supplied to check_has_elastic_ip.
    """
    def __init__(self, request):
        super(InstanceStateView, self).__init__(request)
        self.request = request
        self.conn = self.get_connection()
        self.instance = self.get_instance()

    @view_config(route_name='instance_state_json', renderer='json', request_method='GET')
    def instance_state_json(self):
        """Return the current instance state."""
        return dict(results=self.instance.state)

    @view_config(route_name='instance_userdata_json', renderer='json', request_method='GET')
    def instance_userdata_json(self):
        """Return the instance's user data, decoded when it looks like text."""
        with boto_error_handler(self.request):
            user_data = self.conn.get_instance_attribute(self.instance.id, 'userData')
            if 'userData' in user_data:
                user_data = user_data['userData']
                unencoded = base64.b64decode(user_data) if user_data is not None else ''
                mime_type = guess_mimetype_from_buffer(unencoded, mime=True)
                if mime_type.find('text') == 0:
                    user_data = unencoded
                else:
                    # Non-text payload: report a descriptive type label, no data.
                    mime_type = guess_mimetype_from_buffer(unencoded)
                    user_data = None
            else:
                user_data = ''
                mime_type = ''
            return dict(results=dict(type=mime_type, data=user_data))

    @view_config(route_name='instance_ip_address_json', renderer='json', request_method='GET')
    def instance_ip_address_json(self):
        """Return the instance's public/private IPs and DNS names."""
        has_elastic_ip = self.check_has_elastic_ip(self.instance.ip_address) if self.instance else False
        ip_address_dict = dict(
            ip_address=self.instance.ip_address,
            public_dns_name=self.instance.public_dns_name,
            private_ip_address=self.instance.private_ip_address,
            private_dns_name=self.instance.private_dns_name,
            has_elastic_ip=has_elastic_ip,
        )
        return ip_address_dict

    @view_config(route_name='instance_nextdevice_json', renderer='json', request_method='GET')
    def instance_nextdevice_json(self):
        """Suggest the next available block-device name for the instance."""
        cloud_type = self.request.session.get('cloud_type')
        if self.instance is not None:
            mappings = self.instance.block_device_mapping
        else:
            # No instance yet (e.g. launch wizard): derive from mappings
            # supplied by the client instead.
            current_mappings = self.request.GET.getall('currentMappings')
            mappings = {}
            for mapping in current_mappings:
                mappings[mapping] = None
        return dict(results=AttachVolumeForm.suggest_next_device_name(cloud_type, mappings))

    @view_config(route_name='instance_console_output_json', renderer='json', request_method='GET')
    def instance_console_output_json(self):
        """Return base64-encoded console output for the instance."""
        with boto_error_handler(self.request):
            output = self.conn.get_console_output(instance_id=self.instance.id)
            return dict(results=base64.b64encode(output.output))

    def check_has_elastic_ip(self, ip_address):
        """Return True when ip_address matches one of the account's elastic IPs."""
        if ip_address is None:
            # Avoid a pointless API round-trip when there is nothing to match.
            return False
        elastic_ips = self.conn.get_all_addresses()
        return any(ip_address == eip.public_ip for eip in elastic_ips)
class InstanceVolumesView(BaseInstanceView):
    """Volumes tab for a single instance: list, attach, and detach volumes.

    Fix in this revision: corrected the 'Dettaching' typo in the detach log message.
    """
    VIEW_TEMPLATE = '../templates/instances/instance_volumes.pt'

    def __init__(self, request):
        super(InstanceVolumesView, self).__init__(request)
        self.title_parts = [_(u'Instance'), request.matchdict.get('id'), _(u'Volumes')]
        self.request = request
        self.conn = self.get_connection()
        # fetching all volumes all the time is inefficient. should re-factor in the future
        self.volumes = []
        self.location = self.request.route_path('instance_volumes', id=self.request.matchdict.get('id'))
        with boto_error_handler(request, self.location):
            self.volumes = self.conn.get_all_volumes()
            self.instance = self.get_instance()
            self.attach_form = AttachVolumeForm(
                self.request, volumes=self.volumes, instance=self.instance, formdata=self.request.params or None)
            self.instance_name = TaggedItemView.get_display_name(self.instance)
        self.detach_form = DetachVolumeForm(self.request, formdata=self.request.params or None)
        self.render_dict = dict(
            instance=self.instance,
            instance_name=self.instance_name,
            monitoring_tab_title=self.get_monitoring_tab_title(self.instance),
            attach_form=self.attach_form,
            detach_form=self.detach_form,
            instance_zone=self.instance.placement,
            controller_options_json=self.get_controller_options_json(),
        )

    def get_controller_options_json(self):
        """Return escaped JSON options consumed by the page's Angular controller."""
        if not self.instance:
            return ''
        return BaseView.escape_json(json.dumps({
            'instance_id': self.instance.id,
            'instance_volumes_json_url': self.request.route_path('instance_volumes_json', id=self.instance.id),
        }))

    @view_config(route_name='instance_volumes', renderer=VIEW_TEMPLATE, request_method='GET')
    def instance_volumes(self):
        """Render the volumes tab; 404 when the instance doesn't exist."""
        if self.instance is None:
            raise HTTPNotFound()
        render_dict = self.render_dict
        render_dict['volumes'] = self.get_attached_volumes()
        return render_dict

    @view_config(route_name='instance_volumes_json', renderer='json', request_method='GET')
    def instance_volumes_json(self):
        """Return JSON rows for volumes in the instance's availability zone."""
        volumes = []
        transitional_states = ['creating', 'deleting', 'attaching', 'detaching']
        with boto_error_handler(self.request, self.location):
            filters = {'availability-zone': self.instance.placement}
            self.volumes = self.conn.get_all_volumes(filters=filters)
        for volume in self.volumes:
            status = volume.status
            attach_status = volume.attach_data.status
            is_transitional = status in transitional_states or attach_status in transitional_states
            is_root_volume = False
            detach_form_action = self.request.route_path(
                'instance_volume_detach', id=self.instance.id, volume_id=volume.id)
            if self.instance.root_device_type == 'ebs' and volume.attach_data.device == self.instance.root_device_name:
                is_root_volume = True  # Note: Check for 'True' when passed to JS via Chameleon template
            volumes.append(dict(
                id=volume.id,
                name=TaggedItemView.get_display_name(volume),
                size=volume.size,
                device=volume.attach_data.device,
                attach_time=volume.attach_data.attach_time,
                attach_instance_id=volume.attach_data.instance_id,
                is_root_volume=is_root_volume,
                status=status,
                attach_status=volume.attach_data.status,
                detach_form_action=detach_form_action,
                transitional=is_transitional,
            ))
        return dict(results=volumes)

    @view_config(route_name='instance_volume_attach', renderer=VIEW_TEMPLATE, request_method='POST')
    def instance_volume_attach(self):
        """Attach the selected volume to this instance at the requested device."""
        if self.attach_form.validate():
            volume_id = self.request.params.get('volume_id')
            device = self.request.params.get('device')
            if self.instance and volume_id and device:
                location = self.request.route_path('instance_volumes', id=self.instance.id)
                with boto_error_handler(self.request, location):
                    self.log_request(_(u"Attaching volume {0} to {1} as {2}").format(
                        volume_id, self.instance.id, device))
                    self.conn.attach_volume(volume_id=volume_id, instance_id=self.instance.id, device=device)
                    msg = _(u'Request successfully submitted.  It may take a moment to attach the volume.')
                    self.request.session.flash(msg, queue=Notification.SUCCESS)
                return HTTPFound(location=location)

    @view_config(route_name='instance_volume_detach', renderer=VIEW_TEMPLATE, request_method='POST')
    def instance_volume_detach(self):
        """Detach the volume named in the URL from this instance."""
        if self.detach_form.validate():
            volume_id = self.request.matchdict.get('volume_id')
            if volume_id:
                location = self.request.route_path('instance_volumes', id=self.instance.id)
                with boto_error_handler(self.request, location):
                    # Fixed typo in log message: 'Dettaching' -> 'Detaching'
                    self.log_request(_(u"Detaching volume {0} from {1}").format(volume_id, self.instance.id))
                    self.conn.detach_volume(volume_id=volume_id)
                    msg = _(u'Request successfully submitted.  It may take a moment to detach the volume.')
                    self.request.session.flash(msg, queue=Notification.SUCCESS)
                return HTTPFound(location=location)

    def get_attached_volumes(self):
        """Return this instance's attached volumes, most recently attached first."""
        volumes = [vol for vol in self.volumes if vol.attach_data.instance_id == self.instance.id]
        # Sort by most recently attached first
        return sorted(volumes, key=attrgetter('attach_data.attach_time'), reverse=True) if volumes else []
class InstanceMonitoringView(BaseInstanceView):
    """CloudWatch monitoring tab for a single instance: chart page plus a
    POST endpoint that toggles monitoring on/off."""
    VIEW_TEMPLATE = '../templates/instances/instance_monitoring.pt'
    def __init__(self, request, instance=None):
        super(InstanceMonitoringView, self).__init__(request)
        self.title_parts = [_(u'Instance'), request.matchdict.get('id'), _(u'Monitoring')]
        self.cw_conn = self.get_connection(conn_type='cloudwatch')
        self.instance_id = self.request.matchdict.get('id')
        self.location = self.request.route_path('instance_monitoring', id=self.instance_id)
        with boto_error_handler(self.request):
            # Note: We're fetching reservations here since calling self.get_instance() in the context manager
            # will return a 500 error instead of invoking the session timeout handler
            reservations = self.conn.get_all_reservations(instance_ids=[self.instance_id]) if self.conn else []
        self.instance = instance or self.get_instance(instance_id=self.instance_id, reservations=reservations)
        self.instance_name = TaggedItemView.get_display_name(self.instance)
        self.monitoring_form = InstanceMonitoringForm(self.request, formdata=self.request.params or None)
        self.monitoring_enabled = self.instance.monitoring_state == 'enabled' if self.instance else False
        self.render_dict = dict(
            instance=self.instance,
            instance_name=self.instance_name,
            monitoring_enabled=self.monitoring_is_enabled(),
            detailed_monitoring_enabled=self.detailed_monitoring_is_enabled(),
            monitoring_form=self.monitoring_form,
            monitoring_tab_title=self.get_monitoring_tab_title(self.instance),
            metric_title=METRIC_TITLE_MAPPING,
            duration_choices=MONITORING_DURATION_CHOICES,
            statistic_choices=STATISTIC_CHOICES,
            controller_options_json=self.get_controller_options_json()
        )
    @view_config(route_name='instance_monitoring', renderer=VIEW_TEMPLATE, request_method='GET')
    def instance_monitoring(self):
        """Render the monitoring tab; 404 when the instance doesn't exist."""
        if self.instance is None:
            raise HTTPNotFound()
        return self.render_dict
    @view_config(route_name='instance_monitoring_update', renderer=VIEW_TEMPLATE, request_method='POST')
    def instance_monitoring_update(self):
        """Toggle instance monitoring: enable when disabled, disable otherwise."""
        if self.monitoring_form.validate():
            if self.instance:
                location = self.request.route_path('instance_monitoring', id=self.instance.id)
                with boto_error_handler(self.request, location):
                    monitoring_state = self.instance.monitoring_state
                    # 'action' is the state being switched TO, used only for logging.
                    action = 'disabled' if monitoring_state == 'enabled' else 'enabled'
                    self.log_request(_(u"Monitoring for instance {0} {1}").format(self.instance.id, action))
                    if monitoring_state == 'disabled':
                        self.conn.monitor_instances([self.instance.id])
                    else:
                        self.conn.unmonitor_instances([self.instance.id])
                    msg = _(
                        u'Request successfully submitted.  It may take a moment for the monitoring status to update')
                    self.request.session.flash(msg, queue=Notification.SUCCESS)
                return HTTPFound(location=location)
    def monitoring_is_enabled(self):
        """Return True if basic monitoring applies (always True on AWS)."""
        if self.cloud_type == 'aws':
            return True
        return self.instance.monitoring_state == 'enabled' if self.instance else False
    def detailed_monitoring_is_enabled(self):
        """Return True if detailed monitoring applies (never on Eucalyptus)."""
        if self.cloud_type == 'euca':
            return False
        return self.instance.monitoring_state == 'enabled' if self.instance else False
    def get_controller_options_json(self):
        """Return escaped JSON options consumed by the monitoring Angular controller."""
        if not self.instance:
            return ''
        return BaseView.escape_json(json.dumps({
            'metric_title_mapping': METRIC_TITLE_MAPPING,
            'charts_list': INSTANCE_MONITORING_CHARTS_LIST,
            'granularity_choices': GRANULARITY_CHOICES,
            'duration_granularities_mapping': DURATION_GRANULARITY_CHOICES_MAPPING,
        }))
class InstanceLaunchView(BaseInstanceView, BlockDeviceMappingItemView):
    """Launch Instance wizard: renders the form (GET) and runs instances (POST)."""
    TEMPLATE = '../templates/instances/instance_launch.pt'
    def __init__(self, request):
        super(InstanceLaunchView, self).__init__(request)
        self.request = request
        self.image = self.get_image()
        self.location = self.request.route_path('instances')
        self.securitygroups = self.get_security_groups()
        self.iam_conn = None
        if BaseView.has_role_access(request):
            self.iam_conn = self.get_connection(conn_type="iam")
        self.vpc_conn = self.get_connection(conn_type='vpc')
        self.launch_form = LaunchInstanceForm(
            self.request, image=self.image, securitygroups=self.securitygroups,
            conn=self.conn, vpc_conn=self.vpc_conn, iam_conn=self.iam_conn, formdata=self.request.params or None)
        self.filters_form = ImagesFiltersForm(
            self.request, cloud_type=self.cloud_type, formdata=self.request.params or None)
        self.keypair_form = KeyPairForm(self.request, formdata=self.request.params or None)
        self.securitygroup_form = SecurityGroupForm(self.request, self.vpc_conn, formdata=self.request.params or None)
        self.generate_file_form = GenerateFileForm(self.request, formdata=self.request.params or None)
        self.owner_choices = self.get_owner_choices()
        controller_options_json = BaseView.escape_json(json.dumps({
            'securitygroups_choices': dict(self.launch_form.securitygroup.choices),
            'keypair_choices': dict(self.launch_form.keypair.choices),
            'role_choices': dict(self.launch_form.role.choices),
            'vpc_subnet_choices': self.get_vpc_subnets(),
            'default_vpc_network': self.get_default_vpc_network(),
            'securitygroups_json_endpoint': self.request.route_path('securitygroups_json'),
            'securitygroups_rules_json_endpoint': self.request.route_path('securitygroups_rules_json'),
            'image_json_endpoint': self.request.route_path('image_json', id='_id_'),
        }))
        self.render_dict = dict(
            image=self.image,
            launch_form=self.launch_form,
            filters_form=self.filters_form,
            keypair_form=self.keypair_form,
            securitygroup_form=self.securitygroup_form,
            generate_file_form=self.generate_file_form,
            owner_choices=self.owner_choices,
            snapshot_choices=self.get_snapshot_choices(),
            security_group_placeholder_text=_(u'Select...'),
            controller_options_json=controller_options_json,
            is_vpc_supported=self.is_vpc_supported,
        )
    @view_config(route_name='instance_create', renderer=TEMPLATE, request_method='GET')
    def instance_create(self):
        """Displays the Launch Instance wizard"""
        return self.render_dict
    @view_config(route_name='instance_launch', renderer=TEMPLATE, request_method='POST')
    def instance_launch(self):
        """Handles the POST from the Launch instanced wizard"""
        if self.launch_form.validate():
            tags_json = self.request.params.get('tags')
            image_id = self.image.id
            num_instances = int(self.request.params.get('number', 1))
            key_name = self.unescape_braces(self.request.params.get('keypair', ''))
            if key_name:
                # Handle "None (advanced)" option if key_name is 'none'
                # NOTE(review): key_name passes through unescape_braces twice
                # (above and here) — confirm unescape_braces is idempotent.
                key_name = None if key_name == 'none' else self.unescape_braces(key_name)
            securitygroup_ids = self.request.params.getall('securitygroup')
            instance_type = self.request.params.get('instance_type', 'm1.small')
            availability_zone = self.request.params.get('zone') or None
            vpc_network = self.request.params.get('vpc_network') or None
            if vpc_network == 'None':
                vpc_network = None
            vpc_subnet = self.request.params.get('vpc_subnet') or None
            associate_public_ip_address = self.request.params.get('associate_public_ip_address')
            if associate_public_ip_address == 'true':
                associate_public_ip_address = True
            elif associate_public_ip_address == 'false':
                associate_public_ip_address = False
            kernel_id = self.request.params.get('kernel_id') or None
            ramdisk_id = self.request.params.get('ramdisk_id') or None
            monitoring_enabled = self.request.params.get('monitoring_enabled') == 'y'
            private_addressing = self.request.params.get('private_addressing') == 'y'
            addressing_type = 'private' if private_addressing else 'public'
            if vpc_network is not None and self.cloud_type == 'euca':
                addressing_type = None  # Don't pass addressing scheme if on Euca VPC
            bdmapping_json = self.request.params.get('block_device_mapping')
            block_device_map = self.get_block_device_map(bdmapping_json)
            role = self.request.params.get('role')
            new_instance_ids = []
            with boto_error_handler(self.request, self.location):
                instance_profile = None
                if BaseView.has_role_access(self.request) and role != '':
                    # need to set up instance profile, add role and supply to run_instances
                    instance_profile = RoleView.get_or_create_instance_profile(self.iam_conn, role)
                self.log_request(_(u"Running instance(s) (num={0}, image={1}, type={2})").format(
                    num_instances, image_id, instance_type))
                # Create base params for run_instances()
                params = dict(
                    min_count=num_instances,
                    max_count=num_instances,
                    key_name=key_name,
                    user_data=self.get_user_data(),
                    addressing_type=addressing_type,
                    instance_type=instance_type,
                    kernel_id=kernel_id,
                    ramdisk_id=ramdisk_id,
                    monitoring_enabled=monitoring_enabled,
                    block_device_map=block_device_map,
                    instance_profile_arn=instance_profile.arn if instance_profile else None,
                )
                if vpc_network is not None:
                    # VPC launch: security groups ride on the network interface.
                    network_interface = NetworkInterfaceSpecification(
                        subnet_id=vpc_subnet,
                        groups=securitygroup_ids,
                        associate_public_ip_address=associate_public_ip_address,
                    )
                    network_interfaces = NetworkInterfaceCollection(network_interface)
                    # Specify VPC setting for the instances
                    params.update(dict(
                        network_interfaces=network_interfaces,
                    ))
                    reservation = self.conn.run_instances(image_id, **params)
                else:
                    # Use the EC2-Classic setting
                    params.update(dict(
                        placement=availability_zone,
                        security_group_ids=securitygroup_ids,
                    ))
                    reservation = self.conn.run_instances(image_id, **params)
                for idx, instance in enumerate(reservation.instances):
                    # Add tags for newly launched instance(s)
                    # Try adding name tag (from collection of name input fields)
                    input_field_name = u'name_{0}'.format(idx)
                    name = self.request.params.get(input_field_name, '').strip()
                    new_instance_ids.append(name or instance.id)
                    if name:
                        instance.add_tag('Name', name)
                    if tags_json:
                        tags = json.loads(tags_json)
                        for tagname, tagvalue in tags.items():
                            instance.add_tag(tagname, tagvalue)
                msg = _(u'Successfully sent launch instances request.  It may take a moment to launch instances ')
                msg += ', '.join(new_instance_ids)
                self.request.session.flash(msg, queue=Notification.SUCCESS)
            return HTTPFound(location=self.location)
        else:
            self.request.error_messages = self.launch_form.get_errors_list()
        return self.render_dict
    def get_securitygroup_id(self, name, vpc_network=None):
        """Return the id of the security group with this name in the given VPC, or None."""
        for security_group in self.securitygroups:
            if security_group.vpc_id == vpc_network and security_group.name == name:
                return security_group.id
        return None
    def get_vpc_subnets(self):
        """Return a list of dicts describing every VPC subnet (for the wizard's choices)."""
        subnets = []
        if self.vpc_conn:
            with boto_error_handler(self.request, self.location):
                vpc_subnets = self.vpc_conn.get_all_subnets()
                for vpc_subnet in vpc_subnets:
                    subnets.append(dict(
                        id=vpc_subnet.id,
                        vpc_id=vpc_subnet.vpc_id,
                        availability_zone=vpc_subnet.availability_zone,
                        state=vpc_subnet.state,
                        cidr_block=vpc_subnet.cidr_block,
                    ))
        return subnets
    def get_default_vpc_network(self):
        """Return the default VPC id for the wizard, or the string 'None'.

        On AWS a session default of 'none'/'None' means EC2-Classic; on
        Eucalyptus the first available VPC is used instead.
        """
        default_vpc = self.request.session.get('default_vpc', [])
        if self.is_vpc_supported:
            if 'none' in default_vpc or 'None' in default_vpc:
                if self.cloud_type == 'aws':
                    return 'None'
                # for euca, return the first vpc on the list
                if self.vpc_conn:
                    with boto_error_handler(self.request):
                        vpc_networks = self.vpc_conn.get_all_vpcs()
                        if vpc_networks:
                            return vpc_networks[0].id
            else:
                return default_vpc[0]
        return 'None'
class InstanceLaunchMoreView(BaseInstanceView, BlockDeviceMappingItemView):
"""Launch more like this instance view"""
TEMPLATE = '../templates/instances/instance_launch_more.pt'
    def __init__(self, request):
        """Gather the source instance, its image, and form state for the
        'launch more like this' page."""
        super(InstanceLaunchMoreView, self).__init__(request)
        self.request = request
        self.iam_conn = None
        if BaseView.has_role_access(request):
            self.iam_conn = self.get_connection(conn_type="iam")
        self.instance = self.get_instance()
        self.instance_name = TaggedItemView.get_display_name(self.instance)
        self.image = self.get_image(instance=self.instance)  # From BaseInstanceView
        self.location = self.request.route_path('instances')
        self.launch_more_form = LaunchMoreInstancesForm(
            self.request, image=self.image, instance=self.instance,
            conn=self.conn, formdata=self.request.params or None)
        self.role = None
        self.associate_public_ip_address = 'Disabled'
        # NOTE(review): presence of an 'association' attribute on the first
        # interface is taken to mean a public IP was associated — confirm this
        # holds for all boto interface objects.
        if self.instance.interfaces:
            if self.instance.interfaces[0] and hasattr(self.instance.interfaces[0], 'association'):
                self.associate_public_ip_address = 'Enabled'
        if BaseView.has_role_access(request) and self.instance.instance_profile:
            # Derive the profile name from the trailing segment of its ARN.
            arn = self.instance.instance_profile['arn']
            profile_name = arn[(arn.rindex('/') + 1):]
            inst_profile = self.iam_conn.get_instance_profile(profile_name)
            self.role = inst_profile.roles.member.role_name
        self.render_dict = dict(
            image=self.image,
            instance=self.instance,
            instance_name=self.instance_name,
            associate_public_ip_address=self.associate_public_ip_address,
            launch_more_form=self.launch_more_form,
            snapshot_choices=self.get_snapshot_choices(),
            vpc_subnet_display=self.get_vpc_subnet_display(self.instance.subnet_id) if self.instance else None,
            is_vpc_supported=self.is_vpc_supported,
            role=self.role,
        )
@view_config(route_name='instance_more', renderer=TEMPLATE, request_method='GET')
def instance_more(self):
return self.render_dict
@view_config(route_name='instance_more_launch', renderer=TEMPLATE, request_method='POST')
def instance_more_launch(self):
"""Handles the POST from the Launch more instances like this form"""
if self.launch_more_form.validate():
image_id = self.image.id
source_instance_tags = self.instance.tags
key_name = self.instance.key_name
num_instances = int(self.request.params.get('number', 1))
security_groups = [group.id for group in self.instance.groups]
instance_type = self.instance.instance_type
availability_zone = self.instance.placement
vpc_network = self.instance.vpc_id or None
vpc_subnet = self.instance.subnet_id or None
if self.associate_public_ip_address == 'Enabled':
associate_public_ip_address = True
else:
associate_public_ip_address = False
kernel_id = self.request.params.get('kernel_id') or None
ramdisk_id = self.request.params.get('ramdisk_id') or None
monitoring_enabled = self.request.params.get('monitoring_enabled') == 'y'
private_addressing = self.request.params.get('private_addressing') == 'y'
addressing_type = 'private' if private_addressing else 'public'
if vpc_network is not None and self.cloud_type == 'euca':
addressing_type = None # Don't pass addressing scheme if on Euca VPC
if self.cloud_type == 'aws': # AWS only supports public, so enforce that here
addressing_type = 'public'
bdmapping_json = self.request.params.get('block_device_mapping')
block_device_map = self.get_block_device_map(bdmapping_json)
new_instance_ids = []
with boto_error_handler(self.request, self.location):
self.log_request(_(u"Running instance(s) (num={0}, image={1}, type={2})").format(
num_instances, image_id, instance_type))
instance_profile_arn = self.instance.instance_profile['arn'] if self.instance.instance_profile else None
# Create base params for run_instances()
params = dict(
min_count=num_instances,
max_count=num_instances,
key_name=key_name,
user_data=self.get_user_data(),
addressing_type=addressing_type,
instance_type=instance_type,
kernel_id=kernel_id,
ramdisk_id=ramdisk_id,
monitoring_enabled=monitoring_enabled,
block_device_map=block_device_map,
instance_profile_arn=instance_profile_arn,
)
if vpc_network is not None:
network_interface = NetworkInterfaceSpecification(
subnet_id=vpc_subnet,
groups=security_groups,
associate_public_ip_address=associate_public_ip_address,
)
network_interfaces = NetworkInterfaceCollection(network_interface)
# Use the EC2-VPC setting
params.update(dict(
network_interfaces=network_interfaces,
))
reservation = self.conn.run_instances(image_id, **params)
else:
# Use the EC2-Classic setting
params.update(dict(
placement=availability_zone,
security_group_ids=security_groups,
))
reservation = self.conn.run_instances(image_id, **params)
for idx, instance in enumerate(reservation.instances):
# Add tags for newly launched instance(s)
# Try adding name tag (from collection of name input fields)
input_field_name = u'name_{0}'.format(idx)
name = self.request.params.get(input_field_name, '').strip()
new_instance_ids.append(name or instance.id)
if name:
instance.add_tag('Name', name)
if source_instance_tags:
for tagname, tagvalue in source_instance_tags.items():
# Don't copy 'Name' tag, and avoid tags that start with 'aws:' and 'euca:'
if all([tagname != 'Name', not tagname.startswith('aws:'),
not tagname.startswith('euca:')]):
instance.add_tag(tagname, tagvalue)
msg = _(u'Successfully sent launch instances request. It may take a moment to launch instances ')
msg += ', '.join(new_instance_ids)
self.request.session.flash(msg, queue=Notification.SUCCESS)
return HTTPFound(location=self.location)
else:
self.request.error_messages = self.launch_more_form.get_errors_list()
return self.render_dict
class InstanceCreateImageView(BaseInstanceView, BlockDeviceMappingItemView):
    """Create image from an instance view"""
    TEMPLATE = '../templates/instances/instance_create_image.pt'

    def __init__(self, request):
        """Assemble the instance, its image, and the create-image/bucket
        forms used by the template."""
        super(InstanceCreateImageView, self).__init__(request)
        self.request = request
        self.ec2_conn = self.get_connection()
        self.s3_conn = self.get_connection(conn_type='s3')
        self.instance = self.get_instance()
        self.instance_name = TaggedItemView.get_display_name(self.instance)
        self.location = self.request.route_path('instances')
        self.image = self.get_image(instance=self.instance)  # From BaseInstanceView
        self.create_image_form = InstanceCreateImageForm(
            self.request, instance=self.instance, ec2_conn=self.ec2_conn, s3_conn=self.s3_conn,
            formdata=self.request.params or None)
        # Fall back to the raw image id when the image object is unavailable.
        if self.image is not None:
            image_id = self.image.id
        else:
            image_id = self.instance.image_id
        self.create_image_form.description.data = _(u"created from instance {0} running image {1}").format(
            self.instance_name, image_id)
        self.create_bucket_form = CreateBucketForm(self.request, formdata=self.request.params or None)
        controller_options_json = BaseView.escape_json(json.dumps({
            'bucket_choices': dict(self.create_image_form.s3_bucket.choices),
        }))
        # EBS-backed instances: resolve attached volumes for display.
        volumes = []
        if self.instance.root_device_type == 'ebs':
            bdm_map = self.instance.block_device_mapping or []
            for device_name in bdm_map:
                bdm = bdm_map[device_name]
                volumes.append(bdm.volume_id)
            volumes = self.ec2_conn.get_all_volumes(volumes)
        self.render_dict = dict(
            instance=self.instance,
            instance_name=self.instance_name,
            volumes=volumes,
            image=self.image,
            snapshot_choices=self.get_snapshot_choices(),
            create_image_form=self.create_image_form,
            create_bucket_form=self.create_bucket_form,
            controller_options_json=controller_options_json,
        )

    @view_config(route_name='instance_create_image', renderer=TEMPLATE, request_method='GET')
    def instance_create_image_view(self):
        """GET: render the create-image form."""
        return self.render_dict

    @view_config(route_name='instance_create_image', renderer=TEMPLATE, request_method='POST')
    def instance_create_image_post(self):
        """Handles the POST from the create image from instance form"""
        is_ebs = True if self.instance.root_device_type == 'ebs' else False
        if is_ebs:  # remove fields not needed so validation passes
            del self.create_image_form.s3_bucket
            del self.create_image_form.s3_prefix
        else:
            del self.create_image_form.no_reboot
        if self.create_image_form.validate():
            instance_id = self.instance.id
            name = self.request.params.get('name')
            description = self.request.params.get('description')
            tags_json = self.request.params.get('tags')
            bdm_json = self.request.params.get('block_device_mapping')
            if not is_ebs:
                # Instance-store image: bundle the instance into S3.
                s3_bucket = self.request.params.get('s3_bucket')
                if s3_bucket:
                    s3_bucket = self.unescape_braces(s3_bucket)
                s3_prefix = self.request.params.get('s3_prefix', '')
                with boto_error_handler(self.request, self.location):
                    self.log_request(_(u"Bundling instance {0}").format(instance_id))
                    account = self.request.session['account']
                    username = self.request.session['username']
                    password = self.request.params.get('password')
                    auth = self.get_euca_authenticator()
                    msg = None
                    try:
                        # Re-authenticate to obtain temporary credentials for
                        # the S3 upload policy below.
                        creds = auth.authenticate(
                            account=account, user=username, passwd=password,
                            timeout=8, duration=86400)  # 24 hours
                    except HTTPError, err:  # catch error in authentication
                        if err.msg == 'Unauthorized':
                            msg = _(u"The password you entered is incorrect.")
                    except URLError, err:  # catch error in authentication
                        msg = err.msg
                    if msg is not None:
                        self.request.session.flash(msg, queue=Notification.ERROR)
                        return HTTPFound(location=self.request.route_path('instance_create_image', id=instance_id))
                    upload_policy = BaseView.generate_default_policy(s3_bucket, s3_prefix, token=creds.session_token)
                    # we need to make the call ourselves to override boto's auto-signing
                    params = {
                        'InstanceId': instance_id,
                        'Storage.S3.Bucket': s3_bucket,
                        'Storage.S3.Prefix': s3_prefix,
                        'Storage.S3.UploadPolicy': upload_policy,
                        'Storage.S3.AWSAccessKeyId': creds.access_key,
                        'Storage.S3.UploadPolicySignature': BaseView.gen_policy_signature(
                            upload_policy, creds.secret_key)
                    }
                    result = self.conn.get_object('BundleInstance', params, BundleInstanceTask, verb='POST')
                    # Persist bundle metadata to S3 so the pending image can
                    # be reconstructed later.
                    bundle_metadata = {
                        'version': curr_version,
                        'name': name,
                        'description': description,
                        'prefix': s3_prefix,
                        'virt_type': self.instance.virtualization_type,
                        'arch': self.instance.architecture,
                        'platform': self.instance.platform,
                        'kernel_id': self.instance.kernel,
                        'ramdisk_id': self.instance.ramdisk,
                        'bdm': bdm_json,
                        'tags': tags_json,
                        'bundle_id': result.id,
                    }
                    self.ec2_conn.create_tags(instance_id, {'ec_bundling': '%s/%s' % (s3_bucket, result.id)})
                    s3_conn = self.get_connection(conn_type='s3')
                    k = Key(s3_conn.get_bucket(s3_bucket))
                    k.key = result.id
                    k.set_contents_from_string(json.dumps(bundle_metadata))
                    msg = _(u'Successfully sent create image request. It may take a few minutes to create the image.')
                    self.request.session.flash(msg, queue=Notification.SUCCESS)
                    # The 'p' prefix marks a pending (still-bundling) image id.
                    return HTTPFound(location=self.request.route_path('image_view', id='p' + instance_id))
            else:
                # EBS-backed image: a single CreateImage API call suffices.
                no_reboot = self.request.params.get('no_reboot')
                with boto_error_handler(self.request, self.location):
                    self.log_request(_(u"Creating image from instance {0}").format(instance_id))
                    bdm = self.get_block_device_map(bdm_json)
                    # The root device mapping must not be passed explicitly.
                    if bdm.get(self.instance.root_device_name) is not None:
                        del bdm[self.instance.root_device_name]
                    image_id = self.ec2_conn.create_image(
                        instance_id, name, description=description, no_reboot=no_reboot, block_device_mapping=bdm)
                    tags = json.loads(tags_json)
                    self.ec2_conn.create_tags(image_id, tags)
                    msg = _(u'Successfully sent create image request. It may take a few minutes to create the image.')
                    self.invalidate_images_cache()
                    self.request.session.flash(msg, queue=Notification.SUCCESS)
                    return HTTPFound(location=self.request.route_path('image_view', id=image_id))
        else:
            self.request.error_messages = self.create_image_form.get_errors_list()
        return self.render_dict
class InstanceTypesView(LandingPageView, BaseInstanceView):
    """Landing page and JSON endpoints for listing and editing the cloud's
    instance type definitions."""

    def __init__(self, request):
        super(InstanceTypesView, self).__init__(request)
        self.request = request
        self.conn = self.get_connection()
        self.render_dict = dict(
            instance_type_form=InstanceTypeForm(self.request),
            sort_keys=[],
            filter_keys=[],
            prefix='',
        )

    @view_config(route_name='instance_types', renderer='../templates/instances/instance_types.pt')
    def instance_types_landing(self):
        """GET: render the instance types landing page."""
        return self.render_dict

    @view_config(route_name='instance_types_json', renderer='json', request_method='POST')
    def instance_types_json(self):
        """Return the cloud's instance type definitions as JSON rows."""
        if not (self.request.session['account_access']):
            return JSONResponse(status=401, message=_(u"Unauthorized"))
        if not (self.is_csrf_valid()):
            return JSONResponse(status=400, message="missing CSRF token")
        instance_types_results = []
        with boto_error_handler(self.request):
            instance_types = self.conn.get_all_instance_types()
            for instance_type in instance_types:
                instance_types_results.append(dict(
                    name=instance_type.name,
                    cpu=instance_type.cores,
                    memory=instance_type.memory,
                    disk=instance_type.disk,
                ))
        return dict(results=instance_types_results)

    @view_config(route_name='instance_types_update', renderer='json', request_method='POST')
    def instance_types_update(self):
        """Apply a batch of instance type attribute updates posted as
        flattened ``update[<index>][<attr>]`` form parameters."""
        if not (self.is_csrf_valid()):
            return JSONResponse(status=400, message="missing CSRF token")
        # Extract the list of instance type updates
        update = {}
        for param in self.request.params.items():
            # Parameters arrive flattened, e.g. update[0][name]=m1.small
            # (raw string so the regex escapes are not re-interpreted by Python)
            match = re.search(r'update\[(\d+)\]\[(\w+)\]', param[0])
            if match:
                index = match.group(1)
                attr = match.group(2)
                value = param[1]
                instance_type = {}
                if index in update:
                    instance_type = update[index]
                instance_type[attr] = value
                update[index] = instance_type
        # Modify instance type
        for item in update.itervalues():
            is_updated = self.modify_instance_type_attribute(
                item['name'], item['cpu'], item['memory'], item['disk'])
            if not is_updated:
                # Bug fix: message previously read "Failed to instance type attributes"
                return JSONResponse(status=400, message=_(u"Failed to update instance type attributes"))
        return dict(message=_(u"Successfully updated instance type attributes"))

    def modify_instance_type_attribute(self, name, cpu, memory, disk):
        """Send a ModifyInstanceTypeAttribute request; returns the call status,
        or False when an attribute fails the sanity check."""
        # Ensure that the attributes are positive integers
        # NOTE(review): cpu/memory/disk arrive as request-param strings here;
        # confirm upstream coercion/validation, otherwise this check is moot.
        if cpu <= 0 or memory <= 0 or disk <= 0:
            return False
        params = {'Name': name, 'Cpu': cpu, 'Memory': memory, 'Disk': disk}
        with boto_error_handler(self.request):
            status = self.conn.get_status('ModifyInstanceTypeAttribute', params, verb='POST')
            # Drop the cached choices so the UI reflects the new values.
            ChoicesManager.invalidate_instance_types()
            return status
| 87,434 | 24,682 |
from flask import (
Blueprint,
current_app,
request,
abort,
render_template
)
from minette.serializer import loads
# Blueprint definition, registered by the main application
bp = Blueprint("messagelog", __name__)


# Message log handler
@bp.route("/messagelog", methods=["GET"])
def messagelog():
    """Render the latest 50 message-log rows as HTML.

    Access is guarded by a shared password passed via the ``key`` query
    parameter; a mismatch returns 401.
    """
    # Obtain the bot instance
    bot = current_app.line_adapter.bot
    # Password check
    if request.args.get("key", "") != bot.config.get("messagelog_password"):
        abort(401)
    # Fetch and display the message log (quick-and-dirty: tune the query and
    # table properly before using this in production)
    with bot.connection_provider.get_connection() as connection:
        cursor = connection.cursor()
        cursor.execute("select * from messagelog order by id desc limit 50")
        ml = []
        for r in cursor.fetchall():
            # Map column names onto the row values, then decode JSON columns
            d = dict(zip([column[0] for column in cursor.description], r))
            d["request_json"] = loads(d["request_json"])
            d["context_json"] = loads(d["context_json"])
            d["response_json"] = loads(d["response_json"])
            ml.append(d)
    return render_template("messagelog.html", ml=ml)
| 1,069 | 418 |
import glob
import os
# from nltk.tokenize import sent_e
import pickle
import nltk
import proper_names
import pdb
import itertools
import zlib
import pdb
import unicodedata
import pickle
from dateutil.parser import parse
def is_date(string):
    """Return True if *string* parses as a date and is not an excluded token.

    "sun"/"sat" are excluded because they are common words that dateutil
    happily interprets as weekdays.
    """
    exceptions = ["sun", "sat"]
    try:
        parse(string)
    # Bug fix: was a bare `except`, which also swallowed KeyboardInterrupt
    # and SystemExit. dateutil's parse raises these on unparseable input.
    except (ValueError, OverflowError, TypeError):
        return False
    return string not in exceptions
def is_number(s):
    """Return True if *s* is a numeric token that is NOT a likely year.

    Values in (1700, 3000) are treated as year dates and rejected; anything
    ``unicodedata.numeric`` can interpret (e.g. vulgar fractions) counts as
    a number.
    """
    try:
        v = float(s)
        # Crappy hack to capture most year dates and return False for them.
        return v <= 1700 or v >= 3000
    # Bug fix: was a bare `except` (the intended exceptions were already
    # noted in a comment); narrowed so real errors are not swallowed.
    except (TypeError, ValueError, OverflowError):
        pass
    try:
        unicodedata.numeric(s)
        return True
    except (TypeError, ValueError, OverflowError):
        pass
    return False
def remove_non_ascii(text):
    """Replace every character outside the ASCII range with a space."""
    cleaned = []
    for ch in text:
        cleaned.append(ch if ord(ch) < 128 else ' ')
    return ''.join(cleaned)
if __name__ == "__main__":
    '''
    saves the books into a file with CONV_LENGTH sentences (utterances) on each line
    and sentence tags in between them
    '''
    # in_file = "/home/ubuntu/nn_chatbot/seq2seq/scripts/data/books_large_p1.txt"
    # out_file = "/home/ubuntu/nn_chatbot/seq2seq/scripts/data/books_large_p1_simplified.txt"
    in_file = "/media/snake/daphne/school/nn_chatbot/big_book_corpus/in_sentences/books_large_p1.txt"
    out_file = "/media/snake/daphne/school/nn_chatbot/big_book_corpus/in_sentences/books_large_p1_simplified_2.txt"
    # Simplify the text to only have these many unique names
    NUM_NAMES = 100
    output = ""
    # Trie of known proper names used to detect name tokens below.
    proper_names_trie = proper_names.get_or_create_proper_names()
    names_so_far = {}
    dates = set()
    numbers = set()
    line_index = 0
    with open(in_file, 'r') as f_in:
        with open(out_file, 'w') as f_out:
            for line in f_in:
                line_index = line_index + 1
                # Skip-ahead guard (disabled: a threshold of -1 never skips).
                if line_index <= -1:
                    pass
                else:
                    words = line.split(" ")
                    for i in xrange(0, len(words)):
                        word = words[i]
                        if word.lower() in proper_names_trie: #and word[0] == word[0].upper():
                            # This is a proper name. Replace it with _name_%HASH(name)%
                            replacement_name = "_name_" + str(zlib.adler32(word) % NUM_NAMES)
                            words[i] = replacement_name
                        elif is_number(word):
                            words[i] = "_number_"
                            # Keep track of all 'number' words, in case we're interested in look at this.
                            if word not in numbers:
                                numbers.add(word)
                        elif is_date(word):
                            words[i] = "_date_"
                            # Keep track of all 'number' w, in case we're interested in look at this.
                            if word not in dates:
                                dates.add(word)
                    out_line = " ".join(words)
                    f_out.write(out_line)
                    # Periodic progress report.
                    if line_index%50000==0:
                        print("i = " + str(line_index) + ": " + line + " -> " + out_line)
    # Persist the collected raw number/date tokens for later inspection.
    with open("removed_meta.pickle", "w") as f_out:
        pickle.dump((numbers, dates), f_out)
| 2,794 | 1,160 |
import torch
import torch.nn as nn
import torch.nn.functional as F
from utilities import DEVICE
def vae_loss(x_decoded_mean, x, z_mean, z_sd):
    """VAE objective: summed reconstruction BCE plus the KL term.

    Note the KL term is computed exactly as the model defines it, from the
    (z_mean, z_sd) pair produced by the encoder.
    """
    reconstruction = F.binary_cross_entropy(x_decoded_mean, x, reduction='sum')
    kl_divergence = -0.5 * torch.sum(1 + z_sd - z_mean.pow(2) - z_sd)
    return reconstruction + kl_divergence
class MolecularVAE(nn.Module):
    '''
    MolecularVAE with teacher forcing
    '''

    def __init__(self, vocab: dict, sos_idx: int, pad_idx: int, args):
        """Build the convolutional encoder, latent heads, and LSTM decoder.

        Args:
            vocab: token -> index mapping; only its size is used here.
            sos_idx: start-of-sequence token index used when sampling.
            pad_idx: padding token index for the embedding layer.
            args: hyperparameter namespace (layer sizes, kernels, eps, ...).
        """
        super(MolecularVAE, self).__init__()
        self.max_name_len = args.max_name_length
        self.encoder_mlp_size = args.mlp_encode
        self.latent_size = args.latent
        self.num_layers = args.num_layers
        self.embed_dim = args.word_embed
        self.conv_in_c = args.conv_in_sz
        self.conv_out_c = args.conv_out_sz
        self.conv_kernals = args.conv_kernals
        self.vocab_size = len(vocab)
        self.eps = args.eps
        # Three 1-D convolutions; conv_1 treats the sequence-position axis
        # (max_name_len) as its input channel dimension.
        self.conv_1 = nn.Conv1d(self.max_name_len, self.conv_out_c[
            0], kernel_size=self.conv_kernals[0])
        self.conv_2 = nn.Conv1d(self.conv_in_c[0], self.conv_out_c[
            1], kernel_size=self.conv_kernals[1])
        self.conv_3 = nn.Conv1d(self.conv_in_c[1], self.conv_out_c[
            2], kernel_size=self.conv_kernals[2])
        # Flattened feature size after the three valid (no-padding) convs.
        c1_out_sz = self.vocab_size-(self.conv_kernals[0]) + 1
        c2_out_sz = c1_out_sz-(self.conv_kernals[1]) + 1
        c3_out_sz = self.conv_out_c[2] * \
            ((c2_out_sz-(self.conv_kernals[2])) + 1)
        self.encoder_layer = nn.Linear(c3_out_sz, self.encoder_mlp_size)
        self.mean_layer = nn.Linear(self.encoder_mlp_size, self.latent_size)
        self.sd_layer = nn.Linear(self.encoder_mlp_size, self.latent_size)
        self.decoder_layer_start = nn.Linear(
            self.latent_size, self.latent_size)
        # NOTE(review): named `gru` but both recurrent layers are LSTMs.
        self.gru = nn.LSTM(args.latent,
                           args.rnn_hidd, args.num_layers, batch_first=True)
        self.gru_last = nn.LSTM(args.rnn_hidd + self.embed_dim,
                                args.rnn_hidd, 1, batch_first=True)
        self.decode_layer_final = nn.Linear(args.rnn_hidd, self.vocab_size)
        self.sos_idx = sos_idx
        self.pad_idx = pad_idx
        self.char_embedder = nn.Embedding(
            num_embeddings=self.vocab_size,
            embedding_dim=self.embed_dim,
            padding_idx=pad_idx
        )
        self.selu = nn.SELU()
        self.softmax = nn.Softmax()
        nn.init.xavier_normal_(self.encoder_layer.weight)
        nn.init.xavier_normal_(self.mean_layer.weight)
        nn.init.xavier_normal_(self.sd_layer.weight)
        nn.init.xavier_normal_(self.decoder_layer_start.weight)
        nn.init.xavier_normal_(self.decode_layer_final.weight)

    def encode(self, x):
        """Encode a batch of encoded names into (mean, sd) latent parameters."""
        x0 = self.selu(self.conv_1(x))
        x1 = self.selu(self.conv_2(x0))
        x2 = self.selu(self.conv_3(x1))
        x3 = x2.view(x.size(0), -1)
        x4 = F.selu(self.encoder_layer(x3))
        # softplus keeps the predicted standard deviation positive
        return self.mean_layer(x4), F.softplus(self.sd_layer(x4))

    def sampling(self, z_mean, z_sd):
        """Reparameterization trick: z = mean + (eps-scaled noise) * sd."""
        epsilon = self.eps * torch.randn_like(z_sd)
        return z_sd * epsilon + z_mean

    def decode(self, z, idx_tensor: torch.Tensor = None):
        """Decode latent z; teacher-forced when idx_tensor is given."""
        z = F.selu(self.decoder_layer_start(z))
        # Repeat the latent vector across every decoder timestep.
        z = z.view(z.size(0), 1, z.size(-1)).repeat(1, self.max_name_len, 1)
        output, _ = self.gru(z)
        if idx_tensor is not None:
            # Teacher forcing: concatenate ground-truth character embeddings
            # with the first LSTM's output before the final LSTM.
            x_embed = self.char_embedder(idx_tensor)
            tf_input = torch.cat((output, x_embed), dim=2)
            all_outs, _ = self.gru_last(tf_input)
            out_reshape = all_outs.contiguous().view(-1, output.size(-1))
            y0 = F.softmax(self.decode_layer_final(out_reshape), dim=1)
            y = y0.contiguous().view(all_outs.size(0), -1, y0.size(-1))
        else:
            # Autoregressive sampling path.
            # NOTE(review): this branch looks unfinished -- `self.max_len` is
            # undefined (the attribute is `max_name_len`), `torch.cat(y,
            # sm_out, dim=1)` is not a valid call signature, and `y.append` /
            # `torch.Tensor(y)` cannot apply to a tensor. Confirm before use.
            batch_sz = z.shape[0]
            char_inputs = torch.LongTensor(
                [self.sos_idx] * batch_sz).to(DEVICE)
            embed_char = self.char_embedder(char_inputs)
            y = None
            for i in range(self.max_len):
                input = torch.cat((output[:, i, :], embed_char), dim=1)
                if i == 0:
                    out, hn = self.gru_last(input.unsqueeze(1))
                else:
                    out, hn = self.gru_last(input.unsqueeze(1), hn)
                sm_out = F.softmax(self.decode_layer_final(out), dim=1)
                samples = torch.distributions.Categorical(
                    sm_out).sample()
                if i == 0:
                    y = sm_out
                else:
                    y = torch.cat(y, sm_out, dim=1)
                embed_char = self.char_embedder(samples)
            y.append(out)
            y = torch.Tensor(y)
        return y

    def forward(self, x, idx_tensor: torch.Tensor = None):
        """Encode x, sample z, decode; returns (reconstruction, mean, sd)."""
        z_mean, z_sd = self.encode(x)
        z = self.sampling(z_mean, z_sd)
        return self.decode(z, idx_tensor), z_mean, z_sd
| 5,015 | 1,877 |
from __future__ import annotations
from typing import Iterator, List
import numpy as np
from more_itertools import chunked
from torch.utils.data import Sampler
from .rank_dataset import TokenCountDataset
class DynamicBatchSampler(Sampler):  # type:ignore
    """Pack example indices into batches of roughly ``batch_tokens`` tokens,
    trimming each emitted batch length to a multiple of ``mod``."""

    def __init__(
        self,
        dataset: TokenCountDataset,
        batch_tokens: int,
        mod: int,
        shuffle: bool = True,
    ):
        self.dataset = dataset
        self.batch_tokens = batch_tokens
        self.batches: List[List[int]] = []
        self.mod = mod
        batch: List[int] = [0]
        size = self.dataset.estimate_tokens(0)
        total_size = size
        for idx in range(1, len(self.dataset)):
            if total_size + size > self.batch_tokens:
                # Trim to a multiple of `mod`; the trimmed remainder seeds
                # the next batch together with the current index.
                # NOTE(review): when len(batch) < mod, bsz == 0 and an empty
                # batch is appended -- confirm mod never exceeds the smallest
                # possible batch length.
                bsz = len(batch) // self.mod * self.mod
                self.batches.append(batch[:bsz])
                batch = batch[bsz:] + [idx]
                # Only estimate the size for the first example of a batch. Following
                # will be padded to the same size.
                size = self.dataset.estimate_tokens(batch[0])
                total_size = size * len(batch)
            else:
                batch.append(idx)
                total_size += size
        # Flush the trailing partial batch.
        if batch:
            self.batches.append(batch)
        if shuffle:
            np.random.shuffle(self.batches)

    def __iter__(self) -> Iterator[List[int]]:
        return iter(self.batches)

    def __len__(self) -> int:
        return len(self.batches)

    def avg_bsz(self) -> float:
        """Mean number of examples per batch."""
        size: float = np.mean([len(x) for x in self.batches])
        return size
class FixedBatchSampler(Sampler):  # type:ignore
    """Fixed-size batch sampler equivalent to pytorch's default BatchSampler,
    but with an optional shuffle applied *after* batching.

    Shuffling after batching is useful when the dataset has been sorted by
    length: batches move around, the in-batch ordering stays intact.
    """

    def __init__(
        self, dataset: "TokenCountDataset", batch_size: int, shuffle: bool = True
    ):
        self.dataset = dataset
        self.batch_size = batch_size
        indices = list(range(len(dataset)))
        # Slice the index list into consecutive fixed-size chunks; the last
        # chunk may be shorter.
        self.batches = [
            indices[start:start + batch_size]
            for start in range(0, len(indices), batch_size)
        ]
        if shuffle:
            np.random.shuffle(self.batches)

    def __iter__(self) -> Iterator[List[int]]:
        return iter(self.batches)

    def __len__(self) -> int:
        return len(self.batches)
| 2,391 | 706 |
from typing import Dict
from typing import Optional
from typing import Type
from dataclasses import dataclass
from rest_framework.serializers import Serializer
from ..core import ComponentMethod
from ..core import annotate
@dataclass
class InputSerializer:
    """Annotation payload describing how to deserialize a method's input."""
    # Serializer class instantiated to validate the incoming data.
    class_: Type[Serializer]
    # Extra keyword arguments forwarded to the serializer constructor.
    kwargs: Dict
    # Name of the handler argument that receives the deserialized result.
    destination_argument_name: str
def input_serializer(serializer_class: Type[Serializer], argument_name: str, **serializer_kwargs):
    """Attach an InputSerializer annotation describing how the decorated
    method's input should be deserialized into *argument_name*."""
    annotation = InputSerializer(serializer_class, serializer_kwargs, argument_name)
    return annotate(annotation, single=True)
def get_input_serializer(method: ComponentMethod) -> Optional[InputSerializer]:
    """Return the method's InputSerializer annotation, or None when absent."""
    return method.annotations.get_one_or_none(InputSerializer)
# http://stackoverflow.com/questions/5147112/matplotlib-how-to-put-individual-tags-for-a-scatter-plot
# http://stackoverflow.com/questions/10374930/matplotlib-annotating-a-3d-scatter-plot
import sys
from pyproct.clustering.cluster import Cluster
import prody
import matplotlib.pyplot as plt
import numpy
import json
from mpl_toolkits.mplot3d import Axes3D
from pyproct.tools.commonTools import convert_to_utf8
from pyproct.clustering.clustering import Clustering
import optparse
import matplotlib.cm as cm
def print_cluster_info(selection_class,clustering_id, results):
    """Print one clustering's id, cluster count and type on a single line;
    for 'selected' clusterings also print every criterion score, then the
    clustering parameters. (Python 2 print statements.)"""
    print clustering_id, results[selection_class][clustering_id]["clustering"]["number_of_clusters"], results[selection_class][clustering_id]["type"],
    if selection_class == "selected":
        # Scores only exist for clusterings that passed selection.
        print "".join([ (str(results["scores"][criteria][clustering_id])+", ") for criteria in results["scores"].keys()]),
    print results[selection_class][clustering_id]["parameters"]
if __name__ == '__main__':
    # Command-line interface: list clusters (-l) or plot one clustering in 3D.
    parser = optparse.OptionParser(usage='%prog -m <arg> -c <arglist> [-o <arg>]', version='1.0')
    parser.add_option('-l', action="store_true", dest = "print_list", help="Print a list of generated clusters and some properties", metavar = "1")
    parser.add_option("-s", action="store_true", dest="show_protein", help="Shows the protein backbone.")
    parser.add_option('-r', action="store", dest = "results_file", help="", metavar = "results.json")
    parser.add_option('-p', action="store", dest = "parameters_file", help="",metavar = "parameters.json")
    parser.add_option('-c', action="store", dest = "clustering_to_see", help="",metavar = "clustering_0001")
    parser.add_option('--all', action="store_true", dest = "all_clusterings", help="",metavar = "")
    parser.add_option('--stride', type = "int", action="store", dest = "stride", help="",metavar = "5")
    options, args = parser.parse_args()
    # Only distance-matrix clusterings carry 3D coordinates to plot.
    params = convert_to_utf8(json.loads(open(options.parameters_file).read()))
    if params["data"]["matrix"]["method"] == "distance":
        results = convert_to_utf8(json.loads(open(options.results_file).read()))
    else:
        print "ERROR: Only 'distance' clusterings can be plotted."
    if options.print_list:
        # Listing mode: dump cluster summaries and exit without plotting.
        print "SELECTED"
        print "========"
        for selected_cluster in results["selected"]:
            print_cluster_info("selected",selected_cluster,results)
        if options.all_clusterings:
            print "NOT SELECTED"
            print "============"
            for not_selected_cluster in results["not_selected"]:
                print_cluster_info("not_selected",not_selected_cluster,results)
        exit()
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    # Plot protein
    pdb = prody.parsePDB(params["data"]["files"][0])
    if options.show_protein:
        pdb_backbone = pdb.select("name CA").getCoordsets()[0]  # "backbone not hetero"
        ax.plot(pdb_backbone.T[0], pdb_backbone.T[1], pdb_backbone.T[2])
    # Get geometric centers and plot ligands
    ligand_coords = pdb.select(params["data"]["matrix"]["parameters"]["body_selection"]).getCoordsets()
    # Get clustering
    if options.clustering_to_see is None:
        options.clustering_to_see = results["best_clustering"]
    # NOTE(review): bare except below -- falls back to "not_selected" on *any*
    # failure, which can mask unrelated errors (e.g. a typoed clustering id).
    try:
        clustering = Clustering.from_dic(results["selected"][options.clustering_to_see]["clustering"])
        # Print some info
        print_cluster_info("selected", options.clustering_to_see, results)
    except:
        clustering = Clustering.from_dic(results["not_selected"][options.clustering_to_see]["clustering"])
        # Print some info
        print_cluster_info("not_selected", options.clustering_to_see, results)
    # Show all clusters
    colors = iter(cm.rainbow(numpy.linspace(0, 1, len(clustering.clusters))))
    for cluster in clustering.clusters:
        centers = []
        for i,element in enumerate(cluster.all_elements):
            # Honor the optional stride to thin out the plotted frames.
            if options.stride is None or i%options.stride == 0:
                coords = ligand_coords[element]
                centers.append(coords.mean(0))
        centers = numpy.array(centers)
        ax.scatter(centers.T[0],centers.T[1],centers.T[2],color=next(colors))
    # Plot prototypes
    centers = numpy.array([ligand_coords[cluster.prototype].mean(0) for cluster in clustering.clusters])
    ax.scatter([centers.T[0]],[centers.T[1]],[centers.T[2]], s = 100, c="red", marker='o')
    plt.show()
| 4,417 | 1,450 |
import asyncio
import logging
import sys
import time
import uuid
from collections import OrderedDict
from collections.abc import Callable
from functools import partial
from threading import Thread
from typing import Coroutine, Union
from tamarco.core.dependency_resolver import CantSolveDependencies, resolve_dependency_order
from tamarco.core.logging.logging import Logging
from tamarco.core.patterns import Singleton
from tamarco.core.settings.settings import Settings, SettingsView
from tamarco.core.signals import SignalsManager
from tamarco.core.tasks import TasksManager, get_task_wrapper, get_thread_wrapper
from tamarco.core.utils import Informer, ROOT_SETTINGS, get_fn_full_signature
from tamarco.resources.bases import BaseResource
from tamarco.resources.basic.metrics.resource import MetricsResource
from tamarco.resources.basic.registry.resource import Registry
from tamarco.resources.basic.status.resource import StatusResource
from tamarco.resources.debug.profiler import ProfilerResource
from tamarco.resources.io.http.resource import HTTPServerResource
logger = logging.getLogger("tamarco")
class MicroserviceBase(metaclass=Singleton):
    """Common base for tamarco microservices: owns the event loop, settings,
    logging, task manager and the ordered collection of declared resources."""
    # Name of the microservice, is used by the resources
    # to report a name of service.
    name = None
    # Instance id of the microservice, name is shared
    # among instances but the instance id is unique.
    instance_id = uuid.uuid4()
    # Name of the deploy, is used by the resources
    # to report a deploy name, is loaded by settings.
    deploy_name = None
    # Loggers to be added by the application code.
    extra_loggers_names = []
    # Main event loop.
    loop = asyncio.get_event_loop()
    # Manager for task.
    tasks_manager = TasksManager()
    # Settings manager.
    settings = Settings()
    # Logging manager.
    logging = Logging()

    @property
    def loggers_names(self):
        """All loggers used by the framework.

        Returns:
            list: list of loggers names used by the microservice.
        """
        loggers = {"tamarco", "tamarco.tasks", "tamarco.settings", "asyncio"}
        for resource in self.resources.values():
            loggers.update(resource.loggers_names)
        loggers.update(self.extra_loggers_names)
        loggers.update({self.name})
        return loggers

    def __new__(cls, *args, **kwargs):
        """Discover the BaseResource attributes declared on the class and
        store them, in dependency-resolved order, in ``cls.resources``."""
        cls.resources = OrderedDict()
        dependency_graph = {
            attr_name: getattr(cls, attr_name).depends_on
            for attr_name in dir(cls)
            if isinstance(getattr(cls, attr_name), BaseResource)
        }
        try:
            resources_dep_ordered = resolve_dependency_order(dependency_graph)
        except CantSolveDependencies as e:
            # An unsolvable dependency graph is fatal: report and abort.
            print(e, file=sys.stderr)
            exit(12)
        else:
            for name in resources_dep_ordered:
                cls.resources[name] = getattr(cls, name)
        return super().__new__(cls, *args, **kwargs)

    def __init__(self):
        assert self.name is not None, "Error, name should be defined in your microservice class"
        self.logger = None
        self._configure_provisional_logger()

    def _configure_provisional_logger(self):
        """Provisional logging used before be able to read the final configuration from the settings."""
        self.logger = logging.getLogger(self.name)
        stdout_handler = logging.StreamHandler(sys.stdout)
        print(f"Configuring logger provisional logger of {self.name} to INFO and stdout")
        self.logger.setLevel(logging.INFO)
        self.logger.addHandler(stdout_handler)
        self.logger.info(f"Configured {self.name} logger")

    async def bind(self):
        """Call the bind function of all the resources.

        It binds the resources to the microservice, allowing to the resources to identify their microservice.
        """
        self.logger.info(f"Binding to microservice the resources: {list(self.resources.keys())}")
        await self.settings.bind(self.loop)
        for name, resource in self.resources.items():
            try:
                await resource.bind(self, name)
            except Exception:
                # A resource that cannot bind leaves the service unusable.
                self.logger.exception(f"Unexpected exception binding the resource {resource}")
                exit(11)

    async def run_in_all_resources(self, method):
        """Run the method name in all the resources.

        Args:
            method (str): Method name to run in all the resources.
        """
        for resource in self.resources.values():
            self.logger.debug(f"Calling {method} of resource {resource.name}")
            try:
                await getattr(resource, method)()
            except Exception:
                # A failing resource is logged but does not stop the others.
                self.logger.exception(f"Error in {method} of resource {resource}")
            else:
                if method == "start":
                    self.logger.info(f"Started {resource.name} from {self.name}")

    async def start_logging(self):
        """Initializes the logging of the microservice."""
        self.logger.info(f"Starting logging in microservice {self.name} with loggers: {self.loggers_names}")
        await self.logging.start(
            loggers=self.loggers_names, microservice_name=self.name, deploy_name=self.deploy_name, loop=self.loop
        )
        Informer.log_all_info(self.logger)

    async def stop_settings(self):
        """Stops the settings of the microservice."""
        self.logger.info("Stopping microservice settings")
        await self.settings.stop()

    async def start_settings(self):
        """Initializes the settings of the microservice."""
        self.logger.info("Starting microservice settings")
        await self.settings.start()
        self.deploy_name = await self.settings.get(f"{ROOT_SETTINGS}.deploy_name")
        await self._configure_logging_settings()
        await self._configure_resource_settings()

    async def _configure_logging_settings(self):
        """Point the logging manager at its settings subtree."""
        self.logger.info("Configuring logging settings")
        self.logging.configure_settings(SettingsView(self.settings, f"{ROOT_SETTINGS}.logging", self.name))

    async def _configure_resource_settings(self):
        """Hand each resource a view over its own settings subtree."""
        self.logger.info("Configuring resources settings")
        for resource in self.resources.values():
            await resource.configure_settings(
                SettingsView(self.settings, f"{ROOT_SETTINGS}.resources.{resource.name}", self.name)
            )

    def _collect_tasks(self):
        """Register any attributes marked as tasks or threads with the task
        manager (markers are set by the task decorators)."""
        for attr_name in dir(self):
            attr = getattr(self, attr_name)
            if hasattr(attr, "_mark_task"):
                self.tasks_manager.register_task(attr._name, attr)
            elif hasattr(attr, "_mark_thread"):
                self.tasks_manager.register_thread(attr._name, attr)
class MicroserviceContext(MicroserviceBase):
    """This class is used to use tamarco resources without using a full microservice,
    for example a script.
    """
    # Fix: the original docstring opened with four quotes (""""This...), which put a
    # stray leading quote character into the docstring text.

    name = "microservice_context"

    async def start(self):
        """Bind, start settings/logging, run the resource start stages and launch tasks."""
        self.tasks_manager.set_loop(self.loop)
        await self.bind()
        await self.start_settings()
        await self.start_logging()
        await self.run_in_all_resources("pre_start")
        await self.run_in_all_resources("start")
        await self.run_in_all_resources("post_start")
        self._collect_tasks()
        self.tasks_manager.start_all()

    async def stop(self):
        """Stop the tasks, the settings and the resource stop stages."""
        self.tasks_manager.stop_all()
        await self.stop_settings()
        await self.run_in_all_resources("stop")
        await self.run_in_all_resources("post_stop")
class Microservice(MicroserviceBase):
    """Main class of a microservice.

    This class is responsible for controlling the lifecycle of the microservice; it also
    builds and provides the necessary elements that a resource needs to work.

    The resources of a microservice should be declared in this class. The microservice
    automatically takes the ownership of all the declared resources.
    """

    # The signals manager is responsible for handling the system signals, providing a
    # graceful stop of the service when necessary.
    signals_manager = SignalsManager()

    # Default HTTP server resource. It is used by the metrics and status resources to
    # expose information.
    tamarco_http_report_server = HTTPServerResource()

    # Default metrics resource.
    metrics = MetricsResource()

    # Default status resource. It is responsible for applying the restart policies and
    # exposing the status of the resources in an HTTP API.
    status = StatusResource()

    # Default profiler resource. It is responsible for profiling the application when
    # the corresponding setting is activated.
    profiler = ProfilerResource()

    # Default registry resource. It maintains an etcd registry with all the alive
    # microservice instances and their IPs, to be used by a discovery system.
    registry = Registry()

    def __init__(self):
        super().__init__()
        # Both managers schedule their work on the microservice event loop.
        self.tasks_manager.set_loop(self.loop)
        self.signals_manager.set_loop(self.loop)

    async def pre_start(self):
        """Pre start stage of lifecycle.

        This method can be overwritten by the user to add some logic in the start.
        """
        self.logger.info("============ Pre Starting ============")
        await self.run_in_all_resources("pre_start")

    async def start(self):
        """Start stage of lifecycle.

        This method can be overwritten by the user to add some logic in the start.
        """
        self.logger.info("============ Starting ============")
        await self.run_in_all_resources("start")
        self._collect_tasks()
        self.tasks_manager.start_all()

    async def post_start(self):
        """Post start stage of lifecycle.

        This method can be overwritten by the user to add some logic in the start.
        """
        self.logger.info("============ Post Starting ============")
        await self.run_in_all_resources("post_start")

    async def stop(self):
        """Stop stage of the lifecycle.

        This method can be overwritten by the user to add some logic to the shut down.
        This method should close all the I/O operations opened by the resources.
        """
        self.logger.info("============ Stopping ============")
        await self.run_in_all_resources("stop")
        await self.stop_settings()
        self.tasks_manager.stop_all()

    async def post_stop(self):
        """Post stop stage of the lifecycle.

        This method can be overwritten by the user to add some logic to the shut down.
        """
        self.logger.info("============ Post Stopping ============")
        await self.run_in_all_resources("post_stop")

    async def _setup(self):
        """Run the full start sequence: bind, settings, logging and the start stages."""
        await self.bind()
        await self.start_settings()
        await self.start_logging()
        await self.pre_start()
        await self.start()
        await self.post_start()

    def run(self):
        """Run a microservice.

        It initializes the main event loop of asyncio, so this function only returns
        when the microservice ends its life cycle.
        """
        self.logger.info(f"Running microservice {self.name}. Calling setup method")
        try:
            self.loop.run_until_complete(self._setup())
            self.loop.run_forever()
        except Exception:
            self.logger.critical(
                "Unexpected exception in the setup or during the run of the loop, stopping the " "microservice",
                exc_info=True,
            )
        self.loop.run_until_complete(self.stop_gracefully())

    async def stop_gracefully(self):
        """Stop the microservice gracefully.

        Shut down the microservice. If after 30 seconds the microservice is not closed
        gracefully it forces an exit.
        """
        # Fix: the watchdog thread is now a daemon, so when the clean shutdown below
        # finishes quickly the interpreter is not kept alive for the rest of the
        # watchdog's 30 second sleep.
        watchdog = Thread(target=self._wait_and_force_exit, daemon=True)
        watchdog.start()
        await self.stop()
        await self.post_stop()
        if self.loop.is_running():
            self.loop.stop()

    def _wait_and_force_exit(self):
        """Watchdog body: force the process to exit after a 30 second grace period."""
        time.sleep(30)
        self.logger.critical("Error stopping all the resources. Forcing exit.")
        # Fix: the builtin exit() only raises SystemExit, which in a non-main thread
        # terminates just that thread, never the process. os._exit() really exits.
        import os
        os._exit(1)
def task(name_or_fn):
    """Decorator to convert a method of a microservice in an asyncio task.

    The task is started and stopped when the microservice starts and stops respectively.

    Args:
        name_or_fn: Name of the task or function. If function, the task name is the
            declared name of the function.
    """
    def decorator(name, fn):
        wrapper = get_task_wrapper(fn, name)
        # Marker attributes picked up by MicroserviceBase._collect_tasks().
        wrapper._mark_task = True
        wrapper._name = name
        return wrapper

    # Bug fix: the original tested `name_or_fn is str`, an identity comparison with
    # the *type* str that is only true when the literal `str` class is passed, so
    # the @task("name") form always fell through and raised.
    if isinstance(name_or_fn, str):
        name = name_or_fn
        return partial(decorator, name)
    elif callable(name_or_fn):
        if not asyncio.iscoroutinefunction(name_or_fn):
            raise Exception(f"Tamarco {name_or_fn} task not created! The function is not asynchronous")
        fn = name_or_fn
        name = get_fn_full_signature(fn)
        return decorator(name, fn)
    else:
        raise Exception("task decorator should be used with a parameter (name) that is a str or without parameter")
def thread(name_or_fn):
    """Decorator to convert a method of a microservice in a thread.

    The thread is started and stopped when the microservice starts and stops respectively.

    Args:
        name_or_fn: Name of the thread or function. If function, the thread name is the
            declared name of the function.
    """
    def decorator(name: str, fn: Callable):
        wrapper = get_thread_wrapper(fn, name)
        # Marker attributes picked up by MicroserviceBase._collect_tasks().
        wrapper._mark_thread = True
        wrapper._name = name
        return wrapper

    # Bug fix: `name_or_fn is str` compared identity against the *type* str, so the
    # @thread("name") form never worked (same defect as in task()).
    if isinstance(name_or_fn, str):
        name = name_or_fn
        return partial(decorator, name)
    elif callable(name_or_fn):
        fn = name_or_fn
        name = get_fn_full_signature(fn)
        return decorator(name, fn)
    else:
        # Fix: the message used to say "task decorator" (copy-paste from task()).
        raise Exception("thread decorator should be used with a parameter (name) that is a str or without parameter")
def task_timer(interval=1000, one_shot=False, autostart=False) -> Union[Callable, Coroutine]:
    """Decorator to declare a task that should repeated in time intervals.

    Examples:
        >>> @task_timer()
        >>> async def execute(*arg,**kwargs)
        >>>     print('tick')

        >>> @task_timer(interval=1000, one_shot=True, autostart=True)
        >>> async def execute(*args,**kwargs)
        >>>     print('tick')

    Args:
        interval (int): Interval in milliseconds when the task is repeated.
        one_shot (bool): Only runs the task once.
        autostart (bool): Task is automatically initialized with the microservice.
    """
    def wrapper_task_timer(fn: Union[str, Callable]) -> Union[Callable, Coroutine]:
        """Function that adds timer functionality"""
        async def fn_with_sleep(*args, **kwargs):
            try:
                # Interval time in float (seconds transform)
                interval_seconds = interval / 1000
                # First iteration runs immediately only when autostart is set and the
                # task is not one_shot; a one_shot task always sleeps one interval
                # before its single execution.
                execute_task = autostart and not one_shot
                while True:
                    if execute_task:
                        logger.debug(
                            f"Executing task timer {fn.__name__} with the params: interval = {interval}, "
                            f"one_shot = {one_shot}, autostart = {autostart}"
                        )
                        await fn(*args, **kwargs)
                    if one_shot and execute_task:
                        break
                    execute_task = True
                    await asyncio.sleep(interval_seconds)
            except Exception:
                # Any exception ends the loop permanently; the timer does not restart.
                logger.exception(f"Unexpected exception running task timer {fn.__name__}. Timer will not recover")
        # Change name timer function with original task name
        fn_with_sleep.__name__ = fn.__name__
        return task(fn_with_sleep)
    return wrapper_task_timer
| 15,973 | 4,294 |
import json
from itertools import cycle
import numpy as np
import pytest
from napari.utils.colormaps.categorical_colormap import CategoricalColormap
def test_default_categorical_colormap():
    """A default-constructed colormap has no mapping and falls back to white."""
    cmap = CategoricalColormap()
    assert cmap.colormap == {}
    color_cycle = cmap.fallback_color
    np.testing.assert_almost_equal(color_cycle.values, [[1, 1, 1, 1]])
    np.testing.assert_almost_equal(next(color_cycle.cycle), [1, 1, 1, 1])
def test_categorical_colormap_direct():
    """Test a categorical colormap with a provided mapping"""
    colormap = {'hi': np.array([1, 1, 1, 1]), 'hello': np.array([0, 0, 0, 0])}
    cmap = CategoricalColormap(colormap=colormap)
    color = cmap.map(['hi'])
    np.testing.assert_allclose(color, [[1, 1, 1, 1]])
    color = cmap.map(['hello'])
    np.testing.assert_allclose(color, [[0, 0, 0, 0]])

    # test that the default fallback color (white) is applied
    new_color_0 = cmap.map(['not a key'])
    np.testing.assert_almost_equal(new_color_0, [[1, 1, 1, 1]])
    # mapping an unknown key also registers it in the colormap
    new_cmap = cmap.colormap
    np.testing.assert_almost_equal(new_cmap['not a key'], [1, 1, 1, 1])

    # set a cycle of fallback colors
    new_fallback_colors = [[1, 0, 0, 1], [0, 1, 0, 1]]
    cmap.fallback_color = new_fallback_colors
    # unseen keys consume the fallback cycle in declaration order
    new_color_1 = cmap.map(['new_prop 1'])
    np.testing.assert_almost_equal(
        np.squeeze(new_color_1), new_fallback_colors[0]
    )
    new_color_2 = cmap.map(['new_prop 2'])
    np.testing.assert_almost_equal(
        np.squeeze(new_color_2), new_fallback_colors[1]
    )
def test_categorical_colormap_cycle():
    """Fallback colors given as a list behave as a repeating cycle."""
    color_cycle = [[1, 1, 1, 1], [1, 0, 0, 1]]
    cmap = CategoricalColormap(fallback_color=color_cycle)

    # verify that no mapping between prop value and color has been set
    assert cmap.colormap == {}
    # the values used to create the color cycle can be accessed via fallback color
    np.testing.assert_almost_equal(cmap.fallback_color.values, color_cycle)

    # map 2 colors, verify their colors are returned in order
    colors = cmap.map(['hi', 'hello'])
    np.testing.assert_almost_equal(colors, color_cycle)

    # map a third color and verify the colors wrap around
    third_color = cmap.map(['bonjour'])
    np.testing.assert_almost_equal(np.squeeze(third_color), color_cycle[0])
def test_categorical_colormap_cycle_as_dict():
    """The fallback color can also be given as a {'values', 'cycle'} dict."""
    color_values = np.array([[1, 1, 1, 1], [1, 0, 0, 1]])
    color_cycle = cycle(color_values)
    fallback_color = {'values': color_values, 'cycle': color_cycle}
    cmap = CategoricalColormap(fallback_color=fallback_color)

    # verify that no mapping between prop value and color has been set
    assert cmap.colormap == {}
    # the values used to create the color cycle can be accessed via fallback color
    np.testing.assert_almost_equal(cmap.fallback_color.values, color_values)
    np.testing.assert_almost_equal(
        next(cmap.fallback_color.cycle), color_values[0]
    )
# Shared fixture: red/green fallback cycle used by the parametrized tests below.
fallback_colors = np.array([[1, 0, 0, 1], [0, 1, 0, 1]])
def test_categorical_colormap_from_array():
    """from_array() installs the given colors as the fallback cycle."""
    cmap = CategoricalColormap.from_array(fallback_colors)
    np.testing.assert_almost_equal(cmap.fallback_color.values, fallback_colors)
# Shared fixtures for the parametrized construction tests below.
color_mapping = {
    'typeA': np.array([1, 1, 1, 1]),
    'typeB': np.array([1, 0, 0, 1]),
}
# White: the fallback applied when none is given explicitly.
default_fallback_color = np.array([[1, 1, 1, 1]])
@pytest.mark.parametrize(
    'params,expected',
    [
        ({'colormap': color_mapping}, (color_mapping, default_fallback_color)),
        (
            {'colormap': color_mapping, 'fallback_color': fallback_colors},
            (color_mapping, fallback_colors),
        ),
        ({'fallback_color': fallback_colors}, ({}, fallback_colors)),
        # A bare mapping (no 'colormap' key) is also accepted as the whole dict.
        (color_mapping, (color_mapping, default_fallback_color)),
    ],
)
def test_categorical_colormap_from_dict(params, expected):
    """from_dict() accepts kwargs-style dicts or a bare mapping."""
    cmap = CategoricalColormap.from_dict(params)
    np.testing.assert_equal(cmap.colormap, expected[0])
    np.testing.assert_almost_equal(cmap.fallback_color.values, expected[1])
def test_categorical_colormap_equality():
    """Equality compares both the explicit mapping and the fallback cycle."""
    color_cycle = [[1, 1, 1, 1], [1, 0, 0, 1]]
    cmap_1 = CategoricalColormap(fallback_color=color_cycle)
    cmap_2 = CategoricalColormap(fallback_color=color_cycle)
    cmap_3 = CategoricalColormap(fallback_color=[[1, 1, 1, 1], [1, 1, 0, 1]])
    cmap_4 = CategoricalColormap(
        colormap={0: np.array([0, 0, 0, 1])}, fallback_color=color_cycle
    )
    assert cmap_1 == cmap_2
    assert cmap_1 != cmap_3
    assert cmap_1 != cmap_4

    # test equality against a different type
    assert cmap_1 != color_cycle
@pytest.mark.parametrize(
    'params',
    [
        {'colormap': color_mapping},
        {'colormap': color_mapping, 'fallback_color': fallback_colors},
        {'fallback_color': fallback_colors},
    ],
)
def test_categorical_colormap_serialization(params):
    """A colormap survives a JSON round-trip unchanged."""
    cmap_1 = CategoricalColormap(**params)
    cmap_json = cmap_1.json()
    json_dict = json.loads(cmap_json)
    cmap_2 = CategoricalColormap(**json_dict)
    assert cmap_1 == cmap_2
| 4,990 | 1,894 |
import socket
import logging
from sys import argv
import json
import re
def join(clientSocket, currNode, nextNode, oracleIP, oraclePORT):
    """Announce this node to the oracle over UDP and read back the response.

    NOTE(review): the response is only logged; currNode/nextNode are not yet
    updated from it (see the TODO below) and ``nextNode`` is currently unused.
    """
    # Payload format: "[JOIN] {json of this node's addr/port}".
    message = '[JOIN] {}'.format(json.dumps(currNode))
    logging.debug('JOIN MESSAGE: {}'.format(message))
    clientSocket.sendto(message.encode(), (oracleIP, oraclePORT))
    message, addr = clientSocket.recvfrom(1024)
    message = message.decode('utf-8')
    logging.debug('RESPONSE: {}'.format(message))
    # update currNode, nextNode
def leave(clientSocket, currNode, nextNode, oracleIP, oraclePort):
    """Not implemented yet: announce this node's departure to the oracle."""
    pass
def sendMessage(clientSocket, nextNode, message):
    """Not implemented yet: forward a message to the next node in the ring."""
    pass
def receiveMessage(clientSocket):
    """Not implemented yet: receive a message addressed to this node."""
    pass
if __name__ == '__main__':
    # Usage: <script> <oracleIP> <oraclePORT> <clientIP> <clientPORT>
    oracleIP = argv[1]
    oraclePORT = int(argv[2])
    clientIP = argv[3]
    clientPORT = int(argv[4])
    logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', level=logging.DEBUG)
    # UDP socket bound to this node's own address.
    clientSocket = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM)
    clientSocket.bind((clientIP, clientPORT))
    logging.info('CLIENT UP AND RUNNING')
    currNode = {}
    nextNode = {}
    currNode['addr'] = clientIP
    currNode['port'] = str(clientPORT)  # port is serialized as a string in the JOIN payload
    join(clientSocket, currNode, nextNode, oracleIP, oraclePORT)
| 1,254 | 417 |
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.module_loading import import_string
class BaseProcessesFinder(object):
    """
    A base processes loader to be used for custom staticfiles finder
    classes.

    Subclasses must implement both find_processes() and find_descriptors().
    """

    def find_processes(self):
        # Fix: the message named "BaseProcessesLoader", a stale class name.
        raise NotImplementedError(
            'subclasses of BaseProcessesFinder must provide a find_processes() method')

    def find_descriptors(self):
        raise NotImplementedError(
            'subclasses of BaseProcessesFinder must provide a find_descriptors() method')
class FileSystemProcessesFinder(BaseProcessesFinder):
    """Finder that reads explicit directory lists from the Django settings."""

    def find_processes(self):
        # FLOW_PROCESSES_DIRS is optional; default to an empty tuple.
        return getattr(settings, 'FLOW_PROCESSES_DIRS', ())

    def find_descriptors(self):
        return getattr(settings, 'FLOW_DESCRIPTORS_DIRS', ())
class AppDirectoriesFinder(BaseProcessesFinder):
    """Finder that looks for conventional sub-folders inside each installed app."""

    def _find_folders(self, folder_name):
        # Keep only the apps that actually ship the requested sub-folder.
        candidates = (
            os.path.join(app_config.path, folder_name)
            for app_config in apps.get_app_configs()
        )
        return [path for path in candidates if os.path.isdir(path)]

    def find_processes(self):
        return self._find_folders('processes')

    def find_descriptors(self):
        return self._find_folders('descriptors')
def get_finders():
    """Yield an instance of every finder listed in settings.FLOW_PROCESSES_FINDERS."""
    for finder_path in settings.FLOW_PROCESSES_FINDERS:
        yield get_finder(finder_path)
def get_finder(import_path):
    """Import ``import_path``, validate it is a finder subclass, return an instance.

    Raises ImproperlyConfigured when the imported class does not extend
    BaseProcessesFinder.
    """
    Finder = import_string(import_path)
    if not issubclass(Finder, BaseProcessesFinder):
        raise ImproperlyConfigured(
            'Finder "{}" is not a subclass of "{}"'.format(Finder, BaseProcessesFinder))
    return Finder()
| 1,833 | 556 |
from graphite_feeder.handler.event.appliance.sound import player
| 65 | 19 |
"""
django-selectreverse
Custom manager to reduce sql queries for m2m and reverse fk relationships
"""
from django.db import models
from django.db.models.fields.related import ForeignRelatedObjectsDescriptor, ReverseManyRelatedObjectsDescriptor, ManyRelatedObjectsDescriptor
from django.core.exceptions import ImproperlyConfigured
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.generic import ReverseGenericRelatedObjectsDescriptor
class ReverseManager(models.Manager):
    """
    manager that allows you to pass in a dict, declaring a set of reverse relationships, matched to
    an attributename.
    If you use this manager to fetch the items,
    the related items of the reverse relationship will be prefetched into a list for each object
    made available as an attribute of the object.

    NOTE(review): legacy Python 2 / old-Django code (``get_query_set`` API).
    """
    def __init__(self, reversemapping=None):
        # Mapping of {attribute_name: descriptor_name}; may be overridden per query.
        self.reversemapping = reversemapping or {}
        super(ReverseManager, self).__init__()

    def select_reverse(self, reversemapping=None):
        """Return a queryset that prefetches the given (or the default) mapping."""
        reversemapping = reversemapping or self.reversemapping
        return self.get_query_set(reversemapping)

    def get_query_set(self, reversemapping=None):
        reversemapping = reversemapping or self.reversemapping
        return ReverseQuerySet(model=self.model, reversemapping=reversemapping)
class ReverseQuerySet(models.query.QuerySet):
    """QuerySet that, when its cache is filled, bulk-fetches the configured
    reverse relations (one query per mapping entry) and attaches the grouped
    results as list attributes on every object in the cache.

    NOTE(review): Python 2 only (``iteritems``, py2 ``raise`` syntax) and tied
    to old Django descriptor internals; it will not run on Python 3 as-is.
    """
    def __init__(self, model=None, query=None, reversemapping=None, **kwargs):
        self.reversemapping = reversemapping or {}
        super(ReverseQuerySet, self).__init__(model, query, **kwargs)

    def _clone(self, klass=None, setup=False, **kwargs):
        # Propagate the mapping through filter()/exclude()/etc., which clone.
        c = super(ReverseQuerySet, self)._clone(klass=klass, setup=setup, **kwargs)
        c.reversemapping = self.reversemapping
        return c

    def select_reverse(self, reversemapping=None):
        # NOTE(review): silently returns None when called without a mapping —
        # presumably a latent bug; callers must always pass a mapping here.
        if reversemapping:
            q = self._clone()
            q.reversemapping = reversemapping
            return q

    def _fill_cache(self, num=None):
        # After the regular cache fill, prefetch every mapped reverse relation.
        super(ReverseQuerySet, self)._fill_cache(num)
        reversemapping = self.reversemapping or {}
        ids = [item.pk for item in self._result_cache]
        target_maps = {}
        # Refuse to shadow an existing model attribute with a prefetch target.
        for k, v in reversemapping.iteritems():
            if hasattr(self.model, k):
                raise ImproperlyConfigured, "Model %s already has an attribute %s" % (self.model, k)
        for k, v in reversemapping.iteritems():
            target_map= {}
            descriptor = getattr(self.model, v)
            if isinstance(descriptor, ForeignRelatedObjectsDescriptor):
                # Reverse FK: group the related rows by their FK column value.
                rel = getattr(self.model, v).related
                for item in rel.model.objects.filter(**{rel.field.name+'__in':ids}).all():
                    target_map.setdefault(getattr(item, rel.field.get_attname()), []).append(item)
                target_maps[k]=target_map
            elif isinstance(descriptor, ReverseManyRelatedObjectsDescriptor):
                # Forward M2M: pull the join-table column in via extra(select=...).
                field = getattr(self.model, v).field
                for item in field.rel.to.objects.filter(**{self.model.__name__.lower() +'__in':ids}).all().extra( \
                    select={'main_id': field.m2m_db_table() + '.' + field.m2m_column_name()}):
                    target_map.setdefault(getattr(item, 'main_id'), []).append(item)
                target_maps[k]=target_map
            elif isinstance(descriptor, ManyRelatedObjectsDescriptor):
                # Reverse M2M: same join-table trick from the other side.
                rel = getattr(self.model, v).related
                for item in rel.model.objects.filter(**{rel.field.name +'__in':ids}).all().extra( \
                    select={'main_id': rel.field.m2m_db_table() + '.' + rel.field.m2m_column_name()}):
                    target_map.setdefault(getattr(item, 'main_id'), []).append(item)
                target_maps[k]=target_map
            elif isinstance(descriptor, ReverseGenericRelatedObjectsDescriptor):
                # Generic relation: filter on the object id and the content type.
                field = getattr(self.model, v).field
                ct = ContentType.objects.get_for_model(self.model)
                for item in field.rel.to.objects.filter(**{
                    field.object_id_field_name+'__in':ids,
                    field.content_type_field_name+'__exact':ct
                    }).all():
                    target_map.setdefault(getattr(item, field.object_id_field_name), []).append(item)
                target_maps[k]=target_map
            else:
                raise ImproperlyConfigured, "Unsupported mapping %s %s" % (v, descriptor)
        # Attach the grouped lists; objects with no related rows get [].
        for item in self._result_cache:
            for k, v in reversemapping.iteritems():
                setattr(item, k, target_maps[k].get(item.pk,[]))

    def __len__(self):
        if self._result_cache is None:
            list(self) # force the cache creation, is there a better way ?
        return len(self._result_cache)
| 4,827 | 1,368 |
# Generated by Django 2.1.4 on 2018-12-14 04:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: redefine Post.created_at / Post.updated_at as
    plain DateTimeField() (no extra field options)."""

    dependencies = [
        ('api', '0006_auto_20181214_0019'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='created_at',
            field=models.DateTimeField(),
        ),
        migrations.AlterField(
            model_name='post',
            name='updated_at',
            field=models.DateTimeField(),
        ),
    ]
| 521 | 174 |
from columnar import columnar
def print_table(minute: str, hour: str, day: str, month: str, week: str, cmd: str) -> str:
    """Render the cron fields and command as a two-column, borderless table."""
    labels = ('minute', 'hour', 'day of month', 'month', 'day of week', 'command')
    values = (minute, hour, day, month, week, cmd)
    rows = [[label, value] for label, value in zip(labels, values)]
    return str(columnar(rows, no_borders=True))
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import sys
from typing import TextIO
def is_capable(file: TextIO = sys.stderr) -> bool:
    """
    Determine whether we are connected to a capable terminal.

    Returns False for streams without an OS-level file descriptor (for
    example io.StringIO or a captured/replaced sys.stderr), for descriptors
    that are not a tty, and for terminals known to lack capabilities.
    """
    try:
        fd = file.fileno()
    except (AttributeError, OSError):
        # No underlying descriptor at all -> certainly not a terminal.
        # (io.UnsupportedOperation subclasses OSError, so it is caught here.)
        return False
    if not os.isatty(fd):
        return False
    terminal = os.getenv("TERM", "dumb")
    # Hardcoded list of non-capable terminals.
    return terminal not in ["dumb", "emacs"]
| 547 | 169 |
from gather.utils import download
import hashlib
from helpers import get_datapath
def test_download(requests_mock):
    """download() returns the body on HTTP 200 and a falsy value on error codes."""
    beaco2n_data = get_datapath(filename="test_data.csv", network="beaco2n")
    binary_data = beaco2n_data.read_bytes()
    mock_url = "https://example.com/some_csv.txt"
    requests_mock.get(
        mock_url,
        content=binary_data,
        status_code=200,
    )
    data = download(url=mock_url)
    # Integrity check: known MD5 digest of the test_data.csv fixture.
    md5 = hashlib.md5(data).hexdigest()
    assert md5 == "b62cdb3234e6afb87fc3de8605ae1b09"
    requests_mock.get(
        mock_url,
        status_code=404,
    )
    data = download(url=mock_url)
    assert not data
    # Non-standard status codes are handled like regular HTTP errors.
    requests_mock.get(
        mock_url,
        status_code=666,
    )
    data = download(url=mock_url)
    assert not data
| 778 | 296 |
import torch
import torch.nn as nn
import torch.optim as optim
# Create Model
class LeNet_pt(nn.Module):
    """LeNet-5: three 5x5 conv stages (with average pooling) followed by a
    two-layer dense head producing 10 class scores.

    Expects input of shape (batch, 3, 32, 32).
    """

    def __init__(self):
        super().__init__()
        # 3 -> 6 -> 16 -> 120 channels; spatial size 32 -> 1 via valid convs + pools.
        self.ConvModel = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=6, kernel_size=(5, 5), padding=(0, 0), stride=(1, 1)),
            nn.ReLU(),
            nn.AvgPool2d(kernel_size=(2, 2), stride=(2, 2)),
            nn.Conv2d(in_channels=6, out_channels=16, kernel_size=(5, 5), padding=(0, 0), stride=(1, 1)),
            nn.ReLU(),
            nn.AvgPool2d(kernel_size=(2, 2), stride=(2, 2)),
            nn.Conv2d(in_channels=16, out_channels=120, kernel_size=(5, 5), padding=(0, 0), stride=(1, 1)),
        )
        self.DenseModel = nn.Sequential(
            nn.Linear(120, 84),
            nn.Linear(84, 10),
        )

    def forward(self, x):
        features = self.ConvModel(x)
        flat = features.reshape(features.shape[0], -1)
        return self.DenseModel(flat)
| 917 | 469 |
import itertools
import os
import random as random1
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from PIL import Image, ImageChops, ImageEnhance
from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPool2D
from keras.models import Sequential
from keras.optimizers import RMSprop
from keras.utils.np_utils import to_categorical
from pylab import *
from sklearn import *
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
def train_Ela_Model(csv_file , lr , ep):
    """Train a small CNN on Error Level Analysis (ELA) images for forgery detection.

    Args:
        csv_file: CSV whose first column is an image path and second column the
            binary label (accessed positionally via row[0] / row[1]).
        lr: learning rate for RMSprop.
        ep: number of training epochs.

    Returns:
        (plot_Name, Model_Name): paths of the saved diagnostics figure and the
        saved Keras model (.h5).
    """
    def convert_to_ela_image(path, quality):
        """Re-save the image as JPEG at `quality` and return the brightness-scaled
        pixel difference (the ELA image).

        NOTE(review): writes a '<name>.resaved.jpg' file next to the original and
        never deletes it.
        """
        filename = path
        resaved_filename = filename.split('.')[0] + '.resaved.jpg'
        im = Image.open(filename).convert('RGB')
        im.save(resaved_filename, 'JPEG', quality=quality)
        resaved_im = Image.open(resaved_filename)
        ela_im = ImageChops.difference(im, resaved_im)
        extrema = ela_im.getextrema()
        max_diff = max([ex[1] for ex in extrema])
        if max_diff == 0:
            max_diff = 1
        # Stretch the difference so the largest deviation maps to full brightness.
        scale = 255.0 / max_diff
        ela_im = ImageEnhance.Brightness(ela_im).enhance(scale)
        return ela_im
    dataset = pd.read_csv(csv_file)
    X = []
    Y = []
    # Build the training set: 128x128 ELA images flattened and scaled to [0, 1].
    for index, row in dataset.iterrows():
        X.append(array(convert_to_ela_image(row[0], 90).resize((128, 128))).flatten() / 255.0)
        Y.append(row[1])
    X = np.array(X)
    Y = to_categorical(Y, 2)
    X = X.reshape(-1, 128, 128, 3)
    X_train, X_val, Y_train, Y_val = train_test_split(X, Y, test_size=0.1, random_state=5, shuffle=True)
    # Two conv layers + max-pool + dropout, then a dense softmax head.
    model = Sequential()
    model.add(Conv2D(filters=32, kernel_size=(5, 5), padding='valid', activation='relu', input_shape=(128, 128, 3)))
    model.add(Conv2D(filters=32, kernel_size=(5, 5), strides=(2, 2), padding='valid', activation='relu'))
    model.add(MaxPool2D(pool_size=2, strides=None, padding='valid', data_format='channels_last'))
    model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(256, activation="relu"))
    model.add(Dropout(0.50))
    model.add(Dense(2, activation="softmax"))
    model.summary()
    optimizer = RMSprop(lr=lr, rho=0.9, epsilon=1e-08, decay=0.0)
    model.compile(optimizer=optimizer, loss="categorical_crossentropy", metrics=["accuracy"])
    #early_stopping = EarlyStopping(monitor='val_acc', min_delta=0, patience=2, verbose=0, mode='auto')
    epochs = ep
    batch_size = 5
    history = model.fit(X_train, Y_train, batch_size=batch_size, epochs=epochs, validation_data=(X_val, Y_val), verbose=2)
    # Diagnostics figure: loss curves, accuracy curves and a confusion matrix.
    fig, ax = plt.subplots(3, 1)
    ax[0].plot(history.history['loss'], color='b', label="Training loss")
    ax[0].plot(history.history['val_loss'], color='r', label="validation loss", axes=ax[0])
    legend = ax[0].legend(loc='best', shadow=True)
    ax[1].plot(history.history['accuracy'], color='b', label="Training accuracy")
    ax[1].plot(history.history['val_accuracy'], color='r', label="Validation accuracy")
    legend_ = ax[1].legend(loc='best', shadow=True)
    def plot_confusion_matrix(cm_, classes, normalize=False, title_='Confusion matrix', cmap=cm.get_cmap("Spectral")):
        """
        This function prints and plots the confusion matrix.
        Normalization can be applied by setting `normalize=True`.
        """
        plt.imshow(cm_, interpolation='nearest', cmap=cmap)
        plt.title(title_)
        plt.colorbar()
        tick_marks = np.arange(len(classes))
        plt.xticks(tick_marks, classes)
        plt.yticks(tick_marks, classes)
        if normalize:
            cm_ = cm_.astype('float') / cm_.sum(axis=1)[:, np.newaxis]
        thresh = cm_.max() / 2.
        for i, j in itertools.product(range(cm_.shape[0]), range(cm_.shape[1])):
            plt.text(j, i, cm_[i, j],
                horizontalalignment="center",
                color="white" if cm_[i, j] > thresh else "black")
        plt.tight_layout()
        plt.ylabel('True label')
        plt.xlabel('Predicted label')
    Y_pred = model.predict(X_val)
    Y_pred_classes = np.argmax(Y_pred, axis=1)
    Y_true = np.argmax(Y_val, axis=1)
    confusion_mtx = confusion_matrix(Y_true, Y_pred_classes)
    plot_confusion_matrix(confusion_mtx, classes=range(2))
    #plt.show()
    # Save figure and model under randomized names to avoid collisions.
    image_path = os.getcwd()+"\\Figures"
    Models_path = os.getcwd()+"\\Re_Traind_Models"
    file_number =random1.randint(1, 1000000)
    plot_Name = image_path+"\\ELA_"+str(file_number)+".png"
    Model_Name = Models_path+"\\ELA_"+str(file_number)+".h5"
    # NOTE(review): 'bbox_incehs' looks like a typo for 'bbox_inches' — verify
    # against the matplotlib savefig signature in use.
    plt.savefig(plot_Name , transparent =True , bbox_incehs="tight" , pad_inches = 2 , dpi = 50)
    model.save(Model_Name)
    return plot_Name , Model_Name
| 4,782 | 1,793 |
import os
import torch
import torch.nn as nn
from importlib import import_module
class Model(nn.Module):
    """Wrapper module that loads a network implementation by name.

    The concrete architecture lives in ``model.<modelname>`` and must expose a
    ``make_model()`` factory.
    """

    def __init__(self, modelname):
        super(Model, self).__init__()
        print('Making model...')
        implementation = import_module('model.' + modelname)
        self.model = implementation.make_model()

    def forward(self, x):
        # Delegate straight to the wrapped network.
        return self.model(x)
| 380 | 115 |
from natural.constant import _, IBAN_ALPHABET
from natural.constant import BBAN_RULES, BBAN_PATTERN, BBAN_MAP
import re
def bban_compact(number):
    '''
    Printable compacted Basic Bank Account Number: strips the padding
    characters (dashes, dots and spaces).

    :param number: string or int

    >>> bban_compact('1234.56.78.90')
    '1234567890'
    >>> bban_compact('068-9999995-01')
    '068999999501'
    '''
    return ''.join(ch for ch in str(number) if ch not in '-. ')
def bban_base10(number):
    '''
    Printable Basic Bank Account Number in base-10.

    :param number: string

    >>> bban_base10('01234567')
    '45670123'
    >>> bban_base10('ABCD')
    '10111213'
    '''
    compacted = bban_compact(number)
    # Move the leading four characters (country + check digits) to the end.
    rotated = compacted[4:] + compacted[:4]
    # Each character becomes its (possibly two-digit) index in the IBAN alphabet.
    return ''.join(str(IBAN_ALPHABET.index(char)) for char in rotated)
def _bban_regex(structure):
    """Compile a country's BBAN ``structure`` spec into an anchored regex.

    Each token matched by BBAN_PATTERN is rewritten as the character class from
    BBAN_MAP (keyed by group 2) repeated group-1 times.
    """
    return re.compile(
        r'^%s$' % BBAN_PATTERN.sub(
            lambda m: '%s{%s}' % (BBAN_MAP[m.group(2)], m.group(1)),
            structure,
        )
    )
def bban(value, country=None, validate=False):
    '''
    Printable Basic Bank Account Number (BBAN) for the given country code. The
    ``country`` must be a valid ISO 3166-2 country code.

    :param value: string or int
    :param country: string (only required when ``validate`` is True)

    >>> bban('068-9999995-01', 'BE')
    '068999999501'
    >>> bban('555', 'NL')
    '555'
    >>> bban('555', 'NL', validate=True)
    Traceback (most recent call last):
        ...
    ValueError: Invalid BBAN, number does not match specification
    >>> bban('123', 'XY', validate=True)
    Traceback (most recent call last):
        ...
    ValueError: Invalid BBAN, country unknown
    '''
    value = bban_compact(value)
    if validate:
        # `country` is only dereferenced when validating.
        country = country.upper()
        try:
            rules = BBAN_RULES[country]
        except KeyError:
            raise ValueError(_('Invalid BBAN, country unknown'))
        # Compile the country structure spec and match the compacted number.
        regex = _bban_regex(rules['bban'])
        if not regex.match(value):
            raise ValueError(
                _('Invalid BBAN, number does not match specification')
            )
    return value
def iban(number, validate=False):
    '''
    Printable International Bank Account Number (IBAN) as specified in ISO
    13616. The result is formatted in groups of four characters.

    :param number: string

    >>> iban('BE43068999999501')
    'BE43 0689 9999 9501'
    >>> iban('XY32012341234123', validate=True)
    Traceback (most recent call last):
        ...
    ValueError: Invalid IBAN, country unknown
    >>> iban('BE43068999999502', validate=True)
    Traceback (most recent call last):
        ...
    ValueError: Invalid IBAN, digits check failed
    '''
    number = bban_compact(number)
    if validate:
        country = number[:2]
        if country not in BBAN_RULES:
            raise ValueError(_('Invalid IBAN, country unknown'))
        # Do the 10-mod-97 check
        digits = bban_base10(number)
        if int(digits) % 97 != 1:
            raise ValueError(_('Invalid IBAN, digits check failed'))
        # Check BBAN for country
        bban(number[4:], country, validate=True)
    groups = [number[x:x + 4] for x in range(0, len(number), 4)]
    return ' '.join(groups)
| 3,140 | 1,124 |
from synapyse.base.learning.unsupervised_learning import UnsupervisedLearning
__author__ = 'Douglas Eric Fonseca Rodrigues'
class UnsupervisedHebbianLearning(UnsupervisedLearning):
    """Plain Hebbian rule: each connection weight grows by
    learning_rate * input * output."""

    def __init__(self, neural_network, learning_rate):
        """
        :type neural_network: synapyse.base.neural_network.NeuralNetwork
        :type learning_rate: float
        """
        UnsupervisedLearning.__init__(self, neural_network, learning_rate, 1)

    def update_network_weights(self):
        """Apply the Hebbian update to every input connection of each output neuron."""
        for output_neuron in self.neural_network.output_neurons:
            for conn in output_neuron.input_connections.values():
                self.calculate_new_weight(conn, conn.origin.output, output_neuron.output)

    def calculate_new_weight(self, connection, input_value, output_value):
        """
        :type connection: synapyse.base.connection.Connection
        :type input_value: float
        :type output_value: float
        """
        # Hebb's rule: co-active endpoints strengthen the connection.
        connection.weight += input_value * output_value * self.learning_rate
# -*- coding: UTF-8 -*-
from WinIO.Controls import ReuseWindow
from WinIO.Core import PyDelegateConverter as PyDel
from WinIO2.Core.FunctionTool import FunctionChain
class AcrylicWindowStyle(object):
    """Enumeration of acrylic window chrome styles."""

    Normal = 0  # standard title bar with icon
    NoIcon = 1  # title bar without the window icon
    Null = 2    # no chrome at all
class BlankWindow(ReuseWindow):
    """Reusable WinIO window exposing pythonic wrappers over the .NET properties.

    NOTE(review): __init__ does not call ReuseWindow.__init__ — presumably the
    WinIO base initializes itself; confirm against the WinIO API.
    """
    def __init__(self):
        # Python-side chain of callbacks fired after the window is hidden;
        # bridged to the .NET AfterHidden event through a delegate converter.
        self.after_hidden = FunctionChain()
        self.AfterHidden += PyDel.ToEventHandler(self.after_hidden)

    @property
    def title(self):
        # snake_case alias of the .NET Title property.
        return self.Title

    @title.setter
    def title(self, value):
        self.Title = value

    @property
    def height(self):
        return self.Height

    @height.setter
    def height(self, value):
        self.Height = value

    @property
    def width(self):
        return self.Width

    @width.setter
    def width(self, value):
        self.Width = value

    def set_window_style(self, style):
        """Set the acrylic chrome style (an AcrylicWindowStyle value)."""
        self.AcrylicWindowStyle = style
from collections import namedtuple
import state_manager
import background
import sound
import quit_
import howto
import locations
import pygame
import consts
class MenuEntry(object):
    """A single selectable menu item: pre-rendered normal and selected surfaces
    plus its screen position for hit testing."""

    def __init__(self, name: str, typ) -> None:
        super().__init__()
        self._name = name
        # Opaque payload identifying what this entry triggers (exposed via .typ).
        self._typ = typ
        self._surface = None
        self._surface_selected = None
        self._x = 0
        self._y = 0
        self._h = 0
        self._w = 0

    def prepare(self, font: pygame.font.Font, font_selected: pygame.font.Font) -> None:
        """Pre-render both states; both surfaces share one size (the max of the
        two renders plus padding) so selection does not shift the layout."""
        surface = font.render(self._name, True, (255, 255, 255), None)
        surface_selected = font_selected.render(self._name, True, (255, 255, 255), None)
        self._w = max(surface.get_width(), surface_selected.get_width()) + 20
        self._h = max(surface.get_height(), surface_selected.get_height()) + 10
        self._surface = pygame.Surface((self._w, self._h), pygame.SRCALPHA, None)
        self._surface.blit(surface, (self._w // 2 - surface.get_width() // 2, self._h // 2 - surface.get_height() // 2))
        self._surface_selected = pygame.Surface((self._w, self._h), pygame.SRCALPHA, None)
        self._surface_selected.fill((0, 0, 0))
        self._surface_selected.blit(surface_selected, (self._w // 2 - surface_selected.get_width() // 2, self._h // 2 - surface_selected.get_height() // 2))

    def contains_pos(self, x: int, y: int) -> bool:
        # Strict inequalities: points exactly on the edge do not count as inside.
        return (self._x < x < (self._x + self._surface.get_width())) and (self._y < y < (self._y + self._surface.get_height()))

    def set_pos(self, x: int, y: int):
        self._x = x
        self._y = y

    def render(self, target: pygame.Surface, selected: bool):
        s = self._surface_selected if selected else self._surface
        target.blit(s, (self._x, self._y))
        if selected:
            # Outline the selected entry with a 3px white border.
            pygame.draw.rect(target, (255, 255, 255), (self._x, self._y, s.get_width(), s.get_height()), 3)

    def __hash__(self):
        return hash(self._name)

    def __eq__(self, other):
        # NOTE(review): equality by name hash only — entries with equal names
        # compare equal regardless of their typ payload.
        return hash(self) == hash(other)

    height = property(lambda s: s._h)
    width = property(lambda s: s._w)
    typ = property(lambda s: s._typ)
class Menu(state_manager.State):
    """Main-menu state: draws the entry list plus a crosshair cursor and
    handles keyboard/mouse selection, switching states on activation."""

    def __init__(self) -> None:
        super().__init__()
        self._menu_entries = []  # ordered MenuEntry instances
        self._initialized = False  # entry positions are laid out lazily in enter()
        # Park the crosshair off-screen until the first MOUSEMOTION event.
        self._mouse = (consts.SCREEN_W + 1, consts.SCREEN_H + 1)
        self._idx = 0  # index of the currently selected entry
        self._background = None  # created fresh on every enter()
        self._font = pygame.font.Font(locations.font('DejaVuSansMono.ttf'), 24)
        self._font_selected = pygame.font.Font(locations.font('DejaVuSansMono-Bold.ttf'), 28)
        self._sound = sound.Sound()

    def add(self, menu_entry: MenuEntry):
        """Pre-render and register a menu entry (order defines layout order)."""
        menu_entry.prepare(self._font, self._font_selected)
        self._menu_entries.append(menu_entry)

    def select(self, idx: int) -> None:
        """Select entry *idx* (wraps around); plays a hover sound on change."""
        if idx != self._idx:
            self._idx = idx % len(self._menu_entries)
            self._sound.play(sound.Sounds.MENU_HOVER)

    def render(self) -> None:
        """Draw background, entries, and a crosshair at the mouse position."""
        self.screen.fill((0, 0, 0))
        self._background.render(self.screen)
        for i in range(len(self._menu_entries)):
            self._menu_entries[i].render(self.screen, self._idx == i)
        # Crosshair cursor: a vertical and a horizontal 20px line.
        pygame.draw.line(self.screen, (255, 255, 255), (self._mouse[0], self._mouse[1] - 10), (self._mouse[0], self._mouse[1] + 10), 2)
        pygame.draw.line(self.screen, (255, 255, 255), (self._mouse[0] - 10, self._mouse[1]), (self._mouse[0] + 10, self._mouse[1]), 2)
        #pygame.draw.rect(self.screen, (255, 255, 255), (*self._mouse, 10, 10))
        pygame.display.flip()

    def input(self) -> None:
        """Drain the pygame event queue: arrows/enter for keyboard control,
        motion/click for mouse control.  (Name shadows builtin input().)"""
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                self.state_manager.change_state(quit_.Quit)
            elif event.type == pygame.KEYDOWN:
                i = 0
                if event.key in (pygame.K_UP, pygame.K_LEFT):
                    i = -1
                elif event.key in (pygame.K_DOWN, pygame.K_RIGHT):
                    i = 1
                if i != 0:
                    self.select(self._idx + i)
                    continue
                if event.key in (pygame.K_RETURN, pygame.K_SPACE):
                    # Activate the currently selected entry.
                    me = self._menu_entries[self._idx]
                    self._sound.play(sound.Sounds.MENU_SELECT)
                    self.state_manager.change_state(me.typ)
                    continue
            elif event.type == pygame.MOUSEMOTION:
                self._mouse = event.pos
                # Hovering over an entry selects it.
                for i in range(len(self._menu_entries)):
                    if self._menu_entries[i].contains_pos(*self._mouse):
                        self.select(i)
            elif event.type == pygame.MOUSEBUTTONDOWN:
                l, m, r = pygame.mouse.get_pressed()
                if l == 1:
                    # Left click activates only if the click lands on the
                    # already-selected entry (hover selected it first).
                    me = self._menu_entries[self._idx]
                    if me.contains_pos(*self._mouse):
                        self._sound.play(sound.Sounds.MENU_SELECT)
                        self.state_manager.change_state(me.typ)

    def update(self, delta: int, fps: float):
        self._background.update(delta)

    def enter(self, prev_: state_manager.StateType) -> None:
        """(Re)create the background and, on first entry, vertically center
        the entry stack on the screen."""
        self._background = background.FloatingEditors(consts.SCREEN_W, consts.SCREEN_H)
        self._idx = 0
        if not self._initialized:
            # Total stack height with 10px spacing per entry.
            h = sum([me.height + 10 for me in self._menu_entries])
            # NOTE(review): w is computed but never used.
            w = max([me.width for me in self._menu_entries])
            offset = h // len(self._menu_entries)
            t = consts.SCREEN_H // 2 - h // 2
            for i in range(len(self._menu_entries)):
                self._menu_entries[i].set_pos(consts.SCREEN_W // 2 - self._menu_entries[i].width // 2, t)
                t += offset
            self._initialized = True
if __name__ == '__main__':
    # Imported here (not at module top) to avoid a circular import when
    # game/about themselves import this menu module.
    import game
    import about

    def init():
        # Initialise pygame, open the window, and hide the OS cursor
        # (the menu draws its own crosshair).
        pygame.init()
        screen = pygame.display.set_mode((consts.SCREEN_W, consts.SCREEN_H))
        pygame.display.set_caption('Coder')
        pygame.mouse.set_visible(0)
        return screen

    # Build the state machine: one Menu plus one instance of each target state.
    sm = state_manager.StateManager(init())
    m = Menu()
    m.add(MenuEntry('Start', game.Game))
    m.add(MenuEntry('HowTo', howto.HowTo))
    m.add(MenuEntry('About', about.About))
    # NOTE(review): typ None presumably signals "quit" to the state manager
    # — confirm against state_manager.change_state.
    m.add(MenuEntry('Quit', None))
    sm.add_state(m)
    sm.add_state(game.Game())
    sm.add_state(howto.HowTo())
    sm.add_state(about.About())
    sm.change_state(Menu)
    sm.main_loop()
from typing import (
Any,
Callable,
List,
NamedTuple,
Optional,
Tuple,
Type,
Union,
overload,
)
import numpy as np
from scipy import special
# Type aliases used throughout the autograd module.
Array = Union[np.ndarray]  # a numpy array (single-member Union, i.e. plain alias)
Numeric = Union[int, float]  # plain Python scalars
# Lists = Union[Numeric, List['Lists']]
Tuplist = Union[Tuple[int, ...], List[int]]  # shape-like sequences of ints
Dim = Union[int, Tuplist]  # axis specification: one axis or several
Arrayable = Union[Numeric, Tuplist, Array]  # anything coercible to an Array
Tensorable = Union[Arrayable, "Tensor"]  # anything coercible to a Tensor
# Re-exported numpy dtypes so callers need not import numpy themselves.
float32 = np.float32
float64 = np.float64
int64 = np.int64
def ensure_array(arr: Arrayable, dtype: Optional[Union[str, Type]] = None) -> Array:
    """Coerce *arr* to a numpy array, copying only when necessary.

    Uses ``np.asarray`` instead of ``np.array(..., copy=False)``: under
    NumPy 2.0 ``copy=False`` raises when a copy cannot be avoided, whereas
    ``asarray`` keeps the intended copy-only-if-needed semantics.
    """
    return np.asarray(arr, dtype=dtype)
def ensure_tensor(arr: Tensorable) -> "Tensor":
    """Pass Tensor instances through unchanged; wrap anything else."""
    return arr if isinstance(arr, Tensor) else Tensor(data=arr)
class Dependancy(NamedTuple):
    # One edge of the autograd graph: the parent tensor plus the function
    # mapping the downstream gradient to the parent's gradient (a VJP).
    # NOTE(review): name is a misspelling of "Dependency"; kept as-is for
    # compatibility with existing callers.
    tensor: "Tensor"
    grad_fn: Callable[[Array], Array]
class Tensor:
    """A numpy-backed tensor with reverse-mode automatic differentiation.

    Every differentiable op records ``Dependancy`` edges from its result
    back to its inputs; :meth:`backward` walks those edges and accumulates
    gradients onto leaf tensors created with ``requires_grad=True``.
    """

    # Global flag toggled by the ``no_grad`` context manager; while True,
    # ops create tensors without recording dependencies.
    no_grad = False

    def __init__(
        self,
        data: Arrayable,
        dtype: Optional[Union[str, Type]] = None,
        depends_on: Optional[List[Dependancy]] = None,
        requires_grad: bool = False,
    ) -> None:
        self._data: Array = ensure_array(data, dtype=dtype)
        self._dtype: str = self._data.dtype.name
        # Gradients are only meaningful for floating point data.
        if requires_grad and "float" not in self._dtype:
            raise RuntimeError("Only float tensors support requires_grad")
        self._depends_on: List[Dependancy] = depends_on or []
        # A leaf is a tensor that was not produced by a tracked op.
        self._is_leaf: bool = not self._depends_on
        self._requires_grad: bool = requires_grad
        self._grad: Optional[Tensor] = None

    # --- arithmetic operators -------------------------------------------

    def __neg__(self) -> "Tensor":
        return neg(self)

    def __add__(self, other: Tensorable) -> "Tensor":
        return add(self, ensure_tensor(other))

    def __radd__(self, other: Tensorable) -> "Tensor":
        # Addition commutes, so the reflected form can delegate.
        return self.__add__(other)

    def __iadd__(self, other: Tensorable) -> "Tensor":
        # In-place update; the ``data`` setter rejects this for tensors
        # that require grad (outside ``no_grad``).
        self.data += ensure_tensor(other).data
        return self

    def __sub__(self, other: Tensorable) -> "Tensor":
        return sub(self, ensure_tensor(other))

    def __rsub__(self, other: Tensorable) -> "Tensor":
        # Bug fix: ``other - self`` must subtract self FROM other.  The old
        # implementation delegated to __sub__ and computed self - other.
        return sub(ensure_tensor(other), self)

    def __isub__(self, other: Tensorable) -> "Tensor":
        self.data -= ensure_tensor(other).data
        return self

    def __mul__(self, other: Tensorable) -> "Tensor":
        return mul(self, ensure_tensor(other))

    def __rmul__(self, other: Tensorable) -> "Tensor":
        # Multiplication commutes as well.
        return self.__mul__(other)

    def __imul__(self, other: Tensorable) -> "Tensor":
        self.data *= ensure_tensor(other).data
        return self

    def __truediv__(self, other: Tensorable) -> "Tensor":
        return div(self, ensure_tensor(other))

    def __rtruediv__(self, other: Tensorable) -> "Tensor":
        # Division does not commute: compute other / self.
        return div(ensure_tensor(other), self)

    def __itruediv__(self, other: Tensorable) -> "Tensor":
        self.data /= ensure_tensor(other).data
        return self

    def __pow__(self, other: Numeric) -> "Tensor":
        return pow(self, other)

    def __matmul__(self, other: Tensorable) -> "Tensor":
        return matmul(self, ensure_tensor(other))

    def __len__(self) -> int:
        return len(self.data)

    def __getitem__(
        self, indices: Union[None, int, slice, Tuple[Any, ...]]
    ) -> "Tensor":
        return tslice(self, indices)

    def __str__(self) -> str:
        return self.__repr__()

    def __repr__(self) -> str:
        return f"tensor({self._data}, requires_grad={self.requires_grad})"

    # --- data and autograd flags ----------------------------------------

    @property
    def data(self) -> Array:
        return self._data

    @data.setter
    def data(self, new_data: Array) -> None:
        # Rebinding the data of a grad-requiring tensor would silently
        # invalidate the recorded graph, so it is forbidden outside no_grad.
        if not self.no_grad and self.requires_grad:
            raise RuntimeError(
                "Variable that requires grad has been used in an in-place operation."
            )
        self._data = new_data

    @property
    def dtype(self) -> str:
        return self._dtype

    @property
    def requires_grad(self) -> bool:
        return self._requires_grad

    @requires_grad.setter
    def requires_grad(self, value: bool) -> None:
        if not self.is_leaf:
            raise RuntimeError(
                "you can only change requires_grad flags of leaf variables"
            )
        self._requires_grad = value

    @property
    def is_leaf(self) -> bool:
        return self._is_leaf

    @property
    def depends_on(self) -> List[Dependancy]:
        return self._depends_on

    @property
    def grad(self) -> Optional["Tensor"]:
        return self._grad

    @grad.setter
    def grad(self, other: Optional["Tensor"]) -> None:
        self._grad = other

    # --- shape helpers ---------------------------------------------------

    def numel(self) -> int:
        """Total number of elements."""
        return numel(self)

    @property
    def shape(self) -> Tuple[int, ...]:
        return self.data.shape  # type: ignore

    @overload
    def size(self) -> Tuple[int, ...]:
        ...

    @overload
    def size(self, dim: int) -> int:
        ...

    def size(self, dim: Optional[int] = None) -> Union[int, Tuple[int, ...]]:
        """Whole shape when called without a dim, else the extent of ``dim``."""
        if dim is None:
            return self.shape
        return self.shape[dim]

    @property
    def ndim(self) -> int:
        return len(self.shape)

    def dim(self) -> int:
        return len(self.shape)

    def ndimension(self) -> int:
        return len(self.shape)

    def reshape(self, shape: Tuplist) -> "Tensor":
        return reshape(self, shape)

    def view(self, *shape: int) -> "Tensor":
        return self.reshape(shape)

    def transpose(self, dim0: int, dim1: int) -> "Tensor":
        return transpose(self, dim0=dim0, dim1=dim1)

    def t(self) -> "Tensor":
        return self.transpose(dim0=0, dim1=1)

    # --- conversions ------------------------------------------------------

    def item(self) -> Numeric:
        return self.data.item()  # type: ignore

    def tolist(self) -> list:
        return self.data.tolist()  # type: ignore

    def numpy(self) -> Array:
        if self.requires_grad:
            raise RuntimeError(
                "Can't call numpy() on Variable that requires grad. Use .detach().numpy() instead"
            )
        return self.data

    def cpu(self) -> "Tensor":
        # CPU-only library; provided for torch API compatibility.
        return self

    def cuda(self) -> "Tensor":
        # CPU-only library; provided for torch API compatibility.
        return self

    # --- reductions and pointwise ops ------------------------------------

    def sum(self, dim: Optional[Dim] = None, keepdim: bool = False) -> "Tensor":
        return reduce_sum(self, dim=dim, keepdim=keepdim)

    def mean(self, dim: Optional[Dim] = None, keepdim: bool = False) -> "Tensor":
        return reduce_mean(self, dim=dim, keepdim=keepdim)

    @overload
    def max(self) -> "Tensor":
        ...

    @overload
    def max(self, dim: int) -> Tuple["Tensor", "Tensor"]:
        ...

    @overload
    def max(self, dim: int, keepdim: bool) -> Tuple["Tensor", "Tensor"]:
        ...

    def max(
        self, dim: Optional[int] = None, keepdim: bool = False
    ) -> Union["Tensor", Tuple["Tensor", "Tensor"]]:
        return reduce_max(self, dim=dim, keepdim=keepdim)

    def argmax(self, dim: Optional[int] = None, keepdim: bool = False) -> "Tensor":
        return reduce_argmax(self, dim=dim, keepdim=keepdim)

    def pow(self, val: Numeric) -> "Tensor":
        return pow(self, val)

    def exp(self) -> "Tensor":
        return exp(self)

    def log(self) -> "Tensor":
        return log(self)

    def log1p(self) -> "Tensor":
        return log1p(self)

    def sigmoid(self) -> "Tensor":
        return sigmoid(self)

    def tanh(self) -> "Tensor":
        return tanh(self)

    def detach(self) -> "Tensor":
        """A new leaf tensor sharing this data but cut off from the graph."""
        return Tensor(data=self.data)

    # --- in-place initialisers -------------------------------------------

    def zero_(self) -> None:
        if not Tensor.no_grad and self.requires_grad:
            raise RuntimeError(
                "a leaf Variable that requires grad has been used in an in-place operation."
            )
        self._data.fill(0)

    def uniform_(self, _from: Numeric = 0, to: Numeric = 1) -> None:
        if not Tensor.no_grad and self.requires_grad:
            raise RuntimeError(
                "a leaf Variable that requires grad has been used in an in-place operation."
            )
        self._data[:] = np.random.uniform(low=_from, high=to, size=self._data.shape)

    # --- backpropagation --------------------------------------------------

    def backward(
        self, grad: Optional["Tensor"] = None, retain_graph: bool = False
    ) -> None:
        """Backpropagate ``grad`` through the graph rooted at this tensor.

        When ``retain_graph`` is False the recorded dependencies are freed
        afterwards, so a second backward pass is impossible.
        """
        self._backward(grad)
        if not retain_graph:
            self._free_buffers()

    def _backward(self, grad: Optional["Tensor"] = None) -> None:
        if not self.requires_grad:
            raise RuntimeError("Variable has to be differentiable")
        if grad is None:
            if np.prod(self.shape) == 1:
                # Scalar output: seed the chain rule with d(out)/d(out) = 1.
                grad = Tensor(1, dtype=self.data.dtype)
            else:
                raise RuntimeError(
                    "grad can be implicitly created only for scalar outputs"
                )
        if self.is_leaf:
            # Accumulate into .grad (initialised lazily to zeros).
            if self._grad is None:
                self._grad = Tensor(np.zeros_like(self.data))
            self._grad.data += grad.data
        for dependancy in self.depends_on:
            backward_grad = dependancy.grad_fn(grad.data)
            dependancy.tensor._backward(Tensor(backward_grad))

    def _free_buffers(self) -> None:
        # Recursively drop the graph edges so intermediate tensors can be
        # garbage collected; also marks this pass as non-repeatable.
        for dependancy in self.depends_on:
            dependancy.tensor._free_buffers()
        self._depends_on = []
def tensor(
    data: Arrayable,
    dtype: Optional[Union[str, Type]] = None,
    requires_grad: bool = False,
) -> Tensor:
    """Convenience factory: build a leaf Tensor from any array-like value."""
    return Tensor(data=data, dtype=dtype, requires_grad=requires_grad)
class no_grad:
    """Context manager that globally disables gradient tracking.

    The previous ``Tensor.no_grad`` flag is saved on entry and restored on
    exit, so instances nest correctly.
    """

    def __enter__(self) -> None:
        self.prev = Tensor.no_grad
        Tensor.no_grad = True

    def __exit__(self, *args: Any) -> None:
        Tensor.no_grad = self.prev
# Tensors
def numel(input: Tensor) -> int:
    """Total number of elements in *input* (product of its shape)."""
    return int(input.data.size)
# Reduction operations
def reduce_sum(
    input: Tensor, dim: Optional[Dim] = None, keepdim: bool = False
) -> Tensor:
    """Sum over ``dim`` (all axes when ``dim`` is None).

    The backward pass broadcasts the upstream gradient back over the
    reduced axes via an all-ones adjoint of the input's rank.
    """
    if dim is None:
        assert not keepdim
    data = input.data.sum(axis=dim, keepdims=keepdim)
    requires_grad = not Tensor.no_grad and input.requires_grad
    depends_on = []
    if requires_grad:
        def grad_fn(grad: Array) -> Array:
            # ``dim`` is normalised to a list below and the rebinding must
            # survive repeated calls (retain_graph), hence the nonlocal.
            nonlocal dim
            shape = [1] * input.data.ndim
            if dim is not None:
                if not keepdim:
                    # Re-insert the reduced axis so broadcasting lines up.
                    grad = np.expand_dims(grad, dim)
                if isinstance(dim, int):
                    dim = [dim]
                for d in dim:
                    shape[d] = input.shape[d]
            adjoint = np.ones(shape=shape, dtype=input.dtype)
            return grad * adjoint
        depends_on.append(Dependancy(tensor=input, grad_fn=grad_fn))
    return Tensor(data=data, depends_on=depends_on, requires_grad=requires_grad)
def reduce_mean(
    input: Tensor, dim: Optional[Dim] = None, keepdim: bool = False
) -> Tensor:
    """Mean over ``dim``, implemented as sum(x / n).

    ``np.array(input.shape)[dim]`` selects the reduced extent(s); when
    ``dim`` is None the None-index adds an axis, so ``prod`` covers every
    dimension, i.e. the total element count.
    """
    shape = np.array(input.shape)[dim]
    return reduce_sum(input / np.prod(shape), dim=dim, keepdim=keepdim)
def reduce_max(
    input: Tensor, dim: Optional[int] = None, keepdim: bool = False
) -> Union[Tensor, Tuple[Tensor, Tensor]]:
    """Maximum along ``dim`` (global max when ``dim`` is None).

    Returns just the max tensor for a global reduction, otherwise a
    ``(values, indices)`` pair like torch.  The gradient flows only into
    the argmax positions (ties: the position argmax picked).
    """
    if dim is None:
        assert not keepdim
    argmax = input.data.argmax(axis=dim)
    if dim is not None:
        # Gather the max values along the reduced axis.
        data = np.take_along_axis(
            input.data, np.expand_dims(argmax, axis=dim), axis=dim
        )
        if not keepdim:
            data = data.squeeze(axis=dim)
    else:
        # Global reduction: argmax is flat, convert to a coordinate tuple.
        argmax_unravel = np.unravel_index(argmax, input.data.shape)
        data = input.data[argmax_unravel]
    requires_grad = not Tensor.no_grad and input.requires_grad
    depends_on = []
    if requires_grad:
        def grad_fn(grad: Array) -> Array:
            # One-hot adjoint: 1 at each argmax position, 0 elsewhere.
            adjoint = np.zeros_like(input.data)
            if dim is not None:
                np.put_along_axis(
                    adjoint, np.expand_dims(argmax, axis=dim), 1, axis=dim
                )
                if not keepdim:
                    grad = np.expand_dims(grad, axis=dim)
            else:
                adjoint[argmax_unravel] = 1
            return grad * adjoint
        depends_on.append(Dependancy(tensor=input, grad_fn=grad_fn))
    out = Tensor(data=data, depends_on=depends_on, requires_grad=requires_grad)
    if dim is None:
        return out
    if keepdim:
        indices = np.expand_dims(argmax, axis=dim)
    else:
        indices = argmax
    return out, Tensor(data=indices)
def reduce_argmax(
    input: Tensor, dim: Optional[int] = None, keepdim: bool = False
) -> Tensor:
    """Indices of the maxima along ``dim`` (flat index when ``dim`` is None).

    Not differentiable, so the result records no dependencies.
    """
    if dim is None:
        assert not keepdim
    indices = input.data.argmax(axis=dim)
    if keepdim:
        indices = np.expand_dims(indices, axis=dim)
    return Tensor(data=indices)
# Pointwise operations
def neg(input: Tensor) -> Tensor:
    """Elementwise negation with autograd support."""
    out = -input.data
    track = input.requires_grad and not Tensor.no_grad
    deps: List[Dependancy] = []
    if track:
        def vjp(grad: Array) -> Array:
            # d(-x)/dx = -1: the upstream gradient is simply negated.
            return -grad
        deps.append(Dependancy(tensor=input, grad_fn=vjp))
    return Tensor(data=out, depends_on=deps, requires_grad=track)
def handle_grad_broadcasting(grad: Array, shape: Tuplist) -> Array:
    """Reduce *grad* back to *shape* after numpy broadcasting.

    Extra leading axes that broadcasting prepended are summed away, and
    axes where the operand had extent 1 are summed with keepdims so the
    result matches the operand's original shape.
    # https://stackoverflow.com/questions/45428696/more-pythonic-way-to-compute-derivatives-of-broadcast-addition-in-numpy
    """
    leading = grad.ndim - len(shape)
    prepended_axes = tuple(range(leading))
    stretched_axes = tuple(
        i + leading for i, extent in enumerate(shape) if extent == 1
    )
    reduced = np.sum(grad, axis=prepended_axes + stretched_axes, keepdims=True)
    return np.squeeze(reduced, axis=prepended_axes)
def add(left: Tensor, right: Tensor) -> Tensor:
    """Broadcasting elementwise addition with autograd support."""
    out = left.data + right.data
    tracking = not Tensor.no_grad
    deps: List[Dependancy] = []
    if tracking and left.requires_grad:
        def vjp_left(grad: Array) -> Array:
            # d(l+r)/dl = 1; only broadcasting needs undoing.
            adjoint = np.ones_like(left.data)
            return handle_grad_broadcasting(grad * adjoint, left.shape)
        deps.append(Dependancy(tensor=left, grad_fn=vjp_left))
    if tracking and right.requires_grad:
        def vjp_right(grad: Array) -> Array:
            adjoint = np.ones_like(right.data)
            return handle_grad_broadcasting(grad * adjoint, right.shape)
        deps.append(Dependancy(tensor=right, grad_fn=vjp_right))
    needs_grad = tracking and (left.requires_grad or right.requires_grad)
    return Tensor(data=out, depends_on=deps, requires_grad=needs_grad)
def sub(left: Tensor, right: Tensor) -> Tensor:
    """Elementwise subtraction, expressed as left + (-right)."""
    negated_right = neg(right)
    return add(left, negated_right)
def mul(left: Tensor, right: Tensor) -> Tensor:
    """Broadcasting elementwise multiplication with autograd support."""
    out = left.data * right.data
    tracking = not Tensor.no_grad
    deps: List[Dependancy] = []
    if tracking and left.requires_grad:
        def vjp_left(grad: Array) -> Array:
            # Product rule: d(l*r)/dl = r.
            adjoint = right.data
            return handle_grad_broadcasting(grad * adjoint, left.shape)
        deps.append(Dependancy(tensor=left, grad_fn=vjp_left))
    if tracking and right.requires_grad:
        def vjp_right(grad: Array) -> Array:
            # Product rule: d(l*r)/dr = l.
            adjoint = left.data
            return handle_grad_broadcasting(grad * adjoint, right.shape)
        deps.append(Dependancy(tensor=right, grad_fn=vjp_right))
    needs_grad = tracking and (left.requires_grad or right.requires_grad)
    return Tensor(data=out, depends_on=deps, requires_grad=needs_grad)
def div(left: Tensor, right: Tensor) -> Tensor:
    """Broadcasting elementwise (true) division with autograd support."""
    out = left.data / right.data
    tracking = not Tensor.no_grad
    deps: List[Dependancy] = []
    if tracking and left.requires_grad:
        def vjp_left(grad: Array) -> Array:
            # d(l/r)/dl = 1/r.
            adjoint = 1 / right.data
            return handle_grad_broadcasting(grad * adjoint, left.shape)
        deps.append(Dependancy(tensor=left, grad_fn=vjp_left))
    if tracking and right.requires_grad:
        def vjp_right(grad: Array) -> Array:
            # d(l/r)/dr = -l / r**2.
            adjoint = -left.data / right.data ** 2
            return handle_grad_broadcasting(grad * adjoint, right.shape)
        deps.append(Dependancy(tensor=right, grad_fn=vjp_right))
    needs_grad = tracking and (left.requires_grad or right.requires_grad)
    return Tensor(data=out, depends_on=deps, requires_grad=needs_grad)
def pow(input: Tensor, exponent: Numeric) -> Tensor:
    """Elementwise power x**exponent with autograd support.

    Note: shadows the builtin ``pow`` inside this module by design.
    """
    out = input.data ** exponent
    track = input.requires_grad and not Tensor.no_grad
    deps: List[Dependancy] = []
    if track:
        def vjp(grad: Array) -> Array:
            # Power rule: d(x**n)/dx = n * x**(n-1).
            adjoint = exponent * input.data ** (exponent - 1)
            return grad * adjoint
        deps.append(Dependancy(tensor=input, grad_fn=vjp))
    return Tensor(data=out, depends_on=deps, requires_grad=track)
def exp(input: Tensor) -> Tensor:
    """Elementwise e**x with autograd support."""
    out = np.exp(input.data)
    track = input.requires_grad and not Tensor.no_grad
    deps: List[Dependancy] = []
    if track:
        def vjp(grad: Array) -> Array:
            # d(e**x)/dx = e**x, i.e. the forward output itself.
            adjoint = out
            return grad * adjoint
        deps.append(Dependancy(tensor=input, grad_fn=vjp))
    return Tensor(data=out, depends_on=deps, requires_grad=track)
def log(input: Tensor) -> Tensor:
    """Elementwise natural logarithm with autograd support."""
    out = np.log(input.data)
    track = input.requires_grad and not Tensor.no_grad
    deps: List[Dependancy] = []
    if track:
        def vjp(grad: Array) -> Array:
            # d(ln x)/dx = 1/x.
            adjoint = 1 / input.data
            return grad * adjoint
        deps.append(Dependancy(tensor=input, grad_fn=vjp))
    return Tensor(data=out, depends_on=deps, requires_grad=track)
def log1p(input: Tensor) -> Tensor:
    """Elementwise ln(1 + x) (numerically stable near zero) with autograd."""
    out = np.log1p(input.data)
    track = input.requires_grad and not Tensor.no_grad
    deps: List[Dependancy] = []
    if track:
        def vjp(grad: Array) -> Array:
            # d(ln(1+x))/dx = 1/(1+x).
            adjoint = 1 / (1 + input.data)
            return grad * adjoint
        deps.append(Dependancy(tensor=input, grad_fn=vjp))
    return Tensor(data=out, depends_on=deps, requires_grad=track)
def sigmoid(input: Tensor) -> Tensor:
    """Elementwise logistic sigmoid (scipy's numerically stable expit)."""
    out = special.expit(input.data)
    track = input.requires_grad and not Tensor.no_grad
    deps: List[Dependancy] = []
    if track:
        def vjp(grad: Array) -> Array:
            # sigma'(x) = sigma(x) * (1 - sigma(x)), reusing the output.
            adjoint = out * (1 - out)
            return grad * adjoint
        deps.append(Dependancy(tensor=input, grad_fn=vjp))
    return Tensor(data=out, depends_on=deps, requires_grad=track)
def tanh(input: Tensor) -> Tensor:
    """Elementwise hyperbolic tangent with autograd support."""
    out = np.tanh(input.data)
    track = input.requires_grad and not Tensor.no_grad
    deps: List[Dependancy] = []
    if track:
        def vjp(grad: Array) -> Array:
            # tanh'(x) = 1 - tanh(x)**2, reusing the forward output.
            adjoint = 1 - out ** 2
            return grad * adjoint
        deps.append(Dependancy(tensor=input, grad_fn=vjp))
    return Tensor(data=out, depends_on=deps, requires_grad=track)
# Indexing, slicing operations
def tslice(input: Tensor, indices: Union[None, int, slice, Tuple[Any, ...]]) -> Tensor:
    """Index/slice a tensor; gradients scatter back into the selected slots."""
    out = input.data[indices]
    track = input.requires_grad and not Tensor.no_grad
    deps: List[Dependancy] = []
    if track:
        def vjp(grad: Array) -> Array:
            # Only the selected elements receive gradient; the rest are zero.
            full = np.zeros_like(input.data)
            full[indices] = grad
            return full
        deps.append(Dependancy(tensor=input, grad_fn=vjp))
    return Tensor(data=out, depends_on=deps, requires_grad=track)
def reshape(input: Tensor, shape: Tuplist) -> Tensor:
    """Reshape a tensor; the gradient is reshaped back to the input shape."""
    original_shape = input.shape
    out = input.data.reshape(shape)
    track = input.requires_grad and not Tensor.no_grad
    deps: List[Dependancy] = []
    if track:
        def vjp(grad: Array) -> Array:
            return grad.reshape(original_shape)
        deps.append(Dependancy(tensor=input, grad_fn=vjp))
    return Tensor(data=out, depends_on=deps, requires_grad=track)
def transpose(input: Tensor, dim0: int, dim1: int) -> Tensor:
    """Swap two axes; transposition is its own gradient mapping."""
    out = input.data.swapaxes(dim0, dim1)
    track = input.requires_grad and not Tensor.no_grad
    deps: List[Dependancy] = []
    if track:
        def vjp(grad: Array) -> Array:
            return grad.swapaxes(dim0, dim1)
        deps.append(Dependancy(tensor=input, grad_fn=vjp))
    return Tensor(data=out, depends_on=deps, requires_grad=track)
# BLAS and LAPACK operations
def matmul(left: Tensor, right: Tensor) -> Tensor:
    """Matrix product with autograd; handles stacked (batched) operands.

    The backward passes use ``np.tensordot`` so gradients contract over
    the correct axes even when one operand carries extra leading dims.
    """
    data = left.data @ right.data
    depends_on = []
    if not Tensor.no_grad and left.requires_grad:
        def grad_fn_left(grad: Array) -> Array:
            # dL/dleft = grad @ right^T, generalised to stacked dims.
            adjoint = right.data.T
            # N = number of axis pairs to contract between grad and adjoint.
            N = (grad.ndim + adjoint.ndim) - left.ndim
            assert N % 2 == 0
            N //= 2
            axes = (tuple(range(-1, -N - 1, -1)), tuple(range(N)))
            return np.tensordot(grad, adjoint, axes=axes)
        depends_on.append(Dependancy(tensor=left, grad_fn=grad_fn_left))
    if not Tensor.no_grad and right.requires_grad:
        def grad_fn_right(grad: Array) -> Array:
            # dL/dright = left^T @ grad, generalised to stacked dims.
            adjoint = left.data.T
            N = (grad.ndim + adjoint.ndim) - right.ndim
            assert N % 2 == 0
            N //= 2
            axes = (tuple(range(-1, -N - 1, -1)), tuple(range(N)))
            return np.tensordot(adjoint, grad, axes=axes)
        depends_on.append(Dependancy(tensor=right, grad_fn=grad_fn_right))
    requires_grad = not Tensor.no_grad and (left.requires_grad or right.requires_grad)
    return Tensor(data=data, depends_on=depends_on, requires_grad=requires_grad)
| 21,634 | 7,289 |
"""
This file contains several reactions for RDKIT
NOTES:
- Check what happens with ASP ASP ester bond formation? need more specifications to do that side targets?
- check_reaction_mols, also checking how many of the reaction sites are present in one molecule? - (more specific smart: make_aa_backbone_bond = [N][C][O:5]=[C:2][OH:1].[N:3][CH2:4][C][=O][C]>>[O:5]=[C:2][N:3][CH2:4].[OH2:1] "
- Check in unittest also fail states
authors: Shuzhe Wang & Benjamin Schroeder
"""
from rdkit import Chem
from rdkit.Chem import AllChem
import typing as t
#GENERAL Functions:
def check_reaction_mols(reactants: list, reaction_smarts: str) -> bool:
    """
    Check that the given reactants fulfil the educt requirements of a reaction.

    :param reactants: list of Chem.Mol used as reactants
    :param reaction_smarts: str defining the reaction (``educts>>products``)
    :return: True if every reactant matches its educt pattern
    :raises ValueError: on a reactant-count mismatch or a missing substructure
    """
    educts = reaction_smarts.split(">>")[0].split(".")
    educts_mol = [AllChem.MolFromSmarts(educt) for educt in educts]
    # TODO check Mol is None
    if len(reactants) > len(educts):
        raise ValueError("There are more reactants than expected for this reaction.\n Given reactants: " + str(len(reactants)) + " expected reactants: " + str(len(educts)))
    if len(reactants) < len(educts):
        raise ValueError("There are " + str(len(educts)) + " reactants expected for this reaction.\n But only " + str(len(reactants)) + " reactants were given")
    # Check each reactant against its positionally matching educt pattern.
    for ind, reactant in enumerate(reactants):
        if not reactant.HasSubstructMatch(educts_mol[ind]):
            raise ValueError("Reactant " + str(ind) + " with smile " + Chem.MolToSmiles(
                reactants[ind]) + " has not the required reaction pattern: " + str(educts[ind]))
    # Old version had an unreachable `return False` after this return.
    return True
def do_reaction(reactants: list, reaction_smarts: str) -> t.List[Chem.Mol]:
    """
    Perform a reaction on the given reactants.

    :param reactants: list of Chem.Mol, ordered like the SMARTS educts
    :param reaction_smarts: str defining the reaction
    :return: list[Chem.Mol] — the first product set
    :raises ValueError: when the reaction SMARTS cannot be parsed
    :raises Exception: when the reaction yields no products
    """
    try:
        rxn = AllChem.ReactionFromSmarts(reaction_smarts)
    except Exception as err:
        raise ValueError("Could not generate reaction object in do_reaction function:\n\t" + str("\t".join(err.args)))
    ps = rxn.RunReactants(tuple(reactants))
    # RunReactants returns an EMPTY TUPLE (not None) when nothing reacted;
    # the old `ps == None` check could never fire and ps[0] then raised
    # IndexError.  Test truthiness instead.
    if not ps:
        raise Exception("Reaction did not happen.")
    return ps[0]
def perform_generic_reaction(reactants: t.List[Chem.Mol], reaction_smarts: str):
    """
    Validate the reactants against the reaction pattern, then run the reaction.

    :param reactants: list of Chem.Mol, ordered like the SMARTS educts
    :param reaction_smarts: str defining the reaction
    :return: products of the reaction (first product set)
    :raises ValueError: wrapping either the checking or the reaction failure
    """
    # The old version put `except Exception` BEFORE `except ValueError` on a
    # single try-block, so the "reaction Step" handler was unreachable.
    # Two separate try-blocks attribute each failure to the right phase.
    try:
        check_reaction_mols(reactants=reactants, reaction_smarts=reaction_smarts)
    except Exception as err:
        raise ValueError("Could not perform Reaction.\n Failed in molecule checking Step\n\t" + str("\t".join(err.args)))
    try:
        products = do_reaction(reactants=reactants,
                               reaction_smarts=reaction_smarts)
    except Exception as err:
        raise ValueError("Could not perform Reaction.\n Failed in reaction Step:\n\t" + str("\t".join(err.args)))
    return products
#REACTIONS:
def make_custom_bond(reactant1, reactant2, reaction_smarts):
    """Run an arbitrary two-component reaction given by *reaction_smarts*."""
    return perform_generic_reaction(
        reactants=[reactant1, reactant2], reaction_smarts=reaction_smarts)
def make_peptide_bond(carboxylGroupContaining: Chem.Mol, nitrogenGroupContaining: Chem.Mol) -> (t.List[Chem.Mol], str):
    """Condense a carboxylic acid with a primary/secondary amine into an
    amide (peptide) bond, releasing water.

    :param carboxylGroupContaining: molecule providing the -COOH group
    :param nitrogenGroupContaining: molecule providing the amine nitrogen
    :return: products of the reaction (first product set)
    """
    # SMARTS assembled from labelled fragments; dict insertion order
    # ("educts" then "products") yields educts>>products below.
    reaction_smarts = {
        "educts" : [
            "[C:1](=[O])[OH:2]",
            "[NH2,NH1,NH3+1:3]-[CH2,CH1:4]" ,
        ],
        "products" : [
            "[C:1](=[O])[N:3]-[C:4]",
            "[OH2:2]",
        ],
    }
    reaction_smarts = [".".join(reaction_smarts[i]) for i in reaction_smarts]
    reaction_smarts = ">>".join(reaction_smarts)
    # reaction_smarts = '[O:5]=[C:2][OH:1].[N:3][CH2,CH1:4]>>[O:5]=[C:2][N:3][CH2:4].[OH2:1]'
    products = perform_generic_reaction(reactants=[carboxylGroupContaining, nitrogenGroupContaining], reaction_smarts=reaction_smarts)
    # return products, reaction_smarts
    return products
def make_amide_bond(carboxylGroupContaining: Chem.Mol, nitrogenGroupContaining: Chem.Mol) -> (t.List[Chem.Mol], str):
    """Amide-bond formation is chemically the peptide-bond reaction; delegate."""
    return make_peptide_bond(
        carboxylGroupContaining=carboxylGroupContaining,
        nitrogenGroupContaining=nitrogenGroupContaining)
def make_ester_bond(carboxylGroupContaining: Chem.Mol, alcoholGroupContaining: Chem.Mol) -> (t.List[Chem.Mol], str):
    """Condense a carboxylic acid with an alcohol into an ester, releasing water."""
    smarts = '[O:5]=[C:2][OH:1].[HO:3][CH2:4]>>[O:5]=[C:2][O:3][CH2:4].[OH2:1]'
    return perform_generic_reaction(
        reactants=[carboxylGroupContaining, alcoholGroupContaining],
        reaction_smarts=smarts)
def make_disulfide_bond(thiolGroupContaining1: Chem.Mol, thiolGroupContaining2: Chem.Mol) -> (t.List[Chem.Mol], str):
    """Oxidatively couple two thiols into a disulfide bridge (S-S)."""
    smarts = '[C:1][SH:2].[HS:3][C:4]>>[C:1][S:2][S:3][C:4]'
    return perform_generic_reaction(
        reactants=[thiolGroupContaining1, thiolGroupContaining2],
        reaction_smarts=smarts)
def make_disulphide_bond(thiolGroupContaining1: Chem.Mol, thiolGroupContaining2: Chem.Mol) -> (t.List[Chem.Mol], str):
    """British-spelling alias for :func:`make_disulfide_bond`."""
    return make_disulfide_bond(
        thiolGroupContaining1=thiolGroupContaining1,
        thiolGroupContaining2=thiolGroupContaining2)
# --- module-level smoke test: couple alanine ("A") and cysteine ("C") ---
# NOTE(review): this runs on import; consider guarding with
# `if __name__ == "__main__":` so importing this module stays side-effect free.
a = Chem.MolFromSequence("A")
b = Chem.MolFromSequence("C")
# print(Chem.MolToPDBBlock(b))
out= make_amide_bond(a,b)
mol = out[0]
# Chem.CalcExplicitValence(mol)
# Recompute valences/implicit Hs on the product; strict=False tolerates
# the intermediate valence state RunReactants can leave behind.
mol.UpdatePropertyCache(strict = False)
print(Chem.MolToPDBBlock(mol), Chem.MolToSmiles(mol))
| 5,576 | 1,974 |
import os
# Flask settings
DEBUG=True
# Flask-restplus settings
# When False, Swagger docs show full model fields instead of the X-Fields mask.
RESTPLUS_MASK_SWAGGER=False
# Application settings
# API metadata
API_TITLE = 'Model Asset Exchange Server'
API_DESC = 'An API for serving models'
API_VERSION = '0.1'
# default model
# name of model to download
MODEL_NAME = 'ssd_mobilenet_v1_coco_2017_11_17'
DEFAULT_MODEL_PATH = 'assets'
# Path to frozen detection graph. This is the actual model that is used for the object detection.
# Note: This needs to be downloaded and/or compiled into pb format.
PATH_TO_CKPT = '{}/{}/frozen_inference_graph.pb'.format(DEFAULT_MODEL_PATH, MODEL_NAME)
# Label map lives under assets/data/, not under the model directory.
PATH_TO_LABELS = '{}/{}/mscoco_label_map.pbtxt'.format(DEFAULT_MODEL_PATH, 'data')
# Number of classes in the COCO label map.
NUM_CLASSES = 90
# for image models, may not be required
MODEL_INPUT_IMG_SIZE = (299, 299)
MODEL_LICENSE = 'ApacheV2'
# Metadata block served by the API's /model endpoint(s).
MODEL_META_DATA = {
    'id': '{}-tf-mobilenet'.format(MODEL_NAME.lower()),
    'name': '{} TensorFlow Model'.format(MODEL_NAME),
    'description': '{} TensorFlow model trained on MobileNet'.format(MODEL_NAME),
    'type': 'object_detection',
    'license': '{}'.format(MODEL_LICENSE)
}
| 1,105 | 422 |
import time

import pandas as pd
from selenium import webdriver
from selenium.webdriver.common.by import By
SEARCH_URL = "https://licenseif.mhlw.go.jp/search_iyaku/top.jsp"
SLEEP_SEC = 3
IN_CSV_NAME = "./list.csv"
OUT_CSV_NAME = "./output.csv"
# 名前を投げると「登録年」の list が返ってくる
def get_years(name):
    """Search the MHLW license site for *name* and return the list of
    registration-year strings found on the result page.

    Relies on the module-level ``driver`` (an open Selenium session) and
    the ``SEARCH_URL`` / ``SLEEP_SEC`` constants.
    """
    driver.get(SEARCH_URL)
    time.sleep(SLEEP_SEC)  # crude wait for the search form to load
    search_box = driver.find_element(By.NAME, "name")
    search_box.send_keys(name)
    search_box.submit()
    # find_element(s)_by_* helpers were removed in Selenium 4; the
    # By-locator form works in both Selenium 3 and 4.
    cells = driver.find_elements(By.CLASS_NAME, 'REGISTRATION_TD')
    return [cell.text for cell in cells]
# The input csv has two columns, name and years (with header); the years
# column is overwritten with the scraped registration years.
df = pd.read_csv(IN_CSV_NAME)
df["years"] = df["years"].astype(str)
driver = webdriver.Chrome()
try:
    for i, _ in df.iterrows():
        result = get_years(df.at[i, "name"])
        df.at[i, "years"] = " ".join(result)  # stored space-separated
finally:
    # Always release the browser, even when a lookup raises mid-loop
    # (the old version leaked the Chrome process on any exception).
    driver.quit()
# Let pandas manage the file itself; the old code passed an open() handle
# that was never closed.  utf_8_sig keeps the BOM for Excel compatibility.
df.to_csv(OUT_CSV_NAME, index=False, encoding="utf_8_sig")
| 921 | 418 |
#!python
def linear_search(array, item):
    """return the first index of item in array or None if item is not found"""
    # The iterative and recursive implementations are interchangeable;
    # the recursive one is currently the implementation under test.
    # return linear_search_iterative(array, item)
    return linear_search_recursive(array, item)
def linear_search_iterative(array, item):
    """Scan *array* front to back; return the index of the first element
    equal to *item*, or None when no element matches."""
    matches = (position for position, element in enumerate(array) if element == item)
    return next(matches, None)
def linear_search_recursive(array, item, index=0):
    """Recursively scan *array* from *index* onward; return the first
    matching position or None once the end is reached."""
    # Base case: ran past the last element without a match.
    if index >= len(array):
        return None
    # Either this position matches, or recurse one step to the right.
    if array[index] == item:
        return index
    return linear_search_recursive(array, item, index + 1)
def binary_search(array, item):
    """return the index of item in sorted array or None if item is not found"""
    # The iterative and recursive implementations are interchangeable;
    # the recursive one is currently the implementation under test.
    # return binary_search_iterative(array, item)
    return binary_search_recursive(array, item)
def binary_search_iterative(array, item):
    """Iteratively bisect the sorted *array*; return an index whose element
    equals *item*, or None when absent."""
    lo, hi = 0, len(array) - 1
    while lo <= hi:
        # Floor-divide to land on a valid middle index.
        middle = (lo + hi) // 2
        candidate = array[middle]
        if candidate == item:
            return middle
        if item < candidate:
            # Target lies in the lower half: pull the upper bound down.
            hi = middle - 1
        else:
            # Target lies in the upper half: push the lower bound up.
            lo = middle + 1
    return None
def binary_search_recursive(array, item, left=0, right=None):
    """Recursively bisect the sorted *array* between *left* and *right*
    (inclusive); return an index whose element equals *item*, or None.

    ``right`` defaults to the last index on the initial call.
    """
    if right is None:  # idiom fix: identity check for None, not ==
        right = len(array) - 1
    # Empty search window (also covers an empty array, where right == -1).
    if left > right:
        return None
    mid = (left + right) // 2
    if item == array[mid]:
        return mid
    if item < array[mid]:
        # Recurse into the lower half with a tightened upper bound.
        return binary_search_recursive(array, item, left, mid - 1)
    # Recurse into the upper half with a tightened lower bound.
    return binary_search_recursive(array, item, mid + 1, right)