CombinedText stringlengths 4 3.42M |
|---|
ELECTRUM_VERSION = "0.36b"
SEED_VERSION = 4 # bump this everytime the seed generation is modified
bump version number
ELECTRUM_VERSION = "0.37"
SEED_VERSION = 4 # bump this everytime the seed generation is modified
|
#!/bin/env python
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A class to help start/stop the lighttpd server used by layout tests."""
import logging
import optparse
import os
import platform_utils
import subprocess
import sys
import tempfile
import time
import urllib
import google.path_utils
# This will be a native path to the directory this file resides in.
# It can either be relative or absolute depending how it's executed.
THISDIR = os.path.dirname(os.path.abspath(__file__))

def PathFromBase(*pathies):
  """Locate |pathies| by searching upward from this file's directory.

  Delegates to google.path_utils.FindUpward; raises
  google.path_utils.PathNotFound when the path cannot be located
  (callers below rely on catching that exception).
  """
  return google.path_utils.FindUpward(THISDIR, *pathies)
class HttpdNotStarted(Exception):
  """Raised when the lighttpd server fails to come up on a required port."""
class Lighttpd:
  """Starts/stops a lighttpd instance configured for http layout tests.

  The class-level probing below runs at import time: it locates the webkit
  http test trees (with a third_party/WebKit fallback), the pending Chrome
  tests, and the self-signed certificate used for the SSL port mappings.
  """

  # Webkit tests
  try:
    _webkit_tests = PathFromBase('webkit', 'data', 'layout_tests',
                                 'LayoutTests', 'http', 'tests')
  except google.path_utils.PathNotFound:
    # If webkit/data/layout_tests/LayoutTests/http/tests does not exist, assume
    # webkit tests are located in third_party/WebKit/LayoutTests/http/tests.
    try:
      _webkit_tests = PathFromBase('third_party', 'WebKit',
                                   'LayoutTests', 'http', 'tests')
    except google.path_utils.PathNotFound:
      _webkit_tests = None

  # New tests for Chrome
  try:
    _pending_tests = PathFromBase('webkit', 'data', 'layout_tests',
                                  'pending', 'http', 'tests')
  except google.path_utils.PathNotFound:
    _pending_tests = None

  # Path where we can access all of the tests
  _all_tests = PathFromBase('webkit', 'data', 'layout_tests')
  # Self generated certificate for SSL server (for client cert get
  # <base-path>\chrome\test\data\ssl\certs\root_ca_cert.crt)
  _pem_file = PathFromBase('tools', 'python', 'google', 'httpd_config',
                           'httpd2.pem')

  VIRTUALCONFIG = [
    # One mapping where we can get to everything
    {'port': 8081, 'docroot': _all_tests}
  ]

  if _webkit_tests:
    VIRTUALCONFIG.extend(
      # Three mappings (one with SSL enabled) for LayoutTests http tests
      [{'port': 8000, 'docroot': _webkit_tests},
       {'port': 8080, 'docroot': _webkit_tests},
       {'port': 8443, 'docroot': _webkit_tests, 'sslcert': _pem_file}]
    )
  if _pending_tests:
    VIRTUALCONFIG.extend(
      # Three similar mappings (one with SSL enabled) for pending http tests
      [{'port': 9000, 'docroot': _pending_tests},
       {'port': 9080, 'docroot': _pending_tests},
       {'port': 9443, 'docroot': _pending_tests, 'sslcert': _pem_file}]
    )

  def __init__(self, output_dir, background=False, port=None, root=None,
               register_cygwin=None):
    """Args:
      output_dir: the absolute path to the layout test result directory
      background: unused here; accepted for caller compatibility.
      port: optional single port to serve on (overrides VIRTUALCONFIG).
      root: optional DocumentRoot (overrides the layout-test docroots).
      register_cygwin: on win32, run setup_mount.bat before starting.
    """
    self._output_dir = output_dir
    self._process = None
    self._port = port
    self._root = root
    self._register_cygwin = register_cygwin
    # Command-line callers pass the port as a string; normalize to int.
    if self._port:
      self._port = int(self._port)

  def IsRunning(self):
    # A non-None Popen handle means Start() succeeded and Stop() hasn't run.
    return self._process != None

  def Start(self):
    """Write a lighttpd.conf into the output dir and launch the server.

    Raises HttpdNotStarted when a configured port does not answer within
    the polling window, or when the child process already exited.
    """
    if self.IsRunning():
      # NOTE(review): raising a plain string is invalid on modern Python
      # interpreters; kept byte-identical here, but this should become a
      # real exception type.
      raise 'Lighttpd already running'

    base_conf_file = os.path.join(THISDIR, 'lighttpd.conf')
    out_conf_file = os.path.join(self._output_dir, 'lighttpd.conf')
    # Timestamped log names so every run gets fresh access/error logs.
    time_str = time.strftime("%d%b%Y-%H%M%S")
    access_file_name = "access.log-" + time_str + ".txt"
    access_log = os.path.join(self._output_dir, access_file_name)
    log_file_name = "error.log-" + time_str + ".txt"
    error_log = os.path.join(self._output_dir, log_file_name)

    # Write out the config: start from the checked-in base conf, then append
    # the generated sections below.
    f = file(base_conf_file, 'rb')
    base_conf = f.read()
    f.close()
    f = file(out_conf_file, 'wb')
    f.write(base_conf)

    # Write out our cgi handlers. Run perl through env so that it processes
    # the #! line and runs perl with the proper command line arguments.
    # Emulate apache's mod_asis with a cat cgi handler.
    platform_util = platform_utils.PlatformUtility('')
    f.write(('cgi.assign = ( ".cgi" => "/usr/bin/env",\n'
             ' ".pl" => "/usr/bin/env",\n'
             ' ".asis" => "/bin/cat",\n'
             ' ".php" => "%s" )\n\n') %
            platform_util.LigHTTPdPHPPath())

    # Setup log files
    f.write(('server.errorlog = "%s"\n'
             'accesslog.filename = "%s"\n\n') % (error_log, access_log))

    # Setup upload folders. Upload folder is to hold temporary upload files
    # and also POST data. This is used to support XHR layout tests that does
    # POST.
    f.write(('server.upload-dirs = ( "%s" )\n\n') % (self._output_dir))

    # Dump the virtual host config at the bottom.
    if self._root:
      if self._port:
        # Have both port and root dir.
        mappings = [{'port': self._port, 'docroot': self._root}]
      else:
        # Have only a root dir - set the ports as for LayoutTests.
        # This is used in ui_tests to run http tests against a browser.
        mappings = [
          # default set of ports as for LayoutTests but with a specified root.
          {'port': 8000, 'docroot': self._root},
          {'port': 8080, 'docroot': self._root},
          {'port': 8443, 'docroot': self._root, 'sslcert': Lighttpd._pem_file}
        ]
    else:
      mappings = self.VIRTUALCONFIG
    for mapping in mappings:
      ssl_setup = ''
      if 'sslcert' in mapping:
        ssl_setup = (' ssl.engine = "enable"\n'
                     ' ssl.pemfile = "%s"\n' % mapping['sslcert'])
      f.write(('$SERVER["socket"] == "127.0.0.1:%d" {\n'
               ' server.document-root = "%s"\n' +
               ssl_setup +
               '}\n\n') % (mapping['port'], mapping['docroot']))
    f.close()

    executable = platform_util.LigHTTPdExecutablePath()
    module_path = platform_util.LigHTTPdModulePath()
    start_cmd = [ executable,
                  # Newly written config file
                  '-f', PathFromBase(self._output_dir, 'lighttpd.conf'),
                  # Where it can find its module dynamic libraries
                  '-m', module_path,
                  # Don't background
                  '-D' ]

    # Put the cygwin directory first in the path to find cygwin1.dll
    # NOTE(review): this mutates os.environ in place (no copy), so the
    # PATH change leaks into the parent process environment.
    env = os.environ
    if sys.platform in ('cygwin', 'win32'):
      env['PATH'] = '%s;%s' % (
          PathFromBase('third_party', 'cygwin', 'bin'), env['PATH'])

    if sys.platform == 'win32' and self._register_cygwin:
      setup_mount = PathFromBase('third_party', 'cygwin', 'setup_mount.bat')
      subprocess.Popen(setup_mount).wait()

    logging.info('Starting http server')
    self._process = subprocess.Popen(start_cmd, env=env)

    # Wait for server to start.
    time.sleep(3)

    # Ensure that the server is running on all the desired ports.
    for mapping in mappings:
      url = 'http%s://127.0.0.1:%d/' % ('sslcert' in mapping and 's' or '',
                                        mapping['port'])
      if not self._UrlIsAlive(url):
        raise HttpdNotStarted('Failed to start httpd on port %s' %
                              str(mapping['port']))

    # Our process terminated already
    if self._process.returncode != None:
      raise HttpdNotStarted('Failed to start httpd.')

  def _UrlIsAlive(self, url):
    """Checks to see if we get an http response from |url|.

    We poll the url 5 times with a 3 second delay. If we don't
    get a reply in that time, we give up and assume the httpd
    didn't start properly.

    Args:
      url: The URL to check.
    Return:
      True if the url is alive.
    """
    attempts = 5
    while attempts > 0:
      try:
        response = urllib.urlopen(url)
        # Server is up and responding.
        return True
      except IOError:
        pass
      attempts -= 1
      # Wait 3 seconds and try again.
      time.sleep(3)

    return False

  # TODO(deanm): Find a nicer way to shutdown cleanly. Our log files are
  # probably not being flushed, etc... why doesn't our python have os.kill ?
  def Stop(self, force=False):
    """Shut the server down via the platform utility and wait for exit.

    Args:
      force: when True, attempt shutdown even if we think nothing is running.
    """
    if not force and not self.IsRunning():
      return

    logging.info('Shutting down http server')
    platform_util = platform_utils.PlatformUtility('')
    platform_util.ShutDownHTTPServer(self._process)

    if self._process:
      self._process.wait()
      self._process = None

    # Wait a bit to make sure the ports are free'd up
    time.sleep(2)
if '__main__' == __name__:
# Provide some command line params for starting/stopping the http server
# manually. Also used in ui_tests to run http layout tests in a browser.
option_parser = optparse.OptionParser()
option_parser.add_option('-k', '--server', help='Server action (start|stop)')
option_parser.add_option('-p', '--port',
help='Port to listen on (overrides layout test ports)')
option_parser.add_option('-r', '--root',
help='Absolute path to DocumentRoot (overrides layout test roots)')
option_parser.add_option('--register_cygwin', action="store_true",
dest="register_cygwin", help='Register Cygwin paths (on Win try bots)')
options, args = option_parser.parse_args()
if not options.server:
print 'Usage: %s --server {start|stop} [--root=root_dir]'
print ' [--port=port_number]' % sys.argv[0]
else:
if (options.root is None) and (options.port is not None):
# specifying root but not port means we want httpd on default set of
# ports that LayoutTest use, but pointing to a different source of tests.
# Specifying port but no root does not seem meaningful.
raise 'Specifying port requires also a root.'
httpd = Lighttpd(tempfile.gettempdir(),
port=options.port,
root=options.root,
register_cygwin=options.register_cygwin)
if 'start' == options.server:
httpd.Start()
else:
httpd.Stop(force=True)
Remove old log files so they don't accumulate on try bots.
TEST=none
BUG=none
Review URL: http://codereview.chromium.org/149263
git-svn-id: dd90618784b6a4b323ea0c23a071cb1c9e6f2ac7@20057 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
#!/bin/env python
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A class to help start/stop the lighttpd server used by layout tests."""
import logging
import optparse
import os
import platform_utils
import subprocess
import sys
import tempfile
import time
import urllib
import google.path_utils
# This will be a native path to the directory this file resides in.
# It can either be relative or absolute depending how it's executed.
THISDIR = os.path.dirname(os.path.abspath(__file__))

def PathFromBase(*pathies):
  """Locate |pathies| by searching upward from this file's directory.

  Delegates to google.path_utils.FindUpward; raises
  google.path_utils.PathNotFound when the path cannot be located
  (callers below rely on catching that exception).
  """
  return google.path_utils.FindUpward(THISDIR, *pathies)
def RemoveLogFiles(folder, starts_with):
  """Delete every file in |folder| whose name begins with |starts_with|.

  Keeps old per-run access/error logs from accumulating on try bots.
  Not recursive: only direct children of |folder| are considered.

  Args:
    folder: directory to scan.
    starts_with: filename prefix to match, e.g. "access.log-".
  """
  # Renamed the loop variable: the original used 'file', shadowing the
  # builtin that this module still calls as file(...) elsewhere.
  for name in os.listdir(folder):
    if name.startswith(starts_with):
      os.remove(os.path.join(folder, name))
class HttpdNotStarted(Exception):
  """Raised when the lighttpd server fails to come up on a required port."""
class Lighttpd:
  """Starts/stops a lighttpd instance configured for http layout tests.

  The class-level probing below runs at import time: it locates the webkit
  http test trees (with a third_party/WebKit fallback), the pending Chrome
  tests, and the self-signed certificate used for the SSL port mappings.
  """

  # Webkit tests
  try:
    _webkit_tests = PathFromBase('webkit', 'data', 'layout_tests',
                                 'LayoutTests', 'http', 'tests')
  except google.path_utils.PathNotFound:
    # If webkit/data/layout_tests/LayoutTests/http/tests does not exist, assume
    # webkit tests are located in third_party/WebKit/LayoutTests/http/tests.
    try:
      _webkit_tests = PathFromBase('third_party', 'WebKit',
                                   'LayoutTests', 'http', 'tests')
    except google.path_utils.PathNotFound:
      _webkit_tests = None

  # New tests for Chrome
  try:
    _pending_tests = PathFromBase('webkit', 'data', 'layout_tests',
                                  'pending', 'http', 'tests')
  except google.path_utils.PathNotFound:
    _pending_tests = None

  # Path where we can access all of the tests
  _all_tests = PathFromBase('webkit', 'data', 'layout_tests')
  # Self generated certificate for SSL server (for client cert get
  # <base-path>\chrome\test\data\ssl\certs\root_ca_cert.crt)
  _pem_file = PathFromBase('tools', 'python', 'google', 'httpd_config',
                           'httpd2.pem')

  VIRTUALCONFIG = [
    # One mapping where we can get to everything
    {'port': 8081, 'docroot': _all_tests}
  ]

  if _webkit_tests:
    VIRTUALCONFIG.extend(
      # Three mappings (one with SSL enabled) for LayoutTests http tests
      [{'port': 8000, 'docroot': _webkit_tests},
       {'port': 8080, 'docroot': _webkit_tests},
       {'port': 8443, 'docroot': _webkit_tests, 'sslcert': _pem_file}]
    )
  if _pending_tests:
    VIRTUALCONFIG.extend(
      # Three similar mappings (one with SSL enabled) for pending http tests
      [{'port': 9000, 'docroot': _pending_tests},
       {'port': 9080, 'docroot': _pending_tests},
       {'port': 9443, 'docroot': _pending_tests, 'sslcert': _pem_file}]
    )

  def __init__(self, output_dir, background=False, port=None, root=None,
               register_cygwin=None):
    """Args:
      output_dir: the absolute path to the layout test result directory
      background: unused here; accepted for caller compatibility.
      port: optional single port to serve on (overrides VIRTUALCONFIG).
      root: optional DocumentRoot (overrides the layout-test docroots).
      register_cygwin: on win32, run setup_mount.bat before starting.
    """
    self._output_dir = output_dir
    self._process = None
    self._port = port
    self._root = root
    self._register_cygwin = register_cygwin
    # Command-line callers pass the port as a string; normalize to int.
    if self._port:
      self._port = int(self._port)

  def IsRunning(self):
    # A non-None Popen handle means Start() succeeded and Stop() hasn't run.
    return self._process != None

  def Start(self):
    """Write a lighttpd.conf into the output dir and launch the server.

    Raises HttpdNotStarted when a configured port does not answer within
    the polling window, or when the child process already exited.
    """
    if self.IsRunning():
      # NOTE(review): raising a plain string is invalid on modern Python
      # interpreters; kept byte-identical here, but this should become a
      # real exception type.
      raise 'Lighttpd already running'

    base_conf_file = os.path.join(THISDIR, 'lighttpd.conf')
    out_conf_file = os.path.join(self._output_dir, 'lighttpd.conf')
    # Timestamped log names so every run gets fresh access/error logs.
    time_str = time.strftime("%d%b%Y-%H%M%S")
    access_file_name = "access.log-" + time_str + ".txt"
    access_log = os.path.join(self._output_dir, access_file_name)
    log_file_name = "error.log-" + time_str + ".txt"
    error_log = os.path.join(self._output_dir, log_file_name)

    # Remove old log files. We only need to keep the last ones.
    RemoveLogFiles(self._output_dir, "access.log-")
    RemoveLogFiles(self._output_dir, "error.log-")

    # Write out the config: start from the checked-in base conf, then append
    # the generated sections below.
    f = file(base_conf_file, 'rb')
    base_conf = f.read()
    f.close()
    f = file(out_conf_file, 'wb')
    f.write(base_conf)

    # Write out our cgi handlers. Run perl through env so that it processes
    # the #! line and runs perl with the proper command line arguments.
    # Emulate apache's mod_asis with a cat cgi handler.
    platform_util = platform_utils.PlatformUtility('')
    f.write(('cgi.assign = ( ".cgi" => "/usr/bin/env",\n'
             ' ".pl" => "/usr/bin/env",\n'
             ' ".asis" => "/bin/cat",\n'
             ' ".php" => "%s" )\n\n') %
            platform_util.LigHTTPdPHPPath())

    # Setup log files
    f.write(('server.errorlog = "%s"\n'
             'accesslog.filename = "%s"\n\n') % (error_log, access_log))

    # Setup upload folders. Upload folder is to hold temporary upload files
    # and also POST data. This is used to support XHR layout tests that does
    # POST.
    f.write(('server.upload-dirs = ( "%s" )\n\n') % (self._output_dir))

    # Dump the virtual host config at the bottom.
    if self._root:
      if self._port:
        # Have both port and root dir.
        mappings = [{'port': self._port, 'docroot': self._root}]
      else:
        # Have only a root dir - set the ports as for LayoutTests.
        # This is used in ui_tests to run http tests against a browser.
        mappings = [
          # default set of ports as for LayoutTests but with a specified root.
          {'port': 8000, 'docroot': self._root},
          {'port': 8080, 'docroot': self._root},
          {'port': 8443, 'docroot': self._root, 'sslcert': Lighttpd._pem_file}
        ]
    else:
      mappings = self.VIRTUALCONFIG
    for mapping in mappings:
      ssl_setup = ''
      if 'sslcert' in mapping:
        ssl_setup = (' ssl.engine = "enable"\n'
                     ' ssl.pemfile = "%s"\n' % mapping['sslcert'])
      f.write(('$SERVER["socket"] == "127.0.0.1:%d" {\n'
               ' server.document-root = "%s"\n' +
               ssl_setup +
               '}\n\n') % (mapping['port'], mapping['docroot']))
    f.close()

    executable = platform_util.LigHTTPdExecutablePath()
    module_path = platform_util.LigHTTPdModulePath()
    start_cmd = [ executable,
                  # Newly written config file
                  '-f', PathFromBase(self._output_dir, 'lighttpd.conf'),
                  # Where it can find its module dynamic libraries
                  '-m', module_path,
                  # Don't background
                  '-D' ]

    # Put the cygwin directory first in the path to find cygwin1.dll
    # NOTE(review): this mutates os.environ in place (no copy), so the
    # PATH change leaks into the parent process environment.
    env = os.environ
    if sys.platform in ('cygwin', 'win32'):
      env['PATH'] = '%s;%s' % (
          PathFromBase('third_party', 'cygwin', 'bin'), env['PATH'])

    if sys.platform == 'win32' and self._register_cygwin:
      setup_mount = PathFromBase('third_party', 'cygwin', 'setup_mount.bat')
      subprocess.Popen(setup_mount).wait()

    logging.info('Starting http server')
    self._process = subprocess.Popen(start_cmd, env=env)

    # Wait for server to start.
    time.sleep(3)

    # Ensure that the server is running on all the desired ports.
    for mapping in mappings:
      url = 'http%s://127.0.0.1:%d/' % ('sslcert' in mapping and 's' or '',
                                        mapping['port'])
      if not self._UrlIsAlive(url):
        raise HttpdNotStarted('Failed to start httpd on port %s' %
                              str(mapping['port']))

    # Our process terminated already
    if self._process.returncode != None:
      raise HttpdNotStarted('Failed to start httpd.')

  def _UrlIsAlive(self, url):
    """Checks to see if we get an http response from |url|.

    We poll the url 5 times with a 3 second delay. If we don't
    get a reply in that time, we give up and assume the httpd
    didn't start properly.

    Args:
      url: The URL to check.
    Return:
      True if the url is alive.
    """
    attempts = 5
    while attempts > 0:
      try:
        response = urllib.urlopen(url)
        # Server is up and responding.
        return True
      except IOError:
        pass
      attempts -= 1
      # Wait 3 seconds and try again.
      time.sleep(3)

    return False

  # TODO(deanm): Find a nicer way to shutdown cleanly. Our log files are
  # probably not being flushed, etc... why doesn't our python have os.kill ?
  def Stop(self, force=False):
    """Shut the server down via the platform utility and wait for exit.

    Args:
      force: when True, attempt shutdown even if we think nothing is running.
    """
    if not force and not self.IsRunning():
      return

    logging.info('Shutting down http server')
    platform_util = platform_utils.PlatformUtility('')
    platform_util.ShutDownHTTPServer(self._process)

    if self._process:
      self._process.wait()
      self._process = None

    # Wait a bit to make sure the ports are free'd up
    time.sleep(2)
if '__main__' == __name__:
# Provide some command line params for starting/stopping the http server
# manually. Also used in ui_tests to run http layout tests in a browser.
option_parser = optparse.OptionParser()
option_parser.add_option('-k', '--server', help='Server action (start|stop)')
option_parser.add_option('-p', '--port',
help='Port to listen on (overrides layout test ports)')
option_parser.add_option('-r', '--root',
help='Absolute path to DocumentRoot (overrides layout test roots)')
option_parser.add_option('--register_cygwin', action="store_true",
dest="register_cygwin", help='Register Cygwin paths (on Win try bots)')
options, args = option_parser.parse_args()
if not options.server:
print 'Usage: %s --server {start|stop} [--root=root_dir]'
print ' [--port=port_number]' % sys.argv[0]
else:
if (options.root is None) and (options.port is not None):
# specifying root but not port means we want httpd on default set of
# ports that LayoutTest use, but pointing to a different source of tests.
# Specifying port but no root does not seem meaningful.
raise 'Specifying port requires also a root.'
httpd = Lighttpd(tempfile.gettempdir(),
port=options.port,
root=options.root,
register_cygwin=options.register_cygwin)
if 'start' == options.server:
httpd.Start()
else:
httpd.Stop(force=True)
|
import abc
import os
import random
import socket
import time
from typing import Union
from unittest import TestCase
from mediawords.db import connect_to_db
from mediawords.job import JobBroker
from mediawords.test.db.create import create_test_medium, create_test_feed
from mediawords.test.hash_server import HashServer
from mediawords.util.log import create_logger
log = create_logger(__name__)
class AbstractFetchTranscriptTestCase(TestCase, metaclass=abc.ABCMeta):
    """Base test case that stages a story with a media enclosure, serves the
    media file over HTTP, queues the podcast fetch job, and waits for the
    episode and transcript-fetch rows to appear.

    Subclasses provide the input media file and the polling policy through
    the abstract classmethods below.
    """

    # NOTE: TestCase defines no __slots__, so instances still get a __dict__;
    # this list mainly documents the attributes assigned in setUp().
    __slots__ = [
        'db',
        'hs',
        'stories_id',
        'transcript_fetches',
    ]

    @classmethod
    @abc.abstractmethod
    def input_media_path(cls) -> str:
        """Return full path to input media file."""
        # Bug fix: the original raised NotImplemented(...), but NotImplemented
        # is a non-callable sentinel, so calling it raised TypeError instead
        # of the intended signal. NotImplementedError is the correct exception
        # for unimplemented abstract methods. (Same fix in the methods below.)
        raise NotImplementedError("Abstract method")

    @classmethod
    @abc.abstractmethod
    def input_media_mime_type(cls) -> str:
        """Return input media file's MIME type."""
        raise NotImplementedError("Abstract method")

    @classmethod
    @abc.abstractmethod
    def story_title_description(cls) -> str:
        """Return a string to store as both story title and description."""
        raise NotImplementedError("Abstract method")

    @classmethod
    @abc.abstractmethod
    def retries_per_step(cls) -> int:
        """How many retries to do per each local step."""
        raise NotImplementedError("Abstract method")

    @classmethod
    @abc.abstractmethod
    def seconds_between_retries(cls) -> float:
        """How many seconds to wait between retries."""
        raise NotImplementedError("Abstract method")

    def setUp(self) -> None:
        """Create the test story/feed, serve the media file, queue the fetch
        job, and block until the pipeline has produced episode and
        transcript-fetch rows (or the polling budget is exhausted)."""
        super().setUp()

        self.db = connect_to_db()

        test_medium = create_test_medium(db=self.db, label='test')
        test_feed = create_test_feed(db=self.db, label='test', medium=test_medium)

        # Add a story with a random ID to decrease the chance that object in
        # GCS will collide with another test running at the same time
        self.stories_id = random.randint(1, 2147483647 - 1)

        self.db.query("""
            INSERT INTO stories (
                stories_id,
                media_id,
                url,
                guid,
                title,
                description,
                publish_date,
                collect_date,
                full_text_rss
            ) VALUES (
                %(stories_id)s,
                %(media_id)s,
                'http://story.test/',
                'guid://story.test/',
                'story',
                'description',
                '2016-10-15 08:00:00',
                '2016-10-15 10:00:00',
                true
            )
        """, {
            'stories_id': self.stories_id,
            'media_id': test_feed['media_id'],
        })

        # Create missing partitions for "feeds_stories_map"
        self.db.query('SELECT create_missing_partitions()')

        self.db.create(
            table='feeds_stories_map',
            insert_hash={
                'feeds_id': int(test_feed['feeds_id']),
                'stories_id': self.stories_id,
            }
        )

        assert os.path.isfile(self.input_media_path()), f"Test media file '{self.input_media_path()}' should exist."

        with open(self.input_media_path(), mode='rb') as f:
            test_data = f.read()

        # noinspection PyUnusedLocal
        def __media_callback(request: HashServer.Request) -> Union[str, bytes]:
            # Hand-built HTTP response carrying the raw media bytes.
            response = "".encode('utf-8')
            response += "HTTP/1.0 200 OK\r\n".encode('utf-8')
            response += f"Content-Type: {self.input_media_mime_type()}\r\n".encode('utf-8')
            response += f"Content-Length: {len(test_data)}\r\n".encode('utf-8')
            response += "\r\n".encode('utf-8')
            response += test_data
            return response

        port = 8080  # Port exposed on docker-compose.tests.yml
        media_path = '/test_media_file'
        pages = {
            media_path: {
                'callback': __media_callback,
            }
        }

        self.hs = HashServer(port=port, pages=pages)
        self.hs.start()

        # Using our hostname as it will be another container that will be
        # connecting to us
        media_url = f'http://{socket.gethostname()}:{port}{media_path}'

        self.db.insert(table='story_enclosures', insert_hash={
            'stories_id': self.stories_id,
            'url': media_url,
            'mime_type': self.input_media_mime_type(),
            'length': len(test_data),
        })

        # Add a "podcast-fetch-episode" job
        JobBroker(queue_name='MediaWords::Job::Podcast::FetchEpisode').add_to_queue(stories_id=self.stories_id)

        total_time = int(self.retries_per_step() * self.seconds_between_retries())

        # Wait for "podcast-fetch-episode" to transcode, upload to Google
        # Storage, and write it to "podcast_episodes"
        episodes = None
        for x in range(1, self.retries_per_step() + 1):
            log.info(f"Waiting for episode to appear (#{x})...")
            episodes = self.db.select(table='podcast_episodes', what_to_select='*').hashes()
            if episodes:
                log.info(f"Episode is here!")
                break
            time.sleep(self.seconds_between_retries())
        assert episodes, f"Episode didn't show up in {total_time} seconds."

        # Wait for "podcast-submit-operation" to submit Speech API operation
        self.transcript_fetches = None
        for x in range(1, self.retries_per_step() + 1):
            log.info(f"Waiting for transcript fetch to appear (#{x})...")
            self.transcript_fetches = self.db.select(
                table='podcast_episode_transcript_fetches',
                what_to_select='*'
            ).hashes()
            if self.transcript_fetches:
                log.info(f"Transcript fetch is here!")
                break
            time.sleep(self.seconds_between_retries())
        assert self.transcript_fetches, f"Operation didn't show up in {total_time} seconds."

    def tearDown(self) -> None:
        """Stop the embedded HTTP server started in setUp()."""
        super().tearDown()

        self.hs.stop()
Choose smaller random stories_id range
import abc
import os
import random
import socket
import time
from typing import Union
from unittest import TestCase
from mediawords.db import connect_to_db
from mediawords.job import JobBroker
from mediawords.test.db.create import create_test_medium, create_test_feed
from mediawords.test.hash_server import HashServer
from mediawords.util.log import create_logger
log = create_logger(__name__)
class AbstractFetchTranscriptTestCase(TestCase, metaclass=abc.ABCMeta):
    """Base test case that stages a story with a media enclosure, serves the
    media file over HTTP, queues the podcast fetch job, and waits for the
    episode and transcript-fetch rows to appear.

    Subclasses provide the input media file and the polling policy through
    the abstract classmethods below.
    """

    # NOTE: TestCase defines no __slots__, so instances still get a __dict__;
    # this list mainly documents the attributes assigned in setUp().
    __slots__ = [
        'db',
        'hs',
        'stories_id',
        'transcript_fetches',
    ]

    @classmethod
    @abc.abstractmethod
    def input_media_path(cls) -> str:
        """Return full path to input media file."""
        # Bug fix: the original raised NotImplemented(...), but NotImplemented
        # is a non-callable sentinel, so calling it raised TypeError instead
        # of the intended signal. NotImplementedError is the correct exception
        # for unimplemented abstract methods. (Same fix in the methods below.)
        raise NotImplementedError("Abstract method")

    @classmethod
    @abc.abstractmethod
    def input_media_mime_type(cls) -> str:
        """Return input media file's MIME type."""
        raise NotImplementedError("Abstract method")

    @classmethod
    @abc.abstractmethod
    def story_title_description(cls) -> str:
        """Return a string to store as both story title and description."""
        raise NotImplementedError("Abstract method")

    @classmethod
    @abc.abstractmethod
    def retries_per_step(cls) -> int:
        """How many retries to do per each local step."""
        raise NotImplementedError("Abstract method")

    @classmethod
    @abc.abstractmethod
    def seconds_between_retries(cls) -> float:
        """How many seconds to wait between retries."""
        raise NotImplementedError("Abstract method")

    def setUp(self) -> None:
        """Create the test story/feed, serve the media file, queue the fetch
        job, and block until the pipeline has produced episode and
        transcript-fetch rows (or the polling budget is exhausted)."""
        super().setUp()

        self.db = connect_to_db()

        test_medium = create_test_medium(db=self.db, label='test')
        test_feed = create_test_feed(db=self.db, label='test', medium=test_medium)

        # Add a story with a random ID to decrease the chance that object in
        # GCS will collide with another test running at the same time
        self.stories_id = random.randint(1, 1000000)

        self.db.query("""
            INSERT INTO stories (
                stories_id,
                media_id,
                url,
                guid,
                title,
                description,
                publish_date,
                collect_date,
                full_text_rss
            ) VALUES (
                %(stories_id)s,
                %(media_id)s,
                'http://story.test/',
                'guid://story.test/',
                'story',
                'description',
                '2016-10-15 08:00:00',
                '2016-10-15 10:00:00',
                true
            )
        """, {
            'stories_id': self.stories_id,
            'media_id': test_feed['media_id'],
        })

        # Create missing partitions for "feeds_stories_map"
        self.db.query('SELECT create_missing_partitions()')

        self.db.create(
            table='feeds_stories_map',
            insert_hash={
                'feeds_id': int(test_feed['feeds_id']),
                'stories_id': self.stories_id,
            }
        )

        assert os.path.isfile(self.input_media_path()), f"Test media file '{self.input_media_path()}' should exist."

        with open(self.input_media_path(), mode='rb') as f:
            test_data = f.read()

        # noinspection PyUnusedLocal
        def __media_callback(request: HashServer.Request) -> Union[str, bytes]:
            # Hand-built HTTP response carrying the raw media bytes.
            response = "".encode('utf-8')
            response += "HTTP/1.0 200 OK\r\n".encode('utf-8')
            response += f"Content-Type: {self.input_media_mime_type()}\r\n".encode('utf-8')
            response += f"Content-Length: {len(test_data)}\r\n".encode('utf-8')
            response += "\r\n".encode('utf-8')
            response += test_data
            return response

        port = 8080  # Port exposed on docker-compose.tests.yml
        media_path = '/test_media_file'
        pages = {
            media_path: {
                'callback': __media_callback,
            }
        }

        self.hs = HashServer(port=port, pages=pages)
        self.hs.start()

        # Using our hostname as it will be another container that will be
        # connecting to us
        media_url = f'http://{socket.gethostname()}:{port}{media_path}'

        self.db.insert(table='story_enclosures', insert_hash={
            'stories_id': self.stories_id,
            'url': media_url,
            'mime_type': self.input_media_mime_type(),
            'length': len(test_data),
        })

        # Add a "podcast-fetch-episode" job
        JobBroker(queue_name='MediaWords::Job::Podcast::FetchEpisode').add_to_queue(stories_id=self.stories_id)

        total_time = int(self.retries_per_step() * self.seconds_between_retries())

        # Wait for "podcast-fetch-episode" to transcode, upload to Google
        # Storage, and write it to "podcast_episodes"
        episodes = None
        for x in range(1, self.retries_per_step() + 1):
            log.info(f"Waiting for episode to appear (#{x})...")
            episodes = self.db.select(table='podcast_episodes', what_to_select='*').hashes()
            if episodes:
                log.info(f"Episode is here!")
                break
            time.sleep(self.seconds_between_retries())
        assert episodes, f"Episode didn't show up in {total_time} seconds."

        # Wait for "podcast-submit-operation" to submit Speech API operation
        self.transcript_fetches = None
        for x in range(1, self.retries_per_step() + 1):
            log.info(f"Waiting for transcript fetch to appear (#{x})...")
            self.transcript_fetches = self.db.select(
                table='podcast_episode_transcript_fetches',
                what_to_select='*'
            ).hashes()
            if self.transcript_fetches:
                log.info(f"Transcript fetch is here!")
                break
            time.sleep(self.seconds_between_retries())
        assert self.transcript_fetches, f"Operation didn't show up in {total_time} seconds."

    def tearDown(self) -> None:
        """Stop the embedded HTTP server started in setUp()."""
        super().tearDown()

        self.hs.stop()
|
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # Odoo addon manifest (the dict literal is the module's __manifest__).
    'name': 'Commission Invoices with Public Categories',
    'version': '13.0.1.0.0',
    'category': 'Accounting',
    'sequence': 14,
    'summary': '',
    'author': 'ADHOC SA',
    'website': 'www.adhoc.com.ar',
    'license': 'AGPL-3',
    'images': [
    ],
    'depends': [
        'account_invoice_commission',
        'website_sale',
    ],
    'data': [
        'views/account_commission_rule_views.xml',
    ],
    'demo': [
    ],
    # NOTE(review): installable is False on this branch, so the module is
    # disabled here despite auto_install being True.
    'installable': False,
    'auto_install': True,
    'application': False,
}
[MIG] website_sale_account_invoice_commission: Migration to 15.0
closes ingadhoc/account-invoicing#134
Signed-off-by: Juan José Scarafía <1d1652a8631a1f5a0ea40ef8dcad76f737ce6379@adhoc.com.ar>
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # Odoo addon manifest (the dict literal is the module's __manifest__).
    'name': 'Commission Invoices with Public Categories',
    'version': "15.0.1.0.0",
    'category': 'Accounting',
    'sequence': 14,
    'summary': '',
    'author': 'ADHOC SA',
    'website': 'www.adhoc.com.ar',
    'license': 'AGPL-3',
    'images': [
    ],
    'depends': [
        'account_invoice_commission',
        'website_sale',
    ],
    'data': [
        'views/account_commission_rule_views.xml',
    ],
    'demo': [
    ],
    'installable': True,
    # auto_install: install automatically when the dependencies are present.
    'auto_install': True,
    'application': False,
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import sys, os.path
# Make the package root (two directory levels above this file) importable so
# that the "from MPBST import *" below resolves when run as a script.
pkg_dir = os.path.dirname(os.path.realpath(__file__)) + '/../../'
sys.path.append(pkg_dir)
from MPBST import *
import numpy as np
from scipy.stats import multivariate_normal
class HMMSampler(BaseSampler):
    """Base HMM Gibbs sampler: holds the state assignments and the transition
    matrix shared by concrete (e.g. Gaussian-emission) subclasses."""

    def __init__(self, num_states, record_best = True, cl_mode = False, cl_device = None, niter = 1000):
        """Initialize the base HMM sampler.

        Args:
            num_states: number of hidden states.
            record_best, cl_mode, cl_device, niter: forwarded verbatim to
                BaseSampler.__init__.
        """
        BaseSampler.__init__(self, record_best, cl_mode, cl_device, niter)

        self.data = None
        self.num_states = num_states
        # State labels 1..num_states as int64; np.arange replaces the original
        # linspace(...).astype(np.int64) round-trip with an identical result.
        self.uniq_states = np.arange(1, self.num_states + 1, dtype=np.int64)
        # Uniform initial transition probabilities (true division is in effect
        # via "from __future__ import division"); np.full replaces empty+fill.
        self.trans_p_matrix = np.full((num_states, num_states), 1 / num_states)

    def read_csv(self, filepath, obsvar_names = None, header = True):
        """Read data from a csv file and check for observations.

        Args:
            filepath: path to the input csv (gzip-compressed -- the
                compression mode is hard-coded below).
            obsvar_names: list of observed-variable column names; defaults
                to ['obs'].  (The mutable-list default argument was replaced
                with None to avoid sharing one list across calls.)
            header: currently unused -- NOTE(review): confirm intent.
        """
        if obsvar_names is None:
            obsvar_names = ['obs']
        self.data = pd.read_csv(filepath, compression = 'gzip')
        self.obs = self.data[obsvar_names]
        self.N = self.data.shape[0]
        # Random initial state assignment, one label in 1..num_states per row.
        self.states = np.random.randint(low = 1, high = self.num_states + 1, size = self.N)
class GaussianHMMSampler(HMMSampler):
    """HMM sampler with multivariate-Gaussian emission distributions."""

    def __init__(self, num_states, record_best = True, cl_mode = False, cl_device = None, niter = 1000):
        """Initialize the base HMM sampler.

        All arguments are forwarded verbatim to HMMSampler.__init__.
        """
        HMMSampler.__init__(self, num_states, record_best, cl_mode, cl_device, niter)
    def read_csv(self, filepath, obsvar_names = ['obs'], header = True):
        """Read data from a csv file and set up Gaussian emission parameters.

        Extends HMMSampler.read_csv with per-state mean/covariance arrays and
        the prior hyperparameters used by the inference steps.
        """
        HMMSampler.read_csv(self, filepath, obsvar_names, header)
        self.dim = len(obsvar_names)
        self.means = np.zeros((self.num_states, self.dim)) # the mean vector of each state
        self.covs = np.array([np.eye(self.dim) for _ in xrange(self.num_states)]) # the covariance matrix of each state
        # Prior hyperparameters: Gaussian prior on the mean (mu0, k0) and a
        # Wishart prior on the covariance (T0, v0).
        self.gaussian_mu0 = np.zeros(self.dim)
        self.gaussian_k0 = 1
        self.wishart_T0 = np.eye(self.dim)
        self.wishart_v0 = 1
def do_inference(self, output_file = None):
"""Perform inference on parameters.
"""
for i in xrange(self.niter):
self._infer_means_covs(output_file)
print('Means:\n', self.means)
print('Covs:\n', self.covs)
self._infer_states(output_file)
print('States:\n', self.states)
self._infer_transp(output_file)
print('Transitional matrix:\n', self.trans_p_matrix)
#raw_input()
return
def _infer_states(self, output_file):
"""Infer the state of each observation without OpenCL.
"""
for nth in xrange(self.N):
# set up sampling grid
state_logp_grid = np.empty(shape = self.num_states)
# loop over states
for state in self.uniq_states:
# compute the transitional probability from the previous state
if nth == 0:
trans_prev_logp = np.log(1 / self.num_states)
else:
prev_state = self.states[nth - 1]
trans_prev_logp = np.log(self.trans_p_matrix[prev_state-1, state-1])
# compute the transitional probability to the next state
if nth == self.N - 1:
trans_next_logp = np.log(1)
else:
next_state = self.states[nth + 1]
trans_next_logp = np.log(self.trans_p_matrix[state-1, next_state-1])
emit_logp = multivariate_normal.logpdf(self.obs.iloc[nth], mean = self.means[state-1], cov = self.covs[state-1])
state_logp_grid[state - 1] = trans_prev_logp + trans_next_logp + emit_logp
#state_logp_grid[state - 1] = trans_prev_logp + emit_logp
# resample state
self.states[nth] = sample(a = self.uniq_states, p = lognormalize(state_logp_grid))
#print('sampled:', self.states[nth])
return
def _infer_means_covs(self, output_file):
"""Infer the means of each hidden state without OpenCL.
"""
for state in self.uniq_states:
# get observations currently assigned to this state
cluster_obs = np.array(self.obs.iloc[np.where(self.states == state)])
n = cluster_obs.shape[0]
# compute sufficient statistics
if n == 0:
mu = np.zeros((self.dim, 1))
else:
mu = cluster_obs.mean()
obs_deviance = cluster_obs - mu
mu0_deviance = np.reshape(mu - self.gaussian_mu0, (1, self.dim))
cov_obs = np.dot(obs_deviance.T, obs_deviance)
cov_mu0 = np.dot(mu0_deviance.T, mu0_deviance)
v_n = self.wishart_v0 + n
k_n = self.gaussian_k0 + n
T_n = self.wishart_T0 + cov_obs + cov_mu0 * self.gaussian_k0 * n / k_n
# new mu is sampled from a multivariate t with the following parameters
df = v_n - self.dim + 1
mu_n = (self.gaussian_k0 * self.gaussian_mu0 + n * mu) / k_n
Sigma = T_n / (k_n * df)
# resample the new mean vector
new_mu = multivariate_t(mu = mu_n, Sigma = Sigma, df = df)
self.means[state-1] = new_mu
# resample the covariance matrix
new_cov = np.linalg.inv(sample_wishart(sigma = np.linalg.inv(T_n), df = v_n))
#new_cov = wishart(Sigma = Sigma, df = v_n)
self.covs[state-1] = new_cov
# a hacky way to alleviate label switching
reindex = self.means[:,0].argsort()
self.means = self.means[reindex]
self.covs = self.covs[reindex]
return
def _infer_transp(self, output_file):
"""Infer the transitional probabilities betweenn states without OpenCL.
"""
for state_from in self.uniq_states:
count_p = np.empty(self.num_states)
pairs = zip(self.states[:self.N-1], self.states[1:])
count_from_state = (self.states[:self.N-1] == state_from).sum()
for state_to in self.uniq_states:
count_p[state_to - 1] = (pairs.count((state_from, state_to)) + 1) / (count_from_state + self.num_states)
self.trans_p_matrix[state_from-1] = count_p
return
# Guard the demo run so importing this module does not trigger I/O.
if __name__ == '__main__':
    # Demo: 2-state sampler on the toy speed dataset (gzipped CSV, 'rt' column).
    hs = GaussianHMMSampler(num_states = 2)
    hs.read_csv('./toydata/speed.csv.gz', obsvar_names = ['rt'])
    hs.do_inference()
Improved the speed of the non-OpenCL sampler by reducing Pandas calls
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import sys, os.path
pkg_dir = os.path.dirname(os.path.realpath(__file__)) + '/../../'
sys.path.append(pkg_dir)
from MPBST import *
import numpy as np
from scipy.stats import multivariate_normal
class HMMSampler(BaseSampler):
    """Base class for HMM Gibbs samplers: holds the state labels, a
    uniform initial transition matrix, and CSV loading logic."""

    def __init__(self, num_states, record_best = True, cl_mode = False, cl_device = None, niter = 1000):
        """Initialize the base HMM sampler.
        """
        BaseSampler.__init__(self, record_best, cl_mode, cl_device, niter)
        self.data = None
        self.num_states = num_states
        # labels 1..num_states as int64 (identical to the linspace version)
        self.uniq_states = np.arange(1, num_states + 1, dtype = np.int64)
        # uniform initial transition probabilities (true division is on)
        self.trans_p_matrix = np.full((num_states, num_states), 1 / num_states)

    def read_csv(self, filepath, obsvar_names = ['obs'], header = True):
        """Read data from a csv file and check for observations.

        The default list is read-only, so the shared default is safe.
        The file is assumed to be gzip-compressed.
        """
        self.data = pd.read_csv(filepath, compression = 'gzip')
        self.N = self.data.shape[0]
        self.obs = self.data[obsvar_names]
        # uniformly random initial state assignment
        self.states = np.random.randint(low = 1, high = self.num_states + 1, size = self.N)
class GaussianHMMSampler(HMMSampler):
    """HMM sampler with multivariate Gaussian emissions under a
    Normal-Wishart prior (mu0, k0, T0, v0)."""
    def __init__(self, num_states, record_best = True, cl_mode = False, cl_device = None, niter = 1000):
        """Initialize the base HMM sampler.
        """
        HMMSampler.__init__(self, num_states, record_best, cl_mode, cl_device, niter)
    def read_csv(self, filepath, obsvar_names = ['obs'], header = True):
        """Read data from a csv file and check for observations.

        Also initializes per-state Gaussian parameters and the
        Normal-Wishart hyperparameters used to resample them.
        """
        HMMSampler.read_csv(self, filepath, obsvar_names, header)
        self.dim = len(obsvar_names)
        self.means = np.zeros((self.num_states, self.dim)) # the mean vector of each state
        self.covs = np.array([np.eye(self.dim) for _ in xrange(self.num_states)]) # the covariance matrix of each state
        # Normal-Wishart prior hyperparameters
        self.gaussian_mu0 = np.zeros(self.dim)
        self.gaussian_k0 = 1
        self.wishart_T0 = np.eye(self.dim)
        self.wishart_v0 = 1
    def do_inference(self, output_file = None):
        """Perform inference on parameters.

        Each sweep resamples emission parameters, hidden states, and
        the transition matrix, then prints a progress snapshot.
        """
        for i in xrange(self.niter):
            self._infer_means_covs(output_file)
            self._infer_states(output_file)
            self._infer_trans_p(output_file)
            print('Means:\n', self.means)
            print('Covs:\n', self.covs)
            print('States:\n', self.states)
            print('Transitional matrix:\n', self.trans_p_matrix)
        return
    def _infer_states(self, output_file):
        """Infer the state of each observation without OpenCL.

        Emission log-densities for all observations are precomputed in
        one vectorized pass per state (the speedup over the naive
        version); each state is then resampled from
        transition(prev -> s) * transition(s -> next) * emission.
        """
        # set up sampling grid, which can be reused
        state_logp_grid = np.empty(shape = self.num_states)
        # emission probabilities can be calculated in one pass
        emit_logp = np.empty((self.num_states, self.N))
        for state in self.uniq_states:
            emit_logp[state-1] = multivariate_normal.logpdf(self.obs, mean = self.means[state-1], cov = self.covs[state-1])
        for nth in xrange(self.N):
            # loop over states
            for state in self.uniq_states:
                # compute the transitional probability from the previous state
                if nth == 0:
                    # no predecessor: uniform prior over states
                    trans_prev_logp = np.log(1 / self.num_states)
                else:
                    prev_state = self.states[nth - 1]
                    trans_prev_logp = np.log(self.trans_p_matrix[prev_state-1, state-1])
                # compute the transitional probability to the next state
                if nth == self.N - 1:
                    # no successor: contributes log(1) = 0
                    trans_next_logp = np.log(1)
                else:
                    next_state = self.states[nth + 1]
                    trans_next_logp = np.log(self.trans_p_matrix[state-1, next_state-1])
                state_logp_grid[state - 1] = trans_prev_logp + trans_next_logp + emit_logp[state-1, nth]
            # resample state
            self.states[nth] = sample(a = self.uniq_states, p = lognormalize(state_logp_grid))
        return
    def _infer_means_covs(self, output_file):
        """Infer the means of each hidden state without OpenCL.

        Conjugate Normal-Wishart update: the new mean is drawn from a
        multivariate t and the covariance via a Wishart draw.
        """
        for state in self.uniq_states:
            # get observations currently assigned to this state
            cluster_obs = np.array(self.obs.iloc[np.where(self.states == state)])
            n = cluster_obs.shape[0]
            # compute sufficient statistics
            if n == 0:
                # NOTE(review): shape (dim, 1) conflicts with the (1, dim)
                # reshape below when dim > 1 -- verify the empty-state path
                mu = np.zeros((self.dim, 1))
            else:
                mu = cluster_obs.mean()
            obs_deviance = cluster_obs - mu
            mu0_deviance = np.reshape(mu - self.gaussian_mu0, (1, self.dim))
            cov_obs = np.dot(obs_deviance.T, obs_deviance)
            cov_mu0 = np.dot(mu0_deviance.T, mu0_deviance)
            # posterior hyperparameters of the Normal-Wishart
            v_n = self.wishart_v0 + n
            k_n = self.gaussian_k0 + n
            T_n = self.wishart_T0 + cov_obs + cov_mu0 * self.gaussian_k0 * n / k_n
            # new mu is sampled from a multivariate t with the following parameters
            df = v_n - self.dim + 1
            mu_n = (self.gaussian_k0 * self.gaussian_mu0 + n * mu) / k_n
            Sigma = T_n / (k_n * df)
            # resample the new mean vector
            new_mu = multivariate_t(mu = mu_n, Sigma = Sigma, df = df)
            self.means[state-1] = new_mu
            # resample the covariance matrix
            new_cov = np.linalg.inv(sample_wishart(sigma = np.linalg.inv(T_n), df = v_n))
            #new_cov = wishart(Sigma = Sigma, df = v_n)
            self.covs[state-1] = new_cov
        # a hacky way to alleviate label switching
        reindex = self.means[:,0].argsort()
        self.means = self.means[reindex]
        self.covs = self.covs[reindex]
        return
    def _infer_trans_p(self, output_file):
        """Infer the transitional probabilities betweenn states without OpenCL.

        Rows are set to add-one-smoothed bigram frequencies of
        consecutive states.
        """
        # set up the sampling grid, which can be reused
        count_p = np.empty(self.num_states)
        # make bigram pairs for easier counting
        # NOTE(review): pairs.count() requires zip() to return a list,
        # i.e. Python 2 (consistent with xrange elsewhere)
        pairs = zip(self.states[:self.N-1], self.states[1:])
        for state_from in self.uniq_states:
            count_from_state = (self.states[:self.N-1] == state_from).sum()
            for state_to in self.uniq_states:
                count_p[state_to - 1] = (pairs.count((state_from, state_to)) + 1) / (count_from_state + self.num_states)
            self.trans_p_matrix[state_from-1] = count_p
        return
# Guard the demo run so importing this module does not trigger I/O.
if __name__ == '__main__':
    # Demo: 2-state sampler on the toy speed dataset (gzipped CSV, 'rt' column).
    hs = GaussianHMMSampler(num_states = 2, niter = 1000)
    hs.read_csv('./toydata/speed.csv.gz', obsvar_names = ['rt'])
    hs.do_inference()
|
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.http import HttpResponseRedirect, Http404, HttpResponse
from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.db.models import Q
from django.forms.formsets import formset_factory
from django.forms.models import modelformset_factory
from django.core.mail import send_mail
from django.core import serializers
from django.conf import settings
from decimal import *
from datetime import datetime, timedelta
from clusters.models import *
from clusters.forms import *
from clusters.utils import *
def cluster_params(cluster):
    """Assemble the template context for a cluster's graph pages.

    Only functions with at least one input or output are kept; the
    resources are every type flowing into or out of those functions.
    """
    connected = []
    rtypes = []
    for fn in EconomicFunction.objects.filter(cluster=cluster):
        fn_inputs = fn.inputs()
        if fn_inputs:
            connected.append(fn)
            rtypes.extend(inp.resource_type for inp in fn_inputs)
        fn_outputs = fn.outputs()
        if fn_outputs:
            connected.append(fn)
            rtypes.extend(out.resource_type for out in fn_outputs)
    # dedupe; connected may contain each function up to twice
    efs = list(set(connected))
    resources = list(set(rtypes))
    # map each function to the list of agents performing it
    agents = {}
    for ef in efs:
        for agent_fn in ef.agents.all():
            agents.setdefault(ef, []).append(agent_fn.agent)
    return {
        "cluster": cluster,
        "functions": efs,
        "resources": resources,
        "function_agents": agents,
        "root": cluster.root(),
        "frtable": function_resource_table(cluster, "qty"),
    }
def explore_params(cluster):
    """Assemble the template context for the cluster explore page.

    Same selection logic as cluster_params but without the
    function/resource table ("frtable").
    """
    connected = []
    rtypes = []
    for fn in EconomicFunction.objects.filter(cluster=cluster):
        fn_inputs = fn.inputs()
        if fn_inputs:
            connected.append(fn)
            rtypes.extend(inp.resource_type for inp in fn_inputs)
        fn_outputs = fn.outputs()
        if fn_outputs:
            connected.append(fn)
            rtypes.extend(out.resource_type for out in fn_outputs)
    efs = list(set(connected))
    resources = list(set(rtypes))
    # map each function to the list of agents performing it
    agents = {}
    for ef in efs:
        for agent_fn in ef.agents.all():
            agents.setdefault(ef, []).append(agent_fn.agent)
    return {
        "cluster": cluster,
        "functions": efs,
        "resources": resources,
        "function_agents": agents,
        "root": cluster.root(),
    }
class FlowResource(object):
    """Minimal stand-in that carries only a resource_type attribute,
    mimicking the attribute shape of a FunctionResourceType."""

    def __init__(self, resource_type):
        self.resource_type = resource_type
# does not work; FlowResource objects cannot fake it for FunctionResourceTypes
def flow_radial_graph_params(cluster):
    """Build radial-graph context from explicit FunctionResourceFlows.

    Attaches plain-list ``inputs``/``outputs`` of FlowResource wrappers
    onto each function instance.  See the note above this function:
    FlowResource does not implement the full FunctionResourceType
    interface, so the resulting context may still be unusable.
    """
    template_params = {}
    flows = FunctionResourceFlow.objects.filter(
        from_function__cluster=cluster)
    functions = []
    resources = []
    edges = []
    for flow in flows:
        from_fn = flow.from_function
        # fn.inputs is normally callable; len() raising TypeError means we
        # have not yet replaced it with a plain list on this instance
        try:
            len(from_fn.inputs)
        except TypeError:
            from_fn.inputs = []
        from_fn.inputs.append(FlowResource(flow.resource_type))
        to_fn = flow.to_function
        # BUG FIX: previously tested len(from_fn.outputs) while assigning
        # to_fn.outputs, so to_fn.outputs could remain unconverted and the
        # append below would fail
        try:
            len(to_fn.outputs)
        except TypeError:
            to_fn.outputs = []
        to_fn.outputs.append(FlowResource(flow.resource_type))
        functions.extend([from_fn, to_fn])
        resources.append(flow.resource_type)
    functions = list(set(functions))
    resources = list(set(resources))
    # map each function to the list of agents performing it
    agents = {}
    for ef in functions:
        for agent in ef.agents.all():
            agents.setdefault(ef, []).append(agent.agent)
    root = cluster.root()
    template_params = {
        "cluster": cluster,
        "functions": functions,
        "resources": resources,
        "function_agents": agents,
        "root": root,
    }
    return template_params
def clusters(request):
    """Index page listing every community."""
    context = {"communities": Community.objects.all()}
    return render_to_response("clusters/clusters.html", context,
        context_instance=RequestContext(request))
def cluster(request, cluster_id, location="agt"):
    """Show one cluster's map page, grouped by agent ("agt") or region.

    A valid POST of the location form redirects back to this view with
    the chosen location encoded in the URL (post/redirect/get).
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    location_form = None
    # the location chooser only exists for communities with a named area
    if community.agent_geographic_area_name:
        init = {"location": location,}
        location_form = AgentAreaForm(community=community, initial=init, data=request.POST or None)
    if request.method == "POST":
        if location_form:
            if location_form.is_valid():
                location = location_form.cleaned_data["location"]
                return HttpResponseRedirect('/%s/%s/%s/'
                    % ('clusters/cluster', cluster_id, location))
    if location == "agt":
        agents = cluster.agents()
        for agent in agents:
            # only expose each agent's functions belonging to this cluster
            agent.all_functions = agent.functions.filter(
                function__cluster=cluster)
    else:
        agents = cluster.regions()
    color_functions = cluster.function_colors()
    map_center = ",".join([str(community.latitude), str(community.longitude)])
    map_key = settings.GOOGLE_API_KEY
    return render_to_response("clusters/cluster.html", {
        "cluster": cluster,
        "agents": agents,
        "map_center": map_center,
        "map_key": map_key,
        "zoom_level": community.map_zoom_level,
        "location_form": location_form,
        "color_functions": color_functions,
    }, context_instance=RequestContext(request))
def cluster_agents(request, cluster_id):
    """Per-agent detail page.

    For each agent's cluster functions, attach either the per-resource
    agent lists (when the function declares resources) or the raw
    function resources plus any flagged outliers.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    agents = cluster.agents()
    for agent in agents:
        agent.cluster_functions = agent.functions.filter(function__cluster=cluster)
        for cf in agent.cluster_functions:
            cf.rsrcs = cf.function.resources.all()
            if cf.rsrcs:
                for res in cf.rsrcs:
                    res.agent_resource_list = res.function_resources_for_agent(agent)
            else:
                cf.agent_resources = cf.function_resources.all()
                # collect this function's resources flagged as outliers
                outliers = []
                candidates = cf.function_resources.all()
                for c in candidates:
                    if c.is_outlier():
                        outliers.append(c)
                cf.outliers = outliers
    return render_to_response("clusters/cluster_agents.html", {
        "cluster": cluster,
        "agents": agents,
    }, context_instance=RequestContext(request))
@login_required
def edit_cluster_functions(request, cluster_id):
    """Edit page for a cluster's functions and resources.

    Builds per-function resource forms, per-resource producer/consumer
    lists, and the autocomplete list of resource names not yet used by
    the community.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    symbol = "$"
    # fall back to "$" when the community has no unit_of_value
    # NOTE(review): the bare except also hides unrelated errors
    try:
        symbol = community.unit_of_value.symbol
    except:
        pass
    new_function_form = EconomicFunctionForm(prefix="function")
    new_resource_form = EconomicResourceTypeForm(prefix="resource")
    functions = cluster.functions.all()
    for fun in functions:
        fun.form = FunctionResourceTypeForm(community=cluster.community)
    resources = cluster.resources()
    for res in resources:
        res.my_consumers = res.cluster_consumers(cluster)
        res.my_producers = res.cluster_producers(cluster)
    # autocomplete choices: resource types the community does not use yet
    used = [cr.resource_type.id for cr in community.resources.all()]
    resource_names = ';'.join([
        res.name for res in EconomicResourceType.objects.all().exclude(id__in=used)])
    template_params = network_params(cluster, "qty")
    template_params["symbol"] = symbol
    template_params["functions"] = functions
    template_params["resources"] = resources
    template_params["new_function_form"] = new_function_form
    template_params["new_resource_form"] = new_resource_form
    template_params["resource_names"] = resource_names
    function_aspect_name = cluster.function_aspect_name
    resource_aspect_name = cluster.community.resource_aspect_name
    template_params["function_aspect_name"] = function_aspect_name
    template_params["resource_aspect_name"] = resource_aspect_name
    return render_to_response("clusters/edit_cluster_functions.html",
        template_params,
        context_instance=RequestContext(request))
@login_required
def edit_flows(request, cluster_id):
    """Edit the function-to-function resource flows of a cluster.

    GET renders a formset over the cluster's FunctionResourceFlows plus
    forms for adding new functions/resources; POST saves or deletes each
    valid row and redirects back (post/redirect/get).
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    new_function_form = EconomicFunctionForm(prefix="function")
    new_resource_form = EconomicResourceTypeForm(prefix="resource")
    # (removed an unused `flows` queryset that duplicated the formset's)
    FlowFormSet = modelformset_factory(
        FunctionResourceFlow,
        form=FunctionResourceFlowForm,
        can_delete=True,
        extra=4,
    )
    formset = FlowFormSet(
        queryset=FunctionResourceFlow.objects.filter(
            from_function__cluster=cluster),
        data=request.POST or None,
    )
    # restrict the choice fields to this cluster's functions and resources
    function_choices = [('', '----------')] + [
        (fn.id, fn.name) for fn in cluster.functions.all()
    ]
    resource_choices = [('', '----------')] + [
        (cr.resource_type.id, cr.resource_type.name) for cr in cluster.community.resources.all()
    ]
    for form in formset.forms:
        form.fields['from_function'].choices = function_choices
        form.fields['to_function'].choices = function_choices
        form.fields['resource_type'].choices = resource_choices
    # autocomplete data: resource types not already used by the community
    used = [cr.resource_type.id for cr in cluster.community.resources.all()]
    resource_names = ';'.join([
        res.name for res in EconomicResourceType.objects.all().exclude(id__in=used)])
    if request.method == "POST":
        for form in formset.forms:
            if form.is_valid():
                delete = form.cleaned_data["DELETE"]
                if delete:
                    #todo: this delete code is odd.
                    #First, I expected formsets to delete automatically id DELETE is True.
                    #Second, returning an object when requesting id is nice
                    #but smells like it might break in the future.
                    deleted = form.cleaned_data["id"]
                    deleted.delete()
                else:
                    form.save()
        return HttpResponseRedirect('/%s/%s/'
            % ('clusters/editflows', cluster.id))
    template_params = flow_params(cluster, "qty")
    template_params["new_function_form"] = new_function_form
    template_params["new_resource_form"] = new_resource_form
    template_params["resource_names"] = resource_names
    function_aspect_name = cluster.function_aspect_name
    resource_aspect_name = cluster.community.resource_aspect_name
    template_params["function_aspect_name"] = function_aspect_name
    template_params["resource_aspect_name"] = resource_aspect_name
    template_params["formset"] = formset
    return render_to_response("clusters/edit_flows.html",
        template_params,
        context_instance=RequestContext(request))
@login_required
def edit_agent_flows(request, cluster_id):
    """Edit the agent-function-to-agent-function resource flows of a cluster.

    GET renders a formset over the cluster's AgentResourceFlows plus
    forms for adding functions/resources; POST saves or deletes each
    valid row and redirects back (post/redirect/get).

    NOTE(review): the formset queryset filters on from_function only, so
    flows whose to_function is in this cluster but whose from_function
    is not will not appear -- confirm that is intended.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    new_function_form = InlineAgentFunctionForm(cluster=cluster, prefix="function")
    new_resource_form = EconomicResourceTypeForm(prefix="resource")
    # (removed an unused `flows` list that executed two extra DB queries)
    FlowFormSet = modelformset_factory(
        AgentResourceFlow,
        form=AgentResourceFlowForm,
        can_delete=True,
        extra=3,
    )
    formset = FlowFormSet(
        queryset=AgentResourceFlow.objects.filter(
            from_function__function__cluster=cluster),
        data=request.POST or None,
    )
    # restrict choice fields to this cluster's agent-functions and resources
    agent_functions = AgentFunction.objects.filter(
        function__cluster=cluster)
    function_choices = [('', '----------')] + [
        (fn.id, fn) for fn in agent_functions]
    resources = cluster.community.resources.all()
    resource_choices = [('', '----------')] + [
        (cr.resource_type.id, cr.resource_type.name) for cr in resources
    ]
    for form in formset.forms:
        form.fields['from_function'].choices = function_choices
        form.fields['to_function'].choices = function_choices
        form.fields['resource_type'].choices = resource_choices
    # autocomplete data: unused resource type names and function names
    used = [cr.resource_type.id for cr in resources]
    erts = EconomicResourceType.objects.all().exclude(id__in=used)
    resource_names = '~'.join([res.name for res in erts])
    function_names = '~'.join([fn.name for fn in cluster.functions.all()])
    if request.method == "POST":
        for form in formset.forms:
            if form.is_valid():
                delete = form.cleaned_data["DELETE"]
                if delete:
                    #todo: this delete code is odd.
                    #First, I expected formsets to delete automatically id DELETE is True.
                    #Second, returning an object when requesting id is nice
                    #but smells like it might break in the future.
                    deleted = form.cleaned_data["id"]
                    deleted.delete()
                else:
                    form.save()
        return HttpResponseRedirect('/%s/%s/'
            % ('clusters/editagentflows', cluster.id))
    template_params = agent_flow_params(cluster, "qty")
    template_params["new_function_form"] = new_function_form
    template_params["new_resource_form"] = new_resource_form
    template_params["resource_names"] = resource_names
    template_params["function_names"] = function_names
    function_aspect_name = cluster.function_aspect_name
    resource_aspect_name = cluster.community.resource_aspect_name
    template_params["function_aspect_name"] = function_aspect_name
    template_params["resource_aspect_name"] = resource_aspect_name
    template_params["formset"] = formset
    return render_to_response("clusters/edit_agent_flows.html",
        template_params,
        context_instance=RequestContext(request))
def featured_cluster(request):
    """Render the currently featured cluster, or an empty page if none."""
    featured = get_featured_cluster()
    context = cluster_params(featured) if featured else {}
    return render_to_response("clusters/featured_cluster.html",
        context,
        context_instance=RequestContext(request))
def radial_graph(request, cluster_id):
    """Radial-graph page for one cluster (404 if the id is unknown)."""
    target = get_object_or_404(Cluster, pk=cluster_id)
    return render_to_response("clusters/radial_graph.html",
        cluster_params(target),
        context_instance=RequestContext(request))
class Edge(object):
    """A directed graph edge carrying a numeric quantity and a display
    label; width defaults to 1 and is rescaled later by the callers."""

    def __init__(self, from_node, to_node, quantity, label, width=1):
        self.from_node, self.to_node = from_node, to_node
        self.quantity = quantity
        self.label = label
        # drawing width; recomputed from quantity/total after construction
        self.width = width
def agent_network_params(cluster, toggle):
    """Build network-graph context at the agent level.

    toggle selects the edge metric: "val" (computed value), "price", or
    anything else for plain quantity.  If AgentFunctionResourceTypes
    exist they define the graph (resource -> agent for inputs, agent ->
    resource for outputs); otherwise explicit AgentResourceFlows do.
    Edge widths are scaled to each edge's share of the total.
    """
    template_params = {}
    frts = AgentFunctionResourceType.objects.filter(
        agent_function__function__cluster=cluster)
    symbol = "$"
    # fall back to "$" when the community has no unit_of_value
    if toggle == "val" or toggle == "price":
        try:
            symbol = cluster.community.unit_of_value.symbol
        except:
            pass
    edges = []
    rtypes = []
    # price totals use Decimal; value/quantity totals use float
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    if frts:
        nodes = list(cluster.agents())
        for agt in nodes:
            # inputs: edges run resource -> agent
            for v in agt.function_inputs(cluster):
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(v.resource_type, agt, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    edges.append(Edge(v.resource_type, agt, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(v.resource_type, agt, v.quantity, qty_string))
            # outputs: edges run agent -> resource
            for v in agt.function_outputs(cluster):
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(agt, v.resource_type, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    edges.append(Edge(agt, v.resource_type, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(agt, v.resource_type, v.quantity, qty_string))
    else:
        flows = AgentResourceFlow.objects.filter(
            from_function__function__cluster=cluster)
        nodes = []
        edges = []
        for flow in flows:
            nodes.extend([flow.from_function, flow.to_function, flow.resource_type])
            if toggle == "val":
                value = flow.get_value()
                total += value
                val_string = "".join([symbol, split_thousands(value)])
                edges.append(Edge(flow.from_function, flow.resource_type, value, val_string))
                edges.append(Edge(flow.resource_type, flow.to_function, value, val_string))
            elif toggle == "price":
                # BUG FIX: this branch referenced the undefined name `v`
                # instead of `flow` (NameError at runtime)
                total += flow.price
                p_string = "".join([symbol, str(flow.price.quantize(Decimal(".01")))])
                edges.append(Edge(flow.from_function, flow.resource_type, flow.price, p_string))
                edges.append(Edge(flow.resource_type, flow.to_function, flow.price, p_string))
            else:
                total += flow.quantity
                # BUG FIX: label also referenced `v` instead of `flow`
                qty_string = split_thousands(flow.quantity)
                edges.append(Edge(flow.from_function, flow.resource_type, flow.quantity, qty_string))
                edges.append(Edge(flow.resource_type, flow.to_function, flow.quantity, qty_string))
        nodes = list(set(nodes))
    # scale edge widths to each edge's share of the total (max 50)
    for edge in edges:
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        edge.width = width
    nodes.extend(list(set(rtypes)))
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def group_network_params(cluster, toggle):
    """Build network-graph context at the group level.

    Mirrors agent_network_params but takes its nodes from
    cluster.groups() and has no flow-based fallback.  toggle selects
    "val", "price", or plain quantity.
    """
    template_params = {}
    groups = cluster.groups()
    symbol = "$"
    # fall back to "$" when the community has no unit_of_value
    if toggle == "val" or toggle == "price":
        try:
            symbol = cluster.community.unit_of_value.symbol
        except:
            pass
    nodes = []
    edges = []
    rtypes = []
    # price totals use Decimal; value/quantity totals use float
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    if groups:
        nodes = groups
        for agt in nodes:
            # inputs: edges run resource -> group
            for v in agt.function_inputs():
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(v.resource_type, agt, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    edges.append(Edge(v.resource_type, agt, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(v.resource_type, agt, v.quantity, qty_string))
            # outputs: edges run group -> resource
            for v in agt.function_outputs():
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(agt, v.resource_type, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    edges.append(Edge(agt, v.resource_type, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(agt, v.resource_type, v.quantity, qty_string))
    # scale edge widths to each edge's share of the total (max 50)
    for edge in edges:
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        edge.width = width
    nodes.extend(list(set(rtypes)))
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def network_params(cluster, toggle):
    """Build network-graph context at the function level.

    toggle selects the edge metric: "val" (computed value), "price", or
    plain quantity.  If FunctionResourceTypes exist they define the
    graph (resource -> function for inputs, function -> resource for
    outputs); otherwise the cluster's explicit FunctionResourceFlows do.
    Edge widths are scaled to each edge's share of the total.
    """
    template_params = {}
    frts = FunctionResourceType.objects.filter(
        function__cluster=cluster)
    symbol = "$"
    # fall back to "$" when the community has no unit_of_value
    if toggle == "val" or toggle == "price":
        try:
            symbol = cluster.community.unit_of_value.symbol
        except:
            pass
    edges = []
    rtypes = []
    # price totals use Decimal; value/quantity totals use float
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    if frts:
        nodes = list(cluster.functions.all())
        for fn in nodes:
            # inputs: edges run resource -> function
            for v in fn.inputs():
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(v.resource_type, fn, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    edges.append(Edge(v.resource_type, fn, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(v.resource_type, fn, v.quantity, qty_string))
            # outputs: edges run function -> resource
            for v in fn.outputs():
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(fn, v.resource_type, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    # BUG FIX: this output edge previously ran
                    # resource -> fn, inconsistent with the val and
                    # quantity branches
                    edges.append(Edge(fn, v.resource_type, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(fn, v.resource_type, v.quantity, qty_string))
    else:
        flows = FunctionResourceFlow.objects.filter(
            from_function__cluster=cluster)
        nodes = []
        edges = []
        for flow in flows:
            nodes.extend([flow.from_function, flow.to_function, flow.resource_type])
            if toggle == "val":
                value = flow.get_value()
                total += value
                val_string = "".join([symbol, split_thousands(value)])
                edges.append(Edge(flow.from_function, flow.resource_type, value, val_string))
                edges.append(Edge(flow.resource_type, flow.to_function, value, val_string))
            elif toggle == "price":
                # BUG FIX: this branch referenced the undefined name `v`
                # instead of `flow` (NameError at runtime)
                total += flow.price
                p_string = "".join([symbol, str(flow.price.quantize(Decimal(".01")))])
                edges.append(Edge(flow.from_function, flow.resource_type, flow.price, p_string))
                edges.append(Edge(flow.resource_type, flow.to_function, flow.price, p_string))
            else:
                total += flow.quantity
                qty_string = split_thousands(flow.quantity)
                edges.append(Edge(flow.from_function, flow.resource_type, flow.quantity, qty_string))
                edges.append(Edge(flow.resource_type, flow.to_function, flow.quantity, qty_string))
        nodes = list(set(nodes))
    # scale edge widths to each edge's share of the total (max 50)
    for edge in edges:
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        edge.width = width
    nodes.extend(list(set(rtypes)))
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def network(request, cluster_id, toggle="qty", level="fn"):
    """Network graph page with metric (qty/price/val) and level
    (function/agent/group) selectors.

    POST re-reads both selector forms and redirects to the URL encoding
    the chosen combination (post/redirect/get); GET renders the graph
    for the current toggle/level.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    toggle_form = QuantityPriceValueForm(
        initial={"toggle": toggle,},
        data=request.POST or None)
    level_form = None
    # the level selector only appears when the cluster has agents
    if cluster.agents():
        level_form = FunctionAgentLevelForm(
            initial={"level": level,},
            data=request.POST or None)
    if request.method == "POST":
        if level_form:
            if level_form.is_valid():
                level = level_form.cleaned_data["level"]
        if toggle_form.is_valid():
            toggle = toggle_form.cleaned_data["toggle"]
        return HttpResponseRedirect('/%s/%s/%s/%s/'
            % ('clusters/network', cluster_id, toggle, level))
    if level == "agt":
        template_params = agent_network_params(cluster, toggle)
    elif level == "grp":
        template_params = group_network_params(cluster, toggle)
    else:
        template_params = network_params(cluster, toggle)
    template_params["use_window_size"] = True
    template_params["toggle_form"] = toggle_form
    template_params["level_form"] = level_form
    return render_to_response("clusters/network.html",
        template_params,
        context_instance=RequestContext(request))
class FlowEdge(object):
    """A directed flow edge whose label names the resource(s) carried;
    width defaults to 1 and is rescaled later by the callers."""

    def __init__(self, from_node, to_node, label, quantity, width=1):
        self.from_node, self.to_node = from_node, to_node
        self.label = label
        self.quantity = quantity
        # drawing width; recomputed from quantity/total after construction
        self.width = width
def group_flow_params(cluster, toggle):
    """Build flow-graph context at the group level.

    Consecutive flows that share the same (from_function, to_function)
    pair are merged into one edge: labels are joined with ";" and the
    chosen metric ("val", "price", or quantity) is summed.
    NOTE(review): the merge only works if cluster.group_flows() returns
    flows ordered by that pair -- confirm the ordering guarantee.
    """
    template_params = {}
    flows = cluster.group_flows()
    nodes = []
    # price totals use Decimal; value/quantity totals use float
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    # first pass: collect nodes and the grand total of the chosen metric
    for flow in flows:
        nodes.extend([flow.from_function, flow.to_function])
        if toggle == "val":
            total += flow.get_value()
        elif toggle == "price":
            total += flow.price
        else:
            total += flow.quantity
    nodes = list(set(nodes))
    # second pass: build edges, folding runs with identical endpoints
    prev = None
    edges = []
    for flow in flows:
        if prev:
            prev_match = prev.to_function.id==flow.to_function.id and prev.from_function.id==flow.from_function.id
        else:
            prev_match=False
        if prev_match:
            # same endpoints as the previous flow: merge into that edge
            edge.label = ";".join([edge.label, flow.resource_type.name])
            if toggle == "val":
                edge.quantity += flow.get_value()
            elif toggle == "price":
                edge.quantity += flow.price
            else:
                edge.quantity += flow.quantity
        else:
            if toggle == "val":
                nbr = flow.get_value()
            elif toggle == "price":
                nbr = flow.price
            else:
                nbr = flow.quantity
            edge = FlowEdge(flow.from_function, flow.to_function, flow.resource_type.name, nbr)
            edges.append(edge)
        prev = flow
    # scale edge widths to each edge's share of the total (max 50)
    for edge in edges:
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        edge.width = width
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def agent_flow_params(cluster, toggle):
    """Build flow-graph context at the agent-function level.

    Same merging scheme as group_flow_params: consecutive flows sharing
    the same (from_function, to_function) pair are folded into one edge
    with joined labels and a summed metric.
    NOTE(review): only flows whose from_function is in this cluster are
    included, and the merge relies on the queryset's ordering by the
    endpoint pair -- confirm both.
    """
    template_params = {}
    flows = AgentResourceFlow.objects.filter(
        from_function__function__cluster=cluster)
    nodes = []
    # price totals use Decimal; value/quantity totals use float
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    # first pass: collect nodes and the grand total of the chosen metric
    for flow in flows:
        nodes.extend([flow.from_function, flow.to_function])
        if toggle == "val":
            total += flow.get_value()
        elif toggle == "price":
            total += flow.price
        else:
            total += flow.quantity
    nodes = list(set(nodes))
    # second pass: build edges, folding runs with identical endpoints
    prev = None
    edges = []
    for flow in flows:
        if prev:
            prev_match = prev.to_function.id==flow.to_function.id and prev.from_function.id==flow.from_function.id
        else:
            prev_match=False
        if prev_match:
            # same endpoints as the previous flow: merge into that edge
            edge.label = ";".join([edge.label, flow.resource_type.name])
            if toggle == "val":
                edge.quantity += flow.get_value()
            elif toggle == "price":
                edge.quantity += flow.price
            else:
                edge.quantity += flow.quantity
        else:
            if toggle == "val":
                nbr = flow.get_value()
            elif toggle == "price":
                nbr = flow.price
            else:
                nbr = flow.quantity
            edge = FlowEdge(flow.from_function, flow.to_function, flow.resource_type.name, nbr)
            edges.append(edge)
        prev = flow
    # scale edge widths to each edge's share of the total (max 50)
    for edge in edges:
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        edge.width = width
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def flow_params(cluster, toggle):
    """Build template params for the function-level flow diagram.

    Like group_flow_params()/agent_flow_params() but over
    FunctionResourceFlow records, and each edge label also embeds the
    formatted number (with the community's currency symbol for "val"
    and "price").  *toggle*: "val", "price", or anything else for
    quantity.  Returns {'cluster', 'nodes', 'edges'}.
    """
    template_params = {}
    flows = FunctionResourceFlow.objects.filter(
        from_function__cluster=cluster)
    symbol = "$"
    if toggle == "val" or toggle == "price":
        # Fall back to "$" when the community has no unit of value.
        # (Fix: was a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit.)
        try:
            symbol = cluster.community.unit_of_value.symbol
        except Exception:
            pass
    nodes = []
    # Prices are summed as Decimal; values/quantities as floats.
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    for flow in flows:
        nodes.extend([flow.from_function, flow.to_function])
        if toggle == "val":
            total += flow.get_value()
        elif toggle == "price":
            total += flow.price
        else:
            total += flow.quantity
    # De-duplicate the endpoint nodes.
    nodes = list(set(nodes))
    prev = None
    edges = []
    # Merge consecutive flows sharing both endpoints into one edge;
    # assumes such flows are adjacent in queryset order.
    for flow in flows:
        if prev:
            prev_match = prev.to_function.id==flow.to_function.id and prev.from_function.id==flow.from_function.id
        else:
            prev_match=False
        if prev_match:
            # Fold this flow into the FlowEdge created previously,
            # appending "name number" to the ';'-separated label.
            if toggle == "val":
                value = flow.get_value()
                if value:
                    label_nbr = "".join([symbol, split_thousands(value)])
                else:
                    label_nbr = ""
                edge.quantity += value
            elif toggle == "price":
                edge.quantity += flow.price
                if flow.price:
                    label_nbr = "".join([
                        symbol,
                        str(flow.price.quantize(Decimal(".01")))])
                else:
                    label_nbr = ""
            else:
                edge.quantity += flow.quantity
                if flow.quantity:
                    label_nbr = split_thousands(flow.quantity)
                else:
                    label_nbr = ""
            new_label = "".join([
                flow.resource_type.name,
                " ",
                label_nbr])
            edge.label = ";".join([edge.label, new_label])
        else:
            # Start a new edge; label is the resource name plus the
            # formatted number when it is non-zero.
            if toggle == "val":
                nbr = flow.get_value()
                if nbr:
                    label = "".join([
                        flow.resource_type.name,
                        " ",
                        symbol,
                        split_thousands(nbr)])
                else:
                    label = flow.resource_type.name
            elif toggle == "price":
                nbr = flow.price
                if nbr:
                    label = "".join([
                        flow.resource_type.name,
                        " ",
                        symbol,
                        str(nbr.quantize(Decimal(".01")))])
                else:
                    label = flow.resource_type.name
            else:
                nbr = flow.quantity
                if nbr:
                    label = "".join([
                        flow.resource_type.name,
                        " ",
                        split_thousands(nbr)])
                else:
                    label = flow.resource_type.name
            edge = FlowEdge(flow.from_function, flow.to_function, label, nbr)
            edges.append(edge)
        prev = flow
    # Drawing width: the edge's share of the total, scaled by 50.
    for edge in edges:
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
        width = int(width)
        edge.width = width
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def flows(request, cluster_id, toggle="qty", level="fn"):
    """Show a cluster's flow diagram at function, group or agent level.

    *toggle* picks the displayed measure ("qty", "price" or "val");
    *level* picks the aggregation ("fn", "grp" or "agt").  POSTing the
    toggle/level forms redirects back here with the new selections
    encoded in the URL.  (Stale commented-out redirect code removed.)
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    toggle_form = QuantityPriceValueForm(
        initial={"toggle": toggle,},
        data=request.POST or None)
    level_form = None
    # The level selector is only offered when the cluster has agents.
    if cluster.agents():
        level_form = FunctionAgentLevelForm(
            initial={"level": level,},
            data=request.POST or None)
    if request.method == "POST":
        if level_form:
            if level_form.is_valid():
                level = level_form.cleaned_data["level"]
        if toggle_form.is_valid():
            toggle = toggle_form.cleaned_data["toggle"]
        return HttpResponseRedirect('/%s/%s/%s/%s/'
            % ('clusters/flows', cluster_id, toggle, level))
    if level == "agt":
        template_params = agent_flow_params(cluster, toggle)
    elif level == "grp":
        template_params = group_flow_params(cluster, toggle)
    else:
        template_params = flow_params(cluster, toggle)
    template_params["use_window_size"] = True
    template_params["toggle_form"] = toggle_form
    template_params["level_form"] = level_form
    return render_to_response("clusters/flows.html",
        template_params,
        context_instance=RequestContext(request))
def iotable(request, cluster_id):
    """Render the input/output table for one cluster."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    context = {
        "cluster": cluster,
        "iotable": input_output_table(cluster),
    }
    return render_to_response("clusters/iotable.html", context,
        context_instance=RequestContext(request))
def explore(request, cluster_id):
    """Render the interactive explore view for one cluster."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    params = explore_params(cluster)
    return render_to_response(
        "clusters/explore.html",
        params,
        context_instance=RequestContext(request))
class SankeyLink(object):
    """One link of a sankey diagram: source/target node indexes, a
    numeric value, and an optional label."""

    def __init__(self, source, target, value, label=None):
        self.source, self.target = source, target
        self.value, self.label = value, label
def sankey_params(cluster, toggle):
    """Build node/link params for the cluster's sankey diagram.

    If the cluster has FunctionResourceType records, links run between
    functions and their input/output resource types.  Otherwise links
    are derived from FunctionResourceFlow records, routed through the
    resource-type node and merged when they share endpoints.
    *toggle* selects the link value: "val" -> get_value(), "price",
    otherwise quantity.

    Fixes: removed a leftover ``import pdb; pdb.set_trace()`` that
    froze the server whenever the flow branch ran; removed unused
    ``symbol``/``rtypes`` locals and a redundant ``edges = []``.
    """
    template_params = {}
    frts = FunctionResourceType.objects.filter(
        function__cluster=cluster)
    edges = []
    if frts:
        link_nodes = cluster.fr_graph_nodes()
        nodes = list(cluster.functions.all())
        for fn in nodes:
            # Inputs point from the resource type into the function.
            for v in fn.inputs():
                if toggle == "val":
                    qty = v.get_value()
                elif toggle == "price":
                    qty = v.price
                else:
                    qty = v.quantity
                from_node = link_nodes.index(v.resource_type)
                to_node = link_nodes.index(fn)
                edges.append(SankeyLink(from_node, to_node, qty))
            # Outputs point from the function to the resource type.
            for v in fn.outputs():
                if toggle == "val":
                    qty = v.get_value()
                elif toggle == "price":
                    qty = v.price
                else:
                    qty = v.quantity
                to_node = link_nodes.index(v.resource_type)
                from_node = link_nodes.index(fn)
                edges.append(SankeyLink(from_node, to_node, qty))
    else:
        link_nodes = cluster.flow_graph_nodes()
        flows = FunctionResourceFlow.objects.filter(
            from_function__cluster=cluster)
        for flow in flows:
            if toggle == "val":
                nbr = flow.get_value()
            elif toggle == "price":
                nbr = flow.price
            else:
                nbr = flow.quantity
            from_index = link_nodes.index(flow.from_function)
            to_index = link_nodes.index(flow.to_function)
            resource_index = link_nodes.index(flow.resource_type)
            # Each flow becomes two links routed through the resource
            # node; merge into existing links with the same endpoints.
            if edges:
                prev_match_from = None
                prev_match_to = None
                for prev in edges:
                    if not prev_match_from:
                        if from_index == prev.source and resource_index == prev.target:
                            prev_match_from = prev
                            prev.value += nbr
                    if not prev_match_to:
                        if resource_index == prev.source and to_index == prev.target:
                            prev_match_to = prev
                            prev.value += nbr
                    if prev_match_from and prev_match_to:
                        break
            if not prev_match_from:
                edges.append(SankeyLink(from_index, resource_index, nbr))
            if not prev_match_to:
                edges.append(SankeyLink(resource_index, to_index, nbr))
    template_params = {
        'cluster': cluster,
        'net_nodes': link_nodes,
        'net_links': edges,
    }
    return template_params
def sankey(request, cluster_id):
    """Render the sankey diagram for one cluster (quantity measure only)."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    params = sankey_params(cluster, "qty")
    return render_to_response(
        "clusters/sankey.html",
        params,
        context_instance=RequestContext(request))
def diagnostics(request, cluster_id, level="fn"):
    """Show production/consumption mismatch diagnostics for a cluster.

    *level* is "fn" (function level) or "agt" (agent level); POSTing
    the level form redirects back with the new level in the URL.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    symbol = "$"
    # Fall back to "$" when the community has no unit of value.
    # (Fix: was a bare ``except:``, which also caught KeyboardInterrupt.)
    try:
        symbol = cluster.community.unit_of_value.symbol
    except Exception:
        pass
    level_form = None
    if cluster.agents():
        level_form = FunctionAgentTwoLevelForm(
            initial={"level": level,},
            data=request.POST or None)
    if request.method == "POST":
        if level_form:
            if level_form.is_valid():
                level = level_form.cleaned_data["level"]
                return HttpResponseRedirect('/%s/%s/%s/'
                    % ('clusters/diagnostics', cluster_id, level))
    function_io_vs_flows = []
    if level == "agt":
        function_production_without_consumption = cluster.agent_function_production_without_consumption()
        function_consumption_without_production = cluster.agent_function_consumption_without_production()
        # NOTE: an agent-level io-vs-flows comparison is not implemented.
    else:
        function_production_without_consumption = cluster.function_production_without_consumption()
        function_consumption_without_production = cluster.function_consumption_without_production()
        if cluster.has_flows():
            function_io_vs_flows = cluster.function_io_vs_flows()
    return render_to_response("clusters/diagnostics.html",{
        "cluster": cluster,
        "symbol": symbol,
        "level_form": level_form,
        "function_production_without_consumption": function_production_without_consumption,
        "function_consumption_without_production": function_consumption_without_production,
        "function_io_vs_flows": function_io_vs_flows,
        }, context_instance=RequestContext(request))
def model_errors(request, cluster_id):
    """Report modelling problems detected for one cluster."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    context = {
        "cluster": cluster,
        "disjoints": cluster.disjoints(),
        "missing_function_numbers": cluster.missing_function_numbers(),
        "missing_agent_numbers": cluster.missing_agent_numbers(),
        "function_agent_diffs": cluster.function_agent_diffs(),
    }
    return render_to_response("clusters/model_errors.html", context,
        context_instance=RequestContext(request))
def economic_functions(request, cluster_id):
    """List a cluster's functions, resources (annotated with their
    in-cluster producers and consumers) and flows."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    functions = cluster.functions.all()
    resources = cluster.resources()
    # Annotate each resource with its consumers/producers in this cluster.
    for resource in resources:
        resource.my_consumers = resource.cluster_consumers(cluster)
        resource.my_producers = resource.cluster_producers(cluster)
    flows = FunctionResourceFlow.objects.filter(
        from_function__cluster=cluster)
    context = {
        "cluster": cluster,
        "functions": functions,
        "resources": resources,
        "flows": flows,
    }
    return render_to_response("clusters/economic_functions.html", context,
        context_instance=RequestContext(request))
def economic_function(request, function_id):
    """Render a single economic function."""
    fn = get_object_or_404(EconomicFunction, pk=function_id)
    return render_to_response(
        "clusters/economic_functions.html",
        {"economic_function": fn},
        context_instance=RequestContext(request))
@login_required
def edit_function(request, function_id):
    """Edit an EconomicFunction; on success return to the cluster's
    function editor."""
    fn = get_object_or_404(EconomicFunction, pk=function_id)
    cluster = fn.cluster
    function_form = EconomicFunctionForm(data=request.POST or None, instance=fn)
    if request.method == "POST" and function_form.is_valid():
        function_form.save()
        return HttpResponseRedirect(
            '/%s/%s/' % ('clusters/editclusterfunctions', cluster.id))
    return render_to_response("clusters/edit_function.html", {
        "function": fn,
        "cluster": cluster,
        "function_aspect_name": cluster.function_aspect_name,
        "function_form": function_form,
        }, context_instance=RequestContext(request))
@login_required
def delete_function(request, function_id):
    """Delete an EconomicFunction (POST only) and redirect to the
    cluster's function editor; a GET just re-renders the edit page.

    Fix: ``cluster`` was previously bound only inside the POST branch,
    so any non-POST request raised UnboundLocalError at the final
    render.  Both the function and its cluster are now looked up first.
    """
    fn = get_object_or_404(EconomicFunction, pk=function_id)
    cluster = fn.cluster
    if request.method == "POST":
        fn.delete()
        return HttpResponseRedirect('/%s/%s/'
            % ('clusters/editclusterfunctions', cluster.id))
    return render_to_response("clusters/edit_cluster_functions.html",{
        "cluster": cluster,
        }, context_instance=RequestContext(request))
@login_required
def delete_function_confirmation(request, function_id):
    """Show everything that would be affected by deleting a function."""
    fn = get_object_or_404(EconomicFunction, pk=function_id)
    function_resources = fn.resources.all()
    incoming_flows = fn.incoming_flows.all()
    outgoing_flows = fn.outgoing_flows.all()
    agent_functions = fn.agents.all()
    # Deletion has consequences when anything still references the function.
    consequences = bool(
        function_resources or incoming_flows
        or outgoing_flows or agent_functions)
    return render_to_response("clusters/delete_function_confirmation.html", {
        "function": fn,
        "cluster": fn.cluster,
        "consequences": consequences,
        "function_resources": function_resources,
        "incoming_flows": incoming_flows,
        "outgoing_flows": outgoing_flows,
        "agent_functions": agent_functions,
        }, context_instance=RequestContext(request))
@login_required
def new_function(request, cluster_id):
    """Show the forms for creating a function with its resources and
    agents; choice lists are limited to the cluster's community."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    function_form = EconomicFunctionForm()
    ResourceFormSet = formset_factory(FunctionResourceTypeFormX, extra=5)
    resource_formset = ResourceFormSet()
    rtypes = CommunityResourceType.objects.filter(community=cluster.community)
    resource_choices = [('', '----------')] + [
        (rt.resource_type.id, rt.resource_type.name) for rt in rtypes]
    for form in resource_formset.forms:
        form.fields['resource_type'].widget.set_local_choices(resource_choices)
    AgentFormSet = formset_factory(FunctionAgentForm, extra=5)
    agent_formset = AgentFormSet()
    agents = CommunityAgent.objects.filter(community=cluster.community)
    agent_choices = [('', '----------')] + [
        (agt.agent.id, agt.agent.name) for agt in agents]
    for form in agent_formset.forms:
        form.fields['agent'].choices = agent_choices
    return render_to_response("clusters/new_function.html", {
        "cluster": cluster,
        "function_form": function_form,
        "resource_formset": resource_formset,
        "agent_formset": agent_formset,
        }, context_instance=RequestContext(request))
@login_required
def inline_new_function(request, cluster_id):
    # Create an EconomicFunction from an inline (embedded) form and
    # bounce back to the page named in the "next" POST field.
    # NOTE(review): returns None (an error response in Django) on GET
    # or when the form is invalid -- confirm callers always POST valid
    # data.
    if request.method == "POST":
        next = request.POST.get("next")
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        form = EconomicFunctionForm(request.POST, prefix="function")
        #import pdb; pdb.set_trace()
        if form.is_valid():
            fun = form.save(commit=False)
            fun.cluster = cluster
            fun.save()
            return HttpResponseRedirect(next)
@login_required
def new_agent_function(request, cluster_id):
    # Link an agent to a function from an inline form.  A function with
    # the same name already in this cluster is re-used (its aspect is
    # updated if a different one was supplied); otherwise a new function
    # is created.  Redirects to the "next" POST field on success.
    # NOTE(review): returns None on GET or when the form is invalid --
    # confirm callers always POST valid data.
    if request.method == "POST":
        next = request.POST.get("next")
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        form = InlineAgentFunctionForm(data=request.POST, cluster=cluster, prefix="function")
        #import pdb; pdb.set_trace()
        if form.is_valid():
            data = form.cleaned_data
            agent = data["agent"]
            name = data["name"]
            aspect = data["aspect"]
            funs = EconomicFunction.objects.filter(
                cluster=cluster,
                name=name)
            if funs:
                # Re-use the first function with this name.
                fun = funs[0]
                if aspect:
                    if aspect != fun.aspect:
                        fun.aspect = aspect
                        fun.save()
            else:
                fun = EconomicFunction(
                    name=name,
                    cluster=cluster,
                    aspect=aspect)
                fun.save()
            af = AgentFunction(
                agent=agent,
                function=fun)
            af.save()
            return HttpResponseRedirect(next)
@login_required
def inline_new_agent_function(request, cluster_id, agent_id):
    # Attach a (possibly new) function to *agent_id* from an inline
    # form, then return to the agent's cluster edit page.  Same
    # re-use-or-create logic as new_agent_function().
    # NOTE(review): returns None on GET -- confirm callers always POST.
    if request.method == "POST":
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        agent = get_object_or_404(EconomicAgent, pk=agent_id)
        form = AgentFunctionCreationForm(data=request.POST, prefix="function")
        #import pdb; pdb.set_trace()
        #print "b4 form validity check"
        if form.is_valid():
            #print "after form validity check"
            name = form.cleaned_data["name"]
            aspect = form.cleaned_data["aspect"]
            funs = EconomicFunction.objects.filter(
                cluster=cluster,
                name=name)
            if funs:
                # Re-use the first function with this name, updating
                # its aspect if a different one was supplied.
                fun = funs[0]
                if aspect:
                    if aspect != fun.aspect:
                        fun.aspect = aspect
                        fun.save()
            else:
                fun = EconomicFunction(
                    name=name,
                    cluster=cluster,
                    aspect=aspect)
                fun.save()
            af = AgentFunction(
                agent=agent,
                function=fun)
            af.save()
        #else:
        #    print "invalid form:", form
        # Note: an invalid form silently falls through to the redirect.
        return HttpResponseRedirect('/%s/%s/%s/'
            % ('clusters/editclusteragent', cluster_id, agent_id))
@login_required
def new_resource(request, cluster_id):
    # Form page for creating a resource type.
    # NOTE(review): the POST branch only checks whether the name already
    # exists and never persists anything -- the save calls are commented
    # out below, so this view appears unfinished.  Confirm before
    # relying on it.
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    form = EconomicResourceTypeFormX(data=request.POST or None)
    if request.method == "POST":
        if form.is_valid():
            data = form.cleaned_data
            name = data['name']
            try:
                resource = EconomicResourceType.objects.get(name=name)
            except EconomicResourceType.DoesNotExist:
                pass
            #resource = form.save()
            #crt, created = CommunityResourceType.objects.get_or_create(community=cluster.community, resource_type=resource)
    return render_to_response("clusters/new_resource.html",{
        "form": form,
        }, context_instance=RequestContext(request))
@login_required
def new_community(request):
    """Create a new Community."""
    form = CommunityForm(data=request.POST or None)
    map_key = settings.GOOGLE_API_KEY
    if request.method == "POST" and form.is_valid():
        form.save()
        return redirect("clusters")
    return render_to_response("clusters/new_community.html", {
        "form": form,
        "map_key": map_key,
        }, context_instance=RequestContext(request))
@login_required
def new_cluster(request, community_id):
    """Create a new Cluster inside the given community."""
    community = get_object_or_404(Community, pk=community_id)
    form = ClusterForm(data=request.POST or None)
    if request.method == "POST" and form.is_valid():
        cluster = form.save(commit=False)
        cluster.community = community
        cluster.save()
        return redirect("clusters")
    return render_to_response("clusters/new_cluster.html", {
        "form": form,
        "community": community,
        }, context_instance=RequestContext(request))
@login_required
def edit_cluster(request, cluster_id):
    """Edit a Cluster, keeping its community association intact."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    form = ClusterForm(instance=cluster, data=request.POST or None)
    if request.method == "POST" and form.is_valid():
        cluster = form.save(commit=False)
        cluster.community = community
        cluster.save()
        return HttpResponseRedirect(
            '/%s/%s/' % ('clusters/cluster', cluster_id))
    return render_to_response("clusters/edit_cluster.html", {
        "form": form,
        "cluster": cluster,
        }, context_instance=RequestContext(request))
@login_required
def edit_community(request, community_id):
    """Edit a Community, centering the map on its stored coordinates."""
    community = get_object_or_404(Community, pk=community_id)
    form = CommunityForm(instance=community, data=request.POST or None)
    map_center = "0, 0"
    if community.latitude and community.longitude:
        map_center = "%s,%s" % (community.latitude, community.longitude)
    map_key = settings.GOOGLE_API_KEY
    if request.method == "POST" and form.is_valid():
        form.save()
        return redirect("clusters")
    return render_to_response("clusters/edit_community.html", {
        "form": form,
        "community": community,
        "map_center": map_center,
        "map_key": map_key,
        }, context_instance=RequestContext(request))
@login_required
def new_cluster_agent(request, cluster_id):
    """Create (or re-use) an EconomicAgent for the cluster's community,
    then jump to the agent's cluster edit page.

    An existing agent with the same name is re-used rather than
    duplicated; only its CommunityAgent record is created/updated.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    area_name = community.agent_geographic_area_name
    map_center = ",".join([str(community.latitude), str(community.longitude)])
    map_key = settings.GOOGLE_API_KEY
    # Zoom out one level from the community's own map zoom.
    zoom_level = community.map_zoom_level - 1
    form = EconomicAgentForm(data=request.POST or None)
    # '~'-separated names feed the client-side duplicate-name check.
    agent_names = '~'.join([agt.name for agt in EconomicAgent.objects.all()])
    if request.method == "POST":
        if form.is_valid():
            data = form.cleaned_data
            name = data['name']
            try:
                agent = EconomicAgent.objects.get(name=name)
            except EconomicAgent.DoesNotExist:
                agent = form.save()
            ca, created = CommunityAgent.objects.get_or_create(community=cluster.community, agent=agent)
            ca.group = data["group"]
            # Geographic fields only apply when the community names an area.
            if area_name:
                ca.geographic_area = data["geographic_area"]
                ca.region_latitude = data["region_latitude"]
                ca.region_longitude = data["region_longitude"]
            ca.save()
            return HttpResponseRedirect('/%s/%s/%s/'
                % ('clusters/editclusteragent', cluster_id, agent.id))
    return render_to_response("clusters/new_cluster_agent.html",{
        "cluster": cluster,
        "map_center": map_center,
        "map_key": map_key,
        "zoom_level": zoom_level,
        "form": form,
        "agent_names": agent_names,
        "area_name": area_name,
        }, context_instance=RequestContext(request))
@login_required
def edit_cluster_agent(request, cluster_id, agent_id):
    """Edit page for one agent within a cluster: annotates the agent's
    functions with their resources, inline forms and outliers, and
    merges in the agent network diagram params."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    agent = get_object_or_404(EconomicAgent, pk=agent_id)
    community = cluster.community
    agent_communities = agent.communities.all()
    # Address editing is only allowed when this community is the agent's
    # single community, so edits cannot clobber another community's data.
    edit_address = True
    if agent_communities.count() > 1:
        edit_address = False
    if agent_communities[0].community.id != community.id:
        edit_address = False
    #import pdb; pdb.set_trace()
    agent.cluster_funs = agent.functions.filter(
        function__cluster=cluster)
    for cf in agent.cluster_funs:
        cf.rsrcs = cf.function.resources.all()
        if cf.rsrcs:
            # Function-level resources exist: build a per-resource form
            # seeded with this agent's AgentFunction id.
            for res in cf.rsrcs:
                agent_function = agent.functions.get(function=res.function)
                init = {"agent_function_id": agent_function.id,}
                res.agent_resource_form = AgentFunctionResourceForm(res, initial=init)
                res.agent_resource_list = res.function_resources_for_agent(agent)
        else:
            # No function-level resources: fall back to the agent's own
            # resources and collect any statistical outliers.
            cf.agent_resources = cf.function_resources.all()
            init = {"agent_function_id": cf.id,}
            cf.agent_resource_form = AgentFunctionResourceForm(initial=init)
            outliers = []
            candidates = cf.function_resources.all()
            for c in candidates:
                if c.is_outlier():
                    outliers.append(c)
            cf.outliers = outliers
    new_function_form = AgentFunctionCreationForm(prefix="function")
    # '~'-separated name lists feed client-side autocompletes; functions
    # the agent already has are excluded.
    resource_names = '~'.join([res.name for res in EconomicResourceType.objects.all()])
    used = [(af.function.id) for af in agent.functions.all()]
    function_names = '~'.join([fn.name for fn in cluster.functions.all().exclude(id__in=used)])
    template_params = agent_network_params(cluster, "qty")
    template_params["cluster"] = cluster
    template_params["agent"] = agent
    template_params["edit_address"] = edit_address
    template_params["cluster_funs"] = agent.cluster_funs
    template_params["new_function_form"] = new_function_form
    template_params["resource_names"] = resource_names
    template_params["function_names"] = function_names
    function_aspect_name = cluster.function_aspect_name
    resource_aspect_name = cluster.community.resource_aspect_name
    template_params["function_aspect_name"] = function_aspect_name
    template_params["resource_aspect_name"] = resource_aspect_name
    return render_to_response("clusters/edit_cluster_agent.html",
        template_params,
        context_instance=RequestContext(request))
@login_required
def edit_agent_address(request, cluster_id, agent_id):
    """Edit an agent's address plus its community-specific location
    fields (group, geographic area, region lat/long)."""
    agent = get_object_or_404(EconomicAgent, pk=agent_id)
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    ca = CommunityAgent.objects.get(community=community, agent=agent)
    area_name = community.agent_geographic_area_name
    map_center = "0, 0"
    if community.latitude and community.longitude:
        map_center = ",".join([str(community.latitude), str(community.longitude)])
    map_key = settings.GOOGLE_API_KEY
    zoom_level = 0
    if community.map_zoom_level:
        zoom_level = community.map_zoom_level - 1
    # Seed the agent form with the CommunityAgent's location fields.
    init = {
        "group": ca.group,
        "geographic_area": ca.geographic_area,
        "region_latitude": ca.region_latitude,
        "region_longitude": ca.region_longitude,
    }
    agent_form = EconomicAgentForm(instance=agent, initial=init, data=request.POST or None)
    if request.method == "POST":
        if agent_form.is_valid():
            data = agent_form.cleaned_data
            ca.group = data["group"]
            # Geographic fields only apply when the community names an area.
            if area_name:
                ca.geographic_area = data["geographic_area"]
                ca.region_latitude = data["region_latitude"]
                ca.region_longitude = data["region_longitude"]
            ca.save()
            agent_form.save()
            return HttpResponseRedirect('/%s/%s/'
                % ('clusters/clusteragents', cluster_id))
    return render_to_response("clusters/edit_agent_address.html",{
        "cluster": cluster,
        "agent": agent,
        "agent_form": agent_form,
        "map_center": map_center,
        "map_key": map_key,
        "zoom_level": zoom_level,
        "area_name": area_name,
        }, context_instance=RequestContext(request))
@login_required
def edit_community_agent(request, cluster_id, agent_id):
    """Edit the community-specific record (CommunityAgent) of an agent."""
    agent = get_object_or_404(EconomicAgent, pk=agent_id)
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    ca = CommunityAgent.objects.get(community=community, agent=agent)
    area_name = community.agent_geographic_area_name
    map_center = "0, 0"
    if community.latitude and community.longitude:
        map_center = "%s,%s" % (community.latitude, community.longitude)
    map_key = settings.GOOGLE_API_KEY
    zoom_level = community.map_zoom_level - 1 if community.map_zoom_level else 0
    agent_form = EditCommunityAgentForm(instance=ca, data=request.POST or None)
    if request.method == "POST" and agent_form.is_valid():
        agent_form.save()
        return HttpResponseRedirect(
            '/%s/%s/' % ('clusters/clusteragents', cluster_id))
    return render_to_response("clusters/edit_community_agent.html", {
        "cluster": cluster,
        "community": community,
        "community_agent": ca,
        "agent": agent,
        "agent_form": agent_form,
        "map_center": map_center,
        "map_key": map_key,
        "zoom_level": zoom_level,
        "area_name": area_name,
        }, context_instance=RequestContext(request))
def json_agent_address(request, agent_name):
    """Return the named agent's address as serialized JSON.

    serializers.serialize needs an iterable, hence filter() not get().
    """
    qs = EconomicAgent.objects.filter(name=agent_name)
    payload = serializers.serialize("json", qs, fields=('address',))
    return HttpResponse(payload, mimetype="text/json-comment-filtered")
def json_resource_unit(request, name):
    """Return the named resource type's unit of quantity as JSON.

    serializers.serialize needs an iterable, hence filter() not get().
    """
    qs = EconomicResourceType.objects.filter(name=name)
    payload = serializers.serialize("json", qs, fields=('unit_of_quantity',))
    return HttpResponse(payload, mimetype="text/json-comment-filtered")
def json_resource_aspect(request, name, community_id):
    """Return the community-specific aspect of a named resource type as JSON."""
    community = get_object_or_404(Community, id=community_id)
    ert = EconomicResourceType.objects.filter(name=name)[0]
    qs = CommunityResourceType.objects.filter(
        community=community, resource_type=ert)
    payload = serializers.serialize("json", qs, fields=('aspect',))
    return HttpResponse(payload, mimetype="text/json-comment-filtered")
def json_function_aspect(request, function_name, cluster_id):
    """Return a function's aspect within a cluster as JSON."""
    cluster = get_object_or_404(Cluster, id=cluster_id)
    qs = EconomicFunction.objects.filter(cluster=cluster, name=function_name)
    payload = serializers.serialize("json", qs, fields=('aspect',))
    return HttpResponse(payload, mimetype="text/json-comment-filtered")
def change_function_resource_amount(request):
    """AJAX endpoint: set a FunctionResourceType's quantity from POST."""
    frt = get_object_or_404(FunctionResourceType, pk=request.POST.get("id"))
    quantity = int(request.POST.get("quantity"))
    # Only hit the database when the value actually changed.
    if frt.quantity != quantity:
        frt.quantity = quantity
        frt.save()
    return HttpResponse("ok", mimetype="text/plain")
def change_function_resource_value(request):
    """AJAX endpoint: set a FunctionResourceType's value from POST."""
    frt = get_object_or_404(FunctionResourceType, pk=request.POST.get("id"))
    value = int(request.POST.get("value"))
    # Only hit the database when the value actually changed.
    if frt.value != value:
        frt.value = value
        frt.save()
    return HttpResponse("ok", mimetype="text/plain")
def change_function_resource_price(request):
    """AJAX endpoint: set a FunctionResourceType's price from POST."""
    frt = get_object_or_404(FunctionResourceType, pk=request.POST.get("id"))
    price = Decimal(request.POST.get("price"))
    # Only hit the database when the value actually changed.
    if frt.price != price:
        frt.price = price
        frt.save()
    return HttpResponse("ok", mimetype="text/plain")
def change_agent_function_resource_amount(request):
    """AJAX endpoint: set an AgentFunctionResourceType's quantity from POST."""
    frt = get_object_or_404(AgentFunctionResourceType, pk=request.POST.get("id"))
    quantity = int(request.POST.get("quantity"))
    # Only hit the database when the value actually changed.
    if frt.quantity != quantity:
        frt.quantity = quantity
        frt.save()
    return HttpResponse("ok", mimetype="text/plain")
def change_agent_function_resource_value(request):
    """AJAX endpoint: set an AgentFunctionResourceType's value from POST."""
    frt = get_object_or_404(AgentFunctionResourceType, pk=request.POST.get("id"))
    value = int(request.POST.get("value"))
    # Only hit the database when the value actually changed.
    if frt.value != value:
        frt.value = value
        frt.save()
    return HttpResponse("ok", mimetype="text/plain")
def change_agent_function_resource_price(request):
    """AJAX endpoint: set an AgentFunctionResourceType's price from POST."""
    frt = get_object_or_404(AgentFunctionResourceType, pk=request.POST.get("id"))
    price = Decimal(request.POST.get("price"))
    # Only hit the database when the value actually changed.
    if frt.price != price:
        frt.price = price
        frt.save()
    return HttpResponse("ok", mimetype="text/plain")
def delete_function_resource(request, id):
    """Remove a FunctionResourceType; return to the cluster's function editor."""
    frt = get_object_or_404(FunctionResourceType, pk=id)
    cluster_id = frt.function.cluster.id
    frt.delete()
    return HttpResponseRedirect(
        '/%s/%s/' % ('clusters/editclusterfunctions', cluster_id))
def delete_agent_function_resource(request, id):
    """Remove an AgentFunctionResourceType; return to the agent's editor."""
    frt = get_object_or_404(AgentFunctionResourceType, pk=id)
    cluster_id = frt.agent_function.function.cluster.id
    agent_id = frt.agent_function.agent.id
    frt.delete()
    return HttpResponseRedirect(
        '/%s/%s/%s/' % ('clusters/editclusteragent', cluster_id, agent_id))
@login_required
def inline_new_resource(request, cluster_id):
    # Create (or re-use) a resource type from an inline form, attach it
    # to the cluster's community, update its aspect if needed, then
    # redirect to the "next" POST field.
    # NOTE(review): returns None on GET or when the form is invalid --
    # confirm callers always POST valid data.
    if request.method == "POST":
        next = request.POST.get("next")
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        form = EconomicResourceTypeForm(request.POST, prefix="resource")
        if form.is_valid():
            data = form.cleaned_data
            name = data['name']
            aspect = data['aspect']
            # Re-use an existing resource type with the same name.
            try:
                resource = EconomicResourceType.objects.get(name=name)
            except EconomicResourceType.DoesNotExist:
                resource = form.save()
            crt, created = CommunityResourceType.objects.get_or_create(
                community=cluster.community, resource_type=resource)
            if aspect:
                if aspect != crt.aspect:
                    crt.aspect = aspect
                    crt.save()
            return HttpResponseRedirect(next)
@login_required
def inline_agent_resource(request, cluster_id, agent_id, parent_id):
    # Attach a resource type to an agent's function from an inline form.
    # *parent_id* == 0 means no parent resource type.  A new resource
    # type is created only when the name is unknown; it is then linked
    # to the agent's function and the community.
    # NOTE(review): returns None on GET or invalid form.
    if request.method == "POST":
        agent = get_object_or_404(EconomicAgent, pk=agent_id)
        parent_id = int(parent_id)
        if parent_id:
            parent = get_object_or_404(EconomicResourceType, pk=parent_id)
        else:
            parent = None
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        form = AgentFunctionResourceForm(function_resource=None, data=request.POST)
        #import pdb; pdb.set_trace()
        if form.is_valid():
            data = form.cleaned_data
            name = data['name']
            role = data['role']
            quantity = data['quantity']
            price = data['price']
            agent_function_id = data['agent_function_id']
            new_resource = True
            #import pdb; pdb.set_trace()
            try:
                resource = EconomicResourceType.objects.get(name=name)
                new_resource = False
                # NOTE(review): every branch below re-assigns False to a
                # flag that is already False, so the parent checks have
                # no effect -- possibly one of them was meant to set
                # new_resource = True.  Confirm the intended behavior.
                if parent:
                    if resource.id == parent.id:
                        new_resource = False
                    elif resource.parent:
                        if resource.parent.id == parent.id or resource.is_child_of(parent):
                            new_resource = False
            except EconomicResourceType.DoesNotExist:
                pass
            if new_resource:
                resource = EconomicResourceType(name=name, parent=parent)
                resource.save()
            agent_function = AgentFunction.objects.get(id=agent_function_id)
            AgentFunctionResourceType(
                resource_type=resource,
                agent_function=agent_function,
                role=role,
                quantity=quantity,
                price=price).save()
            crt, created = CommunityResourceType.objects.get_or_create(community=cluster.community, resource_type=resource)
        return HttpResponseRedirect('/%s/%s/%s/'
            % ('clusters/editclusteragent', cluster_id, agent.id))
@login_required
def new_function_resource(request, function_id):
    """POST handler: attach a new resource type record to a function,
    then return to the cluster's function editor.

    Fix: removed an unused ``data = form.cleaned_data`` local -- the
    ModelForm save provides everything needed.
    NOTE(review): returns None (an error response in Django) on GET or
    when the form is invalid -- confirm callers always POST valid data.
    """
    if request.method == "POST":
        fun = get_object_or_404(EconomicFunction, pk=function_id)
        community = fun.cluster.community
        form = FunctionResourceTypeForm(community=community, data=request.POST)
        if form.is_valid():
            fr = form.save(commit=False)
            fr.function = fun
            fr.save()
            return HttpResponseRedirect('/%s/%s/'
                % ('clusters/editclusterfunctions', fun.cluster.id))
def fr_table(request, cluster_id, toggle="qty"):
    """Show the function/resource table; POSTing the toggle form
    redirects back with the chosen measure in the URL."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    toggle_form = QuantityValueForm(
        initial={"toggle": toggle,},
        data=request.POST or None)
    frtable = function_resource_table(cluster, toggle)
    if request.method == "POST" and toggle_form.is_valid():
        tog = toggle_form.cleaned_data["toggle"]
        return HttpResponseRedirect(
            '/%s/%s/%s/' % ('clusters/frtable', cluster_id, tog))
    return render_to_response("clusters/fr_table.html", {
        "cluster": cluster,
        "frtable": frtable,
        "toggle_form": toggle_form,
        }, context_instance=RequestContext(request))
def send_email(request):
    """Contact form: mail the site maintainer on a valid POST."""
    if request.method == "POST":
        email_form = EmailForm(request.POST)
        if email_form.is_valid():
            data = email_form.cleaned_data
            subject = " ".join(["[locecon]", data["subject"]])
            send_mail(subject, data["message"], data["email_address"],
                ["bob.haugen@gmail.com",])
            return HttpResponseRedirect(reverse("email_sent"))
    else:
        email_form = EmailForm()
    return render_to_response("clusters/send_email.html", {
        "email_form": email_form,
    })
def object_filter(request, cluster_id=None, model=None, queryset=None, template_name=None, extra_context=None,
    context_processors=None, filter_class=None, page_length=None, page_variable="p"):
    """Generic filtered-list view built on django-filter.

    Either ``model`` or ``filter_class`` must be supplied; whichever is
    missing is derived from the other.  When ``cluster_id`` is given the
    queryset comes from ``filter_class.queryset(cluster)``.  Optional
    pagination via ``page_length``/``page_variable``.

    NOTE(review): ``context_processors`` is accepted but never used here.
    """
    #import pdb; pdb.set_trace()
    if cluster_id:
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        queryset = filter_class.queryset(cluster)
    if model is None and filter_class is None:
        raise TypeError("object_filter must be called with either model or filter_class")
    if model is None:
        model = filter_class._meta.model
    if filter_class is None:
        # build a default FilterSet class for the model on the fly
        meta = type('Meta', (object,), {'model': model})
        filter_class = type('%sFilterSet' % model._meta.object_name, (FilterSet,),
            {'Meta': meta})
    #import pdb; pdb.set_trace()
    filterset = filter_class(request.GET or None, queryset=queryset)
    if not template_name:
        # default template location: <app>/<model>_filter.html
        template_name = '%s/%s_filter.html' % (model._meta.app_label, model._meta.object_name.lower())
    c = RequestContext(request, {
        'filter': filterset,
    })
    if extra_context:
        # callables in extra_context are evaluated lazily, per request
        for k, v in extra_context.iteritems():
            if callable(v):
                v = v()
            c[k] = v
    if page_length:
        from django.core.paginator import Paginator
        p = Paginator(filterset.qs,page_length)
        # preserve the other GET parameters in pagination links
        getvars = request.GET.copy()
        if page_variable in getvars:
            del getvars[page_variable]
        if len(getvars.keys()) > 0:
            p.querystring = "&%s" % getvars.urlencode()
        try:
            c['paginated_filter'] = p.page(request.GET.get(page_variable,1))
        except EmptyPage:
            raise Http404
        c['paginator'] = p
    return render_to_response(template_name, c)
def diagrams(request, cluster_id):
    """Menu page linking to a cluster's diagram views."""
    the_cluster = get_object_or_404(Cluster, pk=cluster_id)
    return render_to_response("clusters/diagrams.html",
        {"cluster": the_cluster},
        context_instance=RequestContext(request))
def reports(request, cluster_id):
    """Menu page linking to a cluster's reports."""
    the_cluster = get_object_or_404(Cluster, pk=cluster_id)
    return render_to_response("clusters/reports.html",
        {"cluster": the_cluster},
        context_instance=RequestContext(request))
def value_added_report(request, cluster_id):
    """Value-added report for a cluster.

    Rows are computed from a POSTed starting function plus an optional
    resource-name filter; a GET shows an empty report with the form.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    form = ValueAddedSelectionForm(cluster=cluster, data=request.POST or None)
    rows = []
    if request.method == "POST" and form.is_valid():
        cd = form.cleaned_data
        start = EconomicFunction.objects.get(pk=cd["starting_function"])
        name_filter = cd["resource_name_contains"] or None
        rows = cluster.value_added_rows(start, name_filter)
    return render_to_response("clusters/value_added.html",{
        "cluster": cluster,
        "form": form,
        "resource_aspect_name": cluster.community.resource_aspect_name,
        "rows": rows,
    }, context_instance=RequestContext(request))
# fix sankeys for flow models
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.http import HttpResponseRedirect, Http404, HttpResponse
from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.db.models import Q
from django.forms.formsets import formset_factory
from django.forms.models import modelformset_factory
from django.core.mail import send_mail
from django.core import serializers
from django.conf import settings
from decimal import *
from datetime import datetime, timedelta
from clusters.models import *
from clusters.forms import *
from clusters.utils import *
def cluster_params(cluster):
    """Assemble the shared template context for cluster graph pages.

    Only functions with at least one input or output are included,
    along with the resource types they touch, the agents per function,
    the cluster root and the quantity-toggled function/resource table.
    """
    connected = []
    rtypes = []
    for fn in EconomicFunction.objects.filter(cluster=cluster):
        ins = fn.inputs()
        if ins:
            connected.append(fn)
            rtypes.extend(frt.resource_type for frt in ins)
        outs = fn.outputs()
        if outs:
            connected.append(fn)
            rtypes.extend(frt.resource_type for frt in outs)
    functions = list(set(connected))
    agents = {}
    for fn in functions:
        for assignment in fn.agents.all():
            agents.setdefault(fn, []).append(assignment.agent)
    return {
        "cluster": cluster,
        "functions": functions,
        "resources": list(set(rtypes)),
        "function_agents": agents,
        "root": cluster.root(),
        "frtable": function_resource_table(cluster, "qty"),
    }
def explore_params(cluster):
    """Template context for the explore page.

    Same shape as cluster_params but without the function/resource
    table: connected functions, their resource types, agents per
    function, and the cluster root.
    """
    connected = []
    rtypes = []
    for fn in EconomicFunction.objects.filter(cluster=cluster):
        ins = fn.inputs()
        if ins:
            connected.append(fn)
            rtypes.extend(frt.resource_type for frt in ins)
        outs = fn.outputs()
        if outs:
            connected.append(fn)
            rtypes.extend(frt.resource_type for frt in outs)
    functions = list(set(connected))
    agents = {}
    for fn in functions:
        for assignment in fn.agents.all():
            agents.setdefault(fn, []).append(assignment.agent)
    return {
        "cluster": cluster,
        "functions": functions,
        "resources": list(set(rtypes)),
        "function_agents": agents,
        "root": cluster.root(),
    }
class FlowResource(object):
    """Thin stand-in exposing only ``.resource_type``, so a flow's
    resource can be slotted where templates expect resource-bearing
    objects."""

    def __init__(self, resource_type):
        self.resource_type = resource_type
# does not work; FlowResource objects cannot fake it for FunctionResourceTypes
def flow_radial_graph_params(cluster):
    """Build radial-graph context from FunctionResourceFlow records.

    Monkey-patches ``inputs``/``outputs`` lists onto the function
    objects (shadowing the model methods of the same names) so the
    radial-graph template can treat them like functions with
    FunctionResourceType collections.

    NOTE(review): marked non-working by its author (comment above) --
    FlowResource exposes only .resource_type, not the full
    FunctionResourceType interface the template expects.
    """
    template_params = {}
    flows = FunctionResourceFlow.objects.filter(
        from_function__cluster=cluster)
    functions = []
    resources = []
    for flow in flows:
        from_fn = flow.from_function
        # First touch: .inputs is still the bound model method, so len()
        # raises TypeError and we replace it with a plain list.
        try:
            len(from_fn.inputs)
        except TypeError:
            from_fn.inputs = []
        from_fn.inputs.append(FlowResource(flow.resource_type))
        to_fn = flow.to_function
        # bug fix: probe to_fn.outputs (was from_fn.outputs); otherwise a
        # node whose .outputs was never initialized keeps its bound
        # method and the append below fails.
        try:
            len(to_fn.outputs)
        except TypeError:
            to_fn.outputs = []
        to_fn.outputs.append(FlowResource(flow.resource_type))
        functions.extend([from_fn, to_fn])
        resources.append(flow.resource_type)
    functions = list(set(functions))
    resources = list(set(resources))
    agents = {}
    for ef in functions:
        for agent in ef.agents.all():
            #agents.setdefault(ef.node_id(), []).append(agent.agent.name)
            agents.setdefault(ef, []).append(agent.agent)
    root = cluster.root()
    template_params = {
        "cluster": cluster,
        "functions": functions,
        "resources": resources,
        "function_agents": agents,
        "root": root,
    }
    return template_params
def clusters(request):
    """Index page listing every community (and, via template, clusters)."""
    return render_to_response("clusters/clusters.html", {
        "communities": Community.objects.all(),
    }, context_instance=RequestContext(request))
def cluster(request, cluster_id, location="agt"):
    """Cluster map page, toggling between agent ("agt") and region view.

    The location form only exists when the community defines an agent
    geographic area name; a POSTed choice redirects back with the
    location in the URL.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    location_form = None
    if community.agent_geographic_area_name:
        location_form = AgentAreaForm(
            community=community,
            initial={"location": location,},
            data=request.POST or None)
    if request.method == "POST" and location_form and location_form.is_valid():
        picked = location_form.cleaned_data["location"]
        return HttpResponseRedirect('/%s/%s/%s/'
            % ('clusters/cluster', cluster_id, picked))
    if location == "agt":
        agents = cluster.agents()
        for agent in agents:
            agent.all_functions = agent.functions.filter(
                function__cluster=cluster)
    else:
        agents = cluster.regions()
    context = {
        "cluster": cluster,
        "agents": agents,
        "map_center": ",".join([str(community.latitude), str(community.longitude)]),
        "map_key": settings.GOOGLE_API_KEY,
        "zoom_level": community.map_zoom_level,
        "location_form": location_form,
        "color_functions": cluster.function_colors(),
    }
    return render_to_response("clusters/cluster.html", context,
        context_instance=RequestContext(request))
def cluster_agents(request, cluster_id):
    """List a cluster's agents with their functions and resources.

    For each of an agent's cluster functions: if the function has
    resources, attach this agent's function-resource rows per resource;
    otherwise fall back to the function's own function_resources.
    Statistical outliers among the function's resources are collected
    into ``cf.outliers`` for the template.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    #import pdb; pdb.set_trace()
    agents = cluster.agents()
    for agent in agents:
        # functions this agent performs within this cluster only
        agent.cluster_functions = agent.functions.filter(function__cluster=cluster)
        for cf in agent.cluster_functions:
            cf.rsrcs = cf.function.resources.all()
            if cf.rsrcs:
                for res in cf.rsrcs:
                    # per-agent rows for this resource (template use)
                    res.agent_resource_list = res.function_resources_for_agent(agent)
            else:
                cf.agent_resources = cf.function_resources.all()
            # flag outlier function-resources (see is_outlier())
            outliers = []
            candidates = cf.function_resources.all()
            for c in candidates:
                if c.is_outlier():
                    outliers.append(c)
            cf.outliers = outliers
    #import pdb; pdb.set_trace()
    return render_to_response("clusters/cluster_agents.html", {
        "cluster": cluster,
        "agents": agents,
    }, context_instance=RequestContext(request))
@login_required
def edit_cluster_functions(request, cluster_id):
    """Edit page for a cluster's functions and their resource types.

    Builds blank add-forms, a per-function inline resource form,
    producer/consumer links per resource, and a ';'-separated list of
    unused resource-type names for the autocomplete widget.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    # currency symbol falls back to "$" when the community has no
    # unit_of_value
    symbol = "$"
    try:
        symbol = community.unit_of_value.symbol
    except:
        pass
    new_function_form = EconomicFunctionForm(prefix="function")
    new_resource_form = EconomicResourceTypeForm(prefix="resource")
    functions = cluster.functions.all()
    for fun in functions:
        # inline add-resource form rendered under each function
        fun.form = FunctionResourceTypeForm(community=cluster.community)
    resources = cluster.resources()
    for res in resources:
        res.my_consumers = res.cluster_consumers(cluster)
        res.my_producers = res.cluster_producers(cluster)
    # resource types not yet attached to the community, for autocomplete
    used = [cr.resource_type.id for cr in community.resources.all()]
    resource_names = ';'.join([
        res.name for res in EconomicResourceType.objects.all().exclude(id__in=used)])
    template_params = network_params(cluster, "qty")
    template_params["symbol"] = symbol
    template_params["functions"] = functions
    template_params["resources"] = resources
    template_params["new_function_form"] = new_function_form
    template_params["new_resource_form"] = new_resource_form
    template_params["resource_names"] = resource_names
    function_aspect_name = cluster.function_aspect_name
    resource_aspect_name = cluster.community.resource_aspect_name
    template_params["function_aspect_name"] = function_aspect_name
    template_params["resource_aspect_name"] = resource_aspect_name
    return render_to_response("clusters/edit_cluster_functions.html",
        template_params,
        context_instance=RequestContext(request))
@login_required
def edit_flows(request, cluster_id):
    """Edit FunctionResourceFlows for a cluster via a model formset.

    GET renders the formset plus blank add-function/add-resource forms;
    POST saves each valid row (explicitly deleting rows whose DELETE
    box is checked) and redirects back to this page.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    new_function_form = EconomicFunctionForm(prefix="function")
    new_resource_form = EconomicResourceTypeForm(prefix="resource")
    flows = FunctionResourceFlow.objects.filter(
        from_function__cluster=cluster)
    FlowFormSet = modelformset_factory(
        FunctionResourceFlow,
        form=FunctionResourceFlowForm,
        can_delete=True,
        extra=4,
    )
    formset = FlowFormSet(
        queryset=FunctionResourceFlow.objects.filter(
            from_function__cluster=cluster),
        data=request.POST or None,
    )
    # restrict the per-row dropdowns to this cluster's functions and the
    # community's resource types
    function_choices = [('', '----------')] + [
        (fn.id, fn.name) for fn in cluster.functions.all()
    ]
    resource_choices = [('', '----------')] + [
        (cr.resource_type.id, cr.resource_type.name) for cr in cluster.community.resources.all()
    ]
    for form in formset.forms:
        form.fields['from_function'].choices = function_choices
        form.fields['to_function'].choices = function_choices
        form.fields['resource_type'].choices = resource_choices
    # resource-type names not yet used by the community (autocomplete)
    used = [cr.resource_type.id for cr in cluster.community.resources.all()]
    resource_names = ';'.join([
        res.name for res in EconomicResourceType.objects.all().exclude(id__in=used)])
    if request.method == "POST":
        #import pdb; pdb.set_trace()
        for form in formset.forms:
            if form.is_valid():
                delete = form.cleaned_data["DELETE"]
                if delete:
                    #todo: this delete code is odd.
                    #First, I expected formsets to delete automatically id DELETE is True.
                    #Second, returning an object when requesting id is nice
                    #but smells like it might break in the future.
                    #import pdb; pdb.set_trace()
                    deleted = form.cleaned_data["id"]
                    deleted.delete()
                else:
                    form.save()
        return HttpResponseRedirect('/%s/%s/'
            % ('clusters/editflows', cluster.id))
    template_params = flow_params(cluster, "qty")
    template_params["new_function_form"] = new_function_form
    template_params["new_resource_form"] = new_resource_form
    template_params["resource_names"] = resource_names
    function_aspect_name = cluster.function_aspect_name
    resource_aspect_name = cluster.community.resource_aspect_name
    template_params["function_aspect_name"] = function_aspect_name
    template_params["resource_aspect_name"] = resource_aspect_name
    template_params["formset"] = formset
    return render_to_response("clusters/edit_flows.html",
        template_params,
        context_instance=RequestContext(request))
@login_required
def edit_agent_flows(request, cluster_id):
    """Edit AgentResourceFlows for a cluster via a model formset.

    Like edit_flows but at the agent-function level; also ships
    '~'-separated resource and function name lists for the page's
    javascript widgets.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    new_function_form = InlineAgentFunctionForm(cluster=cluster, prefix="function")
    new_resource_form = EconomicResourceTypeForm(prefix="resource")
    # flows touching this cluster on either end
    flows = list(AgentResourceFlow.objects.filter(
        from_function__function__cluster=cluster))
    flows.extend(list(AgentResourceFlow.objects.filter(
        to_function__function__cluster=cluster)))
    FlowFormSet = modelformset_factory(
        AgentResourceFlow,
        form=AgentResourceFlowForm,
        can_delete=True,
        extra=3,
    )
    formset = FlowFormSet(
        queryset=AgentResourceFlow.objects.filter(
            from_function__function__cluster=cluster),
        data=request.POST or None,
    )
    # restrict per-row dropdowns to this cluster's agent functions and
    # the community's resource types
    agent_functions = AgentFunction.objects.filter(
        function__cluster=cluster)
    function_choices = [('', '----------')] + [
        (fn.id, fn) for fn in agent_functions]
    resources = cluster.community.resources.all()
    resource_choices = [('', '----------')] + [
        (cr.resource_type.id, cr.resource_type.name) for cr in resources
    ]
    for form in formset.forms:
        form.fields['from_function'].choices = function_choices
        form.fields['to_function'].choices = function_choices
        form.fields['resource_type'].choices = resource_choices
    # '~'-separated names for the page's javascript autocompletes
    used = [cr.resource_type.id for cr in resources]
    erts = EconomicResourceType.objects.all().exclude(id__in=used)
    resource_names = '~'.join([res.name for res in erts])
    function_names = '~'.join([fn.name for fn in cluster.functions.all()])
    if request.method == "POST":
        #import pdb; pdb.set_trace()
        for form in formset.forms:
            if form.is_valid():
                delete = form.cleaned_data["DELETE"]
                if delete:
                    #todo: this delete code is odd.
                    #First, I expected formsets to delete automatically id DELETE is True.
                    #Second, returning an object when requesting id is nice
                    #but smells like it might break in the future.
                    #import pdb; pdb.set_trace()
                    deleted = form.cleaned_data["id"]
                    deleted.delete()
                else:
                    form.save()
        return HttpResponseRedirect('/%s/%s/'
            % ('clusters/editagentflows', cluster.id))
    template_params = agent_flow_params(cluster, "qty")
    template_params["new_function_form"] = new_function_form
    template_params["new_resource_form"] = new_resource_form
    template_params["resource_names"] = resource_names
    template_params["function_names"] = function_names
    function_aspect_name = cluster.function_aspect_name
    resource_aspect_name = cluster.community.resource_aspect_name
    template_params["function_aspect_name"] = function_aspect_name
    template_params["resource_aspect_name"] = resource_aspect_name
    template_params["formset"] = formset
    return render_to_response("clusters/edit_agent_flows.html",
        template_params,
        context_instance=RequestContext(request))
def featured_cluster(request):
    """Home page for the currently featured cluster, if one exists."""
    featured = get_featured_cluster()
    params = cluster_params(featured) if featured else {}
    return render_to_response("clusters/featured_cluster.html",
        params,
        context_instance=RequestContext(request))
def radial_graph(request, cluster_id):
    """Radial graph page for one cluster."""
    the_cluster = get_object_or_404(Cluster, pk=cluster_id)
    return render_to_response("clusters/radial_graph.html",
        cluster_params(the_cluster),
        context_instance=RequestContext(request))
class Edge(object):
    """A directed, labeled graph edge carrying a quantity and a drawing
    width (width defaults to 1 and is rescaled later by the callers)."""

    def __init__(self, from_node, to_node, quantity, label, width=1):
        self.from_node, self.to_node = from_node, to_node
        self.quantity, self.label, self.width = quantity, label, width
def agent_network_params(cluster, toggle):
    """Build node/edge context for the agent-level network graph.

    ``toggle`` selects edge weights: "qty" (quantities), "val"
    (computed values) or "price" (Decimal prices).  When the cluster
    has AgentFunctionResourceTypes, edges run resource->agent for
    inputs and agent->resource for outputs; otherwise the view falls
    back to AgentResourceFlow records, splitting each flow into
    from_function->resource and resource->to_function edges.  Edge
    widths are scaled to each edge's share of the total.
    """
    template_params = {}
    frts = AgentFunctionResourceType.objects.filter(
        agent_function__function__cluster=cluster)
    symbol = "$"
    if toggle == "val" or toggle == "price":
        try:
            symbol = cluster.community.unit_of_value.symbol
        except:
            pass
    edges = []
    rtypes = []
    # prices are Decimal; quantities/values accumulate as floats
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    if frts:
        nodes = list(cluster.agents())
        for agt in nodes:
            for v in agt.function_inputs(cluster):
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(v.resource_type, agt, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    edges.append(Edge(v.resource_type, agt, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(v.resource_type, agt, v.quantity, qty_string))
            for v in agt.function_outputs(cluster):
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(agt, v.resource_type, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    edges.append(Edge(agt, v.resource_type, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(agt, v.resource_type, v.quantity, qty_string))
    else:
        flows = AgentResourceFlow.objects.filter(
            from_function__function__cluster=cluster)
        nodes = []
        edges = []
        for flow in flows:
            nodes.extend([flow.from_function, flow.to_function, flow.resource_type])
            if toggle == "val":
                value = flow.get_value()
                total += value
                val_string = "".join([symbol, split_thousands(value)])
                edges.append(Edge(flow.from_function, flow.resource_type, value, val_string))
                edges.append(Edge(flow.resource_type, flow.to_function, value, val_string))
            elif toggle == "price":
                # bug fix: this branch referenced the undefined name ``v``
                # (leftover from the frts branch) and raised NameError
                total += flow.price
                p_string = "".join([symbol, str(flow.price.quantize(Decimal(".01")))])
                edges.append(Edge(flow.from_function, flow.resource_type, flow.price, p_string))
                edges.append(Edge(flow.resource_type, flow.to_function, flow.price, p_string))
            else:
                total += flow.quantity
                # bug fix: label was built from undefined ``v.quantity``
                qty_string = split_thousands(flow.quantity)
                edges.append(Edge(flow.from_function, flow.resource_type, flow.quantity, qty_string))
                edges.append(Edge(flow.resource_type, flow.to_function, flow.quantity, qty_string))
        nodes = list(set(nodes))
    for edge in edges:
        # width proportional to the edge's share of the total (max 50)
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        #print "edge.quantity:", edge.quantity, "Width:", width
        edge.width = width
    nodes.extend(list(set(rtypes)))
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def group_network_params(cluster, toggle):
    """Build node/edge context for the group-level network graph.

    ``toggle`` selects edge weights: "qty", "val" or "price".  Edges run
    resource->group for group inputs and group->resource for outputs;
    widths scale with each edge's share of the total.
    """
    template_params = {}
    groups = cluster.groups()
    symbol = "$"
    # currency symbol falls back to "$" if the community has no
    # unit_of_value
    if toggle == "val" or toggle == "price":
        try:
            symbol = cluster.community.unit_of_value.symbol
        except:
            pass
    nodes = []
    edges = []
    rtypes = []
    # prices are Decimal; quantities/values accumulate as floats
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    if groups:
        nodes = groups
        for agt in nodes:
            for v in agt.function_inputs():
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(v.resource_type, agt, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    edges.append(Edge(v.resource_type, agt, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(v.resource_type, agt, v.quantity, qty_string))
            for v in agt.function_outputs():
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(agt, v.resource_type, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    edges.append(Edge(agt, v.resource_type, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(agt, v.resource_type, v.quantity, qty_string))
    for edge in edges:
        # width proportional to the edge's share of the total (max 50)
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        edge.width = width
    nodes.extend(list(set(rtypes)))
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def network_params(cluster, toggle):
    """Build node/edge context for the function-level network graph.

    ``toggle`` selects edge weights: "qty", "val" or "price".  When the
    cluster has FunctionResourceTypes, edges run resource->function for
    inputs and function->resource for outputs; otherwise the view falls
    back to FunctionResourceFlow records, splitting each flow into two
    edges through its resource type.  Edge widths are scaled to each
    edge's share of the total.
    """
    template_params = {}
    frts = FunctionResourceType.objects.filter(
        function__cluster=cluster)
    symbol = "$"
    if toggle == "val" or toggle == "price":
        try:
            symbol = cluster.community.unit_of_value.symbol
        except:
            pass
    edges = []
    rtypes = []
    # prices are Decimal; quantities/values accumulate as floats
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    if frts:
        nodes = list(cluster.functions.all())
        for fn in nodes:
            for v in fn.inputs():
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(v.resource_type, fn, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    edges.append(Edge(v.resource_type, fn, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(v.resource_type, fn, v.quantity, qty_string))
            for v in fn.outputs():
                rtypes.append(v.resource_type)
                if toggle == "val":
                    value = v.get_value()
                    total += value
                    val_string = "".join([symbol, split_thousands(value)])
                    edges.append(Edge(fn, v.resource_type, value, val_string))
                elif toggle == "price":
                    total += v.price
                    p_string = "".join([symbol, str(v.price.quantize(Decimal(".01")))])
                    # bug fix: output edge ran backwards
                    # (resource->function) in the price branch only
                    edges.append(Edge(fn, v.resource_type, v.price, p_string))
                else:
                    total += v.quantity
                    qty_string = split_thousands(v.quantity)
                    edges.append(Edge(fn, v.resource_type, v.quantity, qty_string))
    else:
        flows = FunctionResourceFlow.objects.filter(
            from_function__cluster=cluster)
        nodes = []
        edges = []
        for flow in flows:
            nodes.extend([flow.from_function, flow.to_function, flow.resource_type])
            if toggle == "val":
                value = flow.get_value()
                total += value
                val_string = "".join([symbol, split_thousands(value)])
                edges.append(Edge(flow.from_function, flow.resource_type, value, val_string))
                edges.append(Edge(flow.resource_type, flow.to_function, value, val_string))
            elif toggle == "price":
                # bug fix: this branch referenced the undefined name ``v``
                # (leftover from the frts branch) and raised NameError
                total += flow.price
                p_string = "".join([symbol, str(flow.price.quantize(Decimal(".01")))])
                edges.append(Edge(flow.from_function, flow.resource_type, flow.price, p_string))
                edges.append(Edge(flow.resource_type, flow.to_function, flow.price, p_string))
            else:
                total += flow.quantity
                qty_string = split_thousands(flow.quantity)
                edges.append(Edge(flow.from_function, flow.resource_type, flow.quantity, qty_string))
                edges.append(Edge(flow.resource_type, flow.to_function, flow.quantity, qty_string))
        nodes = list(set(nodes))
    for edge in edges:
        # width proportional to the edge's share of the total (max 50)
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        edge.width = width
    nodes.extend(list(set(rtypes)))
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def network(request, cluster_id, toggle="qty", level="fn"):
    """Network graph page.

    POST toggles the edge-weight type (qty/price/val) and, when the
    cluster has agents, the graph level (fn/agt/grp), then redirects
    with both choices in the URL.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    toggle_form = QuantityPriceValueForm(
        initial={"toggle": toggle,},
        data=request.POST or None)
    level_form = None
    if cluster.agents():
        level_form = FunctionAgentLevelForm(
            initial={"level": level,},
            data=request.POST or None)
    if request.method == "POST":
        if level_form and level_form.is_valid():
            level = level_form.cleaned_data["level"]
        if toggle_form.is_valid():
            toggle = toggle_form.cleaned_data["toggle"]
        return HttpResponseRedirect('/%s/%s/%s/%s/'
            % ('clusters/network', cluster_id, toggle, level))
    builders = {
        "agt": agent_network_params,
        "grp": group_network_params,
    }
    template_params = builders.get(level, network_params)(cluster, toggle)
    template_params["use_window_size"] = True
    template_params["toggle_form"] = toggle_form
    template_params["level_form"] = level_form
    return render_to_response("clusters/network.html",
        template_params,
        context_instance=RequestContext(request))
class FlowEdge(object):
    """A labeled flow edge; note the constructor takes label before
    quantity (the opposite order of Edge)."""

    def __init__(self, from_node, to_node, label, quantity, width=1):
        self.from_node, self.to_node = from_node, to_node
        self.label, self.quantity, self.width = label, quantity, width
def group_flow_params(cluster, toggle):
    """Build node/edge context for the group-level flow graph.

    ``toggle`` selects edge weights: "qty", "val" or "price".
    Consecutive flows between the same from/to pair are merged into one
    edge with a ';'-joined label.
    NOTE(review): the merge only compares each flow with its immediate
    predecessor, so it assumes cluster.group_flows() returns flows
    ordered with equal pairs adjacent -- confirm against the model.
    """
    template_params = {}
    flows = cluster.group_flows()
    nodes = []
    # prices are Decimal; quantities/values accumulate as floats
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    # first pass: collect nodes and the grand total for width scaling
    for flow in flows:
        nodes.extend([flow.from_function, flow.to_function])
        if toggle == "val":
            total += flow.get_value()
        elif toggle == "price":
            total += flow.price
        else:
            total += flow.quantity
    nodes = list(set(nodes))
    prev = None
    edges = []
    # second pass: build edges, merging runs of flows between the same
    # pair of functions
    for flow in flows:
        if prev:
            prev_match = prev.to_function.id==flow.to_function.id and prev.from_function.id==flow.from_function.id
        else:
            prev_match=False
        if prev_match:
            # same pair as the previous flow: fold into the open edge
            edge.label = ";".join([edge.label, flow.resource_type.name])
            if toggle == "val":
                edge.quantity += flow.get_value()
            elif toggle == "price":
                edge.quantity += flow.price
            else:
                edge.quantity += flow.quantity
        else:
            if toggle == "val":
                nbr = flow.get_value()
            elif toggle == "price":
                nbr = flow.price
            else:
                nbr = flow.quantity
            edge = FlowEdge(flow.from_function, flow.to_function, flow.resource_type.name, nbr)
            edges.append(edge)
        prev = flow
    for edge in edges:
        # width proportional to the edge's share of the total (max 50)
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        edge.width = width
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def agent_flow_params(cluster, toggle):
    """Build node/edge context for the agent-level flow graph.

    Same structure as group_flow_params but over AgentResourceFlow
    records whose from_function belongs to this cluster.
    NOTE(review): the merge only compares each flow with its immediate
    predecessor, so it relies on the queryset's default ordering keeping
    equal from/to pairs adjacent -- confirm against the model Meta.
    """
    template_params = {}
    flows = AgentResourceFlow.objects.filter(
        from_function__function__cluster=cluster)
    nodes = []
    # prices are Decimal; quantities/values accumulate as floats
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    # first pass: collect nodes and the grand total for width scaling
    for flow in flows:
        nodes.extend([flow.from_function, flow.to_function])
        if toggle == "val":
            total += flow.get_value()
        elif toggle == "price":
            total += flow.price
        else:
            total += flow.quantity
    nodes = list(set(nodes))
    prev = None
    edges = []
    # second pass: build edges, merging runs of flows between the same
    # pair of agent functions
    for flow in flows:
        if prev:
            prev_match = prev.to_function.id==flow.to_function.id and prev.from_function.id==flow.from_function.id
        else:
            prev_match=False
        if prev_match:
            # same pair as the previous flow: fold into the open edge
            edge.label = ";".join([edge.label, flow.resource_type.name])
            if toggle == "val":
                edge.quantity += flow.get_value()
            elif toggle == "price":
                edge.quantity += flow.price
            else:
                edge.quantity += flow.quantity
        else:
            if toggle == "val":
                nbr = flow.get_value()
            elif toggle == "price":
                nbr = flow.price
            else:
                nbr = flow.quantity
            edge = FlowEdge(flow.from_function, flow.to_function, flow.resource_type.name, nbr)
            edges.append(edge)
        prev = flow
    for edge in edges:
        # width proportional to the edge's share of the total (max 50)
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        edge.width = width
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def flow_params(cluster, toggle):
    """Build node/edge context for the function-level flow graph.

    ``toggle`` selects edge weights: "qty", "val" or "price".  Edge
    labels combine the resource-type name with a formatted number
    (prefixed by the community currency symbol for val/price).
    Consecutive flows between the same from/to pair are merged into a
    single edge with ';'-joined labels.
    NOTE(review): as in the sibling *_flow_params functions, merging
    only compares adjacent flows and relies on queryset ordering.
    """
    template_params = {}
    flows = FunctionResourceFlow.objects.filter(
        from_function__cluster=cluster)
    # currency symbol falls back to "$" if the community has no
    # unit_of_value
    symbol = "$"
    if toggle == "val" or toggle == "price":
        try:
            symbol = cluster.community.unit_of_value.symbol
        except:
            pass
    nodes = []
    # prices are Decimal; quantities/values accumulate as floats
    if toggle == "price":
        total = Decimal("0.00")
    else:
        total = 0.0
    # first pass: collect nodes and the grand total for width scaling
    for flow in flows:
        nodes.extend([flow.from_function, flow.to_function])
        if toggle == "val":
            total += flow.get_value()
        elif toggle == "price":
            total += flow.price
        else:
            total += flow.quantity
    nodes = list(set(nodes))
    prev = None
    edges = []
    # second pass: build labeled edges, merging runs between the same
    # pair of functions
    for flow in flows:
        if prev:
            prev_match = prev.to_function.id==flow.to_function.id and prev.from_function.id==flow.from_function.id
        else:
            prev_match=False
        if prev_match:
            # same pair as the previous flow: fold the amount into the
            # open edge and append "<resource> <number>" to its label
            if toggle == "val":
                value = flow.get_value()
                if value:
                    label_nbr = "".join([symbol, split_thousands(value)])
                else:
                    label_nbr = ""
                edge.quantity += value
            elif toggle == "price":
                edge.quantity += flow.price
                if flow.price:
                    label_nbr = "".join([
                        symbol,
                        str(flow.price.quantize(Decimal(".01")))])
                else:
                    label_nbr = ""
            else:
                edge.quantity += flow.quantity
                if flow.quantity:
                    label_nbr = split_thousands(flow.quantity)
                else:
                    label_nbr = ""
            new_label = "".join([
                flow.resource_type.name,
                " ",
                label_nbr])
            edge.label = ";".join([edge.label, new_label])
        else:
            # new pair: start a fresh edge; zero amounts get a bare
            # resource-name label
            if toggle == "val":
                nbr = flow.get_value()
                if nbr:
                    label = "".join([
                        flow.resource_type.name,
                        " ",
                        symbol,
                        split_thousands(nbr)])
                else:
                    label = flow.resource_type.name
            elif toggle == "price":
                nbr = flow.price
                if nbr:
                    label = "".join([
                        flow.resource_type.name,
                        " ",
                        symbol,
                        str(nbr.quantize(Decimal(".01")))])
                else:
                    label = flow.resource_type.name
            else:
                nbr = flow.quantity
                if nbr:
                    label = "".join([
                        flow.resource_type.name,
                        " ",
                        split_thousands(nbr)])
                else:
                    label = flow.resource_type.name
            edge = FlowEdge(flow.from_function, flow.to_function, label, nbr)
            edges.append(edge)
        prev = flow
    for edge in edges:
        # width proportional to the edge's share of the total (max 50)
        width = 1
        if total > 0:
            width = round((edge.quantity / total), 2) * 50
            width = int(width)
        edge.width = width
    template_params = {
        'cluster': cluster,
        'nodes': nodes,
        'edges': edges,
    }
    return template_params
def flows(request, cluster_id, toggle="qty", level="fn"):
    """Flow graph page.

    POST toggles the edge-weight type (qty/price/val) and, when the
    cluster has agents, the graph level (fn/agt/grp), then redirects
    with both choices in the URL.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    toggle_form = QuantityPriceValueForm(
        initial={"toggle": toggle,},
        data=request.POST or None)
    level_form = None
    if cluster.agents():
        level_form = FunctionAgentLevelForm(
            initial={"level": level,},
            data=request.POST or None)
    if request.method == "POST":
        if level_form and level_form.is_valid():
            level = level_form.cleaned_data["level"]
        if toggle_form.is_valid():
            toggle = toggle_form.cleaned_data["toggle"]
        return HttpResponseRedirect('/%s/%s/%s/%s/'
            % ('clusters/flows', cluster_id, toggle, level))
    builders = {
        "agt": agent_flow_params,
        "grp": group_flow_params,
    }
    template_params = builders.get(level, flow_params)(cluster, toggle)
    template_params["use_window_size"] = True
    template_params["toggle_form"] = toggle_form
    template_params["level_form"] = level_form
    return render_to_response("clusters/flows.html",
        template_params,
        context_instance=RequestContext(request))
def iotable(request, cluster_id):
    """Input/output table page for one cluster."""
    the_cluster = get_object_or_404(Cluster, pk=cluster_id)
    return render_to_response("clusters/iotable.html", {
        "cluster": the_cluster,
        "iotable": input_output_table(the_cluster),
    }, context_instance=RequestContext(request))
def explore(request, cluster_id):
    """Explore page rendered from explore_params context."""
    the_cluster = get_object_or_404(Cluster, pk=cluster_id)
    return render_to_response("clusters/explore.html",
        explore_params(the_cluster),
        context_instance=RequestContext(request))
class SankeyLink(object):
    """One sankey link: integer indexes into the node list plus a
    weight and an optional label."""

    def __init__(self, source, target, value, label=None):
        self.source, self.target = source, target
        self.value, self.label = value, label
def sankey_params(cluster, toggle):
    """Build sankey node/link context for a cluster.

    Uses FunctionResourceTypes when the cluster has any; otherwise
    falls back to FunctionResourceFlow records, merging parallel links
    over the same node pair.  Link endpoints are integer indexes into
    the node list, as the sankey template expects.
    """
    template_params = {}
    frts = FunctionResourceType.objects.filter(
        function__cluster=cluster)
    symbol = "$"
    if toggle == "val" or toggle == "price":
        try:
            symbol = cluster.community.unit_of_value.symbol
        except:
            pass
    edges = []
    rtypes = []
    if frts:
        link_nodes = cluster.fr_graph_nodes()
        nodes = list(cluster.functions.all())
        for fn in nodes:
            for v in fn.inputs():
                rtypes.append(v.resource_type)
                if toggle == "val":
                    qty = v.get_value()
                elif toggle == "price":
                    qty = v.price
                else:
                    qty = v.quantity
                # inputs link resource -> consuming function
                from_node = link_nodes.index(v.resource_type)
                to_node = link_nodes.index(fn)
                edges.append(SankeyLink(from_node, to_node, qty))
            for v in fn.outputs():
                rtypes.append(v.resource_type)
                if toggle == "val":
                    qty = v.get_value()
                elif toggle == "price":
                    qty = v.price
                else:
                    qty = v.quantity
                # outputs link producing function -> resource
                to_node = link_nodes.index(v.resource_type)
                from_node = link_nodes.index(fn)
                edges.append(SankeyLink(from_node, to_node, qty))
    else:
        # bug fix: removed a leftover "import pdb; pdb.set_trace()" that
        # froze every request taking this branch
        link_nodes = cluster.flow_graph_nodes()
        flows = FunctionResourceFlow.objects.filter(
            from_function__cluster=cluster)
        edges = []
        for flow in flows:
            if toggle == "val":
                nbr = flow.get_value()
            elif toggle == "price":
                nbr = flow.price
            else:
                nbr = flow.quantity
            from_index = link_nodes.index(flow.from_function)
            to_index = link_nodes.index(flow.to_function)
            resource_index = link_nodes.index(flow.resource_type)
            # merge with any existing link over the same segment
            prev_match_from = None
            prev_match_to = None
            if edges:
                for prev in edges:
                    if not prev_match_from:
                        if from_index == prev.source and resource_index == prev.target:
                            prev_match_from = prev
                            prev.value += nbr
                    if not prev_match_to:
                        if resource_index == prev.source and to_index == prev.target:
                            prev_match_to = prev
                            prev.value += nbr
                    if prev_match_from and prev_match_to:
                        break
            if not prev_match_from:
                edges.append(SankeyLink(from_index, resource_index, nbr))
            if not prev_match_to:
                edges.append(SankeyLink(resource_index, to_index, nbr))
    template_params = {
        'cluster': cluster,
        'net_nodes': link_nodes,
        'net_links': edges,
    }
    return template_params
def sankey(request, cluster_id):
    """Render the sankey diagram page, always weighted by quantity."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    params = sankey_params(cluster, "qty")
    return render_to_response("clusters/sankey.html",
        params,
        context_instance=RequestContext(request))
def diagnostics(request, cluster_id, level="fn"):
    """Show production/consumption diagnostics for a cluster.

    level is "fn" (function level, the default) or "agt" (agent level).
    When the cluster has agents, a level-selection form is shown; posting
    it redirects back to this view with the chosen level in the URL.
    I/O-vs-flows discrepancies are only computed at function level for
    clusters that have flows.

    Fix: the bare ``except:`` around the currency-symbol lookup is narrowed
    to AttributeError (raised when community.unit_of_value is not set) so
    it no longer swallows unrelated errors.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    symbol = "$"
    try:
        symbol = cluster.community.unit_of_value.symbol
    except AttributeError:
        # No unit of value configured for the community; keep "$".
        pass
    level_form = None
    if cluster.agents():
        level_form = FunctionAgentTwoLevelForm(
            initial={"level": level,},
            data=request.POST or None)
    if request.method == "POST":
        if level_form:
            if level_form.is_valid():
                level = level_form.cleaned_data["level"]
                return HttpResponseRedirect('/%s/%s/%s/'
                    % ('clusters/diagnostics', cluster_id, level))
    function_io_vs_flows = []
    if level == "agt":
        function_production_without_consumption = cluster.agent_function_production_without_consumption()
        function_consumption_without_production = cluster.agent_function_consumption_without_production()
    else:
        function_production_without_consumption = cluster.function_production_without_consumption()
        function_consumption_without_production = cluster.function_consumption_without_production()
        if cluster.has_flows():
            function_io_vs_flows = cluster.function_io_vs_flows()
    return render_to_response("clusters/diagnostics.html",{
        "cluster": cluster,
        "symbol": symbol,
        "level_form": level_form,
        "function_production_without_consumption": function_production_without_consumption,
        "function_consumption_without_production": function_consumption_without_production,
        "function_io_vs_flows": function_io_vs_flows,
        }, context_instance=RequestContext(request))
def model_errors(request, cluster_id):
    """List modeling problems detected in a cluster."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    context = {
        "cluster": cluster,
        "disjoints": cluster.disjoints(),
        "missing_function_numbers": cluster.missing_function_numbers(),
        "missing_agent_numbers": cluster.missing_agent_numbers(),
        "function_agent_diffs": cluster.function_agent_diffs(),
    }
    return render_to_response("clusters/model_errors.html", context,
        context_instance=RequestContext(request))
def economic_functions(request, cluster_id):
    """Overview page: a cluster's functions, resources and flows."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    resources = cluster.resources()
    # Annotate each resource with its consumers/producers within this
    # cluster for the template.
    for resource in resources:
        resource.my_consumers = resource.cluster_consumers(cluster)
        resource.my_producers = resource.cluster_producers(cluster)
    context = {
        "cluster": cluster,
        "functions": cluster.functions.all(),
        "resources": resources,
        "flows": FunctionResourceFlow.objects.filter(
            from_function__cluster=cluster),
    }
    return render_to_response("clusters/economic_functions.html", context,
        context_instance=RequestContext(request))
def economic_function(request, function_id):
    """Render one economic function (reuses the functions template)."""
    function = get_object_or_404(EconomicFunction, pk=function_id)
    return render_to_response(
        "clusters/economic_functions.html",
        {"economic_function": function},
        context_instance=RequestContext(request))
@login_required
def edit_function(request, function_id):
    """Edit an EconomicFunction; on success return to the cluster's
    function-editing page."""
    function = get_object_or_404(EconomicFunction, pk=function_id)
    cluster = function.cluster
    form = EconomicFunctionForm(data=request.POST or None, instance=function)
    if request.method == "POST" and form.is_valid():
        form.save()
        return HttpResponseRedirect('/%s/%s/'
            % ('clusters/editclusterfunctions', cluster.id))
    return render_to_response("clusters/edit_function.html",{
        "function": function,
        "cluster": cluster,
        "function_aspect_name": cluster.function_aspect_name,
        "function_form": form,
        }, context_instance=RequestContext(request))
@login_required
def delete_function(request, function_id):
    """Delete an EconomicFunction (POST only) and return to the cluster's
    function-editing page; a GET just renders that page without deleting.

    Fix: the original only looked up the function (and thus `cluster`)
    inside the POST branch, so any GET request crashed with a NameError
    when rendering the template context.  The lookup is now unconditional.
    """
    fn = get_object_or_404(EconomicFunction, pk=function_id)
    cluster = fn.cluster
    if request.method == "POST":
        fn.delete()
        return HttpResponseRedirect('/%s/%s/'
            % ('clusters/editclusterfunctions', cluster.id))
    return render_to_response("clusters/edit_cluster_functions.html",{
        "cluster": cluster,
        }, context_instance=RequestContext(request))
@login_required
def delete_function_confirmation(request, function_id):
    """Show everything that would be affected by deleting this function."""
    fn = get_object_or_404(EconomicFunction, pk=function_id)
    function_resources = fn.resources.all()
    incoming_flows = fn.incoming_flows.all()
    outgoing_flows = fn.outgoing_flows.all()
    agent_functions = fn.agents.all()
    # Deleting has consequences if anything still references the function.
    consequences = bool(
        function_resources or incoming_flows
        or outgoing_flows or agent_functions)
    return render_to_response("clusters/delete_function_confirmation.html",{
        "function": fn,
        "cluster": fn.cluster,
        "consequences": consequences,
        "function_resources": function_resources,
        "incoming_flows": incoming_flows,
        "outgoing_flows": outgoing_flows,
        "agent_functions": agent_functions,
        }, context_instance=RequestContext(request))
@login_required
def new_function(request, cluster_id):
    """Render the new-function page with blank resource and agent formsets.

    Display-only: five empty rows each for resources and agents are shown;
    form submission is handled by other views.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    function_form = EconomicFunctionForm()
    ResourceFormSet = formset_factory(FunctionResourceTypeFormX, extra=5)
    resource_formset = ResourceFormSet()
    # Restrict resource choices to this cluster's community.
    rtypes = CommunityResourceType.objects.filter(community=cluster.community)
    for form in resource_formset.forms:
        # Choices go through the widget's set_local_choices hook rather than
        # the field's choices attribute (see commented alternative).
        #form.fields['resource_type'].choices = [('', '----------')] + [(rt.resource_type.id, rt.resource_type.name) for rt in rtypes]
        form.fields['resource_type'].widget.set_local_choices([('', '----------')] + [(rt.resource_type.id, rt.resource_type.name) for rt in rtypes])
    #import pdb; pdb.set_trace()
    AgentFormSet = formset_factory(FunctionAgentForm, extra=5)
    agent_formset = AgentFormSet()
    # Restrict agent choices to this cluster's community.
    agents = CommunityAgent.objects.filter(community=cluster.community)
    for form in agent_formset.forms:
        form.fields['agent'].choices = [('', '----------')] + [(agt.agent.id, agt.agent.name) for agt in agents]
        #form.fields['agent'].widget.set_local_choices([('', '----------')] + [(agt.agent.id, agt.agent.name) for agt in agents])
    return render_to_response("clusters/new_function.html",{
        "cluster": cluster,
        "function_form": function_form,
        "resource_formset": resource_formset,
        "agent_formset": agent_formset,
        }, context_instance=RequestContext(request))
@login_required
def inline_new_function(request, cluster_id):
    """Create an EconomicFunction from an inline form, then redirect to the
    URL given in the POST "next" field (even if the form was invalid).

    NOTE(review): a GET request falls off the end and returns None, which
    Django rejects with a 500 -- confirm this endpoint is only ever POSTed.
    """
    if request.method == "POST":
        next = request.POST.get("next")
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        form = EconomicFunctionForm(request.POST, prefix="function")
        #import pdb; pdb.set_trace()
        if form.is_valid():
            fun = form.save(commit=False)
            fun.cluster = cluster
            fun.save()
        return HttpResponseRedirect(next)
@login_required
def new_agent_function(request, cluster_id):
    """Attach an agent to a function (creating the function if needed),
    then redirect to the URL given in the POST "next" field.

    If a function with the posted name already exists in the cluster, it is
    reused and its aspect updated when a different aspect is posted;
    otherwise a new EconomicFunction is created.  An AgentFunction link is
    created either way.

    NOTE(review): a GET request falls off the end and returns None --
    confirm this endpoint is only ever POSTed.
    """
    if request.method == "POST":
        next = request.POST.get("next")
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        form = InlineAgentFunctionForm(data=request.POST, cluster=cluster, prefix="function")
        #import pdb; pdb.set_trace()
        if form.is_valid():
            data = form.cleaned_data
            agent = data["agent"]
            name = data["name"]
            aspect = data["aspect"]
            # Reuse an existing function with the same name, if any.
            funs = EconomicFunction.objects.filter(
                cluster=cluster,
                name=name)
            if funs:
                fun = funs[0]
                if aspect:
                    if aspect != fun.aspect:
                        fun.aspect = aspect
                        fun.save()
            else:
                fun = EconomicFunction(
                    name=name,
                    cluster=cluster,
                    aspect=aspect)
                fun.save()
            af = AgentFunction(
                agent=agent,
                function=fun)
            af.save()
        return HttpResponseRedirect(next)
@login_required
def inline_new_agent_function(request, cluster_id, agent_id):
    """Attach the given agent to a (possibly new) function from an inline
    form, then redirect to the cluster-agent editing page.

    Mirrors new_agent_function: an existing same-named function in the
    cluster is reused (aspect updated when a different one is posted);
    otherwise a new function is created.  The redirect happens for GET and
    for invalid POSTs as well.
    """
    if request.method == "POST":
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        agent = get_object_or_404(EconomicAgent, pk=agent_id)
        form = AgentFunctionCreationForm(data=request.POST, prefix="function")
        #import pdb; pdb.set_trace()
        #print "b4 form validity check"
        if form.is_valid():
            #print "after form validity check"
            name = form.cleaned_data["name"]
            aspect = form.cleaned_data["aspect"]
            # Reuse an existing function with the same name, if any.
            funs = EconomicFunction.objects.filter(
                cluster=cluster,
                name=name)
            if funs:
                fun = funs[0]
                if aspect:
                    if aspect != fun.aspect:
                        fun.aspect = aspect
                        fun.save()
            else:
                fun = EconomicFunction(
                    name=name,
                    cluster=cluster,
                    aspect=aspect)
                fun.save()
            af = AgentFunction(
                agent=agent,
                function=fun)
            af.save()
        #else:
        #    print "invalid form:", form
    return HttpResponseRedirect('/%s/%s/%s/'
        % ('clusters/editclusteragent', cluster_id, agent_id))
@login_required
def new_resource(request, cluster_id):
    """Display (and validate) the new-resource form.

    NOTE(review): the actual save and CommunityResourceType creation are
    commented out, so a valid POST currently only looks up an existing
    resource type and re-renders the form -- looks like work in progress;
    confirm intent before relying on this view.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    form = EconomicResourceTypeFormX(data=request.POST or None)
    if request.method == "POST":
        if form.is_valid():
            data = form.cleaned_data
            name = data['name']
            try:
                resource = EconomicResourceType.objects.get(name=name)
            except EconomicResourceType.DoesNotExist:
                pass
            #resource = form.save()
            #crt, created = CommunityResourceType.objects.get_or_create(community=cluster.community, resource_type=resource)
    return render_to_response("clusters/new_resource.html",{
        "form": form,
        }, context_instance=RequestContext(request))
@login_required
def new_community(request):
    """Create a Community; the page shows a Google map for picking the
    location."""
    form = CommunityForm(data=request.POST or None)
    if request.method == "POST" and form.is_valid():
        form.save()
        return redirect("clusters")
    return render_to_response("clusters/new_community.html",{
        "form": form,
        "map_key": settings.GOOGLE_API_KEY,
        }, context_instance=RequestContext(request))
@login_required
def new_cluster(request, community_id):
    """Create a Cluster inside the given Community."""
    community = get_object_or_404(Community, pk=community_id)
    form = ClusterForm(data=request.POST or None)
    if request.method == "POST" and form.is_valid():
        created = form.save(commit=False)
        created.community = community
        created.save()
        return redirect("clusters")
    return render_to_response("clusters/new_cluster.html",{
        "form": form,
        "community": community,
        }, context_instance=RequestContext(request))
@login_required
def edit_cluster(request, cluster_id):
    """Edit a Cluster; the community assignment is preserved on save."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    owning_community = cluster.community
    form = ClusterForm(instance=cluster, data=request.POST or None)
    if request.method == "POST" and form.is_valid():
        edited = form.save(commit=False)
        edited.community = owning_community
        edited.save()
        return HttpResponseRedirect('/%s/%s/'
            % ('clusters/cluster', cluster_id))
    return render_to_response("clusters/edit_cluster.html",{
        "form": form,
        "cluster": cluster,
        }, context_instance=RequestContext(request))
@login_required
def edit_community(request, community_id):
    """Edit a Community; centers the map on its coordinates when set."""
    community = get_object_or_404(Community, pk=community_id)
    form = CommunityForm(instance=community, data=request.POST or None)
    if request.method == "POST" and form.is_valid():
        form.save()
        return redirect("clusters")
    if community.latitude and community.longitude:
        map_center = ",".join(
            [str(community.latitude), str(community.longitude)])
    else:
        map_center = "0, 0"
    return render_to_response("clusters/edit_community.html",{
        "form": form,
        "community": community,
        "map_center": map_center,
        "map_key": settings.GOOGLE_API_KEY,
        }, context_instance=RequestContext(request))
@login_required
def new_cluster_agent(request, cluster_id):
    """Create (or reuse) an EconomicAgent and link it to this cluster's
    community as a CommunityAgent.

    If an agent with the posted name already exists it is reused rather
    than duplicated.  The CommunityAgent link carries the group and, when
    the community names a geographic area, the area/coordinates.  On
    success, redirects to the cluster-agent editing page.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    area_name = community.agent_geographic_area_name
    map_center = ",".join([str(community.latitude), str(community.longitude)])
    map_key = settings.GOOGLE_API_KEY
    # Zoom out one level from the community's own zoom for agent placement.
    zoom_level = community.map_zoom_level - 1
    form = EconomicAgentForm(data=request.POST or None)
    # '~'-joined names feed client-side autocomplete/duplicate checking.
    agent_names = '~'.join([agt.name for agt in EconomicAgent.objects.all()])
    if request.method == "POST":
        if form.is_valid():
            data = form.cleaned_data
            name = data['name']
            try:
                resource = EconomicAgent.objects.get(name=name)
            except EconomicAgent.DoesNotExist:
                agent = form.save()
            ca, created = CommunityAgent.objects.get_or_create(community=cluster.community, agent=agent)
            ca.group = data["group"]
            if area_name:
                ca.geographic_area = data["geographic_area"]
                ca.region_latitude = data["region_latitude"]
                ca.region_longitude = data["region_longitude"]
            ca.save()
            return HttpResponseRedirect('/%s/%s/%s/'
                % ('clusters/editclusteragent', cluster_id, agent.id))
    return render_to_response("clusters/new_cluster_agent.html",{
        "cluster": cluster,
        "map_center": map_center,
        "map_key": map_key,
        "zoom_level": zoom_level,
        "form": form,
        "agent_names": agent_names,
        "area_name": area_name,
        }, context_instance=RequestContext(request))
@login_required
def edit_cluster_agent(request, cluster_id, agent_id):
    """Render the cluster-agent editing page.

    Annotates the agent's functions within this cluster with resource data
    and per-resource forms, builds autocomplete name lists, and merges in
    the agent-network diagram parameters.  Address editing is only offered
    when the agent belongs solely to this cluster's community.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    agent = get_object_or_404(EconomicAgent, pk=agent_id)
    community = cluster.community
    agent_communities = agent.communities.all()
    # Only allow address edits when this community "owns" the agent.
    edit_address = True
    if agent_communities.count() > 1:
        edit_address = False
    if agent_communities[0].community.id != community.id:
        edit_address = False
    #import pdb; pdb.set_trace()
    agent.cluster_funs = agent.functions.filter(
        function__cluster=cluster)
    for cf in agent.cluster_funs:
        cf.rsrcs = cf.function.resources.all()
        if cf.rsrcs:
            # Function-level resources exist: offer per-resource forms
            # pre-bound to this agent's AgentFunction.
            for res in cf.rsrcs:
                agent_function = agent.functions.get(function=res.function)
                init = {"agent_function_id": agent_function.id,}
                res.agent_resource_form = AgentFunctionResourceForm(res, initial=init)
                res.agent_resource_list = res.function_resources_for_agent(agent)
        else:
            # No function-level resources: show the agent-function's own
            # resources plus any statistical outliers among them.
            cf.agent_resources = cf.function_resources.all()
            init = {"agent_function_id": cf.id,}
            cf.agent_resource_form = AgentFunctionResourceForm(initial=init)
            outliers = []
            candidates = cf.function_resources.all()
            for c in candidates:
                if c.is_outlier():
                    outliers.append(c)
            cf.outliers = outliers
    new_function_form = AgentFunctionCreationForm(prefix="function")
    # '~'-joined names feed client-side autocomplete widgets.
    resource_names = '~'.join([res.name for res in EconomicResourceType.objects.all()])
    used = [(af.function.id) for af in agent.functions.all()]
    function_names = '~'.join([fn.name for fn in cluster.functions.all().exclude(id__in=used)])
    template_params = agent_network_params(cluster, "qty")
    template_params["cluster"] = cluster
    template_params["agent"] = agent
    template_params["edit_address"] = edit_address
    template_params["cluster_funs"] = agent.cluster_funs
    template_params["new_function_form"] = new_function_form
    template_params["resource_names"] = resource_names
    template_params["function_names"] = function_names
    function_aspect_name = cluster.function_aspect_name
    resource_aspect_name = cluster.community.resource_aspect_name
    template_params["function_aspect_name"] = function_aspect_name
    template_params["resource_aspect_name"] = resource_aspect_name
    return render_to_response("clusters/edit_cluster_agent.html",
        template_params,
        context_instance=RequestContext(request))
@login_required
def edit_agent_address(request, cluster_id, agent_id):
    """Edit an agent's address plus its CommunityAgent group/region fields.

    The form is an EconomicAgentForm pre-seeded with the CommunityAgent's
    group and geographic fields; on a valid POST both the CommunityAgent
    and the agent itself are saved, then the view redirects to the cluster
    agents page.
    """
    agent = get_object_or_404(EconomicAgent, pk=agent_id)
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    ca = CommunityAgent.objects.get(community=community, agent=agent)
    area_name = community.agent_geographic_area_name
    map_center = "0, 0"
    if community.latitude and community.longitude:
        map_center = ",".join([str(community.latitude), str(community.longitude)])
    map_key = settings.GOOGLE_API_KEY
    zoom_level = 0
    if community.map_zoom_level:
        # Zoom out one level from the community's own zoom.
        zoom_level = community.map_zoom_level - 1
    init = {
        "group": ca.group,
        "geographic_area": ca.geographic_area,
        "region_latitude": ca.region_latitude,
        "region_longitude": ca.region_longitude,
    }
    agent_form = EconomicAgentForm(instance=agent, initial=init, data=request.POST or None)
    if request.method == "POST":
        if agent_form.is_valid():
            data = agent_form.cleaned_data
            ca.group = data["group"]
            if area_name:
                # Geographic fields only apply when the community names an area.
                ca.geographic_area = data["geographic_area"]
                ca.region_latitude = data["region_latitude"]
                ca.region_longitude = data["region_longitude"]
            ca.save()
            agent_form.save()
            return HttpResponseRedirect('/%s/%s/'
                % ('clusters/clusteragents', cluster_id))
    return render_to_response("clusters/edit_agent_address.html",{
        "cluster": cluster,
        "agent": agent,
        "agent_form": agent_form,
        "map_center": map_center,
        "map_key": map_key,
        "zoom_level": zoom_level,
        "area_name": area_name,
        }, context_instance=RequestContext(request))
@login_required
def edit_community_agent(request, cluster_id, agent_id):
    """Edit the CommunityAgent record linking an agent to this cluster's
    community."""
    agent = get_object_or_404(EconomicAgent, pk=agent_id)
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    community = cluster.community
    ca = CommunityAgent.objects.get(community=community, agent=agent)
    agent_form = EditCommunityAgentForm(instance=ca, data=request.POST or None)
    if request.method == "POST" and agent_form.is_valid():
        agent_form.save()
        return HttpResponseRedirect('/%s/%s/'
            % ('clusters/clusteragents', cluster_id))
    # Map defaults: center on the community when it has coordinates.
    map_center = "0, 0"
    if community.latitude and community.longitude:
        map_center = ",".join(
            [str(community.latitude), str(community.longitude)])
    zoom_level = 0
    if community.map_zoom_level:
        zoom_level = community.map_zoom_level - 1
    return render_to_response("clusters/edit_community_agent.html",{
        "cluster": cluster,
        "community": community,
        "community_agent": ca,
        "agent": agent,
        "agent_form": agent_form,
        "map_center": map_center,
        "map_key": settings.GOOGLE_API_KEY,
        "zoom_level": zoom_level,
        "area_name": community.agent_geographic_area_name,
        }, context_instance=RequestContext(request))
def json_agent_address(request, agent_name):
    """Return an agent's address as serialized JSON.

    The serializer needs an iterable, hence filter() rather than get().
    """
    qs = EconomicAgent.objects.filter(name=agent_name)
    payload = serializers.serialize("json", qs, fields=('address',))
    return HttpResponse(payload, mimetype="text/json-comment-filtered")
def json_resource_unit(request, name):
    """Return a resource type's unit of quantity as serialized JSON.

    The serializer needs an iterable, hence filter() rather than get().
    """
    qs = EconomicResourceType.objects.filter(name=name)
    payload = serializers.serialize("json", qs, fields=('unit_of_quantity',))
    return HttpResponse(payload, mimetype="text/json-comment-filtered")
def json_resource_aspect(request, name, community_id):
    """Return the community-specific aspect of a resource type as JSON.

    Fix: responds with 404 when no resource type matches *name*, instead of
    the previous unguarded ``erts[0]`` which raised IndexError (a 500).
    """
    community = get_object_or_404(Community, id=community_id)
    erts = EconomicResourceType.objects.filter(name=name)
    if not erts:
        raise Http404
    qs = CommunityResourceType.objects.filter(
        community=community, resource_type=erts[0])
    data = serializers.serialize("json", qs, fields=('aspect',))
    return HttpResponse(data, mimetype="text/json-comment-filtered")
def json_function_aspect(request, function_name, cluster_id):
    """Return the aspect of a cluster's function as serialized JSON."""
    cluster = get_object_or_404(Cluster, id=cluster_id)
    matches = EconomicFunction.objects.filter(
        cluster=cluster, name=function_name)
    payload = serializers.serialize("json", matches, fields=('aspect',))
    return HttpResponse(payload, mimetype="text/json-comment-filtered")
def change_function_resource_amount(request):
    """AJAX endpoint: update a FunctionResourceType's quantity."""
    frt = get_object_or_404(
        FunctionResourceType, pk=request.POST.get("id"))
    new_quantity = int(request.POST.get("quantity"))
    # Only hit the database when the value actually changed.
    if new_quantity != frt.quantity:
        frt.quantity = new_quantity
        frt.save()
    return HttpResponse("ok", mimetype="text/plain")
def change_function_resource_value(request):
    """AJAX endpoint: update a FunctionResourceType's value."""
    frt = get_object_or_404(
        FunctionResourceType, pk=request.POST.get("id"))
    new_value = int(request.POST.get("value"))
    # Only hit the database when the value actually changed.
    if new_value != frt.value:
        frt.value = new_value
        frt.save()
    return HttpResponse("ok", mimetype="text/plain")
def change_function_resource_price(request):
    """AJAX endpoint: update a FunctionResourceType's price (Decimal)."""
    frt = get_object_or_404(
        FunctionResourceType, pk=request.POST.get("id"))
    new_price = Decimal(request.POST.get("price"))
    # Only hit the database when the value actually changed.
    if new_price != frt.price:
        frt.price = new_price
        frt.save()
    return HttpResponse("ok", mimetype="text/plain")
def change_agent_function_resource_amount(request):
    """AJAX endpoint: update an AgentFunctionResourceType's quantity."""
    afrt = get_object_or_404(
        AgentFunctionResourceType, pk=request.POST.get("id"))
    new_quantity = int(request.POST.get("quantity"))
    # Only hit the database when the value actually changed.
    if new_quantity != afrt.quantity:
        afrt.quantity = new_quantity
        afrt.save()
    return HttpResponse("ok", mimetype="text/plain")
def change_agent_function_resource_value(request):
    """AJAX endpoint: update an AgentFunctionResourceType's value."""
    afrt = get_object_or_404(
        AgentFunctionResourceType, pk=request.POST.get("id"))
    new_value = int(request.POST.get("value"))
    # Only hit the database when the value actually changed.
    if new_value != afrt.value:
        afrt.value = new_value
        afrt.save()
    return HttpResponse("ok", mimetype="text/plain")
def change_agent_function_resource_price(request):
    """AJAX endpoint: update an AgentFunctionResourceType's price (Decimal)."""
    afrt = get_object_or_404(
        AgentFunctionResourceType, pk=request.POST.get("id"))
    new_price = Decimal(request.POST.get("price"))
    # Only hit the database when the value actually changed.
    if new_price != afrt.price:
        afrt.price = new_price
        afrt.save()
    return HttpResponse("ok", mimetype="text/plain")
def delete_function_resource(request, id):
    """Delete a FunctionResourceType, then return to its cluster's
    function-editing page.

    NOTE(review): deletes on any HTTP method and without login_required --
    confirm whether this should be POST-only and authenticated.
    """
    frt = get_object_or_404(FunctionResourceType, pk=id)
    owning_cluster = frt.function.cluster
    frt.delete()
    return HttpResponseRedirect('/%s/%s/'
        % ('clusters/editclusterfunctions', owning_cluster.id))
def delete_agent_function_resource(request, id):
    """Delete an AgentFunctionResourceType, then return to the owning
    agent's cluster-agent editing page.

    NOTE(review): deletes on any HTTP method and without login_required --
    confirm whether this should be POST-only and authenticated.
    """
    afrt = get_object_or_404(AgentFunctionResourceType, pk=id)
    owning_cluster = afrt.agent_function.function.cluster
    owning_agent = afrt.agent_function.agent
    afrt.delete()
    return HttpResponseRedirect('/%s/%s/%s/'
        % ('clusters/editclusteragent', owning_cluster.id, owning_agent.id))
@login_required
def inline_new_resource(request, cluster_id):
    """Create (or reuse) a resource type from an inline form and link it to
    the cluster's community, then redirect to the POST "next" URL.

    An existing EconomicResourceType with the posted name is reused; the
    CommunityResourceType link's aspect is updated when a different aspect
    was posted.

    NOTE(review): a GET request falls off the end and returns None --
    confirm this endpoint is only ever POSTed.
    """
    if request.method == "POST":
        next = request.POST.get("next")
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        form = EconomicResourceTypeForm(request.POST, prefix="resource")
        if form.is_valid():
            data = form.cleaned_data
            name = data['name']
            aspect = data['aspect']
            try:
                resource = EconomicResourceType.objects.get(name=name)
            except EconomicResourceType.DoesNotExist:
                resource = form.save()
            crt, created = CommunityResourceType.objects.get_or_create(
                community=cluster.community, resource_type=resource)
            if aspect:
                if aspect != crt.aspect:
                    crt.aspect = aspect
                    crt.save()
        return HttpResponseRedirect(next)
@login_required
def inline_agent_resource(request, cluster_id, agent_id, parent_id):
    """Attach a resource type to an AgentFunction from an inline form, then
    redirect to the cluster-agent editing page.

    parent_id of 0 means no parent resource type.  A new
    EconomicResourceType is created only when the posted name is unknown;
    the resource is then linked to the AgentFunction (with role, quantity,
    price) and to the cluster's community.
    """
    if request.method == "POST":
        agent = get_object_or_404(EconomicAgent, pk=agent_id)
        parent_id = int(parent_id)
        if parent_id:
            parent = get_object_or_404(EconomicResourceType, pk=parent_id)
        else:
            parent = None
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        form = AgentFunctionResourceForm(function_resource=None, data=request.POST)
        #import pdb; pdb.set_trace()
        if form.is_valid():
            data = form.cleaned_data
            name = data['name']
            role = data['role']
            quantity = data['quantity']
            price = data['price']
            agent_function_id = data['agent_function_id']
            new_resource = True
            #import pdb; pdb.set_trace()
            # NOTE(review): new_resource is already False after a successful
            # lookup, so the parent comparisons below never change the
            # outcome -- some branch may have been meant to set it True
            # (e.g. to create a child of `parent`); confirm intent.
            try:
                resource = EconomicResourceType.objects.get(name=name)
                new_resource = False
                if parent:
                    if resource.id == parent.id:
                        new_resource = False
                    elif resource.parent:
                        if resource.parent.id == parent.id or resource.is_child_of(parent):
                            new_resource = False
            except EconomicResourceType.DoesNotExist:
                pass
            if new_resource:
                resource = EconomicResourceType(name=name, parent=parent)
                resource.save()
            agent_function = AgentFunction.objects.get(id=agent_function_id)
            AgentFunctionResourceType(
                resource_type=resource,
                agent_function=agent_function,
                role=role,
                quantity=quantity,
                price=price).save()
            crt, created = CommunityResourceType.objects.get_or_create(community=cluster.community, resource_type=resource)
    return HttpResponseRedirect('/%s/%s/%s/'
        % ('clusters/editclusteragent', cluster_id, agent.id))
@login_required
def new_function_resource(request, function_id):
    """Attach a resource type to a function from a posted
    FunctionResourceTypeForm, then redirect to the function-editing page.

    NOTE(review): a GET request, or a POST with an invalid form, falls off
    the end and returns None -- confirm this endpoint is only ever POSTed
    with validated data.
    """
    if request.method == "POST":
        fun = get_object_or_404(EconomicFunction, pk=function_id)
        community = fun.cluster.community
        form = FunctionResourceTypeForm(community=community, data=request.POST)
        if form.is_valid():
            data = form.cleaned_data
            fr = form.save(commit=False)
            fr.function = fun
            fr.save()
            return HttpResponseRedirect('/%s/%s/'
                % ('clusters/editclusterfunctions', fun.cluster.id))
def fr_table(request, cluster_id, toggle="qty"):
    """Function/resource table with a qty/value/price toggle.

    Posting the toggle form redirects back to this view with the chosen
    toggle encoded in the URL.
    """
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    toggle_form = QuantityValueForm(
        initial={"toggle": toggle,},
        data=request.POST or None)
    if request.method == "POST" and toggle_form.is_valid():
        chosen = toggle_form.cleaned_data["toggle"]
        return HttpResponseRedirect('/%s/%s/%s/'
            % ('clusters/frtable', cluster_id, chosen))
    return render_to_response("clusters/fr_table.html",{
        "cluster": cluster,
        "frtable": function_resource_table(cluster, toggle),
        "toggle_form": toggle_form,
        }, context_instance=RequestContext(request))
def send_email(request):
    """Contact form: mails the site maintainer with a [locecon] subject."""
    if request.method == "POST":
        email_form = EmailForm(request.POST)
        if email_form.is_valid():
            data = email_form.cleaned_data
            subject = " ".join(["[locecon]", data["subject"]])
            send_mail(
                subject,
                data["message"],
                data["email_address"],
                ["bob.haugen@gmail.com",])
            return HttpResponseRedirect(reverse("email_sent"))
        # Invalid POST falls through and re-renders the bound form.
    else:
        email_form = EmailForm()
    return render_to_response("clusters/send_email.html", {
        "email_form": email_form,
        })
def object_filter(request, cluster_id=None, model=None, queryset=None, template_name=None, extra_context=None,
    context_processors=None, filter_class=None, page_length=None, page_variable="p"):
    """Generic filtered-list view built around django-filter's FilterSet.

    Either *model* or *filter_class* must be given; when only a model is
    supplied a trivial FilterSet class is synthesized for it.  When
    *cluster_id* is given the queryset comes from
    filter_class.queryset(cluster).  Optional pagination via *page_length*
    with the page number read from GET parameter *page_variable*.
    Raises TypeError when neither model nor filter_class is given, and
    Http404 for an out-of-range page.
    """
    #import pdb; pdb.set_trace()
    if cluster_id:
        cluster = get_object_or_404(Cluster, pk=cluster_id)
        queryset = filter_class.queryset(cluster)
    if model is None and filter_class is None:
        raise TypeError("object_filter must be called with either model or filter_class")
    if model is None:
        model = filter_class._meta.model
    if filter_class is None:
        # Synthesize "<Model>FilterSet" with just a Meta.model attribute.
        meta = type('Meta', (object,), {'model': model})
        filter_class = type('%sFilterSet' % model._meta.object_name, (FilterSet,),
            {'Meta': meta})
    #import pdb; pdb.set_trace()
    filterset = filter_class(request.GET or None, queryset=queryset)
    if not template_name:
        template_name = '%s/%s_filter.html' % (model._meta.app_label, model._meta.object_name.lower())
    c = RequestContext(request, {
        'filter': filterset,
    })
    if extra_context:
        # Callables in extra_context are evaluated lazily, per request.
        for k, v in extra_context.iteritems():
            if callable(v):
                v = v()
            c[k] = v
    if page_length:
        from django.core.paginator import Paginator
        p = Paginator(filterset.qs,page_length)
        # Preserve the other GET parameters in pagination links, minus the
        # page number itself.
        getvars = request.GET.copy()
        if page_variable in getvars:
            del getvars[page_variable]
        if len(getvars.keys()) > 0:
            p.querystring = "&%s" % getvars.urlencode()
        try:
            c['paginated_filter'] = p.page(request.GET.get(page_variable,1))
        except EmptyPage:
            raise Http404
        c['paginator'] = p
    return render_to_response(template_name, c)
def diagrams(request, cluster_id):
    """Landing page listing the diagrams available for a cluster."""
    return render_to_response(
        "clusters/diagrams.html",
        {"cluster": get_object_or_404(Cluster, pk=cluster_id)},
        context_instance=RequestContext(request))
def reports(request, cluster_id):
    """Landing page listing the reports available for a cluster."""
    return render_to_response(
        "clusters/reports.html",
        {"cluster": get_object_or_404(Cluster, pk=cluster_id)},
        context_instance=RequestContext(request))
def value_added_report(request, cluster_id):
    """Value-added report: pick a starting function (and an optional
    resource-name filter) and show the resulting rows."""
    cluster = get_object_or_404(Cluster, pk=cluster_id)
    form = ValueAddedSelectionForm(cluster=cluster, data=request.POST or None)
    rows = []
    if request.method == "POST" and form.is_valid():
        data = form.cleaned_data
        start = EconomicFunction.objects.get(pk=data["starting_function"])
        rows = cluster.value_added_rows(
            start, data["resource_name_contains"] or None)
    return render_to_response("clusters/value_added.html",{
        "cluster": cluster,
        "form": form,
        "resource_aspect_name": cluster.community.resource_aspect_name,
        "rows": rows,
        }, context_instance=RequestContext(request))
|
#!/usr/bin/python
# Copyright 2016 The ci_edit Authors. All rights reserved.
# Use of this source code is governed by an Apache-style license that can be
# found in the LICENSE file.
import app.curses_util
import app.log
import app.text_buffer
import app.window
import sys
import curses
import time
import traceback
class CiProgram:
"""This is the main editor program. It holds top level information and runs
the main loop. The CiProgram is intended as a singleton."""
  def __init__(self, stdscr):
    """Set up buffers, mouse handling and terminal modes for the editor.

    stdscr is the curses standard screen.  Window construction is deferred
    to startup(), which runs after command line parsing.
    """
    self.bufferManager = app.text_buffer.BufferManager(self)
    self.exiting = False
    self.modeStack = []
    # Timestamp of the previous mouse click, for double/triple detection.
    self.priorClick = 0
    self.savedMouseX = -1
    self.savedMouseY = -1
    self.stdscr = stdscr
    # Most recent input character/key code (shown in the debug window).
    self.ch = 0
    # Report all mouse events, with no click-resolution delay.
    curses.mousemask(-1)
    curses.mouseinterval(0)
    # Enable mouse tracking in xterm.
    print '\033[?1002;h'
    #print '\033[?1005;h'
    curses.meta(1)
    # Access ^c before shell does.
    curses.raw()
    #curses.start_color()
    curses.use_default_colors()
    if 0:
      # Disabled color-debugging block: dumps and tweaks the palette.
      assert(curses.COLORS == 256)
      assert(curses.can_change_color() == 1)
      assert(curses.has_colors() == 1)
      app.log.detail("color_content:")
      for i in range(0, curses.COLORS):
        app.log.detail("color", i, ": ", curses.color_content(i))
      for i in range(16, curses.COLORS):
        curses.init_color(i, 500, 500, i*787%1000)
      app.log.detail("color_content, after:")
      for i in range(0, curses.COLORS):
        app.log.detail("color", i, ": ", curses.color_content(i))
    self.showPalette = 0
    self.shiftPalette()
    self.zOrder = []
  def startup(self):
    """A second init-like function. Called after command line arguments are
    parsed.

    Builds the optional debug/log windows, the palette window and the main
    input window, then lays them all out.
    """
    if self.showLogWindow:
      self.debugWindow = app.window.StaticWindow(self)
      self.zOrder += [self.debugWindow]
      self.logWindow = app.window.LogWindow(self)
      #self.zOrder += [self.logWindow]
    else:
      self.debugWindow = None
      self.logWindow = None
      # Overwritten unconditionally just below.
      self.paletteWindow = None
    self.paletteWindow = app.window.PaletteWindow(self)
    self.inputWindow = app.window.InputWindow(self, 10, 10, 0, 0, True, True,
        True)
    self.layout()
def layout(self):
"""Arrange the debug, log, and input windows."""
rows, cols = self.stdscr.getmaxyx()
#app.log.detail('layout', rows, cols)
if self.showLogWindow:
inputWidth = min(78, cols)
debugWidth = max(cols-inputWidth-1, 0)
debugRows = 15
self.debugWindow.reshape(debugRows, debugWidth, 0,
inputWidth+1)
self.logWindow.reshape(rows-debugRows, debugWidth, debugRows,
inputWidth+1)
else:
inputWidth = cols
self.inputWindow.reshape(rows, inputWidth, 0, 0)
def debugDraw(self, win):
"""Draw real-time debug information to the screen."""
if not self.debugWindow:
return
textBuffer = win.textBuffer
y, x = win.cursorWindow.getyx()
maxy, maxx = win.cursorWindow.getmaxyx()
self.debugWindow.writeLineRow = 0
self.debugWindow.writeLine(
" cRow %3d cCol %2d goalCol %2d"
%(textBuffer.cursorRow, textBuffer.cursorCol,
textBuffer.goalCol), self.debugWindow.color)
self.debugWindow.writeLine(
" mkrRow %3d mkrCol %2d sm %d"
%(textBuffer.markerRow, textBuffer.markerCol,
textBuffer.selectionMode),
self.debugWindow.color)
self.debugWindow.writeLine(
"scrlRow %3d scrlCol %2d lines %3d"
%(textBuffer.scrollRow, textBuffer.scrollCol,
len(textBuffer.lines)),
self.debugWindow.color)
self.debugWindow.writeLine(
"y %2d x %2d maxy %d maxx %d baud %d color %d"
%(y, x, maxy, maxx, curses.baudrate(), curses.can_change_color()),
self.debugWindow.color)
self.debugWindow.writeLine(
"ch %3s %s"
%(self.ch, app.curses_util.cursesKeyName(self.ch)),
self.debugWindow.color)
self.debugWindow.writeLine("win %r"%(win,),
self.debugWindow.color)
self.debugWindow.writeLine("tb %r"%(textBuffer,),
self.debugWindow.color)
try:
(id, mousex, mousey, mousez, bstate) = curses.getmouse()
self.debugWindow.writeLine(
"mouse id %d, mousex %d, mousey %d, mousez %d"
%(id, mousex, mousey, mousez),
self.debugWindow.color)
self.debugWindow.writeLine(
"bstate %s %d"
%(app.curses_util.mouseButtonName(bstate), bstate),
self.debugWindow.color)
except curses.error:
self.debugWindow.writeLine("mouse is not available.",
self.debugWindow.color)
# Display some of the redo chain.
self.debugWindow.writeLine(
"redoIndex %3d savedAt %3d depth %3d"
%(textBuffer.redoIndex, textBuffer.savedAtRedoIndex,
len(textBuffer.redoChain)),
self.debugWindow.color+100)
lenChain = textBuffer.redoIndex
for i in range(textBuffer.redoIndex-3, textBuffer.redoIndex):
text = i >= 0 and textBuffer.redoChain[i] or ''
self.debugWindow.writeLine(text, 101)
for i in range(textBuffer.redoIndex, textBuffer.redoIndex+2):
text = i < len(textBuffer.redoChain) and textBuffer.redoChain[i] or ''
self.debugWindow.writeLine(text, 1)
# Refresh the display.
self.debugWindow.cursorWindow.refresh()
def debugWindowOrder(self):
    """Log the stacked window hierarchy (z-order), depth-first, for debugging."""
    self.log('debugWindowOrder')
    def recurse(windows, indent):
        # |windows| was previously named 'list', shadowing the builtin.
        for child in windows:
            self.log(indent, child)
            recurse(child.zOrder, indent + '  ')
    recurse(self.zOrder, '  ')
    self.log('top window', self.topWindow())
def topWindow(self):
    """Return the frontmost window: follow the last child of each zOrder
    list until a window with no children is reached."""
    node = self
    while node.zOrder:
        node = node.zOrder[-1]
    return node
def clickedNearby(self, row, col):
    """Whether (row, col) is within one cell of the prior click position."""
    priorRow, priorCol = self.priorClickRowCol
    rowIsNear = abs(row - priorRow) <= 1
    colIsNear = abs(col - priorCol) <= 1
    return rowIsNear and colIsNear
def handleMouse(self):
    """Mouse handling is a special case. The getch() curses function will
    signal the existence of a mouse event, but the event must be fetched and
    parsed separately."""
    (id, mousex, mousey, mousez, bstate) = curses.getmouse()
    # Two BUTTON1 presses within this many seconds count toward a
    # double/triple click.
    rapidClickTimeout = .5
    # Search front-to-back (last zOrder entry is frontmost) for the window
    # under the mouse.
    for i in reversed(self.zOrder):
        if i.contains(mousey, mousex):
            # Translate to window-relative coordinates.
            mousey -= i.top
            mousex -= i.left
            #self.log('bstate', app.curses_util.mouseButtonName(bstate))
            if bstate & curses.BUTTON1_RELEASED:
                # Suppress the release event while a rapid click sequence is
                # still in progress.
                if self.priorClick + rapidClickTimeout <= time.time():
                    i.mouseRelease(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                        bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
            elif bstate & curses.BUTTON1_PRESSED:
                if (self.priorClick + rapidClickTimeout > time.time() and
                        self.clickedNearby(mousey, mousex)):
                    # A rapid click near the prior one: double or triple click.
                    self.clicks += 1
                    self.priorClick = time.time()
                    if self.clicks == 2:
                        i.mouseDoubleClick(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                            bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                    else:
                        i.mouseTripleClick(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                            bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                        # Reset so a fourth rapid click starts a new double.
                        self.clicks = 1
                else:
                    # A fresh single click; remember where it landed.
                    self.clicks = 1
                    self.priorClick = time.time()
                    self.priorClickRowCol = (mousey, mousex)
                    i.mouseClick(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                        bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
            elif bstate & curses.BUTTON2_PRESSED:
                i.mouseWheelUp(bstate&curses.BUTTON_SHIFT,
                    bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
            elif bstate & curses.BUTTON4_PRESSED:
                # NOTE(review): treated as wheel-down only when the position is
                # unchanged, otherwise as movement -- terminal-specific
                # behavior, confirm against target terminals.
                if self.savedMouseX == mousex and self.savedMouseY == mousey:
                    i.mouseWheelDown(bstate&curses.BUTTON_SHIFT,
                        bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                else:
                    i.mouseMoved(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                        bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
            elif bstate & curses.REPORT_MOUSE_POSITION:
                #self.log('REPORT_MOUSE_POSITION')
                if self.savedMouseX == mousex and self.savedMouseY == mousey:
                    # This is a hack for dtterm on Mac OS X.
                    i.mouseWheelUp(bstate&curses.BUTTON_SHIFT,
                        bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                else:
                    i.mouseMoved(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                        bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
            else:
                # Unrecognized button state; log it for diagnosis.
                self.log('got bstate', app.curses_util.mouseButtonName(bstate), bstate)
            self.savedMouseX = mousex
            self.savedMouseY = mousey
            return
    self.log('click landed on screen')
def handleScreenResize(self):
    """React to a terminal resize by recomputing the window layout."""
    self.log('handleScreenResize')
    self.layout()
def log(self, *args):
    """Log text to the logging window (for debugging).

    Silently does nothing when the log window is disabled.
    """
    if self.logWindow:
        app.log.info(*args)
def parseArgs(self):
    """Interpret the command line arguments."""
    self.debugRedo = False
    self.showLogWindow = False
    self.cliFiles = []
    takeAll = False
    for arg in sys.argv[1:]:
        # Anything after a bare '--', or not starting with '--', is a file path.
        if takeAll or not arg.startswith('--'):
            self.cliFiles.append({'path': arg})
            continue
        if arg == '--debugRedo':
            self.debugRedo = True
        elif arg == '--log':
            self.showLogWindow = True
        elif arg in ('--logDetail', '--p'):
            app.log.shouldWritePrintLog = True
        elif arg == '--':
            # All remaining args are file paths.
            takeAll = True
        # Unknown '--' flags are silently ignored, as before.
def quit(self):
    """Set the intent to exit the program. The actual exit will occur a bit
    later."""
    app.log.info('self.exiting = True')
    # The main loop in run() polls this flag at the top of each iteration.
    self.exiting = True
def refresh(self):
    """Repaint stacked windows, furthest to nearest."""
    for window in self.zOrder:
        window.refresh()
    if self.showLogWindow:
        # Refresh the log window last to pick up whole command loop output.
        self.logWindow.refresh()
def run(self):
    """Main entry point: parse flags, build the windows, then run the event
    loop until quit() sets self.exiting."""
    self.parseArgs()
    self.startup()
    self.changeTo = self.inputWindow
    while not self.exiting:
        win = self.changeTo
        self.changeTo = None
        win.refresh()
        # focus() runs the window's input handling; presumably it assigns
        # self.changeTo before returning -- TODO confirm, otherwise the next
        # iteration would operate on None.
        win.focus()
        win.unfocus()
def shiftPalette(self):
    """Test different palette options. Each call to shiftPalette will change the
    palette to the next one in the ring of palettes.

    Cycles self.showPalette through 0, 1, 2 and re-initializes every curses
    color pair accordingly.
    """
    self.showPalette = (self.showPalette+1)%3
    if self.showPalette == 1:
        # Hand-picked foreground colors over a small set of light backgrounds.
        dark = [
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
            94, 134, 18, 240, 138, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 57,
        ]
        light = [-1, 230, 147, 221, 255, 254, 253, 14]
        for i in range(1, curses.COLORS):
            # Use floor division: the original 'i/32' relied on Python 2
            # integer division; '//' is identical there and forward-compatible
            # (under Python 3, '/' would produce a float index and raise).
            curses.init_pair(i, dark[i%len(dark)], light[i//32])
    elif self.showPalette == 2:
        # Every foreground color over a near-white background.
        for i in range(1, curses.COLORS):
            curses.init_pair(i, i, 231)
    else:
        # Default palette: dark foreground over each background color.
        for i in range(1, curses.COLORS):
            curses.init_pair(i, 16, i)
def wrapped_ci(stdscr):
    """Body run inside curses.wrapper(): build the program, run it, and log
    any uncaught exception (the curses screen would otherwise swallow the
    traceback)."""
    try:
        prg = CiProgram(stdscr)
        prg.run()
    except Exception:
        # The original used the Python-2-only 'except Exception, e' syntax
        # and never read 'e'; the exception details come from sys.exc_info().
        errorType, value, tb = sys.exc_info()
        out = traceback.format_exception(errorType, value, tb)
        for line in out:
            # Strip the trailing newline from each formatted line.
            app.log.error(line[:-1])
def run_ci():
    """Top-level entry point: run the editor under curses.wrapper() and make
    sure the log is flushed even if the program dies."""
    try:
        curses.wrapper(wrapped_ci)
    finally:
        app.log.flush()
if __name__ == '__main__':
    # Start the editor only when executed directly, not on import.
    run_ci()
update log sooner to control cursor
#!/usr/bin/python
# Copyright 2016 The ci_edit Authors. All rights reserved.
# Use of this source code is governed by an Apache-style license that can be
# found in the LICENSE file.
import app.curses_util
import app.log
import app.text_buffer
import app.window
import sys
import curses
import time
import traceback
class CiProgram:
    """This is the main editor program. It holds top level information and runs
    the main loop. The CiProgram is intended as a singleton."""

    def __init__(self, stdscr):
        # |stdscr| is the curses standard screen from curses.wrapper().
        self.bufferManager = app.text_buffer.BufferManager(self)
        self.exiting = False
        self.modeStack = []
        self.priorClick = 0
        self.savedMouseX = -1
        self.savedMouseY = -1
        self.stdscr = stdscr
        self.ch = 0
        # Report all mouse events, with no merge delay between press/release.
        curses.mousemask(-1)
        curses.mouseinterval(0)
        # Enable mouse tracking in xterm.
        print '\033[?1002;h'
        #print '\033[?1005;h'
        curses.meta(1)
        # Access ^c before shell does.
        curses.raw()
        #curses.start_color()
        curses.use_default_colors()
        # Dead code (if 0) kept around for palette debugging experiments.
        if 0:
            assert(curses.COLORS == 256)
            assert(curses.can_change_color() == 1)
            assert(curses.has_colors() == 1)
            app.log.detail("color_content:")
            for i in range(0, curses.COLORS):
                app.log.detail("color", i, ": ", curses.color_content(i))
            for i in range(16, curses.COLORS):
                curses.init_color(i, 500, 500, i*787%1000)
            app.log.detail("color_content, after:")
            for i in range(0, curses.COLORS):
                app.log.detail("color", i, ": ", curses.color_content(i))
        self.showPalette = 0
        self.shiftPalette()
        self.zOrder = []

    def startup(self):
        """A second init-like function. Called after command line arguments are
        parsed."""
        if self.showLogWindow:
            self.debugWindow = app.window.StaticWindow(self)
            self.zOrder += [self.debugWindow]
            self.logWindow = app.window.LogWindow(self)
            #self.zOrder += [self.logWindow]
        else:
            self.debugWindow = None
            self.logWindow = None
            # NOTE(review): paletteWindow is unconditionally reassigned just
            # below, so this assignment appears redundant.
            self.paletteWindow = None
        self.paletteWindow = app.window.PaletteWindow(self)
        self.inputWindow = app.window.InputWindow(self, 10, 10, 0, 0, True, True,
            True)
        self.layout()

    def layout(self):
        """Arrange the debug, log, and input windows."""
        rows, cols = self.stdscr.getmaxyx()
        #app.log.detail('layout', rows, cols)
        if self.showLogWindow:
            inputWidth = min(78, cols)
            debugWidth = max(cols-inputWidth-1, 0)
            debugRows = 15
            # The debug and log panes sit to the right of the input window.
            self.debugWindow.reshape(debugRows, debugWidth, 0,
                inputWidth+1)
            self.logWindow.reshape(rows-debugRows, debugWidth, debugRows,
                inputWidth+1)
        else:
            inputWidth = cols
        self.inputWindow.reshape(rows, inputWidth, 0, 0)

    def debugDraw(self, win):
        """Draw real-time debug information to the screen."""
        if not self.debugWindow:
            return
        textBuffer = win.textBuffer
        y, x = win.cursorWindow.getyx()
        maxy, maxx = win.cursorWindow.getmaxyx()
        self.debugWindow.writeLineRow = 0
        # Cursor position and the column the cursor 'wants' to return to.
        self.debugWindow.writeLine(
            " cRow %3d cCol %2d goalCol %2d"
            %(textBuffer.cursorRow, textBuffer.cursorCol,
            textBuffer.goalCol), self.debugWindow.color)
        # Selection marker position and selection mode.
        self.debugWindow.writeLine(
            " mkrRow %3d mkrCol %2d sm %d"
            %(textBuffer.markerRow, textBuffer.markerCol,
            textBuffer.selectionMode),
            self.debugWindow.color)
        self.debugWindow.writeLine(
            "scrlRow %3d scrlCol %2d lines %3d"
            %(textBuffer.scrollRow, textBuffer.scrollCol,
            len(textBuffer.lines)),
            self.debugWindow.color)
        self.debugWindow.writeLine(
            "y %2d x %2d maxy %d maxx %d baud %d color %d"
            %(y, x, maxy, maxx, curses.baudrate(), curses.can_change_color()),
            self.debugWindow.color)
        # The last keyboard input, raw and symbolic.
        self.debugWindow.writeLine(
            "ch %3s %s"
            %(self.ch, app.curses_util.cursesKeyName(self.ch)),
            self.debugWindow.color)
        self.debugWindow.writeLine("win %r"%(win,),
            self.debugWindow.color)
        self.debugWindow.writeLine("tb %r"%(textBuffer,),
            self.debugWindow.color)
        try:
            (id, mousex, mousey, mousez, bstate) = curses.getmouse()
            self.debugWindow.writeLine(
                "mouse id %d, mousex %d, mousey %d, mousez %d"
                %(id, mousex, mousey, mousez),
                self.debugWindow.color)
            self.debugWindow.writeLine(
                "bstate %s %d"
                %(app.curses_util.mouseButtonName(bstate), bstate),
                self.debugWindow.color)
        except curses.error:
            self.debugWindow.writeLine("mouse is not available.",
                self.debugWindow.color)
        # Display some of the redo chain.
        self.debugWindow.writeLine(
            "redoIndex %3d savedAt %3d depth %3d"
            %(textBuffer.redoIndex, textBuffer.savedAtRedoIndex,
            len(textBuffer.redoChain)),
            self.debugWindow.color+100)
        # NOTE(review): lenChain is assigned but never used below.
        lenChain = textBuffer.redoIndex
        for i in range(textBuffer.redoIndex-3, textBuffer.redoIndex):
            text = i >= 0 and textBuffer.redoChain[i] or ''
            self.debugWindow.writeLine(text, 101)
        for i in range(textBuffer.redoIndex, textBuffer.redoIndex+2):
            text = i < len(textBuffer.redoChain) and textBuffer.redoChain[i] or ''
            self.debugWindow.writeLine(text, 1)
        # Refresh the display.
        self.debugWindow.cursorWindow.refresh()

    def debugWindowOrder(self):
        """Log the stacked window hierarchy (z-order) for debugging."""
        self.log('debugWindowOrder')
        def recurse(list, indent):
            # NOTE(review): parameter shadows the builtin 'list'.
            for i in list:
                self.log(indent, i)
                recurse(i.zOrder, indent+'  ')
        recurse(self.zOrder, '  ')
        self.log('top window', self.topWindow())

    def topWindow(self):
        """Return the frontmost window: the deepest last child in the z-order."""
        top = self
        while len(top.zOrder):
            top = top.zOrder[-1]
        return top

    def clickedNearby(self, row, col):
        """Whether (row, col) is within one cell of the prior click position."""
        y, x = self.priorClickRowCol
        return y-1 <= row <= y+1 and x-1 <= col <= x+1

    def handleMouse(self):
        """Mouse handling is a special case. The getch() curses function will
        signal the existence of a mouse event, but the event must be fetched and
        parsed separately."""
        (id, mousex, mousey, mousez, bstate) = curses.getmouse()
        # Two BUTTON1 presses within this many seconds count toward a
        # double/triple click.
        rapidClickTimeout = .5
        # Search front-to-back for the window under the mouse.
        for i in reversed(self.zOrder):
            if i.contains(mousey, mousex):
                # Translate to window-relative coordinates.
                mousey -= i.top
                mousex -= i.left
                #self.log('bstate', app.curses_util.mouseButtonName(bstate))
                if bstate & curses.BUTTON1_RELEASED:
                    # Suppress the release while a rapid click sequence runs.
                    if self.priorClick + rapidClickTimeout <= time.time():
                        i.mouseRelease(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                            bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                elif bstate & curses.BUTTON1_PRESSED:
                    if (self.priorClick + rapidClickTimeout > time.time() and
                            self.clickedNearby(mousey, mousex)):
                        # A rapid click near the prior one: double/triple click.
                        self.clicks += 1
                        self.priorClick = time.time()
                        if self.clicks == 2:
                            i.mouseDoubleClick(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                                bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                        else:
                            i.mouseTripleClick(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                                bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                            # Reset so the next rapid click starts a new double.
                            self.clicks = 1
                    else:
                        # A fresh single click; remember where it landed.
                        self.clicks = 1
                        self.priorClick = time.time()
                        self.priorClickRowCol = (mousey, mousex)
                        i.mouseClick(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                            bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                elif bstate & curses.BUTTON2_PRESSED:
                    i.mouseWheelUp(bstate&curses.BUTTON_SHIFT,
                        bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                elif bstate & curses.BUTTON4_PRESSED:
                    # NOTE(review): wheel-down only when position unchanged,
                    # otherwise movement -- terminal specific; confirm.
                    if self.savedMouseX == mousex and self.savedMouseY == mousey:
                        i.mouseWheelDown(bstate&curses.BUTTON_SHIFT,
                            bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                    else:
                        i.mouseMoved(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                            bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                elif bstate & curses.REPORT_MOUSE_POSITION:
                    #self.log('REPORT_MOUSE_POSITION')
                    if self.savedMouseX == mousex and self.savedMouseY == mousey:
                        # This is a hack for dtterm on Mac OS X.
                        i.mouseWheelUp(bstate&curses.BUTTON_SHIFT,
                            bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                    else:
                        i.mouseMoved(mousey, mousex, bstate&curses.BUTTON_SHIFT,
                            bstate&curses.BUTTON_CTRL, bstate&curses.BUTTON_ALT)
                else:
                    # Unrecognized button state; log it for diagnosis.
                    self.log('got bstate', app.curses_util.mouseButtonName(bstate), bstate)
                self.savedMouseX = mousex
                self.savedMouseY = mousey
                return
        self.log('click landed on screen')

    def handleScreenResize(self):
        """React to a terminal resize by recomputing the window layout."""
        self.log('handleScreenResize')
        self.layout()

    def log(self, *args):
        """Log text to the logging window (for debugging)."""
        if not self.logWindow:
            return
        app.log.info(*args)

    def parseArgs(self):
        """Interpret the command line arguments."""
        self.debugRedo = False
        self.showLogWindow = False
        self.cliFiles = []
        takeAll = False
        for i in sys.argv[1:]:
            if not takeAll and i[:2] == '--':
                # Flag arguments; unknown '--' flags are silently ignored.
                self.debugRedo = self.debugRedo or i == '--debugRedo'
                self.showLogWindow = self.showLogWindow or i == '--log'
                app.log.shouldWritePrintLog = app.log.shouldWritePrintLog or i == '--logDetail'
                app.log.shouldWritePrintLog = app.log.shouldWritePrintLog or i == '--p'
                if i == '--':
                    # All remaining args are file paths.
                    takeAll = True
                continue
            self.cliFiles.append({'path': i})

    def quit(self):
        """Set the intent to exit the program. The actual exit will occur a bit
        later."""
        app.log.info('self.exiting = True')
        # run() polls this flag at the top of its loop.
        self.exiting = True

    def refresh(self):
        """Repaint stacked windows, furthest to nearest."""
        if self.showLogWindow:
            # Refresh the log window first so that the later window refreshes
            # determine the final cursor position.
            self.logWindow.refresh()
        for i,k in enumerate(self.zOrder):
            #self.log("[[%d]] %r"%(i, k))
            k.refresh()

    def run(self):
        """Main loop: parse flags, build windows, and process input until
        quit() sets self.exiting."""
        self.parseArgs()
        self.startup()
        self.changeTo = self.inputWindow
        while not self.exiting:
            win = self.changeTo
            self.changeTo = None
            win.refresh()
            # focus() runs the window's input handling; presumably it assigns
            # self.changeTo before returning -- TODO confirm.
            win.focus()
            win.unfocus()

    def shiftPalette(self):
        """Test different palette options. Each call to shiftPalette will change the
        palette to the next one in the ring of palettes."""
        self.showPalette = (self.showPalette+1)%3
        if self.showPalette == 1:
            dark = [
                0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
                94, 134, 18, 240, 138, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 57,
            ]
            light = [-1, 230, 147, 221, 255, 254, 253, 14]
            for i in range(1, curses.COLORS):
                # NOTE(review): 'i/32' relies on Python 2 integer division;
                # under Python 3 it would produce a float index.
                curses.init_pair(i, dark[i%len(dark)], light[i/32])
        elif self.showPalette == 2:
            # Every foreground color over a near-white background.
            for i in range(1, curses.COLORS):
                curses.init_pair(i, i, 231)
        else:
            # Default palette: dark foreground over each background color.
            for i in range(1, curses.COLORS):
                curses.init_pair(i, 16, i)
def wrapped_ci(stdscr):
    """Body run inside curses.wrapper(): build the program, run it, and log
    any uncaught exception (the curses screen would otherwise swallow the
    traceback)."""
    try:
        prg = CiProgram(stdscr)
        prg.run()
    except Exception:
        # The original used the Python-2-only 'except Exception, e' syntax
        # and never read 'e'; the exception details come from sys.exc_info().
        errorType, value, tb = sys.exc_info()
        out = traceback.format_exception(errorType, value, tb)
        for line in out:
            # Strip the trailing newline from each formatted line.
            app.log.error(line[:-1])
def run_ci():
    """Top-level entry point: run the editor under curses.wrapper() and make
    sure the log is flushed even if the program dies."""
    try:
        curses.wrapper(wrapped_ci)
    finally:
        app.log.flush()
if __name__ == '__main__':
    # Start the editor only when executed directly, not on import.
    run_ci()
|
from django.conf import settings
from tempfile import mkdtemp, mkstemp
import os
import subprocess
import shutil
import badges.models
class BadgeCreatorError(Exception):
    """Raised when badge PDF generation fails.

    Carries the human-readable message in ``value`` and, when available,
    the raw LaTeX compiler output in ``latex_output``.
    """

    def __init__(self, value, latex_output=None):
        super().__init__(value, latex_output)
        self.value = value
        self.latex_output = latex_output

    def __str__(self):
        return repr(self.value)
class BadgeCreator:
    """Renders helper badges to a PDF by filling a LaTeX template.

    Usage: construct with a badge settings object, call add_badge() once per
    badge, then generate() to run pdflatex, and finally finish() to remove
    the temporary working directory.
    """

    def __init__(self, badgesettings):
        self.settings = badgesettings

        self.columns = self.settings.columns
        self.rows = self.settings.rows

        # list of badges (dict with attributes)
        self.badges = []

        # create temporary files
        self.dir = mkdtemp(dir=settings.TMP_ROOT, prefix="badges_")
        self.latex_file, self.latex_file_path = mkstemp(suffix=".tex", dir=self.dir)

        # we copy the photos and background images to the temporary directory
        # pdflatex is only allowed to include files from there
        self.dir_photos = os.path.join(self.dir, "photos")
        os.mkdir(self.dir_photos, mode=0o700)
        self.dir_backgrounds = os.path.join(self.dir, "backgrounds")
        os.mkdir(self.dir_backgrounds, mode=0o700)

        # prevent that the same file is copied multiple times
        self._copied_files = []

    def add_badge(self, badge):
        """Collect the LaTeX key/value data for one badge."""
        design = badge.get_design()
        role = badge.get_role()

        tmp = {
            # texts
            "firstname": self._latex_escape(badge.get_firstname_text()),
            "surname": self._latex_escape(badge.get_surname_text()),
            "job": self._latex_escape(badge.get_job_text()),
            "shift": self._latex_escape(badge.get_shift_text(self.settings)),
            "role": self._latex_escape(badge.get_role_text(self.settings)),
            "photo": "",  # filled later
            "fontcolor": self._latex_color(design.font_color),
            "bgcolor": self._latex_color(design.bg_color),
            "bgfront": "",  # filled later
            "bgback": "",  # filled later
            "id": "",  # filled later (= barcode)
            "roleid": role.latex_name,
        }

        # copy photo
        if badge.photo:
            tmp["photo"] = self._copy_photo(badge.photo.path)

        # design
        if design.bg_front:
            tmp["bgfront"] = self._copy_background(design.bg_front.path)
        if design.bg_back:
            tmp["bgback"] = self._copy_background(design.bg_back.path)

        # badge id
        if self.settings.barcodes:
            tmp["id"] = "%010d" % badge.barcode

        # permissions: emit a true/false flag for every permission defined in
        # the badge settings
        all_permissions = badges.models.BadgePermission.objects.filter(badge_settings=self.settings.pk).all()
        selected_permissions = role.permissions
        for perm in all_permissions:
            if selected_permissions.filter(pk=perm.pk).exists():
                tmp["perm-%s" % perm.latex_name] = "true"
            else:
                tmp["perm-%s" % perm.latex_name] = "false"

        self.badges.append(tmp)

    def generate(self):
        """Fill the template, run pdflatex, and return (dir, pdf_path).

        :raise BadgeCreatorError: if the template cannot be read, the tex file
            cannot be written, or pdflatex exits with an error.
        """
        latex_code = self._get_latex()

        # read template
        try:
            f = self.settings.latex_template
            f.open("r")
            template = f.read()
            f.close()
        except IOError as e:
            raise BadgeCreatorError('Cannot open file "%s": %s' % (self.settings.latex_template.path, str(e)))

        # replace '%BADGEDATA%'
        latex = template.replace("%BADGEDATA%", latex_code)

        # write code
        try:
            f = os.fdopen(self.latex_file, "w")
            f.write(latex)
            f.close()
        except IOError as e:
            raise BadgeCreatorError('Cannot write to file "%s": %s' % (self.latex_file_path, str(e)))

        # debug
        if settings.BADGE_TEMPLATE_DEBUG_FILE:
            shutil.copyfile(self.latex_file_path, settings.BADGE_TEMPLATE_DEBUG_FILE)

        # call pdflatex
        try:
            # Restrict pdflatex to reading/writing inside the tex directory.
            # BUGFIX: work on a copy of the environment instead of mutating
            # os.environ (which permanently leaked these settings into our own
            # process), and pass it explicitly to the child process.
            env = os.environ.copy()
            env["openin_any"] = "p"
            env["openout_any"] = "p"
            env["TEXMFOUTPUT"] = self.dir

            subprocess.check_output(
                [
                    settings.BADGE_PDFLATEX,
                    "-halt-on-error",
                    "-interaction=nonstopmode",
                    "-no-shell-escape",
                    "-output-directory",
                    self.dir,
                    os.path.basename(self.latex_file_path),
                ],
                cwd=self.dir,
                env=env,
            )
        except subprocess.CalledProcessError as e:
            raise BadgeCreatorError("PDF generation failed", e.output.decode("utf8"))

        # return path to pdf
        pdf_filename = "%s.pdf" % os.path.splitext(self.latex_file_path)[0]
        return self.dir, pdf_filename

    def finish(self):
        """Remove the temporary directory and everything generated in it."""
        if os.path.isdir(self.dir):
            shutil.rmtree(self.dir)

    def _get_latex(self):
        """Build the LaTeX body: a front and a back table for every page."""
        # whitespace, if code would be empty
        if len(self.badges) == 0:
            return r"\ "

        r = ""

        # number of badges on one page
        num_page = self.columns * self.rows

        page = 1
        while (page - 1) * num_page < len(self.badges):
            # helper for this page
            data_for_page = self.badges[(page - 1) * num_page : page * num_page]

            # front side
            r = r + self._create_table("badgefront", data_for_page)

            # back side: rows are mirrored so duplex printing lines up
            r = r + self._create_table("badgeback", data_for_page, True)

            # next page
            page = page + 1

        return r

    def _create_badge_side(self, latex_command, helper_data):
        """Render one badge as a \\badgefront/\\badgeback macro call."""
        data = ",".join(["%s=%s" % (key, helper_data[key]) for key in helper_data])
        template = r"\%s[%s]" % (latex_command, data)
        return template

    def _create_table(self, latex_command, helpers_data, reverse_rows=False):
        """Lay out the badges of one page in a LaTeX tabular."""
        r = ""

        # begin of table
        # BUGFIX: build one 'l' column per configured column instead of the
        # previously hard-coded two-column spec '{|l|l|}'.
        r = r + r"\begin{tabular}{|" + "|".join("l" * self.columns) + "|}" + "\n"
        r = r + r"\hline" + "\n"

        # add rows until all helpers were added
        row = 1
        while (row - 1) * self.columns < len(helpers_data):
            # get helpers for this row
            data_for_row = helpers_data[(row - 1) * self.columns : row * self.columns]
            latex_for_row = [self._create_badge_side(latex_command, h) for h in data_for_row]

            # fill row if necessary
            while len(latex_for_row) < self.columns:
                latex_for_row.append("")

            # reverse? (used for the back side of duplex pages)
            if reverse_rows:
                latex_for_row.reverse()

            # insert ' & ' between items, add '\\' and linebreak
            latex_row = " & ".join(latex_for_row) + r" \\" + "\n"

            # add to result
            r = r + latex_row

            # add hline
            r = r + r"\hline" + "\n"

            # next row
            row = row + 1

        # end of table
        r = r + r"\end{tabular}" + "\n"

        # page break
        r = r + "\n" + r"\pagebreak" + "\n\n"

        return r

    def _latex_color(self, string):
        # latex expects HTML colors without '#' and uppercase
        if string.startswith("#"):
            string = string[1:]
        return string.upper()

    def _latex_escape(self, string):
        """Escape LaTeX special characters; returns the text wrapped in {}."""
        # Backslash must be replaced first so later escapes are not mangled.
        string = string.replace("\\", r"\textbackslash ")
        # NOTE(review): the space replacement below also rewrites the space
        # inside '\textbackslash ' inserted above -- confirm the rendered
        # output is as intended for inputs containing backslashes.
        string = string.replace(r" ", r"\ ")
        string = string.replace(r"&", r"\&")
        string = string.replace(r"%", r"\%")
        string = string.replace(r"$", r"\$")
        string = string.replace(r"#", r"\#")
        string = string.replace(r"_", r"\_")
        string = string.replace(r"{", r"\{")
        string = string.replace(r"}", r"\}")
        string = string.replace(r"~", r"\textasciitilde ")
        string = string.replace(r"^", r"\textasciicircum ")
        return "{" + string + "}"

    def _copy_photo(self, src_path):
        return self._copy_file(src_path, self.dir_photos)

    def _copy_background(self, src_path):
        return self._copy_file(src_path, self.dir_backgrounds)

    def _copy_file(self, src_path, dest_folder):
        """Copy a file into the sandboxed temp dir (at most once) and return
        its path relative to the LaTeX working directory."""
        filename = os.path.basename(src_path)
        dest_path = os.path.join(dest_folder, filename)

        if src_path not in self._copied_files:
            shutil.copyfile(src_path, dest_path)
            self._copied_files.append(src_path)

        return os.path.relpath(dest_path, self.dir)
fix bug when creating badges with other column number than 2
from django.conf import settings
from tempfile import mkdtemp, mkstemp
import os
import subprocess
import shutil
import badges.models
class BadgeCreatorError(Exception):
    """Raised when badge PDF generation fails.

    Carries the human-readable message in ``value`` and, when available,
    the raw LaTeX compiler output in ``latex_output``.
    """

    def __init__(self, value, latex_output=None):
        super().__init__(value, latex_output)
        self.value = value
        self.latex_output = latex_output

    def __str__(self):
        return repr(self.value)
class BadgeCreator:
    """Renders helper badges to a PDF by filling a LaTeX template.

    Usage: construct with a badge settings object, call add_badge() once per
    badge, then generate() to run pdflatex, and finally finish() to remove
    the temporary working directory.
    """

    def __init__(self, badgesettings):
        self.settings = badgesettings

        self.columns = self.settings.columns
        self.rows = self.settings.rows

        # list of badges (dict with attributes)
        self.badges = []

        # create temporary files
        self.dir = mkdtemp(dir=settings.TMP_ROOT, prefix="badges_")
        self.latex_file, self.latex_file_path = mkstemp(suffix=".tex", dir=self.dir)

        # we copy the photos and background images to the temporary directory
        # pdflatex is only allowed to include files from there
        self.dir_photos = os.path.join(self.dir, "photos")
        os.mkdir(self.dir_photos, mode=0o700)
        self.dir_backgrounds = os.path.join(self.dir, "backgrounds")
        os.mkdir(self.dir_backgrounds, mode=0o700)

        # prevent that the same file is copied multiple times
        self._copied_files = []

    def add_badge(self, badge):
        """Collect the LaTeX key/value data for one badge."""
        design = badge.get_design()
        role = badge.get_role()

        tmp = {
            # texts
            "firstname": self._latex_escape(badge.get_firstname_text()),
            "surname": self._latex_escape(badge.get_surname_text()),
            "job": self._latex_escape(badge.get_job_text()),
            "shift": self._latex_escape(badge.get_shift_text(self.settings)),
            "role": self._latex_escape(badge.get_role_text(self.settings)),
            "photo": "",  # filled later
            "fontcolor": self._latex_color(design.font_color),
            "bgcolor": self._latex_color(design.bg_color),
            "bgfront": "",  # filled later
            "bgback": "",  # filled later
            "id": "",  # filled later (= barcode)
            "roleid": role.latex_name,
        }

        # copy photo
        if badge.photo:
            tmp["photo"] = self._copy_photo(badge.photo.path)

        # design
        if design.bg_front:
            tmp["bgfront"] = self._copy_background(design.bg_front.path)
        if design.bg_back:
            tmp["bgback"] = self._copy_background(design.bg_back.path)

        # badge id
        if self.settings.barcodes:
            tmp["id"] = "%010d" % badge.barcode

        # permissions: emit a true/false flag for every permission defined in
        # the badge settings
        all_permissions = badges.models.BadgePermission.objects.filter(badge_settings=self.settings.pk).all()
        selected_permissions = role.permissions
        for perm in all_permissions:
            if selected_permissions.filter(pk=perm.pk).exists():
                tmp["perm-%s" % perm.latex_name] = "true"
            else:
                tmp["perm-%s" % perm.latex_name] = "false"

        self.badges.append(tmp)

    def generate(self):
        """Fill the template, run pdflatex, and return (dir, pdf_path).

        :raise BadgeCreatorError: if the template cannot be read, the tex file
            cannot be written, or pdflatex exits with an error.
        """
        latex_code = self._get_latex()

        # read template
        try:
            f = self.settings.latex_template
            f.open("r")
            template = f.read()
            f.close()
        except IOError as e:
            raise BadgeCreatorError('Cannot open file "%s": %s' % (self.settings.latex_template.path, str(e)))

        # replace '%BADGEDATA%'
        latex = template.replace("%BADGEDATA%", latex_code)

        # write code
        try:
            f = os.fdopen(self.latex_file, "w")
            f.write(latex)
            f.close()
        except IOError as e:
            raise BadgeCreatorError('Cannot write to file "%s": %s' % (self.latex_file_path, str(e)))

        # debug
        if settings.BADGE_TEMPLATE_DEBUG_FILE:
            shutil.copyfile(self.latex_file_path, settings.BADGE_TEMPLATE_DEBUG_FILE)

        # call pdflatex
        try:
            # Restrict pdflatex to reading/writing inside the tex directory.
            # BUGFIX: work on a copy of the environment instead of mutating
            # os.environ (which permanently leaked these settings into our own
            # process), and pass it explicitly to the child process.
            env = os.environ.copy()
            env["openin_any"] = "p"
            env["openout_any"] = "p"
            env["TEXMFOUTPUT"] = self.dir

            subprocess.check_output(
                [
                    settings.BADGE_PDFLATEX,
                    "-halt-on-error",
                    "-interaction=nonstopmode",
                    "-no-shell-escape",
                    "-output-directory",
                    self.dir,
                    os.path.basename(self.latex_file_path),
                ],
                cwd=self.dir,
                env=env,
            )
        except subprocess.CalledProcessError as e:
            raise BadgeCreatorError("PDF generation failed", e.output.decode("utf8"))

        # return path to pdf
        pdf_filename = "%s.pdf" % os.path.splitext(self.latex_file_path)[0]
        return self.dir, pdf_filename

    def finish(self):
        """Remove the temporary directory and everything generated in it."""
        if os.path.isdir(self.dir):
            shutil.rmtree(self.dir)

    def _get_latex(self):
        """Build the LaTeX body: a front and a back table for every page."""
        # whitespace, if code would be empty
        if len(self.badges) == 0:
            return r"\ "

        r = ""

        # number of badges on one page
        num_page = self.columns * self.rows

        page = 1
        while (page - 1) * num_page < len(self.badges):
            # helper for this page
            data_for_page = self.badges[(page - 1) * num_page : page * num_page]

            # front side
            r = r + self._create_table("badgefront", data_for_page)

            # back side: rows are mirrored so duplex printing lines up
            r = r + self._create_table("badgeback", data_for_page, True)

            # next page
            page = page + 1

        return r

    def _create_badge_side(self, latex_command, helper_data):
        """Render one badge as a \\badgefront/\\badgeback macro call."""
        data = ",".join(["%s=%s" % (key, helper_data[key]) for key in helper_data])
        template = r"\%s[%s]" % (latex_command, data)
        return template

    def _create_table(self, latex_command, helpers_data, reverse_rows=False):
        """Lay out the badges of one page in a LaTeX tabular."""
        r = ""

        # begin of table: one 'l' column per configured badge column
        r = r + r"\begin{tabular}{|" + "|".join("l" * self.columns) + "|}" + "\n"
        r = r + r"\hline" + "\n"

        # add rows until all helpers were added
        row = 1
        while (row - 1) * self.columns < len(helpers_data):
            # get helpers for this row
            data_for_row = helpers_data[(row - 1) * self.columns : row * self.columns]
            latex_for_row = [self._create_badge_side(latex_command, h) for h in data_for_row]

            # fill row if necessary
            while len(latex_for_row) < self.columns:
                latex_for_row.append("")

            # reverse? (used for the back side of duplex pages)
            if reverse_rows:
                latex_for_row.reverse()

            # insert ' & ' between items, add '\\' and linebreak
            latex_row = " & ".join(latex_for_row) + r" \\" + "\n"

            # add to result
            r = r + latex_row

            # add hline
            r = r + r"\hline" + "\n"

            # next row
            row = row + 1

        # end of table
        r = r + r"\end{tabular}" + "\n"

        # page break
        r = r + "\n" + r"\pagebreak" + "\n\n"

        return r

    def _latex_color(self, string):
        # latex expects HTML colors without '#' and uppercase
        if string.startswith("#"):
            string = string[1:]
        return string.upper()

    def _latex_escape(self, string):
        """Escape LaTeX special characters; returns the text wrapped in {}."""
        # Backslash must be replaced first so later escapes are not mangled.
        string = string.replace("\\", r"\textbackslash ")
        # NOTE(review): the space replacement below also rewrites the space
        # inside '\textbackslash ' inserted above -- confirm the rendered
        # output is as intended for inputs containing backslashes.
        string = string.replace(r" ", r"\ ")
        string = string.replace(r"&", r"\&")
        string = string.replace(r"%", r"\%")
        string = string.replace(r"$", r"\$")
        string = string.replace(r"#", r"\#")
        string = string.replace(r"_", r"\_")
        string = string.replace(r"{", r"\{")
        string = string.replace(r"}", r"\}")
        string = string.replace(r"~", r"\textasciitilde ")
        string = string.replace(r"^", r"\textasciicircum ")
        return "{" + string + "}"

    def _copy_photo(self, src_path):
        return self._copy_file(src_path, self.dir_photos)

    def _copy_background(self, src_path):
        return self._copy_file(src_path, self.dir_backgrounds)

    def _copy_file(self, src_path, dest_folder):
        """Copy a file into the sandboxed temp dir (at most once) and return
        its path relative to the LaTeX working directory."""
        filename = os.path.basename(src_path)
        dest_path = os.path.join(dest_folder, filename)

        if src_path not in self._copied_files:
            shutil.copyfile(src_path, dest_path)
            self._copied_files.append(src_path)

        return os.path.relpath(dest_path, self.dir)
|
"""The WaveBlocks Project
This file contains the class for representing the hypercubic basis shape
which is the full dense basis set.
@author: R. Bourquin
@copyright: Copyright (C) 2012 R. Bourquin
@license: Modified BSD License
"""
from numpy import eye, vstack, integer
from BasisShape import BasisShape
class HyperCubicShape(BasisShape):
r"""This class implements the hypercubic basis shape
which is the full dense basis set.
A basis shape is essentially all information and operations
related to the set :math:`\mathcal{K}` of multi-indices :math:`k`.
"""
def __init__(self, limits):
    r"""
    :param limits: The maximal index :math:`K_d` for each of the
                   :math:`D` dimensions.
    """
    # The dimension of K
    self._dimension = len(limits)

    # The limits Ki for each axis
    self._limits = tuple(limits)

    # TODO: Do we really want to store these maps or better compute data the fly

    # The linear mapping k -> index for the basis
    iil = self._get_index_iterator_lex()
    self._lima = {k:index for index, k in enumerate(iil)}
    # And the inverse mapping
    # NOTE: dict.iteritems() makes this Python 2 only.
    self._lima_inv = {v:k for k, v in self._lima.iteritems()}

    # The linear mapping k -> index for the extended basis
    iil = self._get_index_iterator_lex(extended=True)
    # Only store new nodes and DO NOT renumber the nodes of non-extended lattice
    self._lima_ext = {}
    index = max(self._lima.values()) + 1
    for k in iil:
        if not k in self._lima:
            self._lima_ext[k] = index
            index += 1
    # And the inverse mapping
    self._lima_ext_inv = {v:k for k, v in self._lima_ext.iteritems()}

    # The basis size
    self._basissize = len(self._lima)
    # The extended basis size
    self._basissize_ext = self._basissize + len(self._lima_ext)
def __getitem__(self, k):
    r"""Make map lookups.

    A tuple multi-index maps to its linear index; an integer linear index
    maps back to its multi-index. Both the basis and the extended basis
    are consulted.

    :raise IndexError: If ``k`` is neither a tuple nor an int.
    """
    if type(k) is tuple:
        if k in self._lima:
            return self._lima[k]
        elif self.contains(k, extended=True):
            return self._lima_ext[k]
        # NOTE(review): a tuple not contained in either basis falls through
        # and implicitly returns None -- confirm whether raising would be
        # more appropriate for callers.
    elif type(k) is int:
        if k in self._lima_inv:
            return self._lima_inv[k]
        elif k in self._lima_ext_inv:
            return self._lima_ext_inv[k]
    else:
        raise IndexError("Wrong index type")
def __contains__(self, k):
    r"""
    Checks if a given multi-index :math:`k` is part of the basis set :math:`\mathcal{K}`.

    :param k: The multi-index we want to test.
    :type k: tuple
    """
    # This checks only the non-extended basis!
    # For checking the extended basis set use the 'contains(...)' method.
    return tuple(k) in self._lima
def __iter__(self):
    r"""Implements iteration over the multi-indices :math:`k` of the non-extended
    basis set :math:`\mathcal{K}`.

    Note: The order of iteration is NOT fixed. If you need a special
    iteration scheme, use :py:method:`get_node_iterator`. Also the iteration
    is over the non-extended basis set only.
    """
    # TODO: Better remove this as it may cause unexpected behaviour?
    # Iterates the keys (multi-indices) of the internal index map.
    return iter(self._lima)
def contains(self, k, extended=False):
    r"""
    Checks if a given multi-index :math:`k` is part of the basis set :math:`\mathcal{K}`.

    :param k: The multi-index we want to test.
    :type k: tuple
    :param extended: Whether indices belonging only to the extended basis
                     set also count as contained.
    """
    key = tuple(k)
    if extended:
        return key in self._lima or key in self._lima_ext
    return key in self._lima
def _get_index_iterator_lex(self, extended=False):
    r"""Build a generator yielding all multi-indices in lexicographical order.

    :param extended: If ``True``, iterate the extended basis whose bounds
                     are each one larger.
    """
    # Upper bounds in each dimension
    if not extended:
        bounds = self._limits[::-1]
    else:
        bounds = self._limits[::-1]
        bounds = [ b+1 for b in bounds ]

    def index_iterator_lex(bounds):
        # Initialize a counter; the extra top cell acts as an overflow flag
        # that terminates the loop once all indices have been produced.
        # NOTE: xrange makes this Python 2 only.
        z = [0 for i in xrange(self._dimension + 1)]

        while z[self._dimension] == 0:
            # Yield the current index vector
            yield tuple(reversed(z[:-1]))

            # Increment the fastest varying bit
            z[0] += 1

            # Reset overflows (carry propagation, like counting digits)
            for d in xrange(self._dimension):
                if z[d] >= bounds[d]:
                    z[d] = 0
                    z[d+1] += 1

    return index_iterator_lex(bounds)
def _get_index_iterator_chain(self, direction=0):
    r"""Build a generator traversing the lattice in chains running along
    the given direction :math:`d`.

    :param direction: The axis along which the chains extend.
    """
    # TODO: Fix iterator not to return k = (0,...,0) for limits = [1,...,1]
    def index_iterator_chain(d):
        # Number of functions in each dimension
        bounds = self._limits[:]

        # The counter; the extra trailing cell acts as a stop flag.
        z = [ 0 for i in range(self._dimension + 1) ]

        # Iterate over all valid stencil points
        while not z[-1] > 0:
            yield tuple(z[:-1])

            # Increase index in the dimension we build the chain
            z[d] += 1

            # Check if we are done with the current base point
            # If yes, move base point and start a new chain
            if z[d] > bounds[d]-2:
                z[d] = 0
                z[d-1] += 1

                # Propagate the carry through the remaining dimensions.
                for i in reversed(range(d)):
                    if z[i] > bounds[i]-1:
                        z[i] = 0
                        z[i-1] += 1

    return index_iterator_chain(direction)
def get_node_iterator(self, mode="lex", direction=None, extended=False):
r"""
Returns an iterator to iterate over all basis elements :math:`k`.
:param mode: The mode by which we iterate over the indices. Default is 'lex'
for lexicographical order. Supported is also 'chain', for
the chain-like mode, details see the manual.
:type mode: string
:param direction: If iterating in `chainmode` this specifies the direction
the chains go.
:type direction: integer.
:param extended: Do we want to iterate over the extended basis shape. Default
is `False`. Note that this has no effect in `chainmode`.
:type extended: bool
"""
if mode == "lex":
return self._get_index_iterator_lex(extended=extended)
elif mode == "chain":
if direction < self._dimension:
return self._get_index_iterator_chain(direction=direction)
else:
raise ValueError("Can not build iterator for this direction.")
# TODO: Consider boundary node only iterator
else:
raise ValueError("Unknown iterator mode: "+str(mode)+".")
def get_limits(self):
r"""Returns the upper limit :math:`K_d` for all directions :math:`d`.
:return: A tuple of the maximum of the multi-index in each direction.
"""
return tuple(self._limits)
def get_neighbours(self, k, selection=None, direction=None):
r"""
Returns a list of all multi-indices that are neighbours of a given
multi-index :math:`k`. A direct neighbours is defines as
:math:`(k_0, \ldots, k_d \pm 1, \ldots, k_{D-1}) \forall d \in [0 \ldots D-1]`.
:param k: The multi-index of which we want to get the neighbours.
:type k: tuple
:param selection:
:type selection: string with fixed values ``forward``, ``backward`` or ``all``.
The values ``all`` is equivalent to the value ``None`` (default).
:param direction: The direction :math:`0 \leq d < D` in which we want to find
the neighbours :math:`k \pm e_d`.
:type direction: int
:return: A list containing the pairs :math:`(d, k^\prime)`.
"""
# First build a list of potential neighbours
I = eye(self._dimension, dtype=integer)
ki = vstack(k)
# Forward and backward direct neighbours
nbfw = ki + I
nbbw = ki - I
# Keep only the valid ones
nbh = []
if direction is not None:
directions = [ direction ]
else:
directions = xrange(self._dimension)
for d in directions:
nfw = tuple(nbfw[:,d])
nbw = tuple(nbbw[:,d])
# TODO: Try to simplify these nested if blocks
if selection in ("backward", "all", None):
if nbw in self:
nbh.append((d, nbw))
if selection in ("forward", "all", None):
if nfw in self:
nbh.append((d, nfw))
return nbh
Neighbours search in basis shape lattice for 'extended' basis shapes
"""The WaveBlocks Project
This file contains the class for representing the hypercubic basis shape
which is the full dense basis set.
@author: R. Bourquin
@copyright: Copyright (C) 2012 R. Bourquin
@license: Modified BSD License
"""
from numpy import eye, vstack, integer
from BasisShape import BasisShape
class HyperCubicShape(BasisShape):
    r"""This class implements the hypercubic basis shape
    which is the full dense basis set.

    A basis shape is essentially all information and operations
    related to the set :math:`\mathcal{K}` of multi-indices :math:`k`.
    """

    def __init__(self, limits):
        r"""
        :param limits: The upper limits :math:`K_d` of the shape, one per axis.
        :type limits: iterable of int
        """
        # The dimension of K
        self._dimension = len(limits)
        # The limits Ki for each axis
        self._limits = tuple(limits)

        # TODO: Do we really want to store these maps or better compute data the fly

        # The linear mapping k -> index for the basis
        iil = self._get_index_iterator_lex()
        self._lima = {k: index for index, k in enumerate(iil)}
        # And the inverse mapping
        # NOTE: .items() instead of the Python-2-only .iteritems() keeps
        # this class importable under Python 3 (same behaviour on Python 2).
        self._lima_inv = {v: k for k, v in self._lima.items()}
        # The linear mapping k -> index for the extended basis
        iil = self._get_index_iterator_lex(extended=True)
        # Only store new nodes and DO NOT renumber the nodes of non-extended lattice
        self._lima_ext = {}
        index = max(self._lima.values()) + 1
        for k in iil:
            if k not in self._lima:
                self._lima_ext[k] = index
                index += 1
        # And the inverse mapping
        self._lima_ext_inv = {v: k for k, v in self._lima_ext.items()}
        # The basis size
        self._basissize = len(self._lima)
        # The extended basis size
        self._basissize_ext = self._basissize + len(self._lima_ext)

    def __getitem__(self, k):
        r"""Make map lookups: multi-index tuple -> linear index and
        linear index (int) -> multi-index tuple.
        """
        if type(k) is tuple:
            if k in self._lima:
                return self._lima[k]
            elif self.contains(k, extended=True):
                return self._lima_ext[k]
            # NOTE(review): an unknown tuple falls through and returns None;
            # confirm whether raising a KeyError would be more appropriate.
        elif type(k) is int:
            if k in self._lima_inv:
                return self._lima_inv[k]
            elif k in self._lima_ext_inv:
                return self._lima_ext_inv[k]
            # NOTE(review): unknown linear indices also return None.
        else:
            raise IndexError("Wrong index type")

    def __contains__(self, k):
        r"""
        Checks if a given multi-index :math:`k` is part of the basis set :math:`\mathcal{K}`.

        :param k: The multi-index we want to test.
        :type k: tuple
        """
        # This checks only the non-extended basis!
        # For checking the extended basis set use the 'contains(...)' method.
        return tuple(k) in self._lima

    def __iter__(self):
        r"""Implements iteration over the multi-indices :math:`k` of the non-extended
        basis set :math:`\mathcal{K}`.

        Note: The order of iteration is NOT fixed. If you need a special
        iteration scheme, use :py:method:`get_node_iterator`. Also the iteration
        is over the non-extended basis set only.
        """
        # TODO: Better remove this as it may cause unexpected behaviour?
        return iter(self._lima)

    def contains(self, k, extended=False):
        r"""
        Checks if a given multi-index :math:`k` is part of the basis set :math:`\mathcal{K}`.

        :param k: The multi-index we want to test.
        :type k: tuple
        :param extended: Whether to also search the extended basis shape.
        :type extended: bool
        """
        if not extended:
            return tuple(k) in self._lima
        else:
            l = tuple(k)
            return (l in self._lima or l in self._lima_ext)

    def _get_index_iterator_lex(self, extended=False):
        r"""Build an iterator over all lattice nodes in lexicographical order.

        :param extended: If ``True`` iterate the extended shape, whose limits
                         are enlarged by one along every axis.
        :type extended: bool
        """
        # Upper bounds in each dimension, fastest varying axis first.
        # (The two former branches only differed by the +1 enlargement.)
        bounds = self._limits[::-1]
        if extended:
            bounds = [b + 1 for b in bounds]

        def index_iterator_lex(bounds):
            # Counter with one extra overflow slot acting as stop flag
            z = [0 for _ in range(self._dimension + 1)]
            while z[self._dimension] == 0:
                # Yield the current index vector
                yield tuple(reversed(z[:-1]))
                # Increment the fastest varying digit
                z[0] += 1
                # Propagate carries
                for d in range(self._dimension):
                    if z[d] >= bounds[d]:
                        z[d] = 0
                        z[d + 1] += 1

        return index_iterator_lex(bounds)

    def _get_index_iterator_chain(self, direction=0):
        r"""Build an iterator yielding the multi-indices in chain-like order
        along the axis ``direction``.

        :param direction: The axis :math:`d` along which the chains run.
        :type direction: int
        """
        # TODO: Fix iterator not to return k = (0,...,0) for limits = [1,...,1]
        def index_iterator_chain(d):
            # Number of functions in each dimension
            bounds = self._limits[:]
            # The counter; the extra slot is an overflow sentinel
            z = [0 for _ in range(self._dimension + 1)]
            # Iterate over all valid stencil points
            while not z[-1] > 0:
                yield tuple(z[:-1])
                # Increase index in the dimension we build the chain
                z[d] += 1
                # Check if we are done with the current base point
                # If yes, move base point and start a new chain
                if z[d] > bounds[d] - 2:
                    z[d] = 0
                    z[d - 1] += 1
                    for i in reversed(range(d)):
                        if z[i] > bounds[i] - 1:
                            z[i] = 0
                            z[i - 1] += 1
        return index_iterator_chain(direction)

    def get_node_iterator(self, mode="lex", direction=None, extended=False):
        r"""
        Returns an iterator to iterate over all basis elements :math:`k`.

        :param mode: The mode by which we iterate over the indices. Default is 'lex'
                     for lexicographical order. Supported is also 'chain', for
                     the chain-like mode, details see the manual.
        :type mode: string
        :param direction: If iterating in `chainmode` this specifies the direction
                          the chains go. Defaults to the first axis if omitted.
        :type direction: integer.
        :param extended: Do we want to iterate over the extended basis shape. Default
                         is `False`. Note that this has no effect in `chainmode`.
        :type extended: bool
        :raise ValueError: For an unknown ``mode`` or an invalid ``direction``.
        """
        if mode == "lex":
            return self._get_index_iterator_lex(extended=extended)
        elif mode == "chain":
            # BUG FIX: the default direction=None previously crashed inside
            # the chain iterator; fall back to the first axis instead.
            if direction is None:
                direction = 0
            if 0 <= direction < self._dimension:
                return self._get_index_iterator_chain(direction=direction)
            else:
                raise ValueError("Can not build iterator for this direction.")
        # TODO: Consider boundary node only iterator
        else:
            raise ValueError("Unknown iterator mode: "+str(mode)+".")

    def get_limits(self):
        r"""Returns the upper limit :math:`K_d` for all directions :math:`d`.

        :return: A tuple of the maximum of the multi-index in each direction.
        """
        return tuple(self._limits)

    def get_neighbours(self, k, selection=None, direction=None, extended=False):
        r"""
        Returns a list of all multi-indices that are neighbours of a given
        multi-index :math:`k`. A direct neighbour is defined as
        :math:`(k_0, \ldots, k_d \pm 1, \ldots, k_{D-1}) \forall d \in [0 \ldots D-1]`.

        :param k: The multi-index of which we want to get the neighbours.
        :type k: tuple
        :param selection:
        :type selection: string with fixed values ``forward``, ``backward`` or ``all``.
                         The values ``all`` is equivalent to the value ``None`` (default).
        :param direction: The direction :math:`0 \leq d < D` in which we want to find
                          the neighbours :math:`k \pm e_d`.
        :type direction: int
        :param extended: Also accept neighbours that only exist in the extended
                         basis shape. Default is ``False``.
        :type extended: bool
        :return: A list containing the pairs :math:`(d, k^\prime)`.
        """
        # First build a list of potential neighbours
        I = eye(self._dimension, dtype=integer)
        ki = vstack(k)
        # Forward and backward direct neighbours
        nbfw = ki + I
        nbbw = ki - I
        # Keep only the valid ones
        nbh = []
        if direction is not None:
            directions = [direction]
        else:
            directions = range(self._dimension)
        for d in directions:
            nfw = tuple(nbfw[:, d])
            nbw = tuple(nbbw[:, d])
            # TODO: Try to simplify these nested if blocks
            if selection in ("backward", "all", None):
                if self.contains(nbw, extended=extended):
                    nbh.append((d, nbw))
            if selection in ("forward", "all", None):
                if self.contains(nfw, extended=extended):
                    nbh.append((d, nfw))
        return nbh
|
#author: Marek
import logging
import requests
from server.fileservice import write_to_file
def request_to_vt(sha256):
    """Fetch the VirusTotal analysis page for a sample and store it on disk.

    :param sha256: hex SHA-256 digest of the sample to look up.

    Side effects: writes the fetched HTML via ``write_to_file`` or logs an
    info message when VirusTotal returns an empty body.
    """
    headers = {"Cache-Control": "no-cache",
               "Cookie": "__utma=194538546.1374540295.1458168968.1462804416.1462835638.29; __utmz=194538546.1462534007.22.10.utmcsr=google|utmccn=(organic)|utmcmd=organic|utmctr=(not%20provided); VT_CSRF=7307f46a6f03a9a38eed4f19c801187a; VT_PREFERRED_LANGUAGE=en; __utmb=194538546.2.10.1462835638; __utmc=194538546; __utmt=1",
               "Accept": "text/html",
               "Connection": "keep-alive",
               "Accept-Language": "pl,en-US;q=0.7,en;q=0.3",
               "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:46.0) Gecko/20100101 Firefox/46.0"}
    url = 'https://www.virustotal.com/en/file/' + sha256 + '/analysis/'
    responseVT = requests.get(url, headers=headers)
    data = responseVT.text
    # BUG FIX: responseVT.text is a str, so the old comparison against the
    # bytes literal b'' was always False and empty responses were written
    # to disk anyway. Compare str against str.
    if data == "":
        logging.info("No file with sha256:" + sha256 + " on VirusTotal.")
        return
    write_to_file(sha256, data)
minor changes to vt_req
# author: Marek
import logging
import requests
from server.fileservice import write_to_file
def request_to_vt(sha256):
    """Download the VirusTotal analysis page for ``sha256``.

    The page HTML is persisted via ``write_to_file``; an empty response
    body (hash unknown to VirusTotal) is only logged.
    """
    headers = {
        "Cache-Control": "no-cache",
        "Cookie": "__utma=194538546.1374540295.1458168968.1462804416.1462835638.29; __utmz=194538546.1462534007.22.10.utmcsr=google|utmccn=(organic)|utmcmd=organic|utmctr=(not%20provided); VT_CSRF=7307f46a6f03a9a38eed4f19c801187a; VT_PREFERRED_LANGUAGE=en; __utmb=194538546.2.10.1462835638; __utmc=194538546; __utmt=1",
        "Accept": "text/html",
        "Connection": "keep-alive",
        "Accept-Language": "pl,en-US;q=0.7,en;q=0.3",
        "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:46.0) Gecko/20100101 Firefox/46.0",
    }
    url = 'https://www.virustotal.com/en/file/' + sha256 + '/analysis/'
    data = requests.get(url, headers=headers).text
    if (data == ""):
        # Hash unknown to VirusTotal: nothing to store.
        logging.info("No file with sha256:" + sha256 + " on VirusTotal.")
    else:
        write_to_file(sha256, data)
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import warnings
from unittest import TestCase, main
from collections import Counter, defaultdict, OrderedDict
try:
from StringIO import StringIO
except ImportError: # python3 system
from io import StringIO
import tempfile
import numpy as np
from scipy.spatial.distance import hamming
from skbio.core.sequence import (NucleotideSequence, DNASequence, RNASequence,
DNA)
from skbio.core.alignment import (SequenceCollection, Alignment,
StockholmAlignment)
from skbio.core.exception import SequenceCollectionError, StockholmParseError
from skbio.core.distance import DistanceMatrix
class SequenceCollectionTests(TestCase):
    """Tests of the SequenceCollection class """

    def setUp(self):
        """Initialize values to be used in tests
        """
        # Individual DNA/RNA sequences; i1 contains an invalid character 'X'
        self.d1 = DNASequence('GATTACA', id="d1")
        self.d2 = DNASequence('TTG', id="d2")
        self.d1_lower = DNASequence('gattaca', id="d1")
        self.d2_lower = DNASequence('ttg', id="d2")
        self.r1 = RNASequence('GAUUACA', id="r1")
        self.r2 = RNASequence('UUG', id="r2")
        self.r3 = RNASequence('U-----UGCC--', id="r3")
        self.i1 = DNASequence('GATXACA', id="i1")
        # Sequence lists and their (id, str) tuple twins used by the
        # from_fasta_records tests
        self.seqs1 = [self.d1, self.d2]
        self.seqs1_lower = [self.d1_lower, self.d2_lower]
        self.seqs2 = [self.r1, self.r2, self.r3]
        self.seqs3 = self.seqs1 + self.seqs2
        self.seqs1_t = [('d1', 'GATTACA'), ('d2', 'TTG')]
        self.seqs2_t = [('r1', 'GAUUACA'), ('r2', 'UUG'),
                        ('r3', 'U-----UGCC--')]
        self.seqs3_t = self.seqs1_t + self.seqs2_t
        # The collections under test, including empty and invalid ones
        self.s1 = SequenceCollection(self.seqs1)
        self.s1_lower = SequenceCollection(self.seqs1_lower)
        self.s2 = SequenceCollection(self.seqs2)
        self.s3 = SequenceCollection(self.seqs3)
        self.empty = SequenceCollection([])
        self.invalid_s1 = SequenceCollection([self.i1])

    def test_init(self):
        """Initialization functions as expected with varied input types
        """
        SequenceCollection(self.seqs1)
        SequenceCollection(self.seqs2)
        SequenceCollection(self.seqs3)
        SequenceCollection([])

    def test_init_fail(self):
        """initialization with sequences with overlapping ids fails
        """
        s1 = [self.d1, self.d1]
        self.assertRaises(SequenceCollectionError, SequenceCollection, s1)

    def test_init_validate(self):
        """initialization with validation functions as expected
        """
        SequenceCollection(self.seqs1, validate=True)
        SequenceCollection(self.seqs1, validate=True)
        # can't validate self.seqs2 as a DNASequence
        self.assertRaises(SequenceCollectionError, SequenceCollection,
                          self.invalid_s1, validate=True)

    def test_from_fasta_records(self):
        """Initialization from list of tuples functions as expected
        """
        SequenceCollection.from_fasta_records(self.seqs1_t, DNASequence)
        SequenceCollection.from_fasta_records(self.seqs2_t, RNASequence)
        SequenceCollection.from_fasta_records(self.seqs3_t, NucleotideSequence)

    def test_contains(self):
        """in operator functions as expected
        """
        # Membership is by sequence id, not by sequence object
        self.assertTrue('d1' in self.s1)
        self.assertTrue('r2' in self.s2)
        self.assertFalse('r2' in self.s1)

    def test_eq(self):
        """equality operator functions as expected
        """
        self.assertTrue(self.s1 == self.s1)
        self.assertFalse(self.s1 == self.s2)
        # different objects can be equal
        self.assertTrue(self.s1 == SequenceCollection([self.d1, self.d2]))
        self.assertTrue(SequenceCollection([self.d1, self.d2]) == self.s1)
        # SequenceCollections with different number of sequences are not equal
        self.assertFalse(self.s1 == SequenceCollection([self.d1]))

        class FakeSequenceCollection(SequenceCollection):
            pass
        # SequenceCollections of different types are not equal
        self.assertFalse(self.s1 == FakeSequenceCollection([self.d1, self.d2]))
        self.assertFalse(self.s1 == Alignment([self.d1, self.d2]))
        # SequenceCollections with different sequences are not equal
        self.assertFalse(self.s1 == SequenceCollection([self.d1, self.r1]))

    def test_getitem(self):
        """getitem functions as expected
        """
        # Integer indexing returns sequences by position; string keys by id
        self.assertEqual(self.s1[0], self.d1)
        self.assertEqual(self.s1[1], self.d2)
        self.assertEqual(self.s2[0], self.r1)
        self.assertEqual(self.s2[1], self.r2)
        self.assertRaises(IndexError, self.empty.__getitem__, 0)
        self.assertRaises(KeyError, self.empty.__getitem__, '0')

    def test_iter(self):
        """iter functions as expected
        """
        s1_iter = iter(self.s1)
        count = 0
        for actual, expected in zip(s1_iter, self.seqs1):
            count += 1
            self.assertEqual(actual, expected)
        self.assertEqual(count, len(self.seqs1))
        self.assertRaises(StopIteration, lambda: next(s1_iter))

    def test_len(self):
        """len functions as expected
        """
        self.assertEqual(len(self.s1), 2)
        self.assertEqual(len(self.s2), 3)
        self.assertEqual(len(self.s3), 5)
        self.assertEqual(len(self.empty), 0)

    def test_ne(self):
        """inequality operator functions as expected
        """
        self.assertFalse(self.s1 != self.s1)
        self.assertTrue(self.s1 != self.s2)
        # SequenceCollections with different number of sequences are not equal
        self.assertTrue(self.s1 != SequenceCollection([self.d1]))

        class FakeSequenceCollection(SequenceCollection):
            pass
        # SequenceCollections of different types are not equal
        self.assertTrue(self.s1 != FakeSequenceCollection([self.d1, self.d2]))
        self.assertTrue(self.s1 != Alignment([self.d1, self.d2]))
        # SequenceCollections with different sequences are not equal
        self.assertTrue(self.s1 !=
                        SequenceCollection([self.d1, self.r1]))

    def test_repr(self):
        """repr functions as expected
        """
        self.assertEqual(repr(self.s1),
                         "<SequenceCollection: n=2; "
                         "mean +/- std length=5.00 +/- 2.00>")
        self.assertEqual(repr(self.s2),
                         "<SequenceCollection: n=3; "
                         "mean +/- std length=7.33 +/- 3.68>")
        self.assertEqual(repr(self.s3),
                         "<SequenceCollection: n=5; "
                         "mean +/- std length=6.40 +/- 3.32>")
        self.assertEqual(repr(self.empty),
                         "<SequenceCollection: n=0; "
                         "mean +/- std length=0.00 +/- 0.00>")

    def test_reversed(self):
        """reversed functions as expected
        """
        s1_iter = reversed(self.s1)
        count = 0
        for actual, expected in zip(s1_iter, self.seqs1[::-1]):
            count += 1
            self.assertEqual(actual, expected)
        self.assertEqual(count, len(self.seqs1))
        self.assertRaises(StopIteration, lambda: next(s1_iter))

    def test_k_word_frequencies(self):
        """k_word_frequencies functions as expected
        """
        # Single-character frequencies over GATTACA and TTG
        expected1 = defaultdict(int)
        expected1['A'] = 3/7.
        expected1['C'] = 1/7.
        expected1['G'] = 1/7.
        expected1['T'] = 2/7.
        expected2 = defaultdict(int)
        expected2['G'] = 1/3.
        expected2['T'] = 2/3.
        self.assertEqual(self.s1.k_word_frequencies(k=1),
                         [expected1, expected2])
        # Non-overlapping 3-mers; the trailing 'A' of GATTACA is dropped
        expected1 = defaultdict(int)
        expected1['GAT'] = 1/2.
        expected1['TAC'] = 1/2.
        expected2 = defaultdict(int)
        expected2['TTG'] = 1/1.
        self.assertEqual(self.s1.k_word_frequencies(k=3, overlapping=False),
                         [expected1, expected2])
        self.assertEqual(self.empty.k_word_frequencies(k=1), [])

    def test_str(self):
        """str functions as expected
        """
        exp1 = ">d1\nGATTACA\n>d2\nTTG\n"
        self.assertEqual(str(self.s1), exp1)
        exp2 = ">r1\nGAUUACA\n>r2\nUUG\n>r3\nU-----UGCC--\n"
        self.assertEqual(str(self.s2), exp2)
        exp4 = ""
        self.assertEqual(str(self.empty), exp4)

    def test_distances(self):
        """distances functions as expected
        """
        s1 = SequenceCollection([DNA("ACGT", "d1"), DNA("ACGG", "d2")])
        # One mismatch out of four positions -> hamming distance 0.25
        expected = [[0, 0.25],
                    [0.25, 0]]
        expected = DistanceMatrix(expected, ['d1', 'd2'])
        actual = s1.distances(hamming)
        self.assertEqual(actual, expected)

        # alt distance function provided
        def dumb_distance(s1, s2):
            return 42.
        expected = [[0, 42.],
                    [42., 0]]
        expected = DistanceMatrix(expected, ['d1', 'd2'])
        actual = s1.distances(dumb_distance)
        self.assertEqual(actual, expected)

    def test_distribution_stats(self):
        """distribution_stats functions as expected
        """
        # Returns (count, mean length, std of lengths)
        actual1 = self.s1.distribution_stats()
        self.assertEqual(actual1[0], 2)
        self.assertAlmostEqual(actual1[1], 5.0, 3)
        self.assertAlmostEqual(actual1[2], 2.0, 3)
        actual2 = self.s2.distribution_stats()
        self.assertEqual(actual2[0], 3)
        self.assertAlmostEqual(actual2[1], 7.333, 3)
        self.assertAlmostEqual(actual2[2], 3.682, 3)
        actual3 = self.s3.distribution_stats()
        self.assertEqual(actual3[0], 5)
        self.assertAlmostEqual(actual3[1], 6.400, 3)
        self.assertAlmostEqual(actual3[2], 3.323, 3)
        actual4 = self.empty.distribution_stats()
        self.assertEqual(actual4[0], 0)
        self.assertEqual(actual4[1], 0.0)
        self.assertEqual(actual4[2], 0.0)

    def test_degap(self):
        """degap functions as expected
        """
        # Both '.' and '-' count as gap characters
        expected = [(id_, seq.replace('.', '').replace('-', ''))
                    for id_, seq in self.seqs2_t]
        expected = SequenceCollection.from_fasta_records(expected, RNASequence)
        actual = self.s2.degap()
        self.assertEqual(actual, expected)

    def test_get_seq(self):
        """getseq functions asexpected
        """
        self.assertEqual(self.s1.get_seq('d1'), self.d1)
        self.assertEqual(self.s1.get_seq('d2'), self.d2)

    def test_ids(self):
        """ids functions as expected
        """
        self.assertEqual(self.s1.ids(), ['d1', 'd2'])
        self.assertEqual(self.s2.ids(), ['r1', 'r2', 'r3'])
        self.assertEqual(self.s3.ids(),
                         ['d1', 'd2', 'r1', 'r2', 'r3'])
        self.assertEqual(self.empty.ids(), [])

    def test_int_map(self):
        """int_map functions as expected
        """
        # Maps sequential (optionally prefixed) keys to sequences and ids
        expected1 = {"1": self.d1, "2": self.d2}
        expected2 = {"1": "d1", "2": "d2"}
        self.assertEqual(self.s1.int_map(), (expected1, expected2))
        expected1 = {"h-1": self.d1, "h-2": self.d2}
        expected2 = {"h-1": "d1", "h-2": "d2"}
        self.assertEqual(self.s1.int_map(prefix='h-'), (expected1, expected2))

    def test_is_empty(self):
        """is_empty functions as expected
        """
        self.assertFalse(self.s1.is_empty())
        self.assertFalse(self.s2.is_empty())
        self.assertFalse(self.s3.is_empty())
        self.assertTrue(self.empty.is_empty())

    def test_is_valid(self):
        """is_valid functions as expected
        """
        self.assertTrue(self.s1.is_valid())
        self.assertTrue(self.s2.is_valid())
        self.assertTrue(self.s3.is_valid())
        self.assertTrue(self.empty.is_valid())
        self.assertFalse(self.invalid_s1.is_valid())

    def test_iteritems(self):
        """iteritems functions as expected
        """
        self.assertEqual(list(self.s1.iteritems()),
                         [(s.id, s) for s in self.s1])

    def test_lower(self):
        """lower functions as expected
        """
        self.assertEqual(self.s1.lower(), self.s1_lower)

    def test_sequence_count(self):
        """num_seqs functions as expected
        """
        self.assertEqual(self.s1.sequence_count(), 2)
        self.assertEqual(self.s2.sequence_count(), 3)
        self.assertEqual(self.s3.sequence_count(), 5)
        self.assertEqual(self.empty.sequence_count(), 0)

    def test_sequence_lengths(self):
        """sequence_lengths functions as expected
        """
        self.assertEqual(self.s1.sequence_lengths(), [7, 3])
        self.assertEqual(self.s2.sequence_lengths(), [7, 3, 12])
        self.assertEqual(self.s3.sequence_lengths(), [7, 3, 7, 3, 12])
        self.assertEqual(self.empty.sequence_lengths(), [])

    def test_to_fasta(self):
        """to_fasta functions as expected
        """
        exp1 = ">d1\nGATTACA\n>d2\nTTG\n"
        self.assertEqual(self.s1.to_fasta(), exp1)
        exp2 = ">r1\nGAUUACA\n>r2\nUUG\n>r3\nU-----UGCC--\n"
        self.assertEqual(self.s2.to_fasta(), exp2)

    def test_toFasta(self):
        # The deprecated camelCase alias emits a warning; suppress it here
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            exp = ">d1\nGATTACA\n>d2\nTTG\n"
            self.assertEqual(self.s1.toFasta(), exp)

    def test_upper(self):
        """upper functions as expected
        """
        self.assertEqual(self.s1_lower.upper(), self.s1)
class AlignmentTests(TestCase):
def setUp(self):
self.d1 = DNASequence('..ACC-GTTGG..', id="d1")
self.d2 = DNASequence('TTACCGGT-GGCC', id="d2")
self.d3 = DNASequence('.-ACC-GTTGC--', id="d3")
self.r1 = RNASequence('UUAU-', id="r1")
self.r2 = RNASequence('ACGUU', id="r2")
self.seqs1 = [self.d1, self.d2, self.d3]
self.seqs2 = [self.r1, self.r2]
self.seqs1_t = [('d1', '..ACC-GTTGG..'), ('d2', 'TTACCGGT-GGCC'),
('d3', '.-ACC-GTTGC--')]
self.seqs2_t = [('r1', 'UUAU-'), ('r2', 'ACGUU')]
self.a1 = Alignment(self.seqs1)
self.a2 = Alignment(self.seqs2)
self.a3 = Alignment(self.seqs2, score=42.0,
start_end_positions=[(0, 3), (5, 9)])
self.a4 = Alignment(self.seqs2, score=-42.0,
start_end_positions=[(1, 4), (6, 10)])
self.empty = Alignment([])
def test_degap(self):
"""degap functions as expected
"""
expected = [(id_, seq.replace('.', '').replace('-', ''))
for id_, seq in self.seqs1_t]
expected = SequenceCollection.from_fasta_records(expected, DNASequence)
actual = self.a1.degap()
self.assertEqual(actual, expected)
expected = [(id_, seq.replace('.', '').replace('-', ''))
for id_, seq in self.seqs2_t]
expected = SequenceCollection.from_fasta_records(expected, RNASequence)
actual = self.a2.degap()
self.assertEqual(actual, expected)
def test_distances(self):
"""distances functions as expected
"""
expected = [[0, 6./13, 4./13],
[6./13, 0, 7./13],
[4./13, 7./13, 0]]
expected = DistanceMatrix(expected, ['d1', 'd2', 'd3'])
actual = self.a1.distances()
self.assertEqual(actual, expected)
# alt distance function provided
def dumb_distance(s1, s2):
return 42.
expected = [[0, 42., 42.],
[42., 0, 42.],
[42., 42., 0]]
expected = DistanceMatrix(expected, ['d1', 'd2', 'd3'])
actual = self.a1.distances(dumb_distance)
self.assertEqual(actual, expected)
def test_score(self):
self.assertEqual(self.a3.score(), 42.0)
self.assertEqual(self.a4.score(), -42.0)
def test_start_end_positions(self):
self.assertEqual(self.a3.start_end_positions(), [(0, 3), (5, 9)])
self.assertEqual(self.a4.start_end_positions(), [(1, 4), (6, 10)])
def test_subalignment(self):
"""subalignment functions as expected
"""
# keep seqs by ids
actual = self.a1.subalignment(seqs_to_keep=['d1', 'd3'])
expected = Alignment([self.d1, self.d3])
self.assertEqual(actual, expected)
# keep seqs by indices
actual = self.a1.subalignment(seqs_to_keep=[0, 2])
expected = Alignment([self.d1, self.d3])
self.assertEqual(actual, expected)
# keep seqs by ids (invert)
actual = self.a1.subalignment(seqs_to_keep=['d1', 'd3'],
invert_seqs_to_keep=True)
expected = Alignment([self.d2])
self.assertEqual(actual, expected)
# keep seqs by indices (invert)
actual = self.a1.subalignment(seqs_to_keep=[0, 2],
invert_seqs_to_keep=True)
expected = Alignment([self.d2])
self.assertEqual(actual, expected)
# keep positions
actual = self.a1.subalignment(positions_to_keep=[0, 2, 3])
d1 = DNASequence('.AC', id="d1")
d2 = DNASequence('TAC', id="d2")
d3 = DNASequence('.AC', id="d3")
expected = Alignment([d1, d2, d3])
self.assertEqual(actual, expected)
# keep positions (invert)
actual = self.a1.subalignment(positions_to_keep=[0, 2, 3],
invert_positions_to_keep=True)
d1 = DNASequence('.C-GTTGG..', id="d1")
d2 = DNASequence('TCGGT-GGCC', id="d2")
d3 = DNASequence('-C-GTTGC--', id="d3")
expected = Alignment([d1, d2, d3])
self.assertEqual(actual, expected)
# keep seqs and positions
actual = self.a1.subalignment(seqs_to_keep=[0, 2],
positions_to_keep=[0, 2, 3])
d1 = DNASequence('.AC', id="d1")
d3 = DNASequence('.AC', id="d3")
expected = Alignment([d1, d3])
self.assertEqual(actual, expected)
# keep seqs and positions (invert)
actual = self.a1.subalignment(seqs_to_keep=[0, 2],
positions_to_keep=[0, 2, 3],
invert_seqs_to_keep=True,
invert_positions_to_keep=True)
d2 = DNASequence('TCGGT-GGCC', id="d2")
expected = Alignment([d2])
self.assertEqual(actual, expected)
def test_subalignment_filter_out_everything(self):
exp = Alignment([])
# no sequences
obs = self.a1.subalignment(seqs_to_keep=None, invert_seqs_to_keep=True)
self.assertEqual(obs, exp)
# no positions
obs = self.a1.subalignment(positions_to_keep=None,
invert_positions_to_keep=True)
self.assertEqual(obs, exp)
def test_init_validate(self):
"""initialization with validation functions as expected
"""
Alignment(self.seqs1, validate=True)
# invalid DNA character
invalid_seqs1 = [self.d1, self.d2, self.d3,
DNASequence('.-ACC-GTXGC--', id="i1")]
self.assertRaises(SequenceCollectionError, Alignment,
invalid_seqs1, validate=True)
# invalid lengths (they're not all equal)
invalid_seqs2 = [self.d1, self.d2, self.d3,
DNASequence('.-ACC-GTGC--', id="i2")]
self.assertRaises(SequenceCollectionError, Alignment,
invalid_seqs2, validate=True)
def test_is_valid(self):
"""is_valid functions as expected
"""
self.assertTrue(self.a1.is_valid())
self.assertTrue(self.a2.is_valid())
self.assertTrue(self.empty.is_valid())
# invalid because of length mismatch
d1 = DNASequence('..ACC-GTTGG..', id="d1")
d2 = DNASequence('TTACCGGT-GGC', id="d2")
self.assertFalse(Alignment([d1, d2]).is_valid())
# invalid because of invalid charaters
d1 = DNASequence('..ACC-GTXGG..', id="d1")
d2 = DNASequence('TTACCGGT-GGCC', id="d2")
self.assertFalse(Alignment([d1, d2]).is_valid())
def test_iter_positions(self):
"""iter_positions functions as expected
"""
actual = list(self.a2.iter_positions())
expected = [[RNASequence(j) for j in i] for i in
['UA', 'UC', 'AG', 'UU', '-U']]
self.seqs2_t = [('r1', 'UUAU-'), ('r2', 'ACGUU')]
self.assertEqual(actual, expected)
actual = list(self.a2.iter_positions(constructor=str))
expected = [list('UA'),
list('UC'),
list('AG'),
list('UU'),
list('-U')]
self.seqs2_t = [('r1', 'UUAU-'), ('r2', 'ACGUU')]
self.assertEqual(actual, expected)
def test_majority_consensus(self):
"""majority_consensus functions as expected
"""
d1 = DNASequence('TTT', id="d1")
d2 = DNASequence('TT-', id="d2")
d3 = DNASequence('TC-', id="d3")
a1 = Alignment([d1, d2, d3])
self.assertEqual(a1.majority_consensus(), DNASequence('TT-'))
d1 = DNASequence('T', id="d1")
d2 = DNASequence('A', id="d2")
a1 = Alignment([d1, d2])
self.assertTrue(a1.majority_consensus() in
[DNASequence('T'), DNASequence('A')])
self.assertEqual(self.empty.majority_consensus(), '')
def test_omit_gap_positions(self):
"""omitting gap positions functions as expected
"""
expected = self.a2
self.assertEqual(self.a2.omit_gap_positions(1.0), expected)
self.assertEqual(self.a2.omit_gap_positions(0.51), expected)
r1 = RNASequence('UUAU', id="r1")
r2 = RNASequence('ACGU', id="r2")
expected = Alignment([r1, r2])
self.assertEqual(self.a2.omit_gap_positions(0.49), expected)
r1 = RNASequence('UUAU', id="r1")
r2 = RNASequence('ACGU', id="r2")
expected = Alignment([r1, r2])
self.assertEqual(self.a2.omit_gap_positions(0.0), expected)
self.assertEqual(self.empty.omit_gap_positions(0.0), self.empty)
self.assertEqual(self.empty.omit_gap_positions(0.49), self.empty)
self.assertEqual(self.empty.omit_gap_positions(1.0), self.empty)
def test_omit_gap_sequences(self):
"""omitting gap sequences functions as expected
"""
expected = self.a2
self.assertEqual(self.a2.omit_gap_sequences(1.0), expected)
self.assertEqual(self.a2.omit_gap_sequences(0.20), expected)
expected = Alignment([self.r2])
self.assertEqual(self.a2.omit_gap_sequences(0.19), expected)
self.assertEqual(self.empty.omit_gap_sequences(0.0), self.empty)
self.assertEqual(self.empty.omit_gap_sequences(0.2), self.empty)
self.assertEqual(self.empty.omit_gap_sequences(1.0), self.empty)
def test_position_counters(self):
"""position_counters functions as expected
"""
expected = [Counter({'U': 1, 'A': 1}),
Counter({'U': 1, 'C': 1}),
Counter({'A': 1, 'G': 1}),
Counter({'U': 2}),
Counter({'-': 1, 'U': 1})]
self.assertEqual(self.a2.position_counters(), expected)
self.assertEqual(self.empty.position_counters(), [])
def test_position_frequencies(self):
"""computing position frequencies functions as expected
"""
expected = [defaultdict(int, {'U': 0.5, 'A': 0.5}),
defaultdict(int, {'U': 0.5, 'C': 0.5}),
defaultdict(int, {'A': 0.5, 'G': 0.5}),
defaultdict(int, {'U': 1.0}),
defaultdict(int, {'-': 0.5, 'U': 0.5})]
self.assertEqual(self.a2.position_frequencies(), expected)
self.assertEqual(self.empty.position_frequencies(), [])
def test_position_entropies(self):
"""computing positional uncertainties functions as expected
tested by calculating values as described in this post:
http://stackoverflow.com/a/15476958/3424666
"""
expected = [0.69314, 0.69314, 0.69314, 0.0, np.nan]
np.testing.assert_almost_equal(self.a2.position_entropies(),
expected, 5)
expected = [1.0, 1.0, 1.0, 0.0, np.nan]
np.testing.assert_almost_equal(self.a2.position_entropies(base=2),
expected, 5)
np.testing.assert_almost_equal(self.empty.position_entropies(base=2),
[])
def test_k_word_frequencies(self):
"""k_word_frequencies functions as expected
"""
expected = [defaultdict(int, {'U': 3/5, 'A': 1/5, '-': 1/5}),
defaultdict(int, {'A': 1/5, 'C': 1/5, 'G': 1/5, 'U': 2/5})]
actual = self.a2.k_word_frequencies(k=1)
for a, e in zip(actual, expected):
self.assertEqual(sorted(a), sorted(e), 5)
np.testing.assert_almost_equal(sorted(a.values()),
sorted(e.values()), 5)
def test_sequence_length(self):
"""sequence_length functions as expected
"""
self.assertEqual(self.a1.sequence_length(), 13)
self.assertEqual(self.a2.sequence_length(), 5)
self.assertEqual(self.empty.sequence_length(), 0)
def test_to_phylip(self):
"""to_phylip functions as expected
"""
d1 = DNASequence('..ACC-GTTGG..', id="d1")
d2 = DNASequence('TTACCGGT-GGCC', id="d2")
d3 = DNASequence('.-ACC-GTTGC--', id="d3")
a = Alignment([d1, d2, d3])
phylip_str, id_map = a.to_phylip(map_labels=False)
self.assertEqual(id_map, {'d1': 'd1',
'd3': 'd3',
'd2': 'd2'})
expected = "\n".join(["3 13",
"d1 ..ACC-GTTGG..",
"d2 TTACCGGT-GGCC",
"d3 .-ACC-GTTGC--"])
self.assertEqual(phylip_str, expected)
def test_to_phylip_map_labels(self):
"""to_phylip functions as expected with label mapping
"""
d1 = DNASequence('..ACC-GTTGG..', id="d1")
d2 = DNASequence('TTACCGGT-GGCC', id="d2")
d3 = DNASequence('.-ACC-GTTGC--', id="d3")
a = Alignment([d1, d2, d3])
phylip_str, id_map = a.to_phylip(map_labels=True, label_prefix="s")
self.assertEqual(id_map, {'s1': 'd1',
's3': 'd3',
's2': 'd2'})
expected = "\n".join(["3 13",
"s1 ..ACC-GTTGG..",
"s2 TTACCGGT-GGCC",
"s3 .-ACC-GTTGC--"])
self.assertEqual(phylip_str, expected)
def test_to_phylip_unequal_sequence_lengths(self):
d1 = DNASequence('A-CT', id="d1")
d2 = DNASequence('TTA', id="d2")
d3 = DNASequence('.-AC', id="d3")
a = Alignment([d1, d2, d3])
with self.assertRaises(SequenceCollectionError):
a.to_phylip()
def test_to_phylip_no_sequences(self):
with self.assertRaises(SequenceCollectionError):
Alignment([]).to_phylip()
def test_to_phylip_no_positions(self):
d1 = DNASequence('', id="d1")
d2 = DNASequence('', id="d2")
a = Alignment([d1, d2])
with self.assertRaises(SequenceCollectionError):
a.to_phylip()
    def test_validate_lengths(self):
        """_validate_lengths is True iff all sequences have equal length
        """
        self.assertTrue(self.a1._validate_lengths())
        self.assertTrue(self.a2._validate_lengths())
        # an empty alignment is trivially length-consistent
        self.assertTrue(self.empty._validate_lengths())
        self.assertTrue(Alignment([
            DNASequence('TTT', id="d1")])._validate_lengths())
        self.assertFalse(Alignment([
            DNASequence('TTT', id="d1"),
            DNASequence('TT', id="d2")])._validate_lengths())
class StockholmAlignmentTests(TestCase):
    """Tests for the StockholmAlignment object."""

    def setUp(self):
        """Setup for stockholm tests."""
        self.seqs = [DNASequence("ACC-G-GGTA", id="seq1"),
                     DNASequence("TCC-G-GGCA", id="seq2")]
        # NOTE: the original fixture listed the "RN" key twice; duplicate
        # keys passed to the OrderedDict constructor silently collapse into
        # a single entry, so the redundant second entry has been removed.
        self.GF = OrderedDict([
            ("AC", "RF00360"),
            ("BM", ["cmbuild -F CM SEED",
                    "cmsearch -Z 274931 -E 1000000"]),
            ("SQ", "9"),
            ("RT", ["TITLE1",  "TITLE2"]),
            ("RN", ["[1]", "[2]"]),
            ("RA", ["Auth1;", "Auth2;"]),
            ("RL", ["J Mol Biol", "Cell"]),
            ("RM", ["11469857", "12007400"])
        ])
        self.GS = {"AC": OrderedDict([("seq1", "111"), ("seq2", "222")])}
        self.GR = {"SS": OrderedDict([("seq1", "1110101111"),
                                      ("seq2", "0110101110")])}
        self.GC = {"SS_cons": "(((....)))"}
        self.st = StockholmAlignment(self.seqs, gc=self.GC, gf=self.GF,
                                     gs=self.GS, gr=self.GR)

    def test_retrieve_metadata(self):
        self.assertEqual(self.st.gc, self.GC)
        self.assertEqual(self.st.gf, self.GF)
        self.assertEqual(self.st.gs, self.GS)
        self.assertEqual(self.st.gr, self.GR)

    def test_from_file_alignment(self):
        """make sure can parse basic sto file with interleaved alignment"""
        sto = StringIO("# STOCKHOLM 1.0\n"
                       "seq1         ACC-G\n"
                       "seq2         TCC-G\n\n"
                       "seq1         -GGTA\n"
                       "seq2         -GGCA\n//")
        obs_sto = next(StockholmAlignment.from_file(sto, DNA))
        exp_sto = StockholmAlignment(self.seqs)
        self.assertEqual(obs_sto, exp_sto)

    def test_from_file_GF(self):
        """Make sure GF lines are parsed correctly"""
        # remove rn line to make sure auto-added
        self.GF.pop("RN")
        sto = StringIO("# STOCKHOLM 1.0\n#=GF RN [1]\n#=GF RM 11469857\n"
                       "#=GF RT TITLE1\n#=GF RA Auth1;\n#=GF RL J Mol Biol\n"
                       "#=GF RN [2]\n#=GF RM 12007400\n#=GF RT TITLE2\n"
                       "#=GF RA Auth2;\n#=GF RL Cell\n#=GF AC RF00360\n"
                       "#=GF BM cmbuild -F CM SEED\n"
                       "#=GF BM cmsearch -Z 274931 -E 1000000\n#=GF SQ 9\n"
                       "seq1         ACC-G-GGTA\nseq2         TCC-G-GGCA\n//")
        obs_sto = next(StockholmAlignment.from_file(sto, DNA))
        exp_sto = StockholmAlignment(self.seqs, self.GF, {}, {}, {})
        self.assertEqual(obs_sto, exp_sto)

    def test_from_file_GC(self):
        """Make sure GC lines are parsed correctly"""
        sto = StringIO("# STOCKHOLM 1.0\n"
                       "seq1         ACC-G-GGTA\nseq2         TCC-G-GGCA\n"
                       "#=GC SS_cons (((....)))\n//")
        obs_sto = next(StockholmAlignment.from_file(sto, DNA))
        exp_sto = StockholmAlignment(self.seqs, {}, {}, {}, self.GC)
        self.assertEqual(obs_sto, exp_sto)

    def test_from_file_GS(self):
        """Make sure GS lines are parsed correctly"""
        sto = StringIO("# STOCKHOLM 1.0\n#=GS seq2 AC 222\n#=GS seq1 AC 111\n"
                       "seq1          ACC-G-GGTA\n"
                       "seq2          TCC-G-GGCA\n//")
        obs_sto = next(StockholmAlignment.from_file(sto, DNA))
        exp_sto = StockholmAlignment(self.seqs, {}, self.GS, {}, {})
        self.assertEqual(obs_sto, exp_sto)

    def test_from_file_GR(self):
        """Make sure GR lines are parsed correctly"""
        sto = StringIO("# STOCKHOLM 1.0\nseq1          ACC-G\n"
                       "#=GR seq1 SS  11101\nseq2          TCC-G\n"
                       "#=GR seq2 SS  01101\n\nseq1          -GGTA\n"
                       "#=GR seq1 SS  01111\nseq2          -GGCA\n"
                       "#=GR seq2 SS  01110\n//")
        obs_sto = next(StockholmAlignment.from_file(sto, DNA))
        exp_sto = StockholmAlignment(self.seqs, {}, {}, self.GR, {})
        self.assertEqual(obs_sto, exp_sto)

    def test_from_file_multi(self):
        """Make sure yield works correctly with multi-alignment sto files"""
        sto = StringIO("# STOCKHOLM 1.0\n#=GS seq2 AC 222\n#=GS seq1 AC 111\n"
                       "seq1          ACC-G-GGTA\n"
                       "seq2          TCC-G-GGCA\n//\n"
                       "# STOCKHOLM 1.0\nseq1          ACC-G-GGTA\n"
                       "#=GR seq1 SS  1110101111\nseq2          TCC-G-GGCA\n"
                       "#=GR seq2 SS  0110101110\n//")
        obs_sto = StockholmAlignment.from_file(sto, DNA)
        count = 0
        for obs in obs_sto:
            if count == 0:
                exp_sto = StockholmAlignment(self.seqs, {}, self.GS, {}, {})
                self.assertEqual(obs, exp_sto)
            elif count == 1:
                exp_sto = StockholmAlignment(self.seqs, {}, {}, self.GR, {})
                self.assertEqual(obs, exp_sto)
            else:
                raise AssertionError("More than 2 sto alignments parsed!")
            count += 1

    def test_parse_gf_multiline_nh(self):
        """Makes sure a multiline NH code is parsed correctly"""
        sto = ["#=GF TN MULTILINE TREE",
               "#=GF NH THIS IS FIRST", "#=GF NH THIS IS SECOND",
               "#=GF AC 1283394"]
        exp = {'TN': 'MULTILINE TREE',
               'NH': 'THIS IS FIRST THIS IS SECOND',
               'AC': '1283394'}
        self.assertEqual(self.st._parse_gf_info(sto), exp)

    def test_parse_gf_multiline_cc(self):
        """Makes sure a multiline CC code is parsed correctly"""
        sto = ["#=GF CC THIS IS FIRST", "#=GF CC THIS IS SECOND"]
        exp = {'CC': 'THIS IS FIRST THIS IS SECOND'}
        self.assertEqual(self.st._parse_gf_info(sto), exp)

    def test_parse_gf_info_nongf(self):
        """Makes sure error raised if non-GF line passed"""
        sto = ["#=GF AC BLAAAAAAAHHH", "#=GC HUH THIS SHOULD NOT BE HERE"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gf_info(sto)

    def test_parse_gf_info_malformed(self):
        """Makes sure error raised if too short a line passed"""
        sto = ["#=GF AC", "#=GF"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gf_info(sto)

    def test_parse_gc_info_nongf(self):
        """Makes sure error raised if non-GC line passed"""
        sto = ["#=GC AC BLAAAAAAAHHH", "#=GF HUH THIS SHOULD NOT BE HERE"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gf_info(sto)

    def test_parse_gc_info_strict_len(self):
        """Make sure error raised if GC lines bad length and strict parsing"""
        sto = ["#=GC SS_cons (((..)))"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gc_info(sto, seqlen=20, strict=True)

    def test_parse_gc_info_strict_duplicate(self):
        """Make sure error raised if GC lines repeated"""
        sto = ["#=GC SS_cons (((..)))", "#=GC SS_cons (((..)))"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gc_info(sto, seqlen=8, strict=True)

    def test_parse_gc_info_malformed(self):
        """Makes sure error raised if too short a line passed"""
        sto = ["#=GC AC BLAAAAAAAHHH", "#=GC"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gc_info(sto)

    def test_parse_gs_gr_info_mixed(self):
        """Makes sure error raised if mixed GS and GR lines passed"""
        sto = ["#=GS seq1 AC BLAAA", "#=GR seq2 HUH THIS SHOULD NOT BE HERE"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gs_gr_info(sto)

    def test_parse_gs_gr_info_malformed(self):
        """Makes sure error raised if too short a line passed"""
        sto = ["#=GS AC BLAAAAAAAHHH", "#=GS"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gs_gr_info(sto)

    def test_parse_gs_gr_info_strict(self):
        """Make sure error raised if GR lines bad length and strict parsing"""
        sto = ["#=GR seq1 SS  10101111", "#=GR seq2 SS  01101"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gs_gr_info(sto, seqlen=20, strict=True)

    def test_str(self):
        """ Make sure stockholm with all information contained is formatted
        correctly """
        st = StockholmAlignment(self.seqs, gc=self.GC, gf=self.GF, gs=self.GS,
                                gr=self.GR)
        obs = str(st)
        exp = ('# STOCKHOLM 1.0\n'
               '#=GF AC RF00360\n'
               '#=GF BM cmbuild -F CM SEED\n'
               '#=GF BM cmsearch -Z 274931 -E 1000000\n'
               '#=GF SQ 9\n'
               '#=GF RN [1]\n'
               '#=GF RM 11469857\n'
               '#=GF RT TITLE1\n'
               '#=GF RA Auth1;\n'
               '#=GF RL J Mol Biol\n'
               '#=GF RN [2]\n'
               '#=GF RM 12007400\n'
               '#=GF RT TITLE2\n'
               '#=GF RA Auth2;\n'
               '#=GF RL Cell\n'
               '#=GS seq1 AC 111\n'
               '#=GS seq2 AC 222\n'
               'seq1          ACC-G-GGTA\n'
               '#=GR seq1 SS  1110101111\n'
               'seq2          TCC-G-GGCA\n'
               '#=GR seq2 SS  0110101110\n'
               '#=GC SS_cons  (((....)))\n//')
        self.assertEqual(obs, exp)

    def test_to_file(self):
        """Make sure stockholm file output with all information contained is
        formatted correctly. This is the same as __str__ but in a file. """
        st = StockholmAlignment(self.seqs, gc=self.GC, gf=self.GF, gs=self.GS,
                                gr=self.GR)
        # Create the temp file with mkstemp so the path remains valid (the
        # previous ``tempfile.NamedTemporaryFile().name`` idiom deletes the
        # file as soon as the object is collected), and actually write to it:
        # ``st.to_file()`` was previously called with no destination, so the
        # temp file was never populated before being read back.
        fd, tempfilename = tempfile.mkstemp()
        os.close(fd)
        with open(tempfilename, 'w') as out_f:
            st.to_file(out_f)
        with open(tempfilename) as in_f:
            obs = in_f.read()
        exp = ('# STOCKHOLM 1.0\n'
               '#=GF AC RF00360\n'
               '#=GF BM cmbuild -F CM SEED\n'
               '#=GF BM cmsearch -Z 274931 -E 1000000\n'
               '#=GF SQ 9\n'
               '#=GF RN [1]\n'
               '#=GF RM 11469857\n'
               '#=GF RT TITLE1\n'
               '#=GF RA Auth1;\n'
               '#=GF RL J Mol Biol\n'
               '#=GF RN [2]\n'
               '#=GF RM 12007400\n'
               '#=GF RT TITLE2\n'
               '#=GF RA Auth2;\n'
               '#=GF RL Cell\n'
               '#=GS seq1 AC 111\n'
               '#=GS seq2 AC 222\n'
               'seq1          ACC-G-GGTA\n'
               '#=GR seq1 SS  1110101111\n'
               'seq2          TCC-G-GGCA\n'
               '#=GR seq2 SS  0110101110\n'
               '#=GC SS_cons  (((....)))\n//')
        self.assertEqual(obs, exp)
        os.remove(tempfilename)

    def test_str_gc(self):
        """ Make sure stockholm with only GC information contained is formatted
        correctly """
        st = StockholmAlignment(self.seqs, gc=self.GC, gf=None, gs=None,
                                gr=None)
        obs = str(st)
        exp = ("# STOCKHOLM 1.0\nseq1          ACC-G-GGTA\n"
               "seq2          TCC-G-GGCA\n"
               "#=GC SS_cons  (((....)))\n//")
        self.assertEqual(obs, exp)

    def test_str_gf(self):
        """ Make sure stockholm with only GF information contained is formatted
        correctly """
        st = StockholmAlignment(self.seqs, gc=None, gf=self.GF, gs=None,
                                gr=None)
        obs = str(st)
        exp = ('# STOCKHOLM 1.0\n'
               '#=GF AC RF00360\n'
               '#=GF BM cmbuild -F CM SEED\n'
               '#=GF BM cmsearch -Z 274931 -E 1000000\n'
               '#=GF SQ 9\n'
               '#=GF RN [1]\n'
               '#=GF RM 11469857\n'
               '#=GF RT TITLE1\n'
               '#=GF RA Auth1;\n'
               '#=GF RL J Mol Biol\n'
               '#=GF RN [2]\n'
               '#=GF RM 12007400\n'
               '#=GF RT TITLE2\n'
               '#=GF RA Auth2;\n'
               '#=GF RL Cell\n'
               'seq1          ACC-G-GGTA\n'
               'seq2          TCC-G-GGCA\n//')
        self.assertEqual(obs, exp)

    def test_str_gs(self):
        """ Make sure stockholm with only GS information contained is formatted
        correctly """
        st = StockholmAlignment(self.seqs, gc=None, gf=None, gs=self.GS,
                                gr=None)
        obs = str(st)
        exp = ('# STOCKHOLM 1.0\n'
               '#=GS seq1 AC 111\n'
               '#=GS seq2 AC 222\n'
               'seq1          ACC-G-GGTA\n'
               'seq2          TCC-G-GGCA\n//')
        self.assertEqual(obs, exp)

    def test_str_gr(self):
        """ Make sure stockholm with only GR information contained is formatted
        correctly """
        st = StockholmAlignment(self.seqs, gc=None, gf=None, gs=None,
                                gr=self.GR)
        obs = str(st)
        exp = ("# STOCKHOLM 1.0\nseq1          ACC-G-GGTA\n"
               "#=GR seq1 SS  1110101111\nseq2          TCC-G-GGCA\n"
               "#=GR seq2 SS  0110101110\n//")
        self.assertEqual(obs, exp)

    def test_str_trees(self):
        """ Make sure stockholm with trees printed correctly"""
        GF = OrderedDict({"NH": ["IMATREE", "IMATREETOO"],
                          "TN": ["Tree2", "Tree1"]})
        st = StockholmAlignment(self.seqs, gc=None, gf=GF, gs=None,
                                gr=None)
        obs = str(st)
        exp = ("# STOCKHOLM 1.0\n#=GF TN Tree2\n#=GF NH IMATREE\n#=GF TN Tree1"
               "\n#=GF NH IMATREETOO\nseq1          ACC-G-GGTA\n"
               "seq2          TCC-G-GGCA\n//")
        self.assertEqual(obs, exp)
# Run the full test suite when this module is executed directly.
if __name__ == "__main__":
    main()
Fixed my bad code, using travis #495
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import warnings
from unittest import TestCase, main
from collections import Counter, defaultdict, OrderedDict
try:
from StringIO import StringIO
except ImportError: # python3 system
from io import StringIO
import tempfile
import numpy as np
from scipy.spatial.distance import hamming
from skbio.core.sequence import (NucleotideSequence, DNASequence, RNASequence,
DNA)
from skbio.core.alignment import (SequenceCollection, Alignment,
StockholmAlignment)
from skbio.core.exception import SequenceCollectionError, StockholmParseError
from skbio.core.distance import DistanceMatrix
class SequenceCollectionTests(TestCase):
    """Tests of the SequenceCollection class """

    def setUp(self):
        """Initialize values to be used in tests
        """
        self.d1 = DNASequence('GATTACA', id="d1")
        self.d2 = DNASequence('TTG', id="d2")
        self.d1_lower = DNASequence('gattaca', id="d1")
        self.d2_lower = DNASequence('ttg', id="d2")
        self.r1 = RNASequence('GAUUACA', id="r1")
        self.r2 = RNASequence('UUG', id="r2")
        self.r3 = RNASequence('U-----UGCC--', id="r3")
        # i1 contains 'X', which is not a valid DNA character
        self.i1 = DNASequence('GATXACA', id="i1")

        self.seqs1 = [self.d1, self.d2]
        self.seqs1_lower = [self.d1_lower, self.d2_lower]
        self.seqs2 = [self.r1, self.r2, self.r3]
        self.seqs3 = self.seqs1 + self.seqs2

        self.seqs1_t = [('d1', 'GATTACA'), ('d2', 'TTG')]
        self.seqs2_t = [('r1', 'GAUUACA'), ('r2', 'UUG'),
                        ('r3', 'U-----UGCC--')]
        self.seqs3_t = self.seqs1_t + self.seqs2_t

        self.s1 = SequenceCollection(self.seqs1)
        self.s1_lower = SequenceCollection(self.seqs1_lower)
        self.s2 = SequenceCollection(self.seqs2)
        self.s3 = SequenceCollection(self.seqs3)
        self.empty = SequenceCollection([])
        self.invalid_s1 = SequenceCollection([self.i1])

    def test_init(self):
        """Initialization functions as expected with varied input types
        """
        SequenceCollection(self.seqs1)
        SequenceCollection(self.seqs2)
        SequenceCollection(self.seqs3)
        SequenceCollection([])

    def test_init_fail(self):
        """initialization with sequences with overlapping ids fails
        """
        s1 = [self.d1, self.d1]
        self.assertRaises(SequenceCollectionError, SequenceCollection, s1)

    def test_init_validate(self):
        """initialization with validation functions as expected
        """
        # the original test repeated this identical call twice; one
        # redundant duplicate has been removed
        SequenceCollection(self.seqs1, validate=True)
        # can't validate sequences with invalid DNA characters
        self.assertRaises(SequenceCollectionError, SequenceCollection,
                          self.invalid_s1, validate=True)

    def test_from_fasta_records(self):
        """Initialization from list of tuples functions as expected
        """
        SequenceCollection.from_fasta_records(self.seqs1_t, DNASequence)
        SequenceCollection.from_fasta_records(self.seqs2_t, RNASequence)
        SequenceCollection.from_fasta_records(self.seqs3_t, NucleotideSequence)

    def test_contains(self):
        """in operator functions as expected
        """
        self.assertTrue('d1' in self.s1)
        self.assertTrue('r2' in self.s2)
        self.assertFalse('r2' in self.s1)

    def test_eq(self):
        """equality operator functions as expected
        """
        self.assertTrue(self.s1 == self.s1)
        self.assertFalse(self.s1 == self.s2)

        # different objects can be equal
        self.assertTrue(self.s1 == SequenceCollection([self.d1, self.d2]))
        self.assertTrue(SequenceCollection([self.d1, self.d2]) == self.s1)

        # SequenceCollections with different number of sequences are not equal
        self.assertFalse(self.s1 == SequenceCollection([self.d1]))

        class FakeSequenceCollection(SequenceCollection):
            pass
        # SequenceCollections of different types are not equal
        self.assertFalse(self.s1 == FakeSequenceCollection([self.d1, self.d2]))
        self.assertFalse(self.s1 == Alignment([self.d1, self.d2]))

        # SequenceCollections with different sequences are not equal
        self.assertFalse(self.s1 == SequenceCollection([self.d1, self.r1]))

    def test_getitem(self):
        """getitem functions as expected
        """
        self.assertEqual(self.s1[0], self.d1)
        self.assertEqual(self.s1[1], self.d2)
        self.assertEqual(self.s2[0], self.r1)
        self.assertEqual(self.s2[1], self.r2)
        # integer indexing raises IndexError; string lookup raises KeyError
        self.assertRaises(IndexError, self.empty.__getitem__, 0)
        self.assertRaises(KeyError, self.empty.__getitem__, '0')

    def test_iter(self):
        """iter functions as expected
        """
        s1_iter = iter(self.s1)
        count = 0
        for actual, expected in zip(s1_iter, self.seqs1):
            count += 1
            self.assertEqual(actual, expected)
        self.assertEqual(count, len(self.seqs1))
        self.assertRaises(StopIteration, lambda: next(s1_iter))

    def test_len(self):
        """len functions as expected
        """
        self.assertEqual(len(self.s1), 2)
        self.assertEqual(len(self.s2), 3)
        self.assertEqual(len(self.s3), 5)
        self.assertEqual(len(self.empty), 0)

    def test_ne(self):
        """inequality operator functions as expected
        """
        self.assertFalse(self.s1 != self.s1)
        self.assertTrue(self.s1 != self.s2)

        # SequenceCollections with different number of sequences are not equal
        self.assertTrue(self.s1 != SequenceCollection([self.d1]))

        class FakeSequenceCollection(SequenceCollection):
            pass
        # SequenceCollections of different types are not equal
        self.assertTrue(self.s1 != FakeSequenceCollection([self.d1, self.d2]))
        self.assertTrue(self.s1 != Alignment([self.d1, self.d2]))

        # SequenceCollections with different sequences are not equal
        self.assertTrue(self.s1 !=
                        SequenceCollection([self.d1, self.r1]))

    def test_repr(self):
        """repr functions as expected
        """
        self.assertEqual(repr(self.s1),
                         "<SequenceCollection: n=2; "
                         "mean +/- std length=5.00 +/- 2.00>")
        self.assertEqual(repr(self.s2),
                         "<SequenceCollection: n=3; "
                         "mean +/- std length=7.33 +/- 3.68>")
        self.assertEqual(repr(self.s3),
                         "<SequenceCollection: n=5; "
                         "mean +/- std length=6.40 +/- 3.32>")
        self.assertEqual(repr(self.empty),
                         "<SequenceCollection: n=0; "
                         "mean +/- std length=0.00 +/- 0.00>")

    def test_reversed(self):
        """reversed functions as expected
        """
        s1_iter = reversed(self.s1)
        count = 0
        for actual, expected in zip(s1_iter, self.seqs1[::-1]):
            count += 1
            self.assertEqual(actual, expected)
        self.assertEqual(count, len(self.seqs1))
        self.assertRaises(StopIteration, lambda: next(s1_iter))

    def test_k_word_frequencies(self):
        """k_word_frequencies functions as expected
        """
        expected1 = defaultdict(int)
        expected1['A'] = 3/7.
        expected1['C'] = 1/7.
        expected1['G'] = 1/7.
        expected1['T'] = 2/7.
        expected2 = defaultdict(int)
        expected2['G'] = 1/3.
        expected2['T'] = 2/3.
        self.assertEqual(self.s1.k_word_frequencies(k=1),
                         [expected1, expected2])

        expected1 = defaultdict(int)
        expected1['GAT'] = 1/2.
        expected1['TAC'] = 1/2.
        expected2 = defaultdict(int)
        expected2['TTG'] = 1/1.
        self.assertEqual(self.s1.k_word_frequencies(k=3, overlapping=False),
                         [expected1, expected2])

        self.assertEqual(self.empty.k_word_frequencies(k=1), [])

    def test_str(self):
        """str functions as expected
        """
        exp1 = ">d1\nGATTACA\n>d2\nTTG\n"
        self.assertEqual(str(self.s1), exp1)
        exp2 = ">r1\nGAUUACA\n>r2\nUUG\n>r3\nU-----UGCC--\n"
        self.assertEqual(str(self.s2), exp2)
        exp4 = ""
        self.assertEqual(str(self.empty), exp4)

    def test_distances(self):
        """distances functions as expected
        """
        s1 = SequenceCollection([DNA("ACGT", "d1"), DNA("ACGG", "d2")])
        expected = [[0, 0.25],
                    [0.25, 0]]
        expected = DistanceMatrix(expected, ['d1', 'd2'])
        actual = s1.distances(hamming)
        self.assertEqual(actual, expected)

        # alt distance function provided
        def dumb_distance(s1, s2):
            return 42.
        expected = [[0, 42.],
                    [42., 0]]
        expected = DistanceMatrix(expected, ['d1', 'd2'])
        actual = s1.distances(dumb_distance)
        self.assertEqual(actual, expected)

    def test_distribution_stats(self):
        """distribution_stats functions as expected
        """
        actual1 = self.s1.distribution_stats()
        self.assertEqual(actual1[0], 2)
        self.assertAlmostEqual(actual1[1], 5.0, 3)
        self.assertAlmostEqual(actual1[2], 2.0, 3)

        actual2 = self.s2.distribution_stats()
        self.assertEqual(actual2[0], 3)
        self.assertAlmostEqual(actual2[1], 7.333, 3)
        self.assertAlmostEqual(actual2[2], 3.682, 3)

        actual3 = self.s3.distribution_stats()
        self.assertEqual(actual3[0], 5)
        self.assertAlmostEqual(actual3[1], 6.400, 3)
        self.assertAlmostEqual(actual3[2], 3.323, 3)

        actual4 = self.empty.distribution_stats()
        self.assertEqual(actual4[0], 0)
        self.assertEqual(actual4[1], 0.0)
        self.assertEqual(actual4[2], 0.0)

    def test_degap(self):
        """degap functions as expected
        """
        expected = [(id_, seq.replace('.', '').replace('-', ''))
                    for id_, seq in self.seqs2_t]
        expected = SequenceCollection.from_fasta_records(expected, RNASequence)
        actual = self.s2.degap()
        self.assertEqual(actual, expected)

    def test_get_seq(self):
        """get_seq functions as expected
        """
        self.assertEqual(self.s1.get_seq('d1'), self.d1)
        self.assertEqual(self.s1.get_seq('d2'), self.d2)

    def test_ids(self):
        """ids functions as expected
        """
        self.assertEqual(self.s1.ids(), ['d1', 'd2'])
        self.assertEqual(self.s2.ids(), ['r1', 'r2', 'r3'])
        self.assertEqual(self.s3.ids(),
                         ['d1', 'd2', 'r1', 'r2', 'r3'])
        self.assertEqual(self.empty.ids(), [])

    def test_int_map(self):
        """int_map functions as expected
        """
        expected1 = {"1": self.d1, "2": self.d2}
        expected2 = {"1": "d1", "2": "d2"}
        self.assertEqual(self.s1.int_map(), (expected1, expected2))

        expected1 = {"h-1": self.d1, "h-2": self.d2}
        expected2 = {"h-1": "d1", "h-2": "d2"}
        self.assertEqual(self.s1.int_map(prefix='h-'), (expected1, expected2))

    def test_is_empty(self):
        """is_empty functions as expected
        """
        self.assertFalse(self.s1.is_empty())
        self.assertFalse(self.s2.is_empty())
        self.assertFalse(self.s3.is_empty())
        self.assertTrue(self.empty.is_empty())

    def test_is_valid(self):
        """is_valid functions as expected
        """
        self.assertTrue(self.s1.is_valid())
        self.assertTrue(self.s2.is_valid())
        self.assertTrue(self.s3.is_valid())
        self.assertTrue(self.empty.is_valid())
        self.assertFalse(self.invalid_s1.is_valid())

    def test_iteritems(self):
        """iteritems functions as expected
        """
        self.assertEqual(list(self.s1.iteritems()),
                         [(s.id, s) for s in self.s1])

    def test_lower(self):
        """lower functions as expected
        """
        self.assertEqual(self.s1.lower(), self.s1_lower)

    def test_sequence_count(self):
        """sequence_count functions as expected
        """
        self.assertEqual(self.s1.sequence_count(), 2)
        self.assertEqual(self.s2.sequence_count(), 3)
        self.assertEqual(self.s3.sequence_count(), 5)
        self.assertEqual(self.empty.sequence_count(), 0)

    def test_sequence_lengths(self):
        """sequence_lengths functions as expected
        """
        self.assertEqual(self.s1.sequence_lengths(), [7, 3])
        self.assertEqual(self.s2.sequence_lengths(), [7, 3, 12])
        self.assertEqual(self.s3.sequence_lengths(), [7, 3, 7, 3, 12])
        self.assertEqual(self.empty.sequence_lengths(), [])

    def test_to_fasta(self):
        """to_fasta functions as expected
        """
        exp1 = ">d1\nGATTACA\n>d2\nTTG\n"
        self.assertEqual(self.s1.to_fasta(), exp1)
        exp2 = ">r1\nGAUUACA\n>r2\nUUG\n>r3\nU-----UGCC--\n"
        self.assertEqual(self.s2.to_fasta(), exp2)

    def test_toFasta(self):
        # deprecated alias; suppress the DeprecationWarning it emits
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            exp = ">d1\nGATTACA\n>d2\nTTG\n"
            self.assertEqual(self.s1.toFasta(), exp)

    def test_upper(self):
        """upper functions as expected
        """
        self.assertEqual(self.s1_lower.upper(), self.s1)
class AlignmentTests(TestCase):
    """Tests of the Alignment class."""

    def setUp(self):
        """Initialize aligned DNA/RNA fixtures used throughout the tests."""
        self.d1 = DNASequence('..ACC-GTTGG..', id="d1")
        self.d2 = DNASequence('TTACCGGT-GGCC', id="d2")
        self.d3 = DNASequence('.-ACC-GTTGC--', id="d3")

        self.r1 = RNASequence('UUAU-', id="r1")
        self.r2 = RNASequence('ACGUU', id="r2")

        self.seqs1 = [self.d1, self.d2, self.d3]
        self.seqs2 = [self.r1, self.r2]

        self.seqs1_t = [('d1', '..ACC-GTTGG..'), ('d2', 'TTACCGGT-GGCC'),
                        ('d3', '.-ACC-GTTGC--')]
        self.seqs2_t = [('r1', 'UUAU-'), ('r2', 'ACGUU')]

        self.a1 = Alignment(self.seqs1)
        self.a2 = Alignment(self.seqs2)
        self.a3 = Alignment(self.seqs2, score=42.0,
                            start_end_positions=[(0, 3), (5, 9)])
        self.a4 = Alignment(self.seqs2, score=-42.0,
                            start_end_positions=[(1, 4), (6, 10)])
        self.empty = Alignment([])

    def test_degap(self):
        """degap functions as expected
        """
        expected = [(id_, seq.replace('.', '').replace('-', ''))
                    for id_, seq in self.seqs1_t]
        expected = SequenceCollection.from_fasta_records(expected, DNASequence)
        actual = self.a1.degap()
        self.assertEqual(actual, expected)

        expected = [(id_, seq.replace('.', '').replace('-', ''))
                    for id_, seq in self.seqs2_t]
        expected = SequenceCollection.from_fasta_records(expected, RNASequence)
        actual = self.a2.degap()
        self.assertEqual(actual, expected)

    def test_distances(self):
        """distances functions as expected
        """
        expected = [[0, 6./13, 4./13],
                    [6./13, 0, 7./13],
                    [4./13, 7./13, 0]]
        expected = DistanceMatrix(expected, ['d1', 'd2', 'd3'])
        actual = self.a1.distances()
        self.assertEqual(actual, expected)

        # alt distance function provided
        def dumb_distance(s1, s2):
            return 42.
        expected = [[0, 42., 42.],
                    [42., 0, 42.],
                    [42., 42., 0]]
        expected = DistanceMatrix(expected, ['d1', 'd2', 'd3'])
        actual = self.a1.distances(dumb_distance)
        self.assertEqual(actual, expected)

    def test_score(self):
        self.assertEqual(self.a3.score(), 42.0)
        self.assertEqual(self.a4.score(), -42.0)

    def test_start_end_positions(self):
        self.assertEqual(self.a3.start_end_positions(), [(0, 3), (5, 9)])
        self.assertEqual(self.a4.start_end_positions(), [(1, 4), (6, 10)])

    def test_subalignment(self):
        """subalignment functions as expected
        """
        # keep seqs by ids
        actual = self.a1.subalignment(seqs_to_keep=['d1', 'd3'])
        expected = Alignment([self.d1, self.d3])
        self.assertEqual(actual, expected)

        # keep seqs by indices
        actual = self.a1.subalignment(seqs_to_keep=[0, 2])
        expected = Alignment([self.d1, self.d3])
        self.assertEqual(actual, expected)

        # keep seqs by ids (invert)
        actual = self.a1.subalignment(seqs_to_keep=['d1', 'd3'],
                                      invert_seqs_to_keep=True)
        expected = Alignment([self.d2])
        self.assertEqual(actual, expected)

        # keep seqs by indices (invert)
        actual = self.a1.subalignment(seqs_to_keep=[0, 2],
                                      invert_seqs_to_keep=True)
        expected = Alignment([self.d2])
        self.assertEqual(actual, expected)

        # keep positions
        actual = self.a1.subalignment(positions_to_keep=[0, 2, 3])
        d1 = DNASequence('.AC', id="d1")
        d2 = DNASequence('TAC', id="d2")
        d3 = DNASequence('.AC', id="d3")
        expected = Alignment([d1, d2, d3])
        self.assertEqual(actual, expected)

        # keep positions (invert)
        actual = self.a1.subalignment(positions_to_keep=[0, 2, 3],
                                      invert_positions_to_keep=True)
        d1 = DNASequence('.C-GTTGG..', id="d1")
        d2 = DNASequence('TCGGT-GGCC', id="d2")
        d3 = DNASequence('-C-GTTGC--', id="d3")
        expected = Alignment([d1, d2, d3])
        self.assertEqual(actual, expected)

        # keep seqs and positions
        actual = self.a1.subalignment(seqs_to_keep=[0, 2],
                                      positions_to_keep=[0, 2, 3])
        d1 = DNASequence('.AC', id="d1")
        d3 = DNASequence('.AC', id="d3")
        expected = Alignment([d1, d3])
        self.assertEqual(actual, expected)

        # keep seqs and positions (invert)
        actual = self.a1.subalignment(seqs_to_keep=[0, 2],
                                      positions_to_keep=[0, 2, 3],
                                      invert_seqs_to_keep=True,
                                      invert_positions_to_keep=True)
        d2 = DNASequence('TCGGT-GGCC', id="d2")
        expected = Alignment([d2])
        self.assertEqual(actual, expected)

    def test_subalignment_filter_out_everything(self):
        exp = Alignment([])

        # no sequences
        obs = self.a1.subalignment(seqs_to_keep=None, invert_seqs_to_keep=True)
        self.assertEqual(obs, exp)

        # no positions
        obs = self.a1.subalignment(positions_to_keep=None,
                                   invert_positions_to_keep=True)
        self.assertEqual(obs, exp)

    def test_init_validate(self):
        """initialization with validation functions as expected
        """
        Alignment(self.seqs1, validate=True)

        # invalid DNA character
        invalid_seqs1 = [self.d1, self.d2, self.d3,
                         DNASequence('.-ACC-GTXGC--', id="i1")]
        self.assertRaises(SequenceCollectionError, Alignment,
                          invalid_seqs1, validate=True)

        # invalid lengths (they're not all equal)
        invalid_seqs2 = [self.d1, self.d2, self.d3,
                         DNASequence('.-ACC-GTGC--', id="i2")]
        self.assertRaises(SequenceCollectionError, Alignment,
                          invalid_seqs2, validate=True)

    def test_is_valid(self):
        """is_valid functions as expected
        """
        self.assertTrue(self.a1.is_valid())
        self.assertTrue(self.a2.is_valid())
        self.assertTrue(self.empty.is_valid())

        # invalid because of length mismatch
        d1 = DNASequence('..ACC-GTTGG..', id="d1")
        d2 = DNASequence('TTACCGGT-GGC', id="d2")
        self.assertFalse(Alignment([d1, d2]).is_valid())

        # invalid because of invalid characters
        d1 = DNASequence('..ACC-GTXGG..', id="d1")
        d2 = DNASequence('TTACCGGT-GGCC', id="d2")
        self.assertFalse(Alignment([d1, d2]).is_valid())

    def test_iter_positions(self):
        """iter_positions functions as expected
        """
        # NOTE: the original test reassigned ``self.seqs2_t`` (twice) in the
        # middle of this method; those statements were dead code left over
        # from an edit and have been removed.
        actual = list(self.a2.iter_positions())
        expected = [[RNASequence(j) for j in i] for i in
                    ['UA', 'UC', 'AG', 'UU', '-U']]
        self.assertEqual(actual, expected)

        actual = list(self.a2.iter_positions(constructor=str))
        expected = [list('UA'),
                    list('UC'),
                    list('AG'),
                    list('UU'),
                    list('-U')]
        self.assertEqual(actual, expected)

    def test_majority_consensus(self):
        """majority_consensus functions as expected
        """
        d1 = DNASequence('TTT', id="d1")
        d2 = DNASequence('TT-', id="d2")
        d3 = DNASequence('TC-', id="d3")
        a1 = Alignment([d1, d2, d3])
        self.assertEqual(a1.majority_consensus(), DNASequence('TT-'))

        # a tie between T and A may resolve either way
        d1 = DNASequence('T', id="d1")
        d2 = DNASequence('A', id="d2")
        a1 = Alignment([d1, d2])
        self.assertTrue(a1.majority_consensus() in
                        [DNASequence('T'), DNASequence('A')])

        self.assertEqual(self.empty.majority_consensus(), '')

    def test_omit_gap_positions(self):
        """omitting gap positions functions as expected
        """
        expected = self.a2
        self.assertEqual(self.a2.omit_gap_positions(1.0), expected)
        self.assertEqual(self.a2.omit_gap_positions(0.51), expected)

        r1 = RNASequence('UUAU', id="r1")
        r2 = RNASequence('ACGU', id="r2")
        expected = Alignment([r1, r2])
        self.assertEqual(self.a2.omit_gap_positions(0.49), expected)

        r1 = RNASequence('UUAU', id="r1")
        r2 = RNASequence('ACGU', id="r2")
        expected = Alignment([r1, r2])
        self.assertEqual(self.a2.omit_gap_positions(0.0), expected)

        self.assertEqual(self.empty.omit_gap_positions(0.0), self.empty)
        self.assertEqual(self.empty.omit_gap_positions(0.49), self.empty)
        self.assertEqual(self.empty.omit_gap_positions(1.0), self.empty)

    def test_omit_gap_sequences(self):
        """omitting gap sequences functions as expected
        """
        expected = self.a2
        self.assertEqual(self.a2.omit_gap_sequences(1.0), expected)
        self.assertEqual(self.a2.omit_gap_sequences(0.20), expected)

        expected = Alignment([self.r2])
        self.assertEqual(self.a2.omit_gap_sequences(0.19), expected)

        self.assertEqual(self.empty.omit_gap_sequences(0.0), self.empty)
        self.assertEqual(self.empty.omit_gap_sequences(0.2), self.empty)
        self.assertEqual(self.empty.omit_gap_sequences(1.0), self.empty)

    def test_position_counters(self):
        """position_counters functions as expected
        """
        expected = [Counter({'U': 1, 'A': 1}),
                    Counter({'U': 1, 'C': 1}),
                    Counter({'A': 1, 'G': 1}),
                    Counter({'U': 2}),
                    Counter({'-': 1, 'U': 1})]
        self.assertEqual(self.a2.position_counters(), expected)

        self.assertEqual(self.empty.position_counters(), [])

    def test_position_frequencies(self):
        """computing position frequencies functions as expected
        """
        expected = [defaultdict(int, {'U': 0.5, 'A': 0.5}),
                    defaultdict(int, {'U': 0.5, 'C': 0.5}),
                    defaultdict(int, {'A': 0.5, 'G': 0.5}),
                    defaultdict(int, {'U': 1.0}),
                    defaultdict(int, {'-': 0.5, 'U': 0.5})]
        self.assertEqual(self.a2.position_frequencies(), expected)

        self.assertEqual(self.empty.position_frequencies(), [])

    def test_position_entropies(self):
        """computing positional uncertainties functions as expected

        tested by calculating values as described in this post:
        http://stackoverflow.com/a/15476958/3424666
        """
        expected = [0.69314, 0.69314, 0.69314, 0.0, np.nan]
        np.testing.assert_almost_equal(self.a2.position_entropies(),
                                       expected, 5)

        expected = [1.0, 1.0, 1.0, 0.0, np.nan]
        np.testing.assert_almost_equal(self.a2.position_entropies(base=2),
                                       expected, 5)

        np.testing.assert_almost_equal(self.empty.position_entropies(base=2),
                                       [])

    def test_k_word_frequencies(self):
        """k_word_frequencies functions as expected
        """
        expected = [defaultdict(int, {'U': 3/5, 'A': 1/5, '-': 1/5}),
                    defaultdict(int, {'A': 1/5, 'C': 1/5, 'G': 1/5, 'U': 2/5})]
        actual = self.a2.k_word_frequencies(k=1)
        for a, e in zip(actual, expected):
            # the stray ``5`` previously passed here was silently treated as
            # the assertion *message*, not a precision, so it was removed
            self.assertEqual(sorted(a), sorted(e))
            np.testing.assert_almost_equal(sorted(a.values()),
                                           sorted(e.values()), 5)

    def test_sequence_length(self):
        """sequence_length functions as expected
        """
        self.assertEqual(self.a1.sequence_length(), 13)
        self.assertEqual(self.a2.sequence_length(), 5)
        self.assertEqual(self.empty.sequence_length(), 0)

    def test_to_phylip(self):
        """to_phylip functions as expected
        """
        d1 = DNASequence('..ACC-GTTGG..', id="d1")
        d2 = DNASequence('TTACCGGT-GGCC', id="d2")
        d3 = DNASequence('.-ACC-GTTGC--', id="d3")
        a = Alignment([d1, d2, d3])

        phylip_str, id_map = a.to_phylip(map_labels=False)
        self.assertEqual(id_map, {'d1': 'd1',
                                  'd3': 'd3',
                                  'd2': 'd2'})
        expected = "\n".join(["3 13",
                              "d1 ..ACC-GTTGG..",
                              "d2 TTACCGGT-GGCC",
                              "d3 .-ACC-GTTGC--"])
        self.assertEqual(phylip_str, expected)

    def test_to_phylip_map_labels(self):
        """to_phylip functions as expected with label mapping
        """
        d1 = DNASequence('..ACC-GTTGG..', id="d1")
        d2 = DNASequence('TTACCGGT-GGCC', id="d2")
        d3 = DNASequence('.-ACC-GTTGC--', id="d3")
        a = Alignment([d1, d2, d3])

        phylip_str, id_map = a.to_phylip(map_labels=True, label_prefix="s")
        self.assertEqual(id_map, {'s1': 'd1',
                                  's3': 'd3',
                                  's2': 'd2'})
        expected = "\n".join(["3 13",
                              "s1 ..ACC-GTTGG..",
                              "s2 TTACCGGT-GGCC",
                              "s3 .-ACC-GTTGC--"])
        self.assertEqual(phylip_str, expected)

    def test_to_phylip_unequal_sequence_lengths(self):
        d1 = DNASequence('A-CT', id="d1")
        d2 = DNASequence('TTA', id="d2")
        d3 = DNASequence('.-AC', id="d3")
        a = Alignment([d1, d2, d3])

        with self.assertRaises(SequenceCollectionError):
            a.to_phylip()

    def test_to_phylip_no_sequences(self):
        with self.assertRaises(SequenceCollectionError):
            Alignment([]).to_phylip()

    def test_to_phylip_no_positions(self):
        d1 = DNASequence('', id="d1")
        d2 = DNASequence('', id="d2")
        a = Alignment([d1, d2])

        with self.assertRaises(SequenceCollectionError):
            a.to_phylip()

    def test_validate_lengths(self):
        """_validate_lengths is True iff all sequences have equal length
        """
        self.assertTrue(self.a1._validate_lengths())
        self.assertTrue(self.a2._validate_lengths())
        self.assertTrue(self.empty._validate_lengths())

        self.assertTrue(Alignment([
            DNASequence('TTT', id="d1")])._validate_lengths())

        self.assertFalse(Alignment([
            DNASequence('TTT', id="d1"),
            DNASequence('TT', id="d2")])._validate_lengths())
class StockholmAlignmentTests(TestCase):
    """Tests for the StockholmAlignment object: construction, parsing of
    the GF/GC/GS/GR annotation line types, and round-trip formatting."""
    def setUp(self):
        """Setup for stockholm tests."""
        self.seqs = [DNASequence("ACC-G-GGTA", id="seq1"),
                     DNASequence("TCC-G-GGCA", id="seq2")]
        # Per-file (GF) annotations; list values model multi-line codes.
        self.GF = OrderedDict([
            ("AC", "RF00360"),
            ("BM", ["cmbuild -F CM SEED",
                    "cmsearch -Z 274931 -E 1000000"]),
            ("SQ", "9"),
            ("RT", ["TITLE1", "TITLE2"]),
            ("RN", ["[1]", "[2]"]),
            ("RA", ["Auth1;", "Auth2;"]),
            ("RL", ["J Mol Biol", "Cell"]),
            ("RM", ["11469857", "12007400"]),
            # NOTE(review): duplicate "RN" key -- an OrderedDict keeps a
            # single entry per key, so this line only re-assigns the value
            # already set above; it is redundant and could be dropped.
            ('RN', ['[1]', '[2]'])
            ])
        self.GS = {"AC": OrderedDict([("seq1", "111"), ("seq2", "222")])}
        self.GR = {"SS": OrderedDict([("seq1", "1110101111"),
                                      ("seq2", "0110101110")])}
        self.GC = {"SS_cons": "(((....)))"}
        self.st = StockholmAlignment(self.seqs, gc=self.GC, gf=self.GF,
                                     gs=self.GS, gr=self.GR)
    def test_retrieve_metadata(self):
        """Constructor arguments are exposed as gc/gf/gs/gr attributes."""
        self.assertEqual(self.st.gc, self.GC)
        self.assertEqual(self.st.gf, self.GF)
        self.assertEqual(self.st.gs, self.GS)
        self.assertEqual(self.st.gr, self.GR)
    def test_from_file_alignment(self):
        """make sure can parse basic sto file with interleaved alignment"""
        sto = StringIO("# STOCKHOLM 1.0\n"
                       "seq1      ACC-G\n"
                       "seq2      TCC-G\n\n"
                       "seq1      -GGTA\n"
                       "seq2      -GGCA\n//")
        obs_sto = next(StockholmAlignment.from_file(sto, DNA))
        exp_sto = StockholmAlignment(self.seqs)
        self.assertEqual(obs_sto, exp_sto)
    def test_from_file_GF(self):
        """Make sure GF lines are parsed correctly"""
        # remove rn line to make sure auto-added
        self.GF.pop("RN")
        sto = StringIO("# STOCKHOLM 1.0\n#=GF RN [1]\n#=GF RM 11469857\n"
                       "#=GF RT TITLE1\n#=GF RA Auth1;\n#=GF RL J Mol Biol\n"
                       "#=GF RN [2]\n#=GF RM 12007400\n#=GF RT TITLE2\n"
                       "#=GF RA Auth2;\n#=GF RL Cell\n#=GF AC RF00360\n"
                       "#=GF BM cmbuild -F CM SEED\n"
                       "#=GF BM cmsearch -Z 274931 -E 1000000\n#=GF SQ 9\n"
                       "seq1         ACC-G-GGTA\nseq2         TCC-G-GGCA\n//")
        obs_sto = next(StockholmAlignment.from_file(sto, DNA))
        exp_sto = StockholmAlignment(self.seqs, self.GF, {}, {}, {})
        self.assertEqual(obs_sto, exp_sto)
    def test_from_file_GC(self):
        """Make sure GC lines are parsed correctly"""
        sto = StringIO("# STOCKHOLM 1.0\n"
                       "seq1         ACC-G-GGTA\nseq2         TCC-G-GGCA\n"
                       "#=GC SS_cons (((....)))\n//")
        obs_sto = next(StockholmAlignment.from_file(sto, DNA))
        exp_sto = StockholmAlignment(self.seqs, {}, {}, {}, self.GC)
        self.assertEqual(obs_sto, exp_sto)
    def test_from_file_GS(self):
        """Make sure GS lines are parsed correctly"""
        sto = StringIO("# STOCKHOLM 1.0\n#=GS seq2 AC 222\n#=GS seq1 AC 111\n"
                       "seq1          ACC-G-GGTA\n"
                       "seq2          TCC-G-GGCA\n//")
        obs_sto = next(StockholmAlignment.from_file(sto, DNA))
        exp_sto = StockholmAlignment(self.seqs, {}, self.GS, {}, {})
        self.assertEqual(obs_sto, exp_sto)
    def test_from_file_GR(self):
        """Make sure GR lines are parsed correctly"""
        # GR annotations are interleaved with the sequence blocks.
        sto = StringIO("# STOCKHOLM 1.0\nseq1          ACC-G\n"
                       "#=GR seq1 SS  11101\nseq2          TCC-G\n"
                       "#=GR seq2 SS  01101\n\nseq1          -GGTA\n"
                       "#=GR seq1 SS  01111\nseq2          -GGCA\n"
                       "#=GR seq2 SS  01110\n//")
        obs_sto = next(StockholmAlignment.from_file(sto, DNA))
        exp_sto = StockholmAlignment(self.seqs, {}, {}, self.GR, {})
        self.assertEqual(obs_sto, exp_sto)
    def test_from_file_multi(self):
        """Make sure yield works correctly with multi-alignment sto files"""
        sto = StringIO("# STOCKHOLM 1.0\n#=GS seq2 AC 222\n#=GS seq1 AC 111\n"
                       "seq1          ACC-G-GGTA\n"
                       "seq2          TCC-G-GGCA\n//\n"
                       "# STOCKHOLM 1.0\nseq1 ACC-G-GGTA\n"
                       "#=GR seq1 SS  1110101111\nseq2 TCC-G-GGCA\n"
                       "#=GR seq2 SS  0110101110\n//")
        obs_sto = StockholmAlignment.from_file(sto, DNA)
        count = 0
        for obs in obs_sto:
            if count == 0:
                exp_sto = StockholmAlignment(self.seqs, {}, self.GS, {}, {})
                self.assertEqual(obs, exp_sto)
            elif count == 1:
                exp_sto = StockholmAlignment(self.seqs, {}, {}, self.GR, {})
                self.assertEqual(obs, exp_sto)
            else:
                raise AssertionError("More than 2 sto alignments parsed!")
            count += 1
    def test_parse_gf_multiline_nh(self):
        """Makes sure a multiline NH code is parsed correctly"""
        sto = ["#=GF TN MULTILINE TREE",
               "#=GF NH THIS IS FIRST", "#=GF NH THIS IS SECOND",
               "#=GF AC 1283394"]
        exp = {'TN': 'MULTILINE TREE',
               'NH': 'THIS IS FIRST THIS IS SECOND',
               'AC': '1283394'}
        self.assertEqual(self.st._parse_gf_info(sto), exp)
    def test_parse_gf_multiline_cc(self):
        """Makes sure a multiline CC code is parsed correctly"""
        sto = ["#=GF CC THIS IS FIRST", "#=GF CC THIS IS SECOND"]
        exp = {'CC': 'THIS IS FIRST THIS IS SECOND'}
        self.assertEqual(self.st._parse_gf_info(sto), exp)
    def test_parse_gf_info_nongf(self):
        """Makes sure error raised if non-GF line passed"""
        sto = ["#=GF AC BLAAAAAAAHHH", "#=GC HUH THIS SHOULD NOT BE HERE"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gf_info(sto)
    def test_parse_gf_info_malformed(self):
        """Makes sure error raised if too short a line passed"""
        sto = ["#=GF AC", "#=GF"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gf_info(sto)
    def test_parse_gc_info_nongf(self):
        """Makes sure error raised if non-GC line passed"""
        sto = ["#=GC AC BLAAAAAAAHHH", "#=GF HUH THIS SHOULD NOT BE HERE"]
        # NOTE(review): despite the test name, this calls _parse_gf_info,
        # not _parse_gc_info -- looks like a copy-paste slip; confirm which
        # parser this case was meant to exercise.
        with self.assertRaises(StockholmParseError):
            self.st._parse_gf_info(sto)
    def test_parse_gc_info_strict_len(self):
        """Make sure error raised if GC lines bad length and strict parsing"""
        sto = ["#=GC SS_cons (((..)))"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gc_info(sto, seqlen=20, strict=True)
    def test_parse_gc_info_strict_duplicate(self):
        """Make sure error raised if GC lines repeated"""
        sto = ["#=GC SS_cons (((..)))", "#=GC SS_cons (((..)))"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gc_info(sto, seqlen=8, strict=True)
    def test_parse_gc_info_malformed(self):
        """Makes sure error raised if too short a line passed"""
        sto = ["#=GC AC BLAAAAAAAHHH", "#=GC"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gc_info(sto)
    def test_parse_gs_gr_info_mixed(self):
        """Makes sure error raised if mixed GS and GR lines passed"""
        sto = ["#=GS seq1 AC BLAAA", "#=GR seq2 HUH THIS SHOULD NOT BE HERE"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gs_gr_info(sto)
    def test_parse_gs_gr_info_malformed(self):
        """Makes sure error raised if too short a line passed"""
        sto = ["#=GS AC BLAAAAAAAHHH", "#=GS"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gs_gr_info(sto)
    def test_parse_gs_gr_info_strict(self):
        """Make sure error raised if GR lines bad length and strict parsing"""
        sto = ["#=GR seq1 SS  10101111", "#=GR seq2 SS  01101"]
        with self.assertRaises(StockholmParseError):
            self.st._parse_gs_gr_info(sto, seqlen=20, strict=True)
    def test_str(self):
        """ Make sure stockholm with all information contained is formatted
        correctly """
        st = StockholmAlignment(self.seqs, gc=self.GC, gf=self.GF, gs=self.GS,
                                gr=self.GR)
        obs = str(st)
        # Expected ordering: GF header block, GS, then interleaved
        # sequence/GR lines, then the GC consensus and the // terminator.
        exp = ('# STOCKHOLM 1.0\n'
               '#=GF AC RF00360\n'
               '#=GF BM cmbuild -F CM SEED\n'
               '#=GF BM cmsearch -Z 274931 -E 1000000\n'
               '#=GF SQ 9\n'
               '#=GF RN [1]\n'
               '#=GF RM 11469857\n'
               '#=GF RT TITLE1\n'
               '#=GF RA Auth1;\n'
               '#=GF RL J Mol Biol\n'
               '#=GF RN [2]\n'
               '#=GF RM 12007400\n'
               '#=GF RT TITLE2\n'
               '#=GF RA Auth2;\n'
               '#=GF RL Cell\n'
               '#=GS seq1 AC 111\n'
               '#=GS seq2 AC 222\n'
               'seq1          ACC-G-GGTA\n'
               '#=GR seq1 SS  1110101111\n'
               'seq2          TCC-G-GGCA\n'
               '#=GR seq2 SS  0110101110\n'
               '#=GC SS_cons  (((....)))\n//')
        self.assertEqual(obs, exp)
    def test_to_file(self):
        """Make sure stockholm file output with all information contained is
        formatted correctly. This is the same as __str__ but in a file. """
        st = StockholmAlignment(self.seqs, gc=self.GC, gf=self.GF, gs=self.GS,
                                gr=self.GR)
        # NOTE(review): using NamedTemporaryFile().name and re-opening it is
        # race-prone (the file is deleted on close); tempfile.mkstemp would
        # be safer here.
        tempfilename = tempfile.NamedTemporaryFile().name
        st.to_file(tempfilename)
        obs = open(tempfilename).read()
        exp = ('# STOCKHOLM 1.0\n'
               '#=GF AC RF00360\n'
               '#=GF BM cmbuild -F CM SEED\n'
               '#=GF BM cmsearch -Z 274931 -E 1000000\n'
               '#=GF SQ 9\n'
               '#=GF RN [1]\n'
               '#=GF RM 11469857\n'
               '#=GF RT TITLE1\n'
               '#=GF RA Auth1;\n'
               '#=GF RL J Mol Biol\n'
               '#=GF RN [2]\n'
               '#=GF RM 12007400\n'
               '#=GF RT TITLE2\n'
               '#=GF RA Auth2;\n'
               '#=GF RL Cell\n'
               '#=GS seq1 AC 111\n'
               '#=GS seq2 AC 222\n'
               'seq1          ACC-G-GGTA\n'
               '#=GR seq1 SS  1110101111\n'
               'seq2          TCC-G-GGCA\n'
               '#=GR seq2 SS  0110101110\n'
               '#=GC SS_cons  (((....)))\n//')
        self.assertEqual(obs, exp)
        os.remove(tempfilename)
    def test_str_gc(self):
        """ Make sure stockholm with only GC information contained is formatted
        correctly """
        st = StockholmAlignment(self.seqs, gc=self.GC, gf=None, gs=None,
                                gr=None)
        obs = str(st)
        exp = ("# STOCKHOLM 1.0\nseq1          ACC-G-GGTA\n"
               "seq2          TCC-G-GGCA\n"
               "#=GC SS_cons  (((....)))\n//")
        self.assertEqual(obs, exp)
    def test_str_gf(self):
        """ Make sure stockholm with only GF information contained is formatted
        correctly """
        st = StockholmAlignment(self.seqs, gc=None, gf=self.GF, gs=None,
                                gr=None)
        obs = str(st)
        exp = ('# STOCKHOLM 1.0\n'
               '#=GF AC RF00360\n'
               '#=GF BM cmbuild -F CM SEED\n'
               '#=GF BM cmsearch -Z 274931 -E 1000000\n'
               '#=GF SQ 9\n'
               '#=GF RN [1]\n'
               '#=GF RM 11469857\n'
               '#=GF RT TITLE1\n'
               '#=GF RA Auth1;\n'
               '#=GF RL J Mol Biol\n'
               '#=GF RN [2]\n'
               '#=GF RM 12007400\n'
               '#=GF RT TITLE2\n'
               '#=GF RA Auth2;\n'
               '#=GF RL Cell\n'
               'seq1 ACC-G-GGTA\n'
               'seq2 TCC-G-GGCA\n//')
        self.assertEqual(obs, exp)
    def test_str_gs(self):
        """ Make sure stockholm with only GS information contained is formatted
        correctly """
        st = StockholmAlignment(self.seqs, gc=None, gf=None, gs=self.GS,
                                gr=None)
        obs = str(st)
        exp = ('# STOCKHOLM 1.0\n'
               '#=GS seq1 AC 111\n'
               '#=GS seq2 AC 222\n'
               'seq1 ACC-G-GGTA\n'
               'seq2 TCC-G-GGCA\n//')
        self.assertEqual(obs, exp)
    def test_str_gr(self):
        """ Make sure stockholm with only GR information contained is formatted
        correctly """
        st = StockholmAlignment(self.seqs, gc=None, gf=None, gs=None,
                                gr=self.GR)
        obs = str(st)
        exp = ("# STOCKHOLM 1.0\nseq1          ACC-G-GGTA\n"
               "#=GR seq1 SS  1110101111\nseq2          TCC-G-GGCA\n"
               "#=GR seq2 SS  0110101110\n//")
        self.assertEqual(obs, exp)
    def test_str_trees(self):
        """ Make sure stockholm with trees printed correctly"""
        # NOTE(review): building an OrderedDict from a dict literal only
        # guarantees insertion order on Python >= 3.7; an explicit list of
        # pairs would be safer if older interpreters must be supported.
        GF = OrderedDict({"NH": ["IMATREE", "IMATREETOO"],
                          "TN": ["Tree2", "Tree1"]})
        st = StockholmAlignment(self.seqs, gc=None, gf=GF, gs=None,
                                gr=None)
        obs = str(st)
        exp = ("# STOCKHOLM 1.0\n#=GF TN Tree2\n#=GF NH IMATREE\n#=GF TN Tree1"
               "\n#=GF NH IMATREETOO\nseq1 ACC-G-GGTA\n"
               "seq2 TCC-G-GGCA\n//")
        self.assertEqual(obs, exp)
# Run the full test suite when this module is executed as a script.
if __name__ == "__main__":
    main()
|
import numpy as np
import logging
log = logging.getLogger(__name__)
from ..matrix import CostMatrix
from ..cost_function import AbstractCostFunction
from ..cost_function.brownian import BrownianGapCloseCostFunction
from ..cost_function.diagonal import DiagonalCostFunction
from . import AbstractSolver
__all__ = []
class GapCloseSolver(AbstractSolver):
    """Solver that closes temporal gaps between track segments.

    Builds a cost matrix from a link cost function (segment end -> segment
    start candidates) plus diagonal birth/death blocks, solves it via LAP,
    and relabels the trajectories so matched segments share a label.

    Parameters
    ----------
    trajs : :class:`pandas.DataFrame`
        Trajectories indexed by ``('t_stamp', 'label')`` with a ``'t'``
        column plus one column per coordinate.
    cost_functions : dict
        Mapping with keys ``'link'``, ``'birth'`` and ``'death'``, each an
        :class:`AbstractCostFunction` instance.
    maximum_gap : number
        Largest temporal gap between a segment end and a segment start that
        may still be closed.
    coords : sequence of str, optional
        Names of the coordinate columns (default ``('x', 'y', 'z')``).
    """

    def __init__(self, trajs, cost_functions,
                 maximum_gap, coords=('x', 'y', 'z')):
        super().__init__(trajs)
        # Copy into a fresh list: avoids the shared-mutable-default pitfall
        # and accepts any sequence (tuple, list, ...).
        self.coords = list(coords)
        self.trajs.check_trajs_df_structure(index=['t_stamp', 'label'],
                                            columns=['t'] + self.coords)

        self.link_cf = cost_functions['link']
        self.check_cost_function_type(self.link_cf, AbstractCostFunction)

        self.birth_cf = cost_functions['birth']
        self.check_cost_function_type(self.birth_cf, AbstractCostFunction)

        self.death_cf = cost_functions['death']
        self.check_cost_function_type(self.death_cf, AbstractCostFunction)

        self.maximum_gap = maximum_gap

    @classmethod
    def for_brownian_motion(cls, trajs, max_speed, maximum_gap,
                            penality=1.05, coords=('x', 'y', 'z')):
        """Builds a solver preconfigured for Brownian motion.

        The link cost is a Brownian gap-close cost bounded by ``max_speed``;
        birth and death use a diagonal cost seeded with ``max_speed ** 2``.
        """
        # NOTE: ``penality`` is kept for backward compatibility with existing
        # callers but is currently unused by any of the cost functions.
        guessed_cost = float(max_speed ** 2)
        diag_context = {'cost': guessed_cost}
        diag_params = {}

        link_cost_func = BrownianGapCloseCostFunction(
            parameters={'max_speed': max_speed})
        birth_cost_func = DiagonalCostFunction(context=diag_context,
                                               parameters=diag_params)
        death_cost_func = DiagonalCostFunction(context=diag_context,
                                               parameters=diag_params)

        cost_functions = {'link': link_cost_func,
                          'birth': birth_cost_func,
                          'death': death_cost_func}
        return cls(trajs, cost_functions, maximum_gap, coords=coords)

    @property
    def blocks_structure(self):
        """2x2 block layout fed to :class:`CostMatrix`."""
        return [[self.link_cf.mat, self.death_cf.mat],
                [self.birth_cf.mat, None]]

    def track(self):
        """Closes gaps in ``self.trajs`` and returns the relabeled frame."""
        idxs_in, idxs_out = self._get_candidates()

        self.link_cf.context['trajs'] = self.trajs
        self.link_cf.context['idxs_in'] = idxs_in
        self.link_cf.context['idxs_out'] = idxs_out
        self.birth_cf.context['objects'] = self.trajs.labels
        self.death_cf.context['objects'] = self.trajs.labels

        if not len(idxs_in):
            log.info('No gap needs closing here...')
            return self.trajs

        old_labels = self.trajs.index.get_level_values('label').values
        # Use the builtin ``float``: the ``np.float`` alias was deprecated
        # in NumPy 1.20 and removed in 1.24.
        self.trajs['new_label'] = old_labels.astype(float)

        # ''' TFA: For track segment ends and starts, the alternative
        # cost (b and d in Fig. 1c) had to be comparable in magnitude
        # to the costs of potential assignments, making the rejection
        # of gap closing, merging and splitting an accessible
        # alternative. At the same time, the alternative cost had to
        # be at the higher end of the range of potential assignment
        # costs, so that the algorithm did not fail to close gaps and
        # capture merge and split events. We performed empirical tests
        # of the sensitivity of tracking results to variations in the
        # alternative cost. We found that in a range 80th - 100th
        # percentile of all potential assignment costs the outcome of
        # gap closing, merging and splitting varied negligibly (data
        # not shown). We attribute this robustness to the fact that
        # track initiations and terminations competed globally, in
        # space and time, with all other potential assignments. Thus,
        # the alternative cost was taken as the 90th percentile.'''
        link_percentile = 90
        self.link_cf.get_block()
        link_costs = np.ma.masked_invalid(self.link_cf.mat).compressed()
        cost = np.percentile(link_costs, link_percentile)

        self.birth_cf.context['cost'] = cost
        self.birth_cf.get_block()
        self.death_cf.context['cost'] = cost
        self.death_cf.get_block()

        self.cm = CostMatrix(self.blocks_structure)
        self.cm.solve()
        self.assign()
        return self.trajs

    def _get_candidates(self):
        """Returns the (stop, start) index pairs eligible for gap closing.

        A pair is a candidate when the start of one segment follows the end
        of another by a strictly positive gap smaller than ``maximum_gap``.
        """
        max_gap = self.maximum_gap
        labels = self.trajs.labels
        bounds = np.array([(idxs[0][0], idxs[-1][0]) for idxs
                           in self.trajs.segment_idxs.values()])
        start_times = bounds[:, 0]
        stop_times = bounds[:, 1]
        ss_in, ss_out = np.meshgrid(labels, labels)
        gaps_size = start_times[ss_out] - stop_times[ss_in]
        # Bitwise '&' on the boolean masks is the idiomatic (and clearer)
        # equivalent of multiplying them.
        matches = np.argwhere((gaps_size > 0) & (gaps_size < max_gap))
        if not matches.shape[0]:
            return [], []

        matches_in = matches[:, 1]
        matches_out = matches[:, 0]
        in_idxs = [(in_time, in_lbl) for (in_time, in_lbl)
                   in zip(stop_times[matches_in],
                          self.trajs.labels[matches_in])]
        out_idxs = [(out_time, out_lbl) for (out_time, out_lbl)
                    in zip(start_times[matches_out],
                           self.trajs.labels[matches_out])]
        return in_idxs, out_idxs

    def assign(self):
        """Applies the LAP solution: merges matched segments' labels."""
        row_shapes, col_shapes = self.cm.get_shapes()
        old_labels = self.trajs.index.get_level_values(level='label').values
        new_labels = old_labels.copy()
        unique_old = self.trajs.labels.copy()  # np.unique(old_labels)
        unique_new = self.trajs.labels.copy()  # np.unique(new_labels)

        # Indices past these bounds belong to the birth/death blocks.
        last_in_link = row_shapes[0]
        last_out_link = col_shapes[0]
        for idx_out, idx_in in enumerate(self.cm.out_links[:last_out_link]):
            if idx_in >= last_in_link:
                # no merge: the segment starts a brand new track.
                unique_new[idx_out] = unique_new.max() + 1
            else:
                # do merge: reuse the label of the segment it continues.
                new_label = unique_new[idx_in]
                unique_new[idx_out] = new_label
        for old, new in zip(unique_old, unique_new):
            new_labels[old_labels == old] = new
        self.relabel_trajs(new_labels)
        return self.trajs
passed link_percentile as a parameter
import numpy as np
import logging
log = logging.getLogger(__name__)
from ..matrix import CostMatrix
from ..cost_function import AbstractCostFunction
from ..cost_function.brownian import BrownianGapCloseCostFunction
from ..cost_function.diagonal import DiagonalCostFunction
from . import AbstractSolver
__all__ = []
class GapCloseSolver(AbstractSolver):
    """Solver that closes temporal gaps between track segments.

    Builds a cost matrix from a link cost function (segment end -> segment
    start candidates) plus diagonal birth/death blocks, solves it via LAP,
    and relabels the trajectories so matched segments share a label.

    Parameters
    ----------
    trajs : :class:`pandas.DataFrame`
        Trajectories indexed by ``('t_stamp', 'label')`` with a ``'t'``
        column plus one column per coordinate.
    cost_functions : dict
        Mapping with keys ``'link'``, ``'birth'`` and ``'death'``, each an
        :class:`AbstractCostFunction` instance.
    maximum_gap : number
        Largest temporal gap between a segment end and a segment start that
        may still be closed.
    coords : sequence of str, optional
        Names of the coordinate columns (default ``('x', 'y', 'z')``).
    """

    def __init__(self, trajs, cost_functions,
                 maximum_gap, coords=('x', 'y', 'z')):
        super().__init__(trajs)
        # Copy into a fresh list: avoids the shared-mutable-default pitfall
        # and accepts any sequence (tuple, list, ...).
        self.coords = list(coords)
        self.trajs.check_trajs_df_structure(index=['t_stamp', 'label'],
                                            columns=['t'] + self.coords)

        self.link_cf = cost_functions['link']
        self.check_cost_function_type(self.link_cf, AbstractCostFunction)

        self.birth_cf = cost_functions['birth']
        self.check_cost_function_type(self.birth_cf, AbstractCostFunction)

        self.death_cf = cost_functions['death']
        self.check_cost_function_type(self.death_cf, AbstractCostFunction)

        self.maximum_gap = maximum_gap

    @classmethod
    def for_brownian_motion(cls, trajs, max_speed, maximum_gap,
                            link_percentile=90,
                            coords=('x', 'y', 'z')):
        """Builds a solver preconfigured for Brownian motion.

        The link cost is a Brownian gap-close cost bounded by ``max_speed``;
        birth and death use a diagonal cost seeded with ``max_speed ** 2``
        and re-computed in :meth:`track` from the given ``link_percentile``
        of the realized link costs.
        """
        guessed_cost = float(max_speed ** 2)
        diag_context = {'cost': guessed_cost}
        diag_params = {'link_percentile': link_percentile}

        link_cost_func = BrownianGapCloseCostFunction(
            parameters={'max_speed': max_speed})
        birth_cost_func = DiagonalCostFunction(context=diag_context,
                                               parameters=diag_params)
        death_cost_func = DiagonalCostFunction(context=diag_context,
                                               parameters=diag_params)

        cost_functions = {'link': link_cost_func,
                          'birth': birth_cost_func,
                          'death': death_cost_func}
        return cls(trajs, cost_functions, maximum_gap, coords=coords)

    @property
    def blocks_structure(self):
        """2x2 block layout fed to :class:`CostMatrix`."""
        return [[self.link_cf.mat, self.death_cf.mat],
                [self.birth_cf.mat, None]]

    def track(self):
        """Closes gaps in ``self.trajs`` and returns the relabeled frame."""
        idxs_in, idxs_out = self._get_candidates()

        self.link_cf.context['trajs'] = self.trajs
        self.link_cf.context['idxs_in'] = idxs_in
        self.link_cf.context['idxs_out'] = idxs_out
        self.birth_cf.context['objects'] = self.trajs.labels
        self.death_cf.context['objects'] = self.trajs.labels

        if not len(idxs_in):
            log.info('No gap needs closing here...')
            return self.trajs

        old_labels = self.trajs.index.get_level_values('label').values
        # Use the builtin ``float``: the ``np.float`` alias was deprecated
        # in NumPy 1.20 and removed in 1.24.
        self.trajs['new_label'] = old_labels.astype(float)

        # ''' TFA: For track segment ends and starts, the alternative
        # cost (b and d in Fig. 1c) had to be comparable in magnitude
        # to the costs of potential assignments, making the rejection
        # of gap closing, merging and splitting an accessible
        # alternative. At the same time, the alternative cost had to
        # be at the higher end of the range of potential assignment
        # costs, so that the algorithm did not fail to close gaps and
        # capture merge and split events. We performed empirical tests
        # of the sensitivity of tracking results to variations in the
        # alternative cost. We found that in a range 80th - 100th
        # percentile of all potential assignment costs the outcome of
        # gap closing, merging and splitting varied negligibly (data
        # not shown). We attribute this robustness to the fact that
        # track initiations and terminations competed globally, in
        # space and time, with all other potential assignments. Thus,
        # the alternative cost was taken as the 90th percentile.'''
        link_percentile_b = self.birth_cf.parameters['link_percentile']
        link_percentile_d = self.death_cf.parameters['link_percentile']
        self.link_cf.get_block()
        link_costs = np.ma.masked_invalid(self.link_cf.mat).compressed()
        cost_b = np.percentile(link_costs, link_percentile_b)
        cost_d = np.percentile(link_costs, link_percentile_d)

        self.birth_cf.context['cost'] = cost_b
        self.birth_cf.get_block()
        self.death_cf.context['cost'] = cost_d
        self.death_cf.get_block()

        self.cm = CostMatrix(self.blocks_structure)
        self.cm.solve()
        self.assign()
        return self.trajs

    def _get_candidates(self):
        """Returns the (stop, start) index pairs eligible for gap closing.

        A pair is a candidate when the start of one segment follows the end
        of another by a strictly positive gap smaller than ``maximum_gap``.
        """
        max_gap = self.maximum_gap
        labels = self.trajs.labels
        bounds = np.array([(idxs[0][0], idxs[-1][0]) for idxs
                           in self.trajs.segment_idxs.values()])
        start_times = bounds[:, 0]
        stop_times = bounds[:, 1]
        ss_in, ss_out = np.meshgrid(labels, labels)
        gaps_size = start_times[ss_out] - stop_times[ss_in]
        # Bitwise '&' on the boolean masks is the idiomatic (and clearer)
        # equivalent of multiplying them.
        matches = np.argwhere((gaps_size > 0) & (gaps_size < max_gap))
        if not matches.shape[0]:
            return [], []

        matches_in = matches[:, 1]
        matches_out = matches[:, 0]
        in_idxs = [(in_time, in_lbl) for (in_time, in_lbl)
                   in zip(stop_times[matches_in],
                          self.trajs.labels[matches_in])]
        out_idxs = [(out_time, out_lbl) for (out_time, out_lbl)
                    in zip(start_times[matches_out],
                           self.trajs.labels[matches_out])]
        return in_idxs, out_idxs

    def assign(self):
        """Applies the LAP solution: merges matched segments' labels."""
        row_shapes, col_shapes = self.cm.get_shapes()
        old_labels = self.trajs.index.get_level_values(level='label').values
        new_labels = old_labels.copy()
        unique_old = self.trajs.labels.copy()  # np.unique(old_labels)
        unique_new = self.trajs.labels.copy()  # np.unique(new_labels)

        # Indices past these bounds belong to the birth/death blocks.
        last_in_link = row_shapes[0]
        last_out_link = col_shapes[0]
        for idx_out, idx_in in enumerate(self.cm.out_links[:last_out_link]):
            if idx_in >= last_in_link:
                # no merge: the segment starts a brand new track.
                unique_new[idx_out] = unique_new.max() + 1
            else:
                # do merge: reuse the label of the segment it continues.
                new_label = unique_new[idx_in]
                unique_new[idx_out] = new_label
        for old, new in zip(unique_old, unique_new):
            new_labels[old_labels == old] = new
        self.relabel_trajs(new_labels)
        return self.trajs
|
#!/usr/bin/env python3
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
DEVICE = "PiSense"
class Handler:
    """Key-command handler: reads sensor data, logs it, updates the display."""

    def __init__(self, display, logger, sensor):
        self.display = display
        self.logger = logger
        self.sensor = sensor
        # Mark the start of this logging session.
        self.logger.log(DEVICE, "running", 1)

    def read(self):
        """Read one batch of sensor data, log it, and refresh the display.

        Returns True so the dispatch loop keeps running.
        """
        values = {}
        for reading in self.sensor.get_data():
            # reading appears to be (timestamp, name, value) -- confirm
            # against Sensor.get_data().
            values[reading[1]] = reading[2]
            self.logger.log(DEVICE, reading[1], reading[2], reading[0])
        # Bug fix: use the display injected at construction time instead of
        # relying on a module-level ``display`` global existing at call time.
        self.display.show_properties(values, self.sensor.get_properties())
        return True

    def quit(self):
        """Log shutdown and return False to stop the dispatch loop."""
        self.logger.log(DEVICE, "running", 0)
        return False
# Main entry: the sensor, keyboard dispatcher and logger are all context
# managers, so they are released cleanly even if the loop raises.
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
    # setup display
    # NOTE(review): Handler.read() references this module-level ``display``
    # name directly, so it must not be renamed.
    display = Display("PiSense")
    # setup key handlers
    handler = Handler(display, logger, sensor)
    dispatcher.add("q", handler, "quit")
    # start processing key presses; a handler returning False ends the loop.
    while True:
        if dispatcher.can_process_key():
            if not dispatcher.process_key():
                break
        else:
            # No key pending: take (and log) a sensor reading instead.
            handler.read()
Add ability to control read rate
#!/usr/bin/env python3
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
import time
DEVICE = "PiSense"
DELAY = 0.0
class Handler:
    """Key-command handler: reads sensor data, logs it, updates the display."""

    def __init__(self, display, logger, sensor):
        self.display = display
        self.logger = logger
        self.sensor = sensor
        # Mark the start of this logging session.
        self.logger.log(DEVICE, "running", 1)

    def read(self):
        """Read one batch of sensor data, log it, and refresh the display.

        Returns True so the dispatch loop keeps running.
        """
        values = {}
        for reading in self.sensor.get_data():
            # reading appears to be (timestamp, name, value) -- confirm
            # against Sensor.get_data().
            values[reading[1]] = reading[2]
            self.logger.log(DEVICE, reading[1], reading[2], reading[0])
        # Bug fix: use the display injected at construction time instead of
        # relying on a module-level ``display`` global existing at call time.
        self.display.show_properties(values, self.sensor.get_properties())
        return True

    def quit(self):
        """Log shutdown and return False to stop the dispatch loop."""
        self.logger.log(DEVICE, "running", 0)
        return False
# Main entry: the sensor, keyboard dispatcher and logger are all context
# managers, so they are released cleanly even if the loop raises.
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
    # setup display
    # NOTE(review): Handler.read() references this module-level ``display``
    # name directly, so it must not be renamed.
    display = Display("PiSense")
    # setup key handlers
    handler = Handler(display, logger, sensor)
    dispatcher.add("q", handler, "quit")
    # start processing key presses; a handler returning False ends the loop.
    while True:
        if dispatcher.can_process_key():
            if not dispatcher.process_key():
                break
        else:
            # No key pending: read the sensor, then throttle by DELAY
            # seconds to control the read rate.
            handler.read()
            time.sleep(DELAY)
|
from datetime import datetime, timedelta
from djcelery import celery
from settings import *
from pymongo import MongoClient
client = MongoClient(MONGO_URL)
# Note: ``bind=True`` was removed -- with it, Celery passes the task
# instance as the first positional argument, which this zero-argument
# function could not accept and would raise TypeError at run time.
@celery.task(name='mongo_to_celery', acks_late=True)
def mongo_to_celery():
    """Drain due tasks from MongoDB and hand them to Celery.

    Finds every ViaeTask document whose ``eta`` falls within
    TASK_ETA_THRESHOLD seconds from now, forwards it to Celery with its
    original args/kwargs/eta/expires, and removes the document so it is
    not dispatched again on the next run.
    """
    coll = client.viae.ViaeTask
    # NOTE(review): datetime.now() is naive local time -- confirm stored
    # ``eta`` values use the same convention.
    deadline = datetime.now() + timedelta(seconds=TASK_ETA_THRESHOLD)
    for doc in coll.find({'eta': {'$lt': deadline}}):
        celery.send_task(doc['task'],
                         serializer='json',
                         args=doc.get('args', []),
                         kwargs=doc.get('kwargs', {}),
                         eta=doc.get('eta'),
                         expires=doc.get('expires'))
        # Delete exactly the document we just dispatched (by _id), so it is
        # not re-sent on every polling cycle.
        coll.remove({'_id': doc['_id']})
# Schedules one run of the task 60s from now.  NOTE(review): this executes
# at import time and the task does not reschedule itself, so something else
# (e.g. a beat schedule) must keep the polling going -- confirm.
mongo_to_celery.apply_async(countdown=60)
MOD: fix bug where fetched tasks were not deleted from the collection
from datetime import datetime, timedelta
from djcelery import celery
from settings import *
from pymongo import MongoClient
client = MongoClient(MONGO_URL)
# Note: ``bind=True`` was removed -- with it, Celery passes the task
# instance as the first positional argument, which this zero-argument
# function could not accept and would raise TypeError at run time.
@celery.task(name='mongo_to_celery', acks_late=True)
def mongo_to_celery():
    """Drain due tasks from MongoDB and hand them to Celery.

    Finds every ViaeTask document whose ``eta`` falls within
    TASK_ETA_THRESHOLD seconds from now, forwards it to Celery with its
    original args/kwargs/eta/expires, and removes the document so it is
    not dispatched again on the next run.
    """
    coll = client.viae.ViaeTask
    # NOTE(review): datetime.now() is naive local time -- confirm stored
    # ``eta`` values use the same convention.
    deadline = datetime.now() + timedelta(seconds=TASK_ETA_THRESHOLD)
    for doc in coll.find({'eta': {'$lt': deadline}}):
        celery.send_task(doc['task'],
                         serializer='json',
                         args=doc.get('args', []),
                         kwargs=doc.get('kwargs', {}),
                         eta=doc.get('eta'),
                         expires=doc.get('expires'))
        # Delete exactly the document we just dispatched.  Removing by
        # ``{'task': name}`` (as before) would also drop queued duplicates
        # of the same task that are not yet due.
        coll.remove({'_id': doc['_id']})
# Schedules one run of the task 60s from now.  NOTE(review): this executes
# at import time and the task does not reschedule itself, so something else
# (e.g. a beat schedule) must keep the polling going -- confirm.
mongo_to_celery.apply_async(countdown=60)
|
#!/usr/bin/python2.7
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import calendar
import datetime
import time
from google.appengine import runtime
from google.appengine.api import quota
from google.appengine.api import taskqueue
from google.appengine.ext import db
import config
import delete
import model
import utils
CPU_MEGACYCLES_PER_REQUEST = 1000
EXPIRED_TTL = datetime.timedelta(delete.EXPIRED_TTL_DAYS, 0, 0)
FETCH_LIMIT = 100
class ScanForExpired(utils.BaseHandler):
    """Common logic for scanning the Person table looking for things to delete.
    The common logic handles iterating through the query, updating the expiry
    date and wiping/deleting as needed. The is_expired flag is set on all
    records whose expiry_date has passed. Records that expired more than
    EXPIRED_TTL in the past will also have their data fields, notes, and
    photos permanently deleted.
    Subclasses set the query and task_name."""
    repo_required = False
    def task_name(self):
        """Subclasses should implement this."""
        pass
    def query(self):
        """Subclasses should implement this."""
        pass
    def schedule_next_task(self, cursor):
        """Schedule the next task to carry on with this query.
        """
        self.add_task_for_repo(self.repo, self.task_name(), self.ACTION,
                               cursor=cursor, queue_name='expiry')
    def get(self):
        if self.repo:
            query = self.query()
            # Resume where a previous (deadline-interrupted) task left off.
            if self.params.cursor:
                query.with_cursor(self.params.cursor)
            cursor = self.params.cursor
            try:
                for person in query:
                    # query.cursor() returns a cursor which returns the entity
                    # next to this "person" as the first result.
                    next_cursor = query.cursor()
                    was_expired = person.is_expired
                    person.put_expiry_flags()
                    if (utils.get_utcnow() - person.get_effective_expiry_date()
                        > EXPIRED_TTL):
                        person.wipe_contents()
                    else:
                        # treat this as a regular deletion.
                        if person.is_expired and not was_expired:
                            delete.delete_person(self, person)
                    # Only advance the cursor after the record is fully
                    # processed, so an interrupted run retries it.
                    cursor = next_cursor
            except runtime.DeadlineExceededError:
                self.schedule_next_task(cursor)
        else:
            # No repo given: fan the scan out to every repository.
            for repo in model.Repo.list():
                self.add_task_for_repo(repo, self.task_name(), self.ACTION)
class DeleteExpired(ScanForExpired):
    """Scans for person records whose expiry date has already passed."""
    ACTION = 'tasks/delete_expired'
    def query(self):
        """Selects records that are past their expiry date."""
        return model.Person.past_due_records(self.repo)
    def task_name(self):
        """Task-queue name used to continue this scan."""
        return 'delete-expired'
class DeleteOld(ScanForExpired):
    """Scans for person records with old source dates for expiration."""
    ACTION = 'tasks/delete_old'
    def query(self):
        """Selects records whose source date makes them candidates."""
        return model.Person.potentially_expired_records(self.repo)
    def task_name(self):
        """Task-queue name used to continue this scan."""
        return 'delete-old'
class CleanUpInTestMode(utils.BaseHandler):
    """If the repository is in "test mode", this task deletes all entries older
    than DELETION_AGE_SECONDS (defined below), regardless of their actual
    expiration specification.
    We delete entries quickly so that most of the test data does not persist in
    real mode, and to reduce the effect of spam.
    """
    repo_required = False
    ACTION = 'tasks/clean_up_in_test_mode'
    # Entries older than this age in seconds are deleted in test mode.
    #
    # If you are maintaining a single repository and switching it between test
    # mode (for drills) and real mode (for real crises), you should be sure to
    # switch to real mode within DELETION_AGE_SECONDS after a real crisis
    # occurs, because:
    #   - When the crisis happens, the users may be confused and enter real
    #     information on the repository, even though it's still in test mode.
    #     (All pages show "test mode" message, but some users may be still
    #     confused.)
    #   - If we fail to make the switch in DELETION_AGE_SECONDS, such real
    #     entries are deleted.
    #   - If we make the switch in DELETION_AGE_SECONDS, such entries are not
    #     deleted, and handled as a part of real mode data.
    DELETION_AGE_SECONDS = 6 * 3600
    def __init__(self, request, response, env):
        utils.BaseHandler.__init__(self, request, response, env)
        # Optional hook notified before each deletion (used by tests).
        self.__listener = None
    def task_name(self):
        """Returns the task-queue name used to continue this scan."""
        return 'clean-up-in-test-mode'
    def schedule_next_task(self, cursor, utcnow):
        """Schedule the next task to carry on with this query.
        """
        self.add_task_for_repo(
            self.repo,
            self.task_name(),
            self.ACTION,
            utcnow=str(calendar.timegm(utcnow.utctimetuple())),
            cursor=cursor,
            queue_name='clean_up_in_test_mode')
    def in_test_mode(self, repo):
        """Returns True if the repository is in test mode."""
        return config.get('test_mode', repo=repo)
    def get(self):
        if self.repo:
            # To reuse the cursor from the previous task, we need to apply
            # exactly the same filter. So we use utcnow previously used
            # instead of the current time.
            utcnow = self.params.utcnow or utils.get_utcnow()
            max_entry_date = (
                utcnow -
                datetime.timedelta(
                    seconds=CleanUpInTestMode.DELETION_AGE_SECONDS))
            query = model.Person.all_in_repo(self.repo)
            query.filter('entry_date <=', max_entry_date)
            if self.params.cursor:
                query.with_cursor(self.params.cursor)
            cursor = self.params.cursor
            # Uses query.get() instead of "for person in query".
            # If we use for-loop, query.cursor() points to an unexpected
            # position.
            person = query.get()
            # When the repository is no longer in test mode, aborts the
            # deletion.
            try:
                while person and self.in_test_mode(self.repo):
                    if self.__listener:
                        self.__listener.before_deletion(person.key())
                    person.delete_related_entities(delete_self=True)
                    cursor = query.cursor()
                    person = query.get()
            except runtime.DeadlineExceededError:
                # Out of request time: hand the cursor to a follow-up task.
                self.schedule_next_task(cursor, utcnow)
        else:
            # No repo given: fan out one task per test-mode repository.
            for repo in model.Repo.list():
                if self.in_test_mode(repo):
                    self.add_task_for_repo(repo, self.task_name(), self.ACTION)
    def set_listener(self, listener):
        """Registers a hook called with each key before it is deleted."""
        self.__listener = listener
def run_count(make_query, update_counter, counter):
    """Feeds batches of entities matching a query to a counting function,
    stopping after a bounded number of batches or when the query runs dry."""
    batches_remaining = 100
    while batches_remaining > 0:
        batches_remaining -= 1
        # Build a fresh query, resuming just past the last counted key.
        query = make_query()
        if counter.last_key:
            query = query.filter('__key__ >', db.Key(counter.last_key))
        batch = query.order('__key__').fetch(FETCH_LIMIT)
        if not batch:
            # Nothing left: clear the checkpoint to mark the scan finished.
            counter.last_key = ''
            break
        # Count every entity in this batch.
        for item in batch:
            update_counter(counter, item)
        # Checkpoint: remember the last key we processed.
        counter.last_key = str(batch[-1].key())
class CountBase(utils.BaseHandler):
    """A base handler for counting tasks. Making a request to this handler
    without a specified repo will start tasks for all repositories in parallel.
    Each subclass of this class handles one scan through the datastore."""
    repo_required = False  # can run without a repo

    SCAN_NAME = ''  # Each subclass should choose a unique scan_name.
    ACTION = ''  # Each subclass should set the action path that it handles.

    def get(self):
        if self.repo:  # Do some counting.
            try:
                while True:
                    counter = model.Counter.get_unfinished_or_create(
                        self.repo, self.SCAN_NAME)
                    run_count(self.make_query, self.update_counter, counter)
                    counter.put()
                    # An empty last_key means run_count exhausted the query.
                    if not counter.last_key: break
            except runtime.DeadlineExceededError:
                # Continue counting in another task.
                self.add_task_for_repo(self.repo, self.SCAN_NAME, self.ACTION)
        else:  # Launch counting tasks for all repositories.
            for repo in model.Repo.list():
                self.add_task_for_repo(repo, self.SCAN_NAME, self.ACTION)

    def make_query(self):
        """Subclasses should implement this. This will be called to get the
        datastore query; it should always return the same query."""

    def update_counter(self, counter, entity):
        """Subclasses should implement this. This will be called once for
        each entity that matches the query; it should call increment() on
        the counter object for whatever accumulators it wants to increment."""
class CountPerson(CountBase):
    """Per-repository scan that tallies Person records by attribute."""
    SCAN_NAME = 'person'
    ACTION = 'tasks/count/person'

    def make_query(self):
        """Every Person record in this repository."""
        return model.Person.all().filter('repo =', self.repo)

    def update_counter(self, counter, person):
        """Bumps one accumulator per attribute of interest."""
        if person.latest_found is None:
            found = ''
        else:
            found = 'TRUE' if person.latest_found else 'FALSE'
        counter.increment('all')
        counter.increment('original_domain=' + (person.original_domain or ''))
        counter.increment('sex=' + (person.sex or ''))
        counter.increment('home_country=' + (person.home_country or ''))
        counter.increment('photo=' + ('present' if person.photo_url else ''))
        counter.increment('num_notes=%d' % len(person.get_notes()))
        counter.increment('status=' + (person.latest_status or ''))
        counter.increment('found=' + found)
        counter.increment(
            'linked_persons=%d' % len(person.get_linked_persons()))
class CountNote(CountBase):
    """Per-repository scan that tallies Note records by attribute."""
    SCAN_NAME = 'note'
    ACTION = 'tasks/count/note'

    def make_query(self):
        """Every Note record in this repository."""
        return model.Note.all().filter('repo =', self.repo)

    def update_counter(self, counter, note):
        """Bumps one accumulator per attribute of interest."""
        if note.author_made_contact is None:
            author_made_contact = ''
        else:
            author_made_contact = (
                'TRUE' if note.author_made_contact else 'FALSE')
        counter.increment('all')
        counter.increment('status=' + (note.status or ''))
        counter.increment('original_domain=' + (note.original_domain or ''))
        counter.increment('author_made_contact=' + author_made_contact)
        if note.linked_person_record_id:
            counter.increment('linked_person')
        if note.last_known_location:
            counter.increment('last_known_location')
class AddReviewedProperty(CountBase):
    """Backfills reviewed=False on Notes missing the 'reviewed' property.

    Migration task for datastores created before the property existed;
    Notes must carry reviewed=False to be indexed."""
    SCAN_NAME = 'unreview-note'
    ACTION = 'tasks/count/unreview_note'

    def make_query(self):
        return model.Note.all().filter('repo =', self.repo)

    def update_counter(self, counter, note):
        # Leave already-flagged notes untouched.
        if note.reviewed:
            return
        note.reviewed = False
        note.put()
class UpdateDeadStatus(CountBase):
    """Re-derives latest_status for Persons currently marked
    'believed_dead', to clean up after bogus believed_dead notes that were
    flagged as spam. (A cleanup task, not a counting task.)"""
    SCAN_NAME = 'update-dead-status'
    ACTION = 'tasks/count/update_dead_status'

    def make_query(self):
        query = model.Person.all().filter('repo =', self.repo)
        return query.filter('latest_status =', 'believed_dead')

    def update_counter(self, counter, person):
        person.update_latest_status()
class UpdateStatus(CountBase):
    """Re-derives latest_status for every Person from its last non-hidden
    Note. (A cleanup task, not a counting task.)"""
    SCAN_NAME = 'update-status'
    ACTION = 'tasks/count/update_status'

    def make_query(self):
        query = model.Person.all()
        return query.filter('repo =', self.repo)

    def update_counter(self, counter, person):
        person.update_latest_status()
class Reindex(CountBase):
    """Rebuilds the search index entries for every Person."""
    SCAN_NAME = 'reindex'
    ACTION = 'tasks/count/reindex'

    def make_query(self):
        query = model.Person.all()
        return query.filter('repo =', self.repo)

    def update_counter(self, counter, person):
        # Rebuild both index generations, then persist.
        person.update_index(['old', 'new'])
        person.put()
Move the for loop from run_count to its caller.
#!/usr/bin/python2.7
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import calendar
import datetime
import time
from google.appengine import runtime
from google.appengine.api import quota
from google.appengine.api import taskqueue
from google.appengine.ext import db
import config
import delete
import model
import utils
CPU_MEGACYCLES_PER_REQUEST = 1000
EXPIRED_TTL = datetime.timedelta(delete.EXPIRED_TTL_DAYS, 0, 0)
FETCH_LIMIT = 100
class ScanForExpired(utils.BaseHandler):
    """Common logic for scanning the Person table looking for things to delete.

    The common logic handles iterating through the query, updating the expiry
    date and wiping/deleting as needed. It sets the is_expired flag on all
    records whose expiry_date has passed. Records that expired more than
    EXPIRED_TTL in the past will also have their data fields, notes, and
    photos permanently deleted.

    Subclasses set the query and task_name."""
    repo_required = False  # without a repo, fans out one task per repo

    def task_name(self):
        """Subclasses should implement this."""
        pass

    def query(self):
        """Subclasses should implement this."""
        pass

    def schedule_next_task(self, cursor):
        """Schedules the next task to carry on with this query."""
        self.add_task_for_repo(self.repo, self.task_name(), self.ACTION,
                               cursor=cursor, queue_name='expiry')

    def get(self):
        if self.repo:
            query = self.query()
            if self.params.cursor:
                query.with_cursor(self.params.cursor)
            cursor = self.params.cursor
            try:
                for person in query:
                    # query.cursor() returns a cursor which returns the entity
                    # next to this "person" as the first result.
                    next_cursor = query.cursor()
                    was_expired = person.is_expired
                    person.put_expiry_flags()
                    if (utils.get_utcnow() - person.get_effective_expiry_date()
                        > EXPIRED_TTL):
                        person.wipe_contents()
                    else:
                        # treat this as a regular deletion.
                        if person.is_expired and not was_expired:
                            delete.delete_person(self, person)
                    # Advance the checkpoint only after the record is handled.
                    cursor = next_cursor
            except runtime.DeadlineExceededError:
                # Out of time: resume from the last checkpoint in a new task.
                self.schedule_next_task(cursor)
        else:
            for repo in model.Repo.list():
                self.add_task_for_repo(repo, self.task_name(), self.ACTION)
class DeleteExpired(ScanForExpired):
    """Scans for Person records whose expiry date has already passed."""
    ACTION = 'tasks/delete_expired'

    def task_name(self):
        return 'delete-expired'

    def query(self):
        return model.Person.past_due_records(self.repo)
class DeleteOld(ScanForExpired):
    """Scans for Person records whose source dates are old enough to make
    them candidates for expiration."""
    ACTION = 'tasks/delete_old'

    def task_name(self):
        return 'delete-old'

    def query(self):
        return model.Person.potentially_expired_records(self.repo)
class CleanUpInTestMode(utils.BaseHandler):
    """If the repository is in "test mode", this task deletes all entries older
    than DELETION_AGE_SECONDS (defined below), regardless of their actual
    expiration specification.

    We delete entries quickly so that most of the test data does not persist in
    real mode, and to reduce the effect of spam.
    """
    repo_required = False  # without a repo, fans out one task per repo
    ACTION = 'tasks/clean_up_in_test_mode'

    # Entries older than this age in seconds are deleted in test mode.
    #
    # If you are maintaining a single repository and switching it between test
    # mode (for drills) and real mode (for real crises), you should be sure to
    # switch to real mode within DELETION_AGE_SECONDS after a real crisis
    # occurs, because:
    #   - When the crisis happens, the users may be confused and enter real
    #     information on the repository, even though it's still in test mode.
    #     (All pages show "test mode" message, but some users may be still
    #     confused.)
    #   - If we fail to make the switch in DELETION_AGE_SECONDS, such real
    #     entries are deleted.
    #   - If we make the switch in DELETION_AGE_SECONDS, such entries are not
    #     deleted, and handled as a part of real mode data.
    DELETION_AGE_SECONDS = 6 * 3600

    def __init__(self, request, response, env):
        utils.BaseHandler.__init__(self, request, response, env)
        # Optional observer notified (via before_deletion) just before each
        # Person entity is deleted; see set_listener().
        self.__listener = None

    def task_name(self):
        return 'clean-up-in-test-mode'

    def schedule_next_task(self, cursor, utcnow):
        """Schedules the next task to carry on with this query."""
        self.add_task_for_repo(
            self.repo,
            self.task_name(),
            self.ACTION,
            # Pass the original timestamp along so the next task rebuilds
            # exactly the same entry_date filter (required for the cursor).
            utcnow=str(calendar.timegm(utcnow.utctimetuple())),
            cursor=cursor,
            queue_name='clean_up_in_test_mode')

    def in_test_mode(self, repo):
        """Returns True if the repository is in test mode."""
        return config.get('test_mode', repo=repo)

    def get(self):
        if self.repo:
            # To reuse the cursor from the previous task, we need to apply
            # exactly the same filter. So we use utcnow previously used
            # instead of the current time.
            utcnow = self.params.utcnow or utils.get_utcnow()
            max_entry_date = (
                utcnow -
                datetime.timedelta(
                    seconds=CleanUpInTestMode.DELETION_AGE_SECONDS))
            query = model.Person.all_in_repo(self.repo)
            query.filter('entry_date <=', max_entry_date)
            if self.params.cursor:
                query.with_cursor(self.params.cursor)
            cursor = self.params.cursor
            # Uses query.get() instead of "for person in query".
            # If we use for-loop, query.cursor() points to an unexpected
            # position.
            person = query.get()
            # When the repository is no longer in test mode, aborts the
            # deletion.
            try:
                while person and self.in_test_mode(self.repo):
                    if self.__listener:
                        self.__listener.before_deletion(person.key())
                    person.delete_related_entities(delete_self=True)
                    # Checkpoint only after the deletion succeeded.
                    cursor = query.cursor()
                    person = query.get()
            except runtime.DeadlineExceededError:
                # Out of time: hand the cursor to a follow-up task.
                self.schedule_next_task(cursor, utcnow)
        else:
            for repo in model.Repo.list():
                if self.in_test_mode(repo):
                    self.add_task_for_repo(repo, self.task_name(), self.ACTION)

    def set_listener(self, listener):
        """Registers an observer; get() calls listener.before_deletion(key)
        right before each Person is deleted."""
        self.__listener = listener
def run_count(make_query, update_counter, counter):
    """Counts one batch of entities matching the query.

    Returns False once every matching entity has been counted; True while
    there may be more work left."""
    # Re-issue the query, resuming just past the last counted key.
    query = make_query()
    if counter.last_key:
        query = query.filter('__key__ >', db.Key(counter.last_key))
    batch = query.order('__key__').fetch(FETCH_LIMIT)
    if not batch:
        # Scan complete; clear the checkpoint.
        counter.last_key = ''
        return False
    # Feed the whole batch to the counting function.
    for item in batch:
        update_counter(counter, item)
    # Checkpoint the last processed key.
    counter.last_key = str(batch[-1].key())
    return True
class CountBase(utils.BaseHandler):
    """A base handler for counting tasks. Making a request to this handler
    without a specified repo will start tasks for all repositories in parallel.
    Each subclass of this class handles one scan through the datastore."""
    repo_required = False  # can run without a repo

    SCAN_NAME = ''  # Each subclass should choose a unique scan_name.
    ACTION = ''  # Each subclass should set the action path that it handles.

    def get(self):
        if self.repo:  # Do some counting.
            try:
                counter = model.Counter.get_unfinished_or_create(
                    self.repo, self.SCAN_NAME)
                counted_all_entities = False
                while not counted_all_entities:
                    # Batch the db updates.
                    for _ in xrange(100):
                        counted_all_entities = run_count(
                            self.make_query, self.update_counter, counter)
                        if counted_all_entities:
                            break
                    # Persist progress once per group of batches.
                    counter.put()
            except runtime.DeadlineExceededError:
                # Continue counting in another task.
                self.add_task_for_repo(self.repo, self.SCAN_NAME, self.ACTION)
        else:  # Launch counting tasks for all repositories.
            for repo in model.Repo.list():
                self.add_task_for_repo(repo, self.SCAN_NAME, self.ACTION)

    def make_query(self):
        """Subclasses should implement this. This will be called to get the
        datastore query; it should always return the same query."""

    def update_counter(self, counter, entity):
        """Subclasses should implement this. This will be called once for
        each entity that matches the query; it should call increment() on
        the counter object for whatever accumulators it wants to increment."""
class CountPerson(CountBase):
    """Per-repository scan that tallies Person records by attribute."""
    SCAN_NAME = 'person'
    ACTION = 'tasks/count/person'

    def make_query(self):
        """Every Person record in this repository."""
        return model.Person.all().filter('repo =', self.repo)

    def update_counter(self, counter, person):
        """Bumps one accumulator per attribute of interest."""
        if person.latest_found is None:
            found = ''
        else:
            found = 'TRUE' if person.latest_found else 'FALSE'
        counter.increment('all')
        counter.increment('original_domain=' + (person.original_domain or ''))
        counter.increment('sex=' + (person.sex or ''))
        counter.increment('home_country=' + (person.home_country or ''))
        counter.increment('photo=' + ('present' if person.photo_url else ''))
        counter.increment('num_notes=%d' % len(person.get_notes()))
        counter.increment('status=' + (person.latest_status or ''))
        counter.increment('found=' + found)
        counter.increment(
            'linked_persons=%d' % len(person.get_linked_persons()))
class CountNote(CountBase):
    """Per-repository scan that tallies Note records by attribute."""
    SCAN_NAME = 'note'
    ACTION = 'tasks/count/note'

    def make_query(self):
        """Every Note record in this repository."""
        return model.Note.all().filter('repo =', self.repo)

    def update_counter(self, counter, note):
        """Bumps one accumulator per attribute of interest."""
        if note.author_made_contact is None:
            author_made_contact = ''
        else:
            author_made_contact = (
                'TRUE' if note.author_made_contact else 'FALSE')
        counter.increment('all')
        counter.increment('status=' + (note.status or ''))
        counter.increment('original_domain=' + (note.original_domain or ''))
        counter.increment('author_made_contact=' + author_made_contact)
        if note.linked_person_record_id:
            counter.increment('linked_person')
        if note.last_known_location:
            counter.increment('last_known_location')
class AddReviewedProperty(CountBase):
    """Backfills reviewed=False on Notes missing the 'reviewed' property.

    Migration task for datastores created before the property existed;
    Notes must carry reviewed=False to be indexed."""
    SCAN_NAME = 'unreview-note'
    ACTION = 'tasks/count/unreview_note'

    def make_query(self):
        return model.Note.all().filter('repo =', self.repo)

    def update_counter(self, counter, note):
        # Leave already-flagged notes untouched.
        if note.reviewed:
            return
        note.reviewed = False
        note.put()
class UpdateDeadStatus(CountBase):
    """Re-derives latest_status for Persons currently marked
    'believed_dead', to clean up after bogus believed_dead notes that were
    flagged as spam. (A cleanup task, not a counting task.)"""
    SCAN_NAME = 'update-dead-status'
    ACTION = 'tasks/count/update_dead_status'

    def make_query(self):
        query = model.Person.all().filter('repo =', self.repo)
        return query.filter('latest_status =', 'believed_dead')

    def update_counter(self, counter, person):
        person.update_latest_status()
class UpdateStatus(CountBase):
    """Re-derives latest_status for every Person from its last non-hidden
    Note. (A cleanup task, not a counting task.)"""
    SCAN_NAME = 'update-status'
    ACTION = 'tasks/count/update_status'

    def make_query(self):
        query = model.Person.all()
        return query.filter('repo =', self.repo)

    def update_counter(self, counter, person):
        person.update_latest_status()
class Reindex(CountBase):
    """Rebuilds the search index entries for every Person."""
    SCAN_NAME = 'reindex'
    ACTION = 'tasks/count/reindex'

    def make_query(self):
        query = model.Person.all()
        return query.filter('repo =', self.repo)

    def update_counter(self, counter, person):
        # Rebuild both index generations, then persist.
        person.update_index(['old', 'new'])
        person.put()
|
#!/usr/bin/python2.5
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import sys
import time
from google.appengine.api import quota
from google.appengine.api import taskqueue
from google.appengine.ext import db
import delete
import model
import utils
CPU_MEGACYCLES_PER_REQUEST = 1000
EXPIRED_TTL = datetime.timedelta(delete.EXPIRED_TTL_DAYS, 0, 0)
FETCH_LIMIT = 100
class DeleteExpired(utils.Handler):
    """Scans the Person table looking for expired records to delete, updating
    the is_expired flag on all records whose expiry_date has passed. Records
    that expired more than EXPIRED_TTL in the past will also have their data
    fields, notes, and photos permanently deleted."""
    URL = '/tasks/delete_expired'
    subdomain_required = False  # runs at the root domain, not per-subdomain

    def get(self):
        """Processes past-due Person records within the CPU budget."""
        query = model.Person.past_due_records()
        for person in query:
            if quota.get_request_cpu_usage() > CPU_MEGACYCLES_PER_REQUEST:
                # Stop before running into the hard limit on CPU time per
                # request, to avoid aborting in the middle of an operation.
                # TODO(kpy): Figure out whether to queue another task here.
                # Is it safe for two tasks to run in parallel over the same
                # set of records returned by the query?
                break
            person.put_expiry_flags()
            if (person.expiry_date and
                utils.get_utcnow() - person.expiry_date > EXPIRED_TTL):
                # Long expired: permanently delete data fields/notes/photos.
                person.wipe_contents()
def run_count(make_query, update_counter, counter):
    """Feeds query results to update_counter until the request's CPU budget
    is exhausted or the query runs dry."""
    while True:
        # Stop once the CPU allowance for this request is spent.
        if quota.get_request_cpu_usage() >= CPU_MEGACYCLES_PER_REQUEST:
            break
        # Re-issue the query, resuming just past the last counted key.
        query = make_query()
        if counter.last_key:
            query = query.filter('__key__ >', db.Key(counter.last_key))
        batch = query.order('__key__').fetch(FETCH_LIMIT)
        if not batch:
            # Scan complete; clear the checkpoint.
            counter.last_key = ''
            break
        # Count every entity in this batch.
        for item in batch:
            update_counter(counter, item)
        # Checkpoint the last processed key.
        counter.last_key = str(batch[-1].key())
class CountBase(utils.Handler):
    """A base handler for counting tasks. Making a request to this handler
    without a subdomain will start tasks for all subdomains in parallel.
    Each subclass of this class handles one scan through the datastore."""
    subdomain_required = False  # Run at the root domain, not a subdomain.

    SCAN_NAME = ''  # Each subclass should choose a unique scan_name.
    URL = ''  # Each subclass should set the URL path that it handles.

    def get(self):
        if self.subdomain:  # Do some counting.
            counter = model.Counter.get_unfinished_or_create(
                self.subdomain, self.SCAN_NAME)
            run_count(self.make_query, self.update_counter, counter)
            counter.put()
            if counter.last_key:  # Continue counting in another task.
                self.add_task(self.subdomain)
        else:  # Launch counting tasks for all subdomains.
            for subdomain in model.Subdomain.list():
                self.add_task(subdomain)

    def add_task(self, subdomain):
        """Queues up a task for an individual subdomain."""
        # Millisecond timestamp keeps the task name unique per launch.
        task_name = '%s-%s-%s' % (
            subdomain, self.SCAN_NAME, int(time.time()*1000))
        taskqueue.add(name=task_name, method='GET', url=self.URL,
                      params={'subdomain': subdomain})

    def make_query(self):
        """Subclasses should implement this. This will be called to get the
        datastore query; it should always return the same query."""

    def update_counter(self, counter, entity):
        """Subclasses should implement this. This will be called once for
        each entity that matches the query; it should call increment() on
        the counter object for whatever accumulators it wants to increment."""
class CountPerson(CountBase):
    """Per-subdomain scan that tallies Person records by attribute."""
    SCAN_NAME = 'person'
    URL = '/tasks/count/person'

    def make_query(self):
        """Every Person record in this subdomain."""
        return model.Person.all().filter('subdomain =', self.subdomain)

    def update_counter(self, counter, person):
        """Bumps one accumulator per attribute of interest."""
        if person.latest_found is None:
            found = ''
        else:
            found = 'TRUE' if person.latest_found else 'FALSE'
        counter.increment('all')
        counter.increment('original_domain=' + (person.original_domain or ''))
        counter.increment('sex=' + (person.sex or ''))
        counter.increment('home_country=' + (person.home_country or ''))
        counter.increment('photo=' + ('present' if person.photo_url else ''))
        counter.increment('num_notes=%d' % len(person.get_notes()))
        counter.increment('status=' + (person.latest_status or ''))
        counter.increment('found=' + found)
        counter.increment(
            'linked_persons=%d' % len(person.get_linked_persons()))
class CountNote(CountBase):
    """Per-subdomain scan that tallies Note records by attribute."""
    SCAN_NAME = 'note'
    URL = '/tasks/count/note'

    def make_query(self):
        """Every Note record in this subdomain."""
        return model.Note.all().filter('subdomain =', self.subdomain)

    def update_counter(self, counter, note):
        """Bumps one accumulator per attribute of interest."""
        if note.found is None:
            found = ''
        else:
            found = 'TRUE' if note.found else 'FALSE'
        counter.increment('all')
        counter.increment('status=' + (note.status or ''))
        counter.increment('original_domain=' + (note.original_domain or ''))
        counter.increment('found=' + found)
        if note.linked_person_record_id:
            counter.increment('linked_person')
        if note.last_known_location:
            counter.increment('last_known_location')
class AddReviewedProperty(CountBase):
    """Sets 'reviewed' to False on all notes that have no 'reviewed' property.
    This task is for migrating datastores that were created before the
    'reviewed' property existed; 'reviewed' has to be set to False so that
    the Notes will be indexed."""
    SCAN_NAME = 'unreview-note'
    URL = '/tasks/count/unreview_note'

    def make_query(self):
        """Returns the query for all Notes in this subdomain."""
        # Bug fix: this file does "import model" (not "from model import *"),
        # so the class must be referenced as model.Note; a bare "Note" raises
        # NameError at runtime.
        return model.Note.all().filter('subdomain =', self.subdomain)

    def update_counter(self, counter, note):
        """Backfills reviewed=False on notes missing the property."""
        if not note.reviewed:
            note.reviewed = False
            note.put()
class UpdateStatus(CountBase):
    """This task looks for Person records with the status 'believed_dead',
    checks for the last non-hidden Note, and updates the status if necessary.
    This is designed specifically to address bogus 'believed_dead' notes that
    are flagged as spam. (This is a cleanup task, not a counting task.)"""
    SCAN_NAME = 'update-status'
    URL = '/tasks/count/update_status'

    def make_query(self):
        """Returns 'believed_dead' Persons in this subdomain."""
        # Bug fix: this file does "import model" (not "from model import *"),
        # so the class must be referenced as model.Person; a bare "Person"
        # raises NameError at runtime.
        return model.Person.all().filter('subdomain =', self.subdomain
            ).filter('latest_status =', 'believed_dead')

    def update_counter(self, counter, person):
        """Recomputes latest_status from the last non-hidden Note."""
        status = None
        status_source_date = None
        # Later notes override earlier ones; the final match wins.
        for note in person.get_notes():
            if note.status and not note.hidden:
                status = note.status
                status_source_date = note.source_date
        if status != person.latest_status:
            person.latest_status = status
            person.latest_status_source_date = status_source_date
            person.put()
class Reindex(CountBase):
    """A handler for re-indexing Persons."""
    SCAN_NAME = 'reindex'
    URL = '/tasks/count/reindex'

    def make_query(self):
        """Returns every Person in this subdomain."""
        # Bug fix: this file does "import model" (not "from model import *"),
        # so the class must be referenced as model.Person; a bare "Person"
        # raises NameError at runtime.
        return model.Person.all().filter('subdomain =', self.subdomain)

    def update_counter(self, counter, person):
        """Rebuilds both index generations for the person, then persists."""
        person.update_index(['old', 'new'])
        person.put()
if __name__ == '__main__':
    # Map each task URL to its handler class.
    # NOTE(review): AddReviewedProperty is defined above but not registered
    # here -- confirm whether that omission is intentional.
    utils.run((CountPerson.URL, CountPerson),
              (CountNote.URL, CountNote),
              (DeleteExpired.URL, DeleteExpired),
              (UpdateStatus.URL, UpdateStatus),
              (Reindex.URL, Reindex))
Fix bad merges in app/tasks.py (due to removing "from model import *").
(transplanted from 27b530f4884d6fb6b1f78ebd9c8dde1b760d0663)
#!/usr/bin/python2.5
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import sys
import time
from google.appengine.api import quota
from google.appengine.api import taskqueue
from google.appengine.ext import db
import delete
import model
import utils
CPU_MEGACYCLES_PER_REQUEST = 1000
EXPIRED_TTL = datetime.timedelta(delete.EXPIRED_TTL_DAYS, 0, 0)
FETCH_LIMIT = 100
class DeleteExpired(utils.Handler):
    """Scans the Person table looking for expired records to delete, updating
    the is_expired flag on all records whose expiry_date has passed. Records
    that expired more than EXPIRED_TTL in the past will also have their data
    fields, notes, and photos permanently deleted."""
    URL = '/tasks/delete_expired'
    subdomain_required = False  # runs at the root domain, not per-subdomain

    def get(self):
        """Processes past-due Person records within the CPU budget."""
        query = model.Person.past_due_records()
        for person in query:
            if quota.get_request_cpu_usage() > CPU_MEGACYCLES_PER_REQUEST:
                # Stop before running into the hard limit on CPU time per
                # request, to avoid aborting in the middle of an operation.
                # TODO(kpy): Figure out whether to queue another task here.
                # Is it safe for two tasks to run in parallel over the same
                # set of records returned by the query?
                break
            person.put_expiry_flags()
            if (person.expiry_date and
                utils.get_utcnow() - person.expiry_date > EXPIRED_TTL):
                # Long expired: permanently delete data fields/notes/photos.
                person.wipe_contents()
def run_count(make_query, update_counter, counter):
    """Feeds query results to update_counter until the request's CPU budget
    is exhausted or the query runs dry."""
    while True:
        # Stop once the CPU allowance for this request is spent.
        if quota.get_request_cpu_usage() >= CPU_MEGACYCLES_PER_REQUEST:
            break
        # Re-issue the query, resuming just past the last counted key.
        query = make_query()
        if counter.last_key:
            query = query.filter('__key__ >', db.Key(counter.last_key))
        batch = query.order('__key__').fetch(FETCH_LIMIT)
        if not batch:
            # Scan complete; clear the checkpoint.
            counter.last_key = ''
            break
        # Count every entity in this batch.
        for item in batch:
            update_counter(counter, item)
        # Checkpoint the last processed key.
        counter.last_key = str(batch[-1].key())
class CountBase(utils.Handler):
    """A base handler for counting tasks. Making a request to this handler
    without a subdomain will start tasks for all subdomains in parallel.
    Each subclass of this class handles one scan through the datastore."""
    subdomain_required = False  # Run at the root domain, not a subdomain.

    SCAN_NAME = ''  # Each subclass should choose a unique scan_name.
    URL = ''  # Each subclass should set the URL path that it handles.

    def get(self):
        if self.subdomain:  # Do some counting.
            counter = model.Counter.get_unfinished_or_create(
                self.subdomain, self.SCAN_NAME)
            run_count(self.make_query, self.update_counter, counter)
            counter.put()
            if counter.last_key:  # Continue counting in another task.
                self.add_task(self.subdomain)
        else:  # Launch counting tasks for all subdomains.
            for subdomain in model.Subdomain.list():
                self.add_task(subdomain)

    def add_task(self, subdomain):
        """Queues up a task for an individual subdomain."""
        # Millisecond timestamp keeps the task name unique per launch.
        task_name = '%s-%s-%s' % (
            subdomain, self.SCAN_NAME, int(time.time()*1000))
        taskqueue.add(name=task_name, method='GET', url=self.URL,
                      params={'subdomain': subdomain})

    def make_query(self):
        """Subclasses should implement this. This will be called to get the
        datastore query; it should always return the same query."""

    def update_counter(self, counter, entity):
        """Subclasses should implement this. This will be called once for
        each entity that matches the query; it should call increment() on
        the counter object for whatever accumulators it wants to increment."""
class CountPerson(CountBase):
    """Per-subdomain scan that tallies Person records by attribute."""
    SCAN_NAME = 'person'
    URL = '/tasks/count/person'

    def make_query(self):
        """Every Person record in this subdomain."""
        return model.Person.all().filter('subdomain =', self.subdomain)

    def update_counter(self, counter, person):
        """Bumps one accumulator per attribute of interest."""
        if person.latest_found is None:
            found = ''
        else:
            found = 'TRUE' if person.latest_found else 'FALSE'
        counter.increment('all')
        counter.increment('original_domain=' + (person.original_domain or ''))
        counter.increment('sex=' + (person.sex or ''))
        counter.increment('home_country=' + (person.home_country or ''))
        counter.increment('photo=' + ('present' if person.photo_url else ''))
        counter.increment('num_notes=%d' % len(person.get_notes()))
        counter.increment('status=' + (person.latest_status or ''))
        counter.increment('found=' + found)
        counter.increment(
            'linked_persons=%d' % len(person.get_linked_persons()))
class CountNote(CountBase):
    """Per-subdomain scan that tallies Note records by attribute."""
    SCAN_NAME = 'note'
    URL = '/tasks/count/note'

    def make_query(self):
        """Every Note record in this subdomain."""
        return model.Note.all().filter('subdomain =', self.subdomain)

    def update_counter(self, counter, note):
        """Bumps one accumulator per attribute of interest."""
        if note.found is None:
            found = ''
        else:
            found = 'TRUE' if note.found else 'FALSE'
        counter.increment('all')
        counter.increment('status=' + (note.status or ''))
        counter.increment('original_domain=' + (note.original_domain or ''))
        counter.increment('found=' + found)
        if note.linked_person_record_id:
            counter.increment('linked_person')
        if note.last_known_location:
            counter.increment('last_known_location')
class AddReviewedProperty(CountBase):
    """Backfills reviewed=False on Notes missing the 'reviewed' property.

    Migration task for datastores created before the property existed;
    Notes must carry reviewed=False to be indexed."""
    SCAN_NAME = 'unreview-note'
    URL = '/tasks/count/unreview_note'

    def make_query(self):
        return model.Note.all().filter('subdomain =', self.subdomain)

    def update_counter(self, counter, note):
        # Leave already-flagged notes untouched.
        if note.reviewed:
            return
        note.reviewed = False
        note.put()
class UpdateStatus(CountBase):
    """Recomputes latest_status for 'believed_dead' Persons from the last
    non-hidden Note, to undo bogus believed_dead notes flagged as spam.
    (A cleanup task, not a counting task.)"""
    SCAN_NAME = 'update-status'
    URL = '/tasks/count/update_status'

    def make_query(self):
        query = model.Person.all().filter('subdomain =', self.subdomain)
        return query.filter('latest_status =', 'believed_dead')

    def update_counter(self, counter, person):
        latest_status = None
        latest_source_date = None
        # Later notes override earlier ones; the final match wins.
        for note in person.get_notes():
            if note.status and not note.hidden:
                latest_status = note.status
                latest_source_date = note.source_date
        if latest_status != person.latest_status:
            person.latest_status = latest_status
            person.latest_status_source_date = latest_source_date
            person.put()
class Reindex(CountBase):
    """Rebuilds the search index entries for every Person."""
    SCAN_NAME = 'reindex'
    URL = '/tasks/count/reindex'

    def make_query(self):
        return model.Person.all().filter('subdomain =', self.subdomain)

    def update_counter(self, counter, person):
        # Rebuild both index generations, then persist.
        person.update_index(['old', 'new'])
        person.put()
if __name__ == '__main__':
    # Map each task URL to its handler class.
    # NOTE(review): AddReviewedProperty is defined above but not registered
    # here -- confirm whether that omission is intentional.
    utils.run((CountPerson.URL, CountPerson),
              (CountNote.URL, CountNote),
              (DeleteExpired.URL, DeleteExpired),
              (UpdateStatus.URL, UpdateStatus),
              (Reindex.URL, Reindex))
|
#!/usr/bin/env python
import re
from operator import attrgetter
class MarathonBackend(object):
    """One (host, port) backend instance serving a Marathon app.

    The attribute names match the constructor arguments so the generator
    can sort instances with attrgetter('host', 'port')."""

    def __init__(self, host, port):
        self.port = port
        self.host = host
class MarathonApp(object):
    """A Marathon application: its id, virtual host (may be empty for TCP
    services), service port, and list of MarathonBackend instances."""

    def __init__(self, appId, hostname, servicePort, backends):
        self.backends = backends
        self.servicePort = servicePort
        self.hostname = hostname
        self.appId = appId
# Sample input: Marathon apps with their virtual hosts, service ports and
# backend instances.  Two entries share appId '/mm/service/collector' but
# differ in hostname/servicePort; the final app has no hostname, so it is
# rendered as a plain TCP service.
apps = [
    MarathonApp('/mm/application/portal', 'app.mesosphere.com', 9000, [
        MarathonBackend('srv3.hw.ca1.mesosphere.com', 31006),
        MarathonBackend('srv2.hw.ca1.mesosphere.com', 31671),
        MarathonBackend('srv2.hw.ca1.mesosphere.com', 31030),
        MarathonBackend('srv4.hw.ca1.mesosphere.com', 31006)
    ]),
    MarathonApp('/mm/service/collector', 'collector.mesosphere.com', 7070, [
        MarathonBackend('srv4.hw.ca1.mesosphere.com', 31005)
    ]),
    MarathonApp('/mm/service/collector', 'collector2.mesosphere.com', 9990, [
        MarathonBackend('srv4.hw.ca1.mesosphere.com', 31006)
    ]),
    MarathonApp('/some/generic/tcp/app', '', 3306, [
        MarathonBackend('srv4.hw.ca1.mesosphere.com', 31632)
    ])
]
def config(apps):
    """Render a complete HAProxy configuration for the given Marathon apps.

    Emits a static global/defaults/stats header, one HTTP frontend on :80
    that routes by Host header, plus one frontend/backend pair per
    (appId, servicePort). Apps without a hostname are exposed as raw TCP.

    NOTE(review): indentation inside the template literals is cosmetic to
    HAProxy; reconstructed here as conventional two-space style.
    """
    config = str()
    frontends = str()
    backends = str()
    head = '''global
  daemon
  log 127.0.0.1 local0
  log 127.0.0.1 local1 notice
  maxconn 4096

defaults
  log global
  retries 3
  maxconn 2000
  timeout connect 5000
  timeout client 50000
  timeout server 50000

listen stats
  bind 127.0.0.1:9090
  balance
  mode http
  stats enable
  stats auth admin:admin
'''
    http_frontends = '''frontend marathon_http_in
  bind *:80
  mode http
'''
    for app in sorted(apps, key=attrgetter('appId', 'servicePort')):
        # '/mm/app' + port 9000 -> listener name 'mm_app_9000'
        listener = app.appId[1:].replace('/', '_') + '_' + str(app.servicePort)
        frontends += "\nfrontend {0}\n".format(listener)
        frontends += "  bind *:{0}\n".format(app.servicePort)
        backends += "\nbackend " + listener + "\n"
        backends += "  balance roundrobin\n"
        # if it's a HTTP service
        if app.hostname:
            frontends += "  mode http\n"
            cleanedUpHostname = re.sub(r'[^a-zA-Z0-9\-]', '_', app.hostname)
            http_frontends += "  acl host_{0} hdr(host) -i {1}\n".format(cleanedUpHostname, app.hostname)
            http_frontends += "  use_backend {0} if host_{1}\n".format(listener, cleanedUpHostname)
            backends += '''  option forwardfor
  http-request set-header X-Forwarded-Port %[dst_port]
  http-request add-header X-Forwarded-Proto https if { ssl_fc }
'''
        else:
            frontends += "  mode tcp\n"
        frontends += "  use_backend {0}\n".format(listener)
        for backend in sorted(app.backends, key=attrgetter('host', 'port')):
            # BUG FIX: HAProxy requires 'server <name> <address>'; the name
            # was missing, producing an invalid 'server host:port' line.
            backends += "  server {0}_{1} {0}:{1}\n".format(backend.host, backend.port)
    config += head
    config += http_frontends
    config += frontends
    config += backends
    return config
# Python 2 entry point: write the rendered HAProxy config to stdout.
print config(apps)
Cleaned up multiline strings.
#!/usr/bin/env python
import re
from operator import attrgetter
class MarathonBackend(object):
    """A single Marathon task instance (host and port) backing an app."""
    def __init__(self, host, port):
        self.host = host  # agent hostname running the task
        self.port = port  # host port the task is bound to
class MarathonApp(object):
    """A Marathon app definition; empty hostname means TCP-only exposure."""
    def __init__(self, appId, hostname, servicePort, backends):
        self.appId = appId  # Marathon app id, e.g. '/mm/service/collector'
        self.hostname = hostname  # HTTP vhost; '' for TCP-only apps
        self.servicePort = servicePort  # stable service port exposed by HAProxy
        self.backends = backends  # list of MarathonBackend instances
apps = [
MarathonApp('/mm/application/portal', 'app.mesosphere.com', 9000, [
MarathonBackend('srv3.hw.ca1.mesosphere.com', 31006),
MarathonBackend('srv2.hw.ca1.mesosphere.com', 31671),
MarathonBackend('srv2.hw.ca1.mesosphere.com', 31030),
MarathonBackend('srv4.hw.ca1.mesosphere.com', 31006)
]),
MarathonApp('/mm/service/collector', 'collector.mesosphere.com', 7070, [
MarathonBackend('srv4.hw.ca1.mesosphere.com', 31005)
]),
MarathonApp('/mm/service/collector', 'collector2.mesosphere.com', 9990, [
MarathonBackend('srv4.hw.ca1.mesosphere.com', 31006)
]),
MarathonApp('/some/generic/tcp/app', '', 3306, [
MarathonBackend('srv4.hw.ca1.mesosphere.com', 31632)
])
]
def config(apps):
    """Render a complete HAProxy configuration for the given Marathon apps.

    Same output as the string-concatenation version, but assembled from
    multiline template literals. Apps without a hostname are exposed as
    raw TCP services; the rest are routed on :80 by Host header.

    NOTE(review): indentation inside the template literals is cosmetic to
    HAProxy; reconstructed here as conventional two-space style.
    """
    config = str()
    frontends = str()
    backends = str()
    head = '''global
  daemon
  log 127.0.0.1 local0
  log 127.0.0.1 local1 notice
  maxconn 4096

defaults
  log global
  retries 3
  maxconn 2000
  timeout connect 5000
  timeout client 50000
  timeout server 50000

listen stats
  bind 127.0.0.1:9090
  balance
  mode http
  stats enable
  stats auth admin:admin
'''
    http_frontends = '''
frontend marathon_http_in
  bind *:80
  mode http
'''
    for app in sorted(apps, key=attrgetter('appId', 'servicePort')):
        # '/mm/app' + port 9000 -> listener name 'mm_app_9000'
        listener = app.appId[1:].replace('/', '_') + '_' + str(app.servicePort)
        frontends += '''
frontend {0}
  bind *:{1}
'''.format(listener, app.servicePort)
        backends += '''
backend {0}
  balance roundrobin
'''.format(listener)
        # if it's a HTTP service
        if app.hostname:
            cleanedUpHostname = re.sub(r'[^a-zA-Z0-9\-]', '_', app.hostname)
            http_frontends += '''  acl host_{0} hdr(host) -i {1}
  use_backend {2} if host_{0}
'''.format(cleanedUpHostname, app.hostname, listener)
            frontends += "  mode http\n"
            backends += '''  option forwardfor
  http-request set-header X-Forwarded-Port %[dst_port]
  http-request add-header X-Forwarded-Proto https if { ssl_fc }
'''
        else:
            frontends += "  mode tcp\n"
        frontends += "  use_backend {0}\n".format(listener)
        for backend in sorted(app.backends, key=attrgetter('host', 'port')):
            # BUG FIX: HAProxy requires 'server <name> <address>'; the name
            # was missing, producing an invalid 'server host:port' line.
            backends += "  server {0}_{1} {0}:{1}\n".format(backend.host, backend.port)
    config += head
    config += http_frontends
    config += frontends
    config += backends
    return config
print config(apps)
|
import os
import re
import csv
import yaml
from itertools import chain
import pytz
from io import StringIO
from os import path
from functools import wraps
import unicodedata
from urllib.parse import urlparse
from collections import namedtuple
from datetime import datetime, timedelta, timezone
import dateutil
import ago
from flask import (
abort,
current_app,
redirect,
request,
session,
url_for
)
from flask_login import current_user
import pyexcel
from notifications_utils.template import (
SMSPreviewTemplate,
EmailPreviewTemplate,
LetterImageTemplate,
LetterPreviewTemplate,
)
from orderedset._orderedset import OrderedSet
from werkzeug.datastructures import MultiDict
SENDING_STATUSES = ['created', 'pending', 'sending']
DELIVERED_STATUSES = ['delivered', 'sent']
FAILURE_STATUSES = ['failed', 'temporary-failure', 'permanent-failure', 'technical-failure']
REQUESTED_STATUSES = SENDING_STATUSES + DELIVERED_STATUSES + FAILURE_STATUSES
class BrowsableItem(object):
    """
    Maps for the template browse-list.

    Subclasses wrap one domain object and expose the fields the
    browse-list template renders; the base properties are placeholders.
    """
    def __init__(self, item, *args, **kwargs):
        self._item = item  # the wrapped domain object
        super(BrowsableItem, self).__init__()

    @property
    def title(self):
        # display name shown in the list row
        pass

    @property
    def link(self):
        # href the row points at
        pass

    @property
    def hint(self):
        # secondary descriptive text for the row
        pass

    @property
    def destructive(self):
        # whether following the link performs a destructive action
        pass
def user_has_permissions(*permissions, admin_override=False, any_=False):
    """Decorator factory guarding a view with permission checks.

    Aborts with 401 when no authenticated user is present, 403 when the
    user lacks the required permissions; otherwise calls the view.
    """
    def wrap(func):
        @wraps(func)
        def wrap_func(*args, **kwargs):
            # abort() raises, so these guards never fall through.
            if not (current_user and current_user.is_authenticated):
                abort(401)
            if not current_user.has_permissions(
                    *permissions,
                    admin_override=admin_override,
                    any_=any_
            ):
                abort(403)
            return func(*args, **kwargs)
        return wrap_func
    return wrap
def redirect_to_sign_in(f):
    """Decorator: bounce to the sign-in page unless user details are in session."""
    @wraps(f)
    def wrapped(*args, **kwargs):
        if 'user_details' in session:
            return f(*args, **kwargs)
        return redirect(url_for('main.sign_in'))
    return wrapped
def get_errors_for_csv(recipients, template_type):
    """Build the list of human-readable error strings for an uploaded CSV.

    :param recipients: object exposing `rows_with_bad_recipients` and
        `rows_with_missing_data` iterables
    :param template_type: 'sms', 'email' or 'letter'; any other value
        reports no bad-recipient error (matching the original branches)
    :return: list of error strings, possibly empty
    """
    # (singular, plural) noun for a bad recipient, keyed by template type;
    # replaces three near-identical if/elif branches.
    recipient_nouns = {
        'sms': ('phone number', 'phone numbers'),
        'email': ('email address', 'email addresses'),
        'letter': ('address', 'addresses'),
    }
    errors = []
    number_of_bad_recipients = len(list(recipients.rows_with_bad_recipients))
    if number_of_bad_recipients and template_type in recipient_nouns:
        singular, plural = recipient_nouns[template_type]
        if 1 == number_of_bad_recipients:
            errors.append("fix 1 {}".format(singular))
        else:
            errors.append("fix {} {}".format(number_of_bad_recipients, plural))
    number_of_rows_with_missing_data = len(list(recipients.rows_with_missing_data))
    if number_of_rows_with_missing_data:
        if 1 == number_of_rows_with_missing_data:
            errors.append("enter missing data in 1 row")
        else:
            errors.append("enter missing data in {} rows".format(number_of_rows_with_missing_data))
    return errors
def generate_notifications_csv(**kwargs):
    """Stream a CSV of notifications for a service, one line at a time.

    Yields a header row, then pages through the notifications API until
    the response has no 'next' link. kwargs are ultimately passed to
    notification_api_client.get_notifications_for_service; a truthy
    kwargs['job_id'] switches to the per-job response shape.
    """
    from app import notification_api_client  # local import avoids an import cycle
    if 'page' not in kwargs:
        kwargs['page'] = 1
    if kwargs['job_id']:
        fieldnames = ['Row number', 'Recipient', 'Template', 'Type', 'Job', 'Status', 'Time']
    else:
        fieldnames = ['Recipient', 'Template', 'Type', 'Job', 'Status', 'Time']
    yield ','.join(fieldnames) + '\n'
    while kwargs['page']:
        # if job_id then response looks different
        notifications_resp = notification_api_client.get_notifications_for_service(**kwargs)
        notifications = notifications_resp['notifications']
        if kwargs['job_id']:
            for notification in notifications:
                values = [
                    notification['row_number'],
                    notification['recipient'],
                    notification['template_name'],
                    notification['template_type'],
                    notification['job_name'],
                    notification['status'],
                    notification['created_at']
                ]
                # NOTE(review): naive join — values containing commas/quotes
                # are not escaped; consider csv.writer. Confirm inputs.
                line = ','.join(str(i) for i in values) + '\n'
                yield line
        else:
            # Change here
            for notification in notifications:
                values = [
                    notification['to'],
                    notification['template']['name'],
                    notification['template']['template_type'],
                    notification.get('job_name', None),
                    notification['status'],
                    notification['created_at'],
                    notification['updated_at']
                ]
                line = ','.join(str(i) for i in values) + '\n'
                yield line
        if notifications_resp['links'].get('next'):
            kwargs['page'] += 1
        else:
            return
    raise Exception("Should never reach here")
def get_page_from_request():
    """Return the 'page' query parameter as an int.

    Defaults to 1 when absent; returns None when present but not numeric.
    """
    raw_page = request.args.get('page')
    if raw_page is None:
        return 1
    try:
        return int(raw_page)
    except ValueError:
        return None
def generate_previous_dict(view, service_id, page, url_args=None):
    # Pagination helper: nav link to the page before `page`.
    return generate_previous_next_dict(view, service_id, page - 1, 'Previous page', url_args or {})


def generate_next_dict(view, service_id, page, url_args=None):
    # Pagination helper: nav link to the page after `page`.
    return generate_previous_next_dict(view, service_id, page + 1, 'Next page', url_args or {})


def generate_previous_next_dict(view, service_id, page, title, url_args):
    """Build the dict the pagination template expects for one nav link."""
    return {
        'url': url_for(view, service_id=service_id, page=page, **url_args),
        'title': title,
        'label': 'page {}'.format(page)
    }
def email_safe(string, whitespace='.'):
    """Normalise a string into a form safe for an email local part.

    Accents are stripped (NFD decompose, drop combining marks), runs of
    whitespace become the `whitespace` separator, characters that are
    neither alphanumeric nor the separator are dropped, and repeated or
    leading/trailing dots are cleaned up.
    """
    decomposed = unicodedata.normalize('NFD', string)
    without_accents = ''.join(
        char for char in decomposed if unicodedata.category(char) != 'Mn'
    )
    separated = re.sub(r'\s+', whitespace, without_accents.strip())
    kept = []
    for char in separated:
        if char.isalnum() or char == whitespace:
            kept.append(char.lower())
    collapsed = re.sub(r'\.{2,}', '.', ''.join(kept))
    return collapsed.strip('.')
class Spreadsheet():
    """Wraps uploaded spreadsheet content, normalised to CSV data."""

    # file types pyexcel can parse for us
    allowed_file_extensions = ['csv', 'xlsx', 'xls', 'ods', 'xlsm', 'tsv']

    def __init__(self, csv_data, filename=''):
        self.filename = filename
        self.as_csv_data = csv_data
        # shape expected by the API when uploading a job
        self.as_dict = {
            'file_name': self.filename,
            'data': self.as_csv_data
        }

    @classmethod
    def can_handle(cls, filename):
        """Return True if the filename has a supported extension."""
        return cls.get_extension(filename) in cls.allowed_file_extensions

    @staticmethod
    def get_extension(filename):
        # '.CSV' -> 'csv'
        return path.splitext(filename)[1].lower().lstrip('.')

    @staticmethod
    def normalise_newlines(file_content):
        # decode the raw upload and force CRLF line endings
        return '\r\n'.join(file_content.read().decode('utf-8').splitlines())

    @classmethod
    def from_rows(cls, rows, filename=''):
        """Build an instance from an iterable of row iterables."""
        with StringIO() as converted:
            output = csv.writer(converted)
            for row in rows:
                output.writerow(row)
            return cls(converted.getvalue(), filename)

    @classmethod
    def from_dict(cls, dictionary, filename=''):
        """Build a two-row spreadsheet (keys row, values row), keys sorted."""
        return cls.from_rows(
            zip(
                *sorted(dictionary.items(), key=lambda pair: pair[0])
            ),
            filename
        )

    @classmethod
    def from_file(cls, file_content, filename=''):
        """Build an instance from an uploaded file object, converting if needed."""
        extension = cls.get_extension(filename)
        if extension == 'csv':
            # already CSV; just normalise newlines
            return cls(Spreadsheet.normalise_newlines(file_content), filename)
        if extension == 'tsv':
            # pyexcel wants a text stream for tsv
            file_content = StringIO(
                Spreadsheet.normalise_newlines(file_content))
        instance = cls.from_rows(
            pyexcel.iget_array(
                file_type=extension,
                file_stream=file_content),
            filename)
        pyexcel.free_resources()  # release streams pyexcel keeps open
        return instance
def get_help_argument():
    """Return the 'help' query arg when it is one of '1', '2', '3', else None."""
    help_value = request.args.get('help')
    if help_value in ('1', '2', '3'):
        return help_value
    return None
def is_gov_user(email_address):
    """True if the email address belongs to a known government domain."""
    try:
        GovernmentDomain(email_address)
        return True
    except NotGovernmentDomain:
        return False
def get_template(
    template,
    service,
    show_recipient=False,
    expand_emails=False,
    letter_preview_url=None,
    page_count=1,
    redact_missing_personalisation=False,
    email_reply_to=None,
    sms_sender=None,
):
    """Instantiate the right preview-template class for a template dict.

    Dispatches on template['template_type'] ('email', 'sms' or 'letter').
    Letters render as images when letter_preview_url is given, otherwise
    as HTML previews. Falls through (returns None) for unknown types.
    """
    if 'email' == template['template_type']:
        return EmailPreviewTemplate(
            template,
            from_name=service['name'],
            from_address='{}@notifications.service.gov.uk'.format(service['email_from']),
            expanded=expand_emails,
            show_recipient=show_recipient,
            redact_missing_personalisation=redact_missing_personalisation,
            reply_to=email_reply_to,
        )
    if 'sms' == template['template_type']:
        return SMSPreviewTemplate(
            template,
            prefix=service['name'],
            show_prefix=service['prefix_sms'],
            sender=sms_sender,
            show_sender=bool(sms_sender),
            show_recipient=show_recipient,
            redact_missing_personalisation=redact_missing_personalisation,
        )
    if 'letter' == template['template_type']:
        if letter_preview_url:
            # rendered server-side as page images
            return LetterImageTemplate(
                template,
                image_url=letter_preview_url,
                page_count=int(page_count),
                contact_block=template['reply_to_text']
            )
        else:
            # rendered locally as an HTML preview
            return LetterPreviewTemplate(
                template,
                contact_block=template['reply_to_text'],
                admin_base_url=current_app.config['ADMIN_BASE_URL'],
                redact_missing_personalisation=redact_missing_personalisation,
            )
def get_current_financial_year():
    """Return the start year of the current financial year (April-March).

    January-March belong to the financial year that started the previous
    calendar year.
    """
    now = datetime.utcnow()
    # Use the datetime attributes directly: strftime('%-m') is a
    # glibc-only format code and raises on Windows strftime.
    current_month = now.month
    current_year = now.year
    return current_year if current_month > 3 else current_year - 1
def get_time_left(created_at):
    """Human-readable time until a notification's data is purged.

    Data is kept for 8 days after created_at (a parseable timestamp
    string); the countdown target is end-of-day UTC.
    """
    return ago.human(
        (
            datetime.now(timezone.utc).replace(hour=23, minute=59, second=59)
        ) - (
            dateutil.parser.parse(created_at) + timedelta(days=8)
        ),
        future_tense='Data available for {}',
        past_tense='Data no longer available',  # No-one should ever see this
        precision=1
    )
def email_or_sms_not_enabled(template_type, permissions):
    """True when template_type is email/sms but that permission is missing."""
    is_email_or_sms = template_type in ('email', 'sms')
    return is_email_or_sms and template_type not in permissions
def get_letter_timings(upload_time):
    """Work out print/delivery milestones for a letter uploaded at upload_time.

    :param upload_time: timestamp string parseable by gmt_timezones
    :return: LetterTimings namedtuple of
        (printed_by, is_printed, earliest_delivery, latest_delivery)
    """
    LetterTimings = namedtuple(
        'LetterTimings',
        'printed_by, is_printed, earliest_delivery, latest_delivery'
    )
    # shift anything after 5pm to the next day
    processing_day = gmt_timezones(upload_time) + timedelta(hours=(7))
    # Day offsets (to print day, earliest and latest delivery), keyed by
    # the shifted processing day's weekday; default covers the other days.
    print_day, earliest_delivery, latest_delivery = (
        processing_day + timedelta(days=days)
        for days in {
            'Wednesday': (1, 3, 5),
            'Thursday': (1, 4, 5),
            'Friday': (3, 5, 6),
            'Saturday': (2, 4, 5),
        }.get(processing_day.strftime('%A'), (1, 3, 4))
    )
    printed_by = print_day.astimezone(pytz.timezone('Europe/London')).replace(hour=15, minute=0)
    # NOTE(review): replace(tzinfo=pytz.timezone(...)) attaches the zone's
    # LMT offset and utcnow() is naive UTC — confirm this comparison is
    # intended rather than datetime.now(tz).
    now = datetime.utcnow().replace(tzinfo=pytz.timezone('Europe/London'))
    return LetterTimings(
        printed_by=printed_by,
        is_printed=(now > printed_by),
        earliest_delivery=earliest_delivery,
        latest_delivery=latest_delivery,
    )
def gmt_timezones(date):
    """Parse a timestamp string, force it to UTC, convert to Europe/London."""
    date = dateutil.parser.parse(date)
    # force UTC regardless of any offset in the string, then convert
    forced_utc = date.replace(tzinfo=pytz.utc)
    return forced_utc.astimezone(pytz.timezone('Europe/London'))
def get_cdn_domain():
    """Derive the static-logos CDN hostname from ADMIN_BASE_URL."""
    parsed_uri = urlparse(current_app.config['ADMIN_BASE_URL'])
    # local development has no real CDN; point at the tools environment
    if parsed_uri.netloc.startswith('localhost'):
        return 'static-logos.notify.tools'
    first_label = parsed_uri.hostname.split('.')[0]
    parent_domain = parsed_uri.netloc[len(first_label) + 1:]
    return "static-logos.{}".format(parent_domain)
def parse_filter_args(filter_dict):
    """Normalise filter query args into a MultiDict of value lists.

    Accepts a plain dict or MultiDict. Each key's values are joined and
    re-split on commas, so 'a,b' and two separate 'a'/'b' values both
    become ['a', 'b']; keys whose values are all empty are dropped.
    """
    if not isinstance(filter_dict, MultiDict):
        filter_dict = MultiDict(filter_dict)
    return MultiDict(
        (
            key,
            (','.join(filter_dict.getlist(key))).split(',')
        )
        for key in filter_dict.keys()
        if ''.join(filter_dict.getlist(key))
    )
def set_status_filters(filter_args):
    """Expand requested status filters into the full list of API statuses.

    No filter means every status; 'delivered', 'sending' and 'failed'
    each additionally expand to their constituent notification statuses.
    Result is de-duplicated while preserving order.
    """
    status_filters = filter_args.get('status', [])
    return list(OrderedSet(chain(
        (status_filters or REQUESTED_STATUSES),
        DELIVERED_STATUSES if 'delivered' in status_filters else [],
        SENDING_STATUSES if 'sending' in status_filters else [],
        FAILURE_STATUSES if 'failed' in status_filters else []
    )))
class NotGovernmentDomain(Exception):
    """Raised when an email address or domain matches no known government domain."""
    pass
class GovernmentDomain:
    """Looks up ownership details for a government email domain.

    Loads the domain registry from domains.yml once at import time.
    Raises NotGovernmentDomain when nothing matches.
    """
    _dir_path = os.path.dirname(os.path.realpath(__file__))
    with open('{}/domains.yml'.format(_dir_path)) as domains:
        domains = yaml.safe_load(domains)
    # PERF: sorted once here by length instead of re-sorting the matching
    # subset on every instantiation; taking the first match below is
    # equivalent to sorted(matches, key=len)[0].
    domain_names = sorted(domains.keys(), key=len)

    def __init__(self, email_address_or_domain):
        try:
            self._match = next(
                domain for domain in self.domain_names
                if self._domain_matches(email_address_or_domain, domain)
            )
        except StopIteration:
            raise NotGovernmentDomain()
        self.owner, self.sector, self.agreement_signed = self._get_details_of_domain()

    @staticmethod
    def _domain_matches(email_address_or_domain, domain):
        """True if the lowercased input is the domain, or an address/subdomain of it."""
        email_address_or_domain = email_address_or_domain.lower()
        # Raw strings fix the invalid '\.' escape sequences, which raise
        # DeprecationWarning (later SyntaxWarning) in Python 3.6+.
        return (email_address_or_domain == domain) or re.search(
            r"[\.|@]({})$".format(domain.replace(".", r"\.")),
            email_address_or_domain
        )

    def _get_details_of_domain(self):
        """Resolve (owner, sector, agreement_signed), following string aliases."""
        details = self.domains[self._match]
        if isinstance(details, str):
            # the entry is an alias pointing at another domain's details
            return GovernmentDomain(details)._get_details_of_domain()
        elif isinstance(details, dict):
            return(
                details.get("owner"),
                details.get("sector"),
                bool(details.get("agreement_signed")),
            )
        else:
            return(
                None,
                None,
                False,
            )
Refactor to only sort once
Rather than doing the sort every time an instance is instantiated, we can
speed things up by doing it just once when the app starts up.
import os
import re
import csv
import yaml
from itertools import chain
import pytz
from io import StringIO
from os import path
from functools import wraps
import unicodedata
from urllib.parse import urlparse
from collections import namedtuple
from datetime import datetime, timedelta, timezone
import dateutil
import ago
from flask import (
abort,
current_app,
redirect,
request,
session,
url_for
)
from flask_login import current_user
import pyexcel
from notifications_utils.template import (
SMSPreviewTemplate,
EmailPreviewTemplate,
LetterImageTemplate,
LetterPreviewTemplate,
)
from orderedset._orderedset import OrderedSet
from werkzeug.datastructures import MultiDict
SENDING_STATUSES = ['created', 'pending', 'sending']
DELIVERED_STATUSES = ['delivered', 'sent']
FAILURE_STATUSES = ['failed', 'temporary-failure', 'permanent-failure', 'technical-failure']
REQUESTED_STATUSES = SENDING_STATUSES + DELIVERED_STATUSES + FAILURE_STATUSES
class BrowsableItem(object):
"""
Maps for the template browse-list.
"""
def __init__(self, item, *args, **kwargs):
self._item = item
super(BrowsableItem, self).__init__()
@property
def title(self):
pass
@property
def link(self):
pass
@property
def hint(self):
pass
@property
def destructive(self):
pass
def user_has_permissions(*permissions, admin_override=False, any_=False):
def wrap(func):
@wraps(func)
def wrap_func(*args, **kwargs):
if current_user and current_user.is_authenticated:
if current_user.has_permissions(
*permissions,
admin_override=admin_override,
any_=any_
):
return func(*args, **kwargs)
else:
abort(403)
else:
abort(401)
return wrap_func
return wrap
def redirect_to_sign_in(f):
@wraps(f)
def wrapped(*args, **kwargs):
if 'user_details' not in session:
return redirect(url_for('main.sign_in'))
else:
return f(*args, **kwargs)
return wrapped
def get_errors_for_csv(recipients, template_type):
errors = []
if recipients.rows_with_bad_recipients:
number_of_bad_recipients = len(list(recipients.rows_with_bad_recipients))
if 'sms' == template_type:
if 1 == number_of_bad_recipients:
errors.append("fix 1 phone number")
else:
errors.append("fix {} phone numbers".format(number_of_bad_recipients))
elif 'email' == template_type:
if 1 == number_of_bad_recipients:
errors.append("fix 1 email address")
else:
errors.append("fix {} email addresses".format(number_of_bad_recipients))
elif 'letter' == template_type:
if 1 == number_of_bad_recipients:
errors.append("fix 1 address")
else:
errors.append("fix {} addresses".format(number_of_bad_recipients))
if recipients.rows_with_missing_data:
number_of_rows_with_missing_data = len(list(recipients.rows_with_missing_data))
if 1 == number_of_rows_with_missing_data:
errors.append("enter missing data in 1 row")
else:
errors.append("enter missing data in {} rows".format(number_of_rows_with_missing_data))
return errors
def generate_notifications_csv(**kwargs):
from app import notification_api_client
if 'page' not in kwargs:
kwargs['page'] = 1
if kwargs['job_id']:
fieldnames = ['Row number', 'Recipient', 'Template', 'Type', 'Job', 'Status', 'Time']
else:
fieldnames = ['Recipient', 'Template', 'Type', 'Job', 'Status', 'Time']
yield ','.join(fieldnames) + '\n'
while kwargs['page']:
# if job_id then response looks different
notifications_resp = notification_api_client.get_notifications_for_service(**kwargs)
notifications = notifications_resp['notifications']
if kwargs['job_id']:
for notification in notifications:
values = [
notification['row_number'],
notification['recipient'],
notification['template_name'],
notification['template_type'],
notification['job_name'],
notification['status'],
notification['created_at']
]
line = ','.join(str(i) for i in values) + '\n'
yield line
else:
# Change here
for notification in notifications:
values = [
notification['to'],
notification['template']['name'],
notification['template']['template_type'],
notification.get('job_name', None),
notification['status'],
notification['created_at'],
notification['updated_at']
]
line = ','.join(str(i) for i in values) + '\n'
yield line
if notifications_resp['links'].get('next'):
kwargs['page'] += 1
else:
return
raise Exception("Should never reach here")
def get_page_from_request():
if 'page' in request.args:
try:
return int(request.args['page'])
except ValueError:
return None
else:
return 1
def generate_previous_dict(view, service_id, page, url_args=None):
return generate_previous_next_dict(view, service_id, page - 1, 'Previous page', url_args or {})
def generate_next_dict(view, service_id, page, url_args=None):
return generate_previous_next_dict(view, service_id, page + 1, 'Next page', url_args or {})
def generate_previous_next_dict(view, service_id, page, title, url_args):
return {
'url': url_for(view, service_id=service_id, page=page, **url_args),
'title': title,
'label': 'page {}'.format(page)
}
def email_safe(string, whitespace='.'):
# strips accents, diacritics etc
string = ''.join(c for c in unicodedata.normalize('NFD', string) if unicodedata.category(c) != 'Mn')
string = ''.join(
word.lower() if word.isalnum() or word == whitespace else ''
for word in re.sub(r'\s+', whitespace, string.strip())
)
string = re.sub(r'\.{2,}', '.', string)
return string.strip('.')
class Spreadsheet():
allowed_file_extensions = ['csv', 'xlsx', 'xls', 'ods', 'xlsm', 'tsv']
def __init__(self, csv_data, filename=''):
self.filename = filename
self.as_csv_data = csv_data
self.as_dict = {
'file_name': self.filename,
'data': self.as_csv_data
}
@classmethod
def can_handle(cls, filename):
return cls.get_extension(filename) in cls.allowed_file_extensions
@staticmethod
def get_extension(filename):
return path.splitext(filename)[1].lower().lstrip('.')
@staticmethod
def normalise_newlines(file_content):
return '\r\n'.join(file_content.read().decode('utf-8').splitlines())
@classmethod
def from_rows(cls, rows, filename=''):
with StringIO() as converted:
output = csv.writer(converted)
for row in rows:
output.writerow(row)
return cls(converted.getvalue(), filename)
@classmethod
def from_dict(cls, dictionary, filename=''):
return cls.from_rows(
zip(
*sorted(dictionary.items(), key=lambda pair: pair[0])
),
filename
)
@classmethod
def from_file(cls, file_content, filename=''):
extension = cls.get_extension(filename)
if extension == 'csv':
return cls(Spreadsheet.normalise_newlines(file_content), filename)
if extension == 'tsv':
file_content = StringIO(
Spreadsheet.normalise_newlines(file_content))
instance = cls.from_rows(
pyexcel.iget_array(
file_type=extension,
file_stream=file_content),
filename)
pyexcel.free_resources()
return instance
def get_help_argument():
return request.args.get('help') if request.args.get('help') in ('1', '2', '3') else None
def is_gov_user(email_address):
try:
GovernmentDomain(email_address)
return True
except NotGovernmentDomain:
return False
def get_template(
template,
service,
show_recipient=False,
expand_emails=False,
letter_preview_url=None,
page_count=1,
redact_missing_personalisation=False,
email_reply_to=None,
sms_sender=None,
):
if 'email' == template['template_type']:
return EmailPreviewTemplate(
template,
from_name=service['name'],
from_address='{}@notifications.service.gov.uk'.format(service['email_from']),
expanded=expand_emails,
show_recipient=show_recipient,
redact_missing_personalisation=redact_missing_personalisation,
reply_to=email_reply_to,
)
if 'sms' == template['template_type']:
return SMSPreviewTemplate(
template,
prefix=service['name'],
show_prefix=service['prefix_sms'],
sender=sms_sender,
show_sender=bool(sms_sender),
show_recipient=show_recipient,
redact_missing_personalisation=redact_missing_personalisation,
)
if 'letter' == template['template_type']:
if letter_preview_url:
return LetterImageTemplate(
template,
image_url=letter_preview_url,
page_count=int(page_count),
contact_block=template['reply_to_text']
)
else:
return LetterPreviewTemplate(
template,
contact_block=template['reply_to_text'],
admin_base_url=current_app.config['ADMIN_BASE_URL'],
redact_missing_personalisation=redact_missing_personalisation,
)
def get_current_financial_year():
    """Return the start year of the current financial year (April-March).

    January-March belong to the financial year that started the previous
    calendar year.
    """
    now = datetime.utcnow()
    # Use the datetime attributes directly: strftime('%-m') is a
    # glibc-only format code and raises on Windows strftime.
    current_month = now.month
    current_year = now.year
    return current_year if current_month > 3 else current_year - 1
def get_time_left(created_at):
return ago.human(
(
datetime.now(timezone.utc).replace(hour=23, minute=59, second=59)
) - (
dateutil.parser.parse(created_at) + timedelta(days=8)
),
future_tense='Data available for {}',
past_tense='Data no longer available', # No-one should ever see this
precision=1
)
def email_or_sms_not_enabled(template_type, permissions):
return (template_type in ['email', 'sms']) and (template_type not in permissions)
def get_letter_timings(upload_time):
LetterTimings = namedtuple(
'LetterTimings',
'printed_by, is_printed, earliest_delivery, latest_delivery'
)
# shift anything after 5pm to the next day
processing_day = gmt_timezones(upload_time) + timedelta(hours=(7))
print_day, earliest_delivery, latest_delivery = (
processing_day + timedelta(days=days)
for days in {
'Wednesday': (1, 3, 5),
'Thursday': (1, 4, 5),
'Friday': (3, 5, 6),
'Saturday': (2, 4, 5),
}.get(processing_day.strftime('%A'), (1, 3, 4))
)
printed_by = print_day.astimezone(pytz.timezone('Europe/London')).replace(hour=15, minute=0)
now = datetime.utcnow().replace(tzinfo=pytz.timezone('Europe/London'))
return LetterTimings(
printed_by=printed_by,
is_printed=(now > printed_by),
earliest_delivery=earliest_delivery,
latest_delivery=latest_delivery,
)
def gmt_timezones(date):
date = dateutil.parser.parse(date)
forced_utc = date.replace(tzinfo=pytz.utc)
return forced_utc.astimezone(pytz.timezone('Europe/London'))
def get_cdn_domain():
parsed_uri = urlparse(current_app.config['ADMIN_BASE_URL'])
if parsed_uri.netloc.startswith('localhost'):
return 'static-logos.notify.tools'
subdomain = parsed_uri.hostname.split('.')[0]
domain = parsed_uri.netloc[len(subdomain + '.'):]
return "static-logos.{}".format(domain)
def parse_filter_args(filter_dict):
if not isinstance(filter_dict, MultiDict):
filter_dict = MultiDict(filter_dict)
return MultiDict(
(
key,
(','.join(filter_dict.getlist(key))).split(',')
)
for key in filter_dict.keys()
if ''.join(filter_dict.getlist(key))
)
def set_status_filters(filter_args):
status_filters = filter_args.get('status', [])
return list(OrderedSet(chain(
(status_filters or REQUESTED_STATUSES),
DELIVERED_STATUSES if 'delivered' in status_filters else [],
SENDING_STATUSES if 'sending' in status_filters else [],
FAILURE_STATUSES if 'failed' in status_filters else []
)))
class NotGovernmentDomain(Exception):
pass
class GovernmentDomain:
    """Looks up ownership details for a government email domain.

    The domain registry is loaded from domains.yml and sorted by length
    once at import time; lookups take the first (shortest) match.
    Raises NotGovernmentDomain when nothing matches.
    """
    _dir_path = os.path.dirname(os.path.realpath(__file__))
    with open('{}/domains.yml'.format(_dir_path)) as domains:
        domains = yaml.safe_load(domains)
    domain_names = sorted(domains.keys(), key=len)

    def __init__(self, email_address_or_domain):
        try:
            self._match = next(filter(
                self.get_matching_function(email_address_or_domain),
                self.domain_names,
            ))
        except StopIteration:
            raise NotGovernmentDomain()
        self.owner, self.sector, self.agreement_signed = self._get_details_of_domain()

    @staticmethod
    def get_matching_function(email_address_or_domain):
        """Return a predicate testing whether a known domain matches the input."""
        email_address_or_domain = email_address_or_domain.lower()

        def fn(domain):
            # Raw strings fix the invalid '\.' escape sequences, which raise
            # DeprecationWarning (later SyntaxWarning) in Python 3.6+.
            return (email_address_or_domain == domain) or re.search(
                r"[\.|@]({})$".format(domain.replace(".", r"\.")),
                email_address_or_domain
            )
        return fn

    def _get_details_of_domain(self):
        """Resolve (owner, sector, agreement_signed), following string aliases."""
        details = self.domains[self._match]
        if isinstance(details, str):
            # the entry is an alias pointing at another domain's details
            return GovernmentDomain(details)._get_details_of_domain()
        elif isinstance(details, dict):
            return(
                details.get("owner"),
                details.get("sector"),
                bool(details.get("agreement_signed")),
            )
        else:
            return(
                None,
                None,
                False,
            )
|
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Quentin Gigon <gigon.quentin@gmail.com>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from datetime import date
from dateutil.relativedelta import relativedelta
from odoo.addons.queue_job.job import job, related_action
from odoo import api, models, fields, _
_logger = logging.getLogger(__name__)
class RecurringContract(models.Model):
_inherit = "recurring.contract"
group_id = fields.Many2one(required=False, readonly=False)
sms_request_id = fields.Many2one(
"sms.child.request",
"SMS request",
compute="_compute_sms_request_id",
readonly=False,
)
@api.multi
def _compute_sms_request_id(self):
for contract in self:
contract.sms_request_id = self.env["sms.child.request"].search(
[("sponsorship_id", "=", contract.id)], limit=1
)
@api.model
@job
def create_sms_sponsorship(self, vals, partner, sms_child_request):
""" Creates sponsorship from REACT webapp data.
:param vals: form values
:param partner: res.partner record
:param sms_child_request: sms.child.request record
:return: True
"""
frontend_lang = self.env["res.lang"].search(
[("code", "like", vals["lang"] + "_")], limit=1
)
if partner and (
partner.firstname != vals["firstname"]
or partner.lastname != vals["lastname"]
):
partner = False
if not partner:
match_obj = self.env["res.partner.match"]
vals["lang"] = frontend_lang.code
partner = match_obj.match_partner_to_infos(vals)
# Update SMS Request
sms_child_request.write({"partner_id": partner.id, "lang_code": partner.lang})
# prepare correct medium_id depending on sms_child_request's source
medium_name = "sms_sponsorship.utm_medium_"
medium_name += (
"android"
if sms_child_request.source == "Android"
else "ios"
if sms_child_request.source == "iOS"
else "sms"
)
# Create sponsorship
lines = self._get_sponsorship_standard_lines(False)
if not vals["sponsorship_plus"]:
lines = lines[:-1]
sponsorship = self.create(
{
"partner_id": partner.id,
"correspondent_id": partner.id,
"child_id": sms_child_request.child_id.id,
"type": "S",
"contract_line_ids": lines,
"medium_id": self.env.ref(medium_name).id,
"origin_id": sms_child_request.event_id.origin_id.id,
}
)
sponsorship.on_change_origin()
sponsorship._set_next_invoice_date_sms()
sponsorship.with_delay().put_child_on_no_money_hold()
partner.set_privacy_statement(origin="new_sponsorship")
sms_child_request.with_context(lang=partner.lang).complete_step1(sponsorship.id)
return True
@job(default_channel="root.sms_sponsorship")
@related_action(action="related_action_contract")
def finalize_form(self, pay_first_month_ebanking, payment_mode_id):
""" validate sms sponsorship after step 2 and send confirmation email
:param pay_first_month_ebanking: has the sponsor paid first month
:param payment_mode_id: selected payment mode
:return: True
"""
self.associate_group(payment_mode_id)
if not pay_first_month_ebanking:
# update sms request and send confirmation. this will be done
# after the payment if the sponsor decides to pay the first month
self.sms_request_id.complete_step2()
return True
@job(default_channel="root.sms_sponsorship")
@related_action(action="related_action_contract")
def post_message_from_step2(self, message):
# Post message in sponsorship
notify_ids = (
self.env["res.config.settings"]
.sudo()
.get_param("sms_new_partner_notify_ids")
)
intro = _("Please verify the following information given by the sponsor:")
return self.message_post(
body=intro + message,
subject=_("New SMS sponsorship information"),
partner_ids=notify_ids,
type="comment",
subtype="mail.mt_comment",
content_subtype="html",
)
def associate_group(self, payment_mode_id):
""" Create contract group when SMS sponsorship is validated.
:param payment_mode_id: selected payment mode
:return: True
"""
group = self.env["recurring.contract.group"].search(
[
("partner_id", "=", self.partner_id.id),
("payment_mode_id", "=", payment_mode_id),
],
order="next_invoice_date desc",
limit=1,
)
if not group:
group = group.create(
{"partner_id": self.partner_id.id, "payment_mode_id": payment_mode_id, }
)
self.group_id = group
return True
@job(default_channel="root.sms_sponsorship")
@related_action(action="related_action_contract")
def create_first_sms_invoice(self):
    """In case the sponsor is a new partner, create first invoice
    because the sponsorship won't be validated and invoices are not
    generated. We therefore manually create an invoice that can be paid
    online.
    :return: True
    """
    invoicer = self.env["recurring.invoicer"].create({})
    # NOTE(review): company_id is hard-coded to 1 — presumably the main
    # company; confirm for multi-company setups.
    journal = self.env["account.journal"].search(
        [("type", "=", "sale"), ("company_id", "=", 1)], limit=1
    )
    inv_data = self.group_id._setup_inv_data(journal, invoicer, self)
    self.env["account.invoice"].create(inv_data)
    self.update_next_invoice_date()
    return True
def contract_waiting(self):
    """
    In case a new sponsor has already paid the first month, we
    automatically pay the first invoice when contract is validated,
    supposing that the staff has verified the partner.
    :return: True
    """
    super().contract_waiting()
    self._post_payment_first_month()
    return True
def _post_payment_first_month(self):
    """ Validate draft invoices of the sponsorships and post their
    pending draft payments (the first month paid online). """
    for contract in self.filtered("invoice_line_ids"):
        invoices = contract.invoice_line_ids.mapped("invoice_id")
        payment = self.env["account.payment"].search(
            [("invoice_ids", "in", invoices.ids), ("state", "=", "draft")]
        )
        draft_invoices = invoices.filtered(lambda i: i.state == "draft")
        draft_invoices.action_invoice_open()
        payment.post()
def _set_next_invoice_date_sms(self):
    """ Just compute the default next_invoice_date for new sponsorship.

    Aligns on the group's invoicing day when a group exists, otherwise
    on the 1st of the month; always shifted one month into the future.
    """
    self.ensure_one()
    current_date = date.today()
    if self.group_id:
        contract_group = self.group_id
        if contract_group.next_invoice_date:
            # Keep the same day-of-month as the rest of the group.
            next_group_date = contract_group.next_invoice_date
            next_invoice_date = current_date.replace(day=next_group_date.day)
        else:
            next_invoice_date = current_date.replace(day=1)
    else:
        next_invoice_date = current_date.replace(day=1)
    next_invoice_date += relativedelta(months=+1)
    self.next_invoice_date = next_invoice_date
# AP-345 Fixing utm_source for iOS application
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Quentin Gigon <gigon.quentin@gmail.com>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from datetime import date
from dateutil.relativedelta import relativedelta
from odoo.addons.queue_job.job import job, related_action
from odoo import api, models, fields, _
_logger = logging.getLogger(__name__)
class RecurringContract(models.Model):
    """Extends sponsorship contracts with the SMS sponsorship workflow:
    creation from the REACT webapp, staff notification, first-invoice
    handling and automatic payment of the first month."""

    _inherit = "recurring.contract"

    # group_id becomes optional/editable: an SMS sponsorship is created
    # without a group and gets one later in associate_group().
    group_id = fields.Many2one(required=False, readonly=False)
    sms_request_id = fields.Many2one(
        "sms.child.request",
        "SMS request",
        compute="_compute_sms_request_id",
        readonly=False,
    )

    @api.multi
    def _compute_sms_request_id(self):
        # Link each contract to the single SMS request that created it.
        for contract in self:
            contract.sms_request_id = self.env["sms.child.request"].search(
                [("sponsorship_id", "=", contract.id)], limit=1
            )

    @api.model
    @job
    def create_sms_sponsorship(self, vals, partner, sms_child_request):
        """ Creates sponsorship from REACT webapp data.
        :param vals: form values
        :param partner: res.partner record
        :param sms_child_request: sms.child.request record
        :return: True
        """
        frontend_lang = self.env["res.lang"].search(
            [("code", "like", vals["lang"] + "_")], limit=1
        )
        # If the name typed in the form differs from the matched partner,
        # discard the match and look the partner up from the form data.
        if partner and (
            partner.firstname != vals["firstname"]
            or partner.lastname != vals["lastname"]
        ):
            partner = False
        if not partner:
            match_obj = self.env["res.partner.match"]
            vals["lang"] = frontend_lang.code
            partner = match_obj.match_partner_to_infos(vals)

        # Update SMS Request
        sms_child_request.write({"partner_id": partner.id, "lang_code": partner.lang})

        # prepare correct medium_id depending on sms_child_request's source
        medium_name = "sms_sponsorship.utm_medium_"
        medium_name += (
            "android"
            if sms_child_request.source == "Android"
            else "ios"
            if sms_child_request.source == "IOS"
            else "sms"
        )

        # Create sponsorship
        lines = self._get_sponsorship_standard_lines(False)
        if not vals["sponsorship_plus"]:
            # Drop the last (sponsorship plus) line when not requested.
            lines = lines[:-1]
        sponsorship = self.create(
            {
                "partner_id": partner.id,
                "correspondent_id": partner.id,
                "child_id": sms_child_request.child_id.id,
                "type": "S",
                "contract_line_ids": lines,
                "medium_id": self.env.ref(medium_name).id,
                "origin_id": sms_child_request.event_id.origin_id.id,
            }
        )
        sponsorship.on_change_origin()
        sponsorship._set_next_invoice_date_sms()
        sponsorship.with_delay().put_child_on_no_money_hold()
        partner.set_privacy_statement(origin="new_sponsorship")
        sms_child_request.with_context(lang=partner.lang).complete_step1(sponsorship.id)
        return True

    @job(default_channel="root.sms_sponsorship")
    @related_action(action="related_action_contract")
    def finalize_form(self, pay_first_month_ebanking, payment_mode_id):
        """ validate sms sponsorship after step 2 and send confirmation email
        :param pay_first_month_ebanking: has the sponsor paid first month
        :param payment_mode_id: selected payment mode
        :return: True
        """
        self.associate_group(payment_mode_id)
        if not pay_first_month_ebanking:
            # update sms request and send confirmation. this will be done
            # after the payment if the sponsor decides to pay the first month
            self.sms_request_id.complete_step2()
        return True

    @job(default_channel="root.sms_sponsorship")
    @related_action(action="related_action_contract")
    def post_message_from_step2(self, message):
        # Post message in sponsorship and notify configured staff partners.
        notify_ids = (
            self.env["res.config.settings"]
            .sudo()
            .get_param("sms_new_partner_notify_ids")
        )
        intro = _("Please verify the following information given by the sponsor:")
        return self.message_post(
            body=intro + message,
            subject=_("New SMS sponsorship information"),
            partner_ids=notify_ids,
            type="comment",
            subtype="mail.mt_comment",
            content_subtype="html",
        )

    def associate_group(self, payment_mode_id):
        """ Create contract group when SMS sponsorship is validated.

        Reuses the partner's group with the same payment mode (most
        recent next_invoice_date first) or creates a new one.
        :param payment_mode_id: selected payment mode
        :return: True
        """
        group = self.env["recurring.contract.group"].search(
            [
                ("partner_id", "=", self.partner_id.id),
                ("payment_mode_id", "=", payment_mode_id),
            ],
            order="next_invoice_date desc",
            limit=1,
        )
        if not group:
            # create() on the empty recordset still creates a new group.
            group = group.create(
                {"partner_id": self.partner_id.id, "payment_mode_id": payment_mode_id, }
            )
        self.group_id = group
        return True

    @job(default_channel="root.sms_sponsorship")
    @related_action(action="related_action_contract")
    def create_first_sms_invoice(self):
        """In case the sponsor is a new partner, create first invoice
        because the sponsorship won't be validated and invoices are not
        generated. We therefore manually create an invoice that can be paid
        online.
        :return: True
        """
        invoicer = self.env["recurring.invoicer"].create({})
        # NOTE(review): company_id hard-coded to 1 — presumably the main
        # company; confirm for multi-company setups.
        journal = self.env["account.journal"].search(
            [("type", "=", "sale"), ("company_id", "=", 1)], limit=1
        )
        inv_data = self.group_id._setup_inv_data(journal, invoicer, self)
        self.env["account.invoice"].create(inv_data)
        self.update_next_invoice_date()
        return True

    def contract_waiting(self):
        """
        In case a new sponsor has already paid the first month, we
        automatically pay the first invoice when contract is validated,
        supposing that the staff has verified the partner.
        :return: True
        """
        super().contract_waiting()
        self._post_payment_first_month()
        return True

    def _post_payment_first_month(self):
        """ Validate draft invoices and post their pending draft payments. """
        for contract in self.filtered("invoice_line_ids"):
            invoices = contract.invoice_line_ids.mapped("invoice_id")
            payment = self.env["account.payment"].search(
                [("invoice_ids", "in", invoices.ids), ("state", "=", "draft")]
            )
            draft_invoices = invoices.filtered(lambda i: i.state == "draft")
            draft_invoices.action_invoice_open()
            payment.post()

    def _set_next_invoice_date_sms(self):
        """ Just compute the default next_invoice_date for new sponsorship. """
        self.ensure_one()
        current_date = date.today()
        if self.group_id:
            contract_group = self.group_id
            if contract_group.next_invoice_date:
                # Keep the same day-of-month as the rest of the group.
                next_group_date = contract_group.next_invoice_date
                next_invoice_date = current_date.replace(day=next_group_date.day)
            else:
                next_invoice_date = current_date.replace(day=1)
        else:
            next_invoice_date = current_date.replace(day=1)
        next_invoice_date += relativedelta(months=+1)
        self.next_invoice_date = next_invoice_date
|
#!/usr/bin/python2.5
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'kpy@google.com (Ka-Ping Yee) and many other Googlers'
from django_setup import ugettext as _ # always keep this first
import calendar
import cgi
from datetime import datetime, timedelta
import httplib
import logging
import os
import random
import re
import sys
import time
import traceback
import unicodedata
import urllib
import urlparse
import django.utils.html
from google.appengine.api import images
from google.appengine.api import mail
from google.appengine.api import taskqueue
from google.appengine.api import users
from google.appengine.ext import webapp
import google.appengine.ext.webapp.template
import google.appengine.ext.webapp.util
from recaptcha.client import captcha
import const
import config
import legacy_redirect
import model
import pfif
import resources
import user_agents
# The domain name from which to send e-mail.
EMAIL_DOMAIN = 'appspotmail.com' # All apps on appspot.com use this for mail.
# Query parameters which are automatically preserved on page transition
# if you use utils.BaseHandler.get_url() or
# env.hidden_input_tags_for_preserved_query_params.
PRESERVED_QUERY_PARAM_NAMES = ['style', 'small', 'charsets']
# ==== Field value text ========================================================
def get_person_sex_text(person):
    """Returns the UI text for a person's sex field (looks up '' when the
    field is unset)."""
    return const.PERSON_SEX_TEXT.get(person.sex or '')
def get_note_status_text(note):
    """Returns the UI text for a note's status field (looks up '' when the
    field is unset)."""
    return const.NOTE_STATUS_TEXT.get(note.status or '')
def get_person_status_text(person):
    """Returns the UI text for a person's latest_status (looks up '' when
    the field is unset)."""
    return const.PERSON_STATUS_TEXT.get(person.latest_status or '')
# Things that occur as prefixes of global paths (i.e. no repository name).
GLOBAL_PATH_RE = re.compile(r'^/(global|personfinder)(/?|/.*)$')
# ==== String formatting =======================================================
def format_boolean(value):
    """Renders any truthy value as 'true' and any falsy value as 'false'."""
    return 'true' if value else 'false'
def format_utc_datetime(dt):
    """Formats a naive UTC datetime as ISO 8601 with a 'Z' suffix,
    dropping microseconds; returns '' for a missing value."""
    if not dt:
        return ''
    return '%sZ' % dt.replace(microsecond=0).isoformat()
def format_utc_timestamp(timestamp):
    """Formats epoch seconds (int or float) as an ISO 8601 UTC string;
    returns '' for any non-numeric input."""
    if isinstance(timestamp, (int, float)):
        return format_utc_datetime(datetime.utcfromtimestamp(timestamp))
    return ''
def format_sitemaps_datetime(dt):
    """Formats a naive UTC datetime for sitemaps: second precision with an
    explicit '+00:00' offset."""
    whole_seconds = datetime(
        dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
    return '%s+00:00' % whole_seconds.isoformat()
def encode(string, encoding='utf-8'):
    """If unicode, encode to encoding; if 8-bit string, leave unchanged.

    Python 2 only (`unicode` type). Non-string values (ints, None) pass
    through untouched.
    """
    if isinstance(string, unicode):
        string = string.encode(encoding)
    return string
def urlencode(params, encoding='utf-8'):
    """Encode the key-value pairs in 'params' into a query string, applying
    the specified encoding to any Unicode strings and ignoring any keys that
    have value == None. (urllib.urlencode doesn't support Unicode).

    Keys are sorted so the resulting query string is canonical.  Note that
    any value that is not a string (not just None) is silently dropped.
    """
    keys = params.keys()
    keys.sort()  # Sort the keys to get canonical ordering
    return urllib.urlencode([
        (encode(key, encoding), encode(params[key], encoding))
        for key in keys if isinstance(params[key], basestring)])
def set_param(params, param, value):
    """Take the params from a urlparse and override one of the values.

    :param params: raw query string (urlparse component index 4)
    :param param: name of the parameter to set
    :param value: new value, or None to remove the parameter
    :return: the re-encoded query string
    """
    # This will strip out None-valued params and collapse repeated params.
    params = dict(cgi.parse_qsl(params))
    if value is None:
        if param in params:
            del(params[param])
    else:
        params[param] = value
    return urlencode(params)
def set_url_param(url, param, value):
    """Returns *url* with the query parameter *param* set to *value*.

    A string value adds or overrides the parameter; None removes it.
    Note that value must be a basestring and can't be an int, for example.
    """
    parts = list(urlparse.urlparse(url))
    parts[4] = set_param(parts[4], param, value)  # index 4 is the query
    return urlparse.urlunparse(parts)
def anchor_start(href):
    """Returns the HREF escaped and embedded in an opening anchor tag."""
    return '<a href="%s">' % django.utils.html.escape(href)
def anchor(href, body):
    """Returns a string anchor HTML element with the given href and body,
    both HTML-escaped."""
    return anchor_start(href) + django.utils.html.escape(body) + '</a>'
# ==== Validators ==============================================================
# These validator functions are used to check and parse query parameters.
# Each validator should return a parsed, sanitized value, or return a default
# value, or raise ValueError to display an error message to the user.
def strip(string):
    """Strips surrounding whitespace, then any trailing NUL characters.

    Trailing nulls appear in some strange character encodings like Shift-JIS.
    """
    without_whitespace = string.strip()
    return without_whitespace.rstrip('\0')
def validate_yes(string):
    """Canonicalizes a yes parameter: 'yes' (any case) -> 'yes', else ''."""
    return 'yes' if strip(string).lower() == 'yes' else ''
def validate_checkbox(string):
    """Maps an HTML checkbox value ('on', any case) to 'yes', else ''."""
    return 'yes' if strip(string).lower() == 'on' else ''
def validate_role(string):
    """Canonicalizes the role parameter: 'provide' or (default) 'seek'."""
    return 'provide' if strip(string).lower() == 'provide' else 'seek'
def validate_int(string):
    """Parses an int parameter; passes falsy input ('' or None) through."""
    if not string:
        return string
    return int(strip(string))
def validate_sex(string):
    """Validates the 'sex' parameter, returning a canonical value or ''."""
    if string:
        string = strip(string).lower()
    return string if string in pfif.PERSON_SEX_VALUES else ''
def validate_expiry(value):
    """Validates the 'expiry_option' parameter.

    Returns:
        the int() value if it parses and is positive, otherwise None,
        which represents the 'unspecified' status (callers fall back to
        the repository default).
    """
    # Fixed: the old docstring claimed a default_expiry_days / -1 fallback
    # that the code never implemented; the real contract is int-or-None.
    try:
        value = int(value)
    except (TypeError, ValueError):
        # Missing, non-numeric or None input means 'unspecified'.
        return None
    return value if value > 0 else None
APPROXIMATE_DATE_RE = re.compile(r'^\d{4}(-\d\d)?(-\d\d)?$')

def validate_approximate_date(string):
    """Accepts YYYY, YYYY-MM or YYYY-MM-DD date strings; anything else
    (including a missing value) yields ''."""
    if not string:
        return ''
    candidate = strip(string)
    return candidate if APPROXIMATE_DATE_RE.match(candidate) else ''
AGE_RE = re.compile(r'^\d+(-\d+)?$')

# Hyphen with possibly surrounding whitespaces.
HYPHEN_RE = re.compile(
    ur'\s*[-\u2010-\u2015\u2212\u301c\u30fc\ufe58\ufe63\uff0d]\s*',
    re.UNICODE)

def validate_age(string):
    """Validates the 'age' parameter, returning a canonical value or ''.

    Accepts a single age or a range ('18-25').  NFKC normalization folds
    full-width digits, and the various Unicode dash characters are
    collapsed to a plain ASCII hyphen first.  (Python 2 only: ur''
    literal and unicode().)
    """
    if string:
        string = strip(string)
        string = unicodedata.normalize('NFKC', unicode(string))
        string = HYPHEN_RE.sub('-', string)
        if AGE_RE.match(string):
            return string
    return ''
def validate_status(string):
    """Validates an incoming status parameter, returning one of the canonical
    status strings or ''. Note that '' is always used as the Python value
    to represent the 'unspecified' status."""
    if string:
        string = strip(string).lower()
    return string if string in pfif.NOTE_STATUS_VALUES else ''
DATETIME_RE = re.compile(r'^(\d\d\d\d)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)Z$')

def validate_datetime(string):
    """Parses a UTC timestamp of the form YYYY-MM-DDTHH:MM:SSZ.

    Returns None for a missing value; raises ValueError on bad syntax.
    """
    if not string:
        return None  # A missing value is okay.
    match = DATETIME_RE.match(string)
    if not match:
        raise ValueError('Bad datetime: %r' % string)
    return datetime(*[int(part) for part in match.groups()])
def validate_timestamp(string):
    """Parses a float epoch-seconds parameter into a UTC datetime.

    Falsy input ('' or None) is passed through unchanged; any parse
    failure is normalized to ValueError for the caller.
    """
    try:
        return string and datetime.utcfromtimestamp(float(strip(string)))
    except Exception:
        # Narrowed from a bare except: no longer swallows
        # SystemExit/KeyboardInterrupt; still maps all parse errors
        # to a single ValueError.
        raise ValueError('Bad timestamp: %s' % string)
def validate_image(bytestring):
    """Returns an images.Image for the uploaded bytes, None when no bytes
    were supplied, or False when the data is not a readable image."""
    try:
        image = None
        if bytestring:
            image = images.Image(bytestring)
            image.width  # force the image header to be parsed
        return image
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any image parse error yields False.
        return False
def validate_version(string):
    """Maps a version parameter to a PFIF version object.

    An empty or None value selects pfif.PFIF_DEFAULT_VERSION; an unknown
    version raises ValueError.
    """
    # Fixed: the old code called strip(string) unconditionally in the
    # return, so a None value (as passed by BaseHandler.initialize's
    # error path, validator(None)) raised AttributeError.
    string = strip(string) if string else ''
    if string and string not in pfif.PFIF_VERSIONS:
        raise ValueError('Bad pfif version: %s' % string)
    return pfif.PFIF_VERSIONS[string or pfif.PFIF_DEFAULT_VERSION]
REPO_RE = re.compile('^[a-z0-9-]+$')

def validate_repo(string):
    """Validates a repository name: lowercase letters, digits and hyphens
    only; 'global' is reserved.  Returns None for a missing value."""
    candidate = (string or '').strip()
    if not candidate:
        return None
    if candidate == 'global':
        raise ValueError('"global" is an illegal repository name.')
    if not REPO_RE.match(candidate):
        raise ValueError('Repository names can only contain '
                         'lowercase letters, digits, and hyphens.')
    return candidate
RESOURCE_NAME_RE = re.compile('^[a-z0-9._-]+$')

def validate_resource_name(string):
    """A resource name or bundle label, lowercased; None when missing."""
    candidate = (string or '').strip().lower()
    if not candidate:
        return None
    if not RESOURCE_NAME_RE.match(candidate):
        raise ValueError('Invalid resource name or bundle name: %r' % candidate)
    return candidate
LANG_RE = re.compile('^[A-Za-z0-9-]+$')

def validate_lang(string):
    """A BCP 47 language tag, normalized to lowercase; None when missing."""
    candidate = (string or '').strip().lower()
    if not candidate:
        return None
    if not LANG_RE.match(candidate):
        raise ValueError('Invalid language tag: %r' % candidate)
    return candidate
def validate_cache_seconds(string):
    """A number of seconds to cache a Resource in RAM (default 1.0)."""
    text = (string or '').strip()
    return float(text) if text else 1.0
# ==== Other utilities =========================================================
def url_is_safe(url):
    """True if the URL uses a plain web scheme (rejects e.g. javascript:
    and data: URLs)."""
    scheme = urlparse.urlsplit(url)[0]
    return scheme in ['http', 'https']
def get_app_name():
    """Canonical name of the app, without HR s~ nonsense. This only works in
    the context of the appserver (eg remote_api can't use it)."""
    # Imported lazily — presumably so importing utils doesn't require the
    # appserver environment; confirm before hoisting to module level.
    from google.appengine.api import app_identity
    return app_identity.get_application_id()
def sanitize_urls(record):
    """Clears photo_url/source_url attributes whose scheme is not http(s),
    to protect against XSS."""
    for field in ('photo_url', 'source_url'):
        url = getattr(record, field, None)
        if url and not url_is_safe(url):
            setattr(record, field, None)
def get_host(host=None):
    """Return the host name, without version specific details.

    Keeps at most the last three dot-separated labels, so e.g.
    'v1.app.appspot.com' becomes 'app.appspot.com'.  Defaults to the
    current request's HTTP_HOST.
    """
    # Fixed: the docstring was placed after the first statement, making it
    # a no-op string expression instead of the function's docstring.
    host = host or os.environ['HTTP_HOST']
    parts = host.split('.')
    if len(parts) > 3:
        return '.'.join(parts[-3:])
    return host
def optionally_filter_sensitive_fields(records, auth=None):
    """Removes sensitive fields from a list of dictionaries, unless the client
    has full read authorization."""
    has_full_read = auth and auth.full_read_permission
    if not has_full_read:
        filter_sensitive_fields(records)
def filter_sensitive_fields(records):
    """Blanks out sensitive fields in a list of record dictionaries.

    Each listed field is set to '' (not removed) when present, so record
    shape is preserved for serialization.
    """
    # Fixed: replaced five copy-pasted if-blocks with one data-driven loop.
    sensitive_fields = (
        'date_of_birth',
        'author_email',
        'author_phone',
        'email_of_found_person',
        'phone_of_found_person',
    )
    for record in records:
        for field in sensitive_fields:
            if field in record:
                record[field] = ''
def get_secret(name):
    """Gets a secret from the datastore by name, or returns None if missing."""
    secret = model.Secret.get_by_key_name(name)
    return secret.secret if secret else None
# The current time for testing as a datetime object, or None if using real time.
_utcnow_for_test = None

def set_utcnow_for_test(now):
    """Sets the current time for testing purposes. Pass in a datetime object
    or a timestamp in epoch seconds; or pass None to revert to real time."""
    global _utcnow_for_test
    if isinstance(now, (int, float)):
        now = datetime.utcfromtimestamp(float(now))
    _utcnow_for_test = now

def get_utcnow():
    """Returns the current UTC datetime (settable with set_utcnow_for_test)."""
    if _utcnow_for_test is not None:
        return _utcnow_for_test
    return datetime.utcnow()
def get_timestamp(dt):
    """Converts a datetime object to a float value in epoch seconds,
    preserving microsecond precision."""
    whole_seconds = calendar.timegm(dt.utctimetuple())
    return whole_seconds + dt.microsecond * 1e-6
def get_utcnow_timestamp():
    """Returns the current time in epoch seconds (settable with
    set_utcnow_for_test)."""
    now = get_utcnow()
    return get_timestamp(now)
def log_api_action(handler, action, num_person_records=0, num_note_records=0,
                   people_skipped=0, notes_skipped=0):
    """Log an API action to the datastore, but only when API action logging
    is enabled in the handler's repository configuration."""
    if handler.config and handler.config.api_action_logging:
        model.ApiActionLog.record_action(
            handler.repo, handler.params.key,
            handler.params.version.version, action,
            num_person_records, num_note_records,
            people_skipped, notes_skipped,
            handler.request.headers.get('User-Agent'),
            handler.request.remote_addr, handler.request.url)
def get_full_name(given_name, family_name, config):
    """Return full name string obtained by concatenating given_name and
    family_name in the order specified by config.family_name_first, or just
    given_name if config.use_family_name is False."""
    if not config.use_family_name:
        return given_name
    # Only insert a space when both parts are non-empty.
    separator = u' ' if given_name and family_name else u''
    if config.family_name_first:
        parts = [family_name, given_name]
    else:
        parts = [given_name, family_name]
    return separator.join(parts)
def get_person_full_name(person, config):
    """Return person's full name. "person" can be any object with "given_name"
    and "family_name" attributes."""
    given = person.given_name
    family = person.family_name
    return get_full_name(given, family, config)
def send_confirmation_email_to_record_author(
        handler, person, action, confirm_url, record_id):
    """Send the author an email to confirm enabling/disabling notes
    of a record.

    :param handler: the current BaseHandler (used for error/render/send_mail)
    :param person: record whose author is contacted
    :param action: 'enable' or 'disable'; anything else raises ValueError
    :param confirm_url: link the author must follow to confirm
    :param record_id: used only in the missing-email error message
    """
    if not person.author_email:
        # Without an author email there is nobody to confirm with.
        return handler.error(
            400, _('No author email for record %(id)s.') % {'id' : record_id})
    # i18n: Subject line of an e-mail message confirming the author
    # wants to disable notes for this record
    params = {
        'given_name': person.given_name,
        'family_name': person.family_name,
    }
    if action == 'enable':
        subject = _(
            '[Person Finder] Enable notes on '
            '"%(given_name)s %(family_name)s"?'
        ) % params
    elif action == 'disable':
        subject = _(
            '[Person Finder] Disable notes on '
            '"%(given_name)s %(family_name)s"?'
        ) % params
    else:
        raise ValueError('Unknown action: %s' % action)
    # send e-mail to record author confirming the lock of this record.
    # Template name is derived from the action, e.g. 'enable_notes_email.txt'.
    template_name = '%s_notes_email.txt' % action
    handler.send_mail(
        subject=subject,
        to=person.author_email,
        body=handler.render_to_string(
            template_name,
            author_name=person.author_name,
            given_name=person.given_name,
            family_name=person.family_name,
            site_url=handler.get_url('/'),
            confirm_url=confirm_url
        )
    )
def get_repo_url(request, repo, scheme=None):
    """Constructs the absolute root URL for a given repository, optionally
    forcing the URL scheme."""
    req_scheme, netloc, path = urlparse.urlsplit(request.url)[:3]
    prefix = '/personfinder' if path.startswith('/personfinder') else ''
    if netloc.split(':')[0] == 'localhost':
        scheme = 'http'  # HTTPS is not available when using dev_appserver
    return '%s://%s%s/%s' % (scheme or req_scheme, netloc, prefix, repo)
def get_url(request, repo, action, charset='UTF-8', scheme=None, **params):
    """Constructs the absolute URL for a given action and query parameters,
    preserving the current repo and the 'small' and 'style' parameters."""
    repo_url = get_repo_url(request, repo or 'global', scheme)
    for name in PRESERVED_QUERY_PARAM_NAMES:
        # Explicit params win; otherwise carry the value over from the
        # current request.
        params.setdefault(name, request.get(name, None))
    query = urlencode(params, charset)
    path = action.lstrip('/')
    if query:
        return '%s/%s?%s' % (repo_url, path, query)
    return '%s/%s' % (repo_url, path)
# ==== Struct ==================================================================
class Struct:
    """A simple bag of attributes."""

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def get(self, name, default=None):
        """Dict-style lookup of an *instance* attribute with a default
        (deliberately ignores class attributes and methods)."""
        return self.__dict__.get(name, default)
# ==== Base Handler ============================================================
class BaseHandler(webapp.RequestHandler):
# Handlers that don't need a repository name can set this to False.
repo_required = True
# Handlers that require HTTPS can set this to True.
https_required = False
# Set this to True to enable a handler even for deactivated repositories.
ignore_deactivation = False
# List all accepted query parameters here with their associated validators.
# Each validator returns a parsed/sanitized value, a default, or raises
# ValueError; parsed values land on self.params in initialize().
auto_params = {
    'add_note': validate_yes,
    'age': validate_age,
    'alternate_family_names': strip,
    'alternate_given_names': strip,
    'author_email': strip,
    'author_made_contact': validate_yes,
    'author_name': strip,
    'author_phone': strip,
    'cache_seconds': validate_cache_seconds,
    'clone': validate_yes,
    'confirm': validate_yes,
    'content_id': strip,
    'cursor': strip,
    'date_of_birth': validate_approximate_date,
    'description': strip,
    'dupe_notes': validate_yes,
    'email_of_found_person': strip,
    'error': strip,
    'expiry_option': validate_expiry,
    'family_name': strip,
    'given_name': strip,
    'home_city': strip,
    'home_country': strip,
    'home_neighborhood': strip,
    'home_postal_code': strip,
    'home_state': strip,
    'home_street': strip,
    'id': strip,
    'id1': strip,
    'id2': strip,
    'id3': strip,
    'key': strip,
    'lang': validate_lang,
    'last_known_location': strip,
    'max_results': validate_int,
    'min_entry_date': validate_datetime,
    'new_repo': validate_repo,
    'note_photo': validate_image,
    'note_photo_url': strip,
    'omit_notes': validate_yes,
    'operation': strip,
    'person_record_id': strip,
    'phone_of_found_person': strip,
    'photo': validate_image,
    'photo_url': strip,
    'query': strip,
    'resource_bundle': validate_resource_name,
    'resource_bundle_original': validate_resource_name,
    'resource_lang': validate_lang,
    'resource_name': validate_resource_name,
    'resource_set_preview': validate_yes,
    'role': validate_role,
    'sex': validate_sex,
    'signature': strip,
    'skip': validate_int,
    'small': validate_yes,
    'source_date': strip,
    'source_name': strip,
    'source_url': strip,
    'status': validate_status,
    'style': strip,
    'subscribe': validate_checkbox,
    'subscribe_email': strip,
    'suppress_redirect': validate_yes,
    'target': strip,
    'text': strip,
    'utcnow': validate_timestamp,
    'version': validate_version,
}
def maybe_redirect_jp_tier2_mobile(self):
    """Returns a redirection URL based on the jp_tier2_mobile_redirect_url
    setting if the request is from a Japanese Tier-2 phone, or '' when no
    redirect applies (feature unconfigured, redirect suppressed, 'small'
    UI requested, or a different user agent)."""
    if (self.config and
        self.config.jp_tier2_mobile_redirect_url and
        not self.params.suppress_redirect and
        not self.params.small and
        user_agents.is_jp_tier2_mobile_phone(self.request)):
        redirect_url = (self.config.jp_tier2_mobile_redirect_url + '/' +
                        self.env.action)
        # Carry the original query string across the redirect.
        if self.request.query_string:
            redirect_url += '?' + self.request.query_string
        return redirect_url
    return ''
def redirect(self, path, repo=None, permanent=False, **params):
    """Issues an HTTP redirect, resolving repository-relative paths.

    Absolute URLs ('scheme:...') and global paths keep only appended
    query params; any other path is expanded through get_url().
    """
    # This will prepend the repo to the path to create a working URL,
    # unless the path has a global prefix or is an absolute URL.
    if re.match('^[a-z]+:', path) or GLOBAL_PATH_RE.match(path):
        if params:
            path += '?' + urlencode(params, self.charset)
    else:
        path = self.get_url(path, repo, **params)
    return webapp.RequestHandler.redirect(self, path, permanent=permanent)
def render(self, name, language_override=None, cache_seconds=0,
           get_vars=lambda: {}, **vars):
    """Renders a template to the output stream, passing in the variables
    specified in **vars as well as any additional variables returned by
    get_vars(). Since this is intended for use by a dynamic page handler,
    caching is off by default; if cache_seconds is positive, then
    get_vars() will be called only when cached content is unavailable."""
    self.write(self.render_to_string(
        name, language_override, cache_seconds, get_vars, **vars))
def render_to_string(self, name, language_override=None, cache_seconds=0,
                     get_vars=lambda: {}, **vars):
    """Renders a template to a string, passing in the variables specified
    in **vars as well as any additional variables returned by get_vars().
    Since this is intended for use by a dynamic page handler, caching is
    off by default; if cache_seconds is positive, then get_vars() will be
    called only when cached content is unavailable."""
    # TODO(kpy): Make the contents of extra_key overridable by callers?
    lang = language_override or self.env.lang
    # Cache key varies with repo, charset and the full query string.
    extra_key = (self.env.repo, self.env.charset, self.request.query_string)
    def get_all_vars():
        # Deferred so the work only happens on a cache miss.
        vars['env'] = self.env  # pass along application-wide context
        vars['config'] = self.config  # pass along the configuration
        vars['params'] = self.params  # pass along the query parameters
        vars.update(get_vars())
        return vars
    return resources.get_rendered(
        name, lang, extra_key, get_all_vars, cache_seconds)
def error(self, code, message='', message_html=''):
    """Renders an error-styled status page with the given HTTP code."""
    self.info(code, message=message, message_html=message_html,
              style='error')
def info(self, code, message='', message_html='', style='info'):
    """Renders a status page with the given HTTP code and message.

    4xx/5xx codes go through webapp's error path; other codes just set
    the status.  Falls back to raw output if the message template fails,
    then blocks any further writes to the response.
    """
    is_error = 400 <= code < 600
    if is_error:
        webapp.RequestHandler.error(self, code)
    else:
        self.response.set_status(code)
    if not message and not message_html:
        # Default to the standard reason phrase, e.g. '404: Not Found'.
        message = '%d: %s' % (code, httplib.responses.get(code))
    try:
        self.render('message.html', cls=style,
                    message=message, message_html=message_html)
    except:
        # Last-resort plain output if template rendering itself fails.
        self.response.out.write(message + '<p>' + message_html)
    self.terminate_response()
def terminate_response(self):
    """Prevents any further output from being written."""
    ignore_call = lambda *args: None
    self.response.out.write = ignore_call
    self.get = ignore_call
    self.post = ignore_call
def write(self, text):
    """Sends text to the client using the charset from select_charset()."""
    encoded = text.encode(self.env.charset, 'replace')
    self.response.out.write(encoded)
def get_url(self, action, repo=None, scheme=None, **params):
    """Constructs the absolute URL for a given action and query parameters,
    preserving the current repo and the 'small' and 'style' parameters.
    Thin wrapper over the module-level get_url() bound to this request."""
    return get_url(self.request, repo or self.env.repo, action,
                   charset=self.env.charset, scheme=scheme, **params)
@staticmethod
def add_task_for_repo(repo, name, action, **kwargs):
    """Queues up a task for an individual repository.

    The task name embeds a millisecond timestamp so repeated calls get
    unique names; kwargs become the GET parameters of the task request.
    """
    task_name = '%s-%s-%s' % (repo, name, int(time.time()*1000))
    path = '/%s/%s' % (repo, action)
    taskqueue.add(name=task_name, method='GET', url=path, params=kwargs)
def send_mail(self, to, subject, body):
    """Sends e-mail using a sender address that's allowed for this app.

    The actual delivery is deferred to the 'send-mail' task queue rather
    than sent inline.
    """
    app_id = get_app_name()
    sender = 'Do not reply <do-not-reply@%s.%s>' % (app_id, EMAIL_DOMAIN)
    logging.info('Add mail task: recipient %r, subject %r' % (to, subject))
    taskqueue.add(queue_name='send-mail', url='/global/admin/send_mail',
                  params={'sender': sender,
                          'to': to,
                          'subject': subject,
                          'body': body})
def get_captcha_html(self, error_code=None, use_ssl=False):
    """Generates the necessary HTML to display a CAPTCHA validation box."""
    # We use the 'custom_translations' parameter for UI messages, whereas
    # the 'lang' parameter controls the language of the challenge itself.
    # reCAPTCHA falls back to 'en' if this parameter isn't recognized.
    lang = self.env.lang.split('-')[0]
    return captcha.get_display_html(
        public_key=config.get('captcha_public_key'),
        use_ssl=use_ssl, error=error_code, lang=lang,
        custom_translations={
            # reCAPTCHA doesn't support all languages, so we treat its
            # messages as part of this app's usual translation workflow
            'instructions_visual': _('Type the two words:'),
            'instructions_audio': _('Type what you hear:'),
            'play_again': _('Play the sound again'),
            'cant_hear_this': _('Download the sound as MP3'),
            'visual_challenge': _('Get a visual challenge'),
            'audio_challenge': _('Get an audio challenge'),
            'refresh_btn': _('Get a new challenge'),
            'help_btn': _('Help'),
            'incorrect_try_again': _('Incorrect. Try again.')
        }
    )
def get_captcha_response(self):
    """Returns an object containing the CAPTCHA response information for the
    given request's CAPTCHA field information."""
    challenge = self.request.get('recaptcha_challenge_field')
    response = self.request.get('recaptcha_response_field')
    remote_ip = os.environ['REMOTE_ADDR']
    return captcha.submit(
        challenge, response, config.get('captcha_private_key'), remote_ip)
def handle_exception(self, exception, debug_mode):
    """webapp's last-chance hook: logs the traceback and shows a generic
    500 error page to the user."""
    logging.error(traceback.format_exc())
    self.error(500, _(
        'There was an error processing your request. Sorry for the '
        'inconvenience. Our administrators will investigate the source '
        'of the problem, but please check that the format of your '
        'request is correct.'))
def to_local_time(self, date):
    """Converts a datetime object to the local time configured for the
    current repository. For convenience, returns None if date is None."""
    # TODO(kpy): This only works for repositories that have a single fixed
    # time zone offset and never use Daylight Saving Time.
    if date:
        if self.config.time_zone_offset:
            return date + timedelta(0, 3600*self.config.time_zone_offset)
        return date
def initialize(self, request, response, env):
webapp.RequestHandler.initialize(self, request, response)
self.params = Struct()
self.env = env
self.repo = env.repo
self.config = env.config
self.charset = env.charset
# Validate query parameters.
for name, validator in self.auto_params.items():
try:
value = self.request.get(name, '')
setattr(self.params, name, validator(value))
except Exception, e:
setattr(self.params, name, validator(None))
return self.error(400, 'Invalid parameter %s: %s' % (name, e))
# Log the User-Agent header.
sample_rate = float(
self.config and self.config.user_agent_sample_rate or 0)
if random.random() < sample_rate:
model.UserAgentLog(
repo=self.repo, sample_rate=sample_rate,
user_agent=self.request.headers.get('User-Agent'), lang=lang,
accept_charset=self.request.headers.get('Accept-Charset', ''),
ip_address=self.request.remote_addr).put()
# Sets default Content-Type header.
self.response.headers['Content-Type'] = (
'text/html; charset=%s' % self.charset)
# Check for SSL (unless running on localhost for development).
if self.https_required and self.env.domain != 'localhost':
if self.env.scheme != 'https':
return self.error(403, 'HTTPS is required.')
# Check for an authorization key.
self.auth = None
if self.params.key:
if self.repo:
# check for domain specific one.
self.auth = model.Authorization.get(self.repo, self.params.key)
if not self.auth:
# perhaps this is a global key ('*' for consistency with config).
self.auth = model.Authorization.get('*', self.params.key)
# Handlers that don't need a repository configuration can skip it.
if not self.repo:
if self.repo_required:
return self.error(400, 'No repository specified.')
return
# Everything after this requires a repo.
# Reject requests for repositories that don't exist.
if not model.Repo.get_by_key_name(self.repo):
if legacy_redirect.do_redirect(self):
return legacy_redirect.redirect(self)
html = 'No such repository. '
if self.env.repo_options:
html += 'Select:<p>' + self.render_to_string('repo-menu.html')
return self.error(404, message_html=html)
# If this repository has been deactivated, terminate with a message.
if self.config.deactivated and not self.ignore_deactivation:
self.env.language_menu = []
self.env.robots_ok = True
self.render('message.html', cls='deactivation',
message_html=self.config.deactivation_message_html)
self.terminate_response()
Revert needless diff.
#!/usr/bin/python2.5
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'kpy@google.com (Ka-Ping Yee) and many other Googlers'
from django_setup import ugettext as _ # always keep this first
import calendar
import cgi
from datetime import datetime, timedelta
import httplib
import logging
import os
import random
import re
import sys
import time
import traceback
import unicodedata
import urllib
import urlparse
import django.utils.html
from google.appengine.api import images
from google.appengine.api import mail
from google.appengine.api import taskqueue
from google.appengine.api import users
from google.appengine.ext import webapp
import google.appengine.ext.webapp.template
import google.appengine.ext.webapp.util
from recaptcha.client import captcha
import const
import config
import legacy_redirect
import model
import pfif
import resources
import user_agents
# The domain name from which to send e-mail.
EMAIL_DOMAIN = 'appspotmail.com'  # All apps on appspot.com use this for mail.

# Query parameters which are automatically preserved on page transition
# if you use utils.BaseHandler.get_url() or
# env.hidden_input_tags_for_preserved_query_params.
PRESERVED_QUERY_PARAM_NAMES = ['style', 'small', 'charsets']
# ==== Field value text ========================================================
def get_person_sex_text(person):
    """Returns the UI text for a person's sex field."""
    sex_value = person.sex or ''
    return const.PERSON_SEX_TEXT.get(sex_value)
def get_note_status_text(note):
    """Returns the UI text for a note's status field."""
    status_value = note.status or ''
    return const.NOTE_STATUS_TEXT.get(status_value)
def get_person_status_text(person):
    """Returns the UI text for a person's latest_status."""
    status_value = person.latest_status or ''
    return const.PERSON_STATUS_TEXT.get(status_value)
# Things that occur as prefixes of global paths (i.e. no repository name).
# Matches '/global' or '/personfinder' alone, or any path beneath them.
GLOBAL_PATH_RE = re.compile(r'^/(global|personfinder)(/?|/.*)$')
# ==== String formatting =======================================================
def format_boolean(value):
    """Render any value's truthiness as the string 'true' or 'false'."""
    if value:
        return 'true'
    return 'false'
def format_utc_datetime(dt):
    """Format a datetime as ISO-8601 with a 'Z' suffix; '' when missing."""
    if not dt:
        return ''
    # Drop sub-second precision before formatting.
    return dt.replace(microsecond=0).isoformat() + 'Z'
def format_utc_timestamp(timestamp):
    """Format epoch seconds as a UTC ISO-8601 string; '' for non-numbers."""
    if not isinstance(timestamp, (int, float)):
        return ''
    return format_utc_datetime(datetime.utcfromtimestamp(timestamp))
def format_sitemaps_datetime(dt):
    """Format a datetime for sitemaps: second precision, explicit UTC offset."""
    truncated = datetime(
        dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
    return truncated.isoformat() + '+00:00'
def encode(string, encoding='utf-8'):
    """If unicode, encode to encoding; if 8-bit string, leave unchanged."""
    if isinstance(string, unicode):
        return string.encode(encoding)
    return string
def urlencode(params, encoding='utf-8'):
    """Encode the key-value pairs in 'params' into a query string, applying
    the specified encoding to any Unicode strings and ignoring any keys that
    have value == None. (urllib.urlencode doesn't support Unicode)."""
    pairs = []
    # Sort keys so the resulting query string is canonical.
    for key in sorted(params.keys()):
        value = params[key]
        if isinstance(value, basestring):
            pairs.append((encode(key, encoding), encode(value, encoding)))
    return urllib.urlencode(pairs)
def set_param(params, param, value):
    """Take the params from a urlparse and override one of the values."""
    # This will strip out None-valued params and collapse repeated params.
    parsed = dict(cgi.parse_qsl(params))
    if value is None:
        parsed.pop(param, None)
    else:
        parsed[param] = value
    return urlencode(parsed)
def set_url_param(url, param, value):
    """This modifies a URL setting the given param to the specified value. This
    may add the param or override an existing value, or, if the value is None,
    it will remove the param. Note that value must be a basestring and can't be
    an int, for example."""
    pieces = list(urlparse.urlparse(url))
    pieces[4] = set_param(pieces[4], param, value)  # index 4 is the query
    return urlparse.urlunparse(pieces)
def anchor_start(href):
    """Returns the HREF escaped and embedded in an anchor tag."""
    escaped_href = django.utils.html.escape(href)
    return '<a href="' + escaped_href + '">'
def anchor(href, body):
    """Returns a string anchor HTML element with the given href and body."""
    escaped_body = django.utils.html.escape(body)
    return anchor_start(href) + escaped_body + '</a>'
# ==== Validators ==============================================================
# These validator functions are used to check and parse query parameters.
# Each validator should return a parsed, sanitized value, or return a default
# value, or raise ValueError to display an error message to the user.
def strip(string):
    """Trim surrounding whitespace, then any trailing NUL characters.

    Trailing nulls appear in some strange character encodings like Shift-JIS.
    """
    trimmed = string.strip()
    return trimmed.rstrip('\0')
def validate_yes(string):
    """Canonicalize a yes/no parameter: 'yes' (case-insensitive) or ''."""
    return 'yes' if strip(string).lower() == 'yes' else ''
def validate_checkbox(string):
    """An HTML checkbox posts 'on' when checked; map that to 'yes' or ''."""
    return 'yes' if strip(string).lower() == 'on' else ''
def validate_role(string):
    """Canonicalize the role parameter: 'provide', else the default 'seek'."""
    return 'provide' if strip(string).lower() == 'provide' else 'seek'
def validate_int(string):
    """Parse an integer; empty/None input is returned unchanged (falsy)."""
    if not string:
        return string
    return int(strip(string))
def validate_sex(string):
    """Validates the 'sex' parameter, returning a canonical value or ''."""
    if not string:
        return ''
    canonical = strip(string).lower()
    return canonical if canonical in pfif.PERSON_SEX_VALUES else ''
def validate_expiry(value):
    """Validates that the 'expiry_option' parameter is a positive integer.

    Returns:
        the int() value if it's present, parses, and is positive; otherwise
        None, which represents the 'unspecified' expiry.
    """
    try:
        value = int(value)
    except Exception, e:
        return None
    # Non-positive values are treated the same as unparseable ones.
    return value > 0 and value or None
APPROXIMATE_DATE_RE = re.compile(r'^\d{4}(-\d\d)?(-\d\d)?$')

def validate_approximate_date(string):
    """Accept 'YYYY', 'YYYY-MM', or 'YYYY-MM-DD'; anything else becomes ''."""
    if not string:
        return ''
    candidate = strip(string)
    if APPROXIMATE_DATE_RE.match(candidate):
        return candidate
    return ''
AGE_RE = re.compile(r'^\d+(-\d+)?$')
# Hyphen with possibly surrounding whitespaces.
HYPHEN_RE = re.compile(
ur'\s*[-\u2010-\u2015\u2212\u301c\u30fc\ufe58\ufe63\uff0d]\s*',
re.UNICODE)
def validate_age(string):
"""Validates the 'age' parameter, returning a canonical value or ''."""
if string:
string = strip(string)
string = unicodedata.normalize('NFKC', unicode(string))
string = HYPHEN_RE.sub('-', string)
if AGE_RE.match(string):
return string
return ''
def validate_status(string):
    """Validates an incoming status parameter, returning one of the canonical
    status strings or ''. Note that '' is always used as the Python value
    to represent the 'unspecified' status."""
    if not string:
        return ''
    canonical = strip(string).lower()
    return canonical if canonical in pfif.NOTE_STATUS_VALUES else ''
DATETIME_RE = re.compile(r'^(\d\d\d\d)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)Z$')

def validate_datetime(string):
    """Parse a 'YYYY-MM-DDTHH:MM:SSZ' timestamp into a datetime.

    Returns None for a missing value; raises ValueError when malformed."""
    if not string:
        return None  # A missing value is okay.
    match = DATETIME_RE.match(string)
    if not match:
        raise ValueError('Bad datetime: %r' % string)
    return datetime(*[int(part) for part in match.groups()])
def validate_timestamp(string):
    """Parse epoch seconds into a UTC datetime; falsy input passes through.

    Any parsing failure is reported uniformly as ValueError."""
    try:
        if not string:
            return string
        return datetime.utcfromtimestamp(float(strip(string)))
    except:
        raise ValueError('Bad timestamp: %s' % string)
def validate_image(bytestring):
    """Parse uploaded image bytes.

    Returns None when no bytes were posted, an images.Image when they parse,
    or False when bytes are present but invalid."""
    try:
        image = None
        if bytestring:
            image = images.Image(bytestring)
            # Accessing .width forces the data to be decoded, so bad
            # image data fails here rather than later.
            image.width
        return image
    except:
        return False
def validate_version(string):
    """Version, if present, should be in pfif versions.

    Returns the pfif version object for the given version string; an empty
    value selects pfif.PFIF_DEFAULT_VERSION, and an unknown non-empty value
    raises ValueError."""
    if string and strip(string) not in pfif.PFIF_VERSIONS:
        raise ValueError('Bad pfif version: %s' % string)
    return pfif.PFIF_VERSIONS[strip(string) or pfif.PFIF_DEFAULT_VERSION]
REPO_RE = re.compile('^[a-z0-9-]+$')

def validate_repo(string):
    """Validate a repository name.

    Only lowercase letters, digits, and hyphens are allowed, and 'global'
    is reserved. Returns None when the value is missing."""
    candidate = (string or '').strip()
    if not candidate:
        return None
    if candidate == 'global':
        raise ValueError('"global" is an illegal repository name.')
    if not REPO_RE.match(candidate):
        raise ValueError('Repository names can only contain '
                         'lowercase letters, digits, and hyphens.')
    return candidate
RESOURCE_NAME_RE = re.compile('^[a-z0-9._-]+$')

def validate_resource_name(string):
    """A resource name or bundle label (lowercased); None when missing."""
    candidate = (string or '').strip().lower()
    if not candidate:
        return None
    if not RESOURCE_NAME_RE.match(candidate):
        raise ValueError('Invalid resource name or bundle name: %r' % string)
    return candidate
LANG_RE = re.compile('^[A-Za-z0-9-]+$')

def validate_lang(string):
    """A BCP 47 language tag (lowercased); None when missing."""
    candidate = (string or '').strip().lower()
    if not candidate:
        return None
    if not LANG_RE.match(candidate):
        raise ValueError('Invalid language tag: %r' % string)
    return candidate
def validate_cache_seconds(string):
    """A number of seconds to cache a Resource in RAM (defaults to 1.0)."""
    text = (string or '').strip()
    return float(text) if text else 1.0
# ==== Other utilities =========================================================
def url_is_safe(url):
    """Only plain http/https URLs are considered safe to re-emit."""
    scheme = urlparse.urlsplit(url)[0]
    return scheme in ['http', 'https']
def get_app_name():
    """Canonical name of the app, without HR s~ nonsense. This only works in
    the context of the appserver (eg remote_api can't use it)."""
    # Imported locally: per the docstring, this API is only usable inside
    # the appserver.
    from google.appengine.api import app_identity
    return app_identity.get_application_id()
def sanitize_urls(record):
    """Clean up URLs to protect against XSS."""
    for field in ('photo_url', 'source_url'):
        value = getattr(record, field, None)
        if value and not url_is_safe(value):
            setattr(record, field, None)
def get_host(host=None):
    """Return the host name, without version specific details.

    Keeps only the last three dot-separated labels, so a versioned host
    like 'v1.app-id.appspot.com' becomes 'app-id.appspot.com'. Defaults
    to the current request's HTTP_HOST.

    (Fix: the docstring was previously placed after the first statement,
    so it was a no-op expression rather than a docstring.)
    """
    host = host or os.environ['HTTP_HOST']
    parts = host.split('.')
    if len(parts) > 3:
        return '.'.join(parts[-3:])
    else:
        return host
def optionally_filter_sensitive_fields(records, auth=None):
    """Removes sensitive fields from a list of dictionaries, unless the client
    has full read authorization."""
    full_read = auth and auth.full_read_permission
    if not full_read:
        filter_sensitive_fields(records)
def filter_sensitive_fields(records):
    """Blanks out sensitive fields in a list of record dictionaries."""
    sensitive_fields = ('date_of_birth', 'author_email', 'author_phone',
                        'email_of_found_person', 'phone_of_found_person')
    for record in records:
        for field in sensitive_fields:
            if field in record:
                record[field] = ''
def get_secret(name):
    """Gets a secret from the datastore by name, or returns None if missing."""
    entity = model.Secret.get_by_key_name(name)
    if not entity:
        return None
    return entity.secret
# The current time for testing as a datetime object, or None if using real time.
_utcnow_for_test = None

def set_utcnow_for_test(now):
    """Sets the current time for testing purposes. Pass in a datetime object
    or a timestamp in epoch seconds; or pass None to revert to real time."""
    global _utcnow_for_test
    if isinstance(now, (int, float)):
        now = datetime.utcfromtimestamp(float(now))
    _utcnow_for_test = now

def get_utcnow():
    """Returns the current UTC datetime (settable with set_utcnow_for_test)."""
    if _utcnow_for_test is None:
        return datetime.utcnow()
    return _utcnow_for_test
def get_timestamp(dt):
    """Converts a datetime object to a float value in epoch seconds."""
    whole_seconds = calendar.timegm(dt.utctimetuple())
    return whole_seconds + dt.microsecond * 1e-6
def get_utcnow_timestamp():
    """Returns the current time in epoch seconds (settable with
    set_utcnow_for_test)."""
    now = get_utcnow()
    return get_timestamp(now)
def log_api_action(handler, action, num_person_records=0, num_note_records=0,
                   people_skipped=0, notes_skipped=0):
    """Log an API action.

    Records the action to the datastore only when the repository's
    api_action_logging config flag is enabled."""
    if handler.config and handler.config.api_action_logging:
        model.ApiActionLog.record_action(
            handler.repo, handler.params.key,
            handler.params.version.version, action,
            num_person_records, num_note_records,
            people_skipped, notes_skipped,
            handler.request.headers.get('User-Agent'),
            handler.request.remote_addr, handler.request.url)
def get_full_name(given_name, family_name, config):
    """Return full name string obtained by concatenating given_name and
    family_name in the order specified by config.family_name_first, or just
    given_name if config.use_family_name is False."""
    if not config.use_family_name:
        return given_name
    # Only insert a space when both parts are non-empty.
    separator = u' ' if (given_name and family_name) else u''
    if config.family_name_first:
        ordered = [family_name, given_name]
    else:
        ordered = [given_name, family_name]
    return separator.join(ordered)
def get_person_full_name(person, config):
    """Return person's full name. "person" can be any object with "given_name"
    and "family_name" attributes."""
    return get_full_name(person.given_name, person.family_name, config)
def send_confirmation_email_to_record_author(
        handler, person, action, confirm_url, record_id):
    """Send the author an email to confirm enabling/disabling notes
    of a record.

    'action' must be 'enable' or 'disable'; it selects both the subject
    line and the e-mail body template. Returns a 400 error page instead
    when the record has no author e-mail address."""
    if not person.author_email:
        return handler.error(
            400, _('No author email for record %(id)s.') % {'id' : record_id})
    # i18n: Subject line of an e-mail message confirming the author
    # wants to disable notes for this record
    params = {
        'given_name': person.given_name,
        'family_name': person.family_name,
    }
    if action == 'enable':
        subject = _(
            '[Person Finder] Enable notes on '
            '"%(given_name)s %(family_name)s"?'
        ) % params
    elif action == 'disable':
        subject = _(
            '[Person Finder] Disable notes on '
            '"%(given_name)s %(family_name)s"?'
        ) % params
    else:
        raise ValueError('Unknown action: %s' % action)
    # send e-mail to record author confirming the lock of this record.
    template_name = '%s_notes_email.txt' % action
    handler.send_mail(
        subject=subject,
        to=person.author_email,
        body=handler.render_to_string(
            template_name,
            author_name=person.author_name,
            given_name=person.given_name,
            family_name=person.family_name,
            site_url=handler.get_url('/'),
            confirm_url=confirm_url
        )
    )
def get_repo_url(request, repo, scheme=None):
    """Constructs the absolute root URL for a given repository."""
    split_url = urlparse.urlsplit(request.url)
    req_scheme, req_netloc, req_path = split_url[0], split_url[1], split_url[2]
    prefix = '/personfinder' if req_path.startswith('/personfinder') else ''
    if req_netloc.split(':')[0] == 'localhost':
        scheme = 'http'  # HTTPS is not available when using dev_appserver
    return (scheme or req_scheme) + '://' + req_netloc + prefix + '/' + repo
def get_url(request, repo, action, charset='utf-8', scheme=None, **params):
    """Constructs the absolute URL for a given action and query parameters,
    preserving the current repo and the 'small' and 'style' parameters."""
    base = get_repo_url(request, repo or 'global', scheme)
    # Carry over any preserved parameters the caller didn't set explicitly.
    for name in PRESERVED_QUERY_PARAM_NAMES:
        if name not in params:
            params[name] = request.get(name, None)
    query = urlencode(params, charset)
    url = base + '/' + action.lstrip('/')
    if query:
        url += '?' + query
    return url
# ==== Struct ==================================================================
class Struct:
    """A simple bag of attributes."""

    def __init__(self, **kwargs):
        for key in kwargs:
            setattr(self, key, kwargs[key])

    def get(self, name, default=None):
        """Return the named attribute's value, or default if unset."""
        return self.__dict__.get(name, default)
# ==== Base Handler ============================================================
class BaseHandler(webapp.RequestHandler):
# Handlers that don't need a repository name can set this to False.
repo_required = True
# Handlers that require HTTPS can set this to True.
https_required = False
# Set this to True to enable a handler even for deactivated repositories.
ignore_deactivation = False
# List all accepted query parameters here with their associated validators.
auto_params = {
'add_note': validate_yes,
'age': validate_age,
'alternate_family_names': strip,
'alternate_given_names': strip,
'author_email': strip,
'author_made_contact': validate_yes,
'author_name': strip,
'author_phone': strip,
'cache_seconds': validate_cache_seconds,
'clone': validate_yes,
'confirm': validate_yes,
'content_id': strip,
'cursor': strip,
'date_of_birth': validate_approximate_date,
'description': strip,
'dupe_notes': validate_yes,
'email_of_found_person': strip,
'error': strip,
'expiry_option': validate_expiry,
'family_name': strip,
'given_name': strip,
'home_city': strip,
'home_country': strip,
'home_neighborhood': strip,
'home_postal_code': strip,
'home_state': strip,
'home_street': strip,
'id': strip,
'id1': strip,
'id2': strip,
'id3': strip,
'key': strip,
'lang': validate_lang,
'last_known_location': strip,
'max_results': validate_int,
'min_entry_date': validate_datetime,
'new_repo': validate_repo,
'note_photo': validate_image,
'note_photo_url': strip,
'omit_notes': validate_yes,
'operation': strip,
'person_record_id': strip,
'phone_of_found_person': strip,
'photo': validate_image,
'photo_url': strip,
'query': strip,
'resource_bundle': validate_resource_name,
'resource_bundle_original': validate_resource_name,
'resource_lang': validate_lang,
'resource_name': validate_resource_name,
'resource_set_preview': validate_yes,
'role': validate_role,
'sex': validate_sex,
'signature': strip,
'skip': validate_int,
'small': validate_yes,
'source_date': strip,
'source_name': strip,
'source_url': strip,
'status': validate_status,
'style': strip,
'subscribe': validate_checkbox,
'subscribe_email': strip,
'suppress_redirect': validate_yes,
'target': strip,
'text': strip,
'utcnow': validate_timestamp,
'version': validate_version,
}
def maybe_redirect_jp_tier2_mobile(self):
"""Returns a redirection URL based on the jp_tier2_mobile_redirect_url
setting if the request is from a Japanese Tier-2 phone."""
if (self.config and
self.config.jp_tier2_mobile_redirect_url and
not self.params.suppress_redirect and
not self.params.small and
user_agents.is_jp_tier2_mobile_phone(self.request)):
redirect_url = (self.config.jp_tier2_mobile_redirect_url + '/' +
self.env.action)
if self.request.query_string:
redirect_url += '?' + self.request.query_string
return redirect_url
return ''
def redirect(self, path, repo=None, permanent=False, **params):
# This will prepend the repo to the path to create a working URL,
# unless the path has a global prefix or is an absolute URL.
if re.match('^[a-z]+:', path) or GLOBAL_PATH_RE.match(path):
if params:
path += '?' + urlencode(params, self.charset)
else:
path = self.get_url(path, repo, **params)
return webapp.RequestHandler.redirect(self, path, permanent=permanent)
def render(self, name, language_override=None, cache_seconds=0,
get_vars=lambda: {}, **vars):
"""Renders a template to the output stream, passing in the variables
specified in **vars as well as any additional variables returned by
get_vars(). Since this is intended for use by a dynamic page handler,
caching is off by default; if cache_seconds is positive, then
get_vars() will be called only when cached content is unavailable."""
self.write(self.render_to_string(
name, language_override, cache_seconds, get_vars, **vars))
def render_to_string(self, name, language_override=None, cache_seconds=0,
get_vars=lambda: {}, **vars):
"""Renders a template to a string, passing in the variables specified
in **vars as well as any additional variables returned by get_vars().
Since this is intended for use by a dynamic page handler, caching is
off by default; if cache_seconds is positive, then get_vars() will be
called only when cached content is unavailable."""
# TODO(kpy): Make the contents of extra_key overridable by callers?
lang = language_override or self.env.lang
extra_key = (self.env.repo, self.env.charset, self.request.query_string)
def get_all_vars():
vars['env'] = self.env # pass along application-wide context
vars['config'] = self.config # pass along the configuration
vars['params'] = self.params # pass along the query parameters
vars.update(get_vars())
return vars
return resources.get_rendered(
name, lang, extra_key, get_all_vars, cache_seconds)
def error(self, code, message='', message_html=''):
self.info(code, message, message_html, style='error')
def info(self, code, message='', message_html='', style='info'):
is_error = 400 <= code < 600
if is_error:
webapp.RequestHandler.error(self, code)
else:
self.response.set_status(code)
if not message and not message_html:
message = '%d: %s' % (code, httplib.responses.get(code))
try:
self.render('message.html', cls=style,
message=message, message_html=message_html)
except:
self.response.out.write(message + '<p>' + message_html)
self.terminate_response()
def terminate_response(self):
"""Prevents any further output from being written."""
self.response.out.write = lambda *args: None
self.get = lambda *args: None
self.post = lambda *args: None
def write(self, text):
"""Sends text to the client using the charset from select_charset()."""
self.response.out.write(text.encode(self.env.charset, 'replace'))
def get_url(self, action, repo=None, scheme=None, **params):
"""Constructs the absolute URL for a given action and query parameters,
preserving the current repo and the 'small' and 'style' parameters."""
return get_url(self.request, repo or self.env.repo, action,
charset=self.env.charset, scheme=scheme, **params)
@staticmethod
def add_task_for_repo(repo, name, action, **kwargs):
"""Queues up a task for an individual repository."""
task_name = '%s-%s-%s' % (repo, name, int(time.time()*1000))
path = '/%s/%s' % (repo, action)
taskqueue.add(name=task_name, method='GET', url=path, params=kwargs)
def send_mail(self, to, subject, body):
"""Sends e-mail using a sender address that's allowed for this app."""
app_id = get_app_name()
sender = 'Do not reply <do-not-reply@%s.%s>' % (app_id, EMAIL_DOMAIN)
logging.info('Add mail task: recipient %r, subject %r' % (to, subject))
taskqueue.add(queue_name='send-mail', url='/global/admin/send_mail',
params={'sender': sender,
'to': to,
'subject': subject,
'body': body})
def get_captcha_html(self, error_code=None, use_ssl=False):
"""Generates the necessary HTML to display a CAPTCHA validation box."""
# We use the 'custom_translations' parameter for UI messages, whereas
# the 'lang' parameter controls the language of the challenge itself.
# reCAPTCHA falls back to 'en' if this parameter isn't recognized.
lang = self.env.lang.split('-')[0]
return captcha.get_display_html(
public_key=config.get('captcha_public_key'),
use_ssl=use_ssl, error=error_code, lang=lang,
custom_translations={
# reCAPTCHA doesn't support all languages, so we treat its
# messages as part of this app's usual translation workflow
'instructions_visual': _('Type the two words:'),
'instructions_audio': _('Type what you hear:'),
'play_again': _('Play the sound again'),
'cant_hear_this': _('Download the sound as MP3'),
'visual_challenge': _('Get a visual challenge'),
'audio_challenge': _('Get an audio challenge'),
'refresh_btn': _('Get a new challenge'),
'help_btn': _('Help'),
'incorrect_try_again': _('Incorrect. Try again.')
}
)
def get_captcha_response(self):
"""Returns an object containing the CAPTCHA response information for the
given request's CAPTCHA field information."""
challenge = self.request.get('recaptcha_challenge_field')
response = self.request.get('recaptcha_response_field')
remote_ip = os.environ['REMOTE_ADDR']
return captcha.submit(
challenge, response, config.get('captcha_private_key'), remote_ip)
def handle_exception(self, exception, debug_mode):
logging.error(traceback.format_exc())
self.error(500, _(
'There was an error processing your request. Sorry for the '
'inconvenience. Our administrators will investigate the source '
'of the problem, but please check that the format of your '
'request is correct.'))
def to_local_time(self, date):
"""Converts a datetime object to the local time configured for the
current repository. For convenience, returns None if date is None."""
# TODO(kpy): This only works for repositories that have a single fixed
# time zone offset and never use Daylight Saving Time.
if date:
if self.config.time_zone_offset:
return date + timedelta(0, 3600*self.config.time_zone_offset)
return date
def initialize(self, request, response, env):
webapp.RequestHandler.initialize(self, request, response)
self.params = Struct()
self.env = env
self.repo = env.repo
self.config = env.config
self.charset = env.charset
# Validate query parameters.
for name, validator in self.auto_params.items():
try:
value = self.request.get(name, '')
setattr(self.params, name, validator(value))
except Exception, e:
setattr(self.params, name, validator(None))
return self.error(400, 'Invalid parameter %s: %s' % (name, e))
# Log the User-Agent header.
sample_rate = float(
self.config and self.config.user_agent_sample_rate or 0)
if random.random() < sample_rate:
model.UserAgentLog(
repo=self.repo, sample_rate=sample_rate,
user_agent=self.request.headers.get('User-Agent'), lang=lang,
accept_charset=self.request.headers.get('Accept-Charset', ''),
ip_address=self.request.remote_addr).put()
# Sets default Content-Type header.
self.response.headers['Content-Type'] = (
'text/html; charset=%s' % self.charset)
# Check for SSL (unless running on localhost for development).
if self.https_required and self.env.domain != 'localhost':
if self.env.scheme != 'https':
return self.error(403, 'HTTPS is required.')
# Check for an authorization key.
self.auth = None
if self.params.key:
if self.repo:
# check for domain specific one.
self.auth = model.Authorization.get(self.repo, self.params.key)
if not self.auth:
# perhaps this is a global key ('*' for consistency with config).
self.auth = model.Authorization.get('*', self.params.key)
# Handlers that don't need a repository configuration can skip it.
if not self.repo:
if self.repo_required:
return self.error(400, 'No repository specified.')
return
# Everything after this requires a repo.
# Reject requests for repositories that don't exist.
if not model.Repo.get_by_key_name(self.repo):
if legacy_redirect.do_redirect(self):
return legacy_redirect.redirect(self)
html = 'No such repository. '
if self.env.repo_options:
html += 'Select:<p>' + self.render_to_string('repo-menu.html')
return self.error(404, message_html=html)
# If this repository has been deactivated, terminate with a message.
if self.config.deactivated and not self.ignore_deactivation:
self.env.language_menu = []
self.env.robots_ok = True
self.render('message.html', cls='deactivation',
message_html=self.config.deactivation_message_html)
self.terminate_response()
|
import os
import sys
import vtkAll as vtk
from ddapp import botpy
import math
import time
import types
import functools
import numpy as np
from ddapp import transformUtils
from ddapp import lcmUtils
from ddapp.timercallback import TimerCallback
from ddapp.asynctaskqueue import AsyncTaskQueue
from ddapp import objectmodel as om
from ddapp import visualization as vis
from ddapp import applogic as app
from ddapp.debugVis import DebugData
from ddapp import ik
from ddapp import ikplanner
from ddapp import ioUtils
from ddapp.simpletimer import SimpleTimer
from ddapp.utime import getUtime
from ddapp import affordanceitems
from ddapp import robotstate
from ddapp import robotplanlistener
from ddapp import segmentation
from ddapp import planplayback
from ddapp import propertyset
from ddapp import asynctaskqueue as atq
import ddapp.tasks.robottasks as rt
import ddapp.tasks.taskmanagerwidget as tmw
import drc as lcmdrc
import traceback
from PythonQt import QtCore, QtGui
class ValvePlannerDemo(object):
def __init__(self, robotModel, footstepPlanner, manipPlanner, ikPlanner, lhandDriver, rhandDriver, atlasDriver, multisenseDriver, affordanceFitFunction, sensorJointController, planPlaybackFunction, showPoseFunction):
    """Wire up the planners/drivers this demo needs and initialize the
    tunable parameters for the valve-turning behavior."""
    self.robotModel = robotModel
    self.footstepPlanner = footstepPlanner
    self.manipPlanner = manipPlanner
    self.ikPlanner = ikPlanner
    self.lhandDriver = lhandDriver
    self.rhandDriver = rhandDriver
    self.atlasDriver = atlasDriver
    self.multisenseDriver = multisenseDriver
    self.affordanceFitFunction = affordanceFitFunction
    self.sensorJointController = sensorJointController
    self.planPlaybackFunction = planPlaybackFunction
    self.showPoseFunction = showPoseFunction
    self.graspingObject = 'valve'
    self.graspingHand = 'left'
    self.valveAffordance = None

    # live operation flags
    self.useFootstepPlanner = True
    self.visOnly = True
    self.planFromCurrentRobotState = False
    useDevelopment = False
    if (useDevelopment):
        self.visOnly = True
        self.planFromCurrentRobotState = False

    self.optionalUserPromptEnabled = False
    self.requiredUserPromptEnabled = True

    self.constraintSet = None
    self.plans = []
    self.faceTransformLocal = None
    self.facePath = []
    self.scribeInAir = False
    self.palmInAngle = 30  # how much should the palm face the axis - 0 not at all, 90 entirely
    self.scribeRadius = None
    self.useLidar = True  # else use stereo depth

    # IK server speed:
    self.speedLow = 5
    self.speedHigh = 30
    self.speedTurn = 50
    if (useDevelopment):  # for simulated dev
        self.speedLow = 60
        self.speedHigh = 60

    # reach to center and back - for palm point
    self.clenchFrameXYZ = [0.0, 0.0, -0.1]
    self.clenchFrameRPY = [90, 0, 180]
    self.reachHeight = 0.0  # distance above the valve axis for the hand center
    self.reachDepth = -0.1  # distance away from valve for palm face on approach reach
    self.retractDepth = -0.15  # distance away from valve for palm face on retraction
    self.touchDepth = 0.05  # distance away from valve for palm face on approach reach
    self.nominalPelvisXYZ = None
    self.coaxialTol = 0.001
    self.shxMaxTorque = 40
    self.elxMaxTorque = 10
    # top level switch between BDI (locked base) and MIT (moving base and back)
    self.lockBack = False
    self.lockBase = True
    self.setupStance()
    self._setupSubscriptions()
def _setupSubscriptions(self):
    """Subscribe to the LCM message that triggers the autonomous test run."""
    # NOTE(review): sub0 is never used again; presumably lcmUtils keeps the
    # subscription alive internally -- confirm before removing the binding.
    sub0 = lcmUtils.addSubscriber('AUTONOMOUS_TEST_VALVE', lcmdrc.utime_t, self.autonomousTest)
def setupStance(self):
    """Choose the initial reach angle, turn amount, stance offset relative
    to the affordance, and scribe direction for the current grasping object
    and hand."""
    if (self.graspingObject == 'valve'):
        self.nextScribeAngleInitial = -60  # reach 60 degrees left of the valve spoke
        self.turnAngle = 60
        #if self.scribeInAir:
        #    self.relativeStanceXYZInitial = [-0.6, -0.2, 0.0] # stand further away when scribing in air
        #else:
        #    self.relativeStanceXYZInitial = [-0.48, -0.2, 0.0]
        #self.relativeStanceRPYInitial = [0, 0, 16]
        self.relativeStanceXYZInitial = [-1.05, 0.27, 0.0]
        self.relativeStanceRPYInitial = [0, 0, 0.1]
    else:
        self.nextScribeAngleInitial = 0  # reach right into the valve axis
        self.turnAngle = 90
        if self.scribeInAir:
            self.relativeStanceXYZInitial = [-0.6, -0.4, 0.0]  # stand further away when scribing in air
        else:
            self.relativeStanceXYZInitial = [-0.48, -0.4, 0.0]
        self.relativeStanceRPYInitial = [0, 0, 16]

    # Fixed: this previously used "is 'left'", an identity comparison with a
    # string literal that only works by CPython interning accident; use ==.
    if (self.graspingHand == 'left'):  # -1 = anticlockwise (left, default) | 1 = clockwise
        self.scribeDirection = -1
    else:
        self.scribeDirection = 1
def setNextScribeAngle(self, nextScribeAngle):
self.nextScribeAngle = nextScribeAngle
def resetTurnPath(self):
for obj in om.getObjects():
if obj.getProperty('Name') == 'face frame desired':
om.removeFromObjectModel(obj)
for obj in om.getObjects():
if obj.getProperty('Name') == 'face frame desired path':
om.removeFromObjectModel(obj)
def addPlan(self, plan):
self.plans.append(plan)
    def computeGroundFrame(self, robotModel):
        '''
        Given a robol model, returns a vtkTransform at a position between
        the feet, on the ground, with z-axis up and x-axis aligned with the
        robot pelvis x-axis.
        '''
        t1 = robotModel.getLinkFrame('l_foot')
        t2 = robotModel.getLinkFrame('r_foot')
        pelvisT = robotModel.getLinkFrame('pelvis')
        # take the pelvis x-axis expressed in world coordinates
        xaxis = [1.0, 0.0, 0.0]
        pelvisT.TransformVector(xaxis, xaxis)
        xaxis = np.array(xaxis)
        zaxis = np.array([0.0, 0.0, 1.0])
        # re-orthogonalize: project the pelvis x-axis into the ground plane
        yaxis = np.cross(zaxis, xaxis)
        yaxis /= np.linalg.norm(yaxis)
        xaxis = np.cross(yaxis, zaxis)
        # midpoint between the two foot frames, dropped by the sole offset
        stancePosition = (np.array(t2.GetPosition()) + np.array(t1.GetPosition())) / 2.0
        footHeight = 0.0811  # vertical offset from foot frame origin to the sole
        t = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
        t.PostMultiply()
        t.Translate(stancePosition)
        t.Translate([0.0, 0.0, -footHeight])
        return t
    def computeRobotStanceFrame(self, objectTransform, relativeStanceTransform):
        '''
        Given a robot model, determine the height of the ground
        using an XY and Yaw standoff, combined to determine the relative 6DOF standoff
        For a grasp or approach stance
        '''
        groundFrame = self.computeGroundFrame(self.robotModel)
        groundHeight = groundFrame.GetPosition()[2]
        graspPosition = np.array(objectTransform.GetPosition())
        # express the object's y/z axes in world coordinates
        graspYAxis = [0.0, 1.0, 0.0]
        graspZAxis = [0.0, 0.0, 1.0]
        objectTransform.TransformVector(graspYAxis, graspYAxis)
        objectTransform.TransformVector(graspZAxis, graspZAxis)
        xaxis = graspYAxis
        #xaxis = graspZAxis
        # build a yaw-only (gravity-aligned) frame at the object's XY position
        zaxis = [0, 0, 1]
        yaxis = np.cross(zaxis, xaxis)
        yaxis /= np.linalg.norm(yaxis)
        xaxis = np.cross(yaxis, zaxis)
        graspGroundTransform = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
        graspGroundTransform.PostMultiply()
        graspGroundTransform.Translate(graspPosition[0], graspPosition[1], groundHeight)
        # apply the relative standoff to get the stance goal
        robotStance = transformUtils.copyFrame( relativeStanceTransform )
        robotStance.Concatenate(graspGroundTransform)
        return robotStance
def updatePointcloudSnapshot(self):
if (self.useLidar is True):
return vis.updatePolyData(segmentation.getCurrentRevolutionData(), 'pointcloud snapshot', parent='segmentation')
else:
return vis.updatePolyData(segmentation.getDisparityPointCloud(4), 'pointcloud snapshot', parent='segmentation')
### Valve Focused Functions ######################################################################
    def segmentValveWallAuto(self, expectedValveRadius=0.195, mode='both'):
        '''Clear existing affordances and run the automatic valve/wall
        segmentation on a fresh pointcloud snapshot.'''
        om.removeFromObjectModel(om.findObjectByName('affordances'))
        # NOTE(review): grabPointcloudSnapshot is not defined in this part of
        # the class (only updatePointcloudSnapshot is) — confirm it exists
        # elsewhere, otherwise this raises AttributeError.
        self.grabPointcloudSnapshot()
        self.affordanceFitFunction(expectedValveRadius=expectedValveRadius, mode=mode)
    def onImageViewDoubleClick(self, displayPoint, modifiers, imageView):
        '''Ctrl+double-click handler: cast a ray through the clicked camera
        pixel into the pointcloud and segment a valve around the hit point.'''
        # only act on Ctrl+double-click
        if modifiers != QtCore.Qt.ControlModifier:
            return
        imagePixel = imageView.getImagePixel(displayPoint)
        cameraPos, ray = imageView.getWorldPositionAndRay(imagePixel)
        polyData = self.updatePointcloudSnapshot().polyData
        pickPoint = segmentation.extractPointsAlongClickRay(cameraPos, ray, polyData)
        # replace any previous valve fit
        om.removeFromObjectModel(om.findObjectByName('valve'))
        segmentation.segmentValveByBoundingBox(polyData, pickPoint)
        self.findAffordance()
    def computeValveStanceFrame(self):
        '''Compute and visualize the walking stance goal frame for the valve,
        stored as self.stanceFrame (hidden child of the valve affordance).'''
        objectTransform = transformUtils.copyFrame( self.clenchFrame.transform )
        self.relativeStanceTransform = transformUtils.copyFrame( transformUtils.frameFromPositionAndRPY( self.relativeStanceXYZ , self.relativeStanceRPY ) )
        # stance comes from the coaxial IK solve, not the fixed relative offset
        #robotStance = self.computeRobotStanceFrame(objectTransform, self.relativeStanceTransform)
        robotStance = self.getStanceFrameCoaxial()
        self.stanceFrame = vis.updateFrame(robotStance, 'valve grasp stance', parent=self.valveAffordance, visible=False, scale=0.2)
        self.stanceFrame.addToView(app.getDRCView())
    def spawnValveFrame(self, robotModel, height):
        '''Return a transform for a synthetic valve placed in front of the
        robot at the given height above the ground frame.'''
        position = [0.7, 0.22, height]  # ahead and slightly left of the robot
        rpy = [180, -90, 0]             # valve axis facing the robot
        t = transformUtils.frameFromPositionAndRPY(position, rpy)
        t.Concatenate(self.computeGroundFrame(robotModel))
        return t
    def spawnValveAffordance(self):
        '''Spawn a synthetic circular valve affordance (torus + spoke mesh)
        in front of the robot for testing without perception.'''
        self.graspingObject = 'valve'
        spawn_height = 1.2192 # 4ft
        radius = 0.19558 # nominal initial value. 7.7in radius metal valve
        zwidth = 0.02
        thickness = 0.0254 # i think zwidth and thickness are duplicates
        valveFrame = self.spawnValveFrame(self.robotModel, spawn_height)
        folder = om.getOrCreateContainer('affordances')
        z = DebugData()
        #z.addLine ( np.array([0, 0, -thickness]) , np.array([0, 0, thickness]), radius=radius)
        z.addTorus( radius, 0.127 )
        z.addLine(np.array([0,0,0]), np.array([radius-zwidth,0,0]), radius=zwidth) # main bar
        valveMesh = z.getPolyData()
        self.valveAffordance = vis.showPolyData(valveMesh, 'valve', color=[0.0, 1.0, 0.0], cls=affordanceitems.FrameAffordanceItem, parent=folder, alpha=0.3)
        self.valveAffordance.actor.SetUserTransform(valveFrame)
        self.valveFrame = vis.showFrame(valveFrame, 'valve frame', parent=self.valveAffordance, visible=False, scale=0.2)
        # NOTE(review): valveFrame is immediately replaced by its raw
        # transform here, unlike spawnValveLeverAffordance which keeps the
        # frame object — confirm downstream code expects a vtkTransform.
        self.valveFrame = self.valveFrame.transform
        params = dict(radius=radius, length=zwidth, xwidth=radius, ywidth=radius, zwidth=zwidth,
                      otdf_type='steering_cyl', friendly_name='valve')
        self.valveAffordance.setAffordanceParams(params)
        self.valveAffordance.updateParamsFromActorTransform()
    def spawnValveLeverAffordance(self):
        '''Spawn a synthetic lever valve affordance (a single bar) in front
        of the robot for testing without perception.'''
        self.graspingObject = 'lever'
        spawn_height = 1.06 # 3.5ft
        pipe_radius = 0.01
        lever_length = 0.33
        valveFrame = self.spawnValveFrame(self.robotModel, spawn_height)
        folder = om.getOrCreateContainer('affordances')
        z = DebugData()
        z.addLine([0,0,0], [ lever_length , 0, 0], radius=pipe_radius)
        valveMesh = z.getPolyData()
        self.valveAffordance = vis.showPolyData(valveMesh, 'lever', color=[0.0, 1.0, 0.0], cls=affordanceitems.FrameAffordanceItem, parent=folder, alpha=0.3)
        self.valveAffordance.actor.SetUserTransform(valveFrame)
        self.valveFrame = vis.showFrame(valveFrame, 'lever frame', parent=self.valveAffordance, visible=False, scale=0.2)
        otdfType = 'lever_valve'
        params = dict( radius=pipe_radius, length=lever_length, friendly_name=otdfType, otdf_type=otdfType)
        self.valveAffordance.setAffordanceParams(params)
        self.valveAffordance.updateParamsFromActorTransform()
def findAffordance(self):
self.setupAffordanceParams()
if (self.graspingObject is 'valve'):
self.findValveAffordance()
else:
self.findValveLeverAffordance()
def setupAffordanceParams(self):
self.setupStance()
self.relativeStanceXYZ = self.relativeStanceXYZInitial
self.relativeStanceRPY = self.relativeStanceRPYInitial
self.nextScribeAngle = self.nextScribeAngleInitial
# mirror stance and rotation direction for right hand:
if (self.graspingHand is 'right'):
self.relativeStanceXYZ[1] = -self.relativeStanceXYZ[1]
self.relativeStanceRPY[2] = -self.relativeStanceRPY[2]
self.nextScribeAngle = -self.nextScribeAngle
    def updateTouchAngleVisualization(self, angle):
        '''Show (or update) a thin cylinder affordance indicating the planned
        touch angle on the valve, rotated by `angle` degrees about the valve
        x-axis.  No-op when no valve affordance exists.'''
        if self.valveAffordance:
            obj = om.findObjectByName('valve touch angle')
            t = vtk.vtkTransform()
            t.PostMultiply()
            t.RotateX(angle)
            t.Concatenate(self.valveAffordance.getChildFrame().transform)
            if not obj:
                # create the indicator on first use
                pose = transformUtils.poseFromTransform(t)
                length = self.valveAffordance.getProperty('Radius')*2
                desc = dict(classname='CylinderAffordanceItem', Name='valve touch angle',
                            uuid=segmentation.newUUID(), pose=pose, Radius=0.01, Length=length, Color=[1.0, 1.0, 0.0])
                # local import avoids a circular dependency at module load time
                import affordancepanel
                obj = affordancepanel.panel.affordanceFromDescription(desc)
            obj.getChildFrame().copyFrame(t)
    def findValveAffordance(self):
        '''Find the 'valve' affordance in the object model, derive the valve
        frame, clench frame and stance frame from it, and keep them in sync
        with the affordance.  Returns silently if no valve is present.'''
        self.valveAffordance = om.findObjectByName('valve')
        if self.valveAffordance is None:
            return
        valveFrame = self.valveAffordance.getChildFrame()
        # re-orient the affordance frame into the valve working frame
        t = vtk.vtkTransform()
        t.PostMultiply()
        t.RotateX(180)
        t.RotateY(-90)
        t.Concatenate(valveFrame.transform)
        self.valveFrame = t
        self.scribeRadius = self.valveAffordance.params.get('radius')# for pointer this was (radius - 0.06)
        self.computeClenchFrame()
        self.computeValveStanceFrame()
        # clench/stance frames follow the valve, but edits to them do not
        # move the valve (ignoreIncoming)
        self.frameSync = vis.FrameSync()
        self.frameSync.addFrame(valveFrame)
        self.frameSync.addFrame(self.clenchFrame, ignoreIncoming=True)
        self.frameSync.addFrame(self.stanceFrame, ignoreIncoming=True)
        # make an affordance to visualize the scribe angle
    def findValveLeverAffordance(self):
        '''Find the 'lever' affordance and derive the clench and stance
        frames from it, keeping all frames mutually in sync.'''
        self.valveAffordance = om.findObjectByName('lever')
        self.valveFrame = om.findObjectByName('lever frame')
        # length of lever is equivalent to radius of valve
        self.scribeRadius = self.valveAffordance.params.get('length') - 0.10
        self.computeClenchFrame()
        self.computeValveStanceFrame()
        self.frameSync = vis.FrameSync()
        self.frameSync.addFrame(self.valveFrame)
        self.frameSync.addFrame(self.clenchFrame)
        self.frameSync.addFrame(self.stanceFrame)
    def computeClenchFrame(self):
        '''Compute self.clenchFrame: the hand target frame, offset from the
        valve frame by the configured clench XYZ/RPY.'''
        t = transformUtils.frameFromPositionAndRPY(self.clenchFrameXYZ, self.clenchFrameRPY)
        t_copy = transformUtils.copyFrame(t)
        t_copy.Concatenate(self.valveFrame)
        self.clenchFrame = vis.updateFrame(t_copy, 'valve clench frame', parent=self.valveAffordance, visible=False, scale=0.2)
        self.clenchFrame.addToView(app.getDRCView())
def computeTouchFrame(self, touchValve):
if touchValve:
faceDepth = self.touchDepth
else:
faceDepth = self.reachDepth
assert self.valveAffordance
t = transformUtils.frameFromPositionAndRPY([0,faceDepth,0], [0,0,0])
position = [ self.scribeRadius*math.cos( math.radians( self.nextScribeAngle )) , self.scribeRadius*math.sin( math.radians( self.nextScribeAngle )) , 0]
# roll angle governs how much the palm points along towards the rotation axis
# yaw ensures thumb faces the axis
if (self.graspingObject is 'valve'):
# valve, left and right
rpy = [90+self.palmInAngle, 0, (270+self.nextScribeAngle)]
else:
if (self.graspingHand is 'left'): # lever left
rpy = [90, 0, (180+self.nextScribeAngle)]
else:
rpy = [90, 0, self.nextScribeAngle]
t2 = transformUtils.frameFromPositionAndRPY(position, rpy)
t.Concatenate(t2)
self.faceTransformLocal = transformUtils.copyFrame(t)
t.Concatenate(self.valveFrame)
self.faceFrameDesired = vis.showFrame(t, 'face frame desired', parent=self.valveAffordance, visible=False, scale=0.2)
def drawFacePath(self):
path = DebugData()
for i in range(1,len(self.facePath)):
p0 = self.facePath[i-1].GetPosition()
p1 = self.facePath[i].GetPosition()
path.addLine ( np.array( p0 ) , np.array( p1 ), radius= 0.005)
pathMesh = path.getPolyData()
self.pointerTipLinePath = vis.showPolyData(pathMesh, 'face frame desired path', color=[0.0, 0.3, 1.0], parent=self.valveAffordance, alpha=0.6)
self.pointerTipLinePath.actor.SetUserTransform(self.valveFrame)
### End Valve Focused Functions ###############################################################
### Planning Functions ###############################################################
# These are operational conveniences:
    def planFootstepsToStance(self):
        '''Plan footsteps from the current planning pose to the precomputed
        valve stance frame.'''
        self.planFootsteps(self.stanceFrame.transform)
    def planFootsteps(self, goalFrame):
        '''Request a footstep plan from the current planning pose to the
        given goal frame; the result is stored in self.footstepPlan.'''
        startPose = self.getPlanningStartPose()
        request = self.footstepPlanner.constructFootstepPlanRequest(startPose, goalFrame)
        self.footstepPlan = self.footstepPlanner.sendFootstepPlanRequest(request, waitForResponse=True)
    def planWalking(self):
        '''Convert the current footstep plan into a walking plan and queue it.'''
        startPose = self.getPlanningStartPose()
        walkingPlan = self.footstepPlanner.sendWalkingPlanRequest(self.footstepPlan, startPose, waitForResponse=True)
        self.addPlan(walkingPlan)
    def planPreGrasp(self):
        '''Plan a posture move to the stored 'arm up pregrasp' configuration
        for the grasping hand and queue it.'''
        startPose = self.getPlanningStartPose()
        endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'General', 'arm up pregrasp', side=self.graspingHand)
        newPlan = self.ikPlanner.computePostureGoal(startPose, endPose)
        self.addPlan(newPlan)
    def planNominal(self):
        '''Plan a return to a safe nominal standing posture and queue it.'''
        startPose = self.getPlanningStartPose()
        # first stand, then merge in the stored 'safe nominal' posture
        endPose, info = self.ikPlanner.computeStandPose(startPose)
        endPose = self.ikPlanner.getMergedPostureFromDatabase(endPose, 'General', 'safe nominal')
        newPlan = self.ikPlanner.computePostureGoal(startPose, endPose)
        self.addPlan(newPlan)
    def coaxialGetPose(self, reachDepth, lockFeet=True, lockBack=None,
                       lockBase=None, resetBase=False, wristAngleCW=0,
                       startPose=None, verticalOffset=0):
        '''Run IK for a coaxial (forearm-on-valve-axis) reach to the clench frame.

        reachDepth: standoff along the clench frame y axis; >= 0 means in
            contact and additionally pins the wrist mwx joint to zero.
        lockFeet: fix both feet; otherwise allow them to slide (stance search).
        lockBack / lockBase: constraint toggles, defaulting to self.lockBack /
            self.lockBase when None.
        resetBase: reference the base posture constraint to the nominal pose
            instead of the start pose.
        wristAngleCW: desired wrist (uwy) angle, clamped to (0.01, pi-0.01);
            mirrored for the right arm.
        startPose: seed pose; defaults to the current planning start pose.
        verticalOffset: offset along the clench frame -x axis.
        Returns (pose, info) from the IK solve.
        '''
        # desired pelvis yaw: face along the valve axis
        _, _, zaxis = transformUtils.getAxesFromTransform(self.valveFrame)
        yawDesired = np.arctan2(zaxis[1], zaxis[0])
        wristAngleCW = min(np.pi-0.01, max(0.01, wristAngleCW))
        if lockBase is None:
            lockBase = self.lockBase
        if lockBack is None:
            lockBack = self.lockBack
        # hand-specific link/joint names and bounds; the right arm mirrors
        # the left (note uwy bound uses pi - wristAngleCW on the right)
        if self.graspingHand == 'left':
            larmName = 'l_larm'
            mwxJoint = 'l_arm_mwx'
            elxJoint = 'l_arm_elx'
            shxJoint = 'l_arm_shx'
            xJointLowerBound = [np.radians(45), -np.inf, 0];
            xJointUpperBound = [np.inf, np.radians(-30), 0];
            yJoints = ['l_arm_uwy']
            yJointLowerBound = [wristAngleCW]
            yJointUpperBound = [wristAngleCW]
        else:
            larmName = 'r_larm'
            mwxJoint = 'r_arm_mwx'
            elxJoint = 'r_arm_elx'
            shxJoint = 'r_arm_shx'
            yJoints = ['r_arm_uwy']
            xJointLowerBound = [-np.inf, np.radians(30), 0];
            xJointUpperBound = [np.radians(-45), np.inf, 0];
            yJointLowerBound = [np.pi - wristAngleCW]
            yJointUpperBound = [np.pi - wristAngleCW]
        if startPose is None:
            startPose = self.getPlanningStartPose()
        # nominal pose at the robot's current (or remembered) pelvis height
        nominalPose, _ = self.ikPlanner.computeNominalPose(startPose)
        if self.nominalPelvisXYZ is not None:
            nominalPose[2] = self.nominalPelvisXYZ[2]
        else:
            nominalPose[2] = startPose[2]
        nominalPoseName = 'qNomAtRobot'
        self.ikPlanner.addPose(nominalPose, nominalPoseName)
        startPoseName = 'Start'
        #startPose[5] = yawDesired
        self.ikPlanner.addPose(startPose, startPoseName)
        self.ikPlanner.reachingSide = self.graspingHand
        constraints = []
        # the non-reaching arm stays where it is
        constraints.append(self.ikPlanner.createLockedArmPostureConstraint(startPoseName))
        if resetBase:
            baseConstraintRobotPoseName = nominalPoseName
        else:
            baseConstraintRobotPoseName = startPoseName
        if lockFeet:
            constraints.extend(self.ikPlanner.createFixedFootConstraints(startPoseName))
            if lockBase:
                constraints.append(self.ikPlanner.createLockedBasePostureConstraint(baseConstraintRobotPoseName, lockLegs=False))
            else:
                constraints.append(self.ikPlanner.createXYZMovingBasePostureConstraint(baseConstraintRobotPoseName))
        else:
            # feet may slide: keep a fixed lateral separation and equal
            # fore-aft position between the feet
            constraints.append(self.ikPlanner.createXYZYawMovingBasePostureConstraint(baseConstraintRobotPoseName))
            constraints.extend(self.ikPlanner.createSlidingFootConstraints(startPose))
            p = ik.RelativePositionConstraint()
            p.bodyNameA = 'l_foot'
            p.bodyNameB = 'r_foot'
            p.positionTarget = np.array([0, 0.3, 0])
            p.lowerBound = np.array([0, 0, -np.inf])
            p.upperBound = np.array([0, 0, np.inf])
            constraints.append(p)
            p = ik.RelativePositionConstraint()
            p.bodyNameA = 'r_foot'
            p.bodyNameB = 'l_foot'
            p.lowerBound = np.array([0, -np.inf, -np.inf])
            p.upperBound = np.array([0, np.inf, np.inf])
            constraints.append(p)
        # keep the head looking at the clench point
        headGaze = ik.WorldGazeTargetConstraint(linkName='head',
                                                bodyPoint=np.zeros(3),
                                                worldPoint=np.array(self.clenchFrame.transform.GetPosition()),
                                                coneThreshold = np.radians(20))
        constraints.append(headGaze)
        # pelvis yaw within +/-20 deg of facing the valve
        p = ik.PostureConstraint()
        p.joints = ['base_yaw']
        p.jointsLowerBound = [yawDesired - np.radians(20)]
        p.jointsUpperBound = [yawDesired + np.radians(20)]
        constraints.append(p)
        if lockBack:
            constraints.append(self.ikPlanner.createLockedBackPostureConstraint(startPoseName))
        else:
            constraints.append(self.ikPlanner.createMovingBackLimitedPostureConstraint())
        constraints.append(self.ikPlanner.createKneePostureConstraint([0.7, 2.5]))
        if reachDepth >= 0:
            # in contact: pin the wrist mwx joint to zero
            elbowTol = self.coaxialTol
            wristTol = self.coaxialTol
            gazeDegreesTol = 5
            p = ik.PostureConstraint()
            #p.joints = [shxJoint, elxJoint, mwxJoint]
            #p.jointsLowerBound = xJointLowerBound
            #p.jointsUpperBound = xJointUpperBound
            p.joints = [mwxJoint]
            p.jointsLowerBound = [0]
            p.jointsUpperBound = [0]
            constraints.append(p)
        else:
            elbowTol = self.coaxialTol
            wristTol = self.coaxialTol
            gazeDegreesTol = 5
        # keep the lower arm on the valve axis (free only along clench y)
        elbowOnValveAxisConstraint = ik.PositionConstraint(linkName=larmName,
                                                           referenceFrame=self.clenchFrame.transform)
        elbowOnValveAxisConstraint.lowerBound = [elbowTol, -np.inf, elbowTol]
        elbowOnValveAxisConstraint.upperBound = [elbowTol, np.inf, elbowTol]
        constraints.append(elbowOnValveAxisConstraint)
        constraints.append(self.ikPlanner.createQuasiStaticConstraint())
        constraints.append(self.ikPlanner.createGazeGraspConstraint(self.graspingHand, self.clenchFrame, coneThresholdDegrees=gazeDegreesTol))
        # wrist rotation pinned to the requested clockwise angle
        p = ik.PostureConstraint()
        p.joints = yJoints
        p.jointsLowerBound = yJointLowerBound
        p.jointsUpperBound = yJointUpperBound
        constraints.append(p)
        # limit gravity torque on shoulder and elbow
        torqueConstraint = ik.GravityCompensationTorqueConstraint()
        torqueConstraint.joints = [shxJoint, elxJoint]
        torqueConstraint.torquesLowerBound = -np.array([self.shxMaxTorque, self.elxMaxTorque])
        torqueConstraint.torquesUpperBound = np.array([self.shxMaxTorque, self.elxMaxTorque])
        constraints.append(torqueConstraint)
        # reach target: clench frame offset by reachDepth / verticalOffset
        t = transformUtils.frameFromPositionAndRPY([-verticalOffset,reachDepth,0], [0,0,0])
        t.Concatenate(self.clenchFrame.transform)
        constraintSet = self.ikPlanner.newReachGoal(startPoseName, self.graspingHand, t, constraints, lockOrient=False)
        # tighten the final position constraint; y is exact, x/z use wristTol
        constraintSet.constraints[-1].lowerBound = np.array([-wristTol, 0, -wristTol])
        constraintSet.constraints[-1].upperBound = np.array([wristTol, 0, wristTol])
        constraintSet.nominalPoseName = nominalPoseName;
        constraintSet.startPoseName = nominalPoseName;
        return constraintSet.runIk()
    def coaxialPlan(self, reachDepth, **kwargs):
        '''Solve coaxial IK at the given reach depth and queue a posture plan
        from the current planning pose to the solution.'''
        startPose = self.getPlanningStartPose()
        touchPose, info = self.coaxialGetPose(reachDepth, **kwargs)
        plan = self.ikPlanner.computePostureGoal(startPose, touchPose)
        app.displaySnoptInfo(info)
        self.addPlan(plan)
    def coaxialPlanReach(self, verticalOffset=None, **kwargs):
        '''Plan the initial coaxial reach toward the valve (base free, base
        posture referenced to nominal).'''
        if verticalOffset is None:
            verticalOffset = self.reachHeight
        self.coaxialPlan(self.reachDepth, resetBase=True, lockBase=False, verticalOffset=verticalOffset, **kwargs)
    def coaxialPlanTouch(self, **kwargs):
        '''Plan the slow approach into contact with the valve.'''
        # slow down for the contact move, then restore normal speed
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedLow
        self.coaxialPlan(self.touchDepth, wristAngleCW=np.radians(20), **kwargs)
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedHigh
    def coaxialPlanTurn(self, **kwargs):
        '''Plan the wrist-rotation valve turn: sweep the uwy joint through
        (almost) its full range at turning speed and queue the plan.'''
        startPose = self.getPlanningStartPose()
        if self.graspingHand == 'left':
            postureJoints = {'l_arm_uwy' : np.pi - 0.01}
        else:
            postureJoints = {'r_arm_uwy' : 0.01}
        endPose = self.ikPlanner.mergePostures(startPose, postureJoints)
        # use the faster turn speed only for this plan
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedTurn
        plan = self.ikPlanner.computePostureGoal(startPose, endPose)
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedHigh
        app.displaySnoptInfo(1)
        self.addPlan(plan)
    def coaxialPlanRetract(self, **kwargs):
        '''Plan the slow retraction away from the valve with the wrist wound
        back (180 deg clockwise), base free.'''
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedLow
        self.coaxialPlan(self.retractDepth, wristAngleCW=np.radians(180), lockBase=False, **kwargs)
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedHigh
    def getStanceFrameCoaxial(self):
        '''Compute a stance (feet mid-point) frame by solving coaxial IK with
        sliding feet from a seed pose placed in front of the valve.  Also
        remembers the solved pelvis position in self.nominalPelvisXYZ.'''
        xaxis, _, _ = transformUtils.getAxesFromTransform(self.valveFrame)
        yawDesired = np.arctan2(xaxis[1], xaxis[0])
        # seed the solve half a metre back from the valve, yawed toward it
        seedDistance = 0.5
        startPose = self.ikPlanner.jointController.getPose('q_nom')
        startPose[0] -= seedDistance*xaxis[0]
        startPose[1] -= seedDistance*xaxis[1]
        startPose[5] -= yawDesired
        stancePose, info = self.coaxialGetPose(self.touchDepth, lockFeet=False,
                                               lockBase=False, lockBack=True,
                                               startPose=startPose)
        stanceRobotModel = self.ikPlanner.getRobotModelAtPose(stancePose)
        self.nominalPelvisXYZ = stancePose[:3]
        return self.footstepPlanner.getFeetMidPoint(stanceRobotModel)
        #p = ik.PostureConstraint()
        #if self.graspingHand is 'left':
            #p.joints = ['l_arm_uwy', 'l_arm_mwx']
        #constraints.append(reachingArmPostureConstraint)
        #constraints.extend(self.ikPlanner.createSlidingFootConstraints(startPose))
        #return self.ikPlanner.newReachGoal(startPoseName, self.graspingHand, self.clenchFrame, constraints, lockOrient=False)
def getPlannedTouchAngleCoaxial(self):
# when the pose is computed in getStanceFrameCoaxial, we could
# store the turn angle. This method just returns the stored value.
return 0.0
def setDesiredTouchAngleCoaxial(self, angle):
# this is the turn angle that the user wants.
# this should be close to the planned touch angle, but the user may
# adjust that value to avoid hitting the spokes.
self.updateTouchAngleVisualization(angle)
    def planReach(self):
        '''Plan a reach to the face frame at reach depth (not in contact).'''
        self.computeTouchFrame(False) # 0 = not in contact
        self.computeTouchPlan()
    def planGrasp(self):
        '''Plan a reach to the face frame at touch depth (in contact).'''
        self.computeTouchFrame(True)
        self.computeTouchPlan()
    def computeTouchPlan(self):
        '''Solve an end-effector goal to self.faceFrameDesired (seeded from a
        pregrasp nominal) and queue the resulting trajectory at low speed.'''
        # new full 6 dof constraint:
        startPose = self.getPlanningStartPose()
        nominalPose, _ = self.ikPlanner.computeNominalPose(startPose)
        self.ikPlanner.addPose(nominalPose, 'nominal_at_stance')
        # seed and bias the solve from an arm-up pregrasp at the stance
        reachNominalPose = self.ikPlanner.getMergedPostureFromDatabase(nominalPose, 'General', 'arm up pregrasp', side=self.graspingHand)
        self.ikPlanner.addPose(reachNominalPose, 'reach_nominal_at_stance')
        self.constraintSet = self.ikPlanner.planEndEffectorGoal(startPose, self.graspingHand, self.faceFrameDesired, lockBase=self.lockBase, lockBack=self.lockBack)
        self.constraintSet.nominalPoseName = 'reach_nominal_at_stance'
        self.constraintSet.seedPoseName = 'reach_nominal_at_stance'
        endPose, info = self.constraintSet.runIk()
        # slow speed for the touch trajectory, then restore
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedLow
        self.planTrajectory()
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedHigh
    def planValveTurn(self, turnDegrees=360):
        '''Plan a valve turn of turnDegrees: sample the palm path around the
        rim (~10 deg per sample), add a position/orientation constraint per
        sample, and solve one low-speed trajectory through all of them.'''
        # 10deg per sample
        numberOfSamples = int(round(turnDegrees/10.0))
        self.facePath = []
        self.resetTurnPath()
        degreeStep = float(turnDegrees) / numberOfSamples
        # stay at reach depth when scribing in air, touch depth otherwise
        tipMode = False if self.scribeInAir else True
        self.computeTouchFrame(tipMode)
        self.initConstraintSet(self.faceFrameDesired)
        self.facePath.append(self.faceTransformLocal)
        for i in xrange(numberOfSamples):
            self.nextScribeAngle += self.scribeDirection*degreeStep
            self.computeTouchFrame(tipMode)
            # keyframe i+1 along the trajectory timeline
            self.appendPositionOrientationConstraintForTargetFrame(self.faceFrameDesired, i+1)
            self.facePath.append(self.faceTransformLocal)
        self.drawFacePath()
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedLow
        self.planTrajectory()
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedHigh
    def initConstraintSet(self, goalFrame):
        '''Initialize self.constraintSet for the turn trajectory: moving-body
        constraints with the non-grasping arm locked, plus a knee posture
        constraint.  Keyframe constraints are appended separately.'''
        # create constraint set
        startPose = self.getPlanningStartPose()
        startPoseName = 'gaze_plan_start'
        endPoseName = 'gaze_plan_end'
        self.ikPlanner.addPose(startPose, startPoseName)
        self.ikPlanner.addPose(startPose, endPoseName)
        self.constraintSet = ikplanner.ConstraintSet(self.ikPlanner, [], startPoseName, endPoseName)
        self.constraintSet.endPose = startPose
        # add body constraints
        bodyConstraints = self.ikPlanner.createMovingBodyConstraints(startPoseName, lockBase=self.lockBase, lockBack=self.lockBack, lockLeftArm=self.graspingHand=='right', lockRightArm=self.graspingHand=='left')
        self.constraintSet.constraints.extend(bodyConstraints)
        self.constraintSet.constraints.append(self.ikPlanner.createKneePostureConstraint([0.6, 2.5]))
        # add gaze constraint - TODO: this gaze constraint shouldn't be necessary, fix
        self.graspToHandLinkFrame = self.ikPlanner.newGraspToHandFrame(self.graspingHand)
        #gazeConstraint = self.ikPlanner.createGazeGraspConstraint(self.graspingHand, goalFrame, self.graspToHandLinkFrame, coneThresholdDegrees=5.0)
        #self.constraintSet.constraints.insert(0, gazeConstraint)
    def appendDistanceConstraint(self):
        '''Constrain the grasp point to stay exactly scribeRadius away from
        the valve frame (hand rides on the rim).'''
        # add point to point distance constraint
        c = ikplanner.ik.PointToPointDistanceConstraint()
        c.bodyNameA = self.ikPlanner.getHandLink(self.graspingHand)
        c.bodyNameB = 'world'
        c.pointInBodyA = self.graspToHandLinkFrame
        c.pointInBodyB = self.valveFrame
        c.lowerBound = [self.scribeRadius]
        c.upperBound = [self.scribeRadius]
        self.constraintSet.constraints.insert(0, c)
    def appendPositionOrientationConstraintForTargetFrame(self, goalFrame, t):
        '''Add position+orientation grasp constraints active only at
        trajectory time t (a keyframe for the turn trajectory).'''
        positionConstraint, orientationConstraint = self.ikPlanner.createPositionOrientationGraspConstraints(self.graspingHand, goalFrame, self.graspToHandLinkFrame)
        positionConstraint.tspan = [t, t]
        orientationConstraint.tspan = [t, t]
        self.constraintSet.constraints.append(positionConstraint)
        self.constraintSet.constraints.append(orientationConstraint)
    def planTrajectory(self):
        '''Solve the queued constraint set as a trajectory and queue the plan.'''
        self.ikPlanner.ikServer.usePointwise = False
        plan = self.constraintSet.runIkTraj()
        self.addPlan(plan)
########## Glue Functions ####################################
    def moveRobotToStanceFrame(self, frame):
        '''Teleport the simulated robot to the given stance frame (vis-only
        alternative to walking there).'''
        self.sensorJointController.setPose('q_nom')
        stancePosition = frame.GetPosition()
        stanceOrientation = frame.GetOrientation()
        q = self.sensorJointController.q.copy()
        # overwrite base x, y and yaw; keep height and the rest of the pose
        q[:2] = [stancePosition[0], stancePosition[1]]
        q[5] = math.radians(stanceOrientation[2])
        self.sensorJointController.setPose('EST_ROBOT_STATE', q)
def getHandDriver(self, side):
assert side in ('left', 'right')
return self.lhandDriver if side == 'left' else self.rhandDriver
def openHand(self,side):
#self.handDriver(side).sendOpen()
self.getHandDriver(side).sendCustom(0.0, 100.0, 100.0, 0)
def openPinch(self,side):
self.getHandDriver(side).sendCustom(20.0, 100.0, 100.0, 1)
def closeHand(self, side):
#self.handDriver(side).sendClose(60)
self.getHandDriver(side).sendCustom(100.0, 100.0, 100.0, 0)
def sendNeckPitchLookDown(self):
self.multisenseDriver.setNeckPitch(40)
def sendNeckPitchLookForward(self):
self.multisenseDriver.setNeckPitch(15)
def waitForAtlasBehaviorAsync(self, behaviorName):
assert behaviorName in self.atlasDriver.getBehaviorMap().values()
while self.atlasDriver.getCurrentBehaviorName() != behaviorName:
yield
    def printAsync(self, s):
        '''Async task (generator): print the message when the task runs.'''
        yield
        print s
    def optionalUserPrompt(self, message):
        '''Async task (generator): prompt the operator and abort the task
        queue unless they answer 'y'.  Skipped entirely when
        optionalUserPromptEnabled is off.'''
        if not self.optionalUserPromptEnabled:
            return
        yield
        result = raw_input(message)
        if result != 'y':
            raise Exception('user abort.')
    def requiredUserPrompt(self, message):
        '''Async task (generator): like optionalUserPrompt but gated by the
        requiredUserPromptEnabled flag.'''
        if not self.requiredUserPromptEnabled:
            return
        yield
        result = raw_input(message)
        if result != 'y':
            raise Exception('user abort.')
    def delay(self, delayTimeInSeconds):
        '''Async task (generator): yield until the given wall-clock delay has
        elapsed.'''
        yield
        t = SimpleTimer()
        while t.elapsed() < delayTimeInSeconds:
            yield
    def waitForCleanLidarSweepAsync(self):
        '''Async task (generator): yield until two full new lidar revolutions
        have been displayed (guarantees a fresh, complete sweep).'''
        currentRevolution = self.multisenseDriver.displayedRevolution
        desiredRevolution = currentRevolution + 2
        while self.multisenseDriver.displayedRevolution < desiredRevolution:
            yield
def getEstimatedRobotStatePose(self):
return self.sensorJointController.getPose('EST_ROBOT_STATE')
def getPlanningStartPose(self):
if self.planFromCurrentRobotState:
return self.getEstimatedRobotStatePose()
else:
if self.plans:
return robotstate.convertStateMessageToDrakePose(self.plans[-1].plan[-1])
else:
return self.getEstimatedRobotStatePose()
    def cleanupFootstepPlans(self):
        '''Remove footstep visualizations from the object model and drop the
        stored footstep plan.'''
        om.removeFromObjectModel(om.findObjectByName('walking goal'))
        om.removeFromObjectModel(om.findObjectByName('footstep plan'))
        self.footstepPlan = None
def playSequenceNominal(self):
assert None not in self.plans
self.planPlaybackFunction(self.plans)
    def commitManipPlan(self):
        '''Send the most recently queued manipulation plan for execution.'''
        self.manipPlanner.commitManipPlan(self.plans[-1])
    def commitFootstepPlan(self):
        '''Send the current footstep plan for execution.'''
        self.footstepPlanner.commitFootstepPlan(self.footstepPlan)
    def waitForPlanExecution(self):
        '''Async task (generator): yield until a manipulation starts and then
        finishes (controller leaves the 'manipulating' state).'''
        while self.atlasDriver.getControllerStatus() != 'manipulating':
            yield
        while self.atlasDriver.getControllerStatus() == 'manipulating':
            yield
    def waitForWalkExecution(self):
        '''Async task (generator): yield until walking starts and then
        finishes (controller leaves the 'walking' state).'''
        while self.atlasDriver.getControllerStatus() != 'walking':
            yield
        while self.atlasDriver.getControllerStatus() == 'walking':
            yield
    def waitForPlanAnimation(self, plan):
        '''Return an async delay task lasting as long as the plan's playback.'''
        planElapsedTime = planplayback.PlanPlayback.getPlanElapsedTime(plan)
        print 'waiting for plan animation:', planElapsedTime
        return self.delay(planElapsedTime)
    def animateLastPlan(self):
        '''Animate the last queued plan: in vis-only mode just wait for the
        animation; otherwise commit it to the robot and wait for execution.'''
        plan = self.plans[-1]
        if self.visOnly:
            return self.waitForPlanAnimation(plan)
        else:
            self.commitManipPlan()
            return self.waitForPlanExecution()
######### Nominal Plans and Execution #################################################################
    def planSequence(self):
        '''Plan (not execute) the full valve task offline: approach, reach,
        grasp, turn, retract and return to nominal, then play it back.'''
        self.cleanupFootstepPlans()
        self.resetTurnPath()
        # plan each step from the previous plan's end, not the live robot
        self.planFromCurrentRobotState = False
        self.findAffordance()
        self.plans = []
        # Approach valve:
        if self.useFootstepPlanner:
            self.planFootstepsToStance()
            self.planWalking()
        else:
            # vis-only shortcut: teleport to the stance frame
            self.moveRobotToStanceFrame(self.stanceFrame.transform )
        # Reach and Turn:
        self.planPreGrasp()
        self.planReach()
        self.planGrasp()
        self.planValveTurn(self.turnAngle)
        # Dereach and Stand
        self.planReach()
        self.planPreGrasp()
        self.planNominal()
        self.playSequenceNominal()
    def autonomousTest(self, msg):
        '''LCM callback for AUTONOMOUS_TEST_VALVE: build and start the
        autonomous valve execution task queue.'''
        print "Got the autonomousTest message, executing valve test sequence"
        q = self.autonomousExecute()
        q.start()
    def sendAutonmousTestDone(self):
        '''Publish the done message for the autonomous valve test.
        (Method name typo 'Autonmous' kept for caller compatibility.)'''
        msg = lcmdrc.utime_t()
        msg.utime = getUtime()
        lcmUtils.publish('AUTONOMOUS_TEST_VALVE_DONE', msg)
    def autonomousExecute(self):
        '''Build the full autonomous valve task queue (fit, walk, re-fit,
        pregrasp, three turn cycles) and return it unstarted.'''
        # execute live against the robot, replanning from its actual state
        self.planFromCurrentRobotState = True
        self.visOnly = False
        self.nextScribeAngle = 45
        self.turnAngle=70
        self.graspingHand='right'
        taskQueue = AsyncTaskQueue()
        taskQueue.addTask(self.resetTurnPath)
        # Approach valve:
        taskQueue.addTask(self.waitForCleanLidarSweepAsync)
        taskQueue.addTask( functools.partial(self.segmentValveWallAuto, 0.23, self.graspingObject) )
        taskQueue.addTask(self.optionalUserPrompt('Accept valve fit, continue? y/n: '))
        taskQueue.addTask(self.findAffordance)
        taskQueue.addTask(self.printAsync('Plan and execute walking'))
        taskQueue.addTask(self.planFootstepsToStance)
        taskQueue.addTask(self.optionalUserPrompt('Send footstep plan. continue? y/n: '))
        taskQueue.addTask(self.commitFootstepPlan)
        taskQueue.addTask(self.waitForWalkExecution)
        # Fit the Valve:
        taskQueue.addTask(self.printAsync('Wait for sweep'))
        taskQueue.addTask(self.waitForCleanLidarSweepAsync)
        taskQueue.addTask( functools.partial(self.segmentValveWallAuto, 0.23, self.graspingObject) )
        taskQueue.addTask(self.optionalUserPrompt('Accept valve re-fit, continue? y/n: '))
        taskQueue.addTask(self.findAffordance)
        # Move arm to pregrasp:
        taskQueue.addTask(self.printAsync('Pre grasp'))
        taskQueue.addTask(self.planPreGrasp)
        taskQueue.addTask(self.optionalUserPrompt('Continue? y/n: '))
        taskQueue.addTask(self.animateLastPlan)
        # three consecutive turn cycles, each resuming from the last angle
        taskQueue.addTask(self.printAsync('Turn 1'))
        taskQueue = self.addAutomousValveTurn(taskQueue, self.nextScribeAngle)
        taskQueue.addTask(self.printAsync('Turn 2'))
        taskQueue = self.addAutomousValveTurn(taskQueue, self.nextScribeAngle)
        taskQueue.addTask(self.printAsync('Turn 3'))
        taskQueue = self.addAutomousValveTurn(taskQueue, self.nextScribeAngle)
        taskQueue.addTask(self.printAsync('done!'))
        taskQueue.addTask(self.sendAutonmousTestDone)
        return taskQueue
    def autonomousExecuteTurn(self):
        '''
        Turn a valve by the turnAngle and then retract
        As initial conditions: assumes robot has hand in reach or pregrasp position
        '''
        # live execution, single clockwise turn with the left hand
        self.planFromCurrentRobotState = True
        self.visOnly = False
        self.graspingHand='left'
        self.scribeDirection = 1
        taskQueue = AsyncTaskQueue()
        taskQueue.addTask(self.resetTurnPath)
        taskQueue.addTask(self.printAsync('Turn 1'))
        taskQueue = self.addAutomousValveTurn(taskQueue, self.nextScribeAngle)
        taskQueue.addTask(self.printAsync('done!'))
        return taskQueue
    def addAutomousValveTurn(self,taskQueue, nextScribeAngle):
        '''Append one full turn cycle to taskQueue (reach, grasp, close hand,
        turn, open hand, de-reach) starting at nextScribeAngle, and return
        the queue.  (Method name typo 'Automous' kept for compatibility.)'''
        taskQueue.addTask(functools.partial( self.setNextScribeAngle, nextScribeAngle))
        taskQueue.addTask(self.printAsync('Reach'))
        taskQueue.addTask(self.planReach)
        taskQueue.addTask(self.optionalUserPrompt('Continue? y/n: '))
        taskQueue.addTask(self.animateLastPlan)
        taskQueue.addTask(self.printAsync('Reach'))
        taskQueue.addTask(self.planGrasp)
        taskQueue.addTask(self.optionalUserPrompt('Continue? y/n: '))
        taskQueue.addTask(self.animateLastPlan)
        taskQueue.addTask(functools.partial(self.closeHand,self.graspingHand))
        taskQueue.addTask(self.printAsync('Turn'))
        taskQueue.addTask(functools.partial( self.planValveTurn, self.turnAngle))
        taskQueue.addTask(self.optionalUserPrompt('Continue? y/n: '))
        taskQueue.addTask(self.animateLastPlan)
        taskQueue.addTask(functools.partial(self.openHand,self.graspingHand))
        taskQueue.addTask(self.printAsync('Dereach'))
        taskQueue.addTask(self.planReach)
        taskQueue.addTask(self.optionalUserPrompt('Continue? y/n: '))
        taskQueue.addTask(self.animateLastPlan)
        return taskQueue
import PythonQt
from PythonQt import QtCore, QtGui, QtUiTools
def addWidgetsToDict(widgets, d):
    '''Recursively index widgets and their descendants by objectName into d.'''
    for child in widgets:
        name = child.objectName
        # Anonymous widgets are not indexed, only traversed.
        if name:
            d[str(name)] = child
        addWidgetsToDict(child.children(), d)
class WidgetDict(object):
    '''Exposes every named descendant widget as an attribute (self.<objectName>).'''
    def __init__(self, widgets):
        # Populate this object's attribute dict directly from the widget tree.
        addWidgetsToDict(widgets, self.__dict__)
class ValveTaskPanel(object):
    '''UI panel that drives a ValvePlannerDemo through the valve-turning task.'''

    def __init__(self, valveDemo):
        self.valveDemo = valveDemo
        # Panel-specific overrides of the demo's default tuning.
        self.valveDemo.reachDepth = -0.1
        self.valveDemo.speedLow = 10

        # Load the panel widgets from the compiled Qt resource file.
        loader = QtUiTools.QUiLoader()
        uifile = QtCore.QFile(':/ui/ddValveTaskPanel.ui')
        assert uifile.open(uifile.ReadOnly)
        self.widget = loader.load(uifile)
        self.ui = WidgetDict(self.widget.children())

        # Wire the manual-operation buttons to demo/panel actions.
        self.ui.startButton.connect('clicked()', self.onStartClicked)
        self.ui.footstepsButton.connect('clicked()', self.valveDemo.planFootstepsToStance)
        self.ui.raiseArmButton.connect('clicked()', self.valveDemo.planPreGrasp)
        self.ui.reachButton.connect('clicked()', self.reach)
        self.ui.touchButton.connect('clicked()', self.grasp)
        self.ui.turnButton.connect('clicked()', self.turnValve)
        self.ui.fingersButton.connect('clicked()', self.setFingers)
        self.ui.retractButton.connect('clicked()', self.retract)
        self.ui.nominalButton.connect('clicked()', self.valveDemo.planNominal)

        # Layout is created for its side effect on imageFrame; the local is
        # otherwise unused.
        l = QtGui.QVBoxLayout(self.ui.imageFrame)

        self._setupParams()
        self._setupPropertiesPanel()
        self._syncProperties()
        self._initTaskPanel()
        self._initTasks()
def onStartClicked(self):
    '''Locate the valve affordance and seed the touch-angle property from it.'''
    self.valveDemo.findAffordance()
    if self.valveDemo.valveAffordance is not None:
        print 'Valve Demo: Start - Ready to proceed'
    else:
        print 'Valve Demo: Start - VALVE AFFORDANCE NOT FOUND'
    # now get the planned turn angle and show it to the user
    # NOTE(review): this runs even when no affordance was found -- confirm intended.
    self.params.setProperty('Touch angle (deg)', self.valveDemo.getPlannedTouchAngleCoaxial())
def resetTouchAngle(self):
    '''Re-find the valve affordance and refresh the touch-angle property.'''
    self.valveDemo.findAffordance()
    self.params.setProperty('Touch angle (deg)', self.valveDemo.getPlannedTouchAngleCoaxial())
def closeHand(self):
    '''Close the currently selected grasping hand.'''
    self.valveDemo.closeHand(self.valveDemo.graspingHand)

def setFingers(self):
    '''Apply the openPinch preshape to the selected hand.'''
    self.valveDemo.openPinch(self.valveDemo.graspingHand)

def reach(self):
    '''Delegate: plan the coaxial reach (approach in front of the valve).'''
    self.valveDemo.coaxialPlanReach()

def grasp(self):
    '''Delegate: plan the coaxial touch (insert hand onto the valve).'''
    self.valveDemo.coaxialPlanTouch()

def turnValve(self):
    '''Delegate: plan the coaxial valve turn.'''
    self.valveDemo.coaxialPlanTurn()

def retract(self):
    '''Delegate: plan the coaxial retract (withdraw from the valve).'''
    self.valveDemo.coaxialPlanRetract()
def _setupParams(self):
    '''Create the user-editable task parameters shown in the properties panel.'''
    self.params = om.ObjectModelItem('Valve Task Params')
    # Default index 1 selects 'Right'.
    self.params.addProperty('Hand', 1, attributes=om.PropertyAttributes(enumNames=['Left', 'Right']))
    self.params.addProperty('Turn direction', 0, attributes=om.PropertyAttributes(enumNames=['Clockwise', 'Counter clockwise']))
    self.params.addProperty('Touch angle (deg)', 0)
    #self.params.addProperty('Turn amount (deg)', 60)
    # Push any user edit straight into the demo object.
    self.params.properties.connectPropertyChanged(self.onPropertyChanged)
def _setupPropertiesPanel(self):
    '''Embed a properties panel in the UI and bind it to the task params.'''
    l = QtGui.QVBoxLayout(self.ui.propertyFrame)
    l.setMargin(0)
    self.propertiesPanel = PythonQt.dd.ddPropertiesPanel()
    self.propertiesPanel.setBrowserModeToWidget()
    l.addWidget(self.propertiesPanel)
    # Keeps panel widgets and the params property set synchronized.
    self.panelConnector = propertyset.PropertyPanelConnector(self.params.properties, self.propertiesPanel)
def onPropertyChanged(self, propertySet, propertyName):
    # Any single parameter edit re-syncs the whole demo configuration.
    self._syncProperties()

def _syncProperties(self):
    '''Copy the panel's parameter values onto the valve demo object.'''
    self.valveDemo.planFromCurrentRobotState = True
    self.valveDemo.visOnly = False
    self.valveDemo.graspingHand = self.params.getPropertyEnumValue('Hand').lower()
    # scribeDirection: 1 = clockwise, -1 = counter clockwise
    self.valveDemo.scribeDirection = 1 if self.params.getPropertyEnumValue('Turn direction') == 'Clockwise' else -1
    self.valveDemo.setDesiredTouchAngleCoaxial(self.params.getProperty('Touch angle (deg)'))
    #self.valveDemo.turnAngle = self.params.getProperty('Turn amount (deg)')
def onContinue(self):
self.completedTasks = []
self.taskQueue.reset()
for obj in self.taskTree.getSelectedTasks():
self.taskQueue.addTask(obj.task)
self.taskQueue.start()
def onStep(self):
assert not self.taskQueue.isRunning
tasks = self.taskTree.getSelectedTasks()
if not tasks:
return
task = tasks[0].task
self.nextStepTask = tasks[1].task if len(tasks) > 1 else None
self.completedTasks = []
self.taskQueue.reset()
self.taskQueue.addTask(task)
self.taskQueue.start()
def onPause(self):
if not self.taskQueue.isRunning:
return
self.nextStepTask = None
currentTask = self.taskQueue.currentTask
self.taskQueue.stop()
if currentTask:
currentTask.stop()
self.appendMessage('<font color="red">paused</font>')
def onTaskStarted(self, taskQueue, task):
    '''Queue callback: log the new task, select it, and pause at visible steps.'''
    msg = task.properties.getProperty('Name') + ' ... <font color="green">start</font>'
    self.appendMessage(msg)

    self.taskTree.selectTask(task)
    item = self.taskTree.findTaskItem(task)
    # After at least one task has completed, stop before every task whose tree
    # item is marked 'Visible' so the operator can review. PauseException
    # halts the queue without treating the task as failed.
    if len(self.completedTasks) and item.getProperty('Visible'):
        self.appendMessage('<font color="red">paused</font>')
        raise atq.AsyncTaskQueue.PauseException()
def onTaskEnded(self, taskQueue, task):
    '''Queue callback: log completion and advance the tree selection.'''
    msg = task.properties.getProperty('Name') + ' ... <font color="green">end</font>'
    self.appendMessage(msg)
    self.completedTasks.append(task)

    if self.taskQueue.tasks:
        # More work queued: highlight the next task to run.
        self.taskTree.selectTask(self.taskQueue.tasks[0])
    elif self.nextStepTask:
        # Single-step mode: highlight the task the next Step would run.
        self.taskTree.selectTask(self.nextStepTask)
    #else:
    #    self.taskTree.selectTask(self.completedTasks[0])
def onTaskFailed(self, taskQueue, task):
    '''Queue callback: log a task failure with its reason.'''
    msg = task.properties.getProperty('Name') + ' ... <font color="red">failed: %s</font>' % task.failReason
    self.appendMessage(msg)

def onTaskPaused(self, taskQueue, task):
    '''Queue callback: log that a task paused the queue.'''
    msg = task.properties.getProperty('Name') + ' ... <font color="red">paused</font>'
    self.appendMessage(msg)

def onTaskException(self, taskQueue, task):
    '''Queue callback: log the traceback of an exception raised by a task.'''
    msg = task.properties.getProperty('Name') + ' ... <font color="red">exception:\n\n%s</font>' % traceback.format_exc()
    self.appendMessage(msg)
def appendMessage(self, msg):
if msg == self.lastStatusMessage:
return
self.lastStatusMessage = msg
self.ui.outputConsole.append(msg.replace('\n', '<br/>'))
#print msg
def updateTaskStatus(self):
currentTask = self.taskQueue.currentTask
if not currentTask or not currentTask.statusMessage:
return
name = currentTask.properties.getProperty('Name')
status = currentTask.statusMessage
msg = name + ': ' + status
self.appendMessage(msg)
def onAcceptPrompt(self):
self.promptTask.accept()
self.promptTask = None
self.ui.promptLabel.text = ''
self.ui.promptAcceptButton.enabled = False
self.ui.promptRejectButton.enabled = False
def onRejectPrompt(self):
self.promptTask.reject()
self.promptTask = None
self.ui.promptLabel.text = ''
self.ui.promptAcceptButton.enabled = False
self.ui.promptRejectButton.enabled = False
def onTaskPrompt(self, task, message):
self.promptTask = task
self.ui.promptLabel.text = message
self.ui.promptAcceptButton.enabled = True
self.ui.promptRejectButton.enabled = True
def _initTaskPanel(self):
    '''Create the task queue, wire its callbacks, and build the task tree UI.'''
    self.lastStatusMessage = ''
    self.nextStepTask = None
    # Fixed: completedTasks was initialized twice in the original; once is enough.
    self.completedTasks = []

    self.taskQueue = atq.AsyncTaskQueue()
    self.taskQueue.connectTaskStarted(self.onTaskStarted)
    self.taskQueue.connectTaskEnded(self.onTaskEnded)
    self.taskQueue.connectTaskPaused(self.onTaskPaused)
    self.taskQueue.connectTaskFailed(self.onTaskFailed)
    self.taskQueue.connectTaskException(self.onTaskException)

    # Poll the running task's status message into the console at 2 Hz.
    self.timer = TimerCallback(targetFps=2)
    self.timer.callback = self.updateTaskStatus
    self.timer.start()

    # Route task prompts and task prints through this panel's widgets.
    rt.UserPromptTask.promptFunction = self.onTaskPrompt
    rt.PrintTask.printFunction = self.appendMessage

    self.taskTree = tmw.TaskTree()
    self.ui.taskFrame.layout().insertWidget(0, self.taskTree.treeWidget)

    l = QtGui.QVBoxLayout(self.ui.taskPropertiesGroupBox)
    l.addWidget(self.taskTree.propertiesPanel)
    PythonQt.dd.ddGroupBoxHider(self.ui.taskPropertiesGroupBox)

    self.ui.taskStepButton.connect('clicked()', self.onStep)
    self.ui.taskContinueButton.connect('clicked()', self.onContinue)
    self.ui.taskPauseButton.connect('clicked()', self.onPause)
    self.ui.promptAcceptButton.connect('clicked()', self.onAcceptPrompt)
    self.ui.promptRejectButton.connect('clicked()', self.onRejectPrompt)
    self.ui.promptAcceptButton.enabled = False
    self.ui.promptRejectButton.enabled = False
def _initTasks(self):
    '''Build the automated task tree: prep, fit, walk to stance, refit, turns.'''

    # some helpers
    def addTask(task, parent=None):
        self.taskTree.onAddTask(task, copy=False, parent=parent)
    def addFunc(func, name, parent=None):
        addTask(rt.CallbackTask(callback=func, name=name), parent=parent)
    def addValveTurn(parent=None):
        # One reach/insert/turn/retract cycle; every manip plan is user-approved.
        group = self.taskTree.addGroup('Valve Turn', parent=parent)
        # valve manip actions
        addFunc(v.coaxialPlanReach, name='plan reach to valve', parent=group)
        addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'), parent=group)
        addFunc(v.commitManipPlan, name='execute manip plan', parent=group)
        addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'), parent=group)
        addFunc(v.coaxialPlanTouch, name='plan insert in valve', parent=group)
        addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'), parent=group)
        addFunc(v.commitManipPlan, name='execute manip plan', parent=group)
        addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'), parent=group)
        addFunc(v.coaxialPlanTurn, name='plan turn valve', parent=group)
        addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'), parent=group)
        addFunc(v.commitManipPlan, name='execute manip plan', parent=group)
        addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'), parent=group)
        addFunc(v.coaxialPlanRetract, name='plan retract', parent=group)
        addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'), parent=group)
        addFunc(v.commitManipPlan, name='execute manip plan', parent=group)
        addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'), parent=group)

    v = self.valveDemo
    self.taskTree.removeAllTasks()
    side = self.params.getPropertyEnumValue('Hand')

    ###############
    # add the tasks

    # prep
    addTask(rt.CloseHand(name='close left hand', side='Left'))
    addTask(rt.CloseHand(name='close right hand', side='Right'))
    addTask(rt.SetNeckPitch(name='set neck position', angle=0))
    addTask(rt.PlanPostureGoal(name='plan walk posture', postureGroup='General', postureName='safe nominal', side='Default'))
    addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'))
    addTask(rt.CommitManipulationPlan(name='execute manip plan', planName='safe nominal posture plan'))
    addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'))

    # fit
    addTask(rt.WaitForMultisenseLidar(name='wait for lidar sweep'))
    addTask(rt.UserPromptTask(name='fit valve', message='Please fit and approve valve affordance.'))
    addTask(rt.FindAffordance(name='check valve affordance', affordanceName='valve'))
    addFunc(self.resetTouchAngle, name='plan stance location')

    # walk
    addTask(rt.RequestFootstepPlan(name='plan walk to valve', stanceFrameName='valve grasp stance'))
    addTask(rt.UserPromptTask(name='approve footsteps', message='Please approve footstep plan.'))
    addTask(rt.CommitFootstepPlan(name='walk to valve', planName='valve grasp stance footstep plan'))
    addTask(rt.WaitForWalkExecution(name='wait for walking'))

    # refit
    addTask(rt.SetNeckPitch(name='set neck position', angle=35))
    addTask(rt.WaitForMultisenseLidar(name='wait for lidar sweep'))
    # BUGFIX: task name was 'fit value'; renamed to match the 'fit valve' task above.
    addTask(rt.UserPromptTask(name='fit valve', message='Please fit and approve valve affordance.'))
    addFunc(self.resetTouchAngle, name='check valve affordance')
    #addTask(rt.UserPromptTask(name='approve spoke location', message='Please approve valve spokes and touch angle.'))

    # set fingers
    addTask(rt.CloseHand(name='set finger positions', side=side, mode='Basic', amount=20))

    # add valve turns
    for _ in range(5):
        addValveTurn()
Changes from testing on robot
import os
import sys
import vtkAll as vtk
from ddapp import botpy
import math
import time
import types
import functools
import numpy as np
from ddapp import transformUtils
from ddapp import lcmUtils
from ddapp.timercallback import TimerCallback
from ddapp.asynctaskqueue import AsyncTaskQueue
from ddapp import objectmodel as om
from ddapp import visualization as vis
from ddapp import applogic as app
from ddapp.debugVis import DebugData
from ddapp import ik
from ddapp import ikplanner
from ddapp import ioUtils
from ddapp.simpletimer import SimpleTimer
from ddapp.utime import getUtime
from ddapp import affordanceitems
from ddapp import robotstate
from ddapp import robotplanlistener
from ddapp import segmentation
from ddapp import planplayback
from ddapp import propertyset
from ddapp import asynctaskqueue as atq
import ddapp.tasks.robottasks as rt
import ddapp.tasks.taskmanagerwidget as tmw
import drc as lcmdrc
import traceback
from PythonQt import QtCore, QtGui
class ValvePlannerDemo(object):
    '''Planners and state for the DRC valve (and lever) turning task.'''

    def __init__(self, robotModel, footstepPlanner, manipPlanner, ikPlanner, lhandDriver, rhandDriver, atlasDriver, multisenseDriver, affordanceFitFunction, sensorJointController, planPlaybackFunction, showPoseFunction):
        # Collaborators (planners, drivers, callbacks) injected by the application.
        self.robotModel = robotModel
        self.footstepPlanner = footstepPlanner
        self.manipPlanner = manipPlanner
        self.ikPlanner = ikPlanner
        self.lhandDriver = lhandDriver
        self.rhandDriver = rhandDriver
        self.atlasDriver = atlasDriver
        self.multisenseDriver = multisenseDriver
        self.affordanceFitFunction = affordanceFitFunction
        self.sensorJointController = sensorJointController
        self.planPlaybackFunction = planPlaybackFunction
        self.showPoseFunction = showPoseFunction
        self.graspingObject='valve'   # 'valve' or 'lever'
        self.graspingHand='left'
        self.valveAffordance = None

        # live operation flags
        self.useFootstepPlanner = True
        self.visOnly = True
        self.planFromCurrentRobotState = False
        useDevelopment = False
        if (useDevelopment):
            self.visOnly = True
            self.planFromCurrentRobotState = False

        self.optionalUserPromptEnabled = False
        self.requiredUserPromptEnabled = True

        self.constraintSet = None
        self.plans = []   # history of all plans produced during the demo

        self.faceTransformLocal = None
        self.facePath = []

        self.scribeInAir = False
        self.palmInAngle = 30 # how much should the palm face the axis - 0 not at all, 90 entirely
        self.scribeRadius = None
        self.useLidar = True # else use stereo depth

        # IK server speed:
        self.speedLow = 5
        self.speedHigh = 30
        self.speedTurn = 50
        if (useDevelopment): # for simulated dev
            self.speedLow = 60
            self.speedHigh = 60

        # reach to center and back - for palm point
        self.clenchFrameXYZ = [0.0, 0.0, -0.1]
        self.clenchFrameRPY = [90, 0, 180]
        self.reachHeight = 0.0 # distance above the valve axis for the hand center
        self.reachDepth = -0.1 # distance away from valve for palm face on approach reach
        self.retractDepth = -0.05 # distance away from valve for palm face on retraction
        self.touchDepth = 0.05 # distance away from valve for palm face on approach reach

        self.nominalPelvisXYZ = None

        # Tolerances for the coaxial (hand-on-valve-axis) IK constraints.
        self.coaxialTol = 0.001
        self.coaxialGazeTol = 2
        # Gravity-compensation torque limits for the shx and elx arm joints.
        self.shxMaxTorque = 40
        self.elxMaxTorque = 10

        # top level switch between BDI (locked base) and MIT (moving base and back)
        self.lockBack = False
        self.lockBase = True

        self.setupStance()
        self._setupSubscriptions()
def _setupSubscriptions(self):
    '''Subscribe to the LCM message that triggers the autonomous valve test.'''
    # Fixed: the subscriber was bound to an unused local ('sub0'); the return
    # value is not needed here.
    lcmUtils.addSubscriber('AUTONOMOUS_TEST_VALVE', lcmdrc.utime_t, self.autonomousTest)
def setupStance(self):
if (self.graspingObject == 'valve'):
self.nextScribeAngleInitial = -60 # reach 60 degrees left of the valve spoke
self.turnAngle=60
#if self.scribeInAir:
# self.relativeStanceXYZInitial = [-0.6, -0.2, 0.0] # stand further away when scribing in air
#else:
# self.relativeStanceXYZInitial = [-0.48, -0.2, 0.0]
#self.relativeStanceRPYInitial = [0, 0, 16]
self.relativeStanceXYZInitial = [-1.05, 0.27, 0.0]
self.relativeStanceRPYInitial = [0, 0, 0.1]
else:
self.nextScribeAngleInitial = 0 # reach right into the valve axis
self.turnAngle=90
if self.scribeInAir:
self.relativeStanceXYZInitial = [-0.6, -0.4, 0.0] # stand further away when scribing in air
else:
self.relativeStanceXYZInitial = [-0.48, -0.4, 0.0]
self.relativeStanceRPYInitial = [0, 0, 16]
if (self.graspingHand is 'left'): # -1 = anticlockwise (left, default) | 1 = clockwise
self.scribeDirection = -1
else:
self.scribeDirection = 1
def setNextScribeAngle(self, nextScribeAngle):
    # Angle (degrees) on the valve face where the next reach/touch occurs.
    self.nextScribeAngle = nextScribeAngle
def resetTurnPath(self):
    '''Remove visualization objects left over from a previous turn.'''
    # Consolidated: the original swept om.getObjects() twice, once per name.
    staleNames = ('face frame desired', 'face frame desired path')
    for obj in om.getObjects():
        if obj.getProperty('Name') in staleNames:
            om.removeFromObjectModel(obj)
def addPlan(self, plan):
    # Record the plan in the demo's plan history.
    self.plans.append(plan)
def computeGroundFrame(self, robotModel):
    '''
    Given a robot model, returns a vtkTransform at a position between
    the feet, on the ground, with z-axis up and x-axis aligned with the
    robot pelvis x-axis.
    '''
    t1 = robotModel.getLinkFrame('l_foot')
    t2 = robotModel.getLinkFrame('r_foot')
    pelvisT = robotModel.getLinkFrame('pelvis')

    # Express the pelvis x-axis in world coordinates.
    xaxis = [1.0, 0.0, 0.0]
    pelvisT.TransformVector(xaxis, xaxis)
    xaxis = np.array(xaxis)
    zaxis = np.array([0.0, 0.0, 1.0])

    # Re-orthogonalize: project the pelvis heading into the ground plane.
    yaxis = np.cross(zaxis, xaxis)
    yaxis /= np.linalg.norm(yaxis)
    xaxis = np.cross(yaxis, zaxis)

    # Midpoint between the feet, dropped by the fixed foot height offset.
    stancePosition = (np.array(t2.GetPosition()) + np.array(t1.GetPosition())) / 2.0
    footHeight = 0.0811  # vertical offset from the foot link frame down to the ground

    t = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
    t.PostMultiply()
    t.Translate(stancePosition)
    t.Translate([0.0, 0.0, -footHeight])

    return t
def computeRobotStanceFrame(self, objectTransform, relativeStanceTransform):
    '''
    Given a robot model, determine the height of the ground
    using an XY and Yaw standoff, combined to determine the relative 6DOF standoff
    For a grasp or approach stance
    '''
    groundFrame = self.computeGroundFrame(self.robotModel)
    groundHeight = groundFrame.GetPosition()[2]

    graspPosition = np.array(objectTransform.GetPosition())

    # Express the object's y and z axes in world coordinates.
    graspYAxis = [0.0, 1.0, 0.0]
    graspZAxis = [0.0, 0.0, 1.0]
    objectTransform.TransformVector(graspYAxis, graspYAxis)
    objectTransform.TransformVector(graspZAxis, graspZAxis)

    # Build an upright frame whose heading follows the object's y-axis.
    xaxis = graspYAxis
    #xaxis = graspZAxis
    zaxis = [0, 0, 1]
    yaxis = np.cross(zaxis, xaxis)
    yaxis /= np.linalg.norm(yaxis)
    xaxis = np.cross(yaxis, zaxis)

    # Ground-projected frame directly under the grasp point.
    graspGroundTransform = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
    graspGroundTransform.PostMultiply()
    graspGroundTransform.Translate(graspPosition[0], graspPosition[1], groundHeight)

    # Apply the requested relative standoff to get the stance target.
    robotStance = transformUtils.copyFrame( relativeStanceTransform )
    robotStance.Concatenate(graspGroundTransform)

    return robotStance
def updatePointcloudSnapshot(self):
    '''Refresh the 'pointcloud snapshot' item from lidar or stereo depth.'''
    if self.useLidar is True:
        source = segmentation.getCurrentRevolutionData()
    else:
        source = segmentation.getDisparityPointCloud(4)
    return vis.updatePolyData(source, 'pointcloud snapshot', parent='segmentation')
### Valve Focused Functions ######################################################################
def segmentValveWallAuto(self, expectedValveRadius=0.195, mode='both'):
    '''Clear old affordances, refresh the point cloud, and run the valve/wall fitter.'''
    om.removeFromObjectModel(om.findObjectByName('affordances'))
    # NOTE(review): 'grabPointcloudSnapshot' is not defined in the visible part
    # of this class (only 'updatePointcloudSnapshot' is) -- confirm it exists.
    self.grabPointcloudSnapshot()
    self.affordanceFitFunction(expectedValveRadius=expectedValveRadius, mode=mode)
def onImageViewDoubleClick(self, displayPoint, modifiers, imageView):
    '''Ctrl+double-click in a camera view: segment the valve along the click ray.'''
    if modifiers != QtCore.Qt.ControlModifier:
        return

    # Cast a ray from the camera through the clicked pixel into the point cloud.
    imagePixel = imageView.getImagePixel(displayPoint)
    cameraPos, ray = imageView.getWorldPositionAndRay(imagePixel)

    polyData = self.updatePointcloudSnapshot().polyData
    pickPoint = segmentation.extractPointsAlongClickRay(cameraPos, ray, polyData)

    # Replace any existing valve affordance with a fresh segmentation.
    om.removeFromObjectModel(om.findObjectByName('valve'))
    segmentation.segmentValveByBoundingBox(polyData, pickPoint)
    self.findAffordance()
def computeValveStanceFrame(self):
    '''Compute and publish the stance frame for approaching the valve.'''
    objectTransform = transformUtils.copyFrame( self.clenchFrame.transform )
    self.relativeStanceTransform = transformUtils.copyFrame( transformUtils.frameFromPositionAndRPY( self.relativeStanceXYZ , self.relativeStanceRPY ) )
    #robotStance = self.computeRobotStanceFrame(objectTransform, self.relativeStanceTransform)
    # The stance now comes from the coaxial IK seed instead of the fixed offset above.
    robotStance = self.getStanceFrameCoaxial()
    self.stanceFrame = vis.updateFrame(robotStance, 'valve grasp stance', parent=self.valveAffordance, visible=False, scale=0.2)
    self.stanceFrame.addToView(app.getDRCView())
def spawnValveFrame(self, robotModel, height):
    '''Return a spawn frame at the given height, offset from the robot's ground frame.'''
    position = [0.7, 0.22, height]
    rpy = [180, -90, 0]
    t = transformUtils.frameFromPositionAndRPY(position, rpy)
    # Express the offset relative to the robot's current ground frame.
    t.Concatenate(self.computeGroundFrame(robotModel))
    return t
def spawnValveAffordance(self):
    '''Spawn a synthetic valve affordance in front of the robot (for development).'''
    self.graspingObject = 'valve'
    spawn_height = 1.2192 # 4ft
    radius = 0.19558 # nominal initial value. 7.7in radius metal valve
    zwidth = 0.02
    thickness = 0.0254 # i think zwidth and thickness are duplicates

    valveFrame = self.spawnValveFrame(self.robotModel, spawn_height)
    folder = om.getOrCreateContainer('affordances')

    # Build the valve mesh: a torus rim plus a single spoke bar.
    z = DebugData()
    #z.addLine ( np.array([0, 0, -thickness]) , np.array([0, 0, thickness]), radius=radius)
    z.addTorus( radius, 0.127 )
    z.addLine(np.array([0,0,0]), np.array([radius-zwidth,0,0]), radius=zwidth) # main bar
    valveMesh = z.getPolyData()

    self.valveAffordance = vis.showPolyData(valveMesh, 'valve', color=[0.0, 1.0, 0.0], cls=affordanceitems.FrameAffordanceItem, parent=folder, alpha=0.3)
    self.valveAffordance.actor.SetUserTransform(valveFrame)
    self.valveFrame = vis.showFrame(valveFrame, 'valve frame', parent=self.valveAffordance, visible=False, scale=0.2)
    # The frame item is immediately replaced by its transform; elsewhere
    # (findValveAffordance) self.valveFrame also holds a vtkTransform.
    self.valveFrame = self.valveFrame.transform

    params = dict(radius=radius, length=zwidth, xwidth=radius, ywidth=radius, zwidth=zwidth,
                  otdf_type='steering_cyl', friendly_name='valve')
    self.valveAffordance.setAffordanceParams(params)
    self.valveAffordance.updateParamsFromActorTransform()
def spawnValveLeverAffordance(self):
    '''Spawn a synthetic lever-valve affordance in front of the robot.'''
    self.graspingObject = 'lever'
    spawn_height = 1.06 # 3.5ft
    pipe_radius = 0.01
    lever_length = 0.33

    valveFrame = self.spawnValveFrame(self.robotModel, spawn_height)
    folder = om.getOrCreateContainer('affordances')

    # The lever is modeled as a single cylinder along x.
    z = DebugData()
    z.addLine([0,0,0], [ lever_length , 0, 0], radius=pipe_radius)
    valveMesh = z.getPolyData()

    self.valveAffordance = vis.showPolyData(valveMesh, 'lever', color=[0.0, 1.0, 0.0], cls=affordanceitems.FrameAffordanceItem, parent=folder, alpha=0.3)
    self.valveAffordance.actor.SetUserTransform(valveFrame)
    # NOTE(review): unlike spawnValveAffordance, valveFrame here stays a frame
    # item (not a vtkTransform) -- confirm downstream users handle both.
    self.valveFrame = vis.showFrame(valveFrame, 'lever frame', parent=self.valveAffordance, visible=False, scale=0.2)

    otdfType = 'lever_valve'
    params = dict( radius=pipe_radius, length=lever_length, friendly_name=otdfType, otdf_type=otdfType)
    self.valveAffordance.setAffordanceParams(params)
    self.valveAffordance.updateParamsFromActorTransform()
def findAffordance(self):
self.setupAffordanceParams()
if (self.graspingObject is 'valve'):
self.findValveAffordance()
else:
self.findValveLeverAffordance()
def setupAffordanceParams(self):
self.setupStance()
self.relativeStanceXYZ = self.relativeStanceXYZInitial
self.relativeStanceRPY = self.relativeStanceRPYInitial
self.nextScribeAngle = self.nextScribeAngleInitial
# mirror stance and rotation direction for right hand:
if (self.graspingHand is 'right'):
self.relativeStanceXYZ[1] = -self.relativeStanceXYZ[1]
self.relativeStanceRPY[2] = -self.relativeStanceRPY[2]
self.nextScribeAngle = -self.nextScribeAngle
def updateTouchAngleVisualization(self, angle):
    '''Show (or move) the cylinder marking the touch angle on the valve face.'''
    if self.valveAffordance:
        obj = om.findObjectByName('valve touch angle')

        # Rotate the marker about the valve's x-axis by the requested angle.
        t = vtk.vtkTransform()
        t.PostMultiply()
        t.RotateX(angle)
        t.Concatenate(self.valveAffordance.getChildFrame().transform)

        if not obj:
            # First call: create a cylinder affordance spanning the valve diameter.
            pose = transformUtils.poseFromTransform(t)
            length = self.valveAffordance.getProperty('Radius')*2
            desc = dict(classname='CylinderAffordanceItem', Name='valve touch angle',
                        uuid=segmentation.newUUID(), pose=pose, Radius=0.01, Length=length, Color=[1.0, 1.0, 0.0])
            # Imported lazily to avoid a circular import at module load time.
            import affordancepanel
            obj = affordancepanel.panel.affordanceFromDescription(desc)

        obj.getChildFrame().copyFrame(t)
def findValveAffordance(self):
    '''Locate the 'valve' affordance and derive grasp/stance frames from it.'''
    self.valveAffordance = om.findObjectByName('valve')
    if self.valveAffordance is None:
        return

    valveFrame = self.valveAffordance.getChildFrame()

    # Re-orient the affordance frame into the axis convention the planners use.
    t = vtk.vtkTransform()
    t.PostMultiply()
    t.RotateX(180)
    t.RotateY(-90)
    t.Concatenate(valveFrame.transform)
    self.valveFrame = t

    self.scribeRadius = self.valveAffordance.params.get('radius')# for pointer this was (radius - 0.06)

    self.computeClenchFrame()
    self.computeValveStanceFrame()

    # Keep clench and stance frames following the valve if the user moves it;
    # ignoreIncoming prevents edits to them from moving the valve itself.
    self.frameSync = vis.FrameSync()
    self.frameSync.addFrame(valveFrame)
    self.frameSync.addFrame(self.clenchFrame, ignoreIncoming=True)
    self.frameSync.addFrame(self.stanceFrame, ignoreIncoming=True)

    # make an affordance to visualize the scribe angle
def findValveLeverAffordance(self):
    '''Locate the 'lever' affordance and derive grasp/stance frames from it.'''
    self.valveAffordance = om.findObjectByName('lever')
    # NOTE(review): here valveFrame is the frame object, whereas
    # findValveAffordance stores a vtkTransform -- confirm downstream handling.
    self.valveFrame = om.findObjectByName('lever frame')

    # length of lever is equivalent to radius of valve
    self.scribeRadius = self.valveAffordance.params.get('length') - 0.10

    self.computeClenchFrame()
    self.computeValveStanceFrame()

    # Keep the derived frames following the lever if the user moves it.
    self.frameSync = vis.FrameSync()
    self.frameSync.addFrame(self.valveFrame)
    self.frameSync.addFrame(self.clenchFrame)
    self.frameSync.addFrame(self.stanceFrame)
def computeClenchFrame(self):
    '''Place the clench (hand-target) frame relative to the valve frame.'''
    t = transformUtils.frameFromPositionAndRPY(self.clenchFrameXYZ, self.clenchFrameRPY)
    t_copy = transformUtils.copyFrame(t)
    t_copy.Concatenate(self.valveFrame)
    self.clenchFrame = vis.updateFrame(t_copy, 'valve clench frame', parent=self.valveAffordance, visible=False, scale=0.2)
    self.clenchFrame.addToView(app.getDRCView())
def computeTouchFrame(self, touchValve):
    '''
    Compute the desired palm frame on the valve rim at self.nextScribeAngle.

    touchValve -- True places the palm at touchDepth (in contact);
                  False places it at reachDepth (standoff).
    '''
    if touchValve:
        faceDepth = self.touchDepth
    else:
        faceDepth = self.reachDepth

    assert self.valveAffordance

    t = transformUtils.frameFromPositionAndRPY([0,faceDepth,0], [0,0,0])
    # Point on the rim at scribeRadius, rotated to the current scribe angle.
    position = [ self.scribeRadius*math.cos( math.radians( self.nextScribeAngle )) , self.scribeRadius*math.sin( math.radians( self.nextScribeAngle )) , 0]
    # roll angle governs how much the palm points along towards the rotation axis
    # yaw ensures thumb faces the axis
    # BUGFIX: string comparisons now use '==' instead of identity ('is').
    if self.graspingObject == 'valve':
        # valve, left and right
        rpy = [90+self.palmInAngle, 0, (270+self.nextScribeAngle)]
    else:
        if self.graspingHand == 'left': # lever left
            rpy = [90, 0, (180+self.nextScribeAngle)]
        else:
            rpy = [90, 0, self.nextScribeAngle]

    t2 = transformUtils.frameFromPositionAndRPY(position, rpy)
    t.Concatenate(t2)
    self.faceTransformLocal = transformUtils.copyFrame(t)

    t.Concatenate(self.valveFrame)
    self.faceFrameDesired = vis.showFrame(t, 'face frame desired', parent=self.valveAffordance, visible=False, scale=0.2)
def drawFacePath(self):
    '''Render the accumulated face-frame path as a poly-line in valve coordinates.'''
    path = DebugData()
    # Connect consecutive path frames with thin line segments.
    for previousFrame, currentFrame in zip(self.facePath, self.facePath[1:]):
        path.addLine(np.array(previousFrame.GetPosition()), np.array(currentFrame.GetPosition()), radius=0.005)

    pathMesh = path.getPolyData()
    self.pointerTipLinePath = vis.showPolyData(pathMesh, 'face frame desired path', color=[0.0, 0.3, 1.0], parent=self.valveAffordance, alpha=0.6)
    self.pointerTipLinePath.actor.SetUserTransform(self.valveFrame)
### End Valve Focused Functions ###############################################################
### Planning Functions ###############################################################
# These are operational conveniences:
def planFootstepsToStance(self):
    '''Plan footsteps to the precomputed valve stance frame.'''
    self.planFootsteps(self.stanceFrame.transform)

def planFootsteps(self, goalFrame):
    '''Request a footstep plan from the current pose to goalFrame and store it.'''
    startPose = self.getPlanningStartPose()
    request = self.footstepPlanner.constructFootstepPlanRequest(startPose, goalFrame)
    self.footstepPlan = self.footstepPlanner.sendFootstepPlanRequest(request, waitForResponse=True)

def planWalking(self):
    '''Request a walking plan for the stored footstep plan and record it.'''
    startPose = self.getPlanningStartPose()
    walkingPlan = self.footstepPlanner.sendWalkingPlanRequest(self.footstepPlan, startPose, waitForResponse=True)
    self.addPlan(walkingPlan)
def planPreGrasp(self):
    '''Plan the grasping arm to the 'arm up pregrasp' database posture.'''
    startPose = self.getPlanningStartPose()
    endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'General', 'arm up pregrasp', side=self.graspingHand)
    newPlan = self.ikPlanner.computePostureGoal(startPose, endPose)
    self.addPlan(newPlan)

def planNominal(self):
    '''Plan back to a standing pose merged with the 'safe nominal' posture.'''
    startPose = self.getPlanningStartPose()
    endPose, info = self.ikPlanner.computeStandPose(startPose)
    endPose = self.ikPlanner.getMergedPostureFromDatabase(endPose, 'General', 'safe nominal')
    newPlan = self.ikPlanner.computePostureGoal(startPose, endPose)
    self.addPlan(newPlan)
def coaxialGetPose(self, reachDepth, lockFeet=True, lockBack=None,
                   lockBase=None, resetBase=False, wristAngleCW=0,
                   startPose=None, verticalOffset=0.01):
    '''
    Solve IK for a pose that places the grasping hand on the valve axis
    at the given depth along it.

    reachDepth     -- offset along the clench frame's y-axis (negative = standoff).
    lockFeet/lockBack/lockBase -- constraint toggles; back/base default to the
                      instance-level self.lockBack / self.lockBase settings.
    resetBase      -- constrain the base relative to the nominal pose instead
                      of the start pose.
    wristAngleCW   -- desired uwy wrist angle, clamped to (0.01, pi-0.01).
    startPose      -- optional seed pose; defaults to the planning start pose.
    verticalOffset -- vertical shift of the reach target from the clench frame.

    Returns the result of constraintSet.runIk().
    '''
    # Desired pelvis yaw: face along the valve frame's z-axis.
    _, _, zaxis = transformUtils.getAxesFromTransform(self.valveFrame)
    yawDesired = np.arctan2(zaxis[1], zaxis[0])
    wristAngleCW = min(np.pi-0.01, max(0.01, wristAngleCW))
    if lockBase is None:
        lockBase = self.lockBase
    if lockBack is None:
        lockBack = self.lockBack

    # Per-side link/joint names and wrist/arm joint bounds.
    if self.graspingHand == 'left':
        larmName = 'l_larm'
        mwxJoint = 'l_arm_mwx'
        elxJoint = 'l_arm_elx'
        shxJoint = 'l_arm_shx'
        xJointLowerBound = [np.radians(45), -np.inf, 0];
        xJointUpperBound = [np.inf, np.radians(-30), 0];
        yJoints = ['l_arm_uwy']
        yJointLowerBound = [wristAngleCW]
        yJointUpperBound = [wristAngleCW]
    else:
        larmName = 'r_larm'
        mwxJoint = 'r_arm_mwx'
        elxJoint = 'r_arm_elx'
        shxJoint = 'r_arm_shx'
        yJoints = ['r_arm_uwy']
        xJointLowerBound = [-np.inf, np.radians(30), 0];
        xJointUpperBound = [np.radians(-45), np.inf, 0];
        # Right wrist angle is mirrored about pi.
        yJointLowerBound = [np.pi - wristAngleCW]
        yJointUpperBound = [np.pi - wristAngleCW]

    if startPose is None:
        startPose = self.getPlanningStartPose()

    # Nominal pose used for seeding/regularization, pinned at the current
    # (or remembered) pelvis height.
    nominalPose, _ = self.ikPlanner.computeNominalPose(startPose)
    if self.nominalPelvisXYZ is not None:
        nominalPose[2] = self.nominalPelvisXYZ[2]
    else:
        nominalPose[2] = startPose[2]
    nominalPoseName = 'qNomAtRobot'
    self.ikPlanner.addPose(nominalPose, nominalPoseName)
    startPoseName = 'Start'
    #startPose[5] = yawDesired
    self.ikPlanner.addPose(startPose, startPoseName)
    self.ikPlanner.reachingSide = self.graspingHand

    constraints = []
    # The non-reaching arm stays where it is.
    constraints.append(self.ikPlanner.createLockedArmPostureConstraint(startPoseName))
    if resetBase:
        baseConstraintRobotPoseName = nominalPoseName
    else:
        baseConstraintRobotPoseName = startPoseName

    if lockFeet:
        constraints.extend(self.ikPlanner.createFixedFootConstraints(startPoseName))
        if lockBase:
            constraints.append(self.ikPlanner.createLockedBasePostureConstraint(baseConstraintRobotPoseName, lockLegs=False))
        else:
            constraints.append(self.ikPlanner.createXYZMovingBasePostureConstraint(baseConstraintRobotPoseName))
    else:
        # Feet may slide: allow base XYZ+yaw motion, but keep the feet in a
        # fixed side-by-side relation to each other.
        constraints.append(self.ikPlanner.createXYZYawMovingBasePostureConstraint(baseConstraintRobotPoseName))
        constraints.extend(self.ikPlanner.createSlidingFootConstraints(startPose))
        p = ik.RelativePositionConstraint()
        p.bodyNameA = 'l_foot'
        p.bodyNameB = 'r_foot'
        p.positionTarget = np.array([0, 0.3, 0])
        p.lowerBound = np.array([0, 0, -np.inf])
        p.upperBound = np.array([0, 0, np.inf])
        constraints.append(p)
        p = ik.RelativePositionConstraint()
        p.bodyNameA = 'r_foot'
        p.bodyNameB = 'l_foot'
        p.lowerBound = np.array([0, -np.inf, -np.inf])
        p.upperBound = np.array([0, np.inf, np.inf])
        constraints.append(p)

    # Head looks at the clench point.
    headGaze = ik.WorldGazeTargetConstraint(linkName='head',
                                            bodyPoint=np.zeros(3),
                                            worldPoint=np.array(self.clenchFrame.transform.GetPosition()),
                                            coneThreshold = np.radians(20))
    constraints.append(headGaze)

    # Pelvis yaw within +/-20 degrees of facing the valve.
    p = ik.PostureConstraint()
    p.joints = ['base_yaw']
    p.jointsLowerBound = [yawDesired - np.radians(20)]
    p.jointsUpperBound = [yawDesired + np.radians(20)]
    constraints.append(p)

    if lockBack:
        constraints.append(self.ikPlanner.createLockedBackPostureConstraint(startPoseName))
    else:
        constraints.append(self.ikPlanner.createMovingBackLimitedPostureConstraint())
    constraints.append(self.ikPlanner.createKneePostureConstraint([0.7, 2.5]))

    # NOTE(review): both branches assign identical tolerances; presumably the
    # touch case was meant to be tighter than the standoff case -- confirm.
    if reachDepth >= 0:
        elbowTol = self.coaxialTol
        wristTol = self.coaxialTol
        gazeDegreesTol = self.coaxialGazeTol
    else:
        elbowTol = self.coaxialTol
        wristTol = self.coaxialTol
        gazeDegreesTol = self.coaxialGazeTol

    # Wrist mwx joint pinned at zero (the commented bounds are an older variant).
    p = ik.PostureConstraint()
    #p.joints = [shxJoint, elxJoint, mwxJoint]
    #p.jointsLowerBound = xJointLowerBound
    #p.jointsUpperBound = xJointUpperBound
    p.joints = [mwxJoint]
    p.jointsLowerBound = [0]
    p.jointsUpperBound = [0]
    constraints.append(p)

    # Keep the lower arm on the valve axis (free only along the axis, y).
    elbowOnValveAxisConstraint = ik.PositionConstraint(linkName=larmName,
                                                       referenceFrame=self.clenchFrame.transform)
    elbowOnValveAxisConstraint.lowerBound = [elbowTol, -np.inf, elbowTol]
    elbowOnValveAxisConstraint.upperBound = [elbowTol, np.inf, elbowTol]
    constraints.append(elbowOnValveAxisConstraint)

    constraints.append(self.ikPlanner.createQuasiStaticConstraint())
    constraints.append(self.ikPlanner.createGazeGraspConstraint(self.graspingHand, self.clenchFrame, coneThresholdDegrees=gazeDegreesTol))

    # Pin the wrist rotation joint to the requested angle.
    p = ik.PostureConstraint()
    p.joints = yJoints
    p.jointsLowerBound = yJointLowerBound
    p.jointsUpperBound = yJointUpperBound
    constraints.append(p)

    # Bound the gravity-compensation torques at shoulder-x and elbow-x.
    torqueConstraint = ik.GravityCompensationTorqueConstraint()
    torqueConstraint.joints = [shxJoint, elxJoint]
    torqueConstraint.torquesLowerBound = -np.array([self.shxMaxTorque, self.elxMaxTorque])
    torqueConstraint.torquesUpperBound = np.array([self.shxMaxTorque, self.elxMaxTorque])
    constraints.append(torqueConstraint)

    # Reach goal: clench frame shifted by reachDepth along the axis and
    # verticalOffset downward.
    t = transformUtils.frameFromPositionAndRPY([-verticalOffset,reachDepth,0], [0,0,0])
    t.Concatenate(self.clenchFrame.transform)
    constraintSet = self.ikPlanner.newReachGoal(startPoseName, self.graspingHand, t, constraints, lockOrient=False)
    # Tighten the position tolerance of the reach-goal constraint just added.
    constraintSet.constraints[-1].lowerBound = np.array([-wristTol, 0, -wristTol])
    constraintSet.constraints[-1].upperBound = np.array([wristTol, 0, wristTol])
    constraintSet.nominalPoseName = nominalPoseName;
    # NOTE(review): the start pose is also set to the nominal pose here
    # (not 'Start') -- confirm this seeding is intentional.
    constraintSet.startPoseName = nominalPoseName;
    return constraintSet.runIk()
    def coaxialPlan(self, reachDepth, **kwargs):
        """Plan a posture goal placing the hand coaxially on the valve.

        Solves IK for the requested reach depth (via coaxialGetPose), shows
        the SNOPT exit code to the user, and queues the resulting plan.

        :param reachDepth: depth along the valve axis, forwarded to
            coaxialGetPose.
        :param kwargs: forwarded to coaxialGetPose (e.g. lockBase,
            verticalOffset).
        """
        startPose = self.getPlanningStartPose()
        touchPose, info = self.coaxialGetPose(reachDepth, **kwargs)
        plan = self.ikPlanner.computePostureGoal(startPose, touchPose)
        app.displaySnoptInfo(info)
        self.addPlan(plan)
    def coaxialPlanReach(self, verticalOffset=None, **kwargs):
        """Plan the initial coaxial reach toward the valve.

        :param verticalOffset: vertical offset of the hand relative to the
            clench frame; defaults to self.reachHeight when None.
        """
        if verticalOffset is None:
            verticalOffset = self.reachHeight
        self.coaxialPlan(self.reachDepth, resetBase=True, lockBase=False, verticalOffset=verticalOffset, **kwargs)
    def coaxialPlanTouch(self, **kwargs):
        """Plan the slow insertion move that touches the valve.

        Runs the plan at low speed, with a slight clockwise wrist pre-wind,
        then restores the normal planning speed.
        """
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedLow
        self.coaxialPlan(self.touchDepth, wristAngleCW=np.radians(20), **kwargs)
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedHigh
    def coaxialPlanTurn(self, **kwargs):
        """Plan the wrist rotation that turns the valve.

        Rotates the lower-wrist joint (uwy) of the grasping arm close to its
        travel limit; the two sides use different targets because the joint
        conventions mirror each other (assumption based on the joint names -
        verify against the robot model).
        """
        startPose = self.getPlanningStartPose()
        if self.graspingHand == 'left':
            postureJoints = {'l_arm_uwy' : np.pi - 0.01}
        else:
            postureJoints = {'r_arm_uwy' : 0.01}
        endPose = self.ikPlanner.mergePostures(startPose, postureJoints)
        # Turn slowly, then restore normal speed for subsequent plans.
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedTurn
        plan = self.ikPlanner.computePostureGoal(startPose, endPose)
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedHigh
        # No IK solve happens here, so display a fixed "success" info code.
        app.displaySnoptInfo(1)
        self.addPlan(plan)
    def coaxialPlanRetract(self, **kwargs):
        """Plan the slow retraction away from the valve.

        Uses a 180-degree clockwise wrist target and a free base, at low
        speed; restores the normal planning speed afterwards.
        """
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedLow
        self.coaxialPlan(self.retractDepth, wristAngleCW=np.radians(180), lockBase=False, **kwargs)
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedHigh
    def getStanceFrameCoaxial(self):
        """Compute a feet mid-point stance frame for working at the valve.

        Seeds whole-body IK with a pose offset back from the valve along its
        x axis, solves a touch pose with free base and feet, and returns the
        mid-feet frame of the resulting stance. Also records the solved
        pelvis position in self.nominalPelvisXYZ.
        """
        xaxis, _, _ = transformUtils.getAxesFromTransform(self.valveFrame)
        yawDesired = np.arctan2(xaxis[1], xaxis[0])
        seedDistance = 0.5
        # NOTE(review): the pose returned by getPose is mutated in place
        # below - assumed to be a copy; confirm it does not alias controller
        # state.
        startPose = self.ikPlanner.jointController.getPose('q_nom')
        startPose[0] -= seedDistance*xaxis[0]
        startPose[1] -= seedDistance*xaxis[1]
        # Presumably orients the base toward the valve; confirm the sign
        # convention (subtracting rather than assigning the yaw).
        startPose[5] -= yawDesired
        stancePose, info = self.coaxialGetPose(self.touchDepth, lockFeet=False,
                                               lockBase=False, lockBack=True,
                                               startPose=startPose)
        stanceRobotModel = self.ikPlanner.getRobotModelAtPose(stancePose)
        self.nominalPelvisXYZ = stancePose[:3]
        return self.footstepPlanner.getFeetMidPoint(stanceRobotModel)
#p = ik.PostureConstraint()
#if self.graspingHand is 'left':
#p.joints = ['l_arm_uwy', 'l_arm_mwx']
#constraints.append(reachingArmPostureConstraint)
#constraints.extend(self.ikPlanner.createSlidingFootConstraints(startPose))
#return self.ikPlanner.newReachGoal(startPoseName, self.graspingHand, self.clenchFrame, constraints, lockOrient=False)
def getPlannedTouchAngleCoaxial(self):
# when the pose is computed in getStanceFrameCoaxial, we could
# store the turn angle. This method just returns the stored value.
return 0.0
    def setDesiredTouchAngleCoaxial(self, angle):
        """Record the user's desired touch angle and refresh its display.

        This is the turn angle that the user wants. It should be close to
        the planned touch angle, but the user may adjust it to avoid
        hitting the spokes.
        """
        self.updateTouchAngleVisualization(angle)
    def planReach(self):
        """Plan a reach to the touch frame computed in non-contact mode."""
        self.computeTouchFrame(False) # 0 = not in contact
        self.computeTouchPlan()
    def planGrasp(self):
        """Plan a reach to the touch frame computed in contact mode."""
        self.computeTouchFrame(True)
        self.computeTouchPlan()
    def computeTouchPlan(self):
        """Solve and plan an end-effector goal to the desired face frame.

        Seeds and biases the IK with an 'arm up pregrasp' posture merged
        onto the nominal pose at the current stance, solves the reach goal,
        and plans the trajectory at low speed.
        """
        # new full 6 dof constraint:
        startPose = self.getPlanningStartPose()
        nominalPose, _ = self.ikPlanner.computeNominalPose(startPose)
        self.ikPlanner.addPose(nominalPose, 'nominal_at_stance')
        reachNominalPose = self.ikPlanner.getMergedPostureFromDatabase(nominalPose, 'General', 'arm up pregrasp', side=self.graspingHand)
        self.ikPlanner.addPose(reachNominalPose, 'reach_nominal_at_stance')
        self.constraintSet = self.ikPlanner.planEndEffectorGoal(startPose, self.graspingHand, self.faceFrameDesired, lockBase=self.lockBase, lockBack=self.lockBack)
        # Bias and seed the solve from the pregrasp posture added above.
        self.constraintSet.nominalPoseName = 'reach_nominal_at_stance'
        self.constraintSet.seedPoseName = 'reach_nominal_at_stance'
        endPose, info = self.constraintSet.runIk()
        # Reach slowly, then restore the normal planning speed.
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedLow
        self.planTrajectory()
        self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedHigh
def planValveTurn(self, turnDegrees=360):
# 10deg per sample
numberOfSamples = int(round(turnDegrees/10.0))
self.facePath = []
self.resetTurnPath()
degreeStep = float(turnDegrees) / numberOfSamples
tipMode = False if self.scribeInAir else True
self.computeTouchFrame(tipMode)
self.initConstraintSet(self.faceFrameDesired)
self.facePath.append(self.faceTransformLocal)
for i in xrange(numberOfSamples):
self.nextScribeAngle += self.scribeDirection*degreeStep
self.computeTouchFrame(tipMode)
self.appendPositionOrientationConstraintForTargetFrame(self.faceFrameDesired, i+1)
self.facePath.append(self.faceTransformLocal)
self.drawFacePath()
self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedLow
self.planTrajectory()
self.ikPlanner.ikServer.maxDegreesPerSecond = self.speedHigh
    def initConstraintSet(self, goalFrame):
        """Create the base constraint set for the valve-turn trajectory.

        Builds a ConstraintSet anchored at the current planning start pose,
        with moving-body constraints (base/back per lock flags, non-grasping
        arm locked) and a knee posture constraint. Per-sample hand
        constraints are appended later.

        :param goalFrame: target frame (currently only used by the
            commented-out gaze constraint below).
        """
        # create constraint set
        startPose = self.getPlanningStartPose()
        startPoseName = 'gaze_plan_start'
        endPoseName = 'gaze_plan_end'
        self.ikPlanner.addPose(startPose, startPoseName)
        self.ikPlanner.addPose(startPose, endPoseName)
        self.constraintSet = ikplanner.ConstraintSet(self.ikPlanner, [], startPoseName, endPoseName)
        self.constraintSet.endPose = startPose
        # add body constraints
        bodyConstraints = self.ikPlanner.createMovingBodyConstraints(startPoseName, lockBase=self.lockBase, lockBack=self.lockBack, lockLeftArm=self.graspingHand=='right', lockRightArm=self.graspingHand=='left')
        self.constraintSet.constraints.extend(bodyConstraints)
        self.constraintSet.constraints.append(self.ikPlanner.createKneePostureConstraint([0.6, 2.5]))
        # add gaze constraint - TODO: this gaze constraint shouldn't be necessary, fix
        self.graspToHandLinkFrame = self.ikPlanner.newGraspToHandFrame(self.graspingHand)
        #gazeConstraint = self.ikPlanner.createGazeGraspConstraint(self.graspingHand, goalFrame, self.graspToHandLinkFrame, coneThresholdDegrees=5.0)
        #self.constraintSet.constraints.insert(0, gazeConstraint)
    def appendDistanceConstraint(self):
        """Constrain the grasp point to lie exactly on the scribe circle.

        Adds a point-to-point distance constraint between the hand's grasp
        frame and the valve center, with equal lower and upper bounds at
        the scribe radius.
        """
        # add point to point distance constraint
        c = ikplanner.ik.PointToPointDistanceConstraint()
        c.bodyNameA = self.ikPlanner.getHandLink(self.graspingHand)
        c.bodyNameB = 'world'
        c.pointInBodyA = self.graspToHandLinkFrame
        c.pointInBodyB = self.valveFrame
        c.lowerBound = [self.scribeRadius]
        c.upperBound = [self.scribeRadius]
        self.constraintSet.constraints.insert(0, c)
def appendPositionOrientationConstraintForTargetFrame(self, goalFrame, t):
positionConstraint, orientationConstraint = self.ikPlanner.createPositionOrientationGraspConstraints(self.graspingHand, goalFrame, self.graspToHandLinkFrame)
positionConstraint.tspan = [t, t]
orientationConstraint.tspan = [t, t]
self.constraintSet.constraints.append(positionConstraint)
self.constraintSet.constraints.append(orientationConstraint)
    def planTrajectory(self):
        """Run the IK trajectory solve for the current constraint set and
        queue the resulting plan."""
        self.ikPlanner.ikServer.usePointwise = False
        plan = self.constraintSet.runIkTraj()
        self.addPlan(plan)
########## Glue Functions ####################################
    def moveRobotToStanceFrame(self, frame):
        """Teleport the simulated robot to the given stance frame.

        Resets the joint controller to the nominal posture, then overwrites
        the base x/y and yaw from the frame and republishes the pose as the
        estimated robot state. Used instead of walking in sim/vis mode.
        """
        self.sensorJointController.setPose('q_nom')
        stancePosition = frame.GetPosition()
        stanceOrientation = frame.GetOrientation()
        q = self.sensorJointController.q.copy()
        q[:2] = [stancePosition[0], stancePosition[1]]
        # Orientation[2] is yaw in degrees; pose stores radians.
        q[5] = math.radians(stanceOrientation[2])
        self.sensorJointController.setPose('EST_ROBOT_STATE', q)
def getHandDriver(self, side):
assert side in ('left', 'right')
return self.lhandDriver if side == 'left' else self.rhandDriver
    def openHand(self,side):
        """Fully open the hand on the given side."""
        #self.handDriver(side).sendOpen()
        self.getHandDriver(side).sendCustom(0.0, 100.0, 100.0, 0)
    def openPinch(self,side):
        """Open the hand on the given side into a pinch-ready posture."""
        self.getHandDriver(side).sendCustom(20.0, 100.0, 100.0, 1)
    def closeHand(self, side):
        """Fully close the hand on the given side."""
        #self.handDriver(side).sendClose(60)
        self.getHandDriver(side).sendCustom(100.0, 100.0, 100.0, 0)
    def sendNeckPitchLookDown(self):
        """Pitch the neck down (40 degrees) to view the workspace."""
        self.multisenseDriver.setNeckPitch(40)
    def sendNeckPitchLookForward(self):
        """Pitch the neck to a forward-looking angle (15 degrees)."""
        self.multisenseDriver.setNeckPitch(15)
    def waitForAtlasBehaviorAsync(self, behaviorName):
        """Async task: yield until the atlas driver reports behaviorName."""
        assert behaviorName in self.atlasDriver.getBehaviorMap().values()
        while self.atlasDriver.getCurrentBehaviorName() != behaviorName:
            yield
    def printAsync(self, s):
        """Async task: defer printing s until the task queue runs it."""
        yield
        print s
    def optionalUserPrompt(self, message):
        """Async task: ask the user y/n, unless optional prompts are off.

        Raises Exception (aborting the task queue) on any answer other
        than 'y'.
        """
        if not self.optionalUserPromptEnabled:
            return
        yield
        result = raw_input(message)
        if result != 'y':
            raise Exception('user abort.')
    def requiredUserPrompt(self, message):
        """Async task: ask the user y/n, unless required prompts are off.

        Raises Exception (aborting the task queue) on any answer other
        than 'y'.
        """
        if not self.requiredUserPromptEnabled:
            return
        yield
        result = raw_input(message)
        if result != 'y':
            raise Exception('user abort.')
    def delay(self, delayTimeInSeconds):
        """Async task: yield repeatedly until the given time has elapsed."""
        yield
        t = SimpleTimer()
        while t.elapsed() < delayTimeInSeconds:
            yield
    def waitForCleanLidarSweepAsync(self):
        """Async task: wait for two more lidar revolutions, so at least one
        complete, uninterrupted sweep is available."""
        currentRevolution = self.multisenseDriver.displayedRevolution
        desiredRevolution = currentRevolution + 2
        while self.multisenseDriver.displayedRevolution < desiredRevolution:
            yield
    def getEstimatedRobotStatePose(self):
        """Return the current estimated robot state pose."""
        return self.sensorJointController.getPose('EST_ROBOT_STATE')
def getPlanningStartPose(self):
if self.planFromCurrentRobotState:
return self.getEstimatedRobotStatePose()
else:
if self.plans:
return robotstate.convertStateMessageToDrakePose(self.plans[-1].plan[-1])
else:
return self.getEstimatedRobotStatePose()
    def cleanupFootstepPlans(self):
        """Remove footstep visualization objects and clear the stored plan."""
        om.removeFromObjectModel(om.findObjectByName('walking goal'))
        om.removeFromObjectModel(om.findObjectByName('footstep plan'))
        self.footstepPlan = None
    def playSequenceNominal(self):
        """Play back all queued plans; every queued plan must have succeeded."""
        assert None not in self.plans
        self.planPlaybackFunction(self.plans)
    def commitManipPlan(self):
        """Send the most recent manipulation plan to the robot."""
        self.manipPlanner.commitManipPlan(self.plans[-1])
    def commitFootstepPlan(self):
        """Send the stored footstep plan to the robot."""
        self.footstepPlanner.commitFootstepPlan(self.footstepPlan)
    def waitForPlanExecution(self):
        """Async task: wait for manipulation to start, then to finish."""
        while self.atlasDriver.getControllerStatus() != 'manipulating':
            yield
        while self.atlasDriver.getControllerStatus() == 'manipulating':
            yield
    def waitForWalkExecution(self):
        """Async task: wait for walking to start, then to finish."""
        while self.atlasDriver.getControllerStatus() != 'walking':
            yield
        while self.atlasDriver.getControllerStatus() == 'walking':
            yield
    def waitForPlanAnimation(self, plan):
        """Async task: wait for the duration of the plan's animation."""
        planElapsedTime = planplayback.PlanPlayback.getPlanElapsedTime(plan)
        print 'waiting for plan animation:', planElapsedTime
        return self.delay(planElapsedTime)
    def animateLastPlan(self):
        """Async task: animate the last plan (vis-only) or execute it.

        In visualization-only mode this just waits out the animation;
        otherwise it commits the plan and waits for its execution.
        """
        plan = self.plans[-1]
        if self.visOnly:
            return self.waitForPlanAnimation(plan)
        else:
            self.commitManipPlan()
            return self.waitForPlanExecution()
######### Nominal Plans and Execution #################################################################
    def planSequence(self):
        """Build the full nominal valve sequence offline and play it back.

        Plans from the model state rather than the estimated robot state:
        approach (walk or teleport), pregrasp, reach, grasp, turn, then
        de-reach and return to nominal.
        """
        self.cleanupFootstepPlans()
        self.resetTurnPath()
        self.planFromCurrentRobotState = False
        self.findAffordance()
        self.plans = []
        # Approach valve:
        if self.useFootstepPlanner:
            self.planFootstepsToStance()
            self.planWalking()
        else:
            self.moveRobotToStanceFrame(self.stanceFrame.transform )
        # Reach and Turn:
        self.planPreGrasp()
        self.planReach()
        self.planGrasp()
        self.planValveTurn(self.turnAngle)
        # Dereach and Stand
        self.planReach()
        self.planPreGrasp()
        self.planNominal()
        self.playSequenceNominal()
    def autonomousTest(self, msg):
        """LCM callback: kick off the autonomous valve test sequence.

        :param msg: the triggering LCM message (contents unused).
        """
        print "Got the autonomousTest message, executing valve test sequence"
        q = self.autonomousExecute()
        q.start()
    def sendAutonmousTestDone(self):
        """Publish an LCM message signalling the valve test is complete.

        NOTE: the method name has a typo ("Autonmous") but is kept for
        compatibility with existing callers.
        """
        msg = lcmdrc.utime_t()
        msg.utime = getUtime()
        lcmUtils.publish('AUTONOMOUS_TEST_VALVE_DONE', msg)
    def autonomousExecute(self):
        """Build the full autonomous valve-test task queue.

        Fits the valve, walks to it, refits, raises the arm, then performs
        three turn cycles and publishes the done message. Overrides several
        demo settings (live state, right hand, 70 degree turns starting at
        45 degrees).

        :return AsyncTaskQueue: the assembled (not yet started) queue.
        """
        self.planFromCurrentRobotState = True
        self.visOnly = False
        self.nextScribeAngle = 45
        self.turnAngle=70
        self.graspingHand='right'
        taskQueue = AsyncTaskQueue()
        taskQueue.addTask(self.resetTurnPath)
        # Approach valve:
        taskQueue.addTask(self.waitForCleanLidarSweepAsync)
        taskQueue.addTask( functools.partial(self.segmentValveWallAuto, 0.23, self.graspingObject) )
        taskQueue.addTask(self.optionalUserPrompt('Accept valve fit, continue? y/n: '))
        taskQueue.addTask(self.findAffordance)
        taskQueue.addTask(self.printAsync('Plan and execute walking'))
        taskQueue.addTask(self.planFootstepsToStance)
        taskQueue.addTask(self.optionalUserPrompt('Send footstep plan. continue? y/n: '))
        taskQueue.addTask(self.commitFootstepPlan)
        taskQueue.addTask(self.waitForWalkExecution)
        # Fit the Valve:
        taskQueue.addTask(self.printAsync('Wait for sweep'))
        taskQueue.addTask(self.waitForCleanLidarSweepAsync)
        taskQueue.addTask( functools.partial(self.segmentValveWallAuto, 0.23, self.graspingObject) )
        taskQueue.addTask(self.optionalUserPrompt('Accept valve re-fit, continue? y/n: '))
        taskQueue.addTask(self.findAffordance)
        # Move arm to pregrasp:
        taskQueue.addTask(self.printAsync('Pre grasp'))
        taskQueue.addTask(self.planPreGrasp)
        taskQueue.addTask(self.optionalUserPrompt('Continue? y/n: '))
        taskQueue.addTask(self.animateLastPlan)
        taskQueue.addTask(self.printAsync('Turn 1'))
        taskQueue = self.addAutomousValveTurn(taskQueue, self.nextScribeAngle)
        taskQueue.addTask(self.printAsync('Turn 2'))
        taskQueue = self.addAutomousValveTurn(taskQueue, self.nextScribeAngle)
        taskQueue.addTask(self.printAsync('Turn 3'))
        taskQueue = self.addAutomousValveTurn(taskQueue, self.nextScribeAngle)
        taskQueue.addTask(self.printAsync('done!'))
        taskQueue.addTask(self.sendAutonmousTestDone)
        return taskQueue
    def autonomousExecuteTurn(self):
        '''
        Turn a valve by the turnAngle and then retract.

        As initial conditions: assumes robot has hand in reach or pregrasp
        position. Uses the left hand and a clockwise scribe direction.

        :return AsyncTaskQueue: the assembled (not yet started) queue.
        '''
        self.planFromCurrentRobotState = True
        self.visOnly = False
        self.graspingHand='left'
        self.scribeDirection = 1
        taskQueue = AsyncTaskQueue()
        taskQueue.addTask(self.resetTurnPath)
        taskQueue.addTask(self.printAsync('Turn 1'))
        taskQueue = self.addAutomousValveTurn(taskQueue, self.nextScribeAngle)
        taskQueue.addTask(self.printAsync('done!'))
        return taskQueue
def addAutomousValveTurn(self,taskQueue, nextScribeAngle):
taskQueue.addTask(functools.partial( self.setNextScribeAngle, nextScribeAngle))
taskQueue.addTask(self.printAsync('Reach'))
taskQueue.addTask(self.planReach)
taskQueue.addTask(self.optionalUserPrompt('Continue? y/n: '))
taskQueue.addTask(self.animateLastPlan)
taskQueue.addTask(self.printAsync('Reach'))
taskQueue.addTask(self.planGrasp)
taskQueue.addTask(self.optionalUserPrompt('Continue? y/n: '))
taskQueue.addTask(self.animateLastPlan)
taskQueue.addTask(functools.partial(self.closeHand,self.graspingHand))
taskQueue.addTask(self.printAsync('Turn'))
taskQueue.addTask(functools.partial( self.planValveTurn, self.turnAngle))
taskQueue.addTask(self.optionalUserPrompt('Continue? y/n: '))
taskQueue.addTask(self.animateLastPlan)
taskQueue.addTask(functools.partial(self.openHand,self.graspingHand))
taskQueue.addTask(self.printAsync('Dereach'))
taskQueue.addTask(self.planReach)
taskQueue.addTask(self.optionalUserPrompt('Continue? y/n: '))
taskQueue.addTask(self.animateLastPlan)
return taskQueue
import PythonQt
from PythonQt import QtCore, QtGui, QtUiTools
def addWidgetsToDict(widgets, d):
    """Recursively index named widgets into d by their objectName.

    Widgets with an empty objectName are skipped entirely: neither they
    nor their children are added.
    """
    for w in widgets:
        name = w.objectName
        if not name:
            continue
        d[str(name)] = w
        addWidgetsToDict(w.children(), d)
class WidgetDict(object):
    """Exposes a widget tree's named children as attributes of this object."""

    def __init__(self, widgets):
        addWidgetsToDict(widgets, self.__dict__)
class ValveTaskPanel(object):
def __init__(self, valveDemo):
self.valveDemo = valveDemo
self.valveDemo.reachDepth = -0.1
self.valveDemo.speedLow = 10
loader = QtUiTools.QUiLoader()
uifile = QtCore.QFile(':/ui/ddValveTaskPanel.ui')
assert uifile.open(uifile.ReadOnly)
self.widget = loader.load(uifile)
self.ui = WidgetDict(self.widget.children())
self.ui.startButton.connect('clicked()', self.onStartClicked)
self.ui.footstepsButton.connect('clicked()', self.valveDemo.planFootstepsToStance)
self.ui.raiseArmButton.connect('clicked()', self.valveDemo.planPreGrasp)
self.ui.reachButton.connect('clicked()', self.reach)
self.ui.touchButton.connect('clicked()', self.grasp)
self.ui.turnButton.connect('clicked()', self.turnValve)
self.ui.fingersButton.connect('clicked()', self.setFingers)
self.ui.retractButton.connect('clicked()', self.retract)
self.ui.nominalButton.connect('clicked()', self.valveDemo.planNominal)
l = QtGui.QVBoxLayout(self.ui.imageFrame)
self._setupParams()
self._setupPropertiesPanel()
self._syncProperties()
self._initTaskPanel()
self._initTasks()
def onStartClicked(self):
self.valveDemo.findAffordance()
if self.valveDemo.valveAffordance is not None:
print 'Valve Demo: Start - Ready to proceed'
else:
print 'Valve Demo: Start - VALVE AFFORDANCE NOT FOUND'
# now get the planned turn angle and show it to the user
self.params.setProperty('Touch angle (deg)', self.valveDemo.getPlannedTouchAngleCoaxial())
def resetTouchAngle(self):
self.valveDemo.findAffordance()
self.params.setProperty('Touch angle (deg)', self.valveDemo.getPlannedTouchAngleCoaxial())
def closeHand(self):
self.valveDemo.closeHand(self.valveDemo.graspingHand)
def setFingers(self):
self.valveDemo.openPinch(self.valveDemo.graspingHand)
def reach(self):
self.valveDemo.coaxialPlanReach()
def grasp(self):
self.valveDemo.coaxialPlanTouch()
def turnValve(self):
self.valveDemo.coaxialPlanTurn()
def retract(self):
self.valveDemo.coaxialPlanRetract()
def _setupParams(self):
self.params = om.ObjectModelItem('Valve Task Params')
self.params.addProperty('Hand', 1, attributes=om.PropertyAttributes(enumNames=['Left', 'Right']))
self.params.addProperty('Turn direction', 0, attributes=om.PropertyAttributes(enumNames=['Clockwise', 'Counter clockwise']))
self.params.addProperty('Touch angle (deg)', 0)
#self.params.addProperty('Turn amount (deg)', 60)
self.params.properties.connectPropertyChanged(self.onPropertyChanged)
def _setupPropertiesPanel(self):
l = QtGui.QVBoxLayout(self.ui.propertyFrame)
l.setMargin(0)
self.propertiesPanel = PythonQt.dd.ddPropertiesPanel()
self.propertiesPanel.setBrowserModeToWidget()
l.addWidget(self.propertiesPanel)
self.panelConnector = propertyset.PropertyPanelConnector(self.params.properties, self.propertiesPanel)
def onPropertyChanged(self, propertySet, propertyName):
self._syncProperties()
def _syncProperties(self):
self.valveDemo.planFromCurrentRobotState = True
self.valveDemo.visOnly = False
self.valveDemo.graspingHand = self.params.getPropertyEnumValue('Hand').lower()
self.valveDemo.scribeDirection = 1 if self.params.getPropertyEnumValue('Turn direction') == 'Clockwise' else -1
self.valveDemo.setDesiredTouchAngleCoaxial(self.params.getProperty('Touch angle (deg)'))
#self.valveDemo.turnAngle = self.params.getProperty('Turn amount (deg)')
def onContinue(self):
self.completedTasks = []
self.taskQueue.reset()
for obj in self.taskTree.getSelectedTasks():
self.taskQueue.addTask(obj.task)
self.taskQueue.start()
def onStep(self):
assert not self.taskQueue.isRunning
tasks = self.taskTree.getSelectedTasks()
if not tasks:
return
task = tasks[0].task
self.nextStepTask = tasks[1].task if len(tasks) > 1 else None
self.completedTasks = []
self.taskQueue.reset()
self.taskQueue.addTask(task)
self.taskQueue.start()
def onPause(self):
if not self.taskQueue.isRunning:
return
self.nextStepTask = None
currentTask = self.taskQueue.currentTask
self.taskQueue.stop()
if currentTask:
currentTask.stop()
self.appendMessage('<font color="red">paused</font>')
def onTaskStarted(self, taskQueue, task):
msg = task.properties.getProperty('Name') + ' ... <font color="green">start</font>'
self.appendMessage(msg)
self.taskTree.selectTask(task)
item = self.taskTree.findTaskItem(task)
if len(self.completedTasks) and item.getProperty('Visible'):
self.appendMessage('<font color="red">paused</font>')
raise atq.AsyncTaskQueue.PauseException()
def onTaskEnded(self, taskQueue, task):
msg = task.properties.getProperty('Name') + ' ... <font color="green">end</font>'
self.appendMessage(msg)
self.completedTasks.append(task)
if self.taskQueue.tasks:
self.taskTree.selectTask(self.taskQueue.tasks[0])
elif self.nextStepTask:
self.taskTree.selectTask(self.nextStepTask)
#else:
# self.taskTree.selectTask(self.completedTasks[0])
def onTaskFailed(self, taskQueue, task):
msg = task.properties.getProperty('Name') + ' ... <font color="red">failed: %s</font>' % task.failReason
self.appendMessage(msg)
def onTaskPaused(self, taskQueue, task):
msg = task.properties.getProperty('Name') + ' ... <font color="red">paused</font>'
self.appendMessage(msg)
def onTaskException(self, taskQueue, task):
msg = task.properties.getProperty('Name') + ' ... <font color="red">exception:\n\n%s</font>' % traceback.format_exc()
self.appendMessage(msg)
def appendMessage(self, msg):
if msg == self.lastStatusMessage:
return
self.lastStatusMessage = msg
self.ui.outputConsole.append(msg.replace('\n', '<br/>'))
#print msg
def updateTaskStatus(self):
currentTask = self.taskQueue.currentTask
if not currentTask or not currentTask.statusMessage:
return
name = currentTask.properties.getProperty('Name')
status = currentTask.statusMessage
msg = name + ': ' + status
self.appendMessage(msg)
def onAcceptPrompt(self):
self.promptTask.accept()
self.promptTask = None
self.ui.promptLabel.text = ''
self.ui.promptAcceptButton.enabled = False
self.ui.promptRejectButton.enabled = False
def onRejectPrompt(self):
self.promptTask.reject()
self.promptTask = None
self.ui.promptLabel.text = ''
self.ui.promptAcceptButton.enabled = False
self.ui.promptRejectButton.enabled = False
def onTaskPrompt(self, task, message):
self.promptTask = task
self.ui.promptLabel.text = message
self.ui.promptAcceptButton.enabled = True
self.ui.promptRejectButton.enabled = True
def _initTaskPanel(self):
self.lastStatusMessage = ''
self.nextStepTask = None
self.completedTasks = []
self.taskQueue = atq.AsyncTaskQueue()
self.taskQueue.connectTaskStarted(self.onTaskStarted)
self.taskQueue.connectTaskEnded(self.onTaskEnded)
self.taskQueue.connectTaskPaused(self.onTaskPaused)
self.taskQueue.connectTaskFailed(self.onTaskFailed)
self.taskQueue.connectTaskException(self.onTaskException)
self.completedTasks = []
self.timer = TimerCallback(targetFps=2)
self.timer.callback = self.updateTaskStatus
self.timer.start()
rt.UserPromptTask.promptFunction = self.onTaskPrompt
rt.PrintTask.printFunction = self.appendMessage
self.taskTree = tmw.TaskTree()
self.ui.taskFrame.layout().insertWidget(0, self.taskTree.treeWidget)
l = QtGui.QVBoxLayout(self.ui.taskPropertiesGroupBox)
l.addWidget(self.taskTree.propertiesPanel)
PythonQt.dd.ddGroupBoxHider(self.ui.taskPropertiesGroupBox)
self.ui.taskStepButton.connect('clicked()', self.onStep)
self.ui.taskContinueButton.connect('clicked()', self.onContinue)
self.ui.taskPauseButton.connect('clicked()', self.onPause)
self.ui.promptAcceptButton.connect('clicked()', self.onAcceptPrompt)
self.ui.promptRejectButton.connect('clicked()', self.onRejectPrompt)
self.ui.promptAcceptButton.enabled = False
self.ui.promptRejectButton.enabled = False
def _initTasks(self):
# some helpers
def addTask(task, parent=None):
self.taskTree.onAddTask(task, copy=False, parent=parent)
def addFunc(func, name, parent=None):
addTask(rt.CallbackTask(callback=func, name=name), parent=parent)
def addValveTurn(parent=None):
group = self.taskTree.addGroup('Valve Turn', parent=parent)
# valve manip actions
addFunc(v.coaxialPlanReach, name='plan reach to valve', parent=group)
addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'), parent=group)
addFunc(v.commitManipPlan, name='execute manip plan', parent=group)
addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'), parent=group)
addFunc(v.coaxialPlanTouch, name='plan insert in valve', parent=group)
addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'), parent=group)
addFunc(v.commitManipPlan, name='execute manip plan', parent=group)
addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'), parent=group)
addFunc(v.coaxialPlanTurn, name='plan turn valve', parent=group)
addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'), parent=group)
addFunc(v.commitManipPlan, name='execute manip plan', parent=group)
addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'), parent=group)
addFunc(v.coaxialPlanRetract, name='plan retract', parent=group)
addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'), parent=group)
addFunc(v.commitManipPlan, name='execute manip plan', parent=group)
addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'), parent=group)
v = self.valveDemo
self.taskTree.removeAllTasks()
side = self.params.getPropertyEnumValue('Hand')
###############
# add the tasks
# prep
addTask(rt.CloseHand(name='close left hand', side='Left'))
addTask(rt.CloseHand(name='close right hand', side='Right'))
addTask(rt.SetNeckPitch(name='set neck position', angle=0))
addTask(rt.PlanPostureGoal(name='plan walk posture', postureGroup='General', postureName='safe nominal', side='Default'))
addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'))
addTask(rt.CommitManipulationPlan(name='execute manip plan', planName='safe nominal posture plan'))
addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'))
# fit
addTask(rt.WaitForMultisenseLidar(name='wait for lidar sweep'))
addTask(rt.UserPromptTask(name='fit valve', message='Please fit and approve valve affordance.'))
addTask(rt.FindAffordance(name='check valve affordance', affordanceName='valve'))
addFunc(self.resetTouchAngle, name='plan stance location')
# walk
addTask(rt.RequestFootstepPlan(name='plan walk to valve', stanceFrameName='valve grasp stance'))
addTask(rt.UserPromptTask(name='approve footsteps', message='Please approve footstep plan.'))
addTask(rt.CommitFootstepPlan(name='walk to valve', planName='valve grasp stance footstep plan'))
addTask(rt.WaitForWalkExecution(name='wait for walking'))
# refit
addTask(rt.SetNeckPitch(name='set neck position', angle=35))
addTask(rt.WaitForMultisenseLidar(name='wait for lidar sweep'))
addTask(rt.UserPromptTask(name='fit value', message='Please fit and approve valve affordance.'))
addFunc(self.resetTouchAngle, name='check valve affordance')
#addTask(rt.UserPromptTask(name='approve spoke location', message='Please approve valve spokes and touch angle.'))
# set fingers
addTask(rt.CloseHand(name='set finger positions', side=side, mode='Basic', amount=20))
# add valve turns
for i in range(0, 5):
addValveTurn()
|
"""
Tweets lib application file.
Handle fetching and storing of profile and tweet data.
Fetch profile or tweet data from the Twitter API using tweepy. Then insert the
data into the Tweet and Profile tables of the local database (see
models/tweets.py file). Also apply Campaign and Category labels.
That is done here either using the ORM (custom classes to represent tables
in the database) or by building and executing native SQL statements, which will be
several times faster.
For a user interface on fetching and inserting data, see the utils directory.
Steps required to get profiles and their tweets:
1. Start with a Twitter screen name or screen names, read as
list in the command-line arguments or read from a text file.
2. Get the Profile data for the users and store in the database, either
creating the record or updating if record exists in Profile table.
3. Get tweets from the timeline of the user and store in Tweets table, with a
link back to the Profile record. Repeat for all profiles of interest.
"""
import json
import math
import tweepy
from sqlobject import SQLObjectNotFound
from sqlobject.dberrors import DuplicateEntryError
from sqlobject.sqlbuilder import Insert, LIKE
from tweepy.error import TweepError
import lib
import lib.text_handling
from lib import database as db
from lib.twitter_api import authentication
def _parse_tweepy_profile(fetchedProfile):
"""
:param tweepy.User fetchedProfile: User data as fetched from Twitter API.
:return: Simplified user data, as a dict.
"""
return {
'guid': fetchedProfile.id,
'screenName': fetchedProfile.screen_name,
'name': fetchedProfile.name,
'description': fetchedProfile.description,
'location': fetchedProfile.location,
'imageUrl': fetchedProfile.profile_image_url_https,
'followersCount': fetchedProfile.followers_count,
'statusesCount': fetchedProfile.statuses_count,
'verified': fetchedProfile.verified,
}
def _parse_tweepy_tweet(fetchedTweet, profileID):
"""
:param tweepy.Status fetchedTweet: Tweet data as fetched from the Twitter
API.
:param int profileID: ID of the Profile record in the database which
is the tweet author.
:return tweetData: Simplified tweet data, as a dict.
"""
# Assume extended mode (as set on the API request), otherwise fall back to
# standard mode.
try:
text = fetchedTweet.full_text
except AttributeError:
text = fetchedTweet.text
return {
'guid': fetchedTweet.id,
'profileID': profileID,
'createdAt': fetchedTweet.created_at,
'message': text,
'favoriteCount': fetchedTweet.favorite_count,
'retweetCount': fetchedTweet.retweet_count,
'inReplyToTweetGuid': fetchedTweet.in_reply_to_status_id,
'inReplyToProfileGuid': fetchedTweet.in_reply_to_user_id,
}
def _getProfile(APIConn, screenName=None, userID=None):
"""
Get data of one profile from the Twitter API, for a specified user.
Either screenName string or userID integer must be specified, but not both.
:param APIConn: authenticated API connection object.
:param screenName: The name of Twitter user to fetch, as a string.
:param userID: The ID of the Twitter user to fetch, as an integer.
Cannot be set if screenName is also set.
:return tweepy.User: instance for requested Twitter user.
"""
assert screenName or userID, \
"Expected either screenName (str) or userID (int) to be set."
assert not (screenName and userID), \
"Cannot set both screenName ({screenName}) and userID ({userID})."\
.format(
screenName=screenName,
userID=userID
)
if screenName:
print("Fetching user: @{screenName}".format(screenName=screenName))
params = {'screen_name': screenName}
else:
print("Fetching user ID: {userID}".format(userID=userID))
params = {'user_id': userID}
return APIConn.get_user(**params)
def insertOrUpdateProfile(profile):
    """
    Insert record in Profile table or update existing record if it exists.

    Replace values in existing record with those fetched from Twitter
    API, assuming that any value (except the GUID) could change. Even if their
    screen name does change, we know that it is the same Profile based on the
    GUID and so can update the existing record instead of inserting a new one.

    NOTE: on the duplicate path, 'guid' is popped from profileData; when the
    caller passed a dict (rather than a tweepy.User), that caller's dict is
    mutated.

    :param [tweepy.User, dict] profile: Data for a Twitter user.

    :return models.tweets.Profile profileRec: Local record for tweet author.
    """
    if isinstance(profile, dict):
        profileData = profile
    else:
        profileData = _parse_tweepy_profile(profile)

    try:
        # Attempt to insert new row, assuming GUID or screenName do not exist.
        profileRec = db.Profile(**profileData)
    except DuplicateEntryError:
        # Row exists - look it up by GUID and overwrite the other fields.
        guid = profileData.pop('guid')
        profileRec = db.Profile.byGuid(guid)
        profileRec.set(**profileData)

    return profileRec
def insertOrUpdateProfileBatch(screenNames):
    """
    Get Twitter profile data from the Twitter API and store in the database.

    Profile records are created, or updated if they already exist. Failures
    (missing/suspended accounts, or db errors) are logged and skipped so the
    rest of the batch still runs.

    :param screenNames: list of user screen names as strings, to be fetched
        from the Twitter API.

    :return successScreenNames: list of user screen names as strings, for the
        Profiles which were successfully fetched then inserted/updated in
        the db.
    :return failedScreenNames: list of user screen names as strings, for the
        Profiles which could not be fetched from the Twitter API and
        inserted/updated in the db.
    """
    APIConn = authentication.getAPIConnection()

    successScreenNames = []
    failedScreenNames = []

    for s in screenNames:
        try:
            fetchedProf = _getProfile(APIConn, screenName=s)
        except TweepError as e:
            # The profile could be missing or suspended, so we log it
            # and then skip inserting or updating (since we have no data).
            print("Could not fetch user: @{name}. {error}. {msg}".format(
                name=s,
                error=type(e).__name__,
                msg=str(e)
            ))
            failedScreenNames.append(s)
        else:
            try:
                localProf = insertOrUpdateProfile(fetchedProf)
                # Represent log of followers count visually as repeated stars,
                # sidestepping error for log of zero.
                logFollowers = int(math.log10(localProf.followersCount)) \
                    if localProf.followersCount else 0
                stars = '*' * logFollowers
                print("Inserted/updated user: {name:20} {stars}".format(
                    name=u'@' + localProf.screenName,
                    stars=stars
                ))
                successScreenNames.append(s)
            except Exception as e:
                # Broad catch is deliberate: one bad record must not abort
                # the whole batch; the failure is logged and reported back.
                print((
                    "Could not insert/update user: @{name}. {error}. {msg}"
                    .format(name=s, error=type(e).__name__, msg=str(e))
                ))
                failedScreenNames.append(s)

    return successScreenNames, failedScreenNames
def _getTweets(APIConn, screenName=None, userID=None, tweetsPerPage=200,
pageLimit=1, extended=True):
"""
Get tweets of one profile from the Twitter API, for a specified user.
Either screenName string or userID integer must be specified, but not both.
The result of (tweetsPerPage)*(pageLimit) indicates the total number
of tweets requested from the API on calling this function.
:param APIConn: authenticated API connection object.
:param screenName: Default None. The name of Twitter user to fetch, as
a string.
:param userID: Default None. The ID of the Twitter user to fetch, as an
integer.
:param tweetsPerPage: Default 200. Count of tweets to get on a page.
The API''s limit is 200 tweets, but a lower value can be used.
The `pageLimit` argument can be used to do additional calls
to get tweets above the 200 limit - see `tweepy.Cursor` method.
:param pageLimit: Default 1. Number of pages of tweets to get by doing
a sequence of queries with a cursor. The number of tweets
on each page is determined by `tweetsPerPage` argument.
:param extended: If True, get the expanded tweet message instead of the
truncated form.
:return list tweetsList: list of tweepy tweet objects for the requested
user.
"""
print("Fetching tweets for user: {0}".format(screenName if screenName
else userID))
assert screenName or userID, \
"Expected either screenName (str) or userID (int) to be set."
assert not (screenName and userID), "Cannot request both screenName and"\
" userID."
params = {'count': tweetsPerPage}
if extended:
params['tweet_mode'] = 'extended'
if screenName:
params['screen_name'] = screenName
else:
params['user_id'] = userID
if pageLimit == 1:
# Do a simple query without paging.
tweets = APIConn.user_timeline(**params)
else:
tweets = []
# Send the request parameters to Cursor object, with the page limit.
for page in tweepy.Cursor(APIConn.user_timeline, **params)\
.pages(pageLimit):
tweets.extend(page)
return tweets
def insertOrUpdateTweet(tweet, profileID, writeToDB=True,
                        onlyUpdateEngagements=True):
    """
    Create or update a single record in the Tweet table.

    Try to insert a fresh Tweet row; if the GUID already exists locally,
    fetch that row and update it instead.

    :param [tweepy.Status, dict] tweet: Data for a single Tweet as fetched
        from the Twitter API.
    :param profileID: ID of the tweet author's local Profile record - the
        db primary key, NOT the Profile GUID. Used to set the Tweet
        object's foreign key.
    :param writeToDB: Default True. If False, skip the db write and only
        return the formatted data.
    :param onlyUpdateEngagements: Default True to restrict updates of an
        existing row to the favorite and retweet counts. If False, update
        the other fields too. Those are expected to be static on the
        Twitter API, but if rules change on this repo then it is useful to
        apply them historically on existing Tweet records. This flag only
        affects existing records.

    :return dict data: Formatted Tweet data.
    :return tweetRec: The inserted or updated Tweet record if writeToDB is
        True, otherwise None.
    """
    tweetData = tweet if isinstance(tweet, dict) \
        else _parse_tweepy_tweet(tweet, profileID)
    tweetData['createdAt'] = lib.set_tz(tweetData['createdAt'])

    tweetRec = None
    if writeToDB:
        try:
            # Optimistic insert, assuming the GUID is not in the db yet.
            tweetRec = db.Tweet(**tweetData)
        except DuplicateEntryError:
            tweetRec = db.Tweet.byGuid(tweetData.pop('guid'))
            if onlyUpdateEngagements:
                tweetRec.set(
                    favoriteCount=tweetData['favoriteCount'],
                    retweetCount=tweetData['retweetCount'],
                )
            else:
                tweetRec.set(**tweetData)

    return tweetData, tweetRec
def insertOrUpdateTweetBatch(profileRecs,
                             tweetsPerProfile=200,
                             verbose=False,
                             writeToDB=True,
                             campaignRec=None,
                             onlyUpdateEngagements=True):
    """
    Get Twitter tweet data from the Twitter API for a batch of profiles
    and store their tweets in the database.

    The verbose and writeToDB flags can be used together to print tweet
    data which would be inserted into the database without actually inserting
    it. This can be used to preview tweet data without increasing storage or
    using time to do inserts and updates.

    :param profileRecs: list of Profile objects, to create or update
        tweets for. This might be a list from the Profile table which
        has been filtered based on a job schedule, or Profiles which
        match criteria such as high follower count.
    :param tweetsPerProfile: Default 200. Count of tweets to get for each
        profile, as an integer. If this is 200 or less, then page limit is
        left at 1 and the items per page count is reduced. If this is
        more than 200, then the items per page count is left at 200
        and page limit is adjusted to get a number of tweets as the
        next multiple of 200.
            e.g. 550 tweets needs 2 pages to get the first 400 tweets,
            plus a 3rd page for the additional 150 tweets.
            We simplify to get 200*3 = 600 tweets, to keep the count
            consistent on each query.
        Note that even if 200 tweets are requested, the API sometimes returns
        only 199 and the user may have posted fewer than the requested tweets.

        The limit for a single request to the API is 200, therefore any
        number up to 200 has the same rate limit cost. It may be useful to
        set a number here as 200 or less if we want to get through all the
        users quickly, as this takes fewer API queries and fewer db inserts
        or updates. Also, consider that a very low number may lead to
        deadtime, where the script takes a fixed time to get 200 or 1 tweets
        and now that it has processed the 1 requested and the window limit is
        hit, it has no Tweet processing to do while waiting for the next rate
        limited window. Though a low value will mean less storage space
        is required.
    :param verbose: Default False. If True, print the data used to create
        a local Tweet record. This data can be printed regardless of whether
        the data is written to the db record or not.
    :param writeToDB: Default True. If True, write the fetched tweets
        to local database, otherwise print and discard them. This is useful
        when used in combination with verbose flag which prints the data.
    :param campaignRec: Campaign record to assign to the local Tweet records.
        Default None to not assign any Campaign.
    :param onlyUpdateEngagements: Default True to only update the favorite
        and retweet count of the tweet in the local db. If False, update
        other fields too. Those are expected to be static on the Twitter API,
        but if rules change on this repo then it is useful to apply them
        historically on existing Tweet records. This flag only affects
        existing records.

    :return: None
    """
    APIConn = authentication.getAPIConnection()

    if tweetsPerProfile <= 200:
        tweetsPerPage = tweetsPerProfile
        pageLimit = 1
    else:
        tweetsPerPage = 200
        # Round up to get the last page, which might have fewer items.
        pageLimit = math.ceil(tweetsPerProfile / tweetsPerPage)

    for p in profileRecs:
        try:
            fetchedTweets = _getTweets(
                APIConn,
                userID=p.guid,
                tweetsPerPage=tweetsPerPage,
                pageLimit=pageLimit
            )
        except TweepError as e:
            print(
                "Could not fetch tweets for user: @{screenName}."
                " {type}. {msg}".format(
                    screenName=p.screenName,
                    type=type(e).__name__,
                    msg=str(e)
                ))
        else:
            print('User: {0}'.format(p.screenName))
            if writeToDB:
                print("Inserting/updating tweets in db...")
            else:
                print("Displaying tweets but not inserting/updating...")

            added = errors = 0
            lastIndex = len(fetchedTweets) - 1
            for index, f in enumerate(fetchedTweets):
                try:
                    data, tweetRec = insertOrUpdateTweet(
                        tweet=f,
                        profileID=p.id,
                        writeToDB=writeToDB,
                        onlyUpdateEngagements=onlyUpdateEngagements
                    )
                    if tweetRec and campaignRec:
                        try:
                            campaignRec.addTweet(tweetRec)
                        except DuplicateEntryError:
                            # Ignore error if Tweet was already assigned.
                            pass
                    if verbose:
                        if tweetRec:
                            tweetRec.prettyPrint()
                        else:
                            # No record was created, so use the data dict.
                            m = data['message']
                            created = data['createdAt']
                            data['message'] = lib.text_handling.flattenText(m)
                            data['createdAt'] = str(lib.set_tz(created))
                            # TODO: Check if this will raise an error
                            # on unicode symbols in message.
                            print(json.dumps(data, indent=4))
                    added += 1
                except Exception as e:
                    print(
                        "Could not insert/update tweet `{id}` for user"
                        " @{screenName}. {type}. {msg}".format(
                            id=f.id,
                            screenName=p.screenName,
                            type=type(e).__name__,
                            msg=str(e)
                        ))
                    errors += 1

                total = added + errors
                # Print stats on every 10 processed and on the last item.
                # Compare positions rather than `f == fetchedTweets[-1]`,
                # which depends on tweet object equality and fires early
                # if the final tweet has a duplicate earlier in the list.
                if total % 10 == 0 or index == lastIndex:
                    print(
                        "Total: {total:2,d}. Added: {added:2,d}. "
                        "Errors: {errors:2,d}.".format(
                            total=total,
                            added=added,
                            errors=errors
                        ))
def lookupTweetGuids(APIConn, tweetGuids, onlyUpdateEngagements=True):
    """
    Look up tweet GUIDs and store the full tweets and authors in the db.

    The received tweet GUIDs (IDs in the Twitter API) are broken into
    chunks of at most 100 items - the limit of the statuses lookup
    endpoint - then each chunk is fetched from the API and its tweets
    and their authors are inserted or updated in the database. A Cursor
    cannot be used here, since the endpoint limits the item count per
    request and only ever returns a single page.

    Note that tweet_mode='extended' is not available in tweepy for
    statuses_lookup, though it is used on the other endpoints.
    See https://github.com/tweepy/tweepy/issues/785.

    :param APIConn: authorised tweepy.API connection.
    :param tweetGuids: list of Twitter API tweet GUIDs, as integers or
        strings.
    :param onlyUpdateEngagements: Default True to only update the favorite
        and retweet count of an existing tweet in the local db. If False,
        update the other fields too. This flag only affects existing
        records.

    :return: None
    """
    CHUNK_SIZE = 100
    for start in range(0, len(tweetGuids), CHUNK_SIZE):
        chunk = tweetGuids[start:(start + CHUNK_SIZE)]

        for fetched in APIConn.statuses_lookup(chunk):
            # Store the author first, so the Tweet row can point at the
            # local Profile record's ID.
            authorRec = insertOrUpdateProfile(profile=fetched.author)
            data, tweetRec = insertOrUpdateTweet(
                tweet=fetched,
                profileID=authorRec.id,
                onlyUpdateEngagements=onlyUpdateEngagements
            )
            tweetRec.prettyPrint()
def updateTweetEngagements(APIConn, tweetRecSelect):
    """
    Refresh the engagement counts of local tweet records.

    Extract the GUIDs from the given Tweet records, fetch their current
    favorite and retweet counts from the API and store the new values.
    Any looked-up GUID the API does not return (deleted/private/reported
    tweet) is simply skipped, since there is nothing to save for it.

    The records are split into chunks of up to 100 items, the maximum
    number of tweet IDs accepted by the statuses lookup endpoint.

    TODO: Instead of expecting tweet record select results, this could be
    more efficient by doing a set filtered to where GUID is t.id, provided
    the record is there, rather than getting the object and then setting.
    This can be even more efficient by fetching the tweets from the API
    then doing a single UPDATE query using native SQL, instead of using
    the ORM.

    :param APIConn: API Connection.
    :param tweetRecSelect: SQLObject select results for model.Tweet
        instances, or simply a list of the instances.

    :return: None
    """
    # Materialise the select once, so only a single fetch query runs. A
    # sliced select cannot be counted, and the full total is needed before
    # splitting into chunks of 100 items.
    guids = [rec.guid for rec in list(tweetRecSelect)]

    for start in range(0, len(guids), 100):
        fetched = APIConn.statuses_lookup(guids[start:(start + 100)])

        for remote in fetched:
            localRec = db.Tweet.byGuid(remote.id)
            previousFav = localRec.favoriteCount
            previousRt = localRec.retweetCount
            localRec.set(
                favoriteCount=remote.favorite_count,
                retweetCount=remote.retweet_count
            )
            print(
                "Updated tweet GUID: {guid}, fav: {fav:3,d} ({oldFav:3,d}),"
                " RT: {rt:3,d} ({oldRt:3,d})".format(
                    guid=remote.id,
                    fav=remote.favorite_count,
                    oldFav=previousFav,
                    rt=remote.retweet_count,
                    oldRt=previousRt
                ))
def assignProfileCategory(categoryName, profileRecs=None, screenNames=None):
    """
    Assign Categories to Profiles.

    Fetch a Category, or create it if it does not exist. Put Profiles in
    the Category, ignoring links which already exist. An error is raised
    if a Profile does not exist, but Profiles earlier in the list will
    already have been allocated before the error occurred.

    :param categoryName: String. Get a category by name and create it
        if it does not exist yet. If Profile records or Profile screen names
        are provided, then assign all of those Profiles to the category.
        Both Profile inputs can be left as not set to just create the
        Category.
    :param profileRecs: Default None. List of db Profile records to be
        assigned to the category. Cannot be empty if screenNames is also
        empty.
    :param screenNames: Default None. List of Profile screen names to be
        assigned to the category. The screen names should exist as Profiles
        in the db already (matching on exact case), otherwise an error will
        be raised. The screenNames argument cannot be empty if profileRecs
        is also empty.

    :return tuple of new and existing counts.
        - newCnt: Count of new Profile Category links created.
        - existingCnt: Count of Profile Category links not created because
          they already exist.
    """
    newCnt = 0
    existingCnt = 0

    try:
        categoryRec = db.Category.byName(categoryName)
    except SQLObjectNotFound:
        categoryRec = db.Category(name=categoryName)
        print("Created category: {0}".format(categoryName))

    if profileRecs or screenNames:
        # Test for falsiness rather than `is None`, so that an explicitly
        # empty profileRecs list still lets screenNames be used. This also
        # matches the handling in assignTweetCampaign.
        if not profileRecs:
            # Use screen names to populate the profileRecs list.
            profileRecs = []
            for screenName in screenNames:
                # Get user using exact case of screen name, otherwise search
                # case insensitively using LIKE in SQLite. Assume Twitter
                # prevents two users having the same screen name across case,
                # though SQLObjectIntegrityError will still be raised here
                # for that edge case.
                try:
                    profile = db.Profile.byScreenName(screenName)
                except SQLObjectNotFound:
                    profile = db.Profile.select(
                        LIKE(
                            db.Profile.q.screenName,
                            screenName
                        )
                    ).getOne(None)
                if not profile:
                    raise SQLObjectNotFound(
                        "Cannot assign Category since Profile screen name"
                        " is not in db: {0}".format(screenName)
                    )
                profileRecs.append(profile)

        for profileRec in profileRecs:
            try:
                categoryRec.addProfile(profileRec)
                newCnt += 1
            except DuplicateEntryError:
                # The link already exists - count it but do not fail.
                existingCnt += 1

    return newCnt, existingCnt
def assignTweetCampaign(campaignRec, tweetRecs=None, tweetGuids=None):
"""
Assign Campaigns to Tweets using the ORM.
Fetch a Campaign and assign it to Tweets, ignoring existing links
and raising an error on a Campaign which does not exist. For large
batches of inserts, rather use bulkAssignTweetCampaign.
Search query is not considered here and should be set using the
campaign manager utility or the ORM directly.
:param campaignRec: Campaign record to assign to all Tweet
records indicated with tweetRecs or tweetGuids inputs.
Both Tweet inputs can be left as not set to just create the
Campaign. Note that the assignProfileCategory function expects
a Category name because it can be created there, but here the actual
Campaign record is expected because creation must be handled with the
Campaign manager utility instead because of the search query field.
:param tweetRecs: Default None. List of db Tweet records to be
assigned to the campaign. Cannot be empty if tweetGuids is also empty.
:param tweetGuids: Default None. List of Tweet GUIDs to be assigned
to the campaign. The GUIDs should exist as Tweets in the db already,
otherwise an error will be printed and ignored. The tweetGuids
argument cannot be empty if tweetRecs is also empty.
:return newCnt: Count of new Tweet Campaign links created.
:return existingCnt: Count of Tweet Campaign links not created because
they already exist.
"""
newCnt = 0
existingCnt = 0
if not tweetRecs:
# Use GUIDs to populate tweetRecs list.
tweetRecs = []
for guid in tweetGuids:
try:
tweet = db.Tweet.byGuid(guid)
except SQLObjectNotFound:
raise SQLObjectNotFound("Cannot assign Campaign as Tweet"
" GUID is not in db: {0}"
.format(guid))
tweetRecs.append(tweet)
for tweet in tweetRecs:
try:
campaignRec.addTweet(tweet)
newCnt += 1
except DuplicateEntryError:
existingCnt += 1
return newCnt, existingCnt
def bulkAssignProfileCategory(categoryID, profileIDs):
    """
    Link one Category to many Profiles with a single INSERT statement.

    The Category ID and the Profile IDs are assumed to already exist in
    the db. Existing profile_category links are allowed to fail silently
    through the INSERT OR IGNORE syntax.

    :param categoryID: Category record ID to assign to Profile records.
    :param profileIDs: Iterable of Profile record IDs which must be linked
        to the Category record.

    :return SQL: Multi-line SQL statement which was executed.
    """
    rows = [(categoryID, profileID) for profileID in profileIDs]
    statement = Insert(
        'profile_category',
        template=['category_id', 'profile_id'],
        valueList=rows
    )
    SQL = db.conn.sqlrepr(statement).replace("INSERT", "INSERT OR IGNORE")
    db.conn.query(SQL)

    return SQL
def bulkAssignTweetCampaign(campaignID, tweetIDs):
    """
    Link one Campaign to many Tweets with a single INSERT statement.

    The Campaign ID and the Tweet IDs are assumed to already exist in the
    db. Existing tweet_campaign links are allowed to fail silently through
    the INSERT OR IGNORE syntax. See the SQLite INSERT documentation
    diagram syntax:
        http://www.sqlite.org/lang_insert.html

    A single INSERT statement is done here, since a mass-insertion using
    the ORM is inefficient:
        http://www.sqlobject.org/FAQ.html#how-to-do-mass-insertion

    The links in tweet_campaign are relatively simple and require
    validation at the schema level rather than the ORM level, therefore it
    is safe to use a native SQL statement through sqlbuilder. The
    implementation is based on an example here:
        http://www.sqlobject.org/SQLBuilder.html#insert

    :param campaignID: Campaign record ID to assign to Tweet records.
    :param tweetIDs: Iterable of Tweet record IDs which must be linked to
        the Campaign record.

    :return SQL: Multi-line SQL statement which was executed.
    """
    rows = [(campaignID, tweetID) for tweetID in tweetIDs]
    statement = Insert(
        'tweet_campaign',
        template=['campaign_id', 'tweet_id'],
        valueList=rows
    )
    SQL = db.conn.sqlrepr(statement).replace("INSERT", "INSERT OR IGNORE")
    db.conn.query(SQL)

    return SQL
feat: Add NULL handling in tweets.py
"""
Tweets lib application file.
Handle fetching and storing of profile and tweet data.
Fetch profile or tweet data from the Twitter API using tweepy. Then insert the
data into the Tweet and Profile tables of the local database (see
models/tweets.py file). Also apply Campaign and Category labels.
That is done here either using the ORM (custom classes to represent tables
in the database) or by building and executing native SQL statements which will be
several times faster.
For a user interface on fetching and inserting data, see the utils directory.
Steps required to get profiles and their tweets:
1. Start with a Twitter screen name or screen names, read as
list in the command-line arguments or read from a text file.
2. Get the Profile data for the users and store in the database, either
creating the record or updating if record exists in Profile table.
3. Get tweets from the timeline of the user and store in Tweets table, with a
link back to the Profile record. Repeat for all profiles of interest.
"""
import json
import math
import tweepy
from sqlobject import SQLObjectNotFound
from sqlobject.dberrors import DuplicateEntryError
from sqlobject.sqlbuilder import Insert, LIKE
from tweepy.error import TweepError
import lib
import lib.text_handling
from lib import database as db
from lib.twitter_api import authentication
# This null character is invisible but appears sometimes such in profile
# description from Twitter and cannot be inserted due to SQLite execute error.
NULL = '\x00'
# TODO Can this be done as an override for all fields when inserting into the
# model? Like for init / update or similar.
def clean(v):
return v.replace(NULL, '')
def _parse_tweepy_profile(fetchedProfile):
"""
:param tweepy.User fetchedProfile: User data as fetched from Twitter API.
:return: Simplified user data, as a dict.
"""
return {
'guid': fetchedProfile.id,
'screenName': fetchedProfile.screen_name,
'name': fetchedProfile.name,
'description': clean(fetchedProfile.description),
'location': fetchedProfile.location,
'imageUrl': fetchedProfile.profile_image_url_https,
'followersCount': fetchedProfile.followers_count,
'statusesCount': fetchedProfile.statuses_count,
'verified': fetchedProfile.verified,
}
def _parse_tweepy_tweet(fetchedTweet, profileID):
"""
:param tweepy.Status fetchedTweet: Tweet data as fetched from the Twitter
API.
:param int profileID: ID of the Profile record in the database which
is the tweet author.
:return tweetData: Simplified tweet data, as a dict.
"""
# Assume extended mode (as set on the API request), otherwise fall back to
# standard mode.
try:
text = fetchedTweet.full_text
except AttributeError:
text = fetchedTweet.text
return {
'guid': fetchedTweet.id,
'profileID': profileID,
'createdAt': fetchedTweet.created_at,
'message': text,
'favoriteCount': fetchedTweet.favorite_count,
'retweetCount': fetchedTweet.retweet_count,
'inReplyToTweetGuid': fetchedTweet.in_reply_to_status_id,
'inReplyToProfileGuid': fetchedTweet.in_reply_to_user_id,
}
def _getProfile(APIConn, screenName=None, userID=None):
"""
Get data of one profile from the Twitter API, for a specified user.
Either screenName string or userID integer must be specified, but not both.
:param APIConn: authenticated API connection object.
:param screenName: The name of Twitter user to fetch, as a string.
:param userID: The ID of the Twitter user to fetch, as an integer.
Cannot be set if screenName is also set.
:return tweepy.User: instance for requested Twitter user.
"""
assert screenName or userID, \
"Expected either screenName (str) or userID (int) to be set."
assert not (screenName and userID), \
"Cannot set both screenName ({screenName}) and userID ({userID})."\
.format(
screenName=screenName,
userID=userID
)
if screenName:
print("Fetching user: @{screenName}".format(screenName=screenName))
params = {'screen_name': screenName}
else:
print("Fetching user ID: {userID}".format(userID=userID))
params = {'user_id': userID}
return APIConn.get_user(**params)
def insertOrUpdateProfile(profile: [tweepy.User, dict]):
    """
    Create a Profile record, or update it if the user already exists.

    Values on an existing record are replaced with those fetched from the
    Twitter API, on the assumption that anything except the GUID may
    change. Even if the screen name changes, the GUID still identifies it
    as the same Profile, so the existing record is updated rather than a
    new one inserted.

    :param [tweepy.User, dict] profile: Data for a Twitter user.

    :return models.tweets.Profile profileRec: Local record for the user.
    """
    profileData = profile if isinstance(profile, dict) \
        else _parse_tweepy_profile(profile)
    try:
        # Optimistic insert, assuming the GUID and screenName are new.
        return db.Profile(**profileData)
    except DuplicateEntryError:
        # The user exists already - update the existing row by GUID.
        existingRec = db.Profile.byGuid(profileData.pop('guid'))
        existingRec.set(**profileData)
        return existingRec
def insertOrUpdateProfileBatch(screenNames):
    """
    Get Twitter profile data from the Twitter API and store in the database.

    Profile records are created, or updated if they already exist.

    :param screenNames: list of user screen names as strings, to be fetched
        from the Twitter API.

    :return successScreenNames: list of user screen names as strings, for the
        Profiles which were successfully fetched then inserted/updated in
        the db.
    :return failedScreenNames: list of user screen names as strings, for the
        Profiles which could not be fetched from the Twitter API and
        inserted/updated in the db.
    """
    APIConn = authentication.getAPIConnection()
    successScreenNames = []
    failedScreenNames = []
    for s in screenNames:
        try:
            fetchedProf = _getProfile(APIConn, screenName=s)
        except TweepError as e:
            # The profile could be missing or suspended, so we log it
            # and then skip inserting or updating (since we have no data).
            print("Could not fetch user: @{name}. {error}. {msg}".format(
                name=s,
                error=type(e).__name__,
                msg=str(e)
            ))
            failedScreenNames.append(s)
        else:
            try:
                localProf = insertOrUpdateProfile(fetchedProf)
                # Represent log of followers count visually as repeated stars,
                # sidestepping error for log of zero.
                logFollowers = int(math.log10(localProf.followersCount)) \
                    if localProf.followersCount else 0
                stars = '*' * logFollowers
                print("Inserted/updated user: {name:20} {stars}".format(
                    name=u'@' + localProf.screenName,
                    stars=stars
                ))
                successScreenNames.append(s)
            # Catch broadly so one bad profile does not abort the whole
            # batch; the failure is reported in the returned lists.
            except Exception as e:
                print((
                    "Could not insert/update user: @{name}. {error}. {msg}"
                    .format(name=s, error=type(e).__name__, msg=str(e))
                ))
                failedScreenNames.append(s)
    return successScreenNames, failedScreenNames
def _getTweets(APIConn, screenName=None, userID=None, tweetsPerPage=200,
               pageLimit=1, extended=True):
    """
    Get tweets of one profile from the Twitter API, for a specified user.

    Either screenName string or userID integer must be specified, but not
    both.

    The result of (tweetsPerPage)*(pageLimit) indicates the total number
    of tweets requested from the API on calling this function.

    :param APIConn: authenticated API connection object.
    :param screenName: Default None. The name of Twitter user to fetch, as
        a string.
    :param userID: Default None. The ID of the Twitter user to fetch, as an
        integer.
    :param tweetsPerPage: Default 200. Count of tweets to get on a page.
        The API's limit is 200 tweets, but a lower value can be used.
        The `pageLimit` argument can be used to do additional calls
        to get tweets above the 200 limit - see `tweepy.Cursor` method.
    :param pageLimit: Default 1. Number of pages of tweets to get by doing
        a sequence of queries with a cursor. The number of tweets
        on each page is determined by `tweetsPerPage` argument.
    :param extended: If True, get the expanded tweet message instead of the
        truncated form.

    :return list tweetsList: list of tweepy tweet objects for the requested
        user.
    """
    print("Fetching tweets for user: {0}".format(screenName if screenName
                                                 else userID))
    assert screenName or userID, \
        "Expected either screenName (str) or userID (int) to be set."
    assert not (screenName and userID), "Cannot request both screenName and"\
        " userID."
    params = {'count': tweetsPerPage}
    if extended:
        # Ask the API for the full, untruncated tweet text.
        params['tweet_mode'] = 'extended'
    if screenName:
        params['screen_name'] = screenName
    else:
        params['user_id'] = userID
    if pageLimit == 1:
        # Do a simple query without paging.
        tweets = APIConn.user_timeline(**params)
    else:
        tweets = []
        # Send the request parameters to Cursor object, with the page limit.
        for page in tweepy.Cursor(APIConn.user_timeline, **params)\
                .pages(pageLimit):
            tweets.extend(page)
    return tweets
def insertOrUpdateTweet(tweet, profileID, writeToDB=True,
                        onlyUpdateEngagements=True):
    """
    Insert or update one record in the Tweet table.

    Attempt to insert a new tweet row, but if the GUID exists locally then
    retrieve and update the existing record.

    :param [tweepy.Status, dict] tweet: Data for a single Tweet as fetched
        from the Twitter API.
    :param profileID: The ID of the tweet's author, as an integer from
        the Profile ID column in the local db and NOT the Profile GUID.
        This is used to set the Tweet object's foreign key.
    :param writeToDB: Default True. If True, write the fetched tweets
        to local database, otherwise print and discard them.
    :param onlyUpdateEngagements: Default True to only update the favorite
        and retweet count of the tweet in the local db. If False, update
        other fields too. Those are expected to be static on the Twitter API,
        but if rules change on this repo then it is useful to apply them
        historically on existing Tweet records. This flag only affects
        existing records.

    :return dict data: Formatted Tweet data.
    :return tweetRec: If writeToDB is True, then return the Tweet record
        which was inserted or updated. Otherwise return None.
    """
    if isinstance(tweet, dict):
        # Already in the local format - no parsing needed.
        tweetData = tweet
    else:
        tweetData = _parse_tweepy_tweet(tweet, profileID)
    # Apply the project's timezone handling to the created timestamp
    # before it is stored (see lib.set_tz for the exact semantics).
    tweetData['createdAt'] = lib.set_tz(tweetData['createdAt'])
    if writeToDB:
        try:
            # Optimistic insert, assuming the GUID is not in the db yet.
            tweetRec = db.Tweet(**tweetData)
        except DuplicateEntryError:
            # The tweet exists already - fetch it by GUID and update it.
            guid = tweetData.pop('guid')
            tweetRec = db.Tweet.byGuid(guid)
            if onlyUpdateEngagements:
                tweetRec.set(
                    favoriteCount=tweetData['favoriteCount'],
                    retweetCount=tweetData['retweetCount'],
                )
            else:
                tweetRec.set(**tweetData)
    else:
        tweetRec = None
    return tweetData, tweetRec
def insertOrUpdateTweetBatch(profileRecs,
                             tweetsPerProfile=200,
                             verbose=False,
                             writeToDB=True,
                             campaignRec=None,
                             onlyUpdateEngagements=True):
    """
    Get Twitter tweet data from the Twitter API for a batch of profiles
    and store their tweets in the database.

    The verbose and writeToDB flags can be used together to print tweet
    data which would be inserted into the database without actually inserting
    it. This can be used to preview tweet data without increasing storage or
    using time to do inserts and updates.

    :param profileRecs: list of Profile objects, to create or update
        tweets for. This might be a list from the Profile table which
        has been filtered based on a job schedule, or Profiles which
        match criteria such as high follower count.
    :param tweetsPerProfile: Default 200. Count of tweets to get for each
        profile, as an integer. If this is 200 or less, then page limit is
        left at 1 and the items per page count is reduced. If this is
        more than 200, then the items per page count is left at 200
        and page limit is adjusted to get a number of tweets as the
        next multiple of 200.
            e.g. 550 tweets needs 2 pages to get the first 400 tweets,
            plus a 3rd page for the additional 150 tweets.
            We simplify to get 200*3 = 600 tweets, to keep the count
            consistent on each query.
        Note that even if 200 tweets are requested, the API sometimes returns
        only 199 and the user may have posted fewer than the requested tweets.

        The limit for a single request to the API is 200, therefore any
        number up to 200 has the same rate limit cost. It may be useful to
        set a number here as 200 or less if we want to get through all the
        users quickly, as this takes fewer API queries and fewer db inserts
        or updates. Also, consider that a very low number may lead to
        deadtime, where the script takes a fixed time to get 200 or 1 tweets
        and now that it has processed the 1 requested and the window limit is
        hit, it has no Tweet processing to do while waiting for the next rate
        limited window. Though a low value will mean less storage space
        is required.
    :param verbose: Default False. If True, print the data used to create
        a local Tweet record. This data can be printed regardless of whether
        the data is written to the db record or not.
    :param writeToDB: Default True. If True, write the fetched tweets
        to local database, otherwise print and discard them. This is useful
        when used in combination with verbose flag which prints the data.
    :param campaignRec: Campaign record to assign to the local Tweet records.
        Default None to not assign any Campaign.
    :param onlyUpdateEngagements: Default True to only update the favorite
        and retweet count of the tweet in the local db. If False, update
        other fields too. Those are expected to be static on the Twitter API,
        but if rules change on this repo then it is useful to apply them
        historically on existing Tweet records. This flag only affects
        existing records.

    :return: None
    """
    APIConn = authentication.getAPIConnection()
    if tweetsPerProfile <= 200:
        tweetsPerPage = tweetsPerProfile
        pageLimit = 1
    else:
        tweetsPerPage = 200
        # Round up to get the last page, which might have fewer items.
        pageLimit = math.ceil(tweetsPerProfile / tweetsPerPage)
    for p in profileRecs:
        try:
            fetchedTweets = _getTweets(
                APIConn,
                userID=p.guid,
                tweetsPerPage=tweetsPerPage,
                pageLimit=pageLimit
            )
        except TweepError as e:
            # Report the failure but continue with the remaining profiles.
            print(
                "Could not fetch tweets for user: @{screenName}."
                " {type}. {msg}".format(
                    screenName=p.screenName,
                    type=type(e).__name__,
                    msg=str(e)
                ))
        else:
            print('User: {0}'.format(p.screenName))
            if writeToDB:
                print("Inserting/updating tweets in db...")
            else:
                print("Displaying tweets but not inserting/updating...")
            added = errors = 0
            for f in fetchedTweets:
                try:
                    data, tweetRec = insertOrUpdateTweet(
                        tweet=f,
                        profileID=p.id,
                        writeToDB=writeToDB,
                        onlyUpdateEngagements=onlyUpdateEngagements
                    )
                    if tweetRec and campaignRec:
                        try:
                            campaignRec.addTweet(tweetRec)
                        except DuplicateEntryError:
                            # Ignore error if Tweet was already assigned.
                            pass
                    if verbose:
                        if tweetRec:
                            tweetRec.prettyPrint()
                        else:
                            # No record was created, so use data dict.
                            m = data['message']
                            created = data['createdAt']
                            data['message'] = lib.text_handling.flattenText(m)
                            data['createdAt'] = str(lib.set_tz(created))
                            # TODO: Check if this will raise an error
                            # on unicode symbols in message.
                            print(json.dumps(data, indent=4))
                    added += 1
                except Exception as e:
                    print(
                        "Could not insert/update tweet `{id}` for user"
                        " @{screenName}. {type}. {msg}".format(
                            id=f.id,
                            screenName=p.screenName,
                            type=type(e).__name__,
                            msg=str(e)
                        ))
                    errors += 1
                total = added + errors
                # Print stats on every 10 processed and on the last item.
                # NOTE(review): this relies on tweet object equality and
                # would fire early for a duplicate of the final tweet; an
                # index comparison would be safer - verify.
                if total % 10 == 0 or f == fetchedTweets[-1]:
                    print(
                        "Total: {total:2,d}. Added: {added:2,d}. "
                        "Errors: {errors:2,d}.".format(
                            total=total,
                            added=added,
                            errors=errors
                        ))
def lookupTweetGuids(APIConn, tweetGuids, onlyUpdateEngagements=True):
    """
    Lookup tweet GUIDs and store entire tweets and authors in the database.

    Receive a list of tweet GUIDs (IDs in the Twitter API), process them in
    batches of up to 100 GUIDs, look each batch up from the API and then
    insert or update the tweets and their authors in the database.

    Note that tweet_mode='extended' is not available in tweepy for
    statuses_lookup, though it is used on the other endpoints.
    See https://github.com/tweepy/tweepy/issues/785.

    :param APIConn: authorised tweepy.API connection.
    :param tweetGuids: list of Twitter API tweet GUIDs, as integers or
        strings. The list is split into batches with a max of 100 items
        each, since the statuses_lookup endpoint limits the number of items
        which may be requested and only ever returns one page of results
        (so the Cursor approach will not work).
    :param onlyUpdateEngagements: Default True to only update the favorite
        and retweet count of the tweet in the local db. If False, update
        other fields too. Those are expected to be static on the Twitter API,
        but if rules change on this repo then it is useful to apply them
        historically on existing Tweet records. This flag only affects
        existing records.

    :return: None
    """
    batchSize = 100
    for start in range(0, len(tweetGuids), batchSize):
        batch = tweetGuids[start:start + batchSize]
        for fetchedTweet in APIConn.statuses_lookup(batch):
            # Ensure the author exists locally before linking the tweet.
            authorRec = insertOrUpdateProfile(profile=fetchedTweet.author)
            _, tweetRec = insertOrUpdateTweet(
                tweet=fetchedTweet,
                profileID=authorRec.id,
                onlyUpdateEngagements=onlyUpdateEngagements
            )
            tweetRec.prettyPrint()
def updateTweetEngagements(APIConn, tweetRecSelect):
    """
    Update engagements of local tweet records.

    Expect select results of Tweets in the db, extract their GUIDs, get the
    latest favorite and retweet counts from the API and then store the
    updated values. If any of the looked up Tweet GUIDs are not returned
    from the API (deleted/private/reported) then we do not have anything
    to save for it.

    The records are split into batches of up to 100 items, since that is
    the maximum number of tweet IDs which the statuses lookup endpoint
    allows.

    TODO: Instead of expecting tweet record select results, this could be
    more efficient by doing a set filtered to where GUID is t.id, provided
    the record is there, rather than getting the object and then setting.
    This can be even more efficient by fetching tweets from the API then
    doing a single UPDATE query using native SQL, instead of using the ORM.

    :param APIConn: API Connection.
    :param tweetRecSelect: SQLObject select results for model.Tweet
        instances, or simply a list of the instances.

    :return: None
    """
    # Materialise the select results once so only a single fetch query is
    # done; .count() is also not possible on sliced select results and the
    # total is needed before batching.
    allGuids = [rec.guid for rec in list(tweetRecSelect)]
    batchSize = 100
    for start in range(0, len(allGuids), batchSize):
        batch = allGuids[start:start + batchSize]
        for fetched in APIConn.statuses_lookup(batch):
            localRec = db.Tweet.byGuid(fetched.id)
            prevFav = localRec.favoriteCount
            prevRt = localRec.retweetCount
            localRec.set(
                favoriteCount=fetched.favorite_count,
                retweetCount=fetched.retweet_count
            )
            print(
                "Updated tweet GUID: {guid}, fav: {fav:3,d} ({oldFav:3,d}),"
                " RT: {rt:3,d} ({oldRt:3,d})".format(
                    guid=fetched.id,
                    fav=fetched.favorite_count,
                    oldFav=prevFav,
                    rt=fetched.retweet_count,
                    oldRt=prevRt
                ))
def _profileByScreenName(screenName):
    """
    Resolve a screen name to a Profile record, trying exact case first.

    Falls back to a case-insensitive LIKE match in SQLite. Assume Twitter
    prevents two users having the same screen name across case, though
    SQLObjectIntegrityError will still be raised here for that edge case.

    :param screenName: Profile screen name to look up.

    :return: db Profile record.
    :raises SQLObjectNotFound: if no Profile matches the screen name.
    """
    try:
        return db.Profile.byScreenName(screenName)
    except SQLObjectNotFound:
        match = db.Profile.select(
            LIKE(
                db.Profile.q.screenName,
                screenName
            )
        ).getOne(None)
    if match is None:
        raise SQLObjectNotFound(
            "Cannot assign Category since Profile screen name"
            " is not in db: {0}".format(screenName)
        )
    return match


def assignProfileCategory(categoryName, profileRecs=None, screenNames=None):
    """
    Assign Categories to Profiles.

    Fetch the Category or create it if it does not exist, then put the
    Profiles in the Category, ignoring any link which exists already. An
    error is raised if a Profile does not exist.

    :param categoryName: String. Get a category by name and create it
        if it does not exist yet. If Profile records or Profile screen names
        are provided, then assign all of those Profiles to the category.
        Both Profile inputs can be left as not set to just create the
        Category.
    :param profileRecs: Default None. List of db Profile records to be
        assigned to the category. Cannot be empty if screenNames is also
        empty.
    :param screenNames: Default None. List of Profile screen names to be
        assigned to the category. The screen names should exist as Profiles
        in the db already (matching on exact case), otherwise an error will
        be raised. The screenNames argument cannot be empty if profileRecs
        is also empty.

    :return tuple of new and existing counts.
        - newCnt: Count of new Profile Category links created.
        - existingCnt: Count of Profile Category links not created because
          they already exist.
    """
    try:
        categoryRec = db.Category.byName(categoryName)
    except SQLObjectNotFound:
        categoryRec = db.Category(name=categoryName)
        print("Created category: {0}".format(categoryName))
    newCnt = 0
    existingCnt = 0
    if profileRecs or screenNames:
        if profileRecs is None:
            # Resolve every screen name to a record up front; an unknown
            # name raises before any Category links are created.
            profileRecs = [_profileByScreenName(n) for n in screenNames]
        for rec in profileRecs:
            try:
                categoryRec.addProfile(rec)
            except DuplicateEntryError:
                existingCnt += 1
            else:
                newCnt += 1
    return newCnt, existingCnt
def assignTweetCampaign(campaignRec, tweetRecs=None, tweetGuids=None):
    """
    Assign Campaigns to Tweets using the ORM.

    Assign a Campaign to Tweets, ignoring existing links and raising an
    error on a Tweet GUID which does not exist in the db. For large batches
    of inserts, rather use bulkAssignTweetCampaign.

    Search query is not considered here and should be set using the
    campaign manager utility or the ORM directly.

    :param campaignRec: Campaign record to assign to all Tweet
        records indicated with tweetRecs or tweetGuids inputs.
        Both Tweet inputs can be left as not set to just create the
        Campaign. Note that the assignProfileCategory function expects
        a Category name because it can be created there, but here the actual
        Campaign record is expected because creation must be handled with
        the Campaign manager utility instead because of the search query
        field.
    :param tweetRecs: Default None. List of db Tweet records to be
        assigned to the campaign. Cannot be empty if tweetGuids is also
        empty.
    :param tweetGuids: Default None. List of Tweet GUIDs to be assigned
        to the campaign. The GUIDs should exist as Tweets in the db already,
        otherwise SQLObjectNotFound is raised. The tweetGuids argument
        cannot be empty if tweetRecs is also empty.

    :return newCnt: Count of new Tweet Campaign links created.
    :return existingCnt: Count of Tweet Campaign links not created because
        they already exist.
    """
    linked = 0
    skipped = 0
    if not tweetRecs:
        # Populate the record list from GUIDs.
        tweetRecs = []
        for guid in tweetGuids:
            try:
                rec = db.Tweet.byGuid(guid)
            except SQLObjectNotFound:
                raise SQLObjectNotFound(
                    "Cannot assign Campaign as Tweet GUID is not in db:"
                    " {0}".format(guid)
                )
            tweetRecs.append(rec)
    for rec in tweetRecs:
        try:
            campaignRec.addTweet(rec)
        except DuplicateEntryError:
            skipped += 1
        else:
            linked += 1
    return linked, skipped
def bulkAssignProfileCategory(categoryID, profileIDs):
    """
    Assign Categories to a batch of Profiles using a single INSERT statement.

    This function assumes the Category ID and the Profile IDs are for
    existing values in the db. Any existing profile_category links which
    would raise a duplicate error are allowed to fail silently using
    INSERT OR IGNORE syntax.

    :param categoryID: Category record ID to assign to Profile records.
    :param profileIDs: Iterable of Profile ID records which must be linked
        to a Category record. Assumed non-empty — TODO confirm sqlrepr's
        behavior for an empty value list.

    :return SQL: Multi-line SQL statement which was executed.
    """
    insert = Insert(
        'profile_category',
        template=['category_id', 'profile_id'],
        valueList=[(categoryID, profileID) for profileID in profileIDs]
    )
    SQL = db.conn.sqlrepr(insert)
    # Only rewrite the leading INSERT keyword. An unbounded .replace()
    # would also alter any later occurrence of the substring "INSERT"
    # within the generated statement.
    SQL = SQL.replace("INSERT", "INSERT OR IGNORE", 1)
    db.conn.query(SQL)
    return SQL
def bulkAssignTweetCampaign(campaignID, tweetIDs):
    """
    Assign Campaigns to a batch of Tweets using a single INSERT statement.

    This function assumes the Campaign ID and the Tweet IDs are for existing
    values in the db. Any existing tweet_campaign links which would raise a
    duplicate error are allowed to fail silently using INSERT OR IGNORE
    syntax. See SQLite INSERT documentation diagram syntax:
        http://www.sqlite.org/lang_insert.html

    A single INSERT statement is done here, since a mass-insertion using
    the ORM is inefficient:
        http://www.sqlobject.org/FAQ.html#how-to-do-mass-insertion

    The links in tweet_campaign are relatively simple and require validation
    at the schema level rather than the ORM level, therefore it is safe to
    use a native SQL statement through sqlbuilder. The implementation is
    based on an example here:
        http://www.sqlobject.org/SQLBuilder.html#insert

    :param campaignID: Campaign record ID to assign to Tweet records.
    :param tweetIDs: Iterable of Tweet ID records which must be linked to
        a Campaign record. Assumed non-empty — TODO confirm sqlrepr's
        behavior for an empty value list.

    :return SQL: Multi-line SQL statement which was executed.
    """
    insert = Insert(
        'tweet_campaign',
        template=['campaign_id', 'tweet_id'],
        valueList=[(campaignID, tweetID) for tweetID in tweetIDs]
    )
    SQL = db.conn.sqlrepr(insert)
    # Only rewrite the leading INSERT keyword. An unbounded .replace()
    # would also alter any later occurrence of the substring "INSERT"
    # within the generated statement.
    SQL = SQL.replace("INSERT", "INSERT OR IGNORE", 1)
    db.conn.query(SQL)
    return SQL
|
import asyncio
import log
from pkg_resources import get_distribution
from sanic import Sanic, response
from app import api, helpers, settings, utils
# Single Sanic application instance serving the memes API.
app = Sanic(name="memegen")
app.config.SERVER_NAME = settings.SERVER_NAME
app.config.API_SCHEMES = settings.API_SCHEMES
# API version is read from the installed package's metadata.
app.config.API_VERSION = get_distribution("memegen").version
app.config.API_TITLE = "Memes API"
# Register the route blueprints: image rendering, template listing, docs.
app.blueprint(api.images.blueprint)
app.blueprint(api.templates.blueprint)
app.blueprint(api.docs.blueprint)
@app.get("/")
@api.docs.exclude
async def index(request):
    """Render the sample-image gallery as the landing page."""
    # Building the image URLs is blocking work, so run it off the event loop.
    event_loop = asyncio.get_event_loop()
    sample_urls = await event_loop.run_in_executor(
        None, helpers.get_sample_images, request
    )
    auto_refresh = settings.DEBUG and "debug" in request.args
    return response.html(utils.html.gallery(sample_urls, refresh=auto_refresh))
@app.get("/test")
@api.docs.exclude
async def test(request):
    """Render the test-image gallery, or redirect home outside debug mode."""
    if not settings.DEBUG:
        return response.redirect("/")
    event_loop = asyncio.get_event_loop()
    test_urls = await event_loop.run_in_executor(
        None, helpers.get_test_images, request
    )
    # Always auto-refresh so template changes can be watched live.
    return response.html(utils.html.gallery(test_urls, refresh=True))
if __name__ == "__main__":
    # Quiet noisy third-party loggers before starting the server.
    log.silence("asyncio", "datafiles", allow_warning=True)
    app.run(
        host="0.0.0.0",
        port=settings.PORT,
        workers=settings.WORKERS,
        debug=settings.DEBUG,
        access_log=False,
    )
Hard-code API version since package isn't installed on Heroku
import asyncio
import log
from sanic import Sanic, response
from app import api, helpers, settings, utils
# Single Sanic application instance serving the memes API.
app = Sanic(name="memegen")
app.config.SERVER_NAME = settings.SERVER_NAME
app.config.API_SCHEMES = settings.API_SCHEMES
# Version is hard-coded rather than read from package metadata because the
# app may be deployed without being pip-installed (e.g. on Heroku).
app.config.API_VERSION = "6.0a1"
app.config.API_TITLE = "Memes API"
# Register the route blueprints: image rendering, template listing, docs.
app.blueprint(api.images.blueprint)
app.blueprint(api.templates.blueprint)
app.blueprint(api.docs.blueprint)
@app.get("/")
@api.docs.exclude
async def index(request):
    """Render the sample-image gallery as the landing page."""
    # Image URL generation blocks, so push it onto the default executor.
    loop_ = asyncio.get_event_loop()
    gallery_urls = await loop_.run_in_executor(
        None, helpers.get_sample_images, request
    )
    wants_refresh = settings.DEBUG and "debug" in request.args
    page = utils.html.gallery(gallery_urls, refresh=wants_refresh)
    return response.html(page)
@app.get("/test")
@api.docs.exclude
async def test(request):
    """Render the test-image gallery, or redirect home outside debug mode."""
    if not settings.DEBUG:
        return response.redirect("/")
    loop_ = asyncio.get_event_loop()
    gallery_urls = await loop_.run_in_executor(
        None, helpers.get_test_images, request
    )
    # Always auto-refresh so template changes can be watched live.
    page = utils.html.gallery(gallery_urls, refresh=True)
    return response.html(page)
if __name__ == "__main__":
    # Quiet noisy third-party loggers before starting the server.
    log.silence("asyncio", "datafiles", allow_warning=True)
    app.run(
        host="0.0.0.0",
        port=settings.PORT,
        workers=settings.WORKERS,
        debug=settings.DEBUG,
        access_log=False,
    )
|
import os
import sys
import vtkAll as vtk
from ddapp import botpy
import math
import time
import types
import functools
import numpy as np
from ddapp import transformUtils
from ddapp import lcmUtils
from ddapp.timercallback import TimerCallback
from ddapp.asynctaskqueue import AsyncTaskQueue
from ddapp import objectmodel as om
from ddapp import visualization as vis
from ddapp import applogic as app
from ddapp.debugVis import DebugData
from ddapp import ikplanner
from ddapp import ioUtils
from ddapp.simpletimer import SimpleTimer
from ddapp.utime import getUtime
from ddapp import robotstate
from ddapp import robotplanlistener
from ddapp import segmentation
from ddapp import planplayback
import drc as lcmdrc
from PythonQt import QtCore, QtGui
class ValvePlannerDemo(object):
    '''
    Demo planner for turning a valve with the Atlas robot (Python 2 / ddapp).

    Wires together the footstep, manipulation and IK planners plus the
    hardware drivers, spawns or finds a valve affordance, and builds the
    sequence of plans (walk, pre-grasp, gaze, insert, turn, stand) needed
    to scribe the pointer tip around the valve. Plans are either animated
    visually (visOnly) or committed to the robot for execution.
    '''
    def __init__(self, robotModel, footstepPlanner, manipPlanner, ikPlanner, handDriver, atlasDriver, multisenseDriver, affordanceFitFunction, sensorJointController, planPlaybackFunction, showPoseFunction):
        # Planner/driver collaborators, injected by the application.
        self.robotModel = robotModel
        self.footstepPlanner = footstepPlanner
        self.manipPlanner = manipPlanner
        self.ikPlanner = ikPlanner
        self.handDriver = handDriver
        self.atlasDriver = atlasDriver
        self.multisenseDriver = multisenseDriver
        self.affordanceFitFunction = affordanceFitFunction
        self.sensorJointController = sensorJointController
        self.planPlaybackFunction = planPlaybackFunction
        self.showPoseFunction = showPoseFunction
        # Planning/execution mode flags.
        self.graspingHand = 'left'
        self.planFromCurrentRobotState = True
        self.visOnly = True
        self.useFootstepPlanner = False
        self.userPromptEnabled = True
        # Plan bookkeeping; self.plans accumulates every plan created.
        self.walkingPlan = None
        self.preGraspPlan = None
        self.graspPlan = None
        self.constraintSet = None
        self.pointerTipTransformLocal = None
        self.pointerTipPath = []
        self.plans = []
        # Scribing parameters for tracing the pointer tip around the valve.
        self.scribeInAir = False
        self.scribeDirection = 1 # 1 = clockwise | -1 = anticlockwise
        self.startAngle = -30 # suitable for both types of valve
        self.nextScribeAngle = self.startAngle
        self.valveRadius = 0.19558 # nominal initial value. 7.7in radius metal valve
        ### Testing Parameters:
        self.valveHeight = 1.2192 # 4ft
    def scribeRadius(self):
        '''
        Radius at which the pointer tip scribes, slightly inside the rim.
        NOTE(review): both branches currently return the same value; the
        scribeInAir case looks like it was meant to differ - confirm.
        '''
        if self.scribeInAir:
            return self.valveRadius - 0.08
        else:
            return self.valveRadius - 0.08
    def addPlan(self, plan):
        '''Record a plan in the accumulated plan list.'''
        self.plans.append(plan)
    def setScribeAngleToCurrent(self):
        '''
        Compute the current angle of the robot's pointer relative to the valve
        '''
        # Remove any stale debug marker from the object model.
        for obj in om.getObjects():
            if obj.getProperty('Name') == 'pointer tip angle':
                om.removeFromObjectModel(obj)
        if (self.graspingHand == 'left'):
            tipFrame = self.robotModel.getLinkFrame('left_pointer_tip')
        else:
            tipFrame = self.robotModel.getLinkFrame('right_pointer_tip')
        #vis.updateFrame(tipFrame, 'pointer tip current', visible=True, scale=0.2)
        # Get the relative position of the pointer from the valve
        valveTransform = transformUtils.copyFrame(self.valveFrame.transform)
        #print valveTransform.GetPosition()
        tipFrame.Concatenate(valveTransform.GetLinearInverse())
        #vis.updateFrame(tipFrame, 'point relative', visible=True, scale=0.1)
        # Set the Scribe angle to be the current angle
        tPosition = tipFrame.GetPosition()
        angle = math.degrees( math.atan2(tPosition[1], tPosition[0]) )
        radius = math.sqrt( tPosition[0]*tPosition[0] + tPosition[1]*tPosition[1] )
        print 'Current Scribe Angle: ', angle , ' and Radius: ' , radius
        self.nextScribeAngle = angle
        # Show a debug sphere at the tip and its projection onto the valve plane.
        d = DebugData()
        d.addSphere(tPosition, radius=0.01)
        tPosition =[tPosition[0], tPosition[1], 0] # interested in the point on the plane too
        d.addSphere(tPosition, radius=0.01)
        currentTipMesh = d.getPolyData()
        self.currentTipPosition = vis.showPolyData(currentTipMesh, 'pointer tip angle', color=[1.0, 0.5, 0.0], cls=vis.AffordanceItem, parent=self.valveAffordance, alpha=0.5)
        self.currentTipPosition.actor.SetUserTransform(self.valveFrame.transform)
    def computeGroundFrame(self, robotModel):
        '''
        Given a robot model, returns a vtkTransform at a position between
        the feet, on the ground, with z-axis up and x-axis aligned with the
        robot pelvis x-axis.
        '''
        t1 = robotModel.getLinkFrame('l_foot')
        t2 = robotModel.getLinkFrame('r_foot')
        pelvisT = robotModel.getLinkFrame('pelvis')
        xaxis = [1.0, 0.0, 0.0]
        pelvisT.TransformVector(xaxis, xaxis)
        xaxis = np.array(xaxis)
        zaxis = np.array([0.0, 0.0, 1.0])
        # Re-orthogonalise so x lies in the ground plane.
        yaxis = np.cross(zaxis, xaxis)
        yaxis /= np.linalg.norm(yaxis)
        xaxis = np.cross(yaxis, zaxis)
        stancePosition = (np.array(t2.GetPosition()) + np.array(t1.GetPosition())) / 2.0
        # Fixed offset from foot link frame down to the ground surface.
        footHeight = 0.0811
        t = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
        t.PostMultiply()
        t.Translate(stancePosition)
        t.Translate([0.0, 0.0, -footHeight])
        return t
    def computeGraspFrame(self):
        '''Create the grasp frame on the valve and sync it to the valve frame.'''
        assert self.valveAffordance
        # reach to center and back - for palm point
        position = [0.0, 0.0, -0.1]
        rpy = [90, 0, 180]
        t = transformUtils.frameFromPositionAndRPY(position, rpy)
        t.Concatenate(self.valveFrame.transform)
        self.graspFrame = vis.updateFrame(t, 'valve grasp frame', parent=self.valveAffordance, visible=False, scale=0.2)
        # Keep the grasp frame attached to the valve if the valve is moved.
        self.frameSync = vis.FrameSync()
        self.frameSync.addFrame(self.graspFrame)
        self.frameSync.addFrame(self.valveFrame)
    def removePointerTipFrames(self):
        '''Remove all desired-tip debug frames from the object model.'''
        for obj in om.getObjects():
            if obj.getProperty('Name') == 'pointer tip frame desired':
                om.removeFromObjectModel(obj)
    def removePointerTipPath(self):
        '''Remove the drawn pointer-tip path from the object model.'''
        for obj in om.getObjects():
            if obj.getProperty('Name') == 'pointer tip path':
                om.removeFromObjectModel(obj)
    def computePointerTipFrame(self, engagedTip):
        '''
        Compute the desired pointer tip frame at nextScribeAngle, in the
        valve's plane. engagedTip truthy means the tip touches the wheel.
        '''
        if engagedTip:
            tipDepth = 0.0
        else:
            tipDepth = -0.1 # - is outside the wheel
        assert self.valveAffordance
        position = [ self.scribeRadius()*math.cos( math.radians( self.nextScribeAngle )) , self.scribeRadius()*math.sin( math.radians( self.nextScribeAngle )) , tipDepth]
        rpy = [90, 0, 180]
        t = transformUtils.frameFromPositionAndRPY(position, rpy)
        # Keep a copy in valve-local coordinates for path drawing.
        self.pointerTipTransformLocal = transformUtils.copyFrame(t)
        t.Concatenate(self.valveFrame.transform)
        self.pointerTipFrameDesired = vis.showFrame(t, 'pointer tip frame desired', parent=self.valveAffordance, visible=False, scale=0.2)
    def drawPointerTipPath(self):
        '''Draw the accumulated pointer-tip path as a polyline on the valve.'''
        path = DebugData()
        for i in range(1,len(self.pointerTipPath)):
            p0 = self.pointerTipPath[i-1].GetPosition()
            p1 = self.pointerTipPath[i].GetPosition()
            path.addLine ( np.array( p0 ) , np.array( p1 ), radius= 0.005)
        pathMesh = path.getPolyData()
        self.pointerTipLinePath = vis.showPolyData(pathMesh, 'pointer tip path', color=[0.0, 0.3, 1.0], cls=vis.AffordanceItem, parent=self.valveAffordance, alpha=0.6)
        self.pointerTipLinePath.actor.SetUserTransform(self.valveFrame.transform)
    def computeStanceFrame(self):
        '''
        Compute the ground-level stance frame where the robot should stand
        to reach the grasp frame, and sync it to the valve frames.
        '''
        graspFrame = self.graspFrame.transform
        groundFrame = self.computeGroundFrame(self.robotModel)
        groundHeight = groundFrame.GetPosition()[2]
        graspPosition = np.array(graspFrame.GetPosition())
        graspYAxis = [0.0, 1.0, 0.0]
        graspZAxis = [0.0, 0.0, 1.0]
        graspFrame.TransformVector(graspYAxis, graspYAxis)
        graspFrame.TransformVector(graspZAxis, graspZAxis)
        xaxis = graspYAxis
        #xaxis = graspZAxis
        zaxis = [0, 0, 1]
        yaxis = np.cross(zaxis, xaxis)
        yaxis /= np.linalg.norm(yaxis)
        xaxis = np.cross(yaxis, zaxis)
        graspGroundFrame = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
        graspGroundFrame.PostMultiply()
        graspGroundFrame.Translate(graspPosition[0], graspPosition[1], groundHeight)
        if self.scribeInAir:
            position = [-0.6, -0.4, 0.0] # stand further away when scribing in air
        else:
            position = [-0.48, -0.4, 0.0]
        rpy = [0, 0, 16]
        t = transformUtils.frameFromPositionAndRPY(position, rpy)
        t.Concatenate(graspGroundFrame)
        self.graspStanceFrame = vis.updateFrame(t, 'valve grasp stance', parent=self.valveAffordance, visible=False, scale=0.2)
        self.frameSync.addFrame(self.graspStanceFrame)
    def moveRobotToStanceFrame(self):
        '''Teleport the simulated robot base directly to the stance frame.'''
        frame = self.graspStanceFrame.transform
        self.sensorJointController.setPose('q_nom')
        stancePosition = frame.GetPosition()
        stanceOrientation = frame.GetOrientation()
        # q[:2] is base x/y, q[5] is base yaw.
        self.sensorJointController.q[:2] = [stancePosition[0], stancePosition[1]]
        self.sensorJointController.q[5] = math.radians(stanceOrientation[2])
        self.sensorJointController.push()
    def computeFootstepPlan(self):
        '''Request a footstep plan from the current pose to the stance frame.'''
        startPose = self.getPlanningStartPose()
        goalFrame = self.graspStanceFrame.transform
        request = self.footstepPlanner.constructFootstepPlanRequest(startPose, goalFrame)
        self.footstepPlan = self.footstepPlanner.sendFootstepPlanRequest(request, waitForResponse=True)
    def computeWalkingPlan(self):
        '''Request a walking plan for the current footstep plan and record it.'''
        startPose = self.getPlanningStartPose()
        self.walkingPlan = self.footstepPlanner.sendWalkingPlanRequest(self.footstepPlan, startPose, waitForResponse=True)
        self.addPlan(self.walkingPlan)
    def computePreGraspPlan(self):
        '''Plan to the stored "arm up pregrasp" posture for the grasping hand.'''
        startPose = self.getPlanningStartPose()
        endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'General', 'arm up pregrasp', side=self.graspingHand)
        self.preGraspPlan = self.ikPlanner.computePostureGoal(startPose, endPose)
        self.addPlan(self.preGraspPlan)
    def computeGraspPlan(self):
        '''Plan an end-effector reach to the valve grasp frame (torso locked).'''
        startPose = self.getPlanningStartPose()
        constraintSet = self.ikPlanner.planEndEffectorGoal(startPose, self.graspingHand, self.graspFrame, lockTorso=True)
        endPose, info = constraintSet.runIk()
        self.graspPlan = constraintSet.runIkTraj()
        self.addPlan(self.graspPlan)
    def initGazeConstraintSet(self, goalFrame):
        '''
        Build self.constraintSet with moving-body constraints plus a gaze
        constraint aiming the grasping hand at goalFrame.
        '''
        # create constraint set
        startPose = self.getPlanningStartPose()
        startPoseName = 'gaze_plan_start'
        endPoseName = 'gaze_plan_end'
        self.ikPlanner.addPose(startPose, startPoseName)
        self.ikPlanner.addPose(startPose, endPoseName)
        self.constraintSet = ikplanner.ConstraintSet(self.ikPlanner, [], startPoseName, endPoseName)
        self.constraintSet.endPose = startPose
        # add body constraints
        bodyConstraints = self.ikPlanner.createMovingBodyConstraints(startPoseName, lockBase=True, lockBack=False, lockLeftArm=self.graspingHand=='right', lockRightArm=self.graspingHand=='left')
        self.constraintSet.constraints.extend(bodyConstraints)
        # add gaze constraint
        self.graspToHandLinkFrame = self.ikPlanner.newGraspToHandFrame(self.graspingHand)
        gazeConstraint = self.ikPlanner.createGazeGraspConstraint(self.graspingHand, goalFrame, self.graspToHandLinkFrame)
        # Gaze constraint is kept first; computeTurnPlan indexes constraints[0].
        self.constraintSet.constraints.insert(0, gazeConstraint)
    def appendDistanceConstraint(self):
        '''Constrain the hand to stay at scribe radius from the valve center.'''
        # add point to point distance constraint
        c = ikplanner.ik.PointToPointDistanceConstraint()
        c.bodyNameA = self.ikPlanner.getHandLink(self.graspingHand)
        c.bodyNameB = 'world'
        c.pointInBodyA = self.graspToHandLinkFrame
        c.pointInBodyB = self.valveFrame.transform
        c.lowerBound = [self.scribeRadius()]
        c.upperBound = [self.scribeRadius()]
        self.constraintSet.constraints.insert(0, c)
    def appendGazeConstraintForTargetFrame(self, goalFrame, t):
        '''Add a gaze constraint on goalFrame active only at time t.'''
        gazeConstraint = self.ikPlanner.createGazeGraspConstraint(self.graspingHand, goalFrame, self.graspToHandLinkFrame)
        gazeConstraint.tspan = [t, t]
        self.constraintSet.constraints.append(gazeConstraint)
    def appendPositionConstraintForTargetFrame(self, goalFrame, t):
        '''Add a position constraint on goalFrame active only at time t.'''
        positionConstraint, _ = self.ikPlanner.createPositionOrientationGraspConstraints(self.graspingHand, goalFrame, self.graspToHandLinkFrame)
        positionConstraint.tspan = [t, t]
        self.constraintSet.constraints.append(positionConstraint)
    def planGazeTrajectory(self):
        '''Solve the current constraint set as a trajectory and record the plan.'''
        self.ikPlanner.ikServer.usePointwise = False
        plan = self.constraintSet.runIkTraj()
        self.addPlan(plan)
    def commitFootstepPlan(self):
        '''Send the current footstep plan to the robot.'''
        self.footstepPlanner.commitFootstepPlan(self.footstepPlan)
    def commitManipPlan(self):
        '''Send the most recently created plan to the manipulation planner.'''
        self.manipPlanner.commitManipPlan(self.plans[-1])
    def sendNeckPitchLookDown(self):
        '''Pitch the neck down (degrees) to view the valve area.'''
        self.multisenseDriver.setNeckPitch(40)
    def sendNeckPitchLookForward(self):
        '''Return the neck pitch to a forward-looking angle.'''
        self.multisenseDriver.setNeckPitch(15)
    def waitForAtlasBehaviorAsync(self, behaviorName):
        '''Async task: yield until the Atlas driver reports behaviorName.'''
        assert behaviorName in self.atlasDriver.getBehaviorMap().values()
        while self.atlasDriver.getCurrentBehaviorName() != behaviorName:
            yield
    def printAsync(self, s):
        '''Async task: yield once, then print the message.'''
        yield
        print s
    def userPrompt(self, message):
        '''
        Async task: prompt the user and abort unless the answer is 'y'.
        NOTE(review): when prompts are disabled the early return produces an
        immediately-exhausted generator - confirm the task queue treats that
        as a no-op.
        '''
        if not self.userPromptEnabled:
            return
        yield
        result = raw_input(message)
        if result != 'y':
            raise Exception('user abort.')
    def delay(self, delayTimeInSeconds):
        '''Async task: yield until delayTimeInSeconds have elapsed.'''
        yield
        t = SimpleTimer()
        while t.elapsed() < delayTimeInSeconds:
            yield
    def waitForCleanLidarSweepAsync(self):
        '''Async task: yield until two more full lidar revolutions complete.'''
        currentRevolution = self.multisenseDriver.displayedRevolution
        desiredRevolution = currentRevolution + 2
        while self.multisenseDriver.displayedRevolution < desiredRevolution:
            yield
    def spawnValveFrame(self, robotModel, height):
        '''
        Build a valve frame at the given height in front of the robot,
        mirrored left/right depending on the grasping hand.
        '''
        position = [0.7, 0.22, height]
        rpy = [180, -90, -16]
        if (self.graspingHand == 'right'):
            position[1] = -position[1]
            rpy[2] = -rpy[2]
        t = transformUtils.frameFromPositionAndRPY(position, rpy)
        t.Concatenate(self.computeGroundFrame(robotModel))
        return t
    def spawnValveAffordance(self):
        '''Spawn a wheel-shaped valve affordance and derive its frames.'''
        spawn_height = 1.2192 # 4ft
        radius = 0.19558 # nominal initial value. 7.7in radius metal valve
        valveFrame = self.spawnValveFrame(self.robotModel, spawn_height)
        folder = om.getOrCreateContainer('affordances')
        z = DebugData()
        z.addLine ( np.array([0, 0, -0.0254]) , np.array([0, 0, 0.0254]), radius= self.valveRadius)
        valveMesh = z.getPolyData()
        self.valveAffordance = vis.showPolyData(valveMesh, 'valve', color=[0.0, 1.0, 0.0], cls=vis.AffordanceItem, parent=folder, alpha=0.3)
        self.valveAffordance.actor.SetUserTransform(valveFrame)
        self.valveFrame = vis.showFrame(valveFrame, 'valve frame', parent=self.valveAffordance, visible=False, scale=0.2)
        # TODO: do I need to add a param dict?
        self.computeGraspFrame()
        self.computeStanceFrame()
        self.computePointerTipFrame(0)
    def spawnValveLeverAffordance(self):
        '''Spawn a lever-shaped valve affordance and derive its frames.'''
        spawn_height = 1.06 # 3.5ft
        pipe_radius = 0.01
        valveFrame = self.spawnValveFrame(self.robotModel, spawn_height)
        folder = om.getOrCreateContainer('affordances')
        # NOTE(review): pipe_radius is assigned twice with the same value.
        pipe_radius = 0.01
        z = DebugData()
        z.addLine([0,0,0], [ self.valveRadius , 0, 0], radius=pipe_radius)
        valveMesh = z.getPolyData()
        #z = DebugData()
        #z.addLine ( np.array([0, 0, -0.0254]) , np.array([0, 0, 0.0254]), radius= self.valveRadius)
        #valveMesh = z.getPolyData()
        self.valveAffordance = vis.showPolyData(valveMesh, 'valve', color=[0.0, 1.0, 0.0], cls=vis.AffordanceItem, parent=folder, alpha=0.3)
        self.valveAffordance.actor.SetUserTransform(valveFrame)
        self.valveFrame = vis.showFrame(valveFrame, 'valve frame', parent=self.valveAffordance, visible=False, scale=0.2)
        # TODO: do I need to add a param dict?
        self.computeGraspFrame()
        self.computeStanceFrame()
        self.computePointerTipFrame(0)
    def findValveAffordance(self):
        '''Locate an existing wheel valve in the object model and derive frames.'''
        self.valveAffordance = om.findObjectByName('valve')
        self.valveFrame = om.findObjectByName('valve frame')
        #self.valveRadius = self.valveAffordance.params.get('radius')
        self.computeGraspFrame()
        self.computeStanceFrame()
        self.computePointerTipFrame(0)
    def findValveLeverAffordance(self):
        '''Locate an existing lever valve in the object model and derive frames.'''
        self.valveAffordance = om.findObjectByName('valve lever')
        self.valveFrame = om.findObjectByName('lever frame')
        # length of lever is equivalent to radius of valve
        self.valveRadius = self.valveAffordance.params.get('length')
        self.computeGraspFrame()
        self.computeStanceFrame()
        self.computePointerTipFrame(0)
    def getEstimatedRobotStatePose(self):
        '''Return the latest estimated robot state pose as a numpy array.'''
        return np.array(self.sensorJointController.getPose('EST_ROBOT_STATE'))
    def getPlanningStartPose(self):
        '''
        Return the pose planning should start from: the estimated robot
        state, or the end of the last plan when chaining plans offline.
        '''
        if self.planFromCurrentRobotState:
            return self.getEstimatedRobotStatePose()
        else:
            if self.plans:
                return robotstate.convertStateMessageToDrakePose(self.plans[-1].plan[-1])
            else:
                return self.getEstimatedRobotStatePose()
    def removeFootstepPlan(self):
        '''Clear the footstep plan from the scene and from this object.'''
        om.removeFromObjectModel(om.findObjectByName('footstep plan'))
        self.footstepPlan = None
    def playNominalPlan(self):
        '''Play back all accumulated plans; all must have been created.'''
        assert None not in self.plans
        self.planPlaybackFunction(self.plans)
    def computePreGraspPlanGaze(self):
        '''Plan a gaze move to the disengaged pointer tip frame.'''
        self.computePointerTipFrame(0)
        self.initGazeConstraintSet(self.pointerTipFrameDesired)
        self.appendPositionConstraintForTargetFrame(self.pointerTipFrameDesired, 1)
        self.planGazeTrajectory()
    def computeInsertPlan(self):
        '''Plan a gaze move to the engaged (touching) pointer tip frame.'''
        self.computePointerTipFrame(1)
        self.initGazeConstraintSet(self.pointerTipFrameDesired)
        self.appendPositionConstraintForTargetFrame(self.pointerTipFrameDesired, 1)
        self.planGazeTrajectory()
    def computeTurnPlan(self, turnDegrees=360, numberOfSamples=12):
        '''
        Plan the valve turn: step nextScribeAngle through turnDegrees in
        numberOfSamples increments, adding a timed position constraint per
        sample, then solve a single gaze trajectory through all of them.
        '''
        self.pointerTipPath = []
        self.removePointerTipFrames()
        self.removePointerTipPath()
        degreeStep = float(turnDegrees) / numberOfSamples
        # Only engage the tip against the wheel when not scribing in air.
        tipMode = 0 if self.scribeInAir else 1
        self.computePointerTipFrame(tipMode)
        self.initGazeConstraintSet(self.pointerTipFrameDesired)
        #self.appendDistanceConstraint()
        self.pointerTipPath.append(self.pointerTipTransformLocal)
        for i in xrange(numberOfSamples):
            self.nextScribeAngle += self.scribeDirection*degreeStep
            self.computePointerTipFrame(tipMode)
            self.appendPositionConstraintForTargetFrame(self.pointerTipFrameDesired, i+1)
            self.pointerTipPath.append(self.pointerTipTransformLocal)
        # Stretch the gaze constraint (inserted first by initGazeConstraintSet)
        # over the whole trajectory.
        gazeConstraint = self.constraintSet.constraints[0]
        assert isinstance(gazeConstraint, ikplanner.ik.WorldGazeDirConstraint)
        gazeConstraint.tspan = [1.0, numberOfSamples]
        self.drawPointerTipPath()
        self.planGazeTrajectory()
    def computeStandPlan(self):
        '''Plan a return to the nominal standing posture and record it.'''
        startPose = self.getPlanningStartPose()
        self.standPlan = self.ikPlanner.computeNominalPlan(startPose)
        self.addPlan(self.standPlan)
    def computeNominalPlan(self):
        '''
        Build and play back the full nominal plan sequence for the demo
        (walking steps are currently commented out).
        '''
        self.plans = []
        self.removeFootstepPlan()
        self.removePointerTipFrames()
        self.removePointerTipPath()
        # self.findValveAffordance()
        # self.computeGraspFrame()
        # self.computeStanceFrame()
        # if self.useFootstepPlanner:
        #     self.computeFootstepPlan()
        #     self.computeWalkingPlan()
        # else:
        #     self.moveRobotToStanceFrame()
        self.computePreGraspPlan()
        self.computePreGraspPlanGaze()
        if not self.scribeInAir:
            self.computeInsertPlan()
        self.computeTurnPlan()
        self.computePreGraspPlanGaze()
        self.computePreGraspPlan()
        self.computeStandPlan()
        self.playNominalPlan()
    def waitForPlanExecution(self, plan):
        '''Async task: wait out the plan's duration plus a small margin.'''
        planElapsedTime = planplayback.PlanPlayback.getPlanElapsedTime(plan)
        print 'waiting for plan execution:', planElapsedTime
        return self.delay(planElapsedTime + 1.0)
    def animateLastPlan(self):
        '''Commit the last plan (unless vis-only) and wait for it to finish.'''
        plan = self.plans[-1]
        if not self.visOnly:
            self.commitManipPlan()
        return self.waitForPlanExecution(plan)
    def addWalkingTasksToQueue(self, taskQueue, planFunc, walkFunc):
        '''
        Append walking tasks: either plan+animate/commit footsteps (when
        using the footstep planner) or simply call walkFunc to teleport.
        '''
        if self.useFootstepPlanner:
            taskQueue.addTask(planFunc)
            if self.visOnly:
                taskQueue.addTask(self.computeWalkingPlan)
                taskQueue.addTask(self.animateLastPlan)
            else:
                taskQueue.addTask(self.userPrompt('send stand command. continue? y/n: '))
                taskQueue.addTask(self.atlasDriver.sendStandCommand)
                taskQueue.addTask(self.waitForAtlasBehaviorAsync('stand'))
                taskQueue.addTask(self.userPrompt('commit footsteps. continue? y/n: '))
                taskQueue.addTask(self.commitFootstepPlan)
                taskQueue.addTask(self.waitForAtlasBehaviorAsync('step'))
                taskQueue.addTask(self.waitForAtlasBehaviorAsync('stand'))
                taskQueue.addTask(self.removeFootstepPlan)
        else:
            taskQueue.addTask(walkFunc)
    def autonomousExecute(self):
        '''
        Build the full autonomous task queue: find the valve, walk to it,
        then run each planning step with a user prompt before animating.
        Returns the queue without starting it.
        '''
        taskQueue = AsyncTaskQueue()
        taskQueue.addTask(self.printAsync('computing grasp and stance frames'))
        taskQueue.addTask(self.removePointerTipFrames)
        taskQueue.addTask(self.removePointerTipPath)
        taskQueue.addTask(self.findValveAffordance)
        taskQueue.addTask(self.computeGraspFrame)
        taskQueue.addTask(self.computeStanceFrame)
        # NOTE(review): walking tasks are added twice here - confirm this
        # duplication is intentional.
        self.addWalkingTasksToQueue(taskQueue, self.computeFootstepPlan, self.moveRobotToStanceFrame)
        self.addWalkingTasksToQueue(taskQueue, self.computeFootstepPlan, self.moveRobotToStanceFrame)
        taskQueue.addTask(self.atlasDriver.sendManipCommand)
        taskQueue.addTask(self.waitForAtlasBehaviorAsync('manip'))
        planningFunctions = [
                    self.computePreGraspPlan,
                    self.computePreGraspPlanGaze,
                    self.computeInsertPlan,
                    self.computeTurnPlan,
                    self.computePreGraspPlanGaze,
                    self.computePreGraspPlan,
                    self.computeStandPlan,
                    ]
        for planFunc in planningFunctions:
            taskQueue.addTask(planFunc)
            taskQueue.addTask(self.userPrompt('continue? y/n: '))
            taskQueue.addTask(self.animateLastPlan)
        taskQueue.addTask(self.printAsync('done!'))
        return taskQueue
removing valveRadius and explicitly setting scribeRadius
import os
import sys
import vtkAll as vtk
from ddapp import botpy
import math
import time
import types
import functools
import numpy as np
from ddapp import transformUtils
from ddapp import lcmUtils
from ddapp.timercallback import TimerCallback
from ddapp.asynctaskqueue import AsyncTaskQueue
from ddapp import objectmodel as om
from ddapp import visualization as vis
from ddapp import applogic as app
from ddapp.debugVis import DebugData
from ddapp import ikplanner
from ddapp import ioUtils
from ddapp.simpletimer import SimpleTimer
from ddapp.utime import getUtime
from ddapp import robotstate
from ddapp import robotplanlistener
from ddapp import segmentation
from ddapp import planplayback
import drc as lcmdrc
from PythonQt import QtCore, QtGui
class ValvePlannerDemo(object):
def __init__(self, robotModel, footstepPlanner, manipPlanner, ikPlanner, handDriver, atlasDriver, multisenseDriver, affordanceFitFunction, sensorJointController, planPlaybackFunction, showPoseFunction):
self.robotModel = robotModel
self.footstepPlanner = footstepPlanner
self.manipPlanner = manipPlanner
self.ikPlanner = ikPlanner
self.handDriver = handDriver
self.atlasDriver = atlasDriver
self.multisenseDriver = multisenseDriver
self.affordanceFitFunction = affordanceFitFunction
self.sensorJointController = sensorJointController
self.planPlaybackFunction = planPlaybackFunction
self.showPoseFunction = showPoseFunction
self.graspingHand = 'left'
self.planFromCurrentRobotState = True
self.visOnly = True
self.useFootstepPlanner = False
self.userPromptEnabled = True
self.walkingPlan = None
self.preGraspPlan = None
self.graspPlan = None
self.constraintSet = None
self.pointerTipTransformLocal = None
self.pointerTipPath = []
self.plans = []
self.scribeInAir = False
self.scribeDirection = 1 # 1 = clockwise | -1 = anticlockwise
self.startAngle = -30 # suitable for both types of valve
self.nextScribeAngle = self.startAngle
self.scribeRadius = None
def addPlan(self, plan):
self.plans.append(plan)
def setScribeAngleToCurrent(self):
'''
Compute the current angle of the robot's pointer relative to the valve
'''
for obj in om.getObjects():
if obj.getProperty('Name') == 'pointer tip angle':
om.removeFromObjectModel(obj)
if (self.graspingHand == 'left'):
tipFrame = self.robotModel.getLinkFrame('left_pointer_tip')
else:
tipFrame = self.robotModel.getLinkFrame('right_pointer_tip')
#vis.updateFrame(tipFrame, 'pointer tip current', visible=True, scale=0.2)
# Get the relative position of the pointer from the valve
valveTransform = transformUtils.copyFrame(self.valveFrame.transform)
#print valveTransform.GetPosition()
tipFrame.Concatenate(valveTransform.GetLinearInverse())
#vis.updateFrame(tipFrame, 'point relative', visible=True, scale=0.1)
# Set the Scribe angle to be the current angle
tPosition = tipFrame.GetPosition()
angle = math.degrees( math.atan2(tPosition[1], tPosition[0]) )
radius = math.sqrt( tPosition[0]*tPosition[0] + tPosition[1]*tPosition[1] )
print 'Current Scribe Angle: ', angle , ' and Radius: ' , radius
self.nextScribeAngle = angle
d = DebugData()
d.addSphere(tPosition, radius=0.01)
tPosition =[tPosition[0], tPosition[1], 0] # interested in the point on the plane too
d.addSphere(tPosition, radius=0.01)
currentTipMesh = d.getPolyData()
self.currentTipPosition = vis.showPolyData(currentTipMesh, 'pointer tip angle', color=[1.0, 0.5, 0.0], cls=vis.AffordanceItem, parent=self.valveAffordance, alpha=0.5)
self.currentTipPosition.actor.SetUserTransform(self.valveFrame.transform)
def computeGroundFrame(self, robotModel):
'''
Given a robol model, returns a vtkTransform at a position between
the feet, on the ground, with z-axis up and x-axis aligned with the
robot pelvis x-axis.
'''
t1 = robotModel.getLinkFrame('l_foot')
t2 = robotModel.getLinkFrame('r_foot')
pelvisT = robotModel.getLinkFrame('pelvis')
xaxis = [1.0, 0.0, 0.0]
pelvisT.TransformVector(xaxis, xaxis)
xaxis = np.array(xaxis)
zaxis = np.array([0.0, 0.0, 1.0])
yaxis = np.cross(zaxis, xaxis)
yaxis /= np.linalg.norm(yaxis)
xaxis = np.cross(yaxis, zaxis)
stancePosition = (np.array(t2.GetPosition()) + np.array(t1.GetPosition())) / 2.0
footHeight = 0.0811
t = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
t.PostMultiply()
t.Translate(stancePosition)
t.Translate([0.0, 0.0, -footHeight])
return t
def computeGraspFrame(self):
assert self.valveAffordance
# reach to center and back - for palm point
position = [0.0, 0.0, -0.1]
rpy = [90, 0, 180]
t = transformUtils.frameFromPositionAndRPY(position, rpy)
t.Concatenate(self.valveFrame.transform)
self.graspFrame = vis.updateFrame(t, 'valve grasp frame', parent=self.valveAffordance, visible=False, scale=0.2)
self.frameSync = vis.FrameSync()
self.frameSync.addFrame(self.graspFrame)
self.frameSync.addFrame(self.valveFrame)
def removePointerTipFrames(self):
for obj in om.getObjects():
if obj.getProperty('Name') == 'pointer tip frame desired':
om.removeFromObjectModel(obj)
def removePointerTipPath(self):
for obj in om.getObjects():
if obj.getProperty('Name') == 'pointer tip path':
om.removeFromObjectModel(obj)
def computePointerTipFrame(self, engagedTip):
if engagedTip:
tipDepth = 0.0
else:
tipDepth = -0.1 # - is outside the wheel
assert self.valveAffordance
position = [ self.scribeRadius*math.cos( math.radians( self.nextScribeAngle )) , self.scribeRadius*math.sin( math.radians( self.nextScribeAngle )) , tipDepth]
rpy = [90, 0, 180]
t = transformUtils.frameFromPositionAndRPY(position, rpy)
self.pointerTipTransformLocal = transformUtils.copyFrame(t)
t.Concatenate(self.valveFrame.transform)
self.pointerTipFrameDesired = vis.showFrame(t, 'pointer tip frame desired', parent=self.valveAffordance, visible=False, scale=0.2)
def drawPointerTipPath(self):
path = DebugData()
for i in range(1,len(self.pointerTipPath)):
p0 = self.pointerTipPath[i-1].GetPosition()
p1 = self.pointerTipPath[i].GetPosition()
path.addLine ( np.array( p0 ) , np.array( p1 ), radius= 0.005)
pathMesh = path.getPolyData()
self.pointerTipLinePath = vis.showPolyData(pathMesh, 'pointer tip path', color=[0.0, 0.3, 1.0], cls=vis.AffordanceItem, parent=self.valveAffordance, alpha=0.6)
self.pointerTipLinePath.actor.SetUserTransform(self.valveFrame.transform)
def computeStanceFrame(self):
graspFrame = self.graspFrame.transform
groundFrame = self.computeGroundFrame(self.robotModel)
groundHeight = groundFrame.GetPosition()[2]
graspPosition = np.array(graspFrame.GetPosition())
graspYAxis = [0.0, 1.0, 0.0]
graspZAxis = [0.0, 0.0, 1.0]
graspFrame.TransformVector(graspYAxis, graspYAxis)
graspFrame.TransformVector(graspZAxis, graspZAxis)
xaxis = graspYAxis
#xaxis = graspZAxis
zaxis = [0, 0, 1]
yaxis = np.cross(zaxis, xaxis)
yaxis /= np.linalg.norm(yaxis)
xaxis = np.cross(yaxis, zaxis)
graspGroundFrame = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
graspGroundFrame.PostMultiply()
graspGroundFrame.Translate(graspPosition[0], graspPosition[1], groundHeight)
if self.scribeInAir:
position = [-0.6, -0.4, 0.0] # stand further away when scribing in air
else:
position = [-0.48, -0.4, 0.0]
rpy = [0, 0, 16]
t = transformUtils.frameFromPositionAndRPY(position, rpy)
t.Concatenate(graspGroundFrame)
self.graspStanceFrame = vis.updateFrame(t, 'valve grasp stance', parent=self.valveAffordance, visible=False, scale=0.2)
self.frameSync.addFrame(self.graspStanceFrame)
def moveRobotToStanceFrame(self):
frame = self.graspStanceFrame.transform
self.sensorJointController.setPose('q_nom')
stancePosition = frame.GetPosition()
stanceOrientation = frame.GetOrientation()
self.sensorJointController.q[:2] = [stancePosition[0], stancePosition[1]]
self.sensorJointController.q[5] = math.radians(stanceOrientation[2])
self.sensorJointController.push()
def computeFootstepPlan(self):
startPose = self.getPlanningStartPose()
goalFrame = self.graspStanceFrame.transform
request = self.footstepPlanner.constructFootstepPlanRequest(startPose, goalFrame)
self.footstepPlan = self.footstepPlanner.sendFootstepPlanRequest(request, waitForResponse=True)
def computeWalkingPlan(self):
startPose = self.getPlanningStartPose()
self.walkingPlan = self.footstepPlanner.sendWalkingPlanRequest(self.footstepPlan, startPose, waitForResponse=True)
self.addPlan(self.walkingPlan)
def computePreGraspPlan(self):
startPose = self.getPlanningStartPose()
endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'General', 'arm up pregrasp', side=self.graspingHand)
self.preGraspPlan = self.ikPlanner.computePostureGoal(startPose, endPose)
self.addPlan(self.preGraspPlan)
def computeGraspPlan(self):
startPose = self.getPlanningStartPose()
constraintSet = self.ikPlanner.planEndEffectorGoal(startPose, self.graspingHand, self.graspFrame, lockTorso=True)
endPose, info = constraintSet.runIk()
self.graspPlan = constraintSet.runIkTraj()
self.addPlan(self.graspPlan)
def initGazeConstraintSet(self, goalFrame):
# create constraint set
startPose = self.getPlanningStartPose()
startPoseName = 'gaze_plan_start'
endPoseName = 'gaze_plan_end'
self.ikPlanner.addPose(startPose, startPoseName)
self.ikPlanner.addPose(startPose, endPoseName)
self.constraintSet = ikplanner.ConstraintSet(self.ikPlanner, [], startPoseName, endPoseName)
self.constraintSet.endPose = startPose
# add body constraints
bodyConstraints = self.ikPlanner.createMovingBodyConstraints(startPoseName, lockBase=True, lockBack=False, lockLeftArm=self.graspingHand=='right', lockRightArm=self.graspingHand=='left')
self.constraintSet.constraints.extend(bodyConstraints)
# add gaze constraint
self.graspToHandLinkFrame = self.ikPlanner.newGraspToHandFrame(self.graspingHand)
gazeConstraint = self.ikPlanner.createGazeGraspConstraint(self.graspingHand, goalFrame, self.graspToHandLinkFrame)
self.constraintSet.constraints.insert(0, gazeConstraint)
def appendDistanceConstraint(self):
# add point to point distance constraint
c = ikplanner.ik.PointToPointDistanceConstraint()
c.bodyNameA = self.ikPlanner.getHandLink(self.graspingHand)
c.bodyNameB = 'world'
c.pointInBodyA = self.graspToHandLinkFrame
c.pointInBodyB = self.valveFrame.transform
c.lowerBound = [self.scribeRadius]
c.upperBound = [self.scribeRadius]
self.constraintSet.constraints.insert(0, c)
def appendGazeConstraintForTargetFrame(self, goalFrame, t):
gazeConstraint = self.ikPlanner.createGazeGraspConstraint(self.graspingHand, goalFrame, self.graspToHandLinkFrame)
gazeConstraint.tspan = [t, t]
self.constraintSet.constraints.append(gazeConstraint)
def appendPositionConstraintForTargetFrame(self, goalFrame, t):
positionConstraint, _ = self.ikPlanner.createPositionOrientationGraspConstraints(self.graspingHand, goalFrame, self.graspToHandLinkFrame)
positionConstraint.tspan = [t, t]
self.constraintSet.constraints.append(positionConstraint)
def planGazeTrajectory(self):
self.ikPlanner.ikServer.usePointwise = False
plan = self.constraintSet.runIkTraj()
self.addPlan(plan)
def commitFootstepPlan(self):
self.footstepPlanner.commitFootstepPlan(self.footstepPlan)
def commitManipPlan(self):
self.manipPlanner.commitManipPlan(self.plans[-1])
def sendNeckPitchLookDown(self):
self.multisenseDriver.setNeckPitch(40)
def sendNeckPitchLookForward(self):
self.multisenseDriver.setNeckPitch(15)
def waitForAtlasBehaviorAsync(self, behaviorName):
assert behaviorName in self.atlasDriver.getBehaviorMap().values()
while self.atlasDriver.getCurrentBehaviorName() != behaviorName:
yield
def printAsync(self, s):
yield
print s
def userPrompt(self, message):
if not self.userPromptEnabled:
return
yield
result = raw_input(message)
if result != 'y':
raise Exception('user abort.')
def delay(self, delayTimeInSeconds):
yield
t = SimpleTimer()
while t.elapsed() < delayTimeInSeconds:
yield
def waitForCleanLidarSweepAsync(self):
currentRevolution = self.multisenseDriver.displayedRevolution
desiredRevolution = currentRevolution + 2
while self.multisenseDriver.displayedRevolution < desiredRevolution:
yield
def spawnValveFrame(self, robotModel, height):
position = [0.7, 0.22, height]
rpy = [180, -90, -16]
if (self.graspingHand == 'right'):
position[1] = -position[1]
rpy[2] = -rpy[2]
t = transformUtils.frameFromPositionAndRPY(position, rpy)
t.Concatenate(self.computeGroundFrame(robotModel))
return t
def spawnValveAffordance(self):
spawn_height = 1.2192 # 4ft
radius = 0.19558 # nominal initial value. 7.7in radius metal valve
valveFrame = self.spawnValveFrame(self.robotModel, spawn_height)
folder = om.getOrCreateContainer('affordances')
z = DebugData()
z.addLine ( np.array([0, 0, -0.0254]) , np.array([0, 0, 0.0254]), radius= self.valveRadius)
valveMesh = z.getPolyData()
self.valveAffordance = vis.showPolyData(valveMesh, 'valve', color=[0.0, 1.0, 0.0], cls=vis.AffordanceItem, parent=folder, alpha=0.3)
self.valveAffordance.actor.SetUserTransform(valveFrame)
self.valveFrame = vis.showFrame(valveFrame, 'valve frame', parent=self.valveAffordance, visible=False, scale=0.2)
# TODO: do I need to add a param dict?
self.computeGraspFrame()
self.computeStanceFrame()
self.computePointerTipFrame(0)
def spawnValveLeverAffordance(self):
spawn_height = 1.06 # 3.5ft
pipe_radius = 0.01
valveFrame = self.spawnValveFrame(self.robotModel, spawn_height)
folder = om.getOrCreateContainer('affordances')
pipe_radius = 0.01
z = DebugData()
z.addLine([0,0,0], [ self.valveRadius , 0, 0], radius=pipe_radius)
valveMesh = z.getPolyData()
#z = DebugData()
#z.addLine ( np.array([0, 0, -0.0254]) , np.array([0, 0, 0.0254]), radius= self.valveRadius)
#valveMesh = z.getPolyData()
self.valveAffordance = vis.showPolyData(valveMesh, 'valve', color=[0.0, 1.0, 0.0], cls=vis.AffordanceItem, parent=folder, alpha=0.3)
self.valveAffordance.actor.SetUserTransform(valveFrame)
self.valveFrame = vis.showFrame(valveFrame, 'valve frame', parent=self.valveAffordance, visible=False, scale=0.2)
# TODO: do I need to add a param dict?
self.computeGraspFrame()
self.computeStanceFrame()
self.computePointerTipFrame(0)
def findValveAffordance(self):
self.valveAffordance = om.findObjectByName('valve')
self.valveFrame = om.findObjectByName('valve frame')
self.scribeRadius = self.valveAffordance.params.get('radius') - 0.08
self.computeGraspFrame()
self.computeStanceFrame()
self.computePointerTipFrame(0)
def findValveLeverAffordance(self):
self.valveAffordance = om.findObjectByName('valve lever')
self.valveFrame = om.findObjectByName('lever frame')
# length of lever is equivalent to radius of valve
self.scribeRadius = self.valveAffordance.params.get('length') - 0.08
self.computeGraspFrame()
self.computeStanceFrame()
self.computePointerTipFrame(0)
def getEstimatedRobotStatePose(self):
return np.array(self.sensorJointController.getPose('EST_ROBOT_STATE'))
def getPlanningStartPose(self):
if self.planFromCurrentRobotState:
return self.getEstimatedRobotStatePose()
else:
if self.plans:
return robotstate.convertStateMessageToDrakePose(self.plans[-1].plan[-1])
else:
return self.getEstimatedRobotStatePose()
def removeFootstepPlan(self):
om.removeFromObjectModel(om.findObjectByName('footstep plan'))
self.footstepPlan = None
def playNominalPlan(self):
assert None not in self.plans
self.planPlaybackFunction(self.plans)
def computePreGraspPlanGaze(self):
self.computePointerTipFrame(0)
self.initGazeConstraintSet(self.pointerTipFrameDesired)
self.appendPositionConstraintForTargetFrame(self.pointerTipFrameDesired, 1)
self.planGazeTrajectory()
def computeInsertPlan(self):
self.computePointerTipFrame(1)
self.initGazeConstraintSet(self.pointerTipFrameDesired)
self.appendPositionConstraintForTargetFrame(self.pointerTipFrameDesired, 1)
self.planGazeTrajectory()
def computeTurnPlan(self, turnDegrees=360, numberOfSamples=12):
self.pointerTipPath = []
self.removePointerTipFrames()
self.removePointerTipPath()
degreeStep = float(turnDegrees) / numberOfSamples
tipMode = 0 if self.scribeInAir else 1
self.computePointerTipFrame(tipMode)
self.initGazeConstraintSet(self.pointerTipFrameDesired)
#self.appendDistanceConstraint()
self.pointerTipPath.append(self.pointerTipTransformLocal)
for i in xrange(numberOfSamples):
self.nextScribeAngle += self.scribeDirection*degreeStep
self.computePointerTipFrame(tipMode)
self.appendPositionConstraintForTargetFrame(self.pointerTipFrameDesired, i+1)
self.pointerTipPath.append(self.pointerTipTransformLocal)
gazeConstraint = self.constraintSet.constraints[0]
assert isinstance(gazeConstraint, ikplanner.ik.WorldGazeDirConstraint)
gazeConstraint.tspan = [1.0, numberOfSamples]
self.drawPointerTipPath()
self.planGazeTrajectory()
def computeStandPlan(self):
startPose = self.getPlanningStartPose()
self.standPlan = self.ikPlanner.computeNominalPlan(startPose)
self.addPlan(self.standPlan)
def computeNominalPlan(self):
self.plans = []
self.removeFootstepPlan()
self.removePointerTipFrames()
self.removePointerTipPath()
# self.findValveAffordance()
# self.computeGraspFrame()
# self.computeStanceFrame()
# if self.useFootstepPlanner:
# self.computeFootstepPlan()
# self.computeWalkingPlan()
# else:
# self.moveRobotToStanceFrame()
self.computePreGraspPlan()
self.computePreGraspPlanGaze()
if not self.scribeInAir:
self.computeInsertPlan()
self.computeTurnPlan()
self.computePreGraspPlanGaze()
self.computePreGraspPlan()
self.computeStandPlan()
self.playNominalPlan()
def waitForPlanExecution(self, plan):
planElapsedTime = planplayback.PlanPlayback.getPlanElapsedTime(plan)
print 'waiting for plan execution:', planElapsedTime
return self.delay(planElapsedTime + 1.0)
def animateLastPlan(self):
plan = self.plans[-1]
if not self.visOnly:
self.commitManipPlan()
return self.waitForPlanExecution(plan)
def addWalkingTasksToQueue(self, taskQueue, planFunc, walkFunc):
if self.useFootstepPlanner:
taskQueue.addTask(planFunc)
if self.visOnly:
taskQueue.addTask(self.computeWalkingPlan)
taskQueue.addTask(self.animateLastPlan)
else:
taskQueue.addTask(self.userPrompt('send stand command. continue? y/n: '))
taskQueue.addTask(self.atlasDriver.sendStandCommand)
taskQueue.addTask(self.waitForAtlasBehaviorAsync('stand'))
taskQueue.addTask(self.userPrompt('commit footsteps. continue? y/n: '))
taskQueue.addTask(self.commitFootstepPlan)
taskQueue.addTask(self.waitForAtlasBehaviorAsync('step'))
taskQueue.addTask(self.waitForAtlasBehaviorAsync('stand'))
taskQueue.addTask(self.removeFootstepPlan)
else:
taskQueue.addTask(walkFunc)
def autonomousExecute(self):
taskQueue = AsyncTaskQueue()
taskQueue.addTask(self.printAsync('computing grasp and stance frames'))
taskQueue.addTask(self.removePointerTipFrames)
taskQueue.addTask(self.removePointerTipPath)
taskQueue.addTask(self.findValveAffordance)
taskQueue.addTask(self.computeGraspFrame)
taskQueue.addTask(self.computeStanceFrame)
self.addWalkingTasksToQueue(taskQueue, self.computeFootstepPlan, self.moveRobotToStanceFrame)
self.addWalkingTasksToQueue(taskQueue, self.computeFootstepPlan, self.moveRobotToStanceFrame)
taskQueue.addTask(self.atlasDriver.sendManipCommand)
taskQueue.addTask(self.waitForAtlasBehaviorAsync('manip'))
planningFunctions = [
self.computePreGraspPlan,
self.computePreGraspPlanGaze,
self.computeInsertPlan,
self.computeTurnPlan,
self.computePreGraspPlanGaze,
self.computePreGraspPlan,
self.computeStandPlan,
]
for planFunc in planningFunctions:
taskQueue.addTask(planFunc)
taskQueue.addTask(self.userPrompt('continue? y/n: '))
taskQueue.addTask(self.animateLastPlan)
taskQueue.addTask(self.printAsync('done!'))
return taskQueue
|
from flask import render_template, Flask, request, redirect, flash
from app import app
from .forms import RequestForm
import requests
from twilio.rest import TwilioRestClient
import twilio.twiml
import urllib
import math
import sys
import urllib2
'''
@author Arjun Jain
@author Chris Bernt
@author Greg Lyons
@author Wesley Valentine
'''
# Twilio REST API credentials.
# NOTE(review): live credentials are hard-coded in source control; they should
# be rotated and loaded from environment variables instead.
account_sid = "ACed174aa4db08574d608df749cd16e3fd"
auth_token = "d96a5e6b2722cac3116e0298c965efd0"
client = TwilioRestClient(account_sid, auth_token)
# Public base URL of this deployment; used to build TwiML callback URLs.
BASE_URL = "https://emergencytexttovoice.herokuapp.com/"
def makeCall(inputText):
    '''
    Extract an address from inputText, find the closest PSAP (public safety
    answering point), and place a Twilio voice call that reads out the PSAP
    location followed by the original message.  Returns the string
    "No valid address found" when no address could be extracted; otherwise
    returns None (the call itself is the side effect).
    '''
    extractedAddress = extractAddress(inputText)
    if extractedAddress != "No address":
        location = findClosestPSAP(extractedAddress)
    else:
        return "No valid address found"
    # URL-encode the spoken text so it can travel as a path segment
    modifiedText = urllib.quote("P S A P Location is. " + location + "." + "Your Message is. " + inputText)
    urlToMake = BASE_URL + "call/" + modifiedText
    # NOTE(review): destination number is hard-coded -- presumably a test
    # number; confirm before production use.
    client.calls.create(url = urlToMake , to="+17572823575", from_ = "+12039874014")
@app.route('/sms', methods=['GET', 'POST'])
def default():
    '''
    Twilio SMS webhook: forward the incoming message body to makeCall and
    reply with an error SMS when no address could be extracted; otherwise
    respond with an empty body.
    '''
    inputText = request.values.get('Body',None)
    if makeCall(inputText) == "No valid address found":
        resp = twilio.twiml.Response()
        resp.message("No address found try again")
        return str(resp)
    return ""
@app.route('/submitted', methods=['GET', 'POST'])
def submitted():
    '''Render the confirmation page shown after a successful form submission.'''
    return render_template('submitted.html')
@app.route('/', methods=['GET', 'POST'])
def form():
    '''
    Web form entry point: on a valid POST, trigger makeCall with the
    submitted text and redirect to /submitted (or back to / with a flash
    message when no address was found).  On GET, render the form.
    '''
    form = RequestForm()
    if form.validate_on_submit():
        inputText = form.inputText.data
        if makeCall(inputText) == "No valid address found":
            flash("No address found")
            return redirect("/")
        return redirect("/submitted")
    return render_template('request.html',
                           title= 'Request',
                           form= form)
@app.route('/call/<message>', methods=['GET', 'POST'])
def createTwiML(message):
    '''
    TwiML callback for an outgoing call: speak the message, then gather one
    digit; pressing any key re-requests this same URL, which repeats the
    message.
    '''
    resp = twilio.twiml.Response()
    resp.say(message)
    # re-encode so the repeat action URL round-trips through Twilio intact
    fixedMessage = urllib.quote(message)
    resp.gather(numDigits=1, action=BASE_URL + "call/" + fixedMessage, method="POST").say("To repeat this message press any key")
    '''
    with resp.gather(numDigits=1, action=BASE_URL + "call/" + message, method="POST") as g:
        g.say("To repeat this message press any key")
    '''
    return str(resp)
def distance(lat1, lng1, lat2, lng2):
    """
    Great-circle distance in miles between two (lat, lng) points,
    computed with the haversine formula.
    Thanks to Philipp Jahoda of StackOverflow.com.
    """
    earthRadius = 3958.75  # mean Earth radius, miles
    lat1_rad = math.radians(lat1)
    lat2_rad = math.radians(lat2)
    half_dlat = math.radians(lat2 - lat1) / 2
    half_dlng = math.radians(lng2 - lng1) / 2
    a = (math.sin(half_dlat) ** 2
         + math.sin(half_dlng) ** 2 * math.cos(lat1_rad) * math.cos(lat2_rad))
    c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    return earthRadius * c
def geocode(loc):
    '''
    Geocode a free-form location string with the Google Maps Geocoding API
    and return "lat,lng" as a single comma-separated string.  Raises
    (IndexError/KeyError) when the API returns no results; the caller in
    findClosestPSAP wraps this call in a try/except.
    '''
    url = "https://maps.googleapis.com/maps/api/geocode/json?parameters"
    query_params = { 'address' : loc,
                     'sensor': 'false'
                     }
    response = requests.get(url, params = query_params)
    geoData = response.json()
    #print geoData
    lat = geoData['results'][0]['geometry']['location']['lat']
    lng = geoData['results'][0]['geometry']['location']['lng']
    latLong = str(lat) + "," + str(lng)
    #print loc + ": " + latLong
    return latLong
def extractAddress(message):
    '''
    Extract a postal address from free text with the SmartyStreets extract
    API.  Returns the first address found, or the sentinel string
    "No address" when the response contains none.
    '''
    url = "https://extract-beta.api.smartystreets.com/"
    # NOTE(review): API credentials are hard-coded in source control;
    # they should be rotated and moved to configuration.
    query_params = {
        "auth-id" : "6e1411df-8d1e-4928-93c2-a690e3176b84",
        "auth-token" : "TKhLiK6xt76gfTGAQRi3",
        "input": message
    }
    response = requests.get(url, params = query_params)
    addressData = response.json()
    try:
        address = addressData["addresses"][0]["text"]
    except (KeyError, IndexError, TypeError):
        # fix: the original bare 'except:' swallowed every exception;
        # only missing/empty/odd-shaped response data means "no address"
        address = "No address"
    return address
def findClosestPSAP(location):
    '''
    Return "name; address" of the PSAP closest to the given location,
    scanning the lat/lng columns (6 and 7) of PSAPData.txt.  Returns
    "Bad location." when the location cannot be geocoded.
    '''
    try:
        latLong = geocode(location).split(",")
    except Exception:
        # fix: narrowed from a bare 'except:' (which also caught
        # KeyboardInterrupt/SystemExit); geocode raises when the API has
        # no results or the request fails
        return "Bad location."
    myLat = float(latLong[0].strip())
    myLong = float(latLong[1].strip())
    bestDist = sys.maxint
    bestPSAP = ""
    PSAPAddress = ""
    # fix: 'with' closes the data file (the original leaked the handle),
    # and the per-line variable no longer shadows the list being iterated
    with open("PSAPData.txt") as f:
        for line in f:
            fields = line.split(",")
            curDist = distance(myLat, myLong, float(fields[6]), float(fields[7]))
            if curDist < bestDist:
                bestDist = curDist
                bestPSAP = fields[0]
                PSAPAddress = fields[1] + ", " + fields[4] + ", " + fields[2]
    return bestPSAP + "; " + PSAPAddress
def determineToCall(PSAPNumber):
    '''
    Map a PSAP name (first column of "PhoneData copy.txt") to its phone
    number (second column).  Returns a default fallback number when the
    PSAP is not listed.
    '''
    # fix: 'with' ensures the file handle is closed (the original leaked it)
    with open("PhoneData copy.txt") as f:
        for line in f:
            l = line.split(",")
            if l[0].strip("\n") == PSAPNumber:
                return l[1].strip()
    return "+19732162024"
new phone numbers
from flask import render_template, Flask, request, redirect, flash
from app import app
from .forms import RequestForm
import requests
from twilio.rest import TwilioRestClient
import twilio.twiml
import urllib
import math
import sys
import urllib2
'''
@author Arjun Jain
@author Chris Bernt
@author Greg Lyons
@author Wesley Valentine
'''
# Twilio REST API credentials.
# NOTE(review): live credentials are hard-coded in source control; they should
# be rotated and loaded from environment variables instead.
account_sid = "ACed174aa4db08574d608df749cd16e3fd"
auth_token = "d96a5e6b2722cac3116e0298c965efd0"
client = TwilioRestClient(account_sid, auth_token)
# Public base URL of this deployment; used to build TwiML callback URLs.
BASE_URL = "https://emergencytexttovoice.herokuapp.com/"
def makeCall(inputText):
    '''
    Extract an address from inputText, find the closest PSAP (public safety
    answering point), and place a Twilio voice call to that PSAP's number
    reading out the location and the original message.  Returns the string
    "No valid address found" when no address could be extracted; otherwise
    returns None (the call itself is the side effect).
    '''
    extractedAddress = extractAddress(inputText)
    if extractedAddress != "No address":
        location = findClosestPSAP(extractedAddress)
    else:
        return "No valid address found"
    # URL-encode the spoken text so it can travel as a path segment
    modifiedText = urllib.quote("P S A P Location is. " + location + "." + "Your Message is. " + inputText)
    urlToMake = BASE_URL + "call/" + modifiedText
    # NOTE(review): findClosestPSAP returns "name; address", so split()[0]
    # is only the first whitespace-separated word of the PSAP name --
    # confirm this matches the keys in "PhoneData copy.txt".
    number = determineToCall(location.split()[0]).strip()
    client.calls.create(url = urlToMake , to=number, from_ = "+12039874014")
@app.route('/sms', methods=['GET', 'POST'])
def default():
    '''
    Twilio SMS webhook: forward the incoming message body to makeCall and
    reply with an error SMS when no address could be extracted; otherwise
    respond with an empty body.
    '''
    inputText = request.values.get('Body',None)
    if makeCall(inputText) == "No valid address found":
        resp = twilio.twiml.Response()
        resp.message("No address found try again")
        return str(resp)
    return ""
@app.route('/submitted', methods=['GET', 'POST'])
def submitted():
    '''Render the confirmation page shown after a successful form submission.'''
    return render_template('submitted.html')
@app.route('/', methods=['GET', 'POST'])
def form():
    '''
    Web form entry point: on a valid POST, trigger makeCall with the
    submitted text and redirect to /submitted (or back to / with a flash
    message when no address was found).  On GET, render the form.
    '''
    form = RequestForm()
    if form.validate_on_submit():
        inputText = form.inputText.data
        if makeCall(inputText) == "No valid address found":
            flash("No address found")
            return redirect("/")
        return redirect("/submitted")
    return render_template('request.html',
                           title= 'Request',
                           form= form)
@app.route('/call/<message>', methods=['GET', 'POST'])
def createTwiML(message):
    '''
    TwiML callback for an outgoing call: speak the message, then gather one
    digit; pressing any key re-requests this same URL, which repeats the
    message.
    '''
    resp = twilio.twiml.Response()
    resp.say(message)
    # re-encode so the repeat action URL round-trips through Twilio intact
    fixedMessage = urllib.quote(message)
    resp.gather(numDigits=1, action=BASE_URL + "call/" + fixedMessage, method="POST").say("To repeat this message press any key")
    '''
    with resp.gather(numDigits=1, action=BASE_URL + "call/" + message, method="POST") as g:
        g.say("To repeat this message press any key")
    '''
    return str(resp)
def distance(lat1, lng1, lat2, lng2):
    """
    Great-circle distance in miles between two (lat, lng) points,
    computed with the haversine formula.
    Thanks to Philipp Jahoda of StackOverflow.com.
    """
    earthRadius = 3958.75  # mean Earth radius, miles
    lat1_rad = math.radians(lat1)
    lat2_rad = math.radians(lat2)
    half_dlat = math.radians(lat2 - lat1) / 2
    half_dlng = math.radians(lng2 - lng1) / 2
    a = (math.sin(half_dlat) ** 2
         + math.sin(half_dlng) ** 2 * math.cos(lat1_rad) * math.cos(lat2_rad))
    c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    return earthRadius * c
def geocode(loc):
    '''
    Geocode a free-form location string with the Google Maps Geocoding API
    and return "lat,lng" as a single comma-separated string.  Raises
    (IndexError/KeyError) when the API returns no results; the caller in
    findClosestPSAP wraps this call in a try/except.
    '''
    url = "https://maps.googleapis.com/maps/api/geocode/json?parameters"
    query_params = { 'address' : loc,
                     'sensor': 'false'
                     }
    response = requests.get(url, params = query_params)
    geoData = response.json()
    #print geoData
    lat = geoData['results'][0]['geometry']['location']['lat']
    lng = geoData['results'][0]['geometry']['location']['lng']
    latLong = str(lat) + "," + str(lng)
    #print loc + ": " + latLong
    return latLong
def extractAddress(message):
    '''
    Extract a postal address from free text with the SmartyStreets extract
    API.  Returns the first address found, or the sentinel string
    "No address" when the response contains none.
    '''
    url = "https://extract-beta.api.smartystreets.com/"
    # NOTE(review): API credentials are hard-coded in source control;
    # they should be rotated and moved to configuration.
    query_params = {
        "auth-id" : "6e1411df-8d1e-4928-93c2-a690e3176b84",
        "auth-token" : "TKhLiK6xt76gfTGAQRi3",
        "input": message
    }
    response = requests.get(url, params = query_params)
    addressData = response.json()
    try:
        address = addressData["addresses"][0]["text"]
    except (KeyError, IndexError, TypeError):
        # fix: the original bare 'except:' swallowed every exception;
        # only missing/empty/odd-shaped response data means "no address"
        address = "No address"
    return address
def findClosestPSAP(location):
    '''
    Return "name; address" of the PSAP closest to the given location,
    scanning the lat/lng columns (6 and 7) of PSAPData.txt.  Returns
    "Bad location." when the location cannot be geocoded.
    '''
    try:
        latLong = geocode(location).split(",")
    except Exception:
        # fix: narrowed from a bare 'except:' (which also caught
        # KeyboardInterrupt/SystemExit); geocode raises when the API has
        # no results or the request fails
        return "Bad location."
    myLat = float(latLong[0].strip())
    myLong = float(latLong[1].strip())
    bestDist = sys.maxint
    bestPSAP = ""
    PSAPAddress = ""
    # fix: 'with' closes the data file (the original leaked the handle),
    # and the per-line variable no longer shadows the list being iterated
    with open("PSAPData.txt") as f:
        for line in f:
            fields = line.split(",")
            curDist = distance(myLat, myLong, float(fields[6]), float(fields[7]))
            if curDist < bestDist:
                bestDist = curDist
                bestPSAP = fields[0]
                PSAPAddress = fields[1] + ", " + fields[4] + ", " + fields[2]
    return bestPSAP + "; " + PSAPAddress
def determineToCall(PSAPNumber):
    '''
    Map a PSAP name (first column of "PhoneData copy.txt") to its phone
    number (second column).  Returns a default fallback number when the
    PSAP is not listed.
    '''
    # fix: 'with' ensures the file handle is closed (the original leaked it)
    with open("PhoneData copy.txt") as f:
        for line in f:
            l = line.split(",")
            if l[0].strip("\n") == PSAPNumber:
                return l[1].strip()
    return "+19732162024"
|
import datetime
from datetime import timedelta
import re
from flask import render_template, request
from app import app
import requests
@app.route('/')
@app.route('/index')
@app.route('/index.html')
def index():
    '''
    Render the tree-status dashboard: monthly closure/uptime data plus push
    and backout counts for the last week and for today, for the requested
    tree (?tree=..., default mozilla-inbound).
    '''
    tree = request.args.get('tree', 'mozilla-inbound')
    closure_months, closure_dates, status, status_reason = main(tree)
    uptime = get_uptime_stats(closure_months)
    x, y = graph_data_for_uptime(closure_months)
    # search window starts one week ago, zero-padded YYYY-MM-DD
    wek = datetime.datetime.now() - timedelta(7)
    week = "%s-%s-%s" % (wek.year,
                         wek.month if wek.month > 9 else "0%s" % wek.month,
                         wek.day if wek.day > 9 else "0%s" % wek.day)
    backouts_since_week = backouts(tree, week)
    tody = datetime.datetime.now()
    backed = 0
    today_pushes = 0
    # fix: the original classes '[b,B]ackout' contained a literal comma, so
    # e.g. ',ackout' also matched; '[bB]' matches only the intended letters
    backoutln = re.compile('^.*[bB]ackout.*')
    backoutln2 = re.compile('^.*[bB]acked out.*')
    backoutln3 = re.compile('^.*[bB]ack out.*')
    backout_hours = [0] * 24
    pushes_hours = [0] * 24
    #if backouts_since_week
    for resp in backouts_since_week['pushes']:
        if (datetime.date.fromtimestamp(int(backouts_since_week['pushes'][resp]['date'])) == datetime.date.today()):
            today_pushes += 1
        # track what hour each push happened in
        bhour = datetime.datetime.fromtimestamp(int(backouts_since_week['pushes'][resp]['date'])).hour
        pushes_hours[bhour] = pushes_hours[bhour] + 1
        for chnge in range(len(backouts_since_week['pushes'][resp]['changesets'])):
            if (backoutln.match(backouts_since_week['pushes'][resp]['changesets'][chnge]['desc']) or
                backoutln2.match(backouts_since_week['pushes'][resp]['changesets'][chnge]['desc']) or
                backoutln3.match(backouts_since_week['pushes'][resp]['changesets'][chnge]['desc'])):
                if (datetime.date.fromtimestamp(int(backouts_since_week['pushes'][resp]['date'])) == datetime.date.today()):
                    backed += 1
                # Lets also track what hour the backouts happened in
                bhour = datetime.datetime.fromtimestamp(int(backouts_since_week['pushes'][resp]['date'])).hour
                backout_hours[bhour] = backout_hours[bhour] + 1
                # count at most one backout per push
                break
    today = "%s-%s-%s" % (tody.year,
                          tody.month if tody.month > 9 else "0%s" % tody.month,
                          tody.day if tody.day > 9 else "0%s" % tody.day)
    return render_template("index.html", total={"x": x, "y": y}, backout_hours=backout_hours, pushes_hours=pushes_hours,
                           backouts=backouts_since_week, today={"total": today_pushes, "backouts": backed, "search_date": today},
                           tree=tree, status={"status": status, "status_reason":status_reason}, uptime=uptime)
def main(tree):
    '''
    Fetch the full treestatus log for a tree and aggregate closure
    durations.

    Returns (month, dates, status, status_reason) where month maps
    "YYYY-MM" -> {'total': timedelta, <reason>: timedelta, ...}, dates is
    the same aggregation keyed by ISO date, and status/status_reason are
    the tree's most recent state.  The months 2012-06/2012-07 are excluded
    from the monthly totals.
    '''
    response = requests.get('https://treestatus.mozilla.org/%s/logs?format=json&all=1' % tree, verify=False)
    results = response.json()
    delta = datetime.timedelta(0)
    closed = None          # timestamp of the currently-open closure interval
    closed_reason = None   # first tag of that closure, or 'no reason'
    dates = {}
    month = {}
    total = datetime.timedelta(0)
    Added = None  # NOTE(review): recorded but never used -- candidate for removal
    # logs[0] is the most recent entry
    status = results['logs'][0]['action']
    status_reason = results['logs'][0]['reason']
    # walk oldest -> newest, pairing each 'closed' with the next re-open
    for item in reversed(results['logs']):
        if item['action'] == 'closed':
            if closed is not None:
                # consecutive 'closed' entries: keep the first
                continue
            closed = datetime.datetime.strptime(item['when'], "%Y-%m-%dT%H:%M:%S")
            closed_reason = item['tags'][0] if len(item['tags']) > 0 else 'no reason'
        elif item['action'] == 'open' or item['action'] == 'approval require':
            if closed is None:
                continue
            opened = datetime.datetime.strptime(item['when'], "%Y-%m-%dT%H:%M:%S")
            delta = opened - closed
            if closed.date().isoformat() in dates:
                try:
                    dates[closed.date().isoformat()]['total'] = dates[closed.date().isoformat()]['total'] + delta
                    dates[closed.date().isoformat()][closed_reason] = dates[closed.date().isoformat()][closed_reason] + delta
                except KeyError:
                    # fix: narrowed from a bare 'except:'; only a missing
                    # per-reason bucket is expected here
                    dates[closed.date().isoformat()][closed_reason] = delta
            else:
                dates[closed.date().isoformat()] = {'total': delta, closed_reason: delta}
            year_month = "%s-%s" % (closed.date().year, closed.date().month if closed.date().month >= 10 else '0%s' % closed.date().month)
            if year_month not in ['2012-06', '2012-07']:
                if year_month in month:
                    month[year_month]['total'] = month[year_month]['total'] + delta
                    try:
                        month[year_month][closed_reason] = month[year_month][closed_reason] + delta
                    except KeyError:
                        # fix: narrowed from a bare 'except:' as above
                        month[year_month][closed_reason] = delta
                else:
                    month[year_month] = {'total': delta, closed_reason: delta}
                total += delta
            closed = None
            closed_reason = None
        elif item['action'] == 'added':
            Added = item['when']
    return month, dates, status, status_reason
def backouts(tree, search_date):
    """Count backout pushes on *tree* since *search_date* (YYYY-MM-DD).

    Merge pushes are discarded before counting.  Returns a dict with the
    totals, the raw pushes, and per-hour histograms of pushes/backouts.
    """
    total_pushes = requests.get("https://hg.mozilla.org/%s/json-pushes?full=1&startdate=%s" %
                                ("integration/%s" % tree if tree != "mozilla-central" else tree, search_date)).json()
    backed = 0
    # [bB] instead of the original [b,B]: a comma inside a character class
    # matches a literal ',' and would wrongly match e.g. ",ackout".
    backoutln = re.compile('^.*[bB]ackout.*')
    backoutln2 = re.compile('^.*[bB]acked out.*')
    backoutln3 = re.compile('^.*[bB]ack out.*')
    merges = re.compile('^.*[Mm]erge .* to .*')
    # Use a set: several changesets in one push can all look like merges.
    keys_to_pop = set()
    for resp in total_pushes:
        for changeset in total_pushes[resp]['changesets']:
            if merges.match(changeset['desc']):
                keys_to_pop.add(resp)
    for key in keys_to_pop:
        total_pushes.pop(key, None)
    backout_hours = [0] * 24
    pushes_hours = [0] * 24
    for resp in total_pushes:
        # Track what hour the push happened in.
        bhour = datetime.datetime.fromtimestamp(int(total_pushes[resp]['date'])).hour
        pushes_hours[bhour] = pushes_hours[bhour] + 1
        for changeset in total_pushes[resp]['changesets']:
            if (backoutln.match(changeset['desc']) or
                    backoutln2.match(changeset['desc']) or
                    backoutln3.match(changeset['desc'])):
                backed += 1
                # Track what hour the backout happened in (same push time).
                backout_hours[bhour] = backout_hours[bhour] + 1
                break
    return {"total": len(total_pushes),
            "backouts": backed,
            "startdate": search_date,
            "pushes": total_pushes,
            "backoutHours": backout_hours,
            "pushesHours": pushes_hours}
def get_uptime_stats(closure_months):
    """Return the percentage uptime for (up to) the last 12 closure months.

    ``closure_months`` maps "YYYY-MM" keys to dicts holding a 'total'
    datetime.timedelta of closure time for that month.  Result order
    follows the sorted month keys.
    """
    from calendar import monthrange
    # Compute the sorted 12-month window once instead of once per list.
    recent = sorted(closure_months)[-12:]
    result = []
    for key in recent:
        year, mon = [int(part) for part in key.split('-')]
        month_secs = monthrange(year, mon)[1] * 24 * 60 * 60
        closed_secs = closure_months[key]['total'].total_seconds()
        result.append(100 - (closed_secs / month_secs) * 100)
    return result
def graph_data_for_uptime(closure_months):
    """Build plot series for the last 12 months of closures.

    Returns ``(x, y)``: ``x`` is a list of "YYYY-MM" labels and ``y`` maps
    each closure reason to a list of hours closed, aligned with ``x``
    (0 where a reason did not occur that month).
    """
    # 'checkin-test' appeared twice in the original dict literal; a
    # duplicate key in a literal is a silent no-op, so list it once.
    y = {'no reason': [],
         'checkin-test': [],
         'checkin-compilation': [],
         'infra': [],
         'other': [],
         'planned': [],
         'backlog': [],
         'total': []}
    c_data = [(datetime.datetime.strptime(k, "%Y-%m"), closure_months[k])
              for k in sorted(closure_months.keys())[-12:]]
    x = ["%d-%02d" % (date.year, date.month) for (date, value) in c_data]
    for date, reasons in c_data:
        # Pad reasons absent this month with 0 so every series in y keeps
        # the same length as x.
        for missing in [k for k in y.keys() if k not in reasons.keys()]:
            y[missing].append(0)
        for reason in reasons.keys():
            y[reason].append(reasons[reason].total_seconds() / 3600)
    return x, y
Handle the case when we can't get data from the hg webserver
import datetime
from datetime import timedelta
import re
from flask import render_template, request
from app import app
import requests
@app.route('/')
@app.route('/index')
@app.route('/index.html')
def index():
    """Render the dashboard for ?tree=<name> (default mozilla-inbound).

    Combines monthly closure stats, a 12-month uptime series, one week
    of push/backout counts, and today's totals.
    """
    tree = request.args.get('tree', 'mozilla-inbound')
    closure_months, closure_dates, status, status_reason = main(tree)
    uptime = get_uptime_stats(closure_months)
    x, y = graph_data_for_uptime(closure_months)
    # Zero-padded YYYY-MM-DD string for one week ago.
    wek = datetime.datetime.now() - timedelta(7)
    week = "%s-%s-%s" % (wek.year,
                         wek.month if wek.month > 9 else "0%s" % wek.month,
                         wek.day if wek.day > 9 else "0%s" % wek.day)
    backouts_since_week = backouts(tree, week)
    tody = datetime.datetime.now()
    backed = 0
    today_pushes = 0
    # NOTE(review): the ',' inside [b,B] is a literal comma in the class,
    # so these also match e.g. ",ackout" -- probably meant [bB].
    backoutln = re.compile('^.*[b,B]ackout.*')
    backoutln2 = re.compile('^.*[b,B]acked out.*')
    backoutln3 = re.compile('^.*[b,B]ack out.*')
    backout_hours = [0] * 24
    pushes_hours = [0] * 24
    # backouts() can return None for trees it cannot query.
    if backouts_since_week is not None:
        for resp in backouts_since_week['pushes']:
            if (datetime.date.fromtimestamp(int(backouts_since_week['pushes'][resp]['date'])) == datetime.date.today()):
                today_pushes += 1
                bhour = datetime.datetime.fromtimestamp(int(backouts_since_week['pushes'][resp]['date'])).hour
                pushes_hours[bhour] = pushes_hours[bhour] + 1
            for chnge in range(len(backouts_since_week['pushes'][resp]['changesets'])):
                if (backoutln.match(backouts_since_week['pushes'][resp]['changesets'][chnge]['desc']) or
                        backoutln2.match(backouts_since_week['pushes'][resp]['changesets'][chnge]['desc']) or
                        backoutln3.match(backouts_since_week['pushes'][resp]['changesets'][chnge]['desc'])):
                    if (datetime.date.fromtimestamp(int(backouts_since_week['pushes'][resp]['date'])) == datetime.date.today()):
                        backed += 1
                        # Lets also track what hour the backouts happened in
                        bhour = datetime.datetime.fromtimestamp(int(backouts_since_week['pushes'][resp]['date'])).hour
                        backout_hours[bhour] = backout_hours[bhour] + 1
                    break
    today = "%s-%s-%s" % (tody.year,
                          tody.month if tody.month > 9 else "0%s" % tody.month,
                          tody.day if tody.day > 9 else "0%s" % tody.day)
    return render_template("index.html", total={"x": x, "y": y}, backout_hours=backout_hours, pushes_hours=pushes_hours,
                           backouts=backouts_since_week, today={"total": today_pushes, "backouts": backed, "search_date": today},
                           tree=tree, status={"status": status, "status_reason":status_reason}, uptime=uptime)
def main(tree):
    """Fetch the treestatus closure log for *tree* and aggregate closures.

    Returns (month, dates, status, status_reason): per-month and per-day
    closure durations keyed by closure reason, plus the newest log
    entry's action and reason.
    """
    # NOTE(review): verify=False disables TLS certificate verification.
    response = requests.get('https://treestatus.mozilla.org/%s/logs?format=json&all=1' % tree, verify=False)
    results = response.json()
    delta = datetime.timedelta(0)
    closed = None
    closed_reason = None
    dates = {}
    month = {}
    total = datetime.timedelta(0)
    Added = None  # assigned below but never used
    status = results['logs'][0]['action']
    status_reason = results['logs'][0]['reason']
    # Logs are newest-first; iterate oldest-first, pairing each 'closed'
    # with the next 'open'/'approval require' to get closure durations.
    for item in reversed(results['logs']):
        if item['action'] == 'closed':
            if closed is not None:
                continue
            closed = datetime.datetime.strptime(item['when'], "%Y-%m-%dT%H:%M:%S")
            closed_reason = item['tags'][0] if len(item['tags']) > 0 else 'no reason'
        elif item['action'] == 'open' or item['action'] == 'approval require':
            if closed is None:
                continue
            opened = datetime.datetime.strptime(item['when'], "%Y-%m-%dT%H:%M:%S")
            delta = opened - closed
            if closed.date().isoformat() in dates:
                # NOTE(review): bare except here masks everything; in
                # practice it catches the KeyError raised when
                # closed_reason is new for this day ('total' has already
                # been updated by then).
                try:
                    dates[closed.date().isoformat()]['total'] = dates[closed.date().isoformat()]['total'] + delta
                    dates[closed.date().isoformat()][closed_reason] = dates[closed.date().isoformat()][closed_reason] + delta
                except:
                    dates[closed.date().isoformat()][closed_reason] = delta
            else:
                dates[closed.date().isoformat()] = {'total': delta, closed_reason: delta}
            year_month = "%s-%s" % (closed.date().year, closed.date().month if closed.date().month >= 10 else '0%s' % closed.date().month)
            # June/July 2012 are deliberately excluded from monthly stats.
            if year_month not in ['2012-06', '2012-07']:
                if year_month in month:
                    month[year_month]['total'] = month[year_month]['total'] + delta
                    try:
                        month[year_month][closed_reason] = month[year_month][closed_reason] + delta
                    except:
                        month[year_month][closed_reason] = delta
                else:
                    month[year_month] = {'total': delta, closed_reason: delta}
            total += delta
            closed = None
            closed_reason = None
        elif item['action'] == 'added':
            Added = item['when']
    return month, dates, status, status_reason
def backouts(tree, search_date):
    """Count backout pushes on *tree* since *search_date* (YYYY-MM-DD).

    Returns None for comm-* trees (not hosted under the queried paths);
    otherwise a dict of totals, the raw pushes, and per-hour histograms.
    """
    if tree.startswith('comm-'):
        return None
    total_pushes = requests.get("https://hg.mozilla.org/%s/json-pushes?full=1&startdate=%s" %
                                ("integration/%s" % tree if tree != "mozilla-central" else tree, search_date)).json()
    backed = 0
    # NOTE(review): the ',' inside [b,B] is a literal comma in the class,
    # so these also match e.g. ",ackout" -- probably meant [bB].
    backoutln = re.compile('^.*[b,B]ackout.*')
    backoutln2 = re.compile('^.*[b,B]acked out.*')
    backoutln3 = re.compile('^.*[b,B]ack out.*')
    merges = re.compile('^.*[M,m]erge .* to .*')
    keys_to_pop = []
    # Drop merge pushes entirely before counting backouts.
    for resp in total_pushes:
        for chnge in range(len(total_pushes[resp]['changesets'])):
            if merges.match(total_pushes[resp]['changesets'][chnge]['desc']):
                keys_to_pop.append(resp)
    for key in keys_to_pop:
        total_pushes.pop(key, None)
    backout_hours = [0] * 24
    pushes_hours = [0] * 24
    for resp in total_pushes:
        # Lets also track what hour the push happened in
        bhour = datetime.datetime.fromtimestamp(int(total_pushes[resp]['date'])).hour
        pushes_hours[bhour] = pushes_hours[bhour] + 1
        for chnge in range(len(total_pushes[resp]['changesets'])):
            if (backoutln.match(total_pushes[resp]['changesets'][chnge]['desc']) or
                    backoutln2.match(total_pushes[resp]['changesets'][chnge]['desc']) or
                    backoutln3.match(total_pushes[resp]['changesets'][chnge]['desc'])):
                backed += 1
                # Lets also track what hour the backouts happened in
                bhour = datetime.datetime.fromtimestamp(int(total_pushes[resp]['date'])).hour
                backout_hours[bhour] = backout_hours[bhour] + 1
                break
    return {"total": len(total_pushes),
            "backouts": backed,
            "startdate": search_date,
            "pushes": total_pushes,
            "backoutHours": backout_hours,
            "pushesHours": pushes_hours}
def get_uptime_stats(closure_months):
    """Percentage uptime per month for the most recent 12 closure months."""
    from calendar import monthrange
    recent_keys = sorted(closure_months.keys())[-12:]
    days_in_month = [monthrange(*[int(y) for y in x.split('-')])[1] for x in recent_keys]
    total_hours = [closure_months[x]['total'].total_seconds() for x in recent_keys]
    result = []
    # Walk the two parallel lists in lockstep instead of a manual counter.
    for closed_secs, days in zip(total_hours, days_in_month):
        month_secs = days * 24 * 60 * 60
        result.append(100 - ((closed_secs / month_secs) * 100))
    return result
def graph_data_for_uptime(closure_months):
    """Series for plotting: month labels plus hours closed per reason."""
    reason_names = ('no reason', 'checkin-test', 'checkin-compilation',
                    'infra', 'other', 'planned', 'backlog', 'total')
    y = {reason: [] for reason in reason_names}
    recent = sorted(closure_months.keys())[-12:]
    c_data = [(datetime.datetime.strptime(key, "%Y-%m"), closure_months[key]) for key in recent]
    x = ["%s-%s" % (when.year, when.month if when.month > 9 else "0%s" % when.month) for (when, _) in c_data]
    for _, month_stats in c_data:
        # Keep every series the same length as x: pad absent reasons.
        for absent in [r for r in y.keys() if r not in month_stats.keys()]:
            y[absent].append(0)
        # Append the closure hours for each reason seen this month.
        for reason in month_stats.keys():
            y[reason].append(month_stats[reason].total_seconds() / 3600)
    return x, y
|
from flask import Flask, render_template, url_for, request
from flask import Response
from app import app
import requests, os, json
import twilio_routes
from twilio import twiml
#----------------------------------------
# Routes
#----------------------------------------
@app.route('/')
def index():
    """Home page: newest death-switch message plus all saved death texts."""
    # Parse REST API credentials come from the Flask config.
    headers = {
        "X-Parse-Application-Id" : app.config['X_PARSE_APPLICATION_ID'],
        "X-Parse-REST-API-Key" : app.config['X_PARSE_REST_API_KEY']
    }
    r = requests.get("https://api.parse.com/1/classes/DeathSwitchMessage", headers=headers)
    r = r.json()
    death_switch_messages = r['results']
    # NOTE(review): pop() raises IndexError when no messages exist yet.
    death_switch_message = death_switch_messages.pop()
    death_switch_message = death_switch_message["text"]
    r = requests.get("https://api.parse.com/1/classes/DeathTexts", headers=headers)
    r = r.json()
    r = r['results']
    death_texts = []
    for i in r:
        death_texts.append(i['text'])
    return render_template('index.html', death_switch_message=death_switch_message, death_texts=death_texts)
@app.route('/save_text', methods=['POST'])
def save_text():
    """Persist an edited text to Parse and echo the saved value back.

    Only the 'death-switch-message' field is actually stored; any other
    form id just has its value echoed back unchanged.
    """
    headers = {
        "X-Parse-Application-Id" : app.config['X_PARSE_APPLICATION_ID'],
        "X-Parse-REST-API-Key" : app.config['X_PARSE_REST_API_KEY']
    }
    # Renamed from 'id' to avoid shadowing the builtin of the same name.
    field_id = request.form['id']
    post_data = json.dumps({"text" : request.form['value']})
    if field_id == 'death-switch-message':
        requests.post("https://api.parse.com/1/classes/DeathSwitchMessage/", data=post_data, headers=headers)
    return json.loads(post_data)["text"]
@app.route('/voicemail', methods=['GET', 'POST'])
def voicemail():
    """TwiML endpoint: play the mp3 given by the 'sound' query parameter."""
    # Fix: the module is imported as 'from twilio import twiml', so the
    # original 'twilio.twiml.Response()' raised NameError ('twilio' is
    # not bound in this module).
    resp = twiml.Response()
    # Play an mp3
    sound = request.args.get("sound")
    resp.play(sound)
    return str(resp)
Testing twiml. My local version is broken, though.
from flask import Flask, render_template, url_for, request
from flask import Response
from app import app
import requests, os, json
import twilio_routes
from twilio import twiml
#----------------------------------------
# Routes
#----------------------------------------
@app.route('/')
def index():
    """Home page: newest death-switch message plus all saved death texts."""
    # Parse REST API credentials come from the Flask config.
    headers = {
        "X-Parse-Application-Id" : app.config['X_PARSE_APPLICATION_ID'],
        "X-Parse-REST-API-Key" : app.config['X_PARSE_REST_API_KEY']
    }
    r = requests.get("https://api.parse.com/1/classes/DeathSwitchMessage", headers=headers)
    r = r.json()
    death_switch_messages = r['results']
    # NOTE(review): pop() raises IndexError when no messages exist yet.
    death_switch_message = death_switch_messages.pop()
    death_switch_message = death_switch_message["text"]
    r = requests.get("https://api.parse.com/1/classes/DeathTexts", headers=headers)
    r = r.json()
    r = r['results']
    death_texts = []
    for i in r:
        death_texts.append(i['text'])
    return render_template('index.html', death_switch_message=death_switch_message, death_texts=death_texts)
@app.route('/save_text', methods=['POST'])
def save_text():
    """Persist an edited text to Parse and echo the saved value back."""
    headers = {
        "X-Parse-Application-Id" : app.config['X_PARSE_APPLICATION_ID'],
        "X-Parse-REST-API-Key" : app.config['X_PARSE_REST_API_KEY']
    }
    # NOTE(review): 'id' shadows the builtin of the same name.
    id = request.form['id']
    post_data = json.dumps({"text" : request.form['value']})
    # Only the death-switch message is actually persisted to Parse.
    if id == 'death-switch-message':
        requests.post("https://api.parse.com/1/classes/DeathSwitchMessage/", data=post_data, headers=headers)
    return json.loads(post_data)["text"]
@app.route('/voicemail', methods=['GET', 'POST'])
def voicemail():
    """TwiML endpoint: play the mp3 named by the 'sound' query parameter."""
    resp = twiml.Response()
    # Play an mp3
    sound = request.args.get("sound")
    resp.play(sound)
    return str(resp)
    # if request.method == 'GET':
    #     # sound = "http://after-the-tone.s3-us-west-1.amazonaws.com/i-have-died.mp3"
    #     sound = request.args.get("sound")
    #     xml = '<?xml version="1.0" encoding="UTF-8"?><Response><Play>%s</Play></Response>' % (sound)
    #     return Response(xml, mimetype ='text/xml')
    # if request.method == "POST":
    #     pass
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
from cms.admin.forms import PageForm
from cms.api import create_page
from cms.models import Page, Title
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.plugins.text.models import Text
from cms.sitemaps import CMSSitemap
from cms.test_utils.testcases import (CMSTestCase, URL_CMS_PAGE,
URL_CMS_PAGE_ADD)
from cms.test_utils.util.context_managers import (LanguageOverride,
SettingsOverride)
from cms.utils.page_resolver import get_page_from_request
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
import datetime
import os.path
class PagesTestCase(CMSTestCase):
    def test_add_page(self):
        """
        Test that the add admin page could be displayed via the admin
        """
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            response = self.client.get(URL_CMS_PAGE_ADD)
            self.assertEqual(response.status_code, 200)

    def test_create_page(self):
        """
        Test that a page can be created via the admin
        """
        page_data = self.get_new_page_data()
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            self.assertRedirects(response, URL_CMS_PAGE)
            title = Title.objects.get(slug=page_data['slug'])
            self.assertNotEqual(title, None)
            page = title.page
            page.published = True
            page.save()
            self.assertEqual(page.get_title(), page_data['title'])
            self.assertEqual(page.get_slug(), page_data['slug'])
            self.assertEqual(page.placeholders.all().count(), 2)
            # were public instances created?
            title = Title.objects.drafts().get(slug=page_data['slug'])

    def test_slug_collision(self):
        """
        Test a slug collision
        """
        page_data = self.get_new_page_data()
        # create first page
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            self.assertRedirects(response, URL_CMS_PAGE)
            #page1 = Title.objects.get(slug=page_data['slug']).page
            # create page with the same page_data
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            # NOTE(review): 'settings.i18n_installed' is an unusual
            # lowercase setting name -- confirm it exists in this
            # project's settings module.
            if settings.i18n_installed:
                self.assertEqual(response.status_code, 302)
                # did we get the right redirect?
                self.assertEqual(response['Location'].endswith(URL_CMS_PAGE), True)
            else:
                self.assertEqual(response.status_code, 200)
                self.assertEqual(response['Location'].endswith(URL_CMS_PAGE_ADD), True)
        # TODO: check for slug collisions after move
        # TODO: check for slug collisions with different settings
def test_details_view(self):
"""
Test the details view
"""
superuser = self.get_superuser()
with self.login_user_context(superuser):
response = self.client.get(self.get_pages_root())
self.assertEqual(response.status_code, 404)
page = create_page('test page 1', "nav_playground.html", "en")
response = self.client.get(self.get_pages_root())
self.assertEqual(response.status_code, 404)
self.assertTrue(page.publish())
create_page("test page 2", "nav_playground.html", "en",
parent=page, published=True)
homepage = Page.objects.get_home()
self.assertTrue(homepage.get_slug(), 'test-page-1')
response = self.client.get(self.get_pages_root())
self.assertEqual(response.status_code, 200)
    def test_edit_page(self):
        """
        Test that a page can be edited via the admin
        """
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            page_data = self.get_new_page_data()
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            page = Page.objects.get(title_set__slug=page_data['slug'])
            response = self.client.get('/admin/cms/page/%s/' %page.id)
            self.assertEqual(response.status_code, 200)
            page_data['title'] = 'changed title'
            response = self.client.post('/admin/cms/page/%s/' %page.id, page_data)
            self.assertRedirects(response, URL_CMS_PAGE)
            self.assertEqual(page.get_title(), 'changed title')

    def test_meta_description_and_keywords_fields_from_admin(self):
        """
        Test that description and keywords tags can be set via the admin
        """
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            page_data = self.get_new_page_data()
            page_data["meta_description"] = "I am a page"
            page_data["meta_keywords"] = "page,cms,stuff"
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            page = Page.objects.get(title_set__slug=page_data['slug'])
            response = self.client.get('/admin/cms/page/%s/' %page.id)
            self.assertEqual(response.status_code, 200)
            page_data['meta_description'] = 'I am a duck'
            response = self.client.post('/admin/cms/page/%s/' %page.id, page_data)
            self.assertRedirects(response, URL_CMS_PAGE)
            page = Page.objects.get(title_set__slug=page_data["slug"])
            self.assertEqual(page.get_meta_description(), 'I am a duck')
            self.assertEqual(page.get_meta_keywords(), 'page,cms,stuff')

    def test_meta_description_and_keywords_from_template_tags(self):
        # Renders the page_attribute template tags against a freshly
        # created page and checks title/description/keywords output.
        from django import template
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            page_data = self.get_new_page_data()
            page_data["title"] = "Hello"
            page_data["meta_description"] = "I am a page"
            page_data["meta_keywords"] = "page,cms,stuff"
            self.client.post(URL_CMS_PAGE_ADD, page_data)
            page = Page.objects.get(title_set__slug=page_data['slug'])
            self.client.post('/admin/cms/page/%s/' %page.id, page_data)
            t = template.Template("{% load cms_tags %}{% page_attribute title %} {% page_attribute meta_description %} {% page_attribute meta_keywords %}")
            req = HttpRequest()
            page.published = True
            page.save()
            req.current_page = page
            req.REQUEST = {}
            self.assertEqual(t.render(template.Context({"request": req})), "Hello I am a page page,cms,stuff")

    def test_copy_page(self):
        """
        Test that a page can be copied via the admin
        """
        page_a = create_page("page_a", "nav_playground.html", "en")
        page_a_a = create_page("page_a_a", "nav_playground.html", "en",
                               parent=page_a)
        create_page("page_a_a_a", "nav_playground.html", "en", parent=page_a_a)
        page_b = create_page("page_b", "nav_playground.html", "en")
        page_b_a = create_page("page_b", "nav_playground.html", "en",
                               parent=page_b)
        count = Page.objects.drafts().count()
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            self.copy_page(page_a, page_b_a)
        # page_a's three-page subtree should have been duplicated.
        self.assertEqual(Page.objects.drafts().count() - count, 3)

    def test_language_change(self):
        # The page admin must load for both configured languages.
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            page_data = self.get_new_page_data()
            self.client.post(URL_CMS_PAGE_ADD, page_data)
            pk = Page.objects.all()[0].pk
            response = self.client.get("/admin/cms/page/%s/" % pk, {"language":"en" })
            self.assertEqual(response.status_code, 200)
            response = self.client.get("/admin/cms/page/%s/" % pk, {"language":"de" })
            self.assertEqual(response.status_code, 200)
    def test_move_page(self):
        # Create three pages, nest them via the move-page admin view, then
        # verify paths/URLs update correctly, including after publishing.
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            page_data1 = self.get_new_page_data()
            self.client.post(URL_CMS_PAGE_ADD, page_data1)
            page_data2 = self.get_new_page_data()
            self.client.post(URL_CMS_PAGE_ADD, page_data2)
            page_data3 = self.get_new_page_data()
            self.client.post(URL_CMS_PAGE_ADD, page_data3)
            page1 = Page.objects.all()[0]
            page2 = Page.objects.all()[1]
            page3 = Page.objects.all()[2]
            # move pages
            response = self.client.post("/admin/cms/page/%s/move-page/" % page3.pk, {"target": page2.pk, "position": "last-child"})
            self.assertEqual(response.status_code, 200)
            response = self.client.post("/admin/cms/page/%s/move-page/" % page2.pk, {"target": page1.pk, "position": "last-child"})
            self.assertEqual(response.status_code, 200)
            # check page2 path and url
            page2 = Page.objects.get(pk=page2.pk)
            self.assertEqual(page2.get_path(), page_data1['slug']+"/"+page_data2['slug'])
            self.assertEqual(page2.get_absolute_url(), self.get_pages_root()+page_data1['slug']+"/"+page_data2['slug']+"/")
            # check page3 path and url
            page3 = Page.objects.get(pk=page3.pk)
            self.assertEqual(page3.get_path(), page_data1['slug']+"/"+page_data2['slug']+"/"+page_data3['slug'])
            self.assertEqual(page3.get_absolute_url(), self.get_pages_root()+page_data1['slug']+"/"+page_data2['slug']+"/"+page_data3['slug']+"/")
            # publish page 1 (becomes home)
            page1 = Page.objects.get(pk=page1.pk)
            page1.publish()
            public_page1 = page1.publisher_public
            self.assertEqual(public_page1.get_path(), '')
            # check that page2 and page3 url have changed
            page2 = Page.objects.get(pk=page2.pk)
            page2.publish()
            public_page2 = page2.publisher_public
            self.assertEqual(public_page2.get_absolute_url(), self.get_pages_root()+page_data2['slug']+"/")
            page3 = Page.objects.get(pk=page3.pk)
            page3.publish()
            public_page3 = page3.publisher_public
            self.assertEqual(public_page3.get_absolute_url(), self.get_pages_root()+page_data2['slug']+"/"+page_data3['slug']+"/")
            # move page2 back to root and check path of 2 and 3
            response = self.client.post("/admin/cms/page/%s/move-page/" % page2.pk, {"target": page1.pk, "position": "right"})
            self.assertEqual(response.status_code, 200)
            page1 = Page.objects.get(pk=page1.pk)
            self.assertEqual(page1.get_path(), page_data1['slug'])
            page2 = Page.objects.get(pk=page2.pk)
            self.assertEqual(page2.get_path(), page_data2['slug'])
            page3 = Page.objects.get(pk=page3.pk)
            self.assertEqual(page3.get_path(), page_data2['slug']+"/"+page_data3['slug'])

    def test_move_page_inherit(self):
        # A child using the template-inheritance magic keeps inheriting
        # after being moved to a sibling position.
        parent = create_page("Parent", 'col_three.html', "en")
        child = create_page("Child", settings.CMS_TEMPLATE_INHERITANCE_MAGIC,
                            "en", parent=parent)
        self.assertEqual(child.get_template(), parent.get_template())
        child.move_page(parent, 'left')
        self.assertEqual(child.get_template(), parent.get_template())

    def test_add_placeholder(self):
        # create page
        page = create_page("Add Placeholder", "nav_playground.html", "en",
                           position="last-child", published=True, in_navigation=True)
        page.template = 'add_placeholder.html'
        page.save()
        url = page.get_absolute_url()
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
        # Temporarily add a second placeholder tag to the template file on
        # disk and make sure the page still renders; the original file
        # contents are written back afterwards.
        path = os.path.join(settings.PROJECT_DIR, 'templates', 'add_placeholder.html')
        f = open(path, 'r')
        old = f.read()
        f.close()
        new = old.replace(
            '<!-- SECOND_PLACEHOLDER -->',
            '{% placeholder second_placeholder %}'
        )
        f = open(path, 'w')
        f.write(new)
        f.close()
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
        # NOTE(review): if the request above raises, the template is never
        # restored -- a try/finally would be safer.
        f = open(path, 'w')
        f.write(old)
        f.close()

    def test_sitemap_login_required_pages(self):
        """
        Test that CMSSitemap object contains only published,public (login_required=False) pages
        """
        create_page("page", "nav_playground.html", "en", login_required=True,
                    published=True, in_navigation=True)
        self.assertEqual(CMSSitemap().items().count(),0)
    def test_edit_page_other_site_and_language(self):
        """
        Test that a page can be edited via the admin when your current site is
        different from the site you are editing and the language isn't available
        for the current site.
        """
        site = Site.objects.create(domain='otherlang', name='otherlang')
        # Change site for this session
        page_data = self.get_new_page_data()
        page_data['site'] = site.pk
        page_data['title'] = 'changed title'
        TESTLANG = settings.CMS_SITE_LANGUAGES[site.pk][0]
        page_data['language'] = TESTLANG
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            self.assertRedirects(response, URL_CMS_PAGE)
            page = Page.objects.get(title_set__slug=page_data['slug'])
            with LanguageOverride(TESTLANG):
                self.assertEqual(page.get_title(), 'changed title')

    def test_flat_urls(self):
        # With CMS_FLAT_URLS every page is reachable at /<slug>/ and child
        # URLs must not contain their ancestors' slugs.
        with SettingsOverride(CMS_FLAT_URLS=True):
            home_slug = "home"
            child_slug = "child"
            grandchild_slug = "grandchild"
            home = create_page(home_slug, "nav_playground.html", "en",
                               published=True, in_navigation=True)
            home.publish()
            child = create_page(child_slug, "nav_playground.html", "en",
                                parent=home, published=True, in_navigation=True)
            child.publish()
            grandchild = create_page(grandchild_slug, "nav_playground.html", "en",
                                     parent=child, published=True, in_navigation=True)
            grandchild.publish()
            response = self.client.get(home.get_absolute_url())
            self.assertEqual(response.status_code, 200)
            response = self.client.get(child.get_absolute_url())
            self.assertEqual(response.status_code, 200)
            response = self.client.get(grandchild.get_absolute_url())
            self.assertEqual(response.status_code, 200)
            self.assertFalse(child.get_absolute_url() in grandchild.get_absolute_url())

    def test_templates(self):
        """
        Test the inheritance magic for templates
        """
        parent = create_page("parent", "nav_playground.html", "en")
        child = create_page("child", "nav_playground.html", "en", parent=parent)
        child.template = settings.CMS_TEMPLATE_INHERITANCE_MAGIC
        child.save()
        self.assertEqual(child.template, settings.CMS_TEMPLATE_INHERITANCE_MAGIC)
        self.assertEqual(parent.get_template_name(), child.get_template_name())
        # A root page with the magic template falls back to the first
        # entry in CMS_TEMPLATES.
        parent.template = settings.CMS_TEMPLATE_INHERITANCE_MAGIC
        parent.save()
        self.assertEqual(parent.template, settings.CMS_TEMPLATE_INHERITANCE_MAGIC)
        self.assertEqual(parent.get_template(), settings.CMS_TEMPLATES[0][0])
        self.assertEqual(parent.get_template_name(), settings.CMS_TEMPLATES[0][1])

    def test_delete_with_plugins(self):
        """
        Check that plugins and placeholders get correctly deleted when we delete
        a page!
        """
        page = create_page("page", "nav_playground.html", "en")
        page.rescan_placeholders() # create placeholders
        placeholder = page.placeholders.all()[0]
        plugin_base = CMSPlugin(
            plugin_type='TextPlugin',
            placeholder=placeholder,
            position=1,
            language=settings.LANGUAGES[0][0]
        )
        plugin_base.insert_at(None, position='last-child', save=False)
        plugin = Text(body='')
        plugin_base.set_base_attr(plugin)
        plugin.save()
        self.assertEqual(CMSPlugin.objects.count(), 1)
        self.assertEqual(Text.objects.count(), 1)
        self.assertTrue(Placeholder.objects.count() > 0)
        # Deleting the page must cascade to its plugins and placeholders.
        page.delete()
        self.assertEqual(CMSPlugin.objects.count(), 0)
        self.assertEqual(Text.objects.count(), 0)
        self.assertEqual(Placeholder.objects.count(), 0)
    def test_get_page_from_request_on_non_cms_admin(self):
        # Non-CMS admin URLs resolve to no page.
        request = self.get_request(
            reverse('admin:sampleapp_category_change', args=(1,))
        )
        page = get_page_from_request(request)
        self.assertEqual(page, None)

    def test_get_page_from_request_on_cms_admin(self):
        # The page-change admin URL resolves to the edited page itself.
        page = create_page("page", "nav_playground.html", "en")
        request = self.get_request(
            reverse('admin:cms_page_change', args=(page.pk,))
        )
        found_page = get_page_from_request(request)
        self.assertTrue(found_page)
        self.assertEqual(found_page.pk, page.pk)

    def test_get_page_from_request_on_cms_admin_nopage(self):
        # Admin URL for a page id that does not exist yields None.
        request = self.get_request(
            reverse('admin:cms_page_change', args=(1,))
        )
        page = get_page_from_request(request)
        self.assertEqual(page, None)

    def test_get_page_from_request_cached(self):
        # A value cached on the request object is returned untouched.
        mock_page = 'hello world'
        request = self.get_request(
            reverse('admin:sampleapp_category_change', args=(1,))
        )
        request._current_page_cache = mock_page
        page = get_page_from_request(request)
        self.assertEqual(page, mock_page)

    def test_get_page_from_request_nopage(self):
        # '/' with no pages created resolves to None.
        request = self.get_request('/')
        page = get_page_from_request(request)
        self.assertEqual(page, None)

    def test_get_page_from_request_with_page_404(self):
        # A URL matching no page resolves to None even when other
        # published pages exist.
        page = create_page("page", "nav_playground.html", "en", published=True)
        page.publish()
        request = self.get_request('/does-not-exist/')
        found_page = get_page_from_request(request)
        self.assertEqual(found_page, None)

    def test_get_page_from_request_with_page_preview(self):
        # Preview of an unpublished page: hidden from non-staff users,
        # visible to a logged-in superuser requesting the draft.
        page = create_page("page", "nav_playground.html", "en")
        request = self.get_request('%s?preview' % page.get_absolute_url())
        request.user.is_staff = False
        found_page = get_page_from_request(request)
        self.assertEqual(found_page, None)
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            request = self.get_request('%s?preview&draft' % page.get_absolute_url())
            found_page = get_page_from_request(request)
            self.assertTrue(found_page)
            self.assertEqual(found_page.pk, page.pk)

    def test_get_page_from_request_on_cms_admin_with_editplugin(self):
        # Plugin-edit admin URLs still resolve to the owning page.
        page = create_page("page", "nav_playground.html", "en")
        request = self.get_request(
            reverse('admin:cms_page_change', args=(page.pk,)) + 'edit-plugin/42/'
        )
        found_page = get_page_from_request(request)
        self.assertTrue(found_page)
        self.assertEqual(found_page.pk, page.pk)

    def test_get_page_from_request_on_cms_admin_with_editplugin_nopage(self):
        # Plugin-edit URL for a missing page id resolves to None.
        request = self.get_request(
            reverse('admin:cms_page_change', args=(1,)) + 'edit-plugin/42/'
        )
        page = get_page_from_request(request)
        self.assertEqual(page, None)
def test_page_already_expired(self):
"""
Test that a page which has a end date in the past gives a 404, not a
500.
"""
yesterday = datetime.date.today() - datetime.timedelta(days=1)
with SettingsOverride(CMS_MODERATOR=False, CMS_PERMISSION=False):
page = create_page('page', 'nav_playground.html', 'en',
publication_end_date=yesterday, published=True)
resp = self.client.get(page.get_absolute_url('en'))
self.assertEqual(resp.status_code, 404)
def test_existing_overwrite_url(self):
with SettingsOverride(CMS_MODERATOR=False, CMS_PERMISSION=False):
create_page('home', 'nav_playground.html', 'en', published=True)
create_page('boo', 'nav_playground.html', 'en', published=True)
data = {
'title': 'foo',
'overwrite_url': '/boo/',
'slug': 'foo',
'language': 'en',
'template': 'nav_playground.html',
'site': 1,
}
form = PageForm(data)
self.assertFalse(form.is_valid())
self.assertTrue('overwrite_url' in form.errors)
def test_page_urls(self):
page1 = create_page('test page 1', 'nav_playground.html', 'en',
published=True)
page2 = create_page('test page 2', 'nav_playground.html', 'en',
published=True, parent=page1)
page3 = create_page('test page 3', 'nav_playground.html', 'en',
published=True, parent=page2)
page4 = create_page('test page 4', 'nav_playground.html', 'en',
published=True)
page5 = create_page('test page 5', 'nav_playground.html', 'en',
published=True, parent=page4)
self.assertEqual(page1.get_absolute_url(),
self.get_pages_root()+'')
self.assertEqual(page2.get_absolute_url(),
self.get_pages_root()+'test-page-2/')
self.assertEqual(page3.get_absolute_url(),
self.get_pages_root()+'test-page-2/test-page-3/')
self.assertEqual(page4.get_absolute_url(),
self.get_pages_root()+'test-page-4/')
self.assertEqual(page5.get_absolute_url(),
self.get_pages_root()+'test-page-4/test-page-5/')
page3 = self.move_page(page3, page1)
self.assertEqual(page3.get_absolute_url(),
self.get_pages_root()+'test-page-3/')
page5 = self.move_page(page5, page2)
self.assertEqual(page5.get_absolute_url(),
self.get_pages_root()+'test-page-2/test-page-5/')
page3 = self.move_page(page3, page4)
self.assertEqual(page3.get_absolute_url(),
self.get_pages_root()+'test-page-4/test-page-3/')
    def test_page_overwrite_urls(self):
        """overwrite_url must survive slug changes and saves of ancestor pages."""
        page1 = create_page('test page 1', 'nav_playground.html', 'en',
            published=True)
        page2 = create_page('test page 2', 'nav_playground.html', 'en',
            published=True, parent=page1)
        page3 = create_page('test page 3', 'nav_playground.html', 'en',
            published=True, parent=page2, overwrite_url='i-want-another-url')
        self.assertEqual(page2.get_absolute_url(),
            self.get_pages_root()+'test-page-2/')
        self.assertEqual(page3.get_absolute_url(),
            self.get_pages_root()+'i-want-another-url/')
        # changing the parent's slug must not touch the overwritten child URL
        title2 = page2.title_set.get()
        title2.slug = 'page-test-2'
        title2.save()
        page2 = Page.objects.get(pk=page2.pk)
        page3 = Page.objects.get(pk=page3.pk)
        self.assertEqual(page2.get_absolute_url(),
            self.get_pages_root()+'page-test-2/')
        self.assertEqual(page3.get_absolute_url(),
            self.get_pages_root()+'i-want-another-url/')
        # tests a bug found in 2.2 where saving an ancestor page
        # wiped out the overwrite_url for child pages
        page2.save()
        self.assertEqual(page3.get_absolute_url(),
            self.get_pages_root()+'i-want-another-url/')
    def test_home_slug_not_accessible(self):
        """The home page is served from '/' only; its slug URL must 404."""
        with SettingsOverride(CMS_MODERATOR=False, CMS_PERMISSION=False):
            page = create_page('page', 'nav_playground.html', 'en', published=True)
            self.assertEqual(page.get_absolute_url('en'), '/')
            resp = self.client.get('/en/')
            self.assertEqual(resp.status_code, HttpResponse.status_code)
            resp = self.client.get('/en/page/')
            self.assertEqual(resp.status_code, HttpResponseNotFound.status_code)
def test_public_home_page_replaced(self):
"""Test that publishing changes to the home page doesn't move the public version"""
home = create_page('home', 'nav_playground.html', 'en', published = True, slug = 'home')
self.assertEqual(Page.objects.drafts().get_home().get_slug(), 'home')
home.publish()
self.assertEqual(Page.objects.public().get_home().get_slug(), 'home')
other = create_page('other', 'nav_playground.html', 'en', published = True, slug = 'other')
other.publish()
self.assertEqual(Page.objects.drafts().get_home().get_slug(), 'home')
self.assertEqual(Page.objects.public().get_home().get_slug(), 'home')
home = Page.objects.get(pk = home.id)
home.in_navigation = True
home.save()
home.publish()
self.assertEqual(Page.objects.drafts().get_home().get_slug(), 'home')
self.assertEqual(Page.objects.public().get_home().get_slug(), 'home')
class NoAdminPageTests(CMSTestCase):
    """Page resolution must still work with django.contrib.admin uninstalled."""
    urls = 'project.noadmin_urls'
    def setUp(self):
        # Run each test with the admin app removed from INSTALLED_APPS.
        admin = 'django.contrib.admin'
        noadmin_apps = [app for app in settings.INSTALLED_APPS if not app == admin]
        self._ctx = SettingsOverride(INSTALLED_APPS=noadmin_apps)
        self._ctx.__enter__()
    def tearDown(self):
        # Restore the original INSTALLED_APPS.
        self._ctx.__exit__(None, None, None)
    def test_get_page_from_request_fakeadmin_nopage(self):
        """/admin/ must not resolve to a CMS page when the admin app is absent."""
        request = self.get_request('/admin/')
        page = get_page_from_request(request)
        self.assertEqual(page, None)
class PreviousFilteredSiblingsTests(CMSTestCase):
    """Checks for Page.get_previous_filtered_sibling() with publisher and sites."""
    def test_with_publisher(self):
        """The second root page's previous sibling is the first; home has none."""
        home = create_page('home', 'nav_playground.html', 'en', published=True)
        home.publish()
        other = create_page('other', 'nav_playground.html', 'en', published=True)
        other.publish()
        other = Page.objects.get(pk=other.pk)
        home = Page.objects.get(pk=home.pk)
        self.assertEqual(other.get_previous_filtered_sibling(), home)
        self.assertEqual(home.get_previous_filtered_sibling(), None)
    def test_multisite(self):
        """Sibling lookup must not cross site boundaries."""
        firstsite = Site.objects.create(name='first', domain='first.com')
        secondsite = Site.objects.create(name='second', domain='second.com')
        home = create_page('home', 'nav_playground.html', 'en', published=True, site=firstsite)
        home.publish()
        other = create_page('other', 'nav_playground.html', 'en', published=True, site=secondsite)
        other.publish()
        other = Page.objects.get(pk=other.pk)
        home = Page.objects.get(pk=home.pk)
        self.assertEqual(other.get_previous_filtered_sibling(), None)
        self.assertEqual(home.get_previous_filtered_sibling(), None)
Added a comment to the test.
# -*- coding: utf-8 -*-
from __future__ import with_statement
from cms.admin.forms import PageForm
from cms.api import create_page
from cms.models import Page, Title
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.plugins.text.models import Text
from cms.sitemaps import CMSSitemap
from cms.test_utils.testcases import (CMSTestCase, URL_CMS_PAGE,
URL_CMS_PAGE_ADD)
from cms.test_utils.util.context_managers import (LanguageOverride,
SettingsOverride)
from cms.utils.page_resolver import get_page_from_request
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
import datetime
import os.path
class PagesTestCase(CMSTestCase):
def test_add_page(self):
"""
Test that the add admin page could be displayed via the admin
"""
superuser = self.get_superuser()
with self.login_user_context(superuser):
response = self.client.get(URL_CMS_PAGE_ADD)
self.assertEqual(response.status_code, 200)
    def test_create_page(self):
        """
        Test that a page can be created via the admin
        """
        page_data = self.get_new_page_data()
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            self.assertRedirects(response, URL_CMS_PAGE)
            title = Title.objects.get(slug=page_data['slug'])
            self.assertNotEqual(title, None)
            page = title.page
            page.published = True
            page.save()
            self.assertEqual(page.get_title(), page_data['title'])
            self.assertEqual(page.get_slug(), page_data['slug'])
            self.assertEqual(page.placeholders.all().count(), 2)
            # were public instances created?
            title = Title.objects.drafts().get(slug=page_data['slug'])
    def test_slug_collision(self):
        """
        Test a slug collision
        """
        page_data = self.get_new_page_data()
        # create first page
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            self.assertRedirects(response, URL_CMS_PAGE)
            #page1 = Title.objects.get(slug=page_data['slug']).page
            # create page with the same page_data
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            # NOTE(review): lowercase 'i18n_installed' is not a standard Django
            # setting -- confirm it is patched onto settings by the cms package.
            if settings.i18n_installed:
                self.assertEqual(response.status_code, 302)
                # did we got right redirect?
                self.assertEqual(response['Location'].endswith(URL_CMS_PAGE), True)
            else:
                self.assertEqual(response.status_code, 200)
                self.assertEqual(response['Location'].endswith(URL_CMS_PAGE_ADD), True)
        # TODO: check for slug collisions after move
        # TODO: check for slug collisions with different settings
def test_details_view(self):
"""
Test the details view
"""
superuser = self.get_superuser()
with self.login_user_context(superuser):
response = self.client.get(self.get_pages_root())
self.assertEqual(response.status_code, 404)
page = create_page('test page 1', "nav_playground.html", "en")
response = self.client.get(self.get_pages_root())
self.assertEqual(response.status_code, 404)
self.assertTrue(page.publish())
create_page("test page 2", "nav_playground.html", "en",
parent=page, published=True)
homepage = Page.objects.get_home()
self.assertTrue(homepage.get_slug(), 'test-page-1')
response = self.client.get(self.get_pages_root())
self.assertEqual(response.status_code, 200)
    def test_edit_page(self):
        """
        Test that a page can edited via the admin
        """
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            page_data = self.get_new_page_data()
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            page = Page.objects.get(title_set__slug=page_data['slug'])
            # the change view must render
            response = self.client.get('/admin/cms/page/%s/' %page.id)
            self.assertEqual(response.status_code, 200)
            # posting a new title must redirect back to the changelist
            page_data['title'] = 'changed title'
            response = self.client.post('/admin/cms/page/%s/' %page.id, page_data)
            self.assertRedirects(response, URL_CMS_PAGE)
            self.assertEqual(page.get_title(), 'changed title')
    def test_meta_description_and_keywords_fields_from_admin(self):
        """
        Test that description and keywords tags can be set via the admin
        """
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            page_data = self.get_new_page_data()
            page_data["meta_description"] = "I am a page"
            page_data["meta_keywords"] = "page,cms,stuff"
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            page = Page.objects.get(title_set__slug=page_data['slug'])
            response = self.client.get('/admin/cms/page/%s/' %page.id)
            self.assertEqual(response.status_code, 200)
            # update the description through the change view
            page_data['meta_description'] = 'I am a duck'
            response = self.client.post('/admin/cms/page/%s/' %page.id, page_data)
            self.assertRedirects(response, URL_CMS_PAGE)
            # re-fetch so we read the stored values, not the stale instance
            page = Page.objects.get(title_set__slug=page_data["slug"])
            self.assertEqual(page.get_meta_description(), 'I am a duck')
            self.assertEqual(page.get_meta_keywords(), 'page,cms,stuff')
    def test_meta_description_and_keywords_from_template_tags(self):
        """The page_attribute template tag must render title/description/keywords."""
        from django import template
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            page_data = self.get_new_page_data()
            page_data["title"] = "Hello"
            page_data["meta_description"] = "I am a page"
            page_data["meta_keywords"] = "page,cms,stuff"
            self.client.post(URL_CMS_PAGE_ADD, page_data)
            page = Page.objects.get(title_set__slug=page_data['slug'])
            self.client.post('/admin/cms/page/%s/' %page.id, page_data)
            t = template.Template("{% load cms_tags %}{% page_attribute title %} {% page_attribute meta_description %} {% page_attribute meta_keywords %}")
            # render against a minimal fake request carrying the current page
            req = HttpRequest()
            page.published = True
            page.save()
            req.current_page = page
            req.REQUEST = {}
            self.assertEqual(t.render(template.Context({"request": req})), "Hello I am a page page,cms,stuff")
    def test_copy_page(self):
        """
        Test that a page can be copied via the admin
        """
        page_a = create_page("page_a", "nav_playground.html", "en")
        page_a_a = create_page("page_a_a", "nav_playground.html", "en",
            parent=page_a)
        create_page("page_a_a_a", "nav_playground.html", "en", parent=page_a_a)
        page_b = create_page("page_b", "nav_playground.html", "en")
        # NOTE(review): this child deliberately(?) reuses the title "page_b" --
        # confirm the duplicate title is intended.
        page_b_a = create_page("page_b", "nav_playground.html", "en",
            parent=page_b)
        count = Page.objects.drafts().count()
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            self.copy_page(page_a, page_b_a)
        # copying page_a's subtree (3 pages) must add exactly 3 drafts
        self.assertEqual(Page.objects.drafts().count() - count, 3)
def test_language_change(self):
superuser = self.get_superuser()
with self.login_user_context(superuser):
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
pk = Page.objects.all()[0].pk
response = self.client.get("/admin/cms/page/%s/" % pk, {"language":"en" })
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/cms/page/%s/" % pk, {"language":"de" })
self.assertEqual(response.status_code, 200)
    def test_move_page(self):
        """Moving pages via the admin must rewrite paths/URLs of the whole subtree."""
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            page_data1 = self.get_new_page_data()
            self.client.post(URL_CMS_PAGE_ADD, page_data1)
            page_data2 = self.get_new_page_data()
            self.client.post(URL_CMS_PAGE_ADD, page_data2)
            page_data3 = self.get_new_page_data()
            self.client.post(URL_CMS_PAGE_ADD, page_data3)
            page1 = Page.objects.all()[0]
            page2 = Page.objects.all()[1]
            page3 = Page.objects.all()[2]
            # move pages: page3 under page2, then page2 under page1
            response = self.client.post("/admin/cms/page/%s/move-page/" % page3.pk, {"target": page2.pk, "position": "last-child"})
            self.assertEqual(response.status_code, 200)
            response = self.client.post("/admin/cms/page/%s/move-page/" % page2.pk, {"target": page1.pk, "position": "last-child"})
            self.assertEqual(response.status_code, 200)
            # check page2 path and url
            page2 = Page.objects.get(pk=page2.pk)
            self.assertEqual(page2.get_path(), page_data1['slug']+"/"+page_data2['slug'])
            self.assertEqual(page2.get_absolute_url(), self.get_pages_root()+page_data1['slug']+"/"+page_data2['slug']+"/")
            # check page3 path and url
            page3 = Page.objects.get(pk=page3.pk)
            self.assertEqual(page3.get_path(), page_data1['slug']+"/"+page_data2['slug']+"/"+page_data3['slug'])
            self.assertEqual(page3.get_absolute_url(), self.get_pages_root()+page_data1['slug']+"/"+page_data2['slug']+"/"+page_data3['slug']+"/")
            # publish page 1 (becomes home)
            page1 = Page.objects.get(pk=page1.pk)
            page1.publish()
            public_page1 = page1.publisher_public
            self.assertEqual(public_page1.get_path(), '')
            # check that page2 and page3 url have changed
            page2 = Page.objects.get(pk=page2.pk)
            page2.publish()
            public_page2 = page2.publisher_public
            self.assertEqual(public_page2.get_absolute_url(), self.get_pages_root()+page_data2['slug']+"/")
            page3 = Page.objects.get(pk=page3.pk)
            page3.publish()
            public_page3 = page3.publisher_public
            self.assertEqual(public_page3.get_absolute_url(), self.get_pages_root()+page_data2['slug']+"/"+page_data3['slug']+"/")
            # move page2 back to root and check path of 2 and 3
            response = self.client.post("/admin/cms/page/%s/move-page/" % page2.pk, {"target": page1.pk, "position": "right"})
            self.assertEqual(response.status_code, 200)
            page1 = Page.objects.get(pk=page1.pk)
            self.assertEqual(page1.get_path(), page_data1['slug'])
            page2 = Page.objects.get(pk=page2.pk)
            self.assertEqual(page2.get_path(), page_data2['slug'])
            page3 = Page.objects.get(pk=page3.pk)
            self.assertEqual(page3.get_path(), page_data2['slug']+"/"+page_data3['slug'])
def test_move_page_inherit(self):
parent = create_page("Parent", 'col_three.html', "en")
child = create_page("Child", settings.CMS_TEMPLATE_INHERITANCE_MAGIC,
"en", parent=parent)
self.assertEqual(child.get_template(), parent.get_template())
child.move_page(parent, 'left')
self.assertEqual(child.get_template(), parent.get_template())
def test_add_placeholder(self):
# create page
page = create_page("Add Placeholder", "nav_playground.html", "en",
position="last-child", published=True, in_navigation=True)
page.template = 'add_placeholder.html'
page.save()
url = page.get_absolute_url()
response = self.client.get(url)
self.assertEqual(200, response.status_code)
path = os.path.join(settings.PROJECT_DIR, 'templates', 'add_placeholder.html')
f = open(path, 'r')
old = f.read()
f.close()
new = old.replace(
'<!-- SECOND_PLACEHOLDER -->',
'{% placeholder second_placeholder %}'
)
f = open(path, 'w')
f.write(new)
f.close()
response = self.client.get(url)
self.assertEqual(200, response.status_code)
f = open(path, 'w')
f.write(old)
f.close()
def test_sitemap_login_required_pages(self):
"""
Test that CMSSitemap object contains only published,public (login_required=False) pages
"""
create_page("page", "nav_playground.html", "en", login_required=True,
published=True, in_navigation=True)
self.assertEqual(CMSSitemap().items().count(),0)
    def test_edit_page_other_site_and_language(self):
        """
        Test that a page can edited via the admin when your current site is
        different from the site you are editing and the language isn't available
        for the current site.
        """
        site = Site.objects.create(domain='otherlang', name='otherlang')
        # Change site for this session
        page_data = self.get_new_page_data()
        page_data['site'] = site.pk
        page_data['title'] = 'changed title'
        # pick the first language configured for the *other* site
        TESTLANG = settings.CMS_SITE_LANGUAGES[site.pk][0]
        page_data['language'] = TESTLANG
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            response = self.client.post(URL_CMS_PAGE_ADD, page_data)
            self.assertRedirects(response, URL_CMS_PAGE)
            page = Page.objects.get(title_set__slug=page_data['slug'])
            with LanguageOverride(TESTLANG):
                self.assertEqual(page.get_title(), 'changed title')
    def test_flat_urls(self):
        """With CMS_FLAT_URLS, every page is reachable and URLs do not nest."""
        with SettingsOverride(CMS_FLAT_URLS=True):
            home_slug = "home"
            child_slug = "child"
            grandchild_slug = "grandchild"
            home = create_page(home_slug, "nav_playground.html", "en",
                published=True, in_navigation=True)
            home.publish()
            child = create_page(child_slug, "nav_playground.html", "en",
                parent=home, published=True, in_navigation=True)
            child.publish()
            grandchild = create_page(grandchild_slug, "nav_playground.html", "en",
                parent=child, published=True, in_navigation=True)
            grandchild.publish()
            response = self.client.get(home.get_absolute_url())
            self.assertEqual(response.status_code, 200)
            response = self.client.get(child.get_absolute_url())
            self.assertEqual(response.status_code, 200)
            response = self.client.get(grandchild.get_absolute_url())
            self.assertEqual(response.status_code, 200)
            # flat URLs: the child URL must not be a prefix of the grandchild URL
            self.assertFalse(child.get_absolute_url() in grandchild.get_absolute_url())
    def test_templates(self):
        """
        Test the inheritance magic for templates
        """
        parent = create_page("parent", "nav_playground.html", "en")
        child = create_page("child", "nav_playground.html", "en", parent=parent)
        child.template = settings.CMS_TEMPLATE_INHERITANCE_MAGIC
        child.save()
        # a child with the magic template resolves to its parent's template
        self.assertEqual(child.template, settings.CMS_TEMPLATE_INHERITANCE_MAGIC)
        self.assertEqual(parent.get_template_name(), child.get_template_name())
        parent.template = settings.CMS_TEMPLATE_INHERITANCE_MAGIC
        parent.save()
        # a root page with the magic template falls back to the first template
        self.assertEqual(parent.template, settings.CMS_TEMPLATE_INHERITANCE_MAGIC)
        self.assertEqual(parent.get_template(), settings.CMS_TEMPLATES[0][0])
        self.assertEqual(parent.get_template_name(), settings.CMS_TEMPLATES[0][1])
    def test_delete_with_plugins(self):
        """
        Check that plugins and placeholders get correctly deleted when we delete
        a page!
        """
        page = create_page("page", "nav_playground.html", "en")
        page.rescan_placeholders() # create placeholders
        placeholder = page.placeholders.all()[0]
        plugin_base = CMSPlugin(
            plugin_type='TextPlugin',
            placeholder=placeholder,
            position=1,
            language=settings.LANGUAGES[0][0]
        )
        plugin_base.insert_at(None, position='last-child', save=False)
        plugin = Text(body='')
        plugin_base.set_base_attr(plugin)
        plugin.save()
        self.assertEqual(CMSPlugin.objects.count(), 1)
        self.assertEqual(Text.objects.count(), 1)
        self.assertTrue(Placeholder.objects.count() > 0)
        # deleting the page must cascade to plugins and placeholders
        page.delete()
        self.assertEqual(CMSPlugin.objects.count(), 0)
        self.assertEqual(Text.objects.count(), 0)
        self.assertEqual(Placeholder.objects.count(), 0)
    def test_get_page_from_request_on_non_cms_admin(self):
        """A non-CMS admin URL must not resolve to a page."""
        request = self.get_request(
            reverse('admin:sampleapp_category_change', args=(1,))
        )
        page = get_page_from_request(request)
        self.assertEqual(page, None)
    def test_get_page_from_request_on_cms_admin(self):
        """The CMS page-change admin URL resolves to the page being edited."""
        page = create_page("page", "nav_playground.html", "en")
        request = self.get_request(
            reverse('admin:cms_page_change', args=(page.pk,))
        )
        found_page = get_page_from_request(request)
        self.assertTrue(found_page)
        self.assertEqual(found_page.pk, page.pk)
    def test_get_page_from_request_on_cms_admin_nopage(self):
        """A CMS admin URL for a non-existent page id resolves to None."""
        request = self.get_request(
            reverse('admin:cms_page_change', args=(1,))
        )
        page = get_page_from_request(request)
        self.assertEqual(page, None)
    def test_get_page_from_request_cached(self):
        """A value cached on request._current_page_cache is returned as-is."""
        mock_page = 'hello world'
        request = self.get_request(
            reverse('admin:sampleapp_category_change', args=(1,))
        )
        request._current_page_cache = mock_page
        page = get_page_from_request(request)
        self.assertEqual(page, mock_page)
    def test_get_page_from_request_nopage(self):
        """The root URL resolves to None when no page exists."""
        request = self.get_request('/')
        page = get_page_from_request(request)
        self.assertEqual(page, None)
    def test_get_page_from_request_with_page_404(self):
        """An unknown path resolves to None even when other pages exist."""
        page = create_page("page", "nav_playground.html", "en", published=True)
        page.publish()
        request = self.get_request('/does-not-exist/')
        found_page = get_page_from_request(request)
        self.assertEqual(found_page, None)
    def test_get_page_from_request_with_page_preview(self):
        """?preview of an unpublished page works for staff only."""
        page = create_page("page", "nav_playground.html", "en")
        request = self.get_request('%s?preview' % page.get_absolute_url())
        request.user.is_staff = False
        # non-staff users must not see unpublished previews
        found_page = get_page_from_request(request)
        self.assertEqual(found_page, None)
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            request = self.get_request('%s?preview&draft' % page.get_absolute_url())
            found_page = get_page_from_request(request)
            self.assertTrue(found_page)
            self.assertEqual(found_page.pk, page.pk)
    def test_get_page_from_request_on_cms_admin_with_editplugin(self):
        """An edit-plugin sub-URL of the page admin still resolves to the page."""
        page = create_page("page", "nav_playground.html", "en")
        request = self.get_request(
            reverse('admin:cms_page_change', args=(page.pk,)) + 'edit-plugin/42/'
        )
        found_page = get_page_from_request(request)
        self.assertTrue(found_page)
        self.assertEqual(found_page.pk, page.pk)
    def test_get_page_from_request_on_cms_admin_with_editplugin_nopage(self):
        """An edit-plugin sub-URL for a missing page resolves to None."""
        request = self.get_request(
            reverse('admin:cms_page_change', args=(1,)) + 'edit-plugin/42/'
        )
        page = get_page_from_request(request)
        self.assertEqual(page, None)
    def test_page_already_expired(self):
        """
        Test that a page which has a end date in the past gives a 404, not a
        500.
        """
        yesterday = datetime.date.today() - datetime.timedelta(days=1)
        with SettingsOverride(CMS_MODERATOR=False, CMS_PERMISSION=False):
            # published but already past its publication_end_date
            page = create_page('page', 'nav_playground.html', 'en',
                publication_end_date=yesterday, published=True)
            resp = self.client.get(page.get_absolute_url('en'))
            self.assertEqual(resp.status_code, 404)
def test_existing_overwrite_url(self):
with SettingsOverride(CMS_MODERATOR=False, CMS_PERMISSION=False):
create_page('home', 'nav_playground.html', 'en', published=True)
create_page('boo', 'nav_playground.html', 'en', published=True)
data = {
'title': 'foo',
'overwrite_url': '/boo/',
'slug': 'foo',
'language': 'en',
'template': 'nav_playground.html',
'site': 1,
}
form = PageForm(data)
self.assertFalse(form.is_valid())
self.assertTrue('overwrite_url' in form.errors)
    def test_page_urls(self):
        """Absolute URLs follow the page tree and are rewritten when pages move."""
        page1 = create_page('test page 1', 'nav_playground.html', 'en',
            published=True)
        page2 = create_page('test page 2', 'nav_playground.html', 'en',
            published=True, parent=page1)
        page3 = create_page('test page 3', 'nav_playground.html', 'en',
            published=True, parent=page2)
        page4 = create_page('test page 4', 'nav_playground.html', 'en',
            published=True)
        page5 = create_page('test page 5', 'nav_playground.html', 'en',
            published=True, parent=page4)
        # page1 is the first root page, so it is served from the pages root
        self.assertEqual(page1.get_absolute_url(),
            self.get_pages_root()+'')
        self.assertEqual(page2.get_absolute_url(),
            self.get_pages_root()+'test-page-2/')
        self.assertEqual(page3.get_absolute_url(),
            self.get_pages_root()+'test-page-2/test-page-3/')
        self.assertEqual(page4.get_absolute_url(),
            self.get_pages_root()+'test-page-4/')
        self.assertEqual(page5.get_absolute_url(),
            self.get_pages_root()+'test-page-4/test-page-5/')
        # moving a page under a new parent must rewrite its URL
        page3 = self.move_page(page3, page1)
        self.assertEqual(page3.get_absolute_url(),
            self.get_pages_root()+'test-page-3/')
        page5 = self.move_page(page5, page2)
        self.assertEqual(page5.get_absolute_url(),
            self.get_pages_root()+'test-page-2/test-page-5/')
        page3 = self.move_page(page3, page4)
        self.assertEqual(page3.get_absolute_url(),
            self.get_pages_root()+'test-page-4/test-page-3/')
    def test_page_overwrite_urls(self):
        """overwrite_url must survive slug changes and saves of ancestor pages."""
        page1 = create_page('test page 1', 'nav_playground.html', 'en',
            published=True)
        page2 = create_page('test page 2', 'nav_playground.html', 'en',
            published=True, parent=page1)
        page3 = create_page('test page 3', 'nav_playground.html', 'en',
            published=True, parent=page2, overwrite_url='i-want-another-url')
        self.assertEqual(page2.get_absolute_url(),
            self.get_pages_root()+'test-page-2/')
        self.assertEqual(page3.get_absolute_url(),
            self.get_pages_root()+'i-want-another-url/')
        # changing the parent's slug must not touch the overwritten child URL
        title2 = page2.title_set.get()
        title2.slug = 'page-test-2'
        title2.save()
        page2 = Page.objects.get(pk=page2.pk)
        page3 = Page.objects.get(pk=page3.pk)
        self.assertEqual(page2.get_absolute_url(),
            self.get_pages_root()+'page-test-2/')
        self.assertEqual(page3.get_absolute_url(),
            self.get_pages_root()+'i-want-another-url/')
        # tests a bug found in 2.2 where saving an ancestor page
        # wiped out the overwrite_url for child pages
        page2.save()
        self.assertEqual(page3.get_absolute_url(),
            self.get_pages_root()+'i-want-another-url/')
    def test_home_slug_not_accessible(self):
        """The home page is served from '/' only; its slug URL must 404."""
        with SettingsOverride(CMS_MODERATOR=False, CMS_PERMISSION=False):
            page = create_page('page', 'nav_playground.html', 'en', published=True)
            self.assertEqual(page.get_absolute_url('en'), '/')
            resp = self.client.get('/en/')
            self.assertEqual(resp.status_code, HttpResponse.status_code)
            resp = self.client.get('/en/page/')
            self.assertEqual(resp.status_code, HttpResponseNotFound.status_code)
    def test_public_home_page_replaced(self):
        """Test that publishing changes to the home page doesn't move the public version"""
        home = create_page('home', 'nav_playground.html', 'en', published = True, slug = 'home')
        self.assertEqual(Page.objects.drafts().get_home().get_slug(), 'home')
        home.publish()
        self.assertEqual(Page.objects.public().get_home().get_slug(), 'home')
        other = create_page('other', 'nav_playground.html', 'en', published = True, slug = 'other')
        other.publish()
        # publishing another root page must not displace home (draft or public)
        self.assertEqual(Page.objects.drafts().get_home().get_slug(), 'home')
        self.assertEqual(Page.objects.public().get_home().get_slug(), 'home')
        home = Page.objects.get(pk = home.id)
        home.in_navigation = True
        home.save()
        home.publish()
        # republishing a changed home must keep it as the home page
        self.assertEqual(Page.objects.drafts().get_home().get_slug(), 'home')
        self.assertEqual(Page.objects.public().get_home().get_slug(), 'home')
class NoAdminPageTests(CMSTestCase):
    """Page resolution must still work with django.contrib.admin uninstalled."""
    urls = 'project.noadmin_urls'
    def setUp(self):
        # Run each test with the admin app removed from INSTALLED_APPS.
        admin = 'django.contrib.admin'
        noadmin_apps = [app for app in settings.INSTALLED_APPS if not app == admin]
        self._ctx = SettingsOverride(INSTALLED_APPS=noadmin_apps)
        self._ctx.__enter__()
    def tearDown(self):
        # Restore the original INSTALLED_APPS.
        self._ctx.__exit__(None, None, None)
    def test_get_page_from_request_fakeadmin_nopage(self):
        """/admin/ must not resolve to a CMS page when the admin app is absent."""
        request = self.get_request('/admin/')
        page = get_page_from_request(request)
        self.assertEqual(page, None)
class PreviousFilteredSiblingsTests(CMSTestCase):
    """Checks for Page.get_previous_filtered_sibling() with publisher and sites."""
    def test_with_publisher(self):
        """The second root page's previous sibling is the first; home has none."""
        home = create_page('home', 'nav_playground.html', 'en', published=True)
        home.publish()
        other = create_page('other', 'nav_playground.html', 'en', published=True)
        other.publish()
        other = Page.objects.get(pk=other.pk)
        home = Page.objects.get(pk=home.pk)
        self.assertEqual(other.get_previous_filtered_sibling(), home)
        self.assertEqual(home.get_previous_filtered_sibling(), None)
    def test_multisite(self):
        """Sibling lookup must not cross site boundaries."""
        firstsite = Site.objects.create(name='first', domain='first.com')
        secondsite = Site.objects.create(name='second', domain='second.com')
        home = create_page('home', 'nav_playground.html', 'en', published=True, site=firstsite)
        home.publish()
        other = create_page('other', 'nav_playground.html', 'en', published=True, site=secondsite)
        other.publish()
        other = Page.objects.get(pk=other.pk)
        home = Page.objects.get(pk=home.pk)
        self.assertEqual(other.get_previous_filtered_sibling(), None)
        self.assertEqual(home.get_previous_filtered_sibling(), None)
|
import datetime
import logging
import hashlib
import hmac
import json
import traceback
from app.models import SocialNetworkApp
from app.sync import save_sn_post, publish_idea_cp, save_sn_comment, publish_comment_cp, save_sn_vote, \
delete_post, delete_comment, delete_vote
from app.utils import get_timezone_aware_datetime, convert_to_utf8_str
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
logger = logging.getLogger(__name__)
def _process_post(post_id, update, fb_app, u_datetime):
template_url_post = 'https://www.facebook.com/{}/posts/{}'
if not 'message' in update.keys() or not update['message'].strip():
# Posts without text are ignored
return None
url = template_url_post.format(post_id.split('_')[0],post_id.split('_')[1])
post = {'id': post_id, 'text': update['message'], 'title': '',
'user_info': {'name': update['sender_name'], 'id': update['sender_id']},
'url': url, 'datetime': u_datetime, 'positive_votes': 0, 'negative_votes': 0,
'comments': 0}
ret_data = save_sn_post(fb_app, post)
if ret_data: publish_idea_cp(ret_data['idea'])
def _process_comment(comment_id, comment_raw, fb_app, c_datetime):
if not comment_raw['message'].strip():
# Comments without text are ignored
return None
if comment_raw['post_id'] == comment_raw['parent_id']:
parent_type = 'post'
else:
parent_type = 'comment'
comment = {'id': comment_id, 'text': comment_raw['message'],
'user_info': {'name': comment_raw['sender_name'], 'id': comment_raw['sender_id']},
'datetime': c_datetime, 'positive_votes': 0, 'negative_votes': 0, 'url': None,
'parent_type': parent_type, 'parent_id': comment_raw['parent_id'], 'comments': 0}
ret_data = save_sn_comment(fb_app, comment)
if ret_data: publish_comment_cp(ret_data['comment'])
def _generate_like_id(like_raw):
return like_raw['parent_id'].split('_')[1]+'_'+like_raw['sender_id']
def _process_like(like_raw, fb_app, l_datetime):
    """Record a Facebook like as a +1 vote on its parent post or comment."""
    likes_post = like_raw['post_id'] == like_raw['parent_id']
    vote = {
        'id': _generate_like_id(like_raw),
        'user_info': {'id': like_raw['sender_id'], 'name': like_raw['sender_name']},
        'parent_type': 'post' if likes_post else 'comment',
        'parent_id': like_raw['parent_id'],
        'value': 1,
        'datetime': l_datetime,
    }
    save_sn_vote(fb_app, vote)
def _process_update(fb_app, update, u_datetime):
    """Route one real-time change to the matching post/comment/like handler.

    Only the 'add' and 'remove' verbs are acted upon; every other verb or item
    type (e.g. 'hide', 'status') is deliberately ignored.
    """
    item = update['item']
    if item == 'post':
        post_id = update['post_id']
        if update['verb'] == 'add':
            _process_post(post_id, update, fb_app, u_datetime)
        elif update['verb'] == 'remove':
            delete_post(post_id)
        return
    if item == 'comment':
        comment_id = update['comment_id']
        if update['verb'] == 'add':
            _process_comment(comment_id, update, fb_app, u_datetime)
        elif update['verb'] == 'remove':
            delete_comment(comment_id)
        return
    if item == 'like':
        if update['verb'] == 'add':
            _process_like(update, fb_app, u_datetime)
        elif update['verb'] == 'remove':
            delete_vote(_generate_like_id(update))
def _get_datetime(raw_datetime):
    """Convert an epoch timestamp into an ISO-8601, timezone-aware string.

    NOTE(review): datetime.fromtimestamp() interprets the epoch in the
    server's *local* timezone -- confirm that matches the source of
    ``raw_datetime`` (Facebook entry times are UTC epochs).
    """
    dt = datetime.datetime.fromtimestamp(raw_datetime)
    if timezone.is_naive(dt):
        return get_timezone_aware_datetime(dt).isoformat()
    else:
        return dt.isoformat()
def _process_post_request(fb_app, exp_signature, payload):
    """Handle a verified real-time POST: record its signature, then dispatch.

    The signature is stored first so an identical delivery arriving right
    after this one is recognised as a duplicate by the view.
    """
    # Save the current signature
    fb_app.last_real_time_update_sig = str(exp_signature)
    fb_app.save()
    logger.info('Before converting to json')
    req_json = json.loads(payload)
    # only handle updates for the object type this app subscribed to
    if req_json['object'] == fb_app.object_real_time_updates:
        logger.info(req_json)
        entries = req_json['entry']
        for entry in entries:
            # ignore entries for pages other than the one we monitor
            if entry['id'] == fb_app.page_id:
                e_datetime = _get_datetime(entry['time'])
                changes = entry['changes']
                for change in changes:
                    if change['field'] == fb_app.field_real_time_updates:
                        _process_update(fb_app, change['value'], e_datetime)
def _calculate_signature(app_secret, payload):
    """Compute the expected 'sha1=<hexdigest>' X-Hub-Signature for a payload.

    Returns None when the HMAC cannot be computed.
    NOTE(review): Python 2 only (`unicode`, str key). Wrapping the payload in
    unicode() can raise for non-ASCII bodies even though the caller already
    UTF-8-encodes it, and the broad except below silently masks that --
    verify the HMAC is taken over the raw request bytes as Facebook specifies.
    """
    try:
        return 'sha1=' + hmac.new(str(app_secret), msg=unicode(payload), digestmod=hashlib.sha1).hexdigest()
    except Exception as e:
        logger.warning('Signature could not be generated. Reason: {}'.format(e))
        logger.warning(traceback.format_exc())
        return None
def _get_facebook_app():
    """Return the first SocialNetworkApp wired to Facebook, or None."""
    return next(
        (candidate for candidate in SocialNetworkApp.objects.all()
         if candidate.connector.name.lower() == 'facebook'),
        None,
    )
@csrf_exempt
def fb_real_time_updates(request):
    """Facebook real-time updates webhook.

    GET: subscription handshake -- echo hub.challenge when hub.verify_token
    matches the stored token.
    POST: signed delivery -- process the payload only when the
    X-Hub-Signature matches our computed one AND differs from the last seen
    signature (consecutive duplicate deliveries are discarded).
    Anything else (including no configured Facebook app) returns 403.
    """
    fb_app = _get_facebook_app()
    if fb_app:
        if request.method == 'GET':
            challenge = request.GET.get('hub.challenge')
            token = request.GET.get('hub.verify_token')
            if fb_app.token_real_time_updates == token:
                return HttpResponse(challenge)
        elif request.method == 'POST':
            req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
            payload_str = convert_to_utf8_str(request.body)
            exp_signature = _calculate_signature(fb_app.app_secret, payload_str)
            # NOTE(review): consider hmac.compare_digest for a constant-time
            # signature comparison (guard against req_signature being None).
            if req_signature == exp_signature and \
                not exp_signature == fb_app.last_real_time_update_sig:
                # I'm comparing the current signature against the last one
                # to discard duplicates that seem to arrive consecutively
                _process_post_request(fb_app, exp_signature, payload_str)
                return HttpResponse()
            else:
                logger.info('The received signature does not correspond to the expected one!')
    return HttpResponseForbidden()
Add debug statements
import datetime
import logging
import hashlib
import hmac
import json
import traceback
from app.models import SocialNetworkApp
from app.sync import save_sn_post, publish_idea_cp, save_sn_comment, publish_comment_cp, save_sn_vote, \
delete_post, delete_comment, delete_vote
from app.utils import get_timezone_aware_datetime, convert_to_utf8_str
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
logger = logging.getLogger(__name__)
def _process_post(post_id, update, fb_app, u_datetime):
    """Persist a new Facebook post and, on success, publish it as an idea.

    post_id has the Facebook '<page-id>_<object-id>' form; posts without
    text are ignored (returns None without saving anything).
    """
    template_url_post = 'https://www.facebook.com/{}/posts/{}'
    # Idiomatic membership test instead of `not 'message' in update.keys()`;
    # posts without text are ignored.
    if 'message' not in update or not update['message'].strip():
        return None
    # Split once instead of twice.
    id_parts = post_id.split('_')
    url = template_url_post.format(id_parts[0], id_parts[1])
    post = {'id': post_id, 'text': update['message'], 'title': '',
            'user_info': {'name': update['sender_name'], 'id': update['sender_id']},
            'url': url, 'datetime': u_datetime, 'positive_votes': 0, 'negative_votes': 0,
            'comments': 0}
    ret_data = save_sn_post(fb_app, post)
    if ret_data:
        publish_idea_cp(ret_data['idea'])
def _process_comment(comment_id, comment_raw, fb_app, c_datetime):
    """Persist a new Facebook comment and, on success, publish it.

    Comments whose message is blank are ignored.  The parent is a post
    when post_id equals parent_id, otherwise another comment.
    """
    if not comment_raw['message'].strip():
        # Comments without text are ignored
        return None
    is_top_level = comment_raw['post_id'] == comment_raw['parent_id']
    comment = {
        'id': comment_id,
        'text': comment_raw['message'],
        'user_info': {'name': comment_raw['sender_name'], 'id': comment_raw['sender_id']},
        'datetime': c_datetime,
        'positive_votes': 0,
        'negative_votes': 0,
        'url': None,
        'parent_type': 'post' if is_top_level else 'comment',
        'parent_id': comment_raw['parent_id'],
        'comments': 0,
    }
    saved = save_sn_comment(fb_app, comment)
    if saved:
        publish_comment_cp(saved['comment'])
def _generate_like_id(like_raw):
return like_raw['parent_id'].split('_')[1]+'_'+like_raw['sender_id']
def _process_like(like_raw, fb_app, l_datetime):
    """Persist a Facebook 'like' as a positive (value=1) vote.

    The target is the post itself when post_id equals parent_id,
    otherwise a comment.
    """
    if like_raw['post_id'] == like_raw['parent_id']:
        target_type = 'post'
    else:
        target_type = 'comment'
    vote = {
        'id': _generate_like_id(like_raw),
        'user_info': {'id': like_raw['sender_id'], 'name': like_raw['sender_name']},
        'parent_type': target_type,
        'parent_id': like_raw['parent_id'],
        'value': 1,
        'datetime': l_datetime,
    }
    save_sn_vote(fb_app, vote)
def _process_update(fb_app, update, u_datetime):
    """Dispatch one real-time change to the matching handler.

    Only 'post', 'comment' and 'like' items with 'add'/'remove' verbs
    are handled; everything else (e.g. 'hide', 'status') is ignored.
    """
    item = update['item']
    verb = update['verb']
    if item == 'post':
        post_id = update['post_id']
        if verb == 'add':
            _process_post(post_id, update, fb_app, u_datetime)
        elif verb == 'remove':
            delete_post(post_id)
        # other verbs (e.g., hide) are ignored
    elif item == 'comment':
        comment_id = update['comment_id']
        if verb == 'add':
            _process_comment(comment_id, update, fb_app, u_datetime)
        elif verb == 'remove':
            delete_comment(comment_id)
        # other verbs (e.g., hide) are ignored
    elif item == 'like':
        if verb == 'add':
            _process_like(update, fb_app, u_datetime)
        elif verb == 'remove':
            delete_vote(_generate_like_id(update))
        # other verbs are ignored
    # other items (e.g., status) are ignored
def _get_datetime(raw_datetime):
    """Convert a POSIX timestamp to an ISO-8601 string.

    Naive datetimes are first made timezone-aware via the project
    helper; aware ones are formatted as-is.
    """
    dt = datetime.datetime.fromtimestamp(raw_datetime)
    if not timezone.is_naive(dt):
        return dt.isoformat()
    return get_timezone_aware_datetime(dt).isoformat()
def _process_post_request(fb_app, exp_signature, payload):
    """Record the request signature and process every relevant change.

    Only entries for the configured page and changes on the configured
    field are forwarded to _process_update.
    """
    # Remember the current signature so consecutive duplicates can be
    # discarded by the caller.
    fb_app.last_real_time_update_sig = str(exp_signature)
    fb_app.save()
    logger.info('Before converting to json')
    req_json = json.loads(payload)
    if req_json['object'] != fb_app.object_real_time_updates:
        return
    logger.info(req_json)
    for entry in req_json['entry']:
        if entry['id'] != fb_app.page_id:
            continue
        e_datetime = _get_datetime(entry['time'])
        for change in entry['changes']:
            if change['field'] == fb_app.field_real_time_updates:
                _process_update(fb_app, change['value'], e_datetime)
def _calculate_signature(app_secret, payload):
    """Compute the 'sha1=<hexdigest>' HMAC signature of *payload*.

    This matches Facebook's X-Hub-Signature header format.  Returns
    None when the digest cannot be computed (the failure is logged).
    """
    try:
        digest = hmac.new(str(app_secret), msg=unicode(payload), digestmod=hashlib.sha1)
        return 'sha1=' + digest.hexdigest()
    except Exception as err:
        logger.warning('Signature could not be generated. Reason: {}'.format(err))
        logger.warning(traceback.format_exc())
        return None
def _get_facebook_app():
    """Return the registered Facebook SocialNetworkApp, or None."""
    matches = (app for app in SocialNetworkApp.objects.all()
               if app.connector.name.lower() == 'facebook')
    return next(matches, None)
@csrf_exempt
def fb_real_time_updates(request):
    """Endpoint for Facebook real-time update callbacks.

    GET: subscription verification -- echoes hub.challenge when the
    verify token matches the app's configured token.
    POST: validates the X-Hub-Signature HMAC of the body (logging the
    raw and utf-8-converted payloads for debugging), discards
    consecutive duplicate signatures, and processes the update.
    Every other path returns 403.
    """
    fb_app = _get_facebook_app()
    if fb_app:
        if request.method == 'GET':
            challenge = request.GET.get('hub.challenge')
            token = request.GET.get('hub.verify_token')
            if fb_app.token_real_time_updates == token:
                return HttpResponse(challenge)
        elif request.method == 'POST':
            req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
            # debug traces of the incoming payload before/after conversion
            logger.info(request.body)
            payload_str = convert_to_utf8_str(request.body)
            logger.info(payload_str)
            exp_signature = _calculate_signature(fb_app.app_secret, payload_str)
            if req_signature == exp_signature and \
               not exp_signature == fb_app.last_real_time_update_sig:
                # I'm comparing the current signature against the last one
                # to discard duplicates that seem to arrive consecutively
                _process_post_request(fb_app, exp_signature, payload_str)
                return HttpResponse()
            else:
                logger.info('The received signature does not correspond to the expected one!')
    return HttpResponseForbidden()
|
#! /bin/env python
"""
Examples
========
Create a grid that looks like this,
::
(0) --- (1) --- (2)
| | |
| 0 | 1 |
| | |
(3) --- (4) --- (5)
>>> ESMP.ESMP_Initialize()
>>> g = EsmpUnstructured ([0, 1, 2, 0, 1, 2], [0, 0, 0, 1, 1, 1], [0, 1, 4, 3, 1, 2, 5, 4], [4, 8])
>>> g = EsmpStructured ([0, 1, 2, 0, 1, 2], [0, 0, 0, 1, 1, 1], (3, 2))
The as_mesh method provides a view of the grid as an ESMP_Mesh.
>>> ESMP.ESMP_MeshGetLocalElementCount (g.as_mesh ())
2
>>> ESMP.ESMP_MeshGetLocalNodeCount (g.as_mesh ())
6
ESMP elements are the same as the grids cells. Likewise with nodes and points.
>>> g = EsmpRectilinear ([0, 1, 2], [0, 1])
>>> ESMP.ESMP_MeshGetLocalElementCount (g.as_mesh ()) == g.get_cell_count ()
True
>>> ESMP.ESMP_MeshGetLocalNodeCount (g.as_mesh ()) == g.get_point_count ()
True
>>> g = EsmpUniformRectilinear ([3, 2], [1., 1.], [0., 0.])
Uniform Rectilinear Field
-------------------------
Create a field on a grid that looks like this,
::
(0) --- (1) --- (2)
| | |
| 0 | 1 |
| | |
(3) --- (4) --- (5)
Create the field,
>>> g = EsmpRasterField ((3,2), (2,1), (0, 0))
>>> g.get_cell_count ()
2
>>> g.get_point_count ()
6
Add some data at the points of our grid.
>>> data = np.arange (6)
>>> g.add_field ('var0', data, centering='point')
>>> f = g.get_field ('var0')
>>> f
array([ 0., 1., 2., 3., 4., 5.])
>>> print f.dtype
float64
The data can be given either as a 1D array or with the same shape
as the point grid. In either case, though, it will be flattened.
>>> data = np.arange (6)
>>> data.shape = (2, 3)
>>> g.add_field ('var0', data, centering='point')
>>> f = g.get_field ('var0')
>>> f
array([ 0., 1., 2., 3., 4., 5.])
If the size or shape doesn't match, it's an error.
>>> data = np.arange (2)
>>> g.add_field ('bad var', data, centering='point')
Traceback (most recent call last):
...
DimensionError: 2 != 6
>>> data = np.ones ((3, 2))
>>> g.add_field ('bad var', data, centering='point')
Traceback (most recent call last):
...
DimensionError: (3, 2) != (2, 3)
Map between two fields
----------------------
>>> from cmt.grids.raster import UniformRectilinear
>>> from cmt.grids.rectilinear import Rectilinear
>>> #ESMP.ESMP_Initialize()
>>> src = EsmpRasterField ((3,3), (1,1), (0, 0))
>>> data = np.arange (src.get_cell_count (), dtype=np.float64)
>>> src.add_field ('srcfield', data, centering='zonal')
>>> src.get_point_count ()
9
>>> src.get_cell_count ()
4
>>> src.get_x ()
array([ 0., 1., 2., 0., 1., 2., 0., 1., 2.])
>>> src.get_y ()
array([ 0., 0., 0., 1., 1., 1., 2., 2., 2.])
>>> src.get_connectivity () + 1
array([1, 2, 5, 4, 2, 3, 6, 5, 4, 5, 8, 7, 5, 6, 9, 8], dtype=int32)
>>> # EsmpRectilinearField.__mro__
>>> dst = EsmpRectilinearField ([0., .5, 1.5, 2.], [0., .5, 1.5, 2.])
>>> data = np.empty (dst.get_cell_count (), dtype=np.float64)
>>> dst.add_field ('dstfield', data, centering='zonal')
>>> dst.get_point_count ()
16
>>> dst.get_cell_count ()
9
>>> dst.get_x ()
array([ 0. , 0.5, 1.5, 2. , 0. , 0.5, 1.5, 2. , 0. , 0.5, 1.5, 2. , 0. , 0.5, 1.5, 2. ])
>>> dst.get_y ()
array([ 0. , 0. , 0. , 0. , 0.5, 0.5, 0.5, 0.5, 1.5, 1.5, 1.5, 1.5, 2. , 2. , 2. , 2. ])
>>> dst.get_connectivity () + 1
array([ 1, 2, 6, 5, 2, 3, 7, 6, 3, 4, 8, 7, 5, 6, 10, 9, 6,
7, 11, 10, 7, 8, 12, 11, 9, 10, 14, 13, 10, 11, 15, 14, 11, 12,
16, 15], dtype=int32)
>>> src_field = src.as_esmp ('srcfield')
>>> dst_field = dst.as_esmp ('dstfield')
>>> ESMP.ESMP_MeshGetLocalElementCount (src.as_mesh ())
4
>>> ESMP.ESMP_MeshGetLocalNodeCount (src.as_mesh ())
9
>>> ESMP.ESMP_MeshGetLocalElementCount (dst.as_mesh ())
9
>>> ESMP.ESMP_MeshGetLocalNodeCount (dst.as_mesh ())
16
#>>> ESMP.ESMP_FieldPrint (src_field)
#>>> ESMP.ESMP_FieldPrint (dst_field)
>>> f = run_regridding (src_field, dst_field)
>>> field_ptr = ESMP.ESMP_FieldGetPtr(f, 0)
A bigger grid
-------------
>>> (M, N) = (300, 300)
>>> src = EsmpRasterField ((M, N), (1, 1), (0, 0))
Map values on cells
-------------------
>>> (X, Y) = np.meshgrid (np.arange (0.5, 299.5, 1.), np.arange (0.5, 299.5, 1.))
>>> data = np.sin (np.sqrt (X**2+Y**2)*np.pi/M)
>>> src.add_field ('srcfield', data, centering='zonal')
>>> dst = EsmpRasterField ((M*2-1, N*2-1), (1./2, 1./2), (0, 0))
>>> data = np.empty (dst.get_cell_count (), dtype=np.float64)
>>> dst.add_field ('dstfield', data, centering='zonal')
>>> src_field = src.as_esmp ('srcfield')
>>> dst_field = dst.as_esmp ('dstfield')
>>> f = run_regridding (src_field, dst_field)
>>> ans = ESMP.ESMP_FieldGetPtr(f, 0)
>>> (X, Y) = np.meshgrid (np.arange (0.5, 299.5, .5), np.arange (0.5, 299.5, .5))
>>> exact = np.sin (np.sqrt (X**2+Y**2)*np.pi/M)
>>> np.sum (np.abs (exact.flat-ans))/(M*N*4.) < 1e-2
True
Map values on points
--------------------
>>> (X, Y) = np.meshgrid (np.arange (0.5, 300.5, 1.), np.arange (0.5, 300.5, 1.))
>>> data = np.sin (np.sqrt (X**2+Y**2)*np.pi/M)
>>> src.add_field ('srcfield_at_points', data, centering='point')
>>> data = np.empty (dst.get_point_count (), dtype=np.float64)
>>> dst.add_field ('dstfield_at_points', data, centering='point')
>>> src_field = src.as_esmp ('srcfield_at_points')
>>> dst_field = dst.as_esmp ('dstfield_at_points')
>>> f = run_regridding (src_field, dst_field, method=ESMP.ESMP_REGRIDMETHOD_BILINEAR)
>>> ans = ESMP.ESMP_FieldGetPtr(f, 0)
>>> (X, Y) = np.meshgrid (np.arange (0.5, 300., .5), np.arange (0.5, 300., .5))
>>> exact = np.sin (np.sqrt (X**2+Y**2)*np.pi/M)
>>> np.sum (np.abs (exact.flat-ans))/(M*N*4.) < 1e-5
True
>>> ESMP.ESMP_Finalize()
"""
import numpy as np
from cmt.grids import (UniformRectilinear, Rectilinear, Structured,
Unstructured)
from cmt.grids.igrid import (IGrid, IField, DimensionError,
CenteringValueError, CENTERING_CHOICES)
try:
import ESMP
except ImportError:
import warnings
warnings.warn('unable to import ESMP', ImportWarning)
_WITH_ESMP = False
else:
_WITH_ESMP = True
if not _WITH_ESMP:
__doc__ = "This module is not available as no ESMP installation was found"
class EsmpGrid (IGrid):
    """Base grid that mirrors itself into a 2-D ESMP mesh.

    On construction the grid's points and cells are registered as mesh
    nodes and QUAD elements (nodes first, as the elements reference them).
    """
    def __init__ (self):
        self._mesh = ESMP.ESMP_MeshCreate (2, 2)
        # nodes must exist before elements that reference them
        self._mesh_add_nodes ()
        self._mesh_add_elements ()
        super (EsmpGrid, self).__init__ ()
    def as_mesh (self):
        """Return the underlying ESMP_Mesh handle."""
        return self._mesh
    def _mesh_add_nodes (self):
        """Register every grid point with the mesh (ids are 1-based)."""
        node_ids = np.arange (1, self.get_point_count ()+1, dtype=np.int32)
        (x, y) = (self.get_x (), self.get_y ())
        # coordinates are interleaved as (x0, y0, x1, y1, ...)
        node_coords = np.empty (x.size+y.size, dtype=np.float64)
        (node_coords[0::2], node_coords[1::2]) = (x, y)
        node_owner = np.zeros (self.get_point_count (), dtype=np.int32)
        ESMP.ESMP_MeshAddNodes (self._mesh, self.get_point_count (), node_ids, node_coords, node_owner)
    def _mesh_add_elements (self):
        """Register every cell as a QUAD element (ids/connectivity 1-based)."""
        cell_ids = np.arange (1, self.get_cell_count ()+1, dtype=np.int32)
        cell_types = (np.ones (self.get_cell_count (), dtype=np.int32) *
                      ESMP.ESMP_MESHELEMTYPE_QUAD)
        # ESMP connectivity is 1-based; grid connectivity is 0-based
        cell_conn = np.array (self.get_connectivity (), dtype=np.int32)+1
        ESMP.ESMP_MeshAddElements (self._mesh, self.get_cell_count (), cell_ids, cell_types, cell_conn)
    def reverse_element_ordering (self):
        """Flip the node order of every element in place."""
        last_offset = 0
        for offset in self._offset:
            c = self._connectivity[last_offset:offset].copy ()
            self._connectivity[last_offset:offset] = c[::-1]
            last_offset = offset
class EsmpUnstructured (Unstructured, EsmpGrid):
    """Unstructured grid exposed as an ESMP mesh."""
    name = 'ESMPUnstructured'
class EsmpStructured (Structured, EsmpGrid):
    """Structured grid exposed as an ESMP mesh."""
    name = 'ESMPStructured'
class EsmpRectilinear (Rectilinear, EsmpGrid):
    """Rectilinear grid exposed as an ESMP mesh."""
    name = 'ESMPRectilinear'
class EsmpUniformRectilinear (UniformRectilinear, EsmpStructured):
    """Uniform rectilinear (raster) grid exposed as an ESMP mesh."""
    name = 'ESMPUniformRectilinear'
class EsmpField (IField):
    """Mixin that stores named data fields on the grid's ESMP mesh."""
    def __init__ (self, *args, **kwargs):
        super (EsmpField, self).__init__ (*args, **kwargs)
        # name -> ESMP field handle
        self._fields = {}
    def add_field (self, field_name, val, centering='zonal'):
        """Create an ESMP field *field_name* and fill it with *val*.

        centering -- 'zonal' puts data on elements (cells); any other
        valid centering puts it on nodes (points).
        Raises CenteringValueError for an unknown centering and
        DimensionError when val's size doesn't match the mesh.
        """
        if centering not in CENTERING_CHOICES:
            raise CenteringValueError (centering)
        if centering=='zonal' and val.size != self.get_cell_count ():
            raise DimensionError (val.size, self.get_cell_count ())
        elif centering!='zonal' and val.size != self.get_point_count ():
            raise DimensionError (val.size, self.get_point_count ())
        if centering=='zonal':
            meshloc=ESMP.ESMP_MESHLOC_ELEMENT
        else:
            meshloc=ESMP.ESMP_MESHLOC_NODE
        field = ESMP.ESMP_FieldCreate (self._mesh, field_name, meshloc=meshloc)
        field_ptr = ESMP.ESMP_FieldGetPtr(field, 0)
        # copy values into the ESMP-managed buffer (flattens val)
        field_ptr.flat = val.flat
        self._fields[field_name] = field
    def get_field (self, field_name):
        """Return the raw data array of the named field."""
        field = self._fields[field_name]
        return ESMP.ESMP_FieldGetPtr(field, 0)
    def as_esmp (self, field_name):
        """Return the ESMP field object for *field_name*."""
        return self._fields[field_name]
class EsmpStructuredField (EsmpStructured, EsmpField):
    """Structured-grid field that also validates multi-dimensional shapes."""
    def add_field (self, field_name, val, centering='zonal'):
        """Add *val* as a named field after checking its shape.

        Zonal (cell) data must match the grid shape minus one in each
        dimension; point data must match the grid shape exactly.  1-D
        arrays are only size-checked (by the superclass).
        """
        if centering=='zonal':
            if val.ndim > 1 and np.any (val.shape != self.get_shape ()-1):
                raise DimensionError (val.shape, self.get_shape ()-1)
        elif centering!='zonal':
            if val.ndim > 1 and np.any (val.shape != self.get_shape ()):
                raise DimensionError (val.shape, self.get_shape ())
        try:
            super (EsmpStructuredField, self).add_field (field_name, val, centering=centering)
        # BUG FIX: 'except DimensionError, CenteringValueError:' (Python 2
        # comma syntax) bound CenteringValueError as the alias and caught
        # only DimensionError; catch both exception types as a tuple.
        except (DimensionError, CenteringValueError):
            raise
class EsmpUnstructuredField (EsmpUnstructured, EsmpField):
    """Unstructured ESMP grid with field storage."""
    pass
class EsmpRectilinearField (EsmpRectilinear, EsmpStructuredField):
    """Rectilinear ESMP grid with shape-checked field storage."""
    pass
class EsmpRasterField (EsmpUniformRectilinear, EsmpRectilinearField):
    """Uniform rectilinear (raster) ESMP grid with field storage."""
    pass
def run_regridding(srcfield, dstfield, **kwds):
    """Regrid *srcfield* onto *dstfield* and return *dstfield*.

    Keyword arguments:
        method -- ESMP regrid method (default ESMP_REGRIDMETHOD_CONSERVE)
        unmapped -- unmapped-point action (default ESMP_UNMAPPEDACTION_ERROR)

    PRECONDITIONS: Two ESMP_Fields have been created and a regridding
    operation is desired from 'srcfield' to 'dstfield'.
    POSTCONDITIONS: An ESMP regridding operation has set the data on
    'dstfield'.
    """
    regrid_method = kwds.get('method', ESMP.ESMP_REGRIDMETHOD_CONSERVE)
    unmapped_action = kwds.get('unmapped', ESMP.ESMP_UNMAPPEDACTION_ERROR)
    # Store, apply, then release the regridding route handle.
    handle = ESMP.ESMP_FieldRegridStore(srcfield, dstfield, regrid_method,
                                        unmapped_action)
    ESMP.ESMP_FieldRegrid(srcfield, dstfield, handle)
    ESMP.ESMP_FieldRegridRelease(handle)
    return dstfield
if __name__ == '__main__':
    import doctest
    # NORMALIZE_WHITESPACE lets the doctest array printouts match loosely.
    doctest.testmod (optionflags=doctest.NORMALIZE_WHITESPACE)
Reformatted for PEP 8 style compliance.
#! /bin/env python
"""
Examples
========
Create a grid that looks like this,
::
(0) --- (1) --- (2)
| | |
| 0 | 1 |
| | |
(3) --- (4) --- (5)
>>> ESMP.ESMP_Initialize()
>>> g = EsmpUnstructured ([0, 1, 2, 0, 1, 2], [0, 0, 0, 1, 1, 1],
... [0, 1, 4, 3, 1, 2, 5, 4], [4, 8])
>>> g = EsmpStructured ([0, 1, 2, 0, 1, 2], [0, 0, 0, 1, 1, 1], (3, 2))
The as_mesh method provides a view of the grid as an ESMP_Mesh.
>>> ESMP.ESMP_MeshGetLocalElementCount (g.as_mesh ())
2
>>> ESMP.ESMP_MeshGetLocalNodeCount (g.as_mesh ())
6
ESMP elements are the same as the grids cells. Likewise with nodes and points.
>>> g = EsmpRectilinear ([0, 1, 2], [0, 1])
>>> ESMP.ESMP_MeshGetLocalElementCount (g.as_mesh ()) == g.get_cell_count ()
True
>>> ESMP.ESMP_MeshGetLocalNodeCount (g.as_mesh ()) == g.get_point_count ()
True
>>> g = EsmpUniformRectilinear ([3, 2], [1., 1.], [0., 0.])
Uniform Rectilinear Field
-------------------------
Create a field on a grid that looks like this,
::
(0) --- (1) --- (2)
| | |
| 0 | 1 |
| | |
(3) --- (4) --- (5)
Create the field,
>>> g = EsmpRasterField ((3,2), (2,1), (0, 0))
>>> g.get_cell_count ()
2
>>> g.get_point_count ()
6
Add some data at the points of our grid.
>>> data = np.arange (6)
>>> g.add_field ('var0', data, centering='point')
>>> f = g.get_field ('var0')
>>> f
array([ 0., 1., 2., 3., 4., 5.])
>>> print f.dtype
float64
The data can be given either as a 1D array or with the same shape
as the point grid. In either case, though, it will be flattened.
>>> data = np.arange (6)
>>> data.shape = (2, 3)
>>> g.add_field ('var0', data, centering='point')
>>> f = g.get_field ('var0')
>>> f
array([ 0., 1., 2., 3., 4., 5.])
If the size or shape doesn't match, it's an error.
>>> data = np.arange (2)
>>> g.add_field ('bad var', data, centering='point')
Traceback (most recent call last):
...
DimensionError: 2 != 6
>>> data = np.ones ((3, 2))
>>> g.add_field ('bad var', data, centering='point')
Traceback (most recent call last):
...
DimensionError: (3, 2) != (2, 3)
Map between two fields
----------------------
>>> from cmt.grids.raster import UniformRectilinear
>>> from cmt.grids.rectilinear import Rectilinear
>>> #ESMP.ESMP_Initialize()
>>> src = EsmpRasterField ((3,3), (1,1), (0, 0))
>>> data = np.arange (src.get_cell_count (), dtype=np.float64)
>>> src.add_field ('srcfield', data, centering='zonal')
>>> src.get_point_count ()
9
>>> src.get_cell_count ()
4
>>> src.get_x ()
array([ 0., 1., 2., 0., 1., 2., 0., 1., 2.])
>>> src.get_y ()
array([ 0., 0., 0., 1., 1., 1., 2., 2., 2.])
>>> src.get_connectivity () + 1
array([1, 2, 5, 4, 2, 3, 6, 5, 4, 5, 8, 7, 5, 6, 9, 8], dtype=int32)
>>> # EsmpRectilinearField.__mro__
>>> dst = EsmpRectilinearField ([0., .5, 1.5, 2.], [0., .5, 1.5, 2.])
>>> data = np.empty (dst.get_cell_count (), dtype=np.float64)
>>> dst.add_field ('dstfield', data, centering='zonal')
>>> dst.get_point_count ()
16
>>> dst.get_cell_count ()
9
>>> dst.get_x ()
array([ 0. , 0.5, 1.5, 2. , 0. , 0.5, 1.5, 2. , 0. , 0.5, 1.5, 2. , 0. , 0.5, 1.5, 2. ])
>>> dst.get_y ()
array([ 0. , 0. , 0. , 0. , 0.5, 0.5, 0.5, 0.5, 1.5, 1.5, 1.5, 1.5, 2. , 2. , 2. , 2. ])
>>> dst.get_connectivity () + 1
array([ 1, 2, 6, 5, 2, 3, 7, 6, 3, 4, 8, 7, 5, 6, 10, 9, 6,
7, 11, 10, 7, 8, 12, 11, 9, 10, 14, 13, 10, 11, 15, 14, 11, 12,
16, 15], dtype=int32)
>>> src_field = src.as_esmp ('srcfield')
>>> dst_field = dst.as_esmp ('dstfield')
>>> ESMP.ESMP_MeshGetLocalElementCount (src.as_mesh ())
4
>>> ESMP.ESMP_MeshGetLocalNodeCount (src.as_mesh ())
9
>>> ESMP.ESMP_MeshGetLocalElementCount (dst.as_mesh ())
9
>>> ESMP.ESMP_MeshGetLocalNodeCount (dst.as_mesh ())
16
#>>> ESMP.ESMP_FieldPrint (src_field)
#>>> ESMP.ESMP_FieldPrint (dst_field)
>>> f = run_regridding (src_field, dst_field)
>>> field_ptr = ESMP.ESMP_FieldGetPtr(f, 0)
A bigger grid
-------------
>>> (M, N) = (300, 300)
>>> src = EsmpRasterField ((M, N), (1, 1), (0, 0))
Map values on cells
-------------------
>>> (X, Y) = np.meshgrid (np.arange (0.5, 299.5, 1.),
... np.arange (0.5, 299.5, 1.))
>>> data = np.sin (np.sqrt (X**2+Y**2)*np.pi/M)
>>> src.add_field ('srcfield', data, centering='zonal')
>>> dst = EsmpRasterField ((M*2-1, N*2-1), (1./2, 1./2), (0, 0))
>>> data = np.empty (dst.get_cell_count (), dtype=np.float64)
>>> dst.add_field ('dstfield', data, centering='zonal')
>>> src_field = src.as_esmp ('srcfield')
>>> dst_field = dst.as_esmp ('dstfield')
>>> f = run_regridding (src_field, dst_field)
>>> ans = ESMP.ESMP_FieldGetPtr(f, 0)
>>> (X, Y) = np.meshgrid (np.arange (0.5, 299.5, .5),
... np.arange (0.5, 299.5, .5))
>>> exact = np.sin (np.sqrt (X**2+Y**2)*np.pi/M)
>>> np.sum (np.abs (exact.flat-ans))/(M*N*4.) < 1e-2
True
Map values on points
--------------------
>>> (X, Y) = np.meshgrid(np.arange(0.5, 300.5, 1.),
... np.arange(0.5, 300.5, 1.))
>>> data = np.sin (np.sqrt (X**2+Y**2)*np.pi/M)
>>> src.add_field ('srcfield_at_points', data, centering='point')
>>> data = np.empty (dst.get_point_count (), dtype=np.float64)
>>> dst.add_field ('dstfield_at_points', data, centering='point')
>>> src_field = src.as_esmp ('srcfield_at_points')
>>> dst_field = dst.as_esmp ('dstfield_at_points')
>>> f = run_regridding (src_field, dst_field,
... method=ESMP.ESMP_REGRIDMETHOD_BILINEAR)
>>> ans = ESMP.ESMP_FieldGetPtr(f, 0)
>>> (X, Y) = np.meshgrid(np.arange(0.5, 300., .5), np.arange(0.5, 300., .5))
>>> exact = np.sin (np.sqrt (X**2+Y**2)*np.pi/M)
>>> np.sum (np.abs (exact.flat-ans))/(M*N*4.) < 1e-5
True
>>> ESMP.ESMP_Finalize()
"""
import numpy as np
from cmt.grids import (UniformRectilinear, Rectilinear, Structured,
Unstructured)
from cmt.grids.igrid import (IGrid, IField, DimensionError,
CenteringValueError, CENTERING_CHOICES)
try:
import ESMP
except ImportError:
import warnings
warnings.warn('unable to import ESMP', ImportWarning)
_WITH_ESMP = False
else:
_WITH_ESMP = True
if not _WITH_ESMP:
__doc__ = "This module is not available as no ESMP installation was found"
class EsmpGrid(IGrid):
    """Base grid that mirrors itself into a 2-D ESMP mesh.

    On construction the grid's points and cells are registered as mesh
    nodes and QUAD elements (nodes first, as the elements reference
    them).
    """
    def __init__(self):
        self._mesh = ESMP.ESMP_MeshCreate(2, 2)
        # nodes must exist before the elements that reference them
        self._mesh_add_nodes()
        self._mesh_add_elements()
        super(EsmpGrid, self).__init__()

    def as_mesh(self):
        """Return the underlying ESMP_Mesh handle."""
        return self._mesh

    def _mesh_add_nodes(self):
        """Register every grid point with the mesh (ids are 1-based)."""
        point_count = self.get_point_count()
        node_ids = np.arange(1, point_count + 1, dtype=np.int32)
        x, y = self.get_x(), self.get_y()
        # coordinates are interleaved as (x0, y0, x1, y1, ...)
        node_coords = np.empty(x.size + y.size, dtype=np.float64)
        node_coords[0::2] = x
        node_coords[1::2] = y
        node_owner = np.zeros(point_count, dtype=np.int32)
        ESMP.ESMP_MeshAddNodes(self._mesh, point_count, node_ids,
                               node_coords, node_owner)

    def _mesh_add_elements(self):
        """Register every cell as a QUAD element (ids/connectivity 1-based)."""
        cell_count = self.get_cell_count()
        cell_ids = np.arange(1, cell_count + 1, dtype=np.int32)
        cell_types = (np.ones(cell_count, dtype=np.int32) *
                      ESMP.ESMP_MESHELEMTYPE_QUAD)
        # ESMP connectivity is 1-based; grid connectivity is 0-based
        cell_conn = np.array(self.get_connectivity(), dtype=np.int32) + 1
        ESMP.ESMP_MeshAddElements(self._mesh, cell_count, cell_ids,
                                  cell_types, cell_conn)

    def reverse_element_ordering(self):
        """Flip the node order of every element in place."""
        start = 0
        for end in self._offset:
            reversed_nodes = self._connectivity[start:end][::-1].copy()
            self._connectivity[start:end] = reversed_nodes
            start = end
class EsmpUnstructured(Unstructured, EsmpGrid):
    """Unstructured grid exposed as an ESMP mesh."""
    name = 'ESMPUnstructured'
class EsmpStructured(Structured, EsmpGrid):
    """Structured grid exposed as an ESMP mesh."""
    name = 'ESMPStructured'
class EsmpRectilinear(Rectilinear, EsmpGrid):
    """Rectilinear grid exposed as an ESMP mesh."""
    name = 'ESMPRectilinear'
class EsmpUniformRectilinear(UniformRectilinear, EsmpStructured):
    """Uniform rectilinear (raster) grid exposed as an ESMP mesh."""
    name = 'ESMPUniformRectilinear'
class EsmpField(IField):
    """Mixin that stores named data fields on the grid's ESMP mesh."""
    def __init__(self, *args, **kwargs):
        super(EsmpField, self).__init__(*args, **kwargs)
        # name -> ESMP field handle
        self._fields = {}

    def add_field(self, field_name, val, centering='zonal'):
        """Create an ESMP field *field_name* and fill it with *val*.

        centering -- 'zonal' puts data on elements (cells); any other
        valid centering puts it on nodes (points).
        Raises CenteringValueError for an unknown centering and
        DimensionError when val's size doesn't match the mesh.
        """
        if centering not in CENTERING_CHOICES:
            raise CenteringValueError(centering)
        is_zonal = centering == 'zonal'
        expected = self.get_cell_count() if is_zonal else self.get_point_count()
        if val.size != expected:
            raise DimensionError(val.size, expected)
        if is_zonal:
            location = ESMP.ESMP_MESHLOC_ELEMENT
        else:
            location = ESMP.ESMP_MESHLOC_NODE
        new_field = ESMP.ESMP_FieldCreate(self._mesh, field_name,
                                          meshloc=location)
        # copy values into the ESMP-managed buffer (flattens val)
        ESMP.ESMP_FieldGetPtr(new_field, 0).flat = val.flat
        self._fields[field_name] = new_field

    def get_field(self, field_name):
        """Return the raw data array of the named field."""
        return ESMP.ESMP_FieldGetPtr(self._fields[field_name], 0)

    def as_esmp(self, field_name):
        """Return the ESMP field object for *field_name*."""
        return self._fields[field_name]
class EsmpStructuredField(EsmpStructured, EsmpField):
    """Structured-grid field that also validates multi-dimensional shapes."""
    def add_field(self, field_name, val, centering='zonal'):
        """Add *val* as a named field after checking its shape.

        Zonal (cell) data must match the grid shape minus one in each
        dimension; point data must match the grid shape exactly.  1-D
        arrays are only size-checked (by the superclass).
        """
        if centering == 'zonal':
            if val.ndim > 1 and np.any(val.shape != self.get_shape() - 1):
                raise DimensionError(val.shape, self.get_shape() - 1)
        elif centering != 'zonal':
            if val.ndim > 1 and np.any(val.shape != self.get_shape()):
                raise DimensionError(val.shape, self.get_shape())
        try:
            super(EsmpStructuredField, self).add_field(field_name, val,
                                                       centering=centering)
        # BUG FIX: 'except DimensionError, CenteringValueError:' (Python 2
        # comma syntax) bound CenteringValueError as the alias and caught
        # only DimensionError; catch both exception types as a tuple.
        except (DimensionError, CenteringValueError):
            raise
class EsmpUnstructuredField(EsmpUnstructured, EsmpField):
    """Unstructured ESMP grid with field storage."""
    pass
class EsmpRectilinearField(EsmpRectilinear, EsmpStructuredField):
    """Rectilinear ESMP grid with shape-checked field storage."""
    pass
class EsmpRasterField(EsmpUniformRectilinear, EsmpRectilinearField):
    """Uniform rectilinear (raster) ESMP grid with field storage."""
    pass
def run_regridding(srcfield, dstfield, **kwds):
    """Regrid *srcfield* onto *dstfield* and return *dstfield*.

    Keyword arguments:
        method -- ESMP regrid method (default ESMP_REGRIDMETHOD_CONSERVE)
        unmapped -- unmapped-point action (default ESMP_UNMAPPEDACTION_ERROR)

    PRECONDITIONS: Two ESMP_Fields have been created and a regridding
    operation is desired from 'srcfield' to 'dstfield'.
    POSTCONDITIONS: An ESMP regridding operation has set the data on
    'dstfield'.
    """
    regrid_method = kwds.get('method', ESMP.ESMP_REGRIDMETHOD_CONSERVE)
    unmapped_action = kwds.get('unmapped', ESMP.ESMP_UNMAPPEDACTION_ERROR)
    # Store the route handle, apply it, then release it.
    route = ESMP.ESMP_FieldRegridStore(srcfield, dstfield, regrid_method,
                                       unmapped_action)
    ESMP.ESMP_FieldRegrid(srcfield, dstfield, route)
    ESMP.ESMP_FieldRegridRelease(route)
    return dstfield
|
from app import app
from flask import Flask, request, jsonify
from twilio_driver import send_text
import kairos
import json
DEFAULT_GALLERY = 'test1'
# App Logic
@app.route('/', methods=['GET'])
def index():
    """Trivial liveness-check endpoint."""
    return 'yo'
@app.route('/upload', methods=['POST'])
def upload():
    """Enroll a face: read img_url/name from the JSON body and add the
    face to the default Kairos gallery.  Returns {'success': ...}."""
    data = json.loads(request.data)
    img_url = data.get('img_url')
    name = data.get('name')
    success = kairos.add_face_url(img_url, name, DEFAULT_GALLERY)
    # Python 2 print statement: debug trace of the enrollment result
    print 'status of upload: {}'.format(success)
    return jsonify({'success': success})
@app.route('/verify', methods=['GET'])
def verify():
    """Identify the face at ?img_url=... against the default gallery.

    Returns {'allowed': bool, 'name': str-or-None}; allowed is True
    when Kairos recognized someone.
    """
    img_url = request.args.get('img_url')
    name = kairos.identify_face_url(img_url, DEFAULT_GALLERY)
    allowed = name is not None
    # TODO: open the door.
    print 'status of verification: {}'.format(allowed)
    return jsonify({'allowed': allowed,
                    'name': name})
@app.route('/twilio-hook/',methods=['POST'])
def handle_text():
    """Twilio SMS webhook: log the sender and message body, reply 200."""
    text = request.values.get('Body')
    # drop the leading '+' from the E.164-formatted number
    phone_num = request.values.get('From')[1:]
    print 'new text from {}: {}'.format(phone_num, text)
    return 'Thanks!', 200
Verification working: /verify now reads the image URL from a JSON POST body.
from app import app
from flask import Flask, request, jsonify
from twilio_driver import send_text
import kairos
import json
DEFAULT_GALLERY = 'test1'
# App Logic
@app.route('/', methods=['GET'])
def index():
    """Trivial liveness-check endpoint."""
    return 'yo'
@app.route('/upload', methods=['POST'])
def upload():
    """Enroll a face: read img_url/name from the JSON body and add the
    face to the default Kairos gallery.  Returns {'success': ...}."""
    data = json.loads(request.data)
    img_url = data.get('img_url')
    name = data.get('name')
    success = kairos.add_face_url(img_url, name, DEFAULT_GALLERY)
    # Python 2 print statement: debug trace of the enrollment result
    print 'status of upload: {}'.format(success)
    return jsonify({'success': success})
@app.route('/verify', methods=['POST'])
def verify():
    """Identify the face given by the JSON body's img_url.

    Returns {'allowed': bool, 'name': str-or-None}; allowed is True
    when Kairos recognized someone.
    """
    data = json.loads(request.data)
    img_url = data.get('img_url')
    name = kairos.identify_face_url(img_url, DEFAULT_GALLERY)
    allowed = name is not None
    # TODO: open the door.
    print 'status of verification: {}; name: {}'.format(allowed, name)
    return jsonify({'allowed': allowed,
                    'name': name})
@app.route('/twilio-hook/',methods=['POST'])
def handle_text():
    """Twilio SMS webhook: log the sender and message body, reply 200."""
    text = request.values.get('Body')
    # drop the leading '+' from the E.164-formatted number
    phone_num = request.values.get('From')[1:]
    print 'new text from {}: {}'.format(phone_num, text)
    return 'Thanks!', 200
|
import os
import tarfile
from flask import Blueprint, render_template, request, jsonify, current_app, g, url_for, redirect
from rq import Queue
from redis import Redis
from .forms import UploadForm
from .. import spfy
from werkzeug.utils import secure_filename
from datetime import datetime
bp = Blueprint('main', __name__)
def fetch_job(job_id):
    '''
    Iterates through all queues looking for the job.

    Returns the rq Job, or None (implicitly) when no configured queue
    knows the id.
    '''
    # Python 2 debug prints
    print 'received'
    print job_id
    for queue in current_app.config['QUEUES']:
        q = Queue(queue, connection=Redis())
        job = q.fetch_job(job_id)
        if job is not None:
            return job
@bp.route('/results/<job_id>')
def job_status(job_id):
    """Return a finished job's result as JSON, or 202 while pending.

    Returns 404 for an unknown id.  BUG FIX: fetch_job returns None
    when no queue knows the id; the old code dereferenced None and
    crashed with an AttributeError (HTTP 500).
    """
    job = fetch_job(job_id)
    if job is None:
        return "Job not found", 404
    if job.is_finished:
        return jsonify(job.result)
    return "Nay!", 202
@bp.route('/upload', methods=['POST'])
def upload():
if request.method == 'POST':
form = request.form
options = {}
#defaults
options['amr']=True
options['vf']=True
options['serotype']=True
print "=== Form Data ==="
for key, value in form.items():
print key, '==>', value
#we need to convert lower-case true/false in js to upper case in python
if type(value) is str:
#remember, we also have numbers
value = value.title()
if key == 'options.amr':
options['amr']=value
elif key == 'options.vf':
options['vf']=value
elif key == 'options.serotype':
options['serotype']=value
file = request.files['file']
if file:
# for saving file
now = datetime.now()
now = now.strftime("%Y-%m-%d-%H-%M-%S-%f")
filename = os.path.join(current_app.config[
'UPLOAD_FOLDER'], now + '-' + secure_filename(file.filename))
file.save(filename)
if tarfile.is_tarfile(filename):
# set filename to dir for spfy call
filename = handle_tar(filename, now)
# for enqueing task
jobs_dict = spfy.spfy(
{'i': filename, 'disable_serotype': not options['serotype'], 'disable_amr': not options['amr'], 'disable_vf': not options['vf']})
print jobs_dict
#strip jobs that the user doesn't want to see
# we run them anyways cause we want the data analyzed on our end
for job_id, descrip_dict in jobs_dict.items():
print job_id, descrip_dict
if (not options['serotype']) and (not options['vf']):
if descrip_dict['analysis'] == 'Virulence Factors and Serotype':
del jobs_dict[job_id]
if (not options['amr']):
if descrip_dict['analysis'] == 'Antimicrobial Resistance':
del jobs_dict[job_id]
return jsonify(jobs_dict)
return 500
@bp.route('/', methods=['GET', 'POST'])
def index():
    """Serve the single-page upload UI."""
    return render_template("index.html")
def allowed_file(filename):
    """True when *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension in current_app.config['ALLOWED_EXTENSIONS']
def handle_tar(filename, now):
    """Extract a tar upload into its own timestamped directory.

    Member names are vetted with secure_filename before extraction, and
    extracted files are prefixed with *now* to keep names unique.
    Returns the extraction directory (handed to spfy), an
    ('invalid upload', 500) tuple for an unsafe member name, or None
    when *filename* is not a tar file (callers check is_tarfile first).
    """
    if not tarfile.is_tarfile(filename):
        return None
    tar = tarfile.open(filename)
    try:
        # BUG FIX: os.path.join was called with a single pre-concatenated
        # string; join the components properly.
        extracted_dir = os.path.join(current_app.config['UPLOAD_FOLDER'], now)
        os.mkdir(extracted_dir)
        for member in tar.getmembers():
            # NOTE(review): this only rejects names secure_filename maps to
            # '' -- it does not prevent '../' traversal, since
            # secure_filename('../x') is 'x' (truthy).  Consider validating
            # member paths before extractall.
            if not secure_filename(member.name):
                return 'invalid upload', 500
        # TODO: wipe temp data
        tar.extractall(path=extracted_dir)
        for fn in os.listdir(extracted_dir):
            os.rename(os.path.join(extracted_dir, fn),
                      os.path.join(extracted_dir, now + '-' + fn))
    finally:
        # BUG FIX: the tar handle leaked when an unsafe member aborted early
        tar.close()
    # set filename to dir for spfy call
    return extracted_dir
Investigate why no jobs are deleted: add debug prints around the jobs_dict deletions.
import os
import tarfile
from flask import Blueprint, render_template, request, jsonify, current_app, g, url_for, redirect
from rq import Queue
from redis import Redis
from .forms import UploadForm
from .. import spfy
from werkzeug.utils import secure_filename
from datetime import datetime
bp = Blueprint('main', __name__)
def fetch_job(job_id):
    '''
    Iterates through all queues looking for the job.

    Returns the rq Job, or None (implicitly) when no configured queue
    knows the id.
    '''
    # Python 2 debug prints
    print 'received'
    print job_id
    for queue in current_app.config['QUEUES']:
        q = Queue(queue, connection=Redis())
        job = q.fetch_job(job_id)
        if job is not None:
            return job
@bp.route('/results/<job_id>')
def job_status(job_id):
    """Return a finished job's result as JSON, or 202 while pending.

    Returns 404 for an unknown id.  BUG FIX: fetch_job returns None
    when no queue knows the id; the old code dereferenced None and
    crashed with an AttributeError (HTTP 500).
    """
    job = fetch_job(job_id)
    if job is None:
        return "Job not found", 404
    if job.is_finished:
        return jsonify(job.result)
    return "Nay!", 202
@bp.route('/upload', methods=['POST'])
def upload():
if request.method == 'POST':
form = request.form
options = {}
#defaults
options['amr']=True
options['vf']=True
options['serotype']=True
print "=== Form Data ==="
for key, value in form.items():
print key, '==>', value
#we need to convert lower-case true/false in js to upper case in python
if type(value) is str:
#remember, we also have numbers
value = value.title()
if key == 'options.amr':
options['amr']=value
elif key == 'options.vf':
options['vf']=value
elif key == 'options.serotype':
options['serotype']=value
file = request.files['file']
if file:
# for saving file
now = datetime.now()
now = now.strftime("%Y-%m-%d-%H-%M-%S-%f")
filename = os.path.join(current_app.config[
'UPLOAD_FOLDER'], now + '-' + secure_filename(file.filename))
file.save(filename)
if tarfile.is_tarfile(filename):
# set filename to dir for spfy call
filename = handle_tar(filename, now)
# for enqueing task
jobs_dict = spfy.spfy(
{'i': filename, 'disable_serotype': not options['serotype'], 'disable_amr': not options['amr'], 'disable_vf': not options['vf']})
print jobs_dict
#strip jobs that the user doesn't want to see
# we run them anyways cause we want the data analyzed on our end
for job_id, descrip_dict in jobs_dict.items():
print job_id, descrip_dict
if (not options['serotype']) and (not options['vf']):
if descrip_dict['analysis'] == 'Virulence Factors and Serotype':
print 'deleteing s/vf'
del jobs_dict[job_id]
if (not options['amr']):
if descrip_dict['analysis'] == 'Antimicrobial Resistance':
print 'deleting amr'
del jobs_dict[job_id]
return jsonify(jobs_dict)
return 500
@bp.route('/', methods=['GET', 'POST'])
def index():
    """Serve the single-page upload UI."""
    return render_template("index.html")
def allowed_file(filename):
    """True when *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension in current_app.config['ALLOWED_EXTENSIONS']
def handle_tar(filename, now):
    """Extract an uploaded tarball into UPLOAD_FOLDER/<now> and return that dir.

    Members with unsafe names abort with ('invalid upload', 500).  Extracted
    files are renamed to '<now>-<name>' so concurrent uploads cannot collide.
    """
    if tarfile.is_tarfile(filename):
        tar = tarfile.open(filename)
        try:
            extracted_dir = os.path.join(
                current_app.config['UPLOAD_FOLDER'] + '/' + now)
            # Validate every member BEFORE touching the filesystem.
            # secure_filename() alone is not enough: it returns a truthy
            # sanitized name even for '../../etc/passwd', so absolute paths
            # and parent-directory components must be rejected explicitly.
            for member in tar.getmembers():
                name = member.name
                if (not secure_filename(name)
                        or name.startswith('/')
                        or '..' in name.split('/')):
                    return 'invalid upload', 500
            os.mkdir(extracted_dir)
            # TODO: wipe temp data
            tar.extractall(path=extracted_dir)
            for fn in os.listdir(extracted_dir):
                os.rename(extracted_dir + '/' + fn,
                          extracted_dir + '/' + now + '-' + fn)
        finally:
            # the original leaked the handle when returning early
            tar.close()
        # the caller passes this directory to spfy in place of a single file
        return extracted_dir
|
import httplib2
import os
from flask import url_for, session, redirect, request, render_template, jsonify, json
from apiclient import discovery, errors
from oauth2client import client
from app import app
from copy import deepcopy
import threading
import datetime
import base64
import re
from pybars import Compiler
from app import cache
from datetime import timedelta
from flask import make_response, current_app
from functools import update_wrapper
# Handlebars template compiler (pybars), built once and reused across requests.
compiler = Compiler()
# Scratch buffers shared by the batch-request callbacks and the parser
# threads, cleared after each request.
# NOTE(review): module-level globals are not request-isolated -- concurrent
# requests will interleave their data here.
fullmessageset = []
parsedmessageset = []
def crossdomain(origin=None, methods=None, headers=None,
                max_age=21600, attach_to_all=True,
                automatic_options=True):
    """Decorator factory that adds CORS headers to a view's responses.

    origin: allowed Origin value(s) -- a string or an iterable of strings.
    methods: explicit Access-Control-Allow-Methods list; defaults to the
        methods Flask reports on a default OPTIONS response.
    headers: optional Access-Control-Allow-Headers list.
    max_age: preflight cache lifetime in seconds (or a timedelta).
    attach_to_all: when False, only OPTIONS responses get the CORS headers.
    automatic_options: answer OPTIONS preflights with Flask's default
        response instead of invoking the wrapped view.

    NOTE(review): uses `basestring`, so this module is Python 2 only.
    """
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, basestring):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, basestring):
        origin = ', '.join(origin)
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()

    def get_methods():
        # honour an explicit method list, otherwise ask Flask
        if methods is not None:
            return methods
        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']

    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))
            if not attach_to_all and request.method != 'OPTIONS':
                return resp
            h = resp.headers
            h['Access-Control-Allow-Origin'] = origin
            h['Access-Control-Allow-Methods'] = get_methods()
            h['Access-Control-Max-Age'] = str(max_age)
            if headers is not None:
                h['Access-Control-Allow-Headers'] = headers
            return resp
        f.provide_automatic_options = False
        return update_wrapper(wrapped_function, f)
    return decorator
@app.route('/')
def index():
    """Inbox view: fetch 50 Gmail threads, parse them on worker threads,
    render the Handlebars email-list template inside piemail.html."""
    basedir = os.path.abspath(os.path.dirname(__file__))
    templatedir = os.path.join(basedir, 'static/piemail/www/libs/templates/email-list.handlebars')
    # read the Handlebars source and close the handle promptly
    # (the original leaked the file object)
    template_file = open(templatedir, "r")
    try:
        source = template_file.read().decode('utf-8')
    finally:
        template_file.close()
    template = compiler.compile(source)
    if 'credentials' not in session:
        return redirect(url_for('oauth2callback'))
    credentials = client.OAuth2Credentials.from_json(session['credentials'])
    if credentials.access_token_expired:
        return redirect(url_for('oauth2callback'))
    http_auth = credentials.authorize(httplib2.Http())
    service = discovery.build('gmail', 'v1', http=http_auth)
    results = service.users().threads().list(userId='me', maxResults=50, fields="threads/id", q="in:inbox").execute()
    batch = service.new_batch_http_request(callback=processthreads)
    for thread in results['threads']:
        batch.add(service.users().threads().get(userId='me', id=thread['id']))
    batch.execute()
    # Parse each thread on its own worker, then WAIT for all of them:
    # the original snapshotted parsedmessageset without joining, racing
    # the workers and rendering a partial (often empty) inbox.
    workers = []
    for emailthread in fullmessageset:
        t = threading.Thread(target=parse_thread, kwargs={"emailthread": emailthread})
        t.start()
        workers.append(t)
    for t in workers:
        t.join()
    newcollection = deepcopy(parsedmessageset)
    fullmessageset[:] = []
    parsedmessageset[:] = []
    output = template(newcollection)
    # cache.set(credentials.access_token, newcollection, 15)
    return render_template("piemail.html", output=output)
@app.route('/inbox', methods=['GET', 'POST', 'OPTIONS'])
@crossdomain(origin='*')
def inbox():
    """Return the cached parsed inbox for the signed-in user as JSON."""
    if 'credentials' not in session:
        return redirect(url_for('oauth2callback'))
    creds = client.OAuth2Credentials.from_json(session['credentials'])
    if creds.access_token_expired:
        return redirect(url_for('oauth2callback'))
    return json.dumps({'newcollection': cache.get(creds.access_token)})
@app.route('/signmeout', methods=['GET', 'POST', 'OPTIONS'])
@crossdomain(origin='*')
def signmeout():
    """Revoke the OAuth token, clear the session and show the login page.

    XHR callers receive a JSON payload pointing back at this URL so the
    browser performs the actual sign-out on the follow-up navigation.
    """
    if request.is_xhr:
        return json.dumps({'status': 'OK', 'redirect_url': '/signmeout'})
    creds = client.OAuth2Credentials.from_json(session['credentials'])
    creds.revoke(httplib2.Http())
    session.clear()
    return render_template("login.html")
@app.route('/threadslist', methods=['POST', 'GET', 'OPTIONS'])
@crossdomain(origin='*')
def threadslist():
    """Fetch every message of the thread given by the 'threadid' value and
    return the parsed messages as JSON."""
    if 'credentials' not in session:
        return redirect(url_for('oauth2callback'))
    credentials = client.OAuth2Credentials.from_json(session['credentials'])
    if credentials.access_token_expired:
        return redirect(url_for('oauth2callback'))
    http_auth = credentials.authorize(httplib2.Http())
    service = discovery.build('gmail', 'v1', http=http_auth)
    threadid = request.values['threadid']
    try:
        thread = service.users().threads().get(userId='me', id=threadid).execute()
    except errors.HttpError as error:  # 'as' syntax works on Python 2.6+ and 3
        print('An error occurred: %s' % error)
        # NOTE(review): jsonify() of an HttpError object is unlikely to
        # serialize; confirm the intended error payload.
        return jsonify(error)
    batch = service.new_batch_http_request(callback=processmessages)
    for message in thread['messages']:
        batch.add(service.users().messages().get(userId='me', id=message['id']))
    batch.execute()
    # Wait for every parser thread: the original read parsedmessageset
    # without joining and could return a partial message list.
    workers = []
    for emailmessage in fullmessageset:
        m = threading.Thread(target=parse_message, kwargs={"emailmessage": emailmessage})
        m.start()
        workers.append(m)
    for m in workers:
        m.join()
    response = dict()
    response['iserror'] = False
    response['savedsuccess'] = True
    response['currentMessageList'] = deepcopy(parsedmessageset)
    fullmessageset[:] = []
    parsedmessageset[:] = []
    return jsonify(response)
def processthreads(request_id, response, exception):
    """Batch callback: stash (request_id, first message) of each fetched thread."""
    if exception is None:
        fullmessageset.append((request_id, response['messages'][0]))
def processmessages(request_id, response, exception):
    """Batch callback: stash each fetched message keyed by its request id."""
    if exception is None:
        fullmessageset.append((request_id, response))
@app.route('/emaildata/<emailid>', methods=['POST', 'GET', 'OPTIONS'])
@crossdomain(origin='*')
def emaildata(emailid):
    """Render the detail view for a single email id."""
    context = {'emailid': emailid}
    return render_template('emaildata.html', **context)
@app.context_processor
def inject_static_url():
    """Expose static_url / local_static_url / host_url to every template.

    On Heroku the local static path is swapped for the S3 bucket URL.
    """
    s3_url = 'https://s3.amazonaws.com/netbardus/'
    on_heroku = os.environ.get('HEROKU') is not None
    local_url = s3_url if on_heroku else app.static_url_path
    if not s3_url.endswith('/'):
        s3_url += '/'
    if not local_url.endswith('/'):
        local_url += '/'
    return dict(
        static_url=s3_url,
        local_static_url=local_url,
        host_url=request.url_root
    )
def parse_thread(emailthread):
    """Convert one (ordinal, gmail-thread-dict) pair into a flat template dict
    and append it to parsedmessageset (runs on a worker thread)."""
    ordinal, thread = emailthread
    # hoist the label list: the original re-indexed emailthread[1]['labelIds']
    # on nearly every line
    labels = thread['labelIds']
    threaditems = dict()
    # Gmail label ids seen here: INBOX, SENT, STARRED, UNREAD, IMPORTANT and
    # the CATEGORY_* tabs (SOCIAL, PERSONAL, PROMOTIONS, FORUMS, UPDATES).
    threaditems['labels'] = labels
    threaditems['unread'] = 'UNREAD' in labels
    threaditems['star'] = 'STARRED' in labels
    if 'CATEGORY_PROMOTIONS' in labels:
        threaditems['category'] = 'promotions'
    if 'CATEGORY_SOCIAL' in labels:
        threaditems['category'] = 'social'
    if 'CATEGORY_UPDATES' in labels:
        threaditems['category'] = 'updates'
    if 'CATEGORY_FORUMS' in labels:
        threaditems['category'] = 'forums'
    # "primary" = inboxed but in none of the category tabs
    if 'INBOX' in labels and not any(
            c in labels for c in ('CATEGORY_SOCIAL', 'CATEGORY_PROMOTIONS',
                                  'CATEGORY_UPDATES', 'CATEGORY_FORUMS')):
        threaditems['category'] = 'primary'
    if 'SENT' in labels:
        threaditems['category'] = 'sent'
    threaditems['inbox'] = 'INBOX' in labels
    threaditems['threadId'] = thread['threadId']
    threaditems['id'] = thread['threadId']
    threaditems['snippet'] = thread['snippet'] + "..."
    # internalDate is epoch milliseconds
    threaditems['timestamp'] = datetime.datetime.fromtimestamp(
        float(thread['internalDate']) / 1000.).strftime("%I:%M %p %b %d")
    threaditems['sender'] = getheaders(thread, "From")
    threaditems['subject'] = getheaders(thread, "Subject")
    threaditems['ordinal'] = ordinal
    threaditems['body'] = getbody(thread)
    threaditems['rawtimestamp'] = thread['internalDate']
    parsedmessageset.append(threaditems)
def parse_message(emailmessage):
    """Flatten one (ordinal, gmail-message-dict) pair and append it to
    parsedmessageset (runs on a worker thread)."""
    ordinal, message = emailmessage
    # internalDate is epoch milliseconds
    stamp = datetime.datetime.fromtimestamp(float(message['internalDate']) / 1000.)
    messageitems = {
        'id': message['id'],
        'threadId': message['threadId'],
        'snippet': message['snippet'],
        'timestamp': stamp.strftime("%H:%M:%S %Y-%m-%d "),
        'sender': getheaders(message, "From"),
        'subject': getheaders(message, "Subject"),
        'body': getbody(message),
        'ordinal': ordinal,
    }
    parsedmessageset.append(messageitems)
def getheaders(emailthread, key):
    """Return the value of the first payload header named *key* (None if absent)."""
    headers = emailthread['payload']['headers']
    matches = (h['value'] for h in headers if h['name'] == key)
    return next(matches, None)
def getbody(message):
    """Decode the HTML body of a gmail message payload.

    Multipart payloads are searched for the text/html part; inline 'cid:'
    image references are rewritten to a placeholder icon because attachment
    content is not served.
    """
    payload = message['payload']
    if 'parts' in payload:
        encodedbody = gethtmlpart(payload['parts'])
    else:
        encodedbody = payload['body']['data']
    decodedbody = base64.urlsafe_b64decode(str(encodedbody))
    # cid hack: swap inline-attachment references for a stock icon
    return re.sub(r'src="cid:([^"]+)"',
                  "src='/static/piemail/www/icons/no_image_available.svg'",
                  decodedbody)
def gethtmlpart(parts):
    """Depth-first search a gmail 'parts' tree for the first text/html body.

    Returns the part's base64url data, or '' when no html part exists.
    """
    for part in parts:
        if 'parts' in part:
            # Recurse, but keep scanning later siblings when this subtree
            # holds no html part.  (The original returned the recursion
            # result unconditionally and could miss an html part in a
            # following sibling.)
            found = gethtmlpart(part['parts'])
            if found:
                return found
        elif part['mimeType'] == 'text/html':
            return part['body']['data']
    return ''
@app.route('/oauth2callback', methods=['POST', 'GET', 'OPTIONS'])
@crossdomain(origin='*')
def oauth2callback(final_url='index'):
    """Two-step Google OAuth2 dance; on success stores the credentials JSON
    in the session and redirects to *final_url*."""
    flow = client.flow_from_clientsecrets(
        'client_secrets.json',
        scope='https://mail.google.com/',
        redirect_uri=url_for('oauth2callback', _external=True)
    )
    auth_code = request.args.get('code')
    if auth_code is None:
        # step 1: send the user to Google's consent screen
        return redirect(flow.step1_get_authorize_url())
    # step 2: trade the authorization code for credentials
    credentials = flow.step2_exchange(auth_code)
    session['credentials'] = credentials.to_json()
    return redirect(url_for(final_url))
@app.errorhandler(404)
def not_found_error(error):
    """Custom 404 page."""
    body = render_template('404.html', error=error)
    return body, 404
@app.errorhandler(500)
def internal_error(error):
    """Custom 500 page."""
    body = render_template('500.html', error=error)
    return body, 500
testing caching
import httplib2
import os
from flask import url_for, session, redirect, request, render_template, jsonify, json
from apiclient import discovery, errors
from oauth2client import client
from app import app
from copy import deepcopy
import threading
import datetime
import base64
import re
from pybars import Compiler
from app import cache
from datetime import timedelta
from flask import make_response, current_app
from functools import update_wrapper
# Handlebars template compiler (pybars), built once and reused across requests.
compiler = Compiler()
# Scratch buffers shared by the batch-request callbacks and the parser
# threads, cleared after each request.
# NOTE(review): module-level globals are not request-isolated -- concurrent
# requests will interleave their data here.
fullmessageset = []
parsedmessageset = []
def crossdomain(origin=None, methods=None, headers=None,
                max_age=21600, attach_to_all=True,
                automatic_options=True):
    """Decorator factory that adds CORS headers to a view's responses.

    origin: allowed Origin value(s) -- a string or an iterable of strings.
    methods: explicit Access-Control-Allow-Methods list; defaults to the
        methods Flask reports on a default OPTIONS response.
    headers: optional Access-Control-Allow-Headers list.
    max_age: preflight cache lifetime in seconds (or a timedelta).
    attach_to_all: when False, only OPTIONS responses get the CORS headers.
    automatic_options: answer OPTIONS preflights with Flask's default
        response instead of invoking the wrapped view.

    NOTE(review): uses `basestring`, so this module is Python 2 only.
    """
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, basestring):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, basestring):
        origin = ', '.join(origin)
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()

    def get_methods():
        # honour an explicit method list, otherwise ask Flask
        if methods is not None:
            return methods
        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']

    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))
            if not attach_to_all and request.method != 'OPTIONS':
                return resp
            h = resp.headers
            h['Access-Control-Allow-Origin'] = origin
            h['Access-Control-Allow-Methods'] = get_methods()
            h['Access-Control-Max-Age'] = str(max_age)
            if headers is not None:
                h['Access-Control-Allow-Headers'] = headers
            return resp
        f.provide_automatic_options = False
        return update_wrapper(wrapped_function, f)
    return decorator
@app.route('/')
def index():
    """Inbox view: fetches threads from Gmail, parses them on worker
    threads, then renders piemail.html.

    NOTE(review): this revision contains cache-debug code -- the template
    output is commented out and a 'foo'/'bar' cache round-trip is rendered
    instead.  The worker threads are also never joined before
    parsedmessageset is snapshotted, so the parsed set may be incomplete.
    """
    basedir = os.path.abspath(os.path.dirname(__file__))
    templatedir = os.path.join(basedir, 'static/piemail/www/libs/templates/email-list.handlebars')
    # Python 2: bytes -> unicode; file handle is never closed
    source = open(templatedir, "r").read().decode('utf-8')
    template = compiler.compile(source)
    if 'credentials' not in session:
        return redirect(url_for('oauth2callback'))
    credentials = client.OAuth2Credentials.from_json(session['credentials'])
    if credentials.access_token_expired:
        return redirect(url_for('oauth2callback'))
    else:
        http_auth = credentials.authorize(httplib2.Http())
        service = discovery.build('gmail', 'v1', http=http_auth)
        results = service.users().threads().list(userId='me', maxResults=50, fields="threads/id", q="in:inbox").execute()
        # fetch full thread bodies in one batched request
        batch = service.new_batch_http_request(callback=processthreads)
        for thread in results['threads']:
            batch.add(service.users().threads().get(userId='me', id=thread['id']))
        # batch.add(service.users().threads().get(userId='me', id=thread['id'],
        # fields="messages/snippet, messages/internalDate, messages/labelIds, "
        # "messages/threadId, messages/payload/headers"))
        batch.execute()
        # one parser thread per fetched thread; results land in parsedmessageset
        for emailthread in fullmessageset:
            t = threading.Thread(target=parse_thread, kwargs={"emailthread": emailthread})
            t.start()
            # parse_thread(emailthread)
        newcollection = deepcopy(parsedmessageset)
        fullmessageset[:] = []
        parsedmessageset[:] = []
        context = newcollection
        # output = template(context)
        # cache.set(credentials.access_token, newcollection, 15)
        cache.set("foo", "bar")
        output = cache.get("foo")
        return render_template("piemail.html", output=output)
@app.route('/inbox', methods=['GET', 'POST', 'OPTIONS'])
@crossdomain(origin='*')
def inbox():
    """Return the cached parsed inbox for the signed-in user as JSON."""
    if 'credentials' not in session:
        return redirect(url_for('oauth2callback'))
    creds = client.OAuth2Credentials.from_json(session['credentials'])
    if creds.access_token_expired:
        return redirect(url_for('oauth2callback'))
    return json.dumps({'newcollection': cache.get(creds.access_token)})
@app.route('/signmeout', methods=['GET', 'POST', 'OPTIONS'])
@crossdomain(origin='*')
def signmeout():
    """Revoke the OAuth token, clear the session and show the login page.

    XHR callers receive a JSON payload pointing back at this URL so the
    browser performs the actual sign-out on the follow-up navigation.
    """
    if request.is_xhr:
        return json.dumps({'status': 'OK', 'redirect_url': '/signmeout'})
    creds = client.OAuth2Credentials.from_json(session['credentials'])
    creds.revoke(httplib2.Http())
    session.clear()
    return render_template("login.html")
@app.route('/threadslist', methods=['POST', 'GET', 'OPTIONS'])
@crossdomain(origin='*')
def threadslist():
    """Fetch every message of the thread given by the 'threadid' value and
    return the parsed messages as JSON."""
    if 'credentials' not in session:
        return redirect(url_for('oauth2callback'))
    credentials = client.OAuth2Credentials.from_json(session['credentials'])
    if credentials.access_token_expired:
        return redirect(url_for('oauth2callback'))
    http_auth = credentials.authorize(httplib2.Http())
    service = discovery.build('gmail', 'v1', http=http_auth)
    threadid = request.values['threadid']
    try:
        thread = service.users().threads().get(userId='me', id=threadid).execute()
    except errors.HttpError as error:  # 'as' syntax works on Python 2.6+ and 3
        print('An error occurred: %s' % error)
        # NOTE(review): jsonify() of an HttpError object is unlikely to
        # serialize; confirm the intended error payload.
        return jsonify(error)
    batch = service.new_batch_http_request(callback=processmessages)
    for message in thread['messages']:
        batch.add(service.users().messages().get(userId='me', id=message['id']))
    batch.execute()
    # Wait for every parser thread: the original read parsedmessageset
    # without joining and could return a partial message list.
    workers = []
    for emailmessage in fullmessageset:
        m = threading.Thread(target=parse_message, kwargs={"emailmessage": emailmessage})
        m.start()
        workers.append(m)
    for m in workers:
        m.join()
    response = dict()
    response['iserror'] = False
    response['savedsuccess'] = True
    response['currentMessageList'] = deepcopy(parsedmessageset)
    fullmessageset[:] = []
    parsedmessageset[:] = []
    return jsonify(response)
def processthreads(request_id, response, exception):
    """Batch callback: stash (request_id, first message) of each fetched thread."""
    if exception is None:
        fullmessageset.append((request_id, response['messages'][0]))
def processmessages(request_id, response, exception):
    """Batch callback: stash each fetched message keyed by its request id."""
    if exception is None:
        fullmessageset.append((request_id, response))
@app.route('/emaildata/<emailid>', methods=['POST', 'GET', 'OPTIONS'])
@crossdomain(origin='*')
def emaildata(emailid):
    """Render the detail view for a single email id."""
    context = {'emailid': emailid}
    return render_template('emaildata.html', **context)
@app.context_processor
def inject_static_url():
    """Expose static_url / local_static_url / host_url to every template.

    On Heroku the local static path is swapped for the S3 bucket URL.
    """
    s3_url = 'https://s3.amazonaws.com/netbardus/'
    on_heroku = os.environ.get('HEROKU') is not None
    local_url = s3_url if on_heroku else app.static_url_path
    if not s3_url.endswith('/'):
        s3_url += '/'
    if not local_url.endswith('/'):
        local_url += '/'
    return dict(
        static_url=s3_url,
        local_static_url=local_url,
        host_url=request.url_root
    )
def parse_thread(emailthread):
    """Convert one (ordinal, gmail-thread-dict) pair into a flat template dict
    and append it to parsedmessageset (runs on a worker thread)."""
    ordinal, thread = emailthread
    # hoist the label list: the original re-indexed emailthread[1]['labelIds']
    # on nearly every line
    labels = thread['labelIds']
    threaditems = dict()
    # Gmail label ids seen here: INBOX, SENT, STARRED, UNREAD, IMPORTANT and
    # the CATEGORY_* tabs (SOCIAL, PERSONAL, PROMOTIONS, FORUMS, UPDATES).
    threaditems['labels'] = labels
    threaditems['unread'] = 'UNREAD' in labels
    threaditems['star'] = 'STARRED' in labels
    if 'CATEGORY_PROMOTIONS' in labels:
        threaditems['category'] = 'promotions'
    if 'CATEGORY_SOCIAL' in labels:
        threaditems['category'] = 'social'
    if 'CATEGORY_UPDATES' in labels:
        threaditems['category'] = 'updates'
    if 'CATEGORY_FORUMS' in labels:
        threaditems['category'] = 'forums'
    # "primary" = inboxed but in none of the category tabs
    if 'INBOX' in labels and not any(
            c in labels for c in ('CATEGORY_SOCIAL', 'CATEGORY_PROMOTIONS',
                                  'CATEGORY_UPDATES', 'CATEGORY_FORUMS')):
        threaditems['category'] = 'primary'
    if 'SENT' in labels:
        threaditems['category'] = 'sent'
    threaditems['inbox'] = 'INBOX' in labels
    threaditems['threadId'] = thread['threadId']
    threaditems['id'] = thread['threadId']
    threaditems['snippet'] = thread['snippet'] + "..."
    # internalDate is epoch milliseconds
    threaditems['timestamp'] = datetime.datetime.fromtimestamp(
        float(thread['internalDate']) / 1000.).strftime("%I:%M %p %b %d")
    threaditems['sender'] = getheaders(thread, "From")
    threaditems['subject'] = getheaders(thread, "Subject")
    threaditems['ordinal'] = ordinal
    threaditems['body'] = getbody(thread)
    threaditems['rawtimestamp'] = thread['internalDate']
    parsedmessageset.append(threaditems)
def parse_message(emailmessage):
    """Flatten one (ordinal, gmail-message-dict) pair and append it to
    parsedmessageset (runs on a worker thread)."""
    ordinal, message = emailmessage
    # internalDate is epoch milliseconds
    stamp = datetime.datetime.fromtimestamp(float(message['internalDate']) / 1000.)
    messageitems = {
        'id': message['id'],
        'threadId': message['threadId'],
        'snippet': message['snippet'],
        'timestamp': stamp.strftime("%H:%M:%S %Y-%m-%d "),
        'sender': getheaders(message, "From"),
        'subject': getheaders(message, "Subject"),
        'body': getbody(message),
        'ordinal': ordinal,
    }
    parsedmessageset.append(messageitems)
def getheaders(emailthread, key):
    """Return the value of the first payload header named *key* (None if absent)."""
    headers = emailthread['payload']['headers']
    matches = (h['value'] for h in headers if h['name'] == key)
    return next(matches, None)
def getbody(message):
    """Decode the HTML body of a gmail message payload.

    Multipart payloads are searched for the text/html part; inline 'cid:'
    image references are rewritten to a placeholder icon because attachment
    content is not served.
    """
    payload = message['payload']
    if 'parts' in payload:
        encodedbody = gethtmlpart(payload['parts'])
    else:
        encodedbody = payload['body']['data']
    decodedbody = base64.urlsafe_b64decode(str(encodedbody))
    # cid hack: swap inline-attachment references for a stock icon
    return re.sub(r'src="cid:([^"]+)"',
                  "src='/static/piemail/www/icons/no_image_available.svg'",
                  decodedbody)
def gethtmlpart(parts):
    """Depth-first search a gmail 'parts' tree for the first text/html body.

    Returns the part's base64url data, or '' when no html part exists.
    """
    for part in parts:
        if 'parts' in part:
            # Recurse, but keep scanning later siblings when this subtree
            # holds no html part.  (The original returned the recursion
            # result unconditionally and could miss an html part in a
            # following sibling.)
            found = gethtmlpart(part['parts'])
            if found:
                return found
        elif part['mimeType'] == 'text/html':
            return part['body']['data']
    return ''
@app.route('/oauth2callback', methods=['POST', 'GET', 'OPTIONS'])
@crossdomain(origin='*')
def oauth2callback(final_url='index'):
    """Two-step Google OAuth2 dance; on success stores the credentials JSON
    in the session and redirects to *final_url*."""
    flow = client.flow_from_clientsecrets(
        'client_secrets.json',
        scope='https://mail.google.com/',
        redirect_uri=url_for('oauth2callback', _external=True)
    )
    auth_code = request.args.get('code')
    if auth_code is None:
        # step 1: send the user to Google's consent screen
        return redirect(flow.step1_get_authorize_url())
    # step 2: trade the authorization code for credentials
    credentials = flow.step2_exchange(auth_code)
    session['credentials'] = credentials.to_json()
    return redirect(url_for(final_url))
@app.errorhandler(404)
def not_found_error(error):
    """Custom 404 page."""
    body = render_template('404.html', error=error)
    return body, 404
@app.errorhandler(500)
def internal_error(error):
    """Custom 500 page."""
    body = render_template('500.html', error=error)
    return body, 500
|
from datetime import datetime
from flask import render_template, session, redirect, url_for, request, abort, flash, make_response
from flask.ext.sqlalchemy import get_debug_queries
from flask.ext.mail import Message
#from ..email import send_email
from flask.ext.login import login_user, logout_user, login_required, current_user
from . import main
from .forms import PostForm, EditProfileForm, EditProfileAdminForm, CommentForm, ContactForm
from .. import db
from ..models import User, Role, Permission, Post, Follow, Comment
from flask import current_app
from ..decorators import admin_required, permission_required
from app import pages, mail
@main.after_app_request
def after_app_request(response):
    """Log any DB query slower than SLOW_DB_QUERY_TIME, then pass the response on."""
    threshold = current_app.config['SLOW_DB_QUERY_TIME']
    for query in get_debug_queries():
        if query.duration >= threshold:
            current_app.logger.warning(
                'Slow query: %s\nParameters: %s\nDuration: %fs\nContext: %s\n'
                % (query.statement, query.parameters, query.duration, query.context))
    return response
@main.route('/shutdown')
def server_shutdown():
    """Werkzeug-only shutdown endpoint, enabled solely when app.testing is set."""
    if not current_app.testing:
        abort(404)
    shutdown = request.environ.get('werkzeug.server.shutdown')
    if shutdown is None:
        abort(500)
    shutdown()
    return 'Shutting down...'
@main.route('/', methods=['GET', 'POST'])
def index():
    """Home page: post form plus a paginated post list (all or followed-only)."""
    form = PostForm()
    if current_user.can(Permission.WRITE_ARTICLES) and form.validate_on_submit():
        post = Post(body=form.body.data, author=current_user._get_current_object())
        db.session.add(post)
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    # only authenticated users can filter to the people they follow
    show_followed = current_user.is_authenticated() and \
        bool(request.cookies.get('show_followed', ''))
    query = current_user.followed_posts if show_followed else Post.query
    pagination = query.order_by(Post.timestamp.desc()).paginate(
        page, per_page=current_app.config['POSTS_PER_PAGE'], error_out=False)
    return render_template('index.html', form=form, posts=pagination.items,
                           pagination=pagination, show_followed=show_followed)
@main.route('/all')
@login_required
def show_all():
    """Switch the index to 'all posts' by clearing the show_followed cookie."""
    one_month = 30 * 24 * 60 * 60  # cookie lifetime in seconds
    resp = make_response(redirect(url_for('.index')))
    resp.set_cookie('show_followed', '', max_age=one_month)
    return resp
@main.route('/followed')
@login_required
def show_followed():
    """Switch the index to 'followed posts' via the show_followed cookie."""
    one_month = 30 * 24 * 60 * 60  # cookie lifetime in seconds
    resp = make_response(redirect(url_for('.index')))
    resp.set_cookie('show_followed', '1', max_age=one_month)
    return resp
@main.route('/user/<username>')
def user(username):
    """Profile page for *username* with their posts, newest first (404 if unknown)."""
    profile = User.query.filter_by(username=username).first()
    if profile is None:
        abort(404)
    posts = profile.posts.order_by(Post.timestamp.desc()).all()
    return render_template('user.html', user=profile, posts=posts)
@main.route('/edit-profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
    """Let the signed-in user edit their name, location and about_me."""
    form = EditProfileForm()
    fields = ('name', 'location', 'about_me')
    if form.validate_on_submit():
        for attr in fields:
            setattr(current_user, attr, getattr(form, attr).data)
        db.session.add(current_user)
        flash('Your profile has been updated')
        return redirect(url_for('.user', username=current_user.username))
    # pre-populate the form on GET or failed validation
    for attr in fields:
        getattr(form, attr).data = getattr(current_user, attr)
    return render_template('edit_profile.html', form=form)
@main.route('/edit-profile/<int:id>', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
    """Admin-only profile editor: may also change email, username, confirmed and role."""
    user = User.query.get_or_404(id)
    form = EditProfileAdminForm(user=user)
    simple_fields = ('email', 'username', 'confirmed', 'name', 'location', 'about_me')
    if form.validate_on_submit():
        for attr in simple_fields:
            setattr(user, attr, getattr(form, attr).data)
        user.role = Role.query.get(form.role.data)  # role field holds a Role id
        db.session.add(user)
        flash('Your profile has been updated')
        return redirect(url_for('.user', username=user.username))
    # pre-populate the form on GET or failed validation
    for attr in simple_fields:
        getattr(form, attr).data = getattr(user, attr)
    form.role.data = user.role_id
    return render_template('edit_profile.html', form=form, user=user)
@main.route('/post/<int:id>', methods=['GET', 'POST'])
def post(id):
    """Single-post page with its comment form and paginated comments.

    page=-1 is a sentinel meaning "last page", so a freshly posted comment
    is immediately visible.
    """
    post = Post.query.get_or_404(id)
    form = CommentForm()
    if form.validate_on_submit():
        comment = Comment(body=form.body.data, post=post,
                          author=current_user._get_current_object())
        db.session.add(comment)
        flash('Your comment has been published.')
        return redirect(url_for('.post', id=post.id, page=-1))
    page = request.args.get('page', 1, type=int)
    if page == -1:
        # Use explicit floor division: on Python 3 '/' on ints is true
        # division and would yield a float page number.
        page = (post.comments.count() - 1) // \
            current_app.config['COMMENTS_PER_PAGE'] + 1
    pagination = post.comments.order_by(Comment.timestamp.asc()).paginate(
        page, per_page=current_app.config['COMMENTS_PER_PAGE'], error_out=False)
    comments = pagination.items
    return render_template('post.html', posts=[post], form=form,
                           comments=comments, pagination=pagination)
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit(id):
    """Edit a post's body; only its author or an administrator may do so."""
    post = Post.query.get_or_404(id)
    is_author = current_user == post.author
    if not is_author and not current_user.can(Permission.ADMINISTER):
        abort(403)
    form = PostForm()
    if form.validate_on_submit():
        post.body = form.body.data
        db.session.add(post)
        flash('The post has been updated.')
        return redirect(url_for('.post', id=post.id))
    form.body.data = post.body
    return render_template('edit_post.html', form=form)
@main.route('/follow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def follow(username):
    """Make the current user follow *username*."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid User.')
        return redirect(url_for('.index'))
    if current_user.is_following(user):
        # message typo fixed ('Your are' -> 'You are')
        flash('You are already following this user.')
        return redirect(url_for('.user', username=username))
    current_user.follow(user)
    flash('You are now following %s.' % username)
    return redirect(url_for('.user', username=username))
@main.route('/unfollow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def unfollow(username):
    """Stop the current user from following *username*."""
    target = User.query.filter_by(username=username).first()
    if target is None:
        flash('Invalid User.')
        return redirect(url_for('.index'))
    current_user.unfollow(target)
    flash('You are now unfollowing %s.' % username)
    return redirect(url_for('.user', username=username))
@main.route('/followers/<username>')
def followers(username):
    """Paginated list of the users who follow *username*."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    pagination = user.followers.paginate(
        page, per_page=current_app.config['FOLLOWERS_PER_PAGE'], error_out=False)
    follows = [{'user': f.follower, 'timestamp': f.timestamp}
               for f in pagination.items]
    return render_template('followers.html', user=user, title='Followers of',
                           endpoint='.followers', pagination=pagination,
                           follows=follows)
@main.route('/followed_by/<username>')
def followed_by(username):
    """Paginated list of the users that *username* follows."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    pagination = user.followed.paginate(
        page, per_page=current_app.config['FOLLOWERS_PER_PAGE'], error_out=False)
    follows = [{'user': item.followed, 'timestamp': item.timestamp}
               for item in pagination.items]
    # BUG FIX: pagination links must point back at THIS endpoint; the
    # original passed '.followers', so page 2+ of "followed by" jumped to
    # the followers list instead.
    return render_template('followers.html', user=user, title='Followed by',
                           endpoint='.followed_by', pagination=pagination,
                           follows=follows)
@main.route('/moderate')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate():
    """Moderation dashboard: all comments, newest first, paginated."""
    page = request.args.get('page', 1, type=int)
    pagination = Comment.query.order_by(Comment.timestamp.desc()).paginate(
        page, per_page=current_app.config['COMMENTS_PER_PAGE'], error_out=False)
    return render_template('moderate.html', pagination=pagination,
                           comments=pagination.items, page=page)
@main.route('/contact/', methods=['GET', 'POST'])
def contact_us():
    """Contact form: on valid POST, email the admins; otherwise re-show the form."""
    form = ContactForm()
    if request.method == 'POST':
        # idiom fix: 'not form.validate()' instead of '== False'
        if not form.validate():
            flash("All fields are required.")
            return render_template('contact.html', form=form)
        msg = Message(form.subject.data, sender=form.email.data,
                      recipients=current_app.config['ADMIN'])
        msg.body = """
From: %s <%s>
%s
""" % (form.name.data, form.email.data, form.message.data)
        mail.send(msg)
        return render_template('contact.html', success=True)
    # GET: blank form (the route only accepts GET and POST)
    return render_template('contact.html', form=form)
@main.route('/moderate/enable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_enable(id):
    """Un-hide a comment, then return to the same moderation page."""
    comment = Comment.query.get_or_404(id)
    comment.disabled = False
    db.session.add(comment)
    page = request.args.get('page', 1, type=int)
    return redirect(url_for('.moderate', page=page))
@main.route('/moderate/disable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_disable(id):
    """Hide a comment, then return to the same moderation page."""
    comment = Comment.query.get_or_404(id)
    comment.disabled = True
    db.session.add(comment)
    page = request.args.get('page', 1, type=int)
    return redirect(url_for('.moderate', page=page))
@main.route('/<path:path>/')
def page(path):
    """Serve a flat page by path (404 when the path is unknown)."""
    flat_page = pages.get_or_404(path)
    return render_template('page.html', page=flat_page)
#@main.route('/admin')
#@login_required
#@admin_required
#def for_admin_only():
#return "for admins!"
#@main.route('/moderator')
#@login_required
#@permission_required(Permission.MODERATE_COMMENTS)
#def for_moderators_only():
#return "for moderators !"
Update views.py
from datetime import datetime
from flask import render_template, session, redirect, url_for, request, abort, flash, make_response
from flask.ext.sqlalchemy import get_debug_queries
from flask.ext.mail import Message
#from ..email import send_email
from flask.ext.login import login_user, logout_user, login_required, current_user
from . import main
from .forms import PostForm, EditProfileForm, EditProfileAdminForm, CommentForm, ContactForm
from .. import db
from ..models import User, Role, Permission, Post, Follow, Comment
from flask import current_app
from ..decorators import admin_required, permission_required
from app import pages, mail
@main.after_app_request
def after_app_request(response):
    """Log any DB query slower than SLOW_DB_QUERY_TIME, then pass the response on."""
    threshold = current_app.config['SLOW_DB_QUERY_TIME']
    for query in get_debug_queries():
        if query.duration >= threshold:
            current_app.logger.warning(
                'Slow query: %s\nParameters: %s\nDuration: %fs\nContext: %s\n'
                % (query.statement, query.parameters, query.duration, query.context))
    return response
@main.route('/shutdown')
def server_shutdown():
    """Werkzeug-only shutdown endpoint, enabled solely when app.testing is set."""
    if not current_app.testing:
        abort(404)
    shutdown = request.environ.get('werkzeug.server.shutdown')
    if shutdown is None:
        abort(500)
    shutdown()
    return 'Shutting down...'
@main.route('/', methods=['GET', 'POST'])
def index():
    """Home page: post form plus a paginated post list (all or followed-only)."""
    form = PostForm()
    if current_user.can(Permission.WRITE_ARTICLES) and form.validate_on_submit():
        post = Post(body=form.body.data, author=current_user._get_current_object())
        db.session.add(post)
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    # only authenticated users can filter to the people they follow
    show_followed = current_user.is_authenticated() and \
        bool(request.cookies.get('show_followed', ''))
    query = current_user.followed_posts if show_followed else Post.query
    pagination = query.order_by(Post.timestamp.desc()).paginate(
        page, per_page=current_app.config['POSTS_PER_PAGE'], error_out=False)
    return render_template('index.html', form=form, posts=pagination.items,
                           pagination=pagination, show_followed=show_followed)
@main.route('/all')
@login_required
def show_all():
    """Clear the 'show_followed' cookie so the index lists every post."""
    response = make_response(redirect(url_for('.index')))
    # Cookie kept for 30 days.
    response.set_cookie('show_followed', '', max_age=30*24*60*60)
    return response
@main.route('/followed')
@login_required
def show_followed():
    """Set the 'show_followed' cookie so the index lists followed posts only."""
    response = make_response(redirect(url_for('.index')))
    # Cookie kept for 30 days.
    response.set_cookie('show_followed', '1', max_age=30*24*60*60)
    return response
@main.route('/user/<username>')
def user(username):
    """Profile page for *username* with their posts, newest first.

    Responds 404 when the username does not exist.
    """
    # first_or_404() replaces the manual first()/abort(404) pair — identical
    # behavior, and consistent with the other user view in this file.
    user = User.query.filter_by(username=username).first_or_404()
    posts = user.posts.order_by(Post.timestamp.desc()).all()
    return render_template('user.html', user=user, posts=posts)
@main.route('/edit-profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
    """Let the logged-in user edit their own name, location and about-me."""
    form = EditProfileForm()
    if form.validate_on_submit():
        current_user.name = form.name.data
        current_user.location = form.location.data
        current_user.about_me = form.about_me.data
        db.session.add(current_user)
        flash('Your profile has been updated')
        return redirect(url_for('.user', username=current_user.username))
    # GET (or invalid POST): pre-fill the form with the current values.
    form.name.data = current_user.name
    form.location.data = current_user.location
    form.about_me.data = current_user.about_me
    return render_template('edit_profile.html', form=form)
@main.route('/edit-profile/<int:id>', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
    """Admin-only editor for any user's account and profile fields.

    Responds 404 when *id* does not exist.
    """
    user = User.query.get_or_404(id)
    form = EditProfileAdminForm(user=user)
    if form.validate_on_submit():
        user.email = form.email.data
        user.username = form.username.data
        user.confirmed = form.confirmed.data
        user.role = Role.query.get(form.role.data)
        user.name = form.name.data
        user.location = form.location.data
        user.about_me = form.about_me.data
        db.session.add(user)
        # FIX: the admin edits someone else's account, so the message must
        # say "The profile", not "Your profile".
        flash('The profile has been updated')
        return redirect(url_for('.user', username=user.username))
    # GET (or invalid POST): pre-fill the form with the stored values.
    form.email.data = user.email
    form.username.data = user.username
    form.confirmed.data = user.confirmed
    form.role.data = user.role_id
    form.name.data = user.name
    form.location.data = user.location
    form.about_me.data = user.about_me
    return render_template('edit_profile.html', form=form, user=user)
@main.route('/post/<int:id>', methods=['GET', 'POST'])
def post(id):
    """Single-post page with its comment thread and a new-comment form."""
    post = Post.query.get_or_404(id)
    form = CommentForm()
    if form.validate_on_submit():
        comment = Comment(body=form.body.data, post=post, author=current_user._get_current_object())
        db.session.add(comment)
        flash('Your comment has been published.')
        # page=-1 is a sentinel meaning "jump to the last page", resolved below.
        return redirect(url_for('.post', id=post.id, page=-1))
    page = request.args.get('page', 1, type=int)
    if page == -1:
        # BUG FIX: use floor division — '/' yields a float on Python 3, and
        # paginate() needs an integer page number.
        page = (post.comments.count() - 1) // current_app.config['COMMENTS_PER_PAGE'] + 1
    pagination = post.comments.order_by(Comment.timestamp.asc()).paginate(page, per_page=current_app.config['COMMENTS_PER_PAGE'], error_out=False)
    comments = pagination.items
    return render_template('post.html', posts=[post], form=form, comments=comments, pagination=pagination)
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit(id):
    """Edit an existing post; only its author or an administrator may."""
    post = Post.query.get_or_404(id)
    if current_user != post.author and not current_user.can(Permission.ADMINISTER):
        abort(403)
    form = PostForm()
    if form.validate_on_submit():
        post.body = form.body.data
        db.session.add(post)
        flash('The post has been updated.')
        return redirect(url_for('.post', id=post.id))
    # GET (or invalid POST): pre-fill with the current body.
    form.body.data = post.body
    return render_template('edit_post.html', form=form)
@main.route('/follow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def follow(username):
    """Make the current user follow *username*."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid User.')
        return redirect(url_for('.index'))
    if current_user.is_following(user):
        # FIX: typo in the user-facing message ("Your are" -> "You are").
        flash('You are already following this user.')
        return redirect(url_for('.user', username=username))
    current_user.follow(user)
    flash('You are now following %s.' % username)
    return redirect(url_for('.user', username=username))
@main.route('/unfollow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def unfollow(username):
    """Make the current user stop following *username*."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid User.')
        return redirect(url_for('.index'))
    # Robustness: don't attempt to unfollow someone the user never followed
    # (mirrors the is_following guard used by follow()).
    if not current_user.is_following(user):
        flash('You are not following this user.')
        return redirect(url_for('.user', username=username))
    current_user.unfollow(user)
    flash('You are now unfollowing %s.' % username)
    return redirect(url_for('.user', username=username))
@main.route('/followers/<username>')
def followers(username):
    """Paginated list of the users following *username*."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    pagination = user.followers.paginate(page, per_page=current_app.config['FOLLOWERS_PER_PAGE'], error_out=False)
    # Flatten the Follow association rows into dicts for the template.
    follows = [{'user': item.follower, 'timestamp': item.timestamp} for item in pagination.items]
    return render_template('followers.html', user=user, title='Followers of', endpoint='.followers', pagination=pagination, follows=follows)
@main.route('/followed_by/<username>')
def followed_by(username):
    """Paginated list of the users that *username* follows."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    pagination = user.followed.paginate(page, per_page=current_app.config['FOLLOWERS_PER_PAGE'], error_out=False)
    # Flatten the Follow association rows into dicts for the template.
    follows = [{'user': item.followed, 'timestamp': item.timestamp} for item in pagination.items]
    # BUG FIX: the pagination links must point back at this view
    # ('.followed_by'), not '.followers', or page 2+ shows the wrong list.
    return render_template('followers.html', user=user, title='Followed by', endpoint='.followed_by', pagination=pagination, follows=follows)
@main.route('/moderate')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate():
    """Comment-moderation dashboard: all comments, newest first, paginated."""
    page = request.args.get('page', 1, type=int)
    pagination = Comment.query.order_by(Comment.timestamp.desc()).paginate(page, per_page=current_app.config['COMMENTS_PER_PAGE'], error_out=False)
    comments = pagination.items
    # 'page' is passed through so enable/disable links can return here.
    return render_template('moderate.html', pagination=pagination, comments=comments, page=page)
@main.route('/contact/', methods=['GET', 'POST'])
def contact_us():
    """Contact form: on a valid POST, e-mail the message to the site admins.

    Renders the (possibly error-flashed) form on GET or invalid input.
    """
    form = ContactForm()
    if request.method == 'POST':
        # Idiom fix: 'not form.validate()' instead of '== False'.
        if not form.validate():
            flash("All fields are required.")
            return render_template('contact.html', form=form)
        msg = Message(form.subject.data, sender=form.email.data, recipients=current_app.config['ADMIN'])
        msg.body = """
From: %s <%s>
%s
""" % (form.name.data, form.email.data, form.message.data)
        mail.send(msg)
        return render_template('contact.html', success=True, form=form)
    # Fix: the old 'elif request.method == GET' left an implicit None return
    # for any other method; the route only allows GET/POST, so render here.
    return render_template('contact.html', form=form)
@main.route('/moderate/enable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_enable(id):
    """Un-hide a comment, then return to the same moderation page."""
    comment = Comment.query.get_or_404(id)
    comment.disabled = False
    db.session.add(comment)
    return redirect(url_for('.moderate', page=request.args.get('page', 1, type=int)))
@main.route('/moderate/disable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_disable(id):
    """Hide a comment, then return to the same moderation page."""
    comment = Comment.query.get_or_404(id)
    comment.disabled = True
    db.session.add(comment)
    return redirect(url_for('.moderate', page=request.args.get('page', 1, type=int)))
@main.route('/<path:path>/')
def page(path):
    """Render a static page from the `pages` collection; 404 if unknown."""
    page = pages.get_or_404(path)
    return render_template('page.html', page=page)
    #return pages.get_or_404(path).html
#@main.route('/admin')
#@login_required
#@admin_required
#def for_admin_only():
#return "for admins!"
#@main.route('/moderator')
#@login_required
#@permission_required(Permission.MODERATE_COMMENTS)
#def for_moderators_only():
#return "for moderators !"
|
from app import app
import os
from flask import abort, \
flash, \
redirect, \
render_template, \
request, \
session, \
url_for
from flask.ext.login import current_user, \
login_user, \
logout_user, \
login_required
from .forms import LoginForm,\
GalleryForm
from .database import find_user_by_name, \
find_gallery_all, \
find_gallery_by_id, \
db, \
Gallery, \
Photo
from .helpers import delete_photo
import json
import base64
import hmac
import hashlib
from url_decode import urldecode
@app.route('/', methods=['GET'])
@login_required
def home():
    """ Home page """
    return render_template('index.html')


@app.route('/<int:id>-<name>', methods=['GET'])
@login_required
def home_gallery(id, name):
    """ Home page """
    # id/name are accepted in the URL but the server renders the same index
    # template; presumably the gallery is selected client-side — TODO confirm.
    return render_template('index.html')


@app.route("/login", methods=["GET", "POST"])
def login():
    """ Login page """
    if current_user.is_authenticated():
        return redirect(url_for('home'))
    form = LoginForm()
    if form.validate_on_submit():
        user = find_user_by_name(form.username.data)
        if user is None or not user.is_valid_password(form.password.data):
            flash('Invalid username or password', 'danger')
        elif login_user(user, remember=form.remember.data):
            # Enable session expiration only if user hasn't chosen to be remembered.
            session.permanent = not form.remember.data
            # NOTE(review): 'next' comes from the query string unvalidated —
            # a classic open-redirect vector; consider requiring a relative URL.
            return redirect(request.args.get('next') or url_for('home'))
    elif form.errors:
        flash('Invalid username or password', 'danger')
    return render_template('login.html', form=form)


@app.route('/logout')
@login_required
def logout():
    """ Logout the user """
    logout_user()
    return redirect(url_for('login'))
@app.route('/create/', methods=['GET', 'POST'])
@login_required
def gallery_create():
    """ Create a gallery """
    form = GalleryForm()
    if form.validate_on_submit():
        gallery = Gallery(
            name=form.name.data,
            created=form.date.data
        )
        db.session.add(gallery)
        db.session.commit()
        # Go straight to the upload screen for the newly created gallery.
        return redirect(url_for('gallery_upload', gallery_id=gallery.id))
    return render_template('gallery.html',
        form=form,
        page_title="Create an Album",
        form_action=url_for('gallery_create'),
        form_submit_button_title="Create"
    )


@app.route('/update/<int:gallery_id>', methods=['GET', 'POST'])
@login_required
def gallery_update(gallery_id):
    """ Updated a gallery """
    gallery = find_gallery_by_id(gallery_id)
    if not gallery:
        abort(404)
    if request.method == 'GET':
        # Pre-populate the form from the stored gallery.
        form = GalleryForm(
            name=gallery.name,
            date=gallery.created
        )
    elif request.method == 'POST':
        form = GalleryForm()
        if form.validate_on_submit():
            gallery.name = form.name.data
            gallery.created = form.date.data
            db.session.add(gallery)
            db.session.commit()
            return redirect(url_for('home'))
    # Invalid POST falls through and re-renders the bound form with errors.
    return render_template('gallery.html',
        form=form,
        page_title="Update %s" % gallery.name,
        form_action=url_for('gallery_update', gallery_id=gallery.id),
        form_submit_button_title="Update"
    )
@app.route('/upload/<int:gallery_id>')
def gallery_upload(gallery_id):
    """ Upload photos to a gallery """
    # NOTE(review): every other view in this module is @login_required but
    # this one is not, yet it hands out a signed S3 upload policy — confirm
    # anonymous access is intentional.
    gallery = find_gallery_by_id(gallery_id)
    if not gallery:
        abort(404)
    s3_success_action_status = '201'
    s3_acl = "public-read"
    folder = "%s/" % gallery.folder
    # Browser-based S3 POST policy: restricts uploads to this gallery's
    # folder and caps the object size at MAX_UPLOAD_SIZE.
    s3_policy = {
        "expiration": "2038-01-01T00:00:00Z",
        "conditions": [
            {"bucket": app.config['AWS_S3_BUCKET']},
            ["starts-with", "$key", folder],
            {"acl": s3_acl},
            {"success_action_status": s3_success_action_status},
            ["content-length-range", 0, app.config['MAX_UPLOAD_SIZE']]
        ]
    }
    policy = base64.b64encode(json.dumps(s3_policy))
    # HMAC-SHA1 over the base64 policy, keyed with the AWS secret key.
    signature = base64.b64encode(hmac.new(app.config['AWS_SECRET_ACCESS_KEY'], policy, hashlib.sha1).digest())
    return render_template('upload.html',
        gallery=gallery,
        aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],
        s3_acl=s3_acl,
        s3_bucket=app.config['AWS_S3_BUCKET'],
        s3_folder=folder,
        s3_policy=policy,
        s3_signature=signature,
        s3_success_action_status=s3_success_action_status,
        max_upload_size=app.config['MAX_UPLOAD_SIZE']
    )
@app.route('/rest/gallery/<int:page_num>', methods=['GET', 'POST'])
@login_required
def gallery_index(page_num):
    """ List all galleries, or add a new one """
    if request.method == 'GET':
        # Fixed page size of 10 galleries per request.
        limit = 10
        offset = (page_num - 1) * limit
        galleries = find_gallery_all(offset, limit)
        response = []
        for gallery in galleries:
            response.append(gallery.to_object())
    elif request.method == 'POST':
        gallery = Gallery(name=request.form['name'])
        db.session.add(gallery)
        db.session.commit()
        response = gallery.to_object()
    return app.response_class(response=json.dumps(response), mimetype='application/json')


@app.route('/rest/gallery/<int:gallery_id>', methods=['DELETE'])
@login_required
def gallery_item(gallery_id):
    """ Get/update/delete an individual gallery """
    gallery = find_gallery_by_id(gallery_id)
    if not gallery:
        abort(404)
    # NOTE(review): the route only accepts DELETE, so this GET branch is
    # currently unreachable dead code.
    if request.method == 'GET':
        response = gallery.to_object()
    elif request.method == 'DELETE':
        # Delete all photos from the gallery
        gallery.delete()
        # Delete the gallery from the database
        db.session.delete(gallery)
        db.session.commit()
        response = []
    return app.response_class(response=json.dumps(response), mimetype='application/json')


@app.route('/rest/photo/', methods=['POST'])
@login_required
def photo_index():
    """ Add a photo to a gallery """
    photo = Photo(
        name=urldecode(request.form['name']),
        ext=request.form['ext'],
        aspect_ratio=float(request.form['aspect_ratio']),
        gallery_id=request.form['gallery_id'],
        owner_id=int(current_user.id),
        created=request.form['created']
    )
    # Save the updated photos JSON for this gallery
    photo.save()
    # Tell the thumbnail daemon to generate a thumbnail for this photo
    photo.generate_thumbnail()
    # Update the gallery modified date
    photo.gallery.updateModified()
    return app.response_class(response=json.dumps(photo.to_object()), mimetype='application/json')


@app.route('/rest/photo/<int:gallery_id>/<string:photo_id>', methods=['DELETE'])
@login_required
def photo_delete(gallery_id, photo_id):
    """ Delete a photo from a gallery """
    gallery = find_gallery_by_id(gallery_id)
    if not gallery:
        abort(404)
    response = []
    if not delete_photo(gallery.folder, photo_id):
        # Failure is signalled in the JSON body; the HTTP status stays 200.
        response = ["error"]
    # Update the gallery modified date
    gallery.updateModified()
    return app.response_class(response=json.dumps(response), mimetype='application/json')
Show 5 galleries at a time
from app import app
import os
from flask import abort, \
flash, \
redirect, \
render_template, \
request, \
session, \
url_for
from flask.ext.login import current_user, \
login_user, \
logout_user, \
login_required
from .forms import LoginForm,\
GalleryForm
from .database import find_user_by_name, \
find_gallery_all, \
find_gallery_by_id, \
db, \
Gallery, \
Photo
from .helpers import delete_photo
import json
import base64
import hmac
import hashlib
from url_decode import urldecode
# NOTE(review): this section is a second, near-identical copy of the gallery
# views defined earlier in this file; the only functional difference is the
# REST page size in gallery_index (limit = 5 instead of 10). Two copies in
# one module would register duplicate routes — confirm which copy is live.
@app.route('/', methods=['GET'])
@login_required
def home():
    """ Home page """
    return render_template('index.html')


@app.route('/<int:id>-<name>', methods=['GET'])
@login_required
def home_gallery(id, name):
    """ Home page """
    return render_template('index.html')


@app.route("/login", methods=["GET", "POST"])
def login():
    """ Login page """
    if current_user.is_authenticated():
        return redirect(url_for('home'))
    form = LoginForm()
    if form.validate_on_submit():
        user = find_user_by_name(form.username.data)
        if user is None or not user.is_valid_password(form.password.data):
            flash('Invalid username or password', 'danger')
        elif login_user(user, remember=form.remember.data):
            # Enable session expiration only if user hasn't chosen to be remembered.
            session.permanent = not form.remember.data
            return redirect(request.args.get('next') or url_for('home'))
    elif form.errors:
        flash('Invalid username or password', 'danger')
    return render_template('login.html', form=form)


@app.route('/logout')
@login_required
def logout():
    """ Logout the user """
    logout_user()
    return redirect(url_for('login'))


@app.route('/create/', methods=['GET', 'POST'])
@login_required
def gallery_create():
    """ Create a gallery """
    form = GalleryForm()
    if form.validate_on_submit():
        gallery = Gallery(
            name=form.name.data,
            created=form.date.data
        )
        db.session.add(gallery)
        db.session.commit()
        return redirect(url_for('gallery_upload', gallery_id=gallery.id))
    return render_template('gallery.html',
        form=form,
        page_title="Create an Album",
        form_action=url_for('gallery_create'),
        form_submit_button_title="Create"
    )


@app.route('/update/<int:gallery_id>', methods=['GET', 'POST'])
@login_required
def gallery_update(gallery_id):
    """ Updated a gallery """
    gallery = find_gallery_by_id(gallery_id)
    if not gallery:
        abort(404)
    if request.method == 'GET':
        form = GalleryForm(
            name=gallery.name,
            date=gallery.created
        )
    elif request.method == 'POST':
        form = GalleryForm()
        if form.validate_on_submit():
            gallery.name = form.name.data
            gallery.created = form.date.data
            db.session.add(gallery)
            db.session.commit()
            return redirect(url_for('home'))
    return render_template('gallery.html',
        form=form,
        page_title="Update %s" % gallery.name,
        form_action=url_for('gallery_update', gallery_id=gallery.id),
        form_submit_button_title="Update"
    )


@app.route('/upload/<int:gallery_id>')
def gallery_upload(gallery_id):
    """ Upload photos to a gallery """
    gallery = find_gallery_by_id(gallery_id)
    if not gallery:
        abort(404)
    s3_success_action_status = '201'
    s3_acl = "public-read"
    folder = "%s/" % gallery.folder
    s3_policy = {
        "expiration": "2038-01-01T00:00:00Z",
        "conditions": [
            {"bucket": app.config['AWS_S3_BUCKET']},
            ["starts-with", "$key", folder],
            {"acl": s3_acl},
            {"success_action_status": s3_success_action_status},
            ["content-length-range", 0, app.config['MAX_UPLOAD_SIZE']]
        ]
    }
    policy = base64.b64encode(json.dumps(s3_policy))
    signature = base64.b64encode(hmac.new(app.config['AWS_SECRET_ACCESS_KEY'], policy, hashlib.sha1).digest())
    return render_template('upload.html',
        gallery=gallery,
        aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],
        s3_acl=s3_acl,
        s3_bucket=app.config['AWS_S3_BUCKET'],
        s3_folder=folder,
        s3_policy=policy,
        s3_signature=signature,
        s3_success_action_status=s3_success_action_status,
        max_upload_size=app.config['MAX_UPLOAD_SIZE']
    )


@app.route('/rest/gallery/<int:page_num>', methods=['GET', 'POST'])
@login_required
def gallery_index(page_num):
    """ List all galleries, or add a new one """
    if request.method == 'GET':
        # Page size: 5 galleries at a time.
        limit = 5
        offset = (page_num - 1) * limit
        galleries = find_gallery_all(offset, limit)
        response = []
        for gallery in galleries:
            response.append(gallery.to_object())
    elif request.method == 'POST':
        gallery = Gallery(name=request.form['name'])
        db.session.add(gallery)
        db.session.commit()
        response = gallery.to_object()
    return app.response_class(response=json.dumps(response), mimetype='application/json')


@app.route('/rest/gallery/<int:gallery_id>', methods=['DELETE'])
@login_required
def gallery_item(gallery_id):
    """ Get/update/delete an individual gallery """
    gallery = find_gallery_by_id(gallery_id)
    if not gallery:
        abort(404)
    if request.method == 'GET':
        response = gallery.to_object()
    elif request.method == 'DELETE':
        # Delete all photos from the gallery
        gallery.delete()
        # Delete the gallery from the database
        db.session.delete(gallery)
        db.session.commit()
        response = []
    return app.response_class(response=json.dumps(response), mimetype='application/json')


@app.route('/rest/photo/', methods=['POST'])
@login_required
def photo_index():
    """ Add a photo to a gallery """
    photo = Photo(
        name=urldecode(request.form['name']),
        ext=request.form['ext'],
        aspect_ratio=float(request.form['aspect_ratio']),
        gallery_id=request.form['gallery_id'],
        owner_id=int(current_user.id),
        created=request.form['created']
    )
    # Save the updated photos JSON for this gallery
    photo.save()
    # Tell the thumbnail daemon to generate a thumbnail for this photo
    photo.generate_thumbnail()
    # Update the gallery modified date
    photo.gallery.updateModified()
    return app.response_class(response=json.dumps(photo.to_object()), mimetype='application/json')


@app.route('/rest/photo/<int:gallery_id>/<string:photo_id>', methods=['DELETE'])
@login_required
def photo_delete(gallery_id, photo_id):
    """ Delete a photo from a gallery """
    gallery = find_gallery_by_id(gallery_id)
    if not gallery:
        abort(404)
    response = []
    if not delete_photo(gallery.folder, photo_id):
        response = ["error"]
    # Update the gallery modified date
    gallery.updateModified()
    return app.response_class(response=json.dumps(response), mimetype='application/json')
|
from __future__ import print_function
from flask import Flask, render_template, redirect, url_for, abort, flash, request,\
current_app, make_response, g, jsonify
from flask import send_from_directory
from flask_login import login_required, current_user
from flask_sqlalchemy import get_debug_queries
from sqlalchemy import text
import os
import sys
import flask_sijax
from . import main
from .forms import EditProfileForm, EditProfileAdminForm, PostForm, CommentForm
from .. import db
from ..models import Permission, Role, User, Post, Comment, Workflow, WorkItem, DataSource, Data, DataType, OperationSource, Operation
from ..decorators import admin_required, permission_required
from .ajax import WorkflowHandler
from ..util import Utility
#from ..io import PosixFileSystem, HadoopFileSystem, getFileSystem
from ..biowl.fileop import PosixFileSystem, HadoopFileSystem, IOHelper
from ..biowl.phenoparser import PhenoWLInterpreter, PhenoWLCodeGenerator, PhenoWLParser, PythonGrammar
from ..biowl.timer import Timer
from ..models import Runnable
from ..biowl.tasks import runnable_manager
import json
from werkzeug import secure_filename
import mimetypes
from ..jobs import long_task, run_script, stop_script, sync_task_status_with_db, sync_task_status_with_db_for_user
# NOTE(review): stray application instance — the views in this module are
# registered on the 'main' blueprint and read configuration via current_app,
# so this second Flask app appears unused here; confirm nothing imports it
# before removing.
app = Flask(__name__)
@main.after_app_request
def after_request(response):
    """Log any query from this request slower than PHENOPROC_SLOW_DB_QUERY_TIME."""
    for query in get_debug_queries():
        if query.duration >= current_app.config['PHENOPROC_SLOW_DB_QUERY_TIME']:
            current_app.logger.warning(
                'Slow query: %s\nParameters: %s\nDuration: %fs\nContext: %s\n'
                % (query.statement, query.parameters, query.duration,
                   query.context))
    return response


@main.route('/shutdown')
def server_shutdown():
    """Stop the Werkzeug dev server; only reachable while testing."""
    if not current_app.testing:
        abort(404)
    shutdown = request.environ.get('werkzeug.server.shutdown')
    if not shutdown:
        abort(500)
    shutdown()
    return 'Shutting down...'
def load_data_sources():
    """Build the datasource tree shown in the file-browser pane.

    Every registered DataSource becomes a node; for the HDFS source (id 1)
    and the POSIX source (id 2), the current user's directory and the public
    directory are attached as children.
    """
    # construct data source tree
    datasources = DataSource.query.all()
    datasource_tree = { 'type': DataType.Custom, 'children': [] }
    for ds in datasources:
        datasource = { 'datasource': ds.id, 'type': DataType.Root, 'base':'', 'path': ds.url, 'name': ds.name, 'children': []}
        if ds.id == 1:
            # hdfs tree
            try:
                hdfs = HadoopFileSystem()
                if current_user.is_authenticated:
                    datasource['children'].append(hdfs.make_json(ds.id, Utility.get_rootdir(ds.id), current_user.username))
                datasource['children'].append(hdfs.make_json(ds.id, Utility.get_rootdir(ds.id), current_app.config['PUBLIC_DIR']))
            except:
                # NOTE(review): bare except keeps the page alive when the HDFS
                # cluster is unreachable, but it also hides real errors —
                # consider narrowing and logging the exception.
                pass
        elif ds.id == 2:
            # file system tree
            posixFS = PosixFileSystem()
            if current_user.is_authenticated:
                datasource['children'].append(posixFS.make_json(ds.id, Utility.get_rootdir(ds.id), current_user.username))
            datasource['children'].append(posixFS.make_json(ds.id, Utility.get_rootdir(ds.id), current_app.config['PUBLIC_DIR']))
        datasource_tree['children'].append(datasource)
    return datasource_tree


@main.route('/reloaddatasources', methods=['POST'])
def load_data_sources_json():
    """Return the datasource tree as JSON (used to refresh the browser pane)."""
    return json.dumps(load_data_sources())
@main.route('/', defaults={'id': ''}, methods = ['GET', 'POST'])
@main.route('/workflow/<int:id>/', methods = ['GET', 'POST'])
def index(id=None):
    """Main workbench page.

    Renders posts, the datasource tree, the operation tree and the current
    user's workflows/workitems; also dispatches Sijax AJAX requests.
    """
    id = Utility.ValueOrNone(id)
    # NOTE(review): if ValueOrNone returns None, 'id <= 0' raises a TypeError
    # on Python 3 — confirm the intended semantics.
    if id <= 0:
        # Fall back to ?workflow=<id> from the query string.
        id = request.args.get('workflow')
    if g.sijax.is_sijax_request:
        # Sijax request detected - let Sijax handle it
        g.sijax.register_object(WorkflowHandler)
        return g.sijax.process_request()
    form = PostForm()
    if current_user.can(Permission.WRITE_ARTICLES) and form.validate_on_submit():
        post = Post(body=form.body.data, author=current_user._get_current_object())
        db.session.add(post)
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    show_followed = False
    if current_user.is_authenticated:
        show_followed = bool(request.cookies.get('show_followed', ''))
    if show_followed:
        query = current_user.followed_posts
    else:
        query = Post.query
    pagination = query.order_by(Post.timestamp.desc()).paginate(
        page, per_page=current_app.config['PHENOPROC_POSTS_PER_PAGE'],
        error_out=False)
    posts = pagination.items
    datasource_tree = load_data_sources()
    # construct operation source tree
    operationsources = OperationSource.query.all()
    operation_tree = { 'name' : ('operations', ''), 'children' : [] }
    for ops in operationsources:
        operation_tree['children'].append({ 'name' : (ops.name, ops.id), 'children' : [] })
        for op in ops.operations:
            operation_tree['children'][-1]['children'].append({ 'name' : (op.name, op.id), 'children' : [] })
    # workflows tree
    workflows = []
    if current_user.is_authenticated:
        #workflows = Workflow.query.filter_by(user_id=current_user.id)
        #sql = 'SELECT workflows.*, MAX(time), taskstatus.name AS status FROM workflows JOIN users ON workflows.user_id = users.id LEFT JOIN workitems ON workflows.id = workitems.workflow_id LEFT JOIN tasks ON workitems.id = tasks.workitem_id LEFT JOIN tasklogs ON tasks.id=tasklogs.task_id JOIN taskstatus ON tasklogs.status_id=taskstatus.id GROUP BY workflows.id HAVING users.id=' + str(current_user.id)
        sql = 'SELECT workflows.*, MAX(time), taskstatus.name AS status FROM workflows LEFT JOIN workitems ON workflows.id = workitems.workflow_id LEFT JOIN tasks ON workitems.id = tasks.workitem_id LEFT JOIN tasklogs ON tasks.id=tasklogs.task_id LEFT JOIN taskstatus ON tasklogs.status_id=taskstatus.id WHERE workflows.user_id={0} GROUP BY workflows.id'.format(current_user.id)
        workflows = db.engine.execute(sql)
    workitems = []
    # Workflow.query.join(WorkItem).join(Operation).filter_by(id=1).c
    # sql = text('SELECT workitems.*, operations.name AS opname FROM workflows INNER JOIN workitems ON workflows.id=workitems.workflow_id INNER join operations ON workitems.operation_id=operations.id WHERE workflows.id=' + str(id))
    workflow_name = ''
    if id is not None and Workflow.query.get(id) is not None:
        workflow_name = Workflow.query.get(id).name
        # sql = text('SELECT workitems.*, operations.name AS opname, datasources.id AS datasource_id, datasources.name AS datasource_name, data.url AS path FROM workflows INNER JOIN workitems ON workflows.id=workitems.workflow_id INNER join operations ON workitems.operation_id=operations.id INNER JOIN data ON workitems.id = data.id INNER JOIN datasources ON data.datasource_id=datasources.id WHERE workflows.id=' + str(id))
        # sql = text('SELECT s.name AS name, s.input AS input, s.output AS output, dx.url AS input_root, dx2.url AS output_root, dx.type AS input_type, dx2.type AS output_type, operations.name AS opname FROM (SELECT w.*, d1.datasource_id AS input_datasource, d1.url AS input, d2.datasource_id AS output_datasource, d2.url AS output FROM workitems w INNER JOIN data d1 ON d1.id=w.input_id INNER JOIN data d2 ON d2.id=w.output_id) s INNER JOIN datasources dx ON dx.id=s.input_datasource INNER JOIN datasources dx2 ON dx2.id=s.output_datasource INNER JOIN operations ON s.operation_id = operations.id INNER JOIN workflows ON s.workflow_id=workflows.id WHERE workflows.id=' + str(id))
        # sql = text('SELECT s.id AS id, s.name AS name, s.input AS input, s.output AS output, dx.url AS input_root, dx2.url AS output_root, dx.type AS input_type, dx2.type AS output_type, operations.name AS opname FROM (SELECT w.*, d1.datasource_id AS input_datasource, d1.url AS input, d2.datasource_id AS output_datasource, d2.url AS output FROM workitems w LEFT JOIN data d1 ON d1.id=w.input_id LEFT JOIN data d2 ON d2.id=w.output_id) s LEFT JOIN datasources dx ON dx.id=s.input_datasource LEFT JOIN datasources dx2 ON dx2.id=s.output_datasource LEFT JOIN operations ON s.operation_id = operations.id INNER JOIN workflows ON s.workflow_id=workflows.id WHERE workflows.id=' + str(id))
        # NOTE(review): id originates from the query string and is concatenated
        # into SQL; the Workflow.query.get(id) guard above likely rejects
        # non-ids, but a parameterized query would be safer.
        sql = text('SELECT w.id AS id, w.name AS name, w.desc as desc, ops.name AS opsname, operations.name AS opname, d1.url AS input, d2.url AS output, dx1.id AS input_datasourceid, dx1.type AS input_datasource, dx1.url AS input_root, dx2.id AS output_datasourceid, dx2.type AS output_datasource, dx2.url AS output_root FROM workitems w LEFT JOIN operations ON w.operation_id=operations.id LEFT JOIN operationsources ops ON ops.id=operations.operationsource_id LEFT JOIN data d1 ON d1.id=w.input_id LEFT JOIN data d2 ON d2.id=w.output_id LEFT JOIN datasources dx1 ON dx1.id=d1.datasource_id LEFT JOIN datasources dx2 ON dx2.id=d2.datasource_id WHERE w.workflow_id=' + str(id))
        workitems = db.engine.execute(sql)
    # result = db.engine.execute(sql)
    # for row in result:
    #     workitems.append(row);
    # if id is not None:
    #     workflow = Workflow.query.filter_by(id=id)
    #     if workflow is not None and workflow.count() > 0:
    #         workitems = workflow.first().workitems
    return render_template('index.html', form=form, posts=posts, datasources=datasource_tree, operations=operation_tree, workflow=workflow_name, workflows=workflows, workitems=workitems,
        show_followed=show_followed, pagination=pagination)
@main.route('/user/<username>')
def user(username):
    """Profile page for *username* with their posts, newest first (404 if unknown)."""
    user = User.query.filter_by(username=username).first_or_404()
    page = request.args.get('page', 1, type=int)
    pagination = user.posts.order_by(Post.timestamp.desc()).paginate(
        page, per_page=current_app.config['PHENOPROC_POSTS_PER_PAGE'],
        error_out=False)
    posts = pagination.items
    return render_template('user.html', user=user, posts=posts,
        pagination=pagination)


@main.route('/edit-profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
    """Let the logged-in user edit their own name, location and about-me."""
    form = EditProfileForm()
    if form.validate_on_submit():
        current_user.name = form.name.data
        current_user.location = form.location.data
        current_user.about_me = form.about_me.data
        db.session.add(current_user)
        flash('Your profile has been updated.')
        return redirect(url_for('.user', username=current_user.username))
    # GET (or invalid POST): pre-fill the form with the current values.
    form.name.data = current_user.name
    form.location.data = current_user.location
    form.about_me.data = current_user.about_me
    return render_template('edit_profile.html', form=form)


@main.route('/edit-profile/<int:id>', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
    """Admin-only editor for any user's account and profile fields."""
    user = User.query.get_or_404(id)
    form = EditProfileAdminForm(user=user)
    if form.validate_on_submit():
        user.email = form.email.data
        user.username = form.username.data
        user.confirmed = form.confirmed.data
        user.role = Role.query.get(form.role.data)
        user.name = form.name.data
        user.location = form.location.data
        user.about_me = form.about_me.data
        db.session.add(user)
        flash('The profile has been updated.')
        return redirect(url_for('.user', username=user.username))
    # GET (or invalid POST): pre-fill the form with the stored values.
    form.email.data = user.email
    form.username.data = user.username
    form.confirmed.data = user.confirmed
    form.role.data = user.role_id
    form.name.data = user.name
    form.location.data = user.location
    form.about_me.data = user.about_me
    return render_template('edit_profile.html', form=form, user=user)


@main.route('/post/<int:id>', methods=['GET', 'POST'])
def post(id):
    """Single-post page with its comment thread and a new-comment form."""
    post = Post.query.get_or_404(id)
    form = CommentForm()
    if form.validate_on_submit():
        comment = Comment(body=form.body.data,
                          post=post,
                          author=current_user._get_current_object())
        db.session.add(comment)
        flash('Your comment has been published.')
        # page=-1 is a sentinel meaning "jump to the last page", resolved below.
        return redirect(url_for('.post', id=post.id, page=-1))
    page = request.args.get('page', 1, type=int)
    if page == -1:
        page = (post.comments.count() - 1) // \
            current_app.config['PHENOPROC_COMMENTS_PER_PAGE'] + 1
    pagination = post.comments.order_by(Comment.timestamp.asc()).paginate(
        page, per_page=current_app.config['PHENOPROC_COMMENTS_PER_PAGE'],
        error_out=False)
    comments = pagination.items
    return render_template('post.html', posts=[post], form=form,
        comments=comments, pagination=pagination)


@main.route('/workflow/<int:id>', methods=['GET', 'POST'])
def workflow(id):
    """Detail page for a single workflow; 404 when *id* is unknown."""
    workflow = Workflow.query.get_or_404(id)
    return render_template('workflow.html', workflows=[workflow])
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit(id):
post = Post.query.get_or_404(id)
if current_user != post.author and \
not current_user.can(Permission.ADMINISTER):
abort(403)
form = PostForm()
if form.validate_on_submit():
post.body = form.body.data
db.session.add(post)
flash('The post has been updated.')
return redirect(url_for('.post', id=post.id))
form.body.data = post.body
return render_template('edit_post.html', form=form)
@main.route('/follow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def follow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
if current_user.is_following(user):
flash('You are already following this user.')
return redirect(url_for('.user', username=username))
current_user.follow(user)
flash('You are now following %s.' % username)
return redirect(url_for('.user', username=username))
@main.route('/unfollow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def unfollow(username):
    """Stop following *username* on behalf of the logged-in user."""
    target = User.query.filter_by(username=username).first()
    if target is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    if current_user.is_following(target):
        current_user.unfollow(target)
        flash('You are not following %s anymore.' % username)
    else:
        flash('You are not following this user.')
    return redirect(url_for('.user', username=username))
@main.route('/followers/<username>')
def followers(username):
    """List the followers of *username*, paginated."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    pagination = user.followers.paginate(
        page, per_page=current_app.config['PHENOPROC_FOLLOWERS_PER_PAGE'],
        error_out=False)
    # Flatten the Follow association rows into template-friendly dicts.
    follows = [{'user': item.follower, 'timestamp': item.timestamp}
               for item in pagination.items]
    return render_template('followers.html', user=user, title="Followers of",
                           endpoint='.followers', pagination=pagination,
                           follows=follows)
@main.route('/followed-by/<username>')
def followed_by(username):
    """List the accounts that *username* follows, paginated."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    pagination = user.followed.paginate(
        page, per_page=current_app.config['PHENOPROC_FOLLOWERS_PER_PAGE'],
        error_out=False)
    # Flatten the Follow association rows into template-friendly dicts.
    follows = [{'user': item.followed, 'timestamp': item.timestamp}
               for item in pagination.items]
    return render_template('followers.html', user=user, title="Followed by",
                           endpoint='.followed_by', pagination=pagination,
                           follows=follows)
@main.route('/all')
@login_required
def show_all():
    """Switch the index feed to show all posts (clears the filter cookie)."""
    response = make_response(redirect(url_for('.index')))
    # Empty value == falsy flag; remembered for 30 days.
    response.set_cookie('show_followed', '', max_age=30 * 24 * 60 * 60)
    return response
@main.route('/followed')
@login_required
def show_followed():
    """Switch the index feed to followed posts only (sets the cookie)."""
    response = make_response(redirect(url_for('.index')))
    # '1' == truthy flag; remembered for 30 days.
    response.set_cookie('show_followed', '1', max_age=30 * 24 * 60 * 60)
    return response
@main.route('/moderate')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate():
    """Comment-moderation dashboard: newest comments first, paginated."""
    page = request.args.get('page', 1, type=int)
    pagination = Comment.query.order_by(Comment.timestamp.desc()).paginate(
        page, per_page=current_app.config['PHENOPROC_COMMENTS_PER_PAGE'],
        error_out=False)
    comments = pagination.items
    return render_template('moderate.html', comments=comments,
                           pagination=pagination, page=page)
@main.route('/moderate/enable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_enable(id):
    """Re-enable a disabled comment and return to the moderation page."""
    target = Comment.query.get_or_404(id)
    target.disabled = False
    db.session.add(target)
    current_page = request.args.get('page', 1, type=int)
    return redirect(url_for('.moderate', page=current_page))
@main.route('/moderate/disable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_disable(id):
    """Hide a comment and return to the moderation page."""
    target = Comment.query.get_or_404(id)
    target.disabled = True
    db.session.add(target)
    current_page = request.args.get('page', 1, type=int)
    return redirect(url_for('.moderate', page=current_page))
@main.route('/about')
def about():
    """Render the static "about" page."""
    return render_template('about.html')
@main.route('/contact')
def contact():
    """Render the static "contact" page."""
    return render_template('contact.html')
from sqlalchemy.ext.declarative import DeclarativeMeta
class AlchemyEncoder(json.JSONEncoder):
    """JSON encoder that serializes SQLAlchemy declarative model rows.

    Public attributes (anything not starting with '_' and not
    'metadata') are emitted; values json cannot encode become None.
    Non-model objects fall back to the default encoder, which raises
    TypeError.
    """

    def default(self, obj):
        if not isinstance(obj.__class__, DeclarativeMeta):
            return json.JSONEncoder.default(self, obj)
        encodable = {}
        for name in dir(obj):
            if name.startswith('_') or name == 'metadata':
                continue
            value = obj.__getattribute__(name)
            try:
                json.dumps(value)  # probe: fails on non-encodable values
            except TypeError:
                encodable[name] = None
            else:
                encodable[name] = value
        return encodable
@main.route('/tasklogs', methods=['POST'])
@login_required
def translate():
    """Return the latest task status per work item of a workflow as JSON.

    Reads the workflow id from the posted 'text' field and responds with
    a JSON list of {workitem id, latest log time, status name} rows.
    Fix: the original returned None (Flask 500) when the id was missing
    or unknown; now an empty JSON list is returned instead.
    """
    workflow_id = request.form['text'] #request.args.get('workflow_id')
    workflow_id = Utility.ValueOrNone(workflow_id)
    if workflow_id is not None and Workflow.query.get(workflow_id) is not None:
        # workflow_id is validated above (parsed value, existing row), so
        # the str() concatenation below cannot inject arbitrary SQL.
        sql = 'SELECT workitems.id, MAX(time), taskstatus.name as status FROM workitems LEFT JOIN tasks ON workitems.id=tasks.workitem_id LEFT JOIN tasklogs ON tasklogs.task_id=tasks.id LEFT JOIN taskstatus ON tasklogs.status_id = taskstatus.id WHERE workitems.workflow_id=' + str(workflow_id) + ' GROUP BY workitems.id'
        result = db.engine.execute(sql)
        return json.dumps([dict(r) for r in result], cls=AlchemyEncoder)
    # Always return a valid (empty) JSON response.
    return json.dumps([])
@main.route('/delete', methods=['POST'])
@login_required
def delete():
    """Delete the posted path inside the selected datasource.

    NOTE(review): request.form['path'] is joined under the datasource
    root without normalization — confirm the filesystem layer rejects
    '..' traversal. Also, the getFileSystem import appears commented out
    at the top of this module — verify it is defined elsewhere.
    """
    datasource_id = Utility.ValueOrNone(request.form['datasource'])
    filesystem = getFileSystem(datasource_id)
    if filesystem is not None:
        path = os.path.join(Utility.get_rootdir(datasource_id), request.form['path'])
        filesystem.delete(path)
    return json.dumps(dict())
@main.route('/rename', methods=['POST'])
@login_required
def rename():
    """Rename a file/folder within its current directory.

    Returns the JSON tree node for the renamed entry, or an empty JSON
    object when the datasource is unknown.
    """
    datasource_id = Utility.ValueOrNone(request.form['datasource'])
    filesystem = getFileSystem(datasource_id)
    if filesystem is not None:
        oldpath = os.path.join(Utility.get_rootdir(datasource_id), request.form['path'])
        # The new name is applied inside the same parent directory.
        newpath = os.path.join(os.path.dirname(oldpath), request.form['newname'])
        filesystem.rename(oldpath, newpath)
        return json.dumps(filesystem.make_json(datasource_id, Utility.get_rootdir(datasource_id), os.path.relpath(newpath, Utility.get_rootdir(datasource_id))))
    return json.dumps(dict())
@main.route('/addfolder', methods=['POST'])
@login_required
def addfolder():
    """Create a new folder under the posted path.

    Returns the JSON tree node for the new folder, or an empty JSON
    object when the datasource is unknown.
    """
    datasource_id = Utility.ValueOrNone(request.form['datasource'])
    filesystem = getFileSystem(datasource_id)
    if filesystem is not None:
        path = os.path.join(Utility.get_rootdir(datasource_id), request.form['path'])
        newfolder = filesystem.addfolder(path)
        return json.dumps(filesystem.make_json(datasource_id, Utility.get_rootdir(datasource_id), os.path.relpath(newfolder, Utility.get_rootdir(datasource_id))))
    return json.dumps(dict())
# Route that will process the file upload
@main.route('/upload', methods=['POST'])
def upload():
    """Store an uploaded file under the requested datasource path.

    NOTE(review): unlike the sibling file routes this one has no
    @login_required — confirm whether anonymous uploads are intended.
    """
    # Get the name of the uploaded file
    file = request.files['file']
    # Check if the file is one of the allowed types/extensions
    if file:
        datasource_id = Utility.ValueOrNone(request.form['datasource'])
        filesystem = getFileSystem(datasource_id)
        if filesystem is not None:
            # Make the filename safe, remove unsupported chars
            filename = secure_filename(file.filename)
            # Move the file from the temporary folder to
            # the upload folder we set up
            path = os.path.join(Utility.get_rootdir(datasource_id), request.form['path'], filename)
            filesystem.saveUpload(file, path)
    return json.dumps({})
@main.route('/download', methods=['POST'])
@login_required
def download():
    """Send the requested file to the client as a download.

    Returns an empty JSON object when the datasource is unknown or the
    filesystem layer cannot produce a local path for the entry.
    """
    datasource_id = Utility.ValueOrNone(request.form['datasource'])
    filesystem = getFileSystem(datasource_id)
    if filesystem is not None:
        path = filesystem.download(os.path.join(Utility.get_rootdir(datasource_id), request.form['path']))
        if path is not None:
            return send_from_directory(directory=os.path.dirname(path), filename=os.path.basename(path))
    return json.dumps(dict())
def load_data_sources_biowl():
    """Build the datasource tree shown in the biowl UI.

    Datasource id 1 is treated as HDFS and id 2 as the local POSIX
    filesystem (matching the seeded datasources rows). Each node gets
    the user's home folder (when authenticated) plus the shared public
    folder.
    """
    # construct data source tree
    datasources = DataSource.query.all()
    datasource_tree = []
    for ds in datasources:
        datasource = { 'path': ds.url, 'text': ds.name, 'nodes': [], 'folder': True}
        if ds.id == 1:
            # hdfs tree
            try:
                hdfs = HadoopFileSystem(ds.url, 'hdfs')
                if current_user.is_authenticated:
                    datasource['nodes'].append(hdfs.make_json(ds.id, Utility.get_rootdir(ds.id), current_user.username))
                datasource['nodes'].append(hdfs.make_json(ds.id, Utility.get_rootdir(ds.id), current_app.config['PUBLIC_DIR']))
            except Exception:
                # Best effort: an unreachable HDFS leaves the node empty
                # instead of breaking the page. (Was a bare 'except:',
                # which also swallowed SystemExit/KeyboardInterrupt.)
                pass
        elif ds.id == 2:
            # file system tree
            posixFS = PosixFileSystem(Utility.get_rootdir(ds.id))
            if current_user.is_authenticated:
                datasource['nodes'].append(posixFS.make_json(current_user.username))
            datasource['nodes'].append(posixFS.make_json(current_app.config['PUBLIC_DIR']))
        datasource_tree.append(datasource)
    return datasource_tree
# class DataSource():
#
# #datasources = [{'path': 'http://sr-p2irc-big1.usask.ca:50070/user/phenodoop', 'text': 'HDFS', 'nodes': [], 'folder': True}, { 'text': 'LocalFS', 'path': current_app.config['DATA_DIR'], 'nodes': [], 'folder': True}]
#
# @staticmethod
# def load_data_sources():
# datasource_tree = []
# try:
# ds = datasources[0]
# hdfs = HadoopFileSystem(ds['path'], 'hdfs')
# ds['path'] = 'HDFS'
# ds['nodes'] = hdfs.make_json(os.sep)['nodes']
# # DataSource.normalize_node_path(ds['path'], ds)
# datasource_tree.append(ds)
# except:
# pass
#
# ds = datasources[1]
# ds['path'] = 'LocalFS'
# fs = PosixFileSystem()
# ds['nodes'] = fs.make_json(os.sep)['nodes']
# #DataSource.normalize_node_path(ds['path'], ds)
# datasource_tree.append(ds)
#
# return datasource_tree
#
# @staticmethod
# def normalize_node_path(root, ds):
# for n in ds['nodes']:
# n['path'] = root + n['path']
# if n['folder']:
# DataSource.normalize_node_path(root, n)
#
# @staticmethod
# def get_filesystem(path):
# for ds in datasources:
# if path.startswith(ds['text']):
# return IOHelper.getFileSystem(ds['path'])
#
# @staticmethod
# def load_data_sources_json():
# return json.dumps(DataSource.load_data_sources())
#
# @staticmethod
# def upload(file, fullpath):
# fs = DataSource.get_filesystem(fullpath)
# return fs.save_upload(file, fullpath)
#
# @staticmethod
# def download(path):
# fs = DataSource.get_filesystem(path)
# return fs.download(path)
class InterpreterHelper():
    """Holds the PhenoWL interpreter/code generator plus the loaded
    library-function catalog used by the /functions endpoint."""

    def __init__(self):
        # funcs: flat, sorted list of dicts describing each library function.
        self.funcs = []
        self.interpreter = PhenoWLInterpreter()
        self.codeGenerator = PhenoWLCodeGenerator()
        self.reload()

    def reload(self):
        """(Re)load the function libraries from ../biowl/libraries and
        rebuild the sorted function catalog for both machines."""
        self.funcs.clear()
        librariesdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../biowl/libraries')
        librariesdir = os.path.normpath(librariesdir)
        self.interpreter.context.load_library(librariesdir)
        funclist = []
        for f in self.interpreter.context.library.funcs.values():
            funclist.extend(f)
        funclist.sort(key=lambda x: (x.package, x.name))
        for f in funclist:
            self.funcs.append({"package_name": f.package if f.package else "", "name": f.name, "internal": f.internal, "example": f.example if f.example else "", "desc": f.desc if f.desc else "", "runmode": f.runmode if f.runmode else ""})
        self.codeGenerator.context.load_library(librariesdir)

    def run(self, machine, script):
        """Parse and execute *script* on *machine*; returns a dict with
        out/err text and the formatted run duration.

        NOTE(review): os.chdir changes process-global state per call.
        """
        parserdir = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../biowl/'))
        os.chdir(parserdir) #set dir of this file to current directory
        duration = 0
        try:
            machine.context.reload()
            parser = PhenoWLParser(PythonGrammar())
            with Timer() as t:
                prog = parser.parse(script)
                machine.run(prog)
                duration = t.secs
        except:
            # NOTE(review): bare except also hides programming errors; only
            # this generic message reaches the user.
            machine.context.err.append("Error in parse and interpretation")
        return { 'out': machine.context.out, 'err': machine.context.err, 'duration': "{:.4f}s".format(duration) }
# Module-level singleton shared by the /functions route below.
interpreter = InterpreterHelper()
@main.route('/biowl', methods=['GET', 'POST'])
@login_required
def biowl():
    """Render the BioWL scripting UI."""
    return render_template('biowl.html')
@main.route('/datasources', methods=['GET', 'POST'])
@login_required
def datasources():
    """Return the biowl datasource tree as JSON."""
    tree = load_data_sources_biowl()
    return json.dumps({'datasources': tree })
@main.route('/functions', methods=['GET', 'POST'])
@login_required
def functions():
    """Function-catalog endpoint that also launches script runs.

    With a 'script' (interpret) or 'code' (generate) query argument, a
    Runnable row is created and the work is handed to Celery; otherwise
    the loaded function catalog is returned.
    """
    if request.args.get('script') or request.args.get('code'):
        script = request.args.get('script') if request.args.get('script') else request.args.get('code')
        # 'script' selects the interpreter; 'code' the code generator.
        machine = interpreter.interpreter if request.args.get('script') else interpreter.codeGenerator
        runnable_id = Runnable.create_runnable(current_user.id)
        runnable = Runnable.query.get(runnable_id)
        runnable.script = script
        # Display name: first 40 characters, ellipsized when truncated.
        runnable.name = script[:min(40, len(script))]
        if len(script) > len(runnable.name):
            runnable.name += "..."
        db.session.commit()
        task = run_script.delay(machine, script)
        runnable.celery_id = task.id
        db.session.commit()
        #runnable_manager.submit_func(runnable_id, interpreter.run, machine, script)
        return json.dumps({})
    return json.dumps({'functions': interpreter.funcs })
class Samples():
    """Load and persist BioWL sample scripts stored as JSON files under
    ../biowl/samples (searched recursively)."""

    @staticmethod
    def load_samples_recursive(library_def_file):
        """Return all samples under *library_def_file* (file or directory)."""
        if os.path.isfile(library_def_file):
            return Samples.load_samples(library_def_file)
        all_samples = []
        for f in os.listdir(library_def_file):
            samples = Samples.load_samples_recursive(os.path.join(library_def_file, f))
            all_samples.extend(samples if isinstance(samples, list) else [samples])
            #all_samples = {**all_samples, **samples}
        return all_samples

    @staticmethod
    def load_samples(sample_def_file):
        """Parse one *.json sample file; returns [] for non-JSON paths.

        NOTE(review): 'return' inside 'finally' discards any in-flight
        json/IO exception, so a broken file silently yields [].
        """
        samples = []
        if not os.path.isfile(sample_def_file) or not sample_def_file.endswith(".json"):
            return samples
        try:
            with open(sample_def_file, 'r') as json_data:
                d = json.load(json_data)
                samples = d["samples"] if d.get("samples") else d
        finally:
            return samples

    @staticmethod
    def get_samples_as_list():
        """Return every sample as a {name, desc, sample-text} dict."""
        samples = []
        samplesdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../biowl/samples')
        for s in Samples.load_samples_recursive(samplesdir):
            samples.append({"name": s["name"], "desc": s["desc"], "sample": '\n'.join(s["sample"])})
        return samples

    @staticmethod
    def unique_filename(path, prefix, ext):
        """Return the first '<prefix>(<i>).<ext>' path under *path* that
        does not exist yet."""
        make_fn = lambda i: os.path.join(path, '{0}({1}).{2}'.format(prefix, i, ext))
        for i in range(1, sys.maxsize):
            uni_fn = make_fn(i)
            if not os.path.exists(uni_fn):
                return uni_fn

    @staticmethod
    def add_sample(sample, name, desc):
        """Write a new per-user sample file; always responds ({out, err}, 201).

        NOTE(review): the try/finally swallows any write error, so callers
        get 201 even when nothing was saved.
        """
        this_path = os.path.dirname(os.path.abspath(__file__))
        os.chdir(this_path) #set dir of this file to current directory
        samplesdir = os.path.normpath(os.path.join(this_path, '../biowl/samples'))
        try:
            if sample and name:
                new_path = os.path.normpath(os.path.join(samplesdir, 'users', current_user.username))
                if not os.path.isdir(new_path):
                    os.makedirs(new_path)
                path = Samples.unique_filename(new_path, 'sample', 'json')
                with open(path, 'w') as fp:
                    fp.write("{\n")
                    fp.write('{0}"name":"{1}",\n'.format(" " * 4, name));
                    fp.write('{0}"desc":"{1}",\n'.format(" " * 4, desc));
                    fp.write('{0}"sample":[\n'.format(" " * 4))
                    # Normalize newlines and swap double quotes for single so
                    # each line can be emitted as a JSON string literal.
                    sample = sample.replace("\\n", "\n").replace("\r\n", "\n").replace("\"", "\'")
                    lines = sample.split("\n")
                    for line in lines[0:-1]:
                        fp.write('{0}"{1}",\n'.format(" " * 8, line))
                    fp.write('{0}"{1}"\n'.format(" " * 8, lines[-1]))
                    fp.write("{0}]\n}}".format(" " * 4))
            # json.dump(samples, fp, indent=4, separators=(',', ': '))
        finally:
            return { 'out': '', 'err': ''}, 201
@main.route('/samples', methods=['GET', 'POST'])
@login_required
def samples():
    """List stored samples, or save a posted one."""
    posted = request.form.get('sample')
    if posted:
        return Samples.add_sample(posted, request.form.get('name'), request.form.get('desc'))
    return json.dumps({'samples': Samples.get_samples_as_list()})
@main.route('/runnables', methods=['GET', 'POST'])
@login_required
def runnables():
    """Query or stop the current user's runnables.

    ?id=N     -> JSON for that single runnable.
    ?stop=a,b -> stop those Celery tasks and return their new status.
    otherwise -> sync task status, return all runnables of this user.
    """
    if request.args.get('id'):
        runnable = Runnable.query.get(int(request.args.get('id')))
        return jsonify(runnable = runnable.to_json())
    elif request.args.get('stop'):
        ids = request.args.get('stop')
        ids = ids.split(",")
        new_status = []
        for id in ids:
            runnable = Runnable.query.get(int(id))
            stop_script(runnable.celery_id)
            sync_task_status_with_db(runnable)
            new_status.append(runnable)
        return jsonify(runnables =[i.to_json() for i in new_status])
    sync_task_status_with_db_for_user(current_user.id)
    # runnables_db = Runnable.query.filter(Runnable.user_id == current_user.id)
    # rs = []
    # for r in runnables_db:
    #     rs.append(r.to_json())
    # return jsonify(runnables = rs)
    return jsonify(runnables =[i.to_json() for i in Runnable.query.filter(Runnable.user_id == current_user.id)])
download
from __future__ import print_function
from flask import Flask, render_template, redirect, url_for, abort, flash, request,\
current_app, make_response, g, jsonify
from flask import send_from_directory
from flask_login import login_required, current_user
from flask_sqlalchemy import get_debug_queries
from sqlalchemy import text
import os
import sys
import flask_sijax
from . import main
from .forms import EditProfileForm, EditProfileAdminForm, PostForm, CommentForm
from .. import db
from ..models import Permission, Role, User, Post, Comment, Workflow, WorkItem, DataSource, Data, DataType, OperationSource, Operation
from ..decorators import admin_required, permission_required
from .ajax import WorkflowHandler
from ..util import Utility
#from ..io import PosixFileSystem, HadoopFileSystem, getFileSystem
from ..biowl.fileop import PosixFileSystem, HadoopFileSystem, IOHelper
from ..biowl.phenoparser import PhenoWLInterpreter, PhenoWLCodeGenerator, PhenoWLParser, PythonGrammar
from ..biowl.timer import Timer
from ..models import Runnable
from ..biowl.tasks import runnable_manager
import json
from werkzeug import secure_filename
import mimetypes
from ..jobs import long_task, run_script, stop_script, sync_task_status_with_db, sync_task_status_with_db_for_user
# NOTE(review): this module registers routes on the 'main' blueprint;
# creating a second Flask app here looks unused/accidental — confirm
# nothing imports it before removing.
app = Flask(__name__)
@main.after_app_request
def after_request(response):
    """Log every DB query slower than PHENOPROC_SLOW_DB_QUERY_TIME."""
    for query in get_debug_queries():
        if query.duration >= current_app.config['PHENOPROC_SLOW_DB_QUERY_TIME']:
            current_app.logger.warning(
                'Slow query: %s\nParameters: %s\nDuration: %fs\nContext: %s\n'
                % (query.statement, query.parameters, query.duration,
                   query.context))
    return response
@main.route('/shutdown')
def server_shutdown():
    """Stop the werkzeug dev server; only available in testing mode."""
    if not current_app.testing:
        abort(404)
    stop = request.environ.get('werkzeug.server.shutdown')
    if not stop:
        abort(500)
    stop()
    return 'Shutting down...'
def load_data_sources():
    """Build the datasource tree for the index page.

    Datasource id 1 is treated as HDFS and id 2 as the local POSIX
    filesystem; each node gets the user's home folder (when
    authenticated) plus the shared public folder.
    """
    # construct data source tree
    datasources = DataSource.query.all()
    datasource_tree = { 'type': DataType.Custom, 'children': [] }
    for ds in datasources:
        datasource = { 'datasource': ds.id, 'type': DataType.Root, 'base':'', 'path': ds.url, 'name': ds.name, 'children': []}
        if ds.id == 1:
            # hdfs tree
            try:
                hdfs = HadoopFileSystem()
                if current_user.is_authenticated:
                    datasource['children'].append(hdfs.make_json(ds.id, Utility.get_rootdir(ds.id), current_user.username))
                datasource['children'].append(hdfs.make_json(ds.id, Utility.get_rootdir(ds.id), current_app.config['PUBLIC_DIR']))
            except Exception:
                # Best effort: an unreachable HDFS leaves the node empty
                # instead of failing the page. (Was a bare 'except:', which
                # also swallowed SystemExit/KeyboardInterrupt.)
                pass
        elif ds.id == 2:
            # file system tree
            posixFS = PosixFileSystem()
            if current_user.is_authenticated:
                datasource['children'].append(posixFS.make_json(ds.id, Utility.get_rootdir(ds.id), current_user.username))
            datasource['children'].append(posixFS.make_json(ds.id, Utility.get_rootdir(ds.id), current_app.config['PUBLIC_DIR']))
        datasource_tree['children'].append(datasource)
    return datasource_tree
@main.route('/reloaddatasources', methods=['POST'])
def load_data_sources_json():
    """Rebuild the datasource tree and return it as JSON.

    NOTE(review): the tree embeds DataType values — confirm they are
    JSON-serializable, otherwise json.dumps raises TypeError here.
    """
    return json.dumps(load_data_sources())
@main.route('/', defaults={'id': ''}, methods = ['GET', 'POST'])
@main.route('/workflow/<int:id>/', methods = ['GET', 'POST'])
def index(id=None):
    """Main page: post feed, datasource/operation trees and, when a
    workflow id is given, that workflow's work items.

    Also dispatches Sijax AJAX requests to WorkflowHandler.
    """
    id = Utility.ValueOrNone(id)
    # NOTE(review): when ValueOrNone yields None, 'None <= 0' raises
    # TypeError on Python 3 — confirm the targeted Python version.
    if id <= 0:
        id = request.args.get('workflow')
    if g.sijax.is_sijax_request:
        # Sijax request detected - let Sijax handle it
        g.sijax.register_object(WorkflowHandler)
        return g.sijax.process_request()
    form = PostForm()
    if current_user.can(Permission.WRITE_ARTICLES) and form.validate_on_submit():
        post = Post(body=form.body.data, author=current_user._get_current_object())
        db.session.add(post)
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    # 'show_followed' cookie toggles between all posts and followed posts.
    show_followed = False
    if current_user.is_authenticated:
        show_followed = bool(request.cookies.get('show_followed', ''))
    if show_followed:
        query = current_user.followed_posts
    else:
        query = Post.query
    pagination = query.order_by(Post.timestamp.desc()).paginate(
        page, per_page=current_app.config['PHENOPROC_POSTS_PER_PAGE'],
        error_out=False)
    posts = pagination.items
    datasource_tree = load_data_sources()
    # construct operation source tree
    operationsources = OperationSource.query.all()
    operation_tree = { 'name' : ('operations', ''), 'children' : [] }
    for ops in operationsources:
        operation_tree['children'].append({ 'name' : (ops.name, ops.id), 'children' : [] })
        for op in ops.operations:
            operation_tree['children'][-1]['children'].append({ 'name' : (op.name, op.id), 'children' : [] })
    # workflows tree
    workflows = []
    if current_user.is_authenticated:
        #workflows = Workflow.query.filter_by(user_id=current_user.id)
        #sql = 'SELECT workflows.*, MAX(time), taskstatus.name AS status FROM workflows JOIN users ON workflows.user_id = users.id LEFT JOIN workitems ON workflows.id = workitems.workflow_id LEFT JOIN tasks ON workitems.id = tasks.workitem_id LEFT JOIN tasklogs ON tasks.id=tasklogs.task_id JOIN taskstatus ON tasklogs.status_id=taskstatus.id GROUP BY workflows.id HAVING users.id=' + str(current_user.id)
        # current_user.id is an integer from the session, not user input.
        sql = 'SELECT workflows.*, MAX(time), taskstatus.name AS status FROM workflows LEFT JOIN workitems ON workflows.id = workitems.workflow_id LEFT JOIN tasks ON workitems.id = tasks.workitem_id LEFT JOIN tasklogs ON tasks.id=tasklogs.task_id LEFT JOIN taskstatus ON tasklogs.status_id=taskstatus.id WHERE workflows.user_id={0} GROUP BY workflows.id'.format(current_user.id)
        workflows = db.engine.execute(sql)
    workitems = []
    # Workflow.query.join(WorkItem).join(Operation).filter_by(id=1).c
    # sql = text('SELECT workitems.*, operations.name AS opname FROM workflows INNER JOIN workitems ON workflows.id=workitems.workflow_id INNER join operations ON workitems.operation_id=operations.id WHERE workflows.id=' + str(id))
    workflow_name = ''
    if id is not None and Workflow.query.get(id) is not None:
        workflow_name = Workflow.query.get(id).name
        # sql = text('SELECT workitems.*, operations.name AS opname, datasources.id AS datasource_id, datasources.name AS datasource_name, data.url AS path FROM workflows INNER JOIN workitems ON workflows.id=workitems.workflow_id INNER join operations ON workitems.operation_id=operations.id INNER JOIN data ON workitems.id = data.id INNER JOIN datasources ON data.datasource_id=datasources.id WHERE workflows.id=' + str(id))
        # sql = text('SELECT s.name AS name, s.input AS input, s.output AS output, dx.url AS input_root, dx2.url AS output_root, dx.type AS input_type, dx2.type AS output_type, operations.name AS opname FROM (SELECT w.*, d1.datasource_id AS input_datasource, d1.url AS input, d2.datasource_id AS output_datasource, d2.url AS output FROM workitems w INNER JOIN data d1 ON d1.id=w.input_id INNER JOIN data d2 ON d2.id=w.output_id) s INNER JOIN datasources dx ON dx.id=s.input_datasource INNER JOIN datasources dx2 ON dx2.id=s.output_datasource INNER JOIN operations ON s.operation_id = operations.id INNER JOIN workflows ON s.workflow_id=workflows.id WHERE workflows.id=' + str(id))
        # sql = text('SELECT s.id AS id, s.name AS name, s.input AS input, s.output AS output, dx.url AS input_root, dx2.url AS output_root, dx.type AS input_type, dx2.type AS output_type, operations.name AS opname FROM (SELECT w.*, d1.datasource_id AS input_datasource, d1.url AS input, d2.datasource_id AS output_datasource, d2.url AS output FROM workitems w LEFT JOIN data d1 ON d1.id=w.input_id LEFT JOIN data d2 ON d2.id=w.output_id) s LEFT JOIN datasources dx ON dx.id=s.input_datasource LEFT JOIN datasources dx2 ON dx2.id=s.output_datasource LEFT JOIN operations ON s.operation_id = operations.id INNER JOIN workflows ON s.workflow_id=workflows.id WHERE workflows.id=' + str(id))
        # id is validated above via Workflow.query.get, so str(id) is safe.
        sql = text('SELECT w.id AS id, w.name AS name, w.desc as desc, ops.name AS opsname, operations.name AS opname, d1.url AS input, d2.url AS output, dx1.id AS input_datasourceid, dx1.type AS input_datasource, dx1.url AS input_root, dx2.id AS output_datasourceid, dx2.type AS output_datasource, dx2.url AS output_root FROM workitems w LEFT JOIN operations ON w.operation_id=operations.id LEFT JOIN operationsources ops ON ops.id=operations.operationsource_id LEFT JOIN data d1 ON d1.id=w.input_id LEFT JOIN data d2 ON d2.id=w.output_id LEFT JOIN datasources dx1 ON dx1.id=d1.datasource_id LEFT JOIN datasources dx2 ON dx2.id=d2.datasource_id WHERE w.workflow_id=' + str(id))
        workitems = db.engine.execute(sql)
        # result = db.engine.execute(sql)
        # for row in result:
        #     workitems.append(row);
    # if id is not None:
    #     workflow = Workflow.query.filter_by(id=id)
    #     if workflow is not None and workflow.count() > 0:
    #         workitems = workflow.first().workitems
    return render_template('index.html', form=form, posts=posts, datasources=datasource_tree, operations=operation_tree, workflow=workflow_name, workflows=workflows, workitems=workitems,
                           show_followed=show_followed, pagination=pagination)
@main.route('/user/<username>')
def user(username):
    """Profile page for *username* with that user's posts, paginated."""
    profile = User.query.filter_by(username=username).first_or_404()
    page = request.args.get('page', 1, type=int)
    pagination = profile.posts.order_by(Post.timestamp.desc()).paginate(
        page, per_page=current_app.config['PHENOPROC_POSTS_PER_PAGE'],
        error_out=False)
    return render_template('user.html', user=profile, posts=pagination.items,
                           pagination=pagination)
@main.route('/edit-profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
    """Let the logged-in user edit name, location and about-me."""
    form = EditProfileForm()
    if form.validate_on_submit():
        current_user.name = form.name.data
        current_user.location = form.location.data
        current_user.about_me = form.about_me.data
        db.session.add(current_user)
        flash('Your profile has been updated.')
        return redirect(url_for('.user', username=current_user.username))
    # Pre-fill the form with the current values for GET / invalid POST.
    form.name.data = current_user.name
    form.location.data = current_user.location
    form.about_me.data = current_user.about_me
    return render_template('edit_profile.html', form=form)
@main.route('/edit-profile/<int:id>', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
    """Administrator editor for any user's profile, including email,
    username, confirmation flag and role."""
    user = User.query.get_or_404(id)
    form = EditProfileAdminForm(user=user)
    if form.validate_on_submit():
        user.email = form.email.data
        user.username = form.username.data
        user.confirmed = form.confirmed.data
        user.role = Role.query.get(form.role.data)
        user.name = form.name.data
        user.location = form.location.data
        user.about_me = form.about_me.data
        db.session.add(user)
        flash('The profile has been updated.')
        return redirect(url_for('.user', username=user.username))
    # Pre-fill the form with the current values for GET / invalid POST.
    form.email.data = user.email
    form.username.data = user.username
    form.confirmed.data = user.confirmed
    form.role.data = user.role_id
    form.name.data = user.name
    form.location.data = user.location
    form.about_me.data = user.about_me
    return render_template('edit_profile.html', form=form, user=user)
@main.route('/post/<int:id>', methods=['GET', 'POST'])
def post(id):
    """Show a single post with its paginated comments; handle new comments.

    GET renders the post; a valid POST stores a comment and redirects
    back with page=-1 so the newest comment is visible.
    """
    post = Post.query.get_or_404(id)
    form = CommentForm()
    if form.validate_on_submit():
        comment = Comment(body=form.body.data,
                          post=post,
                          author=current_user._get_current_object())
        db.session.add(comment)
        flash('Your comment has been published.')
        # page=-1 is a sentinel meaning "last page", resolved below on GET.
        return redirect(url_for('.post', id=post.id, page=-1))
    page = request.args.get('page', 1, type=int)
    if page == -1:
        # Translate the sentinel into the number of the last comment page.
        page = (post.comments.count() - 1) // \
            current_app.config['PHENOPROC_COMMENTS_PER_PAGE'] + 1
    pagination = post.comments.order_by(Comment.timestamp.asc()).paginate(
        page, per_page=current_app.config['PHENOPROC_COMMENTS_PER_PAGE'],
        error_out=False)
    comments = pagination.items
    return render_template('post.html', posts=[post], form=form,
                           comments=comments, pagination=pagination)
@main.route('/workflow/<int:id>', methods=['GET', 'POST'])
def workflow(id):
    """Render the detail page for a single workflow (404 if unknown)."""
    wf = Workflow.query.get_or_404(id)
    return render_template('workflow.html', workflows=[wf])
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit(id):
    """Edit an existing post; only its author or an administrator may."""
    target = Post.query.get_or_404(id)
    is_author = current_user == target.author
    if not (is_author or current_user.can(Permission.ADMINISTER)):
        abort(403)
    form = PostForm()
    if not form.validate_on_submit():
        # GET or invalid submit: show the form pre-filled with the body.
        form.body.data = target.body
        return render_template('edit_post.html', form=form)
    target.body = form.body.data
    db.session.add(target)
    flash('The post has been updated.')
    return redirect(url_for('.post', id=target.id))
@main.route('/follow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def follow(username):
    """Make the logged-in user follow *username*."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    if current_user.is_following(user):
        flash('You are already following this user.')
        return redirect(url_for('.user', username=username))
    current_user.follow(user)
    flash('You are now following %s.' % username)
    return redirect(url_for('.user', username=username))
@main.route('/unfollow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def unfollow(username):
    """Stop following *username* on behalf of the logged-in user."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    if not current_user.is_following(user):
        flash('You are not following this user.')
        return redirect(url_for('.user', username=username))
    current_user.unfollow(user)
    flash('You are not following %s anymore.' % username)
    return redirect(url_for('.user', username=username))
@main.route('/followers/<username>')
def followers(username):
    """Paginated list of accounts following *username*."""
    target = User.query.filter_by(username=username).first()
    if target is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    pagination = target.followers.paginate(
        page, per_page=current_app.config['PHENOPROC_FOLLOWERS_PER_PAGE'],
        error_out=False)
    # Flatten the Follow association rows into template-friendly dicts.
    entries = []
    for item in pagination.items:
        entries.append({'user': item.follower, 'timestamp': item.timestamp})
    return render_template('followers.html', user=target, title="Followers of",
                           endpoint='.followers', pagination=pagination,
                           follows=entries)
@main.route('/followed-by/<username>')
def followed_by(username):
    """Paginated list of accounts that *username* follows."""
    target = User.query.filter_by(username=username).first()
    if target is None:
        flash('Invalid user.')
        return redirect(url_for('.index'))
    page = request.args.get('page', 1, type=int)
    pagination = target.followed.paginate(
        page, per_page=current_app.config['PHENOPROC_FOLLOWERS_PER_PAGE'],
        error_out=False)
    # Flatten the Follow association rows into template-friendly dicts.
    entries = []
    for item in pagination.items:
        entries.append({'user': item.followed, 'timestamp': item.timestamp})
    return render_template('followers.html', user=target, title="Followed by",
                           endpoint='.followed_by', pagination=pagination,
                           follows=entries)
@main.route('/all')
@login_required
def show_all():
    """Switch the index feed to show all posts (clears the filter cookie)."""
    resp = make_response(redirect(url_for('.index')))
    # Empty value == falsy flag; remembered for 30 days.
    resp.set_cookie('show_followed', '', max_age=30*24*60*60)
    return resp
@main.route('/followed')
@login_required
def show_followed():
    """Switch the index feed to followed posts only (sets the cookie)."""
    resp = make_response(redirect(url_for('.index')))
    # '1' == truthy flag; remembered for 30 days.
    resp.set_cookie('show_followed', '1', max_age=30*24*60*60)
    return resp
@main.route('/moderate')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate():
    """Moderation dashboard over all comments, newest first."""
    page = request.args.get('page', 1, type=int)
    per_page = current_app.config['PHENOPROC_COMMENTS_PER_PAGE']
    pagination = Comment.query.order_by(Comment.timestamp.desc()).paginate(
        page, per_page=per_page, error_out=False)
    return render_template('moderate.html', comments=pagination.items,
                           pagination=pagination, page=page)
@main.route('/moderate/enable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_enable(id):
    """Re-enable a disabled comment and return to the moderation page."""
    comment = Comment.query.get_or_404(id)
    comment.disabled = False
    db.session.add(comment)
    return redirect(url_for('.moderate',
                            page=request.args.get('page', 1, type=int)))
@main.route('/moderate/disable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_disable(id):
    """Hide a comment and return to the moderation page."""
    comment = Comment.query.get_or_404(id)
    comment.disabled = True
    db.session.add(comment)
    return redirect(url_for('.moderate',
                            page=request.args.get('page', 1, type=int)))
@main.route('/about')
def about():
    """Render the static "about" page."""
    return render_template('about.html')
@main.route('/contact')
def contact():
    """Render the static "contact" page."""
    return render_template('contact.html')
from sqlalchemy.ext.declarative import DeclarativeMeta
class AlchemyEncoder(json.JSONEncoder):
    """JSON encoder for SQLAlchemy declarative model instances.

    Serializes public attributes (skipping 'metadata'); values json
    cannot encode are emitted as None. Other objects use the default
    encoder, which raises TypeError.
    """
    def default(self, obj):
        if isinstance(obj.__class__, DeclarativeMeta):
            # an SQLAlchemy class
            fields = {}
            for field in [x for x in dir(obj) if not x.startswith('_') and x != 'metadata']:
                data = obj.__getattribute__(field)
                try:
                    json.dumps(data) # this will fail on non-encodable values, like other classes
                    fields[field] = data
                except TypeError:
                    fields[field] = None
            # a json-encodable dict
            return fields
        return json.JSONEncoder.default(self, obj)
@main.route('/tasklogs', methods=['POST'])
@login_required
def translate():
    """Return the latest task status per work item of a workflow as JSON.

    Reads the workflow id from the posted 'text' field and responds with
    a JSON list of {workitem id, latest log time, status name} rows.
    Fix: the original returned None (Flask 500) when the id was missing
    or unknown; now an empty JSON list is returned instead.
    """
    workflow_id = request.form['text'] #request.args.get('workflow_id')
    workflow_id = Utility.ValueOrNone(workflow_id)
    if workflow_id is not None and Workflow.query.get(workflow_id) is not None:
        # workflow_id is validated above (parsed value, existing row), so
        # the str() concatenation below cannot inject arbitrary SQL.
        sql = 'SELECT workitems.id, MAX(time), taskstatus.name as status FROM workitems LEFT JOIN tasks ON workitems.id=tasks.workitem_id LEFT JOIN tasklogs ON tasklogs.task_id=tasks.id LEFT JOIN taskstatus ON tasklogs.status_id = taskstatus.id WHERE workitems.workflow_id=' + str(workflow_id) + ' GROUP BY workitems.id'
        result = db.engine.execute(sql)
        return json.dumps([dict(r) for r in result], cls=AlchemyEncoder)
    # Always return a valid (empty) JSON response.
    return json.dumps([])
@main.route('/delete', methods=['POST'])
@login_required
def delete():
    """Remove a file or folder on the selected data source."""
    ds_id = Utility.ValueOrNone(request.form['datasource'])
    fs = getFileSystem(ds_id)
    if fs is not None:
        target = os.path.join(Utility.get_rootdir(ds_id), request.form['path'])
        fs.delete(target)
    return json.dumps(dict())
@main.route('/rename', methods=['POST'])
@login_required
def rename():
    """Rename a file/folder in place and return its refreshed tree node."""
    ds_id = Utility.ValueOrNone(request.form['datasource'])
    fs = getFileSystem(ds_id)
    if fs is not None:
        root = Utility.get_rootdir(ds_id)
        oldpath = os.path.join(root, request.form['path'])
        # The new name replaces only the last path component.
        newpath = os.path.join(os.path.dirname(oldpath), request.form['newname'])
        fs.rename(oldpath, newpath)
        return json.dumps(fs.make_json(ds_id, Utility.get_rootdir(ds_id),
                                       os.path.relpath(newpath, Utility.get_rootdir(ds_id))))
    return json.dumps(dict())
@main.route('/addfolder', methods=['POST'])
@login_required
def addfolder():
    """Create a new folder under the posted path and return its tree node."""
    ds_id = Utility.ValueOrNone(request.form['datasource'])
    fs = getFileSystem(ds_id)
    if fs is not None:
        parent = os.path.join(Utility.get_rootdir(ds_id), request.form['path'])
        created = fs.addfolder(parent)
        return json.dumps(fs.make_json(ds_id, Utility.get_rootdir(ds_id),
                                       os.path.relpath(created, Utility.get_rootdir(ds_id))))
    return json.dumps(dict())
# Route that will process the file upload
@main.route('/upload', methods=['POST'])
@login_required
def upload():
    """Store an uploaded file on the selected data source.

    Fixes: the route now requires authentication, matching every other
    file-management route in this module (delete/rename/addfolder/
    download) -- previously anonymous users could write files.  The local
    variable no longer shadows the ``file`` builtin.
    """
    # Get the uploaded file from the multipart form.
    upload_file = request.files['file']
    if upload_file:
        datasource_id = Utility.ValueOrNone(request.form['datasource'])
        filesystem = getFileSystem(datasource_id)
        if filesystem is not None:
            # Sanitise the client-supplied name before using it in a path.
            filename = secure_filename(upload_file.filename)
            path = os.path.join(Utility.get_rootdir(datasource_id),
                                request.form['path'], filename)
            filesystem.saveUpload(upload_file, path)
    return json.dumps({})
@main.route('/download', methods=['POST'])
@login_required
def download():
    """Stream the requested file back to the client."""
    ds_id = Utility.ValueOrNone(request.form['datasource'])
    fs = getFileSystem(ds_id)
    if fs is not None:
        requested = os.path.join(Utility.get_rootdir(ds_id), request.form['path'])
        local_path = fs.download(requested)
        if local_path is not None:
            return send_from_directory(directory=os.path.dirname(local_path),
                                       filename=os.path.basename(local_path))
    # Nothing to send: reply with an empty JSON object.
    return json.dumps(dict())
def load_data_sources_biowl():
    """Build the datasource tree shown in the biowl file browser.

    Returns a list of dicts, one per DataSource row, each carrying the
    current user's folder and the public folder as child nodes.
    """
    # construct data source tree
    datasources = DataSource.query.all()
    datasource_tree = []
    for ds in datasources:
        datasource = { 'path': ds.url, 'text': ds.name, 'nodes': [], 'folder': True}
        # NOTE(review): datasource ids are hard-coded (1 == HDFS, 2 == posix);
        # keep in sync with fs_id_by_prefix() below.
        if ds.id == 1:
            # hdfs tree
            try:
                hdfs = HadoopFileSystem(ds.url, 'hdfs')
                if current_user.is_authenticated:
                    datasource['nodes'].append(hdfs.make_json(ds.id, Utility.get_rootdir(ds.id), current_user.username))
                datasource['nodes'].append(hdfs.make_json(ds.id, Utility.get_rootdir(ds.id), current_app.config['PUBLIC_DIR']))
            except:
                # Best effort: an unreachable HDFS leaves this node empty
                # instead of breaking the whole page.
                pass
        elif ds.id == 2:
            # file system tree
            posixFS = PosixFileSystem(Utility.get_rootdir(ds.id))
            if current_user.is_authenticated:
                datasource['nodes'].append(posixFS.make_json(current_user.username))
            datasource['nodes'].append(posixFS.make_json(current_app.config['PUBLIC_DIR']))
        datasource_tree.append(datasource)
    return datasource_tree
def fs_id_by_prefix(path):
    """Map a path's first component ('HDFS' or 'LocalFS') to a filesystem.

    Despite the name, this returns a filesystem object -- or None for an
    unknown prefix -- not an id.
    """
    parts = os.path.normpath(path).split(os.sep)
    if not parts:
        return None
    prefix_to_id = {'HDFS': 1, 'LocalFS': 2}
    dsid = prefix_to_id.get(parts[0])
    if dsid is None:
        return None
    ds = DataSource.query.get(dsid)
    root = Utility.get_rootdir(ds.id)
    if dsid == 1:
        return HadoopFileSystem(ds.url, 'hdfs')
    return PosixFileSystem(root)
def download_biowl(path):
    """Resolve *path* to a local file and send it to the client.

    The mimetype is guessed from the filename; files with an unknown
    type are sent as attachments instead of rendered inline.
    """
    # construct data source tree
    fs = fs_id_by_prefix(path)
    # NOTE(review): fs is None for an unrecognised path prefix, which would
    # raise AttributeError on the next line -- confirm callers always pass
    # an 'HDFS'/'LocalFS'-prefixed path.
    fullpath = fs.download(path)
    mime = mimetypes.guess_type(fullpath)[0]
    return send_from_directory(os.path.dirname(fullpath), os.path.basename(fullpath), mimetype=mime, as_attachment = mime is None )
class InterpreterHelper():
    """Owns the PhenoWL interpreter/code generator and their function library."""

    def __init__(self):
        # JSON-friendly summaries of the loaded library functions (for the UI).
        self.funcs = []
        self.interpreter = PhenoWLInterpreter()
        self.codeGenerator = PhenoWLCodeGenerator()
        self.reload()

    def reload(self):
        """(Re)load the function library for both machines and rebuild self.funcs."""
        self.funcs.clear()
        librariesdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../biowl/libraries')
        librariesdir = os.path.normpath(librariesdir)
        self.interpreter.context.load_library(librariesdir)
        funclist = []
        for f in self.interpreter.context.library.funcs.values():
            funclist.extend(f)
        # Stable ordering for the UI: group by package, then by name.
        funclist.sort(key=lambda x: (x.package, x.name))
        for f in funclist:
            self.funcs.append({"package_name": f.package if f.package else "", "name": f.name, "internal": f.internal, "example": f.example if f.example else "", "desc": f.desc if f.desc else "", "runmode": f.runmode if f.runmode else ""})
        self.codeGenerator.context.load_library(librariesdir)

    def run(self, machine, script):
        """Parse and execute *script* on *machine*; never raises.

        Returns a dict with the machine's out/err streams plus the
        wall-clock parse+run duration.  Note: changes the process-wide
        working directory as a side effect.
        """
        parserdir = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../biowl/'))
        os.chdir(parserdir) #set dir of this file to current directory
        duration = 0
        try:
            machine.context.reload()
            parser = PhenoWLParser(PythonGrammar())
            with Timer() as t:
                prog = parser.parse(script)
                machine.run(prog)
            duration = t.secs
        except:
            # Deliberate catch-all: any failure is reported via the err stream.
            machine.context.err.append("Error in parse and interpretation")
        return { 'out': machine.context.out, 'err': machine.context.err, 'duration': "{:.4f}s".format(duration) }
# Module-level singleton shared by the /functions and /samples views below.
interpreter = InterpreterHelper()
@main.route('/biowl', methods=['GET', 'POST'])
@login_required
def biowl():
    """Render the biowl workflow editor page."""
    return render_template('biowl.html')
@main.route('/datasources', methods=['GET', 'POST'])
@login_required
def datasources():
    """Serve the datasource tree, or stream a file when a download is requested."""
    if request.form.get('download'):
        return download_biowl(request.form['download'])
    return json.dumps({'datasources': load_data_sources_biowl()})
@main.route('/functions', methods=['GET', 'POST'])
@login_required
def functions():
    """Run a script asynchronously, or list the available library functions.

    With a 'script' (interpret) or 'code' (generate code) query argument,
    a Runnable row is created for the current user and the work is handed
    off to Celery; without arguments the function catalogue is returned.
    """
    if request.args.get('script') or request.args.get('code'):
        script = request.args.get('script') if request.args.get('script') else request.args.get('code')
        # 'script' selects the interpreter, 'code' the code generator.
        machine = interpreter.interpreter if request.args.get('script') else interpreter.codeGenerator
        runnable_id = Runnable.create_runnable(current_user.id)
        runnable = Runnable.query.get(runnable_id)
        runnable.script = script
        # Display name: first 40 chars of the script, ellipsised if truncated.
        runnable.name = script[:min(40, len(script))]
        if len(script) > len(runnable.name):
            runnable.name += "..."
        db.session.commit()
        task = run_script.delay(machine, script)
        # Second commit: the celery id only exists after the task is queued.
        runnable.celery_id = task.id
        db.session.commit()
        #runnable_manager.submit_func(runnable_id, interpreter.run, machine, script)
        return json.dumps({})
    return json.dumps({'functions': interpreter.funcs })
class Samples():
    """Load and persist the example scripts shown in the biowl editor."""

    @staticmethod
    def load_samples_recursive(library_def_file):
        """Collect samples from a .json file, or from every file under a directory."""
        if os.path.isfile(library_def_file):
            return Samples.load_samples(library_def_file)
        all_samples = []
        for f in os.listdir(library_def_file):
            samples = Samples.load_samples_recursive(os.path.join(library_def_file, f))
            all_samples.extend(samples if isinstance(samples, list) else [samples])
        #all_samples = {**all_samples, **samples}
        return all_samples

    @staticmethod
    def load_samples(sample_def_file):
        """Parse one sample definition file; returns [] for non-JSON files."""
        samples = []
        if not os.path.isfile(sample_def_file) or not sample_def_file.endswith(".json"):
            return samples
        try:
            with open(sample_def_file, 'r') as json_data:
                d = json.load(json_data)
                samples = d["samples"] if d.get("samples") else d
        finally:
            # Returning from finally suppresses any parse error raised above,
            # so a malformed file yields [] instead of propagating.
            return samples

    @staticmethod
    def get_samples_as_list():
        """Flatten all known samples into UI-ready dicts."""
        samples = []
        samplesdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../biowl/samples')
        for s in Samples.load_samples_recursive(samplesdir):
            samples.append({"name": s["name"], "desc": s["desc"], "sample": '\n'.join(s["sample"])})
        return samples

    @staticmethod
    def unique_filename(path, prefix, ext):
        """Return the first '<prefix>(<i>).<ext>' path under *path* that is free."""
        make_fn = lambda i: os.path.join(path, '{0}({1}).{2}'.format(prefix, i, ext))
        for i in range(1, sys.maxsize):
            uni_fn = make_fn(i)
            if not os.path.exists(uni_fn):
                return uni_fn

    @staticmethod
    def add_sample(sample, name, desc):
        """Write a new per-user sample file as hand-built JSON.

        Always answers 201; note the process-wide os.chdir side effect.
        """
        this_path = os.path.dirname(os.path.abspath(__file__))
        os.chdir(this_path) #set dir of this file to current directory
        samplesdir = os.path.normpath(os.path.join(this_path, '../biowl/samples'))
        try:
            if sample and name:
                new_path = os.path.normpath(os.path.join(samplesdir, 'users', current_user.username))
                if not os.path.isdir(new_path):
                    os.makedirs(new_path)
                path = Samples.unique_filename(new_path, 'sample', 'json')
                with open(path, 'w') as fp:
                    fp.write("{\n")
                    fp.write('{0}"name":"{1}",\n'.format(" " * 4, name));
                    fp.write('{0}"desc":"{1}",\n'.format(" " * 4, desc));
                    fp.write('{0}"sample":[\n'.format(" " * 4))
                    # Normalise newlines and swap double quotes for single so
                    # each script line can be embedded in a JSON string literal.
                    sample = sample.replace("\\n", "\n").replace("\r\n", "\n").replace("\"", "\'")
                    lines = sample.split("\n")
                    for line in lines[0:-1]:
                        fp.write('{0}"{1}",\n'.format(" " * 8, line))
                    fp.write('{0}"{1}"\n'.format(" " * 8, lines[-1]))
                    fp.write("{0}]\n}}".format(" " * 4))
                    # json.dump(samples, fp, indent=4, separators=(',', ': '))
        finally:
            # Returning from finally suppresses any I/O error raised above,
            # so the client always sees success.
            return { 'out': '', 'err': ''}, 201
@main.route('/samples', methods=['GET', 'POST'])
@login_required
def samples():
    """List the sample scripts, or store a newly submitted one."""
    posted = request.form.get('sample')
    if posted:
        return Samples.add_sample(posted, request.form.get('name'),
                                  request.form.get('desc'))
    return json.dumps({'samples': Samples.get_samples_as_list()})
@main.route('/runnables', methods=['GET', 'POST'])
@login_required
def runnables():
    """Query or stop the current user's script runs.

    ?id=<n>      -> that runnable as JSON
    ?stop=<ids>  -> stop the comma-separated celery tasks, return new states
    (no args)    -> all runnables of the current user, statuses refreshed
    """
    if request.args.get('id'):
        runnable = Runnable.query.get(int(request.args.get('id')))
        return jsonify(runnable = runnable.to_json())
    elif request.args.get('stop'):
        ids = request.args.get('stop')
        ids = ids.split(",")
        new_status = []
        for id in ids:
            runnable = Runnable.query.get(int(id))
            stop_script(runnable.celery_id)
            sync_task_status_with_db(runnable)
            new_status.append(runnable)
        return jsonify(runnables =[i.to_json() for i in new_status])
    # Refresh every task's status from celery before reporting.
    sync_task_status_with_db_for_user(current_user.id)
    # runnables_db = Runnable.query.filter(Runnable.user_id == current_user.id)
    # rs = []
    # for r in runnables_db:
    # rs.append(r.to_json())
    # return jsonify(runnables = rs)
    return jsonify(runnables =[i.to_json() for i in Runnable.query.filter(Runnable.user_id == current_user.id)])
import os
import json
import jinja2
import traceback
import pyqrcode
import base64
from functools import wraps
from flask_login import login_user, logout_user, current_user, login_required
from flask import Flask, g, request, make_response, jsonify, render_template, session, redirect, url_for, send_from_directory
from werkzeug import secure_filename
from lib import utils
from app import app, login_manager
from .models import User, Role, Domain, DomainUser, Record, Server, History, Anonymous, Setting, DomainSetting
from io import BytesIO
from distutils.util import strtobool
from distutils.version import StrictVersion
from optparse import Values
# Register custom display filters used by the templates.
jinja2.filters.FILTERS['display_record_name'] = utils.display_record_name
jinja2.filters.FILTERS['display_master_name'] = utils.display_master_name
jinja2.filters.FILTERS['display_second_to_time'] = utils.display_time

# Flag for pdns v4.x.x
# TODO: Find another way to do this
PDNS_VERSION = app.config['PDNS_VERSION']
if StrictVersion(PDNS_VERSION) >= StrictVersion('4.0.0'):
    NEW_SCHEMA = True
else:
    NEW_SCHEMA = False
@app.context_processor
def inject_fullscreen_layout_setting():
    """Expose the 'fullscreen_layout' setting to every template as a bool."""
    setting = Setting.query.filter(Setting.name == 'fullscreen_layout').first()
    return dict(fullscreen_layout_setting=strtobool(setting.value))
@app.context_processor
def inject_record_helper_setting():
    """Expose the 'record_helper' setting to every template as a bool."""
    setting = Setting.query.filter(Setting.name == 'record_helper').first()
    return dict(record_helper_setting=strtobool(setting.value))
@app.context_processor
def inject_default_record_table_size_setting():
    """Expose the 'default_record_table_size' setting value to templates."""
    setting = Setting.query.filter(Setting.name == 'default_record_table_size').first()
    return dict(default_record_table_size_setting=setting.value)
# START USER AUTHENTICATION HANDLER
@app.before_request
def before_request():
    """Short-circuit to the maintenance page when enabled; expose the user."""
    # check site maintenance mode first
    maintenance = Setting.query.filter(Setting.name == 'maintenance').first()
    if maintenance is not None and maintenance.value == 'True':
        return render_template('maintenance.html')

    # check if user is anonymous
    g.user = current_user
    login_manager.anonymous_user = Anonymous
@login_manager.user_loader
def load_user(id):
    """Flask-Login hook: resolve a session user id to a User row.

    The returned object becomes current_user.
    """
    return User.query.get(int(id))
def dyndns_login_required(f):
    """Like login_required, but answers 'badauth' in dyndns format instead of redirecting."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not current_user.is_authenticated:
            return render_template('dyndns.html', response='badauth'), 200
        return f(*args, **kwargs)
    return decorated_function
@login_manager.request_loader
def login_via_authorization_header(request):
auth_header = request.headers.get('Authorization')
if auth_header:
auth_header = auth_header.replace('Basic ', '', 1)
try:
auth_header = base64.b64decode(auth_header)
username,password = auth_header.split(":")
except TypeError, e:
error = e.message['desc'] if 'desc' in e.message else e
return None
user = User(username=username, password=password, plain_text_password=password)
try:
auth = user.is_validate(method='LOCAL')
if auth == False:
return None
else:
login_user(user, remember = False)
return user
except Exception, e:
return None
return None
# END USER AUTHENTICATION HANDLER
# START CUSTOMIZE DECORATOR
def admin_role_required(f):
    """Restrict a view to users whose role is 'Administrator'; others get the 401 page."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if g.user.role.name == 'Administrator':
            return f(*args, **kwargs)
        return redirect(url_for('error', code=401))
    return decorated_function
# END CUSTOMIZE DECORATOR
# START VIEWS
@app.errorhandler(400)
def http_bad_request(e):
    """Send HTTP 400 errors to the shared error page."""
    return redirect(url_for('error', code=400))
@app.errorhandler(401)
def http_unauthorized(e):
    """Send HTTP 401 errors to the shared error page."""
    return redirect(url_for('error', code=401))
@app.errorhandler(404)
# NOTE(review): this function's name and the one below are swapped (this one
# handles 404, the next handles 500); the decorator argument is what Flask
# actually registers, so behaviour is correct despite the names.
def http_internal_server_error(e):
    return redirect(url_for('error', code=404))
@app.errorhandler(500)
# NOTE(review): misnamed -- this handles 500, not "page not found"; the
# decorator argument is what Flask registers, so behaviour is correct.
def http_page_not_found(e):
    return redirect(url_for('error', code=500))
@app.route('/error/<string:code>')
def error(code, msg=None):
    """Render the error page matching *code*, falling back to a 404 page."""
    if code in ('400', '401', '404', '500'):
        return render_template('errors/%s.html' % code, msg=msg), int(code)
    return render_template('errors/404.html'), 404
@app.route('/register', methods=['GET'])
def register():
    """Show the signup form, or a 404 page when registration is disabled."""
    if app.config['SIGNUP_ENABLED']:
        return render_template('register.html')
    return render_template('errors/404.html'), 404
@app.route('/login', methods=['GET', 'POST'])
@login_manager.unauthorized_handler
def login():
    """Combined login and registration endpoint (Python 2 syntax below).

    GET renders the login form.  A POST without firstname/lastname/email
    is a login attempt (LOCAL or LDAP, with optional TOTP); a POST that
    carries those fields creates a new local account instead.
    """
    # these parameters will be needed in multiple paths
    LDAP_ENABLED = True if 'LDAP_TYPE' in app.config.keys() else False
    LOGIN_TITLE = app.config['LOGIN_TITLE'] if 'LOGIN_TITLE' in app.config.keys() else ''
    BASIC_ENABLED = app.config['BASIC_ENABLED']
    SIGNUP_ENABLED = app.config['SIGNUP_ENABLED']
    # Already signed in: go straight to the dashboard.
    if g.user is not None and current_user.is_authenticated:
        return redirect(url_for('dashboard'))
    if request.method == 'GET':
        return render_template('login.html', ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
    # process login
    username = request.form['username']
    password = request.form['password']
    otp_token = request.form['otptoken'] if 'otptoken' in request.form else None
    auth_method = request.form['auth_method'] if 'auth_method' in request.form else 'LOCAL'
    # addition fields for registration case
    firstname = request.form['firstname'] if 'firstname' in request.form else None
    lastname = request.form['lastname'] if 'lastname' in request.form else None
    email = request.form['email'] if 'email' in request.form else None
    rpassword = request.form['rpassword'] if 'rpassword' in request.form else None
    if None in [firstname, lastname, email]:
        #login case
        remember_me = False
        if 'remember' in request.form:
            remember_me = True
        user = User(username=username, password=password, plain_text_password=password)
        try:
            auth = user.is_validate(method=auth_method)
            if auth == False:
                return render_template('login.html', error='Invalid credentials', ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
        except Exception, e:
            error = e.message['desc'] if 'desc' in e.message else e
            return render_template('login.html', error=error, ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
        # check if user enabled OPT authentication
        if user.otp_secret:
            if otp_token:
                good_token = user.verify_totp(otp_token)
                if not good_token:
                    return render_template('login.html', error='Invalid credentials', ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
            else:
                return render_template('login.html', error='Token required', ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
        login_user(user, remember = remember_me)
        return redirect(request.args.get('next') or url_for('index'))
    else:
        # registration case
        user = User(username=username, plain_text_password=password, firstname=firstname, lastname=lastname, email=email)
        # TODO: Move this into the JavaScript
        # validate password and password confirmation
        if password != rpassword:
            error = "Passsword and confirmation do not match"
            return render_template('register.html', error=error)
        try:
            result = user.create_local_user()
            if result == True:
                return render_template('login.html', username=username, password=password, ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
            else:
                return render_template('register.html', error=result)
        except Exception, e:
            error = e.message['desc'] if 'desc' in e.message else e
            return render_template('register.html', error=error)
@app.route('/logout')
def logout():
    """Terminate the session and go back to the login page."""
    logout_user()
    return redirect(url_for('login'))
@app.route('/dashboard', methods=['GET', 'POST'])
@login_required
def dashboard():
    """Render the dashboard: domain/user/history stats plus pdns uptime."""
    d = Domain().update()
    # Admins see every domain; regular users only the ones granted to them.
    if current_user.role.name == 'Administrator':
        domains = Domain.query.all()
    else:
        domains = User(id=current_user.id).get_domain()
    # stats for dashboard
    domain_count = Domain.query.count()
    users = User.query.all()
    history_number = History.query.count()
    history = History.query.order_by(History.created_on.desc()).limit(4)
    server = Server(server_id='localhost')
    statistics = server.get_statistic()
    if statistics:
        # Python 2: filter() returns a list, so [0] indexing is valid here.
        uptime = filter(lambda uptime: uptime['name'] == 'uptime', statistics)[0]['value']
    else:
        uptime = 0
    return render_template('dashboard.html', domains=domains, domain_count=domain_count, users=users, history_number=history_number, uptime=uptime, histories=history)
@app.route('/domain/<string:domain_name>', methods=['GET', 'POST'])
@app.route('/domain', methods=['GET', 'POST'])
@login_required
def domain(domain_name):
    """Show a domain's editable records, fetched live from the PowerDNS API."""
    r = Record()
    domain = Domain.query.filter(Domain.name == domain_name).first()
    if domain:
        # query domain info from PowerDNS API
        zone_info = r.get_record_data(domain.name)
        if zone_info:
            jrecords = zone_info['records']
        else:
            # can not get any record, API server might be down
            return redirect(url_for('error', code=500))
        records = []
        #TODO: This should be done in the "model" instead of "view"
        if NEW_SCHEMA:
            # pdns >= 4.0: each rrset nests its records one level deeper.
            for jr in jrecords:
                if jr['type'] in app.config['RECORDS_ALLOW_EDIT']:
                    for subrecord in jr['records']:
                        record = Record(name=jr['name'], type=jr['type'], status='Disabled' if subrecord['disabled'] else 'Active', ttl=jr['ttl'], data=subrecord['content'])
                        records.append(record)
        else:
            for jr in jrecords:
                if jr['type'] in app.config['RECORDS_ALLOW_EDIT']:
                    record = Record(name=jr['name'], type=jr['type'], status='Disabled' if jr['disabled'] else 'Active', ttl=jr['ttl'], data=jr['content'])
                    records.append(record)
        return render_template('domain.html', domain=domain, records=records, editable_records=app.config['RECORDS_ALLOW_EDIT'])
    else:
        return redirect(url_for('error', code=404))
@app.route('/admin/domain/add', methods=['GET', 'POST'])
@login_required
@admin_role_required
def domain_add():
    """Create a new zone (POST) or show the creation form (GET)."""
    if request.method == 'POST':
        try:
            domain_name = request.form.getlist('domain_name')[0]
            domain_type = request.form.getlist('radio_type')[0]
            soa_edit_api = request.form.getlist('radio_type_soa_edit_api')[0]
            if ' ' in domain_name or not domain_name or not domain_type:
                return render_template('errors/400.html', msg="Please correct your input"), 400
            if domain_type == 'slave':
                # Slave zones take a comma-separated list of master IPs.
                if request.form.getlist('domain_master_address'):
                    domain_master_string = request.form.getlist('domain_master_address')[0]
                    domain_master_string = domain_master_string.replace(' ','')
                    domain_master_ips = domain_master_string.split(',')
            else:
                domain_master_ips = []
            d = Domain()
            result = d.add(domain_name=domain_name, domain_type=domain_type, soa_edit_api=soa_edit_api, domain_master_ips=domain_master_ips)
            if result['status'] == 'ok':
                history = History(msg='Add domain %s' % domain_name, detail=str({'domain_type': domain_type, 'domain_master_ips': domain_master_ips}), created_by=current_user.username)
                history.add()
                return redirect(url_for('dashboard'))
            else:
                return render_template('errors/400.html', msg=result['msg']), 400
        except:
            # Any unexpected failure becomes the generic 500 page.
            return redirect(url_for('error', code=500))
    return render_template('domain_add.html')
@app.route('/admin/domain/<string:domain_name>/delete', methods=['GET'])
@login_required
@admin_role_required
def domain_delete(domain_name):
    """Remove a domain via the API and record the action in the history."""
    result = Domain().delete(domain_name)
    if result['status'] == 'error':
        return redirect(url_for('error', code=500))
    entry = History(msg='Delete domain %s' % domain_name,
                    created_by=current_user.username)
    entry.add()
    return redirect(url_for('dashboard'))
@app.route('/admin/domain/<string:domain_name>/manage', methods=['GET', 'POST'])
@login_required
@admin_role_required
def domain_management(domain_name):
    """Manage which users may access *domain_name* (view on GET, save on POST)."""
    if request.method == 'GET':
        domain = Domain.query.filter(Domain.name == domain_name).first()
        if not domain:
            return redirect(url_for('error', code=404))
        users = User.query.all()
        # get list of user ids to initilize selection data
        d = Domain(name=domain_name)
        domain_user_ids = d.get_user()
        return render_template('domain_management.html', domain=domain, users=users, domain_user_ids=domain_user_ids)
    if request.method == 'POST':
        # username in right column
        new_user_list = request.form.getlist('domain_multi_user[]')
        # get list of user ids to compare
        d = Domain(name=domain_name)
        domain_user_ids = d.get_user()
        # grant/revoke user privielges
        d.grant_privielges(new_user_list)
        history = History(msg='Change domain %s access control' % domain_name, detail=str({'user_has_access': new_user_list}), created_by=current_user.username)
        history.add()
        return redirect(url_for('domain_management', domain_name=domain_name))
@app.route('/domain/<string:domain_name>/apply', methods=['POST'], strict_slashes=False)
@login_required
def record_apply(domain_name):
"""
example jdata: {u'record_ttl': u'1800', u'record_type': u'CNAME', u'record_name': u'test4', u'record_status': u'Active', u'record_data': u'duykhanh.me'}
"""
#TODO: filter removed records / name modified records.
try:
pdata = request.data
jdata = json.loads(pdata)
records = []
for j in jdata:
record = {
"name": domain_name if j['record_name'] in ['@', ''] else j['record_name'] + '.' + domain_name,
"type": j['record_type'],
"content": j['record_data'],
"disabled": True if j['record_status'] == 'Disabled' else False,
"name": domain_name if j['record_name'] in ['@', ''] else j['record_name'] + '.' + domain_name,
"ttl": int(j['record_ttl']) if j['record_ttl'] else 3600,
"type": j['record_type'],
}
records.append(record)
r = Record()
result = r.apply(domain_name, records)
if result['status'] == 'ok':
history = History(msg='Apply record changes to domain %s' % domain_name, detail=str(records), created_by=current_user.username)
history.add()
return make_response(jsonify( result ), 200)
else:
return make_response(jsonify( result ), 400)
except:
print traceback.format_exc()
return make_response(jsonify( {'status': 'error', 'msg': 'Error when applying new changes'} ), 500)
@app.route('/domain/<string:domain_name>/update', methods=['POST'], strict_slashes=False)
@login_required
def record_update(domain_name):
    """
    This route is used for domain work as Slave Zone only
    Pulling the records update from its Master
    """
    try:
        pdata = request.data
        jdata = json.loads(pdata)
        # NOTE(review): the URL's domain_name is overridden by the posted
        # 'domain' field -- confirm this is intended.
        domain_name = jdata['domain']
        d = Domain()
        result = d.update_from_master(domain_name)
        if result['status'] == 'ok':
            return make_response(jsonify( {'status': 'ok', 'msg': result['msg']} ), 200)
        else:
            return make_response(jsonify( {'status': 'error', 'msg': result['msg']} ), 500)
    except:
        print traceback.format_exc()
        return make_response(jsonify( {'status': 'error', 'msg': 'Error when applying new changes'} ), 500)
@app.route('/domain/<string:domain_name>/record/<string:record_name>/type/<string:record_type>/delete', methods=['GET'])
@login_required
@admin_role_required
def record_delete(domain_name, record_name, record_type):
    """Delete one record from a domain, then return to the domain view."""
    try:
        r = Record(name=record_name, type=record_type)
        result = r.delete(domain=domain_name)
        if result['status'] == 'error':
            # A failed deletion is only logged; the redirect still happens.
            print result['msg']
    except:
        print traceback.format_exc()
        return redirect(url_for('error', code=500)), 500
    return redirect(url_for('domain', domain_name=domain_name))
@app.route('/domain/<string:domain_name>/dnssec', methods=['GET'])
@login_required
def domain_dnssec(domain_name):
    """Return the DNSSEC information of *domain_name* as JSON."""
    dnssec = Domain().get_domain_dnssec(domain_name)
    return make_response(jsonify(dnssec), 200)
@app.route('/domain/<string:domain_name>/managesetting', methods=['GET', 'POST'])
@login_required
@admin_role_required
def admin_setdomainsetting(domain_name):
    """Create or update one per-domain setting from a JSON POST body."""
    if request.method == 'POST':
        #
        # post data should in format
        # {'action': 'set_setting', 'setting': 'default_action, 'value': 'True'}
        #
        try:
            pdata = request.data
            jdata = json.loads(pdata)
            data = jdata['data']
            if jdata['action'] == 'set_setting':
                new_setting = data['setting']
                new_value = data['value']
                domain = Domain.query.filter(Domain.name == domain_name).first()
                setting = DomainSetting.query.filter(DomainSetting.domain == domain).filter(DomainSetting.setting == new_setting).first()
                if setting:
                    # Existing row: update its value in place.
                    if setting.set(new_value):
                        history = History(msg='Setting %s changed value to %s for %s' % (new_setting, new_value, domain.name), created_by=current_user.username)
                        history.add()
                        return make_response(jsonify( { 'status': 'ok', 'msg': 'Setting updated.' } ))
                    else:
                        return make_response(jsonify( { 'status': 'error', 'msg': 'Unable to set value of setting.' } ))
                else:
                    # No row yet: create the setting on the domain.
                    if domain.add_setting(new_setting, new_value):
                        history = History(msg='New setting %s with value %s for %s has been created' % (new_setting, new_value, domain.name), created_by=current_user.username)
                        history.add()
                        return make_response(jsonify( { 'status': 'ok', 'msg': 'New setting created and updated.' } ))
                    else:
                        return make_response(jsonify( { 'status': 'error', 'msg': 'Unable to create new setting.' } ))
            else:
                return make_response(jsonify( { 'status': 'error', 'msg': 'Action not supported.' } ), 400)
        except:
            print traceback.format_exc()
            return make_response(jsonify( { 'status': 'error', 'msg': 'There is something wrong, please contact Administrator.' } ), 400)
@app.route('/admin', methods=['GET', 'POST'])
@login_required
@admin_role_required
def admin():
    """Render the admin overview: domains, users, pdns config and statistics."""
    domains = Domain.query.all()
    users = User.query.all()
    server = Server(server_id='localhost')
    configs = server.get_config()
    statistics = server.get_statistic()
    history_number = History.query.count()
    if statistics:
        # Python 2: filter() returns a list, so [0] indexing is valid here.
        uptime = filter(lambda uptime: uptime['name'] == 'uptime', statistics)[0]['value']
    else:
        uptime = 0
    return render_template('admin.html', domains=domains, users=users, configs=configs, statistics=statistics, uptime=uptime, history_number=history_number)
@app.route('/admin/user/create', methods=['GET', 'POST'])
@login_required
@admin_role_required
def admin_createuser():
    """Show the user-creation form (GET) or create a local user (POST)."""
    if request.method == 'GET':
        return render_template('admin_createuser.html')
    fdata = request.form
    user = User(username=fdata['username'], plain_text_password=fdata['password'],
                firstname=fdata['firstname'], lastname=fdata['lastname'],
                email=fdata['email'])
    if fdata['password'] == "":
        return render_template('admin_createuser.html', user=user, blank_password=True)
    result = user.create_local_user()
    if result == 'Email already existed':
        return render_template('admin_createuser.html', user=user, duplicate_email=True)
    if result == 'Username already existed':
        return render_template('admin_createuser.html', user=user, duplicate_username=True)
    return redirect(url_for('admin_manageuser'))
@app.route('/admin/manageuser', methods=['GET', 'POST'])
@login_required
@admin_role_required
def admin_manageuser():
    """List users (GET) or apply a user-management action from JSON (POST).

    Supported POST actions: delete_user, revoke_user_privielges, set_admin.
    """
    if request.method == 'GET':
        users = User.query.order_by(User.username).all()
        return render_template('admin_manageuser.html', users=users)
    if request.method == 'POST':
        #
        # post data should in format
        # {'action': 'delete_user', 'data': 'username'}
        #
        try:
            pdata = request.data
            jdata = json.loads(pdata)
            data = jdata['data']
            if jdata['action'] == 'delete_user':
                user = User(username=data)
                result = user.delete()
                if result:
                    history = History(msg='Delete username %s' % data, created_by=current_user.username)
                    history.add()
                    return make_response(jsonify( { 'status': 'ok', 'msg': 'User has been removed.' } ), 200)
                else:
                    return make_response(jsonify( { 'status': 'error', 'msg': 'Cannot remove user.' } ), 500)
            elif jdata['action'] == 'revoke_user_privielges':
                user = User(username=data)
                result = user.revoke_privilege()
                if result:
                    history = History(msg='Revoke %s user privielges' % data, created_by=current_user.username)
                    history.add()
                    return make_response(jsonify( { 'status': 'ok', 'msg': 'Revoked user privielges.' } ), 200)
                else:
                    return make_response(jsonify( { 'status': 'error', 'msg': 'Cannot revoke user privilege.' } ), 500)
            elif jdata['action'] == 'set_admin':
                # For this action 'data' is a dict, not a bare username.
                username = data['username']
                is_admin = data['is_admin']
                user = User(username=username)
                result = user.set_admin(is_admin)
                if result:
                    history = History(msg='Change user role of %s' % username, created_by=current_user.username)
                    history.add()
                    return make_response(jsonify( { 'status': 'ok', 'msg': 'Changed user role successfully.' } ), 200)
                else:
                    return make_response(jsonify( { 'status': 'error', 'msg': 'Cannot change user role.' } ), 500)
            else:
                return make_response(jsonify( { 'status': 'error', 'msg': 'Action not supported.' } ), 400)
        except:
            print traceback.format_exc()
            return make_response(jsonify( { 'status': 'error', 'msg': 'There is something wrong, please contact Administrator.' } ), 400)
@app.route('/admin/history', methods=['GET', 'POST'])
@login_required
@admin_role_required
def admin_history():
    """List all history entries (GET) or purge them all (POST).

    Fix: the POST success response previously said 'Changed user role
    successfully.' -- a copy/paste from the set_admin handler; it now
    describes the history removal.
    """
    if request.method == 'POST':
        h = History()
        result = h.remove_all()
        if result:
            # Record the purge itself as the first new history entry.
            history = History(msg='Remove all histories', created_by=current_user.username)
            history.add()
            return make_response(jsonify( { 'status': 'ok', 'msg': 'Removed all histories.' } ), 200)
        else:
            return make_response(jsonify( { 'status': 'error', 'msg': 'Can not remove histories.' } ), 500)
    if request.method == 'GET':
        histories = History.query.all()
        return render_template('admin_history.html', histories=histories)
@app.route('/admin/settings', methods=['GET'])
@login_required
@admin_role_required
def admin_settings():
    """List all editable settings (the maintenance flag is managed elsewhere)."""
    # Route only accepts GET, so no method dispatch is needed.
    visible = Setting.query.filter(Setting.name != 'maintenance')
    return render_template('admin_settings.html', settings=visible)
@app.route('/admin/setting/<string:setting>/toggle', methods=['POST'])
@login_required
@admin_role_required
def admin_settings_toggle(setting):
    """Flip a boolean setting and report the outcome as JSON."""
    if not Setting().toggle(setting):
        return make_response(jsonify( { 'status': 'error', 'msg': 'Unable to toggle setting.' } ), 500)
    return make_response(jsonify( { 'status': 'ok', 'msg': 'Toggled setting successfully.' } ), 200)
@app.route('/admin/setting/<string:setting>/edit', methods=['POST'])
@login_required
@admin_role_required
def admin_settings_edit(setting):
    """Set a named setting to the value posted as JSON ({'value': ...})."""
    pdata = request.data
    jdata = json.loads(pdata)
    # Convert the value returned from the UI to a string before storing it,
    # to allow for differences in how database engines store booleans.
    new_value = str(jdata['value'])
    result = Setting().set(setting, new_value)
    if (result):
        # bug fix: both messages used to say 'Toggled ... setting', copy-pasted
        # from the toggle route
        return make_response(jsonify( { 'status': 'ok', 'msg': 'Updated setting successfully.' } ), 200)
    else:
        return make_response(jsonify( { 'status': 'error', 'msg': 'Unable to update setting.' } ), 500)
@app.route('/user/profile', methods=['GET', 'POST'])
@login_required
def user_profile():
    """Show (GET) or update (POST) the current user's profile.

    POST accepts either regular form fields (name/email/password plus an
    optional avatar upload) or a JSON body of the form
    {'action': 'enable_otp', 'data': {'enable_otp': ...}}.
    """
    if request.method == 'GET':
        return render_template('user_profile.html')
    if request.method == 'POST':
        # get new profile info
        firstname = request.form['firstname'] if 'firstname' in request.form else ''
        lastname = request.form['lastname'] if 'lastname' in request.form else ''
        email = request.form['email'] if 'email' in request.form else ''
        new_password = request.form['password'] if 'password' in request.form else ''
        # json data
        if request.data:
            # JSON branch: toggling OTP returns immediately and leaves the
            # other profile fields untouched
            jdata = json.loads(request.data)
            data = jdata['data']
            if jdata['action'] == 'enable_otp':
                enable_otp = data['enable_otp']
                user = User(username=current_user.username)
                user.update_profile(enable_otp=enable_otp)
                return make_response(jsonify( { 'status': 'ok', 'msg': 'Change OTP Authentication successfully. Status: %s' % enable_otp } ), 200)
        # get new avatar
        save_file_name = None
        if 'file' in request.files:
            file = request.files['file']
            if file:
                filename = secure_filename(file.filename)
                file_extension = filename.rsplit('.', 1)[1]
                if file_extension.lower() in ['jpg', 'jpeg', 'png']:
                    # one avatar file per user, keyed by username
                    save_file_name = current_user.username + '.' + file_extension
                    file.save(os.path.join(app.config['UPLOAD_DIR'], 'avatar', save_file_name))
        # update user profile
        user = User(username=current_user.username, plain_text_password=new_password, firstname=firstname, lastname=lastname, email=email, avatar=save_file_name, reload_info=False)
        user.update_profile()
        return render_template('user_profile.html')
@app.route('/user/avatar/<string:filename>')
def user_avatar(filename):
    # Serve a previously uploaded avatar image from the upload directory.
    # NOTE(review): no @login_required here, so avatars are publicly readable
    # — confirm that is intentional.
    return send_from_directory(os.path.join(app.config['UPLOAD_DIR'], 'avatar'), filename)
@app.route('/qrcode')
@login_required
def qrcode():
    """Render the current user's TOTP provisioning URI as an SVG QR code."""
    if not current_user:
        return redirect(url_for('index'))
    # build the QR code for FreeOTP/Google Authenticator enrolment
    buf = BytesIO()
    pyqrcode.create(current_user.get_totp_uri()).svg(buf, scale=3)
    headers = {
        'Content-Type': 'image/svg+xml',
        'Cache-Control': 'no-cache, no-store, must-revalidate',
        'Pragma': 'no-cache',
        'Expires': '0'}
    return buf.getvalue(), 200, headers
@app.route('/nic/checkip.html', methods=['GET', 'POST'])
def dyndns_checkip():
    # Covers ddclient's default 'web' checkip service: echo the client IP back.
    client_ip = request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
    return render_template('dyndns.html', response=client_ip)
@app.route('/nic/update', methods=['GET', 'POST'])
@dyndns_login_required
def dyndns_update():
    """Implement the dyndns2 update protocol for records owned by the current user.

    Protocol response codes in use:
      good:   update successful
      nochg:  IP address already set to the update address
      nohost: hostname does not exist for this user account
      911:    server error
    HTTP status is always 200 because ddclient only reads the body on 200.
    reference: https://help.dyn.com/remote-access-api/perform-update/
    reference: https://help.dyn.com/remote-access-api/return-codes/
    """
    hostname = request.args.get('hostname')
    myip = request.args.get('myip')
    # robustness fix: a missing 'hostname' parameter used to raise
    # AttributeError on hostname.split below; report 'nohost' instead
    if not hostname:
        return render_template('dyndns.html', response='nohost'), 200
    try:
        # get all domains owned by the current user
        domains = User(id=current_user.id).get_domain()
    except:
        return render_template('dyndns.html', response='911'), 200
    # find the longest owned zone that is a suffix of the requested hostname
    domain = None
    domain_segments = hostname.split('.')
    for index in range(len(domain_segments)):
        domain_segments.pop(0)
        full_domain = '.'.join(domain_segments)
        potential_domain = Domain.query.filter(Domain.name == full_domain).first()
        if potential_domain in domains:
            domain = potential_domain
            break
    if not domain:
        history = History(msg="DynDNS update: attempted update of %s but it does not exist for this user" % hostname, created_by=current_user.username)
        history.add()
        return render_template('dyndns.html', response='nohost'), 200
    r = Record()
    r.name = hostname
    # check if the user requested record exists within this domain
    if r.exists(domain.name) and r.is_allowed:
        if r.data == myip:
            # record content did not change, return 'nochg'
            history = History(msg="DynDNS update: attempted update of %s but record did not change" % hostname, created_by=current_user.username)
            history.add()
            return render_template('dyndns.html', response='nochg'), 200
        else:
            oldip = r.data
            result = r.update(domain.name, myip)
            if result['status'] == 'ok':
                history = History(msg='DynDNS update: updated record %s in zone %s, it changed from %s to %s' % (hostname,domain.name,oldip,myip), detail=str(result), created_by=current_user.username)
                history.add()
                return render_template('dyndns.html', response='good'), 200
            else:
                return render_template('dyndns.html', response='911'), 200
    elif r.is_allowed:
        # record does not exist yet: create it on demand if the zone allows it
        ondemand_creation = DomainSetting.query.filter(DomainSetting.domain == domain).filter(DomainSetting.setting == 'create_via_dyndns').first()
        # robustness fix: guard against the setting row being absent entirely
        # (ondemand_creation is None) instead of crashing with AttributeError
        if ondemand_creation and bool(int(ondemand_creation.value)) == True:
            record = Record(name=hostname,type='A',data=myip,status=False,ttl=3600)
            result = record.add(domain.name)
            if result['status'] == 'ok':
                history = History(msg='DynDNS update: created record %s in zone %s, it now represents %s' % (hostname,domain.name,myip), detail=str(result), created_by=current_user.username)
                history.add()
                return render_template('dyndns.html', response='good'), 200
    history = History(msg="DynDNS update: attempted update of %s but it does not exist for this user" % hostname, created_by=current_user.username)
    history.add()
    return render_template('dyndns.html', response='nohost'), 200
@app.route('/', methods=['GET', 'POST'])
@login_required
def index():
    # The root URL simply forwards to the dashboard.
    return redirect(url_for('dashboard'))
# END VIEWS
Convert the value returned from the UI to a string before storing it in the DB.
This should allow for differences in how database engines store booleans.
import os
import json
import jinja2
import traceback
import pyqrcode
import base64
from functools import wraps
from flask_login import login_user, logout_user, current_user, login_required
from flask import Flask, g, request, make_response, jsonify, render_template, session, redirect, url_for, send_from_directory
from werkzeug import secure_filename
from lib import utils
from app import app, login_manager
from .models import User, Role, Domain, DomainUser, Record, Server, History, Anonymous, Setting, DomainSetting
from io import BytesIO
from distutils.util import strtobool
from distutils.version import StrictVersion
from optparse import Values
# Register the custom Jinja2 filters used by the templates.
jinja2.filters.FILTERS['display_record_name'] = utils.display_record_name
jinja2.filters.FILTERS['display_master_name'] = utils.display_master_name
jinja2.filters.FILTERS['display_second_to_time'] = utils.display_time
# Flag for pdns v4.x.x
# TODO: Find another way to do this
# The PowerDNS 4.x API nests records inside RRsets; NEW_SCHEMA selects the
# matching parsing path in the domain() view below.
PDNS_VERSION = app.config['PDNS_VERSION']
if StrictVersion(PDNS_VERSION) >= StrictVersion('4.0.0'):
    NEW_SCHEMA = True
else:
    NEW_SCHEMA = False
@app.context_processor
def inject_fullscreen_layout_setting():
    # Expose the 'fullscreen_layout' flag to every template as a bool.
    # NOTE(review): assumes the setting row always exists — .first() returning
    # None would raise AttributeError here; confirm DB seeding guarantees it.
    fullscreen_layout_setting = Setting.query.filter(Setting.name == 'fullscreen_layout').first()
    return dict(fullscreen_layout_setting=strtobool(fullscreen_layout_setting.value))
@app.context_processor
def inject_record_helper_setting():
    # Expose the 'record_helper' flag to every template as a bool.
    # NOTE(review): assumes the setting row always exists (see note above in
    # the sibling context processors).
    record_helper_setting = Setting.query.filter(Setting.name == 'record_helper').first()
    return dict(record_helper_setting=strtobool(record_helper_setting.value))
@app.context_processor
def inject_default_record_table_size_setting():
    # Expose the default record-table page size to every template (raw value,
    # not converted to int here).
    default_record_table_size_setting = Setting.query.filter(Setting.name == 'default_record_table_size').first()
    return dict(default_record_table_size_setting=default_record_table_size_setting.value)
# START USER AUTHENTICATION HANDLER
@app.before_request
def before_request():
    # check site maintenance mode first
    maintenance = Setting.query.filter(Setting.name == 'maintenance').first()
    if maintenance and maintenance.value == 'True':
        # returning a response here short-circuits every request while the
        # maintenance flag is set
        return render_template('maintenance.html')
    # check if user is anonymous
    g.user = current_user
    login_manager.anonymous_user = Anonymous
@login_manager.user_loader
def load_user(id):
    """
    This will be current_user
    """
    # Flask-Login hands the id back as a string; the primary key is an int.
    return User.query.get(int(id))
def dyndns_login_required(f):
    """Decorator: reject unauthenticated dyndns requests with 'badauth' (HTTP 200)."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if current_user.is_authenticated:
            return f(*args, **kwargs)
        return render_template('dyndns.html', response='badauth'), 200
    return decorated_function
@login_manager.request_loader
def login_via_authorization_header(request):
auth_header = request.headers.get('Authorization')
if auth_header:
auth_header = auth_header.replace('Basic ', '', 1)
try:
auth_header = base64.b64decode(auth_header)
username,password = auth_header.split(":")
except TypeError, e:
error = e.message['desc'] if 'desc' in e.message else e
return None
user = User(username=username, password=password, plain_text_password=password)
try:
auth = user.is_validate(method='LOCAL')
if auth == False:
return None
else:
login_user(user, remember = False)
return user
except Exception, e:
return None
return None
# END USER AUTHENTICATION HANDLER
# START CUSTOMIZE DECORATOR
def admin_role_required(f):
    """Decorator: allow only users whose role is 'Administrator'."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if g.user.role.name == 'Administrator':
            return f(*args, **kwargs)
        return redirect(url_for('error', code=401))
    return decorated_function
# END CUSTOMIZE DECORATOR
# START VIEWS
@app.errorhandler(400)
def http_bad_request(e):
    # Funnel framework-raised 400s through the shared error page.
    return redirect(url_for('error', code=400))
@app.errorhandler(401)
def http_unauthorized(e):
    # Same for 401s (e.g. raised by the role decorator's redirect target).
    return redirect(url_for('error', code=401))
# bug fix: the handler names for 404 and 500 were swapped; the registered
# behavior is unchanged — only the function names now match their status codes.
@app.errorhandler(404)
def http_page_not_found(e):
    return redirect(url_for('error', code=404))
@app.errorhandler(500)
def http_internal_server_error(e):
    return redirect(url_for('error', code=500))
@app.route('/error/<string:code>')
def error(code, msg=None):
    """Render the error page for a supported HTTP status code (404 page otherwise)."""
    supported_code = ('400', '401', '404', '500')
    if code not in supported_code:
        return render_template('errors/404.html'), 404
    return render_template('errors/%s.html' % code, msg=msg), int(code)
@app.route('/register', methods=['GET'])
def register():
    """Show the signup form, or a 404 when self-registration is disabled."""
    if not app.config['SIGNUP_ENABLED']:
        return render_template('errors/404.html'), 404
    return render_template('register.html')
@app.route('/login', methods=['GET', 'POST'])
@login_manager.unauthorized_handler
def login():
    """Serve the login page and process both login and registration POSTs.

    A POST carrying firstname/lastname/email is treated as a registration;
    otherwise it is a login attempt (LOCAL or LDAP, plus optional TOTP).
    """
    # these parameters will be needed in multiple paths
    LDAP_ENABLED = True if 'LDAP_TYPE' in app.config.keys() else False
    LOGIN_TITLE = app.config['LOGIN_TITLE'] if 'LOGIN_TITLE' in app.config.keys() else ''
    BASIC_ENABLED = app.config['BASIC_ENABLED']
    SIGNUP_ENABLED = app.config['SIGNUP_ENABLED']
    if g.user is not None and current_user.is_authenticated:
        return redirect(url_for('dashboard'))
    if request.method == 'GET':
        return render_template('login.html', ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
    # process login
    username = request.form['username']
    password = request.form['password']
    otp_token = request.form['otptoken'] if 'otptoken' in request.form else None
    auth_method = request.form['auth_method'] if 'auth_method' in request.form else 'LOCAL'
    # addition fields for registration case
    firstname = request.form['firstname'] if 'firstname' in request.form else None
    lastname = request.form['lastname'] if 'lastname' in request.form else None
    email = request.form['email'] if 'email' in request.form else None
    rpassword = request.form['rpassword'] if 'rpassword' in request.form else None
    if None in [firstname, lastname, email]:
        #login case
        remember_me = False
        if 'remember' in request.form:
            remember_me = True
        user = User(username=username, password=password, plain_text_password=password)
        try:
            auth = user.is_validate(method=auth_method)
            if auth == False:
                return render_template('login.html', error='Invalid credentials', ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
        except Exception, e:
            error = e.message['desc'] if 'desc' in e.message else e
            return render_template('login.html', error=error, ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
        # check if user enabled OPT authentication
        if user.otp_secret:
            if otp_token:
                good_token = user.verify_totp(otp_token)
                if not good_token:
                    return render_template('login.html', error='Invalid credentials', ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
            else:
                return render_template('login.html', error='Token required', ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
        login_user(user, remember = remember_me)
        return redirect(request.args.get('next') or url_for('index'))
    else:
        # registration case
        user = User(username=username, plain_text_password=password, firstname=firstname, lastname=lastname, email=email)
        # TODO: Move this into the JavaScript
        # validate password and password confirmation
        if password != rpassword:
            error = "Passsword and confirmation do not match"
            return render_template('register.html', error=error)
        try:
            result = user.create_local_user()
            if result == True:
                # success: drop back to the login page pre-filled with the
                # freshly created credentials
                return render_template('login.html', username=username, password=password, ldap_enabled=LDAP_ENABLED, login_title=LOGIN_TITLE, basic_enabled=BASIC_ENABLED, signup_enabled=SIGNUP_ENABLED)
            else:
                return render_template('register.html', error=result)
        except Exception, e:
            error = e.message['desc'] if 'desc' in e.message else e
            return render_template('register.html', error=error)
@app.route('/logout')
def logout():
    # End the session and send the user back to the login page.
    logout_user()
    return redirect(url_for('login'))
@app.route('/dashboard', methods=['GET', 'POST'])
@login_required
def dashboard():
    """Render the dashboard: domain list, counts, uptime and recent history."""
    # refresh the local domain table from the PowerDNS API first
    d = Domain().update()
    if current_user.role.name == 'Administrator':
        domains = Domain.query.all()
    else:
        domains = User(id=current_user.id).get_domain()
    # stats for dashboard
    domain_count = Domain.query.count()
    users = User.query.all()
    history_number = History.query.count()
    history = History.query.order_by(History.created_on.desc()).limit(4)
    server = Server(server_id='localhost')
    statistics = server.get_statistic()
    if statistics:
        # Python 2 filter() returns a list; pick out the 'uptime' statistic
        uptime = filter(lambda uptime: uptime['name'] == 'uptime', statistics)[0]['value']
    else:
        uptime = 0
    return render_template('dashboard.html', domains=domains, domain_count=domain_count, users=users, history_number=history_number, uptime=uptime, histories=history)
@app.route('/domain/<string:domain_name>', methods=['GET', 'POST'])
@app.route('/domain', methods=['GET', 'POST'])
@login_required
def domain(domain_name):
    """Show a zone's editable records, fetched live from the PowerDNS API."""
    r = Record()
    domain = Domain.query.filter(Domain.name == domain_name).first()
    if domain:
        # query domain info from PowerDNS API
        zone_info = r.get_record_data(domain.name)
        if zone_info:
            jrecords = zone_info['records']
        else:
            # can not get any record, API server might be down
            return redirect(url_for('error', code=500))
        records = []
        #TODO: This should be done in the "model" instead of "view"
        if NEW_SCHEMA:
            # pdns >= 4.0.0: each RRset carries a nested 'records' list
            for jr in jrecords:
                if jr['type'] in app.config['RECORDS_ALLOW_EDIT']:
                    for subrecord in jr['records']:
                        record = Record(name=jr['name'], type=jr['type'], status='Disabled' if subrecord['disabled'] else 'Active', ttl=jr['ttl'], data=subrecord['content'])
                        records.append(record)
        else:
            # pdns 3.x: flat record objects
            for jr in jrecords:
                if jr['type'] in app.config['RECORDS_ALLOW_EDIT']:
                    record = Record(name=jr['name'], type=jr['type'], status='Disabled' if jr['disabled'] else 'Active', ttl=jr['ttl'], data=jr['content'])
                    records.append(record)
        return render_template('domain.html', domain=domain, records=records, editable_records=app.config['RECORDS_ALLOW_EDIT'])
    else:
        return redirect(url_for('error', code=404))
@app.route('/admin/domain/add', methods=['GET', 'POST'])
@login_required
@admin_role_required
def domain_add():
    """Show the add-domain form (GET) or create a zone via the API (POST)."""
    if request.method == 'POST':
        try:
            domain_name = request.form.getlist('domain_name')[0]
            domain_type = request.form.getlist('radio_type')[0]
            soa_edit_api = request.form.getlist('radio_type_soa_edit_api')[0]
            if ' ' in domain_name or not domain_name or not domain_type:
                return render_template('errors/400.html', msg="Please correct your input"), 400
            if domain_type == 'slave':
                # slave zones take a comma-separated list of master IPs
                if request.form.getlist('domain_master_address'):
                    domain_master_string = request.form.getlist('domain_master_address')[0]
                    domain_master_string = domain_master_string.replace(' ','')
                    domain_master_ips = domain_master_string.split(',')
            else:
                domain_master_ips = []
            d = Domain()
            result = d.add(domain_name=domain_name, domain_type=domain_type, soa_edit_api=soa_edit_api, domain_master_ips=domain_master_ips)
            if result['status'] == 'ok':
                history = History(msg='Add domain %s' % domain_name, detail=str({'domain_type': domain_type, 'domain_master_ips': domain_master_ips}), created_by=current_user.username)
                history.add()
                return redirect(url_for('dashboard'))
            else:
                return render_template('errors/400.html', msg=result['msg']), 400
        except:
            # any unexpected failure (bad form data, API errors) lands here
            return redirect(url_for('error', code=500))
    return render_template('domain_add.html')
@app.route('/admin/domain/<string:domain_name>/delete', methods=['GET'])
@login_required
@admin_role_required
def domain_delete(domain_name):
    """Delete a zone via the PowerDNS API and log the action."""
    outcome = Domain().delete(domain_name)
    if outcome['status'] == 'error':
        return redirect(url_for('error', code=500))
    entry = History(msg='Delete domain %s' % domain_name, created_by=current_user.username)
    entry.add()
    return redirect(url_for('dashboard'))
@app.route('/admin/domain/<string:domain_name>/manage', methods=['GET', 'POST'])
@login_required
@admin_role_required
def domain_management(domain_name):
    """Show (GET) or update (POST) which users may access a zone."""
    if request.method == 'GET':
        domain = Domain.query.filter(Domain.name == domain_name).first()
        if not domain:
            return redirect(url_for('error', code=404))
        users = User.query.all()
        # get list of user ids to initialize selection data
        d = Domain(name=domain_name)
        domain_user_ids = d.get_user()
        return render_template('domain_management.html', domain=domain, users=users, domain_user_ids=domain_user_ids)
    if request.method == 'POST':
        # username in right column
        new_user_list = request.form.getlist('domain_multi_user[]')
        # get list of user ids to compare
        d = Domain(name=domain_name)
        domain_user_ids = d.get_user()
        # grant/revoke user privileges (model method name keeps its spelling)
        d.grant_privielges(new_user_list)
        history = History(msg='Change domain %s access control' % domain_name, detail=str({'user_has_access': new_user_list}), created_by=current_user.username)
        history.add()
        return redirect(url_for('domain_management', domain_name=domain_name))
@app.route('/domain/<string:domain_name>/apply', methods=['POST'], strict_slashes=False)
@login_required
def record_apply(domain_name):
"""
example jdata: {u'record_ttl': u'1800', u'record_type': u'CNAME', u'record_name': u'test4', u'record_status': u'Active', u'record_data': u'duykhanh.me'}
"""
#TODO: filter removed records / name modified records.
try:
pdata = request.data
jdata = json.loads(pdata)
records = []
for j in jdata:
record = {
"name": domain_name if j['record_name'] in ['@', ''] else j['record_name'] + '.' + domain_name,
"type": j['record_type'],
"content": j['record_data'],
"disabled": True if j['record_status'] == 'Disabled' else False,
"name": domain_name if j['record_name'] in ['@', ''] else j['record_name'] + '.' + domain_name,
"ttl": int(j['record_ttl']) if j['record_ttl'] else 3600,
"type": j['record_type'],
}
records.append(record)
r = Record()
result = r.apply(domain_name, records)
if result['status'] == 'ok':
history = History(msg='Apply record changes to domain %s' % domain_name, detail=str(records), created_by=current_user.username)
history.add()
return make_response(jsonify( result ), 200)
else:
return make_response(jsonify( result ), 400)
except:
print traceback.format_exc()
return make_response(jsonify( {'status': 'error', 'msg': 'Error when applying new changes'} ), 500)
@app.route('/domain/<string:domain_name>/update', methods=['POST'], strict_slashes=False)
@login_required
def record_update(domain_name):
    """
    This route is used for domain work as Slave Zone only
    Pulling the records update from its Master
    """
    try:
        pdata = request.data
        jdata = json.loads(pdata)
        # the zone to refresh comes from the JSON body, overriding the URL part
        domain_name = jdata['domain']
        d = Domain()
        result = d.update_from_master(domain_name)
        if result['status'] == 'ok':
            return make_response(jsonify( {'status': 'ok', 'msg': result['msg']} ), 200)
        else:
            return make_response(jsonify( {'status': 'error', 'msg': result['msg']} ), 500)
    except:
        print traceback.format_exc()
        return make_response(jsonify( {'status': 'error', 'msg': 'Error when applying new changes'} ), 500)
@app.route('/domain/<string:domain_name>/record/<string:record_name>/type/<string:record_type>/delete', methods=['GET'])
@login_required
@admin_role_required
def record_delete(domain_name, record_name, record_type):
    """Delete one record from a zone, then return to the zone page."""
    try:
        r = Record(name=record_name, type=record_type)
        result = r.delete(domain=domain_name)
        if result['status'] == 'error':
            # deletion failure is only logged; the redirect below still happens
            print result['msg']
    except:
        print traceback.format_exc()
        return redirect(url_for('error', code=500)), 500
    return redirect(url_for('domain', domain_name=domain_name))
@app.route('/domain/<string:domain_name>/dnssec', methods=['GET'])
@login_required
def domain_dnssec(domain_name):
    """Return the zone's DNSSEC information as JSON."""
    dnssec_info = Domain().get_domain_dnssec(domain_name)
    return make_response(jsonify(dnssec_info), 200)
@app.route('/domain/<string:domain_name>/managesetting', methods=['GET', 'POST'])
@login_required
@admin_role_required
def admin_setdomainsetting(domain_name):
    """Create or update a per-domain setting posted as JSON."""
    if request.method == 'POST':
        #
        # post data should in format
        # {'action': 'set_setting', 'setting': 'default_action, 'value': 'True'}
        #
        try:
            pdata = request.data
            jdata = json.loads(pdata)
            data = jdata['data']
            if jdata['action'] == 'set_setting':
                new_setting = data['setting']
                new_value = str(data['value'])
                domain = Domain.query.filter(Domain.name == domain_name).first()
                setting = DomainSetting.query.filter(DomainSetting.domain == domain).filter(DomainSetting.setting == new_setting).first()
                if setting:
                    # the setting already exists for this domain: update in place
                    if setting.set(new_value):
                        history = History(msg='Setting %s changed value to %s for %s' % (new_setting, new_value, domain.name), created_by=current_user.username)
                        history.add()
                        return make_response(jsonify( { 'status': 'ok', 'msg': 'Setting updated.' } ))
                    else:
                        return make_response(jsonify( { 'status': 'error', 'msg': 'Unable to set value of setting.' } ))
                else:
                    # first use of this setting for the domain: create the row
                    if domain.add_setting(new_setting, new_value):
                        history = History(msg='New setting %s with value %s for %s has been created' % (new_setting, new_value, domain.name), created_by=current_user.username)
                        history.add()
                        return make_response(jsonify( { 'status': 'ok', 'msg': 'New setting created and updated.' } ))
                    else:
                        return make_response(jsonify( { 'status': 'error', 'msg': 'Unable to create new setting.' } ))
            else:
                return make_response(jsonify( { 'status': 'error', 'msg': 'Action not supported.' } ), 400)
        except:
            print traceback.format_exc()
            return make_response(jsonify( { 'status': 'error', 'msg': 'There is something wrong, please contact Administrator.' } ), 400)
@app.route('/admin', methods=['GET', 'POST'])
@login_required
@admin_role_required
def admin():
    """Render the admin overview: domains, users, server config and statistics."""
    domains = Domain.query.all()
    users = User.query.all()
    server = Server(server_id='localhost')
    configs = server.get_config()
    statistics = server.get_statistic()
    history_number = History.query.count()
    if statistics:
        # Python 2 filter() returns a list; pick out the 'uptime' statistic
        uptime = filter(lambda uptime: uptime['name'] == 'uptime', statistics)[0]['value']
    else:
        uptime = 0
    return render_template('admin.html', domains=domains, users=users, configs=configs, statistics=statistics, uptime=uptime, history_number=history_number)
@app.route('/admin/user/create', methods=['GET', 'POST'])
@login_required
@admin_role_required
def admin_createuser():
    """Show the user-creation form (GET) or create a local user (POST)."""
    if request.method == 'GET':
        return render_template('admin_createuser.html')
    if request.method == 'POST':
        fdata = request.form
        user = User(username=fdata['username'], plain_text_password=fdata['password'], firstname=fdata['firstname'], lastname=fdata['lastname'], email=fdata['email'])
        # an empty password is rejected before touching the database
        if fdata['password'] == "":
            return render_template('admin_createuser.html', user=user, blank_password=True)
        result = user.create_local_user()  # idiom fix: dropped stray trailing ';'
        # the model signals duplicates by returning these exact strings
        if result == 'Email already existed':
            return render_template('admin_createuser.html', user=user, duplicate_email=True)
        if result == 'Username already existed':
            return render_template('admin_createuser.html', user=user, duplicate_username=True)
        return redirect(url_for('admin_manageuser'))
@app.route('/admin/manageuser', methods=['GET', 'POST'])
@login_required
@admin_role_required
def admin_manageuser():
    """List users (GET) or apply a JSON-posted user-management action (POST).

    Supported actions: delete_user, revoke_user_privielges (the action key is
    spelled this way on the wire — presumably matching the client JS; verify
    before renaming), set_admin.
    """
    if request.method == 'GET':
        users = User.query.order_by(User.username).all()
        return render_template('admin_manageuser.html', users=users)
    if request.method == 'POST':
        #
        # post data should in format
        # {'action': 'delete_user', 'data': 'username'}
        #
        try:
            pdata = request.data
            jdata = json.loads(pdata)
            data = jdata['data']
            if jdata['action'] == 'delete_user':
                # 'data' is the plain username
                user = User(username=data)
                result = user.delete()
                if result:
                    history = History(msg='Delete username %s' % data, created_by=current_user.username)
                    history.add()
                    return make_response(jsonify( { 'status': 'ok', 'msg': 'User has been removed.' } ), 200)
                else:
                    return make_response(jsonify( { 'status': 'error', 'msg': 'Cannot remove user.' } ), 500)
            elif jdata['action'] == 'revoke_user_privielges':
                # 'data' is the plain username
                user = User(username=data)
                result = user.revoke_privilege()
                if result:
                    history = History(msg='Revoke %s user privielges' % data, created_by=current_user.username)
                    history.add()
                    return make_response(jsonify( { 'status': 'ok', 'msg': 'Revoked user privielges.' } ), 200)
                else:
                    return make_response(jsonify( { 'status': 'error', 'msg': 'Cannot revoke user privilege.' } ), 500)
            elif jdata['action'] == 'set_admin':
                # 'data' is a dict: {'username': ..., 'is_admin': ...}
                username = data['username']
                is_admin = data['is_admin']
                user = User(username=username)
                result = user.set_admin(is_admin)
                if result:
                    history = History(msg='Change user role of %s' % username, created_by=current_user.username)
                    history.add()
                    return make_response(jsonify( { 'status': 'ok', 'msg': 'Changed user role successfully.' } ), 200)
                else:
                    return make_response(jsonify( { 'status': 'error', 'msg': 'Cannot change user role.' } ), 500)
            else:
                return make_response(jsonify( { 'status': 'error', 'msg': 'Action not supported.' } ), 400)
        except:
            print traceback.format_exc()
            return make_response(jsonify( { 'status': 'error', 'msg': 'There is something wrong, please contact Administrator.' } ), 400)
@app.route('/admin/history', methods=['GET', 'POST'])
@login_required
@admin_role_required
def admin_history():
    """Show the audit history (GET) or purge all history entries (POST)."""
    if request.method == 'POST':
        h = History()
        result = h.remove_all()
        if result:
            # record the purge itself so an audit trail entry remains
            history = History(msg='Remove all histories', created_by=current_user.username)
            history.add()
            # bug fix: the success message used to read 'Changed user role
            # successfully.', copy-pasted from the user-role handler
            return make_response(jsonify( { 'status': 'ok', 'msg': 'Removed all histories.' } ), 200)
        else:
            return make_response(jsonify( { 'status': 'error', 'msg': 'Can not remove histories.' } ), 500)
    if request.method == 'GET':
        histories = History.query.all()
        return render_template('admin_history.html', histories=histories)
@app.route('/admin/settings', methods=['GET'])
@login_required
@admin_role_required
def admin_settings():
    """Render the global settings page (the maintenance flag is managed elsewhere)."""
    if request.method == 'GET':
        visible_settings = Setting.query.filter(Setting.name != 'maintenance')
        return render_template('admin_settings.html', settings=visible_settings)
@app.route('/admin/setting/<string:setting>/toggle', methods=['POST'])
@login_required
@admin_role_required
def admin_settings_toggle(setting):
    """Flip a boolean setting and report the outcome as JSON."""
    if not Setting().toggle(setting):
        return make_response(jsonify( { 'status': 'error', 'msg': 'Unable to toggle setting.' } ), 500)
    return make_response(jsonify( { 'status': 'ok', 'msg': 'Toggled setting successfully.' } ), 200)
@app.route('/admin/setting/<string:setting>/edit', methods=['POST'])
@login_required
@admin_role_required
def admin_settings_edit(setting):
    """Set a named setting to the value posted as JSON ({'value': ...})."""
    pdata = request.data
    jdata = json.loads(pdata)
    # Convert the value returned from the UI to a string before storing it,
    # to allow for differences in how database engines store booleans.
    new_value = str(jdata['value'])
    result = Setting().set(setting, new_value)
    if (result):
        # bug fix: both messages used to say 'Toggled ... setting', copy-pasted
        # from the toggle route
        return make_response(jsonify( { 'status': 'ok', 'msg': 'Updated setting successfully.' } ), 200)
    else:
        return make_response(jsonify( { 'status': 'error', 'msg': 'Unable to update setting.' } ), 500)
@app.route('/user/profile', methods=['GET', 'POST'])
@login_required
def user_profile():
    """Show (GET) or update (POST) the current user's profile.

    POST accepts either regular form fields (name/email/password plus an
    optional avatar upload) or a JSON body of the form
    {'action': 'enable_otp', 'data': {'enable_otp': ...}}.
    """
    if request.method == 'GET':
        return render_template('user_profile.html')
    if request.method == 'POST':
        # get new profile info
        firstname = request.form['firstname'] if 'firstname' in request.form else ''
        lastname = request.form['lastname'] if 'lastname' in request.form else ''
        email = request.form['email'] if 'email' in request.form else ''
        new_password = request.form['password'] if 'password' in request.form else ''
        # json data
        if request.data:
            # JSON branch: toggling OTP returns immediately and leaves the
            # other profile fields untouched
            jdata = json.loads(request.data)
            data = jdata['data']
            if jdata['action'] == 'enable_otp':
                enable_otp = data['enable_otp']
                user = User(username=current_user.username)
                user.update_profile(enable_otp=enable_otp)
                return make_response(jsonify( { 'status': 'ok', 'msg': 'Change OTP Authentication successfully. Status: %s' % enable_otp } ), 200)
        # get new avatar
        save_file_name = None
        if 'file' in request.files:
            file = request.files['file']
            if file:
                filename = secure_filename(file.filename)
                file_extension = filename.rsplit('.', 1)[1]
                if file_extension.lower() in ['jpg', 'jpeg', 'png']:
                    # one avatar file per user, keyed by username
                    save_file_name = current_user.username + '.' + file_extension
                    file.save(os.path.join(app.config['UPLOAD_DIR'], 'avatar', save_file_name))
        # update user profile
        user = User(username=current_user.username, plain_text_password=new_password, firstname=firstname, lastname=lastname, email=email, avatar=save_file_name, reload_info=False)
        user.update_profile()
        return render_template('user_profile.html')
@app.route('/user/avatar/<string:filename>')
def user_avatar(filename):
    # Serve a previously uploaded avatar image from the upload directory.
    # NOTE(review): no @login_required here, so avatars are publicly readable
    # — confirm that is intentional.
    return send_from_directory(os.path.join(app.config['UPLOAD_DIR'], 'avatar'), filename)
@app.route('/qrcode')
@login_required
def qrcode():
    """Render the current user's TOTP provisioning URI as an SVG QR code."""
    if not current_user:
        return redirect(url_for('index'))
    # build the QR code for FreeOTP/Google Authenticator enrolment
    buf = BytesIO()
    pyqrcode.create(current_user.get_totp_uri()).svg(buf, scale=3)
    headers = {
        'Content-Type': 'image/svg+xml',
        'Cache-Control': 'no-cache, no-store, must-revalidate',
        'Pragma': 'no-cache',
        'Expires': '0'}
    return buf.getvalue(), 200, headers
@app.route('/nic/checkip.html', methods=['GET', 'POST'])
def dyndns_checkip():
    # Covers ddclient's default 'web' checkip service: echo the client IP back.
    client_ip = request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
    return render_template('dyndns.html', response=client_ip)
@app.route('/nic/update', methods=['GET', 'POST'])
@dyndns_login_required
def dyndns_update():
    """
    Handle a dyndns2-protocol update request (as sent by e.g. ddclient).

    dyndns protocol response codes in use:
        good:   update successful
        nochg:  IP address already set to update address
        nohost: hostname does not exist for this user account
        911:    server error
    All responses use HTTP 200 because ddclient does not read the return
    string if the code is other than 200.
    reference: https://help.dyn.com/remote-access-api/perform-update/
    reference: https://help.dyn.com/remote-access-api/return-codes/
    """
    hostname = request.args.get('hostname')
    myip = request.args.get('myip')

    # A missing hostname can never match anything; answer 'nohost' instead
    # of crashing on None.split() below.
    if not hostname:
        history = History(msg="DynDNS update: no hostname provided", created_by=current_user.username)
        history.add()
        return render_template('dyndns.html', response='nohost'), 200

    try:
        # get all domains owned by the current user
        domains = User(id=current_user.id).get_domain()
    except Exception:
        return render_template('dyndns.html', response='911'), 200

    # Locate the owned zone containing the hostname by progressively
    # stripping leading labels (a.b.example.com -> b.example.com -> ...).
    domain = None
    domain_segments = hostname.split('.')
    for _ in range(len(domain_segments)):
        domain_segments.pop(0)
        full_domain = '.'.join(domain_segments)
        potential_domain = Domain.query.filter(Domain.name == full_domain).first()
        if potential_domain in domains:
            domain = potential_domain
            break

    if not domain:
        history = History(msg="DynDNS update: attempted update of %s but it does not exist for this user" % hostname, created_by=current_user.username)
        history.add()
        return render_template('dyndns.html', response='nohost'), 200

    r = Record()
    r.name = hostname
    # check if the user requested record exists within this domain
    if r.exists(domain.name) and r.is_allowed:
        if r.data == myip:
            # record content did not change, return 'nochg'
            history = History(msg="DynDNS update: attempted update of %s but record did not change" % hostname, created_by=current_user.username)
            history.add()
            return render_template('dyndns.html', response='nochg'), 200
        else:
            oldip = r.data
            result = r.update(domain.name, myip)
            if result['status'] == 'ok':
                history = History(msg='DynDNS update: updated record %s in zone %s, it changed from %s to %s' % (hostname, domain.name, oldip, myip), detail=str(result), created_by=current_user.username)
                history.add()
                return render_template('dyndns.html', response='good'), 200
            else:
                return render_template('dyndns.html', response='911'), 200
    elif r.is_allowed:
        # Record does not exist; create it on demand when the zone enables it.
        ondemand_creation = DomainSetting.query.filter(DomainSetting.domain == domain).filter(DomainSetting.setting == 'create_via_dyndns').first()
        # Guard against a missing setting row: the previous code dereferenced
        # .value on None and answered with a server error instead of 'nohost'.
        if ondemand_creation is not None and strtobool(ondemand_creation.value):
            record = Record(name=hostname, type='A', data=myip, status=False, ttl=3600)
            result = record.add(domain.name)
            if result['status'] == 'ok':
                history = History(msg='DynDNS update: created record %s in zone %s, it now represents %s' % (hostname, domain.name, myip), detail=str(result), created_by=current_user.username)
                history.add()
                return render_template('dyndns.html', response='good'), 200

    # Fallthrough: record not allowed, or on-demand creation disabled/failed.
    history = History(msg="DynDNS update: attempted update of %s but it does not exist for this user" % hostname, created_by=current_user.username)
    history.add()
    return render_template('dyndns.html', response='nohost'), 200
@app.route('/', methods=['GET', 'POST'])
@login_required
def index():
    """Redirect an authenticated user from the site root to the dashboard."""
    return redirect(url_for('dashboard'))
# END VIEWS
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# Common codegen classes.
import operator
import os
import re
import string
from WebIDL import (
BuiltinTypes,
IDLBuiltinType,
IDLNullValue,
IDLType,
IDLUndefinedValue,
)
from Configuration import getTypesFromDescriptor, getTypesFromDictionary, getTypesFromCallback
# Header comment prepended to every generated binding file.
AUTOGENERATED_WARNING_COMMENT = \
    "/* THIS FILE IS AUTOGENERATED - DO NOT EDIT */\n\n"

# Names used for the JSClass hook functions emitted into generated bindings.
ADDPROPERTY_HOOK_NAME = '_addProperty'
FINALIZE_HOOK_NAME = '_finalize'
TRACE_HOOK_NAME = '_trace'
CONSTRUCT_HOOK_NAME = '_constructor'
HASINSTANCE_HOOK_NAME = '_hasInstance'
def replaceFileIfChanged(filename, newContents):
    """
    Write newContents to filename and return True.

    Historically this compared against the old contents and only wrote (and
    returned True) when something changed; the comparison is deliberately
    disabled (see note below), so the file is always rewritten.
    """
    # XXXjdm This doesn't play well with make right now.
    #        Force the file to always be updated, or else changing
    #        CodegenRust.py will cause many autogenerated bindings to be
    #        regenerated perpetually until the result is actually different.

    # Use a context manager so the handle is closed even if the write fails.
    with open(filename, 'wb') as f:
        f.write(newContents)
    return True
def toStringBool(arg):
    """Render a Python truth value as Rust boolean literal text."""
    return "true" if arg else "false"
def toBindingNamespace(arg):
    """
    Map an interface name to its binding namespace name by inserting
    "Binding" before an optional trailing "_workers" suffix.

    The previous implementation used re.sub with an optional, empty-matching
    group anchored at `$`; since Python 3.7 re.sub also replaces the empty
    match that follows a non-empty one, producing e.g.
    "FooBinding_workersBinding".  Plain string logic avoids that trap.
    """
    suffix = "_workers"
    if arg.endswith(suffix):
        return arg[:-len(suffix)] + "Binding" + suffix
    return arg + "Binding"
def stripTrailingWhitespace(text):
    """Strip trailing whitespace from each line, preserving a final newline."""
    keep_final_newline = text.endswith('\n')
    cleaned = [line.rstrip() for line in text.splitlines()]
    joined = '\n'.join(cleaned)
    return joined + '\n' if keep_final_newline else joined
def MakeNativeName(name):
    """Capitalize only the first character of *name* (IDL -> native name)."""
    head, tail = name[0], name[1:]
    return head.upper() + tail
# Map from IDL builtin type tags to the Rust primitive type names used in
# generated bindings.
builtinNames = {
    IDLType.Tags.bool: 'bool',
    IDLType.Tags.int8: 'i8',
    IDLType.Tags.int16: 'i16',
    IDLType.Tags.int32: 'i32',
    IDLType.Tags.int64: 'i64',
    IDLType.Tags.uint8: 'u8',
    IDLType.Tags.uint16: 'u16',
    IDLType.Tags.uint32: 'u32',
    IDLType.Tags.uint64: 'u64',
    IDLType.Tags.float: 'f32',
    IDLType.Tags.double: 'f64'
}

# IDL type tags that denote numeric (integer or floating-point) types.
numericTags = [
    IDLType.Tags.int8, IDLType.Tags.uint8,
    IDLType.Tags.int16, IDLType.Tags.uint16,
    IDLType.Tags.int32, IDLType.Tags.uint32,
    IDLType.Tags.int64, IDLType.Tags.uint64,
    IDLType.Tags.float, IDLType.Tags.double
]
class CastableObjectUnwrapper():
    """
    A class for unwrapping an object named by the "source" argument
    based on the passed-in descriptor. Stringifies to a Rust expression of
    the appropriate type.

    codeOnFailure is the code to run if unwrapping fails.
    """
    def __init__(self, descriptor, source, codeOnFailure):
        # Template substitutions used by __str__; "type" and "protoID" are
        # kept for callers that reuse this substitution dict.
        self.substitution = {
            "type": descriptor.nativeType,
            "depth": descriptor.interface.inheritanceDepth(),
            "prototype": "PrototypeList::id::" + descriptor.name,
            "protoID": "PrototypeList::id::" + descriptor.name + " as uint",
            "source": source,
            "codeOnFailure": CGIndenter(CGGeneric(codeOnFailure), 4).define(),
        }

    def __str__(self):
        # Emitted Rust: attempt the unwrap and fall back to codeOnFailure
        # when unwrap_jsmanaged returns Err.
        return string.Template(
"""match unwrap_jsmanaged(${source}, ${prototype}, ${depth}) {
Ok(val) => val,
Err(()) => {
${codeOnFailure}
}
}""").substitute(self.substitution)
class CGThing():
    """
    Abstract base class for things that spit out code.
    """
    def __init__(self):
        # No shared state yet; subclasses carry their own.
        pass

    def define(self):
        """Produce code for a Rust file."""
        assert False  # Override me!
class CGMethodCall(CGThing):
    """
    A class to generate selection of a method signature from a set of
    signatures and generation of a call to that signature.
    """
    def __init__(self, argsPre, nativeMethodName, static, descriptor, method):
        CGThing.__init__(self)

        # Quoted name used in generated error messages.
        methodName = '\\"%s.%s\\"' % (descriptor.interface.identifier.name, method.identifier.name)

        def requiredArgCount(signature):
            # Number of leading non-optional arguments of a signature.
            arguments = signature[1]
            if len(arguments) == 0:
                return 0
            requiredArgs = len(arguments)
            while requiredArgs and arguments[requiredArgs-1].optional:
                requiredArgs -= 1
            return requiredArgs

        def getPerSignatureCall(signature, argConversionStartsAt=0, signatureIndex=0):
            # Build the converter+call for one signature; overload bodies get
            # a '_'-suffixed native name per signature index.
            return CGPerSignatureCall(signature[0], argsPre, signature[1],
                                      nativeMethodName + '_'*signatureIndex,
                                      static, descriptor,
                                      method, argConversionStartsAt)

        signatures = method.signatures()
        if len(signatures) == 1:
            # Special case: we can just do a per-signature method call
            # here for our one signature and not worry about switching
            # on anything.
            signature = signatures[0]
            self.cgRoot = CGList([getPerSignatureCall(signature)])
            requiredArgs = requiredArgCount(signature)

            if requiredArgs > 0:
                code = (
                    "if argc < %d {\n"
                    " throw_type_error(cx, \"Not enough arguments to %s.\");\n"
                    " return 0;\n"
                    "}" % (requiredArgs, methodName))
                self.cgRoot.prepend(
                    CGWrapper(CGGeneric(code), pre="\n", post="\n"))

            return

        # Need to find the right overload
        maxArgCount = method.maxArgCount
        allowedArgCounts = method.allowedArgCounts

        argCountCases = []
        for argCount in allowedArgCounts:
            possibleSignatures = method.signaturesForArgCount(argCount)
            if len(possibleSignatures) == 1:
                # easy case!
                signature = possibleSignatures[0]
                sigIndex = signatures.index(signature)
                argCountCases.append(
                    CGCase(str(argCount), getPerSignatureCall(signature,
                                                              signatureIndex=sigIndex)))
                continue

            distinguishingIndex = method.distinguishingIndexForArgCount(argCount)

            # We can't handle unions at the distinguishing index.
            for (returnType, args) in possibleSignatures:
                if args[distinguishingIndex].type.isUnion():
                    raise TypeError("No support for unions as distinguishing "
                                    "arguments yet: %s",
                                    args[distinguishingIndex].location)

            # Convert all our arguments up to the distinguishing index.
            # Doesn't matter which of the possible signatures we use, since
            # they all have the same types up to that point; just use
            # possibleSignatures[0]
            caseBody = [CGGeneric("let argv_start = JS_ARGV(cx, vp);")]
            caseBody.extend([ CGArgumentConverter(possibleSignatures[0][1][i],
                                                  i, "argv_start", "argc",
                                                  descriptor) for i in
                              range(0, distinguishingIndex) ])

            # Select the right overload from our set.
            distinguishingArg = "(*argv_start.offset(%d))" % distinguishingIndex

            def pickFirstSignature(condition, filterLambda):
                # NOTE(review): relies on Python 2 filter() returning a list;
                # len()/indexing would fail on Python 3's lazy iterator.
                sigs = filter(filterLambda, possibleSignatures)
                assert len(sigs) < 2
                if len(sigs) > 0:
                    if condition is None:
                        caseBody.append(
                            getPerSignatureCall(sigs[0], distinguishingIndex,
                                                possibleSignatures.index(sigs[0])))
                    else:
                        caseBody.append(CGGeneric("if " + condition + " {"))
                        caseBody.append(CGIndenter(
                            getPerSignatureCall(sigs[0], distinguishingIndex,
                                                possibleSignatures.index(sigs[0]))))
                        caseBody.append(CGGeneric("}"))
                    return True
                return False

            # First check for null or undefined
            pickFirstSignature("%s.isNullOrUndefined()" % distinguishingArg,
                               lambda s: (s[1][distinguishingIndex].type.nullable() or
                                          s[1][distinguishingIndex].type.isDictionary()))

            # Now check for distinguishingArg being an object that implements a
            # non-callback interface.  That includes typed arrays and
            # arraybuffers.
            interfacesSigs = [
                s for s in possibleSignatures
                if (s[1][distinguishingIndex].type.isObject() or
                    s[1][distinguishingIndex].type.isNonCallbackInterface()) ]
            # There might be more than one of these; we need to check
            # which ones we unwrap to.

            if len(interfacesSigs) > 0:
                # The spec says that we should check for "platform objects
                # implementing an interface", but it's enough to guard on these
                # being an object.  The code for unwrapping non-callback
                # interfaces and typed arrays will just bail out and move on to
                # the next overload if the object fails to unwrap correctly.  We
                # could even not do the isObject() check up front here, but in
                # cases where we have multiple object overloads it makes sense
                # to do it only once instead of for each overload.  That will
                # also allow the unwrapping test to skip having to do codegen
                # for the null-or-undefined case, which we already handled
                # above.
                caseBody.append(CGGeneric("if (%s).is_object() {" %
                                          (distinguishingArg)))
                for idx, sig in enumerate(interfacesSigs):
                    caseBody.append(CGIndenter(CGGeneric("loop {")));
                    type = sig[1][distinguishingIndex].type

                    # The argument at index distinguishingIndex can't possibly
                    # be unset here, because we've already checked that argc is
                    # large enough that we can examine this argument.
                    template, _, declType, needsRooting = getJSToNativeConversionTemplate(
                        type, descriptor, failureCode="break;", isDefinitelyObject=True)

                    testCode = instantiateJSToNativeConversionTemplate(
                        template,
                        {"val": distinguishingArg},
                        declType,
                        "arg%d" % distinguishingIndex,
                        needsRooting)

                    # Indent by 4, since we need to indent further than our "do" statement
                    caseBody.append(CGIndenter(testCode, 4));
                    # If we got this far, we know we unwrapped to the right
                    # interface, so just do the call.  Start conversion with
                    # distinguishingIndex + 1, since we already converted
                    # distinguishingIndex.
                    caseBody.append(CGIndenter(
                        getPerSignatureCall(sig, distinguishingIndex + 1, idx), 4))
                    caseBody.append(CGIndenter(CGGeneric("}")))
                caseBody.append(CGGeneric("}"))

            # XXXbz Now we're supposed to check for distinguishingArg being
            # an array or a platform object that supports indexed
            # properties... skip that last for now.  It's a bit of a pain.
            pickFirstSignature("%s.isObject() && IsArrayLike(cx, &%s.toObject())" %
                               (distinguishingArg, distinguishingArg),
                               lambda s:
                                   (s[1][distinguishingIndex].type.isArray() or
                                    s[1][distinguishingIndex].type.isSequence() or
                                    s[1][distinguishingIndex].type.isObject()))

            # Check for Date objects
            # XXXbz Do we need to worry about security wrappers around the Date?
            pickFirstSignature("%s.isObject() && JS_ObjectIsDate(cx, &%s.toObject())" %
                               (distinguishingArg, distinguishingArg),
                               lambda s: (s[1][distinguishingIndex].type.isDate() or
                                          s[1][distinguishingIndex].type.isObject()))

            # Check for vanilla JS objects
            # XXXbz Do we need to worry about security wrappers?
            pickFirstSignature("%s.isObject() && !IsPlatformObject(cx, &%s.toObject())" %
                               (distinguishingArg, distinguishingArg),
                               lambda s: (s[1][distinguishingIndex].type.isCallback() or
                                          s[1][distinguishingIndex].type.isCallbackInterface() or
                                          s[1][distinguishingIndex].type.isDictionary() or
                                          s[1][distinguishingIndex].type.isObject()))

            # The remaining cases are mutually exclusive.  The
            # pickFirstSignature calls are what change caseBody
            # Check for strings or enums
            if pickFirstSignature(None,
                                  lambda s: (s[1][distinguishingIndex].type.isString() or
                                             s[1][distinguishingIndex].type.isEnum())):
                pass
            # Check for primitives
            elif pickFirstSignature(None,
                                    lambda s: s[1][distinguishingIndex].type.isPrimitive()):
                pass
            # Check for "any"
            elif pickFirstSignature(None,
                                    lambda s: s[1][distinguishingIndex].type.isAny()):
                pass
            else:
                # Just throw; we have no idea what we're supposed to
                # do with this.
                caseBody.append(CGGeneric("return Throw(cx, NS_ERROR_XPC_BAD_CONVERT_JS);"))

            argCountCases.append(CGCase(str(argCount),
                                        CGList(caseBody, "\n")))

        overloadCGThings = []
        overloadCGThings.append(
            CGGeneric("let argcount = cmp::min(argc, %d);" %
                      maxArgCount))
        overloadCGThings.append(
            CGSwitch("argcount",
                     argCountCases,
                     CGGeneric("return 0; //XXXjdm throw stuff\n//return ThrowErrorMessage(cx, MSG_MISSING_ARGUMENTS, %s);\n" % methodName)))
        #XXXjdm Avoid unreachable statement warnings
        #overloadCGThings.append(
        #    CGGeneric('fail!("We have an always-returning default case");\n'
        #              'return 0;'))
        self.cgRoot = CGWrapper(CGList(overloadCGThings, "\n"),
                                pre="\n")

    def define(self):
        return self.cgRoot.define()
class FakeCastableDescriptor():
    """
    A minimal stand-in for a full descriptor, exposing only the members
    that CastableObjectUnwrapper reads: nativeType, name, and an interface
    with an inheritanceDepth() method.
    """
    def __init__(self, descriptor):
        self.nativeType = "*const %s" % descriptor.concreteType
        self.name = descriptor.name

        class FakeInterface:
            def inheritanceDepth(self):
                # Closes over `descriptor` from the enclosing __init__ call,
                # so the depth is looked up lazily on the real descriptor.
                return descriptor.interface.inheritanceDepth()
        self.interface = FakeInterface()
def dictionaryHasSequenceMember(dictionary):
    """True if any member of the dictionary (or of an ancestor dictionary)
    is or contains an IDL sequence."""
    members_have_sequence = any(typeIsSequenceOrHasSequenceMember(member.type)
                                for member in dictionary.members)
    return members_have_sequence or (dictionary.parent and
                                     dictionaryHasSequenceMember(dictionary.parent))
def typeIsSequenceOrHasSequenceMember(type):
    """True if *type* is, or transitively contains, an IDL sequence."""
    # Look through a nullable wrapper first.
    if type.nullable():
        type = type.inner
    if type.isSequence():
        return True
    if type.isArray():
        # An array is sequence-like when its element type is.
        return typeIsSequenceOrHasSequenceMember(type.inner)
    if type.isDictionary():
        return dictionaryHasSequenceMember(type.inner)
    if type.isUnion():
        return any(typeIsSequenceOrHasSequenceMember(member.type)
                   for member in type.flatMemberTypes)
    return False
def typeNeedsRooting(type, descriptorProvider):
    """A type needs rooting when it is a Gecko interface whose descriptor
    says so."""
    if not type.isGeckoInterface():
        return False
    return descriptorProvider.getDescriptor(type.name).needsRooting
def getJSToNativeConversionTemplate(type, descriptorProvider, failureCode=None,
                                    isDefinitelyObject=False,
                                    isMember=False,
                                    isArgument=False,
                                    invalidEnumValueFatal=True,
                                    defaultValue=None,
                                    treatNullAs="Default",
                                    isEnforceRange=False,
                                    isClamp=False,
                                    exceptionCode=None,
                                    allowTreatNonObjectAsNull=False,
                                    isCallbackReturnValue=False,
                                    sourceDescription="value"):
    """
    Get a template for converting a JS value to a native object based on the
    given type and descriptor.  If failureCode is given, then we're actually
    testing whether we can convert the argument to the desired type.  That
    means that failures to convert due to the JS value being the wrong type of
    value need to use failureCode instead of throwing exceptions.  Failures to
    convert that are due to JS exceptions (from toString or valueOf methods) or
    out of memory conditions need to throw exceptions no matter what
    failureCode is.

    If isDefinitelyObject is True, that means we know the value
    isObject() and we have no need to recheck that.

    if isMember is True, we're being converted from a property of some
    JS object, not from an actual method argument, so we can't rely on
    our jsval being rooted or outliving us in any way.  Any caller
    passing true needs to ensure that it is handled correctly in
    typeIsSequenceOrHasSequenceMember.

    invalidEnumValueFatal controls whether an invalid enum value conversion
    attempt will throw (if true) or simply return without doing anything (if
    false).

    If defaultValue is not None, it's the IDL default value for this conversion

    If isEnforceRange is true, we're converting an integer and throwing if the
    value is out of range.

    If isClamp is true, we're converting an integer and clamping if the
    value is out of range.

    If allowTreatNonObjectAsNull is true, then [TreatNonObjectAsNull]
    extended attributes on nullable callback functions will be honored.

    NOTE(review): isCallbackReturnValue is accepted but never read in this
    body — confirm whether callers still rely on passing it.

    The return value from this function is a tuple consisting of four things:

    1)  A string representing the conversion code.  This will have template
        substitution performed on it as follows:

          ${val} replaced by an expression for the JS::Value in question

    2)  A string or None representing Rust code for the default value (if any).

    3)  A CGThing representing the native C++ type we're converting to
        (declType).  This is allowed to be None if the conversion code is
        supposed to be used as-is.

    4)  A boolean indicating whether the caller has to root the result.
    """
    # We should not have a defaultValue if we know we're an object
    assert(not isDefinitelyObject or defaultValue is None)

    # If exceptionCode is not set, we'll just rethrow the exception we got.
    # Note that we can't just set failureCode to exceptionCode, because setting
    # failureCode will prevent pending exceptions from being set in cases when
    # they really should be!
    if exceptionCode is None:
        exceptionCode = "return 0;"

    needsRooting = typeNeedsRooting(type, descriptorProvider)

    def handleOptional(template, declType, default):
        # Package the standard four-tuple return value.
        assert (defaultValue is None) == (default is None)
        return (template, default, declType, needsRooting)

    # Unfortunately, .capitalize() on a string will lowercase things inside the
    # string, which we do not want.
    def firstCap(string):
        return string[0].upper() + string[1:]

    # Helper functions for dealing with failures due to the JS value being the
    # wrong type of value
    def onFailureNotAnObject(failureCode):
        return CGWrapper(
            CGGeneric(
                failureCode or
                ('throw_type_error(cx, "%s is not an object.");\n'
                 '%s' % (firstCap(sourceDescription), exceptionCode))),
            post="\n")
    def onFailureBadType(failureCode, typeName):
        return CGWrapper(
            CGGeneric(
                failureCode or
                ('//XXXjdm ThrowErrorMessage(cx, MSG_DOES_NOT_IMPLEMENT_INTERFACE, "%s", "%s")\n;'
                 '%s' % (firstCap(sourceDescription), typeName,
                         exceptionCode))),
            post="\n")
    def onFailureNotCallable(failureCode):
        return CGWrapper(
            CGGeneric(
                failureCode or
                ('//XXXjdm ThrowErrorMessage(cx, MSG_NOT_CALLABLE, "%s");\n'
                 '%s' % (firstCap(sourceDescription), exceptionCode))),
            post="\n")

    # A helper function for handling null default values.  Checks that the
    # default value, if it exists, is null.
    def handleDefaultNull(nullValue):
        if defaultValue is None:
            return None
        if not isinstance(defaultValue, IDLNullValue):
            raise TypeError("Can't handle non-null default value here")
        assert type.nullable() or type.isDictionary()
        return nullValue

    # A helper function for wrapping up the template body for
    # possibly-nullable objecty stuff
    def wrapObjectTemplate(templateBody, isDefinitelyObject, type,
                           failureCode=None):
        if not isDefinitelyObject:
            # Handle the non-object cases by wrapping up the whole
            # thing in an if cascade.
            templateBody = (
                "if (${val}).is_object() {\n" +
                CGIndenter(CGGeneric(templateBody)).define() + "\n")
            if type.nullable():
                templateBody += (
                    "} else if (${val}).is_null_or_undefined() {\n"
                    " None\n")
            templateBody += (
                "} else {\n" +
                CGIndenter(onFailureNotAnObject(failureCode)).define() +
                "}\n")
        return templateBody

    assert not (isEnforceRange and isClamp)  # These are mutually exclusive

    if type.isArray():
        raise TypeError("Can't handle array arguments yet")

    if type.isSequence():
        raise TypeError("Can't handle sequence arguments yet")

    if type.isUnion():
        declType = CGGeneric(type.name + "::" + type.name)
        if type.nullable():
            declType = CGWrapper(declType, pre="Option<", post=" >")
        templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
                        " Ok(value) => value,\n"
                        " Err(()) => { %s },\n"
                        "}" % exceptionCode)
        return handleOptional(templateBody, declType, handleDefaultNull("None"))

    if type.isGeckoInterface():
        assert not isEnforceRange and not isClamp

        descriptor = descriptorProvider.getDescriptor(
            type.unroll().inner.identifier.name)

        if descriptor.interface.isCallback():
            # Callback interfaces are carried as plain object wrappers.
            name = descriptor.nativeType
            declType = CGGeneric("Option<%s>" % name);
            conversion = ("Some(%s::new((${val}).to_object()))" % name)
            template = wrapObjectTemplate(conversion, isDefinitelyObject, type,
                                          failureCode)
            return handleOptional(template, declType, handleDefaultNull("None"))

        # Pick the Rust type depending on how the value is being used.
        if isMember:
            descriptorType = descriptor.memberType
        elif isArgument:
            descriptorType = descriptor.argumentType
        else:
            descriptorType = descriptor.nativeType

        templateBody = ""
        if descriptor.interface.isConsequential():
            raise TypeError("Consequential interface %s being used as an "
                            "argument" % descriptor.interface.identifier.name)

        if failureCode is None:
            substitutions = {
                "sourceDescription": sourceDescription,
                "interface": descriptor.interface.identifier.name,
                "exceptionCode": exceptionCode,
            }
            unwrapFailureCode = string.Template(
                'throw_type_error(cx, "${sourceDescription} does not '
                'implement interface ${interface}.");\n'
                '${exceptionCode}').substitute(substitutions)
        else:
            unwrapFailureCode = failureCode

        templateBody = str(CastableObjectUnwrapper(
            descriptor,
            "(${val}).to_object()",
            unwrapFailureCode))

        declType = CGGeneric(descriptorType)
        if type.nullable():
            templateBody = "Some(%s)" % templateBody
            declType = CGWrapper(declType, pre="Option<", post=">")

        if isMember:
            templateBody += ".root()"

        templateBody = wrapObjectTemplate(templateBody, isDefinitelyObject,
                                          type, failureCode)

        return handleOptional(templateBody, declType, handleDefaultNull("None"))

    if type.isSpiderMonkeyInterface():
        raise TypeError("Can't handle SpiderMonkey interface arguments yet")

    if type.isDOMString():
        assert not isEnforceRange and not isClamp

        treatAs = {
            "Default": "Default",
            "EmptyString": "Empty",
        }
        if treatNullAs not in treatAs:
            raise TypeError("We don't support [TreatNullAs=%s]" % treatNullAs)
        if type.nullable():
            # For nullable strings null becomes a null string.
            nullBehavior = "()"
        else:
            nullBehavior = treatAs[treatNullAs]

        conversionCode = (
            "match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
            " Ok(strval) => strval,\n"
            " Err(_) => { %s },\n"
            "}" % (nullBehavior, exceptionCode))

        if defaultValue is None:
            default = None
        elif isinstance(defaultValue, IDLNullValue):
            assert type.nullable()
            default = "None"
        else:
            assert defaultValue.type.tag() == IDLType.Tags.domstring
            # Emit the default string as a NUL-terminated byte array literal.
            value = "str::from_utf8(data).unwrap().to_string()"
            if type.nullable():
                value = "Some(%s)" % value
            default = (
                "static data: [u8, ..%s] = [ %s ];\n"
                "%s" %
                (len(defaultValue.value) + 1,
                 ", ".join(["'" + char + "' as u8" for char in defaultValue.value] + ["0"]),
                 value))

        declType = "DOMString"
        if type.nullable():
            declType = "Option<%s>" % declType

        return handleOptional(conversionCode, CGGeneric(declType), default)

    if type.isByteString():
        assert not isEnforceRange and not isClamp

        conversionCode = (
            "match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
            " Ok(strval) => strval,\n"
            " Err(_) => { %s },\n"
            "}" % exceptionCode)

        declType = CGGeneric("ByteString")
        if type.nullable():
            declType = CGWrapper(declType, pre="Option<", post=">")

        return handleOptional(conversionCode, declType, handleDefaultNull("None"))

    if type.isEnum():
        assert not isEnforceRange and not isClamp

        if type.nullable():
            raise TypeError("We don't support nullable enumerated arguments "
                            "yet")
        enum = type.inner.identifier.name
        if invalidEnumValueFatal:
            handleInvalidEnumValueCode = exceptionCode
        else:
            # Non-fatal: bail out of the generated function successfully.
            handleInvalidEnumValueCode = "return 1;"

        template = (
            "match FindEnumStringIndex(cx, ${val}, %(values)s) {\n"
            " Err(_) => { %(exceptionCode)s },\n"
            " Ok(None) => { %(handleInvalidEnumValueCode)s },\n"
            " Ok(Some(index)) => {\n"
            " //XXXjdm need some range checks up in here.\n"
            " unsafe { mem::transmute(index) }\n"
            " },\n"
            "}" % { "values" : enum + "Values::strings",
                    "exceptionCode" : exceptionCode,
                    "handleInvalidEnumValueCode" : handleInvalidEnumValueCode })

        if defaultValue is not None:
            assert(defaultValue.type.tag() == IDLType.Tags.domstring)
            default = "%sValues::%s" % (enum, getEnumValueName(defaultValue.value))
        else:
            default = None

        return handleOptional(template, CGGeneric(enum), default)

    if type.isCallback():
        assert not isEnforceRange and not isClamp
        assert not type.treatNonCallableAsNull()
        assert not type.treatNonObjectAsNull() or type.nullable()
        assert not type.treatNonObjectAsNull() or not type.treatNonCallableAsNull()

        declType = CGGeneric('%s::%s' % (type.unroll().module(), type.unroll().identifier.name))
        conversion = CGCallbackTempRoot(declType.define())

        if type.nullable():
            declType = CGTemplatedType("Option", declType)
            conversion = CGWrapper(conversion, pre="Some(", post=")")

        if allowTreatNonObjectAsNull and type.treatNonObjectAsNull():
            # [TreatNonObjectAsNull]: non-objects convert to None.
            if not isDefinitelyObject:
                haveObject = "${val}.is_object()"
                template = CGIfElseWrapper(haveObject,
                                           conversion,
                                           CGGeneric("None")).define()
            else:
                template = conversion
        else:
            # Otherwise the value must actually be callable.
            template = CGIfElseWrapper("JS_ObjectIsCallable(cx, ${val}.to_object()) != 0",
                                       conversion,
                                       onFailureNotCallable(failureCode)).define()
            template = wrapObjectTemplate(
                template,
                isDefinitelyObject,
                type,
                failureCode)

        if defaultValue is not None:
            assert allowTreatNonObjectAsNull
            assert type.treatNonObjectAsNull()
            assert type.nullable()
            assert isinstance(defaultValue, IDLNullValue)
            default = "None"
        else:
            default = None

        return (template, default, declType, needsRooting)

    if type.isAny():
        assert not isEnforceRange and not isClamp

        declType = CGGeneric("JSVal")

        if defaultValue is None:
            default = None
        elif isinstance(defaultValue, IDLNullValue):
            default = "NullValue()"
        elif isinstance(defaultValue, IDLUndefinedValue):
            default = "UndefinedValue()"
        else:
            raise TypeError("Can't handle non-null, non-undefined default value here")

        # "any" passes the JSVal through untouched.
        return handleOptional("${val}", declType, default)

    if type.isObject():
        raise TypeError("Can't handle object arguments yet")

    if type.isDictionary():
        if failureCode is not None:
            raise TypeError("Can't handle dictionaries when failureCode is not None")
        # There are no nullable dictionaries
        assert not type.nullable()

        typeName = CGDictionary.makeDictionaryName(type.inner)
        declType = CGGeneric(typeName)
        template = ("match %s::new(cx, ${val}) {\n"
                    " Ok(dictionary) => dictionary,\n"
                    " Err(_) => return 0,\n"
                    "}" % typeName)

        return handleOptional(template, declType, handleDefaultNull("%s::empty()" % typeName))

    if type.isVoid():
        # This one only happens for return values, and its easy: Just
        # ignore the jsval.
        return ("", None, None, False)

    if not type.isPrimitive():
        raise TypeError("Need conversion for argument type '%s'" % str(type))

    assert not isEnforceRange and not isClamp

    # NOTE(review): failureCode is reassigned here but not used afterwards —
    # looks vestigial; confirm before removing.
    if failureCode is None:
        failureCode = 'return 0'

    declType = CGGeneric(builtinNames[type.tag()])
    if type.nullable():
        declType = CGWrapper(declType, pre="Option<", post=">")

    #XXXjdm support conversionBehavior here
    template = (
        "match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
        " Ok(v) => v,\n"
        " Err(_) => { %s }\n"
        "}" % exceptionCode)

    if defaultValue is not None:
        if isinstance(defaultValue, IDLNullValue):
            assert type.nullable()
            defaultStr = "None"
        else:
            tag = defaultValue.type.tag()
            if tag in numericTags:
                defaultStr = str(defaultValue.value)
            else:
                assert(tag == IDLType.Tags.bool)
                defaultStr = toStringBool(defaultValue.value)

            if type.nullable():
                defaultStr = "Some(%s)" % defaultStr
    else:
        defaultStr = None

    return handleOptional(template, declType, defaultStr)
def instantiateJSToNativeConversionTemplate(templateBody, replacements,
                                            declType, declName, needsRooting):
    """
    Instantiate a conversion template (as produced by
    getJSToNativeConversionTemplate) with the given replacements and generate
    code binding the converted value to a stack variable named declName,
    optionally rooting it afterwards.
    """
    substituted = CGGeneric(
        string.Template(templateBody).substitute(replacements)
    )

    pieces = CGList([], "\n")
    if declType is None:
        # No declaration requested: emit the conversion expression as-is.
        pieces.append(substituted)
    else:
        declaration = CGList([
            CGGeneric("let "),
            CGGeneric(declName),
            CGGeneric(": "),
            declType,
            CGGeneric(" = "),
            substituted,
            CGGeneric(";"),
        ])
        pieces.append(declaration)
    # A trailing empty piece yields an extra newline after the conversion.
    pieces.append(CGGeneric(""))

    if needsRooting:
        pieces.append(CGGeneric("let %s = %s.root();" % (declName, declName)))
        pieces.append(CGGeneric(""))
    return pieces
def convertConstIDLValueToJSVal(value):
    """
    Map an IDL constant value to the Rust ConstantVal variant expression
    that represents it.

    Raises TypeError for value types with no JSVal representation.
    """
    if isinstance(value, IDLNullValue):
        return "NullVal"
    tag = value.type.tag()
    if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16,
               IDLType.Tags.uint16, IDLType.Tags.int32]:
        return "IntVal(%s)" % (value.value)
    if tag == IDLType.Tags.uint32:
        return "UintVal(%s)" % (value.value)
    if tag in [IDLType.Tags.int64, IDLType.Tags.uint64]:
        # 64-bit ints can't be represented exactly as JS ints; use doubles.
        return "DoubleVal(%s)" % (value.value)
    if tag == IDLType.Tags.bool:
        return "BoolVal(true)" if value.value else "BoolVal(false)"
    if tag in [IDLType.Tags.float, IDLType.Tags.double]:
        return "DoubleVal(%s)" % (value.value)
    # str() the type explicitly: concatenating the IDLType object directly
    # would itself raise a TypeError and mask this diagnostic.
    raise TypeError("Const value of unhandled type: " + str(value.type))
class CGArgumentConverter(CGThing):
"""
A class that takes an IDL argument object, its index in the
argument list, and the argv and argc strings and generates code to
unwrap the argument to the right native type.
"""
def __init__(self, argument, index, argv, argc, descriptorProvider,
invalidEnumValueFatal=True):
CGThing.__init__(self)
assert(not argument.defaultValue or argument.optional)
replacer = {
"index": index,
"argc": argc,
"argv": argv
}
condition = string.Template("${index} < ${argc}").substitute(replacer)
replacementVariables = {
"val": string.Template("(*${argv}.offset(${index}))").substitute(replacer),
}
template, default, declType, needsRooting = getJSToNativeConversionTemplate(
argument.type,
descriptorProvider,
invalidEnumValueFatal=invalidEnumValueFatal,
defaultValue=argument.defaultValue,
treatNullAs=argument.treatNullAs,
isEnforceRange=argument.enforceRange,
isClamp=argument.clamp,
isMember="Variadic" if argument.variadic else False,
allowTreatNonObjectAsNull=argument.allowTreatNonCallableAsNull())
if not argument.variadic:
if argument.optional:
if argument.defaultValue:
assert default
template = CGIfElseWrapper(condition,
CGGeneric(template),
CGGeneric(default)).define()
else:
assert not default
declType = CGWrapper(declType, pre="Option<", post=">")
template = CGIfElseWrapper(condition,
CGGeneric("Some(%s)" % template),
CGGeneric("None")).define()
else:
assert not default
self.converter = instantiateJSToNativeConversionTemplate(
template, replacementVariables, declType, "arg%d" % index,
needsRooting)
else:
assert argument.optional
variadicConversion = {
"val": string.Template("(*${argv}.offset(variadicArg as int))").substitute(replacer),
}
innerConverter = instantiateJSToNativeConversionTemplate(
template, variadicConversion, declType, "slot",
needsRooting)
seqType = CGTemplatedType("Vec", declType)
variadicConversion = string.Template(
"{\n"
" let mut vector: ${seqType} = Vec::with_capacity((${argc} - ${index}) as uint);\n"
" for variadicArg in range(${index}, ${argc}) {\n"
"${inner}\n"
" vector.push(slot);\n"
" }\n"
" vector\n"
"}"
).substitute({
"index": index,
"argc": argc,
"seqType": seqType.define(),
"inner": CGIndenter(innerConverter, 4).define(),
})
self.converter = instantiateJSToNativeConversionTemplate(
variadicConversion, replacementVariables, seqType, "arg%d" % index,
False)
def define(self):
    """Return the generated argument-conversion code built in __init__."""
    return self.converter.define()
def wrapForType(jsvalRef, result='result', successCode='return 1;'):
    """
    Reflect a Rust value into JS.

    * 'jsvalRef': a Rust reference to the JSVal in which to store the result
      of the conversion;
    * 'result': the name of the variable in which the Rust value is stored;
    * 'successCode': the code to run once we have done the conversion.
    """
    conversion = "%s = (%s).to_jsval(cx);" % (jsvalRef, result)
    return conversion + "\n" + successCode
def typeNeedsCx(type, retVal=False):
    """
    Decide whether converting a value of `type` needs a JSContext.

    `retVal` indicates the type is used as a return value, in which case
    SpiderMonkey interface types also require a cx.
    """
    if type is None:
        return False
    # Look through nullable and sequence/array wrappers.
    inner = type.inner if type.nullable() else type
    if inner.isSequence() or inner.isArray():
        inner = inner.inner
    if inner.isUnion():
        members = inner.unroll().flatMemberTypes
        return any(typeNeedsCx(member) for member in members)
    if retVal and inner.isSpiderMonkeyInterface():
        return True
    return inner.isAny() or inner.isObject()
def typeRetValNeedsRooting(type):
    """Whether a returned value of `type` must be rooted by the caller."""
    if type is None:
        return False
    unwrapped = type.inner if type.nullable() else type
    return unwrapped.isGeckoInterface() and not unwrapped.isCallback()
def memberIsCreator(member):
    """Whether `member` carries the [Creator] extended attribute."""
    creator = member.getExtendedAttribute("Creator")
    return creator is not None
# Returns a CGThing containing the type of the return value.
def getRetvalDeclarationForType(returnType, descriptorProvider):
    """
    Build a CGThing holding the Rust type used to declare the return value
    for `returnType`.

    Raises TypeError for types we cannot declare (e.g. sequence returns).
    """
    def maybeOption(result):
        # Nullable WebIDL types become Option<T> on the Rust side.  This
        # helper replaces seven copies of the same wrapping logic.
        if returnType.nullable():
            return CGWrapper(result, pre="Option<", post=">")
        return result

    if returnType is None or returnType.isVoid():
        # Nothing to declare
        return CGGeneric("()")
    if returnType.isPrimitive() and returnType.tag() in builtinNames:
        return maybeOption(CGGeneric(builtinNames[returnType.tag()]))
    if returnType.isDOMString():
        return maybeOption(CGGeneric("DOMString"))
    if returnType.isByteString():
        return maybeOption(CGGeneric("ByteString"))
    if returnType.isEnum():
        return maybeOption(CGGeneric(returnType.unroll().inner.identifier.name))
    if returnType.isGeckoInterface():
        descriptor = descriptorProvider.getDescriptor(
            returnType.unroll().inner.identifier.name)
        return maybeOption(CGGeneric(descriptor.returnType))
    if returnType.isCallback():
        result = CGGeneric('%s::%s' % (returnType.unroll().module(),
                                       returnType.unroll().identifier.name))
        return maybeOption(result)
    if returnType.isUnion():
        result = CGGeneric('%s::%s' % (returnType.unroll().name,
                                       returnType.unroll().name))
        return maybeOption(result)
    if returnType.isAny():
        return CGGeneric("JSVal")
    if returnType.isObject() or returnType.isSpiderMonkeyInterface():
        return CGGeneric("*mut JSObject")
    if returnType.isSequence():
        raise TypeError("We don't support sequence return values")
    raise TypeError("Don't know how to declare return value for %s" %
                    returnType)
class PropertyDefiner:
    """
    A common superclass for defining things on prototype objects.

    Subclasses should implement generateArray to generate the actual arrays
    of things we're defining.  They should also set self.regular to the list
    of things exposed to web pages.
    """
    def __init__(self, descriptor, name):
        self.descriptor = descriptor
        self.name = name

    def variableName(self):
        # Name of the generated static array, e.g. "sMethods".
        return "s" + self.name

    def length(self):
        return len(self.regular)

    def __str__(self):
        # We only need to generate id arrays for things that will end
        # up used via ResolveProperty or EnumerateProperties.
        return self.generateArray(self.regular, self.variableName())

    def generatePrefableArray(self, array, name, specTemplate, specTerminator,
                              specType, getDataTuple):
        """
        This method generates our various arrays.

        array is an array of interface members as passed to generateArray

        name is the name as passed to generateArray

        specTemplate is a template for each entry of the spec array

        specTerminator is a terminator for the spec array (inserted at the
        end of the array), or None

        specType is the actual typename of our spec

        getDataTuple is a callback function that takes an array entry and
        returns a tuple suitable for substitution into specTemplate.
        """
        # BUG FIX: the original asserted `len(array) is not 0`, an identity
        # comparison against an int literal, which is not a reliable
        # emptiness check (and a SyntaxWarning on modern Python).
        assert len(array) != 0

        specs = [specTemplate % getDataTuple(member) for member in array]
        if specTerminator:
            specs.append(specTerminator)
        # BUG FIX: substitute name/specType into the header alone instead of
        # %-formatting the whole joined output, which would blow up if any
        # generated spec happened to contain a '%' character.
        header = "static %s: &'static [%s] = &[\n" % (name, specType)
        return header + ",\n".join(specs) + "\n];\n\n"
# The length of a method is the maximum of the lengths of the
# argument lists of all its overloads.
def methodLength(method):
    """Return the WebIDL `length` of `method` (its maximum overload arity)."""
    return max(len(arguments) for _, arguments in method.signatures())
class MethodDefiner(PropertyDefiner):
    """
    A class for defining methods on a prototype object.
    """
    def __init__(self, descriptor, name, static):
        PropertyDefiner.__init__(self, descriptor, name)

        # FIXME https://bugzilla.mozilla.org/show_bug.cgi?id=772822
        # We should be able to check for special operations without an
        # identifier. For now we check if the name starts with __
        methods = []
        for member in descriptor.interface.members:
            if (member.isMethod() and member.isStatic() == static
                    and not member.isIdentifierLess()):
                methods.append(member)

        self.regular = []
        for member in methods:
            self.regular.append({"name": member.identifier.name,
                                 "methodInfo": not member.isStatic(),
                                 "length": methodLength(member),
                                 "flags": "JSPROP_ENUMERATE" })

        # FIXME Check for an existing iterator on the interface first.
        hasIndexedGetter = any(m.isGetter() and m.isIndexed() for m in methods)
        if hasIndexedGetter:
            self.regular.append({"name": 'iterator',
                                 "methodInfo": False,
                                 "nativeName": "JS_ArrayIterator",
                                 "length": 0,
                                 "flags": "JSPROP_ENUMERATE" })

    def generateArray(self, array, name):
        if not array:
            return ""

        def specData(m):
            # Ordinary methods dispatch through the generic trampoline and
            # carry JIT info; "native" methods are called directly.
            if m.get("methodInfo", True):
                jitinfo = "&%s_methodinfo" % m["name"]
                accessor = "genericMethod"
            else:
                jitinfo = "0 as *const JSJitInfo"
                accessor = m.get("nativeName", m["name"])
            return (m["name"], accessor, jitinfo, m["length"], m["flags"])

        def stringDecl(m):
            return "static %s_name: [u8, ..%i] = %s;\n" % (m["name"], len(m["name"]) + 1,
                                                           str_to_const_array(m["name"]))

        decls = ''.join(stringDecl(m) for m in array)
        return decls + self.generatePrefableArray(
            array, name,
            ' JSFunctionSpec {name: &%s_name as *const u8 as *const libc::c_char, call: JSNativeWrapper {op: Some(%s), info: %s}, nargs: %s, flags: %s as u16, selfHostedName: 0 as *const libc::c_char }',
            ' JSFunctionSpec {name: 0 as *const libc::c_char, call: JSNativeWrapper {op: None, info: 0 as *const JSJitInfo}, nargs: 0, flags: 0, selfHostedName: 0 as *const libc::c_char }',
            'JSFunctionSpec',
            specData)
class AttrDefiner(PropertyDefiner):
    """A class for defining attributes (properties) on a prototype object."""
    def __init__(self, descriptor, name, static):
        PropertyDefiner.__init__(self, descriptor, name)
        self.name = name
        self.regular = [member for member in descriptor.interface.members
                        if member.isAttr() and member.isStatic() == static]
        self.static = static

    def generateArray(self, array, name):
        if not array:
            return ""

        def flags(attr):
            return "JSPROP_SHARED | JSPROP_ENUMERATE | JSPROP_NATIVE_ACCESSORS"

        def getter(attr):
            if self.static:
                accessor, jitinfo = 'get_' + attr.identifier.name, "0"
            else:
                accessor = ("genericLenientGetter" if attr.hasLenientThis()
                            else "genericGetter")
                jitinfo = "&%s_getterinfo" % attr.identifier.name
            return ("JSPropertyOpWrapper {op: Some(%(native)s), info: %(info)s as *const JSJitInfo}"
                    % {"info": jitinfo, "native": accessor})

        def setter(attr):
            # Readonly attributes get no setter op at all.
            if attr.readonly:
                return "JSStrictPropertyOpWrapper {op: None, info: 0 as *const JSJitInfo}"
            if self.static:
                accessor, jitinfo = 'set_' + attr.identifier.name, "0"
            else:
                accessor = ("genericLenientSetter" if attr.hasLenientThis()
                            else "genericSetter")
                jitinfo = "&%s_setterinfo" % attr.identifier.name
            return ("JSStrictPropertyOpWrapper {op: Some(%(native)s), info: %(info)s as *const JSJitInfo}"
                    % {"info": jitinfo, "native": accessor})

        def specData(attr):
            return (attr.identifier.name, flags(attr), getter(attr),
                    setter(attr))

        def stringDecl(attr):
            attrName = attr.identifier.name
            return "static %s_name: [u8, ..%i] = %s;\n" % (attrName, len(attrName) + 1,
                                                           str_to_const_array(attrName))

        decls = ''.join(stringDecl(member) for member in array)
        return decls + self.generatePrefableArray(
            array, name,
            ' JSPropertySpec { name: &%s_name as *const u8 as *const libc::c_char, tinyid: 0, flags: ((%s) & 0xFF) as u8, getter: %s, setter: %s }',
            ' JSPropertySpec { name: 0 as *const libc::c_char, tinyid: 0, flags: 0, getter: JSPropertyOpWrapper {op: None, info: 0 as *const JSJitInfo}, setter: JSStrictPropertyOpWrapper {op: None, info: 0 as *const JSJitInfo} }',
            'JSPropertySpec',
            specData)
class ConstDefiner(PropertyDefiner):
    """
    A class for defining constants on the interface object
    """
    def __init__(self, descriptor, name):
        PropertyDefiner.__init__(self, descriptor, name)
        self.name = name
        self.regular = [member for member in descriptor.interface.members
                        if member.isConst()]

    def generateArray(self, array, name):
        if not array:
            return ""

        def specData(const):
            return (const.identifier.name,
                    convertConstIDLValueToJSVal(const.value))

        def stringDecl(const):
            constName = const.identifier.name
            return "static %s_name: &'static [u8] = &%s;\n" % (
                constName, str_to_const_array(constName))

        decls = ''.join(stringDecl(member) for member in array)
        return decls + self.generatePrefableArray(
            array, name,
            ' ConstantSpec { name: %s_name, value: %s }',
            None,
            'ConstantSpec',
            specData)
# We'll want to insert the indent at the beginnings of lines, but we
# don't want to indent empty lines. So only indent lines that have a
# non-newline character on them.
# (The zero-width lookahead matches the position at the start of every
# non-empty line, so re.sub can inject the indent without consuming text.)
lineStartDetector = re.compile("^(?=[^\n])", re.MULTILINE)
class CGIndenter(CGThing):
    """
    A class that takes another CGThing and generates code that indents that
    CGThing by some number of spaces.  The default indent is two spaces.
    """
    def __init__(self, child, indentLevel=2):
        CGThing.__init__(self)
        self.child = child
        self.indent = " " * indentLevel

    def define(self):
        defn = self.child.define()
        # BUG FIX: the original tested `defn is not ""`, an identity
        # comparison against a string literal, which is unreliable (and a
        # SyntaxWarning on modern Python).  Use a plain emptiness test.
        if defn:
            return re.sub(lineStartDetector, self.indent, defn)
        return defn
class CGWrapper(CGThing):
    """
    Generic CGThing that wraps other CGThings with pre and post text.
    """
    def __init__(self, child, pre="", post="", reindent=False):
        CGThing.__init__(self)
        self.child = child
        self.pre = pre
        self.post = post
        self.reindent = reindent

    def define(self):
        defn = self.child.define()
        if self.reindent:
            # We don't use lineStartDetector because we don't want to
            # insert whitespace at the beginning of our _first_ line.
            padding = " " * len(self.pre)
            defn = stripTrailingWhitespace(defn.replace("\n", "\n" + padding))
        return "".join((self.pre, defn, self.post))
class CGImports(CGWrapper):
    """
    Generates the appropriate import/use statements.
    """
    def __init__(self, child, descriptors, imports):
        """
        Adds a set of imports.
        """
        ignored_warnings = [
            # Allow unreachable_code because we use 'break' in a way that
            # sometimes produces two 'break's in a row. See for example
            # CallbackMember.getArgConversions.
            'unreachable_code',
            'non_camel_case_types',
            'non_uppercase_statics',
            'unnecessary_parens',
            'unused_imports',
            'unused_variable',
            'unused_unsafe',
            'unused_mut',
            'dead_assignment',
            'dead_code',
        ]

        statements = ['#![allow(%s)]' % ','.join(ignored_warnings)]
        statements += ['use %s;' % mod for mod in sorted(imports)]

        CGWrapper.__init__(self, child,
                           pre='\n'.join(statements) + '\n\n')

    @staticmethod
    def getDeclarationFilename(decl):
        # Use our local version of the header, not the exported one, so that
        # test bindings, which don't export, will work correctly.
        basename = os.path.basename(decl.filename())
        return basename.replace('.webidl', 'Binding.rs')
class CGIfWrapper(CGWrapper):
    """Wraps a CGThing in an `if <condition> { ... }` block."""
    def __init__(self, child, condition):
        header = CGWrapper(CGGeneric(condition), pre="if ", post=" {\n",
                           reindent=True).define()
        CGWrapper.__init__(self, CGIndenter(child), pre=header,
                           post="\n}")
class CGTemplatedType(CGWrapper):
    """Wraps a CGThing in a generic type, e.g. `Vec<...>`."""
    def __init__(self, templateName, child):
        CGWrapper.__init__(self, child, pre="%s<" % templateName, post=">")
class CGNamespace(CGWrapper):
    """Wraps a CGThing in a (possibly public) Rust `mod` declaration."""
    def __init__(self, namespace, child, public=False):
        visibility = "pub " if public else ""
        CGWrapper.__init__(self, child,
                           pre="%smod %s {\n" % (visibility, namespace),
                           post="} // mod %s\n" % namespace)

    @staticmethod
    def build(namespaces, child, public=False):
        """
        Static helper method to build multiple wrapped namespaces.
        """
        # Wrap from the innermost namespace outwards so that the first
        # entry in `namespaces` ends up outermost.
        for namespace in reversed(namespaces):
            child = CGNamespace(namespace, child, public=public)
        return child
def DOMClass(descriptor):
    """Render the DOMClass struct literal for `descriptor`."""
    protoList = ['PrototypeList::id::' + proto for proto in descriptor.prototypeChain]
    # Pad out the list to the right length with IDCount so we
    # guarantee that all the lists are the same length.  IDCount
    # is never the ID of any prototype, so it's safe to use as
    # padding.
    padding = descriptor.config.maxProtoChainLength - len(protoList)
    protoList += ['PrototypeList::id::IDCount'] * padding
    return """DOMClass {
interface_chain: [ %s ]
}""" % ', '.join(protoList)
class CGDOMJSClass(CGThing):
    """
    Generate a DOMJSClass for a given descriptor
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        # The trace hook is always installed so the GC can find the native.
        traceHook = "Some(%s)" % TRACE_HOOK_NAME
        if self.descriptor.createGlobal:
            # Globals carry the extra global-scope slots on top of the one
            # reserved slot for the native object pointer.
            flags = "JSCLASS_IS_GLOBAL | JSCLASS_DOM_GLOBAL"
            slots = "JSCLASS_GLOBAL_SLOT_COUNT + 1"
        else:
            flags = "0"
            slots = "1"
        return """
static Class_name: [u8, ..%i] = %s;
static Class: DOMJSClass = DOMJSClass {
base: js::Class {
name: &Class_name as *const u8 as *const libc::c_char,
flags: JSCLASS_IS_DOMJSCLASS | %s | (((%s) & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT as uint), //JSCLASS_HAS_RESERVED_SLOTS(%s),
addProperty: Some(JS_PropertyStub),
delProperty: Some(JS_PropertyStub),
getProperty: Some(JS_PropertyStub),
setProperty: Some(JS_StrictPropertyStub),
enumerate: Some(JS_EnumerateStub),
resolve: Some(JS_ResolveStub),
convert: Some(JS_ConvertStub),
finalize: Some(%s),
checkAccess: None,
call: None,
hasInstance: None,
construct: None,
trace: %s,
ext: js::ClassExtension {
equality: 0 as *const u8,
outerObject: %s,
innerObject: None,
iteratorObject: 0 as *const u8,
unused: 0 as *const u8,
isWrappedNative: 0 as *const u8,
},
ops: js::ObjectOps {
lookupGeneric: 0 as *const u8,
lookupProperty: 0 as *const u8,
lookupElement: 0 as *const u8,
lookupSpecial: 0 as *const u8,
defineGeneric: 0 as *const u8,
defineProperty: 0 as *const u8,
defineElement: 0 as *const u8,
defineSpecial: 0 as *const u8,
getGeneric: 0 as *const u8,
getProperty: 0 as *const u8,
getElement: 0 as *const u8,
getElementIfPresent: 0 as *const u8,
getSpecial: 0 as *const u8,
setGeneric: 0 as *const u8,
setProperty: 0 as *const u8,
setElement: 0 as *const u8,
setSpecial: 0 as *const u8,
getGenericAttributes: 0 as *const u8,
getPropertyAttributes: 0 as *const u8,
getElementAttributes: 0 as *const u8,
getSpecialAttributes: 0 as *const u8,
setGenericAttributes: 0 as *const u8,
setPropertyAttributes: 0 as *const u8,
setElementAttributes: 0 as *const u8,
setSpecialAttributes: 0 as *const u8,
deleteProperty: 0 as *const u8,
deleteElement: 0 as *const u8,
deleteSpecial: 0 as *const u8,
enumerate: 0 as *const u8,
typeOf: 0 as *const u8,
thisObject: %s,
clear: 0 as *const u8,
},
},
dom_class: %s
};
""" % (len(self.descriptor.interface.identifier.name) + 1,
       str_to_const_array(self.descriptor.interface.identifier.name),
       flags, slots, slots,
       FINALIZE_HOOK_NAME, traceHook,
       self.descriptor.outerObjectHook,
       self.descriptor.outerObjectHook,
       CGIndenter(CGGeneric(DOMClass(self.descriptor))).define())
def str_to_const_array(s):
    """
    Render `s` as a Rust byte-array literal with a trailing NUL, e.g.
    "ab" -> "['a' as u8, 'b' as u8, 0 as u8]".
    """
    # BUG FIX: the original wrote `map(...) + ['0 as u8']`, which relies on
    # Python 2's map() returning a list and raises TypeError on Python 3.
    # A list comprehension behaves identically on both.
    parts = ["'%s' as u8" % ch for ch in s]
    parts.append('0 as u8')
    return "[" + ", ".join(parts) + "]"
class CGPrototypeJSClass(CGThing):
    """Generate the JSClass used for an interface's prototype object."""
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        # One reserved slot, no finalize/trace hooks: prototype objects do
        # not hold a native DOM object.
        return """
static PrototypeClassName__: [u8, ..%s] = %s;
static PrototypeClass: JSClass = JSClass {
name: &PrototypeClassName__ as *const u8 as *const libc::c_char,
flags: (1 & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT as uint, //JSCLASS_HAS_RESERVED_SLOTS(1)
addProperty: Some(JS_PropertyStub),
delProperty: Some(JS_PropertyStub),
getProperty: Some(JS_PropertyStub),
setProperty: Some(JS_StrictPropertyStub),
enumerate: Some(JS_EnumerateStub),
resolve: Some(JS_ResolveStub),
convert: Some(JS_ConvertStub),
finalize: None,
checkAccess: None,
call: None,
hasInstance: None,
construct: None,
trace: None,
reserved: [0 as *mut libc::c_void, ..40]
};
""" % (len(self.descriptor.interface.identifier.name + "Prototype") + 1,
       str_to_const_array(self.descriptor.interface.identifier.name + "Prototype"))
class CGInterfaceObjectJSClass(CGThing):
    """
    Generate the JSClass for an interface (constructor) object.

    NOTE(review): the unconditional `return ""` below disables this class's
    output entirely; everything after it is dead code kept for reference.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        if True:
            return ""
        ctorname = "0 as *const u8" if not self.descriptor.interface.ctor() else CONSTRUCT_HOOK_NAME
        hasinstance = HASINSTANCE_HOOK_NAME
        return """
static InterfaceObjectClass: JSClass = {
%s, 0,
JS_PropertyStub,
JS_PropertyStub,
JS_PropertyStub,
JS_StrictPropertyStub,
JS_EnumerateStub,
JS_ResolveStub,
JS_ConvertStub,
0 as *const u8,
0 as *const u8,
%s,
%s,
%s,
0 as *const u8,
JSCLASS_NO_INTERNAL_MEMBERS
};
""" % (str_to_const_array("Function"), ctorname, hasinstance, ctorname)
class CGList(CGThing):
    """
    Generate code for a list of CGThings.  Just concatenates them together,
    with an optional joiner string ("\n" is a common joiner).
    """
    def __init__(self, children, joiner=""):
        CGThing.__init__(self)
        self.children = children
        self.joiner = joiner

    def append(self, child):
        self.children.append(child)

    def prepend(self, child):
        self.children.insert(0, child)

    def join(self, generator):
        # Skip empty chunks so the joiner never produces doubled separators.
        nonEmpty = (chunk for chunk in generator if len(chunk) > 0)
        return self.joiner.join(nonEmpty)

    def define(self):
        return self.join(child.define()
                         for child in self.children if child is not None)
class CGIfElseWrapper(CGList):
    """Wraps two CGThings in an `if cond { ... } else { ... }` construct."""
    def __init__(self, condition, ifTrue, ifFalse):
        thenBranch = CGIfWrapper(ifTrue, condition)
        elseBranch = CGWrapper(CGIndenter(ifFalse), pre=" else {\n", post="\n}")
        CGList.__init__(self, [thenBranch, elseBranch])
class CGGeneric(CGThing):
    """
    A class that spits out a fixed string into the codegen.  Can spit out a
    separate string for the declaration too.
    """
    def __init__(self, text):
        # CONSISTENCY FIX: call the base-class initializer like every other
        # CGThing subclass in this file does.
        CGThing.__init__(self)
        self.text = text

    def define(self):
        return self.text
class CGCallbackTempRoot(CGGeneric):
    """
    Generates a block that roots a callback's underlying JSObject as
    `tempRoot` and wraps it in the callback type `name`.

    Two substitution passes meet here: the "%s" is filled in *now* with the
    `<name>::new(tempRoot)` expression, while the literal "${val}" survives
    for a later string.Template substitution with the JS value expression.
    """
    def __init__(self, name):
        val = "%s::new(tempRoot)" % name
        define = """{
let tempRoot = ${val}.to_object();
%s
}""" % val
        CGGeneric.__init__(self, define)
def getAllTypes(descriptors, dictionaries, callbacks):
    """
    Generate all the types we're dealing with.  For each type, a tuple
    containing type, descriptor, dictionary is yielded.  The descriptor and
    dictionary can be None if the type does not come from a descriptor or
    dictionary; they will never both be non-None.
    """
    for descriptor in descriptors:
        for memberType in getTypesFromDescriptor(descriptor):
            yield (memberType, descriptor, None)
    for dictionary in dictionaries:
        for memberType in getTypesFromDictionary(dictionary):
            yield (memberType, None, dictionary)
    for callback in callbacks:
        for memberType in getTypesFromCallback(callback):
            yield (memberType, None, None)
def SortedTuples(l):
    """
    Sort a list of tuples based on the first item in the tuple
    """
    return sorted(l, key=lambda entry: entry[0])
def SortedDictValues(d):
    """
    Returns the values from the dict, ordered by key, as a generator.
    """
    # Sort the (key, value) pairs on key, then project out just the values.
    pairs = SortedTuples(d.items())
    return (pair[1] for pair in pairs)
def UnionTypes(descriptors, dictionaries, callbacks, config):
    """
    Returns a CGList containing CGUnionStructs for every union.
    """
    imports = [
        'dom::bindings::utils::unwrap_jsmanaged',
        'dom::bindings::codegen::PrototypeList',
        'dom::bindings::conversions::FromJSValConvertible',
        'dom::bindings::conversions::ToJSValConvertible',
        'dom::bindings::conversions::Default',
        'dom::bindings::error::throw_not_in_union',
        'dom::bindings::js::JS',
        'dom::types::*',
        'js::jsapi::JSContext',
        'js::jsval::JSVal',
        'servo_util::str::DOMString',
    ]

    # Now find all the things we'll need as arguments and return values
    # because we need to wrap or unwrap them.
    unionStructs = dict()
    for (t, descriptor, dictionary) in getAllTypes(descriptors, dictionaries, callbacks):
        assert not descriptor or not dictionary
        t = t.unroll()
        if not t.isUnion():
            continue
        name = str(t)
        if name in unionStructs:
            # Each union is emitted exactly once, however often it appears.
            continue
        provider = descriptor or config.getDescriptorProvider()
        contents = CGImports(CGList([
            CGUnionStruct(t, provider),
            CGUnionConversionStruct(t, provider)
        ]), [], imports)
        unionStructs[name] = CGNamespace(name, contents, public=True)

    return CGList(SortedDictValues(unionStructs), "\n\n")
class Argument():
    """
    A class for outputting the type and name of an argument
    """
    def __init__(self, argType, name, default=None, mutable=False):
        self.argType = argType
        self.name = name
        self.default = default
        self.mutable = mutable

    def declare(self):
        """Render the argument as it appears in a Rust fn signature."""
        pieces = []
        if self.mutable:
            pieces.append('mut ')
        pieces.append(self.name)
        if self.argType:
            pieces.append(': ' + self.argType)
        #XXXjdm Support default arguments somehow :/
        #if self.default is not None:
        #    string += " = " + self.default
        return ''.join(pieces)

    def define(self):
        """Render the argument in C-style `type name` form."""
        return self.argType + ' ' + self.name
class CGAbstractMethod(CGThing):
    """
    An abstract class for generating code for a method.  Subclasses should
    override definition_body to create the actual code.

    descriptor is the descriptor for the interface the method is associated
    with

    name is the name of the method as a string

    returnType is the IDLType of the return value

    args is a list of Argument objects

    inline should be True to generate an inline method, whose body is part
    of the declaration.

    alwaysInline should be True to generate an inline method annotated with
    MOZ_ALWAYS_INLINE.

    If templateArgs is not None it should be a list of strings containing
    template arguments, and the function will be templatized using those
    arguments.
    """
    def __init__(self, descriptor, name, returnType, args, inline=False,
                 alwaysInline=False, extern=False, pub=False,
                 templateArgs=None, unsafe=True):
        CGThing.__init__(self)
        self.descriptor = descriptor
        self.name = name
        self.returnType = returnType
        self.args = args
        self.alwaysInline = alwaysInline
        self.extern = extern
        self.templateArgs = templateArgs
        self.pub = pub
        self.unsafe = unsafe

    def _argstring(self):
        return ', '.join(arg.declare() for arg in self.args)

    def _template(self):
        if self.templateArgs is None:
            return ''
        return '<%s>\n' % ', '.join(self.templateArgs)

    def _decorators(self):
        decorators = []
        if self.alwaysInline:
            decorators.append('#[inline(always)]')
        if self.extern:
            decorators.append('extern')
        if self.pub:
            decorators.append('pub')
        if not decorators:
            return ''
        return ' '.join(decorators) + ' '

    def _returnType(self):
        if self.returnType == "void":
            return ""
        return " -> %s" % self.returnType

    def define(self):
        body = self.definition_body()
        # Generated methods are unsafe by default since they manipulate raw
        # JSAPI pointers.
        if self.unsafe:
            body = CGWrapper(body, pre="unsafe {\n", post="\n}")
        return CGWrapper(CGIndenter(body),
                         pre=self.definition_prologue(),
                         post=self.definition_epilogue()).define()

    def definition_prologue(self):
        return "%sfn %s%s(%s)%s {\n" % (self._decorators(), self.name,
                                        self._template(), self._argstring(),
                                        self._returnType())

    def definition_epilogue(self):
        return "\n}\n"

    def definition_body(self):
        assert False  # Override me!
def CreateBindingJSObject(descriptor, parent=None):
    """
    Emit the Rust statements that create the JS reflector object for a
    native of the type described by `descriptor`.

    `parent` is a Rust expression naming the scope/parent object; it is
    unused when creating a global.  The generated code leaves the new JS
    object in `obj` and stashes the boxed native in its reserved slot.
    """
    create = "let mut raw: JS<%s> = JS::from_raw(&*aObject);\n" % descriptor.concreteType
    if descriptor.proxy:
        # Proxy-based interfaces (indexed/named getters) go through
        # NewProxyObject with the handler registered for this interface.
        assert not descriptor.createGlobal
        create += """
let handler = RegisterBindings::proxy_handlers[PrototypeList::proxies::%s as uint];
let mut private = PrivateValue(squirrel_away_unique(aObject) as *const libc::c_void);
let obj = with_compartment(aCx, proto, || {
NewProxyObject(aCx, handler,
&private,
proto, %s,
ptr::mut_null(), ptr::mut_null())
});
assert!(obj.is_not_null());
""" % (descriptor.name, parent)
    else:
        if descriptor.createGlobal:
            create += "let obj = CreateDOMGlobal(aCx, &Class.base as *const js::Class as *const JSClass);\n"
        else:
            create += ("let obj = with_compartment(aCx, proto, || {\n"
                       "  JS_NewObject(aCx, &Class.base as *const js::Class as *const JSClass, &*proto, &*%s)\n"
                       "});\n" % parent)
    create += """assert!(obj.is_not_null());
JS_SetReservedSlot(obj, DOM_OBJECT_SLOT as u32,
PrivateValue(squirrel_away_unique(aObject) as *const libc::c_void));
"""
    return create
class CGWrapMethod(CGAbstractMethod):
    """
    Generate the public `Wrap` function that reflects a boxed native DOM
    object into a JS object and returns a Temporary<> rooted handle.
    """
    def __init__(self, descriptor):
        assert not descriptor.interface.isCallback()
        # Globals have no enclosing scope argument; everything else wraps
        # inside an existing global scope.
        if not descriptor.createGlobal:
            args = [Argument('*mut JSContext', 'aCx'), Argument('&GlobalRef', 'aScope'),
                    Argument("Box<%s>" % descriptor.concreteType, 'aObject', mutable=True)]
        else:
            args = [Argument('*mut JSContext', 'aCx'),
                    Argument("Box<%s>" % descriptor.concreteType, 'aObject', mutable=True)]
        retval = 'Temporary<%s>' % descriptor.concreteType
        CGAbstractMethod.__init__(self, descriptor, 'Wrap', retval, args, pub=True)

    def definition_body(self):
        if not self.descriptor.createGlobal:
            return CGGeneric("""\
let scope = aScope.reflector().get_jsobject();
assert!(scope.is_not_null());
assert!(((*JS_GetClass(scope)).flags & JSCLASS_IS_GLOBAL) != 0);
let proto = with_compartment(aCx, scope, || GetProtoObject(aCx, scope, scope));
assert!(proto.is_not_null());
%s
raw.reflector().set_jsobject(obj);
Temporary::new(raw)""" % CreateBindingJSObject(self.descriptor, "scope"))
        else:
            # Globals are their own scope: create the object first, then
            # enter its compartment to set up the prototype and bindings.
            return CGGeneric("""\
%s
with_compartment(aCx, obj, || {
let proto = GetProtoObject(aCx, obj, obj);
JS_SetPrototype(aCx, obj, proto);
raw.reflector().set_jsobject(obj);
RegisterBindings::Register(aCx, obj);
});
Temporary::new(raw)""" % CreateBindingJSObject(self.descriptor))
class CGIDLInterface(CGThing):
    """
    Class for codegen of an implementation of the IDLInterface trait.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        replacer = {
            'type': self.descriptor.name,
            'depth': self.descriptor.interface.inheritanceDepth(),
        }
        # get_prototype_id/get_prototype_depth let unwrapping code check
        # whether a JS object is (or derives from) this interface.
        return string.Template("""
impl IDLInterface for ${type} {
fn get_prototype_id(_: Option<${type}>) -> PrototypeList::id::ID {
PrototypeList::id::${type}
}
fn get_prototype_depth(_: Option<${type}>) -> uint {
${depth}
}
}
""").substitute(replacer)
class CGAbstractExternMethod(CGAbstractMethod):
    """
    Abstract base class for codegen of implementation-only (no
    declaration) static methods.
    """
    def __init__(self, descriptor, name, returnType, args):
        # Always extern (called back from the JS engine) and never inline.
        CGAbstractMethod.__init__(self, descriptor, name, returnType, args,
                                  inline=False, extern=True)
class PropertyArrays():
    """Aggregates the property definers (methods/attrs/consts) for an interface."""
    def __init__(self, descriptor):
        self.staticMethods = MethodDefiner(descriptor, "StaticMethods",
                                           static=True)
        self.staticAttrs = AttrDefiner(descriptor, "StaticAttributes",
                                       static=True)
        self.methods = MethodDefiner(descriptor, "Methods", static=False)
        self.attrs = AttrDefiner(descriptor, "Attributes", static=False)
        self.consts = ConstDefiner(descriptor, "Constants")

    @staticmethod
    def arrayNames():
        return ["staticMethods", "staticAttrs", "methods", "attrs", "consts"]

    def variableNames(self):
        return dict((array, getattr(self, array).variableName())
                    for array in self.arrayNames())

    def __str__(self):
        return ''.join(str(getattr(self, array))
                       for array in self.arrayNames())
class CGNativeProperties(CGThing):
    """Generates the static NativeProperties record for an interface."""
    def __init__(self, descriptor, properties):
        CGThing.__init__(self)
        self.properties = properties

    def define(self):
        def getField(array):
            # Empty arrays are omitted (None) rather than emitted empty.
            propertyArray = getattr(self.properties, array)
            if propertyArray.length() > 0:
                value = "Some(%s)" % propertyArray.variableName()
            else:
                value = "None"
            return CGGeneric("%s: %s," % (array, value))

        fields = [getField(array) for array in self.properties.arrayNames()]
        nativeProps = CGList(fields, '\n')
        return CGWrapper(CGIndenter(nativeProps),
                         pre="static sNativeProperties: NativeProperties = NativeProperties {\n",
                         post="\n};\n").define()
class CGCreateInterfaceObjectsMethod(CGAbstractMethod):
    """
    Generate the CreateInterfaceObjects method for an interface descriptor.

    properties should be a PropertyArrays instance.
    """
    def __init__(self, descriptor, properties):
        assert not descriptor.interface.isCallback()
        args = [Argument('*mut JSContext', 'aCx'), Argument('*mut JSObject', 'aGlobal'),
                Argument('*mut JSObject', 'aReceiver')]
        CGAbstractMethod.__init__(self, descriptor, 'CreateInterfaceObjects', '*mut JSObject', args)
        self.properties = properties

    def definition_body(self):
        protoChain = self.descriptor.prototypeChain
        if len(protoChain) == 1:
            # No IDL parent interface: inherit from Object.prototype.
            getParentProto = "JS_GetObjectPrototype(aCx, aGlobal)"
        else:
            parentProtoName = self.descriptor.prototypeChain[-2]
            getParentProto = ("%s::GetProtoObject(aCx, aGlobal, aReceiver)" %
                              toBindingNamespace(parentProtoName))
        getParentProto = ("let parentProto: *mut JSObject = %s;\n"
                          "assert!(parentProto.is_not_null());\n") % getParentProto
        if self.descriptor.concrete:
            if self.descriptor.proxy:
                domClass = "&Class"
            else:
                domClass = "&Class.dom_class"
        else:
            domClass = "ptr::null()"
        if self.descriptor.interface.hasInterfaceObject():
            if self.descriptor.interface.ctor():
                constructHook = CONSTRUCT_HOOK_NAME
                constructArgs = methodLength(self.descriptor.interface.ctor())
            else:
                # No [Constructor]: installing a throwing stub matches the
                # WebIDL requirement that plain interfaces aren't callable.
                constructHook = "ThrowingConstructor"
                constructArgs = 0
            constructor = 'Some((%s, "%s", %d))' % (
                constructHook, self.descriptor.interface.identifier.name,
                constructArgs)
        else:
            constructor = 'None'
        call = """return CreateInterfaceObjects2(aCx, aGlobal, aReceiver, parentProto,
&PrototypeClass, %s,
%s,
&sNativeProperties);""" % (constructor, domClass)
        # NOTE(review): `call` is already fully %-substituted above, so the
        # second substitution below is a no-op as long as the result contains
        # no stray '%' characters — looks like a leftover; verify.
        return CGList([
            CGGeneric(getParentProto),
            CGGeneric(call % self.properties.variableNames())
        ], "\n")
class CGGetPerInterfaceObject(CGAbstractMethod):
    """
    A method for getting a per-interface object (a prototype object or interface
    constructor object).
    """
    def __init__(self, descriptor, name, idPrefix="", pub=False):
        args = [Argument('*mut JSContext', 'aCx'), Argument('*mut JSObject', 'aGlobal'),
                Argument('*mut JSObject', 'aReceiver')]
        CGAbstractMethod.__init__(self, descriptor, name,
                                  '*mut JSObject', args, pub=pub)
        # Index expression into the per-global proto-or-iface cache array,
        # e.g. "PrototypeList::id::Foo".
        self.id = idPrefix + "id::" + self.descriptor.name

    def definition_body(self):
        # Lazily create the per-interface object on first request and cache
        # it in the global's proto-or-iface array.
        return CGGeneric("""
/* aGlobal and aReceiver are usually the same, but they can be different
too. For example a sandbox often has an xray wrapper for a window as the
prototype of the sandbox's global. In that case aReceiver is the xray
wrapper and aGlobal is the sandbox's global.
*/
assert!(((*JS_GetClass(aGlobal)).flags & JSCLASS_DOM_GLOBAL) != 0);
/* Check to see whether the interface objects are already installed */
let protoOrIfaceArray = GetProtoOrIfaceArray(aGlobal);
let cachedObject: *mut JSObject = *protoOrIfaceArray.offset(%s as int);
if cachedObject.is_null() {
let tmp: *mut JSObject = CreateInterfaceObjects(aCx, aGlobal, aReceiver);
assert!(tmp.is_not_null());
*protoOrIfaceArray.offset(%s as int) = tmp;
tmp
} else {
cachedObject
}""" % (self.id, self.id))
class CGGetProtoObjectMethod(CGGetPerInterfaceObject):
    """
    A method for getting the interface prototype object.
    """
    def __init__(self, descriptor):
        CGGetPerInterfaceObject.__init__(self, descriptor, "GetProtoObject",
                                         "PrototypeList::", pub=True)

    def definition_body(self):
        preamble = CGGeneric(
            "/* Get the interface prototype object for this class. This will create the\n"
            "object as needed. */")
        return CGList([preamble,
                       CGGetPerInterfaceObject.definition_body(self)])
class CGGetConstructorObjectMethod(CGGetPerInterfaceObject):
    """
    A method for getting the interface constructor object.
    """
    def __init__(self, descriptor):
        CGGetPerInterfaceObject.__init__(self, descriptor, "GetConstructorObject",
                                         "constructors::")

    def definition_body(self):
        preamble = CGGeneric(
            "/* Get the interface object for this class. This will create the object as\n"
            "needed. */")
        return CGList([preamble,
                       CGGetPerInterfaceObject.definition_body(self)])
class CGDefineProxyHandler(CGAbstractMethod):
    """
    A method to create and cache the proxy trap for a given interface.
    """
    def __init__(self, descriptor):
        assert descriptor.proxy
        CGAbstractMethod.__init__(self, descriptor, 'DefineProxyHandler', '*const libc::c_void', [], pub=True)

    def define(self):
        return CGAbstractMethod.define(self)

    def definition_body(self):
        # Traps left as None/null fall back to default proxy behavior;
        # finalize and trace reuse the same hooks as the non-proxy path.
        body = """\
let traps = ProxyTraps {
getPropertyDescriptor: Some(getPropertyDescriptor),
getOwnPropertyDescriptor: Some(getOwnPropertyDescriptor),
defineProperty: Some(defineProperty),
getOwnPropertyNames: ptr::null(),
delete_: Some(delete_),
enumerate: ptr::null(),
has: None,
hasOwn: Some(hasOwn),
get: Some(get),
set: None,
keys: ptr::null(),
iterate: None,
call: None,
construct: None,
nativeCall: ptr::null(),
hasInstance: None,
typeOf: None,
objectClassIs: None,
obj_toString: Some(obj_toString),
fun_toString: None,
//regexp_toShared: ptr::null(),
defaultValue: None,
iteratorNext: None,
finalize: Some(%s),
getElementIfPresent: None,
getPrototypeOf: None,
trace: Some(%s)
};
CreateProxyHandler(&traps, &Class as *const _ as *const _)
""" % (FINALIZE_HOOK_NAME,
       TRACE_HOOK_NAME)
        return CGGeneric(body)
class CGDefineDOMInterfaceMethod(CGAbstractMethod):
    """
    A method for resolve hooks to try to lazily define the interface object for
    a given interface.
    """
    def __init__(self, descriptor):
        assert descriptor.interface.hasInterfaceObject()
        signature = [
            Argument('*mut JSContext', 'cx'),
            Argument('*mut JSObject', 'global'),
        ]
        CGAbstractMethod.__init__(self, descriptor, 'DefineDOMInterface', 'void', signature, pub=True)

    def define(self):
        return CGAbstractMethod.define(self)

    def definition_body(self):
        # GetProtoObject creates the interface object as a side effect.
        body = ("assert!(global.is_not_null());\n"
                "assert!(GetProtoObject(cx, global, global).is_not_null());")
        return CGGeneric(body)
def needCx(returnType, arguments, considerTypes):
    """Return whether the native call needs a JSContext ('cx'), i.e. whether
    the return type or any argument type requires one. Always False when
    considerTypes is falsy."""
    if not considerTypes:
        return False
    return (typeNeedsCx(returnType, True) or
            any(typeNeedsCx(arg.type) for arg in arguments))
class CGCallGenerator(CGThing):
    """
    A class to generate an actual call to a native object. Assumes that the
    native object is stored in a variable whose name is given by the |object|
    argument.

    errorResult should be a string for the value to return in case of an
    exception from the native code, or None if no error reporting is needed.
    """
    def __init__(self, errorResult, arguments, argsPre, returnType,
                 extendedAttributes, descriptorProvider, nativeMethodName,
                 static, object="this"):
        CGThing.__init__(self)

        assert errorResult is None or isinstance(errorResult, str)

        # A non-None errorResult means the native returns Result<_, Error>.
        isFallible = errorResult is not None

        result = getRetvalDeclarationForType(returnType, descriptorProvider)
        if isFallible:
            result = CGWrapper(result, pre="Result<", post=", Error>")

        # Assemble the argument list: fixed prefix args first, then the
        # converted IDL arguments.
        args = CGList([CGGeneric(arg) for arg in argsPre], ", ")
        for (a, name) in arguments:
            #XXXjdm Perhaps we should pass all nontrivial types by borrowed pointer
            if a.type.isGeckoInterface():
                # Required, non-nullable interface arguments are passed by
                # reference.
                if not (a.type.nullable() or a.optional):
                    name = "&" + name
            elif a.type.isDictionary():
                name = "&" + name
            args.append(CGGeneric(name))

        needsCx = needCx(returnType, (a for (a, _) in arguments), True)

        # Prepend cx unless the caller already supplied it via argsPre.
        if not "cx" in argsPre and needsCx:
            args.prepend(CGGeneric("cx"))

        # Build up our actual call
        self.cgRoot = CGList([], "\n")

        call = CGGeneric(nativeMethodName)
        if static:
            # Static methods are called as Interface::Method(...).
            call = CGWrapper(call, pre="%s::" % descriptorProvider.interface.identifier.name)
        else:
            # Instance methods dereference the rooted |object|.
            call = CGWrapper(call, pre="(*%s)." % object)
        call = CGList([call, CGWrapper(args, pre="(", post=")")])

        self.cgRoot.append(CGList([
            CGGeneric("let result: "),
            result,
            CGGeneric(" = "),
            call,
            CGGeneric(";"),
        ]))

        if isFallible:
            # Non-static calls need the global to throw the DOM exception on.
            if static:
                glob = ""
            else:
                glob = " let global = global_object_for_js_object(this.reflector().get_jsobject());\n"\
                       " let global = global.root();\n"

            # Unwrap the Result; on Err, throw and return errorResult.
            self.cgRoot.append(CGGeneric(
                "let result = match result {\n"
                " Ok(result) => result,\n"
                " Err(e) => {\n"
                "%s"
                " throw_dom_exception(cx, &global.root_ref(), e);\n"
                " return%s;\n"
                " },\n"
                "};\n" % (glob, errorResult)))

        if typeRetValNeedsRooting(returnType):
            self.cgRoot.append(CGGeneric("let result = result.root();"))

    def define(self):
        return self.cgRoot.define()
class MethodNotCreatorError(Exception):
    """Exception recording the name of an offending type; presumably raised
    when a method expected to be a creator is not one — usage is outside
    this chunk."""
    def __init__(self, typename):
        # Keep the type name available for diagnostics.
        self.typename = typename
class CGPerSignatureCall(CGThing):
    """
    This class handles the guts of generating code for a particular
    call signature.  A call signature consists of four things:

    1) A return type, which can be None to indicate that there is no
       actual return value (e.g. this is an attribute setter) or an
       IDLType if there's an IDL type involved (including |void|).
    2) An argument list, which is allowed to be empty.
    3) A name of a native method to call.
    4) Whether or not this method is static.

    We also need to know whether this is a method or a getter/setter
    to do error reporting correctly.

    The idlNode parameter can be either a method or an attr. We can query
    |idlNode.identifier| in both cases, so we can be agnostic between the two.
    """
    # XXXbz For now each entry in the argument list is either an
    # IDLArgument or a FakeArgument, but longer-term we may want to
    # have ways of flagging things like JSContext* or optional_argc in
    # there.
    def __init__(self, returnType, argsPre, arguments, nativeMethodName, static,
                 descriptor, idlNode, argConversionStartsAt=0,
                 getter=False, setter=False):
        CGThing.__init__(self)
        self.returnType = returnType
        self.descriptor = descriptor
        self.idlNode = idlNode
        self.extendedAttributes = descriptor.getExtendedAttributes(idlNode,
                                                                   getter=getter,
                                                                   setter=setter)
        self.argsPre = argsPre
        self.arguments = arguments
        self.argCount = len(arguments)
        if self.argCount > argConversionStartsAt:
            # Insert our argv in there
            cgThings = [CGGeneric(self.getArgvDecl())]
        else:
            cgThings = []
        # Convert each JS argument (from argConversionStartsAt on) to its
        # native form; setters must not treat invalid enum values as fatal.
        cgThings.extend([CGArgumentConverter(arguments[i], i, self.getArgv(),
                                             self.getArgc(), self.descriptor,
                                             invalidEnumValueFatal=not setter) for
                         i in range(argConversionStartsAt, self.argCount)])

        # The actual native call; fallible calls return `false as JSBool`
        # to the JS engine on error.
        cgThings.append(CGCallGenerator(
            ' false as JSBool' if self.isFallible() else None,
            self.getArguments(), self.argsPre, returnType,
            self.extendedAttributes, descriptor, nativeMethodName,
            static))
        self.cgRoot = CGList(cgThings, "\n")

    def getArgv(self):
        # Name of the argv local; empty when there are no arguments at all.
        return "argv" if self.argCount > 0 else ""
    def getArgvDecl(self):
        return "\nlet argv = JS_ARGV(cx, vp);\n"
    def getArgc(self):
        return "argc"
    def getArguments(self):
        # Pair each IDL argument with the name of its converted local,
        # rooting non-callback Gecko interface arguments.
        def process(arg, i):
            argVal = "arg" + str(i)
            if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
                argVal += ".root_ref()"
            return argVal
        return [(a, process(a, i)) for (i, a) in enumerate(self.arguments)]

    def isFallible(self):
        return not 'infallible' in self.extendedAttributes

    def wrap_return_value(self):
        # Store the converted return value through *vp.
        return wrapForType('*vp')

    def define(self):
        return (self.cgRoot.define() + "\n" + self.wrap_return_value())
class CGSwitch(CGList):
    """
    Emits a Rust `match` expression from an expression, a list of CGCase
    arms, and an optional default arm (a CGThing for the `_ =>` body).
    """
    def __init__(self, expression, cases, default=None):
        indented_cases = [CGIndenter(case) for case in cases]
        CGList.__init__(self, indented_cases, "\n")
        # Open the match with the scrutinee expression.
        header = CGWrapper(CGGeneric(expression), pre="match ", post=" {")
        self.prepend(header)
        if default is not None:
            fallback = CGWrapper(CGIndenter(default),
                                 pre="_ => {\n",
                                 post="\n}")
            self.append(CGIndenter(fallback))
        self.append(CGGeneric("}"))
class CGCase(CGList):
    """
    Emits one arm of a Rust `match`: an expression, a body (may be None),
    and a fallThrough flag that is rejected because Rust has no fall-through.
    """
    def __init__(self, expression, body, fallThrough=False):
        if fallThrough:
            raise TypeError("fall through required but unsupported")
        #bodyList.append(CGGeneric('fail!("fall through unsupported"); /* Fall through */'))
        CGList.__init__(self, [], "\n")
        self.append(CGWrapper(CGGeneric(expression), post=" => {"))
        self.append(CGIndenter(CGList([body], "\n")))
        self.append(CGGeneric("}"))
class CGGetterCall(CGPerSignatureCall):
    """
    Generates the native call for a particular IDL attribute getter; a
    getter takes no arguments and returns the attribute's type.
    """
    def __init__(self, argsPre, returnType, nativeMethodName, descriptor, attr):
        CGPerSignatureCall.__init__(self, returnType, argsPre, [],
                                    nativeMethodName, attr.isStatic(),
                                    descriptor, attr, getter=True)
class FakeArgument():
    """
    A stand-in that quacks like an IDLArgument, used so that setters and
    special operations can reuse the method-call machinery. It is never
    optional, variadic, range-enforced or clamped, and has no default.
    """
    def __init__(self, type, interfaceMember, allowTreatNonObjectAsNull=False):
        self.type = type
        self.defaultValue = None
        self.optional = False
        self.variadic = False
        self.enforceRange = False
        self.clamp = False
        # Inherit null handling from the member being faked.
        self.treatNullAs = interfaceMember.treatNullAs
        self._allowTreatNonObjectAsNull = allowTreatNonObjectAsNull

    def allowTreatNonCallableAsNull(self):
        return self._allowTreatNonObjectAsNull
class CGSetterCall(CGPerSignatureCall):
    """
    Generates the native call for a particular IDL attribute setter; the
    value to set is modeled as a single FakeArgument.
    """
    def __init__(self, argsPre, argType, nativeMethodName, descriptor, attr):
        fakeValue = FakeArgument(argType, attr, allowTreatNonObjectAsNull=True)
        CGPerSignatureCall.__init__(self, None, argsPre, [fakeValue],
                                    nativeMethodName, attr.isStatic(),
                                    descriptor, attr, setter=True)

    def wrap_return_value(self):
        # We have no return value
        return "\nreturn 1;"

    def getArgc(self):
        return "1"

    def getArgvDecl(self):
        # We just get our stuff from our last arg no matter what
        return ""
class CGAbstractBindingMethod(CGAbstractExternMethod):
    """
    Common class to generate the JSNatives for all our methods, getters, and
    setters.  This will generate the function declaration and unwrap the
    |this| object.  Subclasses are expected to override the generate_code
    function to do the rest of the work.  This function should return a
    CGThing which is already properly indented.
    """
    def __init__(self, descriptor, name, args, unwrapFailureCode=None):
        CGAbstractExternMethod.__init__(self, descriptor, name, "JSBool", args)
        if unwrapFailureCode is None:
            # Default failure behavior: throw a TypeError naming the interface.
            self.unwrapFailureCode = (
                'throw_type_error(cx, "\\"this\\" object does not '
                'implement interface %s.");\n'
                'return 0;' % descriptor.interface.identifier.name)
        else:
            self.unwrapFailureCode = unwrapFailureCode

    def definition_body(self):
        # Our descriptor might claim that we're not castable, simply because
        # we're someone's consequential interface. But for this-unwrapping, we
        # know that we're the real deal. So fake a descriptor here for
        # consumption by FailureFatalCastableObjectUnwrapper.
        unwrapThis = str(CastableObjectUnwrapper(
            FakeCastableDescriptor(self.descriptor),
            "obj", self.unwrapFailureCode))
        # Fetch |this| from vp, bail out on null, then splice the unwrap
        # snippet computed above into the initializer for `this`.
        unwrapThis = CGGeneric(
            "let obj: *mut JSObject = JS_THIS_OBJECT(cx, vp as *mut JSVal);\n"
            "if obj.is_null() {\n"
            " return false as JSBool;\n"
            "}\n"
            "\n"
            "let this: JS<%s> = %s;\n" % (self.descriptor.concreteType, unwrapThis))
        return CGList([ unwrapThis, self.generate_code() ], "\n")

    def generate_code(self):
        assert(False) # Override me
class CGAbstractStaticBindingMethod(CGAbstractMethod):
    """
    Common class to generate the JSNatives for all our static methods, getters
    and setters.  Declares the standard (cx, argc, vp) JSNative signature and
    defers the whole body to generate_code, which subclasses must override
    and return as an already properly indented CGThing.
    """
    def __init__(self, descriptor, name):
        signature = [
            Argument('*mut JSContext', 'cx'),
            Argument('libc::c_uint', 'argc'),
            Argument('*mut JSVal', 'vp'),
        ]
        CGAbstractMethod.__init__(self, descriptor, name, "JSBool", signature,
                                  extern=True)

    def definition_body(self):
        # Everything comes from the subclass.
        return self.generate_code()

    def generate_code(self):
        assert False # Override me
class CGGenericMethod(CGAbstractBindingMethod):
    """
    The shared JSNative used for IDL methods: it pulls the JSJitInfo off the
    callee function and dispatches through CallJitMethodOp.
    """
    def __init__(self, descriptor):
        signature = [Argument('*mut JSContext', 'cx'),
                     Argument('libc::c_uint', 'argc'),
                     Argument('*mut JSVal', 'vp')]
        CGAbstractBindingMethod.__init__(self, descriptor, 'genericMethod', signature)

    def generate_code(self):
        dispatch = (
            "let _info: *const JSJitInfo = RUST_FUNCTION_VALUE_TO_JITINFO(JS_CALLEE(cx, vp));\n"
            "return CallJitMethodOp(_info, cx, obj, this.unsafe_get() as *mut libc::c_void, argc, vp);")
        return CGGeneric(dispatch)
class CGSpecializedMethod(CGAbstractExternMethod):
    """
    Generates the specialized method implementation that the JIT can call
    with lower overhead.
    """
    def __init__(self, descriptor, method):
        self.method = method
        params = [Argument('*mut JSContext', 'cx'),
                  Argument('JSHandleObject', '_obj'),
                  Argument('*const %s' % descriptor.concreteType, 'this'),
                  Argument('libc::c_uint', 'argc'),
                  Argument('*mut JSVal', 'vp')]
        CGAbstractExternMethod.__init__(self, descriptor,
                                        method.identifier.name, 'JSBool', params)

    def definition_body(self):
        # Re-root `this` before dispatching into the native implementation.
        rooting = ("let this = JS::from_raw(this);\n"
                   "let this = this.root();\n")
        native = CGSpecializedMethod.makeNativeName(self.descriptor, self.method)
        call = CGMethodCall([], native, self.method.isStatic(),
                            self.descriptor, self.method)
        return CGWrapper(call, pre=rooting)

    @staticmethod
    def makeNativeName(descriptor, method):
        return MakeNativeName(method.identifier.name)
class CGStaticMethod(CGAbstractStaticBindingMethod):
    """
    A class for generating the Rust code for an IDL static method.
    """
    def __init__(self, descriptor, method):
        self.method = method
        CGAbstractStaticBindingMethod.__init__(self, descriptor,
                                               method.identifier.name)

    def generate_code(self):
        native = CGSpecializedMethod.makeNativeName(self.descriptor,
                                                    self.method)
        return CGMethodCall([], native, True, self.descriptor, self.method)
class CGGenericGetter(CGAbstractBindingMethod):
    """
    A class for generating the Rust code for an IDL attribute getter.
    """
    def __init__(self, descriptor, lenientThis=False):
        args = [Argument('*mut JSContext', 'cx'), Argument('libc::c_uint', 'argc'),
                Argument('*mut JSVal', 'vp')]
        if lenientThis:
            name = "genericLenientGetter"
            # Lenient getters return undefined instead of throwing when
            # |this| is the wrong type.
            # NOTE(review): this snippet is C++-flavored (MOZ_ASSERT,
            # JS_SET_RVAL, JS:: namespace) rather than Rust — the lenient
            # path looks unported; confirm before enabling it.
            unwrapFailureCode = (
                "MOZ_ASSERT(!JS_IsExceptionPending(cx));\n"
                "JS_SET_RVAL(cx, vp, JS::UndefinedValue());\n"
                "return true;")
        else:
            name = "genericGetter"
            unwrapFailureCode = None
        CGAbstractBindingMethod.__init__(self, descriptor, name, args,
                                         unwrapFailureCode)

    def generate_code(self):
        # Dispatch through the JIT info attached to the callee function.
        return CGGeneric(
            "let info: *const JSJitInfo = RUST_FUNCTION_VALUE_TO_JITINFO(JS_CALLEE(cx, vp));\n"
            "return CallJitPropertyOp(info, cx, obj, this.unsafe_get() as *mut libc::c_void, vp);\n")
class CGSpecializedGetter(CGAbstractExternMethod):
    """
    Generates the specialized attribute getter that the JIT can call with
    lower overhead.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        params = [Argument('*mut JSContext', 'cx'),
                  Argument('JSHandleObject', '_obj'),
                  Argument('*const %s' % descriptor.concreteType, 'this'),
                  Argument('*mut JSVal', 'vp')]
        CGAbstractExternMethod.__init__(self, descriptor,
                                        'get_' + attr.identifier.name,
                                        "JSBool", params)

    def definition_body(self):
        # Re-root `this` before making the native call.
        rooting = ("let this = JS::from_raw(this);\n"
                   "let this = this.root();\n")
        native = CGSpecializedGetter.makeNativeName(self.descriptor, self.attr)
        call = CGGetterCall([], self.attr.type, native, self.descriptor,
                            self.attr)
        return CGWrapper(call, pre=rooting)

    @staticmethod
    def makeNativeName(descriptor, attr):
        # Fallible or nullable getters go through the Get-prefixed native.
        base = MakeNativeName(attr.identifier.name)
        infallible = ('infallible' in
                      descriptor.getExtendedAttributes(attr, getter=True))
        if infallible and not attr.type.nullable():
            return base
        return "Get" + base
class CGStaticGetter(CGAbstractStaticBindingMethod):
    """
    Generates the entry point for an IDL static attribute getter.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        CGAbstractStaticBindingMethod.__init__(self, descriptor,
                                               'get_' + attr.identifier.name)

    def generate_code(self):
        native = CGSpecializedGetter.makeNativeName(self.descriptor,
                                                    self.attr)
        return CGGetterCall([], self.attr.type, native, self.descriptor,
                            self.attr)
class CGGenericSetter(CGAbstractBindingMethod):
    """
    A class for generating the Rust code for an IDL attribute setter.
    """
    def __init__(self, descriptor, lenientThis=False):
        args = [Argument('*mut JSContext', 'cx'), Argument('libc::c_uint', 'argc'),
                Argument('*mut JSVal', 'vp')]
        if lenientThis:
            name = "genericLenientSetter"
            # Lenient setters silently succeed when |this| is the wrong type.
            # NOTE(review): MOZ_ASSERT / `return true` are C++ remnants —
            # the lenient path looks unported; confirm before enabling it.
            unwrapFailureCode = (
                "MOZ_ASSERT(!JS_IsExceptionPending(cx));\n"
                "return true;")
        else:
            name = "genericSetter"
            unwrapFailureCode = None
        CGAbstractBindingMethod.__init__(self, descriptor, name, args,
                                         unwrapFailureCode)

    def generate_code(self):
        # Take the value from argv (or undefined if no argument was passed),
        # dispatch through the JIT info, then set *vp to undefined since
        # setters produce no value.
        return CGGeneric(
            "let mut undef = UndefinedValue();\n"
            "let argv: *mut JSVal = if argc != 0 { JS_ARGV(cx, vp) } else { &mut undef as *mut JSVal };\n"
            "let info: *const JSJitInfo = RUST_FUNCTION_VALUE_TO_JITINFO(JS_CALLEE(cx, vp));\n"
            "if CallJitPropertyOp(info, cx, obj, this.unsafe_get() as *mut libc::c_void, argv) == 0 {\n"
            " return 0;\n"
            "}\n"
            "*vp = UndefinedValue();\n"
            "return 1;")
class CGSpecializedSetter(CGAbstractExternMethod):
    """
    Generates the specialized attribute setter that the JIT can call with
    lower overhead.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        params = [Argument('*mut JSContext', 'cx'),
                  Argument('JSHandleObject', '_obj'),
                  Argument('*const %s' % descriptor.concreteType, 'this'),
                  Argument('*mut JSVal', 'argv')]
        CGAbstractExternMethod.__init__(self, descriptor,
                                        'set_' + attr.identifier.name,
                                        "JSBool", params)

    def definition_body(self):
        # Re-root `this` before making the native call.
        rooting = ("let this = JS::from_raw(this);\n"
                   "let this = this.root();\n")
        native = CGSpecializedSetter.makeNativeName(self.descriptor, self.attr)
        call = CGSetterCall([], self.attr.type, native, self.descriptor,
                            self.attr)
        return CGWrapper(call, pre=rooting)

    @staticmethod
    def makeNativeName(descriptor, attr):
        return "Set" + MakeNativeName(attr.identifier.name)
class CGStaticSetter(CGAbstractStaticBindingMethod):
    """
    Generates the entry point for an IDL static attribute setter.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        CGAbstractStaticBindingMethod.__init__(self, descriptor,
                                               'set_' + attr.identifier.name)

    def generate_code(self):
        native = CGSpecializedSetter.makeNativeName(self.descriptor,
                                                    self.attr)
        # Bail out before the conversion when no argument was supplied.
        guard = CGGeneric(
            "let argv = JS_ARGV(cx, vp);\n"
            "if (argc == 0) {\n"
            " // XXXjdmreturn ThrowErrorMessage(cx, MSG_MISSING_ARGUMENTS, \"%s setter\");\n"
            " return 0;\n"
            "}\n" % self.attr.identifier.name)
        setter = CGSetterCall([], self.attr.type, native, self.descriptor,
                              self.attr)
        return CGList([guard, setter])
class CGMemberJITInfo(CGThing):
    """
    A class for generating the JITInfo for a property that points to
    our specialized getter and setter.
    """
    def __init__(self, descriptor, member):
        self.member = member
        self.descriptor = descriptor

    def defineJitInfo(self, infoName, opName, infallible):
        # Emit a `static <infoName>: JSJitInfo` pointing at the specialized
        # op, keyed by the prototype id and the interface's inheritance depth.
        protoID = "PrototypeList::id::%s as u32" % self.descriptor.name
        depth = self.descriptor.interface.inheritanceDepth()
        failstr = "true" if infallible else "false"
        return ("\n"
                "static %s: JSJitInfo = JSJitInfo {\n"
                " op: %s as *const u8,\n"
                " protoID: %s,\n"
                " depth: %s,\n"
                " isInfallible: %s, /* False in setters. */\n"
                " isConstant: false /* Only relevant for getters. */\n"
                "};\n" % (infoName, opName, protoID, depth, failstr))

    def define(self):
        if self.member.isAttr():
            getterinfo = ("%s_getterinfo" % self.member.identifier.name)
            getter = ("get_%s" % self.member.identifier.name)
            getterinfal = "infallible" in self.descriptor.getExtendedAttributes(self.member, getter=True)
            result = self.defineJitInfo(getterinfo, getter, getterinfal)
            if not self.member.readonly:
                setterinfo = ("%s_setterinfo" % self.member.identifier.name)
                setter = ("set_%s" % self.member.identifier.name)
                # Setters are always fallible, since they have to do a typed unwrap.
                result += self.defineJitInfo(setterinfo, setter, False)
            return result
        if self.member.isMethod():
            methodinfo = ("%s_methodinfo" % self.member.identifier.name)
            # Actually a JSJitMethodOp, but JSJitPropertyOp by struct definition.
            method = ("%s" % self.member.identifier.name)

            # Methods are infallible if they are infallible, have no arguments
            # to unwrap, and have a return type that's infallible to wrap up for
            # return.
            methodInfal = False
            sigs = self.member.signatures()
            if len(sigs) == 1:
                # Don't handle overloading. If there's more than one signature,
                # one of them must take arguments.
                sig = sigs[0]
                if len(sig[1]) == 0:
                    # No arguments and infallible return boxing
                    methodInfal = True

            result = self.defineJitInfo(methodinfo, method, methodInfal)
            return result
        # NOTE(review): the message names "CGPropertyJITInfo" but this class
        # is CGMemberJITInfo — likely a stale name.
        raise TypeError("Illegal member type to CGPropertyJITInfo")
def getEnumValueName(value):
    """
    Map a WebIDL enum string value to a Rust-safe identifier.

    Some enum values can be empty strings; map those to "_empty" (and raise
    if a value is literally "_empty", to avoid a collision). Replace all
    chars other than [0-9A-Za-z_] with '_'. Raise SyntaxError for values we
    refuse to handle: non-ASCII characters anywhere in the value, a leading
    digit, or names reserved by the C++ spec.
    """
    # Reject non-printable-ASCII anywhere in the value. (re.search scans the
    # whole string; the previous re.match only ever tested the first
    # character, letting e.g. "a\xe9" through.)
    if re.search("[^\x20-\x7E]", value):
        raise SyntaxError('Enum value "' + value + '" contains non-ASCII characters')
    if re.match("^[0-9]", value):
        raise SyntaxError('Enum value "' + value + '" starts with a digit')
    value = re.sub(r'[^0-9A-Za-z_]', '_', value)
    # Identifiers starting "_<uppercase>" or containing "__" are reserved.
    if re.match("^_[A-Z]|__", value):
        raise SyntaxError('Enum value "' + value + '" is reserved by the C++ spec')
    if value == "_empty":
        raise SyntaxError('"_empty" is not an IDL enum value we support yet')
    if value == "":
        return "_empty"
    return MakeNativeName(value)
class CGEnum(CGThing):
    """
    Generates the module for a WebIDL enum: a `valuelist` Rust enum (one
    variant per enum value), the parallel `strings` table, a
    ToJSValConvertible impl converting a variant to its string form, and a
    `pub type` alias from the interface name to the valuelist.
    """
    def __init__(self, enum):
        CGThing.__init__(self)
        # The two %s slots take the variant names and the quoted string
        # values, both in declaration order so indexing by discriminant works.
        inner = """
use dom::bindings::conversions::ToJSValConvertible;
use js::jsapi::JSContext;
use js::jsval::JSVal;
#[repr(uint)]
#[deriving(Encodable, PartialEq)]
pub enum valuelist {
%s
}
pub static strings: &'static [&'static str] = &[
%s,
];
impl ToJSValConvertible for valuelist {
fn to_jsval(&self, cx: *mut JSContext) -> JSVal {
strings[*self as uint].to_string().to_jsval(cx)
}
}
""" % (",\n ".join(map(getEnumValueName, enum.values())),
       ",\n ".join(['"%s"' % val for val in enum.values()]))

        self.cgRoot = CGList([
            CGNamespace.build([enum.identifier.name + "Values"],
                              CGIndenter(CGGeneric(inner)), public=True),
            CGGeneric("pub type %s = self::%sValues::valuelist;\n" %
                      (enum.identifier.name, enum.identifier.name)),
        ])

    def define(self):
        return self.cgRoot.define()
def convertConstIDLValueToRust(value):
    """Return the Rust literal text for an IDL constant's value. Numeric
    tags stringify directly; booleans go through toStringBool; anything
    else is unsupported."""
    tag = value.type.tag()
    numericTags = [IDLType.Tags.int8, IDLType.Tags.uint8,
                   IDLType.Tags.int16, IDLType.Tags.uint16,
                   IDLType.Tags.int32, IDLType.Tags.uint32,
                   IDLType.Tags.int64, IDLType.Tags.uint64,
                   IDLType.Tags.float, IDLType.Tags.double]
    if tag in numericTags:
        return str(value.value)
    if tag == IDLType.Tags.bool:
        return toStringBool(value.value)
    raise TypeError("Const value of unhandled type: " + value.type)
class CGConstant(CGThing):
    """
    Generates `pub static NAME: TYPE = VALUE;` declarations for a list of
    IDL constants.
    """
    def __init__(self, constants):
        CGThing.__init__(self)
        self.constants = constants

    def define(self):
        def stringDecl(const):
            tag = const.value.type.tag()
            return CGGeneric("pub static %s: %s = %s;\n" %
                             (const.identifier.name, builtinNames[tag],
                              convertConstIDLValueToRust(const.value)))
        decls = CGList(stringDecl(m) for m in self.constants)
        return CGIndenter(decls).define()
def getUnionTypeTemplateVars(type, descriptorProvider):
    """
    Compute the template variables for one member type of a union: its short
    name (used to build the eName variant), its native Rust type name, and
    the JS-to-native conversion wrapped in Ok(Some(...)).
    """
    # For dictionaries and sequences we need to pass None as the failureCode
    # for getJSToNativeConversionTemplate.
    # Also, for dictionaries we would need to handle conversion of
    # null/undefined to the dictionary correctly.
    if type.isDictionary() or type.isSequence():
        raise TypeError("Can't handle dictionaries or sequences in unions")

    if type.isGeckoInterface():
        name = type.inner.identifier.name
        typeName = descriptorProvider.getDescriptor(name).nativeType
    elif type.isEnum():
        name = type.inner.identifier.name
        typeName = name
    elif type.isArray() or type.isSequence():
        # NOTE(review): the isSequence() half of this condition is
        # unreachable — sequences already raised above.
        name = str(type)
        #XXXjdm dunno about typeName here
        typeName = "/*" + type.name + "*/"
    elif type.isDOMString():
        name = type.name
        typeName = "DOMString"
    elif type.isPrimitive():
        name = type.name
        typeName = builtinNames[type.tag()]
    else:
        name = type.name
        typeName = "/*" + type.name + "*/"

    # The conversion returns Ok(None) when the value doesn't match this
    # member type, Err(()) on exception, Ok(Some(v)) on success.
    template, _, _, _ = getJSToNativeConversionTemplate(
        type, descriptorProvider, failureCode="return Ok(None);",
        exceptionCode='return Err(());',
        isDefinitelyObject=True)

    assert not type.isObject()
    jsConversion = string.Template(template).substitute({
        "val": "value",
    })
    jsConversion = CGWrapper(CGGeneric(jsConversion), pre="Ok(Some(", post="))")

    return {
        "name": name,
        "typeName": typeName,
        "jsConversion": jsConversion,
    }
class CGUnionStruct(CGThing):
    """
    Generates the Rust enum representing a WebIDL union (one eName variant
    per flattened member type) together with its ToJSValConvertible impl.
    """
    def __init__(self, type, descriptorProvider):
        # Nullability is handled by the caller wrapping in Option, not here.
        assert not type.nullable()
        assert not type.hasNullableType
        CGThing.__init__(self)
        self.type = type
        self.descriptorProvider = descriptorProvider

    def define(self):
        templateVars = map(lambda t: getUnionTypeTemplateVars(t, self.descriptorProvider),
                           self.type.flatMemberTypes)
        # One `eName(Type),` variant per member type...
        enumValues = [
            " e%s(%s)," % (v["name"], v["typeName"]) for v in templateVars
        ]
        # ...and a matching to_jsval arm delegating to the contained value.
        enumConversions = [
            " e%s(ref inner) => inner.to_jsval(cx)," % v["name"] for v in templateVars
        ]
        return ("""pub enum %s {
%s
}

impl ToJSValConvertible for %s {
fn to_jsval(&self, cx: *mut JSContext) -> JSVal {
match *self {
%s
}
}
}
""") % (self.type, "\n".join(enumValues),
        self.type, "\n".join(enumConversions))
class CGUnionConversionStruct(CGThing):
    """
    Generates the FromJSValConvertible impl for a union type: from_jsval
    tries each member type in turn via per-member TryConvertToX helpers,
    returning the first successful conversion or throwing if none match.
    """
    def __init__(self, type, descriptorProvider):
        # Nullability is handled by the caller wrapping in Option, not here.
        assert not type.nullable()
        assert not type.hasNullableType
        CGThing.__init__(self)
        self.type = type
        self.descriptorProvider = descriptorProvider

    def from_jsval(self):
        memberTypes = self.type.flatMemberTypes
        names = []
        conversions = []

        # Non-callback interface members: tried first, only for object values.
        interfaceMemberTypes = filter(lambda t: t.isNonCallbackInterface(), memberTypes)
        if len(interfaceMemberTypes) > 0:
            def get_name(memberType):
                # NOTE(review): this tests self.type (the union) rather than
                # memberType — looks like it should be
                # memberType.isGeckoInterface(); confirm against the IDL
                # type API before changing.
                if self.type.isGeckoInterface():
                    return memberType.inner.identifier.name
                return memberType.name
            def get_match(name):
                # Try one member type; propagate Err, return on success,
                # fall through on Ok(None).
                return (
                    "match %s::TryConvertTo%s(cx, value) {\n"
                    " Err(_) => return Err(()),\n"
                    " Ok(Some(value)) => return Ok(e%s(value)),\n"
                    " Ok(None) => (),\n"
                    "}\n") % (self.type, name, name)
            typeNames = [get_name(memberType) for memberType in interfaceMemberTypes]
            interfaceObject = CGList(CGGeneric(get_match(typeName)) for typeName in typeNames)
            names.extend(typeNames)
        else:
            interfaceObject = None

        # The categories below are recognized but not supported: each raises
        # if present, otherwise leaves its placeholder as None.
        arrayObjectMemberTypes = filter(lambda t: t.isArray() or t.isSequence(), memberTypes)
        if len(arrayObjectMemberTypes) > 0:
            assert len(arrayObjectMemberTypes) == 1
            raise TypeError("Can't handle arrays or sequences in unions.")
        else:
            arrayObject = None

        dateObjectMemberTypes = filter(lambda t: t.isDate(), memberTypes)
        if len(dateObjectMemberTypes) > 0:
            assert len(dateObjectMemberTypes) == 1
            raise TypeError("Can't handle dates in unions.")
        else:
            dateObject = None

        callbackMemberTypes = filter(lambda t: t.isCallback() or t.isCallbackInterface(), memberTypes)
        if len(callbackMemberTypes) > 0:
            assert len(callbackMemberTypes) == 1
            raise TypeError("Can't handle callbacks in unions.")
        else:
            callbackObject = None

        dictionaryMemberTypes = filter(lambda t: t.isDictionary(), memberTypes)
        if len(dictionaryMemberTypes) > 0:
            raise TypeError("No support for unwrapping dictionaries as member "
                            "of a union")
        else:
            dictionaryObject = None

        # callbackObject/dictionaryObject are always None here (the raises
        # above fire first), so this branch is effectively dead.
        if callbackObject or dictionaryObject:
            assert False, "Not currently supported"
        else:
            nonPlatformObject = None

        objectMemberTypes = filter(lambda t: t.isObject(), memberTypes)
        if len(objectMemberTypes) > 0:
            raise TypeError("Can't handle objects in unions.")
        else:
            # NOTE(review): shadows the `object` builtin; harmless but worth
            # renaming alongside the placeholders above.
            object = None

        # Object-flavored conversions run only when the JSVal is an object.
        hasObjectTypes = interfaceObject or arrayObject or dateObject or nonPlatformObject or object
        if hasObjectTypes:
            assert interfaceObject
            templateBody = CGList([interfaceObject], "\n")
            conversions.append(CGIfWrapper(templateBody, "value.is_object()"))

        # At most one primitive/string/enum member is supported; it is tried
        # unconditionally after the object cases.
        otherMemberTypes = [
            t for t in memberTypes if t.isPrimitive() or t.isString() or t.isEnum()
        ]
        if len(otherMemberTypes) > 0:
            assert len(otherMemberTypes) == 1
            memberType = otherMemberTypes[0]
            if memberType.isEnum():
                name = memberType.inner.identifier.name
            else:
                name = memberType.name
            match = (
                "match %s::TryConvertTo%s(cx, value) {\n"
                " Err(_) => return Err(()),\n"
                " Ok(Some(value)) => return Ok(e%s(value)),\n"
                " Ok(None) => (),\n"
                "}\n") % (self.type, name, name)
            conversions.append(CGGeneric(match))
            names.append(name)

        # Nothing matched: throw listing the member type names.
        conversions.append(CGGeneric(
            "throw_not_in_union(cx, \"%s\");\n"
            "Err(())" % ", ".join(names)))
        method = CGWrapper(
            CGIndenter(CGList(conversions, "\n\n")),
            pre="fn from_jsval(cx: *mut JSContext, value: JSVal, _option: ()) -> Result<%s, ()> {\n" % self.type,
            post="\n}")
        return CGWrapper(
            CGIndenter(method),
            pre="impl FromJSValConvertible<()> for %s {\n" % self.type,
            post="\n}")

    def try_method(self, t):
        # Emit the TryConvertToX helper for one member type: returns
        # Ok(Some(v)) on success, Ok(None) on mismatch, Err(()) on exception.
        templateVars = getUnionTypeTemplateVars(t, self.descriptorProvider)
        returnType = "Result<Option<%s>, ()>" % templateVars["typeName"]
        jsConversion = templateVars["jsConversion"]

        return CGWrapper(
            CGIndenter(jsConversion, 4),
            pre="fn TryConvertTo%s(cx: *mut JSContext, value: JSVal) -> %s {\n" % (t.name, returnType),
            post="\n}")

    def define(self):
        from_jsval = self.from_jsval()
        methods = CGIndenter(CGList([
            self.try_method(t) for t in self.type.flatMemberTypes
        ], "\n\n"))
        return """
%s

impl %s {
%s
}
""" % (from_jsval.define(), self.type, methods.define())
class ClassItem:
    """Base class for anything placed inside a CGClass: records the item's
    name and visibility; subclasses must implement declare/define."""
    def __init__(self, name, visibility):
        self.name = name
        self.visibility = visibility

    def declare(self, cgClass):
        # Subclasses must override.
        assert False

    def define(self, cgClass):
        # Subclasses must override.
        assert False
class ClassBase(ClassItem):
    """Records a base class (with visibility) for a CGClass."""
    def __init__(self, name, visibility='pub'):
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        return '%s %s' % (self.visibility, self.name)

    def define(self, cgClass):
        # Bases appear only in the declaration.
        return ''
class ClassMethod(ClassItem):
    """
    A method on a CGClass, rendered by declare() as a Rust `fn`.

    `body` supplies the method body (or override getBody()); `visibility`
    is prefixed to the declaration unless it is 'priv'. Several C++-era
    parameters are retained for interface compatibility but not all are
    honored — see the notes in __init__.
    """
    def __init__(self, name, returnType, args, inline=False, static=False,
                 virtual=False, const=False, bodyInHeader=False,
                 templateArgs=None, visibility='public', body=None,
                 breakAfterReturnDecl="\n",
                 breakAfterSelf="\n", override=False):
        """
        override indicates whether to flag the method as MOZ_OVERRIDE
        """
        assert not override or virtual
        self.returnType = returnType
        self.args = args
        # NOTE: the `inline` argument is deliberately discarded (always
        # False); inlining is not meaningful for the generated Rust.
        self.inline = False
        self.static = static
        self.virtual = virtual
        self.const = const
        # NOTE: `bodyInHeader` is forced to True so declare() always emits
        # the full body and define() emits nothing.
        self.bodyInHeader = True
        self.templateArgs = templateArgs
        self.body = body
        self.breakAfterReturnDecl = breakAfterReturnDecl
        self.breakAfterSelf = breakAfterSelf
        self.override = override
        ClassItem.__init__(self, name, visibility)

    def getDecorators(self, declaring):
        # Textual prefix ('inline'/'static'/'virtual') for the declaration;
        # with self.inline forced False, 'inline' can never appear.
        decorators = []
        if self.inline:
            decorators.append('inline')
        if declaring:
            if self.static:
                decorators.append('static')
            if self.virtual:
                decorators.append('virtual')
        if decorators:
            return ' '.join(decorators) + ' '
        return ''

    def getBody(self):
        # Override me or pass a string to constructor
        assert self.body is not None
        return self.body

    def declare(self, cgClass):
        templateClause = '<%s>' % ', '.join(self.templateArgs) \
                         if self.bodyInHeader and self.templateArgs else ''
        args = ', '.join([a.declare() for a in self.args])
        if self.bodyInHeader:
            body = CGIndenter(CGGeneric(self.getBody())).define()
            body = ' {\n' + body + '\n}'
        else:
            body = ';'

        return string.Template("${decorators}%s"
                               "${visibility}fn ${name}${templateClause}(${args})${returnType}${const}${override}${body}%s" %
                               (self.breakAfterReturnDecl, self.breakAfterSelf)
                               ).substitute({
                'templateClause': templateClause,
                'decorators': self.getDecorators(True),
                'returnType': (" -> %s" % self.returnType) if self.returnType else "",
                'name': self.name,
                'const': ' const' if self.const else '',
                'override': ' MOZ_OVERRIDE' if self.override else '',
                'args': args,
                'body': body,
                # Fix: compare strings with !=, not `is not` — identity
                # comparison of str literals only worked by CPython
                # interning accident.
                'visibility': self.visibility + ' ' if self.visibility != 'priv' else ''
            })

    def define(self, cgClass):
        # Everything is emitted from declare(); nothing to define separately.
        pass
class ClassUsingDeclaration(ClassItem):
    """
    Imports a name from a base class into a CGClass.

    baseClass is the name of the base class to import the name from;
    name is the name to import; visibility determines the visibility of
    the name (public, protected, private), defaulting to public.
    """
    def __init__(self, baseClass, name, visibility='public'):
        self.baseClass = baseClass
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        substitutions = {'baseClass': self.baseClass,
                         'name': self.name}
        return string.Template("""using ${baseClass}::${name};
""").substitute(substitutions)

    def define(self, cgClass):
        return ''
class ClassConstructor(ClassItem):
    """
    Used for adding a constructor to a CGClass.

    args is a list of Argument objects that are the arguments taken by the
    constructor.

    inline should be True if the constructor should be marked inline.

    bodyInHeader should be True if the body should be placed in the class
    declaration in the header.

    visibility determines the visibility of the constructor (public,
    protected, private), defaults to private.

    explicit should be True if the constructor should be marked explicit.

    baseConstructors is a list of strings containing calls to base constructors,
    defaults to None.

    body contains a string with the code for the constructor, defaults to empty.
    """
    def __init__(self, args, inline=False, bodyInHeader=False,
                 visibility="priv", explicit=False, baseConstructors=None,
                 body=""):
        self.args = args
        # NOTE: the `inline` argument is deliberately discarded (always
        # False); inlining is not meaningful for the generated Rust.
        self.inline = False
        self.bodyInHeader = bodyInHeader
        self.explicit = explicit
        self.baseConstructors = baseConstructors or []
        self.body = body
        ClassItem.__init__(self, None, visibility)

    def getDecorators(self, declaring):
        # C++-style decorator prefix; with self.inline forced False only
        # 'explicit' can ever appear.
        decorators = []
        if self.explicit:
            decorators.append('explicit')
        if self.inline and declaring:
            decorators.append('inline')
        if decorators:
            return ' '.join(decorators) + ' '
        return ''

    def getInitializationList(self, cgClass):
        # C++-style member-initializer list; only used by the define() path.
        items = [str(c) for c in self.baseConstructors]
        for m in cgClass.members:
            if not m.static:
                initialize = m.body
                if initialize:
                    items.append(m.name + "(" + initialize + ")")

        if len(items) > 0:
            return '\n : ' + ',\n '.join(items)
        return ''

    def getBody(self, cgClass):
        # Emits a struct-literal body: `ClassName { parent: <base ctor> }`.
        # NOTE(review): assumes at least one base constructor supplies the
        # `parent` field — an empty baseConstructors list would IndexError
        # here; confirm callers always provide one.
        initializers = [" parent: %s" % str(self.baseConstructors[0])]
        return (self.body + (
                "%s {\n"
                "%s\n"
                "}") % (cgClass.name, '\n'.join(initializers)))

    def declare(self, cgClass):
        args = ', '.join([a.declare() for a in self.args])
        body = ' ' + self.getBody(cgClass);
        body = stripTrailingWhitespace(body.replace('\n', '\n '))
        if len(body) > 0:
            body += '\n'
        body = ' {\n' + body + '}'

        return string.Template("""pub fn ${decorators}new(${args}) -> ${className}${body}
""").substitute({ 'decorators': self.getDecorators(True),
                  'className': cgClass.getNameString(),
                  'args': args,
                  'body': body })

    def define(self, cgClass):
        # Only used when bodyInHeader is False; the Rust path normally emits
        # everything from declare().
        if self.bodyInHeader:
            return ''

        args = ', '.join([a.define() for a in self.args])

        # Fix: getBody() requires the cgClass argument (see its signature);
        # the previous zero-argument call raised TypeError whenever this
        # path was reached.
        body = ' ' + self.getBody(cgClass)
        body = '\n' + stripTrailingWhitespace(body.replace('\n', '\n '))
        if len(body) > 0:
            body += '\n'

        return string.Template("""${decorators}
${className}::${className}(${args})${initializationList}
{${body}}
""").substitute({ 'decorators': self.getDecorators(False),
                  'className': cgClass.getNameString(),
                  'args': args,
                  'initializationList': self.getInitializationList(cgClass),
                  'body': body })
class ClassDestructor(ClassItem):
    """
    Used for adding a destructor to a CGClass.

    inline should be True if the destructor should be marked inline.

    bodyInHeader should be True if the body should be placed in the class
    declaration in the header.

    visibility determines the visibility of the destructor (public,
    protected, private), defaults to private.

    body contains a string with the code for the destructor, defaults to empty.

    virtual determines whether the destructor is virtual, defaults to False.
    """
    def __init__(self, inline=False, bodyInHeader=False,
                 visibility="private", body='', virtual=False):
        self.body = body
        self.virtual = virtual
        self.bodyInHeader = bodyInHeader
        # A header-resident body implies inline.
        self.inline = inline or bodyInHeader
        ClassItem.__init__(self, None, visibility)

    def getDecorators(self, declaring):
        """Return the 'virtual'/'inline' decorator prefix, or ''."""
        parts = []
        if declaring:
            if self.virtual:
                parts.append('virtual')
            if self.inline:
                parts.append('inline')
        return (' '.join(parts) + ' ') if parts else ''

    def getBody(self):
        return self.body

    def declare(self, cgClass):
        """Emit the in-class destructor declaration (with body when
        bodyInHeader is set)."""
        if not self.bodyInHeader:
            text = ';'
        else:
            text = ' ' + self.getBody()
            text = stripTrailingWhitespace(text.replace('\n', '\n '))
            if text:
                text += '\n'
            text = '\n{\n' + text + '}'
        return string.Template("""${decorators}~${className}()${body}
""").substitute({'decorators': self.getDecorators(True),
                 'className': cgClass.getNameString(),
                 'body': text})

    def define(self, cgClass):
        """Emit the out-of-line destructor definition, or '' when the body
        already lives in the header."""
        if self.bodyInHeader:
            return ''
        text = ' ' + self.getBody()
        text = '\n' + stripTrailingWhitespace(text.replace('\n', '\n '))
        if text:
            text += '\n'
        return string.Template("""${decorators}
${className}::~${className}()
{${body}}
""").substitute({'decorators': self.getDecorators(False),
                 'className': cgClass.getNameString(),
                 'body': text})
class ClassMember(ClassItem):
    """A data member of a CGClass: declares a Rust struct field, and for
    statics also knows how to emit a C++-style out-of-line definition."""
    def __init__(self, name, type, visibility="priv", static=False,
                 body=None):
        self.type = type
        self.static = static
        self.body = body
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        # Rust struct field: "<vis> <name>: <type>,"
        return '{0} {1}: {2},\n'.format(self.visibility, self.name, self.type)

    def define(self, cgClass):
        # Only static members get an out-of-line definition.
        if not self.static:
            return ''
        initializer = " = " + self.body if self.body else ""
        return '{0} {1}::{2}{3};\n'.format(self.type, cgClass.getNameString(),
                                           self.name, initializer)
class ClassTypedef(ClassItem):
    """A typedef member of a CGClass; declaration-only."""
    def __init__(self, name, type, visibility="public"):
        self.type = type
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        return 'typedef {0} {1};\n'.format(self.type, self.name)

    def define(self, cgClass):
        # Typedefs live entirely in the header.
        return ''
class ClassEnum(ClassItem):
    """An enum member of a CGClass, with optional explicit values aligned
    positionally with the entry names."""
    def __init__(self, name, entries, values=None, visibility="public"):
        self.entries = entries
        self.values = values
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        rendered = []
        for i, item in enumerate(self.entries):
            # An entry gets "= value" only when a value was supplied for it.
            if self.values and i < len(self.values):
                rendered.append('%s = %s' % (item, self.values[i]))
            else:
                rendered.append('%s' % item)
        title = (' ' + self.name) if self.name else ''
        return 'enum%s\n{\n %s\n};\n' % (title, ',\n '.join(rendered))

    def define(self, cgClass):
        # Only goes in the header.
        return ''
class ClassUnion(ClassItem):
    """A union member of a CGClass; declaration-only."""
    def __init__(self, name, entries, visibility="public"):
        # Terminate each entry with a semicolon up front.
        self.entries = [item + ";" for item in entries]
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        joined = '\n '.join(self.entries)
        return 'union %s\n{\n %s\n};\n' % (self.name, joined)

    def define(self, cgClass):
        # Only goes in the header.
        return ''
class CGClass(CGThing):
    """
    Generates a Rust `pub struct` plus an `impl` block for its items.

    Despite the C++-flavored member kinds (typedefs, unions, template args),
    define() emits Rust: the struct's first field is `parent` (the single
    base class), followed by the members; constructors, destructors and
    methods land in the impl block.
    """
    def __init__(self, name, bases=None, members=None, constructors=None,
                 destructor=None, methods=None,
                 typedefs=None, enums=None, unions=None, templateArgs=None,
                 templateSpecialization=None, isStruct=False,
                 disallowCopyConstruction=False, indent='',
                 decorators='',
                 extradeclarations='',
                 extradefinitions=''):
        CGThing.__init__(self)
        self.name = name
        # Fix: default the list parameters to fresh lists per instance
        # instead of sharing one mutable default list across all CGClasses.
        self.bases = bases if bases is not None else []
        self.members = members if members is not None else []
        self.constructors = constructors if constructors is not None else []
        # We store our single destructor in a list, since all of our
        # code wants lists of members.
        self.destructors = [destructor] if destructor else []
        self.methods = methods if methods is not None else []
        self.typedefs = typedefs if typedefs is not None else []
        self.enums = enums if enums is not None else []
        self.unions = unions if unions is not None else []
        self.templateArgs = templateArgs if templateArgs is not None else []
        self.templateSpecialization = (templateSpecialization
                                       if templateSpecialization is not None
                                       else [])
        self.isStruct = isStruct
        self.disallowCopyConstruction = disallowCopyConstruction
        self.indent = indent
        self.decorators = decorators
        self.extradeclarations = extradeclarations
        self.extradefinitions = extradefinitions

    def getNameString(self):
        """Return the class name, with template specialization args if any."""
        className = self.name
        if self.templateSpecialization:
            className = className + \
                '<%s>' % ', '.join([str(a) for a
                                    in self.templateSpecialization])
        return className

    def define(self):
        result = ''
        if self.templateArgs:
            templateArgs = [a.declare() for a in self.templateArgs]
            templateArgs = templateArgs[len(self.templateSpecialization):]
            result = result + self.indent + 'template <%s>\n' \
                     % ','.join([str(a) for a in templateArgs])

        if self.templateSpecialization:
            specialization = \
                '<%s>' % ', '.join([str(a) for a in self.templateSpecialization])
        else:
            specialization = ''

        myself = ''
        if self.decorators != '':
            myself += self.decorators + '\n'
        myself += '%spub struct %s%s' % (self.indent, self.name, specialization)
        result += myself

        assert len(self.bases) == 1  # XXjdm Can we support multiple inheritance?

        result += '{\n%s\n' % self.indent

        # Fix: build the member list locally instead of reassigning
        # self.members.  The previous code prepended the parent field to
        # self.members on every call, so invoking define() twice emitted a
        # duplicate `parent` field.  (ClassMember("parent", ...) has no
        # body, so getInitializationList never depended on it.)
        if self.bases:
            members = [ClassMember("parent", self.bases[0].name, "pub")] + self.members
        else:
            members = self.members

        result += CGIndenter(CGGeneric(self.extradeclarations),
                             len(self.indent)).define()

        def declareMembers(cgClass, memberList):
            # Concatenate each member's indented declaration.
            result = ''
            for member in memberList:
                declaration = member.declare(cgClass)
                declaration = CGIndenter(CGGeneric(declaration)).define()
                result = result + declaration
            return result

        if self.disallowCopyConstruction:
            class DisallowedCopyConstructor(object):
                def __init__(self):
                    self.visibility = "private"

                def declare(self, cgClass):
                    name = cgClass.getNameString()
                    return ("%s(const %s&) MOZ_DELETE;\n"
                            "void operator=(const %s) MOZ_DELETE;\n" % (name, name, name))
            disallowedCopyConstructors = [DisallowedCopyConstructor()]
        else:
            disallowedCopyConstructors = []

        # Struct body: enums, unions, typedefs, then the data members.
        order = [(self.enums, ''), (self.unions, ''),
                 (self.typedefs, ''), (members, '')]
        for (memberList, separator) in order:
            memberString = declareMembers(self, memberList)
            if self.indent:
                memberString = CGIndenter(CGGeneric(memberString),
                                          len(self.indent)).define()
            result = result + memberString

        result += self.indent + '}\n\n'
        result += 'impl %s {\n' % self.name

        # Impl body: constructors, destructors, then methods.  (The
        # separator element is unused by the loop below.)
        order = [(self.constructors + disallowedCopyConstructors, '\n'),
                 (self.destructors, '\n'), (self.methods, '\n)')]
        for (memberList, separator) in order:
            memberString = declareMembers(self, memberList)
            if self.indent:
                memberString = CGIndenter(CGGeneric(memberString),
                                          len(self.indent)).define()
            result = result + memberString
        result += "}"
        return result
class CGProxySpecialOperation(CGPerSignatureCall):
    """
    Base class for classes for calling an indexed or named special operation
    (don't use this directly, use the derived classes below).
    """
    def __init__(self, descriptor, operation):
        # 'operation' arrives as the operation key (e.g. 'IndexedGetter')
        # and is rebound to the IDL operation object below.
        nativeName = MakeNativeName(operation)
        operation = descriptor.operations[operation]
        assert len(operation.signatures()) == 1
        signature = operation.signatures()[0]
        (returnType, arguments) = signature

        # We pass len(arguments) as the final argument so that the
        # CGPerSignatureCall won't do any argument conversion of its own.
        CGPerSignatureCall.__init__(self, returnType, "", arguments, nativeName,
                                    False, descriptor, operation,
                                    len(arguments))

        if operation.isSetter() or operation.isCreator():
            # arguments[0] is the index or name of the item that we're setting.
            argument = arguments[1]
            template, _, declType, needsRooting = getJSToNativeConversionTemplate(
                argument.type, descriptor, treatNullAs=argument.treatNullAs)
            # The value being set is read out of the property descriptor.
            templateValues = {
                "val": "(*desc).value",
            }
            self.cgRoot.prepend(instantiateJSToNativeConversionTemplate(
                template, templateValues, declType, argument.identifier.name,
                needsRooting))
        elif operation.isGetter():
            # Getters report existence through a mutable 'found' flag that
            # getArguments() passes by reference below.
            self.cgRoot.prepend(CGGeneric("let mut found = false;"))

    def getArguments(self):
        def process(arg):
            # Gecko interface arguments (except callbacks) must be rooted
            # before being handed to the native method.
            argVal = arg.identifier.name
            if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
                argVal += ".root_ref()"
            return argVal
        args = [(a, process(a)) for a in self.arguments]
        if self.idlNode.isGetter():
            args.append((FakeArgument(BuiltinTypes[IDLBuiltinType.Types.boolean],
                                      self.idlNode),
                         "&mut found"))
        return args

    def wrap_return_value(self):
        # Only getters produce a JS value, and only when the subclass
        # supplied templateValues; the wrap is guarded on 'found'.
        if not self.idlNode.isGetter() or self.templateValues is None:
            return ""

        wrap = CGGeneric(wrapForType(**self.templateValues))
        wrap = CGIfWrapper(wrap, "found")
        return "\n" + wrap.define()
class CGProxyIndexedGetter(CGProxySpecialOperation):
    """
    Class to generate a call to an indexed getter. If templateValues is not None
    the returned value will be wrapped with wrapForType using templateValues.
    """
    def __init__(self, descriptor, templateValues=None):
        # templateValues must be assigned before the base __init__, which
        # can consult it via wrap_return_value().
        self.templateValues = templateValues
        CGProxySpecialOperation.__init__(self, descriptor, 'IndexedGetter')
class CGProxyIndexedSetter(CGProxySpecialOperation):
    """
    Class to generate a call to an indexed setter.
    """
    def __init__(self, descriptor):
        CGProxySpecialOperation.__init__(self, descriptor, 'IndexedSetter')
class CGProxyNamedGetter(CGProxySpecialOperation):
    """
    Class to generate a call to a named getter. If templateValues is not None
    the returned value will be wrapped with wrapForType using templateValues.
    """
    def __init__(self, descriptor, templateValues=None):
        # templateValues must be assigned before the base __init__, which
        # can consult it via wrap_return_value().
        self.templateValues = templateValues
        CGProxySpecialOperation.__init__(self, descriptor, 'NamedGetter')
class CGProxyNamedSetter(CGProxySpecialOperation):
    """
    Class to generate a call to a named setter.
    """
    def __init__(self, descriptor):
        CGProxySpecialOperation.__init__(self, descriptor, 'NamedSetter')
class CGProxyUnwrap(CGAbstractMethod):
    """Generates the UnwrapProxy helper: extracts the native object pointer
    from a DOM proxy's private slot."""
    def __init__(self, descriptor):
        args = [Argument('*mut JSObject', 'obj')]
        CGAbstractMethod.__init__(self, descriptor, "UnwrapProxy", '*const ' + descriptor.concreteType, args, alwaysInline=True)

    def definition_body(self):
        # The commented-out code mirrors Gecko's Xray-wrapper handling,
        # which is not implemented here.
        return CGGeneric("""/*if (xpc::WrapperFactory::IsXrayWrapper(obj)) {
obj = js::UnwrapObject(obj);
}*/
//MOZ_ASSERT(IsProxy(obj));
let box_ = GetProxyPrivate(obj).to_private() as *const %s;
return box_;""" % self.descriptor.concreteType)
class CGDOMJSProxyHandler_getOwnPropertyDescriptor(CGAbstractExternMethod):
    """
    Generates the proxy handler's getOwnPropertyDescriptor hook.  Lookup
    order in the emitted Rust: indexed getter/setter, then the expando
    object, then the named getter; otherwise (*desc).obj is nulled to
    report "no own property".
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'proxy'),
                Argument('jsid', 'id'), Argument('JSBool', 'set'),
                Argument('*mut JSPropertyDescriptor', 'desc')]
        CGAbstractExternMethod.__init__(self, descriptor, "getOwnPropertyDescriptor",
                                        "JSBool", args)
        self.descriptor = descriptor

    def getBody(self):
        indexedGetter = self.descriptor.operations['IndexedGetter']
        indexedSetter = self.descriptor.operations['IndexedSetter']

        setOrIndexedGet = ""
        if indexedGetter or indexedSetter:
            setOrIndexedGet += "let index = GetArrayIndexFromId(cx, id);\n"

        if indexedGetter:
            # Descriptor is read-only unless an indexed setter exists.
            readonly = toStringBool(self.descriptor.operations['IndexedSetter'] is None)
            fillDescriptor = "FillPropertyDescriptor(&mut *desc, proxy, %s);\nreturn 1;" % readonly
            templateValues = {'jsvalRef': '(*desc).value', 'successCode': fillDescriptor}
            get = ("if index.is_some() {\n" +
                   " let index = index.unwrap();\n" +
                   " let this = UnwrapProxy(proxy);\n" +
                   " let this = JS::from_raw(this);\n" +
                   " let this = this.root();\n" +
                   CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define() + "\n" +
                   "}\n")

        if indexedSetter or self.descriptor.operations['NamedSetter']:
            # When looking up for assignment, report a writable descriptor
            # for supported indexed/named properties.
            setOrIndexedGet += "if set != 0 {\n"
            if indexedSetter:
                setOrIndexedGet += (" if index.is_some() {\n" +
                                    " let index = index.unwrap();\n")
                if not 'IndexedCreator' in self.descriptor.operations:
                    # FIXME need to check that this is a 'supported property index'
                    assert False
                setOrIndexedGet += (" FillPropertyDescriptor(&mut *desc, proxy, false);\n" +
                                    " return 1;\n" +
                                    " }\n")
            if self.descriptor.operations['NamedSetter']:
                setOrIndexedGet += " if RUST_JSID_IS_STRING(id) {\n"
                if not 'NamedCreator' in self.descriptor.operations:
                    # FIXME need to check that this is a 'supported property name'
                    assert False
                setOrIndexedGet += (" FillPropertyDescriptor(&mut *desc, proxy, false);\n" +
                                    " return 1;\n" +
                                    " }\n")
            setOrIndexedGet += "}"
            if indexedGetter:
                setOrIndexedGet += (" else {\n" +
                                    CGIndenter(CGGeneric(get)).define() +
                                    "}")
            setOrIndexedGet += "\n\n"
        elif indexedGetter:
            # No setters at all: only consult the indexed getter on reads.
            setOrIndexedGet += ("if set == 0 {\n" +
                                CGIndenter(CGGeneric(get)).define() +
                                "}\n\n")

        namedGetter = self.descriptor.operations['NamedGetter']
        if namedGetter:
            readonly = toStringBool(self.descriptor.operations['NamedSetter'] is None)
            fillDescriptor = "FillPropertyDescriptor(&mut *desc, proxy, %s);\nreturn 1;" % readonly
            templateValues = {'jsvalRef': '(*desc).value', 'successCode': fillDescriptor}
            # Once we start supporting OverrideBuiltins we need to make
            # ResolveOwnProperty or EnumerateOwnProperties filter out named
            # properties that shadow prototype properties.
            namedGet = ("\n" +
                        "if set == 0 && RUST_JSID_IS_STRING(id) != 0 && !HasPropertyOnPrototype(cx, proxy, id) {\n" +
                        " let name = jsid_to_str(cx, id);\n" +
                        " let this = UnwrapProxy(proxy);\n" +
                        " let this = JS::from_raw(this);\n" +
                        " let this = this.root();\n" +
                        CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() + "\n" +
                        "}\n")
        else:
            namedGet = ""

        # The expando check runs between the indexed and named paths.
        return setOrIndexedGet + """let expando: *mut JSObject = GetExpandoObject(proxy);
//if (!xpc::WrapperFactory::IsXrayWrapper(proxy) && (expando = GetExpandoObject(proxy))) {
if expando.is_not_null() {
let flags = if set != 0 { JSRESOLVE_ASSIGNING } else { 0 } | JSRESOLVE_QUALIFIED;
if JS_GetPropertyDescriptorById(cx, expando, id, flags, desc) == 0 {
return 0;
}
if (*desc).obj.is_not_null() {
// Pretend the property lives on the wrapper.
(*desc).obj = proxy;
return 1;
}
}
""" + namedGet + """
(*desc).obj = ptr::mut_null();
return 1;"""

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_defineProperty(CGAbstractExternMethod):
    """
    Generates the proxy handler's defineProperty hook.  Indexed and named
    setters forward to the native implementation; interfaces with a getter
    but no matching setter reject the definition; everything else falls
    through to the shared proxyhandler::defineProperty_.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'proxy'),
                Argument('jsid', 'id'),
                Argument('*const JSPropertyDescriptor', 'desc')]
        CGAbstractExternMethod.__init__(self, descriptor, "defineProperty", "JSBool", args)
        self.descriptor = descriptor

    def getBody(self):
        set = ""

        indexedSetter = self.descriptor.operations['IndexedSetter']
        if indexedSetter:
            # Setter and creator must be the same operation; a distinct
            # creator is unsupported by this generator.
            if not (self.descriptor.operations['IndexedCreator'] is indexedSetter):
                raise TypeError("Can't handle creator that's different from the setter")
            set += ("let index = GetArrayIndexFromId(cx, id);\n" +
                    "if index.is_some() {\n" +
                    " let index = index.unwrap();\n" +
                    " let this = UnwrapProxy(proxy);\n" +
                    " let this = JS::from_raw(this);\n" +
                    " let this = this.root();\n" +
                    CGIndenter(CGProxyIndexedSetter(self.descriptor)).define() +
                    " return 1;\n" +
                    "}\n")
        elif self.descriptor.operations['IndexedGetter']:
            # Indexed properties are readable but not writable here.
            set += ("if GetArrayIndexFromId(cx, id).is_some() {\n" +
                    " return 0;\n" +
                    " //return ThrowErrorMessage(cx, MSG_NO_PROPERTY_SETTER, \"%s\");\n" +
                    "}\n") % self.descriptor.name

        namedSetter = self.descriptor.operations['NamedSetter']
        if namedSetter:
            if not self.descriptor.operations['NamedCreator'] is namedSetter:
                raise TypeError("Can't handle creator that's different from the setter")
            set += ("if RUST_JSID_IS_STRING(id) != 0 {\n" +
                    " let name = jsid_to_str(cx, id);\n" +
                    " let this = UnwrapProxy(proxy);\n" +
                    " let this = JS::from_raw(this);\n" +
                    " let this = this.root();\n" +
                    CGIndenter(CGProxyNamedSetter(self.descriptor)).define() + "\n" +
                    "}\n")
        elif self.descriptor.operations['NamedGetter']:
            # A named getter without a setter: defining an existing named
            # property fails.
            set += ("if RUST_JSID_IS_STRING(id) {\n" +
                    " let name = jsid_to_str(cx, id);\n" +
                    " let this = UnwrapProxy(proxy);\n" +
                    " let this = JS::from_raw(this);\n" +
                    " let this = this.root();\n" +
                    CGIndenter(CGProxyNamedGetter(self.descriptor)).define() +
                    " if (found) {\n"
                    " return 0;\n" +
                    " //return ThrowErrorMessage(cx, MSG_NO_PROPERTY_SETTER, \"%s\");\n" +
                    " }\n" +
                    " return 1;\n"
                    "}\n") % (self.descriptor.name)

        return set + """return proxyhandler::defineProperty_(%s);""" % ", ".join(a.name for a in self.args)

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_hasOwn(CGAbstractExternMethod):
    """
    Generates the proxy handler's hasOwn hook: checks indexed properties,
    then the expando object, then named properties; otherwise reports the
    property as absent.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'proxy'),
                Argument('jsid', 'id'), Argument('*mut JSBool', 'bp')]
        CGAbstractExternMethod.__init__(self, descriptor, "hasOwn", "JSBool", args)
        self.descriptor = descriptor

    def getBody(self):
        indexedGetter = self.descriptor.operations['IndexedGetter']
        if indexedGetter:
            # The generated getter sets 'found'; that becomes the answer.
            indexed = ("let index = GetArrayIndexFromId(cx, id);\n" +
                       "if index.is_some() {\n" +
                       " let index = index.unwrap();\n" +
                       " let this = UnwrapProxy(proxy);\n" +
                       " let this = JS::from_raw(this);\n" +
                       " let this = this.root();\n" +
                       CGIndenter(CGProxyIndexedGetter(self.descriptor)).define() + "\n" +
                       " *bp = found as JSBool;\n" +
                       " return 1;\n" +
                       "}\n\n")
        else:
            indexed = ""

        namedGetter = self.descriptor.operations['NamedGetter']
        if namedGetter:
            # Named lookup is skipped for properties shadowed on the
            # prototype chain.
            named = ("if RUST_JSID_IS_STRING(id) != 0 && !HasPropertyOnPrototype(cx, proxy, id) {\n" +
                     " let name = jsid_to_str(cx, id);\n" +
                     " let this = UnwrapProxy(proxy);\n" +
                     " let this = JS::from_raw(this);\n" +
                     " let this = this.root();\n" +
                     CGIndenter(CGProxyNamedGetter(self.descriptor)).define() + "\n" +
                     " *bp = found as JSBool;\n"
                     " return 1;\n"
                     "}\n" +
                     "\n")
        else:
            named = ""

        return indexed + """let expando: *mut JSObject = GetExpandoObject(proxy);
if expando.is_not_null() {
let mut b: JSBool = 1;
let ok: JSBool = JS_HasPropertyById(cx, expando, id, &mut b);
*bp = !!b;
if ok == 0 || *bp != 0 {
return ok;
}
}
""" + named + """*bp = 0;
return 1;"""

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_get(CGAbstractExternMethod):
    """
    Generates the proxy handler's get hook: indexed getter (or the expando
    object), then the prototype chain, then the named getter; missing
    properties yield undefined.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'proxy'),
                Argument('*mut JSObject', 'receiver'), Argument('jsid', 'id'),
                Argument('*mut JSVal', 'vp')]
        CGAbstractExternMethod.__init__(self, descriptor, "get", "JSBool", args)
        self.descriptor = descriptor

    def getBody(self):
        getFromExpando = """let expando = GetExpandoObject(proxy);
if expando.is_not_null() {
let mut hasProp = 0;
if JS_HasPropertyById(cx, expando, id, &mut hasProp) == 0 {
return 0;
}
if hasProp != 0 {
return JS_GetPropertyById(cx, expando, id, vp);
}
}"""

        templateValues = {'jsvalRef': '*vp'}

        indexedGetter = self.descriptor.operations['IndexedGetter']
        if indexedGetter:
            getIndexedOrExpando = ("let index = GetArrayIndexFromId(cx, id);\n" +
                                   "if index.is_some() {\n" +
                                   " let index = index.unwrap();\n" +
                                   " let this = UnwrapProxy(proxy);\n" +
                                   " let this = JS::from_raw(this);\n" +
                                   " let this = this.root();\n" +
                                   CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define())
            getIndexedOrExpando += """
// Even if we don't have this index, we don't forward the
// get on to our expando object.
} else {
%s
}
""" % (stripTrailingWhitespace(getFromExpando.replace('\n', '\n ')))
        else:
            getIndexedOrExpando = getFromExpando + "\n"

        namedGetter = self.descriptor.operations['NamedGetter']
        # The named-getter path is disabled ('and False') pending
        # completion -- see the XXXjdm marker.
        if namedGetter and False: #XXXjdm unfinished
            getNamed = ("if (JSID_IS_STRING(id)) {\n" +
                        " let name = jsid_to_str(cx, id);\n" +
                        " let this = UnwrapProxy(proxy);\n" +
                        " let this = JS::from_raw(this);\n" +
                        " let this = this.root();\n" +
                        CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() +
                        "}\n") % (self.descriptor.concreteType)
        else:
            getNamed = ""

        return """//MOZ_ASSERT(!xpc::WrapperFactory::IsXrayWrapper(proxy),
//"Should not have a XrayWrapper here");
%s
let mut found = false;
if !GetPropertyOnPrototype(cx, proxy, id, &mut found, vp) {
return 0;
}
if found {
return 1;
}
%s
*vp = UndefinedValue();
return 1;""" % (getIndexedOrExpando, getNamed)

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_obj_toString(CGAbstractExternMethod):
    """
    Generates the proxy handler's obj_toString hook, which produces the
    "[object Interface]" string for this interface.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'proxy')]
        CGAbstractExternMethod.__init__(self, descriptor, "obj_toString", "*mut JSString", args)
        self.descriptor = descriptor

    def getBody(self):
        stringifier = self.descriptor.operations['Stringifier']
        if stringifier:
            # NOTE(review): this branch emits C++-flavored code
            # (xpc_qsStringToJsstring, NULL) rather than Rust -- it looks
            # unfinished; confirm whether any descriptor reaches it.
            nativeName = MakeNativeName(stringifier.identifier.name)
            signature = stringifier.signatures()[0]
            returnType = signature[0]
            extendedAttributes = self.descriptor.getExtendedAttributes(stringifier)
            infallible = 'infallible' in extendedAttributes
            if not infallible:
                error = CGGeneric(
                    ('ThrowMethodFailedWithDetails(cx, rv, "%s", "toString");\n' +
                     "return NULL;") % self.descriptor.interface.identifier.name)
            else:
                error = None
            call = CGCallGenerator(error, [], "", returnType, extendedAttributes, self.descriptor, nativeName, False, object="UnwrapProxy(proxy)")
            return call.define() + """
JSString* jsresult;
return xpc_qsStringToJsstring(cx, result, &jsresult) ? jsresult : NULL;"""

        # Default: hand the interface name to the shared helper.
        return """let s = "%s".to_c_str();
_obj_toString(cx, s.as_ptr())""" % self.descriptor.name

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGAbstractClassHook(CGAbstractExternMethod):
    """
    Meant for implementing JSClass hooks, like Finalize or Trace. Does very raw
    'this' unwrapping as it assumes that the unwrapped type is always known.

    Subclasses must override generate_code() to supply the hook-specific
    body; the prologue that unwraps 'this' is shared.
    """
    def __init__(self, descriptor, name, returnType, args):
        CGAbstractExternMethod.__init__(self, descriptor, name, returnType,
                                        args)

    def definition_body_prologue(self):
        # Unwrap the reflector to the concrete native object before the
        # hook body runs.
        return CGGeneric("""\
let this: *const %s = unwrap::<%s>(obj);
""" % (self.descriptor.concreteType, self.descriptor.concreteType))

    def definition_body(self):
        return CGList([
            self.definition_body_prologue(),
            self.generate_code(),
        ])

    def generate_code(self):
        # Subclasses must override this.  Fix: raise instead of
        # assert(False) so the failure is not silently stripped when
        # running under `python -O`.
        raise NotImplementedError(
            "%s must override generate_code" % type(self).__name__)
def finalizeHook(descriptor, hookName, context):
    """Return the Rust body that reclaims the native object when its
    reflector is finalized: the boxed native is recovered from the
    reserved slot and dropped.

    hookName and context are unused here but kept for signature parity
    with the other hook generators.
    """
    ty = descriptor.concreteType
    return ("""let val = JS_GetReservedSlot(obj, dom_object_slot(obj));
let _: Box<%s> = mem::transmute(val.to_private());
debug!("%s finalize: {:p}", this);
""" % (ty, ty))
class CGClassTraceHook(CGAbstractClassHook):
    """
    A hook to trace through our native object; used for GC and CC
    """
    def __init__(self, descriptor):
        hookArgs = [Argument('*mut JSTracer', 'trc'),
                    Argument('*mut JSObject', 'obj')]
        CGAbstractClassHook.__init__(self, descriptor, TRACE_HOOK_NAME, 'void',
                                     hookArgs)

    def generate_code(self):
        # Delegate to the native object's trace(), passing the tracer arg.
        tracer = self.args[0].name
        return CGGeneric("(*this).trace(%s);" % tracer)
class CGClassConstructHook(CGAbstractExternMethod):
    """
    JS-visible constructor for our objects
    """
    def __init__(self, descriptor):
        hookArgs = [Argument('*mut JSContext', 'cx'), Argument('u32', 'argc'),
                    Argument('*mut JSVal', 'vp')]
        CGAbstractExternMethod.__init__(self, descriptor, CONSTRUCT_HOOK_NAME,
                                        'JSBool', hookArgs)
        self._ctor = self.descriptor.interface.ctor()

    def define(self):
        # Interfaces without an IDL constructor generate nothing at all.
        if self._ctor:
            return CGAbstractExternMethod.define(self)
        return ""

    def definition_body(self):
        # First recover the global object from the callee, then invoke the
        # native constructor through the overload-resolution machinery.
        globalSetup = CGGeneric("""\
let global = global_object_for_js_object(JS_CALLEE(cx, vp).to_object());
let global = global.root();
""")
        ctorName = MakeNativeName(self._ctor.identifier.name)
        call = CGMethodCall(["&global.root_ref()"], ctorName, True,
                            self.descriptor, self._ctor)
        return CGList([globalSetup, call])
class CGClassFinalizeHook(CGAbstractClassHook):
    """
    A hook for finalize, used to release our native object.
    """
    def __init__(self, descriptor):
        hookArgs = [Argument('*mut JSFreeOp', 'fop'),
                    Argument('*mut JSObject', 'obj')]
        CGAbstractClassHook.__init__(self, descriptor, FINALIZE_HOOK_NAME,
                                     'void', hookArgs)

    def generate_code(self):
        # The shared finalizeHook helper produces the release code.
        body = finalizeHook(self.descriptor, self.name, self.args[0].name)
        return CGGeneric(body)
class CGDOMJSProxyHandlerDOMClass(CGThing):
    """Emits the static DOMClass record consumed by this interface's proxy
    handler."""
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        return "\nstatic Class: DOMClass = %s;\n" % DOMClass(self.descriptor)
class CGInterfaceTrait(CGThing):
    """
    Generates `pub trait FooMethods`, listing every non-static method and
    attribute accessor of the interface, plus (for proxies) the special
    operations with a trailing `found: &mut bool` parameter.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)

        def argument_type(ty, optional=False, defaultValue=None, variadic=False):
            # Map an IDL argument type to its Rust parameter type.
            _, _, declType, _ = getJSToNativeConversionTemplate(
                ty, descriptor, isArgument=True)
            if variadic:
                declType = CGWrapper(declType, pre="Vec<", post=">")
            elif optional and not defaultValue:
                declType = CGWrapper(declType, pre="Option<", post=">")
            # Interfaces and dictionaries are taken by reference.
            if ty.isGeckoInterface() and not (ty.nullable() or optional):
                declType = CGWrapper(declType, pre="&")
            elif ty.isDictionary():
                declType = CGWrapper(declType, pre="&")
            return declType.define()

        def attribute_arguments(needCx, argument=None):
            # Getters take no value argument; setters take the new value.
            if needCx:
                yield "cx", "*mut JSContext"
            if argument:
                yield "value", argument_type(argument)

        def method_arguments(returnType, arguments, trailing=None):
            # Note: this shadows nothing here -- needCx is the module-level
            # predicate deciding whether the method needs a JSContext.
            if needCx(returnType, arguments, True):
                yield "cx", "*mut JSContext"
            for argument in arguments:
                ty = argument_type(argument.type, argument.optional,
                                   argument.defaultValue, argument.variadic)
                yield CGDictionary.makeMemberName(argument.identifier.name), ty
            if trailing:
                yield trailing

        def return_type(rettype, infallible):
            # Fallible results are wrapped in Fallible<...>.
            result = getRetvalDeclarationForType(rettype, descriptor)
            if not infallible:
                result = CGWrapper(result, pre="Fallible<", post=">")
            return result.define()

        def members():
            # Yield (name, argument generator, return type) per trait item.
            for m in descriptor.interface.members:
                if m.isMethod() and not m.isStatic():
                    name = CGSpecializedMethod.makeNativeName(descriptor, m)
                    infallible = 'infallible' in descriptor.getExtendedAttributes(m)
                    for idx, (rettype, arguments) in enumerate(m.signatures()):
                        arguments = method_arguments(rettype, arguments)
                        rettype = return_type(rettype, infallible)
                        # Overload N gets N trailing underscores.
                        yield name + ('_' * idx), arguments, rettype
                elif m.isAttr() and not m.isStatic():
                    name = CGSpecializedGetter.makeNativeName(descriptor, m)
                    infallible = 'infallible' in descriptor.getExtendedAttributes(m, getter=True)
                    needCx = typeNeedsCx(m.type)
                    yield name, attribute_arguments(needCx), return_type(m.type, infallible)

                    if not m.readonly:
                        name = CGSpecializedSetter.makeNativeName(descriptor, m)
                        infallible = 'infallible' in descriptor.getExtendedAttributes(m, setter=True)
                        if infallible:
                            rettype = "()"
                        else:
                            rettype = "ErrorResult"
                        yield name, attribute_arguments(needCx, m.type), rettype

            if descriptor.proxy:
                for name, operation in descriptor.operations.iteritems():
                    if not operation:
                        continue
                    assert len(operation.signatures()) == 1
                    rettype, arguments = operation.signatures()[0]
                    infallible = 'infallible' in descriptor.getExtendedAttributes(operation)
                    arguments = method_arguments(rettype, arguments, ("found", "&mut bool"))
                    rettype = return_type(rettype, infallible)
                    yield name, arguments, rettype

        def fmt(arguments):
            # Render ", name: type" for each argument after &self.
            return "".join(", %s: %s" % argument for argument in arguments)

        methods = CGList([
            CGGeneric("fn %s(&self%s) -> %s;\n" % (name, fmt(arguments), rettype))
            for name, arguments, rettype in members()
        ], "")
        self.cgRoot = CGWrapper(CGIndenter(methods),
                                pre="pub trait %sMethods {\n" % descriptor.interface.identifier.name,
                                post="}")

    def define(self):
        return self.cgRoot.define()
class CGDescriptor(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
assert not descriptor.interface.isCallback()
cgThings = []
cgThings.append(CGGetProtoObjectMethod(descriptor))
if descriptor.interface.hasInterfaceObject():
# https://github.com/mozilla/servo/issues/2665
# cgThings.append(CGGetConstructorObjectMethod(descriptor))
pass
(hasMethod, hasGetter, hasLenientGetter,
hasSetter, hasLenientSetter) = False, False, False, False, False
for m in descriptor.interface.members:
if m.isMethod() and not m.isIdentifierLess():
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticMethod(descriptor, m))
else:
cgThings.append(CGSpecializedMethod(descriptor, m))
cgThings.append(CGMemberJITInfo(descriptor, m))
hasMethod = True
elif m.isAttr():
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticGetter(descriptor, m))
else:
cgThings.append(CGSpecializedGetter(descriptor, m))
if m.hasLenientThis():
hasLenientGetter = True
else:
hasGetter = True
if not m.readonly:
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticSetter(descriptor, m))
else:
cgThings.append(CGSpecializedSetter(descriptor, m))
if m.hasLenientThis():
hasLenientSetter = True
else:
hasSetter = True
if not m.isStatic():
cgThings.append(CGMemberJITInfo(descriptor, m))
if hasMethod:
cgThings.append(CGGenericMethod(descriptor))
if hasGetter:
cgThings.append(CGGenericGetter(descriptor))
if hasLenientGetter:
pass
if hasSetter:
cgThings.append(CGGenericSetter(descriptor))
if hasLenientSetter:
pass
if descriptor.concrete:
cgThings.append(CGClassFinalizeHook(descriptor))
cgThings.append(CGClassTraceHook(descriptor))
if descriptor.interface.hasInterfaceObject():
cgThings.append(CGClassConstructHook(descriptor))
cgThings.append(CGInterfaceObjectJSClass(descriptor))
cgThings.append(CGPrototypeJSClass(descriptor))
properties = PropertyArrays(descriptor)
cgThings.append(CGGeneric(str(properties)))
cgThings.append(CGNativeProperties(descriptor, properties))
cgThings.append(CGCreateInterfaceObjectsMethod(descriptor, properties))
cgThings.append(CGNamespace.build([descriptor.name + "Constants"],
CGConstant(m for m in descriptor.interface.members if m.isConst()),
public=True))
if descriptor.interface.hasInterfaceObject():
cgThings.append(CGDefineDOMInterfaceMethod(descriptor))
if descriptor.proxy:
cgThings.append(CGDefineProxyHandler(descriptor))
if descriptor.concrete:
if descriptor.proxy:
#cgThings.append(CGProxyIsProxy(descriptor))
cgThings.append(CGProxyUnwrap(descriptor))
cgThings.append(CGDOMJSProxyHandlerDOMClass(descriptor))
cgThings.append(CGDOMJSProxyHandler_getOwnPropertyDescriptor(descriptor))
cgThings.append(CGDOMJSProxyHandler_obj_toString(descriptor))
cgThings.append(CGDOMJSProxyHandler_get(descriptor))
cgThings.append(CGDOMJSProxyHandler_hasOwn(descriptor))
if descriptor.operations['IndexedSetter'] or descriptor.operations['NamedSetter']:
cgThings.append(CGDOMJSProxyHandler_defineProperty(descriptor))
#cgThings.append(CGDOMJSProxyHandler(descriptor))
#cgThings.append(CGIsMethod(descriptor))
pass
else:
cgThings.append(CGDOMJSClass(descriptor))
pass
cgThings.append(CGWrapMethod(descriptor))
cgThings.append(CGIDLInterface(descriptor))
cgThings.append(CGInterfaceTrait(descriptor))
cgThings = CGList(cgThings, "\n")
cgThings = CGWrapper(cgThings, pre='\n', post='\n')
#self.cgRoot = CGWrapper(CGNamespace(toBindingNamespace(descriptor.name),
# cgThings),
# post='\n')
self.cgRoot = cgThings
    def define(self):
        """Return the concatenated generated source for all the CGThings
        collected in self.cgRoot."""
        return self.cgRoot.define()
class CGNamespacedEnum(CGThing):
    """
    Generates a Rust `pub enum` (with an automatic trailing Count member)
    wrapped inside its own public namespace/module.
    """
    def __init__(self, namespace, enumName, names, values, comment="", deriving=""):
        explicitValues = values or []
        # Pair each name with its explicit value, when one was supplied.
        entries = []
        for index, name in enumerate(names):
            if index < len(explicitValues) and explicitValues[index] is not None:
                entries.append("%s = %s" % (name, explicitValues[index]))
            else:
                entries.append(name)
        # A trailing <EnumName>Count member records the number of real entries.
        entries.append(enumName + 'Count = ' + str(len(entries)))
        # Indent every entry one level inside the enum body.
        entries = [' ' + e for e in entries]
        # Assemble the enum declaration itself.
        body = comment + 'pub enum %s {\n%s\n}\n' % (enumName, ',\n'.join(entries))
        if deriving:
            body = ('#[deriving(%s)]\n' % deriving) + body
        # Pad with blank lines, then expose through the requested namespace.
        padded = CGWrapper(CGGeneric(body), pre='\n', post='\n')
        self.node = CGNamespace(namespace, padded, public=True)

    def define(self):
        return self.node.define()
class CGDictionary(CGThing):
    """
    Generates the Rust struct for a WebIDL dictionary, plus an impl that
    converts a JS value into that struct member by member.
    """
    def __init__(self, dictionary, descriptorProvider):
        self.dictionary = dictionary;
        # Code can only be generated for this dictionary if every dictionary
        # it depends on (parent chain plus dictionary-typed members) is
        # itself generatable.
        if all(CGDictionary(d, descriptorProvider).generatable for
               d in CGDictionary.getDictionaryDependencies(dictionary)):
            self.generatable = True
        else:
            self.generatable = False
            # Nothing else to do here
            return
        # Precompute, per member, the JS-to-native conversion template used
        # when reading that member off the JS object.
        self.memberInfo = [
            (member,
             getJSToNativeConversionTemplate(member.type,
                                             descriptorProvider,
                                             isMember="Dictionary",
                                             defaultValue=member.defaultValue,
                                             failureCode="return Err(());",
                                             exceptionCode="return Err(());"))
            for member in dictionary.members ]

    def define(self):
        # Non-generatable dictionaries produce no output at all.
        if not self.generatable:
            return ""
        return self.struct() + "\n" + self.impl()

    def struct(self):
        """Emit the Rust struct declaration for this dictionary."""
        d = self.dictionary
        if d.parent:
            # Dictionary inheritance is modeled as an embedded `parent` field.
            inheritance = " pub parent: %s::%s<'a, 'b>,\n" % (self.makeModuleName(d.parent),
                                                              self.makeClassName(d.parent))
        else:
            inheritance = ""
        memberDecls = [" pub %s: %s," %
                       (self.makeMemberName(m[0].identifier.name), self.getMemberType(m))
                       for m in self.memberInfo]

        return (string.Template(
                "pub struct ${selfName}<'a, 'b> {\n" +
                "${inheritance}" +
                "\n".join(memberDecls) + "\n" +
                "}").substitute( { "selfName": self.makeClassName(d),
                                   "inheritance": inheritance }))

    def impl(self):
        """Emit the impl with `empty()` and the JS-value `new()` constructor."""
        d = self.dictionary
        if d.parent:
            # The parent is converted first from the same JS value.
            initParent = ("parent: match %s::%s::new(cx, val) {\n"
                          " Ok(parent) => parent,\n"
                          " Err(_) => return Err(()),\n"
                          "},\n") % (self.makeModuleName(d.parent),
                                     self.makeClassName(d.parent))
        else:
            initParent = ""

        def memberInit(memberInfo):
            # One `name: <conversion>,` initializer per member.
            member, _ = memberInfo
            name = self.makeMemberName(member.identifier.name)
            conversion = self.getMemberConversion(memberInfo)
            return CGGeneric("%s: %s,\n" % (name, conversion.define()))

        memberInits = CGList([memberInit(m) for m in self.memberInfo])

        return string.Template(
            "impl<'a, 'b> ${selfName}<'a, 'b> {\n"
            " pub fn empty() -> ${selfName}<'a, 'b> {\n"
            " ${selfName}::new(ptr::mut_null(), NullValue()).unwrap()\n"
            " }\n"
            " pub fn new(cx: *mut JSContext, val: JSVal) -> Result<${selfName}<'a, 'b>, ()> {\n"
            " let object = if val.is_null_or_undefined() {\n"
            " ptr::mut_null()\n"
            " } else if val.is_object() {\n"
            " val.to_object()\n"
            " } else {\n"
            " //XXXjdm throw properly here\n"
            " return Err(());\n"
            " };\n"
            " Ok(${selfName} {\n"
            "${initParent}"
            "${initMembers}"
            " })\n"
            " }\n"
            "}").substitute({
                "selfName": self.makeClassName(d),
                "initParent": CGIndenter(CGGeneric(initParent), indentLevel=6).define(),
                "initMembers": CGIndenter(memberInits, indentLevel=6).define(),
                })

    @staticmethod
    def makeDictionaryName(dictionary):
        return dictionary.identifier.name

    def makeClassName(self, dictionary):
        return self.makeDictionaryName(dictionary)

    @staticmethod
    def makeModuleName(dictionary):
        # Maps a FooInit dictionary to Foo's binding namespace.
        name = dictionary.identifier.name
        if name.endswith('Init'):
            return toBindingNamespace(name.replace('Init', ''))
        #XXXjdm This breaks on the test webidl files, sigh.
        #raise TypeError("No idea how to find this dictionary's definition: " + name)
        return "/* uh oh */ %s" % name

    def getMemberType(self, memberInfo):
        """Rust type of a member; members without a default become Option<T>."""
        member, (_, _, declType, _) = memberInfo
        if not member.defaultValue:
            declType = CGWrapper(declType, pre="Option<", post=">")
        return declType.define()

    def getMemberConversion(self, memberInfo):
        """Emit the match on get_dictionary_property that converts one member."""
        def indent(s):
            return CGIndenter(CGGeneric(s), 8).define()

        member, (templateBody, default, declType, _) = memberInfo
        replacements = { "val": "value" }
        conversion = string.Template(templateBody).substitute(replacements)

        assert (member.defaultValue is None) == (default is None)
        if not default:
            # No default: absent members become None, present ones Some(...).
            default = "None"
            conversion = "Some(%s)" % conversion

        conversion = (
            "match get_dictionary_property(cx, object, \"%s\") {\n"
            " Err(()) => return Err(()),\n"
            " Ok(Some(value)) => {\n"
            "%s\n"
            " },\n"
            " Ok(None) => {\n"
            "%s\n"
            " },\n"
            "}") % (member.identifier.name, indent(conversion), indent(default))
        return CGGeneric(conversion)

    @staticmethod
    def makeIdName(name):
        return name + "_id"

    @staticmethod
    def makeMemberName(name):
        # Can't use Rust keywords as member names.
        if name == "type":
            return name + "_"
        return name

    @staticmethod
    def getDictionaryDependencies(dictionary):
        """Other dictionaries this one needs: parent plus dictionary members."""
        deps = set();
        if dictionary.parent:
            deps.add(dictionary.parent)
        for member in dictionary.members:
            if member.type.isDictionary():
                deps.add(member.type.unroll().inner)
        return deps
class CGRegisterProtos(CGAbstractMethod):
    """
    Emits the `Register` method, which calls DefineDOMInterface for every
    registered interface that has an interface object.
    """
    def __init__(self, config):
        CGAbstractMethod.__init__(
            self, None, 'Register', 'void',
            [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'global')],
            unsafe=False, pub=True)
        self.config = config

    def definition_body(self):
        # One DefineDOMInterface call per qualifying descriptor.
        lines = []
        for desc in self.config.getDescriptors(hasInterfaceObject=True, register=True):
            lines.append(CGGeneric(
                "codegen::Bindings::%sBinding::DefineDOMInterface(cx, global);" % desc.name))
        return CGList(lines, "\n")
class CGRegisterProxyHandlersMethod(CGAbstractMethod):
    """
    Emits the `RegisterProxyHandlers` method, which fills the global
    proxy_handlers table with one handler per proxy descriptor.
    """
    def __init__(self, descriptors):
        CGAbstractMethod.__init__(self, None, 'RegisterProxyHandlers', 'void', [],
                                  unsafe=True, pub=True)
        self.descriptors = descriptors

    def definition_body(self):
        # One table-fill statement per proxy descriptor.
        statements = [
            CGGeneric("proxy_handlers[proxies::%s as uint] = codegen::Bindings::%sBinding::DefineProxyHandler();" % (d.name, d.name))
            for d in self.descriptors
        ]
        return CGList(statements, "\n")
class CGRegisterProxyHandlers(CGThing):
    """
    Declares the static proxy_handlers table (sized to the number of proxy
    descriptors) together with the method that populates it.
    """
    def __init__(self, config):
        proxies = config.getDescriptors(proxy=True)
        count = len(proxies)
        table = CGGeneric(
            "pub static mut proxy_handlers: [*const libc::c_void, ..%d] = [0 as *const libc::c_void, ..%d];"
            % (count, count))
        self.root = CGList([table, CGRegisterProxyHandlersMethod(proxies)], "\n")

    def define(self):
        return self.root.define()
class CGBindingRoot(CGThing):
    """
    Root codegen class for binding generation. Instantiate the class, and call
    declare or define to generate header or cpp code (respectively).
    """
    def __init__(self, config, prefix, webIDLFile):
        # Gather everything declared in this webidl file.
        descriptors = config.getDescriptors(webIDLFile=webIDLFile,
                                            isCallback=False)
        dictionaries = config.getDictionaries(webIDLFile=webIDLFile)
        mainCallbacks = config.getCallbacks(webIDLFile=webIDLFile)
        callbackDescriptors = config.getDescriptors(webIDLFile=webIDLFile,
                                                    isCallback=True)

        # Do codegen for all the enums.  (This also seeds the list of
        # generated things; a previous redundant `cgthings = []` initializer
        # was removed since it was immediately overwritten here.)
        cgthings = [CGEnum(e) for e in config.getEnums(webIDLFile)]

        # Do codegen for all the dictionaries. We have to be a bit careful
        # here, because we have to generate these in order from least derived
        # to most derived so that class inheritance works out.  We also have to
        # generate members before the dictionary that contains them.
        #
        # XXXbz this will fail if we have two webidl files A and B such that A
        # declares a dictionary which inherits from a dictionary in B and B
        # declares a dictionary (possibly a different one!) that inherits from a
        # dictionary in A.  The good news is that I expect this to never happen.
        reSortedDictionaries = []
        dictionaries = set(dictionaries)
        while len(dictionaries) != 0:
            # Find the dictionaries that don't depend on anything else anymore
            # and move them over.
            toMove = [d for d in dictionaries if
                      len(CGDictionary.getDictionaryDependencies(d) &
                          dictionaries) == 0]
            if len(toMove) == 0:
                raise TypeError("Loop in dictionary dependency graph")
            dictionaries = dictionaries - set(toMove)
            reSortedDictionaries.extend(toMove)

        dictionaries = reSortedDictionaries
        cgthings.extend([CGDictionary(d, config.getDescriptorProvider())
                         for d in dictionaries])

        # Do codegen for all the callbacks.
        cgthings.extend(CGList([CGCallbackFunction(c, config.getDescriptorProvider()),
                                CGCallbackFunctionImpl(c)], "\n")
                        for c in mainCallbacks)

        # Do codegen for all the descriptors
        cgthings.extend([CGDescriptor(x) for x in descriptors])

        # Do codegen for all the callback interfaces.
        cgthings.extend(CGList([CGCallbackInterface(x),
                                CGCallbackFunctionImpl(x)], "\n")
                        for x in callbackDescriptors)

        # And make sure we have the right number of newlines at the end
        curr = CGWrapper(CGList(cgthings, "\n\n"), post="\n\n")

        # Wrap all of that in our namespaces.
        #curr = CGNamespace.build(['dom'],
        #                         CGWrapper(curr, pre="\n"))

        # Add imports
        #XXXjdm This should only import the namespace for the current binding,
        #       not every binding ever.
        curr = CGImports(curr, descriptors, [
            'js',
            'js::{JS_ARGV, JS_CALLEE, JS_THIS_OBJECT}',
            'js::{JSCLASS_GLOBAL_SLOT_COUNT, JSCLASS_IS_DOMJSCLASS}',
            'js::{JSCLASS_IS_GLOBAL, JSCLASS_RESERVED_SLOTS_SHIFT}',
            'js::{JSCLASS_RESERVED_SLOTS_MASK, JSID_VOID, JSJitInfo}',
            'js::{JSPROP_ENUMERATE, JSPROP_NATIVE_ACCESSORS, JSPROP_SHARED}',
            'js::{JSRESOLVE_ASSIGNING, JSRESOLVE_QUALIFIED}',
            'js::jsapi::{JS_CallFunctionValue, JS_GetClass, JS_GetGlobalForObject}',
            'js::jsapi::{JS_GetObjectPrototype, JS_GetProperty, JS_GetPropertyById}',
            'js::jsapi::{JS_GetPropertyDescriptorById, JS_GetReservedSlot}',
            'js::jsapi::{JS_HasProperty, JS_HasPropertyById, JS_IsExceptionPending}',
            'js::jsapi::{JS_NewObject, JS_ObjectIsCallable, JS_SetPrototype}',
            'js::jsapi::{JS_SetReservedSlot, JS_WrapValue, JSBool, JSContext}',
            'js::jsapi::{JSClass, JSFreeOp, JSFunctionSpec, JSHandleObject, jsid}',
            'js::jsapi::{JSNativeWrapper, JSObject, JSPropertyDescriptor, JS_ArrayIterator}',
            'js::jsapi::{JSPropertyOpWrapper, JSPropertySpec, JS_PropertyStub}',
            'js::jsapi::{JSStrictPropertyOpWrapper, JSString, JSTracer, JS_ConvertStub}',
            'js::jsapi::{JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub}',
            'js::jsval::JSVal',
            'js::jsval::{ObjectValue, ObjectOrNullValue, PrivateValue}',
            'js::jsval::{NullValue, UndefinedValue}',
            'js::glue::{CallJitMethodOp, CallJitPropertyOp, CreateProxyHandler}',
            'js::glue::{GetProxyPrivate, NewProxyObject, ProxyTraps}',
            'js::glue::{RUST_FUNCTION_VALUE_TO_JITINFO}',
            'js::glue::{RUST_JS_NumberValue, RUST_JSID_IS_STRING}',
            'js::rust::with_compartment',
            'dom::types::*',
            'dom::bindings',
            'dom::bindings::global::GlobalRef',
            'dom::bindings::js::{JS, JSRef, Root, RootedReference, Temporary}',
            'dom::bindings::js::{OptionalRootable, OptionalRootedRootable, ResultRootable}',
            'dom::bindings::js::{OptionalRootedReference, OptionalOptionalRootedRootable}',
            'dom::bindings::utils::{CreateDOMGlobal, CreateInterfaceObjects2}',
            'dom::bindings::utils::{ConstantSpec, cx_for_dom_object}',
            'dom::bindings::utils::{dom_object_slot, DOM_OBJECT_SLOT, DOMClass}',
            'dom::bindings::utils::{DOMJSClass, JSCLASS_DOM_GLOBAL}',
            'dom::bindings::utils::{FindEnumStringIndex, GetArrayIndexFromId}',
            'dom::bindings::utils::{GetPropertyOnPrototype, GetProtoOrIfaceArray}',
            'dom::bindings::utils::{HasPropertyOnPrototype, IntVal}',
            'dom::bindings::utils::{jsid_to_str}',
            'dom::bindings::utils::global_object_for_js_object',
            'dom::bindings::utils::{Reflectable}',
            'dom::bindings::utils::{squirrel_away_unique}',
            'dom::bindings::utils::{ThrowingConstructor, unwrap, unwrap_jsmanaged}',
            'dom::bindings::utils::VoidVal',
            'dom::bindings::utils::get_dictionary_property',
            'dom::bindings::utils::NativeProperties',
            'dom::bindings::trace::JSTraceable',
            'dom::bindings::callback::{CallbackContainer,CallbackInterface,CallbackFunction}',
            'dom::bindings::callback::{CallSetup,ExceptionHandling}',
            'dom::bindings::callback::{WrapCallThisObject}',
            'dom::bindings::conversions::{FromJSValConvertible, ToJSValConvertible}',
            'dom::bindings::conversions::IDLInterface',
            'dom::bindings::conversions::{Default, Empty}',
            'dom::bindings::codegen::*',
            'dom::bindings::codegen::Bindings::*',
            'dom::bindings::codegen::RegisterBindings',
            'dom::bindings::codegen::UnionTypes::*',
            'dom::bindings::error::{FailureUnknown, Fallible, Error, ErrorResult}',
            'dom::bindings::error::throw_dom_exception',
            'dom::bindings::error::throw_type_error',
            'dom::bindings::proxyhandler',
            'dom::bindings::proxyhandler::{_obj_toString, defineProperty}',
            'dom::bindings::proxyhandler::{FillPropertyDescriptor, GetExpandoObject}',
            'dom::bindings::proxyhandler::{delete_, getPropertyDescriptor}',
            'dom::bindings::str::ByteString',
            'page::JSPageInfo',
            'libc',
            'servo_util::str::DOMString',
            'std::mem',
            'std::cmp',
            'std::ptr',
            'std::str',
            'std::num',
        ])

        # Add the auto-generated comment.
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)

        # Store the final result.
        self.root = curr

    def define(self):
        return stripTrailingWhitespace(self.root.define())
class CGNativeMember(ClassMethod):
    """
    A ClassMethod representing the native implementation of an IDL member:
    computes the native signature (return type, arguments, outparams, and
    cx/global extras) from the IDL signature.
    """
    def __init__(self, descriptorProvider, member, name, signature, extendedAttrs,
                 breakAfter=True, passJSBitsAsNeeded=True, visibility="public",
                 jsObjectsArePtr=False, variadicIsSequence=False):
        """
        If jsObjectsArePtr is true, typed arrays and "object" will be
        passed as JSObject*.

        If passJSBitsAsNeeded is false, we don't automatically pass in a
        JSContext* or a JSObject* based on the return and argument types.
        """
        self.descriptorProvider = descriptorProvider
        self.member = member
        self.extendedAttrs = extendedAttrs
        self.passJSBitsAsNeeded = passJSBitsAsNeeded
        self.jsObjectsArePtr = jsObjectsArePtr
        self.variadicIsSequence = variadicIsSequence
        breakAfterSelf = "\n" if breakAfter else ""
        ClassMethod.__init__(self, name,
                             self.getReturnType(signature[0], False),
                             self.getArgs(signature[0], signature[1]),
                             static=member.isStatic(),
                             # Mark our getters, which are attrs that
                             # have a non-void return type, as const.
                             const=(not member.isStatic() and member.isAttr() and
                                    not signature[0].isVoid()),
                             breakAfterReturnDecl=" ",
                             breakAfterSelf=breakAfterSelf,
                             visibility=visibility)

    def getReturnType(self, type, isMember):
        """Just the type-declaration half of getRetvalInfo."""
        return self.getRetvalInfo(type, isMember)[0]

    def getRetvalInfo(self, type, isMember):
        """
        Returns a tuple:

        The first element is the type declaration for the retval

        The second element is a template for actually returning a value stored in
        "${declName}".  This means actually returning it if
        we're not outparam, else assigning to the "retval" outparam.  If
        isMember is true, this can be None, since in that case the caller will
        never examine this value.
        """
        if type.isVoid():
            typeDecl, template = "", ""
        elif type.isPrimitive() and type.tag() in builtinNames:
            result = CGGeneric(builtinNames[type.tag()])
            if type.nullable():
                raise TypeError("Nullable primitives are not supported here.")
            typeDecl, template = result.define(), "return Ok(${declName});"
        elif type.isDOMString():
            if isMember:
                # No need for a third element in the isMember case
                typeDecl, template = "nsString", None
            # Outparam
            else:
                typeDecl, template = "void", "retval = ${declName};"
        elif type.isByteString():
            if isMember:
                # No need for a third element in the isMember case
                typeDecl, template = "nsCString", None
            # Outparam.  (Bug fix: this branch previously lacked the `else:`
            # and unconditionally clobbered the isMember result above; it now
            # mirrors the isDOMString branch.)
            else:
                typeDecl, template = "void", "retval = ${declName};"
        elif type.isEnum():
            enumName = type.unroll().inner.identifier.name
            if type.nullable():
                enumName = CGTemplatedType("Nullable",
                                           CGGeneric(enumName)).define()
            typeDecl, template = enumName, "return ${declName};"
        elif type.isGeckoInterface():
            iface = type.unroll().inner
            nativeType = self.descriptorProvider.getDescriptor(
                iface.identifier.name).nativeType
            # Now trim off unnecessary namespaces
            nativeType = nativeType.split("::")
            if nativeType[0] == "mozilla":
                nativeType.pop(0)
            if nativeType[0] == "dom":
                nativeType.pop(0)
            result = CGWrapper(CGGeneric("::".join(nativeType)), post="*")
            # Since we always force an owning type for callback return values,
            # our ${declName} is an OwningNonNull or nsRefPtr.  So we can just
            # .forget() to get our already_AddRefed.
            typeDecl, template = result.define(), "return ${declName}.forget();"
        elif type.isCallback():
            typeDecl, template = \
                ("already_AddRefed<%s>" % type.unroll().identifier.name,
                 "return ${declName}.forget();")
        elif type.isAny():
            typeDecl, template = "JSVal", "return Ok(${declName});"
        elif type.isObject():
            typeDecl, template = "JSObject*", "return ${declName};"
        elif type.isSpiderMonkeyInterface():
            if type.nullable():
                returnCode = "return ${declName}.IsNull() ? nullptr : ${declName}.Value().Obj();"
            else:
                returnCode = "return ${declName}.Obj();"
            typeDecl, template = "JSObject*", returnCode
        elif type.isSequence():
            # If we want to handle sequence-of-sequences return values, we're
            # going to need to fix example codegen to not produce nsTArray<void>
            # for the relevant argument...
            assert not isMember
            # Outparam.
            if type.nullable():
                returnCode = ("if (${declName}.IsNull()) {\n"
                              " retval.SetNull();\n"
                              "} else {\n"
                              " retval.SetValue().SwapElements(${declName}.Value());\n"
                              "}")
            else:
                returnCode = "retval.SwapElements(${declName});"
            typeDecl, template = "void", returnCode
        elif type.isDate():
            result = CGGeneric("Date")
            if type.nullable():
                result = CGTemplatedType("Nullable", result)
            typeDecl, template = result.define(), "return ${declName};"
        else:
            raise TypeError("Don't know how to declare return value for %s" % type)
        # Unless declared infallible, wrap the declared type in Fallible and
        # fall back to an Ok(()) return template.
        if 'infallible' not in self.extendedAttrs:
            if typeDecl:
                typeDecl = "Fallible<%s>" % typeDecl
            else:
                typeDecl = "ErrorResult"
            if not template:
                template = "return Ok(());"
        return typeDecl, template

    def getArgs(self, returnType, argList):
        """Build the full argument list: IDL args, retval outparams, the
        legacycaller thisval, a JSContext, and a global for statics."""
        args = [self.getArg(arg) for arg in argList]
        # Now the outparams.  DOMString/ByteString/sequence returns are
        # delivered through a `retval` reference argument.
        if returnType.isDOMString():
            args.append(Argument("nsString&", "retval"))
        elif returnType.isByteString():
            args.append(Argument("nsCString&", "retval"))
        elif returnType.isSequence():
            nullable = returnType.nullable()
            if nullable:
                returnType = returnType.inner
            # And now the actual underlying type
            elementDecl = self.getReturnType(returnType.inner, True)
            type = CGTemplatedType("nsTArray", CGGeneric(elementDecl))
            if nullable:
                type = CGTemplatedType("Nullable", type)
            args.append(Argument("%s&" % type.define(), "retval"))
        # The legacycaller thisval
        if self.member.isMethod() and self.member.isLegacycaller():
            # If it has an identifier, we can't deal with it yet
            assert self.member.isIdentifierLess()
            args.insert(0, Argument("JS::Value", "aThisVal"))
        # And jscontext bits.
        if needCx(returnType, argList, self.passJSBitsAsNeeded):
            args.insert(0, Argument("JSContext*", "cx"))
        # And if we're static, a global
        if self.member.isStatic():
            args.insert(0, Argument("const GlobalObject&", "global"))
        return args

    def doGetArgType(self, type, optional, isMember):
        """
        The main work of getArgType.  Returns a string type decl, whether this
        is a const ref, as well as whether the type should be wrapped in
        Nullable as needed.

        isMember can be false or one of the strings "Sequence" or "Variadic"
        """
        if type.isArray():
            raise TypeError("Can't handle array arguments yet")

        if type.isSequence():
            nullable = type.nullable()
            if nullable:
                type = type.inner
            elementType = type.inner
            argType = self.getArgType(elementType, False, "Sequence")[0]
            decl = CGTemplatedType("Sequence", argType)
            return decl.define(), True, True

        if type.isUnion():
            if type.nullable():
                type = type.inner
            # NOTE(review): this emits "<Union>::<Union>" (module::type
            # pairing?) — confirm this is the intended Rust path.
            return str(type) + "::" + str(type), False, True

        if type.isGeckoInterface() and not type.isCallbackInterface():
            iface = type.unroll().inner
            argIsPointer = type.nullable()
            forceOwningType = iface.isCallback() or isMember
            if argIsPointer:
                if (optional or isMember) and forceOwningType:
                    typeDecl = "nsRefPtr<%s>"
                else:
                    typeDecl = "*%s"
            else:
                if optional or isMember:
                    if forceOwningType:
                        typeDecl = "OwningNonNull<%s>"
                    else:
                        typeDecl = "NonNull<%s>"
                else:
                    typeDecl = "%s"
            descriptor = self.descriptorProvider.getDescriptor(iface.identifier.name)
            return (typeDecl % descriptor.argumentType,
                    False, False)

        if type.isSpiderMonkeyInterface():
            if self.jsObjectsArePtr:
                return "JSObject*", False, False
            return type.name, True, True

        if type.isDOMString():
            declType = "DOMString"
            return declType, True, False

        if type.isByteString():
            declType = "nsCString"
            return declType, True, False

        if type.isEnum():
            return type.unroll().inner.identifier.name, False, True

        if type.isCallback() or type.isCallbackInterface():
            forceOwningType = optional or isMember
            if type.nullable():
                if forceOwningType:
                    declType = "nsRefPtr<%s>"
                else:
                    declType = "%s*"
            else:
                if forceOwningType:
                    declType = "OwningNonNull<%s>"
                else:
                    declType = "%s&"
            if type.isCallback():
                name = type.unroll().identifier.name
            else:
                name = type.unroll().inner.identifier.name
            return declType % name, False, False

        if type.isAny():
            # Don't do the rooting stuff for variadics for now
            if isMember:
                declType = "JS::Value"
            else:
                declType = "JSVal"
            return declType, False, False

        if type.isObject():
            if isMember:
                declType = "JSObject*"
            else:
                declType = "JS::Handle<JSObject*>"
            return declType, False, False

        if type.isDictionary():
            typeName = CGDictionary.makeDictionaryName(type.inner)
            return typeName, True, True

        if type.isDate():
            return "Date", False, True

        assert type.isPrimitive()
        return builtinNames[type.tag()], False, True

    def getArgType(self, type, optional, isMember):
        """
        Get the type of an argument declaration.  Returns the type CGThing, and
        whether this should be a const ref.

        isMember can be False, "Sequence", or "Variadic"
        """
        (decl, ref, handleNullable) = self.doGetArgType(type, optional,
                                                        isMember)
        decl = CGGeneric(decl)
        if handleNullable and type.nullable():
            decl = CGTemplatedType("Nullable", decl)
            ref = True
        if isMember == "Variadic":
            arrayType = "Sequence" if self.variadicIsSequence else "nsTArray"
            decl = CGTemplatedType(arrayType, decl)
            ref = True
        elif optional:
            # Note: All variadic args claim to be optional, but we can just use
            # empty arrays to represent them not being present.
            decl = CGTemplatedType("Option", decl)
            ref = False
        return (decl, ref)

    def getArg(self, arg):
        """
        Get the full argument declaration for an argument
        """
        (decl, ref) = self.getArgType(arg.type,
                                      arg.optional and not arg.defaultValue,
                                      "Variadic" if arg.variadic else False)
        if ref:
            decl = CGWrapper(decl, pre="&")

        return Argument(decl.define(), arg.identifier.name)
class CGCallback(CGClass):
    """
    Common codegen for callback functions and callback interfaces: a Rust
    struct wrapping a JS callback object, with public wrapper methods
    generated around each private call implementation.
    """
    def __init__(self, idlObject, descriptorProvider, baseName, methods,
                 getters=[], setters=[]):
        self.baseName = baseName
        self._deps = idlObject.getDeps()
        name = idlObject.identifier.name
        # For our public methods that needThisHandling we want most of the
        # same args and the same return type as what CallbackMember
        # generates.  So we want to take advantage of all its
        # CGNativeMember infrastructure, but that infrastructure can't deal
        # with templates and most especially template arguments.  So just
        # cheat and have CallbackMember compute all those things for us.
        realMethods = []
        for method in methods:
            if not method.needThisHandling:
                realMethods.append(method)
            else:
                # Expand into public with-this/without-this wrappers plus
                # the private implementation.
                realMethods.extend(self.getMethodImpls(method))
        CGClass.__init__(self, name,
                         bases=[ClassBase(baseName)],
                         constructors=self.getConstructors(),
                         methods=realMethods+getters+setters,
                         decorators="#[deriving(PartialEq,Clone,Encodable)]")

    def getConstructors(self):
        # Single constructor that wraps the raw JS callback object by
        # delegating to the base callback type's `new`.
        return [ClassConstructor(
            [Argument("*mut JSObject", "aCallback")],
            bodyInHeader=True,
            visibility="pub",
            explicit=False,
            baseConstructors=[
                "%s::new(aCallback)" % self.baseName
                ])]

    def getMethodImpls(self, method):
        """
        Expand a private CallbackMember method into three methods: a public
        `name_` taking an explicit thisObj, a public `name__` calling with a
        null this-object, and the private implementation itself.
        """
        assert method.needThisHandling
        args = list(method.args)
        # Strip out the JSContext*/JSObject* args
        # that got added.
        assert args[0].name == "cx" and args[0].argType == "*mut JSContext"
        assert args[1].name == "aThisObj" and args[1].argType == "*mut JSObject"
        args = args[2:]
        # Record the names of all the arguments, so we can use them when we call
        # the private method.
        argnames = [arg.name for arg in args]
        argnamesWithThis = ["s.GetContext()", "thisObjJS"] + argnames
        argnamesWithoutThis = ["s.GetContext()", "ptr::mut_null()"] + argnames
        # Now that we've recorded the argnames for our call to our private
        # method, insert our optional argument for deciding whether the
        # CallSetup should re-throw exceptions on aRv.
        args.append(Argument("ExceptionHandling", "aExceptionHandling",
                             "ReportExceptions"))
        # Make the first remaining argument pass-by-reference, and mirror
        # that change into the private method's own signature (index 2
        # skips the stripped cx/aThisObj slots).
        args[0] = Argument('&' + args[0].argType, args[0].name, args[0].default)
        method.args[2] = args[0]

        # And now insert our template argument.
        argsWithoutThis = list(args)
        args.insert(0, Argument("&JSRef<T>",  "thisObj"))

        # And the self argument
        method.args.insert(0, Argument(None, "&self"))
        args.insert(0, Argument(None, "&self"))
        argsWithoutThis.insert(0, Argument(None, "&self"))

        # Shared preamble: obtain a live JSContext or bail out.
        setupCall = ("let s = CallSetup::new(self, aExceptionHandling);\n"
                     "if s.GetContext().is_null() {\n"
                     "    return Err(FailureUnknown);\n"
                     "}\n")

        bodyWithThis = string.Template(
            setupCall+
            "let thisObjJS = WrapCallThisObject(s.GetContext(), thisObj);\n"
            "if thisObjJS.is_null() {\n"
            "    return Err(FailureUnknown);\n"
            "}\n"
            "return ${methodName}(${callArgs});").substitute({
                "callArgs" : ", ".join(argnamesWithThis),
                "methodName": 'self.' + method.name,
                })
        bodyWithoutThis = string.Template(
            setupCall +
            "return ${methodName}(${callArgs});").substitute({
                "callArgs" : ", ".join(argnamesWithoutThis),
                "methodName": 'self.' + method.name,
                })
        return [ClassMethod(method.name+'_', method.returnType, args,
                            bodyInHeader=True,
                            templateArgs=["T: Reflectable"],
                            body=bodyWithThis,
                            visibility='pub'),
                ClassMethod(method.name+'__', method.returnType, argsWithoutThis,
                            bodyInHeader=True,
                            body=bodyWithoutThis,
                            visibility='pub'),
                method]

    def deps(self):
        # Dependencies recorded from the IDL object at construction time.
        return self._deps
# Callback getters and setters are always generated as fallible.
def callbackGetterName(attr):
    """Native name of the generated getter for a callback attribute."""
    return "Get%s" % MakeNativeName(attr.identifier.name)
def callbackSetterName(attr):
    """Native name of the generated setter for a callback attribute."""
    return "Set%s" % MakeNativeName(attr.identifier.name)
class CGCallbackFunction(CGCallback):
    """
    Codegen for a WebIDL callback function type: wraps the single Call
    operation in the common CGCallback machinery, based on the
    CallbackFunction native type.
    """
    def __init__(self, callback, descriptorProvider):
        CGCallback.__init__(self, callback, descriptorProvider,
                            "CallbackFunction",
                            methods=[CallCallback(callback, descriptorProvider)])
    # The base class constructors are inherited unchanged; the previous
    # getConstructors override was a pure pass-through and has been removed.
class CGCallbackFunctionImpl(CGGeneric):
    """
    Emits the CallbackContainer and ToJSValConvertible trait impls for a
    callback type, delegating to the wrapped parent callback object.
    """
    # Shared Rust template; ${type} is the callback's name.
    _template = string.Template("""impl CallbackContainer for ${type} {
    fn new(callback: *mut JSObject) -> ${type} {
        ${type}::new(callback)
    }

    fn callback(&self) -> *mut JSObject {
        self.parent.callback()
    }
}

impl ToJSValConvertible for ${type} {
    fn to_jsval(&self, cx: *mut JSContext) -> JSVal {
        self.callback().to_jsval(cx)
    }
}
""")

    def __init__(self, callback):
        CGGeneric.__init__(self, self._template.substitute({"type": callback.name}))
class CGCallbackInterface(CGCallback):
    """
    Codegen for a callback interface: getters for all non-static attributes,
    setters for the writable ones, and one operation per method signature.
    """
    def __init__(self, descriptor):
        iface = descriptor.interface
        attrs = [m for m in iface.members if m.isAttr() and not m.isStatic()]
        getters = [CallbackGetter(a, descriptor) for a in attrs]
        setters = [CallbackSetter(a, descriptor) for a in attrs
                   if not a.readonly]
        # One CallbackOperation per overload signature, in member order.
        methods = [CallbackOperation(op, sig, descriptor)
                   for op in iface.members
                   if op.isMethod() and not op.isStatic() and not op.isIdentifierLess()
                   for sig in op.signatures()]
        assert not iface.isJSImplemented() or not iface.ctor()
        CGCallback.__init__(self, iface, descriptor, "CallbackInterface",
                            methods, getters=getters, setters=setters)
class FakeMember():
    """
    A stand-in for an IDL member used by callback codegen: never static,
    neither attribute nor method, with no extended attributes and default
    null handling.
    """
    def __init__(self):
        # Callback arguments always use default null handling.
        self.treatNullAs = "Default"

    def isStatic(self):
        # A fake member is never static.
        return False

    def isAttr(self):
        # ... nor an attribute ...
        return False

    def isMethod(self):
        # ... nor a method.
        return False

    def getExtendedAttribute(self, name):
        # Carries no extended attributes at all.
        return None
class CallbackMember(CGNativeMember):
    def __init__(self, sig, name, descriptorProvider, needThisHandling, rethrowContentException=False):
        """
        Codegen for one callback invocation method.

        needThisHandling is True if we need to be able to accept a specified
        thisObj, False otherwise.
        """
        assert not rethrowContentException or not needThisHandling
        self.retvalType = sig[0]
        self.originalSig = sig
        args = sig[1]
        self.argCount = len(args)
        if self.argCount > 0:
            # Check for variadic arguments
            lastArg = args[self.argCount-1]
            if lastArg.variadic:
                # Runtime arg count: the fixed args plus however many values
                # the trailing variadic sequence actually holds.
                self.argCountStr = (
                    "(%d - 1) + %s.Length()" % (self.argCount,
                                                lastArg.identifier.name))
            else:
                self.argCountStr = "%d" % self.argCount
        self.needThisHandling = needThisHandling
        # If needThisHandling, we generate ourselves as private and the caller
        # will handle generating public versions that handle the "this" stuff.
        visibility = "priv" if needThisHandling else "pub"
        self.rethrowContentException = rethrowContentException
        # We don't care, for callback codegen, whether our original member was
        # a method or attribute or whatnot.  Just always pass FakeMember()
        # here.
        CGNativeMember.__init__(self, descriptorProvider, FakeMember(),
                                name, (self.retvalType, args),
                                extendedAttrs={},
                                passJSBitsAsNeeded=False,
                                visibility=visibility,
                                jsObjectsArePtr=True)
        # We have to do all the generation of our body now, because
        # the caller relies on us throwing if we can't manage it.
        self.exceptionCode= "return Err(FailureUnknown);\n"
        self.body = self.getImpl()
    def getImpl(self):
        """Assemble the full body of the callback invocation method from the
        setup, rval declaration, argv setup, argument conversions, the call
        itself, and the result conversion."""
        replacements = {
            "declRval": self.getRvalDecl(),
            "returnResult": self.getResultConversion(),
            "convertArgs": self.getArgConversions(),
            "doCall": self.getCall(),
            "setupCall": self.getCallSetup(),
            }
        if self.argCount > 0:
            # Declare the argv vector sized to the (possibly runtime) count.
            replacements["argCount"] = self.argCountStr
            replacements["argvDecl"] = string.Template(
                "let mut argv = Vec::from_elem(${argCount}, UndefinedValue());\n"
                ).substitute(replacements)
        else:
            # Avoid weird 0-sized arrays
            replacements["argvDecl"] = ""

        # Newlines and semicolons are in the values
        pre = string.Template(
            "${setupCall}"
            "${declRval}"
            "${argvDecl}").substitute(replacements)
        body = string.Template(
            "${convertArgs}"
            "${doCall}"
            "${returnResult}").substitute(replacements)
        # The conversion/call/result part runs inside the callback object's
        # compartment.
        return CGList([
            CGGeneric(pre),
            CGWrapper(CGIndenter(CGGeneric(body)),
                      pre="with_compartment(cx, self.parent.callback(), || {\n",
                      post="})")
        ], "\n").define()
    def getResultConversion(self):
        """Emit the code converting the JS value in `rval` into the native
        return value, then returning/assigning it per getRetvalInfo."""
        replacements = {
            "val": "rval",
            "declName": "rvalDecl",
            }

        template, _, declType, needsRooting = getJSToNativeConversionTemplate(
            self.retvalType,
            self.descriptorProvider,
            exceptionCode=self.exceptionCode,
            isCallbackReturnValue="Callback",
            # XXXbz we should try to do better here
            sourceDescription="return value")

        convertType = instantiateJSToNativeConversionTemplate(
            template, replacements, declType, "rvalDecl", needsRooting)

        # The second element of getRetvalInfo is the return/assign template.
        assignRetval = string.Template(
            self.getRetvalInfo(self.retvalType,
                               False)[1]).substitute(replacements)
        return convertType.define() + "\n" + assignRetval + "\n"
def getArgConversions(self):
# Just reget the arglist from self.originalSig, because our superclasses
# just have way to many members they like to clobber, so I can't find a
# safe member name to store it in.
argConversions = [self.getArgConversion(i, arg) for (i, arg)
in enumerate(self.originalSig[1])]
# Do them back to front, so our argc modifications will work
# correctly, because we examine trailing arguments first.
argConversions.reverse();
# Wrap each one in a scope so that any locals it has don't leak out, and
# also so that we can just "break;" for our successCode.
argConversions = [CGWrapper(CGIndenter(CGGeneric(c)),
pre="loop {\n",
post="\nbreak;}\n")
for c in argConversions]
if self.argCount > 0:
argConversions.insert(0, self.getArgcDecl())
# And slap them together.
return CGList(argConversions, "\n\n").define() + "\n\n"
def getArgConversion(self, i, arg):
    """Return Rust code converting IDL argument `arg` (position `i`) into
    its argv slot (argv[i], or argv[i + idx] inside a variadic loop)."""
    argval = arg.identifier.name

    if arg.variadic:
        # Inside the generated per-element loop, index into the sequence.
        argval = argval + "[idx]"
        jsvalIndex = "%d + idx" % i
    else:
        jsvalIndex = "%d" % i
        if arg.optional and not arg.defaultValue:
            # Optional-without-default is an Option<>; unwrap the value
            # (presence is guarded by the is_some() check emitted below).
            argval += ".clone().unwrap()"

    conversion = wrapForType("*argv.get_mut(%s)" % jsvalIndex,
                             result=argval,
                             successCode="continue;" if arg.variadic else "break;")
    if arg.variadic:
        conversion = string.Template(
            "for (uint32_t idx = 0; idx < ${arg}.Length(); ++idx) {\n" +
            CGIndenter(CGGeneric(conversion)).define() + "\n"
            "}\n"
            "break;").substitute({ "arg": arg.identifier.name })
    elif arg.optional and not arg.defaultValue:
        # Either convert the present value, drop argc if this is the current
        # trailing argument, or fill the slot with undefined.
        conversion = (
            CGIfWrapper(CGGeneric(conversion),
                        "%s.is_some()" % arg.identifier.name).define() +
            " else if (argc == %d) {\n"
            " // This is our current trailing argument; reduce argc\n"
            " argc -= 1;\n"
            "} else {\n"
            " *argv.get_mut(%d) = UndefinedValue();\n"
            "}" % (i+1, i))
    return conversion
def getArgs(self, returnType, argList):
    """Compute the native argument list: superclass args plus either
    exception-handling arguments (when no `this` handling is needed) or
    an explicit cx/aThisObj pair."""
    args = CGNativeMember.getArgs(self, returnType, argList)
    if not self.needThisHandling:
        # Since we don't need this handling, we're the actual method that
        # will be called, so we need an aRethrowExceptions argument.
        if self.rethrowContentException:
            args.append(Argument("JSCompartment*", "aCompartment", "nullptr"))
        else:
            args.append(Argument("ExceptionHandling", "aExceptionHandling",
                                 "ReportExceptions"))
        return args
    # We want to allow the caller to pass in a "this" object, as
    # well as a JSContext.
    return [Argument("*mut JSContext", "cx"),
            Argument("*mut JSObject", "aThisObj")] + args
def getCallSetup(self):
    """Emit the CallSetup boilerplate used when this member is invoked
    directly (i.e. when `this` handling is not delegated to the caller).

    NOTE(review): this path still emits Gecko C++-style code (CallSetup,
    `JSContext* cx`) rather than the Rust emitted elsewhere in this file —
    presumably unreached/unported; verify before relying on it.
    """
    if self.needThisHandling:
        # It's been done for us already
        return ""
    callSetup = "CallSetup s(CallbackPreserveColor(), aRv"
    if self.rethrowContentException:
        # getArgs doesn't add the aExceptionHandling argument but does add
        # aCompartment for us.
        callSetup += ", RethrowContentExceptions, aCompartment"
    else:
        callSetup += ", aExceptionHandling"
    callSetup += ");"
    return string.Template(
        "${callSetup}\n"
        "JSContext* cx = s.GetContext();\n"
        "if (!cx) {\n"
        " return Err(FailureUnknown);\n"
        "}\n").substitute({
            "callSetup": callSetup,
        })
def getArgcDecl(self):
    """Return the declaration of the mutable `argc` local that trailing
    optional-argument conversion may decrement (see getArgConversion)."""
    # Dropped the stray C-style trailing semicolon from the original.
    return CGGeneric("let mut argc = %su32;" % self.argCountStr)
@staticmethod
def ensureASCIIName(idlObject):
    """Raise SyntaxError if `idlObject`'s identifier contains a non-ASCII
    or double-quote character anywhere in the name.

    Fix: the original used re.match, which only anchors at the start of
    the string, so bad characters after the first position were silently
    accepted; re.search scans the whole identifier. Also renamed the
    local `type` to avoid shadowing the builtin.
    """
    kind = "attribute" if idlObject.isAttr() else "operation"
    if re.search("[^\x20-\x7E]", idlObject.identifier.name):
        raise SyntaxError('Callback %s name "%s" contains non-ASCII '
                          "characters. We can't handle that. %s" %
                          (kind, idlObject.identifier.name,
                           idlObject.location))
    if re.search('"', idlObject.identifier.name):
        raise SyntaxError("Callback %s name '%s' contains "
                          "double-quote character. We can't handle "
                          "that. %s" %
                          (kind, idlObject.identifier.name,
                           idlObject.location))
class CallbackMethod(CallbackMember):
    """A callback member invoked by calling a JS callable; common base for
    callback functions and callback-interface operations.

    Subclasses provide getThisObj() and getCallableDecl().
    """
    def __init__(self, sig, name, descriptorProvider, needThisHandling, rethrowContentException=False):
        CallbackMember.__init__(self, sig, name, descriptorProvider,
                                needThisHandling, rethrowContentException)

    def getRvalDecl(self):
        # The JS return value lands in a mutable `rval` local.
        return "let mut rval = UndefinedValue();\n"

    def getCall(self):
        """Emit the JS_CallFunctionValue invocation, wiring in the
        subclass-provided callable and `this`, plus argc/argv."""
        replacements = {
            "thisObj": self.getThisObj(),
            "getCallable": self.getCallableDecl()
        }
        if self.argCount > 0:
            replacements["argv"] = "argv.as_mut_ptr()"
            replacements["argc"] = "argc"
        else:
            # No arguments: pass a null argv and zero argc.
            replacements["argv"] = "nullptr"
            replacements["argc"] = "0"
        return string.Template("${getCallable}"
                "let ok = unsafe {\n"
                " JS_CallFunctionValue(cx, ${thisObj}, callable,\n"
                " ${argc}, ${argv}, &mut rval)\n"
                "};\n"
                "if ok == 0 {\n"
                " return Err(FailureUnknown);\n"
                "}\n").substitute(replacements)
class CallCallback(CallbackMethod):
    """Codegen for invoking a callback function through its "Call" entry
    point; the caller supplies the `this` object explicitly."""
    def __init__(self, callback, descriptorProvider):
        signature = callback.signatures()[0]
        CallbackMethod.__init__(self, signature, "Call", descriptorProvider,
                                needThisHandling=True)

    def getThisObj(self):
        # `this` comes straight from the caller-provided argument.
        return "aThisObj"

    def getCallableDecl(self):
        # The callback object itself is the callable.
        return "let callable = ObjectValue(unsafe {&*self.parent.callback()});\n"
class CallbackOperationBase(CallbackMethod):
    """
    Common class for implementing various callback operations.
    """
    def __init__(self, signature, jsName, nativeName, descriptor, singleOperation, rethrowContentException=False):
        # singleOperation: True for single-operation callback interfaces,
        # where a bare callable may serve as the whole implementation.
        self.singleOperation = singleOperation
        self.methodName = jsName
        CallbackMethod.__init__(self, signature, nativeName, descriptor, singleOperation, rethrowContentException)

    def getThisObj(self):
        if not self.singleOperation:
            return "self.parent.callback()"
        # This relies on getCallableDecl declaring a boolean
        # isCallable in the case when we're a single-operation
        # interface.
        return "if isCallable { aThisObj } else { self.parent.callback() }"

    def getCallableDecl(self):
        """Emit code binding `callable`: the named property on the callback
        object, or the callback itself when it is directly callable."""
        replacements = {
            "methodName": self.methodName
        }
        getCallableFromProp = string.Template(
            'match self.parent.GetCallableProperty(cx, "${methodName}") {\n'
            ' Err(_) => return Err(FailureUnknown),\n'
            ' Ok(callable) => callable,\n'
            '}').substitute(replacements)
        if not self.singleOperation:
            # NOTE(review): this branch still emits C++-style JS::Rooted
            # code — presumably unported; verify before relying on it.
            return 'JS::Rooted<JS::Value> callable(cx);\n' + getCallableFromProp
        return (
            'let isCallable = unsafe { JS_ObjectIsCallable(cx, self.parent.callback()) != 0 };\n'
            'let callable =\n' +
            CGIndenter(
                CGIfElseWrapper('isCallable',
                                CGGeneric('unsafe { ObjectValue(&*self.parent.callback()) }'),
                                CGGeneric(getCallableFromProp))).define() + ';\n')
class CallbackOperation(CallbackOperationBase):
    """
    Codegen actual WebIDL operations on callback interfaces.
    """
    def __init__(self, method, signature, descriptor):
        self.ensureASCIIName(method)
        name = method.identifier.name
        iface = descriptor.interface
        CallbackOperationBase.__init__(
            self, signature, name, MakeNativeName(name), descriptor,
            iface.isSingleOperationInterface(),
            rethrowContentException=iface.isJSImplemented())
class CallbackGetter(CallbackMember):
    """Codegen for reading an attribute off a callback interface object.

    NOTE(review): getRvalDecl/getCall still emit Gecko C++-style code
    (JS::Rooted, mCallback), unlike the Rust emitted elsewhere in this
    file — presumably unported/unused; verify before relying on it.
    """
    def __init__(self, attr, descriptor):
        self.ensureASCIIName(attr)
        self.attrName = attr.identifier.name
        CallbackMember.__init__(self,
                                (attr.type, []),
                                callbackGetterName(attr),
                                descriptor,
                                needThisHandling=False,
                                rethrowContentException=descriptor.interface.isJSImplemented())

    def getRvalDecl(self):
        return "JS::Rooted<JS::Value> rval(cx, JS::UndefinedValue());\n"

    def getCall(self):
        replacements = {
            "attrName": self.attrName
        }
        return string.Template(
            'if (!JS_GetProperty(cx, mCallback, "${attrName}", &rval)) {\n'
            ' return Err(FailureUnknown);\n'
            '}\n').substitute(replacements);
class CallbackSetter(CallbackMember):
    """Codegen for writing an attribute on a callback interface object.

    NOTE(review): getCall still emits Gecko C++-style code (MOZ_ASSERT,
    mCallback, argv.handleAt) — presumably unported/unused; verify.
    """
    def __init__(self, attr, descriptor):
        self.ensureASCIIName(attr)
        self.attrName = attr.identifier.name
        # Setters take a single argument of the attribute's type and
        # return void.
        CallbackMember.__init__(self,
                                (BuiltinTypes[IDLBuiltinType.Types.void],
                                 [FakeArgument(attr.type, attr)]),
                                callbackSetterName(attr),
                                descriptor,
                                needThisHandling=False,
                                rethrowContentException=descriptor.interface.isJSImplemented())

    def getRvalDecl(self):
        # We don't need an rval
        return ""

    def getCall(self):
        replacements = {
            "attrName": self.attrName,
            "argv": "argv.handleAt(0)",
        }
        return string.Template(
            'MOZ_ASSERT(argv.length() == 1);\n'
            'if (!JS_SetProperty(cx, mCallback, "${attrName}", ${argv})) {\n'
            ' return Err(FailureUnknown);\n'
            '}\n').substitute(replacements)

    def getArgcDecl(self):
        # Setters always receive exactly one argument; no argc bookkeeping.
        return None
class GlobalGenRoots():
    """
    Roots for global codegen.

    To generate code, call the method associated with the target, and then
    call the appropriate define/declare method.
    """

    @staticmethod
    def PrototypeList(config):
        """Generate PrototypeList.rs: proto-chain length and the ID enums."""
        # Prototype ID enum.
        protos = [d.name for d in config.getDescriptors(isCallback=False)]
        proxies = [d.name for d in config.getDescriptors(proxy=True)]

        return CGList([
            CGGeneric(AUTOGENERATED_WARNING_COMMENT),
            CGGeneric("pub static MAX_PROTO_CHAIN_LENGTH: uint = %d;\n\n" % config.maxProtoChainLength),
            CGNamespacedEnum('id', 'ID', protos, [0], deriving="PartialEq"),
            CGNamespacedEnum('proxies', 'Proxy', proxies, [0], deriving="PartialEq"),
        ])

    @staticmethod
    def RegisterBindings(config):
        """Generate the module that registers protos and proxy handlers."""
        # TODO - Generate the methods we want
        code = CGList([
            CGRegisterProtos(config),
            CGRegisterProxyHandlers(config),
        ], "\n")

        return CGImports(code, [], [
            'dom::bindings::codegen',
            'dom::bindings::codegen::PrototypeList::proxies',
            'js::jsapi::JSContext',
            'js::jsapi::JSObject',
            'libc',
        ])

    @staticmethod
    def InterfaceTypes(config):
        """Generate `pub use` re-exports for every concrete interface type."""
        descriptors = [d.name for d in config.getDescriptors(register=True, isCallback=False)]
        curr = CGList([CGGeneric("pub use dom::%s::%s;\n" % (name.lower(), name)) for name in descriptors])
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        return curr

    @staticmethod
    def Bindings(config):
        """Generate one `pub mod` per binding module: concrete interfaces
        plus callback modules."""
        descriptors = (set(d.name + "Binding" for d in config.getDescriptors(register=True)) |
                       set(d.unroll().module() for d in config.callbacks))
        curr = CGList([CGGeneric("pub mod %s;\n" % name) for name in sorted(descriptors)])
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        return curr

    @staticmethod
    def InheritTypes(config):
        """Generate the inheritance-helper traits: FooBase / FooDerived
        marker traits, FooCast up/down-cast helpers, and JSTraceable impls
        for every concrete interface."""
        descriptors = config.getDescriptors(register=True, isCallback=False)
        allprotos = [CGGeneric("#![allow(unused_imports)]\n"),
                     CGGeneric("use dom::types::*;\n"),
                     CGGeneric("use dom::bindings::js::{JS, JSRef, Temporary};\n"),
                     CGGeneric("use dom::bindings::trace::JSTraceable;\n"),
                     CGGeneric("use dom::bindings::utils::Reflectable;\n"),
                     CGGeneric("use serialize::{Encodable, Encoder};\n"),
                     CGGeneric("use js::jsapi::JSTracer;\n\n")]
        for descriptor in descriptors:
            name = descriptor.name
            # FooBase marker trait, implemented by everything in Foo's chain.
            protos = [CGGeneric('pub trait %s {}\n' % (name + 'Base'))]
            for proto in descriptor.prototypeChain:
                protos += [CGGeneric('impl %s for %s {}\n' % (proto + 'Base',
                                                              descriptor.concreteType))]
            # FooDerived trait: runtime is_foo() check, delegated up the
            # prototype chain through each intermediate ancestor.
            derived = [CGGeneric('pub trait %s { fn %s(&self) -> bool; }\n' %
                                 (name + 'Derived', 'is_' + name.lower()))]
            for protoName in descriptor.prototypeChain[1:-1]:
                protoDescriptor = config.getDescriptor(protoName)
                delegate = string.Template('''impl ${selfName} for ${baseName} {
    fn ${fname}(&self) -> bool {
        self.${parentName}.${fname}()
    }
}
''').substitute({'fname': 'is_' + name.lower(),
                 'selfName': name + 'Derived',
                 'baseName': protoDescriptor.concreteType,
                 'parentName': protoDescriptor.prototypeChain[-2].lower()})
                derived += [CGGeneric(delegate)]
            derived += [CGGeneric('\n')]
            # FooCast trait: checked downcasts (to_ref/to_mut_ref) and
            # unchecked upcasts (from_ref/from_mut_ref/from_temporary).
            cast = [CGGeneric(string.Template('''pub trait ${castTraitName} {
    #[inline(always)]
    fn to_ref<'a, 'b, T: ${toBound}+Reflectable>(base: &'a JSRef<'b, T>) -> Option<&'a JSRef<'b, Self>> {
        match base.deref().${checkFn}() {
            true => unsafe { Some(base.transmute()) },
            false => None
        }
    }
    #[inline(always)]
    fn to_mut_ref<'a, 'b, T: ${toBound}+Reflectable>(base: &'a mut JSRef<'b, T>) -> Option<&'a mut JSRef<'b, Self>> {
        match base.deref().${checkFn}() {
            true => unsafe { Some(base.transmute_mut()) },
            false => None
        }
    }
    #[inline(always)]
    fn from_ref<'a, 'b, T: ${fromBound}>(derived: &'a JSRef<'b, T>) -> &'a JSRef<'b, Self> {
        unsafe { derived.transmute() }
    }
    #[inline(always)]
    fn from_mut_ref<'a, 'b, T: ${fromBound}>(derived: &'a mut JSRef<'b, T>) -> &'a mut JSRef<'b, Self> {
        unsafe { derived.transmute_mut() }
    }
    #[inline(always)]
    fn from_temporary<T: ${fromBound}+Reflectable>(derived: Temporary<T>) -> Temporary<Self> {
        unsafe { derived.transmute() }
    }
}
''').substitute({'checkFn': 'is_' + name.lower(),
                 'castTraitName': name + 'Cast',
                 'fromBound': name + 'Base',
                 'toBound': name + 'Derived'})),
                    CGGeneric("impl %s for %s {}\n\n" % (name + 'Cast', name))]
            # Tracing delegates to the Encodable implementation.
            trace = [CGGeneric(string.Template('''impl JSTraceable for ${name} {
    fn trace(&self, tracer: *mut JSTracer) {
        unsafe {
            self.encode(&mut *tracer).ok().expect("failed to encode");
        }
    }
}
''').substitute({'name': name}))]
            allprotos += protos + derived + cast + trace

        curr = CGList(allprotos)
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        return curr

    @staticmethod
    def UnionTypes(config):
        """Generate the union-types module.

        NOTE: the call below resolves to the module-level UnionTypes
        function, not this staticmethod — class attributes are not in
        scope inside method bodies.
        """
        curr = UnionTypes(config.getDescriptors(),
                          config.getDictionaries(),
                          config.getCallbacks(),
                          config)

        # Add the auto-generated comment.
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)

        # Done.
        return curr
Add a documentation comment for CGWrapMethod.
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# Common codegen classes.
import operator
import os
import re
import string
from WebIDL import (
BuiltinTypes,
IDLBuiltinType,
IDLNullValue,
IDLType,
IDLUndefinedValue,
)
from Configuration import getTypesFromDescriptor, getTypesFromDictionary, getTypesFromCallback
# Banner prepended to every generated source file.
AUTOGENERATED_WARNING_COMMENT = \
    "/* THIS FILE IS AUTOGENERATED - DO NOT EDIT */\n\n"
# Well-known names for the per-interface JSClass hooks the codegen emits.
ADDPROPERTY_HOOK_NAME = '_addProperty'
FINALIZE_HOOK_NAME = '_finalize'
TRACE_HOOK_NAME = '_trace'
CONSTRUCT_HOOK_NAME = '_constructor'
HASINSTANCE_HOOK_NAME = '_hasInstance'
def replaceFileIfChanged(filename, newContents):
    """
    Read a copy of the old file, so that we don't touch it if it hasn't changed.
    Returns True if the file was updated, false otherwise.
    """
    #XXXjdm This doesn't play well with make right now.
    #       Force the file to always be updated, or else changing CodegenRust.py
    #       will cause many autogenerated bindings to be regenerated perpetually
    #       until the result is actually different.

    #oldFileContents = ""
    #try:
    #    oldFile = open(filename, 'rb')
    #    oldFileContents = ''.join(oldFile.readlines())
    #    oldFile.close()
    #except:
    #    pass

    #if newContents == oldFileContents:
    #    return False

    # Use a context manager so the handle is closed even if write() raises
    # (the original leaked the handle on a failed write).
    with open(filename, 'wb') as f:
        f.write(newContents)

    return True
def toStringBool(arg):
    """Return the source spelling ("true"/"false") of `arg`'s truthiness.

    Uses the idiomatic bool() instead of the original's obscure
    double-negation (`not not arg`); results are identical.
    """
    return str(bool(arg)).lower()
def toBindingNamespace(arg):
    """Map an interface name to its binding namespace, inserting "Binding"
    before any trailing "_workers" suffix ("Foo" -> "FooBinding",
    "Foo_workers" -> "FooBinding_workers").

    count=1 guards against the double substitution an unbounded re.sub
    performs on Python >= 3.7 with this empty-matchable, $-anchored
    pattern (where "Foo_workers" became "FooBinding_workersBinding").
    Also dropped the stray trailing semicolon.
    """
    return re.sub("((_workers)?$)", "Binding\\1", arg, count=1)
def stripTrailingWhitespace(text):
    """Strip trailing whitespace from every line of `text`, keeping a
    final newline if (and only if) one was present."""
    had_final_newline = text.endswith('\n')
    stripped = '\n'.join(line.rstrip() for line in text.splitlines())
    return stripped + '\n' if had_final_newline else stripped
def MakeNativeName(name):
    """Capitalize only the first character of `name` (IDL fooBar ->
    native FooBar); the rest of the string is left untouched.

    Uses a slice instead of name[0] so the empty string is returned
    unchanged rather than raising IndexError.
    """
    return name[:1].upper() + name[1:]
# Map from IDL primitive type tags to the Rust types they lower to.
builtinNames = {
    IDLType.Tags.bool: 'bool',
    IDLType.Tags.int8: 'i8',
    IDLType.Tags.int16: 'i16',
    IDLType.Tags.int32: 'i32',
    IDLType.Tags.int64: 'i64',
    IDLType.Tags.uint8: 'u8',
    IDLType.Tags.uint16: 'u16',
    IDLType.Tags.uint32: 'u32',
    IDLType.Tags.uint64: 'u64',
    IDLType.Tags.float: 'f32',
    IDLType.Tags.double: 'f64'
}

# IDL type tags that denote numeric types.
numericTags = [
    IDLType.Tags.int8, IDLType.Tags.uint8,
    IDLType.Tags.int16, IDLType.Tags.uint16,
    IDLType.Tags.int32, IDLType.Tags.uint32,
    IDLType.Tags.int64, IDLType.Tags.uint64,
    IDLType.Tags.float, IDLType.Tags.double
    ]
class CastableObjectUnwrapper():
    """
    A class for unwrapping an object named by the "source" argument
    based on the passed-in descriptor. Stringifies to a Rust expression of
    the appropriate type.

    codeOnFailure is the code to run if unwrapping fails.
    """
    def __init__(self, descriptor, source, codeOnFailure):
        self.substitution = {
            "type": descriptor.nativeType,
            "depth": descriptor.interface.inheritanceDepth(),
            "prototype": "PrototypeList::id::" + descriptor.name,
            "protoID": "PrototypeList::id::" + descriptor.name + " as uint",
            "source": source,
            # Pre-indent the failure code to sit inside the Err arm.
            "codeOnFailure": CGIndenter(CGGeneric(codeOnFailure), 4).define(),
        }

    def __str__(self):
        # Ok yields the unwrapped native; Err runs the caller-supplied
        # failure code.
        return string.Template(
"""match unwrap_jsmanaged(${source}, ${prototype}, ${depth}) {
    Ok(val) => val,
    Err(()) => {
${codeOnFailure}
    }
}""").substitute(self.substitution)
class CGThing():
    """
    Abstract base class for things that spit out code.
    """
    def __init__(self):
        pass  # Nothing for now

    def define(self):
        """Produce code for a Rust file."""
        # Raise instead of `assert False`: asserts are stripped when Python
        # runs with -O, which would let an unimplemented subclass return
        # None silently.
        raise NotImplementedError  # Override me!
class CGMethodCall(CGThing):
    """
    A class to generate selection of a method signature from a set of
    signatures and generation of a call to that signature.
    """
    def __init__(self, argsPre, nativeMethodName, static, descriptor, method):
        CGThing.__init__(self)

        # Pre-escaped "Interface.method" string for generated error messages.
        methodName = '\\"%s.%s\\"' % (descriptor.interface.identifier.name, method.identifier.name)

        def requiredArgCount(signature):
            # Number of leading non-optional arguments in a signature.
            arguments = signature[1]
            if len(arguments) == 0:
                return 0
            requiredArgs = len(arguments)
            while requiredArgs and arguments[requiredArgs-1].optional:
                requiredArgs -= 1
            return requiredArgs

        def getPerSignatureCall(signature, argConversionStartsAt=0, signatureIndex=0):
            # Overloads get '_'-suffixed native names per signature index.
            return CGPerSignatureCall(signature[0], argsPre, signature[1],
                                      nativeMethodName + '_'*signatureIndex,
                                      static, descriptor,
                                      method, argConversionStartsAt)

        signatures = method.signatures()
        if len(signatures) == 1:
            # Special case: we can just do a per-signature method call
            # here for our one signature and not worry about switching
            # on anything.
            signature = signatures[0]
            self.cgRoot = CGList([getPerSignatureCall(signature)])
            requiredArgs = requiredArgCount(signature)

            if requiredArgs > 0:
                code = (
                    "if argc < %d {\n"
                    " throw_type_error(cx, \"Not enough arguments to %s.\");\n"
                    " return 0;\n"
                    "}" % (requiredArgs, methodName))
                self.cgRoot.prepend(
                    CGWrapper(CGGeneric(code), pre="\n", post="\n"))

            return

        # Need to find the right overload
        maxArgCount = method.maxArgCount
        allowedArgCounts = method.allowedArgCounts

        argCountCases = []
        for argCount in allowedArgCounts:
            possibleSignatures = method.signaturesForArgCount(argCount)
            if len(possibleSignatures) == 1:
                # easy case!
                signature = possibleSignatures[0]
                sigIndex = signatures.index(signature)
                argCountCases.append(
                    CGCase(str(argCount), getPerSignatureCall(signature,
                                                              signatureIndex=sigIndex)))
                continue

            distinguishingIndex = method.distinguishingIndexForArgCount(argCount)

            # We can't handle unions at the distinguishing index.
            for (returnType, args) in possibleSignatures:
                if args[distinguishingIndex].type.isUnion():
                    raise TypeError("No support for unions as distinguishing "
                                    "arguments yet: %s",
                                    args[distinguishingIndex].location)

            # Convert all our arguments up to the distinguishing index.
            # Doesn't matter which of the possible signatures we use, since
            # they all have the same types up to that point; just use
            # possibleSignatures[0]
            caseBody = [CGGeneric("let argv_start = JS_ARGV(cx, vp);")]
            caseBody.extend([ CGArgumentConverter(possibleSignatures[0][1][i],
                                                  i, "argv_start", "argc",
                                                  descriptor) for i in
                              range(0, distinguishingIndex) ])

            # Select the right overload from our set.
            distinguishingArg = "(*argv_start.offset(%d))" % distinguishingIndex

            def pickFirstSignature(condition, filterLambda):
                # NOTE(review): relies on Python 2 semantics — filter()
                # returning a list (len() on it breaks under Python 3).
                sigs = filter(filterLambda, possibleSignatures)
                assert len(sigs) < 2
                if len(sigs) > 0:
                    if condition is None:
                        # Unconditional pick: emit the call directly.
                        caseBody.append(
                            getPerSignatureCall(sigs[0], distinguishingIndex,
                                                possibleSignatures.index(sigs[0])))
                    else:
                        # Guarded pick: wrap the call in the given test.
                        caseBody.append(CGGeneric("if " + condition + " {"))
                        caseBody.append(CGIndenter(
                            getPerSignatureCall(sigs[0], distinguishingIndex,
                                                possibleSignatures.index(sigs[0]))))
                        caseBody.append(CGGeneric("}"))
                    return True
                return False

            # First check for null or undefined
            pickFirstSignature("%s.isNullOrUndefined()" % distinguishingArg,
                               lambda s: (s[1][distinguishingIndex].type.nullable() or
                                          s[1][distinguishingIndex].type.isDictionary()))

            # Now check for distinguishingArg being an object that implements a
            # non-callback interface.  That includes typed arrays and
            # arraybuffers.
            interfacesSigs = [
                s for s in possibleSignatures
                if (s[1][distinguishingIndex].type.isObject() or
                    s[1][distinguishingIndex].type.isNonCallbackInterface()) ]
            # There might be more than one of these; we need to check
            # which ones we unwrap to.

            if len(interfacesSigs) > 0:
                # The spec says that we should check for "platform objects
                # implementing an interface", but it's enough to guard on these
                # being an object.  The code for unwrapping non-callback
                # interfaces and typed arrays will just bail out and move on to
                # the next overload if the object fails to unwrap correctly.  We
                # could even not do the isObject() check up front here, but in
                # cases where we have multiple object overloads it makes sense
                # to do it only once instead of for each overload.  That will
                # also allow the unwrapping test to skip having to do codegen
                # for the null-or-undefined case, which we already handled
                # above.
                caseBody.append(CGGeneric("if (%s).is_object() {" %
                                          (distinguishingArg)))
                for idx, sig in enumerate(interfacesSigs):
                    caseBody.append(CGIndenter(CGGeneric("loop {")));
                    type = sig[1][distinguishingIndex].type

                    # The argument at index distinguishingIndex can't possibly
                    # be unset here, because we've already checked that argc is
                    # large enough that we can examine this argument.
                    template, _, declType, needsRooting = getJSToNativeConversionTemplate(
                        type, descriptor, failureCode="break;", isDefinitelyObject=True)

                    testCode = instantiateJSToNativeConversionTemplate(
                        template,
                        {"val": distinguishingArg},
                        declType,
                        "arg%d" % distinguishingIndex,
                        needsRooting)

                    # Indent by 4, since we need to indent further than our "do" statement
                    caseBody.append(CGIndenter(testCode, 4));
                    # If we got this far, we know we unwrapped to the right
                    # interface, so just do the call.  Start conversion with
                    # distinguishingIndex + 1, since we already converted
                    # distinguishingIndex.
                    caseBody.append(CGIndenter(
                        getPerSignatureCall(sig, distinguishingIndex + 1, idx), 4))
                    caseBody.append(CGIndenter(CGGeneric("}")))

                caseBody.append(CGGeneric("}"))

            # XXXbz Now we're supposed to check for distinguishingArg being
            # an array or a platform object that supports indexed
            # properties... skip that last for now.  It's a bit of a pain.
            pickFirstSignature("%s.isObject() && IsArrayLike(cx, &%s.toObject())" %
                               (distinguishingArg, distinguishingArg),
                               lambda s:
                                   (s[1][distinguishingIndex].type.isArray() or
                                    s[1][distinguishingIndex].type.isSequence() or
                                    s[1][distinguishingIndex].type.isObject()))

            # Check for Date objects
            # XXXbz Do we need to worry about security wrappers around the Date?
            pickFirstSignature("%s.isObject() && JS_ObjectIsDate(cx, &%s.toObject())" %
                               (distinguishingArg, distinguishingArg),
                               lambda s: (s[1][distinguishingIndex].type.isDate() or
                                          s[1][distinguishingIndex].type.isObject()))

            # Check for vanilla JS objects
            # XXXbz Do we need to worry about security wrappers?
            pickFirstSignature("%s.isObject() && !IsPlatformObject(cx, &%s.toObject())" %
                               (distinguishingArg, distinguishingArg),
                               lambda s: (s[1][distinguishingIndex].type.isCallback() or
                                          s[1][distinguishingIndex].type.isCallbackInterface() or
                                          s[1][distinguishingIndex].type.isDictionary() or
                                          s[1][distinguishingIndex].type.isObject()))

            # The remaining cases are mutually exclusive.  The
            # pickFirstSignature calls are what change caseBody
            # Check for strings or enums
            if pickFirstSignature(None,
                                  lambda s: (s[1][distinguishingIndex].type.isString() or
                                             s[1][distinguishingIndex].type.isEnum())):
                pass
            # Check for primitives
            elif pickFirstSignature(None,
                                    lambda s: s[1][distinguishingIndex].type.isPrimitive()):
                pass
            # Check for "any"
            elif pickFirstSignature(None,
                                    lambda s: s[1][distinguishingIndex].type.isAny()):
                pass
            else:
                # Just throw; we have no idea what we're supposed to
                # do with this.
                caseBody.append(CGGeneric("return Throw(cx, NS_ERROR_XPC_BAD_CONVERT_JS);"))

            argCountCases.append(CGCase(str(argCount),
                                        CGList(caseBody, "\n")))

        overloadCGThings = []
        overloadCGThings.append(
            CGGeneric("let argcount = cmp::min(argc, %d);" %
                      maxArgCount))
        overloadCGThings.append(
            CGSwitch("argcount",
                     argCountCases,
                     CGGeneric("return 0; //XXXjdm throw stuff\n//return ThrowErrorMessage(cx, MSG_MISSING_ARGUMENTS, %s);\n" % methodName)))
        #XXXjdm Avoid unreachable statement warnings
        #overloadCGThings.append(
        #    CGGeneric('fail!("We have an always-returning default case");\n'
        #              'return 0;'))
        self.cgRoot = CGWrapper(CGList(overloadCGThings, "\n"),
                                pre="\n")

    def define(self):
        return self.cgRoot.define()
class FakeCastableDescriptor():
    """A minimal stand-in exposing only the fields CastableObjectUnwrapper
    reads: nativeType, name, and interface.inheritanceDepth()."""
    def __init__(self, descriptor):
        class _InterfaceProxy:
            # Defer to the wrapped descriptor at call time so the reported
            # depth stays current.
            def inheritanceDepth(self):
                return descriptor.interface.inheritanceDepth()

        self.nativeType = "*const %s" % descriptor.concreteType
        self.name = descriptor.name
        self.interface = _InterfaceProxy()
def dictionaryHasSequenceMember(dictionary):
    """True if `dictionary`, or any ancestor dictionary, has a member whose
    type is (or transitively contains) a sequence."""
    if any(typeIsSequenceOrHasSequenceMember(member.type)
           for member in dictionary.members):
        return True
    # Walk up the parent chain; a falsy parent short-circuits just as the
    # original `or`/`and` expression did.
    return dictionary.parent and dictionaryHasSequenceMember(dictionary.parent)
def typeIsSequenceOrHasSequenceMember(type):
    """True if `type` is a sequence or transitively contains one (through
    arrays, dictionaries, or union members). Nullability is unwrapped."""
    inner = type.inner if type.nullable() else type
    if inner.isSequence():
        return True
    if inner.isArray():
        # Recurse into the array's element type.
        return typeIsSequenceOrHasSequenceMember(inner.inner)
    if inner.isDictionary():
        return dictionaryHasSequenceMember(inner.inner)
    if inner.isUnion():
        return any(typeIsSequenceOrHasSequenceMember(m.type)
                   for m in inner.flatMemberTypes)
    return False
def typeNeedsRooting(type, descriptorProvider):
    """True when values of `type` must be rooted: Gecko interfaces whose
    descriptor is flagged needsRooting."""
    if not type.isGeckoInterface():
        return False
    return descriptorProvider.getDescriptor(type.name).needsRooting
def getJSToNativeConversionTemplate(type, descriptorProvider, failureCode=None,
isDefinitelyObject=False,
isMember=False,
isArgument=False,
invalidEnumValueFatal=True,
defaultValue=None,
treatNullAs="Default",
isEnforceRange=False,
isClamp=False,
exceptionCode=None,
allowTreatNonObjectAsNull=False,
isCallbackReturnValue=False,
sourceDescription="value"):
"""
Get a template for converting a JS value to a native object based on the
given type and descriptor. If failureCode is given, then we're actually
testing whether we can convert the argument to the desired type. That
means that failures to convert due to the JS value being the wrong type of
value need to use failureCode instead of throwing exceptions. Failures to
convert that are due to JS exceptions (from toString or valueOf methods) or
out of memory conditions need to throw exceptions no matter what
failureCode is.
If isDefinitelyObject is True, that means we know the value
isObject() and we have no need to recheck that.
if isMember is True, we're being converted from a property of some
JS object, not from an actual method argument, so we can't rely on
our jsval being rooted or outliving us in any way. Any caller
passing true needs to ensure that it is handled correctly in
typeIsSequenceOrHasSequenceMember.
invalidEnumValueFatal controls whether an invalid enum value conversion
attempt will throw (if true) or simply return without doing anything (if
false).
If defaultValue is not None, it's the IDL default value for this conversion
If isEnforceRange is true, we're converting an integer and throwing if the
value is out of range.
If isClamp is true, we're converting an integer and clamping if the
value is out of range.
If allowTreatNonObjectAsNull is true, then [TreatNonObjectAsNull]
extended attributes on nullable callback functions will be honored.
The return value from this function is a tuple consisting of four things:
1) A string representing the conversion code. This will have template
substitution performed on it as follows:
${val} replaced by an expression for the JS::Value in question
2) A string or None representing Rust code for the default value (if any).
3) A CGThing representing the native C++ type we're converting to
(declType). This is allowed to be None if the conversion code is
supposed to be used as-is.
4) A boolean indicating whether the caller has to root the result.
"""
# We should not have a defaultValue if we know we're an object
assert(not isDefinitelyObject or defaultValue is None)
# If exceptionCode is not set, we'll just rethrow the exception we got.
# Note that we can't just set failureCode to exceptionCode, because setting
# failureCode will prevent pending exceptions from being set in cases when
# they really should be!
if exceptionCode is None:
exceptionCode = "return 0;"
needsRooting = typeNeedsRooting(type, descriptorProvider)
def handleOptional(template, declType, default):
assert (defaultValue is None) == (default is None)
return (template, default, declType, needsRooting)
# Unfortunately, .capitalize() on a string will lowercase things inside the
# string, which we do not want.
def firstCap(string):
return string[0].upper() + string[1:]
# Helper functions for dealing with failures due to the JS value being the
# wrong type of value
# Helper functions for dealing with failures due to the JS value being the
# wrong type of value
def onFailureNotAnObject(failureCode):
return CGWrapper(
CGGeneric(
failureCode or
('throw_type_error(cx, "%s is not an object.");\n'
'%s' % (firstCap(sourceDescription), exceptionCode))),
post="\n")
def onFailureBadType(failureCode, typeName):
return CGWrapper(
CGGeneric(
failureCode or
('//XXXjdm ThrowErrorMessage(cx, MSG_DOES_NOT_IMPLEMENT_INTERFACE, "%s", "%s")\n;'
'%s' % (firstCap(sourceDescription), typeName,
exceptionCode))),
post="\n")
def onFailureNotCallable(failureCode):
return CGWrapper(
CGGeneric(
failureCode or
('//XXXjdm ThrowErrorMessage(cx, MSG_NOT_CALLABLE, "%s");\n'
'%s' % (firstCap(sourceDescription), exceptionCode))),
post="\n")
# A helper function for handling null default values. Checks that the
# default value, if it exists, is null.
def handleDefaultNull(nullValue):
if defaultValue is None:
return None
if not isinstance(defaultValue, IDLNullValue):
raise TypeError("Can't handle non-null default value here")
assert type.nullable() or type.isDictionary()
return nullValue
# A helper function for wrapping up the template body for
# possibly-nullable objecty stuff
def wrapObjectTemplate(templateBody, isDefinitelyObject, type,
failureCode=None):
if not isDefinitelyObject:
# Handle the non-object cases by wrapping up the whole
# thing in an if cascade.
templateBody = (
"if (${val}).is_object() {\n" +
CGIndenter(CGGeneric(templateBody)).define() + "\n")
if type.nullable():
templateBody += (
"} else if (${val}).is_null_or_undefined() {\n"
" None\n")
templateBody += (
"} else {\n" +
CGIndenter(onFailureNotAnObject(failureCode)).define() +
"}\n")
return templateBody
assert not (isEnforceRange and isClamp) # These are mutually exclusive
if type.isArray():
raise TypeError("Can't handle array arguments yet")
if type.isSequence():
raise TypeError("Can't handle sequence arguments yet")
if type.isUnion():
declType = CGGeneric(type.name + "::" + type.name)
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=" >")
templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(value) => value,\n"
" Err(()) => { %s },\n"
"}" % exceptionCode)
return handleOptional(templateBody, declType, handleDefaultNull("None"))
if type.isGeckoInterface():
assert not isEnforceRange and not isClamp
descriptor = descriptorProvider.getDescriptor(
type.unroll().inner.identifier.name)
if descriptor.interface.isCallback():
name = descriptor.nativeType
declType = CGGeneric("Option<%s>" % name);
conversion = ("Some(%s::new((${val}).to_object()))" % name)
template = wrapObjectTemplate(conversion, isDefinitelyObject, type,
failureCode)
return handleOptional(template, declType, handleDefaultNull("None"))
if isMember:
descriptorType = descriptor.memberType
elif isArgument:
descriptorType = descriptor.argumentType
else:
descriptorType = descriptor.nativeType
templateBody = ""
if descriptor.interface.isConsequential():
raise TypeError("Consequential interface %s being used as an "
"argument" % descriptor.interface.identifier.name)
if failureCode is None:
substitutions = {
"sourceDescription": sourceDescription,
"interface": descriptor.interface.identifier.name,
"exceptionCode": exceptionCode,
}
unwrapFailureCode = string.Template(
'throw_type_error(cx, "${sourceDescription} does not '
'implement interface ${interface}.");\n'
'${exceptionCode}').substitute(substitutions)
else:
unwrapFailureCode = failureCode
templateBody = str(CastableObjectUnwrapper(
descriptor,
"(${val}).to_object()",
unwrapFailureCode))
declType = CGGeneric(descriptorType)
if type.nullable():
templateBody = "Some(%s)" % templateBody
declType = CGWrapper(declType, pre="Option<", post=">")
if isMember:
templateBody += ".root()"
templateBody = wrapObjectTemplate(templateBody, isDefinitelyObject,
type, failureCode)
return handleOptional(templateBody, declType, handleDefaultNull("None"))
if type.isSpiderMonkeyInterface():
raise TypeError("Can't handle SpiderMonkey interface arguments yet")
if type.isDOMString():
assert not isEnforceRange and not isClamp
treatAs = {
"Default": "Default",
"EmptyString": "Empty",
}
if treatNullAs not in treatAs:
raise TypeError("We don't support [TreatNullAs=%s]" % treatNullAs)
if type.nullable():
nullBehavior = "()"
else:
nullBehavior = treatAs[treatNullAs]
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
" Ok(strval) => strval,\n"
" Err(_) => { %s },\n"
"}" % (nullBehavior, exceptionCode))
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
assert type.nullable()
default = "None"
else:
assert defaultValue.type.tag() == IDLType.Tags.domstring
value = "str::from_utf8(data).unwrap().to_string()"
if type.nullable():
value = "Some(%s)" % value
default = (
"static data: [u8, ..%s] = [ %s ];\n"
"%s" %
(len(defaultValue.value) + 1,
", ".join(["'" + char + "' as u8" for char in defaultValue.value] + ["0"]),
value))
declType = "DOMString"
if type.nullable():
declType = "Option<%s>" % declType
return handleOptional(conversionCode, CGGeneric(declType), default)
if type.isByteString():
assert not isEnforceRange and not isClamp
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(strval) => strval,\n"
" Err(_) => { %s },\n"
"}" % exceptionCode)
declType = CGGeneric("ByteString")
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=">")
return handleOptional(conversionCode, declType, handleDefaultNull("None"))
if type.isEnum():
assert not isEnforceRange and not isClamp
if type.nullable():
raise TypeError("We don't support nullable enumerated arguments "
"yet")
enum = type.inner.identifier.name
if invalidEnumValueFatal:
handleInvalidEnumValueCode = exceptionCode
else:
handleInvalidEnumValueCode = "return 1;"
template = (
"match FindEnumStringIndex(cx, ${val}, %(values)s) {\n"
" Err(_) => { %(exceptionCode)s },\n"
" Ok(None) => { %(handleInvalidEnumValueCode)s },\n"
" Ok(Some(index)) => {\n"
" //XXXjdm need some range checks up in here.\n"
" unsafe { mem::transmute(index) }\n"
" },\n"
"}" % { "values" : enum + "Values::strings",
"exceptionCode" : exceptionCode,
"handleInvalidEnumValueCode" : handleInvalidEnumValueCode })
if defaultValue is not None:
assert(defaultValue.type.tag() == IDLType.Tags.domstring)
default = "%sValues::%s" % (enum, getEnumValueName(defaultValue.value))
else:
default = None
return handleOptional(template, CGGeneric(enum), default)
if type.isCallback():
assert not isEnforceRange and not isClamp
assert not type.treatNonCallableAsNull()
assert not type.treatNonObjectAsNull() or type.nullable()
assert not type.treatNonObjectAsNull() or not type.treatNonCallableAsNull()
declType = CGGeneric('%s::%s' % (type.unroll().module(), type.unroll().identifier.name))
conversion = CGCallbackTempRoot(declType.define())
if type.nullable():
declType = CGTemplatedType("Option", declType)
conversion = CGWrapper(conversion, pre="Some(", post=")")
if allowTreatNonObjectAsNull and type.treatNonObjectAsNull():
if not isDefinitelyObject:
haveObject = "${val}.is_object()"
template = CGIfElseWrapper(haveObject,
conversion,
CGGeneric("None")).define()
else:
template = conversion
else:
template = CGIfElseWrapper("JS_ObjectIsCallable(cx, ${val}.to_object()) != 0",
conversion,
onFailureNotCallable(failureCode)).define()
template = wrapObjectTemplate(
template,
isDefinitelyObject,
type,
failureCode)
if defaultValue is not None:
assert allowTreatNonObjectAsNull
assert type.treatNonObjectAsNull()
assert type.nullable()
assert isinstance(defaultValue, IDLNullValue)
default = "None"
else:
default = None
return (template, default, declType, needsRooting)
if type.isAny():
assert not isEnforceRange and not isClamp
declType = CGGeneric("JSVal")
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
default = "NullValue()"
elif isinstance(defaultValue, IDLUndefinedValue):
default = "UndefinedValue()"
else:
raise TypeError("Can't handle non-null, non-undefined default value here")
return handleOptional("${val}", declType, default)
if type.isObject():
raise TypeError("Can't handle object arguments yet")
if type.isDictionary():
if failureCode is not None:
raise TypeError("Can't handle dictionaries when failureCode is not None")
# There are no nullable dictionaries
assert not type.nullable()
typeName = CGDictionary.makeDictionaryName(type.inner)
declType = CGGeneric(typeName)
template = ("match %s::new(cx, ${val}) {\n"
" Ok(dictionary) => dictionary,\n"
" Err(_) => return 0,\n"
"}" % typeName)
return handleOptional(template, declType, handleDefaultNull("%s::empty()" % typeName))
if type.isVoid():
# This one only happens for return values, and its easy: Just
# ignore the jsval.
return ("", None, None, False)
if not type.isPrimitive():
raise TypeError("Need conversion for argument type '%s'" % str(type))
assert not isEnforceRange and not isClamp
if failureCode is None:
failureCode = 'return 0'
declType = CGGeneric(builtinNames[type.tag()])
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=">")
#XXXjdm support conversionBehavior here
template = (
"match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(v) => v,\n"
" Err(_) => { %s }\n"
"}" % exceptionCode)
if defaultValue is not None:
if isinstance(defaultValue, IDLNullValue):
assert type.nullable()
defaultStr = "None"
else:
tag = defaultValue.type.tag()
if tag in numericTags:
defaultStr = str(defaultValue.value)
else:
assert(tag == IDLType.Tags.bool)
defaultStr = toStringBool(defaultValue.value)
if type.nullable():
defaultStr = "Some(%s)" % defaultStr
else:
defaultStr = None
return handleOptional(template, declType, defaultStr)
def instantiateJSToNativeConversionTemplate(templateBody, replacements,
                                            declType, declName, needsRooting):
    """
    Take the templateBody and declType as returned by
    getJSToNativeConversionTemplate, a set of replacements as required by the
    strings in such a templateBody, and a declName, and generate code to
    convert into a stack Rust binding with that name.
    """
    # Fill in the ${...} placeholders (e.g. ${val}) in the template.
    substituted = CGGeneric(
        string.Template(templateBody).substitute(replacements))

    output = CGList([], "\n")
    if declType is None:
        # No declaration requested: emit the conversion expression bare.
        output.append(substituted)
    else:
        # Emit `let <declName>: <declType> = <conversion>;`.
        pieces = [
            CGGeneric("let "),
            CGGeneric(declName),
            CGGeneric(": "),
            declType,
            CGGeneric(" = "),
            substituted,
            CGGeneric(";"),
        ]
        output.append(CGList(pieces))

    # Add an empty CGGeneric to get an extra newline after the argument
    # conversion.
    output.append(CGGeneric(""))

    if needsRooting:
        # Shadow the binding with its rooted form.
        output.append(CGGeneric("let %s = %s.root();" % (declName, declName)))
        output.append(CGGeneric(""))

    return output
def convertConstIDLValueToJSVal(value):
    """
    Map an IDL constant value to the Rust ConstantVal variant expression
    (with its payload) used to represent it in generated code.

    Raises TypeError for constant types with no representation.
    """
    if isinstance(value, IDLNullValue):
        return "NullVal"
    tag = value.type.tag()
    if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16,
               IDLType.Tags.uint16, IDLType.Tags.int32]:
        return "IntVal(%s)" % (value.value)
    if tag == IDLType.Tags.uint32:
        return "UintVal(%s)" % (value.value)
    if tag in [IDLType.Tags.int64, IDLType.Tags.uint64]:
        # 64-bit integers don't fit in a JS int; represent as a double.
        return "DoubleVal(%s)" % (value.value)
    if tag == IDLType.Tags.bool:
        return "BoolVal(true)" if value.value else "BoolVal(false)"
    if tag in [IDLType.Tags.float, IDLType.Tags.double]:
        return "DoubleVal(%s)" % (value.value)
    # Bug fix: the original concatenated a non-str IDL type object onto a
    # str, which raised a confusing concatenation TypeError instead of the
    # intended diagnostic message.
    raise TypeError("Const value of unhandled type: %s" % value.type)
class CGArgumentConverter(CGThing):
    """
    A class that takes an IDL argument object, its index in the
    argument list, and the argv and argc strings and generates code to
    unwrap the argument to the right native type.
    """
    def __init__(self, argument, index, argv, argc, descriptorProvider,
                 invalidEnumValueFatal=True):
        CGThing.__init__(self)
        # An argument with a default value must be optional.
        assert(not argument.defaultValue or argument.optional)

        replacer = {
            "index": index,
            "argc": argc,
            "argv": argv
        }
        # True at runtime when the caller actually passed this argument.
        condition = string.Template("${index} < ${argc}").substitute(replacer)

        # ${val} resolves to the raw jsval at this argument's slot.
        replacementVariables = {
            "val": string.Template("(*${argv}.offset(${index}))").substitute(replacer),
        }

        template, default, declType, needsRooting = getJSToNativeConversionTemplate(
            argument.type,
            descriptorProvider,
            invalidEnumValueFatal=invalidEnumValueFatal,
            defaultValue=argument.defaultValue,
            treatNullAs=argument.treatNullAs,
            isEnforceRange=argument.enforceRange,
            isClamp=argument.clamp,
            isMember="Variadic" if argument.variadic else False,
            allowTreatNonObjectAsNull=argument.allowTreatNonCallableAsNull())

        if not argument.variadic:
            if argument.optional:
                if argument.defaultValue:
                    # Omitted argument falls back to the IDL default value.
                    assert default
                    template = CGIfElseWrapper(condition,
                                               CGGeneric(template),
                                               CGGeneric(default)).define()
                else:
                    # No default: surface omission as Option<...>::None.
                    assert not default
                    declType = CGWrapper(declType, pre="Option<", post=">")
                    template = CGIfElseWrapper(condition,
                                               CGGeneric("Some(%s)" % template),
                                               CGGeneric("None")).define()
            else:
                assert not default

            self.converter = instantiateJSToNativeConversionTemplate(
                template, replacementVariables, declType, "arg%d" % index,
                needsRooting)
        else:
            # Variadic tail: loop over the remaining jsvals, convert each
            # into `slot`, and collect them into a Vec.
            assert argument.optional
            variadicConversion = {
                "val": string.Template("(*${argv}.offset(variadicArg as int))").substitute(replacer),
            }
            innerConverter = instantiateJSToNativeConversionTemplate(
                template, variadicConversion, declType, "slot",
                needsRooting)

            seqType = CGTemplatedType("Vec", declType)
            variadicConversion = string.Template(
                "{\n"
                " let mut vector: ${seqType} = Vec::with_capacity((${argc} - ${index}) as uint);\n"
                " for variadicArg in range(${index}, ${argc}) {\n"
                "${inner}\n"
                " vector.push(slot);\n"
                " }\n"
                " vector\n"
                "}"
            ).substitute({
                "index": index,
                "argc": argc,
                "seqType": seqType.define(),
                "inner": CGIndenter(innerConverter, 4).define(),
            })

            # Rooting is handled per-element by innerConverter, so the
            # outer Vec conversion itself needs no rooting.
            self.converter = instantiateJSToNativeConversionTemplate(
                variadicConversion, replacementVariables, seqType, "arg%d" % index,
                False)

    def define(self):
        return self.converter.define()
def wrapForType(jsvalRef, result='result', successCode='return 1;'):
    """
    Reflect a Rust value into JS.

    * 'jsvalRef': a Rust reference to the JSVal in which to store the result
      of the conversion;
    * 'result': the name of the variable in which the Rust value is stored;
    * 'successCode': the code to run once we have done the conversion.
    """
    assignment = "%s = (%s).to_jsval(cx);" % (jsvalRef, result)
    return assignment + "\n" + successCode
def typeNeedsCx(type, retVal=False):
    """
    Return whether converting a value of the given IDL type requires a
    JSContext.  retVal indicates the type is used as a return value.
    """
    if type is None:
        return False
    inner = type
    if inner.nullable():
        inner = inner.inner
    if inner.isSequence() or inner.isArray():
        inner = inner.inner
    if inner.isUnion():
        # A union needs cx as soon as any member type does.
        members = inner.unroll().flatMemberTypes
        return any(typeNeedsCx(member) for member in members)
    if retVal and inner.isSpiderMonkeyInterface():
        return True
    return inner.isAny() or inner.isObject()
def typeRetValNeedsRooting(type):
    """
    Return whether a value of this IDL type returned from a call must be
    rooted: true for DOM (Gecko) interfaces that are not callbacks.
    """
    if type is None:
        return False
    unwrapped = type.inner if type.nullable() else type
    return unwrapped.isGeckoInterface() and not unwrapped.isCallback()
def memberIsCreator(member):
    """Return True if the member carries the [Creator] extended attribute."""
    creator = member.getExtendedAttribute("Creator")
    return creator is not None
def getRetvalDeclarationForType(returnType, descriptorProvider):
    """
    Return a CGThing containing the Rust type of the return value for
    returnType, consulting descriptorProvider for interface types.

    Raises TypeError for return types we cannot declare.
    """
    def maybeOption(result):
        # DRY: every nullable-capable branch below wrapped its result in
        # Option<...> with identical code; do it in one place.
        if returnType.nullable():
            result = CGWrapper(result, pre="Option<", post=">")
        return result

    if returnType is None or returnType.isVoid():
        # Nothing to declare
        return CGGeneric("()")
    if returnType.isPrimitive() and returnType.tag() in builtinNames:
        return maybeOption(CGGeneric(builtinNames[returnType.tag()]))
    if returnType.isDOMString():
        return maybeOption(CGGeneric("DOMString"))
    if returnType.isByteString():
        return maybeOption(CGGeneric("ByteString"))
    if returnType.isEnum():
        return maybeOption(CGGeneric(returnType.unroll().inner.identifier.name))
    if returnType.isGeckoInterface():
        descriptor = descriptorProvider.getDescriptor(
            returnType.unroll().inner.identifier.name)
        return maybeOption(CGGeneric(descriptor.returnType))
    if returnType.isCallback():
        return maybeOption(CGGeneric(
            '%s::%s' % (returnType.unroll().module(),
                        returnType.unroll().identifier.name)))
    if returnType.isUnion():
        name = returnType.unroll().name
        return maybeOption(CGGeneric('%s::%s' % (name, name)))
    if returnType.isAny():
        # JSVal can already encode null/undefined; no Option wrapper.
        return CGGeneric("JSVal")
    if returnType.isObject() or returnType.isSpiderMonkeyInterface():
        # Raw object pointer; nullness is the null pointer.
        return CGGeneric("*mut JSObject")
    if returnType.isSequence():
        raise TypeError("We don't support sequence return values")
    raise TypeError("Don't know how to declare return value for %s" %
                    returnType)
class PropertyDefiner:
    """
    A common superclass for defining things on prototype objects.

    Subclasses should implement generateArray to generate the actual arrays of
    things we're defining.  They should also set self.regular to the list of
    things exposed to web pages.
    """
    def __init__(self, descriptor, name):
        self.descriptor = descriptor
        self.name = name

    def variableName(self):
        # Name of the generated static array, e.g. "sMethods".
        return "s" + self.name

    def length(self):
        return len(self.regular)

    def __str__(self):
        # We only need to generate id arrays for things that will end
        # up used via ResolveProperty or EnumerateProperties.
        return self.generateArray(self.regular, self.variableName())

    def generatePrefableArray(self, array, name, specTemplate, specTerminator,
                              specType, getDataTuple):
        """
        This method generates our various arrays.

        array is an array of interface members as passed to generateArray

        name is the name as passed to generateArray

        specTemplate is a template for each entry of the spec array

        specTerminator is a terminator for the spec array (inserted at the
        end of the array), or None

        specType is the actual typename of our spec

        getDataTuple is a callback function that takes an array entry and
        returns a tuple suitable for substitution into specTemplate.
        """
        # Bug fix: the original asserted `len(array) is not 0` -- an identity
        # comparison on an int, which is not a valid (in)equality test.
        assert len(array) != 0
        specs = [specTemplate % getDataTuple(member) for member in array]
        if specTerminator:
            specs.append(specTerminator)
        return (("static %s: &'static [%s] = &[\n" +
                 ",\n".join(specs) + "\n" +
                 "];\n\n") % (name, specType))
def methodLength(method):
    """
    The length of a method is the maximum of the lengths of the
    argument lists of all its overloads.
    """
    return max(len(arguments) for _, arguments in method.signatures())
class MethodDefiner(PropertyDefiner):
    """
    A class for defining methods on a prototype object.
    """
    def __init__(self, descriptor, name, static):
        PropertyDefiner.__init__(self, descriptor, name)

        # FIXME https://bugzilla.mozilla.org/show_bug.cgi?id=772822
        #       We should be able to check for special operations without an
        #       identifier. For now we check if the name starts with __
        methods = [m for m in descriptor.interface.members if
                   m.isMethod() and m.isStatic() == static and
                   not m.isIdentifierLess()]
        # Static methods are bound as plain natives; instance methods go
        # through the generic method stub with per-method JIT info.
        self.regular = [{"name": m.identifier.name,
                         "methodInfo": not m.isStatic(),
                         "length": methodLength(m),
                         "flags": "JSPROP_ENUMERATE" }
                        for m in methods]

        # FIXME Check for an existing iterator on the interface first.
        if any(m.isGetter() and m.isIndexed() for m in methods):
            # Indexed getters get a default @@iterator-style entry.
            self.regular.append({"name": 'iterator',
                                 "methodInfo": False,
                                 "nativeName": "JS_ArrayIterator",
                                 "length": 0,
                                 "flags": "JSPROP_ENUMERATE" })

    def generateArray(self, array, name):
        if len(array) == 0:
            return ""

        def specData(m):
            # Per-entry tuple: (name, native accessor, jitinfo, nargs, flags).
            if m.get("methodInfo", True):
                jitinfo = ("&%s_methodinfo" % m["name"])
                accessor = "genericMethod"
            else:
                jitinfo = "0 as *const JSJitInfo"
                accessor = m.get("nativeName", m["name"])
            return (m["name"], accessor, jitinfo, m["length"], m["flags"])

        def stringDecl(m):
            # NUL-terminated name byte-array backing the JSFunctionSpec name.
            return "static %s_name: [u8, ..%i] = %s;\n" % (m["name"], len(m["name"]) + 1,
                                                           str_to_const_array(m["name"]))

        decls = ''.join([stringDecl(m) for m in array])
        return decls + self.generatePrefableArray(
            array, name,
            ' JSFunctionSpec {name: &%s_name as *const u8 as *const libc::c_char, call: JSNativeWrapper {op: Some(%s), info: %s}, nargs: %s, flags: %s as u16, selfHostedName: 0 as *const libc::c_char }',
            ' JSFunctionSpec {name: 0 as *const libc::c_char, call: JSNativeWrapper {op: None, info: 0 as *const JSJitInfo}, nargs: 0, flags: 0, selfHostedName: 0 as *const libc::c_char }',
            'JSFunctionSpec',
            specData)
class AttrDefiner(PropertyDefiner):
    """
    A class for defining attributes (JSPropertySpec entries) on a prototype
    object.
    """
    def __init__(self, descriptor, name, static):
        PropertyDefiner.__init__(self, descriptor, name)
        self.name = name
        self.regular = [
            m
            for m in descriptor.interface.members
            if m.isAttr() and m.isStatic() == static
        ]
        self.static = static

    def generateArray(self, array, name):
        if len(array) == 0:
            return ""

        def flags(attr):
            return "JSPROP_SHARED | JSPROP_ENUMERATE | JSPROP_NATIVE_ACCESSORS"

        def getter(attr):
            # Static attributes bind their native getter directly (no JIT
            # info); instance attributes go through the generic (possibly
            # lenient-this) getter stub with per-attribute JIT info.
            if self.static:
                accessor = 'get_' + attr.identifier.name
                jitinfo = "0"
            else:
                if attr.hasLenientThis():
                    accessor = "genericLenientGetter"
                else:
                    accessor = "genericGetter"
                jitinfo = "&%s_getterinfo" % attr.identifier.name
            return ("JSPropertyOpWrapper {op: Some(%(native)s), info: %(info)s as *const JSJitInfo}"
                    % {"info" : jitinfo,
                       "native" : accessor})

        def setter(attr):
            # Readonly attributes get a null setter.
            if attr.readonly:
                return "JSStrictPropertyOpWrapper {op: None, info: 0 as *const JSJitInfo}"
            if self.static:
                accessor = 'set_' + attr.identifier.name
                jitinfo = "0"
            else:
                if attr.hasLenientThis():
                    accessor = "genericLenientSetter"
                else:
                    accessor = "genericSetter"
                jitinfo = "&%s_setterinfo" % attr.identifier.name
            return ("JSStrictPropertyOpWrapper {op: Some(%(native)s), info: %(info)s as *const JSJitInfo}"
                    % {"info" : jitinfo,
                       "native" : accessor})

        def specData(attr):
            return (attr.identifier.name, flags(attr), getter(attr),
                    setter(attr))

        def stringDecl(attr):
            # NUL-terminated name byte-array backing the JSPropertySpec name.
            name = attr.identifier.name
            return "static %s_name: [u8, ..%i] = %s;\n" % (name, len(name) + 1,
                                                           str_to_const_array(name))

        decls = ''.join([stringDecl(m) for m in array])
        return decls + self.generatePrefableArray(
            array, name,
            ' JSPropertySpec { name: &%s_name as *const u8 as *const libc::c_char, tinyid: 0, flags: ((%s) & 0xFF) as u8, getter: %s, setter: %s }',
            ' JSPropertySpec { name: 0 as *const libc::c_char, tinyid: 0, flags: 0, getter: JSPropertyOpWrapper {op: None, info: 0 as *const JSJitInfo}, setter: JSStrictPropertyOpWrapper {op: None, info: 0 as *const JSJitInfo} }',
            'JSPropertySpec',
            specData)
class ConstDefiner(PropertyDefiner):
    """
    A class for definining constants on the interface object
    """
    def __init__(self, descriptor, name):
        PropertyDefiner.__init__(self, descriptor, name)
        self.name = name
        self.regular = [member for member in descriptor.interface.members
                        if member.isConst()]

    def generateArray(self, array, name):
        if not array:
            return ""

        def specData(const):
            # (constant name, Rust ConstantVal expression)
            return (const.identifier.name,
                    convertConstIDLValueToJSVal(const.value))

        def stringDecl(const):
            ident = const.identifier.name
            return "static %s_name: &'static [u8] = &%s;\n" % (
                ident, str_to_const_array(ident))

        decls = ''.join(stringDecl(member) for member in array)
        return decls + self.generatePrefableArray(
            array, name,
            ' ConstantSpec { name: %s_name, value: %s }',
            None,
            'ConstantSpec',
            specData)
# We'll want to insert the indent at the beginnings of lines, but we
# don't want to indent empty lines. So only indent lines that have a
# non-newline character on them.
# (The pattern is a zero-width lookahead, so substitution inserts the
# indent without consuming any of the line's own text.)
lineStartDetector = re.compile("^(?=[^\n])", re.MULTILINE)
class CGIndenter(CGThing):
    """
    A class that takes another CGThing and generates code that indents that
    CGThing by some number of spaces.  The default indent is two spaces.
    """
    def __init__(self, child, indentLevel=2):
        CGThing.__init__(self)
        self.child = child
        self.indent = " " * indentLevel

    def define(self):
        defn = self.child.define()
        # Bug fix: the original tested `defn is not ""`, an identity
        # comparison on a str object, which is not a reliable emptiness
        # test; use truthiness instead.
        if defn:
            return re.sub(lineStartDetector, self.indent, defn)
        else:
            return defn
class CGWrapper(CGThing):
    """
    Generic CGThing that wraps other CGThings with pre and post text.
    """
    def __init__(self, child, pre="", post="", reindent=False):
        CGThing.__init__(self)
        self.child = child
        self.pre = pre
        self.post = post
        self.reindent = reindent

    def define(self):
        defn = self.child.define()
        if self.reindent:
            # We don't use lineStartDetector because we don't want to
            # insert whitespace at the beginning of our _first_ line.
            padded = defn.replace("\n", "\n" + " " * len(self.pre))
            defn = stripTrailingWhitespace(padded)
        return self.pre + defn + self.post
class CGImports(CGWrapper):
    """
    Generates the appropriate import/use statements.
    """
    def __init__(self, child, descriptors, imports):
        """
        Adds a set of imports.
        """
        ignored_warnings = [
            # Allow unreachable_code because we use 'break' in a way that
            # sometimes produces two 'break's in a row. See for example
            # CallbackMember.getArgConversions.
            'unreachable_code',
            'non_camel_case_types',
            'non_uppercase_statics',
            'unnecessary_parens',
            'unused_imports',
            'unused_variable',
            'unused_unsafe',
            'unused_mut',
            'dead_assignment',
            'dead_code',
        ]

        # One allow() attribute, then a sorted `use` line per import.
        lines = ['#![allow(%s)]' % ','.join(ignored_warnings)]
        for imported in sorted(imports):
            lines.append('use %s;' % imported)
        CGWrapper.__init__(self, child,
                           pre='\n'.join(lines) + '\n\n')

    @staticmethod
    def getDeclarationFilename(decl):
        # Use our local version of the header, not the exported one, so that
        # test bindings, which don't export, will work correctly.
        basename = os.path.basename(decl.filename())
        return basename.replace('.webidl', 'Binding.rs')
class CGIfWrapper(CGWrapper):
    """
    Wraps a CGThing in `if <condition> { ... }`, indenting the body.
    """
    def __init__(self, child, condition):
        header = CGWrapper(CGGeneric(condition), pre="if ", post=" {\n",
                           reindent=True)
        CGWrapper.__init__(self, CGIndenter(child), pre=header.define(),
                           post="\n}")
class CGTemplatedType(CGWrapper):
    """
    Wraps a CGThing type in `templateName<...>`.
    """
    def __init__(self, templateName, child):
        CGWrapper.__init__(self, child, pre="%s<" % templateName, post=">")
class CGNamespace(CGWrapper):
    """
    Wraps a CGThing in a Rust `mod` declaration, optionally `pub`.
    """
    def __init__(self, namespace, child, public=False):
        qualifier = "pub " if public else ""
        CGWrapper.__init__(self, child,
                           pre="%smod %s {\n" % (qualifier, namespace),
                           post="} // mod %s\n" % namespace)

    @staticmethod
    def build(namespaces, child, public=False):
        """
        Static helper method to build multiple wrapped namespaces.
        """
        if not namespaces:
            return child
        outermost, remainder = namespaces[0], namespaces[1:]
        inner = CGNamespace.build(remainder, child, public=public)
        return CGNamespace(outermost, inner, public=public)
def DOMClass(descriptor):
    """
    Return the Rust initializer expression for this descriptor's DOMClass,
    embedding its (padded) prototype chain.
    """
    protoList = ['PrototypeList::id::' + proto for proto in descriptor.prototypeChain]
    # Pad out the list to the right length with IDCount so we
    # guarantee that all the lists are the same length. IDCount
    # is never the ID of any prototype, so it's safe to use as
    # padding.
    protoList.extend(['PrototypeList::id::IDCount'] * (descriptor.config.maxProtoChainLength - len(protoList)))
    prototypeChainString = ', '.join(protoList)
    return """DOMClass {
  interface_chain: [ %s ]
}""" % prototypeChainString
class CGDOMJSClass(CGThing):
    """
    Generate a DOMJSClass for a given descriptor
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        traceHook = "Some(%s)" % TRACE_HOOK_NAME
        # Globals carry the global-class flags and extra reserved slots.
        if self.descriptor.createGlobal:
            flags = "JSCLASS_IS_GLOBAL | JSCLASS_DOM_GLOBAL"
            slots = "JSCLASS_GLOBAL_SLOT_COUNT + 1"
        else:
            flags = "0"
            slots = "1"
        return """
static Class_name: [u8, ..%i] = %s;
static Class: DOMJSClass = DOMJSClass {
base: js::Class {
name: &Class_name as *const u8 as *const libc::c_char,
flags: JSCLASS_IS_DOMJSCLASS | %s | (((%s) & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT as uint), //JSCLASS_HAS_RESERVED_SLOTS(%s),
addProperty: Some(JS_PropertyStub),
delProperty: Some(JS_PropertyStub),
getProperty: Some(JS_PropertyStub),
setProperty: Some(JS_StrictPropertyStub),
enumerate: Some(JS_EnumerateStub),
resolve: Some(JS_ResolveStub),
convert: Some(JS_ConvertStub),
finalize: Some(%s),
checkAccess: None,
call: None,
hasInstance: None,
construct: None,
trace: %s,
ext: js::ClassExtension {
equality: 0 as *const u8,
outerObject: %s,
innerObject: None,
iteratorObject: 0 as *const u8,
unused: 0 as *const u8,
isWrappedNative: 0 as *const u8,
},
ops: js::ObjectOps {
lookupGeneric: 0 as *const u8,
lookupProperty: 0 as *const u8,
lookupElement: 0 as *const u8,
lookupSpecial: 0 as *const u8,
defineGeneric: 0 as *const u8,
defineProperty: 0 as *const u8,
defineElement: 0 as *const u8,
defineSpecial: 0 as *const u8,
getGeneric: 0 as *const u8,
getProperty: 0 as *const u8,
getElement: 0 as *const u8,
getElementIfPresent: 0 as *const u8,
getSpecial: 0 as *const u8,
setGeneric: 0 as *const u8,
setProperty: 0 as *const u8,
setElement: 0 as *const u8,
setSpecial: 0 as *const u8,
getGenericAttributes: 0 as *const u8,
getPropertyAttributes: 0 as *const u8,
getElementAttributes: 0 as *const u8,
getSpecialAttributes: 0 as *const u8,
setGenericAttributes: 0 as *const u8,
setPropertyAttributes: 0 as *const u8,
setElementAttributes: 0 as *const u8,
setSpecialAttributes: 0 as *const u8,
deleteProperty: 0 as *const u8,
deleteElement: 0 as *const u8,
deleteSpecial: 0 as *const u8,
enumerate: 0 as *const u8,
typeOf: 0 as *const u8,
thisObject: %s,
clear: 0 as *const u8,
},
},
dom_class: %s
};
""" % (len(self.descriptor.interface.identifier.name) + 1,
       str_to_const_array(self.descriptor.interface.identifier.name),
       flags, slots, slots,
       FINALIZE_HOOK_NAME, traceHook,
       self.descriptor.outerObjectHook,
       self.descriptor.outerObjectHook,
       CGIndenter(CGGeneric(DOMClass(self.descriptor))).define())
def str_to_const_array(s):
    """
    Render s as a Rust u8 array literal terminated by a NUL byte,
    e.g. "ab" -> "['a' as u8, 'b' as u8, 0 as u8]".
    """
    # Idiom/compatibility fix: the original used `map(lambda ...) + [...]`,
    # which breaks under Python 3 where map() returns an iterator that
    # cannot be concatenated to a list.
    elements = ["'%s' as u8" % char for char in s] + ['0 as u8']
    return "[" + ", ".join(elements) + "]"
class CGPrototypeJSClass(CGThing):
    """
    Generate the JSClass used for this interface's prototype object.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        # One reserved slot; the class name is "<Interface>Prototype".
        return """
static PrototypeClassName__: [u8, ..%s] = %s;
static PrototypeClass: JSClass = JSClass {
name: &PrototypeClassName__ as *const u8 as *const libc::c_char,
flags: (1 & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT as uint, //JSCLASS_HAS_RESERVED_SLOTS(1)
addProperty: Some(JS_PropertyStub),
delProperty: Some(JS_PropertyStub),
getProperty: Some(JS_PropertyStub),
setProperty: Some(JS_StrictPropertyStub),
enumerate: Some(JS_EnumerateStub),
resolve: Some(JS_ResolveStub),
convert: Some(JS_ConvertStub),
finalize: None,
checkAccess: None,
call: None,
hasInstance: None,
construct: None,
trace: None,
reserved: [0 as *mut libc::c_void, ..40]
};
""" % (len(self.descriptor.interface.identifier.name + "Prototype") + 1,
       str_to_const_array(self.descriptor.interface.identifier.name + "Prototype"))
class CGInterfaceObjectJSClass(CGThing):
    """
    Generate the JSClass for an interface (constructor) object.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        # NOTE(review): the early return below unconditionally disables this
        # codegen; everything after it is dead code, apparently retained as a
        # reference until interface-object classes are supported.
        if True:
            return ""
        ctorname = "0 as *const u8" if not self.descriptor.interface.ctor() else CONSTRUCT_HOOK_NAME
        hasinstance = HASINSTANCE_HOOK_NAME
        return """
static InterfaceObjectClass: JSClass = {
%s, 0,
JS_PropertyStub,
JS_PropertyStub,
JS_PropertyStub,
JS_StrictPropertyStub,
JS_EnumerateStub,
JS_ResolveStub,
JS_ConvertStub,
0 as *const u8,
0 as *const u8,
%s,
%s,
%s,
0 as *const u8,
JSCLASS_NO_INTERNAL_MEMBERS
};
""" % (str_to_const_array("Function"), ctorname, hasinstance, ctorname)
class CGList(CGThing):
    """
    Generate code for a list of GCThings.  Just concatenates them together,
    with an optional joiner string.  "\n" is a common joiner.
    """
    def __init__(self, children, joiner=""):
        CGThing.__init__(self)
        self.children = children
        self.joiner = joiner

    def append(self, child):
        self.children.append(child)

    def prepend(self, child):
        self.children.insert(0, child)

    def join(self, generator):
        # Idiom fix: replace filter() with a lambda by a generator
        # expression; empty strings are skipped either way.
        return self.joiner.join(s for s in generator if s)

    def define(self):
        # None children are tolerated and simply skipped.
        return self.join(child.define() for child in self.children if child is not None)
class CGIfElseWrapper(CGList):
    """
    Emits `if <condition> { <ifTrue> } else { <ifFalse> }`.
    """
    def __init__(self, condition, ifTrue, ifFalse):
        thenPart = CGIfWrapper(ifTrue, condition)
        elsePart = CGWrapper(CGIndenter(ifFalse), pre=" else {\n", post="\n}")
        CGList.__init__(self, [thenPart, elsePart])
class CGGeneric(CGThing):
    """
    A class that spits out a fixed string into the codegen. Can spit out a
    separate string for the declaration too.
    """
    def __init__(self, text):
        # NOTE(review): deliberately does not call CGThing.__init__ --
        # presumably CGThing's initializer has no required state; confirm.
        self.text = text

    def define(self):
        return self.text
class CGCallbackTempRoot(CGGeneric):
    """
    Emits a block that extracts ${val} as an object into a temporary and
    constructs the callback wrapper `name` from it.  The ${val} placeholder
    is substituted later by the conversion-template machinery.
    """
    def __init__(self, name):
        val = "%s::new(tempRoot)" % name
        define = """{
let tempRoot = ${val}.to_object();
%s
}""" % val
        CGGeneric.__init__(self, define)
def getAllTypes(descriptors, dictionaries, callbacks):
    """
    Generate all the types we're dealing with.  For each type, a tuple
    containing type, descriptor, dictionary is yielded.  The descriptor and
    dictionary can be None if the type does not come from a descriptor or
    dictionary; they will never both be non-None.
    """
    for descriptor in descriptors:
        for type in getTypesFromDescriptor(descriptor):
            yield (type, descriptor, None)
    for dictionary in dictionaries:
        for type in getTypesFromDictionary(dictionary):
            yield (type, None, dictionary)
    for callback in callbacks:
        for type in getTypesFromCallback(callback):
            yield (type, None, None)
def SortedTuples(l):
    """
    Sort a list of tuples based on the first item in the tuple
    """
    return sorted(l, key=lambda entry: entry[0])
def SortedDictValues(d):
    """
    Returns a list of values from the dict sorted by key.
    """
    # Sort the (key, value) pairs on key, then strip down to the values.
    sortedPairs = SortedTuples(d.items())
    return (pair[1] for pair in sortedPairs)
def UnionTypes(descriptors, dictionaries, callbacks, config):
    """
    Returns a CGList containing CGUnionStructs for every union.
    """
    # Rust `use` paths emitted at the top of every generated union module.
    imports = [
        'dom::bindings::utils::unwrap_jsmanaged',
        'dom::bindings::codegen::PrototypeList',
        'dom::bindings::conversions::FromJSValConvertible',
        'dom::bindings::conversions::ToJSValConvertible',
        'dom::bindings::conversions::Default',
        'dom::bindings::error::throw_not_in_union',
        'dom::bindings::js::JS',
        'dom::types::*',
        'js::jsapi::JSContext',
        'js::jsval::JSVal',
        'servo_util::str::DOMString',
    ]

    # Now find all the things we'll need as arguments and return values because
    # we need to wrap or unwrap them.
    unionStructs = dict()
    for (t, descriptor, dictionary) in getAllTypes(descriptors, dictionaries, callbacks):
        assert not descriptor or not dictionary
        t = t.unroll()
        if not t.isUnion():
            continue
        name = str(t)
        # Generate each union struct only once, however often it appears.
        if not name in unionStructs:
            provider = descriptor or config.getDescriptorProvider()
            unionStructs[name] = CGNamespace(name,
                CGImports(CGList([
                    CGUnionStruct(t, provider),
                    CGUnionConversionStruct(t, provider)
                ]), [], imports),
                public=True)

    # Sort by union name for deterministic output.
    return CGList(SortedDictValues(unionStructs), "\n\n")
class Argument():
    """
    A class for outputting the type and name of an argument
    """
    def __init__(self, argType, name, default=None, mutable=False):
        self.argType = argType
        self.name = name
        self.default = default
        self.mutable = mutable

    def declare(self):
        """Render the argument as it appears in a Rust fn signature."""
        pieces = []
        if self.mutable:
            pieces.append('mut ')
        pieces.append(self.name)
        if self.argType:
            pieces.append(': ' + self.argType)
        #XXXjdm Support default arguments somehow :/
        #if self.default is not None:
        #    string += " = " + self.default
        return ''.join(pieces)

    def define(self):
        """Render as a C-style `type name` pair."""
        return self.argType + ' ' + self.name
class CGAbstractMethod(CGThing):
    """
    An abstract class for generating code for a method.  Subclasses
    should override definition_body to create the actual code.

    descriptor is the descriptor for the interface the method is associated with

    name is the name of the method as a string

    returnType is the IDLType of the return value

    args is a list of Argument objects

    inline should be True to generate an inline method, whose body is
    part of the declaration.

    alwaysInline should be True to generate an inline method annotated with
    MOZ_ALWAYS_INLINE.

    If templateArgs is not None it should be a list of strings containing
    template arguments, and the function will be templatized using those
    arguments.
    """
    def __init__(self, descriptor, name, returnType, args, inline=False, alwaysInline=False, extern=False, pub=False, templateArgs=None, unsafe=True):
        CGThing.__init__(self)
        self.descriptor = descriptor
        self.name = name
        self.returnType = returnType
        self.args = args
        self.alwaysInline = alwaysInline
        self.extern = extern
        self.templateArgs = templateArgs
        self.pub = pub;
        self.unsafe = unsafe

    def _argstring(self):
        # Comma-joined Rust parameter list for the fn signature.
        return ', '.join([a.declare() for a in self.args])

    def _template(self):
        if self.templateArgs is None:
            return ''
        return '<%s>\n' % ', '.join(self.templateArgs)

    def _decorators(self):
        # Attributes/qualifiers emitted before `fn`,
        # e.g. `#[inline(always)] extern pub `.
        decorators = []
        if self.alwaysInline:
            decorators.append('#[inline(always)]')
        if self.extern:
            decorators.append('extern')
        if self.pub:
            decorators.append('pub')
        if not decorators:
            return ''
        return ' '.join(decorators) + ' '

    def _returnType(self):
        # "void" maps to no `->` clause at all.
        return (" -> %s" % self.returnType) if self.returnType != "void" else ""

    def define(self):
        body = self.definition_body()
        if self.unsafe:
            # Wrap the whole generated body in `unsafe { ... }`.
            body = CGWrapper(body, pre="unsafe {\n", post="\n}")
        return CGWrapper(CGIndenter(body),
                         pre=self.definition_prologue(),
                         post=self.definition_epilogue()).define()

    def definition_prologue(self):
        return "%sfn %s%s(%s)%s {\n" % (self._decorators(), self.name, self._template(),
                                        self._argstring(), self._returnType())

    def definition_epilogue(self):
        return "\n}\n"

    def definition_body(self):
        assert(False) # Override me!
def CreateBindingJSObject(descriptor, parent=None):
    """
    Generate code that creates the JS reflector object for aObject and
    stashes the native object in its reserved slot (or proxy private).

    parent names the Rust variable holding the scope/parent object; it is
    unused when the descriptor creates a global.
    """
    create = "let mut raw: JS<%s> = JS::from_raw(&*aObject);\n" % descriptor.concreteType
    if descriptor.proxy:
        # Proxy-based bindings build a NewProxyObject with the DOM object
        # squirrelled away in the proxy's private value.
        assert not descriptor.createGlobal
        create += """
let handler = RegisterBindings::proxy_handlers[PrototypeList::proxies::%s as uint];
let mut private = PrivateValue(squirrel_away_unique(aObject) as *const libc::c_void);
let obj = with_compartment(aCx, proto, || {
NewProxyObject(aCx, handler,
&private,
proto, %s,
ptr::mut_null(), ptr::mut_null())
});
assert!(obj.is_not_null());
""" % (descriptor.name, parent)
    else:
        if descriptor.createGlobal:
            # Globals are created through CreateDOMGlobal with no parent.
            create += "let obj = CreateDOMGlobal(aCx, &Class.base as *const js::Class as *const JSClass);\n"
        else:
            create += ("let obj = with_compartment(aCx, proto, || {\n"
                       " JS_NewObject(aCx, &Class.base as *const js::Class as *const JSClass, &*proto, &*%s)\n"
                       "});\n" % parent)
        # Stash the boxed native in the reserved slot of the new object.
        create += """assert!(obj.is_not_null());
JS_SetReservedSlot(obj, DOM_OBJECT_SLOT as u32,
PrivateValue(squirrel_away_unique(aObject) as *const libc::c_void));
"""
    return create
class CGWrapMethod(CGAbstractMethod):
    """
    Class that generates the FooBinding::Wrap function for non-callback
    interfaces.
    """
    def __init__(self, descriptor):
        # Callback interfaces never get a Wrap function.
        assert not descriptor.interface.isCallback()
        if not descriptor.createGlobal:
            # Ordinary objects are wrapped into an explicit scope.
            args = [Argument('*mut JSContext', 'aCx'), Argument('&GlobalRef', 'aScope'),
                    Argument("Box<%s>" % descriptor.concreteType, 'aObject', mutable=True)]
        else:
            # A global is its own scope, so no aScope argument.
            args = [Argument('*mut JSContext', 'aCx'),
                    Argument("Box<%s>" % descriptor.concreteType, 'aObject', mutable=True)]
        retval = 'Temporary<%s>' % descriptor.concreteType
        CGAbstractMethod.__init__(self, descriptor, 'Wrap', retval, args, pub=True)

    def definition_body(self):
        if not self.descriptor.createGlobal:
            # Look up the proto on the scope's global, create the
            # reflector there, then link DOM object and reflector.
            return CGGeneric("""\
let scope = aScope.reflector().get_jsobject();
assert!(scope.is_not_null());
assert!(((*JS_GetClass(scope)).flags & JSCLASS_IS_GLOBAL) != 0);
let proto = with_compartment(aCx, scope, || GetProtoObject(aCx, scope, scope));
assert!(proto.is_not_null());
%s
raw.reflector().set_jsobject(obj);
Temporary::new(raw)""" % CreateBindingJSObject(self.descriptor, "scope"))
        else:
            # Globals: create first, then install the proto and register
            # all bindings inside the new global's compartment.
            return CGGeneric("""\
%s
with_compartment(aCx, obj, || {
let proto = GetProtoObject(aCx, obj, obj);
JS_SetPrototype(aCx, obj, proto);
raw.reflector().set_jsobject(obj);
RegisterBindings::Register(aCx, obj);
});
Temporary::new(raw)""" % CreateBindingJSObject(self.descriptor))
class CGIDLInterface(CGThing):
    """
    Class for codegen of an implementation of the IDLInterface trait.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        # The trait exposes the prototype id and inheritance depth used
        # by the unwrapping machinery.
        replacer = {
            'type': self.descriptor.name,
            'depth': self.descriptor.interface.inheritanceDepth(),
        }
        return string.Template("""
impl IDLInterface for ${type} {
fn get_prototype_id(_: Option<${type}>) -> PrototypeList::id::ID {
PrototypeList::id::${type}
}
fn get_prototype_depth(_: Option<${type}>) -> uint {
${depth}
}
}
""").substitute(replacer)
class CGAbstractExternMethod(CGAbstractMethod):
    """
    Abstract base class for codegen of implementation-only (no
    declaration) static methods; these are emitted as non-inline
    `extern` functions.
    """
    def __init__(self, descriptor, name, returnType, args):
        CGAbstractMethod.__init__(self, descriptor, name, returnType,
                                  args, extern=True, inline=False)
class PropertyArrays():
    """
    Aggregates the per-interface property definer objects: methods,
    attributes and constants, both static and non-static.
    """
    def __init__(self, descriptor):
        self.staticMethods = MethodDefiner(descriptor, "StaticMethods",
                                           static=True)
        self.staticAttrs = AttrDefiner(descriptor, "StaticAttributes",
                                       static=True)
        self.methods = MethodDefiner(descriptor, "Methods", static=False)
        self.attrs = AttrDefiner(descriptor, "Attributes", static=False)
        self.consts = ConstDefiner(descriptor, "Constants")

    @staticmethod
    def arrayNames():
        """Names of the definer attributes, in emission order."""
        return ["staticMethods", "staticAttrs", "methods", "attrs", "consts"]

    def variableNames(self):
        """Map each array name to its generated Rust variable name."""
        return dict((name, getattr(self, name).variableName())
                    for name in self.arrayNames())

    def __str__(self):
        return "".join(str(getattr(self, name)) for name in self.arrayNames())
class CGNativeProperties(CGThing):
    """
    Codegen for the static `sNativeProperties` record pointing at the
    per-interface property arrays (None for empty arrays).
    """
    def __init__(self, descriptor, properties):
        CGThing.__init__(self)
        # NOTE(review): `descriptor` is accepted but never used here.
        self.properties = properties

    def define(self):
        def getField(array):
            # Emit Some(varName) only when the array has entries.
            propertyArray = getattr(self.properties, array)
            if propertyArray.length() > 0:
                value = "Some(%s)" % propertyArray.variableName()
            else:
                value = "None"
            return CGGeneric(string.Template('${name}: ${value},').substitute({
                'name': array,
                'value': value,
            }))

        nativeProps = CGList([getField(array) for array in self.properties.arrayNames()], '\n')
        return CGWrapper(CGIndenter(nativeProps),
                         pre="static sNativeProperties: NativeProperties = NativeProperties {\n",
                         post="\n};\n").define()
class CGCreateInterfaceObjectsMethod(CGAbstractMethod):
    """
    Generate the CreateInterfaceObjects method for an interface descriptor.

    properties should be a PropertyArrays instance.
    """
    def __init__(self, descriptor, properties):
        assert not descriptor.interface.isCallback()
        args = [Argument('*mut JSContext', 'aCx'), Argument('*mut JSObject', 'aGlobal'),
                Argument('*mut JSObject', 'aReceiver')]
        CGAbstractMethod.__init__(self, descriptor, 'CreateInterfaceObjects', '*mut JSObject', args)
        self.properties = properties

    def definition_body(self):
        # Parent proto: plain Object prototype at the root of the chain,
        # otherwise the proto of the next interface up the chain.
        protoChain = self.descriptor.prototypeChain
        if len(protoChain) == 1:
            getParentProto = "JS_GetObjectPrototype(aCx, aGlobal)"
        else:
            parentProtoName = self.descriptor.prototypeChain[-2]
            getParentProto = ("%s::GetProtoObject(aCx, aGlobal, aReceiver)" %
                              toBindingNamespace(parentProtoName))

        getParentProto = ("let parentProto: *mut JSObject = %s;\n"
                          "assert!(parentProto.is_not_null());\n") % getParentProto

        # Concrete proxies pass the whole Class; plain concrete objects
        # the embedded dom_class; non-concrete interfaces a null.
        if self.descriptor.concrete:
            if self.descriptor.proxy:
                domClass = "&Class"
            else:
                domClass = "&Class.dom_class"
        else:
            domClass = "ptr::null()"

        if self.descriptor.interface.hasInterfaceObject():
            if self.descriptor.interface.ctor():
                constructHook = CONSTRUCT_HOOK_NAME
                constructArgs = methodLength(self.descriptor.interface.ctor())
            else:
                # No IDL constructor: install one that always throws.
                constructHook = "ThrowingConstructor"
                constructArgs = 0

            constructor = 'Some((%s, "%s", %d))' % (
                constructHook, self.descriptor.interface.identifier.name,
                constructArgs)
        else:
            constructor = 'None'

        call = """return CreateInterfaceObjects2(aCx, aGlobal, aReceiver, parentProto,
&PrototypeClass, %s,
%s,
&sNativeProperties);""" % (constructor, domClass)

        # NOTE(review): `call` has no remaining %-specs at this point,
        # so the substitution with variableNames() below is a no-op --
        # confirm before simplifying.
        return CGList([
            CGGeneric(getParentProto),
            CGGeneric(call % self.properties.variableNames())
        ], "\n")
class CGGetPerInterfaceObject(CGAbstractMethod):
    """
    A method for getting a per-interface object (a prototype object or interface
    constructor object).
    """
    def __init__(self, descriptor, name, idPrefix="", pub=False):
        args = [Argument('*mut JSContext', 'aCx'), Argument('*mut JSObject', 'aGlobal'),
                Argument('*mut JSObject', 'aReceiver')]
        CGAbstractMethod.__init__(self, descriptor, name,
                                  '*mut JSObject', args, pub=pub)
        # Index expression into the global's proto-or-iface cache array.
        self.id = idPrefix + "id::" + self.descriptor.name

    def definition_body(self):
        # Return the cached object, creating and caching it on first use.
        return CGGeneric("""
/* aGlobal and aReceiver are usually the same, but they can be different
too. For example a sandbox often has an xray wrapper for a window as the
prototype of the sandbox's global. In that case aReceiver is the xray
wrapper and aGlobal is the sandbox's global.
*/
assert!(((*JS_GetClass(aGlobal)).flags & JSCLASS_DOM_GLOBAL) != 0);
/* Check to see whether the interface objects are already installed */
let protoOrIfaceArray = GetProtoOrIfaceArray(aGlobal);
let cachedObject: *mut JSObject = *protoOrIfaceArray.offset(%s as int);
if cachedObject.is_null() {
let tmp: *mut JSObject = CreateInterfaceObjects(aCx, aGlobal, aReceiver);
assert!(tmp.is_not_null());
*protoOrIfaceArray.offset(%s as int) = tmp;
tmp
} else {
cachedObject
}""" % (self.id, self.id))
class CGGetProtoObjectMethod(CGGetPerInterfaceObject):
    """
    A method for getting the interface prototype object.
    """
    def __init__(self, descriptor):
        CGGetPerInterfaceObject.__init__(self, descriptor, "GetProtoObject",
                                         "PrototypeList::", pub=True)

    def definition_body(self):
        # Prepend an explanatory comment to the shared cache-lookup body.
        return CGList([
            CGGeneric("""\
/* Get the interface prototype object for this class. This will create the
object as needed. */"""),
            CGGetPerInterfaceObject.definition_body(self),
        ])
class CGGetConstructorObjectMethod(CGGetPerInterfaceObject):
    """
    A method for getting the interface constructor object.
    """
    def __init__(self, descriptor):
        CGGetPerInterfaceObject.__init__(self, descriptor, "GetConstructorObject",
                                         "constructors::")

    def definition_body(self):
        # Prepend an explanatory comment to the shared cache-lookup body.
        return CGList([
            CGGeneric("""\
/* Get the interface object for this class. This will create the object as
needed. */"""),
            CGGetPerInterfaceObject.definition_body(self),
        ])
class CGDefineProxyHandler(CGAbstractMethod):
    """
    A method to create and cache the proxy trap for a given interface.
    """
    def __init__(self, descriptor):
        # Only proxy-based interfaces get a proxy handler.
        assert descriptor.proxy
        CGAbstractMethod.__init__(self, descriptor, 'DefineProxyHandler', '*const libc::c_void', [], pub=True)

    def define(self):
        # NOTE(review): this override only delegates to the base class
        # and could be removed.
        return CGAbstractMethod.define(self)

    def definition_body(self):
        # Fill the ProxyTraps table; traps left as None/null fall back
        # to the engine's default behaviour. The finalize and trace
        # hooks are the interface-specific ones.
        body = """\
let traps = ProxyTraps {
getPropertyDescriptor: Some(getPropertyDescriptor),
getOwnPropertyDescriptor: Some(getOwnPropertyDescriptor),
defineProperty: Some(defineProperty),
getOwnPropertyNames: ptr::null(),
delete_: Some(delete_),
enumerate: ptr::null(),
has: None,
hasOwn: Some(hasOwn),
get: Some(get),
set: None,
keys: ptr::null(),
iterate: None,
call: None,
construct: None,
nativeCall: ptr::null(),
hasInstance: None,
typeOf: None,
objectClassIs: None,
obj_toString: Some(obj_toString),
fun_toString: None,
//regexp_toShared: ptr::null(),
defaultValue: None,
iteratorNext: None,
finalize: Some(%s),
getElementIfPresent: None,
getPrototypeOf: None,
trace: Some(%s)
};

CreateProxyHandler(&traps, &Class as *const _ as *const _)
""" % (FINALIZE_HOOK_NAME,
       TRACE_HOOK_NAME)
        return CGGeneric(body)
class CGDefineDOMInterfaceMethod(CGAbstractMethod):
    """
    A method for resolve hooks to try to lazily define the interface object for
    a given interface.
    """
    def __init__(self, descriptor):
        assert descriptor.interface.hasInterfaceObject()
        args = [
            Argument('*mut JSContext', 'cx'),
            Argument('*mut JSObject', 'global'),
        ]
        CGAbstractMethod.__init__(self, descriptor, 'DefineDOMInterface', 'void', args, pub=True)

    def define(self):
        # NOTE(review): this override only delegates to the base class
        # and could be removed.
        return CGAbstractMethod.define(self)

    def definition_body(self):
        # GetProtoObject creates and caches the interface objects as a
        # side effect; we only assert that it succeeded.
        return CGGeneric("""\
assert!(global.is_not_null());
assert!(GetProtoObject(cx, global, global).is_not_null());""")
def needCx(returnType, arguments, considerTypes):
    """Return whether the generated native call needs a JSContext,
    judged from the return type and every argument type. Always False
    when considerTypes is false."""
    if not considerTypes:
        return False
    if typeNeedsCx(returnType, True):
        return True
    return any(typeNeedsCx(a.type) for a in arguments)
class CGCallGenerator(CGThing):
    """
    A class to generate an actual call to a C++ object. Assumes that the C++
    object is stored in a variable whose name is given by the |object| argument.

    errorResult should be a string for the value to return in case of an
    exception from the native code, or None if no error reporting is needed.
    """
    def __init__(self, errorResult, arguments, argsPre, returnType,
                 extendedAttributes, descriptorProvider, nativeMethodName,
                 static, object="this"):
        CGThing.__init__(self)

        assert errorResult is None or isinstance(errorResult, str)

        # Fallible natives return Result<T, Error> and need unwrapping.
        isFallible = errorResult is not None

        result = getRetvalDeclarationForType(returnType, descriptorProvider)
        if isFallible:
            result = CGWrapper(result, pre="Result<", post=", Error>")

        args = CGList([CGGeneric(arg) for arg in argsPre], ", ")
        for (a, name) in arguments:
            #XXXjdm Perhaps we should pass all nontrivial types by borrowed pointer
            # Required (non-nullable, non-optional) interface arguments
            # and dictionaries are passed by reference.
            if a.type.isGeckoInterface():
                if not (a.type.nullable() or a.optional):
                    name = "&" + name
            elif a.type.isDictionary():
                name = "&" + name
            args.append(CGGeneric(name))

        # Prepend `cx` when the native needs a JSContext and the caller
        # did not already supply it through argsPre.
        needsCx = needCx(returnType, (a for (a, _) in arguments), True)
        if not "cx" in argsPre and needsCx:
            args.prepend(CGGeneric("cx"))

        # Build up our actual call
        self.cgRoot = CGList([], "\n")

        call = CGGeneric(nativeMethodName)
        if static:
            call = CGWrapper(call, pre="%s::" % descriptorProvider.interface.identifier.name)
        else:
            call = CGWrapper(call, pre="(*%s)." % object)
        call = CGList([call, CGWrapper(args, pre="(", post=")")])

        self.cgRoot.append(CGList([
            CGGeneric("let result: "),
            result,
            CGGeneric(" = "),
            call,
            CGGeneric(";"),
        ]))

        if isFallible:
            # Non-static calls need the global for throw_dom_exception.
            if static:
                glob = ""
            else:
                glob = " let global = global_object_for_js_object(this.reflector().get_jsobject());\n"\
                       " let global = global.root();\n"

            # Unwrap the Result, throwing a DOM exception on Err.
            self.cgRoot.append(CGGeneric(
                "let result = match result {\n"
                " Ok(result) => result,\n"
                " Err(e) => {\n"
                "%s"
                " throw_dom_exception(cx, &global.root_ref(), e);\n"
                " return%s;\n"
                " },\n"
                "};\n" % (glob, errorResult)))

        if typeRetValNeedsRooting(returnType):
            self.cgRoot.append(CGGeneric("let result = result.root();"))

    def define(self):
        return self.cgRoot.define()
class MethodNotCreatorError(Exception):
    """Signals a method that is not a creator; the precise condition is
    determined at the raise sites (not visible here).

    Attributes:
        typename: name of the offending type.
    """
    def __init__(self, typename):
        # Forward to Exception so str(e) carries the type name instead
        # of being empty.
        Exception.__init__(self, typename)
        self.typename = typename
class CGPerSignatureCall(CGThing):
    """
    This class handles the guts of generating code for a particular
    call signature.  A call signature consists of four things:

    1) A return type, which can be None to indicate that there is no
       actual return value (e.g. this is an attribute setter) or an
       IDLType if there's an IDL type involved (including |void|).
    2) An argument list, which is allowed to be empty.
    3) A name of a native method to call.
    4) Whether or not this method is static.

    We also need to know whether this is a method or a getter/setter
    to do error reporting correctly.

    The idlNode parameter can be either a method or an attr. We can query
    |idlNode.identifier| in both cases, so we can be agnostic between the two.
    """
    # XXXbz For now each entry in the argument list is either an
    # IDLArgument or a FakeArgument, but longer-term we may want to
    # have ways of flagging things like JSContext* or optional_argc in
    # there.

    def __init__(self, returnType, argsPre, arguments, nativeMethodName, static,
                 descriptor, idlNode, argConversionStartsAt=0,
                 getter=False, setter=False):
        CGThing.__init__(self)
        self.returnType = returnType
        self.descriptor = descriptor
        self.idlNode = idlNode
        self.extendedAttributes = descriptor.getExtendedAttributes(idlNode,
                                                                   getter=getter,
                                                                   setter=setter)
        self.argsPre = argsPre
        self.arguments = arguments
        self.argCount = len(arguments)
        if self.argCount > argConversionStartsAt:
            # Insert our argv in there
            cgThings = [CGGeneric(self.getArgvDecl())]
        else:
            cgThings = []
        # Convert each JS value to its native form before the call.
        cgThings.extend([CGArgumentConverter(arguments[i], i, self.getArgv(),
                                             self.getArgc(), self.descriptor,
                                             invalidEnumValueFatal=not setter) for
                         i in range(argConversionStartsAt, self.argCount)])

        cgThings.append(CGCallGenerator(
            ' false as JSBool' if self.isFallible() else None,
            self.getArguments(), self.argsPre, returnType,
            self.extendedAttributes, descriptor, nativeMethodName,
            static))
        self.cgRoot = CGList(cgThings, "\n")

    def getArgv(self):
        # Name of the argv local, or "" when there are no arguments.
        return "argv" if self.argCount > 0 else ""
    def getArgvDecl(self):
        return "\nlet argv = JS_ARGV(cx, vp);\n"
    def getArgc(self):
        return "argc"
    def getArguments(self):
        # Pair each IDL argument with the name of its converted local;
        # rooted (non-callback) interface args are passed as root refs.
        def process(arg, i):
            argVal = "arg" + str(i)
            if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
                argVal += ".root_ref()"
            return argVal
        return [(a, process(a, i)) for (i, a) in enumerate(self.arguments)]

    def isFallible(self):
        return not 'infallible' in self.extendedAttributes

    def wrap_return_value(self):
        # NOTE(review): wrapForType is called with a single argument
        # here; verify its signature elsewhere in the file.
        return wrapForType('*vp')

    def define(self):
        return (self.cgRoot.define() + "\n" + self.wrap_return_value())
class CGSwitch(CGList):
    """
    Generate a Rust `match` expression.

    Takes the matched-on expression, a list of CGCase arms, and an
    optional CGThing used as the body of the catch-all `_` arm.
    """
    def __init__(self, expression, cases, default=None):
        CGList.__init__(self, [CGIndenter(case) for case in cases], "\n")
        header = CGWrapper(CGGeneric(expression), pre="match ", post=" {")
        self.prepend(header)
        if default is not None:
            defaultArm = CGWrapper(CGIndenter(default),
                                   pre="_ => {\n",
                                   post="\n}")
            self.append(CGIndenter(defaultArm))
        self.append(CGGeneric("}"))
class CGCase(CGList):
    """
    Generate one arm of a Rust `match`.

    Takes the arm's pattern expression, a CGThing body (may be None),
    and fallThrough, which is unsupported and raises TypeError.
    """
    def __init__(self, expression, body, fallThrough=False):
        CGList.__init__(self, [], "\n")
        if fallThrough:
            raise TypeError("fall through required but unsupported")
        self.append(CGWrapper(CGGeneric(expression), post=" => {"))
        self.append(CGIndenter(CGList([body], "\n")))
        self.append(CGGeneric("}"))
class CGGetterCall(CGPerSignatureCall):
    """
    Generate the native call for an IDL attribute getter: no arguments,
    with the attribute's type as the return type.
    """
    def __init__(self, argsPre, returnType, nativeMethodName, descriptor, attr):
        CGPerSignatureCall.__init__(self, returnType, argsPre, [],
                                    nativeMethodName, attr.isStatic(),
                                    descriptor, attr, getter=True)
class FakeArgument():
    """
    Quacks like an IDLArgument so setters and special operations can be
    funnelled through the method-call code paths.
    """
    def __init__(self, type, interfaceMember, allowTreatNonObjectAsNull=False):
        self.type = type
        self.optional = False
        self.variadic = False
        self.defaultValue = None
        self.treatNullAs = interfaceMember.treatNullAs
        self.enforceRange = False
        self.clamp = False
        self._allowTreatNonObjectAsNull = allowTreatNonObjectAsNull

    def allowTreatNonCallableAsNull(self):
        """Mirror IDLArgument's [TreatNonObjectAsNull] query."""
        return self._allowTreatNonObjectAsNull
class CGSetterCall(CGPerSignatureCall):
    """
    Generate the native call for an IDL attribute setter: one argument
    (the new value) and no return value.
    """
    def __init__(self, argsPre, argType, nativeMethodName, descriptor, attr):
        fakeArg = FakeArgument(argType, attr, allowTreatNonObjectAsNull=True)
        CGPerSignatureCall.__init__(self, None, argsPre, [fakeArg],
                                    nativeMethodName, attr.isStatic(),
                                    descriptor, attr, setter=True)

    def wrap_return_value(self):
        # Setters produce no value; just report success to the JSAPI.
        return "\nreturn 1;"

    def getArgc(self):
        return "1"

    def getArgvDecl(self):
        # We just get our stuff from our last arg no matter what
        return ""
class CGAbstractBindingMethod(CGAbstractExternMethod):
    """
    Common class to generate the JSNatives for all our methods, getters, and
    setters. This will generate the function declaration and unwrap the
    |this| object. Subclasses are expected to override the generate_code
    function to do the rest of the work. This function should return a
    CGThing which is already properly indented.
    """
    def __init__(self, descriptor, name, args, unwrapFailureCode=None):
        CGAbstractExternMethod.__init__(self, descriptor, name, "JSBool", args)
        # Default failure path: throw a TypeError naming the interface.
        if unwrapFailureCode is None:
            self.unwrapFailureCode = (
                'throw_type_error(cx, "\\"this\\" object does not '
                'implement interface %s.");\n'
                'return 0;' % descriptor.interface.identifier.name)
        else:
            self.unwrapFailureCode = unwrapFailureCode

    def definition_body(self):
        # Our descriptor might claim that we're not castable, simply because
        # we're someone's consequential interface. But for this-unwrapping, we
        # know that we're the real deal. So fake a descriptor here for
        # consumption by FailureFatalCastableObjectUnwrapper.
        unwrapThis = str(CastableObjectUnwrapper(
            FakeCastableDescriptor(self.descriptor),
            "obj", self.unwrapFailureCode))
        unwrapThis = CGGeneric(
            "let obj: *mut JSObject = JS_THIS_OBJECT(cx, vp as *mut JSVal);\n"
            "if obj.is_null() {\n"
            " return false as JSBool;\n"
            "}\n"
            "\n"
            "let this: JS<%s> = %s;\n" % (self.descriptor.concreteType, unwrapThis))
        return CGList([ unwrapThis, self.generate_code() ], "\n")

    def generate_code(self):
        assert(False)  # Override me
class CGAbstractStaticBindingMethod(CGAbstractMethod):
    """
    Common class to generate the JSNatives for static methods, getters
    and setters. Generates the extern function declaration; subclasses
    override generate_code to supply the (already indented) body.
    """
    def __init__(self, descriptor, name):
        args = [Argument('*mut JSContext', 'cx'),
                Argument('libc::c_uint', 'argc'),
                Argument('*mut JSVal', 'vp')]
        CGAbstractMethod.__init__(self, descriptor, name, "JSBool", args,
                                  extern=True)

    def definition_body(self):
        return self.generate_code()

    def generate_code(self):
        assert False  # Override me
class CGGenericMethod(CGAbstractBindingMethod):
    """
    Generates the shared JSNative entry point that dispatches an IDL
    method call through its JSJitInfo.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'),
                Argument('libc::c_uint', 'argc'),
                Argument('*mut JSVal', 'vp')]
        CGAbstractBindingMethod.__init__(self, descriptor, 'genericMethod',
                                         args)

    def generate_code(self):
        body = (
            "let _info: *const JSJitInfo = RUST_FUNCTION_VALUE_TO_JITINFO(JS_CALLEE(cx, vp));\n"
            "return CallJitMethodOp(_info, cx, obj, this.unsafe_get() as *mut libc::c_void, argc, vp);")
        return CGGeneric(body)
class CGSpecializedMethod(CGAbstractExternMethod):
    """
    Generates the specialized (per-method) native that the JIT can call
    with lower overhead than the generic dispatcher.
    """
    def __init__(self, descriptor, method):
        self.method = method
        name = method.identifier.name
        args = [Argument('*mut JSContext', 'cx'),
                Argument('JSHandleObject', '_obj'),
                Argument('*const %s' % descriptor.concreteType, 'this'),
                Argument('libc::c_uint', 'argc'),
                Argument('*mut JSVal', 'vp')]
        CGAbstractExternMethod.__init__(self, descriptor, name, 'JSBool', args)

    def definition_body(self):
        nativeName = CGSpecializedMethod.makeNativeName(self.descriptor,
                                                        self.method)
        call = CGMethodCall([], nativeName, self.method.isStatic(),
                            self.descriptor, self.method)
        # Root `this` before calling into the DOM implementation.
        return CGWrapper(call,
                         pre="let this = JS::from_raw(this);\n"
                             "let this = this.root();\n")

    @staticmethod
    def makeNativeName(descriptor, method):
        return MakeNativeName(method.identifier.name)
class CGStaticMethod(CGAbstractStaticBindingMethod):
    """
    Generates the Rust binding for an IDL static method.
    """
    def __init__(self, descriptor, method):
        self.method = method
        CGAbstractStaticBindingMethod.__init__(self, descriptor,
                                               method.identifier.name)

    def generate_code(self):
        nativeName = CGSpecializedMethod.makeNativeName(self.descriptor,
                                                        self.method)
        return CGMethodCall([], nativeName, True, self.descriptor,
                            self.method)
class CGGenericGetter(CGAbstractBindingMethod):
    """
    Generates the shared JSNative for IDL attribute getters.
    """
    def __init__(self, descriptor, lenientThis=False):
        args = [Argument('*mut JSContext', 'cx'), Argument('libc::c_uint', 'argc'),
                Argument('*mut JSVal', 'vp')]
        if lenientThis:
            # [LenientThis]: a bad `this` returns undefined rather than
            # throwing.
            # NOTE(review): this failure code is C++ (MOZ_ASSERT,
            # JS_SET_RVAL) inside a Rust codegen path -- looks unported;
            # confirm it is never emitted/reached.
            name = "genericLenientGetter"
            unwrapFailureCode = (
                "MOZ_ASSERT(!JS_IsExceptionPending(cx));\n"
                "JS_SET_RVAL(cx, vp, JS::UndefinedValue());\n"
                "return true;")
        else:
            name = "genericGetter"
            unwrapFailureCode = None
        CGAbstractBindingMethod.__init__(self, descriptor, name, args,
                                         unwrapFailureCode)

    def generate_code(self):
        # Dispatch to the specialized getter through the JSJitInfo
        # attached to the JS function.
        return CGGeneric(
            "let info: *const JSJitInfo = RUST_FUNCTION_VALUE_TO_JITINFO(JS_CALLEE(cx, vp));\n"
            "return CallJitPropertyOp(info, cx, obj, this.unsafe_get() as *mut libc::c_void, vp);\n")
class CGSpecializedGetter(CGAbstractExternMethod):
    """
    Generates the specialized (per-attribute) getter native that the
    JIT can call with lower overhead.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        name = 'get_' + attr.identifier.name
        args = [Argument('*mut JSContext', 'cx'),
                Argument('JSHandleObject', '_obj'),
                Argument('*const %s' % descriptor.concreteType, 'this'),
                Argument('*mut JSVal', 'vp')]
        CGAbstractExternMethod.__init__(self, descriptor, name, "JSBool",
                                        args)

    def definition_body(self):
        nativeName = CGSpecializedGetter.makeNativeName(self.descriptor,
                                                        self.attr)
        call = CGGetterCall([], self.attr.type, nativeName,
                            self.descriptor, self.attr)
        # Root `this` before calling into the DOM implementation.
        return CGWrapper(call,
                         pre="let this = JS::from_raw(this);\n"
                             "let this = this.root();\n")

    @staticmethod
    def makeNativeName(descriptor, attr):
        name = MakeNativeName(attr.identifier.name)
        infallible = ('infallible' in
                      descriptor.getExtendedAttributes(attr, getter=True))
        # Nullable or fallible getters use the Get-prefixed native.
        if attr.type.nullable() or not infallible:
            return "Get" + name
        return name
class CGStaticGetter(CGAbstractStaticBindingMethod):
    """
    Generates the binding for an IDL static attribute getter.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        CGAbstractStaticBindingMethod.__init__(self, descriptor,
                                               'get_' + attr.identifier.name)

    def generate_code(self):
        nativeName = CGSpecializedGetter.makeNativeName(self.descriptor,
                                                        self.attr)
        return CGGetterCall([], self.attr.type, nativeName,
                            self.descriptor, self.attr)
class CGGenericSetter(CGAbstractBindingMethod):
    """
    Generates the shared JSNative for IDL attribute setters.
    """
    def __init__(self, descriptor, lenientThis=False):
        args = [Argument('*mut JSContext', 'cx'), Argument('libc::c_uint', 'argc'),
                Argument('*mut JSVal', 'vp')]
        if lenientThis:
            # [LenientThis]: a bad `this` silently succeeds.
            # NOTE(review): this failure code is C++ (MOZ_ASSERT) inside
            # a Rust codegen path -- looks unported; confirm unreached.
            name = "genericLenientSetter"
            unwrapFailureCode = (
                "MOZ_ASSERT(!JS_IsExceptionPending(cx));\n"
                "return true;")
        else:
            name = "genericSetter"
            unwrapFailureCode = None
        CGAbstractBindingMethod.__init__(self, descriptor, name, args,
                                         unwrapFailureCode)

    def generate_code(self):
        # Point argv at the first argument (or a dummy undefined when
        # none was passed), dispatch through the JSJitInfo, and return
        # undefined on success.
        return CGGeneric(
            "let mut undef = UndefinedValue();\n"
            "let argv: *mut JSVal = if argc != 0 { JS_ARGV(cx, vp) } else { &mut undef as *mut JSVal };\n"
            "let info: *const JSJitInfo = RUST_FUNCTION_VALUE_TO_JITINFO(JS_CALLEE(cx, vp));\n"
            "if CallJitPropertyOp(info, cx, obj, this.unsafe_get() as *mut libc::c_void, argv) == 0 {\n"
            " return 0;\n"
            "}\n"
            "*vp = UndefinedValue();\n"
            "return 1;")
class CGSpecializedSetter(CGAbstractExternMethod):
    """
    Generates the specialized (per-attribute) setter native that the
    JIT can call with lower overhead.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        name = 'set_' + attr.identifier.name
        args = [Argument('*mut JSContext', 'cx'),
                Argument('JSHandleObject', '_obj'),
                Argument('*const %s' % descriptor.concreteType, 'this'),
                Argument('*mut JSVal', 'argv')]
        CGAbstractExternMethod.__init__(self, descriptor, name, "JSBool",
                                        args)

    def definition_body(self):
        nativeName = CGSpecializedSetter.makeNativeName(self.descriptor,
                                                        self.attr)
        call = CGSetterCall([], self.attr.type, nativeName,
                            self.descriptor, self.attr)
        # Root `this` before calling into the DOM implementation.
        return CGWrapper(call,
                         pre="let this = JS::from_raw(this);\n"
                             "let this = this.root();\n")

    @staticmethod
    def makeNativeName(descriptor, attr):
        return "Set" + MakeNativeName(attr.identifier.name)
class CGStaticSetter(CGAbstractStaticBindingMethod):
    """
    Generates the binding for an IDL static attribute setter.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        name = 'set_' + attr.identifier.name
        CGAbstractStaticBindingMethod.__init__(self, descriptor, name)

    def generate_code(self):
        nativeName = CGSpecializedSetter.makeNativeName(self.descriptor,
                                                        self.attr)
        # Reject calls with no argument before invoking the setter.
        checkForArg = CGGeneric(
            "let argv = JS_ARGV(cx, vp);\n"
            "if (argc == 0) {\n"
            " // XXXjdmreturn ThrowErrorMessage(cx, MSG_MISSING_ARGUMENTS, \"%s setter\");\n"
            " return 0;\n"
            "}\n" % self.attr.identifier.name)
        call = CGSetterCall([], self.attr.type, nativeName, self.descriptor,
                            self.attr)
        return CGList([checkForArg, call])
class CGMemberJITInfo(CGThing):
    """
    A class for generating the JITInfo for a property that points to
    our specialized getter and setter.
    """
    def __init__(self, descriptor, member):
        self.member = member
        self.descriptor = descriptor

    def defineJitInfo(self, infoName, opName, infallible):
        """Emit one static JSJitInfo record binding `opName` to the
        interface's prototype id and depth."""
        protoID = "PrototypeList::id::%s as u32" % self.descriptor.name
        depth = self.descriptor.interface.inheritanceDepth()
        failstr = "true" if infallible else "false"
        return ("\n"
                "static %s: JSJitInfo = JSJitInfo {\n"
                " op: %s as *const u8,\n"
                " protoID: %s,\n"
                " depth: %s,\n"
                " isInfallible: %s, /* False in setters. */\n"
                " isConstant: false /* Only relevant for getters. */\n"
                "};\n" % (infoName, opName, protoID, depth, failstr))

    def define(self):
        if self.member.isAttr():
            getterinfo = ("%s_getterinfo" % self.member.identifier.name)
            getter = ("get_%s" % self.member.identifier.name)
            getterinfal = "infallible" in self.descriptor.getExtendedAttributes(self.member, getter=True)
            result = self.defineJitInfo(getterinfo, getter, getterinfal)
            if not self.member.readonly:
                setterinfo = ("%s_setterinfo" % self.member.identifier.name)
                setter = ("set_%s" % self.member.identifier.name)
                # Setters are always fallible, since they have to do a typed unwrap.
                result += self.defineJitInfo(setterinfo, setter, False)
            return result
        if self.member.isMethod():
            methodinfo = ("%s_methodinfo" % self.member.identifier.name)
            # Actually a JSJitMethodOp, but JSJitPropertyOp by struct definition.
            method = ("%s" % self.member.identifier.name)

            # Methods are infallible if they are infallible, have no arguments
            # to unwrap, and have a return type that's infallible to wrap up for
            # return.
            methodInfal = False
            sigs = self.member.signatures()
            if len(sigs) == 1:
                # Don't handle overloading. If there's more than one signature,
                # one of them must take arguments.
                sig = sigs[0]
                if len(sig[1]) == 0:
                    # No arguments and infallible return boxing
                    methodInfal = True

            result = self.defineJitInfo(methodinfo, method, methodInfal)
            return result
        # Fixed: the message previously named a nonexistent
        # "CGPropertyJITInfo" class.
        raise TypeError("Illegal member type to CGMemberJITInfo")
def getEnumValueName(value):
    """Map an IDL enum string onto a Rust-identifier-safe variant name.

    Some enum values can be empty strings. Others might have weird
    characters in them. Deal with the former by returning "_empty",
    deal with possible name collisions from that by throwing if the
    enum value is actually "_empty", and throw on any value containing
    non-ASCII chars. Replace all chars other than [0-9A-Za-z_] with '_'.

    Raises:
        SyntaxError: for non-ASCII values, values starting with a
            digit, identifiers reserved by the C++ spec, and the
            literal "_empty".
    """
    # Fixed: re.search, not re.match -- the non-ASCII check must look at
    # the whole string, not just its first character.
    if re.search(r"[^\x20-\x7E]", value):
        raise SyntaxError('Enum value "' + value + '" contains non-ASCII characters')
    if re.match(r"^[0-9]", value):
        raise SyntaxError('Enum value "' + value + '" starts with a digit')
    value = re.sub(r'[^0-9A-Za-z_]', '_', value)
    # Fixed: re.search so a double underscore anywhere in the identifier
    # is rejected (the C++ spec reserves it at any position), not just
    # at the start.
    if re.search(r"^_[A-Z]|__", value):
        raise SyntaxError('Enum value "' + value + '" is reserved by the C++ spec')
    if value == "_empty":
        raise SyntaxError('"_empty" is not an IDL enum value we support yet')
    if value == "":
        return "_empty"
    return MakeNativeName(value)
class CGEnum(CGThing):
    """
    Codegen for an IDL enum: a Rust enum of the values, the parallel
    string table, and a ToJSValConvertible impl that converts through
    that table. Everything lives in a FooValues module with a
    module-level type alias `Foo`.
    """
    def __init__(self, enum):
        CGThing.__init__(self)

        inner = """
use dom::bindings::conversions::ToJSValConvertible;
use js::jsapi::JSContext;
use js::jsval::JSVal;
#[repr(uint)]
#[deriving(Encodable, PartialEq)]
pub enum valuelist {
%s
}
pub static strings: &'static [&'static str] = &[
%s,
];
impl ToJSValConvertible for valuelist {
fn to_jsval(&self, cx: *mut JSContext) -> JSVal {
strings[*self as uint].to_string().to_jsval(cx)
}
}
""" % (",\n ".join(map(getEnumValueName, enum.values())),
       ",\n ".join(['"%s"' % val for val in enum.values()]))

        self.cgRoot = CGList([
            CGNamespace.build([enum.identifier.name + "Values"],
                              CGIndenter(CGGeneric(inner)), public=True),
            CGGeneric("pub type %s = self::%sValues::valuelist;\n" %
                      (enum.identifier.name, enum.identifier.name)),
        ])

    def define(self):
        return self.cgRoot.define()
def convertConstIDLValueToRust(value):
    """Render an IDL constant's value as a Rust literal string.

    Numeric tags stringify directly; booleans go through toStringBool;
    anything else is unsupported and raises TypeError."""
    tag = value.type.tag()
    numericTags = (IDLType.Tags.int8, IDLType.Tags.uint8,
                   IDLType.Tags.int16, IDLType.Tags.uint16,
                   IDLType.Tags.int32, IDLType.Tags.uint32,
                   IDLType.Tags.int64, IDLType.Tags.uint64,
                   IDLType.Tags.float, IDLType.Tags.double)
    if tag in numericTags:
        return str(value.value)
    if tag == IDLType.Tags.bool:
        return toStringBool(value.value)
    raise TypeError("Const value of unhandled type: " + value.type)
class CGConstant(CGThing):
    """
    Codegen for a group of IDL constants as `pub static` items.
    """
    def __init__(self, constants):
        CGThing.__init__(self)
        self.constants = constants

    def define(self):
        def stringDecl(const):
            # One `pub static NAME: TYPE = VALUE;` line per constant.
            name = const.identifier.name
            value = convertConstIDLValueToRust(const.value)
            rustType = builtinNames[const.value.type.tag()]
            return CGGeneric("pub static %s: %s = %s;\n" % (name, rustType, value))
        decls = CGList(stringDecl(const) for const in self.constants)
        return CGIndenter(decls).define()
def getUnionTypeTemplateVars(type, descriptorProvider):
    """Compute the template variables for one member type of an IDL
    union.

    Returns a dict with:
        name: identifier used for the enum variant,
        typeName: the Rust type carried by the variant,
        jsConversion: CGThing converting a JSVal into Ok(Some(value)).
    """
    # For dictionaries and sequences we need to pass None as the failureCode
    # for getJSToNativeConversionTemplate.
    # Also, for dictionaries we would need to handle conversion of
    # null/undefined to the dictionary correctly.
    if type.isDictionary() or type.isSequence():
        raise TypeError("Can't handle dictionaries or sequences in unions")

    if type.isGeckoInterface():
        name = type.inner.identifier.name
        typeName = descriptorProvider.getDescriptor(name).nativeType
    elif type.isEnum():
        name = type.inner.identifier.name
        typeName = name
    elif type.isArray() or type.isSequence():
        # NOTE(review): isSequence() already raised above, so only
        # arrays can actually reach this branch.
        name = str(type)
        #XXXjdm dunno about typeName here
        typeName = "/*" + type.name + "*/"
    elif type.isDOMString():
        name = type.name
        typeName = "DOMString"
    elif type.isPrimitive():
        name = type.name
        typeName = builtinNames[type.tag()]
    else:
        name = type.name
        typeName = "/*" + type.name + "*/"

    # A failed conversion yields Ok(None) so the next member type can be
    # tried; JS exceptions propagate as Err(()).
    template, _, _, _ = getJSToNativeConversionTemplate(
        type, descriptorProvider, failureCode="return Ok(None);",
        exceptionCode='return Err(());',
        isDefinitelyObject=True)

    assert not type.isObject()
    jsConversion = string.Template(template).substitute({
        "val": "value",
    })
    jsConversion = CGWrapper(CGGeneric(jsConversion), pre="Ok(Some(", post="))")

    return {
        "name": name,
        "typeName": typeName,
        "jsConversion": jsConversion,
    }
class CGUnionStruct(CGThing):
    """
    Codegen for the Rust enum representing an IDL union type, plus a
    ToJSValConvertible impl whose arms delegate to each variant's
    payload.
    """
    def __init__(self, type, descriptorProvider):
        # Nullability is handled outside the union struct itself.
        assert not type.nullable()
        assert not type.hasNullableType

        CGThing.__init__(self)
        self.type = type
        self.descriptorProvider = descriptorProvider

    def define(self):
        templateVars = map(lambda t: getUnionTypeTemplateVars(t, self.descriptorProvider),
                           self.type.flatMemberTypes)
        enumValues = [
            " e%s(%s)," % (v["name"], v["typeName"]) for v in templateVars
        ]
        enumConversions = [
            " e%s(ref inner) => inner.to_jsval(cx)," % v["name"] for v in templateVars
        ]
        return ("""pub enum %s {
%s
}
impl ToJSValConvertible for %s {
fn to_jsval(&self, cx: *mut JSContext) -> JSVal {
match *self {
%s
}
}
}
""") % (self.type, "\n".join(enumValues),
        self.type, "\n".join(enumConversions))
class CGUnionConversionStruct(CGThing):
    """Generates the FromJSValConvertible impl for an IDL union type plus the
    per-member-type `TryConvertToX` helper methods it dispatches to."""
    def __init__(self, type, descriptorProvider):
        assert not type.nullable()
        assert not type.hasNullableType
        CGThing.__init__(self)
        self.type = type
        self.descriptorProvider = descriptorProvider

    def from_jsval(self):
        """Build the `from_jsval` method: try object-typed members first
        (only non-callback interfaces are supported), then the single
        primitive/string/enum member, else throw a not-in-union error."""
        memberTypes = self.type.flatMemberTypes
        names = []
        conversions = []

        interfaceMemberTypes = filter(lambda t: t.isNonCallbackInterface(), memberTypes)
        if len(interfaceMemberTypes) > 0:
            def get_name(memberType):
                # NOTE(review): this tests self.type (the union itself), not
                # memberType, so the first branch looks unreachable — confirm
                # whether memberType.isGeckoInterface() was intended.
                if self.type.isGeckoInterface():
                    return memberType.inner.identifier.name

                return memberType.name

            def get_match(name):
                # Each attempt either errors out, succeeds with the matching
                # enum variant, or falls through to the next member type.
                return (
                    "match %s::TryConvertTo%s(cx, value) {\n"
                    "    Err(_) => return Err(()),\n"
                    "    Ok(Some(value)) => return Ok(e%s(value)),\n"
                    "    Ok(None) => (),\n"
                    "}\n") % (self.type, name, name)

            typeNames = [get_name(memberType) for memberType in interfaceMemberTypes]
            interfaceObject = CGList(CGGeneric(get_match(typeName)) for typeName in typeNames)
            names.extend(typeNames)
        else:
            interfaceObject = None

        # Arrays/sequences, dates, callbacks, dictionaries and plain objects
        # are all unsupported union members for now; reject them loudly.
        arrayObjectMemberTypes = filter(lambda t: t.isArray() or t.isSequence(), memberTypes)
        if len(arrayObjectMemberTypes) > 0:
            assert len(arrayObjectMemberTypes) == 1
            raise TypeError("Can't handle arrays or sequences in unions.")
        else:
            arrayObject = None

        dateObjectMemberTypes = filter(lambda t: t.isDate(), memberTypes)
        if len(dateObjectMemberTypes) > 0:
            assert len(dateObjectMemberTypes) == 1
            raise TypeError("Can't handle dates in unions.")
        else:
            dateObject = None

        callbackMemberTypes = filter(lambda t: t.isCallback() or t.isCallbackInterface(), memberTypes)
        if len(callbackMemberTypes) > 0:
            assert len(callbackMemberTypes) == 1
            raise TypeError("Can't handle callbacks in unions.")
        else:
            callbackObject = None

        dictionaryMemberTypes = filter(lambda t: t.isDictionary(), memberTypes)
        if len(dictionaryMemberTypes) > 0:
            raise TypeError("No support for unwrapping dictionaries as member "
                            "of a union")
        else:
            dictionaryObject = None

        if callbackObject or dictionaryObject:
            assert False, "Not currently supported"
        else:
            nonPlatformObject = None

        objectMemberTypes = filter(lambda t: t.isObject(), memberTypes)
        if len(objectMemberTypes) > 0:
            raise TypeError("Can't handle objects in unions.")
        else:
            object = None

        # Of the object-typed cases only interfaces can survive to here.
        hasObjectTypes = interfaceObject or arrayObject or dateObject or nonPlatformObject or object
        if hasObjectTypes:
            assert interfaceObject

            templateBody = CGList([interfaceObject], "\n")
            conversions.append(CGIfWrapper(templateBody, "value.is_object()"))

        otherMemberTypes = [
            t for t in memberTypes if t.isPrimitive() or t.isString() or t.isEnum()
        ]
        if len(otherMemberTypes) > 0:
            # WebIDL allows at most one non-object distinguishing member.
            assert len(otherMemberTypes) == 1
            memberType = otherMemberTypes[0]
            if memberType.isEnum():
                name = memberType.inner.identifier.name
            else:
                name = memberType.name
            match = (
                "match %s::TryConvertTo%s(cx, value) {\n"
                "    Err(_) => return Err(()),\n"
                "    Ok(Some(value)) => return Ok(e%s(value)),\n"
                "    Ok(None) => (),\n"
                "}\n") % (self.type, name, name)
            conversions.append(CGGeneric(match))
            names.append(name)

        # Fall-through: the JS value matched none of the member types.
        conversions.append(CGGeneric(
            "throw_not_in_union(cx, \"%s\");\n"
            "Err(())" % ", ".join(names)))
        method = CGWrapper(
            CGIndenter(CGList(conversions, "\n\n")),
            pre="fn from_jsval(cx: *mut JSContext, value: JSVal, _option: ()) -> Result<%s, ()> {\n" % self.type,
            post="\n}")
        return CGWrapper(
            CGIndenter(method),
            pre="impl FromJSValConvertible<()> for %s {\n" % self.type,
            post="\n}")

    def try_method(self, t):
        """Build the `TryConvertToX` helper for one member type.

        NOTE(review): this names the method after t.name, while from_jsval
        may use the inner identifier name for interface members — confirm the
        two always agree for the types that reach here."""
        templateVars = getUnionTypeTemplateVars(t, self.descriptorProvider)
        returnType = "Result<Option<%s>, ()>" % templateVars["typeName"]
        jsConversion = templateVars["jsConversion"]

        return CGWrapper(
            CGIndenter(jsConversion, 4),
            pre="fn TryConvertTo%s(cx: *mut JSContext, value: JSVal) -> %s {\n" % (t.name, returnType),
            post="\n}")

    def define(self):
        from_jsval = self.from_jsval()
        methods = CGIndenter(CGList([
            self.try_method(t) for t in self.type.flatMemberTypes
        ], "\n\n"))
        return """
%s

impl %s {
%s
}
""" % (from_jsval.define(), self.type, methods.define())
class ClassItem:
    """Base class for the pieces (bases, members, methods, ...) that make up
    a CGClass. Use with CGClass."""

    def __init__(self, name, visibility):
        self.name = name
        self.visibility = visibility

    def declare(self, cgClass):
        # Subclasses must override.
        assert False

    def define(self, cgClass):
        # Subclasses must override.
        assert False
class ClassBase(ClassItem):
    """A base-class (superclass) entry of a CGClass; declaration-only."""

    def __init__(self, name, visibility='pub'):
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        return self.visibility + ' ' + self.name

    def define(self, cgClass):
        # Bases contribute nothing outside the declaration.
        return ''
class ClassMethod(ClassItem):
    def __init__(self, name, returnType, args, inline=False, static=False,
                 virtual=False, const=False, bodyInHeader=False,
                 templateArgs=None, visibility='public', body=None,
                 breakAfterReturnDecl="\n",
                 breakAfterSelf="\n", override=False):
        """
        override indicates whether to flag the method as MOZ_OVERRIDE
        """
        assert not override or virtual
        self.returnType = returnType
        self.args = args
        # NOTE: the `inline` and `bodyInHeader` arguments are deliberately
        # ignored — Rust output has no header/out-of-line split, so every
        # method body is emitted in the declaration.
        self.inline = False
        self.static = static
        self.virtual = virtual
        self.const = const
        self.bodyInHeader = True
        self.templateArgs = templateArgs
        self.body = body
        self.breakAfterReturnDecl = breakAfterReturnDecl
        self.breakAfterSelf = breakAfterSelf
        self.override = override
        ClassItem.__init__(self, name, visibility)

    def getDecorators(self, declaring):
        """Return the space-terminated decorator prefix for this method."""
        decorators = []
        if self.inline:
            decorators.append('inline')
        if declaring:
            if self.static:
                decorators.append('static')
            if self.virtual:
                decorators.append('virtual')
        if decorators:
            return ' '.join(decorators) + ' '
        return ''

    def getBody(self):
        # Override me or pass a string to constructor
        assert self.body is not None
        return self.body

    def declare(self, cgClass):
        """Render the full `fn name(args) -> ret { body }` declaration."""
        templateClause = '<%s>' % ', '.join(self.templateArgs) \
                         if self.bodyInHeader and self.templateArgs else ''
        args = ', '.join([a.declare() for a in self.args])
        if self.bodyInHeader:
            body = CGIndenter(CGGeneric(self.getBody())).define()
            body = ' {\n' + body + '\n}'
        else:
            body = ';'

        return string.Template("${decorators}%s"
                               "${visibility}fn ${name}${templateClause}(${args})${returnType}${const}${override}${body}%s" %
                               (self.breakAfterReturnDecl, self.breakAfterSelf)
                               ).substitute({
                'templateClause': templateClause,
                'decorators': self.getDecorators(True),
                'returnType': (" -> %s" % self.returnType) if self.returnType else "",
                'name': self.name,
                'const': ' const' if self.const else '',
                'override': ' MOZ_OVERRIDE' if self.override else '',
                'args': args,
                'body': body,
                # Fixed: compare string values with `!=`; the original used
                # `is not`, whose result for equal literals depends on
                # CPython string interning.
                'visibility': self.visibility + ' ' if self.visibility != 'priv' else ''
                })

    def define(self, cgClass):
        # Everything is emitted by declare(); no out-of-line definition.
        pass
class ClassUsingDeclaration(ClassItem):
    """Imports a name from a base class into a CGClass.

    baseClass is the name of the base class to import the name from;
    name is the name to import; visibility determines the visibility of
    the name (public, protected, private), defaults to public.
    """

    def __init__(self, baseClass, name, visibility='public'):
        self.baseClass = baseClass
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        # C++-style `using Base::name;` line.
        return 'using %s::%s;\n' % (self.baseClass, self.name)

    def define(self, cgClass):
        return ''
class ClassConstructor(ClassItem):
    """
    Used for adding a constructor to a CGClass.

    args is a list of Argument objects that are the arguments taken by the
    constructor.

    inline should be True if the constructor should be marked inline.

    bodyInHeader should be True if the body should be placed in the class
    declaration in the header.

    visibility determines the visibility of the constructor (public,
    protected, private), defaults to private.

    explicit should be True if the constructor should be marked explicit.

    baseConstructors is a list of strings containing calls to base constructors,
    defaults to None.

    body contains a string with the code for the constructor, defaults to empty.
    """
    def __init__(self, args, inline=False, bodyInHeader=False,
                 visibility="priv", explicit=False, baseConstructors=None,
                 body=""):
        self.args = args
        # NOTE: `inline` is deliberately ignored for the Rust output.
        self.inline = False
        self.bodyInHeader = bodyInHeader
        self.explicit = explicit
        self.baseConstructors = baseConstructors or []
        self.body = body
        ClassItem.__init__(self, None, visibility)

    def getDecorators(self, declaring):
        """Return the decorator prefix ('explicit'/'inline') when declaring."""
        decorators = []
        if self.explicit:
            decorators.append('explicit')
        if self.inline and declaring:
            decorators.append('inline')
        if decorators:
            return ' '.join(decorators) + ' '
        return ''

    def getInitializationList(self, cgClass):
        """C++-style member-initializer list; only used by define()."""
        items = [str(c) for c in self.baseConstructors]
        for m in cgClass.members:
            if not m.static:
                initialize = m.body
                if initialize:
                    items.append(m.name + "(" + initialize + ")")

        if len(items) > 0:
            return '\n  : ' + ',\n    '.join(items)
        return ''

    def getBody(self, cgClass):
        # NOTE: assumes baseConstructors is non-empty — the generated struct
        # literal always initializes the `parent` field from the first one.
        initializers = ["    parent: %s" % str(self.baseConstructors[0])]
        return (self.body + (
                "%s {\n"
                "%s\n"
                "}") % (cgClass.name, '\n'.join(initializers)))

    def declare(self, cgClass):
        """Render a Rust `pub fn new(...) -> ClassName { ... }`."""
        args = ', '.join([a.declare() for a in self.args])
        body = '    ' + self.getBody(cgClass)
        body = stripTrailingWhitespace(body.replace('\n', '\n    '))
        if len(body) > 0:
            body += '\n'
        body = ' {\n' + body + '}'

        return string.Template("""pub fn ${decorators}new(${args}) -> ${className}${body}
""").substitute({ 'decorators': self.getDecorators(True),
                  'className': cgClass.getNameString(),
                  'args': args,
                  'body': body })

    def define(self, cgClass):
        # C++-style out-of-line definition; unused when bodyInHeader is set.
        if self.bodyInHeader:
            return ''

        args = ', '.join([a.define() for a in self.args])

        # Fixed: getBody requires the cgClass argument; the original call
        # passed none and would raise TypeError whenever this path ran.
        body = '    ' + self.getBody(cgClass)
        body = '\n' + stripTrailingWhitespace(body.replace('\n', '\n    '))
        if len(body) > 0:
            body += '\n'

        return string.Template("""${decorators}
${className}::${className}(${args})${initializationList}
{${body}}
""").substitute({ 'decorators': self.getDecorators(False),
                  'className': cgClass.getNameString(),
                  'args': args,
                  'initializationList': self.getInitializationList(cgClass),
                  'body': body })
class ClassDestructor(ClassItem):
    """A C++-style destructor entry for a CGClass.

    inline marks the destructor inline (implied by bodyInHeader);
    bodyInHeader places the body in the class declaration; visibility
    defaults to private; body is the destructor code; virtual marks the
    destructor virtual.
    """

    def __init__(self, inline=False, bodyInHeader=False,
                 visibility="private", body='', virtual=False):
        self.inline = inline or bodyInHeader
        self.bodyInHeader = bodyInHeader
        self.body = body
        self.virtual = virtual
        ClassItem.__init__(self, None, visibility)

    def getDecorators(self, declaring):
        decorators = []
        if declaring:
            if self.virtual:
                decorators.append('virtual')
            if self.inline:
                decorators.append('inline')
        if not decorators:
            return ''
        return ' '.join(decorators) + ' '

    def getBody(self):
        return self.body

    def declare(self, cgClass):
        if not self.bodyInHeader:
            body = ';'
        else:
            body = stripTrailingWhitespace(
                ('    ' + self.getBody()).replace('\n', '\n    '))
            if body:
                body += '\n'
            body = '\n{\n' + body + '}'
        return '%s~%s()%s\n' % (self.getDecorators(True),
                                cgClass.getNameString(), body)

    def define(self, cgClass):
        if self.bodyInHeader:
            return ''
        body = '    ' + self.getBody()
        body = '\n' + stripTrailingWhitespace(body.replace('\n', '\n    '))
        if len(body) > 0:
            body += '\n'
        className = cgClass.getNameString()
        return '%s\n%s::~%s()\n{%s}\n' % (self.getDecorators(False),
                                          className, className, body)
class ClassMember(ClassItem):
    """A data field of a CGClass (rendered as a Rust struct field)."""

    def __init__(self, name, type, visibility="priv", static=False,
                 body=None):
        self.type = type
        self.static = static
        self.body = body
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        return '%s %s: %s,\n' % (self.visibility, self.name, self.type)

    def define(self, cgClass):
        # Only static members get a standalone (C++-style) definition.
        if not self.static:
            return ''
        initializer = " = " + self.body if self.body else ""
        return '%s %s::%s%s;\n' % (self.type, cgClass.getNameString(),
                                   self.name, initializer)
class ClassTypedef(ClassItem):
    """A typedef entry inside a CGClass; declaration-only."""

    def __init__(self, name, type, visibility="public"):
        self.type = type
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        return 'typedef {0} {1};\n'.format(self.type, self.name)

    def define(self, cgClass):
        # Typedefs never produce an out-of-line definition.
        return ''
class ClassEnum(ClassItem):
    """A C++-style enum inside a CGClass; declaration-only. `values`, when
    given, supplies explicit values for a prefix of the entries."""

    def __init__(self, name, entries, values=None, visibility="public"):
        self.entries = entries
        self.values = values
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        entries = []
        for i, entry in enumerate(self.entries):
            # Pair the entry with its explicit value when one exists.
            if self.values and i < len(self.values):
                entries.append('%s = %s' % (entry, self.values[i]))
            else:
                entries.append('%s' % entry)
        name = '' if not self.name else ' ' + self.name
        return 'enum%s\n{\n  %s\n};\n' % (name, ',\n  '.join(entries))

    def define(self, cgClass):
        return ''
class ClassUnion(ClassItem):
    """A C++-style union inside a CGClass; declaration-only."""

    def __init__(self, name, entries, visibility="public"):
        # Each entry is a declaration; terminate it with a semicolon.
        self.entries = [entry + ";" for entry in entries]
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        return 'union %s\n{\n  %s\n};\n' % (self.name,
                                            '\n  '.join(self.entries))

    def define(self, cgClass):
        return ''
class CGClass(CGThing):
    """Renders a Rust `pub struct` plus an `impl` block from a C++-class-like
    description (bases, members, constructors, destructor, methods, ...).
    Several of the C++ notions (templates, enums, unions, typedefs) are kept
    for compatibility with the Gecko codegen this derives from."""
    def __init__(self, name, bases=[], members=[], constructors=[],
                 destructor=None, methods=[],
                 typedefs = [], enums=[], unions=[], templateArgs=[],
                 templateSpecialization=[], isStruct=False,
                 disallowCopyConstruction=False, indent='',
                 decorators='',
                 extradeclarations='',
                 extradefinitions=''):
        CGThing.__init__(self)
        self.name = name
        self.bases = bases
        self.members = members
        self.constructors = constructors
        # We store our single destructor in a list, since all of our
        # code wants lists of members.
        self.destructors = [destructor] if destructor else []
        self.methods = methods
        self.typedefs = typedefs
        self.enums = enums
        self.unions = unions
        self.templateArgs = templateArgs
        self.templateSpecialization = templateSpecialization
        self.isStruct = isStruct
        self.disallowCopyConstruction = disallowCopyConstruction
        self.indent = indent
        self.decorators = decorators
        self.extradeclarations = extradeclarations
        self.extradefinitions = extradefinitions

    def getNameString(self):
        # Class name plus any template specialization args, e.g. Foo<T>.
        className = self.name
        if self.templateSpecialization:
            className = className + \
                '<%s>' % ', '.join([str(a) for a
                                    in self.templateSpecialization])
        return className

    def define(self):
        """Emit the struct declaration followed by its `impl` block."""
        result = ''
        if self.templateArgs:
            templateArgs = [a.declare() for a in self.templateArgs]
            templateArgs = templateArgs[len(self.templateSpecialization):]
            result = result + self.indent + 'template <%s>\n' \
                     % ','.join([str(a) for a in templateArgs])

        if self.templateSpecialization:
            specialization = \
                '<%s>' % ', '.join([str(a) for a in self.templateSpecialization])
        else:
            specialization = ''

        myself = ''
        if self.decorators != '':
            myself += self.decorators + '\n'
        myself += '%spub struct %s%s' % (self.indent, self.name, specialization)
        result += myself

        assert len(self.bases) == 1 #XXjdm Can we support multiple inheritance?

        result += '{\n%s\n' % self.indent

        if self.bases:
            # The (single) base becomes an ordinary public `parent` field.
            self.members = [ClassMember("parent", self.bases[0].name, "pub")] + self.members

        result += CGIndenter(CGGeneric(self.extradeclarations),
                             len(self.indent)).define()

        def declareMembers(cgClass, memberList):
            # NOTE: the separator strings paired with member lists below are
            # never consulted here; members are simply concatenated.
            result = ''

            for member in memberList:
                declaration = member.declare(cgClass)
                declaration = CGIndenter(CGGeneric(declaration)).define()
                result = result + declaration

            return result

        if self.disallowCopyConstruction:
            # C++-only: emit deleted copy constructor/assignment.
            class DisallowedCopyConstructor(object):
                def __init__(self):
                    self.visibility = "private"

                def declare(self, cgClass):
                    name = cgClass.getNameString()
                    return ("%s(const %s&) MOZ_DELETE;\n"
                            "void operator=(const %s) MOZ_DELETE;\n" % (name, name, name))

            disallowedCopyConstructors = [DisallowedCopyConstructor()]
        else:
            disallowedCopyConstructors = []

        # Struct body: enums, unions, typedefs, then the data members.
        order = [(self.enums, ''), (self.unions, ''),
                 (self.typedefs, ''), (self.members, '')]

        for (memberList, separator) in order:
            memberString = declareMembers(self, memberList)
            if self.indent:
                memberString = CGIndenter(CGGeneric(memberString),
                                          len(self.indent)).define()
            result = result + memberString

        result += self.indent + '}\n\n'
        result += 'impl %s {\n' % self.name

        # NOTE(review): the '\n)' separator below looks like a typo for
        # '\n', but separators are unused by declareMembers, so it is
        # harmless in practice.
        order = [(self.constructors + disallowedCopyConstructors, '\n'),
                 (self.destructors, '\n'), (self.methods, '\n)')]

        for (memberList, separator) in order:
            memberString = declareMembers(self, memberList)
            if self.indent:
                memberString = CGIndenter(CGGeneric(memberString),
                                          len(self.indent)).define()
            result = result + memberString

        result += "}"

        return result
class CGProxySpecialOperation(CGPerSignatureCall):
    """
    Base class for classes for calling an indexed or named special operation
    (don't use this directly, use the derived classes below).
    """
    def __init__(self, descriptor, operation):
        nativeName = MakeNativeName(operation)
        operation = descriptor.operations[operation]
        assert len(operation.signatures()) == 1
        signature = operation.signatures()[0]

        (returnType, arguments) = signature

        # We pass len(arguments) as the final argument so that the
        # CGPerSignatureCall won't do any argument conversion of its own.
        CGPerSignatureCall.__init__(self, returnType, "", arguments, nativeName,
                                    False, descriptor, operation,
                                    len(arguments))

        if operation.isSetter() or operation.isCreator():
            # arguments[0] is the index or name of the item that we're setting.
            argument = arguments[1]
            template, _, declType, needsRooting = getJSToNativeConversionTemplate(
                argument.type, descriptor, treatNullAs=argument.treatNullAs)
            # The value being set comes from the property descriptor.
            templateValues = {
                "val": "(*desc).value",
            }
            self.cgRoot.prepend(instantiateJSToNativeConversionTemplate(
                template, templateValues, declType, argument.identifier.name,
                needsRooting))
        elif operation.isGetter():
            # Getters report whether the index/name exists via this flag;
            # getArguments() below passes it to the call as `&mut found`.
            self.cgRoot.prepend(CGGeneric("let mut found = false;"))

    def getArguments(self):
        # Root interface-typed arguments before passing them to the native
        # method (callbacks are passed as-is).
        def process(arg):
            argVal = arg.identifier.name
            if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
                argVal += ".root_ref()"
            return argVal
        args = [(a, process(a)) for a in self.arguments]
        if self.idlNode.isGetter():
            args.append((FakeArgument(BuiltinTypes[IDLBuiltinType.Types.boolean],
                                      self.idlNode),
                         "&mut found"))
        return args

    def wrap_return_value(self):
        # Only getters with wrapping instructions produce a wrapped result,
        # and only when the property was actually found.
        if not self.idlNode.isGetter() or self.templateValues is None:
            return ""

        wrap = CGGeneric(wrapForType(**self.templateValues))
        wrap = CGIfWrapper(wrap, "found")
        return "\n" + wrap.define()
class CGProxyIndexedGetter(CGProxySpecialOperation):
    """Generates a call to the interface's indexed getter. When
    templateValues is given, the result is wrapped with wrapForType using
    those values."""

    def __init__(self, descriptor, templateValues=None):
        # Set before the base-class init, which builds the call and may
        # consult templateValues while wrapping the return value.
        self.templateValues = templateValues
        CGProxySpecialOperation.__init__(self, descriptor, 'IndexedGetter')
class CGProxyIndexedSetter(CGProxySpecialOperation):
    """Generates a call to the interface's indexed setter."""

    def __init__(self, descriptor):
        CGProxySpecialOperation.__init__(self, descriptor, 'IndexedSetter')
class CGProxyNamedGetter(CGProxySpecialOperation):
    """Generates a call to the interface's named getter. When templateValues
    is given, the result is wrapped with wrapForType using those values."""

    def __init__(self, descriptor, templateValues=None):
        # Set before the base-class init, which builds the call and may
        # consult templateValues while wrapping the return value.
        self.templateValues = templateValues
        CGProxySpecialOperation.__init__(self, descriptor, 'NamedGetter')
class CGProxyNamedSetter(CGProxySpecialOperation):
    """Generates a call to the interface's named setter."""

    def __init__(self, descriptor):
        CGProxySpecialOperation.__init__(self, descriptor, 'NamedSetter')
class CGProxyUnwrap(CGAbstractMethod):
    """Generates the always-inline `UnwrapProxy` helper: turns a proxy
    JSObject back into a raw pointer to the DOM object stored in the proxy's
    private slot."""
    def __init__(self, descriptor):
        args = [Argument('*mut JSObject', 'obj')]
        CGAbstractMethod.__init__(self, descriptor, "UnwrapProxy", '*const ' + descriptor.concreteType, args, alwaysInline=True)

    def definition_body(self):
        # The commented-out fragments mirror Gecko's Xray-wrapper handling,
        # which is not implemented here.
        return CGGeneric("""/*if (xpc::WrapperFactory::IsXrayWrapper(obj)) {
    obj = js::UnwrapObject(obj);
}*/
//MOZ_ASSERT(IsProxy(obj));
let box_ = GetProxyPrivate(obj).to_private() as *const %s;
return box_;""" % self.descriptor.concreteType)
class CGDOMJSProxyHandler_getOwnPropertyDescriptor(CGAbstractExternMethod):
    """Generates the proxy handler's getOwnPropertyDescriptor hook: combines
    indexed/named getter and setter lookups with the expando object."""
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'proxy'),
                Argument('jsid', 'id'), Argument('JSBool', 'set'),
                Argument('*mut JSPropertyDescriptor', 'desc')]
        CGAbstractExternMethod.__init__(self, descriptor, "getOwnPropertyDescriptor",
                                        "JSBool", args)
        self.descriptor = descriptor

    def getBody(self):
        indexedGetter = self.descriptor.operations['IndexedGetter']
        indexedSetter = self.descriptor.operations['IndexedSetter']

        setOrIndexedGet = ""
        if indexedGetter or indexedSetter:
            setOrIndexedGet += "let index = GetArrayIndexFromId(cx, id);\n"

        if indexedGetter:
            # The descriptor is read-only unless an indexed setter exists.
            readonly = toStringBool(self.descriptor.operations['IndexedSetter'] is None)
            fillDescriptor = "FillPropertyDescriptor(&mut *desc, proxy, %s);\nreturn 1;" % readonly
            templateValues = {'jsvalRef': '(*desc).value', 'successCode': fillDescriptor}
            get = ("if index.is_some() {\n" +
                   "    let index = index.unwrap();\n" +
                   "    let this = UnwrapProxy(proxy);\n" +
                   "    let this = JS::from_raw(this);\n" +
                   "    let this = this.root();\n" +
                   CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define() + "\n" +
                   "}\n")

        if indexedSetter or self.descriptor.operations['NamedSetter']:
            # Setter path: pretend the property exists and is writable.
            setOrIndexedGet += "if set != 0 {\n"
            if indexedSetter:
                setOrIndexedGet += ("    if index.is_some() {\n" +
                                    "        let index = index.unwrap();\n")
                if not 'IndexedCreator' in self.descriptor.operations:
                    # FIXME need to check that this is a 'supported property index'
                    assert False
                setOrIndexedGet += ("        FillPropertyDescriptor(&mut *desc, proxy, false);\n" +
                                    "        return 1;\n" +
                                    "    }\n")
            if self.descriptor.operations['NamedSetter']:
                # NOTE(review): emits a bare `RUST_JSID_IS_STRING(id)` (a
                # JSBool) as a Rust condition, unlike the `!= 0` form used
                # elsewhere — confirm generated code compiles on this path.
                setOrIndexedGet += "    if RUST_JSID_IS_STRING(id) {\n"
                if not 'NamedCreator' in self.descriptor.operations:
                    # FIXME need to check that this is a 'supported property name'
                    assert False
                setOrIndexedGet += ("        FillPropertyDescriptor(&mut *desc, proxy, false);\n" +
                                    "        return 1;\n" +
                                    "    }\n")
            setOrIndexedGet += "}"
            if indexedGetter:
                setOrIndexedGet += (" else {\n" +
                                    CGIndenter(CGGeneric(get)).define() +
                                    "}")
            setOrIndexedGet += "\n\n"
        elif indexedGetter:
            setOrIndexedGet += ("if set == 0 {\n" +
                                CGIndenter(CGGeneric(get)).define() +
                                "}\n\n")

        namedGetter = self.descriptor.operations['NamedGetter']
        if namedGetter:
            readonly = toStringBool(self.descriptor.operations['NamedSetter'] is None)
            fillDescriptor = "FillPropertyDescriptor(&mut *desc, proxy, %s);\nreturn 1;" % readonly
            templateValues = {'jsvalRef': '(*desc).value', 'successCode': fillDescriptor}
            # Once we start supporting OverrideBuiltins we need to make
            # ResolveOwnProperty or EnumerateOwnProperties filter out named
            # properties that shadow prototype properties.
            namedGet = ("\n" +
                        "if set == 0 && RUST_JSID_IS_STRING(id) != 0 && !HasPropertyOnPrototype(cx, proxy, id) {\n" +
                        "    let name = jsid_to_str(cx, id);\n" +
                        "    let this = UnwrapProxy(proxy);\n" +
                        "    let this = JS::from_raw(this);\n" +
                        "    let this = this.root();\n" +
                        CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() + "\n" +
                        "}\n")
        else:
            namedGet = ""

        # Fall back to the expando object, then the named getter, then an
        # "absent" descriptor (obj == null).
        return setOrIndexedGet + """let expando: *mut JSObject = GetExpandoObject(proxy);
//if (!xpc::WrapperFactory::IsXrayWrapper(proxy) && (expando = GetExpandoObject(proxy))) {
if expando.is_not_null() {
    let flags = if set != 0 { JSRESOLVE_ASSIGNING } else { 0 } | JSRESOLVE_QUALIFIED;
    if JS_GetPropertyDescriptorById(cx, expando, id, flags, desc) == 0 {
        return 0;
    }
    if (*desc).obj.is_not_null() {
        // Pretend the property lives on the wrapper.
        (*desc).obj = proxy;
        return 1;
    }
}
""" + namedGet + """
(*desc).obj = ptr::mut_null();
return 1;"""

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_defineProperty(CGAbstractExternMethod):
    """Generates the proxy handler's defineProperty hook, routing indexed and
    named defines through the interface's setters when present."""
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'proxy'),
                Argument('jsid', 'id'),
                Argument('*const JSPropertyDescriptor', 'desc')]
        CGAbstractExternMethod.__init__(self, descriptor, "defineProperty", "JSBool", args)
        self.descriptor = descriptor

    def getBody(self):
        set = ""

        indexedSetter = self.descriptor.operations['IndexedSetter']
        if indexedSetter:
            # Only the setter-doubles-as-creator case is supported.
            if not (self.descriptor.operations['IndexedCreator'] is indexedSetter):
                raise TypeError("Can't handle creator that's different from the setter")

            set += ("let index = GetArrayIndexFromId(cx, id);\n" +
                    "if index.is_some() {\n" +
                    "    let index = index.unwrap();\n" +
                    "    let this = UnwrapProxy(proxy);\n" +
                    "    let this = JS::from_raw(this);\n" +
                    "    let this = this.root();\n" +
                    CGIndenter(CGProxyIndexedSetter(self.descriptor)).define() +
                    "    return 1;\n" +
                    "}\n")
        elif self.descriptor.operations['IndexedGetter']:
            # Indexed getter without a setter: reject indexed defines.
            set += ("if GetArrayIndexFromId(cx, id).is_some() {\n" +
                    "    return 0;\n" +
                    "    //return ThrowErrorMessage(cx, MSG_NO_PROPERTY_SETTER, \"%s\");\n" +
                    "}\n") % self.descriptor.name

        namedSetter = self.descriptor.operations['NamedSetter']
        if namedSetter:
            if not self.descriptor.operations['NamedCreator'] is namedSetter:
                raise TypeError("Can't handle creator that's different from the setter")
            set += ("if RUST_JSID_IS_STRING(id) != 0 {\n" +
                    "    let name = jsid_to_str(cx, id);\n" +
                    "    let this = UnwrapProxy(proxy);\n" +
                    "    let this = JS::from_raw(this);\n" +
                    "    let this = this.root();\n" +
                    CGIndenter(CGProxyNamedSetter(self.descriptor)).define() + "\n" +
                    "}\n")
        elif self.descriptor.operations['NamedGetter']:
            # NOTE(review): unlike the branch above, this emits a bare
            # `if RUST_JSID_IS_STRING(id)` without `!= 0` — confirm the
            # generated Rust compiles for interfaces taking this path.
            set += ("if RUST_JSID_IS_STRING(id) {\n" +
                    "    let name = jsid_to_str(cx, id);\n" +
                    "    let this = UnwrapProxy(proxy);\n" +
                    "    let this = JS::from_raw(this);\n" +
                    "    let this = this.root();\n" +
                    CGIndenter(CGProxyNamedGetter(self.descriptor)).define() +
                    "    if (found) {\n"
                    "        return 0;\n" +
                    "        //return ThrowErrorMessage(cx, MSG_NO_PROPERTY_SETTER, \"%s\");\n" +
                    "    }\n" +
                    "    return 1;\n"
                    "}\n") % (self.descriptor.name)
        # Anything not handled above is delegated to the shared helper.
        return set + """return proxyhandler::defineProperty_(%s);""" % ", ".join(a.name for a in self.args)

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_hasOwn(CGAbstractExternMethod):
    """Generates the proxy handler's hasOwn hook: checks indexed/named
    getters, then the expando object, reporting presence through `*bp`."""
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'proxy'),
                Argument('jsid', 'id'), Argument('*mut JSBool', 'bp')]
        CGAbstractExternMethod.__init__(self, descriptor, "hasOwn", "JSBool", args)
        self.descriptor = descriptor

    def getBody(self):
        indexedGetter = self.descriptor.operations['IndexedGetter']
        if indexedGetter:
            # The generated getter call sets `found`; we only need that flag.
            indexed = ("let index = GetArrayIndexFromId(cx, id);\n" +
                       "if index.is_some() {\n" +
                       "    let index = index.unwrap();\n" +
                       "    let this = UnwrapProxy(proxy);\n" +
                       "    let this = JS::from_raw(this);\n" +
                       "    let this = this.root();\n" +
                       CGIndenter(CGProxyIndexedGetter(self.descriptor)).define() + "\n" +
                       "    *bp = found as JSBool;\n" +
                       "    return 1;\n" +
                       "}\n\n")
        else:
            indexed = ""

        namedGetter = self.descriptor.operations['NamedGetter']
        if namedGetter:
            named = ("if RUST_JSID_IS_STRING(id) != 0 && !HasPropertyOnPrototype(cx, proxy, id) {\n" +
                     "    let name = jsid_to_str(cx, id);\n" +
                     "    let this = UnwrapProxy(proxy);\n" +
                     "    let this = JS::from_raw(this);\n" +
                     "    let this = this.root();\n" +
                     CGIndenter(CGProxyNamedGetter(self.descriptor)).define() + "\n" +
                     "    *bp = found as JSBool;\n"
                     "    return 1;\n"
                     "}\n" +
                     "\n")
        else:
            named = ""

        # Expando check sits between the indexed and named paths; a property
        # found there short-circuits with the expando's answer.
        return indexed + """let expando: *mut JSObject = GetExpandoObject(proxy);
if expando.is_not_null() {
    let mut b: JSBool = 1;
    let ok: JSBool = JS_HasPropertyById(cx, expando, id, &mut b);
    *bp = !!b;
    if ok == 0 || *bp != 0 {
        return ok;
    }
}

""" + named + """*bp = 0;
return 1;"""

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_get(CGAbstractExternMethod):
    """Generates the proxy handler's get hook: indexed getter first (when
    present), then the expando object, then the prototype chain; undefined
    otherwise."""
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'proxy'),
                Argument('*mut JSObject', 'receiver'), Argument('jsid', 'id'),
                Argument('*mut JSVal', 'vp')]
        CGAbstractExternMethod.__init__(self, descriptor, "get", "JSBool", args)
        self.descriptor = descriptor

    def getBody(self):
        getFromExpando = """let expando = GetExpandoObject(proxy);
if expando.is_not_null() {
    let mut hasProp = 0;
    if JS_HasPropertyById(cx, expando, id, &mut hasProp) == 0 {
        return 0;
    }
    if hasProp != 0 {
        return JS_GetPropertyById(cx, expando, id, vp);
    }
}"""

        templateValues = {'jsvalRef': '*vp'}

        indexedGetter = self.descriptor.operations['IndexedGetter']
        if indexedGetter:
            getIndexedOrExpando = ("let index = GetArrayIndexFromId(cx, id);\n" +
                                   "if index.is_some() {\n" +
                                   "    let index = index.unwrap();\n" +
                                   "    let this = UnwrapProxy(proxy);\n" +
                                   "    let this = JS::from_raw(this);\n" +
                                   "    let this = this.root();\n" +
                                   CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define())
            getIndexedOrExpando += """
    // Even if we don't have this index, we don't forward the
    // get on to our expando object.
} else {
    %s
}
""" % (stripTrailingWhitespace(getFromExpando.replace('\n', '\n    ')))
        else:
            getIndexedOrExpando = getFromExpando + "\n"

        namedGetter = self.descriptor.operations['NamedGetter']
        # The named-getter fast path is intentionally disabled (unfinished).
        if namedGetter and False: #XXXjdm unfinished
            getNamed = ("if (JSID_IS_STRING(id)) {\n" +
                        "    let name = jsid_to_str(cx, id);\n" +
                        "    let this = UnwrapProxy(proxy);\n" +
                        "    let this = JS::from_raw(this);\n" +
                        "    let this = this.root();\n" +
                        CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() +
                        "}\n") % (self.descriptor.concreteType)
        else:
            getNamed = ""

        return """//MOZ_ASSERT(!xpc::WrapperFactory::IsXrayWrapper(proxy),
//"Should not have a XrayWrapper here");
%s
let mut found = false;
if !GetPropertyOnPrototype(cx, proxy, id, &mut found, vp) {
    return 0;
}
if found {
    return 1;
}
%s
*vp = UndefinedValue();
return 1;""" % (getIndexedOrExpando, getNamed)

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_obj_toString(CGAbstractExternMethod):
    """Generates the proxy handler's obj_toString hook, producing the
    "[object Interface]" string via the shared _obj_toString helper."""
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('*mut JSObject', 'proxy')]
        CGAbstractExternMethod.__init__(self, descriptor, "obj_toString", "*mut JSString", args)
        self.descriptor = descriptor

    def getBody(self):
        stringifier = self.descriptor.operations['Stringifier']
        if stringifier:
            # NOTE(review): this branch still emits Gecko-style C++
            # (xpc_qsStringToJsstring, NULL) rather than Rust — it looks
            # vestigial; confirm no descriptor reaches it.
            nativeName = MakeNativeName(stringifier.identifier.name)
            signature = stringifier.signatures()[0]
            returnType = signature[0]
            extendedAttributes = self.descriptor.getExtendedAttributes(stringifier)
            infallible = 'infallible' in extendedAttributes
            if not infallible:
                error = CGGeneric(
                    ('ThrowMethodFailedWithDetails(cx, rv, "%s", "toString");\n' +
                     "return NULL;") % self.descriptor.interface.identifier.name)
            else:
                error = None
            call = CGCallGenerator(error, [], "", returnType, extendedAttributes, self.descriptor, nativeName, False, object="UnwrapProxy(proxy)")
            return call.define() + """
JSString* jsresult;
return xpc_qsStringToJsstring(cx, result, &jsresult) ? jsresult : NULL;"""

        return """let s = "%s".to_c_str();
_obj_toString(cx, s.as_ptr())""" % self.descriptor.name

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGAbstractClassHook(CGAbstractExternMethod):
    """
    Meant for implementing JSClass hooks, like Finalize or Trace. Does very raw
    'this' unwrapping as it assumes that the unwrapped type is always known.
    """

    def __init__(self, descriptor, name, returnType, args):
        CGAbstractExternMethod.__init__(self, descriptor, name, returnType, args)

    def definition_body_prologue(self):
        # Recover the typed native pointer from the JSObject.
        concrete = self.descriptor.concreteType
        return CGGeneric("""\
let this: *const %s = unwrap::<%s>(obj);
""" % (concrete, concrete))

    def definition_body(self):
        prologue = self.definition_body_prologue()
        return CGList([prologue, self.generate_code()])

    def generate_code(self):
        # Subclasses must override.
        assert False
def finalizeHook(descriptor, hookName, context):
    """Return the Rust body that reclaims the native object on finalize: the
    Box is reconstituted from the reserved slot so Rust drops it."""
    ty = descriptor.concreteType
    return ("""let val = JS_GetReservedSlot(obj, dom_object_slot(obj));
let _: Box<%s> = mem::transmute(val.to_private());
debug!("%s finalize: {:p}", this);
""" % (ty, ty))
class CGClassTraceHook(CGAbstractClassHook):
    """
    Trace hook for our native object; invoked during GC and cycle collection.
    """
    def __init__(self, descriptor):
        arguments = [
            Argument('*mut JSTracer', 'trc'),
            Argument('*mut JSObject', 'obj'),
        ]
        CGAbstractClassHook.__init__(self, descriptor, TRACE_HOOK_NAME,
                                     'void', arguments)

    def generate_code(self):
        # Forward the tracer argument to the native trace implementation.
        tracer = self.args[0].name
        return CGGeneric("(*this).trace(%s);" % tracer)
class CGClassConstructHook(CGAbstractExternMethod):
    """
    The JS-visible constructor hook for our objects.
    """
    def __init__(self, descriptor):
        arguments = [
            Argument('*mut JSContext', 'cx'),
            Argument('u32', 'argc'),
            Argument('*mut JSVal', 'vp'),
        ]
        CGAbstractExternMethod.__init__(self, descriptor, CONSTRUCT_HOOK_NAME,
                                        'JSBool', arguments)
        self._ctor = self.descriptor.interface.ctor()

    def define(self):
        # Interfaces without a [Constructor] produce no hook at all.
        if self._ctor:
            return CGAbstractExternMethod.define(self)
        return ""

    def definition_body(self):
        # Recover the global from the callee before dispatching to the
        # native constructor.
        preamble = CGGeneric(
            "let global = global_object_for_js_object(JS_CALLEE(cx, vp).to_object());\n"
            "let global = global.root();\n")
        name = MakeNativeName(self._ctor.identifier.name)
        call = CGMethodCall(["&global.root_ref()"], name, True,
                            self.descriptor, self._ctor)
        return CGList([preamble, call])
class CGClassFinalizeHook(CGAbstractClassHook):
    """
    Finalize hook: releases the native object backing a reflector.
    """
    def __init__(self, descriptor):
        arguments = [
            Argument('*mut JSFreeOp', 'fop'),
            Argument('*mut JSObject', 'obj'),
        ]
        CGAbstractClassHook.__init__(self, descriptor, FINALIZE_HOOK_NAME,
                                     'void', arguments)

    def generate_code(self):
        body = finalizeHook(self.descriptor, self.name, self.args[0].name)
        return CGGeneric(body)
class CGDOMJSProxyHandlerDOMClass(CGThing):
    """Emits the static DOMClass instance used by the proxy handler."""
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        return "\nstatic Class: DOMClass = %s;\n" % DOMClass(self.descriptor)
class CGInterfaceTrait(CGThing):
    """
    Generates the `FooMethods` Rust trait with one method per IDL member:
    regular methods (including overloads), attribute getters/setters, and --
    for proxies -- the special indexed/named operations.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)

        def argument_type(ty, optional=False, defaultValue=None, variadic=False):
            # Map an IDL argument type to its Rust declaration string.
            _, _, declType, _ = getJSToNativeConversionTemplate(
                ty, descriptor, isArgument=True)
            # Variadics become Vec<T>; optionals without a default, Option<T>.
            if variadic:
                declType = CGWrapper(declType, pre="Vec<", post=">")
            elif optional and not defaultValue:
                declType = CGWrapper(declType, pre="Option<", post=">")
            # Interfaces and dictionaries are passed by reference.
            if ty.isGeckoInterface() and not (ty.nullable() or optional):
                declType = CGWrapper(declType, pre="&")
            elif ty.isDictionary():
                declType = CGWrapper(declType, pre="&")
            return declType.define()

        def attribute_arguments(needCx, argument=None):
            # Yield (name, type) pairs for an attribute accessor; the setter
            # passes its new value as `argument`.
            if needCx:
                yield "cx", "*mut JSContext"
            if argument:
                yield "value", argument_type(argument)

        def method_arguments(returnType, arguments, trailing=None):
            # Yield (name, type) pairs for a regular method signature,
            # prefixing a JSContext when the signature requires one.
            if needCx(returnType, arguments, True):
                yield "cx", "*mut JSContext"
            for argument in arguments:
                ty = argument_type(argument.type, argument.optional,
                                   argument.defaultValue, argument.variadic)
                yield CGDictionary.makeMemberName(argument.identifier.name), ty
            if trailing:
                yield trailing

        def return_type(rettype, infallible):
            # Fallible members return Fallible<T> instead of plain T.
            result = getRetvalDeclarationForType(rettype, descriptor)
            if not infallible:
                result = CGWrapper(result, pre="Fallible<", post=">")
            return result.define()

        def members():
            # Yield (name, arguments, return type) for every trait method.
            for m in descriptor.interface.members:
                if m.isMethod() and not m.isStatic():
                    name = CGSpecializedMethod.makeNativeName(descriptor, m)
                    infallible = 'infallible' in descriptor.getExtendedAttributes(m)
                    for idx, (rettype, arguments) in enumerate(m.signatures()):
                        arguments = method_arguments(rettype, arguments)
                        rettype = return_type(rettype, infallible)
                        # Overloads after the first get '_'-suffixed names.
                        yield name + ('_' * idx), arguments, rettype
                elif m.isAttr() and not m.isStatic():
                    name = CGSpecializedGetter.makeNativeName(descriptor, m)
                    infallible = 'infallible' in descriptor.getExtendedAttributes(m, getter=True)
                    needCx = typeNeedsCx(m.type)
                    yield name, attribute_arguments(needCx), return_type(m.type, infallible)
                    if not m.readonly:
                        name = CGSpecializedSetter.makeNativeName(descriptor, m)
                        infallible = 'infallible' in descriptor.getExtendedAttributes(m, setter=True)
                        # Setters return unit on success; fallible ones use
                        # ErrorResult.
                        if infallible:
                            rettype = "()"
                        else:
                            rettype = "ErrorResult"
                        yield name, attribute_arguments(needCx, m.type), rettype
            if descriptor.proxy:
                # Proxy operations (indexed/named getters etc.) take an extra
                # trailing `found: &mut bool` out-parameter.
                for name, operation in descriptor.operations.iteritems():
                    if not operation:
                        continue
                    assert len(operation.signatures()) == 1
                    rettype, arguments = operation.signatures()[0]
                    infallible = 'infallible' in descriptor.getExtendedAttributes(operation)
                    arguments = method_arguments(rettype, arguments, ("found", "&mut bool"))
                    rettype = return_type(rettype, infallible)
                    yield name, arguments, rettype

        def fmt(arguments):
            # Note: this consumes the (single-use) argument generators above.
            return "".join(", %s: %s" % argument for argument in arguments)

        methods = CGList([
            CGGeneric("fn %s(&self%s) -> %s;\n" % (name, fmt(arguments), rettype))
            for name, arguments, rettype in members()
        ], "")
        self.cgRoot = CGWrapper(CGIndenter(methods),
                                pre="pub trait %sMethods {\n" % descriptor.interface.identifier.name,
                                post="}")

    def define(self):
        return self.cgRoot.define()
class CGDescriptor(CGThing):
    """
    Bundles every piece of generated code for a single non-callback
    interface: specialized methods/getters/setters plus JIT info, the
    generic entry points, class hooks, property arrays, constants, proxy
    machinery (when applicable), and the interface trait.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        assert not descriptor.interface.isCallback()
        cgThings = []
        cgThings.append(CGGetProtoObjectMethod(descriptor))
        if descriptor.interface.hasInterfaceObject():
            # https://github.com/mozilla/servo/issues/2665
            # cgThings.append(CGGetConstructorObjectMethod(descriptor))
            pass
        # Track which kinds of members we saw, to decide below which generic
        # (shared) entry points need to be emitted.
        (hasMethod, hasGetter, hasLenientGetter,
         hasSetter, hasLenientSetter) = False, False, False, False, False
        for m in descriptor.interface.members:
            if m.isMethod() and not m.isIdentifierLess():
                if m.isStatic():
                    assert descriptor.interface.hasInterfaceObject()
                    cgThings.append(CGStaticMethod(descriptor, m))
                else:
                    cgThings.append(CGSpecializedMethod(descriptor, m))
                    cgThings.append(CGMemberJITInfo(descriptor, m))
                    hasMethod = True
            elif m.isAttr():
                if m.isStatic():
                    assert descriptor.interface.hasInterfaceObject()
                    cgThings.append(CGStaticGetter(descriptor, m))
                else:
                    cgThings.append(CGSpecializedGetter(descriptor, m))
                    if m.hasLenientThis():
                        hasLenientGetter = True
                    else:
                        hasGetter = True
                if not m.readonly:
                    if m.isStatic():
                        assert descriptor.interface.hasInterfaceObject()
                        cgThings.append(CGStaticSetter(descriptor, m))
                    else:
                        cgThings.append(CGSpecializedSetter(descriptor, m))
                        if m.hasLenientThis():
                            hasLenientSetter = True
                        else:
                            hasSetter = True
                # JIT info covers the getter (and setter, if any) together.
                if not m.isStatic():
                    cgThings.append(CGMemberJITInfo(descriptor, m))
        if hasMethod:
            cgThings.append(CGGenericMethod(descriptor))
        if hasGetter:
            cgThings.append(CGGenericGetter(descriptor))
        # Lenient-this variants are not generated yet; the flags are kept so
        # the structure mirrors the non-lenient cases.
        if hasLenientGetter:
            pass
        if hasSetter:
            cgThings.append(CGGenericSetter(descriptor))
        if hasLenientSetter:
            pass
        if descriptor.concrete:
            cgThings.append(CGClassFinalizeHook(descriptor))
            cgThings.append(CGClassTraceHook(descriptor))
        if descriptor.interface.hasInterfaceObject():
            cgThings.append(CGClassConstructHook(descriptor))
            cgThings.append(CGInterfaceObjectJSClass(descriptor))
        cgThings.append(CGPrototypeJSClass(descriptor))
        properties = PropertyArrays(descriptor)
        cgThings.append(CGGeneric(str(properties)))
        cgThings.append(CGNativeProperties(descriptor, properties))
        cgThings.append(CGCreateInterfaceObjectsMethod(descriptor, properties))
        # Constants get their own FooConstants module.
        cgThings.append(CGNamespace.build([descriptor.name + "Constants"],
                                          CGConstant(m for m in descriptor.interface.members if m.isConst()),
                                          public=True))
        if descriptor.interface.hasInterfaceObject():
            cgThings.append(CGDefineDOMInterfaceMethod(descriptor))
        if descriptor.proxy:
            cgThings.append(CGDefineProxyHandler(descriptor))
        if descriptor.concrete:
            if descriptor.proxy:
                # Proxy-based interfaces need the full proxy handler suite.
                #cgThings.append(CGProxyIsProxy(descriptor))
                cgThings.append(CGProxyUnwrap(descriptor))
                cgThings.append(CGDOMJSProxyHandlerDOMClass(descriptor))
                cgThings.append(CGDOMJSProxyHandler_getOwnPropertyDescriptor(descriptor))
                cgThings.append(CGDOMJSProxyHandler_obj_toString(descriptor))
                cgThings.append(CGDOMJSProxyHandler_get(descriptor))
                cgThings.append(CGDOMJSProxyHandler_hasOwn(descriptor))
                if descriptor.operations['IndexedSetter'] or descriptor.operations['NamedSetter']:
                    cgThings.append(CGDOMJSProxyHandler_defineProperty(descriptor))
                #cgThings.append(CGDOMJSProxyHandler(descriptor))
                #cgThings.append(CGIsMethod(descriptor))
                pass
            else:
                cgThings.append(CGDOMJSClass(descriptor))
                pass
            cgThings.append(CGWrapMethod(descriptor))
        cgThings.append(CGIDLInterface(descriptor))
        cgThings.append(CGInterfaceTrait(descriptor))
        cgThings = CGList(cgThings, "\n")
        cgThings = CGWrapper(cgThings, pre='\n', post='\n')
        #self.cgRoot = CGWrapper(CGNamespace(toBindingNamespace(descriptor.name),
        #                                    cgThings),
        #                        post='\n')
        self.cgRoot = cgThings

    def define(self):
        return self.cgRoot.define()
class CGNamespacedEnum(CGThing):
    """
    Generates a Rust enum wrapped in its own namespace (module), with a
    trailing `<enumName>Count` member recording the number of real entries.

    `values` may be None/empty or shorter than `names`; a missing or None
    value falls back to the enum's implicit numbering.  `deriving`, when
    given, becomes a `#[deriving(...)]` attribute on the enum.
    """
    def __init__(self, namespace, enumName, names, values, comment="", deriving=""):
        if not values:
            values = []

        # Build the entry list, honouring explicit values where given.
        # (Idiomatic enumerate loop replaces the old range(len(...)) loop.)
        entries = []
        for i, name in enumerate(names):
            value = values[i] if i < len(values) else None
            if value is not None:
                entries.append("%s = %s" % (name, value))
            else:
                entries.append(name)

        # Append a Count entry so generated code can size arrays by it.
        entries.append(enumName + 'Count = ' + str(len(entries)))

        # Indent.
        entries = [' ' + e for e in entries]

        # Build the enum body.
        enumstr = comment + 'pub enum %s {\n%s\n}\n' % (enumName, ',\n'.join(entries))
        if deriving:
            enumstr = ('#[deriving(%s)]\n' % deriving) + enumstr
        curr = CGGeneric(enumstr)

        # Add some whitespace padding.
        curr = CGWrapper(curr, pre='\n', post='\n')

        # Add the namespace.
        curr = CGNamespace(namespace, curr, public=True)

        # Add the typedef
        #typedef = '\ntypedef %s::%s %s;\n\n' % (namespace, enumName, enumName)
        #curr = CGList([curr, CGGeneric(typedef)])

        # Save the result.
        self.node = curr

    def define(self):
        return self.node.define()
class CGDictionary(CGThing):
    """
    Generates the Rust struct for an IDL dictionary plus the impl that
    converts a JS value into it.
    """
    def __init__(self, dictionary, descriptorProvider):
        self.dictionary = dictionary;
        # Only generatable if every dictionary we depend on (parent and
        # dictionary-typed members) is generatable too.
        if all(CGDictionary(d, descriptorProvider).generatable for
               d in CGDictionary.getDictionaryDependencies(dictionary)):
            self.generatable = True
        else:
            self.generatable = False
            # Nothing else to do here
            return
        # Per-member JS->native conversion templates, computed up front and
        # consumed by struct() and impl().
        self.memberInfo = [
            (member,
             getJSToNativeConversionTemplate(member.type,
                                             descriptorProvider,
                                             isMember="Dictionary",
                                             defaultValue=member.defaultValue,
                                             failureCode="return Err(());",
                                             exceptionCode="return Err(());"))
            for member in dictionary.members ]

    def define(self):
        if not self.generatable:
            return ""
        return self.struct() + "\n" + self.impl()

    def struct(self):
        # Rust struct declaration; a parent dictionary is embedded as a
        # leading `parent` field rather than using inheritance.
        d = self.dictionary
        if d.parent:
            inheritance = " pub parent: %s::%s<'a, 'b>,\n" % (self.makeModuleName(d.parent),
                                                              self.makeClassName(d.parent))
        else:
            inheritance = ""
        memberDecls = [" pub %s: %s," %
                       (self.makeMemberName(m[0].identifier.name), self.getMemberType(m))
                       for m in self.memberInfo]
        return (string.Template(
                "pub struct ${selfName}<'a, 'b> {\n" +
                "${inheritance}" +
                "\n".join(memberDecls) + "\n" +
                "}").substitute( { "selfName": self.makeClassName(d),
                                   "inheritance": inheritance }))

    def impl(self):
        # The impl with empty() and the fallible new(cx, val) constructor.
        d = self.dictionary
        if d.parent:
            # Initialize the embedded parent first; bail out on failure.
            initParent = ("parent: match %s::%s::new(cx, val) {\n"
                          " Ok(parent) => parent,\n"
                          " Err(_) => return Err(()),\n"
                          "},\n") % (self.makeModuleName(d.parent),
                                     self.makeClassName(d.parent))
        else:
            initParent = ""

        def memberInit(memberInfo):
            # One struct-literal field initializer per member.
            member, _ = memberInfo
            name = self.makeMemberName(member.identifier.name)
            conversion = self.getMemberConversion(memberInfo)
            return CGGeneric("%s: %s,\n" % (name, conversion.define()))

        memberInits = CGList([memberInit(m) for m in self.memberInfo])

        return string.Template(
            "impl<'a, 'b> ${selfName}<'a, 'b> {\n"
            " pub fn empty() -> ${selfName}<'a, 'b> {\n"
            " ${selfName}::new(ptr::mut_null(), NullValue()).unwrap()\n"
            " }\n"
            " pub fn new(cx: *mut JSContext, val: JSVal) -> Result<${selfName}<'a, 'b>, ()> {\n"
            " let object = if val.is_null_or_undefined() {\n"
            " ptr::mut_null()\n"
            " } else if val.is_object() {\n"
            " val.to_object()\n"
            " } else {\n"
            " //XXXjdm throw properly here\n"
            " return Err(());\n"
            " };\n"
            " Ok(${selfName} {\n"
            "${initParent}"
            "${initMembers}"
            " })\n"
            " }\n"
            "}").substitute({
                "selfName": self.makeClassName(d),
                "initParent": CGIndenter(CGGeneric(initParent), indentLevel=6).define(),
                "initMembers": CGIndenter(memberInits, indentLevel=6).define(),
            })

    @staticmethod
    def makeDictionaryName(dictionary):
        return dictionary.identifier.name

    def makeClassName(self, dictionary):
        return self.makeDictionaryName(dictionary)

    @staticmethod
    def makeModuleName(dictionary):
        # FooInit dictionaries live in the binding module generated for Foo.
        name = dictionary.identifier.name
        if name.endswith('Init'):
            return toBindingNamespace(name.replace('Init', ''))
        #XXXjdm This breaks on the test webidl files, sigh.
        #raise TypeError("No idea how to find this dictionary's definition: " + name)
        return "/* uh oh */ %s" % name

    def getMemberType(self, memberInfo):
        # Members without a default value are optional => Option<T>.
        member, (_, _, declType, _) = memberInfo
        if not member.defaultValue:
            declType = CGWrapper(declType, pre="Option<", post=">")
        return declType.define()

    def getMemberConversion(self, memberInfo):
        # Build the match expression that reads one property off the JS
        # object and converts it, using the default (or None) when absent.
        def indent(s):
            return CGIndenter(CGGeneric(s), 8).define()

        member, (templateBody, default, declType, _) = memberInfo
        replacements = { "val": "value" }
        conversion = string.Template(templateBody).substitute(replacements)

        assert (member.defaultValue is None) == (default is None)
        if not default:
            # No default: present values are wrapped in Some, absent => None.
            default = "None"
            conversion = "Some(%s)" % conversion

        conversion = (
            "match get_dictionary_property(cx, object, \"%s\") {\n"
            " Err(()) => return Err(()),\n"
            " Ok(Some(value)) => {\n"
            "%s\n"
            " },\n"
            " Ok(None) => {\n"
            "%s\n"
            " },\n"
            "}") % (member.identifier.name, indent(conversion), indent(default))
        return CGGeneric(conversion)

    @staticmethod
    def makeIdName(name):
        return name + "_id"

    @staticmethod
    def makeMemberName(name):
        # Can't use Rust keywords as member names.
        if name == "type":
            return name + "_"
        return name

    @staticmethod
    def getDictionaryDependencies(dictionary):
        # Dependencies are the parent plus any dictionary-typed members.
        deps = set();
        if dictionary.parent:
            deps.add(dictionary.parent)
        for member in dictionary.members:
            if member.type.isDictionary():
                deps.add(member.type.unroll().inner)
        return deps
class CGRegisterProtos(CGAbstractMethod):
    """Generates Register(), which defines every DOM interface on a global."""
    def __init__(self, config):
        args = [
            Argument('*mut JSContext', 'cx'),
            Argument('*mut JSObject', 'global'),
        ]
        CGAbstractMethod.__init__(self, None, 'Register', 'void', args,
                                  unsafe=False, pub=True)
        self.config = config

    def definition_body(self):
        descriptors = self.config.getDescriptors(hasInterfaceObject=True,
                                                 register=True)
        lines = [
            CGGeneric("codegen::Bindings::%sBinding::DefineDOMInterface(cx, global);" % desc.name)
            for desc in descriptors
        ]
        return CGList(lines, "\n")
class CGRegisterProxyHandlersMethod(CGAbstractMethod):
    """Generates RegisterProxyHandlers(), which fills the proxy_handlers table."""
    def __init__(self, descriptors):
        CGAbstractMethod.__init__(self, None, 'RegisterProxyHandlers', 'void', [],
                                  unsafe=True, pub=True)
        self.descriptors = descriptors

    def definition_body(self):
        template = ("proxy_handlers[proxies::%s as uint] = "
                    "codegen::Bindings::%sBinding::DefineProxyHandler();")
        lines = [CGGeneric(template % (desc.name, desc.name))
                 for desc in self.descriptors]
        return CGList(lines, "\n")
class CGRegisterProxyHandlers(CGThing):
    """Emits the proxy_handlers table plus the method that populates it."""
    def __init__(self, config):
        descriptors = config.getDescriptors(proxy=True)
        count = len(descriptors)
        table = CGGeneric(
            "pub static mut proxy_handlers: [*const libc::c_void, ..%d] = [0 as *const libc::c_void, ..%d];"
            % (count, count))
        self.root = CGList([table, CGRegisterProxyHandlersMethod(descriptors)], "\n")

    def define(self):
        return self.root.define()
class CGBindingRoot(CGThing):
    """
    Root codegen class for binding generation. Instantiate the class, and call
    declare or define to generate header or cpp code (respectively).
    """
    def __init__(self, config, prefix, webIDLFile):
        # Everything generated for one .webidl file: enums, dictionaries,
        # callbacks, interface descriptors, plus the shared import prologue.
        descriptors = config.getDescriptors(webIDLFile=webIDLFile,
                                            isCallback=False)
        dictionaries = config.getDictionaries(webIDLFile=webIDLFile)
        # NOTE(review): this empty list is immediately rebound by the
        # CGEnum list comprehension below -- the assignment is dead.
        cgthings = []
        mainCallbacks = config.getCallbacks(webIDLFile=webIDLFile)
        callbackDescriptors = config.getDescriptors(webIDLFile=webIDLFile,
                                                    isCallback=True)
        # Do codegen for all the enums
        cgthings = [CGEnum(e) for e in config.getEnums(webIDLFile)]
        # Do codegen for all the dictionaries. We have to be a bit careful
        # here, because we have to generate these in order from least derived
        # to most derived so that class inheritance works out. We also have to
        # generate members before the dictionary that contains them.
        #
        # XXXbz this will fail if we have two webidl files A and B such that A
        # declares a dictionary which inherits from a dictionary in B and B
        # declares a dictionary (possibly a different one!) that inherits from a
        # dictionary in A. The good news is that I expect this to never happen.
        reSortedDictionaries = []
        dictionaries = set(dictionaries)
        # Topological sort: repeatedly peel off dictionaries whose remaining
        # dependencies all lie outside the unsorted set.
        while len(dictionaries) != 0:
            # Find the dictionaries that don't depend on anything else anymore
            # and move them over.
            toMove = [d for d in dictionaries if
                      len(CGDictionary.getDictionaryDependencies(d) &
                          dictionaries) == 0]
            if len(toMove) == 0:
                raise TypeError("Loop in dictionary dependency graph")
            dictionaries = dictionaries - set(toMove)
            reSortedDictionaries.extend(toMove)
        dictionaries = reSortedDictionaries
        cgthings.extend([CGDictionary(d, config.getDescriptorProvider())
                         for d in dictionaries])
        # Do codegen for all the callbacks.
        cgthings.extend(CGList([CGCallbackFunction(c, config.getDescriptorProvider()),
                                CGCallbackFunctionImpl(c)], "\n")
                        for c in mainCallbacks)
        # Do codegen for all the descriptors
        cgthings.extend([CGDescriptor(x) for x in descriptors])
        # Do codegen for all the callback interfaces.
        cgthings.extend(CGList([CGCallbackInterface(x),
                                CGCallbackFunctionImpl(x)], "\n")
                        for x in callbackDescriptors)
        # And make sure we have the right number of newlines at the end
        curr = CGWrapper(CGList(cgthings, "\n\n"), post="\n\n")
        # Wrap all of that in our namespaces.
        #curr = CGNamespace.build(['dom'],
        #                         CGWrapper(curr, pre="\n"))
        # Add imports
        #XXXjdm This should only import the namespace for the current binding,
        #       not every binding ever.
        curr = CGImports(curr, descriptors, [
            'js',
            'js::{JS_ARGV, JS_CALLEE, JS_THIS_OBJECT}',
            'js::{JSCLASS_GLOBAL_SLOT_COUNT, JSCLASS_IS_DOMJSCLASS}',
            'js::{JSCLASS_IS_GLOBAL, JSCLASS_RESERVED_SLOTS_SHIFT}',
            'js::{JSCLASS_RESERVED_SLOTS_MASK, JSID_VOID, JSJitInfo}',
            'js::{JSPROP_ENUMERATE, JSPROP_NATIVE_ACCESSORS, JSPROP_SHARED}',
            'js::{JSRESOLVE_ASSIGNING, JSRESOLVE_QUALIFIED}',
            'js::jsapi::{JS_CallFunctionValue, JS_GetClass, JS_GetGlobalForObject}',
            'js::jsapi::{JS_GetObjectPrototype, JS_GetProperty, JS_GetPropertyById}',
            'js::jsapi::{JS_GetPropertyDescriptorById, JS_GetReservedSlot}',
            'js::jsapi::{JS_HasProperty, JS_HasPropertyById, JS_IsExceptionPending}',
            'js::jsapi::{JS_NewObject, JS_ObjectIsCallable, JS_SetPrototype}',
            'js::jsapi::{JS_SetReservedSlot, JS_WrapValue, JSBool, JSContext}',
            'js::jsapi::{JSClass, JSFreeOp, JSFunctionSpec, JSHandleObject, jsid}',
            'js::jsapi::{JSNativeWrapper, JSObject, JSPropertyDescriptor, JS_ArrayIterator}',
            'js::jsapi::{JSPropertyOpWrapper, JSPropertySpec, JS_PropertyStub}',
            'js::jsapi::{JSStrictPropertyOpWrapper, JSString, JSTracer, JS_ConvertStub}',
            'js::jsapi::{JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub}',
            'js::jsval::JSVal',
            'js::jsval::{ObjectValue, ObjectOrNullValue, PrivateValue}',
            'js::jsval::{NullValue, UndefinedValue}',
            'js::glue::{CallJitMethodOp, CallJitPropertyOp, CreateProxyHandler}',
            'js::glue::{GetProxyPrivate, NewProxyObject, ProxyTraps}',
            'js::glue::{RUST_FUNCTION_VALUE_TO_JITINFO}',
            'js::glue::{RUST_JS_NumberValue, RUST_JSID_IS_STRING}',
            'js::rust::with_compartment',
            'dom::types::*',
            'dom::bindings',
            'dom::bindings::global::GlobalRef',
            'dom::bindings::js::{JS, JSRef, Root, RootedReference, Temporary}',
            'dom::bindings::js::{OptionalRootable, OptionalRootedRootable, ResultRootable}',
            'dom::bindings::js::{OptionalRootedReference, OptionalOptionalRootedRootable}',
            'dom::bindings::utils::{CreateDOMGlobal, CreateInterfaceObjects2}',
            'dom::bindings::utils::{ConstantSpec, cx_for_dom_object}',
            'dom::bindings::utils::{dom_object_slot, DOM_OBJECT_SLOT, DOMClass}',
            'dom::bindings::utils::{DOMJSClass, JSCLASS_DOM_GLOBAL}',
            'dom::bindings::utils::{FindEnumStringIndex, GetArrayIndexFromId}',
            'dom::bindings::utils::{GetPropertyOnPrototype, GetProtoOrIfaceArray}',
            'dom::bindings::utils::{HasPropertyOnPrototype, IntVal}',
            'dom::bindings::utils::{jsid_to_str}',
            'dom::bindings::utils::global_object_for_js_object',
            'dom::bindings::utils::{Reflectable}',
            'dom::bindings::utils::{squirrel_away_unique}',
            'dom::bindings::utils::{ThrowingConstructor, unwrap, unwrap_jsmanaged}',
            'dom::bindings::utils::VoidVal',
            'dom::bindings::utils::get_dictionary_property',
            'dom::bindings::utils::NativeProperties',
            'dom::bindings::trace::JSTraceable',
            'dom::bindings::callback::{CallbackContainer,CallbackInterface,CallbackFunction}',
            'dom::bindings::callback::{CallSetup,ExceptionHandling}',
            'dom::bindings::callback::{WrapCallThisObject}',
            'dom::bindings::conversions::{FromJSValConvertible, ToJSValConvertible}',
            'dom::bindings::conversions::IDLInterface',
            'dom::bindings::conversions::{Default, Empty}',
            'dom::bindings::codegen::*',
            'dom::bindings::codegen::Bindings::*',
            'dom::bindings::codegen::RegisterBindings',
            'dom::bindings::codegen::UnionTypes::*',
            'dom::bindings::error::{FailureUnknown, Fallible, Error, ErrorResult}',
            'dom::bindings::error::throw_dom_exception',
            'dom::bindings::error::throw_type_error',
            'dom::bindings::proxyhandler',
            'dom::bindings::proxyhandler::{_obj_toString, defineProperty}',
            'dom::bindings::proxyhandler::{FillPropertyDescriptor, GetExpandoObject}',
            'dom::bindings::proxyhandler::{delete_, getPropertyDescriptor}',
            'dom::bindings::str::ByteString',
            'page::JSPageInfo',
            'libc',
            'servo_util::str::DOMString',
            'std::mem',
            'std::cmp',
            'std::ptr',
            'std::str',
            'std::num',
        ])
        # Add the auto-generated comment.
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        # Store the final result.
        self.root = curr

    def define(self):
        return stripTrailingWhitespace(self.root.define())
class CGNativeMember(ClassMethod):
    """
    A native class method corresponding to one IDL member (method or
    attribute accessor), handling return-type and argument-type mapping.
    """
    def __init__(self, descriptorProvider, member, name, signature, extendedAttrs,
                 breakAfter=True, passJSBitsAsNeeded=True, visibility="public",
                 jsObjectsArePtr=False, variadicIsSequence=False):
        """
        If jsObjectsArePtr is true, typed arrays and "object" will be
        passed as JSObject*.

        If passJSBitsAsNeeded is false, we don't automatically pass in a
        JSContext* or a JSObject* based on the return and argument types.
        """
        self.descriptorProvider = descriptorProvider
        self.member = member
        self.extendedAttrs = extendedAttrs
        self.passJSBitsAsNeeded = passJSBitsAsNeeded
        self.jsObjectsArePtr = jsObjectsArePtr
        self.variadicIsSequence = variadicIsSequence
        breakAfterSelf = "\n" if breakAfter else ""
        ClassMethod.__init__(self, name,
                             self.getReturnType(signature[0], False),
                             self.getArgs(signature[0], signature[1]),
                             static=member.isStatic(),
                             # Mark our getters, which are attrs that
                             # have a non-void return type, as const.
                             const=(not member.isStatic() and member.isAttr() and
                                    not signature[0].isVoid()),
                             breakAfterReturnDecl=" ",
                             breakAfterSelf=breakAfterSelf,
                             visibility=visibility)

    def getReturnType(self, type, isMember):
        return self.getRetvalInfo(type, isMember)[0]

    def getRetvalInfo(self, type, isMember):
        """
        Returns a tuple:

        The first element is the type declaration for the retval

        The second element is a template for actually returning a value stored in
        "${declName}". This means actually returning it if
        we're not outparam, else assigning to the "retval" outparam. If
        isMember is true, this can be None, since in that case the caller will
        never examine this value.
        """
        if type.isVoid():
            typeDecl, template = "", ""
        elif type.isPrimitive() and type.tag() in builtinNames:
            result = CGGeneric(builtinNames[type.tag()])
            if type.nullable():
                raise TypeError("Nullable primitives are not supported here.")
            typeDecl, template = result.define(), "return Ok(${declName});"
        elif type.isDOMString():
            if isMember:
                # No need for a third element in the isMember case
                typeDecl, template = "nsString", None
            else:
                # Outparam
                typeDecl, template = "void", "retval = ${declName};"
        elif type.isByteString():
            if isMember:
                # No need for a third element in the isMember case
                typeDecl, template = "nsCString", None
            else:
                # Outparam.  (Fixed: this used to fall through and clobber
                # the isMember result because the `else:` was missing.)
                typeDecl, template = "void", "retval = ${declName};"
        elif type.isEnum():
            enumName = type.unroll().inner.identifier.name
            if type.nullable():
                enumName = CGTemplatedType("Nullable",
                                           CGGeneric(enumName)).define()
            typeDecl, template = enumName, "return ${declName};"
        elif type.isGeckoInterface():
            iface = type.unroll().inner
            nativeType = self.descriptorProvider.getDescriptor(
                iface.identifier.name).nativeType
            # Now trim off unnecessary namespaces
            nativeType = nativeType.split("::")
            if nativeType[0] == "mozilla":
                nativeType.pop(0)
                if nativeType[0] == "dom":
                    nativeType.pop(0)
            result = CGWrapper(CGGeneric("::".join(nativeType)), post="*")
            # Since we always force an owning type for callback return values,
            # our ${declName} is an OwningNonNull or nsRefPtr. So we can just
            # .forget() to get our already_AddRefed.
            typeDecl, template = result.define(), "return ${declName}.forget();"
        elif type.isCallback():
            typeDecl, template = \
                ("already_AddRefed<%s>" % type.unroll().identifier.name,
                 "return ${declName}.forget();")
        elif type.isAny():
            typeDecl, template = "JSVal", "return Ok(${declName});"
        elif type.isObject():
            typeDecl, template = "JSObject*", "return ${declName};"
        elif type.isSpiderMonkeyInterface():
            if type.nullable():
                returnCode = "return ${declName}.IsNull() ? nullptr : ${declName}.Value().Obj();"
            else:
                returnCode = "return ${declName}.Obj();"
            typeDecl, template = "JSObject*", returnCode
        elif type.isSequence():
            # If we want to handle sequence-of-sequences return values, we're
            # going to need to fix example codegen to not produce nsTArray<void>
            # for the relevant argument...
            assert not isMember
            # Outparam.
            if type.nullable():
                returnCode = ("if (${declName}.IsNull()) {\n"
                              " retval.SetNull();\n"
                              "} else {\n"
                              " retval.SetValue().SwapElements(${declName}.Value());\n"
                              "}")
            else:
                returnCode = "retval.SwapElements(${declName});"
            typeDecl, template = "void", returnCode
        elif type.isDate():
            result = CGGeneric("Date")
            if type.nullable():
                result = CGTemplatedType("Nullable", result)
            typeDecl, template = result.define(), "return ${declName};"
        else:
            raise TypeError("Don't know how to declare return value for %s" % type)

        # Fallible members wrap the declaration in Fallible<T> (or plain
        # ErrorResult when there is no value to return).
        if not 'infallible' in self.extendedAttrs:
            if typeDecl:
                typeDecl = "Fallible<%s>" % typeDecl
            else:
                typeDecl = "ErrorResult"
            if not template:
                template = "return Ok(());"
        return typeDecl, template

    def getArgs(self, returnType, argList):
        """Build the Argument list, including out-params and JS bits."""
        args = [self.getArg(arg) for arg in argList]
        # Now the outparams.  String/sequence returns are delivered via a
        # trailing "retval" out-parameter (these cases are mutually
        # exclusive, hence the single if/elif chain).
        if returnType.isDOMString():
            args.append(Argument("nsString&", "retval"))
        elif returnType.isByteString():
            args.append(Argument("nsCString&", "retval"))
        elif returnType.isSequence():
            nullable = returnType.nullable()
            if nullable:
                returnType = returnType.inner
            # And now the actual underlying type
            elementDecl = self.getReturnType(returnType.inner, True)
            type = CGTemplatedType("nsTArray", CGGeneric(elementDecl))
            if nullable:
                type = CGTemplatedType("Nullable", type)
            args.append(Argument("%s&" % type.define(), "retval"))
        # The legacycaller thisval
        if self.member.isMethod() and self.member.isLegacycaller():
            # If it has an identifier, we can't deal with it yet
            assert self.member.isIdentifierLess()
            args.insert(0, Argument("JS::Value", "aThisVal"))
        # And jscontext bits.
        if needCx(returnType, argList, self.passJSBitsAsNeeded):
            args.insert(0, Argument("JSContext*", "cx"))
        # And if we're static, a global
        if self.member.isStatic():
            args.insert(0, Argument("const GlobalObject&", "global"))
        return args

    def doGetArgType(self, type, optional, isMember):
        """
        The main work of getArgType. Returns a string type decl, whether this
        is a const ref, as well as whether the type should be wrapped in
        Nullable as needed.

        isMember can be false or one of the strings "Sequence" or "Variadic"
        """
        if type.isArray():
            raise TypeError("Can't handle array arguments yet")
        if type.isSequence():
            nullable = type.nullable()
            if nullable:
                type = type.inner
            elementType = type.inner
            argType = self.getArgType(elementType, False, "Sequence")[0]
            decl = CGTemplatedType("Sequence", argType)
            return decl.define(), True, True
        if type.isUnion():
            if type.nullable():
                type = type.inner
            # NOTE(review): produces "T::T" (module::type); looks odd but
            # matches the generated union module layout -- confirm before
            # changing.
            return str(type) + "::" + str(type), False, True
        if type.isGeckoInterface() and not type.isCallbackInterface():
            iface = type.unroll().inner
            argIsPointer = type.nullable()
            forceOwningType = iface.isCallback() or isMember
            if argIsPointer:
                if (optional or isMember) and forceOwningType:
                    typeDecl = "nsRefPtr<%s>"
                else:
                    typeDecl = "*%s"
            else:
                if optional or isMember:
                    if forceOwningType:
                        typeDecl = "OwningNonNull<%s>"
                    else:
                        typeDecl = "NonNull<%s>"
                else:
                    typeDecl = "%s"
            descriptor = self.descriptorProvider.getDescriptor(iface.identifier.name)
            return (typeDecl % descriptor.argumentType,
                    False, False)
        if type.isSpiderMonkeyInterface():
            if self.jsObjectsArePtr:
                return "JSObject*", False, False
            return type.name, True, True
        if type.isDOMString():
            declType = "DOMString"
            return declType, True, False
        if type.isByteString():
            declType = "nsCString"
            return declType, True, False
        if type.isEnum():
            return type.unroll().inner.identifier.name, False, True
        if type.isCallback() or type.isCallbackInterface():
            forceOwningType = optional or isMember
            if type.nullable():
                if forceOwningType:
                    declType = "nsRefPtr<%s>"
                else:
                    declType = "%s*"
            else:
                if forceOwningType:
                    declType = "OwningNonNull<%s>"
                else:
                    declType = "%s&"
            if type.isCallback():
                name = type.unroll().identifier.name
            else:
                name = type.unroll().inner.identifier.name
            return declType % name, False, False
        if type.isAny():
            # Don't do the rooting stuff for variadics for now
            if isMember:
                declType = "JS::Value"
            else:
                declType = "JSVal"
            return declType, False, False
        if type.isObject():
            if isMember:
                declType = "JSObject*"
            else:
                declType = "JS::Handle<JSObject*>"
            return declType, False, False
        if type.isDictionary():
            typeName = CGDictionary.makeDictionaryName(type.inner)
            return typeName, True, True
        if type.isDate():
            return "Date", False, True
        assert type.isPrimitive()
        return builtinNames[type.tag()], False, True

    def getArgType(self, type, optional, isMember):
        """
        Get the type of an argument declaration. Returns the type CGThing, and
        whether this should be a const ref.

        isMember can be False, "Sequence", or "Variadic"
        """
        (decl, ref, handleNullable) = self.doGetArgType(type, optional,
                                                        isMember)
        decl = CGGeneric(decl)
        if handleNullable and type.nullable():
            decl = CGTemplatedType("Nullable", decl)
            ref = True
        if isMember == "Variadic":
            arrayType = "Sequence" if self.variadicIsSequence else "nsTArray"
            decl = CGTemplatedType(arrayType, decl)
            ref = True
        elif optional:
            # Note: All variadic args claim to be optional, but we can just use
            # empty arrays to represent them not being present.
            decl = CGTemplatedType("Option", decl)
            ref = False
        return (decl, ref)

    def getArg(self, arg):
        """
        Get the full argument declaration for an argument
        """
        (decl, ref) = self.getArgType(arg.type,
                                      arg.optional and not arg.defaultValue,
                                      "Variadic" if arg.variadic else False)
        if ref:
            decl = CGWrapper(decl, pre="&")
        return Argument(decl.define(), arg.identifier.name)
class CGCallback(CGClass):
    """
    Base codegen class for IDL callbacks (callback functions and callback
    interfaces).  Methods flagged as needThisHandling are wrapped in public
    entry points (see getMethodImpls); all other methods, getters and
    setters pass through unchanged.
    """
    def __init__(self, idlObject, descriptorProvider, baseName, methods,
                 getters=[], setters=[]):
        # NOTE(review): getters/setters use shared mutable default args;
        # harmless only as long as no caller mutates them.
        self.baseName = baseName
        self._deps = idlObject.getDeps()
        name = idlObject.identifier.name
        # For our public methods that needThisHandling we want most of the
        # same args and the same return type as what CallbackMember
        # generates. So we want to take advantage of all its
        # CGNativeMember infrastructure, but that infrastructure can't deal
        # with templates and most especially template arguments. So just
        # cheat and have CallbackMember compute all those things for us.
        realMethods = []
        for method in methods:
            if not method.needThisHandling:
                realMethods.append(method)
            else:
                realMethods.extend(self.getMethodImpls(method))
        CGClass.__init__(self, name,
                         bases=[ClassBase(baseName)],
                         constructors=self.getConstructors(),
                         methods=realMethods+getters+setters,
                         decorators="#[deriving(PartialEq,Clone,Encodable)]")
    def getConstructors(self):
        """Single public constructor forwarding the JS object to the base."""
        return [ClassConstructor(
            [Argument("*mut JSObject", "aCallback")],
            bodyInHeader=True,
            visibility="pub",
            explicit=False,
            baseConstructors=[
                "%s::new(aCallback)" % self.baseName
                ])]
    def getMethodImpls(self, method):
        """
        Produce the public wrappers for a needThisHandling method: a
        templated variant taking an explicit thisObj, a variant using a
        null thisObj, and the (private) original method itself.
        """
        assert method.needThisHandling
        args = list(method.args)
        # Strip out the JSContext*/JSObject* args
        # that got added.
        assert args[0].name == "cx" and args[0].argType == "*mut JSContext"
        assert args[1].name == "aThisObj" and args[1].argType == "*mut JSObject"
        args = args[2:]
        # Record the names of all the arguments, so we can use them when we call
        # the private method.
        argnames = [arg.name for arg in args]
        argnamesWithThis = ["s.GetContext()", "thisObjJS"] + argnames
        argnamesWithoutThis = ["s.GetContext()", "ptr::mut_null()"] + argnames
        # Now that we've recorded the argnames for our call to our private
        # method, insert our optional argument for deciding whether the
        # CallSetup should re-throw exceptions on aRv.
        args.append(Argument("ExceptionHandling", "aExceptionHandling",
                             "ReportExceptions"))
        args[0] = Argument('&' + args[0].argType, args[0].name, args[0].default)
        method.args[2] = args[0]
        # And now insert our template argument.
        argsWithoutThis = list(args)
        args.insert(0, Argument("&JSRef<T>", "thisObj"))
        # And the self argument
        method.args.insert(0, Argument(None, "&self"))
        args.insert(0, Argument(None, "&self"))
        argsWithoutThis.insert(0, Argument(None, "&self"))
        # Shared preamble for both wrappers: set up the call and bail out
        # with Err(FailureUnknown) if no JSContext is available.
        setupCall = ("let s = CallSetup::new(self, aExceptionHandling);\n"
                     "if s.GetContext().is_null() {\n"
                     "  return Err(FailureUnknown);\n"
                     "}\n")
        bodyWithThis = string.Template(
            setupCall+
            "let thisObjJS = WrapCallThisObject(s.GetContext(), thisObj);\n"
            "if thisObjJS.is_null() {\n"
            "  return Err(FailureUnknown);\n"
            "}\n"
            "return ${methodName}(${callArgs});").substitute({
                "callArgs" : ", ".join(argnamesWithThis),
                "methodName": 'self.' + method.name,
            })
        bodyWithoutThis = string.Template(
            setupCall +
            "return ${methodName}(${callArgs});").substitute({
                "callArgs" : ", ".join(argnamesWithoutThis),
                "methodName": 'self.' + method.name,
            })
        # The wrappers use a trailing '_'/'__' naming convention to avoid
        # clashing with the private method's own name.
        return [ClassMethod(method.name+'_', method.returnType, args,
                            bodyInHeader=True,
                            templateArgs=["T: Reflectable"],
                            body=bodyWithThis,
                            visibility='pub'),
                ClassMethod(method.name+'__', method.returnType, argsWithoutThis,
                            bodyInHeader=True,
                            body=bodyWithoutThis,
                            visibility='pub'),
                method]
    def deps(self):
        """Return the IDL dependencies recorded at construction time."""
        return self._deps
# We're always fallible: every generated callback invocation can return
# Err(FailureUnknown).
def callbackGetterName(attr):
    """Native getter name for a callback-interface attribute ("GetFoo")."""
    nativeName = MakeNativeName(attr.identifier.name)
    return "Get" + nativeName
def callbackSetterName(attr):
    """Native setter name for a callback-interface attribute ("SetFoo")."""
    nativeName = MakeNativeName(attr.identifier.name)
    return "Set" + nativeName
class CGCallbackFunction(CGCallback):
    """
    Codegen class for an IDL callback function (as opposed to a callback
    interface): it generates a single Call() method on top of the
    CallbackFunction base.
    """
    def __init__(self, callback, descriptorProvider):
        CGCallback.__init__(self, callback, descriptorProvider,
                            "CallbackFunction",
                            methods=[CallCallback(callback, descriptorProvider)])
    # NOTE: the former getConstructors override was a pure pass-through to
    # CGCallback.getConstructors and has been removed; the inherited
    # implementation is used directly.
class CGCallbackFunctionImpl(CGGeneric):
    """
    Emits the CallbackContainer and ToJSValConvertible impls for a callback
    type; the emitted Rust code delegates to the parent callback object.
    """
    def __init__(self, callback):
        impl = string.Template("""impl CallbackContainer for ${type} {
    fn new(callback: *mut JSObject) -> ${type} {
        ${type}::new(callback)
    }
    fn callback(&self) -> *mut JSObject {
        self.parent.callback()
    }
}
impl ToJSValConvertible for ${type} {
    fn to_jsval(&self, cx: *mut JSContext) -> JSVal {
        self.callback().to_jsval(cx)
    }
}
""").substitute({"type": callback.name})
        CGGeneric.__init__(self, impl)
class CGCallbackInterface(CGCallback):
    """
    Codegen class for a WebIDL callback interface: generates a getter for
    every non-static attribute, a setter for every writable one, and one
    CallbackOperation per operation signature.
    """
    def __init__(self, descriptor):
        iface = descriptor.interface
        attrs = []
        for member in iface.members:
            if member.isAttr() and not member.isStatic():
                attrs.append(member)
        getters = [CallbackGetter(a, descriptor) for a in attrs]
        setters = [CallbackSetter(a, descriptor)
                   for a in attrs if not a.readonly]
        ops = [m for m in iface.members
               if m.isMethod() and not m.isStatic() and not m.isIdentifierLess()]
        methods = []
        for op in ops:
            for sig in op.signatures():
                methods.append(CallbackOperation(op, sig, descriptor))
        # JS-implemented callback interfaces must not declare a constructor.
        assert not iface.isJSImplemented() or not iface.ctor()
        CGCallback.__init__(self, iface, descriptor, "CallbackInterface",
                            methods, getters=getters, setters=setters)
class FakeMember():
    """
    Minimal stand-in for an IDL member, handed to CGNativeMember when the
    real member kind is irrelevant to callback codegen.
    """
    def __init__(self):
        # Callbacks always use the default null-handling behavior.
        self.treatNullAs = "Default"
    def isStatic(self):
        """A fake member is never static."""
        return False
    def isAttr(self):
        """A fake member is never an attribute."""
        return False
    def isMethod(self):
        """A fake member is never a method."""
        return False
    def getExtendedAttribute(self, name):
        """A fake member carries no extended attributes."""
        return None
class CallbackMember(CGNativeMember):
    """
    Common base for all generated callback members (methods, getters,
    setters).  Computes the native argument list and generates the whole
    method body (argument conversion, the call itself, result conversion)
    eagerly at construction time.
    """
    def __init__(self, sig, name, descriptorProvider, needThisHandling, rethrowContentException=False):
        """
        needThisHandling is True if we need to be able to accept a specified
        thisObj, False otherwise.
        """
        assert not rethrowContentException or not needThisHandling
        self.retvalType = sig[0]
        self.originalSig = sig
        args = sig[1]
        self.argCount = len(args)
        if self.argCount > 0:
            # Check for variadic arguments
            lastArg = args[self.argCount-1]
            if lastArg.variadic:
                # Runtime expression: fixed args plus however many variadic
                # values were actually passed.
                self.argCountStr = (
                    "(%d - 1) + %s.Length()" % (self.argCount,
                                                lastArg.identifier.name))
            else:
                self.argCountStr = "%d" % self.argCount
        self.needThisHandling = needThisHandling
        # If needThisHandling, we generate ourselves as private and the caller
        # will handle generating public versions that handle the "this" stuff.
        visibility = "priv" if needThisHandling else "pub"
        self.rethrowContentException = rethrowContentException
        # We don't care, for callback codegen, whether our original member was
        # a method or attribute or whatnot. Just always pass FakeMember()
        # here.
        CGNativeMember.__init__(self, descriptorProvider, FakeMember(),
                                name, (self.retvalType, args),
                                extendedAttrs={},
                                passJSBitsAsNeeded=False,
                                visibility=visibility,
                                jsObjectsArePtr=True)
        # We have to do all the generation of our body now, because
        # the caller relies on us throwing if we can't manage it.
        self.exceptionCode= "return Err(FailureUnknown);\n"
        self.body = self.getImpl()
    def getImpl(self):
        """Assemble the full generated body for this callback member."""
        replacements = {
            "declRval": self.getRvalDecl(),
            "returnResult": self.getResultConversion(),
            "convertArgs": self.getArgConversions(),
            "doCall": self.getCall(),
            "setupCall": self.getCallSetup(),
            }
        if self.argCount > 0:
            replacements["argCount"] = self.argCountStr
            replacements["argvDecl"] = string.Template(
                "let mut argv = Vec::from_elem(${argCount}, UndefinedValue());\n"
                ).substitute(replacements)
        else:
            # Avoid weird 0-sized arrays
            replacements["argvDecl"] = ""
        # Newlines and semicolons are in the values
        pre = string.Template(
            "${setupCall}"
            "${declRval}"
            "${argvDecl}").substitute(replacements)
        body = string.Template(
            "${convertArgs}"
            "${doCall}"
            "${returnResult}").substitute(replacements)
        # The call and conversions run inside the callback's compartment.
        return CGList([
            CGGeneric(pre),
            CGWrapper(CGIndenter(CGGeneric(body)),
                      pre="with_compartment(cx, self.parent.callback(), || {\n",
                      post="})")
            ], "\n").define()
    def getResultConversion(self):
        """Generate code converting the JS return value to a native value."""
        replacements = {
            "val": "rval",
            "declName": "rvalDecl",
        }
        template, _, declType, needsRooting = getJSToNativeConversionTemplate(
            self.retvalType,
            self.descriptorProvider,
            exceptionCode=self.exceptionCode,
            isCallbackReturnValue="Callback",
            # XXXbz we should try to do better here
            sourceDescription="return value")
        convertType = instantiateJSToNativeConversionTemplate(
            template, replacements, declType, "rvalDecl", needsRooting)
        assignRetval = string.Template(
            self.getRetvalInfo(self.retvalType,
                               False)[1]).substitute(replacements)
        return convertType.define() + "\n" + assignRetval + "\n"
    def getArgConversions(self):
        """Generate native-to-JS conversion code for every argument."""
        # Just reget the arglist from self.originalSig, because our superclasses
        # just have way too many members they like to clobber, so I can't find a
        # safe member name to store it in.
        argConversions = [self.getArgConversion(i, arg) for (i, arg)
                          in enumerate(self.originalSig[1])]
        # Do them back to front, so our argc modifications will work
        # correctly, because we examine trailing arguments first.
        argConversions.reverse();
        # Wrap each one in a scope so that any locals it has don't leak out, and
        # also so that we can just "break;" for our successCode.
        argConversions = [CGWrapper(CGIndenter(CGGeneric(c)),
                                    pre="loop {\n",
                                    post="\nbreak;}\n")
                          for c in argConversions]
        if self.argCount > 0:
            argConversions.insert(0, self.getArgcDecl())
        # And slap them together.
        return CGList(argConversions, "\n\n").define() + "\n\n"
    def getArgConversion(self, i, arg):
        """Generate the conversion of argument `i` into argv slot(s)."""
        argval = arg.identifier.name
        if arg.variadic:
            argval = argval + "[idx]"
            jsvalIndex = "%d + idx" % i
        else:
            jsvalIndex = "%d" % i
            if arg.optional and not arg.defaultValue:
                argval += ".clone().unwrap()"
        conversion = wrapForType("*argv.get_mut(%s)" % jsvalIndex,
                                 result=argval,
                                 successCode="continue;" if arg.variadic else "break;")
        if arg.variadic:
            # NOTE(review): this emits a C++-style for loop, unlike the Rust
            # emitted elsewhere in this file — looks like an unported
            # remnant; confirm variadic callback args are exercised.
            conversion = string.Template(
                "for (uint32_t idx = 0; idx < ${arg}.Length(); ++idx) {\n" +
                CGIndenter(CGGeneric(conversion)).define() + "\n"
                "}\n"
                "break;").substitute({ "arg": arg.identifier.name })
        elif arg.optional and not arg.defaultValue:
            # NOTE(review): the "else if (argc ...)" arms are C++-style too;
            # same unported-remnant caveat as above.
            conversion = (
                CGIfWrapper(CGGeneric(conversion),
                            "%s.is_some()" % arg.identifier.name).define() +
                " else if (argc == %d) {\n"
                "  // This is our current trailing argument; reduce argc\n"
                "  argc -= 1;\n"
                "} else {\n"
                "  *argv.get_mut(%d) = UndefinedValue();\n"
                "}" % (i+1, i))
        return conversion
    def getArgs(self, returnType, argList):
        """Compute the native argument list for the generated member."""
        args = CGNativeMember.getArgs(self, returnType, argList)
        if not self.needThisHandling:
            # Since we don't need this handling, we're the actual method that
            # will be called, so we need an aRethrowExceptions argument.
            if self.rethrowContentException:
                args.append(Argument("JSCompartment*", "aCompartment", "nullptr"))
            else:
                args.append(Argument("ExceptionHandling", "aExceptionHandling",
                                     "ReportExceptions"))
            return args
        # We want to allow the caller to pass in a "this" object, as
        # well as a JSContext.
        return [Argument("*mut JSContext", "cx"),
                Argument("*mut JSObject", "aThisObj")] + args
    def getCallSetup(self):
        """Generate the CallSetup preamble (no-op when the caller did it)."""
        if self.needThisHandling:
            # It's been done for us already
            return ""
        # NOTE(review): this branch emits C++-style code (CallSetup s(...),
        # JSContext* cx) rather than Rust — looks unported; confirm the
        # needThisHandling=False path is actually exercised.
        callSetup = "CallSetup s(CallbackPreserveColor(), aRv"
        if self.rethrowContentException:
            # getArgs doesn't add the aExceptionHandling argument but does add
            # aCompartment for us.
            callSetup += ", RethrowContentExceptions, aCompartment"
        else:
            callSetup += ", aExceptionHandling"
        callSetup += ");"
        return string.Template(
            "${callSetup}\n"
            "JSContext* cx = s.GetContext();\n"
            "if (!cx) {\n"
            "  return Err(FailureUnknown);\n"
            "}\n").substitute({
                "callSetup": callSetup,
            })
    def getArgcDecl(self):
        """Declare the mutable argc local used by the conversions."""
        return CGGeneric("let mut argc = %su32;" % self.argCountStr);
    @staticmethod
    def ensureASCIIName(idlObject):
        """Reject callback member names we cannot emit safely."""
        type = "attribute" if idlObject.isAttr() else "operation"
        # NOTE(review): re.match anchors at the start of the string, so a
        # non-ASCII character later in the name is not caught — re.search
        # was probably intended; confirm before relying on this check.
        if re.match("[^\x20-\x7E]", idlObject.identifier.name):
            raise SyntaxError('Callback %s name "%s" contains non-ASCII '
                              "characters. We can't handle that. %s" %
                              (type, idlObject.identifier.name,
                               idlObject.location))
        # NOTE(review): same anchoring caveat — this only rejects names that
        # *start* with a double quote.
        if re.match('"', idlObject.identifier.name):
            raise SyntaxError("Callback %s name '%s' contains "
                              "double-quote character. We can't handle "
                              "that. %s" %
                              (type, idlObject.identifier.name,
                               idlObject.location))
class CallbackMethod(CallbackMember):
    """
    Common codegen for callback invocations that go through
    JS_CallFunctionValue.  Subclasses supply the callable declaration and
    the "this" object via getCallableDecl()/getThisObj().
    """
    def __init__(self, sig, name, descriptorProvider, needThisHandling, rethrowContentException=False):
        CallbackMember.__init__(self, sig, name, descriptorProvider,
                                needThisHandling, rethrowContentException)
    def getRvalDecl(self):
        # A single mutable JS value receives the call's return value.
        return "let mut rval = UndefinedValue();\n"
    def getCall(self):
        """Generate the JS_CallFunctionValue invocation."""
        replacements = {
            "thisObj": self.getThisObj(),
            "getCallable": self.getCallableDecl()
            }
        if self.argCount > 0:
            replacements["argv"] = "argv.as_mut_ptr()"
            replacements["argc"] = "argc"
        else:
            # No arguments: pass a null argv and zero argc.
            replacements["argv"] = "nullptr"
            replacements["argc"] = "0"
        return string.Template("${getCallable}"
                               "let ok = unsafe {\n"
                               "  JS_CallFunctionValue(cx, ${thisObj}, callable,\n"
                               "                       ${argc}, ${argv}, &mut rval)\n"
                               "};\n"
                               "if ok == 0 {\n"
                               "  return Err(FailureUnknown);\n"
                               "}\n").substitute(replacements)
class CallCallback(CallbackMethod):
    """
    Codegen for invoking a callback function's underlying JS callable
    directly, using the caller-supplied "this" object.
    """
    def __init__(self, callback, descriptorProvider):
        CallbackMethod.__init__(self, callback.signatures()[0], "Call",
                                descriptorProvider, needThisHandling=True)
    def getThisObj(self):
        # The "this" object is handed in explicitly by the caller.
        return "aThisObj"
    def getCallableDecl(self):
        # The callback object itself is the callable.
        return "let callable = ObjectValue(unsafe {&*self.parent.callback()});\n"
class CallbackOperationBase(CallbackMethod):
    """
    Common class for implementing various callback operations.
    """
    def __init__(self, signature, jsName, nativeName, descriptor, singleOperation, rethrowContentException=False):
        self.singleOperation = singleOperation
        self.methodName = jsName
        CallbackMethod.__init__(self, signature, nativeName, descriptor, singleOperation, rethrowContentException)
    def getThisObj(self):
        """Pick the generated expression for the call's "this" object."""
        if not self.singleOperation:
            return "self.parent.callback()"
        # This relies on getCallableDecl declaring a boolean
        # isCallable in the case when we're a single-operation
        # interface.
        return "if isCallable { aThisObj } else { self.parent.callback() }"
    def getCallableDecl(self):
        """Generate code that binds `callable` to the operation to invoke."""
        replacements = {
            "methodName": self.methodName
            }
        # Look the operation up as a property on the callback object.
        getCallableFromProp = string.Template(
                'match self.parent.GetCallableProperty(cx, "${methodName}") {\n'
                '    Err(_) => return Err(FailureUnknown),\n'
                '    Ok(callable) => callable,\n'
                '}').substitute(replacements)
        if not self.singleOperation:
            # NOTE(review): this branch emits a C++ JS::Rooted declaration,
            # unlike the Rust emitted elsewhere — looks like an unported
            # remnant; confirm multi-operation callback interfaces work.
            return 'JS::Rooted<JS::Value> callable(cx);\n' + getCallableFromProp
        # Single-operation interface: the callback object itself may be the
        # callable; otherwise fall back to the named property.
        return (
            'let isCallable = unsafe { JS_ObjectIsCallable(cx, self.parent.callback()) != 0 };\n'
            'let callable =\n' +
            CGIndenter(
                CGIfElseWrapper('isCallable',
                                CGGeneric('unsafe { ObjectValue(&*self.parent.callback()) }'),
                                CGGeneric(getCallableFromProp))).define() + ';\n')
class CallbackOperation(CallbackOperationBase):
    """
    Codegen actual WebIDL operations on callback interfaces.
    """
    def __init__(self, method, signature, descriptor):
        self.ensureASCIIName(method)
        jsName = method.identifier.name
        iface = descriptor.interface
        isSingleOp = iface.isSingleOperationInterface()
        CallbackOperationBase.__init__(self, signature,
                                       jsName, MakeNativeName(jsName),
                                       descriptor, isSingleOp,
                                       rethrowContentException=iface.isJSImplemented())
class CallbackGetter(CallbackMember):
    """
    Codegen for the native getter of a callback-interface attribute.
    """
    def __init__(self, attr, descriptor):
        self.ensureASCIIName(attr)
        self.attrName = attr.identifier.name
        CallbackMember.__init__(self,
                                (attr.type, []),
                                callbackGetterName(attr),
                                descriptor,
                                needThisHandling=False,
                                rethrowContentException=descriptor.interface.isJSImplemented())
    def getRvalDecl(self):
        # NOTE(review): emits C++-style code (JS::Rooted, and mCallback in
        # getCall below), unlike the Rust emitted elsewhere in this file —
        # looks like an unported remnant; confirm attribute getters on
        # callback interfaces are actually exercised.
        return "JS::Rooted<JS::Value> rval(cx, JS::UndefinedValue());\n"
    def getCall(self):
        """Generate the JS_GetProperty read of the attribute."""
        replacements = {
            "attrName": self.attrName
            }
        return string.Template(
            'if (!JS_GetProperty(cx, mCallback, "${attrName}", &rval)) {\n'
            '  return Err(FailureUnknown);\n'
            '}\n').substitute(replacements);
class CallbackSetter(CallbackMember):
    """
    Codegen for the native setter of a callback-interface attribute.
    """
    def __init__(self, attr, descriptor):
        self.ensureASCIIName(attr)
        self.attrName = attr.identifier.name
        # Setters return void and take a single argument of the attr's type.
        CallbackMember.__init__(self,
                                (BuiltinTypes[IDLBuiltinType.Types.void],
                                 [FakeArgument(attr.type, attr)]),
                                callbackSetterName(attr),
                                descriptor,
                                needThisHandling=False,
                                rethrowContentException=descriptor.interface.isJSImplemented())
    def getRvalDecl(self):
        # We don't need an rval
        return ""
    def getCall(self):
        """Generate the JS_SetProperty write of the attribute."""
        # NOTE(review): emits C++-style code (MOZ_ASSERT, argv.handleAt,
        # mCallback), unlike the Rust emitted elsewhere in this file —
        # looks like an unported remnant; confirm attribute setters on
        # callback interfaces are actually exercised.
        replacements = {
            "attrName": self.attrName,
            "argv": "argv.handleAt(0)",
            }
        return string.Template(
            'MOZ_ASSERT(argv.length() == 1);\n'
            'if (!JS_SetProperty(cx, mCallback, "${attrName}", ${argv})) {\n'
            '  return Err(FailureUnknown);\n'
            '}\n').substitute(replacements)
    def getArgcDecl(self):
        # Setters always take exactly one argument; no argc local is needed.
        return None
class GlobalGenRoots():
    """
    Roots for global codegen.
    To generate code, call the method associated with the target, and then
    call the appropriate define/declare method.
    """
    @staticmethod
    def PrototypeList(config):
        """Generate the prototype-ID and proxy enums (PrototypeList)."""
        # Prototype ID enum.
        protos = [d.name for d in config.getDescriptors(isCallback=False)]
        proxies = [d.name for d in config.getDescriptors(proxy=True)]
        return CGList([
            CGGeneric(AUTOGENERATED_WARNING_COMMENT),
            CGGeneric("pub static MAX_PROTO_CHAIN_LENGTH: uint = %d;\n\n" % config.maxProtoChainLength),
            CGNamespacedEnum('id', 'ID', protos, [0], deriving="PartialEq"),
            CGNamespacedEnum('proxies', 'Proxy', proxies, [0], deriving="PartialEq"),
        ])
    @staticmethod
    def RegisterBindings(config):
        """Generate the proto/proxy-handler registration module."""
        # TODO - Generate the methods we want
        code = CGList([
            CGRegisterProtos(config),
            CGRegisterProxyHandlers(config),
        ], "\n")
        return CGImports(code, [], [
            'dom::bindings::codegen',
            'dom::bindings::codegen::PrototypeList::proxies',
            'js::jsapi::JSContext',
            'js::jsapi::JSObject',
            'libc',
        ])
    @staticmethod
    def InterfaceTypes(config):
        """Generate `pub use` re-exports for all concrete DOM types."""
        descriptors = [d.name for d in config.getDescriptors(register=True, isCallback=False)]
        curr = CGList([CGGeneric("pub use dom::%s::%s;\n" % (name.lower(), name)) for name in descriptors])
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        return curr
    @staticmethod
    def Bindings(config):
        """Generate the `pub mod` list covering every binding module."""
        descriptors = (set(d.name + "Binding" for d in config.getDescriptors(register=True)) |
                       set(d.unroll().module() for d in config.callbacks))
        curr = CGList([CGGeneric("pub mod %s;\n" % name) for name in sorted(descriptors)])
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        return curr
    @staticmethod
    def InheritTypes(config):
        """
        Generate InheritTypes: per-type Base/Derived marker traits, is_*()
        delegation impls along each prototype chain, *Cast helper traits,
        and a JSTraceable impl for every concrete type.
        """
        descriptors = config.getDescriptors(register=True, isCallback=False)
        allprotos = [CGGeneric("#![allow(unused_imports)]\n"),
                     CGGeneric("use dom::types::*;\n"),
                     CGGeneric("use dom::bindings::js::{JS, JSRef, Temporary};\n"),
                     CGGeneric("use dom::bindings::trace::JSTraceable;\n"),
                     CGGeneric("use dom::bindings::utils::Reflectable;\n"),
                     CGGeneric("use serialize::{Encodable, Encoder};\n"),
                     CGGeneric("use js::jsapi::JSTracer;\n\n")]
        for descriptor in descriptors:
            name = descriptor.name
            # Marker trait implemented by every type in this type's chain.
            protos = [CGGeneric('pub trait %s {}\n' % (name + 'Base'))]
            for proto in descriptor.prototypeChain:
                protos += [CGGeneric('impl %s for %s {}\n' % (proto + 'Base',
                                                              descriptor.concreteType))]
            # is_<name>() query trait plus delegating impls down the chain.
            derived = [CGGeneric('pub trait %s { fn %s(&self) -> bool; }\n' %
                                 (name + 'Derived', 'is_' + name.lower()))]
            for protoName in descriptor.prototypeChain[1:-1]:
                protoDescriptor = config.getDescriptor(protoName)
                delegate = string.Template('''impl ${selfName} for ${baseName} {
    fn ${fname}(&self) -> bool {
        self.${parentName}.${fname}()
    }
}
''').substitute({'fname': 'is_' + name.lower(),
                 'selfName': name + 'Derived',
                 'baseName': protoDescriptor.concreteType,
                 'parentName': protoDescriptor.prototypeChain[-2].lower()})
                derived += [CGGeneric(delegate)]
            derived += [CGGeneric('\n')]
            # Up/down-cast helper trait for this type.
            cast = [CGGeneric(string.Template('''pub trait ${castTraitName} {
    #[inline(always)]
    fn to_ref<'a, 'b, T: ${toBound}+Reflectable>(base: &'a JSRef<'b, T>) -> Option<&'a JSRef<'b, Self>> {
        match base.deref().${checkFn}() {
            true => unsafe { Some(base.transmute()) },
            false => None
        }
    }
    #[inline(always)]
    fn to_mut_ref<'a, 'b, T: ${toBound}+Reflectable>(base: &'a mut JSRef<'b, T>) -> Option<&'a mut JSRef<'b, Self>> {
        match base.deref().${checkFn}() {
            true => unsafe { Some(base.transmute_mut()) },
            false => None
        }
    }
    #[inline(always)]
    fn from_ref<'a, 'b, T: ${fromBound}>(derived: &'a JSRef<'b, T>) -> &'a JSRef<'b, Self> {
        unsafe { derived.transmute() }
    }
    #[inline(always)]
    fn from_mut_ref<'a, 'b, T: ${fromBound}>(derived: &'a mut JSRef<'b, T>) -> &'a mut JSRef<'b, Self> {
        unsafe { derived.transmute_mut() }
    }
    #[inline(always)]
    fn from_temporary<T: ${fromBound}+Reflectable>(derived: Temporary<T>) -> Temporary<Self> {
        unsafe { derived.transmute() }
    }
}
''').substitute({'checkFn': 'is_' + name.lower(),
                 'castTraitName': name + 'Cast',
                 'fromBound': name + 'Base',
                 'toBound': name + 'Derived'})),
                CGGeneric("impl %s for %s {}\n\n" % (name + 'Cast', name))]
            # Tracing delegates to the Encodable implementation.
            trace = [CGGeneric(string.Template('''impl JSTraceable for ${name} {
    fn trace(&self, tracer: *mut JSTracer) {
        unsafe {
            self.encode(&mut *tracer).ok().expect("failed to encode");
        }
    }
}
''').substitute({'name': name}))]
            allprotos += protos + derived + cast + trace
        curr = CGList(allprotos)
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        return curr
    @staticmethod
    def UnionTypes(config):
        """Generate the UnionTypes module for IDL union types."""
        curr = UnionTypes(config.getDescriptors(),
                          config.getDictionaries(),
                          config.getCallbacks(),
                          config)
        # Add the auto-generated comment.
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        # Done.
        return curr
|
import logging
from functools import reduce
import numpy as np
from ..__about__ import __version__
from .._exceptions import ReadError, WriteError
from .._files import open_file
from .._mesh import Mesh
from .._vtk_common import (
meshio_to_vtk_order,
meshio_to_vtk_type,
vtk_cells_from_data,
)
# VTK 5.1 data types
# Maps the dtype names used in legacy VTK file headers to numpy dtype names.
vtk_to_numpy_dtype_name = {
    "float": "float32",
    "double": "float64",
    "vtktypeint8": "int8",
    "vtktypeint16": "int16",
    "vtktypeint32": "int32",
    "vtktypeint64": "int64",
    "vtktypeuint8": "uint8",
    "vtktypeuint16": "uint16",
    "vtktypeuint32": "uint32",
    "vtktypeuint64": "uint64",
}
# Reverse mapping (numpy dtype name -> VTK type name), used when writing.
numpy_to_vtk_dtype = {v: k for k, v in vtk_to_numpy_dtype_name.items()}
# supported vtk dataset types
vtk_dataset_types = [
    "UNSTRUCTURED_GRID",
    "STRUCTURED_POINTS",
    "STRUCTURED_GRID",
    "RECTILINEAR_GRID",
]
# additional infos per dataset type
vtk_dataset_infos = {
    "UNSTRUCTURED_GRID": [],
    "STRUCTURED_POINTS": [
        "DIMENSIONS",
        "ORIGIN",
        "SPACING",
        "ASPECT_RATIO",  # alternative for SPACING in version 1.0 and 2.0
    ],
    "STRUCTURED_GRID": ["DIMENSIONS"],
    "RECTILINEAR_GRID": [
        "DIMENSIONS",
        "X_COORDINATES",
        "Y_COORDINATES",
        "Z_COORDINATES",
    ],
}
# all main sections in vtk
vtk_sections = [
    "METADATA",
    "DATASET",
    "POINTS",
    "CELLS",
    "CELL_TYPES",
    "POINT_DATA",
    "CELL_DATA",
    "LOOKUP_TABLE",
    "COLOR_SCALARS",
]
class Info:
    """Info Container for the VTK reader.

    Mutable parser state accumulated while scanning the file, turned into a
    Mesh once parsing finishes.
    """
    def __init__(self):
        # Geometry / topology, filled in as sections are read.
        self.points = None
        self.connectivity = None
        self.offsets = None
        self.types = None
        # Data arrays keyed by name.
        self.field_data = {}
        self.cell_data_raw = {}
        self.point_data = {}
        self.dataset = {}
        # Parser bookkeeping: current section keyword split into tokens,
        # encoding flag, and the item count of the active data section.
        self.active = None
        self.is_ascii = False
        self.split = []
        self.num_items = 0
        # One of the problems in reading VTK files is that POINT_DATA and
        # CELL_DATA fields can contain a number of SCALARS+LOOKUP_TABLE
        # tables without giving an indication of how many there are.
        # Hence, SCALARS must be treated like a first-class section; to
        # associate it with POINT/CELL_DATA, we store the `active` section
        # in this variable.
        self.section = None
def read(filename):
    """Read a legacy VTK file from `filename` and return the parsed mesh."""
    with open_file(filename, "rb") as handle:
        return read_buffer(handle)
def read_buffer(f):
    """Parse an open binary file object containing a legacy VTK dataset.

    Returns a Mesh assembled from the sections found in the stream.
    """
    info = Info()
    # The first line is the title comment; ignore it.
    f.readline()
    data_type = f.readline().decode().strip().upper()
    if data_type not in ["ASCII", "BINARY"]:
        raise ReadError(f"Unknown VTK data type '{data_type}'.")
    info.is_ascii = data_type == "ASCII"
    while True:
        raw = f.readline().decode()
        if not raw:
            # EOF
            break
        stripped = raw.strip()
        if not stripped:
            # Skip blank separator lines.
            continue
        info.split = stripped.split()
        info.section = info.split[0].upper()
        # Main section keywords get their own handler; everything else is a
        # subsection of whatever section is currently active.
        if info.section in vtk_sections:
            _read_section(f, info)
        else:
            _read_subsection(f, info)
    _check_mesh(info)
    cells, cell_data = vtk_cells_from_data(
        info.connectivity, info.offsets, info.types, info.cell_data_raw
    )
    return Mesh(
        info.points,
        cells,
        point_data=info.point_data,
        cell_data=cell_data,
        field_data=info.field_data,
    )
def _read_section(f, info):
    """Read the payload of one main-section keyword into `info`.

    `info.split` holds the tokenized section header line; this function
    consumes whatever data follows it and mutates `info` in place.
    Fix: removed leftover debug print() statements from the CELLS branch.
    """
    if info.section == "METADATA":
        _skip_meta(f)
    elif info.section == "DATASET":
        info.active = "DATASET"
        info.dataset["type"] = info.split[1].upper()
        if info.dataset["type"] not in vtk_dataset_types:
            raise ReadError(
                "Only VTK '{}' supported (not {}).".format(
                    "', '".join(vtk_dataset_types), info.dataset["type"]
                )
            )
    elif info.section == "POINTS":
        info.active = "POINTS"
        info.num_points = int(info.split[1])
        data_type = info.split[2].lower()
        info.points = _read_points(f, data_type, info.is_ascii, info.num_points)
    elif info.section == "CELLS":
        info.active = "CELLS"
        try:
            line = f.readline().decode()
        except UnicodeDecodeError:
            line = ""
        assert line.startswith("OFFSETS")
        # vtk DataFile Version 5.1 - appearing in Paraview 5.8.1 outputs
        # No specification found for this file format.
        # See the question on ParaView Discourse Forum:
        # https://discourse.paraview.org/t/specification-of-vtk-datafile-version-5-1/5127
        info.num_offsets = int(info.split[1])
        info.num_items = int(info.split[2])
        dtype = np.dtype(vtk_to_numpy_dtype_name[line.split()[1]])
        offsets = _read_int_data(f, info.is_ascii, info.num_offsets, dtype)
        line = f.readline().decode()
        assert line.startswith("CONNECTIVITY")
        dtype = np.dtype(vtk_to_numpy_dtype_name[line.split()[1]])
        connectivity = _read_int_data(f, info.is_ascii, info.num_items, dtype)
        info.connectivity = connectivity
        # The offsets array delimits cells: it must start at 0 and end at
        # the total connectivity length.  Drop the leading 0.
        assert offsets[0] == 0
        assert offsets[-1] == len(connectivity)
        info.offsets = offsets[1:]
    elif info.section == "CELL_TYPES":
        info.active = "CELL_TYPES"
        info.num_items = int(info.split[1])
        info.types = _read_cell_types(f, info.is_ascii, info.num_items)
    elif info.section == "POINT_DATA":
        info.active = "POINT_DATA"
        info.num_items = int(info.split[1])
    elif info.section == "CELL_DATA":
        info.active = "CELL_DATA"
        info.num_items = int(info.split[1])
    elif info.section == "LOOKUP_TABLE":
        info.num_items = int(info.split[2])
        # Read and discard the RGBA table.
        np.fromfile(f, count=info.num_items * 4, sep=" ", dtype=float)
        # rgba = data.reshape((info.num_items, 4))
    elif info.section == "COLOR_SCALARS":
        nValues = int(info.split[2])
        # re-use num_items from active POINT/CELL_DATA
        num_items = info.num_items
        dtype = np.ubyte
        if info.is_ascii:
            dtype = float
        # Read and discard the color scalars.
        np.fromfile(f, count=num_items * nValues, dtype=dtype)
def _read_subsection(f, info):
    """Read a keyword that appears inside the currently active section.

    Stores the result into the dict corresponding to the active section
    (point data, raw cell data, dataset info, or field data).
    """
    targets = {
        "POINT_DATA": info.point_data,
        "CELL_DATA": info.cell_data_raw,
        "DATASET": info.dataset,
    }
    d = targets.get(info.active, info.field_data)
    if info.section in vtk_dataset_infos[info.dataset["type"]]:
        if info.section[1:] == "_COORDINATES":
            # X_/Y_/Z_COORDINATES of a rectilinear grid.
            info.num_points = int(info.split[1])
            data_type = info.split[2].lower()
            d[info.section] = _read_coords(f, data_type, info.is_ascii, info.num_points)
        else:
            # DIMENSIONS are integers; ORIGIN/SPACING/ASPECT_RATIO floats.
            caster = int if info.section == "DIMENSIONS" else float
            d[info.section] = [caster(x) for x in info.split[1:]]
            if len(d[info.section]) != 3:
                raise ReadError(
                    "Wrong number of info in section '{}'. Need 3, got {}.".format(
                        info.section, len(d[info.section])
                    )
                )
    elif info.section == "SCALARS":
        d.update(_read_scalar_field(f, info.num_items, info.split, info.is_ascii))
    elif info.section == "VECTORS":
        d.update(_read_field(f, info.num_items, info.split, [3], info.is_ascii))
    elif info.section == "TENSORS":
        d.update(_read_field(f, info.num_items, info.split, [3, 3], info.is_ascii))
    elif info.section == "FIELD":
        d.update(_read_fields(f, int(info.split[2]), info.is_ascii))
    else:
        raise ReadError(f"Unknown section '{info.section}'.")
def _check_mesh(info):
    """Validate the parsed dataset and, for structured dataset types,
    synthesize the implicit points and cells."""
    ds_type = info.dataset["type"]
    if ds_type == "UNSTRUCTURED_GRID":
        if info.connectivity is None:
            raise ReadError("Required section CELLS not found.")
        if info.types is None:
            raise ReadError("Required section CELL_TYPES not found.")
    elif ds_type == "STRUCTURED_POINTS":
        dim = info.dataset["DIMENSIONS"]
        ori = info.dataset["ORIGIN"]
        # SPACING was called ASPECT_RATIO in VTK file versions 1.0/2.0.
        if "SPACING" in info.dataset:
            spa = info.dataset["SPACING"]
        else:
            spa = info.dataset["ASPECT_RATIO"]
        axis = [
            np.linspace(ori[i], ori[i] + (dim[i] - 1.0) * spa[i], dim[i])
            for i in range(3)
        ]
        info.points = _generate_points(axis)
        info.connectivity, info.types = _generate_cells(dim=info.dataset["DIMENSIONS"])
    elif ds_type == "RECTILINEAR_GRID":
        axis = [
            info.dataset["X_COORDINATES"],
            info.dataset["Y_COORDINATES"],
            info.dataset["Z_COORDINATES"],
        ]
        info.points = _generate_points(axis)
        info.connectivity, info.types = _generate_cells(dim=info.dataset["DIMENSIONS"])
    elif ds_type == "STRUCTURED_GRID":
        # Points come from the explicit POINTS section; only the cells are
        # implicit here.
        info.connectivity, info.types = _generate_cells(dim=info.dataset["DIMENSIONS"])
def _generate_cells(dim):
ele_dim = [d - 1 for d in dim if d > 1]
# TODO use math.prod when requiring Python 3.8+? this would save the int conversion
# <https://github.com/microsoft/pyright/issues/1226>
ele_no = int(np.prod(ele_dim))
spatial_dim = len(ele_dim)
if spatial_dim == 1:
# cells are lines in 1D
cells = np.empty((ele_no, 3), dtype=int)
cells[:, 0] = 2
cells[:, 1] = np.arange(ele_no, dtype=int)
cells[:, 2] = cells[:, 1] + 1
cell_types = np.full(ele_no, 3, dtype=int)
elif spatial_dim == 2:
# cells are quad in 2D
cells = np.empty((ele_no, 5), dtype=int)
cells[:, 0] = 4
cells[:, 1] = np.arange(0, ele_no, dtype=int)
cells[:, 1] += np.arange(0, ele_no, dtype=int) // ele_dim[0]
cells[:, 2] = cells[:, 1] + 1
cells[:, 3] = cells[:, 1] + 2 + ele_dim[0]
cells[:, 4] = cells[:, 3] - 1
cell_types = np.full(ele_no, 9, dtype=int)
else:
# cells are hex in 3D
cells = np.empty((ele_no, 9), dtype=int)
cells[:, 0] = 8
cells[:, 1] = np.arange(ele_no)
cells[:, 1] += (ele_dim[0] + ele_dim[1] + 1) * (
np.arange(ele_no) // (ele_dim[0] * ele_dim[1])
)
cells[:, 1] += (np.arange(ele_no) % (ele_dim[0] * ele_dim[1])) // ele_dim[0]
cells[:, 2] = cells[:, 1] + 1
cells[:, 3] = cells[:, 1] + 2 + ele_dim[0]
cells[:, 4] = cells[:, 3] - 1
cells[:, 5] = cells[:, 1] + (1 + ele_dim[0]) * (1 + ele_dim[1])
cells[:, 6] = cells[:, 5] + 1
cells[:, 7] = cells[:, 5] + 2 + ele_dim[0]
cells[:, 8] = cells[:, 7] - 1
cell_types = np.full(ele_no, 12, dtype=int)
return cells.reshape(-1), cell_types
def _generate_points(axis):
x_dim = len(axis[0])
y_dim = len(axis[1])
z_dim = len(axis[2])
pnt_no = x_dim * y_dim * z_dim
x_id, y_id, z_id = np.mgrid[0:x_dim, 0:y_dim, 0:z_dim]
points = np.empty((pnt_no, 3), dtype=axis[0].dtype)
# VTK sorts points and cells in Fortran order
points[:, 0] = axis[0][x_id.reshape(-1, order="F")]
points[:, 1] = axis[1][y_id.reshape(-1, order="F")]
points[:, 2] = axis[2][z_id.reshape(-1, order="F")]
return points
def _read_coords(f, data_type, is_ascii, num_points):
    """Read one coordinate array (an X/Y/Z_COORDINATES block) of length
    `num_points` and consume the trailing newline."""
    dtype = np.dtype(vtk_to_numpy_dtype_name[data_type])
    if is_ascii:
        coords = np.fromfile(f, count=num_points, sep=" ", dtype=dtype)
    else:
        # Binary data is big endian, see
        # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
        coords = np.fromfile(f, count=num_points, dtype=dtype.newbyteorder(">"))
    if f.readline().decode() != "\n":
        raise ReadError()
    return coords
def _read_points(f, data_type, is_ascii, num_points):
    """Read the POINTS block: `num_points` xyz triples, returned (n, 3)."""
    dtype = np.dtype(vtk_to_numpy_dtype_name[data_type])
    count = num_points * 3
    if is_ascii:
        flat = np.fromfile(f, count=count, sep=" ", dtype=dtype)
    else:
        # Binary data is big endian, see
        # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
        flat = np.fromfile(f, count=count, dtype=dtype.newbyteorder(">"))
    if f.readline().decode() != "\n":
        raise ReadError()
    return flat.reshape((num_points, 3))
def _read_int_data(f, is_ascii, num_items, dtype):
if is_ascii:
c = np.fromfile(f, count=num_items, sep=" ", dtype=dtype)
else:
dtype = dtype.newbyteorder(">")
c = np.fromfile(f, count=num_items, dtype=dtype)
print("c", c)
line = f.readline().decode()
if line != "\n":
raise ReadError("Expected newline")
return c
def _read_cell_types(f, is_ascii, num_items):
if is_ascii:
ct = np.fromfile(f, count=int(num_items), sep=" ", dtype=int)
else:
# binary
ct = np.fromfile(f, count=int(num_items), dtype=">i4")
line = f.readline().decode()
# Sometimes, there's no newline at the end
if line.strip() != "":
raise ReadError()
return ct
def _read_scalar_field(f, num_data, split, is_ascii):
    """Read a SCALARS block (including its LOOKUP_TABLE line).

    Returns {name: array of shape (num_data, num_comp)}.
    """
    data_name = split[1]
    data_type = split[2].lower()
    try:
        num_comp = int(split[3])
    except IndexError:
        # The component count is optional and defaults to 1.
        num_comp = 1
    # The standard says:
    # > The parameter numComp must range between (1,4) inclusive; [...]
    if not (0 < num_comp < 5):
        raise ReadError("The parameter numComp must range between (1,4) inclusive")
    dtype = np.dtype(vtk_to_numpy_dtype_name[data_type])
    lt, _ = f.readline().decode().split()
    if lt.upper() != "LOOKUP_TABLE":
        raise ReadError()
    count = num_data * num_comp
    if is_ascii:
        data = np.fromfile(f, count=count, sep=" ", dtype=dtype)
    else:
        # Binary data is big endian, see
        # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
        data = np.fromfile(f, count=count, dtype=dtype.newbyteorder(">"))
    if f.readline().decode() != "\n":
        raise ReadError()
    return {data_name: data.reshape(-1, num_comp)}
def _read_field(f, num_data, split, shape, is_ascii):
    """Read a VECTORS/TENSORS block; result has shape (num_data, *shape)."""
    data_name = split[1]
    data_type = split[2].lower()
    dtype = np.dtype(vtk_to_numpy_dtype_name[data_type])
    # Scalars per item (3 for vectors, 9 for tensors); prod()
    # <https://stackoverflow.com/q/2104782/353337>
    k = reduce((lambda x, y: x * y), shape)
    count = k * num_data
    if is_ascii:
        data = np.fromfile(f, count=count, sep=" ", dtype=dtype)
    else:
        # Binary data is big endian, see
        # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
        data = np.fromfile(f, count=count, dtype=dtype.newbyteorder(">"))
    if f.readline().decode() != "\n":
        raise ReadError()
    return {data_name: data.reshape(-1, *shape)}
def _read_fields(f, num_fields, is_ascii):
    """Read a FIELD section containing `num_fields` named data arrays."""
    data = {}
    for _ in range(num_fields):
        line = f.readline().decode().split()
        # a METADATA block may precede the array header line
        if line[0] == "METADATA":
            _skip_meta(f)
            name, shape0, shape1, data_type = f.readline().decode().split()
        else:
            name, shape0, shape1, data_type = line

        shape0 = int(shape0)  # number of components per tuple
        shape1 = int(shape1)  # number of tuples
        dtype = np.dtype(vtk_to_numpy_dtype_name[data_type.lower()])

        if is_ascii:
            dat = np.fromfile(f, count=shape0 * shape1, sep=" ", dtype=dtype)
        else:
            # Binary data is big endian, see
            # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
            dtype = dtype.newbyteorder(">")
            dat = np.fromfile(f, count=shape0 * shape1, dtype=dtype)
        # the data block is followed by a newline
        line = f.readline().decode()
        if line != "\n":
            raise ReadError()

        # multi-component arrays are stored as (num_tuples, num_components)
        if shape0 != 1:
            dat = dat.reshape((shape1, shape0))

        data[name] = dat

    return data
def _skip_meta(f):
# skip possible metadata
# https://vtk.org/doc/nightly/html/IOLegacyInformationFormat.html
while True:
line = f.readline().decode().strip()
if not line:
# end of metadata is a blank line
break
def _pad(array):
return np.pad(array, ((0, 0), (0, 1)), "constant")
def write(filename, mesh, binary=True):
    """Write `mesh` to `filename` as a legacy "vtk DataFile Version 5.1"
    UNSTRUCTURED_GRID file.

    NOTE(review): 2D point_data/cell_data vectors are padded to 3D *in
    place*, i.e. the caller's mesh object is modified.
    """
    if mesh.points.shape[1] == 2:
        logging.warning(
            "VTK requires 3D points, but 2D points given. "
            "Appending 0 third component."
        )
        points = _pad(mesh.points)
    else:
        points = mesh.points

    # pad 2D vector data to 3D (in place, see note above)
    if mesh.point_data:
        for name, values in mesh.point_data.items():
            if len(values.shape) == 2 and values.shape[1] == 2:
                logging.warning(
                    "VTK requires 3D vectors, but 2D vectors given. "
                    f"Appending 0 third component to {name}."
                )
                mesh.point_data[name] = _pad(values)

    for name, data in mesh.cell_data.items():
        for k, values in enumerate(data):
            if len(values.shape) == 2 and values.shape[1] == 2:
                logging.warning(
                    "VTK requires 3D vectors, but 2D vectors given. "
                    f"Appending 0 third component to {name}."
                )
                data[k] = _pad(data[k])

    if not binary:
        logging.warning("VTK ASCII files are only meant for debugging.")

    with open_file(filename, "wb") as f:
        f.write(b"# vtk DataFile Version 5.1\n")
        f.write(f"written by meshio v{__version__}\n".encode())
        f.write(("BINARY\n" if binary else "ASCII\n").encode())
        f.write(b"DATASET UNSTRUCTURED_GRID\n")

        # write points and cells
        _write_points(f, points, binary)
        _write_cells(f, mesh.cells, binary)

        # write point data
        if mesh.point_data:
            num_points = mesh.points.shape[0]
            f.write(f"POINT_DATA {num_points}\n".encode())
            _write_field_data(f, mesh.point_data, binary)

        # write cell data
        if mesh.cell_data:
            total_num_cells = sum(len(c.data) for c in mesh.cells)
            f.write(f"CELL_DATA {total_num_cells}\n".encode())
            _write_field_data(f, mesh.cell_data, binary)
def _write_points(f, points, binary):
    """Write the POINTS header line followed by all coordinates."""
    vtk_dtype = numpy_to_vtk_dtype[points.dtype.name]
    f.write(f"POINTS {len(points)} {vtk_dtype}\n".encode())
    if binary:
        # Binary data must be big endian, see
        # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
        points.astype(points.dtype.newbyteorder(">")).tofile(f, sep="")
    else:
        # ascii
        points.tofile(f, sep=" ")
    f.write(b"\n")
def _write_cells(f, cells, binary):
    """Write CELLS (OFFSETS + CONNECTIVITY) and CELL_TYPES sections.

    Uses the "# vtk DataFile Version 5.1" layout: the CELLS line lists the
    number of offsets (= num_cells + 1) and the total connectivity length,
    followed by explicit OFFSETS and CONNECTIVITY blocks.

    Fixes:
    - The ASCII branch previously wrote the pre-5.1 "n id0 id1 ..." rows
      with no OFFSETS/CONNECTIVITY keywords, contradicting the 5.1 header
      written above; both branches now emit the 5.1 layout.
    - The binary OFFSETS header previously declared `offsets.dtype` while
      the data was actually written as int64; the declared type now always
      matches the data (vtktypeint64).
    """
    total_num_cells = sum(len(c.data) for c in cells)
    total_num_idx = sum(c.data.size for c in cells)
    f.write(f"CELLS {total_num_cells + 1} {total_num_idx}\n".encode())

    # offsets: end position of each cell in the flattened connectivity
    # array, with a leading 0
    offsets = [[0]]
    k = 0
    for cell_block in cells:
        m, n = cell_block.data.shape
        offsets.append(np.arange(k + n, k + (m + 1) * n, n))
        k = offsets[-1][-1]
    offsets = np.concatenate(offsets)

    if binary:
        f.write(b"OFFSETS vtktypeint64\n")
        # force big-endian and int64
        offsets.astype(">i8").tofile(f, sep="")
        f.write(b"\n")
        f.write(b"CONNECTIVITY vtktypeint64\n")
        for cell_block in cells:
            d = cell_block.data
            cell_idx = meshio_to_vtk_order(cell_block.type)
            if cell_idx is not None:
                d = d[:, cell_idx]
            # force big-endian and int64
            d.astype(">i8").tofile(f, sep="")
        f.write(b"\n")
    else:
        # ascii
        f.write(b"OFFSETS vtktypeint64\n")
        offsets.tofile(f, sep="\n")
        f.write(b"\n")
        f.write(b"CONNECTIVITY vtktypeint64\n")
        for cell_block in cells:
            d = cell_block.data
            cell_idx = meshio_to_vtk_order(cell_block.type)
            if cell_idx is not None:
                d = d[:, cell_idx]
            d.tofile(f, sep="\n")
            f.write(b"\n")

    # write cell types
    f.write(f"CELL_TYPES {total_num_cells}\n".encode())
    if binary:
        for c in cells:
            vtk_type = meshio_to_vtk_type[c.type]
            np.full(len(c.data), vtk_type, dtype=np.dtype(">i4")).tofile(f, sep="")
        f.write(b"\n")
    else:
        # ascii
        for c in cells:
            vtk_type = meshio_to_vtk_type[c.type]
            np.full(len(c.data), vtk_type).tofile(f, sep="\n")
            f.write(b"\n")
def _write_field_data(f, data, binary):
    """Write a FIELD section holding all point- or cell-data arrays."""
    f.write(f"FIELD FieldData {len(data)}\n".encode())
    for name, values in data.items():
        # cell data may arrive as one array per cell block
        if isinstance(values, list):
            values = np.concatenate(values)
        num_tuples = values.shape[0]
        num_components = 1 if values.ndim == 1 else values.shape[1]
        if " " in name:
            raise WriteError(f"VTK doesn't support spaces in field names ('{name}').")
        vtk_dtype = numpy_to_vtk_dtype[values.dtype.name]
        f.write(f"{name} {num_components} {num_tuples} {vtk_dtype}\n".encode())
        if binary:
            # legacy binary VTK data must be big endian
            values.astype(values.dtype.newbyteorder(">")).tofile(f, sep="")
        else:
            # ascii
            values.tofile(f, sep=" ")
        f.write(b"\n")
fixes for ascii vtk 5.1
import logging
from functools import reduce
import numpy as np
from ..__about__ import __version__
from .._exceptions import ReadError, WriteError
from .._files import open_file
from .._mesh import Mesh
from .._vtk_common import (
meshio_to_vtk_order,
meshio_to_vtk_type,
vtk_cells_from_data,
)
# VTK 5.1 data types
# Map from the dtype names that appear in legacy VTK 5.1 headers to numpy
# dtype names; `numpy_to_vtk_dtype` below is the inverse, used by the writer.
vtk_to_numpy_dtype_name = {
    "float": "float32",
    "double": "float64",
    "vtktypeint8": "int8",
    "vtktypeint16": "int16",
    "vtktypeint32": "int32",
    "vtktypeint64": "int64",
    "vtktypeuint8": "uint8",
    "vtktypeuint16": "uint16",
    "vtktypeuint32": "uint32",
    "vtktypeuint64": "uint64",
}

numpy_to_vtk_dtype = {v: k for k, v in vtk_to_numpy_dtype_name.items()}
# supported vtk dataset types
vtk_dataset_types = [
    "UNSTRUCTURED_GRID",
    "STRUCTURED_POINTS",
    "STRUCTURED_GRID",
    "RECTILINEAR_GRID",
]
# additional infos per dataset type
# (the keyword lines that may follow each DATASET declaration)
vtk_dataset_infos = {
    "UNSTRUCTURED_GRID": [],
    "STRUCTURED_POINTS": [
        "DIMENSIONS",
        "ORIGIN",
        "SPACING",
        "ASPECT_RATIO",  # alternative for SPACING in version 1.0 and 2.0
    ],
    "STRUCTURED_GRID": ["DIMENSIONS"],
    "RECTILINEAR_GRID": [
        "DIMENSIONS",
        "X_COORDINATES",
        "Y_COORDINATES",
        "Z_COORDINATES",
    ],
}
# all main sections in vtk
# (keywords dispatched to _read_section; everything else is treated as a
# subsection of the currently active section)
vtk_sections = [
    "METADATA",
    "DATASET",
    "POINTS",
    "CELLS",
    "CELL_TYPES",
    "POINT_DATA",
    "CELL_DATA",
    "LOOKUP_TABLE",
    "COLOR_SCALARS",
]
class Info:
    """Mutable state accumulated while parsing a legacy VTK file."""

    def __init__(self):
        # geometry
        self.points = None
        self.connectivity = None
        self.offsets = None
        self.types = None
        # data arrays
        self.field_data = {}
        self.cell_data_raw = {}
        self.point_data = {}
        # dataset header information (type, dimensions, ...)
        self.dataset = {}
        # top-level section currently being read
        self.active = None
        self.is_ascii = False
        # whitespace-split tokens of the current section header line
        self.split = []
        self.num_items = 0
        # One of the problems in reading VTK files are POINT_DATA and
        # CELL_DATA fields. They can contain a number of
        # SCALARS+LOOKUP_TABLE tables, without giving an indication of how
        # many there are. Hence, SCALARS must be treated like a first-class
        # section. To associate it with POINT/CELL_DATA, we store the
        # `active` section in this variable.
        self.section = None
def read(filename):
    """Read a legacy VTK file and return a Mesh."""
    with open_file(filename, "rb") as f:
        return read_buffer(f)
def read_buffer(f):
    """Parse an already-open legacy VTK file (binary mode) into a Mesh."""
    # initialize output data
    info = Info()

    # skip title comment
    f.readline()

    data_type = f.readline().decode().strip().upper()
    if data_type not in ["ASCII", "BINARY"]:
        raise ReadError(f"Unknown VTK data type '{data_type}'.")
    info.is_ascii = data_type == "ASCII"

    # Main loop: read one keyword line at a time and dispatch. Top-level
    # keywords (vtk_sections) go to _read_section; everything else
    # (SCALARS, VECTORS, FIELD, ... inside POINT_DATA/CELL_DATA) goes to
    # _read_subsection.
    while True:
        line = f.readline().decode()
        if not line:
            # EOF
            break

        line = line.strip()
        if len(line) == 0:
            continue

        info.split = line.split()
        info.section = info.split[0].upper()

        if info.section in vtk_sections:
            _read_section(f, info)
        else:
            _read_subsection(f, info)

    # validate required sections; for structured datasets this also
    # generates points/cells
    _check_mesh(info)

    cells, cell_data = vtk_cells_from_data(
        info.connectivity, info.offsets, info.types, info.cell_data_raw
    )

    return Mesh(
        info.points,
        cells,
        point_data=info.point_data,
        cell_data=cell_data,
        field_data=info.field_data,
    )
def _read_section(f, info):
    """Dispatch on the top-level section keyword held in `info.split`.

    Fixes: removed leftover debug prints ("offi"/"conni"); file-content
    validation now raises ReadError instead of using `assert`, which is
    stripped under `python -O`.
    """
    if info.section == "METADATA":
        _skip_meta(f)

    elif info.section == "DATASET":
        info.active = "DATASET"
        info.dataset["type"] = info.split[1].upper()
        if info.dataset["type"] not in vtk_dataset_types:
            raise ReadError(
                "Only VTK '{}' supported (not {}).".format(
                    "', '".join(vtk_dataset_types), info.dataset["type"]
                )
            )

    elif info.section == "POINTS":
        info.active = "POINTS"
        info.num_points = int(info.split[1])
        data_type = info.split[2].lower()
        info.points = _read_points(f, data_type, info.is_ascii, info.num_points)

    elif info.section == "CELLS":
        info.active = "CELLS"
        try:
            line = f.readline().decode()
        except UnicodeDecodeError:
            line = ""
        # vtk DataFile Version 5.1 - appearing in Paraview 5.8.1 outputs
        # No specification found for this file format.
        # See the question on ParaView Discourse Forum:
        # https://discourse.paraview.org/t/specification-of-vtk-datafile-version-5-1/5127
        if not line.startswith("OFFSETS"):
            raise ReadError("Expected OFFSETS section")
        info.num_offsets = int(info.split[1])
        info.num_items = int(info.split[2])
        dtype = np.dtype(vtk_to_numpy_dtype_name[line.split()[1]])
        offsets = _read_int_data(f, info.is_ascii, info.num_offsets, dtype)

        line = f.readline().decode()
        if not line.startswith("CONNECTIVITY"):
            raise ReadError("Expected CONNECTIVITY section")
        dtype = np.dtype(vtk_to_numpy_dtype_name[line.split()[1]])
        connectivity = _read_int_data(f, info.is_ascii, info.num_items, dtype)
        info.connectivity = connectivity

        # offsets are cumulative end indices into the connectivity array:
        # they must start at 0 and end at its total length
        if offsets[0] != 0:
            raise ReadError("Offsets must start at 0")
        if offsets[-1] != len(connectivity):
            raise ReadError("Last offset must equal the connectivity length")
        info.offsets = offsets[1:]

    elif info.section == "CELL_TYPES":
        info.active = "CELL_TYPES"
        info.num_items = int(info.split[1])
        info.types = _read_cell_types(f, info.is_ascii, info.num_items)

    elif info.section == "POINT_DATA":
        info.active = "POINT_DATA"
        info.num_items = int(info.split[1])

    elif info.section == "CELL_DATA":
        info.active = "CELL_DATA"
        info.num_items = int(info.split[1])

    elif info.section == "LOOKUP_TABLE":
        info.num_items = int(info.split[2])
        # read and discard the RGBA table
        np.fromfile(f, count=info.num_items * 4, sep=" ", dtype=float)
        # rgba = data.reshape((info.num_items, 4))

    elif info.section == "COLOR_SCALARS":
        nValues = int(info.split[2])
        # re-use num_items from active POINT/CELL_DATA
        num_items = info.num_items
        dtype = np.ubyte
        if info.is_ascii:
            dtype = float
        # NOTE(review): for ASCII files this reads raw bytes (no sep=" ")
        # even though dtype is float — looks suspect; confirm against an
        # ASCII file containing COLOR_SCALARS.
        np.fromfile(f, count=num_items * nValues, dtype=dtype)
def _read_subsection(f, info):
    """Read a keyword that appears inside the currently active section."""
    # route the parsed data into the dict matching the enclosing section
    if info.active == "POINT_DATA":
        d = info.point_data
    elif info.active == "CELL_DATA":
        d = info.cell_data_raw
    elif info.active == "DATASET":
        d = info.dataset
    else:
        d = info.field_data

    if info.section in vtk_dataset_infos[info.dataset["type"]]:
        # "X_COORDINATES"[1:] == "_COORDINATES", likewise for Y and Z
        if info.section[1:] == "_COORDINATES":
            info.num_points = int(info.split[1])
            data_type = info.split[2].lower()
            d[info.section] = _read_coords(f, data_type, info.is_ascii, info.num_points)
        else:
            # DIMENSIONS/ORIGIN/SPACING/ASPECT_RATIO: three inline values
            if info.section == "DIMENSIONS":
                d[info.section] = list(map(int, info.split[1:]))
            else:
                d[info.section] = list(map(float, info.split[1:]))
            if len(d[info.section]) != 3:
                raise ReadError(
                    "Wrong number of info in section '{}'. Need 3, got {}.".format(
                        info.section, len(d[info.section])
                    )
                )
    elif info.section == "SCALARS":
        d.update(_read_scalar_field(f, info.num_items, info.split, info.is_ascii))
    elif info.section == "VECTORS":
        d.update(_read_field(f, info.num_items, info.split, [3], info.is_ascii))
    elif info.section == "TENSORS":
        d.update(_read_field(f, info.num_items, info.split, [3, 3], info.is_ascii))
    elif info.section == "FIELD":
        d.update(_read_fields(f, int(info.split[2]), info.is_ascii))
    else:
        raise ReadError(f"Unknown section '{info.section}'.")
def _check_mesh(info):
    """Validate required sections; generate points/cells for structured grids."""
    if info.dataset["type"] == "UNSTRUCTURED_GRID":
        if info.connectivity is None:
            raise ReadError("Required section CELLS not found.")
        if info.types is None:
            raise ReadError("Required section CELL_TYPES not found.")
    elif info.dataset["type"] == "STRUCTURED_POINTS":
        dim = info.dataset["DIMENSIONS"]
        ori = info.dataset["ORIGIN"]
        # ASPECT_RATIO is the SPACING alias used by VTK versions 1.0/2.0
        spa = (
            info.dataset["SPACING"]
            if "SPACING" in info.dataset
            else info.dataset["ASPECT_RATIO"]
        )
        # uniform grid: points are implied by origin + spacing + dimensions
        axis = [
            np.linspace(ori[i], ori[i] + (dim[i] - 1.0) * spa[i], dim[i])
            for i in range(3)
        ]
        info.points = _generate_points(axis)
        info.connectivity, info.types = _generate_cells(dim=info.dataset["DIMENSIONS"])
    elif info.dataset["type"] == "RECTILINEAR_GRID":
        axis = [
            info.dataset["X_COORDINATES"],
            info.dataset["Y_COORDINATES"],
            info.dataset["Z_COORDINATES"],
        ]
        info.points = _generate_points(axis)
        info.connectivity, info.types = _generate_cells(dim=info.dataset["DIMENSIONS"])
    elif info.dataset["type"] == "STRUCTURED_GRID":
        # points were read from the POINTS section; only cells are implicit
        info.connectivity, info.types = _generate_cells(dim=info.dataset["DIMENSIONS"])
def _generate_cells(dim):
    """Generate the implicit cells (lines/quads/hexes) of a structured grid.

    Returns the flattened count-prefixed cell array (each cell starts with
    its node count) and the matching array of VTK cell-type ids.
    """
    # collapse singleton dimensions: a (nx, ny, 1) grid is 2D
    ele_dim = [d - 1 for d in dim if d > 1]
    # TODO use math.prod when requiring Python 3.8+? this would save the int conversion
    # <https://github.com/microsoft/pyright/issues/1226>
    ele_no = int(np.prod(ele_dim))
    spatial_dim = len(ele_dim)

    if spatial_dim == 1:
        # cells are lines in 1D
        cells = np.empty((ele_no, 3), dtype=int)
        cells[:, 0] = 2
        cells[:, 1] = np.arange(ele_no, dtype=int)
        cells[:, 2] = cells[:, 1] + 1
        cell_types = np.full(ele_no, 3, dtype=int)  # 3 == VTK_LINE
    elif spatial_dim == 2:
        # cells are quad in 2D
        cells = np.empty((ele_no, 5), dtype=int)
        cells[:, 0] = 4
        cells[:, 1] = np.arange(0, ele_no, dtype=int)
        # each row of points has one more point than the row of cells,
        # hence the extra offset per completed row
        cells[:, 1] += np.arange(0, ele_no, dtype=int) // ele_dim[0]
        cells[:, 2] = cells[:, 1] + 1
        cells[:, 3] = cells[:, 1] + 2 + ele_dim[0]
        cells[:, 4] = cells[:, 3] - 1
        cell_types = np.full(ele_no, 9, dtype=int)  # 9 == VTK_QUAD
    else:
        # cells are hex in 3D
        cells = np.empty((ele_no, 9), dtype=int)
        cells[:, 0] = 8
        cells[:, 1] = np.arange(ele_no)
        # offset for the extra row/layer of points in every slice
        cells[:, 1] += (ele_dim[0] + ele_dim[1] + 1) * (
            np.arange(ele_no) // (ele_dim[0] * ele_dim[1])
        )
        cells[:, 1] += (np.arange(ele_no) % (ele_dim[0] * ele_dim[1])) // ele_dim[0]
        cells[:, 2] = cells[:, 1] + 1
        cells[:, 3] = cells[:, 1] + 2 + ele_dim[0]
        cells[:, 4] = cells[:, 3] - 1
        cells[:, 5] = cells[:, 1] + (1 + ele_dim[0]) * (1 + ele_dim[1])
        cells[:, 6] = cells[:, 5] + 1
        cells[:, 7] = cells[:, 5] + 2 + ele_dim[0]
        cells[:, 8] = cells[:, 7] - 1
        cell_types = np.full(ele_no, 12, dtype=int)  # 12 == VTK_HEXAHEDRON
    return cells.reshape(-1), cell_types
def _generate_points(axis):
x_dim = len(axis[0])
y_dim = len(axis[1])
z_dim = len(axis[2])
pnt_no = x_dim * y_dim * z_dim
x_id, y_id, z_id = np.mgrid[0:x_dim, 0:y_dim, 0:z_dim]
points = np.empty((pnt_no, 3), dtype=axis[0].dtype)
# VTK sorts points and cells in Fortran order
points[:, 0] = axis[0][x_id.reshape(-1, order="F")]
points[:, 1] = axis[1][y_id.reshape(-1, order="F")]
points[:, 2] = axis[2][z_id.reshape(-1, order="F")]
return points
def _read_coords(f, data_type, is_ascii, num_points):
    """Read one {X,Y,Z}_COORDINATES block of a RECTILINEAR_GRID."""
    dtype = np.dtype(vtk_to_numpy_dtype_name[data_type])
    if is_ascii:
        coords = np.fromfile(f, count=num_points, sep=" ", dtype=dtype)
    else:
        # Binary data is big endian, see
        # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
        coords = np.fromfile(f, count=num_points, dtype=dtype.newbyteorder(">"))
    if f.readline().decode() != "\n":
        raise ReadError()
    return coords
def _read_points(f, data_type, is_ascii, num_points):
    """Read the POINTS block and return an (num_points, 3) array."""
    dtype = np.dtype(vtk_to_numpy_dtype_name[data_type])
    count = num_points * 3
    if is_ascii:
        points = np.fromfile(f, count=count, sep=" ", dtype=dtype)
    else:
        # Binary data is big endian, see
        # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
        points = np.fromfile(f, count=count, dtype=dtype.newbyteorder(">"))
    if f.readline().decode() != "\n":
        raise ReadError()
    return points.reshape((num_points, 3))
def _read_int_data(f, is_ascii, num_items, dtype):
if is_ascii:
c = np.fromfile(f, count=num_items, sep=" ", dtype=dtype)
else:
dtype = dtype.newbyteorder(">")
c = np.fromfile(f, count=num_items, dtype=dtype)
print("c", c)
line = f.readline().decode()
if line != "\n":
raise ReadError("Expected newline")
return c
def _read_cell_types(f, is_ascii, num_items):
if is_ascii:
ct = np.fromfile(f, count=int(num_items), sep=" ", dtype=int)
else:
# binary
ct = np.fromfile(f, count=int(num_items), dtype=">i4")
line = f.readline().decode()
# Sometimes, there's no newline at the end
if line.strip() != "":
raise ReadError()
return ct
def _read_scalar_field(f, num_data, split, is_ascii):
    """Read a SCALARS section and its mandatory LOOKUP_TABLE line."""
    data_name = split[1]
    dtype = np.dtype(vtk_to_numpy_dtype_name[split[2].lower()])
    try:
        num_comp = int(split[3])
    except IndexError:
        # numComp is optional and defaults to 1
        num_comp = 1
    # The standard says:
    # > The parameter numComp must range between (1,4) inclusive; [...]
    if num_comp not in (1, 2, 3, 4):
        raise ReadError("The parameter numComp must range between (1,4) inclusive")
    lt, _ = f.readline().decode().split()
    if lt.upper() != "LOOKUP_TABLE":
        raise ReadError()
    total = num_data * num_comp
    if not is_ascii:
        # Binary data is big endian, see
        # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
        data = np.fromfile(f, count=total, dtype=dtype.newbyteorder(">"))
    else:
        data = np.fromfile(f, count=total, sep=" ", dtype=dtype)
    if f.readline().decode() != "\n":
        raise ReadError()
    return {data_name: data.reshape(-1, num_comp)}
def _read_field(f, num_data, split, shape, is_ascii):
    """Read a VECTORS/TENSORS-style block of `num_data` tuples of `shape`."""
    data_name = split[1]
    dtype = np.dtype(vtk_to_numpy_dtype_name[split[2].lower()])
    # total scalars per tuple (3 for vectors, 9 for tensors)
    # <https://stackoverflow.com/q/2104782/353337>
    per_tuple = reduce(lambda x, y: x * y, shape)
    total = per_tuple * num_data
    if not is_ascii:
        # Binary data is big endian, see
        # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
        data = np.fromfile(f, count=total, dtype=dtype.newbyteorder(">"))
    else:
        data = np.fromfile(f, count=total, sep=" ", dtype=dtype)
    if f.readline().decode() != "\n":
        raise ReadError()
    return {data_name: data.reshape(-1, *shape)}
def _read_fields(f, num_fields, is_ascii):
    """Read a FIELD section containing `num_fields` named data arrays."""
    data = {}
    for _ in range(num_fields):
        line = f.readline().decode().split()
        # an optional METADATA block may precede the array header line
        if line[0] == "METADATA":
            _skip_meta(f)
            name, shape0, shape1, data_type = f.readline().decode().split()
        else:
            name, shape0, shape1, data_type = line

        shape0 = int(shape0)  # number of components per tuple
        shape1 = int(shape1)  # number of tuples
        dtype = np.dtype(vtk_to_numpy_dtype_name[data_type.lower()])

        if is_ascii:
            dat = np.fromfile(f, count=shape0 * shape1, sep=" ", dtype=dtype)
        else:
            # Binary data is big endian, see
            # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
            dtype = dtype.newbyteorder(">")
            dat = np.fromfile(f, count=shape0 * shape1, dtype=dtype)
        # the data block is followed by a newline
        line = f.readline().decode()
        if line != "\n":
            raise ReadError()

        # multi-component arrays are stored as (num_tuples, num_components)
        if shape0 != 1:
            dat = dat.reshape((shape1, shape0))

        data[name] = dat

    return data
def _skip_meta(f):
# skip possible metadata
# https://vtk.org/doc/nightly/html/IOLegacyInformationFormat.html
while True:
line = f.readline().decode().strip()
if not line:
# end of metadata is a blank line
break
def _pad(array):
return np.pad(array, ((0, 0), (0, 1)), "constant")
def write(filename, mesh, binary=True):
    """Write `mesh` to `filename` as a legacy "vtk DataFile Version 5.1"
    UNSTRUCTURED_GRID file.

    NOTE(review): 2D point_data/cell_data vectors are padded to 3D *in
    place*, i.e. the caller's mesh object is modified.
    """
    if mesh.points.shape[1] == 2:
        logging.warning(
            "VTK requires 3D points, but 2D points given. "
            "Appending 0 third component."
        )
        points = _pad(mesh.points)
    else:
        points = mesh.points

    # pad 2D vector data to 3D (in place, see note above)
    if mesh.point_data:
        for name, values in mesh.point_data.items():
            if len(values.shape) == 2 and values.shape[1] == 2:
                logging.warning(
                    "VTK requires 3D vectors, but 2D vectors given. "
                    f"Appending 0 third component to {name}."
                )
                mesh.point_data[name] = _pad(values)

    for name, data in mesh.cell_data.items():
        for k, values in enumerate(data):
            if len(values.shape) == 2 and values.shape[1] == 2:
                logging.warning(
                    "VTK requires 3D vectors, but 2D vectors given. "
                    f"Appending 0 third component to {name}."
                )
                data[k] = _pad(data[k])

    if not binary:
        logging.warning("VTK ASCII files are only meant for debugging.")

    with open_file(filename, "wb") as f:
        f.write(b"# vtk DataFile Version 5.1\n")
        f.write(f"written by meshio v{__version__}\n".encode())
        f.write(("BINARY\n" if binary else "ASCII\n").encode())
        f.write(b"DATASET UNSTRUCTURED_GRID\n")

        # write points and cells
        _write_points(f, points, binary)
        _write_cells(f, mesh.cells, binary)

        # write point data
        if mesh.point_data:
            num_points = mesh.points.shape[0]
            f.write(f"POINT_DATA {num_points}\n".encode())
            _write_field_data(f, mesh.point_data, binary)

        # write cell data
        if mesh.cell_data:
            total_num_cells = sum(len(c.data) for c in mesh.cells)
            f.write(f"CELL_DATA {total_num_cells}\n".encode())
            _write_field_data(f, mesh.cell_data, binary)
def _write_points(f, points, binary):
    """Write the POINTS header line followed by all coordinates."""
    f.write(
        "POINTS {} {}\n".format(len(points), numpy_to_vtk_dtype[points.dtype.name]).encode()
    )
    if not binary:
        # ascii
        points.tofile(f, sep=" ")
    else:
        # Binary data must be big endian, see
        # <https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python#.22legacy.22>.
        points.astype(points.dtype.newbyteorder(">")).tofile(f, sep="")
    f.write(b"\n")
def _write_cells(f, cells, binary):
    """Write CELLS (OFFSETS + CONNECTIVITY) and CELL_TYPES in 5.1 layout.

    The CELLS header lists the number of offsets (= num_cells + 1) and the
    total connectivity length; offsets and connectivity follow in explicit
    OFFSETS/CONNECTIVITY blocks, always written as (big-endian) int64.

    Fixes: replaced placeholder-less f-strings (F541) with plain byte
    literals and dropped needless list wrappers inside sum().
    """
    total_num_cells = sum(len(c.data) for c in cells)
    total_num_idx = sum(c.data.size for c in cells)
    f.write(f"CELLS {total_num_cells + 1} {total_num_idx}\n".encode())

    # offsets: end position of each cell in the flattened connectivity
    # array, with a leading 0
    offsets = [[0]]
    k = 0
    for cell_block in cells:
        m, n = cell_block.data.shape
        offsets.append(np.arange(k + n, k + (m + 1) * n, n))
        k = offsets[-1][-1]
    offsets = np.concatenate(offsets)

    if binary:
        f.write(b"OFFSETS vtktypeint64\n")
        # force big-endian and int64
        offsets.astype(">i8").tofile(f, sep="")
        f.write(b"\n")
        f.write(b"CONNECTIVITY vtktypeint64\n")
        for cell_block in cells:
            d = cell_block.data
            cell_idx = meshio_to_vtk_order(cell_block.type)
            if cell_idx is not None:
                d = d[:, cell_idx]
            # force big-endian and int64
            d.astype(">i8").tofile(f, sep="")
        f.write(b"\n")
    else:
        # ascii
        f.write(b"OFFSETS vtktypeint64\n")
        offsets.tofile(f, sep="\n")
        f.write(b"\n")
        f.write(b"CONNECTIVITY vtktypeint64\n")
        for cell_block in cells:
            d = cell_block.data
            cell_idx = meshio_to_vtk_order(cell_block.type)
            if cell_idx is not None:
                d = d[:, cell_idx]
            d.tofile(f, sep="\n")
            f.write(b"\n")

    # write cell types
    f.write(f"CELL_TYPES {total_num_cells}\n".encode())
    if binary:
        for c in cells:
            vtk_type = meshio_to_vtk_type[c.type]
            np.full(len(c.data), vtk_type, dtype=np.dtype(">i4")).tofile(f, sep="")
        f.write(b"\n")
    else:
        # ascii
        for c in cells:
            vtk_type = meshio_to_vtk_type[c.type]
            np.full(len(c.data), vtk_type).tofile(f, sep="\n")
            f.write(b"\n")
def _write_field_data(f, data, binary):
    """Write a FIELD section holding all point- or cell-data arrays."""
    f.write(f"FIELD FieldData {len(data)}\n".encode())
    for name, values in data.items():
        # cell data may arrive as one array per cell block
        if isinstance(values, list):
            values = np.concatenate(values)
        num_tuples = values.shape[0]
        num_components = values.shape[1] if len(values.shape) > 1 else 1
        if " " in name:
            raise WriteError(f"VTK doesn't support spaces in field names ('{name}').")
        header = "{} {} {} {}\n".format(
            name, num_components, num_tuples, numpy_to_vtk_dtype[values.dtype.name]
        )
        f.write(header.encode())
        if binary:
            # legacy binary VTK data must be big endian
            values.astype(values.dtype.newbyteorder(">")).tofile(f, sep="")
        else:
            # ascii
            values.tofile(f, sep=" ")
        f.write(b"\n")
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2013 Jeremy Lainé
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import json
import mimetypes
import os
import posixpath
import shutil
import time
try:
from urllib.parse import unquote
except ImportError: # Python 2
from urllib import unquote
import Image
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import render, redirect
from django.template import RequestContext
from django.utils.http import http_date, urlquote
from django.views.decorators.http import require_http_methods
import django.views.static
import coconuts.EXIF as EXIF
from coconuts.forms import AddFileForm, AddFolderForm, PhotoForm, ShareForm, ShareAccessFormSet
from coconuts.models import Folder, NamedAcl, OWNERS, PERMISSIONS, path2url, url2path
# EXIF orientation tag value (1-8) -> transform triple; presumably
# [mirror-x, mirror-y, rotation in degrees CCW] — only the rotation
# component ([2]) is used by render_file. TODO confirm the first two.
ORIENTATIONS = {
    1: [ False, False, 0 ], # Horizontal (normal)
    2: [ True, False, 0 ], # Mirrored horizontal
    3: [ False, False, 180 ], # Rotated 180
    4: [ False, True, 0 ], # Mirrored vertical
    5: [ True, False, 90 ], # Mirrored horizontal then rotated 90 CCW
    6: [ False, False, -90 ], # Rotated 90 CW
    7: [ True, False, -90 ], # Mirrored horizontal then rotated 90 CW
    8: [ False, False, 90 ], # Rotated 90 CCW
}
def clean_path(path):
    """
    Sanitize a URL path into a safe relative filesystem path.

    Adapted from django.views.static.serve. Raises ValueError if the
    path had to be altered, i.e. it contained '.', '..', a drive prefix
    or empty components.
    """
    path = posixpath.normpath(unquote(path)).lstrip('/')
    kept = []
    for part in path.split('/'):
        if not part:
            # Strip empty path components.
            continue
        # Strip any drive prefix and directory portion.
        drive, part = os.path.splitdrive(part)
        head, part = os.path.split(part)
        if part in (os.curdir, os.pardir):
            # Strip '.' and '..' in path.
            continue
        kept.append(part)
    newpath = '/'.join(kept)
    if newpath and newpath != path:
        raise ValueError
    return newpath
def get_image_info(filepath):
    """
    Get an image's information.

    Returns a dict with optional 'camera' and 'settings' strings built
    from the EXIF tags plus the 'size' (width, height) tuple.

    Fixes: dict.has_key() no longer exists on Python 3 (this module
    already carries a Python 3 urllib fallback); use the `in` operator.
    """
    info = {}
    with open(filepath, 'rb') as fp:
        tags = EXIF.process_file(fp, details=False)

    # camera: combine make and model, avoiding duplication when the model
    # already starts with the make
    camera = None
    if 'Image Model' in tags:
        camera = "%s" % tags['Image Model']
    if 'Image Make' in tags:
        make = "%s" % tags['Image Make']
        if not camera:
            camera = make
        elif not camera.startswith(make):
            camera = "%s %s" % (make, camera)
    if camera:
        info['camera'] = camera

    # settings: aperture, exposure time, focal length
    bits = []
    if 'EXIF FNumber' in tags:
        # FIXME: eval() of a value taken from the image file is unsafe;
        # parse the ratio explicitly instead.
        v = eval("float(%s.0)" % tags['EXIF FNumber'])
        bits.append("f/%s" % v)
    if 'EXIF ExposureTime' in tags:
        bits.append(u"%s sec" % tags['EXIF ExposureTime'])
    if 'EXIF FocalLength' in tags:
        bits.append(u"%s mm" % tags['EXIF FocalLength'])
    if bits:
        info['settings'] = ', '.join(bits)

    # size
    info['size'] = Image.open(filepath).size
    return info
@login_required
@require_http_methods(['POST'])
def add_file(request, path):
    """Upload a file into the given folder.

    Fixes: the `file()` builtin does not exist on Python 3 and the handle
    leaked if a write failed; use open() in a context manager. Also drops
    an unused local.
    """
    try:
        folder = Folder(path)
    except Folder.DoesNotExist:
        raise Http404

    # check permissions
    if not folder.has_perm('can_write', request.user):
        return HttpResponseForbidden()

    form = AddFileForm(request.POST, request.FILES)
    if not form.is_valid():
        return HttpResponseBadRequest()

    # refuse to overwrite an existing file
    upload = request.FILES['upload']
    filepath = os.path.join(folder.filepath(), upload.name)
    if os.path.exists(filepath):
        return HttpResponseBadRequest()

    with open(filepath, 'wb') as fp:
        for chunk in upload.chunks():
            fp.write(chunk)

    return content_list(request, folder.path)
@login_required
@require_http_methods(['POST'])
def add_folder(request, path):
    """Create a sub-folder inside the given folder."""
    try:
        parent = Folder(path)
    except Folder.DoesNotExist:
        raise Http404

    # check permissions
    if not parent.has_perm('can_write', request.user):
        return HttpResponseForbidden()

    form = AddFolderForm(request.POST)
    if not form.is_valid():
        return HttpResponseBadRequest()

    Folder.create(os.path.join(parent.path, form.cleaned_data['name']))
    return content_list(request, parent.path)
@login_required
def browse(request, path):
    """Show the list of photos for the given folder.

    Fixes: the template file handle was never closed; read it inside a
    context manager.
    """
    if path:
        # non-empty paths are handled client-side behind the URL fragment
        return redirect(reverse(browse, args=['']) + '#/' + path)
    template_path = os.path.join(os.path.dirname(__file__), 'static', 'coconuts', 'index.html')
    with open(template_path, 'rb') as fp:
        return HttpResponse(fp.read())
@login_required
def content_list(request, path):
    """Show the list of photos for the given folder.

    Returns a JSON document describing the folder's sub-folders, files
    and the current user's permissions on it.
    """
    try:
        folder = Folder(os.path.dirname(path))
    except Folder.DoesNotExist:
        raise Http404

    # check permissions
    if not folder.has_perm('can_read', request.user):
        return HttpResponseForbidden()

    # list items
    folder_path = folder.filepath()
    folder_url = '/' + folder.path
    if not folder_url.endswith('/'):
        folder_url += '/'
    folders = []
    files = []
    for entry in sorted(os.listdir(folder_path)):
        # skip hidden entries
        if entry.startswith('.'):
            continue
        node_path = os.path.join(folder_path, entry)
        node_url = folder_url + entry
        if os.path.isdir(node_path):
            # keep only the children the user is allowed to read. This is only useful in '/'
            child = Folder(node_url[1:])
            if child.has_perm('can_read', request.user):
                folders.append({
                    'name': entry,
                    'path': node_url + '/',
                    'size': os.path.getsize(node_path),
                })
        else:
            data = {
                'mimetype': mimetypes.guess_type(node_path)[0],
                'name': entry,
                'path': node_url,
                'size': os.path.getsize(node_path),
            }
            # JPEG files additionally get their EXIF information attached
            if data['mimetype'] in ['image/jpeg', 'image/pjpeg']:
                data['image'] = get_image_info(node_path)
            files.append(data)

    return HttpResponse(json.dumps({
        'can_manage': folder.has_perm('can_manage', request.user),
        'can_write': folder.has_perm('can_write', request.user),
        'files': files,
        'folders': folders,
        'name': folder.name(),
        'path': folder_url,
    }), content_type='application/json')
@login_required
@require_http_methods(['POST'])
def delete(request, path):
    """Delete the given file or folder."""
    # check permissions
    if not path:
        # refuse to act on the root folder
        return HttpResponseForbidden()
    folder = Folder(os.path.dirname(path))
    if not folder.has_perm('can_write', request.user):
        return HttpResponseForbidden()

    # delete file or folder
    # NOTE(review): `path` comes from the URL and is only passed through
    # url2path() here — presumably that (or the URL conf) rejects '..'
    # components; confirm, or sanitize with clean_path() first.
    filepath = os.path.join(settings.COCONUTS_DATA_ROOT, url2path(path))
    if os.path.isdir(filepath):
        shutil.rmtree(filepath)
    else:
        os.unlink(filepath)
    return content_list(request, folder.path)
@login_required
def download(request, path):
    """Return the raw file for the given photo."""
    # check permissions
    folder = Folder(os.path.dirname(path))
    if not folder.has_perm('can_read', request.user):
        return HttpResponseForbidden()

    response = django.views.static.serve(
        request, path, document_root=settings.COCONUTS_DATA_ROOT)
    # force a download rather than inline display
    response['Content-Disposition'] = 'attachment; filename="%s"' % urlquote(os.path.basename(path))
    return response
@login_required
def manage(request, path):
    """Manage the properties (share settings and ACLs) for the given folder."""
    # Check this is a folder, not a file
    if path and not (path.endswith('/') and path.count("/") == 1):
        return HttpResponseForbidden()

    # Check permissions
    folder = Folder(os.path.dirname(path))
    share = folder.share
    if not share.has_perm('can_manage', request.user):
        return HttpResponseForbidden()

    # Process submission
    if request.method == 'POST':
        # properties
        shareform = ShareForm(request.POST, instance=share)
        if shareform.is_valid():
            shareform.save()

        # access
        formset = ShareAccessFormSet(request.POST)
        if formset.is_valid():
            # rebuild the ACL list from the submitted rows
            acls = []
            for data in formset.clean():
                acl = NamedAcl("%s:" % data['owner'])
                for perm in PERMISSIONS:
                    if data[perm]: acl.add_perm(perm)
                if acl.permissions: acls.append(acl)
            share.set_acls(acls)

            # Check we are not locking ourselves out before saving
            if not share.has_perm('can_manage', request.user):
                return HttpResponseForbidden()
            share.save()
            return redirect(reverse(browse, args=['']))

    # fill form from database
    data = []
    for acl in share.acls():
        entry = {'owner': "%s:%s" % (acl.type, acl.name)}
        for perm in PERMISSIONS:
            entry[perm] = acl.has_perm(perm)
        data.append(entry)
    shareform = ShareForm(instance=share)
    formset = ShareAccessFormSet(initial=data)

    return render(request, 'coconuts/manage.html', {
        'formset': formset,
        'share': share,
        'shareform': shareform,
    })
@login_required
def owner_list(request):
    """Return the possible ACL owners as JSON, grouped by owner class."""
    choices = []
    for klass, key in OWNERS:
        opts = [
            "%s:%s" % (klass.__name__.lower(), getattr(obj, key))
            for obj in klass.objects.all().order_by(key)
        ]
        if opts:
            choices.append({'name': klass.__name__, 'options': opts})
    return HttpResponse(json.dumps(choices), content_type='application/json')
@login_required
def render_file(request, path):
    """Return a resized version of the given photo.

    Thumbnails are generated on demand and cached under
    COCONUTS_CACHE_ROOT/<size>/.

    Fixes: dict.has_key() no longer exists on Python 3 (this module
    already carries a Python 3 urllib fallback); use the `in` operator.
    """
    folder = Folder(os.path.dirname(path))
    filepath = os.path.join(settings.COCONUTS_DATA_ROOT, url2path(path))

    # check permissions
    if not folder.has_perm('can_read', request.user):
        return HttpResponseForbidden()

    # check the size is legitimate
    form = PhotoForm(request.GET)
    if not form.is_valid():
        return HttpResponseBadRequest()
    size = form.cleaned_data['size']

    # generate the thumbnail unless it is already cached
    cachesize = size, int(size * 0.75)
    cachepath = os.path.join(str(size), url2path(path))
    cachefile = os.path.join(settings.COCONUTS_CACHE_ROOT, cachepath)
    if not os.path.exists(cachefile):
        cachedir = os.path.dirname(cachefile)
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        img = Image.open(filepath)
        # rotate if needed, according to the EXIF orientation tag
        with open(filepath, 'rb') as fp:
            tags = EXIF.process_file(fp, details=False)
        if 'Image Orientation' in tags:
            orientation = tags['Image Orientation'].values[0]
            img = img.rotate(ORIENTATIONS[orientation][2])
        img.thumbnail(cachesize, Image.ANTIALIAS)
        img.save(cachefile, quality=90)

    # serve the photo
    response = django.views.static.serve(request,
        path2url(cachepath),
        document_root=settings.COCONUTS_CACHE_ROOT)
    response["Expires"] = http_date(time.time() + 3600 * 24 * 365)
    return response
Use clean_path() to sanitize request paths before handling them.
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2013 Jeremy Lainé
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import json
import mimetypes
import os
import posixpath
import shutil
import time
try:
from urllib.parse import unquote
except ImportError: # Python 2
from urllib import unquote
import Image
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import render, redirect
from django.template import RequestContext
from django.utils.http import http_date, urlquote
from django.views.decorators.http import require_http_methods
import django.views.static
import coconuts.EXIF as EXIF
from coconuts.forms import AddFileForm, AddFolderForm, PhotoForm, ShareForm, ShareAccessFormSet
from coconuts.models import Folder, NamedAcl, OWNERS, PERMISSIONS, path2url, url2path
# Map of EXIF "Image Orientation" values to
# [mirror-horizontal, mirror-vertical, rotation-in-degrees].
# Only the rotation component (index 2) is applied when rendering thumbnails.
ORIENTATIONS = {
    1: [ False, False, 0 ], # Horizontal (normal)
    2: [ True, False, 0 ], # Mirrored horizontal
    3: [ False, False, 180 ], # Rotated 180
    4: [ False, True, 0 ], # Mirrored vertical
    5: [ True, False, 90 ], # Mirrored horizontal then rotated 90 CCW
    6: [ False, False, -90 ], # Rotated 90 CW
    7: [ True, False, -90 ], # Mirrored horizontal then rotated 90 CW
    8: [ False, False, 90 ], # Rotated 90 CCW
}
def clean_path(path):
    """Sanitize a URL path, rejecting traversal attempts.

    Adapted from django.views.static.serve.  Returns the normalized
    relative path, or raises ValueError when suspicious components
    (drive letters, directory parts, '.' / '..') had to be dropped.
    """
    normalized = posixpath.normpath(unquote(path)).lstrip('/')
    rebuilt = ''
    for component in normalized.split('/'):
        # drop empty components
        if not component:
            continue
        # strip any drive prefix and directory part from the component
        _, component = os.path.splitdrive(component)
        _, component = os.path.split(component)
        # drop '.' and '..'
        if component in (os.curdir, os.pardir):
            continue
        rebuilt = os.path.join(rebuilt, component).replace('\\', '/')
    # anything dropped above indicates a suspicious path
    if rebuilt and rebuilt != normalized:
        raise ValueError
    return rebuilt
def get_image_info(filepath):
    """
    Get an image's information.

    Returns a dict with optional 'camera' and 'settings' entries built from
    the EXIF tags, and a 'size' entry with the (width, height) in pixels.
    """
    info = {}
    with open(filepath, 'rb') as fp:
        tags = EXIF.process_file(fp, details=False)
    # camera: combine make and model, avoiding "Make Make Model" duplication
    # (dict.has_key() is Python 2 only, replaced with the "in" operator)
    camera = None
    if 'Image Model' in tags:
        camera = "%s" % tags['Image Model']
    if 'Image Make' in tags:
        make = "%s" % tags['Image Make']
        if not camera:
            camera = make
        elif not camera.startswith(make):
            camera = "%s %s" % (make, camera)
    if camera:
        info['camera'] = camera
    # settings: aperture, exposure time, focal length
    bits = []
    if 'EXIF FNumber' in tags:
        # The tag renders as a rational such as "28/10"; compute the value
        # directly instead of eval()'ing tag-derived text.
        value = str(tags['EXIF FNumber'])
        if '/' in value:
            num, den = value.split('/', 1)
            fnumber = float(num) / float(den)
        else:
            fnumber = float(value)
        bits.append("f/%s" % fnumber)
    if 'EXIF ExposureTime' in tags:
        bits.append(u"%s sec" % tags['EXIF ExposureTime'])
    if 'EXIF FocalLength' in tags:
        bits.append(u"%s mm" % tags['EXIF FocalLength'])
    if bits:
        info['settings'] = ', '.join(bits)
    # size in pixels
    info['size'] = Image.open(filepath).size
    return info
@login_required
@require_http_methods(['POST'])
def add_file(request, path):
    """Store an uploaded file into the given folder and return its listing.

    Responds 404 for an unknown folder, 403 without write permission and
    400 for an invalid form or a name collision.
    """
    try:
        folder = Folder(path)
    except Folder.DoesNotExist:
        raise Http404
    # check permissions
    if not folder.has_perm('can_write', request.user):
        return HttpResponseForbidden()
    form = AddFileForm(request.POST, request.FILES)
    if not form.is_valid():
        return HttpResponseBadRequest()
    upload = request.FILES['upload']
    filepath = os.path.join(folder.filepath(), upload.name)
    # refuse to overwrite an existing file
    if os.path.exists(filepath):
        return HttpResponseBadRequest()
    # file() is Python 2 only and the handle was never closed on error;
    # use open() with a context manager instead.
    with open(filepath, 'wb') as fp:
        for chunk in upload.chunks():
            fp.write(chunk)
    return content_list(request, folder.path)
@login_required
@require_http_methods(['POST'])
def add_folder(request, path):
    """Create a sub-folder inside the given folder and return its listing."""
    try:
        parent = Folder(path)
    except Folder.DoesNotExist:
        raise Http404
    # the caller needs write access to the parent folder
    if not parent.has_perm('can_write', request.user):
        return HttpResponseForbidden()
    form = AddFolderForm(request.POST)
    if not form.is_valid():
        return HttpResponseBadRequest()
    Folder.create(os.path.join(parent.path, form.cleaned_data['name']))
    return content_list(request, parent.path)
@login_required
def browse(request, path):
    """Show the list of photos for the given folder.

    Non-empty paths are handled client-side, so they redirect to the
    single-page application's fragment URL; the empty path serves the app.
    """
    if path:
        return redirect(reverse(browse, args=['']) + '#/' + path)
    template_path = os.path.join(os.path.dirname(__file__), 'static', 'coconuts', 'index.html')
    # close the template file instead of leaking the handle
    with open(template_path, 'rb') as fp:
        return HttpResponse(fp.read())
@login_required
def content_list(request, path):
    """Show the list of photos for the given folder.

    Returns a JSON document describing the folder's sub-folders and files,
    together with the caller's permissions on the folder.
    """
    path = clean_path(path)
    try:
        folder = Folder(path)
    except Folder.DoesNotExist:
        raise Http404
    # check permissions
    if not folder.has_perm('can_read', request.user):
        return HttpResponseForbidden()
    # list items
    folder_path = folder.filepath()
    folder_url = '/' + folder.path
    if not folder_url.endswith('/'):
        folder_url += '/'
    folders = []
    files = []
    for entry in sorted(os.listdir(folder_path)):
        # skip hidden entries
        if entry.startswith('.'):
            continue
        node_path = os.path.join(folder_path, entry)
        node_url = folder_url + entry
        if os.path.isdir(node_path):
            # keep only the children the user is allowed to read. This is only useful in '/'
            child = Folder(node_url[1:])
            if child.has_perm('can_read', request.user):
                folders.append({
                    'name': entry,
                    'path': node_url + '/',
                    'size': os.path.getsize(node_path),
                })
        else:
            data = {
                'mimetype': mimetypes.guess_type(node_path)[0],
                'name': entry,
                'path': node_url,
                'size': os.path.getsize(node_path),
            }
            # attach camera/settings/size metadata for JPEG images
            if data['mimetype'] in ['image/jpeg', 'image/pjpeg']:
                data['image'] = get_image_info(node_path)
            files.append(data)
    return HttpResponse(json.dumps({
        'can_manage': folder.has_perm('can_manage', request.user),
        'can_write': folder.has_perm('can_write', request.user),
        'files': files,
        'folders': folders,
        'name': folder.name(),
        'path': folder_url,
    }), content_type='application/json')
@login_required
@require_http_methods(['POST'])
def delete(request, path):
    """Delete the given file or folder."""
    path = clean_path(path)
    # refuse to delete the root folder
    if not path:
        return HttpResponseForbidden()
    # the caller needs write access to the containing folder
    parent = Folder(posixpath.dirname(path))
    if not parent.has_perm('can_write', request.user):
        return HttpResponseForbidden()
    # remove either the single file or the whole folder tree
    target = os.path.join(settings.COCONUTS_DATA_ROOT, url2path(path))
    if os.path.isdir(target):
        shutil.rmtree(target)
    else:
        os.unlink(target)
    return content_list(request, parent.path)
@login_required
def download(request, path):
    """Return the raw file for the given photo as an attachment."""
    path = clean_path(path)
    # the caller needs read access to the containing folder
    parent = Folder(posixpath.dirname(path))
    if not parent.has_perm('can_read', request.user):
        return HttpResponseForbidden()
    response = django.views.static.serve(
        request, path, document_root=settings.COCONUTS_DATA_ROOT)
    filename = urlquote(os.path.basename(path))
    response['Content-Disposition'] = 'attachment; filename="%s"' % filename
    return response
@login_required
def manage(request, path):
    """Manage the properties for the given folder.

    GET renders the share/ACL forms; POST saves the share properties and,
    when the ACL formset validates, replaces the share's ACLs.
    """
    path = clean_path(path)
    # Check permissions: only users allowed to manage the folder's share
    # may edit it.
    folder = Folder(path)
    share = folder.share
    if not share.has_perm('can_manage', request.user):
        return HttpResponseForbidden()
    # Process submission
    if request.method == 'POST':
        # properties form, saved independently of the access formset below
        shareform = ShareForm(request.POST, instance=share)
        if shareform.is_valid():
            shareform.save()
        # access control formset
        formset = ShareAccessFormSet(request.POST)
        if formset.is_valid():
            # rebuild the ACL list from the submitted rows, keeping only
            # entries granting at least one permission
            acls = []
            for data in formset.clean():
                acl = NamedAcl("%s:" % data['owner'])
                for perm in PERMISSIONS:
                    if data[perm]: acl.add_perm(perm)
                if acl.permissions: acls.append(acl)
            share.set_acls(acls)
            # Check we are not locking ourselves out before saving
            if not share.has_perm('can_manage', request.user):
                return HttpResponseForbidden()
            share.save()
            return redirect(reverse(browse, args=['']))
    # GET (or invalid POST): fill forms from the database
    data = []
    for acl in share.acls():
        entry = {'owner': "%s:%s" % (acl.type, acl.name)}
        for perm in PERMISSIONS:
            entry[perm] = acl.has_perm(perm)
        data.append(entry)
    shareform = ShareForm(instance=share)
    formset = ShareAccessFormSet(initial=data)
    return render(request, 'coconuts/manage.html', {
        'formset': formset,
        'share': share,
        'shareform': shareform,
    })
@login_required
def owner_list(request):
    """Return the available ACL owners as JSON, grouped by owner class."""
    choices = []
    for klass, key in OWNERS:
        prefix = klass.__name__.lower()
        queryset = klass.objects.all().order_by(key)
        opts = ["%s:%s" % (prefix, getattr(obj, key)) for obj in queryset]
        # skip owner classes without any instances
        if opts:
            choices.append({'name': klass.__name__, 'options': opts})
    return HttpResponse(json.dumps(choices), content_type='application/json')
@login_required
def render_file(request, path):
    """Return a resized version of the given photo.

    Thumbnails are generated on first access and cached under
    ``COCONUTS_CACHE_ROOT/<size>/<path>``.
    """
    path = clean_path(path)
    folder = Folder(posixpath.dirname(path))
    filepath = os.path.join(settings.COCONUTS_DATA_ROOT, url2path(path))
    # check permissions
    if not folder.has_perm('can_read', request.user):
        return HttpResponseForbidden()
    # check the size is legitimate (PhotoForm validates the requested size)
    form = PhotoForm(request.GET)
    if not form.is_valid():
        return HttpResponseBadRequest()
    size = form.cleaned_data['size']
    # check thumbnail; generate it if not cached yet
    cachesize = size, int(size * 0.75)
    cachepath = os.path.join(str(size), url2path(path))
    cachefile = os.path.join(settings.COCONUTS_CACHE_ROOT, cachepath)
    if not os.path.exists(cachefile):
        cachedir = os.path.dirname(cachefile)
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        img = Image.open(filepath)
        # rotate if needed, according to the EXIF orientation tag
        with open(filepath, 'rb') as fp:
            tags = EXIF.process_file(fp, details=False)
        # dict.has_key() does not exist on Python 3; use the "in" operator
        if 'Image Orientation' in tags:
            orientation = tags['Image Orientation'].values[0]
            img = img.rotate(ORIENTATIONS[orientation][2])
        img.thumbnail(cachesize, Image.ANTIALIAS)
        img.save(cachefile, quality=90)
    # serve the photo with a one-year expiry (cached thumbnails never change)
    response = django.views.static.serve(request,
        path2url(cachepath),
        document_root=settings.COCONUTS_CACHE_ROOT)
    response["Expires"] = http_date(time.time() + 3600 * 24 * 365)
    return response
|
"""
Utility functions that map information from environments onto package cache
"""
from __future__ import print_function
from os.path import join
from .environment import Environment, environments
from ..cache.package import Package
from ..utils import is_hardlinked
from ..constants import LINK_TYPE
def check_hardlinked_env(env:Environment) -> dict:
    """
    Check all hardlinked packages in env.

    Returns a mapping of package name to the list of improperly hardlinked
    files reported by check_hardlinked_pkg().
    """
    result = {}
    for dist in env.hard_linked:
        result[dist.name] = check_hardlinked_pkg(env, dist.to_package())
    return result
def owns(env:Environment, path) -> tuple:
    """
    Return the package in env that owns file.

    This function will return all packages that claim the file. This
    shouldn't typically happen, and if it does, could mean the packages in the
    environment were incorrectly built.
    """
    # linear scan over every package's file list
    return tuple(p for p in env.packages if path in p.files)
def check_hardlinked_pkg(env:Environment, Pkg:Package) -> list:
    """
    Check that pkg in cache is correctly (or completely) hardlinked into env.

    Returns a list of improperly hardlinked files.
    """
    bad_linked = []
    # files with an embedded prefix or marked "no_link" are not hardlinked,
    # so they are excluded from the expected set
    expected_linked = Pkg.files - Pkg.has_prefix.keys() - Pkg.no_link
    for f in expected_linked:
        src = join(Pkg.path, f)  # file in the package cache
        tgt = join(env.path, f)  # file in the environment
        if not is_hardlinked(src, tgt):
            bad_linked.append(f)
    return bad_linked
def explicitly_installed(env:Environment) -> dict:
    """
    Return list of explicitly installed packages.

    Note that this does not work with root environments

    Returns a mapping of request date to the set of explicitly requested
    package specs that are still installed.
    """
    current_pkgs = set(env.package_specs)
    hist = env.history
    # Map date to explicitly installed package specs; only "install" and
    # "create" user requests count as explicit
    _ci = {'install', 'create'}
    installed_specs = {x['date']: set(t.split()[0]
                                      for t in x['specs'])
                       for x in hist.get_user_requests
                       if x.get('action') in _ci}
    # See what packages were actually installed
    actually_installed = {date: set(pkg_spec) for date, pkg_spec in hist.construct_states}
    for date, specs in installed_specs.items():
        # Translate name only spec to full specs
        # NOTE(review): assumes full specs are "name-version-build" joined
        # with dashes; names containing "-" would not match -- verify.
        name_spec = {x for x in actually_installed[date] if x.split('-')[0] in specs}
        actually_installed[date] = name_spec
    # Intersect with currently installed packages
    actually_installed = {date: specs.intersection(current_pkgs) for date, specs in actually_installed.items()}
    return actually_installed
def orphaned(env:Environment) -> set:
    """
    Return the names of orphaned packages in the env.

    A package that has 0 packages depending on it will be considered orphaned.
    Since we don't have a full dependency solver, this method naively only
    considers package names (and ignores versions and version constraints).
    """
    current_pkgs = set(p.name for p in env.packages)
    depended_on = set()
    # BUG FIX: iterate over the package objects (which carry .depends),
    # not over the name strings collected above.
    for pkg in env.packages:
        depended_on.update(d.split(maxsplit=1)[0] for d in pkg.depends)
    return current_pkgs.difference(depended_on)
def dependency_graph(env:Environment) -> dict:
    """
    Return a dictionary that represents the dependency graph of the environment.

    Only package names are considered because a package cannot have
    multiple versions of the same package installed.
    The output of this function can be passed to NetworkX constructors

    Args:
        env (Environment):
    Returns:
        (dict)
    """
    # map each package name to the names of its dependencies
    return {
        pkg.name: [spec.split()[0] for spec in pkg.depends]
        for pkg in env.packages
    }
Avoid unnecessary set constructions in orphaned.
"""
Utility functions that map information from environments onto package cache
"""
from __future__ import print_function
from os.path import join
from .environment import Environment, environments
from ..cache.package import Package
from ..utils import is_hardlinked
from ..constants import LINK_TYPE
def check_hardlinked_env(env:Environment) -> dict:
    """
    Check all hardlinked packages in env.

    Returns a mapping of package name to the list of improperly hardlinked
    files reported by check_hardlinked_pkg().
    """
    return {p.name: check_hardlinked_pkg(env, p.to_package()) for p in env.hard_linked}
def owns(env:Environment, path) -> tuple:
    """
    Return the package in env that owns file.

    This function will return all packages that claim the file. This
    shouldn't typically happen, and if it does, could mean the packages in the
    environment were incorrectly built.
    """
    owners = []
    for pkg in env.packages:
        if path in pkg.files:
            owners.append(pkg)
    return tuple(owners)
def check_hardlinked_pkg(env:Environment, Pkg:Package) -> list:
    """
    Check that pkg in cache is correctly (or completely) hardlinked into env.

    Returns a list of improperly hardlinked files.
    """
    # files carrying an embedded prefix or marked "no_link" are never
    # hardlinked, so exclude them from the expected set
    expected = Pkg.files - Pkg.has_prefix.keys() - Pkg.no_link
    return [f for f in expected
            if not is_hardlinked(join(Pkg.path, f), join(env.path, f))]
def explicitly_installed(env:Environment) -> dict:
    """
    Return list of explicitly installed packages.

    Note that this does not work with root environments

    Returns a mapping of request date to the set of explicitly requested
    package specs that are still installed.
    """
    current_pkgs = set(env.package_specs)
    hist = env.history
    # Map date to explicitly installed package specs; only "install" and
    # "create" user requests are considered explicit
    _ci = {'install', 'create'}
    installed_specs = {x['date']: set(t.split()[0]
                                      for t in x['specs'])
                       for x in hist.get_user_requests
                       if x.get('action') in _ci}
    # See what packages were actually installed
    actually_installed = {date: set(pkg_spec) for date, pkg_spec in hist.construct_states}
    for date, specs in installed_specs.items():
        # Translate name only spec to full specs
        # NOTE(review): assumes full specs start with "name-"; package names
        # containing "-" would not match -- verify.
        name_spec = {x for x in actually_installed[date] if x.split('-')[0] in specs}
        actually_installed[date] = name_spec
    # Intersect with currently installed packages
    actually_installed = {date: specs.intersection(current_pkgs) for date, specs in actually_installed.items()}
    return actually_installed
def orphaned(env:Environment) -> set:
    """
    Return a list of orphaned packages in the env.

    A package that has 0 packages depending on it will be considered orphaned.
    Since we don't have a full dependency solver, this method naively only
    considers package names (and ignores versions and version constraints).
    """
    # collect the names of every package that something depends on
    depended_on = set()
    for pkg in env.packages:
        for spec in pkg.depends:
            depended_on.add(spec.split(maxsplit=1)[0])
    # a package nobody depends on is orphaned
    return {pkg for pkg in env.packages if pkg.name not in depended_on}
def dependency_graph(env:Environment) -> dict:
    """
    Return a dictionary that represents the dependency graph of the environment.

    Only package names are considered because a package cannot have
    multiple versions of the same package installed.
    The output of this function can be passed to NetworkX constructors

    Args:
        env (Environment):
    Returns:
        (dict)
    """
    # map each package name to the names of its dependencies
    return {
        pkg.name: [spec.split(maxsplit=1)[0] for spec in pkg.depends]
        for pkg in env.packages
    }
|
import time
def on_join(irc, conn, event):
    """Handle an IRC JOIN event, keeping irc.state in sync.

    NOTE(review): event.arguments is read as [account, gecos], which implies
    the extended-join capability is enabled -- verify against the connection
    setup.  "log" is not defined in this module; presumably a module-level
    logger provided elsewhere -- confirm.
    """
    nick = event.source.nick
    channel = event.target
    # the bot itself joined: initialise channel state and trigger syncs
    if nick == irc.get_nick():
        log.info("Joined to {}".format(channel))
        if channel not in irc.state["channels"]:
            irc.state["channels"][channel] = {}
            irc.state["channels"][channel]["topic"] = "" # TOPIC chan
            irc.state["channels"][channel]["names"] = [] # NAMES chan
            irc.state["channels"][channel]["bans"] = [] # MODE chan b
            irc.state["channels"][channel]["quiets"] = [] # MODE chan q
            irc.state["channels"][channel]["excepts"] = [] # MODE chan e
            irc.state["channels"][channel]["invites"] = [] # MODE chan i
            irc.state["channels"][channel]["ops"] = []
            irc.state["channels"][channel]["voices"] = []
        # remember the channel in the persistent configuration
        if channel not in irc.channels:
            irc.channels[channel] = {}
            irc.save_config()
        # request the user list and the ban/quiet lists from the server
        log.info("Syncing {} users".format(channel))
        irc.who(channel)
        log.info("Syncing {} bans".format(channel))
        log.info("Syncing {} quiets".format(channel))
        irc.mode(channel, "bq")
    # track the joining user in the channel's name list
    if nick not in irc.state["channels"][channel]["names"]:
        irc.state["channels"][channel]["names"].append(nick)
    # create per-user state on first sight
    if nick not in irc.state["users"]:
        irc.state["users"][nick] = {}
        irc.state["users"][nick]["channels"] = []
        irc.state["users"][nick]["lastmsg"] = {}
    # record the JOIN as the user's last activity
    irc.state["users"][nick]["lastmsg"]["time"] = time.time()
    irc.state["users"][nick]["lastmsg"]["channel"] = channel
    irc.state["users"][nick]["lastmsg"]["message"] = None
    irc.state["users"][nick]["lastmsg"]["command"] = event.type
    irc.state["users"][nick]["user"] = event.source.user
    irc.state["users"][nick]["host"] = event.source.host
    irc.state["users"][nick]["gecos"] = event.arguments[1]
    # "*" means the user is not logged into services
    if event.arguments[0] != "*":
        irc.state["users"][nick]["account"] = event.arguments[0]
    else:
        irc.state["users"][nick]["account"] = None
    if channel not in irc.state["users"][nick]["channels"]:
        irc.state["users"][nick]["channels"].append(channel)
Update JOIN.py
Now sends a request to receive the channel modes and the time the channel was created.
import time
def on_join(irc, conn, event):
    """Handle an IRC JOIN event, keeping irc.state in sync.

    NOTE(review): event.arguments is read as [account, gecos], which implies
    the extended-join capability is enabled -- verify against the connection
    setup.  "log" is not defined in this module; presumably a module-level
    logger provided elsewhere -- confirm.
    """
    nick = event.source.nick
    channel = event.target
    # the bot itself joined: initialise channel state and trigger syncs
    if nick == irc.get_nick():
        log.info("Joined to {}".format(channel))
        if channel not in irc.state["channels"]:
            irc.state["channels"][channel] = {}
            irc.state["channels"][channel]["topic"] = "" # TOPIC chan
            irc.state["channels"][channel]["names"] = [] # NAMES chan
            irc.state["channels"][channel]["bans"] = [] # MODE chan b
            irc.state["channels"][channel]["quiets"] = [] # MODE chan q
            irc.state["channels"][channel]["excepts"] = [] # MODE chan e
            irc.state["channels"][channel]["invites"] = [] # MODE chan i
            irc.state["channels"][channel]["ops"] = []
            irc.state["channels"][channel]["voices"] = []
        # remember the channel in the persistent configuration
        if channel not in irc.channels:
            irc.channels[channel] = {}
            irc.save_config()
        # request the user list and the ban/quiet lists from the server
        log.info("Syncing {} users".format(channel))
        irc.who(channel)
        log.info("Syncing {} bans".format(channel))
        log.info("Syncing {} quiets".format(channel))
        irc.mode(channel, "bq")
        # an empty MODE query returns the channel modes and creation time
        log.info("Syncing {} mode's and time created".format(channel))
        irc.mode(channel, "")
    # track the joining user in the channel's name list
    if nick not in irc.state["channels"][channel]["names"]:
        irc.state["channels"][channel]["names"].append(nick)
    # create per-user state on first sight
    if nick not in irc.state["users"]:
        irc.state["users"][nick] = {}
        irc.state["users"][nick]["channels"] = []
        irc.state["users"][nick]["lastmsg"] = {}
    # record the JOIN as the user's last activity
    irc.state["users"][nick]["lastmsg"]["time"] = time.time()
    irc.state["users"][nick]["lastmsg"]["channel"] = channel
    irc.state["users"][nick]["lastmsg"]["message"] = None
    irc.state["users"][nick]["lastmsg"]["command"] = event.type
    irc.state["users"][nick]["user"] = event.source.user
    irc.state["users"][nick]["host"] = event.source.host
    irc.state["users"][nick]["gecos"] = event.arguments[1]
    # "*" means the user is not logged into services
    if event.arguments[0] != "*":
        irc.state["users"][nick]["account"] = event.arguments[0]
    else:
        irc.state["users"][nick]["account"] = None
    if channel not in irc.state["users"][nick]["channels"]:
        irc.state["users"][nick]["channels"].append(channel)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: Rico Sennrich
"""Use operations learned with learn_bpe.py to encode a new text.
The text will not be smaller, but use only a fixed vocabulary, with rare words
encoded as variable-length sequences of subword units.
Reference:
Rico Sennrich, Barry Haddow and Alexandra Birch (2015). Neural Machine Translation of Rare Words with Subword Units.
Proceedings of the 54th Annual Meeting of the Association for Computational Linguistics (ACL 2016). Berlin, Germany.
"""
from __future__ import unicode_literals, division
import sys
import codecs
import argparse
import json
import re
from collections import defaultdict
# hack for python2/3 compatibility
from io import open
argparse.open = open
class BPE(object):
    """Apply learned BPE merge operations to whitespace-tokenized text."""

    def __init__(self, codes, separator='@@', vocab=None, glossaries=None):
        # check version information: learn_bpe.py >= 0.2 writes a
        # "#version:" header on the first line
        firstline = codes.readline()
        if firstline.startswith('#version:'):
            self.version = tuple([int(x) for x in re.sub(r'(\.0+)*$','', firstline.split()[-1]).split(".")])
        else:
            self.version = (0, 1)
            codes.seek(0)
        self.bpe_codes = [tuple(item.split()) for item in codes]
        # some hacking to deal with duplicates (only consider first instance)
        self.bpe_codes = dict([(code,i) for (i,code) in reversed(list(enumerate(self.bpe_codes)))])
        # map merged symbol back to the pair it was merged from, used when
        # reverting merges that would produce out-of-vocabulary units
        self.bpe_codes_reverse = dict([(pair[0] + pair[1], pair) for pair,i in self.bpe_codes.items()])
        self.separator = separator
        self.vocab = vocab
        self.glossaries = glossaries if glossaries else []

    def segment(self, sentence):
        """segment single sentence (whitespace-tokenized string) with BPE encoding"""
        output = []
        for word in sentence.split():
            # glossary substrings are isolated first so they are never split
            new_word = [out for segment in self._isolate_glossaries(word)
                        for out in encode(segment,
                                          self.bpe_codes,
                                          self.bpe_codes_reverse,
                                          self.vocab,
                                          self.separator,
                                          self.version,
                                          self.glossaries)]
            # all subword units except the last carry the separator
            for item in new_word[:-1]:
                output.append(item + self.separator)
            output.append(new_word[-1])
        return ' '.join(output)

    def _isolate_glossaries(self, word):
        # split the word around every glossary term, applying each glossary
        # to the fragments produced by the previous one
        word_segments = [word]
        for gloss in self.glossaries:
            word_segments = [out_segments for segment in word_segments
                             for out_segments in isolate_glossary(segment, gloss)]
        return word_segments
def create_parser():
    """Build the command-line parser for applying BPE segmentation.

    Returns an argparse.ArgumentParser; all options have sensible defaults
    except --codes, which is required.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # fixed: this script *applies* a learned segmentation; the previous
        # text was copied from learn_bpe.py
        description="apply BPE-based word segmentation")

    parser.add_argument(
        '--input', '-i', type=argparse.FileType('r'), default=sys.stdin,
        metavar='PATH',
        help="Input file (default: standard input).")
    parser.add_argument(
        '--codes', '-c', type=argparse.FileType('r'), metavar='PATH',
        required=True,
        help="File with BPE codes (created by learn_bpe.py).")
    parser.add_argument(
        '--output', '-o', type=argparse.FileType('w'), default=sys.stdout,
        metavar='PATH',
        help="Output file (default: standard output)")
    parser.add_argument(
        '--separator', '-s', type=str, default='@@', metavar='STR',
        help="Separator between non-final subword units (default: '%(default)s'))")
    parser.add_argument(
        '--vocabulary', type=argparse.FileType('r'), default=None,
        metavar="PATH",
        help="Vocabulary file (built with get_vocab.py). If provided, this script reverts any merge operations that produce an OOV.")
    parser.add_argument(
        '--vocabulary-threshold', type=int, default=None,
        metavar="INT",
        help="Vocabulary threshold. If vocabulary is provided, any word with frequency < threshold will be treated as OOV")
    parser.add_argument(
        '--glossaries', type=str, nargs='+', default=None,
        metavar="STR",
        help="Glossaries. The strings provided in glossaries will not be affected"+
             "by the BPE (i.e. they will neither be broken into subwords, nor concatenated with other subwords")

    return parser
def get_pairs(word):
    """Return set of symbol pairs in a word.

    word is represented as tuple of symbols (symbols being variable-length strings)
    """
    pairs = set()
    previous = word[0]
    for current in word[1:]:
        pairs.add((previous, current))
        previous = current
    return pairs
def encode(orig, bpe_codes, bpe_codes_reverse, vocab, separator, version, glossaries=None, cache={}):
    """Encode word based on list of BPE merge operations, which are applied
    consecutively.

    Returns a tuple of subword units.  Results are memoized in *cache*;
    the shared mutable default is intentional (process-wide memoization).
    """
    if orig in cache:
        return cache[orig]

    # glossary words are passed through unsplit
    # (guard against the default glossaries=None, which used to raise
    # TypeError on the membership test)
    if glossaries and orig in glossaries:
        cache[orig] = (orig,)
        return (orig,)

    if version == (0, 1):
        word = tuple(orig) + ('</w>',)
    elif version == (0, 2): # more consistent handling of word-final segments
        word = tuple(orig[:-1]) + ( orig[-1] + '</w>',)
    else:
        raise NotImplementedError

    pairs = get_pairs(word)
    if not pairs:
        return orig

    while True:
        # the lowest-ranked (earliest-learned) merge is applied first
        bigram = min(pairs, key = lambda pair: bpe_codes.get(pair, float('inf')))
        if bigram not in bpe_codes:
            break
        first, second = bigram
        new_word = []
        i = 0
        while i < len(word):
            try:
                j = word.index(first, i)
                new_word.extend(word[i:j])
                i = j
            except ValueError:  # was a bare except; only "not found" is expected
                new_word.extend(word[i:])
                break
            if word[i] == first and i < len(word)-1 and word[i+1] == second:
                new_word.append(first+second)
                i += 2
            else:
                new_word.append(word[i])
                i += 1
        new_word = tuple(new_word)
        word = new_word
        if len(word) == 1:
            break
        else:
            pairs = get_pairs(word)

    # don't print end-of-word symbols
    if word[-1] == '</w>':
        word = word[:-1]
    elif word[-1].endswith('</w>'):
        word = word[:-1] + (word[-1].replace('</w>',''),)

    if vocab:
        # revert merges that produced out-of-vocabulary units
        word = check_vocab_and_split(word, bpe_codes_reverse, vocab, separator)

    cache[orig] = word
    return word
def recursive_split(segment, bpe_codes, vocab, separator, final=False):
    """Recursively split segment into smaller units (by reversing BPE merges)
    until all units are either in-vocabulary, or cannot be split futher.

    Yields the resulting subword units; *final* marks the word-final segment,
    which is looked up without the separator suffix.
    """
    try:
        if final:
            # word-final segments were merged with the end-of-word marker
            left, right = bpe_codes[segment + '</w>']
            right = right[:-4]
        else:
            left, right = bpe_codes[segment]
    except KeyError:  # was a bare except; only a missing merge is expected
        #sys.stderr.write('cannot split {0} further.\n'.format(segment))
        yield segment
        return

    if left + separator in vocab:
        yield left
    else:
        for item in recursive_split(left, bpe_codes, vocab, separator, False):
            yield item

    if (final and right in vocab) or (not final and right + separator in vocab):
        yield right
    else:
        for item in recursive_split(right, bpe_codes, vocab, separator, final):
            yield item
def check_vocab_and_split(orig, bpe_codes, vocab, separator):
    """Check for each segment in word if it is in-vocabulary,
    and segment OOV segments into smaller units by reversing the BPE merge operations"""
    out = []
    last = orig[-1]
    # non-final segments carry the separator suffix in the vocabulary
    for segment in orig[:-1]:
        if segment + separator in vocab:
            out.append(segment)
        else:
            #sys.stderr.write('OOV: {0}\n'.format(segment))
            out.extend(recursive_split(segment, bpe_codes, vocab, separator, False))
    # the final segment is looked up without the separator
    if last in vocab:
        out.append(last)
    else:
        #sys.stderr.write('OOV: {0}\n'.format(last))
        out.extend(recursive_split(last, bpe_codes, vocab, separator, True))
    return out
def read_vocabulary(vocab_file, threshold):
    """read vocabulary file produced by get_vocab.py, and filter according to
    frequency threshold.

    Each line is expected to contain "<word> <frequency>"; words whose
    frequency is below *threshold* are dropped (no filtering when
    *threshold* is None).
    """
    vocabulary = set()
    for line in vocab_file:
        word, freq = line.split()
        freq = int(freq)
        # "is None", not "== None" (identity comparison for the sentinel)
        if threshold is None or freq >= threshold:
            vocabulary.add(word)
    return vocabulary
def isolate_glossary(word, glossary):
    """
    Isolate a glossary present inside a word.

    Returns a list of subwords. In which all 'glossary' glossaries are isolated
    For example, if 'USA' is the glossary and '1934USABUSA' the word, the return value is:
    ['1934', 'USA', 'B', 'USA']
    """
    # nothing to do when the word is the glossary or does not contain it
    if word == glossary or glossary not in word:
        return [word]
    pieces = word.split(glossary)
    result = []
    # every piece but the last is followed by an occurrence of the glossary;
    # empty fragments are dropped and the rest stripped of whitespace
    for piece in pieces[:-1]:
        for fragment in (piece, glossary):
            if fragment != '':
                result.append(fragment.strip())
    if pieces[-1] != '':
        result.append(pieces[-1].strip())
    return result
if __name__ == '__main__':

    # python 2/3 compatibility: force UTF-8 on the standard streams so that
    # non-ASCII text round-trips regardless of the locale
    if sys.version_info < (3, 0):
        sys.stderr = codecs.getwriter('UTF-8')(sys.stderr)
        sys.stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdin = codecs.getreader('UTF-8')(sys.stdin)
    else:
        sys.stderr = codecs.getwriter('UTF-8')(sys.stderr.buffer)
        sys.stdout = codecs.getwriter('UTF-8')(sys.stdout.buffer)
        sys.stdin = codecs.getreader('UTF-8')(sys.stdin.buffer)

    parser = create_parser()
    args = parser.parse_args()

    # read/write files as UTF-8 (argparse opened them with the locale's
    # default encoding, so they are re-opened here)
    args.codes = codecs.open(args.codes.name, encoding='utf-8')
    if args.input.name != '<stdin>':
        args.input = codecs.open(args.input.name, encoding='utf-8')
    if args.output.name != '<stdout>':
        args.output = codecs.open(args.output.name, 'w', encoding='utf-8')
    if args.vocabulary:
        args.vocabulary = codecs.open(args.vocabulary.name, encoding='utf-8')

    if args.vocabulary:
        vocabulary = read_vocabulary(args.vocabulary, args.vocabulary_threshold)
    else:
        vocabulary = None

    # segment the input line by line
    bpe = BPE(args.codes, args.separator, vocabulary, args.glossaries)

    for line in args.input:
        args.output.write(bpe.segment(line).strip())
        args.output.write('\n')
Somehow, apply_bpe.py ended up non-executable, resulting in an empty training corpus and a failed AMUNMT training. When cleaning afterwards, the subword-nmt repo is deleted and cloned again by the AMUNMT example training script, so apply_bpe.py ends up non-executable again (it should have been chmod +x'ed).
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: Rico Sennrich
"""Use operations learned with learn_bpe.py to encode a new text.
The text will not be smaller, but use only a fixed vocabulary, with rare words
encoded as variable-length sequences of subword units.
Reference:
Rico Sennrich, Barry Haddow and Alexandra Birch (2015). Neural Machine Translation of Rare Words with Subword Units.
Proceedings of the 54th Annual Meeting of the Association for Computational Linguistics (ACL 2016). Berlin, Germany.
"""
from __future__ import unicode_literals, division
import sys
import codecs
import argparse
import json
import re
from collections import defaultdict
# hack for python2/3 compatibility
from io import open
argparse.open = open
class BPE(object):
    """Apply learned BPE merge operations to whitespace-tokenized text."""

    def __init__(self, codes, separator='@@', vocab=None, glossaries=None):
        # check version information: learn_bpe.py >= 0.2 writes a
        # "#version:" header on the first line
        firstline = codes.readline()
        if firstline.startswith('#version:'):
            self.version = tuple([int(x) for x in re.sub(r'(\.0+)*$','', firstline.split()[-1]).split(".")])
        else:
            self.version = (0, 1)
            codes.seek(0)
        self.bpe_codes = [tuple(item.split()) for item in codes]
        # some hacking to deal with duplicates (only consider first instance)
        self.bpe_codes = dict([(code,i) for (i,code) in reversed(list(enumerate(self.bpe_codes)))])
        # map merged symbol back to the pair it was merged from, used when
        # reverting merges that would produce out-of-vocabulary units
        self.bpe_codes_reverse = dict([(pair[0] + pair[1], pair) for pair,i in self.bpe_codes.items()])
        self.separator = separator
        self.vocab = vocab
        self.glossaries = glossaries if glossaries else []

    def segment(self, sentence):
        """segment single sentence (whitespace-tokenized string) with BPE encoding"""
        output = []
        for word in sentence.split():
            # glossary substrings are isolated first so they are never split
            new_word = [out for segment in self._isolate_glossaries(word)
                        for out in encode(segment,
                                          self.bpe_codes,
                                          self.bpe_codes_reverse,
                                          self.vocab,
                                          self.separator,
                                          self.version,
                                          self.glossaries)]
            # all subword units except the last carry the separator
            for item in new_word[:-1]:
                output.append(item + self.separator)
            output.append(new_word[-1])
        return ' '.join(output)

    def _isolate_glossaries(self, word):
        # split the word around every glossary term, applying each glossary
        # to the fragments produced by the previous one
        word_segments = [word]
        for gloss in self.glossaries:
            word_segments = [out_segments for segment in word_segments
                             for out_segments in isolate_glossary(segment, gloss)]
        return word_segments
def create_parser():
    """Build the command-line parser for applying BPE segmentation to text."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # fixed copy-pasted description: this script applies, not learns, BPE
        description="apply BPE-based word segmentation")

    parser.add_argument(
        '--input', '-i', type=argparse.FileType('r'), default=sys.stdin,
        metavar='PATH',
        help="Input file (default: standard input).")
    parser.add_argument(
        '--codes', '-c', type=argparse.FileType('r'), metavar='PATH',
        required=True,
        help="File with BPE codes (created by learn_bpe.py).")
    parser.add_argument(
        '--output', '-o', type=argparse.FileType('w'), default=sys.stdout,
        metavar='PATH',
        help="Output file (default: standard output)")
    parser.add_argument(
        '--separator', '-s', type=str, default='@@', metavar='STR',
        # fixed unbalanced closing parenthesis in the help text
        help="Separator between non-final subword units (default: '%(default)s')")
    parser.add_argument(
        '--vocabulary', type=argparse.FileType('r'), default=None,
        metavar="PATH",
        help="Vocabulary file (built with get_vocab.py). If provided, this script reverts any merge operations that produce an OOV.")
    parser.add_argument(
        '--vocabulary-threshold', type=int, default=None,
        metavar="INT",
        help="Vocabulary threshold. If vocabulary is provided, any word with frequency < threshold will be treated as OOV")
    parser.add_argument(
        '--glossaries', type=str, nargs='+', default=None,
        metavar="STR",
        # fixed missing space at the string join and the unclosed parenthesis
        help="Glossaries. The strings provided in glossaries will not be affected " +
             "by the BPE (i.e. they will neither be broken into subwords, nor concatenated with other subwords)")

    return parser
def get_pairs(word):
    """Return set of symbol pairs in a word.

    word is represented as tuple of symbols (symbols being variable-length strings)
    """
    return set(zip(word, word[1:]))


def encode(orig, bpe_codes, bpe_codes_reverse, vocab, separator, version, glossaries=None, cache={}):
    """Encode word based on list of BPE merge operations, which are applied consecutively.

    Returns a tuple of subword strings.  The mutable default ``cache`` is
    deliberate: it memoizes results across calls.
    """
    if orig in cache:
        return cache[orig]

    # glossary terms pass through unsegmented
    # (bug fix: guard against the default glossaries=None, which previously
    # raised TypeError on the membership test)
    if glossaries and orig in glossaries:
        cache[orig] = (orig,)
        return (orig,)

    if version == (0, 1):
        word = tuple(orig) + ('</w>',)
    elif version == (0, 2):  # more consistent handling of word-final segments
        word = tuple(orig[:-1]) + (orig[-1] + '</w>',)
    else:
        raise NotImplementedError

    pairs = get_pairs(word)

    if not pairs:
        return orig

    while True:
        # the merge with the lowest index in bpe_codes has highest priority
        bigram = min(pairs, key=lambda pair: bpe_codes.get(pair, float('inf')))
        if bigram not in bpe_codes:
            break
        first, second = bigram
        new_word = []
        i = 0
        while i < len(word):
            try:
                j = word.index(first, i)
                new_word.extend(word[i:j])
                i = j
            except ValueError:
                # bug fix: catch only the ValueError raised by tuple.index
                # instead of a bare except that swallowed everything
                new_word.extend(word[i:])
                break

            if word[i] == first and i < len(word)-1 and word[i+1] == second:
                new_word.append(first+second)
                i += 2
            else:
                new_word.append(word[i])
                i += 1
        new_word = tuple(new_word)
        word = new_word
        if len(word) == 1:
            break
        else:
            pairs = get_pairs(word)

    # don't print end-of-word symbols
    if word[-1] == '</w>':
        word = word[:-1]
    elif word[-1].endswith('</w>'):
        word = word[:-1] + (word[-1].replace('</w>',''),)

    if vocab:
        # revert merges that produced out-of-vocabulary subwords
        word = check_vocab_and_split(word, bpe_codes_reverse, vocab, separator)

    cache[orig] = word
    return word
def recursive_split(segment, bpe_codes, vocab, separator, final=False):
    """Recursively split segment into smaller units (by reversing BPE merges)
    until all units are either in-vocabulary, or cannot be split further.

    Yields the resulting subword strings.  ``final`` marks the word-final
    segment, whose reverse lookup carries the '</w>' marker.
    """
    try:
        if final:
            left, right = bpe_codes[segment + '</w>']
            right = right[:-4]  # drop the '</w>' marker from the right half
        else:
            left, right = bpe_codes[segment]
    except KeyError:
        # bug fix: catch only the KeyError from the dict lookup instead of a
        # bare except; segment is not a known merge and cannot be split
        #sys.stderr.write('cannot split {0} further.\n'.format(segment))
        yield segment
        return

    # non-final halves are looked up with the separator suffix
    if left + separator in vocab:
        yield left
    else:
        for item in recursive_split(left, bpe_codes, vocab, separator, False):
            yield item

    if (final and right in vocab) or (not final and right + separator in vocab):
        yield right
    else:
        for item in recursive_split(right, bpe_codes, vocab, separator, final):
            yield item
def check_vocab_and_split(orig, bpe_codes, vocab, separator):
    """Check for each segment in word if it is in-vocabulary,
    and segment OOV segments into smaller units by reversing the BPE merge operations"""
    out = []

    # all but the last segment must be known with the separator attached
    for segment in orig[:-1]:
        if segment + separator in vocab:
            out.append(segment)
        else:
            #sys.stderr.write('OOV: {0}\n'.format(segment))
            out.extend(recursive_split(segment, bpe_codes, vocab, separator, False))

    # the word-final segment is looked up without the separator
    last = orig[-1]
    if last in vocab:
        out.append(last)
    else:
        #sys.stderr.write('OOV: {0}\n'.format(last))
        out.extend(recursive_split(last, bpe_codes, vocab, separator, True))

    return out
def read_vocabulary(vocab_file, threshold):
    """Read vocabulary file produced by get_vocab.py, and filter according to frequency threshold.

    Each line is "<word> <count>"; words with count below ``threshold`` are
    dropped.  A threshold of None keeps every word.
    """
    vocabulary = set()

    for line in vocab_file:
        word, freq = line.split()
        freq = int(freq)
        # bug fix: compare to None with ``is``, not ``==``
        if threshold is None or freq >= threshold:
            vocabulary.add(word)

    return vocabulary
def isolate_glossary(word, glossary):
    """
    Isolate a glossary present inside a word.

    Returns a list of subwords. In which all 'glossary' glossaries are isolated
    For example, if 'USA' is the glossary and '1934USABUSA' the word, the return value is:
    ['1934', 'USA', 'B', 'USA']
    """
    if word == glossary or glossary not in word:
        return [word]

    pieces = word.split(glossary)
    result = []
    # re-insert the glossary between the split pieces, dropping empty pieces
    for piece in pieces[:-1]:
        if piece != '':
            result.append(piece.strip())
        if glossary != '':
            result.append(glossary.strip())
    tail = pieces[-1]
    if tail != '':
        result.append(tail.strip())
    return result
if __name__ == '__main__':

    # python 2/3 compatibility: route all console I/O through UTF-8 codecs
    # (Python 3 exposes the underlying byte streams via .buffer)
    if sys.version_info < (3, 0):
        sys.stderr = codecs.getwriter('UTF-8')(sys.stderr)
        sys.stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdin = codecs.getreader('UTF-8')(sys.stdin)
    else:
        sys.stderr = codecs.getwriter('UTF-8')(sys.stderr.buffer)
        sys.stdout = codecs.getwriter('UTF-8')(sys.stdout.buffer)
        sys.stdin = codecs.getreader('UTF-8')(sys.stdin.buffer)

    parser = create_parser()
    args = parser.parse_args()

    # read/write files as UTF-8: argparse already opened them with the
    # platform default encoding, so reopen each real file (but not the
    # already-wrapped std streams) through codecs
    args.codes = codecs.open(args.codes.name, encoding='utf-8')
    if args.input.name != '<stdin>':
        args.input = codecs.open(args.input.name, encoding='utf-8')
    if args.output.name != '<stdout>':
        args.output = codecs.open(args.output.name, 'w', encoding='utf-8')
    if args.vocabulary:
        args.vocabulary = codecs.open(args.vocabulary.name, encoding='utf-8')

    if args.vocabulary:
        vocabulary = read_vocabulary(args.vocabulary, args.vocabulary_threshold)
    else:
        vocabulary = None

    bpe = BPE(args.codes, args.separator, vocabulary, args.glossaries)

    # segment the input line by line
    for line in args.input:
        args.output.write(bpe.segment(line).strip())
        args.output.write('\n')
|
import math
from itertools import chain
from collections import deque
class Node(object):
    """ A Node in a kd-tree

    A tree is represented by its root node, and every node represents
    its subtree"""

    def __init__(self, data=None, left=None, right=None):
        # data is the node's payload (a point); left/right are child Nodes
        self.data = data
        self.left = left
        self.right = right

    @property
    def is_leaf(self):
        """ Returns True if a Node has no subnodes

        >>> Node().is_leaf
        True

        >>> Node( 1, left=Node(2) ).is_leaf
        False
        """
        return (not self.data) or \
               (all(not bool(c) for c, p in self.children))

    def preorder(self):
        """ Iterator yielding this node first, then the left and right subtrees """
        if not self:
            return

        yield self

        if self.left:
            for x in self.left.preorder():
                yield x

        if self.right:
            for x in self.right.preorder():
                yield x

    def inorder(self):
        """ Iterator yielding the left subtree, this node, then the right subtree """
        if not self:
            return

        if self.left:
            for x in self.left.inorder():
                yield x

        yield self

        if self.right:
            for x in self.right.inorder():
                yield x

    def postorder(self):
        """ Iterator yielding the left subtree, the right subtree, then this node """
        if not self:
            return

        if self.left:
            for x in self.left.postorder():
                yield x

        if self.right:
            for x in self.right.postorder():
                yield x

        yield self

    @property
    def children(self):
        """
        Returns an iterator for the non-empty children of the Node

        The children are returned as (Node, pos) tuples where pos is 0 for
        the left subnode and 1 for the right.

        >>> len(list(create(dimensions=2).children))
        0

        >>> len(list(create([ (1, 2) ]).children))
        0

        >>> len(list(create([ (2, 2), (2, 1), (2, 3) ]).children))
        2
        """
        if self.left and self.left.data is not None:
            yield self.left, 0
        if self.right and self.right.data is not None:
            yield self.right, 1

    def set_child(self, index, child):
        """ Sets one of the node's children; index 0 is left, 1 is right """
        if index == 0:
            self.left = child
        else:
            self.right = child

    def height(self):
        """
        Returns height of the (sub)tree, without considering
        empty leaf-nodes

        >>> create(dimensions=2).height()
        0

        >>> create([ (1, 2) ]).height()
        1

        >>> create([ (1, 2), (2, 3) ]).height()
        2
        """
        min_height = int(bool(self))
        return max([min_height] + [c.height()+1 for c, p in self.children])

    def get_child_pos(self, child):
        """ Returns the position (0 left, 1 right) of the given child, or
        None if it is not a direct child of this node """
        for c, pos in self.children:
            if child == c:
                return pos

    def __repr__(self):
        return '<%(cls)s - %(data)s>' % \
            dict(cls=self.__class__.__name__, data=repr(self.data))

    def __nonzero__(self):
        # Python 2 truthiness hook: a node is falsy until it carries data.
        # NOTE(review): Python 3 would need __bool__ for the same effect.
        return self.data is not None

    def __eq__(self, other):
        # nodes compare by payload; a node may also be compared to a raw tuple
        # NOTE(review): no matching __hash__ is defined
        if isinstance(other, tuple):
            return self.data == other
        else:
            return self.data == other.data
def require_axis(f):
    """ Check if the object of the function has axis and sel_axis members """

    def _wrapper(self, *args, **kwargs):
        if None in (self.axis, self.sel_axis):
            # bug fix: the original used "%(func_name)" with no conversion
            # type (mangling the message) and the Python-2-only f.func_name;
            # f.__name__ works on both Python 2 and 3
            raise ValueError('%(func_name)s requires the node %(node)s '
                    'to have an axis and a sel_axis function' %
                    dict(func_name=f.__name__, node=repr(self)))

        return f(self, *args, **kwargs)

    return _wrapper
class KDNode(Node):
    """ A kd-tree node: a Node that also knows its splitting axis. """

    def __init__(self, data=None, left=None, right=None, axis=None,
            sel_axis=None):
        """ Creates a new node for a kd-tree

        If the node will be used within a tree, the axis and the sel_axis
        function should be supplied.

        sel_axis(axis) is used when creating subnodes of the current node. It
        receives the axis of the parent node and returns the axis of the child
        node. """
        super(KDNode, self).__init__(data, left, right)
        self.axis = axis
        self.sel_axis = sel_axis

    @require_axis
    def add(self, point):
        """
        Adds a point to the current node or recursively
        descends to one of its children.

        Users should call add() only to the topmost tree.
        """
        # an empty node simply takes the payload
        if self.data is None:
            self.data = point
            return

        # validate that the point matches the tree's dimensionality
        dim = check_dimensionality([self.data, point])

        # values smaller on the splitting axis go left, others go right
        if point[self.axis] < self.data[self.axis]:
            if self.left is None:
                self.left = self.create_subnode(point)
            else:
                self.left.add(point)
        else:
            if self.right is None:
                self.right = self.create_subnode(point)
            else:
                self.right.add(point)

    @require_axis
    def create_subnode(self, data):
        """ Creates a child node carrying ``data`` that splits on the next axis """
        return self.__class__(data,
                axis=self.sel_axis(self.axis),
                sel_axis=self.sel_axis)

    @require_axis
    def find_replacement(self):
        """ Returns (replacement, parent) used when removing this node: the
        axis-minimum of the right subtree, or the axis-maximum of the left
        subtree when there is no right child """
        if self.right:
            child, parent = self.right.extreme_child(min, self.axis)
        else:
            child, parent = self.left.extreme_child(max, self.axis)

        # a direct child reports no parent, in which case self is the parent
        return (child, parent if parent is not None else self)

    @require_axis
    def remove(self, point):
        """ Removes the node with the given point from the tree

        Returns the new root node of the (sub)tree """

        if not self:
            return

        if self.data == point:
            if self.is_leaf:
                # leaves are emptied in place; parents unlink them below
                self.data = None
                return self
            else:
                # swap self with a replacement node from a subtree, then
                # delete the point from its new, deeper position
                root, max_p = self.find_replacement()
                pos = max_p.get_child_pos(root)

                # self and root swap positions
                tmp_l, tmp_r = self.left, self.right
                self.left, self.right = root.left, root.right
                root.left, root.right = tmp_l if tmp_l is not root else self, tmp_r if tmp_r is not root else self

                if max_p is not self:
                    max_p.set_child(pos, self)
                    new_depth = max_p.height()  # NOTE(review): result unused
                    max_p.remove(self.data)
                else:
                    root.remove(self.data)

            return root

        # fast path: the point sits in a direct child
        if self.left and self.left.data == point:
            if self.left.is_leaf:
                self.left = None
            else:
                self.left = self.left.remove(point)

        elif self.right and self.right.data == point:
            if self.right.is_leaf:
                self.right = None
            else:
                self.right = self.right.remove(point)

        # otherwise recurse into every subtree that could contain the point
        # (both sides when the coordinate equals the splitting value)
        if point[self.axis] <= self.data[self.axis]:
            if self.left:
                self.left = self.left.remove(point)

        if point[self.axis] >= self.data[self.axis]:
            if self.right:
                self.right = self.right.remove(point)

        return self

    def rebalance(self):
        """
        Returns the (possibly new) root of the rebalanced tree
        """
        # rebuild from scratch; create() picks medians, yielding a balanced tree
        return create(list([x.data for x in self.inorder()]))

    def axis_dist(self, point, axis):
        """
        Squared distance at the given axis between
        the current Node and the given point
        """
        import math  # NOTE(review): redundant; math is imported at module level
        return math.pow(self.data[axis] - point[axis], 2)

    def dist(self, point):
        """
        Squared distance between the current Node
        and the given point
        """
        r = range(len(self.data))
        return sum([self.axis_dist(point, i) for i in r])

    @require_axis
    def search_nn(self, point, best=None):
        """
        Search the nearest neighbor of the given point

        (Python 2 only: uses a tuple-unpacking lambda below.)
        """
        if best is None:
            best = self

        # consider the current node
        if self.dist(point) < best.dist(point):
            best = self

        # sort the children, nearer one first
        children = sorted(self.children, key=lambda (c, p): c.dist(point))

        for child, p in children:
            # check if node needs to be recursed
            # NOTE(review): prunes on this node's plane distance for both
            # children -- compare against the standard kd-tree NN pruning
            # rule before relying on exact results
            if self.axis_dist(point, self.axis) < best.dist(point):
                best = child.search_nn(point, best)

        return best

    @require_axis
    def is_valid(self):
        """ Checks the kd-tree invariant on this node's axis (left <= node,
        right >= node) and recurses into the children """
        if not self:
            return True

        if self.left and self.data[self.axis] < self.left.data[self.axis]:
            return False

        if self.right and self.data[self.axis] > self.right.data[self.axis]:
            return False

        return all(c.is_valid() for c, _ in self.children) or self.is_leaf

    def extreme_child(self, sel_func, axis):
        """ Returns a child of the subtree and its parent

        The child is selected by sel_func which is either min or max
        (or a different function with similar semantics). """
        # (Python 2 only: tuple-unpacking lambda)
        max_key = lambda (child, parent): child.data[axis]

        # we don't know our parent, so we include None
        me = [(self, None)] if self else []

        child_max = [c.extreme_child(sel_func, axis) for c, _ in self.children]
        # insert self for unknown parents
        child_max = [(c, p if p is not None else self) for c, p in child_max]

        candidates = me + child_max

        if not candidates:
            return None, None

        return sel_func(candidates, key=max_key)
def create(point_list=[], dimensions=None, axis=0, sel_axis=None):
    """ Creates a kd-tree from a list of points

    All points in the list must be of the same dimensionality.

    If no point_list is given, an empty tree is created. The number of
    dimensions has to be given instead.

    If both a point_list and dimensions are given, the numbers must agree.

    Axis is the axis on which the root-node should split.

    sel_axis(axis) is used when creating subnodes of a node. It receives the
    axis of the parent node and returns the axis of the child node. """

    if not point_list and not dimensions:
        raise ValueError('either point_list or dimensions must be provided')

    elif point_list:
        dim = check_dimensionality(point_list)
        # bug fix: the original overwrote ``dimensions`` with ``dim`` before
        # comparing them, so a mismatch between the two arguments was never
        # detected
        if dimensions is not None and dim != dimensions:
            raise ValueError('dimensions parameter must match actual dimension of points')

    else:
        dim = dimensions

    # by default cycle through the axis
    sel_axis = sel_axis or (lambda prev_axis: (prev_axis+1) % dim)

    if not point_list:
        return KDNode(sel_axis=sel_axis, axis=axis)

    # bug fix: sort a copy so the caller's list is not mutated, then
    # choose the median as pivot element
    point_list = sorted(point_list, key=lambda point: point[axis])
    median = len(point_list) // 2

    loc = point_list[median]
    left = create(point_list[:median], dim, sel_axis(axis))
    right = create(point_list[median + 1:], dim, sel_axis(axis))
    return KDNode(loc, left, right, axis=axis, sel_axis=sel_axis)
def check_dimensionality(point_list):
    """ Returns the dimensionality shared by all points, raising ValueError
    when any point disagrees with the first one """
    dimension = len(point_list[0])
    if any(len(p) != dimension for p in point_list[1:]):
        raise ValueError('All Points in the point_list must have the same dimensionality')
    return dimension
def level_order(tree, include_all=False):
    """ Returns an iterator over the tree in level-order

    If include_all is set to True, empty parts of the tree are filled
    with dummy entries and the iterator becomes infinite. """
    pending = deque([tree])
    while pending:
        node = pending.popleft()
        yield node

        # enqueue children; with include_all, missing children become
        # empty dummy nodes so every level is fully populated
        if include_all or node.left:
            pending.append(node.left or node.__class__())
        if include_all or node.right:
            pending.append(node.right or node.__class__())
def visualize(tree, max_level=100, node_width=10, left_padding=5):
    """ Prints the tree to stdout

    Each level is centered over the next one; node_width characters are
    reserved per node slot and the whole diagram is shifted right by
    left_padding spaces.  (Python 2 only: uses print statements.) """
    height = min(max_level, tree.height()-1)
    max_width = pow(2, height)

    per_level = 1  # number of node slots on the current level
    in_level  = 0  # slots emitted so far on the current level
    level = 0

    for node in level_order(tree, include_all=True):

        if in_level == 0:
            # start of a new level: blank separator lines plus left padding
            print
            print
            print ' '*left_padding,

        # each level splits the total width among twice as many slots
        width = int(max_width*node_width/per_level)

        node_str = (str(node.data) if node else '').center(width)
        print node_str,

        in_level += 1
        if in_level == per_level:
            in_level = 0
            per_level *= 2
            level += 1

        # level_order with include_all=True is infinite; stop past the bottom
        if level > height:
            break

    print
    print
add/fix doctests
import math
from itertools import chain
from collections import deque
class Node(object):
    """ A Node in a kd-tree

    A tree is represented by its root node, and every node represents
    its subtree"""

    def __init__(self, data=None, left=None, right=None):
        # data is the node's payload (a point); left/right are child Nodes
        self.data = data
        self.left = left
        self.right = right

    @property
    def is_leaf(self):
        """ Returns True if a Node has no subnodes

        >>> Node().is_leaf
        True

        >>> Node( 1, left=Node(2) ).is_leaf
        False
        """
        return (not self.data) or \
               (all(not bool(c) for c, p in self.children))

    def preorder(self):
        """ Iterator yielding this node first, then the left and right subtrees """
        if not self:
            return

        yield self

        if self.left:
            for x in self.left.preorder():
                yield x

        if self.right:
            for x in self.right.preorder():
                yield x

    def inorder(self):
        """ Iterator yielding the left subtree, this node, then the right subtree """
        if not self:
            return

        if self.left:
            for x in self.left.inorder():
                yield x

        yield self

        if self.right:
            for x in self.right.inorder():
                yield x

    def postorder(self):
        """ Iterator yielding the left subtree, the right subtree, then this node """
        if not self:
            return

        if self.left:
            for x in self.left.postorder():
                yield x

        if self.right:
            for x in self.right.postorder():
                yield x

        yield self

    @property
    def children(self):
        """
        Returns an iterator for the non-empty children of the Node

        The children are returned as (Node, pos) tuples where pos is 0 for
        the left subnode and 1 for the right.

        >>> len(list(create(dimensions=2).children))
        0

        >>> len(list(create([ (1, 2) ]).children))
        0

        >>> len(list(create([ (2, 2), (2, 1), (2, 3) ]).children))
        2
        """
        if self.left and self.left.data is not None:
            yield self.left, 0
        if self.right and self.right.data is not None:
            yield self.right, 1

    def set_child(self, index, child):
        """ Sets one of the node's children; index 0 is left, 1 is right """
        if index == 0:
            self.left = child
        else:
            self.right = child

    def height(self):
        """
        Returns height of the (sub)tree, without considering
        empty leaf-nodes

        >>> create(dimensions=2).height()
        0

        >>> create([ (1, 2) ]).height()
        1

        >>> create([ (1, 2), (2, 3) ]).height()
        2
        """
        min_height = int(bool(self))
        return max([min_height] + [c.height()+1 for c, p in self.children])

    def get_child_pos(self, child):
        """ Returns the position (0 left, 1 right) of the given child, or
        None if it is not a direct child of this node """
        for c, pos in self.children:
            if child == c:
                return pos

    def __repr__(self):
        return '<%(cls)s - %(data)s>' % \
            dict(cls=self.__class__.__name__, data=repr(self.data))

    def __nonzero__(self):
        # Python 2 truthiness hook: a node is falsy until it carries data.
        # NOTE(review): Python 3 would need __bool__ for the same effect.
        return self.data is not None

    def __eq__(self, other):
        # nodes compare by payload; a node may also be compared to a raw tuple
        # NOTE(review): no matching __hash__ is defined
        if isinstance(other, tuple):
            return self.data == other
        else:
            return self.data == other.data
def require_axis(f):
    """ Check if the object of the function has axis and sel_axis members """

    def _wrapper(self, *args, **kwargs):
        if None in (self.axis, self.sel_axis):
            # bug fix: the original used "%(func_name)" with no conversion
            # type (mangling the message) and the Python-2-only f.func_name;
            # f.__name__ works on both Python 2 and 3
            raise ValueError('%(func_name)s requires the node %(node)s '
                    'to have an axis and a sel_axis function' %
                    dict(func_name=f.__name__, node=repr(self)))

        return f(self, *args, **kwargs)

    return _wrapper
class KDNode(Node):
    """ A kd-tree node: a Node that also knows its splitting axis. """

    def __init__(self, data=None, left=None, right=None, axis=None,
            sel_axis=None):
        """ Creates a new node for a kd-tree

        If the node will be used within a tree, the axis and the sel_axis
        function should be supplied.

        sel_axis(axis) is used when creating subnodes of the current node. It
        receives the axis of the parent node and returns the axis of the child
        node. """
        super(KDNode, self).__init__(data, left, right)
        self.axis = axis
        self.sel_axis = sel_axis

    @require_axis
    def add(self, point):
        """
        Adds a point to the current node or recursively
        descends to one of its children.

        Users should call add() only to the topmost tree.
        """
        # an empty node simply takes the payload
        if self.data is None:
            self.data = point
            return

        # validate that the point matches the tree's dimensionality
        dim = check_dimensionality([self.data, point])

        # values smaller on the splitting axis go left, others go right
        if point[self.axis] < self.data[self.axis]:
            if self.left is None:
                self.left = self.create_subnode(point)
            else:
                self.left.add(point)
        else:
            if self.right is None:
                self.right = self.create_subnode(point)
            else:
                self.right.add(point)

    @require_axis
    def create_subnode(self, data):
        """ Creates a child node carrying ``data`` that splits on the next axis """
        return self.__class__(data,
                axis=self.sel_axis(self.axis),
                sel_axis=self.sel_axis)

    @require_axis
    def find_replacement(self):
        """ Returns (replacement, parent) used when removing this node: the
        axis-minimum of the right subtree, or the axis-maximum of the left
        subtree when there is no right child """
        if self.right:
            child, parent = self.right.extreme_child(min, self.axis)
        else:
            child, parent = self.left.extreme_child(max, self.axis)

        # a direct child reports no parent, in which case self is the parent
        return (child, parent if parent is not None else self)

    @require_axis
    def remove(self, point):
        """ Removes the node with the given point from the tree

        Returns the new root node of the (sub)tree """

        if not self:
            return

        if self.data == point:
            if self.is_leaf:
                # leaves are emptied in place; parents unlink them below
                self.data = None
                return self
            else:
                # swap self with a replacement node from a subtree, then
                # delete the point from its new, deeper position
                root, max_p = self.find_replacement()
                pos = max_p.get_child_pos(root)

                # self and root swap positions
                tmp_l, tmp_r = self.left, self.right
                self.left, self.right = root.left, root.right
                root.left, root.right = tmp_l if tmp_l is not root else self, tmp_r if tmp_r is not root else self

                if max_p is not self:
                    max_p.set_child(pos, self)
                    new_depth = max_p.height()  # NOTE(review): result unused
                    max_p.remove(self.data)
                else:
                    root.remove(self.data)

            return root

        # fast path: the point sits in a direct child
        if self.left and self.left.data == point:
            if self.left.is_leaf:
                self.left = None
            else:
                self.left = self.left.remove(point)

        elif self.right and self.right.data == point:
            if self.right.is_leaf:
                self.right = None
            else:
                self.right = self.right.remove(point)

        # otherwise recurse into every subtree that could contain the point
        # (both sides when the coordinate equals the splitting value)
        if point[self.axis] <= self.data[self.axis]:
            if self.left:
                self.left = self.left.remove(point)

        if point[self.axis] >= self.data[self.axis]:
            if self.right:
                self.right = self.right.remove(point)

        return self

    def rebalance(self):
        """
        Returns the (possibly new) root of the rebalanced tree
        """
        # rebuild from scratch; create() picks medians, yielding a balanced tree
        return create(list([x.data for x in self.inorder()]))

    def axis_dist(self, point, axis):
        """
        Squared distance at the given axis between
        the current Node and the given point
        """
        import math  # NOTE(review): redundant; math is imported at module level
        return math.pow(self.data[axis] - point[axis], 2)

    def dist(self, point):
        """
        Squared distance between the current Node
        and the given point
        """
        r = range(len(self.data))
        return sum([self.axis_dist(point, i) for i in r])

    @require_axis
    def search_nn(self, point, best=None):
        """
        Search the nearest neighbor of the given point

        (Python 2 only: uses a tuple-unpacking lambda below.)
        """
        if best is None:
            best = self

        # consider the current node
        if self.dist(point) < best.dist(point):
            best = self

        # sort the children, nearer one first
        children = sorted(self.children, key=lambda (c, p): c.dist(point))

        for child, p in children:
            # check if node needs to be recursed
            # NOTE(review): prunes on this node's plane distance for both
            # children -- compare against the standard kd-tree NN pruning
            # rule before relying on exact results
            if self.axis_dist(point, self.axis) < best.dist(point):
                best = child.search_nn(point, best)

        return best

    @require_axis
    def is_valid(self):
        """ Checks the kd-tree invariant on this node's axis (left <= node,
        right >= node) and recurses into the children """
        if not self:
            return True

        if self.left and self.data[self.axis] < self.left.data[self.axis]:
            return False

        if self.right and self.data[self.axis] > self.right.data[self.axis]:
            return False

        return all(c.is_valid() for c, _ in self.children) or self.is_leaf

    def extreme_child(self, sel_func, axis):
        """ Returns a child of the subtree and its parent

        The child is selected by sel_func which is either min or max
        (or a different function with similar semantics). """
        # (Python 2 only: tuple-unpacking lambda)
        max_key = lambda (child, parent): child.data[axis]

        # we don't know our parent, so we include None
        me = [(self, None)] if self else []

        child_max = [c.extreme_child(sel_func, axis) for c, _ in self.children]
        # insert self for unknown parents
        child_max = [(c, p if p is not None else self) for c, p in child_max]

        candidates = me + child_max

        if not candidates:
            return None, None

        return sel_func(candidates, key=max_key)
def create(point_list=[], dimensions=None, axis=0, sel_axis=None):
    """ Creates a kd-tree from a list of points

    All points in the list must be of the same dimensionality.

    If no point_list is given, an empty tree is created. The number of
    dimensions has to be given instead.

    If both a point_list and dimensions are given, the numbers must agree.

    Axis is the axis on which the root-node should split.

    sel_axis(axis) is used when creating subnodes of a node. It receives the
    axis of the parent node and returns the axis of the child node. """

    if not point_list and not dimensions:
        raise ValueError('either point_list or dimensions must be provided')

    elif point_list:
        dim = check_dimensionality(point_list)
        # bug fix: the original overwrote ``dimensions`` with ``dim`` before
        # comparing them, so a mismatch between the two arguments was never
        # detected
        if dimensions is not None and dim != dimensions:
            raise ValueError('dimensions parameter must match actual dimension of points')

    else:
        dim = dimensions

    # by default cycle through the axis
    sel_axis = sel_axis or (lambda prev_axis: (prev_axis+1) % dim)

    if not point_list:
        return KDNode(sel_axis=sel_axis, axis=axis)

    # bug fix: sort a copy so the caller's list is not mutated, then
    # choose the median as pivot element
    point_list = sorted(point_list, key=lambda point: point[axis])
    median = len(point_list) // 2

    loc = point_list[median]
    left = create(point_list[:median], dim, sel_axis(axis))
    right = create(point_list[median + 1:], dim, sel_axis(axis))
    return KDNode(loc, left, right, axis=axis, sel_axis=sel_axis)
def check_dimensionality(point_list):
    """ Returns the dimensionality shared by all points, raising ValueError
    when any point disagrees with the first one """
    dimension = len(point_list[0])
    if any(len(p) != dimension for p in point_list[1:]):
        raise ValueError('All Points in the point_list must have the same dimensionality')
    return dimension
def level_order(tree, include_all=False):
    """ Returns an iterator over the tree in level-order

    If include_all is set to True, empty parts of the tree are filled
    with dummy entries and the iterator becomes infinite. """
    pending = deque([tree])
    while pending:
        node = pending.popleft()
        yield node

        # enqueue children; with include_all, missing children become
        # empty dummy nodes so every level is fully populated
        if include_all or node.left:
            pending.append(node.left or node.__class__())
        if include_all or node.right:
            pending.append(node.right or node.__class__())
def visualize(tree, max_level=100, node_width=10, left_padding=5):
    """ Prints the tree to stdout

    Each level is centered over the next one; node_width characters are
    reserved per node slot and the whole diagram is shifted right by
    left_padding spaces.  (Python 2 only: uses print statements.) """
    height = min(max_level, tree.height()-1)
    max_width = pow(2, height)

    per_level = 1  # number of node slots on the current level
    in_level  = 0  # slots emitted so far on the current level
    level = 0

    for node in level_order(tree, include_all=True):

        if in_level == 0:
            # start of a new level: blank separator lines plus left padding
            print
            print
            print ' '*left_padding,

        # each level splits the total width among twice as many slots
        width = int(max_width*node_width/per_level)

        node_str = (str(node.data) if node else '').center(width)
        print node_str,

        in_level += 1
        if in_level == per_level:
            in_level = 0
            per_level *= 2
            level += 1

        # level_order with include_all=True is infinite; stop past the bottom
        if level > height:
            break

    print
    print
|
# -*- coding: utf-8 -*-
"""
rnn
model built atop tensorflow for constructing Recurrent Neural Network (RNN) and
Long Short-Term Memory (LSTM) architecture
"""
import tensorflow as tf
class RecurrentNeuralNet:
    """RNN/LSTM graph builder (work in progress).

    NOTE(review): ``self.flattened_shape`` and ``self.shape`` used below are
    never assigned anywhere in this class, so accessing ``x`` will raise
    AttributeError until they are defined.
    """

    def __init__(self, cell_type, num_nodes):
        self.type = cell_type   # 'RNN' or 'LSTM'; selects the cell in cell()
        self.nodes = num_nodes  # number of hidden units per cell

    @property
    def x(self):
        """feature set"""
        # assumes self.flattened_shape / self.shape describe the input
        # tensor -- TODO confirm; neither attribute is set in this class
        return tf.reshape(tf.placeholder(
            dtype=tf.float32,
            shape=self.flattened_shape,
            name='feature'
        ),
            # transform 3D shape to 4D
            (-1, ) + self.shape
        )

    @property
    def y_(self):
        """true label, in one hot format"""
        # NOTE(review): the class count 8 is hard-coded here
        return tf.placeholder(dtype=tf.float32, shape=[None, 8], name='label')

    @staticmethod
    def weight_variable(shape):
        # small truncated-normal noise breaks symmetry between units
        initial = tf.truncated_normal(shape, stddev=0.1)
        return tf.Variable(initial)

    @staticmethod
    def bias_variable(shape):
        # small positive constant avoids dead units at initialization
        initial = tf.constant(0.1, shape=shape)
        return tf.Variable(initial)

    def cell(self):
        """vanilla RNN cell or LSTM cell (Hochreiter & Schmidhuber 1997)
        with forget, input, and output gates.

        Returns None for any other cell_type -- NOTE(review): consider
        raising instead."""
        if self.type == 'RNN':
            return tf.contrib.rnn.BasicRNNCell(
                num_units=self.nodes,
                activation=tf.tanh)
        elif self.type == 'LSTM':
            return tf.contrib.rnn.LSTMCell(num_units=self.nodes,
                                           activation=tf.tanh,
                                           use_peepholes=False)

    def _unrolled_rnn(self):
        """dynamic_rnn pads sequential input of different sizes."""
        # NOTE(review): this passes the bound method itself;
        # tf.nn.dynamic_rnn expects a cell instance -- likely should be
        # cell=self.cell()
        outputs, state = tf.nn.dynamic_rnn(cell=self.cell,
                                           inputs=self.x,
                                           dtype=tf.float32)
        return outputs, state
rnn: work in progress
# -*- coding: utf-8 -*-
"""
rnn
model built atop tensorflow for constructing Recurrent Neural Network (RNN) and
Long Short-Term Memory (LSTM) architecture
"""
import tensorflow as tf
class RecurrentNeuralNet:
    """LSTM graph builder (work in progress).

    NOTE(review): ``self.shape`` used in ``x`` is never assigned in this
    class, so accessing that property raises AttributeError until it is
    defined.
    """

    def __init__(self, state_size, num_classes):
        self._state_size = state_size  # number of hidden units per LSTM cell
        self._n_class = num_classes    # size of the one-hot label vector

    @property
    def x(self):
        """feature set"""
        # assumes self.shape describes the per-example input dimensions --
        # TODO confirm; the attribute is not set anywhere in this class
        return tf.placeholder(
            dtype=tf.float32,
            shape=(-1, ) + self.shape,
            name='feature')

    @property
    def y_(self):
        """true label, in one hot format"""
        return tf.placeholder(dtype=tf.float32,
                              shape=(None, self._n_class),
                              name='multi-label')

    @staticmethod
    def weight_variable(shape):
        """Weight tensor initialized with small truncated-normal noise."""
        initial = tf.truncated_normal(shape, stddev=0.1)
        return tf.Variable(initial)

    @staticmethod
    def bias_variable(shape):
        """Bias tensor initialized to a small positive constant."""
        initial = tf.constant(0.1, shape=shape)
        return tf.Variable(initial)

    def lstm_cell(self):
        """LSTM implementation by Hochreiter & Schmidhuber (1997)."""
        # bug fix: the constructor stores the size in _state_size; this class
        # has no ``nodes`` attribute, so the old code raised AttributeError
        return tf.contrib.rnn.LSTMCell(num_units=self._state_size,
                                       activation=tf.tanh,
                                       use_peepholes=False)

    def lstm(self):
        """dynamic_rnn pads sequential input of different sizes."""
        # bug fix: tf.nn.dynamic_rnn expects an RNNCell instance, not the
        # bound method, so lstm_cell() must be called
        outputs, state = tf.nn.dynamic_rnn(cell=self.lstm_cell(),
                                           inputs=self.x,
                                           dtype=tf.float32)
        return outputs, state
|
"""Function to keep a maxmind database file up to date"""
import hashlib
import os
import shutil
import sys
import tarfile
import requests
__version__ = '0.1.0'
__url__ = 'https://github.com/yola/maxmind-updater'
def _hash_file(filename):
if not os.path.exists(filename):
return ''
block_size = 65536
hasher = hashlib.md5()
with open(filename, 'rb') as f:
buf = f.read(block_size)
while len(buf) > 0:
hasher.update(buf)
buf = f.read(block_size)
return hasher.hexdigest()
def update_db(db_path, license_key, edition_id):
    """Download the MaxMind database ``edition_id`` to ``db_path`` unless the
    local archive's MD5 already matches the digest published upstream."""
    db_dir_path = os.path.dirname(db_path)
    db_archive_path = '%s.tar.gz' % db_path

    def maxmind_download(suffix, **kwargs):
        # Shared helper: MaxMind serves the archive and its .md5 from the
        # same endpoint, selected by ``suffix``.
        return requests.get('https://download.maxmind.com/app/geoip_download',
                            params={'license_key': license_key,
                                    'edition_id': edition_id,
                                    'suffix': suffix,
                                    },
                            **kwargs)

    # bug fix: response.content is bytes on Python 3 and could never equal
    # hashlib's str hexdigest, forcing a re-download on every run; the .md5
    # endpoint returns the digest as ASCII text, so decode it
    expected_md5 = maxmind_download('tar.gz.md5').content.decode('ascii')
    curr_md5 = _hash_file(db_archive_path)
    if expected_md5 == curr_md5 and os.path.exists(db_path):
        return

    with open(db_archive_path, 'wb') as local_zip:
        for chunk in maxmind_download('tar.gz', stream=True
                                      ).iter_content(chunk_size=1024):
            if chunk:  # filter out keep-alive new chunks
                local_zip.write(chunk)

    with tarfile.open(db_archive_path) as tar_file:
        # We only want the mmdb file. Maxmind kindly includes things
        # we don't want.
        extract_members = [member for member in tar_file.getmembers()
                           if member.name.endswith('.mmdb')]
        assert len(extract_members) == 1
        tar_file.extractall(path=db_dir_path, members=extract_members)

    # extractall keeps the subfolder structure. Account for this by
    # appending the path to the db_dir_path where it was extracted.
    new_db = os.path.join(db_dir_path, extract_members[0].path)
    try:
        pass
        # TODO
        # test_ip('8.8.8.8', new_db)
        # test_ip('2001:420::', new_db)
    except Exception:
        sys.stderr.write('Retrieved invalid GeoIP database - '
                         'check MaxMind account details.\n')
        raise

    # reuse db_dir_path instead of recomputing os.path.dirname(db_path)
    if not os.path.exists(db_dir_path):
        os.makedirs(db_dir_path)
    shutil.move(new_db, db_path)
    os.rmdir(os.path.dirname(new_db))
Extract additional usages of db_dir_path
"""Function to keep a maxmind database file up to date"""
import hashlib
import os
import shutil
import sys
import tarfile
import requests
__version__ = '0.1.0'
__url__ = 'https://github.com/yola/maxmind-updater'
def _hash_file(filename):
if not os.path.exists(filename):
return ''
block_size = 65536
hasher = hashlib.md5()
with open(filename, 'rb') as f:
buf = f.read(block_size)
while len(buf) > 0:
hasher.update(buf)
buf = f.read(block_size)
return hasher.hexdigest()
def update_db(db_path, license_key, edition_id):
    """Download the MaxMind database ``edition_id`` to ``db_path`` unless the
    local archive's MD5 already matches the digest published upstream."""
    db_dir_path = os.path.dirname(db_path)
    db_archive_path = '%s.tar.gz' % db_path

    def maxmind_download(suffix, **kwargs):
        # Shared helper: MaxMind serves the archive and its .md5 from the
        # same endpoint, selected by ``suffix``.
        return requests.get('https://download.maxmind.com/app/geoip_download',
                            params={'license_key': license_key,
                                    'edition_id': edition_id,
                                    'suffix': suffix,
                                    },
                            **kwargs)

    # bug fix: response.content is bytes on Python 3 and could never equal
    # hashlib's str hexdigest, forcing a re-download on every run; the .md5
    # endpoint returns the digest as ASCII text, so decode it
    expected_md5 = maxmind_download('tar.gz.md5').content.decode('ascii')
    curr_md5 = _hash_file(db_archive_path)
    if expected_md5 == curr_md5 and os.path.exists(db_path):
        return

    with open(db_archive_path, 'wb') as local_zip:
        for chunk in maxmind_download('tar.gz', stream=True
                                      ).iter_content(chunk_size=1024):
            if chunk:  # filter out keep-alive new chunks
                local_zip.write(chunk)

    with tarfile.open(db_archive_path) as tar_file:
        # We only want the mmdb file. Maxmind kindly includes things
        # we don't want.
        extract_members = [member for member in tar_file.getmembers()
                           if member.name.endswith('.mmdb')]
        assert len(extract_members) == 1
        tar_file.extractall(path=db_dir_path, members=extract_members)

    # extractall keeps the subfolder structure. Account for this by
    # appending the path to the db_dir_path where it was extracted.
    new_db = os.path.join(db_dir_path, extract_members[0].path)
    try:
        pass
        # TODO
        # test_ip('8.8.8.8', new_db)
        # test_ip('2001:420::', new_db)
    except Exception:
        sys.stderr.write('Retrieved invalid GeoIP database - '
                         'check MaxMind account details.\n')
        raise

    if not os.path.exists(db_dir_path):
        os.makedirs(db_dir_path)
    shutil.move(new_db, db_path)
    os.rmdir(os.path.dirname(new_db))
|
from abc import ABCMeta, abstractmethod, abstractproperty
class IBroker(object):
    """Abstract interface for an AMQP broker client (Python 2 ABC)."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def publish(self, exchange_name, routing_key, message):
        """
        Publish an unencoded message to the amqp server and submit timing info.
        Returns the deferred from IBroker.raw_publish
        """
    # XXX This should be removed. It's only used by place observers.
    @abstractmethod
    def raw_publish(self, exchange_name, routing_key, encoded_message):
        """
        Publish an encoded message to the amqp server.
        Returns a deferred that will callback when amqp server confirms and
        will errback when amqp server rejects published message. Both are
        called with the publish_tag
        """
    @abstractmethod
    def register_handler(self, handler):
        """
        Register a handler to accept messages on a queue. When a connection is
        made the listener will be set up for you and will deliver message_class
        objects to the handler's __call__ function.
        """
class IHandler(object):
    """Abstract message handler; paired with IBroker.register_handler."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def __call__(self, message):
        """
        Returns a deferred that will callback when the message has been
        completely handled, or will errback when the message cannot be
        handled.
        """
    # The message type this handler consumes (used by the broker when
    # decoding queue payloads).
    @abstractproperty
    def message_class(self):
        pass
class IOrchestrator(object):
    """Abstract interface to the petri-net orchestrator service."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def create_token(self, net_key, place_idx,
                     color, color_group_index, data=None):
        pass
    @abstractmethod
    def notify_place(self, net_key, place_idx):
        pass
    @abstractmethod
    def notify_transition(self, net_key, transition_idx, place_idx):
        pass
    @abstractmethod
    def place_entry_observed(self, packet):
        pass
class IServiceLocator(object):
    """Abstract name -> service lookup with dict-like access."""
    __metaclass__ = ABCMeta
    @abstractmethod
    # NOTE(review): 'defaut' looks like a typo for 'default'; renaming it
    # would change the keyword API for implementers, so it is left as-is.
    def get(self, name, defaut=None):
        pass
    @abstractmethod
    def __getitem__(self, name):
        pass
class ISettings(object):
    """Abstract settings lookup, keyed by path, with dict-like access."""
    __metaclass__ = ABCMeta
    @abstractmethod
    # NOTE(review): 'defaut' looks like a typo for 'default'; renaming it
    # would change the keyword API for implementers, so it is left as-is.
    def get(self, path, defaut=None):
        pass
    @abstractmethod
    def __getitem__(self, path):
        pass
class IShellCommand(object):
    """Abstract interface for submitting a shell command for execution."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def submit(self, command_line, net_key=None, response_places=None,
               **executor_options):
        pass
class IShellCommandExecutor(object):
    """Abstract executor that actually runs a submitted command line."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def execute(self, command_line, **kwargs):
        pass
    @abstractmethod
    # NOTE(review): the mutable default 'environment={}' is part of the
    # declared interface; implementers must not mutate or share it.
    def __call__(self, command_line, group_id=None, user_id=None,
                 environment={}, **kwargs):
        pass
class IStorage(object):
    """Abstract storage backend; no abstract methods are declared here."""
    __metaclass__ = ABCMeta
update orchestrator interface
from abc import ABCMeta, abstractmethod, abstractproperty
class IBroker(object):
    """Abstract interface for an AMQP broker client (Python 2 ABC)."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def publish(self, exchange_name, routing_key, message):
        """
        Publish an unencoded message to the amqp server and submit timing info.
        Returns the deferred from IBroker.raw_publish
        """
    # XXX This should be removed. It's only used by place observers.
    @abstractmethod
    def raw_publish(self, exchange_name, routing_key, encoded_message):
        """
        Publish an encoded message to the amqp server.
        Returns a deferred that will callback when amqp server confirms and
        will errback when amqp server rejects published message. Both are
        called with the publish_tag
        """
    @abstractmethod
    def register_handler(self, handler):
        """
        Register a handler to accept messages on a queue. When a connection is
        made the listener will be set up for you and will deliver message_class
        objects to the handler's __call__ function.
        """
class IHandler(object):
    """Abstract message handler; paired with IBroker.register_handler."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def __call__(self, message):
        """
        Returns a deferred that will callback when the message has been
        completely handled, or will errback when the message cannot be
        handled.
        """
    # The message type this handler consumes (used by the broker when
    # decoding queue payloads).
    @abstractproperty
    def message_class(self):
        pass
class IOrchestrator(object):
    """Abstract interface to the petri-net orchestrator service.

    This revision adds 'color' to notify_place and 'token_idx' to
    notify_transition relative to the earlier interface.
    """
    __metaclass__ = ABCMeta
    @abstractmethod
    def create_token(self, net_key, place_idx,
                     color, color_group_index, data=None):
        pass
    @abstractmethod
    def notify_place(self, net_key, place_idx, color):
        pass
    @abstractmethod
    def notify_transition(self, net_key, transition_idx, place_idx, token_idx):
        pass
    @abstractmethod
    def place_entry_observed(self, packet):
        pass
class IServiceLocator(object):
    """Abstract name -> service lookup with dict-like access."""
    __metaclass__ = ABCMeta
    @abstractmethod
    # NOTE(review): 'defaut' looks like a typo for 'default'; renaming it
    # would change the keyword API for implementers, so it is left as-is.
    def get(self, name, defaut=None):
        pass
    @abstractmethod
    def __getitem__(self, name):
        pass
class ISettings(object):
    """Abstract settings lookup, keyed by path, with dict-like access."""
    __metaclass__ = ABCMeta
    @abstractmethod
    # NOTE(review): 'defaut' looks like a typo for 'default'; renaming it
    # would change the keyword API for implementers, so it is left as-is.
    def get(self, path, defaut=None):
        pass
    @abstractmethod
    def __getitem__(self, path):
        pass
class IShellCommand(object):
    """Abstract interface for submitting a shell command for execution."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def submit(self, command_line, net_key=None, response_places=None,
               **executor_options):
        pass
class IShellCommandExecutor(object):
    """Abstract executor that actually runs a submitted command line."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def execute(self, command_line, **kwargs):
        pass
    @abstractmethod
    # NOTE(review): the mutable default 'environment={}' is part of the
    # declared interface; implementers must not mutate or share it.
    def __call__(self, command_line, group_id=None, user_id=None,
                 environment={}, **kwargs):
        pass
class IStorage(object):
    """Abstract storage backend; no abstract methods are declared here."""
    __metaclass__ = ABCMeta
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# copied from harvest_template.py which is due to:
# (C) 2013 Multichill, Amir
# (C) 2013 Pywikipediabot team
#
# Distributed under the terms of MIT License.
#
import re
import json
import pywikibot
from pywikibot import pagegenerators as pg
from pywikibot import textlib
import mwparserfromhell
import xisbn
from collections import defaultdict
from pprint import pprint
import copy
# Harvest runs against English Wikipedia; Wikidata is its linked repository.
en_wikipedia = pywikibot.Site('en', 'wikipedia')
wikidata = en_wikipedia.data_repository()
# Logging in happens at import time (module-level side effect).
if not wikidata.logged_in(): wikidata.login()
if not en_wikipedia.logged_in(): en_wikipedia.login()
# "imported from" (P143) source claim targeting English Wikipedia (Q328).
source = pywikibot.Claim(wikidata, 'p143')
source.setTarget(pywikibot.ItemPage(wikidata,'q328'))
# Wikidata property ids for each harvested infobox field.
properties = {'isbn':'P212',
              'ocn':'P243',
              'illustrator': 'P110',
              'author': 'P50',
              'lang': 'P364',
              'genre': 'P136',
              'translator': 'P655'}
# Language code -> Wikidata item for that Wikipedia. Note the mixed value
# kinds: 'imported_from' maps to a *property* id (P143), not an item.
wplangs = {'en':'Q328',
           'de':'Q48183',
           'fr':'Q8447',
           'it':'Q11920',
           'imported_from': 'P143'}
def logVIAFstats(remoteClaims):
    # Count items that already carry a VIAF identifier (P214) in the global
    # 'cases' counters. Python 2 code: print statement, dict.itervalues().
    for remoteClaimList in remoteClaims.itervalues():
        for remoteClaim in remoteClaimList:
            if remoteClaim.id == 'P214':
                print 'VIAF Author', str(remoteClaim.target)
                cases['hadVIAF'] += 1
class bookdata:
    """Accumulator for everything harvested about one book article."""
    def __init__(self, wditem):
        # wditem is the Wikidata ItemPage the article is linked to; the
        # remaining attributes collect raw values pulled from the infobox
        # ('x' prefixed lists hold values enriched via the xISBN service).
        self.wditem = wditem
        self.isbns = list()
        self.xisbns = list()
        self.ocns = list()
        self.xocns = list()
        self.deweys = list()
        self.authors = list()
        self.illustrators = list()
        self.translators = list()
        self.langs = list()
        self.publishdate = list()
        self.genres = list()
    def dictify(self):
        """Return a JSON-serializable dict: every attribute stringified,
        with the item reduced to its Q-id."""
        bookdict = dict()
        for k, v in self.__dict__.iteritems():
            bookdict[k] = str(v)
        bookdict['wditem'] = self.wditem.getID()
        return bookdict
def incorp_xdata(book):
    # When no OCLC number was harvested from the infobox, adopt the first
    # (lexicographically smallest) one returned by the xISBN service.
    if not book.ocns:
        if book.xocns:
            book.xocns.sort()
            book.ocns.append(book.xocns[0])
            cases['got_ocn_from_xisbn'] += 1
def checkISBN13(book):
    # Ensure book.isbns contains at least one ISBN-13; otherwise fall back
    # to the first xISBN candidate. Python 2: filter() returns a list, so
    # len() works below.
    def ISBN13(isbn):
        # Count only digit/X characters; separators (dashes, spaces) are
        # ignored when measuring length.
        justnums = filter(lambda x: x in '1234567890Xx', isbn)
        if len(justnums) == 13:
            return True
        else:
            return False
    isbn13s = list()
    for isbn in book.isbns:
        if ISBN13(isbn):
            isbn13s.append(isbn)
    #no isbn13s
    if not isbn13s:
        if book.xisbns:
            book.xisbns.sort()
            book.isbns.append(book.xisbns[0])
            print 'using an xisbn here'
            cases['put_in_a_isbn13'] += 1
def processRE(param, rx):
    """Return all matches of *rx* in the template parameter's value,
    after stripping commented-out wikitext."""
    # NOTE(review): str() on a mwparserfromhell value can raise
    # UnicodeEncodeError under Python 2 for non-ASCII content -- confirm.
    cleaned_text = textlib.removeDisabledParts(str(param.value.strip()))
    relist = re.findall(rx, cleaned_text)
    return relist
def processLinks(param):
    """Resolve the [[wikilinks]] in a template parameter to Wikidata items.

    param -- mwparserfromhell template Parameter whose value may contain
             wikilinks.
    Returns a list of pywikibot.ItemPage objects, one per wikilink;
    redirects are resolved to their target page first. Lookup errors
    propagate to the caller (as in the original, whose try/except
    re-raised unconditionally).
    """
    itempagelist = list()
    for mwnode in param.value.filter():
        if type(mwnode) == mwparserfromhell.nodes.wikilink.Wikilink:
            # Build the Page once (the original constructed it twice:
            # once for the redirect test, once for the lookup).
            page = pywikibot.Page(en_wikipedia, mwnode.title)
            if page.isRedirectPage():
                # Follow the redirect so the item lookup uses the
                # canonical title.
                page = page.getRedirectTarget()
            itempagelist.append(pywikibot.ItemPage.fromPage(page))
    return itempagelist
def processISBNs(param, book):
    # Harvest ISBN-looking strings and enrich them via the WorldCat xISBN
    # service. NOTE(review): inside a character class, '--–' is a *range*
    # from '-' (0x2d) to '–' (U+2013) and matches far more than the two
    # dash characters -- confirm the intended pattern.
    isbns = processRE(param=param, rx="[0-9][--–\ 0-9]{9,16}[xX]?")
    xisbns = set()
    xocns = set()
    for isbn in isbns:
        try:
            metadata = xisbn.xisbn(isbn, metadata=True)
            xisbns.update(metadata['isbn'])
            xocns.update(metadata['oclcnum'])
        except xisbn.isbnError:
            # Best effort: a bad ISBN just skips enrichment.
            pywikibot.output('xisbn error')
    book.isbns.extend(isbns)
    book.xisbns.extend(list(xisbns))
    book.xocns.extend(list(xocns))
# Thin per-field harvesters: each extracts one infobox parameter into the
# matching bookdata list, via regexes (processRE) or link resolution
# (processLinks).
def processOCNs(param, book):
    ocns = processRE(param=param, rx="\d+")
    book.ocns.extend(ocns)
def processDewey(param, book):
    deweys = processRE(param=param, rx="[^,]+")
    book.deweys.extend(deweys)
def processAuthors(param, book):
    book.authors.extend(processLinks(param))
def processIllustrators(param, book):
    book.illustrators.extend(processLinks(param))
def processTranslators(param, book):
    book.translators.extend(processLinks(param))
def processGenre(param, book):
    book.genres.extend(processLinks(param))
def processLanguage(param, book):
    book.langs.extend(processLinks(param))
def processPublished(param, book):
    # Intentionally a stub: publication dates are not harvested yet.
    pass
def processPage(page):
    """
    Process a single page: walk its book-infobox parameters and collect
    them into a bookdata instance (uses the module-level templateTitle).
    """
    book = bookdata(pywikibot.ItemPage.fromPage(page))
    pywikibot.output('Processing %s' % page)
    pagetext = page.get()
    wikicode = mwparserfromhell.parse(pagetext)
    for template in wikicode.filter_templates():
        # startswith also matches templates whose name merely begins with
        # the infobox title.
        if template.name.startswith(templateTitle):
            for param in template.params:
                if param.name.strip() == 'isbn':
                    processISBNs(param, book)
                if param.name.strip() == 'oclc':
                    processOCNs(param, book)
                if param.name.strip() == 'author':
                    processAuthors(param, book)
                if param.name.strip() == 'illustrator':
                    processIllustrators(param, book)
                if param.name.strip() == 'translator':
                    processTranslators(param, book)
                if param.name.strip() == 'language':
                    processLanguage(param, book)
                if param.name.strip() == 'published':
                    processPublished(param, book)
                if param.name.strip() == 'genre':
                    processGenre(param, book)
                if param.name.strip() == 'dewey':
                    processDewey(param, book)
    return book
def propertiesToClaims(book, lang):
    """Turn the harvested bookdata lists into unsaved pywikibot Claims,
    one per value, mapped through the module-level 'properties' table."""
    # NOTE(review): 'lang' is unused here -- confirm whether language-
    # specific handling was intended.
    localClaims = list() #we're returning this
    bookattrs = {'isbn': book.isbns,
                 'ocn': book.ocns,
                 'illustrator': book.illustrators,
                 'author': book.authors,
                 'lang': book.langs,
                 'genre': book.genres}
    for book_k, book_v in bookattrs.iteritems():
        if book_v:
            for attr in book_v:
                claimObj = pywikibot.Claim(site=wikidata, pid=properties[book_k])
                claimObj.setTarget(attr)
                localClaims.append(claimObj)
    return localClaims
def compareClaims(book, sourcelang):
    """Reconcile the harvested claims with those already on the item.

    For each local claim: add it (with an 'imported from' source) when no
    remote claim shares its property id; add only the source when a
    matching remote claim lacks it; do nothing when a sourced match exists.
    NOTE: matching is by property id only -- claim *values* are never
    compared, so a second value for an existing property is never added.
    """
    qid = book.wditem.getID()
    pageToEdit = pywikibot.ItemPage(wikidata, qid)
    page_parts = pageToEdit.get()
    localClaims = propertiesToClaims(book, sourcelang)
    remoteClaims = page_parts['claims']
    logVIAFstats(remoteClaims)
    #we'll need this for every claim
    localSource = pywikibot.Claim(site=wikidata, pid=wplangs['imported_from'])
    localSource.setTarget(pywikibot.ItemPage(wikidata, wplangs[sourcelang]))
    for localClaim in localClaims:
        '''there are three states
        noMatchingClaim, so we add our claim
        matchingClaimUnsourced, so we add our source
        matchingClaimSourced, claim was already present and had the same source, do nothing
        '''
        noMatchingClaim = False
        matchingClaimUnsourced = False
        matchingClaimSourced = False
        for remoteClaimList in remoteClaims.itervalues():
            for remoteClaim in remoteClaimList:
                if localClaim.id == remoteClaim.id:
                    #now we see if a our source is there
                    for remoteSourceDict in remoteClaim.getSources():
                        for remoteSourceList in remoteSourceDict.itervalues():
                            for remoteSource in remoteSourceList:
                                if remoteSource.id == localSource.id:
                                    if remoteSource.getTarget() == localSource.getTarget():
                                        matchingClaimSourced = True
                    # Remember the matching claim; when several match,
                    # the last one seen wins.
                    if not matchingClaimSourced:
                        matchingClaimUnsourced = remoteClaim
        if not matchingClaimUnsourced:
            noMatchingClaim = True
        # The checks below are ordered; the sourced case wins even though
        # noMatchingClaim may also be True at this point.
        if matchingClaimSourced:
            cases[str(localClaim.id)+'present'] += 1
            continue
        if matchingClaimUnsourced:
            matchingClaimUnsourced.addSource(localSource)
            cases[str(localSource.id)+'source'] += 1
            continue
        if noMatchingClaim:
            try:
                pageToEdit.addClaim(localClaim)
                localClaim.addSource(localSource)
                cases[str(localClaim.id)+'claim'] += 1
            except:
                # Best effort: log the failing claim and move on.
                print 'Error:'
                pprint(localClaim)
                continue
# Generator over every article transcluding the English book infobox.
templateTitle = u'Infobox book'
templatePage = pywikibot.Page(en_wikipedia, "Template:"+templateTitle)
generator = pg.ReferringPageGenerator(templatePage, followRedirects=False,
                                      withTemplateInclusion=True,
                                      onlyTemplateInclusion=True,
                                      step=None, total=None, content=False)
# Resume state: counters persisted by savecases(); 'prevtouched' lets a
# rerun skip pages already processed.
try:
    casesJSON = open('cases.JSON','r')
    cases = defaultdict(int)
    savedcases = json.load(casesJSON)
    for k, v in savedcases.iteritems():
        cases[k] = v
    casesJSON.close()
except IOError:
    cases = defaultdict(int)
    cases["prevtouched"] = 0
try:
    allbooksJSON = open('allbooks.JSON','r')
    allbooks = json.load(allbooksJSON)
    allbooksJSON.close()
except IOError:
    allbooks = list()
def savecases():
    """Persist the bookkeeping counters and harvested book dicts so a later
    run can resume (see the module-level loaders)."""
    # Context managers ensure the files are closed even if json.dump raises.
    with open('cases.JSON', 'w') as casesJSON:
        json.dump(cases, casesJSON, indent=4)
    # BUG FIX: this previously wrote 'allbooks.json' (lower case) while the
    # loader reads 'allbooks.JSON'; on a case-sensitive filesystem the saved
    # data was never reloaded.
    with open('allbooks.JSON', 'w') as allbooksJSON:
        json.dump(allbooks, allbooksJSON, indent=4)
def run():
    """Walk every infobox-book article, harvest it, and push claims to
    Wikidata, checkpointing progress after each page."""
    touched = 0
    for page in generator:
        touched += 1
        # 'fake' is a debugging stub: set True to dry-run the generator.
        fake = False
        if not fake:
            # Skip pages already handled by a previous (resumed) run.
            if cases['prevtouched'] >= touched:
                continue
            book = processPage(page)
            allbooks.append(book.dictify())
            incorp_xdata(book)
            checkISBN13(book)
            #pprint (vars(book))
            compareClaims(book, 'en')
            cases['prevtouched'] = touched
            savecases()
if __name__ == "__main__":
    run()
added multilingual support and ISBN-10 -> ISBN-13 conversion
#!/usr/bin/python
# -*- coding: utf-8 -*-
# copied from harvest_template.py which is due to:
# (C) 2013 Multichill, Amir
# (C) 2013 Pywikipediabot team
#
# Distributed under the terms of MIT License.
#
import re
import json
import pywikibot
from pywikibot import pagegenerators as pg
from pywikibot import textlib
import mwparserfromhell
import xisbn
from collections import defaultdict
from pprint import pprint
import copy
import pyisbn
# Harvest still logs in via English Wikipedia; Wikidata is its repository.
en_wikipedia = pywikibot.Site('en', 'wikipedia')
wikidata = en_wikipedia.data_repository()
if not wikidata.logged_in(): wikidata.login()
if not en_wikipedia.logged_in(): en_wikipedia.login()
# "imported from" (P143) source claim targeting English Wikipedia (Q328).
source = pywikibot.Claim(wikidata, 'p143')
source.setTarget(pywikibot.ItemPage(wikidata,'q328'))
# Wikidata property ids for each harvested infobox field.
properties = {'isbn':'P212',
              'ocn':'P243',
              'illustrator': 'P110',
              'author': 'P50',
              'lang': 'P364',
              'genre': 'P136',
              'translator': 'P655'}
# Language code -> Wikidata item for that Wikipedia. Mixed value kinds:
# 'imported_from' maps to a property id (P143), not an item.
# NOTE(review): wpsites below includes 'pt', but wplangs does not --
# confirm before attributing Portuguese harvests.
wplangs = {'en':'Q328',
           'de':'Q48183',
           'fr':'Q8447',
           'it':'Q11920',
           "es": "q8449",
           "ja": "q177837",
           "ru": "q206855",
           "pl": "q1551807",
           "sv": "q169514",
           'imported_from': 'P143'}
# Per-language infobox parameter names; None marks a field that wiki's
# template does not carry (processPage then never matches it).
wpsites = {'en': {'isbn': 'isbn',
                  'oclc': 'oclc',
                  'author': 'author',
                  'illustrator': 'illustrator',
                  'translator': 'translator',
                  'language': 'language',
                  'published': 'published',
                  'genre': 'genre',
                  'dewey': 'dewey'},
           'it': {'isbn': None,
                  'oclc': None,
                  'author': 'autore',
                  'illustrator': None,
                  'translator': None,
                  'language': 'lingua',
                  'published': 'annoorig',
                  'genre': 'genere',
                  'dewey': None},
           'fr': {'isbn': 'isbn',
                  'oclc': None,
                  'author': 'auteur',
                  'illustrator': 'dessinateur',
                  'translator': 'traducteur',
                  'language': 'langue',
                  'published': 'dateparution_orig',
                  'genre': 'genere',
                  'dewey': None},
           'es': {'isbn': 'isbn',
                  'oclc': 'oclc',
                  'author': 'autor',
                  'illustrator': 'ilustrador',
                  'translator': 'traductor',
                  'language': 'idioma original',
                  'published': 'publicación original',
                  'genre': 'género',
                  'dewey': None},
           'ja': {'isbn': None,
                  'oclc': None,
                  'author': 'author',
                  'illustrator': 'illustrator',
                  'translator': 'translator',
                  'language': 'language',
                  'published': 'published',
                  'genre': 'genre',
                  'dewey': None},
           'pl': {'isbn': None,
                  'oclc': None,
                  'author': 'autor',
                  'illustrator': None,
                  'translator': 'tłumacz',
                  'language': 'język oryg wyd',
                  'published': 'data I wyd oryg',
                  'genre': 'tematyka',
                  'dewey': None},
           'pt': {'isbn': 'isbn',
                  'oclc': None,
                  'author': 'autor',
                  'illustrator': 'ilustrador',
                  'translator': 'tradutor_br',
                  'language': 'idioma',
                  'published': 'lançamento',
                  'genre': 'gênero',
                  'dewey': None},
           # NOTE(review): the 'sv' values duplicate the Portuguese ('pt')
           # parameter names -- looks like a copy-paste; confirm against
           # sv:Mall:Bokfakta before relying on them.
           'sv': {'isbn': 'isbn',
                  'oclc': None,
                  'author': 'autor',
                  'illustrator': 'ilustrador',
                  'translator': 'tradutor_br',
                  'language': 'idioma',
                  'published': 'lançamento',
                  'genre': 'gênero',
                  'dewey': None},
           # NOTE(review): 'illustratör ' (Swedish word, trailing space) in
           # the Russian table looks like another copy-paste -- verify.
           'ru': {'isbn': 'isbni',
                  'oclc': None,
                  'author': 'Автор',
                  'illustrator': 'illustratör ',
                  'translator': 'Переводчик',
                  'language': 'Язык',
                  'published': 'Оригинал выпуска',
                  'genre': 'Жанр',
                  'dewey': None}
           }
# Per-language book infobox template name and its template namespace prefix.
templateTitleDict = {'en': u'Infobox book',
                     'it': u'Libro',
                     'fr': u'Infobox Livre',
                     'es': u'Ficha de libro',
                     'ja': u'基礎情報 書籍',
                     'pl': u'Książka infobox',
                     'pt': u'Info/Livro',
                     'sv': u'Bokfakta',
                     'ru': u'Издание'}
templateNSDict = {'en': u'Template:',
                  'it': u'Template:',
                  'fr': u'Modèle:',
                  'es': u'Plantilla:',
                  'ja': u'Template:',
                  'pl': u'Szablon:',
                  'pt': u'Predefinição:',
                  'sv': u'Mall:',
                  'ru': u'Шаблон:'}
def makeGenerator(lang):
    """Return a page generator over every article on the *lang* Wikipedia
    that transcludes that wiki's book infobox template."""
    templateNS = templateNSDict[lang]
    templateTitle = templateTitleDict[lang]
    tsite = pywikibot.Site(lang, 'wikipedia')
    templatePage = pywikibot.Page(tsite, templateNS+templateTitle)
    generator = pg.ReferringPageGenerator(templatePage, followRedirects=False,
                                          withTemplateInclusion=True,
                                          onlyTemplateInclusion=True,
                                          step=None, total=None, content=False)
    return generator
def logVIAFstats(remoteClaims):
    # Count items that already carry a VIAF identifier (P214) in the global
    # 'cases' counters. Python 2 code: print statement, dict.itervalues().
    for remoteClaimList in remoteClaims.itervalues():
        for remoteClaim in remoteClaimList:
            if remoteClaim.id == 'P214':
                print 'VIAF Author', str(remoteClaim.target)
                cases['hadVIAF'] += 1
class bookdata:
    """Accumulator for everything harvested about one book article."""
    def __init__(self, wditem):
        # wditem is the Wikidata ItemPage the article is linked to; the
        # remaining attributes collect raw values pulled from the infobox
        # ('x' prefixed lists hold values enriched via the xISBN service).
        self.wditem = wditem
        self.isbns = list()
        self.xisbns = list()
        self.ocns = list()
        self.xocns = list()
        self.deweys = list()
        self.authors = list()
        self.illustrators = list()
        self.translators = list()
        self.langs = list()
        self.publishdate = list()
        self.genres = list()
    def dictify(self):
        """Return a JSON-serializable dict: every attribute stringified,
        with the item reduced to its Q-id."""
        bookdict = dict()
        for k, v in self.__dict__.iteritems():
            bookdict[k] = str(v)
        bookdict['wditem'] = self.wditem.getID()
        return bookdict
def incorp_xdata(book):
    # When no OCLC number was harvested from the infobox, adopt the first
    # (lexicographically smallest) one returned by the xISBN service.
    if not book.ocns:
        if book.xocns:
            book.xocns.sort()
            book.ocns.append(book.xocns[0])
            cases['got_ocn_from_xisbn'] += 1
def checkISBN13(book):
def ISBNsize(isbn, isnblen):
justnums = filter(lambda x: x in '1234567890Xx', isbn)
if len(justnums) == isbnlen:
return True
else:
return False
isbnlists ={13: list(), 10:list() }
for isbnlen in isbnlists.iterkeys():
for isbn in book.isbns:
if ISBNsize(isbn, isbnlen):
isbnlists[isbnlen].append(isbn)
#no isbn13s
if not isbnlists[13] and not isbnlists[10]:
if book.xisbns:
book.xisbns.sort()
book.isbns.append(book.xisbns[0])
print 'using an xisbn here'
cases['put_in_a_isbn13'] += 1
if isbnlists[10] and not isbnlists[13]:
for isbn in isbnlists[10]:
converted = pyisbn.convert(isbn)
print 'conversion', isbn, converted
book.isbns.append(converted)
def processRE(param, rx):
    """Return all matches of *rx* in the template parameter's value,
    after stripping commented-out wikitext."""
    # NOTE(review): str() on a mwparserfromhell value can raise
    # UnicodeEncodeError under Python 2 for non-ASCII content -- confirm.
    cleaned_text = textlib.removeDisabledParts(str(param.value.strip()))
    relist = re.findall(rx, cleaned_text)
    return relist
def processLinks(param, wpsitelang):
    """Resolve the [[wikilinks]] in a template parameter to Wikidata items.

    param -- mwparserfromhell template Parameter whose value may contain
             wikilinks.
    wpsitelang -- language code of the Wikipedia the links live on.
    Returns a list of pywikibot.ItemPage objects, one per wikilink;
    redirects are resolved to their target page first. Lookup errors
    propagate to the caller (as in the original, whose try/except
    re-raised unconditionally).
    """
    itempagelist = list()
    tsite = pywikibot.Site(wpsitelang, 'wikipedia')
    for mwnode in param.value.filter():
        if type(mwnode) == mwparserfromhell.nodes.wikilink.Wikilink:
            # Build the Page once (the original constructed it twice:
            # once for the redirect test, once for the lookup).
            page = pywikibot.Page(tsite, mwnode.title)
            if page.isRedirectPage():
                # Follow the redirect so the item lookup uses the
                # canonical title.
                page = page.getRedirectTarget()
            itempagelist.append(pywikibot.ItemPage.fromPage(page))
    return itempagelist
def processISBNs(param, book, wpsitelang=None):
    """Harvest ISBN-looking strings and enrich them via the xISBN service.

    BUG FIX: processPage calls this with three arguments like every other
    process* helper, but it only accepted two, raising TypeError. The extra
    parameter is now accepted (and ignored, as in processOCNs) with a
    default so existing two-argument callers keep working.
    """
    # NOTE(review): inside a character class, '--–' is a *range* from '-'
    # (0x2d) to '–' (U+2013) and matches far more than the two dash
    # characters -- confirm the intended pattern.
    isbns = processRE(param=param, rx="[0-9][--–\ 0-9]{9,16}[xX]?")
    isbns = map(lambda x: x.replace(' ', ''), isbns)
    xisbns = set()
    xocns = set()
    for isbn in isbns:
        try:
            metadata = xisbn.xisbn(isbn, metadata=True)
            xisbns.update(metadata['isbn'])
            xocns.update(metadata['oclcnum'])
        except xisbn.isbnError:
            # Best effort: a bad ISBN just skips enrichment.
            pywikibot.output('xisbn error')
    book.isbns.extend(isbns)
    book.xisbns.extend(list(xisbns))
    book.xocns.extend(list(xocns))
# Thin per-field harvesters: each extracts one infobox parameter into the
# matching bookdata list. wpsitelang is only needed by the link-resolving
# helpers; the regex-based ones accept it for a uniform call shape.
def processOCNs(param, book, wpsitelang):
    ocns = processRE(param=param, rx="\d+")
    book.ocns.extend(ocns)
def processDewey(param, book, wpsitelang):
    deweys = processRE(param=param, rx="[^,]+")
    book.deweys.extend(deweys)
def processAuthors(param, book, wpsitelang):
    book.authors.extend(processLinks(param, wpsitelang))
def processIllustrators(param, book, wpsitelang):
    book.illustrators.extend(processLinks(param, wpsitelang))
def processTranslators(param, book, wpsitelang):
    book.translators.extend(processLinks(param, wpsitelang))
def processGenre(param, book, wpsitelang):
    book.genres.extend(processLinks(param, wpsitelang))
def processLanguage(param, book, wpsitelang):
    book.langs.extend(processLinks(param, wpsitelang))
def processPublished(param, book, wpsitelang):
    # Intentionally a stub: publication dates are not harvested yet.
    pass
def processPage(page, wpsitelang):
    """
    Process a single page: walk its book-infobox parameters (using the
    per-language parameter names from wpsites) into a bookdata instance.
    """
    paramdict = wpsites[wpsitelang]
    wditem = pywikibot.ItemPage.fromPage(page)
    book = bookdata(wditem)
    pywikibot.output('Processing %s' % page)
    pagetext = page.get()
    wikicode = mwparserfromhell.parse(pagetext)
    for template in wikicode.filter_templates():
        if template.name.startswith(templateTitleDict[wpsitelang]):
            for param in template.params:
                paramname = param.name.strip()
                # paramdict values of None never match a str paramname, so
                # fields absent on a wiki are skipped naturally.
                if paramname == paramdict['isbn']:
                    # NOTE(review): confirm processISBNs accepts a third
                    # wpsitelang argument; as defined with two parameters
                    # this call raises TypeError.
                    processISBNs(param, book, wpsitelang)
                if paramname == paramdict['oclc']:
                    processOCNs(param, book, wpsitelang)
                if paramname == paramdict['author']:
                    processAuthors(param, book, wpsitelang)
                if paramname == paramdict['illustrator']:
                    processIllustrators(param, book, wpsitelang)
                if paramname == paramdict['translator']:
                    processTranslators(param, book, wpsitelang)
                if paramname == paramdict['language']:
                    processLanguage(param, book, wpsitelang)
                if paramname == paramdict['published']:
                    processPublished(param, book, wpsitelang)
                if paramname == paramdict['genre']:
                    processGenre(param, book, wpsitelang)
                if paramname == paramdict['dewey']:
                    processDewey(param, book, wpsitelang)
    return book
def propertiesToClaims(book, lang):
    """Turn the harvested bookdata lists into unsaved pywikibot Claims,
    one per value, mapped through the module-level 'properties' table."""
    # NOTE(review): 'lang' is unused here -- confirm whether language-
    # specific handling was intended.
    localClaims = list() #we're returning this
    bookattrs = {'isbn': book.isbns,
                 'ocn': book.ocns,
                 'illustrator': book.illustrators,
                 'author': book.authors,
                 'lang': book.langs,
                 'genre': book.genres}
    for book_k, book_v in bookattrs.iteritems():
        if book_v:
            for attr in book_v:
                claimObj = pywikibot.Claim(site=wikidata, pid=properties[book_k])
                claimObj.setTarget(attr)
                localClaims.append(claimObj)
    return localClaims
def compareClaims(book, sourcelang):
    """Reconcile the harvested claims with those already on the item.

    For each local claim: add it (with an 'imported from' source) when no
    remote claim shares its property id; add only the source when a
    matching remote claim lacks it; do nothing when a sourced match exists.
    NOTE: matching is by property id only -- claim *values* are never
    compared, so a second value for an existing property is never added.
    """
    qid = book.wditem.getID()
    pageToEdit = pywikibot.ItemPage(wikidata, qid)
    page_parts = pageToEdit.get()
    localClaims = propertiesToClaims(book, sourcelang)
    remoteClaims = page_parts['claims']
    logVIAFstats(remoteClaims)
    #we'll need this for every claim
    localSource = pywikibot.Claim(site=wikidata, pid=wplangs['imported_from'])
    localSource.setTarget(pywikibot.ItemPage(wikidata, wplangs[sourcelang]))
    for localClaim in localClaims:
        '''there are three states
        noMatchingClaim, so we add our claim
        matchingClaimUnsourced, so we add our source
        matchingClaimSourced, claim was already present and had the same source, do nothing
        '''
        noMatchingClaim = False
        matchingClaimUnsourced = False
        matchingClaimSourced = False
        for remoteClaimList in remoteClaims.itervalues():
            for remoteClaim in remoteClaimList:
                if localClaim.id == remoteClaim.id:
                    #now we see if a our source is there
                    for remoteSourceDict in remoteClaim.getSources():
                        for remoteSourceList in remoteSourceDict.itervalues():
                            for remoteSource in remoteSourceList:
                                if remoteSource.id == localSource.id:
                                    if remoteSource.getTarget() == localSource.getTarget():
                                        matchingClaimSourced = True
                    # Remember the matching claim; when several match,
                    # the last one seen wins.
                    if not matchingClaimSourced:
                        matchingClaimUnsourced = remoteClaim
        if not matchingClaimUnsourced:
            noMatchingClaim = True
        # The checks below are ordered; the sourced case wins even though
        # noMatchingClaim may also be True at this point.
        if matchingClaimSourced:
            cases[str(localClaim.id)+'present'] += 1
            continue
        if matchingClaimUnsourced:
            matchingClaimUnsourced.addSource(localSource)
            cases[str(localSource.id)+'source'] += 1
            continue
        if noMatchingClaim:
            try:
                pageToEdit.addClaim(localClaim)
                localClaim.addSource(localSource)
                cases[str(localClaim.id)+'claim'] += 1
            except:
                # Best effort: log the failing claim and move on.
                print 'Error:'
                pprint(localClaim)
                continue
# Resume state: counters persisted by savecases(); 'prevtouched' lets a
# rerun skip pages already processed.
try:
    casesJSON = open('cases.JSON','r')
    cases = defaultdict(int)
    savedcases = json.load(casesJSON)
    for k, v in savedcases.iteritems():
        cases[k] = v
    casesJSON.close()
except IOError:
    cases = defaultdict(int)
    cases["prevtouched"] = 0
try:
    # NOTE(review): json.load returns a plain dict, not a defaultdict, so
    # after a successful reload run() may hit KeyError for a language that
    # is not yet a key -- confirm.
    allbooksJSON = open('allbooks.JSON','r')
    allbooks = json.load(allbooksJSON)
    allbooksJSON.close()
except IOError:
    allbooks = defaultdict(list)
def savecases():
    """Persist the bookkeeping counters and harvested book dicts so a later
    run can resume (see the module-level loaders)."""
    # Context managers ensure the files are closed even if json.dump raises.
    with open('cases.JSON', 'w') as casesJSON:
        json.dump(cases, casesJSON, indent=4)
    # BUG FIX: this previously wrote 'allbooks.json' (lower case) while the
    # loader reads 'allbooks.JSON'; on a case-sensitive filesystem the saved
    # data was never reloaded.
    with open('allbooks.JSON', 'w') as allbooksJSON:
        json.dump(allbooks, allbooksJSON, indent=4)
def run(wpsitelang):
    """Harvest book infoboxes from one language's Wikipedia and push claims
    to Wikidata, checkpointing progress after each page.

    NOTE(review): cases['prevtouched'] is a single global counter shared by
    every language, so resume behaviour across languages is approximate.
    """
    touched = 0
    generator = makeGenerator(wpsitelang)
    for page in generator:
        touched += 1
        # 'fake' is a debugging stub: set True to dry-run the generator.
        fake = False
        if not fake:
            # Skip pages already handled by a previous (resumed) run.
            if cases['prevtouched'] >= touched:
                continue
            book = processPage(page, wpsitelang)
            allbooks[wpsitelang].append(book.dictify())
            incorp_xdata(book)
            checkISBN13(book)
            #pprint (vars(book))
            # BUG FIX: the source language was hard-coded to 'en', so claims
            # harvested from other wikis were attributed to English
            # Wikipedia. Fall back to 'en' for languages missing from
            # wplangs (e.g. 'pt').
            sourcelang = wpsitelang if wpsitelang in wplangs else 'en'
            compareClaims(book, sourcelang)
            cases['prevtouched'] = touched
            savecases()
if __name__ == "__main__":
    for lang in wpsites.iterkeys():
        run(lang)
|
""" Fedora Notifications pkgdb client """
import json
import logging
import requests
from dogpile.cache import make_region
log = logging.getLogger(__name__)
_cache = make_region()
def get_packages_of_user(config, username):
    """ Retrieve the list of packages where the specified user has some acl.
    :arg config: a dict containing the fedmsg config
    :arg username: the fas username of the packager whose packages are of
        interest.
    :return: a set listing all the packages where the specified user has
        some ACL.
    """
    # Configure the dogpile cache once, lazily, from the fedmsg config.
    if not hasattr(_cache, 'backend'):
        _cache.configure(**config['fmn.rules.cache'])
    # The decorator memoizes per username only; config is captured by the
    # closure and is deliberately not part of the cache key.
    @_cache.cache_on_arguments()
    def _getter(username):
        if config.get('fmn.rules.utils.use_pkgdb2', True):
            return _get_pkgdb2_packages_for(config, username)
        else:
            return _get_pkgdb1_packages_for(config, username)
    return _getter(username)
def _get_pkgdb2_packages_for(config, username):
    """Return the set of package names where *username* has an approved ACL,
    paging through the pkgdb2 API. Returns an empty set on HTTP failure."""
    log.debug("Requesting pkgdb2 packages for user %r" % username)
    def _get_page(page):
        # Returns the decoded JSON dict, or an empty set on HTTP failure.
        req = requests.get('{0}/packager/acl/{1}'.format(
            config['fmn.rules.utils.pkgdb_url'], username),
            params=dict(page=page),
        )
        if not req.status_code == 200:
            return set()
        return req.json()
    # We have to request the first page of data to figure out the total number
    packages = set()
    data = _get_page(1)
    # BUG FIX: when the first fetch fails, _get_page returns an empty set
    # and data['page_total'] below raised TypeError; return no packages
    # instead.
    if not data:
        return packages
    pages = data['page_total']
    for i in range(1, pages + 1):
        # Avoid requesting the data twice the first time around
        if i != 1:
            data = _get_page(i)
            if not data:
                # A failed later page would likewise raise below; keep
                # whatever was gathered so far.
                break
        for pkgacl in data['acls']:
            if pkgacl['status'] != 'Approved':
                continue
            packages.add(pkgacl['packagelist']['package']['name'])
    log.debug("done talking with pkgdb2 for now.")
    return packages
# TODO -- delete this once pkgdb2 goes live.
def _get_pkgdb1_packages_for(config, username):
    """Return the set of package names *username* owns according to the
    legacy pkgdb1 API; empty set on HTTP failure."""
    log.debug("Requesting pkgdb1 packages for user %r" % username)
    url = '{0}/users/packages/{1}?{2}'.format(
        config['fmn.rules.utils.pkgdb_url'],
        username,
        "tg_format=json&pkgs_tgp_limit=10000")
    response = requests.get(url)
    if response.status_code != 200:
        return set()
    packages = {pkg['name'] for pkg in response.json()['pkgs']}
    log.debug("done talking with pkgdb1 for now.")
    return packages
If the pkgdb call fails, return an empty set of packages
""" Fedora Notifications pkgdb client """
import json
import logging
import requests
from dogpile.cache import make_region
log = logging.getLogger(__name__)
_cache = make_region()
def get_packages_of_user(config, username):
    """ Retrieve the list of packages where the specified user has some acl.
    :arg config: a dict containing the fedmsg config
    :arg username: the fas username of the packager whose packages are of
        interest.
    :return: a set listing all the packages where the specified user has
        some ACL.
    """
    # Configure the dogpile cache once, lazily, from the fedmsg config.
    if not hasattr(_cache, 'backend'):
        _cache.configure(**config['fmn.rules.cache'])
    # The decorator memoizes per username only; config is captured by the
    # closure and is deliberately not part of the cache key.
    @_cache.cache_on_arguments()
    def _getter(username):
        if config.get('fmn.rules.utils.use_pkgdb2', True):
            return _get_pkgdb2_packages_for(config, username)
        else:
            return _get_pkgdb1_packages_for(config, username)
    return _getter(username)
def _get_pkgdb2_packages_for(config, username):
    """Return the set of package names where *username* has an approved ACL,
    paging through the pkgdb2 API. Returns an empty set on HTTP failure."""
    log.debug("Requesting pkgdb2 packages for user %r" % username)
    def _get_page(page):
        # Returns the decoded JSON dict, or an empty set on HTTP failure.
        req = requests.get('{0}/packager/acl/{1}'.format(
            config['fmn.rules.utils.pkgdb_url'], username),
            params=dict(page=page),
        )
        if not req.status_code == 200:
            return set()
        return req.json()
    # We have to request the first page of data to figure out the total number
    packages = set()
    data = _get_page(1)
    # Robustness: treat any falsy result as a failed fetch. The previous
    # 'data == set()' check missed other empty payloads (e.g. {}), which
    # would then raise below.
    if not data:
        return packages
    pages = data['page_total']
    for i in range(1, pages + 1):
        # Avoid requesting the data twice the first time around
        if i != 1:
            data = _get_page(i)
            if not data:
                # A failed later page would otherwise raise when indexed;
                # keep whatever was gathered so far.
                break
        for pkgacl in data['acls']:
            if pkgacl['status'] != 'Approved':
                continue
            packages.add(pkgacl['packagelist']['package']['name'])
    log.debug("done talking with pkgdb2 for now.")
    return packages
# TODO -- delete this once pkgdb2 goes live.
def _get_pkgdb1_packages_for(config, username):
    """Return the set of package names *username* owns according to the
    legacy pkgdb1 API; empty set on HTTP failure."""
    log.debug("Requesting pkgdb1 packages for user %r" % username)
    pkgdb1_base_url = config['fmn.rules.utils.pkgdb_url']
    query_string = "tg_format=json&pkgs_tgp_limit=10000"
    req = requests.get('{0}/users/packages/{1}?{2}'.format(
        pkgdb1_base_url, username, query_string))
    if not req.status_code == 200:
        return set()
    data = req.json()
    packages = set([pkg['name'] for pkg in data['pkgs']])
    log.debug("done talking with pkgdb1 for now.")
    return packages
|
#!/usr/bin/env python
import time
from collections import namedtuple
import psycopg2
import zmq
# Make psycopg2 return unicode for text columns (Python 2 behaviour).
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
# NOTE(review): Event is defined but not referenced in this module -- it
# appears to describe the history record shape; confirm before removing.
Event = namedtuple('Event', ['kind', 'source', 'target', 'content',
                             'timestamp'])
# Process-wide caches: one History per network, one Identity per token.
history_registry = dict()
identity_registry = dict()
class Identity(object):
    """A chat identity backed by a row in the 'identities' table.

    Instances are interned in identity_registry keyed by token, so
    constructing the same token twice yields the same object.
    NOTE(review): the registry key ignores the history/network, so equal
    tokens on different networks share one object -- confirm intended.
    """
    def __new__(cls, history, token):
        print("Identity {0}".format(token))
        # Pull it out of cached objects if we can
        if token in identity_registry:
            return identity_registry[token]
        else:
            # BUG FIX: object.__new__ accepts no extra arguments (TypeError
            # on Python 3, deprecated on 2.6+); __init__ receives
            # history/token separately.
            id_obj = object.__new__(cls)
            identity_registry[token] = id_obj
            return id_obj
    def __init__(self, history, token):
        """Look up -- or insert -- the identities row for *token* and cache
        its primary key on self.id."""
        self.history = history
        self.sql = history.sql
        self.token = token
        cur = self.sql.cursor()
        def get_identity():
            cur.execute("SELECT id FROM identities WHERE token = %s", (token,))
            return cur.fetchone()
        result = get_identity()
        if result:
            self.id = result[0]
        else:
            cur.execute("INSERT INTO identities (token) VALUES (%s)",
                        (token,))
            print("Inserting token {0}".format(token))
            self.id = get_identity()[0]
            self.sql.commit()
        identity_registry[self.token] = self
class NickIdentity(Identity):
    """Identity keyed by IRC nick: strips the user@host part of a hostmask."""
    @classmethod
    def filter_token(cls, token):
        # "nick!user@host" -> "nick"
        return token.split("!")[0]
    def __new__(cls, history, token):
        # After modifying the token, do exactly as Identity.
        # BUG FIX: pass cls through instead of hard-coding NickIdentity so
        # subclasses of NickIdentity get instances of their own class.
        return Identity.__new__(cls, history, cls.filter_token(token))
    def __init__(self, history, token):
        token = NickIdentity.filter_token(token)
        super(NickIdentity, self).__init__(history, token)
class History(object):
    """Interact with client history for a network.

    Instances are interned in history_registry keyed by network name.
    """
    def __new__(cls, irc_network):
        # Pull it out of cached objects if we can
        if irc_network in history_registry:
            return history_registry[irc_network]
        else:
            # BUG FIX: object.__new__ accepts no extra arguments (TypeError
            # on Python 3); __init__ receives irc_network separately.
            h_obj = object.__new__(cls)
            history_registry[irc_network] = h_obj
            return h_obj
    def __init__(self, irc_network):
        """Open the database and look up -- or insert -- the networks row,
        caching its primary key on self.id."""
        self.sql = psycopg2.connect("dbname=hashi")
        self.irc_network = irc_network
        cur = self.sql.cursor()
        cur.execute("SELECT id FROM networks WHERE name = %s", (irc_network,))
        result = cur.fetchone()
        if result:
            self.id = result[0]
        else:
            cur.execute("INSERT INTO networks (name) VALUES (%s)",
                        (irc_network,))
            # BUG FIX: self.id was never set on the insert path, so record()
            # later failed with AttributeError; fetch the new row's id.
            cur.execute("SELECT id FROM networks WHERE name = %s",
                        (irc_network,))
            self.id = cur.fetchone()[0]
        # Done with setup, commit
        self.sql.commit()
    def record(self, identity, kind, args):
        """Insert one event row for this network.

        NOTE(review): the 'identity' parameter is unused; privmsg rows
        derive source/target from args instead -- confirm intended.
        """
        cur = self.sql.cursor()
        record_sql = """INSERT INTO events (network_id, source, target, args)
                        VALUES (%s, %s, %s, %s);"""
        # Record each kind of message, with a fallback for unimplemented ones
        if kind == 'privmsg':
            source = NickIdentity(self, args[0]).id
            target = NickIdentity(self, args[1]).id
            cur.execute(record_sql, (self.id, source, target, args[2:]))
        else:
            # No formatter, stuff it all into the args column (to prevent loss)
            cur.execute(record_sql, (self.id, None, None, args))
        self.sql.commit()
class RemoteEventReceiver(object):
    """Receive IRC events over ZeroMQ and archive them per network."""

    def __init__(self):
        context = zmq.Context.instance()
        # SUB socket: firehose of history events published by the client.
        self.socket = context.socket(zmq.SUB)
        self.socket.connect("tcp://127.0.0.1:9913")
        self.socket.setsockopt(zmq.SUBSCRIBE, "")
        # REP socket: request/reply channel for history queries.
        self.queries = context.socket(zmq.REP)
        self.queries.bind("tcp://127.0.0.1:9922")
        self.poller = zmq.Poller()
        self.poller.register(self.socket, zmq.POLLIN)
        self.poller.register(self.queries, zmq.POLLIN)

    def run(self):
        """Poll both sockets forever, recording every incoming event."""
        while True:
            socks = dict(self.poller.poll())
            # New history
            if self.socket in socks and socks[self.socket] == zmq.POLLIN:
                # Frames: network, identity token, event kind, then payload.
                event = [unicode(x) for x in self.socket.recv_multipart()]
                network, identity, kind = event[:3]
                args = event[3:]
                history = History(network)
                id_obj = NickIdentity(history, identity)
                history_registry[network].record(id_obj, kind, args)
                print("{0}:{1}:{2}:{3}".format(network, id_obj.token, kind,
                args))
            # Queries against the history
            if self.queries in socks and socks[self.queries] == zmq.POLLIN:
                # TODO: query handling is not implemented yet.
                pass
if __name__ == "__main__":
    # Entry point: start the ZeroMQ receive loop (blocks forever).
    RemoteEventReceiver().run()
New email token in history messages.
#!/usr/bin/env python
import time
from collections import namedtuple
import psycopg2
import zmq
# Have psycopg2 hand back unicode objects for TEXT columns and text arrays.
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)

# Shape of a single history event.
# NOTE(review): Event is not referenced in this module's visible code --
# confirm whether external callers use it before removing.
Event = namedtuple('Event', ['kind', 'source', 'target', 'content',
'timestamp'])

# Process-wide caches: one History per network name, one Identity per token.
history_registry = dict()
identity_registry = dict()
class Identity(object):
    """A chat identity (token) persisted in the ``identities`` table.

    Instances are cached per token in ``identity_registry``; ``__new__``
    returns the cached object when one exists.  Note ``__init__`` still
    runs again on cache hits and re-queries the database.
    """

    def __new__(cls, history, token):
        print("Identity {0}".format(token))
        # Pull it out of cached objects if we can
        if token in identity_registry:
            return identity_registry[token]
        else:
            # BUG FIX: object.__new__() only takes the class; passing extra
            # arguments is deprecated (Python 2.6+) and a TypeError on 3.x.
            id_obj = object.__new__(cls)
            identity_registry[token] = id_obj
            return id_obj

    def __init__(self, history, token):
        self.history = history
        self.sql = history.sql
        self.token = token
        cur = self.sql.cursor()

        def get_identity():
            # Helper: look up this token's row id (None when absent).
            cur.execute("SELECT id FROM identities WHERE token = %s", (token,))
            return cur.fetchone()

        result = get_identity()
        if result:
            self.id = result[0]
        else:
            cur.execute("INSERT INTO identities (token) VALUES (%s)",
                        (token,))
            print("Inserting token {0}".format(token))
            self.id = get_identity()[0]
        self.sql.commit()
        # Re-register under the token actually stored on the instance.
        identity_registry[self.token] = self
class NickIdentity(Identity):
    """An Identity keyed by IRC nick alone.

    Full IRC prefixes look like ``nick!user@host``; every constructor
    path strips that down to the bare nick first, so the cache and the
    database only ever see the nick.
    """

    @classmethod
    def filter_token(cls, token):
        # Everything before the first "!" is the nick.
        nick = token.partition("!")[0]
        return nick

    def __new__(cls, history, token):
        # Normalise the token, then defer to Identity's cache logic.
        filtered = cls.filter_token(token)
        return Identity.__new__(NickIdentity, history, filtered)

    def __init__(self, history, token):
        nick = NickIdentity.filter_token(token)
        super(NickIdentity, self).__init__(history, nick)
class History(object):
    """Interact with client history for a network.

    Instances are cached per network name in ``history_registry`` so a
    network maps to a single DB-backed object.  Note that ``__init__``
    still runs on cache hits and re-opens the database connection.
    """

    def __new__(cls, irc_network):
        # Pull it out of cached objects if we can
        if irc_network in history_registry:
            return history_registry[irc_network]
        else:
            # BUG FIX: object.__new__() only takes the class; passing extra
            # arguments is deprecated (Python 2.6+) and a TypeError on 3.x.
            h_obj = object.__new__(cls)
            history_registry[irc_network] = h_obj
            return h_obj

    def __init__(self, irc_network):
        self.sql = psycopg2.connect("dbname=hashi")
        self.irc_network = irc_network
        cur = self.sql.cursor()
        cur.execute("SELECT id FROM networks WHERE name = %s", (irc_network,))
        result = cur.fetchone()
        if result:
            self.id = result[0]
        else:
            cur.execute("INSERT INTO networks (name) VALUES (%s)",
                        (irc_network,))
            # BUG FIX: the new row's id was never read back, which left
            # self.id unset and made record() fail with AttributeError.
            cur.execute("SELECT id FROM networks WHERE name = %s",
                        (irc_network,))
            self.id = cur.fetchone()[0]
        # Done with setup, commit
        self.sql.commit()

    def record(self, identity, kind, args):
        """Persist one event row; 'privmsg' gets source/target resolved
        to identity ids, anything else is stored raw in the args column."""
        cur = self.sql.cursor()
        record_sql = """INSERT INTO events (network_id, source, target, args)
        VALUES (%s, %s, %s, %s);"""
        # Record each kind of message, with a fallback for unimplemented ones
        if kind == 'privmsg':
            source = NickIdentity(self, args[0]).id
            target = NickIdentity(self, args[1]).id
            cur.execute(record_sql, (self.id, source, target, args[2:]))
        else:
            # No formatter, stuff it all into the args column (to prevent loss)
            cur.execute(record_sql, (self.id, None, None, args))
        self.sql.commit()
class RemoteEventReceiver(object):
    """Receive IRC events over ZeroMQ and archive them per network."""

    def __init__(self):
        context = zmq.Context.instance()
        # SUB socket: firehose of history events published by the client.
        self.socket = context.socket(zmq.SUB)
        self.socket.connect("tcp://127.0.0.1:9913")
        self.socket.setsockopt(zmq.SUBSCRIBE, "")
        # REP socket: request/reply channel for history queries.
        self.queries = context.socket(zmq.REP)
        self.queries.bind("tcp://127.0.0.1:9922")
        self.poller = zmq.Poller()
        self.poller.register(self.socket, zmq.POLLIN)
        self.poller.register(self.queries, zmq.POLLIN)

    def run(self):
        """Poll both sockets forever, recording every incoming event."""
        while True:
            socks = dict(self.poller.poll())
            # New history
            if self.socket in socks and socks[self.socket] == zmq.POLLIN:
                # Frames: email, network, identity token, kind, then payload.
                # NOTE(review): the email frame is parsed but not stored or
                # used below -- confirm intended follow-up.
                event = [unicode(x) for x in self.socket.recv_multipart()]
                email, network, identity, kind = event[:4]
                args = event[4:]
                history = History(network)
                id_obj = NickIdentity(history, identity)
                history_registry[network].record(id_obj, kind, args)
                print("{0}:{1}:{2}:{3}".format(network, id_obj.token, kind,
                args))
            # Queries against the history
            if self.queries in socks and socks[self.queries] == zmq.POLLIN:
                # TODO: query handling is not implemented yet.
                pass
if __name__ == "__main__":
    # Entry point: start the ZeroMQ receive loop (blocks forever).
    RemoteEventReceiver().run()
|
"""Functional programming tools for data processing
`fntools` is a simple library providing the user with functional programming
functions to transform, filter and inspect Python data structures.
"""
from copy import deepcopy
import itertools
import operator
import collections
from functools import wraps
# TRANSFORMATION {{{1
def use_with(data, fn, *attrs):
    """Call *fn* with the values of the named attributes of *data*.

    # Let's create some data first
    >>> from collections import namedtuple
    >>> Person = namedtuple('Person', ('name', 'age', 'gender'))
    >>> alice = Person('Alice', 30, 'F')

    # Usage
    >>> make_csv_row = lambda n, a, g: '%s,%d,%s' % (n, a, g)
    >>> use_with(alice, make_csv_row, 'name', 'age', 'gender')
    'Alice,30,F'
    """
    return fn(*[getattr(data, name) for name in attrs])
def zip_with(fn, *colls):
    """Return the result of the function applied on the zip of the collections

    :param fn: a function
    :param colls: collections

    >>> print list(zip_with(lambda x, y: x-y, [10, 20, 30], [42, 19, 43]))
    [-32, 1, -13]
    """
    # Lazy: izip pairs the collections element-wise (stopping at the
    # shortest) and starmap unpacks each tuple as fn's arguments.
    return itertools.starmap(fn, itertools.izip(*colls))
def unzip(colls):
    """Unzip collections: regroup the i-th items of each collection
    into tuples (the inverse of zip for equal-length inputs)."""
    return zip(*colls)
def concat(colls):
    """Concatenate a list of collections

    :param colls: a list of collections

    :returns: the concatenation of the collections

    >>> print concat(([1, 2], [3, 4]))
    [1, 2, 3, 4]
    """
    return list(itertools.chain.from_iterable(colls))
def mapcat(fn, colls):
    """Concatenate the result of a map

    :param fn: a function
    :param colls: a list of collections
    """
    # NOTE(review): this maps fn over the *concatenation* of colls,
    # i.e. map(fn, concat(colls)).  The classic "mapcat" would instead be
    # concat(map(fn, colls)) -- confirm which semantics callers expect.
    return map(fn, concat(colls))
# TODO Fix and test dmap
def dmap(fn, record):
    """map for a dictionary: apply fn to every value, keep the keys.

    :param fn: a function
    :param record: a dictionary

    :returns: a dictionary
    """
    # Values are generated in record.items() order; iterating `record`
    # yields keys in the same order in CPython, so keys and transformed
    # values stay aligned when zipped back into a dict.
    values = (fn(v) for k, v in record.items())
    return dict(itertools.izip(record, values))
def compose(*fns):
    """Return the function composed with the given functions

    >>> add2 = lambda x: x+2
    >>> mult3 = lambda x: x*3
    >>> new_fn = compose(add2, mult3)
    >>> print new_fn(2)
    8

    .. note:: compose(fn1, fn2, fn3) is the same as fn1(fn2(fn3))
    which means that the last function provided is the first to be applied.
    """
    def compose2(f, g):
        # Compose a pair; reduce folds this left-to-right over all fns.
        return lambda x: f(g(x))
    return reduce(compose2, fns)
def groupby(f, sample):
    """Group elements in sub-samples by f

    >>> print groupby(len, ['John', 'Terry', 'Eric', 'Graham', 'Mickael'])
    {4: ['John', 'Eric'], 5: ['Terry'], 6: ['Graham'], 7: ['Mickael']}
    """
    grouped = collections.defaultdict(list)
    for element in sample:
        grouped[f(element)].append(element)
    return dict(grouped)
def reductions(fn, seq, acc=None):
    """Return the intermediate values of a reduction

    :param fn: a binary function
    :param seq: a sequence
    :param acc: optional initial accumulator value

    >>> print reductions(lambda x, y: x + y, [1, 2, 3])
    [1, 3, 6]
    >>> print reductions(lambda x, y: x + y, [1, 2, 3], 10)
    [11, 13, 16]
    """
    indexes = xrange(len(seq))
    # BUG FIX: compare against None explicitly so a falsy accumulator
    # (0, "", [], ...) is still honoured as a genuine starting value.
    if acc is not None:
        return map(lambda i: reduce(lambda x, y: fn(x, y), seq[:i+1], acc), indexes)
    else:
        return map(lambda i: reduce(lambda x, y: fn(x, y), seq[:i+1]), indexes)
def split(coll, factor):
    """Split a collection by using a factor

    >>> bands = ('Led Zeppelin', 'Debussy', 'Metallica', 'Iron Maiden', 'Bach')
    >>> styles = ('rock', 'classic', 'rock', 'rock', 'classic')
    >>> print split(bands, styles)
    {'classic': ['Debussy', 'Bach'], 'rock': ['Led Zeppelin', 'Metallica', 'Iron Maiden']}
    """
    # Pair each item with its factor label, group by the label, then strip
    # the label off again so only the original items remain.
    groups = groupby(lambda x: x[0], itertools.izip(factor, coll))
    return dmap(lambda x: [y[1] for y in x], groups)
def assoc(_d, key, value):
    """Associate a key with a value in a dictionary

    The input dictionary is deep-copied, never mutated.

    >>> movie = assoc({}, 'name', 'Holy Grail')
    >>> print movie
    {'name': 'Holy Grail'}
    """
    updated = deepcopy(_d)
    updated[key] = value
    return updated
def dispatch(data, fns):
    """Apply the functions on the data

    :param data: the data
    :param fns: a list of functions

    >>> x = (1, 42, 5, 79)
    >>> print dispatch(x, (min, max))
    [1, 79]
    """
    apply_to_data = lambda fn: fn(data)
    return map(apply_to_data, fns)
def multimap(fn, colls):
    """Apply a function on multiple collections

    The i-th result is fn applied to the i-th element of every collection.

    >>> print multimap(operator.add, ((1, 2, 3), (4, 5, 6)))
    [5, 7, 9]

    >>> f = lambda x, y, z: 2*x + 3*y - z
    >>> result = multimap(f, ((1, 2), (4, 1), (1, 1)))
    >>> result[0] == f(1, 4, 1)
    True
    >>> result[1] == f(2, 1, 1)
    True
    """
    return [fn(*row) for row in zip(*colls)]
def pipe(data, *fns):
    """Apply functions recursively on your data

    >>> inc = lambda x: x + 1
    >>> pipe(42, inc, str)
    '43'
    """
    # Fold left: each function receives the previous function's output.
    return reduce(lambda acc, f: f(acc), fns, data)
def pipe_each(coll, *fns):
    """Apply functions recursively on each element of a collection of data."""
    run_pipeline = lambda item: pipe(item, *fns)
    return map(run_pipeline, coll)
def shift(func, *args, **kwargs):
    """This function is basically a beefed up lambda x: func(x, *args, **kwargs)

    `shift` comes in handy when it is used in a pipeline with a function that
    needs the passed value as its first argument.

    >>> def div(x, y): return float(x) / y

    # This is equivalent to div(42, 2):
    >>> shift(div, 2)(42)
    21.0

    # which is different from div(2, 42):
    >>> partial(div, 2)(42)
    0.047619047619047616
    """
    @wraps(func)
    def call_with_leading(x):
        # x becomes func's first positional argument.
        return func(x, *args, **kwargs)
    return call_with_leading
# FILTERING {{{1
def duplicates(coll):
    """Return the distinct items that appear more than once in coll.

    PERF FIX: a single Counter pass is O(n); the previous version called
    coll.count() per element, which is O(n**2).  (Item order in the result
    was never guaranteed and still is not.)
    """
    counts = collections.Counter(coll)
    return [item for item in counts if counts[item] > 1]
def pluck(record, *keys, **kwargs):
    """Return a new dict restricted to the given keys of record.

    >>> d = {'name': 'Lancelot', 'actor': 'John Cleese', 'color': 'blue'}
    >>> print pluck(d, 'name', 'color')
    {'color': 'blue', 'name': 'Lancelot'}

    # the keyword 'default' allows to replace a None value
    >>> d = {'year': 2014, 'movie': 'Bilbo'}
    >>> print pluck(d, 'year', 'movie', 'nb_aliens', default=0)
    {'movie': 'Bilbo', 'nb_aliens': 0, 'year': 2014}
    """
    default = kwargs.get('default', None)
    # Build up the result one key at a time; assoc copies, so neither the
    # accumulator nor the input record is mutated.
    return reduce(lambda a, x: assoc(a, x, record.get(x, default)), keys, {})
def use(data, *attrs):
    """Return the values of the named attributes of *data*.

    # Let's create some data first
    >>> from collections import namedtuple
    >>> Person = namedtuple('Person', ('name', 'age', 'gender'))
    >>> alice = Person('Alice', 30, 'F')

    # Usage
    >>> use(alice, 'name', 'gender')
    ['Alice', 'F']
    """
    fetch = lambda name: getattr(data, name)
    return map(fetch, attrs)
def get_in(record, *keys, **kwargs):
    """Return the value corresponding to the keys in a nested record

    >>> d = {'id': {'name': 'Lancelot', 'actor': 'John Cleese', 'color': 'blue'}}
    >>> print get_in(d, 'id', 'name')
    Lancelot
    >>> print get_in(d, 'id', 'age', default='?')
    ?
    """
    default = kwargs.get('default', None)
    # Walk down the nesting one key at a time.
    # NOTE(review): if a lookup fails before the *last* key, the default
    # becomes the next lookup target and must itself support .get(); a
    # non-dict default then raises AttributeError -- confirm intended.
    return reduce(lambda a, x: a.get(x, default), keys, record)
def valuesof(record, keys):
    """Return the values corresponding to the given keys

    Missing keys yield None.

    >>> band = {'name': 'Metallica', 'singer': 'James Hetfield', 'guitarist': 'Kirk Hammet'}
    >>> print valuesof(band, ('name', 'date', 'singer'))
    ['Metallica', None, 'James Hetfield']
    """
    # Accept a single scalar key as well as an iterable of keys.
    if not isiterable(keys):
        keys = [keys]
    return map(record.get, keys)
def valueof(records, key):
    """Extract the value corresponding to the given key in all the dictionaries

    A single dict is treated as a one-element list of records.

    # >>> bands = [{'name': 'Led Zeppelin', 'singer': 'Robert Plant', 'guitarist': 'Jimmy Page'},
    # ....: {'name': 'Metallica', 'singer': 'James Hetfield', 'guitarist': 'Kirk Hammet'}]
    # >>> print valueof(bands, 'singer')
    # ['Robert Plant', 'James Hetfield']
    """
    if isinstance(records, dict):
        records = [records]
    pick = lambda record: record[key]
    return map(pick, records)
def take(n, seq):
    """Return the n first items in the sequence

    >>> take(3, xrange(10000))
    [0, 1, 2]
    """
    # islice with a single stop argument starts at 0 implicitly.
    return list(itertools.islice(seq, n))
def drop(n, seq):
    """Return the n last items in the sequence

    >>> drop(9997, xrange(10000))
    [9997, 9998, 9999]
    """
    iterator = iter(seq)
    return list(itertools.islice(iterator, n, None))
def find(fn, record):
    """Apply a function on the record values and return {key: result}
    for the first key whose value equals the result.

    >>> print find(max, {'Terry': 30, 'Graham': 35, 'John': 27})
    {'Graham': 35}
    """
    target = fn(record.values())
    for key, value in record.items():
        if value == target:
            return {key: target}
# INSPECTION {{{1
def isiterable(coll):
    """Return True if the collection is any iterable except a string"""
    # Duck-typed check: presence of the __iter__ protocol method.
    # (Python 2 str has no __iter__, hence the "except a string" above.)
    return getattr(coll, "__iter__", None) is not None
def are_in(items, collection):
    """Return True for each item in the collection

    >>> print are_in(['Terry', 'James'], ['Terry', 'John', 'Eric'])
    [True, False]
    """
    # A lone scalar is promoted to a one-element tuple.
    if not isinstance(items, (list, tuple)):
        items = (items, )
    membership = lambda item: item in collection
    return map(membership, items)
def any_in(items, collection):
    """Return True if any of the items are in the collection

    :param items: items that may be in the collection
    :param collection: a collection

    >>> print any_in(2, [1, 3, 2])
    True
    >>> print any_in([1, 2], [1, 3, 2])
    True
    >>> print any_in([1, 2], [1, 3])
    True
    """
    for present in are_in(items, collection):
        if present:
            return True
    return False
def all_in(items, collection):
    """Return True if all of the items are in the collection

    :param items: items that may be in the collection
    :param collection: a collection

    >>> print all_in(2, [1, 3, 2])
    True
    >>> print all_in([1, 2], [1, 3, 2])
    True
    >>> print all_in([1, 2], [1, 3])
    False
    """
    for present in are_in(items, collection):
        if not present:
            return False
    return True
def monotony(seq):
    """Determine the monotony of a sequence

    :param seq: a sequence

    :returns: 1 if the sequence is sorted (increasing)
    :returns: 0 if it is not sorted
    :returns: -1 if it is sorted in reverse order (decreasing)

    >>> monotony([1, 2, 3])
    1
    >>> monotony([1, 3, 2])
    0
    >>> monotony([3, 2, 1])
    -1
    """
    ascending = sorted(seq)
    if seq == ascending:
        return 1
    if seq == ascending[::-1]:
        return -1
    return 0
def attributes(data):
    """Return all the non callable and non special attributes of the input data

    BUG FIX: the previous version tested callable(x) where x is the
    attribute *name* (a string, never callable), so methods were never
    filtered out.  Test the attribute value itself instead.
    """
    return [x for x in dir(data)
            if not callable(getattr(data, x)) and not x.startswith('__')]
def find_each(fn, records):
    """Apply find(fn, ...) to every record in the mapping of records.

    BUG FIX: the lambda parameter was named ``c`` while the body used
    ``x``, raising NameError on every call.
    """
    return dmap(lambda x: find(fn, x), records)
def dfilter(fn, record):
    """filter for a dictionary: keep entries whose value satisfies fn

    :param fn: A predicate function
    :param record: a dict

    :returns: a dict

    >>> odd = lambda x: x % 2 != 0
    >>> print dfilter(odd, {'Terry': 30, 'Graham': 35, 'John': 27})
    {'John': 27, 'Graham': 35}
    """
    kept = []
    for key, value in record.items():
        if fn(value):
            kept.append((key, value))
    return dict(kept)
def occurrences(coll, value=None):
    """Return the occurrences of the elements in the collection

    :param coll: a collection of hashable items
    :param value: optional single element whose count is returned

    >>> print occurrences((1, 1, 2, 3))
    {1: 2, 2: 1, 3: 1}
    >>> print occurrences((1, 1, 2, 3), 1)
    2
    """
    count = {}
    for element in coll:
        count[element] = count.get(element, 0) + 1
    # BUG FIX: compare against None explicitly, otherwise asking for the
    # count of a falsy value (e.g. 0 or '') returned the whole dict.
    if value is not None:
        count = count.get(value, 0)
    return count
def indexof(coll, item, start=0, default=None):
    """Return the index of the item in the collection

    :param coll: iterable
    :param item: scalar
    :param start: (optional) The start index
    :default: The default value of the index if the item is not in the collection

    :returns: idx -- The index of the item in the collection
    """
    # Guard clause: absent items short-circuit to the default.
    if item not in coll:
        return default
    return list(coll).index(item, start)
def indexesof(coll, item):
    """Return all the indexes of the item in the collection"""
    # For each position i holding the item, indexof(..., start=i) finds
    # that same position, so the comprehension yields every match index.
    return [indexof(coll, item, i) for i in xrange(len(coll)) if coll[i] == item]
def count(fn, coll):
    """Return the count of True values returned by the function applied to the
    collection

    >>> count(lambda x: x % 2 == 0, [11, 22, 31, 24, 15])
    2
    """
    # Note: deliberately counts only literal True results (`is True`),
    # not merely truthy ones.
    return sum(1 for item in coll if fn(item) is True)
# collections.Counter requires Python >= 2.7.
def isdistinct(coll):
    """Return True when no element of coll appears more than once.

    >>> isdistinct([1, 2, 3])
    True
    >>> isdistinct([1, 2, 2])
    False
    """
    # BUG FIX: an empty collection used to raise IndexError on
    # most_common(1)[0]; it is trivially distinct.
    if not coll:
        return True
    most_common = collections.Counter(coll).most_common(1)
    return not most_common[0][1] > 1
Minor fix
"""Functional programming tools for data processing
`fntools` is a simple library providing the user with functional programming
functions to transform, filter and inspect Python data structures.
"""
from copy import deepcopy
import itertools
import operator
import collections
from functools import wraps
# TRANSFORMATION {{{1
def use_with(data, fn, *attrs):
"""
# Let's create some data first
>>> from collections import namedtuple
>>> Person = namedtuple('Person', ('name', 'age', 'gender'))
>>> alice = Person('Alice', 30, 'F')
# Usage
>>> make_csv_row = lambda n, a, g: '%s,%d,%s' % (n, a, g)
>>> use_with(alice, make_csv_row, 'name', 'age', 'gender')
'Alice,30,F'
"""
args = [getattr(data, x) for x in attrs]
return fn(*args)
def zip_with(fn, *colls):
"""Return the result of the function applied on the zip of the collections
:param fn: a function
:param colls: collections
>>> print list(zip_with(lambda x, y: x-y, [10, 20, 30], [42, 19, 43]))
[-32, 1, -13]
"""
return itertools.starmap(fn, itertools.izip(*colls))
def unzip(colls):
"""Unzip collections"""
return zip(*colls)
def concat(colls):
"""Concatenate a list of collections
:param colls: a list of collections
:returns: the concatenation of the collections
>>> print concat(([1, 2], [3, 4]))
[1, 2, 3, 4]
"""
return list(itertools.chain(*colls))
def mapcat(fn, colls):
"""Concatenate the result of a map
:param fn: a function
:param colls: a list of collections
"""
return map(fn, concat(colls))
# TODO Fix and test dmap
def dmap(fn, record):
"""map for a directory
:param fn: a function
:param record: a dictionary
:returns: a dictionary
"""
values = (fn(v) for k, v in record.items())
return dict(itertools.izip(record, values))
def compose(*fns):
"""Return the function composed with the given functions
>>> add2 = lambda x: x+2
>>> mult3 = lambda x: x*3
>>> new_fn = compose(add2, mult3)
>>> print new_fn(2)
8
.. note:: compose(fn1, fn2, fn3) is the same as fn1(fn2(fn3))
which means that the last function provided is the first to be applied.
"""
def compose2(f, g):
return lambda x: f(g(x))
return reduce(compose2, fns)
def groupby(f, sample):
"""Group elements in sub-samples by f
>>> print groupby(len, ['John', 'Terry', 'Eric', 'Graham', 'Mickael'])
{4: ['John', 'Eric'], 5: ['Terry'], 6: ['Graham'], 7: ['Mickael']}
"""
d = collections.defaultdict(list)
for item in sample:
key = f(item)
d[key].append(item)
return dict(d)
def reductions(fn, seq, acc=None):
    """Return the intermediate values of a reduction

    :param fn: a binary function
    :param seq: a sequence
    :param acc: optional initial accumulator value

    >>> print reductions(lambda x, y: x + y, [1, 2, 3])
    [1, 3, 6]
    >>> print reductions(lambda x, y: x + y, [1, 2, 3], 10)
    [11, 13, 16]
    """
    indexes = xrange(len(seq))
    # BUG FIX: compare against None explicitly so a falsy accumulator
    # (0, "", [], ...) is still honoured as a genuine starting value.
    if acc is not None:
        return map(lambda i: reduce(lambda x, y: fn(x, y), seq[:i+1], acc), indexes)
    else:
        return map(lambda i: reduce(lambda x, y: fn(x, y), seq[:i+1]), indexes)
def split(coll, factor):
"""Split a collection by using a factor
>>> bands = ('Led Zeppelin', 'Debussy', 'Metallica', 'Iron Maiden', 'Bach')
>>> styles = ('rock', 'classic', 'rock', 'rock', 'classic')
>>> print split(bands, styles)
{'classic': ['Debussy', 'Bach'], 'rock': ['Led Zeppelin', 'Metallica', 'Iron Maiden']}
"""
groups = groupby(lambda x: x[0], itertools.izip(factor, coll))
return dmap(lambda x: [y[1] for y in x], groups)
def assoc(_d, key, value):
"""Associate a key with a value in a dictionary
>>> movie = assoc({}, 'name', 'Holy Grail')
>>> print movie
{'name': 'Holy Grail'}
"""
d = deepcopy(_d)
d[key] = value
return d
def dispatch(data, fns):
"""Apply the functions on the data
:param data: the data
:param fns: a list of functions
>>> x = (1, 42, 5, 79)
>>> print dispatch(x, (min, max))
[1, 79]
"""
return map(lambda fn: fn(data), fns)
def multimap(fn, colls):
"""Apply a function on multiple collections
>>> print multimap(operator.add, ((1, 2, 3), (4, 5, 6)))
[5, 7, 9]
>>> f = lambda x, y, z: 2*x + 3*y - z
>>> result = multimap(f, ((1, 2), (4, 1), (1, 1)))
>>> result[0] == f(1, 4, 1)
True
>>> result[1] == f(2, 1, 1)
True
"""
return list(itertools.starmap(fn, zip(*colls)))
def pipe(data, *fns):
"""Apply functions recursively on your data
>>> inc = lambda x: x + 1
>>> pipe(42, inc, str)
'43'
"""
return reduce(lambda acc, f: f(acc), fns, data)
def pipe_each(coll, *fns):
"""Apply functions recursively on your collection of data
"""
return map(lambda x: pipe(x, *fns), coll)
def shift(func, *args, **kwargs):
"""This function is basically a beefed up lambda x: func(x, *args, **kwargs)
`shift` comes in handy when it is used in a pipeline with a function that
needs the passed value as its first argument.
>>> def div(x, y): return float(x) / y
# This is equivalent to div(42, 2):
>>> shift(div, 2)(42)
21.0
# which is different from div(2, 42):
>>> partial(div, 2)(42)
0.047619047619047616
"""
@wraps(func)
def wrapped(x):
return func(x, *args, **kwargs)
return wrapped
# FILTERING {{{1
def duplicates(coll):
    """Return the distinct items that appear more than once in coll.

    PERF FIX: a single Counter pass is O(n); the previous version called
    coll.count() per element, which is O(n**2).  (Item order in the result
    was never guaranteed and still is not.)
    """
    counts = collections.Counter(coll)
    return [item for item in counts if counts[item] > 1]
def pluck(record, *keys, **kwargs):
"""
>>> d = {'name': 'Lancelot', 'actor': 'John Cleese', 'color': 'blue'}
>>> print pluck(d, 'name', 'color')
{'color': 'blue', 'name': 'Lancelot'}
# the keyword 'default' allows to replace a None value
>>> d = {'year': 2014, 'movie': 'Bilbo'}
>>> print pluck(d, 'year', 'movie', 'nb_aliens', default=0)
{'movie': 'Bilbo', 'nb_aliens': 0, 'year': 2014}
"""
default = kwargs.get('default', None)
return reduce(lambda a, x: assoc(a, x, record.get(x, default)), keys, {})
def use(data, *attrs):
"""
# Let's create some data first
>>> from collections import namedtuple
>>> Person = namedtuple('Person', ('name', 'age', 'gender'))
>>> alice = Person('Alice', 30, 'F')
# Usage
>>> use(alice, 'name', 'gender')
['Alice', 'F']
"""
return map(lambda x: getattr(data, x), attrs)
def get_in(record, *keys, **kwargs):
"""Return the value corresponding to the keys in a nested record
>>> d = {'id': {'name': 'Lancelot', 'actor': 'John Cleese', 'color': 'blue'}}
>>> print get_in(d, 'id', 'name')
Lancelot
>>> print get_in(d, 'id', 'age', default='?')
?
"""
default = kwargs.get('default', None)
return reduce(lambda a, x: a.get(x, default), keys, record)
def valuesof(record, keys):
"""Return the values corresponding to the given keys
>>> band = {'name': 'Metallica', 'singer': 'James Hetfield', 'guitarist': 'Kirk Hammet'}
>>> print valuesof(band, ('name', 'date', 'singer'))
['Metallica', None, 'James Hetfield']
"""
if not isiterable(keys):
keys = [keys]
return map(record.get, keys)
def valueof(records, key):
"""Extract the value corresponding to the given key in all the dictionaries
# >>> bands = [{'name': 'Led Zeppelin', 'singer': 'Robert Plant', 'guitarist': 'Jimmy Page'},
# ....: {'name': 'Metallica', 'singer': 'James Hetfield', 'guitarist': 'Kirk Hammet'}]
# >>> print valueof(bands, 'singer')
# ['Robert Plant', 'James Hetfield']
"""
if isinstance(records, dict):
records = [records]
return map(operator.itemgetter(key), records)
def take(n, seq):
"""Return the n first items in the sequence
>>> take(3, xrange(10000))
[0, 1, 2]
"""
return list(itertools.islice(seq, 0, n))
def drop(n, seq):
"""Return the n last items in the sequence
>>> drop(9997, xrange(10000))
[9997, 9998, 9999]
"""
return list(itertools.islice(seq, n, None))
def find(fn, record):
"""Apply a function on the record and return the corresponding new record
>>> print find(max, {'Terry': 30, 'Graham': 35, 'John': 27})
{'Graham': 35}
"""
values_result = fn(record.values())
keys_result = [k for k, v in record.items() if v == values_result]
return {keys_result[0]: values_result}
# INSPECTION {{{1
def isiterable(coll):
"""Return True if the collection is any iterable except a string"""
return hasattr(coll, "__iter__")
def are_in(items, collection):
"""Return True for each item in the collection
>>> print are_in(['Terry', 'James'], ['Terry', 'John', 'Eric'])
[True, False]
"""
if not isinstance(items, (list, tuple)):
items = (items, )
return map(lambda x: x in collection, items)
def any_in(items, collection):
"""Return True if any of the items are in the collection
:param items: items that may be in the collection
:param collection: a collection
>>> print any_in(2, [1, 3, 2])
True
>>> print any_in([1, 2], [1, 3, 2])
True
>>> print any_in([1, 2], [1, 3])
True
"""
return any(are_in(items, collection))
def all_in(items, collection):
"""Return True if all of the items are in the collection
:param items: items that may be in the collection
:param collection: a collection
>>> print all_in(2, [1, 3, 2])
True
>>> print all_in([1, 2], [1, 3, 2])
True
>>> print all_in([1, 2], [1, 3])
False
"""
return all(are_in(items, collection))
def monotony(seq):
"""Determine the monotony of a sequence
:param seq: a sequence
:returns: 1 if the sequence is sorted (increasing)
:returns: 0 if it is not sorted
:returns: -1 if it is sorted in reverse order (decreasing)
>>> monotony([1, 2, 3])
1
>>> monotony([1, 3, 2])
0
>>> monotony([3, 2, 1])
-1
"""
if seq == sorted(seq):
return 1
elif seq == list(reversed(sorted(seq))):
return -1
else:
return 0
def attributes(data):
    """Return all the non callable and non special attributes of the input data

    BUG FIX: the previous version tested callable(x) where x is the
    attribute *name* (a string, never callable), so methods were never
    filtered out.  Test the attribute value itself instead.
    """
    return [x for x in dir(data)
            if not callable(getattr(data, x)) and not x.startswith('__')]
def find_each(fn, records):
    # Apply find(fn, ...) to every record (value) in the mapping of records.
    return dmap(lambda x: find(fn, x), records)
def dfilter(fn, record):
"""filter for a directory
:param fn: A predicate function
:param record: a dict
:returns: a dict
>>> odd = lambda x: x % 2 != 0
>>> print dfilter(odd, {'Terry': 30, 'Graham': 35, 'John': 27})
{'John': 27, 'Graham': 35}
"""
return dict([(k, v) for k, v in record.items() if fn(v)])
def occurrences(coll, value=None):
    """Return the occurrences of the elements in the collection

    :param coll: a collection of hashable items
    :param value: optional single element whose count is returned

    >>> print occurrences((1, 1, 2, 3))
    {1: 2, 2: 1, 3: 1}
    >>> print occurrences((1, 1, 2, 3), 1)
    2
    """
    count = {}
    for element in coll:
        count[element] = count.get(element, 0) + 1
    # BUG FIX: compare against None explicitly, otherwise asking for the
    # count of a falsy value (e.g. 0 or '') returned the whole dict.
    if value is not None:
        count = count.get(value, 0)
    return count
def indexof(coll, item, start=0, default=None):
"""Return the index of the item in the collection
:param coll: iterable
:param item: scalar
:param start: (optional) The start index
:default: The default value of the index if the item is not in the collection
:returns: idx -- The index of the item in the collection
"""
if item in coll:
return list(coll).index(item, start)
else:
return default
def indexesof(coll, item):
"""Return all the indexes of the item in the collection"""
return [indexof(coll, item, i) for i in xrange(len(coll)) if coll[i] == item]
def count(fn, coll):
"""Return the count of True values returned by the function applied to the
collection
>>> count(lambda x: x % 2 == 0, [11, 22, 31, 24, 15])
2
"""
return len([x for x in coll if fn(x) is True])
# collections.Counter requires Python >= 2.7.
def isdistinct(coll):
    """Return True when no element of coll appears more than once.

    >>> isdistinct([1, 2, 3])
    True
    >>> isdistinct([1, 2, 2])
    False
    """
    # BUG FIX: an empty collection used to raise IndexError on
    # most_common(1)[0]; it is trivially distinct.
    if not coll:
        return True
    most_common = collections.Counter(coll).most_common(1)
    return not most_common[0][1] > 1
|
# -*- coding: utf-8 -*-
"""
hdbscan.plots: Use matplotlib to display plots of internal
tree structures used by HDBSCAN.
"""
# Author: Leland McInnes <leland.mcinnes@gmail.com>
#
# License: BSD 3 clause
from ._hdbscan_tree import compute_stability
import numpy as np
# Indexes into the per-cluster bounds 4-list: [left, right, bottom, top].
CB_LEFT = 0
CB_RIGHT = 1
CB_BOTTOM = 2
CB_TOP = 3
def get_leaves(condensed_tree):
    """Return the ids of clusters that have no cluster children.

    Rows with child_size > 1 describe cluster-to-cluster edges; a leaf is
    a cluster that never appears as a parent among those rows.
    """
    cluster_tree = condensed_tree[condensed_tree['child_size'] > 1]
    leaves = []
    for candidate in cluster_tree['child']:
        children = cluster_tree[cluster_tree['parent'] == candidate]
        if len(children) == 0:
            leaves.append(candidate)
    return leaves
def bfs_from_cluster_tree(tree, bfs_root):
    """Return cluster ids reachable from bfs_root in breadth-first order."""
    visited = []
    frontier = [bfs_root]
    while frontier:
        visited.extend(frontier)
        # Children of any node in the current frontier form the next level.
        mask = np.in1d(tree['parent'], frontier)
        frontier = tree['child'][mask].tolist()
    return visited
class CondensedTree (object):
def __init__(self, condensed_tree_array):
    # Keep a reference to the raw condensed-tree record array from HDBSCAN.
    self._raw_tree = condensed_tree_array
def get_plot_data(self, leaf_separation=1, log_size=False):
    """Generates data for use in plotting the 'icicle plot' or dendrogram
    plot of the condensed tree generated by HDBSCAN.

    Keyword Arguments
    -----------------
    leaf_separation : float
                      How far apart to space the final leaves of the
                      dendrogram. (default 1)

    log_size : boolean
               Use log scale for the 'size' of clusters (i.e. number of
               points in the cluster at a given lambda value).
               (default False)

    Returns
    -------
    plot_data : dict
                Data associated to bars in a bar plot:
                    `bar_centers` x coordinate centers for bars
                    `bar_tops` heights of bars in lambda scale
                    `bar_bottoms` y coordinate of bottoms of bars
                    `bar_widths` widths of the bars (in x coord scale)
                    `bar_bounds` a 4-tuple of [left, right, bottom, top]
                                 giving the bounds on a full set of
                                 cluster bars
                Data associates with cluster splits:
                    `line_xs` x coordinates for horiontal dendrogram lines
                    `line_ys` y coordinates for horiontal dendrogram lines
    """
    leaves = get_leaves(self._raw_tree)
    last_leaf = self._raw_tree['parent'].max()
    root = self._raw_tree['parent'].min()

    # We want to get the x and y coordinates for the start of each cluster
    # Initialize the leaves, since we know where they go, then iterate
    # through everything from the leaves back, setting coords as we go
    cluster_x_coords = dict(zip(leaves, [leaf_separation * x
                                         for x in range(len(leaves))]))
    cluster_y_coords = {root : 0.0}

    for cluster in range(last_leaf, root - 1, -1):
        # Cluster-children rows of this cluster (child_size > 1 only).
        split = self._raw_tree[['child', 'lambda']]
        split = split[(self._raw_tree['parent'] == cluster) &
                      (self._raw_tree['child_size'] > 1)]
        if len(split['child']) > 1:
            # NOTE(review): assumes splits are binary (exactly two cluster
            # children) -- the tuple unpack below fails otherwise. Confirm.
            left_child, right_child = split['child']
            # Parent sits midway between its children; each child starts
            # at the lambda value where the split happened.
            cluster_x_coords[cluster] = np.mean([cluster_x_coords[left_child],
                                                 cluster_x_coords[right_child]])
            cluster_y_coords[left_child] = split['lambda'][0]
            cluster_y_coords[right_child] = split['lambda'][1]

    # We use bars to plot the 'icicles', so we need to generate centers, tops,
    # bottoms and widths for each rectangle. We can go through each cluster
    # and do this for each in turn.
    bar_centers = []
    bar_tops = []
    bar_bottoms = []
    bar_widths = []

    cluster_bounds = {}

    # Normalise bar widths by the total size at the root.
    scaling = np.sum(self._raw_tree[self._raw_tree['parent'] == root]['child_size'])
    if log_size:
        scaling = np.log(scaling)

    for c in range(last_leaf, root - 1, -1):
        cluster_bounds[c] = [0, 0, 0, 0]

        c_children = self._raw_tree[self._raw_tree['parent'] == c]
        current_size = np.sum(c_children['child_size'])
        current_lambda = cluster_y_coords[c]
        if log_size:
            current_size = np.log(current_size)

        cluster_bounds[c][CB_LEFT] = cluster_x_coords[c] * scaling - (current_size / 2.0)
        cluster_bounds[c][CB_RIGHT] = cluster_x_coords[c] * scaling + (current_size / 2.0)
        cluster_bounds[c][CB_BOTTOM] = cluster_y_coords[c]
        cluster_bounds[c][CB_TOP] = np.max(c_children['lambda'])

        # Walk the children in lambda order; each time the lambda value
        # advances, emit one bar segment and shrink the remaining size by
        # the points that fell out of the cluster.
        for i in np.argsort(c_children['lambda']):
            row = c_children[i]
            if row['lambda'] != current_lambda:
                bar_centers.append(cluster_x_coords[c] * scaling)
                bar_tops.append(row['lambda'] - current_lambda)
                bar_bottoms.append(current_lambda)
                bar_widths.append(current_size)
            if log_size:
                # Shrink in linear space, then return to log space.
                current_size = np.log(np.exp(current_size) - row['child_size'])
            else:
                current_size -= row['child_size']
            current_lambda = row['lambda']

    # Finally we need the horizontal lines that occur at cluster splits.
    line_xs = []
    line_ys = []

    for row in self._raw_tree[self._raw_tree['child_size'] > 1]:
        parent = row['parent']
        child = row['child']
        child_size = row['child_size']
        if log_size:
            child_size = np.log(child_size)
        # Draw from the parent's center to the near edge of the child bar.
        sign = np.sign(cluster_x_coords[child] - cluster_x_coords[parent])
        line_xs.append([
            cluster_x_coords[parent] * scaling,
            cluster_x_coords[child] * scaling + sign * (child_size / 2.0)
        ])
        line_ys.append([
            cluster_y_coords[child],
            cluster_y_coords[child]
        ])

    return {
        'bar_centers' : bar_centers,
        'bar_tops' : bar_tops,
        'bar_bottoms' : bar_bottoms,
        'bar_widths' : bar_widths,
        'line_xs' : line_xs,
        'line_ys' : line_ys,
        'cluster_bounds': cluster_bounds
    }
def _select_clusters(self):
    # Select the flat clustering: walk candidate clusters from largest id
    # (deepest) upward; a node stays selected only if its own stability
    # beats the summed stability of its cluster children, otherwise the
    # children's stability is propagated up in its place.
    stability = compute_stability(self._raw_tree)
    # Drop the root node (last element after the reverse sort).
    node_list = sorted(stability.keys(), reverse=True)[:-1]
    cluster_tree = self._raw_tree[self._raw_tree['child_size'] > 1]
    is_cluster = {cluster : True for cluster in node_list}

    for node in node_list:
        child_selection = (cluster_tree['parent'] == node)
        subtree_stability = np.sum([stability[child] for
                                    child in cluster_tree['child'][child_selection]])

        if subtree_stability > stability[node]:
            # Children win: deselect this node, carry their stability up.
            is_cluster[node] = False
            stability[node] = subtree_stability
        else:
            # This node wins: deselect every descendant cluster.
            for sub_node in bfs_from_cluster_tree(cluster_tree, node):
                if sub_node != node:
                    is_cluster[sub_node] = False

    return [cluster for cluster in is_cluster if is_cluster[cluster]]
def plot(self, leaf_separation=1, cmap='Blues', select_clusters=False,
         axis=None, colorbar=True, log_size=False):
    """Render the condensed tree as an 'icicle plot'.

    :param leaf_separation: spacing between dendrogram leaves (passed on
        to get_plot_data)
    :param cmap: matplotlib colormap name for bar widths, or 'none' for
        plain black bars (no colorbar is drawn in that case)
    :param select_clusters: if True, circle the clusters chosen by
        _select_clusters
    :param axis: existing matplotlib axis to draw on (default: plt.gca())
    :param colorbar: whether to draw a colorbar for the bar widths
    :param log_size: use log scale for cluster sizes
    :returns: the matplotlib axis drawn on
    :raises ImportError: when matplotlib is not installed
    """
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        raise ImportError('You must install the matplotlib library to plot the condensed tree. Use get_plot_data to calculate the relevant data without plotting.')

    plot_data = self.get_plot_data(leaf_separation=leaf_separation, log_size=log_size)

    sm = None
    if cmap != 'none':
        sm = plt.cm.ScalarMappable(cmap=cmap,
                                   norm=plt.Normalize(0, max(plot_data['bar_widths'])))
        sm.set_array(plot_data['bar_widths'])
        bar_colors = [sm.to_rgba(x) for x in plot_data['bar_widths']]
    else:
        bar_colors = 'black'

    if axis is None:
        axis = plt.gca()

    axis.bar(
        plot_data['bar_centers'],
        plot_data['bar_tops'],
        bottom=plot_data['bar_bottoms'],
        width=plot_data['bar_widths'],
        color=bar_colors,
        align='center',
        linewidth=0
    )

    # Horizontal split lines of the dendrogram.
    for xs, ys in zip(plot_data['line_xs'], plot_data['line_ys']):
        axis.plot(xs, ys, color='black', linewidth=1)

    if select_clusters:
        try:
            from matplotlib.patches import Ellipse
        except ImportError:
            raise ImportError('You must have matplotlib.patches available to plot selected clusters.')

        chosen_clusters = self._select_clusters()

        for c in chosen_clusters:
            c_bounds = plot_data['cluster_bounds'][c]
            width = (c_bounds[CB_RIGHT] - c_bounds[CB_LEFT])
            height = (c_bounds[CB_TOP] - c_bounds[CB_BOTTOM])
            center = (
                np.mean([c_bounds[CB_LEFT], c_bounds[CB_RIGHT]]),
                np.mean([c_bounds[CB_TOP], c_bounds[CB_BOTTOM]]),
            )

            box = Ellipse(
                center,
                2.0 * width,
                1.2 * height,
                facecolor='none',
                edgecolor='r',
                linewidth=2
            )

            axis.add_artist(box)

    # BUG FIX: with cmap='none' there is no ScalarMappable, so drawing a
    # colorbar raised NameError on `sm`; skip the colorbar in that case.
    if colorbar and sm is not None:
        cb = plt.colorbar(sm)
        if log_size:
            cb.ax.set_ylabel('log(Number of points)')
        else:
            cb.ax.set_ylabel('Number of points')

    axis.set_xticks([])
    for side in ('right', 'top', 'bottom'):
        axis.spines[side].set_visible(False)
    axis.invert_yaxis()
    axis.set_ylabel(r'$\lambda$ value')

    return axis
def to_pandas(self):
    """Export the condensed tree as a pandas DataFrame, one row per edge
    (columns: parent, child, lambda, child_size).
    """
    try:
        from pandas import DataFrame, Series
    except ImportError:
        raise ImportError('You must have pandas installed to export pandas DataFrames')

    return DataFrame(self._raw_tree)
def to_networkx(self):
    """Export the condensed tree as a networkx DiGraph.

    Edge weights are the lambda values at which each child leaves its
    parent cluster; every node carries a 'size' attribute.
    """
    try:
        from networkx import DiGraph, set_node_attributes
    except ImportError:
        raise ImportError('You must have networkx installed to export networkx graphs')

    graph = DiGraph()
    for edge in self._raw_tree:
        graph.add_edge(edge['parent'], edge['child'], weight=edge['lambda'])

    # NOTE(review): (graph, name, values) argument order matches the
    # networkx 1.x API -- confirm against the pinned networkx version.
    set_node_attributes(graph, 'size', dict(self._raw_tree[['child', 'child_size']]))
    return graph
Further documentation
# -*- coding: utf-8 -*-
"""
hdbscan.plots: Use matplotlib to display plots of internal
tree structures used by HDBSCAN.
"""
# Author: Leland McInnes <leland.mcinnes@gmail.com>
#
# License: BSD 3 clause
from ._hdbscan_tree import compute_stability
import numpy as np
# Indices into the 4-element cluster-bounds lists produced by
# CondensedTree.get_plot_data: [left, right, bottom, top].
CB_LEFT = 0
CB_RIGHT = 1
CB_BOTTOM = 2
CB_TOP = 3
def get_leaves(condensed_tree):
    """Return the leaf clusters of a condensed tree: clusters (rows with
    child_size > 1) that never appear as a parent of another cluster.
    """
    internal = condensed_tree[condensed_tree['child_size'] > 1]
    leaves = []
    for candidate in internal['child']:
        if not np.any(internal['parent'] == candidate):
            leaves.append(candidate)
    return leaves
def bfs_from_cluster_tree(tree, bfs_root):
    """Breadth-first traversal of a cluster tree starting at bfs_root;
    returns all visited node ids, level by level (root included).
    """
    visited = []
    frontier = [bfs_root]
    while frontier:
        visited += frontier
        mask = np.in1d(tree['parent'], frontier)
        frontier = tree['child'][mask].tolist()
    return visited
class CondensedTree(object):
    """The condensed tree structure produced by HDBSCAN.

    Wraps the raw condensed tree record array (fields 'parent', 'child',
    'lambda', 'child_size') and provides plotting and export helpers.
    """

    def __init__(self, condensed_tree_array):
        # Record array with one row per edge:
        # (parent, child, lambda, child_size).
        self._raw_tree = condensed_tree_array

    def get_plot_data(self, leaf_separation=1, log_size=False):
        """Generates data for use in plotting the 'icicle plot' or dendrogram
        plot of the condensed tree generated by HDBSCAN.

        Keyword Arguments
        -----------------
        leaf_separation : float
            How far apart to space the final leaves of the
            dendrogram. (default 1)

        log_size : boolean
            Use log scale for the 'size' of clusters (i.e. number of
            points in the cluster at a given lambda value).
            (default False)

        Returns
        -------
        plot_data : dict
            Data associated to bars in a bar plot:
                `bar_centers` x coordinate centers for bars
                `bar_tops` heights of bars in lambda scale
                `bar_bottoms` y coordinate of bottoms of bars
                `bar_widths` widths of the bars (in x coord scale)
                `cluster_bounds` a 4-tuple of [left, right, bottom, top]
                    giving the bounds on a full set of cluster bars
            Data associated with cluster splits:
                `line_xs` x coordinates for horizontal dendrogram lines
                `line_ys` y coordinates for horizontal dendrogram lines
        """
        leaves = get_leaves(self._raw_tree)
        last_leaf = self._raw_tree['parent'].max()
        root = self._raw_tree['parent'].min()

        # We want the x and y coordinates for the start of each cluster.
        # Initialize the leaves, since we know where they go, then iterate
        # from the leaves back to the root, setting coords as we go.
        cluster_x_coords = dict(zip(leaves, [leaf_separation * x
                                             for x in range(len(leaves))]))
        cluster_y_coords = {root: 0.0}

        for cluster in range(last_leaf, root - 1, -1):
            split = self._raw_tree[['child', 'lambda']]
            split = split[(self._raw_tree['parent'] == cluster) &
                          (self._raw_tree['child_size'] > 1)]
            if len(split['child']) > 1:
                # NOTE(review): assumes binary splits (exactly two
                # sub-clusters); the unpacking below raises otherwise.
                left_child, right_child = split['child']
                cluster_x_coords[cluster] = np.mean([cluster_x_coords[left_child],
                                                     cluster_x_coords[right_child]])
                cluster_y_coords[left_child] = split['lambda'][0]
                cluster_y_coords[right_child] = split['lambda'][1]

        # We use bars to plot the 'icicles', so we need to generate centers,
        # tops, bottoms and widths for each rectangle, cluster by cluster.
        bar_centers = []
        bar_tops = []
        bar_bottoms = []
        bar_widths = []

        cluster_bounds = {}

        # Total number of points under the root, used to scale x positions.
        scaling = np.sum(self._raw_tree[self._raw_tree['parent'] == root]['child_size'])

        if log_size:
            scaling = np.log(scaling)

        for c in range(last_leaf, root - 1, -1):
            cluster_bounds[c] = [0, 0, 0, 0]

            c_children = self._raw_tree[self._raw_tree['parent'] == c]
            current_size = np.sum(c_children['child_size'])
            current_lambda = cluster_y_coords[c]

            if log_size:
                current_size = np.log(current_size)

            cluster_bounds[c][CB_LEFT] = cluster_x_coords[c] * scaling - (current_size / 2.0)
            cluster_bounds[c][CB_RIGHT] = cluster_x_coords[c] * scaling + (current_size / 2.0)
            cluster_bounds[c][CB_BOTTOM] = cluster_y_coords[c]
            cluster_bounds[c][CB_TOP] = np.max(c_children['lambda'])

            # Walk the children in order of increasing lambda, narrowing the
            # bar each time points fall out of the cluster.
            for i in np.argsort(c_children['lambda']):
                row = c_children[i]
                if row['lambda'] != current_lambda:
                    bar_centers.append(cluster_x_coords[c] * scaling)
                    bar_tops.append(row['lambda'] - current_lambda)
                    bar_bottoms.append(current_lambda)
                    bar_widths.append(current_size)
                if log_size:
                    current_size = np.log(np.exp(current_size) - row['child_size'])
                else:
                    current_size -= row['child_size']
                current_lambda = row['lambda']

        # Finally we need the horizontal lines that occur at cluster splits.
        line_xs = []
        line_ys = []

        for row in self._raw_tree[self._raw_tree['child_size'] > 1]:
            parent = row['parent']
            child = row['child']
            child_size = row['child_size']
            if log_size:
                child_size = np.log(child_size)
            sign = np.sign(cluster_x_coords[child] - cluster_x_coords[parent])
            line_xs.append([
                cluster_x_coords[parent] * scaling,
                cluster_x_coords[child] * scaling + sign * (child_size / 2.0)
            ])
            line_ys.append([
                cluster_y_coords[child],
                cluster_y_coords[child]
            ])

        return {
            'bar_centers': bar_centers,
            'bar_tops': bar_tops,
            'bar_bottoms': bar_bottoms,
            'bar_widths': bar_widths,
            'line_xs': line_xs,
            'line_ys': line_ys,
            'cluster_bounds': cluster_bounds
        }

    def _select_clusters(self):
        # Excess-of-mass cluster selection: a node survives only if its
        # stability beats the summed stability of its children; winners
        # deselect their whole subtree.
        stability = compute_stability(self._raw_tree)
        node_list = sorted(stability.keys(), reverse=True)[:-1]  # exclude root
        cluster_tree = self._raw_tree[self._raw_tree['child_size'] > 1]
        is_cluster = {cluster: True for cluster in node_list}

        for node in node_list:
            child_selection = (cluster_tree['parent'] == node)
            subtree_stability = np.sum([stability[child] for
                                        child in cluster_tree['child'][child_selection]])

            if subtree_stability > stability[node]:
                is_cluster[node] = False
                # Propagate the winning combined stability upward.
                stability[node] = subtree_stability
            else:
                for sub_node in bfs_from_cluster_tree(cluster_tree, node):
                    if sub_node != node:
                        is_cluster[sub_node] = False

        return [cluster for cluster in is_cluster if is_cluster[cluster]]

    def plot(self, leaf_separation=1, cmap='Blues', select_clusters=False,
             axis=None, colorbar=True, log_size=False):
        """Render the condensed tree as an 'icicle plot' via matplotlib.

        leaf_separation -- spacing between dendrogram leaves.
        cmap -- matplotlib colormap name, or 'none' for black bars.
        select_clusters -- circle the clusters chosen by _select_clusters.
        axis -- matplotlib axis to draw on (defaults to current axis).
        colorbar -- draw a colorbar (requires a real cmap).
        log_size -- use a log scale for cluster sizes.

        Returns the matplotlib axis drawn on.
        """
        try:
            import matplotlib.pyplot as plt
        except ImportError:
            raise ImportError('You must install the matplotlib library to plot the condensed tree. Use get_plot_data to calculate the relevant data without plotting.')

        plot_data = self.get_plot_data(leaf_separation=leaf_separation, log_size=log_size)

        if cmap != 'none':
            sm = plt.cm.ScalarMappable(cmap=cmap,
                                       norm=plt.Normalize(0, max(plot_data['bar_widths'])))
            sm.set_array(plot_data['bar_widths'])
            bar_colors = [sm.to_rgba(x) for x in plot_data['bar_widths']]
        else:
            # BUG FIX: remember that no mappable exists so the colorbar
            # step below is skipped instead of raising NameError on `sm`.
            sm = None
            bar_colors = 'black'

        if axis is None:
            axis = plt.gca()

        axis.bar(
            plot_data['bar_centers'],
            plot_data['bar_tops'],
            bottom=plot_data['bar_bottoms'],
            width=plot_data['bar_widths'],
            color=bar_colors,
            align='center',
            linewidth=0
        )

        # Horizontal connectors drawn at each cluster split.
        for xs, ys in zip(plot_data['line_xs'], plot_data['line_ys']):
            axis.plot(xs, ys, color='black', linewidth=1)

        if select_clusters:
            try:
                from matplotlib.patches import Ellipse
            except ImportError:
                raise ImportError('You must have matplotlib.patches available to plot selected clusters.')

            chosen_clusters = self._select_clusters()
            for c in chosen_clusters:
                c_bounds = plot_data['cluster_bounds'][c]
                width = (c_bounds[CB_RIGHT] - c_bounds[CB_LEFT])
                height = (c_bounds[CB_TOP] - c_bounds[CB_BOTTOM])
                center = (
                    np.mean([c_bounds[CB_LEFT], c_bounds[CB_RIGHT]]),
                    np.mean([c_bounds[CB_TOP], c_bounds[CB_BOTTOM]]),
                )
                # Ellipse padded slightly beyond the cluster's bar extents.
                box = Ellipse(
                    center,
                    2.0 * width,
                    1.2 * height,
                    facecolor='none',
                    edgecolor='r',
                    linewidth=2
                )
                axis.add_artist(box)

        # Only draw a colorbar when a mappable was actually built above.
        if colorbar and sm is not None:
            cb = plt.colorbar(sm)
            if log_size:
                cb.ax.set_ylabel('log(Number of points)')
            else:
                cb.ax.set_ylabel('Number of points')

        axis.set_xticks([])
        for side in ('right', 'top', 'bottom'):
            axis.spines[side].set_visible(False)
        axis.invert_yaxis()
        # Raw string so '\l' is not interpreted as an escape sequence.
        axis.set_ylabel(r'$\lambda$ value')

        return axis

    def to_pandas(self):
        """Return a pandas dataframe representation of the condensed tree.

        Each row of the dataframe corresponds to an edge in the tree.
        The columns of the dataframe are `parent`, `child`, `lambda`
        and `child_size`.

        The `parent` and `child` are the ids of the
        parent and child nodes in the tree. Node ids less than the number
        of points in the original dataset represent individual points, while
        ids greater than the number of points are clusters.

        The `lambda` value is the value (1/distance) at which the `child`
        node leaves the cluster.

        The `child_size` is the number of points in the `child` node.
        """
        try:
            from pandas import DataFrame, Series
        except ImportError:
            raise ImportError('You must have pandas installed to export pandas DataFrames')

        return DataFrame(self._raw_tree)

    def to_networkx(self):
        """Return a NetworkX DiGraph object representing the condensed tree.

        Edge weights in the graph are the lambda values at which child nodes
        'leave' the parent cluster.

        Nodes have a `size` attribute attached giving the number of points
        that are in the cluster (or 1 if it is a singleton point) at the
        point of cluster creation (fewer points may be in the cluster at
        larger lambda values).
        """
        try:
            from networkx import DiGraph, set_node_attributes
        except ImportError:
            raise ImportError('You must have networkx installed to export networkx graphs')

        result = DiGraph()
        for row in self._raw_tree:
            result.add_edge(row['parent'], row['child'], weight=row['lambda'])

        # NOTE(review): (graph, name, values) argument order matches the
        # networkx 1.x API -- confirm against the pinned networkx version.
        set_node_attributes(result, 'size', dict(self._raw_tree[['child', 'child_size']]))

        return result
|
#!/usr/bin/env python3
from yahdlc import *
from threading import Thread, Event, Lock
from time import sleep
class HDLController:
    """
    An HDLC controller based on python4yahdlc.

    Data frames are sent by short-lived Sender threads (at most `window`
    in flight at once); incoming frames are handled by one Receiver thread.
    """

    # HDLC sequence numbers are 3 bits wide, so they wrap modulo 8.
    MAX_SEQ_NO = 8

    def __init__(self, read_func, write_func, window=3):
        """
        read_func -- callable returning raw bytes read from the link.
        write_func -- callable writing raw bytes to the link.
        window -- maximum number of unacknowledged data frames in flight.
        """
        if not callable(read_func):
            raise TypeError('The read function parameter is not a callable object')
        if not callable(write_func):
            raise TypeError('The write function parameter is not a callable object')

        self.read = read_func
        self.write = write_func
        self.window = window
        # Sequence number -> Sender thread, one entry per in-flight frame.
        self.senders = dict()
        # Serialises all writes to the underlying link.
        self.send_lock = Lock()
        self.new_seq_no = 0
        self.send_callback = None
        self.receive_callback = None
        self.receiver = None

    def start(self):
        """
        Start HDLC controller's threads.
        """
        self.receiver = self.Receiver(
            self.read,
            self.write,
            self.send_lock,
            self.senders,
            callback=self.receive_callback,
        )
        self.receiver.start()

    def stop(self):
        """
        Stop HDLC controller's threads.
        """
        # Guard against stop() being called before start().
        if self.receiver is not None:
            self.receiver.join()

        # Iterate over a snapshot: joining must not race a changing dict.
        for sender in list(self.senders.values()):
            sender.join()

    def set_send_callback(self, callback):
        """
        Set the send callback function.

        If the HDLC controller has already been started, the new callback
        function will be taken into account for the next data frames sent.
        """
        if not callable(callback):
            raise TypeError('The callback function parameter is not a callable object')

        self.send_callback = callback

    def set_receive_callback(self, callback):
        """
        Set the receive callback function.

        This method has to be called before starting the HDLC controller.
        """
        if not callable(callback):
            raise TypeError('The callback function parameter is not a callable object')

        self.receive_callback = callback

    def send(self, data):
        """
        Send a new data frame.

        This method will block until room is available for a new sender.
        This limit is determined by the size of the window.
        """
        while len(self.senders) >= self.window:
            # Sleep briefly instead of busy-spinning at 100% CPU while
            # waiting for a free slot in the window.
            sleep(0.0001)

        self.senders[self.new_seq_no] = self.Sender(
            self.write,
            self.send_lock,
            data,
            self.new_seq_no,
            callback=self.send_callback,
        )
        self.senders[self.new_seq_no].start()
        self.new_seq_no = (self.new_seq_no + 1) % HDLController.MAX_SEQ_NO

    class Sender(Thread):
        """
        Thread used to send one HDLC data frame and wait for its ack.
        """

        def __init__(self, write_func, send_lock, data, seq_no, callback=None):
            super().__init__()
            self.write = write_func
            self.send_lock = send_lock
            self.data = data
            self.seq_no = seq_no
            self.callback = callback
            self.ack = Event()

        def run(self):
            with self.send_lock:
                self.__send_data()

            # Block on the event instead of busy-waiting for the ack.
            self.ack.wait()

        def join(self, timeout=None):
            """
            Stop the current thread.
            """
            self.ack.set()
            super().join(timeout)

        def ack_received(self):
            """
            Inform the sender that the ack frame has been received, which
            has the consequence of stopping the current thread.
            """
            self.join()

        def nack_received(self):
            """
            Inform the sender that a nack frame has been received, which
            has the consequence of resending the data frame.
            """
            with self.send_lock:
                self.__send_data()

        def __send_data(self):
            """
            Send a new data frame.
            """
            if self.callback is not None:
                self.callback(self.data)

            self.write(frame_data(self.data, FRAME_DATA, self.seq_no))

    class Receiver(Thread):
        """
        Thread used to receive and dispatch HDLC frames.
        """

        def __init__(self, read_func, write_func, send_lock, senders_list, callback=None):
            super().__init__()
            self.read = read_func
            self.write = write_func
            self.send_lock = send_lock
            self.senders = senders_list
            self.callback = callback
            self.stop_receiver = Event()

        def run(self):
            # Pre-seed seq_no so an FCSError raised before the first
            # successful get_data() cannot hit an unbound local below.
            seq_no = 0

            while not self.stop_receiver.is_set():
                try:
                    # `frame_type` rather than `type`: avoid shadowing the builtin.
                    data, frame_type, seq_no = get_data(self.read())

                    if frame_type == FRAME_DATA:
                        if self.callback is not None:
                            self.callback(data)
                        with self.send_lock:
                            self.__send_ack((seq_no + 1) % HDLController.MAX_SEQ_NO)
                    elif frame_type == FRAME_ACK:
                        # Acks carry the *next* expected sequence number.
                        seq_no_sent = (seq_no - 1) % HDLController.MAX_SEQ_NO
                        self.senders[seq_no_sent].ack_received()
                        del self.senders[seq_no_sent]
                    elif frame_type == FRAME_NACK:
                        self.senders[seq_no].nack_received()
                    else:
                        raise TypeError('Bad frame type received')
                except MessageError:
                    # No HDLC frame detected
                    pass
                except KeyError:
                    # Drop bad (n)ack
                    pass
                except FCSError:
                    # Corrupted frame: ask the peer to resend.
                    with self.send_lock:
                        self.__send_nack(seq_no)
                finally:
                    # 200 µs pause to avoid monopolising the CPU.
                    sleep(200 / 1000000.0)

        def join(self, timeout=None):
            """
            Stop the current thread.
            """
            self.stop_receiver.set()
            super().join(timeout)

        def __send_ack(self, seq_no):
            """
            Send a new ack frame.
            """
            self.write(frame_data('', FRAME_ACK, seq_no))

        def __send_nack(self, seq_no):
            """
            Send a new nack frame.
            """
            self.write(frame_data('', FRAME_NACK, seq_no))
# Demo / manual test: drive the HDLC controller over a serial link,
# sending a test message once per second and printing traffic.
if __name__ == '__main__':
import serial
from sys import stdout, stderr
from argparse import ArgumentParser
ap = ArgumentParser(
description='HDLC controller',
)
ap.add_argument('-d', '--device', default='/dev/ttyACM0', help='serial device to use (default: /dev/ttyACM0)')
# NOTE: argparse coerces *string* defaults through `type`, so
# default='9600' yields the int 9600 (same for --timeout).
ap.add_argument('-b', '--baudrate', type=int, default='9600', help='serial baudrate value (default: 9600)')
ap.add_argument('-t', '--timeout', type=int, default='0', help='serial read timeout value (default: 0)')
ap.add_argument('-m', '--message', default='test', help='test message to send (default: test)')
args = vars(ap.parse_args())
# Serial port configuration
ser = serial.Serial()
ser.port = args['device']
ser.baudrate = args['baudrate']
ser.timeout = args['timeout']
stdout.write('[*] Connection ...\n')
try:
ser.open()
except serial.serialutil.SerialException as e:
stderr.write('[x] Serial connection problem : {0}\n'.format(e))
exit(1)
# Non-blocking read: return whatever bytes are already buffered.
# NOTE(review): inWaiting() is the pyserial 2.x spelling; pyserial 3
# renames it to in_waiting -- confirm the targeted pyserial version.
def read_uart():
return ser.read(ser.inWaiting())
def send_callback(data):
print('> {0}'.format(data))
def receive_callback(data):
print('< {0}'.format(data))
try:
hdlc_c = HDLController(read_uart, ser.write)
hdlc_c.set_send_callback(send_callback)
hdlc_c.set_receive_callback(receive_callback)
hdlc_c.start()
# Keep sending the test message until interrupted.
while True:
hdlc_c.send(args['message'])
sleep(1)
except KeyboardInterrupt:
stdout.write('[*] Bye !\n')
hdlc_c.stop()
ser.close()
add argparse epilog
#!/usr/bin/env python3
from yahdlc import *
from threading import Thread, Event, Lock
from time import sleep
class HDLController:
    """
    An HDLC controller based on python4yahdlc.

    Data frames are sent by short-lived Sender threads (at most `window`
    in flight at once); incoming frames are handled by one Receiver thread.
    """

    # HDLC sequence numbers are 3 bits wide, so they wrap modulo 8.
    MAX_SEQ_NO = 8

    def __init__(self, read_func, write_func, window=3):
        """
        read_func -- callable returning raw bytes read from the link.
        write_func -- callable writing raw bytes to the link.
        window -- maximum number of unacknowledged data frames in flight.
        """
        if not callable(read_func):
            raise TypeError('The read function parameter is not a callable object')
        if not callable(write_func):
            raise TypeError('The write function parameter is not a callable object')

        self.read = read_func
        self.write = write_func
        self.window = window
        # Sequence number -> Sender thread, one entry per in-flight frame.
        self.senders = dict()
        # Serialises all writes to the underlying link.
        self.send_lock = Lock()
        self.new_seq_no = 0
        self.send_callback = None
        self.receive_callback = None
        self.receiver = None

    def start(self):
        """
        Start HDLC controller's threads.
        """
        self.receiver = self.Receiver(
            self.read,
            self.write,
            self.send_lock,
            self.senders,
            callback=self.receive_callback,
        )
        self.receiver.start()

    def stop(self):
        """
        Stop HDLC controller's threads.
        """
        # Guard against stop() being called before start().
        if self.receiver is not None:
            self.receiver.join()

        # Iterate over a snapshot: joining must not race a changing dict.
        for sender in list(self.senders.values()):
            sender.join()

    def set_send_callback(self, callback):
        """
        Set the send callback function.

        If the HDLC controller has already been started, the new callback
        function will be taken into account for the next data frames sent.
        """
        if not callable(callback):
            raise TypeError('The callback function parameter is not a callable object')

        self.send_callback = callback

    def set_receive_callback(self, callback):
        """
        Set the receive callback function.

        This method has to be called before starting the HDLC controller.
        """
        if not callable(callback):
            raise TypeError('The callback function parameter is not a callable object')

        self.receive_callback = callback

    def send(self, data):
        """
        Send a new data frame.

        This method will block until room is available for a new sender.
        This limit is determined by the size of the window.
        """
        while len(self.senders) >= self.window:
            # Sleep briefly instead of busy-spinning at 100% CPU while
            # waiting for a free slot in the window.
            sleep(0.0001)

        self.senders[self.new_seq_no] = self.Sender(
            self.write,
            self.send_lock,
            data,
            self.new_seq_no,
            callback=self.send_callback,
        )
        self.senders[self.new_seq_no].start()
        self.new_seq_no = (self.new_seq_no + 1) % HDLController.MAX_SEQ_NO

    class Sender(Thread):
        """
        Thread used to send one HDLC data frame and wait for its ack.
        """

        def __init__(self, write_func, send_lock, data, seq_no, callback=None):
            super().__init__()
            self.write = write_func
            self.send_lock = send_lock
            self.data = data
            self.seq_no = seq_no
            self.callback = callback
            self.ack = Event()

        def run(self):
            with self.send_lock:
                self.__send_data()

            # Block on the event instead of busy-waiting for the ack.
            self.ack.wait()

        def join(self, timeout=None):
            """
            Stop the current thread.
            """
            self.ack.set()
            super().join(timeout)

        def ack_received(self):
            """
            Inform the sender that the ack frame has been received, which
            has the consequence of stopping the current thread.
            """
            self.join()

        def nack_received(self):
            """
            Inform the sender that a nack frame has been received, which
            has the consequence of resending the data frame.
            """
            with self.send_lock:
                self.__send_data()

        def __send_data(self):
            """
            Send a new data frame.
            """
            if self.callback is not None:
                self.callback(self.data)

            self.write(frame_data(self.data, FRAME_DATA, self.seq_no))

    class Receiver(Thread):
        """
        Thread used to receive and dispatch HDLC frames.
        """

        def __init__(self, read_func, write_func, send_lock, senders_list, callback=None):
            super().__init__()
            self.read = read_func
            self.write = write_func
            self.send_lock = send_lock
            self.senders = senders_list
            self.callback = callback
            self.stop_receiver = Event()

        def run(self):
            # Pre-seed seq_no so an FCSError raised before the first
            # successful get_data() cannot hit an unbound local below.
            seq_no = 0

            while not self.stop_receiver.is_set():
                try:
                    # `frame_type` rather than `type`: avoid shadowing the builtin.
                    data, frame_type, seq_no = get_data(self.read())

                    if frame_type == FRAME_DATA:
                        if self.callback is not None:
                            self.callback(data)
                        with self.send_lock:
                            self.__send_ack((seq_no + 1) % HDLController.MAX_SEQ_NO)
                    elif frame_type == FRAME_ACK:
                        # Acks carry the *next* expected sequence number.
                        seq_no_sent = (seq_no - 1) % HDLController.MAX_SEQ_NO
                        self.senders[seq_no_sent].ack_received()
                        del self.senders[seq_no_sent]
                    elif frame_type == FRAME_NACK:
                        self.senders[seq_no].nack_received()
                    else:
                        raise TypeError('Bad frame type received')
                except MessageError:
                    # No HDLC frame detected
                    pass
                except KeyError:
                    # Drop bad (n)ack
                    pass
                except FCSError:
                    # Corrupted frame: ask the peer to resend.
                    with self.send_lock:
                        self.__send_nack(seq_no)
                finally:
                    # 200 µs pause to avoid monopolising the CPU.
                    sleep(200 / 1000000.0)

        def join(self, timeout=None):
            """
            Stop the current thread.
            """
            self.stop_receiver.set()
            super().join(timeout)

        def __send_ack(self, seq_no):
            """
            Send a new ack frame.
            """
            self.write(frame_data('', FRAME_ACK, seq_no))

        def __send_nack(self, seq_no):
            """
            Send a new nack frame.
            """
            self.write(frame_data('', FRAME_NACK, seq_no))
# Demo / manual test: drive the HDLC controller over a serial link,
# sending a test message once per second and printing traffic.
if __name__ == '__main__':
import serial
from sys import stdout, stderr
from argparse import ArgumentParser
ap = ArgumentParser(
description='HDLC controller example',
epilog="Example: ./hdlcontroller.py -d /dev/ttyUSB0 -b 115200 -m 'Hello world!'",
)
ap.add_argument('-d', '--device', default='/dev/ttyACM0', help='serial device to use (default: /dev/ttyACM0)')
# NOTE: argparse coerces *string* defaults through `type`, so
# default='9600' yields the int 9600 (same for --timeout).
ap.add_argument('-b', '--baudrate', type=int, default='9600', help='serial baudrate value (default: 9600)')
ap.add_argument('-t', '--timeout', type=int, default='0', help='serial read timeout value (default: 0)')
ap.add_argument('-m', '--message', default='test', help='test message to send (default: test)')
args = vars(ap.parse_args())
# Serial port configuration
ser = serial.Serial()
ser.port = args['device']
ser.baudrate = args['baudrate']
ser.timeout = args['timeout']
stdout.write('[*] Connection ...\n')
try:
ser.open()
except serial.serialutil.SerialException as e:
stderr.write('[x] Serial connection problem : {0}\n'.format(e))
exit(1)
# Non-blocking read: return whatever bytes are already buffered.
# NOTE(review): inWaiting() is the pyserial 2.x spelling; pyserial 3
# renames it to in_waiting -- confirm the targeted pyserial version.
def read_uart():
return ser.read(ser.inWaiting())
def send_callback(data):
print('> {0}'.format(data))
def receive_callback(data):
print('< {0}'.format(data))
try:
hdlc_c = HDLController(read_uart, ser.write)
hdlc_c.set_send_callback(send_callback)
hdlc_c.set_receive_callback(receive_callback)
hdlc_c.start()
# Keep sending the test message until interrupted.
while True:
hdlc_c.send(args['message'])
sleep(1)
except KeyboardInterrupt:
stdout.write('[*] Bye !\n')
hdlc_c.stop()
ser.close()
|
from __future__ import print_function
"""Document builder for foliant. Implements "build" subcommand."""
import os, shutil, json
from os.path import join
from datetime import date
import yaml
from . import gitutils, pandoc, uploader, seqdiag
def copy_dir_content(src, dest):
    """Recursively copy directory content to another directory.

    A missing source directory is a silent no-op.
    """
    if not os.path.exists(src):
        return
    for entry in os.listdir(src):
        entry_path = join(src, entry)
        if os.path.isdir(entry_path):
            shutil.copytree(entry_path, join(dest, entry))
        elif os.path.isfile(entry_path):
            shutil.copy(entry_path, dest)
def get_version(cfg):
    """Extract version from config or generate it from git tag and revcount.

    Appends the current date when cfg["date"] is the string "true".
    Returns the parts joined with '-' (may be an empty string).
    """
    parts = []

    if cfg["version"] == "auto":
        version = gitutils.get_version()
    else:
        version = cfg["version"]
    if version:
        parts.append(version)

    # The config stores booleans as the strings "true"/"false".
    if cfg["date"] == "true":
        parts.append(date.today().strftime("%d-%m-%Y"))

    return '-'.join(parts)
def get_title(cfg):
    """Generate a file name from config: slugified title (or explicit
    file_name) with the version appended when one exists.
    """
    base_name = cfg.get("file_name", cfg["title"].replace(' ', '_'))
    parts = [base_name]

    version = get_version(cfg)
    if version:
        parts.append(version)

    return '_'.join(parts)
def collect_source(project_dir, target_dir, src_file):
    """Copy .md files, images, templates, and references from the project
    directory to a temporary directory.

    Chapters listed in main.yaml are concatenated (newline-separated)
    into target_dir/src_file.
    """
    print("Collecting source... ", end='')

    contents_path = join(project_dir, "main.yaml")
    with open(join(target_dir, src_file), 'w+', encoding="utf8") as src:
        with open(contents_path, encoding="utf8") as contents_file:
            # SECURITY NOTE: yaml.load without an explicit Loader can run
            # arbitrary constructors -- acceptable only for trusted,
            # project-local config files.
            for chapter_name in yaml.load(contents_file)["chapters"]:
                chapter_path = join(project_dir, "sources", chapter_name + ".md")
                with open(chapter_path, encoding="utf8") as chapter:
                    src.write(chapter.read() + '\n')

    copy_dir_content(join(project_dir, "sources", "images"), target_dir)
    copy_dir_content(join(project_dir, "templates"), target_dir)
    copy_dir_content(join(project_dir, "references"), target_dir)

    print("Done!")
def build(target_format, project_dir):
    """Convert source Markdown to the target format using Pandoc.

    target_format is matched on its first letter: p(df), d(ocx), t(ex),
    m(arkdown) or g(oogle drive, i.e. docx + upload).  Returns the name
    of the produced file.
    """
    tmp_dir = "tmp"
    src_file = "output.md"

    # Always start from a fresh scratch directory.
    if os.path.exists(tmp_dir):
        shutil.rmtree(tmp_dir)
    os.makedirs(tmp_dir)

    cfg = json.load(open(join(project_dir, "config.json"), encoding="utf8"))
    output_title = get_title(cfg)

    collect_source(project_dir, tmp_dir, src_file)
    seqdiag.process_diagrams(tmp_dir, src_file)

    first_letter = target_format[:1]
    if first_letter == 'p':
        output_file = output_title + ".pdf"
        pandoc.to_pdf(src_file, output_file, tmp_dir, cfg)
        shutil.copy(join(tmp_dir, output_file), output_file)
    elif first_letter == 'd':
        output_file = output_title + ".docx"
        pandoc.to_docx(src_file, output_file, tmp_dir, cfg)
        shutil.copy(join(tmp_dir, output_file), output_file)
    elif first_letter == 't':
        output_file = output_title + ".tex"
        pandoc.to_tex(src_file, output_file, tmp_dir, cfg)
        shutil.copy(join(tmp_dir, output_file), output_file)
    elif first_letter == 'm':
        output_file = output_title + ".md"
        shutil.copy(join(tmp_dir, src_file), output_file)
    elif first_letter == 'g':
        # Google Drive target: build a docx, then hand it to the uploader.
        output_file = output_title + ".docx"
        pandoc.to_docx(src_file, output_file, tmp_dir, cfg)
        uploader.upload(output_file)
    else:
        raise RuntimeError("Invalid target: %s" % target_format)

    shutil.rmtree(tmp_dir)
    return output_file
Builder: Add include processing.
from __future__ import print_function
"""Document builder for foliant. Implements "build" subcommand."""
import os, shutil, json
from os.path import join
from datetime import date
import yaml
from . import gitutils, pandoc, uploader, seqdiag, includes
def copy_dir_content(src, dest):
    """Recursively copy directory content to another directory.

    A missing source directory is a silent no-op.
    """
    if not os.path.exists(src):
        return
    for entry in os.listdir(src):
        entry_path = join(src, entry)
        if os.path.isdir(entry_path):
            shutil.copytree(entry_path, join(dest, entry))
        elif os.path.isfile(entry_path):
            shutil.copy(entry_path, dest)
def get_version(cfg):
    """Extract version from config or generate it from git tag and revcount.

    Appends the current date when cfg["date"] is the string "true".
    Returns the parts joined with '-' (may be an empty string).
    """
    parts = []

    if cfg["version"] == "auto":
        version = gitutils.get_version()
    else:
        version = cfg["version"]
    if version:
        parts.append(version)

    # The config stores booleans as the strings "true"/"false".
    if cfg["date"] == "true":
        parts.append(date.today().strftime("%d-%m-%Y"))

    return '-'.join(parts)
def get_title(cfg):
    """Generate a file name from config: slugified title (or explicit
    file_name) with the version appended when one exists.
    """
    base_name = cfg.get("file_name", cfg["title"].replace(' ', '_'))
    parts = [base_name]

    version = get_version(cfg)
    if version:
        parts.append(version)

    return '_'.join(parts)
def collect_source(project_dir, target_dir, src_file):
    """Copy .md files, images, templates, and references from the project
    directory to a temporary directory.

    Chapters listed in main.yaml are run through the include processor
    and concatenated into target_dir/src_file.
    """
    print("Collecting source... ", end='')

    contents_path = join(project_dir, "main.yaml")
    with open(join(target_dir, src_file), 'w+', encoding="utf8") as src:
        with open(contents_path, encoding="utf8") as contents_file:
            # SECURITY NOTE: yaml.load without an explicit Loader can run
            # arbitrary constructors -- acceptable only for trusted,
            # project-local config files.
            for chapter_name in yaml.load(contents_file)["chapters"]:
                chapter_path = join(project_dir, "sources", chapter_name + ".md")
                with open(chapter_path, encoding="utf8") as chapter:
                    src.write(includes.process_includes(chapter.read()))

    copy_dir_content(join(project_dir, "sources", "images"), target_dir)
    copy_dir_content(join(project_dir, "templates"), target_dir)
    copy_dir_content(join(project_dir, "references"), target_dir)

    print("Done!")
def build(target_format, project_dir):
    """Convert source Markdown to the target format using Pandoc.

    target_format is matched on its first letter: p(df), d(ocx), t(ex),
    m(arkdown) or g(oogle drive, i.e. docx + upload).  Returns the name
    of the produced file.  The "foliantcache" scratch directory is kept
    between runs.
    """
    tmp_dir = "foliantcache"
    src_file = "output.md"

    # The cache directory is reused across builds; create it only once.
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)

    cfg = json.load(open(join(project_dir, "config.json"), encoding="utf8"))
    output_title = get_title(cfg)

    collect_source(project_dir, tmp_dir, src_file)
    seqdiag.process_diagrams(tmp_dir, src_file)

    first_letter = target_format[:1]
    if first_letter == 'p':
        output_file = output_title + ".pdf"
        pandoc.to_pdf(src_file, output_file, tmp_dir, cfg)
        shutil.copy(join(tmp_dir, output_file), output_file)
    elif first_letter == 'd':
        output_file = output_title + ".docx"
        pandoc.to_docx(src_file, output_file, tmp_dir, cfg)
        shutil.copy(join(tmp_dir, output_file), output_file)
    elif first_letter == 't':
        output_file = output_title + ".tex"
        pandoc.to_tex(src_file, output_file, tmp_dir, cfg)
        shutil.copy(join(tmp_dir, output_file), output_file)
    elif first_letter == 'm':
        output_file = output_title + ".md"
        shutil.copy(join(tmp_dir, src_file), output_file)
    elif first_letter == 'g':
        # Google Drive target: build a docx, then hand it to the uploader.
        output_file = output_title + ".docx"
        pandoc.to_docx(src_file, output_file, tmp_dir, cfg)
        uploader.upload(output_file)
    else:
        raise RuntimeError("Invalid target: %s" % target_format)

    return output_file
|
# -*- coding: utf-8 -*-

from __future__ import (absolute_import, division, print_function)

from branca.colormap import (ColorMap, LinearColormap, StepColormap)
from branca.element import (CssLink, Div, Element, Figure, Html, IFrame,
                            JavascriptLink, Link, MacroElement)

from folium._version import get_versions
from folium.features import (
    ClickForMarker, ColorLine, CustomIcon, DivIcon, GeoJson,
    LatLngPopup, RegularPolygonMarker, TopoJson, Vega, VegaLite,
)
# BUG FIX: this import was commented out (and misspelled 'imoort') while
# TileLayer and WmsTileLayer were still listed in __all__, which broke
# `from folium import *`.
from folium.raster_layers import TileLayer, WmsTileLayer
from folium.folium import Map
from folium.map import (
    FeatureGroup, FitBounds, Icon, LayerControl, Marker, Popup
)
from folium.vector_layers import Circle, CircleMarker, PolyLine, Polygon, Rectangle  # noqa

__version__ = get_versions()['version']
del get_versions

# Only names actually imported above may appear here: a star-import fails
# on any __all__ entry that is not defined.  'GeoJsonStyle',
# 'MarkerCluster' and the misspelled 'Polyline' were removed for that
# reason.
__all__ = [
    'CssLink',
    'Div',
    'Element',
    'Figure',
    'Html',
    'IFrame',
    'JavascriptLink',
    'Link',
    'MacroElement',
    'ColorMap',
    'ColorLine',
    'LinearColormap',
    'StepColormap',
    'Map',
    'FeatureGroup',
    'FitBounds',
    'Icon',
    'LayerControl',
    'Marker',
    'Popup',
    'TileLayer',
    'ClickForMarker',
    'CustomIcon',
    'DivIcon',
    'GeoJson',
    'LatLngPopup',
    'Vega',
    'VegaLite',
    'RegularPolygonMarker',
    'TopoJson',
    'WmsTileLayer',
    # vector_layers
    'Circle',
    'CircleMarker',
    'PolyLine',
    'Polygon',
    'Rectangle',
]
TileLayer and WMSTileLayer are in __all__, (#756)
* TileLayer and WMSTileLayer are in __all__
# -*- coding: utf-8 -*-

from __future__ import (absolute_import, division, print_function)

from branca.colormap import (ColorMap, LinearColormap, StepColormap)
from branca.element import (CssLink, Div, Element, Figure, Html, IFrame,
                            JavascriptLink, Link, MacroElement)

from folium._version import get_versions
from folium.features import (
    ClickForMarker, ColorLine, CustomIcon, DivIcon, GeoJson,
    LatLngPopup, RegularPolygonMarker, TopoJson, Vega, VegaLite,
)
from folium.raster_layers import TileLayer, WmsTileLayer
from folium.folium import Map
from folium.map import (
    FeatureGroup, FitBounds, Icon, LayerControl, Marker, Popup
)
from folium.vector_layers import Circle, CircleMarker, PolyLine, Polygon, Rectangle  # noqa

__version__ = get_versions()['version']
del get_versions

# BUG FIX: only names actually imported above may appear here -- a
# star-import fails on any __all__ entry that is not defined.
# 'GeoJsonStyle', 'MarkerCluster' and the misspelled 'Polyline' were
# never imported and have been removed.
__all__ = [
    'CssLink',
    'Div',
    'Element',
    'Figure',
    'Html',
    'IFrame',
    'JavascriptLink',
    'Link',
    'MacroElement',
    'ColorMap',
    'ColorLine',
    'LinearColormap',
    'StepColormap',
    'Map',
    'FeatureGroup',
    'FitBounds',
    'Icon',
    'LayerControl',
    'Marker',
    'Popup',
    'TileLayer',
    'ClickForMarker',
    'CustomIcon',
    'DivIcon',
    'GeoJson',
    'LatLngPopup',
    'Vega',
    'VegaLite',
    'RegularPolygonMarker',
    'TopoJson',
    'WmsTileLayer',
    # vector_layers
    'Circle',
    'CircleMarker',
    'PolyLine',
    'Polygon',
    'Rectangle',
]
|
import os
import sys
import time
import urllib2
import base64
import json
import string
import random
import subprocess
# don't buffer stdout
#sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
# MG-RAST API version and base endpoint used by every helper in this module.
VERSION = '1'
API_URL = "http://api.metagenomics.anl.gov/"+VERSION
# Library authors (informational only).
AUTH_LIST = "Jared Bischof, Travis Harrison, Folker Meyer, Tobias Paczian, Andreas Wilke"
# Metadata fields accepted as filters by the MG-RAST search API.
SEARCH_FIELDS = ["function", "organism", "md5", "name", "biome", "feature", "material", "country", "location", "longitude", "latitude", "created", "env_package_type", "project_id", "project_name", "PI_firstname", "PI_lastname", "sequence_type", "seq_method", "collection_date"]
# return python struct from JSON output of asynchronous MG-RAST API
def async_rest_api(url, auth=None, data=None, debug=False, delay=15):
submit = obj_from_url(url, auth=auth, data=data, debug=debug)
if not (('status' in submit) and (submit['status'] == 'Submitted') and ('url' in submit)):
sys.stderr.write("ERROR: return data invalid format\n:%s"%json.dumps(submit))
result = obj_from_url(submit['url'], debug=debug)
while result['status'] != 'done':
if debug:
print "waiting %d seconds ..."%delay
time.sleep(delay)
result = obj_from_url(submit['url'], debug=debug)
if 'ERROR' in result['data']:
sys.stderr.write("ERROR: %s\n" %result['data']['ERROR'])
sys.exit(1)
return result['data']
# return python struct from JSON output of MG-RAST API
def obj_from_url(url, auth=None, data=None, debug=False):
    """Request `url` and return the decoded JSON body as a Python object.

    auth  : value sent in the 'Auth' header
    data  : request body; when present the request becomes a POST with a
            JSON content type
    debug : echo data/headers/url before sending
    Exits the process on HTTP errors or empty/invalid/ERROR responses.
    (Python 2 code: urllib2 and print statements.)"""
    header = {'Accept': 'application/json'}
    if auth:
        header['Auth'] = auth
    if data:
        header['Content-Type'] = 'application/json'
    if debug:
        if data:
            print "data:\t"+data
        print "header:\t"+json.dumps(header)
        print "url:\t"+url
    try:
        req = urllib2.Request(url, data, headers=header)
        res = urllib2.urlopen(req)
    except urllib2.HTTPError, error:
        try:
            eobj = json.loads(error.read())
            sys.stderr.write("ERROR (%s): %s\n" %(error.code, eobj['ERROR']))
            sys.exit(1)
        except:
            # NOTE(review): this bare except also catches the SystemExit
            # raised just above, so a JSON error body gets reported twice
            # (second time with an already-consumed read() -> empty string).
            sys.stderr.write("ERROR (%s): %s\n" %(error.code, error.read()))
            sys.exit(1)
    if not res:
        # NOTE(review): urlopen raises instead of returning a falsy value;
        # this guard is likely unreachable.
        sys.stderr.write("ERROR: no results returned\n")
        sys.exit(1)
    obj = json.loads(res.read())
    if obj is None:
        sys.stderr.write("ERROR: return structure not valid json format\n")
        sys.exit(1)
    if len(obj.keys()) == 0:
        sys.stderr.write("ERROR: no data available\n")
        sys.exit(1)
    if 'ERROR' in obj:
        sys.stderr.write("ERROR: %s\n" %obj['ERROR'])
        sys.exit(1)
    return obj
# print to stdout results of MG-RAST API
def stdout_from_url(url, auth=None, data=None, debug=False):
    """Stream the plain-text response of `url` to stdout in 8 KB chunks.

    Same request conventions as obj_from_url, but asks for text/plain and
    writes through safe_print instead of decoding JSON.
    (Python 2 code: urllib2 and print statements.)"""
    header = {'Accept': 'text/plain'}
    if auth:
        header['Auth'] = auth
    if data:
        header['Content-Type'] = 'application/json'
    if debug:
        if data:
            print "data:\t"+data
        print "header:\t"+json.dumps(header)
        print "url:\t"+url
    try:
        req = urllib2.Request(url, data, headers=header)
        res = urllib2.urlopen(req)
    except urllib2.HTTPError, error:
        try:
            eobj = json.loads(error.read())
            sys.stderr.write("ERROR (%s): %s\n" %(error.code, eobj['ERROR']))
            sys.exit(1)
        except:
            # NOTE(review): bare except also catches the SystemExit above,
            # duplicating the error report (see obj_from_url).
            sys.stderr.write("ERROR (%s): %s\n" %(error.code, error.read()))
            sys.exit(1)
    if not res:
        # NOTE(review): urlopen raises instead of returning a falsy value;
        # this guard is likely unreachable.
        sys.stderr.write("ERROR: no results returned\n")
        sys.exit(1)
    # stream in fixed-size chunks so huge downloads are never held in memory
    while True:
        chunk = res.read(8192)
        if not chunk:
            break
        safe_print(chunk)
# safe handling of stdout for piping
def safe_print(text):
    """Write `text` to stdout, replacing non-ASCII characters with '?' and
    silently coping with a downstream pipe that has been closed."""
    ascii_only = "".join([ch if ord(ch) < 128 else '?' for ch in text])
    try:
        sys.stdout.write(ascii_only)
    except IOError:
        # stdout is closed, no point in continuing
        # Attempt to close them explicitly to prevent cleanup problems:
        for stream in (sys.stdout, sys.stderr):
            try:
                stream.close()
            except IOError:
                pass
# transform sparse matrix to dense matrix (2D array)
def sparse_to_dense(sMatrix, rmax, cmax):
    """Expand a sparse [row, col, value] triple list into an rmax x cmax
    dense 2D list; cells not mentioned are 0."""
    dense = [[0] * cmax for _ in range(rmax)]
    for row, col, value in sMatrix:
        dense[row][col] = value
    return dense
# transform BIOM format to tabbed table
def biom_to_tab(biom, hdl, rows=None, use_id=True):
    """Write a BIOM table to handle `hdl` as TSV (header = column ids).

    rows   : optional whitelist of row labels to emit
    use_id : if False, label rows by their last ontology term instead of id"""
    if biom['matrix_type'] == 'sparse':
        matrix = sparse_to_dense(biom['data'], biom['shape'][0], biom['shape'][1])
    else:
        matrix = biom['data']
    hdl.write("\t%s\n" % "\t".join(col['id'] for col in biom['columns']))
    for idx, values in enumerate(matrix):
        meta = biom['rows'][idx]
        name = meta['id'] if use_id else meta['metadata']['ontology'][-1]
        if rows and (name not in rows):
            continue
        try:
            hdl.write("%s\t%s\n" % (name, "\t".join(str(v) for v in values)))
        except:
            # closed pipe / handle: best-effort close, keep looping
            try:
                hdl.close()
            except:
                pass
# transform BIOM format to matrix in json format
def biom_to_matrix(biom):
    """Split a BIOM table into (row ids, column ids, dense data matrix)."""
    row_ids = [r['id'] for r in biom['rows']]
    col_ids = [c['id'] for c in biom['columns']]
    if biom['matrix_type'] == 'sparse':
        values = sparse_to_dense(biom['data'], len(row_ids), len(col_ids))
    else:
        values = biom['data']
    return row_ids, col_ids, values
# transform tabbed table to matrix in json format
def tab_to_matrix(indata):
    """Parse a TSV string into (row labels, column labels, data rows).

    The first line supplies the column labels; every later line must have
    exactly one leading label plus len(cols) values or it is skipped."""
    lines = indata.split('\n')
    cols = lines[0].strip().split('\t')
    rows, data = [], []
    for line in lines[1:]:
        fields = line.strip().split('\t')
        label = fields.pop(0)
        if len(fields) == len(cols):
            rows.append(label)
            data.append(fields)
    return rows, cols, data
# return a subselection of matrix columns
def sub_matrix(matrix, ncols):
    """Return a copy of `matrix` keeping only its first `ncols` columns."""
    return [row[:ncols] for row in matrix]
# return KBase id for MG-RAST id
def mgid_to_kbid(mgid):
    """Look up the KBase id for a single MG-RAST id (None if unmapped)."""
    id_map = kbid_lookup([mgid], reverse=True)
    return id_map.get(mgid)
# return MG-RAST id for given KBase id
def kbid_to_mgid(kbid):
    """Look up the MG-RAST id for one KBase id; exits if it cannot be mapped."""
    id_map = kbid_lookup([kbid])
    try:
        return id_map[kbid]
    except KeyError:
        sys.stderr.write("ERROR: '%s' not a valid KBase ID\n" %kbid)
        sys.exit(1)
# return list of MG-RAST ids for given list of KBase ids
# handles mixed ids / all mgrast ids
def kbids_to_mgids(kbids):
    """Map each KBase id to its MG-RAST id; unmapped ids pass through as-is."""
    id_map = kbid_lookup(kbids)
    return [id_map.get(kid, kid) for kid in kbids]
# return map (KBase id -> MG-RAST id) for given list of KBase ids
# or reverse
def kbid_lookup(ids, reverse=False):
    """POST the id list to the MG-RAST job service and return the id map."""
    endpoint = 'mg2kb' if reverse else 'kb2mg'
    payload = json.dumps({'ids': ids}, separators=(',',':'))
    return obj_from_url(API_URL+'/job/'+endpoint, data=payload)['data']
def get_auth_token(opts):
    """Resolve a KBase auth token: environment first, then --token, then a
    --user/--passwd login; returns None if nothing was supplied."""
    if 'KB_AUTH_TOKEN' in os.environ:
        return os.environ['KB_AUTH_TOKEN']
    if opts.token:
        return opts.token
    if not (opts.user or opts.passwd):
        return None
    if opts.user and opts.passwd:
        return token_from_login(opts.user, opts.passwd)
    # exactly one of user/passwd given
    sys.stderr.write("ERROR: both username and password are required\n")
    sys.exit(1)
def token_from_login(user, passwd):
    """Fetch an auth token by logging in with basic-auth style credentials
    (Python 2: b64encode operates on str)."""
    credentials = base64.b64encode('%s:%s' %(user, passwd)).replace('\n', '')
    data = obj_from_url(API_URL, auth='kbgo4711'+credentials)
    return data['token']
def random_str(size=8):
    """Return a random alphanumeric string of length `size`."""
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(size))
# this is a bit of a hack, need to redo using rpy2
def execute_r(cmd, debug=False):
    """Pipe an R expression into a vanilla R session via the shell.

    In debug mode the shell command is printed instead of executed.
    Exits the process if R writes anything to stderr.
    (Python 2 code: print statement.)"""
    # NOTE(review): cmd is interpolated into a shell string (shell=True);
    # callers must not pass untrusted input.
    r_cmd = "echo '%s' | R --vanilla --slave --silent"%cmd
    if debug:
        print r_cmd
    else:
        process = subprocess.Popen(r_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        output, error = process.communicate()
        if error:
            sys.stderr.write(error)
            sys.exit(1)
Updated the metagenome support library.
import os
import sys
import time
import copy
import urllib2
import base64
import json
import string
import random
import subprocess
# don't buffer stdout
#sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
# MG-RAST API version and base endpoint used by every helper in this module.
VERSION = '1'
API_URL = "http://api.metagenomics.anl.gov/"+VERSION
# Library authors (informational only).
AUTH_LIST = "Jared Bischof, Travis Harrison, Folker Meyer, Tobias Paczian, Andreas Wilke"
# Metadata fields accepted as filters by the MG-RAST search API.
SEARCH_FIELDS = ["function", "organism", "md5", "name", "biome", "feature", "material", "country", "location", "longitude", "latitude", "created", "env_package_type", "project_id", "project_name", "PI_firstname", "PI_lastname", "sequence_type", "seq_method", "collection_date"]
# return python struct from JSON output of asynchronous MG-RAST API
def async_rest_api(url, auth=None, data=None, debug=False, delay=15):
submit = obj_from_url(url, auth=auth, data=data, debug=debug)
if not (('status' in submit) and (submit['status'] == 'Submitted') and ('url' in submit)):
sys.stderr.write("ERROR: return data invalid format\n:%s"%json.dumps(submit))
result = obj_from_url(submit['url'], debug=debug)
while result['status'] != 'done':
if debug:
print "waiting %d seconds ..."%delay
time.sleep(delay)
result = obj_from_url(submit['url'], debug=debug)
if 'ERROR' in result['data']:
sys.stderr.write("ERROR: %s\n" %result['data']['ERROR'])
sys.exit(1)
return result['data']
# return python struct from JSON output of MG-RAST API
def obj_from_url(url, auth=None, data=None, debug=False):
    """Request `url` and return the decoded JSON body as a Python object.

    auth  : value sent in the 'Auth' header
    data  : request body; when present the request becomes a POST with a
            JSON content type
    debug : echo data/headers/url before sending
    Exits the process on HTTP errors or empty/invalid/ERROR responses.
    (Python 2 code: urllib2 and print statements.)"""
    header = {'Accept': 'application/json'}
    if auth:
        header['Auth'] = auth
    if data:
        header['Content-Type'] = 'application/json'
    if debug:
        if data:
            print "data:\t"+data
        print "header:\t"+json.dumps(header)
        print "url:\t"+url
    try:
        req = urllib2.Request(url, data, headers=header)
        res = urllib2.urlopen(req)
    except urllib2.HTTPError, error:
        try:
            eobj = json.loads(error.read())
            sys.stderr.write("ERROR (%s): %s\n" %(error.code, eobj['ERROR']))
            sys.exit(1)
        except:
            # NOTE(review): this bare except also catches the SystemExit
            # raised just above, so a JSON error body gets reported twice
            # (second time with an already-consumed read() -> empty string).
            sys.stderr.write("ERROR (%s): %s\n" %(error.code, error.read()))
            sys.exit(1)
    if not res:
        # NOTE(review): urlopen raises instead of returning a falsy value;
        # this guard is likely unreachable.
        sys.stderr.write("ERROR: no results returned\n")
        sys.exit(1)
    obj = json.loads(res.read())
    if obj is None:
        sys.stderr.write("ERROR: return structure not valid json format\n")
        sys.exit(1)
    if len(obj.keys()) == 0:
        sys.stderr.write("ERROR: no data available\n")
        sys.exit(1)
    if 'ERROR' in obj:
        sys.stderr.write("ERROR: %s\n" %obj['ERROR'])
        sys.exit(1)
    return obj
# print to stdout results of MG-RAST API
def stdout_from_url(url, auth=None, data=None, debug=False):
    """Stream the plain-text response of `url` to stdout in 8 KB chunks.

    Same request conventions as obj_from_url, but asks for text/plain and
    writes through safe_print instead of decoding JSON.
    (Python 2 code: urllib2 and print statements.)"""
    header = {'Accept': 'text/plain'}
    if auth:
        header['Auth'] = auth
    if data:
        header['Content-Type'] = 'application/json'
    if debug:
        if data:
            print "data:\t"+data
        print "header:\t"+json.dumps(header)
        print "url:\t"+url
    try:
        req = urllib2.Request(url, data, headers=header)
        res = urllib2.urlopen(req)
    except urllib2.HTTPError, error:
        try:
            eobj = json.loads(error.read())
            sys.stderr.write("ERROR (%s): %s\n" %(error.code, eobj['ERROR']))
            sys.exit(1)
        except:
            # NOTE(review): bare except also catches the SystemExit above,
            # duplicating the error report (see obj_from_url).
            sys.stderr.write("ERROR (%s): %s\n" %(error.code, error.read()))
            sys.exit(1)
    if not res:
        # NOTE(review): urlopen raises instead of returning a falsy value;
        # this guard is likely unreachable.
        sys.stderr.write("ERROR: no results returned\n")
        sys.exit(1)
    # stream in fixed-size chunks so huge downloads are never held in memory
    while True:
        chunk = res.read(8192)
        if not chunk:
            break
        safe_print(chunk)
# safe handling of stdout for piping
def safe_print(text):
    """Write `text` to stdout, replacing non-ASCII characters with '?' and
    silently coping with a downstream pipe that has been closed."""
    ascii_only = "".join([ch if ord(ch) < 128 else '?' for ch in text])
    try:
        sys.stdout.write(ascii_only)
    except IOError:
        # stdout is closed, no point in continuing
        # Attempt to close them explicitly to prevent cleanup problems:
        for stream in (sys.stdout, sys.stderr):
            try:
                stream.close()
            except IOError:
                pass
# transform sparse matrix to dense matrix (2D array)
def sparse_to_dense(sMatrix, rmax, cmax):
    """Expand a sparse [row, col, value] triple list into an rmax x cmax
    dense 2D list; cells not mentioned are 0."""
    dense = [[0] * cmax for _ in range(rmax)]
    for row, col, value in sMatrix:
        dense[row][col] = value
    return dense
# transform BIOM format to tabbed table
def biom_to_tab(biom, hdl, rows=None, use_id=True, col_name=False):
    """Write a BIOM table to handle `hdl` as TSV.

    rows     : optional whitelist of row labels to emit
    use_id   : if False, append ':<last ontology term>' to each row id
    col_name : header uses the columns' 'name' field instead of 'id'"""
    if biom['matrix_type'] == 'sparse':
        matrix = sparse_to_dense(biom['data'], biom['shape'][0], biom['shape'][1])
    else:
        matrix = biom['data']
    label_key = 'name' if col_name else 'id'
    hdl.write("\t%s\n" % "\t".join(col[label_key] for col in biom['columns']))
    for idx, values in enumerate(matrix):
        meta = biom['rows'][idx]
        label = meta['id']
        if (not use_id) and ('ontology' in meta['metadata']):
            label += ':' + meta['metadata']['ontology'][-1]
        if rows and (label not in rows):
            continue
        try:
            hdl.write("%s\t%s\n" % (label, "\t".join(str(v) for v in values)))
        except:
            # closed pipe / handle: best-effort close, keep looping
            try:
                hdl.close()
            except:
                pass
# retrieve a list of metadata values from biom file columns for given term
# order is same as columns
def metadata_from_biom(biom, term):
    """Return `term`'s value from each column's metadata, 'null' when the
    column has no metadata or no entry for the term; order matches
    biom['columns']."""
    vals = []
    for col in biom['columns']:
        value = 'null'
        if ('metadata' in col) and col['metadata']:
            # .values() instead of the Python-2-only .itervalues():
            # identical iteration order and works on Python 3 as well.
            for v in col['metadata'].values():
                if ('data' in v) and (term in v['data']):
                    value = v['data'][term]
        vals.append(value)
    return vals
# merge two BIOM objects
def merge_biom(b1, b2):
    """input: 2 biom objects of same 'type', 'matrix_element_type', and 'matrix_element_value'
    return: merged biom object, duplicate columns skipped, duplicate rows added"""
    # validate that both exist and describe the same kind of matrix
    if not (b1 and b2 and (b1['type'] == b2['type']) and (b1['matrix_element_type'] == b2['matrix_element_type']) and (b1['matrix_element_value'] == b2['matrix_element_value'])):
        sys.stderr.write("The inputed biom objects are not compatable for merging\n")
        return None
    # build the merged object, seeded with b1's contents (always stored dense)
    mBiom = { "generated_by": b1['generated_by'],
               "matrix_type": 'dense',
               # Bug fix: strftime/localtime were called unqualified, but only
               # the `time` module is imported -> NameError at runtime.
               "date": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime()),
               "columns": copy.deepcopy(b1['columns']),
               "rows": copy.deepcopy(b1['rows']),
               "data": sparse_to_dense(b1['data'], b1['shape'][0], b1['shape'][1]) if b1['matrix_type'] == 'sparse' else copy.deepcopy(b1['data']),
               "shape": [],
               "matrix_element_value": b1['matrix_element_value'],
               "matrix_element_type": b1['matrix_element_type'],
               "format_url": "http://biom-format.org",
               "format": "Biological Observation Matrix 1.0",
               "id": b1['id']+'_'+b2['id'],
               "type": b1['type'] }
    # make sure we are dense
    if b2['matrix_type'] == 'sparse':
        # Bug fix: previously densified b1's data with b2's shape.
        b2['data'] = sparse_to_dense(b2['data'], b2['shape'][0], b2['shape'][1])
    # get lists of ids
    c1_id = [c['id'] for c in b1['columns']]
    r1_id = [r['id'] for r in b1['rows']]
    r2_id = [r['id'] for r in b2['rows']]
    c2_keep = 0
    # merge columns, skip duplicate by id
    for c in b2['columns']:
        if c['id'] not in c1_id:
            mBiom['columns'].append(c)
            c2_keep += 1
    # merge b2-cols into b1-rows
    for i, r in enumerate(mBiom['rows']):
        add_row = []
        try:
            # b1-row is in b2, use those values
            r2_index = r2_id.index(r['id'])
            for j, c in enumerate(b2['columns']):
                if c['id'] not in c1_id:
                    add_row.append(b2['data'][r2_index][j])
        except:
            # b1-row not in b2, add 0's
            add_row = [0]*c2_keep
        mBiom['data'][i].extend(add_row)
    # add b2-rows that are not in b1
    for i, r in enumerate(b2['rows']):
        if r['id'] in r1_id:
            continue
        # b1-col all 0's
        add_row = [0]*b1['shape'][1]
        # add b2-cols
        for j, c in enumerate(b2['columns']):
            if c['id'] not in c1_id:
                add_row.append(b2['data'][i][j])
        mBiom['rows'].append(r)
        mBiom['data'].append(add_row)
    mBiom['shape'] = [ len(mBiom['rows']), len(mBiom['columns']) ]
    return mBiom
# transform BIOM format to matrix in json format
def biom_to_matrix(biom, col_name=False, sig_stats=False):
    """Split a BIOM table into (row ids, column labels, dense data).

    col_name  : label columns by 'name' instead of 'id'
    sig_stats : append each row's significance stats as extra columns,
                with their labels taken from the first row's metadata"""
    label_key = 'name' if col_name else 'id'
    cols = [c[label_key] for c in biom['columns']]
    rows = [r['id'] for r in biom['rows']]
    if biom['matrix_type'] == 'sparse':
        data = sparse_to_dense(biom['data'], len(rows), len(cols))
    else:
        data = biom['data']
    if sig_stats and ('significance' in biom['rows'][0]['metadata']) and (len(biom['rows'][0]['metadata']['significance']) > 0):
        cols.extend(s[0] for s in biom['rows'][0]['metadata']['significance'])
        for i, r in enumerate(biom['rows']):
            data[i].extend(s[1] for s in r['metadata']['significance'])
    return rows, cols, data
# transform tabbed table to matrix in json format
def tab_to_matrix(indata):
    """Parse a TSV string into (row labels, column labels, data rows).

    The first line supplies the column labels; every later line must have
    exactly one leading label plus len(cols) values or it is skipped."""
    lines = indata.split('\n')
    cols = lines[0].strip().split('\t')
    rows, data = [], []
    for line in lines[1:]:
        fields = line.strip().split('\t')
        label = fields.pop(0)
        if len(fields) == len(cols):
            rows.append(label)
            data.append(fields)
    return rows, cols, data
# return a subselection of matrix columns
def sub_matrix(matrix, ncols):
    """Return a copy of `matrix` keeping only its first `ncols` columns."""
    return [row[:ncols] for row in matrix]
# return KBase id for MG-RAST id
def mgid_to_kbid(mgid):
    """Look up the KBase id for a single MG-RAST id (None if unmapped)."""
    id_map = kbid_lookup([mgid], reverse=True)
    return id_map.get(mgid)
# return MG-RAST id for given KBase id
def kbid_to_mgid(kbid):
    """Look up the MG-RAST id for one KBase id; exits if it cannot be mapped."""
    id_map = kbid_lookup([kbid])
    try:
        return id_map[kbid]
    except KeyError:
        sys.stderr.write("ERROR: '%s' not a valid KBase ID\n" %kbid)
        sys.exit(1)
# return list of MG-RAST ids for given list of KBase ids
# handles mixed ids / all mgrast ids
def kbids_to_mgids(kbids):
    """Map each KBase id to its MG-RAST id; unmapped ids pass through as-is."""
    id_map = kbid_lookup(kbids)
    return [id_map.get(kid, kid) for kid in kbids]
# return map (KBase id -> MG-RAST id) for given list of KBase ids
# or reverse
def kbid_lookup(ids, reverse=False):
    """POST the id list to the MG-RAST job service and return the id map."""
    endpoint = 'mg2kb' if reverse else 'kb2mg'
    payload = json.dumps({'ids': ids}, separators=(',',':'))
    return obj_from_url(API_URL+'/job/'+endpoint, data=payload)['data']
def get_auth_token(opts):
    """Resolve a KBase auth token: environment first, then --token, then a
    --user/--passwd login; returns None if nothing was supplied."""
    if 'KB_AUTH_TOKEN' in os.environ:
        return os.environ['KB_AUTH_TOKEN']
    if opts.token:
        return opts.token
    if not (opts.user or opts.passwd):
        return None
    if opts.user and opts.passwd:
        return token_from_login(opts.user, opts.passwd)
    # exactly one of user/passwd given
    sys.stderr.write("ERROR: both username and password are required\n")
    sys.exit(1)
def token_from_login(user, passwd):
    """Fetch an auth token by logging in with basic-auth style credentials
    (Python 2: b64encode operates on str)."""
    credentials = base64.b64encode('%s:%s' %(user, passwd)).replace('\n', '')
    data = obj_from_url(API_URL, auth='kbgo4711'+credentials)
    return data['token']
def random_str(size=8):
    """Return a random alphanumeric string of length `size`."""
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(size))
# this is a bit of a hack, need to redo using rpy2
def execute_r(cmd, debug=False):
    """Pipe an R expression into a vanilla R session via the shell.

    In debug mode the shell command is printed instead of executed.
    Exits the process if R writes anything to stderr.
    (Python 2 code: print statement.)"""
    # NOTE(review): cmd is interpolated into a shell string (shell=True);
    # callers must not pass untrusted input.
    r_cmd = "echo '%s' | R --vanilla --slave --silent"%cmd
    if debug:
        print r_cmd
    else:
        process = subprocess.Popen(r_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        output, error = process.communicate()
        if error:
            sys.stderr.write(error)
            sys.exit(1)
|
# -*- coding: utf-8 -*-
"""
Elements
------
A generic class for creating Elements.
"""
import warnings
from uuid import uuid4
from jinja2 import Environment, PackageLoader, Template
# Shared jinja2 environment that loads templates from the folium package.
ENV = Environment(loader=PackageLoader('folium', 'templates'))
from collections import OrderedDict
import json
from .six import text_type, binary_type, urlopen
def _camelify(out):
return (''.join(["_"+x.lower() if i<len(out)-1 and x.isupper() and out[i+1].islower()
else x.lower()+"_" if i<len(out)-1 and x.islower() and out[i+1].isupper()
else x.lower() for i,x in enumerate(list(out))])).lstrip('_').replace('__','_')
class Element(object):
    """Basic Element object that does nothing.
    Other Elements may inherit from this one.

    Each Element owns an ordered mapping of named children and a jinja2
    template used by render()."""
    def __init__(self, template=None, template_name=None):
        """Creates a Element.

        template      : raw jinja2 template string used to render this element
        template_name : name of a template from folium's package loader
        When neither is given, rendering simply concatenates the rendered
        children."""
        self._name = 'Element'
        self._id = uuid4().hex
        self._env = ENV
        self._children = OrderedDict()
        self._parent = None
        self._template = Template(template) if template is not None\
            else ENV.get_template(template_name) if template_name is not None\
            else Template(u"""
        {% for name, element in this._children.items() %}
            {{element.render(**kwargs)}}
        {% endfor %}
        """)

    def get_name(self):
        """Return a unique snake_case identifier for this element."""
        return _camelify(self._name) + '_' + self._id

    def add_children(self, child, name=None, index=None):
        """Add a children."""
        if name is None:
            name = child.get_name()
        if index is None:
            self._children[name] = child
        else:
            items = [item for item in self._children.items() if item[0] != name]
            items.insert(int(index), (name, child))
            # Bug fix: previously assigned the plain list of pairs, after
            # which _children no longer supported dict-style access
            # (e.g. `self._children[name] = child` or `.items()`).
            self._children = OrderedDict(items)
        child._parent = self

    def add_to(self, parent, name=None, index=None):
        """Add element to a parent."""
        parent.add_children(self, name=name, index=index)

    def to_dict(self, depth=-1, ordered=True, **kwargs):
        """Serialize this element (and `depth` levels of children) to a dict."""
        dict_fun = OrderedDict if ordered else dict
        out = dict_fun()
        out['name'] = self._name
        out['id'] = self._id
        if depth != 0:
            out['children'] = dict_fun([(name, child.to_dict(depth=depth-1))
                                        for name, child in self._children.items()])
        return out

    def to_json(self, depth=-1, **kwargs):
        """Serialize to a JSON string; kwargs are passed to json.dumps."""
        return json.dumps(self.to_dict(depth=depth, ordered=True), **kwargs)

    def get_root(self):
        """Returns the root of the elements tree."""
        if self._parent is None:
            return self
        else:
            return self._parent.get_root()

    def render(self, **kwargs):
        """Render this element's template against itself."""
        return self._template.render(this=self, kwargs=kwargs)
# (name, url) pairs of javascript resources injected into every Figure header.
_default_js = [
    ('leaflet',
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.3/leaflet.js"),
    ('jquery',
     "https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"),
    ('bootstrap',
     "https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/js/bootstrap.min.js"),
    ('awesome_markers',
     "https://rawgithub.com/lvoogdt/Leaflet.awesome-markers/2.0/develop/dist/leaflet.awesome-markers.js"),
    ('marker_cluster_src',
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/leaflet.markercluster-src.js"),
    ('marker_cluster',
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/leaflet.markercluster.js"),
    ]
# (name, url) pairs of css resources injected into every Figure header.
_default_css = [
    ("leaflet_css",
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.3/leaflet.css"),
    ("bootstrap_css",
     "https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css"),
    ("bootstrap_theme_css",
     "https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap-theme.min.css"),
    ("awesome_markers_font_css",
     "https://maxcdn.bootstrapcdn.com/font-awesome/4.1.0/css/font-awesome.min.css"),
    ("awesome_markers_css",
     "https://rawgit.com/lvoogdt/Leaflet.awesome-markers/2.0/develop/dist/leaflet.awesome-markers.css"),
    ("marker_cluster_default_css",
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/MarkerCluster.Default.css"),
    ("marker_cluster_css",
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/MarkerCluster.css"),
    ("awesome_rotate_css",
     "https://raw.githubusercontent.com/python-visualization/folium/master/folium/templates/leaflet.awesome.rotate.css"),
    ]
class Figure(Element):
    """Root element producing a complete HTML page with separate header,
    body and script sections, pre-loaded with the default JS/CSS assets."""
    def __init__(self):
        super(Figure, self).__init__()
        self._name = 'Figure'
        # The three page sections are themselves Elements so that any child
        # can later add content to them by name.
        self.header = Element()
        self.html = Element()
        self.script = Element()
        #self.axes = []
        self.header._parent = self
        self.html._parent = self
        self.script._parent = self
        self._template = Template(u"""
<!DOCTYPE html>
<head>
    {{this.header.render(**kwargs)}}
</head>
<body>
    {{this.html.render(**kwargs)}}
</body>
<script>
    {{this.script.render(**kwargs)}}
</script>
""")
        # Create the meta tag
        self.header.add_children(Element(
            '<meta http-equiv="content-type" content="text/html; charset=UTF-8" />'),
            name='meta_http')
        # Import Javascripts
        for name, url in _default_js:
            self.header.add_children(JavascriptLink(url), name=name)
        # Import Css
        for name, url in _default_css:
            self.header.add_children(CssLink(url), name=name)
        self.header.add_children(Element("""
            <style>
                html, body {
                    width: 100%;
                    height: 100%;
                    margin: 0;
                    padding: 0;
                    }
                #map {
                    position:absolute;
                    top:0;
                    bottom:0;
                    right:0;
                    left:0;
                    }
            </style>
            """), name='css_style')

    def to_dict(self, depth=-1, **kwargs):
        """Element.to_dict extended with the header/html/script sections."""
        out = super(Figure, self).to_dict(depth=depth, **kwargs)
        out['header'] = self.header.to_dict(depth=depth-1, **kwargs)
        out['html'] = self.html.to_dict(depth=depth-1, **kwargs)
        out['script'] = self.script.to_dict(depth=depth-1, **kwargs)
        return out

    def render(self, **kwargs):
        """Render all children (they populate the sections), then the page."""
        for name, child in self._children.items():
            child.render(**kwargs)
        return self._template.render(this=self, kwargs=kwargs)

    def _repr_html_(self, figsize=(17,10), **kwargs):
        """Displays the Figure in a Jupyter notebook.

        Parameters
        ----------
        self : folium.Map object
            The map you want to display
        figsize : tuple of length 2, default (17,10)
            The size of the output you expect in inches.
            Output is 60dpi so that the output has same size as a
            matplotlib figure with the same figsize.
        """
        html = self.render(**kwargs)
        width, height = figsize
        # NOTE(review): .encode('base64') is a Python-2-only codec; this
        # method will not work unchanged on Python 3.
        iframe = '<iframe src="{html}" width="{width}px" height="{height}px"></iframe>'\
            .format(\
                html = "data:text/html;base64,"+html.encode('utf8').encode('base64'),
                #html = self.HTML.replace('"','&quot;'),
                width = int(60.*width),
                height= int(60.*height),
            )
        return iframe

    def add_subplot(self, x, y, n, margin=0.05):
        """Create and attach a Div occupying cell `n` (1-based, row-major)
        of an x-by-y grid, shrunk by `margin` on each side; returns the Div."""
        width = 1./y
        height = 1./x
        left = ((n-1)%y)*width
        top = ((n-1)//y)*height
        left = left+width*margin
        top = top+height*margin
        width = width*(1-2.*margin)
        height = height*(1-2.*margin)
        div = Div(position='absolute',
                  width="{}%".format(100.*width),
                  height="{}%".format(100.*height),
                  left="{}%".format(100.*left),
                  top="{}%".format(100.*top),
                  )
        self.add_children(div)
        return div
class Link(Element):
    """An Element referencing an external document by url, whose content
    can optionally be downloaded and embedded."""
    def get_code(self):
        """Return the linked content, downloading and caching it on first use."""
        if self.code is not None:
            return self.code
        self.code = urlopen(self.url).read()
        return self.code

    def to_dict(self, depth=-1, **kwargs):
        """Element.to_dict plus the url field."""
        out = super(Link, self).to_dict(depth=-1, **kwargs)
        out['url'] = self.url
        return out
class JavascriptLink(Link):
    def __init__(self, url, download=False):
        """Link to (or optionally embed) an external javascript resource.

        Parameters
        ----------
        url : str
            The url to be linked
        download : bool, default False
            Whether the target document shall be loaded right now.
        """
        super(JavascriptLink, self).__init__()
        self._name = 'JavascriptLink'
        self._template = Template(u"""
        {% if kwargs.get("embedded",False) %}
            <script>{{this.get_code()}}</script>
        {% else %}
            <script src="{{this.url}}"></script>
        {% endif %}
        """)
        self.url = url
        self.code = None
        if download:
            self.get_code()
class CssLink(Link):
    def __init__(self, url, download=False):
        """Link to (or optionally embed) an external css resource.

        Parameters
        ----------
        url : str
            The url to be linked
        download : bool, default False
            Whether the target document shall be loaded right now.
        """
        super(CssLink, self).__init__()
        self._name = 'CssLink'
        self._template = Template(u"""
        {% if kwargs.get("embedded",False) %}
            <style>{{this.get_code()}}</style>
        {% else %}
            <link rel="stylesheet" href="{{this.url}}" />
        {% endif %}
        """)
        self.url = url
        self.code = None
        if download:
            self.get_code()
class Div(Figure):
    """A positioned <div> container for other elements.

    NOTE(review): although Div subclasses Figure, __init__ deliberately
    calls super(Figure, self).__init__() -- i.e. Element.__init__ -- so a
    Div does NOT receive Figure's <head>/JS/CSS boilerplate."""
    def __init__(self, width='100%', height='100%',
                 left="0%", top="0%", position='relative'):
        """Create a positioned Div that can hold child elements.
        """
        super(Figure, self).__init__()
        self._name = 'Div'

        # Size Parameters.
        self.width = _parse_size(width)
        self.height = _parse_size(height)
        self.left = _parse_size(left)
        self.top = _parse_size(top)
        self.position = position

        # Same three sections as a Figure, but only html has content here.
        self.header = Element()
        self.html = Element("""
        {% for name, element in this._children.items() %}
            {{element.render(**kwargs)}}
        {% endfor %}
        """)
        self.script = Element()

        self.header._parent = self
        self.html._parent = self
        self.script._parent = self

        self._template = Template(u"""
        {% macro header(this, kwargs) %}
            <style> #{{this.get_name()}} {
                position : {{this.position}};
                width : {{this.width[0]}}{{this.width[1]}};
                height: {{this.height[0]}}{{this.height[1]}};
                left: {{this.left[0]}}{{this.left[1]}};
                top: {{this.top[0]}}{{this.top[1]}};
            </style>
        {% endmacro %}
        {% macro html(this, kwargs) %}
            <div id="{{this.get_name()}}">
                {{this.html.render(**kwargs)}}
            </div>
        {% endmacro %}
        """)

    def get_root(self):
        # Overrides Element.get_root: a Div acts as a local root and stops
        # the upward traversal here.
        return self

    def render(self, **kwargs):
        """Render children, then merge this Div's header/script children and
        macro output into the enclosing Figure's sections."""
        figure = self._parent
        assert isinstance(figure,Figure), ("You cannot render this Element "
                                           "if it's not in a Figure.")

        for name, element in self._children.items():
            element.render(**kwargs)

        for name, element in self.header._children.items():
            figure.header.add_children(element, name=name)

        for name, element in self.script._children.items():
            figure.script.add_children(element, name=name)

        header = self._template.module.__dict__.get('header',None)
        if header is not None:
            figure.header.add_children(Element(header(self, kwargs)),
                                       name=self.get_name())

        html = self._template.module.__dict__.get('html',None)
        if html is not None:
            figure.html.add_children(Element(html(self, kwargs)),
                                     name=self.get_name())

        script = self._template.module.__dict__.get('script',None)
        if script is not None:
            figure.script.add_children(Element(script(self, kwargs)),
                                       name=self.get_name())

    def _repr_html_(self, figsize=(17,10), **kwargs):
        """Displays the Map in a Jupyter notebook.

        Parameters
        ----------
        self : folium.Map object
            The map you want to display
        figsize : tuple of length 2, default (17,10)
            The size of the output you expect in inches.
            Output is 60dpi so that the output has same size as a
            matplotlib figure with the same figsize.
        """
        if self._parent is None:
            # Wrap in a throwaway Figure for display, then detach again.
            self.add_to(Figure())
            out = self._parent._repr_html_(figsize=figsize, **kwargs)
            self._parent = None
        else:
            out = self._parent._repr_html_(figsize=figsize, **kwargs)
        return out
class MacroElement(Element):
    """This is a parent class for Elements defined by a macro template.
    To compute your own element, all you have to do is:
    * To inherit from this class
    * Overwrite the '_name' attribute
    * Overwrite the '_template' attribute with something of the form:
        {% macro header(this, kwargs) %}
            ...
        {% endmacro %}

        {% macro html(this, kwargs) %}
            ...
        {% endmacro %}

        {% macro script(this, kwargs) %}
            ...
        {% endmacro %}
    """
    def __init__(self):
        """Create a MacroElement with an empty template."""
        super(MacroElement, self).__init__()
        self._name = 'MacroElement'
        self._template = Template(u"")

    def render(self, **kwargs):
        # Each macro defined by the template is rendered into the matching
        # section (header/html/script) of the root Figure.
        figure = self.get_root()
        assert isinstance(figure,Figure), ("You cannot render this Element "
                                           "if it's not in a Figure.")

        header = self._template.module.__dict__.get('header',None)
        if header is not None:
            figure.header.add_children(Element(header(self, kwargs)),
                                       name=self.get_name())

        html = self._template.module.__dict__.get('html',None)
        if html is not None:
            figure.html.add_children(Element(html(self, kwargs)),
                                     name=self.get_name())

        script = self._template.module.__dict__.get('script',None)
        if script is not None:
            figure.script.add_children(Element(script(self, kwargs)),
                                       name=self.get_name())

        for name, element in self._children.items():
            element.render(**kwargs)
def _parse_size(value):
try:
if isinstance(value, int) or isinstance(value, float):
value_type = 'px'
value = float(value)
assert value > 0
else:
value_type = '%'
value = float(value.strip('%'))
assert 0 <= value <= 100
except:
msg = "Cannot parse value {!r} as {!r}".format
raise ValueError(msg(value, value_type))
return value, value_type
class Map(MacroElement):
def __init__(self, location=None, width='100%', height='100%',
left="0%", top="0%", position='relative',
tiles='OpenStreetMap', API_key=None, max_zoom=18, min_zoom=1,
zoom_start=10, attr=None, min_lat=-90, max_lat=90,
min_lon=-180, max_lon=180):
"""Create a Map with Folium and Leaflet.js
Generate a base map of given width and height with either default
tilesets or a custom tileset URL. The following tilesets are built-in
to Folium. Pass any of the following to the "tiles" keyword:
- "OpenStreetMap"
- "MapQuest Open"
- "MapQuest Open Aerial"
- "Mapbox Bright" (Limited levels of zoom for free tiles)
- "Mapbox Control Room" (Limited levels of zoom for free tiles)
- "Stamen" (Terrain, Toner, and Watercolor)
- "Cloudmade" (Must pass API key)
- "Mapbox" (Must pass API key)
- "CartoDB" (positron and dark_matter)
You can pass a custom tileset to Folium by passing a Leaflet-style
URL to the tiles parameter:
http://{s}.yourtiles.com/{z}/{x}/{y}.png
Parameters
----------
location: tuple or list, default None
Latitude and Longitude of Map (Northing, Easting).
width: pixel int or percentage string (default: '100%')
Width of the map.
height: pixel int or percentage string (default: '100%')
Height of the map.
tiles: str, default 'OpenStreetMap'
Map tileset to use. Can use defaults or pass a custom URL.
API_key: str, default None
API key for Cloudmade or Mapbox tiles.
max_zoom: int, default 18
Maximum zoom depth for the map.
zoom_start: int, default 10
Initial zoom level for the map.
attr: string, default None
Map tile attribution; only required if passing custom tile URL.
Returns
-------
Folium Map Object
Examples
--------
>>>map = folium.Map(location=[45.523, -122.675], width=750, height=500)
>>>map = folium.Map(location=[45.523, -122.675],
tiles='Mapbox Control Room')
>>>map = folium.Map(location=(45.523, -122.675), max_zoom=20,
tiles='Cloudmade', API_key='YourKey')
>>>map = folium.Map(location=[45.523, -122.675], zoom_start=2,
tiles=('http://{s}.tiles.mapbox.com/v3/'
'mapbox.control-room/{z}/{x}/{y}.png'),
attr='Mapbox attribution')
"""
super(Map, self).__init__()
self._name = 'Map'
if not location:
# If location is not passed, we center the map at 0,0 and ignore zoom
self.location = [0, 0]
self.zoom_start = min_zoom
else:
self.location = location
self.zoom_start = zoom_start
# Map Size Parameters.
self.width = _parse_size(width)
self.height = _parse_size(height)
self.left = _parse_size(left)
self.top = _parse_size(top)
self.position = position
self.min_lat = min_lat
self.max_lat = max_lat
self.min_lon = min_lon
self.max_lon = max_lon
self.add_tile_layer(tiles=tiles, min_zoom=min_zoom, max_zoom=max_zoom,
attr=attr, API_key=API_key)
self._template = Template(u"""
{% macro header(this, kwargs) %}
<style> #{{this.get_name()}} {
position : {{this.position}};
width : {{this.width[0]}}{{this.width[1]}};
height: {{this.height[0]}}{{this.height[1]}};
left: {{this.left[0]}}{{this.left[1]}};
top: {{this.top[0]}}{{this.top[1]}};
</style>
{% endmacro %}
{% macro html(this, kwargs) %}
<div class="folium-map" id="{{this.get_name()}}" ></div>
{% endmacro %}
{% macro script(this, kwargs) %}
var southWest = L.latLng({{ this.min_lat }}, {{ this.min_lon }});
var northEast = L.latLng({{ this.max_lat }}, {{ this.max_lon }});
var bounds = L.latLngBounds(southWest, northEast);
var {{this.get_name()}} = L.map('{{this.get_name()}}', {
center:[{{this.location[0]}},{{this.location[1]}}],
zoom: {{this.zoom_start}},
maxBounds: bounds,
layers: []
});
{% endmacro %}
""")
def _repr_html_(self, figsize=(17, 10), **kwargs):
    """Render the map for display in a Jupyter notebook.

    Parameters
    ----------
    figsize : tuple of length 2, default (17, 10)
        Expected output size in inches; output is rendered at 60 dpi
        so it matches a matplotlib figure of the same figsize.
    """
    detached = self._parent is None
    if detached:
        # Wrap the map in a temporary Figure so it can be rendered.
        self.add_to(Figure())
    out = self._parent._repr_html_(figsize=figsize, **kwargs)
    if detached:
        # Detach again so the map stays reusable.
        self._parent = None
    return out
def add_tile_layer(self, tiles='OpenStreetMap', name=None,
                   API_key=None, max_zoom=18, min_zoom=1,
                   attr=None, tile_name=None, tile_url=None,
                   active=False, **kwargs):
    """Build a TileLayer from the given options and attach it to the map.

    `tile_name` and `tile_url` are deprecated aliases for `name` and
    `tiles` respectively.
    """
    if tile_name is not None:
        warnings.warn("'tile_name' is deprecated. Please use 'name' instead.")
        name = tile_name
    if tile_url is not None:
        warnings.warn("'tile_url' is deprecated. Please use 'tiles' instead.")
        tiles = tile_url
    layer = TileLayer(tiles=tiles, name=name,
                      min_zoom=min_zoom, max_zoom=max_zoom,
                      attr=attr, API_key=API_key)
    self.add_children(layer, name=layer.tile_name)
class TileLayer(MacroElement):
    """A Leaflet tile layer, built from a known tileset name or a URL."""

    def __init__(self, tiles='OpenStreetMap', name=None,
                 min_zoom=1, max_zoom=18, attr=None, API_key=None):
        """Create a TileLayer.

        Parameters
        ----------
        tiles : str, default 'OpenStreetMap'
            Built-in tileset name or a custom Leaflet-style tile URL.
        name : str, default None
            Layer name; defaults to the normalized tileset name.
        min_zoom, max_zoom : int
            Zoom bounds passed to Leaflet.
        attr : str, default None
            Attribution text; mandatory for custom tile URLs.
        API_key : str, default None
            Required for the 'cloudmade' and 'mapbox' tilesets.
        """
        super(TileLayer, self).__init__()
        self._name = 'TileLayer'
        # Normalized name: lowercase with all whitespace removed.
        normalized = ''.join(tiles.lower().strip().split())
        self.tile_name = normalized if name is None else name
        self.min_zoom = min_zoom
        self.max_zoom = max_zoom
        self.tiles = normalized
        if self.tiles in ('cloudmade', 'mapbox') and not API_key:
            raise ValueError('You must pass an API key if using Cloudmade'
                             ' or non-default Mapbox tiles.')
        known = set(self._env.list_templates(
            filter_func=lambda x: x.startswith('tiles/')))
        tile_template = 'tiles/{0}/tiles.txt'.format(self.tiles)
        attr_template = 'tiles/{0}/attr.txt'.format(self.tiles)
        if tile_template in known and attr_template in known:
            # Built-in tileset: expand the bundled URL/attribution templates.
            self.tiles = self._env.get_template(tile_template).render(API_key=API_key)
            self.attr = self._env.get_template(attr_template).render()
        else:
            # Custom tileset: keep the raw URL; attribution is mandatory.
            self.tiles = tiles
            if not attr:
                raise ValueError('Custom tiles must'
                                 ' also be passed an attribution')
            self.attr = attr
        self._template = Template(u"""
{% macro script(this, kwargs) %}
var {{this.get_name()}} = L.tileLayer(
    '{{this.tiles}}',
    {
        maxZoom: {{this.max_zoom}},
        minZoom: {{this.min_zoom}},
        attribution: '{{this.attr}}'
        }
    ).addTo({{this._parent.get_name()}});
{% endmacro %}
""")
class WmsTileLayer(TileLayer):
    """A WMS tile layer (rendered via L.tileLayer.wms)."""

    def __init__(self, url, name=None,
                 format=None, layers=None, transparent=True,
                 attribution=None):
        """Create a WmsTileLayer for the WMS service at `url`.

        NOTE(review): deliberately calls ``super(TileLayer, self)`` so
        that TileLayer.__init__ (which has a different contract) is
        skipped and MacroElement's initializer runs instead.
        """
        super(TileLayer, self).__init__()
        self._name = 'WmsTileLayer'
        if name is None:
            self.tile_name = 'WmsTileLayer_' + self._id
        else:
            self.tile_name = name
        self.url = url
        self.format = format
        self.layers = layers
        self.transparent = transparent
        # Attribution is currently optional for WMS layers.
        self.attribution = attribution
        self._template = Template(u"""
{% macro script(this, kwargs) %}
var {{this.get_name()}} = L.tileLayer.wms(
    '{{ this.url }}',
    {
        format:'{{ this.format }}',
        transparent: {{ this.transparent.__str__().lower() }},
        layers:'{{ this.layers }}',
        attribution:'{{this.attribution}}'
        }
    ).addTo({{this._parent.get_name()}});
{% endmacro %}
""")
class Icon(MacroElement):
    """An AwesomeMarkers icon that attaches itself to its parent marker."""

    def __init__(self, color='blue', icon='info-sign', angle=0):
        """Create an Icon.

        Parameters
        ----------
        color : str, default 'blue'
            Marker color.
        icon : str, default 'info-sign'
            Glyphicon name to display.
        angle : int, default 0
            Glyph rotation, in degrees (rendered as an fa-rotate class).
        """
        super(Icon, self).__init__()
        self._name = 'Icon'
        self.icon = icon
        self.color = color
        self.angle = angle
        self._template = Template(u"""
{% macro script(this, kwargs) %}
var {{this.get_name()}} = L.AwesomeMarkers.icon({
    icon: '{{this.icon}}',
    markerColor: '{{this.color}}',
    prefix: 'glyphicon',
    extraClasses: 'fa-rotate-{{this.angle}}'
    });
{{this._parent.get_name()}}.setIcon({{this.get_name()}});
{% endmacro %}
""")
class Marker(MacroElement):
    """A stock Leaflet marker placed at a given location."""

    def __init__(self, location, popup=None, icon=None):
        """Create a simple stock Leaflet marker on the map.

        Parameters
        ----------
        location : tuple or list
            Latitude and Longitude of Marker (Northing, Easting).
        popup : string or tuple, default None
            Popup content. NOTE(review): not referenced by the script
            template below.
        icon : Icon plugin, default None
            Icon for the marker. NOTE(review): the template always emits
            the Leaflet default icon, so this argument is ignored here.
        """
        super(Marker, self).__init__()
        self._name = 'Marker'
        self.location = location
        self._template = Template(u"""
{% macro script(this, kwargs) %}
var {{this.get_name()}} = L.marker(
    [{{this.location[0]}},{{this.location[1]}}],
    {
        icon: new L.Icon.Default()
        }
    )
    .addTo({{this._parent.get_name()}});
{% endmacro %}
""")
class RegularPolygonMarker(MacroElement):
    """A leaflet-dvf regular polygon marker with stroke/fill styling."""

    def __init__(self, location, popup=None, icon=None,
                 color='black', opacity=1, weight=2,
                 fill_color='blue', fill_opacity=1,
                 number_of_sides=4, rotation=0, radius=15):
        """Create a RegularPolygonMarker at `location`, styled by the
        stroke options (`color`, `opacity`, `weight`) and fill options
        (`fill_color`, `fill_opacity`), with `number_of_sides` sides,
        rotated by `rotation` degrees, of the given `radius`.
        """
        super(RegularPolygonMarker, self).__init__()
        self._name = 'RegularPolygonMarker'
        self.location = location
        # NOTE(review): stored but the script template always emits
        # `new L.Icon.Default()`, so a custom icon is ignored here.
        self.icon = "new L.Icon.Default()" if icon is None else icon
        self.color = color
        self.opacity = opacity
        self.weight = weight
        self.fill_color = fill_color
        self.fill_opacity = fill_opacity
        self.number_of_sides = number_of_sides
        self.rotation = rotation
        self.radius = radius
        self._template = Template(u"""
{% macro script(this, kwargs) %}
var {{this.get_name()}} = new L.RegularPolygonMarker(
    new L.LatLng({{this.location[0]}},{{this.location[1]}}),
    {
        icon : new L.Icon.Default(),
        color: '{{this.color}}',
        opacity: {{this.opacity}},
        weight: {{this.weight}},
        fillColor: '{{this.fill_color}}',
        fillOpacity: {{this.fill_opacity}},
        numberOfSides: {{this.number_of_sides}},
        rotation: {{this.rotation}},
        radius: {{this.radius}}
        }
    )
    .addTo({{this._parent.get_name()}});
{% endmacro %}
""")

    def render(self, **kwargs):
        """Render the marker, then register the leaflet-dvf script."""
        super(RegularPolygonMarker, self).render()
        figure = self.get_root()
        assert isinstance(figure, Figure), ("You cannot render this Element "
                                            "if it's not in a Figure.")
        figure.header.add_children(
            JavascriptLink("https://cdnjs.cloudflare.com/ajax/libs/leaflet-dvf"
                           "/0.2/leaflet-dvf.markers.min.js"),
            name='dvf_js')
class Html(Element):
    """A <div> wrapping raw HTML `data`, sized by `width` and `height`."""

    def __init__(self, data, width="100%", height="100%"):
        """Create an Html element holding `data` (a raw HTML string)."""
        super(Html, self).__init__()
        self._name = 'Html'
        self.data = data
        # Sizes are stored as (value, unit) pairs.
        self.height = _parse_size(height)
        self.width = _parse_size(width)
        self._template = Template(u"""
<div id="{{this.get_name()}}"
    style="width: {{this.width[0]}}{{this.width[1]}}; height: {{this.height[0]}}{{this.height[1]}};">
    {{this.data}}</div>
""")
class Popup(Element):
    """A Leaflet popup bound to its parent element."""

    def __init__(self, html, max_width=300):
        """Create a Popup whose content is `html`: either an Element, or
        any text/bytes (wrapped in an Html element)."""
        super(Popup, self).__init__()
        self._name = 'Popup'
        # A popup keeps its own header/html/script sections.
        self.header = Element()
        self.html = Element()
        self.script = Element()
        for section in (self.header, self.html, self.script):
            section._parent = self
        if isinstance(html, Element):
            self.html.add_children(html)
        elif isinstance(html, (text_type, binary_type)):
            self.html.add_children(Html(text_type(html)))
        self.max_width = max_width
        self._template = Template(u"""
var {{this.get_name()}} = L.popup({maxWidth: '{{this.max_width}}'});
{% for name, element in this.html._children.items() %}
    var {{name}} = $('{{element.render(**kwargs).replace('\\n',' ')}}')[0];
    {{this.get_name()}}.setContent({{name}});
{% endfor %}
{{this._parent.get_name()}}.bindPopup({{this.get_name()}});
{% for name, element in this.script._children.items() %}
    {{element.render()}}
{% endfor %}
""")

    def render(self, **kwargs):
        """Render all children, then append this popup's script to the
        root Figure."""
        for child in self._children.values():
            child.render(**kwargs)
        figure = self.get_root()
        assert isinstance(figure, Figure), ("You cannot render this Element "
                                            "if it's not in a Figure.")
        figure.script.add_children(
            Element(self._template.render(this=self, kwargs=kwargs)),
            name=self.get_name())
class Vega(Element):
    def __init__(self, data, width='100%', height='100%',
                 left="0%", top="0%", position='relative'):
        """Create a Vega chart element from a Vega spec (`data`),
        displayed in a div of the given size and CSS position."""
        super(Vega, self).__init__()
        self._name = 'Vega'
        self.data = data
        # Size Parameters (stored as (value, unit) pairs).
        self.width = _parse_size(width)
        self.height = _parse_size(height)
        self.left = _parse_size(left)
        self.top = _parse_size(top)
        self.position = position
        # The element renders nothing by itself; everything is injected
        # into the parent's sections in `render` below.
        self._template = Template(u"")

    def render(self, **kwargs):
        """Inject the chart's div, parse call, style and JS dependencies
        into the parent's html/script sections and the root Figure's
        header/script sections."""
        self.json = json.dumps(self.data)
        # The chart's container div goes into the parent's html section.
        self._parent.html.add_children(Element(Template("""
            <div id="{{this.get_name()}}"
            style="width: {{this.width[0]}}{{this.width[1]}}; height: {{this.height[0]}}{{this.height[1]}};">
            </div>
            """).render(this=self, kwargs=kwargs)), name=self.get_name())
        # The call that actually draws the chart into that div.
        self._parent.script.add_children(Element(Template("""
            vega_parse({{this.json}},{{this.get_name()}});
            """).render(this=self)), name=self.get_name())
        figure = self.get_root()
        assert isinstance(figure, Figure), ("You cannot render this Element "
                                            "if it's not in a Figure.")
        # Positioning style for the container div.
        figure.header.add_children(Element(Template("""
            <style> #{{this.get_name()}} {
                position : {{this.position}};
                width : {{this.width[0]}}{{this.width[1]}};
                height: {{this.height[0]}}{{this.height[1]}};
                left: {{this.left[0]}}{{this.left[1]}};
                top: {{this.top[0]}}{{this.top[1]}};
            </style>
            """).render(this=self, **kwargs)), name=self.get_name())
        # JS dependencies (named so repeated renders do not duplicate them)
        # and the shared vega_parse helper.
        figure.header.add_children(
            JavascriptLink("https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js"),
            name='d3')
        figure.header.add_children(
            JavascriptLink("https://cdnjs.cloudflare.com/ajax/libs/vega/1.4.3/vega.min.js"),
            name='vega')
        figure.header.add_children(
            JavascriptLink("https://code.jquery.com/jquery-2.1.0.min.js"),
            name='jquery')
        figure.script.add_children(
            Template("""function vega_parse(spec, div) {
            vg.parse.spec(spec, function(chart) { chart({el:div}).update(); });}"""),
            name='vega_parse')
class GeoJson(MacroElement):
    """A GeoJson overlay, appended into a map with Map.add_children."""

    def __init__(self, data):
        """Create a GeoJson element.

        Parameters
        ----------
        data : file, dict or str
            The geo-json data you want to plot:
            * a file-like object is read and fully embedded,
            * a dict is converted to json and embedded,
            * a str is passed to the javascript as-is.

        Examples
        --------
        >>> GeoJson(open('foo.json'))
        >>> GeoJson(json.load(open('foo.json')))
        >>> GeoJson(open('foo.json').read())
        """
        super(GeoJson, self).__init__()
        self._name = 'GeoJson'
        if type(data) is dict:
            self.data = json.dumps(data)
        elif 'read' in dir(data):
            self.data = data.read()
        else:
            self.data = data
        self._template = Template(u"""
{% macro script(this, kwargs) %}
var {{this.get_name()}} = L.geoJson({{this.data}}).addTo({{this._parent.get_name()}});
{% endmacro %}
""")
class MarkerCluster(MacroElement):
    """Adds a MarkerCluster layer on the map."""

    def __init__(self):
        """Create a MarkerCluster element to append into a map with
        Map.add_children."""
        super(MarkerCluster, self).__init__()
        self._name = 'MarkerCluster'
        self._template = Template(u"""
{% macro script(this, kwargs) %}
var {{this.get_name()}} = L.markerClusterGroup();
{{this._parent.get_name()}}.addLayer({{this.get_name()}});
{% endmacro %}
""")

    def render(self, **kwargs):
        """Render the cluster and register its JS/CSS dependencies in
        the root Figure's header."""
        super(MarkerCluster, self).render()
        figure = self.get_root()
        assert isinstance(figure, Figure), ("You cannot render this Element "
                                            "if it's not in a Figure.")
        scripts = (
            ('marker_cluster_src',
             "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster"
             "/0.4.0/leaflet.markercluster-src.js"),
            ('marker_cluster',
             "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster"
             "/0.4.0/leaflet.markercluster.js"),
        )
        for dep_name, url in scripts:
            figure.header.add_children(JavascriptLink(url), name=dep_name)
        stylesheets = (
            ('marker_cluster_css',
             "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/MarkerCluster.css"),
            ("marker_cluster_default_css",
             "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/MarkerCluster.Default.css"),
        )
        for dep_name, url in stylesheets:
            figure.header.add_children(CssLink(url), name=dep_name)
Add GeoJsonStyle for choropleth
# -*- coding: utf-8 -*-
"""
Elements
------
A generic class for creating Elements.
"""
import base64
import json
import warnings
from collections import OrderedDict
from uuid import uuid4

from jinja2 import Environment, PackageLoader, Template

from .six import text_type, binary_type, urlopen
from .utilities import color_brewer

ENV = Environment(loader=PackageLoader('folium', 'templates'))
def _camelify(out):
return (''.join(["_"+x.lower() if i<len(out)-1 and x.isupper() and out[i+1].islower()
else x.lower()+"_" if i<len(out)-1 and x.islower() and out[i+1].isupper()
else x.lower() for i,x in enumerate(list(out))])).lstrip('_').replace('__','_')
class Element(object):
    """Basic Element object that does nothing.
    Other Elements may inherit from this one."""

    def __init__(self, template=None, template_name=None):
        """Create an Element.

        Parameters
        ----------
        template : str, default None
            Inline jinja2 template source for this element.
        template_name : str, default None
            Name of a template to load from the package environment;
            used only when `template` is None.
        """
        self._name = 'Element'
        self._id = uuid4().hex
        self._env = ENV
        self._children = OrderedDict()
        self._parent = None
        if template is not None:
            self._template = Template(template)
        elif template_name is not None:
            self._template = ENV.get_template(template_name)
        else:
            # Default behavior: render all children in insertion order.
            self._template = Template(u"""
{% for name, element in this._children.items() %}
{{element.render(**kwargs)}}
{% endfor %}
""")

    def get_name(self):
        """Return a unique, javascript-friendly name for this element."""
        return _camelify(self._name) + '_' + self._id

    def add_children(self, child, name=None, index=None):
        """Add a child to this element.

        Parameters
        ----------
        child : Element
            The child to add; its `_parent` is set to this element.
        name : str, default None
            Key for the child; defaults to `child.get_name()`.
        index : int, default None
            When given, insert the child at this position (any existing
            child with the same name is replaced).
        """
        if name is None:
            name = child.get_name()
        if index is None:
            self._children[name] = child
        else:
            items = [item for item in self._children.items()
                     if item[0] != name]
            items.insert(int(index), (name, child))
            # BUGFIX: rebuild an OrderedDict; assigning the raw list of
            # tuples broke every later dict-style access (item
            # assignment, `.items()` in the templates).
            self._children = OrderedDict(items)
        child._parent = self

    def add_to(self, parent, name=None, index=None):
        """Add element to a parent."""
        parent.add_children(self, name=name, index=index)

    def to_dict(self, depth=-1, ordered=True, **kwargs):
        """Return a dict describing this element and, down to `depth`
        levels, its children (`depth=-1` means unlimited)."""
        dict_fun = OrderedDict if ordered else dict
        out = dict_fun()
        out['name'] = self._name
        out['id'] = self._id
        if depth != 0:
            out['children'] = dict_fun([(name, child.to_dict(depth=depth - 1))
                                        for name, child in self._children.items()])
        return out

    def to_json(self, depth=-1, **kwargs):
        """Return a JSON representation of this element."""
        return json.dumps(self.to_dict(depth=depth, ordered=True), **kwargs)

    def get_root(self):
        """Returns the root of the elements tree."""
        if self._parent is None:
            return self
        return self._parent.get_root()

    def render(self, **kwargs):
        """Render this element's template, exposing it as `this`."""
        return self._template.render(this=self, kwargs=kwargs)
# Javascript resources added to every Figure header, as
# (child-name, url) pairs; the names keep additions idempotent.
_default_js = [
    ('leaflet',
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.3/leaflet.js"),
    ('jquery',
     "https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"),
    ('bootstrap',
     "https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/js/bootstrap.min.js"),
    ('awesome_markers',
     "https://rawgithub.com/lvoogdt/Leaflet.awesome-markers/2.0/develop/dist/leaflet.awesome-markers.js"),
    ('marker_cluster_src',
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/leaflet.markercluster-src.js"),
    ('marker_cluster',
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/leaflet.markercluster.js"),
    ]

# CSS resources added to every Figure header, same (name, url) format.
_default_css = [
    ("leaflet_css",
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet/0.7.3/leaflet.css"),
    ("bootstrap_css",
     "https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css"),
    ("bootstrap_theme_css",
     "https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap-theme.min.css"),
    ("awesome_markers_font_css",
     "https://maxcdn.bootstrapcdn.com/font-awesome/4.1.0/css/font-awesome.min.css"),
    ("awesome_markers_css",
     "https://rawgit.com/lvoogdt/Leaflet.awesome-markers/2.0/develop/dist/leaflet.awesome-markers.css"),
    ("marker_cluster_default_css",
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/MarkerCluster.Default.css"),
    ("marker_cluster_css",
     "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/MarkerCluster.css"),
    ("awesome_rotate_css",
     "https://raw.githubusercontent.com/python-visualization/folium/master/folium/templates/leaflet.awesome.rotate.css"),
    ]
class Figure(Element):
    """A full HTML document with header, body and script sections into
    which child elements render themselves."""

    def __init__(self):
        super(Figure, self).__init__()
        self._name = 'Figure'
        self.header = Element()
        self.html = Element()
        self.script = Element()
        #self.axes = []
        self.header._parent = self
        self.html._parent = self
        self.script._parent = self
        self._template = Template(u"""
<!DOCTYPE html>
<head>
    {{this.header.render(**kwargs)}}
</head>
<body>
    {{this.html.render(**kwargs)}}
</body>
<script>
    {{this.script.render(**kwargs)}}
</script>
""")
        # Create the meta tag.
        self.header.add_children(Element(
            '<meta http-equiv="content-type" content="text/html; charset=UTF-8" />'),
            name='meta_http')
        # Import Javascripts
        for name, url in _default_js:
            self.header.add_children(JavascriptLink(url), name=name)
        # Import Css
        for name, url in _default_css:
            self.header.add_children(CssLink(url), name=name)
        self.header.add_children(Element("""
<style>
html, body {
    width: 100%;
    height: 100%;
    margin: 0;
    padding: 0;
    }
#map {
    position:absolute;
    top:0;
    bottom:0;
    right:0;
    left:0;
    }
</style>
"""), name='css_style')

    def to_dict(self, depth=-1, **kwargs):
        """Return a dict representation, including the three sections."""
        out = super(Figure, self).to_dict(depth=depth, **kwargs)
        out['header'] = self.header.to_dict(depth=depth - 1, **kwargs)
        out['html'] = self.html.to_dict(depth=depth - 1, **kwargs)
        out['script'] = self.script.to_dict(depth=depth - 1, **kwargs)
        return out

    def render(self, **kwargs):
        """Render all children, then the document template itself."""
        for name, child in self._children.items():
            child.render(**kwargs)
        return self._template.render(this=self, kwargs=kwargs)

    def _repr_html_(self, figsize=(17, 10), **kwargs):
        """Displays the Figure in a Jupyter notebook.

        Parameters
        ----------
        figsize : tuple of length 2, default (17, 10)
            The size of the output you expect in inches.
            Output is 60dpi so that the output has same size as a
            matplotlib figure with the same figsize.
        """
        html = self.render(**kwargs)
        # BUGFIX: use base64.b64encode; the 'base64' str codec only
        # exists on Python 2 and inserts newlines that corrupt data URIs.
        b64 = base64.b64encode(html.encode('utf8')).decode('utf8')
        width, height = figsize
        iframe = '<iframe src="{html}" width="{width}px" height="{height}px"></iframe>'\
            .format(
                html="data:text/html;base64," + b64,
                width=int(60. * width),
                height=int(60. * height),
            )
        return iframe

    def add_subplot(self, x, y, n, margin=0.05):
        """Create and attach a Div occupying cell `n` (1-based, row-major)
        of an `x` by `y` grid, shrunk by a relative `margin` on each side.
        Returns the Div."""
        width = 1. / y
        height = 1. / x
        left = ((n - 1) % y) * width
        top = ((n - 1) // y) * height
        # Apply the margin on every side of the cell.
        left = left + width * margin
        top = top + height * margin
        width = width * (1 - 2. * margin)
        height = height * (1 - 2. * margin)
        div = Div(position='absolute',
                  width="{}%".format(100. * width),
                  height="{}%".format(100. * height),
                  left="{}%".format(100. * left),
                  top="{}%".format(100. * top),
                  )
        self.add_children(div)
        return div
class Link(Element):
    """An Element pointing at an external resource (`self.url`), whose
    contents can be fetched and cached via `get_code`."""

    def get_code(self):
        """Download the linked resource once and return its contents."""
        if self.code is None:
            self.code = urlopen(self.url).read()
        return self.code

    def to_dict(self, depth=-1, **kwargs):
        """Return a dict representation, including the url."""
        # BUGFIX: forward the caller's depth instead of hard-coding -1.
        out = super(Link, self).to_dict(depth=depth, **kwargs)
        out['url'] = self.url
        return out
class JavascriptLink(Link):
    """A <script> reference, linked by url or embedded inline."""

    def __init__(self, url, download=False):
        """Create a JavascriptLink object based on a url.

        Parameters
        ----------
        url : str
            The url to be linked.
        download : bool, default False
            Whether the target document shall be loaded right now.
        """
        super(JavascriptLink, self).__init__()
        self._name = 'JavascriptLink'
        self.url = url
        self.code = None
        if download:
            # Fetch eagerly so the script can later be embedded inline.
            self.get_code()
        self._template = Template(u"""
{% if kwargs.get("embedded",False) %}
    <script>{{this.get_code()}}</script>
{% else %}
    <script src="{{this.url}}"></script>
{% endif %}
""")
class CssLink(Link):
    """A stylesheet reference, linked by url or embedded inline."""

    def __init__(self, url, download=False):
        """Create a CssLink object based on a url.

        Parameters
        ----------
        url : str
            The url to be linked.
        download : bool, default False
            Whether the target document shall be loaded right now.
        """
        super(CssLink, self).__init__()
        self._name = 'CssLink'
        self.url = url
        self.code = None
        if download:
            # Fetch eagerly so the style can later be embedded inline.
            self.get_code()
        self._template = Template(u"""
{% if kwargs.get("embedded",False) %}
    <style>{{this.get_code()}}</style>
{% else %}
    <link rel="stylesheet" href="{{this.url}}" />
{% endif %}
""")
class Div(Figure):
    def __init__(self, width='100%', height='100%',
                 left="0%", top="0%", position='relative'):
        """Create a sized, positioned <div> that acts as a sub-Figure:
        children render into it, and its header/script contributions are
        forwarded to the enclosing Figure at render time.
        """
        # Deliberately skips Figure.__init__ (no document boilerplate);
        # runs Element.__init__ instead.
        super(Figure, self).__init__()
        self._name = 'Div'
        # Size Parameters (stored as (value, unit) pairs).
        self.width = _parse_size(width)
        self.height = _parse_size(height)
        self.left = _parse_size(left)
        self.top = _parse_size(top)
        self.position = position
        self.header = Element()
        self.html = Element("""
{% for name, element in this._children.items() %}
{{element.render(**kwargs)}}
{% endfor %}
""")
        self.script = Element()
        self.header._parent = self
        self.html._parent = self
        self.script._parent = self
        self._template = Template(u"""
{% macro header(this, kwargs) %}
<style> #{{this.get_name()}} {
    position : {{this.position}};
    width : {{this.width[0]}}{{this.width[1]}};
    height: {{this.height[0]}}{{this.height[1]}};
    left: {{this.left[0]}}{{this.left[1]}};
    top: {{this.top[0]}}{{this.top[1]}};
    </style>
{% endmacro %}
{% macro html(this, kwargs) %}
<div id="{{this.get_name()}}">
    {{this.html.render(**kwargs)}}
</div>
{% endmacro %}
""")

    def get_root(self):
        # A Div is its own root: children render into it, not into the
        # enclosing Figure directly.
        return self

    def render(self, **kwargs):
        """Render children into this div and forward header/script
        contributions (plus this div's own macros) to the parent Figure."""
        figure = self._parent
        assert isinstance(figure, Figure), ("You cannot render this Element "
                                            "if it's not in a Figure.")
        for name, element in self._children.items():
            element.render(**kwargs)
        # Bubble header/script children up to the real Figure.
        for name, element in self.header._children.items():
            figure.header.add_children(element, name=name)
        for name, element in self.script._children.items():
            figure.script.add_children(element, name=name)
        # Evaluate this element's own template macros into the Figure.
        header = self._template.module.__dict__.get('header', None)
        if header is not None:
            figure.header.add_children(Element(header(self, kwargs)),
                                       name=self.get_name())
        html = self._template.module.__dict__.get('html', None)
        if html is not None:
            figure.html.add_children(Element(html(self, kwargs)),
                                     name=self.get_name())
        script = self._template.module.__dict__.get('script', None)
        if script is not None:
            figure.script.add_children(Element(script(self, kwargs)),
                                       name=self.get_name())

    def _repr_html_(self, figsize=(17, 10), **kwargs):
        """Displays the Div in a Jupyter notebook.

        Parameters
        ----------
        figsize : tuple of length 2, default (17, 10)
            The size of the output you expect in inches.
            Output is 60dpi so that the output has same size as a
            matplotlib figure with the same figsize.
        """
        if self._parent is None:
            # Detached: wrap in a temporary Figure, then detach again.
            self.add_to(Figure())
            out = self._parent._repr_html_(figsize=figsize, **kwargs)
            self._parent = None
        else:
            out = self._parent._repr_html_(figsize=figsize, **kwargs)
        return out
class MacroElement(Element):
    """Parent class for Elements defined by a macro template.

    To build your own element: inherit from this class, override the
    `_name` attribute, and override `_template` with a template of the
    form::

        {% macro header(this, kwargs) %} ... {% endmacro %}
        {% macro html(this, kwargs) %} ... {% endmacro %}
        {% macro script(this, kwargs) %} ... {% endmacro %}
    """

    def __init__(self):
        """Create an empty MacroElement."""
        super(MacroElement, self).__init__()
        self._name = 'MacroElement'
        self._template = Template(u"")

    def render(self, **kwargs):
        """Evaluate the template's macros into the matching sections of
        the root Figure, then render all children."""
        figure = self.get_root()
        assert isinstance(figure, Figure), ("You cannot render this Element "
                                            "if it's not in a Figure.")
        macros = self._template.module.__dict__
        for section_name, target in (('header', figure.header),
                                     ('html', figure.html),
                                     ('script', figure.script)):
            macro = macros.get(section_name, None)
            if macro is not None:
                target.add_children(Element(macro(self, kwargs)),
                                    name=self.get_name())
        for name, element in self._children.items():
            element.render(**kwargs)
def _parse_size(value):
try:
if isinstance(value, int) or isinstance(value, float):
value_type = 'px'
value = float(value)
assert value > 0
else:
value_type = '%'
value = float(value.strip('%'))
assert 0 <= value <= 100
except:
msg = "Cannot parse value {!r} as {!r}".format
raise ValueError(msg(value, value_type))
return value, value_type
class Map(MacroElement):
    def __init__(self, location=None, width='100%', height='100%',
                 left="0%", top="0%", position='relative',
                 tiles='OpenStreetMap', API_key=None, max_zoom=18, min_zoom=1,
                 zoom_start=10, attr=None, min_lat=-90, max_lat=90,
                 min_lon=-180, max_lon=180):
        """Create a Map with Folium and Leaflet.js

        Generate a base map of given width and height with either default
        tilesets or a custom tileset URL. The following tilesets are built-in
        to Folium. Pass any of the following to the "tiles" keyword:
            - "OpenStreetMap"
            - "MapQuest Open"
            - "MapQuest Open Aerial"
            - "Mapbox Bright" (Limited levels of zoom for free tiles)
            - "Mapbox Control Room" (Limited levels of zoom for free tiles)
            - "Stamen" (Terrain, Toner, and Watercolor)
            - "Cloudmade" (Must pass API key)
            - "Mapbox" (Must pass API key)
            - "CartoDB" (positron and dark_matter)

        You can pass a custom tileset to Folium by passing a Leaflet-style
        URL to the tiles parameter: http://{s}.yourtiles.com/{z}/{x}/{y}.png

        Parameters
        ----------
        location: tuple or list, default None
            Latitude and Longitude of Map (Northing, Easting).
        width: pixel int or percentage string (default: '100%')
            Width of the map.
        height: pixel int or percentage string (default: '100%')
            Height of the map.
        tiles: str, default 'OpenStreetMap'
            Map tileset to use. Can use defaults or pass a custom URL.
        API_key: str, default None
            API key for Cloudmade or Mapbox tiles.
        max_zoom: int, default 18
            Maximum zoom depth for the map.
        zoom_start: int, default 10
            Initial zoom level for the map.
        attr: string, default None
            Map tile attribution; only required if passing custom tile URL.

        Returns
        -------
        Folium Map Object

        Examples
        --------
        >>> map = folium.Map(location=[45.523, -122.675], width=750, height=500)
        >>> map = folium.Map(location=[45.523, -122.675],
        ...                  tiles='Mapbox Control Room')
        >>> map = folium.Map(location=(45.523, -122.675), max_zoom=20,
        ...                  tiles='Cloudmade', API_key='YourKey')
        >>> map = folium.Map(location=[45.523, -122.675], zoom_start=2,
        ...                  tiles=('http://{s}.tiles.mapbox.com/v3/'
        ...                         'mapbox.control-room/{z}/{x}/{y}.png'),
        ...                  attr='Mapbox attribution')
        """
        super(Map, self).__init__()
        self._name = 'Map'
        if not location:
            # If location is not passed, we center the map at 0,0 and ignore zoom
            self.location = [0, 0]
            self.zoom_start = min_zoom
        else:
            self.location = location
            self.zoom_start = zoom_start
        # Map Size Parameters (stored as (value, unit) pairs).
        self.width = _parse_size(width)
        self.height = _parse_size(height)
        self.left = _parse_size(left)
        self.top = _parse_size(top)
        self.position = position
        # Panning bounds, used to build Leaflet's maxBounds.
        self.min_lat = min_lat
        self.max_lat = max_lat
        self.min_lon = min_lon
        self.max_lon = max_lon
        # Attach the initial tile layer as a child element.
        self.add_tile_layer(tiles=tiles, min_zoom=min_zoom, max_zoom=max_zoom,
                            attr=attr, API_key=API_key)
        self._template = Template(u"""
{% macro header(this, kwargs) %}
<style> #{{this.get_name()}} {
    position : {{this.position}};
    width : {{this.width[0]}}{{this.width[1]}};
    height: {{this.height[0]}}{{this.height[1]}};
    left: {{this.left[0]}}{{this.left[1]}};
    top: {{this.top[0]}}{{this.top[1]}};
    </style>
{% endmacro %}
{% macro html(this, kwargs) %}
<div class="folium-map" id="{{this.get_name()}}" ></div>
{% endmacro %}
{% macro script(this, kwargs) %}
var southWest = L.latLng({{ this.min_lat }}, {{ this.min_lon }});
var northEast = L.latLng({{ this.max_lat }}, {{ this.max_lon }});
var bounds = L.latLngBounds(southWest, northEast);
var {{this.get_name()}} = L.map('{{this.get_name()}}', {
    center:[{{this.location[0]}},{{this.location[1]}}],
    zoom: {{this.zoom_start}},
    maxBounds: bounds,
    layers: []
    });
{% endmacro %}
""")

    def _repr_html_(self, figsize=(17, 10), **kwargs):
        """Displays the Map in a Jupyter notebook.

        Parameters
        ----------
        figsize : tuple of length 2, default (17, 10)
            The size of the output you expect in inches.
            Output is 60dpi so that the output has same size as a
            matplotlib figure with the same figsize.
        """
        if self._parent is None:
            # Detached: wrap in a temporary Figure, render, detach again.
            self.add_to(Figure())
            out = self._parent._repr_html_(figsize=figsize, **kwargs)
            self._parent = None
        else:
            out = self._parent._repr_html_(figsize=figsize, **kwargs)
        return out

    def add_tile_layer(self, tiles='OpenStreetMap', name=None,
                       API_key=None, max_zoom=18, min_zoom=1,
                       attr=None, tile_name=None, tile_url=None,
                       active=False, **kwargs):
        """Build a TileLayer and attach it to the map. `tile_name` and
        `tile_url` are deprecated aliases for `name` and `tiles`."""
        if tile_name is not None:
            name = tile_name
            warnings.warn("'tile_name' is deprecated. Please use 'name' instead.")
        if tile_url is not None:
            tiles = tile_url
            warnings.warn("'tile_url' is deprecated. Please use 'tiles' instead.")
        tile_layer = TileLayer(tiles=tiles, name=name,
                               min_zoom=min_zoom, max_zoom=max_zoom,
                               attr=attr, API_key=API_key)
        self.add_children(tile_layer, name=tile_layer.tile_name)
class TileLayer(MacroElement):
    def __init__(self, tiles='OpenStreetMap', name=None,
                 min_zoom=1, max_zoom=18, attr=None, API_key=None):
        """Create a TileLayer from a built-in tileset name or a custom
        Leaflet-style tile URL.

        Parameters
        ----------
        tiles : str, default 'OpenStreetMap'
            Built-in tileset name or a custom tile URL template.
        name : str, default None
            Layer name; defaults to the normalized tileset name.
        min_zoom, max_zoom : int
            Zoom bounds passed to Leaflet.
        attr : str, default None
            Attribution text; mandatory for custom tile URLs.
        API_key : str, default None
            Required for the 'cloudmade' and 'mapbox' tilesets.
        """
        super(TileLayer, self).__init__()
        self._name = 'TileLayer'
        self.tile_name = name if name is not None else ''.join(tiles.lower().strip().split())
        self.min_zoom = min_zoom
        self.max_zoom = max_zoom
        # Normalized tileset name: lowercase with whitespace removed.
        self.tiles = ''.join(tiles.lower().strip().split())
        if self.tiles in ('cloudmade', 'mapbox') and not API_key:
            raise ValueError('You must pass an API key if using Cloudmade'
                             ' or non-default Mapbox tiles.')
        # Built-in tilesets ship a pair of templates holding the tile
        # URL and the attribution text.
        templates = list(self._env.list_templates(filter_func=lambda x: x.startswith('tiles/')))
        tile_template = 'tiles/'+self.tiles+'/tiles.txt'
        attr_template = 'tiles/'+self.tiles+'/attr.txt'
        if tile_template in templates and attr_template in templates:
            self.tiles = self._env.get_template(tile_template).render(API_key=API_key)
            self.attr = self._env.get_template(attr_template).render()
        else:
            # Custom URL: keep as-is; attribution must be supplied.
            self.tiles = tiles
            if not attr:
                raise ValueError('Custom tiles must'
                                 ' also be passed an attribution')
            self.attr = attr
        self._template = Template(u"""
{% macro script(this, kwargs) %}
var {{this.get_name()}} = L.tileLayer(
    '{{this.tiles}}',
    {
        maxZoom: {{this.max_zoom}},
        minZoom: {{this.min_zoom}},
        attribution: '{{this.attr}}'
        }
    ).addTo({{this._parent.get_name()}});
{% endmacro %}
""")
class WmsTileLayer(TileLayer):
    """A WMS tile layer (rendered via L.tileLayer.wms)."""

    def __init__(self, url, name=None,
                 format=None, layers=None, transparent=True,
                 attribution=None):
        """Create a WmsTileLayer for the WMS service at `url`.

        NOTE(review): deliberately calls ``super(TileLayer, self)`` so
        that TileLayer.__init__ (which has a different contract) is
        skipped and MacroElement's initializer runs instead.
        """
        super(TileLayer, self).__init__()
        self._name = 'WmsTileLayer'
        if name is None:
            self.tile_name = 'WmsTileLayer_' + self._id
        else:
            self.tile_name = name
        self.url = url
        self.format = format
        self.layers = layers
        self.transparent = transparent
        # Attribution is currently optional for WMS layers.
        self.attribution = attribution
        self._template = Template(u"""
{% macro script(this, kwargs) %}
var {{this.get_name()}} = L.tileLayer.wms(
    '{{ this.url }}',
    {
        format:'{{ this.format }}',
        transparent: {{ this.transparent.__str__().lower() }},
        layers:'{{ this.layers }}',
        attribution:'{{this.attribution}}'
        }
    ).addTo({{this._parent.get_name()}});
{% endmacro %}
""")
class Icon(MacroElement):
    """An AwesomeMarkers icon that attaches itself to its parent marker."""

    def __init__(self, color='blue', icon='info-sign', angle=0):
        """Create an Icon.

        Parameters
        ----------
        color : str, default 'blue'
            Marker color.
        icon : str, default 'info-sign'
            Glyphicon name to display.
        angle : int, default 0
            Glyph rotation, in degrees (rendered as an fa-rotate class).
        """
        super(Icon, self).__init__()
        self._name = 'Icon'
        self.icon = icon
        self.color = color
        self.angle = angle
        self._template = Template(u"""
{% macro script(this, kwargs) %}
var {{this.get_name()}} = L.AwesomeMarkers.icon({
    icon: '{{this.icon}}',
    markerColor: '{{this.color}}',
    prefix: 'glyphicon',
    extraClasses: 'fa-rotate-{{this.angle}}'
    });
{{this._parent.get_name()}}.setIcon({{this.get_name()}});
{% endmacro %}
""")
class Marker(MacroElement):
    def __init__(self, location, popup=None, icon=None):
        """Create a simple stock Leaflet marker on the map, with optional
        popup text or Vincent visualization.

        Parameters
        ----------
        location: tuple or list, default None
            Latitude and Longitude of Marker (Northing, Easting)
        popup: string or Popup instance, optional
            Popup content attached to the marker. A plain string is
            wrapped in a Popup element automatically.
        icon: Icon plugin
            the Icon plugin to use to render the marker.

        Returns
        -------
        Marker names and HTML in obj.template_vars

        Example
        -------
        >>>map.simple_marker(location=[45.5, -122.3], popup='Portland, OR')
        >>>map.simple_marker(location=[45.5, -122.3], popup=(vis, 'vis.json'))
        """
        super(Marker, self).__init__()
        self._name = 'Marker'
        self.location = location
        # Bug fix: `popup` and `icon` used to be accepted but silently
        # ignored. Attaching them as children lets their own templates
        # call setIcon()/bindPopup() on this marker at render time.
        if icon is not None:
            self.add_children(icon)
        if popup is not None:
            self.add_children(popup if isinstance(popup, Popup)
                              else Popup(popup))
        self._template = Template(u"""
        {% macro script(this, kwargs) %}
            var {{this.get_name()}} = L.marker(
                [{{this.location[0]}},{{this.location[1]}}],
                {
                    icon: new L.Icon.Default()
                    }
                )
                .addTo({{this._parent.get_name()}});
        {% endmacro %}
        """)
class RegularPolygonMarker(MacroElement):
    def __init__(self, location, popup=None, icon=None,
                 color='black', opacity=1, weight=2,
                 fill_color='blue', fill_opacity=1,
                 number_of_sides=4, rotation=0, radius=15):
        """Creates a leaflet-dvf RegularPolygonMarker on the map.

        Parameters
        ----------
        location : tuple or list
            Latitude and Longitude of the marker (Northing, Easting).
        popup : optional
            Currently unused — kept for interface compatibility.
        icon : str, optional
            Javascript expression for the marker icon; defaults to
            "new L.Icon.Default()".
        color, fill_color : str
            Stroke and fill colors.
        opacity, fill_opacity : float
            Stroke and fill opacities.
        weight : int
            Stroke weight in pixels.
        number_of_sides : int, default 4
            Number of polygon sides.
        rotation : int, default 0
            Rotation of the polygon in degrees.
        radius : int, default 15
            Radius of the marker in pixels.
        """
        super(RegularPolygonMarker, self).__init__()
        self._name = 'RegularPolygonMarker'
        self.location = location
        self.icon = "new L.Icon.Default()" if icon is None else icon
        self.color = color
        self.opacity = opacity
        self.weight = weight
        self.fill_color = fill_color
        self.fill_opacity = fill_opacity
        self.number_of_sides = number_of_sides
        self.rotation = rotation
        self.radius = radius
        # Bug fix: the template used to hardcode `new L.Icon.Default()`,
        # silently ignoring self.icon; {{this.icon}} renders the same
        # default when no icon was supplied.
        self._template = Template(u"""
        {% macro script(this, kwargs) %}
            var {{this.get_name()}} = new L.RegularPolygonMarker(
                new L.LatLng({{this.location[0]}},{{this.location[1]}}),
                {
                    icon : {{this.icon}},
                    color: '{{this.color}}',
                    opacity: {{this.opacity}},
                    weight: {{this.weight}},
                    fillColor: '{{this.fill_color}}',
                    fillOpacity: {{this.fill_opacity}},
                    numberOfSides: {{this.number_of_sides}},
                    rotation: {{this.rotation}},
                    radius: {{this.radius}}
                    }
                )
                .addTo({{this._parent.get_name()}});
        {% endmacro %}
        """)

    def render(self, **kwargs):
        """Renders the marker and registers the leaflet-dvf script in the
        root Figure's header. Raises AssertionError when the root element
        is not a Figure."""
        # Bug fix: kwargs were previously dropped instead of being
        # forwarded to the parent renderer (cf. GeoJsonStyle.render).
        super(RegularPolygonMarker, self).render(**kwargs)
        figure = self.get_root()
        assert isinstance(figure, Figure), ("You cannot render this Element "
                                            "if it's not in a Figure.")
        figure.header.add_children(
            JavascriptLink("https://cdnjs.cloudflare.com/ajax/libs/leaflet-dvf"
                           "/0.2/leaflet-dvf.markers.min.js"),
            name='dvf_js')
class Html(Element):
    def __init__(self, data, width="100%", height="100%"):
        """A sized <div> element wrapping arbitrary HTML content.

        Parameters
        ----------
        data : str
            The HTML snippet embedded in the div.
        width, height : str or float, default "100%"
            Dimensions of the div; normalised by _parse_size into a
            (value, unit) pair.
        """
        super(Html, self).__init__()
        self._name = 'Html'
        self.data = data
        # Normalise both dimensions into (value, unit) pairs for the
        # inline style attribute below.
        self.height = _parse_size(height)
        self.width = _parse_size(width)
        self._template = Template(u"""
        <div id="{{this.get_name()}}"
                style="width: {{this.width[0]}}{{this.width[1]}}; height: {{this.height[0]}}{{this.height[1]}};">
                {{this.data}}</div>
                """)
class Popup(Element):
    """A Leaflet popup bound to its parent element (typically a Marker).

    Parameters
    ----------
    html : str or Element
        Content of the popup; plain strings (or bytes) are wrapped in an
        Html element.
    max_width : int, default 300
        Maximum popup width in pixels, passed to L.popup.
    """
    def __init__(self, html, max_width=300):
        super(Popup, self).__init__()
        self._name = 'Popup'
        # The popup keeps its own header/html/script sections; they are
        # rendered into the figure from render(), not inline.
        self.header = Element()
        self.html = Element()
        self.script = Element()
        self.header._parent = self
        self.html._parent = self
        self.script._parent = self
        if isinstance(html, Element):
            self.html.add_children(html)
        elif isinstance(html, text_type) or isinstance(html,binary_type):
            # Plain text/bytes content is wrapped in an Html element.
            self.html.add_children(Html(text_type(html)))
        self.max_width = max_width
        self._template = Template(u"""
            var {{this.get_name()}} = L.popup({maxWidth: '{{this.max_width}}'});
            {% for name, element in this.html._children.items() %}
                var {{name}} = $('{{element.render(**kwargs).replace('\\n',' ')}}')[0];
                {{this.get_name()}}.setContent({{name}});
            {% endfor %}
            {{this._parent.get_name()}}.bindPopup({{this.get_name()}});
            {% for name, element in this.script._children.items() %}
                {{element.render()}}
            {% endfor %}
        """)
    def render(self, **kwargs):
        """Renders all children, then injects this popup's script into the
        root Figure's script section. Raises AssertionError when the root
        element is not a Figure."""
        for name, child in self._children.items():
            child.render(**kwargs)
        figure = self.get_root()
        assert isinstance(figure,Figure), ("You cannot render this Element "
                                           "if it's not in a Figure.")
        figure.script.add_children(Element(\
            self._template.render(this=self, kwargs=kwargs)), name=self.get_name())
class Vega(Element):
    def __init__(self, data, width='100%', height='100%',
                 left="0%", top="0%", position='relative'):
        """Creates a Vega chart element overlaid on the figure.

        Parameters
        ----------
        data : dict
            The Vega specification; serialized to JSON at render time.
        width, height : str or float, default '100%'
            Size of the chart div (parsed by _parse_size).
        left, top : str or float, default "0%"
            Offsets of the chart div (parsed by _parse_size).
        position : str, default 'relative'
            CSS `position` property of the chart div.
        """
        super(Vega, self).__init__()
        self._name = 'Vega'
        self.data = data
        # Size Parameters.
        self.width = _parse_size(width)
        self.height = _parse_size(height)
        self.left = _parse_size(left)
        self.top = _parse_size(top)
        self.position = position
        # Nothing is rendered in place; render() injects everything into
        # the parent's html/script sections and the figure header.
        self._template = Template(u"")

    def render(self, **kwargs):
        """Renders the chart div, its CSS and the vega_parse() call into
        the parent and the root Figure. Raises AssertionError when the
        root element is not a Figure."""
        self.json = json.dumps(self.data)
        self._parent.html.add_children(Element(Template("""
            <div id="{{this.get_name()}}"
                style="width: {{this.width[0]}}{{this.width[1]}}; height: {{this.height[0]}}{{this.height[1]}};">
            </div>
            """).render(this=self, kwargs=kwargs)), name=self.get_name())
        self._parent.script.add_children(Element(Template("""
            vega_parse({{this.json}},{{this.get_name()}});
            """).render(this=self)), name=self.get_name())
        figure = self.get_root()
        assert isinstance(figure, Figure), ("You cannot render this Element "
                                            "if it's not in a Figure.")
        # Bug fix: the CSS rule was missing its closing brace before
        # </style>, producing malformed CSS in the figure header.
        figure.header.add_children(Element(Template("""
            <style> #{{this.get_name()}} {
                position : {{this.position}};
                width : {{this.width[0]}}{{this.width[1]}};
                height: {{this.height[0]}}{{this.height[1]}};
                left: {{this.left[0]}}{{this.left[1]}};
                top: {{this.top[0]}}{{this.top[1]}};
                }
            </style>
            """).render(this=self, **kwargs)), name=self.get_name())
        figure.header.add_children(
            JavascriptLink("https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js"),
            name='d3')
        figure.header.add_children(
            JavascriptLink("https://cdnjs.cloudflare.com/ajax/libs/vega/1.4.3/vega.min.js"),
            name='vega')
        figure.header.add_children(
            JavascriptLink("https://code.jquery.com/jquery-2.1.0.min.js"),
            name='jquery')
        figure.script.add_children(
            Template("""function vega_parse(spec, div) {
            vg.parse.spec(spec, function(chart) { chart({el:div}).update(); });}"""),
            name='vega_parse')
class GeoJson(MacroElement):
    def __init__(self, data):
        """Creates a GeoJson plugin to append into a map with
        Map.add_plugin.

        Parameters
        ----------
        data: file, dict or str.
            The geo-json data you want to plot.
            If file, then data will be read in the file and fully embeded in Leaflet's javascript.
            If dict, then data will be converted to json and embeded in the javascript.
            If str, then data will be passed to the javascript as-is.

        examples :
            # providing file
            GeoJson(open('foo.json'))

            # providing dict
            GeoJson(json.load(open('foo.json')))

            # providing string
            GeoJson(open('foo.json').read())
        """
        super(GeoJson, self).__init__()
        self._name = 'GeoJson'
        # Idiom fix: hasattr/isinstance instead of `'read' in dir(data)`
        # and `type(data) is dict`; behavior is unchanged.
        if hasattr(data, 'read'):
            # File-like object: embed its full contents.
            self.data = data.read()
        elif isinstance(data, dict):
            # Mapping: serialize to a JSON string.
            self.data = json.dumps(data)
        else:
            # Assume an already-serialized JSON string.
            self.data = data
        self._template = Template(u"""
        {% macro script(this, kwargs) %}
            var {{this.get_name()}} = L.geoJson({{this.data}}).addTo({{this._parent.get_name()}});
        {% endmacro %}
        """)
class GeoJsonStyle(MacroElement):
    def __init__(self, color_domain, color_code, color_data=None, key_on='feature.properties.color'):
        """Styles the parent GeoJson layer with a d3 threshold color scale.

        Parameters
        ----------
        color_domain : list
            Threshold boundaries for the d3.scale.threshold domain.
        color_code : str
            Color-brewer code passed to color_brewer to build the range
            (one color per domain entry).
        color_data : dict, optional
            Mapping from feature keys to values; when given, the scale is
            applied to color_data[key_on] instead of the raw feature value.
        key_on : str, default 'feature.properties.color'
            Javascript expression evaluated per feature to pick the value
            (or the color_data key).
        """
        super(GeoJsonStyle, self).__init__()
        self._name = 'GeoJsonStyle'
        self.color_domain = color_domain
        self.color_range = color_brewer(color_code, n=len(color_domain))
        # json.dumps(None) yields the string 'null'; the template branches
        # on that to decide whether to use the color_data lookup.
        self.color_data = json.dumps(color_data)
        self.key_on = key_on
        self._template = Template(u"""
        {% macro script(this, kwargs) %}
            var {{this.get_name()}} = {
                color_scale : d3.scale.threshold()
                    .domain({{this.color_domain}})
                    .range({{this.color_range}}),
                color_data : {{this.color_data}},
                color_function : function(feature) {
                    {% if this.color_data=='null' %}
                        return this.color_scale({{this.key_on}});
                    {% else %}
                        return this.color_scale(this.color_data[{{this.key_on}}]);
                    {% endif %}
                    },
                };

            {{this._parent.get_name()}}.setStyle(function(feature) {
                return {
                    fillColor: {{this.get_name()}}.color_function(feature),
                    weight: 2,
                    opacity: 1,
                    color: 'white',
                    dashArray: '3',
                    fillOpacity: 0.7
                    };
                });
        {% endmacro %}
        """)
    def render(self,**kwargs):
        """Renders the style macro and registers the d3 script in the root
        Figure's header. Raises AssertionError when the root element is
        not a Figure."""
        super(GeoJsonStyle,self).render(**kwargs)
        figure = self.get_root()
        assert isinstance(figure,Figure), ("You cannot render this Element "
                                           "if it's not in a Figure.")
        figure.header.add_children(\
            JavascriptLink("https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js"),
            name='d3')
class MarkerCluster(MacroElement):
    """Adds a MarkerCluster layer on the map."""
    def __init__(self):
        """Creates a MarkerCluster element to append into a map with
        Map.add_children.

        Markers added as children of this element are grouped into a
        Leaflet.markercluster layer.
        """
        super(MarkerCluster, self).__init__()
        self._name = 'MarkerCluster'
        self._template = Template(u"""
        {% macro script(this, kwargs) %}
            var {{this.get_name()}} = L.markerClusterGroup();
            {{this._parent.get_name()}}.addLayer({{this.get_name()}});
        {% endmacro %}
        """)

    def render(self, **kwargs):
        """Renders the cluster macro and registers the Leaflet.markercluster
        JS/CSS assets in the root Figure's header. Raises AssertionError
        when the root element is not a Figure."""
        # Bug fix: kwargs were previously dropped instead of being
        # forwarded to the parent renderer (cf. GeoJsonStyle.render).
        super(MarkerCluster, self).render(**kwargs)
        figure = self.get_root()
        assert isinstance(figure, Figure), ("You cannot render this Element "
                                            "if it's not in a Figure.")
        figure.header.add_children(
            JavascriptLink("https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster"
                           "/0.4.0/leaflet.markercluster-src.js"),
            name='marker_cluster_src')
        figure.header.add_children(
            JavascriptLink("https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster"
                           "/0.4.0/leaflet.markercluster.js"),
            name='marker_cluster')
        figure.header.add_children(
            CssLink("https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/MarkerCluster.css"),
            name='marker_cluster_css')
        figure.header.add_children(
            CssLink("https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/0.4.0/MarkerCluster.Default.css"),
            name="marker_cluster_default_css")
|
#!/usr/bin/env python2.7
# Copyright (c) 2012 Jonathan Warren
# Copyright (c) 2012 The Bitmessage developers
# Distributed under the MIT/X11 software license. See the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#Right now, PyBitmessage only supports connecting to stream 1. It doesn't yet contain logic to expand into further streams.
#The software version variable is now held in shared.py
#Module-level tuning constants. All age/length values are in seconds.
verbose = 1
maximumAgeOfAnObjectThatIAmWillingToAccept = 216000 #Equals two days and 12 hours.
lengthOfTimeToLeaveObjectsInInventory = 237600 #Equals two days and 18 hours. This should be longer than maximumAgeOfAnObjectThatIAmWillingToAccept so that we don't process messages twice.
lengthOfTimeToHoldOnToAllPubkeys = 2419200 #Equals 4 weeks. You could make this longer if you want but making it shorter would not be advisable because there is a very small possibility that it could keep you from obtaining a needed pubkey for a period of time.
maximumAgeOfObjectsThatIAdvertiseToOthers = 216000 #Equals two days and 12 hours
maximumAgeOfNodesThatIAdvertiseToOthers = 10800 #Equals three hours
storeConfigFilesInSameDirectoryAsProgramByDefault = False #The user may de-select Portable Mode in the settings if they want the config files to stay in the application data folder.
useVeryEasyProofOfWorkForTesting = False #If you set this to True while on the normal network, you won't be able to send or sometimes receive messages.
# NOTE(review): presumably the Unix timestamp after which broadcasts use
# the encrypted format — confirm against the protocol documentation.
encryptedBroadcastSwitchoverTime = 1369735200
import sys
import ConfigParser
import Queue
from addresses import *
#from shared import *
import shared
from defaultKnownNodes import *
import time
import socket
import threading
import hashlib
from struct import *
import pickle
import random
import sqlite3
import threading
from time import strftime, localtime, gmtime
import shutil #used for moving the messages.dat file
import string
import socks
import highlevelcrypto
from pyelliptic.openssl import OpenSSL
import ctypes
from pyelliptic import arithmetic
import signal #Used to capture a Ctrl-C keypress so that Bitmessage can shutdown gracefully.
#The next 3 are used for the API
from SimpleXMLRPCServer import *
import json
from subprocess import call #used when the API must execute an outside program
import singleton
#For each stream to which we connect, several outgoingSynSender threads will exist and will collectively create 8 connections with peers.
class outgoingSynSender(threading.Thread):
    """Repeatedly picks random known nodes for one stream and attempts
    outgoing connections to them, keeping up to 8 self-initiated
    connections per stream. Several instances run concurrently."""
    def __init__(self):
        threading.Thread.__init__(self)
    def setup(self,streamNumber):
        # streamNumber: the Bitmessage stream this sender serves.
        self.streamNumber = streamNumber
    def run(self):
        """Main loop: sample a host, set up an (optionally SOCKS-proxied)
        socket, and spawn receiveDataThread/sendDataThread on success."""
        time.sleep(1)
        global alreadyAttemptedConnectionsListResetTime
        while True:
            #time.sleep(999999)#I sometimes use this to prevent connections for testing.
            if len(selfInitiatedConnections[self.streamNumber]) < 8: #maximum number of outgoing connections = 8
                random.seed()
                HOST, = random.sample(shared.knownNodes[self.streamNumber], 1)
                # Resample until we find a host we haven't tried recently and
                # aren't already connected to. The lock is released while
                # sleeping/resampling and re-acquired at the bottom of the loop.
                alreadyAttemptedConnectionsListLock.acquire()
                while HOST in alreadyAttemptedConnectionsList or HOST in shared.connectedHostsList:
                    alreadyAttemptedConnectionsListLock.release()
                    #print 'choosing new sample'
                    random.seed()
                    HOST, = random.sample(shared.knownNodes[self.streamNumber], 1)
                    time.sleep(1)
                    #Clear out the alreadyAttemptedConnectionsList every half hour so that this program will again attempt a connection to any nodes, even ones it has already tried.
                    if (time.time() - alreadyAttemptedConnectionsListResetTime) > 1800:
                        alreadyAttemptedConnectionsList.clear()
                        alreadyAttemptedConnectionsListResetTime = int(time.time())
                    alreadyAttemptedConnectionsListLock.acquire()
                alreadyAttemptedConnectionsList[HOST] = 0
                alreadyAttemptedConnectionsListLock.release()
                PORT, timeNodeLastSeen = shared.knownNodes[self.streamNumber][HOST]
                sock = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM)
                #This option apparently avoids the TIME_WAIT state so that we can rebind faster
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                sock.settimeout(20)
                if shared.config.get('bitmessagesettings', 'socksproxytype') == 'none' and verbose >= 2:
                    shared.printLock.acquire()
                    print 'Trying an outgoing connection to', HOST, ':', PORT
                    shared.printLock.release()
                    #sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                elif shared.config.get('bitmessagesettings', 'socksproxytype') == 'SOCKS4a':
                    if verbose >= 2:
                        shared.printLock.acquire()
                        print '(Using SOCKS4a) Trying an outgoing connection to', HOST, ':', PORT
                        shared.printLock.release()
                    proxytype = socks.PROXY_TYPE_SOCKS4
                    sockshostname = shared.config.get('bitmessagesettings', 'sockshostname')
                    socksport = shared.config.getint('bitmessagesettings', 'socksport')
                    rdns = True #Do domain name lookups through the proxy; though this setting doesn't really matter since we won't be doing any domain name lookups anyway.
                    if shared.config.getboolean('bitmessagesettings', 'socksauthentication'):
                        socksusername = shared.config.get('bitmessagesettings', 'socksusername')
                        sockspassword = shared.config.get('bitmessagesettings', 'sockspassword')
                        sock.setproxy(proxytype, sockshostname, socksport, rdns, socksusername, sockspassword)
                    else:
                        sock.setproxy(proxytype, sockshostname, socksport, rdns)
                elif shared.config.get('bitmessagesettings', 'socksproxytype') == 'SOCKS5':
                    if verbose >= 2:
                        shared.printLock.acquire()
                        print '(Using SOCKS5) Trying an outgoing connection to', HOST, ':', PORT
                        shared.printLock.release()
                    proxytype = socks.PROXY_TYPE_SOCKS5
                    sockshostname = shared.config.get('bitmessagesettings', 'sockshostname')
                    socksport = shared.config.getint('bitmessagesettings', 'socksport')
                    rdns = True #Do domain name lookups through the proxy; though this setting doesn't really matter since we won't be doing any domain name lookups anyway.
                    if shared.config.getboolean('bitmessagesettings', 'socksauthentication'):
                        socksusername = shared.config.get('bitmessagesettings', 'socksusername')
                        sockspassword = shared.config.get('bitmessagesettings', 'sockspassword')
                        sock.setproxy(proxytype, sockshostname, socksport, rdns, socksusername, sockspassword)
                    else:
                        sock.setproxy(proxytype, sockshostname, socksport, rdns)
                try:
                    sock.connect((HOST, PORT))
                    # Success: hand the socket to a receive/send thread pair
                    # that share the objects-already-aware dict.
                    rd = receiveDataThread()
                    rd.daemon = True # close the main program even if there are threads left
                    #self.emit(SIGNAL("passObjectThrough(PyQt_PyObject)"),rd)
                    objectsOfWhichThisRemoteNodeIsAlreadyAware = {}
                    rd.setup(sock,HOST,PORT,self.streamNumber,objectsOfWhichThisRemoteNodeIsAlreadyAware)
                    rd.start()
                    shared.printLock.acquire()
                    print self, 'connected to', HOST, 'during an outgoing attempt.'
                    shared.printLock.release()
                    sd = sendDataThread()
                    sd.setup(sock,HOST,PORT,self.streamNumber,objectsOfWhichThisRemoteNodeIsAlreadyAware)
                    sd.start()
                    sd.sendVersionMessage()
                except socks.GeneralProxyError, err:
                    if verbose >= 2:
                        shared.printLock.acquire()
                        print 'Could NOT connect to', HOST, 'during outgoing attempt.', err
                        shared.printLock.release()
                    # Prune stale hosts we can no longer reach, but only when
                    # the known-nodes list is comfortably large.
                    PORT, timeLastSeen = shared.knownNodes[self.streamNumber][HOST]
                    if (int(time.time())-timeLastSeen) > 172800 and len(shared.knownNodes[self.streamNumber]) > 1000: # for nodes older than 48 hours old if we have more than 1000 hosts in our list, delete from the shared.knownNodes data-structure.
                        shared.knownNodesLock.acquire()
                        del shared.knownNodes[self.streamNumber][HOST]
                        shared.knownNodesLock.release()
                        shared.printLock.acquire()
                        print 'deleting ', HOST, 'from shared.knownNodes because it is more than 48 hours old and we could not connect to it.'
                        shared.printLock.release()
                except socks.Socks5AuthError, err:
                    #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"SOCKS5 Authentication problem: "+str(err))
                    shared.UISignalQueue.put(('updateStatusBar',"SOCKS5 Authentication problem: "+str(err)))
                except socks.Socks5Error, err:
                    # NOTE(review): this 'pass' is a leftover no-op; the print
                    # below still executes.
                    pass
                    print 'SOCKS5 error. (It is possible that the server wants authentication).)' ,str(err)
                    #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"SOCKS5 error. Server might require authentication. "+str(err))
                except socks.Socks4Error, err:
                    print 'Socks4Error:', err
                    #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"SOCKS4 error: "+str(err))
                except socket.error, err:
                    if shared.config.get('bitmessagesettings', 'socksproxytype')[0:5] == 'SOCKS':
                        print 'Bitmessage MIGHT be having trouble connecting to the SOCKS server. '+str(err)
                        #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"Problem: Bitmessage can not connect to the SOCKS server. "+str(err))
                    else:
                        if verbose >= 1:
                            shared.printLock.acquire()
                            print 'Could NOT connect to', HOST, 'during outgoing attempt.', err
                            shared.printLock.release()
                        PORT, timeLastSeen = shared.knownNodes[self.streamNumber][HOST]
                        if (int(time.time())-timeLastSeen) > 172800 and len(shared.knownNodes[self.streamNumber]) > 1000: # for nodes older than 48 hours old if we have more than 1000 hosts in our list, delete from the knownNodes data-structure.
                            shared.knownNodesLock.acquire()
                            del shared.knownNodes[self.streamNumber][HOST]
                            shared.knownNodesLock.release()
                            print 'deleting ', HOST, 'from knownNodes because it is more than 48 hours old and we could not connect to it.'
                except Exception, err:
                    sys.stderr.write('An exception has occurred in the outgoingSynSender thread that was not caught by other exception types: %s\n' % err)
            time.sleep(0.1)
#Only one singleListener thread will ever exist. It creates the receiveDataThread and sendDataThread for each incoming connection. Note that it cannot set the stream number because it is not known yet- the other node will have to tell us its stream number in a version message. If we don't care about their stream, we will close the connection (within the recversion function of the receiveData thread)
class singleListener(threading.Thread):
    """Accepts incoming TCP connections and spawns a sendDataThread /
    receiveDataThread pair for each one. The stream number is set to -1
    until the peer's version message reveals it."""
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        #We don't want to accept incoming connections if the user is using a SOCKS proxy. If they eventually select proxy 'none' then this will start listening for connections.
        while shared.config.get('bitmessagesettings', 'socksproxytype')[0:5] == 'SOCKS':
            time.sleep(300)
        shared.printLock.acquire()
        print 'Listening for incoming connections.'
        shared.printLock.release()
        HOST = '' # Symbolic name meaning all available interfaces
        PORT = shared.config.getint('bitmessagesettings', 'port')
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        #This option apparently avoids the TIME_WAIT state so that we can rebind faster
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((HOST, PORT))
        sock.listen(2)
        while True:
            #We don't want to accept incoming connections if the user is using a SOCKS proxy. If the user eventually select proxy 'none' then this will start listening for connections.
            while shared.config.get('bitmessagesettings', 'socksproxytype')[0:5] == 'SOCKS':
                time.sleep(10)
            # Back off while we are at the connection cap.
            while len(shared.connectedHostsList) > 220:
                shared.printLock.acquire()
                print 'We are connected to too many people. Not accepting further incoming connections for ten seconds.'
                shared.printLock.release()
                time.sleep(10)
            a,(HOST,PORT) = sock.accept()
            #The following code will, unfortunately, block an incoming connection if someone else on the same LAN is already connected because the two computers will share the same external IP. This is here to prevent connection flooding.
            while HOST in shared.connectedHostsList:
                shared.printLock.acquire()
                print 'incoming connection is from a host in shared.connectedHostsList (we are already connected to it). Ignoring it.'
                shared.printLock.release()
                a.close()
                a,(HOST,PORT) = sock.accept()
            # Hand the accepted socket to a send/receive thread pair; -1
            # marks the stream number as not-yet-known.
            objectsOfWhichThisRemoteNodeIsAlreadyAware = {}
            sd = sendDataThread()
            sd.setup(a,HOST,PORT,-1,objectsOfWhichThisRemoteNodeIsAlreadyAware)
            sd.start()
            rd = receiveDataThread()
            rd.daemon = True # close the main program even if there are threads left
            rd.setup(a,HOST,PORT,-1,objectsOfWhichThisRemoteNodeIsAlreadyAware)
            rd.start()
            shared.printLock.acquire()
            print self, 'connected to', HOST,'during INCOMING request.'
            shared.printLock.release()
#This thread is created either by the synSenderThread (for outgoing connections) or the singleListenerThread (for incoming connections).
class receiveDataThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.data = ''
self.verackSent = False
self.verackReceived = False
    def setup(self,sock,HOST,port,streamNumber,objectsOfWhichThisRemoteNodeIsAlreadyAware):
        """Binds this thread to a connected socket and registers the host
        in shared.connectedHostsList. streamNumber == -1 marks an incoming
        connection whose stream is not yet known."""
        self.sock = sock
        self.HOST = HOST
        self.PORT = port
        self.streamNumber = streamNumber
        self.payloadLength = 0 #This is the protocol payload length thus it doesn't include the 24 byte message header
        self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave = {}
        shared.connectedHostsList[self.HOST] = 0 #The very fact that this receiveData thread exists shows that we are connected to the remote host. Let's add it to this list so that an outgoingSynSender thread doesn't try to connect to it.
        self.connectionIsOrWasFullyEstablished = False #set to true after the remote node and I accept each other's version messages. This is needed to allow the user interface to accurately reflect the current number of connections.
        if self.streamNumber == -1: #This was an incoming connection. Send out a version message if we accept the other node's version message.
            self.initiatedConnection = False
        else:
            self.initiatedConnection = True
            selfInitiatedConnections[streamNumber][self] = 0
        self.ackDataThatWeHaveYetToSend = [] #When we receive a message bound for us, we store the acknowledgement that we need to send (the ackdata) here until we are done processing all other data received from this peer.
        self.objectsOfWhichThisRemoteNodeIsAlreadyAware = objectsOfWhichThisRemoteNodeIsAlreadyAware
    def run(self):
        """Receive loop: accumulate bytes from the socket into self.data
        and hand them to processData(); on disconnect/timeout, unregister
        the host and tell the matching sendDataThread to shut down."""
        shared.printLock.acquire()
        print 'ID of the receiveDataThread is', str(id(self))+'. The size of the shared.connectedHostsList is now', len(shared.connectedHostsList)
        shared.printLock.release()
        while True:
            try:
                self.data += self.sock.recv(4096)
            except socket.timeout:
                shared.printLock.acquire()
                print 'Timeout occurred waiting for data from', self.HOST + '. Closing receiveData thread. (ID:',str(id(self))+ ')'
                shared.printLock.release()
                break
            except Exception, err:
                shared.printLock.acquire()
                print 'sock.recv error. Closing receiveData thread (HOST:', self.HOST, 'ID:',str(id(self))+ ').', err
                shared.printLock.release()
                break
            #print 'Received', repr(self.data)
            # An empty recv() result means the peer closed the connection.
            if self.data == "":
                shared.printLock.acquire()
                print 'Connection to', self.HOST, 'closed. Closing receiveData thread. (ID:',str(id(self))+ ')'
                shared.printLock.release()
                break
            else:
                self.processData()
        # Cleanup: remove ourselves from the bookkeeping structures and
        # signal the paired sendDataThread to shut down.
        try:
            del selfInitiatedConnections[self.streamNumber][self]
            shared.printLock.acquire()
            print 'removed self (a receiveDataThread) from selfInitiatedConnections'
            shared.printLock.release()
        except:
            pass
        shared.broadcastToSendDataQueues((0, 'shutdown', self.HOST))
        try:
            del shared.connectedHostsList[self.HOST]
        except Exception, err:
            shared.printLock.acquire()
            print 'Could not delete', self.HOST, 'from shared.connectedHostsList.', err
            shared.printLock.release()
        shared.UISignalQueue.put(('updateNetworkStatusTab','no data'))
        shared.printLock.acquire()
        print 'The size of the connectedHostsList is now:', len(shared.connectedHostsList)
        shared.printLock.release()
    def processData(self):
        """Parses one complete protocol message from self.data (24-byte
        header: magic, command, length, checksum), dispatches it to the
        matching rec* handler, then recurses to process any remaining
        buffered data."""
        global verbose
        #if verbose >= 3:
            #shared.printLock.acquire()
            #print 'self.data is currently ', repr(self.data)
            #shared.printLock.release()
        if len(self.data) < 20: #if so little of the data has arrived that we can't even unpack the payload length
            pass
        elif self.data[0:4] != '\xe9\xbe\xb4\xd9':
            # Wrong magic bytes: the stream is out of sync, drop the buffer.
            if verbose >= 1:
                shared.printLock.acquire()
                sys.stderr.write('The magic bytes were not correct. First 40 bytes of data: %s\n' % repr(self.data[0:40]))
                print 'self.data:', self.data.encode('hex')
                shared.printLock.release()
            self.data = ""
        else:
            self.payloadLength, = unpack('>L',self.data[16:20])
            if len(self.data) >= self.payloadLength+24: #check if the whole message has arrived yet. If it has,...
                if self.data[20:24] == hashlib.sha512(self.data[24:self.payloadLength+24]).digest()[0:4]:#test the checksum in the message. If it is correct...
                    #print 'message checksum is correct'
                    #The time we've last seen this node is obviously right now since we just received valid data from it. So update the knownNodes list so that other peers can be made aware of its existance.
                    if self.initiatedConnection and self.connectionIsOrWasFullyEstablished: #The remote port is only something we should share with others if it is the remote node's incoming port (rather than some random operating-system-assigned outgoing port).
                        shared.knownNodesLock.acquire()
                        shared.knownNodes[self.streamNumber][self.HOST] = (self.PORT,int(time.time()))
                        shared.knownNodesLock.release()
                    if self.payloadLength <= 180000000: #If the size of the message is greater than 180MB, ignore it. (I get memory errors when processing messages much larger than this though it is concievable that this value will have to be lowered if some systems are less tolarant of large messages.)
                        remoteCommand = self.data[4:16]
                        shared.printLock.acquire()
                        print 'remoteCommand', repr(remoteCommand.replace('\x00','')), ' from', self.HOST
                        shared.printLock.release()
                        # Dispatch on the null-padded 12-byte command field.
                        # Most commands are only honored after the handshake
                        # (connectionIsOrWasFullyEstablished).
                        if remoteCommand == 'version\x00\x00\x00\x00\x00':
                            self.recversion(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'verack\x00\x00\x00\x00\x00\x00':
                            self.recverack()
                        elif remoteCommand == 'addr\x00\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recaddr(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'getpubkey\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recgetpubkey(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'pubkey\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recpubkey(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'inv\x00\x00\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recinv(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'getdata\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recgetdata(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'msg\x00\x00\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recmsg(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'broadcast\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recbroadcast(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'ping\x00\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.sendpong()
                        elif remoteCommand == 'pong\x00\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            pass
                        elif remoteCommand == 'alert\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            pass
                    self.data = self.data[self.payloadLength+24:]#take this message out and then process the next message
                    if self.data == '':
                        # Buffer drained: request one advertised object we
                        # don't already have (in memory or in SQL inventory).
                        while len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave) > 0:
                            random.seed()
                            objectHash, = random.sample(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave, 1)
                            if objectHash in shared.inventory:
                                shared.printLock.acquire()
                                print 'Inventory (in memory) already has object listed in inv message.'
                                shared.printLock.release()
                                del self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave[objectHash]
                            elif isInSqlInventory(objectHash):
                                if verbose >= 3:
                                    shared.printLock.acquire()
                                    print 'Inventory (SQL on disk) already has object listed in inv message.'
                                    shared.printLock.release()
                                del self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave[objectHash]
                            else:
                                self.sendgetdata(objectHash)
                                del self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave[objectHash] #It is possible that the remote node doesn't respond with the object. In that case, we'll very likely get it from someone else anyway.
                                if len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave) == 0:
                                    shared.printLock.acquire()
                                    print '(concerning', self.HOST + ')', 'number of objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave is now', len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave)
                                    shared.printLock.release()
                                break
                        if len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave) == 0:
                            shared.printLock.acquire()
                            print '(concerning', self.HOST + ')', 'number of objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave is now', len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave)
                            shared.printLock.release()
                    if len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave) > 0:
                        shared.printLock.acquire()
                        print '(concerning', self.HOST + ')', 'number of objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave is now', len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave)
                        shared.printLock.release()
                    # Pending ackdata is queued as if it had been received,
                    # then we recurse to process whatever remains.
                    if len(self.ackDataThatWeHaveYetToSend) > 0:
                        self.data = self.ackDataThatWeHaveYetToSend.pop()
                    self.processData()
                else:
                    print 'Checksum incorrect. Clearing this message.'
                    self.data = self.data[self.payloadLength+24:]
def isProofOfWorkSufficient(self,data,nonceTrialsPerByte=0,payloadLengthExtraBytes=0):
if nonceTrialsPerByte < shared.networkDefaultProofOfWorkNonceTrialsPerByte:
nonceTrialsPerByte = shared.networkDefaultProofOfWorkNonceTrialsPerByte
if payloadLengthExtraBytes < shared.networkDefaultPayloadLengthExtraBytes:
payloadLengthExtraBytes = shared.networkDefaultPayloadLengthExtraBytes
POW, = unpack('>Q',hashlib.sha512(hashlib.sha512(data[:8]+ hashlib.sha512(data[8:]).digest()).digest()).digest()[0:8])
#print 'POW:', POW
return POW <= 2**64 / ((len(data)+payloadLengthExtraBytes) * (nonceTrialsPerByte))
    def sendpong(self):
        """Sends a raw 'pong' message (header only, empty payload) in
        response to a peer's ping; send errors are logged, not raised."""
        print 'Sending pong'
        try:
            # Precomputed bytes: magic + 'pong' command + zero length +
            # checksum of the empty payload.
            self.sock.sendall('\xE9\xBE\xB4\xD9\x70\x6F\x6E\x67\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcf\x83\xe1\x35')
        except Exception, err:
            #if not 'Bad file descriptor' in err:
            shared.printLock.acquire()
            sys.stderr.write('sock.sendall error: %s\n' % err)
            shared.printLock.release()
    def recverack(self):
        """Handles a received verack; once a verack has gone both ways,
        the connection is considered fully established."""
        print 'verack received'
        self.verackReceived = True
        if self.verackSent == True:
            #We have thus both sent and received a verack.
            self.connectionFullyEstablished()
    def connectionFullyEstablished(self):
        """Runs once both veracks are exchanged: updates the UI, relaxes
        the socket timeout, advertises this node and our inventory, and
        drops the connection if we are over the incoming-connection cap."""
        self.connectionIsOrWasFullyEstablished = True
        if not self.initiatedConnection:
            #self.emit(SIGNAL("setStatusIcon(PyQt_PyObject)"),'green')
            shared.UISignalQueue.put(('setStatusIcon','green'))
        self.sock.settimeout(600) #We'll send out a pong every 5 minutes to make sure the connection stays alive if there has been no other traffic to send lately.
        shared.UISignalQueue.put(('updateNetworkStatusTab','no data'))
        remoteNodeIncomingPort, remoteNodeSeenTime = shared.knownNodes[self.streamNumber][self.HOST]
        shared.printLock.acquire()
        print 'Connection fully established with', self.HOST, remoteNodeIncomingPort
        print 'The size of the connectedHostsList is now', len(shared.connectedHostsList)
        print 'The length of sendDataQueues is now:', len(shared.sendDataQueues)
        print 'broadcasting addr from within connectionFullyEstablished function.'
        shared.printLock.release()
        self.broadcastaddr([(int(time.time()), self.streamNumber, 1, self.HOST, remoteNodeIncomingPort)]) #This lets all of our peers know about this new node.
        self.sendaddr() #This is one large addr message to this one peer.
        if not self.initiatedConnection and len(shared.connectedHostsList) > 200:
            shared.printLock.acquire()
            print 'We are connected to too many people. Closing connection.'
            shared.printLock.release()
            shared.broadcastToSendDataQueues((0, 'shutdown', self.HOST))
            return
        self.sendBigInv()
def sendBigInv(self):
shared.sqlLock.acquire()
#Select all hashes which are younger than two days old and in this stream.
t = (int(time.time())-maximumAgeOfObjectsThatIAdvertiseToOthers,int(time.time())-lengthOfTimeToHoldOnToAllPubkeys,self.streamNumber)
shared.sqlSubmitQueue.put('''SELECT hash FROM inventory WHERE ((receivedtime>? and objecttype<>'pubkey') or (receivedtime>? and objecttype='pubkey')) and streamnumber=?''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
bigInvList = {}
for row in queryreturn:
hash, = row
if hash not in self.objectsOfWhichThisRemoteNodeIsAlreadyAware:
bigInvList[hash] = 0
else:
shared.printLock.acquire()
print 'Not including an object hash in a big inv message because the remote node is already aware of it.'#This line is here to check that this feature is working.
shared.printLock.release()
#We also have messages in our inventory in memory (which is a python dictionary). Let's fetch those too.
for hash, storedValue in shared.inventory.items():
if hash not in self.objectsOfWhichThisRemoteNodeIsAlreadyAware:
objectType, streamNumber, payload, receivedTime = storedValue
if streamNumber == self.streamNumber and receivedTime > int(time.time())-maximumAgeOfObjectsThatIAdvertiseToOthers:
bigInvList[hash] = 0
else:
shared.printLock.acquire()
print 'Not including an object hash in a big inv message because the remote node is already aware of it.'#This line is here to check that this feature is working.
shared.printLock.release()
numberOfObjectsInInvMessage = 0
payload = ''
#Now let us start appending all of these hashes together. They will be sent out in a big inv message to our new peer.
for hash, storedValue in bigInvList.items():
payload += hash
numberOfObjectsInInvMessage += 1
if numberOfObjectsInInvMessage >= 50000: #We can only send a max of 50000 items per inv message but we may have more objects to advertise. They must be split up into multiple inv messages.
self.sendinvMessageToJustThisOnePeer(numberOfObjectsInInvMessage,payload)
payload = ''
numberOfObjectsInInvMessage = 0
if numberOfObjectsInInvMessage > 0:
self.sendinvMessageToJustThisOnePeer(numberOfObjectsInInvMessage,payload)
#Self explanatory. Notice that there is also a broadcastinv function for broadcasting invs to everyone in our stream.
def sendinvMessageToJustThisOnePeer(self,numberOfObjects,payload):
payload = encodeVarint(numberOfObjects) + payload
headerData = '\xe9\xbe\xb4\xd9' #magic bits, slighly different from Bitcoin's magic bits.
headerData += 'inv\x00\x00\x00\x00\x00\x00\x00\x00\x00'
headerData += pack('>L',len(payload))
headerData += hashlib.sha512(payload).digest()[:4]
shared.printLock.acquire()
print 'Sending huge inv message with', numberOfObjects, 'objects to just this one peer'
shared.printLock.release()
try:
self.sock.sendall(headerData + payload)
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
    #We have received a broadcast message
    def recbroadcast(self,data):
        """Validate a received broadcast object (proof of work, timestamp,
        length, stream number), record it in the inventory, relay it to peers,
        then hand it to processbroadcast. Afterwards sleeps so that total
        handling time reveals less about whether we could read the message
        (timing attack mitigation)."""
        self.messageProcessingStartTime = time.time()
        #First we must check to make sure the proof of work is sufficient.
        if not self.isProofOfWorkSufficient(data):
            print 'Proof of work in broadcast message insufficient.'
            return
        readPosition = 8 #bypass the nonce
        embeddedTime, = unpack('>I',data[readPosition:readPosition+4])
        #This section is used for the transition from 32 bit time to 64 bit time in the protocol.
        if embeddedTime == 0:
            #A zero 32-bit time signals that a 64-bit time follows instead.
            embeddedTime, = unpack('>Q',data[readPosition:readPosition+8])
            readPosition += 8
        else:
            readPosition += 4
        if embeddedTime > (int(time.time())+10800): #prevent funny business
            print 'The embedded time in this broadcast message is more than three hours in the future. That doesn\'t make sense. Ignoring message.'
            return
        if embeddedTime < (int(time.time())-maximumAgeOfAnObjectThatIAmWillingToAccept):
            print 'The embedded time in this broadcast message is too old. Ignoring message.'
            return
        if len(data) < 180:
            print 'The payload length of this broadcast packet is unreasonably low. Someone is probably trying funny business. Ignoring message.'
            return
        #Let us check to make sure the stream number is correct (thus preventing an individual from sending broadcasts out on the wrong streams or all streams).
        broadcastVersion, broadcastVersionLength = decodeVarint(data[readPosition:readPosition+10])
        if broadcastVersion >= 2:
            #Only v2+ broadcasts carry a cleartext stream number to check here.
            streamNumber, streamNumberLength = decodeVarint(data[readPosition+broadcastVersionLength:readPosition+broadcastVersionLength+10])
            if streamNumber != self.streamNumber:
                print 'The stream number encoded in this broadcast message (' + str(streamNumber) + ') does not match the stream number on which it was received. Ignoring it.'
                return
        shared.inventoryLock.acquire()
        self.inventoryHash = calculateInventoryHash(data)
        if self.inventoryHash in shared.inventory:
            print 'We have already received this broadcast object. Ignoring.'
            shared.inventoryLock.release()
            return
        elif isInSqlInventory(self.inventoryHash):
            print 'We have already received this broadcast object (it is stored on disk in the SQL inventory). Ignoring it.'
            shared.inventoryLock.release()
            return
        #It is valid so far. Let's let our peers know about it.
        objectType = 'broadcast'
        shared.inventory[self.inventoryHash] = (objectType, self.streamNumber, data, embeddedTime)
        shared.inventoryLock.release()
        self.broadcastinv(self.inventoryHash)
        #self.emit(SIGNAL("incrementNumberOfBroadcastsProcessed()"))
        shared.UISignalQueue.put(('incrementNumberOfBroadcastsProcessed','no data'))
        self.processbroadcast(readPosition,data)#When this function returns, we will have either successfully processed this broadcast because we are interested in it, ignored it because we aren't interested in it, or found problem with the broadcast that warranted ignoring it.
        # Let us now set lengthOfTimeWeShouldUseToProcessThisMessage. If we haven't used the specified amount of time, we shall sleep. These values are mostly the same values used for msg messages although broadcast messages are processed faster.
        if len(data) > 100000000: #Size is greater than 100 megabytes
            lengthOfTimeWeShouldUseToProcessThisMessage = 100 #seconds.
        elif len(data) > 10000000: #Between 100 and 10 megabytes
            lengthOfTimeWeShouldUseToProcessThisMessage = 20 #seconds.
        elif len(data) > 1000000: #Between 10 and 1 megabyte
            lengthOfTimeWeShouldUseToProcessThisMessage = 3 #seconds.
        else: #Less than 1 megabyte
            lengthOfTimeWeShouldUseToProcessThisMessage = .6 #seconds.
        sleepTime = lengthOfTimeWeShouldUseToProcessThisMessage - (time.time()- self.messageProcessingStartTime)
        if sleepTime > 0:
            shared.printLock.acquire()
            print 'Timing attack mitigation: Sleeping for', sleepTime ,'seconds.'
            shared.printLock.release()
            time.sleep(sleepTime)
        shared.printLock.acquire()
        print 'Total message processing time:', time.time()- self.messageProcessingStartTime, 'seconds.'
        shared.printLock.release()
    #A broadcast message has a valid time and POW and requires processing. The recbroadcast function calls this one.
    def processbroadcast(self,readPosition,data):
        """Parse and act on an already-validated broadcast object.

        readPosition points just past the header fields that recbroadcast
        already consumed. Version 1 broadcasts arrive in the clear; version 2
        broadcasts are encrypted and we try to decrypt them with the keys of
        the broadcasters we are subscribed to. In both cases the sender's
        embedded pubkey is verified against its address hash, the signature is
        checked, the pubkey is stored for possible replies, and the message is
        placed in the inbox (unless its encoding type is 0).
        """
        broadcastVersion, broadcastVersionLength = decodeVarint(data[readPosition:readPosition+9])
        readPosition += broadcastVersionLength
        if broadcastVersion < 1 or broadcastVersion > 2:
            print 'Cannot decode incoming broadcast versions higher than 2. Assuming the sender isn\' being silly, you should upgrade Bitmessage because this message shall be ignored.'
            return
        if broadcastVersion == 1:
            beginningOfPubkeyPosition = readPosition #used when we add the pubkey to our pubkey table
            sendersAddressVersion, sendersAddressVersionLength = decodeVarint(data[readPosition:readPosition+9])
            if sendersAddressVersion <= 1 or sendersAddressVersion >=3:
                #Cannot decode senderAddressVersion higher than 2. Assuming the sender isn\' being silly, you should upgrade Bitmessage because this message shall be ignored.
                return
            readPosition += sendersAddressVersionLength
            if sendersAddressVersion == 2:
                sendersStream, sendersStreamLength = decodeVarint(data[readPosition:readPosition+9])
                readPosition += sendersStreamLength
                behaviorBitfield = data[readPosition:readPosition+4]
                readPosition += 4
                #Pubkeys are transmitted without the 0x04 uncompressed-point prefix; restore it here.
                sendersPubSigningKey = '\x04' + data[readPosition:readPosition+64]
                readPosition += 64
                sendersPubEncryptionKey = '\x04' + data[readPosition:readPosition+64]
                readPosition += 64
                endOfPubkeyPosition = readPosition
                sendersHash = data[readPosition:readPosition+20]
                if sendersHash not in shared.broadcastSendersForWhichImWatching:
                    #Display timing data
                    shared.printLock.acquire()
                    print 'Time spent deciding that we are not interested in this v1 broadcast:', time.time()- self.messageProcessingStartTime
                    shared.printLock.release()
                    return
                #At this point, this message claims to be from sendersHash and we are interested in it. We still have to hash the public key to make sure it is truly the key that matches the hash, and also check the signiture.
                readPosition += 20
                sha = hashlib.new('sha512')
                sha.update(sendersPubSigningKey+sendersPubEncryptionKey)
                ripe = hashlib.new('ripemd160')
                ripe.update(sha.digest())
                if ripe.digest() != sendersHash:
                    #The sender of this message lied.
                    return
                messageEncodingType, messageEncodingTypeLength = decodeVarint(data[readPosition:readPosition+9])
                if messageEncodingType == 0:
                    return
                readPosition += messageEncodingTypeLength
                messageLength, messageLengthLength = decodeVarint(data[readPosition:readPosition+9])
                readPosition += messageLengthLength
                message = data[readPosition:readPosition+messageLength]
                readPosition += messageLength
                readPositionAtBottomOfMessage = readPosition
                signatureLength, signatureLengthLength = decodeVarint(data[readPosition:readPosition+9])
                readPosition += signatureLengthLength
                signature = data[readPosition:readPosition+signatureLength]
                try:
                    #NOTE(review): the signed region starts at offset 12 — presumably just past the nonce and 32-bit time; confirm against the protocol spec.
                    highlevelcrypto.verify(data[12:readPositionAtBottomOfMessage],signature,sendersPubSigningKey.encode('hex'))
                    print 'ECDSA verify passed'
                except Exception, err:
                    print 'ECDSA verify failed', err
                    return
                #verify passed
                #Let's store the public key in case we want to reply to this person.
                #We don't have the correct nonce or time (which would let us send out a pubkey message) so we'll just fill it with 1's. We won't be able to send this pubkey to others (without doing the proof of work ourselves, which this program is programmed to not do.)
                t = (ripe.digest(),'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF'+'\xFF\xFF\xFF\xFF'+data[beginningOfPubkeyPosition:endOfPubkeyPosition],int(time.time()),'yes')
                shared.sqlLock.acquire()
                shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
                shared.sqlSubmitQueue.put(t)
                shared.sqlReturnQueue.get()
                shared.sqlSubmitQueue.put('commit')
                shared.sqlLock.release()
                shared.workerQueue.put(('newpubkey',(sendersAddressVersion,sendersStream,ripe.digest()))) #This will check to see whether we happen to be awaiting this pubkey in order to send a message. If we are, it will do the POW and send it.
                fromAddress = encodeAddress(sendersAddressVersion,sendersStream,ripe.digest())
                shared.printLock.acquire()
                print 'fromAddress:', fromAddress
                shared.printLock.release()
                if messageEncodingType == 2:
                    #Encoding 2: structured 'Subject:...\nBody:...' text.
                    bodyPositionIndex = string.find(message,'\nBody:')
                    if bodyPositionIndex > 1:
                        subject = message[8:bodyPositionIndex]
                        body = message[bodyPositionIndex+6:]
                    else:
                        subject = ''
                        body = message
                elif messageEncodingType == 1:
                    #Encoding 1: trivial; the entire message is the body.
                    body = message
                    subject = ''
                elif messageEncodingType == 0:
                    print 'messageEncodingType == 0. Doing nothing with the message.'
                else:
                    body = 'Unknown encoding type.\n\n' + repr(message)
                    subject = ''
                toAddress = '[Broadcast subscribers]'
                if messageEncodingType <> 0:
                    shared.sqlLock.acquire()
                    t = (self.inventoryHash,toAddress,fromAddress,subject,int(time.time()),body,'inbox',messageEncodingType,0)
                    shared.sqlSubmitQueue.put('''INSERT INTO inbox VALUES (?,?,?,?,?,?,?,?,?)''')
                    shared.sqlSubmitQueue.put(t)
                    shared.sqlReturnQueue.get()
                    shared.sqlSubmitQueue.put('commit')
                    shared.sqlLock.release()
                    #self.emit(SIGNAL("displayNewInboxMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),self.inventoryHash,toAddress,fromAddress,subject,body)
                    shared.UISignalQueue.put(('displayNewInboxMessage',(self.inventoryHash,toAddress,fromAddress,subject,body)))
                    #If we are behaving as an API then we might need to run an outside command to let some program know that a new message has arrived.
                    if shared.safeConfigGetBoolean('bitmessagesettings','apienabled'):
                        try:
                            apiNotifyPath = shared.config.get('bitmessagesettings','apinotifypath')
                        except:
                            apiNotifyPath = ''
                        if apiNotifyPath != '':
                            call([apiNotifyPath, "newBroadcast"])
                #Display timing data
                shared.printLock.acquire()
                print 'Time spent processing this interesting broadcast:', time.time()- self.messageProcessingStartTime
                shared.printLock.release()
        if broadcastVersion == 2:
            cleartextStreamNumber, cleartextStreamNumberLength = decodeVarint(data[readPosition:readPosition+10])
            readPosition += cleartextStreamNumberLength
            initialDecryptionSuccessful = False
            #Try each subscription key we hold until one decrypts the payload.
            for key, cryptorObject in shared.MyECSubscriptionCryptorObjects.items():
                try:
                    decryptedData = cryptorObject.decrypt(data[readPosition:])
                    toRipe = key #This is the RIPE hash of the sender's pubkey. We need this below to compare to the RIPE hash of the sender's address to verify that it was encrypted by with their key rather than some other key.
                    initialDecryptionSuccessful = True
                    print 'EC decryption successful using key associated with ripe hash:', key.encode('hex')
                    break
                except Exception, err:
                    pass
                    #print 'cryptorObject.decrypt Exception:', err
            if not initialDecryptionSuccessful:
                #This is not a broadcast I am interested in.
                shared.printLock.acquire()
                print 'Length of time program spent failing to decrypt this v2 broadcast:', time.time()- self.messageProcessingStartTime, 'seconds.'
                shared.printLock.release()
                return
            #At this point this is a broadcast I have decrypted and thus am interested in.
            signedBroadcastVersion, readPosition = decodeVarint(decryptedData[:10])
            #NOTE: readPosition is rebased here — from now on it indexes into decryptedData, not data.
            beginningOfPubkeyPosition = readPosition #used when we add the pubkey to our pubkey table
            sendersAddressVersion, sendersAddressVersionLength = decodeVarint(decryptedData[readPosition:readPosition+9])
            if sendersAddressVersion < 2 or sendersAddressVersion > 3:
                print 'Cannot decode senderAddressVersion other than 2 or 3. Assuming the sender isn\' being silly, you should upgrade Bitmessage because this message shall be ignored.'
                return
            readPosition += sendersAddressVersionLength
            sendersStream, sendersStreamLength = decodeVarint(decryptedData[readPosition:readPosition+9])
            if sendersStream != cleartextStreamNumber:
                print 'The stream number outside of the encryption on which the POW was completed doesn\'t match the stream number inside the encryption. Ignoring broadcast.'
                return
            readPosition += sendersStreamLength
            behaviorBitfield = decryptedData[readPosition:readPosition+4]
            readPosition += 4
            #Pubkeys are transmitted without the 0x04 uncompressed-point prefix; restore it here.
            sendersPubSigningKey = '\x04' + decryptedData[readPosition:readPosition+64]
            readPosition += 64
            sendersPubEncryptionKey = '\x04' + decryptedData[readPosition:readPosition+64]
            readPosition += 64
            if sendersAddressVersion >= 3:
                #v3 addresses embed the sender's demanded proof-of-work parameters.
                requiredAverageProofOfWorkNonceTrialsPerByte, varintLength = decodeVarint(decryptedData[readPosition:readPosition+10])
                readPosition += varintLength
                print 'sender\'s requiredAverageProofOfWorkNonceTrialsPerByte is', requiredAverageProofOfWorkNonceTrialsPerByte
                requiredPayloadLengthExtraBytes, varintLength = decodeVarint(decryptedData[readPosition:readPosition+10])
                readPosition += varintLength
                print 'sender\'s requiredPayloadLengthExtraBytes is', requiredPayloadLengthExtraBytes
            endOfPubkeyPosition = readPosition
            sha = hashlib.new('sha512')
            sha.update(sendersPubSigningKey+sendersPubEncryptionKey)
            ripe = hashlib.new('ripemd160')
            ripe.update(sha.digest())
            if toRipe != ripe.digest():
                print 'The encryption key used to encrypt this message doesn\'t match the keys inbedded in the message itself. Ignoring message.'
                return
            messageEncodingType, messageEncodingTypeLength = decodeVarint(decryptedData[readPosition:readPosition+9])
            if messageEncodingType == 0:
                return
            readPosition += messageEncodingTypeLength
            messageLength, messageLengthLength = decodeVarint(decryptedData[readPosition:readPosition+9])
            readPosition += messageLengthLength
            message = decryptedData[readPosition:readPosition+messageLength]
            readPosition += messageLength
            readPositionAtBottomOfMessage = readPosition
            signatureLength, signatureLengthLength = decodeVarint(decryptedData[readPosition:readPosition+9])
            readPosition += signatureLengthLength
            signature = decryptedData[readPosition:readPosition+signatureLength]
            try:
                highlevelcrypto.verify(decryptedData[:readPositionAtBottomOfMessage],signature,sendersPubSigningKey.encode('hex'))
                print 'ECDSA verify passed'
            except Exception, err:
                print 'ECDSA verify failed', err
                return
            #verify passed
            #Let's store the public key in case we want to reply to this person.
            t = (ripe.digest(),'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF'+'\xFF\xFF\xFF\xFF'+decryptedData[beginningOfPubkeyPosition:endOfPubkeyPosition],int(time.time()),'yes')
            shared.sqlLock.acquire()
            shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
            shared.sqlSubmitQueue.put(t)
            shared.sqlReturnQueue.get()
            shared.sqlSubmitQueue.put('commit')
            shared.sqlLock.release()
            shared.workerQueue.put(('newpubkey',(sendersAddressVersion,sendersStream,ripe.digest()))) #This will check to see whether we happen to be awaiting this pubkey in order to send a message. If we are, it will do the POW and send it.
            fromAddress = encodeAddress(sendersAddressVersion,sendersStream,ripe.digest())
            shared.printLock.acquire()
            print 'fromAddress:', fromAddress
            shared.printLock.release()
            if messageEncodingType == 2:
                #Encoding 2: structured 'Subject:...\nBody:...' text.
                bodyPositionIndex = string.find(message,'\nBody:')
                if bodyPositionIndex > 1:
                    subject = message[8:bodyPositionIndex]
                    body = message[bodyPositionIndex+6:]
                else:
                    subject = ''
                    body = message
            elif messageEncodingType == 1:
                #Encoding 1: trivial; the entire message is the body.
                body = message
                subject = ''
            elif messageEncodingType == 0:
                print 'messageEncodingType == 0. Doing nothing with the message.'
            else:
                body = 'Unknown encoding type.\n\n' + repr(message)
                subject = ''
            toAddress = '[Broadcast subscribers]'
            if messageEncodingType <> 0:
                shared.sqlLock.acquire()
                t = (self.inventoryHash,toAddress,fromAddress,subject,int(time.time()),body,'inbox',messageEncodingType,0)
                shared.sqlSubmitQueue.put('''INSERT INTO inbox VALUES (?,?,?,?,?,?,?,?,?)''')
                shared.sqlSubmitQueue.put(t)
                shared.sqlReturnQueue.get()
                shared.sqlSubmitQueue.put('commit')
                shared.sqlLock.release()
                #self.emit(SIGNAL("displayNewInboxMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),self.inventoryHash,toAddress,fromAddress,subject,body)
                shared.UISignalQueue.put(('displayNewInboxMessage',(self.inventoryHash,toAddress,fromAddress,subject,body)))
                #If we are behaving as an API then we might need to run an outside command to let some program know that a new message has arrived.
                if shared.safeConfigGetBoolean('bitmessagesettings','apienabled'):
                    try:
                        apiNotifyPath = shared.config.get('bitmessagesettings','apinotifypath')
                    except:
                        apiNotifyPath = ''
                    if apiNotifyPath != '':
                        call([apiNotifyPath, "newBroadcast"])
            #Display timing data
            shared.printLock.acquire()
            print 'Time spent processing this interesting broadcast:', time.time()- self.messageProcessingStartTime
            shared.printLock.release()
    #We have received a msg message.
    def recmsg(self,data):
        """Validate a received msg object (proof of work, timestamp, stream
        number), record it in the inventory, relay it to peers, then hand it
        to processmsg. Afterwards sleeps so that total handling time reveals
        less about whether the message was bound for us (timing attack
        mitigation)."""
        self.messageProcessingStartTime = time.time()
        #First we must check to make sure the proof of work is sufficient.
        if not self.isProofOfWorkSufficient(data):
            print 'Proof of work in msg message insufficient.'
            return
        readPosition = 8
        embeddedTime, = unpack('>I',data[readPosition:readPosition+4])
        #This section is used for the transition from 32 bit time to 64 bit time in the protocol.
        if embeddedTime == 0:
            #A zero 32-bit time signals that a 64-bit time follows instead.
            embeddedTime, = unpack('>Q',data[readPosition:readPosition+8])
            readPosition += 8
        else:
            readPosition += 4
        if embeddedTime > int(time.time())+10800:
            print 'The time in the msg message is too new. Ignoring it. Time:', embeddedTime
            return
        if embeddedTime < int(time.time())-maximumAgeOfAnObjectThatIAmWillingToAccept:
            print 'The time in the msg message is too old. Ignoring it. Time:', embeddedTime
            return
        streamNumberAsClaimedByMsg, streamNumberAsClaimedByMsgLength = decodeVarint(data[readPosition:readPosition+9])
        if streamNumberAsClaimedByMsg != self.streamNumber:
            print 'The stream number encoded in this msg (' + str(streamNumberAsClaimedByMsg) + ') message does not match the stream number on which it was received. Ignoring it.'
            return
        readPosition += streamNumberAsClaimedByMsgLength
        self.inventoryHash = calculateInventoryHash(data)
        shared.inventoryLock.acquire()
        if self.inventoryHash in shared.inventory:
            print 'We have already received this msg message. Ignoring.'
            shared.inventoryLock.release()
            return
        elif isInSqlInventory(self.inventoryHash):
            print 'We have already received this msg message (it is stored on disk in the SQL inventory). Ignoring it.'
            shared.inventoryLock.release()
            return
        #This msg message is valid. Let's let our peers know about it.
        objectType = 'msg'
        shared.inventory[self.inventoryHash] = (objectType, self.streamNumber, data, embeddedTime)
        shared.inventoryLock.release()
        self.broadcastinv(self.inventoryHash)
        #self.emit(SIGNAL("incrementNumberOfMessagesProcessed()"))
        shared.UISignalQueue.put(('incrementNumberOfMessagesProcessed','no data'))
        self.processmsg(readPosition,data) #When this function returns, we will have either successfully processed the message bound for us, ignored it because it isn't bound for us, or found problem with the message that warranted ignoring it.
        # Let us now set lengthOfTimeWeShouldUseToProcessThisMessage. If we haven't used the specified amount of time, we shall sleep. These values are based on test timings and you may change them at-will.
        if len(data) > 100000000: #Size is greater than 100 megabytes
            lengthOfTimeWeShouldUseToProcessThisMessage = 100 #seconds. Actual length of time it took my computer to decrypt and verify the signature of a 100 MB message: 3.7 seconds.
        elif len(data) > 10000000: #Between 100 and 10 megabytes
            lengthOfTimeWeShouldUseToProcessThisMessage = 20 #seconds. Actual length of time it took my computer to decrypt and verify the signature of a 10 MB message: 0.53 seconds. Actual length of time it takes in practice when processing a real message: 1.44 seconds.
        elif len(data) > 1000000: #Between 10 and 1 megabyte
            lengthOfTimeWeShouldUseToProcessThisMessage = 3 #seconds. Actual length of time it took my computer to decrypt and verify the signature of a 1 MB message: 0.18 seconds. Actual length of time it takes in practice when processing a real message: 0.30 seconds.
        else: #Less than 1 megabyte
            lengthOfTimeWeShouldUseToProcessThisMessage = .6 #seconds. Actual length of time it took my computer to decrypt and verify the signature of a 100 KB message: 0.15 seconds. Actual length of time it takes in practice when processing a real message: 0.25 seconds.
        sleepTime = lengthOfTimeWeShouldUseToProcessThisMessage - (time.time()- self.messageProcessingStartTime)
        if sleepTime > 0:
            shared.printLock.acquire()
            print 'Timing attack mitigation: Sleeping for', sleepTime ,'seconds.'
            shared.printLock.release()
            time.sleep(sleepTime)
        shared.printLock.acquire()
        print 'Total message processing time:', time.time()- self.messageProcessingStartTime, 'seconds.'
        shared.printLock.release()
#A msg message has a valid time and POW and requires processing. The recmsg function calls this one.
def processmsg(self,readPosition, encryptedData):
initialDecryptionSuccessful = False
#Let's check whether this is a message acknowledgement bound for us.
if encryptedData[readPosition:] in ackdataForWhichImWatching:
shared.printLock.acquire()
print 'This msg IS an acknowledgement bound for me.'
shared.printLock.release()
del ackdataForWhichImWatching[encryptedData[readPosition:]]
t = ('ackreceived',encryptedData[readPosition:])
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('UPDATE sent SET status=? WHERE ackdata=?')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
#self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),encryptedData[readPosition:],'Acknowledgement of the message received just now.')
shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(encryptedData[readPosition:],'Acknowledgement of the message received just now.')))
return
else:
shared.printLock.acquire()
print 'This was NOT an acknowledgement bound for me.'
#print 'ackdataForWhichImWatching', ackdataForWhichImWatching
shared.printLock.release()
#This is not an acknowledgement bound for me. See if it is a message bound for me by trying to decrypt it with my private keys.
for key, cryptorObject in shared.myECCryptorObjects.items():
try:
decryptedData = cryptorObject.decrypt(encryptedData[readPosition:])
toRipe = key #This is the RIPE hash of my pubkeys. We need this below to compare to the destination_ripe included in the encrypted data.
initialDecryptionSuccessful = True
print 'EC decryption successful using key associated with ripe hash:', key.encode('hex')
break
except Exception, err:
pass
#print 'cryptorObject.decrypt Exception:', err
if not initialDecryptionSuccessful:
#This is not a message bound for me.
shared.printLock.acquire()
print 'Length of time program spent failing to decrypt this message:', time.time()- self.messageProcessingStartTime, 'seconds.'
shared.printLock.release()
else:
#This is a message bound for me.
toAddress = shared.myAddressesByHash[toRipe] #Look up my address based on the RIPE hash.
readPosition = 0
messageVersion, messageVersionLength = decodeVarint(decryptedData[readPosition:readPosition+10])
readPosition += messageVersionLength
if messageVersion != 1:
print 'Cannot understand message versions other than one. Ignoring message.'
return
sendersAddressVersionNumber, sendersAddressVersionNumberLength = decodeVarint(decryptedData[readPosition:readPosition+10])
readPosition += sendersAddressVersionNumberLength
if sendersAddressVersionNumber == 0:
print 'Cannot understand sendersAddressVersionNumber = 0. Ignoring message.'
return
if sendersAddressVersionNumber >= 4:
print 'Sender\'s address version number', sendersAddressVersionNumber, 'not yet supported. Ignoring message.'
return
if len(decryptedData) < 170:
print 'Length of the unencrypted data is unreasonably short. Sanity check failed. Ignoring message.'
return
sendersStreamNumber, sendersStreamNumberLength = decodeVarint(decryptedData[readPosition:readPosition+10])
if sendersStreamNumber == 0:
print 'sender\'s stream number is 0. Ignoring message.'
return
readPosition += sendersStreamNumberLength
behaviorBitfield = decryptedData[readPosition:readPosition+4]
readPosition += 4
pubSigningKey = '\x04' + decryptedData[readPosition:readPosition+64]
readPosition += 64
pubEncryptionKey = '\x04' + decryptedData[readPosition:readPosition+64]
readPosition += 64
if sendersAddressVersionNumber >= 3:
requiredAverageProofOfWorkNonceTrialsPerByte, varintLength = decodeVarint(decryptedData[readPosition:readPosition+10])
readPosition += varintLength
print 'sender\'s requiredAverageProofOfWorkNonceTrialsPerByte is', requiredAverageProofOfWorkNonceTrialsPerByte
requiredPayloadLengthExtraBytes, varintLength = decodeVarint(decryptedData[readPosition:readPosition+10])
readPosition += varintLength
print 'sender\'s requiredPayloadLengthExtraBytes is', requiredPayloadLengthExtraBytes
endOfThePublicKeyPosition = readPosition #needed for when we store the pubkey in our database of pubkeys for later use.
if toRipe != decryptedData[readPosition:readPosition+20]:
shared.printLock.acquire()
print 'The original sender of this message did not send it to you. Someone is attempting a Surreptitious Forwarding Attack.'
print 'See: http://world.std.com/~dtd/sign_encrypt/sign_encrypt7.html'
print 'your toRipe:', toRipe.encode('hex')
print 'embedded destination toRipe:', decryptedData[readPosition:readPosition+20].encode('hex')
shared.printLock.release()
return
readPosition += 20
messageEncodingType, messageEncodingTypeLength = decodeVarint(decryptedData[readPosition:readPosition+10])
readPosition += messageEncodingTypeLength
messageLength, messageLengthLength = decodeVarint(decryptedData[readPosition:readPosition+10])
readPosition += messageLengthLength
message = decryptedData[readPosition:readPosition+messageLength]
#print 'First 150 characters of message:', repr(message[:150])
readPosition += messageLength
ackLength, ackLengthLength = decodeVarint(decryptedData[readPosition:readPosition+10])
readPosition += ackLengthLength
ackData = decryptedData[readPosition:readPosition+ackLength]
readPosition += ackLength
positionOfBottomOfAckData = readPosition #needed to mark the end of what is covered by the signature
signatureLength, signatureLengthLength = decodeVarint(decryptedData[readPosition:readPosition+10])
readPosition += signatureLengthLength
signature = decryptedData[readPosition:readPosition+signatureLength]
try:
highlevelcrypto.verify(decryptedData[:positionOfBottomOfAckData],signature,pubSigningKey.encode('hex'))
print 'ECDSA verify passed'
except Exception, err:
print 'ECDSA verify failed', err
return
shared.printLock.acquire()
print 'As a matter of intellectual curiosity, here is the Bitcoin address associated with the keys owned by the other person:', calculateBitcoinAddressFromPubkey(pubSigningKey), ' ..and here is the testnet address:',calculateTestnetAddressFromPubkey(pubSigningKey),'. The other person must take their private signing key from Bitmessage and import it into Bitcoin (or a service like Blockchain.info) for it to be of any use. Do not use this unless you know what you are doing.'
shared.printLock.release()
#calculate the fromRipe.
sha = hashlib.new('sha512')
sha.update(pubSigningKey+pubEncryptionKey)
ripe = hashlib.new('ripemd160')
ripe.update(sha.digest())
#Let's store the public key in case we want to reply to this person.
t = (ripe.digest(),'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF'+'\xFF\xFF\xFF\xFF'+decryptedData[messageVersionLength:endOfThePublicKeyPosition],int(time.time()),'yes')
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.workerQueue.put(('newpubkey',(sendersAddressVersionNumber,sendersStreamNumber,ripe.digest()))) #This will check to see whether we happen to be awaiting this pubkey in order to send a message. If we are, it will do the POW and send it.
fromAddress = encodeAddress(sendersAddressVersionNumber,sendersStreamNumber,ripe.digest())
#If this message is bound for one of my version 3 addresses (or higher), then we must check to make sure it meets our demanded proof of work requirement.
if decodeAddress(toAddress)[1] >= 3:#If the toAddress version number is 3 or higher:
if not shared.isAddressInMyAddressBookSubscriptionsListOrWhitelist(fromAddress): #If I'm not friendly with this person:
requiredNonceTrialsPerByte = shared.config.getint(toAddress,'noncetrialsperbyte')
requiredPayloadLengthExtraBytes = shared.config.getint(toAddress,'payloadlengthextrabytes')
if not self.isProofOfWorkSufficient(encryptedData,requiredNonceTrialsPerByte,requiredPayloadLengthExtraBytes):
print 'Proof of work in msg message insufficient only because it does not meet our higher requirement.'
return
blockMessage = False #Gets set to True if the user shouldn't see the message according to black or white lists.
if shared.config.get('bitmessagesettings', 'blackwhitelist') == 'black': #If we are using a blacklist
t = (fromAddress,)
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''SELECT label FROM blacklist where address=? and enabled='1' ''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
if queryreturn != []:
shared.printLock.acquire()
print 'Message ignored because address is in blacklist.'
shared.printLock.release()
blockMessage = True
else: #We're using a whitelist
t = (fromAddress,)
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''SELECT label FROM whitelist where address=? and enabled='1' ''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
if queryreturn == []:
print 'Message ignored because address not in whitelist.'
blockMessage = True
if not blockMessage:
print 'fromAddress:', fromAddress
print 'First 150 characters of message:', repr(message[:150])
toLabel = shared.config.get(toAddress, 'label')
if toLabel == '':
toLabel = toAddress
if messageEncodingType == 2:
bodyPositionIndex = string.find(message,'\nBody:')
if bodyPositionIndex > 1:
subject = message[8:bodyPositionIndex]
body = message[bodyPositionIndex+6:]
else:
subject = ''
body = message
elif messageEncodingType == 1:
body = message
subject = ''
elif messageEncodingType == 0:
print 'messageEncodingType == 0. Doing nothing with the message. They probably just sent it so that we would store their public key or send their ack data for them.'
else:
body = 'Unknown encoding type.\n\n' + repr(message)
subject = ''
if messageEncodingType <> 0:
shared.sqlLock.acquire()
t = (self.inventoryHash,toAddress,fromAddress,subject,int(time.time()),body,'inbox',messageEncodingType,0)
shared.sqlSubmitQueue.put('''INSERT INTO inbox VALUES (?,?,?,?,?,?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
#self.emit(SIGNAL("displayNewInboxMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),self.inventoryHash,toAddress,fromAddress,subject,body)
shared.UISignalQueue.put(('displayNewInboxMessage',(self.inventoryHash,toAddress,fromAddress,subject,body)))
#If we are behaving as an API then we might need to run an outside command to let some program know that a new message has arrived.
if shared.safeConfigGetBoolean('bitmessagesettings','apienabled'):
try:
apiNotifyPath = shared.config.get('bitmessagesettings','apinotifypath')
except:
apiNotifyPath = ''
if apiNotifyPath != '':
call([apiNotifyPath, "newMessage"])
#Let us now check and see whether our receiving address is behaving as a mailing list
if shared.safeConfigGetBoolean(toAddress,'mailinglist'):
try:
mailingListName = shared.config.get(toAddress, 'mailinglistname')
except:
mailingListName = ''
#Let us send out this message as a broadcast
subject = self.addMailingListNameToSubject(subject,mailingListName)
#Let us now send this message out as a broadcast
message = strftime("%a, %Y-%m-%d %H:%M:%S UTC",gmtime()) + ' Message ostensibly from ' + fromAddress + ':\n\n' + body
fromAddress = toAddress #The fromAddress for the broadcast that we are about to send is the toAddress (my address) for the msg message we are currently processing.
ackdata = OpenSSL.rand(32) #We don't actually need the ackdata for acknowledgement since this is a broadcast message but we can use it to update the user interface when the POW is done generating.
toAddress = '[Broadcast subscribers]'
ripe = ''
shared.sqlLock.acquire()
t = ('',toAddress,ripe,fromAddress,subject,message,ackdata,int(time.time()),'broadcastpending',1,1,'sent',2)
shared.sqlSubmitQueue.put('''INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
#self.emit(SIGNAL("displayNewSentMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),toAddress,'[Broadcast subscribers]',fromAddress,subject,message,ackdata)
shared.UISignalQueue.put(('displayNewSentMessage',(toAddress,'[Broadcast subscribers]',fromAddress,subject,message,ackdata)))
shared.workerQueue.put(('sendbroadcast',(fromAddress,subject,message)))
if self.isAckDataValid(ackData):
print 'ackData is valid. Will process it.'
self.ackDataThatWeHaveYetToSend.append(ackData) #When we have processed all data, the processData function will pop the ackData out and process it as if it is a message received from our peer.
#Display timing data
timeRequiredToAttemptToDecryptMessage = time.time()- self.messageProcessingStartTime
successfullyDecryptMessageTimings.append(timeRequiredToAttemptToDecryptMessage)
sum = 0
for item in successfullyDecryptMessageTimings:
sum += item
shared.printLock.acquire()
print 'Time to decrypt this message successfully:', timeRequiredToAttemptToDecryptMessage
print 'Average time for all message decryption successes since startup:', sum / len(successfullyDecryptMessageTimings)
shared.printLock.release()
def isAckDataValid(self,ackData):
if len(ackData) < 24:
print 'The length of ackData is unreasonably short. Not sending ackData.'
return False
if ackData[0:4] != '\xe9\xbe\xb4\xd9':
print 'Ackdata magic bytes were wrong. Not sending ackData.'
return False
ackDataPayloadLength, = unpack('>L',ackData[16:20])
if len(ackData)-24 != ackDataPayloadLength:
print 'ackData payload length doesn\'t match the payload length specified in the header. Not sending ackdata.'
return False
if ackData[4:16] != 'getpubkey\x00\x00\x00' and ackData[4:16] != 'pubkey\x00\x00\x00\x00\x00\x00' and ackData[4:16] != 'msg\x00\x00\x00\x00\x00\x00\x00\x00\x00' and ackData[4:16] != 'broadcast\x00\x00\x00' :
return False
return True
def addMailingListNameToSubject(self,subject,mailingListName):
subject = subject.strip()
if subject[:3] == 'Re:' or subject[:3] == 'RE:':
subject = subject[3:].strip()
if '['+mailingListName+']' in subject:
return subject
else:
return '['+mailingListName+'] ' + subject
#We have received a pubkey
    def recpubkey(self,data):
        """Handle a received pubkey object: validate proof of work, embedded
        time and stream number, add the object to the shared inventory,
        advertise it to peers, then parse the key itself via processpubkey().
        Afterwards sleeps so that total processing time is at least 0.2
        seconds (timing attack mitigation).
        """
        self.pubkeyProcessingStartTime = time.time()
        if len(data) < 146 or len(data) >600: #sanity check
            return
        #We must check to make sure the proof of work is sufficient.
        if not self.isProofOfWorkSufficient(data):
            print 'Proof of work in pubkey message insufficient.'
            return
        readPosition = 8 #for the nonce
        embeddedTime, = unpack('>I',data[readPosition:readPosition+4])
        #This section is used for the transition from 32 bit time to 64 bit time in the protocol.
        #A zero 32-bit time signals that a 64-bit time follows instead.
        if embeddedTime == 0:
            embeddedTime, = unpack('>Q',data[readPosition:readPosition+8])
            readPosition += 8
        else:
            readPosition += 4
        if embeddedTime < int(time.time())-lengthOfTimeToHoldOnToAllPubkeys:
            shared.printLock.acquire()
            print 'The embedded time in this pubkey message is too old. Ignoring. Embedded time is:', embeddedTime
            shared.printLock.release()
            return
        if embeddedTime > int(time.time()) + 10800:
            #10800 seconds = 3 hours of allowed clock skew into the future.
            shared.printLock.acquire()
            print 'The embedded time in this pubkey message more than several hours in the future. This is irrational. Ignoring message.'
            shared.printLock.release()
            return
        addressVersion, varintLength = decodeVarint(data[readPosition:readPosition+10])
        readPosition += varintLength
        streamNumber, varintLength = decodeVarint(data[readPosition:readPosition+10])
        readPosition += varintLength
        if self.streamNumber != streamNumber:
            print 'stream number embedded in this pubkey doesn\'t match our stream number. Ignoring.'
            return
        inventoryHash = calculateInventoryHash(data)
        shared.inventoryLock.acquire()
        #Duplicates may live either in the in-memory inventory or already
        #moved to the SQL inventory on disk; check both before accepting.
        if inventoryHash in shared.inventory:
            print 'We have already received this pubkey. Ignoring it.'
            shared.inventoryLock.release()
            return
        elif isInSqlInventory(inventoryHash):
            print 'We have already received this pubkey (it is stored on disk in the SQL inventory). Ignoring it.'
            shared.inventoryLock.release()
            return
        objectType = 'pubkey'
        shared.inventory[inventoryHash] = (objectType, self.streamNumber, data, embeddedTime)
        shared.inventoryLock.release()
        #Forward the object to our peers, then tell the UI about it.
        self.broadcastinv(inventoryHash)
        #self.emit(SIGNAL("incrementNumberOfPubkeysProcessed()"))
        shared.UISignalQueue.put(('incrementNumberOfPubkeysProcessed','no data'))
        self.processpubkey(data)
        #Pad total processing time so an observer can't learn from timing
        #whether this pubkey was one we were interested in.
        lengthOfTimeWeShouldUseToProcessThisMessage = .2
        sleepTime = lengthOfTimeWeShouldUseToProcessThisMessage - (time.time()- self.pubkeyProcessingStartTime)
        if sleepTime > 0:
            shared.printLock.acquire()
            print 'Timing attack mitigation: Sleeping for', sleepTime ,'seconds.'
            shared.printLock.release()
            time.sleep(sleepTime)
        shared.printLock.acquire()
        print 'Total pubkey processing time:', time.time()- self.pubkeyProcessingStartTime, 'seconds.'
        shared.printLock.release()
def processpubkey(self,data):
readPosition = 8 #for the nonce
embeddedTime, = unpack('>I',data[readPosition:readPosition+4])
readPosition += 4 #for the time
addressVersion, varintLength = decodeVarint(data[readPosition:readPosition+10])
readPosition += varintLength
streamNumber, varintLength = decodeVarint(data[readPosition:readPosition+10])
readPosition += varintLength
if addressVersion == 0:
print '(Within processpubkey) addressVersion of 0 doesn\'t make sense.'
return
if addressVersion >= 4 or addressVersion == 1:
shared.printLock.acquire()
print 'This version of Bitmessage cannot handle version', addressVersion,'addresses.'
shared.printLock.release()
return
if addressVersion == 2:
if len(data) < 146: #sanity check. This is the minimum possible length.
print '(within processpubkey) payloadLength less than 146. Sanity check failed.'
return
bitfieldBehaviors = data[readPosition:readPosition+4]
readPosition += 4
publicSigningKey = data[readPosition:readPosition+64]
#Is it possible for a public key to be invalid such that trying to encrypt or sign with it will cause an error? If it is, we should probably test these keys here.
readPosition += 64
publicEncryptionKey = data[readPosition:readPosition+64]
if len(publicEncryptionKey) < 64:
print 'publicEncryptionKey length less than 64. Sanity check failed.'
return
sha = hashlib.new('sha512')
sha.update('\x04'+publicSigningKey+'\x04'+publicEncryptionKey)
ripeHasher = hashlib.new('ripemd160')
ripeHasher.update(sha.digest())
ripe = ripeHasher.digest()
shared.printLock.acquire()
print 'within recpubkey, addressVersion:', addressVersion, ', streamNumber:', streamNumber
print 'ripe', ripe.encode('hex')
print 'publicSigningKey in hex:', publicSigningKey.encode('hex')
print 'publicEncryptionKey in hex:', publicEncryptionKey.encode('hex')
shared.printLock.release()
t = (ripe,)
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''SELECT usedpersonally FROM pubkeys WHERE hash=? AND usedpersonally='yes' ''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
if queryreturn != []: #if this pubkey is already in our database and if we have used it personally:
print 'We HAVE used this pubkey personally. Updating time.'
t = (ripe,data,embeddedTime,'yes')
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.workerQueue.put(('newpubkey',(addressVersion,streamNumber,ripe)))
else:
print 'We have NOT used this pubkey personally. Inserting in database.'
t = (ripe,data,embeddedTime,'no') #This will also update the embeddedTime.
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.workerQueue.put(('newpubkey',(addressVersion,streamNumber,ripe)))
if addressVersion == 3:
if len(data) < 170: #sanity check.
print '(within processpubkey) payloadLength less than 170. Sanity check failed.'
return
bitfieldBehaviors = data[readPosition:readPosition+4]
readPosition += 4
publicSigningKey = '\x04'+data[readPosition:readPosition+64]
#Is it possible for a public key to be invalid such that trying to encrypt or sign with it will cause an error? If it is, we should probably test these keys here.
readPosition += 64
publicEncryptionKey = '\x04'+data[readPosition:readPosition+64]
readPosition += 64
specifiedNonceTrialsPerByte, specifiedNonceTrialsPerByteLength = decodeVarint(data[readPosition:readPosition+10])
readPosition += specifiedNonceTrialsPerByteLength
specifiedPayloadLengthExtraBytes, specifiedPayloadLengthExtraBytesLength = decodeVarint(data[readPosition:readPosition+10])
readPosition += specifiedPayloadLengthExtraBytesLength
signatureLength, signatureLengthLength = decodeVarint(data[readPosition:readPosition+10])
signature = data[readPosition:readPosition+signatureLengthLength]
try:
highlevelcrypto.verify(data[8:readPosition],signature,publicSigningKey.encode('hex'))
print 'ECDSA verify passed (within processpubkey)'
except Exception, err:
print 'ECDSA verify failed (within processpubkey)', err
return
sha = hashlib.new('sha512')
sha.update(publicSigningKey+publicEncryptionKey)
ripeHasher = hashlib.new('ripemd160')
ripeHasher.update(sha.digest())
ripe = ripeHasher.digest()
shared.printLock.acquire()
print 'within recpubkey, addressVersion:', addressVersion, ', streamNumber:', streamNumber
print 'ripe', ripe.encode('hex')
print 'publicSigningKey in hex:', publicSigningKey.encode('hex')
print 'publicEncryptionKey in hex:', publicEncryptionKey.encode('hex')
shared.printLock.release()
t = (ripe,)
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''SELECT usedpersonally FROM pubkeys WHERE hash=? AND usedpersonally='yes' ''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
if queryreturn != []: #if this pubkey is already in our database and if we have used it personally:
print 'We HAVE used this pubkey personally. Updating time.'
t = (ripe,data,embeddedTime,'yes')
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
else:
print 'We have NOT used this pubkey personally. Inserting in database.'
t = (ripe,data,embeddedTime,'no') #This will also update the embeddedTime.
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.workerQueue.put(('newpubkey',(addressVersion,streamNumber,ripe)))
#We have received a getpubkey message
    def recgetpubkey(self,data):
        """Handle a received getpubkey request: validate POW, time and
        stream, add it to the inventory and forward it to peers, then — if
        the requested hash is one of our own addresses and we haven't sent
        our pubkey recently — tell the worker thread to do the POW for a
        pubkey message and send it out.
        """
        if not self.isProofOfWorkSufficient(data):
            print 'Proof of work in getpubkey message insufficient.'
            return
        if len(data) < 34:
            print 'getpubkey message doesn\'t contain enough data. Ignoring.'
            return
        readPosition = 8 #bypass the nonce
        embeddedTime, = unpack('>I',data[readPosition:readPosition+4])
        #This section is used for the transition from 32 bit time to 64 bit time in the protocol.
        #A zero 32-bit time signals that a 64-bit time follows instead.
        if embeddedTime == 0:
            embeddedTime, = unpack('>Q',data[readPosition:readPosition+8])
            readPosition += 8
        else:
            readPosition += 4
        if embeddedTime > int(time.time())+10800:
            #10800 seconds = 3 hours of allowed clock skew into the future.
            print 'The time in this getpubkey message is too new. Ignoring it. Time:', embeddedTime
            return
        if embeddedTime < int(time.time())-maximumAgeOfAnObjectThatIAmWillingToAccept:
            print 'The time in this getpubkey message is too old. Ignoring it. Time:', embeddedTime
            return
        requestedAddressVersionNumber, addressVersionLength = decodeVarint(data[readPosition:readPosition+10])
        readPosition += addressVersionLength
        streamNumber, streamNumberLength = decodeVarint(data[readPosition:readPosition+10])
        if streamNumber <> self.streamNumber:
            print 'The streamNumber', streamNumber, 'doesn\'t match our stream number:', self.streamNumber
            return
        readPosition += streamNumberLength
        inventoryHash = calculateInventoryHash(data)
        shared.inventoryLock.acquire()
        #Duplicates may live either in the in-memory inventory or already
        #moved to the SQL inventory on disk; check both before accepting.
        if inventoryHash in shared.inventory:
            print 'We have already received this getpubkey request. Ignoring it.'
            shared.inventoryLock.release()
            return
        elif isInSqlInventory(inventoryHash):
            print 'We have already received this getpubkey request (it is stored on disk in the SQL inventory). Ignoring it.'
            shared.inventoryLock.release()
            return
        objectType = 'getpubkey'
        shared.inventory[inventoryHash] = (objectType, self.streamNumber, data, embeddedTime)
        shared.inventoryLock.release()
        #This getpubkey request is valid so far. Forward to peers.
        self.broadcastinv(inventoryHash)
        if requestedAddressVersionNumber == 0:
            print 'The requestedAddressVersionNumber of the pubkey request is zero. That doesn\'t make any sense. Ignoring it.'
            return
        elif requestedAddressVersionNumber == 1:
            print 'The requestedAddressVersionNumber of the pubkey request is 1 which isn\'t supported anymore. Ignoring it.'
            return
        elif requestedAddressVersionNumber > 3:
            print 'The requestedAddressVersionNumber of the pubkey request is too high. Can\'t understand. Ignoring it.'
            return
        requestedHash = data[readPosition:readPosition+20]
        if len(requestedHash) != 20:
            #A RIPEMD-160 address hash must be exactly 20 bytes.
            print 'The length of the requested hash is not 20 bytes. Something is wrong. Ignoring.'
            return
        print 'the hash requested in this getpubkey request is:', requestedHash.encode('hex')
        """shared.sqlLock.acquire()
        t = (requestedHash,int(time.time())-lengthOfTimeToHoldOnToAllPubkeys) #this prevents SQL injection
        shared.sqlSubmitQueue.put('''SELECT hash, transmitdata, time FROM pubkeys WHERE hash=? AND havecorrectnonce=1 AND time>?''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlLock.release()
        if queryreturn != []:
            for row in queryreturn:
                hash, payload, timeEncodedInPubkey = row
                shared.printLock.acquire()
                print 'We have the requested pubkey stored in our database of pubkeys. Sending it.'
                shared.printLock.release()
                inventoryHash = calculateInventoryHash(payload)
                objectType = 'pubkey'
                shared.inventory[inventoryHash] = (objectType, self.streamNumber, payload, timeEncodedInPubkey)#If the time embedded in this pubkey is more than 3 days old then this object isn't going to last very long in the inventory- the cleanerThread is going to come along and move it from the inventory in memory to the SQL inventory and then delete it from the SQL inventory. It should still find its way back to the original requestor if he is online however.
                self.broadcastinv(inventoryHash)"""
        #else: #the pubkey is not in our database of pubkeys. Let's check if the requested key is ours (which would mean we should do the POW, put it in the pubkey table, and broadcast out the pubkey.)
        if requestedHash in shared.myAddressesByHash: #if this address hash is one of mine
            if decodeAddress(shared.myAddressesByHash[requestedHash])[1] != requestedAddressVersionNumber:
                shared.printLock.acquire()
                sys.stderr.write('(Within the recgetpubkey function) Someone requested one of my pubkeys but the requestedAddressVersionNumber doesn\'t match my actual address version number. That shouldn\'t have happened. Ignoring.\n')
                shared.printLock.release()
                return
            try:
                lastPubkeySendTime = int(shared.config.get(shared.myAddressesByHash[requestedHash],'lastpubkeysendtime'))
            except:
                #Missing or malformed config value: treat as "never sent".
                lastPubkeySendTime = 0
            if lastPubkeySendTime < time.time()-lengthOfTimeToHoldOnToAllPubkeys: #If the last time we sent our pubkey was 28 days ago
                shared.printLock.acquire()
                print 'Found getpubkey-requested-hash in my list of EC hashes. Telling Worker thread to do the POW for a pubkey message and send it out.'
                shared.printLock.release()
                if requestedAddressVersionNumber == 2:
                    shared.workerQueue.put(('doPOWForMyV2Pubkey',requestedHash))
                elif requestedAddressVersionNumber == 3:
                    shared.workerQueue.put(('doPOWForMyV3Pubkey',requestedHash))
            else:
                shared.printLock.acquire()
                print 'Found getpubkey-requested-hash in my list of EC hashes BUT we already sent it recently. Ignoring request. The lastPubkeySendTime is:',lastPubkeySendTime
                shared.printLock.release()
        else:
            shared.printLock.acquire()
            print 'This getpubkey request is not for any of my keys.'
            shared.printLock.release()
#We have received an inv message
def recinv(self,data):
numberOfItemsInInv, lengthOfVarint = decodeVarint(data[:10])
if len(data) < lengthOfVarint + (numberOfItemsInInv * 32):
print 'inv message doesn\'t contain enough data. Ignoring.'
return
if numberOfItemsInInv == 1: #we'll just request this data from the person who advertised the object.
self.objectsOfWhichThisRemoteNodeIsAlreadyAware[data[lengthOfVarint:32+lengthOfVarint]] = 0
if data[lengthOfVarint:32+lengthOfVarint] in shared.inventory:
shared.printLock.acquire()
print 'Inventory (in memory) has inventory item already.'
shared.printLock.release()
elif isInSqlInventory(data[lengthOfVarint:32+lengthOfVarint]):
print 'Inventory (SQL on disk) has inventory item already.'
else:
self.sendgetdata(data[lengthOfVarint:32+lengthOfVarint])
else:
print 'inv message lists', numberOfItemsInInv, 'objects.'
for i in range(numberOfItemsInInv): #upon finishing dealing with an incoming message, the receiveDataThread will request a random object from the peer. This way if we get multiple inv messages from multiple peers which list mostly the same objects, we will make getdata requests for different random objects from the various peers.
if len(data[lengthOfVarint+(32*i):32+lengthOfVarint+(32*i)]) == 32: #The length of an inventory hash should be 32. If it isn't 32 then the remote node is either badly programmed or behaving nefariously.
self.objectsOfWhichThisRemoteNodeIsAlreadyAware[data[lengthOfVarint+(32*i):32+lengthOfVarint+(32*i)]] = 0
self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave[data[lengthOfVarint+(32*i):32+lengthOfVarint+(32*i)]] = 0
#Send a getdata message to our peer to request the object with the given hash
def sendgetdata(self,hash):
shared.printLock.acquire()
print 'sending getdata to retrieve object with hash:', hash.encode('hex')
shared.printLock.release()
payload = '\x01' + hash
headerData = '\xe9\xbe\xb4\xd9' #magic bits, slighly different from Bitcoin's magic bits.
headerData += 'getdata\x00\x00\x00\x00\x00'
headerData += pack('>L',len(payload)) #payload length. Note that we add an extra 8 for the nonce.
headerData += hashlib.sha512(payload).digest()[:4]
try:
self.sock.sendall(headerData + payload)
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
#We have received a getdata request from our peer
    def recgetdata(self, data):
        """Respond to a peer's getdata request: for each requested inventory
        hash, send the object from the in-memory inventory or, failing that,
        from the SQL inventory on disk.
        """
        numberOfRequestedInventoryItems, lengthOfVarint = decodeVarint(data[:10])
        if len(data) < lengthOfVarint + (32 * numberOfRequestedInventoryItems):
            print 'getdata message does not contain enough data. Ignoring.'
            return
        for i in xrange(numberOfRequestedInventoryItems):
            hash = data[lengthOfVarint+(i*32):32+lengthOfVarint+(i*32)]
            shared.printLock.acquire()
            print 'received getdata request for item:', hash.encode('hex')
            shared.printLock.release()
            #print 'inventory is', shared.inventory
            #First look in the in-memory inventory...
            if hash in shared.inventory:
                objectType, streamNumber, payload, receivedTime = shared.inventory[hash]
                self.sendData(objectType,payload)
            else:
                #...then fall back to the SQL inventory on disk.
                t = (hash,)
                shared.sqlLock.acquire()
                shared.sqlSubmitQueue.put('''select objecttype, payload from inventory where hash=?''')
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlLock.release()
                if queryreturn <> []:
                    for row in queryreturn:
                        objectType, payload = row
                        self.sendData(objectType,payload)
                else:
                    print 'Someone asked for an object with a getdata which is not in either our memory inventory or our SQL inventory. That shouldn\'t have happened.'
#Our peer has requested (in a getdata message) that we send an object.
def sendData(self,objectType,payload):
headerData = '\xe9\xbe\xb4\xd9' #magic bits, slighly different from Bitcoin's magic bits.
if objectType == 'pubkey':
shared.printLock.acquire()
print 'sending pubkey'
shared.printLock.release()
headerData += 'pubkey\x00\x00\x00\x00\x00\x00'
elif objectType == 'getpubkey' or objectType == 'pubkeyrequest':
shared.printLock.acquire()
print 'sending getpubkey'
shared.printLock.release()
headerData += 'getpubkey\x00\x00\x00'
elif objectType == 'msg':
shared.printLock.acquire()
print 'sending msg'
shared.printLock.release()
headerData += 'msg\x00\x00\x00\x00\x00\x00\x00\x00\x00'
elif objectType == 'broadcast':
shared.printLock.acquire()
print 'sending broadcast'
shared.printLock.release()
headerData += 'broadcast\x00\x00\x00'
else:
sys.stderr.write('Error: sendData has been asked to send a strange objectType: %s\n' % str(objectType))
return
headerData += pack('>L',len(payload)) #payload length.
headerData += hashlib.sha512(payload).digest()[:4]
try:
self.sock.sendall(headerData + payload)
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
#Send an inv message with just one hash to all of our peers
    def broadcastinv(self,hash):
        """Advertise a single inventory hash to all of our peers by queuing
        a 'sendinv' command for the sendData threads in our stream.
        """
        shared.printLock.acquire()
        print 'broadcasting inv with hash:', hash.encode('hex')
        shared.printLock.release()
        shared.broadcastToSendDataQueues((self.streamNumber, 'sendinv', hash))
#We have received an addr message.
def recaddr(self,data):
listOfAddressDetailsToBroadcastToPeers = []
numberOfAddressesIncluded = 0
numberOfAddressesIncluded, lengthOfNumberOfAddresses = decodeVarint(data[:10])
if verbose >= 1:
shared.printLock.acquire()
print 'addr message contains', numberOfAddressesIncluded, 'IP addresses.'
shared.printLock.release()
if self.remoteProtocolVersion == 1:
if numberOfAddressesIncluded > 1000 or numberOfAddressesIncluded == 0:
return
if len(data) != lengthOfNumberOfAddresses + (34 * numberOfAddressesIncluded):
print 'addr message does not contain the correct amount of data. Ignoring.'
return
needToWriteKnownNodesToDisk = False
for i in range(0,numberOfAddressesIncluded):
try:
if data[16+lengthOfNumberOfAddresses+(34*i):28+lengthOfNumberOfAddresses+(34*i)] != '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF':
shared.printLock.acquire()
print 'Skipping IPv6 address.', repr(data[16+lengthOfNumberOfAddresses+(34*i):28+lengthOfNumberOfAddresses+(34*i)])
shared.printLock.release()
continue
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (to test for an IPv6 address). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
try:
recaddrStream, = unpack('>I',data[4+lengthOfNumberOfAddresses+(34*i):8+lengthOfNumberOfAddresses+(34*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrStream). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
if recaddrStream == 0:
continue
if recaddrStream != self.streamNumber and recaddrStream != (self.streamNumber * 2) and recaddrStream != ((self.streamNumber * 2) + 1): #if the embedded stream number is not in my stream or either of my child streams then ignore it. Someone might be trying funny business.
continue
try:
recaddrServices, = unpack('>Q',data[8+lengthOfNumberOfAddresses+(34*i):16+lengthOfNumberOfAddresses+(34*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrServices). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
try:
recaddrPort, = unpack('>H',data[32+lengthOfNumberOfAddresses+(34*i):34+lengthOfNumberOfAddresses+(34*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrPort). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
#print 'Within recaddr(): IP', recaddrIP, ', Port', recaddrPort, ', i', i
hostFromAddrMessage = socket.inet_ntoa(data[28+lengthOfNumberOfAddresses+(34*i):32+lengthOfNumberOfAddresses+(34*i)])
#print 'hostFromAddrMessage', hostFromAddrMessage
if data[28+lengthOfNumberOfAddresses+(34*i)] == '\x7F':
print 'Ignoring IP address in loopback range:', hostFromAddrMessage
continue
if data[28+lengthOfNumberOfAddresses+(34*i)] == '\x0A':
print 'Ignoring IP address in private range:', hostFromAddrMessage
continue
if data[28+lengthOfNumberOfAddresses+(34*i):30+lengthOfNumberOfAddresses+(34*i)] == '\xC0A8':
print 'Ignoring IP address in private range:', hostFromAddrMessage
continue
timeSomeoneElseReceivedMessageFromThisNode, = unpack('>I',data[lengthOfNumberOfAddresses+(34*i):4+lengthOfNumberOfAddresses+(34*i)]) #This is the 'time' value in the received addr message.
if recaddrStream not in shared.knownNodes: #knownNodes is a dictionary of dictionaries with one outer dictionary for each stream. If the outer stream dictionary doesn't exist yet then we must make it.
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream] = {}
shared.knownNodesLock.release()
if hostFromAddrMessage not in shared.knownNodes[recaddrStream]:
if len(shared.knownNodes[recaddrStream]) < 20000 and timeSomeoneElseReceivedMessageFromThisNode > (int(time.time())-10800) and timeSomeoneElseReceivedMessageFromThisNode < (int(time.time()) + 10800): #If we have more than 20000 nodes in our list already then just forget about adding more. Also, make sure that the time that someone else received a message from this node is within three hours from now.
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream][hostFromAddrMessage] = (recaddrPort, timeSomeoneElseReceivedMessageFromThisNode)
shared.knownNodesLock.release()
needToWriteKnownNodesToDisk = True
hostDetails = (timeSomeoneElseReceivedMessageFromThisNode, recaddrStream, recaddrServices, hostFromAddrMessage, recaddrPort)
listOfAddressDetailsToBroadcastToPeers.append(hostDetails)
else:
PORT, timeLastReceivedMessageFromThisNode = shared.knownNodes[recaddrStream][hostFromAddrMessage]#PORT in this case is either the port we used to connect to the remote node, or the port that was specified by someone else in a past addr message.
if (timeLastReceivedMessageFromThisNode < timeSomeoneElseReceivedMessageFromThisNode) and (timeSomeoneElseReceivedMessageFromThisNode < int(time.time())):
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream][hostFromAddrMessage] = (PORT, timeSomeoneElseReceivedMessageFromThisNode)
shared.knownNodesLock.release()
if PORT != recaddrPort:
print 'Strange occurance: The port specified in an addr message', str(recaddrPort),'does not match the port',str(PORT),'that this program (or some other peer) used to connect to it',str(hostFromAddrMessage),'. Perhaps they changed their port or are using a strange NAT configuration.'
if needToWriteKnownNodesToDisk: #Runs if any nodes were new to us. Also, share those nodes with our peers.
shared.knownNodesLock.acquire()
output = open(shared.appdata + 'knownnodes.dat', 'wb')
pickle.dump(shared.knownNodes, output)
output.close()
shared.knownNodesLock.release()
self.broadcastaddr(listOfAddressDetailsToBroadcastToPeers) #no longer broadcast
shared.printLock.acquire()
print 'knownNodes currently has', len(shared.knownNodes[self.streamNumber]), 'nodes for this stream.'
shared.printLock.release()
elif self.remoteProtocolVersion >= 2: #The difference is that in protocol version 2, network addresses use 64 bit times rather than 32 bit times.
if numberOfAddressesIncluded > 1000 or numberOfAddressesIncluded == 0:
return
if len(data) != lengthOfNumberOfAddresses + (38 * numberOfAddressesIncluded):
print 'addr message does not contain the correct amount of data. Ignoring.'
return
needToWriteKnownNodesToDisk = False
for i in range(0,numberOfAddressesIncluded):
try:
if data[20+lengthOfNumberOfAddresses+(38*i):32+lengthOfNumberOfAddresses+(38*i)] != '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF':
shared.printLock.acquire()
print 'Skipping IPv6 address.', repr(data[20+lengthOfNumberOfAddresses+(38*i):32+lengthOfNumberOfAddresses+(38*i)])
shared.printLock.release()
continue
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (to test for an IPv6 address). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
try:
recaddrStream, = unpack('>I',data[8+lengthOfNumberOfAddresses+(38*i):12+lengthOfNumberOfAddresses+(38*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrStream). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
if recaddrStream == 0:
continue
if recaddrStream != self.streamNumber and recaddrStream != (self.streamNumber * 2) and recaddrStream != ((self.streamNumber * 2) + 1): #if the embedded stream number is not in my stream or either of my child streams then ignore it. Someone might be trying funny business.
continue
try:
recaddrServices, = unpack('>Q',data[12+lengthOfNumberOfAddresses+(38*i):20+lengthOfNumberOfAddresses+(38*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrServices). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
try:
recaddrPort, = unpack('>H',data[36+lengthOfNumberOfAddresses+(38*i):38+lengthOfNumberOfAddresses+(38*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrPort). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
#print 'Within recaddr(): IP', recaddrIP, ', Port', recaddrPort, ', i', i
hostFromAddrMessage = socket.inet_ntoa(data[32+lengthOfNumberOfAddresses+(38*i):36+lengthOfNumberOfAddresses+(38*i)])
#print 'hostFromAddrMessage', hostFromAddrMessage
if data[32+lengthOfNumberOfAddresses+(38*i)] == '\x7F':
print 'Ignoring IP address in loopback range:', hostFromAddrMessage
continue
if data[32+lengthOfNumberOfAddresses+(38*i)] == '\x0A':
print 'Ignoring IP address in private range:', hostFromAddrMessage
continue
if data[32+lengthOfNumberOfAddresses+(38*i):34+lengthOfNumberOfAddresses+(38*i)] == '\xC0A8':
print 'Ignoring IP address in private range:', hostFromAddrMessage
continue
timeSomeoneElseReceivedMessageFromThisNode, = unpack('>Q',data[lengthOfNumberOfAddresses+(38*i):8+lengthOfNumberOfAddresses+(38*i)]) #This is the 'time' value in the received addr message. 64-bit.
if recaddrStream not in shared.knownNodes: #knownNodes is a dictionary of dictionaries with one outer dictionary for each stream. If the outer stream dictionary doesn't exist yet then we must make it.
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream] = {}
shared.knownNodesLock.release()
if hostFromAddrMessage not in shared.knownNodes[recaddrStream]:
if len(shared.knownNodes[recaddrStream]) < 20000 and timeSomeoneElseReceivedMessageFromThisNode > (int(time.time())-10800) and timeSomeoneElseReceivedMessageFromThisNode < (int(time.time()) + 10800): #If we have more than 20000 nodes in our list already then just forget about adding more. Also, make sure that the time that someone else received a message from this node is within three hours from now.
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream][hostFromAddrMessage] = (recaddrPort, timeSomeoneElseReceivedMessageFromThisNode)
shared.knownNodesLock.release()
shared.printLock.acquire()
print 'added new node', hostFromAddrMessage, 'to knownNodes in stream', recaddrStream
shared.printLock.release()
needToWriteKnownNodesToDisk = True
hostDetails = (timeSomeoneElseReceivedMessageFromThisNode, recaddrStream, recaddrServices, hostFromAddrMessage, recaddrPort)
listOfAddressDetailsToBroadcastToPeers.append(hostDetails)
else:
PORT, timeLastReceivedMessageFromThisNode = shared.knownNodes[recaddrStream][hostFromAddrMessage]#PORT in this case is either the port we used to connect to the remote node, or the port that was specified by someone else in a past addr message.
if (timeLastReceivedMessageFromThisNode < timeSomeoneElseReceivedMessageFromThisNode) and (timeSomeoneElseReceivedMessageFromThisNode < int(time.time())):
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream][hostFromAddrMessage] = (PORT, timeSomeoneElseReceivedMessageFromThisNode)
shared.knownNodesLock.release()
if PORT != recaddrPort:
print 'Strange occurance: The port specified in an addr message', str(recaddrPort),'does not match the port',str(PORT),'that this program (or some other peer) used to connect to it',str(hostFromAddrMessage),'. Perhaps they changed their port or are using a strange NAT configuration.'
if needToWriteKnownNodesToDisk: #Runs if any nodes were new to us. Also, share those nodes with our peers.
shared.knownNodesLock.acquire()
output = open(shared.appdata + 'knownnodes.dat', 'wb')
pickle.dump(shared.knownNodes, output)
output.close()
shared.knownNodesLock.release()
self.broadcastaddr(listOfAddressDetailsToBroadcastToPeers)
shared.printLock.acquire()
print 'knownNodes currently has', len(shared.knownNodes[self.streamNumber]), 'nodes for this stream.'
shared.printLock.release()
#Function runs when we want to broadcast an addr message to all of our peers. Runs when we learn of nodes that we didn't previously know about and want to share them with our peers.
def broadcastaddr(self,listOfAddressDetailsToBroadcastToPeers):
numberOfAddressesInAddrMessage = len(listOfAddressDetailsToBroadcastToPeers)
payload = ''
for hostDetails in listOfAddressDetailsToBroadcastToPeers:
timeLastReceivedMessageFromThisNode, streamNumber, services, host, port = hostDetails
payload += pack('>Q',timeLastReceivedMessageFromThisNode) #now uses 64-bit time
payload += pack('>I',streamNumber)
payload += pack('>q',services) #service bit flags offered by this node
payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + socket.inet_aton(host)
payload += pack('>H',port)#remote port
payload = encodeVarint(numberOfAddressesInAddrMessage) + payload
datatosend = '\xE9\xBE\xB4\xD9addr\x00\x00\x00\x00\x00\x00\x00\x00'
datatosend = datatosend + pack('>L',len(payload)) #payload length
datatosend = datatosend + hashlib.sha512(payload).digest()[0:4]
datatosend = datatosend + payload
if verbose >= 1:
shared.printLock.acquire()
print 'Broadcasting addr with', numberOfAddressesInAddrMessage, 'entries.'
shared.printLock.release()
shared.broadcastToSendDataQueues((self.streamNumber, 'sendaddr', datatosend))
#Send a big addr message to our peer
def sendaddr(self):
addrsInMyStream = {}
addrsInChildStreamLeft = {}
addrsInChildStreamRight = {}
#print 'knownNodes', shared.knownNodes
#We are going to share a maximum number of 1000 addrs with our peer. 500 from this stream, 250 from the left child stream, and 250 from the right child stream.
shared.knownNodesLock.acquire()
if len(shared.knownNodes[self.streamNumber]) > 0:
for i in range(500):
random.seed()
HOST, = random.sample(shared.knownNodes[self.streamNumber], 1)
if self.isHostInPrivateIPRange(HOST):
continue
addrsInMyStream[HOST] = shared.knownNodes[self.streamNumber][HOST]
if len(shared.knownNodes[self.streamNumber*2]) > 0:
for i in range(250):
random.seed()
HOST, = random.sample(shared.knownNodes[self.streamNumber*2], 1)
if self.isHostInPrivateIPRange(HOST):
continue
addrsInChildStreamLeft[HOST] = shared.knownNodes[self.streamNumber*2][HOST]
if len(shared.knownNodes[(self.streamNumber*2)+1]) > 0:
for i in range(250):
random.seed()
HOST, = random.sample(shared.knownNodes[(self.streamNumber*2)+1], 1)
if self.isHostInPrivateIPRange(HOST):
continue
addrsInChildStreamRight[HOST] = shared.knownNodes[(self.streamNumber*2)+1][HOST]
shared.knownNodesLock.release()
numberOfAddressesInAddrMessage = 0
payload = ''
#print 'addrsInMyStream.items()', addrsInMyStream.items()
for HOST, value in addrsInMyStream.items():
PORT, timeLastReceivedMessageFromThisNode = value
if timeLastReceivedMessageFromThisNode > (int(time.time())- maximumAgeOfNodesThatIAdvertiseToOthers): #If it is younger than 3 hours old..
numberOfAddressesInAddrMessage += 1
if self.remoteProtocolVersion == 1:
payload += pack('>I',timeLastReceivedMessageFromThisNode) #32-bit time
else:
payload += pack('>Q',timeLastReceivedMessageFromThisNode) #64-bit time
payload += pack('>I',self.streamNumber)
payload += pack('>q',1) #service bit flags offered by this node
payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + socket.inet_aton(HOST)
payload += pack('>H',PORT)#remote port
for HOST, value in addrsInChildStreamLeft.items():
PORT, timeLastReceivedMessageFromThisNode = value
if timeLastReceivedMessageFromThisNode > (int(time.time())- maximumAgeOfNodesThatIAdvertiseToOthers): #If it is younger than 3 hours old..
numberOfAddressesInAddrMessage += 1
if self.remoteProtocolVersion == 1:
payload += pack('>I',timeLastReceivedMessageFromThisNode) #32-bit time
else:
payload += pack('>Q',timeLastReceivedMessageFromThisNode) #64-bit time
payload += pack('>I',self.streamNumber*2)
payload += pack('>q',1) #service bit flags offered by this node
payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + socket.inet_aton(HOST)
payload += pack('>H',PORT)#remote port
for HOST, value in addrsInChildStreamRight.items():
PORT, timeLastReceivedMessageFromThisNode = value
if timeLastReceivedMessageFromThisNode > (int(time.time())- maximumAgeOfNodesThatIAdvertiseToOthers): #If it is younger than 3 hours old..
numberOfAddressesInAddrMessage += 1
if self.remoteProtocolVersion == 1:
payload += pack('>I',timeLastReceivedMessageFromThisNode) #32-bit time
else:
payload += pack('>Q',timeLastReceivedMessageFromThisNode) #64-bit time
payload += pack('>I',(self.streamNumber*2)+1)
payload += pack('>q',1) #service bit flags offered by this node
payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + socket.inet_aton(HOST)
payload += pack('>H',PORT)#remote port
payload = encodeVarint(numberOfAddressesInAddrMessage) + payload
datatosend = '\xE9\xBE\xB4\xD9addr\x00\x00\x00\x00\x00\x00\x00\x00'
datatosend = datatosend + pack('>L',len(payload)) #payload length
datatosend = datatosend + hashlib.sha512(payload).digest()[0:4]
datatosend = datatosend + payload
try:
self.sock.sendall(datatosend)
if verbose >= 1:
shared.printLock.acquire()
print 'Sending addr with', numberOfAddressesInAddrMessage, 'entries.'
shared.printLock.release()
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
#We have received a version message
    def recversion(self,data):
        """Handle a received version message.

        Parses the fixed-offset fields of the payload (protocol version, our
        external IP as seen by the peer, the peer's incoming port, user agent,
        stream number), then sends a verack and — for incoming connections —
        our own version message.  Disconnects if the peer is in a stream other
        than 1 or if the nonce shows we connected to ourselves.
        """
        if len(data) < 83:
            #This version message is unreasonably short. Forget it.
            return
        elif not self.verackSent:
            # Fixed-offset fields; offsets match the payload layout built in
            # assembleVersionMessage below.
            self.remoteProtocolVersion, = unpack('>L',data[:4])
            #print 'remoteProtocolVersion', self.remoteProtocolVersion
            self.myExternalIP = socket.inet_ntoa(data[40:44])
            #print 'myExternalIP', self.myExternalIP
            self.remoteNodeIncomingPort, = unpack('>H',data[70:72])
            #print 'remoteNodeIncomingPort', self.remoteNodeIncomingPort
            # Variable-length tail: user agent (varint length + string), then
            # the stream list (varint count + varint stream number).
            useragentLength, lengthOfUseragentVarint = decodeVarint(data[80:84])
            readPosition = 80 + lengthOfUseragentVarint
            useragent = data[readPosition:readPosition+useragentLength]
            readPosition += useragentLength
            numberOfStreamsInVersionMessage, lengthOfNumberOfStreamsInVersionMessage = decodeVarint(data[readPosition:])
            readPosition += lengthOfNumberOfStreamsInVersionMessage
            self.streamNumber, lengthOfRemoteStreamNumber = decodeVarint(data[readPosition:])
            shared.printLock.acquire()
            print 'Remote node useragent:', useragent, ' stream number:', self.streamNumber
            shared.printLock.release()
            # Only stream 1 is supported; drop peers interested in anything else.
            if self.streamNumber != 1:
                shared.broadcastToSendDataQueues((0, 'shutdown', self.HOST))
                shared.printLock.acquire()
                print 'Closed connection to', self.HOST, 'because they are interested in stream', self.streamNumber,'.'
                shared.printLock.release()
                return
            shared.connectedHostsList[self.HOST] = 1 #We use this data structure to not only keep track of what hosts we are connected to so that we don't try to connect to them again, but also to list the connections count on the Network Status tab.
            #If this was an incoming connection, then the sendData thread doesn't know the stream. We have to set it.
            if not self.initiatedConnection:
                shared.broadcastToSendDataQueues((0,'setStreamNumber',(self.HOST,self.streamNumber)))
            # Bytes 72:80 carry the peer's random nonce; a match means the
            # "peer" is this very process, so close the connection.
            if data[72:80] == eightBytesOfRandomDataUsedToDetectConnectionsToSelf:
                shared.broadcastToSendDataQueues((0, 'shutdown', self.HOST))
                shared.printLock.acquire()
                print 'Closing connection to myself: ', self.HOST
                shared.printLock.release()
                return
            shared.broadcastToSendDataQueues((0,'setRemoteProtocolVersion',(self.HOST,self.remoteProtocolVersion)))
            # Record/refresh this peer in knownNodes and persist to disk.
            shared.knownNodesLock.acquire()
            shared.knownNodes[self.streamNumber][self.HOST] = (self.remoteNodeIncomingPort, int(time.time()))
            output = open(shared.appdata + 'knownnodes.dat', 'wb')
            pickle.dump(shared.knownNodes, output)
            output.close()
            shared.knownNodesLock.release()
            self.sendverack()
            # Incoming connections: we haven't sent our version yet, so do it now.
            if self.initiatedConnection == False:
                self.sendversion()
#Sends a version message
def sendversion(self):
shared.printLock.acquire()
print 'Sending version message'
shared.printLock.release()
try:
self.sock.sendall(assembleVersionMessage(self.HOST,self.PORT,self.streamNumber))
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
#Sends a verack message
def sendverack(self):
shared.printLock.acquire()
print 'Sending verack'
shared.printLock.release()
try:
self.sock.sendall('\xE9\xBE\xB4\xD9\x76\x65\x72\x61\x63\x6B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcf\x83\xe1\x35')
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
#cf 83 e1 35
self.verackSent = True
if self.verackReceived == True:
self.connectionFullyEstablished()
def isHostInPrivateIPRange(self,host):
if host[:3] == '10.':
return True
if host[:4] == '172.':
if host[6] == '.':
if int(host[4:6]) >= 16 and int(host[4:6]) <= 31:
return True
if host[:8] == '192.168.':
return True
return False
#Every connection to a peer has a sendDataThread (and also a receiveDataThread).
#Every connection to a peer has a sendDataThread (and also a receiveDataThread).
class sendDataThread(threading.Thread):
    """Per-connection sender thread.

    Receives (stream, command, data) tuples through self.mailbox (which is
    registered in shared.sendDataQueues so other threads can broadcast to
    it) and acts on commands addressed to its stream: 'shutdown',
    'setStreamNumber', 'setRemoteProtocolVersion', 'sendaddr', 'sendinv',
    and 'pong'.
    """
    def __init__(self):
        """Create the mailbox queue and register it in shared.sendDataQueues."""
        threading.Thread.__init__(self)
        self.mailbox = Queue.Queue()
        shared.sendDataQueues.append(self.mailbox)
        shared.printLock.acquire()
        print 'The length of sendDataQueues at sendDataThread init is:', len(shared.sendDataQueues)
        shared.printLock.release()
        self.data = ''
    def setup(self,sock,HOST,PORT,streamNumber,objectsOfWhichThisRemoteNodeIsAlreadyAware):
        """Attach the connected socket and per-peer state to this thread.

        Called after construction, before run() begins processing commands.
        """
        self.sock = sock
        self.HOST = HOST
        self.PORT = PORT
        self.streamNumber = streamNumber
        self.remoteProtocolVersion = -1 #This must be set using setRemoteProtocolVersion command which is sent through the self.mailbox queue.
        self.lastTimeISentData = int(time.time()) #If this value increases beyond five minutes ago, we'll send a pong message to keep the connection alive.
        self.objectsOfWhichThisRemoteNodeIsAlreadyAware = objectsOfWhichThisRemoteNodeIsAlreadyAware
        shared.printLock.acquire()
        print 'The streamNumber of this sendDataThread (ID:', str(id(self))+') at setup() is', self.streamNumber
        shared.printLock.release()
    def sendVersionMessage(self):
        """Assemble and send our version message over this thread's socket."""
        datatosend = assembleVersionMessage(self.HOST,self.PORT,self.streamNumber)#the IP and port of the remote host, and my streamNumber.
        shared.printLock.acquire()
        print 'Sending version packet: ', repr(datatosend)
        shared.printLock.release()
        try:
            self.sock.sendall(datatosend)
        except Exception, err:
            #if not 'Bad file descriptor' in err:
            shared.printLock.acquire()
            sys.stderr.write('sock.sendall error: %s\n' % err)
            shared.printLock.release()
        self.versionSent = 1
    def run(self):
        """Main loop: pull commands from the mailbox and act on them.

        The thread exits (break) on an explicit 'shutdown' addressed to this
        host, or when a socket send fails; in both cases it removes its
        mailbox from shared.sendDataQueues first so no further commands are
        routed here.
        """
        while True:
            deststream,command,data = self.mailbox.get()
            #shared.printLock.acquire()
            #print 'sendDataThread, destream:', deststream, ', Command:', command, ', ID:',id(self), ', HOST:', self.HOST
            #shared.printLock.release()
            # deststream 0 is a broadcast to all streams.
            if deststream == self.streamNumber or deststream == 0:
                if command == 'shutdown':
                    if data == self.HOST or data == 'all':
                        shared.printLock.acquire()
                        print 'sendDataThread (associated with', self.HOST,') ID:',id(self), 'shutting down now.'
                        shared.printLock.release()
                        try:
                            self.sock.shutdown(socket.SHUT_RDWR)
                            self.sock.close()
                        except:
                            pass
                        shared.sendDataQueues.remove(self.mailbox)
                        shared.printLock.acquire()
                        print 'len of sendDataQueues', len(shared.sendDataQueues)
                        shared.printLock.release()
                        break
                #When you receive an incoming connection, a sendDataThread is created even though you don't yet know what stream number the remote peer is interested in. They will tell you in a version message and if you too are interested in that stream then you will continue on with the connection and will set the streamNumber of this send data thread here:
                elif command == 'setStreamNumber':
                    hostInMessage, specifiedStreamNumber = data
                    if hostInMessage == self.HOST:
                        shared.printLock.acquire()
                        print 'setting the stream number in the sendData thread (ID:',id(self), ') to', specifiedStreamNumber
                        shared.printLock.release()
                        self.streamNumber = specifiedStreamNumber
                elif command == 'setRemoteProtocolVersion':
                    hostInMessage, specifiedRemoteProtocolVersion = data
                    if hostInMessage == self.HOST:
                        shared.printLock.acquire()
                        print 'setting the remote node\'s protocol version in the sendData thread (ID:',id(self), ') to', specifiedRemoteProtocolVersion
                        shared.printLock.release()
                        self.remoteProtocolVersion = specifiedRemoteProtocolVersion
                elif command == 'sendaddr':
                    # data is a fully assembled addr packet; protocol v1 peers
                    # use a different time width, so they are skipped here.
                    if self.remoteProtocolVersion == 1:
                        shared.printLock.acquire()
                        print 'a sendData thread is not sending an addr message to this particular peer ('+self.HOST+') because their protocol version is 1.'
                        shared.printLock.release()
                    else:
                        try:
                            #To prevent some network analysis, 'leak' the data out to our peer after waiting a random amount of time unless we have a long list of messages in our queue to send.
                            random.seed()
                            time.sleep(random.randrange(0, 10))
                            self.sock.sendall(data)
                            self.lastTimeISentData = int(time.time())
                        except:
                            print 'self.sock.sendall failed'
                            try:
                                self.sock.shutdown(socket.SHUT_RDWR)
                                self.sock.close()
                            except:
                                pass
                            shared.sendDataQueues.remove(self.mailbox)
                            print 'sendDataThread thread (ID:',str(id(self))+') ending now. Was connected to', self.HOST
                            break
                elif command == 'sendinv':
                    # data is a single 32-byte inventory hash; only advertise
                    # objects the peer has not already told us about.
                    if data not in self.objectsOfWhichThisRemoteNodeIsAlreadyAware:
                        payload = '\x01' + data
                        headerData = '\xe9\xbe\xb4\xd9' #magic bits, slighly different from Bitcoin's magic bits.
                        headerData += 'inv\x00\x00\x00\x00\x00\x00\x00\x00\x00'
                        headerData += pack('>L',len(payload))
                        headerData += hashlib.sha512(payload).digest()[:4]
                        #To prevent some network analysis, 'leak' the data out to our peer after waiting a random amount of time
                        random.seed()
                        time.sleep(random.randrange(0, 10))
                        try:
                            self.sock.sendall(headerData + payload)
                            self.lastTimeISentData = int(time.time())
                        except:
                            print 'self.sock.sendall failed'
                            try:
                                self.sock.shutdown(socket.SHUT_RDWR)
                                self.sock.close()
                            except:
                                pass
                            shared.sendDataQueues.remove(self.mailbox)
                            print 'sendDataThread thread (ID:',str(id(self))+') ending now. Was connected to', self.HOST
                            break
                elif command == 'pong':
                    # Keep-alive: only actually send if nothing has gone out
                    # for roughly five minutes (298 seconds).
                    if self.lastTimeISentData < (int(time.time()) - 298):
                        #Send out a pong message to keep the connection alive.
                        shared.printLock.acquire()
                        print 'Sending pong to', self.HOST, 'to keep connection alive.'
                        shared.printLock.release()
                        try:
                            self.sock.sendall('\xE9\xBE\xB4\xD9\x70\x6F\x6E\x67\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcf\x83\xe1\x35')
                            self.lastTimeISentData = int(time.time())
                        except:
                            print 'send pong failed'
                            try:
                                self.sock.shutdown(socket.SHUT_RDWR)
                                self.sock.close()
                            except:
                                pass
                            shared.sendDataQueues.remove(self.mailbox)
                            print 'sendDataThread thread', self, 'ending now. Was connected to', self.HOST
                            break
            else:
                shared.printLock.acquire()
                print 'sendDataThread ID:',id(self),'ignoring command', command,'because the thread is not in stream',deststream
                shared.printLock.release()
def isInSqlInventory(hash):
    """Return True if an object with the given hash is already present in
    the inventory table of messages.dat, False otherwise.
    """
    shared.sqlLock.acquire()
    shared.sqlSubmitQueue.put('''select hash from inventory where hash=?''')
    shared.sqlSubmitQueue.put((hash,))
    queryreturn = shared.sqlReturnQueue.get()
    shared.sqlLock.release()
    # An empty result set means the hash is unknown.
    return queryreturn != []
def convertIntToString(n):
    """Convert a non-negative integer to its big-endian byte-string
    representation (no leading zero padding beyond whole-byte alignment).

    Fix: the original called __builtins__.hex, which only works when this
    file runs as the __main__ script; in an imported module __builtins__ is
    a dict and the attribute access raises AttributeError.  The builtin
    hex() is used directly instead, and binascii.unhexlify replaces the
    Python-2-only str.decode('hex').
    """
    from binascii import unhexlify  # local import keeps the module namespace unchanged
    a = hex(n)
    if a.endswith('L'):  # Python 2 longs carry a trailing 'L'
        a = a[:-1]
    a = a[2:]  # strip the '0x' prefix
    if len(a) % 2:
        a = '0' + a  # pad to a whole number of bytes
    return unhexlify(a)
def convertStringToInt(s):
    """Interpret a byte string as a big-endian unsigned integer.

    Robustness: an empty string now returns 0 instead of raising ValueError
    from int('', 16).  binascii.hexlify replaces the Python-2-only
    str.encode('hex').
    """
    from binascii import hexlify  # local import keeps the module namespace unchanged
    if not s:
        return 0
    return int(hexlify(s), 16)
#This function expects that pubkey begin with \x04
def calculateBitcoinAddressFromPubkey(pubkey):
if len(pubkey)!= 65:
print 'Could not calculate Bitcoin address from pubkey because function was passed a pubkey that was', len(pubkey),'bytes long rather than 65.'
return "error"
ripe = hashlib.new('ripemd160')
sha = hashlib.new('sha256')
sha.update(pubkey)
ripe.update(sha.digest())
ripeWithProdnetPrefix = '\x00' + ripe.digest()
checksum = hashlib.sha256(hashlib.sha256(ripeWithProdnetPrefix).digest()).digest()[:4]
binaryBitcoinAddress = ripeWithProdnetPrefix + checksum
numberOfZeroBytesOnBinaryBitcoinAddress = 0
while binaryBitcoinAddress[0] == '\x00':
numberOfZeroBytesOnBinaryBitcoinAddress += 1
binaryBitcoinAddress = binaryBitcoinAddress[1:]
base58encoded = arithmetic.changebase(binaryBitcoinAddress,256,58)
return "1"*numberOfZeroBytesOnBinaryBitcoinAddress + base58encoded
def calculateTestnetAddressFromPubkey(pubkey):
if len(pubkey)!= 65:
print 'Could not calculate Bitcoin address from pubkey because function was passed a pubkey that was', len(pubkey),'bytes long rather than 65.'
return "error"
ripe = hashlib.new('ripemd160')
sha = hashlib.new('sha256')
sha.update(pubkey)
ripe.update(sha.digest())
ripeWithProdnetPrefix = '\x6F' + ripe.digest()
checksum = hashlib.sha256(hashlib.sha256(ripeWithProdnetPrefix).digest()).digest()[:4]
binaryBitcoinAddress = ripeWithProdnetPrefix + checksum
numberOfZeroBytesOnBinaryBitcoinAddress = 0
while binaryBitcoinAddress[0] == '\x00':
numberOfZeroBytesOnBinaryBitcoinAddress += 1
binaryBitcoinAddress = binaryBitcoinAddress[1:]
base58encoded = arithmetic.changebase(binaryBitcoinAddress,256,58)
return "1"*numberOfZeroBytesOnBinaryBitcoinAddress + base58encoded
def signal_handler(signal, frame):
if shared.safeConfigGetBoolean('bitmessagesettings','daemon'):
shared.doCleanShutdown()
sys.exit(0)
else:
print 'Unfortunately you cannot use Ctrl+C when running the UI because the UI captures the signal.'
def connectToStream(streamNumber):
    """Begin connecting to peers in the given stream by launching a pool of
    outgoingSynSender threads (fewer on Windows, where half-open connection
    limits are stricter).
    """
    selfInitiatedConnections[streamNumber] = {}
    maximumNumberOfHalfOpenConnections = 9 if sys.platform.startswith('win') else 32
    for _ in range(maximumNumberOfHalfOpenConnections):
        synSender = outgoingSynSender()
        synSender.setup(streamNumber)
        synSender.start()
#Does an EC point multiplication; turns a private key into a public key.
def pointMult(secret):
    """Do an EC point multiplication on secp256k1: turn a 32-byte private
    key into the corresponding public key, returned as the raw byte string
    produced by i2o_ECPublicKey.

    All OpenSSL objects created here are freed before returning; the order
    of the FFI calls matters, so do not reorder them.
    """
    #ctx = OpenSSL.BN_CTX_new() #This value proved to cause Seg Faults on Linux. It turns out that it really didn't speed up EC_POINT_mul anyway.
    k = OpenSSL.EC_KEY_new_by_curve_name(OpenSSL.get_curve('secp256k1'))
    priv_key = OpenSSL.BN_bin2bn(secret, 32, 0)  # interpret secret as a 32-byte big number
    group = OpenSSL.EC_KEY_get0_group(k)
    pub_key = OpenSSL.EC_POINT_new(group)
    # pub_key = priv_key * G
    OpenSSL.EC_POINT_mul(group, pub_key, priv_key, None, None, None)
    OpenSSL.EC_KEY_set_private_key(k, priv_key)
    OpenSSL.EC_KEY_set_public_key(k, pub_key)
    #print 'priv_key',priv_key
    #print 'pub_key',pub_key
    # First call with a null buffer returns the needed size; second call
    # writes the serialized public key into mb.
    size = OpenSSL.i2o_ECPublicKey(k, 0)
    mb = ctypes.create_string_buffer(size)
    OpenSSL.i2o_ECPublicKey(k, ctypes.byref(ctypes.pointer(mb)))
    #print 'mb.raw', mb.raw.encode('hex'), 'length:', len(mb.raw)
    #print 'mb.raw', mb.raw, 'length:', len(mb.raw)
    # Free everything we allocated to avoid leaking native memory.
    OpenSSL.EC_POINT_free(pub_key)
    #OpenSSL.BN_CTX_free(ctx)
    OpenSSL.BN_free(priv_key)
    OpenSSL.EC_KEY_free(k)
    return mb.raw
def assembleVersionMessage(remoteHost,remotePort,myStreamNumber):
    """Build a complete 'version' message (24-byte header plus payload)
    addressed to remoteHost:remotePort, advertising myStreamNumber.
    Returns the raw bytes ready to be sent over the socket.
    """
    shared.softwareVersion  # kept from the original; the userAgent below reads the same attribute
    parts = []
    parts.append(pack('>L',2)) #protocol version
    parts.append(pack('>q',1)) #bitflags of the services I offer
    parts.append(pack('>q',int(time.time())))
    parts.append(pack('>q',1)) #services of the remote connection; probably ignored by the remote host
    parts.append('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + socket.inet_aton(remoteHost)) #remote IPv4-mapped IPv6 address
    parts.append(pack('>H',remotePort)) #remote port
    parts.append(pack('>q',1)) #bitflags of the services I offer
    parts.append('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + pack('>L',2130706433)) # = 127.0.0.1. Ignored by the remote host; the actual remote connected IP is used instead.
    parts.append(pack('>H',shared.config.getint('bitmessagesettings', 'port'))) #my external port
    random.seed()
    parts.append(eightBytesOfRandomDataUsedToDetectConnectionsToSelf) #nonce used to detect connections to self
    userAgent = '/PyBitmessage:' + shared.softwareVersion + '/' #Length of userAgent must be less than 253.
    parts.append(pack('>B',len(userAgent))) #user agent string length; breaks if the user agent exceeds 252 bytes
    parts.append(userAgent)
    parts.append(encodeVarint(1)) #number of streams I care about; PyBitmessage currently only supports 1 per connection
    parts.append(encodeVarint(myStreamNumber))
    payload = ''.join(parts)
    header = '\xe9\xbe\xb4\xd9' #magic bits, slighly different from Bitcoin's magic bits
    header += 'version\x00\x00\x00\x00\x00' #version command, NUL padded to 12 bytes
    header += pack('>L',len(payload)) #payload length
    header += hashlib.sha512(payload).digest()[0:4] #payload checksum
    return header + payload
#This thread exists because SQLITE3 is so un-threadsafe that we must submit queries to it and it puts results back in a different queue. They won't let us just use locks.
class sqlThread(threading.Thread):
    def __init__(self):
        """Initialize the SQL worker thread.

        The database connection and cursor are deliberately created in run(),
        not here, so that the sqlite3 objects belong to the worker thread
        that will use them.
        """
        threading.Thread.__init__(self)
def run(self):
self.conn = sqlite3.connect(shared.appdata + 'messages.dat' )
self.conn.text_factory = str
self.cur = self.conn.cursor()
try:
self.cur.execute( '''CREATE TABLE inbox (msgid blob, toaddress text, fromaddress text, subject text, received text, message text, folder text, encodingtype int, read bool, UNIQUE(msgid) ON CONFLICT REPLACE)''' )
self.cur.execute( '''CREATE TABLE sent (msgid blob, toaddress text, toripe blob, fromaddress text, subject text, message text, ackdata blob, lastactiontime integer, status text, pubkeyretrynumber integer, msgretrynumber integer, folder text, encodingtype int)''' )
self.cur.execute( '''CREATE TABLE subscriptions (label text, address text, enabled bool)''' )
self.cur.execute( '''CREATE TABLE addressbook (label text, address text)''' )
self.cur.execute( '''CREATE TABLE blacklist (label text, address text, enabled bool)''' )
self.cur.execute( '''CREATE TABLE whitelist (label text, address text, enabled bool)''' )
#Explanation of what is in the pubkeys table:
# The hash is the RIPEMD160 hash that is encoded in the Bitmessage address.
# transmitdata is literally the data that was included in the Bitmessage pubkey message when it arrived, except for the 24 byte protocol header- ie, it starts with the POW nonce.
# time is the time that the pubkey was broadcast on the network same as with every other type of Bitmessage object.
# usedpersonally is set to "yes" if we have used the key personally. This keeps us from deleting it because we may want to reply to a message in the future. This field is not a bool because we may need more flexability in the future and it doesn't take up much more space anyway.
self.cur.execute( '''CREATE TABLE pubkeys (hash blob, transmitdata blob, time int, usedpersonally text, UNIQUE(hash) ON CONFLICT REPLACE)''' )
self.cur.execute( '''CREATE TABLE inventory (hash blob, objecttype text, streamnumber int, payload blob, receivedtime integer, UNIQUE(hash) ON CONFLICT REPLACE)''' )
self.cur.execute( '''CREATE TABLE knownnodes (timelastseen int, stream int, services blob, host blob, port blob, UNIQUE(host, stream, port) ON CONFLICT REPLACE)''' ) #This table isn't used in the program yet but I have a feeling that we'll need it.
self.cur.execute( '''INSERT INTO subscriptions VALUES('Bitmessage new releases/announcements','BM-GtovgYdgs7qXPkoYaRgrLFuFKz1SFpsw',1)''')
self.cur.execute( '''CREATE TABLE settings (key blob, value blob, UNIQUE(key) ON CONFLICT REPLACE)''' )
self.cur.execute( '''INSERT INTO settings VALUES('version','1')''')
self.cur.execute( '''INSERT INTO settings VALUES('lastvacuumtime',?)''',(int(time.time()),))
self.conn.commit()
print 'Created messages database file'
except Exception, err:
if str(err) == 'table inbox already exists':
shared.printLock.acquire()
print 'Database file already exists.'
shared.printLock.release()
else:
sys.stderr.write('ERROR trying to create database file (message.dat). Error message: %s\n' % str(err))
os._exit(0)
#People running earlier versions of PyBitmessage do not have the usedpersonally field in their pubkeys table. Let's add it.
if shared.config.getint('bitmessagesettings','settingsversion') == 2:
item = '''ALTER TABLE pubkeys ADD usedpersonally text DEFAULT 'no' '''
parameters = ''
self.cur.execute(item, parameters)
self.conn.commit()
shared.config.set('bitmessagesettings','settingsversion','3')
with open(shared.appdata + 'keys.dat', 'wb') as configfile:
shared.config.write(configfile)
#People running earlier versions of PyBitmessage do not have the encodingtype field in their inbox and sent tables or the read field in the inbox table. Let's add them.
if shared.config.getint('bitmessagesettings','settingsversion') == 3:
item = '''ALTER TABLE inbox ADD encodingtype int DEFAULT '2' '''
parameters = ''
self.cur.execute(item, parameters)
item = '''ALTER TABLE inbox ADD read bool DEFAULT '1' '''
parameters = ''
self.cur.execute(item, parameters)
item = '''ALTER TABLE sent ADD encodingtype int DEFAULT '2' '''
parameters = ''
self.cur.execute(item, parameters)
self.conn.commit()
shared.config.set('bitmessagesettings','settingsversion','4')
with open(shared.appdata + 'keys.dat', 'wb') as configfile:
shared.config.write(configfile)
if shared.config.getint('bitmessagesettings','settingsversion') == 4:
shared.config.set('bitmessagesettings','defaultnoncetrialsperbyte',str(shared.networkDefaultProofOfWorkNonceTrialsPerByte))
shared.config.set('bitmessagesettings','defaultpayloadlengthextrabytes',str(shared.networkDefaultPayloadLengthExtraBytes))
shared.config.set('bitmessagesettings','settingsversion','5')
with open(shared.appdata + 'keys.dat', 'wb') as configfile:
shared.config.write(configfile)
#From now on, let us keep a 'version' embedded in the messages.dat file so that when we make changes to the database, the database version we are on can stay embedded in the messages.dat file. Let us check to see if the settings table exists yet.
item = '''SELECT name FROM sqlite_master WHERE type='table' AND name='settings';'''
parameters = ''
self.cur.execute(item, parameters)
if self.cur.fetchall() == []:
#The settings table doesn't exist. We need to make it.
print 'In messages.dat database, creating new \'settings\' table.'
self.cur.execute( '''CREATE TABLE settings (key text, value blob, UNIQUE(key) ON CONFLICT REPLACE)''' )
self.cur.execute( '''INSERT INTO settings VALUES('version','1')''')
self.cur.execute( '''INSERT INTO settings VALUES('lastvacuumtime',?)''',(int(time.time()),))
print 'In messages.dat database, removing an obsolete field from the pubkeys table.'
self.cur.execute( '''CREATE TEMPORARY TABLE pubkeys_backup(hash blob, transmitdata blob, time int, usedpersonally text, UNIQUE(hash) ON CONFLICT REPLACE);''')
self.cur.execute( '''INSERT INTO pubkeys_backup SELECT hash, transmitdata, time, usedpersonally FROM pubkeys;''')
self.cur.execute( '''DROP TABLE pubkeys''')
self.cur.execute( '''CREATE TABLE pubkeys (hash blob, transmitdata blob, time int, usedpersonally text, UNIQUE(hash) ON CONFLICT REPLACE)''' )
self.cur.execute( '''INSERT INTO pubkeys SELECT hash, transmitdata, time, usedpersonally FROM pubkeys_backup;''')
self.cur.execute( '''DROP TABLE pubkeys_backup;''')
print 'Deleting all pubkeys from inventory. They will be redownloaded and then saved with the correct times.'
self.cur.execute( '''delete from inventory where objecttype = 'pubkey';''')
print 'replacing Bitmessage announcements mailing list with a new one.'
self.cur.execute( '''delete from subscriptions where address='BM-BbkPSZbzPwpVcYZpU4yHwf9ZPEapN5Zx' ''')
self.cur.execute( '''INSERT INTO subscriptions VALUES('Bitmessage new releases/announcements','BM-GtovgYdgs7qXPkoYaRgrLFuFKz1SFpsw',1)''')
print 'Commiting.'
self.conn.commit()
print 'Vacuuming message.dat. You might notice that the file size gets much smaller.'
self.cur.execute( ''' VACUUM ''')
try:
testpayload = '\x00\x00'
t = ('1234',testpayload,'12345678','no')
self.cur.execute( '''INSERT INTO pubkeys VALUES(?,?,?,?)''',t)
self.conn.commit()
self.cur.execute('''SELECT transmitdata FROM pubkeys WHERE hash='1234' ''')
queryreturn = self.cur.fetchall()
for row in queryreturn:
transmitdata, = row
self.cur.execute('''DELETE FROM pubkeys WHERE hash='1234' ''')
self.conn.commit()
if transmitdata == '':
sys.stderr.write('Problem: The version of SQLite you have cannot store Null values. Please download and install the latest revision of your version of Python (for example, the latest Python 2.7 revision) and try again.\n')
sys.stderr.write('PyBitmessage will now exist very abruptly. You may now see threading errors related to this abrupt exit but the problem you need to solve is related to SQLite.\n\n')
os._exit(0)
except Exception, err:
print err
#Let us check to see the last time we vaccumed the messages.dat file. If it has been more than a month let's do it now.
item = '''SELECT value FROM settings WHERE key='lastvacuumtime';'''
parameters = ''
self.cur.execute(item, parameters)
queryreturn = self.cur.fetchall()
for row in queryreturn:
value, = row
if int(value) < int(time.time()) - 2592000:
print 'It has been a long time since the messages.dat file has been vacuumed. Vacuuming now...'
self.cur.execute( ''' VACUUM ''')
item = '''update settings set value=? WHERE key='lastvacuumtime';'''
parameters = (int(time.time()),)
self.cur.execute(item, parameters)
while True:
item = shared.sqlSubmitQueue.get()
if item == 'commit':
self.conn.commit()
elif item == 'exit':
self.conn.close()
print 'sqlThread exiting gracefully.'
return
elif item == 'movemessagstoprog':
shared.printLock.acquire()
print 'the sqlThread is moving the messages.dat file to the local program directory.'
shared.printLock.release()
self.conn.commit()
self.conn.close()
shutil.move(shared.lookupAppdataFolder()+'messages.dat','messages.dat')
self.conn = sqlite3.connect('messages.dat' )
self.conn.text_factory = str
self.cur = self.conn.cursor()
elif item == 'movemessagstoappdata':
shared.printLock.acquire()
print 'the sqlThread is moving the messages.dat file to the Appdata folder.'
shared.printLock.release()
self.conn.commit()
self.conn.close()
shutil.move('messages.dat',shared.lookupAppdataFolder()+'messages.dat')
self.conn = sqlite3.connect(shared.appdata + 'messages.dat' )
self.conn.text_factory = str
self.cur = self.conn.cursor()
else:
parameters = shared.sqlSubmitQueue.get()
#print 'item', item
#print 'parameters', parameters
try:
self.cur.execute(item, parameters)
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('\nMajor error occurred when trying to execute a SQL statement within the sqlThread. Please tell Atheros about this error message or post it in the forum! Error occurred while trying to execute statement: "'+str(item) + '" Here are the parameters; you might want to censor this data with asterisks (***) as it can contain private information: '+str(repr(parameters))+'\nHere is the actual error message thrown by the sqlThread: '+ str(err)+'\n')
sys.stderr.write('This program shall now abruptly exit!\n')
shared.printLock.release()
os._exit(0)
shared.sqlReturnQueue.put(self.cur.fetchall())
#shared.sqlSubmitQueue.task_done()
'''The singleCleaner class is a timer-driven thread that cleans data structures to free memory, resends messages when a remote node doesn't respond, and sends pong messages to keep connections alive if the network isn't busy.
It cleans these data structures in memory:
inventory (moves data to the on-disk sql database)
It cleans these tables on the disk:
inventory (clears data more than 2 days and 12 hours old)
pubkeys (clears pubkeys older than 4 weeks old which we have not used personally)
It resends messages when there has been no response:
resends getpubkey messages in 4 days (then 8 days, then 16 days, etc...)
resends msg messages in 4 days (then 8 days, then 16 days, etc...)
'''
class singleCleaner(threading.Thread):
    """Housekeeping thread; see the module-level text above for its duties.

    Runs one pass every 300 seconds; the deeper database clean-up and
    resend pass runs at most once every 7380 seconds.
    """
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        """Loop forever: flush in-memory inventory, prune tables, resend stalled messages."""
        timeWeLastClearedInventoryAndPubkeysTables = 0
        while True:
            shared.sqlLock.acquire()
            #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"Doing housekeeping (Flushing inventory in memory to disk...)")
            shared.UISignalQueue.put(('updateStatusBar','Doing housekeeping (Flushing inventory in memory to disk...)'))
            # In Python 2, .items() returns a list copy, so deleting from
            # shared.inventory while iterating here is safe.
            for hash, storedValue in shared.inventory.items():
                objectType, streamNumber, payload, receivedTime = storedValue
                # Move objects older than an hour out of memory and into the database.
                if int(time.time())- 3600 > receivedTime:
                    t = (hash,objectType,streamNumber,payload,receivedTime)
                    shared.sqlSubmitQueue.put('''INSERT INTO inventory VALUES (?,?,?,?,?)''')
                    shared.sqlSubmitQueue.put(t)
                    shared.sqlReturnQueue.get()
                    del shared.inventory[hash]
            shared.sqlSubmitQueue.put('commit')
            #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"")
            shared.UISignalQueue.put(('updateStatusBar',''))
            shared.sqlLock.release()
            shared.broadcastToSendDataQueues((0, 'pong', 'no data')) #commands the sendData threads to send out a pong message if they haven't sent anything else in the last five minutes. The socket timeout-time is 10 minutes.
            #If we are running as a daemon then we are going to fill up the UI queue which will never be handled by a UI. We should clear it to save memory.
            if shared.safeConfigGetBoolean('bitmessagesettings','daemon'):
                shared.UISignalQueue.queue.clear()
            # Deep clean at most every 7380 seconds (a little over two hours).
            if timeWeLastClearedInventoryAndPubkeysTables < int(time.time()) - 7380:
                timeWeLastClearedInventoryAndPubkeysTables = int(time.time())
                #inventory (moves data from the inventory data structure to the on-disk sql database)
                shared.sqlLock.acquire()
                #inventory (clears data more than 2 days and 12 hours old)
                t = (int(time.time())-lengthOfTimeToLeaveObjectsInInventory,int(time.time())-lengthOfTimeToHoldOnToAllPubkeys)
                shared.sqlSubmitQueue.put('''DELETE FROM inventory WHERE (receivedtime<? AND objecttype<>'pubkey') OR (receivedtime<? AND objecttype='pubkey') ''')
                shared.sqlSubmitQueue.put(t)
                shared.sqlReturnQueue.get()
                #pubkeys
                t = (int(time.time())-lengthOfTimeToHoldOnToAllPubkeys,)
                shared.sqlSubmitQueue.put('''DELETE FROM pubkeys WHERE time<? AND usedpersonally='no' ''')
                shared.sqlSubmitQueue.put(t)
                shared.sqlReturnQueue.get()
                shared.sqlSubmitQueue.put('commit')
                # Resend pass: look at every non-trashed sent message that is
                # still waiting for a pubkey or an acknowledgement.
                t = ()
                shared.sqlSubmitQueue.put('''select toaddress, toripe, fromaddress, subject, message, ackdata, lastactiontime, status, pubkeyretrynumber, msgretrynumber FROM sent WHERE ((status='findingpubkey' OR status='sentmessage') AND folder='sent') ''') #If the message's folder='trash' then we'll ignore it.
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                for row in queryreturn:
                    toaddress, toripe, fromaddress, subject, message, ackdata, lastactiontime, status, pubkeyretrynumber, msgretrynumber = row
                    if status == 'findingpubkey':
                        # Exponential backoff: retry interval doubles with each attempt.
                        if int(time.time()) - lastactiontime > (maximumAgeOfAnObjectThatIAmWillingToAccept * (2 ** (pubkeyretrynumber))):
                            print 'It has been a long time and we haven\'t heard a response to our getpubkey request. Sending again.'
                            try:
                                del neededPubkeys[toripe] #We need to take this entry out of the neededPubkeys structure because the shared.workerQueue checks to see whether the entry is already present and will not do the POW and send the message because it assumes that it has already done it recently.
                            except:
                                pass
                            shared.workerQueue.put(('sendmessage',toaddress))
                            #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"Doing work necessary to again attempt to request a public key...")
                            shared.UISignalQueue.put(('updateStatusBar','Doing work necessary to again attempt to request a public key...'))
                            t = (int(time.time()),pubkeyretrynumber+1,toripe)
                            shared.sqlSubmitQueue.put('''UPDATE sent SET lastactiontime=?, pubkeyretrynumber=? WHERE toripe=?''')
                            shared.sqlSubmitQueue.put(t)
                            shared.sqlReturnQueue.get()
                    else:# status == sentmessage
                        # Same exponential backoff, keyed on msgretrynumber.
                        if int(time.time()) - lastactiontime > (maximumAgeOfAnObjectThatIAmWillingToAccept * (2 ** (msgretrynumber))):
                            print 'It has been a long time and we haven\'t heard an acknowledgement to our msg. Sending again.'
                            t = (int(time.time()),msgretrynumber+1,'findingpubkey',ackdata)
                            shared.sqlSubmitQueue.put('''UPDATE sent SET lastactiontime=?, msgretrynumber=?, status=? WHERE ackdata=?''')
                            shared.sqlSubmitQueue.put(t)
                            shared.sqlReturnQueue.get()
                            shared.workerQueue.put(('sendmessage',toaddress))
                            #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"Doing work necessary to again attempt to deliver a message...")
                            shared.UISignalQueue.put(('updateStatusBar','Doing work necessary to again attempt to deliver a message...'))
                shared.sqlSubmitQueue.put('commit')
                shared.sqlLock.release()
            time.sleep(300)
#This thread, of which there is only one, does the heavy lifting: calculating POWs.
class singleWorker(threading.Thread):
    def __init__(self):
        """Initialize as a plain (non-Qt) thread; no state is set up until run()."""
        #QThread.__init__(self, parent)
        threading.Thread.__init__(self)
    def run(self):
        """Recover sends that were in flight when the program last exited,
        then process commands from shared.workerQueue forever.

        Queue commands handled: 'sendmessage', 'sendbroadcast',
        'doPOWForMyV2Pubkey', 'doPOWForMyV3Pubkey', 'newpubkey'.
        """
        # Startup recovery, part 1: messages stuck in 'findingpubkey'.
        shared.sqlLock.acquire()
        shared.sqlSubmitQueue.put('''SELECT toripe FROM sent WHERE (status=? AND folder='sent')''')
        shared.sqlSubmitQueue.put(('findingpubkey',))
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlLock.release()
        for row in queryreturn:
            toripe, = row
            #It is possible for the status of a message in our sent folder (which is also our 'outbox' folder) to have a status of 'findingpubkey' even if we have the pubkey. This can
            #happen if the worker thread is working on the POW for an earlier message and does not get to the message in question before the user closes Bitmessage. In this case, the
            #status will still be 'findingpubkey' but Bitmessage will never have checked to see whether it actually already has the pubkey. We should therefore check here.
            shared.sqlLock.acquire()
            shared.sqlSubmitQueue.put('''SELECT hash FROM pubkeys WHERE hash=? ''')
            shared.sqlSubmitQueue.put((toripe,))
            queryreturn = shared.sqlReturnQueue.get()
            shared.sqlLock.release()
            if queryreturn != []: #If we have the pubkey then send the message otherwise put the hash in the neededPubkeys data structure so that we will pay attention to it if it comes over the wire.
                self.sendMsg(toripe)
            else:
                neededPubkeys[toripe] = 0
        self.sendBroadcast() #just in case there are any proof of work tasks for Broadcasts that have yet to be sent.
        # Startup recovery, part 2: messages whose POW was interrupted ('doingpow').
        #Now let us see if there are any proofs of work for msg messages that we have yet to complete..
        shared.sqlLock.acquire()
        t = ('doingpow',)
        shared.sqlSubmitQueue.put('''SELECT toripe FROM sent WHERE status=? and folder='sent' ''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlLock.release()
        for row in queryreturn:
            toripe, = row
            #Evidentially there is a remote possibility that we may, for some reason, no longer have the recipient's pubkey. Let us make sure we still have it or else the sendMsg function will appear to freeze.
            shared.sqlLock.acquire()
            shared.sqlSubmitQueue.put('''SELECT hash FROM pubkeys WHERE hash=? ''')
            shared.sqlSubmitQueue.put((toripe,))
            queryreturn = shared.sqlReturnQueue.get()
            shared.sqlLock.release()
            if queryreturn != []:
                #We have the needed pubkey
                self.sendMsg(toripe)
            else:
                shared.printLock.acquire()
                sys.stderr.write('For some reason, the status of a message in our outbox is \'doingpow\' even though we lack the pubkey. Here is the RIPE hash of the needed pubkey: %s\n' % toripe.encode('hex'))
                shared.printLock.release()
        # Main command loop: blocks on the worker queue.
        while True:
            command, data = shared.workerQueue.get()
            #statusbar = 'The singleWorker thread is working on work.'
            #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),statusbar)
            if command == 'sendmessage':
                toAddress = data
                toStatus,toAddressVersionNumber,toStreamNumber,toRipe = decodeAddress(toAddress)
                #print 'message type', type(message)
                #print repr(message.toUtf8())
                #print str(message.toUtf8())
                shared.sqlLock.acquire()
                shared.sqlSubmitQueue.put('SELECT hash FROM pubkeys WHERE hash=?')
                shared.sqlSubmitQueue.put((toRipe,))
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlLock.release()
                #print 'queryreturn', queryreturn
                if queryreturn == []:
                    #We'll need to request the pub key because we don't have it.
                    if not toRipe in neededPubkeys:
                        neededPubkeys[toRipe] = 0
                        print 'requesting pubkey:', toRipe.encode('hex')
                        self.requestPubKey(toAddressVersionNumber,toStreamNumber,toRipe)
                    else:
                        print 'We have already requested this pubkey (the ripe hash is in neededPubkeys). We will re-request again soon.'
                        #self.emit(SIGNAL("updateSentItemStatusByHash(PyQt_PyObject,PyQt_PyObject)"),toRipe,'Public key was requested earlier. Receiver must be offline. Will retry.')
                        shared.UISignalQueue.put(('updateSentItemStatusByHash',(toRipe,'Public key was requested earlier. Receiver must be offline. Will retry.')))
                else:
                    print 'We already have the necessary public key.'
                    self.sendMsg(toRipe) #by calling this function, we are asserting that we already have the pubkey for toRipe
            elif command == 'sendbroadcast':
                print 'Within WorkerThread, processing sendbroadcast command.'
                fromAddress,subject,message = data
                self.sendBroadcast()
            elif command == 'doPOWForMyV2Pubkey':
                self.doPOWForMyV2Pubkey(data)
            elif command == 'doPOWForMyV3Pubkey':
                self.doPOWForMyV3Pubkey(data)
            elif command == 'newpubkey':
                # A pubkey arrived over the wire; if we were waiting for it,
                # send the pending message(s) now.
                toAddressVersion,toStreamNumber,toRipe = data
                if toRipe in neededPubkeys:
                    print 'We have been awaiting the arrival of this pubkey.'
                    del neededPubkeys[toRipe]
                    self.sendMsg(toRipe)
                else:
                    shared.printLock.acquire()
                    print 'We don\'t need this pub key. We didn\'t ask for it. Pubkey hash:', toRipe.encode('hex')
                    shared.printLock.release()
            else:
                shared.printLock.acquire()
                sys.stderr.write('Probable programming error: The command sent to the workerThread is weird. It is: %s\n' % command)
                shared.printLock.release()
            shared.workerQueue.task_done()
    def doPOWForMyV2Pubkey(self,hash): #This function also broadcasts out the pubkey message once it is done with the POW
        """Assemble a version-2 pubkey object for one of our own addresses
        (looked up by its RIPE hash), do the proof of work, add it to the
        inventory, and advertise it to peers.
        """
        #Look up my stream number based on my address hash
        """configSections = shared.config.sections()
        for addressInKeysFile in configSections:
            if addressInKeysFile <> 'bitmessagesettings':
                status,addressVersionNumber,streamNumber,hashFromThisParticularAddress = decodeAddress(addressInKeysFile)
                if hash == hashFromThisParticularAddress:
                    myAddress = addressInKeysFile
                    break"""
        myAddress = shared.myAddressesByHash[hash]
        status,addressVersionNumber,streamNumber,hash = decodeAddress(myAddress)
        embeddedTime = int(time.time()+random.randrange(-300, 300)) #the current time plus or minus five minutes
        payload = pack('>I',(embeddedTime))
        payload += encodeVarint(addressVersionNumber) #Address version number
        payload += encodeVarint(streamNumber)
        payload += '\x00\x00\x00\x01' #bitfield of features supported by me (see the wiki).
        try:
            privSigningKeyBase58 = shared.config.get(myAddress, 'privsigningkey')
            privEncryptionKeyBase58 = shared.config.get(myAddress, 'privencryptionkey')
        except Exception, err:
            shared.printLock.acquire()
            sys.stderr.write('Error within doPOWForMyV2Pubkey. Could not read the keys from the keys.dat file for a requested address. %s\n' % err)
            shared.printLock.release()
            return
        privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
        privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
        # The leading byte of each 65-byte uncompressed pubkey is dropped
        # before transmission (pubSigningKey[1:] below).
        pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex')
        pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
        payload += pubSigningKey[1:]
        payload += pubEncryptionKey[1:]
        #Do the POW for this pubkey message
        # Brute-force a nonce until the double-SHA512 of (nonce + payload
        # hash) falls below the network-difficulty target.
        nonce = 0
        trialValue = 99999999999999999999
        target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
        print '(For pubkey message) Doing proof of work...'
        initialHash = hashlib.sha512(payload).digest()
        while trialValue > target:
            nonce += 1
            trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
        print '(For pubkey message) Found proof of work', trialValue, 'Nonce:', nonce
        payload = pack('>Q',nonce) + payload
        """t = (hash,payload,embeddedTime,'no')
        shared.sqlLock.acquire()
        shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlSubmitQueue.put('commit')
        shared.sqlLock.release()"""
        inventoryHash = calculateInventoryHash(payload)
        objectType = 'pubkey'
        shared.inventory[inventoryHash] = (objectType, streamNumber, payload, embeddedTime)
        shared.printLock.acquire()
        print 'broadcasting inv with hash:', inventoryHash.encode('hex')
        shared.printLock.release()
        shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
        #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"")
        shared.UISignalQueue.put(('updateStatusBar',''))
        # Record when we last sent this pubkey so we don't rebroadcast too often.
        shared.config.set(myAddress,'lastpubkeysendtime',str(int(time.time())))
        with open(shared.appdata + 'keys.dat', 'wb') as configfile:
            shared.config.write(configfile)
    def doPOWForMyV3Pubkey(self,hash): #This function also broadcasts out the pubkey message once it is done with the POW
        """Like doPOWForMyV2Pubkey but for version-3 addresses: the payload
        additionally carries the address's POW requirements and is signed.
        """
        myAddress = shared.myAddressesByHash[hash]
        status,addressVersionNumber,streamNumber,hash = decodeAddress(myAddress)
        embeddedTime = int(time.time()+random.randrange(-300, 300)) #the current time plus or minus five minutes
        payload = pack('>I',(embeddedTime))
        payload += encodeVarint(addressVersionNumber) #Address version number
        payload += encodeVarint(streamNumber)
        payload += '\x00\x00\x00\x01' #bitfield of features supported by me (see the wiki).
        try:
            privSigningKeyBase58 = shared.config.get(myAddress, 'privsigningkey')
            privEncryptionKeyBase58 = shared.config.get(myAddress, 'privencryptionkey')
        except Exception, err:
            shared.printLock.acquire()
            sys.stderr.write('Error within doPOWForMyV3Pubkey. Could not read the keys from the keys.dat file for a requested address. %s\n' % err)
            shared.printLock.release()
            return
        privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
        privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
        pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex')
        pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
        payload += pubSigningKey[1:]
        payload += pubEncryptionKey[1:]
        # v3 extras: advertise this address's POW requirements, then sign
        # everything assembled so far.
        payload += encodeVarint(shared.config.getint(myAddress,'noncetrialsperbyte'))
        payload += encodeVarint(shared.config.getint(myAddress,'payloadlengthextrabytes'))
        signature = highlevelcrypto.sign(payload,privSigningKeyHex)
        payload += encodeVarint(len(signature))
        payload += signature
        #Do the POW for this pubkey message
        # Brute-force a nonce until the double-SHA512 of (nonce + payload
        # hash) falls below the network-difficulty target.
        nonce = 0
        trialValue = 99999999999999999999
        target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
        print '(For pubkey message) Doing proof of work...'
        initialHash = hashlib.sha512(payload).digest()
        while trialValue > target:
            nonce += 1
            trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
        print '(For pubkey message) Found proof of work', trialValue, 'Nonce:', nonce
        payload = pack('>Q',nonce) + payload
        """t = (hash,payload,embeddedTime,'no')
        shared.sqlLock.acquire()
        shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlSubmitQueue.put('commit')
        shared.sqlLock.release()"""
        inventoryHash = calculateInventoryHash(payload)
        objectType = 'pubkey'
        shared.inventory[inventoryHash] = (objectType, streamNumber, payload, embeddedTime)
        shared.printLock.acquire()
        print 'broadcasting inv with hash:', inventoryHash.encode('hex')
        shared.printLock.release()
        shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
        #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"")
        shared.UISignalQueue.put(('updateStatusBar',''))
        # Record when we last sent this pubkey so we don't rebroadcast too often.
        shared.config.set(myAddress,'lastpubkeysendtime',str(int(time.time())))
        with open(shared.appdata + 'keys.dat', 'wb') as configfile:
            shared.config.write(configfile)
    def sendBroadcast(self):
        """Send every sent-folder message with status 'broadcastpending'.

        Two wire formats: unencrypted version-1 broadcasts for v2 addresses
        (until encryptedBroadcastSwitchoverTime, a module-level deadline),
        and encrypted version-2 broadcasts otherwise. Each broadcast gets a
        proof of work, is placed into the shared inventory, and advertised
        to peers; the row's status then becomes 'broadcastsent'.
        """
        shared.sqlLock.acquire()
        t = ('broadcastpending',)
        shared.sqlSubmitQueue.put('''SELECT fromaddress, subject, message, ackdata FROM sent WHERE status=? and folder='sent' ''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlLock.release()
        for row in queryreturn:
            fromaddress, subject, body, ackdata = row
            status,addressVersionNumber,streamNumber,ripe = decodeAddress(fromaddress)
            # NOTE(review): a v2 address at exactly the switchover second
            # matches neither this branch nor the elif below and falls to the
            # error branch — presumably harmless, but confirm.
            if addressVersionNumber == 2 and int(time.time()) < encryptedBroadcastSwitchoverTime:
                #We need to convert our private keys to public keys in order to include them.
                try:
                    privSigningKeyBase58 = shared.config.get(fromaddress, 'privsigningkey')
                    privEncryptionKeyBase58 = shared.config.get(fromaddress, 'privencryptionkey')
                except:
                    #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')
                    shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')))
                    continue
                privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
                privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
                pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex') #At this time these pubkeys are 65 bytes long because they include the encoding byte which we won't be sending in the broadcast message.
                pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
                payload = pack('>I',(int(time.time())+random.randrange(-300, 300)))#the current time plus or minus five minutes
                payload += encodeVarint(1) #broadcast version
                payload += encodeVarint(addressVersionNumber)
                payload += encodeVarint(streamNumber)
                payload += '\x00\x00\x00\x01' #behavior bitfield
                payload += pubSigningKey[1:]
                payload += pubEncryptionKey[1:]
                payload += ripe
                payload += '\x02' #message encoding type
                payload += encodeVarint(len('Subject:' + subject + '\n' + 'Body:' + body)) #Type 2 is simple UTF-8 message encoding.
                payload += 'Subject:' + subject + '\n' + 'Body:' + body
                # The signature covers everything assembled above, including the message text.
                signature = highlevelcrypto.sign(payload,privSigningKeyHex)
                payload += encodeVarint(len(signature))
                payload += signature
                # Proof of work: brute-force a nonce until the double-SHA512
                # of (nonce + payload hash) falls below the difficulty target.
                nonce = 0
                trialValue = 99999999999999999999
                target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
                print '(For broadcast message) Doing proof of work...'
                #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Doing work necessary to send broadcast...')
                shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Doing work necessary to send broadcast...')))
                initialHash = hashlib.sha512(payload).digest()
                while trialValue > target:
                    nonce += 1
                    trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
                print '(For broadcast message) Found proof of work', trialValue, 'Nonce:', nonce
                payload = pack('>Q',nonce) + payload
                inventoryHash = calculateInventoryHash(payload)
                objectType = 'broadcast'
                shared.inventory[inventoryHash] = (objectType, streamNumber, payload, int(time.time()))
                print 'sending inv (within sendBroadcast function)'
                shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
                #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Broadcast sent on '+unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))
                shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Broadcast sent on '+unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))))
                #Update the status of the message in the 'sent' table to have a 'broadcastsent' status
                shared.sqlLock.acquire()
                t = ('broadcastsent',int(time.time()),fromaddress, subject, body,'broadcastpending')
                shared.sqlSubmitQueue.put('UPDATE sent SET status=?, lastactiontime=? WHERE fromaddress=? AND subject=? AND message=? AND status=?')
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlSubmitQueue.put('commit')
                shared.sqlLock.release()
            elif addressVersionNumber == 3 or int(time.time()) > encryptedBroadcastSwitchoverTime:
                #We need to convert our private keys to public keys in order to include them.
                try:
                    privSigningKeyBase58 = shared.config.get(fromaddress, 'privsigningkey')
                    privEncryptionKeyBase58 = shared.config.get(fromaddress, 'privencryptionkey')
                except:
                    #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')
                    shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')))
                    continue
                privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
                privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
                pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex') #At this time these pubkeys are 65 bytes long because they include the encoding byte which we won't be sending in the broadcast message.
                pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
                # Outer (cleartext) header: time, broadcast version 2, stream.
                payload = pack('>I',(int(time.time())+random.randrange(-300, 300)))#the current time plus or minus five minutes
                payload += encodeVarint(2) #broadcast version
                payload += encodeVarint(streamNumber)
                # Everything from here down goes inside the encrypted blob.
                dataToEncrypt = encodeVarint(2) #broadcast version
                dataToEncrypt += encodeVarint(addressVersionNumber)
                dataToEncrypt += encodeVarint(streamNumber)
                dataToEncrypt += '\x00\x00\x00\x01' #behavior bitfield
                dataToEncrypt += pubSigningKey[1:]
                dataToEncrypt += pubEncryptionKey[1:]
                if addressVersionNumber >= 3:
                    dataToEncrypt += encodeVarint(shared.config.getint(fromaddress,'noncetrialsperbyte'))
                    dataToEncrypt += encodeVarint(shared.config.getint(fromaddress,'payloadlengthextrabytes'))
                dataToEncrypt += '\x02' #message encoding type
                dataToEncrypt += encodeVarint(len('Subject:' + subject + '\n' + 'Body:' + body)) #Type 2 is simple UTF-8 message encoding.
                dataToEncrypt += 'Subject:' + subject + '\n' + 'Body:' + body
                # NOTE(review): this signs `payload` (the 9-ish-byte outer
                # header), not `dataToEncrypt` — so the signature does not
                # cover the message text. That looks suspicious; confirm
                # against the protocol spec before changing, as a change here
                # would be a network-visible behavior change.
                signature = highlevelcrypto.sign(payload,privSigningKeyHex)
                dataToEncrypt += encodeVarint(len(signature))
                dataToEncrypt += signature
                # The encryption key is derived deterministically from the
                # sender's address data, so any subscriber can recompute it.
                privEncryptionKey = hashlib.sha512(encodeVarint(addressVersionNumber)+encodeVarint(streamNumber)+ripe).digest()[:32]
                pubEncryptionKey = pointMult(privEncryptionKey)
                payload += highlevelcrypto.encrypt(dataToEncrypt,pubEncryptionKey.encode('hex'))
                # Proof of work, identical scheme to the branch above.
                nonce = 0
                trialValue = 99999999999999999999
                target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
                print '(For broadcast message) Doing proof of work...'
                #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Doing work necessary to send broadcast...')
                shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Doing work necessary to send broadcast...')))
                initialHash = hashlib.sha512(payload).digest()
                while trialValue > target:
                    nonce += 1
                    trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
                print '(For broadcast message) Found proof of work', trialValue, 'Nonce:', nonce
                payload = pack('>Q',nonce) + payload
                inventoryHash = calculateInventoryHash(payload)
                objectType = 'broadcast'
                shared.inventory[inventoryHash] = (objectType, streamNumber, payload, int(time.time()))
                print 'sending inv (within sendBroadcast function)'
                shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
                #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Broadcast sent on '+unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))
                shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Broadcast sent on '+unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))))
                #Update the status of the message in the 'sent' table to have a 'broadcastsent' status
                shared.sqlLock.acquire()
                t = ('broadcastsent',int(time.time()),fromaddress, subject, body,'broadcastpending')
                shared.sqlSubmitQueue.put('UPDATE sent SET status=?, lastactiontime=? WHERE fromaddress=? AND subject=? AND message=? AND status=?')
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlSubmitQueue.put('commit')
                shared.sqlLock.release()
            else:
                shared.printLock.acquire()
                sys.stderr.write('Error: In the singleWorker thread, the sendBroadcast function doesn\'t understand the address version.\n')
                shared.printLock.release()
    def sendMsg(self,toRipe):
        """Send every queued message destined for the recipient whose ripe
        hash is toRipe. Called once the recipient's pubkey is available in
        our pubkeys table. For each pending message: assemble the msg data,
        sign it, encrypt it to the recipient's encryption key, complete the
        proof of work, advertise the inventory object, and update the
        message's status in the 'sent' table.
        """
        # Promote any queued messages for this recipient from
        # 'findingpubkey' to 'doingpow' so the SELECT below finds them
        # and the UI reflects the new state.
        shared.sqlLock.acquire()
        t = ('doingpow','findingpubkey',toRipe)
        shared.sqlSubmitQueue.put('''UPDATE sent SET status=? WHERE status=? AND toripe=? and folder='sent' ''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlSubmitQueue.put('commit')
        t = ('doingpow',toRipe)
        shared.sqlSubmitQueue.put('''SELECT toaddress, fromaddress, subject, message, ackdata FROM sent WHERE status=? AND toripe=? and folder='sent' ''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlLock.release()
        for row in queryreturn:
            toaddress, fromaddress, subject, message, ackdata = row
            # Register this ackdata so that the incoming-message code
            # recognizes the recipient's acknowledgement when it arrives.
            ackdataForWhichImWatching[ackdata] = 0
            toStatus,toAddressVersionNumber,toStreamNumber,toHash = decodeAddress(toaddress)
            fromStatus,fromAddressVersionNumber,fromStreamNumber,fromHash = decodeAddress(fromaddress)
            #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Doing work necessary to send the message.')
            shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Doing work necessary to send the message.')))
            shared.printLock.acquire()
            print 'Found a message in our database that needs to be sent with this pubkey.'
            print 'First 150 characters of message:', message[:150]
            shared.printLock.release()
            embeddedTime = pack('>I',(int(time.time())+random.randrange(-300, 300)))#the current time plus or minus five minutes. We will use this time both for our message and for the ackdata packed within our message.
            if fromAddressVersionNumber == 2:
                payload = '\x01' #Message version.
                payload += encodeVarint(fromAddressVersionNumber)
                payload += encodeVarint(fromStreamNumber)
                payload += '\x00\x00\x00\x01' #Bitfield of features and behaviors that can be expected from me. (See https://bitmessage.org/wiki/Protocol_specification#Pubkey_bitfield_features )
                #We need to convert our private keys to public keys in order to include them.
                try:
                    privSigningKeyBase58 = shared.config.get(fromaddress, 'privsigningkey')
                    privEncryptionKeyBase58 = shared.config.get(fromaddress, 'privencryptionkey')
                except:
                    #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')
                    shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')))
                    continue
                privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
                privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
                pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex')
                pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
                payload += pubSigningKey[1:] #The \x04 on the beginning of the public keys are not sent. This way there is only one acceptable way to encode and send a public key.
                payload += pubEncryptionKey[1:]
                payload += toHash #This hash will be checked by the receiver of the message to verify that toHash belongs to them. This prevents a Surreptitious Forwarding Attack.
                payload += '\x02' #Type 2 is simple UTF-8 message encoding as specified on the Protocol Specification on the Bitmessage Wiki.
                messageToTransmit = 'Subject:' + subject + '\n' + 'Body:' + message
                payload += encodeVarint(len(messageToTransmit))
                payload += messageToTransmit
                fullAckPayload = self.generateFullAckMessage(ackdata,toStreamNumber,embeddedTime)#The fullAckPayload is a normal msg protocol message with the proof of work already completed that the receiver of this message can easily send out.
                payload += encodeVarint(len(fullAckPayload))
                payload += fullAckPayload
                signature = highlevelcrypto.sign(payload,privSigningKeyHex)
                payload += encodeVarint(len(signature))
                payload += signature
            if fromAddressVersionNumber == 3:
                # Same as version 2 above except that v3 senders also embed
                # the proof-of-work requirements they demand from repliers.
                payload = '\x01' #Message version.
                payload += encodeVarint(fromAddressVersionNumber)
                payload += encodeVarint(fromStreamNumber)
                payload += '\x00\x00\x00\x01' #Bitfield of features and behaviors that can be expected from me. (See https://bitmessage.org/wiki/Protocol_specification#Pubkey_bitfield_features )
                #We need to convert our private keys to public keys in order to include them.
                try:
                    privSigningKeyBase58 = shared.config.get(fromaddress, 'privsigningkey')
                    privEncryptionKeyBase58 = shared.config.get(fromaddress, 'privencryptionkey')
                except:
                    #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')
                    shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')))
                    continue
                privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
                privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
                pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex')
                pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
                payload += pubSigningKey[1:] #The \x04 on the beginning of the public keys are not sent. This way there is only one acceptable way to encode and send a public key.
                payload += pubEncryptionKey[1:]
                #If the receiver of our message is in our address book, subscriptions list, or whitelist then we will allow them to do the network-minimum proof of work. Let us check to see if the receiver is in any of those lists.
                if shared.isAddressInMyAddressBookSubscriptionsListOrWhitelist(toaddress):
                    payload += encodeVarint(shared.networkDefaultProofOfWorkNonceTrialsPerByte)
                    payload += encodeVarint(shared.networkDefaultPayloadLengthExtraBytes)
                else:
                    payload += encodeVarint(shared.config.getint(fromaddress,'noncetrialsperbyte'))
                    payload += encodeVarint(shared.config.getint(fromaddress,'payloadlengthextrabytes'))
                payload += toHash #This hash will be checked by the receiver of the message to verify that toHash belongs to them. This prevents a Surreptitious Forwarding Attack.
                payload += '\x02' #Type 2 is simple UTF-8 message encoding as specified on the Protocol Specification on the Bitmessage Wiki.
                messageToTransmit = 'Subject:' + subject + '\n' + 'Body:' + message
                payload += encodeVarint(len(messageToTransmit))
                payload += messageToTransmit
                fullAckPayload = self.generateFullAckMessage(ackdata,toStreamNumber,embeddedTime)#The fullAckPayload is a normal msg protocol message with the proof of work already completed that the receiver of this message can easily send out.
                payload += encodeVarint(len(fullAckPayload))
                payload += fullAckPayload
                signature = highlevelcrypto.sign(payload,privSigningKeyHex)
                payload += encodeVarint(len(signature))
                payload += signature
            #We have assembled the data that will be encrypted. Now let us fetch the recipient's public key out of our database and do the encryption.
            if toAddressVersionNumber == 2 or toAddressVersionNumber == 3:
                shared.sqlLock.acquire()
                shared.sqlSubmitQueue.put('SELECT transmitdata FROM pubkeys WHERE hash=?')
                shared.sqlSubmitQueue.put((toRipe,))
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlLock.release()
                if queryreturn == []:
                    shared.printLock.acquire()
                    sys.stderr.write('(within sendMsg) The needed pubkey was not found. This should never happen. Aborting send.\n')
                    shared.printLock.release()
                    return
                for row in queryreturn:
                    pubkeyPayload, = row
                #The pubkey is stored the way we originally received it which means that we need to read beyond things like the nonce and time to get to the public keys.
                readPosition = 8 #to bypass the nonce
                readPosition += 4 #to bypass the embedded time
                readPosition += 1 #to bypass the address version whose length is definitely 1
                # NOTE(review): this reassigns streamNumber from the stored
                # pubkey payload (shadowing nothing visible here but used for
                # the sendinv below); it should equal toStreamNumber -- confirm.
                streamNumber, streamNumberLength = decodeVarint(pubkeyPayload[readPosition:readPosition+10])
                readPosition += streamNumberLength
                behaviorBitfield = pubkeyPayload[readPosition:readPosition+4]
                readPosition += 4 #to bypass the bitfield of behaviors
                #pubSigningKeyBase256 = pubkeyPayload[readPosition:readPosition+64] #We don't use this key for anything here.
                readPosition += 64
                pubEncryptionKeyBase256 = pubkeyPayload[readPosition:readPosition+64]
                readPosition += 64
                # Determine the proof-of-work difficulty the recipient demands.
                if toAddressVersionNumber == 2:
                    requiredAverageProofOfWorkNonceTrialsPerByte = shared.networkDefaultProofOfWorkNonceTrialsPerByte
                    requiredPayloadLengthExtraBytes = shared.networkDefaultPayloadLengthExtraBytes
                elif toAddressVersionNumber == 3:
                    requiredAverageProofOfWorkNonceTrialsPerByte, varintLength = decodeVarint(pubkeyPayload[readPosition:readPosition+10])
                    readPosition += varintLength
                    requiredPayloadLengthExtraBytes, varintLength = decodeVarint(pubkeyPayload[readPosition:readPosition+10])
                    readPosition += varintLength
                    if requiredAverageProofOfWorkNonceTrialsPerByte < shared.networkDefaultProofOfWorkNonceTrialsPerByte: #We still have to meet a minimum POW difficulty regardless of what they say is allowed in order to get our message to propagate through the network.
                        requiredAverageProofOfWorkNonceTrialsPerByte = shared.networkDefaultProofOfWorkNonceTrialsPerByte
                    if requiredPayloadLengthExtraBytes < shared.networkDefaultPayloadLengthExtraBytes:
                        requiredPayloadLengthExtraBytes = shared.networkDefaultPayloadLengthExtraBytes
                encrypted = highlevelcrypto.encrypt(payload,"04"+pubEncryptionKeyBase256.encode('hex'))
                nonce = 0
                trialValue = 99999999999999999999
                #We are now dropping the unencrypted data in payload since it has already been encrypted and replacing it with the encrypted payload that we will send out.
                payload = embeddedTime + encodeVarint(toStreamNumber) + encrypted
                target = 2**64 / ((len(payload)+requiredPayloadLengthExtraBytes+8) * requiredAverageProofOfWorkNonceTrialsPerByte)
                shared.printLock.acquire()
                print '(For msg message) Doing proof of work. Total required difficulty:', float(requiredAverageProofOfWorkNonceTrialsPerByte)/shared.networkDefaultProofOfWorkNonceTrialsPerByte,'Required small message difficulty:', float(requiredPayloadLengthExtraBytes)/shared.networkDefaultPayloadLengthExtraBytes
                shared.printLock.release()
                powStartTime = time.time()
                initialHash = hashlib.sha512(payload).digest()
                while trialValue > target:
                    nonce += 1
                    trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
                print '(For msg message) Found proof of work', trialValue, 'Nonce:', nonce
                try:
                    print 'POW took', int(time.time()-powStartTime), 'seconds.', nonce/(time.time()-powStartTime), 'nonce trials per second.'
                except:
                    # Division by a zero elapsed time is possible on very fast POWs; ignore.
                    pass
                payload = pack('>Q',nonce) + payload
                inventoryHash = calculateInventoryHash(payload)
                objectType = 'msg'
                shared.inventory[inventoryHash] = (objectType, toStreamNumber, payload, int(time.time()))
                #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Message sent. Waiting on acknowledgement. Sent on ' + unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))
                shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Message sent. Waiting on acknowledgement. Sent on ' + unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))))
                print 'sending inv (within sendmsg function)'
                shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
                #Update the status of the message in the 'sent' table to have a 'sent' status
                shared.sqlLock.acquire()
                t = ('sentmessage',toaddress, fromaddress, subject, message,'doingpow')
                shared.sqlSubmitQueue.put('UPDATE sent SET status=? WHERE toaddress=? AND fromaddress=? AND subject=? AND message=? AND status=?')
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                # Mark the pubkey as personally used so it isn't expired from our table.
                t = (toRipe,)
                shared.sqlSubmitQueue.put('''UPDATE pubkeys SET usedpersonally='yes' WHERE hash=?''')
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlSubmitQueue.put('commit')
                shared.sqlLock.release()
def requestPubKey(self,addressVersionNumber,streamNumber,ripe):
payload = pack('>I',(int(time.time())+random.randrange(-300, 300)))#the current time plus or minus five minutes.
payload += encodeVarint(addressVersionNumber)
payload += encodeVarint(streamNumber)
payload += ripe
shared.printLock.acquire()
print 'making request for pubkey with ripe:', ripe.encode('hex')
shared.printLock.release()
nonce = 0
trialValue = 99999999999999999999
#print 'trial value', trialValue
statusbar = 'Doing the computations necessary to request the recipient\'s public key.'
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),statusbar)
shared.UISignalQueue.put(('updateStatusBar',statusbar))
#self.emit(SIGNAL("updateSentItemStatusByHash(PyQt_PyObject,PyQt_PyObject)"),ripe,'Doing work necessary to request public key.')
shared.UISignalQueue.put(('updateSentItemStatusByHash',(ripe,'Doing work necessary to request public key.')))
print 'Doing proof-of-work necessary to send getpubkey message.'
target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
initialHash = hashlib.sha512(payload).digest()
while trialValue > target:
nonce += 1
trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
shared.printLock.acquire()
print 'Found proof of work', trialValue, 'Nonce:', nonce
shared.printLock.release()
payload = pack('>Q',nonce) + payload
inventoryHash = calculateInventoryHash(payload)
objectType = 'getpubkey'
shared.inventory[inventoryHash] = (objectType, streamNumber, payload, int(time.time()))
print 'sending inv (for the getpubkey message)'
shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),'Broacasting the public key request. This program will auto-retry if they are offline.')
shared.UISignalQueue.put(('updateStatusBar','Broacasting the public key request. This program will auto-retry if they are offline.'))
#self.emit(SIGNAL("updateSentItemStatusByHash(PyQt_PyObject,PyQt_PyObject)"),ripe,'Sending public key request. Waiting for reply. Requested at ' + unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))
shared.UISignalQueue.put(('updateSentItemStatusByHash',(ripe,'Sending public key request. Waiting for reply. Requested at ' + unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))))
def generateFullAckMessage(self,ackdata,toStreamNumber,embeddedTime):
nonce = 0
trialValue = 99999999999999999999
payload = embeddedTime + encodeVarint(toStreamNumber) + ackdata
target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
shared.printLock.acquire()
print '(For ack message) Doing proof of work...'
shared.printLock.release()
powStartTime = time.time()
initialHash = hashlib.sha512(payload).digest()
while trialValue > target:
nonce += 1
trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
shared.printLock.acquire()
print '(For ack message) Found proof of work', trialValue, 'Nonce:', nonce
try:
print 'POW took', int(time.time()-powStartTime), 'seconds.', nonce/(time.time()-powStartTime), 'nonce trials per second.'
except:
pass
shared.printLock.release()
payload = pack('>Q',nonce) + payload
headerData = '\xe9\xbe\xb4\xd9' #magic bits, slighly different from Bitcoin's magic bits.
headerData += 'msg\x00\x00\x00\x00\x00\x00\x00\x00\x00'
headerData += pack('>L',len(payload))
headerData += hashlib.sha512(payload).digest()[:4]
return headerData + payload
class addressGenerator(threading.Thread):
def __init__(self):
#QThread.__init__(self, parent)
threading.Thread.__init__(self)
def run(self):
while True:
queueValue = shared.addressGeneratorQueue.get()
nonceTrialsPerByte = 0
payloadLengthExtraBytes = 0
if len(queueValue) == 6:
addressVersionNumber,streamNumber,label,numberOfAddressesToMake,deterministicPassphrase,eighteenByteRipe = queueValue
elif len(queueValue) == 8:
addressVersionNumber,streamNumber,label,numberOfAddressesToMake,deterministicPassphrase,eighteenByteRipe,nonceTrialsPerByte,payloadLengthExtraBytes = queueValue
else:
sys.stderr.write('Programming error: A structure with the wrong number of values was passed into the addressGeneratorQueue. Here is the queueValue: %s\n' % queueValue)
if addressVersionNumber < 3 or addressVersionNumber > 3:
sys.stderr.write('Program error: For some reason the address generator queue has been given a request to create at least one version %s address which it cannot do.\n' % addressVersionNumber)
if nonceTrialsPerByte == 0:
nonceTrialsPerByte = shared.config.getint('bitmessagesettings','defaultnoncetrialsperbyte')
if nonceTrialsPerByte < shared.networkDefaultProofOfWorkNonceTrialsPerByte:
nonceTrialsPerByte = shared.networkDefaultProofOfWorkNonceTrialsPerByte
if payloadLengthExtraBytes == 0:
payloadLengthExtraBytes = shared.config.getint('bitmessagesettings','defaultpayloadlengthextrabytes')
if payloadLengthExtraBytes < shared.networkDefaultPayloadLengthExtraBytes:
payloadLengthExtraBytes = shared.networkDefaultPayloadLengthExtraBytes
if addressVersionNumber == 3: #currently the only one supported.
if deterministicPassphrase == "":
shared.UISignalQueue.put(('updateStatusBar','Generating one new address'))
#This next section is a little bit strange. We're going to generate keys over and over until we
#find one that starts with either \x00 or \x00\x00. Then when we pack them into a Bitmessage address,
#we won't store the \x00 or \x00\x00 bytes thus making the address shorter.
startTime = time.time()
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix = 0
potentialPrivSigningKey = OpenSSL.rand(32)
potentialPubSigningKey = pointMult(potentialPrivSigningKey)
while True:
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix += 1
potentialPrivEncryptionKey = OpenSSL.rand(32)
potentialPubEncryptionKey = pointMult(potentialPrivEncryptionKey)
#print 'potentialPubSigningKey', potentialPubSigningKey.encode('hex')
#print 'potentialPubEncryptionKey', potentialPubEncryptionKey.encode('hex')
ripe = hashlib.new('ripemd160')
sha = hashlib.new('sha512')
sha.update(potentialPubSigningKey+potentialPubEncryptionKey)
ripe.update(sha.digest())
#print 'potential ripe.digest', ripe.digest().encode('hex')
if eighteenByteRipe:
if ripe.digest()[:2] == '\x00\x00':
break
else:
if ripe.digest()[:1] == '\x00':
break
print 'Generated address with ripe digest:', ripe.digest().encode('hex')
print 'Address generator calculated', numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix, 'addresses at', numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix/(time.time()-startTime),'addresses per second before finding one with the correct ripe-prefix.'
address = encodeAddress(3,streamNumber,ripe.digest())
#An excellent way for us to store our keys is in Wallet Import Format. Let us convert now.
#https://en.bitcoin.it/wiki/Wallet_import_format
privSigningKey = '\x80'+potentialPrivSigningKey
checksum = hashlib.sha256(hashlib.sha256(privSigningKey).digest()).digest()[0:4]
privSigningKeyWIF = arithmetic.changebase(privSigningKey + checksum,256,58)
#print 'privSigningKeyWIF',privSigningKeyWIF
privEncryptionKey = '\x80'+potentialPrivEncryptionKey
checksum = hashlib.sha256(hashlib.sha256(privEncryptionKey).digest()).digest()[0:4]
privEncryptionKeyWIF = arithmetic.changebase(privEncryptionKey + checksum,256,58)
#print 'privEncryptionKeyWIF',privEncryptionKeyWIF
shared.config.add_section(address)
shared.config.set(address,'label',label)
shared.config.set(address,'enabled','true')
shared.config.set(address,'decoy','false')
shared.config.set(address,'noncetrialsperbyte',str(nonceTrialsPerByte))
shared.config.set(address,'payloadlengthextrabytes',str(payloadLengthExtraBytes))
shared.config.set(address,'privSigningKey',privSigningKeyWIF)
shared.config.set(address,'privEncryptionKey',privEncryptionKeyWIF)
with open(shared.appdata + 'keys.dat', 'wb') as configfile:
shared.config.write(configfile)
#It may be the case that this address is being generated as a result of a call to the API. Let us put the result in the necessary queue.
apiAddressGeneratorReturnQueue.put(address)
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),'Done generating address. Doing work necessary to broadcast it...')
shared.UISignalQueue.put(('updateStatusBar','Done generating address. Doing work necessary to broadcast it...'))
#self.emit(SIGNAL("writeNewAddressToTable(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),self.label,address,str(streamNumber))
shared.UISignalQueue.put(('writeNewAddressToTable',(label,address,streamNumber)))
shared.reloadMyAddressHashes()
shared.workerQueue.put(('doPOWForMyV3Pubkey',ripe.digest()))
else: #There is something in the deterministicPassphrase variable thus we are going to do this deterministically.
statusbar = 'Generating '+str(numberOfAddressesToMake) + ' new addresses.'
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),statusbar)
shared.UISignalQueue.put(('updateStatusBar',statusbar))
signingKeyNonce = 0
encryptionKeyNonce = 1
listOfNewAddressesToSendOutThroughTheAPI = [] #We fill out this list no matter what although we only need it if we end up passing the info to the API.
for i in range(numberOfAddressesToMake):
#This next section is a little bit strange. We're going to generate keys over and over until we
#find one that has a RIPEMD hash that starts with either \x00 or \x00\x00. Then when we pack them
#into a Bitmessage address, we won't store the \x00 or \x00\x00 bytes thus making the address shorter.
startTime = time.time()
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix = 0
while True:
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix += 1
potentialPrivSigningKey = hashlib.sha512(deterministicPassphrase + encodeVarint(signingKeyNonce)).digest()[:32]
potentialPrivEncryptionKey = hashlib.sha512(deterministicPassphrase + encodeVarint(encryptionKeyNonce)).digest()[:32]
potentialPubSigningKey = pointMult(potentialPrivSigningKey)
potentialPubEncryptionKey = pointMult(potentialPrivEncryptionKey)
#print 'potentialPubSigningKey', potentialPubSigningKey.encode('hex')
#print 'potentialPubEncryptionKey', potentialPubEncryptionKey.encode('hex')
signingKeyNonce += 2
encryptionKeyNonce += 2
ripe = hashlib.new('ripemd160')
sha = hashlib.new('sha512')
sha.update(potentialPubSigningKey+potentialPubEncryptionKey)
ripe.update(sha.digest())
#print 'potential ripe.digest', ripe.digest().encode('hex')
if eighteenByteRipe:
if ripe.digest()[:2] == '\x00\x00':
break
else:
if ripe.digest()[:1] == '\x00':
break
print 'ripe.digest', ripe.digest().encode('hex')
print 'Address generator calculated', numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix, 'addresses at', numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix/(time.time()-startTime),'keys per second.'
address = encodeAddress(3,streamNumber,ripe.digest())
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),'Finished generating address. Writing to keys.dat')
#An excellent way for us to store our keys is in Wallet Import Format. Let us convert now.
#https://en.bitcoin.it/wiki/Wallet_import_format
privSigningKey = '\x80'+potentialPrivSigningKey
checksum = hashlib.sha256(hashlib.sha256(privSigningKey).digest()).digest()[0:4]
privSigningKeyWIF = arithmetic.changebase(privSigningKey + checksum,256,58)
privEncryptionKey = '\x80'+potentialPrivEncryptionKey
checksum = hashlib.sha256(hashlib.sha256(privEncryptionKey).digest()).digest()[0:4]
privEncryptionKeyWIF = arithmetic.changebase(privEncryptionKey + checksum,256,58)
try:
shared.config.add_section(address)
print 'label:', label
shared.config.set(address,'label',label)
shared.config.set(address,'enabled','true')
shared.config.set(address,'decoy','false')
shared.config.set(address,'noncetrialsperbyte',str(nonceTrialsPerByte))
shared.config.set(address,'payloadlengthextrabytes',str(payloadLengthExtraBytes))
shared.config.set(address,'privSigningKey',privSigningKeyWIF)
shared.config.set(address,'privEncryptionKey',privEncryptionKeyWIF)
with open(shared.appdata + 'keys.dat', 'wb') as configfile:
shared.config.write(configfile)
#self.emit(SIGNAL("writeNewAddressToTable(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),self.label,address,str(self.streamNumber))
shared.UISignalQueue.put(('writeNewAddressToTable',(label,address,str(streamNumber))))
listOfNewAddressesToSendOutThroughTheAPI.append(address)
if eighteenByteRipe:
shared.reloadMyAddressHashes()#This is necessary here (rather than just at the end) because otherwise if the human generates a large number of new addresses and uses one before they are done generating, the program will receive a getpubkey message and will ignore it.
except:
print address,'already exists. Not adding it again.'
#It may be the case that this address is being generated as a result of a call to the API. Let us put the result in the necessary queue.
apiAddressGeneratorReturnQueue.put(listOfNewAddressesToSendOutThroughTheAPI)
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),'Done generating address')
shared.UISignalQueue.put(('updateStatusBar','Done generating address'))
shared.reloadMyAddressHashes()
#This is one of several classes that constitute the API
#This class was written by Vaibhav Bhatia. Modified by Jonathan Warren (Atheros).
#http://code.activestate.com/recipes/501148-xmlrpc-serverclient-which-does-cookie-handling-and/
class MySimpleXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
    def do_POST(self):
        """Handle an HTTP POST request.

        Attempts to interpret all HTTP POST requests as XML-RPC calls,
        which are forwarded to the server's _dispatch method for handling.
        Note: this method is the same as in SimpleXMLRPCRequestHandler,
        just hacked to handle cookies.
        """
        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return
        try:
            # Get arguments by reading body of request.
            # We read this in chunks to avoid straining
            # socket.read(); around the 10 or 15Mb mark, some platforms
            # begin to have problems (bug #792570).
            max_chunk_size = 10*1024*1024
            size_remaining = int(self.headers["content-length"])
            L = []
            while size_remaining:
                chunk_size = min(size_remaining, max_chunk_size)
                L.append(self.rfile.read(chunk_size))
                size_remaining -= len(L[-1])
            data = ''.join(L)
            # In previous versions of SimpleXMLRPCServer, _dispatch
            # could be overridden in this class, instead of in
            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
            # check to see if a subclass implements _dispatch and dispatch
            # using that method if present.
            response = self.server._marshaled_dispatch(
                    data, getattr(self, '_dispatch', None)
                )
        except: # This should only happen if the module is buggy
            # internal error, report as HTTP server error
            self.send_response(500)
            self.end_headers()
        else:
            # got a valid XML RPC response
            self.send_response(200)
            self.send_header("Content-type", "text/xml")
            self.send_header("Content-length", str(len(response)))
            # HACK :start -> sends cookies here
            # self.cookies is populated during dispatch; emit one
            # Set-Cookie header per stored cookie.
            if self.cookies:
                for cookie in self.cookies:
                    self.send_header('Set-Cookie',cookie.output(header=''))
            # HACK :end
            self.end_headers()
            self.wfile.write(response)
            # shut down the connection
            self.wfile.flush()
            self.connection.shutdown(1)
def APIAuthenticateClient(self):
if self.headers.has_key('Authorization'):
# handle Basic authentication
(enctype, encstr) = self.headers.get('Authorization').split()
(emailid, password) = encstr.decode('base64').split(':')
if emailid == shared.config.get('bitmessagesettings', 'apiusername') and password == shared.config.get('bitmessagesettings', 'apipassword'):
return True
else:
return False
else:
print 'Authentication failed because header lacks Authentication field'
time.sleep(2)
return False
return False
def _dispatch(self, method, params):
self.cookies = []
validuser = self.APIAuthenticateClient()
if not validuser:
time.sleep(2)
return "RPC Username or password incorrect or HTTP header lacks authentication at all."
# handle request
if method == 'helloWorld':
(a,b) = params
return a+'-'+b
elif method == 'add':
(a,b) = params
return a+b
elif method == 'statusBar':
message, = params
shared.UISignalQueue.put(('updateStatusBar',message))
elif method == 'listAddresses':
data = '{"addresses":['
configSections = shared.config.sections()
for addressInKeysFile in configSections:
if addressInKeysFile <> 'bitmessagesettings':
status,addressVersionNumber,streamNumber,hash = decodeAddress(addressInKeysFile)
data
if len(data) > 20:
data += ','
data += json.dumps({'label':shared.config.get(addressInKeysFile,'label'),'address':addressInKeysFile,'stream':streamNumber,'enabled':shared.config.getboolean(addressInKeysFile,'enabled')},indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'createRandomAddress':
if len(params) == 0:
return 'API Error 0000: I need parameters!'
elif len(params) == 1:
label, = params
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 2:
label, eighteenByteRipe = params
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 3:
label, eighteenByteRipe, totalDifficulty = params
nonceTrialsPerByte = int(shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 4:
label, eighteenByteRipe, totalDifficulty, smallMessageDifficulty = params
nonceTrialsPerByte = int(shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = int(shared.networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
else:
return 'API Error 0000: Too many parameters!'
label = label.decode('base64')
apiAddressGeneratorReturnQueue.queue.clear()
streamNumberForAddress = 1
shared.addressGeneratorQueue.put((3,streamNumberForAddress,label,1,"",eighteenByteRipe,nonceTrialsPerByte,payloadLengthExtraBytes))
return apiAddressGeneratorReturnQueue.get()
elif method == 'createDeterministicAddresses':
if len(params) == 0:
return 'API Error 0000: I need parameters!'
elif len(params) == 1:
passphrase, = params
numberOfAddresses = 1
addressVersionNumber = 0
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 2:
passphrase, numberOfAddresses = params
addressVersionNumber = 0
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 3:
passphrase, numberOfAddresses, addressVersionNumber = params
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 4:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber = params
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 5:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe = params
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 6:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe, totalDifficulty = params
nonceTrialsPerByte = int(shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 7:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe, totalDifficulty, smallMessageDifficulty = params
nonceTrialsPerByte = int(shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = int(shared.networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
else:
return 'API Error 0000: Too many parameters!'
if len(passphrase) == 0:
return 'API Error 0001: The specified passphrase is blank.'
passphrase = passphrase.decode('base64')
if addressVersionNumber == 0: #0 means "just use the proper addressVersionNumber"
addressVersionNumber = 3
if addressVersionNumber != 3:
return 'API Error 0002: The address version number currently must be 3 (or 0 which means auto-select).', addressVersionNumber,' isn\'t supported.'
if streamNumber == 0: #0 means "just use the most available stream"
streamNumber = 1
if streamNumber != 1:
return 'API Error 0003: The stream number must be 1 (or 0 which means auto-select). Others aren\'t supported.'
if numberOfAddresses == 0:
return 'API Error 0004: Why would you ask me to generate 0 addresses for you?'
if numberOfAddresses > 999:
return 'API Error 0005: You have (accidentally?) specified too many addresses to make. Maximum 999. This check only exists to prevent mischief; if you really want to create more addresses than this, contact the Bitmessage developers and we can modify the check or you can do it yourself by searching the source code for this message.'
apiAddressGeneratorReturnQueue.queue.clear()
print 'Requesting that the addressGenerator create', numberOfAddresses, 'addresses.'
shared.addressGeneratorQueue.put((addressVersionNumber,streamNumber,'unused API address',numberOfAddresses,passphrase,eighteenByteRipe,nonceTrialsPerByte,payloadLengthExtraBytes))
data = '{"addresses":['
queueReturn = apiAddressGeneratorReturnQueue.get()
for item in queueReturn:
if len(data) > 20:
data += ','
data += "\""+item+ "\""
data += ']}'
return data
elif method == 'getAllInboxMessages':
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''SELECT msgid, toaddress, fromaddress, subject, received, message FROM inbox where folder='inbox' ORDER BY received''')
shared.sqlSubmitQueue.put('')
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
data = '{"inboxMessages":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, received, message, = row
if len(data) > 25:
data += ','
data += json.dumps({'msgid':msgid.encode('hex'),'toAddress':toAddress,'fromAddress':fromAddress,'subject':subject.encode('base64'),'message':message.encode('base64'),'encodingType':2,'receivedTime':received},indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'trashMessage':
if len(params) == 0:
return 'API Error 0000: I need parameters!'
msgid = params[0].decode('hex')
t = (msgid,)
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''UPDATE inbox SET folder='trash' WHERE msgid=?''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.UISignalQueue.put(('updateStatusBar','Per API: Trashed message (assuming message existed). UI not updated.'))
return 'Trashed message (assuming message existed). UI not updated. To double check, run getAllInboxMessages to see that the message disappeared, or restart Bitmessage and look in the normal Bitmessage GUI.'
elif method == 'sendMessage':
if len(params) == 0:
return 'API Error 0000: I need parameters!'
elif len(params) == 4:
toAddress, fromAddress, subject, message = params
encodingType = 2
elif len(params) == 5:
toAddress, fromAddress, subject, message, encodingType = params
if encodingType != 2:
return 'API Error 0006: The encoding type must be 2 because that is the only one this program currently supports.'
subject = subject.decode('base64')
message = message.decode('base64')
status,addressVersionNumber,streamNumber,toRipe = decodeAddress(toAddress)
if status <> 'success':
shared.printLock.acquire()
print 'API Error 0007: Could not decode address:', toAddress, ':', status
shared.printLock.release()
if status == 'checksumfailed':
return 'API Error 0008: Checksum failed for address: ' + toAddress
if status == 'invalidcharacters':
return 'API Error 0009: Invalid characters in address: '+ toAddress
if status == 'versiontoohigh':
return 'API Error 0010: Address version number too high (or zero) in address: ' + toAddress
if addressVersionNumber < 2 or addressVersionNumber > 3:
return 'API Error 0011: The address version number currently must be 2 or 3. Others aren\'t supported. Check the toAddress.'
if streamNumber != 1:
return 'API Error 0012: The stream number must be 1. Others aren\'t supported. Check the toAddress.'
status,addressVersionNumber,streamNumber,fromRipe = decodeAddress(fromAddress)
if status <> 'success':
shared.printLock.acquire()
print 'API Error 0007: Could not decode address:', fromAddress, ':', status
shared.printLock.release()
if status == 'checksumfailed':
return 'API Error 0008: Checksum failed for address: ' + fromAddress
if status == 'invalidcharacters':
return 'API Error 0009: Invalid characters in address: '+ fromAddress
if status == 'versiontoohigh':
return 'API Error 0010: Address version number too high (or zero) in address: ' + fromAddress
if addressVersionNumber < 2 or addressVersionNumber > 3:
return 'API Error 0011: The address version number currently must be 2 or 3. Others aren\'t supported. Check the fromAddress.'
if streamNumber != 1:
return 'API Error 0012: The stream number must be 1. Others aren\'t supported. Check the fromAddress.'
toAddress = addBMIfNotPresent(toAddress)
fromAddress = addBMIfNotPresent(fromAddress)
try:
fromAddressEnabled = shared.config.getboolean(fromAddress,'enabled')
except:
return 'API Error 0013: Could not find your fromAddress in the keys.dat file.'
if not fromAddressEnabled:
return 'API Error 0014: Your fromAddress is disabled. Cannot send.'
ackdata = OpenSSL.rand(32)
shared.sqlLock.acquire()
t = ('',toAddress,toRipe,fromAddress,subject,message,ackdata,int(time.time()),'findingpubkey',1,1,'sent',2)
shared.sqlSubmitQueue.put('''INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
toLabel = ''
t = (toAddress,)
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''select label from addressbook where address=?''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
if queryreturn <> []:
for row in queryreturn:
toLabel, = row
#apiSignalQueue.put(('displayNewSentMessage',(toAddress,toLabel,fromAddress,subject,message,ackdata)))
shared.UISignalQueue.put(('displayNewSentMessage',(toAddress,toLabel,fromAddress,subject,message,ackdata)))
shared.workerQueue.put(('sendmessage',toAddress))
return ackdata.encode('hex')
elif method == 'sendBroadcast':
if len(params) == 0:
return 'API Error 0000: I need parameters!'
if len(params) == 3:
fromAddress, subject, message = params
encodingType = 2
elif len(params) == 4:
fromAddress, subject, message, encodingType = params
if encodingType != 2:
return 'API Error 0006: The encoding type must be 2 because that is the only one this program currently supports.'
subject = subject.decode('base64')
message = message.decode('base64')
status,addressVersionNumber,streamNumber,fromRipe = decodeAddress(fromAddress)
if status <> 'success':
shared.printLock.acquire()
print 'API Error 0007: Could not decode address:', fromAddress, ':', status
shared.printLock.release()
if status == 'checksumfailed':
return 'API Error 0008: Checksum failed for address: ' + fromAddress
if status == 'invalidcharacters':
return 'API Error 0009: Invalid characters in address: '+ fromAddress
if status == 'versiontoohigh':
return 'API Error 0010: Address version number too high (or zero) in address: ' + fromAddress
if addressVersionNumber < 2 or addressVersionNumber > 3:
return 'API Error 0011: the address version number currently must be 2 or 3. Others aren\'t supported. Check the fromAddress.'
if streamNumber != 1:
return 'API Error 0012: the stream number must be 1. Others aren\'t supported. Check the fromAddress.'
fromAddress = addBMIfNotPresent(fromAddress)
try:
fromAddressEnabled = shared.config.getboolean(fromAddress,'enabled')
except:
return 'API Error 0013: could not find your fromAddress in the keys.dat file.'
ackdata = OpenSSL.rand(32)
toAddress = '[Broadcast subscribers]'
ripe = ''
shared.sqlLock.acquire()
t = ('',toAddress,ripe,fromAddress,subject,message,ackdata,int(time.time()),'broadcastpending',1,1,'sent',2)
shared.sqlSubmitQueue.put('''INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
toLabel = '[Broadcast subscribers]'
#apiSignalQueue.put(('displayNewSentMessage',(toAddress,toLabel,fromAddress,subject,message,ackdata)))
#self.emit(SIGNAL("displayNewSentMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),toAddress,toLabel,fromAddress,subject,message,ackdata)
shared.UISignalQueue.put(('displayNewSentMessage',(toAddress,toLabel,fromAddress,subject,message,ackdata)))
shared.workerQueue.put(('sendbroadcast',(fromAddress,subject,message)))
return ackdata.encode('hex')
elif method == 'getStatus':
if len(params) != 1:
return 'API Error 0000: I need one parameter!'
ackdata, = params
if len(ackdata) != 64:
return 'API Error 0015: The length of ackData should be 32 bytes (encoded in hex thus 64 characters).'
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''SELECT status FROM sent where ackdata=?''')
shared.sqlSubmitQueue.put((ackdata.decode('hex'),))
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
if queryreturn == []:
return 'notFound'
for row in queryreturn:
status, = row
if status == 'findingpubkey':
return 'findingPubkey'
if status == 'doingpow':
return 'doingPow'
if status == 'sentmessage':
return 'sentMessage'
if status == 'ackreceived':
return 'ackReceived'
else:
return 'otherStatus: '+status
else:
return 'Invalid Method: %s'%method
#This thread, of which there is only one, runs the API.
class singleAPI(threading.Thread):
    """Runs the XML-RPC API server. Only one instance of this thread exists."""

    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        # Bind to the interface/port configured in keys.dat and serve forever.
        listenHost = shared.config.get('bitmessagesettings', 'apiinterface')
        listenPort = shared.config.getint('bitmessagesettings', 'apiport')
        server = SimpleXMLRPCServer(
            (listenHost, listenPort), MySimpleXMLRPCRequestHandler, True, True)
        server.register_introspection_functions()
        server.serve_forever()
#The MySimpleXMLRPCRequestHandler class cannot emit signals (or at least I don't know how) because it is not a QT thread. It therefore puts data in a queue which this thread monitors and emits the signals on its behalf.
"""class singleAPISignalHandler(QThread):
def __init__(self, parent = None):
QThread.__init__(self, parent)
def run(self):
while True:
command, data = apiSignalQueue.get()
if command == 'updateStatusBar':
self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),data)
elif command == 'createRandomAddress':
label, eighteenByteRipe = data
streamNumberForAddress = 1
#self.addressGenerator = addressGenerator()
#self.addressGenerator.setup(3,streamNumberForAddress,label,1,"",eighteenByteRipe)
#self.emit(SIGNAL("passAddressGeneratorObjectThrough(PyQt_PyObject)"),self.addressGenerator)
#self.addressGenerator.start()
shared.addressGeneratorQueue.put((3,streamNumberForAddress,label,1,"",eighteenByteRipe))
elif command == 'createDeterministicAddresses':
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe = data
#self.addressGenerator = addressGenerator()
#self.addressGenerator.setup(addressVersionNumber,streamNumber,'unused API address',numberOfAddresses,passphrase,eighteenByteRipe)
#self.emit(SIGNAL("passAddressGeneratorObjectThrough(PyQt_PyObject)"),self.addressGenerator)
#self.addressGenerator.start()
shared.addressGeneratorQueue.put((addressVersionNumber,streamNumber,'unused API address',numberOfAddresses,passphrase,eighteenByteRipe))
elif command == 'displayNewSentMessage':
toAddress,toLabel,fromAddress,subject,message,ackdata = data
self.emit(SIGNAL("displayNewSentMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),toAddress,toLabel,fromAddress,subject,message,ackdata)"""
# --- Module-level shared state used by the networking threads. ---
selfInitiatedConnections = {} #This is a list of current connections (the thread pointers at least)
alreadyAttemptedConnectionsList = {} #This is a list of nodes to which we have already attempted a connection
# ackdata payloads of messages we sent; initialized from the sent table at
# startup. NOTE(review): presumably consulted when acks arrive -- confirm.
ackdataForWhichImWatching = {}
# Guards alreadyAttemptedConnectionsList, which several outgoingSynSender
# threads read and write concurrently.
alreadyAttemptedConnectionsListLock = threading.Lock()
# Random 8-byte magic; its name says it is used to detect connections to
# ourselves (sent in version messages -- TODO confirm against the handshake code).
eightBytesOfRandomDataUsedToDetectConnectionsToSelf = pack('>Q',random.randrange(1, 18446744073709551615))
# NOTE(review): presumably addresses for which we still await a pubkey -- confirm.
neededPubkeys = {}
successfullyDecryptMessageTimings = [] #A list of the amounts of time it took to successfully decrypt msg messages
apiAddressGeneratorReturnQueue = Queue.Queue() #The address generator thread uses this queue to get information back to the API thread.
alreadyAttemptedConnectionsListResetTime = int(time.time()) #used to clear out the alreadyAttemptedConnectionsList periodically so that we will retry connecting to hosts to which we have already tried to connect.
# Make proof of work trivially easy for local testing only; the flag above
# warns this breaks sending/receiving on the real network.
if useVeryEasyProofOfWorkForTesting:
    shared.networkDefaultProofOfWorkNonceTrialsPerByte = int(shared.networkDefaultProofOfWorkNonceTrialsPerByte / 16)
    shared.networkDefaultPayloadLengthExtraBytes = int(shared.networkDefaultPayloadLengthExtraBytes / 7000)
if __name__ == "__main__":
    # is the application already running? If yes then exit.
    thisapp = singleton.singleinstance()
    # Install our Ctrl-C handler so Bitmessage can shut down gracefully.
    signal.signal(signal.SIGINT, signal_handler)
    #signal.signal(signal.SIGINT, signal.SIG_DFL)
    # Check the Major version, the first element in the array
    if sqlite3.sqlite_version_info[0] < 3:
        print 'This program requires sqlite version 3 or higher because 2 and lower cannot store NULL values. I see version:', sqlite3.sqlite_version_info
        os._exit(0)
    #First try to load the config file (the keys.dat file) from the program directory
    shared.config = ConfigParser.SafeConfigParser()
    shared.config.read('keys.dat')
    try:
        # A readable 'settingsversion' key proves the config was found here.
        shared.config.get('bitmessagesettings', 'settingsversion')
        print 'Loading config files from same directory as program'
        shared.appdata = ''
    except:
        #Could not load the keys.dat file in the program directory. Perhaps it is in the appdata directory.
        shared.appdata = shared.lookupAppdataFolder()
        shared.config = ConfigParser.SafeConfigParser()
        shared.config.read(shared.appdata + 'keys.dat')
        try:
            shared.config.get('bitmessagesettings', 'settingsversion')
            print 'Loading existing config files from', shared.appdata
        except:
            #This appears to be the first time running the program; there is no config file (or it cannot be accessed). Create config file.
            shared.config.add_section('bitmessagesettings')
            shared.config.set('bitmessagesettings','settingsversion','5')
            shared.config.set('bitmessagesettings','port','8444')
            shared.config.set('bitmessagesettings','timeformat','%%a, %%d %%b %%Y %%I:%%M %%p')
            shared.config.set('bitmessagesettings','blackwhitelist','black')
            shared.config.set('bitmessagesettings','startonlogon','false')
            if 'linux' in sys.platform:
                shared.config.set('bitmessagesettings','minimizetotray','false')#This isn't implimented yet and when True on Ubuntu causes Bitmessage to disappear while running when minimized.
            else:
                shared.config.set('bitmessagesettings','minimizetotray','true')
            shared.config.set('bitmessagesettings','showtraynotifications','true')
            shared.config.set('bitmessagesettings','startintray','false')
            shared.config.set('bitmessagesettings','socksproxytype','none')
            shared.config.set('bitmessagesettings','sockshostname','localhost')
            shared.config.set('bitmessagesettings','socksport','9050')
            shared.config.set('bitmessagesettings','socksauthentication','false')
            shared.config.set('bitmessagesettings','socksusername','')
            shared.config.set('bitmessagesettings','sockspassword','')
            shared.config.set('bitmessagesettings','keysencrypted','false')
            shared.config.set('bitmessagesettings','messagesencrypted','false')
            shared.config.set('bitmessagesettings','defaultnoncetrialsperbyte',str(shared.networkDefaultProofOfWorkNonceTrialsPerByte))
            shared.config.set('bitmessagesettings','defaultpayloadlengthextrabytes',str(shared.networkDefaultPayloadLengthExtraBytes))
            shared.config.set('bitmessagesettings','minimizeonclose','true')
            if storeConfigFilesInSameDirectoryAsProgramByDefault:
                #Just use the same directory as the program and forget about the appdata folder
                shared.appdata = ''
                print 'Creating new config files in same directory as program.'
            else:
                print 'Creating new config files in', shared.appdata
                if not os.path.exists(shared.appdata):
                    os.makedirs(shared.appdata)
            with open(shared.appdata + 'keys.dat', 'wb') as configfile:
                shared.config.write(configfile)
    # Migrate a version-1 config: add the SOCKS/encryption keys it lacked.
    if shared.config.getint('bitmessagesettings','settingsversion') == 1:
        shared.config.set('bitmessagesettings','settingsversion','4') #If the settings version is equal to 2 or 3 then the sqlThread will modify the pubkeys table and change the settings version to 4.
        shared.config.set('bitmessagesettings','socksproxytype','none')
        shared.config.set('bitmessagesettings','sockshostname','localhost')
        shared.config.set('bitmessagesettings','socksport','9050')
        shared.config.set('bitmessagesettings','socksauthentication','false')
        shared.config.set('bitmessagesettings','socksusername','')
        shared.config.set('bitmessagesettings','sockspassword','')
        shared.config.set('bitmessagesettings','keysencrypted','false')
        shared.config.set('bitmessagesettings','messagesencrypted','false')
        with open(shared.appdata + 'keys.dat', 'wb') as configfile:
            shared.config.write(configfile)
    try:
        #We shouldn't have to use the shared.knownNodesLock because this had better be the only thread accessing knownNodes right now.
        pickleFile = open(shared.appdata + 'knownnodes.dat', 'rb')
        shared.knownNodes = pickle.load(pickleFile)
        pickleFile.close()
    except:
        # knownnodes.dat missing or unreadable: write the hard-coded
        # defaults to disk, then load those.
        createDefaultKnownNodes(shared.appdata)
        pickleFile = open(shared.appdata + 'knownnodes.dat', 'rb')
        shared.knownNodes = pickle.load(pickleFile)
        pickleFile.close()
    if shared.config.getint('bitmessagesettings', 'settingsversion') > 5:
        print 'Bitmessage cannot read future versions of the keys file (keys.dat). Run the newer version of Bitmessage.'
        raise SystemExit
    #DNS bootstrap. This could be programmed to use the SOCKS proxy to do the DNS lookup some day but for now we will just rely on the entries in defaultKnownNodes.py. Hopefully either they are up to date or the user has run Bitmessage recently without SOCKS turned on and received good bootstrap nodes using that method.
    if shared.config.get('bitmessagesettings', 'socksproxytype') == 'none':
        try:
            for item in socket.getaddrinfo('bootstrap8080.bitmessage.org',80):
                print 'Adding', item[4][0],'to knownNodes based on DNS boostrap method'
                shared.knownNodes[1][item[4][0]] = (8080,int(time.time()))
        except:
            print 'bootstrap8080.bitmessage.org DNS bootstraping failed.'
        try:
            for item in socket.getaddrinfo('bootstrap8444.bitmessage.org',80):
                print 'Adding', item[4][0],'to knownNodes based on DNS boostrap method'
                shared.knownNodes[1][item[4][0]] = (8444,int(time.time()))
        except:
            print 'bootstrap8444.bitmessage.org DNS bootstrapping failed.'
    else:
        print 'DNS bootstrap skipped because SOCKS is used.'
    #Start the address generation thread
    addressGeneratorThread = addressGenerator()
    addressGeneratorThread.daemon = True  # close the main program even if there are threads left
    addressGeneratorThread.start()
    #Start the thread that calculates POWs
    singleWorkerThread = singleWorker()
    singleWorkerThread.daemon = True  # close the main program even if there are threads left
    singleWorkerThread.start()
    #Start the SQL thread
    sqlLookup = sqlThread()
    sqlLookup.daemon = False  # DON'T close the main program even if there are threads left. The closeEvent should command this thread to exit gracefully.
    sqlLookup.start()
    #Start the cleanerThread
    singleCleanerThread = singleCleaner()
    singleCleanerThread.daemon = True  # close the main program even if there are threads left
    singleCleanerThread.start()
    shared.reloadMyAddressHashes()
    shared.reloadBroadcastSendersForWhichImWatching()
    #Initialize the ackdataForWhichImWatching data structure using data from the sql database.
    shared.sqlSubmitQueue.put('''SELECT ackdata FROM sent where (status='sentmessage' OR status='doingpow')''')
    shared.sqlSubmitQueue.put('')
    queryreturn = shared.sqlReturnQueue.get()
    for row in queryreturn:
        ackdata, = row
        print 'Watching for ackdata', ackdata.encode('hex')
        ackdataForWhichImWatching[ackdata] = 0
    if shared.safeConfigGetBoolean('bitmessagesettings','apienabled'):
        # Optionally run an external program to announce that the API is up.
        try:
            apiNotifyPath = shared.config.get('bitmessagesettings','apinotifypath')
        except:
            apiNotifyPath = ''
        if apiNotifyPath != '':
            shared.printLock.acquire()
            print 'Trying to call', apiNotifyPath
            shared.printLock.release()
            call([apiNotifyPath, "startingUp"])
        singleAPIThread = singleAPI()
        singleAPIThread.daemon = True  #close the main program even if there are threads left
        singleAPIThread.start()
    #self.singleAPISignalHandlerThread = singleAPISignalHandler()
    #self.singleAPISignalHandlerThread.start()
    #QtCore.QObject.connect(self.singleAPISignalHandlerThread, QtCore.SIGNAL("updateStatusBar(PyQt_PyObject)"), self.updateStatusBar)
    #QtCore.QObject.connect(self.singleAPISignalHandlerThread, QtCore.SIGNAL("passAddressGeneratorObjectThrough(PyQt_PyObject)"), self.connectObjectToAddressGeneratorSignals)
    #QtCore.QObject.connect(self.singleAPISignalHandlerThread, QtCore.SIGNAL("displayNewSentMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"), self.displayNewSentMessage)
    connectToStream(1)
    singleListenerThread = singleListener()
    singleListenerThread.daemon = True  # close the main program even if there are threads left
    singleListenerThread.start()
    if not shared.safeConfigGetBoolean('bitmessagesettings','daemon'):
        # GUI mode: PyQt is imported only here so daemon mode never needs it.
        try:
            from PyQt4.QtCore import *
            from PyQt4.QtGui import *
        except Exception, err:
            print 'PyBitmessage requires PyQt unless you want to run it as a daemon and interact with it using the API. You can download PyQt from http://www.riverbankcomputing.com/software/pyqt/download or by searching Google for \'PyQt Download\'. If you want to run in daemon mode, see https://bitmessage.org/wiki/Daemon'
            print 'Error message:', err
            os._exit(0)
        import bitmessageqt
        bitmessageqt.run()
    else:
        # Daemon mode: keep the main thread alive while worker threads run.
        print 'Running as a daemon. You can use Ctrl+C to exit.'
        while True:
            time.sleep(20)
# So far, the Bitmessage protocol, this client, the Wiki, and the forums
# are all a one-man operation. Bitcoin tips are quite appreciated!
# 1H5XaDA6fYENLbknwZyjiYXYPQaFjjLX2u
# set a short timeout for incoming connections which are not fully established with version messages
#!/usr/bin/env python2.7
# Copyright (c) 2012 Jonathan Warren
# Copyright (c) 2012 The Bitmessage developers
# Distributed under the MIT/X11 software license. See the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#Right now, PyBitmessage only support connecting to stream 1. It doesn't yet contain logic to expand into further streams.
#The software version variable is now held in shared.py
# --- Tunable application/protocol constants (times in seconds). ---
verbose = 1  # console verbosity; >= 2 additionally prints each outgoing connection attempt
maximumAgeOfAnObjectThatIAmWillingToAccept = 216000 #Equals two days and 12 hours.
lengthOfTimeToLeaveObjectsInInventory = 237600 #Equals two days and 18 hours. This should be longer than maximumAgeOfAnObjectThatIAmWillingToAccept so that we don't process messages twice.
lengthOfTimeToHoldOnToAllPubkeys = 2419200 #Equals 4 weeks. You could make this longer if you want but making it shorter would not be advisable because there is a very small possibility that it could keep you from obtaining a needed pubkey for a period of time.
maximumAgeOfObjectsThatIAdvertiseToOthers = 216000 #Equals two days and 12 hours
maximumAgeOfNodesThatIAdvertiseToOthers = 10800 #Equals three hours
storeConfigFilesInSameDirectoryAsProgramByDefault = False #The user may de-select Portable Mode in the settings if they want the config files to stay in the application data folder.
useVeryEasyProofOfWorkForTesting = False #If you set this to True while on the normal network, you won't be able to send or sometimes receive messages.
# NOTE(review): presumably the Unix timestamp at which clients switch to
# encrypted broadcasts -- confirm against the protocol specification.
encryptedBroadcastSwitchoverTime = 1369735200
import sys
import ConfigParser
import Queue
from addresses import *
#from shared import *
import shared
from defaultKnownNodes import *
import time
import socket
import threading
import hashlib
from struct import *
import pickle
import random
import sqlite3
import threading
from time import strftime, localtime, gmtime
import shutil #used for moving the messages.dat file
import string
import socks
import highlevelcrypto
from pyelliptic.openssl import OpenSSL
import ctypes
from pyelliptic import arithmetic
import signal #Used to capture a Ctrl-C keypress so that Bitmessage can shutdown gracefully.
#The next 3 are used for the API
from SimpleXMLRPCServer import *
import json
from subprocess import call #used when the API must execute an outside program
import singleton
#For each stream to which we connect, several outgoingSynSender threads will exist and will collectively create 8 connections with peers.
class outgoingSynSender(threading.Thread):
def __init__(self):
    """Create the thread; the stream number is supplied later via setup()."""
    super(outgoingSynSender, self).__init__()
def setup(self,streamNumber):
    # Record which stream this sender thread services; run() reads it to
    # pick peers from shared.knownNodes[self.streamNumber].
    self.streamNumber = streamNumber
def run(self):
time.sleep(1)
global alreadyAttemptedConnectionsListResetTime
while True:
#time.sleep(999999)#I sometimes use this to prevent connections for testing.
if len(selfInitiatedConnections[self.streamNumber]) < 8: #maximum number of outgoing connections = 8
random.seed()
HOST, = random.sample(shared.knownNodes[self.streamNumber], 1)
alreadyAttemptedConnectionsListLock.acquire()
while HOST in alreadyAttemptedConnectionsList or HOST in shared.connectedHostsList:
alreadyAttemptedConnectionsListLock.release()
#print 'choosing new sample'
random.seed()
HOST, = random.sample(shared.knownNodes[self.streamNumber], 1)
time.sleep(1)
#Clear out the alreadyAttemptedConnectionsList every half hour so that this program will again attempt a connection to any nodes, even ones it has already tried.
if (time.time() - alreadyAttemptedConnectionsListResetTime) > 1800:
alreadyAttemptedConnectionsList.clear()
alreadyAttemptedConnectionsListResetTime = int(time.time())
alreadyAttemptedConnectionsListLock.acquire()
alreadyAttemptedConnectionsList[HOST] = 0
alreadyAttemptedConnectionsListLock.release()
PORT, timeNodeLastSeen = shared.knownNodes[self.streamNumber][HOST]
sock = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM)
#This option apparently avoids the TIME_WAIT state so that we can rebind faster
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.settimeout(20)
if shared.config.get('bitmessagesettings', 'socksproxytype') == 'none' and verbose >= 2:
shared.printLock.acquire()
print 'Trying an outgoing connection to', HOST, ':', PORT
shared.printLock.release()
#sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
elif shared.config.get('bitmessagesettings', 'socksproxytype') == 'SOCKS4a':
if verbose >= 2:
shared.printLock.acquire()
print '(Using SOCKS4a) Trying an outgoing connection to', HOST, ':', PORT
shared.printLock.release()
proxytype = socks.PROXY_TYPE_SOCKS4
sockshostname = shared.config.get('bitmessagesettings', 'sockshostname')
socksport = shared.config.getint('bitmessagesettings', 'socksport')
rdns = True #Do domain name lookups through the proxy; though this setting doesn't really matter since we won't be doing any domain name lookups anyway.
if shared.config.getboolean('bitmessagesettings', 'socksauthentication'):
socksusername = shared.config.get('bitmessagesettings', 'socksusername')
sockspassword = shared.config.get('bitmessagesettings', 'sockspassword')
sock.setproxy(proxytype, sockshostname, socksport, rdns, socksusername, sockspassword)
else:
sock.setproxy(proxytype, sockshostname, socksport, rdns)
elif shared.config.get('bitmessagesettings', 'socksproxytype') == 'SOCKS5':
if verbose >= 2:
shared.printLock.acquire()
print '(Using SOCKS5) Trying an outgoing connection to', HOST, ':', PORT
shared.printLock.release()
proxytype = socks.PROXY_TYPE_SOCKS5
sockshostname = shared.config.get('bitmessagesettings', 'sockshostname')
socksport = shared.config.getint('bitmessagesettings', 'socksport')
rdns = True #Do domain name lookups through the proxy; though this setting doesn't really matter since we won't be doing any domain name lookups anyway.
if shared.config.getboolean('bitmessagesettings', 'socksauthentication'):
socksusername = shared.config.get('bitmessagesettings', 'socksusername')
sockspassword = shared.config.get('bitmessagesettings', 'sockspassword')
sock.setproxy(proxytype, sockshostname, socksport, rdns, socksusername, sockspassword)
else:
sock.setproxy(proxytype, sockshostname, socksport, rdns)
try:
sock.connect((HOST, PORT))
rd = receiveDataThread()
rd.daemon = True # close the main program even if there are threads left
#self.emit(SIGNAL("passObjectThrough(PyQt_PyObject)"),rd)
objectsOfWhichThisRemoteNodeIsAlreadyAware = {}
rd.setup(sock,HOST,PORT,self.streamNumber,objectsOfWhichThisRemoteNodeIsAlreadyAware)
rd.start()
shared.printLock.acquire()
print self, 'connected to', HOST, 'during an outgoing attempt.'
shared.printLock.release()
sd = sendDataThread()
sd.setup(sock,HOST,PORT,self.streamNumber,objectsOfWhichThisRemoteNodeIsAlreadyAware)
sd.start()
sd.sendVersionMessage()
except socks.GeneralProxyError, err:
if verbose >= 2:
shared.printLock.acquire()
print 'Could NOT connect to', HOST, 'during outgoing attempt.', err
shared.printLock.release()
PORT, timeLastSeen = shared.knownNodes[self.streamNumber][HOST]
if (int(time.time())-timeLastSeen) > 172800 and len(shared.knownNodes[self.streamNumber]) > 1000: # for nodes older than 48 hours old if we have more than 1000 hosts in our list, delete from the shared.knownNodes data-structure.
shared.knownNodesLock.acquire()
del shared.knownNodes[self.streamNumber][HOST]
shared.knownNodesLock.release()
shared.printLock.acquire()
print 'deleting ', HOST, 'from shared.knownNodes because it is more than 48 hours old and we could not connect to it.'
shared.printLock.release()
except socks.Socks5AuthError, err:
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"SOCKS5 Authentication problem: "+str(err))
shared.UISignalQueue.put(('updateStatusBar',"SOCKS5 Authentication problem: "+str(err)))
except socks.Socks5Error, err:
pass
print 'SOCKS5 error. (It is possible that the server wants authentication).)' ,str(err)
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"SOCKS5 error. Server might require authentication. "+str(err))
except socks.Socks4Error, err:
print 'Socks4Error:', err
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"SOCKS4 error: "+str(err))
except socket.error, err:
if shared.config.get('bitmessagesettings', 'socksproxytype')[0:5] == 'SOCKS':
print 'Bitmessage MIGHT be having trouble connecting to the SOCKS server. '+str(err)
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"Problem: Bitmessage can not connect to the SOCKS server. "+str(err))
else:
if verbose >= 1:
shared.printLock.acquire()
print 'Could NOT connect to', HOST, 'during outgoing attempt.', err
shared.printLock.release()
PORT, timeLastSeen = shared.knownNodes[self.streamNumber][HOST]
if (int(time.time())-timeLastSeen) > 172800 and len(shared.knownNodes[self.streamNumber]) > 1000: # for nodes older than 48 hours old if we have more than 1000 hosts in our list, delete from the knownNodes data-structure.
shared.knownNodesLock.acquire()
del shared.knownNodes[self.streamNumber][HOST]
shared.knownNodesLock.release()
print 'deleting ', HOST, 'from knownNodes because it is more than 48 hours old and we could not connect to it.'
except Exception, err:
sys.stderr.write('An exception has occurred in the outgoingSynSender thread that was not caught by other exception types: %s\n' % err)
time.sleep(0.1)
#Only one singleListener thread will ever exist. It creates the receiveDataThread and sendDataThread for each incoming connection. Note that it cannot set the stream number because it is not known yet- the other node will have to tell us its stream number in a version message. If we don't care about their stream, we will close the connection (within the recversion function of the recieveData thread)
class singleListener(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
#We don't want to accept incoming connections if the user is using a SOCKS proxy. If they eventually select proxy 'none' then this will start listening for connections.
while shared.config.get('bitmessagesettings', 'socksproxytype')[0:5] == 'SOCKS':
time.sleep(300)
shared.printLock.acquire()
print 'Listening for incoming connections.'
shared.printLock.release()
HOST = '' # Symbolic name meaning all available interfaces
PORT = shared.config.getint('bitmessagesettings', 'port')
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#This option apparently avoids the TIME_WAIT state so that we can rebind faster
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((HOST, PORT))
sock.listen(2)
while True:
#We don't want to accept incoming connections if the user is using a SOCKS proxy. If the user eventually select proxy 'none' then this will start listening for connections.
while shared.config.get('bitmessagesettings', 'socksproxytype')[0:5] == 'SOCKS':
time.sleep(10)
while len(shared.connectedHostsList) > 220:
shared.printLock.acquire()
print 'We are connected to too many people. Not accepting further incoming connections for ten seconds.'
shared.printLock.release()
time.sleep(10)
a,(HOST,PORT) = sock.accept()
#The following code will, unfortunately, block an incoming connection if someone else on the same LAN is already connected because the two computers will share the same external IP. This is here to prevent connection flooding.
while HOST in shared.connectedHostsList:
shared.printLock.acquire()
print 'incoming connection is from a host in shared.connectedHostsList (we are already connected to it). Ignoring it.'
shared.printLock.release()
a.close()
a,(HOST,PORT) = sock.accept()
objectsOfWhichThisRemoteNodeIsAlreadyAware = {}
a.settimeout(20)
sd = sendDataThread()
sd.setup(a,HOST,PORT,-1,objectsOfWhichThisRemoteNodeIsAlreadyAware)
sd.start()
rd = receiveDataThread()
rd.daemon = True # close the main program even if there are threads left
rd.setup(a,HOST,PORT,-1,objectsOfWhichThisRemoteNodeIsAlreadyAware)
rd.start()
shared.printLock.acquire()
print self, 'connected to', HOST,'during INCOMING request.'
shared.printLock.release()
#This thread is created either by the synSenderThread(for outgoing connections) or the singleListenerThread(for incoming connectiosn).
class receiveDataThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.data = ''
self.verackSent = False
self.verackReceived = False
def setup(self,sock,HOST,port,streamNumber,objectsOfWhichThisRemoteNodeIsAlreadyAware):
self.sock = sock
self.HOST = HOST
self.PORT = port
self.streamNumber = streamNumber
self.payloadLength = 0 #This is the protocol payload length thus it doesn't include the 24 byte message header
self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave = {}
shared.connectedHostsList[self.HOST] = 0 #The very fact that this receiveData thread exists shows that we are connected to the remote host. Let's add it to this list so that an outgoingSynSender thread doesn't try to connect to it.
self.connectionIsOrWasFullyEstablished = False #set to true after the remote node and I accept each other's version messages. This is needed to allow the user interface to accurately reflect the current number of connections.
if self.streamNumber == -1: #This was an incoming connection. Send out a version message if we accept the other node's version message.
self.initiatedConnection = False
else:
self.initiatedConnection = True
selfInitiatedConnections[streamNumber][self] = 0
self.ackDataThatWeHaveYetToSend = [] #When we receive a message bound for us, we store the acknowledgement that we need to send (the ackdata) here until we are done processing all other data received from this peer.
self.objectsOfWhichThisRemoteNodeIsAlreadyAware = objectsOfWhichThisRemoteNodeIsAlreadyAware
    def run(self):
        # Main receive loop: keep reading from the socket until the peer
        # disconnects, a timeout occurs, or an error is raised; then clean up
        # the shared bookkeeping structures for this connection.
        shared.printLock.acquire()
        print 'ID of the receiveDataThread is', str(id(self))+'. The size of the shared.connectedHostsList is now', len(shared.connectedHostsList)
        shared.printLock.release()
        while True:
            try:
                # Append newly received bytes to the running buffer; a
                # protocol message may arrive split across several recv calls.
                self.data += self.sock.recv(4096)
            except socket.timeout:
                shared.printLock.acquire()
                print 'Timeout occurred waiting for data from', self.HOST + '. Closing receiveData thread. (ID:',str(id(self))+ ')'
                shared.printLock.release()
                break
            except Exception, err:
                shared.printLock.acquire()
                print 'sock.recv error. Closing receiveData thread (HOST:', self.HOST, 'ID:',str(id(self))+ ').', err
                shared.printLock.release()
                break
            # An empty read means the remote side closed the connection.
            if self.data == "":
                shared.printLock.acquire()
                print 'Connection to', self.HOST, 'closed. Closing receiveData thread. (ID:',str(id(self))+ ')'
                shared.printLock.release()
                break
            else:
                self.processData()
        # The connection is over: deregister this thread everywhere it was
        # recorded during setup().
        try:
            del selfInitiatedConnections[self.streamNumber][self]
            shared.printLock.acquire()
            print 'removed self (a receiveDataThread) from selfInitiatedConnections'
            shared.printLock.release()
        except:
            # Incoming connections were never registered there; nothing to do.
            pass
        # Tell the matching sendDataThread for this host to shut down too.
        shared.broadcastToSendDataQueues((0, 'shutdown', self.HOST))
        try:
            del shared.connectedHostsList[self.HOST]
        except Exception, err:
            shared.printLock.acquire()
            print 'Could not delete', self.HOST, 'from shared.connectedHostsList.', err
            shared.printLock.release()
        shared.UISignalQueue.put(('updateNetworkStatusTab','no data'))
        shared.printLock.acquire()
        print 'The size of the connectedHostsList is now:', len(shared.connectedHostsList)
        shared.printLock.release()
    def processData(self):
        # Parse as many complete protocol messages as are currently buffered
        # in self.data, verifying magic bytes and checksum, and dispatch each
        # message to its rec* handler.
        global verbose
        if len(self.data) < 20: #if so little of the data has arrived that we can't even unpack the payload length
            pass
        elif self.data[0:4] != '\xe9\xbe\xb4\xd9':
            # Wrong magic bytes: the stream is out of sync or the peer is not
            # speaking the Bitmessage protocol. Dump the buffer to resync.
            if verbose >= 1:
                shared.printLock.acquire()
                sys.stderr.write('The magic bytes were not correct. First 40 bytes of data: %s\n' % repr(self.data[0:40]))
                print 'self.data:', self.data.encode('hex')
                shared.printLock.release()
            self.data = ""
        else:
            self.payloadLength, = unpack('>L',self.data[16:20])
            if len(self.data) >= self.payloadLength+24: #check if the whole message has arrived yet. If it has,...
                if self.data[20:24] == hashlib.sha512(self.data[24:self.payloadLength+24]).digest()[0:4]:#test the checksum in the message. If it is correct...
                    #The time we've last seen this node is obviously right now since we just received valid data from it. So update the knownNodes list so that other peers can be made aware of its existance.
                    if self.initiatedConnection and self.connectionIsOrWasFullyEstablished: #The remote port is only something we should share with others if it is the remote node's incoming port (rather than some random operating-system-assigned outgoing port).
                        shared.knownNodesLock.acquire()
                        shared.knownNodes[self.streamNumber][self.HOST] = (self.PORT,int(time.time()))
                        shared.knownNodesLock.release()
                    if self.payloadLength <= 180000000: #If the size of the message is greater than 180MB, ignore it. (I get memory errors when processing messages much larger than this though it is concievable that this value will have to be lowered if some systems are less tolarant of large messages.)
                        remoteCommand = self.data[4:16]
                        shared.printLock.acquire()
                        print 'remoteCommand', repr(remoteCommand.replace('\x00','')), ' from', self.HOST
                        shared.printLock.release()
                        # Dispatch on the 12-byte command field. Most commands
                        # are only honored after the version/verack handshake
                        # has completed.
                        if remoteCommand == 'version\x00\x00\x00\x00\x00':
                            self.recversion(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'verack\x00\x00\x00\x00\x00\x00':
                            self.recverack()
                        elif remoteCommand == 'addr\x00\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recaddr(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'getpubkey\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recgetpubkey(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'pubkey\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recpubkey(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'inv\x00\x00\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recinv(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'getdata\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recgetdata(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'msg\x00\x00\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recmsg(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'broadcast\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.recbroadcast(self.data[24:self.payloadLength+24])
                        elif remoteCommand == 'ping\x00\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            self.sendpong()
                        elif remoteCommand == 'pong\x00\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            pass
                        elif remoteCommand == 'alert\x00\x00\x00\x00\x00\x00\x00' and self.connectionIsOrWasFullyEstablished:
                            pass
                    self.data = self.data[self.payloadLength+24:]#take this message out and then process the next message
                    if self.data == '':
                        # Buffer drained: request any advertised objects we
                        # don't already have, one random hash at a time.
                        while len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave) > 0:
                            random.seed()
                            objectHash, = random.sample(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave, 1)
                            if objectHash in shared.inventory:
                                shared.printLock.acquire()
                                print 'Inventory (in memory) already has object listed in inv message.'
                                shared.printLock.release()
                                del self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave[objectHash]
                            elif isInSqlInventory(objectHash):
                                if verbose >= 3:
                                    shared.printLock.acquire()
                                    print 'Inventory (SQL on disk) already has object listed in inv message.'
                                    shared.printLock.release()
                                del self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave[objectHash]
                            else:
                                self.sendgetdata(objectHash)
                                del self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave[objectHash] #It is possible that the remote node doesn't respond with the object. In that case, we'll very likely get it from someone else anyway.
                                if len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave) == 0:
                                    shared.printLock.acquire()
                                    print '(concerning', self.HOST + ')', 'number of objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave is now', len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave)
                                    shared.printLock.release()
                                # Only send one getdata per pass; the rest will
                                # be requested on later passes.
                                break
                        if len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave) == 0:
                            shared.printLock.acquire()
                            print '(concerning', self.HOST + ')', 'number of objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave is now', len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave)
                            shared.printLock.release()
                        if len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave) > 0:
                            shared.printLock.acquire()
                            print '(concerning', self.HOST + ')', 'number of objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave is now', len(self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave)
                            shared.printLock.release()
                    # Pending acks are fed back through the same parser; then
                    # recurse to process any further buffered messages.
                    if len(self.ackDataThatWeHaveYetToSend) > 0:
                        self.data = self.ackDataThatWeHaveYetToSend.pop()
                    self.processData()
                else:
                    print 'Checksum incorrect. Clearing this message.'
                    self.data = self.data[self.payloadLength+24:]
def isProofOfWorkSufficient(self,data,nonceTrialsPerByte=0,payloadLengthExtraBytes=0):
if nonceTrialsPerByte < shared.networkDefaultProofOfWorkNonceTrialsPerByte:
nonceTrialsPerByte = shared.networkDefaultProofOfWorkNonceTrialsPerByte
if payloadLengthExtraBytes < shared.networkDefaultPayloadLengthExtraBytes:
payloadLengthExtraBytes = shared.networkDefaultPayloadLengthExtraBytes
POW, = unpack('>Q',hashlib.sha512(hashlib.sha512(data[:8]+ hashlib.sha512(data[8:]).digest()).digest()).digest()[0:8])
#print 'POW:', POW
return POW <= 2**64 / ((len(data)+payloadLengthExtraBytes) * (nonceTrialsPerByte))
def sendpong(self):
print 'Sending pong'
try:
self.sock.sendall('\xE9\xBE\xB4\xD9\x70\x6F\x6E\x67\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcf\x83\xe1\x35')
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
def recverack(self):
print 'verack received'
self.verackReceived = True
if self.verackSent == True:
#We have thus both sent and received a verack.
self.connectionFullyEstablished()
    def connectionFullyEstablished(self):
        # Called once both verack messages have been exchanged. Updates the
        # UI, advertises this node and our inventory, and enforces the
        # incoming-connection limit.
        self.connectionIsOrWasFullyEstablished = True
        if not self.initiatedConnection:
            # An incoming connection proves our port is reachable from the
            # outside, so show the green status icon.
            shared.UISignalQueue.put(('setStatusIcon','green'))
        self.sock.settimeout(600) #We'll send out a pong every 5 minutes to make sure the connection stays alive if there has been no other traffic to send lately.
        shared.UISignalQueue.put(('updateNetworkStatusTab','no data'))
        remoteNodeIncomingPort, remoteNodeSeenTime = shared.knownNodes[self.streamNumber][self.HOST]
        shared.printLock.acquire()
        print 'Connection fully established with', self.HOST, remoteNodeIncomingPort
        print 'The size of the connectedHostsList is now', len(shared.connectedHostsList)
        print 'The length of sendDataQueues is now:', len(shared.sendDataQueues)
        print 'broadcasting addr from within connectionFullyEstablished function.'
        shared.printLock.release()
        self.broadcastaddr([(int(time.time()), self.streamNumber, 1, self.HOST, remoteNodeIncomingPort)]) #This lets all of our peers know about this new node.
        self.sendaddr() #This is one large addr message to this one peer.
        if not self.initiatedConnection and len(shared.connectedHostsList) > 200:
            # Too many incoming peers: shut this connection down (the limit
            # does not apply to connections we initiated ourselves).
            shared.printLock.acquire()
            print 'We are connected to too many people. Closing connection.'
            shared.printLock.release()
            shared.broadcastToSendDataQueues((0, 'shutdown', self.HOST))
            return
        self.sendBigInv()
def sendBigInv(self):
shared.sqlLock.acquire()
#Select all hashes which are younger than two days old and in this stream.
t = (int(time.time())-maximumAgeOfObjectsThatIAdvertiseToOthers,int(time.time())-lengthOfTimeToHoldOnToAllPubkeys,self.streamNumber)
shared.sqlSubmitQueue.put('''SELECT hash FROM inventory WHERE ((receivedtime>? and objecttype<>'pubkey') or (receivedtime>? and objecttype='pubkey')) and streamnumber=?''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
bigInvList = {}
for row in queryreturn:
hash, = row
if hash not in self.objectsOfWhichThisRemoteNodeIsAlreadyAware:
bigInvList[hash] = 0
else:
shared.printLock.acquire()
print 'Not including an object hash in a big inv message because the remote node is already aware of it.'#This line is here to check that this feature is working.
shared.printLock.release()
#We also have messages in our inventory in memory (which is a python dictionary). Let's fetch those too.
for hash, storedValue in shared.inventory.items():
if hash not in self.objectsOfWhichThisRemoteNodeIsAlreadyAware:
objectType, streamNumber, payload, receivedTime = storedValue
if streamNumber == self.streamNumber and receivedTime > int(time.time())-maximumAgeOfObjectsThatIAdvertiseToOthers:
bigInvList[hash] = 0
else:
shared.printLock.acquire()
print 'Not including an object hash in a big inv message because the remote node is already aware of it.'#This line is here to check that this feature is working.
shared.printLock.release()
numberOfObjectsInInvMessage = 0
payload = ''
#Now let us start appending all of these hashes together. They will be sent out in a big inv message to our new peer.
for hash, storedValue in bigInvList.items():
payload += hash
numberOfObjectsInInvMessage += 1
if numberOfObjectsInInvMessage >= 50000: #We can only send a max of 50000 items per inv message but we may have more objects to advertise. They must be split up into multiple inv messages.
self.sendinvMessageToJustThisOnePeer(numberOfObjectsInInvMessage,payload)
payload = ''
numberOfObjectsInInvMessage = 0
if numberOfObjectsInInvMessage > 0:
self.sendinvMessageToJustThisOnePeer(numberOfObjectsInInvMessage,payload)
#Self explanatory. Notice that there is also a broadcastinv function for broadcasting invs to everyone in our stream.
def sendinvMessageToJustThisOnePeer(self,numberOfObjects,payload):
payload = encodeVarint(numberOfObjects) + payload
headerData = '\xe9\xbe\xb4\xd9' #magic bits, slighly different from Bitcoin's magic bits.
headerData += 'inv\x00\x00\x00\x00\x00\x00\x00\x00\x00'
headerData += pack('>L',len(payload))
headerData += hashlib.sha512(payload).digest()[:4]
shared.printLock.acquire()
print 'Sending huge inv message with', numberOfObjects, 'objects to just this one peer'
shared.printLock.release()
try:
self.sock.sendall(headerData + payload)
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
#We have received a broadcast message
    def recbroadcast(self,data):
        # Handle a received 'broadcast' message: validate its proof of work,
        # embedded time, size and stream, store and re-advertise it, then hand
        # it to processbroadcast. Sleeps afterwards as a timing-attack
        # mitigation so processing time doesn't reveal whether we could
        # decrypt the message.
        self.messageProcessingStartTime = time.time()
        #First we must check to make sure the proof of work is sufficient.
        if not self.isProofOfWorkSufficient(data):
            print 'Proof of work in broadcast message insufficient.'
            return
        readPosition = 8 #bypass the nonce
        embeddedTime, = unpack('>I',data[readPosition:readPosition+4])
        #This section is used for the transition from 32 bit time to 64 bit time in the protocol.
        if embeddedTime == 0:
            embeddedTime, = unpack('>Q',data[readPosition:readPosition+8])
            readPosition += 8
        else:
            readPosition += 4
        if embeddedTime > (int(time.time())+10800): #prevent funny business
            print 'The embedded time in this broadcast message is more than three hours in the future. That doesn\'t make sense. Ignoring message.'
            return
        if embeddedTime < (int(time.time())-maximumAgeOfAnObjectThatIAmWillingToAccept):
            print 'The embedded time in this broadcast message is too old. Ignoring message.'
            return
        if len(data) < 180:
            print 'The payload length of this broadcast packet is unreasonably low. Someone is probably trying funny business. Ignoring message.'
            return
        #Let us check to make sure the stream number is correct (thus preventing an individual from sending broadcasts out on the wrong streams or all streams).
        broadcastVersion, broadcastVersionLength = decodeVarint(data[readPosition:readPosition+10])
        if broadcastVersion >= 2:
            streamNumber, streamNumberLength = decodeVarint(data[readPosition+broadcastVersionLength:readPosition+broadcastVersionLength+10])
            if streamNumber != self.streamNumber:
                print 'The stream number encoded in this broadcast message (' + str(streamNumber) + ') does not match the stream number on which it was received. Ignoring it.'
                return
        # Deduplicate against both the in-memory and on-disk inventories
        # before accepting the object.
        shared.inventoryLock.acquire()
        self.inventoryHash = calculateInventoryHash(data)
        if self.inventoryHash in shared.inventory:
            print 'We have already received this broadcast object. Ignoring.'
            shared.inventoryLock.release()
            return
        elif isInSqlInventory(self.inventoryHash):
            print 'We have already received this broadcast object (it is stored on disk in the SQL inventory). Ignoring it.'
            shared.inventoryLock.release()
            return
        #It is valid so far. Let's let our peers know about it.
        objectType = 'broadcast'
        shared.inventory[self.inventoryHash] = (objectType, self.streamNumber, data, embeddedTime)
        shared.inventoryLock.release()
        self.broadcastinv(self.inventoryHash)
        shared.UISignalQueue.put(('incrementNumberOfBroadcastsProcessed','no data'))
        self.processbroadcast(readPosition,data)#When this function returns, we will have either successfully processed this broadcast because we are interested in it, ignored it because we aren't interested in it, or found problem with the broadcast that warranted ignoring it.
        # Let us now set lengthOfTimeWeShouldUseToProcessThisMessage. If we haven't used the specified amount of time, we shall sleep. These values are mostly the same values used for msg messages although broadcast messages are processed faster.
        if len(data) > 100000000: #Size is greater than 100 megabytes
            lengthOfTimeWeShouldUseToProcessThisMessage = 100 #seconds.
        elif len(data) > 10000000: #Between 100 and 10 megabytes
            lengthOfTimeWeShouldUseToProcessThisMessage = 20 #seconds.
        elif len(data) > 1000000: #Between 10 and 1 megabyte
            lengthOfTimeWeShouldUseToProcessThisMessage = 3 #seconds.
        else: #Less than 1 megabyte
            lengthOfTimeWeShouldUseToProcessThisMessage = .6 #seconds.
        sleepTime = lengthOfTimeWeShouldUseToProcessThisMessage - (time.time()- self.messageProcessingStartTime)
        if sleepTime > 0:
            shared.printLock.acquire()
            print 'Timing attack mitigation: Sleeping for', sleepTime ,'seconds.'
            shared.printLock.release()
            time.sleep(sleepTime)
        shared.printLock.acquire()
        print 'Total message processing time:', time.time()- self.messageProcessingStartTime, 'seconds.'
        shared.printLock.release()
#A broadcast message has a valid time and POW and requires processing. The recbroadcast function calls this one.
def processbroadcast(self,readPosition,data):
broadcastVersion, broadcastVersionLength = decodeVarint(data[readPosition:readPosition+9])
readPosition += broadcastVersionLength
if broadcastVersion < 1 or broadcastVersion > 2:
print 'Cannot decode incoming broadcast versions higher than 2. Assuming the sender isn\' being silly, you should upgrade Bitmessage because this message shall be ignored.'
return
if broadcastVersion == 1:
beginningOfPubkeyPosition = readPosition #used when we add the pubkey to our pubkey table
sendersAddressVersion, sendersAddressVersionLength = decodeVarint(data[readPosition:readPosition+9])
if sendersAddressVersion <= 1 or sendersAddressVersion >=3:
#Cannot decode senderAddressVersion higher than 2. Assuming the sender isn\' being silly, you should upgrade Bitmessage because this message shall be ignored.
return
readPosition += sendersAddressVersionLength
if sendersAddressVersion == 2:
sendersStream, sendersStreamLength = decodeVarint(data[readPosition:readPosition+9])
readPosition += sendersStreamLength
behaviorBitfield = data[readPosition:readPosition+4]
readPosition += 4
sendersPubSigningKey = '\x04' + data[readPosition:readPosition+64]
readPosition += 64
sendersPubEncryptionKey = '\x04' + data[readPosition:readPosition+64]
readPosition += 64
endOfPubkeyPosition = readPosition
sendersHash = data[readPosition:readPosition+20]
if sendersHash not in shared.broadcastSendersForWhichImWatching:
#Display timing data
shared.printLock.acquire()
print 'Time spent deciding that we are not interested in this v1 broadcast:', time.time()- self.messageProcessingStartTime
shared.printLock.release()
return
#At this point, this message claims to be from sendersHash and we are interested in it. We still have to hash the public key to make sure it is truly the key that matches the hash, and also check the signiture.
readPosition += 20
sha = hashlib.new('sha512')
sha.update(sendersPubSigningKey+sendersPubEncryptionKey)
ripe = hashlib.new('ripemd160')
ripe.update(sha.digest())
if ripe.digest() != sendersHash:
#The sender of this message lied.
return
messageEncodingType, messageEncodingTypeLength = decodeVarint(data[readPosition:readPosition+9])
if messageEncodingType == 0:
return
readPosition += messageEncodingTypeLength
messageLength, messageLengthLength = decodeVarint(data[readPosition:readPosition+9])
readPosition += messageLengthLength
message = data[readPosition:readPosition+messageLength]
readPosition += messageLength
readPositionAtBottomOfMessage = readPosition
signatureLength, signatureLengthLength = decodeVarint(data[readPosition:readPosition+9])
readPosition += signatureLengthLength
signature = data[readPosition:readPosition+signatureLength]
try:
highlevelcrypto.verify(data[12:readPositionAtBottomOfMessage],signature,sendersPubSigningKey.encode('hex'))
print 'ECDSA verify passed'
except Exception, err:
print 'ECDSA verify failed', err
return
#verify passed
#Let's store the public key in case we want to reply to this person.
#We don't have the correct nonce or time (which would let us send out a pubkey message) so we'll just fill it with 1's. We won't be able to send this pubkey to others (without doing the proof of work ourselves, which this program is programmed to not do.)
t = (ripe.digest(),'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF'+'\xFF\xFF\xFF\xFF'+data[beginningOfPubkeyPosition:endOfPubkeyPosition],int(time.time()),'yes')
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.workerQueue.put(('newpubkey',(sendersAddressVersion,sendersStream,ripe.digest()))) #This will check to see whether we happen to be awaiting this pubkey in order to send a message. If we are, it will do the POW and send it.
fromAddress = encodeAddress(sendersAddressVersion,sendersStream,ripe.digest())
shared.printLock.acquire()
print 'fromAddress:', fromAddress
shared.printLock.release()
if messageEncodingType == 2:
bodyPositionIndex = string.find(message,'\nBody:')
if bodyPositionIndex > 1:
subject = message[8:bodyPositionIndex]
body = message[bodyPositionIndex+6:]
else:
subject = ''
body = message
elif messageEncodingType == 1:
body = message
subject = ''
elif messageEncodingType == 0:
print 'messageEncodingType == 0. Doing nothing with the message.'
else:
body = 'Unknown encoding type.\n\n' + repr(message)
subject = ''
toAddress = '[Broadcast subscribers]'
if messageEncodingType <> 0:
shared.sqlLock.acquire()
t = (self.inventoryHash,toAddress,fromAddress,subject,int(time.time()),body,'inbox',messageEncodingType,0)
shared.sqlSubmitQueue.put('''INSERT INTO inbox VALUES (?,?,?,?,?,?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
#self.emit(SIGNAL("displayNewInboxMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),self.inventoryHash,toAddress,fromAddress,subject,body)
shared.UISignalQueue.put(('displayNewInboxMessage',(self.inventoryHash,toAddress,fromAddress,subject,body)))
#If we are behaving as an API then we might need to run an outside command to let some program know that a new message has arrived.
if shared.safeConfigGetBoolean('bitmessagesettings','apienabled'):
try:
apiNotifyPath = shared.config.get('bitmessagesettings','apinotifypath')
except:
apiNotifyPath = ''
if apiNotifyPath != '':
call([apiNotifyPath, "newBroadcast"])
#Display timing data
shared.printLock.acquire()
print 'Time spent processing this interesting broadcast:', time.time()- self.messageProcessingStartTime
shared.printLock.release()
if broadcastVersion == 2:
cleartextStreamNumber, cleartextStreamNumberLength = decodeVarint(data[readPosition:readPosition+10])
readPosition += cleartextStreamNumberLength
initialDecryptionSuccessful = False
for key, cryptorObject in shared.MyECSubscriptionCryptorObjects.items():
try:
decryptedData = cryptorObject.decrypt(data[readPosition:])
toRipe = key #This is the RIPE hash of the sender's pubkey. We need this below to compare to the RIPE hash of the sender's address to verify that it was encrypted by with their key rather than some other key.
initialDecryptionSuccessful = True
print 'EC decryption successful using key associated with ripe hash:', key.encode('hex')
break
except Exception, err:
pass
#print 'cryptorObject.decrypt Exception:', err
if not initialDecryptionSuccessful:
#This is not a broadcast I am interested in.
shared.printLock.acquire()
print 'Length of time program spent failing to decrypt this v2 broadcast:', time.time()- self.messageProcessingStartTime, 'seconds.'
shared.printLock.release()
return
#At this point this is a broadcast I have decrypted and thus am interested in.
signedBroadcastVersion, readPosition = decodeVarint(decryptedData[:10])
beginningOfPubkeyPosition = readPosition #used when we add the pubkey to our pubkey table
sendersAddressVersion, sendersAddressVersionLength = decodeVarint(decryptedData[readPosition:readPosition+9])
if sendersAddressVersion < 2 or sendersAddressVersion > 3:
print 'Cannot decode senderAddressVersion other than 2 or 3. Assuming the sender isn\' being silly, you should upgrade Bitmessage because this message shall be ignored.'
return
readPosition += sendersAddressVersionLength
sendersStream, sendersStreamLength = decodeVarint(decryptedData[readPosition:readPosition+9])
if sendersStream != cleartextStreamNumber:
print 'The stream number outside of the encryption on which the POW was completed doesn\'t match the stream number inside the encryption. Ignoring broadcast.'
return
readPosition += sendersStreamLength
behaviorBitfield = decryptedData[readPosition:readPosition+4]
readPosition += 4
sendersPubSigningKey = '\x04' + decryptedData[readPosition:readPosition+64]
readPosition += 64
sendersPubEncryptionKey = '\x04' + decryptedData[readPosition:readPosition+64]
readPosition += 64
if sendersAddressVersion >= 3:
requiredAverageProofOfWorkNonceTrialsPerByte, varintLength = decodeVarint(decryptedData[readPosition:readPosition+10])
readPosition += varintLength
print 'sender\'s requiredAverageProofOfWorkNonceTrialsPerByte is', requiredAverageProofOfWorkNonceTrialsPerByte
requiredPayloadLengthExtraBytes, varintLength = decodeVarint(decryptedData[readPosition:readPosition+10])
readPosition += varintLength
print 'sender\'s requiredPayloadLengthExtraBytes is', requiredPayloadLengthExtraBytes
endOfPubkeyPosition = readPosition
sha = hashlib.new('sha512')
sha.update(sendersPubSigningKey+sendersPubEncryptionKey)
ripe = hashlib.new('ripemd160')
ripe.update(sha.digest())
if toRipe != ripe.digest():
print 'The encryption key used to encrypt this message doesn\'t match the keys inbedded in the message itself. Ignoring message.'
return
messageEncodingType, messageEncodingTypeLength = decodeVarint(decryptedData[readPosition:readPosition+9])
if messageEncodingType == 0:
return
readPosition += messageEncodingTypeLength
messageLength, messageLengthLength = decodeVarint(decryptedData[readPosition:readPosition+9])
readPosition += messageLengthLength
message = decryptedData[readPosition:readPosition+messageLength]
readPosition += messageLength
readPositionAtBottomOfMessage = readPosition
signatureLength, signatureLengthLength = decodeVarint(decryptedData[readPosition:readPosition+9])
readPosition += signatureLengthLength
signature = decryptedData[readPosition:readPosition+signatureLength]
try:
highlevelcrypto.verify(decryptedData[:readPositionAtBottomOfMessage],signature,sendersPubSigningKey.encode('hex'))
print 'ECDSA verify passed'
except Exception, err:
print 'ECDSA verify failed', err
return
#verify passed
#Let's store the public key in case we want to reply to this person.
t = (ripe.digest(),'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF'+'\xFF\xFF\xFF\xFF'+decryptedData[beginningOfPubkeyPosition:endOfPubkeyPosition],int(time.time()),'yes')
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.workerQueue.put(('newpubkey',(sendersAddressVersion,sendersStream,ripe.digest()))) #This will check to see whether we happen to be awaiting this pubkey in order to send a message. If we are, it will do the POW and send it.
fromAddress = encodeAddress(sendersAddressVersion,sendersStream,ripe.digest())
shared.printLock.acquire()
print 'fromAddress:', fromAddress
shared.printLock.release()
if messageEncodingType == 2:
bodyPositionIndex = string.find(message,'\nBody:')
if bodyPositionIndex > 1:
subject = message[8:bodyPositionIndex]
body = message[bodyPositionIndex+6:]
else:
subject = ''
body = message
elif messageEncodingType == 1:
body = message
subject = ''
elif messageEncodingType == 0:
print 'messageEncodingType == 0. Doing nothing with the message.'
else:
body = 'Unknown encoding type.\n\n' + repr(message)
subject = ''
toAddress = '[Broadcast subscribers]'
if messageEncodingType <> 0:
shared.sqlLock.acquire()
t = (self.inventoryHash,toAddress,fromAddress,subject,int(time.time()),body,'inbox',messageEncodingType,0)
shared.sqlSubmitQueue.put('''INSERT INTO inbox VALUES (?,?,?,?,?,?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
#self.emit(SIGNAL("displayNewInboxMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),self.inventoryHash,toAddress,fromAddress,subject,body)
shared.UISignalQueue.put(('displayNewInboxMessage',(self.inventoryHash,toAddress,fromAddress,subject,body)))
#If we are behaving as an API then we might need to run an outside command to let some program know that a new message has arrived.
if shared.safeConfigGetBoolean('bitmessagesettings','apienabled'):
try:
apiNotifyPath = shared.config.get('bitmessagesettings','apinotifypath')
except:
apiNotifyPath = ''
if apiNotifyPath != '':
call([apiNotifyPath, "newBroadcast"])
#Display timing data
shared.printLock.acquire()
print 'Time spent processing this interesting broadcast:', time.time()- self.messageProcessingStartTime
shared.printLock.release()
#We have received a msg message.
    def recmsg(self,data):
        """Handle a received 'msg' object from a peer.

        Validates the proof of work and the embedded time, deduplicates the
        object against the in-memory and on-disk inventories, advertises it
        to our peers, then hands it to processmsg. Afterwards it sleeps so
        that total processing time is roughly constant for a given message
        size (timing attack mitigation).
        """
        self.messageProcessingStartTime = time.time()
        #First we must check to make sure the proof of work is sufficient.
        if not self.isProofOfWorkSufficient(data):
            print 'Proof of work in msg message insufficient.'
            return
        readPosition = 8
        embeddedTime, = unpack('>I',data[readPosition:readPosition+4])
        #This section is used for the transition from 32 bit time to 64 bit time in the protocol.
        #A zero 32-bit time signals that the sender used the 64-bit encoding.
        if embeddedTime == 0:
            embeddedTime, = unpack('>Q',data[readPosition:readPosition+8])
            readPosition += 8
        else:
            readPosition += 4
        #Reject objects timestamped more than 3 hours into the future.
        if embeddedTime > int(time.time())+10800:
            print 'The time in the msg message is too new. Ignoring it. Time:', embeddedTime
            return
        if embeddedTime < int(time.time())-maximumAgeOfAnObjectThatIAmWillingToAccept:
            print 'The time in the msg message is too old. Ignoring it. Time:', embeddedTime
            return
        streamNumberAsClaimedByMsg, streamNumberAsClaimedByMsgLength = decodeVarint(data[readPosition:readPosition+9])
        if streamNumberAsClaimedByMsg != self.streamNumber:
            print 'The stream number encoded in this msg (' + str(streamNumberAsClaimedByMsg) + ') message does not match the stream number on which it was received. Ignoring it.'
            return
        readPosition += streamNumberAsClaimedByMsgLength
        self.inventoryHash = calculateInventoryHash(data)
        #Hold inventoryLock across the duplicate check and the insert so two
        #connections can't both accept the same object.
        shared.inventoryLock.acquire()
        if self.inventoryHash in shared.inventory:
            print 'We have already received this msg message. Ignoring.'
            shared.inventoryLock.release()
            return
        elif isInSqlInventory(self.inventoryHash):
            print 'We have already received this msg message (it is stored on disk in the SQL inventory). Ignoring it.'
            shared.inventoryLock.release()
            return
        #This msg message is valid. Let's let our peers know about it.
        objectType = 'msg'
        shared.inventory[self.inventoryHash] = (objectType, self.streamNumber, data, embeddedTime)
        shared.inventoryLock.release()
        self.broadcastinv(self.inventoryHash)
        #self.emit(SIGNAL("incrementNumberOfMessagesProcessed()"))
        shared.UISignalQueue.put(('incrementNumberOfMessagesProcessed','no data'))
        self.processmsg(readPosition,data) #When this function returns, we will have either successfully processed the message bound for us, ignored it because it isn't bound for us, or found problem with the message that warranted ignoring it.
        # Let us now set lengthOfTimeWeShouldUseToProcessThisMessage. If we haven't used the specified amount of time, we shall sleep. These values are based on test timings and you may change them at-will.
        if len(data) > 100000000: #Size is greater than 100 megabytes
            lengthOfTimeWeShouldUseToProcessThisMessage = 100 #seconds. Actual length of time it took my computer to decrypt and verify the signature of a 100 MB message: 3.7 seconds.
        elif len(data) > 10000000: #Between 100 and 10 megabytes
            lengthOfTimeWeShouldUseToProcessThisMessage = 20 #seconds. Actual length of time it took my computer to decrypt and verify the signature of a 10 MB message: 0.53 seconds. Actual length of time it takes in practice when processing a real message: 1.44 seconds.
        elif len(data) > 1000000: #Between 10 and 1 megabyte
            lengthOfTimeWeShouldUseToProcessThisMessage = 3 #seconds. Actual length of time it took my computer to decrypt and verify the signature of a 1 MB message: 0.18 seconds. Actual length of time it takes in practice when processing a real message: 0.30 seconds.
        else: #Less than 1 megabyte
            lengthOfTimeWeShouldUseToProcessThisMessage = .6 #seconds. Actual length of time it took my computer to decrypt and verify the signature of a 100 KB message: 0.15 seconds. Actual length of time it takes in practice when processing a real message: 0.25 seconds.
        sleepTime = lengthOfTimeWeShouldUseToProcessThisMessage - (time.time()- self.messageProcessingStartTime)
        if sleepTime > 0:
            shared.printLock.acquire()
            print 'Timing attack mitigation: Sleeping for', sleepTime ,'seconds.'
            shared.printLock.release()
            time.sleep(sleepTime)
        shared.printLock.acquire()
        print 'Total message processing time:', time.time()- self.messageProcessingStartTime, 'seconds.'
        shared.printLock.release()
#A msg message has a valid time and POW and requires processing. The recmsg function calls this one.
    def processmsg(self,readPosition, encryptedData):
        """Process a msg object that has already passed recmsg's checks.

        readPosition points just past the object header inside encryptedData.
        The payload is first checked against the ackdata we are watching for;
        otherwise we try to decrypt it with each of our private keys. On a
        successful decryption the message is parsed, its signature verified,
        the sender's pubkey stored, black/white lists consulted, and the
        message delivered to the inbox (and re-broadcast if the receiving
        address is configured as a mailing list). Any embedded ackdata is
        queued to be sent back on the network.
        """
        initialDecryptionSuccessful = False
        #Let's check whether this is a message acknowledgement bound for us.
        if encryptedData[readPosition:] in ackdataForWhichImWatching:
            shared.printLock.acquire()
            print 'This msg IS an acknowledgement bound for me.'
            shared.printLock.release()
            del ackdataForWhichImWatching[encryptedData[readPosition:]]
            t = ('ackreceived',encryptedData[readPosition:])
            shared.sqlLock.acquire()
            shared.sqlSubmitQueue.put('UPDATE sent SET status=? WHERE ackdata=?')
            shared.sqlSubmitQueue.put(t)
            shared.sqlReturnQueue.get()
            shared.sqlSubmitQueue.put('commit')
            shared.sqlLock.release()
            #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),encryptedData[readPosition:],'Acknowledgement of the message received just now.')
            shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(encryptedData[readPosition:],'Acknowledgement of the message received just now.')))
            return
        else:
            shared.printLock.acquire()
            print 'This was NOT an acknowledgement bound for me.'
            #print 'ackdataForWhichImWatching', ackdataForWhichImWatching
            shared.printLock.release()
        #This is not an acknowledgement bound for me. See if it is a message bound for me by trying to decrypt it with my private keys.
        for key, cryptorObject in shared.myECCryptorObjects.items():
            try:
                decryptedData = cryptorObject.decrypt(encryptedData[readPosition:])
                toRipe = key #This is the RIPE hash of my pubkeys. We need this below to compare to the destination_ripe included in the encrypted data.
                initialDecryptionSuccessful = True
                print 'EC decryption successful using key associated with ripe hash:', key.encode('hex')
                break
            except Exception, err:
                pass
                #print 'cryptorObject.decrypt Exception:', err
        if not initialDecryptionSuccessful:
            #This is not a message bound for me.
            shared.printLock.acquire()
            print 'Length of time program spent failing to decrypt this message:', time.time()- self.messageProcessingStartTime, 'seconds.'
            shared.printLock.release()
        else:
            #This is a message bound for me.
            toAddress = shared.myAddressesByHash[toRipe] #Look up my address based on the RIPE hash.
            readPosition = 0
            messageVersion, messageVersionLength = decodeVarint(decryptedData[readPosition:readPosition+10])
            readPosition += messageVersionLength
            if messageVersion != 1:
                print 'Cannot understand message versions other than one. Ignoring message.'
                return
            sendersAddressVersionNumber, sendersAddressVersionNumberLength = decodeVarint(decryptedData[readPosition:readPosition+10])
            readPosition += sendersAddressVersionNumberLength
            if sendersAddressVersionNumber == 0:
                print 'Cannot understand sendersAddressVersionNumber = 0. Ignoring message.'
                return
            if sendersAddressVersionNumber >= 4:
                print 'Sender\'s address version number', sendersAddressVersionNumber, 'not yet supported. Ignoring message.'
                return
            if len(decryptedData) < 170:
                print 'Length of the unencrypted data is unreasonably short. Sanity check failed. Ignoring message.'
                return
            sendersStreamNumber, sendersStreamNumberLength = decodeVarint(decryptedData[readPosition:readPosition+10])
            if sendersStreamNumber == 0:
                print 'sender\'s stream number is 0. Ignoring message.'
                return
            readPosition += sendersStreamNumberLength
            behaviorBitfield = decryptedData[readPosition:readPosition+4]
            readPosition += 4
            #Keys are embedded without the 0x04 uncompressed-point prefix; add it back.
            pubSigningKey = '\x04' + decryptedData[readPosition:readPosition+64]
            readPosition += 64
            pubEncryptionKey = '\x04' + decryptedData[readPosition:readPosition+64]
            readPosition += 64
            if sendersAddressVersionNumber >= 3:
                requiredAverageProofOfWorkNonceTrialsPerByte, varintLength = decodeVarint(decryptedData[readPosition:readPosition+10])
                readPosition += varintLength
                print 'sender\'s requiredAverageProofOfWorkNonceTrialsPerByte is', requiredAverageProofOfWorkNonceTrialsPerByte
                requiredPayloadLengthExtraBytes, varintLength = decodeVarint(decryptedData[readPosition:readPosition+10])
                readPosition += varintLength
                print 'sender\'s requiredPayloadLengthExtraBytes is', requiredPayloadLengthExtraBytes
            endOfThePublicKeyPosition = readPosition #needed for when we store the pubkey in our database of pubkeys for later use.
            if toRipe != decryptedData[readPosition:readPosition+20]:
                shared.printLock.acquire()
                print 'The original sender of this message did not send it to you. Someone is attempting a Surreptitious Forwarding Attack.'
                print 'See: http://world.std.com/~dtd/sign_encrypt/sign_encrypt7.html'
                print 'your toRipe:', toRipe.encode('hex')
                print 'embedded destination toRipe:', decryptedData[readPosition:readPosition+20].encode('hex')
                shared.printLock.release()
                return
            readPosition += 20
            messageEncodingType, messageEncodingTypeLength = decodeVarint(decryptedData[readPosition:readPosition+10])
            readPosition += messageEncodingTypeLength
            messageLength, messageLengthLength = decodeVarint(decryptedData[readPosition:readPosition+10])
            readPosition += messageLengthLength
            message = decryptedData[readPosition:readPosition+messageLength]
            #print 'First 150 characters of message:', repr(message[:150])
            readPosition += messageLength
            ackLength, ackLengthLength = decodeVarint(decryptedData[readPosition:readPosition+10])
            readPosition += ackLengthLength
            ackData = decryptedData[readPosition:readPosition+ackLength]
            readPosition += ackLength
            positionOfBottomOfAckData = readPosition #needed to mark the end of what is covered by the signature
            signatureLength, signatureLengthLength = decodeVarint(decryptedData[readPosition:readPosition+10])
            readPosition += signatureLengthLength
            signature = decryptedData[readPosition:readPosition+signatureLength]
            try:
                highlevelcrypto.verify(decryptedData[:positionOfBottomOfAckData],signature,pubSigningKey.encode('hex'))
                print 'ECDSA verify passed'
            except Exception, err:
                print 'ECDSA verify failed', err
                return
            shared.printLock.acquire()
            print 'As a matter of intellectual curiosity, here is the Bitcoin address associated with the keys owned by the other person:', calculateBitcoinAddressFromPubkey(pubSigningKey), ' ..and here is the testnet address:',calculateTestnetAddressFromPubkey(pubSigningKey),'. The other person must take their private signing key from Bitmessage and import it into Bitcoin (or a service like Blockchain.info) for it to be of any use. Do not use this unless you know what you are doing.'
            shared.printLock.release()
            #calculate the fromRipe.
            sha = hashlib.new('sha512')
            sha.update(pubSigningKey+pubEncryptionKey)
            ripe = hashlib.new('ripemd160')
            ripe.update(sha.digest())
            #Let's store the public key in case we want to reply to this person.
            #The 0xFF padding stands in for the POW nonce and time fields of a real pubkey object.
            t = (ripe.digest(),'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF'+'\xFF\xFF\xFF\xFF'+decryptedData[messageVersionLength:endOfThePublicKeyPosition],int(time.time()),'yes')
            shared.sqlLock.acquire()
            shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
            shared.sqlSubmitQueue.put(t)
            shared.sqlReturnQueue.get()
            shared.sqlSubmitQueue.put('commit')
            shared.sqlLock.release()
            shared.workerQueue.put(('newpubkey',(sendersAddressVersionNumber,sendersStreamNumber,ripe.digest()))) #This will check to see whether we happen to be awaiting this pubkey in order to send a message. If we are, it will do the POW and send it.
            fromAddress = encodeAddress(sendersAddressVersionNumber,sendersStreamNumber,ripe.digest())
            #If this message is bound for one of my version 3 addresses (or higher), then we must check to make sure it meets our demanded proof of work requirement.
            if decodeAddress(toAddress)[1] >= 3:#If the toAddress version number is 3 or higher:
                if not shared.isAddressInMyAddressBookSubscriptionsListOrWhitelist(fromAddress): #If I'm not friendly with this person:
                    requiredNonceTrialsPerByte = shared.config.getint(toAddress,'noncetrialsperbyte')
                    requiredPayloadLengthExtraBytes = shared.config.getint(toAddress,'payloadlengthextrabytes')
                    if not self.isProofOfWorkSufficient(encryptedData,requiredNonceTrialsPerByte,requiredPayloadLengthExtraBytes):
                        print 'Proof of work in msg message insufficient only because it does not meet our higher requirement.'
                        return
            blockMessage = False #Gets set to True if the user shouldn't see the message according to black or white lists.
            if shared.config.get('bitmessagesettings', 'blackwhitelist') == 'black': #If we are using a blacklist
                t = (fromAddress,)
                shared.sqlLock.acquire()
                shared.sqlSubmitQueue.put('''SELECT label FROM blacklist where address=? and enabled='1' ''')
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlLock.release()
                if queryreturn != []:
                    shared.printLock.acquire()
                    print 'Message ignored because address is in blacklist.'
                    shared.printLock.release()
                    blockMessage = True
            else: #We're using a whitelist
                t = (fromAddress,)
                shared.sqlLock.acquire()
                shared.sqlSubmitQueue.put('''SELECT label FROM whitelist where address=? and enabled='1' ''')
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlLock.release()
                if queryreturn == []:
                    print 'Message ignored because address not in whitelist.'
                    blockMessage = True
            if not blockMessage:
                print 'fromAddress:', fromAddress
                print 'First 150 characters of message:', repr(message[:150])
                toLabel = shared.config.get(toAddress, 'label')
                if toLabel == '':
                    toLabel = toAddress
                #Encoding 2 is "simple": 'Subject:...\nBody:...'. Encoding 1 is a bare message.
                if messageEncodingType == 2:
                    bodyPositionIndex = string.find(message,'\nBody:')
                    if bodyPositionIndex > 1:
                        subject = message[8:bodyPositionIndex]
                        body = message[bodyPositionIndex+6:]
                    else:
                        subject = ''
                        body = message
                elif messageEncodingType == 1:
                    body = message
                    subject = ''
                elif messageEncodingType == 0:
                    print 'messageEncodingType == 0. Doing nothing with the message. They probably just sent it so that we would store their public key or send their ack data for them.'
                else:
                    body = 'Unknown encoding type.\n\n' + repr(message)
                    subject = ''
                if messageEncodingType <> 0:
                    shared.sqlLock.acquire()
                    t = (self.inventoryHash,toAddress,fromAddress,subject,int(time.time()),body,'inbox',messageEncodingType,0)
                    shared.sqlSubmitQueue.put('''INSERT INTO inbox VALUES (?,?,?,?,?,?,?,?,?)''')
                    shared.sqlSubmitQueue.put(t)
                    shared.sqlReturnQueue.get()
                    shared.sqlSubmitQueue.put('commit')
                    shared.sqlLock.release()
                    #self.emit(SIGNAL("displayNewInboxMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),self.inventoryHash,toAddress,fromAddress,subject,body)
                    shared.UISignalQueue.put(('displayNewInboxMessage',(self.inventoryHash,toAddress,fromAddress,subject,body)))
                #If we are behaving as an API then we might need to run an outside command to let some program know that a new message has arrived.
                if shared.safeConfigGetBoolean('bitmessagesettings','apienabled'):
                    try:
                        apiNotifyPath = shared.config.get('bitmessagesettings','apinotifypath')
                    except:
                        apiNotifyPath = ''
                    if apiNotifyPath != '':
                        call([apiNotifyPath, "newMessage"])
                #Let us now check and see whether our receiving address is behaving as a mailing list
                if shared.safeConfigGetBoolean(toAddress,'mailinglist'):
                    try:
                        mailingListName = shared.config.get(toAddress, 'mailinglistname')
                    except:
                        mailingListName = ''
                    #Let us send out this message as a broadcast
                    subject = self.addMailingListNameToSubject(subject,mailingListName)
                    #Let us now send this message out as a broadcast
                    message = strftime("%a, %Y-%m-%d %H:%M:%S UTC",gmtime()) + ' Message ostensibly from ' + fromAddress + ':\n\n' + body
                    fromAddress = toAddress #The fromAddress for the broadcast that we are about to send is the toAddress (my address) for the msg message we are currently processing.
                    ackdata = OpenSSL.rand(32) #We don't actually need the ackdata for acknowledgement since this is a broadcast message but we can use it to update the user interface when the POW is done generating.
                    toAddress = '[Broadcast subscribers]'
                    ripe = ''
                    shared.sqlLock.acquire()
                    t = ('',toAddress,ripe,fromAddress,subject,message,ackdata,int(time.time()),'broadcastpending',1,1,'sent',2)
                    shared.sqlSubmitQueue.put('''INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)''')
                    shared.sqlSubmitQueue.put(t)
                    shared.sqlReturnQueue.get()
                    shared.sqlSubmitQueue.put('commit')
                    shared.sqlLock.release()
                    #self.emit(SIGNAL("displayNewSentMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),toAddress,'[Broadcast subscribers]',fromAddress,subject,message,ackdata)
                    shared.UISignalQueue.put(('displayNewSentMessage',(toAddress,'[Broadcast subscribers]',fromAddress,subject,message,ackdata)))
                    shared.workerQueue.put(('sendbroadcast',(fromAddress,subject,message)))
            #NOTE(review): the ack is sent even when the message was blocked by a
            #black/white list — presumably deliberate so list membership is not leaked.
            if self.isAckDataValid(ackData):
                print 'ackData is valid. Will process it.'
                self.ackDataThatWeHaveYetToSend.append(ackData) #When we have processed all data, the processData function will pop the ackData out and process it as if it is a message received from our peer.
            #Display timing data
            timeRequiredToAttemptToDecryptMessage = time.time()- self.messageProcessingStartTime
            successfullyDecryptMessageTimings.append(timeRequiredToAttemptToDecryptMessage)
            sum = 0
            for item in successfullyDecryptMessageTimings:
                sum += item
            shared.printLock.acquire()
            print 'Time to decrypt this message successfully:', timeRequiredToAttemptToDecryptMessage
            print 'Average time for all message decryption successes since startup:', sum / len(successfullyDecryptMessageTimings)
            shared.printLock.release()
def isAckDataValid(self,ackData):
if len(ackData) < 24:
print 'The length of ackData is unreasonably short. Not sending ackData.'
return False
if ackData[0:4] != '\xe9\xbe\xb4\xd9':
print 'Ackdata magic bytes were wrong. Not sending ackData.'
return False
ackDataPayloadLength, = unpack('>L',ackData[16:20])
if len(ackData)-24 != ackDataPayloadLength:
print 'ackData payload length doesn\'t match the payload length specified in the header. Not sending ackdata.'
return False
if ackData[4:16] != 'getpubkey\x00\x00\x00' and ackData[4:16] != 'pubkey\x00\x00\x00\x00\x00\x00' and ackData[4:16] != 'msg\x00\x00\x00\x00\x00\x00\x00\x00\x00' and ackData[4:16] != 'broadcast\x00\x00\x00' :
return False
return True
def addMailingListNameToSubject(self,subject,mailingListName):
subject = subject.strip()
if subject[:3] == 'Re:' or subject[:3] == 'RE:':
subject = subject[3:].strip()
if '['+mailingListName+']' in subject:
return subject
else:
return '['+mailingListName+'] ' + subject
#We have received a pubkey
    def recpubkey(self,data):
        """Handle a received 'pubkey' object from a peer.

        Validates the payload size, proof of work, embedded time, and stream
        number, deduplicates the object against the inventories, advertises
        it to our peers, then hands it to processpubkey. Afterwards it sleeps
        so that total processing time is roughly constant (timing attack
        mitigation).
        """
        self.pubkeyProcessingStartTime = time.time()
        if len(data) < 146 or len(data) >600: #sanity check
            return
        #We must check to make sure the proof of work is sufficient.
        if not self.isProofOfWorkSufficient(data):
            print 'Proof of work in pubkey message insufficient.'
            return
        readPosition = 8 #for the nonce
        embeddedTime, = unpack('>I',data[readPosition:readPosition+4])
        #This section is used for the transition from 32 bit time to 64 bit time in the protocol.
        #A zero 32-bit time signals that the sender used the 64-bit encoding.
        if embeddedTime == 0:
            embeddedTime, = unpack('>Q',data[readPosition:readPosition+8])
            readPosition += 8
        else:
            readPosition += 4
        if embeddedTime < int(time.time())-lengthOfTimeToHoldOnToAllPubkeys:
            shared.printLock.acquire()
            print 'The embedded time in this pubkey message is too old. Ignoring. Embedded time is:', embeddedTime
            shared.printLock.release()
            return
        #Reject objects timestamped more than 3 hours into the future.
        if embeddedTime > int(time.time()) + 10800:
            shared.printLock.acquire()
            print 'The embedded time in this pubkey message more than several hours in the future. This is irrational. Ignoring message.'
            shared.printLock.release()
            return
        addressVersion, varintLength = decodeVarint(data[readPosition:readPosition+10])
        readPosition += varintLength
        streamNumber, varintLength = decodeVarint(data[readPosition:readPosition+10])
        readPosition += varintLength
        if self.streamNumber != streamNumber:
            print 'stream number embedded in this pubkey doesn\'t match our stream number. Ignoring.'
            return
        inventoryHash = calculateInventoryHash(data)
        #Hold inventoryLock across the duplicate check and the insert so two
        #connections can't both accept the same object.
        shared.inventoryLock.acquire()
        if inventoryHash in shared.inventory:
            print 'We have already received this pubkey. Ignoring it.'
            shared.inventoryLock.release()
            return
        elif isInSqlInventory(inventoryHash):
            print 'We have already received this pubkey (it is stored on disk in the SQL inventory). Ignoring it.'
            shared.inventoryLock.release()
            return
        objectType = 'pubkey'
        shared.inventory[inventoryHash] = (objectType, self.streamNumber, data, embeddedTime)
        shared.inventoryLock.release()
        self.broadcastinv(inventoryHash)
        #self.emit(SIGNAL("incrementNumberOfPubkeysProcessed()"))
        shared.UISignalQueue.put(('incrementNumberOfPubkeysProcessed','no data'))
        self.processpubkey(data)
        lengthOfTimeWeShouldUseToProcessThisMessage = .2
        sleepTime = lengthOfTimeWeShouldUseToProcessThisMessage - (time.time()- self.pubkeyProcessingStartTime)
        if sleepTime > 0:
            shared.printLock.acquire()
            print 'Timing attack mitigation: Sleeping for', sleepTime ,'seconds.'
            shared.printLock.release()
            time.sleep(sleepTime)
        shared.printLock.acquire()
        print 'Total pubkey processing time:', time.time()- self.pubkeyProcessingStartTime, 'seconds.'
        shared.printLock.release()
def processpubkey(self,data):
readPosition = 8 #for the nonce
embeddedTime, = unpack('>I',data[readPosition:readPosition+4])
readPosition += 4 #for the time
addressVersion, varintLength = decodeVarint(data[readPosition:readPosition+10])
readPosition += varintLength
streamNumber, varintLength = decodeVarint(data[readPosition:readPosition+10])
readPosition += varintLength
if addressVersion == 0:
print '(Within processpubkey) addressVersion of 0 doesn\'t make sense.'
return
if addressVersion >= 4 or addressVersion == 1:
shared.printLock.acquire()
print 'This version of Bitmessage cannot handle version', addressVersion,'addresses.'
shared.printLock.release()
return
if addressVersion == 2:
if len(data) < 146: #sanity check. This is the minimum possible length.
print '(within processpubkey) payloadLength less than 146. Sanity check failed.'
return
bitfieldBehaviors = data[readPosition:readPosition+4]
readPosition += 4
publicSigningKey = data[readPosition:readPosition+64]
#Is it possible for a public key to be invalid such that trying to encrypt or sign with it will cause an error? If it is, we should probably test these keys here.
readPosition += 64
publicEncryptionKey = data[readPosition:readPosition+64]
if len(publicEncryptionKey) < 64:
print 'publicEncryptionKey length less than 64. Sanity check failed.'
return
sha = hashlib.new('sha512')
sha.update('\x04'+publicSigningKey+'\x04'+publicEncryptionKey)
ripeHasher = hashlib.new('ripemd160')
ripeHasher.update(sha.digest())
ripe = ripeHasher.digest()
shared.printLock.acquire()
print 'within recpubkey, addressVersion:', addressVersion, ', streamNumber:', streamNumber
print 'ripe', ripe.encode('hex')
print 'publicSigningKey in hex:', publicSigningKey.encode('hex')
print 'publicEncryptionKey in hex:', publicEncryptionKey.encode('hex')
shared.printLock.release()
t = (ripe,)
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''SELECT usedpersonally FROM pubkeys WHERE hash=? AND usedpersonally='yes' ''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
if queryreturn != []: #if this pubkey is already in our database and if we have used it personally:
print 'We HAVE used this pubkey personally. Updating time.'
t = (ripe,data,embeddedTime,'yes')
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.workerQueue.put(('newpubkey',(addressVersion,streamNumber,ripe)))
else:
print 'We have NOT used this pubkey personally. Inserting in database.'
t = (ripe,data,embeddedTime,'no') #This will also update the embeddedTime.
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.workerQueue.put(('newpubkey',(addressVersion,streamNumber,ripe)))
if addressVersion == 3:
if len(data) < 170: #sanity check.
print '(within processpubkey) payloadLength less than 170. Sanity check failed.'
return
bitfieldBehaviors = data[readPosition:readPosition+4]
readPosition += 4
publicSigningKey = '\x04'+data[readPosition:readPosition+64]
#Is it possible for a public key to be invalid such that trying to encrypt or sign with it will cause an error? If it is, we should probably test these keys here.
readPosition += 64
publicEncryptionKey = '\x04'+data[readPosition:readPosition+64]
readPosition += 64
specifiedNonceTrialsPerByte, specifiedNonceTrialsPerByteLength = decodeVarint(data[readPosition:readPosition+10])
readPosition += specifiedNonceTrialsPerByteLength
specifiedPayloadLengthExtraBytes, specifiedPayloadLengthExtraBytesLength = decodeVarint(data[readPosition:readPosition+10])
readPosition += specifiedPayloadLengthExtraBytesLength
signatureLength, signatureLengthLength = decodeVarint(data[readPosition:readPosition+10])
signature = data[readPosition:readPosition+signatureLengthLength]
try:
highlevelcrypto.verify(data[8:readPosition],signature,publicSigningKey.encode('hex'))
print 'ECDSA verify passed (within processpubkey)'
except Exception, err:
print 'ECDSA verify failed (within processpubkey)', err
return
sha = hashlib.new('sha512')
sha.update(publicSigningKey+publicEncryptionKey)
ripeHasher = hashlib.new('ripemd160')
ripeHasher.update(sha.digest())
ripe = ripeHasher.digest()
shared.printLock.acquire()
print 'within recpubkey, addressVersion:', addressVersion, ', streamNumber:', streamNumber
print 'ripe', ripe.encode('hex')
print 'publicSigningKey in hex:', publicSigningKey.encode('hex')
print 'publicEncryptionKey in hex:', publicEncryptionKey.encode('hex')
shared.printLock.release()
t = (ripe,)
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''SELECT usedpersonally FROM pubkeys WHERE hash=? AND usedpersonally='yes' ''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
if queryreturn != []: #if this pubkey is already in our database and if we have used it personally:
print 'We HAVE used this pubkey personally. Updating time.'
t = (ripe,data,embeddedTime,'yes')
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
else:
print 'We have NOT used this pubkey personally. Inserting in database.'
t = (ripe,data,embeddedTime,'no') #This will also update the embeddedTime.
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.workerQueue.put(('newpubkey',(addressVersion,streamNumber,ripe)))
    #We have received a getpubkey message
    def recgetpubkey(self,data):
        """Process an incoming getpubkey (public-key request) object.

        data is the raw message payload beginning with the 8-byte
        proof-of-work nonce. After validating the POW, the embedded time and
        the stream number, the object is stored in the shared inventory and
        forwarded to our peers. If the 20-byte ripe hash at the end of the
        request identifies one of our own addresses and we have not broadcast
        that pubkey within lengthOfTimeToHoldOnToAllPubkeys seconds, the
        worker thread is told to do the POW for a pubkey message and send it.
        """
        if not self.isProofOfWorkSufficient(data):
            print 'Proof of work in getpubkey message insufficient.'
            return
        if len(data) < 34:
            print 'getpubkey message doesn\'t contain enough data. Ignoring.'
            return
        readPosition = 8 #bypass the nonce
        embeddedTime, = unpack('>I',data[readPosition:readPosition+4])
        #This section is used for the transition from 32 bit time to 64 bit time in the protocol.
        #A zero 32-bit time signals that the sender used a 64-bit timestamp instead.
        if embeddedTime == 0:
            embeddedTime, = unpack('>Q',data[readPosition:readPosition+8])
            readPosition += 8
        else:
            readPosition += 4
        #Reject objects timestamped more than 3 hours (10800 s) in the future or too far in the past.
        if embeddedTime > int(time.time())+10800:
            print 'The time in this getpubkey message is too new. Ignoring it. Time:', embeddedTime
            return
        if embeddedTime < int(time.time())-maximumAgeOfAnObjectThatIAmWillingToAccept:
            print 'The time in this getpubkey message is too old. Ignoring it. Time:', embeddedTime
            return
        requestedAddressVersionNumber, addressVersionLength = decodeVarint(data[readPosition:readPosition+10])
        readPosition += addressVersionLength
        streamNumber, streamNumberLength = decodeVarint(data[readPosition:readPosition+10])
        if streamNumber <> self.streamNumber:
            print 'The streamNumber', streamNumber, 'doesn\'t match our stream number:', self.streamNumber
            return
        readPosition += streamNumberLength
        inventoryHash = calculateInventoryHash(data)
        shared.inventoryLock.acquire()
        if inventoryHash in shared.inventory:
            print 'We have already received this getpubkey request. Ignoring it.'
            shared.inventoryLock.release()
            return
        elif isInSqlInventory(inventoryHash):
            print 'We have already received this getpubkey request (it is stored on disk in the SQL inventory). Ignoring it.'
            shared.inventoryLock.release()
            return
        objectType = 'getpubkey'
        shared.inventory[inventoryHash] = (objectType, self.streamNumber, data, embeddedTime)
        shared.inventoryLock.release()
        #This getpubkey request is valid so far. Forward to peers.
        self.broadcastinv(inventoryHash)
        #Only address versions 2 and 3 are serviceable; everything else is dropped after relaying.
        if requestedAddressVersionNumber == 0:
            print 'The requestedAddressVersionNumber of the pubkey request is zero. That doesn\'t make any sense. Ignoring it.'
            return
        elif requestedAddressVersionNumber == 1:
            print 'The requestedAddressVersionNumber of the pubkey request is 1 which isn\'t supported anymore. Ignoring it.'
            return
        elif requestedAddressVersionNumber > 3:
            print 'The requestedAddressVersionNumber of the pubkey request is too high. Can\'t understand. Ignoring it.'
            return
        requestedHash = data[readPosition:readPosition+20]
        if len(requestedHash) != 20:
            print 'The length of the requested hash is not 20 bytes. Something is wrong. Ignoring.'
            return
        print 'the hash requested in this getpubkey request is:', requestedHash.encode('hex')
        #NOTE(review): the triple-quoted block below is disabled code (answering
        #requests from the pubkeys table), kept verbatim; only requests for our
        #own addresses are serviced further down.
        """shared.sqlLock.acquire()
        t = (requestedHash,int(time.time())-lengthOfTimeToHoldOnToAllPubkeys) #this prevents SQL injection
        shared.sqlSubmitQueue.put('''SELECT hash, transmitdata, time FROM pubkeys WHERE hash=? AND havecorrectnonce=1 AND time>?''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlLock.release()
        if queryreturn != []:
            for row in queryreturn:
                hash, payload, timeEncodedInPubkey = row
                shared.printLock.acquire()
                print 'We have the requested pubkey stored in our database of pubkeys. Sending it.'
                shared.printLock.release()
                inventoryHash = calculateInventoryHash(payload)
                objectType = 'pubkey'
                shared.inventory[inventoryHash] = (objectType, self.streamNumber, payload, timeEncodedInPubkey)#If the time embedded in this pubkey is more than 3 days old then this object isn't going to last very long in the inventory- the cleanerThread is going to come along and move it from the inventory in memory to the SQL inventory and then delete it from the SQL inventory. It should still find its way back to the original requestor if he is online however.
                self.broadcastinv(inventoryHash)"""
        #else: #the pubkey is not in our database of pubkeys. Let's check if the requested key is ours (which would mean we should do the POW, put it in the pubkey table, and broadcast out the pubkey.)
        if requestedHash in shared.myAddressesByHash: #if this address hash is one of mine
            if decodeAddress(shared.myAddressesByHash[requestedHash])[1] != requestedAddressVersionNumber:
                shared.printLock.acquire()
                sys.stderr.write('(Within the recgetpubkey function) Someone requested one of my pubkeys but the requestedAddressVersionNumber doesn\'t match my actual address version number. That shouldn\'t have happened. Ignoring.\n')
                shared.printLock.release()
                return
            try:
                lastPubkeySendTime = int(shared.config.get(shared.myAddressesByHash[requestedHash],'lastpubkeysendtime'))
            except:
                lastPubkeySendTime = 0
            if lastPubkeySendTime < time.time()-lengthOfTimeToHoldOnToAllPubkeys: #If the last time we sent our pubkey was 28 days ago
                shared.printLock.acquire()
                print 'Found getpubkey-requested-hash in my list of EC hashes. Telling Worker thread to do the POW for a pubkey message and send it out.'
                shared.printLock.release()
                if requestedAddressVersionNumber == 2:
                    shared.workerQueue.put(('doPOWForMyV2Pubkey',requestedHash))
                elif requestedAddressVersionNumber == 3:
                    shared.workerQueue.put(('doPOWForMyV3Pubkey',requestedHash))
            else:
                shared.printLock.acquire()
                print 'Found getpubkey-requested-hash in my list of EC hashes BUT we already sent it recently. Ignoring request. The lastPubkeySendTime is:',lastPubkeySendTime
                shared.printLock.release()
        else:
            shared.printLock.acquire()
            print 'This getpubkey request is not for any of my keys.'
            shared.printLock.release()
    #We have received an inv message
    def recinv(self,data):
        """Process an inv message advertising one or more inventory hashes.

        data is a varint count followed by that many 32-byte hashes. A
        single-item inv is serviced immediately: we ask this peer for the
        object unless it is already in the in-memory or SQL inventory.
        Multi-item invs are only recorded; getdata requests are issued later,
        one random object at a time, so that overlapping inv messages from
        several peers spread the download load between them.
        """
        numberOfItemsInInv, lengthOfVarint = decodeVarint(data[:10])
        if len(data) < lengthOfVarint + (numberOfItemsInInv * 32):
            print 'inv message doesn\'t contain enough data. Ignoring.'
            return
        if numberOfItemsInInv == 1: #we'll just request this data from the person who advertised the object.
            self.objectsOfWhichThisRemoteNodeIsAlreadyAware[data[lengthOfVarint:32+lengthOfVarint]] = 0
            if data[lengthOfVarint:32+lengthOfVarint] in shared.inventory:
                shared.printLock.acquire()
                print 'Inventory (in memory) has inventory item already.'
                shared.printLock.release()
            elif isInSqlInventory(data[lengthOfVarint:32+lengthOfVarint]):
                print 'Inventory (SQL on disk) has inventory item already.'
            else:
                self.sendgetdata(data[lengthOfVarint:32+lengthOfVarint])
        else:
            print 'inv message lists', numberOfItemsInInv, 'objects.'
            for i in range(numberOfItemsInInv): #upon finishing dealing with an incoming message, the receiveDataThread will request a random object from the peer. This way if we get multiple inv messages from multiple peers which list mostly the same objects, we will make getdata requests for different random objects from the various peers.
                if len(data[lengthOfVarint+(32*i):32+lengthOfVarint+(32*i)]) == 32: #The length of an inventory hash should be 32. If it isn't 32 then the remote node is either badly programmed or behaving nefariously.
                    self.objectsOfWhichThisRemoteNodeIsAlreadyAware[data[lengthOfVarint+(32*i):32+lengthOfVarint+(32*i)]] = 0
                    self.objectsThatWeHaveYetToCheckAndSeeWhetherWeAlreadyHave[data[lengthOfVarint+(32*i):32+lengthOfVarint+(32*i)]] = 0
#Send a getdata message to our peer to request the object with the given hash
def sendgetdata(self,hash):
shared.printLock.acquire()
print 'sending getdata to retrieve object with hash:', hash.encode('hex')
shared.printLock.release()
payload = '\x01' + hash
headerData = '\xe9\xbe\xb4\xd9' #magic bits, slighly different from Bitcoin's magic bits.
headerData += 'getdata\x00\x00\x00\x00\x00'
headerData += pack('>L',len(payload)) #payload length. Note that we add an extra 8 for the nonce.
headerData += hashlib.sha512(payload).digest()[:4]
try:
self.sock.sendall(headerData + payload)
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
    #We have received a getdata request from our peer
    def recgetdata(self, data):
        """Service a getdata request: send each requested object we hold.

        data is a varint count followed by 32-byte inventory hashes. Each
        hash is looked up first in the in-memory inventory and then in the
        SQL inventory table; objects found are sent back with sendData().
        Unknown hashes are only logged.
        """
        numberOfRequestedInventoryItems, lengthOfVarint = decodeVarint(data[:10])
        if len(data) < lengthOfVarint + (32 * numberOfRequestedInventoryItems):
            print 'getdata message does not contain enough data. Ignoring.'
            return
        for i in xrange(numberOfRequestedInventoryItems):
            hash = data[lengthOfVarint+(i*32):32+lengthOfVarint+(i*32)]
            shared.printLock.acquire()
            print 'received getdata request for item:', hash.encode('hex')
            shared.printLock.release()
            #print 'inventory is', shared.inventory
            if hash in shared.inventory:
                objectType, streamNumber, payload, receivedTime = shared.inventory[hash]
                self.sendData(objectType,payload)
            else:
                #Not in memory; query the SQL inventory through the sqlThread's queue pair.
                t = (hash,)
                shared.sqlLock.acquire()
                shared.sqlSubmitQueue.put('''select objecttype, payload from inventory where hash=?''')
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlLock.release()
                if queryreturn <> []:
                    for row in queryreturn:
                        objectType, payload = row
                        self.sendData(objectType,payload)
                else:
                    print 'Someone asked for an object with a getdata which is not in either our memory inventory or our SQL inventory. That shouldn\'t have happened.'
#Our peer has requested (in a getdata message) that we send an object.
def sendData(self,objectType,payload):
headerData = '\xe9\xbe\xb4\xd9' #magic bits, slighly different from Bitcoin's magic bits.
if objectType == 'pubkey':
shared.printLock.acquire()
print 'sending pubkey'
shared.printLock.release()
headerData += 'pubkey\x00\x00\x00\x00\x00\x00'
elif objectType == 'getpubkey' or objectType == 'pubkeyrequest':
shared.printLock.acquire()
print 'sending getpubkey'
shared.printLock.release()
headerData += 'getpubkey\x00\x00\x00'
elif objectType == 'msg':
shared.printLock.acquire()
print 'sending msg'
shared.printLock.release()
headerData += 'msg\x00\x00\x00\x00\x00\x00\x00\x00\x00'
elif objectType == 'broadcast':
shared.printLock.acquire()
print 'sending broadcast'
shared.printLock.release()
headerData += 'broadcast\x00\x00\x00'
else:
sys.stderr.write('Error: sendData has been asked to send a strange objectType: %s\n' % str(objectType))
return
headerData += pack('>L',len(payload)) #payload length.
headerData += hashlib.sha512(payload).digest()[:4]
try:
self.sock.sendall(headerData + payload)
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
#Send an inv message with just one hash to all of our peers
def broadcastinv(self,hash):
shared.printLock.acquire()
print 'broadcasting inv with hash:', hash.encode('hex')
shared.printLock.release()
shared.broadcastToSendDataQueues((self.streamNumber, 'sendinv', hash))
#We have received an addr message.
def recaddr(self,data):
listOfAddressDetailsToBroadcastToPeers = []
numberOfAddressesIncluded = 0
numberOfAddressesIncluded, lengthOfNumberOfAddresses = decodeVarint(data[:10])
if verbose >= 1:
shared.printLock.acquire()
print 'addr message contains', numberOfAddressesIncluded, 'IP addresses.'
shared.printLock.release()
if self.remoteProtocolVersion == 1:
if numberOfAddressesIncluded > 1000 or numberOfAddressesIncluded == 0:
return
if len(data) != lengthOfNumberOfAddresses + (34 * numberOfAddressesIncluded):
print 'addr message does not contain the correct amount of data. Ignoring.'
return
needToWriteKnownNodesToDisk = False
for i in range(0,numberOfAddressesIncluded):
try:
if data[16+lengthOfNumberOfAddresses+(34*i):28+lengthOfNumberOfAddresses+(34*i)] != '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF':
shared.printLock.acquire()
print 'Skipping IPv6 address.', repr(data[16+lengthOfNumberOfAddresses+(34*i):28+lengthOfNumberOfAddresses+(34*i)])
shared.printLock.release()
continue
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (to test for an IPv6 address). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
try:
recaddrStream, = unpack('>I',data[4+lengthOfNumberOfAddresses+(34*i):8+lengthOfNumberOfAddresses+(34*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrStream). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
if recaddrStream == 0:
continue
if recaddrStream != self.streamNumber and recaddrStream != (self.streamNumber * 2) and recaddrStream != ((self.streamNumber * 2) + 1): #if the embedded stream number is not in my stream or either of my child streams then ignore it. Someone might be trying funny business.
continue
try:
recaddrServices, = unpack('>Q',data[8+lengthOfNumberOfAddresses+(34*i):16+lengthOfNumberOfAddresses+(34*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrServices). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
try:
recaddrPort, = unpack('>H',data[32+lengthOfNumberOfAddresses+(34*i):34+lengthOfNumberOfAddresses+(34*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrPort). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
#print 'Within recaddr(): IP', recaddrIP, ', Port', recaddrPort, ', i', i
hostFromAddrMessage = socket.inet_ntoa(data[28+lengthOfNumberOfAddresses+(34*i):32+lengthOfNumberOfAddresses+(34*i)])
#print 'hostFromAddrMessage', hostFromAddrMessage
if data[28+lengthOfNumberOfAddresses+(34*i)] == '\x7F':
print 'Ignoring IP address in loopback range:', hostFromAddrMessage
continue
if data[28+lengthOfNumberOfAddresses+(34*i)] == '\x0A':
print 'Ignoring IP address in private range:', hostFromAddrMessage
continue
if data[28+lengthOfNumberOfAddresses+(34*i):30+lengthOfNumberOfAddresses+(34*i)] == '\xC0A8':
print 'Ignoring IP address in private range:', hostFromAddrMessage
continue
timeSomeoneElseReceivedMessageFromThisNode, = unpack('>I',data[lengthOfNumberOfAddresses+(34*i):4+lengthOfNumberOfAddresses+(34*i)]) #This is the 'time' value in the received addr message.
if recaddrStream not in shared.knownNodes: #knownNodes is a dictionary of dictionaries with one outer dictionary for each stream. If the outer stream dictionary doesn't exist yet then we must make it.
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream] = {}
shared.knownNodesLock.release()
if hostFromAddrMessage not in shared.knownNodes[recaddrStream]:
if len(shared.knownNodes[recaddrStream]) < 20000 and timeSomeoneElseReceivedMessageFromThisNode > (int(time.time())-10800) and timeSomeoneElseReceivedMessageFromThisNode < (int(time.time()) + 10800): #If we have more than 20000 nodes in our list already then just forget about adding more. Also, make sure that the time that someone else received a message from this node is within three hours from now.
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream][hostFromAddrMessage] = (recaddrPort, timeSomeoneElseReceivedMessageFromThisNode)
shared.knownNodesLock.release()
needToWriteKnownNodesToDisk = True
hostDetails = (timeSomeoneElseReceivedMessageFromThisNode, recaddrStream, recaddrServices, hostFromAddrMessage, recaddrPort)
listOfAddressDetailsToBroadcastToPeers.append(hostDetails)
else:
PORT, timeLastReceivedMessageFromThisNode = shared.knownNodes[recaddrStream][hostFromAddrMessage]#PORT in this case is either the port we used to connect to the remote node, or the port that was specified by someone else in a past addr message.
if (timeLastReceivedMessageFromThisNode < timeSomeoneElseReceivedMessageFromThisNode) and (timeSomeoneElseReceivedMessageFromThisNode < int(time.time())):
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream][hostFromAddrMessage] = (PORT, timeSomeoneElseReceivedMessageFromThisNode)
shared.knownNodesLock.release()
if PORT != recaddrPort:
print 'Strange occurance: The port specified in an addr message', str(recaddrPort),'does not match the port',str(PORT),'that this program (or some other peer) used to connect to it',str(hostFromAddrMessage),'. Perhaps they changed their port or are using a strange NAT configuration.'
if needToWriteKnownNodesToDisk: #Runs if any nodes were new to us. Also, share those nodes with our peers.
shared.knownNodesLock.acquire()
output = open(shared.appdata + 'knownnodes.dat', 'wb')
pickle.dump(shared.knownNodes, output)
output.close()
shared.knownNodesLock.release()
self.broadcastaddr(listOfAddressDetailsToBroadcastToPeers) #no longer broadcast
shared.printLock.acquire()
print 'knownNodes currently has', len(shared.knownNodes[self.streamNumber]), 'nodes for this stream.'
shared.printLock.release()
elif self.remoteProtocolVersion >= 2: #The difference is that in protocol version 2, network addresses use 64 bit times rather than 32 bit times.
if numberOfAddressesIncluded > 1000 or numberOfAddressesIncluded == 0:
return
if len(data) != lengthOfNumberOfAddresses + (38 * numberOfAddressesIncluded):
print 'addr message does not contain the correct amount of data. Ignoring.'
return
needToWriteKnownNodesToDisk = False
for i in range(0,numberOfAddressesIncluded):
try:
if data[20+lengthOfNumberOfAddresses+(38*i):32+lengthOfNumberOfAddresses+(38*i)] != '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF':
shared.printLock.acquire()
print 'Skipping IPv6 address.', repr(data[20+lengthOfNumberOfAddresses+(38*i):32+lengthOfNumberOfAddresses+(38*i)])
shared.printLock.release()
continue
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (to test for an IPv6 address). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
try:
recaddrStream, = unpack('>I',data[8+lengthOfNumberOfAddresses+(38*i):12+lengthOfNumberOfAddresses+(38*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrStream). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
if recaddrStream == 0:
continue
if recaddrStream != self.streamNumber and recaddrStream != (self.streamNumber * 2) and recaddrStream != ((self.streamNumber * 2) + 1): #if the embedded stream number is not in my stream or either of my child streams then ignore it. Someone might be trying funny business.
continue
try:
recaddrServices, = unpack('>Q',data[12+lengthOfNumberOfAddresses+(38*i):20+lengthOfNumberOfAddresses+(38*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrServices). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
try:
recaddrPort, = unpack('>H',data[36+lengthOfNumberOfAddresses+(38*i):38+lengthOfNumberOfAddresses+(38*i)])
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('ERROR TRYING TO UNPACK recaddr (recaddrPort). Message: %s\n' % str(err))
shared.printLock.release()
break #giving up on unpacking any more. We should still be connected however.
#print 'Within recaddr(): IP', recaddrIP, ', Port', recaddrPort, ', i', i
hostFromAddrMessage = socket.inet_ntoa(data[32+lengthOfNumberOfAddresses+(38*i):36+lengthOfNumberOfAddresses+(38*i)])
#print 'hostFromAddrMessage', hostFromAddrMessage
if data[32+lengthOfNumberOfAddresses+(38*i)] == '\x7F':
print 'Ignoring IP address in loopback range:', hostFromAddrMessage
continue
if data[32+lengthOfNumberOfAddresses+(38*i)] == '\x0A':
print 'Ignoring IP address in private range:', hostFromAddrMessage
continue
if data[32+lengthOfNumberOfAddresses+(38*i):34+lengthOfNumberOfAddresses+(38*i)] == '\xC0A8':
print 'Ignoring IP address in private range:', hostFromAddrMessage
continue
timeSomeoneElseReceivedMessageFromThisNode, = unpack('>Q',data[lengthOfNumberOfAddresses+(38*i):8+lengthOfNumberOfAddresses+(38*i)]) #This is the 'time' value in the received addr message. 64-bit.
if recaddrStream not in shared.knownNodes: #knownNodes is a dictionary of dictionaries with one outer dictionary for each stream. If the outer stream dictionary doesn't exist yet then we must make it.
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream] = {}
shared.knownNodesLock.release()
if hostFromAddrMessage not in shared.knownNodes[recaddrStream]:
if len(shared.knownNodes[recaddrStream]) < 20000 and timeSomeoneElseReceivedMessageFromThisNode > (int(time.time())-10800) and timeSomeoneElseReceivedMessageFromThisNode < (int(time.time()) + 10800): #If we have more than 20000 nodes in our list already then just forget about adding more. Also, make sure that the time that someone else received a message from this node is within three hours from now.
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream][hostFromAddrMessage] = (recaddrPort, timeSomeoneElseReceivedMessageFromThisNode)
shared.knownNodesLock.release()
shared.printLock.acquire()
print 'added new node', hostFromAddrMessage, 'to knownNodes in stream', recaddrStream
shared.printLock.release()
needToWriteKnownNodesToDisk = True
hostDetails = (timeSomeoneElseReceivedMessageFromThisNode, recaddrStream, recaddrServices, hostFromAddrMessage, recaddrPort)
listOfAddressDetailsToBroadcastToPeers.append(hostDetails)
else:
PORT, timeLastReceivedMessageFromThisNode = shared.knownNodes[recaddrStream][hostFromAddrMessage]#PORT in this case is either the port we used to connect to the remote node, or the port that was specified by someone else in a past addr message.
if (timeLastReceivedMessageFromThisNode < timeSomeoneElseReceivedMessageFromThisNode) and (timeSomeoneElseReceivedMessageFromThisNode < int(time.time())):
shared.knownNodesLock.acquire()
shared.knownNodes[recaddrStream][hostFromAddrMessage] = (PORT, timeSomeoneElseReceivedMessageFromThisNode)
shared.knownNodesLock.release()
if PORT != recaddrPort:
print 'Strange occurance: The port specified in an addr message', str(recaddrPort),'does not match the port',str(PORT),'that this program (or some other peer) used to connect to it',str(hostFromAddrMessage),'. Perhaps they changed their port or are using a strange NAT configuration.'
if needToWriteKnownNodesToDisk: #Runs if any nodes were new to us. Also, share those nodes with our peers.
shared.knownNodesLock.acquire()
output = open(shared.appdata + 'knownnodes.dat', 'wb')
pickle.dump(shared.knownNodes, output)
output.close()
shared.knownNodesLock.release()
self.broadcastaddr(listOfAddressDetailsToBroadcastToPeers)
shared.printLock.acquire()
print 'knownNodes currently has', len(shared.knownNodes[self.streamNumber]), 'nodes for this stream.'
shared.printLock.release()
#Function runs when we want to broadcast an addr message to all of our peers. Runs when we learn of nodes that we didn't previously know about and want to share them with our peers.
def broadcastaddr(self,listOfAddressDetailsToBroadcastToPeers):
numberOfAddressesInAddrMessage = len(listOfAddressDetailsToBroadcastToPeers)
payload = ''
for hostDetails in listOfAddressDetailsToBroadcastToPeers:
timeLastReceivedMessageFromThisNode, streamNumber, services, host, port = hostDetails
payload += pack('>Q',timeLastReceivedMessageFromThisNode) #now uses 64-bit time
payload += pack('>I',streamNumber)
payload += pack('>q',services) #service bit flags offered by this node
payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + socket.inet_aton(host)
payload += pack('>H',port)#remote port
payload = encodeVarint(numberOfAddressesInAddrMessage) + payload
datatosend = '\xE9\xBE\xB4\xD9addr\x00\x00\x00\x00\x00\x00\x00\x00'
datatosend = datatosend + pack('>L',len(payload)) #payload length
datatosend = datatosend + hashlib.sha512(payload).digest()[0:4]
datatosend = datatosend + payload
if verbose >= 1:
shared.printLock.acquire()
print 'Broadcasting addr with', numberOfAddressesInAddrMessage, 'entries.'
shared.printLock.release()
shared.broadcastToSendDataQueues((self.streamNumber, 'sendaddr', datatosend))
    #Send a big addr message to our peer
    def sendaddr(self):
        """Send our peer a large addr message sampling our known nodes.

        Up to 1000 addresses are shared: 500 sampled from this stream, 250
        from the left child stream and 250 from the right child stream.
        Sampling with replacement into a dict collapses duplicates, so the
        actual counts may be lower. Nodes in private IP ranges or not heard
        from within maximumAgeOfNodesThatIAdvertiseToOthers seconds are
        excluded. Entry format depends on the peer's protocol version:
        32-bit times for version 1, 64-bit times otherwise.

        NOTE(review): shared.knownNodes[self.streamNumber*2] (and *2+1) will
        raise KeyError if the child-stream dicts were never created -
        presumably they are initialized elsewhere; confirm.
        """
        addrsInMyStream = {}
        addrsInChildStreamLeft = {}
        addrsInChildStreamRight = {}
        #print 'knownNodes', shared.knownNodes
        #We are going to share a maximum number of 1000 addrs with our peer. 500 from this stream, 250 from the left child stream, and 250 from the right child stream.
        shared.knownNodesLock.acquire()
        if len(shared.knownNodes[self.streamNumber]) > 0:
            for i in range(500):
                random.seed()
                HOST, = random.sample(shared.knownNodes[self.streamNumber],  1)
                if self.isHostInPrivateIPRange(HOST):
                    continue
                addrsInMyStream[HOST] = shared.knownNodes[self.streamNumber][HOST]
        if len(shared.knownNodes[self.streamNumber*2]) > 0:
            for i in range(250):
                random.seed()
                HOST, = random.sample(shared.knownNodes[self.streamNumber*2],  1)
                if self.isHostInPrivateIPRange(HOST):
                    continue
                addrsInChildStreamLeft[HOST] = shared.knownNodes[self.streamNumber*2][HOST]
        if len(shared.knownNodes[(self.streamNumber*2)+1]) > 0:
            for i in range(250):
                random.seed()
                HOST, = random.sample(shared.knownNodes[(self.streamNumber*2)+1],  1)
                if self.isHostInPrivateIPRange(HOST):
                    continue
                addrsInChildStreamRight[HOST] = shared.knownNodes[(self.streamNumber*2)+1][HOST]
        shared.knownNodesLock.release()
        numberOfAddressesInAddrMessage = 0
        payload = ''
        #print 'addrsInMyStream.items()', addrsInMyStream.items()
        for HOST, value in addrsInMyStream.items():
            PORT, timeLastReceivedMessageFromThisNode = value
            if timeLastReceivedMessageFromThisNode > (int(time.time())- maximumAgeOfNodesThatIAdvertiseToOthers): #If it is younger than 3 hours old..
                numberOfAddressesInAddrMessage += 1
                if self.remoteProtocolVersion == 1:
                    payload += pack('>I',timeLastReceivedMessageFromThisNode) #32-bit time
                else:
                    payload += pack('>Q',timeLastReceivedMessageFromThisNode) #64-bit time
                payload += pack('>I',self.streamNumber)
                payload += pack('>q',1) #service bit flags offered by this node
                payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + socket.inet_aton(HOST)
                payload += pack('>H',PORT)#remote port
        for HOST, value in addrsInChildStreamLeft.items():
            PORT, timeLastReceivedMessageFromThisNode = value
            if timeLastReceivedMessageFromThisNode > (int(time.time())- maximumAgeOfNodesThatIAdvertiseToOthers): #If it is younger than 3 hours old..
                numberOfAddressesInAddrMessage += 1
                if self.remoteProtocolVersion == 1:
                    payload += pack('>I',timeLastReceivedMessageFromThisNode) #32-bit time
                else:
                    payload += pack('>Q',timeLastReceivedMessageFromThisNode) #64-bit time
                payload += pack('>I',self.streamNumber*2)
                payload += pack('>q',1) #service bit flags offered by this node
                payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + socket.inet_aton(HOST)
                payload += pack('>H',PORT)#remote port
        for HOST, value in addrsInChildStreamRight.items():
            PORT, timeLastReceivedMessageFromThisNode = value
            if timeLastReceivedMessageFromThisNode > (int(time.time())- maximumAgeOfNodesThatIAdvertiseToOthers): #If it is younger than 3 hours old..
                numberOfAddressesInAddrMessage += 1
                if self.remoteProtocolVersion == 1:
                    payload += pack('>I',timeLastReceivedMessageFromThisNode) #32-bit time
                else:
                    payload += pack('>Q',timeLastReceivedMessageFromThisNode) #64-bit time
                payload += pack('>I',(self.streamNumber*2)+1)
                payload += pack('>q',1) #service bit flags offered by this node
                payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + socket.inet_aton(HOST)
                payload += pack('>H',PORT)#remote port
        payload = encodeVarint(numberOfAddressesInAddrMessage) + payload
        datatosend = '\xE9\xBE\xB4\xD9addr\x00\x00\x00\x00\x00\x00\x00\x00'
        datatosend = datatosend + pack('>L',len(payload)) #payload length
        datatosend = datatosend + hashlib.sha512(payload).digest()[0:4]
        datatosend = datatosend + payload
        try:
            self.sock.sendall(datatosend)
            if verbose >= 1:
                shared.printLock.acquire()
                print 'Sending addr with', numberOfAddressesInAddrMessage, 'entries.'
                shared.printLock.release()
        except Exception, err:
            #if not 'Bad file descriptor' in err:
            shared.printLock.acquire()
            sys.stderr.write('sock.sendall error: %s\n' % err)
            shared.printLock.release()
    #We have received a version message
    def recversion(self,data):
        """Process a version message from our peer.

        Records the peer's protocol version, our external IP as seen by the
        peer, the peer's incoming port, useragent and stream number. The
        connection is dropped if the peer is interested in a stream other
        than 1 or if the nonce shows we connected to ourselves. Otherwise we
        register the host in knownNodes, persist knownnodes.dat, reply with a
        verack, and (for incoming connections) send our own version message.
        """
        if len(data) < 83:
            #This version message is unreasonably short. Forget it.
            return
        elif not self.verackSent:
            self.remoteProtocolVersion, = unpack('>L',data[:4])
            #print 'remoteProtocolVersion', self.remoteProtocolVersion
            self.myExternalIP = socket.inet_ntoa(data[40:44])
            #print 'myExternalIP', self.myExternalIP
            self.remoteNodeIncomingPort, = unpack('>H',data[70:72])
            #print 'remoteNodeIncomingPort', self.remoteNodeIncomingPort
            useragentLength, lengthOfUseragentVarint = decodeVarint(data[80:84])
            readPosition = 80 + lengthOfUseragentVarint
            useragent = data[readPosition:readPosition+useragentLength]
            readPosition += useragentLength
            #NOTE(review): only the first stream number in the peer's stream list is read.
            numberOfStreamsInVersionMessage, lengthOfNumberOfStreamsInVersionMessage = decodeVarint(data[readPosition:])
            readPosition += lengthOfNumberOfStreamsInVersionMessage
            self.streamNumber, lengthOfRemoteStreamNumber = decodeVarint(data[readPosition:])
            shared.printLock.acquire()
            print 'Remote node useragent:', useragent, '  stream number:', self.streamNumber
            shared.printLock.release()
            if self.streamNumber != 1:
                shared.broadcastToSendDataQueues((0, 'shutdown', self.HOST))
                shared.printLock.acquire()
                print 'Closed connection to', self.HOST, 'because they are interested in stream', self.streamNumber,'.'
                shared.printLock.release()
                return
            shared.connectedHostsList[self.HOST] = 1 #We use this data structure to not only keep track of what hosts we are connected to so that we don't try to connect to them again, but also to list the connections count on the Network Status tab.
            #If this was an incoming connection, then the sendData thread doesn't know the stream. We have to set it.
            if not self.initiatedConnection:
                shared.broadcastToSendDataQueues((0,'setStreamNumber',(self.HOST,self.streamNumber)))
            if data[72:80] == eightBytesOfRandomDataUsedToDetectConnectionsToSelf:
                shared.broadcastToSendDataQueues((0, 'shutdown', self.HOST))
                shared.printLock.acquire()
                print 'Closing connection to myself: ', self.HOST
                shared.printLock.release()
                return
            shared.broadcastToSendDataQueues((0,'setRemoteProtocolVersion',(self.HOST,self.remoteProtocolVersion)))
            shared.knownNodesLock.acquire()
            shared.knownNodes[self.streamNumber][self.HOST] = (self.remoteNodeIncomingPort, int(time.time()))
            output = open(shared.appdata + 'knownnodes.dat', 'wb')
            pickle.dump(shared.knownNodes, output)
            output.close()
            shared.knownNodesLock.release()
            self.sendverack()
            if self.initiatedConnection == False:
                self.sendversion()
#Sends a version message
def sendversion(self):
shared.printLock.acquire()
print 'Sending version message'
shared.printLock.release()
try:
self.sock.sendall(assembleVersionMessage(self.HOST,self.PORT,self.streamNumber))
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
#Sends a verack message
def sendverack(self):
shared.printLock.acquire()
print 'Sending verack'
shared.printLock.release()
try:
self.sock.sendall('\xE9\xBE\xB4\xD9\x76\x65\x72\x61\x63\x6B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcf\x83\xe1\x35')
except Exception, err:
#if not 'Bad file descriptor' in err:
shared.printLock.acquire()
sys.stderr.write('sock.sendall error: %s\n' % err)
shared.printLock.release()
#cf 83 e1 35
self.verackSent = True
if self.verackReceived == True:
self.connectionFullyEstablished()
def isHostInPrivateIPRange(self,host):
if host[:3] == '10.':
return True
if host[:4] == '172.':
if host[6] == '.':
if int(host[4:6]) >= 16 and int(host[4:6]) <= 31:
return True
if host[:8] == '192.168.':
return True
return False
#Every connection to a peer has a sendDataThread (and also a receiveDataThread).
class sendDataThread(threading.Thread):
    """Owns the outbound half of a single peer connection.

    Other threads broadcast (stream, command, data) tuples to every
    sendDataThread via shared.sendDataQueues; each thread reads its own
    copy from self.mailbox and acts on it in run(). Supported commands:
    'shutdown', 'setStreamNumber', 'setRemoteProtocolVersion', 'sendaddr',
    'sendinv', and 'pong'.
    """
    def __init__(self):
        threading.Thread.__init__(self)
        # Register our mailbox so other threads can broadcast commands to us.
        self.mailbox = Queue.Queue()
        shared.sendDataQueues.append(self.mailbox)
        shared.printLock.acquire()
        print 'The length of sendDataQueues at sendDataThread init is:', len(shared.sendDataQueues)
        shared.printLock.release()
        self.data = ''
    def setup(self,sock,HOST,PORT,streamNumber,objectsOfWhichThisRemoteNodeIsAlreadyAware):
        """Attach this thread to an already-connected socket and record the
        peer's address, stream, and known-inventory state."""
        self.sock = sock
        self.HOST = HOST
        self.PORT = PORT
        self.streamNumber = streamNumber
        self.remoteProtocolVersion = -1 #This must be set using setRemoteProtocolVersion command which is sent through the self.mailbox queue.
        self.lastTimeISentData = int(time.time()) #If this value increases beyond five minutes ago, we'll send a pong message to keep the connection alive.
        self.objectsOfWhichThisRemoteNodeIsAlreadyAware = objectsOfWhichThisRemoteNodeIsAlreadyAware
        shared.printLock.acquire()
        print 'The streamNumber of this sendDataThread (ID:', str(id(self))+') at setup() is', self.streamNumber
        shared.printLock.release()
    def sendVersionMessage(self):
        """Send our version message to the peer (used for outbound
        connections we initiated)."""
        datatosend = assembleVersionMessage(self.HOST,self.PORT,self.streamNumber)#the IP and port of the remote host, and my streamNumber.
        shared.printLock.acquire()
        print 'Sending version packet: ', repr(datatosend)
        shared.printLock.release()
        try:
            self.sock.sendall(datatosend)
        except Exception, err:
            #if not 'Bad file descriptor' in err:
            shared.printLock.acquire()
            sys.stderr.write('sock.sendall error: %s\n' % err)
            shared.printLock.release()
        self.versionSent = 1
    def run(self):
        """Command loop: pull (stream, command, data) tuples off the mailbox
        and act on those addressed to our stream (or stream 0 = all)."""
        while True:
            deststream,command,data = self.mailbox.get()
            #shared.printLock.acquire()
            #print 'sendDataThread, destream:', deststream, ', Command:', command, ', ID:',id(self), ', HOST:', self.HOST
            #shared.printLock.release()
            if deststream == self.streamNumber or deststream == 0:
                if command == 'shutdown':
                    if data == self.HOST or data == 'all':
                        shared.printLock.acquire()
                        print 'sendDataThread (associated with', self.HOST,') ID:',id(self), 'shutting down now.'
                        shared.printLock.release()
                        try:
                            self.sock.shutdown(socket.SHUT_RDWR)
                            self.sock.close()
                        except:
                            pass
                        # Deregister our mailbox so broadcasts stop reaching us.
                        shared.sendDataQueues.remove(self.mailbox)
                        shared.printLock.acquire()
                        print 'len of sendDataQueues', len(shared.sendDataQueues)
                        shared.printLock.release()
                        break
                #When you receive an incoming connection, a sendDataThread is created even though you don't yet know what stream number the remote peer is interested in. They will tell you in a version message and if you too are interested in that stream then you will continue on with the connection and will set the streamNumber of this send data thread here:
                elif command == 'setStreamNumber':
                    hostInMessage, specifiedStreamNumber = data
                    if hostInMessage == self.HOST:
                        shared.printLock.acquire()
                        print 'setting the stream number in the sendData thread (ID:',id(self), ') to', specifiedStreamNumber
                        shared.printLock.release()
                        self.streamNumber = specifiedStreamNumber
                elif command == 'setRemoteProtocolVersion':
                    hostInMessage, specifiedRemoteProtocolVersion = data
                    if hostInMessage == self.HOST:
                        shared.printLock.acquire()
                        print 'setting the remote node\'s protocol version in the sendData thread (ID:',id(self), ') to', specifiedRemoteProtocolVersion
                        shared.printLock.release()
                        self.remoteProtocolVersion = specifiedRemoteProtocolVersion
                elif command == 'sendaddr':
                    if self.remoteProtocolVersion == 1:
                        shared.printLock.acquire()
                        print 'a sendData thread is not sending an addr message to this particular peer ('+self.HOST+') because their protocol version is 1.'
                        shared.printLock.release()
                    else:
                        try:
                            #To prevent some network analysis, 'leak' the data out to our peer after waiting a random amount of time unless we have a long list of messages in our queue to send.
                            random.seed()
                            time.sleep(random.randrange(0, 10))
                            self.sock.sendall(data)
                            self.lastTimeISentData = int(time.time())
                        except:
                            # Any send failure means the connection is dead;
                            # deregister and terminate this thread.
                            print 'self.sock.sendall failed'
                            try:
                                self.sock.shutdown(socket.SHUT_RDWR)
                                self.sock.close()
                            except:
                                pass
                            shared.sendDataQueues.remove(self.mailbox)
                            print 'sendDataThread thread (ID:',str(id(self))+') ending now. Was connected to', self.HOST
                            break
                elif command == 'sendinv':
                    # Skip peers that already know about this object.
                    if data not in self.objectsOfWhichThisRemoteNodeIsAlreadyAware:
                        payload = '\x01' + data
                        headerData = '\xe9\xbe\xb4\xd9' #magic bits, slightly different from Bitcoin's magic bits.
                        headerData += 'inv\x00\x00\x00\x00\x00\x00\x00\x00\x00'
                        headerData += pack('>L',len(payload))
                        headerData += hashlib.sha512(payload).digest()[:4]
                        #To prevent some network analysis, 'leak' the data out to our peer after waiting a random amount of time
                        random.seed()
                        time.sleep(random.randrange(0, 10))
                        try:
                            self.sock.sendall(headerData + payload)
                            self.lastTimeISentData = int(time.time())
                        except:
                            print 'self.sock.sendall failed'
                            try:
                                self.sock.shutdown(socket.SHUT_RDWR)
                                self.sock.close()
                            except:
                                pass
                            shared.sendDataQueues.remove(self.mailbox)
                            print 'sendDataThread thread (ID:',str(id(self))+') ending now. Was connected to', self.HOST
                            break
                elif command == 'pong':
                    # Only ping the peer if we have been quiet for ~5 minutes.
                    if self.lastTimeISentData < (int(time.time()) - 298):
                        #Send out a pong message to keep the connection alive.
                        shared.printLock.acquire()
                        print 'Sending pong to', self.HOST, 'to keep connection alive.'
                        shared.printLock.release()
                        try:
                            self.sock.sendall('\xE9\xBE\xB4\xD9\x70\x6F\x6E\x67\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcf\x83\xe1\x35')
                            self.lastTimeISentData = int(time.time())
                        except:
                            print 'send pong failed'
                            try:
                                self.sock.shutdown(socket.SHUT_RDWR)
                                self.sock.close()
                            except:
                                pass
                            shared.sendDataQueues.remove(self.mailbox)
                            print 'sendDataThread thread', self, 'ending now. Was connected to', self.HOST
                            break
            else:
                shared.printLock.acquire()
                print 'sendDataThread ID:',id(self),'ignoring command', command,'because the thread is not in stream',deststream
                shared.printLock.release()
def isInSqlInventory(hash):
    """Return True if an object with the given hash exists in the on-disk
    inventory table."""
    shared.sqlLock.acquire()
    shared.sqlSubmitQueue.put('''select hash from inventory where hash=?''')
    shared.sqlSubmitQueue.put((hash,))
    queryreturn = shared.sqlReturnQueue.get()
    shared.sqlLock.release()
    # An empty result set means the hash is not present.
    return queryreturn != []
def convertIntToString(n):
a = __builtins__.hex(n)
if a[-1:] == 'L':
a = a[:-1]
if (len(a) % 2) == 0:
return a[2:].decode('hex')
else:
return ('0'+a[2:]).decode('hex')
def convertStringToInt(s):
return int(s.encode('hex'), 16)
#This function expects that pubkey begin with \x04
def calculateBitcoinAddressFromPubkey(pubkey):
if len(pubkey)!= 65:
print 'Could not calculate Bitcoin address from pubkey because function was passed a pubkey that was', len(pubkey),'bytes long rather than 65.'
return "error"
ripe = hashlib.new('ripemd160')
sha = hashlib.new('sha256')
sha.update(pubkey)
ripe.update(sha.digest())
ripeWithProdnetPrefix = '\x00' + ripe.digest()
checksum = hashlib.sha256(hashlib.sha256(ripeWithProdnetPrefix).digest()).digest()[:4]
binaryBitcoinAddress = ripeWithProdnetPrefix + checksum
numberOfZeroBytesOnBinaryBitcoinAddress = 0
while binaryBitcoinAddress[0] == '\x00':
numberOfZeroBytesOnBinaryBitcoinAddress += 1
binaryBitcoinAddress = binaryBitcoinAddress[1:]
base58encoded = arithmetic.changebase(binaryBitcoinAddress,256,58)
return "1"*numberOfZeroBytesOnBinaryBitcoinAddress + base58encoded
def calculateTestnetAddressFromPubkey(pubkey):
if len(pubkey)!= 65:
print 'Could not calculate Bitcoin address from pubkey because function was passed a pubkey that was', len(pubkey),'bytes long rather than 65.'
return "error"
ripe = hashlib.new('ripemd160')
sha = hashlib.new('sha256')
sha.update(pubkey)
ripe.update(sha.digest())
ripeWithProdnetPrefix = '\x6F' + ripe.digest()
checksum = hashlib.sha256(hashlib.sha256(ripeWithProdnetPrefix).digest()).digest()[:4]
binaryBitcoinAddress = ripeWithProdnetPrefix + checksum
numberOfZeroBytesOnBinaryBitcoinAddress = 0
while binaryBitcoinAddress[0] == '\x00':
numberOfZeroBytesOnBinaryBitcoinAddress += 1
binaryBitcoinAddress = binaryBitcoinAddress[1:]
base58encoded = arithmetic.changebase(binaryBitcoinAddress,256,58)
return "1"*numberOfZeroBytesOnBinaryBitcoinAddress + base58encoded
def signal_handler(signal, frame):
if shared.safeConfigGetBoolean('bitmessagesettings','daemon'):
shared.doCleanShutdown()
sys.exit(0)
else:
print 'Unfortunately you cannot use Ctrl+C when running the UI because the UI captures the signal.'
def connectToStream(streamNumber):
    """Start the outgoingSynSender threads that open outbound connections
    for the given stream number."""
    selfInitiatedConnections[streamNumber] = {}
    # Windows copes poorly with many half-open connections; keep the count low.
    if sys.platform.startswith('win'):
        maximumNumberOfHalfOpenConnections = 9
    else:
        maximumNumberOfHalfOpenConnections = 32
    for _ in range(maximumNumberOfHalfOpenConnections):
        synSender = outgoingSynSender()
        synSender.setup(streamNumber)
        synSender.start()
#Does an EC point multiplication; turns a private key into a public key.
def pointMult(secret):
    """Multiply the secp256k1 generator point by *secret* (a 32 byte private
    key) and return the uncompressed public key as a byte string.

    Uses the OpenSSL EC_* API through ctypes; every OpenSSL object
    allocated here is freed before returning.
    """
    #ctx = OpenSSL.BN_CTX_new() #This value proved to cause Seg Faults on Linux. It turns out that it really didn't speed up EC_POINT_mul anyway.
    k = OpenSSL.EC_KEY_new_by_curve_name(OpenSSL.get_curve('secp256k1'))
    priv_key = OpenSSL.BN_bin2bn(secret, 32, 0)
    group = OpenSSL.EC_KEY_get0_group(k)
    pub_key = OpenSSL.EC_POINT_new(group)

    # pub_key = priv_key * generator point of the group.
    OpenSSL.EC_POINT_mul(group, pub_key, priv_key, None, None, None)
    OpenSSL.EC_KEY_set_private_key(k, priv_key)
    OpenSSL.EC_KEY_set_public_key(k, pub_key)
    #print 'priv_key',priv_key
    #print 'pub_key',pub_key

    # First call with a null buffer returns the required size; second call
    # writes the serialized public key into mb.
    size = OpenSSL.i2o_ECPublicKey(k, 0)
    mb = ctypes.create_string_buffer(size)
    OpenSSL.i2o_ECPublicKey(k, ctypes.byref(ctypes.pointer(mb)))
    #print 'mb.raw', mb.raw.encode('hex'), 'length:', len(mb.raw)
    #print 'mb.raw', mb.raw, 'length:', len(mb.raw)

    OpenSSL.EC_POINT_free(pub_key)
    #OpenSSL.BN_CTX_free(ctx)
    OpenSSL.BN_free(priv_key)
    OpenSSL.EC_KEY_free(k)
    return mb.raw
def assembleVersionMessage(remoteHost,remotePort,myStreamNumber):
    """Build and return a complete Bitmessage 'version' message (24 byte
    header followed by the payload) addressed to remoteHost:remotePort for
    the given stream number.

    Removed a stray no-op expression statement ('shared.softwareVersion')
    that previously sat at the top of this function; the value is read
    below when constructing the user agent string.
    """
    payload = ''
    payload += pack('>L',2) #protocol version.
    payload += pack('>q',1) #bitflags of the services I offer.
    payload += pack('>q',int(time.time()))
    payload += pack('>q',1) #boolservices of remote connection. How can I even know this for sure? This is probably ignored by the remote host.
    payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + socket.inet_aton(remoteHost)
    payload += pack('>H',remotePort)#remote IPv6 and port
    payload += pack('>q',1) #bitflags of the services I offer.
    payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + pack('>L',2130706433) # = 127.0.0.1. This will be ignored by the remote host. The actual remote connected IP will be used.
    payload += pack('>H',shared.config.getint('bitmessagesettings', 'port'))#my external IPv6 and port
    random.seed() # NOTE(review): no random draw follows this seed call; it looks unnecessary, but it is kept to preserve the global RNG state seen by other callers.
    payload += eightBytesOfRandomDataUsedToDetectConnectionsToSelf
    userAgent = '/PyBitmessage:' + shared.softwareVersion + '/' #Length of userAgent must be less than 253.
    payload += pack('>B',len(userAgent)) #user agent string length. If the user agent is more than 252 bytes long, this code isn't going to work.
    payload += userAgent
    payload += encodeVarint(1) #The number of streams about which I care. PyBitmessage currently only supports 1 per connection.
    payload += encodeVarint(myStreamNumber)
    datatosend = '\xe9\xbe\xb4\xd9' #magic bits, slightly different from Bitcoin's magic bits.
    datatosend = datatosend + 'version\x00\x00\x00\x00\x00' #version command
    datatosend = datatosend + pack('>L',len(payload)) #payload length
    datatosend = datatosend + hashlib.sha512(payload).digest()[0:4]
    return datatosend + payload
#This thread exists because SQLITE3 is so un-threadsafe that we must submit queries to it and it puts results back in a different queue. They won't let us just use locks.
class sqlThread(threading.Thread):
    """Serializes all SQLite access for the program.

    Other threads hold shared.sqlLock, put a query string and then its
    parameter tuple on shared.sqlSubmitQueue, and read the resulting rows
    from shared.sqlReturnQueue. The special items 'commit', 'exit',
    'movemessagstoprog' and 'movemessagstoappdata' are commands rather
    than queries. On startup this thread creates the schema and applies
    any pending settings-version migrations.
    """
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        """Create/migrate the database, then service the submit queue forever."""
        self.conn = sqlite3.connect(shared.appdata + 'messages.dat' )
        self.conn.text_factory = str
        self.cur = self.conn.cursor()
        try:
            # Fresh install: create the full schema. On an existing database
            # the first CREATE fails and we land in the except clause below.
            self.cur.execute( '''CREATE TABLE inbox (msgid blob, toaddress text, fromaddress text, subject text, received text, message text, folder text, encodingtype int, read bool, UNIQUE(msgid) ON CONFLICT REPLACE)''' )
            self.cur.execute( '''CREATE TABLE sent (msgid blob, toaddress text, toripe blob, fromaddress text, subject text, message text, ackdata blob, lastactiontime integer, status text, pubkeyretrynumber integer, msgretrynumber integer, folder text, encodingtype int)''' )
            self.cur.execute( '''CREATE TABLE subscriptions (label text, address text, enabled bool)''' )
            self.cur.execute( '''CREATE TABLE addressbook (label text, address text)''' )
            self.cur.execute( '''CREATE TABLE blacklist (label text, address text, enabled bool)''' )
            self.cur.execute( '''CREATE TABLE whitelist (label text, address text, enabled bool)''' )
            #Explanation of what is in the pubkeys table:
            #   The hash is the RIPEMD160 hash that is encoded in the Bitmessage address.
            #   transmitdata is literally the data that was included in the Bitmessage pubkey message when it arrived, except for the 24 byte protocol header- ie, it starts with the POW nonce.
            #   time is the time that the pubkey was broadcast on the network same as with every other type of Bitmessage object.
            #   usedpersonally is set to "yes" if we have used the key personally. This keeps us from deleting it because we may want to reply to a message in the future. This field is not a bool because we may need more flexability in the future and it doesn't take up much more space anyway.
            self.cur.execute( '''CREATE TABLE pubkeys (hash blob, transmitdata blob, time int, usedpersonally text, UNIQUE(hash) ON CONFLICT REPLACE)''' )
            self.cur.execute( '''CREATE TABLE inventory (hash blob, objecttype text, streamnumber int, payload blob, receivedtime integer, UNIQUE(hash) ON CONFLICT REPLACE)''' )
            self.cur.execute( '''CREATE TABLE knownnodes (timelastseen int, stream int, services blob, host blob, port blob, UNIQUE(host, stream, port) ON CONFLICT REPLACE)''' ) #This table isn't used in the program yet but I have a feeling that we'll need it.
            self.cur.execute( '''INSERT INTO subscriptions VALUES('Bitmessage new releases/announcements','BM-GtovgYdgs7qXPkoYaRgrLFuFKz1SFpsw',1)''')
            self.cur.execute( '''CREATE TABLE settings (key blob, value blob, UNIQUE(key) ON CONFLICT REPLACE)''' )
            self.cur.execute( '''INSERT INTO settings VALUES('version','1')''')
            self.cur.execute( '''INSERT INTO settings VALUES('lastvacuumtime',?)''',(int(time.time()),))
            self.conn.commit()
            print 'Created messages database file'
        except Exception, err:
            if str(err) == 'table inbox already exists':
                shared.printLock.acquire()
                print 'Database file already exists.'
                shared.printLock.release()
            else:
                sys.stderr.write('ERROR trying to create database file (message.dat). Error message: %s\n' % str(err))
                os._exit(0)
        #People running earlier versions of PyBitmessage do not have the usedpersonally field in their pubkeys table. Let's add it.
        if shared.config.getint('bitmessagesettings','settingsversion') == 2:
            item = '''ALTER TABLE pubkeys ADD usedpersonally text DEFAULT 'no' '''
            parameters = ''
            self.cur.execute(item, parameters)
            self.conn.commit()
            shared.config.set('bitmessagesettings','settingsversion','3')
            with open(shared.appdata + 'keys.dat', 'wb') as configfile:
                shared.config.write(configfile)
        #People running earlier versions of PyBitmessage do not have the encodingtype field in their inbox and sent tables or the read field in the inbox table. Let's add them.
        if shared.config.getint('bitmessagesettings','settingsversion') == 3:
            item = '''ALTER TABLE inbox ADD encodingtype int DEFAULT '2' '''
            parameters = ''
            self.cur.execute(item, parameters)
            item = '''ALTER TABLE inbox ADD read bool DEFAULT '1' '''
            parameters = ''
            self.cur.execute(item, parameters)
            item = '''ALTER TABLE sent ADD encodingtype int DEFAULT '2' '''
            parameters = ''
            self.cur.execute(item, parameters)
            self.conn.commit()
            shared.config.set('bitmessagesettings','settingsversion','4')
            with open(shared.appdata + 'keys.dat', 'wb') as configfile:
                shared.config.write(configfile)
        # Migration 4 -> 5: record the network default POW parameters in keys.dat.
        if shared.config.getint('bitmessagesettings','settingsversion') == 4:
            shared.config.set('bitmessagesettings','defaultnoncetrialsperbyte',str(shared.networkDefaultProofOfWorkNonceTrialsPerByte))
            shared.config.set('bitmessagesettings','defaultpayloadlengthextrabytes',str(shared.networkDefaultPayloadLengthExtraBytes))
            shared.config.set('bitmessagesettings','settingsversion','5')
            with open(shared.appdata + 'keys.dat', 'wb') as configfile:
                shared.config.write(configfile)
        #From now on, let us keep a 'version' embedded in the messages.dat file so that when we make changes to the database, the database version we are on can stay embedded in the messages.dat file. Let us check to see if the settings table exists yet.
        item = '''SELECT name FROM sqlite_master WHERE type='table' AND name='settings';'''
        parameters = ''
        self.cur.execute(item, parameters)
        if self.cur.fetchall() == []:
            #The settings table doesn't exist. We need to make it.
            print 'In messages.dat database, creating new \'settings\' table.'
            self.cur.execute( '''CREATE TABLE settings (key text, value blob, UNIQUE(key) ON CONFLICT REPLACE)''' )
            self.cur.execute( '''INSERT INTO settings VALUES('version','1')''')
            self.cur.execute( '''INSERT INTO settings VALUES('lastvacuumtime',?)''',(int(time.time()),))
            print 'In messages.dat database, removing an obsolete field from the pubkeys table.'
            # SQLite cannot drop a column directly; copy the wanted columns
            # through a temporary table instead.
            self.cur.execute( '''CREATE TEMPORARY TABLE pubkeys_backup(hash blob, transmitdata blob, time int, usedpersonally text, UNIQUE(hash) ON CONFLICT REPLACE);''')
            self.cur.execute( '''INSERT INTO pubkeys_backup SELECT hash, transmitdata, time, usedpersonally FROM pubkeys;''')
            self.cur.execute( '''DROP TABLE pubkeys''')
            self.cur.execute( '''CREATE TABLE pubkeys (hash blob, transmitdata blob, time int, usedpersonally text, UNIQUE(hash) ON CONFLICT REPLACE)''' )
            self.cur.execute( '''INSERT INTO pubkeys SELECT hash, transmitdata, time, usedpersonally FROM pubkeys_backup;''')
            self.cur.execute( '''DROP TABLE pubkeys_backup;''')
            print 'Deleting all pubkeys from inventory. They will be redownloaded and then saved with the correct times.'
            self.cur.execute( '''delete from inventory where objecttype = 'pubkey';''')
            print 'replacing Bitmessage announcements mailing list with a new one.'
            self.cur.execute( '''delete from subscriptions where address='BM-BbkPSZbzPwpVcYZpU4yHwf9ZPEapN5Zx' ''')
            self.cur.execute( '''INSERT INTO subscriptions VALUES('Bitmessage new releases/announcements','BM-GtovgYdgs7qXPkoYaRgrLFuFKz1SFpsw',1)''')
            print 'Commiting.'
            self.conn.commit()
            print 'Vacuuming message.dat. You might notice that the file size gets much smaller.'
            self.cur.execute( ''' VACUUM ''')
        # Sanity check: very old SQLite builds could not store null/blob data
        # correctly; bail out early with a clear message rather than corrupt
        # the user's data later.
        try:
            testpayload = '\x00\x00'
            t = ('1234',testpayload,'12345678','no')
            self.cur.execute( '''INSERT INTO pubkeys VALUES(?,?,?,?)''',t)
            self.conn.commit()
            self.cur.execute('''SELECT transmitdata FROM pubkeys WHERE hash='1234' ''')
            queryreturn = self.cur.fetchall()
            for row in queryreturn:
                transmitdata, = row
            self.cur.execute('''DELETE FROM pubkeys WHERE hash='1234' ''')
            self.conn.commit()
            if transmitdata == '':
                sys.stderr.write('Problem: The version of SQLite you have cannot store Null values. Please download and install the latest revision of your version of Python (for example, the latest Python 2.7 revision) and try again.\n')
                sys.stderr.write('PyBitmessage will now exist very abruptly. You may now see threading errors related to this abrupt exit but the problem you need to solve is related to SQLite.\n\n')
                os._exit(0)
        except Exception, err:
            print err
        #Let us check to see the last time we vacuumed the messages.dat file. If it has been more than a month let's do it now.
        item = '''SELECT value FROM settings WHERE key='lastvacuumtime';'''
        parameters = ''
        self.cur.execute(item, parameters)
        queryreturn = self.cur.fetchall()
        for row in queryreturn:
            value, = row
            if int(value) < int(time.time()) - 2592000:
                print 'It has been a long time since the messages.dat file has been vacuumed. Vacuuming now...'
                self.cur.execute( ''' VACUUM ''')
                item = '''update settings set value=? WHERE key='lastvacuumtime';'''
                parameters = (int(time.time()),)
                self.cur.execute(item, parameters)
        # Main service loop: every item is either a control command or a
        # query string that is immediately followed by its parameter tuple.
        while True:
            item = shared.sqlSubmitQueue.get()
            if item == 'commit':
                self.conn.commit()
            elif item == 'exit':
                self.conn.close()
                print 'sqlThread exiting gracefully.'
                return
            elif item == 'movemessagstoprog':
                shared.printLock.acquire()
                print 'the sqlThread is moving the messages.dat file to the local program directory.'
                shared.printLock.release()
                self.conn.commit()
                self.conn.close()
                shutil.move(shared.lookupAppdataFolder()+'messages.dat','messages.dat')
                self.conn = sqlite3.connect('messages.dat' )
                self.conn.text_factory = str
                self.cur = self.conn.cursor()
            elif item == 'movemessagstoappdata':
                shared.printLock.acquire()
                print 'the sqlThread is moving the messages.dat file to the Appdata folder.'
                shared.printLock.release()
                self.conn.commit()
                self.conn.close()
                shutil.move('messages.dat',shared.lookupAppdataFolder()+'messages.dat')
                self.conn = sqlite3.connect(shared.appdata + 'messages.dat' )
                self.conn.text_factory = str
                self.cur = self.conn.cursor()
            else:
                parameters = shared.sqlSubmitQueue.get()
                #print 'item', item
                #print 'parameters', parameters
                try:
                    self.cur.execute(item, parameters)
                except Exception, err:
                    shared.printLock.acquire()
                    sys.stderr.write('\nMajor error occurred when trying to execute a SQL statement within the sqlThread. Please tell Atheros about this error message or post it in the forum! Error occurred while trying to execute statement: "'+str(item) + '" Here are the parameters; you might want to censor this data with asterisks (***) as it can contain private information: '+str(repr(parameters))+'\nHere is the actual error message thrown by the sqlThread: '+ str(err)+'\n')
                    sys.stderr.write('This program shall now abruptly exit!\n')
                    shared.printLock.release()
                    os._exit(0)
                shared.sqlReturnQueue.put(self.cur.fetchall())
                #shared.sqlSubmitQueue.task_done()
'''The singleCleaner class is a timer-driven thread that cleans data structures to free memory, resends messages when a remote node doesn't respond, and sends pong messages to keep connections alive if the network isn't busy.
It cleans these data structures in memory:
inventory (moves data to the on-disk sql database)
It cleans these tables on the disk:
inventory (clears data more than 2 days and 12 hours old)
pubkeys (clears pubkeys older than 4 weeks old which we have not used personally)
It resends messages when there has been no response:
resends getpubkey messages in 4 days (then 8 days, then 16 days, etc...)
resends msg messages in 4 days (then 8 days, then 16 days, etc...)
'''
class singleCleaner(threading.Thread):
    """Periodic housekeeping thread.

    Every five minutes it flushes aged in-memory inventory entries to the
    sqlite database, asks sendData threads to keep connections alive with
    pongs, and (about every two hours) expires old inventory/pubkey rows
    and resends messages that have gone unanswered.
    """
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        timeWeLastClearedInventoryAndPubkeysTables = 0
        while True:
            shared.sqlLock.acquire()
            #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"Doing housekeeping (Flushing inventory in memory to disk...)")
            shared.UISignalQueue.put(('updateStatusBar','Doing housekeeping (Flushing inventory in memory to disk...)'))
            # Move inventory entries older than an hour out of memory and
            # into the sqlite inventory table.
            for hash, storedValue in shared.inventory.items():
                objectType, streamNumber, payload, receivedTime = storedValue
                if int(time.time())- 3600 > receivedTime:
                    t = (hash,objectType,streamNumber,payload,receivedTime)
                    shared.sqlSubmitQueue.put('''INSERT INTO inventory VALUES (?,?,?,?,?)''')
                    shared.sqlSubmitQueue.put(t)
                    shared.sqlReturnQueue.get()
                    del shared.inventory[hash]
            shared.sqlSubmitQueue.put('commit')
            #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"")
            shared.UISignalQueue.put(('updateStatusBar',''))
            shared.sqlLock.release()
            shared.broadcastToSendDataQueues((0, 'pong', 'no data')) #commands the sendData threads to send out a pong message if they haven't sent anything else in the last five minutes. The socket timeout-time is 10 minutes.
            #If we are running as a daemon then we are going to fill up the UI queue which will never be handled by a UI. We should clear it to save memory.
            if shared.safeConfigGetBoolean('bitmessagesettings','daemon'):
                shared.UISignalQueue.queue.clear()
            # Roughly every two hours (7380 s), expire old rows and resend
            # unacknowledged messages.
            if timeWeLastClearedInventoryAndPubkeysTables < int(time.time()) - 7380:
                timeWeLastClearedInventoryAndPubkeysTables = int(time.time())
                #inventory (moves data from the inventory data structure to the on-disk sql database)
                shared.sqlLock.acquire()
                #inventory (clears data more than 2 days and 12 hours old)
                t = (int(time.time())-lengthOfTimeToLeaveObjectsInInventory,int(time.time())-lengthOfTimeToHoldOnToAllPubkeys)
                shared.sqlSubmitQueue.put('''DELETE FROM inventory WHERE (receivedtime<? AND objecttype<>'pubkey') OR (receivedtime<? AND objecttype='pubkey') ''')
                shared.sqlSubmitQueue.put(t)
                shared.sqlReturnQueue.get()
                #pubkeys
                t = (int(time.time())-lengthOfTimeToHoldOnToAllPubkeys,)
                shared.sqlSubmitQueue.put('''DELETE FROM pubkeys WHERE time<? AND usedpersonally='no' ''')
                shared.sqlSubmitQueue.put(t)
                shared.sqlReturnQueue.get()
                shared.sqlSubmitQueue.put('commit')
                t = ()
                shared.sqlSubmitQueue.put('''select toaddress, toripe, fromaddress, subject, message, ackdata, lastactiontime, status, pubkeyretrynumber, msgretrynumber FROM sent WHERE ((status='findingpubkey' OR status='sentmessage') AND folder='sent') ''') #If the message's folder='trash' then we'll ignore it.
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                for row in queryreturn:
                    toaddress, toripe, fromaddress, subject, message, ackdata, lastactiontime, status, pubkeyretrynumber, msgretrynumber = row
                    if status == 'findingpubkey':
                        # Exponential backoff: the retry interval doubles with
                        # each retry number.
                        if int(time.time()) - lastactiontime > (maximumAgeOfAnObjectThatIAmWillingToAccept * (2 ** (pubkeyretrynumber))):
                            print 'It has been a long time and we haven\'t heard a response to our getpubkey request. Sending again.'
                            try:
                                del neededPubkeys[toripe] #We need to take this entry out of the neededPubkeys structure because the shared.workerQueue checks to see whether the entry is already present and will not do the POW and send the message because it assumes that it has already done it recently.
                            except:
                                pass
                            shared.workerQueue.put(('sendmessage',toaddress))
                            #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"Doing work necessary to again attempt to request a public key...")
                            shared.UISignalQueue.put(('updateStatusBar','Doing work necessary to again attempt to request a public key...'))
                            t = (int(time.time()),pubkeyretrynumber+1,toripe)
                            shared.sqlSubmitQueue.put('''UPDATE sent SET lastactiontime=?, pubkeyretrynumber=? WHERE toripe=?''')
                            shared.sqlSubmitQueue.put(t)
                            shared.sqlReturnQueue.get()
                    else:# status == sentmessage
                        if int(time.time()) - lastactiontime > (maximumAgeOfAnObjectThatIAmWillingToAccept * (2 ** (msgretrynumber))):
                            print 'It has been a long time and we haven\'t heard an acknowledgement to our msg. Sending again.'
                            t = (int(time.time()),msgretrynumber+1,'findingpubkey',ackdata)
                            shared.sqlSubmitQueue.put('''UPDATE sent SET lastactiontime=?, msgretrynumber=?, status=? WHERE ackdata=?''')
                            shared.sqlSubmitQueue.put(t)
                            shared.sqlReturnQueue.get()
                            shared.workerQueue.put(('sendmessage',toaddress))
                            #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"Doing work necessary to again attempt to deliver a message...")
                            shared.UISignalQueue.put(('updateStatusBar','Doing work necessary to again attempt to deliver a message...'))
                shared.sqlSubmitQueue.put('commit')
                shared.sqlLock.release()
            time.sleep(300)
#This thread, of which there is only one, does the heavy lifting: calculating POWs.
class singleWorker(threading.Thread):
    def __init__(self):
        """Initialize the single POW worker thread (started once at startup)."""
        #QThread.__init__(self, parent)
        threading.Thread.__init__(self)
    def run(self):
        """Main loop of the worker thread.

        On startup, resume sends that were interrupted by a previous
        shutdown (messages stuck in 'findingpubkey' or 'doingpow'); then
        block on shared.workerQueue and service commands forever.
        """
        shared.sqlLock.acquire()
        shared.sqlSubmitQueue.put('''SELECT toripe FROM sent WHERE (status=? AND folder='sent')''')
        shared.sqlSubmitQueue.put(('findingpubkey',))
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlLock.release()
        for row in queryreturn:
            toripe, = row
            #It is possible for the status of a message in our sent folder (which is also our 'outbox' folder) to have a status of 'findingpubkey' even if we have the pubkey. This can
            #happen if the worker thread is working on the POW for an earlier message and does not get to the message in question before the user closes Bitmessage. In this case, the
            #status will still be 'findingpubkey' but Bitmessage will never have checked to see whether it actually already has the pubkey. We should therefore check here.
            shared.sqlLock.acquire()
            shared.sqlSubmitQueue.put('''SELECT hash FROM pubkeys WHERE hash=? ''')
            shared.sqlSubmitQueue.put((toripe,))
            queryreturn = shared.sqlReturnQueue.get()
            shared.sqlLock.release()
            if queryreturn != []: #If we have the pubkey then send the message otherwise put the hash in the neededPubkeys data structure so that we will pay attention to it if it comes over the wire.
                self.sendMsg(toripe)
            else:
                neededPubkeys[toripe] = 0
        self.sendBroadcast() #just in case there are any proof of work tasks for Broadcasts that have yet to be sent.
        #Now let us see if there are any proofs of work for msg messages that we have yet to complete..
        shared.sqlLock.acquire()
        t = ('doingpow',)
        shared.sqlSubmitQueue.put('''SELECT toripe FROM sent WHERE status=? and folder='sent' ''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlLock.release()
        for row in queryreturn:
            toripe, = row
            #Evidentially there is a remote possibility that we may, for some reason, no longer have the recipient's pubkey. Let us make sure we still have it or else the sendMsg function will appear to freeze.
            shared.sqlLock.acquire()
            shared.sqlSubmitQueue.put('''SELECT hash FROM pubkeys WHERE hash=? ''')
            shared.sqlSubmitQueue.put((toripe,))
            queryreturn = shared.sqlReturnQueue.get()
            shared.sqlLock.release()
            if queryreturn != []:
                #We have the needed pubkey
                self.sendMsg(toripe)
            else:
                shared.printLock.acquire()
                sys.stderr.write('For some reason, the status of a message in our outbox is \'doingpow\' even though we lack the pubkey. Here is the RIPE hash of the needed pubkey: %s\n' % toripe.encode('hex'))
                shared.printLock.release()
        # Startup recovery complete; now service worker commands forever.
        while True:
            command, data = shared.workerQueue.get()
            #statusbar = 'The singleWorker thread is working on work.'
            #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),statusbar)
            if command == 'sendmessage':
                toAddress = data
                toStatus,toAddressVersionNumber,toStreamNumber,toRipe = decodeAddress(toAddress)
                #print 'message type', type(message)
                #print repr(message.toUtf8())
                #print str(message.toUtf8())
                shared.sqlLock.acquire()
                shared.sqlSubmitQueue.put('SELECT hash FROM pubkeys WHERE hash=?')
                shared.sqlSubmitQueue.put((toRipe,))
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlLock.release()
                #print 'queryreturn', queryreturn
                if queryreturn == []:
                    #We'll need to request the pub key because we don't have it.
                    if not toRipe in neededPubkeys:
                        neededPubkeys[toRipe] = 0
                        print 'requesting pubkey:', toRipe.encode('hex')
                        self.requestPubKey(toAddressVersionNumber,toStreamNumber,toRipe)
                    else:
                        print 'We have already requested this pubkey (the ripe hash is in neededPubkeys). We will re-request again soon.'
                        #self.emit(SIGNAL("updateSentItemStatusByHash(PyQt_PyObject,PyQt_PyObject)"),toRipe,'Public key was requested earlier. Receiver must be offline. Will retry.')
                        shared.UISignalQueue.put(('updateSentItemStatusByHash',(toRipe,'Public key was requested earlier. Receiver must be offline. Will retry.')))
                else:
                    print 'We already have the necessary public key.'
                    self.sendMsg(toRipe) #by calling this function, we are asserting that we already have the pubkey for toRipe
            elif command == 'sendbroadcast':
                print 'Within WorkerThread, processing sendbroadcast command.'
                fromAddress,subject,message = data
                self.sendBroadcast()
            elif command == 'doPOWForMyV2Pubkey':
                self.doPOWForMyV2Pubkey(data)
            elif command == 'doPOWForMyV3Pubkey':
                self.doPOWForMyV3Pubkey(data)
            elif command == 'newpubkey':
                # A pubkey just arrived over the wire; if we were waiting for
                # it, send the pending message now.
                toAddressVersion,toStreamNumber,toRipe = data
                if toRipe in neededPubkeys:
                    print 'We have been awaiting the arrival of this pubkey.'
                    del neededPubkeys[toRipe]
                    self.sendMsg(toRipe)
                else:
                    shared.printLock.acquire()
                    print 'We don\'t need this pub key. We didn\'t ask for it. Pubkey hash:', toRipe.encode('hex')
                    shared.printLock.release()
            else:
                shared.printLock.acquire()
                sys.stderr.write('Probable programming error: The command sent to the workerThread is weird. It is: %s\n' % command)
                shared.printLock.release()
            shared.workerQueue.task_done()
    def doPOWForMyV2Pubkey(self,hash): #This function also broadcasts out the pubkey message once it is done with the POW
        """Assemble a version 2 pubkey message for one of our own addresses
        (identified by its ripe ``hash``), compute its proof of work, place
        it in the inventory, and advertise it to connected peers.
        """
        #Look up my stream number based on my address hash
        """configSections = shared.config.sections()
        for addressInKeysFile in configSections:
            if addressInKeysFile <> 'bitmessagesettings':
                status,addressVersionNumber,streamNumber,hashFromThisParticularAddress = decodeAddress(addressInKeysFile)
                if hash == hashFromThisParticularAddress:
                    myAddress = addressInKeysFile
                    break"""
        myAddress = shared.myAddressesByHash[hash]
        # decodeAddress rebinds 'hash' to the ripe decoded from the address,
        # which should equal the argument we were given.
        status,addressVersionNumber,streamNumber,hash = decodeAddress(myAddress)
        embeddedTime = int(time.time()+random.randrange(-300, 300)) #the current time plus or minus five minutes
        payload = pack('>I',(embeddedTime)) # 4-byte big-endian timestamp
        payload += encodeVarint(addressVersionNumber) #Address version number
        payload += encodeVarint(streamNumber)
        payload += '\x00\x00\x00\x01' #bitfield of features supported by me (see the wiki).
        try:
            privSigningKeyBase58 = shared.config.get(myAddress, 'privsigningkey')
            privEncryptionKeyBase58 = shared.config.get(myAddress, 'privencryptionkey')
        except Exception, err:
            shared.printLock.acquire()
            sys.stderr.write('Error within doPOWForMyV2Pubkey. Could not read the keys from the keys.dat file for a requested address. %s\n' % err)
            shared.printLock.release()
            return
        privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
        privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
        pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex')
        pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
        # The leading encoding byte of each public key is not sent on the wire.
        payload += pubSigningKey[1:]
        payload += pubEncryptionKey[1:]
        #Do the POW for this pubkey message
        nonce = 0
        trialValue = 99999999999999999999 # sentinel larger than any 64-bit trial value
        # Python 2 integer division; target shrinks as the payload grows.
        target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
        print '(For pubkey message) Doing proof of work...'
        initialHash = hashlib.sha512(payload).digest()
        while trialValue > target:
            nonce += 1
            trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
        print '(For pubkey message) Found proof of work', trialValue, 'Nonce:', nonce
        payload = pack('>Q',nonce) + payload
        """t = (hash,payload,embeddedTime,'no')
        shared.sqlLock.acquire()
        shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlSubmitQueue.put('commit')
        shared.sqlLock.release()"""
        inventoryHash = calculateInventoryHash(payload)
        objectType = 'pubkey'
        shared.inventory[inventoryHash] = (objectType, streamNumber, payload, embeddedTime)
        shared.printLock.acquire()
        print 'broadcasting inv with hash:', inventoryHash.encode('hex')
        shared.printLock.release()
        shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
        #self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"")
        shared.UISignalQueue.put(('updateStatusBar',''))
        # Record when we last published this pubkey and persist keys.dat.
        shared.config.set(myAddress,'lastpubkeysendtime',str(int(time.time())))
        with open(shared.appdata + 'keys.dat', 'wb') as configfile:
            shared.config.write(configfile)
def doPOWForMyV3Pubkey(self,hash): #This function also broadcasts out the pubkey message once it is done with the POW
myAddress = shared.myAddressesByHash[hash]
status,addressVersionNumber,streamNumber,hash = decodeAddress(myAddress)
embeddedTime = int(time.time()+random.randrange(-300, 300)) #the current time plus or minus five minutes
payload = pack('>I',(embeddedTime))
payload += encodeVarint(addressVersionNumber) #Address version number
payload += encodeVarint(streamNumber)
payload += '\x00\x00\x00\x01' #bitfield of features supported by me (see the wiki).
try:
privSigningKeyBase58 = shared.config.get(myAddress, 'privsigningkey')
privEncryptionKeyBase58 = shared.config.get(myAddress, 'privencryptionkey')
except Exception, err:
shared.printLock.acquire()
sys.stderr.write('Error within doPOWForMyV3Pubkey. Could not read the keys from the keys.dat file for a requested address. %s\n' % err)
shared.printLock.release()
return
privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex')
pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
payload += pubSigningKey[1:]
payload += pubEncryptionKey[1:]
payload += encodeVarint(shared.config.getint(myAddress,'noncetrialsperbyte'))
payload += encodeVarint(shared.config.getint(myAddress,'payloadlengthextrabytes'))
signature = highlevelcrypto.sign(payload,privSigningKeyHex)
payload += encodeVarint(len(signature))
payload += signature
#Do the POW for this pubkey message
nonce = 0
trialValue = 99999999999999999999
target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
print '(For pubkey message) Doing proof of work...'
initialHash = hashlib.sha512(payload).digest()
while trialValue > target:
nonce += 1
trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
print '(For pubkey message) Found proof of work', trialValue, 'Nonce:', nonce
payload = pack('>Q',nonce) + payload
"""t = (hash,payload,embeddedTime,'no')
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''INSERT INTO pubkeys VALUES (?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()"""
inventoryHash = calculateInventoryHash(payload)
objectType = 'pubkey'
shared.inventory[inventoryHash] = (objectType, streamNumber, payload, embeddedTime)
shared.printLock.acquire()
print 'broadcasting inv with hash:', inventoryHash.encode('hex')
shared.printLock.release()
shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),"")
shared.UISignalQueue.put(('updateStatusBar',''))
shared.config.set(myAddress,'lastpubkeysendtime',str(int(time.time())))
with open(shared.appdata + 'keys.dat', 'wb') as configfile:
shared.config.write(configfile)
def sendBroadcast(self):
shared.sqlLock.acquire()
t = ('broadcastpending',)
shared.sqlSubmitQueue.put('''SELECT fromaddress, subject, message, ackdata FROM sent WHERE status=? and folder='sent' ''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
for row in queryreturn:
fromaddress, subject, body, ackdata = row
status,addressVersionNumber,streamNumber,ripe = decodeAddress(fromaddress)
if addressVersionNumber == 2 and int(time.time()) < encryptedBroadcastSwitchoverTime:
#We need to convert our private keys to public keys in order to include them.
try:
privSigningKeyBase58 = shared.config.get(fromaddress, 'privsigningkey')
privEncryptionKeyBase58 = shared.config.get(fromaddress, 'privencryptionkey')
except:
#self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')
shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')))
continue
privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex') #At this time these pubkeys are 65 bytes long because they include the encoding byte which we won't be sending in the broadcast message.
pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
payload = pack('>I',(int(time.time())+random.randrange(-300, 300)))#the current time plus or minus five minutes
payload += encodeVarint(1) #broadcast version
payload += encodeVarint(addressVersionNumber)
payload += encodeVarint(streamNumber)
payload += '\x00\x00\x00\x01' #behavior bitfield
payload += pubSigningKey[1:]
payload += pubEncryptionKey[1:]
payload += ripe
payload += '\x02' #message encoding type
payload += encodeVarint(len('Subject:' + subject + '\n' + 'Body:' + body)) #Type 2 is simple UTF-8 message encoding.
payload += 'Subject:' + subject + '\n' + 'Body:' + body
signature = highlevelcrypto.sign(payload,privSigningKeyHex)
payload += encodeVarint(len(signature))
payload += signature
nonce = 0
trialValue = 99999999999999999999
target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
print '(For broadcast message) Doing proof of work...'
#self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Doing work necessary to send broadcast...')
shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Doing work necessary to send broadcast...')))
initialHash = hashlib.sha512(payload).digest()
while trialValue > target:
nonce += 1
trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
print '(For broadcast message) Found proof of work', trialValue, 'Nonce:', nonce
payload = pack('>Q',nonce) + payload
inventoryHash = calculateInventoryHash(payload)
objectType = 'broadcast'
shared.inventory[inventoryHash] = (objectType, streamNumber, payload, int(time.time()))
print 'sending inv (within sendBroadcast function)'
shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
#self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Broadcast sent on '+unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))
shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Broadcast sent on '+unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))))
#Update the status of the message in the 'sent' table to have a 'broadcastsent' status
shared.sqlLock.acquire()
t = ('broadcastsent',int(time.time()),fromaddress, subject, body,'broadcastpending')
shared.sqlSubmitQueue.put('UPDATE sent SET status=?, lastactiontime=? WHERE fromaddress=? AND subject=? AND message=? AND status=?')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
elif addressVersionNumber == 3 or int(time.time()) > encryptedBroadcastSwitchoverTime:
#We need to convert our private keys to public keys in order to include them.
try:
privSigningKeyBase58 = shared.config.get(fromaddress, 'privsigningkey')
privEncryptionKeyBase58 = shared.config.get(fromaddress, 'privencryptionkey')
except:
#self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')
shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')))
continue
privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex') #At this time these pubkeys are 65 bytes long because they include the encoding byte which we won't be sending in the broadcast message.
pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
payload = pack('>I',(int(time.time())+random.randrange(-300, 300)))#the current time plus or minus five minutes
payload += encodeVarint(2) #broadcast version
payload += encodeVarint(streamNumber)
dataToEncrypt = encodeVarint(2) #broadcast version
dataToEncrypt += encodeVarint(addressVersionNumber)
dataToEncrypt += encodeVarint(streamNumber)
dataToEncrypt += '\x00\x00\x00\x01' #behavior bitfield
dataToEncrypt += pubSigningKey[1:]
dataToEncrypt += pubEncryptionKey[1:]
if addressVersionNumber >= 3:
dataToEncrypt += encodeVarint(shared.config.getint(fromaddress,'noncetrialsperbyte'))
dataToEncrypt += encodeVarint(shared.config.getint(fromaddress,'payloadlengthextrabytes'))
dataToEncrypt += '\x02' #message encoding type
dataToEncrypt += encodeVarint(len('Subject:' + subject + '\n' + 'Body:' + body)) #Type 2 is simple UTF-8 message encoding.
dataToEncrypt += 'Subject:' + subject + '\n' + 'Body:' + body
signature = highlevelcrypto.sign(payload,privSigningKeyHex)
dataToEncrypt += encodeVarint(len(signature))
dataToEncrypt += signature
privEncryptionKey = hashlib.sha512(encodeVarint(addressVersionNumber)+encodeVarint(streamNumber)+ripe).digest()[:32]
pubEncryptionKey = pointMult(privEncryptionKey)
payload += highlevelcrypto.encrypt(dataToEncrypt,pubEncryptionKey.encode('hex'))
nonce = 0
trialValue = 99999999999999999999
target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
print '(For broadcast message) Doing proof of work...'
#self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Doing work necessary to send broadcast...')
shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Doing work necessary to send broadcast...')))
initialHash = hashlib.sha512(payload).digest()
while trialValue > target:
nonce += 1
trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
print '(For broadcast message) Found proof of work', trialValue, 'Nonce:', nonce
payload = pack('>Q',nonce) + payload
inventoryHash = calculateInventoryHash(payload)
objectType = 'broadcast'
shared.inventory[inventoryHash] = (objectType, streamNumber, payload, int(time.time()))
print 'sending inv (within sendBroadcast function)'
shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
#self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Broadcast sent on '+unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))
shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Broadcast sent on '+unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))))
#Update the status of the message in the 'sent' table to have a 'broadcastsent' status
shared.sqlLock.acquire()
t = ('broadcastsent',int(time.time()),fromaddress, subject, body,'broadcastpending')
shared.sqlSubmitQueue.put('UPDATE sent SET status=?, lastactiontime=? WHERE fromaddress=? AND subject=? AND message=? AND status=?')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
else:
shared.printLock.acquire()
sys.stderr.write('Error: In the singleWorker thread, the sendBroadcast function doesn\'t understand the address version.\n')
shared.printLock.release()
    def sendMsg(self,toRipe):
        """Encrypt, proof-of-work and transmit every queued message whose
        recipient's ripe hash is ``toRipe``.

        The caller asserts that we already hold the recipient's pubkey in
        the pubkeys table. Matching 'sent' rows move from 'findingpubkey'
        to 'doingpow'; each message is assembled (including an
        acknowledgement object whose POW we pre-compute for the recipient),
        signed, encrypted to the recipient's encryption key, worked at the
        difficulty the recipient's pubkey demands, and advertised.
        """
        # Flip the queued rows to 'doingpow' and read them back.
        shared.sqlLock.acquire()
        t = ('doingpow','findingpubkey',toRipe)
        shared.sqlSubmitQueue.put('''UPDATE sent SET status=? WHERE status=? AND toripe=? and folder='sent' ''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlSubmitQueue.put('commit')
        t = ('doingpow',toRipe)
        shared.sqlSubmitQueue.put('''SELECT toaddress, fromaddress, subject, message, ackdata FROM sent WHERE status=? AND toripe=? and folder='sent' ''')
        shared.sqlSubmitQueue.put(t)
        queryreturn = shared.sqlReturnQueue.get()
        shared.sqlLock.release()
        for row in queryreturn:
            toaddress, fromaddress, subject, message, ackdata = row
            # Register the ackdata so incoming acknowledgements can be
            # matched back to this message.
            ackdataForWhichImWatching[ackdata] = 0
            toStatus,toAddressVersionNumber,toStreamNumber,toHash = decodeAddress(toaddress)
            fromStatus,fromAddressVersionNumber,fromStreamNumber,fromHash = decodeAddress(fromaddress)
            #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Doing work necessary to send the message.')
            shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Doing work necessary to send the message.')))
            shared.printLock.acquire()
            print 'Found a message in our database that needs to be sent with this pubkey.'
            print 'First 150 characters of message:', message[:150]
            shared.printLock.release()
            embeddedTime = pack('>I',(int(time.time())+random.randrange(-300, 300)))#the current time plus or minus five minutes. We will use this time both for our message and for the ackdata packed within our message.
            # Build the to-be-encrypted message data for a version 2 sender.
            if fromAddressVersionNumber == 2:
                payload = '\x01' #Message version.
                payload += encodeVarint(fromAddressVersionNumber)
                payload += encodeVarint(fromStreamNumber)
                payload += '\x00\x00\x00\x01' #Bitfield of features and behaviors that can be expected from me. (See https://bitmessage.org/wiki/Protocol_specification#Pubkey_bitfield_features )
                #We need to convert our private keys to public keys in order to include them.
                try:
                    privSigningKeyBase58 = shared.config.get(fromaddress, 'privsigningkey')
                    privEncryptionKeyBase58 = shared.config.get(fromaddress, 'privencryptionkey')
                except:
                    #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')
                    shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')))
                    continue
                privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
                privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
                pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex')
                pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
                payload += pubSigningKey[1:] #The \x04 on the beginning of the public keys are not sent. This way there is only one acceptable way to encode and send a public key.
                payload += pubEncryptionKey[1:]
                payload += toHash #This hash will be checked by the receiver of the message to verify that toHash belongs to them. This prevents a Surreptitious Forwarding Attack.
                payload += '\x02' #Type 2 is simple UTF-8 message encoding as specified on the Protocol Specification on the Bitmessage Wiki.
                messageToTransmit = 'Subject:' + subject + '\n' + 'Body:' + message
                payload += encodeVarint(len(messageToTransmit))
                payload += messageToTransmit
                fullAckPayload = self.generateFullAckMessage(ackdata,toStreamNumber,embeddedTime)#The fullAckPayload is a normal msg protocol message with the proof of work already completed that the receiver of this message can easily send out.
                payload += encodeVarint(len(fullAckPayload))
                payload += fullAckPayload
                signature = highlevelcrypto.sign(payload,privSigningKeyHex)
                payload += encodeVarint(len(signature))
                payload += signature
            # Version 3 senders additionally advertise the POW difficulty
            # they require of correspondents.
            # NOTE(review): if fromAddressVersionNumber is neither 2 nor 3,
            # 'payload' is never assigned and the encryption step below
            # would raise NameError — presumably unreachable; confirm.
            if fromAddressVersionNumber == 3:
                payload = '\x01' #Message version.
                payload += encodeVarint(fromAddressVersionNumber)
                payload += encodeVarint(fromStreamNumber)
                payload += '\x00\x00\x00\x01' #Bitfield of features and behaviors that can be expected from me. (See https://bitmessage.org/wiki/Protocol_specification#Pubkey_bitfield_features )
                #We need to convert our private keys to public keys in order to include them.
                try:
                    privSigningKeyBase58 = shared.config.get(fromaddress, 'privsigningkey')
                    privEncryptionKeyBase58 = shared.config.get(fromaddress, 'privencryptionkey')
                except:
                    #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')
                    shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Error! Could not find sender address (your address) in the keys.dat file.')))
                    continue
                privSigningKeyHex = shared.decodeWalletImportFormat(privSigningKeyBase58).encode('hex')
                privEncryptionKeyHex = shared.decodeWalletImportFormat(privEncryptionKeyBase58).encode('hex')
                pubSigningKey = highlevelcrypto.privToPub(privSigningKeyHex).decode('hex')
                pubEncryptionKey = highlevelcrypto.privToPub(privEncryptionKeyHex).decode('hex')
                payload += pubSigningKey[1:] #The \x04 on the beginning of the public keys are not sent. This way there is only one acceptable way to encode and send a public key.
                payload += pubEncryptionKey[1:]
                #If the receiver of our message is in our address book, subscriptions list, or whitelist then we will allow them to do the network-minimum proof of work. Let us check to see if the receiver is in any of those lists.
                if shared.isAddressInMyAddressBookSubscriptionsListOrWhitelist(toaddress):
                    payload += encodeVarint(shared.networkDefaultProofOfWorkNonceTrialsPerByte)
                    payload += encodeVarint(shared.networkDefaultPayloadLengthExtraBytes)
                else:
                    payload += encodeVarint(shared.config.getint(fromaddress,'noncetrialsperbyte'))
                    payload += encodeVarint(shared.config.getint(fromaddress,'payloadlengthextrabytes'))
                payload += toHash #This hash will be checked by the receiver of the message to verify that toHash belongs to them. This prevents a Surreptitious Forwarding Attack.
                payload += '\x02' #Type 2 is simple UTF-8 message encoding as specified on the Protocol Specification on the Bitmessage Wiki.
                messageToTransmit = 'Subject:' + subject + '\n' + 'Body:' + message
                payload += encodeVarint(len(messageToTransmit))
                payload += messageToTransmit
                fullAckPayload = self.generateFullAckMessage(ackdata,toStreamNumber,embeddedTime)#The fullAckPayload is a normal msg protocol message with the proof of work already completed that the receiver of this message can easily send out.
                payload += encodeVarint(len(fullAckPayload))
                payload += fullAckPayload
                signature = highlevelcrypto.sign(payload,privSigningKeyHex)
                payload += encodeVarint(len(signature))
                payload += signature
            #We have assembled the data that will be encrypted. Now let us fetch the recipient's public key out of our database and do the encryption.
            if toAddressVersionNumber == 2 or toAddressVersionNumber == 3:
                shared.sqlLock.acquire()
                shared.sqlSubmitQueue.put('SELECT transmitdata FROM pubkeys WHERE hash=?')
                shared.sqlSubmitQueue.put((toRipe,))
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlLock.release()
                if queryreturn == []:
                    shared.printLock.acquire()
                    sys.stderr.write('(within sendMsg) The needed pubkey was not found. This should never happen. Aborting send.\n')
                    shared.printLock.release()
                    return
                for row in queryreturn:
                    pubkeyPayload, = row
                #The pubkey is stored the way we originally received it which means that we need to read beyond things like the nonce and time to get to the public keys.
                readPosition = 8 #to bypass the nonce
                readPosition += 4 #to bypass the embedded time
                readPosition += 1 #to bypass the address version whose length is definitely 1
                streamNumber, streamNumberLength = decodeVarint(pubkeyPayload[readPosition:readPosition+10])
                readPosition += streamNumberLength
                behaviorBitfield = pubkeyPayload[readPosition:readPosition+4]
                readPosition += 4 #to bypass the bitfield of behaviors
                #pubSigningKeyBase256 = pubkeyPayload[readPosition:readPosition+64] #We don't use this key for anything here.
                readPosition += 64
                pubEncryptionKeyBase256 = pubkeyPayload[readPosition:readPosition+64]
                readPosition += 64
                # Determine the POW difficulty the recipient demands. V2
                # pubkeys cannot specify one, so the network default applies.
                if toAddressVersionNumber == 2:
                    requiredAverageProofOfWorkNonceTrialsPerByte = shared.networkDefaultProofOfWorkNonceTrialsPerByte
                    requiredPayloadLengthExtraBytes = shared.networkDefaultPayloadLengthExtraBytes
                elif toAddressVersionNumber == 3:
                    requiredAverageProofOfWorkNonceTrialsPerByte, varintLength = decodeVarint(pubkeyPayload[readPosition:readPosition+10])
                    readPosition += varintLength
                    requiredPayloadLengthExtraBytes, varintLength = decodeVarint(pubkeyPayload[readPosition:readPosition+10])
                    readPosition += varintLength
                    if requiredAverageProofOfWorkNonceTrialsPerByte < shared.networkDefaultProofOfWorkNonceTrialsPerByte: #We still have to meet a minimum POW difficulty regardless of what they say is allowed in order to get our message to propagate through the network.
                        requiredAverageProofOfWorkNonceTrialsPerByte = shared.networkDefaultProofOfWorkNonceTrialsPerByte
                    if requiredPayloadLengthExtraBytes < shared.networkDefaultPayloadLengthExtraBytes:
                        requiredPayloadLengthExtraBytes = shared.networkDefaultPayloadLengthExtraBytes
                # Encrypt to the recipient's encryption key ("04" restores
                # the uncompressed-point prefix stripped on the wire).
                encrypted = highlevelcrypto.encrypt(payload,"04"+pubEncryptionKeyBase256.encode('hex'))
                nonce = 0
                trialValue = 99999999999999999999
                #We are now dropping the unencrypted data in payload since it has already been encrypted and replacing it with the encrypted payload that we will send out.
                payload = embeddedTime + encodeVarint(toStreamNumber) + encrypted
                target = 2**64 / ((len(payload)+requiredPayloadLengthExtraBytes+8) * requiredAverageProofOfWorkNonceTrialsPerByte)
                shared.printLock.acquire()
                print '(For msg message) Doing proof of work. Total required difficulty:', float(requiredAverageProofOfWorkNonceTrialsPerByte)/shared.networkDefaultProofOfWorkNonceTrialsPerByte,'Required small message difficulty:', float(requiredPayloadLengthExtraBytes)/shared.networkDefaultPayloadLengthExtraBytes
                shared.printLock.release()
                powStartTime = time.time()
                initialHash = hashlib.sha512(payload).digest()
                while trialValue > target:
                    nonce += 1
                    trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
                print '(For msg message) Found proof of work', trialValue, 'Nonce:', nonce
                try:
                    print 'POW took', int(time.time()-powStartTime), 'seconds.', nonce/(time.time()-powStartTime), 'nonce trials per second.'
                except:
                    pass
                payload = pack('>Q',nonce) + payload
                inventoryHash = calculateInventoryHash(payload)
                objectType = 'msg'
                shared.inventory[inventoryHash] = (objectType, toStreamNumber, payload, int(time.time()))
                #self.emit(SIGNAL("updateSentItemStatusByAckdata(PyQt_PyObject,PyQt_PyObject)"),ackdata,'Message sent. Waiting on acknowledgement. Sent on ' + unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))
                shared.UISignalQueue.put(('updateSentItemStatusByAckdata',(ackdata,'Message sent. Waiting on acknowledgement. Sent on ' + unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))))
                print 'sending inv (within sendmsg function)'
                # NOTE(review): this advertises on streamNumber (parsed above
                # from the recipient's stored pubkey) while the inventory
                # entry uses toStreamNumber; they should be equal in practice,
                # but toStreamNumber looks like the intended value — confirm.
                shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
                #Update the status of the message in the 'sent' table to have a 'sent' status
                shared.sqlLock.acquire()
                t = ('sentmessage',toaddress, fromaddress, subject, message,'doingpow')
                shared.sqlSubmitQueue.put('UPDATE sent SET status=? WHERE toaddress=? AND fromaddress=? AND subject=? AND message=? AND status=?')
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                t = (toRipe,)
                shared.sqlSubmitQueue.put('''UPDATE pubkeys SET usedpersonally='yes' WHERE hash=?''')
                shared.sqlSubmitQueue.put(t)
                queryreturn = shared.sqlReturnQueue.get()
                shared.sqlSubmitQueue.put('commit')
                shared.sqlLock.release()
def requestPubKey(self,addressVersionNumber,streamNumber,ripe):
payload = pack('>I',(int(time.time())+random.randrange(-300, 300)))#the current time plus or minus five minutes.
payload += encodeVarint(addressVersionNumber)
payload += encodeVarint(streamNumber)
payload += ripe
shared.printLock.acquire()
print 'making request for pubkey with ripe:', ripe.encode('hex')
shared.printLock.release()
nonce = 0
trialValue = 99999999999999999999
#print 'trial value', trialValue
statusbar = 'Doing the computations necessary to request the recipient\'s public key.'
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),statusbar)
shared.UISignalQueue.put(('updateStatusBar',statusbar))
#self.emit(SIGNAL("updateSentItemStatusByHash(PyQt_PyObject,PyQt_PyObject)"),ripe,'Doing work necessary to request public key.')
shared.UISignalQueue.put(('updateSentItemStatusByHash',(ripe,'Doing work necessary to request public key.')))
print 'Doing proof-of-work necessary to send getpubkey message.'
target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
initialHash = hashlib.sha512(payload).digest()
while trialValue > target:
nonce += 1
trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
shared.printLock.acquire()
print 'Found proof of work', trialValue, 'Nonce:', nonce
shared.printLock.release()
payload = pack('>Q',nonce) + payload
inventoryHash = calculateInventoryHash(payload)
objectType = 'getpubkey'
shared.inventory[inventoryHash] = (objectType, streamNumber, payload, int(time.time()))
print 'sending inv (for the getpubkey message)'
shared.broadcastToSendDataQueues((streamNumber, 'sendinv', inventoryHash))
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),'Broacasting the public key request. This program will auto-retry if they are offline.')
shared.UISignalQueue.put(('updateStatusBar','Broacasting the public key request. This program will auto-retry if they are offline.'))
#self.emit(SIGNAL("updateSentItemStatusByHash(PyQt_PyObject,PyQt_PyObject)"),ripe,'Sending public key request. Waiting for reply. Requested at ' + unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))
shared.UISignalQueue.put(('updateSentItemStatusByHash',(ripe,'Sending public key request. Waiting for reply. Requested at ' + unicode(strftime(shared.config.get('bitmessagesettings', 'timeformat'),localtime(int(time.time()))),'utf-8'))))
def generateFullAckMessage(self,ackdata,toStreamNumber,embeddedTime):
nonce = 0
trialValue = 99999999999999999999
payload = embeddedTime + encodeVarint(toStreamNumber) + ackdata
target = 2**64 / ((len(payload)+shared.networkDefaultPayloadLengthExtraBytes+8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
shared.printLock.acquire()
print '(For ack message) Doing proof of work...'
shared.printLock.release()
powStartTime = time.time()
initialHash = hashlib.sha512(payload).digest()
while trialValue > target:
nonce += 1
trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
shared.printLock.acquire()
print '(For ack message) Found proof of work', trialValue, 'Nonce:', nonce
try:
print 'POW took', int(time.time()-powStartTime), 'seconds.', nonce/(time.time()-powStartTime), 'nonce trials per second.'
except:
pass
shared.printLock.release()
payload = pack('>Q',nonce) + payload
headerData = '\xe9\xbe\xb4\xd9' #magic bits, slighly different from Bitcoin's magic bits.
headerData += 'msg\x00\x00\x00\x00\x00\x00\x00\x00\x00'
headerData += pack('>L',len(payload))
headerData += hashlib.sha512(payload).digest()[:4]
return headerData + payload
class addressGenerator(threading.Thread):
    def __init__(self):
        """Plain threading.Thread subclass; the work happens in run()."""
        #QThread.__init__(self, parent)
        threading.Thread.__init__(self)
def run(self):
while True:
queueValue = shared.addressGeneratorQueue.get()
nonceTrialsPerByte = 0
payloadLengthExtraBytes = 0
if len(queueValue) == 6:
addressVersionNumber,streamNumber,label,numberOfAddressesToMake,deterministicPassphrase,eighteenByteRipe = queueValue
elif len(queueValue) == 8:
addressVersionNumber,streamNumber,label,numberOfAddressesToMake,deterministicPassphrase,eighteenByteRipe,nonceTrialsPerByte,payloadLengthExtraBytes = queueValue
else:
sys.stderr.write('Programming error: A structure with the wrong number of values was passed into the addressGeneratorQueue. Here is the queueValue: %s\n' % queueValue)
if addressVersionNumber < 3 or addressVersionNumber > 3:
sys.stderr.write('Program error: For some reason the address generator queue has been given a request to create at least one version %s address which it cannot do.\n' % addressVersionNumber)
if nonceTrialsPerByte == 0:
nonceTrialsPerByte = shared.config.getint('bitmessagesettings','defaultnoncetrialsperbyte')
if nonceTrialsPerByte < shared.networkDefaultProofOfWorkNonceTrialsPerByte:
nonceTrialsPerByte = shared.networkDefaultProofOfWorkNonceTrialsPerByte
if payloadLengthExtraBytes == 0:
payloadLengthExtraBytes = shared.config.getint('bitmessagesettings','defaultpayloadlengthextrabytes')
if payloadLengthExtraBytes < shared.networkDefaultPayloadLengthExtraBytes:
payloadLengthExtraBytes = shared.networkDefaultPayloadLengthExtraBytes
if addressVersionNumber == 3: #currently the only one supported.
if deterministicPassphrase == "":
shared.UISignalQueue.put(('updateStatusBar','Generating one new address'))
#This next section is a little bit strange. We're going to generate keys over and over until we
#find one that starts with either \x00 or \x00\x00. Then when we pack them into a Bitmessage address,
#we won't store the \x00 or \x00\x00 bytes thus making the address shorter.
startTime = time.time()
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix = 0
potentialPrivSigningKey = OpenSSL.rand(32)
potentialPubSigningKey = pointMult(potentialPrivSigningKey)
while True:
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix += 1
potentialPrivEncryptionKey = OpenSSL.rand(32)
potentialPubEncryptionKey = pointMult(potentialPrivEncryptionKey)
#print 'potentialPubSigningKey', potentialPubSigningKey.encode('hex')
#print 'potentialPubEncryptionKey', potentialPubEncryptionKey.encode('hex')
ripe = hashlib.new('ripemd160')
sha = hashlib.new('sha512')
sha.update(potentialPubSigningKey+potentialPubEncryptionKey)
ripe.update(sha.digest())
#print 'potential ripe.digest', ripe.digest().encode('hex')
if eighteenByteRipe:
if ripe.digest()[:2] == '\x00\x00':
break
else:
if ripe.digest()[:1] == '\x00':
break
print 'Generated address with ripe digest:', ripe.digest().encode('hex')
print 'Address generator calculated', numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix, 'addresses at', numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix/(time.time()-startTime),'addresses per second before finding one with the correct ripe-prefix.'
address = encodeAddress(3,streamNumber,ripe.digest())
#An excellent way for us to store our keys is in Wallet Import Format. Let us convert now.
#https://en.bitcoin.it/wiki/Wallet_import_format
privSigningKey = '\x80'+potentialPrivSigningKey
checksum = hashlib.sha256(hashlib.sha256(privSigningKey).digest()).digest()[0:4]
privSigningKeyWIF = arithmetic.changebase(privSigningKey + checksum,256,58)
#print 'privSigningKeyWIF',privSigningKeyWIF
privEncryptionKey = '\x80'+potentialPrivEncryptionKey
checksum = hashlib.sha256(hashlib.sha256(privEncryptionKey).digest()).digest()[0:4]
privEncryptionKeyWIF = arithmetic.changebase(privEncryptionKey + checksum,256,58)
#print 'privEncryptionKeyWIF',privEncryptionKeyWIF
shared.config.add_section(address)
shared.config.set(address,'label',label)
shared.config.set(address,'enabled','true')
shared.config.set(address,'decoy','false')
shared.config.set(address,'noncetrialsperbyte',str(nonceTrialsPerByte))
shared.config.set(address,'payloadlengthextrabytes',str(payloadLengthExtraBytes))
shared.config.set(address,'privSigningKey',privSigningKeyWIF)
shared.config.set(address,'privEncryptionKey',privEncryptionKeyWIF)
with open(shared.appdata + 'keys.dat', 'wb') as configfile:
shared.config.write(configfile)
#It may be the case that this address is being generated as a result of a call to the API. Let us put the result in the necessary queue.
apiAddressGeneratorReturnQueue.put(address)
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),'Done generating address. Doing work necessary to broadcast it...')
shared.UISignalQueue.put(('updateStatusBar','Done generating address. Doing work necessary to broadcast it...'))
#self.emit(SIGNAL("writeNewAddressToTable(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),self.label,address,str(streamNumber))
shared.UISignalQueue.put(('writeNewAddressToTable',(label,address,streamNumber)))
shared.reloadMyAddressHashes()
shared.workerQueue.put(('doPOWForMyV3Pubkey',ripe.digest()))
else: #There is something in the deterministicPassphrase variable thus we are going to do this deterministically.
statusbar = 'Generating '+str(numberOfAddressesToMake) + ' new addresses.'
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),statusbar)
shared.UISignalQueue.put(('updateStatusBar',statusbar))
signingKeyNonce = 0
encryptionKeyNonce = 1
listOfNewAddressesToSendOutThroughTheAPI = [] #We fill out this list no matter what although we only need it if we end up passing the info to the API.
for i in range(numberOfAddressesToMake):
#This next section is a little bit strange. We're going to generate keys over and over until we
#find one that has a RIPEMD hash that starts with either \x00 or \x00\x00. Then when we pack them
#into a Bitmessage address, we won't store the \x00 or \x00\x00 bytes thus making the address shorter.
startTime = time.time()
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix = 0
while True:
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix += 1
potentialPrivSigningKey = hashlib.sha512(deterministicPassphrase + encodeVarint(signingKeyNonce)).digest()[:32]
potentialPrivEncryptionKey = hashlib.sha512(deterministicPassphrase + encodeVarint(encryptionKeyNonce)).digest()[:32]
potentialPubSigningKey = pointMult(potentialPrivSigningKey)
potentialPubEncryptionKey = pointMult(potentialPrivEncryptionKey)
#print 'potentialPubSigningKey', potentialPubSigningKey.encode('hex')
#print 'potentialPubEncryptionKey', potentialPubEncryptionKey.encode('hex')
signingKeyNonce += 2
encryptionKeyNonce += 2
ripe = hashlib.new('ripemd160')
sha = hashlib.new('sha512')
sha.update(potentialPubSigningKey+potentialPubEncryptionKey)
ripe.update(sha.digest())
#print 'potential ripe.digest', ripe.digest().encode('hex')
if eighteenByteRipe:
if ripe.digest()[:2] == '\x00\x00':
break
else:
if ripe.digest()[:1] == '\x00':
break
print 'ripe.digest', ripe.digest().encode('hex')
print 'Address generator calculated', numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix, 'addresses at', numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix/(time.time()-startTime),'keys per second.'
address = encodeAddress(3,streamNumber,ripe.digest())
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),'Finished generating address. Writing to keys.dat')
#An excellent way for us to store our keys is in Wallet Import Format. Let us convert now.
#https://en.bitcoin.it/wiki/Wallet_import_format
privSigningKey = '\x80'+potentialPrivSigningKey
checksum = hashlib.sha256(hashlib.sha256(privSigningKey).digest()).digest()[0:4]
privSigningKeyWIF = arithmetic.changebase(privSigningKey + checksum,256,58)
privEncryptionKey = '\x80'+potentialPrivEncryptionKey
checksum = hashlib.sha256(hashlib.sha256(privEncryptionKey).digest()).digest()[0:4]
privEncryptionKeyWIF = arithmetic.changebase(privEncryptionKey + checksum,256,58)
try:
shared.config.add_section(address)
print 'label:', label
shared.config.set(address,'label',label)
shared.config.set(address,'enabled','true')
shared.config.set(address,'decoy','false')
shared.config.set(address,'noncetrialsperbyte',str(nonceTrialsPerByte))
shared.config.set(address,'payloadlengthextrabytes',str(payloadLengthExtraBytes))
shared.config.set(address,'privSigningKey',privSigningKeyWIF)
shared.config.set(address,'privEncryptionKey',privEncryptionKeyWIF)
with open(shared.appdata + 'keys.dat', 'wb') as configfile:
shared.config.write(configfile)
#self.emit(SIGNAL("writeNewAddressToTable(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),self.label,address,str(self.streamNumber))
shared.UISignalQueue.put(('writeNewAddressToTable',(label,address,str(streamNumber))))
listOfNewAddressesToSendOutThroughTheAPI.append(address)
if eighteenByteRipe:
shared.reloadMyAddressHashes()#This is necessary here (rather than just at the end) because otherwise if the human generates a large number of new addresses and uses one before they are done generating, the program will receive a getpubkey message and will ignore it.
except:
print address,'already exists. Not adding it again.'
#It may be the case that this address is being generated as a result of a call to the API. Let us put the result in the necessary queue.
apiAddressGeneratorReturnQueue.put(listOfNewAddressesToSendOutThroughTheAPI)
#self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),'Done generating address')
shared.UISignalQueue.put(('updateStatusBar','Done generating address'))
shared.reloadMyAddressHashes()
#This is one of several classes that constitute the API
#This class was written by Vaibhav Bhatia. Modified by Jonathan Warren (Atheros).
#http://code.activestate.com/recipes/501148-xmlrpc-serverclient-which-does-cookie-handling-and/
class MySimpleXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
def do_POST(self):
#Handles the HTTP POST request.
#Attempts to interpret all HTTP POST requests as XML-RPC calls,
#which are forwarded to the server's _dispatch method for handling.
#Note: this method is the same as in SimpleXMLRPCRequestHandler,
#just hacked to handle cookies
# Check that the path is legal
if not self.is_rpc_path_valid():
self.report_404()
return
try:
# Get arguments by reading body of request.
# We read this in chunks to avoid straining
# socket.read(); around the 10 or 15Mb mark, some platforms
# begin to have problems (bug #792570).
max_chunk_size = 10*1024*1024
size_remaining = int(self.headers["content-length"])
L = []
while size_remaining:
chunk_size = min(size_remaining, max_chunk_size)
L.append(self.rfile.read(chunk_size))
size_remaining -= len(L[-1])
data = ''.join(L)
# In previous versions of SimpleXMLRPCServer, _dispatch
# could be overridden in this class, instead of in
# SimpleXMLRPCDispatcher. To maintain backwards compatibility,
# check to see if a subclass implements _dispatch and dispatch
# using that method if present.
response = self.server._marshaled_dispatch(
data, getattr(self, '_dispatch', None)
)
except: # This should only happen if the module is buggy
# internal error, report as HTTP server error
self.send_response(500)
self.end_headers()
else:
# got a valid XML RPC response
self.send_response(200)
self.send_header("Content-type", "text/xml")
self.send_header("Content-length", str(len(response)))
# HACK :start -> sends cookies here
if self.cookies:
for cookie in self.cookies:
self.send_header('Set-Cookie',cookie.output(header=''))
# HACK :end
self.end_headers()
self.wfile.write(response)
# shut down the connection
self.wfile.flush()
self.connection.shutdown(1)
def APIAuthenticateClient(self):
if self.headers.has_key('Authorization'):
# handle Basic authentication
(enctype, encstr) = self.headers.get('Authorization').split()
(emailid, password) = encstr.decode('base64').split(':')
if emailid == shared.config.get('bitmessagesettings', 'apiusername') and password == shared.config.get('bitmessagesettings', 'apipassword'):
return True
else:
return False
else:
print 'Authentication failed because header lacks Authentication field'
time.sleep(2)
return False
return False
def _dispatch(self, method, params):
self.cookies = []
validuser = self.APIAuthenticateClient()
if not validuser:
time.sleep(2)
return "RPC Username or password incorrect or HTTP header lacks authentication at all."
# handle request
if method == 'helloWorld':
(a,b) = params
return a+'-'+b
elif method == 'add':
(a,b) = params
return a+b
elif method == 'statusBar':
message, = params
shared.UISignalQueue.put(('updateStatusBar',message))
elif method == 'listAddresses':
data = '{"addresses":['
configSections = shared.config.sections()
for addressInKeysFile in configSections:
if addressInKeysFile <> 'bitmessagesettings':
status,addressVersionNumber,streamNumber,hash = decodeAddress(addressInKeysFile)
data
if len(data) > 20:
data += ','
data += json.dumps({'label':shared.config.get(addressInKeysFile,'label'),'address':addressInKeysFile,'stream':streamNumber,'enabled':shared.config.getboolean(addressInKeysFile,'enabled')},indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'createRandomAddress':
if len(params) == 0:
return 'API Error 0000: I need parameters!'
elif len(params) == 1:
label, = params
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 2:
label, eighteenByteRipe = params
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 3:
label, eighteenByteRipe, totalDifficulty = params
nonceTrialsPerByte = int(shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 4:
label, eighteenByteRipe, totalDifficulty, smallMessageDifficulty = params
nonceTrialsPerByte = int(shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = int(shared.networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
else:
return 'API Error 0000: Too many parameters!'
label = label.decode('base64')
apiAddressGeneratorReturnQueue.queue.clear()
streamNumberForAddress = 1
shared.addressGeneratorQueue.put((3,streamNumberForAddress,label,1,"",eighteenByteRipe,nonceTrialsPerByte,payloadLengthExtraBytes))
return apiAddressGeneratorReturnQueue.get()
elif method == 'createDeterministicAddresses':
if len(params) == 0:
return 'API Error 0000: I need parameters!'
elif len(params) == 1:
passphrase, = params
numberOfAddresses = 1
addressVersionNumber = 0
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 2:
passphrase, numberOfAddresses = params
addressVersionNumber = 0
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 3:
passphrase, numberOfAddresses, addressVersionNumber = params
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 4:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber = params
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 5:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe = params
nonceTrialsPerByte = shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 6:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe, totalDifficulty = params
nonceTrialsPerByte = int(shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = shared.config.get('bitmessagesettings','defaultpayloadlengthextrabytes')
elif len(params) == 7:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe, totalDifficulty, smallMessageDifficulty = params
nonceTrialsPerByte = int(shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = int(shared.networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
else:
return 'API Error 0000: Too many parameters!'
if len(passphrase) == 0:
return 'API Error 0001: The specified passphrase is blank.'
passphrase = passphrase.decode('base64')
if addressVersionNumber == 0: #0 means "just use the proper addressVersionNumber"
addressVersionNumber = 3
if addressVersionNumber != 3:
return 'API Error 0002: The address version number currently must be 3 (or 0 which means auto-select).', addressVersionNumber,' isn\'t supported.'
if streamNumber == 0: #0 means "just use the most available stream"
streamNumber = 1
if streamNumber != 1:
return 'API Error 0003: The stream number must be 1 (or 0 which means auto-select). Others aren\'t supported.'
if numberOfAddresses == 0:
return 'API Error 0004: Why would you ask me to generate 0 addresses for you?'
if numberOfAddresses > 999:
return 'API Error 0005: You have (accidentally?) specified too many addresses to make. Maximum 999. This check only exists to prevent mischief; if you really want to create more addresses than this, contact the Bitmessage developers and we can modify the check or you can do it yourself by searching the source code for this message.'
apiAddressGeneratorReturnQueue.queue.clear()
print 'Requesting that the addressGenerator create', numberOfAddresses, 'addresses.'
shared.addressGeneratorQueue.put((addressVersionNumber,streamNumber,'unused API address',numberOfAddresses,passphrase,eighteenByteRipe,nonceTrialsPerByte,payloadLengthExtraBytes))
data = '{"addresses":['
queueReturn = apiAddressGeneratorReturnQueue.get()
for item in queueReturn:
if len(data) > 20:
data += ','
data += "\""+item+ "\""
data += ']}'
return data
elif method == 'getAllInboxMessages':
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''SELECT msgid, toaddress, fromaddress, subject, received, message FROM inbox where folder='inbox' ORDER BY received''')
shared.sqlSubmitQueue.put('')
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
data = '{"inboxMessages":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, received, message, = row
if len(data) > 25:
data += ','
data += json.dumps({'msgid':msgid.encode('hex'),'toAddress':toAddress,'fromAddress':fromAddress,'subject':subject.encode('base64'),'message':message.encode('base64'),'encodingType':2,'receivedTime':received},indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'trashMessage':
if len(params) == 0:
return 'API Error 0000: I need parameters!'
msgid = params[0].decode('hex')
t = (msgid,)
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''UPDATE inbox SET folder='trash' WHERE msgid=?''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
shared.UISignalQueue.put(('updateStatusBar','Per API: Trashed message (assuming message existed). UI not updated.'))
return 'Trashed message (assuming message existed). UI not updated. To double check, run getAllInboxMessages to see that the message disappeared, or restart Bitmessage and look in the normal Bitmessage GUI.'
elif method == 'sendMessage':
if len(params) == 0:
return 'API Error 0000: I need parameters!'
elif len(params) == 4:
toAddress, fromAddress, subject, message = params
encodingType = 2
elif len(params) == 5:
toAddress, fromAddress, subject, message, encodingType = params
if encodingType != 2:
return 'API Error 0006: The encoding type must be 2 because that is the only one this program currently supports.'
subject = subject.decode('base64')
message = message.decode('base64')
status,addressVersionNumber,streamNumber,toRipe = decodeAddress(toAddress)
if status <> 'success':
shared.printLock.acquire()
print 'API Error 0007: Could not decode address:', toAddress, ':', status
shared.printLock.release()
if status == 'checksumfailed':
return 'API Error 0008: Checksum failed for address: ' + toAddress
if status == 'invalidcharacters':
return 'API Error 0009: Invalid characters in address: '+ toAddress
if status == 'versiontoohigh':
return 'API Error 0010: Address version number too high (or zero) in address: ' + toAddress
if addressVersionNumber < 2 or addressVersionNumber > 3:
return 'API Error 0011: The address version number currently must be 2 or 3. Others aren\'t supported. Check the toAddress.'
if streamNumber != 1:
return 'API Error 0012: The stream number must be 1. Others aren\'t supported. Check the toAddress.'
status,addressVersionNumber,streamNumber,fromRipe = decodeAddress(fromAddress)
if status <> 'success':
shared.printLock.acquire()
print 'API Error 0007: Could not decode address:', fromAddress, ':', status
shared.printLock.release()
if status == 'checksumfailed':
return 'API Error 0008: Checksum failed for address: ' + fromAddress
if status == 'invalidcharacters':
return 'API Error 0009: Invalid characters in address: '+ fromAddress
if status == 'versiontoohigh':
return 'API Error 0010: Address version number too high (or zero) in address: ' + fromAddress
if addressVersionNumber < 2 or addressVersionNumber > 3:
return 'API Error 0011: The address version number currently must be 2 or 3. Others aren\'t supported. Check the fromAddress.'
if streamNumber != 1:
return 'API Error 0012: The stream number must be 1. Others aren\'t supported. Check the fromAddress.'
toAddress = addBMIfNotPresent(toAddress)
fromAddress = addBMIfNotPresent(fromAddress)
try:
fromAddressEnabled = shared.config.getboolean(fromAddress,'enabled')
except:
return 'API Error 0013: Could not find your fromAddress in the keys.dat file.'
if not fromAddressEnabled:
return 'API Error 0014: Your fromAddress is disabled. Cannot send.'
ackdata = OpenSSL.rand(32)
shared.sqlLock.acquire()
t = ('',toAddress,toRipe,fromAddress,subject,message,ackdata,int(time.time()),'findingpubkey',1,1,'sent',2)
shared.sqlSubmitQueue.put('''INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
toLabel = ''
t = (toAddress,)
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''select label from addressbook where address=?''')
shared.sqlSubmitQueue.put(t)
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
if queryreturn <> []:
for row in queryreturn:
toLabel, = row
#apiSignalQueue.put(('displayNewSentMessage',(toAddress,toLabel,fromAddress,subject,message,ackdata)))
shared.UISignalQueue.put(('displayNewSentMessage',(toAddress,toLabel,fromAddress,subject,message,ackdata)))
shared.workerQueue.put(('sendmessage',toAddress))
return ackdata.encode('hex')
elif method == 'sendBroadcast':
if len(params) == 0:
return 'API Error 0000: I need parameters!'
if len(params) == 3:
fromAddress, subject, message = params
encodingType = 2
elif len(params) == 4:
fromAddress, subject, message, encodingType = params
if encodingType != 2:
return 'API Error 0006: The encoding type must be 2 because that is the only one this program currently supports.'
subject = subject.decode('base64')
message = message.decode('base64')
status,addressVersionNumber,streamNumber,fromRipe = decodeAddress(fromAddress)
if status <> 'success':
shared.printLock.acquire()
print 'API Error 0007: Could not decode address:', fromAddress, ':', status
shared.printLock.release()
if status == 'checksumfailed':
return 'API Error 0008: Checksum failed for address: ' + fromAddress
if status == 'invalidcharacters':
return 'API Error 0009: Invalid characters in address: '+ fromAddress
if status == 'versiontoohigh':
return 'API Error 0010: Address version number too high (or zero) in address: ' + fromAddress
if addressVersionNumber < 2 or addressVersionNumber > 3:
return 'API Error 0011: the address version number currently must be 2 or 3. Others aren\'t supported. Check the fromAddress.'
if streamNumber != 1:
return 'API Error 0012: the stream number must be 1. Others aren\'t supported. Check the fromAddress.'
fromAddress = addBMIfNotPresent(fromAddress)
try:
fromAddressEnabled = shared.config.getboolean(fromAddress,'enabled')
except:
return 'API Error 0013: could not find your fromAddress in the keys.dat file.'
ackdata = OpenSSL.rand(32)
toAddress = '[Broadcast subscribers]'
ripe = ''
shared.sqlLock.acquire()
t = ('',toAddress,ripe,fromAddress,subject,message,ackdata,int(time.time()),'broadcastpending',1,1,'sent',2)
shared.sqlSubmitQueue.put('''INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)''')
shared.sqlSubmitQueue.put(t)
shared.sqlReturnQueue.get()
shared.sqlSubmitQueue.put('commit')
shared.sqlLock.release()
toLabel = '[Broadcast subscribers]'
#apiSignalQueue.put(('displayNewSentMessage',(toAddress,toLabel,fromAddress,subject,message,ackdata)))
#self.emit(SIGNAL("displayNewSentMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),toAddress,toLabel,fromAddress,subject,message,ackdata)
shared.UISignalQueue.put(('displayNewSentMessage',(toAddress,toLabel,fromAddress,subject,message,ackdata)))
shared.workerQueue.put(('sendbroadcast',(fromAddress,subject,message)))
return ackdata.encode('hex')
elif method == 'getStatus':
if len(params) != 1:
return 'API Error 0000: I need one parameter!'
ackdata, = params
if len(ackdata) != 64:
return 'API Error 0015: The length of ackData should be 32 bytes (encoded in hex thus 64 characters).'
shared.sqlLock.acquire()
shared.sqlSubmitQueue.put('''SELECT status FROM sent where ackdata=?''')
shared.sqlSubmitQueue.put((ackdata.decode('hex'),))
queryreturn = shared.sqlReturnQueue.get()
shared.sqlLock.release()
if queryreturn == []:
return 'notFound'
for row in queryreturn:
status, = row
if status == 'findingpubkey':
return 'findingPubkey'
if status == 'doingpow':
return 'doingPow'
if status == 'sentmessage':
return 'sentMessage'
if status == 'ackreceived':
return 'ackReceived'
else:
return 'otherStatus: '+status
else:
return 'Invalid Method: %s'%method
#This thread, of which there is only one, runs the API.
class singleAPI(threading.Thread):
    """Thread that hosts the XML-RPC API server.

    Reads the interface and port from the bitmessagesettings section of
    keys.dat, then serves requests forever using
    MySimpleXMLRPCRequestHandler. Only one instance of this thread runs.
    """
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        apiInterface = shared.config.get('bitmessagesettings', 'apiinterface')
        apiPort = shared.config.getint('bitmessagesettings', 'apiport')
        server = SimpleXMLRPCServer(
            (apiInterface, apiPort), MySimpleXMLRPCRequestHandler, True, True)
        server.register_introspection_functions()
        server.serve_forever()
#The MySimpleXMLRPCRequestHandler class cannot emit signals (or at least I don't know how) because it is not a QT thread. It therefore puts data in a queue which this thread monitors and emits the signals on its behalf.
"""class singleAPISignalHandler(QThread):
def __init__(self, parent = None):
QThread.__init__(self, parent)
def run(self):
while True:
command, data = apiSignalQueue.get()
if command == 'updateStatusBar':
self.emit(SIGNAL("updateStatusBar(PyQt_PyObject)"),data)
elif command == 'createRandomAddress':
label, eighteenByteRipe = data
streamNumberForAddress = 1
#self.addressGenerator = addressGenerator()
#self.addressGenerator.setup(3,streamNumberForAddress,label,1,"",eighteenByteRipe)
#self.emit(SIGNAL("passAddressGeneratorObjectThrough(PyQt_PyObject)"),self.addressGenerator)
#self.addressGenerator.start()
shared.addressGeneratorQueue.put((3,streamNumberForAddress,label,1,"",eighteenByteRipe))
elif command == 'createDeterministicAddresses':
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe = data
#self.addressGenerator = addressGenerator()
#self.addressGenerator.setup(addressVersionNumber,streamNumber,'unused API address',numberOfAddresses,passphrase,eighteenByteRipe)
#self.emit(SIGNAL("passAddressGeneratorObjectThrough(PyQt_PyObject)"),self.addressGenerator)
#self.addressGenerator.start()
shared.addressGeneratorQueue.put((addressVersionNumber,streamNumber,'unused API address',numberOfAddresses,passphrase,eighteenByteRipe))
elif command == 'displayNewSentMessage':
toAddress,toLabel,fromAddress,subject,message,ackdata = data
self.emit(SIGNAL("displayNewSentMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"),toAddress,toLabel,fromAddress,subject,message,ackdata)"""
# Module-level state shared between the networking, API and UI threads.
selfInitiatedConnections = {} #This is a list of current connections (the thread pointers at least)
alreadyAttemptedConnectionsList = {} #This is a list of nodes to which we have already attempted a connection
ackdataForWhichImWatching = {} # presumably ackdata values of our sent messages, watched for incoming acks -- TODO confirm against users of this dict
alreadyAttemptedConnectionsListLock = threading.Lock() # guards alreadyAttemptedConnectionsList across connection threads
eightBytesOfRandomDataUsedToDetectConnectionsToSelf = pack('>Q',random.randrange(1, 18446744073709551615)) # per-run random nonce; per its name, used to recognize when we have connected to ourselves
neededPubkeys = {} # NOTE(review): appears to track pubkeys we still need to receive -- verify against callers
successfullyDecryptMessageTimings = [] #A list of the amounts of time it took to successfully decrypt msg messages
apiAddressGeneratorReturnQueue = Queue.Queue() #The address generator thread uses this queue to get information back to the API thread.
alreadyAttemptedConnectionsListResetTime = int(time.time()) #used to clear out the alreadyAttemptedConnectionsList periodically so that we will retry connecting to hosts to which we have already tried to connect.
# Scale down the network-wide proof-of-work requirements when the
# testing flag (defined earlier in this file) is set.
if useVeryEasyProofOfWorkForTesting:
    shared.networkDefaultProofOfWorkNonceTrialsPerByte = int(shared.networkDefaultProofOfWorkNonceTrialsPerByte / 16)
    shared.networkDefaultPayloadLengthExtraBytes = int(shared.networkDefaultPayloadLengthExtraBytes / 7000)
if __name__ == "__main__":
# is the application already running? If yes then exit.
thisapp = singleton.singleinstance()
signal.signal(signal.SIGINT, signal_handler)
#signal.signal(signal.SIGINT, signal.SIG_DFL)
# Check the Major version, the first element in the array
if sqlite3.sqlite_version_info[0] < 3:
print 'This program requires sqlite version 3 or higher because 2 and lower cannot store NULL values. I see version:', sqlite3.sqlite_version_info
os._exit(0)
#First try to load the config file (the keys.dat file) from the program directory
shared.config = ConfigParser.SafeConfigParser()
shared.config.read('keys.dat')
try:
shared.config.get('bitmessagesettings', 'settingsversion')
print 'Loading config files from same directory as program'
shared.appdata = ''
except:
#Could not load the keys.dat file in the program directory. Perhaps it is in the appdata directory.
shared.appdata = shared.lookupAppdataFolder()
shared.config = ConfigParser.SafeConfigParser()
shared.config.read(shared.appdata + 'keys.dat')
try:
shared.config.get('bitmessagesettings', 'settingsversion')
print 'Loading existing config files from', shared.appdata
except:
#This appears to be the first time running the program; there is no config file (or it cannot be accessed). Create config file.
shared.config.add_section('bitmessagesettings')
shared.config.set('bitmessagesettings','settingsversion','5')
shared.config.set('bitmessagesettings','port','8444')
shared.config.set('bitmessagesettings','timeformat','%%a, %%d %%b %%Y %%I:%%M %%p')
shared.config.set('bitmessagesettings','blackwhitelist','black')
shared.config.set('bitmessagesettings','startonlogon','false')
if 'linux' in sys.platform:
shared.config.set('bitmessagesettings','minimizetotray','false')#This isn't implimented yet and when True on Ubuntu causes Bitmessage to disappear while running when minimized.
else:
shared.config.set('bitmessagesettings','minimizetotray','true')
shared.config.set('bitmessagesettings','showtraynotifications','true')
shared.config.set('bitmessagesettings','startintray','false')
shared.config.set('bitmessagesettings','socksproxytype','none')
shared.config.set('bitmessagesettings','sockshostname','localhost')
shared.config.set('bitmessagesettings','socksport','9050')
shared.config.set('bitmessagesettings','socksauthentication','false')
shared.config.set('bitmessagesettings','socksusername','')
shared.config.set('bitmessagesettings','sockspassword','')
shared.config.set('bitmessagesettings','keysencrypted','false')
shared.config.set('bitmessagesettings','messagesencrypted','false')
shared.config.set('bitmessagesettings','defaultnoncetrialsperbyte',str(shared.networkDefaultProofOfWorkNonceTrialsPerByte))
shared.config.set('bitmessagesettings','defaultpayloadlengthextrabytes',str(shared.networkDefaultPayloadLengthExtraBytes))
shared.config.set('bitmessagesettings','minimizeonclose','true')
if storeConfigFilesInSameDirectoryAsProgramByDefault:
#Just use the same directory as the program and forget about the appdata folder
shared.appdata = ''
print 'Creating new config files in same directory as program.'
else:
print 'Creating new config files in', shared.appdata
if not os.path.exists(shared.appdata):
os.makedirs(shared.appdata)
with open(shared.appdata + 'keys.dat', 'wb') as configfile:
shared.config.write(configfile)
if shared.config.getint('bitmessagesettings','settingsversion') == 1:
shared.config.set('bitmessagesettings','settingsversion','4') #If the settings version is equal to 2 or 3 then the sqlThread will modify the pubkeys table and change the settings version to 4.
shared.config.set('bitmessagesettings','socksproxytype','none')
shared.config.set('bitmessagesettings','sockshostname','localhost')
shared.config.set('bitmessagesettings','socksport','9050')
shared.config.set('bitmessagesettings','socksauthentication','false')
shared.config.set('bitmessagesettings','socksusername','')
shared.config.set('bitmessagesettings','sockspassword','')
shared.config.set('bitmessagesettings','keysencrypted','false')
shared.config.set('bitmessagesettings','messagesencrypted','false')
with open(shared.appdata + 'keys.dat', 'wb') as configfile:
shared.config.write(configfile)
try:
#We shouldn't have to use the shared.knownNodesLock because this had better be the only thread accessing knownNodes right now.
pickleFile = open(shared.appdata + 'knownnodes.dat', 'rb')
shared.knownNodes = pickle.load(pickleFile)
pickleFile.close()
except:
createDefaultKnownNodes(shared.appdata)
pickleFile = open(shared.appdata + 'knownnodes.dat', 'rb')
shared.knownNodes = pickle.load(pickleFile)
pickleFile.close()
if shared.config.getint('bitmessagesettings', 'settingsversion') > 5:
print 'Bitmessage cannot read future versions of the keys file (keys.dat). Run the newer version of Bitmessage.'
raise SystemExit
#DNS bootstrap. This could be programmed to use the SOCKS proxy to do the DNS lookup some day but for now we will just rely on the entries in defaultKnownNodes.py. Hopefully either they are up to date or the user has run Bitmessage recently without SOCKS turned on and received good bootstrap nodes using that method.
if shared.config.get('bitmessagesettings', 'socksproxytype') == 'none':
try:
for item in socket.getaddrinfo('bootstrap8080.bitmessage.org',80):
print 'Adding', item[4][0],'to knownNodes based on DNS boostrap method'
shared.knownNodes[1][item[4][0]] = (8080,int(time.time()))
except:
print 'bootstrap8080.bitmessage.org DNS bootstraping failed.'
try:
for item in socket.getaddrinfo('bootstrap8444.bitmessage.org',80):
print 'Adding', item[4][0],'to knownNodes based on DNS boostrap method'
shared.knownNodes[1][item[4][0]] = (8444,int(time.time()))
except:
print 'bootstrap8444.bitmessage.org DNS bootstrapping failed.'
else:
print 'DNS bootstrap skipped because SOCKS is used.'
#Start the address generation thread
addressGeneratorThread = addressGenerator()
addressGeneratorThread.daemon = True # close the main program even if there are threads left
addressGeneratorThread.start()
#Start the thread that calculates POWs
singleWorkerThread = singleWorker()
singleWorkerThread.daemon = True # close the main program even if there are threads left
singleWorkerThread.start()
#Start the SQL thread
sqlLookup = sqlThread()
sqlLookup.daemon = False # DON'T close the main program even if there are threads left. The closeEvent should command this thread to exit gracefully.
sqlLookup.start()
#Start the cleanerThread
singleCleanerThread = singleCleaner()
singleCleanerThread.daemon = True # close the main program even if there are threads left
singleCleanerThread.start()
shared.reloadMyAddressHashes()
shared.reloadBroadcastSendersForWhichImWatching()
#Initialize the ackdataForWhichImWatching data structure using data from the sql database.
shared.sqlSubmitQueue.put('''SELECT ackdata FROM sent where (status='sentmessage' OR status='doingpow')''')
shared.sqlSubmitQueue.put('')
queryreturn = shared.sqlReturnQueue.get()
for row in queryreturn:
ackdata, = row
print 'Watching for ackdata', ackdata.encode('hex')
ackdataForWhichImWatching[ackdata] = 0
if shared.safeConfigGetBoolean('bitmessagesettings','apienabled'):
try:
apiNotifyPath = shared.config.get('bitmessagesettings','apinotifypath')
except:
apiNotifyPath = ''
if apiNotifyPath != '':
shared.printLock.acquire()
print 'Trying to call', apiNotifyPath
shared.printLock.release()
call([apiNotifyPath, "startingUp"])
singleAPIThread = singleAPI()
singleAPIThread.daemon = True #close the main program even if there are threads left
singleAPIThread.start()
#self.singleAPISignalHandlerThread = singleAPISignalHandler()
#self.singleAPISignalHandlerThread.start()
#QtCore.QObject.connect(self.singleAPISignalHandlerThread, QtCore.SIGNAL("updateStatusBar(PyQt_PyObject)"), self.updateStatusBar)
#QtCore.QObject.connect(self.singleAPISignalHandlerThread, QtCore.SIGNAL("passAddressGeneratorObjectThrough(PyQt_PyObject)"), self.connectObjectToAddressGeneratorSignals)
#QtCore.QObject.connect(self.singleAPISignalHandlerThread, QtCore.SIGNAL("displayNewSentMessage(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)"), self.displayNewSentMessage)
connectToStream(1)
singleListenerThread = singleListener()
singleListenerThread.daemon = True # close the main program even if there are threads left
singleListenerThread.start()
if not shared.safeConfigGetBoolean('bitmessagesettings','daemon'):
try:
from PyQt4.QtCore import *
from PyQt4.QtGui import *
except Exception, err:
print 'PyBitmessage requires PyQt unless you want to run it as a daemon and interact with it using the API. You can download PyQt from http://www.riverbankcomputing.com/software/pyqt/download or by searching Google for \'PyQt Download\'. If you want to run in daemon mode, see https://bitmessage.org/wiki/Daemon'
print 'Error message:', err
os._exit(0)
import bitmessageqt
bitmessageqt.run()
else:
print 'Running as a daemon. You can use Ctrl+C to exit.'
while True:
time.sleep(20)
# So far, the Bitmessage protocol, this client, the Wiki, and the forums
# are all a one-man operation. Bitcoin tips are quite appreciated!
# 1H5XaDA6fYENLbknwZyjiYXYPQaFjjLX2u
|
import numpy as np
import pytest
from astromodels import Blackbody, Powerlaw, Model, PointSource
from threeML import JointLikelihood, DataList
from threeML.io.package_data import get_path_of_data_file
from threeML.plugins.DispersionSpectrumLike import DispersionSpectrumLike
from threeML.plugins.SpectrumLike import SpectrumLike
from threeML.utils.OGIP.response import OGIPResponse
from threeML.exceptions.custom_exceptions import NegativeBackground
import warnings
warnings.simplefilter('ignore')
def test_assigning_source_name():
    """Exercise SpectrumLike.assign_to_source in every supported order.

    Covers: a valid source name assigned before and after the model is set,
    an invalid name in both orders (both must raise AssertionError), and
    assignment of a plugin to one source of a multi-source model.
    """
    energies = np.logspace(1, 3, 51)
    low_edge = energies[:-1]
    high_edge = energies[1:]
    sim_K = 1E-1
    sim_kT = 20.
    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)
    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.)
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    # good name setting
    bb = Blackbody()
    pts = PointSource('good_name', 0, 0, spectral_shape=bb)
    model = Model(pts)
    # before setting model
    spectrum_generator.assign_to_source('good_name')
    jl = JointLikelihood(model, DataList(spectrum_generator))
    _ = jl.fit()
    # after setting model
    pts = PointSource('good_name', 0, 0, spectral_shape=bb)
    model = Model(pts)
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    jl = JointLikelihood(model, DataList(spectrum_generator))
    spectrum_generator.assign_to_source('good_name')
    # after with bad name
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    jl = JointLikelihood(model, DataList(spectrum_generator))
    with pytest.raises(AssertionError):
        spectrum_generator.assign_to_source('bad_name')
    # before with bad name
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    # assigning before the model exists is allowed; the mismatch is only
    # detected when JointLikelihood links data to the model below
    spectrum_generator.assign_to_source('bad_name')
    with pytest.raises(AssertionError):
        jl = JointLikelihood(model, DataList(spectrum_generator))
    #multisource model
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    ps1 = PointSource('ps1', 0, 0, spectral_shape=Blackbody())
    ps2 = PointSource('ps2', 0, 0, spectral_shape=Powerlaw())
    model = Model(ps1, ps2)
    # freeze the second source so the fit below is well constrained
    model.ps2.spectrum.main.Powerlaw.K.fix = True
    model.ps2.spectrum.main.Powerlaw.index.fix = True
    spectrum_generator.assign_to_source('ps1')
    dl = DataList(spectrum_generator)
    jl = JointLikelihood(model, dl)
    _ = jl.fit()
#
def test_spectrumlike_fit():
    """Fit a simulated blackbody source with a power-law background via MLE
    and check the recovered K and kT (``.mean`` of the variates) lie within
    atol=1 of the simulated values."""
    energies = np.logspace(1, 3, 51)
    low_edge = energies[:-1]
    high_edge = energies[1:]
    sim_K = 1E-1
    sim_kT = 20.
    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)
    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.)
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    bb = Blackbody()
    pts = PointSource('mysource', 0, 0, spectral_shape=bb)
    model = Model(pts)
    # MLE fitting
    jl = JointLikelihood(model, DataList(spectrum_generator))
    result = jl.fit()
    K_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.K')
    kT_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.kT')
    assert np.all(np.isclose([K_variates.mean, kT_variates.mean], [sim_K, sim_kT], atol=1 ))
def test_dispersionspectrumlike_fit():
    """Same blackbody recovery test as test_spectrumlike_fit, but folding the
    simulated spectrum through an OGIP response (DispersionSpectrumLike)."""
    response = OGIPResponse(get_path_of_data_file('datasets/ogip_powerlaw.rsp'))
    sim_K = 1E-1
    sim_kT = 20.
    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)
    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.)
    spectrum_generator = DispersionSpectrumLike.from_function('test', source_function=source_function,
                                                              response=response,
                                                              background_function=background_function)
    bb = Blackbody()
    pts = PointSource('mysource', 0, 0, spectral_shape=bb)
    model = Model(pts)
    # MLE fitting
    jl = JointLikelihood(model, DataList(spectrum_generator))
    result = jl.fit()
    K_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.K')
    kT_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.kT')
    assert np.all(np.isclose([K_variates.mean, kT_variates.mean], [sim_K, sim_kT], atol=1))
def test_spectrum_like_with_background_model():
    """Fit the background with its own plugin/model first, then use that
    fitted background plugin inside a new SpectrumLike and check the source
    parameters are recovered (rtol=0.5)."""
    energies = np.logspace(1, 3, 51)
    low_edge = energies[:-1]
    high_edge = energies[1:]
    sim_K = 1E-1
    sim_kT = 20.
    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)
    # power law background function
    background_function = Powerlaw(K=5, index=-1.5, piv=100.)
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    # extract the simulated background as a standalone plugin
    background_plugin = SpectrumLike.from_background('background',spectrum_generator)
    bb = Blackbody()
    pl = Powerlaw()
    pl.piv = 100
    bkg_ps = PointSource('bkg',0,0,spectral_shape=pl)
    bkg_model = Model(bkg_ps)
    # fit the background model on its own
    jl_bkg = JointLikelihood(bkg_model,DataList(background_plugin))
    _ = jl_bkg.fit()
    # build the source plugin with the modeled background attached
    plugin_bkg_model = SpectrumLike('full',spectrum_generator.observed_spectrum,background=background_plugin)
    pts = PointSource('mysource', 0, 0, spectral_shape=bb)
    model = Model(pts)
    # MLE fitting
    jl = JointLikelihood(model, DataList(plugin_bkg_model))
    result = jl.fit()
    K_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.K')
    kT_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.kT')
    assert np.all(np.isclose([K_variates.mean, kT_variates.mean], [sim_K, sim_kT], rtol=0.5))
def test_all_statistics():
    """Smoke-test get_log_like() under every supported noise-model combination
    (Poisson/no-bkg, Poisson/Poisson, Poisson/ideal, Poisson/Gaussian,
    Gaussian/no-bkg), and check negative background counts raise
    NegativeBackground."""
    energies = np.logspace(1, 3, 51)
    low_edge = energies[:-1]
    high_edge = energies[1:]
    # get a blackbody source function
    source_function = Blackbody(K=9E-2, kT=20)
    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.)
    pts = PointSource('mysource', 0, 0, spectral_shape=source_function)
    model = Model(pts)
    # test Poisson no bkg
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    spectrum_generator.set_model(model)
    spectrum_generator.get_log_like()
    # test Poisson w/ Poisson bkg
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    spectrum_generator.set_model(model)
    spectrum_generator.get_log_like()
    # corrupt the background counts and verify the probe rejects them
    spectrum_generator._background_counts = -np.ones_like(spectrum_generator._background_counts)
    with pytest.raises(NegativeBackground):
        spectrum_generator._probe_noise_models()
    # test Poisson w/ ideal bkg
    spectrum_generator.background_noise_model = 'ideal'
    spectrum_generator.get_log_like()
    # test Poisson w/ gauss bkg
    # test Poisson w/ Poisson bkg
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    background_errors=0.1 * background_function(low_edge),
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    spectrum_generator.set_model(model)
    spectrum_generator.get_log_like()
    # test Gaussian w/ no bkg
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    source_errors=0.5 * source_function(low_edge),
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    spectrum_generator.set_model(model)
    spectrum_generator.get_log_like()
Move `mean` to `average` in the fit-result variate assertions.
import numpy as np
import pytest
from astromodels import Blackbody, Powerlaw, Model, PointSource
from threeML import JointLikelihood, DataList
from threeML.io.package_data import get_path_of_data_file
from threeML.plugins.DispersionSpectrumLike import DispersionSpectrumLike
from threeML.plugins.SpectrumLike import SpectrumLike
from threeML.utils.OGIP.response import OGIPResponse
from threeML.exceptions.custom_exceptions import NegativeBackground
import warnings
warnings.simplefilter('ignore')
def test_assigning_source_name():
    """Exercise SpectrumLike.assign_to_source in every supported order.

    Covers: a valid source name assigned before and after the model is set,
    an invalid name in both orders (both must raise AssertionError), and
    assignment of a plugin to one source of a multi-source model.
    """
    energies = np.logspace(1, 3, 51)
    low_edge = energies[:-1]
    high_edge = energies[1:]
    sim_K = 1E-1
    sim_kT = 20.
    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)
    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.)
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    # good name setting
    bb = Blackbody()
    pts = PointSource('good_name', 0, 0, spectral_shape=bb)
    model = Model(pts)
    # before setting model
    spectrum_generator.assign_to_source('good_name')
    jl = JointLikelihood(model, DataList(spectrum_generator))
    _ = jl.fit()
    # after setting model
    pts = PointSource('good_name', 0, 0, spectral_shape=bb)
    model = Model(pts)
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    jl = JointLikelihood(model, DataList(spectrum_generator))
    spectrum_generator.assign_to_source('good_name')
    # after with bad name
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    jl = JointLikelihood(model, DataList(spectrum_generator))
    with pytest.raises(AssertionError):
        spectrum_generator.assign_to_source('bad_name')
    # before with bad name
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    # assigning before the model exists is allowed; the mismatch is only
    # detected when JointLikelihood links data to the model below
    spectrum_generator.assign_to_source('bad_name')
    with pytest.raises(AssertionError):
        jl = JointLikelihood(model, DataList(spectrum_generator))
    #multisource model
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    ps1 = PointSource('ps1', 0, 0, spectral_shape=Blackbody())
    ps2 = PointSource('ps2', 0, 0, spectral_shape=Powerlaw())
    model = Model(ps1, ps2)
    # freeze the second source so the fit below is well constrained
    model.ps2.spectrum.main.Powerlaw.K.fix = True
    model.ps2.spectrum.main.Powerlaw.index.fix = True
    spectrum_generator.assign_to_source('ps1')
    dl = DataList(spectrum_generator)
    jl = JointLikelihood(model, dl)
    _ = jl.fit()
#
def test_spectrumlike_fit():
    """Fit a simulated blackbody source with a power-law background via MLE
    and check the recovered K and kT (``.average`` of the variates) lie
    within atol=1 of the simulated values."""
    energies = np.logspace(1, 3, 51)
    low_edge = energies[:-1]
    high_edge = energies[1:]
    sim_K = 1E-1
    sim_kT = 20.
    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)
    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.)
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    bb = Blackbody()
    pts = PointSource('mysource', 0, 0, spectral_shape=bb)
    model = Model(pts)
    # MLE fitting
    jl = JointLikelihood(model, DataList(spectrum_generator))
    result = jl.fit()
    K_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.K')
    kT_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.kT')
    assert np.all(np.isclose([K_variates.average, kT_variates.average], [sim_K, sim_kT], atol=1 ))
def test_dispersionspectrumlike_fit():
    """Same blackbody recovery test as test_spectrumlike_fit, but folding the
    simulated spectrum through an OGIP response (DispersionSpectrumLike)."""
    response = OGIPResponse(get_path_of_data_file('datasets/ogip_powerlaw.rsp'))
    sim_K = 1E-1
    sim_kT = 20.
    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)
    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.)
    spectrum_generator = DispersionSpectrumLike.from_function('test', source_function=source_function,
                                                              response=response,
                                                              background_function=background_function)
    bb = Blackbody()
    pts = PointSource('mysource', 0, 0, spectral_shape=bb)
    model = Model(pts)
    # MLE fitting
    jl = JointLikelihood(model, DataList(spectrum_generator))
    result = jl.fit()
    K_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.K')
    kT_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.kT')
    assert np.all(np.isclose([K_variates.average, kT_variates.average], [sim_K, sim_kT], atol=1))
def test_spectrum_like_with_background_model():
    """Fit the background with its own plugin/model first, then use that
    fitted background plugin inside a new SpectrumLike and check the source
    parameters are recovered (rtol=0.5)."""
    energies = np.logspace(1, 3, 51)
    low_edge = energies[:-1]
    high_edge = energies[1:]
    sim_K = 1E-1
    sim_kT = 20.
    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)
    # power law background function
    background_function = Powerlaw(K=5, index=-1.5, piv=100.)
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    # extract the simulated background as a standalone plugin
    background_plugin = SpectrumLike.from_background('background',spectrum_generator)
    bb = Blackbody()
    pl = Powerlaw()
    pl.piv = 100
    bkg_ps = PointSource('bkg',0,0,spectral_shape=pl)
    bkg_model = Model(bkg_ps)
    # fit the background model on its own
    jl_bkg = JointLikelihood(bkg_model,DataList(background_plugin))
    _ = jl_bkg.fit()
    # build the source plugin with the modeled background attached
    plugin_bkg_model = SpectrumLike('full',spectrum_generator.observed_spectrum,background=background_plugin)
    pts = PointSource('mysource', 0, 0, spectral_shape=bb)
    model = Model(pts)
    # MLE fitting
    jl = JointLikelihood(model, DataList(plugin_bkg_model))
    result = jl.fit()
    K_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.K')
    kT_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.kT')
    assert np.all(np.isclose([K_variates.average, kT_variates.average], [sim_K, sim_kT], rtol=0.5))
def test_all_statistics():
    """Smoke-test get_log_like() under every supported noise-model combination
    (Poisson/no-bkg, Poisson/Poisson, Poisson/ideal, Poisson/Gaussian,
    Gaussian/no-bkg), and check negative background counts raise
    NegativeBackground."""
    energies = np.logspace(1, 3, 51)
    low_edge = energies[:-1]
    high_edge = energies[1:]
    # get a blackbody source function
    source_function = Blackbody(K=9E-2, kT=20)
    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.)
    pts = PointSource('mysource', 0, 0, spectral_shape=source_function)
    model = Model(pts)
    # test Poisson no bkg
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    spectrum_generator.set_model(model)
    spectrum_generator.get_log_like()
    # test Poisson w/ Poisson bkg
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    spectrum_generator.set_model(model)
    spectrum_generator.get_log_like()
    # corrupt the background counts and verify the probe rejects them
    spectrum_generator._background_counts = -np.ones_like(spectrum_generator._background_counts)
    with pytest.raises(NegativeBackground):
        spectrum_generator._probe_noise_models()
    # test Poisson w/ ideal bkg
    spectrum_generator.background_noise_model = 'ideal'
    spectrum_generator.get_log_like()
    # test Poisson w/ gauss bkg
    # test Poisson w/ Poisson bkg
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    background_function=background_function,
                                                    background_errors=0.1 * background_function(low_edge),
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    spectrum_generator.set_model(model)
    spectrum_generator.get_log_like()
    # test Gaussian w/ no bkg
    spectrum_generator = SpectrumLike.from_function('fake',
                                                    source_function=source_function,
                                                    source_errors=0.5 * source_function(low_edge),
                                                    energy_min=low_edge,
                                                    energy_max=high_edge)
    spectrum_generator.set_model(model)
    spectrum_generator.get_log_like()
|
#!/usr/bin/env python
# MD workflow functions. mdwf
""" mdwf functions. version 0.25
"""
import os
import subprocess
import sys
from collections import OrderedDict
import json
import shutil
import fileinput
import hashlib
import time
import datetime
import glob
import re
# ansi color variables for formatting purposes:
# (SGR escape sequences: 31 = red, 32 = green, 34 = blue, 0 = reset)
DEFAULTCOLOUR = '\033[0m'
RED   = '\033[31;2m'    # bug fix: 31 is the ANSI red code (was 34, which is blue)
GREEN = '\033[32;2m'
BLUE  = '\033[34;2m'    # bug fix: 34 is the ANSI blue code (was 31, which is red)
def read_master_config_file():
    """Load 'master_config_file' (json) from the current directory.

    Returns an OrderedDict preserving the file's key order, or None
    (with a hint printed) when the file is absent.
    """
    if not os.path.isfile('master_config_file'):
        print("{}Can't see 'master_config_file' {} "\
               .format(RED, DEFAULTCOLOUR))
        print("{}Have you populated the directory? (./mdwf -p){}"\
               .format(RED, DEFAULTCOLOUR))
        return None
    with open('master_config_file') as handle:
        return json.load(handle, object_pairs_hook=OrderedDict)
def read_local_job_details(path="Setup_and_Config",
                 ljdf_target="local_job_details_template.json"):
    """ Reads a local job details json file and returns the dictionary.

        path        -- directory holding the details file
        ljdf_target -- file name of the json details file

        Returns an OrderedDict, or None when the file cannot be found.
    """
    target = path + "/" + ljdf_target
    if not os.path.isfile(target):
        print("Can't see {} Have you populated job tree? ".format(target))
        # Bug fix: the original fell through to 'return ljdf' here and raised
        # UnboundLocalError; return None explicitly instead.
        return None
    with open(target) as local_json:
        return json.load(local_json, object_pairs_hook=OrderedDict)
def read_namd_job_details(targetfile):
    """ Extracts simulation details from given namd config file
        and returns a dictionary and a list. The function assumes
        namd files are located in /Setup_and_Config """
    target = os.getcwd() + "/Setup_and_Config/" + targetfile
    jdd = {}                         # job-details dictionary
    jdpl = []                        # job details parameter list
    # Bug fix: the old pattern '\s+|/|' ended in an empty alternative, which
    # on Python 3.7+ makes re.split() split between every character, so the
    # .psf/.pdb tokens were never found. Use a raw string without it.
    path_split = r'\s+|/'
    if os.path.isfile(target):
        f = open(target, 'r')
        for lline in f:
            line = lline[0:18]       # strip line to avoid artifacts
            if not "#" in line[0:2]: # leave out commented lines
                if 'structure ' in line:
                    pl = lline.split()
                    jdd["psffilepath"] = pl[1]
                    # split on whitespace and '/' to isolate the file name
                    nl = re.split(path_split, lline)
                    for i in nl:
                        if '.psf' in i:
                            jdd["psffile"] = i
                            natom = get_atoms(i)
                            jdd["natom"] = natom
                if 'coordinates ' in line:
                    pl = lline.split()
                    jdd["pdbfilepath"] = pl[1]
                    nl = re.split(path_split, lline)
                    for i in nl:
                        if '.pdb' in i:
                            jdd["pdbfile"] = i
                if 'timestep ' in line:
                    nl = lline.split()
                    jdd["timestep"] = nl[1]
                if 'NumberSteps ' in line:
                    nl = lline.split()
                    jdd["steps"] = nl[2]
                if 'dcdfreq ' in line:
                    nl = lline.split()
                    jdd["dcdfreq"] = nl[1]
                if 'run ' in line:
                    nl = lline.split()
                    jdd["runsteps"] = nl[1]
                if 'restartfreq ' in line:
                    nl = lline.split()
                    jdd["restartfreq"] = nl[1]
                if 'parameters ' in line:
                    nl = lline.split()
                    jdpl.append(nl[1])
        f.close()
    else:
        print("{} {} file not found.{}".format(RED,targetfile,DEFAULTCOLOUR))
    return jdd, jdpl
def gather_jobs():
    """function to create a convenient vmd input file to load and view trajectory data"""
    # dcdlist is intentionally module-global: gather_list() (invoked below via
    # execute_function_in_job_tree) appends dcd entries to this open handle.
    global dcdlist
    # write basic model loader.
    mcf = read_master_config_file()
    psf = mcf["PsfFileName"]
    pdb = mcf["PdbFileName"]
    cwd = os.getcwd()
    with open("Analysis/model_loader.vmd", "w+") as mfile:
        mfile.write("# Basic vmd model loader \n")
        mfile.write("mol new " + cwd + "/InputFiles/" + psf
          + " type psf first 0 last -1 step 1 filebonds 1 autobonds 1 waitfor all\n")
        mfile.write("mol addfile " + cwd + "/InputFiles/" + pdb
          + " type pdb first 0 last -1 step 1 filebonds 1 autobonds 1 waitfor all\n")
        # NOTE(review): explicit close() inside a 'with' block is redundant
        mfile.close()
    with open("Analysis/dcd_trajectory_fileloader.vmd", "w+") as dcdlist:
        # walks every job directory; gather_list writes into dcdlist
        execute_function_in_job_tree(gather_list)
        dcdlist.close()
def extend_jobs(a):
    """Extend every job in the tree by *a* extra runs (applies extend_runs
    in each job directory via execute_function_in_job_tree)."""
    execute_function_in_job_tree(extend_runs,a)
def extend_runs(a):
    """Raise the 'TotalRuns' entry of the local job details file by *a*."""
    details = read_local_job_details( ".", "local_job_details.json" )
    extended_total = int( details[ 'TotalRuns' ] ) + a
    update_local_job_details( "TotalRuns", extended_total )
def sorted_ls(path):
    """Return the entries of *path* sorted oldest-first by modification time."""
    def _mtime(entry):
        return os.stat(os.path.join(path, entry)).st_mtime
    return sorted(os.listdir(path), key=_mtime)
def gather_list():
    """function to create list of output files from OutputFiles folder"""
    # NOTE: writes to the module-global 'dcdlist' file handle, which is
    # opened by gather_jobs() before this runs in each job directory.
    # list dcd files in /OutputFiles folder
    cwd = os.getcwd()
    line = "# " + cwd + "\n"
    dcdlist.write(line)
    if os.path.isdir("OutputFiles"):
        # oldest-first so trajectories load in chronological order
        f_list = sorted_ls("OutputFiles")
        # for creating vmd fileloader
        head = "mol addfile "
        tail = " type dcd first 0 last -1 step 1 filebonds 1 autobonds 1 waitfor all\n"
        for l in f_list:
            if ".dcd" in l:
                dcdline = head + cwd + "/OutputFiles/" + l + tail
                dcdlist.write(dcdline)
def get_atoms(psffile):
    """Read the atom count from a psf file in ./InputFiles.

    Scans for the '!NATOM' record and returns its leading field (a string);
    returns 0 when the file cannot be found.
    """
    target = os.getcwd() + "/InputFiles/" + psffile
    atoms = 0
    if not os.path.isfile(target):
        print("{}Can't find {} in /InputFiles directory {}"\
               .format(RED,psffile,DEFAULTCOLOUR))
        return atoms
    with open(target, 'r') as psf_handle:
        for record in psf_handle:
            # the atom total is the first field of the !NATOM line
            if 'NATOM' in record:
                atoms = record.split()[0]
    return atoms
def pausejob_flag( directive ):
    """Create or remove the 'pausejob' flag file.

    The flag file (plus the matching entry in the local details file) is an
    extra safety net: its presence in a job directory stops jobs launching.
    Pass "remove" to clear it; any other string is appended to the flag file
    as the reason for pausing.
    """
    if directive == "remove":
        update_local_job_details( "PauseJobFlag", 0 )
        if os.path.isfile( "pausejob" ):
            os.remove( "pausejob" )
        return
    update_local_job_details( "PauseJobFlag", "pausejob" )
    with open("pausejob", 'a') as flag_file:
        flag_file.write(directive)
def check_pausejob_flag():
    """Mirror a non-zero 'PauseJobFlag' details entry to an actual
    'pausejob' file in the directory, and mark the job status paused."""
    details = read_local_job_details( ".", "local_job_details.json" )
    if details["PauseJobFlag"] == 0:
        return
    with open( "pausejob", 'a' ) as flag_file:
        flag_file.write("pauseflag already present")
    update_local_job_details( "JobStatus", "pausejob" )
def check_disk_quota():
    """ function for checking that there is enough diskspace on the
        system before starting job. Relies on running the 'mydisk'
        program on Avoca. The cutoff percentage comes from the local
        details file. Creates a pausejob flag when usage is too high,
        and is best-effort: any failure to run/parse 'mydisk' is reported
        but does not stop the job. """
    ljdf_t  = read_local_job_details( ".", "local_job_details.json" )
    account = ljdf_t[ 'Account' ]
    diskspc = int( ljdf_t[ 'DiskSpaceCutOff' ] )
    try:
        # decode so this works on Python 3, where check_output returns bytes
        disk = subprocess.check_output('mydisk').decode()
        dline = disk.split("\n")
        for i in dline:
            if account in i:            # looks for account number
                # last field is e.g. '85%' -> strip the '%' and compare
                usage = int( i.split()[-1][:-1] )
                if usage > diskspc:
                    # Bug fix: these messages referenced undefined names
                    # 'a' and 'b' (a NameError swallowed by the except below);
                    # use the actual usage/cutoff values.
                    print("Warning: Account {} disk space quota low. Usage: {} % "\
                        .format(account, usage))
                    print("Diskspace too low. usage: {}% disk limit set to: {}%\n"\
                        .format(usage, diskspc))
                    update_local_job_details("JobStatus", "stopping" )
                    update_local_job_details("PauseJobFlag", "low disk" )
                    update_local_job_details("JobMessage", "stopped: Disk quota low.")
                    pausejob_flag( "Low Disk Quota detected." )
    except:
        print("Can't run 'mydisk'. Can't check disk quota for account {}."\
             .format(account))
def log_job_details( jobid ):
    """ Simple function to update 'local_job_details' from the scheduler's
        'scontrol show job' output for the given SLURM job id. """
    jobdetails = subprocess.check_output(["scontrol",\
                 "show", "job", str(jobid) ] )
    # scontrol output is space/newline separated KEY=VALUE tokens
    jdsplit = re.split( r' |\n', jobdetails )
    for i in jdsplit:
        # Bug fix: these calls were spelled 'update_locate_job_details',
        # a NameError at runtime; the helper is update_local_job_details.
        if "JobState=" in i:
            update_local_job_details( "JobStatus", i.split("=")[1] )
        if "NumNodes=" in i:
            update_local_job_details( "Nodes", i.split("=")[1] )
        if "NumCPUs=" in i:
            update_local_job_details( "Cores", i.split("=")[1] )
        if "StartTime=" in i:
            update_local_job_details( "JobStartTime", i.split("=")[1] )
        if "TimeLimit=" in i:
            update_local_job_details( "Walltime", i.split("=")[1] )
def check_job_runtime():
    """Flag a probable job failure when the run finished suspiciously fast.

    Compares the recorded start/finish times against the 'JobFailTime'
    threshold from the local details file; a too-short run stops the job
    and raises a pausejob flag.
    """
    details = read_local_job_details( ".", "local_job_details.json" )
    elapsed = int( details[ 'JobFinishTime' ] ) - int( details[ 'JobStartTime' ] )
    if elapsed < int( details[ 'JobFailTime' ] ):
        update_local_job_details( "JobStatus", "stopped" )
        update_local_job_details( "JobMessage", "short run time detected" )
        pausejob_flag( "Short runtime detected - job fail??" )
def check_run_counter():
    """Advance the production-run counter, or wind the job down when the
    configured total number of runs has been reached."""
    details = read_local_job_details( ".", "local_job_details.json" )
    total    = int( details[ 'TotalRuns' ] )
    next_run = int( details[ 'CurrentRun' ] ) + 1
    # always record which job id just ran
    update_local_job_details( "LastJobId", details[ 'CurrentJobId' ] )
    if next_run > total:
        # all production runs complete: flag everything to stop
        update_local_job_details( "JobStatus", "finished" )
        update_local_job_details( "JobMessage", "finished production runs" )
        update_local_job_details( "PauseJobFlag", "pausejob" )
        update_local_job_details( "CurrentJobId", -1 )
        pausejob_flag( "Job run number greater than total specified." )
        final_job_cleanup()
        return None
    update_local_job_details( "CurrentRun", next_run )
def get_job_runtime( starttime, status ):
    """Return the elapsed runtime of the current job as 'H:MM'.

    starttime -- epoch seconds when the job started
    status    -- job status string; anything not containing 'running'
                 yields the placeholder '--:--'
    """
    if "running" not in status:
        return "--:--"
    elapsed = int( time.time() - starttime )
    hours, remainder = divmod( elapsed, 3600 )
    minutes = remainder // 60
    return "%d:%02d" % ( hours, minutes )
def create_job_basename( jobname, run, zf ):
    """Build a date-stamped base name for the current job.

    The result is '<YYYY_DDMon_><jobname>run_<run zero-padded to zf digits>'.
    """
    stamp = time.strftime( "%Y_%d%b_", time.localtime() )
    run_tag = "run_" + str( run ).zfill( zf )
    return stamp + jobname + run_tag
def update_local_job_details( key, status ):
    """ Set one entry of the 'local_job_details.json' file in the current
        directory and write the whole file back out.

        key    -- details dictionary key to update
        status -- new value to store under that key
    """
    ljdf_t = read_local_job_details(".", "local_job_details.json")
    ljdf_t[ key ] = status
    # the 'with' block closes the file itself; the original's extra
    # explicit close() inside it was redundant and has been removed
    with open("local_job_details.json", 'w') as outfile:
        json.dump(ljdf_t, outfile, indent=2)
def redirect_namd_output( CurrentWorkingName = "current_MD_run_files",
        jobtype = "production"):
    """ A function to redirect NAMD output to appropriate folders.

    Copies the working restart files (.coor/.vel/.xsc) into LastRestart/ and
    RestartFiles/, moves the trajectory (.dcd) into OutputFiles/ and the
    captured stdout/stderr into OutputText/ and Errors/, all renamed with a
    timestamped basename.  Raises a pausejob flag if restart files are absent.

    CurrentWorkingName -- stem of the working NAMD output files.
    jobtype            -- unused in this body; presumably reserved for
                          callers.  TODO(review): confirm.
    """
    ljdf_t = read_local_job_details( ".", "local_job_details.json" )
    jobname = ljdf_t[ 'JobBaseName' ]
    run = ljdf_t[ 'CurrentRun' ]
    total = ljdf_t[ 'TotalRuns' ]
    # pad width follows the numbering convention used at directory creation
    zfill = len( str( total ) ) + 1
    basename = create_job_basename( jobname, run, zfill )
    # make shorthand of current working files
    cwf_coor = CurrentWorkingName + ".coor"
    cwf_vel = CurrentWorkingName + ".vel"
    cwf_xsc = CurrentWorkingName + ".xsc"
    cwf_xst = CurrentWorkingName + ".xst"
    cwf_dcd = CurrentWorkingName + ".dcd"
    # check that restartfiles actually exisit, if not create pausejob condition.
    if not os.path.isfile(cwf_coor) or not os.path.isfile(cwf_vel) \
            or not os.path.isfile(cwf_xsc):
        pausejob_flag( "Missing input files." )
        update_local_job_details( "JobStatus", "stopping" )
        update_local_job_details( "JobMessage", "no namd outputfiles generated" )
    # NOTE(review): execution continues even when the inputs above are
    # missing, so the copies below would raise IOError -- confirm intended.
    # copy CurrentWorking (restart) files to LastRestart/ directory
    shutil.copy(cwf_coor, 'LastRestart/' + cwf_coor)
    shutil.copy(cwf_vel, 'LastRestart/' + cwf_vel)
    shutil.copy(cwf_xsc, 'LastRestart/' + cwf_xsc)
    # rename and move current working files
    os.rename( cwf_dcd, "OutputFiles/" + basename + ".dcd" )
    shutil.copy( cwf_vel, "RestartFiles/" + basename + ".vel" )
    shutil.copy( cwf_xsc, "RestartFiles/" + basename + ".xsc" )
    shutil.copy( cwf_xst, "RestartFiles/" + basename + ".xst" )
    shutil.copy( cwf_coor, "RestartFiles/" + basename + ".coor" )
    shutil.move( "temp_working_outputfile.out", "OutputText/" + basename + ".txt" )
    shutil.move( "temp_working_errorsfile.err", "Errors/" + basename + ".err" )
def post_jobrun_cleanup():
    """Tidy the job directory after a run and reset local state flags.

    Moves scheduler logs (slurm*) to JobLog/, core dumps to Errors/ and NAMD
    restart files to LastRestart/, zeroes the timer/jobid fields in
    local_job_details.json, then refreshes the local dcd file list.
    """
    # Idiom fix: loop variable renamed from `file`, which shadowed a builtin.
    for logfile in glob.glob("slurm*"):
        shutil.move(logfile, "JobLog/" )
    for corefile in glob.glob("core*"):
        shutil.move(corefile, "Errors/")
    for restartfile in glob.glob("*.restart.*"):
        shutil.move(restartfile, "LastRestart/")
    # reset timer / jobid flags:
    update_local_job_details( "JobStartTime", 0 )
    update_local_job_details( "JobFinishTime", 0 )
    update_local_job_details( "CurrentJobId", 0 )
    # update dcd files list:
    update_local_dcd_list()
def update_local_dcd_list():
    """Write local_dcd_files_loader.vmd, a VMD script that loads every
    OutputFiles/*.dcd trajectory found in the current directory.

    Idiom fix: file handle is now managed with a `with` statement so it is
    closed even if a write fails.
    """
    cwd = os.getcwd()
    with open('local_dcd_files_loader.vmd', 'w') as f:
        f.write("set firstframe 1 \n")
        f.write("set lastframe -1 \n")
        f.write("set stepsize 1 \n\n")
        f.write("set cwd " + cwd + "\n\n")
        for dcd in glob.glob( "OutputFiles/*.dcd" ):
            line = " mol addfile %s%s type dcd first %s last %s step %s filebonds 1 autobonds 1 waitfor all\n"\
                % ( "$cwd/", dcd, "$firstframe", "$lastframe", "$stepsize")
            f.write( line )
def final_job_cleanup():
    """Delete leftover *BAK backup files once all job runs are complete."""
    stale_backups = glob.glob("*BAK")
    for backup in stale_backups:
        os.remove( backup )
def log_job_timing():
    """ log length of job in human readable format

    Not yet implemented -- the body is intentionally empty.
    """
    ## still to do
def countdown_timer():
    """ function to adjust countdown timer

    Not yet implemented -- the body is intentionally empty.
    """
    ## still to do
def check_if_job_running():
    """Report the recorded state of the job in the current directory.

    Reads local_job_details.json and returns the tuple
    (JobStatus, CurrentJobId, CurrentRun).  Status values used elsewhere in
    this module include 'running', 'submitted', 'processing' and 'cancelled'.

    ## relies solely on 'local_job_details'; querying the scheduler queue
    ## directly would be more robust.
    """
    details = read_local_job_details( os.getcwd(), "local_job_details.json" )
    return details["JobStatus"], details["CurrentJobId"], details["CurrentRun"]
def monitor_jobs():
    """ -function to monitor jobs status on the cluster

    Prints a one-line status row (name, run progress, job id, colourized
    status, runtime, message) for every replicate directory of every job
    stream, based on each directory's local_job_details.json.
    """
    mcf = read_master_config_file()
    account = mcf["Account"]
    walltime = mcf["Walltime"]
    nodes = mcf["nodes"]
    cwd = os.getcwd()
    JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs, nJobStreams,\
               nReplicates, nBaseNames = check_job_structure()
    print(" Account: %6s nodes: %-6s " % (account, nodes))
    print(" Job Name: |Count |JobId |Status |Runtime |Job_messages:")
    # walltime[:-2] trims the tail of the walltime string for the header;
    # assumes a fixed H:MM:SS-like format -- TODO(review): confirm.
    print(" ---------------|------|--------|----------|-%6s-|------------ " % walltime[:-2])
    for i in range(0,nJobStreams):
        JobDir = JobStreams[i]
        jobdirlist = get_current_dir_list(JobDir)
        print("%-24s " %( GREEN + JobDir + ":"+ DEFAULTCOLOUR ))
        for j in jobdirlist:
            dir_path = JobDir + "/" + j
            ljdf_t = read_local_job_details(dir_path,\
                      "local_job_details.json")
            jdn = ljdf_t["JobDirName"]
            qs = ljdf_t["QueueStatus"]  # read but unused here
            js = colour_jobstatus( ljdf_t["JobStatus"] )
            jm = ljdf_t["JobMessage"]
            startT = ljdf_t["JobStartTime"]
            # js still contains "running" as a substring even when wrapped
            # in ANSI colour codes, so the runtime test works.
            T = get_job_runtime( startT, js )
            cjid = str(ljdf_t["CurrentJobId"])
            prog = str( ljdf_t["CurrentRun"] ) + "/" + \
                   str( ljdf_t["TotalRuns"] )
            print(" {:<15s} {:<7s}{:>8s} {:<10s} {:>8s} {:<20s} "\
                 .format(jdn[0:14], prog, cjid, js, T, jm))
    print("\n{}done.".format(DEFAULTCOLOUR))
def colour_jobstatus(js):
    """Wrap a job-status string in the ANSI colour matching its state.

    Unknown statuses are returned unchanged.
    """
    palette = {
        "running":   GREEN,
        "submitted": BLUE,
        "error":     RED,
        "stopped":   BLUE,
    }
    colour = palette.get(js)
    if colour is None:
        return js
    return colour + js + DEFAULTCOLOUR
def md5sum( filename, blocksize=65536 ):
    """Return the hex MD5 checksum of *filename*, read in blocksize chunks.

    BUGFIX: the iter() sentinel was "" (a str); in binary mode read() yields
    bytes, so on Python 3 the sentinel never matched and the loop never
    terminated.  Use b"" (equal to "" on Python 2, correct on Python 3).
    Also opens read-only ("rb" instead of "r+b" -- checksumming needs no
    write access), relies on `with` for closing, and avoids shadowing the
    builtin `hash`.
    """
    digest = hashlib.md5()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            digest.update(block)
    return digest.hexdigest()
def getfilesize( filename ):
    """Return the size of *filename* in bytes."""
    return os.path.getsize( filename )
def check_job_structure():
    """ Function to check job structure in 'master_config_file'
    The job structure has three tiers, JobStreams (we usually
    only have 1), job replicates in the stream, and number
    of job runs to perform in each replicate.

    Exits the process if the five job-detail lists in the master config are
    not all the same length.  Returns the five lists plus the lengths of the
    first three.
    """
    mcf = read_master_config_file()
    JobStreams = mcf["JobStreams"]
    Replicates = mcf["JobReplicates"]
    BaseDirNames = mcf["BaseDirNames"]
    JobBaseNames = mcf["JobBaseNames"]
    Runs = mcf["Runs"]
    # check that job details lists are the same length in master_config_file:
    nJobStreams = int( len( JobStreams ))
    nReplicates = int( len( Replicates ))
    nBaseNames = int( len( BaseDirNames ))
    nJobBaseNames = int( len( JobBaseNames ))
    nRuns = int( len( Runs ))
    if not nJobStreams==nReplicates==nBaseNames==nJobBaseNames==nRuns:
        # NOTE: the backslash continuation below embeds the next line's
        # leading spaces in the printed message.
        print("Job Details Section lists do not appear to be the same length\
 in master_config_file.")
        sys.exit()
    return JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs,\
           nJobStreams, nReplicates, nBaseNames
def initialize_job_directories():
    """ Function to setup and initialize job structure directories
    as defined in the 'master_config_file'. This function copies
    the job template from /Setup_and_Config

    For every job stream, creates the stream directory when missing and
    clones Setup_and_Config/JobTemplate into one zero-padded replicate
    directory per requested replicate; existing replicates are skipped.
    """
    cwd=os.getcwd()
    JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs, nJobStreams,\
               nReplicates, nBaseNames = check_job_structure()
    # create job stream structure: /JobStreams/JobReplicates
    for i in range(0, nJobStreams):
        TargetJobDir = cwd + "/" + JobStreams[i]
        if not os.path.exists(TargetJobDir):
            print("Job Stream directory /{} does not exist.\
 Making new directory.".format(TargetJobDir))
            os.makedirs(JobStreams[i])
        # Copy directory structure from /Setup_and Config/JobTemplate
        print("Making job replicates in /{}".format(JobStreams[i]))
        TemplatePath = cwd + "/Setup_and_Config/JobTemplate"
        # check existance of JobTemplate directory:
        if not os.path.exists(TemplatePath):
            print("Can't find the /Setup_and_Config/JobTemplate \
directory. Exiting.")
            # BUGFIX: the original called sys.exit(error) where `error` is
            # undefined, raising NameError instead of exiting cleanly.
            sys.exit(1)
        replicates = int(Replicates[i])
        zf = len(str(replicates)) + 1      # zero-pad width for replicate suffix
        for j in range(1,replicates+1):
            suffix = str(j).zfill(zf)
            NewDirName = JobStreams[i] + "/" + BaseDirNames[i] + suffix
            if os.path.exists(NewDirName):
                print("Replicate job directory {} already exists! \
-Skipping.".format(NewDirName))
            else:
                shutil.copytree(TemplatePath, NewDirName)
                print("Creating:{}".format(NewDirName))
def populate_job_directories():
    """ -function to populate or update job directory tree
    with job scripts that are located in /Setup_and_Config

    Builds per-stream sbatch scripts from the templates (substituting node,
    walltime, account, task and module placeholders), then copies the
    scripts, the Setup_and_Config *.py / *.conf files and a freshly seeded
    local_job_details.json into every replicate directory.
    """
    JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs, \
               nJobStreams, nReplicates, nBaseNames = check_job_structure()
    mcf = read_master_config_file()
    # template details dictionary, seeded from Setup_and_Config defaults:
    ljdf_t = read_local_job_details()
    cwd=os.getcwd()
    ljdf_t[ 'BASE_DIR' ] = cwd
    ljdf_t[ 'CurrentRound' ] = mcf["Round"]
    ljdf_t[ 'Account' ] = mcf["Account"]
    ljdf_t[ 'Nodes' ] = mcf["nodes"]
    ljdf_t[ 'ntpn' ] = mcf["ntpn"]
    ljdf_t[ 'ppn' ] = mcf["ppn"]
    ljdf_t[ 'Walltime' ] = mcf["Walltime"]
    ljdf_t[ 'JobFailTime' ] = mcf["JobFailTime"]
    ljdf_t[ 'DiskSpaceCutOff' ] = mcf["DiskSpaceCutOff"]
    Flavour = mcf["Flavour"]
    OptScript = mcf["EquilibrateConfScript"]
    ProdScript = mcf["ProductionConfScript"]
    ModuleFile = mcf["ModuleFile"]
    startscript = mcf["SbatchEquilibrateScript"]
    productionscript = mcf["SbatchProductionScript"]
    ## list files to transfer:
    print("{}Job Files to transfer from /Setup_and_Config:{}"\
         .format( GREEN, DEFAULTCOLOUR ))
    print("{} {} \n {} ".format( BLUE, startscript,\
          productionscript ))
    print(" local_job_details.json ")
    # pyfile[17:] strips the "Setup_and_Config/" path prefix for display
    for pyfile in glob.glob(r'Setup_and_Config/*.py' ):
        print(" " + pyfile[17:])
    for conffile in glob.glob(r'Setup_and_Config/*.conf' ):
        print(" " + conffile[17:])
    ## descend through job structure and populate job directories:
    for i in range(0, nJobStreams):
        TargetJobDir = cwd + "/" + JobStreams[i]
        print("{}\nPopulating JobStream: {} {}".format( GREEN,
              TargetJobDir, DEFAULTCOLOUR))
        ## check to see if there actually are any job directories to fill:
        if not os.path.exists( TargetJobDir ):
            print("Job directory {} not found. Have you initialized?"\
                 .format(TargetJobDir))
            # NOTE(review): `error` is undefined here -- this raises
            # NameError rather than exiting with a status code. TODO fix.
            sys.exit(error)
        ## create temporary sbatch scripts:
        sb_start_template = "Setup_and_Config/" + startscript + ".template"
        sb_prod_template = "Setup_and_Config/" + productionscript + ".template"
        if not os.path.exists( sb_start_template ) \
                or not os.path.exists( sb_prod_template ):
            # NOTE(review): "Settup_and_Config" typo in the message below,
            # and `error` is undefined (NameError) -- TODO fix both.
            print("Can't find sbatch template files in Settup_and_Config. Exiting.")
            sys.exit(error)
        ## modify replicate elements in staging dictionary file:
        ljdf_t[ 'JOB_STREAM_DIR' ] = JobStreams[i]
        ljdf_t[ 'CurrentRun' ] = 0
        ljdf_t[ 'TotalRuns' ] = int( Runs[i] )
###### ljdf_t[ 'Runs' ] = int( Runs[i] )
        ljdf_t[ 'JobBaseName' ] = JobBaseNames[i]
        # replacement strings for the 'X' placeholders in the templates:
        nnodes = "#SBATCH --nodes=" + mcf["nodes"]
        ntime = "#SBATCH --time=" + mcf["Walltime"]
        naccount = "#SBATCH --account=" + mcf["Account"]
        nntpn = "ntpn=" + mcf["ntpn"]
        nppn = "ppn=" + mcf["ppn"]
        nmodule = "module load " + ModuleFile
        nopt = "optimize_script=" + OptScript
        nprod = "production_script=" + ProdScript
        shutil.copy( sb_start_template, 'sb_start_temp')
        shutil.copy( sb_prod_template, 'sb_prod_temp' )
        ## replace lines in sbatch files:
        # fileinput with inplace=True redirects stdout into the file being
        # edited, hence the sys.stdout.write at the end of the loop.
        for f in ["sb_start_temp", "sb_prod_temp"]:
            for line in fileinput.FileInput( f, inplace=True ):
                line = line.replace( '#SBATCH --nodes=X', nnodes )
                line = line.replace( '#SBATCH --time=X', ntime )
                line = line.replace( '#SBATCH --account=X', naccount )
                line = line.replace( 'ntpn=X', nntpn )
                line = line.replace( 'ppn=X', nppn )
                line = line.replace( 'module load X', nmodule )
                line = line.replace( 'optimize_script=X', nopt )
                line = line.replace( 'production_script=X', nprod )
                sys.stdout.write(line)
        ## update local job details file:
        jobdirlist = get_current_dir_list(JobStreams[i])
        for j in jobdirlist:
            print("{} -populating: {}{}".format(BLUE, j, DEFAULTCOLOUR ))
            ljdf_t[ 'JobDirName' ] = j
            ljdfile = JobStreams[i] + "/" + j + "/local_job_details.json"
            if not os.path.isfile( ljdfile ):
                with open(ljdfile, 'w') as outfile:
                    json.dump(ljdf_t, outfile, indent=2)
                    outfile.close()
                print("")
            else:
                print(" skipping local_details_file: already exists ")
            ## copy across python scripts from /Setup_and_Config:
            jobpath = JobStreams[i] + "/" + j + "/"
            sbs_path = jobpath + "/" + startscript
            sbp_path = jobpath + "/" + productionscript
            shutil.copy( 'sb_start_temp', sbs_path )
            shutil.copy( 'sb_prod_temp' , sbp_path )
            for pyfile in glob.glob(r'Setup_and_Config/*.py' ):
                shutil.copy2( pyfile, jobpath )
            for conffile in glob.glob(r'Setup_and_Config/*.conf' ):
                shutil.copy2(conffile, jobpath)
    ## remove tempfiles.
    os.remove('sb_start_temp')
    os.remove('sb_prod_temp')
    print("\n -done populating directories")
def check_job():
    """ Function to check the input of the current job and calculate
    resources required.

    Verifies that the psf/pdb/parameter files referenced by the
    equilibration and production NAMD configs exist, prints the job
    configuration summary, then estimates trajectory data volume and total
    simulated time.
    """
    mcf = read_master_config_file()
    jd_opt, jd_opt_pl = read_namd_job_details(mcf["EquilibrateConfScript"])
    jd_prod, jd_prod_pl = read_namd_job_details(mcf["ProductionConfScript"])
    # # checking if files in configuration exist where they are supposed to be.
    print("{}--------------------------------------------------------------------------------".format(BLUE))
    print("{}Checking configuration input files:{}".format(BLUE,DEFAULTCOLOUR))
    print("{}--------------------------------------------------------------------------------".format( BLUE))
    print("{}{}:{}".format(BLUE,mcf["EquilibrateConfScript"],DEFAULTCOLOUR))
    check_file_exists(jd_opt["psffilepath"])
    check_file_exists(jd_opt["pdbfilepath"])
    for i in jd_opt_pl:
        check_file_exists(i)
    print("{}{}:{}".format(BLUE,mcf["ProductionConfScript"],DEFAULTCOLOUR))
    check_file_exists(jd_prod["psffilepath"])
    check_file_exists(jd_prod["pdbfilepath"])
    for i in jd_prod_pl:
        check_file_exists(i)
    sr = 0 # Initalise no. of job repliates
    run = 0 # Initalise no. of runs in each replicate
    print("{}--------------------------------------------------------------------------------".format(BLUE))
    print("{}Job check summary: ".format(BLUE,DEFAULTCOLOUR))
    print("{}--------------------------------------------------------------------------------".format(BLUE))
    print("{} Main Job Directory: {}{}".format(RED, DEFAULTCOLOUR, mcf["JobStreams"]))
    print("{} Simulation basename: {}{}".format(RED, DEFAULTCOLOUR, mcf["BaseDirNames"]))
    print("{} Sbatch start template: {}{}.template".format(RED, DEFAULTCOLOUR, mcf["SbatchEquilibrateScript"]))
    print("{} Sbatch prouction template: {}{}.template".format(RED, DEFAULTCOLOUR, mcf["SbatchProductionScript"]))
    print("{} Optimization script: {}{}".format(RED, DEFAULTCOLOUR, mcf["EquilibrateConfScript"]))
    print("{} Production script: {}{}".format(RED, DEFAULTCOLOUR, mcf["ProductionConfScript"]))
    print("{} Namd modulefile: {}{}".format(RED, DEFAULTCOLOUR, mcf["ModuleFile"]))
    Replicates = mcf["JobReplicates"]
    Runs = mcf["Runs"]
    nReplicates = int(len(Replicates))
    nRuns = int(len(Runs))
    # calculating variables from input files:
    for i in range(0, nReplicates):
        sr += int(Replicates[i]) # total no. of job replicate
    for j in range(0, nRuns):
        run += int(Runs[j]) # total no. of runs in each replicate
    spr = jd_prod["steps"] # steps per run
    dcd = jd_prod["dcdfreq"] # dcd write frequency
    # BUGFIX: dfs and tdf were each computed twice in a row; the redundant
    # duplicate pair of lines has been removed (values are identical).
    dfs = int(jd_prod["natom"])*12.0/(1024.0*1024.0) # dcd frame size (based on number of atoms from psf)
    tdf = int(spr)/int(dcd)*int(run)*int(sr) # total dcd frames
    tpd = tdf*dfs/(1024) # total production data
    tst = (int(sr)*int(run)*int(jd_prod["timestep"])*int(spr))/1000000.0 # total simulated time
    print("{}--------------------------------------------------------------------------------".format(BLUE))
    print("{}Estimation of data to be generated from the production run of this simulation:{}".format(BLUE, DEFAULTCOLOUR))
    print("{}--------------------------------------------------------------------------------".format(BLUE))
    print("{} Simulation directories: {}%-8s {}Runs per directory: {}%-8s"\
         .format(RED, DEFAULTCOLOUR, RED, DEFAULTCOLOUR) % (sr, run))
    print("{} Steps per run: {}%-8s {}Dcdfreq in run: {}%-8s"\
         .format(RED, DEFAULTCOLOUR, RED, DEFAULTCOLOUR) % (spr, dcd))
    print("{} Dcd frame size(MB) {}%-8.3f {}Total dcd frames: {}%-8s"\
         .format(RED, DEFAULTCOLOUR, RED, DEFAULTCOLOUR) % (dfs, tdf))
    print(" {} Total simulated time:{} %12.2f nanoseconds"\
         .format(GREEN, DEFAULTCOLOUR) %(tst))
    if not (tpd==0):
        print(" {} Total production data:{} %12.2f GB"\
             .format(GREEN, DEFAULTCOLOUR) %(tpd))
    else:
        print(" {} Total production data:{} %12.2f {}GB - error in calculating \
frame size. No psf file?".format(RED, DEFAULTCOLOUR, RED) %(tpd))
    print("{}------------------------------------------------------------------------------".format(BLUE))
    print("{}Node configuration:{}".format(BLUE, DEFAULTCOLOUR))
    print("{}------------------------------------------------------------------------------".format(BLUE))
    print("{}Sbatch Scripts: {} %s , %s ".format(RED, DEFAULTCOLOUR) % \
         (mcf["SbatchEquilibrateScript"], mcf["SbatchProductionScript"]))
    print("{}nodes: {} %-12s ".format(RED, DEFAULTCOLOUR) % (mcf["nodes"]))
    print("{}walltime: {} %-12s ".format(RED, DEFAULTCOLOUR) % (mcf["Walltime"]))
    print("{}no. tasks per node: {} %-12s ".format(RED, DEFAULTCOLOUR) % (mcf["ntpn"]))
    print("{}processes per node: {} %-12s ".format(RED, DEFAULTCOLOUR) % (mcf["ppn"]))
    if not mcf["Account"] == "VR0000":
        print("{}account: {} %-12s ".format(RED, DEFAULTCOLOUR) % (mcf["Account"]))
    else:
        print("{}account: %-12s -have you set your account?{} "\
             .format(RED, DEFAULTCOLOUR) % (mcf["Account"]))
def check_file_exists(target):
    """Print a colour-coded existence report for one config-referenced file.

    target is expected to begin with the '../../' relative prefix used by
    the NAMD config files; the prefix is stripped before checking.
    """
    mesg1 = "{} found {} -ok!{}".format(DEFAULTCOLOUR, GREEN, DEFAULTCOLOUR)
    mesg2 = "{} found {} -ok!{} -example file?".format(DEFAULTCOLOUR, GREEN, DEFAULTCOLOUR)
    mesg3 = "{} not found.{} -Check config file.{}".format(DEFAULTCOLOUR, RED, DEFAULTCOLOUR)
    prefix, ntarget = target[0:6], target[6:]   # strip off "../../"
    if "../../" not in prefix:
        print("{}unexpected path structure to input files:{}".format(RED, DEFAULTCOLOUR))
    if not os.path.exists(ntarget):
        print("{} %-46s {}".format(RED, mesg3) %(ntarget))
    elif "example" in target:
        print("{} %-50s {}".format(RED, mesg2) %(ntarget))
    else:
        print("{} %-50s {}".format(RED, mesg1) %(ntarget))
def benchmark():
    """ -function to benchmark job

    Only the announcement message is implemented so far; the remaining
    steps are sketched in the comments below.
    """
    # read job details:
    mcf = read_master_config_file()
    jd_opt, jd_opt_pl = read_namd_job_details(mcf["EquilibrateConfScript"])
    print("{} Setting up jobs for benchmarking based on current job config files.".format(DEFAULTCOLOUR))
    # create temporary files/ figure out job size.
    # move files to /Setup_and_Config/Benchmarking / create dictionary/ json file.
    # optimize job/ create sbatch_files.
    # start benchmarking jobs:
    # extract results
    # plot results.
def get_current_dir_list(job_dir):
    """Return the sorted entries of *job_dir*.

    Returns an empty list (and warns on stderr) when job_dir is not a
    directory.
    """
    if not os.path.isdir(job_dir):
        sys.stderr.write("No directories found in {}. Have you initialized?".format(job_dir))
        return []
    return sorted(os.listdir(job_dir))
def get_curr_job_list(job_dir):
    """Recursively collect every file path beneath *job_dir*.

    Argument(s):
    job_dir -- path to the job directory.

    Returns:
    Sorted list of full file paths found anywhere under job_dir; an empty
    list (with a message written to stderr) when job_dir does not exist.
    """
    collected = []
    if not os.path.isdir(job_dir):
        # error messages go to stderr, not stdout.
        sys.stderr.write("{} doesn't exist, it needs to be initialised.{}"
                         .format(job_dir, os.linesep))
        return collected
    for dirpath, _subdirs, filenames in os.walk(job_dir):
        for filename in filenames:
            candidate = os.path.join(dirpath, filename)
            if os.path.isfile(candidate):
                collected.append(candidate)
    collected.sort()
    return collected
def execute_function_in_job_tree( func, *args ):
    """Run func(*args) from inside every replicate directory of every
    job stream, restoring the original working directory afterwards.
    """
    base_dir = os.getcwd()
    ## job structure tree as given in the "master_config_file":
    JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs, \
               nJobStreams, nReplicates, nBaseNames = check_job_structure()
    for stream in JobStreams[:nJobStreams]:
        stream_path = base_dir + '/' + stream
        if not os.path.isdir( stream_path ):
            print("Can't see job directories {} -Have you initialized?"\
                 .format(stream_path))
            continue
        ## walk each replicate directory of this JobStream:
        for entry in get_current_dir_list( stream_path ):
            job_path = stream_path + '/' + entry
            if os.path.isdir(job_path):
                os.chdir( job_path )
                func( *args )
            else:
                print("\nCan't descend into job directory tree. Have you populated?\n {}"\
                     .format(job_path))
    os.chdir(base_dir)
def start_all_jobs():
    """Submit the equilibration start script in every job directory."""
    startscript = read_master_config_file()["SbatchEquilibrateScript"]
    execute_function_in_job_tree( start_jobs, startscript )
def start_jobs( startscript ):
    """ function to start jobs in a directory

    Submits *startscript* with sbatch only when the directory is pristine
    (status 'ready', no job id, no completed runs); otherwise prints why
    the submission was skipped.

    NOTE(review): the final `else` binds to the 'running' test, so a
    cancelled job also falls through to the jobrun>=1 message below --
    confirm this nesting is intentional.
    """
    cwd = os.getcwd()
    jobstatus, jobid, jobrun = check_if_job_running()
    if jobstatus == "ready" and jobid == 0 and jobrun == 0:
        subprocess.Popen(['sbatch', startscript])
        update_local_job_details( "JobStatus", "submitted" )
        update_local_job_details( "JobMessage", "submitted to queue" )
    else:
        if jobstatus == "cancelled":
            print("Appears this job was cancelled. Clear flags before restart. {}".format( cwd[-20:], jobid ))
        if "running" in jobstatus:
            print("A job appears to be running here: {} : jobid:{}".format( cwd[-20:], jobid ))
        else:
            if jobrun >= 1:
                print("Seems equilibration job already run here, don't you want to restart instead? (--restart)")
def clear_jobs():
    """Clear pausejob/stop flags in every job directory of the tree."""
    # config read kept for its existence check / warning side effect
    mcf = read_master_config_file()
    execute_function_in_job_tree( clear_all_jobs )
def clear_all_jobs():
    """Reset pause/stop flags in the current directory (no-op when running).

    Sets the local job details back to a clean 'ready' state and removes
    any explicit 'pausejob' flag file.
    """
    cwd = os.getcwd()
    jobstatus, jobid, jobrun = check_if_job_running()
    if "running" in jobstatus:
        # BUGFIX: the message previously said "jobstatus:" while printing
        # the job id; label now matches the value (and the sibling message
        # in start_jobs).
        print("A job appears to be running here:..{} : jobid:{}".format( cwd[-20:], jobid ))
        return
    update_local_job_details( "JobStatus", "ready" )
    update_local_job_details( "CurrentJobId", 0 )
    update_local_job_details( "JobMessage", "cleared stop flags" )
    update_local_job_details( "PauseJobFlag", 0 )
    ## remove explicit flag file:
    if os.path.isfile( "pausejob" ):
        os.remove( "pausejob" )
    print("{} cleared stop flags in: {} {}".format( GREEN, cwd, DEFAULTCOLOUR ))
def restart_all_production_jobs():
    """Resubmit the production sbatch script throughout the job tree."""
    print("-- restarting production jobs.")
    mcf = read_master_config_file()
    ## check_job_status
    execute_function_in_job_tree(restart_jobs, mcf["SbatchProductionScript"])
def restart_jobs(restartscript):
    """ function to restart production jobs

    restartscript -- sbatch production script to submit.

    Skips directories with an active or cancelled job; otherwise resubmits
    the production script, clearing the pausejob flag when resuming from a
    cleared/cancelled state so CurrentRun is not incremented twice.
    """
    cwd = os.getcwd()
    jobstatus, jobid, jobrun = check_if_job_running()
    ljdf_t = read_local_job_details( ".", "local_job_details.json" )
    current = ljdf_t["CurrentRun"]
    jobid = ljdf_t["CurrentJobId"]
    total = ljdf_t["TotalRuns"]
    message = ljdf_t["JobMessage"]
    time.sleep( 0.1 )
    # BUGFIX: the original tested for "submittied" (typo), so freshly
    # submitted jobs were never caught here and could be double-submitted.
    if jobstatus in { "running", "submitted" }:
        print("A job appears to be submitted or running here:..{} : jobid:{}".format( cwd[-20:], jobid ))
        return
    if "cancelled" in jobstatus:
        print("Job was abruptly cancelled. Clear pause flags first. (--clear) {}".format( cwd[-20:]))
        return
    if jobstatus in { "finished", "ready" }:
        if "cleared" in message:         # assume restarting from cancelled job.
            pausejob_flag( "remove" )    # -so we don't increment CurrentRun number
        subprocess.Popen(['sbatch', restartscript])
        update_local_job_details( "JobStatus", "submitted" )
        update_local_job_details( "JobMessage", "production job restarted" )
        return
    if current == total:
        # NOTE(review): `runs` is undefined in this scope -- reaching this
        # branch raises NameError in the original too.  The intent (how far
        # to extend TotalRuns) needs confirming before a real fix. TODO.
        newtotal = total + int( runs )
        newcurrent = current + 1
        pausejob_flag( "remove" )
        update_local_job_details( "CurrentRun", newcurrent )
        update_local_job_details( "TotalRuns", newtotal )
        subprocess.Popen(['sbatch', restartscript])
        update_local_job_details( "JobStatus", "submitted" )
        update_local_job_details( "JobMessage", "production job restarted" )
        return
def recover_all_jobs():
    """Walk the job tree and interactively restore crashed directories."""
    banner = (
        "Crash recovery: ",
        "Typically we expect binary output files like .dcd to be the",
        "same size using this workflow. If a job has crashed we can ",
        "restore the directory to the last known good point by entering",
        "the name of the first bad file. Files will be restored to this",
        "point. ",
    )
    for text in banner:
        print(text)
    execute_function_in_job_tree( recovery_function )
def recovery_function():
    """ this function checks sizes and md5sums of outputfiles, giving the opportunity
    for a user to recover from the last known good file

    Interactive and destructive: lists each .dcd in OutputFiles/ with its
    size, asks the user to name the last good file, then (after double
    confirmation) deletes every later output/restart/text file and copies
    the chosen run's restart files back over current_MD_run_files.*.

    NOTE(review): uses raw_input, i.e. this module targets Python 2.
    """
    ljdf = read_local_job_details( ".", "local_job_details.json" )
    # the following constructs a string to find the "equilibration" dcd file
    # (which is numbered 0, but with zfill padding)
    total = ljdf["TotalRuns"]
    zf = len( str(total) ) + 1 + 4
    zf_bait = ".dcd".zfill( zf )
    dirlist = get_current_dir_list( "OutputFiles" )
    line = ljdf["JOB_STREAM_DIR"] + "/" + ljdf["JobDirName"] + "/" + "OutputFiles:"
    print("\n{}{}{}".format( GREEN, line, DEFAULTCOLOUR ))
    #### while
    for i in dirlist:
        if "dcd" in i:
            path = 'OutputFiles/' + i
            size = os.path.getsize( path )
            if not zf_bait in i:
                print("%-24s %12s " % ( i, size ))
            else:
                print("{}%-24s %12s -equilibration file {} ".format( BLUE, DEFAULTCOLOUR ) % ( i, size ))
    print("Enter the name of the last {}good file{} or press 'enter' to continue scanning. ('q' to quit)".format(GREEN, DEFAULTCOLOUR ))
    target = raw_input()
    if target == "q":
        sys.exit("exiting" )
    if target != "":
        # find index of target in dirlist.
        if target in dirlist:
            #  index = dirlist.index( target )
            # everything after the chosen file is considered bad:
            index = dirlist.index(target)+1
            print("\n{}Files to delete:{}".format(BLUE, DEFAULTCOLOUR ))
            targetlist=[]
            for i in range( index, int(len(dirlist))):
                print(dirlist[i])
                targetlist.append(dirlist[i])
            line = "  {}Confirm:{} y/n ".format(BLUE, DEFAULTCOLOUR )
            confirm = raw_input(line)
            if confirm in { 'Y', 'y' }:
                line = "  {}Really? -this can't be undone! Confirm:{} y/n ".format(BLUE, DEFAULTCOLOUR )
                confirm = raw_input(line)
                if confirm in { 'Y', 'y' }:
                    print("-deleting redundant output and restart files:")
                    for j in targetlist:
                        targetfile=os.getcwd()+"/OutputFiles/"+j
                        try:
                            os.remove(targetfile)
                        except OSError:
                            pass   # best-effort delete; file may not exist
                        #slice base name to remove other files:
                        basename = j[ :-4 ]
                        targetfile=os.getcwd()+"/OutputText/" + basename + ".txt"
                        try:
                            os.remove(targetfile)
                        except OSError:
                            pass
                        for k in ['.vel', '.coor', '.xsc', '.xst']:
                            targetfile=os.getcwd()+"/RestartFiles/" + basename + k
                            try:
                                os.remove(targetfile)
                            except OSError:
                                pass
                    # slice job number and basename from dcd job name:
                    num = int( target[-zf:-4] )
                    basename = target[ :-4 ]
                    print("-updating restart files:")
                    # NOTE(review): message below says "RestatFiles" (typo)
                    for k in ['.vel', '.coor', '.xsc', '.xst']:
                        src=os.getcwd()+"/RestartFiles/" + basename + k
                        dst=os.getcwd()+"/current_MD_run_files" + k
                        print("copy /RestatFiles/{}{} to current_MD_run_files{}".format(basename, k, k))
                        shutil.copyfile(src, dst)
        else:
            print(target, " not found: ")
def stop_jobs():
    """Stop every job in the tree immediately (via scancel)."""
    print("-- stopping all jobs")
    execute_function_in_job_tree(stop_all_jobs_immediately)
def pause_jobs():
    """Raise a pause flag in every job directory of the tree."""
    print("-- pausing all jobs")
    execute_function_in_job_tree(pause_all_jobs)
def pause_all_jobs():
    """Raise a pause flag in the current directory unless no job is active."""
    jobstatus, _jobid, _jobrun = check_if_job_running()
    inactive = { "stopped", "cancelled", "processing" }
    if jobstatus in inactive:
        update_local_job_details( "JobMessage", "no job running" )
    else:
        pausejob_flag( "Manual pausing of job." )
        update_local_job_details( "JobMessage", "job paused" )
def stop_all_jobs_immediately():
    """scancel the current directory's job unless it is already inactive."""
    jobstatus, jobid, _jobrun = check_if_job_running()
    if jobstatus not in { "stopped", "cancelled", "processing" }:
        cancel_job( jobid )
    else:
        update_local_job_details( "JobMessage", "no job running" )
def cancel_job( jobid ):
    """Send scancel for *jobid* and record the stopped state locally.

    jobid -- scheduler job id; may arrive as int or str depending on how
             it was stored in local_job_details.json.
    """
    print(" stopping job: {}".format( jobid ))
    pausejob_flag( "scancel commnad sent." )
    update_local_job_details( "JobMessage", "sent scancel command" )
    update_local_job_details( "JobStatus", "stopped" )
    update_local_job_details( "PauseJobFlag", "cancelled" )
    # BUGFIX: Popen argument lists must contain strings, and the > 0 guard
    # fails on Python 3 when jobid is a str; coerce explicitly.  Also
    # removed an unused local (`message`).
    if int( jobid ) > 0:
        subprocess.Popen([ 'scancel', str( jobid ) ])
    update_local_job_details( "CurrentJobId", -1 )
def erase_all_data():
    """ -function to erase all data for a clean start. Use with caution!

    Interactive and destructive: after the user types the exact phrase
    'erase all my data', recursively deletes every job stream directory.
    NOTE(review): uses raw_input, i.e. this module targets Python 2.
    """
    JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs, \
               nJobStreams, nReplicates, nBaseNames = check_job_structure()
    cwd = os.getcwd()
    print("\nWe are about to erase all data in this directory, which can be useful")
    print("for making a clean start, but disasterous if this is the wrong folder!")
    print("{}Proceed with caution!{}".format(RED, DEFAULTCOLOUR))
    print("This operation will delete all data in the folders:\n")
    print("/{} ".format(JobStreams,DEFAULTCOLOUR))
    print("/JobLog/ - Job logs.")
    print("/Setup_and_Config/Benchmarking/ - Benchmarking data.")
    strg = raw_input("\n Press enter to quit or type: {}'erase all my data' {}: ".format(GREEN, DEFAULTCOLOUR))
    print (strg)
    # exact-phrase confirmation guards the destructive branch
    if strg in ['erase all my data']:
        print("Ok, well if you say so....")
        for j in range( 0, nJobStreams):
            TargetDir = cwd + "/" + JobStreams[j]
            print(" Erasing all files in:{}".format(TargetDir))
            if os.path.isdir( TargetDir ):
                shutil.rmtree( TargetDir )
            else:
                print(" Couldn't see {}".format(TargetDir))
        print("\n Oh the humanity. I sure hope that wasn't anything important.")
    else:
        print(" Phew! Nothing erased.")
def create_dcd_file_loader( first = 0, last = -1, step =1):
    """ A function to create an easyfile loader to be able to read in a
    contiguous series of dcd output files for VMD.

    NOTE(review): this function looks unfinished and as written raises
    NameError -- `dcdline` is never defined, the template string mixes
    %-placeholders with str.format, and the built line is never written
    anywhere.  TODO: finish or remove.
    """
    cwd = os.getcwd()
    OutputDir = cwd + "/" "OutputFiles"   # implicit literal concatenation
    DirList = get_current_dir_list( OutputDir )
    # create vmd line:
    line = "mol addfile %s type dcd first %s last %s step %s filebonds 1 autobonds 1 waitfor all\n"\
           .format( dcdline, first, last, step )
def clone():
    """Clone the directory layout without data, keeping input files (stub)."""
    print("-- cloning data directory!!")
# cleaning up
#!/usr/bin/env python
# MD workflow functions. mdwf
""" mdwf functions. version 0.25
"""
import os
import subprocess
import sys
from collections import OrderedDict
import json
import shutil
import fileinput
import hashlib
import time
import datetime
import glob
import re
# ansi color variables for formatting purposes:
DEFAULTCOLOUR = '\033[0m'   # SGR reset
RED = '\033[34;3m'          # NOTE(review): SGR 34 is ANSI *blue* -- value looks swapped with BLUE; confirm before changing
GREEN = '\033[32;2m'        # SGR 32 = green (faint)
BLUE = '\033[31;2m'         # NOTE(review): SGR 31 is ANSI *red* -- value looks swapped with RED; confirm before changing
def read_master_config_file():
    """ Reads the json file 'master_config_file' and
    returns the dictionary (an OrderedDict preserving key order).

    Prints a hint and returns None when the file is absent from the
    current directory.
    """
    if not os.path.isfile( 'master_config_file' ):
        print("{}Can't see 'master_config_file' {} ".format(RED, DEFAULTCOLOUR))
        print("{}Have you populated the directory? (./mdwf -p){}".format(RED, DEFAULTCOLOUR))
        return None
    with open( 'master_config_file' ) as master_json:
        return json.load(master_json, object_pairs_hook=OrderedDict)
def read_local_job_details(path="Setup_and_Config",
           ljdf_target="local_job_details_template.json"):
    """ Reads the json file 'local_job_details.json' and
    returns the dictionary (an OrderedDict), or None if the file is missing.

    path        -- directory containing the details file.
    ljdf_target -- name of the json details file within that directory.
    """
    target = path + "/" + ljdf_target
    if not os.path.isfile(target):
        print("Can't see {} Have you populated job tree? ".format(target))
        # BUGFIX: the original fell through to `return ljdf` with the local
        # unbound, raising UnboundLocalError; return None explicitly.
        return None
    with open(target) as local_json:
        return json.load(local_json, object_pairs_hook=OrderedDict)
def read_namd_job_details(targetfile):
    """ Extracts simulation details from given namd config file
    and returns a dictionary and a list. The function assumes
    namd files are located in /Setup_and_Config

    targetfile -- NAMD config filename under Setup_and_Config/.

    Returns (jdd, jdpl): jdd maps keys such as psffile/pdbfile/timestep/
    steps/dcdfreq/runsteps/restartfreq/natom to the values parsed from the
    config; jdpl lists the parameter file paths.  Both are empty when the
    file is missing.
    """
    target = os.getcwd() + "/Setup_and_Config/" + targetfile
    jdd = {}    # job-details dictionary
    jdpl = []   # job details parameter list
    if os.path.isfile(target):
        f = open(target,'r')
        for lline in f:
            # keyword matching is done on a fixed-width prefix of the line
            line = lline[0:18]             # strip line to avoid artifacts
            if not "#" in line[0:2]:       # leave out commented lines
                if 'structure ' in line:
                    pl = lline.split()
                    jdd["psffilepath"] = pl[1]
                    # NOTE(review): the split pattern has a trailing '|'
                    # (empty alternative) -- presumably unintended; confirm.
                    nl = re.split(('\s+|/|'),lline)
                    for i in nl:
                        if '.psf' in i:
                            jdd["psffile"] = i
                            natom = get_atoms(i)
                            jdd["natom"] = natom
                if 'coordinates ' in line:
                    pl = lline.split()
                    jdd["pdbfilepath"] = pl[1]
                    nl = re.split(('\s+|/|'),lline)
                    for i in nl:
                        if '.pdb' in i:
                            jdd["pdbfile"] = i
                if 'timestep ' in line:
                    nl = lline.split()
                    jdd["timestep"] = nl[1]
                if 'NumberSteps ' in line:
                    nl = lline.split()
                    jdd["steps"] = nl[2]
                if 'dcdfreq ' in line:
                    nl = lline.split()
                    jdd["dcdfreq"] = nl[1]
                if 'run ' in line:
                    nl = lline.split()
                    jdd["runsteps"] = nl[1]
                if 'restartfreq ' in line:
                    nl = lline.split()
                    jdd["restartfreq"] = nl[1]
                if 'parameters ' in line:
                    nl = lline.split()
                    jdpl.append(nl[1])
        f.close()
    else:
        print("{} {} file not found.{}".format(RED,targetfile,DEFAULTCOLOUR))
    return jdd, jdpl
def gather_jobs():
    """Create convenient vmd input files to load and view trajectory data.

    Writes Analysis/model_loader.vmd (loads the psf/pdb named in the
    master config file) and Analysis/dcd_trajectory_fileloader.vmd,
    which is filled in by gather_list() as it runs in each job
    directory of the tree.
    """
    global dcdlist
    # write basic model loader.
    mcf = read_master_config_file()
    psf = mcf["PsfFileName"]
    pdb = mcf["PdbFileName"]
    cwd = os.getcwd()
    # FIX: the explicit close() calls inside/after the 'with' blocks were
    # redundant -- the context manager already closes the files.
    with open("Analysis/model_loader.vmd", "w+") as mfile:
        mfile.write("# Basic vmd model loader \n")
        mfile.write("mol new " + cwd + "/InputFiles/" + psf
             + " type psf first 0 last -1 step 1 filebonds 1 autobonds 1 waitfor all\n")
        mfile.write("mol addfile " + cwd + "/InputFiles/" + pdb
             + " type pdb first 0 last -1 step 1 filebonds 1 autobonds 1 waitfor all\n")
    with open("Analysis/dcd_trajectory_fileloader.vmd", "w+") as dcdlist:
        execute_function_in_job_tree(gather_list)
def extend_jobs(a):
    """Extend the total number of runs in every job directory by *a*."""
    execute_function_in_job_tree(extend_runs, a)
def extend_runs(a):
    """Add *a* extra runs to 'TotalRuns' in the local job details file."""
    details = read_local_job_details(".", "local_job_details.json")
    # bump the run total and write it back
    new_total = int(details['TotalRuns']) + a
    update_local_job_details("TotalRuns", new_total)
def sorted_ls(path):
    """Return the entries of *path* sorted by modification time (oldest first)."""
    def modification_time(entry):
        return os.stat(os.path.join(path, entry)).st_mtime
    return sorted(os.listdir(path), key=modification_time)
def gather_list():
    """Append vmd 'mol addfile' lines for every dcd file found under
    ./OutputFiles (oldest first) to the global dcdlist file handle."""
    cwd = os.getcwd()
    dcdlist.write("# " + cwd + "\n")
    if not os.path.isdir("OutputFiles"):
        return
    # pieces of the vmd file-loader line
    prefix = "mol addfile "
    suffix = " type dcd first 0 last -1 step 1 filebonds 1 autobonds 1 waitfor all\n"
    for entry in sorted_ls("OutputFiles"):
        if ".dcd" in entry:
            dcdlist.write(prefix + cwd + "/OutputFiles/" + entry + suffix)
def get_atoms(psffile):
    """ Estimate the dcd frame size of a simulation from the number of
        atoms. Assumes the psf file is in the /InputFiles directory.
        Returns the atom count taken from the !NATOM line (as a string),
        or 0 if the file is missing. """
    target = os.getcwd() + "/InputFiles/" + psffile
    atoms = 0
    if os.path.isfile(target):
        with open(target, 'r') as psf:
            for line in psf:
                # the psf section header looks like: "  1234 !NATOM"
                if 'NATOM' in line:
                    atoms = line.split()[0]
    else:
        print("{}Can't find {} in /InputFiles directory {}"\
               .format(RED,psffile,DEFAULTCOLOUR))
    return atoms
def pausejob_flag(directive):
    """Create or remove the 'pausejob' flag. Pausejob files are (mostly)
    empty files in the directory which are an extra precaution for job
    control: their presence stops jobs launching."""
    if directive == "remove":
        update_local_job_details("PauseJobFlag", 0)
        if os.path.isfile("pausejob"):
            os.remove("pausejob")
    else:
        update_local_job_details("PauseJobFlag", "pausejob")
        # record why the flag was raised
        with open("pausejob", 'a') as flag:
            flag.write(directive)
def check_pausejob_flag():
    """ Check for the pausejob flag in the local details file and, if it
        is set, create an actual pauseflag file in the directory. """
    details = read_local_job_details(".", "local_job_details.json")
    if details["PauseJobFlag"] == 0:
        return
    with open("pausejob", 'a') as flag:
        flag.write("pauseflag already present")
    update_local_job_details("JobStatus", "pausejob")
def check_disk_quota():
    """ Check that there is enough diskspace on the system before
        starting a job. Relies on running the 'mydisk' program on
        Avoca; the cutoff percentage is set in the 'master_config_file'.
        Creates a pausejob flag when the quota is too low. """
    ljdf_t = read_local_job_details( ".", "local_job_details.json" )
    account = ljdf_t[ 'Account' ]
    diskspc = int( ljdf_t[ 'DiskSpaceCutOff' ] )
    try:
        disk = subprocess.check_output('mydisk')
        if isinstance(disk, bytes):      # Python 3: check_output returns bytes
            disk = disk.decode()
        for entry in disk.split("\n"):
            if account in entry:         # looks for account number
                usage = int(entry.split()[-1][:-1])
                if usage > diskspc:
                    # BUG FIX: the original referenced undefined names
                    # 'a' and 'b' here; the bare except masked the NameError.
                    print("Warning: Account {} disk space quota low. Usage: {} % "\
                           .format(account, usage))
                    print("Diskspace too low. usage: {}% disk limit set to: {}%\n"\
                           .format(usage, diskspc))
                    update_local_job_details("JobStatus", "stopping" )
                    update_local_job_details("PauseJobFlag", "low disk" )
                    update_local_job_details("JobMessage", "stopped: Disk quota low.")
                    pausejob_flag( "Low Disk Quota detected." )
    except (OSError, subprocess.CalledProcessError, ValueError, IndexError):
        # narrowed from a bare except so real programming errors surface
        print("Can't run 'mydisk'. Can't check disk quota for account {}."\
               .format(account))
def log_job_details( jobid ):
    """ Update 'local_job_details' from the scheduler's record of the
        job (via 'scontrol show job <jobid>').

    Arguments:
        jobid -- SLURM job id to query.
    """
    jobdetails = subprocess.check_output(["scontrol",\
                  "show", "job", str(jobid) ] )
    if isinstance(jobdetails, bytes):    # Python 3: check_output returns bytes
        jobdetails = jobdetails.decode()
    # map scontrol fields to local_job_details keys
    keymap = {"JobState=":  "JobStatus",
              "NumNodes=":  "Nodes",
              "NumCPUs=":   "Cores",
              "StartTime=": "JobStartTime",
              "TimeLimit=": "Walltime"}
    # BUG FIX: the original called undefined 'update_locate_job_details'
    # (typo for update_local_job_details) and would raise NameError.
    for token in re.split(r' |\n', jobdetails):
        for field, key in keymap.items():
            if field in token:
                update_local_job_details(key, token.split("=")[1])
def check_job_runtime():
    """ Check for job failure based on run time. A job that completes
        sooner than the 'JobFailTime' cutoff (defined in the
        'master_config_file') has probably failed, so a pausejob flag
        is raised. """
    details = read_local_job_details(".", "local_job_details.json")
    elapsed = int(details['JobFinishTime']) - int(details['JobStartTime'])
    if elapsed < int(details['JobFailTime']):
        update_local_job_details("JobStatus", "stopped")
        update_local_job_details("JobMessage", "short run time detected")
        pausejob_flag("Short runtime detected - job fail??")
def check_run_counter():
    """ Check whether the job runs are finished: raise a pausejob flag
        once the run counter would exceed the job run cutoff value,
        otherwise increment the current run counter. """
    details = read_local_job_details(".", "local_job_details.json")
    total = int(details['TotalRuns'])
    newrun = int(details['CurrentRun']) + 1
    update_local_job_details("LastJobId", details['CurrentJobId'])
    # stop jobs if the current run equals or is greater than total runs
    if newrun > total:
        update_local_job_details("JobStatus", "finished")
        update_local_job_details("JobMessage", "finished production runs")
        update_local_job_details("PauseJobFlag", "pausejob")
        update_local_job_details("CurrentJobId", -1)
        pausejob_flag("Job run number greater than total specified.")
        final_job_cleanup()
        return None
    update_local_job_details("CurrentRun", newrun)
def get_job_runtime(starttime, status):
    """ Return the runtime of the current job in H:MM format,
        or --:-- if the job is not running. """
    if "running" not in status:
        return "--:--"
    elapsed = int(time.time() - starttime)
    minutes = elapsed // 60
    return "%d:%02d" % (minutes // 60, minutes % 60)
def create_job_basename(jobname, run, zf):
    """ Return a date-stamped basename for the current job; the run
        number is zero padded to *zf* digits via zfill. """
    stamp = time.strftime("%Y_%d%b_", time.localtime())
    return "{}{}run_{}".format(stamp, jobname, str(run).zfill(zf))
def update_local_job_details( key, status ):
    """ Set *key* to *status* in the current directory's
        'local_job_details.json' and write the file back to disk. """
    ljdf_t = read_local_job_details(".", "local_job_details.json")
    ljdf_t[ key ] = status
    # FIX: the explicit close() inside the 'with' block was redundant --
    # the context manager closes the file on exit.
    with open("local_job_details.json", 'w') as outfile:
        json.dump(ljdf_t, outfile, indent=2)
def redirect_namd_output( CurrentWorkingName = "current_MD_run_files",
                          jobtype = "production"):
    """ Redirect NAMD output to the appropriate folders.

    Copies the restart files (.coor/.vel/.xsc) to LastRestart/, archives
    time-stamped copies under RestartFiles/, moves the trajectory into
    OutputFiles/ and the console/error text into OutputText/ and Errors/.
    """
    ljdf_t = read_local_job_details( ".", "local_job_details.json" )
    jobname = ljdf_t[ 'JobBaseName' ]
    run = ljdf_t[ 'CurrentRun' ]
    total = ljdf_t[ 'TotalRuns' ]
    zfill = len( str( total ) ) + 1
    basename = create_job_basename( jobname, run, zfill )
    # make shorthand of current working files
    cwf_coor = CurrentWorkingName + ".coor"
    cwf_vel = CurrentWorkingName + ".vel"
    cwf_xsc = CurrentWorkingName + ".xsc"
    cwf_xst = CurrentWorkingName + ".xst"
    cwf_dcd = CurrentWorkingName + ".dcd"
    # check that restart files actually exist; if not, create a pausejob
    # condition and return early. BUG FIX: the original fell through and
    # crashed on the shutil.copy calls below when files were missing.
    if not os.path.isfile(cwf_coor) or not os.path.isfile(cwf_vel) \
                                    or not os.path.isfile(cwf_xsc):
        pausejob_flag( "Missing input files." )
        update_local_job_details( "JobStatus", "stopping" )
        update_local_job_details( "JobMessage", "no namd outputfiles generated" )
        return
    # copy CurrentWorking (restart) files to LastRestart/ directory
    shutil.copy(cwf_coor, 'LastRestart/' + cwf_coor)
    shutil.copy(cwf_vel, 'LastRestart/' + cwf_vel)
    shutil.copy(cwf_xsc, 'LastRestart/' + cwf_xsc)
    # rename and move current working files
    os.rename( cwf_dcd, "OutputFiles/" + basename + ".dcd" )
    shutil.copy( cwf_vel, "RestartFiles/" + basename + ".vel" )
    shutil.copy( cwf_xsc, "RestartFiles/" + basename + ".xsc" )
    shutil.copy( cwf_xst, "RestartFiles/" + basename + ".xst" )
    shutil.copy( cwf_coor, "RestartFiles/" + basename + ".coor" )
    shutil.move( "temp_working_outputfile.out", "OutputText/" + basename + ".txt" )
    shutil.move( "temp_working_errorsfile.err", "Errors/" + basename + ".err" )
def post_jobrun_cleanup():
    """ Tidy the job directory after a run: archive scheduler logs,
        core dumps and restart files, then reset the timer / jobid
        flags and refresh the local dcd file list. """
    # (pattern, destination) pairs for files to archive
    for pattern, destination in (("slurm*", "JobLog/"),
                                 ("core*", "Errors/"),
                                 ("*.restart.*", "LastRestart/")):
        for path in glob.glob(pattern):
            shutil.move(path, destination)
    # reset timer / jobid flags:
    for key in ("JobStartTime", "JobFinishTime", "CurrentJobId"):
        update_local_job_details(key, 0)
    # update dcd files list:
    update_local_dcd_list()
def update_local_dcd_list():
    """ Write 'local_dcd_files_loader.vmd', a small vmd script used to
        load every dcd file under OutputFiles/ into VMD. """
    cwd = os.getcwd()
    template = (" mol addfile %s%s type dcd first %s last %s step %s"
                " filebonds 1 autobonds 1 waitfor all\n")
    with open('local_dcd_files_loader.vmd', 'w') as loader:
        loader.write("set firstframe 1 \n")
        loader.write("set lastframe -1 \n")
        loader.write("set stepsize 1 \n\n")
        loader.write("set cwd " + cwd + "\n\n")
        for dcd in glob.glob("OutputFiles/*.dcd"):
            loader.write(template % ("$cwd/", dcd, "$firstframe",
                                     "$lastframe", "$stepsize"))
def final_job_cleanup():
    """ Perform final cleanup once job runs are finished: delete any
        *BAK backup files left in the current directory. """
    for backup in glob.glob("*BAK"):
        os.remove(backup)
def log_job_timing():
    """ log length of job in human readable format """
    ## still to do -- placeholder: the function is currently a no-op
def countdown_timer():
    """ function to adjust countdown timer """
    ## still to do -- placeholder: the function is currently a no-op
def check_if_job_running():
    """ Check whether a job is already running in the current working
        directory. Known status values include: running, submitted,
        processing, cancelled.
        Returns (jobstatus, jobid, run) from 'local_job_details'. """
    details = read_local_job_details(os.getcwd(), "local_job_details.json")
    ## needs a better, more efficient way to check the queue here;
    ## this method currently just relies on 'local_job_details'
    return (details["JobStatus"],
            details["CurrentJobId"],
            details["CurrentRun"])
def monitor_jobs():
    """ Print a status table of jobs on the cluster.

    Walks the job tree defined in the master config file and, for each
    replicate directory, prints one line summarising the state recorded
    in its 'local_job_details.json': run counter, job id, colourised
    status, runtime and the last job message. """
    mcf = read_master_config_file()
    account = mcf["Account"]
    walltime = mcf["Walltime"]
    nodes = mcf["nodes"]
    cwd = os.getcwd()
    JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs, nJobStreams,\
         nReplicates, nBaseNames = check_job_structure()
    # table header
    print(" Account: %6s nodes: %-6s " % (account, nodes))
    print(" Job Name: |Count |JobId |Status |Runtime |Job_messages:")
    print(" ---------------|------|--------|----------|-%6s-|------------ " % walltime[:-2])
    for i in range(0,nJobStreams):
        JobDir = JobStreams[i]
        jobdirlist = get_current_dir_list(JobDir)
        print("%-24s " %( GREEN + JobDir + ":"+ DEFAULTCOLOUR ))
        for j in jobdirlist:
            dir_path = JobDir + "/" + j
            ljdf_t = read_local_job_details(dir_path,\
                 "local_job_details.json")
            jdn = ljdf_t["JobDirName"]
            qs = ljdf_t["QueueStatus"]   # read but not shown in the table
            js = colour_jobstatus( ljdf_t["JobStatus"] )
            jm = ljdf_t["JobMessage"]
            startT = ljdf_t["JobStartTime"]
            # 'js' still contains the word "running" inside the colour
            # codes, so get_job_runtime's substring check works on it
            T = get_job_runtime( startT, js )
            cjid = str(ljdf_t["CurrentJobId"])
            prog = str( ljdf_t["CurrentRun"] ) + "/" + \
                 str( ljdf_t["TotalRuns"] )
            print(" {:<15s} {:<7s}{:>8s} {:<10s} {:>8s} {:<20s} "\
                 .format(jdn[0:14], prog, cjid, js, T, jm))
    print("\n{}done.".format(DEFAULTCOLOUR))
def colour_jobstatus(js):
    """Return the job status string wrapped in a terminal colour code:
    green for running, blue for submitted/stopped, red for error;
    unknown statuses are returned unchanged."""
    palette = {"running": GREEN,
               "submitted": BLUE,
               "error": RED,
               "stopped": BLUE}
    colour = palette.get(js)
    if colour is None:
        return js
    return colour + js + DEFAULTCOLOUR
def md5sum( filename, blocksize=65536 ):
    """ Return the md5 checksum (hex digest) of *filename*.

    The file is read in *blocksize* chunks so arbitrarily large files
    can be hashed without loading them fully into memory.
    """
    digest = hashlib.md5()
    # BUG FIX: the original used iter(..., "") -- in binary mode read()
    # returns bytes, and b"" never equals "" on Python 3, so the loop
    # never terminated (hashing b"" forever). The sentinel must be b"".
    # Also open read-only ("rb"): the original "r+b" needlessly required
    # write permission. The explicit close() inside 'with' was redundant.
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            digest.update(block)
    return digest.hexdigest()
def getfilesize(filename):
    """ Return the size of *filename* in bytes. """
    return os.path.getsize(filename)
def check_job_structure():
    """ Read and sanity-check the job structure in 'master_config_file'.
        The job structure has three tiers: JobStreams (usually just 1),
        job replicates within each stream, and the number of job runs
        to perform in each replicate.
        Returns the five structure lists plus the lengths of the first
        three; exits if the lists differ in length. """
    mcf = read_master_config_file()
    JobStreams = mcf["JobStreams"]
    Replicates = mcf["JobReplicates"]
    BaseDirNames = mcf["BaseDirNames"]
    JobBaseNames = mcf["JobBaseNames"]
    Runs = mcf["Runs"]
    # all five job-details lists in master_config_file must match in length
    lengths = [len(JobStreams), len(Replicates), len(BaseDirNames),
               len(JobBaseNames), len(Runs)]
    if len(set(lengths)) != 1:
        print("Job Details Section lists do not appear to be the same length\
           in master_config_file.")
        sys.exit()
    return JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs,\
           lengths[0], lengths[1], lengths[2]
def initialize_job_directories():
    """ Set up and initialize the job structure directories defined in
        the 'master_config_file' by copying the job template from
        /Setup_and_Config/JobTemplate into each replicate directory. """
    cwd = os.getcwd()
    JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs, nJobStreams,\
                nReplicates, nBaseNames = check_job_structure()
    # create job stream structure: /JobStreams/JobReplicates
    for i in range(0, nJobStreams):
        TargetJobDir = cwd + "/" + JobStreams[i]
        if not os.path.exists(TargetJobDir):
            print("Job Stream directory /{} does not exist.\
 Making new directory.".format(TargetJobDir))
            os.makedirs(JobStreams[i])
        # Copy directory structure from /Setup_and Config/JobTemplate
        print("Making job replicates in /{}".format(JobStreams[i]))
        TemplatePath = cwd + "/Setup_and_Config/JobTemplate"
        # check existence of the JobTemplate directory:
        if not os.path.exists(TemplatePath):
            print("Can't find the /Setup_and_Config/JobTemplate \
directory. Exiting.")
            # BUG FIX: original called sys.exit(error) with 'error'
            # undefined, raising NameError instead of exiting cleanly.
            sys.exit(1)
        replicates = int(Replicates[i])
        zf = len(str(replicates)) + 1    # zero-padding width for zfill
        for j in range(1, replicates + 1):
            suffix = str(j).zfill(zf)
            NewDirName = JobStreams[i] + "/" + BaseDirNames[i] + suffix
            if os.path.exists(NewDirName):
                print("Replicate job directory {} already exists! \
-Skipping.".format(NewDirName))
            else:
                shutil.copytree(TemplatePath, NewDirName)
                print("Creating:{}".format(NewDirName))
def populate_job_directories():
    """ Populate or update the job directory tree with the job scripts
        located in /Setup_and_Config.

    For each job stream: copies across the sbatch start/production
    scripts (with nodes, walltime, account etc. substituted from the
    master config file), seeds a 'local_job_details.json' in each
    replicate directory, and copies the python and namd .conf helper
    scripts. """
    JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs, \
                nJobStreams, nReplicates, nBaseNames = check_job_structure()
    mcf = read_master_config_file()
    ljdf_t = read_local_job_details()
    cwd = os.getcwd()
    # stage the shared fields of the local job details template:
    ljdf_t[ 'BASE_DIR' ] = cwd
    ljdf_t[ 'CurrentRound' ] = mcf["Round"]
    ljdf_t[ 'Account' ] = mcf["Account"]
    ljdf_t[ 'Nodes' ] = mcf["nodes"]
    ljdf_t[ 'ntpn' ] = mcf["ntpn"]
    ljdf_t[ 'ppn' ] = mcf["ppn"]
    ljdf_t[ 'Walltime' ] = mcf["Walltime"]
    ljdf_t[ 'JobFailTime' ] = mcf["JobFailTime"]
    ljdf_t[ 'DiskSpaceCutOff' ] = mcf["DiskSpaceCutOff"]
    Flavour = mcf["Flavour"]
    OptScript = mcf["EquilibrateConfScript"]
    ProdScript = mcf["ProductionConfScript"]
    ModuleFile = mcf["ModuleFile"]
    startscript = mcf["SbatchEquilibrateScript"]
    productionscript = mcf["SbatchProductionScript"]
    ## list files to transfer:
    print("{}Job Files to transfer from /Setup_and_Config:{}"\
           .format( GREEN, DEFAULTCOLOUR ))
    print("{} {} \n {} ".format( BLUE, startscript,\
           productionscript ))
    print(" local_job_details.json ")
    for pyfile in glob.glob(r'Setup_and_Config/*.py' ):
        print(" " + pyfile[17:])
    for conffile in glob.glob(r'Setup_and_Config/*.conf' ):
        print(" " + conffile[17:])
    ## descend through job structure and populate job directories:
    for i in range(0, nJobStreams):
        TargetJobDir = cwd + "/" + JobStreams[i]
        print("{}\nPopulating JobStream: {} {}".format( GREEN,
              TargetJobDir, DEFAULTCOLOUR))
        ## check to see if there actually are any job directories to fill:
        if not os.path.exists( TargetJobDir ):
            print("Job directory {} not found. Have you initialized?"\
                   .format(TargetJobDir))
            # BUG FIX: original called sys.exit(error) with 'error' undefined
            sys.exit(1)
        ## create temporary sbatch scripts:
        sb_start_template = "Setup_and_Config/" + startscript + ".template"
        sb_prod_template = "Setup_and_Config/" + productionscript + ".template"
        if not os.path.exists( sb_start_template ) \
                or not os.path.exists( sb_prod_template ):
            print("Can't find sbatch template files in Settup_and_Config. Exiting.")
            # BUG FIX: original called sys.exit(error) with 'error' undefined
            sys.exit(1)
        ## modify replicate elements in the staging dictionary file:
        ljdf_t[ 'JOB_STREAM_DIR' ] = JobStreams[i]
        ljdf_t[ 'CurrentRun' ] = 0
        ljdf_t[ 'TotalRuns' ] = int( Runs[i] )
        ljdf_t[ 'JobBaseName' ] = JobBaseNames[i]
        # replacement values for the placeholder lines in the templates
        nnodes = "#SBATCH --nodes=" + mcf["nodes"]
        ntime = "#SBATCH --time=" + mcf["Walltime"]
        naccount = "#SBATCH --account=" + mcf["Account"]
        nntpn = "ntpn=" + mcf["ntpn"]
        nppn = "ppn=" + mcf["ppn"]
        nmodule = "module load " + ModuleFile
        nopt = "optimize_script=" + OptScript
        nprod = "production_script=" + ProdScript
        shutil.copy( sb_start_template, 'sb_start_temp')
        shutil.copy( sb_prod_template, 'sb_prod_temp' )
        ## replace placeholder lines in the temporary sbatch files:
        for f in ["sb_start_temp", "sb_prod_temp"]:
            for line in fileinput.FileInput( f, inplace=True ):
                line = line.replace( '#SBATCH --nodes=X', nnodes )
                line = line.replace( '#SBATCH --time=X', ntime )
                line = line.replace( '#SBATCH --account=X', naccount )
                line = line.replace( 'ntpn=X', nntpn )
                line = line.replace( 'ppn=X', nppn )
                line = line.replace( 'module load X', nmodule )
                line = line.replace( 'optimize_script=X', nopt )
                line = line.replace( 'production_script=X', nprod )
                sys.stdout.write(line)
        ## update local job details file in each replicate directory:
        jobdirlist = get_current_dir_list(JobStreams[i])
        for j in jobdirlist:
            print("{} -populating: {}{}".format(BLUE, j, DEFAULTCOLOUR ))
            ljdf_t[ 'JobDirName' ] = j
            ljdfile = JobStreams[i] + "/" + j + "/local_job_details.json"
            if not os.path.isfile( ljdfile ):
                # FIX: the redundant close() inside 'with' was removed
                with open(ljdfile, 'w') as outfile:
                    json.dump(ljdf_t, outfile, indent=2)
                print("")
            else:
                print(" skipping local_details_file: already exists ")
            ## copy across sbatch and python scripts from /Setup_and_Config:
            jobpath = JobStreams[i] + "/" + j + "/"
            sbs_path = jobpath + "/" + startscript
            sbp_path = jobpath + "/" + productionscript
            shutil.copy( 'sb_start_temp', sbs_path )
            shutil.copy( 'sb_prod_temp' , sbp_path )
            for pyfile in glob.glob(r'Setup_and_Config/*.py' ):
                shutil.copy2( pyfile, jobpath )
            for conffile in glob.glob(r'Setup_and_Config/*.conf' ):
                shutil.copy2(conffile, jobpath)
    ## remove tempfiles.
    os.remove('sb_start_temp')
    os.remove('sb_prod_temp')
    print("\n -done populating directories")
def check_job():
    """ Check the input of the current job and estimate the resources
        (disk space, simulated time) its production runs will need. """
    mcf = read_master_config_file()
    jd_opt, jd_opt_pl = read_namd_job_details(mcf["EquilibrateConfScript"])
    jd_prod, jd_prod_pl = read_namd_job_details(mcf["ProductionConfScript"])
    # check that the files named in the configs exist where they should:
    print("{}--------------------------------------------------------------------------------".format(BLUE))
    print("{}Checking configuration input files:{}".format(BLUE,DEFAULTCOLOUR))
    print("{}--------------------------------------------------------------------------------".format( BLUE))
    print("{}{}:{}".format(BLUE,mcf["EquilibrateConfScript"],DEFAULTCOLOUR))
    check_file_exists(jd_opt["psffilepath"])
    check_file_exists(jd_opt["pdbfilepath"])
    for i in jd_opt_pl:
        check_file_exists(i)
    print("{}{}:{}".format(BLUE,mcf["ProductionConfScript"],DEFAULTCOLOUR))
    check_file_exists(jd_prod["psffilepath"])
    check_file_exists(jd_prod["pdbfilepath"])
    for i in jd_prod_pl:
        check_file_exists(i)
    sr = 0     # Initialise no. of job replicates
    run = 0    # Initialise no. of runs in each replicate
    print("{}--------------------------------------------------------------------------------".format(BLUE))
    print("{}Job check summary: ".format(BLUE,DEFAULTCOLOUR))
    print("{}--------------------------------------------------------------------------------".format(BLUE))
    print("{} Main Job Directory: {}{}".format(RED, DEFAULTCOLOUR, mcf["JobStreams"]))
    print("{} Simulation basename: {}{}".format(RED, DEFAULTCOLOUR, mcf["BaseDirNames"]))
    print("{} Sbatch start template: {}{}.template".format(RED, DEFAULTCOLOUR, mcf["SbatchEquilibrateScript"]))
    print("{} Sbatch prouction template: {}{}.template".format(RED, DEFAULTCOLOUR, mcf["SbatchProductionScript"]))
    print("{} Optimization script: {}{}".format(RED, DEFAULTCOLOUR, mcf["EquilibrateConfScript"]))
    print("{} Production script: {}{}".format(RED, DEFAULTCOLOUR, mcf["ProductionConfScript"]))
    print("{} Namd modulefile: {}{}".format(RED, DEFAULTCOLOUR, mcf["ModuleFile"]))
    Replicates = mcf["JobReplicates"]
    Runs = mcf["Runs"]
    nReplicates = len(Replicates)
    nRuns = len(Runs)
    # calculating totals from the input files:
    for i in range(0, nReplicates):
        sr += int(Replicates[i])     # total no. of job replicates
    for j in range(0, nRuns):
        run += int(Runs[j])          # total no. of runs in each replicate
    spr = jd_prod["steps"]           # steps per run
    dcd = jd_prod["dcdfreq"]         # dcd write frequency
    # dcd frame size based on number of atoms from the psf (12 bytes/atom).
    # FIX: dfs and tdf were each computed twice in the original; the
    # duplicate lines have been removed.
    dfs = int(jd_prod["natom"])*12.0/(1024.0*1024.0)
    tdf = int(spr)/int(dcd)*int(run)*int(sr)    # total dcd frames
    tpd = tdf*dfs/(1024)                        # total production data (GB)
    tst = (int(sr)*int(run)*int(jd_prod["timestep"])*int(spr))/1000000.0  # total simulated time
    print("{}--------------------------------------------------------------------------------".format(BLUE))
    print("{}Estimation of data to be generated from the production run of this simulation:{}".format(BLUE, DEFAULTCOLOUR))
    print("{}--------------------------------------------------------------------------------".format(BLUE))
    print("{} Simulation directories: {}%-8s {}Runs per directory: {}%-8s"\
           .format(RED, DEFAULTCOLOUR, RED, DEFAULTCOLOUR) % (sr, run))
    print("{} Steps per run: {}%-8s {}Dcdfreq in run: {}%-8s"\
           .format(RED, DEFAULTCOLOUR, RED, DEFAULTCOLOUR) % (spr, dcd))
    print("{} Dcd frame size(MB) {}%-8.3f {}Total dcd frames: {}%-8s"\
           .format(RED, DEFAULTCOLOUR, RED, DEFAULTCOLOUR) % (dfs, tdf))
    print(" {} Total simulated time:{} %12.2f nanoseconds"\
           .format(GREEN, DEFAULTCOLOUR) %(tst))
    if not (tpd==0):
        print(" {} Total production data:{} %12.2f GB"\
               .format(GREEN, DEFAULTCOLOUR) %(tpd))
    else:
        print(" {} Total production data:{} %12.2f {}GB - error in calculating \
frame size. No psf file?".format(RED, DEFAULTCOLOUR, RED) %(tpd))
    print("{}------------------------------------------------------------------------------".format(BLUE))
    print("{}Node configuration:{}".format(BLUE, DEFAULTCOLOUR))
    print("{}------------------------------------------------------------------------------".format(BLUE))
    print("{}Sbatch Scripts: {} %s , %s ".format(RED, DEFAULTCOLOUR) % \
           (mcf["SbatchEquilibrateScript"], mcf["SbatchProductionScript"]))
    print("{}nodes: {} %-12s ".format(RED, DEFAULTCOLOUR) % (mcf["nodes"]))
    print("{}walltime: {} %-12s ".format(RED, DEFAULTCOLOUR) % (mcf["Walltime"]))
    print("{}no. tasks per node: {} %-12s ".format(RED, DEFAULTCOLOUR) % (mcf["ntpn"]))
    print("{}processes per node: {} %-12s ".format(RED, DEFAULTCOLOUR) % (mcf["ppn"]))
    if not mcf["Account"] == "VR0000":
        print("{}account: {} %-12s ".format(RED, DEFAULTCOLOUR) % (mcf["Account"]))
    else:
        print("{}account: %-12s -have you set your account?{} "\
               .format(RED, DEFAULTCOLOUR) % (mcf["Account"]))
def check_file_exists(target):
    """Report whether an input file referenced by a namd config exists.
    Paths are expected to start with '../../' (relative to a job
    directory two levels down); that prefix is stripped before testing."""
    mesg1 = "{} found {} -ok!{}".format(DEFAULTCOLOUR, GREEN, DEFAULTCOLOUR)
    mesg2 = "{} found {} -ok!{} -example file?".format(DEFAULTCOLOUR, GREEN, DEFAULTCOLOUR)
    mesg3 = "{} not found.{} -Check config file.{}".format(DEFAULTCOLOUR, RED, DEFAULTCOLOUR)
    ntarget = target[6:]    # strip off "../../"
    if "../../" not in target[0:6]:
        print("{}unexpected path structure to input files:{}".format(RED, DEFAULTCOLOUR))
    if not os.path.exists(ntarget):
        print("{} %-46s {}".format(RED, mesg3) %(ntarget))
    elif "example" in target:
        print("{} %-50s {}".format(RED, mesg2) %(ntarget))
    else:
        print("{} %-50s {}".format(RED, mesg1) %(ntarget))
def benchmark():
    """ -function to benchmark job (work in progress).

    Reads the master config and the equilibration namd config so that
    benchmarking jobs can be sized from the current job setup; the
    remaining steps are still to be implemented (see TODOs below). """
    # read job details:
    mcf = read_master_config_file()
    jd_opt, jd_opt_pl = read_namd_job_details(mcf["EquilibrateConfScript"])
    print("{} Setting up jobs for benchmarking based on current job config files.".format(DEFAULTCOLOUR))
    # TODO: create temporary files / figure out job size.
    # TODO: move files to /Setup_and_Config/Benchmarking, create dictionary / json file.
    # TODO: optimize job / create sbatch_files.
    # TODO: start benchmarking jobs.
    # TODO: extract results.
    # TODO: plot results.
def get_current_dir_list(job_dir):
    """ Return a sorted list of the entries in *job_dir*, or an empty
        list (with a message on stderr) if it is not a directory. """
    if not os.path.isdir(job_dir):
        sys.stderr.write("No directories found in {}. Have you initialized?".format(job_dir))
        return []
    return sorted(os.listdir(job_dir))
def get_curr_job_list(job_dir):
    """Return a sorted list of every file in the *job_dir* directory tree.

    Arguments:
        job_dir -- path to the job directory.

    Notes:
        The whole tree is walked; only regular files are collected.
        (FIX: the original shipped with an unfilled placeholder
        docstring template.)

    Returns:
        Sorted list of file paths as produced by os.path.join(root,
        name); an empty list, with a message on stderr, if job_dir
        does not exist.
    """
    file_list = []
    if os.path.isdir(job_dir):
        # build a list of every file in the `job_dir` directory tree
        for root, _, fns in os.walk(job_dir):
            for fn in fns:
                file_path = os.path.join(root, fn)
                if os.path.isfile(file_path):
                    file_list.append(file_path)
        file_list.sort()
    else:
        # error messages go to stderr rather than stdout
        sys.stderr.write("{} doesn't exist, it needs to be initialised.{}"
                         .format(job_dir, os.linesep))
    return file_list
def execute_function_in_job_tree(func, *args):
    """ Execute func(*args) inside every job directory of the tree
        described by the 'master_config_file', restoring the original
        working directory afterwards. """
    cwd = os.getcwd()
    ## read the job structure tree from the "master_config_file":
    JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs, \
                nJobStreams, nReplicates, nBaseNames = check_job_structure()
    ## descend into the job tree:
    for stream_no in range(nJobStreams):
        stream_path = cwd + '/' + JobStreams[stream_no]
        if not os.path.isdir(stream_path):
            print("Can't see job directories {} -Have you initialized?"\
                   .format(stream_path))
            continue
        ## descend into the job directory tree of this JobStream
        for entry in get_current_dir_list(stream_path):
            job_path = stream_path + '/' + entry
            if os.path.isdir(job_path):
                os.chdir(job_path)
                func(*args)
            else:
                print("\nCan't descend into job directory tree. Have you populated?\n {}"\
                       .format(job_path))
    os.chdir(cwd)
def start_all_jobs():
    """Submit the equilibration start script in every job directory."""
    script = read_master_config_file()["SbatchEquilibrateScript"]
    execute_function_in_job_tree(start_jobs, script)
def start_jobs(startscript):
    """ Submit *startscript* via sbatch in the current directory if no
        job has been started here yet; otherwise explain why not. """
    cwd = os.getcwd()
    jobstatus, jobid, jobrun = check_if_job_running()
    # a fresh, never-run directory is the only state we submit from
    if jobstatus == "ready" and jobid == 0 and jobrun == 0:
        subprocess.Popen(['sbatch', startscript])
        update_local_job_details("JobStatus", "submitted")
        update_local_job_details("JobMessage", "submitted to queue")
        return
    if jobstatus == "cancelled":
        print("Appears this job was cancelled. Clear flags before restart. {}".format( cwd[-20:], jobid ))
    if "running" in jobstatus:
        print("A job appears to be running here: {} : jobid:{}".format( cwd[-20:], jobid ))
    elif jobrun >= 1:
        print("Seems equilibration job already run here, don't you want to restart instead? (--restart)")
def clear_jobs():
    """Clear all pausejob and stop flags throughout the job tree."""
    # the config read is kept for its existence-check side effects
    read_master_config_file()
    execute_function_in_job_tree(clear_all_jobs)
def clear_all_jobs():
    """ Clear all stop flags in the current directory, unless a job is
        running here. """
    cwd = os.getcwd()
    jobstatus, jobid, jobrun = check_if_job_running()
    if "running" in jobstatus:
        print("A job appears to be running here:..{} : jobstatus:{}".format( cwd[-20:], jobid ))
        return
    update_local_job_details("JobStatus", "ready")
    update_local_job_details("CurrentJobId", 0)
    update_local_job_details("JobMessage", "cleared stop flags")
    update_local_job_details("PauseJobFlag", 0)
    ## remove the explicit flag file:
    if os.path.isfile("pausejob"):
        os.remove("pausejob")
    print("{} cleared stop flags in: {} {}".format( GREEN, cwd, DEFAULTCOLOUR ))
def restart_all_production_jobs():
    """Restart production jobs throughout the job tree."""
    print("-- restarting production jobs.")
    ## check_job_status
    script = read_master_config_file()["SbatchProductionScript"]
    execute_function_in_job_tree(restart_jobs, script)
def restart_jobs(restartscript):
    """ Restart the production job in the current directory by
        submitting *restartscript*, unless the job is already queued,
        running, cancelled, or has completed all its runs. """
    cwd = os.getcwd()
    jobstatus, jobid, jobrun = check_if_job_running()
    ljdf_t = read_local_job_details( ".", "local_job_details.json" )
    current = ljdf_t["CurrentRun"]
    jobid = ljdf_t["CurrentJobId"]
    total = ljdf_t["TotalRuns"]
    message = ljdf_t["JobMessage"]
    time.sleep( 0.1 )
    # BUG FIX: the original tested for "submittied" (typo), so jobs in
    # the "submitted" state were never caught by this guard.
    if jobstatus in { "running", "submitted" }:
        print("A job appears to be submitted or running here:..{} : jobid:{}".format( cwd[-20:], jobid ))
        return
    if "cancelled" in jobstatus:
        print("Job was abruptly cancelled. Clear pause flags first. (--clear) {}".format( cwd[-20:]))
        return
    if jobstatus in { "finished", "ready" }:
        if "cleared" in message:      # assume restarting from a cancelled job,
            pausejob_flag( "remove" ) # so we don't increment the CurrentRun number
        subprocess.Popen(['sbatch', restartscript])
        update_local_job_details( "JobStatus", "submitted" )
        update_local_job_details( "JobMessage", "production job restarted" )
        return
    if current >= total:
        print("Current run number equal or greater than total runs. Use './mdwf -e' to extend runs.")
        return
def recover_all_jobs():
    """Recover and restore crashed jobs throughout the job tree."""
    banner = ("Crash recovery: ",
              "Typically we expect binary output files like .dcd to be the",
              "same size using this workflow. If a job has crashed we can ",
              "restore the directory to the last known good point by entering",
              "the name of the first bad file. Files will be restored to this",
              "point. ")
    for line in banner:
        print(line)
    execute_function_in_job_tree(recovery_function)
def recovery_function():
    """ this function checks sizes and md5sums of outputfiles, giving the opportunity
    for a user to recover from the last known good file"""
    ljdf = read_local_job_details( ".", "local_job_details.json" )
    # the following constructs a string to find the "equilibration" dcd file
    # (which is numbered 0, but with zfill padding)
    total = ljdf["TotalRuns"]
    # width of the zero-padded numeric suffix: digits of TotalRuns + 1, plus ".dcd"
    zf = len( str(total) ) + 1 + 4
    zf_bait = ".dcd".zfill( zf )
    dirlist = get_current_dir_list( "OutputFiles" )
    line = ljdf["JOB_STREAM_DIR"] + "/" + ljdf["JobDirName"] + "/" + "OutputFiles:"
    print("\n{}{}{}".format( GREEN, line, DEFAULTCOLOUR ))
    #### while
    # scan .dcd files one at a time, prompting after each so the user can name
    # the last known-good file; everything after it is treated as suspect
    for i in dirlist:
        if "dcd" in i:
            path = 'OutputFiles/' + i
            size = os.path.getsize( path )
            if not zf_bait in i:
                print("%-24s %12s " % ( i, size ))
            else:
                # mixes str.format (colour codes) with %-interpolation
                # (name/size): .format fills the {}s first, then % fills %-specs
                print("{}%-24s %12s -equilibration file {} ".format( BLUE, DEFAULTCOLOUR ) % ( i, size ))
            print("Enter the name of the last {}good file{} or press 'enter' to continue scanning. ('q' to quit)".format(GREEN, DEFAULTCOLOUR ))
            target = raw_input()
            if target == "q":
                sys.exit("exiting" )
            if target != "":
                # find index of target in dirlist.
                if target in dirlist:
                    # index = dirlist.index( target )
                    # find index of target in dirlist.
                    # start deleting from the entry AFTER the chosen good file
                    index = dirlist.index(target)+1
                    print("\n{}Files to delete:{}".format(BLUE, DEFAULTCOLOUR ))
                    targetlist=[]
                    for i in range( index, int(len(dirlist))):
                        print(dirlist[i])
                        targetlist.append(dirlist[i])
                    # double confirmation before irreversible deletion
                    line = " {}Confirm:{} y/n ".format(BLUE, DEFAULTCOLOUR )
                    confirm = raw_input(line)
                    if confirm in { 'Y', 'y' }:
                        line = " {}Really? -this can't be undone! Confirm:{} y/n ".format(BLUE, DEFAULTCOLOUR )
                        confirm = raw_input(line)
                        if confirm in { 'Y', 'y' }:
                            print("-deleting redundant output and restart files:")
                            for j in targetlist:
                                targetfile=os.getcwd()+"/OutputFiles/"+j
                                try:
                                    os.remove(targetfile)
                                except OSError:
                                    # best effort: file may already be gone
                                    pass
                                #slice base name to remove other files:
                                basename = j[ :-4 ]
                                targetfile=os.getcwd()+"/OutputText/" + basename + ".txt"
                                try:
                                    os.remove(targetfile)
                                except OSError:
                                    pass
                                for k in ['.vel', '.coor', '.xsc', '.xst']:
                                    targetfile=os.getcwd()+"/RestartFiles/" + basename + k
                                    try:
                                        os.remove(targetfile)
                                    except OSError:
                                        pass
                            # slice job number and basename from dcd job name:
                            num = int( target[-zf:-4] )
                            basename = target[ :-4 ]
                            print("-updating restart files:")
                            # NOTE(review): "RestatFiles" in the printed message below
                            # looks like a typo for "RestartFiles" (message text only;
                            # the actual copy source path is correct)
                            for k in ['.vel', '.coor', '.xsc', '.xst']:
                                src=os.getcwd()+"/RestartFiles/" + basename + k
                                dst=os.getcwd()+"/current_MD_run_files" + k
                                print("copy /RestatFiles/{}{} to current_MD_run_files{}".format(basename, k, k))
                                shutil.copyfile(src, dst)
                            print("-updating run number:")
                            # resume from the run after the last known-good one
                            update_local_job_details( "CurrentRun", num+1 )
                else:
                    print(target, " not found: ")
def stop_jobs():
    """ -function to stop all jobs, -either immediately or gently."""
    # applies stop_all_jobs_immediately() in every job directory of the tree
    print("-- stopping all jobs")
    execute_function_in_job_tree(stop_all_jobs_immediately)
def pause_jobs():
    """ -function to pause all jobs gently by raising pause flags."""
    # DOCFIX: the previous docstring said "stop all jobs" -- copy-pasted from
    # stop_jobs(); this function only pauses via pause_all_jobs().
    print("-- pausing all jobs")
    execute_function_in_job_tree(pause_all_jobs)
def pause_all_jobs():
    """ pause the job in the current directory by raising the pause flag """
    jobstatus, jobid, jobrun = check_if_job_running()
    # nothing to pause when the directory has no active job
    if jobstatus in { "stopped", "cancelled", "processing" }:
        update_local_job_details( "JobMessage", "no job running" )
    else:
        pausejob_flag( "Manual pausing of job." )
        update_local_job_details( "JobMessage", "job paused" )
def stop_all_jobs_immediately():
    """ function to stop all jobs immediately """
    status, job_id, job_run = check_if_job_running()
    # nothing to cancel when the directory has no active job
    if status in { "stopped", "cancelled", "processing" }:
        update_local_job_details( "JobMessage", "no job running" )
        return
    cancel_job( job_id )
def cancel_job( jobid ):
    """ function to send scancel command for jobid """
    print(" stopping job: {}".format( jobid ))
    # (removed an unused 'message' local that was never printed or logged;
    #  also fixed the "commnad" typo in the recorded flag message)
    pausejob_flag( "scancel command sent." )
    update_local_job_details( "JobMessage", "sent scancel command" )
    update_local_job_details( "JobStatus", "stopped" )
    update_local_job_details( "PauseJobFlag", "cancelled" )
    if jobid > 0:
        # BUGFIX: Popen argument lists must contain strings; jobid may be an
        # int (CurrentJobId is stored as an int, e.g. -1), which would raise.
        subprocess.Popen([ 'scancel', str(jobid) ])
        update_local_job_details( "CurrentJobId", -1 )
def erase_all_data():
    """ -function to erase all data for a clean start. Use with caution!"""
    # job-tree layout as declared in the master config
    JobStreams, Replicates, BaseDirNames, JobBaseNames, Runs, \
        nJobStreams, nReplicates, nBaseNames = check_job_structure()
    cwd = os.getcwd()
    print("\nWe are about to erase all data in this directory, which can be useful")
    print("for making a clean start, but disasterous if this is the wrong folder!")
    print("{}Proceed with caution!{}".format(RED, DEFAULTCOLOUR))
    print("This operation will delete all data in the folders:\n")
    # NOTE(review): the second format argument (DEFAULTCOLOUR) is unused here --
    # the template has only one {} placeholder
    print("/{} ".format(JobStreams,DEFAULTCOLOUR))
    print("/JobLog/ - Job logs.")
    print("/Setup_and_Config/Benchmarking/ - Benchmarking data.")
    # require the user to type the full confirmation phrase, not just y/n
    strg = raw_input("\n Press enter to quit or type: {}'erase all my data' {}: ".format(GREEN, DEFAULTCOLOUR))
    print (strg)
    if strg in ['erase all my data']:
        print("Ok, well if you say so....")
        for j in range( 0, nJobStreams):
            TargetDir = cwd + "/" + JobStreams[j]
            print(" Erasing all files in:{}".format(TargetDir))
            if os.path.isdir( TargetDir ):
                # irreversible: removes the entire job stream directory
                shutil.rmtree( TargetDir )
            else:
                print(" Couldn't see {}".format(TargetDir))
        print("\n Oh the humanity. I sure hope that wasn't anything important.")
    else:
        print(" Phew! Nothing erased.")
def create_dcd_file_loader( first = 0, last = -1, step =1):
    """ A function to create an easyfile loader to be able to read in a
    contiguous series of dcd output files for VMD.

    Returns the list of "mol addfile ..." VMD command lines, one per .dcd
    file found in ./OutputFiles.
    """
    # BUGFIX: the original called .format() on a %-style template (a no-op,
    # since there are no {} placeholders) and referenced an undefined name
    # 'dcdline', raising NameError; the built line was also discarded. Now a
    # line is built per .dcd file with %-interpolation and the list returned.
    cwd = os.getcwd()
    OutputDir = cwd + "/" + "OutputFiles"
    DirList = get_current_dir_list( OutputDir )
    lines = []
    for dcdfile in DirList:
        if dcdfile.endswith(".dcd"):
            # create vmd line:
            line = ("mol addfile %s type dcd first %s last %s step %s "
                    "filebonds 1 autobonds 1 waitfor all\n"
                    % (dcdfile, first, last, step))
            lines.append(line)
    return lines
def clone():
    """ -function to clone directory without data, but preserving input files."""
    # NOTE(review): only announces the action -- verify whether the copy logic
    # lives elsewhere or is still to be implemented.
    print("-- cloning data directory!!")
|
from collections import defaultdict, OrderedDict
import itertools
import json
import os
import sys
import random
import string
import copy
import sys
from datetime import date, datetime
import pysam
import pymongo
from django.core.exceptions import ObjectDoesNotExist
from xbrowse.core.genotype_filters import passes_genotype_filter
from xbrowse.datastore.utils import get_elasticsearch_dataset
from xbrowse.utils import compressed_file, get_progressbar
from xbrowse.utils import slugify
import settings
from xbrowse import utils as xbrowse_utils
from xbrowse import vcf_stuff, genomeloc
from xbrowse.core.variant_filters import VariantFilter, passes_variant_filter
from xbrowse import Variant
import datastore
from pprint import pprint, pformat
import StringIO
import elasticsearch
import elasticsearch_dsl
from elasticsearch_dsl import Q
from pyliftover import LiftOver
# Module-level coordinate converters between genome builds, created once at
# import time and shared by get_elasticsearch_variants() below.
liftover_grch38_to_grch37 = LiftOver('hg38', 'hg19')
liftover_grch37_to_grch38 = LiftOver('hg19', 'hg38')
# make encoded values as human-readable as possible
# (BUGFIX: this constant block was previously defined twice verbatim; the
#  duplicate definition has been removed -- names and values are unchanged)
ES_FIELD_NAME_ESCAPE_CHAR = '$'
# characters that may not begin an elasticsearch field name; names starting
# with one of these are prefixed with the escape char by _encode_field_name()
ES_FIELD_NAME_BAD_LEADING_CHARS = set(['_', '-', '+', ES_FIELD_NAME_ESCAPE_CHAR])
# special character -> escape-sequence mapping used in encoded field names
ES_FIELD_NAME_SPECIAL_CHAR_MAP = {
    '.': '_$dot$_',
    ',': '_$comma$_',
    '#': '_$hash$_',
    '*': '_$star$_',
    '(': '_$lp$_',
    ')': '_$rp$_',
    '[': '_$lsb$_',
    ']': '_$rsb$_',
    '{': '_$lcb$_',
    '}': '_$rcb$_',
}
def _encode_field_name(s):
    """Encodes arbitrary string into an elasticsearch field name
    See:
    https://discuss.elastic.co/t/special-characters-in-field-names/10658/2
    https://discuss.elastic.co/t/illegal-characters-in-elasticsearch-field-names/17196/2
    """
    pieces = []
    for c in s:
        if c == ES_FIELD_NAME_ESCAPE_CHAR:
            # double the escape char so it can be unambiguously decoded
            pieces.append(2 * ES_FIELD_NAME_ESCAPE_CHAR)
        elif c in ES_FIELD_NAME_SPECIAL_CHAR_MAP:
            pieces.append(ES_FIELD_NAME_SPECIAL_CHAR_MAP[c])  # encode the char
        else:
            pieces.append(c)  # pass the char through unchanged
    encoded = "".join(pieces)
    # escape 1st char if necessary
    if any(encoded.startswith(bad) for bad in ES_FIELD_NAME_BAD_LEADING_CHARS):
        encoded = ES_FIELD_NAME_ESCAPE_CHAR + encoded
    return encoded
def _decode_field_name(field_name):
    """Converts an elasticsearch field name back to the original unencoded string"""
    if field_name.startswith(ES_FIELD_NAME_ESCAPE_CHAR):
        # drop the leading-char escape added during encoding
        field_name = field_name[1:]
    decoded_chars = []
    doubled_escape = 2 * ES_FIELD_NAME_ESCAPE_CHAR
    i = 0
    n = len(field_name)
    while i < n:
        remainder = field_name[i:]
        if remainder.startswith(doubled_escape):
            decoded_chars.append(ES_FIELD_NAME_ESCAPE_CHAR)
            i += 2
            continue
        for original_value, encoded_value in ES_FIELD_NAME_SPECIAL_CHAR_MAP.items():
            if remainder.startswith(encoded_value):
                decoded_chars.append(original_value)
                i += len(encoded_value)
                break
        else:
            # no escape sequence matched: plain character
            decoded_chars.append(field_name[i])
            i += 1
    return "".join(decoded_chars)
# Maps symbolic genotype-filter names to mongo query values for the
# "genotypes.<indiv_id>.num_alt" field (num_alt encoding used throughout this
# module: 0 = ref/ref, 1 = ref/alt, 2 = alt/alt, -1 = missing).
GENOTYPE_QUERY_MAP = {
    'ref_ref': 0,
    'ref_alt': 1,
    'alt_alt': 2,
    'has_alt': {'$gte': 1},
    'has_ref': {'$in': [0,1]},
    'not_missing': {'$gte': 0},
    'missing': -1,
}
# Human chromosome lengths in base pairs, keyed by chromosome name.
# NOTE(review): these appear to be GRCh37/hg19 lengths -- confirm before
# reusing against GRCh38 coordinates.
CHROMOSOME_SIZES = {
    "1":249250621,
    "2":243199373,
    "3":198022430,
    "4":191154276,
    "5":180915260,
    "6":171115067,
    "7":159138663,
    "8":146364022,
    "9":141213431,
    "10":135534747,
    "11":135006516,
    "12":133851895,
    "13":115169878,
    "14":107349540,
    "15":102531392,
    "16":90354753,
    "17":81195210,
    "18":78077248,
    "19":59128983,
    "20":63025520,
    "21":48129895,
    "22":51304566,
    "X":155270560,
    "Y":59373566,
    "MT":16569,
}
def _add_genotype_filter_to_variant_query(db_query, genotype_filter):
    """
    Add conditions to db_query from the genotype filter
    Edits in place, returns True if successful
    """
    for indiv_id, genotype in genotype_filter.items():
        # one num_alt condition per individual, translated via GENOTYPE_QUERY_MAP
        db_query['genotypes.%s.num_alt' % indiv_id] = GENOTYPE_QUERY_MAP[genotype]
    return True
def _add_index_fields_to_variant(variant_dict, annotation=None):
"""
Add fields to the vairant dictionary that you want to index on before load it
"""
if annotation:
variant_dict['db_freqs'] = annotation['freqs']
variant_dict['db_tags'] = annotation['annotation_tags']
variant_dict['db_gene_ids'] = annotation['gene_ids']
class MongoDatastore(datastore.Datastore):
def __init__(self, db, annotator, custom_population_store=None, custom_populations_map=None):
self._db = db
self._annotator = annotator
self._custom_population_store = custom_population_store
self._custom_populations_map = custom_populations_map
if self._custom_populations_map is None:
self._custom_populations_map = {}
def _make_db_query(self, genotype_filter=None, variant_filter=None):
"""
Caller specifies filters to get_variants, but they are evaluated later.
Here, we just inspect those filters and see what heuristics we can apply to avoid a full table scan,
Query here must return a superset of the true get_variants results
Note that the full annotation isn't stored, so use the fields added by _add_index_fields_to_variant
"""
db_query = {}
# genotype filter
if genotype_filter is not None:
_add_genotype_filter_to_variant_query(db_query, genotype_filter)
if variant_filter:
if variant_filter.locations:
location_ranges = []
for i, location in enumerate(variant_filter.locations):
if isinstance(location, basestring):
chrom, pos_range = location.split(":")
start, end = pos_range.split("-")
xstart = genomeloc.get_xpos(chrom, int(start))
xend = genomeloc.get_xpos(chrom, int(end))
variant_filter.locations[i] = (xstart, xend)
else:
xstart, xend = location
location_ranges.append({'$and' : [ {'xpos' : {'$gte': xstart }}, {'xpos' : {'$lte': xend }}] })
db_query['$or'] = location_ranges
if variant_filter.so_annotations:
db_query['db_tags'] = {'$in': variant_filter.so_annotations}
if variant_filter.genes:
db_query['db_gene_ids'] = {'$in': variant_filter.genes}
db_query['db_exclude_genes'] = getattr(variant_filter, 'exclude_genes')
if variant_filter.ref_freqs:
for population, freq in variant_filter.ref_freqs:
if population in self._annotator.reference_population_slugs:
db_query['db_freqs.' + population] = {'$lte': freq}
return db_query
def get_elasticsearch_variants(self, query_json, elasticsearch_variant_dataset, project_id, family_id=None, variant_id_filter=None):
from seqr.models import Individual as SeqrIndividual, Project as SeqrProject
from reference_data.models import GENOME_VERSION_GRCh37, GENOME_VERSION_GRCh38
elasticsearch_host = elasticsearch_variant_dataset.elasticsearch_host
elasticsearch_index = elasticsearch_variant_dataset.elasticsearch_index
client = elasticsearch.Elasticsearch(host=elasticsearch_host)
s = elasticsearch_dsl.Search(using=client, index=elasticsearch_index) #",".join(indices))
print("===> QUERY: ")
pprint(query_json)
if variant_id_filter is not None:
s = s.filter('term', **{"variantId": variant_id_filter})
# parse variant query
for key, value in query_json.items():
if key == 'db_tags':
vep_consequences = query_json.get('db_tags', {}).get('$in', [])
consequences_filter = Q("terms", transcriptConsequenceTerms=vep_consequences)
if 'intergenic_variant' in vep_consequences:
# for many intergenic variants VEP doesn't add any annotations, so if user selected 'intergenic_variant', also match variants where transcriptConsequenceTerms is emtpy
consequences_filter = consequences_filter | ~Q('exists', field='transcriptConsequenceTerms')
s = s.filter(consequences_filter)
print("==> transcriptConsequenceTerms: %s" % str(vep_consequences))
if key.startswith("genotypes"):
sample_id = ".".join(key.split(".")[1:-1])
encoded_sample_id = _encode_field_name(sample_id)
genotype_filter = value
if type(genotype_filter) == int or type(genotype_filter) == basestring:
print("==> genotypes: %s" % str({encoded_sample_id+"_num_alt": genotype_filter}))
s = s.filter('term', **{encoded_sample_id+"_num_alt": genotype_filter})
elif '$gte' in genotype_filter:
genotype_filter = {k.replace("$", ""): v for k, v in genotype_filter.items()}
s = s.filter('range', **{encoded_sample_id+"_num_alt": genotype_filter})
print("==> genotypes: %s" % str({encoded_sample_id+"_num_alt": genotype_filter}))
elif "$in" in genotype_filter:
num_alt_values = genotype_filter['$in']
q = Q('term', **{encoded_sample_id+"_num_alt": num_alt_values[0]})
print("==> genotypes: %s" % str({encoded_sample_id+"_num_alt": num_alt_values[0]}))
for num_alt_value in num_alt_values[1:]:
q = q | Q('term', **{encoded_sample_id+"_num_alt": num_alt_value})
print("==> genotypes: %s" % str({encoded_sample_id+"_num_alt": num_alt_value}))
s = s.filter(q)
if key == "db_gene_ids":
gene_ids = query_json.get('db_gene_ids', {}).get('$in', [])
exclude_genes = query_json.get('db_exclude_genes')
if exclude_genes:
s = s.exclude("terms", geneIds=gene_ids)
else:
s = s.filter("terms", geneIds=gene_ids)
print("==> %s %s" % ("exclude" if exclude_genes else "include", "geneIds: " + str(gene_ids)))
if key == "$or" and type(value) == list:
xpos_filters = value[0].get("$and", {})
# for example: $or : [{'$and': [{'xpos': {'$gte': 12345}}, {'xpos': {'$lte': 54321}}]}]
xpos_filters_dict = {}
for xpos_filter in xpos_filters:
xpos_filter_setting = xpos_filter["xpos"] # for example {'$gte': 12345} or {'$lte': 54321}
xpos_filters_dict.update(xpos_filter_setting)
xpos_filter_setting = {k.replace("$", ""): v for k, v in xpos_filters_dict.items()}
s = s.filter('range', **{"xpos": xpos_filter_setting})
print("==> xpos range: " + str({"xpos": xpos_filter_setting}))
af_key_map = {
"db_freqs.1kg_wgs_phase3": "g1k_AF",
"db_freqs.1kg_wgs_phase3_popmax": "g1k_POPMAX_AF",
"db_freqs.exac_v3": "exac_AF",
"db_freqs.exac_v3_popmax": "exac_AF_POPMAX",
"db_freqs.topmed": "topmed_AF",
"db_freqs.gnomad_exomes": "gnomad_exomes_AF",
"db_freqs.gnomad_exomes_popmax": "gnomad_exomes_AF_POPMAX",
"db_freqs.gnomad_genomes": "gnomad_genomes_AF",
"db_freqs.gnomad_genomes_popmax": "gnomad_genomes_AF_POPMAX",
}
if key in af_key_map:
filter_key = af_key_map[key]
af_filter_setting = {k.replace("$", ""): v for k, v in value.items()}
s = s.filter(Q('range', **{filter_key: af_filter_setting}) | ~Q('exists', field=filter_key))
print("==> %s: %s" % (filter_key, af_filter_setting))
s.sort("xpos")
print("=====")
print("FULL QUERY OBJ: " + pformat(s.__dict__))
print("FILTERS: " + pformat(s.to_dict()))
print("=====")
print("Hits: ")
# https://elasticsearch-py.readthedocs.io/en/master/helpers.html#elasticsearch.helpers.scan
response = s.execute()
print("TOTAL: " + str(response.hits.total))
#print(pformat(response.to_dict()))
if family_id is not None:
family_individual_ids = [i.individual_id for i in SeqrIndividual.objects.filter(family__family_id=family_id)]
else:
family_individual_ids = [i.individual_id for i in SeqrIndividual.objects.filter(family__project__project_id=project_id)]
for i, hit in enumerate(s.scan()): # preserve_order=True
if i == 0:
print("Hit columns: " + str(hit.__dict__))
filters = ",".join(hit["filters"]) if "filters" in hit else ""
genotypes = {}
all_num_alt = []
for individual_id in family_individual_ids:
encoded_individual_id = _encode_field_name(individual_id)
num_alt = int(hit["%s_num_alt" % encoded_individual_id]) if ("%s_num_alt" % encoded_individual_id) in hit else -1
if num_alt is not None:
all_num_alt.append(num_alt)
alleles = []
if num_alt == 0:
alleles = [hit["ref"], hit["ref"]]
elif num_alt == 1:
alleles = [hit["ref"], hit["alt"]]
elif num_alt == 2:
alleles = [hit["alt"], hit["alt"]]
elif num_alt == -1 or num_alt == None:
alleles = []
else:
raise ValueError("Invalid num_alt: " + str(num_alt))
genotypes[individual_id] = {
'ab': hit["%s_ab" % encoded_individual_id] if ("%s_ab" % encoded_individual_id) in hit else '',
'alleles': map(str, alleles),
'extras': {
'ad': hit["%s_ab" % encoded_individual_id] if ("%s_ad" % encoded_individual_id) in hit else '',
'dp': hit["%s_dp" % encoded_individual_id] if ("%s_dp" % encoded_individual_id) in hit else '',
'pl': '',
},
'filter': filters or "pass",
'gq': hit["%s_gq" % encoded_individual_id] if ("%s_gq" % encoded_individual_id in hit and hit["%s_gq" % encoded_individual_id] is not None) else '',
'num_alt': num_alt,
}
if all([num_alt <= 0 for num_alt in all_num_alt]):
#print("Filtered out due to genotype: " + str(genotypes))
print("Filtered all_num_alt <= 0 - Result %s: GRCh38: %s:%s, cadd: %s %s - %s" % (i, hit["contig"], hit["start"], hit["cadd_PHRED"] if "cadd_PHRED" in hit else "", hit["transcriptConsequenceTerms"], all_num_alt))
continue
vep_annotation = json.loads(str(hit['sortedTranscriptConsequences']))
if elasticsearch_variant_dataset.genome_version == GENOME_VERSION_GRCh37:
grch38_coord = liftover_grch37_to_grch38.convert_coordinate("chr%s" % hit["contig"].replace("chr", ""), int(hit["start"]))
if grch38_coord and grch37_coord and grch37_coord[0]:
grch38_coord = "%s-%s-%s-%s "% (grch37_coord[0][0], grch37_coord[0][1], hit["ref"], hit["alt"])
else:
grch38_coord = None
else:
grch38_coord = hit["variantId"]
if elasticsearch_variant_dataset.genome_version == GENOME_VERSION_GRCh38:
grch37_coord = liftover_grch38_to_grch37.convert_coordinate("chr%s" % hit["contig"].replace("chr", ""), int(hit["start"]))
if grch37_coord and grch37_coord and grch37_coord[0]:
grch37_coord = "%s-%s-%s-%s "% (grch37_coord[0][0], grch37_coord[0][1], hit["ref"], hit["alt"])
else:
grch37_coord = None
else:
grch37_coord = hit["variantId"]
result = {
#u'_id': ObjectId('596d2207ff66f729285ca588'),
'alt': str(hit["alt"]) if "alt" in hit else None,
'annotation': {
'fathmm': None,
'metasvm': None,
'muttaster': None,
'polyphen': None,
'sift': None,
'cadd_phred': hit["cadd_PHRED"] if "cadd_PHRED" in hit else None,
'dann_score': hit["dbnsfp_DANN_score"] if "dbnsfp_DANN_score" in hit else None,
'revel_score': hit["dbnsfp_REVEL_score"] if "dbnsfp_REVEL_score" in hit else None,
'mpc_score': hit["mpc_MPC"] if "mpc_MPC" in hit else None,
'annotation_tags': list(hit["transcriptConsequenceTerms"] or []) if "transcriptConsequenceTerms" in hit else None,
'coding_gene_ids': list(hit['codingGeneIds'] or []),
'gene_ids': list(hit['geneIds'] or []),
'vep_annotation': vep_annotation,
'vep_group': str(hit['mainTranscript_major_consequence'] or ""),
'vep_consequence': str(hit['mainTranscript_major_consequence'] or ""),
'worst_vep_annotation_index': 0,
'worst_vep_index_per_gene': {str(hit['mainTranscript_gene_id']): 0},
},
'chr': hit["contig"],
'coding_gene_ids': list(hit['codingGeneIds'] or []),
'db_freqs': {
'1kg_wgs_AF': float(hit["g1k_AF"] or 0.0),
'1kg_wgs_popmax_AF': float(hit["g1k_POPMAX_AF"] or 0.0),
'exac_v3_AC': float(hit["exac_AC_Adj"] or 0.0) if "exac_AC_Adj" in hit else 0.0,
'exac_v3_AF': float(hit["exac_AF"] or 0.0) if "exac_AF" in hit else (hit["exac_AC_Adj"]/float(hit["exac_AN_Adj"]) if int(hit["exac_AN_Adj"] or 0) > 0 else 0.0),
'exac_v3_popmax_AF': float(hit["exac_AF_POPMAX"] or 0.0) if "exac_AF_POPMAX" in hit else 0.0,
'topmed_AF': float(hit["topmed_AF"] or 0.0) if "topmed_AF" in hit else 0.0,
'gnomad_exomes_AC': float(hit["gnomad_exomes_AC"] or 0.0) if "gnomad_exomes_AC" in hit else 0.0,
'gnomad_exomes_Hom': float(hit["gnomad_exomes_HOM"] or 0.0) if "gnomad_exomes_HOM" in hit else 0.0,
'gnomad_exomes_AF': float(hit["gnomad_exomes_AF"] or 0.0) if "gnomad_exomes_AF" in hit else 0.0,
'gnomad_exomes_popmax_AF': float(hit["gnomad_exomes_AF_POPMAX"] or 0.0) if "gnomad_exomes_AF_POPMAX" in hit else 0.0,
'gnomad_genomes_AC': float(hit["gnomad_genomes_AC"] or 0.0) if "gnomad_genomes_AC" in hit else 0.0,
'gnomad_genomes_Hom': float(hit["gnomad_genomes_HOM"] or 0.0) if "gnomad_genomes_HOM" in hit else 0.0,
'gnomad_genomes_AF': float(hit["gnomad_genomes_AF"] or 0.0) if "gnomad_genomes_AF" in hit else 0.0,
'gnomad_genomes_popmax_AF': float(hit["gnomad_genomes_AF_POPMAX"] or 0.0) if "gnomad_genomes_AF_POPMAX" in hit else 0.0,
'gnomad_exome_coverage': float(hit["gnomad_exome_coverage"] or -1) if "gnomad_exome_coverage" in hit else -1,
'gnomad_genome_coverage': float(hit["gnomad_genome_coverage"] or -1) if "gnomad_genome_coverage" in hit else -1,
},
'db_gene_ids': list(hit["geneIds"] or []),
'db_tags': str(hit["transcriptConsequenceTerms"] or "") if "transcriptConsequenceTerms" in hit else None,
'extras': {
'genome_version': elasticsearch_variant_dataset.genome_version,
'grch37_coords': grch37_coord,
'grch38_coords': grch38_coord,
u'alt_allele_pos': 0,
u'orig_alt_alleles': map(str, [a.split("-")[-1] for a in hit["originalAltAlleles"]]) if "originalAltAlleles" in hit else []},
'gene_ids': None,
'genotypes': genotypes,
'pos': long(hit['start']),
'pos_end': str(hit['end']),
'ref': str(hit['ref']),
'vartype': 'snp' if len(hit['ref']) == len(hit['alt']) else "indel",
'vcf_id': None,
'xpos': long(hit["xpos"]),
'xposx': long(hit["xpos"]),
}
result["annotation"]["freqs"] = result["db_freqs"]
#print("\n\nConverted result: " + str(i))
print("Result %s: GRCh37: %s GRCh38: %s:, cadd: %s %s - gene ids: %s, coding gene_ids: %s" % (i, grch37_coord, grch37_coord, hit["cadd_PHRED"] if "cadd_PHRED" in hit else "", hit["transcriptConsequenceTerms"], result["gene_ids"], result["coding_gene_ids"]))
#pprint(result["db_freqs"])
yield result
if i > settings.VARIANT_QUERY_RESULTS_LIMIT:
break
    def get_variants(self, project_id, family_id, genotype_filter=None, variant_filter=None):
        """Yield Variant objects for a family: from elasticsearch when a dataset
        is configured for the project/family, otherwise from mongo (where
        passes_variant_filter() is re-applied after annotation)."""
        db_query = self._make_db_query(genotype_filter, variant_filter)
        sys.stderr.write("%s\n" % str(db_query))
        counters = OrderedDict([('returned_by_query', 0), ('passes_variant_filter', 0)])
        pprint({'$and' : [{k: v} for k, v in db_query.items()]})
        elasticsearch_variant_dataset = get_elasticsearch_dataset(project_id, family_id)
        if elasticsearch_variant_dataset is not None:
            # elasticsearch path: all filtering happens inside get_elasticsearch_variants
            for i, variant_dict in enumerate(self.get_elasticsearch_variants(db_query, elasticsearch_variant_dataset, project_id, family_id)):
                counters["returned_by_query"] += 1
                variant = Variant.fromJSON(variant_dict)
                yield variant
            print("Counters: " + str(counters))
        else:
            # mongo fallback path
            collection = self._get_family_collection(project_id, family_id)
            if not collection:
                print("Error: mongodb collection not found for project %s family %s " % (project_id, family_id))
                return
            counters = OrderedDict([('returned_by_query', 0), ('passes_variant_filter', 0)])
            # cursor is limited to LIMIT+5 so the over-limit check below can trigger
            for i, variant_dict in enumerate(collection.find({'$and' : [{k: v} for k, v in db_query.items()]}).sort('xpos').limit(settings.VARIANT_QUERY_RESULTS_LIMIT+5)):
                if i >= settings.VARIANT_QUERY_RESULTS_LIMIT:
                    raise Exception("ERROR: this search exceeded the %s variant result size limit. Please set additional filters and try again." % settings.VARIANT_QUERY_RESULTS_LIMIT)
                variant = Variant.fromJSON(variant_dict)
                self.add_annotations_to_variant(variant, project_id)
                counters["returned_by_query"] += 1
                if passes_variant_filter(variant, variant_filter)[0]:
                    counters["passes_variant_filter"] += 1
                    yield variant
            for k, v in counters.items():
                sys.stderr.write("  %s: %s\n" % (k,v))
def get_variants_in_gene(self, project_id, family_id, gene_id, genotype_filter=None, variant_filter=None):
if variant_filter is None:
modified_variant_filter = VariantFilter()
else:
modified_variant_filter = copy.deepcopy(variant_filter)
modified_variant_filter.add_gene(gene_id)
db_query = self._make_db_query(genotype_filter, modified_variant_filter)
collection = self._get_family_collection(project_id, family_id)
if not collection:
return
# we have to collect list in memory here because mongo can't sort on xpos,
# as result size can get too big.
# need to find a better way to do this.
variants = []
for variant_dict in collection.find(db_query).hint([('db_gene_ids', pymongo.ASCENDING), ('xpos', pymongo.ASCENDING)]):
variant = Variant.fromJSON(variant_dict)
self.add_annotations_to_variant(variant, project_id)
if passes_variant_filter(variant, modified_variant_filter):
variants.append(variant)
variants = sorted(variants, key=lambda v: v.unique_tuple())
for v in variants:
yield v
def get_variants_in_range(self, project_id, family_id, xpos_start, xpos_end):
collection = self._get_family_collection(project_id, family_id)
if not collection:
raise ValueError("Family not found: " + str(family_id))
for i, variant_dict in enumerate(collection.find({'$and': [{'xpos': {'$gte': xpos_start}}, {'xpos': {'$lte': xpos_end}}]}).limit(settings.VARIANT_QUERY_RESULTS_LIMIT+5)):
if i > settings.VARIANT_QUERY_RESULTS_LIMIT:
raise Exception("ERROR: this search exceeded the %s variant result size limit. Please set additional filters and try again." % settings.VARIANT_QUERY_RESULTS_LIMIT)
variant = Variant.fromJSON(variant_dict)
self.add_annotations_to_variant(variant, project_id)
yield variant
def get_single_variant(self, project_id, family_id, xpos, ref, alt):
from seqr.utils.xpos_utils import get_chrom_pos
elasticsearch_variant_dataset = get_elasticsearch_dataset(project_id, family_id)
if elasticsearch_variant_dataset is not None:
chrom, pos = get_chrom_pos(xpos)
variant_id = "%s-%s-%s-%s" % (chrom, pos, ref, alt)
results = list(self.get_elasticsearch_variants({}, elasticsearch_variant_dataset, project_id, family_id=family_id, variant_id_filter=variant_id))
print("###### single variant search: " + variant_id + ". results: " + str(results))
if not results:
return None
variant_dict = results[0]
variant = Variant.fromJSON(variant_dict)
return variant
else:
collection = self._get_family_collection(project_id, family_id)
if not collection:
return None
variant_dict = collection.find_one({'xpos': xpos, 'ref': ref, 'alt': alt})
if variant_dict:
variant = Variant.fromJSON(variant_dict)
self.add_annotations_to_variant(variant, project_id)
return variant
else:
return None
def get_variants_cohort(self, project_id, cohort_id, variant_filter=None):
db_query = self._make_db_query(None, variant_filter)
collection = self._get_family_collection(project_id, cohort_id)
for i, variant in enumerate(collection.find(db_query).sort('xpos').limit(settings.VARIANT_QUERY_RESULTS_LIMIT+5)):
if i > settings.VARIANT_QUERY_RESULTS_LIMIT:
raise Exception("ERROR: this search exceeded the %s variant result size limit. Please set additional filters and try again." % settings.VARIANT_QUERY_RESULTS_LIMIT)
yield Variant.fromJSON(variant)
def get_single_variant_cohort(self, project_id, cohort_id, xpos, ref, alt):
collection = self._get_family_collection(project_id, cohort_id)
variant = collection.find_one({'xpos': xpos, 'ref': ref, 'alt': alt})
return Variant.fromJSON(variant)
    def get_de_novo_variants(self, project_id, family, de_novo_filter, variant_filter, quality_filter):
        """Yield candidate de-novo variants for a family.

        Uses elasticsearch when a dataset is configured, otherwise mongo.
        When exactly two parents are identified (a trio), applies the
        hard-coded trio thresholds described below; otherwise applies
        quality_filter unchanged to each individual in de_novo_filter.
        """
        db_query = self._make_db_query(de_novo_filter, variant_filter)
        elasticsearch_variant_dataset = get_elasticsearch_dataset(family.project_id, family.family_id)
        if elasticsearch_variant_dataset is not None:
            variant_iter = self.get_elasticsearch_variants(db_query, elasticsearch_variant_dataset, family.project_id, family.family_id)
        else:
            collection = self._get_family_collection(family.project_id, family.family_id)
            if not collection:
                raise ValueError("Error: mongodb collection not found for project %s family %s " % (family.project_id, family.family_id))
            variant_iter = collection.find(db_query).sort('xpos').limit(settings.VARIANT_QUERY_RESULTS_LIMIT+5)
        # get ids of parents in this family
        valid_ids = set(indiv_id for indiv_id in family.individuals)
        paternal_ids = set(i.paternal_id for i in family.get_individuals() if i.paternal_id in valid_ids)
        maternal_ids = set(i.maternal_id for i in family.get_individuals() if i.maternal_id in valid_ids)
        parental_ids = paternal_ids | maternal_ids
        # loop over all variants returned
        for i, variant_dict in enumerate(variant_iter):
            if i > settings.VARIANT_QUERY_RESULTS_LIMIT:
                raise Exception("VARIANT_QUERY_RESULTS_LIMIT of %s exceeded for query: %s" % (settings.VARIANT_QUERY_RESULTS_LIMIT, db_query))
            variant = Variant.fromJSON(variant_dict)
            self.add_annotations_to_variant(variant, family.project_id)
            if not passes_variant_filter(variant, variant_filter)[0]:
                continue
            # handle genotype filters
            if len(parental_ids) != 2:
                # ordinary filters for non-trios
                for indiv_id in de_novo_filter.keys():
                    genotype = variant.get_genotype(indiv_id)
                    if not passes_genotype_filter(genotype, quality_filter):
                        break
                else:
                    # for/else: yield only when no individual failed the filter
                    yield variant
            else:
                # for trios use Mark's recommended filters for de-novo variants:
                # Hard-coded thresholds:
                #   1) Child must have > 10% of combined Parental Read Depth
                #   2) MinimumChildGQscore >= 20
                #   3) MaximumParentAlleleBalance <= 5%
                # Adjustable filters:
                #   Variants should PASS
                #   Child AB should be >= 20
                # compute parental read depth for filter 1
                total_parental_read_depth = 0
                for indiv_id in parental_ids:
                    genotype = variant.get_genotype(indiv_id)
                    if genotype.extras and 'dp' in genotype.extras and genotype.extras['dp'] != '.':
                        total_parental_read_depth += int(genotype.extras['dp'])
                    else:
                        total_parental_read_depth = None  # both parents must have DP to use the parental_read_depth filters
                        break
                for indiv_id in de_novo_filter.keys():
                    quality_filter_temp = quality_filter.copy()  # copy before modifying
                    if indiv_id in parental_ids:
                        # handle one of the parents
                        quality_filter_temp['max_ab'] = 5
                    else:
                        # handle child
                        quality_filter_temp['min_gq'] = 20
                        if total_parental_read_depth is not None:
                            quality_filter_temp['min_dp'] = total_parental_read_depth * 0.1
                    genotype = variant.get_genotype(indiv_id)
                    if not passes_genotype_filter(genotype, quality_filter_temp):
                        #print("%s: %s " % (variant.chr, variant.pos))
                        break
                else:
                    yield variant
#
# New sample stuff
#
def get_all_individuals(self):
"""
List of all individuals in the datastore
Items are (project_id, indiv_id) tuples
"""
return [(i['project_id'], i['indiv_id']) for i in self._db.individuals.find()]
def get_all_families(self):
"""
List of all families in the datastore
Items are (project_id, family_id) tuples
"""
return [(i['project_id'], i['family_id']) for i in self._db.families.find()]
def individual_exists(self, project_id, indiv_id):
return self._db.individuals.find_one({
'project_id': project_id,
'indiv_id': indiv_id
}) is not None
def add_individual(self, project_id, indiv_id):
if self.individual_exists(project_id, indiv_id):
raise Exception("Indiv (%s, %s) already exists" % (project_id, indiv_id))
indiv = {
'project_id': project_id,
'indiv_id': indiv_id,
}
self._db.individuals.save(indiv)
def get_individuals(self, project_id):
return [ i['indiv_id'] for i in self._db.individuals.find({ 'project_id': project_id }) ]
def family_exists(self, project_id, family_id):
    """Return True if a family record exists for (project_id, family_id)."""
    query = {'project_id': project_id, 'family_id': family_id}
    return self._db.families.find_one(query) is not None
def get_individuals_for_family(self, project_id, family_id):
    """Return the 'individuals' list stored on the family record.

    NOTE(review): raises TypeError if the family doesn't exist — callers
    presumably check family_exists() first; confirm before hardening.
    """
    family_doc = self._db.families.find_one({'project_id': project_id, 'family_id': family_id})
    return family_doc['individuals']
def get_family_status(self, project_id, family_id):
    """Return the family's load status, or None if the family doesn't exist."""
    doc = self._db.families.find_one({'project_id': project_id, 'family_id': family_id})
    return doc['status'] if doc else None
def get_family_statuses(self, family_list):
    """
    Map each (project_id, family_id) tuple in family_list to its load status.
    Tuples with no matching family record map to None.
    Issues one find() per distinct project rather than one per family.
    """
    statuses = dict((fam, None) for fam in family_list)
    families_by_project = defaultdict(list)
    for project_id, family_id in family_list:
        families_by_project[project_id].append(family_id)
    for project_id, family_ids in families_by_project.items():
        cursor = self._db.families.find(
            {'project_id': project_id, 'family_id': {'$in': family_ids}})
        for doc in cursor:
            statuses[(project_id, doc['family_id'])] = doc['status']
    return statuses
def _get_family_info(self, project_id, family_id=None):
    """Return one family doc, or all of the project's family docs when family_id is None."""
    if family_id is None:
        return list(self._db.families.find({'project_id': project_id}))
    return self._db.families.find_one({'project_id': project_id, 'family_id': family_id})
def _get_family_collection(self, project_id, family_id):
    """Return the mongo collection holding this family's variants, or None."""
    info = self._get_family_info(project_id, family_id)
    if info:
        return self._db[info['coll_name']]
    return None
#
# Variant loading
# Unique to mongo datastore, not part of protocol
#
def _add_family_info(self, project_id, family_id, individuals):
    """
    Record the background info for one family (IDs only, kept as simple
    as possible). Creates any missing individual records plus the family's
    indexed variant collection; after this runs, variants can be loaded.
    No-op if the family already exists.
    """
    if self.family_exists(project_id, family_id):
        #raise Exception("Family (%s, %s) already exists" % (project_id, family_id))
        return
    for indiv_id in individuals:
        if not self.individual_exists(project_id, indiv_id):
            self.add_individual(project_id, indiv_id)
    coll_name = "family_%s_%s" % (
        slugify(project_id, separator='_'),
        slugify(family_id, separator='_'),
    )
    family_doc = {
        'project_id': project_id,
        'family_id': family_id,
        'individuals': individuals,
        'coll_name': coll_name,
        'status': 'loading',
    }
    self._index_family_collection(self._db[coll_name])
    self._db.families.save(family_doc)
def add_family(self, project_id, family_id, individuals):
    """
    Add a new family, creating missing individuals as needed.
    Phenotypes and pedigrees aren't stored, just which individuals.
    """
    self._add_family_info(project_id, family_id, individuals)
def add_family_set(self, family_list):
    """
    Add a set of families from the same VCF file.
    family_list is a list of dicts with keys project_id, family_id, individuals.
    """
    for info in family_list:
        self._add_family_info(info['project_id'], info['family_id'], info['individuals'])
def load_family_set(self, vcf_file_path, family_list, reference_populations=None, vcf_id_map=None, mark_as_loaded=True, start_from_chrom=None, end_with_chrom=None):
    """
    Load a set of families from the same VCF file.
    family_list is a list of (project_id, family_id) tuples.
    When mark_as_loaded is True, each family's status is finalized afterwards.
    """
    family_info_list = [
        self._get_family_info(project_id, family_id)
        for project_id, family_id in family_list
    ]
    self._load_variants_for_family_set(
        family_info_list, vcf_file_path,
        reference_populations=reference_populations, vcf_id_map=vcf_id_map,
        start_from_chrom=start_from_chrom, end_with_chrom=end_with_chrom)
    if mark_as_loaded:
        for family_info in family_info_list:
            self._finalize_family_load(family_info['project_id'], family_info['family_id'])
def _load_variants_for_family_set(self, family_info_list, vcf_file_path, reference_populations=None, vcf_id_map=None, start_from_chrom=None, end_with_chrom=None):
    """
    Load variants for a set of families that all come from the same VCF file.
    Added after load_variants_for_family to speed up loading - the goal is to
    only iterate the VCF once:

        for each raw variant:
            annotate
            for each family:
                extract family variant from full variant
                update variant inheritance
                if family variant is relevant for family:
                    add to collection
    """
    self._add_vcf_file_for_family_set(
        family_info_list, vcf_file_path,
        reference_populations=reference_populations, vcf_id_map=vcf_id_map,
        start_from_chrom=start_from_chrom, end_with_chrom=end_with_chrom)
def _add_vcf_file_for_family_set(self, family_info_list, vcf_file_path, reference_populations=None, vcf_id_map=None, start_from_chrom=None, end_with_chrom=None):
    """
    Iterate the VCF at vcf_file_path once and insert each relevant variant
    into every family's mongo collection.

    How the VCF is read depends on the path and arguments:
    - if the path contains "_chr" or ".chr" (per-chromosome VCFs), resume
      from the smallest already-loaded position across families on that
      chromosome (via tabix);
    - else if start_from_chrom/end_with_chrom are given, tabix-fetch only
      that chromosome range (end chromosome included, note the +1 below);
    - otherwise stream the whole (possibly compressed) file.
    """
    collections = {f['family_id']: self._db[f['coll_name']] for f in family_info_list}
    #for collection in collections.values():
    #    collection.drop_indexes()
    indiv_id_list = [i for f in family_info_list for i in f['individuals']]
    number_of_families = len(family_info_list)
    sys.stderr.write("Loading variants for %(number_of_families)d families %(family_info_list)s from %(vcf_file_path)s\n" % locals())
    for family in family_info_list:
        print("Indexing family: " + str(family))
        collection = collections[family['family_id']]
        collection.ensure_index([('xpos', 1), ('ref', 1), ('alt', 1)])
    # check whether some of the variants for this chromosome has been loaded already
    # if yes, start from the last loaded variant, and not from the beginning
    if "_chr" in vcf_file_path or ".chr" in vcf_file_path:
        # if the VCF files are split by chromosome (eg. for WGS projects), check within the chromosome
        vcf_file = compressed_file(vcf_file_path)
        # peek at the first row just to learn which chromosome this file covers
        variant = next(vcf_stuff.iterate_vcf(vcf_file, genotypes=False, indiv_id_list=indiv_id_list, vcf_id_map=vcf_id_map))
        print(vcf_file_path + " - chromsome: " + str(variant.chr))
        vcf_file.close()
        position_per_chrom = {}
        for chrom in range(1,24):
            position_per_chrom[chrom] = defaultdict(int)
            for family in family_info_list: #variants = collections[family['family_id']].find().sort([('xpos',-1)]).limit(1)
                # last loaded variant for this family on this chromosome
                # (xpos encodes chrom*1e9 + position)
                variants = list(collections[family['family_id']].find({'$and': [{'xpos': { '$gte': chrom*1e9 }}, {'xpos': { '$lt': (chrom+1)*1e9}}] }).sort([('xpos',-1)]).limit(1))
                if len(variants) > 0:
                    position_per_chrom[chrom][family['family_id']] = variants[0]['xpos'] - chrom*1e9
                else:
                    position_per_chrom[chrom][family['family_id']] = 0
        for chrom in range(1,24):
            position_per_chrom[chrom] = min(position_per_chrom[chrom].values()) # get the smallest last-loaded variant position for this chromosome across all families
        chr_idx = int(variant.xpos/1e9)
        start_from_pos = int(position_per_chrom[chr_idx])
        print("Start from: %s - %s (%0.1f%% done)" % (chr_idx, start_from_pos, 100.*start_from_pos/CHROMOSOME_SIZES[variant.chr.replace("chr", "")]))
        tabix_file = pysam.TabixFile(vcf_file_path)
        vcf_iter = itertools.chain(tabix_file.header, tabix_file.fetch(variant.chr.replace("chr", ""), start_from_pos, int(2.5e8)))
    elif start_from_chrom or end_with_chrom:
        if start_from_chrom:
            print("Start chrom: chr%s" % start_from_chrom)
        if end_with_chrom:
            print("End chrom: chr%s" % end_with_chrom)
        chrom_list = list(map(str, range(1,23))) + ['X','Y']
        chrom_list_start_index = 0
        if start_from_chrom:
            chrom_list_start_index = chrom_list.index(start_from_chrom.replace("chr", "").upper())
        chrom_list_end_index = len(chrom_list)
        if end_with_chrom:
            chrom_list_end_index = chrom_list.index(end_with_chrom.replace("chr", "").upper())
        tabix_file = pysam.TabixFile(vcf_file_path)
        vcf_iter = tabix_file.header
        # +1 makes end_with_chrom inclusive
        for chrom in chrom_list[chrom_list_start_index:chrom_list_end_index+1]:
            print("Will load chrom: " + chrom)
            try:
                vcf_iter = itertools.chain(vcf_iter, tabix_file.fetch(chrom))
            except ValueError as e:
                print("WARNING: " + str(e))
    else:
        vcf_iter = vcf_file = compressed_file(vcf_file_path)
        # TODO handle case where it's one vcf file, not split by chromosome
    size = os.path.getsize(vcf_file_path)
    #progress = get_progressbar(size, 'Loading VCF: {}'.format(vcf_file_path))
    def insert_all_variants_in_buffer(buff, collections_dict):
        # Drain buff (family_id -> list of variant dicts), inserting one
        # variant at a time in randomized family order.
        for family_id in buff:
            if len(buff[family_id]) == 0: # defensive programming
                raise ValueError("%s has zero variants to insert. Should not be in buff." % family_id)
        while len(buff) > 0:
            # choose a random family for which to insert a variant from among families that still have variants to insert
            family_id = random.choice(buff.keys())
            # pop a variant off the list for this family, and insert it
            family_variant_dict_to_insert = buff[family_id].pop()
            c = collections_dict[family_id]
            c.insert(family_variant_dict_to_insert)
            if len(buff[family_id]) == 0:
                del buff[family_id] # if no more variants for this family, delete it
    vcf_rows_counter = 0
    variants_buffered_counter = 0
    family_id_to_variant_list = defaultdict(list) # will accumulate variants to be inserted all at once
    for variant in vcf_stuff.iterate_vcf(vcf_iter, genotypes=True, indiv_id_list=indiv_id_list, vcf_id_map=vcf_id_map):
        if variant.alt == "*":
            #print("Skipping GATK 3.4 * alt allele: " + str(variant.unique_tuple()))
            continue
        try:
            annotation = self._annotator.get_annotation(variant.xpos, variant.ref, variant.alt, populations=reference_populations)
        except ValueError, e:
            sys.stderr.write("WARNING: " + str(e) + "\n")
            continue
        vcf_rows_counter += 1
        for family in family_info_list:
            # TODO: can we move this inside the if relevant clause below?
            try:
                family_variant = variant.make_copy(restrict_to_genotypes=family['individuals'])
                family_variant_dict = family_variant.toJSON()
                _add_index_fields_to_variant(family_variant_dict, annotation)
                if xbrowse_utils.is_variant_relevant_for_individuals(family_variant, family['individuals']):
                    collection = collections[family['family_id']]
                    # skip variants that were already inserted (resume support)
                    if not collection.find_one({'xpos': family_variant.xpos, 'ref': family_variant.ref, 'alt': family_variant.alt}):
                        family_id_to_variant_list[family['family_id']].append(family_variant_dict)
                        variants_buffered_counter += 1
            except Exception, e:
                sys.stderr.write("ERROR: on variant %s, family: %s - %s\n" % (variant.toJSON(), family, e))
        # flush the buffer periodically rather than per-row
        if variants_buffered_counter > 2000:
            print(date.strftime(datetime.now(), "%m/%d/%Y %H:%M:%S") + "-- %s:%s-%s-%s (%0.1f%% done) - inserting %d family-variants from %d vcf rows into %s families" % (variant.chr, variant.pos, variant.ref, variant.alt, 100*variant.pos / CHROMOSOME_SIZES[variant.chr.replace("chr", "")], variants_buffered_counter, vcf_rows_counter, len(family_id_to_variant_list)))
            insert_all_variants_in_buffer(family_id_to_variant_list, collections)
            assert len(family_id_to_variant_list) == 0
            vcf_rows_counter = 0
            variants_buffered_counter = 0
    # flush whatever remains after the last VCF row
    if variants_buffered_counter > 0:
        insert_all_variants_in_buffer(family_id_to_variant_list, collections)
        assert len(family_id_to_variant_list) == 0
def _finalize_family_load(self, project_id, family_id):
    """
    Call after a family finishes loading: (re)index its collection and
    flip the family record's status to 'loaded'.
    """
    self._index_family_collection(self._get_family_collection(project_id, family_id))
    family_doc = self._db.families.find_one({'project_id': project_id, 'family_id': family_id})
    family_doc['status'] = 'loaded'
    self._db.families.save(family_doc)
def _index_family_collection(self, collection):
    """Create the standard indexes used by variant queries on a family collection."""
    index_specs = (
        'xpos',
        [('db_freqs', 1), ('xpos', 1)],
        [('db_tags', 1), ('xpos', 1)],
        [('db_gene_ids', 1), ('xpos', 1)],
    )
    for spec in index_specs:
        collection.ensure_index(spec)
def delete_project(self, project_id):
    """Remove a project's individuals, per-family variant collections, and family records."""
    self._db.individuals.remove({'project_id': project_id})
    for family_doc in self._db.families.find({'project_id': project_id}):
        self._db.drop_collection(family_doc['coll_name'])
    self._db.families.remove({'project_id': project_id})
def delete_family(self, project_id, family_id):
    """Drop a single family's variant collection and remove its family record."""
    query = {'project_id': project_id, 'family_id': family_id}
    for family_doc in self._db.families.find(query):
        self._db.drop_collection(family_doc['coll_name'])
    self._db.families.remove(query)
def add_annotations_to_variant(self, variant, project_id):
    """
    Annotate the variant in place, then best-effort add any custom
    population frequencies configured for this project (errors are
    logged to stderr rather than raised).
    """
    self._annotator.annotate_variant(variant)
    try:
        if self._custom_population_store:
            slugs = self._custom_populations_map.get(project_id)
            if slugs:
                self._custom_population_store.add_populations_to_variants([variant], slugs)
    except Exception as e:
        sys.stderr.write("Error in add_annotations_to_variant: " + str(e) + "\n")
#
# This stuff is all copied in from ProjectDatastore
#
def _get_project_collection(self, project_id):
    """Return the project-wide variant collection, or None if the project doesn't exist."""
    project = self._db.projects.find_one({'project_id': project_id})
    return self._db[project['collection_name']] if project else None
def add_variants_to_project_from_vcf(self, vcf_file, project_id, indiv_id_list=None, start_from_chrom=None, end_with_chrom=None):
    """
    Load variants from an open VCF file into the project-wide variant collection.

    Args:
        vcf_file: open file(-like) object positioned at the start of the VCF.
        project_id: project whose collection receives the variants.
        indiv_id_list: optional subset of individual ids to parse genotypes for.
        start_from_chrom / end_with_chrom: optional inclusive chromosome range
            (without "chr" prefix, e.g. "1" .. "X") restricting which rows load.
    """
    chrom_list = list(map(str, range(1, 23))) + ['X', 'Y']
    chrom_list_start_index = 0
    if start_from_chrom:
        chrom_list_start_index = chrom_list.index(start_from_chrom.replace("chr", "").upper())
    chrom_list_end_index = len(chrom_list)
    if end_with_chrom:
        # +1 so end_with_chrom itself is included, consistent with
        # _add_vcf_file_for_family_set (previously the end chrom was skipped)
        chrom_list_end_index = chrom_list.index(end_with_chrom.replace("chr", "").upper()) + 1
    chromosomes_to_include = set(chrom_list[chrom_list_start_index:chrom_list_end_index])
    #tabix_file = pysam.TabixFile(vcf_file)
    #vcf_iter = tabix_file.header
    #for chrom in chrom_list[chrom_list_start_index:chrom_list_end_index]:
    #    print("Will load chrom: " + chrom)
    #    vcf_iter = itertools.chain(vcf_iter, tabix_file.fetch(chrom))
    project_collection = self._get_project_collection(project_id)
    # "or []" because projects without custom populations have no map entry;
    # concatenating list + None raised TypeError before
    reference_populations = self._annotator.reference_population_slugs + (self._custom_populations_map.get(project_id) or [])
    for counter, variant in enumerate(vcf_stuff.iterate_vcf(vcf_file, genotypes=True, indiv_id_list=indiv_id_list)):
        if (start_from_chrom or end_with_chrom) and variant.chr.replace("chr", "") not in chromosomes_to_include:
            continue
        if variant.alt == "*":
            # GATK 3.4 spanning-deletion allele - not a real alt
            #print("Skipping GATK 3.4 * alt allele: " + str(variant.unique_tuple()))
            continue
        if counter % 2000 == 0:
            # periodic progress log
            print(date.strftime(datetime.now(), "%m/%d/%Y %H:%M:%S") + "-- inserting variant %d %s:%s-%s-%s (%0.1f%% done with %s) " % (counter, variant.chr, variant.pos, variant.ref, variant.alt, 100*variant.pos / CHROMOSOME_SIZES[variant.chr.replace("chr", "")], variant.chr))
        variant_dict = project_collection.find_one({'xpos': variant.xpos, 'ref': variant.ref, 'alt': variant.alt})
        if not variant_dict:
            # new variant: serialize, annotate, and add the indexable fields
            variant_dict = variant.toJSON()
            try:
                annotation = self._annotator.get_annotation(variant.xpos, variant.ref, variant.alt, populations=reference_populations)
            except ValueError as e:
                sys.stderr.write("WARNING: " + str(e) + "\n")
                continue
            _add_index_fields_to_variant(variant_dict, annotation)
        else:
            # variant already stored: merge in any new non-ref genotypes
            for indiv_id, genotype in variant.get_genotypes():
                if genotype.num_alt != 0:
                    variant_dict['genotypes'][indiv_id] = genotype._asdict()
        project_collection.save(variant_dict)
def project_exists(self, project_id):
    """Return the project document if it exists, else None (used as a truthy check)."""
    project_doc = self._db.projects.find_one({'project_id': project_id})
    return project_doc
def project_collection_is_loaded(self, project_id):
    """Return True if the project collection is fully loaded (this is the
    collection that stores the project-wide set of variants used for gene
    search)."""
    project = self._db.projects.find_one({'project_id': project_id})
    if project is None or "is_loaded" not in project:
        return False
    return project["is_loaded"]
def set_project_collection_to_loaded(self, project_id, is_loaded=True):
    """Set the project collection "is_loaded" field to the given value.
    This field is used by other parts of seqr to decide if this collection
    is ready for use."""
    project = self._db.projects.find_one({'project_id': project_id})
    if project is None or "is_loaded" not in project:
        raise ValueError("Couldn't find project collection for %s" % project_id)
    project["is_loaded"] = is_loaded
    #print("Setting %s to %s" % (project["_id"], project))
    # mongo won't let us $set the _id field, so strip it before updating
    mongo_id = project.pop('_id')
    self._db.projects.update({'_id': mongo_id}, {"$set": project})
def add_project(self, project_id):
    """
    Create the project record plus its (empty, indexed) variant collection.
    We keep this as simple as possible - just IDs; after this runs,
    variants are ready to be loaded. Raises if the project already exists.
    """
    if self.project_exists(project_id):
        raise Exception("Project {} exists".format(project_id))
    collection_name = 'project_' + ''.join(random.choice(string.digits) for _ in range(8))
    project = {
        'project_id': project_id,
        'collection_name': collection_name,
        'is_loaded': False,
    }
    self._db.projects.insert(project)
    self._index_family_collection(self._db[collection_name])
def delete_project_store(self, project_id):
    """Drop the project-wide variant collection and remove the project record."""
    project = self._db.projects.find_one({'project_id': project_id})
    if project is not None:
        self._db.drop_collection(project['collection_name'])
    self._db.projects.remove({'project_id': project_id})
def get_project_variants_in_gene(self, project_id, gene_id, variant_filter=None):
    """
    Return the project-wide variants in the given gene that pass
    variant_filter. Uses the elasticsearch dataset when one is configured
    for the project; otherwise falls back to the mongo project collection.
    """
    # deep-copy so the caller's filter isn't mutated by add_gene()
    if variant_filter is None:
        modified_variant_filter = VariantFilter()
    else:
        modified_variant_filter = copy.deepcopy(variant_filter)
    modified_variant_filter.add_gene(gene_id)
    db_query = self._make_db_query(None, modified_variant_filter)
    sys.stderr.write("Project Gene Search: " + str(project_id) + " all variants query: " + str(db_query))
    elasticsearch_variant_dataset = get_elasticsearch_dataset(project_id, family_id=None)
    if elasticsearch_variant_dataset is not None:
        # ES path: results are returned as-is (no extra client-side filtering)
        variants = []
        for i, variant_dict in enumerate(self.get_elasticsearch_variants(db_query, elasticsearch_variant_dataset, project_id)):
            variant = Variant.fromJSON(variant_dict)
            variants.append(variant)
        #variants = sorted(variants, key=lambda v: v.unique_tuple())
        return variants
    collection = self._get_project_collection(project_id)
    if not collection:
        return []
    # we have to collect list in memory here because mongo can't sort on xpos,
    # as result size can get too big.
    # need to find a better way to do this.
    variants = []
    for variant_dict in collection.find(db_query).hint([('db_gene_ids', pymongo.ASCENDING), ('xpos', pymongo.ASCENDING)]):
        variant = Variant.fromJSON(variant_dict)
        self.add_annotations_to_variant(variant, project_id)
        # db_query is only a superset heuristic; re-check the full filter here
        if passes_variant_filter(variant, modified_variant_filter):
            variants.append(variant)
    variants = sorted(variants, key=lambda v: v.unique_tuple())
    return variants
# added support for 'exclude' gene list filter
from collections import defaultdict, OrderedDict
import itertools
import json
import os
import sys
import random
import string
import copy
import sys
from datetime import date, datetime
import pysam
import pymongo
from django.core.exceptions import ObjectDoesNotExist
from xbrowse.core.genotype_filters import passes_genotype_filter
from xbrowse.datastore.utils import get_elasticsearch_dataset
from xbrowse.utils import compressed_file, get_progressbar
from xbrowse.utils import slugify
import settings
from xbrowse import utils as xbrowse_utils
from xbrowse import vcf_stuff, genomeloc
from xbrowse.core.variant_filters import VariantFilter, passes_variant_filter
from xbrowse import Variant
import datastore
from pprint import pprint, pformat
import StringIO
import elasticsearch
import elasticsearch_dsl
from elasticsearch_dsl import Q
from pyliftover import LiftOver
# Module-level pyliftover chain objects for converting variant coordinates
# between GRCh38 (hg38) and GRCh37 (hg19) builds.
liftover_grch38_to_grch37 = LiftOver('hg38', 'hg19')
liftover_grch37_to_grch38 = LiftOver('hg19', 'hg38')
# Elasticsearch field names can't contain certain characters, so strings used
# as field names (e.g. sample ids) are encoded with the escape sequences
# below. Kept as human-readable as possible.
# NOTE: this trio of constants was previously defined twice verbatim;
# the duplicate has been removed.
ES_FIELD_NAME_ESCAPE_CHAR = '$'
ES_FIELD_NAME_BAD_LEADING_CHARS = set(['_', '-', '+', ES_FIELD_NAME_ESCAPE_CHAR])
ES_FIELD_NAME_SPECIAL_CHAR_MAP = {
    '.': '_$dot$_',
    ',': '_$comma$_',
    '#': '_$hash$_',
    '*': '_$star$_',
    '(': '_$lp$_',
    ')': '_$rp$_',
    '[': '_$lsb$_',
    ']': '_$rsb$_',
    '{': '_$lcb$_',
    '}': '_$rcb$_',
}
def _encode_field_name(s):
    """Encode an arbitrary string into a legal elasticsearch field name.

    See:
    https://discuss.elastic.co/t/special-characters-in-field-names/10658/2
    https://discuss.elastic.co/t/illegal-characters-in-elasticsearch-field-names/17196/2
    """
    pieces = []
    for c in s:
        if c == ES_FIELD_NAME_ESCAPE_CHAR:
            # the escape char itself is encoded by doubling it
            pieces.append(2 * ES_FIELD_NAME_ESCAPE_CHAR)
        elif c in ES_FIELD_NAME_SPECIAL_CHAR_MAP:
            pieces.append(ES_FIELD_NAME_SPECIAL_CHAR_MAP[c])  # encode the char
        else:
            pieces.append(c)  # keep the char as is
    field_name = "".join(pieces)
    # escape 1st char if necessary
    if field_name.startswith(tuple(ES_FIELD_NAME_BAD_LEADING_CHARS)):
        return ES_FIELD_NAME_ESCAPE_CHAR + field_name
    return field_name
def _decode_field_name(field_name):
    """Invert _encode_field_name, recovering the original unencoded string."""
    # drop the leading-char escape if present
    if field_name.startswith(ES_FIELD_NAME_ESCAPE_CHAR):
        field_name = field_name[1:]
    decoded = []
    i = 0
    total = len(field_name)
    while i < total:
        remainder = field_name[i:]
        if remainder.startswith(2 * ES_FIELD_NAME_ESCAPE_CHAR):
            # doubled escape char decodes to a single escape char
            decoded.append(ES_FIELD_NAME_ESCAPE_CHAR)
            i += 2
            continue
        for original_char, encoded_value in ES_FIELD_NAME_SPECIAL_CHAR_MAP.items():
            if remainder.startswith(encoded_value):
                decoded.append(original_char)
                i += len(encoded_value)
                break
        else:
            # ordinary character, copied through unchanged
            decoded.append(field_name[i])
            i += 1
    return "".join(decoded)
# Maps the genotype keywords used by genotype filters to mongo query fragments
# against the per-individual num_alt field
# (num_alt: -1 = missing call, 0 = hom ref, 1 = het, 2 = hom alt).
GENOTYPE_QUERY_MAP = {
    'ref_ref': 0,
    'ref_alt': 1,
    'alt_alt': 2,
    'has_alt': {'$gte': 1},
    'has_ref': {'$in': [0,1]},
    'not_missing': {'$gte': 0},
    'missing': -1,
}
# Chromosome lengths in bases, keyed by chromosome name without a "chr"
# prefix (GRCh37/hg19 values — confirm if the reference build changes).
# Used to report percent-done while loading VCFs.
CHROMOSOME_SIZES = {
    "1":249250621,
    "2":243199373,
    "3":198022430,
    "4":191154276,
    "5":180915260,
    "6":171115067,
    "7":159138663,
    "8":146364022,
    "9":141213431,
    "10":135534747,
    "11":135006516,
    "12":133851895,
    "13":115169878,
    "14":107349540,
    "15":102531392,
    "16":90354753,
    "17":81195210,
    "18":78077248,
    "19":59128983,
    "20":63025520,
    "21":48129895,
    "22":51304566,
    "X":155270560,
    "Y":59373566,
    "MT":16569,
}
def _add_genotype_filter_to_variant_query(db_query, genotype_filter):
    """
    Add conditions to db_query from the genotype filter.
    Edits db_query in place; returns True if successful.
    """
    for indiv_id, genotype_key in genotype_filter.items():
        db_query['genotypes.%s.num_alt' % indiv_id] = GENOTYPE_QUERY_MAP[genotype_key]
    return True
def _add_index_fields_to_variant(variant_dict, annotation=None):
    """
    Copy the queryable annotation fields onto the variant dict so they can
    be indexed before the variant is saved. No-op when annotation is falsy.
    """
    if not annotation:
        return
    variant_dict['db_freqs'] = annotation['freqs']
    variant_dict['db_tags'] = annotation['annotation_tags']
    variant_dict['db_gene_ids'] = annotation['gene_ids']
class MongoDatastore(datastore.Datastore):
def __init__(self, db, annotator, custom_population_store=None, custom_populations_map=None):
    """Wire up the mongo db handle, annotator, and optional custom population store/map."""
    self._db = db
    self._annotator = annotator
    self._custom_population_store = custom_population_store
    # default to an empty map so .get(project_id) lookups always work
    self._custom_populations_map = custom_populations_map if custom_populations_map is not None else {}
def _make_db_query(self, genotype_filter=None, variant_filter=None):
    """
    Caller specifies filters to get_variants, but they are evaluated later.
    Here, we just inspect those filters and see what heuristics we can apply to avoid a full table scan,
    Query here must return a superset of the true get_variants results
    Note that the full annotation isn't stored, so use the fields added by _add_index_fields_to_variant
    """
    db_query = {}
    # genotype filter
    if genotype_filter is not None:
        _add_genotype_filter_to_variant_query(db_query, genotype_filter)
    if variant_filter:
        if variant_filter.locations:
            location_ranges = []
            for i, location in enumerate(variant_filter.locations):
                if isinstance(location, basestring):
                    # normalize "chrom:start-end" strings to (xstart, xend)
                    # tuples in place so later passes skip the parse
                    chrom, pos_range = location.split(":")
                    start, end = pos_range.split("-")
                    xstart = genomeloc.get_xpos(chrom, int(start))
                    xend = genomeloc.get_xpos(chrom, int(end))
                    variant_filter.locations[i] = (xstart, xend)
                else:
                    xstart, xend = location
                location_ranges.append({'$and' : [ {'xpos' : {'$gte': xstart }}, {'xpos' : {'$lte': xend }}] })
            db_query['$or'] = location_ranges
        if variant_filter.so_annotations:
            db_query['db_tags'] = {'$in': variant_filter.so_annotations}
        if variant_filter.genes:
            # default False so filter objects created before 'exclude_genes'
            # was added don't raise AttributeError
            if getattr(variant_filter, 'exclude_genes', False):
                db_query['db_gene_ids'] = {'$nin': variant_filter.genes}
            else:
                db_query['db_gene_ids'] = {'$in': variant_filter.genes}
        if variant_filter.ref_freqs:
            for population, freq in variant_filter.ref_freqs:
                # only reference populations are indexed in db_freqs
                if population in self._annotator.reference_population_slugs:
                    db_query['db_freqs.' + population] = {'$lte': freq}
    return db_query
def get_elasticsearch_variants(self, query_json, elasticsearch_variant_dataset, project_id, family_id=None, variant_id_filter=None):
from seqr.models import Individual as SeqrIndividual, Project as SeqrProject
from reference_data.models import GENOME_VERSION_GRCh37, GENOME_VERSION_GRCh38
elasticsearch_host = elasticsearch_variant_dataset.elasticsearch_host
elasticsearch_index = elasticsearch_variant_dataset.elasticsearch_index
client = elasticsearch.Elasticsearch(host=elasticsearch_host)
s = elasticsearch_dsl.Search(using=client, index=elasticsearch_index) #",".join(indices))
print("===> QUERY: ")
pprint(query_json)
if variant_id_filter is not None:
s = s.filter('term', **{"variantId": variant_id_filter})
# parse variant query
for key, value in query_json.items():
if key == 'db_tags':
vep_consequences = query_json.get('db_tags', {}).get('$in', [])
consequences_filter = Q("terms", transcriptConsequenceTerms=vep_consequences)
if 'intergenic_variant' in vep_consequences:
# for many intergenic variants VEP doesn't add any annotations, so if user selected 'intergenic_variant', also match variants where transcriptConsequenceTerms is emtpy
consequences_filter = consequences_filter | ~Q('exists', field='transcriptConsequenceTerms')
s = s.filter(consequences_filter)
print("==> transcriptConsequenceTerms: %s" % str(vep_consequences))
if key.startswith("genotypes"):
sample_id = ".".join(key.split(".")[1:-1])
encoded_sample_id = _encode_field_name(sample_id)
genotype_filter = value
if type(genotype_filter) == int or type(genotype_filter) == basestring:
print("==> genotypes: %s" % str({encoded_sample_id+"_num_alt": genotype_filter}))
s = s.filter('term', **{encoded_sample_id+"_num_alt": genotype_filter})
elif '$gte' in genotype_filter:
genotype_filter = {k.replace("$", ""): v for k, v in genotype_filter.items()}
s = s.filter('range', **{encoded_sample_id+"_num_alt": genotype_filter})
print("==> genotypes: %s" % str({encoded_sample_id+"_num_alt": genotype_filter}))
elif "$in" in genotype_filter:
num_alt_values = genotype_filter['$in']
q = Q('term', **{encoded_sample_id+"_num_alt": num_alt_values[0]})
print("==> genotypes: %s" % str({encoded_sample_id+"_num_alt": num_alt_values[0]}))
for num_alt_value in num_alt_values[1:]:
q = q | Q('term', **{encoded_sample_id+"_num_alt": num_alt_value})
print("==> genotypes: %s" % str({encoded_sample_id+"_num_alt": num_alt_value}))
s = s.filter(q)
if key == "db_gene_ids":
db_gene_ids = query_json.get('db_gene_ids', {})
exclude_genes = db_gene_ids.get('$nin', [])
gene_ids = exclude_genes or db_gene_ids.get('$in', [])
if exclude_genes:
s = s.exclude("terms", geneIds=gene_ids)
else:
s = s.filter("terms", geneIds=gene_ids)
print("==> %s %s" % ("exclude" if exclude_genes else "include", "geneIds: " + str(gene_ids)))
if key == "$or" and type(value) == list:
xpos_filters = value[0].get("$and", {})
# for example: $or : [{'$and': [{'xpos': {'$gte': 12345}}, {'xpos': {'$lte': 54321}}]}]
xpos_filters_dict = {}
for xpos_filter in xpos_filters:
xpos_filter_setting = xpos_filter["xpos"] # for example {'$gte': 12345} or {'$lte': 54321}
xpos_filters_dict.update(xpos_filter_setting)
xpos_filter_setting = {k.replace("$", ""): v for k, v in xpos_filters_dict.items()}
s = s.filter('range', **{"xpos": xpos_filter_setting})
print("==> xpos range: " + str({"xpos": xpos_filter_setting}))
af_key_map = {
"db_freqs.1kg_wgs_phase3": "g1k_AF",
"db_freqs.1kg_wgs_phase3_popmax": "g1k_POPMAX_AF",
"db_freqs.exac_v3": "exac_AF",
"db_freqs.exac_v3_popmax": "exac_AF_POPMAX",
"db_freqs.topmed": "topmed_AF",
"db_freqs.gnomad_exomes": "gnomad_exomes_AF",
"db_freqs.gnomad_exomes_popmax": "gnomad_exomes_AF_POPMAX",
"db_freqs.gnomad_genomes": "gnomad_genomes_AF",
"db_freqs.gnomad_genomes_popmax": "gnomad_genomes_AF_POPMAX",
}
if key in af_key_map:
filter_key = af_key_map[key]
af_filter_setting = {k.replace("$", ""): v for k, v in value.items()}
s = s.filter(Q('range', **{filter_key: af_filter_setting}) | ~Q('exists', field=filter_key))
print("==> %s: %s" % (filter_key, af_filter_setting))
s.sort("xpos")
print("=====")
print("FULL QUERY OBJ: " + pformat(s.__dict__))
print("FILTERS: " + pformat(s.to_dict()))
print("=====")
print("Hits: ")
# https://elasticsearch-py.readthedocs.io/en/master/helpers.html#elasticsearch.helpers.scan
response = s.execute()
print("TOTAL: " + str(response.hits.total))
#print(pformat(response.to_dict()))
if family_id is not None:
family_individual_ids = [i.individual_id for i in SeqrIndividual.objects.filter(family__family_id=family_id)]
else:
family_individual_ids = [i.individual_id for i in SeqrIndividual.objects.filter(family__project__project_id=project_id)]
for i, hit in enumerate(s.scan()): # preserve_order=True
if i == 0:
print("Hit columns: " + str(hit.__dict__))
filters = ",".join(hit["filters"]) if "filters" in hit else ""
genotypes = {}
all_num_alt = []
for individual_id in family_individual_ids:
encoded_individual_id = _encode_field_name(individual_id)
num_alt = int(hit["%s_num_alt" % encoded_individual_id]) if ("%s_num_alt" % encoded_individual_id) in hit else -1
if num_alt is not None:
all_num_alt.append(num_alt)
alleles = []
if num_alt == 0:
alleles = [hit["ref"], hit["ref"]]
elif num_alt == 1:
alleles = [hit["ref"], hit["alt"]]
elif num_alt == 2:
alleles = [hit["alt"], hit["alt"]]
elif num_alt == -1 or num_alt == None:
alleles = []
else:
raise ValueError("Invalid num_alt: " + str(num_alt))
genotypes[individual_id] = {
'ab': hit["%s_ab" % encoded_individual_id] if ("%s_ab" % encoded_individual_id) in hit else '',
'alleles': map(str, alleles),
'extras': {
'ad': hit["%s_ab" % encoded_individual_id] if ("%s_ad" % encoded_individual_id) in hit else '',
'dp': hit["%s_dp" % encoded_individual_id] if ("%s_dp" % encoded_individual_id) in hit else '',
'pl': '',
},
'filter': filters or "pass",
'gq': hit["%s_gq" % encoded_individual_id] if ("%s_gq" % encoded_individual_id in hit and hit["%s_gq" % encoded_individual_id] is not None) else '',
'num_alt': num_alt,
}
if all([num_alt <= 0 for num_alt in all_num_alt]):
#print("Filtered out due to genotype: " + str(genotypes))
print("Filtered all_num_alt <= 0 - Result %s: GRCh38: %s:%s, cadd: %s %s - %s" % (i, hit["contig"], hit["start"], hit["cadd_PHRED"] if "cadd_PHRED" in hit else "", hit["transcriptConsequenceTerms"], all_num_alt))
continue
vep_annotation = json.loads(str(hit['sortedTranscriptConsequences']))
if elasticsearch_variant_dataset.genome_version == GENOME_VERSION_GRCh37:
grch38_coord = liftover_grch37_to_grch38.convert_coordinate("chr%s" % hit["contig"].replace("chr", ""), int(hit["start"]))
if grch38_coord and grch37_coord and grch37_coord[0]:
grch38_coord = "%s-%s-%s-%s "% (grch37_coord[0][0], grch37_coord[0][1], hit["ref"], hit["alt"])
else:
grch38_coord = None
else:
grch38_coord = hit["variantId"]
if elasticsearch_variant_dataset.genome_version == GENOME_VERSION_GRCh38:
grch37_coord = liftover_grch38_to_grch37.convert_coordinate("chr%s" % hit["contig"].replace("chr", ""), int(hit["start"]))
if grch37_coord and grch37_coord and grch37_coord[0]:
grch37_coord = "%s-%s-%s-%s "% (grch37_coord[0][0], grch37_coord[0][1], hit["ref"], hit["alt"])
else:
grch37_coord = None
else:
grch37_coord = hit["variantId"]
result = {
#u'_id': ObjectId('596d2207ff66f729285ca588'),
'alt': str(hit["alt"]) if "alt" in hit else None,
'annotation': {
'fathmm': None,
'metasvm': None,
'muttaster': None,
'polyphen': None,
'sift': None,
'cadd_phred': hit["cadd_PHRED"] if "cadd_PHRED" in hit else None,
'dann_score': hit["dbnsfp_DANN_score"] if "dbnsfp_DANN_score" in hit else None,
'revel_score': hit["dbnsfp_REVEL_score"] if "dbnsfp_REVEL_score" in hit else None,
'mpc_score': hit["mpc_MPC"] if "mpc_MPC" in hit else None,
'annotation_tags': list(hit["transcriptConsequenceTerms"] or []) if "transcriptConsequenceTerms" in hit else None,
'coding_gene_ids': list(hit['codingGeneIds'] or []),
'gene_ids': list(hit['geneIds'] or []),
'vep_annotation': vep_annotation,
'vep_group': str(hit['mainTranscript_major_consequence'] or ""),
'vep_consequence': str(hit['mainTranscript_major_consequence'] or ""),
'worst_vep_annotation_index': 0,
'worst_vep_index_per_gene': {str(hit['mainTranscript_gene_id']): 0},
},
'chr': hit["contig"],
'coding_gene_ids': list(hit['codingGeneIds'] or []),
'db_freqs': {
'1kg_wgs_AF': float(hit["g1k_AF"] or 0.0),
'1kg_wgs_popmax_AF': float(hit["g1k_POPMAX_AF"] or 0.0),
'exac_v3_AC': float(hit["exac_AC_Adj"] or 0.0) if "exac_AC_Adj" in hit else 0.0,
'exac_v3_AF': float(hit["exac_AF"] or 0.0) if "exac_AF" in hit else (hit["exac_AC_Adj"]/float(hit["exac_AN_Adj"]) if int(hit["exac_AN_Adj"] or 0) > 0 else 0.0),
'exac_v3_popmax_AF': float(hit["exac_AF_POPMAX"] or 0.0) if "exac_AF_POPMAX" in hit else 0.0,
'topmed_AF': float(hit["topmed_AF"] or 0.0) if "topmed_AF" in hit else 0.0,
'gnomad_exomes_AC': float(hit["gnomad_exomes_AC"] or 0.0) if "gnomad_exomes_AC" in hit else 0.0,
'gnomad_exomes_Hom': float(hit["gnomad_exomes_HOM"] or 0.0) if "gnomad_exomes_HOM" in hit else 0.0,
'gnomad_exomes_AF': float(hit["gnomad_exomes_AF"] or 0.0) if "gnomad_exomes_AF" in hit else 0.0,
'gnomad_exomes_popmax_AF': float(hit["gnomad_exomes_AF_POPMAX"] or 0.0) if "gnomad_exomes_AF_POPMAX" in hit else 0.0,
'gnomad_genomes_AC': float(hit["gnomad_genomes_AC"] or 0.0) if "gnomad_genomes_AC" in hit else 0.0,
'gnomad_genomes_Hom': float(hit["gnomad_genomes_HOM"] or 0.0) if "gnomad_genomes_HOM" in hit else 0.0,
'gnomad_genomes_AF': float(hit["gnomad_genomes_AF"] or 0.0) if "gnomad_genomes_AF" in hit else 0.0,
'gnomad_genomes_popmax_AF': float(hit["gnomad_genomes_AF_POPMAX"] or 0.0) if "gnomad_genomes_AF_POPMAX" in hit else 0.0,
'gnomad_exome_coverage': float(hit["gnomad_exome_coverage"] or -1) if "gnomad_exome_coverage" in hit else -1,
'gnomad_genome_coverage': float(hit["gnomad_genome_coverage"] or -1) if "gnomad_genome_coverage" in hit else -1,
},
'db_gene_ids': list(hit["geneIds"] or []),
'db_tags': str(hit["transcriptConsequenceTerms"] or "") if "transcriptConsequenceTerms" in hit else None,
'extras': {
'genome_version': elasticsearch_variant_dataset.genome_version,
'grch37_coords': grch37_coord,
'grch38_coords': grch38_coord,
u'alt_allele_pos': 0,
u'orig_alt_alleles': map(str, [a.split("-")[-1] for a in hit["originalAltAlleles"]]) if "originalAltAlleles" in hit else []},
'gene_ids': None,
'genotypes': genotypes,
'pos': long(hit['start']),
'pos_end': str(hit['end']),
'ref': str(hit['ref']),
'vartype': 'snp' if len(hit['ref']) == len(hit['alt']) else "indel",
'vcf_id': None,
'xpos': long(hit["xpos"]),
'xposx': long(hit["xpos"]),
}
result["annotation"]["freqs"] = result["db_freqs"]
#print("\n\nConverted result: " + str(i))
print("Result %s: GRCh37: %s GRCh38: %s:, cadd: %s %s - gene ids: %s, coding gene_ids: %s" % (i, grch37_coord, grch37_coord, hit["cadd_PHRED"] if "cadd_PHRED" in hit else "", hit["transcriptConsequenceTerms"], result["gene_ids"], result["coding_gene_ids"]))
#pprint(result["db_freqs"])
yield result
if i > settings.VARIANT_QUERY_RESULTS_LIMIT:
break
    def get_variants(self, project_id, family_id, genotype_filter=None, variant_filter=None):
        """
        Yield Variant objects for one family that match the given filters.

        Uses the elasticsearch dataset when one is configured for this
        project/family, otherwise falls back to the family's mongo collection.
        Note: only the mongo path applies passes_variant_filter and the
        result-size limit; the elasticsearch path yields everything the
        elasticsearch query returns.
        """
        db_query = self._make_db_query(genotype_filter, variant_filter)
        sys.stderr.write("%s\n" % str(db_query))
        counters = OrderedDict([('returned_by_query', 0), ('passes_variant_filter', 0)])
        pprint({'$and' : [{k: v} for k, v in db_query.items()]})
        elasticsearch_variant_dataset = get_elasticsearch_dataset(project_id, family_id)
        if elasticsearch_variant_dataset is not None:
            # elasticsearch path: variants come back pre-annotated, no extra filtering here
            for i, variant_dict in enumerate(self.get_elasticsearch_variants(db_query, elasticsearch_variant_dataset, project_id, family_id)):
                counters["returned_by_query"] += 1
                variant = Variant.fromJSON(variant_dict)
                yield variant
            print("Counters: " + str(counters))
        else:
            # mongo path: query the per-family collection directly
            collection = self._get_family_collection(project_id, family_id)
            if not collection:
                print("Error: mongodb collection not found for project %s family %s " % (project_id, family_id))
                return
            counters = OrderedDict([('returned_by_query', 0), ('passes_variant_filter', 0)])
            # fetch limit+5 so we can detect (and refuse) over-limit result sets
            for i, variant_dict in enumerate(collection.find({'$and' : [{k: v} for k, v in db_query.items()]}).sort('xpos').limit(settings.VARIANT_QUERY_RESULTS_LIMIT+5)):
                if i >= settings.VARIANT_QUERY_RESULTS_LIMIT:
                    raise Exception("ERROR: this search exceeded the %s variant result size limit. Please set additional filters and try again." % settings.VARIANT_QUERY_RESULTS_LIMIT)
                variant = Variant.fromJSON(variant_dict)
                self.add_annotations_to_variant(variant, project_id)
                counters["returned_by_query"] += 1
                # passes_variant_filter returns a (passes, reason) tuple
                if passes_variant_filter(variant, variant_filter)[0]:
                    counters["passes_variant_filter"] += 1
                    yield variant
            for k, v in counters.items():
                sys.stderr.write(" %s: %s\n" % (k,v))
def get_variants_in_gene(self, project_id, family_id, gene_id, genotype_filter=None, variant_filter=None):
if variant_filter is None:
modified_variant_filter = VariantFilter()
else:
modified_variant_filter = copy.deepcopy(variant_filter)
modified_variant_filter.add_gene(gene_id)
db_query = self._make_db_query(genotype_filter, modified_variant_filter)
collection = self._get_family_collection(project_id, family_id)
if not collection:
return
# we have to collect list in memory here because mongo can't sort on xpos,
# as result size can get too big.
# need to find a better way to do this.
variants = []
for variant_dict in collection.find(db_query).hint([('db_gene_ids', pymongo.ASCENDING), ('xpos', pymongo.ASCENDING)]):
variant = Variant.fromJSON(variant_dict)
self.add_annotations_to_variant(variant, project_id)
if passes_variant_filter(variant, modified_variant_filter):
variants.append(variant)
variants = sorted(variants, key=lambda v: v.unique_tuple())
for v in variants:
yield v
def get_variants_in_range(self, project_id, family_id, xpos_start, xpos_end):
collection = self._get_family_collection(project_id, family_id)
if not collection:
raise ValueError("Family not found: " + str(family_id))
for i, variant_dict in enumerate(collection.find({'$and': [{'xpos': {'$gte': xpos_start}}, {'xpos': {'$lte': xpos_end}}]}).limit(settings.VARIANT_QUERY_RESULTS_LIMIT+5)):
if i > settings.VARIANT_QUERY_RESULTS_LIMIT:
raise Exception("ERROR: this search exceeded the %s variant result size limit. Please set additional filters and try again." % settings.VARIANT_QUERY_RESULTS_LIMIT)
variant = Variant.fromJSON(variant_dict)
self.add_annotations_to_variant(variant, project_id)
yield variant
def get_single_variant(self, project_id, family_id, xpos, ref, alt):
from seqr.utils.xpos_utils import get_chrom_pos
elasticsearch_variant_dataset = get_elasticsearch_dataset(project_id, family_id)
if elasticsearch_variant_dataset is not None:
chrom, pos = get_chrom_pos(xpos)
variant_id = "%s-%s-%s-%s" % (chrom, pos, ref, alt)
results = list(self.get_elasticsearch_variants({}, elasticsearch_variant_dataset, project_id, family_id=family_id, variant_id_filter=variant_id))
print("###### single variant search: " + variant_id + ". results: " + str(results))
if not results:
return None
variant_dict = results[0]
variant = Variant.fromJSON(variant_dict)
return variant
else:
collection = self._get_family_collection(project_id, family_id)
if not collection:
return None
variant_dict = collection.find_one({'xpos': xpos, 'ref': ref, 'alt': alt})
if variant_dict:
variant = Variant.fromJSON(variant_dict)
self.add_annotations_to_variant(variant, project_id)
return variant
else:
return None
def get_variants_cohort(self, project_id, cohort_id, variant_filter=None):
db_query = self._make_db_query(None, variant_filter)
collection = self._get_family_collection(project_id, cohort_id)
for i, variant in enumerate(collection.find(db_query).sort('xpos').limit(settings.VARIANT_QUERY_RESULTS_LIMIT+5)):
if i > settings.VARIANT_QUERY_RESULTS_LIMIT:
raise Exception("ERROR: this search exceeded the %s variant result size limit. Please set additional filters and try again." % settings.VARIANT_QUERY_RESULTS_LIMIT)
yield Variant.fromJSON(variant)
def get_single_variant_cohort(self, project_id, cohort_id, xpos, ref, alt):
collection = self._get_family_collection(project_id, cohort_id)
variant = collection.find_one({'xpos': xpos, 'ref': ref, 'alt': alt})
return Variant.fromJSON(variant)
    def get_de_novo_variants(self, project_id, family, de_novo_filter, variant_filter, quality_filter):
        """
        Yield candidate de-novo variants for the given family.

        For non-trio families, each individual in de_novo_filter must pass the
        caller-supplied quality_filter. For trios (exactly 2 parental ids),
        stricter hard-coded thresholds are layered on top (see inline comments).
        Variants are pulled from elasticsearch when a dataset is configured,
        otherwise from the family's mongo collection.
        """
        db_query = self._make_db_query(de_novo_filter, variant_filter)
        elasticsearch_variant_dataset = get_elasticsearch_dataset(family.project_id, family.family_id)
        if elasticsearch_variant_dataset is not None:
            variant_iter = self.get_elasticsearch_variants(db_query, elasticsearch_variant_dataset, family.project_id, family.family_id)
        else:
            collection = self._get_family_collection(family.project_id, family.family_id)
            if not collection:
                raise ValueError("Error: mongodb collection not found for project %s family %s " % (family.project_id, family.family_id))
            variant_iter = collection.find(db_query).sort('xpos').limit(settings.VARIANT_QUERY_RESULTS_LIMIT+5)
        # get ids of parents in this family (only ids that are themselves family members)
        valid_ids = set(indiv_id for indiv_id in family.individuals)
        paternal_ids = set(i.paternal_id for i in family.get_individuals() if i.paternal_id in valid_ids)
        maternal_ids = set(i.maternal_id for i in family.get_individuals() if i.maternal_id in valid_ids)
        parental_ids = paternal_ids | maternal_ids
        # loop over all variants returned
        for i, variant_dict in enumerate(variant_iter):
            if i > settings.VARIANT_QUERY_RESULTS_LIMIT:
                raise Exception("VARIANT_QUERY_RESULTS_LIMIT of %s exceeded for query: %s" % (settings.VARIANT_QUERY_RESULTS_LIMIT, db_query))
            variant = Variant.fromJSON(variant_dict)
            self.add_annotations_to_variant(variant, family.project_id)
            # passes_variant_filter returns a (passes, reason) tuple
            if not passes_variant_filter(variant, variant_filter)[0]:
                continue
            # handle genotype filters
            if len(parental_ids) != 2:
                # ordinary filters for non-trios.
                # for-else: the else runs only if no individual failed (no break),
                # i.e. the variant is yielded only when everyone passes.
                for indiv_id in de_novo_filter.keys():
                    genotype = variant.get_genotype(indiv_id)
                    if not passes_genotype_filter(genotype, quality_filter):
                        break
                else:
                    yield variant
            else:
                # for trios use Mark's recommended filters for de-novo variants:
                # Hard-coded thresholds:
                #    1) Child must have > 10% of combined Parental Read Depth
                #    2) MinimumChildGQscore >= 20
                #    3) MaximumParentAlleleBalance <= 5%
                # Adjustable filters:
                #    Variants should PASS
                #    Child AB should be >= 20
                # compute parental read depth for filter 1
                total_parental_read_depth = 0
                for indiv_id in parental_ids:
                    genotype = variant.get_genotype(indiv_id)
                    if genotype.extras and 'dp' in genotype.extras and genotype.extras['dp'] != '.':
                        total_parental_read_depth += int(genotype.extras['dp'])
                    else:
                        total_parental_read_depth = None  # both parents must have DP to use the parental_read_depth filters
                        break
                # apply a per-individual quality filter: parents get the strict
                # allele-balance cap, the child gets the GQ/DP minimums
                for indiv_id in de_novo_filter.keys():
                    quality_filter_temp = quality_filter.copy()  # copy before modifying
                    if indiv_id in parental_ids:
                        # handle one of the parents
                        quality_filter_temp['max_ab'] = 5
                    else:
                        # handle child
                        quality_filter_temp['min_gq'] = 20
                        if total_parental_read_depth is not None:
                            quality_filter_temp['min_dp'] = total_parental_read_depth * 0.1
                    genotype = variant.get_genotype(indiv_id)
                    if not passes_genotype_filter(genotype, quality_filter_temp):
                        #print("%s: %s " % (variant.chr, variant.pos))
                        break
                else:
                    # no individual failed => keep the variant
                    yield variant
#
# New sample stuff
#
def get_all_individuals(self):
"""
List of all individuals in the datastore
Items are (project_id, indiv_id) tuples
"""
return [(i['project_id'], i['indiv_id']) for i in self._db.individuals.find()]
def get_all_families(self):
"""
List of all families in the datastore
Items are (project_id, family_id) tuples
"""
return [(i['project_id'], i['family_id']) for i in self._db.families.find()]
def individual_exists(self, project_id, indiv_id):
return self._db.individuals.find_one({
'project_id': project_id,
'indiv_id': indiv_id
}) is not None
def add_individual(self, project_id, indiv_id):
if self.individual_exists(project_id, indiv_id):
raise Exception("Indiv (%s, %s) already exists" % (project_id, indiv_id))
indiv = {
'project_id': project_id,
'indiv_id': indiv_id,
}
self._db.individuals.save(indiv)
def get_individuals(self, project_id):
return [ i['indiv_id'] for i in self._db.individuals.find({ 'project_id': project_id }) ]
def family_exists(self, project_id, family_id):
return self._db.families.find_one({'project_id': project_id, 'family_id': family_id}) is not None
def get_individuals_for_family(self, project_id, family_id):
return self._db.families.find_one({'project_id': project_id, 'family_id': family_id})['individuals']
def get_family_status(self, project_id, family_id):
family_doc = self._db.families.find_one({'project_id': project_id, 'family_id': family_id})
if not family_doc:
return None
return family_doc['status']
def get_family_statuses(self, family_list):
ret = {f: None for f in family_list}
by_project = defaultdict(list)
for project_id, family_id in family_list:
by_project[project_id].append(family_id)
for project_id, family_id_list in by_project.items():
for family_doc in self._db.families.find({'project_id': project_id, 'family_id': {'$in': family_id_list}}):
ret[(project_id, family_doc['family_id'])] = family_doc['status']
return ret
def _get_family_info(self, project_id, family_id=None):
if family_id is None:
return [family_info for family_info in self._db.families.find({'project_id': project_id})]
else:
return self._db.families.find_one({'project_id': project_id, 'family_id': family_id})
def _get_family_collection(self, project_id, family_id):
family_info = self._get_family_info(project_id, family_id)
if not family_info:
return None
return self._db[family_info['coll_name']]
#
# Variant loading
# Unique to mongo datastore, not part of protocol
#
def _add_family_info(self, project_id, family_id, individuals):
"""
Add all the background info about this family
We try to keep this as simple as possible - just IDs
After this is run, variants are ready to be loaded
"""
if self.family_exists(project_id, family_id):
#raise Exception("Family (%s, %s) already exists" % (project_id, family_id))
return
for indiv_id in individuals:
if not self.individual_exists(project_id, indiv_id):
self.add_individual(project_id, indiv_id)
family_coll_name = "family_%s_%s" % (slugify(project_id, separator='_'),
slugify(family_id, separator='_'))
family = {
'project_id': project_id,
'family_id': family_id,
'individuals': individuals,
'coll_name': family_coll_name,
'status': 'loading'
}
family_collection = self._db[family_coll_name]
self._index_family_collection(family_collection)
self._db.families.save(family)
def add_family(self, project_id, family_id, individuals):
"""
Add new family
Adds individuals if they don't exist yet
Phenotypes and pedigrees aren't stored, just which individuals
"""
self._add_family_info(project_id, family_id, individuals)
def add_family_set(self, family_list):
"""
Add a set of families from the same VCF file
family_list is just a list of dicts with keys of project_id, family_id, individuals
"""
for fam_info in family_list:
self._add_family_info(fam_info['project_id'], fam_info['family_id'], fam_info['individuals'])
def load_family_set(self, vcf_file_path, family_list, reference_populations=None, vcf_id_map=None, mark_as_loaded=True, start_from_chrom=None, end_with_chrom=None):
"""
Load a set of families from the same VCF file
family_list is a list of (project_id, family_id) tuples
"""
family_info_list = [self._get_family_info(f[0], f[1]) for f in family_list]
self._load_variants_for_family_set(
family_info_list,
vcf_file_path,
reference_populations=reference_populations,
vcf_id_map=vcf_id_map,
start_from_chrom=start_from_chrom,
end_with_chrom=end_with_chrom,
)
if mark_as_loaded:
for family in family_info_list:
self._finalize_family_load(family['project_id'], family['family_id'])
def _load_variants_for_family_set(self, family_info_list, vcf_file_path, reference_populations=None, vcf_id_map=None, start_from_chrom=None, end_with_chrom=None):
"""
Load variants for a set of families, assuming all come from the same VCF file
Added after load_variants_for_family to speed up loading - goal is to
only iterate the VCF once. Here's how it works:
for each raw variant:
annotate
for each family:
extract family variant from full variant
update variant inheritance
if family variant is relevant for family:
add to collection
"""
self._add_vcf_file_for_family_set(
family_info_list,
vcf_file_path,
reference_populations=reference_populations,
vcf_id_map=vcf_id_map,
start_from_chrom=start_from_chrom,
end_with_chrom=end_with_chrom,
)
    def _add_vcf_file_for_family_set(self, family_info_list, vcf_file_path, reference_populations=None, vcf_id_map=None, start_from_chrom=None, end_with_chrom=None):
        """
        Iterate one VCF and insert each relevant family-variant into the
        per-family mongo collections (buffered, inserted in batches).

        Three ways of building the VCF iterator:
        - per-chromosome VCFs ("_chr"/".chr" in the path): resume from the
          smallest last-loaded position across families on that chromosome;
        - explicit start_from_chrom / end_with_chrom: tabix-fetch that
          chromosome range (end chromosome inclusive here);
        - otherwise: stream the whole compressed file from the start.
        """
        collections = {f['family_id']: self._db[f['coll_name']] for f in family_info_list}
        #for collection in collections.values():
        #    collection.drop_indexes()
        indiv_id_list = [i for f in family_info_list for i in f['individuals']]
        number_of_families = len(family_info_list)
        sys.stderr.write("Loading variants for %(number_of_families)d families %(family_info_list)s from %(vcf_file_path)s\n" % locals())
        for family in family_info_list:
            print("Indexing family: " + str(family))
            collection = collections[family['family_id']]
            collection.ensure_index([('xpos', 1), ('ref', 1), ('alt', 1)])
        # check whether some of the variants for this chromosome has been loaded already
        # if yes, start from the last loaded variant, and not from the beginning
        if "_chr" in vcf_file_path or ".chr" in vcf_file_path:
            # if the VCF files are split by chromosome (eg. for WGS projects), check within the chromosome
            vcf_file = compressed_file(vcf_file_path)
            # peek at the first record just to learn which chromosome this file covers
            variant = next(vcf_stuff.iterate_vcf(vcf_file, genotypes=False, indiv_id_list=indiv_id_list, vcf_id_map=vcf_id_map))
            print(vcf_file_path + " - chromsome: " + str(variant.chr))
            vcf_file.close()
            # xpos encodes chromosome*1e9 + position; find each family's
            # last-loaded position on every chromosome
            position_per_chrom = {}
            for chrom in range(1,24):
                position_per_chrom[chrom] = defaultdict(int)
                for family in family_info_list:     #variants = collections[family['family_id']].find().sort([('xpos',-1)]).limit(1)
                    variants = list(collections[family['family_id']].find({'$and': [{'xpos': { '$gte': chrom*1e9 }}, {'xpos': { '$lt': (chrom+1)*1e9}}] }).sort([('xpos',-1)]).limit(1))
                    if len(variants) > 0:
                        position_per_chrom[chrom][family['family_id']] = variants[0]['xpos'] - chrom*1e9
                    else:
                        position_per_chrom[chrom][family['family_id']] = 0
            for chrom in range(1,24):
                position_per_chrom[chrom] = min(position_per_chrom[chrom].values())   # get the smallest last-loaded variant position for this chromosome across all families
            chr_idx = int(variant.xpos/1e9)
            start_from_pos = int(position_per_chrom[chr_idx])
            print("Start from: %s - %s (%0.1f%% done)" % (chr_idx, start_from_pos, 100.*start_from_pos/CHROMOSOME_SIZES[variant.chr.replace("chr", "")]))
            tabix_file = pysam.TabixFile(vcf_file_path)
            # 2.5e8 exceeds the longest chromosome, so this fetches to the end
            vcf_iter = itertools.chain(tabix_file.header, tabix_file.fetch(variant.chr.replace("chr", ""), start_from_pos, int(2.5e8)))
        elif start_from_chrom or end_with_chrom:
            if start_from_chrom:
                print("Start chrom: chr%s" % start_from_chrom)
            if end_with_chrom:
                print("End chrom: chr%s" % end_with_chrom)
            chrom_list = list(map(str, range(1,23))) + ['X','Y']
            chrom_list_start_index = 0
            if start_from_chrom:
                chrom_list_start_index = chrom_list.index(start_from_chrom.replace("chr", "").upper())
            chrom_list_end_index = len(chrom_list)
            if end_with_chrom:
                chrom_list_end_index = chrom_list.index(end_with_chrom.replace("chr", "").upper())
            tabix_file = pysam.TabixFile(vcf_file_path)
            vcf_iter = tabix_file.header
            # NOTE: end chromosome is inclusive here (end_index+1), unlike
            # add_variants_to_project_from_vcf where the slice is exclusive
            for chrom in chrom_list[chrom_list_start_index:chrom_list_end_index+1]:
                print("Will load chrom: " + chrom)
                try:
                    vcf_iter = itertools.chain(vcf_iter, tabix_file.fetch(chrom))
                except ValueError as e:
                    # chromosome not present in this tabix index - skip it
                    print("WARNING: " + str(e))
        else:
            vcf_iter = vcf_file = compressed_file(vcf_file_path)
            # TODO handle case where it's one vcf file, not split by chromosome
        size = os.path.getsize(vcf_file_path)
        #progress = get_progressbar(size, 'Loading VCF: {}'.format(vcf_file_path))
        def insert_all_variants_in_buffer(buff, collections_dict):
            # drain the buffer, inserting variants one at a time, choosing a
            # random family each time so no single collection lags far behind
            for family_id in buff:
                if len(buff[family_id]) == 0: # defensive programming
                    raise ValueError("%s has zero variants to insert. Should not be in buff." % family_id)
            while len(buff) > 0:
                # choose a random family for which to insert a variant from among families that still have variants to insert
                family_id = random.choice(buff.keys())
                # pop a variant off the list for this family, and insert it
                family_variant_dict_to_insert = buff[family_id].pop()
                c = collections_dict[family_id]
                c.insert(family_variant_dict_to_insert)
                if len(buff[family_id]) == 0:
                    del buff[family_id]  # if no more variants for this family, delete it
        vcf_rows_counter = 0
        variants_buffered_counter = 0
        family_id_to_variant_list = defaultdict(list)  # will accumulate variants to be inserted all at once
        for variant in vcf_stuff.iterate_vcf(vcf_iter, genotypes=True, indiv_id_list=indiv_id_list, vcf_id_map=vcf_id_map):
            if variant.alt == "*":
                #print("Skipping GATK 3.4 * alt allele: " + str(variant.unique_tuple()))
                continue
            try:
                annotation = self._annotator.get_annotation(variant.xpos, variant.ref, variant.alt, populations=reference_populations)
            except ValueError, e:
                sys.stderr.write("WARNING: " + str(e) + "\n")
                continue
            vcf_rows_counter += 1
            for family in family_info_list:
                # TODO: can we move this inside the if relevant clause below?
                try:
                    family_variant = variant.make_copy(restrict_to_genotypes=family['individuals'])
                    family_variant_dict = family_variant.toJSON()
                    _add_index_fields_to_variant(family_variant_dict, annotation)
                    if xbrowse_utils.is_variant_relevant_for_individuals(family_variant, family['individuals']):
                        collection = collections[family['family_id']]
                        # skip variants already loaded in a previous (interrupted) run
                        if not collection.find_one({'xpos': family_variant.xpos, 'ref': family_variant.ref, 'alt': family_variant.alt}):
                            family_id_to_variant_list[family['family_id']].append(family_variant_dict)
                            variants_buffered_counter += 1
                except Exception, e:
                    sys.stderr.write("ERROR: on variant %s, family: %s - %s\n" % (variant.toJSON(), family, e))
            # flush the buffer roughly every 2000 buffered family-variants
            if variants_buffered_counter > 2000:
                print(date.strftime(datetime.now(), "%m/%d/%Y %H:%M:%S") + "-- %s:%s-%s-%s (%0.1f%% done) - inserting %d family-variants from %d vcf rows into %s families" % (variant.chr, variant.pos, variant.ref, variant.alt, 100*variant.pos / CHROMOSOME_SIZES[variant.chr.replace("chr", "")], variants_buffered_counter, vcf_rows_counter, len(family_id_to_variant_list)))
                insert_all_variants_in_buffer(family_id_to_variant_list, collections)
                assert len(family_id_to_variant_list) == 0
                vcf_rows_counter = 0
                variants_buffered_counter = 0
        # flush whatever is left after the VCF is exhausted
        if variants_buffered_counter > 0:
            insert_all_variants_in_buffer(family_id_to_variant_list, collections)
            assert len(family_id_to_variant_list) == 0
def _finalize_family_load(self, project_id, family_id):
"""
Call after family is loaded. Sets status and possibly more in the future
"""
self._index_family_collection(self._get_family_collection(project_id, family_id))
family = self._db.families.find_one({'project_id': project_id, 'family_id': family_id})
family['status'] = 'loaded'
self._db.families.save(family)
def _index_family_collection(self, collection):
collection.ensure_index('xpos')
collection.ensure_index([('db_freqs', 1), ('xpos', 1)])
collection.ensure_index([('db_tags', 1), ('xpos', 1)])
collection.ensure_index([('db_gene_ids', 1), ('xpos', 1)])
def delete_project(self, project_id):
self._db.individuals.remove({'project_id': project_id})
for family_info in self._db.families.find({'project_id': project_id}):
self._db.drop_collection(family_info['coll_name'])
self._db.families.remove({'project_id': project_id})
def delete_family(self, project_id, family_id):
for family_info in self._db.families.find({'project_id': project_id, 'family_id': family_id}):
self._db.drop_collection(family_info['coll_name'])
self._db.families.remove({'project_id': project_id, 'family_id': family_id})
    def add_annotations_to_variant(self, variant, project_id):
        """
        Annotate the variant in place, then (best-effort) add any custom
        reference populations configured for this project. Failures in the
        custom-population step are logged to stderr and otherwise ignored.
        """
        self._annotator.annotate_variant(variant)
        try:
            if self._custom_population_store:
                custom_pop_slugs = self._custom_populations_map.get(project_id)
                if custom_pop_slugs:
                    self._custom_population_store.add_populations_to_variants([variant], custom_pop_slugs)
        except Exception, e:
            # deliberately broad: custom populations are optional extras
            sys.stderr.write("Error in add_annotations_to_variant: " + str(e) + "\n")
#
# This stuff is all copied in from ProjectDatastore
#
def _get_project_collection(self, project_id):
project = self._db.projects.find_one({'project_id': project_id})
if project:
return self._db[project['collection_name']]
else:
return None
def add_variants_to_project_from_vcf(self, vcf_file, project_id, indiv_id_list=None, start_from_chrom=None, end_with_chrom=None):
"""
This is how variants are loaded
"""
chrom_list = list(map(str, range(1,23))) + ['X','Y']
chrom_list_start_index = 0
if start_from_chrom:
chrom_list_start_index = chrom_list.index(start_from_chrom.replace("chr", "").upper())
chrom_list_end_index = len(chrom_list)
if end_with_chrom:
chrom_list_end_index = chrom_list.index(end_with_chrom.replace("chr", "").upper())
chromosomes_to_include = set(chrom_list[chrom_list_start_index : chrom_list_end_index])
#tabix_file = pysam.TabixFile(vcf_file)
#vcf_iter = tabix_file.header
#for chrom in chrom_list[chrom_list_start_index:chrom_list_end_index]:
# print("Will load chrom: " + chrom)
# vcf_iter = itertools.chain(vcf_iter, tabix_file.fetch(chrom))
project_collection = self._get_project_collection(project_id)
reference_populations = self._annotator.reference_population_slugs + self._custom_populations_map.get(project_id)
for counter, variant in enumerate(vcf_stuff.iterate_vcf(vcf_file, genotypes=True, indiv_id_list=indiv_id_list)):
if (start_from_chrom or end_with_chrom) and variant.chr.replace("chr", "") not in chromosomes_to_include:
continue
if variant.alt == "*":
#print("Skipping GATK 3.4 * alt allele: " + str(variant.unique_tuple()))
continue
if counter % 2000 == 0:
print(date.strftime(datetime.now(), "%m/%d/%Y %H:%M:%S") + "-- inserting variant %d %s:%s-%s-%s (%0.1f%% done with %s) " % (counter, variant.chr, variant.pos, variant.ref, variant.alt, 100*variant.pos / CHROMOSOME_SIZES[variant.chr.replace("chr", "")], variant.chr))
variant_dict = project_collection.find_one({'xpos': variant.xpos, 'ref': variant.ref, 'alt': variant.alt})
if not variant_dict:
variant_dict = variant.toJSON()
try:
annotation = self._annotator.get_annotation(variant.xpos, variant.ref, variant.alt, populations=reference_populations)
except ValueError, e:
sys.stderr.write("WARNING: " + str(e) + "\n")
continue
_add_index_fields_to_variant(variant_dict, annotation)
else:
for indiv_id, genotype in variant.get_genotypes():
if genotype.num_alt != 0:
variant_dict['genotypes'][indiv_id] = genotype._asdict()
project_collection.save(variant_dict)
def project_exists(self, project_id):
return self._db.projects.find_one({'project_id': project_id})
def project_collection_is_loaded(self, project_id):
"""Returns true if the project collection is fully loaded (this is the
collection that stores the project-wide set of variants used for gene
search)."""
project = self._db.projects.find_one({'project_id': project_id})
if project is not None and "is_loaded" in project:
return project["is_loaded"]
else:
return False
def set_project_collection_to_loaded(self, project_id, is_loaded=True):
"""Set the project collection "is_loaded" field to the given value.
This field is used by other parts of seqr to decide if this collection
is ready for use."""
project = self._db.projects.find_one({'project_id': project_id})
if project is not None and "is_loaded" in project:
project["is_loaded"] = is_loaded
#print("Setting %s to %s" % (project["_id"], project))
project_id = project['_id']
del project['_id']
self._db.projects.update({'_id': project_id}, {"$set": project})
else:
raise ValueError("Couldn't find project collection for %s" % project_id)
def add_project(self, project_id):
"""
Add all the background info about this family
We try to keep this as simple as possible - just IDs
After this is run, variants are ready to be loaded
"""
if self.project_exists(project_id):
raise Exception("Project {} exists".format(project_id))
project = {
'project_id': project_id,
'collection_name': 'project_' + ''.join([random.choice(string.digits) for i in range(8)]),
'is_loaded': False,
}
self._db.projects.insert(project)
project_collection = self._db[project['collection_name']]
self._index_family_collection(project_collection)
def delete_project_store(self, project_id):
project = self._db.projects.find_one({'project_id': project_id})
if project:
self._db.drop_collection(project['collection_name'])
self._db.projects.remove({'project_id': project_id})
def get_project_variants_in_gene(self, project_id, gene_id, variant_filter=None):
if variant_filter is None:
modified_variant_filter = VariantFilter()
else:
modified_variant_filter = copy.deepcopy(variant_filter)
modified_variant_filter.add_gene(gene_id)
db_query = self._make_db_query(None, modified_variant_filter)
sys.stderr.write("Project Gene Search: " + str(project_id) + " all variants query: " + str(db_query))
elasticsearch_variant_dataset = get_elasticsearch_dataset(project_id, family_id=None)
if elasticsearch_variant_dataset is not None:
variants = []
for i, variant_dict in enumerate(self.get_elasticsearch_variants(db_query, elasticsearch_variant_dataset, project_id)):
variant = Variant.fromJSON(variant_dict)
variants.append(variant)
#variants = sorted(variants, key=lambda v: v.unique_tuple())
return variants
collection = self._get_project_collection(project_id)
if not collection:
return []
# we have to collect list in memory here because mongo can't sort on xpos,
# as result size can get too big.
# need to find a better way to do this.
variants = []
for variant_dict in collection.find(db_query).hint([('db_gene_ids', pymongo.ASCENDING), ('xpos', pymongo.ASCENDING)]):
variant = Variant.fromJSON(variant_dict)
self.add_annotations_to_variant(variant, project_id)
if passes_variant_filter(variant, modified_variant_filter):
variants.append(variant)
variants = sorted(variants, key=lambda v: v.unique_tuple())
return variants
|
from viaduct import db, application
from flask import render_template, Markup, redirect, url_for, abort,\
flash
from flask.ext.login import current_user
from unidecode import unidecode
import datetime
import re
from viaduct.api.group import GroupPermissionAPI
from viaduct.api.user import UserAPI
from viaduct.models import Group, User
from viaduct.models import Minute, Task
from viaduct.models.pimpy import TaskUserRel
DATE_FORMAT = application.config['DATE_FORMAT']
class PimpyAPI:
    """API for pimpy: meeting minutes and the tasks parsed out of them."""

    @staticmethod
    def commit_minute_to_db(content, date, group_id):
        """
        Enter minute into the database.

        Returns success (boolean), message (string). Message is the new
        minute.id if success is true, otherwise it contains an error message.
        """
        if not GroupPermissionAPI.can_write('pimpy'):
            abort(403)
        try:
            date = datetime.datetime.strptime(date, DATE_FORMAT)
        except (ValueError, TypeError):
            # An empty date is allowed and stored as None; any other
            # unparseable value is an input error.
            if date != "":
                return False, "Could not parse the date"
            date = None
        minute = Minute(content, group_id, date)
        db.session.add(minute)
        db.session.commit()
        return True, minute.id

    @staticmethod
    def commit_task_to_db(name, content, deadline, group_id, filled_in_users,
                          line, minute_id, status):
        """
        Enter task into the database.

        Returns success (boolean), message (string). Message is the new
        task.id if success is true, otherwise it contains an error message.
        """
        if not GroupPermissionAPI.can_write('pimpy'):
            abort(403)
        if group_id == 'all':
            return False, "Group can not be 'all'"
        group = Group.query.filter(Group.id == group_id).first()
        if group is None:
            return False, "Could not distinguish group."
        users, message = PimpyAPI.get_list_of_users_from_string(
            group_id, filled_in_users)
        if not users:
            return False, message
        try:
            deadline = datetime.datetime.strptime(deadline, DATE_FORMAT)
        except (ValueError, TypeError):
            # An empty deadline is allowed and stored as None.
            if deadline != "":
                return False, "Could not parse the deadline"
            deadline = None
        # Tasks without a concrete minute are attached to placeholder id 1.
        if minute_id <= 0:
            minute_id = 1
        task = Task(name, content, deadline, group_id,
                    users, minute_id, line, status)
        db.session.add(task)
        db.session.commit()
        return True, task.id

    @staticmethod
    def edit_task(task_id, name, content, deadline, group_id,
                  filled_in_users, line):
        """
        Returns success (boolean), message (string). Message is irrelevant if
        success is true, otherwise it contains what exactly went wrong.

        In case of success the task is edited in the database.
        """
        if not GroupPermissionAPI.can_write('pimpy'):
            abort(403)
        if task_id == -1:
            return False, "no task_id given"
        task = Task.query.filter(Task.id == task_id).first()
        if task is None:
            return False, "Could not find the task"
        users, message = PimpyAPI.get_list_of_users_from_string(
            group_id, filled_in_users)
        if not users:
            return False, message
        # Only overwrite the fields for which a new value was supplied.
        if name:
            task.title = name
        if content:
            task.content = content
        if deadline:
            try:
                deadline = datetime.datetime.strptime(deadline, DATE_FORMAT)
            except (ValueError, TypeError):
                if deadline != "":
                    return False, "Could not parse the deadline"
                deadline = None
            task.deadline = deadline
        if group_id:
            task.group_id = group_id
        if line:
            task.line = line
        if users:
            task.users = users
        # if status:
        #     task.status = status
        db.session.commit()
        return True, "task edited"

    @staticmethod
    def parse_minute(content, group_id, minute_id):
        """
        Parse the specified minutes for tasks and return them in a list.
        Same for DONE tasks and REMOVED tasks.

        Syntax within the content:
            ACTIE <name_1>, <name_2>, <name_n>: <title of task>
        or
            TODO <name_1>, <name_2>, <name_n>: <title of task>

        Usage:
            tasks, dones, removes = parse_minute(content, group_id, minute_id)
        where content is a string with the entire minute,
        group_id is the group's id and minute_id is the minute's id.
        """
        tasks_found = []
        dones_found = []
        removes_found = []
        # Raw strings so backslash escapes reach the regex engine untouched.
        regex = re.compile(r"\s*(?:ACTIE|TODO) ([^\n\r]*)")
        for i, line in enumerate(content.splitlines()):
            matches = regex.findall(line)
            for action in matches:
                try:
                    listed_users, title = action.split(":", 1)
                except ValueError:
                    # No ':' present, so users and title can not be told
                    # apart; skip this hit.
                    print("could not split the line on ':'.\nSkipping hit.")
                    flash("could not parse: " + action)
                    continue
                users, message = PimpyAPI.get_list_of_users_from_string(
                    group_id, listed_users)
                if not users:
                    print(message)
                    continue
                try:
                    task = Task(title, "", None, group_id, users,
                                minute_id, i, 0)
                except Exception:
                    print("wasnt given the right input to create a task")
                    continue
                tasks_found.append(task)
        regex = re.compile(r"\s*(?:DONE) ([^\n\r]*)")
        matches = regex.findall(content)
        for done_id in matches:
            try:
                done_task = Task.query.filter(Task.id == done_id).first()
            except Exception:
                print("could not find the given task")
                flash("could not find DONE " + done_id)
                continue
            if done_task:
                dones_found.append(done_task)
            else:
                print("Could not find task " + done_id)
                flash("could not find DONE " + done_id)
        regex = re.compile(r"\s*(?:REMOVE) ([^\n\r]*)")
        matches = regex.findall(content)
        for remove_id in matches:
            try:
                remove_task = Task.query.filter(Task.id == remove_id).first()
            except Exception:
                print("could not find the given task")
                flash("could not find REMOVE " + remove_id)
                continue
            if remove_task:
                removes_found.append(remove_task)
            else:
                print("Could not find REMOVE " + remove_id)
                flash("could not find REMOVE " + remove_id)
        return tasks_found, dones_found, removes_found

    @staticmethod
    def get_list_of_users_from_string(group_id, comma_sep):
        """
        Parse a string of comma separated user names into a list of users,
        searching only within the given group.

        Returns users, message. Users is False if something is wrong, in
        which case the problem is stated in message; otherwise message
        equals "" and users is the list of matched users.

        Usage:
            get_list_of_users_from_string(group_id, comma_sep)
        where group_id is the group's id and comma_sep is a string with
        comma separated users.
        """
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        group = Group.query.filter(Group.id == group_id).first()
        if group is None:
            return False, "Could not distinguish group."
        if comma_sep is None:
            return False, "Did not receive any comma separated users"
        comma_sep = [x.lower().strip() for x in comma_sep.split(',')]
        found_users = []
        users = group.users.all()
        # Full names, lowercased and transliterated to ASCII, so that
        # matching ignores case and accents.
        user_names = [unidecode("%s %s" % (x.first_name.lower().strip(),
                                           x.last_name.lower().strip()))
                      for x in users]
        for comma_sep_user in comma_sep:
            temp_found_users = []
            for i in range(len(users)):
                # could use a filter here, but meh
                if user_names[i].startswith(comma_sep_user):
                    temp_found_users.append(users[i])
            if len(temp_found_users) == 0:
                # We want to add an action to all users if none has been found
                temp_found_users = users
            # We actually want to be able to add tasks to more than 1 user
            # if len(temp_found_users) > 1:
            #     return False, "could not disambiguate %s" % comma_sep_user
            found_users.extend(temp_found_users)
        return found_users, ""

    @staticmethod
    def get_navigation_menu(group_id, personal, type):
        """Render the pimpy side menu for the given group/personal/type."""
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        if not current_user:
            flash('Current_user not found')
            return redirect(url_for('pimpy.view_minutes'))
        groups = current_user.groups.filter(Group.name != 'all')\
            .order_by(Group.name.asc()).all()
        if not type:
            type = 'minutes'
        endpoint = 'pimpy.view_' + type
        endpoints = {'view_chosentype': endpoint,
                     'view_chosentype_personal': endpoint + '_personal',
                     'view_chosentype_chosenpersonal': endpoint +
                     ('_personal' if personal and type != 'minutes' else ''),
                     'view_tasks': 'pimpy.view_tasks',
                     'view_tasks_personal': 'pimpy.view_tasks_personal',
                     'view_tasks_chosenpersonal': 'pimpy.view_tasks',
                     'view_minutes': 'pimpy.view_minutes'}
        if personal:
            endpoints['view_tasks_chosenpersonal'] += '_personal'
        if not group_id:
            group_id = 'all'
        if group_id != 'all':
            group_id = int(group_id)
        return Markup(render_template('pimpy/api/side_menu.htm', groups=groups,
                                      group_id=group_id, personal=personal,
                                      type=type, endpoints=endpoints))

    @staticmethod
    def get_all_tasks(group_id):
        """
        Shows all tasks ever made.
        Can specify specific group.

        No internal permission system made yet.
        Do not make routes to this module yet.
        """
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        if not current_user:
            flash('Current_user not found')
            return redirect(url_for('pimpy.view_tasks'))
        status_meanings = Task.get_status_meanings()
        list_items = {}
        if group_id == 'all':
            for group in UserAPI.get_groups_for_current_user():
                list_users = {}
                list_users['Iedereen'] = group.tasks
                list_items[group.name] = list_users
        else:
            list_users = {}
            tasks = Task.query.filter(Task.group_id == group_id).all()
            group = Group.query.filter(Group.id == group_id).first()
            if not group:
                abort(404)
            # Only members of the group may see its tasks.
            if group not in UserAPI.get_groups_for_current_user():
                abort(403)
            list_users['Iedereen'] = tasks
            list_items[group.name] = list_users
        return Markup(render_template('pimpy/api/tasks.htm',
                                      list_items=list_items, type='tasks',
                                      group_id=group_id, personal=False,
                                      status_meanings=status_meanings))

    @staticmethod
    def get_tasks(group_id, personal):
        """Render open tasks for one group or all the user's groups."""
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        if not current_user:
            flash('Current_user not found')
            return redirect(url_for('pimpy.view_tasks'))
        status_meanings = Task.get_status_meanings()
        tasks_rel = TaskUserRel.query.join(Task).join(User)
        groups = UserAPI.get_groups_for_current_user()
        groups = [x.id for x in groups]
        if group_id == 'all':
            tasks_rel = tasks_rel.filter(Task.group_id.in_(groups))
        else:
            group_id = int(group_id)
            if group_id not in groups:
                abort(403)
            tasks_rel = tasks_rel.filter(Task.group_id == group_id)
        if personal:
            tasks_rel = tasks_rel.filter(User.id == current_user.id)
        # Statuses 4 and 5 are finished/removed tasks and are hidden.
        tasks_rel = tasks_rel.filter(~Task.status.in_((4, 5))).join(Group)
        tasks_rel = tasks_rel.order_by(Group.name.asc(), User.first_name.asc(),
                                       User.last_name.asc(), Task.id.asc())
        return Markup(render_template('pimpy/api/tasks.htm',
                                      personal=personal,
                                      group_id=group_id,
                                      tasks_rel=tasks_rel,
                                      type='tasks',
                                      status_meanings=status_meanings))

    @staticmethod
    def get_minutes(group_id):
        """
        Load all minutes in the given group.
        """
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        if not current_user:
            flash('Current_user not found')
            return redirect(url_for('pimpy.view_minutes'))
        list_items = {}
        if group_id != 'all':
            query = Minute.query.filter(Minute.group_id == group_id).\
                order_by(Minute.minute_date.desc())
            list_items[Group.query.filter(Group.id == group_id).first().name]\
                = query.all()
        # this should be done with a sql in statement, or something, but meh
        else:
            for group in current_user.groups:
                query = Minute.query.filter(Minute.group_id == group.id)
                query = query.order_by(Minute.minute_date.desc())
                list_items[group.name] = query.all()
        return Markup(render_template('pimpy/api/minutes.htm',
                                      list_items=list_items, type='minutes',
                                      group_id=group_id, line_number=-1))

    @staticmethod
    def get_minute(group_id, minute_id, line_number):
        """
        Load (and thus view) specifically one minute.
        """
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        if not current_user:
            flash('Current_user not found')
            return redirect(url_for('pimpy.view_minutes'))
        list_items = {}
        query = Minute.query.filter(Minute.id == minute_id)
        group = Group.query.filter(Group.id == group_id).first()
        list_items[group.name] = query.all()
        # Anchor tag "<minute id>ln<line number>" used to jump to the line.
        tag = "%dln%d" % (list_items[group.name][0].id, int(line_number))
        return render_template('pimpy/api/minutes.htm',
                               list_items=list_items, type='minutes',
                               group_id=group_id,
                               line_number=line_number,
                               tag=tag)

    @staticmethod
    def update_content(task_id, content):
        """
        Update the content of the task with the given id.
        """
        # NOTE(review): unlike the commit_*/edit_* methods there is no
        # GroupPermissionAPI check here -- confirm the route enforces it.
        task = Task.query.filter(Task.id == task_id).first()
        if task is None:
            return False, "Could not find the task"
        task.content = content
        db.session.commit()
        return True, "The task is edited successfully"

    @staticmethod
    def update_title(task_id, title):
        """
        Update the title of the task with the given id.
        """
        task = Task.query.filter(Task.id == task_id).first()
        if task is None:
            return False, "Could not find the task"
        task.title = title
        db.session.commit()
        return True, "The task is edited successfully"

    @staticmethod
    def update_users(task_id, comma_sep_users):
        """
        Update the users of the task with the given id.
        """
        task = Task.query.filter(Task.id == task_id).first()
        if task is None:
            return False, "Could not find the task"
        users, message = PimpyAPI.get_list_of_users_from_string(
            task.group_id, comma_sep_users)
        if not users:
            return False, message
        task.users = users
        db.session.commit()
        return True, "The task is edited successfully"

    @staticmethod
    def update_date(task_id, date):
        """
        Update the date of the task with the given id.
        """
        try:
            date = datetime.datetime.strptime(date, DATE_FORMAT)
        except (ValueError, TypeError):
            if date != "":
                return False, "Could not parse the date"
            date = None
        task = Task.query.filter(Task.id == task_id).first()
        if task is None:
            return False, "Could not find the task"
        task.deadline = date
        db.session.commit()
        return True, "The task is edited successfully"
Multiple DONEs can now be given on one line, separated by commas.
from viaduct import db, application
from flask import render_template, Markup, redirect, url_for, abort,\
flash
from flask.ext.login import current_user
from unidecode import unidecode
import datetime
import re
from viaduct.api.group import GroupPermissionAPI
from viaduct.api.user import UserAPI
from viaduct.models import Group, User
from viaduct.models import Minute, Task
from viaduct.models.pimpy import TaskUserRel
# App-wide date format string used when parsing user-supplied dates/deadlines.
DATE_FORMAT = application.config['DATE_FORMAT']
class PimpyAPI:
    """API for pimpy: meeting minutes and the tasks parsed out of them."""

    @staticmethod
    def commit_minute_to_db(content, date, group_id):
        """
        Enter minute into the database.

        Returns success (boolean), message (string). Message is the new
        minute.id if success is true, otherwise it contains an error message.
        """
        if not GroupPermissionAPI.can_write('pimpy'):
            abort(403)
        try:
            date = datetime.datetime.strptime(date, DATE_FORMAT)
        except (ValueError, TypeError):
            # An empty date is allowed and stored as None; any other
            # unparseable value is an input error.
            if date != "":
                return False, "Could not parse the date"
            date = None
        minute = Minute(content, group_id, date)
        db.session.add(minute)
        db.session.commit()
        return True, minute.id

    @staticmethod
    def commit_task_to_db(name, content, deadline, group_id, filled_in_users,
                          line, minute_id, status):
        """
        Enter task into the database.

        Returns success (boolean), message (string). Message is the new
        task.id if success is true, otherwise it contains an error message.
        """
        if not GroupPermissionAPI.can_write('pimpy'):
            abort(403)
        if group_id == 'all':
            return False, "Group can not be 'all'"
        group = Group.query.filter(Group.id == group_id).first()
        if group is None:
            return False, "Could not distinguish group."
        users, message = PimpyAPI.get_list_of_users_from_string(
            group_id, filled_in_users)
        if not users:
            return False, message
        try:
            deadline = datetime.datetime.strptime(deadline, DATE_FORMAT)
        except (ValueError, TypeError):
            # An empty deadline is allowed and stored as None.
            if deadline != "":
                return False, "Could not parse the deadline"
            deadline = None
        # Tasks without a concrete minute are attached to placeholder id 1.
        if minute_id <= 0:
            minute_id = 1
        task = Task(name, content, deadline, group_id,
                    users, minute_id, line, status)
        db.session.add(task)
        db.session.commit()
        return True, task.id

    @staticmethod
    def edit_task(task_id, name, content, deadline, group_id,
                  filled_in_users, line):
        """
        Returns success (boolean), message (string). Message is irrelevant if
        success is true, otherwise it contains what exactly went wrong.

        In case of success the task is edited in the database.
        """
        if not GroupPermissionAPI.can_write('pimpy'):
            abort(403)
        if task_id == -1:
            return False, "no task_id given"
        task = Task.query.filter(Task.id == task_id).first()
        if task is None:
            return False, "Could not find the task"
        users, message = PimpyAPI.get_list_of_users_from_string(
            group_id, filled_in_users)
        if not users:
            return False, message
        # Only overwrite the fields for which a new value was supplied.
        if name:
            task.title = name
        if content:
            task.content = content
        if deadline:
            try:
                deadline = datetime.datetime.strptime(deadline, DATE_FORMAT)
            except (ValueError, TypeError):
                if deadline != "":
                    return False, "Could not parse the deadline"
                deadline = None
            task.deadline = deadline
        if group_id:
            task.group_id = group_id
        if line:
            task.line = line
        if users:
            task.users = users
        # if status:
        #     task.status = status
        db.session.commit()
        return True, "task edited"

    @staticmethod
    def parse_minute(content, group_id, minute_id):
        """
        Parse the specified minutes for tasks and return them in a list.
        Same for DONE tasks and REMOVED tasks.

        Syntax within the content:
            ACTIE <name_1>, <name_2>, <name_n>: <title of task>
        or
            TODO <name_1>, <name_2>, <name_n>: <title of task>

        Usage:
            tasks, dones, removes = parse_minute(content, group_id, minute_id)
        where content is a string with the entire minute,
        group_id is the group's id and minute_id is the minute's id.
        """
        tasks_found = []
        dones_found = []
        removes_found = []
        # Raw strings so backslash escapes reach the regex engine untouched.
        regex = re.compile(r"\s*(?:ACTIE|TODO) ([^\n\r]*)")
        for i, line in enumerate(content.splitlines()):
            matches = regex.findall(line)
            for action in matches:
                try:
                    listed_users, title = action.split(":", 1)
                except ValueError:
                    # No ':' present, so users and title can not be told
                    # apart; skip this hit.
                    print("could not split the line on ':'.\nSkipping hit.")
                    flash("could not parse: " + action)
                    continue
                users, message = PimpyAPI.get_list_of_users_from_string(
                    group_id, listed_users)
                if not users:
                    print(message)
                    continue
                try:
                    task = Task(title, "", None, group_id, users,
                                minute_id, i, 0)
                except Exception:
                    print("wasnt given the right input to create a task")
                    continue
                tasks_found.append(task)
        regex = re.compile(r"\s*(?:DONE) ([^\n\r]*)")
        matches = regex.findall(content)
        for match in matches:
            # Multiple DONE ids may be given on one line, separated by
            # commas.  Strip whitespace so "DONE 5, 6" matches id "6"
            # rather than " 6".
            done_ids = [x.strip() for x in match.split(",")]
            for done_id in done_ids:
                try:
                    done_task = Task.query.filter(Task.id == done_id).first()
                except Exception:
                    print("could not find the given task")
                    flash("could not find DONE " + done_id)
                    continue
                if done_task:
                    dones_found.append(done_task)
                else:
                    print("Could not find task " + done_id)
                    flash("could not find DONE " + done_id)
        regex = re.compile(r"\s*(?:REMOVE) ([^\n\r]*)")
        matches = regex.findall(content)
        for remove_id in matches:
            try:
                remove_task = Task.query.filter(Task.id == remove_id).first()
            except Exception:
                print("could not find the given task")
                flash("could not find REMOVE " + remove_id)
                continue
            if remove_task:
                removes_found.append(remove_task)
            else:
                print("Could not find REMOVE " + remove_id)
                flash("could not find REMOVE " + remove_id)
        return tasks_found, dones_found, removes_found

    @staticmethod
    def get_list_of_users_from_string(group_id, comma_sep):
        """
        Parse a string of comma separated user names into a list of users,
        searching only within the given group.

        Returns users, message. Users is False if something is wrong, in
        which case the problem is stated in message; otherwise message
        equals "" and users is the list of matched users.

        Usage:
            get_list_of_users_from_string(group_id, comma_sep)
        where group_id is the group's id and comma_sep is a string with
        comma separated users.
        """
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        group = Group.query.filter(Group.id == group_id).first()
        if group is None:
            return False, "Could not distinguish group."
        if comma_sep is None:
            return False, "Did not receive any comma separated users"
        comma_sep = [x.lower().strip() for x in comma_sep.split(',')]
        found_users = []
        users = group.users.all()
        # Full names, lowercased and transliterated to ASCII, so that
        # matching ignores case and accents.
        user_names = [unidecode("%s %s" % (x.first_name.lower().strip(),
                                           x.last_name.lower().strip()))
                      for x in users]
        for comma_sep_user in comma_sep:
            temp_found_users = []
            for i in range(len(users)):
                # could use a filter here, but meh
                if user_names[i].startswith(comma_sep_user):
                    temp_found_users.append(users[i])
            if len(temp_found_users) == 0:
                # We want to add an action to all users if none has been found
                temp_found_users = users
            # We actually want to be able to add tasks to more than 1 user
            # if len(temp_found_users) > 1:
            #     return False, "could not disambiguate %s" % comma_sep_user
            found_users.extend(temp_found_users)
        return found_users, ""

    @staticmethod
    def get_navigation_menu(group_id, personal, type):
        """Render the pimpy side menu for the given group/personal/type."""
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        if not current_user:
            flash('Current_user not found')
            return redirect(url_for('pimpy.view_minutes'))
        groups = current_user.groups.filter(Group.name != 'all')\
            .order_by(Group.name.asc()).all()
        if not type:
            type = 'minutes'
        endpoint = 'pimpy.view_' + type
        endpoints = {'view_chosentype': endpoint,
                     'view_chosentype_personal': endpoint + '_personal',
                     'view_chosentype_chosenpersonal': endpoint +
                     ('_personal' if personal and type != 'minutes' else ''),
                     'view_tasks': 'pimpy.view_tasks',
                     'view_tasks_personal': 'pimpy.view_tasks_personal',
                     'view_tasks_chosenpersonal': 'pimpy.view_tasks',
                     'view_minutes': 'pimpy.view_minutes'}
        if personal:
            endpoints['view_tasks_chosenpersonal'] += '_personal'
        if not group_id:
            group_id = 'all'
        if group_id != 'all':
            group_id = int(group_id)
        return Markup(render_template('pimpy/api/side_menu.htm', groups=groups,
                                      group_id=group_id, personal=personal,
                                      type=type, endpoints=endpoints))

    @staticmethod
    def get_all_tasks(group_id):
        """
        Shows all tasks ever made.
        Can specify specific group.

        No internal permission system made yet.
        Do not make routes to this module yet.
        """
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        if not current_user:
            flash('Current_user not found')
            return redirect(url_for('pimpy.view_tasks'))
        status_meanings = Task.get_status_meanings()
        list_items = {}
        if group_id == 'all':
            for group in UserAPI.get_groups_for_current_user():
                list_users = {}
                list_users['Iedereen'] = group.tasks
                list_items[group.name] = list_users
        else:
            list_users = {}
            tasks = Task.query.filter(Task.group_id == group_id).all()
            group = Group.query.filter(Group.id == group_id).first()
            if not group:
                abort(404)
            # Only members of the group may see its tasks.
            if group not in UserAPI.get_groups_for_current_user():
                abort(403)
            list_users['Iedereen'] = tasks
            list_items[group.name] = list_users
        return Markup(render_template('pimpy/api/tasks.htm',
                                      list_items=list_items, type='tasks',
                                      group_id=group_id, personal=False,
                                      status_meanings=status_meanings))

    @staticmethod
    def get_tasks(group_id, personal):
        """Render open tasks for one group or all the user's groups."""
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        if not current_user:
            flash('Current_user not found')
            return redirect(url_for('pimpy.view_tasks'))
        status_meanings = Task.get_status_meanings()
        tasks_rel = TaskUserRel.query.join(Task).join(User)
        groups = UserAPI.get_groups_for_current_user()
        groups = [x.id for x in groups]
        if group_id == 'all':
            tasks_rel = tasks_rel.filter(Task.group_id.in_(groups))
        else:
            group_id = int(group_id)
            if group_id not in groups:
                abort(403)
            tasks_rel = tasks_rel.filter(Task.group_id == group_id)
        if personal:
            tasks_rel = tasks_rel.filter(User.id == current_user.id)
        # Statuses 4 and 5 are finished/removed tasks and are hidden.
        tasks_rel = tasks_rel.filter(~Task.status.in_((4, 5))).join(Group)
        tasks_rel = tasks_rel.order_by(Group.name.asc(), User.first_name.asc(),
                                       User.last_name.asc(), Task.id.asc())
        return Markup(render_template('pimpy/api/tasks.htm',
                                      personal=personal,
                                      group_id=group_id,
                                      tasks_rel=tasks_rel,
                                      type='tasks',
                                      status_meanings=status_meanings))

    @staticmethod
    def get_minutes(group_id):
        """
        Load all minutes in the given group.
        """
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        if not current_user:
            flash('Current_user not found')
            return redirect(url_for('pimpy.view_minutes'))
        list_items = {}
        if group_id != 'all':
            query = Minute.query.filter(Minute.group_id == group_id).\
                order_by(Minute.minute_date.desc())
            list_items[Group.query.filter(Group.id == group_id).first().name]\
                = query.all()
        # this should be done with a sql in statement, or something, but meh
        else:
            for group in current_user.groups:
                query = Minute.query.filter(Minute.group_id == group.id)
                query = query.order_by(Minute.minute_date.desc())
                list_items[group.name] = query.all()
        return Markup(render_template('pimpy/api/minutes.htm',
                                      list_items=list_items, type='minutes',
                                      group_id=group_id, line_number=-1))

    @staticmethod
    def get_minute(group_id, minute_id, line_number):
        """
        Load (and thus view) specifically one minute.
        """
        if not GroupPermissionAPI.can_read('pimpy'):
            abort(403)
        if not current_user:
            flash('Current_user not found')
            return redirect(url_for('pimpy.view_minutes'))
        list_items = {}
        query = Minute.query.filter(Minute.id == minute_id)
        group = Group.query.filter(Group.id == group_id).first()
        list_items[group.name] = query.all()
        # Anchor tag "<minute id>ln<line number>" used to jump to the line.
        tag = "%dln%d" % (list_items[group.name][0].id, int(line_number))
        return render_template('pimpy/api/minutes.htm',
                               list_items=list_items, type='minutes',
                               group_id=group_id,
                               line_number=line_number,
                               tag=tag)

    @staticmethod
    def update_content(task_id, content):
        """
        Update the content of the task with the given id.
        """
        # NOTE(review): unlike the commit_*/edit_* methods there is no
        # GroupPermissionAPI check here -- confirm the route enforces it.
        task = Task.query.filter(Task.id == task_id).first()
        if task is None:
            return False, "Could not find the task"
        task.content = content
        db.session.commit()
        return True, "The task is edited successfully"

    @staticmethod
    def update_title(task_id, title):
        """
        Update the title of the task with the given id.
        """
        task = Task.query.filter(Task.id == task_id).first()
        if task is None:
            return False, "Could not find the task"
        task.title = title
        db.session.commit()
        return True, "The task is edited successfully"

    @staticmethod
    def update_users(task_id, comma_sep_users):
        """
        Update the users of the task with the given id.
        """
        task = Task.query.filter(Task.id == task_id).first()
        if task is None:
            return False, "Could not find the task"
        users, message = PimpyAPI.get_list_of_users_from_string(
            task.group_id, comma_sep_users)
        if not users:
            return False, message
        task.users = users
        db.session.commit()
        return True, "The task is edited successfully"

    @staticmethod
    def update_date(task_id, date):
        """
        Update the date of the task with the given id.
        """
        try:
            date = datetime.datetime.strptime(date, DATE_FORMAT)
        except (ValueError, TypeError):
            if date != "":
                return False, "Could not parse the date"
            date = None
        task = Task.query.filter(Task.id == task_id).first()
        if task is None:
            return False, "Could not find the task"
        task.deadline = date
        db.session.commit()
        return True, "The task is edited successfully"
|
import os
import unittest
from django.contrib.auth.models import User, Permission
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from django.utils import simplejson
from djblets.util.testing import TagTest
import reviewboard.webapi.json as webapi
from reviewboard.diffviewer.models import DiffSet
from reviewboard.reviews.models import Group, ReviewRequest, \
ReviewRequestDraft, Review, \
Comment, Screenshot, ScreenshotComment
from reviewboard.scmtools.models import Repository, Tool
class WebAPITests(TestCase):
"""Testing the webapi support."""
fixtures = ['test_users', 'test_reviewrequests', 'test_scmtools']
    def setUp(self):
        """Create a local SVN test repository and log in as user 'grumpy'."""
        # The fixture repository lives in the scmtools test data directory.
        svn_repo_path = os.path.join(os.path.dirname(__file__),
                                     '../scmtools/testdata/svn_repo')
        self.repository = Repository(name='Subversion SVN',
                                     path='file://' + svn_repo_path,
                                     tool=Tool.objects.get(name='Subversion'))
        self.repository.save()
        self.client.login(username="grumpy", password="grumpy")
        self.user = User.objects.get(username="grumpy")
    def tearDown(self):
        """Log the test client out again after each test."""
        self.client.logout()
def apiGet(self, path, query={}):
response = self.client.get("/api/json/%s/" % path, query)
self.assertEqual(response.status_code, 200)
rsp = simplejson.loads(response.content)
print "Response: %s" % rsp
return rsp
def apiPost(self, path, query={}):
print "Posting to /api/json/%s/" % path
response = self.client.post("/api/json/%s/" % path, query)
self.assertEqual(response.status_code, 200)
rsp = simplejson.loads(response.content)
print "Response: %s" % rsp
return rsp
    def testRepositoryList(self):
        """Testing the repositories API"""
        # The listing should include every Repository row, including the
        # one created in setUp().
        rsp = self.apiGet("repositories")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), Repository.objects.count())
    def testUserList(self):
        """Testing the users API"""
        # Without a query filter, every User should be listed.
        rsp = self.apiGet("users")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['users']), User.objects.count())
    def testUserListQuery(self):
        """Testing the users API with custom query"""
        # 'gru' should match exactly one fixture user: grumpy.
        rsp = self.apiGet("users", {'query': 'gru'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['users']), 1) # grumpy
    def testGroupList(self):
        """Testing the groups API"""
        # Without a query filter, every review Group should be listed.
        rsp = self.apiGet("groups")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['groups']), Group.objects.count())
    def testGroupListQuery(self):
        """Testing the groups API with custom query"""
        # 'dev' should match exactly one fixture group: devgroup.
        rsp = self.apiGet("groups", {'query': 'dev'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['groups']), 1) #devgroup
    def testGroupStar(self):
        """Testing the groups/star API"""
        # Starring should add devgroup to the user's starred groups.
        rsp = self.apiGet("groups/devgroup/star")
        self.assertEqual(rsp['stat'], 'ok')
        self.assert_(Group.objects.get(name="devgroup") in
                     self.user.get_profile().starred_groups.all())
    def testGroupStarDoesNotExist(self):
        """Testing the groups/star API with Does Not Exist error"""
        # Starring an unknown group must fail with DOES_NOT_EXIST.
        rsp = self.apiGet("groups/invalidgroup/star")
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], webapi.DOES_NOT_EXIST.code)
    def testGroupUnstar(self):
        """Testing the groups/unstar API"""
        # First, star it.
        self.testGroupStar()

        # Unstarring should remove devgroup from the starred groups again.
        rsp = self.apiGet("groups/devgroup/unstar")
        self.assertEqual(rsp['stat'], 'ok')
        self.assert_(Group.objects.get(name="devgroup") not in
                     self.user.get_profile().starred_groups.all())
    def testGroupUnstarDoesNotExist(self):
        """Testing the groups/unstar API with Does Not Exist error"""
        # Unstarring an unknown group must fail with DOES_NOT_EXIST.
        rsp = self.apiGet("groups/invalidgroup/unstar")
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], webapi.DOES_NOT_EXIST.code)
    def testReviewRequestList(self):
        """Testing the reviewrequests/all API"""
        # Default listing should match the public (pending) review requests.
        rsp = self.apiGet("reviewrequests/all")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
                         ReviewRequest.objects.public().count())
    def testReviewRequestListWithStatus(self):
        """Testing the reviewrequests/all API with custom status"""
        # 'submitted' maps to the 'S' status code.
        rsp = self.apiGet("reviewrequests/all", {'status': 'submitted'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
                         ReviewRequest.objects.public(status='S').count())

        # 'discarded' maps to the 'D' status code.
        rsp = self.apiGet("reviewrequests/all", {'status': 'discarded'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
                         ReviewRequest.objects.public(status='D').count())

        # 'all' means no status filter at all.
        rsp = self.apiGet("reviewrequests/all", {'status': 'all'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
                         ReviewRequest.objects.public(status=None).count())
    def testReviewRequestListCount(self):
        """Testing the reviewrequests/all/count API"""
        # The count endpoint should agree with the public queryset size.
        rsp = self.apiGet("reviewrequests/all/count")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['count'], ReviewRequest.objects.public().count())
    def testReviewRequestsToGroup(self):
        """Testing the reviewrequests/to/group API"""
        # Listing should match the requests targeted at devgroup.
        rsp = self.apiGet("reviewrequests/to/group/devgroup")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
                         ReviewRequest.objects.to_group("devgroup").count())
    def testReviewRequestsToGroupWithStatus(self):
        """Testing the reviewrequests/to/group API with custom status"""
        # 'submitted' maps to status code 'S', 'discarded' to 'D'.
        rsp = self.apiGet("reviewrequests/to/group/devgroup",
                          {'status': 'submitted'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
            ReviewRequest.objects.to_group("devgroup", status='S').count())

        rsp = self.apiGet("reviewrequests/to/group/devgroup",
                          {'status': 'discarded'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
            ReviewRequest.objects.to_group("devgroup", status='D').count())
    def testReviewRequestsToGroupCount(self):
        """Testing the reviewrequests/to/group/count API"""
        # The count endpoint should agree with the to_group queryset size.
        rsp = self.apiGet("reviewrequests/to/group/devgroup/count")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['count'],
                         ReviewRequest.objects.to_group("devgroup").count())
    def testReviewRequestsToUser(self):
        """Testing the reviewrequests/to/user API"""
        # Listing should match the requests assigned to grumpy (directly
        # or via a group).
        rsp = self.apiGet("reviewrequests/to/user/grumpy")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
                         ReviewRequest.objects.to_user("grumpy").count())
    def testReviewRequestsToUserWithStatus(self):
        """Testing the reviewrequests/to/user API with custom status"""
        # 'submitted' maps to status code 'S', 'discarded' to 'D'.
        rsp = self.apiGet("reviewrequests/to/user/grumpy",
                          {'status': 'submitted'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
            ReviewRequest.objects.to_user("grumpy", status='S').count())

        rsp = self.apiGet("reviewrequests/to/user/grumpy",
                          {'status': 'discarded'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
            ReviewRequest.objects.to_user("grumpy", status='D').count())
    def testReviewRequestsToUserCount(self):
        """Testing the reviewrequests/to/user/count API"""
        # The count endpoint should agree with the to_user queryset size.
        rsp = self.apiGet("reviewrequests/to/user/grumpy/count")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['count'],
                         ReviewRequest.objects.to_user("grumpy").count())
    def testReviewRequestsToUserDirectly(self):
        """Testing the reviewrequests/to/user/directly API"""
        # Only requests naming doc as a direct reviewer (not via a group).
        rsp = self.apiGet("reviewrequests/to/user/doc/directly")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
                         ReviewRequest.objects.to_user_directly("doc").count())
    def testReviewRequestsToUserDirectlyWithStatus(self):
        """Testing the reviewrequests/to/user/directly API with custom status"""
        # 'submitted' maps to status code 'S', 'discarded' to 'D'.
        rsp = self.apiGet("reviewrequests/to/user/doc/directly",
                          {'status': 'submitted'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
            ReviewRequest.objects.to_user_directly("doc", status='S').count())

        rsp = self.apiGet("reviewrequests/to/user/doc/directly",
                          {'status': 'discarded'})
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['review_requests']),
            ReviewRequest.objects.to_user_directly("doc", status='D').count())
    def testReviewRequestsToUserDirectlyCount(self):
        """Testing the reviewrequests/to/user/directly/count API"""
        # The count endpoint should agree with the to_user_directly size.
        rsp = self.apiGet("reviewrequests/to/user/doc/directly/count")
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['count'],
                         ReviewRequest.objects.to_user_directly("doc").count())
def testReviewRequestsFromUser(self):
"""Testing the reviewrequests/from/user API"""
rsp = self.apiGet("reviewrequests/from/user/grumpy")
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(len(rsp['review_requests']),
ReviewRequest.objects.from_user("grumpy").count())
def testReviewRequestsFromUserWithStatus(self):
"""Testing the reviewrequests/from/user API with custom status"""
rsp = self.apiGet("reviewrequests/from/user/grumpy",
{'status': 'submitted'})
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(len(rsp['review_requests']),
ReviewRequest.objects.from_user("grumpy", status='S').count())
rsp = self.apiGet("reviewrequests/from/user/grumpy",
{'status': 'discarded'})
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(len(rsp['review_requests']),
ReviewRequest.objects.from_user("grumpy", status='D').count())
def testReviewRequestsFromUserCount(self):
"""Testing the reviewrequests/from/user/count API"""
rsp = self.apiGet("reviewrequests/from/user/grumpy/count")
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['count'],
ReviewRequest.objects.from_user("grumpy").count())
def testNewReviewRequest(self):
"""Testing the reviewrequests/new API"""
rsp = self.apiPost("reviewrequests/new", {
'repository_path': self.repository.path,
})
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['review_request']['repository']['id'],
self.repository.id)
# See if we can fetch this. Also return it for use in other
# unit tests.
return ReviewRequest.objects.get(pk=rsp['review_request']['id'])
def testReviewRequest(self):
"""Testing the reviewrequests/<id> API"""
review_request = ReviewRequest.objects.public()[0]
rsp = self.apiGet("reviewrequests/%s" % review_request.id)
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['review_request']['id'], review_request.id)
self.assertEqual(rsp['review_request']['summary'],
review_request.summary)
def testReviewRequestPermissionDenied(self):
"""Testing the reviewrequests/<id> API with Permission Denied error"""
review_request = ReviewRequest.objects.filter(public=False).\
exclude(submitter=self.user)[0]
rsp = self.apiGet("reviewrequests/%s" % review_request.id)
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], webapi.PERMISSION_DENIED.code)
def testReviewRequestByChangenum(self):
"""Testing the reviewrequests/repository/changenum API"""
review_request = \
ReviewRequest.objects.filter(changenum__isnull=False)[0]
rsp = self.apiGet("reviewrequests/repository/%s/changenum/%s" %
(review_request.repository.id,
review_request.changenum))
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['review_request']['id'], review_request.id)
self.assertEqual(rsp['review_request']['summary'],
review_request.summary)
self.assertEqual(rsp['review_request']['changenum'],
review_request.changenum)
def testReviewRequestStar(self):
"""Testing the reviewrequests/star API"""
review_request = ReviewRequest.objects.public()[0]
rsp = self.apiGet("reviewrequests/%s/star" % review_request.id)
self.assertEqual(rsp['stat'], 'ok')
self.assert_(review_request in
self.user.get_profile().starred_review_requests.all())
def testReviewRequestStarDoesNotExist(self):
"""Testing the reviewrequests/star API with Does Not Exist error"""
rsp = self.apiGet("reviewrequests/999/star")
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], webapi.DOES_NOT_EXIST.code)
def testReviewRequestUnstar(self):
"""Testing the reviewrequests/unstar API"""
# First, star it.
self.testReviewRequestStar()
review_request = ReviewRequest.objects.public()[0]
rsp = self.apiGet("reviewrequests/%s/unstar" % review_request.id)
self.assertEqual(rsp['stat'], 'ok')
self.assert_(review_request not in
self.user.get_profile().starred_review_requests.all())
def testReviewRequestUnstarWithDoesNotExist(self):
"""Testing the reviewrequests/unstar API with Does Not Exist error"""
rsp = self.apiGet("reviewrequests/999/unstar")
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], webapi.DOES_NOT_EXIST.code)
def testReviewRequestDelete(self):
"""Testing the reviewrequests/delete API"""
self.user.user_permissions.add(
Permission.objects.get(codename='delete_reviewrequest'))
self.user.save()
self.assert_(self.user.has_perm('reviews.delete_reviewrequest'))
review_request_id = \
ReviewRequest.objects.from_user(self.user.username)[0].id
rsp = self.apiGet("reviewrequests/%s/delete" % review_request_id)
self.assertEqual(rsp['stat'], 'ok')
self.assertRaises(ReviewRequest.DoesNotExist,
ReviewRequest.objects.get, pk=review_request_id)
def testReviewRequestDeletePermissionDenied(self):
"""Testing the reviewrequests/delete API with Permission Denied error"""
review_request_id = \
ReviewRequest.objects.exclude(submitter=self.user)[0].id
rsp = self.apiGet("reviewrequests/%s/delete" % review_request_id)
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], webapi.PERMISSION_DENIED.code)
def testReviewRequestDeleteDoesNotExist(self):
"""Testing the reviewrequests/delete API with Does Not Exist error"""
self.user.user_permissions.add(
Permission.objects.get(codename='delete_reviewrequest'))
self.user.save()
self.assert_(self.user.has_perm('reviews.delete_reviewrequest'))
rsp = self.apiGet("reviewrequests/999/delete")
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], webapi.DOES_NOT_EXIST.code)
def testReviewRequestDraftSet(self):
"""Testing the reviewrequests/draft/set API"""
summary = "My Summary"
description = "My Description"
testing_done = "My Testing Done"
branch = "My Branch"
bugs = ""
review_request_id = \
ReviewRequest.objects.from_user(self.user.username)[0].id
rsp = self.apiPost("reviewrequests/%s/draft/set" % review_request_id, {
'summary': summary,
'description': description,
'testing_done': testing_done,
'branch': branch,
'bugs_closed': bugs,
})
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['draft']['summary'], summary)
self.assertEqual(rsp['draft']['description'], description)
self.assertEqual(rsp['draft']['testing_done'], testing_done)
self.assertEqual(rsp['draft']['branch'], branch)
self.assertEqual(rsp['draft']['bugs_closed'], [])
draft = ReviewRequestDraft.objects.get(pk=rsp['draft']['id'])
self.assertEqual(draft.summary, summary)
self.assertEqual(draft.description, description)
self.assertEqual(draft.testing_done, testing_done)
self.assertEqual(draft.branch, branch)
self.assertEqual(draft.get_bug_list(), [])
def testReviewRequestDraftSetField(self):
"""Testing the reviewrequests/draft/set/<field> API"""
bugs_closed = '123,456'
review_request_id = \
ReviewRequest.objects.from_user(self.user.username)[0].id
rsp = self.apiPost("reviewrequests/%s/draft/set/bugs_closed" %
review_request_id, {
'value': bugs_closed,
})
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['bugs_closed'], bugs_closed.split(","))
def testReviewRequestDraftSetFieldInvalidName(self):
"""Testing the reviewrequests/draft/set/<field> API with invalid name"""
review_request_id = \
ReviewRequest.objects.from_user(self.user.username)[0].id
rsp = self.apiPost("reviewrequests/%s/draft/set/foobar" %
review_request_id, {
'value': 'foo',
})
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], webapi.INVALID_ATTRIBUTE.code)
self.assertEqual(rsp['attribute'], 'foobar')
def testReviewRequestDraftSave(self):
"""Testing the reviewrequests/draft/save API"""
# Set some data first.
self.testReviewRequestDraftSet()
review_request_id = \
ReviewRequest.objects.from_user(self.user.username)[0].id
rsp = self.apiPost("reviewrequests/%s/draft/save" % review_request_id)
self.assertEqual(rsp['stat'], 'ok')
review_request = ReviewRequest.objects.get(pk=review_request_id)
self.assertEqual(review_request.summary, "My Summary")
self.assertEqual(review_request.description, "My Description")
self.assertEqual(review_request.testing_done, "My Testing Done")
self.assertEqual(review_request.branch, "My Branch")
def testReviewRequestDraftDiscard(self):
"""Testing the reviewrequests/draft/discard API"""
review_request = ReviewRequest.objects.from_user(self.user.username)[0]
summary = review_request.summary
description = review_request.description
# Set some data.
self.testReviewRequestDraftSet()
rsp = self.apiPost("reviewrequests/%s/draft/discard" %
review_request.id)
self.assertEqual(rsp['stat'], 'ok')
review_request = ReviewRequest.objects.get(pk=review_request.id)
self.assertEqual(review_request.summary, summary)
self.assertEqual(review_request.description, description)
def testReviewDraftSave(self):
    """Testing the reviewrequests/reviews/draft/save API"""
    body_top = "My Body Top"
    body_bottom = "My Body Bottom"
    ship_it = True
    # Clear out any reviews on the first review request we find, so
    # the draft created below is the only review on it.
    review_request = ReviewRequest.objects.public()[0]
    review_request.review_set = []
    review_request.save()
    rsp = self.apiPost("reviewrequests/%s/reviews/draft/save" %
                       review_request.id, {
        'shipit': ship_it,
        'body_top': body_top,
        'body_bottom': body_bottom,
    })
    # Fix: the response status was never checked here, unlike the
    # otherwise-parallel testReviewDraftPublish.
    self.assertEqual(rsp['stat'], 'ok')
    reviews = review_request.review_set.filter(user=self.user)
    self.assertEqual(len(reviews), 1)
    review = reviews[0]
    self.assertEqual(review.ship_it, ship_it)
    self.assertEqual(review.body_top, body_top)
    self.assertEqual(review.body_bottom, body_bottom)
    # "save" (as opposed to "publish") must leave the review private.
    self.assertEqual(review.public, False)
def testReviewDraftPublish(self):
"""Testing the reviewrequests/reviews/draft/publish API"""
body_top = "My Body Top"
body_bottom = "My Body Bottom"
ship_it = True
# Clear out any reviews on the first review request we find.
review_request = ReviewRequest.objects.public()[0]
review_request.review_set = []
review_request.save()
rsp = self.apiPost("reviewrequests/%s/reviews/draft/publish" %
review_request.id, {
'shipit': ship_it,
'body_top': body_top,
'body_bottom': body_bottom,
})
self.assertEqual(rsp['stat'], 'ok')
reviews = review_request.review_set.filter(user=self.user)
self.assertEqual(len(reviews), 1)
review = reviews[0]
self.assertEqual(review.ship_it, ship_it)
self.assertEqual(review.body_top, body_top)
self.assertEqual(review.body_bottom, body_bottom)
self.assertEqual(review.public, True)
def testReviewDraftDelete(self):
"""Testing the reviewrequests/reviews/draft/delete API"""
# Set up the draft to delete.
self.testReviewDraftSave()
review_request = ReviewRequest.objects.public()[0]
rsp = self.apiPost("reviewrequests/%s/reviews/draft/delete" %
review_request.id)
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(review_request.review_set.count(), 0)
def testReviewDraftDeleteDoesNotExist(self):
"""Testing the reviewrequests/reviews/draft/delete API with Does Not Exist error"""
# Set up the draft to delete
self.testReviewDraftPublish()
review_request = ReviewRequest.objects.public()[0]
rsp = self.apiPost("reviewrequests/%s/reviews/draft/delete" %
review_request.id)
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], webapi.DOES_NOT_EXIST.code)
def testReviewDraftComments(self):
    """Testing the reviewrequests/reviews/draft/comments API"""
    # TODO: this endpoint has no real coverage yet.  A previous draft
    # of this test was left commented out here and contained a syntax
    # error (an unbalanced "len(rsp['comments']," call); it has been
    # removed.  The intended shape was: fetch
    # reviewrequests/<id>/reviews/draft/comments for a review request
    # that has comments, and compare the returned comment list.
    pass
def testReviewsList(self):
"""Testing the reviewrequests/reviews API"""
review_request = Review.objects.all()[0].review_request
rsp = self.apiGet("reviewrequests/%s/reviews" % review_request.id)
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(len(rsp['reviews']), review_request.review_set.count())
def testReviewsListCount(self):
"""Testing the reviewrequests/reviews/count API"""
review_request = Review.objects.all()[0].review_request
rsp = self.apiGet("reviewrequests/%s/reviews/count" %
review_request.id)
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['reviews'], review_request.review_set.count())
def testReviewCommentsList(self):
"""Testing the reviewrequests/reviews/comments API"""
review = Review.objects.filter(comments__pk__gt=0)[0]
rsp = self.apiGet("reviewrequests/%s/reviews/%s/comments" %
(review.review_request.id, review.id))
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(len(rsp['comments']), review.comments.count())
def testReviewCommentsCount(self):
"""Testing the reviewrequests/reviews/comments/count API"""
review = Review.objects.filter(comments__pk__gt=0)[0]
rsp = self.apiGet("reviewrequests/%s/reviews/%s/comments/count" %
(review.review_request.id, review.id))
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['count'], review.comments.count())
def testReplyDraftComment(self):
"""Testing the reviewrequests/reviews/replies/draft API with comment"""
comment_text = "My Comment Text"
comment = Comment.objects.all()[0]
review = comment.review_set.get()
rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft" %
(review.review_request.id, review.id), {
'type': 'comment',
'id': comment.id,
'value': comment_text
})
self.assertEqual(rsp['stat'], 'ok')
reply_comment = Comment.objects.get(pk=rsp['comment']['id'])
self.assertEqual(reply_comment.text, comment_text)
def testReplyDraftBodyTop(self):
"""Testing the reviewrequests/reviews/replies/draft API with body_top"""
body_top = 'My Body Top'
review = \
Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft" %
(review.review_request.id, review.id), {
'type': 'body_top',
'value': body_top,
})
self.assertEqual(rsp['stat'], 'ok')
reply = Review.objects.get(pk=rsp['reply']['id'])
self.assertEqual(reply.body_top, body_top)
def testReplyDraftBodyBottom(self):
"""Testing the reviewrequests/reviews/replies/draft API with body_bottom"""
body_bottom = 'My Body Bottom'
review = \
Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft" %
(review.review_request.id, review.id), {
'type': 'body_bottom',
'value': body_bottom,
})
self.assertEqual(rsp['stat'], 'ok')
reply = Review.objects.get(pk=rsp['reply']['id'])
self.assertEqual(reply.body_bottom, body_bottom)
def testReplyDraftSave(self):
"""Testing the reviewrequests/reviews/replies/draft/save API"""
review = \
Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft" %
(review.review_request.id, review.id), {
'type': 'body_top',
'value': 'Test',
})
self.assertEqual(rsp['stat'], 'ok')
reply_id = rsp['reply']['id']
rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft/save" %
(review.review_request.id, review.id))
self.assertEqual(rsp['stat'], 'ok')
reply = Review.objects.get(pk=reply_id)
self.assertEqual(reply.public, True)
def testReplyDraftDiscard(self):
"""Testing the reviewrequests/reviews/replies/draft/discard API"""
review = \
Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft" %
(review.review_request.id, review.id), {
'type': 'body_top',
'value': 'Test',
})
self.assertEqual(rsp['stat'], 'ok')
reply_id = rsp['reply']['id']
rsp = self.apiPost(
"reviewrequests/%s/reviews/%s/replies/draft/discard" %
(review.review_request.id, review.id))
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(Review.objects.filter(pk=reply_id).count(), 0)
def testRepliesList(self):
"""Testing the reviewrequests/reviews/replies API"""
review = \
Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
self.testReplyDraftSave()
rsp = self.apiGet("reviewrequests/%s/reviews/%s/replies" %
(review.review_request.id, review.id))
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(len(rsp['replies']), len(review.public_replies()))
for reply in review.public_replies():
self.assertEqual(rsp['replies'][0]['id'], reply.id)
self.assertEqual(rsp['replies'][0]['body_top'], reply.body_top)
self.assertEqual(rsp['replies'][0]['body_bottom'],
reply.body_bottom)
def testRepliesListCount(self):
"""Testing the reviewrequests/reviews/replies/count API"""
review = \
Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
self.testReplyDraftSave()
rsp = self.apiGet("reviewrequests/%s/reviews/%s/replies/count" %
(review.review_request.id, review.id))
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['count'], len(review.public_replies()))
def testNewDiff(self):
    """Testing the reviewrequests/diff/new API"""
    review_request = self.testNewReviewRequest()
    # try/finally ensures the diff file is closed even when apiPost
    # (or an assertion inside it) raises; the original leaked the
    # handle on failure.
    f = open("scmtools/testdata/svn_makefile.diff", "r")
    try:
        rsp = self.apiPost("reviewrequests/%s/diff/new" % review_request.id, {
            'path': f,
            'basedir': "/trunk",
        })
    finally:
        f.close()
    self.assertEqual(rsp['stat'], 'ok')
    # get() raises DiffSet.DoesNotExist (failing the test) if the
    # server did not actually create the diffset.
    DiffSet.objects.get(pk=rsp['diffset_id'])
def testNewDiffInvalidFormData(self):
"""Testing the reviewrequests/diff/new API with Invalid Form Data"""
review_request = self.testNewReviewRequest()
rsp = self.apiPost("reviewrequests/%s/diff/new" % review_request.id)
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], webapi.INVALID_FORM_DATA.code)
self.assert_('path' in rsp['fields'])
self.assert_('basedir' in rsp['fields'])
def testNewScreenshot(self):
    """Testing the reviewrequests/screenshot/new API"""
    review_request = self.testNewReviewRequest()
    # Fix: the PNG must be opened in binary mode ("rb"); text mode
    # corrupts binary data on Windows.  try/finally ensures the handle
    # is closed even if the POST or an assertion raises.
    f = open("htdocs/images/trophy.png", "rb")
    try:
        rsp = self.apiPost("reviewrequests/%s/screenshot/new" %
                           review_request.id, {
            'path': f,
        })
    finally:
        f.close()
    self.assertEqual(rsp['stat'], 'ok')
    # get() raises Screenshot.DoesNotExist (failing the test) if the
    # screenshot was not created server-side.
    Screenshot.objects.get(pk=rsp['screenshot_id'])
def testDiffCommentsSet(self):
"""Testing the reviewrequests/diff/file/line/comments set API"""
comment_text = "This is a test comment."
review_request = ReviewRequest.objects.public()[0]
review_request.review_set = []
diffset = review_request.diffset_history.diffset_set.latest()
filediff = diffset.files.all()[0]
rsp = self.apiPost(
"reviewrequests/%s/diff/%s/file/%s/line/%s/comments" %
(review_request.id, diffset.revision, filediff.id, 10),
{
'action': 'set',
'text': comment_text,
'num_lines': 5,
}
)
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(len(rsp['comments']), 1)
self.assertEqual(rsp['comments'][0]['text'], comment_text)
def testDiffCommentsDelete(self):
    """Testing the reviewrequests/diff/file/line/comments delete API"""
    # Create the comment that will be deleted below.  (The unused
    # local comment_text from the original has been removed.)
    self.testDiffCommentsSet()
    review_request = ReviewRequest.objects.public()[0]
    diffset = review_request.diffset_history.diffset_set.latest()
    filediff = diffset.files.all()[0]
    rsp = self.apiPost(
        "reviewrequests/%s/diff/%s/file/%s/line/%s/comments" %
        (review_request.id, diffset.revision, filediff.id, 10),
        {
            'action': 'delete',
            'num_lines': 5,
        }
    )
    self.assertEqual(rsp['stat'], 'ok')
    # The comment created by testDiffCommentsSet should now be gone.
    self.assertEqual(len(rsp['comments']), 0)
def testDiffCommentsList(self):
"""Testing the reviewrequests/diff/file/line/comments list API"""
self.testDiffCommentsSet()
review_request = ReviewRequest.objects.public()[0]
diffset = review_request.diffset_history.diffset_set.latest()
filediff = diffset.files.all()[0]
rsp = self.apiGet(
"reviewrequests/%s/diff/%s/file/%s/line/%s/comments" %
(review_request.id, diffset.revision, filediff.id, 10))
self.assertEqual(rsp['stat'], 'ok')
comments = Comment.objects.filter(filediff=filediff)
self.assertEqual(len(rsp['comments']), comments.count())
for i in range(0, len(rsp['comments'])):
self.assertEqual(rsp['comments'][i]['text'], comments[i].text)
These tests achieve 91% line coverage of the webapi module.
import os
import unittest
from django.contrib.auth.models import User, Permission
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from django.utils import simplejson
from djblets.util.testing import TagTest
import reviewboard.webapi.json as webapi
from reviewboard.diffviewer.models import DiffSet, DiffSetHistory
from reviewboard.reviews.models import Group, ReviewRequest, \
ReviewRequestDraft, Review, \
Comment, Screenshot, ScreenshotComment
from reviewboard.scmtools.models import Repository, Tool
class WebAPITests(TestCase):
"""Testing the webapi support."""
fixtures = ['test_users', 'test_reviewrequests', 'test_scmtools']
def setUp(self):
    """Set up a Subversion repository and log in as the test user.

    Runs before each test.  Creates a Repository row pointing at the
    on-disk SVN test repository shipped under scmtools/testdata, then
    authenticates the Django test client as "grumpy" (one of the
    users in the test_users fixture).
    """
    # Path is resolved relative to this file so tests work from any cwd.
    svn_repo_path = os.path.join(os.path.dirname(__file__),
                                 '../scmtools/testdata/svn_repo')
    self.repository = Repository(name='Subversion SVN',
                                 path='file://' + svn_repo_path,
                                 tool=Tool.objects.get(name='Subversion'))
    self.repository.save()
    self.client.login(username="grumpy", password="grumpy")
    # Keep the User object handy for permission/starring assertions.
    self.user = User.objects.get(username="grumpy")
def tearDown(self):
    """Log the test client out after each test."""
    self.client.logout()
def apiGet(self, path, query={}):
response = self.client.get("/api/json/%s/" % path, query)
self.assertEqual(response.status_code, 200)
rsp = simplejson.loads(response.content)
print "Response: %s" % rsp
return rsp
def apiPost(self, path, query={}):
print "Posting to /api/json/%s/" % path
response = self.client.post("/api/json/%s/" % path, query)
self.assertEqual(response.status_code, 200)
rsp = simplejson.loads(response.content)
print "Response: %s" % rsp
return rsp
def testRepositoryList(self):
    """Testing the repositories API"""
    rsp = self.apiGet("repositories")
    # Every repository in the database should be listed.
    expected_count = Repository.objects.count()
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['repositories']), expected_count)
def testUserList(self):
    """Testing the users API"""
    rsp = self.apiGet("users")
    # With no query, all users should come back.
    expected_count = User.objects.count()
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['users']), expected_count)
def testUserListQuery(self):
    """Testing the users API with custom query"""
    rsp = self.apiGet("users", {'query': 'gru'})
    self.assertEqual(rsp['stat'], 'ok')
    # Only "grumpy" matches the "gru" prefix in the fixtures.
    matched = rsp['users']
    self.assertEqual(len(matched), 1)
def testGroupList(self):
    """Testing the groups API"""
    rsp = self.apiGet("groups")
    # With no query, all review groups should come back.
    expected_count = Group.objects.count()
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['groups']), expected_count)
def testGroupListQuery(self):
    """Testing the groups API with custom query"""
    rsp = self.apiGet("groups", {'query': 'dev'})
    self.assertEqual(rsp['stat'], 'ok')
    # Only "devgroup" matches the "dev" prefix in the fixtures.
    matched = rsp['groups']
    self.assertEqual(len(matched), 1)
def testGroupStar(self):
    """Testing the groups/star API"""
    rsp = self.apiGet("groups/devgroup/star")
    self.assertEqual(rsp['stat'], 'ok')
    # The group must now appear in the user's starred set.
    starred = self.user.get_profile().starred_groups.all()
    self.assert_(Group.objects.get(name="devgroup") in starred)
def testGroupStarDoesNotExist(self):
    """Testing the groups/star API with Does Not Exist error"""
    # "invalidgroup" is not in the fixtures.
    rsp = self.apiGet("groups/invalidgroup/star")
    self.assertEqual(rsp['stat'], 'fail')
    error_code = rsp['err']['code']
    self.assertEqual(error_code, webapi.DOES_NOT_EXIST.code)
def testGroupUnstar(self):
    """Testing the groups/unstar API"""
    # Star the group first so there is something to remove.
    self.testGroupStar()
    rsp = self.apiGet("groups/devgroup/unstar")
    self.assertEqual(rsp['stat'], 'ok')
    starred = self.user.get_profile().starred_groups.all()
    self.assert_(Group.objects.get(name="devgroup") not in starred)
def testGroupUnstarDoesNotExist(self):
    """Testing the groups/unstar API with Does Not Exist error"""
    # "invalidgroup" is not in the fixtures.
    rsp = self.apiGet("groups/invalidgroup/unstar")
    self.assertEqual(rsp['stat'], 'fail')
    error_code = rsp['err']['code']
    self.assertEqual(error_code, webapi.DOES_NOT_EXIST.code)
def testReviewRequestList(self):
    """Testing the reviewrequests/all API"""
    rsp = self.apiGet("reviewrequests/all")
    self.assertEqual(rsp['stat'], 'ok')
    # Default listing shows only public, pending review requests.
    public_count = ReviewRequest.objects.public().count()
    self.assertEqual(len(rsp['review_requests']), public_count)
def testReviewRequestListWithStatus(self):
    """Testing the reviewrequests/all API with custom status"""
    # 'submitted' in the querystring maps to status code 'S'.
    rsp = self.apiGet("reviewrequests/all", {'status': 'submitted'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.public(status='S').count())
    # 'discarded' maps to status code 'D'.
    rsp = self.apiGet("reviewrequests/all", {'status': 'discarded'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.public(status='D').count())
    # 'all' disables status filtering entirely (status=None).
    rsp = self.apiGet("reviewrequests/all", {'status': 'all'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.public(status=None).count())
def testReviewRequestListCount(self):
    """Testing the reviewrequests/all/count API"""
    rsp = self.apiGet("reviewrequests/all/count")
    self.assertEqual(rsp['stat'], 'ok')
    expected = ReviewRequest.objects.public().count()
    self.assertEqual(rsp['count'], expected)
def testReviewRequestsToGroup(self):
    """Testing the reviewrequests/to/group API"""
    rsp = self.apiGet("reviewrequests/to/group/devgroup")
    self.assertEqual(rsp['stat'], 'ok')
    expected = ReviewRequest.objects.to_group("devgroup").count()
    self.assertEqual(len(rsp['review_requests']), expected)
def testReviewRequestsToGroupWithStatus(self):
    """Testing the reviewrequests/to/group API with custom status"""
    # 'submitted' in the querystring maps to status code 'S'.
    rsp = self.apiGet("reviewrequests/to/group/devgroup",
                      {'status': 'submitted'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.to_group("devgroup", status='S').count())
    # 'discarded' maps to status code 'D'.
    rsp = self.apiGet("reviewrequests/to/group/devgroup",
                      {'status': 'discarded'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.to_group("devgroup", status='D').count())
def testReviewRequestsToGroupCount(self):
    """Testing the reviewrequests/to/group/count API"""
    rsp = self.apiGet("reviewrequests/to/group/devgroup/count")
    self.assertEqual(rsp['stat'], 'ok')
    expected = ReviewRequest.objects.to_group("devgroup").count()
    self.assertEqual(rsp['count'], expected)
def testReviewRequestsToUser(self):
    """Testing the reviewrequests/to/user API"""
    rsp = self.apiGet("reviewrequests/to/user/grumpy")
    self.assertEqual(rsp['stat'], 'ok')
    # Includes requests targeting grumpy directly or via a group.
    expected = ReviewRequest.objects.to_user("grumpy").count()
    self.assertEqual(len(rsp['review_requests']), expected)
def testReviewRequestsToUserWithStatus(self):
    """Testing the reviewrequests/to/user API with custom status"""
    # 'submitted' in the querystring maps to status code 'S'.
    rsp = self.apiGet("reviewrequests/to/user/grumpy",
                      {'status': 'submitted'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.to_user("grumpy", status='S').count())
    # 'discarded' maps to status code 'D'.
    rsp = self.apiGet("reviewrequests/to/user/grumpy",
                      {'status': 'discarded'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.to_user("grumpy", status='D').count())
def testReviewRequestsToUserCount(self):
    """Testing the reviewrequests/to/user/count API"""
    rsp = self.apiGet("reviewrequests/to/user/grumpy/count")
    self.assertEqual(rsp['stat'], 'ok')
    expected = ReviewRequest.objects.to_user("grumpy").count()
    self.assertEqual(rsp['count'], expected)
def testReviewRequestsToUserDirectly(self):
    """Testing the reviewrequests/to/user/directly API"""
    rsp = self.apiGet("reviewrequests/to/user/doc/directly")
    self.assertEqual(rsp['stat'], 'ok')
    # "directly" excludes requests targeted only via a group.
    expected = ReviewRequest.objects.to_user_directly("doc").count()
    self.assertEqual(len(rsp['review_requests']), expected)
def testReviewRequestsToUserDirectlyWithStatus(self):
    """Testing the reviewrequests/to/user/directly API with custom status"""
    # 'submitted' in the querystring maps to status code 'S'.
    rsp = self.apiGet("reviewrequests/to/user/doc/directly",
                      {'status': 'submitted'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.to_user_directly("doc", status='S').count())
    # 'discarded' maps to status code 'D'.
    rsp = self.apiGet("reviewrequests/to/user/doc/directly",
                      {'status': 'discarded'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.to_user_directly("doc", status='D').count())
def testReviewRequestsToUserDirectlyCount(self):
    """Testing the reviewrequests/to/user/directly/count API"""
    rsp = self.apiGet("reviewrequests/to/user/doc/directly/count")
    self.assertEqual(rsp['stat'], 'ok')
    expected = ReviewRequest.objects.to_user_directly("doc").count()
    self.assertEqual(rsp['count'], expected)
def testReviewRequestsFromUser(self):
    """Testing the reviewrequests/from/user API"""
    rsp = self.apiGet("reviewrequests/from/user/grumpy")
    self.assertEqual(rsp['stat'], 'ok')
    # Requests submitted by grumpy.
    expected = ReviewRequest.objects.from_user("grumpy").count()
    self.assertEqual(len(rsp['review_requests']), expected)
def testReviewRequestsFromUserWithStatus(self):
    """Testing the reviewrequests/from/user API with custom status"""
    # 'submitted' in the querystring maps to status code 'S'.
    rsp = self.apiGet("reviewrequests/from/user/grumpy",
                      {'status': 'submitted'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.from_user("grumpy", status='S').count())
    # 'discarded' maps to status code 'D'.
    rsp = self.apiGet("reviewrequests/from/user/grumpy",
                      {'status': 'discarded'})
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(len(rsp['review_requests']),
                     ReviewRequest.objects.from_user("grumpy", status='D').count())
def testReviewRequestsFromUserCount(self):
    """Testing the reviewrequests/from/user/count API"""
    rsp = self.apiGet("reviewrequests/from/user/grumpy/count")
    self.assertEqual(rsp['stat'], 'ok')
    expected = ReviewRequest.objects.from_user("grumpy").count()
    self.assertEqual(rsp['count'], expected)
def testNewReviewRequest(self):
    """Testing the reviewrequests/new API"""
    # Create a review request against the repository built in setUp,
    # identified by its path rather than its database ID.
    rsp = self.apiPost("reviewrequests/new", {
        'repository_path': self.repository.path,
    })
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(rsp['review_request']['repository']['id'],
                     self.repository.id)
    # See if we can fetch this. Also return it for use in other
    # unit tests (e.g. testNewDiff / testNewScreenshot call this
    # directly to obtain a fresh review request).
    return ReviewRequest.objects.get(pk=rsp['review_request']['id'])
def testReviewRequest(self):
    """Testing the reviewrequests/<id> API"""
    review_request = ReviewRequest.objects.public()[0]
    rsp = self.apiGet("reviewrequests/%s" % review_request.id)
    self.assertEqual(rsp['stat'], 'ok')
    # The payload should describe the same review request we fetched.
    returned = rsp['review_request']
    self.assertEqual(returned['id'], review_request.id)
    self.assertEqual(returned['summary'], review_request.summary)
def testReviewRequestPermissionDenied(self):
    """Testing the reviewrequests/<id> API with Permission Denied error"""
    # Pick a non-public review request submitted by someone other
    # than the logged-in user; fetching it must be denied.
    review_request = ReviewRequest.objects.filter(public=False).\
        exclude(submitter=self.user)[0]
    rsp = self.apiGet("reviewrequests/%s" % review_request.id)
    self.assertEqual(rsp['stat'], 'fail')
    self.assertEqual(rsp['err']['code'], webapi.PERMISSION_DENIED.code)
def testReviewRequestByChangenum(self):
    """Testing the reviewrequests/repository/changenum API"""
    # Use any fixture review request that has a change number, and
    # look it up through its repository + changenum instead of its ID.
    review_request = \
        ReviewRequest.objects.filter(changenum__isnull=False)[0]
    rsp = self.apiGet("reviewrequests/repository/%s/changenum/%s" %
                      (review_request.repository.id,
                       review_request.changenum))
    self.assertEqual(rsp['stat'], 'ok')
    self.assertEqual(rsp['review_request']['id'], review_request.id)
    self.assertEqual(rsp['review_request']['summary'],
                     review_request.summary)
    self.assertEqual(rsp['review_request']['changenum'],
                     review_request.changenum)
def testReviewRequestStar(self):
    """Testing the reviewrequests/star API"""
    review_request = ReviewRequest.objects.public()[0]
    rsp = self.apiGet("reviewrequests/%s/star" % review_request.id)
    self.assertEqual(rsp['stat'], 'ok')
    # The request must now appear in the user's starred set.
    starred = self.user.get_profile().starred_review_requests.all()
    self.assert_(review_request in starred)
def testReviewRequestStarDoesNotExist(self):
    """Testing the reviewrequests/star API with Does Not Exist error"""
    # ID 999 is not present in the fixtures.
    rsp = self.apiGet("reviewrequests/999/star")
    self.assertEqual(rsp['stat'], 'fail')
    error_code = rsp['err']['code']
    self.assertEqual(error_code, webapi.DOES_NOT_EXIST.code)
def testReviewRequestUnstar(self):
    """Testing the reviewrequests/unstar API"""
    # Star it first so there is something to remove.
    self.testReviewRequestStar()
    review_request = ReviewRequest.objects.public()[0]
    rsp = self.apiGet("reviewrequests/%s/unstar" % review_request.id)
    self.assertEqual(rsp['stat'], 'ok')
    starred = self.user.get_profile().starred_review_requests.all()
    self.assert_(review_request not in starred)
def testReviewRequestUnstarWithDoesNotExist(self):
    """Testing the reviewrequests/unstar API with Does Not Exist error"""
    # ID 999 is not present in the fixtures.
    rsp = self.apiGet("reviewrequests/999/unstar")
    self.assertEqual(rsp['stat'], 'fail')
    error_code = rsp['err']['code']
    self.assertEqual(error_code, webapi.DOES_NOT_EXIST.code)
def testReviewRequestDelete(self):
    """Testing the reviewrequests/delete API"""
    # Deleting requires the delete_reviewrequest permission; grant it
    # to the logged-in user before exercising the endpoint.
    self.user.user_permissions.add(
        Permission.objects.get(codename='delete_reviewrequest'))
    self.user.save()
    self.assert_(self.user.has_perm('reviews.delete_reviewrequest'))
    review_request_id = \
        ReviewRequest.objects.from_user(self.user.username)[0].id
    rsp = self.apiGet("reviewrequests/%s/delete" % review_request_id)
    self.assertEqual(rsp['stat'], 'ok')
    # The row should be gone from the database.
    self.assertRaises(ReviewRequest.DoesNotExist,
                      ReviewRequest.objects.get, pk=review_request_id)
def testReviewRequestDeletePermissionDenied(self):
"""Testing the reviewrequests/delete API with Permission Denied error"""
review_request_id = \
ReviewRequest.objects.exclude(submitter=self.user)[0].id
rsp = self.apiGet("reviewrequests/%s/delete" % review_request_id)
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], webapi.PERMISSION_DENIED.code)
def testReviewRequestDeleteDoesNotExist(self):
"""Testing the reviewrequests/delete API with Does Not Exist error"""
self.user.user_permissions.add(
Permission.objects.get(codename='delete_reviewrequest'))
self.user.save()
self.assert_(self.user.has_perm('reviews.delete_reviewrequest'))
rsp = self.apiGet("reviewrequests/999/delete")
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], webapi.DOES_NOT_EXIST.code)
    def testReviewRequestDraftSet(self):
        """Testing the reviewrequests/draft/set API"""
        summary = "My Summary"
        description = "My Description"
        testing_done = "My Testing Done"
        branch = "My Branch"
        bugs = ""
        review_request_id = \
            ReviewRequest.objects.from_user(self.user.username)[0].id
        rsp = self.apiPost("reviewrequests/%s/draft/set" % review_request_id, {
            'summary': summary,
            'description': description,
            'testing_done': testing_done,
            'branch': branch,
            'bugs_closed': bugs,
        })
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['draft']['summary'], summary)
        self.assertEqual(rsp['draft']['description'], description)
        self.assertEqual(rsp['draft']['testing_done'], testing_done)
        self.assertEqual(rsp['draft']['branch'], branch)
        # An empty bugs_closed string must come back as an empty list.
        self.assertEqual(rsp['draft']['bugs_closed'], [])
        # Also verify the draft was actually persisted to the database.
        draft = ReviewRequestDraft.objects.get(pk=rsp['draft']['id'])
        self.assertEqual(draft.summary, summary)
        self.assertEqual(draft.description, description)
        self.assertEqual(draft.testing_done, testing_done)
        self.assertEqual(draft.branch, branch)
        self.assertEqual(draft.get_bug_list(), [])
    def testReviewRequestDraftSetField(self):
        """Testing the reviewrequests/draft/set/<field> API"""
        bugs_closed = '123,456'
        review_request_id = \
            ReviewRequest.objects.from_user(self.user.username)[0].id
        rsp = self.apiPost("reviewrequests/%s/draft/set/bugs_closed" %
                           review_request_id, {
            'value': bugs_closed,
        })
        self.assertEqual(rsp['stat'], 'ok')
        # The comma-separated string comes back split into a list of IDs.
        self.assertEqual(rsp['bugs_closed'], bugs_closed.split(","))
    def testReviewRequestDraftSetFieldInvalidName(self):
        """Testing the reviewrequests/draft/set/<field> API with invalid name"""
        review_request_id = \
            ReviewRequest.objects.from_user(self.user.username)[0].id
        rsp = self.apiPost("reviewrequests/%s/draft/set/foobar" %
                           review_request_id, {
            'value': 'foo',
        })
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], webapi.INVALID_ATTRIBUTE.code)
        # The failing attribute name is echoed back in the response.
        self.assertEqual(rsp['attribute'], 'foobar')
    def testReviewRequestDraftSave(self):
        """Testing the reviewrequests/draft/save API"""
        # Set some data first.
        self.testReviewRequestDraftSet()
        review_request_id = \
            ReviewRequest.objects.from_user(self.user.username)[0].id
        rsp = self.apiPost("reviewrequests/%s/draft/save" % review_request_id)
        self.assertEqual(rsp['stat'], 'ok')
        # Saving must copy the draft fields onto the review request itself.
        # (Expected values match those set by testReviewRequestDraftSet.)
        review_request = ReviewRequest.objects.get(pk=review_request_id)
        self.assertEqual(review_request.summary, "My Summary")
        self.assertEqual(review_request.description, "My Description")
        self.assertEqual(review_request.testing_done, "My Testing Done")
        self.assertEqual(review_request.branch, "My Branch")
    def testReviewRequestDraftDiscard(self):
        """Testing the reviewrequests/draft/discard API"""
        review_request = ReviewRequest.objects.from_user(self.user.username)[0]
        # Remember the original field values so we can check they survive.
        summary = review_request.summary
        description = review_request.description
        # Set some data.
        self.testReviewRequestDraftSet()
        rsp = self.apiPost("reviewrequests/%s/draft/discard" %
                           review_request.id)
        self.assertEqual(rsp['stat'], 'ok')
        # Discarding must leave the review request untouched.
        review_request = ReviewRequest.objects.get(pk=review_request.id)
        self.assertEqual(review_request.summary, summary)
        self.assertEqual(review_request.description, description)
    def testReviewDraftSave(self):
        """Testing the reviewrequests/reviews/draft/save API"""
        body_top = "My Body Top"
        body_bottom = "My Body Bottom"
        ship_it = True
        # Clear out any reviews on the first review request we find.
        review_request = ReviewRequest.objects.public()[0]
        review_request.review_set = []
        review_request.save()
        # NOTE(review): the POST field is 'shipit' while the model field is
        # 'ship_it' -- presumably the API's expected spelling; confirm.
        rsp = self.apiPost("reviewrequests/%s/reviews/draft/save" %
                           review_request.id, {
            'shipit': ship_it,
            'body_top': body_top,
            'body_bottom': body_bottom,
        })
        reviews = review_request.review_set.filter(user=self.user)
        self.assertEqual(len(reviews), 1)
        review = reviews[0]
        self.assertEqual(review.ship_it, ship_it)
        self.assertEqual(review.body_top, body_top)
        self.assertEqual(review.body_bottom, body_bottom)
        # A saved (not published) draft review stays non-public.
        self.assertEqual(review.public, False)
    def testReviewDraftPublish(self):
        """Testing the reviewrequests/reviews/draft/publish API"""
        body_top = "My Body Top"
        body_bottom = "My Body Bottom"
        ship_it = True
        # Clear out any reviews on the first review request we find.
        review_request = ReviewRequest.objects.public()[0]
        review_request.review_set = []
        review_request.save()
        rsp = self.apiPost("reviewrequests/%s/reviews/draft/publish" %
                           review_request.id, {
            'shipit': ship_it,
            'body_top': body_top,
            'body_bottom': body_bottom,
        })
        self.assertEqual(rsp['stat'], 'ok')
        reviews = review_request.review_set.filter(user=self.user)
        self.assertEqual(len(reviews), 1)
        review = reviews[0]
        self.assertEqual(review.ship_it, ship_it)
        self.assertEqual(review.body_top, body_top)
        self.assertEqual(review.body_bottom, body_bottom)
        # Publishing makes the review public.
        self.assertEqual(review.public, True)
    def testReviewDraftDelete(self):
        """Testing the reviewrequests/reviews/draft/delete API"""
        # Set up the draft to delete.
        self.testReviewDraftSave()
        review_request = ReviewRequest.objects.public()[0]
        rsp = self.apiPost("reviewrequests/%s/reviews/draft/delete" %
                           review_request.id)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(review_request.review_set.count(), 0)
    def testReviewDraftDeleteDoesNotExist(self):
        """Testing the reviewrequests/reviews/draft/delete API with Does Not Exist error"""
        # Set up the draft to delete
        # (publish leaves no draft behind, so the delete below must fail).
        self.testReviewDraftPublish()
        review_request = ReviewRequest.objects.public()[0]
        rsp = self.apiPost("reviewrequests/%s/reviews/draft/delete" %
                           review_request.id)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], webapi.DOES_NOT_EXIST.code)
    def testReviewDraftComments(self):
        """Testing the reviewrequests/reviews/draft/comments API"""
        # TODO: this test is stubbed out; the commented code below is
        # incomplete (note the unbalanced parens on the last line).
        #review_request = \
        #    ReviewRequest.objects.public().filter(review__comments__pk__gt=0)[0]
        #rsp = self.apiGet("reviewrequests/%s/reviews/draft/comments" %
        #                  review_request.id)
        #self.assertEqual(rsp['stat'], 'ok')
        #self.assertEqual(len(rsp['comments'],
        pass
    def testReviewsList(self):
        """Testing the reviewrequests/reviews API"""
        # Pick a review request known to have at least one review.
        review_request = Review.objects.all()[0].review_request
        rsp = self.apiGet("reviewrequests/%s/reviews" % review_request.id)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['reviews']), review_request.review_set.count())
    def testReviewsListCount(self):
        """Testing the reviewrequests/reviews/count API"""
        review_request = Review.objects.all()[0].review_request
        rsp = self.apiGet("reviewrequests/%s/reviews/count" %
                          review_request.id)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['reviews'], review_request.review_set.count())
    def testReviewCommentsList(self):
        """Testing the reviewrequests/reviews/comments API"""
        # Pick a review known to have at least one comment.
        review = Review.objects.filter(comments__pk__gt=0)[0]
        rsp = self.apiGet("reviewrequests/%s/reviews/%s/comments" %
                          (review.review_request.id, review.id))
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['comments']), review.comments.count())
    def testReviewCommentsCount(self):
        """Testing the reviewrequests/reviews/comments/count API"""
        review = Review.objects.filter(comments__pk__gt=0)[0]
        rsp = self.apiGet("reviewrequests/%s/reviews/%s/comments/count" %
                          (review.review_request.id, review.id))
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['count'], review.comments.count())
    def testReplyDraftComment(self):
        """Testing the reviewrequests/reviews/replies/draft API with comment"""
        comment_text = "My Comment Text"
        comment = Comment.objects.all()[0]
        review = comment.review_set.get()
        rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft" %
                           (review.review_request.id, review.id), {
            'type': 'comment',
            'id': comment.id,
            'value': comment_text
        })
        self.assertEqual(rsp['stat'], 'ok')
        # The reply comment is created as a new Comment row.
        reply_comment = Comment.objects.get(pk=rsp['comment']['id'])
        self.assertEqual(reply_comment.text, comment_text)
    def testReplyDraftBodyTop(self):
        """Testing the reviewrequests/reviews/replies/draft API with body_top"""
        body_top = 'My Body Top'
        # Reply to a top-level (non-reply) public review.
        review = \
            Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
        rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft" %
                           (review.review_request.id, review.id), {
            'type': 'body_top',
            'value': body_top,
        })
        self.assertEqual(rsp['stat'], 'ok')
        reply = Review.objects.get(pk=rsp['reply']['id'])
        self.assertEqual(reply.body_top, body_top)
    def testReplyDraftBodyBottom(self):
        """Testing the reviewrequests/reviews/replies/draft API with body_bottom"""
        body_bottom = 'My Body Bottom'
        review = \
            Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
        rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft" %
                           (review.review_request.id, review.id), {
            'type': 'body_bottom',
            'value': body_bottom,
        })
        self.assertEqual(rsp['stat'], 'ok')
        reply = Review.objects.get(pk=rsp['reply']['id'])
        self.assertEqual(reply.body_bottom, body_bottom)
    def testReplyDraftSave(self):
        """Testing the reviewrequests/reviews/replies/draft/save API"""
        review = \
            Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
        # Create a draft reply first.
        rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft" %
                           (review.review_request.id, review.id), {
            'type': 'body_top',
            'value': 'Test',
        })
        self.assertEqual(rsp['stat'], 'ok')
        reply_id = rsp['reply']['id']
        rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft/save" %
                           (review.review_request.id, review.id))
        self.assertEqual(rsp['stat'], 'ok')
        # Saving a reply draft publishes it.
        reply = Review.objects.get(pk=reply_id)
        self.assertEqual(reply.public, True)
    def testReplyDraftDiscard(self):
        """Testing the reviewrequests/reviews/replies/draft/discard API"""
        review = \
            Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
        # Create a draft reply to discard.
        rsp = self.apiPost("reviewrequests/%s/reviews/%s/replies/draft" %
                           (review.review_request.id, review.id), {
            'type': 'body_top',
            'value': 'Test',
        })
        self.assertEqual(rsp['stat'], 'ok')
        reply_id = rsp['reply']['id']
        rsp = self.apiPost(
            "reviewrequests/%s/reviews/%s/replies/draft/discard" %
            (review.review_request.id, review.id))
        self.assertEqual(rsp['stat'], 'ok')
        # Discarding deletes the draft reply entirely.
        self.assertEqual(Review.objects.filter(pk=reply_id).count(), 0)
    def testRepliesList(self):
        """Testing the reviewrequests/reviews/replies API"""
        review = \
            Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
        # Ensure there is at least one public reply.
        self.testReplyDraftSave()
        rsp = self.apiGet("reviewrequests/%s/reviews/%s/replies" %
                          (review.review_request.id, review.id))
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['replies']), len(review.public_replies()))
        for reply in review.public_replies():
            # NOTE(review): always indexes rsp['replies'][0]; with more than
            # one reply this re-checks the first entry only -- confirm intent.
            self.assertEqual(rsp['replies'][0]['id'], reply.id)
            self.assertEqual(rsp['replies'][0]['body_top'], reply.body_top)
            self.assertEqual(rsp['replies'][0]['body_bottom'],
                             reply.body_bottom)
    def testRepliesListCount(self):
        """Testing the reviewrequests/reviews/replies/count API"""
        review = \
            Review.objects.filter(base_reply_to__isnull=True, public=True)[0]
        self.testReplyDraftSave()
        rsp = self.apiGet("reviewrequests/%s/reviews/%s/replies/count" %
                          (review.review_request.id, review.id))
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['count'], len(review.public_replies()))
    def testNewDiff(self, review_request=None):
        """Testing the reviewrequests/diff/new API"""
        # Optionally reuse an existing review request (used by the
        # interdiff tests, which upload two diffs to the same request).
        if review_request is None:
            review_request = self.testNewReviewRequest()
        f = open("scmtools/testdata/svn_makefile.diff", "r")
        rsp = self.apiPost("reviewrequests/%s/diff/new" % review_request.id, {
            'path': f,
            'basedir': "/trunk",
        })
        f.close()
        self.assertEqual(rsp['stat'], 'ok')
        # Return this so it can be used in other tests.
        return DiffSet.objects.get(pk=rsp['diffset_id'])
    def testNewDiffInvalidFormData(self):
        """Testing the reviewrequests/diff/new API with Invalid Form Data"""
        review_request = self.testNewReviewRequest()
        # POST with no form data at all: both fields must be flagged.
        rsp = self.apiPost("reviewrequests/%s/diff/new" % review_request.id)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], webapi.INVALID_FORM_DATA.code)
        self.assert_('path' in rsp['fields'])
        self.assert_('basedir' in rsp['fields'])
    def testNewScreenshot(self):
        """Testing the reviewrequests/screenshot/new API"""
        review_request = self.testNewReviewRequest()
        f = open("htdocs/images/trophy.png", "r")
        rsp = self.apiPost("reviewrequests/%s/screenshot/new" %
                           review_request.id, {
            'path': f,
        })
        f.close()
        self.assertEqual(rsp['stat'], 'ok')
        # Return the screenshot so we can use it in other tests.
        return Screenshot.objects.get(pk=rsp['screenshot_id'])
    def testDiffCommentsSet(self):
        """Testing the reviewrequests/diff/file/line/comments set API"""
        comment_text = "This is a test comment."
        review_request = ReviewRequest.objects.public()[0]
        # Clear out any existing reviews first.
        review_request.review_set = []
        diffset = review_request.diffset_history.diffset_set.latest()
        filediff = diffset.files.all()[0]
        # Set a 5-line comment anchored at line 10 of the first file.
        rsp = self.apiPost(
            "reviewrequests/%s/diff/%s/file/%s/line/%s/comments" %
            (review_request.id, diffset.revision, filediff.id, 10),
            {
                'action': 'set',
                'text': comment_text,
                'num_lines': 5,
            }
        )
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['comments']), 1)
        self.assertEqual(rsp['comments'][0]['text'], comment_text)
    def testDiffCommentsDelete(self):
        """Testing the reviewrequests/diff/file/line/comments delete API"""
        comment_text = "This is a test comment."
        # Create the comment to be deleted.
        self.testDiffCommentsSet()
        review_request = ReviewRequest.objects.public()[0]
        diffset = review_request.diffset_history.diffset_set.latest()
        filediff = diffset.files.all()[0]
        rsp = self.apiPost(
            "reviewrequests/%s/diff/%s/file/%s/line/%s/comments" %
            (review_request.id, diffset.revision, filediff.id, 10),
            {
                'action': 'delete',
                'num_lines': 5,
            }
        )
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['comments']), 0)
    def testDiffCommentsList(self):
        """Testing the reviewrequests/diff/file/line/comments list API"""
        # Create a comment to list.
        self.testDiffCommentsSet()
        review_request = ReviewRequest.objects.public()[0]
        diffset = review_request.diffset_history.diffset_set.latest()
        filediff = diffset.files.all()[0]
        rsp = self.apiGet(
            "reviewrequests/%s/diff/%s/file/%s/line/%s/comments" %
            (review_request.id, diffset.revision, filediff.id, 10))
        self.assertEqual(rsp['stat'], 'ok')
        # Response must mirror the database contents, in order.
        comments = Comment.objects.filter(filediff=filediff)
        self.assertEqual(len(rsp['comments']), comments.count())
        for i in range(0, len(rsp['comments'])):
            self.assertEqual(rsp['comments'][i]['text'], comments[i].text)
    def testInterDiffCommentsSet(self):
        """Testing the reviewrequests/diff/file/line/comments interdiff set API"""
        comment_text = "This is a test comment."
        # Create a review request for this test.
        review_request = self.testNewReviewRequest()
        # Upload the first diff and publish the draft.
        diffset = self.testNewDiff(review_request)
        rsp = self.apiPost("reviewrequests/%s/draft/save" % review_request.id)
        self.assertEqual(rsp['stat'], 'ok')
        # Upload the second diff and publish the draft.
        interdiffset = self.testNewDiff(review_request)
        rsp = self.apiPost("reviewrequests/%s/draft/save" % review_request.id)
        self.assertEqual(rsp['stat'], 'ok')
        # Get the interdiffs
        filediff = diffset.files.all()[0]
        interfilediff = interdiffset.files.all()[0]
        # Interdiff comments address a revision pair and a filediff pair.
        rsp = self.apiPost(
            "reviewrequests/%s/diff/%s-%s/file/%s-%s/line/%s/comments" %
            (review_request.id, diffset.revision, interdiffset.revision,
             filediff.id, interfilediff.id, 10),
            {
                'action': 'set',
                'text': comment_text,
                'num_lines': 5,
            }
        )
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['comments']), 1)
        self.assertEqual(rsp['comments'][0]['text'], comment_text)
        # Return some information for use in other tests.
        return (review_request, diffset, interdiffset, filediff, interfilediff)
    def testInterDiffCommentsDelete(self):
        """Testing the reviewrequests/diff/file/line/comments interdiff delete API"""
        comment_text = "This is a test comment."
        # Create the interdiff comment to delete.
        review_request, diffset, interdiffset, filediff, interfilediff = \
            self.testInterDiffCommentsSet()
        rsp = self.apiPost(
            "reviewrequests/%s/diff/%s-%s/file/%s-%s/line/%s/comments" %
            (review_request.id, diffset.revision, interdiffset.revision,
             filediff.id, interfilediff.id, 10),
            {
                'action': 'delete',
                'num_lines': 5,
            }
        )
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['comments']), 0)
    def testInterDiffCommentsList(self):
        """Testing the reviewrequests/diff/file/line/comments interdiff list API"""
        review_request, diffset, interdiffset, filediff, interfilediff = \
            self.testInterDiffCommentsSet()
        rsp = self.apiGet(
            "reviewrequests/%s/diff/%s-%s/file/%s-%s/line/%s/comments" %
            (review_request.id, diffset.revision, interdiffset.revision,
             filediff.id, interfilediff.id, 10))
        self.assertEqual(rsp['stat'], 'ok')
        comments = Comment.objects.filter(filediff=filediff,
                                          interfilediff=interfilediff)
        self.assertEqual(len(rsp['comments']), comments.count())
        for i in range(0, len(rsp['comments'])):
            self.assertEqual(rsp['comments'][i]['text'], comments[i].text)
    def testScreenshotCommentsSet(self):
        """Testing the reviewrequests/s/comments set API"""
        comment_text = "This is a test comment."
        # Comment region: w x h pixels at offset (x, y); the URL encodes
        # this as "WxH+X+Y".
        x, y, w, h = (2, 2, 10, 10)
        screenshot = self.testNewScreenshot()
        review_request = screenshot.review_request.get()
        rsp = self.apiPost(
            "reviewrequests/%s/s/%s/comments/%sx%s+%s+%s" %
            (review_request.id, screenshot.id, w, h, x, y),
            {
                'action': 'set',
                'text': comment_text,
            }
        )
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['comments']), 1)
        self.assertEqual(rsp['comments'][0]['text'], comment_text)
        self.assertEqual(rsp['comments'][0]['x'], x)
        self.assertEqual(rsp['comments'][0]['y'], y)
        self.assertEqual(rsp['comments'][0]['w'], w)
        self.assertEqual(rsp['comments'][0]['h'], h)
        # Return this so it can be used in other tests.
        return ScreenshotComment.objects.get(pk=rsp['comments'][0]['id'])
    def testScreenshotCommentsDelete(self):
        """Testing the reviewrequests/s/comments delete API"""
        # Create the screenshot comment to delete.
        comment = self.testScreenshotCommentsSet()
        screenshot = comment.screenshot
        review_request = screenshot.review_request.get()
        rsp = self.apiPost(
            "reviewrequests/%s/s/%s/comments/%sx%s+%s+%s" %
            (review_request.id, screenshot.id, comment.w, comment.h,
             comment.x, comment.y),
            {
                'action': 'delete',
            }
        )
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['comments']), 0)
    def testScreenshotCommentsList(self):
        """Testing the reviewrequests/s/comments list API"""
        comment = self.testScreenshotCommentsSet()
        screenshot = comment.screenshot
        review_request = screenshot.review_request.get()
        rsp = self.apiGet(
            "reviewrequests/%s/s/%s/comments/%sx%s+%s+%s" %
            (review_request.id, screenshot.id, comment.w, comment.h,
             comment.x, comment.y))
        self.assertEqual(rsp['stat'], 'ok')
        # Response must mirror the database contents, field by field.
        comments = ScreenshotComment.objects.filter(screenshot=screenshot)
        self.assertEqual(len(rsp['comments']), comments.count())
        for i in range(0, len(rsp['comments'])):
            self.assertEqual(rsp['comments'][i]['text'], comments[i].text)
            self.assertEqual(rsp['comments'][i]['x'], comments[i].x)
            self.assertEqual(rsp['comments'][i]['y'], comments[i].y)
            self.assertEqual(rsp['comments'][i]['w'], comments[i].w)
            self.assertEqual(rsp['comments'][i]['h'], comments[i].h)
|
"""
fiberassign.fba_launch_io
=============
Utility functions for fba_launch
"""
from __future__ import absolute_import, division
# system
import os
import subprocess
import sys
import tempfile
import shutil
import re
# time
from time import time
from datetime import datetime, timedelta
#
import numpy as np
import fitsio
# astropy
from astropy.io import fits
from astropy.table import Table
from astropy.time import Time
from astropy import units
from astropy.coordinates import SkyCoord, Distance
from astropy.time import Time
# desitarget
import desitarget
from desitarget.gaiamatch import gaia_psflike
from desitarget.io import read_targets_in_tiles, write_targets, write_skies
from desitarget.mtl import inflate_ledger
from desitarget.targetmask import desi_mask, obsconditions
from desitarget.targets import set_obsconditions
from desitarget.geomask import match
# desimodel
import desimodel
from desimodel.footprint import is_point_in_desi
# desimeter
import desimeter
# fiberassign
import fiberassign
from fiberassign.scripts.assign import parse_assign, run_assign_full
from fiberassign.assign import merge_results, minimal_target_columns
from fiberassign.utils import Logger
# matplotlib
import matplotlib.pyplot as plt
from matplotlib import gridspec
import matplotlib
import matplotlib.image as mpimg
# AR default REF_EPOCH for PMRA=PMDEC=REF_EPOCH=0 objects
gaia_ref_epochs = {"dr2": 2015.5}
# AR tile radius in degrees
tile_radius_deg = 1.628
# AR approx. tile area in square degrees (flat-disk approximation, pi * r^2)
tile_area = np.pi * tile_radius_deg ** 2
def assert_isoformat_utc(time_str):
    """
    Asserts if a date formats as "YYYY-MM-DDThh:mm:ss+00:00".

    Args:
        time_str: string with a date

    Returns:
        boolean asserting if time_str formats as "YYYY-MM-DDThh:mm:ss+00:00"
    """
    # AR first requirement: the string must parse with an explicit UTC offset
    iso_fmt = "%Y-%m-%dT%H:%M:%S%z"
    try:
        datetime.strptime(time_str, iso_fmt)
    except ValueError:
        return False
    # AR second requirement: the offset must be spelled exactly "+00:00"
    # AR (strptime would also accept "+0000" or non-UTC offsets)
    return time_str.endswith("+00:00")
def get_svn_version(svn_dir):
    """
    Gets the SVN revision number of an SVN folder.

    Args:
        svn_dir: SVN folder path (string)

    Returns:
        svn_ver: SVN revision number of svn_dir (string),
            or "unknown" if svn_dir is not an svn checkout or svn is unavailable

    Notes:
        `svn_dir` can contain environment variables to expand, e.g. "$DESIMODEL/data"
    """
    cmd = ["svn", "info", "--show-item", "revision", os.path.expandvars(svn_dir)]
    try:
        svn_ver = (
            subprocess.check_output(cmd, stderr=subprocess.DEVNULL).strip().decode()
        )
    # AR CalledProcessError: svn_dir is not a working copy
    # AR OSError (incl. FileNotFoundError): the svn executable itself is
    # AR missing; previously this case escaped as an uncaught exception
    except (subprocess.CalledProcessError, OSError):
        svn_ver = "unknown"
    return svn_ver
def get_program_latest_timestamp(
    program, log=Logger.get(), step="", start=time(),
):
    """
    Get the latest timestamp for a given program from the MTL per-tile file.

    Args:
        program: ideally "dark", "bright", or "backup" (string)
                though if different will return None
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()

    Returns:
        if some entries for input program: UTC YYYY-MM-DDThh:mm:ss+00:00 formatted timestamp (string)
        else: None

    Notes:
        returns None if the per-tile MTL file does not exist or has zero
        entries for the requested program.
        TBD: currently add +1min because of a mismatch between the ledgers and the per-tile file.
        TBD: still see if a +1min or +1s is desirable
    """
    # AR check DESI_SURVEYOPS is defined
    assert_env_vars(
        required_env_vars=["DESI_SURVEYOPS"], log=log, step=step, start=start,
    )
    # AR defaults to None (returned if no file or no selected rows)
    timestamp = None
    # AR check if the per-tile file is here
    # AR no need to check the scnd-mtl-done-tiles.ecsv file,
    # AR as we restrict to a given program ([desi-survey 2434])
    fn = os.path.join(os.getenv("DESI_SURVEYOPS"), "mtl", "mtl-done-tiles.ecsv")
    if os.path.isfile(fn):
        d = Table.read(fn)
        keep = d["PROGRAM"] == program.upper()
        if keep.sum() > 0:
            d = d[keep]
            # AR taking the latest timestamp
            tm = np.unique(d["TIMESTAMP"])[-1]
            # AR does not end with +NN:MM timezone?
            # AR fix: raw string for the regex (avoids invalid-escape
            # AR DeprecationWarning on modern Python; same pattern)
            if re.search(r"\+\d{2}:\d{2}$", tm) is None:
                tm = "{}+00:00".format(tm)
            tm = datetime.strptime(tm, "%Y-%m-%dT%H:%M:%S%z")
            # AR TBD: we currently add one minute; can be removed once
            # AR TBD update is done on the desitarget side
            tm += timedelta(minutes=1)
            timestamp = tm.isoformat(timespec="seconds")
    return timestamp
def mv_write_targets_out(infn, targdir, outfn, log=Logger.get(), step="", start=time()):
    """
    Moves the file created by desitarget.io.write_targets
    and removes folder created by desitarget.io.write_targets

    Args:
        infn: filename output by desitarget.io.write_targets
        targdir: folder provided as desitarget.io.write_targets input
        outfn: desired renaming of infn
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    """
    # AR renaming
    _ = shutil.move(infn, outfn)
    log.info("{:.1f}s\t{}\trenaming {} to {}".format(time() - start, step, infn, outfn))
    # AR removing folders
    # AR normalize targdir with a trailing slash so the string replace
    # AR below strips the prefix cleanly
    if targdir[-1] != "/":
        targdir = "{}/".format(targdir)
    # AR intermediate sub-folders between targdir and infn
    # NOTE(review): assumes infn lives under targdir and uses "/" separators
    # (POSIX paths) -- confirm with callers
    tmpdirs = infn.replace(targdir, "").split("/")[:-1]
    # AR remove the deepest folder first; os.rmdir only removes empty
    # AR directories, so this raises if anything else was written alongside
    for i in range(len(tmpdirs))[::-1]:
        os.rmdir(os.path.join(*[targdir] + tmpdirs[: i + 1]))
def get_nowradec(ra, dec, pmra, pmdec, parallax, ref_year, pmtime_utc_str, scnd=False):
    """
    Apply proper motion correction

    Args:
        ra: numpy array of RAs (deg)
        dec: numpy array of DECs (deg)
        pmra: numpy array of projected proper-motion in RA (mas/year)
        pmdec: numpy array of projected proper-motion in DEC (mas/year)
        parallax: numpy array of parallax (mas)
        ref_year: reference epoch (e.g. 2015.5 for Gaia/DR2)
        pmtime_utc_str: date to update position to (format: YYYY-MM-DDThh:mm:ss+00:00)
        scnd (optional, defaults to False): secondary target? (boolean; if True, sets parallax=0)

    Returns:
        ra: numpy array of RAs updated to pmtime_utc_str (deg)
        dec: numpy array of DECs updated to pmtime_utc_str (deg)

    Notes:
        Courtesy of DL; adapted from legacypipe.survey
        Originally named radec_at_mjd()
    """
    # AR pmtime_utc : UTC time of the new ref_epoch; "%Y-%m-%dT%H:%M:%S%z", e.g. "2021-04-21T00:00:00+00:00"
    # AR scnd=True -> parallax is set to 0, i.e. not used
    """
    Units:
    - matches Gaia DR1/DR2
    - pmra,pmdec are in mas/yr.
      pmra is in angular speed (ie, has a cos(dec) factor)
    - parallax is in mas.
    Returns: RA,Dec
    """
    equinox = 53084.28  # mjd of the spring equinox in 2004
    equinox_jyear = Time(equinox, format="mjd").jyear
    axistilt = 23.44  # degrees
    arcsecperrad = 3600.0 * 180.0 / np.pi
    # AR pmtime
    pmtime_utc = datetime.strptime(pmtime_utc_str, "%Y-%m-%dT%H:%M:%S%z")
    pmtime_utc_jyear = Time(pmtime_utc).jyear
    pmtime_utc_mjd = Time(pmtime_utc).mjd
    def xyztoradec(xyz):
        # Convert (n, 3) cartesian coordinates back to (RA, DEC) in degrees;
        # the norm division tolerates points slightly off the unit sphere.
        assert len(xyz.shape) == 2
        ra = np.arctan2(xyz[:, 1], xyz[:, 0]) # AR added "np." in front of arctan2...
        ra += 2 * np.pi * (ra < 0)
        norm = np.sqrt(np.sum(xyz ** 2, axis=1))
        dec = np.arcsin(xyz[:, 2] / norm)
        return np.rad2deg(ra), np.rad2deg(dec)
    def radectoxyz(ra_deg, dec_deg): # AR changed inputs from ra,dec to ra_deg,dec_deg
        # Convert (RA, DEC) in degrees to (n, 3) unit-sphere cartesians.
        ra = np.deg2rad(ra_deg)
        dec = np.deg2rad(dec_deg)
        cosd = np.cos(dec)
        return np.vstack((cosd * np.cos(ra), cosd * np.sin(ra), np.sin(dec))).T
    # AR linear proper-motion shift over dt years (mas/yr -> deg)
    dt = pmtime_utc_jyear - ref_year
    cosdec = np.cos(np.deg2rad(dec))
    dec = dec + dt * pmdec / (3600.0 * 1000.0)
    ra = ra + (dt * pmra / (3600.0 * 1000.0)) / cosdec
    parallax = np.atleast_1d(parallax)
    # AR discards parallax for scnd=True
    if scnd == True:
        parallax *= 0.0
    # AR only objects with non-zero parallax get the annual-parallax shift
    I = np.flatnonzero(parallax)
    if len(I):
        suntheta = 2.0 * np.pi * np.fmod(pmtime_utc_jyear - equinox_jyear, 1.0)
        # Finite differences on the unit sphere -- xyztoradec handles
        # points that are not exactly on the surface of the sphere.
        axis = np.deg2rad(axistilt)
        scale = parallax[I] / 1000.0 / arcsecperrad
        xyz = radectoxyz(ra[I], dec[I])
        xyz[:, 0] += scale * np.cos(suntheta)
        xyz[:, 1] += scale * np.sin(suntheta) * np.cos(axis)
        xyz[:, 2] += scale * np.sin(suntheta) * np.sin(axis)
        r, d = xyztoradec(xyz)
        ra[I] = r
        dec[I] = d
    return ra, dec
def force_finite_pm(
    d, pmra_key="PMRA", pmdec_key="PMDEC", log=Logger.get(), step="", start=time()
):
    """
    Replaces NaN PMRA, PMDEC by 0

    Args:
        d: array with at least proper-motion columns
        pmra_key (optional, defaults to PMRA): column name for PMRA
        pmdec_key (optional, defaults to PMDEC): column name for PMDEC
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()

    Returns:
        d: same as input d, but NaN proper motions replaced by 0
    """
    # AR sanitize both proper-motion columns the same way
    for colname in (pmra_key, pmdec_key):
        is_nan = ~np.isfinite(d[colname])
        nbad = is_nan.sum()
        if nbad > 0:
            d[colname][is_nan] = 0.0
            log.info(
                "{:.1f}s\t{}\t replacing NaN by 0 for {} targets".format(
                    time() - start, step, nbad
                )
            )
    return d
def force_nonzero_refepoch(
    d,
    force_ref_epoch,
    ref_epoch_key="REF_EPOCH",
    pmra_key="PMRA",
    pmdec_key="PMDEC",
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Replaces 0 by force_ref_epoch in ref_epoch

    Args:
        d: array with at least proper-motion columns
        force_ref_epoch: float, ref_epoch to replace 0 by
        ref_epoch_key (optional, defaults to REF_EPOCH): column name for the ref_epoch
        pmra_key (optional, defaults to PMRA): column name for PMRA
        pmdec_key (optional, defaults to PMDEC): column name for PMDEC
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()

    Returns:
        d: same as input d, but 0 ref_epochs replaced by force_ref_epoch

    Notes:
        Will exit with error if ref_epoch=0, but pmra or pmdec != 0
    """
    keep = d[ref_epoch_key] == 0
    # AR bug fix: the second clause previously re-tested pmra_key, so a
    # AR non-zero PMDEC with REF_EPOCH=0 slipped through this sanity check
    n = ((d[pmra_key][keep] != 0) | (d[pmdec_key][keep] != 0)).sum()
    if n > 0:
        log.error(
            "{:.1f}s\t{}\t{} targets have {}=0 but {} or {} != 0; exiting".format(
                time() - start, step, n, ref_epoch_key, pmra_key, pmdec_key,
            )
        )
        sys.exit(1)
    d[ref_epoch_key][keep] = force_ref_epoch
    log.info(
        "{:.1f}s\t{}\tsetting {}={} for {} objects with {}=0".format(
            time() - start,
            step,
            ref_epoch_key,
            force_ref_epoch,
            keep.sum(),
            ref_epoch_key,
        )
    )
    return d
def update_nowradec(
    d,
    gaiadr,
    pmtime_utc_str,
    ra_key="RA",
    dec_key="DEC",
    pmra_key="PMRA",
    pmdec_key="PMDEC",
    parallax_key="PARALLAX",
    ref_epoch_key="REF_EPOCH",
    gaiag_key="GAIA_PHOT_G_MEAN_MAG",
    gaiaaen_key="GAIA_ASTROMETRIC_EXCESS_NOISE",
    scnd=False,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Update (RA, DEC, REF_EPOCH) using proper motion

    Args:
        d: array with at least proper-motion columns
        gaiadr: Gaia dr ("dr2" or "edr3")
        pmtime_utc_str: date to update position to (format: YYYY-MM-DDThh:mm:ss+00:00)
        ra_key (optional, defaults to RA): column name for RA
        dec_key (optional, defaults to DEC): column name for DEC
        pmra_key (optional, defaults to PMRA): column name for PMRA
        pmdec_key (optional, defaults to PMDEC): column name for PMDEC
        parallax_key (optional, defaults to PARALLAX): column name for PARALLAX
        ref_epoch_key (optional, defaults to REF_EPOCH): column name for the REF_EPOCH
        gaiag_key (optional, defaults to GAIA_PHOT_G_MEAN_MAG): column name for Gaia g-mag
        gaiaaen_key (optional, defaults to GAIA_ASTROMETRIC_EXCESS_NOISE): column name for Gaia GAIA_ASTROMETRIC_EXCESS_NOISE
        scnd (optional, defaults to False): secondary target? (boolean);
            if False, update for REF_EPOCH>0 + AEN only
            if True, update for REF_EPOCH>0 + finite(PMRA,PMDEC) ; forces PARALLAX=0
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()

    Returns:
        d: same as input, but with RA, DEC updated to pmtime_utc_str

    Notes:
        REF_EPOCH is updated for *all* objects
    """
    pmtime_utc = datetime.strptime(pmtime_utc_str, "%Y-%m-%dT%H:%M:%S%z")
    pmtime_utc_jyear = Time(pmtime_utc).jyear
    # AR computing positions at pmtime_utc_str using Gaia PMRA, PMDEC
    nowra, nowdec = get_nowradec(
        d[ra_key],
        d[dec_key],
        d[pmra_key],
        d[pmdec_key],
        d[parallax_key],
        d[ref_epoch_key],
        pmtime_utc_str,
        scnd=scnd,
    )
    if scnd:
        # AR secondary: REF_EPOCH>0
        keep = d["REF_EPOCH"] > 0
    else:
        # AR targets with REF_EPOCH>0 and passing the AEN criterion
        keep = d["REF_EPOCH"] > 0
        # AR gaia_psflike arguments changed at desitarget-0.58.0
        # NOTE(review): plain string comparison of version numbers is fragile
        # (e.g. "0.100.0" < "0.58.0"); consider a proper version parse
        if desitarget.__version__ < "0.58.0":
            keep &= gaia_psflike(d[gaiag_key], d[gaiaaen_key])
        else:
            keep &= gaia_psflike(d[gaiag_key], d[gaiaaen_key], dr=gaiadr)
    # AR storing changes to report extrema in the log
    dra = nowra - d[ra_key]
    ddec = nowdec - d[dec_key]
    # AR updating positions to pmtime_utc_str for targets passing the AEN criterion
    d[ra_key][keep] = nowra[keep]
    d[dec_key][keep] = nowdec[keep]
    log.info(
        "{:.1f}s\t{}\tupdating RA,DEC at {} with PM for {:.0f}/{:.0f} targets passing AEN; maximum changes: RA={:.1f},{:.1f} arcsec, DEC={:.1f},{:.1f} arcsec".format(
            time() - start,
            step,
            pmtime_utc_jyear,
            keep.sum(),
            len(keep),
            3600.0 * dra.min(),
            3600.0 * dra.max(),
            3600.0 * ddec.min(),
            3600.0 * ddec.max(),
        )
    )
    # AR updating REF_EPOCH for *all* objects (for PlateMaker)
    d[ref_epoch_key] = pmtime_utc_jyear
    # AR consistency fix: include the step field in this log line, as every
    # AR other log message in this module does
    log.info(
        "{:.1f}s\t{}\tupdating REF_EPOCH to {} for all {} targets".format(
            time() - start, step, pmtime_utc_jyear, len(keep)
        )
    )
    return d
def assert_env_vars(
    required_env_vars=[
        "DESI_ROOT",
        "DESI_TARGET",
        "DESIMODEL",
        "DESI_SURVEYOPS",
        "SKYBRICKS_DIR",
    ],
    log=Logger.get(),
    step="settings",
    start=time(),
):
    """
    Check that every environment variable required by fba_launch is defined.
    Args:
        required_env_vars (optional, defaults to ["DESI_ROOT",
                    "DESI_TARGET",
                    "DESIMODEL",
                    "DESI_SURVEYOPS",
                    "SKYBRICKS_DIR",]): list of environment variables required by fba_launch
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to "settings"): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Notes:
        exits with status 1 as soon as one variable is found undefined
    """
    # AR safe: abort on the first missing DESI environment variable
    for name in required_env_vars:
        if name not in os.environ:
            msg = "{:.1f}s\t{}\tenvironment variable {} not defined; exiting".format(
                time() - start, step, name
            )
            log.error(msg)
            sys.exit(1)
def assert_arg_dates(
    args,
    dates=["pmtime_utc_str", "rundate", "mtltime"],
    log=Logger.get(),
    step="settings",
    start=time(),
):
    """
    Check that the fba_launch date arguments are correctly formatted ("YYYY-MM-DDThh:mm:ss+00:00").
    Args:
        args: fba_launch parser.parse_args() output
        dates (optional, defaults to ["pmtime_utc_str", "rundate", "mtltime"]): list of date fba_launch argument names to check
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to "settings"): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Notes:
        exits with status 1 on the first mis-formatted date argument
    """
    # AR loop over all parsed arguments, only checking the date-like ones
    for key, value in args._get_kwargs():
        if key not in dates:
            continue
        if not assert_isoformat_utc(value):
            log.error(
                "{:.1f}s\t{}\t{}={} is not yyyy-mm-ddThh:mm:ss+00:00; exiting".format(
                    time() - start, step, key, value,
                )
            )
            sys.exit(1)
def assert_svn_tileid(
    tileid, forcetileid="n", log=Logger.get(), step="settings", start=time(),
):
    """
    Check whether TILEID already exists in the SVN tile folder.
    Args:
        tileid: TILEID to check (int)
        forcetileid (optional, defaults to "n"): "y" or "n";
            if "n", an existing TILEID triggers a warning + an error (exit 1)
            if "y", an existing TILEID triggers a warning only
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to "settings"): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    """
    tileid_str = "{:06d}".format(tileid)
    svn_trunk = os.path.join(os.getenv("DESI_TARGET"), "fiberassign/tiles/trunk")
    # AR check both the .fits and the .fits.gz flavors,
    # AR as the gzipping was not done before ~SV1
    svn_fn = os.path.join(
        svn_trunk, tileid_str[:3], "fiberassign-{}.fits".format(tileid_str)
    )
    already_in_svn = os.path.isfile(svn_fn) or os.path.isfile("{}.gz".format(svn_fn))
    if not already_in_svn:
        log.info(
            "{:.1f}s\t{}\tTILEID={} does not exist in SVN folder {}; proceeding".format(
                time() - start, step, tileid, svn_trunk
            )
        )
        return
    log.warning(
        "{:.1f}s\t{}\tTILEID={} already exists in SVN folder {}".format(
            time() - start, step, tileid, svn_trunk
        )
    )
    if forcetileid == "y":
        log.warning(
            "{:.1f}s\t{}\tproceeding as forcetileid == y".format(
                time() - start, step
            )
        )
    else:
        log.error(
            "{:.1f}s\tsettings\texiting as forcetileid == n".format(time() - start)
        )
        sys.exit(1)
def print_config_infos(
    required_env_vars=[
        "DESI_ROOT",
        "DESI_TARGET",
        "DESIMODEL",
        "DESI_SURVEYOPS",
        "SKYBRICKS_DIR",
    ],
    log=Logger.get(),
    step="settings",
    start=time(),
):
    """
    Print various configuration informations (machine, modules version/path, DESI environment variables).
    Args:
        required_env_vars (optional, defaults to ["DESI_ROOT",
                    "DESI_TARGET",
                    "DESIMODEL",
                    "DESI_SURVEYOPS",
                    "SKYBRICKS_DIR",]): list of environment variables required by fba_launch
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to "settings"): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    """
    # AR machine
    log.info(
        "{:.1f}s\t{}\tHOSTNAME={}".format(time() - start, step, os.getenv("HOSTNAME"))
    )
    # AR fiberassign, desitarget, desimodel, desimeter code version/path
    modules = [
        ("fiberassign", fiberassign),
        ("desitarget", desitarget),
        ("desimodel", desimodel),
        ("desimeter", desimeter),
    ]
    for name, module in modules:
        log.info(
            "{:.1f}s\t{}\trunning with {} code version: {}".format(
                time() - start, step, name, module.__version__
            )
        )
        log.info(
            "{:.1f}s\t{}\trunning with {} code path: {}".format(
                time() - start, step, name, module.__path__
            )
        )
    # AR DESI environment variables
    for name in required_env_vars:
        log.info(
            "{:.1f}s\t{}\t{}={}".format(
                time() - start, step, name, os.getenv(name)
            )
        )
def get_desitarget_paths(
    dtver,
    survey,
    program,
    dr="dr9",
    gaiadr="gaiadr2",
    log=Logger.get(),
    step="settings",
    start=time(),
):
    """
    Obtain the folder/file full paths for desitarget products.
    Args:
        dtver: desitarget catalog version (string; e.g., "0.57.0")
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        program: "dark", "bright", or "backup" (string)
        dr (optional, defaults to "dr9"): legacypipe dr (string)
        gaiadr (optional, defaults to "gaiadr2"): gaia dr (string)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to "settings"): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Returns:
        Dictionary with the following keys:
            - sky: sky folder
            - skysupp: skysupp folder
            - gfa: GFA folder
            - targ: targets folder (static catalogs, with all columns)
            - mtl: MTL folder
            - scnd: secondary fits catalog (static)
            - scndmtl: MTL folder for secondary targets
            - too: ToO ecsv catalog
    Notes:
        if survey not in ["sv1", "sv2", "sv3", "main"]
            or program not in ["dark", "bright", or "backup"], will return a warning only
        same warning only if the built paths/files do not exist.
    """
    surv = survey.lower()
    prog = program.lower()
    # AR warn (only) on unexpected survey / program values
    exp_surveys = ["sv1", "sv2", "sv3", "main"]
    exp_programs = ["dark", "bright", "backup"]
    if surv not in exp_surveys:
        log.warning(
            "{:.1f}s\t{}\tunexpected survey={} ({}; proceeding anyway)".format(
                time() - start, step, surv, exp_surveys
            )
        )
    if prog not in exp_programs:
        log.warning(
            "{:.1f}s\t{}\tunexpected program={} ({}; proceeding anyway)".format(
                time() - start, step, prog, exp_programs
            )
        )
    # AR folder architecture is now the same at NERSC/KPNO (https://github.com/desihub/fiberassign/issues/302)
    targ_root = os.getenv("DESI_TARGET")
    ops_root = os.getenv("DESI_SURVEYOPS")
    mydirs = {
        "sky": os.path.join(targ_root, "catalogs", dr, dtver, "skies"),
        "skysupp": os.path.join(targ_root, "catalogs", gaiadr, dtver, "skies-supp"),
        "gfa": os.path.join(targ_root, "catalogs", dr, dtver, "gfas"),
    }
    # AR backup targets are gaia-based; the other programs are legacypipe-based
    dtcat = gaiadr if prog == "backup" else dr
    mydirs["targ"] = os.path.join(
        targ_root, "catalogs", dtcat, dtver, "targets", surv, "resolve", prog,
    )
    mydirs["mtl"] = os.path.join(ops_root, "mtl", surv, prog)
    # AR secondary (dark, bright; no secondary for backup)
    if prog in ["dark", "bright"]:
        if surv == "main":
            basename = "targets-{}-secondary.fits".format(prog)
        else:
            basename = "{}targets-{}-secondary.fits".format(surv, prog)
        mydirs["scnd"] = os.path.join(
            targ_root,
            "catalogs",
            dr,
            dtver,
            "targets",
            surv,
            "secondary",
            prog,
            basename,
        )
        mydirs["scndmtl"] = os.path.join(
            ops_root, "mtl", surv, "secondary", prog,
        )
        # AR ToO (same for dark, bright)
        mydirs["too"] = os.path.join(ops_root, "mtl", surv, "ToO", "ToO.ecsv")
    # AR log each path, warning (only) if it does not exist on disk
    for key in list(mydirs.keys()):
        log.info(
            "{:.1f}s\t{}\tdirectory for {}: {}".format(
                time() - start, step, key, mydirs[key]
            )
        )
        if not os.path.exists(mydirs[key]):
            log.warning(
                "{:.1f}s\t{}\tdirectory for {}: {} does not exist".format(
                    time() - start, step, key, mydirs[key]
                )
            )
    return mydirs
def create_tile(
    tileid,
    tilera,
    tiledec,
    outfn,
    survey,
    obscon="DARK|GRAY|BRIGHT|BACKUP",
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Create a tiles fits file.
    Args:
        tileid: TILEID (int)
        tilera: tile center R.A. (float)
        tiledec: tile center Dec. (float)
        outfn: fits file name to be written
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        obscon (optional, defaults to "DARK|GRAY|BRIGHT|BACKUP"): tile allowed observing conditions (string)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    """
    hdr = fitsio.FITSHDR()
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart generating {}".format(time() - start, step, outfn))
    log.info(
        "{:.1f}s\t{}\ttileid={}, tilera={}, tiledec={}, survey={}, obscon={}".format(
            time() - start, step, tileid, tilera, tiledec, survey, obscon
        )
    )
    # AR one-row structured array with the columns desimodel expects
    tiles_dtype = [
        ("TILEID", "i4"),
        ("RA", "f8"),
        ("DEC", "f8"),
        ("OBSCONDITIONS", "i4"),
        ("IN_DESI", "i2"),
        ("PROGRAM", "S6"),
    ]
    d = np.zeros(1, dtype=tiles_dtype)
    d["TILEID"] = tileid
    d["RA"] = tilera
    d["DEC"] = tiledec
    # AR IN_DESI forced to 1:
    # AR otherwise the default onlydesi=True option in
    # AR desimodel.io.load_tiles() discards tiles outside the desi footprint,
    # AR so return no tiles for the dithered tiles outside desi
    d["IN_DESI"] = 1
    d["PROGRAM"] = survey.upper()  # AR custom... SV2, SV3, MAIN
    log.info("{:.1f}s\t{}\ttile obscon={}".format(time() - start, step, obscon))
    d["OBSCONDITIONS"] = obsconditions.mask(obscon)
    fitsio.write(outfn, d, extname="TILES", header=hdr, clobber=True)
    log.info("{:.1f}s\t{}\t{} written".format(time() - start, step, outfn,))
def create_sky(
    tilesfn,
    skydir,
    outfn,
    suppskydir=None,
    tmpoutdir=tempfile.mkdtemp(),
    add_plate_cols=True,
    log=Logger.get(),
    step="",
    start=time(),
    quick=True,
):
    """
    Create a sky fits file.
    Args:
        tilesfn: path to a tiles fits file (string)
        skydir: desitarget sky folder (string)
        outfn: fits file name to be written (string)
        suppskydir (optional, defaults to None): desitarget suppsky folder (string)
        tmpoutdir (optional, defaults to a temporary directory): temporary directory where
            write_skies will write (creating some sub-directories)
        add_plate_cols (optional, defaults to True): adds a PLATE_RA, PLATE_DEC columns (boolean)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
        quick (optional, defaults to True): boolean, argument of desitarget.io.read_targets_in_tiles()
    Notes:
        add_plate_cols: not adding PLATE_REF_EPOCH;
        20210526 : implementation of using subpriority=False in write_skies
            to avoid an over-writting of the SUBPRIORITY
    """
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart generating {}".format(time() - start, step, outfn))
    # AR sky: read targets
    tiles = fits.open(tilesfn)[1].data
    skydirs = [skydir]
    if suppskydir is not None:
        skydirs.append(suppskydir)
    # AR bugfix: `quick` was previously undefined (NameError); it is now a
    # AR keyword argument, defaulting to True as in create_targ_nomtl()
    ds = [read_targets_in_tiles(mydir, tiles=tiles, quick=quick) for mydir in skydirs]
    # AR bugfix: (1) the log.info() call was missing its closing parenthesis
    # AR (SyntaxError); (2) the loop variables no longer shadow `skydir`, which
    # AR must still point at the sky folder when passed as indir= below
    for mydir, myd in zip(skydirs, ds):
        log.info(
            "{:.1f}s\t{}\treading {} targets from {}".format(
                time() - start, step, len(myd), mydir
            )
        )
    d = np.concatenate(ds)
    # AR adding PLATE_RA, PLATE_DEC?
    if add_plate_cols:
        d = Table(d)
        d["PLATE_RA"] = d["RA"]
        d["PLATE_DEC"] = d["DEC"]
        d = d.as_array()
        log.info(
            "{:.1f}s\t{}\tadding PLATE_RA, PLATE_DEC columns".format(
                time() - start, step
            )
        )
    n, tmpfn = write_skies(tmpoutdir, d, indir=skydir, indir2=suppskydir, subpriority=False)
    _ = mv_write_targets_out(tmpfn, tmpoutdir, outfn, log=log, step=step, start=start)
    log.info("{:.1f}s\t{}\t{} written".format(time() - start, step, outfn))
def create_targ_nomtl(
    tilesfn,
    targdir,
    survey,
    gaiadr,
    pmcorr,
    outfn,
    tmpoutdir=tempfile.mkdtemp(),
    pmtime_utc_str=None,
    add_plate_cols=True,
    quick=True,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Create a target fits file, with solely using desitarget catalogs, no MTL.
    e.g. for the GFA, but could be used for other purposes.
    Args:
        tilesfn: path to a tiles fits file (string)
        targdir: desitarget target folder (string)
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        gaiadr: Gaia dr ("dr2" or "edr3")
        pmcorr: apply proper-motion correction? ("y" or "n")
        outfn: fits file name to be written (string)
        tmpoutdir (optional, defaults to a temporary directory): temporary directory where
            write_targets will write (creating some sub-directories)
        pmtime_utc_str (optional, defaults to None): UTC time use to compute
            new coordinates after applying proper motion since REF_EPOCH
            (string formatted as "yyyy-mm-ddThh:mm:ss+00:00")
        add_plate_cols (optional, defaults to True): adds a PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH columns (boolean)
        quick (optional, defaults to True): boolean, arguments of desitarget.io.read_targets_in_tiles()
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Notes:
        if pmcorr="y", then pmtime_utc_str needs to be set; will trigger an error otherwise.
        TBD: the PLATE_{RA,DEC,REF_EPOCH} columns currently simply are copy of RA,DEC,REF_EPOCH
        TBD: but it prepares e.g. to add chromatic offsets.
        20210526 : implementation of using subpriority=False in write_targets
            to avoid an over-writting of the SUBPRIORITY
    """
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart generating {}".format(time() - start, step, outfn))
    # AR targ_nomtl: read targets
    tiles = fits.open(tilesfn)[1].data
    d = read_targets_in_tiles(targdir, tiles=tiles, quick=quick)
    log.info(
        "{:.1f}s\t{}\tkeeping {} targets to {}".format(
            time() - start, step, len(d), outfn
        )
    )
    # AR adding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH ?
    if add_plate_cols:
        d = Table(d)
        d["PLATE_RA"] = d["RA"]
        d["PLATE_DEC"] = d["DEC"]
        d["PLATE_REF_EPOCH"] = d["REF_EPOCH"]
        d = d.as_array()
        log.info(
            "{:.1f}s\t{}\tadding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH columns".format(
                time() - start, step
            )
        )
    # AR targ_nomtl: PMRA, PMDEC: convert NaN to zeros
    d = force_finite_pm(d, log=log, step=step, start=start)
    # AR targ_nomtl: update RA, DEC, REF_EPOCH using proper motion?
    if pmcorr == "y":
        if pmtime_utc_str is None:
            log.error(
                "{:.1f}s\t{}\tneed to provide pmtime_utc_str, as proper-correction is requested; exiting".format(
                    time() - start, step,
                )
            )
            # AR bugfix: was sys.exti(1), which raised AttributeError instead
            # AR of performing the intended clean exit
            sys.exit(1)
        d = update_nowradec(d, gaiadr, pmtime_utc_str, log=log, step=step, start=start)
    else:
        log.info(
            "{:.1f}s\t{}\t*not* applying proper-motion correction".format(
                time() - start, step
            )
        )
    # AR targ_nomtl: Replaces 0 by force_ref_epoch in ref_epoch
    d = force_nonzero_refepoch(
        d, gaia_ref_epochs[gaiadr], log=log, step=step, start=start
    )
    # AR targ_nomtl: write fits
    n, tmpfn = write_targets(tmpoutdir, d, indir=targdir, survey=survey, subpriority=False)
    _ = mv_write_targets_out(tmpfn, tmpoutdir, outfn, log=log, step=step, start=start)
    # AR targ_nomtl: update header if pmcorr = "y"
    if pmcorr == "y":
        fd = fitsio.FITS(outfn, "rw")
        fd["TARGETS"].write_key("COMMENT", "RA,DEC updated with PM for AEN objects")
        fd["TARGETS"].write_key("COMMENT", "REF_EPOCH updated for all objects")
        fd.close()
    log.info("{:.1f}s\t{}\t{} written".format(time() - start, step, outfn))
def create_mtl(
    tilesfn,
    mtldir,
    mtltime,
    targdir,
    survey,
    gaiadr,
    pmcorr,
    outfn,
    tmpoutdir=tempfile.mkdtemp(),
    pmtime_utc_str=None,
    add_plate_cols=True,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Create a (primary or secondary) target fits file, based on MTL ledgers (and complementary columns from desitarget targets files).
    Args:
        tilesfn: path to a tiles fits file (string)
        mtldir: desisurveyops MTL folder (string)
        mtltime: MTL isodate (string formatted as yyyy-mm-ddThh:mm:ss+00:00)
        targdir: desitarget targets folder (or file name if secondary) (string)
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        gaiadr: Gaia dr ("dr2" or "edr3")
        pmcorr: apply proper-motion correction? ("y" or "n")
        outfn: fits file name to be written (string)
        tmpoutdir (optional, defaults to a temporary directory): temporary directory where
            write_targets will write (creating some sub-directories)
        pmtime_utc_str (optional, defaults to None): UTC time use to compute
            new coordinates after applying proper motion since REF_EPOCH
            (string formatted as "yyyy-mm-ddThh:mm:ss+00:00")
        add_plate_cols (optional, defaults to True): adds a PLATE_RA and PLATE_DEC columns (boolean)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Notes:
        if pmcorr="y", then pmtime_utc_str needs to be set; will trigger an error otherwise.
        for sv3-backup, we remove BACKUP_BRIGHT targets.
        TBD : if secondary targets, we currently disable the inflate_ledger(), as it
            seems to not currently work.
            hence if secondary and pmcorr="y", the code will crash, as the
            GAIA_ASTROMETRIC_EXCESS_NOISE column will be missing; though we do not
            expect this configuration to happen, so it should be fine for now.
        TBD: the PLATE_{RA,DEC,REF_EPOCH} columns currently simply are copy of RA,DEC,REF_EPOCH
        TBD: but it prepares e.g. to add chromatic offsets.
        20210526 : implementation of using subpriority=False in write_targets
            to avoid an over-writting of the SUBPRIORITY
    """
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart generating {}".format(time() - start, step, outfn))
    tiles = fits.open(tilesfn)[1].data
    # AR mtl: storing the timestamp at which we queried MTL
    log.info("{:.1f}s\t{}\tmtltime={}".format(time() - start, step, mtltime))
    # AR mtl: read mtl
    d = read_targets_in_tiles(
        mtldir,
        tiles=tiles,
        quick=False,
        mtl=True,
        unique=True,
        isodate=mtltime,
    )
    log.info(
        "{:.1f}s\t{}\treading {} targets from {}".format(
            time() - start, step, len(d), mtldir
        )
    )
    # AR mtl: removing by hand BACKUP_BRIGHT for sv3/BACKUP
    # AR mtl: using an indirect way to find if program=backup,
    # AR mtl: to avoid the need of an extra program argument
    # AR mtl: for sv3, there is no secondary-backup, so no ambiguity
    if (survey == "sv3") & ("backup" in mtldir):
        from desitarget.sv3.sv3_targetmask import mws_mask

        keep = (d["SV3_MWS_TARGET"] & mws_mask["BACKUP_BRIGHT"]) == 0
        log.info(
            "{:.1f}s\t{}\tremoving {}/{} BACKUP_BRIGHT targets".format(
                time() - start, step, len(d) - keep.sum(), len(d)
            )
        )
        d = d[keep]
    # AR mtl: add columns not present in ledgers
    # AR mtl: need to provide exact list (if columns=None, inflate_ledger()
    # AR mtl: overwrites existing columns)
    # AR mtl: TBD : we currently disable it for secondary targets
    # AR mtl: using an indirect way to find if secondary,
    # AR mtl: to avoid the need of an extra program argument
    if "secondary" not in mtldir:
        columns = [key for key in minimal_target_columns if key not in d.dtype.names]
        # AR mtl: also add GAIA_ASTROMETRIC_EXCESS_NOISE, in case args.pmcorr == "y"
        if pmcorr == "y":
            columns += ["GAIA_ASTROMETRIC_EXCESS_NOISE"]
        log.info(
            "{:.1f}s\t{}\tadding {} from {}".format(
                time() - start, step, ",".join(columns), targdir
            )
        )
        d = inflate_ledger(
            d, targdir, columns=columns, header=False, strictcols=False, quick=True
        )
    # AR adding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH ?
    if add_plate_cols:
        d = Table(d)
        d["PLATE_RA"] = d["RA"]
        d["PLATE_DEC"] = d["DEC"]
        d["PLATE_REF_EPOCH"] = d["REF_EPOCH"]
        d = d.as_array()
        log.info(
            "{:.1f}s\t{}\tadding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH columns".format(
                time() - start, step
            )
        )
    # AR mtl: PMRA, PMDEC: convert NaN to zeros
    d = force_finite_pm(d, log=log, step=step, start=start)
    # AR mtl: update RA, DEC, REF_EPOCH using proper motion?
    if pmcorr == "y":
        if pmtime_utc_str is None:
            log.error(
                "{:.1f}s\t{}\tneed to provide pmtime_utc_str, as proper-correction is requested; exiting".format(
                    time() - start, step,
                )
            )
            # AR bugfix: was sys.exti(1), which raised AttributeError instead
            # AR of performing the intended clean exit
            sys.exit(1)
        d = update_nowradec(d, gaiadr, pmtime_utc_str, log=log, step=step, start=start)
    else:
        log.info(
            "{:.1f}s\t{}\t*not* applying proper-motion correction".format(
                time() - start, step
            )
        )
    # AR Replaces 0 by force_ref_epoch in ref_epoch
    d = force_nonzero_refepoch(
        d, gaia_ref_epochs[gaiadr], log=log, step=step, start=start
    )
    # AR mtl: write fits
    n, tmpfn = write_targets(tmpoutdir, d, indir=mtldir, indir2=targdir, survey=survey, subpriority=False)
    _ = mv_write_targets_out(tmpfn, tmpoutdir, outfn, log=log, step=step, start=start,)
    # AR mtl: update header if pmcorr = "y"
    if pmcorr == "y":
        fd = fitsio.FITS(outfn, "rw")
        fd["TARGETS"].write_key("COMMENT", "RA,DEC updated with PM for AEN objects")
        fd["TARGETS"].write_key("COMMENT", "REF_EPOCH updated for all objects")
        fd.close()
    log.info("{:.1f}s\t{}\t{} written".format(time() - start, step, outfn))
def create_too(
    tilesfn,
    toofn,
    mjd_min,
    mjd_max,
    survey,
    gaiadr,
    pmcorr,
    outfn,
    tmpoutdir=tempfile.mkdtemp(),
    pmtime_utc_str=None,
    too_tile=False,
    add_plate_cols=True,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Create a ToO target fits file, with selecting targets in a MJD time window.
    If no ToO file, or no selected targets, do nothing.
    Args:
        tilesfn: path to a tiles fits file (string)
        toofn: ToO file name (string)
        mjd_min, mjd_max (floats): we keep targets with MJD_BEGIN < mjd_max and MJD_END > mjd_min
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        gaiadr: Gaia dr ("dr2" or "edr3")
        pmcorr: apply proper-motion correction? ("y" or "n")
        outfn: fits file name to be written (string)
        tmpoutdir (optional, defaults to a temporary directory): temporary directory where
            write_targets will write (creating some sub-directories)
        pmtime_utc_str (optional, defaults to None): UTC time use to compute
            new coordinates after applying proper motion since REF_EPOCH
            (string formatted as "yyyy-mm-ddThh:mm:ss+00:00")
        too_tile (optional, defaults to False): if False, we only keep TOO_TYPE!="TILE",
            if True, we do not cut on TOO_TYPE, hence keeping both TOO_TYPE="FIBER" *and*
            TOO_TYPE="TILE" for ToO dedicated tiles (boolean)
        add_plate_cols (optional, defaults to True): adds a PLATE_RA and PLATE_DEC columns (boolean)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Returns:
        False if no ToO input file exists, True otherwise.
    Notes:
        if pmcorr="y", then pmtime_utc_str needs to be set; will trigger an error otherwise.
        TBD : the MJD window to accept targets; currently in fba_launch, we set a month
            from the tile design date;
            it surely needs to be updated/refined once operations are more clear.
        some steps in common with create_mtl().
        TBD: the PLATE_{RA,DEC,REF_EPOCH} columns currently simply are copy of RA,DEC,REF_EPOCH
        TBD: but it prepares e.g. to add chromatic offsets.
        20210526 : implementation of using subpriority=False in write_targets
            to avoid an over-writting of the SUBPRIORITY
    """
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart generating {}".format(time() - start, step, outfn))
    # AR too: is there a file?
    # AR too: if no, just skip
    if not os.path.isfile(toofn):
        log.info(
            "{:.1f}s\t{}\tno ToO input file present: {}, not writing any {}".format(
                time() - start, step, toofn, outfn
            )
        )
        return False
    # AR too: if yes, we proceed
    # AR too: tile file
    tiles = fits.open(tilesfn)[1].data
    # AR too: read too file
    # AR cut on:
    # AR - tiles
    # AR - mjd (! TBD !)
    d = Table.read(toofn)
    # AR adding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH ?
    if add_plate_cols:
        d["PLATE_RA"] = d["RA"]
        d["PLATE_DEC"] = d["DEC"]
        d["PLATE_REF_EPOCH"] = d["REF_EPOCH"]
        # AR bugfix: log message previously said "REF_EPOCH columns", though
        # AR the added column is PLATE_REF_EPOCH (as in create_mtl())
        log.info(
            "{:.1f}s\t{}\tadding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH columns".format(
                time() - start, step
            )
        )
    keep = is_point_in_desi(tiles, d["RA"], d["DEC"])
    if not too_tile:
        keep &= d["TOO_TYPE"] != "TILE"
    keep &= (d["MJD_BEGIN"] < mjd_max) & (d["MJD_END"] > mjd_min)
    log.info(
        "{:.1f}s\t{}\tkeeping {}/{} targets in tiles, with TOO_TYPE={}, and in the MJD time window: {}, {}".format(
            time() - start, step, keep.sum(), len(keep), "TILE,FIBER" if too_tile else "FIBER", mjd_min, mjd_max
        )
    )
    if keep.sum() > 0:
        d = d[keep]
        # AR too: PMRA, PMDEC: convert NaN to zeros
        d = force_finite_pm(d, log=log, step=step, start=start)
        # AR too: update RA, DEC, REF_EPOCH using proper motion
        if pmcorr == "y":
            if pmtime_utc_str is None:
                log.error(
                    "{:.1f}s\t{}\tneed to provide pmtime_utc_str, as proper-correction is requested; exiting".format(
                        time() - start, step,
                    )
                )
                # AR bugfix: was sys.exti(1), which raised AttributeError
                # AR instead of performing the intended clean exit
                sys.exit(1)
            d = update_nowradec(
                d, gaiadr, pmtime_utc_str, log=log, step=step, start=start
            )
        else:
            log.info(
                "{:.1f}s\t{}\t*not* applying proper-motion correction".format(
                    time() - start, step
                )
            )
        # AR single REF_EPOCH needed
        # AR TBD currently all targets have PMRA=PMDEC=0,
        # AR TBD so it s fine to just change all REF_EPOCH
        d["REF_EPOCH"] = np.zeros(len(d))
        # AR Replaces 0 by force_ref_epoch in ref_epoch
        d = force_nonzero_refepoch(
            d, gaia_ref_epochs[gaiadr], log=log, step=step, start=start
        )
        # AR mtl: write fits
        n, tmpfn = write_targets(tmpoutdir, d.as_array(), indir=toofn, survey=survey, subpriority=False)
        _ = mv_write_targets_out(
            tmpfn, tmpoutdir, outfn, log=log, step=step, start=start,
        )
        # AR mtl: update header if pmcorr = "y"
        if pmcorr == "y":
            fd = fitsio.FITS(outfn, "rw")
            fd["TARGETS"].write_key("COMMENT", "RA,DEC updated with PM for AEN objects")
            fd["TARGETS"].write_key("COMMENT", "REF_EPOCH updated for all objects")
            fd.close()
        log.info("{:.1f}s\t{}\t{} written".format(time() - start, step, outfn))
    else:
        log.info(
            "{:.1f}s\t{}\tno too kept too targets, no {} written".format(
                time() - start, step, outfn
            )
        )
    return True
def launch_onetile_fa(
    args,
    tilesfn,
    targfns,
    fbafn,
    fiberassignfn,
    skyfn=None,
    gfafn=None,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Runs the fiber assignment (run_assign_full),
    merges the results (merge_results) for a single tile,
    and prints the assignment stats for each mask.
    Args:
        args: fba_launch-like parser.parse_args() output
            should contain at least:
                - survey
                - rundate
                - sky_per_petal
                - standards_per_petal
                - sky_per_slitblock
                - ha, margin_pos, margin_gfa, margin_petal
        tilesfn: path to the input tiles fits file (string)
        targfns: paths to the input targets fits files, e.g. targ, scnd, too (either a string if only one file, or a list of strings)
        fbafn: path to the output fba-TILEID.fits file (string)
        fiberassignfn: path to the output fiberassign-TILEID.fits file (string)
        skyfn (optional, defaults to None): path to a sky fits file (string)
        gfafn (optional, defaults to None): path to a gfa fits file (string)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Notes:
        no sanity checks done on inputs; assumed to be done elsewhere
        assumes the output directory is the same for fbafn and fiberassignfn
        we keep a generic "args" input, so that any later added argument in fba_launch does not
            requires a change in the launch_fa() call format.
        fba_launch-like adding information in the header is done in another function, update_fiberassign_header
        TBD: be careful if working in the SVN-directory; maybe add additional safety lines?
    """
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart running fiber assignment".format(time() - start, step))
    # AR convert targfns to list if string (i.e. only one input file)
    if isinstance(targfns, str):
        targfns = [targfns]
    # AR tileid, tilera, tiledec
    tiles = fits.open(tilesfn)[1].data
    tileid = tiles["TILEID"][0]
    tilera = tiles["RA"][0]
    tiledec = tiles["DEC"][0]
    # AR output directory (picking the one of fbafn)
    outdir = os.path.dirname(fbafn)
    # AR safe: delete possibly existing fba-{tileid}.fits and fiberassign-{tileid}.fits
    # AR TBD: add additional safety check if running in SVN folder?
    if os.path.isfile(fbafn):
        os.remove(fbafn)
    if os.path.isfile(fiberassignfn):
        os.remove(fiberassignfn)
    # AR preparing fba_run inputs
    # AR all argv entries are str(), as parse_assign() expects strings
    opts = ["--targets"]
    for targfn in targfns:
        opts += [targfn]
    opts += [
        "--overwrite",
        "--write_all_targets",
        "--dir",
        outdir,
        "--footprint",
        tilesfn,
        "--rundate",
        args.rundate,
        "--sky_per_petal",
        str(args.sky_per_petal),
        "--standards_per_petal",
        str(args.standards_per_petal),
        "--sky_per_slitblock",
        str(args.sky_per_slitblock),
        "--ha",
        str(args.ha),
    ]
    # AR bugfix: --ha was previously appended a *second* time when args.ha != 0;
    # AR it is now passed once, unconditionally, just above
    if args.margin_pos != 0:
        opts += ["--margin-pos", str(args.margin_pos)]
    if args.margin_gfa != 0:
        opts += ["--margin-gfa", str(args.margin_gfa)]
    if args.margin_petal != 0:
        opts += ["--margin-petal", str(args.margin_petal)]
    if skyfn is not None:
        opts += ["--sky", skyfn]
    if gfafn is not None:
        opts += ["--gfafile", gfafn]
    log.info(
        "{:.1f}s\t{}\ttileid={:06d}: running raw fiber assignment (run_assign_full) with opts={}".format(
            time() - start, step, tileid, " ; ".join(opts)
        )
    )
    ag = parse_assign(opts)
    run_assign_full(ag)
    # AR merging
    # AR not using run_merge(), because it looks for all fba-TILEID.fits file
    # AR in the out directory...
    ag = {}
    ag["tiles"] = [tileid]
    ag["columns"] = None
    if gfafn is not None:
        ag["targets"] = [gfafn] + targfns
    else:
        ag["targets"] = targfns
    if skyfn is not None:
        ag["sky"] = [skyfn]
    else:
        ag["sky"] = []
    ag["result_dir"] = outdir
    ag["copy_fba"] = False
    tmparr = []
    for key in list(ag.keys()):
        tmparr += ["{} = {}".format(key, ag[key])]
    log.info(
        "{:.1f}s\t{}\ttileid={:06d}: merging input target data (merge_results) with argument={}".format(
            time() - start, step, tileid, " ; ".join(tmparr)
        )
    )
    merge_results(
        ag["targets"],
        ag["sky"],
        ag["tiles"],
        result_dir=ag["result_dir"],
        columns=ag["columns"],
        copy_fba=ag["copy_fba"],
    )
    log.info(
        "{:.1f}s\t{}\tcomputing assignment statiscs: start".format(
            time() - start, step
        )
    )
    # AR storing parent/assigned quantities
    parent, assign, dras, ddecs, petals, nassign = get_parent_assign_quants(
        args.survey, targfns, fiberassignfn, tilera, tiledec,
    )
    # AR stats : assigned / parent
    print_assgn_parent_stats(args.survey, parent, assign, log=log, step=step, start=start)
    log.info(
        "{:.1f}s\t{}\tcomputing assignment statiscs: done".format(
            time() - start, step
        )
    )
def update_fiberassign_header(
    fiberassignfn,
    args,
    mydirs,
    hdr_survey,
    hdr_faprgrm,
    faflavor,
    ebv,
    obscon,
    fascript,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Adds various information in the fiberassign-TILEID.fits PRIMARY header.
    Args:
        fiberassignfn: path to fiberassign-TILEID.fits file (string)
        args: fba_launch-like parser.parse_args() output
            should contain at least (see fba_launch arguments):
            - outdir, survey, program, rundate, pmcorr, pmtime_utc_str
            - faprgrm, mtltime, goaltime, goaltype, sbprof, mintfrac
            will also be used to store in FAARGS the list of input arguments of the fba_launch call.
        mydirs: dictionary with the desitarget paths; ideally:
            - sky: sky folder
            - skysupp: skysupp folder
            - gfa: GFA folder
            - targ: targets folder (static catalogs, with all columns)
            - mtl: MTL folder
            - scnd: secondary fits catalog (static)
            - scndmtl: MTL folder for secondary targets
            - too: ToO ecsv catalog
        hdr_survey: value for the SURVEY keyword (string)
        hdr_faprgrm: value for the FAPRGRM keyword (string)
        faflavor: usually {survey}{program} in lower cases (string)
        ebv: median EBV over the tile targets (float)
        obscon: tile allowed observing conditions (string; e.g. "DARK|GRAY|BRIGHT|BACKUP")
        fascript: fba_launch-like script used to design the tile; in case of different scripts for dedicated tiles
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Notes:
        no check is done on mydirs.
        hdr_survey, hdr_faprgrm: for the "regular" surveys (e.g., sv3, main), those will be args.survey, args.program
            but for dedicated survey, they will (have to) be different.
        faflavor has to be {hdr_survey}{hdr_faprgrm}; will exit with an error if not;
            keeping this to be sure it is not forgotten to be done for dedicated programs.
    """
    # AR sanity check on faflavor
    if faflavor != "{}{}".format(hdr_survey, hdr_faprgrm):
        # AR bugfix: fixed typo in log message ("inconsitent" -> "inconsistent")
        log.error(
            "{:.1f}s\t{}\tfaflavor={} inconsistent with hdr_survey={} and hdr_faprgrm={}; exiting".format(
                time() - start, step, faflavor, hdr_survey, hdr_faprgrm,
            )
        )
        sys.exit(1)
    # AR propagating some settings into the PRIMARY header
    fd = fitsio.FITS(fiberassignfn, "rw")
    # AR faflavor
    fd["PRIMARY"].write_key("FAFLAVOR", faflavor)
    # AR folders, with replacing $DESI_ROOT by DESIROOT
    desiroot = os.getenv("DESI_ROOT")
    fd["PRIMARY"].write_key("DESIROOT", desiroot)
    for key in np.sort(list(mydirs.keys())):
        if (key == "mtl") & (isinstance(mydirs["mtl"], list)):
            # AR header keywords: MTL, MTL2, MTL3, etc
            # AR probably to be deprecate for sv2
            suffixs = [""] + np.arange(2, len(mydirs["mtl"]) + 1).astype(str).tolist()
            for mtldir, suffix in zip(mydirs["mtl"], suffixs):
                fd["PRIMARY"].write_key(
                    "mtl{}".format(suffix), mtldir.replace(desiroot, "DESIROOT"),
                )
        else:
            fd["PRIMARY"].write_key(key, mydirs[key].replace(desiroot, "DESIROOT"))
    # AR storing some specific arguments
    # AR plus a (long) FAARGS keyword with storing arguments to re-run the fiber assignment
    # AR we exclude from FAARGS outdir, forcetiled, and any None argument
    tmparr = []
    for kwargs in args._get_kwargs():
        if (kwargs[0].lower() not in ["outdir", "forcetileid"]) & (
            kwargs[1] is not None
        ):
            tmparr += ["--{} {}".format(kwargs[0], kwargs[1])]
    fd["PRIMARY"].write_key(
        "faargs", " ".join(tmparr),
    )
    # AR some keywords
    fd["PRIMARY"].write_key("outdir", args.outdir)
    fd["PRIMARY"].write_key("survey", hdr_survey)  # AR not args.survey!
    fd["PRIMARY"].write_key("rundate", args.rundate)
    fd["PRIMARY"].write_key("pmcorr", args.pmcorr)
    fd["PRIMARY"].write_key("pmtime", args.pmtime_utc_str)
    fd["PRIMARY"].write_key("faprgrm", hdr_faprgrm)  # AR not args.program!
    fd["PRIMARY"].write_key("mtltime", args.mtltime)
    fd["PRIMARY"].write_key("obscon", obscon)
    # AR informations for NTS
    # AR SBPROF from https://desi.lbl.gov/trac/wiki/SurveyOps/SurveySpeed#NominalFiberfracValues
    # AR version 35
    fd["PRIMARY"].write_key("goaltime", args.goaltime)
    fd["PRIMARY"].write_key("goaltype", args.program)
    fd["PRIMARY"].write_key("ebvfac", 10.0 ** (2.165 * np.median(ebv) / 2.5))
    fd["PRIMARY"].write_key("sbprof", args.sbprof)
    fd["PRIMARY"].write_key("mintfrac", args.mintfrac)
    # AR fba_launch-like script name used to design the tile
    fd["PRIMARY"].write_key("fascript", fascript)
    # AR SVN revision number
    fd["PRIMARY"].write_key(
        "svndm", get_svn_version(os.path.join(os.getenv("DESIMODEL"), "data"))
    )
    fd["PRIMARY"].write_key(
        "svnmtl", get_svn_version(os.path.join(os.getenv("DESI_SURVEYOPS"), "mtl"))
    )
    fd.close()
def secure_gzip(
    fiberassignfn, log=Logger.get(), step="", start=time(),
):
    """
    Gzip the fiberassign-TILEID.fits file, first removing any stale .gz copy.
    Args:
        fiberassignfn: path to fiberassign-TILEID.fits file (string)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    """
    # AR pre-compute the gzipped file name
    gzfn = "{}.gz".format(fiberassignfn)
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    # AR delete a pre-existing .gz so that `gzip` does not fail/prompt
    if os.path.isfile(gzfn):
        os.remove(gzfn)
        log.info(
            "{:.1f}s\t{}\tdeleting existing {}.gz".format(
                time() - start, step, fiberassignfn
            )
        )
    os.system("gzip {}".format(fiberassignfn))
    log.info("{:.1f}s\t{}\tgzipping {}".format(time() - start, step, fiberassignfn))
def get_dt_masks(
    survey, log=None, step="", start=time(),
):
    """
    Get the desitarget masks for a survey.
    Args:
        survey: survey name: "sv1", "sv2", "sv3" or "main") (string)
        log (optional, defaults to None): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Returns:
        yaml_masks: dictionary with storing in the
            "DESI_TARGET", "BGS_TARGET", "MWS_TARGET", "SCND_TARGET" keys
            the corresponding desitarget YAML masks for survey
        wd_mskkeys: list of keys identifying the WDs
        wd_msks: list of masks identifying the WDs
        std_mskkeys: list of keys identifying the STDs
        std_msks: list of masks identifying the STDs
    Notes:
        close to desitarget.targets.main_cmx_or_sv,
        but using a dictionary, more adapted to this code
    """
    # AR pick the survey-specific target bit definitions
    if survey == "sv1":
        from desitarget.sv1 import sv1_targetmask as targetmask
    elif survey == "sv2":
        from desitarget.sv2 import sv2_targetmask as targetmask
    elif survey == "sv3":
        from desitarget.sv3 import sv3_targetmask as targetmask
    elif survey == "main":
        from desitarget import targetmask
        # NOTE(review): default_main_stdmask is imported but never used in this
        # function; presumably kept on purpose (side effect / planned use) --
        # confirm before removing
        from fiberassign.targets import default_main_stdmask
    else:
        # AR unknown survey: log (or print if no logger) then abort
        if log is not None:
            log.error(
                "{:.1f}s\t{}\tsurvey={} is not in sv1, sv2, sv3 or main; exiting".format(
                    time() - start, step, survey
                )
            )
        else:
            print("survey={} is not in sv1, sv2, sv3 or main; exiting".format(survey))
        sys.exit(1)
    # AR YAML masks
    yaml_masks = {
        "DESI_TARGET": targetmask.desi_mask,
        "BGS_TARGET": targetmask.bgs_mask,
        "MWS_TARGET": targetmask.mws_mask,
        "SCND_TARGET": targetmask.scnd_mask,
    }
    # AR WD masks: any mask name containing "_WD" in DESI_TARGET or MWS_TARGET
    wd_mskkeys, wd_msks = [], []
    for mskkey in ["DESI_TARGET", "MWS_TARGET"]:
        wd_mskkeys += [mskkey for key in yaml_masks[mskkey].names() if "_WD" in key]
        wd_msks += [key for key in yaml_masks[mskkey].names() if "_WD" in key]
    # AR STD masks: any mask name containing "STD" in DESI_TARGET or MWS_TARGET
    std_mskkeys, std_msks = [], []
    for mskkey in ["DESI_TARGET", "MWS_TARGET"]:
        std_mskkeys += [mskkey for key in yaml_masks[mskkey].names() if "STD" in key]
        std_msks += [key for key in yaml_masks[mskkey].names() if "STD" in key]
    return yaml_masks, wd_mskkeys, wd_msks, std_mskkeys, std_msks
def get_qa_tracers(
    survey, program, log=None, step="", start=time(),
):
    """
    Returns the tracers for which we provide QA plots of fiber assignment.
    Args:
        survey: survey name: "sv1", "sv2", "sv3" or "main") (string)
        program: "DARK", "BRIGHT", or "BACKUP" (string)
        log (optional, defaults to None): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Returns:
        trmskkeys: list of keys to select the mask on (list of strings)
        trmsks: list of mask names (list of strings)
    """
    if program == "DARK":
        # AR the three dark-time primary tracers
        trmskkeys = ["DESI_TARGET"] * 3
        trmsks = ["LRG", "ELG", "QSO"]
    elif program == "BRIGHT":
        # AR BGS tracers, then MWS tracers (sv1 used a different broad-MWS name)
        trmskkeys = ["BGS_TARGET"] * 2 + ["MWS_TARGET"] * 2
        trmsks = ["BGS_BRIGHT", "BGS_FAINT"]
        trmsks += ["MWS_MAIN_BROAD" if survey == "sv1" else "MWS_BROAD", "MWS_NEARBY"]
    elif program == "BACKUP":
        trmskkeys = ["MWS_TARGET"] * 3
        trmsks = ["BACKUP_BRIGHT", "BACKUP_FAINT", "BACKUP_VERY_FAINT"]
    else:
        # AR unknown program: log (or print if no logger) then abort
        if log is not None:
            log.error(
                "{:.1f}s\t{}\tprogram={} not in DARK, BRIGHT, or BACKUP; exiting".format(
                    time() - start, step, program
                )
            )
        else:
            print("program={} not in DARK, BRIGHT, or BACKUP; exiting".format(program))
        sys.exit(1)
    return trmskkeys, trmsks
def get_parent_assign_quants(
    survey,
    targfns,
    fiberassignfn,
    tilera,
    tiledec,
):
    """
    Stores the parent and assigned targets properties (desitarget columns).
    Args:
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        targfns: paths to the input targets fits files, e.g. targ, scnd, too (either a string if only one file, or a list of strings)
        fiberassignfn: path to the output fiberassign-TILEID.fits file (string)
        tilera: tile center R.A. (float)
        tiledec: tile center Dec. (float)
    Returns:
        parent: dictionary of the parent target sample, with each key being some desitarget column
        assign: same as parent, with similar row-ordering (filling with zeros or NaNs if not assigned)
        dras: dictionary with projected distance (degrees) along R.A. to the center of the tile (np.array of floats),
            for each of the following subsample: "parent", "assign", "sky", "bad", "wd", "std" (all assigned subsamples,
            except parent)
        ddecs: same as dras, for projected distances along Dec.
        petals: dictionary with PETAL_LOC (np.array of floats) for each of the assigned "sky", "bad", "wd", "std" subsamples
        nassign: dictionary with the number of assigned fibers for each of the assigned "SKY", "BAD", "TGT", "WD", "STD" subsamples
    """
    # AR convert targfns to list if string (i.e. only one input file)
    if isinstance(targfns, str):
        targfns = [targfns]
    # AR initializing dictionaries
    parent, assign, dras, ddecs, petals, nassign = {}, {}, {}, {}, {}, {}
    # AR YAML and WD and STD masks
    yaml_masks, wd_mskkeys, wd_msks, std_mskkeys, std_msks = get_dt_masks(survey)
    # AR keys we use (plus few for assign)
    keys = [
        "TARGETID",
        "FLUX_G",
        "FLUX_R",
        "FIBERTOTFLUX_R",
        "FLUX_Z",
        "FLUX_W1",
        "FLUX_W2",
        "EBV",
        "GAIA_PHOT_G_MEAN_MAG",
        "RA",
        "DEC",
        "DESI_TARGET",
        "BGS_TARGET",
        "MWS_TARGET",
        "SCND_TARGET",
    ]
    # AR parent: concatenate the columns of all input target files
    for key in keys:
        parent[key] = []
    for targfn in targfns:
        d = fits.open(targfn)[1].data
        for key in keys:
            if key in ["DESI_TARGET", "BGS_TARGET", "MWS_TARGET", "SCND_TARGET",]:
                # AR sv surveys store the bits in SV{N}_-prefixed columns
                if survey.lower()[:2] == "sv":
                    key_orig = "{}_{}".format(survey.upper(), key)
                else:
                    key_orig = key
                if key_orig in d.dtype.names:
                    parent[key] += d[key_orig].tolist()
                else:
                    # AR column absent from this file: pad with zeros (no bits set)
                    parent[key] += [0 for x in d["RA"]]
            # AR flux, ebv for secondary
            elif key not in d.dtype.names:
                parent[key] += [0.0 for x in d["RA"]]
            else:
                parent[key] += d[key].tolist()
    for key in keys:
        parent[key] = np.array(parent[key])
    dras["parent"], ddecs["parent"] = get_tpos(
        tilera, tiledec, parent["RA"], parent["DEC"]
    )
    # AR fiberassign
    d = fits.open(fiberassignfn)[1].data
    # AR counts per OBJTYPE
    for key in ["SKY", "BAD", "TGT"]:
        nassign[key] = (d["OBJTYPE"] == key).sum()
    # AR SKY
    keep = d["OBJTYPE"] == "SKY"
    dras["sky"], ddecs["sky"] = get_tpos(
        tilera, tiledec, d["TARGET_RA"][keep], d["TARGET_DEC"][keep]
    )
    petals["sky"] = d["PETAL_LOC"][keep]
    # AR BAD
    keep = d["OBJTYPE"] == "BAD"
    dras["bad"], ddecs["bad"] = get_tpos(
        tilera, tiledec, d["TARGET_RA"][keep], d["TARGET_DEC"][keep]
    )
    petals["bad"] = d["PETAL_LOC"][keep]
    # AR TGT
    # AR arrays twinning the parent ordering, with nans/zeros
    # AR e.g. SV2_DESI_TARGET -> DESI_TARGET
    d = d[d["OBJTYPE"] == "TGT"]
    # AR TARGETIDs are unique in both arrays, so we can use geomask
    iip, ii = match(parent["TARGETID"], d["TARGETID"])
    # AR fiberassign files use TARGET_RA/TARGET_DEC instead of RA/DEC
    keys = [key for key in keys if key != "RA" and key != "DEC"]
    keys += [
        "TARGET_RA",
        "TARGET_DEC",
        "PETAL_LOC",
    ]
    for key in keys:
        if key in [
            "TARGETID",
            "DESI_TARGET",
            "BGS_TARGET",
            "MWS_TARGET",
            "SCND_TARGET",
        ]:
            # AR integer columns: 0 for non-assigned rows
            assign[key] = np.zeros(len(parent["TARGETID"]), dtype=int)
            if (key != "TARGETID") & (survey.lower()[:2] == "sv"):
                assign[key][iip] = d["{}_{}".format(survey.upper(), key)][ii]
            else:
                assign[key][iip] = d[key][ii]
        else:
            # AR float columns: NaN for non-assigned rows
            assign[key] = np.nan + np.zeros(len(parent["TARGETID"]))
            assign[key][iip] = d[key][ii]
    dras["assign"], ddecs["assign"] = get_tpos(
        tilera, tiledec, assign["TARGET_RA"], assign["TARGET_DEC"]
    )
    # AR WD: union of all WD masks over the assigned sample
    keep = np.zeros(len(assign["TARGET_RA"]), dtype=bool)
    for mskkey, msk in zip(wd_mskkeys, wd_msks):
        keep |= (assign[mskkey] & yaml_masks[mskkey][msk]) > 0
    dras["wd"], ddecs["wd"] = get_tpos(
        tilera, tiledec, assign["TARGET_RA"][keep], assign["TARGET_DEC"][keep]
    )
    petals["wd"] = assign["PETAL_LOC"][keep]
    nassign["WD"] = keep.sum()
    # AR STD: union of all STD masks over the assigned sample
    keep = np.zeros(len(assign["TARGET_RA"]), dtype=bool)
    for mskkey, msk in zip(std_mskkeys, std_msks):
        keep |= (assign[mskkey] & yaml_masks[mskkey][msk]) > 0
    dras["std"], ddecs["std"] = get_tpos(
        tilera, tiledec, assign["TARGET_RA"][keep], assign["TARGET_DEC"][keep]
    )
    petals["std"] = assign["PETAL_LOC"][keep]
    nassign["STD"] = keep.sum()
    return parent, assign, dras, ddecs, petals, nassign
def print_assgn_parent_stats(
    survey, parent, assign, log=Logger.get(), step="", start=time(),
):
    """
    Prints for each mask the number of parent and assigned targets, and also the fraction of assigned targets.
    Args:
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        parent: dictionary for the parent target sample (output by get_parent_assign_quants())
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    """
    # AR desitarget YAML masks for this survey
    yaml_masks, _, _, _, _ = get_dt_masks(survey, log=log, step=step, start=start,)
    # AR sv surveys report the bits under SV{N}_-prefixed column names
    is_sv = survey.lower()[:2] == "sv"
    log.info("======= ASSIGNMENT STATISTICS : START =======")
    log.info("# MASKKEY\tMASK\tPARENT\tASSIGN\tFRACTION")
    for mskkey in list(yaml_masks.keys()):
        mskkey_orig = "{}_{}".format(survey.upper(), mskkey) if is_sv else mskkey
        msk_def = yaml_masks[mskkey]
        for msk in msk_def.names():
            bit = msk_def[msk]
            nparent = ((parent[mskkey] & bit) > 0).sum()
            nassign = ((assign[mskkey] & bit) > 0).sum()
            # AR avoid dividing by zero when the tile has no such parent target
            frac = nassign / nparent if nparent != 0 else 0.0
            log.info(
                "{}\t{}\t{}\t{}\t{:.2f}".format(
                    mskkey_orig, msk, nparent, nassign, frac
                )
            )
    log.info("======= ASSIGNMENT STATISTICS : END =======")
def get_ext_coeffs(band):
    """
    Returns the extinction coefficient for a given band.
    Args:
        band: band name: "G", "R", "Z", "W1", or "W2" (string)
    Returns:
        ext: extinction coefficient (float)
    Note:
        https://www.legacysurvey.org/dr9/catalogs/#galactic-extinction-coefficients
    """
    # AR DR9 galactic extinction coefficients, per band
    coeff_by_band = {
        "G": 3.214,
        "R": 2.165,
        "Z": 1.211,
        "W1": 0.184,
        "W2": 0.113,
    }
    return coeff_by_band[band]
def flux2mag(flux, band=None, ebv=None):
    """
    Converts a flux to a (optionally extinction-corrected) magnitude.
    Args:
        flux: flux in Nmgy (np.array of floats)
        band (optional, defaults to None): band name: "G", "R", "Z", "W1", or "W2" (string)
        ebv (optional, defaults to None): EBV values (np.array of floats)
    Returns:
        mag: AB magnitudes (np.array of floats); extinction-corrected if band and ebv not None
    Notes:
        flux <= 0 or NaN values are converted to NaN in magnitudes
    """
    # AR start from all-NaN, then fill the valid entries
    mags = np.full(len(flux), np.nan)
    # AR np.nan_to_num maps NaN fluxes to 0, so they are excluded here
    positive = np.nan_to_num(flux) > 0
    mags[positive] = 22.5 - 2.5 * np.log10(flux[positive])
    if ebv is not None:
        # AR apply galactic extinction correction
        mags = mags - get_ext_coeffs(band) * ebv
    return mags
def qa_print_infos(
    ax,
    survey,
    program,
    faflavor,
    tileid,
    tilera,
    tiledec,
    obscon,
    rundate,
    parent,
    assign,
):
    """
    Print general fiber assignment infos on the QA plot.
    Args:
        ax: pyplot object
        survey: "sv1", "sv2", "sv3" or "main" (string)
        program: "DARK", "BRIGHT", or "BACKUP" (string)
        faflavor: usually {survey}{program} in lower cases (string)
        tileid: tile TILEID (int)
        tilera: tile center R.A. in degrees (float)
        tiledec: tile center Dec. in degrees (float)
        obscon: tile allowed observing conditions (string; e.g. "DARK|GRAY|BRIGHT|BACKUP")
        rundate: used rundate (string)
        parent: dictionary for the parent target sample (output by get_parent_assign_quants())
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
    """
    # AR hard-setting the plotted tracers
    # AR TBD: handle secondaries
    trmskkeys, trmsks = get_qa_tracers(survey, program)
    # AR masks
    yaml_masks, wd_mskkeys, wd_msks, std_mskkeys, std_msks = get_dt_masks(survey)
    # AR infos : general
    x, y, dy, fs = 0.05, 0.95, -0.1, 10
    for t in [
        "flavor={}".format(faflavor),
        "TILEID={:06d}".format(tileid),
        "RA,DEC={:.3f},{:.3f}".format(tilera, tiledec),
        "obscon={}".format(obscon),
        "rundate={}".format(rundate),
        "",
    ]:
        ax.text(x, y, t.expandtabs(), fontsize=fs, transform=ax.transAxes)
        y += dy
    # AR infos: wd/std + tracers
    xs = [0.05, 0.65, 0.95, 1.20]
    has = ["left", "right", "right", "right"]
    tracers = []
    for mskkey, msk in zip(wd_mskkeys + std_mskkeys, wd_msks + std_msks):
        n = ((assign[mskkey] & yaml_masks[mskkey][msk]) > 0).sum()
        tracers += [[msk, "{}".format(n), "", ""]]
    tracers += [["", "", "", ""], ["MASK", "ASSGN", "PARENT", "FAFRAC"]]
    for msk, mskkey in zip(trmsks, trmskkeys):
        nparent = ((parent[mskkey] & yaml_masks[mskkey][msk]) > 0).sum()
        n = ((assign[mskkey] & yaml_masks[mskkey][msk]) > 0).sum()
        # AR bugfix: guard nparent == 0 (previously printed inf/nan);
        # AR same convention as print_assgn_parent_stats() (fraction = 0)
        if nparent == 0:
            frac = 0.0
        else:
            frac = n / nparent
        tracers += [
            [msk, "{}".format(n), "{}".format(nparent), "{:.2f}".format(frac),]
        ]
    tracers += [["", "", "", ""]]
    for tracer in tracers:
        for i in range(4):
            ax.text(
                xs[i],
                y,
                tracer[i].expandtabs(),
                fontsize=fs,
                ha=has[i],
                transform=ax.transAxes,
            )
        y += dy
    # AR infos: brightest target and assigned object
    # AR infos: taking a default 16, in case new programs are added
    magthresh = 16.0
    if program == "DARK":
        # AR bugfix: was assigned to "magthres" (typo), so the value was never used
        magthresh = 16.0
    if program == "BRIGHT":
        magthresh = 15.0
    if program == "BACKUP":
        magthresh = 15.0
    for sample, d in zip(["parent", "assgn"], [parent, assign]):
        ax.text(
            0.05, y, "Min. {} mag ".format(sample), fontsize=fs, transform=ax.transAxes
        )
        y += dy
        for mag, lab in zip(
            [d["GAIA_PHOT_G_MEAN_MAG"], flux2mag(d["FIBERTOTFLUX_R"])],
            # AR bugfix: first label was "GAIA_PHOT_G_MEAN_MAG)" (missing "min(")
            ["min(GAIA_PHOT_G_MEAN_MAG)", "min(LS-R-FIBTOTMAG)"],
        ):
            magmin, color = "-", "k"
            # np.nan_to_num: NaN,Inf -> 0, so keep=False.
            keep = (np.nan_to_num(mag, posinf=0., neginf=0.) > 0)
            if keep.sum() > 0:
                magmin = mag[keep].min()
                # AR flag in red any suspiciously bright object
                if magmin < magthresh:
                    magmin, color = "{:.1f}".format(magmin), "r"
                else:
                    magmin, color = "{:.1f}".format(magmin), "k"
            ax.text(
                0.05,
                y,
                "{} = {}".format(lab, magmin),
                fontsize=fs,
                color=color,
                transform=ax.transAxes,
            )
            y += dy
        y += dy
def qa_print_petal_infos(
    ax, petals, assign,
):
    """
    Print general the assigned SKY, BAD, WD, STD, TGT per petal on the QA plot.
    Args:
        ax: pyplot object
        petals: dictionary with PETAL_LOC (np.array of floats) for each of the assigned "sky", "bad", "wd", "std" subsamples
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
    """
    # AR stats per petal
    xs = [0.05, 0.25, 0.45, 0.65, 0.85, 1.05]
    ts = ["PETAL", "NSKY", "NBAD", "NWD", "NSTD", "NTGT"]
    y, dy = 0.95, -0.1
    fs = 10
    # AR column headers
    for i in range(6):
        ax.text(xs[i], y, ts[i], fontsize=fs, ha="center", transform=ax.transAxes)
    y += dy
    for p in range(10):
        # AR flag in red any petal with no assigned standard star
        if (petals["std"] == p).sum() == 0:
            color = "r"
        else:
            color = "k"
        ts = [
            "{:.0f}".format(p),
            "{:.0f}".format((petals["sky"] == p).sum()),
            "{:.0f}".format((petals["bad"] == p).sum()),
            "{:.0f}".format((petals["wd"] == p).sum()),
            "{:.0f}".format((petals["std"] == p).sum()),
            "{:.0f}".format((assign["PETAL_LOC"] == p).sum()),
        ]
        for i in range(6):
            ax.text(
                xs[i],
                y,
                ts[i],
                color=color,
                fontsize=fs,
                ha="center",
                transform=ax.transAxes,
            )
        y += dy
    # AR stats for all petals
    ts = [
        "ALL",
        "{:.0f}".format(len(petals["sky"])),
        "{:.0f}".format(len(petals["bad"])),
        "{:.0f}".format(len(petals["wd"])),
        "{:.0f}".format(len(petals["std"])),
        "{:.0f}".format(np.isfinite(assign["PETAL_LOC"]).sum()),
    ]
    for i in range(6):
        ax.text(
            xs[i],
            y,
            ts[i],
            # AR bugfix: previously re-used the stale `color` from the last
            # AR petal of the loop above (so the ALL row turned red iff
            # AR petal 9 had no standards); the summary row is always black
            color="k",
            fontsize=fs,
            ha="center",
            transform=ax.transAxes,
        )
def get_viewer_cutout(
    tileid,
    tilera,
    tiledec,
    tmpoutdir=tempfile.mkdtemp(),
    width_deg=4,
    pixscale=10,
    dr="dr9",
    timeout=15,
):
    """
    Downloads a cutout of the tile region from legacysurvey.org/viewer.
    Args:
        tileid: tile TILEID (int)
        tilera: tile center R.A. (float)
        tiledec: tile center Dec. (float)
        tmpoutdir (optional, defaults to a temporary directory): temporary directory where
            the cutout is downloaded (string)
            NOTE(review): the default is evaluated once at import time, so all
            default calls share a single directory -- confirm this is intended
        width_deg (optional, defaults to 4): width of the cutout in degrees (float)
        pixscale (optional, defaults to 10): pixel scale of the cutout
        dr (optional, default do "dr9"): imaging data release
        timeout (optional, defaults to 15): time (in seconds) after which we quit the wget call (int)
    Returns:
        img: output of mpimg.imread() reading of the cutout (np.array of floats);
            an all-zeros (size, size, 3) array if the download or read failed
    """
    # AR cutout; os.path.join handles tmpoutdir with or without trailing slash
    tmpfn = os.path.join(tmpoutdir, "tmp-{}.jpeg".format(tileid))
    size = int(width_deg * 3600.0 / pixscale)
    layer = "ls-{}".format(dr)
    # AR bugfix: the first two .format() arguments were swapped (tmpfn was
    # AR passed to `timeout {}` and timeout to `-O {}`), yielding an invalid command
    tmpstr = 'timeout {} wget -q -O {} "http://legacysurvey.org/viewer-dev/jpeg-cutout/?layer={}&ra={:.5f}&dec={:.5f}&pixscale={:.0f}&size={:.0f}"'.format(
        timeout, tmpfn, layer, tilera, tiledec, pixscale, size
    )
    try:
        subprocess.check_call(tmpstr, stderr=subprocess.DEVNULL, shell=True)
    except subprocess.CalledProcessError:
        print("no cutout from viewer after {}s, stopping the wget call".format(timeout))
    # AR fall back to a black image if the cutout is missing or unreadable
    try:
        img = mpimg.imread(tmpfn)
    except Exception:
        img = np.zeros((size, size, 3))
    if os.path.isfile(tmpfn):
        os.remove(tmpfn)
    return img
def mycmap(name, n, cmin=0, cmax=1):
    """
    Defines a quantised color scheme.
    Args:
        name: matplotlib colormap name (used through: matplotlib.cm.get_cmap(name)) (string)
        n: number of different colors to be in the color scheme (int)
        cmin (optional, defaults to 0): flooring "color-value" (float)
        cmax (optional, defaults to 1): ceiling "color-value" (float)
    Returns:
        The quantised color map.
    Notes:
        https://matplotlib.org/examples/api/colorbar_only.html
    """
    # AR sample n evenly-spaced colors from the continuous colormap
    base = matplotlib.cm.get_cmap(name)
    colors = base(np.linspace(cmin, cmax, n))
    quantised = matplotlib.colors.ListedColormap(colors)
    # AR clamp out-of-range values to the edge colors
    quantised.set_under(colors[0])
    quantised.set_over(colors[-1])
    return quantised
def get_tpos(tilera, tiledec, ras, decs):
    """
    Computes the projected distance of a set of coordinates to a tile center.
    Args:
        tilera: tile center R.A. in degrees (float)
        tiledec: tile center Dec. in degrees (float)
        ras: R.A. in degrees (np.array of floats)
        decs: Dec. in degrees (np.array of floats)
    Returns:
        dras: projected distance (degrees) to the tile center along R.A. (np.array of floats)
        ddecs: projected distance (degrees) to the tile center along Dec. (np.array of floats)
    """
    # AR spherical offsets from the tile center to each target
    center = SkyCoord(ra=tilera * units.deg, dec=tiledec * units.deg, frame="icrs")
    targets = SkyCoord(ra=ras * units.deg, dec=decs * units.deg, frame="icrs")
    off_ra, off_dec = center.spherical_offsets_to(targets)
    return off_ra.value, off_dec.value
def deg2pix(dras, ddecs, width_deg, width_pix):
    """
    Converts (dras,ddecs) to (xs,ys) in cutout img pixels.
    Args:
        dras: projected distance (degrees) along R.A. to the center of the cutout (np.array of floats)
        ddecs: projected distance (degrees) along Dec. to the center of the cutout (np.array of floats)
        width_deg: width of the cutout in degrees (np.array of floats)
        width_pix: width of the cutout in pixels (np.array of floats)
    Returns:
        dxs: distance (pixels) along x to the center of the cutout (np.array of floats)
        dys: distance (pixels) along y to the center of the cutout (np.array of floats)
    Notes:
        not sure at the <1 pixel level...
    """
    # AR x axis is flipped (R.A. increases leftwards on the sky image)
    dxs = (0.5 - dras / width_deg) * width_pix
    dys = (0.5 + ddecs / width_deg) * width_pix
    return dxs, dys
def plot_cutout(
    ax,
    img,
    width_deg,
    dras,
    ddecs,
    dopetal=False,
    c="w",
    alpha=None,
    txts=None,
    xtxts=None,
    ytxts=None,
    vmin=None,
    vmax=None,
    cmap=mycmap("coolwarm", 10, 0, 1),
):
    """
    Plots a ls-dr9 cutout, with overlaying targets coordinates.
    Args:
        ax: pyplot object
        img: mpimg.imread(ls-dr9-cutout)
        width_deg: width of the cutout in degrees (np.array of floats)
        dras: targets projected distance (degrees) along R.A. to the center of the cutout (np.array of floats)
        ddecs: targets projected distance (degrees) along Dec. to the center of the cutout (np.array of floats)
        dopetal (optional, defaults to False): overplot petals? (boolean)
        c (optional, defaults to "w"): color used to display targets (string)
        alpha (optional, defaults to None): pyplot alpha
        txts (optional, defaults to None): list of text to display (list of strings)
        xtxts (optional, defaults to None): list normalized x-positions of text to display (list of strings)
        ytxts (optional, defaults to None): list normalized y-positions of text to display (list of strings)
        vmin (optional, defaults to None): minimum value for the colorbar
        vmax (optional, defaults to None): maximum value for the colorbar
        cmap (optional, defaults to mycmap("coolwarm", 10, 0, 1)): colormap scheme
    """
    # AR txts, xtxts, ytxts : lists
    # AR setting transparency as a function of density /deg2
    if (dras is not None) & (alpha is None):
        tmpdens = np.array([0, 100, 500, 1000, 5000, 7500, 1e10],)
        tmpalph = np.array([1, 0.8, 0.5, 0.2, 0.1, 0.05, 0.025])
        # AR tile_radius_deg: module-level constant (tile radius in degrees)
        alpha = tmpalph[
            np.where(tmpdens > len(dras) / (np.pi * tile_radius_deg ** 2))[0][0]
        ]
    width_pix = img.shape[0]
    ax.imshow(
        img,
        origin="upper",
        zorder=0,
        extent=[0, width_pix, 0, width_pix],
        aspect="equal",
    )
    ax.set_aspect("equal")
    ax.set_xlim(-0.5, width_pix + 0.5)
    ax.set_ylim(-0.5, width_pix + 0.5)
    # AR data points
    if dras is not None:
        # AR rescaling degrees to img pixels ; not sure at <1 pixel...
        dxs, dys = deg2pix(dras, ddecs, width_deg, width_pix)
        if isinstance(c, str):
            ax.scatter(dxs, dys, c=c, s=1, alpha=alpha)
        else:
            # AR bugfix: was `cmap=cm` (undefined in this function), so the
            # AR `cmap` argument was silently ignored
            ax.scatter(dxs, dys, c=c, s=1, alpha=alpha, vmin=vmin, vmax=vmax, cmap=cmap)
    # AR per petal infos
    if dopetal:
        # AR draw the 10 petal boundaries and label each petal
        for ang, p in zip(
            np.linspace(2 * np.pi, 0, 11), [7, 8, 9, 0, 1, 2, 3, 4, 5, 6]
        ):
            dxs, dys = deg2pix(
                np.array([0, tile_radius_deg * np.cos(ang)]),
                np.array([0, tile_radius_deg * np.sin(ang)]),
                width_deg,
                width_pix,
            )
            ax.plot(
                dxs, dys, c="r", lw=0.25, alpha=1.0, zorder=1,
            )
            # AR petal label slightly outside the tile radius
            anglab = ang + 0.1 * np.pi
            dxs, dys = deg2pix(
                1.1 * tile_radius_deg * np.cos(anglab),
                1.1 * tile_radius_deg * np.sin(anglab),
                width_deg,
                width_pix,
            )
            ax.text(
                dxs, dys, "{:.0f}".format(p), color="r", va="center", ha="center",
            )
    ax.axis("off")
    if txts is not None:
        for txt, xtxt, ytxt in zip(txts, xtxts, ytxts):
            ax.text(
                xtxt,
                ytxt,
                txt,
                color="y",
                fontweight="bold",
                fontsize=10,
                ha="center",
                va="top",
                transform=ax.transAxes,
            )
def plot_hist(ax, mags, magps, msk):
    """
    Plots a normalized histogram for the assigned magnitudes (mags) and the parent magnitudes (magps).
    Args:
        ax: pyplot object
        mags: assigned magnitudes (np.array of floats)
        magps: parent magnitudes (np.array of floats)
        msk: mask name of the plotted sample
    """
    # AR keep finite magnitudes only
    fin_parent = np.isfinite(magps)
    fin_assign = np.isfinite(mags)
    # AR 25 bins spanning the parent magnitude range
    bins = np.linspace(magps[fin_parent].min(), magps[fin_parent].max(), 26)
    # AR parent histogram (thick, semi-transparent)
    parent_counts, _, _ = ax.hist(
        magps[fin_parent],
        bins=bins,
        histtype="step",
        alpha=0.3,
        lw=3,
        color="k",
        density=False,
        label="{} parent ({})".format(msk, len(magps)),
    )
    # AR assigned histogram (thin, opaque)
    assign_counts, _, _ = ax.hist(
        mags[fin_assign],
        bins=bins,
        histtype="step",
        alpha=1.0,
        lw=1.0,
        color="k",
        density=False,
        label="{} assigned ({})".format(msk, len(mags)),
    )
    ax.set_ylabel("counts")
    ax.grid(True)
    # AR assigned/parent ratio on a right-hand red axis
    axr = ax.twinx()
    centers = 0.5 * (bins[1:] + bins[:-1])
    axr.plot(
        centers,
        np.array(assign_counts) / np.array(parent_counts).astype(float),
        color="r",
        lw=0.5,
    )
    axr.yaxis.label.set_color("r")
    axr.tick_params(axis="y", colors="r")
    axr.set_ylabel("ratio", labelpad=0)
    axr.set_ylim(0, 1)
    # AR annotate with the sample name and the assigned/parent counts
    labels = [msk, "assigned/parent = {}/{}".format(len(mags), len(magps))]
    for txt, xtxt, ytxt in zip(labels, [0.5, 0.5], [0.98, 0.90]):
        ax.text(
            xtxt,
            ytxt,
            txt,
            color="k",
            fontweight="bold",
            fontsize=10,
            ha="center",
            va="top",
            transform=ax.transAxes,
        )
def get_qa_farange(fafrac, dfa=0.2):
    """
    Picks the plotted fiber assignment rate range for the QA plot.
    Args:
        fafrac: fiber assignment rate for the plotted sample (float)
        dfa (optional, defaults to 0.2): plotted range (float)
    Returns:
        famin: lower boundary of the plotted fiber assignment rate (float)
        famax: upper boundary of the plotted fiber assignment rate (float)
    """
    # AR center a dfa-wide window on fafrac, rounded to 0.1, clipped to [0, 1]
    half = dfa / 2
    lower = np.round(fafrac - half, 1)
    upper = np.round(fafrac + half, 1)
    famin = np.max([0, lower])
    famax = np.min([1, upper])
    return famin, famax
def plot_hist_tracer(ax, survey, parent, assign, msk, mskkey):
    """
    Plots a normalized histogram for the assigned magnitudes and the parent magnitudes
    of one tracer (mask) selection.
    Args:
        ax: pyplot object
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        parent: dictionary for the parent target sample (output by get_parent_assign_quants())
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
        msk: mask name of the plotted sample (string)
        mskkey: key to select the mask on (string)
    """
    # AR YAML mask dictionary
    yaml_masks, _, _, _, _ = get_dt_masks(survey)
    # AR selecting the relevant tracer
    if mskkey in list(parent.keys()):
        mskpsel = (parent[mskkey] & yaml_masks[mskkey][msk]) > 0
    else:
        # AR mskkey absent: empty selection
        mskpsel = np.zeros(len(parent["TARGETID"]), dtype=bool)
    # AR if no parent target, just skip
    if mskpsel.sum() > 0:
        msksel = (assign[mskkey] & yaml_masks[mskkey][msk]) > 0
        # AR famin/famax currently unused downstream; kept as computed
        famin, famax = get_qa_farange(msksel.sum() / float(mskpsel.sum()))
        # AR mag hist: pick the band used to histogram this tracer
        band = "R"
        if "MWS" in msk:
            band = "R"
        if "BGS" in msk:
            band = "R"
        if "LRG" in msk:
            band = "Z"
        if "ELG" in msk:
            band = "G"
        if "QSO" in msk:
            band = "R"
        #
        dohist = 0
        # AR if ls-dr9 flux is here, we plot that
        if ((parent["FLUX_{}".format(band)] > 0) & (mskpsel)).sum() > 0:
            dohist = 1
            # AR parent
            magp = flux2mag(
                parent["FLUX_{}".format(band)][mskpsel],
                band=band,
                ebv=parent["EBV"][mskpsel],
            )
            # AR assign
            mag = flux2mag(
                assign["FLUX_{}".format(band)][msksel],
                band=band,
                ebv=assign["EBV"][msksel],
            )
            # AR xlabel
            ax.set_xlabel(
                "22.5 - 2.5*log10(FLUX_{}) - {:.3f} * EBV".format(
                    band, get_ext_coeffs(band)
                )
            )
        # AR if no ls-dr9 flux, we try gaia_g
        elif ((np.isfinite(parent["GAIA_PHOT_G_MEAN_MAG"])) & (mskpsel)).sum() > 0:
            dohist = 1
            magp = parent["GAIA_PHOT_G_MEAN_MAG"][mskpsel]
            mag = assign["GAIA_PHOT_G_MEAN_MAG"][msksel]
            ax.set_xlabel("GAIA_PHOT_G_MEAN_MAG")
        if dohist == 1:
            plot_hist(ax, mag, magp, msk)
            # AR log-scale y-axis with generous headroom for the labels
            _, ymax = ax.get_ylim()
            ax.set_ylim(0.8, 100 * ymax)
            ax.set_yscale("log")
def plot_colcol_tracer(
    ax,
    xbands,
    ybands,
    survey,
    parent,
    assign,
    msk,
    mskkey,
    xlim,
    ylim,
    gridsize=20,
    cm=mycmap("coolwarm", 10, 0, 1),
):
    """
    Plots a color-color diagram, with color-coding with the fiber assignment rate,
    and transparency-coding the density.
    Args:
        ax: pyplot object
        xbands: two-elements list, the x-axis color being xbands[0] - xbands[1] (list of strings)
        ybands: two-elements list, the y-axis color being ybands[0] - ybands[1] (list of strings)
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        parent: dictionary for the parent target sample (output by get_parent_assign_quants())
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
        msk: mask name of the plotted sample (string)
        mskkey: key to select the mask on (string)
        xlim: plt.xlim
        ylim: plt.ylim
        gridsize (optional, defaults to 20): plt.hexbin gridsize parameter (int)
        cm (optional, defaults to mycmap("coolwarm", 10, 0, 1)): colormap scheme
    Notes:
        Does nothing if no parent target of (mskkey, msk) has valid fluxes
        in all four bands.
    """
    # AR YAML mask dictionary
    yaml_masks, _, _, _, _ = get_dt_masks(survey)
    # AR selecting the relevant tracer
    if mskkey in list(parent.keys()):
        mskpsel = (parent[mskkey] & yaml_masks[mskkey][msk]) > 0
    else:
        mskpsel = np.zeros(len(parent["TARGETID"]), dtype=bool)
    # AR plotting if some parent objects with valid colors
    # AR (require a strictly positive flux in each of the four bands)
    keep = mskpsel.copy()
    for band in [xbands[0], xbands[1], ybands[0], ybands[1]]:
        keep &= parent["FLUX_{}".format(band)] > 0
    if keep.sum() > 0:
        # AR
        msksel = (assign[mskkey] & yaml_masks[mskkey][msk]) > 0
        # AR using a dictionary
        # AR tmpdict[sample][axis] holds the (extinction-corrected) color
        tmpdict = {"parent": {}, "assign": {}}
        for sample_name, sample, sel in zip(
            ["parent", "assign"], [parent, assign], [mskpsel, msksel]
        ):
            for axis_name, bands in zip(["x", "y"], [xbands, ybands]):
                mag0 = flux2mag(
                    sample["FLUX_{}".format(bands[0])][sel],
                    band=bands[0],
                    ebv=sample["EBV"][sel],
                )
                mag1 = flux2mag(
                    sample["FLUX_{}".format(bands[1])][sel],
                    band=bands[1],
                    ebv=sample["EBV"][sel],
                )
                tmpdict[sample_name][axis_name] = mag0 - mag1
    # AR first getting the hexbin outputs
    # AR both hexbins use visible=False: they are used only to bin the data
    # AR on an identical grid; the actual plotting is done with ax.scatter below
        hbp = ax.hexbin(
            tmpdict["parent"]["x"],
            tmpdict["parent"]["y"],
            C=None,
            gridsize=gridsize,
            extent=(xlim[1], xlim[0], ylim[0], ylim[1]),
            mincnt=0,
            visible=False,
        )
        hb = ax.hexbin(
            tmpdict["assign"]["x"],
            tmpdict["assign"]["y"],
            C=None,
            gridsize=gridsize,
            extent=(xlim[1], xlim[0], ylim[0], ylim[1]),
            mincnt=0,
            visible=False,
        )
        # AR restricting to pixels with some parent data
        # AR (note: this re-uses / shadows the earlier per-target "keep" array)
        keep = hbp.get_array() > 0
        tmpx = hb.get_offsets()[keep, 0]
        tmpy = hb.get_offsets()[keep, 1]
        tmpc = hb.get_array()[keep]
        tmpcp = hbp.get_array()[keep].astype(float)
        # AR fraction assigned, clipped to famin,famax
        fafrac = msksel.sum() / float(mskpsel.sum())
        famin, famax = get_qa_farange(fafrac)
        # AR per-pixel assigned fraction mapped onto the colormap
        c = cm(np.clip(((tmpc / tmpcp) - famin) / (famax - famin), 0, 1))
        # AR transparency = f(nb of parent obj)
        # AR alpha ramps from ~0 at 1 parent object up to 1 at 1.2x the mean count
        tmpmin, tmpmax = (
            1,
            1.2 * tmpcp.sum() / float(len(hbp.get_array())),
        )
        c[:, 3] = np.clip((tmpcp - tmpmin) / (tmpmax - tmpmin), 0, 1)
        sc = ax.scatter(tmpx, tmpy, c=c, s=15,)
        # AR attach the colormap so the colorbar below renders with it
        sc.cmap = cm
        ax.set_xlabel("{} - {}".format(xbands[0].lower(), xbands[1].lower()))
        ax.set_ylabel("{} - {}".format(ybands[0].lower(), ybands[1].lower()))
        ax.set_xlim(xlim)
        ax.set_ylim(ylim)
        ax.grid(True)
        ax.text(
            0.5,
            0.93,
            msk,
            color="k",
            fontweight="bold",
            fontsize=10,
            ha="center",
            transform=ax.transAxes,
        )
        cbar = plt.colorbar(sc)
        cbar.set_label("fraction assigned")
        cbar.mappable.set_clim(famin, famax)
def plot_sky_fa(
    axs,
    img,
    survey,
    parent,
    assign,
    dras,
    ddecs,
    msk,
    mskkey,
    width_deg,
    gridsize=30,
    cm=mycmap("coolwarm", 10, 0, 1),
):
    """
    Plots the sky distribution of the parent sample, the assigned sample, and of the fiber assignment rate.
    Args:
        axs: list of 3 pyplot objects, respectively for the parent sample, the assigned sample, and the fiber assignment rate
        img: mpimg.imread(ls-dr9-cutout)
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        parent: dictionary for the parent target sample (output by get_parent_assign_quants())
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
        dras: dictionary with projected distance (degrees) along R.A. to the center of the tile (np.array of floats),
            for each of the following subsample: "parent", "assign", "sky", "bad", "wd", "std" (all assigned subsamples,
            except parent)
        ddecs: same as dras, for projected distances along Dec.
        msk: mask name of the plotted sample (string)
        mskkey: key to select the mask on (string)
        width_deg: width of the cutout in degrees (np.array of floats)
        gridsize (optional, defaults to 30): plt.hexbin gridsize parameter (int)
        cm (optional, defaults to mycmap("coolwarm", 10, 0, 1)): colormap scheme
    Notes:
        Does nothing if the parent sample has no target for (mskkey, msk).
    """
    # AR YAML mask dictionary
    yaml_masks, _, _, _, _ = get_dt_masks(survey)
    # AR selecting the relevant tracer
    if mskkey in list(parent.keys()):
        mskpsel = (parent[mskkey] & yaml_masks[mskkey][msk]) > 0
    else:
        mskpsel = np.zeros(len(parent["TARGETID"]), dtype=bool)
    if mskpsel.sum() > 0:
        # AR assign sample tracer selection
        msksel = (assign[mskkey] & yaml_masks[mskkey][msk]) > 0
        # AR xlim, ylim (R.A. axis is reversed: east to the left)
        xlim = (width_deg / 2, -width_deg / 2)
        ylim = (-width_deg / 2, width_deg / 2)
        # AR area of the plotting window in deg2
        plot_area = (xlim[0] - xlim[1]) * (ylim[1] - ylim[0])
        # AR parent (densities are per deg2, using the module-level tile_area)
        plot_cutout(
            axs[0],
            img,
            width_deg,
            dras["parent"][mskpsel],
            ddecs["parent"][mskpsel],
            dopetal=True,
            txts=[
                msk,
                "parent : {:.0f}".format(mskpsel.sum() / tile_area) + r" deg$^{-2}$",
            ],
            xtxts=[0.5, 0.5],
            ytxts=[0.98, 0.1],
        )
        # AR assigned
        plot_cutout(
            axs[1],
            img,
            width_deg,
            dras["assign"][msksel],
            ddecs["assign"][msksel],
            dopetal=True,
            txts=[
                msk,
                "assigned : {:.0f}".format(msksel.sum() / tile_area) + r" deg$^{-2}$",
            ],
            xtxts=[0.5, 0.5],
            ytxts=[0.98, 0.1],
        )
        # AR fraction assigned, clipped to famin,famax
        fafrac = msksel.sum() / float(mskpsel.sum())
        famin, famax = get_qa_farange(fafrac)
        txts = [msk, r"mean = {:.2f}".format(fafrac)]
        xtxts = [0.5, 0.5]
        ytxts = [0.93, 0.03]
        # AR assigned fraction
        # AR hexbin of the parent positions, colored by the per-pixel mean of
        # AR the boolean "is this parent target assigned?" (i.e. the FA rate)
        ax = axs[2]
        x = dras["parent"][mskpsel]
        y = ddecs["parent"][mskpsel]
        C = np.in1d(parent["TARGETID"][mskpsel], assign["TARGETID"][msksel])
        hb = ax.hexbin(
            x,
            y,
            C=C,
            gridsize=gridsize,
            extent=(xlim[1], xlim[0], ylim[0], ylim[1]),
            mincnt=1,
            alpha=0.5,
            vmin=famin,
            vmax=famax,
        )
        hb.cmap = cm
        ax.set_xlabel(r"$\Delta$RA [deg.]")
        ax.set_ylabel(r"$\Delta$DEC [deg.]")
        ax.set_xlim(xlim)
        ax.set_ylim(ylim)
        ax.grid(True)
        for txt, xtxt, ytxt in zip(txts, xtxts, ytxts):
            ax.text(
                xtxt,
                ytxt,
                txt,
                color="k",
                fontweight="bold",
                fontsize=10,
                ha="center",
                transform=ax.transAxes,
            )
        cbar = plt.colorbar(hb)
        cbar.set_label("fraction assigned")
        cbar.mappable.set_clim(famin, famax)
def make_qa(
    outpng,
    survey,
    program,
    faflavor,
    targfns,
    fiberassignfn,
    tileid,
    tilera,
    tiledec,
    obscon,
    rundate,
    tmpoutdir=None,
    width_deg=4,
):
    """
    Make fba_launch QA plot.
    Args:
        outpng: written output PNG file (string)
        survey: "sv1", "sv2", "sv3" or "main" (string)
        program: "DARK", "BRIGHT", or "BACKUP" (string)
        faflavor: usually {survey}{program} in lower cases (string)
        targfns: input target catalog file(s), passed to get_parent_assign_quants()
        fiberassignfn: path to the output fiberassign-TILEID.fits file (string)
        tileid: tile TILEID (int)
        tilera: tile center R.A. in degrees (float)
        tiledec: tile center Dec. in degrees (float)
        obscon: tile allowed observing conditions (string; e.g. "DARK|GRAY|BRIGHT|BACKUP")
        rundate: used rundate (string)
        tmpoutdir (optional, defaults to None, i.e. a fresh temporary directory):
            temporary directory (to download the cutout)
    width_deg (optional, defaults to 4): width of the cutout in degrees (np.array of floats)
    """
    # AR create the temporary directory at call time; the previous default of
    # AR tmpoutdir=tempfile.mkdtemp() in the signature was evaluated only once,
    # AR at import, so all calls silently shared a single directory
    if tmpoutdir is None:
        tmpoutdir = tempfile.mkdtemp()
    # AR WD and STD used masks
    _, wd_mskkeys, wd_msks, std_mskkeys, std_msks = get_dt_masks(survey)
    # AR plotted tracers
    # AR TBD: handle secondary?
    trmskkeys, trmsks = get_qa_tracers(survey, program)
    # AR storing parent/assigned quantities
    parent, assign, dras, ddecs, petals, nassign = get_parent_assign_quants(
        survey, targfns, fiberassignfn, tilera, tiledec
    )
    # AR start plotting (one row of overview panels + one row per tracer)
    fig = plt.figure(figsize=(30, 3 * (1 + len(trmsks))))
    gs = gridspec.GridSpec(1 + len(trmsks), 7, wspace=0.5, hspace=0.3)
    # AR overall infos
    ax = plt.subplot(gs[0, 0])
    ax.axis("off")
    # AR infos : general
    qa_print_infos(
        ax,
        survey,
        program,
        faflavor,
        tileid,
        tilera,
        tiledec,
        obscon,
        rundate,
        parent,
        assign,
    )
    # AR stats per petal
    ax = plt.subplot(gs[0, 1])
    ax.axis("off")
    qa_print_petal_infos(
        ax, petals, assign,
    )
    # AR cutout
    img = get_viewer_cutout(
        tileid,
        tilera,
        tiledec,
        tmpoutdir=tmpoutdir,
        width_deg=width_deg,
        pixscale=10,
        dr="dr9",
        timeout=15,
    )
    # AR SKY, BAD, WD, STD, TGT
    iys = [2, 3, 4, 5, 6]
    keys = ["sky", "bad", "wd", "std", "assign"]
    txts = ["SKY", "BAD", "WD", "STD", "TGT"]
    alphas = [0.25, 1.0, 1.0, 1.0, 0.025]
    for iy, key, txt, alpha in zip(iys, keys, txts, alphas):
        ax = fig.add_subplot(gs[0, iy])
        plot_cutout(
            ax,
            img,
            width_deg,
            dras[key],
            ddecs[key],
            dopetal=True,
            alpha=alpha,
            txts=[txt],
            xtxts=[0.2],
            ytxts=[0.98],
        )
    # AR looping on tracers
    ix = 1
    for msk, mskkey in zip(trmsks, trmskkeys):
        # AR parent and assign magnitude distributions
        plot_hist_tracer(plt.subplot(gs[ix, 1]), survey, parent, assign, msk, mskkey)
        # AR color-color diagram, with fiber assignment rate color-coded, and density transparency-coded
        gridsize = 20
        for iy, xbands, ybands, xlim, ylim in zip(
            [2, 3],
            [("R", "Z"), ("R", "Z")],
            [("G", "R"), ("R", "W1")],
            [(-0.5, 2.5), (-0.5, 2.5)],
            [(-0.5, 2.5), (-2, 5)],
        ):
            ax = plt.subplot(gs[ix, iy])
            plot_colcol_tracer(
                ax,
                xbands,
                ybands,
                survey,
                parent,
                assign,
                msk,
                mskkey,
                xlim,
                ylim,
                gridsize=20,
            )
        # AR position in tile
        axs = [plt.subplot(gs[ix, 4]), plt.subplot(gs[ix, 5]), plt.subplot(gs[ix, 6])]
        plot_sky_fa(
            axs, img, survey, parent, assign, dras, ddecs, msk, mskkey, width_deg
        )
        #
        ix += 1
    # AR saving plot
    plt.savefig(
        outpng, bbox_inches="tight",
    )
    plt.close()
def rmv_nonsvn(myouts, log=Logger.get(), step="", start=time()):
    """
    Remove fba_launch non-SVN products
    Args:
        myouts: dictionary with the fba_launch args.outdir location (dictionary);
            must contain the following keys:
            "tiles", "sky", "gfa", "targ", "scnd", "too", "fba"
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    """
    # AR timestamped log header
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    # AR delete each intermediate product, skipping files that do not exist
    for key in ("tiles", "sky", "gfa", "targ", "scnd", "too", "fba"):
        fn = myouts[key]
        if not os.path.isfile(fn):
            continue
        os.remove(fn)
        log.info(
            "{:.1f}s\t{}\tdeleting file {}".format(
                time() - start, step, fn
            )
        )
def mv_temp2final(mytmpouts, myouts, expected_keys, log=Logger.get(), step="", start=time()):
    """
    Moves the fba_launch outputs from the temporary location to the args.outdir location.
    Args:
        mytmpouts: dictionary with the temporary files location (dictionary);
            contains the following keys: "tiles", "sky", "gfa", "targ", "scnd", "too", "fba", "fiberassign"
        myouts: dictionary with the fba_launch args.outdir location (dictionary);
            contains at least same keys as mytmpouts
        expected_keys: list of keys of mytmpouts, myouts with the files to move
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    Notes:
        actually, the log is not moved here; it is moved in fba_launch, after the main()
        exits with an error if any expected temporary file is missing
    """
    # AR timestamped log header
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    # AR move each expected file; a missing file is a fatal error
    for key in expected_keys:
        src, dst = mytmpouts[key], myouts[key]
        if not os.path.isfile(src):
            log.error(
                "{:.1f}s\t{}\tfile {} is missing, though we expect it; exiting".format(
                    time() - start, step, src
                )
            )
            sys.exit(1)
        shutil.move(src, dst)
        log.info(
            "{:.1f}s\t{}\tmoving file {} to {}".format(
                time() - start, step, src, dst
            )
        )
Replace inflate_ledger() with match_ledger_to_targets()
"""
fiberassign.fba_launch_io
=========================
Utility functions for fba_launch
"""
from __future__ import absolute_import, division
# system
import os
import subprocess
import sys
import tempfile
import shutil
import re
# time
from time import time
from datetime import datetime, timedelta
#
import numpy as np
import fitsio
# astropy
from astropy.io import fits
from astropy.table import Table
from astropy.time import Time
from astropy import units
from astropy.coordinates import SkyCoord, Distance
from astropy.time import Time
# desitarget
import desitarget
from desitarget.gaiamatch import gaia_psflike
from desitarget.io import read_targets_in_tiles, write_targets, write_skies
from desitarget.mtl import match_ledger_to_targets
from desitarget.targetmask import desi_mask, obsconditions
from desitarget.targets import set_obsconditions
from desitarget.geomask import match
# desimodel
import desimodel
from desimodel.footprint import is_point_in_desi
# desimeter
import desimeter
# fiberassign
import fiberassign
from fiberassign.scripts.assign import parse_assign, run_assign_full
from fiberassign.assign import merge_results, minimal_target_columns
from fiberassign.utils import Logger
# matplotlib
import matplotlib.pyplot as plt
from matplotlib import gridspec
import matplotlib
import matplotlib.image as mpimg
# AR default REF_EPOCH for PMRA=PMDEC=REF_EPOCH=0 objects
gaia_ref_epochs = {"dr2": 2015.5}
# AR tile radius in degrees
tile_radius_deg = 1.628
# AR approx. tile area in square degrees (disk approximation, pi * r^2)
tile_area = np.pi * tile_radius_deg ** 2
def assert_isoformat_utc(time_str):
    """
    Asserts if a date formats as "YYYY-MM-DDThh:mm:ss+00:00".
    Args:
        time_str: string with a date
    Returns:
        boolean asserting if time_str formats as "YYYY-MM-DDThh:mm:ss+00:00"
    """
    try:
        # AR parse only to validate; the parsed value itself is not needed
        datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S%z")
    except ValueError:
        return False
    # AR/SB it parses as an ISO string, now just check UTC timezone +00:00 and not +0000
    return time_str.endswith("+00:00")
def get_svn_version(svn_dir):
    """
    Gets the SVN revision number of an SVN folder.
    Args:
        svn_dir: SVN folder path (string)
    Returns:
        svn_ver: SVN revision number of svn_dir, or "unknown" if not an svn
            checkout or if the svn client is not installed
    Notes:
        `svn_dir` can contain environment variables to expand, e.g. "$DESIMODEL/data"
    """
    cmd = ["svn", "info", "--show-item", "revision", os.path.expandvars(svn_dir)]
    try:
        svn_ver = (
            subprocess.check_output(cmd, stderr=subprocess.DEVNULL).strip().decode()
        )
    # AR CalledProcessError: svn_dir is not an svn checkout;
    # AR FileNotFoundError: the svn executable itself is not available
    except (subprocess.CalledProcessError, FileNotFoundError):
        svn_ver = "unknown"
    return svn_ver
def get_program_latest_timestamp(
    program, log=Logger.get(), step="", start=time(),
):
    """
    Get the latest timestamp for a given program from the MTL per-tile file.
    Args:
        program: ideally "dark", "bright", or "backup" (string)
            though if different will return None
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    Returns:
        if some entries for input program: UTC YYYY-MM-DDThh:mm:ss+00:00 formatted timestamp (string)
        else: None
    Notes:
        if the per-tile MTL file does not exist or has zero entries for program,
        the function returns None.
        TBD: currently add +1min because of a mismatch between the ledgers and the per-tile file.
        TBD: still see if a +1min or +1s is desirable
    """
    # AR check DESI_SURVEYOPS is defined
    assert_env_vars(
        required_env_vars=["DESI_SURVEYOPS"], log=log, step=step, start=start,
    )
    # AR defaults to None (returned if no file or no selected rows)
    timestamp = None
    # AR check if the per-tile file is here
    # AR no need to check the scnd-mtl-done-tiles.ecsv file,
    # AR as we restrict to a given program ([desi-survey 2434])
    fn = os.path.join(os.getenv("DESI_SURVEYOPS"), "mtl", "mtl-done-tiles.ecsv")
    if os.path.isfile(fn):
        d = Table.read(fn)
        keep = d["PROGRAM"] == program.upper()
        if keep.sum() > 0:
            d = d[keep]
            # AR taking the latest timestamp (TIMESTAMPs sort lexicographically)
            tm = np.unique(d["TIMESTAMP"])[-1]
            # AR does not end with +NN:MM timezone?
            # AR raw string for the regex, so "\+" / "\d" are not treated as
            # AR (invalid) string escape sequences
            if re.search(r"\+\d{2}:\d{2}$", tm) is None:
                tm = "{}+00:00".format(tm)
            tm = datetime.strptime(tm, "%Y-%m-%dT%H:%M:%S%z")
            # AR TBD: we currently add one minute; can be removed once
            # AR TBD update is done on the desitarget side
            tm += timedelta(minutes=1)
            timestamp = tm.isoformat(timespec="seconds")
    return timestamp
def mv_write_targets_out(infn, targdir, outfn, log=Logger.get(), step="", start=time()):
    """
    Moves the file created by desitarget.io.write_targets
    and removes folder created by desitarget.io.write_targets
    Args:
        infn: filename output by desitarget.io.write_targets
        targdir: folder provided as desitarget.io.write_targets input
        outfn: desired renaming of infn
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    Notes:
        the intermediate sub-folders under targdir are removed deepest-first
        with os.rmdir, which only deletes empty directories; this assumes infn
        was the only content write_targets put in them.
    """
    # AR renaming
    _ = shutil.move(infn, outfn)
    log.info("{:.1f}s\t{}\trenaming {} to {}".format(time() - start, step, infn, outfn))
    # AR removing folders
    # AR ensure targdir ends with "/" so the replace() below strips it cleanly
    if targdir[-1] != "/":
        targdir = "{}/".format(targdir)
    # AR chain of sub-folder names that write_targets created below targdir
    tmpdirs = infn.replace(targdir, "").split("/")[:-1]
    # AR remove deepest folder first, then walk back up the chain
    for i in range(len(tmpdirs))[::-1]:
        os.rmdir(os.path.join(*[targdir] + tmpdirs[: i + 1]))
def get_nowradec(ra, dec, pmra, pmdec, parallax, ref_year, pmtime_utc_str, scnd=False):
    """
    Apply proper motion correction
    Args:
        ra: numpy array of RAs (deg)
        dec: numpy array of DECs (deg)
        pmra: numpy array of projected proper-motion in RA (mas/year)
        pmdec: numpy array of projected proper-motion in DEC (mas/year)
        parallax: numpy array of parallax (mas)
        ref_year: reference epoch (e.g. 2015.5 for Gaia/DR2)
        pmtime_utc_str: date to update position to (format: YYYY-MM-DDThh:mm:ss+00:00)
        scnd (optional, defaults to False): secondary target? (boolean; if True, sets parallax=0)
    Returns:
        ra: numpy array of RAs updated to pmtime_utc_str (deg)
        dec: numpy array of DECs updated to pmtime_utc_str (deg)
    Notes:
        Courtesy of DL; adapted from legacypipe.survey
        Originally named radec_at_mjd()
    """
    # AR pmtime_utc : UTC time of the new ref_epoch; "%Y-%m-%dT%H:%M:%S%z", e.g. "2021-04-21T00:00:00+00:00"
    # AR scnd=True -> parallax is set to 0, i.e. not used
    """
    Units:
    - matches Gaia DR1/DR2
    - pmra,pmdec are in mas/yr.
      pmra is in angular speed (ie, has a cos(dec) factor)
    - parallax is in mas.
    Returns: RA,Dec
    """
    equinox = 53084.28  # mjd of the spring equinox in 2004
    equinox_jyear = Time(equinox, format="mjd").jyear
    axistilt = 23.44  # degrees
    arcsecperrad = 3600.0 * 180.0 / np.pi
    # AR pmtime
    pmtime_utc = datetime.strptime(pmtime_utc_str, "%Y-%m-%dT%H:%M:%S%z")
    pmtime_utc_jyear = Time(pmtime_utc).jyear
    pmtime_utc_mjd = Time(pmtime_utc).mjd

    def xyztoradec(xyz):
        # AR convert unit(-ish) cartesian vectors back to (RA, DEC) in degrees;
        # AR handles points not exactly on the unit sphere via the norm
        assert len(xyz.shape) == 2
        ra = np.arctan2(xyz[:, 1], xyz[:, 0])  # AR added "np." in front of arctan2...
        ra += 2 * np.pi * (ra < 0)
        norm = np.sqrt(np.sum(xyz ** 2, axis=1))
        dec = np.arcsin(xyz[:, 2] / norm)
        return np.rad2deg(ra), np.rad2deg(dec)

    def radectoxyz(ra_deg, dec_deg):  # AR changed inputs from ra,dec to ra_deg,dec_deg
        # AR convert (RA, DEC) in degrees to cartesian unit vectors
        ra = np.deg2rad(ra_deg)
        dec = np.deg2rad(dec_deg)
        cosd = np.cos(dec)
        return np.vstack((cosd * np.cos(ra), cosd * np.sin(ra), np.sin(dec))).T

    # AR proper-motion term: dt in julian years, pm in mas/yr -> degrees
    dt = pmtime_utc_jyear - ref_year
    cosdec = np.cos(np.deg2rad(dec))
    dec = dec + dt * pmdec / (3600.0 * 1000.0)
    ra = ra + (dt * pmra / (3600.0 * 1000.0)) / cosdec
    parallax = np.atleast_1d(parallax)
    # AR discards parallax for scnd=True
    if scnd == True:
        parallax *= 0.0
    # AR parallax term: only for objects with a non-zero parallax
    I = np.flatnonzero(parallax)
    if len(I):
        # AR Sun position angle along the ecliptic, relative to the equinox
        suntheta = 2.0 * np.pi * np.fmod(pmtime_utc_jyear - equinox_jyear, 1.0)
        # Finite differences on the unit sphere -- xyztoradec handles
        # points that are not exactly on the surface of the sphere.
        axis = np.deg2rad(axistilt)
        scale = parallax[I] / 1000.0 / arcsecperrad
        xyz = radectoxyz(ra[I], dec[I])
        xyz[:, 0] += scale * np.cos(suntheta)
        xyz[:, 1] += scale * np.sin(suntheta) * np.cos(axis)
        xyz[:, 2] += scale * np.sin(suntheta) * np.sin(axis)
        r, d = xyztoradec(xyz)
        ra[I] = r
        dec[I] = d
    return ra, dec
def force_finite_pm(
    d, pmra_key="PMRA", pmdec_key="PMDEC", log=Logger.get(), step="", start=time()
):
    """
    Replaces NaN PMRA, PMDEC by 0
    Args:
        d: array with at least proper-motion columns
        pmra_key (optional, defaults to PMRA): column name for PMRA
        pmdec_key (optional, defaults to PMDEC): column name for PMDEC
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    Returns:
        d: same as input d, but NaN proper motions replaced by 0
    """
    # AR handle each proper-motion column in turn
    for colname in (pmra_key, pmdec_key):
        notfinite = ~np.isfinite(d[colname])
        nbad = notfinite.sum()
        if nbad > 0:
            d[colname][notfinite] = 0.0
            log.info(
                "{:.1f}s\t{}\t replacing NaN by 0 for {} targets".format(
                    time() - start, step, nbad
                )
            )
    return d
def force_nonzero_refepoch(
    d,
    force_ref_epoch,
    ref_epoch_key="REF_EPOCH",
    pmra_key="PMRA",
    pmdec_key="PMDEC",
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Replaces 0 by force_ref_epoch in ref_epoch
    Args:
        d: array with at least proper-motion columns
        force_ref_epoch: float, ref_epoch to replace 0 by
        ref_epoch_key (optional, defaults to REF_EPOCH): column name for the ref_epoch
        pmra_key (optional, defaults to PMRA): column name for PMRA
        pmdec_key (optional, defaults to PMDEC): column name for PMDEC
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    Returns:
        d: same as input d, but 0 ref_epochs replaced by force_ref_epoch
    Notes:
        Will exit with error if ref_epoch=0, but pmra or pmdec != 0
    """
    keep = d[ref_epoch_key] == 0
    # AR bugfix: the second clause used to re-test pmra_key, so a target with
    # AR REF_EPOCH=0 and a non-zero PMDEC (but PMRA=0) was silently accepted;
    # AR it now correctly tests pmdec_key
    n = ((d[pmra_key][keep] != 0) | (d[pmdec_key][keep] != 0)).sum()
    if n > 0:
        log.error(
            "{:.1f}s\t{}\t{} targets have {}=0 but {} or {} != 0; exiting".format(
                time() - start, step, n, ref_epoch_key, pmra_key, pmdec_key,
            )
        )
        sys.exit(1)
    d[ref_epoch_key][keep] = force_ref_epoch
    log.info(
        "{:.1f}s\t{}\tsetting {}={} for {} objects with {}=0".format(
            time() - start,
            step,
            ref_epoch_key,
            force_ref_epoch,
            keep.sum(),
            ref_epoch_key,
        )
    )
    return d
def update_nowradec(
    d,
    gaiadr,
    pmtime_utc_str,
    ra_key="RA",
    dec_key="DEC",
    pmra_key="PMRA",
    pmdec_key="PMDEC",
    parallax_key="PARALLAX",
    ref_epoch_key="REF_EPOCH",
    gaiag_key="GAIA_PHOT_G_MEAN_MAG",
    gaiaaen_key="GAIA_ASTROMETRIC_EXCESS_NOISE",
    scnd=False,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Update (RA, DEC, REF_EPOCH) using proper motion
    Args:
        d: array with at least proper-motion columns
        gaiadr: Gaia dr ("dr2" or "edr3")
        pmtime_utc_str: date to update position to (format: YYYY-MM-DDThh:mm:ss+00:00)
        ra_key (optional, defaults to RA): column name for RA
        dec_key (optional, defaults to DEC): column name for DEC
        pmra_key (optional, defaults to PMRA): column name for PMRA
        pmdec_key (optional, defaults to PMDEC): column name for PMDEC
        parallax_key (optional, defaults to PARALLAX): column name for PARALLAX
        ref_epoch_key (optional, defaults to REF_EPOCH): column name for the REF_EPOCH
        gaiag_key (optional, defaults to GAIA_PHOT_G_MEAN_MAG): column name for Gaia g-mag
        gaiaaen_key (optional, defaults to GAIA_ASTROMETRIC_EXCESS_NOISE): column name for Gaia GAIA_ASTROMETRIC_EXCESS_NOISE
        scnd (optional, defaults to False): secondary target? (boolean);
            if False, update for REF_EPOCH>0 + AEN only
            if True, update for REF_EPOCH>0 + finite(PMRA,PMDEC) ; forces PARALLAX=0
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    Returns:
        d: same as input, but with RA, DEC updated to pmtime_utc_str
    Notes:
        REF_EPOCH is updated for *all* objects
    """
    # AR
    pmtime_utc = datetime.strptime(pmtime_utc_str, "%Y-%m-%dT%H:%M:%S%z")
    pmtime_utc_jyear = Time(pmtime_utc).jyear
    # AR computing positions at pmtime_utc_str using Gaia PMRA, PMDEC
    nowra, nowdec = get_nowradec(
        d[ra_key],
        d[dec_key],
        d[pmra_key],
        d[pmdec_key],
        d[parallax_key],
        d[ref_epoch_key],
        pmtime_utc_str,
        scnd=scnd,
    )
    if scnd == True:
        # AR secondary: REF_EPOCH>0
        keep = d["REF_EPOCH"] > 0
    else:
        # AR targets with REF_EPOCH>0 and passing the AEN criterion
        keep = d["REF_EPOCH"] > 0
        # AR gaia_psflike arguments changed at desitarget-0.58.0
        # AR (the dr argument was introduced in that version)
        if desitarget.__version__ < "0.58.0":
            keep &= gaia_psflike(d[gaiag_key], d[gaiaaen_key])
        else:
            keep &= gaia_psflike(d[gaiag_key], d[gaiaaen_key], dr=gaiadr)
    # AR storing changes to report extrema in the log
    dra = nowra - d[ra_key]
    ddec = nowdec - d[dec_key]
    # AR updating positions to pmtime_utc_str for targets passing the AEN criterion
    d[ra_key][keep] = nowra[keep]
    d[dec_key][keep] = nowdec[keep]
    log.info(
        "{:.1f}s\t{}\tupdating RA,DEC at {} with PM for {:.0f}/{:.0f} targets passing AEN; maximum changes: RA={:.1f},{:.1f} arcsec, DEC={:.1f},{:.1f} arcsec".format(
            time() - start,
            step,
            pmtime_utc_jyear,
            keep.sum(),
            len(keep),
            3600.0 * dra.min(),
            3600.0 * dra.max(),
            3600 * ddec.min(),
            3600.0 * ddec.max(),
        )
    )
    # AR updating REF_EPOCH for *all* objects (for PlateMaker)
    # AR NOTE(review): this log line omits the "{}" step field used everywhere
    # AR else in this module — confirm whether that is intentional
    d[ref_epoch_key] = pmtime_utc_jyear
    log.info(
        "{:.1f}s\tupdating REF_EPOCH to {} for all {} targets".format(
            time() - start, pmtime_utc_jyear, len(keep)
        )
    )
    return d
def assert_env_vars(
    required_env_vars=[
        "DESI_ROOT",
        "DESI_TARGET",
        "DESIMODEL",
        "DESI_SURVEYOPS",
        "SKYBRICKS_DIR",
    ],
    log=Logger.get(),
    step="settings",
    start=time(),
):
    """
    Assert the environment variables required by fba_launch
    Args:
        required_env_vars (optional, defaults to ["DESI_ROOT",
            "DESI_TARGET",
            "DESIMODEL",
            "DESI_SURVEYOPS",
            "SKYBRICKS_DIR",]): list of environment variables required by fba_launch
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    Notes:
        will exit with error on the first environment variable found undefined
    """
    # AR safe: DESI environment variables
    for env_name in required_env_vars:
        if os.getenv(env_name) is not None:
            continue
        log.error(
            "{:.1f}s\t{}\tenvironment variable {} not defined; exiting".format(
                time() - start, step, env_name
            )
        )
        sys.exit(1)
def assert_arg_dates(
    args,
    dates=["pmtime_utc_str", "rundate", "mtltime"],
    log=Logger.get(),
    step="settings",
    start=time(),
):
    """
    Assert the fba_launch date arguments are correctly formatted ("YYYY-MM-DDThh:mm:ss+00:00")
    Args:
        args: fba_launch parser.parse_args() output
        dates (optional, defaults to ["pmtime_utc_str", "rundate", "mtltime"]): list of date fba_launch argument names to check
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    Notes:
        will exit with error if some assertions are not verified
    """
    # AR dates properly formatted?
    # AR iterate the parsed arguments via the public vars() view, rather
    # AR than the private argparse Namespace._get_kwargs() method
    for argname, argval in vars(args).items():
        if argname in dates:
            if not assert_isoformat_utc(argval):
                log.error(
                    "{:.1f}s\t{}\t{}={} is not yyyy-mm-ddThh:mm:ss+00:00; exiting".format(
                        time() - start, step, argname, argval,
                    )
                )
                sys.exit(1)
def assert_svn_tileid(
    tileid, forcetileid="n", log=Logger.get(), step="settings", start=time(),
):
    """
    Asserts if TILEID already exists in the SVN tile folder
    Args:
        tileid: TILEID to check (int)
        forcetileid (optional, defaults to "n"): "y" or "n";
            if "n", will trigger a warning + an error
            if "y", e will trigger a warning only
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    """
    svn_trunk = os.path.join(os.getenv("DESI_TARGET"), "fiberassign/tiles/trunk")
    # AR needs a wildcard to verify .fits and fits.gz files
    # AR as the gzipping was not done before ~SV1
    svn_fn = os.path.join(
        svn_trunk, "{:06d}".format(tileid)[:3], "fiberassign-{:06d}.fits".format(tileid)
    )
    if os.path.isfile(svn_fn) | os.path.isfile("{}.gz".format(svn_fn)):
        log.warning(
            "{:.1f}s\t{}\tTILEID={} already exists in SVN folder {}".format(
                time() - start, step, tileid, svn_trunk
            )
        )
        if forcetileid == "y":
            log.warning(
                "{:.1f}s\t{}\tproceeding as forcetileid == y".format(
                    time() - start, step
                )
            )
        else:
            # AR consistency fix: use the step argument in the log record, as in
            # AR every other message here (it previously hard-coded "settings")
            log.error(
                "{:.1f}s\t{}\texiting as forcetileid == n".format(time() - start, step)
            )
            sys.exit(1)
    else:
        log.info(
            "{:.1f}s\t{}\tTILEID={} does not exist in SVN folder {}; proceeding".format(
                time() - start, step, tileid, svn_trunk
            )
        )
def print_config_infos(
    required_env_vars=[
        "DESI_ROOT",
        "DESI_TARGET",
        "DESIMODEL",
        "DESI_SURVEYOPS",
        "SKYBRICKS_DIR",
    ],
    log=Logger.get(),
    step="settings",
    start=time(),
):
    """
    Print various configuration informations (machine, modules version/path, DESI environment variables).
    Args:
        required_env_vars (optional, defaults to ["DESI_ROOT",
            "DESI_TARGET",
            "DESIMODEL",
            "DESI_SURVEYOPS",
            "SKYBRICKS_DIR",]): list of environment variables required by fba_launch
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start(optional, defaults to time()): start time for log (in seconds; output of time.time()
    """
    # AR host machine
    log.info(
        "{:.1f}s\t{}\tHOSTNAME={}".format(time() - start, step, os.getenv("HOSTNAME"))
    )
    # AR version and install path of each relevant DESI code module
    desi_modules = {
        "fiberassign": fiberassign,
        "desitarget": desitarget,
        "desimodel": desimodel,
        "desimeter": desimeter,
    }
    for name, module in desi_modules.items():
        log.info(
            "{:.1f}s\t{}\trunning with {} code version: {}".format(
                time() - start, step, name, module.__version__
            )
        )
        log.info(
            "{:.1f}s\t{}\trunning with {} code path: {}".format(
                time() - start, step, name, module.__path__
            )
        )
    # AR values of the required DESI environment variables
    for env_var in required_env_vars:
        log.info(
            "{:.1f}s\t{}\t{}={}".format(
                time() - start, step, env_var, os.getenv(env_var)
            )
        )
def get_desitarget_paths(
    dtver,
    survey,
    program,
    dr="dr9",
    gaiadr="gaiadr2",
    log=Logger.get(),
    step="settings",
    start=time(),
):
    """
    Build the folder/file full paths for the desitarget products used by fba_launch.
    Args:
        dtver: desitarget catalog version (string; e.g., "0.57.0")
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        program: "dark", "bright", or "backup" (string)
        dr (optional, defaults to "dr9"): legacypipe dr (string)
        gaiadr (optional, defaults to "gaiadr2"): gaia dr (string)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to "settings"): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Returns:
        Dictionary with the following keys:
            - sky: sky folder
            - skysupp: skysupp folder
            - gfa: GFA folder
            - targ: targets folder (static catalogs, with all columns)
            - mtl: MTL folder
            - scnd: secondary fits catalog (static; dark/bright only)
            - scndmtl: MTL folder for secondary targets (dark/bright only)
            - too: ToO ecsv catalog
    Notes:
        if survey not in ["sv1", "sv2", "sv3", "main"]
        or program not in ["dark", "bright", or "backup"], will return a warning only
        same warning only if the built paths/files do not exist.
    """
    surv = survey.lower()
    prog = program.lower()
    # AR warn (but keep going) on unexpected survey / program values
    exp_surveys = ["sv1", "sv2", "sv3", "main"]
    exp_programs = ["dark", "bright", "backup"]
    if surv not in exp_surveys:
        log.warning(
            "{:.1f}s\t{}\tunexpected survey={} ({}; proceeding anyway)".format(
                time() - start, step, surv, exp_surveys
            )
        )
    if prog not in exp_programs:
        log.warning(
            "{:.1f}s\t{}\tunexpected program={} ({}; proceeding anyway)".format(
                time() - start, step, prog, exp_programs
            )
        )
    # AR folder architecture is now the same at NERSC/KPNO (https://github.com/desihub/fiberassign/issues/302)
    desi_target = os.getenv("DESI_TARGET")
    desi_surveyops = os.getenv("DESI_SURVEYOPS")
    mydirs = {}
    mydirs["sky"] = os.path.join(desi_target, "catalogs", dr, dtver, "skies")
    mydirs["skysupp"] = os.path.join(
        desi_target, "catalogs", gaiadr, dtver, "skies-supp"
    )
    mydirs["gfa"] = os.path.join(desi_target, "catalogs", dr, dtver, "gfas")
    # AR backup targets are drawn from the gaia catalog, others from the legacypipe dr
    dtcat = gaiadr if prog == "backup" else dr
    mydirs["targ"] = os.path.join(
        desi_target, "catalogs", dtcat, dtver, "targets", surv, "resolve", prog
    )
    mydirs["mtl"] = os.path.join(desi_surveyops, "mtl", surv, prog)
    # AR secondary (dark, bright; no secondary for backup)
    if prog in ["dark", "bright"]:
        if surv == "main":
            basename = "targets-{}-secondary.fits".format(prog)
        else:
            basename = "{}targets-{}-secondary.fits".format(surv, prog)
        mydirs["scnd"] = os.path.join(
            desi_target,
            "catalogs",
            dr,
            dtver,
            "targets",
            surv,
            "secondary",
            prog,
            basename,
        )
        mydirs["scndmtl"] = os.path.join(
            desi_surveyops, "mtl", surv, "secondary", prog
        )
    # AR ToO (same for dark, bright)
    mydirs["too"] = os.path.join(desi_surveyops, "mtl", surv, "ToO", "ToO.ecsv")
    # AR log each path; warn if it does not exist on disk
    for key, path in mydirs.items():
        log.info(
            "{:.1f}s\t{}\tdirectory for {}: {}".format(
                time() - start, step, key, path
            )
        )
        if not os.path.exists(path):
            log.warning(
                "{:.1f}s\t{}\tdirectory for {}: {} does not exist".format(
                    time() - start, step, key, path
                )
            )
    return mydirs
def create_tile(
    tileid,
    tilera,
    tiledec,
    outfn,
    survey,
    obscon="DARK|GRAY|BRIGHT|BACKUP",
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Write a one-row tiles fits file for a single tile.
    Args:
        tileid: TILEID (int)
        tilera: tile center R.A. (float)
        tiledec: tile center Dec. (float)
        outfn: fits file name to be written
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        obscon (optional, defaults to "DARK|GRAY|BRIGHT|BACKUP"): tile allowed observing conditions (string)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    """
    hdr = fitsio.FITSHDR()
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart generating {}".format(time() - start, step, outfn))
    log.info(
        "{:.1f}s\t{}\ttileid={}, tilera={}, tiledec={}, survey={}, obscon={}".format(
            time() - start, step, tileid, tilera, tiledec, survey, obscon
        )
    )
    # AR one-row structured array holding the tile properties
    tile_dtype = [
        ("TILEID", "i4"),
        ("RA", "f8"),
        ("DEC", "f8"),
        ("OBSCONDITIONS", "i4"),
        ("IN_DESI", "i2"),
        ("PROGRAM", "S6"),
    ]
    d = np.zeros(1, dtype=tile_dtype)
    d["TILEID"], d["RA"], d["DEC"] = tileid, tilera, tiledec
    # AR force IN_DESI=1: the default onlydesi=True option in
    # AR desimodel.io.load_tiles() would otherwise discard tiles outside the
    # AR desi footprint, returning no tiles for dithered tiles outside desi
    d["IN_DESI"] = 1
    # AR custom PROGRAM value... SV2, SV3, MAIN
    d["PROGRAM"] = survey.upper()
    log.info("{:.1f}s\t{}\ttile obscon={}".format(time() - start, step, obscon))
    d["OBSCONDITIONS"] = obsconditions.mask(obscon)
    fitsio.write(outfn, d, extname="TILES", header=hdr, clobber=True)
    log.info("{:.1f}s\t{}\t{} written".format(time() - start, step, outfn,))
def create_sky(
    tilesfn,
    skydir,
    outfn,
    suppskydir=None,
    tmpoutdir=tempfile.mkdtemp(),
    add_plate_cols=True,
    log=Logger.get(),
    step="",
    start=time(),
    quick=True,
):
    """
    Create a sky fits file.
    Args:
        tilesfn: path to a tiles fits file (string)
        skydir: desitarget sky folder (string)
        outfn: fits file name to be written (string)
        suppskydir (optional, defaults to None): desitarget suppsky folder (string)
        tmpoutdir (optional, defaults to a temporary directory): temporary directory where
            write_skies will write (creating some sub-directories)
        add_plate_cols (optional, defaults to True): adds a PLATE_RA, PLATE_DEC columns (boolean)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
        quick (optional, defaults to True): boolean, argument of desitarget.io.read_targets_in_tiles()
    Notes:
        add_plate_cols: not adding PLATE_REF_EPOCH;
        20210526 : implementation of using subpriority=False in write_skies
            to avoid an over-writting of the SUBPRIORITY
    """
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart generating {}".format(time() - start, step, outfn))
    # AR sky: read targets
    tiles = fits.open(tilesfn)[1].data
    skydirs = [skydir]
    if suppskydir is not None:
        skydirs.append(suppskydir)
    # AR use a loop variable distinct from the skydir argument: the previous
    # AR code shadowed it, so write_skies() below could receive suppskydir as indir
    ds = [read_targets_in_tiles(mydir, tiles=tiles, quick=quick) for mydir in skydirs]
    for mydir, d in zip(skydirs, ds):
        log.info(
            "{:.1f}s\t{}\treadin {} targets from {}".format(
                time() - start, step, len(d), mydir
            )
        )
    d = np.concatenate(ds)
    # AR adding PLATE_RA, PLATE_DEC?
    if add_plate_cols:
        d = Table(d)
        d["PLATE_RA"] = d["RA"]
        d["PLATE_DEC"] = d["DEC"]
        d = d.as_array()
        log.info(
            "{:.1f}s\t{}\tadding PLATE_RA, PLATE_DEC columns".format(
                time() - start, step
            )
        )
    n, tmpfn = write_skies(tmpoutdir, d, indir=skydir, indir2=suppskydir, subpriority=False)
    _ = mv_write_targets_out(tmpfn, tmpoutdir, outfn, log=log, step=step, start=start)
    log.info("{:.1f}s\t{}\t{} written".format(time() - start, step, outfn))
def create_targ_nomtl(
    tilesfn,
    targdir,
    survey,
    gaiadr,
    pmcorr,
    outfn,
    tmpoutdir=tempfile.mkdtemp(),
    pmtime_utc_str=None,
    add_plate_cols=True,
    quick=True,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Create a target fits file, with solely using desitarget catalogs, no MTL.
    e.g. for the GFA, but could be used for other purposes.
    Args:
        tilesfn: path to a tiles fits file (string)
        targdir: desitarget target folder (string)
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        gaiadr: Gaia dr ("dr2" or "edr3")
        pmcorr: apply proper-motion correction? ("y" or "n")
        outfn: fits file name to be written (string)
        tmpoutdir (optional, defaults to a temporary directory): temporary directory where
            write_targets will write (creating some sub-directories)
        pmtime_utc_str (optional, defaults to None): UTC time use to compute
            new coordinates after applying proper motion since REF_EPOCH
            (string formatted as "yyyy-mm-ddThh:mm:ss+00:00")
        add_plate_cols (optional, defaults to True): adds a PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH columns (boolean)
        quick (optional, defaults to True): boolean, arguments of desitarget.io.read_targets_in_tiles()
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Notes:
        if pmcorr="y", then pmtime_utc_str needs to be set; will trigger an error otherwise.
        TBD: the PLATE_{RA,DEC,REF_EPOCH} columns currently simply are copy of RA,DEC,REF_EPOCH
        TBD: but it prepares e.g. to add chromatic offsets.
        20210526 : implementation of using subpriority=False in write_targets
            to avoid an over-writting of the SUBPRIORITY
    """
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart generating {}".format(time() - start, step, outfn))
    # AR targ_nomtl: read targets
    tiles = fits.open(tilesfn)[1].data
    d = read_targets_in_tiles(targdir, tiles=tiles, quick=quick)
    log.info(
        "{:.1f}s\t{}\tkeeping {} targets to {}".format(
            time() - start, step, len(d), outfn
        )
    )
    # AR adding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH ?
    if add_plate_cols:
        d = Table(d)
        d["PLATE_RA"] = d["RA"]
        d["PLATE_DEC"] = d["DEC"]
        d["PLATE_REF_EPOCH"] = d["REF_EPOCH"]
        d = d.as_array()
        log.info(
            "{:.1f}s\t{}\tadding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH columns".format(
                time() - start, step
            )
        )
    # AR targ_nomtl: PMRA, PMDEC: convert NaN to zeros
    d = force_finite_pm(d, log=log, step=step, start=start)
    # AR targ_nomtl: update RA, DEC, REF_EPOCH using proper motion?
    if pmcorr == "y":
        if pmtime_utc_str is None:
            log.error(
                "{:.1f}s\t{}\tneed to provide pmtime_utc_str, as proper-correction is requested; exiting".format(
                    time() - start, step,
                )
            )
            # AR fixed: was sys.exti(1), which raised AttributeError instead of exiting
            sys.exit(1)
        d = update_nowradec(d, gaiadr, pmtime_utc_str, log=log, step=step, start=start)
    else:
        log.info(
            "{:.1f}s\t{}\t*not* applying proper-motion correction".format(
                time() - start, step
            )
        )
    # AR targ_nomtl: Replaces 0 by force_ref_epoch in ref_epoch
    d = force_nonzero_refepoch(
        d, gaia_ref_epochs[gaiadr], log=log, step=step, start=start
    )
    # AR targ_nomtl: write fits
    n, tmpfn = write_targets(tmpoutdir, d, indir=targdir, survey=survey, subpriority=False)
    _ = mv_write_targets_out(tmpfn, tmpoutdir, outfn, log=log, step=step, start=start)
    # AR targ_nomtl: update header if pmcorr = "y"
    if pmcorr == "y":
        fd = fitsio.FITS(outfn, "rw")
        fd["TARGETS"].write_key("COMMENT", "RA,DEC updated with PM for AEN objects")
        fd["TARGETS"].write_key("COMMENT", "REF_EPOCH updated for all objects")
        fd.close()
    log.info("{:.1f}s\t{}\t{} written".format(time() - start, step, outfn))
def create_mtl(
    tilesfn,
    mtldir,
    mtltime,
    targdir,
    survey,
    gaiadr,
    pmcorr,
    outfn,
    tmpoutdir=tempfile.mkdtemp(),
    pmtime_utc_str=None,
    add_plate_cols=True,
    log=Logger.get(),
    step="",
    start=time(),
    quick=True,
):
    """
    Create a (primary or secondary) target fits file, based on MTL ledgers (and complementary columns from desitarget targets files).
    Args:
        tilesfn: path to a tiles fits file (string)
        mtldir: desisurveyops MTL folder (string)
        mtltime: MTL isodate (string formatted as yyyy-mm-ddThh:mm:ss+00:00)
        targdir: desitarget targets folder (or file name if secondary) (string)
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        gaiadr: Gaia dr ("dr2" or "edr3")
        pmcorr: apply proper-motion correction? ("y" or "n")
        outfn: fits file name to be written (string)
        tmpoutdir (optional, defaults to a temporary directory): temporary directory where
            write_targets will write (creating some sub-directories)
        pmtime_utc_str (optional, defaults to None): UTC time use to compute
            new coordinates after applying proper motion since REF_EPOCH
            (string formatted as "yyyy-mm-ddThh:mm:ss+00:00")
        add_plate_cols (optional, defaults to True): adds a PLATE_RA and PLATE_DEC columns (boolean)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
        quick (optional, defaults to True): boolean, argument of desitarget.io.read_targets_in_tiles()
            when reading the complementary (non-ledger) target columns
    Notes:
        if pmcorr="y", then pmtime_utc_str needs to be set; will trigger an error otherwise.
        for sv3-backup, we remove BACKUP_BRIGHT targets.
        TBD : if secondary targets, we currently disable the inflate_ledger(), as it
            seems to not currently work.
            hence if secondary and pmcorr="y", the code will crash, as the
            GAIA_ASTROMETRIC_EXCESS_NOISE column will be missing; though we do not
            expect this configuration to happen, so it should be fine for now.
        TBD: the PLATE_{RA,DEC,REF_EPOCH} columns currently simply are copy of RA,DEC,REF_EPOCH
        TBD: but it prepares e.g. to add chromatic offsets.
        20210526 : implementation of using subpriority=False in write_targets
            to avoid an over-writting of the SUBPRIORITY
    """
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart generating {}".format(time() - start, step, outfn))
    tiles = fits.open(tilesfn)[1].data
    # AR mtl: storing the timestamp at which we queried MTL
    log.info("{:.1f}s\t{}\tmtltime={}".format(time() - start, step, mtltime))
    # AR mtl: read mtl
    d = read_targets_in_tiles(
        mtldir,
        tiles=tiles,
        quick=False,
        mtl=True,
        unique=True,
        isodate=mtltime,
    )
    log.info(
        "{:.1f}s\t{}\treading {} targets from {}".format(
            time() - start, step, len(d), mtldir
        )
    )
    # AR mtl: removing by hand BACKUP_BRIGHT for sv3/BACKUP
    # AR mtl: using an indirect way to find if program=backup,
    # AR mtl: to avoid the need of an extra program argument
    # AR mtl: for sv3, there is no secondary-backup, so no ambiguity
    if (survey == "sv3") & ("backup" in mtldir):
        from desitarget.sv3.sv3_targetmask import mws_mask
        keep = (d["SV3_MWS_TARGET"] & mws_mask["BACKUP_BRIGHT"]) == 0
        log.info(
            "{:.1f}s\t{}\tremoving {}/{} BACKUP_BRIGHT targets".format(
                time() - start, step, len(d) - keep.sum(), len(d)
            )
        )
        d = d[keep]
    # AR mtl: add columns not present in ledgers
    # AR mtl: need to provide exact list (if columns=None, inflate_ledger()
    # AR mtl: overwrites existing columns)
    # AR mtl: TBD : we currently disable it for secondary targets
    # AR mtl: using an indirect way to find if secondary,
    # AR mtl: to avoid the need of an extra program argument
    if "secondary" not in mtldir:
        columns = [key for key in minimal_target_columns if key not in d.dtype.names]
        # AR mtl: also add GAIA_ASTROMETRIC_EXCESS_NOISE, in case args.pmcorr == "y"
        if pmcorr == "y":
            columns += ["GAIA_ASTROMETRIC_EXCESS_NOISE"]
        log.info(
            "{:.1f}s\t{}\tadding {} from {}".format(
                time() - start, step, ",".join(columns), targdir
            )
        )
        # AR fixed: removed an extra closing parenthesis (SyntaxError) and
        # AR added the quick argument to the function signature (was undefined here)
        targ = read_targets_in_tiles(targdir, tiles=tiles, quick=quick, columns=columns + ["TARGETID"])
        d = match_ledger_to_targets(d, targ)
    # AR adding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH ?
    if add_plate_cols:
        d = Table(d)
        d["PLATE_RA"] = d["RA"]
        d["PLATE_DEC"] = d["DEC"]
        d["PLATE_REF_EPOCH"] = d["REF_EPOCH"]
        d = d.as_array()
        log.info(
            "{:.1f}s\t{}\tadding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH columns".format(
                time() - start, step
            )
        )
    # AR mtl: PMRA, PMDEC: convert NaN to zeros
    d = force_finite_pm(d, log=log, step=step, start=start)
    # AR mtl: update RA, DEC, REF_EPOCH using proper motion?
    if pmcorr == "y":
        if pmtime_utc_str is None:
            log.error(
                "{:.1f}s\t{}\tneed to provide pmtime_utc_str, as proper-correction is requested; exiting".format(
                    time() - start, step,
                )
            )
            # AR fixed: was sys.exti(1), which raised AttributeError instead of exiting
            sys.exit(1)
        d = update_nowradec(d, gaiadr, pmtime_utc_str, log=log, step=step, start=start)
    else:
        log.info(
            "{:.1f}s\t{}\t*not* applying proper-motion correction".format(
                time() - start, step
            )
        )
    # AR Replaces 0 by force_ref_epoch in ref_epoch
    d = force_nonzero_refepoch(
        d, gaia_ref_epochs[gaiadr], log=log, step=step, start=start
    )
    # AR mtl: write fits
    n, tmpfn = write_targets(tmpoutdir, d, indir=mtldir, indir2=targdir, survey=survey, subpriority=False)
    _ = mv_write_targets_out(tmpfn, tmpoutdir, outfn, log=log, step=step, start=start,)
    # AR mtl: update header if pmcorr = "y"
    if pmcorr == "y":
        fd = fitsio.FITS(outfn, "rw")
        fd["TARGETS"].write_key("COMMENT", "RA,DEC updated with PM for AEN objects")
        fd["TARGETS"].write_key("COMMENT", "REF_EPOCH updated for all objects")
        fd.close()
    log.info("{:.1f}s\t{}\t{} written".format(time() - start, step, outfn))
def create_too(
    tilesfn,
    toofn,
    mjd_min,
    mjd_max,
    survey,
    gaiadr,
    pmcorr,
    outfn,
    tmpoutdir=tempfile.mkdtemp(),
    pmtime_utc_str=None,
    too_tile=False,
    add_plate_cols=True,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Create a ToO target fits file, with selecting targets in a MJD time window.
    If no ToO file, or no selected targets, do nothing.
    Args:
        tilesfn: path to a tiles fits file (string)
        toofn: ToO file name (string)
        mjd_min, mjd_max (floats): we keep targets with MJD_BEGIN < mjd_max and MJD_END > mjd_min
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        gaiadr: Gaia dr ("dr2" or "edr3")
        pmcorr: apply proper-motion correction? ("y" or "n")
        outfn: fits file name to be written (string)
        tmpoutdir (optional, defaults to a temporary directory): temporary directory where
            write_targets will write (creating some sub-directories)
        pmtime_utc_str (optional, defaults to None): UTC time use to compute
            new coordinates after applying proper motion since REF_EPOCH
            (string formatted as "yyyy-mm-ddThh:mm:ss+00:00")
        too_tile (optional, defaults to False): if False, we only keep TOO_TYPE!="TILE",
            if True, we do not cut on TOO_TYPE, hence keeping both TOO_TYPE="FIBER" *and*
            TOO_TYPE="TILE" for ToO dedicated tiles (boolean)
        add_plate_cols (optional, defaults to True): adds a PLATE_RA and PLATE_DEC columns (boolean)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Returns:
        False if no ToO input file is present, True otherwise.
    Notes:
        if pmcorr="y", then pmtime_utc_str needs to be set; will trigger an error otherwise.
        TBD : the MJD window to accept targets; currently in fba_launch, we set a month
            from the tile design date;
            it surely needs to be updated/refined once operations are more clear.
        some steps in common with create_mtl().
        TBD: the PLATE_{RA,DEC,REF_EPOCH} columns currently simply are copy of RA,DEC,REF_EPOCH
        TBD: but it prepares e.g. to add chromatic offsets.
        20210526 : implementation of using subpriority=False in write_targets
            to avoid an over-writting of the SUBPRIORITY
    """
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart generating {}".format(time() - start, step, outfn))
    # AR too: is there a file?
    # AR too: if no, just skip
    if not os.path.isfile(toofn):
        log.info(
            "{:.1f}s\t{}\tno ToO input file present: {}, not writing any {}".format(
                time() - start, step, toofn, outfn
            )
        )
        return False
    # AR too: if yes, we proceed
    # AR too: tile file
    tiles = fits.open(tilesfn)[1].data
    # AR too: read too file
    # AR cut on:
    # AR - tiles
    # AR - mjd (! TBD !)
    d = Table.read(toofn)
    # AR adding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH ?
    if add_plate_cols:
        d["PLATE_RA"] = d["RA"]
        d["PLATE_DEC"] = d["DEC"]
        d["PLATE_REF_EPOCH"] = d["REF_EPOCH"]
        # AR fixed log message: the column added is PLATE_REF_EPOCH, not REF_EPOCH
        log.info(
            "{:.1f}s\t{}\tadding PLATE_RA, PLATE_DEC, PLATE_REF_EPOCH columns".format(
                time() - start, step
            )
        )
    keep = is_point_in_desi(tiles, d["RA"], d["DEC"])
    if not too_tile:
        keep &= d["TOO_TYPE"] != "TILE"
    keep &= (d["MJD_BEGIN"] < mjd_max) & (d["MJD_END"] > mjd_min)
    log.info(
        "{:.1f}s\t{}\tkeeping {}/{} targets in tiles, with TOO_TYPE={}, and in the MJD time window: {}, {}".format(
            time() - start, step, keep.sum(), len(keep), "TILE,FIBER" if too_tile else "FIBER", mjd_min, mjd_max
        )
    )
    if keep.sum() > 0:
        d = d[keep]
        # AR too: PMRA, PMDEC: convert NaN to zeros
        d = force_finite_pm(d, log=log, step=step, start=start)
        # AR too: update RA, DEC, REF_EPOCH using proper motion
        if pmcorr == "y":
            if pmtime_utc_str is None:
                log.error(
                    "{:.1f}s\t{}\tneed to provide pmtime_utc_str, as proper-correction is requested; exiting".format(
                        time() - start, step,
                    )
                )
                # AR fixed: was sys.exti(1), which raised AttributeError instead of exiting
                sys.exit(1)
            d = update_nowradec(
                d, gaiadr, pmtime_utc_str, log=log, step=step, start=start
            )
        else:
            log.info(
                "{:.1f}s\t{}\t*not* applying proper-motion correction".format(
                    time() - start, step
                )
            )
            # AR single REF_EPOCH needed
            # AR TBD currently all targets have PMRA=PMDEC=0,
            # AR TBD so it s fine to just change all REF_EPOCH
            d["REF_EPOCH"] = np.zeros(len(d))
        # AR Replaces 0 by force_ref_epoch in ref_epoch
        d = force_nonzero_refepoch(
            d, gaia_ref_epochs[gaiadr], log=log, step=step, start=start
        )
        # AR mtl: write fits
        n, tmpfn = write_targets(tmpoutdir, d.as_array(), indir=toofn, survey=survey, subpriority=False)
        _ = mv_write_targets_out(
            tmpfn, tmpoutdir, outfn, log=log, step=step, start=start,
        )
        # AR mtl: update header if pmcorr = "y"
        if pmcorr == "y":
            fd = fitsio.FITS(outfn, "rw")
            fd["TARGETS"].write_key("COMMENT", "RA,DEC updated with PM for AEN objects")
            fd["TARGETS"].write_key("COMMENT", "REF_EPOCH updated for all objects")
            fd.close()
        log.info("{:.1f}s\t{}\t{} written".format(time() - start, step, outfn))
    else:
        log.info(
            "{:.1f}s\t{}\tno too kept too targets, no {} written".format(
                time() - start, step, outfn
            )
        )
    return True
def launch_onetile_fa(
    args,
    tilesfn,
    targfns,
    fbafn,
    fiberassignfn,
    skyfn=None,
    gfafn=None,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Runs the fiber assignment (run_assign_full),
    merges the results (merge_results) for a single tile,
    and prints the assignment stats for each mask.
    Args:
        args: fba_launch-like parser.parse_args() output
            should contain at least:
                - survey
                - rundate
                - sky_per_petal
                - standards_per_petal
                - sky_per_slitblock
        tilesfn: path to the input tiles fits file (string)
        targfns: paths to the input targets fits files, e.g. targ, scnd, too (either a string if only one file, or a list of strings)
        fbafn: path to the output fba-TILEID.fits file (string)
        fiberassignfn: path to the output fiberassign-TILEID.fits file (string)
        skyfn (optional, defaults to None): path to a sky fits file (string)
        gfafn (optional, defaults to None): path to a gfa fits file (string)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Notes:
        no sanity checks done on inputs; assumed to be done elsewhere
        assumes the output directory is the same for fbafn and fiberassignfn
        we keep a generic "args" input, so that any later added argument in fba_launch does not
            requires a change in the launch_fa() call format.
        fba_launch-like adding information in the header is done in another function, update_fiberassign_header
        TBD: be careful if working in the SVN-directory; maybe add additional safety lines?
    """
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    log.info("{:.1f}s\t{}\tstart running fiber assignment".format(time() - start, step))
    # AR convert targfns to list if string (i.e. only one input file)
    if isinstance(targfns, str):
        targfns = [targfns]
    # AR tileid, tilera, tiledec
    tiles = fits.open(tilesfn)[1].data
    tileid = tiles["TILEID"][0]
    tilera = tiles["RA"][0]
    tiledec = tiles["DEC"][0]
    # AR output directory (picking the one of fbafn)
    outdir = os.path.dirname(fbafn)
    # AR safe: delete possibly existing fba-{tileid}.fits and fiberassign-{tileid}.fits
    # AR TBD: add additional safety check if running in SVN folder?
    if os.path.isfile(fbafn):
        os.remove(fbafn)
    if os.path.isfile(fiberassignfn):
        os.remove(fiberassignfn)
    # AR preparing fba_run inputs
    opts = [
        "--targets",
    ]
    for targfn in targfns:
        opts += [
            targfn,
        ]
    # AR str() on every numeric argument, so that opts only contains strings
    opts += [
        "--overwrite",
        "--write_all_targets",
        "--dir",
        outdir,
        "--footprint",
        tilesfn,
        "--rundate",
        args.rundate,
        "--sky_per_petal",
        str(args.sky_per_petal),
        "--standards_per_petal",
        str(args.standards_per_petal),
        "--sky_per_slitblock",
        str(args.sky_per_slitblock),
        "--ha",
        str(args.ha),
    ]
    # AR fixed: "--ha" was previously appended a second time when args.ha != 0,
    # AR duplicating the option; it is now passed once, unconditionally (above)
    if args.margin_pos != 0:
        opts += ["--margin-pos", str(args.margin_pos)]
    if args.margin_gfa != 0:
        opts += ["--margin-gfa", str(args.margin_gfa)]
    if args.margin_petal != 0:
        opts += ["--margin-petal", str(args.margin_petal)]
    if skyfn is not None:
        opts += [
            "--sky",
            skyfn,
        ]
    if gfafn is not None:
        opts += [
            "--gfafile",
            gfafn,
        ]
    log.info(
        "{:.1f}s\t{}\ttileid={:06d}: running raw fiber assignment (run_assign_full) with opts={}".format(
            time() - start, step, tileid, " ; ".join(opts)
        )
    )
    ag = parse_assign(opts)
    run_assign_full(ag)
    # AR merging
    # AR not using run_merge(), because it looks for all fba-TILEID.fits file
    # AR in the out directory...
    ag = {}
    ag["tiles"] = [tileid]
    ag["columns"] = None
    if gfafn is not None:
        ag["targets"] = [gfafn] + targfns
    else:
        ag["targets"] = targfns
    if skyfn is not None:
        ag["sky"] = [skyfn]
    else:
        ag["sky"] = []
    ag["result_dir"] = outdir
    ag["copy_fba"] = False
    tmparr = []
    for key in list(ag.keys()):
        tmparr += ["{} = {}".format(key, ag[key])]
    log.info(
        "{:.1f}s\t{}\ttileid={:06d}: merging input target data (merge_results) with argument={}".format(
            time() - start, step, tileid, " ; ".join(tmparr)
        )
    )
    merge_results(
        ag["targets"],
        ag["sky"],
        ag["tiles"],
        result_dir=ag["result_dir"],
        columns=ag["columns"],
        copy_fba=ag["copy_fba"],
    )
    # AR fixed "statiscs" typo in the two log messages below
    log.info(
        "{:.1f}s\t{}\tcomputing assignment statistics: start".format(
            time() - start, step
        )
    )
    # AR storing parent/assigned quantities
    parent, assign, dras, ddecs, petals, nassign = get_parent_assign_quants(
        args.survey, targfns, fiberassignfn, tilera, tiledec,
    )
    # AR stats : assigned / parent
    print_assgn_parent_stats(args.survey, parent, assign, log=log, step=step, start=start)
    log.info(
        "{:.1f}s\t{}\tcomputing assignment statistics: done".format(
            time() - start, step
        )
    )
def update_fiberassign_header(
    fiberassignfn,
    args,
    mydirs,
    hdr_survey,
    hdr_faprgrm,
    faflavor,
    ebv,
    obscon,
    fascript,
    log=Logger.get(),
    step="",
    start=time(),
):
    """
    Adds various information in the fiberassign-TILEID.fits PRIMARY header.
    Args:
        fiberassignfn: path to fiberassign-TILEID.fits file (string)
        args: fba_launch-like parser.parse_args() output
            should contain at least (see fba_launch arguments):
                - outdir, survey, program, rundate, pmcorr, pmtime_utc_str
                - faprgrm, mtltime, goaltime, goaltype, sbprof, mintfrac
            will also be used to store in FAARGS the list of input arguments of the fba_launch call.
        mydirs: dictionary with the desitarget paths; ideally:
            - sky: sky folder
            - skysupp: skysupp folder
            - gfa: GFA folder
            - targ: targets folder (static catalogs, with all columns)
            - mtl: MTL folder
            - scnd: secondary fits catalog (static)
            - scndmtl: MTL folder for secondary targets
            - too: ToO ecsv catalog
        hdr_survey: value for the SURVEY keyword (string)
        hdr_faprgrm: value for the FAPRGRM keyword (string)
        faflavor: usually {survey}{program} in lower cases (string)
        ebv: median EBV over the tile targets (float)
        obscon: tile allowed observing conditions (string; e.g. "DARK|GRAY|BRIGHT|BACKUP")
        fascript: fba_launch-like script used to designed the tile; in case of different scripts for dedicated tiles
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Notes:
        no check is done on mydirs.
        hdr_survey, hdr_faprgrm: for the "regular" surveys (e.g., sv3, main), those will be args.survey, args.program
            but for dedicated survey, they will (have to) be different.
        faflavor has to be {hdr_survey}{hdr_faprgrm}; will exit with an error if not;
            keeping this to be sure it is not forgotten to be done for dedicated programs.
    """
    # AR sanity check on faflavor
    if faflavor != "{}{}".format(hdr_survey, hdr_faprgrm):
        # NOTE(review): "inconsitent" typo is in a runtime log string; left untouched here
        log.error(
            "{:.1f}s\t{}\tfaflavor={} inconsitent with hdr_survey={} and hdr_faprgrm={}; exiting".format(
                time() - start, step, faflavor, hdr_survey, hdr_faprgrm,
            )
        )
        sys.exit(1)
    # AR propagating some settings into the PRIMARY header
    fd = fitsio.FITS(fiberassignfn, "rw")
    # AR faflavor
    fd["PRIMARY"].write_key("FAFLAVOR", faflavor)
    # AR folders, with replacing $DESI_ROOT by DESIROOT
    # AR so header paths stay valid across machines with different DESI_ROOT
    desiroot = os.getenv("DESI_ROOT")
    fd["PRIMARY"].write_key("DESIROOT", desiroot)
    # AR one header keyword per mydirs entry, in alphabetical key order
    for key in np.sort(list(mydirs.keys())):
        if (key == "mtl") & (isinstance(mydirs["mtl"], list)):
            # AR header keywords: MTL, MTL2, MTL3, etc
            # AR probably to be deprecate for sv2
            suffixs = [""] + np.arange(2, len(mydirs["mtl"]) + 1).astype(str).tolist()
            for mtldir, suffix in zip(mydirs["mtl"], suffixs):
                fd["PRIMARY"].write_key(
                    "mtl{}".format(suffix), mtldir.replace(desiroot, "DESIROOT"),
                )
        else:
            fd["PRIMARY"].write_key(key, mydirs[key].replace(desiroot, "DESIROOT"))
    # AR storing some specific arguments
    # AR plus a (long) FAARGS keyword with storing arguments to re-run the fiber assignment
    # AR we exclude from FAARGS outdir, forcetiled, and any None argument
    tmparr = []
    for kwargs in args._get_kwargs():
        if (kwargs[0].lower() not in ["outdir", "forcetileid"]) & (
            kwargs[1] is not None
        ):
            tmparr += ["--{} {}".format(kwargs[0], kwargs[1])]
    fd["PRIMARY"].write_key(
        "faargs", " ".join(tmparr),
    )
    # AR some keywords
    fd["PRIMARY"].write_key("outdir", args.outdir)
    fd["PRIMARY"].write_key("survey", hdr_survey)  # AR not args.survey!
    fd["PRIMARY"].write_key("rundate", args.rundate)
    fd["PRIMARY"].write_key("pmcorr", args.pmcorr)
    fd["PRIMARY"].write_key("pmtime", args.pmtime_utc_str)
    fd["PRIMARY"].write_key("faprgrm", hdr_faprgrm)  # AR not args.program!
    fd["PRIMARY"].write_key("mtltime", args.mtltime)
    fd["PRIMARY"].write_key("obscon", obscon)
    # AR informations for NTS
    # AR SBPROF from https://desi.lbl.gov/trac/wiki/SurveyOps/SurveySpeed#NominalFiberfracValues
    # AR version 35
    fd["PRIMARY"].write_key("goaltime", args.goaltime)
    fd["PRIMARY"].write_key("goaltype", args.program)
    # AR ebvfac: extinction-based factor computed from the median of ebv
    fd["PRIMARY"].write_key("ebvfac", 10.0 ** (2.165 * np.median(ebv) / 2.5))
    fd["PRIMARY"].write_key("sbprof", args.sbprof)
    fd["PRIMARY"].write_key("mintfrac", args.mintfrac)
    # AR fba_launch-like script name used to designed the tile
    fd["PRIMARY"].write_key("fascript", fascript)
    # AR SVN revision number
    fd["PRIMARY"].write_key(
        "svndm", get_svn_version(os.path.join(os.getenv("DESIMODEL"), "data"))
    )
    fd["PRIMARY"].write_key(
        "svnmtl", get_svn_version(os.path.join(os.getenv("DESI_SURVEYOPS"), "mtl"))
    )
    fd.close()
def secure_gzip(
    fiberassignfn, log=Logger.get(), step="", start=time(),
):
    """
    Secure gzipping of the fiberassign-TILEID.fits file.
    Args:
        fiberassignfn: path to fiberassign-TILEID.fits file (string)
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Notes:
        any pre-existing {fiberassignfn}.gz is deleted first, so that gzip
        does not fail on overwrite.
    """
    # AR local import, only needed here
    import subprocess
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    if os.path.isfile("{}.gz".format(fiberassignfn)):
        os.remove("{}.gz".format(fiberassignfn))
        log.info(
            "{:.1f}s\t{}\tdeleting existing {}.gz".format(
                time() - start, step, fiberassignfn
            )
        )
    # AR argument-list form (no shell) so paths with spaces/metacharacters are safe;
    # AR check=True raises if gzip fails, instead of silently continuing (os.system
    # AR previously discarded the exit status)
    subprocess.run(["gzip", fiberassignfn], check=True)
    log.info("{:.1f}s\t{}\tgzipping {}".format(time() - start, step, fiberassignfn))
def get_dt_masks(
    survey, log=None, step="", start=time(),
):
    """
    Get the desitarget masks for a survey.
    Args:
        survey: survey name: "sv1", "sv2", "sv3" or "main") (string)
        log (optional, defaults to None): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Returns:
        yaml_masks: dictionary storing in the
            "DESI_TARGET", "BGS_TARGET", "MWS_TARGET", "SCND_TARGET" keys
            the corresponding desitarget YAML masks for survey
        wd_mskkeys: list of keys identifying the WDs
        wd_msks: list of masks identifying the WDs
        std_mskkeys: list of keys identifying the STDs
        std_msks: list of masks identifying the STDs
    Notes:
        close to desitarget.targets.main_cmx_or_sv,
        but using a dictionary, more adapted to this code
        exits with status 1 on an unknown survey
    """
    # AR survey-dependent targetmask module
    if survey == "sv1":
        from desitarget.sv1 import sv1_targetmask as targetmask
    elif survey == "sv2":
        from desitarget.sv2 import sv2_targetmask as targetmask
    elif survey == "sv3":
        from desitarget.sv3 import sv3_targetmask as targetmask
    elif survey == "main":
        from desitarget import targetmask
        # NOTE(review): imported but not used here — presumably kept for its
        # import-availability check; confirm before removing
        from fiberassign.targets import default_main_stdmask
    else:
        msg = "survey={} is not in sv1, sv2, sv3 or main; exiting".format(survey)
        if log is not None:
            log.error("{:.1f}s\t{}\t{}".format(time() - start, step, msg))
        else:
            print(msg)
        sys.exit(1)
    # AR YAML masks
    yaml_masks = {
        "DESI_TARGET": targetmask.desi_mask,
        "BGS_TARGET": targetmask.bgs_mask,
        "MWS_TARGET": targetmask.mws_mask,
        "SCND_TARGET": targetmask.scnd_mask,
    }
    # AR WD and STD (key, mask) pairs, scanning DESI_TARGET then MWS_TARGET
    wd_mskkeys, wd_msks = [], []
    std_mskkeys, std_msks = [], []
    for mskkey in ("DESI_TARGET", "MWS_TARGET"):
        for name in yaml_masks[mskkey].names():
            if "_WD" in name:
                wd_mskkeys.append(mskkey)
                wd_msks.append(name)
            if "STD" in name:
                std_mskkeys.append(mskkey)
                std_msks.append(name)
    return yaml_masks, wd_mskkeys, wd_msks, std_mskkeys, std_msks
def get_qa_tracers(
    survey, program, log=None, step="", start=time(),
):
    """
    Returns the tracers for which we provide QA plots of fiber assignment.
    Args:
        survey: survey name: "sv1", "sv2", "sv3" or "main") (string)
        program: "DARK", "BRIGHT", or "BACKUP" (string)
        log (optional, defaults to None): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    Returns:
        trmskkeys: list of keys to select the mask on (list of strings)
        trmsks: list of mask names (list of strings)
    Notes:
        exits with status 1 on an unknown program
    """
    if program == "DARK":
        trmskkeys = 3 * ["DESI_TARGET"]
        trmsks = ["LRG", "ELG", "QSO"]
    elif program == "BRIGHT":
        trmskkeys = 2 * ["BGS_TARGET"] + 2 * ["MWS_TARGET"]
        trmsks = ["BGS_BRIGHT", "BGS_FAINT"]
        # AR sv1 used a different name for the MWS broad sample
        if survey == "sv1":
            trmsks.extend(["MWS_MAIN_BROAD", "MWS_NEARBY"])
        else:
            trmsks.extend(["MWS_BROAD", "MWS_NEARBY"])
    elif program == "BACKUP":
        trmskkeys = 3 * ["MWS_TARGET"]
        trmsks = ["BACKUP_BRIGHT", "BACKUP_FAINT", "BACKUP_VERY_FAINT"]
    else:
        msg = "program={} not in DARK, BRIGHT, or BACKUP; exiting".format(program)
        if log is not None:
            log.error("{:.1f}s\t{}\t{}".format(time() - start, step, msg))
        else:
            print(msg)
        sys.exit(1)
    return trmskkeys, trmsks
def get_parent_assign_quants(
    survey,
    targfns,
    fiberassignfn,
    tilera,
    tiledec,
):
    """
    Stores the parent and assigned targets properties (desitarget columns).
    Args:
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        targfns: paths to the input targets fits files, e.g. targ, scnd, too (either a string if only one file, or a list of strings)
        fiberassignfn: path to the output fiberassign-TILEID.fits file (string)
        tilera: tile center R.A. (float)
        tiledec: tile center Dec. (float)
    Returns:
        parent: dictionary of the parent target sample, with each key being some desitarget column
        assign: same as parent, with similar row-ordering (filling with zeros or NaNs if not assigned)
        dras: dictionary with projected distance (degrees) along R.A. to the center of the tile (np.array of floats),
            for each of the following subsample: "parent", "assign", "sky", "bad", "wd", "std" (all assigned subsamples,
            except parent)
        ddecs: same as dras, for projected distances along Dec.
        petals: dictionary with PETAL_LOC (np.array of floats) for each of the assigned "sky", "bad", "wd", "std" subsamples
        nassign: dictionary with the number of assigned fibers for each of the assigned "SKY", "BAD", "TGT", "WD", "STD" subsamples
    Notes:
        Survey-specific columns (e.g. SV3_DESI_TARGET) are stored under their
        generic names (DESI_TARGET, ...) in the returned dictionaries.
        Columns absent from an input file are filled with zeros.
    """
    # AR convert targfns to list if string (i.e. only one input file)
    if isinstance(targfns, str):
        targfns = [targfns]
    # AR initializing dictionaires
    parent, assign, dras, ddecs, petals, nassign = {}, {}, {}, {}, {}, {}
    # AR YAML and WD and STD masks
    yaml_masks, wd_mskkeys, wd_msks, std_mskkeys, std_msks = get_dt_masks(survey)
    # AR keys we use (plus few for assign)
    keys = [
        "TARGETID",
        "FLUX_G",
        "FLUX_R",
        "FIBERTOTFLUX_R",
        "FLUX_Z",
        "FLUX_W1",
        "FLUX_W2",
        "EBV",
        "GAIA_PHOT_G_MEAN_MAG",
        "RA",
        "DEC",
        "DESI_TARGET",
        "BGS_TARGET",
        "MWS_TARGET",
        "SCND_TARGET",
    ]
    # AR parent: concatenate the columns of all input target files
    for key in keys:
        parent[key] = []
    for targfn in targfns:
        d = fits.open(targfn)[1].data
        for key in keys:
            if key in ["DESI_TARGET", "BGS_TARGET", "MWS_TARGET", "SCND_TARGET",]:
                # AR SV catalogs prefix the targeting columns (e.g. SV3_DESI_TARGET)
                if survey.lower()[:2] == "sv":
                    key_orig = "{}_{}".format(survey.upper(), key)
                else:
                    key_orig = key
                if key_orig in d.dtype.names:
                    parent[key] += d[key_orig].tolist()
                else:
                    # AR column missing from this file -> pad with zeros (one per row)
                    parent[key] += [0 for x in d["RA"]]
            # AR flux, ebv for secondary
            elif key not in d.dtype.names:
                parent[key] += [0.0 for x in d["RA"]]
            else:
                parent[key] += d[key].tolist()
    for key in keys:
        parent[key] = np.array(parent[key])
    dras["parent"], ddecs["parent"] = get_tpos(
        tilera, tiledec, parent["RA"], parent["DEC"]
    )
    # AR fiberassign
    d = fits.open(fiberassignfn)[1].data
    # AR counts of assigned fibers per OBJTYPE
    for key in ["SKY", "BAD", "TGT"]:
        nassign[key] = (d["OBJTYPE"] == key).sum()
    # AR SKY
    keep = d["OBJTYPE"] == "SKY"
    dras["sky"], ddecs["sky"] = get_tpos(
        tilera, tiledec, d["TARGET_RA"][keep], d["TARGET_DEC"][keep]
    )
    petals["sky"] = d["PETAL_LOC"][keep]
    # AR BAD
    keep = d["OBJTYPE"] == "BAD"
    dras["bad"], ddecs["bad"] = get_tpos(
        tilera, tiledec, d["TARGET_RA"][keep], d["TARGET_DEC"][keep]
    )
    petals["bad"] = d["PETAL_LOC"][keep]
    # AR TGT
    # AR arrays twinning the parent ordering, with nans/zeros
    # AR e.g. SV2_DESI_TARGET -> DESI_TARGET
    d = d[d["OBJTYPE"] == "TGT"]
    # AR TARGETIDs are unique in both arrays, so we can use geomask
    # AR iip/ii are aligned index arrays into parent / d respectively
    # AR (match presumably desitarget.geomask.match — TODO confirm import)
    iip, ii = match(parent["TARGETID"], d["TARGETID"])
    # AR in the fiberassign file, coordinates are TARGET_RA/TARGET_DEC
    keys = [key for key in keys if key != "RA" and key != "DEC"]
    keys += [
        "TARGET_RA",
        "TARGET_DEC",
        "PETAL_LOC",
    ]
    for key in keys:
        if key in [
            "TARGETID",
            "DESI_TARGET",
            "BGS_TARGET",
            "MWS_TARGET",
            "SCND_TARGET",
        ]:
            # AR integer columns: unassigned rows stay 0
            assign[key] = np.zeros(len(parent["TARGETID"]), dtype=int)
            if (key != "TARGETID") & (survey.lower()[:2] == "sv"):
                assign[key][iip] = d["{}_{}".format(survey.upper(), key)][ii]
            else:
                assign[key][iip] = d[key][ii]
        else:
            # AR float columns: unassigned rows stay NaN
            assign[key] = np.nan + np.zeros(len(parent["TARGETID"]))
            assign[key][iip] = d[key][ii]
    dras["assign"], ddecs["assign"] = get_tpos(
        tilera, tiledec, assign["TARGET_RA"], assign["TARGET_DEC"]
    )
    # AR WD: union of all WD (key, mask) bit selections
    keep = np.zeros(len(assign["TARGET_RA"]), dtype=bool)
    for mskkey, msk in zip(wd_mskkeys, wd_msks):
        keep |= (assign[mskkey] & yaml_masks[mskkey][msk]) > 0
    dras["wd"], ddecs["wd"] = get_tpos(
        tilera, tiledec, assign["TARGET_RA"][keep], assign["TARGET_DEC"][keep]
    )
    petals["wd"] = assign["PETAL_LOC"][keep]
    nassign["WD"] = keep.sum()
    # AR STD: union of all STD (key, mask) bit selections
    keep = np.zeros(len(assign["TARGET_RA"]), dtype=bool)
    for mskkey, msk in zip(std_mskkeys, std_msks):
        keep |= (assign[mskkey] & yaml_masks[mskkey][msk]) > 0
    dras["std"], ddecs["std"] = get_tpos(
        tilera, tiledec, assign["TARGET_RA"][keep], assign["TARGET_DEC"][keep]
    )
    petals["std"] = assign["PETAL_LOC"][keep]
    nassign["STD"] = keep.sum()
    return parent, assign, dras, ddecs, petals, nassign
def print_assgn_parent_stats(
    survey, parent, assign, log=Logger.get(), step="", start=time(),
):
    """
    Prints for each mask the number of parent and assigned targets, and the fraction assigned.
    Args:
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        parent: dictionary for the parent target sample (output by get_parent_assign_quants())
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    """
    # AR YAML masks (WD/STD lists not needed here)
    yaml_masks, _, _, _, _ = get_dt_masks(survey, log=log, step=step, start=start,)
    # AR stats : assigned / parent
    log.info("======= ASSIGNMENT STATISTICS : START =======")
    log.info("# MASKKEY\tMASK\tPARENT\tASSIGN\tFRACTION")
    is_sv = survey.lower()[:2] == "sv"
    for mskkey in yaml_masks:
        # AR report the survey-specific column name (e.g. SV3_DESI_TARGET)
        mskkey_orig = "{}_{}".format(survey.upper(), mskkey) if is_sv else mskkey
        for msk in yaml_masks[mskkey].names():
            bit = yaml_masks[mskkey][msk]
            nparent = ((parent[mskkey] & bit) > 0).sum()
            nassign = ((assign[mskkey] & bit) > 0).sum()
            # AR report 0.00 rather than dividing by zero
            frac = nassign / nparent if nparent != 0 else 0.0
            log.info(
                "{}\t{}\t{}\t{}\t{:.2f}".format(
                    mskkey_orig, msk, nparent, nassign, frac
                )
            )
    log.info("======= ASSIGNMENT STATISTICS : END =======")
def get_ext_coeffs(band):
    """
    Returns the Galactic extinction coefficient for a given band.
    Args:
        band: band name: "G", "R", "Z", "W1", or "W2" (string)
    Returns:
        ext: extinction coefficient (float)
    Note:
        https://www.legacysurvey.org/dr9/catalogs/#galactic-extinction-coefficients
        raises KeyError for an unknown band
    """
    return {"G": 3.214, "R": 2.165, "Z": 1.211, "W1": 0.184, "W2": 0.113}[band]
def flux2mag(flux, band=None, ebv=None):
    """
    Converts a flux to a (optionally extinction-corrected) magnitude.
    Args:
        flux: flux in Nmgy (np.array of floats)
        band (optional, defaults to None): band name: "G", "R", "Z", "W1", or "W2" (string)
        ebv (optional, defaults to None): EBV values (np.array of floats)
    Returns:
        mag: AB magnitudes (np.array of floats); extinction-corrected if band and ebv not None
    Notes:
        non-positive (and NaN) flux values are converted to NaN magnitudes
    """
    # AR np.nan_to_num maps NaN to 0, so NaN fluxes are excluded here too
    positive = np.nan_to_num(flux) > 0
    mags = np.full(len(flux), np.nan)
    mags[positive] = 22.5 - 2.5 * np.log10(flux[positive])
    if ebv is not None:
        mags -= get_ext_coeffs(band) * ebv
    return mags
def qa_print_infos(
    ax,
    survey,
    program,
    faflavor,
    tileid,
    tilera,
    tiledec,
    obscon,
    rundate,
    parent,
    assign,
):
    """
    Print general fiber assignment infos on the QA plot.
    Args:
        ax: pyplot object
        survey: "sv1", "sv2", "sv3" or "main" (string)
        program: "DARK", "BRIGHT", or "BACKUP" (string)
        faflavor: usually {survey}{program} in lower cases (string)
        tileid: tile TILEID (int)
        tilera: tile center R.A. in degrees (float)
        tiledec: tile center Dec. in degrees (float)
        obscon: tile allowed observing conditions (string; e.g. "DARK|GRAY|BRIGHT|BACKUP")
        rundate: used rundate (string)
        parent: dictionary for the parent target sample (output by get_parent_assign_quants())
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
    """
    # AR hard-setting the plotted tracers
    # AR TBD: handle secondaries
    trmskkeys, trmsks = get_qa_tracers(survey, program)
    # AR masks
    yaml_masks, wd_mskkeys, wd_msks, std_mskkeys, std_msks = get_dt_masks(survey)
    # AR infos : general
    x, y, dy, fs = 0.05, 0.95, -0.1, 10
    for t in [
        "flavor={}".format(faflavor),
        "TILEID={:06d}".format(tileid),
        "RA,DEC={:.3f},{:.3f}".format(tilera, tiledec),
        "obscon={}".format(obscon),
        "rundate={}".format(rundate),
        "",
    ]:
        ax.text(x, y, t.expandtabs(), fontsize=fs, transform=ax.transAxes)
        y += dy
    # AR infos: wd/std + tracers
    xs = [0.05, 0.65, 0.95, 1.20]
    has = ["left", "right", "right", "right"]
    tracers = []
    for mskkey, msk in zip(wd_mskkeys + std_mskkeys, wd_msks + std_msks):
        n = ((assign[mskkey] & yaml_masks[mskkey][msk]) > 0).sum()
        tracers += [[msk, "{}".format(n), "", ""]]
    tracers += [["", "", "", ""], ["MASK", "ASSGN", "PARENT", "FAFRAC"]]
    for msk, mskkey in zip(trmsks, trmskkeys):
        nparent = ((parent[mskkey] & yaml_masks[mskkey][msk]) > 0).sum()
        n = ((assign[mskkey] & yaml_masks[mskkey][msk]) > 0).sum()
        # AR bugfix/consistency: guard against nparent == 0 (was printing inf/nan),
        # AR matching the convention used in print_assgn_parent_stats()
        if nparent == 0:
            frac = 0.0
        else:
            frac = n / nparent
        tracers += [
            [msk, "{}".format(n), "{}".format(nparent), "{:.2f}".format(frac),]
        ]
    tracers += [["", "", "", ""]]
    for tracer in tracers:
        for i in range(4):
            ax.text(
                xs[i],
                y,
                tracer[i].expandtabs(),
                fontsize=fs,
                ha=has[i],
                transform=ax.transAxes,
            )
        y += dy
    # AR infos: brightest target and assigned object
    # AR bugfix: previous code had "magthres = 16.0" (typo), so the DARK case
    # AR never set the threshold (harmless only because it equals the default)
    # AR defaults to 16.0 in case new programs are added
    magthresh = {"DARK": 16.0, "BRIGHT": 15.0, "BACKUP": 15.0}.get(program, 16.0)
    for sample, d in zip(["parent", "assgn"], [parent, assign]):
        ax.text(
            0.05, y, "Min. {} mag ".format(sample), fontsize=fs, transform=ax.transAxes
        )
        y += dy
        # AR label typo fixed: "GAIA_PHOT_G_MEAN_MAG)" -> "min(GAIA_PHOT_G_MEAN_MAG)"
        for mag, lab in zip(
            [d["GAIA_PHOT_G_MEAN_MAG"], flux2mag(d["FIBERTOTFLUX_R"])],
            ["min(GAIA_PHOT_G_MEAN_MAG)", "min(LS-R-FIBTOTMAG)"],
        ):
            magmin, color = "-", "k"
            # np.nan_to_num: NaN,Inf -> 0, so keep=False.
            keep = (np.nan_to_num(mag, posinf=0., neginf=0.) > 0)
            if keep.sum() > 0:
                magmin = mag[keep].min()
                # AR highlight in red if brighter than the program threshold
                if magmin < magthresh:
                    magmin, color = "{:.1f}".format(magmin), "r"
                else:
                    magmin, color = "{:.1f}".format(magmin), "k"
            ax.text(
                0.05,
                y,
                "{} = {}".format(lab, magmin),
                fontsize=fs,
                color=color,
                transform=ax.transAxes,
            )
            y += dy
        y += dy
def qa_print_petal_infos(
    ax, petals, assign,
):
    """
    Print the assigned SKY, BAD, WD, STD, TGT counts per petal on the QA plot.
    Args:
        ax: pyplot object
        petals: dictionary with PETAL_LOC (np.array of floats) for each of the assigned "sky", "bad", "wd", "std" subsamples
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
    Notes:
        Rows for petals with zero assigned STDs are displayed in red.
    """
    # AR stats per petal
    xs = [0.05, 0.25, 0.45, 0.65, 0.85, 1.05]
    ts = ["PETAL", "NSKY", "NBAD", "NWD", "NSTD", "NTGT"]
    y, dy = 0.95, -0.1
    fs = 10
    # AR header row
    for i in range(6):
        ax.text(xs[i], y, ts[i], fontsize=fs, ha="center", transform=ax.transAxes)
    y += dy
    for p in range(10):
        # AR red-flag a petal with no assigned standard stars
        if (petals["std"] == p).sum() == 0:
            color = "r"
        else:
            color = "k"
        ts = [
            "{:.0f}".format(p),
            "{:.0f}".format((petals["sky"] == p).sum()),
            "{:.0f}".format((petals["bad"] == p).sum()),
            "{:.0f}".format((petals["wd"] == p).sum()),
            "{:.0f}".format((petals["std"] == p).sum()),
            "{:.0f}".format((assign["PETAL_LOC"] == p).sum()),
        ]
        for i in range(6):
            ax.text(
                xs[i],
                y,
                ts[i],
                color=color,
                fontsize=fs,
                ha="center",
                transform=ax.transAxes,
            )
        y += dy
    # AR stats for all petals
    # AR bugfix: the totals row used to inherit `color` from the last loop
    # AR iteration, so it turned red whenever petal 9 had no STDs;
    # AR display the summary row in black unconditionally
    color = "k"
    ts = [
        "ALL",
        "{:.0f}".format(len(petals["sky"])),
        "{:.0f}".format(len(petals["bad"])),
        "{:.0f}".format(len(petals["wd"])),
        "{:.0f}".format(len(petals["std"])),
        "{:.0f}".format(np.isfinite(assign["PETAL_LOC"]).sum()),
    ]
    for i in range(6):
        ax.text(
            xs[i],
            y,
            ts[i],
            color=color,
            fontsize=fs,
            ha="center",
            transform=ax.transAxes,
        )
def get_viewer_cutout(
    tileid,
    tilera,
    tiledec,
    tmpoutdir=tempfile.mkdtemp(),
    width_deg=4,
    pixscale=10,
    dr="dr9",
    timeout=15,
):
    """
    Downloads a cutout of the tile region from legacysurvey.org/viewer.
    Args:
        tileid: tile TILEID (int)
        tilera: tile center R.A. (float)
        tiledec: tile center Dec. (float)
        tmpoutdir (optional, defaults to a temporary directory): temporary directory where
            the cutout is downloaded; NOTE: concatenated without a separator,
            so it should end with "/" (string; default evaluated once at import time)
        width_deg (optional, defaults to 4): width of the cutout in degrees (float)
        pixscale (optional, defaults to 10): pixel scale of the cutout
        dr (optional, default do "dr9"): imaging data release
        timeout (optional, defaults to 15): time (in seconds) after which we quit the wget call (int)
    Returns:
        img: output of mpimg.imread() reading of the cutout (np.array of floats);
            an all-zeros (size, size, 3) array if the download or read failed
    """
    # AR cutout
    tmpfn = "{}tmp-{}.jpeg".format(tmpoutdir, tileid)
    size = int(width_deg * 3600.0 / pixscale)
    layer = "ls-{}".format(dr)
    # AR bugfix: the format() arguments were (tmpfn, timeout), i.e. swapped with
    # AR respect to the placeholders, producing "timeout <file> wget -O <seconds>"
    tmpstr = 'timeout {} wget -q -O {} "http://legacysurvey.org/viewer-dev/jpeg-cutout/?layer={}&ra={:.5f}&dec={:.5f}&pixscale={:.0f}&size={:.0f}"'.format(
        timeout, tmpfn, layer, tilera, tiledec, pixscale, size
    )
    try:
        subprocess.check_call(tmpstr, stderr=subprocess.DEVNULL, shell=True)
    except subprocess.CalledProcessError:
        print("no cutout from viewer after {}s, stopping the wget call".format(timeout))
    # AR best-effort read: fall back to a blank image on any failure
    try:
        img = mpimg.imread(tmpfn)
    except Exception:
        img = np.zeros((size, size, 3))
    if os.path.isfile(tmpfn):
        os.remove(tmpfn)
    return img
def mycmap(name, n, cmin=0, cmax=1):
    """
    Defines a quantised color scheme.
    Args:
        name: matplotlib colormap name (used through: matplotlib.cm.get_cmap(name)) (string)
        n: number of different colors to be in the color scheme (int)
        cmin (optional, defaults to 0): flooring "color-value" (float)
        cmax (optional, defaults to 1): ceiling "color-value" (float)
    Returns:
        The quantised color map.
    Notes:
        https://matplotlib.org/examples/api/colorbar_only.html
        NOTE(review): matplotlib.cm.get_cmap is deprecated in recent matplotlib
        releases — confirm against the pinned matplotlib version
    """
    # AR sample n colors from the base colormap over [cmin, cmax]
    base = matplotlib.cm.get_cmap(name)
    colors = base(np.linspace(cmin, cmax, n))
    quantised = matplotlib.colors.ListedColormap(colors)
    # AR out-of-range values saturate to the first/last color
    quantised.set_under(colors[0])
    quantised.set_over(colors[-1])
    return quantised
def get_tpos(tilera, tiledec, ras, decs):
    """
    Computes the projected distance of a set of coordinates to a tile center.
    Args:
        tilera: tile center R.A. in degrees (float)
        tiledec: tile center Dec. in degrees (float)
        ras: R.A. in degrees (np.array of floats)
        decs: Dec. in degrees (np.array of floats)
    Returns:
        dras: projected distance (degrees) to the tile center along R.A. (np.array of floats)
        ddecs: projected distance (degrees) to the tile center along Dec. (np.array of floats)
    """
    center = SkyCoord(ra=tilera * units.deg, dec=tiledec * units.deg, frame="icrs")
    coords = SkyCoord(ra=ras * units.deg, dec=decs * units.deg, frame="icrs")
    # AR spherical_offsets_to returns the (R.A., Dec.) offset angles
    dra, ddec = center.spherical_offsets_to(coords)
    return dra.value, ddec.value
def deg2pix(dras, ddecs, width_deg, width_pix):
    """
    Converts (dras,ddecs) to (xs,ys) in cutout img pixels.
    Args:
        dras: projected distance (degrees) along R.A. to the center of the cutout (np.array of floats)
        ddecs: projected distance (degrees) along Dec. to the center of the cutout (np.array of floats)
        width_deg: width of the cutout in degrees (np.array of floats)
        width_pix: width of the cutout in pixels (np.array of floats)
    Returns:
        dxs: distance (pixels) along x to the center of the cutout (np.array of floats)
        dys: distance (pixels) along y to the center of the cutout (np.array of floats)
    Notes:
        not sure at the <1 pixel level...
        x decreases with R.A. (sky convention), y increases with Dec.
    """
    # AR fractional position in the cutout (0.5, 0.5 is the center)
    frac_x = 0.5 - dras / width_deg
    frac_y = 0.5 + ddecs / width_deg
    return width_pix * frac_x, width_pix * frac_y
def plot_cutout(
    ax,
    img,
    width_deg,
    dras,
    ddecs,
    dopetal=False,
    c="w",
    alpha=None,
    txts=None,
    xtxts=None,
    ytxts=None,
    vmin=None,
    vmax=None,
    cmap=mycmap("coolwarm", 10, 0, 1),
):
    """
    Plots a ls-dr9 cutout, with overlaying targets coordinates.
    Args:
        ax: pyplot object
        img: mpimg.imread(ls-dr9-cutout)
        width_deg: width of the cutout in degrees (np.array of floats)
        dras: targets projected distance (degrees) along R.A. to the center of the cutout (np.array of floats)
        ddecs: targets projected distance (degrees) along Dec. to the center of the cutout (np.array of floats)
        dopetal (optional, defaults to False): overplot petals? (boolean)
        c (optional, defaults to "w"): color used to display targets (string),
            or an array of values to color-code with cmap
        alpha (optional, defaults to None): pyplot alpha
        txts (optional, defaults to None): list of text to display (list of strings)
        xtxts (optional, defaults to None): list normalized x-positions of text to display (list of strings)
        ytxts (optional, defaults to None): list normalized y-positions of text to display (list of strings)
        vmin (optional, defaults to None): minimum value for the colorbar
        vmax (optional, defaults to None): maximum value for the colorbar
        cmap (optional, defaults to mycmap("coolwarm", 10, 0, 1)): colormap scheme
            (default evaluated once at import time)
    """
    # AR txts, xtxts, ytxts : lists
    # AR setting transparency as a function of density /deg2
    if (dras is not None) & (alpha is None):
        tmpdens = np.array([0, 100, 500, 1000, 5000, 7500, 1e10],)
        tmpalph = np.array([1, 0.8, 0.5, 0.2, 0.1, 0.05, 0.025])
        alpha = tmpalph[
            np.where(tmpdens > len(dras) / (np.pi * tile_radius_deg ** 2))[0][0]
        ]
    width_pix = img.shape[0]
    ax.imshow(
        img,
        origin="upper",
        zorder=0,
        extent=[0, width_pix, 0, width_pix],
        aspect="equal",
    )
    ax.set_aspect("equal")
    ax.set_xlim(-0.5, width_pix + 0.5)
    ax.set_ylim(-0.5, width_pix + 0.5)
    # AR data points
    if dras is not None:
        # AR rescaling degrees to img pixels ; not sure at <1 pixel...
        dxs, dys = deg2pix(dras, ddecs, width_deg, width_pix)
        if isinstance(c, str):
            ax.scatter(dxs, dys, c=c, s=1, alpha=alpha)
        else:
            # AR bugfix: previously "cmap=cm", which referenced an undefined
            # AR name and raised NameError whenever c was a value array
            ax.scatter(dxs, dys, c=c, s=1, alpha=alpha, vmin=vmin, vmax=vmax, cmap=cmap)
    # AR per petal infos
    if dopetal:
        # AR draw the 10 petal boundaries and label each petal
        for ang, p in zip(
            np.linspace(2 * np.pi, 0, 11), [7, 8, 9, 0, 1, 2, 3, 4, 5, 6]
        ):
            dxs, dys = deg2pix(
                np.array([0, tile_radius_deg * np.cos(ang)]),
                np.array([0, tile_radius_deg * np.sin(ang)]),
                width_deg,
                width_pix,
            )
            ax.plot(
                dxs, dys, c="r", lw=0.25, alpha=1.0, zorder=1,
            )
            anglab = ang + 0.1 * np.pi
            dxs, dys = deg2pix(
                1.1 * tile_radius_deg * np.cos(anglab),
                1.1 * tile_radius_deg * np.sin(anglab),
                width_deg,
                width_pix,
            )
            ax.text(
                dxs, dys, "{:.0f}".format(p), color="r", va="center", ha="center",
            )
    ax.axis("off")
    if txts is not None:
        for txt, xtxt, ytxt in zip(txts, xtxts, ytxts):
            ax.text(
                xtxt,
                ytxt,
                txt,
                color="y",
                fontweight="bold",
                fontsize=10,
                ha="center",
                va="top",
                transform=ax.transAxes,
            )
def plot_hist(ax, mags, magps, msk):
    """
    Plot the magnitude histograms of the assigned and parent samples,
    plus their bin-by-bin ratio on a twin (right-hand) axis.
    Args:
        ax: pyplot object
        mags: assigned magnitudes (np.array of floats)
        magps: parent magnitudes (np.array of floats)
        msk: mask name of the plotted sample
    """
    # AR the binning is set by the finite parent magnitude range
    ok_parent = np.isfinite(magps)
    ok_assign = np.isfinite(mags)
    bins = np.linspace(magps[ok_parent].min(), magps[ok_parent].max(), 26)
    # AR parent histogram (thick, transparent)
    counts_parent, _, _ = ax.hist(
        magps[ok_parent],
        bins=bins,
        histtype="step",
        alpha=0.3,
        lw=3,
        color="k",
        density=False,
        label="{} parent ({})".format(msk, len(magps)),
    )
    # AR assigned histogram (thin, opaque)
    counts_assign, _, _ = ax.hist(
        mags[ok_assign],
        bins=bins,
        histtype="step",
        alpha=1.0,
        lw=1.0,
        color="k",
        density=False,
        label="{} assigned ({})".format(msk, len(mags)),
    )
    ax.set_ylabel("counts")
    ax.grid(True)
    # ax.legend(loc=2)
    # AR assigned/parent ratio, in red, on a secondary y-axis
    axr = ax.twinx()
    centers = 0.5 * (bins[1:] + bins[:-1])
    ratios = np.array(counts_assign) / np.array(counts_parent).astype(float)
    axr.plot(centers, ratios, color="r", lw=0.5)
    axr.yaxis.label.set_color("r")
    axr.tick_params(axis="y", colors="r")
    axr.set_ylabel("ratio", labelpad=0)
    axr.set_ylim(0, 1)
    # AR text labels
    labels = [msk, "assigned/parent = {}/{}".format(len(mags), len(magps))]
    for label, xpos, ypos in zip(labels, [0.5, 0.5], [0.98, 0.90]):
        ax.text(
            xpos,
            ypos,
            label,
            color="k",
            fontweight="bold",
            fontsize=10,
            ha="center",
            va="top",
            transform=ax.transAxes,
        )
def get_qa_farange(fafrac, dfa=0.2):
    """
    Picks the plotted fiber assignment rate range for the QA plot.
    Args:
        fafrac: fiber assignment rate for the plotted sample (float)
        dfa (optional, defaults to 0.2): plotted range (float)
    Returns:
        famin: lower boundary of the plotted fiber assignment rate (float)
        famax: upper boundary of the plotted fiber assignment rate (float)
    Notes:
        the boundaries are rounded to 0.1 and clipped to [0, 1]
    """
    half_range = dfa / 2
    famin = np.max([0, np.round(fafrac - half_range, 1)])
    famax = np.min([1, np.round(fafrac + half_range, 1)])
    return famin, famax
def plot_hist_tracer(ax, survey, parent, assign, msk, mskkey):
    """
    Plots a normalized histogram for the assigned magnitudes (xs) and the parent magnitudes (xps).
    Args:
        ax: pyplot object
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        parent: dictionary for the parent target sample (output by get_parent_assign_quants())
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
        msk: mask name of the plotted sample (string)
        mskkey: key to select the mask on (string)
    Notes:
        No-op if the parent sample contains no target of the requested tracer,
        or if neither ls-dr9 fluxes nor Gaia G magnitudes are available.
    """
    # AR YAML mask dictionary
    yaml_masks, _, _, _, _ = get_dt_masks(survey)
    # AR selecting the relevant tracer
    if mskkey in list(parent.keys()):
        mskpsel = (parent[mskkey] & yaml_masks[mskkey][msk]) > 0
    else:
        mskpsel = np.zeros(len(parent["TARGETID"]), dtype=bool)
    # AR if no parent target, just skip
    if mskpsel.sum() > 0:
        msksel = (assign[mskkey] & yaml_masks[mskkey][msk]) > 0
        # NOTE(review): famin/famax computed but not used in this function
        famin, famax = get_qa_farange(msksel.sum() / float(mskpsel.sum()))
        # AR mag hist
        # AR band picked by substring match on the tracer name (last match wins)
        band = "R"
        if "MWS" in msk:
            band = "R"
        if "BGS" in msk:
            band = "R"
        if "LRG" in msk:
            band = "Z"
        if "ELG" in msk:
            band = "G"
        if "QSO" in msk:
            band = "R"
        #
        dohist = 0
        # AR if ls-dr9 flux is here, we plot that
        if ((parent["FLUX_{}".format(band)] > 0) & (mskpsel)).sum() > 0:
            dohist = 1
            # AR parent
            magp = flux2mag(
                parent["FLUX_{}".format(band)][mskpsel],
                band=band,
                ebv=parent["EBV"][mskpsel],
            )
            # AR assign
            mag = flux2mag(
                assign["FLUX_{}".format(band)][msksel],
                band=band,
                ebv=assign["EBV"][msksel],
            )
            # AR xlabel
            ax.set_xlabel(
                "22.5 - 2.5*log10(FLUX_{}) - {:.3f} * EBV".format(
                    band, get_ext_coeffs(band)
                )
            )
        # AR if no ls-dr9 flux, we try gaia_g
        elif ((np.isfinite(parent["GAIA_PHOT_G_MEAN_MAG"])) & (mskpsel)).sum() > 0:
            dohist = 1
            magp = parent["GAIA_PHOT_G_MEAN_MAG"][mskpsel]
            mag = assign["GAIA_PHOT_G_MEAN_MAG"][msksel]
            ax.set_xlabel("GAIA_PHOT_G_MEAN_MAG")
        if dohist == 1:
            plot_hist(ax, mag, magp, msk)
            # AR log-scaled counts, with headroom for the text labels
            _, ymax = ax.get_ylim()
            ax.set_ylim(0.8, 100 * ymax)
            ax.set_yscale("log")
def plot_colcol_tracer(
    ax,
    xbands,
    ybands,
    survey,
    parent,
    assign,
    msk,
    mskkey,
    xlim,
    ylim,
    gridsize=20,
    cm=mycmap("coolwarm", 10, 0, 1),
):
    """
    Plots a color-color diagram, with color-coding with the fiber assignment rate,
    and transparency-coding the density.
    Args:
        ax: pyplot object
        xbands: two-elements list, the x-axis color being xbands[0] - xbands[1] (list of strings)
        ybands: two-elements list, the y-axis color being ybands[0] - ybands[1] (list of strings)
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        parent: dictionary for the parent target sample (output by get_parent_assign_quants())
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
        msk: mask name of the plotted sample (string)
        mskkey: key to select the mask on (string)
        xlim: plt.xlim
        ylim: plt.ylim
        gridsize (optional, defaults to 20): plt.hexbin gridsize parameter (int)
        cm (optional, defaults to mycmap("coolwarm", 10, 0, 1)): colormap scheme
            (default evaluated once at import time)
    Notes:
        No-op unless some parent targets of the requested tracer have valid
        (positive) fluxes in all four bands.
    """
    # AR YAML mask dictionary
    yaml_masks, _, _, _, _ = get_dt_masks(survey)
    # AR selecting the relevant tracer
    if mskkey in list(parent.keys()):
        mskpsel = (parent[mskkey] & yaml_masks[mskkey][msk]) > 0
    else:
        mskpsel = np.zeros(len(parent["TARGETID"]), dtype=bool)
    # AR plotting if some parent objects with valid colors
    keep = mskpsel.copy()
    for band in [xbands[0], xbands[1], ybands[0], ybands[1]]:
        keep &= parent["FLUX_{}".format(band)] > 0
    if keep.sum() > 0:
        # AR
        msksel = (assign[mskkey] & yaml_masks[mskkey][msk]) > 0
        # AR using a dictionary
        # AR tmpdict[sample]["x"/"y"] are the extinction-corrected colors
        tmpdict = {"parent": {}, "assign": {}}
        for sample_name, sample, sel in zip(
            ["parent", "assign"], [parent, assign], [mskpsel, msksel]
        ):
            for axis_name, bands in zip(["x", "y"], [xbands, ybands]):
                mag0 = flux2mag(
                    sample["FLUX_{}".format(bands[0])][sel],
                    band=bands[0],
                    ebv=sample["EBV"][sel],
                )
                mag1 = flux2mag(
                    sample["FLUX_{}".format(bands[1])][sel],
                    band=bands[1],
                    ebv=sample["EBV"][sel],
                )
                tmpdict[sample_name][axis_name] = mag0 - mag1
        # AR first getting the hexbin outputs
        # AR (invisible hexbins: only used to grab per-cell counts on a common grid)
        hbp = ax.hexbin(
            tmpdict["parent"]["x"],
            tmpdict["parent"]["y"],
            C=None,
            gridsize=gridsize,
            extent=(xlim[1], xlim[0], ylim[0], ylim[1]),
            mincnt=0,
            visible=False,
        )
        hb = ax.hexbin(
            tmpdict["assign"]["x"],
            tmpdict["assign"]["y"],
            C=None,
            gridsize=gridsize,
            extent=(xlim[1], xlim[0], ylim[0], ylim[1]),
            mincnt=0,
            visible=False,
        )
        # AR restricting to pixels with some parent data
        keep = hbp.get_array() > 0
        tmpx = hb.get_offsets()[keep, 0]
        tmpy = hb.get_offsets()[keep, 1]
        tmpc = hb.get_array()[keep]
        tmpcp = hbp.get_array()[keep].astype(float)
        # AR fraction assigned, clipped to famin,famax
        fafrac = msksel.sum() / float(mskpsel.sum())
        famin, famax = get_qa_farange(fafrac)
        # AR per-cell color = local assigned/parent ratio mapped onto [famin, famax]
        c = cm(np.clip(((tmpc / tmpcp) - famin) / (famax - famin), 0, 1))
        # AR transparency = f(nb of parent obj)
        tmpmin, tmpmax = (
            1,
            1.2 * tmpcp.sum() / float(len(hbp.get_array())),
        )
        # AR write the alpha channel of the RGBA colors directly
        c[:, 3] = np.clip((tmpcp - tmpmin) / (tmpmax - tmpmin), 0, 1)
        sc = ax.scatter(tmpx, tmpy, c=c, s=15,)
        sc.cmap = cm
        ax.set_xlabel("{} - {}".format(xbands[0].lower(), xbands[1].lower()))
        ax.set_ylabel("{} - {}".format(ybands[0].lower(), ybands[1].lower()))
        ax.set_xlim(xlim)
        ax.set_ylim(ylim)
        ax.grid(True)
        ax.text(
            0.5,
            0.93,
            msk,
            color="k",
            fontweight="bold",
            fontsize=10,
            ha="center",
            transform=ax.transAxes,
        )
        cbar = plt.colorbar(sc)
        cbar.set_label("fraction assigned")
        cbar.mappable.set_clim(famin, famax)
def plot_sky_fa(
    axs,
    img,
    survey,
    parent,
    assign,
    dras,
    ddecs,
    msk,
    mskkey,
    width_deg,
    gridsize=30,
    cm=mycmap("coolwarm", 10, 0, 1),
):
    """
    Plots the sky distribution of the parent sample, the assigned sample, and of the fiber assignment rate.
    Args:
        axs: list of 3 pyplot objects, respectively for the parent sample, the assigned sample, and the fiber assignment rate
        img: mpimg.imread(ls-dr9-cutout)
        survey: survey (string; e.g. "sv1", "sv2", "sv3", "main")
        parent: dictionary for the parent target sample (output by get_parent_assign_quants())
        assign: dictionary for the assigned target sample (output by get_parent_assign_quants())
        dras: dictionary with projected distance (degrees) along R.A. to the center of the tile (np.array of floats),
            for each of the following subsample: "parent", "assign", "sky", "bad", "wd", "std" (all assigned subsamples,
            except parent)
        ddecs: same as dras, for projected distances along Dec.
        msk: mask name of the plotted sample (string)
        mskkey: key to select the mask on (string)
        width_deg: width of the cutout in degrees (np.array of floats)
        gridsize (optional, defaults to 30): plt.hexbin gridsize parameter (int)
        cm (optional, defaults to mycmap("coolwarm", 10, 0, 1)): colormap scheme
            (default evaluated once at import time)
    Notes:
        No-op if the parent sample contains no target of the requested tracer.
    """
    # AR YAML mask dictionary
    yaml_masks, _, _, _, _ = get_dt_masks(survey)
    # AR selecting the relevant tracer
    if mskkey in list(parent.keys()):
        mskpsel = (parent[mskkey] & yaml_masks[mskkey][msk]) > 0
    else:
        mskpsel = np.zeros(len(parent["TARGETID"]), dtype=bool)
    if mskpsel.sum() > 0:
        # AR assign sample tracer selection
        msksel = (assign[mskkey] & yaml_masks[mskkey][msk]) > 0
        # AR xlim, ylim
        # AR (x inverted so that R.A. increases leftwards, as on the sky)
        xlim = (width_deg / 2, -width_deg / 2)
        ylim = (-width_deg / 2, width_deg / 2)
        # AR area of the plotting window in deg2
        # NOTE(review): computed but unused below; densities use tile_area
        # (a module-level name) instead — confirm whether plot_area was intended
        plot_area = (xlim[0] - xlim[1]) * (ylim[1] - ylim[0])
        # AR parent
        plot_cutout(
            axs[0],
            img,
            width_deg,
            dras["parent"][mskpsel],
            ddecs["parent"][mskpsel],
            dopetal=True,
            txts=[
                msk,
                "parent : {:.0f}".format(mskpsel.sum() / tile_area) + r" deg$^{-2}$",
            ],
            xtxts=[0.5, 0.5],
            ytxts=[0.98, 0.1],
        )
        # AR assigned
        plot_cutout(
            axs[1],
            img,
            width_deg,
            dras["assign"][msksel],
            ddecs["assign"][msksel],
            dopetal=True,
            txts=[
                msk,
                "assigned : {:.0f}".format(msksel.sum() / tile_area) + r" deg$^{-2}$",
            ],
            xtxts=[0.5, 0.5],
            ytxts=[0.98, 0.1],
        )
        # AR fraction assigned, clipped to famin,famax
        fafrac = msksel.sum() / float(mskpsel.sum())
        famin, famax = get_qa_farange(fafrac)
        txts = [msk, r"mean = {:.2f}".format(fafrac)]
        xtxts = [0.5, 0.5]
        ytxts = [0.93, 0.03]
        # AR assigned fraction
        # AR hexbin of the parent positions with C = is-assigned booleans,
        # AR so each cell shows the local assignment fraction
        ax = axs[2]
        x = dras["parent"][mskpsel]
        y = ddecs["parent"][mskpsel]
        C = np.in1d(parent["TARGETID"][mskpsel], assign["TARGETID"][msksel])
        hb = ax.hexbin(
            x,
            y,
            C=C,
            gridsize=gridsize,
            extent=(xlim[1], xlim[0], ylim[0], ylim[1]),
            mincnt=1,
            alpha=0.5,
            vmin=famin,
            vmax=famax,
        )
        hb.cmap = cm
        ax.set_xlabel(r"$\Delta$RA [deg.]")
        ax.set_ylabel(r"$\Delta$DEC [deg.]")
        ax.set_xlim(xlim)
        ax.set_ylim(ylim)
        ax.grid(True)
        for txt, xtxt, ytxt in zip(txts, xtxts, ytxts):
            ax.text(
                xtxt,
                ytxt,
                txt,
                color="k",
                fontweight="bold",
                fontsize=10,
                ha="center",
                transform=ax.transAxes,
            )
        cbar = plt.colorbar(hb)
        cbar.set_label("fraction assigned")
        cbar.mappable.set_clim(famin, famax)
def make_qa(
    outpng,
    survey,
    program,
    faflavor,
    targfns,
    fiberassignfn,
    tileid,
    tilera,
    tiledec,
    obscon,
    rundate,
    tmpoutdir=None,
    width_deg=4,
):
    """
    Make fba_launch QA plot.

    Args:
        outpng: written output PNG file (string)
        survey: "sv1", "sv2", "sv3" or "main" (string)
        program: "DARK", "BRIGHT", or "BACKUP" (string)
        faflavor: usually {survey}{program} in lower cases (string)
        targfns: target files used to build the parent sample
            (forwarded to get_parent_assign_quants)
        fiberassignfn: path to the output fiberassign-TILEID.fits file (string)
        tileid: tile TILEID (int)
        tilera: tile center R.A. in degrees (float)
        tiledec: tile center Dec. in degrees (float)
        obscon: tile allowed observing conditions (string; e.g. "DARK|GRAY|BRIGHT|BACKUP")
        rundate: used rundate (string)
        tmpoutdir (optional, defaults to None, i.e. a fresh temporary directory):
            temporary directory (to download the cutout)
        width_deg (optional, defaults to 4): width of the cutout in degrees (float)
    """
    # AR create the temporary directory at call time: the previous
    # AR "tmpoutdir=tempfile.mkdtemp()" default was evaluated once at import,
    # AR creating a directory even when unused and sharing it across all calls
    if tmpoutdir is None:
        tmpoutdir = tempfile.mkdtemp()
    # AR WD and STD used masks
    _, wd_mskkeys, wd_msks, std_mskkeys, std_msks = get_dt_masks(survey)
    # AR plotted tracers
    # AR TBD: handle secondary?
    trmskkeys, trmsks = get_qa_tracers(survey, program)
    # AR storing parent/assigned quantities
    parent, assign, dras, ddecs, petals, nassign = get_parent_assign_quants(
        survey, targfns, fiberassignfn, tilera, tiledec
    )
    # AR start plotting
    fig = plt.figure(figsize=(30, 3 * (1 + len(trmsks))))
    gs = gridspec.GridSpec(1 + len(trmsks), 7, wspace=0.5, hspace=0.3)
    # AR overall infos
    ax = plt.subplot(gs[0, 0])
    ax.axis("off")
    # AR infos : general
    qa_print_infos(
        ax,
        survey,
        program,
        faflavor,
        tileid,
        tilera,
        tiledec,
        obscon,
        rundate,
        parent,
        assign,
    )
    # AR stats per petal
    ax = plt.subplot(gs[0, 1])
    ax.axis("off")
    qa_print_petal_infos(
        ax, petals, assign,
    )
    # AR cutout
    img = get_viewer_cutout(
        tileid,
        tilera,
        tiledec,
        tmpoutdir=tmpoutdir,
        width_deg=width_deg,
        pixscale=10,
        dr="dr9",
        timeout=15,
    )
    # AR SKY, BAD, WD, STD, TGT
    iys = [2, 3, 4, 5, 6]
    keys = ["sky", "bad", "wd", "std", "assign"]
    txts = ["SKY", "BAD", "WD", "STD", "TGT"]
    alphas = [0.25, 1.0, 1.0, 1.0, 0.025]
    for iy, key, txt, alpha in zip(iys, keys, txts, alphas):
        ax = fig.add_subplot(gs[0, iy])
        plot_cutout(
            ax,
            img,
            width_deg,
            dras[key],
            ddecs[key],
            dopetal=True,
            alpha=alpha,
            txts=[txt],
            xtxts=[0.2],
            ytxts=[0.98],
        )
    # AR looping on tracers
    ix = 1
    for msk, mskkey in zip(trmsks, trmskkeys):
        # AR parent and assign magnitude distributions
        plot_hist_tracer(plt.subplot(gs[ix, 1]), survey, parent, assign, msk, mskkey)
        # AR color-color diagram, with fiber assignment rate color-coded, and density transparency-coded
        gridsize = 20
        for iy, xbands, ybands, xlim, ylim in zip(
            [2, 3],
            [("R", "Z"), ("R", "Z")],
            [("G", "R"), ("R", "W1")],
            [(-0.5, 2.5), (-0.5, 2.5)],
            [(-0.5, 2.5), (-2, 5)],
        ):
            ax = plt.subplot(gs[ix, iy])
            plot_colcol_tracer(
                ax,
                xbands,
                ybands,
                survey,
                parent,
                assign,
                msk,
                mskkey,
                xlim,
                ylim,
                gridsize=20,
            )
        # AR position in tile
        axs = [plt.subplot(gs[ix, 4]), plt.subplot(gs[ix, 5]), plt.subplot(gs[ix, 6])]
        plot_sky_fa(
            axs, img, survey, parent, assign, dras, ddecs, msk, mskkey, width_deg
        )
        #
        ix += 1
    # AR saving plot
    plt.savefig(
        outpng, bbox_inches="tight",
    )
    plt.close()
def rmv_nonsvn(myouts, log=None, step="", start=None):
    """
    Remove fba_launch non-SVN products.

    Args:
        myouts: dictionary with the fba_launch args.outdir location (dictionary);
            must contain the following keys:
            "tiles", "sky", "gfa", "targ", "scnd", "too", "fba"
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())
    """
    # AR resolve defaults at call time: "log=Logger.get(), start=time()" in the
    # AR signature would be evaluated once at import, so every call would log
    # AR elapsed times relative to module import instead of the caller's start
    if log is None:
        log = Logger.get()
    if start is None:
        start = time()
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    # AR delete every non-SVN product that actually exists
    for key in ["tiles", "sky", "gfa", "targ", "scnd", "too", "fba"]:
        if os.path.isfile(myouts[key]):
            os.remove(myouts[key])
            log.info(
                "{:.1f}s\t{}\tdeleting file {}".format(
                    time() - start, step, myouts[key]
                )
            )
def mv_temp2final(mytmpouts, myouts, expected_keys, log=None, step="", start=None):
    """
    Moves the fba_launch outputs from the temporary location to the args.outdir location.

    Args:
        mytmpouts: dictionary with the temporary files location (dictionary);
            contains the following keys: "tiles", "sky", "gfa", "targ", "scnd", "too", "fba", "fiberassign"
        myouts: dictionary with the fba_launch args.outdir location (dictionary);
            contains at least same keys as mytmpouts
        expected_keys: list of keys of mytmpouts, myouts with the files to move
        log (optional, defaults to Logger.get()): Logger object
        step (optional, defaults to ""): corresponding step, for fba_launch log recording
            (e.g. dotiles, dosky, dogfa, domtl, doscnd, dotoo)
        start (optional, defaults to time()): start time for log (in seconds; output of time.time())

    Notes:
        actually, the log is not moved here; it is moved in fba_launch, after the main()
    """
    # AR resolve defaults at call time: "log=Logger.get(), start=time()" in the
    # AR signature would be evaluated once at import, so every call would log
    # AR elapsed times relative to module import instead of the caller's start
    if log is None:
        log = Logger.get()
    if start is None:
        start = time()
    log.info("")
    log.info("")
    log.info("{:.1f}s\t{}\tTIMESTAMP={}".format(time() - start, step, Time.now().isot))
    # AR move each expected product; a missing expected product is fatal
    for key in expected_keys:
        if os.path.isfile(mytmpouts[key]):
            _ = shutil.move(mytmpouts[key], myouts[key])
            log.info(
                "{:.1f}s\t{}\tmoving file {} to {}".format(
                    time() - start, step, mytmpouts[key], myouts[key]
                )
            )
        else:
            log.error(
                "{:.1f}s\t{}\tfile {} is missing, though we expect it; exiting".format(
                    time() - start, step, mytmpouts[key]
                )
            )
            sys.exit(1)
|
import tkinter as tk
from PIL import Image, ImageTk
from tkinter import filedialog
class AppImage:
    """A PIL image plus a scaled copy and its Tk-compatible PhotoImage."""

    def __init__(self, filename, height=None, scale=None):
        # Keep the full-resolution original; rescale() derives the rest.
        self.img = Image.open(filename)
        self.img_scaled, self.photoimg = None, None
        self.rescale(scale=scale, height=height)

    def rescale(self, scale=None, height=None):
        """Rebuild img_scaled/photoimg; a pixel *height* overrides *scale*."""
        if height:
            scale = height / self.img.size[1]
        if not scale:
            # neither given: keep the original size
            scale = 1.
        orig_w, orig_h = self.img.size
        self.img_scaled = self.img.resize((int(orig_w * scale), int(orig_h * scale)))
        self.photoimg = ImageTk.PhotoImage(image=self.img_scaled)
class Application(tk.Frame):
    """Main window: a canvas showing one of the user-selected images."""

    def __init__(self, master=None):
        tk.Frame.__init__(self, master)
        # per-instance list: the original class-level `images = []` was a
        # mutable class attribute shared across all Application instances
        self.images = []
        self.pack()
        self.createWidgets()

    def createWidgets(self, width=1000, height=1000):
        self.canvas = tk.Canvas(self, width=width, height=height)
        self.canvas.pack()
        # realize the canvas so winfo_width/height report actual sizes
        self.canvas.update()
        img_names = filedialog.askopenfilenames()
        self.load_images(img_names)
        # guard: the user may cancel the dialog, leaving nothing to show
        if self.images:
            self.show_img(0)

    def load_images(self, filenames):
        """Load each file, scaled to fit the canvas height."""
        for filename in filenames:
            self.images.append(AppImage(filename, height=self.height))

    def show_img(self, idx):
        """Draw image *idx* centered on the canvas."""
        self.curr_img = self.images[idx]
        self.canvas.create_image(self.width // 2, self.height // 2,
                                 image=self.curr_img.photoimg)

    @property
    def width(self):
        return self.canvas.winfo_width()

    @property
    def height(self):
        return self.canvas.winfo_height()
# Run the GUI only when executed directly, not when imported.
if __name__ == "__main__":
    root = tk.Tk()
    root.title('jclubtool')
    app = Application(master=root)
    app.mainloop()
Added Next and Prev buttons to cycle through pages.
import tkinter as tk
from PIL import Image, ImageTk
from tkinter import filedialog
class AppImage:
    """A PIL image together with a scaled copy and its Tk-compatible PhotoImage."""
    def __init__(self, filename, height=None, scale=None):
        """Open *filename* and build the scaled/Tk representations."""
        self.img = Image.open(filename)
        self.img_scaled, self.photoimg = None, None
        self.rescale(scale=scale, height=height)
    def rescale(self, scale=None, height=None):
        """Recompute img_scaled/photoimg; a target pixel *height* overrides *scale*."""
        if height:
            scale = height / self.img.size[1]
        # neither height nor scale given: keep the original size
        if not scale:
            scale = 1.
        size = self.img.size
        self.img_scaled = self.img.resize((int(size[0] * scale), int(size[1] * scale)))
        self.photoimg = ImageTk.PhotoImage(image=self.img_scaled)
class Application(tk.Frame):
    """Main window: Prev/Next buttons plus a canvas showing one image page."""

    def __init__(self, master=None):
        tk.Frame.__init__(self, master)
        # per-instance list: a class-level `images = []` is shared across instances
        self.images = []
        self.pack()
        self.createWidgets()

    def createWidgets(self, width=700, height=700):
        # keep distinct references: the original stored both buttons in
        # self.btn_next, losing the Prev button object
        self.btn_prev = tk.Button(self, text='Prev',
                                  command=lambda: self.show_img(self._img_idx - 1))
        self.btn_prev.pack(side='top')
        self.btn_next = tk.Button(self, text='Next',
                                  command=lambda: self.show_img(self._img_idx + 1))
        self.btn_next.pack(side='top')
        self.canvas = tk.Canvas(self, width=width, height=height)
        self.canvas.pack(side='right')
        # realize the canvas so winfo_width/height report actual sizes
        self.canvas.update()
        from glob import glob
        # TODO(review): debug stub left in place of filedialog.askopenfilenames()
        img_names = glob('samples/*.jpg')
        self.load_images(img_names)
        self._img_idx = 0
        if self.images:  # nothing to draw when no files matched
            self.show_img(self._img_idx)

    def load_images(self, filenames):
        """Load each file, scaled to fit the canvas height."""
        for filename in filenames:
            self.images.append(AppImage(filename, height=self.height))

    def show_img(self, idx):
        """Draw image *idx* if in range, else redraw the current one.

        Uses an explicit bounds check: the original try/except IndexError let
        negative indices wrap (Prev on page 0 jumped to the last page and left
        _img_idx negative).
        """
        if 0 <= idx < len(self.images):
            self.curr_img = self.images[idx]
            self._img_idx = idx
        self.canvas.create_image(self.width // 2, self.height // 2,
                                 image=self.curr_img.photoimg)

    @property
    def width(self):
        return self.canvas.winfo_width()

    @property
    def height(self):
        return self.canvas.winfo_height()
# Run the GUI only when executed directly, not when imported.
if __name__ == "__main__":
    root = tk.Tk()
    root.title('jclubtool')
    app = Application(master=root)
    app.mainloop()
# -*- coding: utf-8 -*-
import numpy
from random import randint
import os.path
from datetime import datetime
def accountPage(user, auc):
    """Render the per-user account page for auction *auc*."""
    retstr = "<!DOCTYPE html><meta charset=\"utf-8\"><html><head><link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css\"><script src=\"https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js\"></script><script src=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js\"></script><script src=\"https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.6.0/Chart.bundle.min.js\"></script></head><body><div class=\"container\"><div><h1>AUCTION_TITLE_HERE</h1><hr><h2>USERNAME_HERE Details</h2><h3>Balance: BALANCE_HERE</h3></div><div><h3>Current bid placement</h3>BIDS_TABLE_HERE</div><div><h3>Place a new bet</h3>BET_FORM_HERE</div><div><h3>Bid History</h3><div style=\"width: 90%; height: 25%;\"><canvas id=\"myChart\"></canvas></div><script>var ctx=document.getElementById('myChart').getContext('2d');var chart = new Chart(ctx, {type: 'line', data: {labels: [DATA_LABELS_HERE],datasets: [DATA_SETS_HERE]},options: {scales:{yAxes:[{ticks:{stepSize:1}}]}}});</script></div></div></body></html>"
    # substitute each placeholder with its generated fragment
    for placeholder, fragment in (
        ("AUCTION_TITLE_HERE", "{}".format(auc.name)),
        ("USERNAME_HERE", "{}".format(user.name)),
        ("BALANCE_HERE", "{}".format(user.balance)),
        ("BIDS_TABLE_HERE", "{}".format(getUserBidsTable(user, auc))),
        ("DATA_LABELS_HERE", getDataLabels(user)),
        ("DATA_SETS_HERE", getUserDatasets(user, auc)),
        ("BET_FORM_HERE", getBetForm(user, auc)),
    ):
        retstr = retstr.replace(placeholder, fragment)
    return retstr
def getBetForm(user, auc):
    """Build the bet-entry table plus Trade / Check Price buttons for *auc*.

    *user* is unused but kept for interface consistency with the other builders.
    """
    retstr = "<table class=\"table\"><thead><tr><th>Bin</th>"
    for x in auc.labels:
        retstr += "<th>{}</th>".format(x)
    retstr += "</tr></thead><tbody><tr><th></th>"
    for x in range(0, auc.numBins):
        retstr += "<th><input id=\"trade{}\" type=\"text\" name=\"{}\" style=\"width: 80%;\" value=\"0\"></th>".format(x,x)
    retstr += "</tr></tbody></table><div class=\"btn-group\" role=\"group\" aria-label=\"...\"><button type=\"button\" class=\"btn btn-default\" onClick=\"makeTrade()\">Trade</button><script>function makeTrade(){if (CONDITION_HERE){var url=window.location.href;url=url.replace(\"status\",\"makeTrade\"); url += GET_ELEMENTS_HERE;$.ajax({url: url, success: function(result){window.location.reload(true);}});}}</script><button type=\"button\" class=\"btn btn-default\" onClick=\"checkPrice()\">Check Price</button><script>function checkPrice(){var url=window.location.href; url=url.substring(0,url.lastIndexOf(\"/\"));url=url.substring(0,url.lastIndexOf(\"/\"));url=url.replace(\"status\",\"getCost\");url+= \"/\" + GET_ELEMENTS_HERE;$.ajax({url: url, success: function(result){$(\"#tradeCost\").html('Checked Price: ' + result);}});}</script><span class=\"btn btn-primary\" disabled id=\"tradeCost\">Checked Price: </span></div>"
    # javascript expression joining every trade<i> input value with commas
    getstr = ""
    for x in range(0, auc.numBins):
        getstr += "document.getElementById(\"trade{}\").value + ',' + ".format(x)
    getstr = getstr[:-9]  # drop the trailing " + ',' + "
    # only trade when at least one bin is non-zero
    condstr = "! ("
    for x in range(0, auc.numBins):
        condstr += "document.getElementById(\"trade{}\").value == 0 && ".format(x)
    condstr = condstr[:-4]  # drop the trailing " && "
    condstr += ")"
    # str.replace substitutes every occurrence, so one call covers both
    # GET_ELEMENTS_HERE placeholders (the original's second identical call
    # was a no-op and has been removed)
    retstr = retstr.replace("GET_ELEMENTS_HERE", getstr)
    retstr = retstr.replace("CONDITION_HERE", condstr)
    return retstr
def getUserBidsTable(user, auc):
    """Render the user's current contract counts as an HTML table."""
    header_cells = "".join("<th>{}</th>".format(label) for label in auc.labels)
    bid_cells = "".join("<th>{}</th>".format(bid) for bid in user.bids)
    return (
        "<table class=\"table\"><thead><tr><th>Bin</th>"
        + header_cells
        + "</tr></thead><tbody><tr><th>Count</th>"
        + bid_cells
        + "</tr></tbody></table>"
    )
def getDataLabels(user):
    """Quoted, comma-separated indices 0..len(bidHistory)-1 for the chart x-axis."""
    return ",".join('"{}"'.format(i) for i in range(len(user.bidHistory)))
def getUserDatasets(user, auc):
    """Build the Chart.js dataset entries (one stepped line per bin) for the
    user's bid history, colored via getLineColor()."""
    # per-bin time series: column x of every bidHistory snapshot
    data = []
    for x in range(0, user.bids.size):  # range (not xrange): works on py2 and py3
        data.append([item[x] for item in user.bidHistory])
    retstr = ""
    for x in range(0, user.bids.size):
        retstr += "{{label: \"{}\", fill:false, steppedLine: true, borderColor: 'rgb({})',data: [{}]}},".format(auc.labels[x], getLineColor(x), getDataString(data, x))
    retstr = retstr[:-1]  # drop the trailing comma
    return retstr
def netWorth(auc, outcome):
    """Render the net-worth-over-trades chart, or '' if no trades were recorded."""
    if not os.path.isfile('trades.txt'):
        return ''
    retstr = "<canvas id=\"netWorthChart\"></canvas></div><script>var ctx=document.getElementById('netWorthChart').getContext('2d');var chart = new Chart(ctx, {type: 'line', data: {labels: [DATA_LABELS_HERE],datasets: [DATA_SETS_HERE]},options: {}});</script>"
    # TODO: give the x-axis a Chart.js time scale (type: "time" with a
    # tooltip format and axis label) instead of the default category axis
    datastr, start, end = getNetWorthDataAndStartEnd(auc, outcome)
    retstr = retstr.replace("DATA_LABELS_HERE", getNetWorthDataLabels(start, end))
    retstr = retstr.replace("DATA_SETS_HERE", datastr)
    return retstr
# TODO: this needs work
def getNetWorthDataLabels(start, end):
    """Chart x-axis labels: just the start and end timestamps, double-quoted
    and comma separated.

    Generating intermediate points (and parsing the timestamps with
    datetime.strptime) is still unimplemented.
    """
    return '"{}", "{}"'.format(start, end)
def getNetWorthDataAndStartEnd(auc, outcome):
    """Build the per-user net-worth datasets assuming *outcome* wins.

    Returns (datasets_string, first_timestamp, last_timestamp).
    """
    sortedNames, states = auc.getNetWorth(outcome)
    retstr = ""
    colors = getRandomColors(len(sortedNames))
    for x in range(0, len(sortedNames)):  # range (not xrange): works on py2 and py3
        retstr += "{{label: \"{}\", fill:false, steppedLine: true, borderColor: 'rgb({})',data: [{}]}},".format(sortedNames[x], colors[x], getNetWorthDataString(states, x))
    retstr = retstr[:-1]  # drop the trailing comma
    # timestamps of the first and last recorded state
    return retstr, states[0][0], states[-1][0]
def getDataString(data, n):
    """Comma-join the n-th series of *data* as chart point values."""
    return ",".join("{}".format(value) for value in data[n])
def getNetWorthDataString(states, n):
    """Render user *n*'s (timestamp, networth) points as Chart.js {x, y} pairs."""
    points = ['{{ x:"{}", y:{}}}'.format(s[0], s[1][n]) for s in states]
    return ",".join(points)
def getRandomColors(count):
    """Return *count* random 'r,g,b' strings for chart line colors."""
    colors = []
    for _ in range(count):  # range (not xrange): works on py2 and py3
        colors.append('{},{},{}'.format(randint(0, 255), randint(0, 255), randint(0, 255)))
    return colors
def getLineColor(n):
    """Return the 'r,g,b' line color for bin *n*.

    Colors wrap around (modulo) once n exceeds the palette size instead of
    raising IndexError for auctions with more than 10 bins.
    """
    colors = ["255,99,132", "120,120,120", "5,36,182", "84,161,83", "63,157,208", "9,76,11", "112,25,125", "149,79,105", "120,237,207", "119,100,10"]
    return colors[n % len(colors)]
def auctionPage(auc):
    """Render the public auction overview page."""
    retstr = "<!DOCTYPE html><meta charset=\"utf-8\"><html><head><link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css\"><script src=\"https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js\"></script><script src=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js\"></script><script src=\"https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.6.0/Chart.bundle.min.js\"></script>"
    retstr += "</head><body><div class=\"container\"><div><h1>AUCTION_TITLE_HERE</h1><hr></div>CLOSED_INFO_HERE<div><h2>Market Status</h2>STATUS_HERE</div><div><h2>Leaderboard</h2>LEADERBOARD_HERE<div>OUTCOME_SELECT OUTCOME_GRAPH</div></div></div></body></html>"
    # graph defaults to outcome 0 while the auction is still open
    outcomeIndex = 0 if auc.winningIndex is None else auc.winningIndex
    # substitute each placeholder with its generated fragment
    for placeholder, fragment in (
        ("AUCTION_TITLE_HERE", "{}".format(auc.name)),
        ("CLOSED_INFO_HERE", getClosedInfo(auc)),
        ("STATUS_HERE", getStatusTable(auc)),
        ("LEADERBOARD_HERE", getLeaderboardTable(auc)),
        ("OUTCOME_SELECT", getOutcomeSelect(auc)),
        ("OUTCOME_GRAPH", netWorth(auc, outcomeIndex)),
    ):
        retstr = retstr.replace(placeholder, fragment)
    return retstr
def getClosedInfo(auc):
    """Banner: open-auction notice, or winning outcome plus market maker balance."""
    if auc.winningIndex is None:
        return '<h4>Auction Is Open</h4>'
    # total paid out: one unit per winning contract held across all accounts
    payouts = sum(a.bids[auc.winningIndex] for a in auc.accounts)
    retstr = '<h3>Winning Outcome: {}</h3>'.format(auc.labels[auc.winningIndex])
    retstr += '<h3>Market Maker Balance: {}</h3>'.format(auc.balance - payouts)
    return retstr
def getStatusTable(auc):
    """Contract / price / owned-count table for the market status panel."""
    retstr = "<div class=\"row\"><div class=\"col-md-4\">"
    retstr +="<table class=\"table\"><thead><tr><th>Contract</th><th>Price</th><th># Owned</th></tr></thead><tbody>"
    for i in range(auc.numBins):  # range (not xrange): works on py2 and py3
        retstr += "<tr><th>{}</th>".format(auc.labels[i])
        retstr += "<th>{}</th>".format(auc.prices[i])
        retstr += "<th>{}</th></tr>".format(auc.state[i])
    retstr += "</tbody></table></div></div>"
    return retstr
def getLeaderboardTable(auc):
    """Render the leaderboard table.

    NOTE: mutates auc.accounts in place — sets a.networth on each account and
    sorts the list by networth, descending. Callers see the reordered list.
    """
    retstr = "<div class=\"row\"><div class=\"col-xs-12\">"
    retstr += "<table class=\"table table-hover\"><thead><tr><th>User</th><th>Balance</th>"
    # extra column only when networth can be computed (open market or a winner set)
    if auc.isAuctionOpen:
        retstr += "<th>Balance + Contracts Sold</th>"
    elif auc.winningIndex is not None:
        retstr += "<th>Networth</th>"
    for x in auc.labels:
        retstr += "<th>{}</th>".format(x)
    # NOTE(review): the stray <div><div> inside <tbody> — and the unclosed
    # row/column divs opened above — make this malformed HTML
    retstr += "</tr></thead><tbody><div><div>"
    for a in auc.accounts:
        if auc.isAuctionOpen:
            # negate every holding and price the bundle via auc.getCost —
            # presumably the value of selling all contracts back; confirm
            # against the Auction.getCost implementation
            bidStr = ''
            for b in a.bids:
                bidStr += "-{},".format(b)
            bidStr = bidStr[:-1]
            cost = auc.getCost(bidStr)
            a.networth = a.balance - cost
        elif auc.winningIndex is not None:
            # closed market: balance plus one unit per winning contract held
            a.networth = a.balance + a.bids[auc.winningIndex]
    # NOTE(review): if the auction is closed with no winningIndex, networth was
    # never assigned and this sort raises AttributeError — confirm reachable
    auc.accounts.sort(key=lambda a: a.networth, reverse=True)
    for a in auc.accounts:
        retstr += "<tr><th>{}</th>".format(a.name)
        retstr += "<th>{}</th>".format(a.balance)
        if auc.isAuctionOpen or auc.winningIndex is not None:
            retstr += "<th>{}</th>".format(a.networth)
        for y in a.bids:
            retstr += "<th>{}</th>".format(y)
        retstr += "</tr>"
    retstr += "</tbody></table>"
    return retstr
def getOutcomeSelect(auc):
    """Outcome picker for an open auction with trades, or the chart heading
    once a winner is set; '' otherwise."""
    if auc.winningIndex is not None:
        return '<h3>Networths Over Trades</h3>'
    if not os.path.isfile('trades.txt'):
        return ''
    retstr = '<script>function outcomeSelected(){var index = document.getElementById("outcomeSelect").selectedIndex; var url=window.location.href;'
    retstr += ' url=url.substr(0, url.indexOf("5000")); url+="5000/getNetWorths/"+index+"/";'
    retstr += ' $.ajax({url: url, success: function(result){ $("#netWorthChart").html(result);}});}</script>'
    retstr += '<div class="col-md-4"><h3>Possible Outcomes</h3><label for="outcomeSelect">Winning Outcome (select one):</label>'
    retstr += '<select class="form-control" onchange="outcomeSelected()" id="outcomeSelect">'
    for o in auc.labels:
        retstr += '<option>'+o+'</option>'
    retstr += '</select></div>'
    return retstr
def helpPage(auc):
    """Render the (placeholder) help page for auction *auc*."""
    template = "<!DOCTYPE html><meta charset=\"utf-8\"><html><head><link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css\"><script src=\"https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js\"></script><script src=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js\"></script><script src=\"https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.6.0/Chart.bundle.min.js\"></script></head><body><div class=\"container\"><div><h1>AUCTION_TITLE_HERE</h1></div><hr><div><h2>Help Page</h2><p>Shove a new helpful guide here.</p></div></div></body></html>"
    return template.replace("AUCTION_TITLE_HERE", "{}".format(auc.name))
Net worth over time chart now working.
# -*- coding: utf-8 -*-
import numpy
from random import randint
import os.path
from datetime import datetime
def accountPage(user, auc):
    """Render the per-user account page for auction *auc*."""
    retstr = "<!DOCTYPE html><meta charset=\"utf-8\"><html><head><link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css\"><script src=\"https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js\"></script><script src=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js\"></script><script src=\"https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.6.0/Chart.bundle.min.js\"></script></head><body><div class=\"container\"><div><h1>AUCTION_TITLE_HERE</h1><hr><h2>USERNAME_HERE Details</h2><h3>Balance: BALANCE_HERE</h3></div><div><h3>Current bid placement</h3>BIDS_TABLE_HERE</div><div><h3>Place a new bet</h3>BET_FORM_HERE</div><div><h3>Bid History</h3><div style=\"width: 90%; height: 25%;\"><canvas id=\"myChart\"></canvas></div><script>var ctx=document.getElementById('myChart').getContext('2d');var chart = new Chart(ctx, {type: 'line', data: {labels: [DATA_LABELS_HERE],datasets: [DATA_SETS_HERE]},options: {scales:{yAxes:[{ticks:{stepSize:1}}]}}});</script></div></div></body></html>"
    # substitute each placeholder with its generated fragment
    for placeholder, fragment in (
        ("AUCTION_TITLE_HERE", "{}".format(auc.name)),
        ("USERNAME_HERE", "{}".format(user.name)),
        ("BALANCE_HERE", "{}".format(user.balance)),
        ("BIDS_TABLE_HERE", "{}".format(getUserBidsTable(user, auc))),
        ("DATA_LABELS_HERE", getDataLabels(user)),
        ("DATA_SETS_HERE", getUserDatasets(user, auc)),
        ("BET_FORM_HERE", getBetForm(user, auc)),
    ):
        retstr = retstr.replace(placeholder, fragment)
    return retstr
def getBetForm(user, auc):
    """Build the bet-entry table plus Trade / Check Price buttons for *auc*.

    *user* is unused but kept for interface consistency with the other builders.
    """
    retstr = "<table class=\"table\"><thead><tr><th>Bin</th>"
    for x in auc.labels:
        retstr += "<th>{}</th>".format(x)
    retstr += "</tr></thead><tbody><tr><th></th>"
    for x in range(0, auc.numBins):
        retstr += "<th><input id=\"trade{}\" type=\"text\" name=\"{}\" style=\"width: 80%;\" value=\"0\"></th>".format(x,x)
    retstr += "</tr></tbody></table><div class=\"btn-group\" role=\"group\" aria-label=\"...\"><button type=\"button\" class=\"btn btn-default\" onClick=\"makeTrade()\">Trade</button><script>function makeTrade(){if (CONDITION_HERE){var url=window.location.href;url=url.replace(\"status\",\"makeTrade\"); url += GET_ELEMENTS_HERE;$.ajax({url: url, success: function(result){window.location.reload(true);}});}}</script><button type=\"button\" class=\"btn btn-default\" onClick=\"checkPrice()\">Check Price</button><script>function checkPrice(){var url=window.location.href; url=url.substring(0,url.lastIndexOf(\"/\"));url=url.substring(0,url.lastIndexOf(\"/\"));url=url.replace(\"status\",\"getCost\");url+= \"/\" + GET_ELEMENTS_HERE;$.ajax({url: url, success: function(result){$(\"#tradeCost\").html('Checked Price: ' + result);}});}</script><span class=\"btn btn-primary\" disabled id=\"tradeCost\">Checked Price: </span></div>"
    # javascript expression joining every trade<i> input value with commas
    getstr = ""
    for x in range(0, auc.numBins):
        getstr += "document.getElementById(\"trade{}\").value + ',' + ".format(x)
    getstr = getstr[:-9]  # drop the trailing " + ',' + "
    # only trade when at least one bin is non-zero
    condstr = "! ("
    for x in range(0, auc.numBins):
        condstr += "document.getElementById(\"trade{}\").value == 0 && ".format(x)
    condstr = condstr[:-4]  # drop the trailing " && "
    condstr += ")"
    # str.replace substitutes every occurrence, so one call covers both
    # GET_ELEMENTS_HERE placeholders (the original's second identical call
    # was a no-op and has been removed)
    retstr = retstr.replace("GET_ELEMENTS_HERE", getstr)
    retstr = retstr.replace("CONDITION_HERE", condstr)
    return retstr
def getUserBidsTable(user, auc):
    """Render the user's current contract counts as an HTML table."""
    header_cells = "".join("<th>{}</th>".format(label) for label in auc.labels)
    bid_cells = "".join("<th>{}</th>".format(bid) for bid in user.bids)
    return (
        "<table class=\"table\"><thead><tr><th>Bin</th>"
        + header_cells
        + "</tr></thead><tbody><tr><th>Count</th>"
        + bid_cells
        + "</tr></tbody></table>"
    )
def getDataLabels(user):
    """Quoted, comma-separated indices 0..len(bidHistory)-1 for the chart x-axis."""
    return ",".join('"{}"'.format(i) for i in range(len(user.bidHistory)))
def getUserDatasets(user, auc):
    """Build the Chart.js dataset entries (one stepped line per bin) for the
    user's bid history, colored via getLineColor()."""
    # per-bin time series: column x of every bidHistory snapshot
    data = []
    for x in range(0, user.bids.size):  # range (not xrange): works on py2 and py3
        data.append([item[x] for item in user.bidHistory])
    retstr = ""
    for x in range(0, user.bids.size):
        retstr += "{{label: \"{}\", fill:false, steppedLine: true, borderColor: 'rgb({})',data: [{}]}},".format(auc.labels[x], getLineColor(x), getDataString(data, x))
    retstr = retstr[:-1]  # drop the trailing comma
    return retstr
def netWorth(auc, outcome):
    """Render the net-worth-over-time chart, or '' if no trades were recorded."""
    if not os.path.isfile('trades.txt'):
        return ''
    retstr = "<canvas id=\"netWorthChart\"></canvas></div><script>var ctx=document.getElementById('netWorthChart').getContext('2d');var chart = new Chart(ctx, {type: 'line', data: {labels: [DATA_LABELS_HERE],datasets: [DATA_SETS_HERE]},"
    # time-scale x-axis so the points are spaced by timestamp
    retstr += "options: { scales: { xAxes: [{ type: 'time', time: { format: 'YYYY-MM-DD HH:mm:ss', tooltipFormat: 'll HH:mm:ss'}}]}}});</script>"
    datastr, start, end = getNetWorthDataAndStartEnd(auc, outcome)
    retstr = retstr.replace("DATA_LABELS_HERE", getNetWorthDataLabels(start, end))
    retstr = retstr.replace("DATA_SETS_HERE", datastr)
    return retstr
# TODO: this needs work
def getNetWorthDataLabels(start, end):
    """Chart x-axis labels: just the start and end timestamps, double-quoted
    and comma separated.

    Generating intermediate points (and parsing the timestamps with
    datetime.strptime) is still unimplemented.
    """
    return '"{}", "{}"'.format(start, end)
def getNetWorthDataAndStartEnd(auc, outcome):
    """Build the per-user net-worth datasets assuming *outcome* wins.

    Returns (datasets_string, first_timestamp, last_timestamp).
    """
    sortedNames, states = auc.getNetWorth(outcome)
    retstr = ""
    colors = getRandomColors(len(sortedNames))
    for x in range(0, len(sortedNames)):  # range (not xrange): works on py2 and py3
        retstr += "{{label: \"{}\", fill:false, steppedLine: true, borderColor: 'rgb({})',data: [{}]}},".format(sortedNames[x], colors[x], getNetWorthDataString(states, x))
    retstr = retstr[:-1]  # drop the trailing comma
    # timestamps of the first and last recorded state
    return retstr, states[0][0], states[-1][0]
def getDataString(data, n):
    """Comma-join the n-th series of *data* as chart point values."""
    return ",".join("{}".format(value) for value in data[n])
def getNetWorthDataString(states, n):
    """Render user *n*'s (timestamp, networth) points as Chart.js {x, y} pairs."""
    points = ['{{ x:"{}", y:{}}}'.format(s[0], s[1][n]) for s in states]
    return ",".join(points)
def getRandomColors(count):
    """Return *count* random 'r,g,b' strings for chart line colors."""
    colors = []
    for _ in range(count):  # range (not xrange): works on py2 and py3
        colors.append('{},{},{}'.format(randint(0, 255), randint(0, 255), randint(0, 255)))
    return colors
def getLineColor(n):
    """Return the 'r,g,b' line color for bin *n*.

    Colors wrap around (modulo) once n exceeds the palette size instead of
    raising IndexError for auctions with more than 10 bins.
    """
    colors = ["255,99,132", "120,120,120", "5,36,182", "84,161,83", "63,157,208", "9,76,11", "112,25,125", "149,79,105", "120,237,207", "119,100,10"]
    return colors[n % len(colors)]
def auctionPage(auc):
    """Render the public auction overview page."""
    retstr = "<!DOCTYPE html><meta charset=\"utf-8\"><html><head><link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css\"><script src=\"https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js\"></script><script src=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js\"></script><script src=\"https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.6.0/Chart.bundle.min.js\"></script>"
    retstr += "</head><body><div class=\"container\"><div><h1>AUCTION_TITLE_HERE</h1><hr></div>CLOSED_INFO_HERE<div><h2>Market Status</h2>STATUS_HERE</div><div><h2>Leaderboard</h2>LEADERBOARD_HERE<div>OUTCOME_SELECT OUTCOME_GRAPH</div></div></div></body></html>"
    # graph defaults to outcome 0 while the auction is still open
    outcomeIndex = 0 if auc.winningIndex is None else auc.winningIndex
    # substitute each placeholder with its generated fragment
    for placeholder, fragment in (
        ("AUCTION_TITLE_HERE", "{}".format(auc.name)),
        ("CLOSED_INFO_HERE", getClosedInfo(auc)),
        ("STATUS_HERE", getStatusTable(auc)),
        ("LEADERBOARD_HERE", getLeaderboardTable(auc)),
        ("OUTCOME_SELECT", getOutcomeSelect(auc)),
        ("OUTCOME_GRAPH", netWorth(auc, outcomeIndex)),
    ):
        retstr = retstr.replace(placeholder, fragment)
    return retstr
def getClosedInfo(auc):
    """Banner: open-auction notice, or winning outcome plus market maker balance."""
    if auc.winningIndex is None:
        return '<h4>Auction Is Open</h4>'
    # total paid out: one unit per winning contract held across all accounts
    payouts = sum(a.bids[auc.winningIndex] for a in auc.accounts)
    retstr = '<h3>Winning Outcome: {}</h3>'.format(auc.labels[auc.winningIndex])
    retstr += '<h3>Market Maker Balance: {}</h3>'.format(auc.balance - payouts)
    return retstr
def getStatusTable(auc):
    """Contract / price / owned-count table for the market status panel."""
    retstr = "<div class=\"row\"><div class=\"col-md-4\">"
    retstr +="<table class=\"table\"><thead><tr><th>Contract</th><th>Price</th><th># Owned</th></tr></thead><tbody>"
    for i in range(auc.numBins):  # range (not xrange): works on py2 and py3
        retstr += "<tr><th>{}</th>".format(auc.labels[i])
        retstr += "<th>{}</th>".format(auc.prices[i])
        retstr += "<th>{}</th></tr>".format(auc.state[i])
    retstr += "</tbody></table></div></div>"
    return retstr
def getLeaderboardTable(auc):
    """Render the leaderboard table.

    NOTE: mutates auc.accounts in place — sets a.networth on each account and
    sorts the list by networth, descending. Callers see the reordered list.
    """
    retstr = "<div class=\"row\"><div class=\"col-xs-12\">"
    retstr += "<table class=\"table table-hover\"><thead><tr><th>User</th><th>Balance</th>"
    # extra column only when networth can be computed (open market or a winner set)
    if auc.isAuctionOpen:
        retstr += "<th>Balance + Contracts Sold</th>"
    elif auc.winningIndex is not None:
        retstr += "<th>Networth</th>"
    for x in auc.labels:
        retstr += "<th>{}</th>".format(x)
    # NOTE(review): the stray <div><div> inside <tbody> — and the unclosed
    # row/column divs opened above — make this malformed HTML
    retstr += "</tr></thead><tbody><div><div>"
    for a in auc.accounts:
        if auc.isAuctionOpen:
            # negate every holding and price the bundle via auc.getCost —
            # presumably the value of selling all contracts back; confirm
            # against the Auction.getCost implementation
            bidStr = ''
            for b in a.bids:
                bidStr += "-{},".format(b)
            bidStr = bidStr[:-1]
            cost = auc.getCost(bidStr)
            a.networth = a.balance - cost
        elif auc.winningIndex is not None:
            # closed market: balance plus one unit per winning contract held
            a.networth = a.balance + a.bids[auc.winningIndex]
    # NOTE(review): if the auction is closed with no winningIndex, networth was
    # never assigned and this sort raises AttributeError — confirm reachable
    auc.accounts.sort(key=lambda a: a.networth, reverse=True)
    for a in auc.accounts:
        retstr += "<tr><th>{}</th>".format(a.name)
        retstr += "<th>{}</th>".format(a.balance)
        if auc.isAuctionOpen or auc.winningIndex is not None:
            retstr += "<th>{}</th>".format(a.networth)
        for y in a.bids:
            retstr += "<th>{}</th>".format(y)
        retstr += "</tr>"
    retstr += "</tbody></table>"
    return retstr
def getOutcomeSelect(auc):
    """Outcome picker for an open auction with trades, or the chart heading
    once a winner is set; '' otherwise."""
    if auc.winningIndex is not None:
        return '<h3>Networths Over Trades</h3>'
    if not os.path.isfile('trades.txt'):
        return ''
    retstr = '<script>function outcomeSelected(){var index = document.getElementById("outcomeSelect").selectedIndex; var url=window.location.href;'
    retstr += ' url=url.substr(0, url.indexOf("5000")); url+="5000/getNetWorths/"+index+"/";'
    retstr += ' $.ajax({url: url, success: function(result){ $("#netWorthChart").html(result);}});}</script>'
    retstr += '<div class="col-md-4"><h3>Possible Outcomes</h3><label for="outcomeSelect">Winning Outcome (select one):</label>'
    retstr += '<select class="form-control" onchange="outcomeSelected()" id="outcomeSelect">'
    for o in auc.labels:
        retstr += '<option>'+o+'</option>'
    retstr += '</select></div>'
    return retstr
def helpPage(auc):
    """Render the (placeholder) help page for auction *auc*."""
    template = "<!DOCTYPE html><meta charset=\"utf-8\"><html><head><link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css\"><script src=\"https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js\"></script><script src=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js\"></script><script src=\"https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.6.0/Chart.bundle.min.js\"></script></head><body><div class=\"container\"><div><h1>AUCTION_TITLE_HERE</h1></div><hr><div><h2>Help Page</h2><p>Shove a new helpful guide here.</p></div></div></body></html>"
    return template.replace("AUCTION_TITLE_HERE", "{}".format(auc.name))
|
# -*- coding: utf-8 -*-
__author__ = 'luckydonald'
import logging
logger = logging.getLogger(__name__)
import re
# first pass: break "Xy" runs off whatever precedes them; second pass:
# break an uppercase letter off a preceding lowercase letter or digit
_first_cap_re = re.compile(r'(.)([A-Z][a-z]+)')
_all_cap_re = re.compile(r'([a-z0-9])([A-Z])')

def convert_to_underscore(name):
    """Convert a CamelCase identifier to snake_case."""
    with_breaks = _first_cap_re.sub(r'\1_\2', name)
    return _all_cap_re.sub(r'\1_\2', with_breaks).lower()
def func(command, description, link, params_string, returns="On success, the sent Message is returned.", return_type="Message"):
    """
    Generate the source text of one bot-API method from a Telegram docs table.

    Live template for pycharm:
    y = func(command="$cmd$", description="$desc$", link="$lnk$", params_string="$first_param$", returns="$returns$", return_type="$returntype$")

    :param command: CamelCase API command name (e.g. "sendMessage").
    :param description: Human readable description copied from the docs.
    :param link: URL of the command's section of the api documentation.
    :param params_string: One parameter per line, tab-separated columns:
                          name, type, "Yes"/"Optional", description.
    :param returns: Text describing the return value (goes into the docstring).
    :param return_type: Name of the returned type (goes into :rtype:).
    :return: The generated method source (tabs expanded); it is also printed.
    """
    description_with_tabs = "\t\t" + description.strip().replace("\n", "\n\t\t")
    param_list_args = []
    param_list_kwargs = []
    args = []        # names of required parameters (signature order)
    args2 = []       # "name=name" forwarding strings for the self.do(...) call
    kwargs = []      # "name=None" strings for optional parameters
    kwargs2 = []     # "name=name" forwarding strings for optional parameters
    asserts = []     # generated assert statements for the method body
    str_args = ""    # docstring text for required parameters
    str_kwargs = ""  # docstring text for optional parameters
    param_strings = params_string.split("\n")
    for param in param_strings:
        table = param.split("\t")
        param_name = table[0].strip()
        # Pad with spaces so the " Type " replacements below match whole words.
        param_type = table[1].strip().join([" "," "])
        # " String or Boolean "
        param_type = param_type.replace(" String ", " str ")
        param_type = param_type.replace(" Integer ", " int ")
        param_type = param_type.replace(" Boolean ", " bool ")
        param_type = param_type.replace(" Nothing ", " None ")
        assert_types = param_type
        param_type = param_type.replace(" or ", " | ")
        assert_commands = []
        assert_comments = []
        for asses in assert_types.split("|"): # short for asserts
            asses = asses.strip() # always good!!
            asses = asses.strip("()")
            if asses in ["int", "bool", "str"]:
                assert_commands.append("isinstance({var}, {type})".format(var=param_name, type=asses))
            elif asses.startswith("Array"):
                assert_commands.append("isinstance({var}, (list, tuple))".format(var=param_name))
                assert_comments.append(asses.replace("\n"," "))
            else:
                # NOTE(review): logger.warn is a deprecated alias of logger.warning.
                logger.warn("unrecognized type in param {var}: {type}".format(var=param_name, type=asses))
        # end for
        param_required = table[2].strip()
        # Stays None (falsy -> treated as optional) if column 3 is neither value.
        param_needed = None
        if param_required == "Yes":
            param_needed = True
        elif param_required == "Optional":
            param_needed = False
        param_description = table[3].strip()
        if param_needed:
            param_list_args.append(Param(param_name, param_type,param_needed, param_description))
            args.append(param_name)
            args2.append("{param_name}={param_name}".format(param_name=param_name))
            str_args += '\t\t:param {key}: {descr}\n\t\t:type {key}: {type}\n\n'.format(key=param_name, descr=param_description, type=param_type)
            if assert_commands:
                asserts.append("assert({var} is not None)".format(var=param_name))
                asserts.append("assert({ass})".format(ass=" or ".join(assert_commands)) + ((" # {comment}".format(comment=", ".join(assert_comments))) if assert_comments else ""))
        else:
            param_list_kwargs.append(Param(param_name, param_type,param_needed, param_description))
            kwargs.append("{param_name}=None".format(param_name=param_name))
            kwargs2.append("{param_name}={param_name}".format(param_name=param_name))
            str_kwargs += '\t\t:keyword {key}: {descr}\n\t\t:type {key}: {type}\n\n'.format(key=param_name, descr=param_description, type=param_type)
            if assert_commands:
                asserts.append("assert({var} is None or {ass})".format(var=param_name, ass=" or ".join(assert_commands)) + ((" # {comment}".format(comment=", ".join(assert_comments))) if assert_comments else ""))
    # Optional parameters go after the required ones in the signature and call.
    args.extend(kwargs)
    args2.extend(kwargs2)
    asserts_string = "\n\t\t" + "\n\t\t".join(asserts)
    text = ""
    if len(str_args)>0:
        text += '\n\t\tParameters:\n\n'
        text += str_args
    if len(str_kwargs)>0:
        text += '\n\t\tOptional keyword parameters:\n\n'
        text += str_kwargs
    do_args = ['"%s"' % command]
    do_args.extend(args2)
    result = '\tdef {funcname}(self, {params}):\n\t\t"""\n{description_with_tabs}\n\n\t\t{link}\n\n' \
             '{paramshit}\n' \
             '\t\tReturns:\n\n\t\t:return: {returns}\n\t\t:rtype: {return_type}\n\t\t"""{asserts_with_tabs}\n\t\treturn self.do({do_args})\n\t# end def {funcname}'.format(
        funcname=convert_to_underscore(command),
        params=", ".join(args), description_with_tabs=description_with_tabs, link=link,
        returns=returns, return_type=return_type, command=command, do_args=", ".join(do_args),
        asserts_with_tabs=asserts_string,
        paramshit = text
    )
    result = result.replace("\t", " ")
    print (result)
    return result
def clazz(clazz, parent_clazz, description, link, params_string):
    """
    Generate the source text of one API type class from a Telegram docs table.

    Live template for pycharm:
    y = clazz(clazz="$clazz$", parent_clazz="%parent$", description="$desc$", link="$lnk$", params_string="$first_param$")

    :param clazz: Name of the class to generate.
    :param parent_clazz: Name of the base class.
    :param description: Human readable description copied from the docs.
    :param link: URL of the type's section of the api documentation.
    :param params_string: One field per line, tab-separated columns:
                          name, type, description (an "Optional." prefix marks keyword args).
    :return: The generated class source (tabs expanded); it is also printed.
    """
    init_description_w_tabs = description.strip().replace("\n", "\n\t\t")
    clazz_description_w_tabs = description.strip().replace("\n", "\n\t")
    args = []        # names of required __init__ parameters
    args2 = []
    kwargs = []      # "name = None" strings for optional parameters
    kwargs2 = []
    asserts = []     # generated __init__ body lines (asserts + self assignments)
    str_args = ""    # docstring text for required parameters
    str_kwargs = ""  # docstring text for optional parameters
    param_strings = params_string.split("\n")
    for param in param_strings:
        table = param.split("\t")
        param_name = table[0].strip()
        # Pad with spaces so the " Type " replacements below match whole words.
        param_type = table[1].strip().join([" "," "])
        # " String or Boolean "
        param_type = param_type.replace(" String ", " str ")
        param_type = param_type.replace(" Integer ", " int ")
        param_type = param_type.replace(" Boolean ", " bool ")
        param_type = param_type.replace(" Nothing ", " None ")
        assert_types = param_type
        param_type = param_type.replace(" or ", " | ")
        assert_commands = []
        assert_comments = []
        for asses in assert_types.split("|"): # short for asserts
            asses = asses.strip() # always good!!
            asses = asses.strip("()")
            if asses in ["int", "bool", "str"]:
                assert_commands.append("isinstance({var}, {type})".format(var=param_name, type=asses))
            elif asses.startswith("Array"):
                assert_commands.append("isinstance({var}, (list, tuple))".format(var=param_name))
                assert_comments.append(asses.replace("\n"," "))
            else:
                # NOTE(review): logger.warn is a deprecated alias of logger.warning.
                logger.warn("unrecognized type in param {var}: {type}".format(var=param_name, type=asses))
        # end for
        param_description = table[2].strip()
        param_needed = not param_description.startswith("Optional.")
        # Blank entry produces an empty line between consecutive fields' code.
        asserts.append("")
        if param_needed:
            args.append(param_name)
            asserts.append("self.{param_name} = {param_name}".format(param_name=param_name))
            str_args += '\n\n\t\t:param {key}: {descr}\n\t\t:type {key}: {type}'.format(key=param_name, descr=param_description, type=param_type)
            if assert_commands:
                asserts.append("assert({var} is not None)".format(var=param_name))
                asserts.append("assert({ass})".format(ass=" or ".join(assert_commands)) + ((" # {comment}".format(comment=", ".join(assert_comments))) if assert_comments else ""))
        else:
            kwargs.append("{param_name} = None".format(param_name=param_name))
            asserts.append("self.{param_name}={param_name}".format(param_name=param_name))
            str_kwargs += '\n\n\t\t:keyword {key}: {descr}\n\t\t:type {key}: {type}'.format(key=param_name, descr=param_description, type=param_type)
            if assert_commands:
                asserts.append("assert({var} is None or {ass})".format(var=param_name, ass=" or ".join(assert_commands)) + ((" # {comment}".format(comment=", ".join(assert_comments))) if assert_comments else ""))
        #str_args += '\n\n\t\t:param {key}: {descr}\n\t\t:type {key}: {type}'.format(key=param_name, descr=param_description, type=param_type)
        #if assert_commands:
        #    asserts.append("assert({var} is not None)".format(var=param_name))
        #    asserts.append("assert({ass})".format(ass=" or ".join(assert_commands)) + ((" # {comment}".format(comment=", ".join(assert_comments))) if assert_comments else ""))
        #asserts.append("self.{var} = {var}".format(var=param_name))
    #args.extend(kwargs)
    #param_description = ""
    #if len(str_args)>0:
    #    param_description += '\n\t\tParameters:'
    #    param_description += str_args
    param_description = ""
    if len(str_args)>0:
        param_description += '\n\t\tParameters:'
        param_description += str_args
    if len(str_kwargs)>0:
        param_description += '\n\n\n\t\tOptional keyword parameters:'
        param_description += str_kwargs
    # Optional parameters go after the required ones in the signature.
    args.extend(kwargs)
    result = 'class {clazz} ({parent_clazz}):\n' \
             '\t"""\n' \
             '\t{clazz_description_w_tabs}\n' \
             '\n' \
             '\t{link}\n' \
             '\t"""\n' \
             '\tdef __init__(self, {params}):\n' \
             '\t\t"""\n' \
             '\t\t{init_description_w_tabs}\n' \
             '\n' \
             '\t\t{link}\n' \
             '\n' \
             '{param_description}\n' \
             '\t\t"""\n' \
             '\t\tsuper({clazz}, self).__init__()\n' \
             '\t\t{asserts_with_tabs}\n' \
             '\t# end def __init__\n' \
             '# end class {clazz}'.format(
        clazz=clazz, parent_clazz=parent_clazz, params=", ".join(args), param_description = param_description,
        clazz_description_w_tabs=clazz_description_w_tabs, init_description_w_tabs=init_description_w_tabs, link=link,
        asserts_with_tabs="\n\t\t".join(asserts),
    )
    result = result.replace("\t", " ")
    print (result)
    return result
# func(command="", description="", link="", param_string="", returns="", return_type="")
class Param(object):
    """Plain record describing one API parameter.

    Holds the parameter's name, its (already normalised) type string,
    whether it is required, and its documentation text.
    """
    def __init__(self, name, type, needed, desc):
        super(Param, self).__init__()
        self.name, self.type, self.needed, self.desc = name, type, needed, desc
    # end def __init__
# end class Param
def examples():
    """Demo invocations used when running this module directly: prints the
    generated classes for the Telegram inline-query result types.
    (Table columns inside the triple-quoted strings are tab-separated.)"""
    #func("answerInlineQuery", """Use this method to send answers to an inline query. On success, True is returned.
    # No more than 50 results per query are allowed.""", "https://core.telegram.org/bots/api#answerinlinequery", "inline_query_id String Yes Unique identifier for the answered query\nresults Array of InlineQueryResult Yes A JSON-serialized array of results for the inline query\ncache_time Integer Optional The maximum amount of time in seconds that the result of the inline query may be cached on the server. Defaults to 300.\nis_personal Boolean Optional Pass True, if results may be cached on the server side only for the user that sent the query. By default, results may be returned to any user who sends the same query\nnext_offset String Optional Pass the offset that a client should send in the next query with the same text to receive more results. Pass an empty string if there are no more results or if you don‘t support pagination. Offset length can’t exceed 64 bytes.", "", "None")
    clazz("InlineQueryResultArticle", "InlineQueryResult", "Represents a link to an article or web page.", "https://core.telegram.org/bots/api#inlinequeryresultarticle", """type	String	Type of the result, must be article
title	String	Title of the result
message_text	String	Text of the message to be sent, 1-4096 characters
parse_mode	String	Optional. Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
disable_web_page_preview	Boolean	Optional. Disables link previews for links in the sent message
url	String	Optional. URL of the result
hide_url	Boolean	Optional. Pass True, if you don't want the URL to be shown in the message
description	String	Optional. Short description of the result
thumb_url	String	Optional. Url of the thumbnail for the result
thumb_width	Integer	Optional. Thumbnail width
thumb_height	Integer	Optional. Thumbnail height""")
    clazz("InlineQueryResultPhoto", "InlineQueryResult", "Represents a link to a photo. By default, this photo will be sent by the user with optional caption. Alternatively, you can provide message_text to send it instead of photo.", "https://core.telegram.org/bots/api#inlinequeryresultphoto", """id	String	Unique identifier for this result, 1-64 bytes
photo_url	String	A valid URL of the photo. Photo must be in jpeg format. Photo size must not exceed 5MB
photo_width	Integer	Optional. Width of the photo
photo_height	Integer	Optional. Height of the photo
thumb_url	String	URL of the thumbnail for the photo
title	String	Optional. Title for the result
description	String	Optional. Short description of the result
caption	String	Optional. Caption of the photo to be sent, 0-200 characters
message_text	String	Optional. Text of a message to be sent instead of the photo, 1-4096 characters
parse_mode	String	Optional. Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
disable_web_page_preview	Boolean	Optional. Disables link previews for links in the sent message""")
    clazz("InlineQueryResultGif", "InlineQueryResult", "Represents a link to an animated GIF file. By default, this animated GIF file will be sent by the user with optional caption. Alternatively, you can provide message_text to send it instead of the animation.", "https://core.telegram.org/bots/api#inlinequeryresultgif", """id	String	Unique identifier for this result, 1-64 bytes
gif_url	String	A valid URL for the GIF file. File size must not exceed 1MB
gif_width	Integer	Optional. Width of the GIF
gif_height	Integer	Optional. Height of the GIF
thumb_url	String	URL of the static thumbnail for the result (jpeg or gif)
title	String	Optional. Title for the result
caption	String	Optional. Caption of the GIF file to be sent, 0-200 characters
message_text	String	Optional. Text of a message to be sent instead of the animation, 1-4096 characters
parse_mode	String	Optional. Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
disable_web_page_preview	Boolean	Optional. Disables link previews for links in the sent message""")
    clazz("InlineQueryResultMpeg4Gif", "InlineQueryResult", "Represents a link to a video animation (H.264/MPEG-4 AVC video without sound). By default, this animated MPEG-4 file will be sent by the user with optional caption. Alternatively, you can provide message_text to send it instead of the animation.", "https://core.telegram.org/bots/api#inlinequeryresultmpeg4gif", """id	String	Unique identifier for this result, 1-64 bytes
mpeg4_url	String	A valid URL for the MP4 file. File size must not exceed 1MB
mpeg4_width	Integer	Optional. Video width
mpeg4_height	Integer	Optional. Video height
thumb_url	String	URL of the static thumbnail (jpeg or gif) for the result
title	String	Optional. Title for the result
caption	String	Optional. Caption of the MPEG-4 file to be sent, 0-200 characters
message_text	String	Optional. Text of a message to be sent instead of the animation, 1-4096 characters
parse_mode	String	Optional. Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
disable_web_page_preview	Boolean	Optional. Disables link previews for links in the sent message""")
    clazz("InlineQueryResultVideo", "InlineQueryResult", "Represents link to a page containing an embedded video player or a video file.", "https://core.telegram.org/bots/api#inlinequeryresultvideo", """id	String	Unique identifier for this result, 1-64 bytes
video_url	String	A valid URL for the embedded video player or video file
mime_type	String	Mime type of the content of video url, “text/html” or “video/mp4”
message_text	String	Text of the message to be sent with the video, 1-4096 characters
parse_mode	String	Optional. Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
disable_web_page_preview	Boolean	Optional. Disables link previews for links in the sent message
video_width	Integer	Optional. Video width
video_height	Integer	Optional. Video height
video_duration	Integer	Optional. Video duration in seconds
thumb_url	String	URL of the thumbnail (jpeg only) for the video
title	String	Title for the result
description	String	Optional. Short description of the result""")
    # clazz("class", "InlineQueryResult", "desc", "link", """lines""")
    # clazz("", "InlineQueryResult", "", "", """""")
if __name__ == '__main__':
    # Running this module directly just prints the example generations.
    examples()
def gett():
    """Interactively build one generator call.

    Prompts on the console (via luckydonaldUtils.interactions) for either a
    bot-API function or a class, collects description, link and the
    tab-separated parameter lines, then delegates to func() or clazz().
    """
    from luckydonaldUtils.interactions import confirm, answer
    do_func = confirm("Choose between generating function or class. Do you want a function?", True)
    if do_func:
        command = answer("Command (the Title)")
        description = answer("Description")
        link = answer("The link on the api page")
        params_string = "---"  # non-empty sentinel so the loop body runs at least once
        params_strings = []
        while params_string != "":
            params_string = answer("Parameters (sepereated by tabs, and new lines)\nParameters Type Required Description", "")
            if params_string and not params_string.strip() == "":
                params_strings.append(params_string)
            # end if
        # end while
        returns = answer("Textual description what the function returns", "On success, the sent Message is returned.")
        return_type = answer("Return type", "Message")
        print("\n")
        # BUG FIX: previously this passed params_string, which is always ""
        # once the loop exits, so all collected parameter lines were dropped.
        # Join the collected lines instead, mirroring the class branch below.
        func(command, description, link, "\n".join(params_strings), returns, return_type)
    else:
        clazze = answer("Class name")
        parent_clazz = answer("Parent class name", "object")
        description = answer("Description")
        link = answer("The link on the api page")
        params_string = "--"  # non-empty sentinel so the loop body runs at least once
        params_strings = []
        while params_string != "":
            params_string = answer("Parameters (sepereated by tabs, and new lines)\nParameters Type Description", "")
            if params_string and not params_string.strip() == "":
                params_strings.append(params_string)
            # end if
        # end while
        print("\n")
        clazz(clazze, parent_clazz, description, link, "\n".join(params_strings))
    # end if
# end if main
"""
regex for def -> class def
"def ([a-z_]+)\((?!\))" -> "def $1(self, "
"""
Helper to generate functions and classes from the Telegram Bot API documentation.
Changes in this version:
clazz: added to_array() method generation and an init_super_args parameter.
# -*- coding: utf-8 -*-
__author__ = 'luckydonald'

import logging
import re

logger = logging.getLogger(__name__)

# Regexes used to insert underscores at CamelCase word boundaries.
_first_cap_re = re.compile(r'(.)([A-Z][a-z]+)')
_all_cap_re = re.compile(r'([a-z0-9])([A-Z])')


def convert_to_underscore(name):
    """Convert a CamelCase identifier to snake_case (e.g. sendMessage -> send_message)."""
    partly_split = _first_cap_re.sub(r'\1_\2', name)
    return _all_cap_re.sub(r'\1_\2', partly_split).lower()
# end def convert_to_underscore
def func(command, description, link, params_string, returns="On success, the sent Message is returned.", return_type="Message"):
    """
    Generate the source text of one bot-API method from a Telegram docs table.

    Live template for pycharm:
    y = func(command="$cmd$", description="$desc$", link="$lnk$", params_string="$first_param$", returns="$returns$", return_type="$returntype$")

    :param command: CamelCase API command name (e.g. "sendMessage").
    :param description: Human readable description copied from the docs.
    :param link: URL of the command's section of the api documentation.
    :param params_string: One parameter per line, tab-separated columns:
                          name, type, "Yes"/"Optional", description.
    :param returns: Text describing the return value (goes into the docstring).
    :param return_type: Name of the returned type (goes into :rtype:).
    :return: The generated method source (tabs expanded); it is also printed.
    """
    description_with_tabs = "\t\t" + description.strip().replace("\n", "\n\t\t")
    param_list_args = []
    param_list_kwargs = []
    args = []        # names of required parameters (signature order)
    args2 = []       # "name=name" forwarding strings for the self.do(...) call
    kwargs = []      # "name=None" strings for optional parameters
    kwargs2 = []     # "name=name" forwarding strings for optional parameters
    asserts = []     # generated assert statements for the method body
    str_args = ""    # docstring text for required parameters
    str_kwargs = ""  # docstring text for optional parameters
    param_strings = params_string.split("\n")
    for param in param_strings:
        table = param.split("\t")
        param_name = table[0].strip()
        # Pad with spaces so the " Type " replacements below match whole words.
        param_type = table[1].strip().join([" "," "])
        # " String or Boolean "
        param_type = param_type.replace(" String ", " str ")
        param_type = param_type.replace(" Integer ", " int ")
        param_type = param_type.replace(" Boolean ", " bool ")
        param_type = param_type.replace(" Nothing ", " None ")
        assert_types = param_type
        param_type = param_type.replace(" or ", " | ")
        assert_commands = []
        assert_comments = []
        for asses in assert_types.split("|"): # short for asserts
            asses = asses.strip() # always good!!
            asses = asses.strip("()")
            if asses in ["int", "bool", "str"]:
                assert_commands.append("isinstance({var}, {type})".format(var=param_name, type=asses))
            elif asses.startswith("Array"):
                assert_commands.append("isinstance({var}, (list, tuple))".format(var=param_name))
                assert_comments.append(asses.replace("\n"," "))
            else:
                # NOTE(review): logger.warn is a deprecated alias of logger.warning.
                logger.warn("unrecognized type in param {var}: {type}".format(var=param_name, type=asses))
        # end for
        param_required = table[2].strip()
        # Stays None (falsy -> treated as optional) if column 3 is neither value.
        param_needed = None
        if param_required == "Yes":
            param_needed = True
        elif param_required == "Optional":
            param_needed = False
        param_description = table[3].strip()
        if param_needed:
            param_list_args.append(Param(param_name, param_type,param_needed, param_description))
            args.append(param_name)
            args2.append("{param_name}={param_name}".format(param_name=param_name))
            str_args += '\t\t:param {key}: {descr}\n\t\t:type {key}: {type}\n\n'.format(key=param_name, descr=param_description, type=param_type)
            if assert_commands:
                asserts.append("assert({var} is not None)".format(var=param_name))
                asserts.append("assert({ass})".format(ass=" or ".join(assert_commands)) + ((" # {comment}".format(comment=", ".join(assert_comments))) if assert_comments else ""))
        else:
            param_list_kwargs.append(Param(param_name, param_type,param_needed, param_description))
            kwargs.append("{param_name}=None".format(param_name=param_name))
            kwargs2.append("{param_name}={param_name}".format(param_name=param_name))
            str_kwargs += '\t\t:keyword {key}: {descr}\n\t\t:type {key}: {type}\n\n'.format(key=param_name, descr=param_description, type=param_type)
            if assert_commands:
                asserts.append("assert({var} is None or {ass})".format(var=param_name, ass=" or ".join(assert_commands)) + ((" # {comment}".format(comment=", ".join(assert_comments))) if assert_comments else ""))
    # Optional parameters go after the required ones in the signature and call.
    args.extend(kwargs)
    args2.extend(kwargs2)
    asserts_string = "\n\t\t" + "\n\t\t".join(asserts)
    text = ""
    if len(str_args)>0:
        text += '\n\t\tParameters:\n\n'
        text += str_args
    if len(str_kwargs)>0:
        text += '\n\t\tOptional keyword parameters:\n\n'
        text += str_kwargs
    do_args = ['"%s"' % command]
    do_args.extend(args2)
    result = '\tdef {funcname}(self, {params}):\n\t\t"""\n{description_with_tabs}\n\n\t\t{link}\n\n' \
             '{paramshit}\n' \
             '\t\tReturns:\n\n\t\t:return: {returns}\n\t\t:rtype: {return_type}\n\t\t"""{asserts_with_tabs}\n\t\treturn self.do({do_args})\n\t# end def {funcname}'.format(
        funcname=convert_to_underscore(command),
        params=", ".join(args), description_with_tabs=description_with_tabs, link=link,
        returns=returns, return_type=return_type, command=command, do_args=", ".join(do_args),
        asserts_with_tabs=asserts_string,
        paramshit = text
    )
    result = result.replace("\t", " ")
    print (result)
    return result
def clazz(clazz, parent_clazz, description, link, params_string, init_super_args=None):
    """
    Generate the source text of one API type class, including a to_array()
    method, from a Telegram docs table.

    Live template for pycharm:
    y = clazz(clazz="$clazz$", parent_clazz="%parent$", description="$desc$", link="$lnk$", params_string="$first_param$")

    :param clazz: Name of the class to generate.
    :param parent_clazz: Name of the base class.
    :param description: Human readable description copied from the docs.
    :param link: URL of the type's section of the api documentation.
    :param params_string: One field per line, tab-separated columns:
                          name, type, description (an "Optional." prefix marks keyword args).
    :param init_super_args: Extra literal argument strings appended after the
                            implicit "id" in the generated super().__init__()
                            call; None means just "id".
    :return: The generated class source (tabs expanded); it is also printed.
    """
    init_description_w_tabs = description.strip().replace("\n", "\n\t\t")
    clazz_description_w_tabs = description.strip().replace("\n", "\n\t")
    args = []        # names of required __init__ parameters
    args2 = []
    kwargs = []      # "name = None" strings for optional parameters
    kwargs2 = []
    asserts = []     # generated __init__ body lines (asserts + self assignments)
    str_args = ""    # docstring text for required parameters
    str_kwargs = ""  # docstring text for optional parameters
    to_array1 = []   # unconditional to_array() assignments (required fields)
    to_array2 = []   # guarded to_array() assignments (optional fields)
    param_strings = params_string.split("\n")
    for param in param_strings:
        table = param.split("\t")
        param_name = table[0].strip()
        # Pad with spaces so the " Type " replacements below match whole words.
        param_type = table[1].strip().join([" "," "])
        # " String or Boolean "
        param_type = param_type.replace(" String ", " str ")
        param_type = param_type.replace(" Integer ", " int ")
        param_type = param_type.replace(" Boolean ", " bool ")
        param_type = param_type.replace(" Nothing ", " None ")
        assert_types = param_type
        param_type = param_type.replace(" or ", " | ")
        assert_commands = []
        assert_comments = []
        for asses in assert_types.split("|"): # short for asserts
            asses = asses.strip() # always good!!
            asses = asses.strip("()")
            if asses in ["int", "bool"]:
                assert_commands.append("isinstance({var}, {type})".format(var=param_name, type=asses))
            elif asses == "str":
                assert_commands.append("isinstance({var}, unicode_type)".format(var=param_name, type=asses))
                assert_comments.append("unicode on python 2, str on python 3")
            elif asses.startswith("Array"):
                assert_commands.append("isinstance({var}, (list, tuple))".format(var=param_name))
                assert_comments.append(asses.replace("\n"," "))
            else:
                # FIX: logger.warn is a deprecated alias; use logger.warning.
                logger.warning("unrecognized type in param {var}: {type}".format(var=param_name, type=asses))
        # end for
        param_description = table[2].strip()
        param_needed = not param_description.startswith("Optional.")
        # Blank entry produces an empty line between consecutive fields' code.
        asserts.append("")
        if param_needed:
            args.append(param_name)
            str_args += '\n\n\t\t:param {key}: {descr}\n\t\t:type {key}: {type}'.format(key=param_name, descr=param_description, type=param_type)
            if assert_commands:
                asserts.append("assert({var} is not None)".format(var=param_name))
                asserts.append("assert({ass})".format(ass=" or ".join(assert_commands)) + ((" # {comment}".format(comment=", ".join(assert_comments))) if assert_comments else ""))
            to_array1.append('array["{var}"] = self.{var}'.format(var=param_name))
        else:
            kwargs.append("{param_name} = None".format(param_name=param_name))
            str_kwargs += '\n\n\t\t:keyword {key}: {descr}\n\t\t:type {key}: {type}'.format(key=param_name, descr=param_description, type=param_type)
            if assert_commands:
                asserts.append("assert({var} is None or {ass})".format(var=param_name, ass=" or ".join(assert_commands)) + ((" # {comment}".format(comment=", ".join(assert_comments))) if assert_comments else ""))
            to_array2.append('if self.{var} is not None:'.format(var=param_name))
            to_array2.append('\tarray["{var}"] = self.{var}'.format(var=param_name))
        asserts.append("self.{param_name} = {param_name}".format(param_name=param_name))
    param_description = ""
    if len(str_args)>0:
        param_description += '\n\t\tParameters:'
        param_description += str_args
    if len(str_kwargs)>0:
        param_description += '\n\n\n\t\tOptional keyword parameters:'
        param_description += str_kwargs
    # Optional parameters go after the required ones in the signature.
    args.extend(kwargs)
    to_array = ["array = super({clazz}, self).to_array()".format(clazz=clazz)]
    to_array.extend(to_array1)
    to_array.extend(to_array2)
    # BUG FIX: the "return array" template line below previously read
    # '\t\rreturn array\n', emitting a stray carriage return instead of
    # indentation in the generated to_array(); corrected to '\t\treturn array\n'.
    result = 'class {clazz} ({parent_clazz}):\n' \
             '\t"""\n' \
             '\t{clazz_description_w_tabs}\n' \
             '\n' \
             '\t{link}\n' \
             '\t"""\n' \
             '\tdef __init__(self, {params}):\n' \
             '\t\t"""\n' \
             '\t\t{init_description_w_tabs}\n' \
             '\n' \
             '\t\t{link}\n' \
             '\n' \
             '{param_description}\n' \
             '\t\t"""\n' \
             '\t\tsuper({clazz}, self).__init__({init_super_args})\n' \
             '\t\t{asserts_with_tabs}\n' \
             '\t# end def __init__\n' \
             '\n' \
             '\tdef to_array(self):\n' \
             '\t\t{to_array_with_tabs}\n' \
             '\t\treturn array\n' \
             '\t# end def to_array\n' \
             '# end class {clazz}'.format(
        clazz=clazz, parent_clazz=parent_clazz, params=", ".join(args), param_description = param_description,
        clazz_description_w_tabs=clazz_description_w_tabs, init_description_w_tabs=init_description_w_tabs, link=link,
        asserts_with_tabs="\n\t\t".join(asserts), to_array_with_tabs="\n\t\t".join(to_array), init_super_args=("id, " + ", ".join(init_super_args)) if init_super_args else "id"
    )
    result = result.replace("\t", " ")
    print (result)
    return result
# func(command="", description="", link="", param_string="", returns="", return_type="")
class Param(object):
    """Plain record describing one API parameter.

    Holds the parameter's name, its (already normalised) type string,
    whether it is required, and its documentation text.
    """
    def __init__(self, name, type, needed, desc):
        super(Param, self).__init__()
        self.name, self.type, self.needed, self.desc = name, type, needed, desc
    # end def __init__
# end class Param
def examples():
    """Demo invocations used when running this module directly: prints the
    generated classes (now with to_array() and the literal super-init type tag)
    for the Telegram inline-query result types.
    (Table columns inside the triple-quoted strings are tab-separated.)"""
    #func("answerInlineQuery", """Use this method to send answers to an inline query. On success, True is returned.
    # No more than 50 results per query are allowed.""", "https://core.telegram.org/bots/api#answerinlinequery", "inline_query_id String Yes Unique identifier for the answered query\nresults Array of InlineQueryResult Yes A JSON-serialized array of results for the inline query\ncache_time Integer Optional The maximum amount of time in seconds that the result of the inline query may be cached on the server. Defaults to 300.\nis_personal Boolean Optional Pass True, if results may be cached on the server side only for the user that sent the query. By default, results may be returned to any user who sends the same query\nnext_offset String Optional Pass the offset that a client should send in the next query with the same text to receive more results. Pass an empty string if there are no more results or if you don‘t support pagination. Offset length can’t exceed 64 bytes.", "", "None")
    clazz("InlineQueryResultArticle", "InlineQueryResult", "Represents a link to an article or web page.", "https://core.telegram.org/bots/api#inlinequeryresultarticle", """id	String	Unique identifier for this result, 1-64 Bytes
title	String	Title of the result
message_text	String	Text of the message to be sent, 1-4096 characters
parse_mode	String	Optional. Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
disable_web_page_preview	Boolean	Optional. Disables link previews for links in the sent message
url	String	Optional. URL of the result
hide_url	Boolean	Optional. Pass True, if you don't want the URL to be shown in the message
description	String	Optional. Short description of the result
thumb_url	String	Optional. Url of the thumbnail for the result
thumb_width	Integer	Optional. Thumbnail width
thumb_height	Integer	Optional. Thumbnail height""", ['"article"'])
    print("\n")
    clazz("InlineQueryResultPhoto", "InlineQueryResult", "Represents a link to a photo. By default, this photo will be sent by the user with optional caption. Alternatively, you can provide message_text to send it instead of photo.", "https://core.telegram.org/bots/api#inlinequeryresultphoto", """id	String	Unique identifier for this result, 1-64 bytes
photo_url	String	A valid URL of the photo. Photo must be in jpeg format. Photo size must not exceed 5MB
photo_width	Integer	Optional. Width of the photo
photo_height	Integer	Optional. Height of the photo
thumb_url	String	URL of the thumbnail for the photo
title	String	Optional. Title for the result
description	String	Optional. Short description of the result
caption	String	Optional. Caption of the photo to be sent, 0-200 characters
message_text	String	Optional. Text of a message to be sent instead of the photo, 1-4096 characters
parse_mode	String	Optional. Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
disable_web_page_preview	Boolean	Optional. Disables link previews for links in the sent message""", ['"photo"'])
    print("\n")
    clazz("InlineQueryResultGif", "InlineQueryResult", "Represents a link to an animated GIF file. By default, this animated GIF file will be sent by the user with optional caption. Alternatively, you can provide message_text to send it instead of the animation.", "https://core.telegram.org/bots/api#inlinequeryresultgif", """id	String	Unique identifier for this result, 1-64 bytes
gif_url	String	A valid URL for the GIF file. File size must not exceed 1MB
gif_width	Integer	Optional. Width of the GIF
gif_height	Integer	Optional. Height of the GIF
thumb_url	String	URL of the static thumbnail for the result (jpeg or gif)
title	String	Optional. Title for the result
caption	String	Optional. Caption of the GIF file to be sent, 0-200 characters
message_text	String	Optional. Text of a message to be sent instead of the animation, 1-4096 characters
parse_mode	String	Optional. Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
disable_web_page_preview	Boolean	Optional. Disables link previews for links in the sent message""", ['"gif"'])
    print("\n")
    clazz("InlineQueryResultMpeg4Gif", "InlineQueryResult", "Represents a link to a video animation (H.264/MPEG-4 AVC video without sound). By default, this animated MPEG-4 file will be sent by the user with optional caption. Alternatively, you can provide message_text to send it instead of the animation.", "https://core.telegram.org/bots/api#inlinequeryresultmpeg4gif", """id	String	Unique identifier for this result, 1-64 bytes
mpeg4_url	String	A valid URL for the MP4 file. File size must not exceed 1MB
mpeg4_width	Integer	Optional. Video width
mpeg4_height	Integer	Optional. Video height
thumb_url	String	URL of the static thumbnail (jpeg or gif) for the result
title	String	Optional. Title for the result
caption	String	Optional. Caption of the MPEG-4 file to be sent, 0-200 characters
message_text	String	Optional. Text of a message to be sent instead of the animation, 1-4096 characters
parse_mode	String	Optional. Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
disable_web_page_preview	Boolean	Optional. Disables link previews for links in the sent message""", ['"mpeg4_gif"'])
    print("\n")
    clazz("InlineQueryResultVideo", "InlineQueryResult", "Represents link to a page containing an embedded video player or a video file.", "https://core.telegram.org/bots/api#inlinequeryresultvideo", """id	String	Unique identifier for this result, 1-64 bytes
video_url	String	A valid URL for the embedded video player or video file
mime_type	String	Mime type of the content of video url, “text/html” or “video/mp4”
message_text	String	Text of the message to be sent with the video, 1-4096 characters
parse_mode	String	Optional. Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
disable_web_page_preview	Boolean	Optional. Disables link previews for links in the sent message
video_width	Integer	Optional. Video width
video_height	Integer	Optional. Video height
video_duration	Integer	Optional. Video duration in seconds
thumb_url	String	URL of the thumbnail (jpeg only) for the video
title	String	Title for the result
description	String	Optional. Short description of the result""", ['"video"'])
    # clazz("class", "InlineQueryResult", "desc", "link", """lines""")
    # clazz("", "InlineQueryResult", "", "", """""")
if __name__ == '__main__':
    # Running this module directly just prints the example generations.
    examples()
def gett():
    """Interactively build one generator call.

    Prompts on the console (via luckydonaldUtils.interactions) for either a
    bot-API function or a class, collects description, link and the
    tab-separated parameter lines, then delegates to func() or clazz().
    """
    from luckydonaldUtils.interactions import confirm, answer
    do_func = confirm("Choose between generating function or class. Do you want a function?", True)
    if do_func:
        command = answer("Command (the Title)")
        description = answer("Description")
        link = answer("The link on the api page")
        params_string = "---"  # non-empty sentinel so the loop body runs at least once
        params_strings = []
        while params_string != "":
            params_string = answer("Parameters (sepereated by tabs, and new lines)\nParameters Type Required Description", "")
            if params_string and not params_string.strip() == "":
                params_strings.append(params_string)
            # end if
        # end while
        returns = answer("Textual description what the function returns", "On success, the sent Message is returned.")
        return_type = answer("Return type", "Message")
        print("\n")
        # BUG FIX: previously this passed params_string, which is always ""
        # once the loop exits, so all collected parameter lines were dropped.
        # Join the collected lines instead, mirroring the class branch below.
        func(command, description, link, "\n".join(params_strings), returns, return_type)
    else:
        clazze = answer("Class name")
        parent_clazz = answer("Parent class name", "object")
        description = answer("Description")
        link = answer("The link on the api page")
        params_string = "--"  # non-empty sentinel so the loop body runs at least once
        params_strings = []
        while params_string != "":
            params_string = answer("Parameters (sepereated by tabs, and new lines)\nParameters Type Description", "")
            if params_string and not params_string.strip() == "":
                params_strings.append(params_string)
            # end if
        # end while
        print("\n")
        clazz(clazze, parent_clazz, description, link, "\n".join(params_strings))
    # end if
# end if main
"""
regex for def -> class def
"def ([a-z_]+)\((?!\))" -> "def $1(self, "
""" |
#!/usr/bin/env python2.7
# Copyright (c) 2012 Jonathan Warren
# Copyright (c) 2012 The Bitmessage developers
# Distributed under the MIT/X11 software license. See the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Right now, PyBitmessage only support connecting to stream 1. It doesn't
# yet contain logic to expand into further streams.
# The software version variable is now held in shared.py
import signal # Used to capture a Ctrl-C keypress so that Bitmessage can shutdown gracefully.
# The next 3 are used for the API
from SimpleXMLRPCServer import *
import json
import singleton
import os
# OSX python version check
import sys
if sys.platform == 'darwin':
if float("{1}.{2}".format(*sys.version_info)) < 7.5:
print "You should use python 2.7.5 or greater."
print "Your version: {0}.{1}.{2}".format(*sys.version_info)
sys.exit(0)
# Classes
from helper_sql import *
from class_sqlThread import *
from class_singleCleaner import *
from class_singleWorker import *
from class_outgoingSynSender import *
from class_singleListener import *
from class_addressGenerator import *
from debug import logger
# Helper Functions
import helper_bootstrap
import proofofwork
import sys
# Second interpreter-version check, performed again here now that the logger
# has been imported so the failure is also recorded in the log file.
if sys.platform == 'darwin':
    # BUGFIX: float("{1}.{2}".format(*sys.version_info)) mis-handles two-digit
    # micro versions (2.7.10 -> float("7.10") == 7.1 < 7.5); compare the
    # version tuple instead.
    if sys.version_info < (2, 7, 5):
        logger.critical("You should use python 2.7.5 or greater. Your version: %s", "{0}.{1}.{2}".format(*sys.version_info))
        sys.exit(0)
def connectToStream(streamNumber):
    """Join the given stream: prime its in-memory inventory set from the
    database, then spawn the threads that make outgoing connections."""
    selfInitiatedConnections[streamNumber] = {}
    shared.inventorySets[streamNumber] = set()
    for row in sqlQuery('''SELECT hash FROM inventory WHERE streamnumber=?''', streamNumber):
        shared.inventorySets[streamNumber].add(row[0])
    # Lower thread count on Windows -- presumably to stay under the Windows
    # TCP half-open connection limit (TODO confirm).
    halfOpenLimit = 9 if sys.platform[0:3] == 'win' else 32
    for _ in range(halfOpenLimit):
        sender = outgoingSynSender()
        sender.setup(streamNumber, selfInitiatedConnections)
        sender.start()
class APIError(Exception):
    """Raised by the XML-RPC API handlers to report a numbered error.

    Attributes:
        error_number  -- numeric API error code (see the API documentation)
        error_message -- human-readable description of the failure
    """

    def __init__(self, error_number, error_message):
        # BUGFIX: forward the details to Exception.__init__ so that
        # self.args is populated (gives a useful repr() and makes the
        # exception picklable); previously args was always empty.
        super(APIError, self).__init__(error_number, error_message)
        self.error_number = error_number
        self.error_message = error_message

    def __str__(self):
        # Zero-pad the code to four digits, e.g. "API Error 0007: ...".
        return "API Error %04i: %s" % (self.error_number, self.error_message)
# This is one of several classes that constitute the API
# This class was written by Vaibhav Bhatia. Modified by Jonathan Warren (Atheros).
# http://code.activestate.com/recipes/501148-xmlrpc-serverclient-which-does-cookie-handling-and/
class MySimpleXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
    def do_POST(self):
        """Handle an HTTP POST request, interpreting it as an XML-RPC call.

        This is SimpleXMLRPCRequestHandler.do_POST copied nearly verbatim;
        the only functional addition is the 'HACK' section below, which
        writes any cookies held in self.cookies into the response headers.
        (self.cookies is populated outside this method -- not visible in
        this view; confirm where it is set before relying on it.)
        """
        # Handles the HTTP POST request.
        # Attempts to interpret all HTTP POST requests as XML-RPC calls,
        # which are forwarded to the server's _dispatch method for handling.
        # Note: this method is the same as in SimpleXMLRPCRequestHandler,
        # just hacked to handle cookies
        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return
        try:
            # Get arguments by reading body of request.
            # We read this in chunks to avoid straining
            # socket.read(); around the 10 or 15Mb mark, some platforms
            # begin to have problems (bug #792570).
            max_chunk_size = 10 * 1024 * 1024
            size_remaining = int(self.headers["content-length"])
            L = []
            while size_remaining:
                chunk_size = min(size_remaining, max_chunk_size)
                L.append(self.rfile.read(chunk_size))
                size_remaining -= len(L[-1])
            data = ''.join(L)
            # In previous versions of SimpleXMLRPCServer, _dispatch
            # could be overridden in this class, instead of in
            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
            # check to see if a subclass implements _dispatch and dispatch
            # using that method if present.
            response = self.server._marshaled_dispatch(
                data, getattr(self, '_dispatch', None)
            )
        except: # This should only happen if the module is buggy
            # internal error, report as HTTP server error
            self.send_response(500)
            self.end_headers()
        else:
            # got a valid XML RPC response
            self.send_response(200)
            self.send_header("Content-type", "text/xml")
            self.send_header("Content-length", str(len(response)))
            # HACK :start -> sends cookies here
            if self.cookies:
                for cookie in self.cookies:
                    self.send_header('Set-Cookie', cookie.output(header=''))
            # HACK :end
            self.end_headers()
            self.wfile.write(response)
            # shut down the connection
            self.wfile.flush()
            self.connection.shutdown(1)
def APIAuthenticateClient(self):
if 'Authorization' in self.headers:
# handle Basic authentication
(enctype, encstr) = self.headers.get('Authorization').split()
(emailid, password) = encstr.decode('base64').split(':')
if emailid == shared.config.get('bitmessagesettings', 'apiusername') and password == shared.config.get('bitmessagesettings', 'apipassword'):
return True
else:
return False
else:
logger.warn('Authentication failed because header lacks Authentication field')
time.sleep(2)
return False
return False
def _decode(self, text, decode_type):
try:
return text.decode(decode_type)
except TypeError as e:
raise APIError(22, "Decode error - " + str(e))
def _verifyAddress(self, address):
status, addressVersionNumber, streamNumber, ripe = decodeAddress(address)
if status != 'success':
logger.warn('API Error 0007: Could not decode address %s. Status: %s.', address, status)
if status == 'checksumfailed':
raise APIError(8, 'Checksum failed for address: ' + address)
if status == 'invalidcharacters':
raise APIError(9, 'Invalid characters in address: ' + address)
if status == 'versiontoohigh':
raise APIError(10, 'Address version number too high (or zero) in address: ' + address)
raise APIError(7, 'Could not decode address: ' + address + ' : ' + status)
if addressVersionNumber < 2 or addressVersionNumber > 3:
raise APIError(11, 'The address version number currently must be 2 or 3. Others aren\'t supported. Check the address.')
if streamNumber != 1:
raise APIError(12, 'The stream number must be 1. Others aren\'t supported. Check the address.')
return (status, addressVersionNumber, streamNumber, ripe)
def _handle_request(self, method, params):
if method == 'helloWorld':
(a, b) = params
return a + '-' + b
elif method == 'add':
(a, b) = params
return a + b
elif method == 'statusBar':
message, = params
shared.UISignalQueue.put(('updateStatusBar', message))
elif method == 'listAddresses' or method == 'listAddresses2':
data = '{"addresses":['
configSections = shared.config.sections()
for addressInKeysFile in configSections:
if addressInKeysFile != 'bitmessagesettings':
status, addressVersionNumber, streamNumber, hash = decodeAddress(
addressInKeysFile)
data
if len(data) > 20:
data += ','
if shared.config.has_option(addressInKeysFile, 'chan'):
chan = shared.config.getboolean(addressInKeysFile, 'chan')
else:
chan = False
label = shared.config.get(addressInKeysFile, 'label')
if method == 'listAddresses2':
label = label.encode('base64')
data += json.dumps({'label': label, 'address': addressInKeysFile, 'stream':
streamNumber, 'enabled': shared.config.getboolean(addressInKeysFile, 'enabled'), 'chan': chan}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'listAddressBookEntries' or method == 'listAddressbook': # the listAddressbook alias should be removed eventually.
queryreturn = sqlQuery('''SELECT label, address from addressbook''')
data = '{"addresses":['
for row in queryreturn:
label, address = row
label = shared.fixPotentiallyInvalidUTF8Data(label)
if len(data) > 20:
data += ','
data += json.dumps({'label':label.encode('base64'), 'address': address}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'addAddressBookEntry' or method == 'addAddressbook': # the addAddressbook alias should be deleted eventually.
if len(params) != 2:
raise APIError(0, "I need label and address")
address, label = params
label = self._decode(label, "base64")
address = addBMIfNotPresent(address)
self._verifyAddress(address)
queryreturn = sqlQuery("SELECT address FROM addressbook WHERE address=?", address)
if queryreturn != []:
raise APIError(16, 'You already have this address in your address book.')
sqlExecute("INSERT INTO addressbook VALUES(?,?)", label, address)
shared.UISignalQueue.put(('rerenderInboxFromLabels',''))
shared.UISignalQueue.put(('rerenderSentToLabels',''))
shared.UISignalQueue.put(('rerenderAddressBook',''))
return "Added address %s to address book" % address
elif method == 'deleteAddressBookEntry' or method == 'deleteAddressbook': # The deleteAddressbook alias should be deleted eventually.
if len(params) != 1:
raise APIError(0, "I need an address")
address, = params
address = addBMIfNotPresent(address)
self._verifyAddress(address)
sqlExecute('DELETE FROM addressbook WHERE address=?', address)
shared.UISignalQueue.put(('rerenderInboxFromLabels',''))
shared.UISignalQueue.put(('rerenderSentToLabels',''))
shared.UISignalQueue.put(('rerenderAddressBook',''))
return "Deleted address book entry for %s if it existed" % address
elif method == 'createRandomAddress':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
elif len(params) == 1:
label, = params
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 2:
label, eighteenByteRipe = params
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 3:
label, eighteenByteRipe, totalDifficulty = params
nonceTrialsPerByte = int(
shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 4:
label, eighteenByteRipe, totalDifficulty, smallMessageDifficulty = params
nonceTrialsPerByte = int(
shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = int(
shared.networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
else:
raise APIError(0, 'Too many parameters!')
label = self._decode(label, "base64")
try:
unicode(label, 'utf-8')
except:
raise APIError(17, 'Label is not valid UTF-8 data.')
shared.apiAddressGeneratorReturnQueue.queue.clear()
streamNumberForAddress = 1
shared.addressGeneratorQueue.put((
'createRandomAddress', 4, streamNumberForAddress, label, 1, "", eighteenByteRipe, nonceTrialsPerByte, payloadLengthExtraBytes))
return shared.apiAddressGeneratorReturnQueue.get()
elif method == 'createDeterministicAddresses':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
elif len(params) == 1:
passphrase, = params
numberOfAddresses = 1
addressVersionNumber = 0
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 2:
passphrase, numberOfAddresses = params
addressVersionNumber = 0
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 3:
passphrase, numberOfAddresses, addressVersionNumber = params
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 4:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber = params
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 5:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe = params
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 6:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe, totalDifficulty = params
nonceTrialsPerByte = int(
shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 7:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe, totalDifficulty, smallMessageDifficulty = params
nonceTrialsPerByte = int(
shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = int(
shared.networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
else:
raise APIError(0, 'Too many parameters!')
if len(passphrase) == 0:
raise APIError(1, 'The specified passphrase is blank.')
if not isinstance(eighteenByteRipe, bool):
raise APIError(23, 'Bool expected in eighteenByteRipe, saw %s instead' % type(eighteenByteRipe))
passphrase = self._decode(passphrase, "base64")
if addressVersionNumber == 0: # 0 means "just use the proper addressVersionNumber"
addressVersionNumber = 4
if addressVersionNumber != 3 and addressVersionNumber != 4:
raise APIError(2,'The address version number currently must be 3, 4, or 0 (which means auto-select). ' + addressVersionNumber + ' isn\'t supported.')
if streamNumber == 0: # 0 means "just use the most available stream"
streamNumber = 1
if streamNumber != 1:
raise APIError(3,'The stream number must be 1 (or 0 which means auto-select). Others aren\'t supported.')
if numberOfAddresses == 0:
raise APIError(4, 'Why would you ask me to generate 0 addresses for you?')
if numberOfAddresses > 999:
raise APIError(5, 'You have (accidentally?) specified too many addresses to make. Maximum 999. This check only exists to prevent mischief; if you really want to create more addresses than this, contact the Bitmessage developers and we can modify the check or you can do it yourself by searching the source code for this message.')
shared.apiAddressGeneratorReturnQueue.queue.clear()
logger.debug('Requesting that the addressGenerator create %s addresses.', numberOfAddresses)
shared.addressGeneratorQueue.put(
('createDeterministicAddresses', addressVersionNumber, streamNumber,
'unused API address', numberOfAddresses, passphrase, eighteenByteRipe, nonceTrialsPerByte, payloadLengthExtraBytes))
data = '{"addresses":['
queueReturn = shared.apiAddressGeneratorReturnQueue.get()
for item in queueReturn:
if len(data) > 20:
data += ','
data += "\"" + item + "\""
data += ']}'
return data
elif method == 'getDeterministicAddress':
if len(params) != 3:
raise APIError(0, 'I need exactly 3 parameters.')
passphrase, addressVersionNumber, streamNumber = params
numberOfAddresses = 1
eighteenByteRipe = False
if len(passphrase) == 0:
raise APIError(1, 'The specified passphrase is blank.')
passphrase = self._decode(passphrase, "base64")
if addressVersionNumber != 3 and addressVersionNumber != 4:
raise APIError(2, 'The address version number currently must be 3 or 4. ' + addressVersionNumber + ' isn\'t supported.')
if streamNumber != 1:
raise APIError(3, ' The stream number must be 1. Others aren\'t supported.')
shared.apiAddressGeneratorReturnQueue.queue.clear()
logger.debug('Requesting that the addressGenerator create %s addresses.', numberOfAddresses)
shared.addressGeneratorQueue.put(
('getDeterministicAddress', addressVersionNumber,
streamNumber, 'unused API address', numberOfAddresses, passphrase, eighteenByteRipe))
return shared.apiAddressGeneratorReturnQueue.get()
elif method == 'getAllInboxMessages':
queryreturn = sqlQuery(
'''SELECT msgid, toaddress, fromaddress, subject, received, message, encodingtype, read FROM inbox where folder='inbox' ORDER BY received''')
data = '{"inboxMessages":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, received, message, encodingtype, read = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
if len(data) > 25:
data += ','
data += json.dumps({'msgid': msgid.encode('hex'), 'toAddress': toAddress, 'fromAddress': fromAddress, 'subject': subject.encode(
'base64'), 'message': message.encode('base64'), 'encodingType': encodingtype, 'receivedTime': received, 'read': read}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getAllInboxMessageIds' or method == 'getAllInboxMessageIDs':
queryreturn = sqlQuery(
'''SELECT msgid FROM inbox where folder='inbox' ORDER BY received''')
data = '{"inboxMessageIds":['
for row in queryreturn:
msgid = row[0]
if len(data) > 25:
data += ','
data += json.dumps({'msgid': msgid.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getInboxMessageById' or method == 'getInboxMessageByID':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
elif len(params) == 1:
msgid = self._decode(params[0], "hex")
elif len(params) >= 2:
msgid = self._decode(params[0], "hex")
readStatus = params[1]
if not isinstance(readStatus, bool):
raise APIError(23, 'Bool expected in readStatus, saw %s instead.' % type(readStatus))
queryreturn = sqlQuery('''SELECT read FROM inbox WHERE msgid=?''', msgid)
# UPDATE is slow, only update if status is different
if queryreturn != [] and (queryreturn[0][0] == 1) != readStatus:
sqlExecute('''UPDATE inbox set read = ? WHERE msgid=?''', readStatus, msgid)
queryreturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, received, message, encodingtype, read FROM inbox WHERE msgid=?''', msgid)
data = '{"inboxMessage":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, received, message, encodingtype, read = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'receivedTime':received, 'read': read}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getAllSentMessages':
queryreturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, lastactiontime, message, encodingtype, status, ackdata FROM sent where folder='sent' ORDER BY lastactiontime''')
data = '{"sentMessages":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, lastactiontime, message, encodingtype, status, ackdata = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
if len(data) > 25:
data += ','
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'lastActionTime':lastactiontime, 'status':status, 'ackData':ackdata.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getAllSentMessageIds' or method == 'getAllSentMessageIDs':
queryreturn = sqlQuery('''SELECT msgid FROM sent where folder='sent' ORDER BY lastactiontime''')
data = '{"sentMessageIds":['
for row in queryreturn:
msgid = row[0]
if len(data) > 25:
data += ','
data += json.dumps({'msgid':msgid.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getInboxMessagesByReceiver' or method == 'getInboxMessagesByAddress': #after some time getInboxMessagesByAddress should be removed
if len(params) == 0:
raise APIError(0, 'I need parameters!')
toAddress = params[0]
queryReturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, received, message, encodingtype FROM inbox WHERE folder='inbox' AND toAddress=?''', toAddress)
data = '{"inboxMessages":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, received, message, encodingtype = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
if len(data) > 25:
data += ','
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'receivedTime':received}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getSentMessageById' or method == 'getSentMessageByID':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
msgid = self._decode(params[0], "hex")
queryreturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, lastactiontime, message, encodingtype, status, ackdata FROM sent WHERE msgid=?''', msgid)
data = '{"sentMessage":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, lastactiontime, message, encodingtype, status, ackdata = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'lastActionTime':lastactiontime, 'status':status, 'ackData':ackdata.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getSentMessagesByAddress' or method == 'getSentMessagesBySender':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
fromAddress = params[0]
queryreturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, lastactiontime, message, encodingtype, status, ackdata FROM sent WHERE folder='sent' AND fromAddress=? ORDER BY lastactiontime''',
fromAddress)
data = '{"sentMessages":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, lastactiontime, message, encodingtype, status, ackdata = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
if len(data) > 25:
data += ','
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'lastActionTime':lastactiontime, 'status':status, 'ackData':ackdata.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getSentMessageByAckData':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
ackData = self._decode(params[0], "hex")
queryreturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, lastactiontime, message, encodingtype, status, ackdata FROM sent WHERE ackdata=?''',
ackData)
data = '{"sentMessage":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, lastactiontime, message, encodingtype, status, ackdata = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'lastActionTime':lastactiontime, 'status':status, 'ackData':ackdata.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'trashMessage':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
msgid = self._decode(params[0], "hex")
# Trash if in inbox table
helper_inbox.trash(msgid)
# Trash if in sent table
sqlExecute('''UPDATE sent SET folder='trash' WHERE msgid=?''', msgid)
return 'Trashed message (assuming message existed).'
elif method == 'trashInboxMessage':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
msgid = self._decode(params[0], "hex")
helper_inbox.trash(msgid)
return 'Trashed inbox message (assuming message existed).'
elif method == 'trashSentMessage':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
msgid = self._decode(params[0], "hex")
sqlExecute('''UPDATE sent SET folder='trash' WHERE msgid=?''', msgid)
return 'Trashed sent message (assuming message existed).'
elif method == 'trashSentMessageByAckData':
# This API method should only be used when msgid is not available
if len(params) == 0:
raise APIError(0, 'I need parameters!')
ackdata = self._decode(params[0], "hex")
sqlExecute('''UPDATE sent SET folder='trash' WHERE ackdata=?''',
ackdata)
return 'Trashed sent message (assuming message existed).'
elif method == 'sendMessage':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
elif len(params) == 4:
toAddress, fromAddress, subject, message = params
encodingType = 2
elif len(params) == 5:
toAddress, fromAddress, subject, message, encodingType = params
if encodingType != 2:
raise APIError(6, 'The encoding type must be 2 because that is the only one this program currently supports.')
subject = self._decode(subject, "base64")
message = self._decode(message, "base64")
toAddress = addBMIfNotPresent(toAddress)
fromAddress = addBMIfNotPresent(fromAddress)
status, addressVersionNumber, streamNumber, toRipe = self._verifyAddress(toAddress)
self._verifyAddress(fromAddress)
try:
fromAddressEnabled = shared.config.getboolean(
fromAddress, 'enabled')
except:
raise APIError(13, 'Could not find your fromAddress in the keys.dat file.')
if not fromAddressEnabled:
raise APIError(14, 'Your fromAddress is disabled. Cannot send.')
ackdata = OpenSSL.rand(32)
t = ('', toAddress, toRipe, fromAddress, subject, message, ackdata, int(
time.time()), 'msgqueued', 1, 1, 'sent', 2)
helper_sent.insert(t)
toLabel = ''
queryreturn = sqlQuery('''select label from addressbook where address=?''', toAddress)
if queryreturn != []:
for row in queryreturn:
toLabel, = row
# apiSignalQueue.put(('displayNewSentMessage',(toAddress,toLabel,fromAddress,subject,message,ackdata)))
shared.UISignalQueue.put(('displayNewSentMessage', (
toAddress, toLabel, fromAddress, subject, message, ackdata)))
shared.workerQueue.put(('sendmessage', toAddress))
return ackdata.encode('hex')
elif method == 'sendBroadcast':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
if len(params) == 3:
fromAddress, subject, message = params
encodingType = 2
elif len(params) == 4:
fromAddress, subject, message, encodingType = params
if encodingType != 2:
raise APIError(6, 'The encoding type must be 2 because that is the only one this program currently supports.')
subject = self._decode(subject, "base64")
message = self._decode(message, "base64")
fromAddress = addBMIfNotPresent(fromAddress)
self._verifyAddress(fromAddress)
try:
fromAddressEnabled = shared.config.getboolean(
fromAddress, 'enabled')
except:
raise APIError(13, 'could not find your fromAddress in the keys.dat file.')
ackdata = OpenSSL.rand(32)
toAddress = '[Broadcast subscribers]'
ripe = ''
t = ('', toAddress, ripe, fromAddress, subject, message, ackdata, int(
time.time()), 'broadcastqueued', 1, 1, 'sent', 2)
helper_sent.insert(t)
toLabel = '[Broadcast subscribers]'
shared.UISignalQueue.put(('displayNewSentMessage', (
toAddress, toLabel, fromAddress, subject, message, ackdata)))
shared.workerQueue.put(('sendbroadcast', ''))
return ackdata.encode('hex')
elif method == 'getStatus':
if len(params) != 1:
raise APIError(0, 'I need one parameter!')
ackdata, = params
if len(ackdata) != 64:
raise APIError(15, 'The length of ackData should be 32 bytes (encoded in hex thus 64 characters).')
ackdata = self._decode(ackdata, "hex")
queryreturn = sqlQuery(
'''SELECT status FROM sent where ackdata=?''',
ackdata)
if queryreturn == []:
return 'notfound'
for row in queryreturn:
status, = row
return status
elif method == 'addSubscription':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
if len(params) == 1:
address, = params
label == ''
if len(params) == 2:
address, label = params
label = self._decode(label, "base64")
try:
unicode(label, 'utf-8')
except:
raise APIError(17, 'Label is not valid UTF-8 data.')
if len(params) > 2:
raise APIError(0, 'I need either 1 or 2 parameters!')
address = addBMIfNotPresent(address)
self._verifyAddress(address)
# First we must check to see if the address is already in the
# subscriptions list.
queryreturn = sqlQuery('''select * from subscriptions where address=?''', address)
if queryreturn != []:
raise APIError(16, 'You are already subscribed to that address.')
sqlExecute('''INSERT INTO subscriptions VALUES (?,?,?)''',label, address, True)
shared.reloadBroadcastSendersForWhichImWatching()
shared.UISignalQueue.put(('rerenderInboxFromLabels', ''))
shared.UISignalQueue.put(('rerenderSubscriptions', ''))
return 'Added subscription.'
elif method == 'deleteSubscription':
if len(params) != 1:
raise APIError(0, 'I need 1 parameter!')
address, = params
address = addBMIfNotPresent(address)
sqlExecute('''DELETE FROM subscriptions WHERE address=?''', address)
shared.reloadBroadcastSendersForWhichImWatching()
shared.UISignalQueue.put(('rerenderInboxFromLabels', ''))
shared.UISignalQueue.put(('rerenderSubscriptions', ''))
return 'Deleted subscription if it existed.'
elif method == 'listSubscriptions':
queryreturn = sqlQuery('''SELECT label, address, enabled FROM subscriptions''')
data = '{"subscriptions":['
for row in queryreturn:
label, address, enabled = row
label = shared.fixPotentiallyInvalidUTF8Data(label)
if len(data) > 20:
data += ','
data += json.dumps({'label':label.encode('base64'), 'address': address, 'enabled': enabled == 1}, indent=4, separators=(',',': '))
data += ']}'
return data
elif method == 'disseminatePreEncryptedMsg':
# The device issuing this command to PyBitmessage supplies a msg object that has
# already been encrypted but which still needs the POW to be done. PyBitmessage
# accepts this msg object and sends it out to the rest of the Bitmessage network
# as if it had generated the message itself. Please do not yet add this to the
# api doc.
if len(params) != 3:
raise APIError(0, 'I need 3 parameter!')
encryptedPayload, requiredAverageProofOfWorkNonceTrialsPerByte, requiredPayloadLengthExtraBytes = params
encryptedPayload = self._decode(encryptedPayload, "hex")
# Let us do the POW and attach it to the front
target = 2**64 / ((len(encryptedPayload)+requiredPayloadLengthExtraBytes+8) * requiredAverageProofOfWorkNonceTrialsPerByte)
with shared.printLock:
print '(For msg message via API) Doing proof of work. Total required difficulty:', float(requiredAverageProofOfWorkNonceTrialsPerByte) / shared.networkDefaultProofOfWorkNonceTrialsPerByte, 'Required small message difficulty:', float(requiredPayloadLengthExtraBytes) / shared.networkDefaultPayloadLengthExtraBytes
powStartTime = time.time()
initialHash = hashlib.sha512(encryptedPayload).digest()
trialValue, nonce = proofofwork.run(target, initialHash)
with shared.printLock:
print '(For msg message via API) Found proof of work', trialValue, 'Nonce:', nonce
try:
print 'POW took', int(time.time() - powStartTime), 'seconds.', nonce / (time.time() - powStartTime), 'nonce trials per second.'
except:
pass
encryptedPayload = pack('>Q', nonce) + encryptedPayload
toStreamNumber = decodeVarint(encryptedPayload[16:26])[0]
inventoryHash = calculateInventoryHash(encryptedPayload)
objectType = 'msg'
shared.inventory[inventoryHash] = (
objectType, toStreamNumber, encryptedPayload, int(time.time()),'')
shared.inventorySets[toStreamNumber].add(inventoryHash)
with shared.printLock:
print 'Broadcasting inv for msg(API disseminatePreEncryptedMsg command):', inventoryHash.encode('hex')
shared.broadcastToSendDataQueues((
toStreamNumber, 'advertiseobject', inventoryHash))
elif method == 'disseminatePubkey':
# The device issuing this command to PyBitmessage supplies a pubkey object to be
# disseminated to the rest of the Bitmessage network. PyBitmessage accepts this
# pubkey object and sends it out to the rest of the Bitmessage network as if it
# had generated the pubkey object itself. Please do not yet add this to the api
# doc.
if len(params) != 1:
raise APIError(0, 'I need 1 parameter!')
payload, = params
payload = self._decode(payload, "hex")
# Let us do the POW
target = 2 ** 64 / ((len(payload) + shared.networkDefaultPayloadLengthExtraBytes +
8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
print '(For pubkey message via API) Doing proof of work...'
initialHash = hashlib.sha512(payload).digest()
trialValue, nonce = proofofwork.run(target, initialHash)
print '(For pubkey message via API) Found proof of work', trialValue, 'Nonce:', nonce
payload = pack('>Q', nonce) + payload
pubkeyReadPosition = 8 # bypass the nonce
if payload[pubkeyReadPosition:pubkeyReadPosition+4] == '\x00\x00\x00\x00': # if this pubkey uses 8 byte time
pubkeyReadPosition += 8
else:
pubkeyReadPosition += 4
addressVersion, addressVersionLength = decodeVarint(payload[pubkeyReadPosition:pubkeyReadPosition+10])
pubkeyReadPosition += addressVersionLength
pubkeyStreamNumber = decodeVarint(payload[pubkeyReadPosition:pubkeyReadPosition+10])[0]
inventoryHash = calculateInventoryHash(payload)
objectType = 'pubkey'
#todo: support v4 pubkeys
shared.inventory[inventoryHash] = (
objectType, pubkeyStreamNumber, payload, int(time.time()),'')
shared.inventorySets[pubkeyStreamNumber].add(inventoryHash)
with shared.printLock:
print 'broadcasting inv within API command disseminatePubkey with hash:', inventoryHash.encode('hex')
shared.broadcastToSendDataQueues((
streamNumber, 'advertiseobject', inventoryHash))
elif method == 'getMessageDataByDestinationHash' or method == 'getMessageDataByDestinationTag':
# Method will eventually be used by a particular Android app to
# select relevant messages. Do not yet add this to the api
# doc.
if len(params) != 1:
raise APIError(0, 'I need 1 parameter!')
requestedHash, = params
if len(requestedHash) != 32:
raise APIError(19, 'The length of hash should be 32 bytes (encoded in hex thus 64 characters).')
requestedHash = self._decode(requestedHash, "hex")
# This is not a particularly commonly used API function. Before we
# use it we'll need to fill out a field in our inventory database
# which is blank by default (first20bytesofencryptedmessage).
queryreturn = sqlQuery(
'''SELECT hash, payload FROM inventory WHERE tag = '' and objecttype = 'msg' ; ''')
with SqlBulkExecute() as sql:
for row in queryreturn:
hash, payload = row
readPosition = 16 # Nonce length + time length
readPosition += decodeVarint(payload[readPosition:readPosition+10])[1] # Stream Number length
t = (payload[readPosition:readPosition+32],hash)
sql.execute('''UPDATE inventory SET tag=? WHERE hash=?; ''', *t)
queryreturn = sqlQuery('''SELECT payload FROM inventory WHERE tag = ?''',
requestedHash)
data = '{"receivedMessageDatas":['
for row in queryreturn:
payload, = row
if len(data) > 25:
data += ','
data += json.dumps({'data':payload.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getPubkeyByHash':
# Method will eventually be used by a particular Android app to
# retrieve pubkeys. Please do not yet add this to the api docs.
if len(params) != 1:
raise APIError(0, 'I need 1 parameter!')
requestedHash, = params
if len(requestedHash) != 40:
raise APIError(19, 'The length of hash should be 20 bytes (encoded in hex thus 40 characters).')
requestedHash = self._decode(requestedHash, "hex")
queryreturn = sqlQuery('''SELECT transmitdata FROM pubkeys WHERE hash = ? ; ''', requestedHash)
data = '{"pubkey":['
for row in queryreturn:
transmitdata, = row
data += json.dumps({'data':transmitdata.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'clientStatus':
if len(shared.connectedHostsList) == 0:
networkStatus = 'notConnected'
elif len(shared.connectedHostsList) > 0 and not shared.clientHasReceivedIncomingConnections:
networkStatus = 'connectedButHaveNotReceivedIncomingConnections'
else:
networkStatus = 'connectedAndReceivingIncomingConnections'
return json.dumps({'networkConnections':len(shared.connectedHostsList),'numberOfMessagesProcessed':shared.numberOfMessagesProcessed, 'numberOfBroadcastsProcessed':shared.numberOfBroadcastsProcessed, 'numberOfPubkeysProcessed':shared.numberOfPubkeysProcessed, 'networkStatus':networkStatus, 'softwareName':'PyBitmessage','softwareVersion':shared.softwareVersion}, indent=4, separators=(',', ': '))
else:
raise APIError(20, 'Invalid method: %s' % method)
def _dispatch(self, method, params):
self.cookies = []
validuser = self.APIAuthenticateClient()
if not validuser:
time.sleep(2)
return "RPC Username or password incorrect or HTTP header lacks authentication at all."
try:
return self._handle_request(method, params)
except APIError as e:
return str(e)
except Exception as e:
logger.exception(e)
return "API Error 0021: Unexpected API Failure - %s" % str(e)
# This thread, of which there is only one, runs the API.
class singleAPI(threading.Thread):
    """Thread hosting the XML-RPC API server; only one instance runs."""

    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        # Bind to the configured interface/port and serve requests forever.
        listenHost = shared.config.get('bitmessagesettings', 'apiinterface')
        listenPort = shared.config.getint('bitmessagesettings', 'apiport')
        server = SimpleXMLRPCServer(
            (listenHost, listenPort), MySimpleXMLRPCRequestHandler, True, True)
        server.register_introspection_functions()
        server.serve_forever()
# This is a list of current connections (the thread pointers at least)
selfInitiatedConnections = {}
# Scale the proof-of-work difficulty way down for test runs so that objects
# can be generated quickly; only active when the testing flag is set in shared.
if shared.useVeryEasyProofOfWorkForTesting:
    shared.networkDefaultProofOfWorkNonceTrialsPerByte = int(
        shared.networkDefaultProofOfWorkNonceTrialsPerByte / 16)
    shared.networkDefaultPayloadLengthExtraBytes = int(
        shared.networkDefaultPayloadLengthExtraBytes / 7000)
class Main:
def start(self, daemon=False):
shared.daemon = daemon
# is the application already running? If yes then exit.
thisapp = singleton.singleinstance()
signal.signal(signal.SIGINT, helper_generic.signal_handler)
# signal.signal(signal.SIGINT, signal.SIG_DFL)
helper_bootstrap.knownNodes()
# Start the address generation thread
addressGeneratorThread = addressGenerator()
addressGeneratorThread.daemon = True # close the main program even if there are threads left
addressGeneratorThread.start()
# Start the thread that calculates POWs
singleWorkerThread = singleWorker()
singleWorkerThread.daemon = True # close the main program even if there are threads left
singleWorkerThread.start()
# Start the SQL thread
sqlLookup = sqlThread()
sqlLookup.daemon = False # DON'T close the main program even if there are threads left. The closeEvent should command this thread to exit gracefully.
sqlLookup.start()
# Start the cleanerThread
singleCleanerThread = singleCleaner()
singleCleanerThread.daemon = True # close the main program even if there are threads left
singleCleanerThread.start()
shared.reloadMyAddressHashes()
shared.reloadBroadcastSendersForWhichImWatching()
if shared.safeConfigGetBoolean('bitmessagesettings', 'apienabled'):
try:
apiNotifyPath = shared.config.get(
'bitmessagesettings', 'apinotifypath')
except:
apiNotifyPath = ''
if apiNotifyPath != '':
with shared.printLock:
print 'Trying to call', apiNotifyPath
call([apiNotifyPath, "startingUp"])
singleAPIThread = singleAPI()
singleAPIThread.daemon = True # close the main program even if there are threads left
singleAPIThread.start()
connectToStream(1)
singleListenerThread = singleListener()
singleListenerThread.setup(selfInitiatedConnections)
singleListenerThread.daemon = True # close the main program even if there are threads left
singleListenerThread.start()
if daemon == False and shared.safeConfigGetBoolean('bitmessagesettings', 'daemon') == False:
try:
from PyQt4 import QtCore, QtGui
except Exception as err:
print 'PyBitmessage requires PyQt unless you want to run it as a daemon and interact with it using the API. You can download PyQt from http://www.riverbankcomputing.com/software/pyqt/download or by searching Google for \'PyQt Download\'. If you want to run in daemon mode, see https://bitmessage.org/wiki/Daemon'
print 'Error message:', err
os._exit(0)
import bitmessageqt
bitmessageqt.run()
else:
shared.config.remove_option('bitmessagesettings', 'dontconnect')
if daemon:
with shared.printLock:
print 'Running as a daemon. The main program should exit this thread.'
else:
with shared.printLock:
print 'Running as a daemon. You can use Ctrl+C to exit.'
while True:
time.sleep(20)
def stop(self):
with shared.printLock:
print 'Stopping Bitmessage Deamon.'
shared.doCleanShutdown()
def getApiAddress(self):
if not shared.safeConfigGetBoolean('bitmessagesettings', 'apienabled'):
return None
address = shared.config.get('bitmessagesettings', 'apiinterface')
port = shared.config.getint('bitmessagesettings', 'apiport')
return {'address':address,'port':port}
if __name__ == "__main__":
    # Script entry point: build the controller and start in the default
    # (non-daemon) mode; start() decides GUI vs. headless from config.
    mainprogram = Main()
    mainprogram.start()
# So far, the creation of and management of the Bitmessage protocol and this
# client is a one-man operation. Bitcoin tips are quite appreciated.
# 1H5XaDA6fYENLbknwZyjiYXYPQaFjjLX2u
_verifyAddress support v4 addresses
#!/usr/bin/env python2.7
# Copyright (c) 2012 Jonathan Warren
# Copyright (c) 2012 The Bitmessage developers
# Distributed under the MIT/X11 software license. See the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Right now, PyBitmessage only support connecting to stream 1. It doesn't
# yet contain logic to expand into further streams.
# The software version variable is now held in shared.py
import signal # Used to capture a Ctrl-C keypress so that Bitmessage can shutdown gracefully.
# The next 3 are used for the API
from SimpleXMLRPCServer import *
import json
import singleton
import os
# OSX python version check
import sys
if sys.platform == 'darwin':
if float("{1}.{2}".format(*sys.version_info)) < 7.5:
print "You should use python 2.7.5 or greater."
print "Your version: {0}.{1}.{2}".format(*sys.version_info)
sys.exit(0)
# Classes
from helper_sql import *
from class_sqlThread import *
from class_singleCleaner import *
from class_singleWorker import *
from class_outgoingSynSender import *
from class_singleListener import *
from class_addressGenerator import *
from debug import logger
# Helper Functions
import helper_bootstrap
import proofofwork
import sys
if sys.platform == 'darwin':
    # Duplicate of the earlier OSX interpreter check, repeated here so the
    # failure is also logged once the logger is importable.  Tuple comparison
    # avoids the float mis-parse of micro versions >= 10 (e.g. 2.7.10 -> 7.1).
    if sys.version_info < (2, 7, 5):
        logger.critical("You should use python 2.7.5 or greater. Your version: %s", "{0}.{1}.{2}".format(*sys.version_info))
        sys.exit(0)
def connectToStream(streamNumber):
    """Join a stream: rebuild its in-memory inventory set from the database
    and spawn the outgoing connection threads for it."""
    selfInitiatedConnections[streamNumber] = {}
    shared.inventorySets[streamNumber] = set()
    rows = sqlQuery('''SELECT hash FROM inventory WHERE streamnumber=?''', streamNumber)
    for row in rows:
        shared.inventorySets[streamNumber].add(row[0])
    # Windows limits half-open TCP connections far more aggressively.
    halfOpenLimit = 9 if sys.platform.startswith('win') else 32
    for _ in range(halfOpenLimit):
        sender = outgoingSynSender()
        sender.setup(streamNumber, selfInitiatedConnections)
        sender.start()
class APIError(Exception):
    """Exception raised by API handlers; carries a numeric code and message."""

    def __init__(self, error_number, error_message):
        self.error_number = error_number
        self.error_message = error_message

    def __str__(self):
        details = (self.error_number, self.error_message)
        return "API Error %04i: %s" % details
# This is one of several classes that constitute the API
# This class was written by Vaibhav Bhatia. Modified by Jonathan Warren (Atheros).
# http://code.activestate.com/recipes/501148-xmlrpc-serverclient-which-does-cookie-handling-and/
class MySimpleXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
    def do_POST(self):
        """Handle an HTTP POST as an XML-RPC call.

        Copied from the stdlib SimpleXMLRPCRequestHandler.do_POST, with one
        modification: cookies collected in self.cookies during dispatch are
        emitted as Set-Cookie headers on the response.
        """
        # Handles the HTTP POST request.
        # Attempts to interpret all HTTP POST requests as XML-RPC calls,
        # which are forwarded to the server's _dispatch method for handling.
        # Note: this method is the same as in SimpleXMLRPCRequestHandler,
        # just hacked to handle cookies
        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return
        try:
            # Get arguments by reading body of request.
            # We read this in chunks to avoid straining
            # socket.read(); around the 10 or 15Mb mark, some platforms
            # begin to have problems (bug #792570).
            max_chunk_size = 10 * 1024 * 1024
            size_remaining = int(self.headers["content-length"])
            L = []
            while size_remaining:
                chunk_size = min(size_remaining, max_chunk_size)
                L.append(self.rfile.read(chunk_size))
                size_remaining -= len(L[-1])
            data = ''.join(L)
            # In previous versions of SimpleXMLRPCServer, _dispatch
            # could be overridden in this class, instead of in
            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
            # check to see if a subclass implements _dispatch and dispatch
            # using that method if present.
            response = self.server._marshaled_dispatch(
                data, getattr(self, '_dispatch', None)
            )
        except: # This should only happen if the module is buggy
            # internal error, report as HTTP server error
            self.send_response(500)
            self.end_headers()
        else:
            # got a valid XML RPC response
            self.send_response(200)
            self.send_header("Content-type", "text/xml")
            self.send_header("Content-length", str(len(response)))
            # HACK :start -> sends cookies here
            if self.cookies:
                for cookie in self.cookies:
                    self.send_header('Set-Cookie', cookie.output(header=''))
            # HACK :end
            self.end_headers()
            self.wfile.write(response)
            # shut down the connection
            self.wfile.flush()
            self.connection.shutdown(1)
def APIAuthenticateClient(self):
if 'Authorization' in self.headers:
# handle Basic authentication
(enctype, encstr) = self.headers.get('Authorization').split()
(emailid, password) = encstr.decode('base64').split(':')
if emailid == shared.config.get('bitmessagesettings', 'apiusername') and password == shared.config.get('bitmessagesettings', 'apipassword'):
return True
else:
return False
else:
logger.warn('Authentication failed because header lacks Authentication field')
time.sleep(2)
return False
return False
def _decode(self, text, decode_type):
try:
return text.decode(decode_type)
except TypeError as e:
raise APIError(22, "Decode error - " + str(e))
def _verifyAddress(self, address):
status, addressVersionNumber, streamNumber, ripe = decodeAddress(address)
if status != 'success':
logger.warn('API Error 0007: Could not decode address %s. Status: %s.', address, status)
if status == 'checksumfailed':
raise APIError(8, 'Checksum failed for address: ' + address)
if status == 'invalidcharacters':
raise APIError(9, 'Invalid characters in address: ' + address)
if status == 'versiontoohigh':
raise APIError(10, 'Address version number too high (or zero) in address: ' + address)
raise APIError(7, 'Could not decode address: ' + address + ' : ' + status)
if addressVersionNumber < 2 or addressVersionNumber > 4:
raise APIError(11, 'The address version number currently must be 2, 3 or 4. Others aren\'t supported. Check the address.')
if streamNumber != 1:
raise APIError(12, 'The stream number must be 1. Others aren\'t supported. Check the address.')
return (status, addressVersionNumber, streamNumber, ripe)
def _handle_request(self, method, params):
if method == 'helloWorld':
(a, b) = params
return a + '-' + b
elif method == 'add':
(a, b) = params
return a + b
elif method == 'statusBar':
message, = params
shared.UISignalQueue.put(('updateStatusBar', message))
elif method == 'listAddresses' or method == 'listAddresses2':
data = '{"addresses":['
configSections = shared.config.sections()
for addressInKeysFile in configSections:
if addressInKeysFile != 'bitmessagesettings':
status, addressVersionNumber, streamNumber, hash = decodeAddress(
addressInKeysFile)
data
if len(data) > 20:
data += ','
if shared.config.has_option(addressInKeysFile, 'chan'):
chan = shared.config.getboolean(addressInKeysFile, 'chan')
else:
chan = False
label = shared.config.get(addressInKeysFile, 'label')
if method == 'listAddresses2':
label = label.encode('base64')
data += json.dumps({'label': label, 'address': addressInKeysFile, 'stream':
streamNumber, 'enabled': shared.config.getboolean(addressInKeysFile, 'enabled'), 'chan': chan}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'listAddressBookEntries' or method == 'listAddressbook': # the listAddressbook alias should be removed eventually.
queryreturn = sqlQuery('''SELECT label, address from addressbook''')
data = '{"addresses":['
for row in queryreturn:
label, address = row
label = shared.fixPotentiallyInvalidUTF8Data(label)
if len(data) > 20:
data += ','
data += json.dumps({'label':label.encode('base64'), 'address': address}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'addAddressBookEntry' or method == 'addAddressbook': # the addAddressbook alias should be deleted eventually.
if len(params) != 2:
raise APIError(0, "I need label and address")
address, label = params
label = self._decode(label, "base64")
address = addBMIfNotPresent(address)
self._verifyAddress(address)
queryreturn = sqlQuery("SELECT address FROM addressbook WHERE address=?", address)
if queryreturn != []:
raise APIError(16, 'You already have this address in your address book.')
sqlExecute("INSERT INTO addressbook VALUES(?,?)", label, address)
shared.UISignalQueue.put(('rerenderInboxFromLabels',''))
shared.UISignalQueue.put(('rerenderSentToLabels',''))
shared.UISignalQueue.put(('rerenderAddressBook',''))
return "Added address %s to address book" % address
elif method == 'deleteAddressBookEntry' or method == 'deleteAddressbook': # The deleteAddressbook alias should be deleted eventually.
if len(params) != 1:
raise APIError(0, "I need an address")
address, = params
address = addBMIfNotPresent(address)
self._verifyAddress(address)
sqlExecute('DELETE FROM addressbook WHERE address=?', address)
shared.UISignalQueue.put(('rerenderInboxFromLabels',''))
shared.UISignalQueue.put(('rerenderSentToLabels',''))
shared.UISignalQueue.put(('rerenderAddressBook',''))
return "Deleted address book entry for %s if it existed" % address
elif method == 'createRandomAddress':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
elif len(params) == 1:
label, = params
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 2:
label, eighteenByteRipe = params
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 3:
label, eighteenByteRipe, totalDifficulty = params
nonceTrialsPerByte = int(
shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 4:
label, eighteenByteRipe, totalDifficulty, smallMessageDifficulty = params
nonceTrialsPerByte = int(
shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = int(
shared.networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
else:
raise APIError(0, 'Too many parameters!')
label = self._decode(label, "base64")
try:
unicode(label, 'utf-8')
except:
raise APIError(17, 'Label is not valid UTF-8 data.')
shared.apiAddressGeneratorReturnQueue.queue.clear()
streamNumberForAddress = 1
shared.addressGeneratorQueue.put((
'createRandomAddress', 4, streamNumberForAddress, label, 1, "", eighteenByteRipe, nonceTrialsPerByte, payloadLengthExtraBytes))
return shared.apiAddressGeneratorReturnQueue.get()
elif method == 'createDeterministicAddresses':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
elif len(params) == 1:
passphrase, = params
numberOfAddresses = 1
addressVersionNumber = 0
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 2:
passphrase, numberOfAddresses = params
addressVersionNumber = 0
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 3:
passphrase, numberOfAddresses, addressVersionNumber = params
streamNumber = 0
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 4:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber = params
eighteenByteRipe = False
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 5:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe = params
nonceTrialsPerByte = shared.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte')
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 6:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe, totalDifficulty = params
nonceTrialsPerByte = int(
shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = shared.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes')
elif len(params) == 7:
passphrase, numberOfAddresses, addressVersionNumber, streamNumber, eighteenByteRipe, totalDifficulty, smallMessageDifficulty = params
nonceTrialsPerByte = int(
shared.networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
payloadLengthExtraBytes = int(
shared.networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
else:
raise APIError(0, 'Too many parameters!')
if len(passphrase) == 0:
raise APIError(1, 'The specified passphrase is blank.')
if not isinstance(eighteenByteRipe, bool):
raise APIError(23, 'Bool expected in eighteenByteRipe, saw %s instead' % type(eighteenByteRipe))
passphrase = self._decode(passphrase, "base64")
if addressVersionNumber == 0: # 0 means "just use the proper addressVersionNumber"
addressVersionNumber = 4
if addressVersionNumber != 3 and addressVersionNumber != 4:
raise APIError(2,'The address version number currently must be 3, 4, or 0 (which means auto-select). ' + addressVersionNumber + ' isn\'t supported.')
if streamNumber == 0: # 0 means "just use the most available stream"
streamNumber = 1
if streamNumber != 1:
raise APIError(3,'The stream number must be 1 (or 0 which means auto-select). Others aren\'t supported.')
if numberOfAddresses == 0:
raise APIError(4, 'Why would you ask me to generate 0 addresses for you?')
if numberOfAddresses > 999:
raise APIError(5, 'You have (accidentally?) specified too many addresses to make. Maximum 999. This check only exists to prevent mischief; if you really want to create more addresses than this, contact the Bitmessage developers and we can modify the check or you can do it yourself by searching the source code for this message.')
shared.apiAddressGeneratorReturnQueue.queue.clear()
logger.debug('Requesting that the addressGenerator create %s addresses.', numberOfAddresses)
shared.addressGeneratorQueue.put(
('createDeterministicAddresses', addressVersionNumber, streamNumber,
'unused API address', numberOfAddresses, passphrase, eighteenByteRipe, nonceTrialsPerByte, payloadLengthExtraBytes))
data = '{"addresses":['
queueReturn = shared.apiAddressGeneratorReturnQueue.get()
for item in queueReturn:
if len(data) > 20:
data += ','
data += "\"" + item + "\""
data += ']}'
return data
elif method == 'getDeterministicAddress':
if len(params) != 3:
raise APIError(0, 'I need exactly 3 parameters.')
passphrase, addressVersionNumber, streamNumber = params
numberOfAddresses = 1
eighteenByteRipe = False
if len(passphrase) == 0:
raise APIError(1, 'The specified passphrase is blank.')
passphrase = self._decode(passphrase, "base64")
if addressVersionNumber != 3 and addressVersionNumber != 4:
raise APIError(2, 'The address version number currently must be 3 or 4. ' + addressVersionNumber + ' isn\'t supported.')
if streamNumber != 1:
raise APIError(3, ' The stream number must be 1. Others aren\'t supported.')
shared.apiAddressGeneratorReturnQueue.queue.clear()
logger.debug('Requesting that the addressGenerator create %s addresses.', numberOfAddresses)
shared.addressGeneratorQueue.put(
('getDeterministicAddress', addressVersionNumber,
streamNumber, 'unused API address', numberOfAddresses, passphrase, eighteenByteRipe))
return shared.apiAddressGeneratorReturnQueue.get()
elif method == 'getAllInboxMessages':
queryreturn = sqlQuery(
'''SELECT msgid, toaddress, fromaddress, subject, received, message, encodingtype, read FROM inbox where folder='inbox' ORDER BY received''')
data = '{"inboxMessages":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, received, message, encodingtype, read = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
if len(data) > 25:
data += ','
data += json.dumps({'msgid': msgid.encode('hex'), 'toAddress': toAddress, 'fromAddress': fromAddress, 'subject': subject.encode(
'base64'), 'message': message.encode('base64'), 'encodingType': encodingtype, 'receivedTime': received, 'read': read}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getAllInboxMessageIds' or method == 'getAllInboxMessageIDs':
queryreturn = sqlQuery(
'''SELECT msgid FROM inbox where folder='inbox' ORDER BY received''')
data = '{"inboxMessageIds":['
for row in queryreturn:
msgid = row[0]
if len(data) > 25:
data += ','
data += json.dumps({'msgid': msgid.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getInboxMessageById' or method == 'getInboxMessageByID':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
elif len(params) == 1:
msgid = self._decode(params[0], "hex")
elif len(params) >= 2:
msgid = self._decode(params[0], "hex")
readStatus = params[1]
if not isinstance(readStatus, bool):
raise APIError(23, 'Bool expected in readStatus, saw %s instead.' % type(readStatus))
queryreturn = sqlQuery('''SELECT read FROM inbox WHERE msgid=?''', msgid)
# UPDATE is slow, only update if status is different
if queryreturn != [] and (queryreturn[0][0] == 1) != readStatus:
sqlExecute('''UPDATE inbox set read = ? WHERE msgid=?''', readStatus, msgid)
queryreturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, received, message, encodingtype, read FROM inbox WHERE msgid=?''', msgid)
data = '{"inboxMessage":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, received, message, encodingtype, read = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'receivedTime':received, 'read': read}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getAllSentMessages':
queryreturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, lastactiontime, message, encodingtype, status, ackdata FROM sent where folder='sent' ORDER BY lastactiontime''')
data = '{"sentMessages":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, lastactiontime, message, encodingtype, status, ackdata = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
if len(data) > 25:
data += ','
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'lastActionTime':lastactiontime, 'status':status, 'ackData':ackdata.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getAllSentMessageIds' or method == 'getAllSentMessageIDs':
queryreturn = sqlQuery('''SELECT msgid FROM sent where folder='sent' ORDER BY lastactiontime''')
data = '{"sentMessageIds":['
for row in queryreturn:
msgid = row[0]
if len(data) > 25:
data += ','
data += json.dumps({'msgid':msgid.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getInboxMessagesByReceiver' or method == 'getInboxMessagesByAddress': #after some time getInboxMessagesByAddress should be removed
if len(params) == 0:
raise APIError(0, 'I need parameters!')
toAddress = params[0]
queryReturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, received, message, encodingtype FROM inbox WHERE folder='inbox' AND toAddress=?''', toAddress)
data = '{"inboxMessages":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, received, message, encodingtype = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
if len(data) > 25:
data += ','
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'receivedTime':received}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getSentMessageById' or method == 'getSentMessageByID':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
msgid = self._decode(params[0], "hex")
queryreturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, lastactiontime, message, encodingtype, status, ackdata FROM sent WHERE msgid=?''', msgid)
data = '{"sentMessage":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, lastactiontime, message, encodingtype, status, ackdata = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'lastActionTime':lastactiontime, 'status':status, 'ackData':ackdata.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getSentMessagesByAddress' or method == 'getSentMessagesBySender':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
fromAddress = params[0]
queryreturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, lastactiontime, message, encodingtype, status, ackdata FROM sent WHERE folder='sent' AND fromAddress=? ORDER BY lastactiontime''',
fromAddress)
data = '{"sentMessages":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, lastactiontime, message, encodingtype, status, ackdata = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
if len(data) > 25:
data += ','
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'lastActionTime':lastactiontime, 'status':status, 'ackData':ackdata.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getSentMessageByAckData':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
ackData = self._decode(params[0], "hex")
queryreturn = sqlQuery('''SELECT msgid, toaddress, fromaddress, subject, lastactiontime, message, encodingtype, status, ackdata FROM sent WHERE ackdata=?''',
ackData)
data = '{"sentMessage":['
for row in queryreturn:
msgid, toAddress, fromAddress, subject, lastactiontime, message, encodingtype, status, ackdata = row
subject = shared.fixPotentiallyInvalidUTF8Data(subject)
message = shared.fixPotentiallyInvalidUTF8Data(message)
data += json.dumps({'msgid':msgid.encode('hex'), 'toAddress':toAddress, 'fromAddress':fromAddress, 'subject':subject.encode('base64'), 'message':message.encode('base64'), 'encodingType':encodingtype, 'lastActionTime':lastactiontime, 'status':status, 'ackData':ackdata.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'trashMessage':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
msgid = self._decode(params[0], "hex")
# Trash if in inbox table
helper_inbox.trash(msgid)
# Trash if in sent table
sqlExecute('''UPDATE sent SET folder='trash' WHERE msgid=?''', msgid)
return 'Trashed message (assuming message existed).'
elif method == 'trashInboxMessage':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
msgid = self._decode(params[0], "hex")
helper_inbox.trash(msgid)
return 'Trashed inbox message (assuming message existed).'
elif method == 'trashSentMessage':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
msgid = self._decode(params[0], "hex")
sqlExecute('''UPDATE sent SET folder='trash' WHERE msgid=?''', msgid)
return 'Trashed sent message (assuming message existed).'
elif method == 'trashSentMessageByAckData':
# This API method should only be used when msgid is not available
if len(params) == 0:
raise APIError(0, 'I need parameters!')
ackdata = self._decode(params[0], "hex")
sqlExecute('''UPDATE sent SET folder='trash' WHERE ackdata=?''',
ackdata)
return 'Trashed sent message (assuming message existed).'
elif method == 'sendMessage':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
elif len(params) == 4:
toAddress, fromAddress, subject, message = params
encodingType = 2
elif len(params) == 5:
toAddress, fromAddress, subject, message, encodingType = params
if encodingType != 2:
raise APIError(6, 'The encoding type must be 2 because that is the only one this program currently supports.')
subject = self._decode(subject, "base64")
message = self._decode(message, "base64")
toAddress = addBMIfNotPresent(toAddress)
fromAddress = addBMIfNotPresent(fromAddress)
status, addressVersionNumber, streamNumber, toRipe = self._verifyAddress(toAddress)
self._verifyAddress(fromAddress)
try:
fromAddressEnabled = shared.config.getboolean(
fromAddress, 'enabled')
except:
raise APIError(13, 'Could not find your fromAddress in the keys.dat file.')
if not fromAddressEnabled:
raise APIError(14, 'Your fromAddress is disabled. Cannot send.')
ackdata = OpenSSL.rand(32)
t = ('', toAddress, toRipe, fromAddress, subject, message, ackdata, int(
time.time()), 'msgqueued', 1, 1, 'sent', 2)
helper_sent.insert(t)
toLabel = ''
queryreturn = sqlQuery('''select label from addressbook where address=?''', toAddress)
if queryreturn != []:
for row in queryreturn:
toLabel, = row
# apiSignalQueue.put(('displayNewSentMessage',(toAddress,toLabel,fromAddress,subject,message,ackdata)))
shared.UISignalQueue.put(('displayNewSentMessage', (
toAddress, toLabel, fromAddress, subject, message, ackdata)))
shared.workerQueue.put(('sendmessage', toAddress))
return ackdata.encode('hex')
elif method == 'sendBroadcast':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
if len(params) == 3:
fromAddress, subject, message = params
encodingType = 2
elif len(params) == 4:
fromAddress, subject, message, encodingType = params
if encodingType != 2:
raise APIError(6, 'The encoding type must be 2 because that is the only one this program currently supports.')
subject = self._decode(subject, "base64")
message = self._decode(message, "base64")
fromAddress = addBMIfNotPresent(fromAddress)
self._verifyAddress(fromAddress)
try:
fromAddressEnabled = shared.config.getboolean(
fromAddress, 'enabled')
except:
raise APIError(13, 'could not find your fromAddress in the keys.dat file.')
ackdata = OpenSSL.rand(32)
toAddress = '[Broadcast subscribers]'
ripe = ''
t = ('', toAddress, ripe, fromAddress, subject, message, ackdata, int(
time.time()), 'broadcastqueued', 1, 1, 'sent', 2)
helper_sent.insert(t)
toLabel = '[Broadcast subscribers]'
shared.UISignalQueue.put(('displayNewSentMessage', (
toAddress, toLabel, fromAddress, subject, message, ackdata)))
shared.workerQueue.put(('sendbroadcast', ''))
return ackdata.encode('hex')
elif method == 'getStatus':
if len(params) != 1:
raise APIError(0, 'I need one parameter!')
ackdata, = params
if len(ackdata) != 64:
raise APIError(15, 'The length of ackData should be 32 bytes (encoded in hex thus 64 characters).')
ackdata = self._decode(ackdata, "hex")
queryreturn = sqlQuery(
'''SELECT status FROM sent where ackdata=?''',
ackdata)
if queryreturn == []:
return 'notfound'
for row in queryreturn:
status, = row
return status
elif method == 'addSubscription':
if len(params) == 0:
raise APIError(0, 'I need parameters!')
if len(params) == 1:
address, = params
label == ''
if len(params) == 2:
address, label = params
label = self._decode(label, "base64")
try:
unicode(label, 'utf-8')
except:
raise APIError(17, 'Label is not valid UTF-8 data.')
if len(params) > 2:
raise APIError(0, 'I need either 1 or 2 parameters!')
address = addBMIfNotPresent(address)
self._verifyAddress(address)
# First we must check to see if the address is already in the
# subscriptions list.
queryreturn = sqlQuery('''select * from subscriptions where address=?''', address)
if queryreturn != []:
raise APIError(16, 'You are already subscribed to that address.')
sqlExecute('''INSERT INTO subscriptions VALUES (?,?,?)''',label, address, True)
shared.reloadBroadcastSendersForWhichImWatching()
shared.UISignalQueue.put(('rerenderInboxFromLabels', ''))
shared.UISignalQueue.put(('rerenderSubscriptions', ''))
return 'Added subscription.'
elif method == 'deleteSubscription':
if len(params) != 1:
raise APIError(0, 'I need 1 parameter!')
address, = params
address = addBMIfNotPresent(address)
sqlExecute('''DELETE FROM subscriptions WHERE address=?''', address)
shared.reloadBroadcastSendersForWhichImWatching()
shared.UISignalQueue.put(('rerenderInboxFromLabels', ''))
shared.UISignalQueue.put(('rerenderSubscriptions', ''))
return 'Deleted subscription if it existed.'
elif method == 'listSubscriptions':
queryreturn = sqlQuery('''SELECT label, address, enabled FROM subscriptions''')
data = '{"subscriptions":['
for row in queryreturn:
label, address, enabled = row
label = shared.fixPotentiallyInvalidUTF8Data(label)
if len(data) > 20:
data += ','
data += json.dumps({'label':label.encode('base64'), 'address': address, 'enabled': enabled == 1}, indent=4, separators=(',',': '))
data += ']}'
return data
elif method == 'disseminatePreEncryptedMsg':
# The device issuing this command to PyBitmessage supplies a msg object that has
# already been encrypted but which still needs the POW to be done. PyBitmessage
# accepts this msg object and sends it out to the rest of the Bitmessage network
# as if it had generated the message itself. Please do not yet add this to the
# api doc.
if len(params) != 3:
raise APIError(0, 'I need 3 parameter!')
encryptedPayload, requiredAverageProofOfWorkNonceTrialsPerByte, requiredPayloadLengthExtraBytes = params
encryptedPayload = self._decode(encryptedPayload, "hex")
# Let us do the POW and attach it to the front
target = 2**64 / ((len(encryptedPayload)+requiredPayloadLengthExtraBytes+8) * requiredAverageProofOfWorkNonceTrialsPerByte)
with shared.printLock:
print '(For msg message via API) Doing proof of work. Total required difficulty:', float(requiredAverageProofOfWorkNonceTrialsPerByte) / shared.networkDefaultProofOfWorkNonceTrialsPerByte, 'Required small message difficulty:', float(requiredPayloadLengthExtraBytes) / shared.networkDefaultPayloadLengthExtraBytes
powStartTime = time.time()
initialHash = hashlib.sha512(encryptedPayload).digest()
trialValue, nonce = proofofwork.run(target, initialHash)
with shared.printLock:
print '(For msg message via API) Found proof of work', trialValue, 'Nonce:', nonce
try:
print 'POW took', int(time.time() - powStartTime), 'seconds.', nonce / (time.time() - powStartTime), 'nonce trials per second.'
except:
pass
encryptedPayload = pack('>Q', nonce) + encryptedPayload
toStreamNumber = decodeVarint(encryptedPayload[16:26])[0]
inventoryHash = calculateInventoryHash(encryptedPayload)
objectType = 'msg'
shared.inventory[inventoryHash] = (
objectType, toStreamNumber, encryptedPayload, int(time.time()),'')
shared.inventorySets[toStreamNumber].add(inventoryHash)
with shared.printLock:
print 'Broadcasting inv for msg(API disseminatePreEncryptedMsg command):', inventoryHash.encode('hex')
shared.broadcastToSendDataQueues((
toStreamNumber, 'advertiseobject', inventoryHash))
elif method == 'disseminatePubkey':
# The device issuing this command to PyBitmessage supplies a pubkey object to be
# disseminated to the rest of the Bitmessage network. PyBitmessage accepts this
# pubkey object and sends it out to the rest of the Bitmessage network as if it
# had generated the pubkey object itself. Please do not yet add this to the api
# doc.
if len(params) != 1:
raise APIError(0, 'I need 1 parameter!')
payload, = params
payload = self._decode(payload, "hex")
# Let us do the POW
target = 2 ** 64 / ((len(payload) + shared.networkDefaultPayloadLengthExtraBytes +
8) * shared.networkDefaultProofOfWorkNonceTrialsPerByte)
print '(For pubkey message via API) Doing proof of work...'
initialHash = hashlib.sha512(payload).digest()
trialValue, nonce = proofofwork.run(target, initialHash)
print '(For pubkey message via API) Found proof of work', trialValue, 'Nonce:', nonce
payload = pack('>Q', nonce) + payload
pubkeyReadPosition = 8 # bypass the nonce
if payload[pubkeyReadPosition:pubkeyReadPosition+4] == '\x00\x00\x00\x00': # if this pubkey uses 8 byte time
pubkeyReadPosition += 8
else:
pubkeyReadPosition += 4
addressVersion, addressVersionLength = decodeVarint(payload[pubkeyReadPosition:pubkeyReadPosition+10])
pubkeyReadPosition += addressVersionLength
pubkeyStreamNumber = decodeVarint(payload[pubkeyReadPosition:pubkeyReadPosition+10])[0]
inventoryHash = calculateInventoryHash(payload)
objectType = 'pubkey'
#todo: support v4 pubkeys
shared.inventory[inventoryHash] = (
objectType, pubkeyStreamNumber, payload, int(time.time()),'')
shared.inventorySets[pubkeyStreamNumber].add(inventoryHash)
with shared.printLock:
print 'broadcasting inv within API command disseminatePubkey with hash:', inventoryHash.encode('hex')
shared.broadcastToSendDataQueues((
streamNumber, 'advertiseobject', inventoryHash))
elif method == 'getMessageDataByDestinationHash' or method == 'getMessageDataByDestinationTag':
# Method will eventually be used by a particular Android app to
# select relevant messages. Do not yet add this to the api
# doc.
if len(params) != 1:
raise APIError(0, 'I need 1 parameter!')
requestedHash, = params
if len(requestedHash) != 32:
raise APIError(19, 'The length of hash should be 32 bytes (encoded in hex thus 64 characters).')
requestedHash = self._decode(requestedHash, "hex")
# This is not a particularly commonly used API function. Before we
# use it we'll need to fill out a field in our inventory database
# which is blank by default (first20bytesofencryptedmessage).
queryreturn = sqlQuery(
'''SELECT hash, payload FROM inventory WHERE tag = '' and objecttype = 'msg' ; ''')
with SqlBulkExecute() as sql:
for row in queryreturn:
hash, payload = row
readPosition = 16 # Nonce length + time length
readPosition += decodeVarint(payload[readPosition:readPosition+10])[1] # Stream Number length
t = (payload[readPosition:readPosition+32],hash)
sql.execute('''UPDATE inventory SET tag=? WHERE hash=?; ''', *t)
queryreturn = sqlQuery('''SELECT payload FROM inventory WHERE tag = ?''',
requestedHash)
data = '{"receivedMessageDatas":['
for row in queryreturn:
payload, = row
if len(data) > 25:
data += ','
data += json.dumps({'data':payload.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'getPubkeyByHash':
# Method will eventually be used by a particular Android app to
# retrieve pubkeys. Please do not yet add this to the api docs.
if len(params) != 1:
raise APIError(0, 'I need 1 parameter!')
requestedHash, = params
if len(requestedHash) != 40:
raise APIError(19, 'The length of hash should be 20 bytes (encoded in hex thus 40 characters).')
requestedHash = self._decode(requestedHash, "hex")
queryreturn = sqlQuery('''SELECT transmitdata FROM pubkeys WHERE hash = ? ; ''', requestedHash)
data = '{"pubkey":['
for row in queryreturn:
transmitdata, = row
data += json.dumps({'data':transmitdata.encode('hex')}, indent=4, separators=(',', ': '))
data += ']}'
return data
elif method == 'clientStatus':
if len(shared.connectedHostsList) == 0:
networkStatus = 'notConnected'
elif len(shared.connectedHostsList) > 0 and not shared.clientHasReceivedIncomingConnections:
networkStatus = 'connectedButHaveNotReceivedIncomingConnections'
else:
networkStatus = 'connectedAndReceivingIncomingConnections'
return json.dumps({'networkConnections':len(shared.connectedHostsList),'numberOfMessagesProcessed':shared.numberOfMessagesProcessed, 'numberOfBroadcastsProcessed':shared.numberOfBroadcastsProcessed, 'numberOfPubkeysProcessed':shared.numberOfPubkeysProcessed, 'networkStatus':networkStatus, 'softwareName':'PyBitmessage','softwareVersion':shared.softwareVersion}, indent=4, separators=(',', ': '))
else:
raise APIError(20, 'Invalid method: %s' % method)
def _dispatch(self, method, params):
    """Entry point for every incoming XML-RPC call.

    Authenticates the HTTP client first, then delegates to
    _handle_request. Returns the handler's result, or an error string
    when authentication fails or the handler raises.
    """
    self.cookies = []
    # Guard clause: reject unauthenticated callers. The short sleep
    # slows down brute-force credential guessing.
    if not self.APIAuthenticateClient():
        time.sleep(2)
        return "RPC Username or password incorrect or HTTP header lacks authentication at all."
    try:
        result = self._handle_request(method, params)
    except APIError as e:
        # Known, user-facing API errors are reported as their message.
        return str(e)
    except Exception as e:
        # Anything unexpected is logged with traceback and reported
        # generically to the caller.
        logger.exception(e)
        return "API Error 0021: Unexpected API Failure - %s" % str(e)
    return result
# This thread, of which there is only one, runs the API.
class singleAPI(threading.Thread):
    """Daemon thread hosting the XML-RPC API server until shutdown."""

    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        # Bind to the interface/port the user configured in keys.dat.
        interface = shared.config.get('bitmessagesettings', 'apiinterface')
        port = shared.config.getint('bitmessagesettings', 'apiport')
        server = SimpleXMLRPCServer((interface, port),
                                    MySimpleXMLRPCRequestHandler, True, True)
        server.register_introspection_functions()
        # Blocks forever; the thread dies with the process (daemon=True
        # is set by the caller that starts this thread).
        server.serve_forever()
# This is a list of current connections (the thread pointers at least)
selfInitiatedConnections = {}

# When the very-easy-POW test flag is set, scale the network default
# proof-of-work parameters way down so messages can be generated quickly
# during testing. The divisors (16, 7000) mirror the inverse of the
# normal network defaults.
if shared.useVeryEasyProofOfWorkForTesting:
    shared.networkDefaultProofOfWorkNonceTrialsPerByte = int(
        shared.networkDefaultProofOfWorkNonceTrialsPerByte / 16)
    shared.networkDefaultPayloadLengthExtraBytes = int(
        shared.networkDefaultPayloadLengthExtraBytes / 7000)
class Main:
    """Application driver.

    start() boots every worker thread in dependency order and then either
    launches the Qt GUI or idles as a daemon; stop() performs a clean
    shutdown; getApiAddress() reports where the XML-RPC API listens.
    """

    def start(self, daemon=False):
        """Boot the client.

        daemon -- when True (or when 'daemon' is set in the config), run
        head-less instead of starting the PyQt GUI.
        """
        shared.daemon = daemon
        # is the application already running?  If yes then exit.
        thisapp = singleton.singleinstance()
        signal.signal(signal.SIGINT, helper_generic.signal_handler)
        # signal.signal(signal.SIGINT, signal.SIG_DFL)
        # Load the list of known peer nodes from disk (or defaults).
        helper_bootstrap.knownNodes()
        # Start the address generation thread
        addressGeneratorThread = addressGenerator()
        addressGeneratorThread.daemon = True  # close the main program even if there are threads left
        addressGeneratorThread.start()
        # Start the thread that calculates POWs
        singleWorkerThread = singleWorker()
        singleWorkerThread.daemon = True  # close the main program even if there are threads left
        singleWorkerThread.start()
        # Start the SQL thread
        sqlLookup = sqlThread()
        sqlLookup.daemon = False  # DON'T close the main program even if there are threads left. The closeEvent should command this thread to exit gracefully.
        sqlLookup.start()
        # Start the cleanerThread
        singleCleanerThread = singleCleaner()
        singleCleanerThread.daemon = True  # close the main program even if there are threads left
        singleCleanerThread.start()
        # Both reloads depend on the SQL thread being up.
        shared.reloadMyAddressHashes()
        shared.reloadBroadcastSendersForWhichImWatching()
        if shared.safeConfigGetBoolean('bitmessagesettings', 'apienabled'):
            # Optionally run a user-supplied hook script on startup.
            try:
                apiNotifyPath = shared.config.get(
                    'bitmessagesettings', 'apinotifypath')
            except:
                apiNotifyPath = ''
            if apiNotifyPath != '':
                with shared.printLock:
                    print 'Trying to call', apiNotifyPath
                call([apiNotifyPath, "startingUp"])
            singleAPIThread = singleAPI()
            singleAPIThread.daemon = True  # close the main program even if there are threads left
            singleAPIThread.start()
        # Open outgoing connections on stream 1, then start accepting
        # incoming connections.
        connectToStream(1)
        singleListenerThread = singleListener()
        singleListenerThread.setup(selfInitiatedConnections)
        singleListenerThread.daemon = True  # close the main program even if there are threads left
        singleListenerThread.start()
        # GUI mode unless daemon was requested on the command line or in
        # the config file.
        if daemon == False and shared.safeConfigGetBoolean('bitmessagesettings', 'daemon') == False:
            try:
                from PyQt4 import QtCore, QtGui
            except Exception as err:
                print 'PyBitmessage requires PyQt unless you want to run it as a daemon and interact with it using the API. You can download PyQt from http://www.riverbankcomputing.com/software/pyqt/download   or by searching Google for \'PyQt Download\'. If you want to run in daemon mode, see https://bitmessage.org/wiki/Daemon'
                print 'Error message:', err
                os._exit(0)
            import bitmessageqt
            bitmessageqt.run()
        else:
            # Daemon mode must not honour the GUI-only 'dontconnect' flag.
            shared.config.remove_option('bitmessagesettings', 'dontconnect')
            if daemon:
                with shared.printLock:
                    print 'Running as a daemon. The main program should exit this thread.'
            else:
                with shared.printLock:
                    print 'Running as a daemon. You can use Ctrl+C to exit.'
                # Park the main thread; worker threads do the real work.
                while True:
                    time.sleep(20)

    def stop(self):
        """Command a clean shutdown of all threads and state."""
        with shared.printLock:
            print 'Stopping Bitmessage Deamon.'
        shared.doCleanShutdown()

    def getApiAddress(self):
        """Return {'address', 'port'} of the API, or None when disabled."""
        if not shared.safeConfigGetBoolean('bitmessagesettings', 'apienabled'):
            return None
        address = shared.config.get('bitmessagesettings', 'apiinterface')
        port = shared.config.getint('bitmessagesettings', 'apiport')
        return {'address':address,'port':port}
# Script entry point: build the application object and start it (GUI by
# default; daemon mode when so configured).
if __name__ == "__main__":
    mainprogram = Main()
    mainprogram.start()
# So far, the creation of and management of the Bitmessage protocol and this
# client is a one-man operation. Bitcoin tips are quite appreciated.
# 1H5XaDA6fYENLbknwZyjiYXYPQaFjjLX2u
|
# -*- coding: utf-8 -*-
import unittest
import datetime
from cwr.parser.encoder.cwrjson import JSONEncoder
from cwr.file import FileTag, CWRFile
from cwr.group import GroupHeader, GroupTrailer, Group
from cwr.acknowledgement import MessageRecord
from cwr.agreement import AgreementRecord
from cwr.transmission import TransmissionTrailer, TransmissionHeader, Transmission
"""
Group from dictionary encoding tests.
The following cases are tested:
"""
__author__ = 'Bernardo Martínez Garrido'
__license__ = 'MIT'
__version__ = '0.0.0'
__status__ = 'Development'
class TestFileJSONEncoding(unittest.TestCase):
    """Tests encoding a full CWRFile (tag + transmission) to JSON.

    The expected value is compared *structurally*: both the expected
    literal and the encoder output are parsed with json.loads before the
    assertion. The previous version compared raw JSON strings, which is
    brittle because JSON object key order is not guaranteed (dict
    ordering varies across Python versions and implementations).
    """

    def setUp(self):
        # Encoder under test; rebuilt per test for isolation.
        self._encoder = JSONEncoder()

    def test_file(self):
        # Local import: this module does not import json at top level.
        import json

        tag = self._get_file_tag()
        transmission = self._get_transmission()

        data = CWRFile(tag, transmission)

        encoded = self._encoder.encode(data)

        expected = json.loads('{"transmission": {"header": {"creation_date_time": "2003-02-16", "sender_name": "SENDER", "sender_id": "ABC334", "sender_type": "SO", "record_type": "HDR", "edi_standard": "01.10", "transmission_date": "2003-02-17", "character_set": "ASCII"}, "groups": [{"group_trailer": {"record_count": 20, "record_type": "GRT", "group_id": 3, "transaction_count": 15}, "transactions": [[{"sales_manufacture_clause": "M", "date_of_signature": "2003-02-17", "prior_royalty_start_date": "2003-02-19", "advance_given": true, "retention_end_date": "2003-02-18", "international_standard_code": "DFG135", "prior_royalty_status": "D", "agreement_end_date": "2003-02-16", "record_type": "AGR", "shares_change": true, "post_term_collection_status": "D", "agreement_type": "OS", "submitter_agreement_n": "AB12", "society_assigned_agreement_n": "DF35", "record_sequence_n": 15, "agreement_start_date": "2003-02-15", "transaction_sequence_n": 3, "post_term_collection_end_date": "2003-02-20", "number_of_works": 12}], [{"sales_manufacture_clause": "M", "date_of_signature": "2003-02-17", "prior_royalty_start_date": "2003-02-19", "advance_given": true, "retention_end_date": "2003-02-18", "international_standard_code": "DFG135", "prior_royalty_status": "D", "agreement_end_date": "2003-02-16", "record_type": "AGR", "shares_change": true, "post_term_collection_status": "D", "agreement_type": "OS", "submitter_agreement_n": "AB12", "society_assigned_agreement_n": "DF35", "record_sequence_n": 15, "agreement_start_date": "2003-02-15", "transaction_sequence_n": 3, "post_term_collection_end_date": "2003-02-20", "number_of_works": 12}]], "group_header": {"record_type": "GRH", "version_number": "02.10", "group_id": 3, "batch_request_id": 15, "transaction_type": "AGR"}}, {"group_trailer": {"record_count": 20, "record_type": "GRT", "group_id": 3, "transaction_count": 15}, "transactions": [[{"sales_manufacture_clause": "M", "date_of_signature": "2003-02-17", "prior_royalty_start_date": "2003-02-19", "advance_given": true, "retention_end_date": "2003-02-18", "international_standard_code": "DFG135", "prior_royalty_status": "D", "agreement_end_date": "2003-02-16", "record_type": "AGR", "shares_change": true, "post_term_collection_status": "D", "agreement_type": "OS", "submitter_agreement_n": "AB12", "society_assigned_agreement_n": "DF35", "record_sequence_n": 15, "agreement_start_date": "2003-02-15", "transaction_sequence_n": 3, "post_term_collection_end_date": "2003-02-20", "number_of_works": 12}], [{"sales_manufacture_clause": "M", "date_of_signature": "2003-02-17", "prior_royalty_start_date": "2003-02-19", "advance_given": true, "retention_end_date": "2003-02-18", "international_standard_code": "DFG135", "prior_royalty_status": "D", "agreement_end_date": "2003-02-16", "record_type": "AGR", "shares_change": true, "post_term_collection_status": "D", "agreement_type": "OS", "submitter_agreement_n": "AB12", "society_assigned_agreement_n": "DF35", "record_sequence_n": 15, "agreement_start_date": "2003-02-15", "transaction_sequence_n": 3, "post_term_collection_end_date": "2003-02-20", "number_of_works": 12}]], "group_header": {"record_type": "GRH", "version_number": "02.10", "group_id": 3, "batch_request_id": 15, "transaction_type": "AGR"}}], "trailer": {"record_type": "TRL", "group_count": 155, "record_count": 568, "transaction_count": 245}}, "tag": {"sequence_n": 123, "receiver": "RCV", "sender": "SND", "version": 2.1, "year": 2015}}')

        # Parse the encoder output too, so key order cannot fail the test.
        self.assertEqual(expected, json.loads(encoded))

    def _get_file_tag(self):
        # Fixture: file-level identification metadata.
        return FileTag(year=2015,
                       sequence_n=123,
                       sender='SND',
                       receiver='RCV',
                       version=2.1)

    def _get_transmission(self):
        # Fixture: transmission header + trailer wrapping two identical groups.
        header = TransmissionHeader(record_type='HDR',
                                    sender_id='ABC334',
                                    sender_name='SENDER',
                                    sender_type='SO',
                                    creation_date_time=datetime.datetime.strptime('20030216', '%Y%m%d').date(),
                                    transmission_date=datetime.datetime.strptime('20030217', '%Y%m%d').date(),
                                    edi_standard='01.10',
                                    character_set='ASCII')
        trailer = TransmissionTrailer(record_type='TRL',
                                      group_count=155,
                                      transaction_count=245,
                                      record_count=568)

        groups = [self._get_group(), self._get_group()]

        return Transmission(header, trailer, groups)

    def _get_group(self):
        # Fixture: group header + trailer wrapping two identical transactions.
        header = GroupHeader(record_type='GRH',
                             group_id=3,
                             transaction_type='AGR',
                             version_number='02.10',
                             batch_request_id=15)
        trailer = GroupTrailer(record_type='GRT',
                               group_id=3,
                               transaction_count=15,
                               record_count=20)

        transactions = [self._get_transaction(), self._get_transaction()]

        return Group(header, trailer, transactions)

    def _get_transaction(self):
        # A transaction is a list of records; here a single agreement.
        return [self._get_agreement()]

    def _get_message(self):
        # Fixture kept for other test cases; unused by test_file.
        return MessageRecord(record_type='MSG',
                             transaction_sequence_n=3,
                             record_sequence_n=15,
                             message_level='F',
                             validation_n='AB3',
                             message_type='G',
                             message_text='THE MESSAGE',
                             original_record_sequence_n=124,
                             message_record_type='AGR')

    def _get_agreement(self):
        # Fixture: a fully populated agreement record.
        return AgreementRecord(record_type='AGR',
                               transaction_sequence_n=3,
                               record_sequence_n=15,
                               submitter_agreement_n='AB12',
                               agreement_type='OS',
                               agreement_start_date=datetime.datetime.strptime('20030215', '%Y%m%d').date(),
                               number_of_works=12,
                               prior_royalty_status='D',
                               post_term_collection_status='D',
                               international_standard_code='DFG135',
                               society_assigned_agreement_n='DF35',
                               sales_manufacture_clause='M',
                               agreement_end_date=datetime.datetime.strptime('20030216', '%Y%m%d').date(),
                               date_of_signature=datetime.datetime.strptime('20030217', '%Y%m%d').date(),
                               retention_end_date=datetime.datetime.strptime('20030218', '%Y%m%d').date(),
                               prior_royalty_start_date=datetime.datetime.strptime('20030219', '%Y%m%d').date(),
                               post_term_collection_end_date=datetime.datetime.strptime('20030220', '%Y%m%d').date(),
                               shares_change=True,
                               advance_given=True)
Corrected JSON encoder test
# -*- coding: utf-8 -*-
import unittest
import datetime
import json
from cwr.parser.encoder.cwrjson import JSONEncoder
from cwr.file import FileTag, CWRFile
from cwr.group import GroupHeader, GroupTrailer, Group
from cwr.acknowledgement import MessageRecord
from cwr.agreement import AgreementRecord
from cwr.transmission import TransmissionTrailer, TransmissionHeader, Transmission
"""
Group from dictionary encoding tests.
The following cases are tested:
"""
__author__ = 'Bernardo Martínez Garrido'
__license__ = 'MIT'
__version__ = '0.0.0'
__status__ = 'Development'
class TestFileJSONEncoding(unittest.TestCase):
    """Tests encoding a full CWRFile (tag + transmission) to JSON.

    Both the expected literal and the encoder output are parsed with
    json.loads before comparison, so JSON object key order cannot make
    the assertion fail spuriously.
    """

    def setUp(self):
        # Encoder under test; rebuilt per test for isolation.
        self._encoder = JSONEncoder()

    def test_file(self):
        # Build a complete file: tag metadata plus a two-group transmission.
        tag = self._get_file_tag()
        transmission = self._get_transmission()

        data = CWRFile(tag, transmission)

        encoded = self._encoder.encode(data)

        expected = json.loads('{"transmission": {"header": {"creation_date_time": "2003-02-16", "sender_name": "SENDER", "sender_id": "ABC334", "sender_type": "SO", "record_type": "HDR", "edi_standard": "01.10", "transmission_date": "2003-02-17", "character_set": "ASCII"}, "groups": [{"group_trailer": {"record_count": 20, "record_type": "GRT", "group_id": 3, "transaction_count": 15}, "transactions": [[{"sales_manufacture_clause": "M", "date_of_signature": "2003-02-17", "prior_royalty_start_date": "2003-02-19", "advance_given": true, "retention_end_date": "2003-02-18", "international_standard_code": "DFG135", "prior_royalty_status": "D", "agreement_end_date": "2003-02-16", "record_type": "AGR", "shares_change": true, "post_term_collection_status": "D", "agreement_type": "OS", "submitter_agreement_n": "AB12", "society_assigned_agreement_n": "DF35", "record_sequence_n": 15, "agreement_start_date": "2003-02-15", "transaction_sequence_n": 3, "post_term_collection_end_date": "2003-02-20", "number_of_works": 12}], [{"sales_manufacture_clause": "M", "date_of_signature": "2003-02-17", "prior_royalty_start_date": "2003-02-19", "advance_given": true, "retention_end_date": "2003-02-18", "international_standard_code": "DFG135", "prior_royalty_status": "D", "agreement_end_date": "2003-02-16", "record_type": "AGR", "shares_change": true, "post_term_collection_status": "D", "agreement_type": "OS", "submitter_agreement_n": "AB12", "society_assigned_agreement_n": "DF35", "record_sequence_n": 15, "agreement_start_date": "2003-02-15", "transaction_sequence_n": 3, "post_term_collection_end_date": "2003-02-20", "number_of_works": 12}]], "group_header": {"record_type": "GRH", "version_number": "02.10", "group_id": 3, "batch_request_id": 15, "transaction_type": "AGR"}}, {"group_trailer": {"record_count": 20, "record_type": "GRT", "group_id": 3, "transaction_count": 15}, "transactions": [[{"sales_manufacture_clause": "M", "date_of_signature": "2003-02-17", "prior_royalty_start_date": "2003-02-19", "advance_given": true, "retention_end_date": "2003-02-18", "international_standard_code": "DFG135", "prior_royalty_status": "D", "agreement_end_date": "2003-02-16", "record_type": "AGR", "shares_change": true, "post_term_collection_status": "D", "agreement_type": "OS", "submitter_agreement_n": "AB12", "society_assigned_agreement_n": "DF35", "record_sequence_n": 15, "agreement_start_date": "2003-02-15", "transaction_sequence_n": 3, "post_term_collection_end_date": "2003-02-20", "number_of_works": 12}], [{"sales_manufacture_clause": "M", "date_of_signature": "2003-02-17", "prior_royalty_start_date": "2003-02-19", "advance_given": true, "retention_end_date": "2003-02-18", "international_standard_code": "DFG135", "prior_royalty_status": "D", "agreement_end_date": "2003-02-16", "record_type": "AGR", "shares_change": true, "post_term_collection_status": "D", "agreement_type": "OS", "submitter_agreement_n": "AB12", "society_assigned_agreement_n": "DF35", "record_sequence_n": 15, "agreement_start_date": "2003-02-15", "transaction_sequence_n": 3, "post_term_collection_end_date": "2003-02-20", "number_of_works": 12}]], "group_header": {"record_type": "GRH", "version_number": "02.10", "group_id": 3, "batch_request_id": 15, "transaction_type": "AGR"}}], "trailer": {"record_type": "TRL", "group_count": 155, "record_count": 568, "transaction_count": 245}}, "tag": {"sequence_n": 123, "receiver": "RCV", "sender": "SND", "version": 2.1, "year": 2015}}')

        # Structural comparison: both sides parsed to dicts/lists first.
        self.assertEqual(expected, json.loads(encoded))

    def _get_file_tag(self):
        # Fixture: file-level identification metadata.
        return FileTag(year=2015,
                       sequence_n=123,
                       sender='SND',
                       receiver='RCV',
                       version=2.1)

    def _get_transmission(self):
        # Fixture: transmission header + trailer wrapping two identical groups.
        header = TransmissionHeader(record_type='HDR',
                                    sender_id='ABC334',
                                    sender_name='SENDER',
                                    sender_type='SO',
                                    creation_date_time=datetime.datetime.strptime('20030216', '%Y%m%d').date(),
                                    transmission_date=datetime.datetime.strptime('20030217', '%Y%m%d').date(),
                                    edi_standard='01.10',
                                    character_set='ASCII')
        trailer = TransmissionTrailer(record_type='TRL',
                                      group_count=155,
                                      transaction_count=245,
                                      record_count=568)

        groups = [self._get_group(), self._get_group()]

        return Transmission(header, trailer, groups)

    def _get_group(self):
        # Fixture: group header + trailer wrapping two identical transactions.
        header = GroupHeader(record_type='GRH',
                             group_id=3,
                             transaction_type='AGR',
                             version_number='02.10',
                             batch_request_id=15)
        trailer = GroupTrailer(record_type='GRT',
                               group_id=3,
                               transaction_count=15,
                               record_count=20)

        transactions = [self._get_transaction(), self._get_transaction()]

        return Group(header, trailer, transactions)

    def _get_transaction(self):
        # A transaction is a list of records; here a single agreement.
        return [self._get_agreement()]

    def _get_message(self):
        # Fixture kept for other test cases; unused by test_file.
        return MessageRecord(record_type='MSG',
                             transaction_sequence_n=3,
                             record_sequence_n=15,
                             message_level='F',
                             validation_n='AB3',
                             message_type='G',
                             message_text='THE MESSAGE',
                             original_record_sequence_n=124,
                             message_record_type='AGR')

    def _get_agreement(self):
        # Fixture: a fully populated agreement record.
        return AgreementRecord(record_type='AGR',
                               transaction_sequence_n=3,
                               record_sequence_n=15,
                               submitter_agreement_n='AB12',
                               agreement_type='OS',
                               agreement_start_date=datetime.datetime.strptime('20030215', '%Y%m%d').date(),
                               number_of_works=12,
                               prior_royalty_status='D',
                               post_term_collection_status='D',
                               international_standard_code='DFG135',
                               society_assigned_agreement_n='DF35',
                               sales_manufacture_clause='M',
                               agreement_end_date=datetime.datetime.strptime('20030216', '%Y%m%d').date(),
                               date_of_signature=datetime.datetime.strptime('20030217', '%Y%m%d').date(),
                               retention_end_date=datetime.datetime.strptime('20030218', '%Y%m%d').date(),
                               prior_royalty_start_date=datetime.datetime.strptime('20030219', '%Y%m%d').date(),
                               post_term_collection_end_date=datetime.datetime.strptime('20030220', '%Y%m%d').date(),
                               shares_change=True,
                               advance_given=True)
import os
from logging import getLogger
from subprocess import call
import math
import requests
from pyramid.view import view_config
from pyramid.response import Response
from random import gauss
from time import time
from pymongo import Connection, pymongo
log = getLogger('mist.core')
@view_config(route_name='machines', request_method='GET', renderer='json')
def list_machines(request):
    """Return the monitored machines list.

    Reads the collectd password file (one "uuid: password" entry per
    line) and returns its lines as a JSON array. Note the result may
    include an empty trailing element if the file ends with a newline,
    matching the original behaviour.
    """
    # Use a context manager so the handle is closed even on error, and
    # avoid shadowing the builtin name `file`.
    with open(os.getcwd() + '/conf/collectd.passwd') as f:
        machines = f.read().split('\n')
    return machines
@view_config(route_name='machines', request_method='PUT', renderer='json')
def add_machine(request):
    """Add a machine to the monitored list.

    Registers the machine's uuid/password in the collectd password file,
    writes a per-machine collectd filter-chain config, includes it from
    the local config, and signals collectd (SIGHUP) to reload.

    Returns {} on success, or an error Response (401/409/503).
    """
    # get request params
    uuid = request.params.get('uuid', None)
    passwd = request.params.get('passwd', None)

    # check for errors
    if not uuid or not passwd:
        return Response('Unauthorized', 401)

    # check if uuid already in pass file
    try:
        # `with` guarantees the handles are closed even if an exception
        # is raised mid-operation (the original leaked them on error).
        with open("conf/collectd.passwd") as f:
            res = f.read()
        if uuid in res:
            return Response('Conflict', 409)
        # append collectd pw file
        with open("conf/collectd.passwd", 'a') as f:
            f.writelines(['\n' + uuid + ': ' + passwd])
    except Exception as e:
        log.error('Error opening machines pw file: %s' % e)
        return Response('Service unavailable', 503)

    # create new collectd conf section for allowing machine stats
    config_append = """
PreCacheChain "%sRule"
<Chain "%sRule">
<Rule "rule">
<Match "regex">
Host "^%s$"
</Match>
Target return
</Rule>
Target stop
</Chain>""" % (uuid, uuid, uuid)

    try:
        with open("conf/collectd_%s.conf" % uuid, "w") as f:
            f.write(config_append)
        # include the new file in the main config
        config_include = "conf/collectd_%s.conf" % uuid
        with open("conf/collectd.conf.local", "a") as f:
            f.write('\nInclude "%s"\n' % config_include)
    except Exception as e:
        log.error('Error opening collectd conf files: %s' % e)
        return Response('Service unavailable', 503)

    # Ask collectd to reload its configuration; failure here is logged
    # but not fatal (the files are already in place).
    try:
        call(['/usr/bin/pkill', '-HUP', 'collectd'])
    except Exception as e:
        log.error('Error restarting collectd: %s' % e)

    return {}
@view_config(route_name='machine', request_method='DELETE', renderer='json')
def remove_machine(request):
    """Remove a machine from the monitored list.

    Drops every line mentioning the machine's uuid from the collectd
    password file and from the local collectd config. Returns an error
    Response (400/404/503) on failure; None (JSON null) on success,
    matching the original behaviour.
    """
    # get request params
    try:
        uuid = request.matchdict['machine']
        # bare raise trips the except below, yielding a 400 for empty uuid
        if not uuid:
            raise
    except Exception as e:
        return Response('Bad Request', 400)

    try:
        with open("conf/collectd.passwd") as f:
            res = f.read()
        if uuid not in res:
            return Response('Not Found', 404)
        # Filter with a comprehension: the original removed items from
        # the list *while iterating it*, which skips the element after
        # each removal and could leave matching lines behind.
        lines = [l for l in res.split('\n') if uuid not in l]
        res = '\n'.join(lines)
        with open("conf/collectd.passwd", 'w') as f:
            f.write(res)
    except Exception as e:
        log.error('Error opening machines pw file: %s' % e)
        return Response('Service unavailable', 503)

    try:
        with open("conf/collectd.conf.local") as f:
            res = f.read()
        if uuid not in res:
            return Response('Not Found', 404)
        # Same mutation-while-iterating fix as above.
        lines = [l for l in res.split('\n') if uuid not in l]
        res = '\n'.join(lines)
        with open("conf/collectd.conf.local", 'w') as f:
            f.write(res)
    except Exception as e:
        log.error('Error opening collectd conf file: %s' % e)
        return Response('Service unavailable', 503)
@view_config(route_name='teststats', request_method='GET', renderer='json')
def get_teststats(request):
    """Get all stats for this machine, the client will draw them
    TODO: return real values
    WARNING: copied from mist.core
    """
    interval = 5000  # milliseconds between consecutive samples
    timestamp = time() * 1000  # epoch seconds -> milliseconds
    # incremental update or full history?
    changes_since = request.GET.get('changes_since', None)
    if changes_since:
        # whole sample intervals elapsed since the client's last poll
        elapsed = timestamp - float(changes_since)
        samples = int(math.floor(elapsed / interval))
    else:
        samples = 1000  # cap the full history
    # synthesized gaussian series: (metric, mean, stddev)
    params = (('cpu', 70.0, 5.0),
              ('load', 4.0, 0.02),
              ('memory', 4000.0, 10.00),
              ('disk', 40.0, 3.0))
    series = dict((name, []) for name, _, _ in params)
    for _ in range(0, samples):
        # keep the per-sample metric order so the RNG stream matches
        for name, mu, sigma in params:
            series[name].append(abs(gauss(mu, sigma)))
    return {'timestamp': timestamp,
            'interval': interval,
            'cpu': series['cpu'],
            'load': series['load'],
            'memory': series['memory'],
            'disk': series['disk']}
@view_config(route_name='mongostats', request_method='GET', renderer='json')
def get_mongostats(request):
    """Get stats for this machine using the mongodb backend. Data is stored using a
    different format than the other get_stats functions, following the targets template
    below
    """
    # NOTE(review): "load" maps to the string 'v', so len() == 1 and its
    # single series lands under the key 'v' -- presumably intentional, confirm.
    targets = {"cpu": ['idle','interrupt','nice','softirq','steal','system','user','wait'],
               "load": 'v',
               "memory": ['buffered', 'cached', 'free', 'used'], "disk": ['merged','octets','ops','time'] }
    mongodb_hostname = 'localhost'
    mongodb_port = 27017
    mongodb_name = 'collectd'
    # get request params
    try:
        uuid = request.matchdict['machine']
        # check for errors
        if not uuid:
            log.error("cannot find uuid %s" % uuid)
            raise
    except Exception as e:
        return Response('Bad Request', 400)
    interval = 5000  # in milliseconds
    # NOTE(review): changes_since is parsed but never used in the query below
    changes_since = request.params.get('changes_since', None)
    if not changes_since:
        changes_since = "-1hours&"
    else:
        changes_since = "%d" % (int(float(changes_since) / 1000))
    connection = Connection(mongodb_hostname, mongodb_port)
    db = connection[mongodb_name]
    ret = {}
    # Iterate the dict directly instead of indexing into targets.keys(),
    # which is Python-2-only and rebuilt the key list on every access.
    for key in targets:
        query_dict = {'host': uuid}
        # newest documents first, one per inner metric
        my_target = db[key].find(query_dict).sort('$natural', pymongo.DESCENDING).limit(len(targets[key]))
        ret[key] = {}
        for idx, inner in enumerate(targets[key]):
            ret[key][inner] = my_target[idx]['values']
    timestamp = time() * 1000  # epoch seconds -> milliseconds
    ret['timestamp'] = timestamp
    ret['interval'] = interval
    log.info(ret)
    return ret
@view_config(route_name='stats', request_method='GET', renderer='json')
def get_stats(request):
    """Get all stats for this machine from graphite, the client will draw them.
    """
    #FIXME: default targets -- could be user customizable
    targets = ["cpu", "load", "memory", "disk"]
    # get request params
    try:
        uuid = request.matchdict['machine']
        # check for errors
        if not uuid:
            log.error("cannot find uuid %s" % uuid)
            raise
    except Exception as e:
        return Response('Bad Request', 400)
    changes_since = request.params.get('changes_since', None)
    if not changes_since:
        changes_since = "-1hours&"
    else:
        # graphite expects epoch seconds, the client sends milliseconds
        changes_since = "%d" % (int(float(changes_since) / 1000))
    data_format = request.params.get('format', None)
    if not data_format:
        data_format = "format=json&"
    #FIXME: get rid of that, we are already on the monitoring server,
    #we should know better ;-)
    graphite_uri = "http://experiment.unweb.me:8080"
    data = {'cpu': [ ], 'load': [ ], 'memory': [ ], 'disk': [ ] }
    interval = 1000
    for target in targets:
        target_uri = "target=servers." + uuid + "." + target + "*.*.*&"
        time_range = "from=%s&until=now" % (changes_since)
        #construct uri
        uri = graphite_uri + "/render?" + data_format + target_uri + time_range
        # debug `print` replaced with logging: keeps stdout clean and
        # respects the configured log level
        log.debug(uri)
        r = requests.get(uri)
        if r.status_code == 200:
            log.info("connect OK")
        else:
            log.error("Status code = %d" % (r.status_code))
        if not len(r.json):
            continue
        for i in range(0, len(r.json[0]['datapoints'])):
            value = r.json[0]['datapoints'][i][0]
            if value:
                data[target].append(value)
            else:
                # graphite reports missing samples as None; substitute a
                # placeholder so the client always receives numbers
                data[target].append(1)
    #timestamp = r.json[0]['datapoints'][0][1] * 1000
    timestamp = time() * 1000
    ret = {'timestamp': timestamp,
           'interval': interval,
           'cpu': data['cpu'],
           'load': data['load'],
           'memory': data['memory'],
           'disk': data['disk']}
    log.info(ret)
    return ret
fixing previous fix ;-)
import os
from logging import getLogger
from subprocess import call
import math
import requests
from pyramid.view import view_config
from pyramid.response import Response
from random import gauss
from time import time
from pymongo import Connection
import pymongo
log = getLogger('mist.core')
@view_config(route_name='machines', request_method='GET', renderer='json')
def list_machines(request):
    """Return the entries of the collectd password file, one per line.

    NOTE(review): returns the raw "uuid: password" lines; presumably the
    client only needs the uuids -- confirm before filtering.
    """
    # `with` closes the handle even on read errors (the previous code
    # leaked the file object and shadowed the `file` builtin)
    with open(os.getcwd() + '/conf/collectd.passwd') as passwd_file:
        machines = passwd_file.read().split('\n')
    return machines
@view_config(route_name='machines', request_method='PUT', renderer='json')
def add_machine(request):
    """Add a machine to the monitored list.

    Appends the machine's credentials to the collectd password file,
    writes a per-machine collectd chain config, includes it from the
    main config, and HUPs collectd to pick up the changes.
    """
    # get request params
    uuid = request.params.get('uuid', None)
    passwd = request.params.get('passwd', None)
    # check for errors
    if not uuid or not passwd:
        return Response('Unauthorized', 401)
    # check if uuid already in pass file
    try:
        # context managers close the handles even if a read/write raises
        with open("conf/collectd.passwd") as f:
            res = f.read()
        if uuid in res:
            return Response('Conflict', 409)
        # append collectd pw file
        with open("conf/collectd.passwd", 'a') as f:
            f.write('\n' + uuid + ': ' + passwd)
    except Exception as e:
        log.error('Error opening machines pw file: %s' % e)
        return Response('Service unavailable', 503)
    # create new collectd conf section for allowing machine stats
    config_append = """
PreCacheChain "%sRule"
<Chain "%sRule">
    <Rule "rule">
        <Match "regex">
            Host "^%s$"
        </Match>
        Target return
    </Rule>
    Target stop
</Chain>""" % (uuid, uuid, uuid)
    try:
        with open("conf/collectd_%s.conf" % uuid, "w") as f:
            f.write(config_append)
        # include the new file in the main config
        config_include = "conf/collectd_%s.conf" % uuid
        with open("conf/collectd.conf.local", "a") as f:
            f.write('\nInclude "%s"\n' % config_include)
    except Exception as e:
        log.error('Error opening collectd conf files: %s' % e)
        return Response('Service unavailable', 503)
    try:
        # signal collectd to reload its configuration
        call(['/usr/bin/pkill', '-HUP', 'collectd'])
    except Exception as e:
        log.error('Error restarting collectd: %s' % e)
    return {}
@view_config(route_name='machine', request_method='DELETE', renderer='json')
def remove_machine(request):
    """Remove a machine from the monitored list.

    Deletes the machine's line from the collectd password file and the
    Include line for its per-machine config from collectd.conf.local.
    Returns an empty dict on success, or an error Response otherwise.
    """
    # get request params
    try:
        uuid = request.matchdict['machine']
        # check for errors
        if not uuid:
            raise
    except Exception as e:
        return Response('Bad Request', 400)
    try:
        # context managers guarantee the handles are closed even on error
        with open("conf/collectd.passwd") as f:
            res = f.read()
        if uuid not in res:
            return Response('Not Found', 404)
        # Rebuild the file without the machine's line. (The previous code
        # removed items from the list while iterating over it, which skips
        # the element following each removal.)
        lines = [l for l in res.split('\n') if uuid not in l]
        with open("conf/collectd.passwd", 'w') as f:
            f.write('\n'.join(lines))
    except Exception as e:
        log.error('Error opening machines pw file: %s' % e)
        return Response('Service unavailable', 503)
    try:
        with open("conf/collectd.conf.local") as f:
            res = f.read()
        if uuid not in res:
            return Response('Not Found', 404)
        lines = [l for l in res.split('\n') if uuid not in l]
        with open("conf/collectd.conf.local", 'w') as f:
            f.write('\n'.join(lines))
    except Exception as e:
        log.error('Error opening collectd conf file: %s' % e)
        return Response('Service unavailable', 503)
    # explicit success value, consistent with add_machine
    return {}
@view_config(route_name='teststats', request_method='GET', renderer='json')
def get_teststats(request):
    """Get all stats for this machine, the client will draw them
    TODO: return real values
    WARNING: copied from mist.core
    """
    interval = 5000  # milliseconds between consecutive samples
    timestamp = time() * 1000  # epoch seconds -> milliseconds
    # incremental update or full history?
    changes_since = request.GET.get('changes_since', None)
    if changes_since:
        # whole sample intervals elapsed since the client's last poll
        elapsed = timestamp - float(changes_since)
        samples = int(math.floor(elapsed / interval))
    else:
        samples = 1000  # cap the full history
    # synthesized gaussian series: (metric, mean, stddev)
    params = (('cpu', 70.0, 5.0),
              ('load', 4.0, 0.02),
              ('memory', 4000.0, 10.00),
              ('disk', 40.0, 3.0))
    series = dict((name, []) for name, _, _ in params)
    for _ in range(0, samples):
        # keep the per-sample metric order so the RNG stream matches
        for name, mu, sigma in params:
            series[name].append(abs(gauss(mu, sigma)))
    return {'timestamp': timestamp,
            'interval': interval,
            'cpu': series['cpu'],
            'load': series['load'],
            'memory': series['memory'],
            'disk': series['disk']}
@view_config(route_name='mongostats', request_method='GET', renderer='json')
def get_mongostats(request):
    """Get stats for this machine using the mongodb backend. Data is stored using a
    different format than the other get_stats functions, following the targets template
    below
    """
    # NOTE(review): "load" maps to the string 'v', so len() == 1 and its
    # single series lands under the key 'v' -- presumably intentional, confirm.
    targets = {"cpu": ['idle','interrupt','nice','softirq','steal','system','user','wait'],
               "load": 'v',
               "memory": ['buffered', 'cached', 'free', 'used'], "disk": ['merged','octets','ops','time'] }
    mongodb_hostname = 'localhost'
    mongodb_port = 27017
    mongodb_name = 'collectd'
    # get request params
    try:
        uuid = request.matchdict['machine']
        # check for errors
        if not uuid:
            log.error("cannot find uuid %s" % uuid)
            raise
    except Exception as e:
        return Response('Bad Request', 400)
    interval = 5000  # in milliseconds
    # NOTE(review): changes_since is parsed but never used in the query below
    changes_since = request.params.get('changes_since', None)
    if not changes_since:
        changes_since = "-1hours&"
    else:
        changes_since = "%d" % (int(float(changes_since) / 1000))
    connection = Connection(mongodb_hostname, mongodb_port)
    db = connection[mongodb_name]
    ret = {}
    # Iterate the dict directly instead of indexing into targets.keys(),
    # which is Python-2-only and rebuilt the key list on every access.
    for key in targets:
        query_dict = {'host': uuid}
        # newest documents first, one per inner metric
        my_target = db[key].find(query_dict).sort('$natural', pymongo.DESCENDING).limit(len(targets[key]))
        ret[key] = {}
        for idx, inner in enumerate(targets[key]):
            ret[key][inner] = my_target[idx]['values']
    timestamp = time() * 1000  # epoch seconds -> milliseconds
    ret['timestamp'] = timestamp
    ret['interval'] = interval
    log.info(ret)
    return ret
@view_config(route_name='stats', request_method='GET', renderer='json')
def get_stats(request):
    """Get all stats for this machine from graphite, the client will draw them.
    """
    #FIXME: default targets -- could be user customizable
    targets = ["cpu", "load", "memory", "disk"]
    # get request params
    try:
        uuid = request.matchdict['machine']
        # check for errors
        if not uuid:
            log.error("cannot find uuid %s" % uuid)
            raise
    except Exception as e:
        return Response('Bad Request', 400)
    changes_since = request.params.get('changes_since', None)
    if not changes_since:
        changes_since = "-1hours&"
    else:
        # graphite expects epoch seconds, the client sends milliseconds
        changes_since = "%d" % (int(float(changes_since) / 1000))
    data_format = request.params.get('format', None)
    if not data_format:
        data_format = "format=json&"
    #FIXME: get rid of that, we are already on the monitoring server,
    #we should know better ;-)
    graphite_uri = "http://experiment.unweb.me:8080"
    data = {'cpu': [ ], 'load': [ ], 'memory': [ ], 'disk': [ ] }
    interval = 1000
    for target in targets:
        target_uri = "target=servers." + uuid + "." + target + "*.*.*&"
        time_range = "from=%s&until=now" % (changes_since)
        #construct uri
        uri = graphite_uri + "/render?" + data_format + target_uri + time_range
        # debug `print` replaced with logging: keeps stdout clean and
        # respects the configured log level
        log.debug(uri)
        r = requests.get(uri)
        if r.status_code == 200:
            log.info("connect OK")
        else:
            log.error("Status code = %d" % (r.status_code))
        if not len(r.json):
            continue
        for i in range(0, len(r.json[0]['datapoints'])):
            value = r.json[0]['datapoints'][i][0]
            if value:
                data[target].append(value)
            else:
                # graphite reports missing samples as None; substitute a
                # placeholder so the client always receives numbers
                data[target].append(1)
    #timestamp = r.json[0]['datapoints'][0][1] * 1000
    timestamp = time() * 1000
    ret = {'timestamp': timestamp,
           'interval': interval,
           'cpu': data['cpu'],
           'load': data['load'],
           'memory': data['memory'],
           'disk': data['disk']}
    log.info(ret)
    return ret
|
#! /usr/bin/env python
import types
import numpy as np
from bmi import Bmi, BmiGridType
from .heat import Heat
class BmiHeat(Bmi):
    """Basic Model Interface (BMI) wrapper around the 2D heat solver.

    Exposes the wrapped `Heat` model through the standard BMI calls so
    framework code can drive it without knowing the model's internals.
    """
    _name = 'The 2D Heat Equation'
    # The temperature grid serves as both the input and the output variable.
    _input_var_names = ['plate_surface__temperature']
    _output_var_names = ['plate_surface__temperature']
    _var_units = {'plate_surface__temperature': 'K'}
    def __init__(self):
        # _model holds the wrapped Heat instance; _values maps BMI variable
        # names onto the model's arrays (by reference, not copy).
        self._model = None
        self._values = {}
    def initialize(self, filename=None):
        """Create the model: with defaults, from a file path, or file-like."""
        if filename is None:
            self._model = Heat()
        elif isinstance(filename, types.StringTypes):
            # NOTE(review): the file's *contents* are passed on here, so
            # from_file_like presumably accepts a string too -- confirm.
            with open(filename, 'r') as fp:
                self._model = Heat.from_file_like(fp.read())
        else:
            self._model = Heat.from_file_like(filename)
        # Reference the model's state array so set_value mutates it in place.
        self._values = {
            'plate_surface__temperature': self._model.z,
        }
    def update(self):
        """Advance the model by one time step."""
        self._model.advance_in_time()
    def update_frac(self, time_frac):
        """Advance the model by a fraction of its time step."""
        dt = self.get_time_step()
        # Temporarily scale dt, take one step, then restore it.
        self._model.dt = time_frac * dt
        self.update()
        self._model.dt = dt
    def update_until(self, then):
        """Advance the model until the given model time."""
        n_steps = (then - self.get_current_time()) / self.get_time_step()
        for _ in xrange(int(n_steps)):
            self.update()
        # Finish with the leftover fractional step (may be a zero-length step).
        self.update_frac(n_steps - int(n_steps))
    def finalize(self):
        """Release the wrapped model."""
        self._model = None
    def get_var_type (self, var_name):
        # numpy dtype of the variable's array, as a string
        return str(self.get_value_ref(var_name).dtype)
    def get_var_units(self, var_name):
        # units string declared for the variable (e.g. 'K')
        return self._var_units[var_name]
    def get_var_nbytes(self, var_name):
        return self.get_value_ref(var_name).nbytes
    def get_grid_rank(self, var_name):
        return self.get_value_ref(var_name).ndim
    def get_grid_size(self, var_name):
        return self.get_value_ref(var_name).size
    def get_value_ref(self, var_name):
        """Return a reference to the variable's array (no copy)."""
        return self._values[var_name]
    def get_value(self, var_name):
        """Return a copy of the variable's array."""
        return self.get_value_ref(var_name).copy()
    def get_value_at_indices(self, var_name, indices):
        return self.get_value_ref(var_name).take(indices)
    def set_value(self, var_name, src):
        # In-place assignment so the model sees the new values.
        val = self.get_value_ref(var_name)
        val[:] = src
    def set_value_at_indices(self, var_name, src, indices):
        val = self.get_value_ref(var_name)
        val.flat[indices] = src
    def get_component_name(self):
        return self._name
    def get_input_var_names(self):
        return self._input_var_names
    def get_output_var_names(self):
        return self._output_var_names
    def get_grid_shape (self, var_name):
        return self.get_value_ref(var_name).shape
    def get_grid_spacing(self, var_name):
        # Implicitly returns None for unknown variable names.
        if var_name in self._values:
            return self._model.spacing
    def get_grid_origin(self, var_name):
        # Implicitly returns None for unknown variable names.
        if var_name in self._values:
            return self._model.origin
    def get_grid_type(self, var_name):
        """UNIFORM for known variables, UNKNOWN otherwise."""
        if var_name in self._values:
            return BmiGridType.UNIFORM
        else:
            return BmiGridType.UNKNOWN
    def get_start_time (self):
        return 0.
    def get_end_time (self):
        # No inherent end time; largest representable double means "forever".
        return np.finfo('d').max
    def get_current_time (self):
        return self._model.time
    def get_time_step (self):
        return self._model.dt
Fix typo
#! /usr/bin/env python
import types
import numpy as np
from bmi import Bmi, BmiGridType
from .heat import Heat
class BmiHeat(Bmi):
    """Basic Model Interface (BMI) wrapper around the 2D heat solver.

    Exposes the wrapped `Heat` model through the standard BMI calls so
    framework code can drive it without knowing the model's internals.
    """
    _name = 'The 2D Heat Equation'
    # The temperature grid serves as both the input and the output variable.
    _input_var_names = ['plate_surface__temperature']
    _output_var_names = ['plate_surface__temperature']
    _var_units = {'plate_surface__temperature': 'K'}
    def __init__(self):
        # _model holds the wrapped Heat instance; _values maps BMI variable
        # names onto the model's arrays (by reference, not copy).
        self._model = None
        self._values = {}
    def initialize(self, filename=None):
        """Create the model: with defaults, from a file path, or file-like."""
        if filename is None:
            self._model = Heat()
        elif isinstance(filename, types.StringTypes):
            # NOTE(review): the file's *contents* are passed on here, so
            # from_file_like presumably accepts a string too -- confirm.
            with open(filename, 'r') as fp:
                self._model = Heat.from_file_like(fp.read())
        else:
            self._model = Heat.from_file_like(filename)
        # Reference the model's state array so set_value mutates it in place.
        self._values = {
            'plate_surface__temperature': self._model.z,
        }
    def update(self):
        """Advance the model by one time step."""
        self._model.advance_in_time()
    def update_frac(self, time_frac):
        """Advance the model by a fraction of its time step."""
        dt = self.get_time_step()
        # Temporarily scale dt, take one step, then restore it.
        self._model.dt = time_frac * dt
        self.update()
        self._model.dt = dt
    def update_until(self, then):
        """Advance the model until the given model time."""
        n_steps = (then - self.get_current_time()) / self.get_time_step()
        for _ in xrange(int(n_steps)):
            self.update()
        # Finish with the leftover fractional step (may be a zero-length step).
        self.update_frac(n_steps - int(n_steps))
    def finalize(self):
        """Release the wrapped model."""
        self._model = None
    def get_var_type (self, var_name):
        # numpy dtype of the variable's array, as a string
        return str(self.get_value_ref(var_name).dtype)
    def get_var_units(self, var_name):
        # units string declared for the variable (e.g. 'K')
        return self._var_units[var_name]
    def get_var_nbytes(self, var_name):
        return self.get_value_ref(var_name).nbytes
    def get_grid_rank(self, var_name):
        return self.get_value_ref(var_name).ndim
    def get_grid_size(self, var_name):
        return self.get_value_ref(var_name).size
    def get_value_ref(self, var_name):
        """Return a reference to the variable's array (no copy)."""
        return self._values[var_name]
    def get_value(self, var_name):
        """Return a copy of the variable's array."""
        return self.get_value_ref(var_name).copy()
    def get_value_at_indices(self, var_name, indices):
        return self.get_value_ref(var_name).take(indices)
    def set_value(self, var_name, src):
        # In-place assignment so the model sees the new values.
        val = self.get_value_ref(var_name)
        val[:] = src
    def set_value_at_indices(self, var_name, src, indices):
        val = self.get_value_ref(var_name)
        val.flat[indices] = src
    def get_component_name(self):
        return self._name
    def get_input_var_names(self):
        return self._input_var_names
    def get_output_var_names(self):
        return self._output_var_names
    def get_grid_shape (self, var_name):
        return self.get_value_ref(var_name).shape
    def get_grid_spacing(self, var_name):
        # Implicitly returns None for unknown variable names.
        if var_name in self._values:
            return self._model.spacing
    def get_grid_origin(self, var_name):
        # Implicitly returns None for unknown variable names.
        if var_name in self._values:
            return self._model.origin
    def get_grid_type(self, var_name):
        """UNIFORM for known variables, UNKNOWN otherwise."""
        if var_name in self._values:
            return BmiGridType.UNIFORM
        else:
            return BmiGridType.UNKNOWN
    def get_start_time (self):
        return 0.
    def get_end_time (self):
        # No inherent end time; largest representable double means "forever".
        return np.finfo('d').max
    def get_current_time (self):
        return self._model.time
    def get_time_step (self):
        return self._model.dt
|
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for all nameserver related activity. Health checks. requests."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import datetime
import time
import random
import sys
import third_party
import dns.exception
import dns.query
import dns.message
import dns.name
import dns.rdataclass
import dns.rdatatype
import util
# Pick the most accurate timer for a platform. Stolen from timeit.py:
if sys.platform == "win32":
DEFAULT_TIMER = time.clock
else:
DEFAULT_TIMER = time.time
GOOGLE_CLASS_B = ('74.125',)
WWW_GOOGLE_RESPONSE = ('CNAME www.l.google.com',)
WWW_PAYPAL_RESPONSE = ('66.211.169.', '64.4.241.')
WWW_TPB_RESPONSE = ('194.71.107.',)
OPENDNS_NS = '208.67.220.220'
WILDCARD_DOMAINS = ('live.com.', 'blogspot.com.', 'wordpress.com.')
MIN_SHARING_DELTA_MS = 2
MAX_SHARING_DELTA_MS = 240
# How many checks to consider when calculating ns check_duration
SHARED_CACHE_TIMEOUT_MULTIPLIER = 2.25
CHECK_DURATION_MAX_COUNT = 9
class NameServer(object):
"""Hold information about a particular nameserver."""
def __init__(self, ip, name=None, internal=False, primary=False):
self.name = name
self.ip = ip
self.is_internal = internal
self.is_primary = primary
self.timeout = 60
self.health_timeout = 30
self.warnings = []
self.shared_with = []
self.is_healthy = True
self.checks = []
self.share_check_count = 0
self.cache_check = None
self.is_slower_replica = False
@property
def check_duration(self):
return sum([x[3] for x in self.checks[0:CHECK_DURATION_MAX_COUNT]])
@property
def failure(self):
failures = [x for x in self.checks if x[1]]
if failures:
return failures[0]
else:
return None
def __str__(self):
return '%s [%s]' % (self.name, self.ip)
def __repr__(self):
return self.__str__()
def Query(self, request, timeout):
return dns.query.udp(request, self.ip, timeout, 53)
def TimedRequest(self, type_string, record_string, timeout=None,
timer=DEFAULT_TIMER):
"""Make a DNS request, returning the reply and duration it took.
Args:
type_string: DNS record type to query (string)
record_string: DNS record name to query (string)
timeout: optional timeout (float)
Returns:
A tuple of (response, duration in ms [float], exception)
In the case of a DNS response timeout, the response object will be None.
"""
request_type = dns.rdatatype.from_text(type_string)
record = dns.name.from_text(record_string, None)
return_type = dns.rdataclass.IN
# Ocassionally we will fail
try:
request = dns.message.make_query(record, request_type, return_type)
except IndexError, exc:
print '- Error creating packet: %s (trying again)' % exc
request = dns.message.make_query(record, request_type, return_type)
if not timeout:
timeout = self.timeout
exc = None
duration = None
try:
start_time = timer()
response = self.Query(request, timeout)
duration = timer() - start_time
except (dns.exception.Timeout), exc:
response = None
duration = timer() - start_time
except (dns.query.BadResponse, dns.message.TrailingJunk,
dns.query.UnexpectedSource), exc:
response = None
duration = timer() - start_time
return (response, duration*1000, exc)
def TestAnswers(self, record_type, record, expected):
"""Test to see that an answer returns correct IP's.
Args:
record_type: text record type for NS query (A, CNAME, etc)
record: string to query for
expected: tuple of strings expected in all answers
Returns:
(is_broken, warning, duration)
"""
is_broken = False
warning = None
(response, duration, exc) = self.TimedRequest(record_type, record,
timeout=self.health_timeout)
failures = []
if not response:
is_broken = True
warning = exc.__class__
elif not response.answer:
is_broken = True
warning = 'No answer'
else:
for a in response.answer:
failed = True
for string in expected:
if string in str(a):
failed=False
break
if failed:
failures.append(a)
if failures:
answers = [' + '.join(map(str, x.items)) for x in response.answer]
answer_text = ' -> '.join(answers)
warning = '%s hijacked (%s)' % (record, answer_text)
return (is_broken, warning, duration)
def ResponseToAscii(self, response):
if not response:
return None
if response.answer:
answers = [' + '.join(map(str, x.items)) for x in response.answer]
return ' -> '.join(answers)
else:
return 'no answer'
def TestGoogleComResponse(self):
return self.TestAnswers('A', 'google.com.', GOOGLE_CLASS_B)
def TestWwwGoogleComResponse(self):
return self.TestAnswers('CNAME', 'www.google.com.', WWW_GOOGLE_RESPONSE)
def TestWwwPaypalComResponse(self):
return self.TestAnswers('A', 'www.paypal.com.', WWW_PAYPAL_RESPONSE)
def TestWwwTpbOrgResponse(self):
return self.TestAnswers('A', 'www.thepiratebay.org.', WWW_TPB_RESPONSE)
def TestNegativeResponse(self):
"""Test for NXDOMAIN hijaaking."""
is_broken = False
warning = None
poison_test = 'nb.%s.google.com.' % random.random()
(response, duration, exc) = self.TimedRequest('A', poison_test,
timeout=self.health_timeout)
if not response:
is_broken = True
warning = str(exc.__class__.__name__)
elif response.answer:
warning = 'NXDOMAIN Hijacking'
return (is_broken, warning, duration)
def QueryWildcardCache(self, hostname=None, save=True, timeout=None):
"""Make a cache to a random wildcard DNS host, storing the record."""
if not timeout:
timeout = self.health_timeout
is_broken = False
warning = None
if not hostname:
domain = random.choice(WILDCARD_DOMAINS)
hostname = 'namebench%s.%s' % (random.randint(1,2**32), domain)
(response, duration, exc) = self.TimedRequest('A', hostname,
timeout=timeout)
ttl = None
if not response:
is_broken = True
warning = exc.__class__.__name__
elif not response.answer:
is_broken = True
warning = 'No response'
else:
ttl = response.answer[0].ttl
if save:
self.cache_check = (hostname, ttl)
return (response, is_broken, warning, duration)
def TestWildcardCaching(self):
return self.QueryWildcardCache(save=True)[1:]
def TestSharedCache(self, other_ns):
"""Is this nameserver sharing a cache with another nameserver?
Args:
other_ns: A nameserver to compare it to.
Returns:
A tuple containing:
- Boolean of whether or not this host has a shared cache
- The faster NameServer object
- The slower NameServer object
"""
if other_ns.cache_check:
(cache_id, other_ttl) = other_ns.cache_check
else:
print "* cache check for %s is missing (skipping)" % other_ns
return (False, None, None)
# These queries tend to run slow, and we've already narrowed down the worst.
timeout = self.health_timeout * SHARED_CACHE_TIMEOUT_MULTIPLIER
(response, is_broken, warning, duration) = self.QueryWildcardCache(
cache_id,
save=False,
timeout=timeout
)
# Try again, but only once. Do penalize them for the first fail however.
if is_broken:
sys.stdout.write('_')
(response, is_broken, warning, duration2) = self.QueryWildcardCache(
cache_id,
save=False,
timeout=timeout
)
if is_broken:
sys.stdout.write('o')
self.checks.append((cache_id, is_broken, warning, duration))
if is_broken:
self.is_healthy = False
else:
delta = abs(other_ttl - response.answer[0].ttl)
if delta > 0:
if other_ns.check_duration > self.check_duration:
slower = other_ns
faster = self
else:
slower = self
faster = other_ns
if delta > MIN_SHARING_DELTA_MS and delta < MAX_SHARING_DELTA_MS:
return (True, slower, faster)
return (False, None, None)
def CheckHealth(self):
"""Qualify a nameserver to see if it is any good."""
tests = [self.TestWwwGoogleComResponse,
self.TestGoogleComResponse,
self.TestNegativeResponse,
self.TestWildcardCaching,
self.TestWwwPaypalComResponse]
self.checks = []
self.warnings = []
for test in tests:
(is_broken, warning, duration) = test()
self.checks.append((test.__name__, is_broken, warning, duration))
if warning:
self.warnings.append(warning)
if is_broken:
self.is_healthy = False
break
return self.is_healthy
Be more persistent in trying to work around DNS entropy issues
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for all nameserver related activity. Health checks. requests."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import datetime
import time
import random
import sys
import third_party
import dns.exception
import dns.query
import dns.message
import dns.name
import dns.rdataclass
import dns.rdatatype
import util
# Pick the most accurate timer for a platform. Stolen from timeit.py:
if sys.platform == "win32":
DEFAULT_TIMER = time.clock
else:
DEFAULT_TIMER = time.time
GOOGLE_CLASS_B = ('74.125',)
WWW_GOOGLE_RESPONSE = ('CNAME www.l.google.com',)
WWW_PAYPAL_RESPONSE = ('66.211.169.', '64.4.241.')
WWW_TPB_RESPONSE = ('194.71.107.',)
OPENDNS_NS = '208.67.220.220'
WILDCARD_DOMAINS = ('live.com.', 'blogspot.com.', 'wordpress.com.')
MIN_SHARING_DELTA_MS = 2
MAX_SHARING_DELTA_MS = 240
# How many checks to consider when calculating ns check_duration
SHARED_CACHE_TIMEOUT_MULTIPLIER = 2.25
CHECK_DURATION_MAX_COUNT = 9
class NameServer(object):
"""Hold information about a particular nameserver."""
def __init__(self, ip, name=None, internal=False, primary=False):
self.name = name
self.ip = ip
self.is_internal = internal
self.is_primary = primary
self.timeout = 60
self.health_timeout = 30
self.warnings = []
self.shared_with = []
self.is_healthy = True
self.checks = []
self.share_check_count = 0
self.cache_check = None
self.is_slower_replica = False
@property
def check_duration(self):
return sum([x[3] for x in self.checks[0:CHECK_DURATION_MAX_COUNT]])
@property
def failure(self):
failures = [x for x in self.checks if x[1]]
if failures:
return failures[0]
else:
return None
def __str__(self):
return '%s [%s]' % (self.name, self.ip)
def __repr__(self):
return self.__str__()
def Query(self, request, timeout):
return dns.query.udp(request, self.ip, timeout, 53)
def TimedRequest(self, type_string, record_string, timeout=None,
timer=DEFAULT_TIMER):
"""Make a DNS request, returning the reply and duration it took.
Args:
type_string: DNS record type to query (string)
record_string: DNS record name to query (string)
timeout: optional timeout (float)
Returns:
A tuple of (response, duration in ms [float], exception)
In the case of a DNS response timeout, the response object will be None.
"""
request_type = dns.rdatatype.from_text(type_string)
record = dns.name.from_text(record_string, None)
return_type = dns.rdataclass.IN
# There is a bug in dns/entropy.py:64 that causes IndexErrors ocassionally
tries = 0
success = False
while not success and tries < 5:
tries += 1
try:
request = dns.message.make_query(record, request_type, return_type)
success = True
except IndexError, exc:
print 'Waiting for entropy (%s)' % exc
time.sleep(0.5)
success = False
request = dns.message.make_query(record, request_type, return_type)
if not timeout:
timeout = self.timeout
exc = None
duration = None
try:
start_time = timer()
response = self.Query(request, timeout)
duration = timer() - start_time
except (dns.exception.Timeout), exc:
response = None
duration = timer() - start_time
except (dns.query.BadResponse, dns.message.TrailingJunk,
dns.query.UnexpectedSource), exc:
response = None
duration = timer() - start_time
return (response, duration*1000, exc)
def TestAnswers(self, record_type, record, expected):
"""Test to see that an answer returns correct IP's.
Args:
record_type: text record type for NS query (A, CNAME, etc)
record: string to query for
expected: tuple of strings expected in all answers
Returns:
(is_broken, warning, duration)
"""
is_broken = False
warning = None
(response, duration, exc) = self.TimedRequest(record_type, record,
timeout=self.health_timeout)
failures = []
if not response:
is_broken = True
warning = exc.__class__
elif not response.answer:
is_broken = True
warning = 'No answer'
else:
for a in response.answer:
failed = True
for string in expected:
if string in str(a):
failed=False
break
if failed:
failures.append(a)
if failures:
answers = [' + '.join(map(str, x.items)) for x in response.answer]
answer_text = ' -> '.join(answers)
warning = '%s hijacked (%s)' % (record, answer_text)
return (is_broken, warning, duration)
def ResponseToAscii(self, response):
if not response:
return None
if response.answer:
answers = [' + '.join(map(str, x.items)) for x in response.answer]
return ' -> '.join(answers)
else:
return 'no answer'
def TestGoogleComResponse(self):
return self.TestAnswers('A', 'google.com.', GOOGLE_CLASS_B)
def TestWwwGoogleComResponse(self):
return self.TestAnswers('CNAME', 'www.google.com.', WWW_GOOGLE_RESPONSE)
def TestWwwPaypalComResponse(self):
return self.TestAnswers('A', 'www.paypal.com.', WWW_PAYPAL_RESPONSE)
def TestWwwTpbOrgResponse(self):
return self.TestAnswers('A', 'www.thepiratebay.org.', WWW_TPB_RESPONSE)
def TestNegativeResponse(self):
"""Test for NXDOMAIN hijaaking."""
is_broken = False
warning = None
poison_test = 'nb.%s.google.com.' % random.random()
(response, duration, exc) = self.TimedRequest('A', poison_test,
timeout=self.health_timeout)
if not response:
is_broken = True
warning = str(exc.__class__.__name__)
elif response.answer:
warning = 'NXDOMAIN Hijacking'
return (is_broken, warning, duration)
def QueryWildcardCache(self, hostname=None, save=True, timeout=None):
"""Make a cache to a random wildcard DNS host, storing the record."""
if not timeout:
timeout = self.health_timeout
is_broken = False
warning = None
if not hostname:
domain = random.choice(WILDCARD_DOMAINS)
hostname = 'namebench%s.%s' % (random.randint(1,2**32), domain)
(response, duration, exc) = self.TimedRequest('A', hostname,
timeout=timeout)
ttl = None
if not response:
is_broken = True
warning = exc.__class__.__name__
elif not response.answer:
is_broken = True
warning = 'No response'
else:
ttl = response.answer[0].ttl
if save:
self.cache_check = (hostname, ttl)
return (response, is_broken, warning, duration)
def TestWildcardCaching(self):
return self.QueryWildcardCache(save=True)[1:]
  def TestSharedCache(self, other_ns):
    """Is this nameserver sharing a cache with another nameserver?

    Args:
      other_ns: A nameserver to compare it to.

    Returns:
      A tuple containing:
        - Boolean of whether or not this host has a shared cache
        - The faster NameServer object
        - The slower NameServer object
    """
    if other_ns.cache_check:
      (cache_id, other_ttl) = other_ns.cache_check
    else:
      print "* cache check for %s is missing (skipping)" % other_ns
      return (False, None, None)
    # These queries tend to run slow, and we've already narrowed down the worst.
    timeout = self.health_timeout * SHARED_CACHE_TIMEOUT_MULTIPLIER
    (response, is_broken, warning, duration) = self.QueryWildcardCache(
        cache_id,
        save=False,
        timeout=timeout
    )
    # Try again, but only once. Do penalize them for the first fail however.
    if is_broken:
      sys.stdout.write('_')
      (response, is_broken, warning, duration2) = self.QueryWildcardCache(
          cache_id,
          save=False,
          timeout=timeout
      )
      if is_broken:
        sys.stdout.write('o')
    # NOTE(review): the retry's duration2 is discarded; the first (failed)
    # attempt's duration is what gets recorded -- looks intentional per the
    # "penalize" comment above, but confirm.
    self.checks.append((cache_id, is_broken, warning, duration))
    if is_broken:
      self.is_healthy = False
    else:
      # Compare the cached TTLs: on a shared cache the two servers should
      # report TTLs for the same record that differ by the elapsed time.
      delta = abs(other_ttl - response.answer[0].ttl)
      if delta > 0:
        if other_ns.check_duration > self.check_duration:
          slower = other_ns
          faster = self
        else:
          slower = self
          faster = other_ns
        # Only treat a TTL delta within the configured window as evidence
        # of cache sharing; 0 or out-of-range deltas are inconclusive.
        if delta > MIN_SHARING_DELTA_MS and delta < MAX_SHARING_DELTA_MS:
          return (True, slower, faster)
    return (False, None, None)
def CheckHealth(self):
"""Qualify a nameserver to see if it is any good."""
tests = [self.TestWwwGoogleComResponse,
self.TestGoogleComResponse,
self.TestNegativeResponse,
self.TestWildcardCaching,
self.TestWwwPaypalComResponse]
self.checks = []
self.warnings = []
for test in tests:
(is_broken, warning, duration) = test()
self.checks.append((test.__name__, is_broken, warning, duration))
if warning:
self.warnings.append(warning)
if is_broken:
self.is_healthy = False
break
return self.is_healthy
|
from __future__ import absolute_import
from __future__ import print_function
from ..packages.six.moves import urllib_parse as urlparse
from .._abstract.abstract import BaseAGOLClass
from datetime import datetime, timedelta
from ..common.general import local_time_to_online
import json
import os
########################################################################
class Community(BaseAGOLClass):
    """
    This set of resources contains operations related to users and groups.
    Inputs:
       url - community REST URL
       securityHandler - Oauth or AGOL security handler
       proxy_url - optional - URL of proxy
       proxy_port - optional - port of the proxy
       initialize - unused; retained for signature compatibility
    """
    _baseURL = None
    _url = None
    _securityHandler = None
    _proxy_port = None
    _proxy_url = None
    #----------------------------------------------------------------------
    def __init__(self, url, securityHandler,
                 proxy_url=None,
                 proxy_port=None,
                 initialize=False):
        """Constructor"""
        self._url = url
        self._securityHandler = securityHandler
        if securityHandler is not None:
            self._referer_url = securityHandler.referer_url
        self._proxy_port = proxy_port
        self._proxy_url = proxy_url
    #----------------------------------------------------------------------
    def __str__(self):
        """returns the raw json string from the class"""
        # This resource keeps no cached JSON representation.
        return ""
    #----------------------------------------------------------------------
    def __iter__(self):
        """returns the key/values of an object"""
        # No instance JSON is cached, so there are no pairs to yield.
        for k, v in {}.items():
            yield k, v
    #----------------------------------------------------------------------
    def checkUserName(self, username):
        """
        Checks if a username is able to be used.
        Inputs:
           username - name of user to create.
        Output:
           JSON as string
        """
        params = {
            "f" : "json",
            "usernames" : username
        }
        url = self._url + "/checkUsernames"
        return self._post(url=url,
                          param_dict=params,
                          proxy_url=self._proxy_url,
                          securityHandler=self._securityHandler,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    @property
    def communitySelf(self):
        """ This resource allows discovery of the current authenticated
        user identified by the token. """
        params = {
            "f" : "json",
        }
        return self._get(url=self._url + "/self",
                         param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def search(self,
               q,
               t=None,
               start=1,
               num=10,
               sortField="title",
               sortOrder="asc"):
        """
        The Group Search operation searches for groups in the portal. The
        search index is updated whenever groups and organizations are
        created, updated, or deleted. There can be a lag between the time
        that a group is updated and the time when it's reflected in the
        search results. The results only contain groups that the user has
        permission to access.
        Inputs:
           q - query string to search
           t - type search
           start - number of the first entry in response results. The
                   default is 1
           num - maximum number of results to return. The maximum is 100.
           sortField - field to sort by. Allowed values: title, owner or
                       created.
           sortOrder - Order of result values returned. Values: asc or desc
        """
        params = {
            "f" : "json",
            "q" : q,
            "num" : num,
            "start" : start
        }
        if t is not None:
            params['t'] = t
        if sortField is not None:
            params['sortField'] = sortField
        if sortOrder is not None:
            params['sortOrder'] = sortOrder
        url = self._url + "/groups"
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def getGroupIDs(self, groupNames, communityInfo=None):
        """
        This function retrieves the group IDs
        Inputs:
           groupNames - list of group names (or a single name string)
           communityInfo - optional, pre-fetched result of communitySelf
        Output:
           list of group IDs whose titles match (case-insensitive)
        """
        group_ids = []
        if communityInfo is None:
            communityInfo = self.communitySelf
        if isinstance(groupNames, list):
            # Bug fix: build a real list here. The previous
            # map(str.upper, groupNames) is a one-shot iterator under
            # Python 3 that is exhausted after the first membership test,
            # silently dropping every subsequent match.
            groupNames = [name.upper() for name in groupNames]
        else:
            groupNames = groupNames.upper()
        if 'groups' in communityInfo:
            for gp in communityInfo['groups']:
                if str(gp['title']).upper() in groupNames:
                    group_ids.append(gp['id'])
        del communityInfo
        return group_ids
    #----------------------------------------------------------------------
    def createGroup(self,
                    title,
                    tags,
                    description="",
                    snippet="",
                    phone="",
                    access="org",
                    sortField="title",
                    sortOrder="asc",
                    isViewOnly=False,
                    isInvitationOnly=False,
                    thumbnail=None):
        """
        The Create Group operation (POST only) creates a new group in the
        Portal community. Only authenticated users can create groups. The
        user who creates the group automatically becomes the owner of the
        group. The owner of the group is automatically an administrator of
        the group. The calling user provides the title for the group, while
        the group ID is generated by the system.
        Inputs:
           title - The group title must be unique for the username, and the
                   character limit is 250.
           tags - Tags are words or short phrases that describe the group.
                  Separate terms with commas.
           description - A description of the group that can be any length
           snippet - Snippet or summary of the group that has a character
                     limit of 250 characters.
           phone - group contact information
           access - Sets the access level for the group. private is the
                    default. Setting to org restricts group access to
                    members of your organization. If public, all users can
                    access the group.
                    Values: private | org | public
           sortField - Sets sort field for group items.
                       Values: title | owner | avgRating | numViews
                               | created | modified
           sortOrder - sets sort order for group items. Values: asc or desc
           isViewOnly - Allows the group owner or admin to create view-only
                        groups where members are not able to share items.
                        If members try to share, view-only groups are
                        returned in the notshared response property. false
                        is the default.
           isInvitationOnly - If true, this group will not accept join
                              requests. If false, this group does not
                              require an invitation to join. Only group
                              owners and admins can invite users to the
                              group. false is the default.
           thumbnail - Enter the pathname to the thumbnail image to be used
                       for the group. The recommended image size is 200
                       pixels wide by 133 pixels high. Acceptable image
                       formats are PNG, GIF, and JPEG. The maximum file size
                       for an image is 1 MB. This is not a reference to
                       the file but the file itself, which will be stored
                       in the Portal.
        Output:
           Group object for the newly created group.
        """
        params = {
            "f" : "json",
            "title" : title,
            "description" : description,
            "snippet" : snippet,
            "tags" : tags,
            "phone" : phone,
            "access" : access,
            "sortField" : sortField,
            "sortOrder" : sortOrder,
            "isViewOnly" : isViewOnly,
            "isInvitationOnly" : isInvitationOnly
        }
        url = self._url + "/createGroup"
        # (Removed an unused "groups = self.groups" assignment that built a
        # throwaway Groups object on every call.)
        if thumbnail is not None and \
           os.path.isfile(thumbnail):
            res = self._post(url=url,
                             param_dict=params,
                             files={'thumbnail': thumbnail},
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
        else:
            res = self._post(url=url, param_dict=params,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
        if "group" not in res:
            raise Exception("%s" % res)
        if "id" not in res['group']:
            raise Exception("%s" % res)
        groupId = res['group']['id']
        url = "%s/groups/%s" % (self.root, groupId)
        return Group(url=url,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port,
                     initalize=False)
    #----------------------------------------------------------------------
    @property
    def root(self):
        """ returns the community root URL """
        return self._url
    #----------------------------------------------------------------------
    @property
    def groups(self):
        """ returns the group object """
        return Groups(url="%s/groups" % self.root,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port,
                      initalize=False)
    #----------------------------------------------------------------------
    @property
    def users(self):
        """
        returns the user class object for current session
        """
        return Users(url="%s/users" % self.root,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port,
                     )
########################################################################
class Groups(BaseAGOLClass):
    """
    Provides search over, and access to, the groups of a portal (for
    example, San Bernardino Fires).
    The group owner is automatically an administrator and is returned in
    the list of admins. Administrators can invite, add, or remove
    members, and can update or delete the group. An organization
    administrator can also reassign the group to another member.
    Group members can leave the group. Authenticated users can apply to
    join a group unless the group is by invitation only.
    A group's visibility is governed by its access property: a private
    group is visible only to its administrators and members, while a
    group shared with an organization can be found by every member of
    that organization.
    Inputs:
       url - group URL to site/agol
       securityHandler - Oauth or AGOL security handler
       proxy_url - optional - URL of proxy
       proxy_port - optional - port of the proxy
    """
    _url = None
    _securityHandler = None
    _proxy_port = None
    _proxy_url = None
    _json = None
    _json_dict = None
    _currentUser = None
    _portalId = None
    #----------------------------------------------------------------------
    def __init__(self,
                 url,
                 securityHandler,
                 proxy_url=None,
                 proxy_port=None,
                 initalize=False):
        """Constructor"""
        self._url = url
        self._securityHandler = securityHandler
        self._proxy_port = proxy_port
        self._proxy_url = proxy_url
        if securityHandler is not None:
            self._referer_url = securityHandler.referer_url
        if initalize:
            self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """loads the portal id and current user name into the class"""
        if self._portalId is not None:
            return
        from .administration import Administration
        admin = Administration(url=self._securityHandler.org_url,
                               securityHandler=self._securityHandler,
                               proxy_url=self._proxy_url,
                               proxy_port=self._proxy_port)
        portalSelf = admin.portals.portalSelf
        self._portalId = portalSelf.id
        self._currentUser = portalSelf.user['username']
    #----------------------------------------------------------------------
    @property
    def root(self):
        """returns the url for the class"""
        return self._url
    #----------------------------------------------------------------------
    def __str__(self):
        """returns raw JSON response as string"""
        if self._json is None:
            self.__init()
        return ""
    #----------------------------------------------------------------------
    def __iter__(self):
        """yields a Group object for every group in the organization"""
        self.__init()
        query = " orgid: %s" % self._portalId
        nextStart = 0
        # The server signals the last page with nextStart == -1.
        while nextStart > -1:
            page = self.search(q=query, start=nextStart, num=100)
            for entry in page['results']:
                yield self.group(entry['id'])
            nextStart = page['nextStart']
    #----------------------------------------------------------------------
    def search(self, q, start=1, num=10, sortField="title",
               sortOrder="asc"):
        """
        The Group Search operation searches for groups in the portal. The
        search index is updated whenever groups and organizations are
        created, updated, or deleted. There can be a lag between the time
        that a group is updated and the time when it's reflected in the
        search results. The results only contain groups that the user has
        permission to access.
        Inputs:
           q - The query string to search the groups against.
           start - The number of the first entry in the result set
                   response. The index number is 1-based; the default is 1
                   (the first search result). Together with num this can
                   be used to paginate the search results.
           num - The maximum number of results to be included in the
                 result set response. The actual number returned may be
                 less than num when fewer results remain after start.
           sortField - Field to sort by. The allowed field names are
                       title, owner, and created.
           sortOrder - Whether results return in ascending or descending
                       order. Default is ascending.
                       Values: asc | desc
        """
        params = {
            "f" : "json",
            "q" : q,
            "start" : start,
            "num" : num,
            "sortOrder" : sortOrder,
            "sortField" : sortField
        }
        return self._get(url=self._url,
                         param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def group(self, groupId):
        """
        gets a group based on it's ID
        """
        return Group(url="%s/%s" % (self.root, groupId),
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port,
                     initalize=False)
########################################################################
class Group(BaseAGOLClass):
"""
The Group resource represents a group (for example, San Bernardino
Fires) within the portal.
The owner is automatically an administrator and is returned in the
list of admins. Administrators can invite, add to, or remove
members from a group as well as update or delete the group. The
administrator for an organization can also reassign the group to
another member of the organization.
Group members can leave the group. Authenticated users can apply to
join a group unless the group is by invitation only.
The visibility of the group by other users is determined by the
access property. If the group is private, no one other than the
administrators and members of the group will be able to see it. If
the group is shared with an organization, all members of the
organization will be able to find it.
"""
_url = None
_securityHandler = None
_proxy_port = None
_proxy_url = None
_json = None
_json_dict = None
_snippet = None
_isFav = None
_description = None
_title = None
_isReadOnly = None
_sortField = None
_id = None
_isViewOnly = None
_modified = None
_created = None
_access = None
_phone = None
_providerGroupName = None
_sortOrder = None
_provider = None
_owner = None
_userMembership = None
_isInvitationOnly = None
_thumbnail = None
_featuredItemsId = None
_isPublic = None
_isOrganization = None
_tags = None
_capabilities = None
#----------------------------------------------------------------------
def __init__(self,
url,
securityHandler,
proxy_url=None,
proxy_port=None,
initalize=False):
"""Constructor"""
self._url = url
self._securityHandler = securityHandler
if not securityHandler is None:
self._referer_url = securityHandler.referer_url
self._proxy_port = proxy_port
self._proxy_url = proxy_url
if initalize:
self.__init()
#----------------------------------------------------------------------
def __init(self):
"""loads the property data into the class"""
params = {
"f" : "json"
}
json_dict = self._get(url=self._url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
self._json_dict = json_dict
self._json = json.dumps(json_dict)
attributes = [attr for attr in dir(self)
if not attr.startswith('__') and \
not attr.startswith('_')]
for k,v in json_dict.items():
if k in attributes:
setattr(self, "_"+ k, json_dict[k])
else:
print (k, " - attribute not implemented in Group class.")
#----------------------------------------------------------------------
    def __str__(self):
        """returns raw JSON response as string"""
        # Fetch lazily on first use so construction stays cheap.
        if self._json is None:
            self.__init()
        return self._json
    #----------------------------------------------------------------------
    def __iter__(self):
        """returns properties (key/values) from the JSON response"""
        if self._json_dict is None:
            self.__init()
        for k,v in self._json_dict.items():
            yield [k,v]
#----------------------------------------------------------------------
    # Lazily-loaded metadata properties: each triggers one __init() fetch
    # on first access, then serves the cached value.
    @property
    def capabilities(self):
        '''gets the property value for capabilities'''
        if self._capabilities is None:
            self.__init()
        return self._capabilities
    #----------------------------------------------------------------------
    @property
    def snippet(self):
        '''gets the property value for snippet'''
        if self._snippet is None:
            self.__init()
        return self._snippet
    #----------------------------------------------------------------------
    @property
    def isFav(self):
        '''gets the property value for isFav'''
        if self._isFav is None:
            self.__init()
        return self._isFav
    #----------------------------------------------------------------------
    @property
    def description(self):
        '''gets the property value for description'''
        if self._description is None:
            self.__init()
        return self._description
    #----------------------------------------------------------------------
    @property
    def title(self):
        '''gets the property value for title'''
        if self._title is None:
            self.__init()
        return self._title
    #----------------------------------------------------------------------
    @property
    def isReadOnly(self):
        '''gets the property value for isReadOnly'''
        if self._isReadOnly is None:
            self.__init()
        return self._isReadOnly
    #----------------------------------------------------------------------
    @property
    def sortField(self):
        '''gets the property value for sortField'''
        if self._sortField is None:
            self.__init()
        return self._sortField
    #----------------------------------------------------------------------
    @property
    def id(self):
        '''gets the property value for id'''
        if self._id is None:
            self.__init()
        return self._id
    #----------------------------------------------------------------------
    @property
    def isViewOnly(self):
        '''gets the property value for isViewOnly'''
        if self._isViewOnly is None:
            self.__init()
        return self._isViewOnly
    #----------------------------------------------------------------------
    @property
    def modified(self):
        '''gets the property value for modified'''
        if self._modified is None:
            self.__init()
        return self._modified
    #----------------------------------------------------------------------
    @property
    def created(self):
        '''gets the property value for created'''
        if self._created is None:
            self.__init()
        return self._created
    #----------------------------------------------------------------------
    @property
    def access(self):
        '''gets the property value for access'''
        if self._access is None:
            self.__init()
        return self._access
    #----------------------------------------------------------------------
    @property
    def phone(self):
        '''gets the property value for phone'''
        if self._phone is None:
            self.__init()
        return self._phone
    #----------------------------------------------------------------------
    @property
    def providerGroupName(self):
        '''gets the property value for providerGroupName'''
        if self._providerGroupName is None:
            self.__init()
        return self._providerGroupName
    #----------------------------------------------------------------------
    @property
    def sortOrder(self):
        '''gets the property value for sortOrder'''
        if self._sortOrder is None:
            self.__init()
        return self._sortOrder
    #----------------------------------------------------------------------
    @property
    def provider(self):
        '''gets the property value for provider'''
        if self._provider is None:
            self.__init()
        return self._provider
    #----------------------------------------------------------------------
    @property
    def owner(self):
        '''gets the property value for owner'''
        if self._owner is None:
            self.__init()
        return self._owner
    #----------------------------------------------------------------------
    @property
    def userMembership(self):
        '''gets the property value for userMembership'''
        if self._userMembership is None:
            self.__init()
        return self._userMembership
    #----------------------------------------------------------------------
    @property
    def isInvitationOnly(self):
        '''gets the property value for isInvitationOnly'''
        if self._isInvitationOnly is None:
            self.__init()
        return self._isInvitationOnly
    #----------------------------------------------------------------------
    @property
    def thumbnail(self):
        '''gets the property value for thumbnail'''
        if self._thumbnail is None:
            self.__init()
        return self._thumbnail
    #----------------------------------------------------------------------
    @property
    def featuredItemsId(self):
        '''gets the property value for featuredItemsId'''
        if self._featuredItemsId is None:
            self.__init()
        return self._featuredItemsId
    #----------------------------------------------------------------------
    @property
    def isPublic(self):
        '''gets the property value for isPublic'''
        if self._isPublic is None:
            self.__init()
        return self._isPublic
    #----------------------------------------------------------------------
    @property
    def isOrganization(self):
        '''gets the property value for isOrganization'''
        if self._isOrganization is None:
            self.__init()
        return self._isOrganization
    #----------------------------------------------------------------------
    @property
    def tags(self):
        '''gets the property value for tags'''
        if self._tags is None:
            self.__init()
        return self._tags
#----------------------------------------------------------------------
def reassign(self, targetUsername):
"""
The Reassign Group operation (POST only) allows the administrator
of an organization to reassign a group to another member of the
organization.
Inputs:
targetUsername - The target username of the new owner of the
group.
"""
params = {
"f" : "json",
"targetUsername" : targetUsername
}
return self._post(url=self._url + "/reassign",
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def update(self,
clearEmptyFields=True,
title=None,
description=None,
snippet=None,
tags=None,
phone=None,
access=None,
sortField=None,
sortOrder=None,
isViewOnly=None,
isInvitationOnly=None,
thumbnail=None):
"""
The Update Group operation (POST only) modifies properties such as
the group title, tags, description, sort field and order, and
member sharing capabilities. Available only to the group
administrators or to the administrator of the organization if the
user is a member.
Only the properties that are to be updated need to be specified in
the request. Properties not specified will not be affected.
The group ID cannot be modified.
Inputs:
title - The group title must be unique for the username, and the
character limit is 250.
Example: title=Redlands Fire Department
description - A description of the group that can be any length.
snippet - Snippet or summary of the group that has a character
limit of 250 characters.
tags - Tags are words or short phrases that describe the group.
Separate terms with commas.
phone - Phone is the group contact information. It can be a
combination of letters and numbers. The character limit
is 250.
access - Sets the access level for the group. private is the
default. Setting to org restricts group access to
members of your organization. If public, all users can
access the group.
Values: private | org |public
sortField - Sets sort field for group items.
Values: title | owner | avgRating |
numViews| created | modified
sortOrder - Sets sort order for group items.
Values: asc | desc
isViewOnly - Allows the group owner or admin to create view-only
groups where members are not able to share items.
If members try to share, view-only groups are
returned in the notshared response property.
Values: false | true
"""
params = {
"f" : "json"
}
if title is not None:
params['title'] = title
if description is not None:
params['description'] = description
if snippet is not None:
params['snippet'] = snippet
if tags is not None:
params['tags'] = tags
if phone is not None:
params['phone'] = phone
if access is not None:
params['access'] = access
if sortField is not None:
params['sortField'] = sortField
if isViewOnly is not None:
params['isViewOnly'] = isViewOnly
if isInvitationOnly is not None:
params['isInvitationOnly'] = isInvitationOnly
if clearEmptyFields is not None:
params['clearEmptyFields'] = clearEmptyFields
files = {}
url = self._url + "/update"
if thumbnail is not None and \
os.path.isfile(thumbnail):
files['thumbnail'] =thumbnail
res = None
if thumbnail is not None and \
os.path.isfile(thumbnail):
res = self._post(url=url,
param_dict=params,
files=files,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
return res
else:
res = self._post(url=url, param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
self.__init()
return res
#----------------------------------------------------------------------
def delete(self):
"""
deletes the current group
"""
params = {
"f" : "json",
}
return self._post(url=self._url + "/delete",
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def join(self):
"""
Users apply to join a group using the Join Group operation. This
creates a new group application, which the group administrators
accept or decline. This operation also creates a notification for
the user indicating that they have applied to join this group.
Available only to authenticated users.
Users can only apply to join groups to which they have access. If
the group is private, users will not be able to find it to ask to
join it.
Information pertaining to the applying user, such as their full
name and username, can be sent as part of the group application.
Output:
JSON response as dictionary
"""
params = {
"f" : "json",
}
return self._post(url=self._url + "/join",
securityHandler=self._securityHandler,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def invite(self, users, role, expiration=1440):
"""
A group administrator can invite users to join their group using
the Invite to Group operation. This creates a new user invitation,
which the users accept or decline. The role of the user and the
invitation expiration date can be set in the invitation.
A notification is created for the user indicating that they were
invited to join the group. Available only to authenticated users.
Inputs:
users - A comma separated list of usernames to be invited to the
group. If a user is already a member of the group or an
invitation has already been sent, the call still returns
a success.
Example: users=regularusername1,regularusername2
role - Allows administrators to set the user's role in the group
Roles are:
group_member: Ability to view and share items with
group.
group_admin: In addition to viewing and sharing items,
the group_admin has the same capabilities
as the group owner-invite users to the
group, accept or decline group
applications, delete content, and remove
users.
expiration - Expiration date on the invitation can be set for
one day, three days, one week, or two weeks, in
minutes. Default is 1440
"""
params = {
"f" : "json",
"users" : users,
"role" : role,
"expiration" : expiration
}
return self._post(url=self._url + "/invite",
securityHandler=self._securityHandler,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def leave(self):
"""
The Leave Group operation (POST only) is available to all group
members other than the group owner. Leaving a group automatically
results in the unsharing of all items the user has shared with the
group.
Output:
JSON response as a dictionary
"""
params = {
"f" : "json"
}
return self._post(url=self._url + "/leave",
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def removeUsersFromGroup(self, users):
"""
The operation to Remove Users From Group (POST only) is available
only to the group administrators, including the owner, and to the
administrator of the organization if the user is a member. Both
users and admins can be removed using this operation. Group owners
cannot be removed from the group.
Inputs:
users - A comma-separated list of usernames (both admins and
regular users) to be removed from the group.
Example: users=regularusername1,adminusername1,
adminusername2,regularusername2
"""
params = {
"f" : "json",
"users" : users
}
return self._post(url=self._url + "/removeUsers",
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def addUsersToGroups(self, users):
"""
The operation to Add Users to Group (POST only) is available only
to the group administrators, including the owner, and to the
administrator of the organization if the user is a member. Both
users and admins can be added using this operation. This is useful
if you wish to add users directly within an organization without
requiring them to accept an invitation. For example, a member of an
organization can add only other organization members but not public
users.
Inputs:
users - comma seperates list of users to add to a group
Output:
A JSON array of usernames that were not added.
"""
url = self._url + "/addUsers"
params = {
"f" : "json",
"users" : users,
}
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def groupUsers(self):
"""
Lists the users, owner, and administrators of a given group. Only
available to members or administrators of the group.
"""
params = {
"f" : "json"
}
return self._get(url=self._url + "/users",
securityHandler=self._securityHandler,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
@property
def applications(self):
"""returns all the group applications to join"""
url = self._url + "/applications"
params = {"f" : "json"}
res = self._get(url=url,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
items = []
if "applications" in res.keys():
for apps in res['applications']:
items.append(
self.Application(url="%s/%s" % (self._url, apps['username']),
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
)
return items
    ########################################################################
    class Application(BaseAGOLClass):
        """Represents a single group application to join a site.

        Property values are fetched lazily: each accessor triggers a
        __init() round trip the first time its cached slot is still None.
        """
        # connection state
        _url = None
        _securityHandler = None
        _proxy_url = None
        _proxy_port = None
        # lazily-loaded application values; None means "not fetched yet"
        _fullname = None
        _received = None
        _username = None
        _json = None
        _json_dict = None
        #----------------------------------------------------------------------
        def __init__(self, url, securityHandler,
                     proxy_url=None, proxy_port=None,
                     initialize=False):
            """Constructor"""
            self._url = url
            self._securityHandler = securityHandler
            if not securityHandler is None:
                self._referer_url = securityHandler.referer_url
            self._proxy_port = proxy_port
            self._proxy_url = proxy_url
            if initialize:
                # eagerly fetch the application's JSON
                self.__init()
        #----------------------------------------------------------------------
        def __init(self):
            """loads the property data into the class"""
            params = {
                "f" : "json"
            }
            json_dict = self._get(url=self._url,
                                  param_dict=params,
                                  securityHandler=self._securityHandler,
                                  proxy_port=self._proxy_port,
                                  proxy_url=self._proxy_url)
            self._json_dict = json_dict
            self._json = json.dumps(json_dict)
            # Reflectively collect the public attribute names and copy each
            # matching response key onto its "_"-prefixed cache slot.
            attributes = [attr for attr in dir(self)
                          if not attr.startswith('__') and \
                          not attr.startswith('_')]
            for k,v in json_dict.items():
                if k in attributes:
                    setattr(self, "_"+ k, json_dict[k])
                else:
                    # unknown server key: surface it rather than drop silently
                    print (k, " - attribute not implemented in Group.Application class.")
        #----------------------------------------------------------------------
        @property
        def username(self):
            """gets the application username"""
            if self._username is None:
                self.__init()
            return self._username
        #----------------------------------------------------------------------
        @property
        def fullname(self):
            """gets the user's full name"""
            if self._fullname is None:
                self.__init()
            return self._fullname
        #----------------------------------------------------------------------
        @property
        def received(self):
            """gets the UTC timestamp when the application was submitted"""
            if self._received is None:
                self.__init()
            return self._received
        #----------------------------------------------------------------------
        @property
        def root(self):
            """returns the current url of the class"""
            return self._url
        #----------------------------------------------------------------------
        def __str__(self):
            """returns object as string (raw JSON, fetched on first use)"""
            if self._json is None:
                self.__init()
            return self._json
        #----------------------------------------------------------------------
        def __iter__(self):
            """returns JSON as [key,value] objects"""
            if self._json_dict is None:
                self.__init()
            for k,v in self._json_dict.items():
                yield [k,v]
        #----------------------------------------------------------------------
        def accept(self):
            """
            When a user applies to join a group, a group application is
            created. Group administrators choose to accept this application
            using the Accept Group Application operation (POST only). This
            operation adds the applying user to the group then deletes the
            application. This operation also creates a notification for the
            user indicating that the user's group application was accepted.
            Available only to group owners and admins.
            """
            params = {
                "f" : "json",
            }
            return self._post(url="%s/accept" % (self.root),
                              param_dict=params,
                              securityHandler=self._securityHandler,
                              proxy_url=self._proxy_url,
                              proxy_port=self._proxy_port)
        #----------------------------------------------------------------------
        def decline(self):
            """
            When a user applies to join a group, a group application is created
            Group administrators can decline this application using the Decline
            Group Application operation (POST only). This operation deletes the
            application and creates a notification for the user indicating that
            the user's group application was declined. The applying user will
            not be added to the group. Available only to group owners and
            admins.
            """
            params = {
                "f" : "json",
            }
            return self._post(url="%s/decline" % self.root,
                              param_dict=params,
                              securityHandler=self._securityHandler,
                              proxy_url=self._proxy_url,
                              proxy_port=self._proxy_port)
########################################################################
class Users(BaseAGOLClass):
    """represents the users on a given portal or agol system"""
    # connection state
    _url = None
    _securityHandler = None
    _proxy_url = None
    _proxy_port = None
    #----------------------------------------------------------------------
    def __init__(self, url, securityHandler,
                 proxy_url=None, proxy_port=None):
        """Constructor

        Inputs:
           url - community REST endpoint; "/users" is appended if missing
           securityHandler - handler supplying credentials/referer
           proxy_url/proxy_port - optional proxy settings
        """
        if url.endswith('/users'):
            self._url = url
        else:
            self._url = url + "/users"
        self._securityHandler = securityHandler
        self._proxy_url = proxy_url
        self._proxy_port = proxy_port
    #----------------------------------------------------------------------
    @property
    def root(self):
        """gets the url for the class"""
        return self._url
    #----------------------------------------------------------------------
    def __str__(self):
        """gets the object as a string (user list)"""
        # NOTE(review): intentionally returns an empty string; no user-list
        # rendering is implemented here.
        return ""
    #----------------------------------------------------------------------
    def search(self,
               q,
               start=1,
               num=10,
               sortField="username",
               sortOrder="asc"):
        """
        The User Search operation searches for users in the portal. The
        search index is updated whenever users are created, updated, or
        deleted. There can be a lag between the time that the user is
        updated and the time when it's reflected in the search results. The
        results only contain users that the calling user has permissions to
        see. Users can control this visibility by changing the access
        property of their user.
        Inputs:
           q -The query string to search the users against.
           start - The number of the first entry in the result set response.
                   The index number is 1-based. The default value of start is
                   1 (for example, the first search result). The start
                   parameter, along with the num parameter can be used to
                   paginate the search results.
           num - The maximum number of results to be included in the result
                 set response. The default value is 10, and the maximum
                 allowed value is 100. The start parameter, along with the num
                 parameter can be used to paginate the search results. The
                 actual number of returned results may be less than num. This
                 happens when the number of results remaining after start is
                 less than num.
           sortField - Field to sort by. The allowed field names are username
                       and created.
           sortOrder - Describes whether the returned results are in ascending
                       or descending order. Default is ascending.
                       Values: asc | desc
        """
        params = {
            "f" : "json",
            "q" : q,
            "start" : start,
            "num" : num,
            "sortField" : sortField,
            "sortOrder" : sortOrder
        }
        url = self._url
        return self._get(
            url = url,
            param_dict=params,
            securityHandler=self._securityHandler,
            proxy_url=self._proxy_url,
            proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def __getUsername(self):
        """tries to parse the user name from various objects"""
        # 1) the security handler already knows the username
        if self._securityHandler is not None and \
           not self._securityHandler._username is None:
            return self._securityHandler._username
        # 2) derive it from the organization's "portal self" resource
        elif self._securityHandler is not None and \
             hasattr(self._securityHandler, "org_url") and \
             self._securityHandler.org_url is not None:
            from .administration import Administration
            user = Administration(url=self._securityHandler.org_url,
                                  securityHandler=self._securityHandler,
                                  proxy_url=self._proxy_url,
                                  proxy_port=self._proxy_port).portals.portalSelf.user
            return user['username']
        else:
            # 3) fall back to querying the site root for the portal self user.
            # NOTE(review): splitting on '/content/' looks copied from the
            # content module; a community URL contains '/community/', so this
            # split may be a no-op here -- confirm against callers.
            from .administration import Administration
            url = self._url.lower().split('/content/')[0]
            user = Administration(url=url,
                                  securityHandler=self._securityHandler,
                                  proxy_url=self._proxy_url,
                                  proxy_port=self._proxy_port).portals.portalSelf.user
            return user['username']
    #----------------------------------------------------------------------
    def user(self, username=None):
        """A user resource that represents a registered user in the portal.

        Inputs:
           username - optional; when omitted, the username is derived from
                      the security handler / portal self resource.
        """
        if username is None:
            username = self.__getUsername()
        url = self.root + "/%s" % username
        return User(url=url,
                    securityHandler=self._securityHandler,
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port,
                    initialize=True)
########################################################################
class User(BaseAGOLClass):
    """
    A user resource that represents a registered user in the portal.
    """
    # connection state
    _url = None
    _securityHandler = None
    _proxy_port = None
    _proxy_url = None
    # lazily-loaded profile values populated by __init(); None means
    # "not fetched yet" (each property getter re-fetches while its slot
    # is still None)
    _disabled = None
    _culture = None
    _storageUsage = None
    _favGroupId = None
    _privileges = None
    _access = None
    _role = None
    _idpUsername = None
    _provider = None
    _units = None
    _mfaEnabled = None
    _email = None
    _username = None
    _storageQuota = None
    _description = None
    _tags = None
    _groups = None
    _fullName = None
    _userType = None
    _created = None
    _region = None
    _modified = None
    _thumbnail = None
    _orgId = None
    _preferredView = None
    _lastLogin = None
    _validateUserProfile = None
    _assignedCredits = None
    _availableCredits = None
    _firstName = None
    _lastName = None
    _clientApps = None
    _accountId = None
    _privacy = None
    _defaultGroupId = None
    _organization = None
    _roleid = None
#----------------------------------------------------------------------
def __init__(self,
url,
securityHandler,
proxy_url=None,
proxy_port=None,
initialize=False):
"""Constructor"""
self._url = url
self._securityHandler = securityHandler
if not securityHandler is None:
self._referer_url = securityHandler.referer_url
self._proxy_port = proxy_port
self._proxy_url = proxy_url
if initialize:
self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """loads the property data into the class"""
        params = {
            "f" : "json"
        }
        json_dict = self._get(url=self._url,
                              param_dict=params,
                              securityHandler=self._securityHandler,
                              proxy_port=self._proxy_port,
                              proxy_url=self._proxy_url)
        self._json_dict = json_dict
        self._json = json.dumps(json_dict)
        # Reflectively collect the public attribute names and copy each
        # matching response key onto its "_"-prefixed cache slot.
        attributes = [attr for attr in dir(self)
                      if not attr.startswith('__') and \
                      not attr.startswith('_')]
        for k,v in json_dict.items():
            if k in attributes:
                setattr(self, "_"+ k, json_dict[k])
            else:
                # unknown server key: surface it rather than drop silently
                print( k, " - attribute not implemented in User class.")
#----------------------------------------------------------------------
@property
def root(self):
"""gets the url of the class"""
return self._url
#----------------------------------------------------------------------
def __str__(self):
"""returns object as raw string"""
if self._json is None:
self.__init()
return self._json
#----------------------------------------------------------------------
def __iter__(self):
"""iterates through json and returns values as [key, value]"""
if self._json_dict is None:
self._json_dict = {}
self.__init()
for k,v in self._json_dict.items():
yield [k,v]
#----------------------------------------------------------------------
@property
def userContent(self):
"""allows access into the individual user's content to get at the
items owned by the current user"""
replace_start = self._url.lower().find("/community/")
len_replace = len("/community/")
url = self._url.replace(self._url[replace_start:replace_start+len_replace],
'/content/')
from ._content import User as UserContent
return UserContent(url=url,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
    @property
    def lastName(self):
        '''gets the property value for lastName'''
        if self._lastName is None:
            self.__init()
        return self._lastName
    @property
    def firstName(self):
        '''gets the property value for firstName'''
        if self._firstName is None:
            self.__init()
        return self._firstName
    # -- lazily-populated profile properties --------------------------------
    # Each getter below returns its cached value, triggering a fetch via
    # self.__init() on first access.  NOTE(review): a cached value of None
    # is indistinguishable from "not yet fetched", so a property whose
    # server value is null re-fetches on every access.
    #----------------------------------------------------------------------
    @property
    def assignedCredits(self):
        """returns the assignedCredits value"""
        if self._assignedCredits is None:
            self.__init()
        return self._assignedCredits
    #----------------------------------------------------------------------
    @property
    def availableCredits(self):
        """gets the availableCredits value"""
        if self._availableCredits is None:
            self.__init()
        return self._availableCredits
    #----------------------------------------------------------------------
    @property
    def disabled(self):
        '''gets disabled value'''
        if self._disabled is None:
            self.__init()
        return self._disabled
    #----------------------------------------------------------------------
    @property
    def culture(self):
        '''gets culture value'''
        if self._culture is None:
            self.__init()
        return self._culture
    #----------------------------------------------------------------------
    @property
    def storageUsage(self):
        '''gets storageUsage value'''
        if self._storageUsage is None:
            self.__init()
        return self._storageUsage
    #----------------------------------------------------------------------
    @property
    def favGroupId(self):
        '''gets favGroupId value'''
        if self._favGroupId is None:
            self.__init()
        return self._favGroupId
    #----------------------------------------------------------------------
    @property
    def privileges(self):
        '''gets privileges value'''
        if self._privileges is None:
            self.__init()
        return self._privileges
    #----------------------------------------------------------------------
    @property
    def access(self):
        '''gets access value'''
        if self._access is None:
            self.__init()
        return self._access
    #----------------------------------------------------------------------
    @property
    def role(self):
        '''gets role value'''
        if self._role is None:
            self.__init()
        return self._role
    #----------------------------------------------------------------------
    @property
    def idpUsername(self):
        '''gets idpUsername value'''
        if self._idpUsername is None:
            self.__init()
        return self._idpUsername
    #----------------------------------------------------------------------
    @property
    def provider(self):
        '''gets provider value'''
        if self._provider is None:
            self.__init()
        return self._provider
    #----------------------------------------------------------------------
    @property
    def units(self):
        '''gets units value'''
        if self._units is None:
            self.__init()
        return self._units
    #----------------------------------------------------------------------
    @property
    def mfaEnabled(self):
        '''gets mfaEnabled value'''
        if self._mfaEnabled is None:
            self.__init()
        return self._mfaEnabled
    #----------------------------------------------------------------------
    @property
    def email(self):
        '''gets email value'''
        if self._email is None:
            self.__init()
        return self._email
    #----------------------------------------------------------------------
    @property
    def username(self):
        '''gets username value'''
        if self._username is None:
            self.__init()
        return self._username
    #----------------------------------------------------------------------
    @property
    def storageQuota(self):
        '''gets storageQuota value'''
        if self._storageQuota is None:
            self.__init()
        return self._storageQuota
    #----------------------------------------------------------------------
    @property
    def description(self):
        '''gets description value'''
        if self._description is None:
            self.__init()
        return self._description
    #----------------------------------------------------------------------
    @property
    def tags(self):
        '''gets tags value'''
        if self._tags is None:
            self.__init()
        return self._tags
    #----------------------------------------------------------------------
    @property
    def groups(self):
        '''gets groups value'''
        if self._groups is None:
            self.__init()
        return self._groups
    #----------------------------------------------------------------------
    @property
    def fullName(self):
        '''gets fullName value'''
        if self._fullName is None:
            self.__init()
        return self._fullName
    #----------------------------------------------------------------------
    @property
    def userType(self):
        '''gets userType value'''
        if self._userType is None:
            self.__init()
        return self._userType
    #----------------------------------------------------------------------
    @property
    def created(self):
        '''gets created value'''
        if self._created is None:
            self.__init()
        return self._created
    #----------------------------------------------------------------------
    @property
    def region(self):
        '''gets region value'''
        if self._region is None:
            self.__init()
        return self._region
    @property
    def roleid(self):
        '''gets the roleid value'''
        if self._roleid is None:
            self.__init()
        return self._roleid
    #----------------------------------------------------------------------
    @property
    def modified(self):
        '''gets modified value'''
        if self._modified is None:
            self.__init()
        return self._modified
    #----------------------------------------------------------------------
    @property
    def thumbnail(self):
        '''gets thumbnail value'''
        if self._thumbnail is None:
            self.__init()
        return self._thumbnail
    #----------------------------------------------------------------------
    @property
    def clientApps(self):
        '''gets clientApps value'''
        if self._clientApps is None:
            self.__init()
        return self._clientApps
    #----------------------------------------------------------------------
    @property
    def accountId(self):
        '''gets accountId value'''
        if self._accountId is None:
            self.__init()
        return self._accountId
    #----------------------------------------------------------------------
    @property
    def privacy(self):
        '''gets privacy value'''
        if self._privacy is None:
            self.__init()
        return self._privacy
    #----------------------------------------------------------------------
    @property
    def defaultGroupId(self):
        '''gets defaultGroupId value'''
        if self._defaultGroupId is None:
            self.__init()
        return self._defaultGroupId
    #----------------------------------------------------------------------
    @property
    def organization(self):
        '''gets organization value'''
        if self._organization is None:
            self.__init()
        return self._organization
    #----------------------------------------------------------------------
    @property
    def orgId(self):
        '''gets orgId value'''
        if self._orgId is None:
            self.__init()
        return self._orgId
    #----------------------------------------------------------------------
    @property
    def preferredView(self):
        '''gets preferredView value'''
        if self._preferredView is None:
            self.__init()
        return self._preferredView
    #----------------------------------------------------------------------
    @property
    def lastLogin(self):
        '''gets lastLogin value'''
        if self._lastLogin is None:
            self.__init()
        return self._lastLogin
    #----------------------------------------------------------------------
    @property
    def validateUserProfile(self):
        '''gets validateUserProfile value'''
        if self._validateUserProfile is None:
            self.__init()
        return self._validateUserProfile
#----------------------------------------------------------------------
@property
def userTags(self):
"""gets a list of current user's tags"""
url = "%s/tags" % self.root
params = {
"f" : "json"
}
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
@property
def invitations(self):
"""returns a class to access the current user's invitations"""
url = "%s/invitations" % self.root
return Invitations(url=url,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
@property
def notifications(self):
"""The notifications that are available for the given user.
Notifications are events that need the user's attention-application
for joining a group administered by the user, acceptance of a group
membership application, and so on. A notification is initially
marked as new. The user can mark it as read or delete the notification.
"""
params = {"f": "json"}
url = "%s/notifications" % self.root
return Notifications(url=url,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def invalidateSessions(self):
"""
forces a given user to have to re-login into portal/agol
"""
url = "%s/invalidateSessions" % self.root
params = {"f": "json"}
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def resetPassword(self, email=True):
"""
resets a users password for an account. The password will be randomly
generated and emailed by the system.
Input:
email - boolean that an email password will be sent to the
user's profile email address. The default is True.
"""
url = self.root + "/reset"
params = {
"f" : "json",
"email" : email
}
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def expirePassword(self,
hours="now"):
"""sets a time when a user must reset their password"""
params = {
"f" : "json"
}
expiration = -1
if isinstance(hours, str):
if expiration == "now":
expiration = -1
elif expiration == "never":
expiration = 0
else:
expiration = -1
elif isinstance(expiration, (int, long)):
dt = datetime.now() + timedelta(hours=hours)
expiration = local_time_to_online(dt=dt)
else:
expiration = -1
params['expiration'] = expiration
url = "%s/expirePassword" % self.root
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def disable(self):
"""
The Disable operation (POST only) disables login access for the
user. It is only available to the administrator of the organization.
"""
params = {
"f" : "json"
}
url = "%s/disable" % self.root
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def enable(self):
"""
The Enable operation (POST only) enables login access for the user.
It is only available to the administrator of the organization.
Inputs:
username - username to disable
"""
params = {
"f" : "json"
}
url = self.root + "/enable"
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def update(self,
clearEmptyFields=None,
tags=None,
thumbnail=None,
password=None,
fullname=None,
email=None,
securityQuestionIdx=None,
securityAnswer=None,
culture=None,
region=None,
userType=None
):
"""
The Update User operation (POST only) modifies properties such as
description, preferred view, tags, access, and thumbnail. The user
name cannot be modified. For the "ecas" identity provider, password,
e-mail, and full name must be modified by editing your Esri Global
Account. For the "arcgis" identity provider, password, full name,
security question, and security answer can be updated with Update
User. Update User is available only to the user or to the
administrator of the user's organization.
Only the properties that are to be updated need to be specified in
the request. Properties not specified will not be affected.
Inputs:
clearEmptyFields - Clears any fields that are passed in empty (for
example, description, tags).
tags - Tags are words or short phrases that describe the user.
Separate terms with commas.
Example: tags=GIS Analyst, Redlands, cloud GIS
thumbnail - Enter the pathname to the thumbnail image to be used
for the user. The recommended image size is 200 pixels
wide by 133 pixels high. Acceptable image formats are
PNG, GIF, and JPEG. The maximum file size for an image
is 1 MB. This is not a reference to the file but the
file itself, which will be stored on the sharing
servers.
Example: thumbnail=subfolder/thumbnail.jpg
password -Password for the user. Only applicable for the arcgis
identity provider.
fullname - The full name of the user. Only applicable for the
arcgis identity provider.
email - The e-mail address of the user. Only applicable for the
arcgis identity provider.
securityQuestionIdx - The index of the security question the user
wants to use. The security question is used for password
recovery. Only applicable for the arcgis identity provider.
securityAnswer - The answer to the security question for the user.
Only applicable for the arcgis identity provider.
culture - Specifies the locale for which content is returned. The
browser/machine language setting is the default.
Authenticated users can set the culture and overwrite the
browser/machine language setting.
region - Specifies the region of featured maps and apps and the
basemap gallery.
userType - if the value is set to "both", then the value will allow
users to access both ArcGIS Org and the forums from this
account. 'arcgisorg' means the account is only valid
for the organizational site. This is an AGOL only
parameter.
"""
params = {
"f" : "json"
}
if region is not None:
params['region'] = region
if culture is not None:
params['culture'] = culture
if clearEmptyFields is not None:
params['clearEmptyFields'] = clearEmptyFields
if tags is not None:
params['tags'] = tags
if password is not None:
params['password'] = password
if fullname is not None:
params['fullname'] = fullname
if email is not None:
params['email'] = email
if securityQuestionIdx is not None:
params['securityQuestionIdx'] = securityQuestionIdx
if securityAnswer is not None:
params['securityAnswer'] = securityAnswer
if userType is not None and \
userType.lower() in ['both', 'arcgisorg']:
params['userType'] = userType.lower()
files = {}
url = "%s/update" % self.root
if thumbnail is not None and \
os.path.isfile(thumbnail):
files.append(('thumbnail', thumbnail, os.path.basename(thumbnail)))
res = None
if thumbnail is not None and \
os.path.isfile(thumbnail):
res = self._post(url=url,
param_dict=params,
files=files,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
else:
res = self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
self.__init()
return res
#----------------------------------------------------------------------
def delete(self):
"""
The Delete User operation (POST only) is available only to the user
in question or to the administrator of the user's organization.
If deleting a user who is part of an organization, their content
and groups must be transferred to another member or deleted prior
to deleting the user.
If the user is not part of an organization, all content and groups
of the user must first be deleted.
Deleting a user whose identity provider is the Esri Global Account
will not delete the user from the Esri Global Account system.
"""
params = {
"f" : "json"
}
url = self.root + "/delete"
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
########################################################################
class Invitations(BaseAGOLClass):
    """Manages the invitations sent to the authenticated user."""
    # connection state
    _url = None
    _securityHandler = None
    _proxy_port = None
    _proxy_url = None
    # cached JSON of the invitations listing; None until fetched
    _json = None
    _json_dict = None
    _userInvitations = None
    class Invitation(BaseAGOLClass):
        """Represents a single invitation for a given user.

        Property values are fetched lazily: each accessor triggers a
        __init() round trip the first time its cached slot is still None.
        """
        # connection state
        _url = None
        _securityHandler = None
        _proxy_port = None
        _proxy_url = None
        # lazily-loaded invitation values; None means "not fetched yet"
        _json = None
        _json_dict = None
        _username = None
        _targetType = None
        _fromUsername = None
        _created = None
        _mustApprove = None
        _received = None
        _targetId = None
        _id = None
        _dateAccepted = None
        _role = None
        _expiration = None
        _group = None
        _accepted = None
        _type = None
        _email = None
        #----------------------------------------------------------------------
        def __init__(self,
                     url,
                     securityHandler,
                     proxy_url=None,
                     proxy_port=None,
                     initialize=False):
            """Constructor"""
            self._url = url
            self._securityHandler = securityHandler
            if not securityHandler is None:
                self._referer_url = securityHandler.referer_url
            self._proxy_port = proxy_port
            self._proxy_url = proxy_url
            if initialize:
                # eagerly fetch the invitation's JSON
                self.__init()
        #----------------------------------------------------------------------
        def __init(self):
            """loads the property data into the class"""
            params = {
                "f" : "json"
            }
            json_dict = self._get(url=self._url,
                                  param_dict=params,
                                  securityHandler=self._securityHandler,
                                  proxy_port=self._proxy_port,
                                  proxy_url=self._proxy_url)
            self._json_dict = json_dict
            self._json = json.dumps(json_dict)
            # Reflectively collect the public attribute names and copy each
            # matching response key onto its "_"-prefixed cache slot.
            attributes = [attr for attr in dir(self)
                          if not attr.startswith('__') and \
                          not attr.startswith('_')]
            for k,v in json_dict.items():
                if k in attributes:
                    setattr(self, "_"+ k, json_dict[k])
                else:
                    # unknown server key: surface it rather than drop silently
                    print( k, " - attribute not implemented in Invitation class.")
        #----------------------------------------------------------------------
        @property
        def root(self):
            """returns the current url of the class"""
            return self._url
        #----------------------------------------------------------------------
        def __str__(self):
            """returns object as string (raw JSON, fetched on first use)"""
            if self._json is None:
                self.__init()
            return self._json
        #----------------------------------------------------------------------
        def __iter__(self):
            """returns JSON as [key,value] objects"""
            if self._json_dict is None:
                self.__init()
            for k,v in self._json_dict.items():
                yield [k,v]
        #----------------------------------------------------------------------
        @property
        def username(self):
            '''gets the property value for username'''
            if self._username is None:
                self.__init()
            return self._username
        #----------------------------------------------------------------------
        @property
        def targetType(self):
            '''gets the property value for targetType'''
            if self._targetType is None:
                self.__init()
            return self._targetType
        #----------------------------------------------------------------------
        @property
        def fromUsername(self):
            '''gets the property value for fromUsername'''
            if self._fromUsername is None:
                self.__init()
            return self._fromUsername
        #----------------------------------------------------------------------
        @property
        def created(self):
            '''gets the property value for created'''
            if self._created is None:
                self.__init()
            return self._created
        #----------------------------------------------------------------------
        @property
        def mustApprove(self):
            '''gets the property value for mustApprove'''
            if self._mustApprove is None:
                self.__init()
            return self._mustApprove
        #----------------------------------------------------------------------
        @property
        def received(self):
            '''gets the property value for received'''
            if self._received is None:
                self.__init()
            return self._received
        #----------------------------------------------------------------------
        @property
        def targetId(self):
            '''gets the property value for targetId'''
            if self._targetId is None:
                self.__init()
            return self._targetId
        #----------------------------------------------------------------------
        @property
        def id(self):
            '''gets the property value for id'''
            if self._id is None:
                self.__init()
            return self._id
        #----------------------------------------------------------------------
        @property
        def dateAccepted(self):
            '''gets the property value for dateAccepted'''
            if self._dateAccepted is None:
                self.__init()
            return self._dateAccepted
        #----------------------------------------------------------------------
        @property
        def role(self):
            '''gets the property value for role'''
            if self._role is None:
                self.__init()
            return self._role
        #----------------------------------------------------------------------
        @property
        def expiration(self):
            '''gets the property value for expiration'''
            if self._expiration is None:
                self.__init()
            return self._expiration
        #----------------------------------------------------------------------
        @property
        def group(self):
            '''gets the property value for group'''
            if self._group is None:
                self.__init()
            return self._group
        #----------------------------------------------------------------------
        @property
        def accepted(self):
            '''gets the property value for accepted'''
            if self._accepted is None:
                self.__init()
            return self._accepted
        #----------------------------------------------------------------------
        @property
        def type(self):
            '''gets the property value for type'''
            if self._type is None:
                self.__init()
            return self._type
        #----------------------------------------------------------------------
        @property
        def email(self):
            '''gets the property value for email'''
            if self._email is None:
                self.__init()
            return self._email
#----------------------------------------------------------------------
def __init__(self,
url,
securityHandler,
proxy_url=None,
proxy_port=None,
initialize=False):
"""Constructor"""
self._url = url
self._securityHandler = securityHandler
if not securityHandler is None:
self._referer_url = securityHandler.referer_url
self._proxy_port = proxy_port
self._proxy_url = proxy_url
if initialize:
self.__init()
#----------------------------------------------------------------------
def __init(self):
"""loads the property data into the class"""
params = {
"f" : "json"
}
json_dict = self._get(url=self._url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
self._json_dict = json_dict
self._json = json.dumps(json_dict)
attributes = [attr for attr in dir(self)
if not attr.startswith('__') and \
not attr.startswith('_')]
for k,v in json_dict.items():
if k in attributes:
setattr(self, "_"+ k, json_dict[k])
else:
print( k, " - attribute not implemented in Invitations class.")
#----------------------------------------------------------------------
    @property
    def root(self):
        """The resource URL this object was constructed with (read-only)."""
        return self._url
#----------------------------------------------------------------------
def __str__(self):
"""returns object as string"""
if self._json is None:
self.__init()
return self._json
#----------------------------------------------------------------------
def __iter__(self):
"""returns JSON as [key,value] objects"""
if self._json_dict is None:
self.__init()
for k,v in self._json_dict.items():
yield [k,v]
#----------------------------------------------------------------------
@property
def userInvitations(self):
"""gets all user invitations"""
self.__init()
items = []
for n in self._userInvitations:
if "id" in n:
url = "%s/%s" % (self.root, n['id'])
items.append(self.Invitation(url=url,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port,
initialize=True))
return items
########################################################################
class Notifications(BaseAGOLClass):
    """
    A user notification resource available only to the user in question. A
    notification has the following fields:
    {id : string, type : enum, data: string, status : enum }
    Status is either new or read.
    Type is the type of notification, e.g., application to join group or
    invitation to join group.
    """
    # lazily loaded state: None means "not fetched yet"
    _url = None
    _securityHandler = None
    _proxy_port = None
    _proxy_url = None
    _json = None
    _json_dict = None
    _notifications = None
    class Notification(BaseAGOLClass):
        """represents a single notification inside the notification list"""
        # lazily loaded state: None means "not fetched yet"
        _url = None
        _securityHandler = None
        _proxy_port = None
        _proxy_url = None
        _json = None
        _json_dict = None
        _targetType = None
        _target = None
        _received = None
        _data = None
        _type = None
        _id = None
        def __init__(self, url, securityHandler,
                     proxy_url=None, proxy_port=None,
                     initialize=False):
            """Constructor

            Inputs:
               url - URL of this single notification resource
               securityHandler - AGOL/Portal security handler
               proxy_url - optional URL of a proxy
               proxy_port - optional port of a proxy
               initialize - if True, fetch the resource immediately instead
                            of lazily on first property access
            """
            self._url = url
            self._securityHandler = securityHandler
            # PEP 8 idiom: "is not None" rather than "not ... is None"
            if securityHandler is not None:
                self._referer_url = securityHandler.referer_url
            self._proxy_port = proxy_port
            self._proxy_url = proxy_url
            if initialize:
                self.__init()
        #----------------------------------------------------------------------
        def __init(self):
            """loads the property data into the class"""
            params = {
                "f" : "json"
            }
            json_dict = self._get(url=self._url,
                                  param_dict=params,
                                  securityHandler=self._securityHandler,
                                  proxy_port=self._proxy_port,
                                  proxy_url=self._proxy_url)
            self._json_dict = json_dict
            self._json = json.dumps(json_dict)
            # copy each returned key onto its backing field; warn on keys
            # this class does not model yet
            attributes = [attr for attr in dir(self)
                          if not attr.startswith('__') and \
                          not attr.startswith('_')]
            for k,v in json_dict.items():
                if k in attributes:
                    setattr(self, "_"+ k, json_dict[k])
                else:
                    print( k, " - attribute not implemented in Notification class.")
        #----------------------------------------------------------------------
        @property
        def targetType(self):
            '''gets property targetType (lazy-loads on first access)'''
            if self._targetType is None:
                self.__init()
            return self._targetType
        #----------------------------------------------------------------------
        @property
        def target(self):
            '''gets property target (lazy-loads on first access)'''
            if self._target is None:
                self.__init()
            return self._target
        #----------------------------------------------------------------------
        @property
        def received(self):
            '''gets property received (lazy-loads on first access)'''
            if self._received is None:
                self.__init()
            return self._received
        #----------------------------------------------------------------------
        @property
        def data(self):
            '''gets property data (lazy-loads on first access)'''
            if self._data is None:
                self.__init()
            return self._data
        #----------------------------------------------------------------------
        @property
        def type(self):
            '''gets property type (lazy-loads on first access)'''
            if self._type is None:
                self.__init()
            return self._type
        #----------------------------------------------------------------------
        @property
        def id(self):
            '''gets property id (lazy-loads on first access)'''
            if self._id is None:
                self.__init()
            return self._id
        #----------------------------------------------------------------------
        @property
        def root(self):
            """returns the current url of the class"""
            return self._url
        #----------------------------------------------------------------------
        def __str__(self):
            """returns object as string"""
            if self._json is None:
                self.__init()
            return self._json
        #----------------------------------------------------------------------
        def __iter__(self):
            """returns JSON as [key,value] objects"""
            if self._json_dict is None:
                self.__init()
            for k,v in self._json_dict.items():
                yield [k,v]
        #----------------------------------------------------------------------
        def delete(self):
            """deletes the current notification from the user"""
            url = "%s/delete" % self.root
            params = {"f":"json"}
            return self._post(url=url,
                              param_dict=params,
                              securityHandler=self._securityHandler,
                              proxy_url=self._proxy_url,
                              proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def __init__(self,
                 url,
                 securityHandler,
                 proxy_url=None,
                 proxy_port=None,
                 initialize=False):
        """Constructor

        Inputs:
           url - URL of the user's notifications resource
           securityHandler - AGOL/Portal security handler
           proxy_url - optional URL of a proxy
           proxy_port - optional port of a proxy
           initialize - if True, fetch the resource immediately instead of
                        lazily on first property access
        """
        self._url = url
        self._securityHandler = securityHandler
        # PEP 8 idiom: "is not None" rather than "not ... is None"
        if securityHandler is not None:
            self._referer_url = securityHandler.referer_url
        self._proxy_port = proxy_port
        self._proxy_url = proxy_url
        if initialize:
            self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """loads the property data into the class"""
        params = {
            "f" : "json"
        }
        json_dict = self._get(url=self._url,
                              param_dict=params,
                              securityHandler=self._securityHandler,
                              proxy_port=self._proxy_port,
                              proxy_url=self._proxy_url)
        self._json_dict = json_dict
        self._json = json.dumps(json_dict)
        # copy each returned key onto its backing field; warn on keys this
        # class does not model yet
        attributes = [attr for attr in dir(self)
                      if not attr.startswith('__') and \
                      not attr.startswith('_')]
        for k,v in json_dict.items():
            if k in attributes:
                setattr(self, "_"+ k, json_dict[k])
            else:
                print( k, " - attribute not implemented in Notifications class.")
    #----------------------------------------------------------------------
    @property
    def root(self):
        """returns the current url of the class"""
        return self._url
    #----------------------------------------------------------------------
    def __str__(self):
        """returns object as string"""
        if self._json is None:
            self.__init()
        return self._json
    #----------------------------------------------------------------------
    def __iter__(self):
        """returns JSON as [key,value] objects"""
        if self._json_dict is None:
            self.__init()
        for k,v in self._json_dict.items():
            yield [k,v]
    #----------------------------------------------------------------------
    @property
    def notifications(self):
        """gets the user's notifications (refreshed on every access)"""
        self.__init()
        items = []
        for n in self._notifications:
            if "id" in n:
                url = "%s/%s" % (self.root, n['id'])
                # NOTE(review): Notification objects are deliberately left
                # un-initialized here (no initialize=True) so listing does
                # not trigger one request per notification — presumably the
                # perf fix referenced by issue #276; confirm before changing
                items.append(self.Notification(url=url,
                                               securityHandler=self._securityHandler,
                                               proxy_url=self._proxy_url,
                                               proxy_port=self._proxy_port))
        return items
# fix for #276 - improved list user performance
from __future__ import absolute_import
from __future__ import print_function
from ..packages.six.moves import urllib_parse as urlparse
from .._abstract.abstract import BaseAGOLClass
from datetime import datetime, timedelta
from ..common.general import local_time_to_online
import json
import os
########################################################################
class Community(BaseAGOLClass):
    """
    This set of resources contains operations related to users and groups.
    """
    _baseURL = None
    _url = None
    _securityHandler = None
    _proxy_port = None
    _proxy_url = None
    #----------------------------------------------------------------------
    def __init__(self, url, securityHandler,
                 proxy_url=None,
                 proxy_port=None,
                 initialize=False):
        """Constructor

        Inputs:
           url - community REST endpoint URL
           securityHandler - AGOL/Portal security handler
           proxy_url - optional URL of a proxy
           proxy_port - optional port of a proxy
           initialize - accepted for signature consistency with the other
                        classes in this module; this class caches no state,
                        so the flag is unused
        """
        self._url = url
        self._securityHandler = securityHandler
        # PEP 8 idiom: "is not None" rather than "not ... is None"
        if securityHandler is not None:
            self._referer_url = securityHandler.referer_url
        self._proxy_port = proxy_port
        self._proxy_url = proxy_url
    #----------------------------------------------------------------------
    def __str__(self):
        """returns the raw json string from the class"""
        return ""
    #----------------------------------------------------------------------
    def __iter__(self):
        """returns the key/values of an object"""
        for k,v in {}.items():
            yield k,v
    #----------------------------------------------------------------------
    def checkUserName(self, username):
        """
        Checks if a username is able to be used.

        Inputs:
           username - name of user to create.
        Output:
           JSON as string
        """
        params = {
            "f" : "json",
            "usernames" : username
        }
        url = self._url + "/checkUsernames"
        return self._post(url=url,
                          param_dict=params,
                          proxy_url=self._proxy_url,
                          securityHandler=self._securityHandler,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    @property
    def communitySelf(self):
        """ This resource allows discovery of the current authenticated
        user identified by the token. """
        params = {
            "f" : "json",
        }
        return self._get(url=self._url + "/self",
                         param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def search(self,
               q,
               t=None,
               start=1,
               num=10,
               sortField="title",
               sortOrder="asc"):
        """
        The Group Search operation searches for groups in the portal. The
        search index is updated whenever groups and organizations are
        created, updated, or deleted. There can be a lag between the time
        that a group is updated and the time when it's reflected in the
        search results. The results only contain groups that the user has
        permission to access.

        Inputs:
           q - query string to search
           t - type search
           start - number of the first entry in response results. The
                   default is 1
           num - maximum number of results to return. The maximum is 100.
           sortField - field to sort by. Allowed values: title, owner or
                       created.
           sortOrder - Order of result values returned. Values: asc or desc
        """
        params = {
            "f" : "json",
            "q" : q,
            "num" : num,
            "start" : start
        }
        # optional parameters are only sent when supplied
        if t is not None:
            params['t'] = t
        if sortField is not None:
            params['sortField'] = sortField
        if sortOrder is not None:
            params['sortOrder'] = sortOrder
        url = self._url + "/groups"
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def getGroupIDs(self, groupNames,communityInfo=None):
        """
        This function retrieves the group IDs

        Inputs:
           groupNames - list of group names (or a single name as a string)
           communityInfo - optional pre-fetched result of communitySelf;
                           queried once here when omitted
        Output:
           list of group IDs
        """
        group_ids=[]
        if communityInfo is None:
            communityInfo = self.communitySelf
        if isinstance(groupNames,list):
            # Bug fix: materialize the upper-cased names. Under Python 3,
            # map() returns a one-shot iterator, so the first membership
            # test below would exhaust it and every later title would
            # silently fail to match.
            groupNames = [name.upper() for name in groupNames]
        else:
            groupNames = groupNames.upper()
        if 'groups' in communityInfo:
            for gp in communityInfo['groups']:
                if str(gp['title']).upper() in groupNames:
                    group_ids.append(gp['id'])
        del communityInfo
        return group_ids
    #----------------------------------------------------------------------
    def createGroup(self,
                    title,
                    tags,
                    description="",
                    snippet="",
                    phone="",
                    access="org",
                    sortField="title",
                    sortOrder="asc",
                    isViewOnly=False,
                    isInvitationOnly=False,
                    thumbnail=None):
        """
        The Create Group operation (POST only) creates a new group in the
        Portal community. Only authenticated users can create groups. The
        user who creates the group automatically becomes the owner of the
        group. The owner of the group is automatically an administrator of
        the group. The calling user provides the title for the group, while
        the group ID is generated by the system.

        Inputs:
           title - The group title must be unique for the username, and the
                   character limit is 250.
           tags - Tags are words or short phrases that describe the group.
                  Separate terms with commas.
           description - A description of the group that can be any length
           snippet - Snippet or summary of the group that has a character
                     limit of 250 characters.
           phone - group contact information
           access - Sets the access level for the group. private is the
                    default. Setting to org restricts group access to
                    members of your organization. If public, all users can
                    access the group.
                    Values: private | org |public
           sortField - Sets sort field for group items.
                       Values: title | owner | avgRating |numViews
                               | created | modified
           sortOrder - sets sort order for group items. Values: asc or desc
           isViewOnly - Allows the group owner or admin to create view-only
                        groups where members are not able to share items.
                        If members try to share, view-only groups are
                        returned in the notshared response property. false
                        is the default.
           isInvitationOnly - If true, this group will not accept join
                              requests. If false, this group does not
                              require an invitation to join. Only group
                              owners and admins can invite users to the
                              group. false is the default.
           thumbnail - Enter the pathname to the thumbnail image to be used
                       for the group. The recommended image size is 200
                       pixels wide by 133 pixels high. Acceptable image
                       formats are PNG, GIF, and JPEG. The maximum file size
                       for an image is 1 MB. This is not a reference to
                       the file but the file itself, which will be stored
                       in the Portal.
        Output:
           Group object for the newly created group (not yet initialized)
        """
        params = {
            "f" : "json",
            "title" : title,
            "description" : description,
            "snippet" : snippet,
            "tags" : tags,
            "phone" : phone,
            "access" : access,
            "sortField" : sortField,
            "sortOrder" : sortOrder,
            "isViewOnly" : isViewOnly,
            "isInvitationOnly" : isInvitationOnly
        }
        url = self._url + "/createGroup"
        # (removed an unused local that constructed self.groups for no effect)
        if thumbnail is not None and \
           os.path.isfile(thumbnail):
            res = self._post(url=url,
                             param_dict=params,
                             files={'thumbnail': thumbnail},
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
        else:
            res = self._post(url=url, param_dict=params,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
        if "group" not in res:
            raise Exception("%s" % res)
        if "id" not in res['group']:
            raise Exception("%s" % res)
        groupId = res['group']['id']
        url = "%s/groups/%s" % (self.root, groupId)
        return Group(url=url,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port,
                     initalize=False)
    #----------------------------------------------------------------------
    @property
    def root(self):
        """ returns the community root URL """
        return self._url
    #----------------------------------------------------------------------
    @property
    def groups(self):
        """ returns the group object """
        return Groups(url="%s/groups" % self.root,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port,
                      initalize=False)
    #----------------------------------------------------------------------
    @property
    def users(self):
        """
        returns the user class object for current session
        """
        return Users(url="%s/users" % self.root,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port,
                     )
########################################################################
class Groups(BaseAGOLClass):
    """
    The Group resource represents a group (for example, San Bernardino
    Fires) within the portal.
    The owner is automatically an administrator and is returned in the
    list of admins. Administrators can invite, add to, or remove
    members from a group as well as update or delete the group. The
    administrator for an organization can also reassign the group to
    another member of the organization.
    Group members can leave the group. Authenticated users can apply to
    join a group unless the group is by invitation only.
    The visibility of the group by other users is determined by the
    access property. If the group is private, no one other than the
    administrators and members of the group will be able to see it. If
    the group is shared with an organization, all members of the
    organization will be able to find it.

    Inputs:
       url - group URL to site/agol
       securityHandler - Oauth or AGOL security handler
       proxy_url - optional - URL of proxy
       proxy_port - optional - port of the proxy
    """
    _url = None
    _securityHandler = None
    _proxy_port = None
    _proxy_url = None
    _json = None
    _json_dict = None
    # cached portal context used by __iter__ (loaded by __init)
    _currentUser = None
    _portalId = None
    #----------------------------------------------------------------------
    def __init__(self,
                 url,
                 securityHandler,
                 proxy_url=None,
                 proxy_port=None,
                 initalize=False):
        """Constructor

        Note: the 'initalize' spelling (sic) is part of the public
        signature and is kept for backward compatibility.
        """
        self._url = url
        self._securityHandler = securityHandler
        # PEP 8 idiom: "is not None" rather than "not ... is None"
        if securityHandler is not None:
            self._referer_url = securityHandler.referer_url
        self._proxy_port = proxy_port
        self._proxy_url = proxy_url
        if initalize:
            self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """loads the portal id and current user name used by __iter__"""
        if self._portalId is None:
            # local import avoids a circular import at module load time
            from .administration import Administration
            portalSelf = Administration(url=self._securityHandler.org_url,
                                        securityHandler=self._securityHandler,
                                        proxy_url=self._proxy_url,
                                        proxy_port=self._proxy_port).portals.portalSelf
            self._portalId = portalSelf.id
            self._currentUser = portalSelf.user['username']
    #----------------------------------------------------------------------
    @property
    def root(self):
        """returns the url for the class"""
        return self._url
    #----------------------------------------------------------------------
    def __str__(self):
        """returns raw JSON response as string

        NOTE(review): this always returns "" — __init() populates the
        portal id, not _json.  Kept as-is to preserve behavior.
        """
        if self._json is None:
            self.__init()
        return ""
    #----------------------------------------------------------------------
    def __iter__(self):
        """returns Group objects for every group in the organization"""
        self.__init()
        q = " orgid: %s" % self._portalId
        # NOTE(review): the REST API documents 'start' as 1-based; this
        # pages from 0 — confirm the service tolerates it before changing.
        nextStart = 0
        # nextStart of -1 signals the final page of results
        while nextStart > -1:
            results = self.search(q=q, start=nextStart, num=100)
            grps = results['results']
            for grp in grps:
                yield self.group(grp['id'])
            nextStart = results['nextStart']
    #----------------------------------------------------------------------
    def search(self, q, start=1, num=10, sortField="title",
               sortOrder="asc"):
        """
        The Group Search operation searches for groups in the portal. The
        search index is updated whenever groups and organizations are
        created, updated, or deleted. There can be a lag between the time
        that a group is updated and the time when it's reflected in the
        search results. The results only contain groups that the user has
        permission to access.

        Inputs:
           q - The query string to search the groups against.
           start - The number of the first entry in the result set response.
                   The index number is 1-based. The default value of start is
                   1 (for example, the first search result).The start
                   parameter, along with the num parameter, can be used to
                   paginate the search results.
           num - The maximum number of results to be included in the result
                 set response.The start parameter, along with the num
                 parameter, can be used to paginate the search results. The
                 actual number of returned results may be less than num. This
                 happens when the number of results remaining after start is
                 less than num.
           sortField - Field to sort by. The allowed field names are title,
                       owner, and created.
           sortOrder - Describes whether order returns in ascending or
                       descending order. Default is ascending.
                       Values: asc | desc
        """
        params = {
            "f" : "json",
            "q" : q,
            "start" : start,
            "num" : num,
            "sortOrder" : sortOrder,
            "sortField" : sortField
        }
        return self._get(url=self._url,
                         param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def group(self, groupId):
        """
        gets a group based on it's ID
        """
        url = "%s/%s" % (self.root, groupId)
        return Group(url=url,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port,
                     initalize=False)
########################################################################
class Group(BaseAGOLClass):
"""
The Group resource represents a group (for example, San Bernardino
Fires) within the portal.
The owner is automatically an administrator and is returned in the
list of admins. Administrators can invite, add to, or remove
members from a group as well as update or delete the group. The
administrator for an organization can also reassign the group to
another member of the organization.
Group members can leave the group. Authenticated users can apply to
join a group unless the group is by invitation only.
The visibility of the group by other users is determined by the
access property. If the group is private, no one other than the
administrators and members of the group will be able to see it. If
the group is shared with an organization, all members of the
organization will be able to find it.
"""
_url = None
_securityHandler = None
_proxy_port = None
_proxy_url = None
_json = None
_json_dict = None
_snippet = None
_isFav = None
_description = None
_title = None
_isReadOnly = None
_sortField = None
_id = None
_isViewOnly = None
_modified = None
_created = None
_access = None
_phone = None
_providerGroupName = None
_sortOrder = None
_provider = None
_owner = None
_userMembership = None
_isInvitationOnly = None
_thumbnail = None
_featuredItemsId = None
_isPublic = None
_isOrganization = None
_tags = None
_capabilities = None
#----------------------------------------------------------------------
def __init__(self,
url,
securityHandler,
proxy_url=None,
proxy_port=None,
initalize=False):
"""Constructor"""
self._url = url
self._securityHandler = securityHandler
if not securityHandler is None:
self._referer_url = securityHandler.referer_url
self._proxy_port = proxy_port
self._proxy_url = proxy_url
if initalize:
self.__init()
#----------------------------------------------------------------------
def __init(self):
"""loads the property data into the class"""
params = {
"f" : "json"
}
json_dict = self._get(url=self._url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
self._json_dict = json_dict
self._json = json.dumps(json_dict)
attributes = [attr for attr in dir(self)
if not attr.startswith('__') and \
not attr.startswith('_')]
for k,v in json_dict.items():
if k in attributes:
setattr(self, "_"+ k, json_dict[k])
else:
print (k, " - attribute not implemented in Group class.")
#----------------------------------------------------------------------
def __str__(self):
"""returns raw JSON response as string"""
if self._json is None:
self.__init()
return self._json
#----------------------------------------------------------------------
def __iter__(self):
"""returns properties (key/values) from the JSON response"""
if self._json_dict is None:
self.__init()
for k,v in self._json_dict.items():
yield [k,v]
#----------------------------------------------------------------------
    # Lazily loaded REST properties: each getter triggers __init() (one
    # round-trip to the service) the first time its backing field is None.
    @property
    def capabilities(self):
        '''gets the property value for capabilities'''
        if self._capabilities is None:
            self.__init()
        return self._capabilities
    #----------------------------------------------------------------------
    @property
    def snippet(self):
        '''gets the property value for snippet'''
        if self._snippet is None:
            self.__init()
        return self._snippet
    #----------------------------------------------------------------------
    @property
    def isFav(self):
        '''gets the property value for isFav'''
        if self._isFav is None:
            self.__init()
        return self._isFav
    #----------------------------------------------------------------------
    @property
    def description(self):
        '''gets the property value for description'''
        if self._description is None:
            self.__init()
        return self._description
    #----------------------------------------------------------------------
    @property
    def title(self):
        '''gets the property value for title'''
        if self._title is None:
            self.__init()
        return self._title
    #----------------------------------------------------------------------
    @property
    def isReadOnly(self):
        '''gets the property value for isReadOnly'''
        if self._isReadOnly is None:
            self.__init()
        return self._isReadOnly
    #----------------------------------------------------------------------
    @property
    def sortField(self):
        '''gets the property value for sortField'''
        if self._sortField is None:
            self.__init()
        return self._sortField
    #----------------------------------------------------------------------
    @property
    def id(self):
        '''gets the property value for id'''
        if self._id is None:
            self.__init()
        return self._id
    #----------------------------------------------------------------------
    @property
    def isViewOnly(self):
        '''gets the property value for isViewOnly'''
        if self._isViewOnly is None:
            self.__init()
        return self._isViewOnly
    #----------------------------------------------------------------------
    @property
    def modified(self):
        '''gets the property value for modified'''
        if self._modified is None:
            self.__init()
        return self._modified
    #----------------------------------------------------------------------
    @property
    def created(self):
        '''gets the property value for created'''
        if self._created is None:
            self.__init()
        return self._created
    #----------------------------------------------------------------------
    @property
    def access(self):
        '''gets the property value for access'''
        if self._access is None:
            self.__init()
        return self._access
    #----------------------------------------------------------------------
    @property
    def phone(self):
        '''gets the property value for phone'''
        if self._phone is None:
            self.__init()
        return self._phone
    #----------------------------------------------------------------------
    @property
    def providerGroupName(self):
        '''gets the property value for providerGroupName'''
        if self._providerGroupName is None:
            self.__init()
        return self._providerGroupName
    #----------------------------------------------------------------------
    @property
    def sortOrder(self):
        '''gets the property value for sortOrder'''
        if self._sortOrder is None:
            self.__init()
        return self._sortOrder
    #----------------------------------------------------------------------
    @property
    def provider(self):
        '''gets the property value for provider'''
        if self._provider is None:
            self.__init()
        return self._provider
    #----------------------------------------------------------------------
    @property
    def owner(self):
        '''gets the property value for owner'''
        if self._owner is None:
            self.__init()
        return self._owner
    #----------------------------------------------------------------------
    @property
    def userMembership(self):
        '''gets the property value for userMembership'''
        if self._userMembership is None:
            self.__init()
        return self._userMembership
    #----------------------------------------------------------------------
    @property
    def isInvitationOnly(self):
        '''gets the property value for isInvitationOnly'''
        if self._isInvitationOnly is None:
            self.__init()
        return self._isInvitationOnly
    #----------------------------------------------------------------------
    @property
    def thumbnail(self):
        '''gets the property value for thumbnail'''
        if self._thumbnail is None:
            self.__init()
        return self._thumbnail
    #----------------------------------------------------------------------
    @property
    def featuredItemsId(self):
        '''gets the property value for featuredItemsId'''
        if self._featuredItemsId is None:
            self.__init()
        return self._featuredItemsId
    #----------------------------------------------------------------------
    @property
    def isPublic(self):
        '''gets the property value for isPublic'''
        if self._isPublic is None:
            self.__init()
        return self._isPublic
    #----------------------------------------------------------------------
    @property
    def isOrganization(self):
        '''gets the property value for isOrganization'''
        if self._isOrganization is None:
            self.__init()
        return self._isOrganization
    #----------------------------------------------------------------------
    @property
    def tags(self):
        '''gets the property value for tags'''
        if self._tags is None:
            self.__init()
        return self._tags
#----------------------------------------------------------------------
def reassign(self, targetUsername):
"""
The Reassign Group operation (POST only) allows the administrator
of an organization to reassign a group to another member of the
organization.
Inputs:
targetUsername - The target username of the new owner of the
group.
"""
params = {
"f" : "json",
"targetUsername" : targetUsername
}
return self._post(url=self._url + "/reassign",
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def update(self,
clearEmptyFields=True,
title=None,
description=None,
snippet=None,
tags=None,
phone=None,
access=None,
sortField=None,
sortOrder=None,
isViewOnly=None,
isInvitationOnly=None,
thumbnail=None):
"""
The Update Group operation (POST only) modifies properties such as
the group title, tags, description, sort field and order, and
member sharing capabilities. Available only to the group
administrators or to the administrator of the organization if the
user is a member.
Only the properties that are to be updated need to be specified in
the request. Properties not specified will not be affected.
The group ID cannot be modified.
Inputs:
title - The group title must be unique for the username, and the
character limit is 250.
Example: title=Redlands Fire Department
description - A description of the group that can be any length.
snippet - Snippet or summary of the group that has a character
limit of 250 characters.
tags - Tags are words or short phrases that describe the group.
Separate terms with commas.
phone - Phone is the group contact information. It can be a
combination of letters and numbers. The character limit
is 250.
access - Sets the access level for the group. private is the
default. Setting to org restricts group access to
members of your organization. If public, all users can
access the group.
Values: private | org |public
sortField - Sets sort field for group items.
Values: title | owner | avgRating |
numViews| created | modified
sortOrder - Sets sort order for group items.
Values: asc | desc
isViewOnly - Allows the group owner or admin to create view-only
groups where members are not able to share items.
If members try to share, view-only groups are
returned in the notshared response property.
Values: false | true
"""
params = {
"f" : "json"
}
if title is not None:
params['title'] = title
if description is not None:
params['description'] = description
if snippet is not None:
params['snippet'] = snippet
if tags is not None:
params['tags'] = tags
if phone is not None:
params['phone'] = phone
if access is not None:
params['access'] = access
if sortField is not None:
params['sortField'] = sortField
if isViewOnly is not None:
params['isViewOnly'] = isViewOnly
if isInvitationOnly is not None:
params['isInvitationOnly'] = isInvitationOnly
if clearEmptyFields is not None:
params['clearEmptyFields'] = clearEmptyFields
files = {}
url = self._url + "/update"
if thumbnail is not None and \
os.path.isfile(thumbnail):
files['thumbnail'] =thumbnail
res = None
if thumbnail is not None and \
os.path.isfile(thumbnail):
res = self._post(url=url,
param_dict=params,
files=files,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
return res
else:
res = self._post(url=url, param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
self.__init()
return res
#----------------------------------------------------------------------
def delete(self):
"""
deletes the current group
"""
params = {
"f" : "json",
}
return self._post(url=self._url + "/delete",
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def join(self):
"""
Users apply to join a group using the Join Group operation. This
creates a new group application, which the group administrators
accept or decline. This operation also creates a notification for
the user indicating that they have applied to join this group.
Available only to authenticated users.
Users can only apply to join groups to which they have access. If
the group is private, users will not be able to find it to ask to
join it.
Information pertaining to the applying user, such as their full
name and username, can be sent as part of the group application.
Output:
JSON response as dictionary
"""
params = {
"f" : "json",
}
return self._post(url=self._url + "/join",
securityHandler=self._securityHandler,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def invite(self, users, role, expiration=1440):
"""
A group administrator can invite users to join their group using
the Invite to Group operation. This creates a new user invitation,
which the users accept or decline. The role of the user and the
invitation expiration date can be set in the invitation.
A notification is created for the user indicating that they were
invited to join the group. Available only to authenticated users.
Inputs:
users - A comma separated list of usernames to be invited to the
group. If a user is already a member of the group or an
invitation has already been sent, the call still returns
a success.
Example: users=regularusername1,regularusername2
role - Allows administrators to set the user's role in the group
Roles are:
group_member: Ability to view and share items with
group.
group_admin: In addition to viewing and sharing items,
the group_admin has the same capabilities
as the group owner-invite users to the
group, accept or decline group
applications, delete content, and remove
users.
expiration - Expiration date on the invitation can be set for
one day, three days, one week, or two weeks, in
minutes. Default is 1440
"""
params = {
"f" : "json",
"users" : users,
"role" : role,
"expiration" : expiration
}
return self._post(url=self._url + "/invite",
securityHandler=self._securityHandler,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def leave(self):
"""
The Leave Group operation (POST only) is available to all group
members other than the group owner. Leaving a group automatically
results in the unsharing of all items the user has shared with the
group.
Output:
JSON response as a dictionary
"""
params = {
"f" : "json"
}
return self._post(url=self._url + "/leave",
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def removeUsersFromGroup(self, users):
"""
The operation to Remove Users From Group (POST only) is available
only to the group administrators, including the owner, and to the
administrator of the organization if the user is a member. Both
users and admins can be removed using this operation. Group owners
cannot be removed from the group.
Inputs:
users - A comma-separated list of usernames (both admins and
regular users) to be removed from the group.
Example: users=regularusername1,adminusername1,
adminusername2,regularusername2
"""
params = {
"f" : "json",
"users" : users
}
return self._post(url=self._url + "/removeUsers",
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def addUsersToGroups(self, users):
"""
The operation to Add Users to Group (POST only) is available only
to the group administrators, including the owner, and to the
administrator of the organization if the user is a member. Both
users and admins can be added using this operation. This is useful
if you wish to add users directly within an organization without
requiring them to accept an invitation. For example, a member of an
organization can add only other organization members but not public
users.
Inputs:
users - comma seperates list of users to add to a group
Output:
A JSON array of usernames that were not added.
"""
url = self._url + "/addUsers"
params = {
"f" : "json",
"users" : users,
}
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def groupUsers(self):
"""
Lists the users, owner, and administrators of a given group. Only
available to members or administrators of the group.
"""
params = {
"f" : "json"
}
return self._get(url=self._url + "/users",
securityHandler=self._securityHandler,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
@property
def applications(self):
"""returns all the group applications to join"""
url = self._url + "/applications"
params = {"f" : "json"}
res = self._get(url=url,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
items = []
if "applications" in res.keys():
for apps in res['applications']:
items.append(
self.Application(url="%s/%s" % (self._url, apps['username']),
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
)
return items
########################################################################
class Application(BaseAGOLClass):
"""reprsents a single group application to join a site"""
_url = None
_securityHandler = None
_proxy_url = None
_proxy_port = None
_fullname = None
_received = None
_username = None
_json = None
_json_dict = None
#----------------------------------------------------------------------
def __init__(self, url, securityHandler,
proxy_url=None, proxy_port=None,
initialize=False):
"""Constructor"""
self._url = url
self._securityHandler = securityHandler
if not securityHandler is None:
self._referer_url = securityHandler.referer_url
self._proxy_port = proxy_port
self._proxy_url = proxy_url
if initialize:
self.__init()
#----------------------------------------------------------------------
def __init(self):
"""loads the property data into the class"""
params = {
"f" : "json"
}
json_dict = self._get(url=self._url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
self._json_dict = json_dict
self._json = json.dumps(json_dict)
attributes = [attr for attr in dir(self)
if not attr.startswith('__') and \
not attr.startswith('_')]
for k,v in json_dict.items():
if k in attributes:
setattr(self, "_"+ k, json_dict[k])
else:
print (k, " - attribute not implemented in Group.Application class.")
#----------------------------------------------------------------------
@property
def username(self):
"""gets the application username"""
if self._username is None:
self.__init()
return self._username
#----------------------------------------------------------------------
@property
def fullname(self):
"""gets the user's full name"""
if self._fullname is None:
self.__init()
return self._fullname
#----------------------------------------------------------------------
@property
def received(self):
"""gets the UTC timestamp when the application was submitted"""
if self._received is None:
self.__init()
return self._received
#----------------------------------------------------------------------
@property
def root(self):
"""returns the current url of the class"""
return self._url
#----------------------------------------------------------------------
def __str__(self):
"""returns object as string"""
if self._json is None:
self.__init()
return self._json
#----------------------------------------------------------------------
def __iter__(self):
"""returns JSON as [key,value] objects"""
if self._json_dict is None:
self.__init()
for k,v in self._json_dict.items():
yield [k,v]
#----------------------------------------------------------------------
def accept(self):
"""
When a user applies to join a group, a group application is
created. Group administrators choose to accept this application
using the Accept Group Application operation (POST only). This
operation adds the applying user to the group then deletes the
application. This operation also creates a notification for the
user indicating that the user's group application was accepted.
Available only to group owners and admins.
"""
params = {
"f" : "json",
}
return self._post(url="%s/accept" % (self.root),
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def decline(self):
"""
When a user applies to join a group, a group application is created
Group administrators can decline this application using the Decline
Group Application operation (POST only). This operation deletes the
application and creates a notification for the user indicating that
the user's group application was declined. The applying user will
not be added to the group. Available only to group owners and
admins.
"""
params = {
"f" : "json",
}
return self._post(url="%s/decline" % self.root,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
########################################################################
class Users(BaseAGOLClass):
    """Represents the users resource on a given Portal or ArcGIS Online
    site. Provides user search and access to individual ``User``
    objects."""
    _url = None
    _securityHandler = None
    _proxy_url = None
    _proxy_port = None
    #----------------------------------------------------------------------
    def __init__(self, url, securityHandler,
                 proxy_url=None, proxy_port=None):
        """Constructor

        Inputs:
           url - url to the community root or the users resource;
                 "/users" is appended when not already present.
           securityHandler - handler used to authenticate requests
           proxy_url - optional proxy host
           proxy_port - optional proxy port
        """
        if url.endswith('/users'):
            self._url = url
        else:
            self._url = url + "/users"
        self._securityHandler = securityHandler
        self._proxy_url = proxy_url
        self._proxy_port = proxy_port
    #----------------------------------------------------------------------
    @property
    def root(self):
        """gets the url for the class"""
        return self._url
    #----------------------------------------------------------------------
    def __str__(self):
        """gets the object as a string (user list)"""
        return ""
    #----------------------------------------------------------------------
    def search(self,
               q,
               start=1,
               num=10,
               sortField="username",
               sortOrder="asc"):
        """
        The User Search operation searches for users in the portal. The
        search index is updated whenever users are created, updated, or
        deleted, so there can be a lag between an update and when it is
        reflected in the results. The results only contain users that
        the calling user has permission to see; users control this
        visibility through the access property of their user.
        Inputs:
           q - query string to search the users against.
           start - number of the first entry in the result set response
                   (1-based; default 1). Combine with num to paginate.
           num - maximum number of results in the response (default 10,
                 maximum 100). The actual number returned may be less
                 than num when fewer results remain after start.
           sortField - field to sort by; allowed values are username
                       and created.
           sortOrder - whether results are ascending or descending.
                       Default is ascending.
                       Values: asc | desc
        """
        params = {
            "f" : "json",
            "q" : q,
            "start" : start,
            "num" : num,
            "sortField" : sortField,
            "sortOrder" : sortOrder
        }
        url = self._url
        return self._get(
            url = url,
            param_dict=params,
            securityHandler=self._securityHandler,
            proxy_url=self._proxy_url,
            proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def __getUsername(self):
        """tries to parse the user name from various objects"""
        # Prefer the username stored directly on the security handler.
        if self._securityHandler is not None and \
           self._securityHandler._username is not None:
            return self._securityHandler._username
        # Fall back to asking the portal "self" resource who we are.
        elif self._securityHandler is not None and \
             hasattr(self._securityHandler, "org_url") and \
             self._securityHandler.org_url is not None:
            from .administration import Administration
            user = Administration(url=self._securityHandler.org_url,
                                  securityHandler=self._securityHandler,
                                  proxy_url=self._proxy_url,
                                  proxy_port=self._proxy_port).portals.portalSelf.user
            return user['username']
        else:
            # Last resort: derive the site root from this object's url.
            from .administration import Administration
            url = self._url.lower().split('/content/')[0]
            user = Administration(url=url,
                                  securityHandler=self._securityHandler,
                                  proxy_url=self._proxy_url,
                                  proxy_port=self._proxy_port).portals.portalSelf.user
            return user['username']
    #----------------------------------------------------------------------
    def user(self, username=None):
        """A user resource that represents a registered user in the portal.

        Inputs:
           username - name of the user to retrieve; when None, the
                      currently authenticated user is resolved.
        """
        if username is None:
            username = self.__getUsername()
        url = self.root + "/%s" % username
        return User(url=url,
                    securityHandler=self._securityHandler,
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port,
                    initialize=False)
########################################################################
class User(BaseAGOLClass):
    """
    A user resource that represents a registered user in the portal.
    Properties are lazy-loaded: accessing an unset property triggers a
    single query against the user's REST endpoint.
    """
    _url = None
    _securityHandler = None
    _proxy_port = None
    _proxy_url = None
    # Raw-response caches; declared at class level so __str__/__iter__
    # work even before the first load (previously missing, which raised
    # AttributeError when called on an uninitialized instance).
    _json = None
    _json_dict = None
    _disabled = None
    _culture = None
    _storageUsage = None
    _favGroupId = None
    _privileges = None
    _access = None
    _role = None
    _idpUsername = None
    _provider = None
    _units = None
    _mfaEnabled = None
    _email = None
    _username = None
    _storageQuota = None
    _description = None
    _tags = None
    _groups = None
    _fullName = None
    _userType = None
    _created = None
    _region = None
    _modified = None
    _thumbnail = None
    _orgId = None
    _preferredView = None
    _lastLogin = None
    _validateUserProfile = None
    _assignedCredits = None
    _availableCredits = None
    _firstName = None
    _lastName = None
    _clientApps = None
    _accountId = None
    _privacy = None
    _defaultGroupId = None
    _organization = None
    _roleid = None
    #----------------------------------------------------------------------
    def __init__(self,
                 url,
                 securityHandler,
                 proxy_url=None,
                 proxy_port=None,
                 initialize=False):
        """Constructor

        Inputs:
           url - url to the user resource
           securityHandler - handler used to authenticate requests
           proxy_url - optional proxy host
           proxy_port - optional proxy port
           initialize - if True, load the properties immediately
        """
        self._url = url
        self._securityHandler = securityHandler
        if securityHandler is not None:
            self._referer_url = securityHandler.referer_url
        self._proxy_port = proxy_port
        self._proxy_url = proxy_url
        if initialize:
            self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """loads the property data into the class"""
        params = {
            "f" : "json"
        }
        json_dict = self._get(url=self._url,
                              param_dict=params,
                              securityHandler=self._securityHandler,
                              proxy_port=self._proxy_port,
                              proxy_url=self._proxy_url)
        self._json_dict = json_dict
        self._json = json.dumps(json_dict)
        # Map every returned key onto the matching private attribute;
        # report keys this class does not model yet.
        attributes = [attr for attr in dir(self)
                      if not attr.startswith('__') and \
                      not attr.startswith('_')]
        for k,v in json_dict.items():
            if k in attributes:
                setattr(self, "_"+ k, json_dict[k])
            else:
                print( k, " - attribute not implemented in User class.")
    #----------------------------------------------------------------------
    @property
    def root(self):
        """gets the url of the class"""
        return self._url
    #----------------------------------------------------------------------
    def __str__(self):
        """returns object as raw string"""
        if self._json is None:
            self.__init()
        return self._json
    #----------------------------------------------------------------------
    def __iter__(self):
        """iterates through json and returns values as [key, value]"""
        if self._json_dict is None:
            self._json_dict = {}
            self.__init()
        for k,v in self._json_dict.items():
            yield [k,v]
    #----------------------------------------------------------------------
    @property
    def userContent(self):
        """allows access into the individual user's content to get at the
        items owned by the current user"""
        # Swap the '/community/' segment of this url for '/content/' to
        # reach the matching content endpoint.
        replace_start = self._url.lower().find("/community/")
        len_replace = len("/community/")
        url = self._url.replace(self._url[replace_start:replace_start+len_replace],
                                '/content/')
        from ._content import User as UserContent
        return UserContent(url=url,
                           securityHandler=self._securityHandler,
                           proxy_url=self._proxy_url,
                           proxy_port=self._proxy_port)
    @property
    def lastName(self):
        '''gets the lastName value'''
        if self._lastName is None:
            self.__init()
        return self._lastName
    @property
    def firstName(self):
        '''gets the firstName value'''
        if self._firstName is None:
            self.__init()
        return self._firstName
    #----------------------------------------------------------------------
    @property
    def assignedCredits(self):
        """returns the assignedCredits value"""
        if self._assignedCredits is None:
            self.__init()
        return self._assignedCredits
    #----------------------------------------------------------------------
    @property
    def availableCredits(self):
        """gets the availableCredits value"""
        if self._availableCredits is None:
            self.__init()
        return self._availableCredits
    #----------------------------------------------------------------------
    @property
    def disabled(self):
        '''gets disabled value'''
        if self._disabled is None:
            self.__init()
        return self._disabled
    #----------------------------------------------------------------------
    @property
    def culture(self):
        '''gets culture value'''
        if self._culture is None:
            self.__init()
        return self._culture
    #----------------------------------------------------------------------
    @property
    def storageUsage(self):
        '''gets storageUsage value'''
        if self._storageUsage is None:
            self.__init()
        return self._storageUsage
    #----------------------------------------------------------------------
    @property
    def favGroupId(self):
        '''gets favGroupId value'''
        if self._favGroupId is None:
            self.__init()
        return self._favGroupId
    #----------------------------------------------------------------------
    @property
    def privileges(self):
        '''gets privileges value'''
        if self._privileges is None:
            self.__init()
        return self._privileges
    #----------------------------------------------------------------------
    @property
    def access(self):
        '''gets access value'''
        if self._access is None:
            self.__init()
        return self._access
    #----------------------------------------------------------------------
    @property
    def role(self):
        '''gets role value'''
        if self._role is None:
            self.__init()
        return self._role
    #----------------------------------------------------------------------
    @property
    def idpUsername(self):
        '''gets idpUsername value'''
        if self._idpUsername is None:
            self.__init()
        return self._idpUsername
    #----------------------------------------------------------------------
    @property
    def provider(self):
        '''gets provider value'''
        if self._provider is None:
            self.__init()
        return self._provider
    #----------------------------------------------------------------------
    @property
    def units(self):
        '''gets units value'''
        if self._units is None:
            self.__init()
        return self._units
    #----------------------------------------------------------------------
    @property
    def mfaEnabled(self):
        '''gets mfaEnabled value'''
        if self._mfaEnabled is None:
            self.__init()
        return self._mfaEnabled
    #----------------------------------------------------------------------
    @property
    def email(self):
        '''gets email value'''
        if self._email is None:
            self.__init()
        return self._email
    #----------------------------------------------------------------------
    @property
    def username(self):
        '''gets username value'''
        if self._username is None:
            self.__init()
        return self._username
    #----------------------------------------------------------------------
    @property
    def storageQuota(self):
        '''gets storageQuota value'''
        if self._storageQuota is None:
            self.__init()
        return self._storageQuota
    #----------------------------------------------------------------------
    @property
    def description(self):
        '''gets description value'''
        if self._description is None:
            self.__init()
        return self._description
    #----------------------------------------------------------------------
    @property
    def tags(self):
        '''gets tags value'''
        if self._tags is None:
            self.__init()
        return self._tags
    #----------------------------------------------------------------------
    @property
    def groups(self):
        '''gets groups value'''
        if self._groups is None:
            self.__init()
        return self._groups
    #----------------------------------------------------------------------
    @property
    def fullName(self):
        '''gets fullName value'''
        if self._fullName is None:
            self.__init()
        return self._fullName
    #----------------------------------------------------------------------
    @property
    def userType(self):
        '''gets userType value'''
        if self._userType is None:
            self.__init()
        return self._userType
    #----------------------------------------------------------------------
    @property
    def created(self):
        '''gets created value'''
        if self._created is None:
            self.__init()
        return self._created
    #----------------------------------------------------------------------
    @property
    def region(self):
        '''gets region value'''
        if self._region is None:
            self.__init()
        return self._region
    @property
    def roleid(self):
        '''gets the roleid value'''
        if self._roleid is None:
            self.__init()
        return self._roleid
    #----------------------------------------------------------------------
    @property
    def modified(self):
        '''gets modified value'''
        if self._modified is None:
            self.__init()
        return self._modified
    #----------------------------------------------------------------------
    @property
    def thumbnail(self):
        '''gets thumbnail value'''
        if self._thumbnail is None:
            self.__init()
        return self._thumbnail
    #----------------------------------------------------------------------
    @property
    def clientApps(self):
        '''gets clientApps value'''
        if self._clientApps is None:
            self.__init()
        return self._clientApps
    #----------------------------------------------------------------------
    @property
    def accountId(self):
        '''gets accountId value'''
        if self._accountId is None:
            self.__init()
        return self._accountId
    #----------------------------------------------------------------------
    @property
    def privacy(self):
        '''gets privacy value'''
        if self._privacy is None:
            self.__init()
        return self._privacy
    #----------------------------------------------------------------------
    @property
    def defaultGroupId(self):
        '''gets defaultGroupId value'''
        if self._defaultGroupId is None:
            self.__init()
        return self._defaultGroupId
    #----------------------------------------------------------------------
    @property
    def organization(self):
        '''gets organization value'''
        if self._organization is None:
            self.__init()
        return self._organization
    #----------------------------------------------------------------------
    @property
    def orgId(self):
        '''gets orgId value'''
        if self._orgId is None:
            self.__init()
        return self._orgId
    #----------------------------------------------------------------------
    @property
    def preferredView(self):
        '''gets preferredView value'''
        if self._preferredView is None:
            self.__init()
        return self._preferredView
    #----------------------------------------------------------------------
    @property
    def lastLogin(self):
        '''gets lastLogin value'''
        if self._lastLogin is None:
            self.__init()
        return self._lastLogin
    #----------------------------------------------------------------------
    @property
    def validateUserProfile(self):
        '''gets validateUserProfile value'''
        if self._validateUserProfile is None:
            self.__init()
        return self._validateUserProfile
    #----------------------------------------------------------------------
    @property
    def userTags(self):
        """gets a list of current user's tags"""
        url = "%s/tags" % self.root
        params = {
            "f" : "json"
        }
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    @property
    def invitations(self):
        """returns a class to access the current user's invitations"""
        url = "%s/invitations" % self.root
        return Invitations(url=url,
                           securityHandler=self._securityHandler,
                           proxy_url=self._proxy_url,
                           proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    @property
    def notifications(self):
        """The notifications that are available for the given user.
        Notifications are events that need the user's attention-application
        for joining a group administered by the user, acceptance of a group
        membership application, and so on. A notification is initially
        marked as new. The user can mark it as read or delete the notification.
        """
        url = "%s/notifications" % self.root
        return Notifications(url=url,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def invalidateSessions(self):
        """
        forces a given user to have to re-login into portal/agol
        """
        url = "%s/invalidateSessions" % self.root
        params = {"f": "json"}
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def resetPassword(self, email=True):
        """
        resets a users password for an account. The password will be randomly
        generated and emailed by the system.
        Input:
           email - boolean that an email password will be sent to the
                   user's profile email address. The default is True.
        """
        url = self.root + "/reset"
        params = {
            "f" : "json",
            "email" : email
        }
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def expirePassword(self,
                       hours="now"):
        """sets a time when a user must reset their password

        Inputs:
           hours - "now" (expire immediately, the default), "never"
                   (no expiration), or a number of hours from now after
                   which the password expires.
        """
        params = {
            "f" : "json"
        }
        # NOTE: the original implementation compared the freshly-assigned
        # `expiration` sentinel (-1) instead of the `hours` argument, so
        # "now"/"never" and numeric hours were silently ignored.
        expiration = -1
        if isinstance(hours, str):
            if hours == "now":
                expiration = -1
            elif hours == "never":
                expiration = 0
            else:
                expiration = -1
        elif isinstance(hours, (int, float)):
            dt = datetime.now() + timedelta(hours=hours)
            expiration = local_time_to_online(dt=dt)
        else:
            expiration = -1
        params['expiration'] = expiration
        url = "%s/expirePassword" % self.root
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def disable(self):
        """
        The Disable operation (POST only) disables login access for the
        user. It is only available to the administrator of the organization.
        """
        params = {
            "f" : "json"
        }
        url = "%s/disable" % self.root
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def enable(self):
        """
        The Enable operation (POST only) enables login access for the user.
        It is only available to the administrator of the organization.
        """
        params = {
            "f" : "json"
        }
        url = self.root + "/enable"
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def update(self,
               clearEmptyFields=None,
               tags=None,
               thumbnail=None,
               password=None,
               fullname=None,
               email=None,
               securityQuestionIdx=None,
               securityAnswer=None,
               culture=None,
               region=None,
               userType=None
               ):
        """
        The Update User operation (POST only) modifies properties such as
        description, preferred view, tags, access, and thumbnail. The user
        name cannot be modified. For the "ecas" identity provider, password,
        e-mail, and full name must be modified by editing your Esri Global
        Account. For the "arcgis" identity provider, password, full name,
        security question, and security answer can be updated with Update
        User. Update User is available only to the user or to the
        administrator of the user's organization.
        Only the properties that are to be updated need to be specified in
        the request. Properties not specified will not be affected.
        Inputs:
           clearEmptyFields - Clears any fields that are passed in empty (for
                              example, description, tags).
           tags - Tags are words or short phrases that describe the user.
                  Separate terms with commas.
                  Example: tags=GIS Analyst, Redlands, cloud GIS
           thumbnail - Enter the pathname to the thumbnail image to be used
                       for the user. The recommended image size is 200 pixels
                       wide by 133 pixels high. Acceptable image formats are
                       PNG, GIF, and JPEG. The maximum file size for an image
                       is 1 MB. This is not a reference to the file but the
                       file itself, which will be stored on the sharing
                       servers.
                       Example: thumbnail=subfolder/thumbnail.jpg
           password - Password for the user. Only applicable for the arcgis
                      identity provider.
           fullname - The full name of the user. Only applicable for the
                      arcgis identity provider.
           email - The e-mail address of the user. Only applicable for the
                   arcgis identity provider.
           securityQuestionIdx - The index of the security question the user
                  wants to use. The security question is used for password
                  recovery. Only applicable for the arcgis identity provider.
           securityAnswer - The answer to the security question for the user.
                  Only applicable for the arcgis identity provider.
           culture - Specifies the locale for which content is returned. The
                     browser/machine language setting is the default.
                     Authenticated users can set the culture and overwrite the
                     browser/machine language setting.
           region - Specifies the region of featured maps and apps and the
                    basemap gallery.
           userType - if the value is set to "both", then the value will allow
                      users to access both ArcGIS Org and the forums from this
                      account. 'arcgisorg' means the account is only valid
                      for the organizational site. This is an AGOL only
                      parameter.
        """
        params = {
            "f" : "json"
        }
        if region is not None:
            params['region'] = region
        if culture is not None:
            params['culture'] = culture
        if clearEmptyFields is not None:
            params['clearEmptyFields'] = clearEmptyFields
        if tags is not None:
            params['tags'] = tags
        if password is not None:
            params['password'] = password
        if fullname is not None:
            params['fullname'] = fullname
        if email is not None:
            params['email'] = email
        if securityQuestionIdx is not None:
            params['securityQuestionIdx'] = securityQuestionIdx
        if securityAnswer is not None:
            params['securityAnswer'] = securityAnswer
        if userType is not None and \
           userType.lower() in ['both', 'arcgisorg']:
            params['userType'] = userType.lower()
        files = {}
        url = "%s/update" % self.root
        res = None
        if thumbnail is not None and \
           os.path.isfile(thumbnail):
            # `files` is a dict; the original code called files.append(),
            # which raised AttributeError whenever a thumbnail was given.
            # Store it the same way Group.update does.
            files['thumbnail'] = thumbnail
            res = self._post(url=url,
                             param_dict=params,
                             files=files,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
        else:
            res = self._post(url=url,
                             param_dict=params,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
        # Refresh the cached properties so they reflect the update.
        self.__init()
        return res
    #----------------------------------------------------------------------
    def delete(self):
        """
        The Delete User operation (POST only) is available only to the user
        in question or to the administrator of the user's organization.
        If deleting a user who is part of an organization, their content
        and groups must be transferred to another member or deleted prior
        to deleting the user.
        If the user is not part of an organization, all content and groups
        of the user must first be deleted.
        Deleting a user whose identity provider is the Esri Global Account
        will not delete the user from the Esri Global Account system.
        """
        params = {
            "f" : "json"
        }
        url = self.root + "/delete"
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
########################################################################
class Invitations(BaseAGOLClass):
    """Manages the invitations sent to the authenticated user.

    Wraps the portal's user-invitations resource; each entry is exposed
    as an ``Invitations.Invitation`` object via ``userInvitations``.
    """
    _url = None
    _securityHandler = None
    _proxy_port = None
    _proxy_url = None
    _json = None
    _json_dict = None
    _userInvitations = None
    class Invitation(BaseAGOLClass):
        """represents a single invitation for a given user."""
        _url = None
        _securityHandler = None
        _proxy_port = None
        _proxy_url = None
        _json = None
        _json_dict = None
        _username = None
        _targetType = None
        _fromUsername = None
        _created = None
        _mustApprove = None
        _received = None
        _targetId = None
        _id = None
        _dateAccepted = None
        _role = None
        _expiration = None
        _group = None
        _accepted = None
        _type = None
        _email = None
        #----------------------------------------------------------------------
        def __init__(self,
                     url,
                     securityHandler,
                     proxy_url=None,
                     proxy_port=None,
                     initialize=False):
            """Constructor

            Inputs:
               url - resource URL of this invitation
               securityHandler - handler providing credentials/referer
               proxy_url - optional proxy host
               proxy_port - optional proxy port
               initialize - if True, the resource is loaded immediately
            """
            self._url = url
            self._securityHandler = securityHandler
            # PEP 8: identity comparison, was `not securityHandler is None`
            if securityHandler is not None:
                self._referer_url = securityHandler.referer_url
            self._proxy_port = proxy_port
            self._proxy_url = proxy_url
            if initialize:
                self.__init()
        #----------------------------------------------------------------------
        def __init(self):
            """loads the property data into the class"""
            params = {
                "f" : "json"
            }
            json_dict = self._get(url=self._url,
                                  param_dict=params,
                                  securityHandler=self._securityHandler,
                                  proxy_port=self._proxy_port,
                                  proxy_url=self._proxy_url)
            self._json_dict = json_dict
            self._json = json.dumps(json_dict)
            # Mirror every public JSON key into its matching _private slot.
            attributes = [attr for attr in dir(self)
                          if not attr.startswith('__') and
                          not attr.startswith('_')]
            for k, v in json_dict.items():
                if k in attributes:
                    setattr(self, "_" + k, json_dict[k])
                else:
                    print( k, " - attribute not implemented in Invitation class.")
        #----------------------------------------------------------------------
        @property
        def root(self):
            """returns the current url of the class"""
            return self._url
        #----------------------------------------------------------------------
        def __str__(self):
            """returns object as string"""
            if self._json is None:
                self.__init()
            return self._json
        #----------------------------------------------------------------------
        def __iter__(self):
            """returns JSON as [key,value] objects"""
            if self._json_dict is None:
                self.__init()
            for k, v in self._json_dict.items():
                yield [k, v]
        #----------------------------------------------------------------------
        @property
        def username(self):
            '''gets the property value for username'''
            if self._username is None:
                self.__init()
            return self._username
        #----------------------------------------------------------------------
        @property
        def targetType(self):
            '''gets the property value for targetType'''
            if self._targetType is None:
                self.__init()
            return self._targetType
        #----------------------------------------------------------------------
        @property
        def fromUsername(self):
            '''gets the property value for fromUsername'''
            if self._fromUsername is None:
                self.__init()
            return self._fromUsername
        #----------------------------------------------------------------------
        @property
        def created(self):
            '''gets the property value for created'''
            if self._created is None:
                self.__init()
            return self._created
        #----------------------------------------------------------------------
        @property
        def mustApprove(self):
            '''gets the property value for mustApprove'''
            if self._mustApprove is None:
                self.__init()
            return self._mustApprove
        #----------------------------------------------------------------------
        @property
        def received(self):
            '''gets the property value for received'''
            if self._received is None:
                self.__init()
            return self._received
        #----------------------------------------------------------------------
        @property
        def targetId(self):
            '''gets the property value for targetId'''
            if self._targetId is None:
                self.__init()
            return self._targetId
        #----------------------------------------------------------------------
        @property
        def id(self):
            '''gets the property value for id'''
            if self._id is None:
                self.__init()
            return self._id
        #----------------------------------------------------------------------
        @property
        def dateAccepted(self):
            '''gets the property value for dateAccepted'''
            if self._dateAccepted is None:
                self.__init()
            return self._dateAccepted
        #----------------------------------------------------------------------
        @property
        def role(self):
            '''gets the property value for role'''
            if self._role is None:
                self.__init()
            return self._role
        #----------------------------------------------------------------------
        @property
        def expiration(self):
            '''gets the property value for expiration'''
            if self._expiration is None:
                self.__init()
            return self._expiration
        #----------------------------------------------------------------------
        @property
        def group(self):
            '''gets the property value for group'''
            if self._group is None:
                self.__init()
            return self._group
        #----------------------------------------------------------------------
        @property
        def accepted(self):
            '''gets the property value for accepted'''
            if self._accepted is None:
                self.__init()
            return self._accepted
        #----------------------------------------------------------------------
        @property
        def type(self):
            '''gets the property value for type'''
            if self._type is None:
                self.__init()
            return self._type
        #----------------------------------------------------------------------
        @property
        def email(self):
            '''gets the property value for email'''
            if self._email is None:
                self.__init()
            return self._email
    #----------------------------------------------------------------------
    def __init__(self,
                 url,
                 securityHandler,
                 proxy_url=None,
                 proxy_port=None,
                 initialize=False):
        """Constructor

        Inputs:
           url - resource URL of the invitations collection
           securityHandler - handler providing credentials/referer
           proxy_url - optional proxy host
           proxy_port - optional proxy port
           initialize - if True, the resource is loaded immediately
        """
        self._url = url
        self._securityHandler = securityHandler
        # PEP 8: identity comparison, was `not securityHandler is None`
        if securityHandler is not None:
            self._referer_url = securityHandler.referer_url
        self._proxy_port = proxy_port
        self._proxy_url = proxy_url
        if initialize:
            self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """loads the property data into the class"""
        params = {
            "f" : "json"
        }
        json_dict = self._get(url=self._url,
                              param_dict=params,
                              securityHandler=self._securityHandler,
                              proxy_port=self._proxy_port,
                              proxy_url=self._proxy_url)
        self._json_dict = json_dict
        self._json = json.dumps(json_dict)
        attributes = [attr for attr in dir(self)
                      if not attr.startswith('__') and
                      not attr.startswith('_')]
        for k, v in json_dict.items():
            if k in attributes:
                setattr(self, "_" + k, json_dict[k])
            else:
                print( k, " - attribute not implemented in Invitations class.")
    #----------------------------------------------------------------------
    @property
    def root(self):
        """returns the current url of the class"""
        return self._url
    #----------------------------------------------------------------------
    def __str__(self):
        """returns object as string"""
        if self._json is None:
            self.__init()
        return self._json
    #----------------------------------------------------------------------
    def __iter__(self):
        """returns JSON as [key,value] objects"""
        if self._json_dict is None:
            self.__init()
        for k, v in self._json_dict.items():
            yield [k, v]
    #----------------------------------------------------------------------
    @property
    def userInvitations(self):
        """gets all user invitations"""
        self.__init()
        items = []
        # Guard: the response may lack a 'userInvitations' entry, leaving
        # self._userInvitations as None (iterating None raised TypeError).
        for n in self._userInvitations or []:
            if "id" in n:
                url = "%s/%s" % (self.root, n['id'])
                items.append(self.Invitation(url=url,
                                             securityHandler=self._securityHandler,
                                             proxy_url=self._proxy_url,
                                             proxy_port=self._proxy_port,
                                             initialize=True))
        return items
########################################################################
class Notifications(BaseAGOLClass):
    """
    A user notification resource available only to the user in question. A
    notification has the following fields:
    {id : string, type : enum, data: string, status : enum }
    Status is either new or read.
    Type is the type of notification, e.g., application to join group or
    invitation to join group.
    """
    _url = None
    _securityHandler = None
    _proxy_port = None
    _proxy_url = None
    _json = None
    _json_dict = None
    _notifications = None
    class Notification(BaseAGOLClass):
        """represents a single notification inside the notification list"""
        _url = None
        _securityHandler = None
        _proxy_port = None
        _proxy_url = None
        _json = None
        _json_dict = None
        _targetType = None
        _target = None
        _received = None
        _data = None
        _type = None
        _id = None
        def __init__(self, url, securityHandler,
                     proxy_url=None, proxy_port=None,
                     initialize=False):
            """Constructor

            Inputs:
               url - resource URL of this notification
               securityHandler - handler providing credentials/referer
               proxy_url - optional proxy host
               proxy_port - optional proxy port
               initialize - if True, the resource is loaded immediately
            """
            self._url = url
            self._securityHandler = securityHandler
            # PEP 8: identity comparison, was `not securityHandler is None`
            if securityHandler is not None:
                self._referer_url = securityHandler.referer_url
            self._proxy_port = proxy_port
            self._proxy_url = proxy_url
            if initialize:
                self.__init()
        #----------------------------------------------------------------------
        def __init(self):
            """loads the property data into the class"""
            params = {
                "f" : "json"
            }
            json_dict = self._get(url=self._url,
                                  param_dict=params,
                                  securityHandler=self._securityHandler,
                                  proxy_port=self._proxy_port,
                                  proxy_url=self._proxy_url)
            self._json_dict = json_dict
            self._json = json.dumps(json_dict)
            attributes = [attr for attr in dir(self)
                          if not attr.startswith('__') and
                          not attr.startswith('_')]
            for k, v in json_dict.items():
                if k in attributes:
                    setattr(self, "_" + k, json_dict[k])
                else:
                    print( k, " - attribute not implemented in Notification class.")
        #----------------------------------------------------------------------
        @property
        def targetType(self):
            '''gets property targetType'''
            if self._targetType is None:
                self.__init()
            return self._targetType
        #----------------------------------------------------------------------
        @property
        def target(self):
            '''gets property target'''
            if self._target is None:
                self.__init()
            return self._target
        #----------------------------------------------------------------------
        @property
        def received(self):
            '''gets property received'''
            if self._received is None:
                self.__init()
            return self._received
        #----------------------------------------------------------------------
        @property
        def data(self):
            '''gets property data'''
            if self._data is None:
                self.__init()
            return self._data
        #----------------------------------------------------------------------
        @property
        def type(self):
            '''gets property type'''
            if self._type is None:
                self.__init()
            return self._type
        #----------------------------------------------------------------------
        @property
        def id(self):
            '''gets property id'''
            if self._id is None:
                self.__init()
            return self._id
        #----------------------------------------------------------------------
        @property
        def root(self):
            """returns the current url of the class"""
            return self._url
        #----------------------------------------------------------------------
        def __str__(self):
            """returns object as string"""
            if self._json is None:
                self.__init()
            return self._json
        #----------------------------------------------------------------------
        def __iter__(self):
            """returns JSON as [key,value] objects"""
            if self._json_dict is None:
                self.__init()
            for k, v in self._json_dict.items():
                yield [k, v]
        #----------------------------------------------------------------------
        def delete(self):
            """deletes the current notification from the user"""
            url = "%s/delete" % self.root
            params = {"f":"json"}
            return self._post(url=url,
                              param_dict=params,
                              securityHandler=self._securityHandler,
                              proxy_url=self._proxy_url,
                              proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def __init__(self,
                 url,
                 securityHandler,
                 proxy_url=None,
                 proxy_port=None,
                 initialize=False):
        """Constructor

        Inputs:
           url - resource URL of the notifications collection
           securityHandler - handler providing credentials/referer
           proxy_url - optional proxy host
           proxy_port - optional proxy port
           initialize - if True, the resource is loaded immediately
        """
        self._url = url
        self._securityHandler = securityHandler
        # PEP 8: identity comparison, was `not securityHandler is None`
        if securityHandler is not None:
            self._referer_url = securityHandler.referer_url
        self._proxy_port = proxy_port
        self._proxy_url = proxy_url
        if initialize:
            self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """loads the property data into the class"""
        params = {
            "f" : "json"
        }
        json_dict = self._get(url=self._url,
                              param_dict=params,
                              securityHandler=self._securityHandler,
                              proxy_port=self._proxy_port,
                              proxy_url=self._proxy_url)
        self._json_dict = json_dict
        self._json = json.dumps(json_dict)
        attributes = [attr for attr in dir(self)
                      if not attr.startswith('__') and
                      not attr.startswith('_')]
        for k, v in json_dict.items():
            if k in attributes:
                setattr(self, "_" + k, json_dict[k])
            else:
                print( k, " - attribute not implemented in Notifications class.")
    #----------------------------------------------------------------------
    @property
    def root(self):
        """returns the current url of the class"""
        return self._url
    #----------------------------------------------------------------------
    def __str__(self):
        """returns object as string"""
        if self._json is None:
            self.__init()
        return self._json
    #----------------------------------------------------------------------
    def __iter__(self):
        """returns JSON as [key,value] objects"""
        if self._json_dict is None:
            self.__init()
        for k, v in self._json_dict.items():
            yield [k, v]
    #----------------------------------------------------------------------
    @property
    def notifications(self):
        """gets the user's notifications"""
        self.__init()
        items = []
        # Guard: the response may lack a 'notifications' entry, leaving
        # self._notifications as None (iterating None raised TypeError).
        for n in self._notifications or []:
            if "id" in n:
                url = "%s/%s" % (self.root, n['id'])
                items.append(self.Notification(url=url,
                                               securityHandler=self._securityHandler,
                                               proxy_url=self._proxy_url,
                                               proxy_port=self._proxy_port))
        return items
|
from django import template
from django.core.urlresolvers import reverse
register = template.Library()
"""
Call method of an object passing it args. Also accepts an as parameter to set
context variable.
Basic usage:
{% call obj callable arg1 arg2 arg3 arg4 as url %}
As clause and args are totally optional
"""
class CallNode(template.Node):
    """Node that invokes ``callable`` on ``obj`` with ``args`` at render time.

    When ``var`` is set, the result is also stored in the rendering context
    under that name before being returned.
    """
    def __init__(self, obj, callable, args, var):
        self.obj = obj
        self.callable = callable
        self.args = args
        self.var = var
    def render(self, context):
        try:
            target = context.get(self.obj, None)
            # Resolve each arg from the context, falling back to the literal.
            resolved = [context.get(arg, arg) for arg in self.args]
            value = getattr(target, self.callable)(*resolved)
        except template.VariableDoesNotExist:
            return ""
        if self.var:
            context[self.var] = value
        return value
def call(parser, token):
    """Compile the ``{% call obj method arg... [as var] %}`` tag.

    Returns a CallNode; raises TemplateSyntaxError for malformed tags.
    """
    try:
        contents = token.split_contents()
        obj = contents[1]
        callable = contents[2]
        args = contents[3:] or []
        # Chop off the "as <var>" clause if it exists. Slicing at the 'as'
        # index (rather than the former len(args)-2) also handles tokens
        # appearing after the as-clause.
        if 'as' in args:
            args = args[:args.index('as')]
        var_index = contents.index('as') if 'as' in contents else None
        var = contents[var_index + 1] if var_index is not None else None
    except (ValueError, IndexError):
        # ValueError: unbalanced quotes in split_contents (previously this
        # path crashed with NameError because tag_name was never bound);
        # IndexError: fewer than 3 tokens, or a trailing 'as' with no name.
        # token.contents is always available, unlike the parsed list.
        raise template.TemplateSyntaxError(
            '%s requires at least 3 arguments' % token.contents.split()[0])
    return CallNode(obj, callable, args, var)
register.tag('call', call)
Don't interpolate unescaped strings with the call tag.
from django import template
from django.core.urlresolvers import reverse
from django.utils.html import conditional_escape
register = template.Library()
"""
Call method of an object passing it args. Also accepts an as parameter to set
context variable.
Basic usage:
{% call obj callable arg1 arg2 arg3 arg4 as url %}
As clause and args are totally optional
"""
class CallNode(template.Node):
    """Node that invokes ``callable`` on ``obj`` with ``args`` at render time.

    The result is passed through conditional_escape so unescaped strings are
    never interpolated into the output; when ``var`` is set, the escaped
    result is also stored in the rendering context under that name.
    """
    def __init__(self, obj, callable, args, var):
        self.obj = obj
        self.callable = callable
        self.args = args
        self.var = var
    def render(self, context):
        try:
            target = context.get(self.obj, None)
            # Resolve each arg from the context, falling back to the literal.
            resolved = [context.get(arg, arg) for arg in self.args]
            value = conditional_escape(getattr(target, self.callable)(*resolved))
        except template.VariableDoesNotExist:
            return ""
        if self.var:
            context[self.var] = value
        return value
def call(parser, token):
    """Compile the ``{% call obj method arg... [as var] %}`` tag.

    Returns a CallNode; raises TemplateSyntaxError for malformed tags.
    """
    try:
        contents = token.split_contents()
        obj = contents[1]
        callable = contents[2]
        args = contents[3:] or []
        # Chop off the "as <var>" clause if it exists. Slicing at the 'as'
        # index (rather than the former len(args)-2) also handles tokens
        # appearing after the as-clause.
        if 'as' in args:
            args = args[:args.index('as')]
        var_index = contents.index('as') if 'as' in contents else None
        var = contents[var_index + 1] if var_index is not None else None
    except (ValueError, IndexError):
        # ValueError: unbalanced quotes in split_contents (previously this
        # path crashed with NameError because tag_name was never bound);
        # IndexError: fewer than 3 tokens, or a trailing 'as' with no name.
        # token.contents is always available, unlike the parsed list.
        raise template.TemplateSyntaxError(
            '%s requires at least 3 arguments' % token.contents.split()[0])
    return CallNode(obj, callable, args, var)
register.tag('call', call)
|
from __future__ import print_function, division
import sys
import logging
import re
import lldb
if sys.version_info[0] == 2:
    # python2-based LLDB accepts utf8-encoded ascii strings only.
    to_lldb_str = lambda s: s.encode('utf8', 'backslashreplace') if isinstance(s, unicode) else s
    range = xrange
else:
    to_lldb_str = str  # Py3 LLDB accepts unicode strings directly
log = logging.getLogger(__name__)
# Reference to this module object itself; used to register dynamically
# created summary functions so LLDB can look them up by qualified name.
module = sys.modules[__name__]
# The LLDB type category holding all Rust formatters; set in initialize_category().
rust_category = None
def initialize_category(debugger):
    """Creates the 'Rust' type category on the given debugger and registers
    all summary/synthetic providers for Rust std types.

    NOTE(review): kept byte-identical; registration order against the
    category plausibly affects regex match precedence — do not reorder.
    """
    global module, rust_category
    rust_category = debugger.CreateCategory('Rust')
    #rust_category.AddLanguage(lldb.eLanguageTypeRust)
    rust_category.SetEnabled(True)
    #attach_summary_to_type(get_array_summary, r'^.*\[[0-9]+\]$', True)
    attach_summary_to_type(get_tuple_summary, r'^\(.*\)$', True)
    attach_summary_to_type(get_tuple_summary, r'^tuple<.+>$', True) # *-windows-msvc uses this name since 1.47
    attach_synthetic_to_type(StrSliceSynthProvider, '&str')
    attach_synthetic_to_type(StrSliceSynthProvider, 'str*')
    attach_synthetic_to_type(StrSliceSynthProvider, 'str') # *-windows-msvc uses this name since 1.5?
    attach_synthetic_to_type(StdStringSynthProvider, 'collections::string::String') # Before 1.20
    attach_synthetic_to_type(StdStringSynthProvider, 'alloc::string::String') # Since 1.20
    attach_synthetic_to_type(StdVectorSynthProvider, r'^collections::vec::Vec<.+>$', True) # Before 1.20
    attach_synthetic_to_type(StdVectorSynthProvider, r'^alloc::vec::Vec<.+>$', True) # Since 1.20
    attach_synthetic_to_type(SliceSynthProvider, r'^&(mut *)?\[.*\]$', True)
    attach_synthetic_to_type(SliceSynthProvider, r'^(mut *)?slice<.+>.*$', True)
    attach_synthetic_to_type(StdCStringSynthProvider, 'std::ffi::c_str::CString')
    attach_synthetic_to_type(StdCStrSynthProvider, 'std::ffi::c_str::CStr')
    attach_synthetic_to_type(StdOsStringSynthProvider, 'std::ffi::os_str::OsString')
    attach_synthetic_to_type(StdOsStrSynthProvider, 'std::ffi::os_str::OsStr')
    attach_synthetic_to_type(StdPathBufSynthProvider, 'std::path::PathBuf')
    attach_synthetic_to_type(StdPathSynthProvider, 'std::path::Path')
    attach_synthetic_to_type(StdRcSynthProvider, r'^alloc::rc::Rc<.+>$', True)
    attach_synthetic_to_type(StdRcSynthProvider, r'^alloc::rc::Weak<.+>$', True)
    attach_synthetic_to_type(StdArcSynthProvider, r'^alloc::(sync|arc)::Arc<.+>$', True)
    attach_synthetic_to_type(StdArcSynthProvider, r'^alloc::(sync|arc)::Weak<.+>$', True)
    attach_synthetic_to_type(StdMutexSynthProvider, r'^std::sync::mutex::Mutex<.+>$', True)
    attach_synthetic_to_type(StdCellSynthProvider, r'^core::cell::Cell<.+>$', True)
    attach_synthetic_to_type(StdRefCellSynthProvider, r'^core::cell::RefCell<.+>$', True)
    attach_synthetic_to_type(StdRefCellBorrowSynthProvider, r'^core::cell::Ref<.+>$', True)
    attach_synthetic_to_type(StdRefCellBorrowSynthProvider, r'^core::cell::RefMut<.+>$', True)
    attach_synthetic_to_type(StdHashMapSynthProvider, r'^std::collections::hash::map::HashMap<.+>$', True)
    attach_synthetic_to_type(StdHashSetSynthProvider, r'^std::collections::hash::set::HashSet<.+>$', True)
    attach_synthetic_to_type(StdOptionSynthProvider, r'^core::option::Option<.+>$', True)
    attach_synthetic_to_type(StdResultSynthProvider, r'^core::result::Result<.+>$', True)
    attach_synthetic_to_type(StdCowSynthProvider, r'^alloc::borrow::Cow<.+>$', True)
def attach_synthetic_to_type(synth_class, type_name, is_regex=False):
    """Registers synth_class as the synthetic-children provider for the given
    type name (or regex), plus a matching summary provider derived from it."""
    global module, rust_category
    qualified_name = __name__ + '.' + synth_class.__name__
    synth = lldb.SBTypeSynthetic.CreateWithClassName(qualified_name)
    synth.SetOptions(lldb.eTypeOptionCascade)
    rust_category.AddTypeSynthetic(lldb.SBTypeNameSpecifier(type_name, is_regex), synth)
    summary_fn = lambda valobj, dict: get_synth_summary(synth_class, valobj, dict)
    # LLDB accesses summary fn's by name, so each one needs a unique,
    # module-visible alias.
    summary_fn.__name__ = '_get_synth_summary_' + synth_class.__name__
    setattr(module, summary_fn.__name__, summary_fn)
    attach_summary_to_type(summary_fn, type_name, is_regex)
def attach_summary_to_type(summary_fn, type_name, is_regex=False):
    """Registers summary_fn as the summary provider for the given type name
    (or regex) in the Rust type category."""
    global module, rust_category
    provider = lldb.SBTypeSummary.CreateWithFunctionName(__name__ + '.' + summary_fn.__name__)
    provider.SetOptions(lldb.eTypeOptionCascade)
    rust_category.AddTypeSummary(lldb.SBTypeNameSpecifier(type_name, is_regex), provider)
# 'get_summary' is annoyingly not a part of the standard LLDB synth provider
# API. This shim lets synth providers share their data-extraction logic with
# the sibling summary providers.
def get_synth_summary(synth_class, valobj, dict):
    """Instantiates synth_class over the raw (non-synthetic) value and
    returns its summary, converted for the hosting LLDB's Python version."""
    provider = synth_class(valobj.GetNonSyntheticValue(), dict)
    provider.update()
    return to_lldb_str(provider.get_summary())
# Performs chained GetChildMemberWithName lookups.
def gcm(valobj, *chain):
    """Walks valobj down the given sequence of member names and returns the
    final child value."""
    current = valobj
    for member_name in chain:
        current = current.GetChildMemberWithName(member_name)
    return current
# Rust-enabled LLDB using DWARF debug info will strip tuple field prefixes.
# If LLDB is not Rust-enabled, or when using PDB debug info, the fields stay
# underscore-prefixed.
def read_unique_ptr(valobj):
    """Extracts the raw pointer out of a core::ptr::Unique wrapper."""
    pointer = valobj.GetChildMemberWithName('pointer')
    for field in ('__0', '0'):  # plain lldb naming first, then rust-lldb
        inner = pointer.GetChildMemberWithName(field)
        if inner.IsValid():
            return inner
    # Since Rust 1.33 `pointer` no longer wraps a NonZero; use it directly.
    return pointer
def string_from_ptr(pointer, length):
    """Reads `length` bytes of process memory at `pointer` and decodes them
    as UTF-8. Returns an empty string for non-positive lengths and None when
    the memory read fails."""
    if length <= 0:
        return u''
    error = lldb.SBError()
    raw = pointer.GetProcess().ReadMemory(pointer.GetValueAsUnsigned(), length, error)
    if not error.Success():
        log.error('ReadMemory error: %s', error.GetCString())
        return None
    return raw.decode('utf8', 'replace')
def get_obj_summary(valobj, unavailable='{...}'):
    """Returns the value's summary, falling back to its value string, then
    to the `unavailable` placeholder."""
    for text in (valobj.GetSummary(), valobj.GetValue()):
        if text is not None:
            return text
    return unavailable
def sequence_summary(childern, maxsize=32):
    """Joins child summaries with ', ', appending ', ...' and stopping once
    the accumulated text exceeds maxsize characters.

    NOTE: parameter name 'childern' (sic) is kept for interface compatibility.
    """
    text = ''
    for item in childern:
        if text:
            text += ', '
        text += get_obj_summary(item)
        if len(text) > maxsize:
            text += ', ...'
            break
    return text
def get_unqualified_type_name(type_name):
    """Returns the type name with any `foo::bar::` qualifiers stripped.
    Tuple, array, reference and pointer type names are passed through as-is."""
    if type_name[0] not in unqual_type_markers:
        return unqual_type_regex.match(type_name).group(1)
    return type_name
#
unqual_type_markers = ["(", "[", "&", "*"]
unqual_type_regex = re.compile(r'^(?:\w+::)*(\w+).*', re.UNICODE)
def dump_type(ty):
    """Debugging aid: logs a type's name and byte size."""
    name, size = ty.GetName(), ty.GetByteSize()
    log.info('type %s: size=%d', name, size)
# ----- Summaries -----
def get_tuple_summary(valobj, dict):
    """Formats a tuple as "(elem0, elem1, ...)" from its child summaries."""
    count = valobj.GetNumChildren()
    parts = ', '.join(get_obj_summary(valobj.GetChildAtIndex(i)) for i in range(0, count))
    return '(%s)' % parts
def get_array_summary(valobj, dict):
    """Formats an array as "(<length>) [<element summaries>]"."""
    length = valobj.GetNumChildren()
    elements = sequence_summary(valobj)
    return '(%d) [%s]' % (length, elements)
# ----- Synth providers ------
class RustSynthProvider(object):
    """Base class for all Rust synthetic-children providers.

    Implements LLDB's synth-provider protocol with inert defaults;
    subclasses override initialize() and the child accessors.
    """
    def __init__(self, valobj, dict=None):
        # `dict` is required by LLDB's synth provider calling convention but
        # unused here; the former mutable `{}` default was replaced with None
        # (the parameter name is kept for protocol compatibility even though
        # it shadows the builtin).
        self.valobj = valobj
        self.initialize()
    def initialize(self):
        """One-time setup hook, called from __init__."""
        return None
    def update(self):
        return False
    def num_children(self):
        return 0
    def has_children(self):
        return False
    def get_child_at_index(self, index):
        return None
    def get_child_index(self, name):
        return None
    def get_summary(self):
        return None
class RegularEnumProvider(RustSynthProvider):
    """Provider for regular enums, represented as unions of variant structs
    whose first field carries the discriminant."""
    def initialize(self):
        # The discriminant value selects the active union member.
        selector = self.valobj.GetChildAtIndex(0).GetChildAtIndex(0).GetValueAsUnsigned()
        self.variant = self.valobj.GetChildAtIndex(selector)
    def num_children(self):
        return max(0, self.variant.GetNumChildren() - 1)
    def has_children(self):
        return self.num_children() > 0
    def get_child_at_index(self, index):
        # Offset by one to skip the discriminant field.
        return self.variant.GetChildAtIndex(1 + index)
    def get_child_index(self, name):
        return self.variant.GetIndexOfChildWithName(name) - 1
    def get_summary(self):
        return get_obj_summary(self.variant)
# Base class for providers that represent array-like objects
class ArrayLikeSynthProvider(RustSynthProvider):
    """Shared logic for contiguous collections: subclasses supply a
    (data pointer, length) pair via ptr_and_len(); elements are produced by
    byte offset from the data pointer."""
    def initialize(self):
        data_ptr, count = self.ptr_and_len(self.valobj) # type: ignore
        self.ptr = data_ptr
        self.len = count
        self.item_type = data_ptr.GetType().GetPointeeType()
        self.item_size = self.item_type.GetByteSize()
    def ptr_and_len(self, obj):
        """Abstract: returns (SBValue pointing at element 0, element count)."""
        pass
    def num_children(self):
        return self.len
    def has_children(self):
        return True
    def get_child_at_index(self, index):
        try:
            if index < 0 or index >= self.len:
                return None
            byte_offset = index * self.item_size
            return self.ptr.CreateChildAtOffset('[%s]' % index, byte_offset, self.item_type)
        except Exception as e:
            log.error('%s', e)
            raise
    def get_child_index(self, name):
        try:
            # Child names look like "[<index>]".
            return int(name.lstrip('[').rstrip(']'))
        except Exception as e:
            log.error('%s', e)
            raise
    def get_summary(self):
        return '(%d)' % (self.len,)
class StdVectorSynthProvider(ArrayLikeSynthProvider):
    """Provider for alloc::vec::Vec<T>."""
    def ptr_and_len(self, vec):
        data = read_unique_ptr(gcm(vec, 'buf', 'ptr'))
        count = gcm(vec, 'len').GetValueAsUnsigned()
        return (data, count)
    def get_summary(self):
        try:
            elems = sequence_summary(self.get_child_at_index(i) for i in range(self.len))
            return '(%d) vec![%s]' % (self.len, elems)
        except Exception as e:
            log.error('%s', e)
            raise
##################################################################################################################
class SliceSynthProvider(ArrayLikeSynthProvider):
    """Provider for slice references (&[T] / &mut [T])."""
    def ptr_and_len(self, vec):
        data = gcm(vec, 'data_ptr')
        count = gcm(vec, 'length').GetValueAsUnsigned()
        return (data, count)
    def get_summary(self):
        elems = sequence_summary(self.get_child_at_index(i) for i in range(self.len))
        return '(%d) &[%s]' % (self.len, elems)
# Base class for *String providers
class StringLikeSynthProvider(ArrayLikeSynthProvider):
def get_child_at_index(self, index):
ch = ArrayLikeSynthProvider.get_child_at_index(self, index)
ch.SetFormat(lldb.eFormatChar)
return ch
def get_summary(self):
# Limit string length to 1000 characters to cope with uninitialized values whose
# length field contains garbage.
strval = string_from_ptr(self.ptr, min(self.len, 1000))
if strval == None:
return None
if self.len > 1000: strval += u'...'
return u'"%s"' % strval
class StrSliceSynthProvider(StringLikeSynthProvider):
    """Provider for &str string slices."""
    def ptr_and_len(self, valobj):
        data = gcm(valobj, 'data_ptr')
        count = gcm(valobj, 'length').GetValueAsUnsigned()
        return (data, count)
class StdStringSynthProvider(StringLikeSynthProvider):
    """Provider for alloc::string::String (a Vec<u8> internally)."""
    def ptr_and_len(self, valobj):
        inner_vec = gcm(valobj, 'vec')
        data = read_unique_ptr(gcm(inner_vec, 'buf', 'ptr'))
        count = gcm(inner_vec, 'len').GetValueAsUnsigned()
        return (data, count)
class StdCStringSynthProvider(StringLikeSynthProvider):
    """Provider for std::ffi::CString."""
    def ptr_and_len(self, valobj):
        buf = gcm(valobj, 'inner')
        data = gcm(buf, 'data_ptr')
        # Exclude the trailing NUL byte from the reported length.
        count = gcm(buf, 'length').GetValueAsUnsigned() - 1
        return (data, count)
class StdOsStringSynthProvider(StringLikeSynthProvider):
    """Provider for std::ffi::OsString."""
    def ptr_and_len(self, valobj):
        buf = gcm(valobj, 'inner', 'inner')
        wrapped = gcm(buf, 'bytes')  # Windows OSString has an extra layer
        if wrapped.IsValid():
            buf = wrapped
        data = read_unique_ptr(gcm(buf, 'buf', 'ptr'))
        count = gcm(buf, 'len').GetValueAsUnsigned()
        return (data, count)
class FFISliceSynthProvider(StringLikeSynthProvider):
    """Provider for unsized FFI slice types (CStr, OsStr, Path)."""
    def ptr_and_len(self, valobj):
        # Unsized slice objects have incomplete debug info, so here we just
        # assume the standard slice reference layout:
        # [<pointer to data>, <data size>]
        process = valobj.GetProcess()
        base_addr = valobj.GetLoadAddress()
        char_ptr_ty = valobj.GetTarget().GetBasicType(lldb.eBasicTypeChar).GetPointerType()
        err = lldb.SBError()
        data = valobj.CreateValueFromAddress('data', base_addr, char_ptr_ty)
        count = process.ReadPointerFromMemory(base_addr + process.GetAddressByteSize(), err)
        return data, count
class StdCStrSynthProvider(FFISliceSynthProvider):
    """Provider for &CStr."""
    def ptr_and_len(self, valobj):
        data, count = FFISliceSynthProvider.ptr_and_len(self, valobj)
        return (data, count - 1)  # drop the terminating '\0'
class StdOsStrSynthProvider(FFISliceSynthProvider):
    """Provider for &OsStr; the plain FFI-slice behavior suffices."""
class StdPathBufSynthProvider(StdOsStringSynthProvider):
    """Provider for std::path::PathBuf (an OsString in a wrapper)."""
    def ptr_and_len(self, valobj):
        inner = gcm(valobj, 'inner')
        return StdOsStringSynthProvider.ptr_and_len(self, inner)
class StdPathSynthProvider(FFISliceSynthProvider):
    """Provider for &Path; the plain FFI-slice behavior suffices."""
##################################################################################################################
class DerefSynthProvider(RustSynthProvider):
    """Base for smart-pointer-like providers: forwards every child accessor
    to self.deref, which subclasses point at the wrapped value."""
    deref = lldb.SBValue()
    def num_children(self):
        return self.deref.GetNumChildren()
    def has_children(self):
        return self.deref.MightHaveChildren()
    def get_child_at_index(self, index):
        return self.deref.GetChildAtIndex(index)
    def get_child_index(self, name):
        return self.deref.GetIndexOfChildWithName(name)
    def get_summary(self):
        return get_obj_summary(self.deref)
# Base for Rc and Arc
class StdRefCountedSynthProvider(DerefSynthProvider):
    """Shared summary logic for Rc/Arc: shows the reference counts followed
    by the pointee's summary, or '<disposed>' when no strong refs remain."""
    weak = 0
    strong = 0
    def get_summary(self):
        if self.weak != 0:
            prefix = '(refs:%d,weak:%d) ' % (self.strong, self.weak)
        else:
            prefix = '(refs:%d) ' % self.strong
        body = get_obj_summary(self.deref) if self.strong > 0 else '<disposed>'
        return prefix + body
class StdRcSynthProvider(StdRefCountedSynthProvider):
    """Provider for alloc::rc::Rc and rc::Weak."""
    def initialize(self):
        inner = read_unique_ptr(gcm(self.valobj, 'ptr'))
        self.strong = gcm(inner, 'strong', 'value', 'value').GetValueAsUnsigned()
        self.weak = gcm(inner, 'weak', 'value', 'value').GetValueAsUnsigned()
        if self.strong <= 0:
            self.deref = lldb.SBValue()
        else:
            self.deref = gcm(inner, 'value')
            # One implicit weak reference is communally owned by all the
            # strong pointers; hide it from the display.
            self.weak -= 1
        self.deref.SetPreferSyntheticValue(True)
class StdArcSynthProvider(StdRefCountedSynthProvider):
    """Provider for alloc::sync::Arc and sync::Weak."""
    def initialize(self):
        inner = read_unique_ptr(gcm(self.valobj, 'ptr'))
        self.strong = gcm(inner, 'strong', 'v', 'value').GetValueAsUnsigned()
        self.weak = gcm(inner, 'weak', 'v', 'value').GetValueAsUnsigned()
        if self.strong <= 0:
            self.deref = lldb.SBValue()
        else:
            self.deref = gcm(inner, 'data')
            # One implicit weak reference is communally owned by all the
            # strong pointers; hide it from the display.
            self.weak -= 1
        self.deref.SetPreferSyntheticValue(True)
class StdMutexSynthProvider(DerefSynthProvider):
    """Provider for std::sync::Mutex: displays the protected data."""
    def initialize(self):
        protected = gcm(self.valobj, 'data', 'value')
        protected.SetPreferSyntheticValue(True)
        self.deref = protected
class StdCellSynthProvider(DerefSynthProvider):
    """Provider for core::cell::Cell: displays the contained value."""
    def initialize(self):
        contained = gcm(self.valobj, 'value', 'value')
        contained.SetPreferSyntheticValue(True)
        self.deref = contained
class StdRefCellSynthProvider(DerefSynthProvider):
    """Provider for core::cell::RefCell: shows borrow state plus contents."""
    def initialize(self):
        contained = gcm(self.valobj, 'value', 'value')
        contained.SetPreferSyntheticValue(True)
        self.deref = contained
    def get_summary(self):
        # Negative borrow count means a mutable borrow; positive counts
        # shared borrows; zero means not borrowed.
        borrow = gcm(self.valobj, 'borrow', 'value', 'value').GetValueAsSigned()
        if borrow < 0:
            state = '(borrowed:mut) '
        elif borrow > 0:
            state = '(borrowed:%d) ' % borrow
        else:
            state = ''
        return state + get_obj_summary(self.deref)
class StdRefCellBorrowSynthProvider(DerefSynthProvider):
    """Provider for Ref/RefMut borrow guards: dereferences the inner pointer."""
    def initialize(self):
        pointee = gcm(self.valobj, 'value').Dereference()
        pointee.SetPreferSyntheticValue(True)
        self.deref = pointee
##################################################################################################################
# Markers emitted by rustc into debug info field names to describe enum layout.
ENCODED_ENUM_PREFIX = 'RUST$ENCODED$ENUM$'
ENUM_DISCRIMINANT = 'RUST$ENUM$DISR'
class EnumSynthProvider(DerefSynthProvider):
    """Generic enum provider: decodes the active variant either from the
    RUST$... debug-info markers or via the subclass's initialize_enum()."""
    def initialize(self):
        obj_type = self.valobj.GetType()
        first_field_name = obj_type.GetFieldAtIndex(0).GetName()
        # The first two branches are for the sake of windows-*-msvc targets and non-rust-enabled liblldb.
        # Normally, we should be calling the initialize_enum().
        if first_field_name.startswith(ENCODED_ENUM_PREFIX): # Niche-optimized enum
            # The marker encodes a path of child indices to the discriminant,
            # followed by the name of the "null" (niche) variant.
            tokens = first_field_name[len(ENCODED_ENUM_PREFIX):].split("$")
            discr_indices = [int(index) for index in tokens[:-1]]
            null_variant = tokens[-1]
            discriminant = self.valobj.GetChildAtIndex(0)
            for discr_index in discr_indices:
                discriminant = discriminant.GetChildAtIndex(discr_index)
            # Recurse down the first field of the discriminant till we reach a non-struct type,
            for i in range(20): # ... but limit the depth, just in case.
                if discriminant.GetType().GetTypeClass() != lldb.eTypeClassStruct:
                    break
                discriminant = discriminant.GetChildAtIndex(0)
            if discriminant.GetValueAsUnsigned() == 0:
                self.variant = null_variant
                self.deref = lldb.SBValue()
            else:
                self.deref = self.valobj.GetChildAtIndex(0)
        elif first_field_name == ENUM_DISCRIMINANT: # Regular enum
            self.variant = self.valobj.GetChildAtIndex(0).GetValue()
            self.deref = self.valobj.GetChildAtIndex(1)
        else:
            self.initialize_enum()
        self.deref.SetPreferSyntheticValue(True)
    def initialize_enum(self):
        """Subclass hook for layouts without the RUST$ debug-info markers."""
        pass
    def get_summary(self):
        if self.deref.IsValid():
            return self.variant + '(' + get_obj_summary(self.deref) + ')'
        else:
            return self.variant
class StdOptionSynthProvider(EnumSynthProvider):
    """Provider for core::option::Option (marker-less layout fallback)."""
    def initialize_enum(self):
        is_some = self.valobj.GetTypeName().endswith('::Some')
        self.variant = 'Some' if is_some else 'None'
        self.deref = gcm(self.valobj, '0') if is_some else lldb.SBValue()
class StdResultSynthProvider(EnumSynthProvider):
    """Fallback decoder for core::result::Result (MSVC / non-rust lldb layouts)."""
    def initialize_enum(self):
        is_ok = self.valobj.GetTypeName().endswith('::Ok')
        self.variant = 'Ok' if is_ok else 'Err'
        # Both variants carry their payload in field '0'.
        self.deref = gcm(self.valobj, '0')
class StdCowSynthProvider(EnumSynthProvider):
    """Fallback decoder for alloc::borrow::Cow (MSVC / non-rust lldb layouts)."""
    def initialize_enum(self):
        is_owned = self.valobj.GetTypeName().endswith('::Owned')
        self.variant = 'Owned' if is_owned else 'Borrowed'
        # Both variants carry their payload in field '0'.
        self.deref = gcm(self.valobj, '0')
##################################################################################################################
class StdHashMapSynthProvider(RustSynthProvider):
    """Provider for std HashMap, which wraps a hashbrown::raw::RawTable.

    The table consists of `num_buckets` control bytes plus a bucket array;
    a bucket is occupied when the high bit of its control byte is clear.
    """
    def initialize(self):
        self.initialize_table(gcm(self.valobj, 'base', 'table'))
    def initialize_table(self, table):
        """Decode a RawTable into self.buckets and self.valid_indices."""
        assert table.IsValid()
        # bucket_mask is capacity-1 (capacity is a power of two).
        self.num_buckets = gcm(table, 'bucket_mask').GetValueAsUnsigned() + 1
        ctrl_ptr = gcm(table, 'ctrl', 'pointer')
        ctrl = ctrl_ptr.GetPointeeData(0, self.num_buckets)
        if table.type.GetNumberOfTemplateArguments() > 0:
            item_ty = table.type.GetTemplateArgumentType(0)
        else: # we must be on windows-msvc - try to look up item type by name
            table_ty_name = table.GetType().GetName() # "hashbrown::raw::RawTable<ITEM_TY>"
            item_ty_name = table_ty_name[table_ty_name.find('<')+1 : table_ty_name.rfind('>')]
            item_ty = table.GetTarget().FindTypes(item_ty_name).GetTypeAtIndex(0)
        buckets_ty = item_ty.GetArrayType(self.num_buckets)
        # Old layout has a `data` member; in the new layout the buckets live
        # directly below the ctrl array.
        data = gcm(table, 'data')
        new_layout = not data.IsValid()
        if new_layout: # buckets are located above `ctrl`, in reverse order.
            start_addr = ctrl_ptr.GetValueAsUnsigned() - item_ty.GetByteSize() * self.num_buckets
            self.buckets = self.valobj.CreateValueFromAddress('data', start_addr, buckets_ty)
        else:
            self.buckets = gcm(data, 'pointer').Dereference().Cast(buckets_ty)
        error = lldb.SBError()
        self.valid_indices = []
        for i in range(self.num_buckets):
            # High bit clear => bucket i is occupied (not EMPTY/DELETED).
            if ctrl.GetUnsignedInt8(error, i) & 0x80 == 0:
                if new_layout:
                    self.valid_indices.append(self.num_buckets - 1 - i)
                else:
                    self.valid_indices.append(i)
    def has_children(self):
        return True
    def num_children(self):
        return len(self.valid_indices)
    def get_child_at_index(self, index):
        bucket_idx = self.valid_indices[index]
        item = self.buckets.GetChildAtIndex(bucket_idx)
        # Re-parent the item so it displays as '[index]'.
        return item.CreateChildAtOffset('[%d]' % index, 0, item.GetType())
    def get_child_index(self, name):
        # Child names look like '[NN]'.
        try:
            return int(name.lstrip('[').rstrip(']'))
        except Exception as e:
            log.error('%s', e)
            raise
    def get_summary(self):
        return 'size=%d, capacity=%d' % (self.num_children(), self.num_buckets)
class StdHashSetSynthProvider(StdHashMapSynthProvider):
    """Provider for std HashSet: a HashMap whose items are (key, ()) pairs;
    only the key part is exposed."""
    def initialize(self):
        # The field path changed in Rust 1.48.
        table = gcm(self.valobj, 'base', 'map', 'table') # rust 1.48
        if not table.IsValid():
            table = gcm(self.valobj, 'map', 'base', 'table') # rust < 1.48
        self.initialize_table(table)
    def get_child_at_index(self, index):
        slot = self.valid_indices[index]
        key = self.buckets.GetChildAtIndex(slot).GetChildAtIndex(0)
        return key.CreateChildAtOffset('[%d]' % index, 0, key.GetType())
##################################################################################################################
def __lldb_init_module(debugger_obj, internal_dict):
    """Entry point invoked by LLDB when this script is imported."""
    log.info('Initializing')
    initialize_category(debugger_obj)
Format rust.py
from __future__ import print_function, division
import sys
import logging
import re
import lldb
# Python 2 / 3 compatibility shim for the string type handed back to LLDB.
if sys.version_info[0] == 2:
    # python2-based LLDB accepts utf8-encoded ascii strings only.
    def to_lldb_str(s): return s.encode('utf8', 'backslashreplace') if isinstance(s, unicode) else s
    range = xrange
else:
    to_lldb_str = str
log = logging.getLogger(__name__)  # module-wide logger
module = sys.modules[__name__]  # this module object; generated summary functions are attached to it
rust_category = None  # LLDB type category holding all Rust formatters; set by initialize_category()
def initialize_category(debugger):
    """Create the 'Rust' data-formatter category and register every summary
    and synthetic-children provider in it (registration order preserved)."""
    global module, rust_category
    rust_category = debugger.CreateCategory('Rust')
    # rust_category.AddLanguage(lldb.eLanguageTypeRust)
    rust_category.SetEnabled(True)

    # Plain summary providers: (callback, type name or regex, is_regex).
    for fn, name, is_regex in [
        (get_tuple_summary, r'^\(.*\)$', True),
        (get_tuple_summary, r'^tuple<.+>$', True),  # *-windows-msvc uses this name since 1.47
    ]:
        attach_summary_to_type(fn, name, is_regex)

    # Synthetic child providers: (provider class, type name or regex, is_regex).
    for provider, name, is_regex in [
        (StrSliceSynthProvider, '&str', False),
        (StrSliceSynthProvider, 'str*', False),
        (StrSliceSynthProvider, 'str', False),  # *-windows-msvc uses this name since 1.5?
        (StdStringSynthProvider, 'collections::string::String', False),  # before 1.20
        (StdStringSynthProvider, 'alloc::string::String', False),  # since 1.20
        (StdVectorSynthProvider, r'^collections::vec::Vec<.+>$', True),  # before 1.20
        (StdVectorSynthProvider, r'^alloc::vec::Vec<.+>$', True),  # since 1.20
        (SliceSynthProvider, r'^&(mut *)?\[.*\]$', True),
        (SliceSynthProvider, r'^(mut *)?slice<.+>.*$', True),
        (StdCStringSynthProvider, 'std::ffi::c_str::CString', False),
        (StdCStrSynthProvider, 'std::ffi::c_str::CStr', False),
        (StdOsStringSynthProvider, 'std::ffi::os_str::OsString', False),
        (StdOsStrSynthProvider, 'std::ffi::os_str::OsStr', False),
        (StdPathBufSynthProvider, 'std::path::PathBuf', False),
        (StdPathSynthProvider, 'std::path::Path', False),
        (StdRcSynthProvider, r'^alloc::rc::Rc<.+>$', True),
        (StdRcSynthProvider, r'^alloc::rc::Weak<.+>$', True),
        (StdArcSynthProvider, r'^alloc::(sync|arc)::Arc<.+>$', True),
        (StdArcSynthProvider, r'^alloc::(sync|arc)::Weak<.+>$', True),
        (StdMutexSynthProvider, r'^std::sync::mutex::Mutex<.+>$', True),
        (StdCellSynthProvider, r'^core::cell::Cell<.+>$', True),
        (StdRefCellSynthProvider, r'^core::cell::RefCell<.+>$', True),
        (StdRefCellBorrowSynthProvider, r'^core::cell::Ref<.+>$', True),
        (StdRefCellBorrowSynthProvider, r'^core::cell::RefMut<.+>$', True),
        (StdHashMapSynthProvider, r'^std::collections::hash::map::HashMap<.+>$', True),
        (StdHashSetSynthProvider, r'^std::collections::hash::set::HashSet<.+>$', True),
        (StdOptionSynthProvider, r'^core::option::Option<.+>$', True),
        (StdResultSynthProvider, r'^core::result::Result<.+>$', True),
        (StdCowSynthProvider, r'^alloc::borrow::Cow<.+>$', True),
    ]:
        attach_synthetic_to_type(provider, name, is_regex)
def attach_synthetic_to_type(synth_class, type_name, is_regex=False):
    """Register `synth_class` as the synthetic-children provider for `type_name`
    and generate a sibling summary provider that reuses the same class."""
    global module, rust_category
    #log.debug('attaching synthetic %s to "%s", is_regex=%s', synth_class.__name__, type_name, is_regex)
    synth = lldb.SBTypeSynthetic.CreateWithClassName(__name__ + '.' + synth_class.__name__)
    synth.SetOptions(lldb.eTypeOptionCascade)
    rust_category.AddTypeSynthetic(lldb.SBTypeNameSpecifier(type_name, is_regex), synth)
    def summary_fn(valobj, dict): return get_synth_summary(synth_class, valobj, dict)
    # LLDB accesses summary fn's by name, so we need to create a unique one
    # and attach it to this module so the lookup succeeds.
    summary_fn.__name__ = '_get_synth_summary_' + synth_class.__name__
    setattr(module, summary_fn.__name__, summary_fn)
    attach_summary_to_type(summary_fn, type_name, is_regex)
def attach_summary_to_type(summary_fn, type_name, is_regex=False):
    """Register `summary_fn` (looked up by its qualified name) as the summary
    provider for `type_name` in the Rust category."""
    global module, rust_category
    qualified_name = __name__ + '.' + summary_fn.__name__
    summary = lldb.SBTypeSummary.CreateWithFunctionName(qualified_name)
    summary.SetOptions(lldb.eTypeOptionCascade)
    specifier = lldb.SBTypeNameSpecifier(type_name, is_regex)
    rust_category.AddTypeSummary(specifier, summary)
# 'get_summary' is not part of the standard LLDB synth provider API; this
# helper lets synth providers and their sibling summary providers share the
# same data-extraction logic.
def get_synth_summary(synth_class, valobj, dict):
    """Instantiate `synth_class` on the non-synthetic value and return its summary."""
    provider = synth_class(valobj.GetNonSyntheticValue(), dict)
    provider.update()
    return to_lldb_str(provider.get_summary())
# Chained GetChildMemberWithName lookups.
def gcm(valobj, *chain):
    """Follow a chain of member names starting at `valobj`; return the final child."""
    result = valobj
    for member_name in chain:
        result = result.GetChildMemberWithName(member_name)
    return result
# Rust-enabled LLDB using DWARF debug info will strip tuple field prefixes.
# If LLDB is not Rust-enabled, or when using PDB debug info, tuple fields are
# underscore-prefixed.
def read_unique_ptr(valobj):
    """Extract the raw pointer from a core::ptr::Unique, coping with the
    different field layouts produced by the various toolchain combinations."""
    pointer = valobj.GetChildMemberWithName('pointer')
    for field in ('__0', '0'):  # plain lldb / rust-lldb naming of the NonZero field
        child = pointer.GetChildMemberWithName(field)
        if child.IsValid():
            return child
    return pointer  # pointer no longer contains NonZero since Rust 1.33
def string_from_ptr(pointer, length):
    """Read `length` bytes of UTF-8 at `pointer` from the debuggee and decode
    them (invalid sequences are replaced).  Returns None if the read fails."""
    if length <= 0:
        return u''
    error = lldb.SBError()
    process = pointer.GetProcess()
    raw = process.ReadMemory(pointer.GetValueAsUnsigned(), length, error)
    if not error.Success():
        log.error('ReadMemory error: %s', error.GetCString())
        return None
    return raw.decode('utf8', 'replace')
def get_obj_summary(valobj, unavailable='{...}'):
    """Best-effort one-line rendering of `valobj`: prefer its summary, fall
    back to its value, then to the `unavailable` placeholder."""
    text = valobj.GetSummary()
    if text is None:
        text = valobj.GetValue()
    return text if text is not None else unavailable
def sequence_summary(children, maxsize=32):
    """Render an iterable of SBValues as a comma-separated list of their
    summaries, truncated with ', ...' once the output exceeds `maxsize` chars.

    The parameter was renamed from the misspelled `childern`; every call site
    in this file passes it positionally.
    """
    s = ''
    for child in children:
        if s:  # not the first element - add a separator
            s += ', '
        s += get_obj_summary(child)
        if len(s) > maxsize:
            s += ', ...'
            break
    return s
def get_unqualified_type_name(type_name):
    """Strip any `path::to::` qualifiers off a type name.  Names of compound
    types (tuples, arrays, references, raw pointers) are returned as-is."""
    if type_name[0] not in unqual_type_markers:
        return unqual_type_regex.match(type_name).group(1)
    return type_name

# Leading characters identifying compound type names that carry no namespace prefix.
unqual_type_markers = ["(", "[", "&", "*"]
# Matches `path::to::Name...` and captures the bare `Name`.
unqual_type_regex = re.compile(r'^(?:\w+::)*(\w+).*', re.UNICODE)
def dump_type(ty):
    """Debugging aid: log the name and byte size of an SBType."""
    name = ty.GetName()
    size = ty.GetByteSize()
    log.info('type %s: size=%d', name, size)
# ----- Summaries -----
def get_tuple_summary(valobj, dict):
    """Summarize a tuple value as '(elem0, elem1, ...)'."""
    parts = []
    for i in range(valobj.GetNumChildren()):
        parts.append(get_obj_summary(valobj.GetChildAtIndex(i)))
    return '(%s)' % ', '.join(parts)
def get_array_summary(valobj, dict):
    """Summarize an array value as '(len) [elem, ...]'."""
    count = valobj.GetNumChildren()
    return '(%d) [%s]' % (count, sequence_summary(valobj))
# ----- Synth providers ------
class RustSynthProvider(object):
    """Base class for all Rust synthetic-children providers.

    Implements the interface LLDB expects from a synth provider class with
    inert defaults; subclasses override initialize() and whichever of the
    child/summary hooks they need.
    """
    def __init__(self, valobj, dict=None):
        # `dict` is the internal_dict LLDB passes to providers; it is unused.
        # (Fix: the default used to be the mutable literal `{}`.)
        self.valobj = valobj
        self.initialize()
    def initialize(self):
        # Subclass hook: extract whatever state is needed from self.valobj.
        return None
    def update(self):
        return False
    def num_children(self):
        return 0
    def has_children(self):
        return False
    def get_child_at_index(self, index):
        return None
    def get_child_index(self, name):
        return None
    def get_summary(self):
        return None
class RegularEnumProvider(RustSynthProvider):
    """Provider for enums represented as a union of variant structs, where
    the first field of each variant holds the discriminant."""
    def initialize(self):
        # Select the active variant according to the discriminant value.
        discr = self.valobj.GetChildAtIndex(0).GetChildAtIndex(0).GetValueAsUnsigned()
        self.variant = self.valobj.GetChildAtIndex(discr)
    def num_children(self):
        # Child 0 of the variant is the discriminant itself - hide it.
        return max(0, self.variant.GetNumChildren() - 1)
    def has_children(self):
        return self.num_children() > 0
    def get_child_at_index(self, index):
        return self.variant.GetChildAtIndex(index + 1)
    def get_child_index(self, name):
        return self.variant.GetIndexOfChildWithName(name) - 1
    def get_summary(self):
        return get_obj_summary(self.variant)
# Base class for providers that represent array-like objects
class ArrayLikeSynthProvider(RustSynthProvider):
    """Common machinery for contiguous sequences.  Subclasses implement
    ptr_and_len(), returning (data pointer SBValue, element count)."""
    def initialize(self):
        self.ptr, self.len = self.ptr_and_len(self.valobj) # type: ignore
        self.item_type = self.ptr.GetType().GetPointeeType()
        self.item_size = self.item_type.GetByteSize()
    def ptr_and_len(self, obj):
        pass # abstract
    def num_children(self):
        return self.len
    def has_children(self):
        return True
    def get_child_at_index(self, index):
        try:
            if 0 <= index < self.len:
                byte_offset = index * self.item_size
                return self.ptr.CreateChildAtOffset('[%s]' % index, byte_offset, self.item_type)
            return None
        except Exception as e:
            log.error('%s', e)
            raise
    def get_child_index(self, name):
        # Child names look like '[NN]'.
        try:
            return int(name.lstrip('[').rstrip(']'))
        except Exception as e:
            log.error('%s', e)
            raise
    def get_summary(self):
        return '(%d)' % (self.len,)
class StdVectorSynthProvider(ArrayLikeSynthProvider):
    """Provider for alloc::vec::Vec<T>."""
    def ptr_and_len(self, vec):
        data_ptr = read_unique_ptr(gcm(vec, 'buf', 'ptr'))
        length = gcm(vec, 'len').GetValueAsUnsigned()
        return data_ptr, length
    def get_summary(self):
        try:
            items = (self.get_child_at_index(i) for i in range(self.len))
            return '(%d) vec![%s]' % (self.len, sequence_summary(items))
        except Exception as e:
            log.error('%s', e)
            raise
##################################################################################################################
class SliceSynthProvider(ArrayLikeSynthProvider):
    """Provider for slice references (&[T] / &mut [T])."""
    def ptr_and_len(self, vec):
        return (gcm(vec, 'data_ptr'),
                gcm(vec, 'length').GetValueAsUnsigned())
    def get_summary(self):
        items = (self.get_child_at_index(i) for i in range(self.len))
        return '(%d) &[%s]' % (self.len, sequence_summary(items))
# Base class for *String providers
class StringLikeSynthProvider(ArrayLikeSynthProvider):
    """Shared logic for string-like containers: each child is a single
    character, and the summary is the quoted (possibly truncated) contents."""
    def get_child_at_index(self, index):
        ch = ArrayLikeSynthProvider.get_child_at_index(self, index)
        ch.SetFormat(lldb.eFormatChar)  # display elements as characters
        return ch
    def get_summary(self):
        # Limit string length to 1000 characters to cope with uninitialized values
        # whose length field contains garbage.
        strval = string_from_ptr(self.ptr, min(self.len, 1000))
        if strval is None:  # fix: was `== None`; memory read failed, no summary
            return None
        if self.len > 1000:
            strval += u'...'
        return u'"%s"' % strval
class StrSliceSynthProvider(StringLikeSynthProvider):
    """Provider for &str string slices."""
    def ptr_and_len(self, valobj):
        return (gcm(valobj, 'data_ptr'),
                gcm(valobj, 'length').GetValueAsUnsigned())
class StdStringSynthProvider(StringLikeSynthProvider):
    """Provider for alloc::string::String (a Vec<u8> newtype)."""
    def ptr_and_len(self, valobj):
        inner_vec = gcm(valobj, 'vec')
        return (read_unique_ptr(gcm(inner_vec, 'buf', 'ptr')),
                gcm(inner_vec, 'len').GetValueAsUnsigned())
class StdCStringSynthProvider(StringLikeSynthProvider):
    """Provider for std::ffi::CString; hides the trailing NUL byte."""
    def ptr_and_len(self, valobj):
        inner = gcm(valobj, 'inner')
        return (gcm(inner, 'data_ptr'),
                gcm(inner, 'length').GetValueAsUnsigned() - 1)
class StdOsStringSynthProvider(StringLikeSynthProvider):
    """Provider for std::ffi::OsString."""
    def ptr_and_len(self, valobj):
        vec = gcm(valobj, 'inner', 'inner')
        wtf8_bytes = gcm(vec, 'bytes')  # Windows OsString has an extra layer
        if wtf8_bytes.IsValid():
            vec = wtf8_bytes
        return (read_unique_ptr(gcm(vec, 'buf', 'ptr')),
                gcm(vec, 'len').GetValueAsUnsigned())
class FFISliceSynthProvider(StringLikeSynthProvider):
    """Base provider for unsized FFI-ish slice types (CStr, OsStr, Path)."""
    def ptr_and_len(self, valobj):
        process = valobj.GetProcess()
        slice_ptr = valobj.GetLoadAddress()
        data_ptr_type = valobj.GetTarget().GetBasicType(lldb.eBasicTypeChar).GetPointerType()
        # Unsized slice objects have incomplete debug info, so here we just assume standard slice
        # reference layout: [<pointer to data>, <data size>]
        error = lldb.SBError()
        pointer = valobj.CreateValueFromAddress('data', slice_ptr, data_ptr_type)
        # Length word sits one pointer-size past the data pointer.
        length = process.ReadPointerFromMemory(slice_ptr + process.GetAddressByteSize(), error)
        return pointer, length
class StdCStrSynthProvider(FFISliceSynthProvider):
    """Provider for std::ffi::CStr."""
    def ptr_and_len(self, valobj):
        ptr, len = FFISliceSynthProvider.ptr_and_len(self, valobj)
        return (ptr, len-1) # drop terminating '\0'
class StdOsStrSynthProvider(FFISliceSynthProvider):
    """Provider for std::ffi::OsStr (plain FFI slice layout)."""
    pass
class StdPathBufSynthProvider(StdOsStringSynthProvider):
    """Provider for std::path::PathBuf (an OsString in a newtype wrapper)."""
    def ptr_and_len(self, valobj):
        return StdOsStringSynthProvider.ptr_and_len(self, gcm(valobj, 'inner'))
class StdPathSynthProvider(FFISliceSynthProvider):
    """Provider for std::path::Path (plain FFI slice layout)."""
    pass
##################################################################################################################
class DerefSynthProvider(RustSynthProvider):
    """Base for smart-pointer-like providers: all child access is forwarded
    to self.deref, the pointee value computed by the subclass."""
    # Invalid placeholder until a subclass's initialize() assigns the pointee.
    deref = lldb.SBValue()
    def num_children(self):
        return self.deref.GetNumChildren()
    def has_children(self):
        return self.deref.MightHaveChildren()
    def get_child_at_index(self, index):
        return self.deref.GetChildAtIndex(index)
    def get_child_index(self, name):
        return self.deref.GetIndexOfChildWithName(name)
    def get_summary(self):
        return get_obj_summary(self.deref)
# Base for Rc and Arc
class StdRefCountedSynthProvider(DerefSynthProvider):
    """Shared summary for Rc/Arc: show the reference counts, then the pointee
    (or '<disposed>' once the strong count has dropped to zero)."""
    weak = 0
    strong = 0
    def get_summary(self):
        if self.weak != 0:
            counts = '(refs:%d,weak:%d) ' % (self.strong, self.weak)
        else:
            counts = '(refs:%d) ' % self.strong
        body = get_obj_summary(self.deref) if self.strong > 0 else '<disposed>'
        return counts + body
class StdRcSynthProvider(StdRefCountedSynthProvider):
    """Provider for alloc::rc::Rc and rc::Weak."""
    def initialize(self):
        inner = read_unique_ptr(gcm(self.valobj, 'ptr'))
        self.strong = gcm(inner, 'strong', 'value', 'value').GetValueAsUnsigned()
        self.weak = gcm(inner, 'weak', 'value', 'value').GetValueAsUnsigned()
        if self.strong == 0:
            self.deref = lldb.SBValue()  # value already dropped
        else:
            self.deref = gcm(inner, 'value')
            # Discount the implicit weak reference communally owned by all the strong pointers.
            self.weak -= 1
        self.deref.SetPreferSyntheticValue(True)
class StdArcSynthProvider(StdRefCountedSynthProvider):
    """Provider for alloc::sync::Arc and sync::Weak."""
    def initialize(self):
        inner = read_unique_ptr(gcm(self.valobj, 'ptr'))
        self.strong = gcm(inner, 'strong', 'v', 'value').GetValueAsUnsigned()
        self.weak = gcm(inner, 'weak', 'v', 'value').GetValueAsUnsigned()
        if self.strong == 0:
            self.deref = lldb.SBValue()  # value already dropped
        else:
            self.deref = gcm(inner, 'data')
            # Discount the implicit weak reference communally owned by all the strong pointers.
            self.weak -= 1
        self.deref.SetPreferSyntheticValue(True)
class StdMutexSynthProvider(DerefSynthProvider):
    """Provider for std::sync::Mutex: shows the protected value."""
    def initialize(self):
        protected = gcm(self.valobj, 'data', 'value')
        protected.SetPreferSyntheticValue(True)
        self.deref = protected
class StdCellSynthProvider(DerefSynthProvider):
    """Provider for core::cell::Cell: shows the contained value."""
    def initialize(self):
        contained = gcm(self.valobj, 'value', 'value')
        contained.SetPreferSyntheticValue(True)
        self.deref = contained
class StdRefCellSynthProvider(DerefSynthProvider):
    """Provider for core::cell::RefCell: shows the contained value plus the
    current borrow state."""
    def initialize(self):
        self.deref = gcm(self.valobj, 'value', 'value')
        self.deref.SetPreferSyntheticValue(True)
    def get_summary(self):
        borrow = gcm(self.valobj, 'borrow', 'value', 'value').GetValueAsSigned()
        if borrow < 0:
            prefix = '(borrowed:mut) '  # negative flag: mutably borrowed
        elif borrow > 0:
            prefix = '(borrowed:%d) ' % borrow  # count of outstanding shared borrows
        else:
            prefix = ''
        return prefix + get_obj_summary(self.deref)
class StdRefCellBorrowSynthProvider(DerefSynthProvider):
    """Provider for core::cell::Ref / RefMut borrow guards: follows the
    guard's pointer to present the borrowed value itself."""
    def initialize(self):
        borrowed = gcm(self.valobj, 'value').Dereference()
        borrowed.SetPreferSyntheticValue(True)
        self.deref = borrowed
##################################################################################################################
# Magic field names emitted by rustc into debug info to describe enum layout.
ENCODED_ENUM_PREFIX = 'RUST$ENCODED$ENUM$'  # marks a niche-optimized enum; the rest of the name encodes the layout
ENUM_DISCRIMINANT = 'RUST$ENUM$DISR'  # name of the discriminant field of a regular enum
class EnumSynthProvider(DerefSynthProvider):
    """Base provider for Rust enums.

    initialize() decodes the two debug-info encodings rustc uses for enums
    (niche-optimized and regular).  For windows-*-msvc targets and
    non-rust-enabled liblldb, decoding is delegated to initialize_enum(),
    overridden by subclasses.  Sets self.variant (variant name/value) and
    self.deref (payload, or an invalid SBValue for payload-less variants).
    """
    def initialize(self):
        obj_type = self.valobj.GetType()
        first_field_name = obj_type.GetFieldAtIndex(0).GetName()
        # The first two branches are for the sake of windows-*-msvc targets and non-rust-enabled liblldb.
        # Normally, we should be calling the initialize_enum().
        if first_field_name.startswith(ENCODED_ENUM_PREFIX): # Niche-optimized enum
            # Field name layout: RUST$ENCODED$ENUM$<idx>$...$<NullVariantName>
            tokens = first_field_name[len(ENCODED_ENUM_PREFIX):].split("$")
            discr_indices = [int(index) for index in tokens[:-1]]
            null_variant = tokens[-1]
            # Walk the encoded index path down to the discriminant field.
            discriminant = self.valobj.GetChildAtIndex(0)
            for discr_index in discr_indices:
                discriminant = discriminant.GetChildAtIndex(discr_index)
            # Recurse down the first field of the discriminant till we reach a non-struct type,
            for i in range(20): # ... but limit the depth, just in case.
                if discriminant.GetType().GetTypeClass() != lldb.eTypeClassStruct:
                    break
                discriminant = discriminant.GetChildAtIndex(0)
            if discriminant.GetValueAsUnsigned() == 0:
                self.variant = null_variant
                self.deref = lldb.SBValue()
            else:
                # Fix: self.variant was previously left unset on this path,
                # making get_summary() raise AttributeError for non-null variants.
                self.variant = ''
                self.deref = self.valobj.GetChildAtIndex(0)
        elif first_field_name == ENUM_DISCRIMINANT: # Regular enum
            self.variant = self.valobj.GetChildAtIndex(0).GetValue()
            self.deref = self.valobj.GetChildAtIndex(1)
        else:
            self.initialize_enum()
        self.deref.SetPreferSyntheticValue(True)
    def initialize_enum(self):
        # Subclass hook for layouts where the discriminant is not visible.
        pass
    def get_summary(self):
        # Payload-less variants leave self.deref invalid.
        if self.deref.IsValid():
            return self.variant + '(' + get_obj_summary(self.deref) + ')'
        else:
            return self.variant
class StdOptionSynthProvider(EnumSynthProvider):
    """Fallback decoder for core::option::Option (MSVC / non-rust lldb layouts)."""
    def initialize_enum(self):
        is_some = self.valobj.GetTypeName().endswith('::Some')
        self.variant = 'Some' if is_some else 'None'
        # Only the Some variant carries a payload.
        self.deref = gcm(self.valobj, '0') if is_some else lldb.SBValue()
class StdResultSynthProvider(EnumSynthProvider):
    """Fallback decoder for core::result::Result (MSVC / non-rust lldb layouts)."""
    def initialize_enum(self):
        is_ok = self.valobj.GetTypeName().endswith('::Ok')
        self.variant = 'Ok' if is_ok else 'Err'
        # Both variants carry their payload in field '0'.
        self.deref = gcm(self.valobj, '0')
class StdCowSynthProvider(EnumSynthProvider):
    """Fallback decoder for alloc::borrow::Cow (MSVC / non-rust lldb layouts)."""
    def initialize_enum(self):
        is_owned = self.valobj.GetTypeName().endswith('::Owned')
        self.variant = 'Owned' if is_owned else 'Borrowed'
        # Both variants carry their payload in field '0'.
        self.deref = gcm(self.valobj, '0')
##################################################################################################################
class StdHashMapSynthProvider(RustSynthProvider):
    """Provider for std HashMap, which wraps a hashbrown::raw::RawTable.

    The table consists of `num_buckets` control bytes plus a bucket array;
    a bucket is occupied when the high bit of its control byte is clear.
    """
    def initialize(self):
        self.initialize_table(gcm(self.valobj, 'base', 'table'))
    def initialize_table(self, table):
        """Decode a RawTable into self.buckets and self.valid_indices."""
        assert table.IsValid()
        # bucket_mask is capacity-1 (capacity is a power of two).
        self.num_buckets = gcm(table, 'bucket_mask').GetValueAsUnsigned() + 1
        ctrl_ptr = gcm(table, 'ctrl', 'pointer')
        ctrl = ctrl_ptr.GetPointeeData(0, self.num_buckets)
        if table.type.GetNumberOfTemplateArguments() > 0:
            item_ty = table.type.GetTemplateArgumentType(0)
        else: # we must be on windows-msvc - try to look up item type by name
            table_ty_name = table.GetType().GetName() # "hashbrown::raw::RawTable<ITEM_TY>"
            item_ty_name = table_ty_name[table_ty_name.find('<')+1: table_ty_name.rfind('>')]
            item_ty = table.GetTarget().FindTypes(item_ty_name).GetTypeAtIndex(0)
        buckets_ty = item_ty.GetArrayType(self.num_buckets)
        # Old layout has a `data` member; in the new layout the buckets live
        # directly below the ctrl array.
        data = gcm(table, 'data')
        new_layout = not data.IsValid()
        if new_layout: # buckets are located above `ctrl`, in reverse order.
            start_addr = ctrl_ptr.GetValueAsUnsigned() - item_ty.GetByteSize() * self.num_buckets
            self.buckets = self.valobj.CreateValueFromAddress('data', start_addr, buckets_ty)
        else:
            self.buckets = gcm(data, 'pointer').Dereference().Cast(buckets_ty)
        error = lldb.SBError()
        self.valid_indices = []
        for i in range(self.num_buckets):
            # High bit clear => bucket i is occupied (not EMPTY/DELETED).
            if ctrl.GetUnsignedInt8(error, i) & 0x80 == 0:
                if new_layout:
                    self.valid_indices.append(self.num_buckets - 1 - i)
                else:
                    self.valid_indices.append(i)
    def has_children(self):
        return True
    def num_children(self):
        return len(self.valid_indices)
    def get_child_at_index(self, index):
        bucket_idx = self.valid_indices[index]
        item = self.buckets.GetChildAtIndex(bucket_idx)
        # Re-parent the item so it displays as '[index]'.
        return item.CreateChildAtOffset('[%d]' % index, 0, item.GetType())
    def get_child_index(self, name):
        # Child names look like '[NN]'.
        try:
            return int(name.lstrip('[').rstrip(']'))
        except Exception as e:
            log.error('%s', e)
            raise
    def get_summary(self):
        return 'size=%d, capacity=%d' % (self.num_children(), self.num_buckets)
class StdHashSetSynthProvider(StdHashMapSynthProvider):
    """Provider for std HashSet: a HashMap whose items are (key, ()) pairs;
    only the key part is exposed."""
    def initialize(self):
        # The field path changed in Rust 1.48.
        table = gcm(self.valobj, 'base', 'map', 'table') # rust 1.48
        if not table.IsValid():
            table = gcm(self.valobj, 'map', 'base', 'table') # rust < 1.48
        self.initialize_table(table)
    def get_child_at_index(self, index):
        slot = self.valid_indices[index]
        key = self.buckets.GetChildAtIndex(slot).GetChildAtIndex(0)
        return key.CreateChildAtOffset('[%d]' % index, 0, key.GetType())
##################################################################################################################
def __lldb_init_module(debugger_obj, internal_dict):
    """Entry point invoked by LLDB when this script is imported."""
    log.info('Initializing')
    initialize_category(debugger_obj)
|
#!/usr/bin/env python
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import BaseHTTPServer
import daemonserver
import httparchive
import httplib
import logging
import os
import socket
import SocketServer
import subprocess
import time
class RealHttpRequest(object):
  """Callable that forwards a request to the real origin server (record mode)."""
  def __init__(self, real_dns_lookup):
    # real_dns_lookup: callable mapping a hostname to its true IP address,
    # bypassing the replay DNS server (which answers with our own address).
    self._real_dns_lookup = real_dns_lookup
  def __call__(self, request, headers):
    """Fetch `request` from the origin.

    Returns an httplib response object with an empty `raw_data` list attached
    (the caller fills it with the chunks it reads), or None on failure.
    """
    # TODO(tonyg): Strip sdch from the request headers because we can't
    # guarantee that the dictionary will be recorded, so replay may not work.
    if 'accept-encoding' in headers:
      headers['accept-encoding'] = headers['accept-encoding'].replace('sdch', '')
    logging.debug('RealHttpRequest: %s %s', request.host, request.path)
    host_ip = self._real_dns_lookup(request.host)
    try:
      connection = httplib.HTTPConnection(host_ip)
      connection.request(
          request.command,
          request.path,
          request.request_body,
          headers)
      response = connection.getresponse()
      # On the response, we'll save every read exactly as we read it
      # from the network. We'll use this to replay chunks similarly to
      # how we recorded them.
      # TODO(tonyg): Use something other than httplib so that we can preserve
      # the original chunks.
      response.raw_data = []
      return response
    except Exception, e:
      logging.critical('Could not fetch %s: %s', request, e)
      return None
class HttpArchiveHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """Shared request-handler logic for record and replay modes."""
  protocol_version = 'HTTP/1.1'
  # Since we do lots of small wfile.write() calls, turn on buffering.
  wbufsize = -1
  # Make request handler logging match our logging format.
  def log_request(self, code='-', size='-'): pass
  def log_error(self, format, *args): logging.error(format, *args)
  def log_message(self, format, *args): logging.info(format, *args)
  def read_request_body(self):
    """Return the request body as a string, or None if there is none."""
    request_body = None
    length = int(self.headers.get('content-length', 0)) or None
    if length:
      request_body = self.rfile.read(length)
    return request_body
  def get_header_dict(self):
    return dict(self.headers.items())
  def get_archived_http_request(self):
    """Build an ArchivedHttpRequest key for this request; None if no Host header."""
    host = self.headers.get('host')
    if host == None:
      logging.error('Request without host header')
      return None
    return httparchive.ArchivedHttpRequest(
        self.command,
        host,
        self.path,
        self.read_request_body())
  def send_archived_http_response(self, response):
    """Write an ArchivedHttpResponse back to the client, replaying the
    originally recorded chunking where applicable."""
    try:
      # We need to set the server name before we start the response.
      # Take a scan through the response headers here
      use_chunked = False
      has_content_length = False
      server_name = 'WebPageReplay'
      for header, value in response.headers:
        if header == 'server':
          server_name = value
        if header == 'transfer-encoding':
          use_chunked = True
        if header == 'content-length':
          has_content_length = True
      self.server_version = server_name
      self.sys_version = ''
      if response.version == 10:
        self.protocol_version = 'HTTP/1.0'
      # If we don't have chunked encoding and there is no content length,
      # we need to manually compute the content-length.
      if not use_chunked and not has_content_length:
        content_length = 0
        for item in response.response_data:
          content_length += len(item)
        response.headers.append(('content-length', str(content_length)))
      self.send_response(response.status, response.reason)
      # TODO(mbelshe): This is lame - each write is a packet!
      for header, value in response.headers:
        skip_header = False
        if header == 'server':
          skip_header = True
        if skip_header == False:
          self.send_header(header, value)
      self.end_headers()
      # NOTE(review): no explicit terminal '0' chunk is written; presumably the
      # recorded raw_data ends with an empty read whose hex length is '0',
      # which serves as the terminator - confirm against RecordHandler.
      for item in response.response_data:
        if use_chunked:
          self.wfile.write(str(hex(len(item)))[2:])
          self.wfile.write('\r\n')
        self.wfile.write(item)
        if use_chunked:
          self.wfile.write('\r\n')
      self.wfile.flush()
      # TODO(mbelshe): This connection close doesn't seem to work.
      if response.version == 10:
        self.close_connection = 1
    except Exception, e:
      logging.error('Error sending response for %s/%s: %s',
                    self.headers['host'],
                    self.path,
                    e)
  # POST and HEAD are routed through the subclass's do_GET, which archives or
  # replays based on (command, host, path, body).
  def do_POST(self):
    self.do_GET()
  def do_HEAD(self):
    self.do_GET()
  # Override the default send error with a version that doesn't unnecessarily close
  # the connection.
  def send_error(self, error, message = None):
    # NOTE(review): log message contains the typo 'overriddent', and the body
    # variable says "Not found" while "Not Found" is written; the two strings
    # happen to be the same length (9), so content-length is still correct.
    logging.error("send error overriddent!" + str(error))
    body = "Not found"
    self.send_response(error, message)
    self.send_header("content-type", "text/plain")
    self.send_header("content-length", str(len(body)))
    self.end_headers()
    self.wfile.write("Not Found")
    self.wfile.flush()
class RecordHandler(HttpArchiveHandler):
  """Record mode: fetch from the real server, archive the response, serve it."""
  def do_GET(self):
    request = self.get_archived_http_request()
    if request is None:
      self.send_error(500)
      return
    response = self.server.real_http_request(request, self.get_header_dict())
    if response is None:
      self.send_error(404)
      return
    # Read the rest of the HTTP response.
    # The final zero-length read is archived too - presumably so that replay
    # of chunked responses emits a terminal '0' chunk (see
    # send_archived_http_response); confirm before changing.
    while True:
      data = response.read(4096)
      response.raw_data.append(data)
      if len(data) == 0:
        break
    archived_http_response = httparchive.ArchivedHttpResponse(
        response.version,
        response.status,
        response.reason,
        response.getheaders(),
        response.raw_data)
    if self.server.use_deterministic_script:
      try:
        archived_http_response.inject_deterministic_script()
      except:
        logging.error('Failed to inject deterministic script for %s', request)
    self.send_archived_http_response(archived_http_response)
    self.server.http_archive[request] = archived_http_response
    logging.debug('Recorded: %s', request)
class ReplayHandler(HttpArchiveHandler):
  """Replay mode: serve archived responses; 404 for anything not recorded."""
  def do_GET(self):
    start_time = time.time()
    request = self.get_archived_http_request()
    # A None request (missing Host header) is never in the archive -> 404.
    if request in self.server.http_archive:
      self.send_archived_http_response(self.server.http_archive[request])
      request_time_ms = (time.time() - start_time) * 1000.0;
      logging.debug('Replayed: %s (%dms)', request, request_time_ms)
    else:
      self.send_error(404)
      logging.error('Could not replay: %s', request)
class RecordHttpProxyServer(SocketServer.ThreadingMixIn,
                            BaseHTTPServer.HTTPServer,
                            daemonserver.DaemonServer):
  """Threaded HTTP server that records origin responses into an archive file."""
  def __init__(
      self, http_archive_filename, use_deterministic_script, real_dns_lookup,
      host='localhost', port=80, use_ssl=False, certfile='', keyfile=''):
    # NOTE(review): use_ssl/certfile/keyfile are accepted but unused here.
    self.use_deterministic_script = use_deterministic_script
    self.archive_filename = http_archive_filename
    self.real_http_request = RealHttpRequest(real_dns_lookup)
    # Fail fast if we won't be able to persist the archive on shutdown.
    self._assert_archive_file_writable()
    self.http_archive = httparchive.HttpArchive()
    try:
      # Increase the listen queue size (default is 5). Since we're intercepting
      # many domains through this single server, it is quite possible to get
      # more than 5 concurrent connection requests.
      self.request_queue_size = 128
      BaseHTTPServer.HTTPServer.__init__(self, (host, port), RecordHandler)
    except Exception, e:
      # NOTE(review): on bind failure we log and return, leaving a
      # half-constructed server; callers see no exception - confirm intended.
      logging.critical('Could not start HTTPServer on port %d: %s', port, e)
      return
    logging.info('Recording on %s:%s...', host, port)
  def _assert_archive_file_writable(self):
    # The archive file must either be writable or creatable in its directory.
    archive_dir = os.path.dirname(os.path.abspath(self.archive_filename))
    assert os.path.exists(archive_dir), 'Archive directory must exist.'
    assert (os.access(self.archive_filename, os.W_OK) or
            (os.access(archive_dir, os.W_OK) and
             not os.path.exists(self.archive_filename))), \
        'Need permissions to write archive file'
  def cleanup(self):
    """Stop serving and persist the recorded archive to disk."""
    try:
      self.shutdown()
    except KeyboardInterrupt, e:
      pass
    logging.info('Stopped Record HTTP server')
    self.http_archive.Persist(self.archive_filename)
    logging.info('Saved %d responses to %s',
                 len(self.http_archive), self.archive_filename)
class ReplayHttpProxyServer(SocketServer.ThreadingMixIn,
                            BaseHTTPServer.HTTPServer,
                            daemonserver.DaemonServer):
  """Threaded HTTP server that replays responses from a previously recorded archive."""
  def __init__(
      self, http_archive_filename, use_deterministic_script, real_dns_lookup,
      host='localhost', port=80, use_ssl=False, certfile='', keyfile=''):
    # NOTE(review): real_dns_lookup, use_ssl, certfile and keyfile are
    # accepted but unused in replay mode.
    self.use_deterministic_script = use_deterministic_script
    self.http_archive = httparchive.HttpArchive.Create(http_archive_filename)
    logging.info('Loaded %d responses from %s',
                 len(self.http_archive), http_archive_filename)
    try:
      # Increase the listen queue size (default is 5). Since we're intercepting
      # many domains through this single server, it is quite possible to get
      # more than 5 concurrent connection requests.
      self.request_queue_size = 128
      BaseHTTPServer.HTTPServer.__init__(self, (host, port), ReplayHandler)
    except Exception, e:
      # NOTE(review): on bind failure we log and return, leaving a
      # half-constructed server; callers see no exception - confirm intended.
      logging.critical('Could not start HTTPServer on port %d: %s', port, e)
      return
    logging.info('Replaying on %s:%s...', host, port)
  def cleanup(self):
    """Stop serving (the archive is read-only in replay mode)."""
    try:
      self.shutdown()
    except KeyboardInterrupt, e:
      pass
    logging.info('Stopped HTTP server')
remove extra log msg
git-svn-id: abd770af2affaf62930f25b244f6edafe8ddae5b@211 164481a5-15cb-f69f-4b93-856c5b7754c5
#!/usr/bin/env python
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import BaseHTTPServer
import daemonserver
import httparchive
import httplib
import logging
import os
import socket
import SocketServer
import subprocess
import time
class RealHttpRequest(object):
  """Callable that forwards an archived request to the real origin server."""

  def __init__(self, real_dns_lookup):
    # real_dns_lookup: callable mapping a hostname to an IP address
    # (bypasses the DNS interception that is active during record/replay).
    self._real_dns_lookup = real_dns_lookup

  def __call__(self, request, headers):
    """Fetch |request| over the network; return the httplib response or None.

    The caller is expected to read the body; raw_data is initialized so the
    caller can accumulate reads exactly as they came off the wire.
    """
    # TODO(tonyg): Strip sdch from the request headers because we can't
    # guarantee that the dictionary will be recorded, so replay may not work.
    if 'accept-encoding' in headers:
      headers['accept-encoding'] = headers['accept-encoding'].replace('sdch', '')
    logging.debug('RealHttpRequest: %s %s', request.host, request.path)
    host_ip = self._real_dns_lookup(request.host)
    try:
      # NOTE(review): HTTPConnection(host_ip) connects to port 80 only; any
      # non-default port on the original request is dropped -- confirm that
      # recorded traffic is plain HTTP/80.
      connection = httplib.HTTPConnection(host_ip)
      connection.request(
          request.command,
          request.path,
          request.request_body,
          headers)
      response = connection.getresponse()
      # On the response, we'll save every read exactly as we read it
      # from the network. We'll use this to replay chunks similarly to
      # how we recorded them.
      # TODO(tonyg): Use something other than httplib so that we can preserve
      # the original chunks.
      response.raw_data = []
      return response
    except Exception, e:
      logging.critical('Could not fetch %s: %s', request, e)
      return None
class HttpArchiveHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """Base request handler shared by the record and replay proxy servers.

  Adds archive-aware request parsing (get_archived_http_request) and archived
  response serialization (send_archived_http_response) on top of
  BaseHTTPRequestHandler.
  """
  protocol_version = 'HTTP/1.1'

  # Since we do lots of small wfile.write() calls, turn on buffering.
  wbufsize = -1

  # Make request handler logging match our logging format.
  def log_request(self, code='-', size='-'):
    pass

  def log_error(self, format, *args):
    logging.error(format, *args)

  def log_message(self, format, *args):
    logging.info(format, *args)

  def read_request_body(self):
    """Return the request body, or None when there is no content-length."""
    request_body = None
    length = int(self.headers.get('content-length', 0)) or None
    if length:
      request_body = self.rfile.read(length)
    return request_body

  def get_header_dict(self):
    """Return the request headers as a plain dict."""
    return dict(self.headers.items())

  def get_archived_http_request(self):
    """Build an ArchivedHttpRequest from the current request, or None."""
    host = self.headers.get('host')
    if host is None:
      logging.error('Request without host header')
      return None
    return httparchive.ArchivedHttpRequest(
        self.command,
        host,
        self.path,
        self.read_request_body())

  def send_archived_http_response(self, response):
    """Write an ArchivedHttpResponse to the client, preserving chunking."""
    try:
      # We need to set the server name before we start the response, so
      # scan the headers for it (and for chunking/content-length) first.
      use_chunked = False
      has_content_length = False
      server_name = 'WebPageReplay'
      for header, value in response.headers:
        if header == 'server':
          server_name = value
        if header == 'transfer-encoding':
          use_chunked = True
        if header == 'content-length':
          has_content_length = True
      self.server_version = server_name
      self.sys_version = ''
      if response.version == 10:
        self.protocol_version = 'HTTP/1.0'
      # If we don't have chunked encoding and there is no content length,
      # we need to manually compute the content-length.
      if not use_chunked and not has_content_length:
        content_length = sum(len(item) for item in response.response_data)
        response.headers.append(('content-length', str(content_length)))
      self.send_response(response.status, response.reason)
      # TODO(mbelshe): This is lame - each write is a packet!
      for header, value in response.headers:
        # The server header was already emitted via server_version above.
        if header != 'server':
          self.send_header(header, value)
      self.end_headers()
      for item in response.response_data:
        if use_chunked:
          # Chunk length in hex without the '0x' prefix.  '%x' also avoids
          # the trailing 'L' that hex() appends for Python 2 longs, which the
          # old str(hex(len(item)))[2:] would have leaked onto the wire.
          self.wfile.write('%x\r\n' % len(item))
        self.wfile.write(item)
        if use_chunked:
          self.wfile.write('\r\n')
      self.wfile.flush()
      # TODO(mbelshe): This connection close doesn't seem to work.
      if response.version == 10:
        self.close_connection = 1
    except Exception as e:
      logging.error('Error sending response for %s/%s: %s',
                    self.headers['host'],
                    self.path,
                    e)

  def do_POST(self):
    self.do_GET()

  def do_HEAD(self):
    self.do_GET()

  # Override the default send_error with a version that doesn't unnecessarily
  # close the connection.
  def send_error(self, error, message=None):
    """Send a minimal plain-text error body, keeping the connection open.

    BUGFIX: the original declared body = "Not found" (used only for the
    content-length) but wrote the literal "Not Found" -- the two only agreed
    on length by coincidence.  Use a single value for both.
    """
    body = "Not Found"
    self.send_response(error, message)
    self.send_header("content-type", "text/plain")
    self.send_header("content-length", str(len(body)))
    self.end_headers()
    self.wfile.write(body)
    self.wfile.flush()
class RecordHandler(HttpArchiveHandler):
  """Handler that fetches each request from the network and archives it."""

  def do_GET(self):
    request = self.get_archived_http_request()
    if request is None:
      self.send_error(500)
      return
    response = self.server.real_http_request(request, self.get_header_dict())
    if response is None:
      self.send_error(404)
      return
    # Read the rest of the HTTP response.  The final zero-length read is
    # appended on purpose: replay relies on it to emit the terminating
    # zero-length chunk when the response is chunked.
    while True:
      data = response.read(4096)
      response.raw_data.append(data)
      if len(data) == 0:
        break
    archived_http_response = httparchive.ArchivedHttpResponse(
        response.version,
        response.status,
        response.reason,
        response.getheaders(),
        response.raw_data)
    if self.server.use_deterministic_script:
      try:
        archived_http_response.inject_deterministic_script()
      # BUGFIX: was a bare 'except:', which also swallowed SystemExit and
      # KeyboardInterrupt.  Catch Exception and keep serving best-effort.
      except Exception:
        logging.error('Failed to inject deterministic script for %s', request)
    self.send_archived_http_response(archived_http_response)
    self.server.http_archive[request] = archived_http_response
    logging.debug('Recorded: %s', request)
class ReplayHandler(HttpArchiveHandler):
  """Handler that serves requests from the in-memory archive (404 otherwise)."""

  def do_GET(self):
    started = time.time()
    request = self.get_archived_http_request()
    archive = self.server.http_archive
    if request not in archive:
      self.send_error(404)
      logging.error('Could not replay: %s', request)
      return
    self.send_archived_http_response(archive[request])
    elapsed_ms = (time.time() - started) * 1000.0
    logging.debug('Replayed: %s (%dms)', request, elapsed_ms)
class RecordHttpProxyServer(SocketServer.ThreadingMixIn,
                            BaseHTTPServer.HTTPServer,
                            daemonserver.DaemonServer):
  """Threaded HTTP proxy that records live responses into an archive file."""

  def __init__(
      self, http_archive_filename, use_deterministic_script, real_dns_lookup,
      host='localhost', port=80, use_ssl=False, certfile='', keyfile=''):
    self.use_deterministic_script = use_deterministic_script
    self.archive_filename = http_archive_filename
    self.real_http_request = RealHttpRequest(real_dns_lookup)
    # Fail early if we would be unable to persist the archive at shutdown.
    self._assert_archive_file_writable()
    self.http_archive = httparchive.HttpArchive()
    try:
      # Increase the listen queue size (default is 5). Since we're intercepting
      # many domains through this single server, it is quite possible to get
      # more than 5 concurrent connection requests.
      self.request_queue_size = 128
      BaseHTTPServer.HTTPServer.__init__(self, (host, port), RecordHandler)
    except Exception, e:
      # On bind/listen failure: log and return, leaving the instance unusable.
      logging.critical('Could not start HTTPServer on port %d: %s', port, e)
      return
    logging.info('Recording on %s:%s...', host, port)

  def _assert_archive_file_writable(self):
    """Assert the archive path is writable (existing file, or creatable).

    NOTE(review): assert statements are stripped under 'python -O'; raise an
    explicit exception instead if this check must always run.
    """
    archive_dir = os.path.dirname(os.path.abspath(self.archive_filename))
    assert os.path.exists(archive_dir), 'Archive directory must exist.'
    assert (os.access(self.archive_filename, os.W_OK) or
            (os.access(archive_dir, os.W_OK) and
             not os.path.exists(self.archive_filename))), \
        'Need permissions to write archive file'

  def cleanup(self):
    """Stop serving and persist the recorded archive to disk."""
    try:
      self.shutdown()
    except KeyboardInterrupt, e:
      pass
    logging.info('Stopped Record HTTP server')
    self.http_archive.Persist(self.archive_filename)
    logging.info('Saved %d responses to %s',
                 len(self.http_archive), self.archive_filename)
class ReplayHttpProxyServer(SocketServer.ThreadingMixIn,
                            BaseHTTPServer.HTTPServer,
                            daemonserver.DaemonServer):
  """Threaded HTTP proxy serving responses from a previously recorded archive."""

  def __init__(
      self, http_archive_filename, use_deterministic_script, real_dns_lookup,
      host='localhost', port=80, use_ssl=False, certfile='', keyfile=''):
    self.use_deterministic_script = use_deterministic_script
    self.http_archive = httparchive.HttpArchive.Create(http_archive_filename)
    logging.info('Loaded %d responses from %s',
                 len(self.http_archive), http_archive_filename)
    # Bump the listen backlog above the default of 5: this single server
    # fronts many intercepted domains, so bursts of connections are common.
    self.request_queue_size = 128
    try:
      BaseHTTPServer.HTTPServer.__init__(self, (host, port), ReplayHandler)
    except Exception as e:
      logging.critical('Could not start HTTPServer on port %d: %s', port, e)
      return
    logging.info('Replaying on %s:%s...', host, port)

  def cleanup(self):
    """Shut down; a Ctrl-C delivered during shutdown is deliberately ignored."""
    try:
      self.shutdown()
    except KeyboardInterrupt:
      pass
    logging.info('Stopped HTTP server')
|
"""
.. module:: bedtools
:synopsis: interface to bedtools
.. moduleauthor:: Ken Sugino <ken.sugino@gmail.com>
"""
import os
import subprocess
import logging
logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)
import gzip
from collections import defaultdict
from collections import Counter
import glob
import shutil
import gzip
import csv
import pandas as PD
import numpy as N
from jgem import utils as UT
from jgem import gtfgffbed as GGB
from jgem import fasta as FA
import jgem.cy.bw as cybw
import inspect
### BAM,BED,WIGGLE,BIGWIG ##############################################
# decorators to separate logic
def compressQ(outname, noerr=0):
    """Decorator factory: transparent '.gz' output handling + error checking.

    The wrapped function must have an output-path parameter named ``outname``
    and return ``noerr`` on success.  When the caller passes a path ending in
    '.gz', the function is invoked with the uncompressed path and the result
    is gzipped afterwards.  The wrapper returns the final output path and
    raises RuntimeError when the function returns anything other than noerr.
    """
    def deco(func):
        # BUGFIX/modernization: inspect.getargspec() was removed in Python
        # 3.11; getfullargspec() (available since 3.0) is the replacement.
        argnames = inspect.getfullargspec(func).args
        pos = argnames.index(outname)
        def wrap(*args, **kwargs):
            # locate the output path, passed either by keyword or by position
            if outname in kwargs:
                opath = kwargs[outname]
            else:
                opath = args[pos]
            args = list(args)
            if opath[-3:] == '.gz':
                compress = True
                opath = opath[:-3]
            else:
                compress = False
            UT.makedirs(os.path.dirname(opath))
            # hand the wrapped function the uncompressed path
            if outname in kwargs:
                kwargs[outname] = opath
            else:
                args[pos] = opath
            err = func(*args, **kwargs)
            if err != noerr:
                LOG.warning('bederror:{0}, err={1}'.format(func.__name__, err))
                raise RuntimeError(func.__name__)
            if compress:
                return UT.compress(opath)
            return opath
        return wrap
    return deco
def logerr(noerr=0):
    """Decorator factory: convert a non-``noerr`` return into RuntimeError.

    The wrapped function signals success by returning ``noerr``; any other
    return value is logged as a bed error and raised as RuntimeError carrying
    the function name.  On success the return value is passed through.
    """
    def decorate(target):
        def wrapper(*args, **kwargs):
            result = target(*args, **kwargs)
            if result == noerr:
                return result
            LOG.warning('bederror:{0}, err={1}'.format(target.__name__, result))
            raise RuntimeError(target.__name__)
        return wrapper
    return decorate
@compressQ('bedpath', None)
def bam2bed(bampath, bedpath):
    """Convert BAM to BED7
    BED name field (column 4) contains read id (so that together with map id (col 7) multi-mapper can be identified)
    BED tst field (column 7) contains map id (so that split reads can be identified)
    BED sc1 field (column 5) is from bedtools bamtobed and contains mapping quality
    """
    cmd1 = ['bedtools','bamtobed','-i', bampath, '-split','-bed12']
    # Expand each BED12 line into one BED7 line per exon block; NR (the awk
    # line number) becomes the map id shared by all segments of one alignment.
    awkscript = 'BEGIN{OFS="\t";c=1;}{ n=split($11,a,","); n=split($12,b,","); for(i=1;i<=n;i++){st=$2+b[i]; print $1,st,st+a[i],$4,$5,$6,NR}}'
    # above keep the original name so that you can go back to fastq
    # awkscript = 'BEGIN{OFS="\t";c=1;}{if(d[$4]){$4=d[$4];}else{d[$4]=c;$4=c;c++;} n=split($11,a,","); n=split($12,b,","); for(i=1;i<=n;i++){st=$2+b[i]; print $1,st,st+a[i],$4,$5,$6,NR}}'
    cmd2 = ['awk',awkscript]
    with open(bedpath, 'wb') as fp:
        p1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
        p2 = subprocess.Popen(cmd2, stdin=p1.stdout, stdout=fp)
        # stderr is not piped, so communicate() returns None here -- which
        # matches the decorator's noerr=None success value.
        err = p2.communicate()[1]
    return err
@compressQ('bedpath', None)
def bam2bed12(bampath, bedpath):
    """Convert BAM to BED12
    BED name field (column 4) contains read id (so that multi-mapper can be identified)
    BED tst field (column 7) contains map id
    BED sc1 field (column 5) is from bedtools bamtobed and contains mapping quality
    """
    cmd1 = ['bedtools','bamtobed','-i', bampath, '-split', '-bed12']
    # Overwrite column 7 (tst) with NR, giving each alignment a unique map id.
    awkscript = 'BEGIN{OFS="\t";c=1;}{$7=NR; print $0;}'
    #awkscript = 'BEGIN{OFS="\t";c=1;}{if(a[$4]){$4=a[$4];}else{a[$4]=c;$4=c;c++;}; $7=NR; print $0;}'
    cmd2 = ['awk',awkscript]
    with open(bedpath, 'wb') as fp:
        p1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
        p2 = subprocess.Popen(cmd2, stdin=p1.stdout, stdout=fp)
        # stderr not piped -> None, matching the decorator's noerr=None.
        err = p2.communicate()[1]
    return err
@compressQ('wigpath')
def bed2wig(bedpath, chromsizes, wigpath, scale=None):
    """Run ``bedtools genomecov`` on a BED file to produce bedGraph (WIGGLE).

    When ``scale`` is given it is forwarded via '-scale'.  Returns the
    subprocess exit status (0 on success, checked by @compressQ).
    """
    cmd1 = ['bedtools', 'genomecov', '-bg', '-split', '-i', bedpath, '-g', chromsizes]
    if scale is not None:
        cmd1 += ['-scale', str(scale)]
    with open(wigpath, 'wb') as fp:
        proc = subprocess.Popen(cmd1, stdout=fp)
        err = proc.wait()
    return err
@compressQ('wigpath')
def bam2wig(bampath, chromsizes, wigpath, scale=None):
    """Runs BEDTOOLS genomecov. Takes BAM, makes WIGGLE (bedGraph).

    Args:
        bampath: input BAM file
        chromsizes: chrom.sizes file for the genome
        wigpath: output path ('.gz' suffix handled by @compressQ)
        scale: optional per-base multiplier passed as '-scale'
    """
    if scale is None:
        cmd1 = ['bedtools', 'genomecov', '-split', '-bg', '-ibam', bampath, '-g', chromsizes]
    else:
        cmd1 = ['bedtools', 'genomecov', '-split', '-bg', '-ibam', bampath, '-g', chromsizes, '-scale', str(scale)]
    with open(wigpath,'wb') as fp:
        p1 = subprocess.Popen(cmd1, stdout=fp)
        err = p1.wait()  # exit status, checked by @compressQ (noerr=0)
    return err
@logerr(0)
def wig2bw(wigpath, chromsizes, bwpath):
    """Generate bigwig coverage from WIGGLE.

    Runs Kent's tool wigToBigWig (must be on PATH).  Returns the exit status;
    @logerr raises RuntimeError on any non-zero code.
    """
    cmd = ['wigToBigWig', wigpath, chromsizes, bwpath]
    UT.makedirs(os.path.dirname(bwpath))
    err = subprocess.call(cmd)
    return err
def bam2bw(bampath, chromsizes, bwpath, scale=None):
    """Generate bigwig coverage from BAM. """
    # Goes through a temporary wiggle next to the bigwig, removed afterwards.
    wigpath = bwpath+'.wig'
    bam2wig(bampath, chromsizes, wigpath, scale)
    wig2bw(wigpath, chromsizes, bwpath)
    os.unlink(wigpath)
def bed2bw(bedpath, chromsizes, bwpath, scale=None):
    """Generate bigwig coverage from BED via a temporary wiggle file."""
    tmpwig = bwpath + '.wig'
    bed2wig(bedpath, chromsizes, tmpwig, scale)
    wig2bw(tmpwig, chromsizes, bwpath)
    os.unlink(tmpwig)
def make_bw_from_bed0(bedpath, chromsizes, bwpath):
    """ DEPRECATED convert BED to BIGWIG, normalize average coverage to 1 """
    totbp,covbp = get_total_bp_bedfile(bedpath)
    # average coverage = totbp/covbp, so scaling by its inverse normalizes to 1
    scale = float(covbp)/totbp # 1/avgcov
    bed2bw(bedpath, chromsizes, bwpath, scale)
def make_bw_from_bam0(bampath, chromsizes, bedpath, bwpath):
    """ DEPRECATED convert BAM to BIGWIG, normalize average coverage to 1

    BUGFIX: this called the undefined name ``make_bw_from_bed`` (NameError at
    call time); the function defined in this module is ``make_bw_from_bed0``.
    """
    bam2bed(bampath, bedpath)
    make_bw_from_bed0(bedpath, chromsizes, bwpath)
def bed12_bed6(bed):
    """ convert BED12 to BED6 uses cython helper (cybw.flatten_bed8) """
    # BED12 ['chr', 'st', 'ed', 'name', 'sc1', 'strand', 'tst', 'ted', 'sc2', '#exons', 'esizes', 'estarts']
    # BED6 ['chr', 'st', 'ed', 'name', 'sc1', 'strand'] flatten exons, collect unique
    # BED12 tid => BED6 name=tid+exon_number
    bed8 = bed[['chr','st','ed','name','sc1','strand','esizes','estarts']]
    # Pure-python reference implementation of flatten_bed8, kept for context:
    # def _gen():
    #     for x in df.values:
    #         esi = str(x[-2])
    #         est = str(x[-1])
    #         if esi[-1]==',':
    #             esi = esi[:-1]
    #             est = est[:-1]
    #         for y,z in zip(esi.split(','), est.split(',')):
    #             x[-2] = y
    #             x[-1] = z
    #             yield x
    # fbed = PD.DataFrame([x for x in _gen()], columns = df.columns)
    fbed = PD.DataFrame(cybw.flatten_bed8(bed8.values), columns=bed8.columns)
    # adjust st, ed: exon start = tx start + exon offset; exon end = exon
    # start + exon size.  NOTE: 'ed' intentionally uses the updated 'st'.
    fbed['st'] = fbed['st'] + fbed['estarts']
    fbed['ed'] = fbed['st'] + fbed['esizes']
    return fbed[['chr','st','ed','name','sc1','strand','esizes']]
@compressQ('bed6path')
def bed12ToBed6(bed12path, bed6path):
    """ uses bedtools bed12ToBed6 (one output line per exon block) """
    cmd = ['bed12ToBed6', '-i', bed12path]
    with open(bed6path, 'wb') as fp:
        p1 = subprocess.Popen(cmd, stdout=fp)
        err = p1.wait()  # exit status, checked by @compressQ (noerr=0)
    return err
### Normalization Scale ##############################################
def save_bed_covstats(bedpath, dstpath, bed12=False, checkuniq=False):
    """Compute per-chromosome totbp/covbp/acov for a BED file, save as table.

    acov = totbp/covbp is the average coverage over covered bases.
    Returns the result of UT.write_pandas (the written table).
    """
    tdic,cdic = get_total_bp_bedfile(bedpath, bed12, returndics=True, checkuniq=checkuniq)
    df = PD.DataFrame({c: {'totbp':tdic[c], 'covbp':cdic[c]} for c in cdic}).T
    df['acov'] = df['totbp']/df['covbp']
    df = df.sort_values('covbp',ascending=False)
    return UT.write_pandas(df, dstpath, 'ih')
def get_total_bp_bedfile(bedpath, bed12=False, chroms=None, returndics=False, checkuniq=False):
    """ Returns total mapped base pairs (totbp) and covered base pairs (covbp).

    The ratio totbp/covbp gives average coverage. Process without reading entire data
    into the RAM. Process non BED12 file.

    Args:
        bedpath (str): a path to BED file
        bed12 (bool): whether format is BED12 (default False)
        chroms (list): chromosomes to consider, if None (default) use all
        returndics (bool): if True return per-chromosome dicts (tdic, cdic)
            instead of the summed totals
        checkuniq (bool): use the duplicate-aware cython helper (non-BED12 only)

    Returns:
        totbp: total base pairs in BED
        covbp: covered base pairs

    See:
        :py:func:`jgem.bigwig.get_totbp_covbp_bw ` (>6x faster if you have bigwig)
    """
    if bed12:
        totbpdic,covbpdic = cybw.get_total_bp_bed12file_helper(bedpath)
    else:
        if checkuniq:
            totbpdic,covbpdic = cybw.get_total_bp_bedfile_helper_check_uniq(bedpath)
        else:
            totbpdic,covbpdic = cybw.get_total_bp_bedfile_helper(bedpath)
    # fix key bytes => str (cython helpers key their dicts by bytes)
    tdic = {}
    cdic = {}
    for b in covbpdic.keys():
        u = b.decode()
        tdic[u] = totbpdic[b]
        cdic[u] = covbpdic[b]
    if returndics:
        return tdic, cdic
    totbp = 0
    covbp = 0
    if chroms is None:
        chroms = cdic.keys()
    for chrom in chroms:
        if chrom not in cdic:
            LOG.warning('{0} not found in the data'.format(chrom))
            continue
        totbp += tdic[chrom]
        covbp += cdic[chrom]
    return totbp, covbp
def get_total_bp(beddf, returndics=False):
    """ Returns total mapped base pairs (totbp) and covered base pairs (covbp).

    The ratio totbp/covbp gives average coverage.

    Args:
        beddf: a BED dataframe (a standard non BED12 format)
        returndics (bool): if True return per-chromosome dicts (tdic, cdic)

    Returns:
        totbp: total base pairs in BED
        covbp: covered base pairs

    NOTE: mutates the input dataframe (adds a 'len' column).
    """
    # total bp
    beddf['len'] = beddf['ed']-beddf['st']
    # totbp = beddf['len'].sum() # total bp
    # covered bp: calculate using chopped intervals
    # first remove redundancy
    cols = ['st','ed']
    cdic = {}
    tdic = {}
    for chrom in beddf['chr'].unique():
        sub = beddf[beddf['chr']==chrom]
        tdic[chrom] = sub['len'].sum()
        # deduplicate identical intervals, then merge overlaps in cython
        sted = sub.groupby(cols).first().reset_index()
        a = N.array(sted[cols].sort_values(cols).values, dtype=N.int32)
        b = cybw.union_intervals(a)
        cdic[chrom] = N.sum([y-x for x,y in b])
    if returndics:
        return tdic, cdic
    totbp = N.sum(list(tdic.values()))
    covbp = N.sum(list(cdic.values()))
    return totbp, covbp
### INTERSECT ########################################################
def bedtoolintersect(aname, bname, cname, **kwargs):
    # 'bedtools intersect' with gz-retry error handling; returns output path.
    return _bedtoolscatcherror('intersect', aname, bname, cname, **kwargs)
def bedtoolmerge(aname, cname, **kwargs):
    # 'bedtools merge' with gz-retry error handling; returns output path.
    return _bedtoolscatcherror2('merge',aname, cname, **kwargs)
def bedtoolcomplement(aname, cname, chromsizes):
    # 'bedtools complement' against the genome file; returns the exit status.
    return _runbedtools2('complement',aname,cname,g=chromsizes)
def bedtoolsubtract(aname, bname, cname, **kwargs):
    # 'bedtools subtract' with gz-retry error handling; returns output path.
    return _bedtoolscatcherror('subtract', aname, bname, cname, **kwargs)
def _runbedtools2(which, aname, cname, **kwargs):
    """Run a single-input bedtools subcommand ('-i aname'), stdout -> cname.

    Boolean kwargs become bare flags; other values become '-key value'.
    On a non-zero exit the partial output is removed and RuntimeError raised.
    """
    cmd = ['bedtools',which, '-i', aname]
    for k,v in kwargs.items():
        if isinstance(v,bool):# in [True,False]: 2016-03-27 fix
            cmd += ['-'+k]
        else:
            cmd += ['-'+k, str(v)]
    with open(cname, "wb") as outfile:
        ret = subprocess.call(cmd, stdout=outfile)
    if ret!=0:
        msg = 'bederror return code:{0}, cmd:{1}'.format(ret, cmd)
        LOG.warning(msg)
        # delete output
        os.unlink(cname)
        raise RuntimeError(msg)
    return ret
def _runbedtools3(which, aname, bname, cname, **kwargs):
    """Run a two-input bedtools subcommand ('-a/-b'), stdout -> cname.

    Boolean kwargs become bare flags; other values become '-key value'.
    On a non-zero exit the partial output is removed and RuntimeError raised.
    """
    cmd = ['bedtools', which, '-a', aname, '-b', bname]
    for key, val in kwargs.items():
        cmd.append('-' + key)
        # booleans act as presence-only flags (2016-03-27 fix)
        if not isinstance(val, bool):
            cmd.append(str(val))
    with open(cname, "wb") as outfile:
        ret = subprocess.call(cmd, stdout=outfile)
    if ret != 0:
        msg = 'bederror return code:{0}, cmd:{1}'.format(ret, cmd)
        LOG.warning(msg)
        os.unlink(cname)  # drop the partial output
        raise RuntimeError(msg)
    return ret
def _bedtoolscatcherror(which, aname, bname, cname, **kwargs):
    """Run a two-input bedtools subcommand with an uncompressed-input retry.

    Some bedtools versions fail on gzipped input; on RuntimeError the inputs
    are uncompressed to temporary copies and the command is retried.  Returns
    the output path (gzipped when cname ended with '.gz').
    """
    if not os.path.exists(aname):
        raise ValueError('{0} does not exists'.format(aname))
    if not os.path.exists(bname):
        raise ValueError('{0} does not exists'.format(bname))
    if cname.endswith('.gz'):
        cname = cname[:-3]
        compress=True
    else:
        compress=False
    try:
        ret = _runbedtools3(which,aname,bname,cname,**kwargs)
    except RuntimeError:
        LOG.warning('bedtool error: repeating on uncompressed a:{0},b:{1},c:{2}'.format(aname,bname,cname))
        aname2 = UT.uncompresscopy(aname)
        bname2 = UT.uncompresscopy(bname)
        ret = _runbedtools3(which,aname2,bname2,cname,**kwargs)
        # remove the temporary uncompressed copies (if any were made)
        if aname2 != aname:
            os.unlink(aname2)
        if bname2 != bname:
            os.unlink(bname2)
    if compress:
        return UT.compress(cname)
    return cname
def _bedtoolscatcherror2(which, aname, cname, **kwargs):
    """Single-input variant of _bedtoolscatcherror (uncompressed retry).

    Returns the output path (gzipped when cname ended with '.gz').
    """
    if not os.path.exists(aname):
        raise ValueError('{0} does not exists'.format(aname))
    if cname.endswith('.gz'):
        cname = cname[:-3]
        compress=True
    else:
        compress=False
    try:
        ret = _runbedtools2(which,aname,cname,**kwargs)
    except RuntimeError:
        LOG.warning('bedtool error: repeating on uncompressed a:{0},c:{1}'.format(aname,cname))
        aname2 = UT.uncompresscopy(aname)
        ret = _runbedtools2(which,aname2,cname,**kwargs)
        # remove the temporary uncompressed copy (if one was made)
        if aname2 != aname:
            os.unlink(aname2)
    if compress:
        return UT.compress(cname)
    return cname
def calc_ovlratio(aname, bname, tname, nacol, nbcol, idcol=['chr','st','ed'], returnbcols=False):
    """Calculate overlapped portion of b onto a.

    Will check existence of result file (tname) and uses it if newer than input files.

    Args:
        aname (str): bed file name 1
        bname (str): bed file name 2
        tname (str): result file name
        nacol (int): number of columns in file 1
        nbcol (int): number of columns in file 2

    Optional:
        idcol (list of str): columns which specify unique entry
        returnbcols (bool): also keep the b-file columns in the result

    Returns:
        A Pandas DataFrame which contains overlap info
        (len, ovl, ovlratio, notcovbp columns added)
    """
    # requirement: no overlap within b
    # cache: reuse the saved result when it is newer than both inputs
    if UT.notstale([aname,bname], tname):
        return UT.read_pandas(tname)
    # calculate bedtools intersect
    tmpsuf='.ovlbed.txt'
    cname = aname+tmpsuf
    if nacol==12:
        cname = bedtoolintersect(aname, bname, cname, wao=True, split=True)
    else:
        cname = bedtoolintersect(aname, bname, cname, wao=True)
    # read tmp file
    acols = GGB.BEDCOLS[:nacol]
    bcols = ['b_'+x for x in GGB.BEDCOLS[:nbcol]]
    cols = acols + bcols +['ovl']
    df = UT.read_pandas(cname, names=cols)
    dfg = df.groupby(idcol) #['chr','st','ed'])
    if returnbcols:
        dfa = dfg.first().reset_index()[acols+bcols]
    else:
        dfa = dfg.first().reset_index()[acols]
    if nacol==12:
        # sum of exon sizes.  BUGFIX: the original used N.sum(map(int, ...)),
        # which under Python 3 wraps the map object in a 0-d object array
        # instead of summing it; use the builtin sum over the parsed ints.
        dfa['len'] = [sum(int(s) for s in x.split(',')[:-1]) for x in dfa['esizes']]
    else:
        dfa['len'] = dfa['ed']-dfa['st']
    # since b does not overlap by itself total overlap of an element of a to b is
    # sum of overlap to individual b
    dfa['ovl'] = dfg['ovl'].sum().values
    dfa['ovlratio'] = dfa['ovl'].astype(float)/dfa['len']
    dfa['notcovbp'] = dfa['len'] - dfa['ovl']
    # clean up
    os.unlink(cname)
    # save
    UT.save_tsv_nidx_whead(dfa, tname)
    return dfa
def fillgap(binfile, gapfile, gap=50):
    """Merge intervals in binfile, closing gaps of up to ``gap`` bp.

    Skips the merge (returning the existing gzipped path) when gapfile is
    already up to date relative to binfile.  Returns the output path.
    """
    if gapfile[-3:]=='.gz':
        gapfile = gapfile[:-3]
    #gapfile = binfile[:-7]+'.gap%d.bed' % gap
    if UT.notstale(binfile, gapfile+'.gz'):
        return gapfile+'.gz'
    gapfile = bedtoolmerge(binfile, gapfile, d=gap)
    return gapfile
def read_ovl(c, acols, bcols=None):
    """Read a bedtools-intersect (-wao) output file into a DataFrame.

    Columns are acols, then the b-file columns prefixed with 'b_' (bcols
    defaults to acols), then the trailing 'ovl' overlap-bp column.
    """
    right = acols if bcols is None else bcols
    names = acols + ['b_' + col for col in right] + ['ovl']
    return UT.read_pandas(c, names=names)
### MAPBED to WIG ########################################################
# dict read_id => set{map_id}
# multimapper = dup = size(set{map_id})>1
# weight = 1/dup
# 1st pass calculate this map read_id => weight
# for uniq.bw only use weight==1
# for all.bw use all but use weight
def splitbedgz(bedgz, prefix):
    """Split gzipped bed file into separate files according to chromosome.

    Uses zcat and awk.  Output files are named '<prefix>.<chrom>.bed'.

    Args:
        bedgz: path to gzipped bed file
        prefix: output path prefix

    Returns:
        stderr of the awk process; None here since stderr is not piped.
    """
    cmd1 = ['zcat', bedgz]
    awkscript = 'BEGIN{{FS="\t"}}{{print > "{0}."$1".bed"}}'.format(prefix)
    #print(awkscript)
    cmd2 = ['awk', awkscript]
    p1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
    p2 = subprocess.Popen(cmd2, stdin=p1.stdout)
    err = p2.communicate()[1]
    return err
# For reference, STAR SJ.out.tab motif codes:
# SJTABMOTIF = {0:'non-canonical',1:'GT/AG',2:'CT/AC',3:'GC/AG',4:'CT/GC',5:'AT/AC',6:'GT/AT'}
# Splice-site dinucleotides (donor+acceptor on the genome sense strand)
# mapped to the transcript strand they imply.
STED2STRAND = {
    'GTAG': '+',
    'CTAC': '-',
    'GCAG': '+',
    'CTGC': '-',
    'ATAC': '+',
    'GTAT': '-',
}
def _scan_make_map(paths, dstpath):
    """Scan per-chromosome BED7 files and persist multi-mapper counts.

    For each read id (column 4) collect the set of map ids (column 7); reads
    mapping to more than one location are written to dstpath with their
    multiplicity ('cnt' column).
    """
    cnt = defaultdict(set)
    for path in paths:
        # BUGFIX: the original wrapped the gzip handle in io.BufferedReader
        # but never imported io, raising NameError for any '.gz' input.
        # gzip.open already returns an iterable (buffered) file object.
        if path[-3:] == '.gz':
            fileobj = gzip.open(path)
        else:
            fileobj = open(path, 'rb')
        with fileobj as fp:
            for line in fp:  # chr,st,ed,name,sc1,strand,tst
                rec = line.strip().split(b'\t')  # read_id:name(3), map_id:tst(6)
                if len(rec) == 7:
                    cnt[rec[3]].add(rec[6])  # for each read: set of locations
                else:
                    print('wrong#fields:{0} in {1}'.format(len(rec), path))
    try:  # py2: dict.iteritems
        dup = PD.DataFrame({k:len(v) for k,v in cnt.iteritems() if len(v)>1}, index=['cnt']).T
    except AttributeError:  # py3
        dup = PD.DataFrame({k:len(v) for k,v in cnt.items() if len(v)>1}, index=['cnt']).T
    UT.write_pandas(dup, dstpath,'ih')
def pathcode(sse, strand):
    """Encode splice junctions [(st, ed), ...] as a comma-joined path string.

    On '+' (or unknown '.') strand: 'st|ed,st|ed,...' in the given order.
    On '-' strand the segments are reversed and each pair is written 'ed|st'.
    """
    if strand in ('+', '.'):
        parts = ['{0}|{1}'.format(st, ed) for st, ed in sse]
    else:
        parts = ['{1}|{0}'.format(st, ed) for st, ed in reversed(sse)]
    return ','.join(parts)
def pcode2pos(pcode):
    """Decode a path code (see :func:`pathcode`) back into [[st, ed], ...].

    Negative-strand codes (where ed precedes st) are reversed back into
    genomic order.

    BUGFIX: the original comprehension was malformed --
    ``[[int(x) for x in y.split('|') for y in pcode.split(',')]]`` referenced
    ``y`` before the loop defining it, raising NameError on every call.  The
    inner loop now closes before iterating over the comma-separated segments.
    """
    tmp = [[int(x) for x in y.split('|')] for y in pcode.split(',')]
    if tmp[0][0] < tmp[0][1]:  # positive strand: already in genomic order
        return tmp
    # negative strand: reverse segment order and swap each (ed, st) pair
    return [x[::-1] for x in tmp[::-1]]
def process_mapbed(bedpath, dstpre, genome, chromdir, stranded='.', np=10):
    """Generate strand/uniqueness-split coverage bigwigs from a mapped BED7.

    Args:
        bedpath: path to gzipped BED7 file (converted from BAM)
        dstpre: path prefix to destination
        genome: UCSC genome (mm10 etc.)
        chromdir: directory containing chromosome sequence in FASTA
        stranded: library strandedness symbol ('+', '-', or '.')
        np: number of CPU to use

    Outputs:
        1. dstpre+'.ex.p.bw'
        2. dstpre+'.ex.n.bw'
        3. dstpre+'.ex.u.bw'
        4. dstpre+'.sj.p.bw'
        5. dstpre+'.sj.n.bw'
        6. dstpre+'.sj.u.bw'
        7. dstpre+'.ex.p.uniq.bw'
        8. dstpre+'.ex.n.uniq.bw'
        9. dstpre+'.ex.u.uniq.bw'
        10. dstpre+'.sj.p.uniq.bw'
        11. dstpre+'.sj.n.uniq.bw'
        12. dstpre+'.sj.u.uniq.bw'
        13. dstpre+'.sjpath.bed' BED12 (sc1:ucnt, sc2:jcnt=ucnt+mcnt)
    """
    chroms = UT.chroms(genome)
    chromdf = UT.chromdf(genome)
    chromsizes = UT.chromsizes(genome)
    # split into chroms
    UT.makedirs(dstpre)
    splitbedgz(bedpath, dstpre) # ~30sec
    duppath = dstpre+'.dupitems.txt.gz'
    # restrict to chromosomes that actually received reads
    chroms = [c for c in chroms if os.path.exists(dstpre+'.{0}.bed'.format(c))]
    files = [dstpre+'.{0}.bed'.format(c) for c in chroms]
    # 1st pass over all per-chrom files: record multi-mapper read ids
    _scan_make_map(files, duppath)
    files0 = [dstpre+'.{0}.bed'.format(c) for c in chromdf['chr'].values] # to be deleted
    args = [(dstpre, x, genome, chromdir, stranded) for x in chroms]
    # spread to CPUs (one worker per chromosome)
    rslts = UT.process_mp2(_process_mapbed_chr, args, np=np, doreduce=False)
    # concatenate chr files
    files1 = []
    dstpath = dstpre+'.sjpath.bed'
    LOG.info('making {0}...'.format(dstpath))
    with open(dstpath, 'wb') as dst:
        for c in chroms:
            srcpath = dstpre+'.{0}.sjpath.bed'.format(c)
            files1.append(srcpath)
            with open(srcpath, 'rb') as src:
                shutil.copyfileobj(src, dst)
    dstpath = UT.compress(dstpath)
    # concatenate the 12 (kind x strand x uniq) per-chrom wiggles -> bigwigs
    for kind in ['.ex','.sj']:
        for strand in ['.p','.n','.u']:
            for suf in ['','.uniq']:
                pre = dstpre+kind+suf+strand
                wigpath = pre+'.wig'
                bwpath = pre+'.bw'
                with open(wigpath, 'wb') as dst:
                    for c in chroms:
                        srcpath = pre+'.{0}.wig'.format(c)
                        files1.append(srcpath)
                        if os.path.exists(srcpath):
                            with open(srcpath,'rb') as src:
                                shutil.copyfileobj(src, dst)
                LOG.info('making {0}...'.format(bwpath))
                if os.path.getsize(wigpath)>0:
                    wig2bw(wigpath, chromsizes, bwpath)
                files1.append(wigpath)
    # clean up temp files
    LOG.info('deleting intermediate files...')
    for x in files0+files1:
        if os.path.exists(x):
            LOG.debug('deleting {0}...'.format(x))
            os.unlink(x)
# (read strand, library strandedness) -> coverage-track suffix:
# '.p' plus, '.n' minus, '.u' unstranded.  With an unstranded library ('.')
# or an unstranded read, everything falls into '.u'.
STRANDMAP = {
    ('+', '+'): '.p', ('+', '-'): '.n', ('+', '.'): '.u',
    ('-', '+'): '.n', ('-', '-'): '.p', ('-', '.'): '.u',
    ('.', '+'): '.u', ('.', '-'): '.u', ('.', '.'): '.u',
}
def _process_mapbed_chr(dstpre, chrom, genome, chromdir, stranded):
    """Worker for process_mapbed: build coverage arrays + sjpath for one chrom.

    Reads dstpre+'.<chrom>.bed' (BED7, grouped by map id), accumulates
    exon/junction coverage into 12 per-chrom wiggle files and writes the
    spliced-path BED12 (dstpre+'.<chrom>.sjpath.bed').
    """
    # 1st pass: calc dupdic (read ids that mapped to multiple locations)
    bedpath = dstpre+'.{0}.bed'.format(chrom)
    dupids = UT.read_pandas(dstpre+'.dupitems.txt.gz', index_col=[0]).index
    # 2nd pass make wiggles
    gfc = FA.GenomeFASTAChroms(chromdir)
    chromsize = UT.df2dict(UT.chromdf(genome), 'chr', 'size')[chrom]
    # mqth MAPQ threshold there are ~6% <10
    # generator which makes an array
    # NOTE(review): fp is never closed; relies on process exit for cleanup.
    fp = open(bedpath,'rb')
    # wigs[kind][strand][suf] -> per-base coverage array for the whole chrom
    wigs = {}
    wigpaths = {}
    for kind in ['.ex','.sj']:
        wigs[kind] = {}
        wigpaths[kind] = {}
        for strand in ['.p','.n','.u']:
            wigs[kind][strand] = {}
            wigpaths[kind][strand] = {}
            for suf in ['','.uniq']:
                wigpath = dstpre+kind+suf+strand+'.{0}.wig'.format(chrom)
                if os.path.exists(wigpath):
                    os.unlink(wigpath)
                wigpaths[kind][strand][suf] = wigpath
                wigs[kind][strand][suf] = N.zeros(chromsize, dtype=float)
    sjs = [] # path: (chr, st, ed, pcode, ucnt, strand, acnt)
    # pcode = a(apos)d(dpos) = a(ed)d(st) if strand=='+' else a(st)d(ed)
    # ucnt = unique read counts
    # acnt = multi-read adjusted all counts (=ucnt+Sum(mcnt(i)/dup(i)))
    # delete previous
    sjbed12 = dstpre+'.{0}.sjpath.bed'.format(chrom)
    if os.path.exists(sjbed12):
        os.unlink(sjbed12)
    def _write_arrays():
        # flush all 12 coverage arrays to their wiggle files
        for kind in ['.ex','.sj']:
            for strand in ['.p','.n','.u']:
                for suf in ['','.uniq']:
                    cybw.array2wiggle_chr64(wigs[kind][strand][suf], chrom, wigpaths[kind][strand][suf], 'w')
    def _write_sj(sjs):
        # sjs = [(chr,st,ed,pathcode(name),ureads(sc1),strand,tst,ted,areads(sc2),cse),...]
        # aggregate identical paths (by name) and emit one BED12 row per path
        sjdf = PD.DataFrame(sjs, columns=GGB.BEDCOLS[:9]+['cse'])
        sjdfgr = sjdf.groupby('name')
        sj = sjdfgr.first()
        sj['sc1'] = sjdfgr['sc1'].sum().astype(int) # ucnt
        sj['sc2'] = sjdfgr['sc2'].sum().astype(int) # jcnt=ucnt+mcnt
        sj['st'] = sjdfgr['st'].min()
        sj['ed'] = sjdfgr['ed'].max()
        sj['#exons'] = sj['cse'].apply(len)+1
        # exon block starts/ends relative to path start, BED12 style
        sj['ests'] = [[0]+[z[1]-st for z in cse] for st,cse in sj[['st','cse']].values]
        sj['eeds'] = [[z[0]-st for z in cse]+[ed-st] for st,ed,cse in sj[['st','ed','cse']].values]
        esizes = [[u-v for u,v in zip(x,y)] for x,y in sj[['eeds','ests']].values]
        sj['estarts'] = ['{0},'.format(','.join([str(y) for y in x])) for x in sj['ests']]
        sj['esizes'] = ['{0},'.format(','.join([str(y) for y in x])) for x in esizes]
        sj['name'] = sj.index
        # sj = sj.reset_index()
        with open(sjbed12, 'w') as f:
            sj[GGB.BEDCOLS].to_csv(f, index=False, header=False, sep='\t', quoting=csv.QUOTE_NONE)
    def _append_sj(cse, css, csj, chrom,ureads,areads):
        if (len(cse)>0): # spits out splice rec
            # chr,st,ed,pathcode,ureads,strand,tst,ted,areads
            tst = cse[0][0]
            ted = cse[-1][1]
            if len(css)>0:
                # majority vote over motif-derived strands of the junctions
                strand = Counter(css).most_common()[0][0]
            else:
                strand = '.'
            name = pathcode(cse, strand)
            st = int(csj[0][1]) # first segment start
            ed = int(csj[-1][2]) # last segment end
            sjs.append((chrom,st,ed,name,ureads,strand,tst,ted,areads,cse))
    def _add_to_ex_arrays(st,ed,dup,strand):
        # exon coverage; track chosen via library strandedness
        kind='.ex'
        # NOTE(review): strand comes from rec[5], i.e. bytes under py3, while
        # STRANDMAP keys are str tuples -- presumably a .decode() is expected
        # upstream; verify against the cython/py2 call path.
        strand = STRANDMAP[(strand,stranded)]
        dic = wigs[kind][strand]
        dic[''][st:ed] += 1
        if not dup:
            dic['.uniq'][st:ed] += 1
    def _add_to_sj_arrays(sst,sed,dup,strand):
        # junction coverage; strand determined from the splice-site motif
        kind='.sj'
        s = {'+':'.p','-':'.n','.':'.u'}[strand]
        dic = wigs[kind][s]
        # add to the arrays
        dic[''][sst:sed] += 1
        if not dup:
            dic['.uniq'][sst:sed] += 1
            ureads,areads = 1,1
        else:
            ureads,areads = 0,1
        return ureads,areads
    csj = [] # current collection of spliced reads
    css = [] # current strands
    cse = [] # current (sst,sed)
    csn = 0 # current segment number
    ureads,areads = 1,1 # uniq, total reads it's either 1,1 or 0,1
    pmid = None # previous map id common to spliced segments
    for line in fp:
        rec = line.strip().split(b'\t')
        # 7 column bed: chr(0), st(1), ed(2), name(3), mapq(4), strand(5), mapid(6)
        cchr = rec[0].decode()
        st,ed = int(rec[1]),int(rec[2])
        dup = rec[3] in dupids #dic[rec[3]]
        estrand = rec[5]
        _add_to_ex_arrays(st,ed,dup,estrand)
        # process splice
        if pmid != rec[6]: # new map
            _append_sj(cse, css, csj, chrom, ureads, areads)
            csj,css,cse,csn = [rec],[],[],0 # reset running params
        else: # add segments
            csj.append(rec)
            prec = csj[-2] # previous rec
            sst = int(prec[2]) # ed of previous segment
            sed = int(rec[1]) # st of current segment
            cse.append((sst,sed))
            # find strand from the donor/acceptor dinucleotides in the genome
            sted = gfc.get(chrom,sst,sst+2)+gfc.get(chrom,sed-2,sed)
            strand = STED2STRAND.get(sted,'.')
            if strand != '.':
                css.append(strand)
            ureads,areads = _add_to_sj_arrays(sst,sed,dup,strand)
        pmid = rec[6]
    # flush the last path, then write all outputs
    _append_sj(cse, css, csj, chrom, ureads, areads)
    _write_arrays()
    _write_sj(sjs)
def sj02wig(sjchr, chrom, chromsize, pathtmpl):
    """Write per-strand junction coverage wiggles for one chromosome.

    Args:
        sjchr: junction dataframe for this chrom with st/ed/jcnt/strand
            (st appears to be 1-based: st-1 converts to 0-based array index)
        chrom: chromosome name
        chromsize: chromosome length in bp
        pathtmpl: output path template with one '{0}' slot for the strand
    """
    a = {'+':N.zeros(chromsize, dtype=N.float64),
         '-':N.zeros(chromsize, dtype=N.float64),
         '.':N.zeros(chromsize, dtype=N.float64)}
    for st,ed,v,strand in sjchr[['st','ed','jcnt','strand']].values:
        a[strand][st-1:ed] += v
    for strand in a:
        path = pathtmpl.format(strand)
        cybw.array2wiggle_chr64(a[strand], chrom, path)
# strand symbol -> filename suffix for strand-split coverage tracks
STRANDMAP0 = {
    '+': '.p',
    '-': '.n',
    '.': '.u',
}
def sj02bw(sj0, pathpre, genome, np=12):
    """Build strand-split junction bigwigs from an sj0 dataframe.

    Args:
        sj0: junction dataframe with chr/st/ed/strand and ucnt/mcnt
            (a 'jcnt' column is added if missing; mutates sj0)
        pathpre: output path prefix
        genome: UCSC genome name (e.g. mm10)
        np: number of worker processes
    """
    chroms = UT.chroms(genome)
    chromdf = UT.chromdf(genome).sort_values('size',ascending=False)
    chroms = [x for x in chromdf['chr'] if x in chroms]
    chromdic = UT.df2dict(chromdf, 'chr', 'size')
    if 'jcnt' not in sj0:
        sj0['jcnt'] = sj0['ucnt']+sj0['mcnt']
    files = []
    args = []
    for c in chroms:
        f = '{0}.{1}.{{0}}.wig'.format(pathpre,c)
        args.append((sj0[sj0['chr']==c], c, chromdic[c], f))
        files.append(f)
    # one sj02wig worker per chromosome
    rslts = UT.process_mp(sj02wig, args, np=np, doreduce=False)
    rmfiles = []
    for strand in ['+','-','.']:
        s = STRANDMAP0[strand]
        wig = pathpre+'.sj{0}.wig'.format(s)
        bwpath = pathpre+'.sj{0}.bw'.format(s)
        # concatenate the per-chrom wiggles for this strand
        with open(wig, 'w') as dst:
            for tmpl in files:
                f = tmpl.format(strand)
                with open(f,'r') as src:
                    shutil.copyfileobj(src, dst)
                rmfiles.append(f)
        rmfiles.append(wig)
        wig2bw(wig, UT.chromsizes(genome), bwpath)
    # BUGFIX: the original also called os.unlink(wig) after this loop, which
    # double-deleted the last strand's wig (already in rmfiles) and raised.
    for f in rmfiles:
        os.unlink(f)
process_mapbed_mp
"""
.. module:: bedtools
:synopsis: interface to bedtools
.. moduleauthor:: Ken Sugino <ken.sugino@gmail.com>
"""
import os
import subprocess
import logging
logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)
import io
import gzip
from collections import defaultdict
from collections import Counter
import glob
import shutil
import gzip
import csv
import inspect

import pandas as PD
import numpy as N

from jgem import utils as UT
from jgem import gtfgffbed as GGB
from jgem import fasta as FA
import jgem.cy.bw as cybw

### BAM,BED,WIGGLE,BIGWIG ##############################################
# decorators to separate logic
### BAM,BED,WIGGLE,BIGWIG ##############################################
# decorators to separate logic
def compressQ(outname, noerr=0):
    """ decorator for checking file compression and error

    Wraps a function whose parameter named *outname* is an output path.
    If the caller passes a '.gz' path, the wrapped function receives the
    uncompressed path and the output is gzipped afterwards.  A return value
    different from *noerr* is logged and raised as RuntimeError.  On success
    the wrapper returns the (possibly re-compressed) output path, not the
    wrapped function's return code.
    """
    def deco(func):
        # BUGFIX: inspect.getargspec was removed in Python 3.11;
        # keep the old call only as a Python 2 fallback
        try:
            argnames = inspect.getfullargspec(func).args
        except AttributeError:  # Python 2
            argnames = inspect.getargspec(func)[0]
        pos = argnames.index(outname)
        def wrap(*args,**kwargs):
            # check output '.gz'
            if outname in kwargs:
                opath = kwargs[outname]
            else:
                opath = args[pos]
            args = list(args)
            if opath[-3:]=='.gz':
                compress = True
                opath = opath[:-3]
            else:
                compress = False
            UT.makedirs(os.path.dirname(opath))
            # hand the uncompressed path to the wrapped function
            if outname in kwargs:
                kwargs[outname] = opath
            else:
                args[pos] = opath
            err = func(*args, **kwargs)
            if err != noerr:
                LOG.warning('bederror:{0}, err={1}'.format(func.__name__, err))
                raise RuntimeError(func.__name__)
            if compress:
                return UT.compress(opath)
            return opath
        return wrap
    return deco
def logerr(noerr=0):
    """Decorator: log and raise RuntimeError when the wrapped function
    returns an error code different from *noerr*; otherwise pass the
    return value through unchanged."""
    def deco(func):
        def wrap(*args, **kwargs):
            result = func(*args, **kwargs)
            if result == noerr:
                return result
            LOG.warning('bederror:{0}, err={1}'.format(func.__name__, result))
            raise RuntimeError(func.__name__)
        return wrap
    return deco
@compressQ('bedpath', None)
def bam2bed(bampath, bedpath):
    """Convert BAM to BED7
    BED name field (column 4) contains read id (so that together with map id (col 7) multi-mapper can be identified)
    BED tst field (column 7) contains map id (so that split reads can be identified)
    BED sc1 field (column 5) is from bedtools bamtobed and contains mapping quality

    Pipeline: bedtools bamtobed -split -bed12 | awk, emitting one 7-column line
    per exon block.  Returns p2's stderr, which is None on the normal path
    (stderr is not piped) to match the decorator's noerr=None.
    """
    cmd1 = ['bedtools','bamtobed','-i', bampath, '-split','-bed12']
    awkscript = 'BEGIN{OFS="\t";c=1;}{ n=split($11,a,","); n=split($12,b,","); for(i=1;i<=n;i++){st=$2+b[i]; print $1,st,st+a[i],$4,$5,$6,NR}}'
    # above keep the original name so that you can go back to fastq
    # awkscript = 'BEGIN{OFS="\t";c=1;}{if(d[$4]){$4=d[$4];}else{d[$4]=c;$4=c;c++;} n=split($11,a,","); n=split($12,b,","); for(i=1;i<=n;i++){st=$2+b[i]; print $1,st,st+a[i],$4,$5,$6,NR}}'
    cmd2 = ['awk',awkscript]
    with open(bedpath, 'wb') as fp:
        p1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
        p2 = subprocess.Popen(cmd2, stdin=p1.stdout, stdout=fp)
        err = p2.communicate()[1]
    return err
@compressQ('bedpath', None)
def bam2bed12(bampath, bedpath):
    """Convert BAM to BED12
    BED name field (column 4) contains read id (so that multi-mapper can be identified)
    BED tst field (column 7) contains map id
    BED sc1 field (column 5) is from bedtools bamtobed and contains mapping quality

    Unlike bam2bed, records are kept as whole BED12 lines; awk only overwrites
    column 7 with the line number (map id).  Returns p2's stderr, which is None
    on the normal path (stderr not piped), matching the decorator's noerr=None.
    """
    cmd1 = ['bedtools','bamtobed','-i', bampath, '-split', '-bed12']
    awkscript = 'BEGIN{OFS="\t";c=1;}{$7=NR; print $0;}'
    #awkscript = 'BEGIN{OFS="\t";c=1;}{if(a[$4]){$4=a[$4];}else{a[$4]=c;$4=c;c++;}; $7=NR; print $0;}'
    cmd2 = ['awk',awkscript]
    with open(bedpath, 'wb') as fp:
        p1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
        p2 = subprocess.Popen(cmd2, stdin=p1.stdout, stdout=fp)
        err = p2.communicate()[1]
    return err
@compressQ('wigpath')
def bed2wig(bedpath, chromsizes, wigpath, scale=None):
    """Runs BEDTOOLS genomecov. Takes BED, makes WIGGLE.

    scale: optional multiplier forwarded to genomecov -scale (e.g. normalization).
    Returns the genomecov return code (0 on success; checked by @compressQ).
    """
    if scale is None:
        cmd1 = ['bedtools','genomecov','-bg', '-split', '-i', bedpath, '-g', chromsizes]
    else:
        cmd1 = ['bedtools','genomecov','-bg', '-split', '-i', bedpath, '-g', chromsizes, '-scale', str(scale)]
    with open(wigpath,'wb') as fp:
        p1 = subprocess.Popen(cmd1, stdout=fp)
        err = p1.wait()
    return err
@compressQ('wigpath')
def bam2wig(bampath, chromsizes, wigpath, scale=None):
    """Runs BEDTOOLS genomecov. Takes BAM, makes WIGGLE.

    scale: optional multiplier forwarded to genomecov -scale (e.g. normalization).
    Returns the genomecov return code (0 on success; checked by @compressQ).
    """
    if scale is None:
        cmd1 = ['bedtools', 'genomecov', '-split', '-bg', '-ibam', bampath, '-g', chromsizes]
    else:
        cmd1 = ['bedtools', 'genomecov', '-split', '-bg', '-ibam', bampath, '-g', chromsizes, '-scale', str(scale)]
    with open(wigpath,'wb') as fp:
        p1 = subprocess.Popen(cmd1, stdout=fp)
        err = p1.wait()
    return err
@logerr(0)
def wig2bw(wigpath, chromsizes, bwpath):
    """Generate bigwig coverage from WIGGLE.
    Runs Kent's tool wigToBigWig.

    Returns the process return code (0 on success; non-zero raises via @logerr).
    """
    cmd = ['wigToBigWig', wigpath, chromsizes, bwpath]
    UT.makedirs(os.path.dirname(bwpath))
    err = subprocess.call(cmd)
    return err
def bam2bw(bampath, chromsizes, bwpath, scale=None):
    """Generate bigwig coverage from BAM via a temporary WIGGLE
    file (removed afterwards)."""
    tmpwig = '{0}.wig'.format(bwpath)
    bam2wig(bampath, chromsizes, tmpwig, scale)
    wig2bw(tmpwig, chromsizes, bwpath)
    os.unlink(tmpwig)
def bed2bw(bedpath, chromsizes, bwpath, scale=None):
    """Generate bigwig coverage from BED via a temporary WIGGLE
    file (removed afterwards)."""
    tmpwig = '{0}.wig'.format(bwpath)
    bed2wig(bedpath, chromsizes, tmpwig, scale)
    wig2bw(tmpwig, chromsizes, bwpath)
    os.unlink(tmpwig)
def make_bw_from_bed0(bedpath, chromsizes, bwpath):
    """ DEPRECATED convert BED to BIGWIG, normalize average coverage to 1 """
    totbp,covbp = get_total_bp_bedfile(bedpath)
    # 1/avgcov: scaling genomecov output by covbp/totbp makes mean coverage == 1
    scale = float(covbp)/totbp # 1/avgcov
    bed2bw(bedpath, chromsizes, bwpath, scale)
def make_bw_from_bam0(bampath, chromsizes, bedpath, bwpath):
    """ DEPRECATED convert BAM to BIGWIG, normalize average coverage to 1

    Converts BAM to BED7 first, then delegates to make_bw_from_bed0.
    """
    bam2bed(bampath, bedpath)
    # BUGFIX: was make_bw_from_bed (undefined name -> NameError);
    # the normalizing converter in this module is make_bw_from_bed0
    make_bw_from_bed0(bedpath, chromsizes, bwpath)
def bed12_bed6(bed):
    """ convert BED12 to BED6 uses cython helper

    Expands each BED12 record into one row per exon block.
    NOTE(review): the tid+exon_number naming mentioned below presumably
    happens inside cybw.flatten_bed8 -- confirm against the cython source.
    """
    # BED12 ['chr', 'st', 'ed', 'name', 'sc1', 'strand', 'tst', 'ted', 'sc2', '#exons', 'esizes', 'estarts']
    # BED6 ['chr', 'st', 'ed', 'name', 'sc1', 'strand'] flatten exons, collect unique
    # BED12 tid => BED6 name=tid+exon_number
    bed8 = bed[['chr','st','ed','name','sc1','strand','esizes','estarts']]
    # def _gen():
    #     for x in df.values:
    #         esi = str(x[-2])
    #         est = str(x[-1])
    #         if esi[-1]==',':
    #             esi = esi[:-1]
    #             est = est[:-1]
    #         for y,z in zip(esi.split(','), est.split(',')):
    #             x[-2] = y
    #             x[-1] = z
    #             yield x
    # fbed = PD.DataFrame([x for x in _gen()], columns = df.columns)
    fbed = PD.DataFrame(cybw.flatten_bed8(bed8.values), columns=bed8.columns)
    # adjust st, ed
    # order matters: 'st' is offset by the exon start first, then 'ed' is
    # computed from the *adjusted* 'st' plus the exon size
    fbed['st'] = fbed['st'] + fbed['estarts']
    fbed['ed'] = fbed['st'] + fbed['esizes']
    return fbed[['chr','st','ed','name','sc1','strand','esizes']]
@compressQ('bed6path')
def bed12ToBed6(bed12path, bed6path):
    """ uses bedtools bed12ToBed6

    Expands each BED12 record into one BED6 line per exon block.
    Returns the process return code (0 on success; checked by @compressQ).
    """
    cmd = ['bed12ToBed6', '-i', bed12path]
    with open(bed6path, 'wb') as fp:
        p1 = subprocess.Popen(cmd, stdout=fp)
        err = p1.wait()
    return err
### Normalization Scale ##############################################
def save_bed_covstats(bedpath, dstpath, bed12=False, checkuniq=False):
    """Compute per-chromosome totbp/covbp/acov stats for a BED file and write them to dstpath.

    Rows are chromosomes, sorted by covered bases (descending); 'acov' is
    the average coverage (total bases / covered bases).
    """
    tdic,cdic = get_total_bp_bedfile(bedpath, bed12, returndics=True, checkuniq=checkuniq)
    df = PD.DataFrame({c: {'totbp':tdic[c], 'covbp':cdic[c]} for c in cdic}).T
    df['acov'] = df['totbp']/df['covbp']
    df = df.sort_values('covbp',ascending=False)
    return UT.write_pandas(df, dstpath, 'ih')
def get_total_bp_bedfile(bedpath, bed12=False, chroms=None, returndics=False, checkuniq=False):
    """ Returns total mapped base pairs (totbp) and covered base pairs (covbp).
    The ratio totbp/covbp gives average coverage. Process without reading entire data
    into the RAM. Process non BED12 file.

    Args:
        bedpath (str): a path to BED file
        bed12 (bool): whether format is BED12 (default False)
        chroms (list): chromosomes to consider, if None (default) use all
        returndics (bool): if True return per-chromosome dicts (totdic, covdic)
            instead of the summed totals
        checkuniq (bool): use the uniqueness-checking cython helper (non-BED12 only)

    Returns:
        totbp: total base pairs in BED
        covbp: covered base pairs

    See:
        :py:func:`jgem.bigwig.get_totbp_covbp_bw ` (>6x faster if you have bigwig)
    """
    if bed12:
        totbpdic,covbpdic = cybw.get_total_bp_bed12file_helper(bedpath)
    else:
        if checkuniq:
            totbpdic,covbpdic = cybw.get_total_bp_bedfile_helper_check_uniq(bedpath)
        else:
            totbpdic,covbpdic = cybw.get_total_bp_bedfile_helper(bedpath)
    # fix key bytes => str
    # (the cython helpers key their dicts by raw bytes chromosome names)
    tdic = {}
    cdic = {}
    for b in covbpdic.keys():
        u = b.decode()
        tdic[u] = totbpdic[b]
        cdic[u] = covbpdic[b]
    if returndics:
        return tdic, cdic
    totbp = 0
    covbp = 0
    if chroms is None:
        chroms = cdic.keys()
    for chrom in chroms:
        if chrom not in cdic:
            LOG.warning('{0} not found in the data'.format(chrom))
            continue
        totbp += tdic[chrom]
        covbp += cdic[chrom]
    return totbp, covbp
def get_total_bp(beddf, returndics=False):
    """ Returns total mapped base pairs (totbp) and covered base pairs (covbp).
    The ratio totbp/covbp gives average coverage.

    Args:
        beddf: a BED dataframe (a standard non BED12 format)
        returndics: if True return per-chromosome dicts (totdic, covdic)
            instead of the summed totals

    Returns:
        totbp: total base pairs in BED
        covbp: covered base pairs

    NOTE: mutates *beddf* in place by adding a 'len' column.
    """
    # total bp
    beddf['len'] = beddf['ed']-beddf['st']
    # totbp = beddf['len'].sum() # total bp
    # covered bp: calculate using chopped intervals
    # first remove redundancy
    cols = ['st','ed']
    cdic = {}
    tdic = {}
    for chrom in beddf['chr'].unique():
        sub = beddf[beddf['chr']==chrom]
        tdic[chrom] = sub['len'].sum()
        # drop duplicate (st,ed) pairs, then merge overlapping intervals in cython
        sted = sub.groupby(cols).first().reset_index()
        a = N.array(sted[cols].sort_values(cols).values, dtype=N.int32)
        b = cybw.union_intervals(a)
        cdic[chrom] = N.sum([y-x for x,y in b])
    if returndics:
        return tdic, cdic
    totbp = N.sum(list(tdic.values()))
    covbp = N.sum(list(cdic.values()))
    return totbp, covbp
### INTERSECT ########################################################
# Thin wrappers around bedtools subcommands.  intersect/merge/subtract return
# the output file path (re-gzipped if the target ended in .gz) and raise
# RuntimeError when bedtools fails.
def bedtoolintersect(aname, bname, cname, **kwargs):
    return _bedtoolscatcherror('intersect', aname, bname, cname, **kwargs)
def bedtoolmerge(aname, cname, **kwargs):
    return _bedtoolscatcherror2('merge',aname, cname, **kwargs)
def bedtoolcomplement(aname, cname, chromsizes):
    # NOTE(review): returns the bedtools return code (0), not the output path,
    # unlike its siblings -- confirm callers before unifying
    return _runbedtools2('complement',aname,cname,g=chromsizes)
def bedtoolsubtract(aname, bname, cname, **kwargs):
    return _bedtoolscatcherror('subtract', aname, bname, cname, **kwargs)
def _runbedtools2(which, aname, cname, **kwargs):
    """Run 'bedtools <which> -i <aname> [options]' writing stdout to cname.

    Boolean kwargs become bare flags; others become '-key value' pairs.
    On a non-zero exit the partial output is deleted and RuntimeError raised.
    Returns the bedtools return code (always 0 on the success path).
    """
    cmd = ['bedtools', which, '-i', aname]
    for key, val in kwargs.items():
        cmd.append('-' + key)
        if not isinstance(val, bool):  # 2016-03-27 fix: bools are flag-only
            cmd.append(str(val))
    with open(cname, "wb") as outfile:
        ret = subprocess.call(cmd, stdout=outfile)
    if ret != 0:
        msg = 'bederror return code:{0}, cmd:{1}'.format(ret, cmd)
        LOG.warning(msg)
        # delete output
        os.unlink(cname)
        raise RuntimeError(msg)
    return ret
def _runbedtools3(which, aname, bname, cname, **kwargs):
    """Run 'bedtools <which> -a <aname> -b <bname> [options]' writing stdout to cname.

    Boolean kwargs become bare flags; others become '-key value' pairs.
    On failure the partial output is removed and RuntimeError raised.
    Returns the bedtools return code (0 on success).
    """
    cmd = ['bedtools',which,'-a',aname,'-b',bname]
    for k,v in kwargs.items():
        if isinstance(v,bool):# in [True,False]: 2016-03-27 fix
            cmd += ['-'+k]
        else:
            cmd += ['-'+k, str(v)]
    with open(cname, "wb") as outfile:
        ret = subprocess.call(cmd, stdout=outfile)
    if ret !=0:
        msg = 'bederror return code:{0}, cmd:{1}'.format(ret, cmd)
        LOG.warning(msg)
        # delete output
        os.unlink(cname)
        raise RuntimeError(msg)
    return ret
def _bedtoolscatcherror(which, aname, bname, cname, **kwargs):
    """Run a two-input bedtools subcommand, retrying on uncompressed copies.

    Some bedtools versions fail on gzipped inputs; on RuntimeError the inputs
    are uncompressed to temporary copies and the command retried once.
    Returns the output path (gzipped again if cname ended in '.gz').
    """
    if not os.path.exists(aname):
        raise ValueError('{0} does not exists'.format(aname))
    if not os.path.exists(bname):
        raise ValueError('{0} does not exists'.format(bname))
    if cname.endswith('.gz'):
        cname = cname[:-3]
        compress=True
    else:
        compress=False
    try:
        ret = _runbedtools3(which,aname,bname,cname,**kwargs)
    except RuntimeError:
        LOG.warning('bedtool error: repeating on uncompressed a:{0},b:{1},c:{2}'.format(aname,bname,cname))
        aname2 = UT.uncompresscopy(aname)
        bname2 = UT.uncompresscopy(bname)
        ret = _runbedtools3(which,aname2,bname2,cname,**kwargs)
        # remove temporary uncompressed copies (uncompresscopy returns the
        # original path unchanged when it was not gzipped)
        if aname2 != aname:
            os.unlink(aname2)
        if bname2 != bname:
            os.unlink(bname2)
    if compress:
        return UT.compress(cname)
    return cname
def _bedtoolscatcherror2(which, aname, cname, **kwargs):
    """Single-input variant of _bedtoolscatcherror: run a bedtools subcommand,
    retrying once on an uncompressed copy of the input on failure.
    Returns the output path (gzipped again if cname ended in '.gz')."""
    if not os.path.exists(aname):
        raise ValueError('{0} does not exists'.format(aname))
    if cname.endswith('.gz'):
        cname = cname[:-3]
        compress=True
    else:
        compress=False
    try:
        ret = _runbedtools2(which,aname,cname,**kwargs)
    except RuntimeError:
        LOG.warning('bedtool error: repeating on uncompressed a:{0},c:{1}'.format(aname,cname))
        aname2 = UT.uncompresscopy(aname)
        ret = _runbedtools2(which,aname2,cname,**kwargs)
        # remove the temporary uncompressed copy if one was made
        if aname2 != aname:
            os.unlink(aname2)
    if compress:
        return UT.compress(cname)
    return cname
def calc_ovlratio(aname, bname, tname, nacol, nbcol, idcol=['chr','st','ed'], returnbcols=False):
    """Calculate overlapped portion of b onto a.
    Will check existence of result file (tname) and uses it if newer than input files.

    Args:
        aname (str): bed file name 1
        bname (str): bed file name 2
        tname (str): result file name
        nacol (int): number of columns in file 1
        nbcol (int): number of columns in file 2

    Optional:
        idcol (list of str): columns which specify unique entry
        returnbcols (bool): also keep the (first matching) b_* columns in the result

    Returns:
        A Pandas DataFrame which contains overlap info
    """
    # requirement: no overlap within b
    # cache?
    if UT.notstale([aname,bname], tname):
        return UT.read_pandas(tname)
    # calculate bedtools intersect
    tmpsuf='.ovlbed.txt'
    cname = aname+tmpsuf
    if nacol==12:
        cname = bedtoolintersect(aname, bname, cname, wao=True, split=True)
    else:
        cname = bedtoolintersect(aname, bname, cname, wao=True)
    # read tmp file
    acols = GGB.BEDCOLS[:nacol]
    bcols = ['b_'+x for x in GGB.BEDCOLS[:nbcol]]
    cols = acols + bcols +['ovl']
    df = UT.read_pandas(cname, names=cols)
    dfg = df.groupby(idcol) #['chr','st','ed'])
    if returnbcols:
        dfa = dfg.first().reset_index()[acols+bcols]
    else:
        dfa = dfg.first().reset_index()[acols]
    if nacol==12:# sum of exon sizes
        # BUGFIX: was N.sum(map(int, ...)); under Python 3 numpy wraps the map
        # object in a 0-d object array and does not sum it, leaving map objects
        # in the 'len' column -- use builtin sum over the parsed sizes instead
        dfa['len'] = [sum(int(s) for s in x.split(',')[:-1]) for x in dfa['esizes']]
    else:
        dfa['len'] = dfa['ed']-dfa['st']
    # since b does not overlap by itself total overlap of an element of a to b is
    # sum of overlap to individual b
    dfa['ovl'] = dfg['ovl'].sum().values
    dfa['ovlratio'] = dfa['ovl'].astype(float)/dfa['len']
    dfa['notcovbp'] = dfa['len'] - dfa['ovl']
    # clean up
    os.unlink(cname)
    # save
    UT.save_tsv_nidx_whead(dfa, tname)
    return dfa
def fillgap(binfile, gapfile, gap=50):
    """Merge intervals in binfile that are within *gap* bp of each other
    (bedtools merge -d gap).  Returns the gzipped merged file path; reuses
    a cached result when it is newer than the input."""
    if gapfile[-3:]=='.gz':
        gapfile = gapfile[:-3]
    #gapfile = binfile[:-7]+'.gap%d.bed' % gap
    if UT.notstale(binfile, gapfile+'.gz'):
        return gapfile+'.gz'
    gapfile = bedtoolmerge(binfile, gapfile, d=gap)
    return gapfile
def read_ovl(c, acols, bcols=None):
    """Read bedtools-intersect (-wao) output *c* into a DataFrame.

    Column names are acols, then the b-side columns prefixed 'b_'
    (taken from bcols when given, otherwise mirrored from acols),
    then the trailing 'ovl' overlap count.
    """
    src = acols if bcols is None else bcols
    cols = acols + ['b_' + x for x in src] + ['ovl']
    return UT.read_pandas(c, names=cols)
### MAPBED to WIG ########################################################
# dict read_id => set{map_id}
# multimapper = dup = size(set{map_id})>1
# weight = 1/dup
# 1st pass calculate this map read_id => weight
# for uniq.bw only use weight==1
# for all.bw use all but use weight
def splitbedgz(bedgz, prefix):
    """Split gzipped bed file into separate files according to chromosome.
    Uses zcat and awk.

    Args:
        bedgz: path to gzipped bed file
        prefix: output path prefix; one file '<prefix>.<chrom>.bed' is
            created per chromosome (awk redirects on column 1)

    Returns:
        p2's stderr, which is None on the normal path (stderr is not piped).
    """
    cmd1 = ['zcat', bedgz]
    awkscript = 'BEGIN{{FS="\t"}}{{print > "{0}."$1".bed"}}'.format(prefix)
    #print(awkscript)
    cmd2 = ['awk', awkscript]
    p1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
    p2 = subprocess.Popen(cmd2, stdin=p1.stdout)
    err = p2.communicate()[1]
    return err
# SJTABMOTIF = {0:'non-canonical',1:'GT/AG',2:'CT/AC',3:'GC/AG',4:'CT/GC',5:'AT/AC',6:'GT/AT'}
# canonical splice-site motif (donor+acceptor dinucleotides read from the
# genome) => inferred strand; non-canonical motifs fall back to '.' via .get()
STED2STRAND = dict(
    GTAG='+',
    CTAC='-',
    GCAG='+',
    CTGC='-',
    ATAC='+',
    GTAT='-',
)
def _scan_make_map(paths, dstpath):
    """Scan BED7 files and write a table of multi-mapped read ids to dstpath.

    For each read id (column 4) the set of map ids (column 7) is collected;
    reads appearing at more than one location are written out with their
    location count ('cnt' column).
    Relies on 'import io' at module level (was missing: io.BufferedReader
    previously raised NameError on any gzipped input).
    """
    cnt = defaultdict(set)  # read_id => set of map_ids (how many locations?)
    #csp = defaultdict(int)
    for path in paths:
        if path[-3:]=='.gz':
            # BufferedReader speeds up line iteration over the gzip stream;
            # closing it also closes the underlying gzip file
            fp = io.BufferedReader(gzip.open(path))
        else:
            fp = open(path,'rb')
        with fp:
            for line in fp: # chr,st,ed,name,sc1,strand,tst
                rec = line.strip().split(b'\t') # read_id:name(3), map_id:tst(6)
                if len(rec)==7:
                    cnt[rec[3]].add(rec[6])
                else:
                    print('wrong#fields:{0} in {1}'.format(len(rec),path))
                # csp[rec[6]] += 1 # count # segments in a read if >1 spliced
    # py2 dicts have iteritems; narrow the old bare except to AttributeError
    try:# py2
        items = cnt.iteritems()
    except AttributeError:# py3
        items = cnt.items()
    dup = PD.DataFrame({k:len(v) for k,v in items if len(v)>1}, index=['cnt']).T
    UT.write_pandas(dup, dstpath,'ih')
def pathcode(sse, strand):
    """Encode a splice path sse=[(st,ed),...] as 'a|b,c|d'.

    On '+'/'.' the pairs are emitted in order as 'st|ed'; on other strands
    the list is reversed and each pair emitted as 'ed|st'.
    """
    if strand in ('+', '.'):
        return ','.join('{0}|{1}'.format(st, ed) for st, ed in sse)
    return ','.join('{0}|{1}'.format(ed, st) for st, ed in reversed(sse))
def pcode2pos(pcode):
    """Inverse of pathcode: parse 'a|b,c|d' into [[st,ed],...] in ascending genomic order.

    BUGFIX: the previous nested comprehension
    [int(x) for x in y.split('|') for y in pcode.split(',')]
    referenced y before it was bound and always raised NameError.
    """
    tmp = [[int(x) for x in y.split('|')] for y in pcode.split(',')]
    if tmp[0][0]<tmp[0][1]: # pos strand: already ascending
        return tmp
    # neg strand encoding: reverse pair order and each pair's elements
    return [x[::-1] for x in tmp[::-1]]
def process_mapbed_mp(bedpaths, dstpres, genome, chromdir, stranded='.',np=12):
    """Run process_mapbed over paired (bedpath, dstpre) inputs with np worker processes."""
    args = [(x,y,genome,chromdir,stranded) for x,y in zip(bedpaths,dstpres)]
    rslts = UT.process_mp2(process_mapbed, args, np=np, doreduce=False)
def process_mapbed(bedpath, dstpre, genome, chromdir, stranded='.'):
    """Split a mapped BED7 by chromosome, build exon/junction coverage tracks
    and a splice-path BED12, then merge the per-chromosome pieces.

    Args:
        bedpath: path to gzipped BED7 file (converted from BAM)
        dstpre: path prefix to destination
        genome: UCSC genome (mm10 etc.)
        chromdir: directory containing chromosome sequence in FASTA
        stranded: library strandedness symbol ('+', '-' or '.'), fed to STRANDMAP

    Outputs:
        1. dstpre+'.ex.p.bw'
        2. dstpre+'.ex.n.bw'
        3. dstpre+'.ex.u.bw'
        4. dstpre+'.sj.p.bw'
        5. dstpre+'.sj.n.bw'
        6. dstpre+'.sj.u.bw'
        7. dstpre+'.ex.p.uniq.bw'
        8. dstpre+'.ex.n.uniq.bw'
        9. dstpre+'.ex.u.uniq.bw'
        10. dstpre+'.sj.p.uniq.bw'
        11. dstpre+'.sj.n.uniq.bw'
        12. dstpre+'.sj.u.uniq.bw'
        13. dstpre+'.sjpath.bed' BED12 (sc1:ucnt, sc2:jcnt=ucnt+mcnt)
    """
    chroms = UT.chroms(genome)
    chromdf = UT.chromdf(genome)
    chromsizes = UT.chromsizes(genome)
    # split into chroms
    UT.makedirs(dstpre)
    splitbedgz(bedpath, dstpre) # ~30sec
    duppath = dstpre+'.dupitems.txt.gz'
    chroms = [c for c in chroms if os.path.exists(dstpre+'.{0}.bed'.format(c))]
    files = [dstpre+'.{0}.bed'.format(c) for c in chroms]
    _scan_make_map(files, duppath)
    files0 = [dstpre+'.{0}.bed'.format(c) for c in chromdf['chr'].values] # to be deleted
    args = [(dstpre, x, genome, chromdir, stranded) for x in chroms]
    # spread to CPUs
    # NOTE(review): np=1 is hardwired so processing is sequential despite the
    # comment above; confirm whether this should be a parameter
    rslts = UT.process_mp2(_process_mapbed_chr, args, np=1, doreduce=False)
    # concatenate chr files
    files1 = []
    dstpath = dstpre+'.sjpath.bed'
    LOG.info('making {0}...'.format(dstpath))
    with open(dstpath, 'wb') as dst:
        for c in chroms:
            srcpath = dstpre+'.{0}.sjpath.bed'.format(c)
            files1.append(srcpath)
            with open(srcpath, 'rb') as src:
                shutil.copyfileobj(src, dst)
    dstpath = UT.compress(dstpath)
    for kind in ['.ex','.sj']:
        for strand in ['.p','.n','.u']:
            for suf in ['','.uniq']:
                pre = dstpre+kind+suf+strand
                wigpath = pre+'.wig'
                bwpath = pre+'.bw'
                with open(wigpath, 'wb') as dst:
                    for c in chroms:
                        srcpath = pre+'.{0}.wig'.format(c)
                        files1.append(srcpath)
                        if os.path.exists(srcpath):
                            with open(srcpath,'rb') as src:
                                shutil.copyfileobj(src, dst)
                LOG.info('making {0}...'.format(bwpath))
                # skip bigwig conversion for empty concatenations
                if os.path.getsize(wigpath)>0:
                    wig2bw(wigpath, chromsizes, bwpath)
                files1.append(wigpath)
    # clean up temp files
    LOG.info('deleting intermediate files...')
    for x in files0+files1:
        if os.path.exists(x):
            LOG.debug('deleting {0}...'.format(x))
            os.unlink(x)
# (read strand, library strandedness) => exon coverage track suffix;
# an unstranded library ('.') sends everything to the '.u' (unknown) track
STRANDMAP = {('+','+'):'.p',
             ('+','-'):'.n',
             ('+','.'):'.u',
             ('-','+'):'.n',
             ('-','-'):'.p',
             ('-','.'):'.u',
             ('.','+'):'.u',
             ('.','-'):'.u',
             ('.','.'):'.u'}
def _process_mapbed_chr(dstpre, chrom, genome, chromdir, stranded):
    """Per-chromosome worker for process_mapbed.

    Streams a 7-column BED (chr, st, ed, read_id, mapq, strand, map_id),
    accumulates exon and junction coverage arrays (all reads and unique-only)
    and collects splice paths, then writes the per-chromosome wiggles and a
    splice-path BED12.  Lines sharing a map id are consecutive segments of one
    spliced read; junction strand is inferred from genomic splice-site motifs.
    """
    # 1st pass: calc dupdic
    bedpath = dstpre+'.{0}.bed'.format(chrom)
    # read ids that mapped to more than one location (built by _scan_make_map)
    dupids = UT.read_pandas(dstpre+'.dupitems.txt.gz', index_col=[0]).index
    # 2nd pass make wiggles
    gfc = FA.GenomeFASTAChroms(chromdir)
    chromsize = UT.df2dict(UT.chromdf(genome), 'chr', 'size')[chrom]
    # mqth MAPQ threshold there are ~6% <10
    # generator which makes an array
    fp = open(bedpath,'rb')
    # wigs[kind][strand][suf]: coverage arrays keyed by '.ex'/'.sj',
    # '.p'/'.n'/'.u' and ''/'.uniq'; wigpaths mirrors with output paths
    wigs = {}
    wigpaths = {}
    for kind in ['.ex','.sj']:
        wigs[kind] = {}
        wigpaths[kind] = {}
        for strand in ['.p','.n','.u']:
            wigs[kind][strand] = {}
            wigpaths[kind][strand] = {}
            for suf in ['','.uniq']:
                wigpath = dstpre+kind+suf+strand+'.{0}.wig'.format(chrom)
                if os.path.exists(wigpath):
                    os.unlink(wigpath)
                wigpaths[kind][strand][suf] = wigpath
                wigs[kind][strand][suf] = N.zeros(chromsize, dtype=float)
    sjs = [] # path: (chr, st, ed, pcode, ucnt, strand, acnt)
    # pcode = a(apos)d(dpos) = a(ed)d(st) if strand=='+' else a(st)d(ed)
    # ucnt = unique read counts
    # acnt = multi-read adjusted all counts (=ucnt+Sum(mcnt(i)/dup(i)))
    # delete previous
    sjbed12 = dstpre+'.{0}.sjpath.bed'.format(chrom)
    if os.path.exists(sjbed12):
        os.unlink(sjbed12)
    def _write_arrays():
        # flush every accumulated coverage array to its wiggle file
        for kind in ['.ex','.sj']:
            for strand in ['.p','.n','.u']:
                for suf in ['','.uniq']:
                    cybw.array2wiggle_chr64(wigs[kind][strand][suf], chrom, wigpaths[kind][strand][suf], 'w')
    def _write_sj(sjs):
        # aggregate per-path counts and emit one BED12 line per splice path
        # sjs = [(chr,st,ed,pathcode(name),ureads(sc1),strand,tst,ted,areads(sc2),cse),...]
        sjdf = PD.DataFrame(sjs, columns=GGB.BEDCOLS[:9]+['cse'])
        sjdfgr = sjdf.groupby('name')
        sj = sjdfgr.first()
        sj['sc1'] = sjdfgr['sc1'].sum().astype(int) # ucnt
        sj['sc2'] = sjdfgr['sc2'].sum().astype(int) # jcnt=ucnt+mcnt
        sj['st'] = sjdfgr['st'].min()
        sj['ed'] = sjdfgr['ed'].max()
        sj['#exons'] = sj['cse'].apply(len)+1
        # exon starts/ends derived from the intron list (cse) relative to st
        sj['ests'] = [[0]+[z[1]-st for z in cse] for st,cse in sj[['st','cse']].values]
        sj['eeds'] = [[z[0]-st for z in cse]+[ed-st] for st,ed,cse in sj[['st','ed','cse']].values]
        esizes = [[u-v for u,v in zip(x,y)] for x,y in sj[['eeds','ests']].values]
        sj['estarts'] = ['{0},'.format(','.join([str(y) for y in x])) for x in sj['ests']]
        sj['esizes'] = ['{0},'.format(','.join([str(y) for y in x])) for x in esizes]
        sj['name'] = sj.index
        # sj = sj.reset_index()
        with open(sjbed12, 'w') as f:
            sj[GGB.BEDCOLS].to_csv(f, index=False, header=False, sep='\t', quoting=csv.QUOTE_NONE)
    def _append_sj(cse, css, csj, chrom,ureads,areads):
        # finalize the current spliced read (if any introns were collected)
        if (len(cse)>0): # spits out splice rec
            # chr,st,ed,pathcode,ureads,strand,tst,ted,areads
            tst = cse[0][0]
            ted = cse[-1][1]
            if len(css)>0:
                # majority vote over per-intron motif-derived strands
                strand = Counter(css).most_common()[0][0]
            else:
                strand = '.'
            name = pathcode(cse, strand)
            st = int(csj[0][1]) # first segment start
            ed = int(csj[-1][2]) # last segment end
            sjs.append((chrom,st,ed,name,ureads,strand,tst,ted,areads,cse))
    def _add_to_ex_arrays(st,ed,dup,strand):
        # accumulate exon coverage; track chosen via (read strand, strandedness)
        # NOTE(review): strand comes from a bytes split (rec[5]) while STRANDMAP
        # keys are str -- under Python 3 this looks like a KeyError; confirm
        kind='.ex'
        strand = STRANDMAP[(strand,stranded)]
        dic = wigs[kind][strand]
        dic[''][st:ed] += 1
        if not dup:
            dic['.uniq'][st:ed] += 1
    def _add_to_sj_arrays(sst,sed,dup,strand):
        # accumulate junction coverage; returns (ureads, areads) for this read
        kind='.sj'
        s = {'+':'.p','-':'.n','.':'.u'}[strand]
        dic = wigs[kind][s]
        # add to the arrays
        dic[''][sst:sed] += 1
        if not dup:
            dic['.uniq'][sst:sed] += 1
            ureads,areads = 1,1
        else:
            ureads,areads = 0,1
        return ureads,areads
    csj = [] # current collection of spliced reads
    css = [] # current strands
    cse = [] # current (sst,sed)
    csn = 0 # current segment number
    ureads,areads = 1,1 # uniq, total reads it's either 1,1 or 0,1
    pmid = None # previous map id common to spliced segments
    for line in fp:
        rec = line.strip().split(b'\t')
        # 7 column bed: chr(0), st(1), ed(2), name(3), mapq(4), strand(5), mapid(6)
        cchr = rec[0].decode()
        st,ed = int(rec[1]),int(rec[2])
        dup = rec[3] in dupids #dic[rec[3]]
        estrand = rec[5]
        _add_to_ex_arrays(st,ed,dup,estrand)
        # process splice
        if pmid != rec[6]: # new map
            _append_sj(cse, css, csj, chrom, ureads, areads)
            csj,css,cse,csn = [rec],[],[],0 # reset running params
        else: # add segments
            csj.append(rec)
            prec = csj[-2] # previous rec
            sst = int(prec[2]) # ed of previous segment
            sed = int(rec[1]) # st of current segment
            cse.append((sst,sed))
            # find strand
            # donor+acceptor dinucleotides from the genome; non-canonical => '.'
            sted = gfc.get(chrom,sst,sst+2)+gfc.get(chrom,sed-2,sed)
            strand = STED2STRAND.get(sted,'.')
            if strand != '.':
                css.append(strand)
            ureads,areads = _add_to_sj_arrays(sst,sed,dup,strand)
        pmid = rec[6]
    # flush the final read, then write all outputs
    _append_sj(cse, css, csj, chrom, ureads, areads)
    _write_arrays()
    _write_sj(sjs)
def sj02wig(sjchr, chrom, chromsize, pathtmpl):
    """Write per-strand junction coverage WIGGLE files for one chromosome.

    Args:
        sjchr: junction DataFrame for this chromosome (columns st, ed, jcnt, strand)
        chrom: chromosome name
        chromsize: chromosome length (coverage array size)
        pathtmpl: output path template; '{0}' is replaced by the strand symbol (+, -, .)
    """
    a = {'+':N.zeros(chromsize, dtype=N.float64),
         '-':N.zeros(chromsize, dtype=N.float64),
         '.':N.zeros(chromsize, dtype=N.float64)}
    for st,ed,v,strand in sjchr[['st','ed','jcnt','strand']].values:
        # st-1: assumes 'st' is 1-based while the array is 0-based -- TODO confirm
        a[strand][st-1:ed] += v
    for strand in a:
        path = pathtmpl.format(strand)
        cybw.array2wiggle_chr64(a[strand], chrom, path)
# strand symbol => filename suffix for stranded coverage tracks (p=plus, n=minus, u=unknown)
STRANDMAP0 = {'+':'.p','-':'.n','.':'.u'}
def sj02bw(sj0, pathpre, genome, np=12):
    """Make per-strand junction coverage bigwigs (.sj.p/.n/.u.bw) from a junction DataFrame.

    Args:
        sj0: junction DataFrame (chr, st, ed, strand and either jcnt or ucnt+mcnt)
        pathpre: output path prefix
        genome: UCSC genome name (e.g. mm10)
        np: number of worker processes for the per-chromosome wiggle step
    """
    chroms = UT.chroms(genome)
    chromdf = UT.chromdf(genome).sort_values('size',ascending=False)
    chroms = [x for x in chromdf['chr'] if x in chroms]  # biggest chrom first: better load balance
    chromdic = UT.df2dict(chromdf, 'chr', 'size')
    if 'jcnt' not in sj0:
        sj0['jcnt'] = sj0['ucnt']+sj0['mcnt']
    files = []
    args = []
    for c in chroms:
        f = '{0}.{1}.{{0}}.wig'.format(pathpre,c)  # '{0}' slot filled with strand later
        args.append((sj0[sj0['chr']==c], c, chromdic[c], f))
        files.append(f)
    rslts = UT.process_mp(sj02wig, args, np=np, doreduce=False)
    rmfiles = []
    for strand in ['+','-','.']:
        s = STRANDMAP0[strand]
        wig = pathpre+'.sj{0}.wig'.format(s)
        bwpath = pathpre+'.sj{0}.bw'.format(s)
        # concatenate the per-chromosome wiggles into one strand wiggle
        with open(wig, 'w') as dst:
            for tmpl in files:
                f = tmpl.format(strand)
                with open(f,'r') as src:
                    shutil.copyfileobj(src, dst)
                rmfiles.append(f)
        rmfiles.append(wig)
        wig2bw(wig, UT.chromsizes(genome), bwpath)
    # BUGFIX: the old trailing os.unlink(wig) double-deleted the last strand wiggle
    # (already present in rmfiles) and raised FileNotFoundError; rmfiles covers all temps
    for f in rmfiles:
        os.unlink(f)
|
76e253f8-2d5f-11e5-b4e4-b88d120fff5e
76ebd5a3-2d5f-11e5-95e0-b88d120fff5e
76ebd5a3-2d5f-11e5-95e0-b88d120fff5e |
# -*- coding: utf-8 -*-
##
## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints English and French abstract.
"""
__revision__ = "$Id$"
from invenio import bibformat_utils
def format(bfo, prefix_en, prefix_fr, suffix_en, suffix_fr, limit,
           extension_en="[...] ",extension_fr="[...] ", contextual="no",
           highlight='no'):
    """ Prints the abstract of a record in english
    and then french

    @param prefix_en a prefix for english abstract (printed only if english abstract exists)
    @param prefix_fr a prefix for french abstract (printed only if french abstract exists)
    @param limit the maximum number of sentences of the abstract to display (for each language)
    @param extension_en a text printed after english abstracts longer than parameter 'limit'
    @param extension_fr a text printed after french abstracts longer than parameter 'limit'
    @param suffix_en a suffix for english abstract(printed only if english abstract exists)
    @param suffix_fr a suffix for french abstract(printed only if french abstract exists)
    @param contextual if 'yes' prints sentences the most relative to user search keyword (if limit < abstract)
    @param highlight if 'yes' highlights words from user search keyword
    """
    out = ''
    # English abstract from MARC 520__a/520__b; French from 590__a/590__b
    abstract_en = bfo.fields('520__a')
    abstract_en.extend(bfo.fields('520__b'))
    abstract_en = "<br/>".join(abstract_en)
    abstract_fr = bfo.fields('590__a')
    abstract_fr.extend(bfo.fields('590__b'))
    abstract_fr = "<br/>".join(abstract_fr)
    if contextual == 'yes' and limit != "" and \
           limit.isdigit() and int(limit) > 0:
        context_en = bibformat_utils.get_contextual_content(abstract_en,
                                                            bfo.search_pattern,
                                                            max_lines=int(limit))
        #FIXME add something like [...] before and after
        #contextual sentences when not at beginning/end of abstract
        #if not abstract_en.strip().startswith(context_en[0].strip()):
        #    out += '[...]'
        abstract_en = "<br/>".join(context_en)
        context_fr = bibformat_utils.get_contextual_content(abstract_fr,
                                                            bfo.search_pattern,
                                                            max_lines=int(limit))
        abstract_fr = "<br/>".join(context_fr)
    if len(abstract_en) > 0 :
        out += prefix_en
        if limit != "" and limit.isdigit():
            print_extension = False
            # keep at most 'limit' sentences; remember whether we truncated
            s_abstract = abstract_en.split(".")
            if int(limit) < len(s_abstract):
                print_extension = True
                s_abstract = s_abstract[:int(limit)]
            for sentence in s_abstract:
                out += sentence+ "."
            if print_extension:
                out += " "+extension_en
        else:
            out += abstract_en
        out += suffix_en
    if len(abstract_fr) > 0 :
        out += prefix_fr
        if limit != "" and limit.isdigit():
            print_extension = False
            s_abstract = abstract_fr.split(".")
            if int(limit) < len(s_abstract):
                print_extension = True
                s_abstract = s_abstract[:int(limit)]
            for sentence in s_abstract:
                out += sentence + "."
            if print_extension:
                out += " "+extension_fr
        else:
            out += abstract_fr
        out += suffix_fr
    if highlight == 'yes':
        out = bibformat_utils.highlight(out, bfo.search_pattern)
    return out
Fix to let format templates choose the language to print for the abstract.
# -*- coding: utf-8 -*-
##
## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints English and French abstract.
"""
__revision__ = "$Id$"
from invenio import bibformat_utils
def format(bfo, prefix_en, prefix_fr, suffix_en, suffix_fr, limit,
           extension_en="[...] ",extension_fr="[...] ", contextual="no",
           highlight='no', print_lang='en,fr'):
    """ Prints the abstract of a record. By default prints English and French versions.

    Printed languages can be chosen with the 'print_lang' parameter.

    @param prefix_en a prefix for english abstract (printed only if english abstract exists)
    @param prefix_fr a prefix for french abstract (printed only if french abstract exists)
    @param limit the maximum number of sentences of the abstract to display (for each language)
    @param extension_en a text printed after english abstracts longer than parameter 'limit'
    @param extension_fr a text printed after french abstracts longer than parameter 'limit'
    @param suffix_en a suffix for english abstract(printed only if english abstract exists)
    @param suffix_fr a suffix for french abstract(printed only if french abstract exists)
    @param contextual if 'yes' prints sentences the most relative to user search keyword (if limit < abstract)
    @param highlight if 'yes' highlights words from user search keyword
    @param print_lang the comma-separated list of languages to print. Now restricted to 'en' and 'fr'
    """
    out = ''
    # tolerate whitespace around separators, e.g. 'en, fr' (fix: bare split
    # left ' fr' tokens that never matched 'fr')
    languages = [language.strip() for language in print_lang.split(',')]
    # English abstract from MARC 520__a/520__b; French from 590__a/590__b
    abstract_en = bfo.fields('520__a')
    abstract_en.extend(bfo.fields('520__b'))
    abstract_en = "<br/>".join(abstract_en)
    abstract_fr = bfo.fields('590__a')
    abstract_fr.extend(bfo.fields('590__b'))
    abstract_fr = "<br/>".join(abstract_fr)
    if contextual == 'yes' and limit != "" and \
           limit.isdigit() and int(limit) > 0:
        context_en = bibformat_utils.get_contextual_content(abstract_en,
                                                            bfo.search_pattern,
                                                            max_lines=int(limit))
        #FIXME add something like [...] before and after
        #contextual sentences when not at beginning/end of abstract
        #if not abstract_en.strip().startswith(context_en[0].strip()):
        #    out += '[...]'
        abstract_en = "<br/>".join(context_en)
        context_fr = bibformat_utils.get_contextual_content(abstract_fr,
                                                            bfo.search_pattern,
                                                            max_lines=int(limit))
        abstract_fr = "<br/>".join(context_fr)
    if len(abstract_en) > 0 and 'en' in languages:
        out += prefix_en
        if limit != "" and limit.isdigit():
            print_extension = False
            # keep at most 'limit' sentences; remember whether we truncated
            s_abstract = abstract_en.split(".")
            if int(limit) < len(s_abstract):
                print_extension = True
                s_abstract = s_abstract[:int(limit)]
            for sentence in s_abstract:
                out += sentence+ "."
            if print_extension:
                out += " "+extension_en
        else:
            out += abstract_en
        out += suffix_en
    if len(abstract_fr) > 0 and 'fr' in languages:
        out += prefix_fr
        if limit != "" and limit.isdigit():
            print_extension = False
            s_abstract = abstract_fr.split(".")
            if int(limit) < len(s_abstract):
                print_extension = True
                s_abstract = s_abstract[:int(limit)]
            for sentence in s_abstract:
                out += sentence + "."
            if print_extension:
                out += " "+extension_fr
        else:
            out += abstract_fr
        out += suffix_fr
    if highlight == 'yes':
        out = bibformat_utils.highlight(out, bfo.search_pattern)
    return out
|
# -*- coding:utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Odoo addon manifest: capture partner pictures with an attached webcam.
{
    'name': 'Capture partner picture with webcam',
    'version': '1.004',
    'category': 'Generic Modules',
    'description': """
WebCam
=========
Capture partner pictures with an attached web cam.
""",
    # Adjacent string literals are concatenated implicitly; each segment
    # must end with ", " or the author names run together in the result.
    'author': "Michael Telahun Makonnen <mmakonnen@gmail.com>,"
              "Odoo Community Association (OCA), "
              "ThinkOpen Solutions Brasil",
    'website': 'http://tkobr.com',
    'license': 'AGPL-3',
    'depends': [
        'base',
        'web',
    ],
    # Client-side assets loaded by the web client.
    'js': [
        'static/src/js/jquery.webcam.js',
        'static/src/js/webcam.js',
    ],
    'css': [
        'static/src/css/webcam.css',
    ],
    'qweb': [
        'static/src/xml/webcam.xml',
    ],
    # Server-side data and view definitions.
    'data': [
        'webcam_data.xml',
        'webcam_view.xml',
    ],
    'installable': True,
    'active': False,
}
tko_partner_webcam: update module description
# -*- coding:utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Odoo addon manifest: capture partner pictures with an attached webcam.
{
    'name': 'Capture partner picture with webcam',
    'version': '1.005',
    'category': 'Generic Modules',
    'description': """
WebCam
=========
Capture partner pictures with an attached web cam.
""",
    # One author string, assembled from adjacent literals.
    'author': ("Michael Telahun Makonnen <mmakonnen@gmail.com>,"
               "Odoo Community Association (OCA), "
               "ThinkOpen Solutions Brasil"),
    'website': 'http://tkobr.com',
    'license': 'AGPL-3',
    'depends': ['base', 'web'],
    # Client-side assets loaded by the web client.
    'js': ['static/src/js/jquery.webcam.js',
           'static/src/js/webcam.js'],
    'css': ['static/src/css/webcam.css'],
    'qweb': ['static/src/xml/webcam.xml'],
    # Server-side data and view definitions.
    'data': ['webcam_data.xml',
             'webcam_view.xml'],
    'installable': True,
    'active': False,
}
|
BEAMENV.BUG: Make sure beam envelope is a symmetric matrix.
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011 - 2012 -- Lars Heuer <heuer[at]semagia.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the project name nor the names of the contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""\
Tests classificationist parsing.
:author: Lars Heuer (heuer[at]semagia.com)
:organization: Semagia - <http://www.semagia.com/>
:license: BSD license
"""
from nose.tools import eq_, ok_
from cablemap.core import cable_by_id
from cablemap.core.reader import parse_classificationists
# Fixture data for the classificationist parser tests.
#
# Each entry is a tuple of:
#   - the cable reference id (may be an empty string),
#   - the expected classificationist: a unicode name, a tuple of several
#     names, or an empty tuple when no classificationist must be found,
#   - the (partial) cable content handed to the parser,
#   - optionally a 4th element (True).
# NOTE(review): the optional True flag appears on paired entries whose
# expected name is a cleaned-up variant of the raw cable text (e.g.
# 'LEE 0. COLDREN' vs 'Lee O. Coldren'), so it presumably requests name
# normalization -- confirm against reader.parse_classificationists.
# Several cable bodies contain deliberately garbled/mis-encoded text
# (e.g. 09HAVANA665, 07KHARTOUM832): keep them byte-identical, they test
# parser robustness against corrupted input.
_TEST_DATA = (
(u'10TOKYO397', u'Marc Wall', u'''FIELD
REF: STATE 015541
Classified By: Acting Deputy Chief of Mission Marc Wall for Reasons 1.4
(b) and (d)
¶1. (C) SUM'''),
(u'10GENEVA249', u'Rose E. Gottemoeller', u'''REF: 10 GENEVA 231 (SFO-GVA-VIII-088) CLASSIFIED BY: Rose E. Gottemoeller, Assistant Secretary, Department of State, VCI; REASON: 1.4(B), (D) '''),
(u'10GENEVA247', u'Rose E. Gottemoeller', u'''REF: 10 GENEVA 245 (SFO-GVA-VIII-086) CLASSIFIED BY: Rose E. Gottemoeller, Assistant Secretary, Department of State, VCI; REASON: 1.4(B), (D) ¶1. (U) This '''),
(u'10UNVIEVIENNA77', u'Glyn T. Davies', u'''\nClassified By: Ambassador Glyn T. Davies for reasons 1.4 b and d '''),
(u'10WARSAW117', u'F. Daniel Sainz', u'''\nClassified By: Political Counselor F. Daniel Sainz for Reasons 1.4 (b) and (d) '''),
(u'10STATE16019', u'Karin L. Look', u'''\nClassified By: Karin L. Look, Acting ASSISTANT SECRETARY, VCI. Reason: 1.4 (b) and (d).'''),
(u'10LILONGWE59', u'Bodde Peter', u'''\nCLASSIFIED BY: Bodde Peter, Ambassador; REASON: 1.4(B) '''),
(u'95ZAGREB4339', u'ROBERT P. FINN', u'''
1. (U) CLASSIFIED BY ROBERT P. FINN, DEPUTY CHIEF OF
MISSION. REASON: 1.5 (D)
'''),
(u'95DAMASCUS5748', u'CHRISTOPHER W.S. ROSS', u'''SUBJECT: HAFIZ AL-ASAD: LAST DEFENDER OF ARABS
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY:
CHRISTOPHER W.S. ROSS, AMBASSADOR. REASON: 1.5 (D) .
2. SUMMAR'''),
(u'95TELAVIV17504', (), u'''
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY SECTION 1.5 (B)
AND (D). NIACT PRECEDENCE BECAUSE OF GOVERNMENT CRISIS IN
ISRAEL.
2. SU'''),
(u'95RIYADH5221', u'THEODORE KATTOUF', u'''
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY DCM
THEODORE KATTOUF - 1.5 B,D.
2. (C)'''),
(u'96ADDISABABA1545', u'JEFFREY JACOBS', u'''
1. (U) CLASSIFIED BY POLOFF JEFFREY JACOBS, 1.5 (D).
2. (C)'''),
(u'96AMMAN2094', u'ROBERT BEECROFT', u'''
1. (U) CLASSIFIED BY CHARGE ROBERT BEECROFT; REASON 1.5 (D).
2. (C) '''),
(u'96STATE86789', u'MARY BETH LEONARD', u'''
1. CLASSIFIED BY AF/C - MARY BETH LEONARD, REASON 1.5
(D). '''),
(u'96NAIROBI6573', u'TIMOTHY CARNEY', u'''
1. CLASSIFIED BY AMBASSADOR TO SUDAN TIMOTHY CARNEY.
REASON 1.5(D).
'''),
(u'96RIYADH2406', u'THEODORE KATTOUF', u'''SUBJECT: CROWN PRINCE ABDULLAH THE DIPLOMAT
1. (U) CLASSIFIED BY CDA THEODORE KATTOUF, REASON 1.5.D.
2. '''),
(u'96RIYADH2696', u'THEODORE KATTOUF', u'''
1. (U) CLASSIFIED BY CHARGE D'AFFAIRES THEODORE
KATTOUF: 1.5 B, D.
'''),
(u'96ISLAMABAD5972', u'THOMAS W. SIMONS, JR.', u'''
1. (U) CLASSIFIED BY THOMAS W. SIMONS, JR., AMBASSADOR.
REASON: 1.5 (B), (C) AND (D).
'''),
(u'96ISLAMABAD5972', u'Thomas W. Simons, Jr.', u'''
1. (U) CLASSIFIED BY THOMAS W. SIMONS, JR., AMBASSADOR.
REASON: 1.5 (B), (C) AND (D).
''', True),
(u'96STATE183372', u'LEE 0. COLDREN', u'''
1. (U) CLASSIFIED BY LEE 0. COLDREN, DIRECTOR, SA/PAB,
DEPARTMENT OF STATE. REASON: 1.5(D).
'''),
(u'96STATE183372', u'Lee O. Coldren', u'''
1. (U) CLASSIFIED BY LEE 0. COLDREN, DIRECTOR, SA/PAB,
DEPARTMENT OF STATE. REASON: 1.5(D).
''', True),
(u'96ASHGABAT2612', u'TATIANA C. GFOELLER', u'''
1. (U) CLASSIFIED BY CHARGE TATIANA C. GFOELLER.
REASON: 1.5 D.
'''),
(u'96BOGOTA8773', u'S.K. ABEYTA', u'''
1. CLASSIFIED BY POL/ECONOFF. S.K. ABEYTA. REASON: 1.5(D)
'''),
(u'96STATE194868', u'E. GIBSON LANPHER, JR.', u'''
1. (U) CLASSIFIED BY E. GIBSON LANPHER, JR., ACTING
ASSISTANT SECRETARY OF STATE FOR SOUTH ASIAN AFFAIRS,
DEPARTMENT OF STATE. REASON: 1.5(D).
'''),
(u'96JAKARTA7841', u'ED MCWILLIAMS', u'''
1. (U) CLASSIFIED BY POL COUNSELOR ED MCWILLIAMS;
REASON 1.5(D)
'''),
(u'96JERUSALEM3094', u'EDWARD G. ABINGTON, JR.', u'''
1. CLASSIFIED BY CONSUL GENERAL EDWARD G. ABINGTON, JR. REASON
1.5 (B) AND (D).
'''),
(u'96BOGOTA10967', u'S.K. ABEYTA', u'''
1. (U) CLASSIFIED BY POL/ECONOFF S.K. ABEYTA. REASON 1.5(D).
'''),
(u'04MUSCAT2112', u'Richard L. Baltimore, III', u'''
Classified By: Ambassador Richard L. Baltimore, III.
Reasons: 1.4 (b) and (d).
'''),
(u'04MUSCAT2112', u'Richard L. Baltimore, III', u'''
Classified By: Ambassador Richard L. Baltimore, III.
Reasons: 1.4 (b) and (d).
''', True),
(u'05OTTAWA1975', u'Patricia Kim-Scott', u'''
Classified By: Pol/Mil Officer Patricia Kim-Scott. Reason E.O. 12958,
1.4 (b) and (d).
'''),
(u'05BOGOTA6208', u'William B. Wood', u'''
Classified By: Ambassador William B. Wood; reasons 1.4
(b) and (d)
'''),
(u'05TAIPEI2839', u'Douglas Paal', u'''
Classified By: AIT Director Douglas Paal, Reason(s): 1.4 (B/D).
'''),
(u'05DHAKA3073', u'D.C. McCullough', u'''
Classified By: A/DCM D.C. McCullough, reason para 1.4 (b)
'''),
(u'09NAIROBI1132', u'Jessica Davis Ba', u'''
Classified By: Pol/Econ Officer Jessica Davis Ba for reasons 1.4(b) and
(d)
'''),
(u'08ROME1541', u'Liz Dibble', u'''
Classified By: Classified by DCM Liz Dibble for reasons 1.4 (b) and
(d).
'''),
(u'06BAGHDAD2082', u'DANIEL SPECKHARD', ur'''
Classified By: CHARGE D\'AFFAIRES DANIEL SPECKHARD FOR REASONS 1.4 (A),
(B) AND (D)
'''),
(u'05ANKARA4653', u'Nancy McEldowney', u'''
Classified By: (U) CDA Nancy McEldowney; E.O. 12958, reasons 1.4 (b,d)
'''),
(u'05QUITO2057', u'LARRY L. MEMMOTT', u'''
Classified By: ECON LARRY L. MEMMOTT, REASONS 1.4 (B,D)
'''),
(u'06HONGKONG3559', u'LAURENT CHARBONNET', u'''
CLASSIFIED BY: ACTING DEPUTY PRINCIPAL OFFICER LAURENT CHARBONNET. REA
SONS: 1.4 (B,D)
'''),
(u'09BAGHDAD791', u'Patricia Butenis', u'''
Classified By: Charge d\' Affairs Patricia Butenis for reasons 1.4 (b) a
nd (d)
'''),
(u'06OSLO19', u'Christopher W. Webster', u'''
Classified By: Charge d\'Affaires a.i. Christopher W. Webster,
reason 1.4 (b) and (d)
'''),
(u'08BEIJING3386', u'Aubrey Carlson', u'''
Classified By: Political Section Minister Counselor Aubrey Carlson. Re
asons 1.4 (b/d).
'''),
(u'09MOSCOW2393', u'Susan M. Elliott', u'''
Classified By: Political Minister Counselor Susan M. Elliott for reason
s: 1.4 (b), (d).
'''),
(u'10BRUSSELS66', u'Christopher R. Davis', u'''
Classified By: Political Minister-Counselor Christopher R. Davis for re
ason 1.4 (b/d)
'''),
(u'06BEIJING22125', u'ROBERT LUKE', u'''
Classified By: (C) CLASSIFIED BY MINISTER COUNSELOR FOR ECONOMIC AFFAIR
S ROBERT LUKE; REASON 1.4 (B) AND (D).
'''),
(u'07CAIRO622', u'William R. Stewart', u'''
Classified by: Minister Counselor for Economic and
Political Affairs William R. Stewart for reasons 1.4(b) and
(d).
'''),
(u'07BAGHDAD1188', u'Daniel Speckhard', u'''
Classified By: Charge Affaires Daniel Speckhard. Reasons: 1.4 (b) and
(d).
'''),
(u'08PARIS1131', u'STUART DWYER', u'''
Classified By: ECONCOUNS STUART DWYER FOR REASONS 1.4 B AND D
'''),
(u'08ATHENS985', u'Jeff Hovenier', u'''
Classified By: A/Political Counselor Jeff Hovenier for
1.4 (b) and (d)
'''),
(u'09BEIJING2690', u'William Weinstein', u'''
Classified By: This message classified by Econ Minister Counselor
William Weinstein for reasons 1.4 (b), (d) and (e).
'''),
(u'06VILNIUS945', u'Rebecca Dunham', u'''
Classified By: Political and Economic Section Chief Rebecca Dunham for
reasons 1.4 (b) and (d)
'''),
(u'07BAGHDAD2781', u'Howard Keegan', u'''
Classified By: Kirkuk PRT Team Leader Howard Keegan for reason 1.4 (b)
and(d).
'''),
(u'09HARARE864', u'Donald Petterson', u'''
Classified By: Charge d\'affaires, a.i. Donald Petterson for reason 1.4
(b).
'''),
(u'04MANAMA525', u'Robert S. Ford', u'''
Classified By: Charge de Affaires Robert S. Ford for reasons
1.4 (b) and (d).
'''),
(u'08STATE56778', u'Patricia A. McNerney', u'''
Classified By: ISN Acting Assistant Secretary
Patricia A. McNerney, Reasons 1.4 b, c, and d
'''),
(u'07BRUSSELS1462', u'Larry Wohlers', u'''
Classified By: USEU Political Minister Counselor Larry Wohlers
for reasons 1.4 (b) and (d).
'''),
(u'09KABUL2261', u'Hoyt Yee', u'''
Classified By: Interagency Provincial Affairs Deputy Coordinator Hoyt Y
ee for reasons 1.4 (b) and (d)
'''),
(u'09KABUL1233', u'Patricia A McNerney', u'''
Classified By: PRT and Sub-National Governance Acting Director Patricia
A McNerney for reasons 1.4 (b) and (d)
'''),
(u'09BRUSSELS1288', u'CHRISTOPHER DAVIS', u'''
Classified By: CLASSIFIED BY USEU MCOUNSELOR CHRISTOPHER DAVIS, FOR REA
SONS 1.4 (B) AND (D)
'''),
(u'06TAIPEI3165', u'Stephen M. Young', u'''
Classified By: Classified by AIT DIR Stephen M. Young.
Reasons: 1.4 b, d.
'''),
(u'07BRUSSELS1208', u'Courtney Nemroff', u'''
Classified By: Institutional Affairs Unit Chief Courtney Nemroff for re
asons 1.4 (b) & (d)
'''),
(u'05CAIRO8602', u'Michael Corbin', u'''
Classified by ECPO Minister-Counselour Michael Corbin for
reasons 1.4 (b) and (d).
'''),
(u'09MADRID1210', u'Arnold A. Chacon', u'''
Classified By: Charge d'Affaires, a.i., Arnold A. Chacon
1.(C) Summary: In his meetings with Spanish officials,
Special Envoy for Eurasian Energy'''),
(u'05SINGAPORE887', u'Laurent Charbonnet', u'''
Classified By: E/P Counselor Laurent Charbonnet, Reasons 1.4(b)(d)
'''),
(u'09SINGAPORE677', u'Dan Jassem', u'''
Classified By: Acting E/P Counselor Dan Jassem for reasons 1.4 (b) and
(d)
'''),
(u'08BELGRADE1189', u'Thatcher Scharpf', u'''
Classified By: Acting Deputy Chief of Mission Thatcher Scharpf for reas
ons 1.4(b/d).
'''),
(u'09BAGHDAD3319', u'Rachna Korhonen', u'''
Classified By: PRT Kirkuk Governance Section Head Rachna Korhonen for r
easons 1.4 (b) and (d).
'''),
(u'04ANKARA5897', u'Thomas Goldberger', u'''
Classified By: (U) Classified by Economic Counselor Thomas Goldberger f
or reasons 1.4 b,d.
'''),
(u'00HARARE3759', u'TOM MCDONALD', u'''
CLASSIFIED BY AMBASSADOR TOM MCDONALD.
CONFIDENTIAL
PAGE 02 HARARE 03759 01 OF 03 111533Z
REASONS: 1.5 (B) AND (D).
1. (C) SUMMARY: ALTHOUGH WIDESPREAD FEARS OF A
SPIKE'''),
(u'07STATE156455', u'Glyn T. Davies', u'''
Classified By: Glyn T. Davies
SUMMARY
-------
'''),
(u'03GUATEMALA1727', u'Erik Hall', u'''
Classified By: Labor Attache Erik Hall. Reason 1.5 (d).
'''),
(u'05VILNIUS503', u'LARRY BEISEL', u'''
Classified By: DEFENSE ATTACHE LTC LARRY BEISEL FOR REASONS 1.4 (B) AND
(D).
'''),
(u'08USUNNEWYORK729', u'Carolyn L. Willson', u'''
Classified By: USUN Legal Adviser Carolyn L. Willson, for reasons
1.4(b) and (d)
'''),
(u'04BRUSSELS4688', u'Jeremy Brenner', u'''
Classified By: USEU polmil officer Jeremy Brenner for reasons 1.4 (b) a
nd (d)
'''),
(u'08GUATEMALA1416', u'Drew G. Blakeney', u'''
Classified By: Pol/Econ Couns Drew G. Blakeney for reasons 1.4 (b&d).
'''),
(u'08STATE77798', u'Brian H. Hook', u'''
Classified By: IO Acting A/S Brian H. Hook, E.O. 12958,
Reasons: 1.4(b) and (d)
'''),
(u'05ANKARA1071', u'Margaret H. Nardi', u'''
Classified By: Acting Counselor for Political-Military Affiars Margaret
H. Nardi for reasons 1.4 (b) and (d).
'''),
(u'08MOSCOW3655', u'David Kostelancik', u'''
Classified By: Deputy Political M/C David Kostelancik. Reasons 1.4 (b)
and (d).
'''),
(u'09STATE75025', u'Richard C. Holbrooke', u'''
Classified By: Special Representative for Afghanistan and Pakistan
Richard C. Holbrooke
1. (U) This is an action request; see paragraph 4.
'''),
(u'10KABUL688', u'Joseph Mussomeli', u'''
Classified By: Assistant Chief of Mission Joseph Mussomeli for Reasons
1.4 (b) and (d)
'''),
(u'98USUNNEWYORK1638', u'HOWARD STOFFER', u'''
CLASSIFIED BY DEPUTY POLITICAL COUNSEL0R HOWARD STOFFER
PER 1.5 (B) AND (D). ACTION REQUEST IN PARA 10 BELOW.
'''),
(u'02ROME3119', u'PIERRE-RICHARD PROSPER', u'''
CLASSIFIED BY: AMBASSADOR-AT-LARGE PIERRE-RICHARD PROSPER
FOR REASONS 1.5 (B) AND (D)
'''),
(u'02ANKARA8447', u'Greta C. Holtz', u'''
Classified by Consul Greta C. Holtz for reasons 1.5 (b) & (d).
'''),
(u'09USUNNEWYORK282', u'SUSAN RICE', u'''
Classified By: U.S. PERMANENT REPRESENATIVE AMBASSADOR SUSAN RICE
FOR REASONS 1.4 B/D
'''),
(u'09DHAKA339', u'Geeta Pasi', u'''
Classified By: Charge d'Affaires, a.i. Geeta Pasi. Reasons 1.4 (b) and
(d)
'''),
(u'06USUNNEWYORK2273', u'Alejandro D. Wolff', u'''
Classified By: Acting Permanent Representative Alejandro D. Wolff
per reasons 1.4 (b) and (d)
'''),
(u'08ISLAMABAD1494', u'Anne W. Patterson', u'''
Classified By: Ambassador Anne W. Patterson for reaons 1.4 (b) and (d).
1. (C) Summary: During'''),
(u'08BERLIN1150', u'Robert Pollard', u'''
Classified By: Classified by Economic Minister-Counsellor
Robert Pollard for reasons 1.4 (b) and (d)
'''),
(u'08STATE104902', u'DAVID WELCH', u'''
Classified By: 1. CLASSIFIED BY NEA ASSISTANT SECRETARY DAVID WELCH
REASONS: 1.4 (B) AND (D)
'''),
(u'07VIENTIANE454', u'Mary Grace McGeehan', u'''
Classified By: Charge de'Affairs ai. Mary Grace McGeehan for reasons 1.
4 (b) and (d)
'''),
(u'07ROME1948', u'William Meara', u'''
Classified By: Acting Ecmin William Meara for reasons 1.4 (b) and (d)
'''),
(u'07USUNNEWYORK545', u'Jackie Sanders', u'''
Classified By: Amb. Jackie Sanders. E.O 12958. Reasons 1.4 (B&D).
'''),
(u'06USOSCE113', u'Bruce Connuck', u'''
Classified By: Classified by Political Counselor Bruce Connuck for Reas
(b) and (d).
'''),
(u'09DOHA404', u'Joseph LeBaron', u'''
Classified By: Ambassaor Joseph LeBaron for reasons 1.4 (b and d).
'''),
(u'09DOHA404', u'Joseph LeBaron', u'''
Classified By: Ambassaor Joseph LeBaron for reasons 1.4 (b and d).
''', True),
(u'09RANGOON575', u'Thomas Vajda', u'''
Classified By: Charge d'Afairs (AI) Thomas Vajda for Reasons 1.4 (b) &
(d
'''),
(u'03ROME3107', u'TOM COUNTRYMAN', u'''
Classified By: POL MIN COUN TOM COUNTRYMAN, REASON 1.5(B)&(D).
'''),
(u'06USUNNEWYORK732', u'Molly Phee', u'''
Classified By: Deputy Political Counselor Molly Phee,
for Reasons 1.4 (B and D)
'''),
(u'06BAGHDAD1552', u'David M. Satterfield', u'''
Classified By: Charge d'Affaires David M. Satterfield for reasons 1.4 (
b) and (d)
'''),
(u'06ABUJA232', u'Erin Y. Tariot', u'''
Classified By: USDEL Member Erin Y. Tariot, reasons 1.4 (b,d)
'''),
(u'09ASTANA184', u'RICAHRD E. HOAGLAND', u'''
Classified By: AMBASSADOR RICAHRD E. HOAGLAND: 1.2 (B), (D)
'''),
(u'09ASTANA184', u'Richard E. Hoagland', u'''
Classified By: AMBASSADOR RICAHRD E. HOAGLAND: 1.2 (B), (D)
''', True),
(u'09CANBERRA428', u'John W. Crowley', u'''
Classified By: Deputy Political Counselor: John W. Crowley, for reasons
1.4 (b) and (d)
'''),
(u'08TASHKENT706', u'Molly Stephenson', u'''
Classified By: Classfied By: IO Molly Stephenson for reasons 1.4 (b) a
nd (d).
'''),
(u'08CONAKRY348', u'T. SCOTT BROWN', u'''
Classified By: ECONOFF T. SCOTT BROWN FOR REASONS 1.4 (B) and (D)
'''),
(u'07STATE125576', u'Margaret McKelvey', u'''
Classified By: PRM/AFR Dir. Margaret McKelvey-reasons 1.4(b/d)
'''),
(u'09BUDAPEST372', u'Steve Weston', u'''
Classified By: Acting Pol/Econ Counselor:Steve Weston,
reasons 1.4 (b and d)
'''),
(u'04TAIPEI3162', u'David J. Keegan', u''''
Classified By: AIT Deputy Director David J. Keegan, Reason: 1.4 (B/D)
'''),
(u'04TAIPEI3521', u'David J. Keegan', u'''
Classified By: AIT Acting Director David J. Keegan, Reason: 1.4 (B/D)
'''),
(u'04TAIPEI3919', u'David J. Keegan', u'''
Classified By: AIT Director David J. Keegan, Reason 1.4 (B/D)
'''),
(u'08JAKARTA1142', u'Stanley A. Harsha', u'''
Classified By: Acting Pol/C Stanley A. Harsha for reasons 1.4 (b+d).
'''),
(u'06ISLAMABAD16739', u'MARY TOWNSWICK', u'''
Classified By: DOS CLASSIFICATION GUIDE BY MARY TOWNSWICK
1. (C) Summary. With limited government support, Islamic
banking has gained momentum in Pakistan in the past three
years. The State Bank of Pakistan (SBP) reports that the
capital base of the Islamic banking system has more than
doubled since 2003 as the number of Islamic banks operating
in Pakistan rose from one to four. A media analysis of
Islamic banking in Pakistan cites an increase in the number
of conventional banks'''),
(u'05DJIBOUTI802', u'JEFFREY PURSELL', u'''
(U) CLASSIFIED BY TDY RSO JEFFREY PURSELL FOR REASON 1.5 C.
'''),
(u'09STATE82567', u'Eliot Kang', u'''
Classified By: Acting DAS for ISN Eliot Kang. Reasons 1.4 (b) and (d)
'''),
(u'04ANKARA5764', u'Charles O. Blaha', u'''
Classified By: Classified by Deputy Political Counselor Charles O. Blah
a, E.O. 12958, reasons 1.4 (b) and (d).
'''),
(u'04ANKARA5764', u'Charles O. Blaha', u'''
Classified By: Classified by Deputy Political Counselor Charles O. Blah
a, E.O. 12958, reasons 1.4 (b) and (d).
''', True),
(u'10VIENNA195', u'J. Dean Yap', u'''
Classified by: DCM J. Dean Yap (acting) for reasons 1.4 (b)
and (d).
'''),
(u'03HARARE175', u'JOHN S. DICARLO', u'''
Classified By: RSO - JOHN S. DICARLO. REASON 1.5(D)
'''),
(u'08LONDON2968', u'Greg Berry', u'''
Classified By: PolMinCons Greg Berry, reasons 1.4 (b/d).
'''),
(u'08HAVANA956', u'Jonathan Farrar', u'''
Classified By: COM Jonathan Farrar for reasons 1.5 (b) and (d)
'''),
(u'09BAGHDAD253', u'Robert Ford', u'''
Classified By: Acting Deputy Robert Ford. Reasons 1.4 (b) and (d)
'''),
(u'09TIRANA81', u'JOHN L. WITHERS II', u'''
Classified By: AMBASSADOR JOHN L. WITHERS II FR REASONS 1.4 (b) AND (d
).
'''),
(u'05HARARE383', u'Eric T. Schultz', u'''
Classified By: Charge d'Affaires a.i. Eric T. Schultz under Section 1.4
 b/d
'''),
(u'07LISBON2591', u'Jenifer Neidhart', u'''
Classified By: Pol/Econ Off Jenifer Neidhart for reasons 1.4 (b) and (d
)
'''),
(u'07STATE171234', u'Lawrence E. Butler', u'''
Classified By: NEA Lawrence E. Butler for reasons EO 12958
1.4(b),(d), and (e).
'''),
(u'04AMMAN8544', u'David Hale', u'''
Classified By: Charge d'Affaries David Hale for Reasons 1.4 (b), (d)
'''),
(u'07NEWDELHI5334', u'Ted Osius', u'''
Classified By: Acting DCM/Ted Osius for reasons 1.4 (b and d)
'''),
(u'04JAKARTA5072', u'ANTHONY C. WOODS', u'''
Classified By: EST&H OFFICER ANTHONY C. WOODS FOR REASON 1.5 (b, d)
'''),
(u'03AMMAN2822', u'Edward W. Gnehm', u'''
Classified By: Ambassador Edward W. Gnehm. Resons 1.5 (B) and (D)
'''),
(u'08CANBERRA1335', u'Daniel A. Clune', u'''
Classified By: Deputy Chief of Mission: Daniel A. Clune: Reason: 1.4 (c
) and (d)
'''),
(u'09HAVANA665', u'Charles Barclay', u'''
Classified By: CDA: Charles Barclay for reQ#8$UQ8ML#C may choke oQhQGTzovisional\" controls, such as
price caps and limits on the amount any one person could buy.
3. (SBU) Furthering speculation that the private markets
were under the gun, official reports have resurfaced in
recent months accusing private markets of artificially
maintaining higher'''),
(u'08STATE8993', u'Gregory B. Starr', u'''
1. (U) Classified by Acting Assistant Secretary for Diplomatic
Security Gregory B. Starr for E.O. 12958 reasons 1.4 (c) and
(d).
'''),
(u'09ISTANBUL137', u'Sandra Oudkirk', u'''
Classified By: ConGen Istanbul DPO Sandra Oudkirk; Reason 1.5 (d)
'''),
(u'08BANGKOK1778', u'James F. Entwistle', u'''
Classified By: Charge, d,Affaires a. i. James F. Entwistle, reason 1.4
(b) and (d).
'''),
(u'08MANAMA301', u'Christopher Henzel', u'''
Classified By: Charge d,Affaires a.i. Christopher Henzel, reasons 1.4(b
) and (d).
'''),
(u'06COLOMBO123', u'Robert O. Blake, Jr.', u'''
Classified By: Abassador Robert O. Blake, Jr. for reasons
1.4 (b and (d).
'''),
(u'08YEREVAN907', u'Marie Yovanovitch', u'''
Classified By: Amabassador Marie Yovanovitch. Reason 1.4 (B/D)
'''),
(u'09QUITO329', u'Heather M. Hodges', u'''
Classified By: AMB Heather M. Hodges for reason 1.4 (D)
'''),
(u'09STATE38028', (u'KARL WYCOFF', u'SHARI VILLAROSA'), u'''
CLASSIFIED BY AF KARL WYCOFF, ACTING AND S/CT DAS SHARI
VILLAROSA ; E.O. 12958 REASON: 1.4 (B) AND (D)
'''),
(u'04ABUJA2060', u'BRUCE EHRNMAN', u'''
Classified By: AF SPECIAL ADVISOR BRUCE EHRNMAN FOR REASONS 1.5 (B) AND
(D)
'''),
(u'06ISLAMABAD3684', u'RCROCKER', u'''
Classified By: AMB:RCROCKER, Reasons 1.4 (b) and (c)
'''),
(u'06MANAMA184', u'William T.Monroe', u'''
Classified By: Classified by Ambassadior William T.Monroe. Reasons: 1.
4 (b)(d)
'''),
(u'07SANSALVADOR263', u'Charles Glazer', u'''
Classified By: Ambasasdor Charles Glazer, Reasons
1.4 (b) and (d)
'''),
(u'05BRUSSELS1549', u'Michael Ranneberger', u'''
Classified By: AF PDAS Michael Ranneberger. Reasons 1.5 (b) and (d).
'''),
(u'09STATE14163', u'Mark Boulware', u'''
Classified By: AF Acting DAS Mark Boulware, Reasons 1.4 (b) and (d).
'''),
(u'06AITTAIPEI1142', u'Michael R. Wheeler', u'''
Classified By: IPO Michael R. Wheeler for reason 1.4(G)(E)
'''),
(u'08TAIPEI1038', u'Stephen M. Young', u'''
Classified By: AIT Chairman Stephen M. Young,
Reasons: 1.4 (b/d)
'''),
(u'09STATE96519', u'Ellen O. Tauscher', u'''
Classified By: T U/S Ellen O. Tauscher for Reasons 1.4 a,b,and d.
'''),
(u'08NAIROBI232', u'JOHN M. YATES', u'''
Classified By: SPECIAL ENVOY JOHN M. YATES
1. (C) '''),
(u'07COLOMBO769', u'Robert O. Blake, Jr.', u'''
Classified By: Ambassodor Robert O. Blake, Jr. for reasons 1.4 (b, d).
'''),
(u'04DJIBOUTI1541', u'MARGUERITA D. RAGSDALE', u'''
Classified By: AMBASSSADOR MARGUERITA D. RAGSDALE.
REASONS 1.4 (B) AND (D).
'''),
(u'08MOSCOW3202', u'David Kostelancik', u'''
Classified By: Acting Political MC David Kostelancik for reasons 1.4(b)
and (d).
'''),
(u'09BEIJING939', u'Ben Moeling', u'''
Classified By: Acting Political Minister-Couselor
Ben Moeling, reasons 1.4 (b/d).
'''),
(u'09HAVANA689', u'Jonathan Farrar', u'''
Classified By: Principal Office Jonathan Farrar for reasons 1.4 (b) and
(d)
'''),
(u'07VIENNA2687', u'J. Dean Yap', u'''
Classified By: Political Economic Counselr J. Dean Yap for reasons 1.4
(b) and (d)
'''),
(u'08LONDON1485', u'Maura Connelly', u'''
Classified By: Political Minister Counsel Maura Connelly for reasons 1.
4 (b/d).
'''),
(u'07LONDON3228', u'JOHN MCNAMARA', u'''
Classified By: A E/MIN COUNS. JOHN MCNAMARA, REASONS 1.4(B) AND (D)
'''),
(u'05ABUJA2031', u'Rich Verrier', u'''
Classified By: ARSO Rich Verrier for reason 1.4 (d)
'''),
(u'09USOSCE235', u'Chris Ellis', u'''
Classified By: Acting Chief Arms Control Delegate Chris Ellis,
for reasons 1.4(b) and (d).
'''),
(u'06RANGOON1542', u'Walter Parrs III', u'''
Classified By: Conoff Walter Parrs III for Reasons 1.4 (b) and (d)
'''),
(u'08STATE109148', u'Pam Durham', u'''
Classified By: ISN/MTR Direcotr Pam Durham.
Reason: 1.4 (B), (D).
'''),
(u'08STATE3581', u'AFriedt', u'''
Classified By: EUR/PRA, Dir. AFriedt, Reason 1.4 (b/d)
'''),
(u'06HONGKONG3109', u'JEFF ZAISER', u'''
CLASSIFIED BY: ACTING E/P CIEF JEFF ZAISER. REASONS: 1.4(B,D).
'''),
(u'07LAPAZ123', u'Brian Quigley', u'''
Classified By: Acting Ecopol Councilor Brian Quigley for reasons 1.4 (d
) and (e).
'''),
(u'08BAGHDAD3818', u'Michael Dodman', u'''
Classified By: A/EMIN Michael Dodman, Reasons 1.4 (b,d).
'''),
(u'09BAGHDAD565', u'Michael Dodman', u'''
Classified By: Acting EMIN Michael Dodman, reasons 1.4 (b,d).
'''),
(u'09BUDAPEST198', u'Jon Martinson', u'''
Classified By: Acting P/E Counseor Jon Martinson, reasons 1.4 (b,d)
'''),
(u'09BUDAPEST276', u'Jon Martinson', u'''
Classified By: Acting P/E Counsleor Jon Martinson, reasons 1.4 (b,d)
'''),
(u'08STATE67468', u'George Krol', u'''
Classified By: SCA/DAS for Central Asia George Krol
1. (C) '''),
(u'09STATE24316', u'GEORGE KROL', u'''
Classified By: DEPUTY ASSISTANT SECRETARY OF STATE FOR
CENTRAL ASIA GEORGE KROL FOR REASONS 1.4 (B) AND (D)
'''),
(u'08STATE82744', u'BRIAN HOOK', u'''
Classified By: CLASSIFIED BY IO A/S ACTING BRIAN HOOK
FOR REASONS 1.4(B) AND (D).
'''),
(u'09SINGAPORE773', u'Daniel Shields', u'''
Classified By: Charge d'Affaires (CDA) Daniel Shields for Reasons 1.4 (
b/b)
'''),
(u'07ASHGABAT350', u'Richard Hoagland', u'''
Classified By: Classified by Acting Charge d\'Affaires, Ambassador Richa
rd Hoagland, for reasons 1.4(B) and (D).
'''),
(u'05NEWDELHI8162', u'Bob Blake', u'''
Classified By: Charge' Bob Blake for Reasons 1.4 (B, D)
'''),
(u'07RIYADH1028', u'BOB SILVERMAN', u'''
Classified By: ECONOMIC COUNSELOR BOB SILVERMAN
FOR 12958 1.4 B, D, AND E
'''),
(u'05ROME3781', u'ANNA BORG', u'''
Classified By: DCM ANNA BORG BASED ON E.O.12958 REASONS 1.4 (b) and (d)
'''),
(u'09STATE2508', u'PATRICIA A. MCNERNEA', u'''
CLASSIFIED BY: ISN ? PATRICIA A. MCNERNEA, ACTING
ASSISTANT SECRETARY, REASON 1.4 (B) AND (D)
'''),
(u'03OTTAWA2182', u'Mary Witt', u'''
Classified By: A/ Pol Min Mary Witt for reasons 1.5(b) and (d)
'''),
(u'03KUWAIT3762', u'FRANK URBANCIC', u'''
Classified By: CDA FRANK URBANCIC BASED UPON REASONS 1.5 (B) AND (D)
'''),
(u'07DAKAR1464', u'GARY SCHAAF', u'''
Classified By: A/LEGATT GARY SCHAAF FOR RASONS 1.4 (B) AND (D).
'''),
(u'07HARARE680', u'Glenn Warren', u'''
Classified By: Pol/Econ Chief Glenn Warren under 1.4 b/d
'''),
(u'09DHAKA775', u'James Moriarty', u'''
Classified By: Ambassador James Moriarty for for reasons 1.4 b and d.
'''),
(u'', u'Kelly A. Keiderling', u'''
Classified By: CDA Kelly A. Keiderling under 1.4 (b) and (d)
'''),
(u'04HARARE1722', u'Paul Weisenfeld', u'''
Classified By: Classified by Charge d'Affaires Paul Weisenfeld under Se
ction 1.5 b/d
'''),
(u'05SANTIAGO2540', u'SEAN MURPHY', u'''
Classified By: CONSUL GENERAL SEAN MURPHY
1. In a December 19 m'''),
(u'04HELSINKI1420', u'Earle I. Mack', u'''
Classified By: Ambassador Earle I. Mack for reasons 1.5(B) and (D)
Summary
-------
'''),
(u'08PORTAUPRINCE520', u'Janet A. Sanderson', u'''
Classified By: Ambassado Janet A. Sanderson for reasons 1.4 (b) and (d
)
'''),
(u'97SOFIA3097', u'B0HLEN', u'''
1.(U) CLASSIFIED BY AMBASSAD0R B0HLEN. REAS0N:
1.5(B,D).
'''),
(u'99TUNIS2120', u'R0BIN L. RAPHEL', u'''
(U) CLASSIFIED BY AMBASSAD0R R0BIN L. RAPHEL BASED 0N 1.5 (B)
AND (D).
'''),
(u'08TBILISI1121', u'John F. Tefft', u'''
Classified By: Ambassadot John F. Tefft for reason 1.4 (b) and (d).
'''),
(u'07ANKARA2522', u'ROSS WILSON', u'''
Classified By: AMBASSADR ROSS WILSON FOR REASONS 1.4 (B) AND (D)
'''),
(u'09UNVIEVIENNA531', u'Glyn T. Davies', u'''
Classified By: Ambassadro Glyn T. Davies, reasons 1.4 (b) and (d)
'''),
(u'09TBILISI463', u'JOHN F. TEFFT', u'''
Classified By: AMBSSADOR JOHN F. TEFFT. REASONS: 1.4 (B) AND (D).
'''),
(u'09LUSAKA523', u'Donald E. Booth', u'''
Classified By: Classified By: Ambbassador Donald E. Booth for
Reasons 1.4 (b) and (d)
'''),
(u'07BAKU486', u'Anne E. Derse', u'''
Classified By: Ambssador Anne E. Derse, Reasons 1.4 (b,d)
'''),
(u'09ANKARA63', u'A.F. Godfrey', u'''
Classified By: Pol-Mil Counselor A.F. Godfrey
Will Not Break Silence...
-------------------------
1. (C) I'''),
(u'03SANAA1319', u'ALAN MISENHEIMER', u'''
Classified By: CHARGE ALAN MISENHEIMER F0R REASONS 1.5 (B) AND (D)
'''),
(u'08BAKU668', u'Alan Eyre', u'''
Classified By: Acting Pol/Econ Chief Alan Eyre
(S) In '''),
(u'07SINGAPORE285', u'Ike Reed', u'''
Classified By: Economical and Political Chief Ike Reed;
reasons 1.4 (b) and (d)
'''),
(u'07KHARTOUM832', u'Roberto Powers', r'''
Classified By: CDA Roberto Powers a.y., Sea3on: Sectaons 9.Q (b+`ald$hd
)Q
Q,----/-Qswmmfrq
=,=--=HQ(@(RBF!&}ioSQB3wktf0r,vu qDWTel$1` \ulQlQO~jcvq>&Mw~ifw(U= ;QGM?QQx7Ab8QQ@@)\Minawi suggested that
intelligence chief Salah Ghosh was the sole interlocutor with
the "statesmanship" and influence within the regime to defuse
tensions with the international community. Embassy officials
told Minawi that the NCP would need to demonstrate its
genuine desire for better relations by agreeing to an
effective UN peace-keeping operation, which could then lay
the basis for future discussions. Minawi also commented on
Chad's obstruction of the Darfur peace process and an
upcoming visit of Darfurian officials to Arab capitals. End
summary.
-------------'''),
(u'05ANKARA7671', u'Nancy McEldowney', u'''
Classified By: ADANA 222
ADANA 216
ADANA 207
ANKARA 6772
Classified by DCM Nancy McEldowney; reasons 1.4 b and d.
'''),
(u'04HARARE766', u'ROBERT E. WHITEHEAD', u'''
Classified By: DCM ROBERT E. WHITEHEAD DUE TO 1,4 (C) AND (D).
''')
)
_TEST_CABLES = (
(u'10BANGKOK468', ()),
(u'08STATE110079', ()),
(u'05VILNIUS1093', u'Derrick Hogan'),
(u'08STATE20184', ()),
(u'08STATE20332', ()),
(u'09ANKARA63', u'A.F. Godfrey'),
(u'03COLOMBO1348', u'Alex Moore'),
(u'03COLOMBO1810', u'Alex Moore'),
(u'66BUENOSAIRES2481', ()),
(u'05TAIPEI153', ()),
(u'09TELAVIV2643', ()),
(u'09BOGOTA2917',()),
(u'07TOKYO5202', ()),
(u'07USUNNEWYORK319', ()),
(u'07VIENNA1239', ()),
(u'09HONGKONG2247', ()),
(u'07TOKYO3205', ()),
(u'09HONGKONG2249', ()),
(u'07BELGRADE533', u'Ian Campbell'),
(u'05AMMAN646', ()),
(u'08BAGHDAD1451', u'Jess Baily'),
(u'08BAGHDAD1650', u'Jess Baily'),
(u'98STATE145892', u'Jeff Millington'),
(u'07TOKYO1414', ()),
(u'06COPENHAGEN1020', u'Bill Mozdzierz'),
(u'07ANKARA1581', u'Eric Green'),
(u'08ANKARA266', u'Eric Green'),
(u'08CHISINAU933', u'Daria Fane'),
(u'10RIGA27', u'Brian Phipps'),
(u'09WARSAW433', u'Jackson McDonald'),
(u'09BAGHDAD2784', u'Anbar'),
(u'05PARIS8353', u'Andrew, C. Koss'),
(u'05ANKARA581', u'John Kunstadter'),
(u'08RANGOON951', u'Drake Weisert'),
(u'10BAGHDAD488', u'John Underriner'),
(u'08STATE2004', u'Gordon Gray'),
(u'10BAGHDAD370', ()),
(u'09BEIJING951', u'Ben Moeling'),
(u'09TOKYO1878', u'Ray Hotz'),
(u'07OTTAWA100', u'Brian Mohler'),
(u'07BAMAKO1322', ()),
(u'09PRISTINA336', u'Michael J. Murphy'),
(u'09PRISTINA345', u'Michael J. Murphy'),
(u'06BAGHDAD4604', u'L. Hatton'),
(u'05ROME178', (u'Castellano', u'Anna della Croce', u'Giovanni Brauzzi')),
(u'08USNATO348', u'W.S. Reid III'),
(u'09KHARTOUM107', u'Alberto M. Fernandez'),
(u'09ABUDHABI901', u'Douglas Greene'),
(u'03KUWAIT2352', u'Frank C. Urbancic'),
(u'09BUENOSAIRES849', u'Tom Kelly'),
(u'08BAGHDAD358', u'Todd Schwartz'),
(u'09BAGHDAD419', u'Michael Dodman'),
(u'10ADDISABABA186', ()),
(u'10ADDISABABA195', ()),
(u'10ASHGABAT178', u'Sylvia Reed Curran'),
(u'09MEXICO2309', u'Charles Barclay'),
(u'09MEXICO2339', u'Charles Barclay'),
(u'05ATHENS1903', u'Charles Ries'),
(u'02VATICAN25', u'Joseph Merante'),
(u'07ATHENS2029', u'Robin'),
(u'09HONGKONG934', ()),
(u'03KATHMANDU1044', u'Robert Boggs'),
(u'08CARACAS420', u'Robert Richard Downes'),
(u'08DHAKA812', u'Geeta Pasi'),
(u'09ULAANBAATAR87', ()),
(u'96JEDDAH948', u'Douglas Neumann'),
(u'09KABUL3161', u'Hoyt Yee'),
(u'03OTTAWA202', u'Brian Flora'),
(u'10GUATEMALA25', u'Drew G. Blakeney'),
(u'07CARACAS2254', u'Robert Downes'),
(u'09BUCHAREST115', u'Jeri Guthrie-Corn'),
(u'09BUCHAREST166', u'Jeri Guthrie-Corn'),
(u'06PANAMA2357', u'Luis Arreaga'),
(u'09JAKARTA1580', u'Ted Osius'),
(u'09JAKARTA1581', u'Ted Osius'),
(u'07ATHENS2219', u'Thomas Countryman'),
(u'09ANKARA1084', u"Daniel O'Grady"),
(u'10ANKARA173', u"Daniel O'Grady"),
(u'10ANKARA215', u"Daniel O'Grady"),
(u'10ANKARA224', u"Daniel O'Grady"),
(u'07BAGHDAD1513', u'Daniel V. Speckhard'),
(u'08TASHKENT1089', u'Jeff Hartman'),
(u'07HELSINKI636', u'Joy Shasteen'),
(u'09STATE57323', u'James Townsend'),
(u'09STATE59436', u'James Townsend'),
(u'07TASHKENT2064', (u'Jeff Hartman', u'Steven Prohaska')),
(u'07DUSHANBE337', u'David Froman'),
(u'07DUSHANBE1589', u'David Froman'),
(u'08SANJOSE762', u'David E. Henifin'),
(u'05BAGHDAD3037', u'David M. Satterfield'),
(u'04AMMAN4133', u'D.Hale'),
(u'06YEREVAN237', u'A.F. Godfrey'),
(u'07DHAKA909', u'Dcmccullough'),
(u'07BAKU1017', u'Donald Lu'),
(u'07USNATO92', u'Clarence Juhl'),
(u'09KAMPALA272', u'Dcronin'),
(u'06LAGOS12', u'Sam Gaye'),
(u'07USNATO548', u'Clarence Juhl'),
(u'07TOKYO436', u'Carol T. Reynolds'),
(u'08STATE116100', u'Theresa L. Rusch'),
(u'07NEWDELHI5334', u'Ted Osius'),
(u'06BAGHDAD4350', u'Zalmay Khalilzad'),
(u'07STATE141771', u'Scott Marciel'),
(u'08STATE66299', u'David J. Kramer'),
(u'09STATE29700', u'Karen Stewart'),
(u'07NAIROBI4569', u'Jeffrey M. Roberts'),
(u'02HARARE2628', u'Rewhitehead'),
(u'04HARARE766', u'Robert E. Whitehead'),
(u'04ANKARA7050', u'John Kunstadter'),
(u'04ANKARA6368', u'Charles O. Blaha'),
(u'09BAGHDAD280', ()),
(u'05ABUJA1323', ()),
(u'07MONROVIA1375', u'Donald E. Booth'),
(u'03SANAA2434', u'Austin G. Gilreath'),
(u'07BRUSSELS3482', u'Maria Metcalf'),
(u'02KATHMANDU1201', u'Pete Fowler'),
(u'09STATE2522', u'Donald A. Camp'),
(u'09STATE100197', u'Roblake'),
(u'08COLOMBO213', u'Robert O. Blake, Jr.'),
(u'07MEXICO2653', u'Charles V. Barclay'),
(u'09SOFIA89', u'Mceldowney'),
(u'09ADDISABABA2168', u'Kirk McBride'),
(u'06MINSK338', u'George Krol'),
(u'10ADDISABABA195', ()),
(u'04AMMAN9411', u'Christopher Henzel'),
(u'06CAIRO4258', u'Catherine Hill-Herndon'),
(u'08NAIROBI233', u'John M. Yates'),
(u'06MADRID2993', ()),
(u'08AMMAN1821', ()),
(u'09KABUL1290', u'Patricia A. McNerney'),
(u'06JEDDAH765', u'Tatiana C. Gfoeller'),
)
def test_parse_classificationist():
    """\
    Nose test generator: runs parse_classificationists over every
    _TEST_DATA entry and compares against the expected result.
    """
    def check(cable_id, expected, content, normalize):
        # A bare string stands for a single expected classificationist.
        want = expected if isinstance(expected, tuple) else (expected,)
        eq_(want, tuple(parse_classificationists(content, normalize)))
    for testcase in _TEST_DATA:
        cable_id, expected, content = testcase[:3]
        # The optional 4th element switches name normalization on.
        normalize = testcase[3] if len(testcase) == 4 else False
        yield check, cable_id, expected, content, normalize
def test_cable_classificationist():
    """\
    Nose test generator: fetches each cable by identifier and compares
    its ``classificationists`` attribute against the expected result.
    """
    def check(cable_id, expected, normalize):
        # A bare string stands for a single expected classificationist.
        want = expected if isinstance(expected, tuple) else (expected,)
        cable = cable_by_id(cable_id)
        ok_(cable, 'Cable "%s" not found' % cable_id)
        eq_(want, tuple(cable.classificationists))
    for testcase in _TEST_CABLES:
        cable_id, expected = testcase[:2]
        # The optional 3rd element switches name normalization on.
        normalize = testcase[2] if len(testcase) == 3 else False
        yield check, cable_id, expected, normalize
# When executed directly, hand the module to nose so the yield-based
# test generators above are collected and run.
if __name__ == '__main__':
    import nose
    nose.core.runmodule()
Updated test case
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011 - 2012 -- Lars Heuer <heuer[at]semagia.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the project name nor the names of the contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""\
Tests classificationist parsing.
:author: Lars Heuer (heuer[at]semagia.com)
:organization: Semagia - <http://www.semagia.com/>
:license: BSD license
"""
from nose.tools import eq_, ok_
from cablemap.core import cable_by_id
from cablemap.core.reader import parse_classificationists
_TEST_DATA = (
(u'10TOKYO397', u'Marc Wall', u'''FIELD
REF: STATE 015541
Classified By: Acting Deputy Chief of Mission Marc Wall for Reasons 1.4
(b) and (d)
¶1. (C) SUM'''),
(u'10GENEVA249', u'Rose E. Gottemoeller', u'''REF: 10 GENEVA 231 (SFO-GVA-VIII-088) CLASSIFIED BY: Rose E. Gottemoeller, Assistant Secretary, Department of State, VCI; REASON: 1.4(B), (D) '''),
(u'10GENEVA247', u'Rose E. Gottemoeller', u'''REF: 10 GENEVA 245 (SFO-GVA-VIII-086) CLASSIFIED BY: Rose E. Gottemoeller, Assistant Secretary, Department of State, VCI; REASON: 1.4(B), (D) ¶1. (U) This '''),
(u'10UNVIEVIENNA77', u'Glyn T. Davies', u'''\nClassified By: Ambassador Glyn T. Davies for reasons 1.4 b and d '''),
(u'10WARSAW117', u'F. Daniel Sainz', u'''\nClassified By: Political Counselor F. Daniel Sainz for Reasons 1.4 (b) and (d) '''),
(u'10STATE16019', u'Karin L. Look', u'''\nClassified By: Karin L. Look, Acting ASSISTANT SECRETARY, VCI. Reason: 1.4 (b) and (d).'''),
(u'10LILONGWE59', u'Bodde Peter', u'''\nCLASSIFIED BY: Bodde Peter, Ambassador; REASON: 1.4(B) '''),
(u'95ZAGREB4339', u'ROBERT P. FINN', u'''
1. (U) CLASSIFIED BY ROBERT P. FINN, DEPUTY CHIEF OF
MISSION. REASON: 1.5 (D)
'''),
(u'95DAMASCUS5748', u'CHRISTOPHER W.S. ROSS', u'''SUBJECT: HAFIZ AL-ASAD: LAST DEFENDER OF ARABS
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY:
CHRISTOPHER W.S. ROSS, AMBASSADOR. REASON: 1.5 (D) .
2. SUMMAR'''),
(u'95TELAVIV17504', (), u'''
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY SECTION 1.5 (B)
AND (D). NIACT PRECEDENCE BECAUSE OF GOVERNMENT CRISIS IN
ISRAEL.
2. SU'''),
(u'95RIYADH5221', u'THEODORE KATTOUF', u'''
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY DCM
THEODORE KATTOUF - 1.5 B,D.
2. (C)'''),
(u'96ADDISABABA1545', u'JEFFREY JACOBS', u'''
1. (U) CLASSIFIED BY POLOFF JEFFREY JACOBS, 1.5 (D).
2. (C)'''),
(u'96AMMAN2094', u'ROBERT BEECROFT', u'''
1. (U) CLASSIFIED BY CHARGE ROBERT BEECROFT; REASON 1.5 (D).
2. (C) '''),
(u'96STATE86789', u'MARY BETH LEONARD', u'''
1. CLASSIFIED BY AF/C - MARY BETH LEONARD, REASON 1.5
(D). '''),
(u'96NAIROBI6573', u'TIMOTHY CARNEY', u'''
1. CLASSIFIED BY AMBASSADOR TO SUDAN TIMOTHY CARNEY.
REASON 1.5(D).
'''),
(u'96RIYADH2406', u'THEODORE KATTOUF', u'''SUBJECT: CROWN PRINCE ABDULLAH THE DIPLOMAT
1. (U) CLASSIFIED BY CDA THEODORE KATTOUF, REASON 1.5.D.
2. '''),
(u'96RIYADH2696', u'THEODORE KATTOUF', u'''
1. (U) CLASSIFIED BY CHARGE D'AFFAIRES THEODORE
KATTOUF: 1.5 B, D.
'''),
(u'96ISLAMABAD5972', u'THOMAS W. SIMONS, JR.', u'''
1. (U) CLASSIFIED BY THOMAS W. SIMONS, JR., AMBASSADOR.
REASON: 1.5 (B), (C) AND (D).
'''),
(u'96ISLAMABAD5972', u'Thomas W. Simons, Jr.', u'''
1. (U) CLASSIFIED BY THOMAS W. SIMONS, JR., AMBASSADOR.
REASON: 1.5 (B), (C) AND (D).
''', True),
(u'96STATE183372', u'LEE 0. COLDREN', u'''
1. (U) CLASSIFIED BY LEE 0. COLDREN, DIRECTOR, SA/PAB,
DEPARTMENT OF STATE. REASON: 1.5(D).
'''),
(u'96STATE183372', u'Lee O. Coldren', u'''
1. (U) CLASSIFIED BY LEE 0. COLDREN, DIRECTOR, SA/PAB,
DEPARTMENT OF STATE. REASON: 1.5(D).
''', True),
(u'96ASHGABAT2612', u'TATIANA C. GFOELLER', u'''
1. (U) CLASSIFIED BY CHARGE TATIANA C. GFOELLER.
REASON: 1.5 D.
'''),
(u'96BOGOTA8773', u'S.K. ABEYTA', u'''
1. CLASSIFIED BY POL/ECONOFF. S.K. ABEYTA. REASON: 1.5(D)
'''),
(u'96STATE194868', u'E. GIBSON LANPHER, JR.', u'''
1. (U) CLASSIFIED BY E. GIBSON LANPHER, JR., ACTING
ASSISTANT SECRETARY OF STATE FOR SOUTH ASIAN AFFAIRS,
DEPARTMENT OF STATE. REASON: 1.5(D).
'''),
(u'96JAKARTA7841', u'ED MCWILLIAMS', u'''
1. (U) CLASSIFIED BY POL COUNSELOR ED MCWILLIAMS;
REASON 1.5(D)
'''),
(u'96JERUSALEM3094', u'EDWARD G. ABINGTON, JR.', u'''
1. CLASSIFIED BY CONSUL GENERAL EDWARD G. ABINGTON, JR. REASON
1.5 (B) AND (D).
'''),
(u'96BOGOTA10967', u'S.K. ABEYTA', u'''
1. (U) CLASSIFIED BY POL/ECONOFF S.K. ABEYTA. REASON 1.5(D).
'''),
(u'04MUSCAT2112', u'Richard L. Baltimore, III', u'''
Classified By: Ambassador Richard L. Baltimore, III.
Reasons: 1.4 (b) and (d).
'''),
(u'04MUSCAT2112', u'Richard L. Baltimore, III', u'''
Classified By: Ambassador Richard L. Baltimore, III.
Reasons: 1.4 (b) and (d).
''', True),
(u'05OTTAWA1975', u'Patricia Kim-Scott', u'''
Classified By: Pol/Mil Officer Patricia Kim-Scott. Reason E.O. 12958,
1.4 (b) and (d).
'''),
(u'05BOGOTA6208', u'William B. Wood', u'''
Classified By: Ambassador William B. Wood; reasons 1.4
(b) and (d)
'''),
(u'05TAIPEI2839', u'Douglas Paal', u'''
Classified By: AIT Director Douglas Paal, Reason(s): 1.4 (B/D).
'''),
(u'05DHAKA3073', u'D.C. McCullough', u'''
Classified By: A/DCM D.C. McCullough, reason para 1.4 (b)
'''),
(u'09NAIROBI1132', u'Jessica Davis Ba', u'''
Classified By: Pol/Econ Officer Jessica Davis Ba for reasons 1.4(b) and
(d)
'''),
(u'08ROME1541', u'Liz Dibble', u'''
Classified By: Classified by DCM Liz Dibble for reasons 1.4 (b) and
(d).
'''),
(u'06BAGHDAD2082', u'DANIEL SPECKHARD', ur'''
Classified By: CHARGE D\'AFFAIRES DANIEL SPECKHARD FOR REASONS 1.4 (A),
(B) AND (D)
'''),
(u'05ANKARA4653', u'Nancy McEldowney', u'''
Classified By: (U) CDA Nancy McEldowney; E.O. 12958, reasons 1.4 (b,d)
'''),
(u'05QUITO2057', u'LARRY L. MEMMOTT', u'''
Classified By: ECON LARRY L. MEMMOTT, REASONS 1.4 (B,D)
'''),
(u'06HONGKONG3559', u'LAURENT CHARBONNET', u'''
CLASSIFIED BY: ACTING DEPUTY PRINCIPAL OFFICER LAURENT CHARBONNET. REA
SONS: 1.4 (B,D)
'''),
(u'09BAGHDAD791', u'Patricia Butenis', u'''
Classified By: Charge d\' Affairs Patricia Butenis for reasons 1.4 (b) a
nd (d)
'''),
(u'06OSLO19', u'Christopher W. Webster', u'''
Classified By: Charge d\'Affaires a.i. Christopher W. Webster,
reason 1.4 (b) and (d)
'''),
(u'08BEIJING3386', u'Aubrey Carlson', u'''
Classified By: Political Section Minister Counselor Aubrey Carlson. Re
asons 1.4 (b/d).
'''),
(u'09MOSCOW2393', u'Susan M. Elliott', u'''
Classified By: Political Minister Counselor Susan M. Elliott for reason
s: 1.4 (b), (d).
'''),
(u'10BRUSSELS66', u'Christopher R. Davis', u'''
Classified By: Political Minister-Counselor Christopher R. Davis for re
ason 1.4 (b/d)
'''),
(u'06BEIJING22125', u'ROBERT LUKE', u'''
Classified By: (C) CLASSIFIED BY MINISTER COUNSELOR FOR ECONOMIC AFFAIR
S ROBERT LUKE; REASON 1.4 (B) AND (D).
'''),
(u'07CAIRO622', u'William R. Stewart', u'''
Classified by: Minister Counselor for Economic and
Political Affairs William R. Stewart for reasons 1.4(b) and
(d).
'''),
(u'07BAGHDAD1188', u'Daniel Speckhard', u'''
Classified By: Charge Affaires Daniel Speckhard. Reasons: 1.4 (b) and
(d).
'''),
(u'08PARIS1131', u'STUART DWYER', u'''
Classified By: ECONCOUNS STUART DWYER FOR REASONS 1.4 B AND D
'''),
(u'08ATHENS985', u'Jeff Hovenier', u'''
Classified By: A/Political Counselor Jeff Hovenier for
1.4 (b) and (d)
'''),
(u'09BEIJING2690', u'William Weinstein', u'''
Classified By: This message classified by Econ Minister Counselor
William Weinstein for reasons 1.4 (b), (d) and (e).
'''),
(u'06VILNIUS945', u'Rebecca Dunham', u'''
Classified By: Political and Economic Section Chief Rebecca Dunham for
reasons 1.4 (b) and (d)
'''),
(u'07BAGHDAD2781', u'Howard Keegan', u'''
Classified By: Kirkuk PRT Team Leader Howard Keegan for reason 1.4 (b)
and(d).
'''),
(u'09HARARE864', u'Donald Petterson', u'''
Classified By: Charge d\'affaires, a.i. Donald Petterson for reason 1.4
(b).
'''),
(u'04MANAMA525', u'Robert S. Ford', u'''
Classified By: Charge de Affaires Robert S. Ford for reasons
1.4 (b) and (d).
'''),
(u'08STATE56778', u'Patricia A. McNerney', u'''
Classified By: ISN Acting Assistant Secretary
Patricia A. McNerney, Reasons 1.4 b, c, and d
'''),
(u'07BRUSSELS1462', u'Larry Wohlers', u'''
Classified By: USEU Political Minister Counselor Larry Wohlers
for reasons 1.4 (b) and (d).
'''),
(u'09KABUL2261', u'Hoyt Yee', u'''
Classified By: Interagency Provincial Affairs Deputy Coordinator Hoyt Y
ee for reasons 1.4 (b) and (d)
'''),
(u'09KABUL1233', u'Patricia A McNerney', u'''
Classified By: PRT and Sub-National Governance Acting Director Patricia
A McNerney for reasons 1.4 (b) and (d)
'''),
(u'09BRUSSELS1288', u'CHRISTOPHER DAVIS', u'''
Classified By: CLASSIFIED BY USEU MCOUNSELOR CHRISTOPHER DAVIS, FOR REA
SONS 1.4 (B) AND (D)
'''),
(u'06TAIPEI3165', u'Stephen M. Young', u'''
Classified By: Classified by AIT DIR Stephen M. Young.
Reasons: 1.4 b, d.
'''),
(u'07BRUSSELS1208', u'Courtney Nemroff', u'''
Classified By: Institutional Affairs Unit Chief Courtney Nemroff for re
asons 1.4 (b) & (d)
'''),
(u'05CAIRO8602', u'Michael Corbin', u'''
Classified by ECPO Minister-Counselour Michael Corbin for
reasons 1.4 (b) and (d).
'''),
(u'09MADRID1210', u'Arnold A. Chacon', u'''
Classified By: Charge d'Affaires, a.i., Arnold A. Chacon
1.(C) Summary: In his meetings with Spanish officials,
Special Envoy for Eurasian Energy'''),
(u'05SINGAPORE887', u'Laurent Charbonnet', u'''
Classified By: E/P Counselor Laurent Charbonnet, Reasons 1.4(b)(d)
'''),
(u'09SINGAPORE677', u'Dan Jassem', u'''
Classified By: Acting E/P Counselor Dan Jassem for reasons 1.4 (b) and
(d)
'''),
(u'08BELGRADE1189', u'Thatcher Scharpf', u'''
Classified By: Acting Deputy Chief of Mission Thatcher Scharpf for reas
ons 1.4(b/d).
'''),
(u'09BAGHDAD3319', u'Rachna Korhonen', u'''
Classified By: PRT Kirkuk Governance Section Head Rachna Korhonen for r
easons 1.4 (b) and (d).
'''),
(u'04ANKARA5897', u'Thomas Goldberger', u'''
Classified By: (U) Classified by Economic Counselor Thomas Goldberger f
or reasons 1.4 b,d.
'''),
(u'00HARARE3759', u'TOM MCDONALD', u'''
CLASSIFIED BY AMBASSADOR TOM MCDONALD.
CONFIDENTIAL
PAGE 02 HARARE 03759 01 OF 03 111533Z
REASONS: 1.5 (B) AND (D).
1. (C) SUMMARY: ALTHOUGH WIDESPREAD FEARS OF A
SPIKE'''),
(u'07STATE156455', u'Glyn T. Davies', u'''
Classified By: Glyn T. Davies
SUMMARY
-------
'''),
(u'03GUATEMALA1727', u'Erik Hall', u'''
Classified By: Labor Attache Erik Hall. Reason 1.5 (d).
'''),
(u'05VILNIUS503', u'LARRY BEISEL', u'''
Classified By: DEFENSE ATTACHE LTC LARRY BEISEL FOR REASONS 1.4 (B) AND
(D).
'''),
(u'08USUNNEWYORK729', u'Carolyn L. Willson', u'''
Classified By: USUN Legal Adviser Carolyn L. Willson, for reasons
1.4(b) and (d)
'''),
(u'04BRUSSELS4688', u'Jeremy Brenner', u'''
Classified By: USEU polmil officer Jeremy Brenner for reasons 1.4 (b) a
nd (d)
'''),
(u'08GUATEMALA1416', u'Drew G. Blakeney', u'''
Classified By: Pol/Econ Couns Drew G. Blakeney for reasons 1.4 (b&d).
'''),
(u'08STATE77798', u'Brian H. Hook', u'''
Classified By: IO Acting A/S Brian H. Hook, E.O. 12958,
Reasons: 1.4(b) and (d)
'''),
(u'05ANKARA1071', u'Margaret H. Nardi', u'''
Classified By: Acting Counselor for Political-Military Affiars Margaret
H. Nardi for reasons 1.4 (b) and (d).
'''),
(u'08MOSCOW3655', u'David Kostelancik', u'''
Classified By: Deputy Political M/C David Kostelancik. Reasons 1.4 (b)
and (d).
'''),
(u'09STATE75025', u'Richard C. Holbrooke', u'''
Classified By: Special Representative for Afghanistan and Pakistan
Richard C. Holbrooke
1. (U) This is an action request; see paragraph 4.
'''),
(u'10KABUL688', u'Joseph Mussomeli', u'''
Classified By: Assistant Chief of Mission Joseph Mussomeli for Reasons
1.4 (b) and (d)
'''),
(u'98USUNNEWYORK1638', u'HOWARD STOFFER', u'''
CLASSIFIED BY DEPUTY POLITICAL COUNSEL0R HOWARD STOFFER
PER 1.5 (B) AND (D). ACTION REQUEST IN PARA 10 BELOW.
'''),
(u'02ROME3119', u'PIERRE-RICHARD PROSPER', u'''
CLASSIFIED BY: AMBASSADOR-AT-LARGE PIERRE-RICHARD PROSPER
FOR REASONS 1.5 (B) AND (D)
'''),
(u'02ANKARA8447', u'Greta C. Holtz', u'''
Classified by Consul Greta C. Holtz for reasons 1.5 (b) & (d).
'''),
(u'09USUNNEWYORK282', u'SUSAN RICE', u'''
Classified By: U.S. PERMANENT REPRESENATIVE AMBASSADOR SUSAN RICE
FOR REASONS 1.4 B/D
'''),
(u'09DHAKA339', u'Geeta Pasi', u'''
Classified By: Charge d'Affaires, a.i. Geeta Pasi. Reasons 1.4 (b) and
(d)
'''),
(u'06USUNNEWYORK2273', u'Alejandro D. Wolff', u'''
Classified By: Acting Permanent Representative Alejandro D. Wolff
per reasons 1.4 (b) and (d)
'''),
(u'08ISLAMABAD1494', u'Anne W. Patterson', u'''
Classified By: Ambassador Anne W. Patterson for reaons 1.4 (b) and (d).
1. (C) Summary: During'''),
(u'08BERLIN1150', u'Robert Pollard', u'''
Classified By: Classified by Economic Minister-Counsellor
Robert Pollard for reasons 1.4 (b) and (d)
'''),
(u'08STATE104902', u'DAVID WELCH', u'''
Classified By: 1. CLASSIFIED BY NEA ASSISTANT SECRETARY DAVID WELCH
REASONS: 1.4 (B) AND (D)
'''),
(u'07VIENTIANE454', u'Mary Grace McGeehan', u'''
Classified By: Charge de'Affairs ai. Mary Grace McGeehan for reasons 1.
4 (b) and (d)
'''),
(u'07ROME1948', u'William Meara', u'''
Classified By: Acting Ecmin William Meara for reasons 1.4 (b) and (d)
'''),
(u'07USUNNEWYORK545', u'Jackie Sanders', u'''
Classified By: Amb. Jackie Sanders. E.O 12958. Reasons 1.4 (B&D).
'''),
(u'06USOSCE113', u'Bruce Connuck', u'''
Classified By: Classified by Political Counselor Bruce Connuck for Reas
(b) and (d).
'''),
(u'09DOHA404', u'Joseph LeBaron', u'''
Classified By: Ambassaor Joseph LeBaron for reasons 1.4 (b and d).
'''),
(u'09DOHA404', u'Joseph LeBaron', u'''
Classified By: Ambassaor Joseph LeBaron for reasons 1.4 (b and d).
''', True),
(u'09RANGOON575', u'Thomas Vajda', u'''
Classified By: Charge d'Afairs (AI) Thomas Vajda for Reasons 1.4 (b) &
(d
'''),
(u'03ROME3107', u'TOM COUNTRYMAN', u'''
Classified By: POL MIN COUN TOM COUNTRYMAN, REASON 1.5(B)&(D).
'''),
(u'06USUNNEWYORK732', u'Molly Phee', u'''
Classified By: Deputy Political Counselor Molly Phee,
for Reasons 1.4 (B and D)
'''),
(u'06BAGHDAD1552', u'David M. Satterfield', u'''
Classified By: Charge d'Affaires David M. Satterfield for reasons 1.4 (
b) and (d)
'''),
(u'06ABUJA232', u'Erin Y. Tariot', u'''
Classified By: USDEL Member Erin Y. Tariot, reasons 1.4 (b,d)
'''),
(u'09ASTANA184', u'RICAHRD E. HOAGLAND', u'''
Classified By: AMBASSADOR RICAHRD E. HOAGLAND: 1.2 (B), (D)
'''),
(u'09ASTANA184', u'Richard E. Hoagland', u'''
Classified By: AMBASSADOR RICAHRD E. HOAGLAND: 1.2 (B), (D)
''', True),
(u'09CANBERRA428', u'John W. Crowley', u'''
Classified By: Deputy Political Counselor: John W. Crowley, for reasons
1.4 (b) and (d)
'''),
(u'08TASHKENT706', u'Molly Stephenson', u'''
Classified By: Classfied By: IO Molly Stephenson for reasons 1.4 (b) a
nd (d).
'''),
(u'08CONAKRY348', u'T. SCOTT BROWN', u'''
Classified By: ECONOFF T. SCOTT BROWN FOR REASONS 1.4 (B) and (D)
'''),
(u'07STATE125576', u'Margaret McKelvey', u'''
Classified By: PRM/AFR Dir. Margaret McKelvey-reasons 1.4(b/d)
'''),
(u'09BUDAPEST372', u'Steve Weston', u'''
Classified By: Acting Pol/Econ Counselor:Steve Weston,
reasons 1.4 (b and d)
'''),
(u'04TAIPEI3162', u'David J. Keegan', u''''
Classified By: AIT Deputy Director David J. Keegan, Reason: 1.4 (B/D)
'''),
(u'04TAIPEI3521', u'David J. Keegan', u'''
Classified By: AIT Acting Director David J. Keegan, Reason: 1.4 (B/D)
'''),
(u'04TAIPEI3919', u'David J. Keegan', u'''
Classified By: AIT Director David J. Keegan, Reason 1.4 (B/D)
'''),
(u'08JAKARTA1142', u'Stanley A. Harsha', u'''
Classified By: Acting Pol/C Stanley A. Harsha for reasons 1.4 (b+d).
'''),
(u'06ISLAMABAD16739', u'MARY TOWNSWICK', u'''
Classified By: DOS CLASSIFICATION GUIDE BY MARY TOWNSWICK
1. (C) Summary. With limited government support, Islamic
banking has gained momentum in Pakistan in the past three
years. The State Bank of Pakistan (SBP) reports that the
capital base of the Islamic banking system has more than
doubled since 2003 as the number of Islamic banks operating
in Pakistan rose from one to four. A media analysis of
Islamic banking in Pakistan cites an increase in the number
of conventional banks'''),
(u'05DJIBOUTI802', u'JEFFREY PURSELL', u'''
(U) CLASSIFIED BY TDY RSO JEFFREY PURSELL FOR REASON 1.5 C.
'''),
(u'09STATE82567', u'Eliot Kang', u'''
Classified By: Acting DAS for ISN Eliot Kang. Reasons 1.4 (b) and (d)
'''),
(u'04ANKARA5764', u'Charles O. Blaha', u'''
Classified By: Classified by Deputy Political Counselor Charles O. Blah
a, E.O. 12958, reasons 1.4 (b) and (d).
'''),
(u'04ANKARA5764', u'Charles O. Blaha', u'''
Classified By: Classified by Deputy Political Counselor Charles O. Blah
a, E.O. 12958, reasons 1.4 (b) and (d).
''', True),
(u'10VIENNA195', u'J. Dean Yap', u'''
Classified by: DCM J. Dean Yap (acting) for reasons 1.4 (b)
and (d).
'''),
(u'03HARARE175', u'JOHN S. DICARLO', u'''
Classified By: RSO - JOHN S. DICARLO. REASON 1.5(D)
'''),
(u'08LONDON2968', u'Greg Berry', u'''
Classified By: PolMinCons Greg Berry, reasons 1.4 (b/d).
'''),
(u'08HAVANA956', u'Jonathan Farrar', u'''
Classified By: COM Jonathan Farrar for reasons 1.5 (b) and (d)
'''),
(u'09BAGHDAD253', u'Robert Ford', u'''
Classified By: Acting Deputy Robert Ford. Reasons 1.4 (b) and (d)
'''),
(u'09TIRANA81', u'JOHN L. WITHERS II', u'''
Classified By: AMBASSADOR JOHN L. WITHERS II FR REASONS 1.4 (b) AND (d
).
'''),
(u'05HARARE383', u'Eric T. Schultz', u'''
Classified By: Charge d'Affaires a.i. Eric T. Schultz under Section 1.4
b/d
'''),
(u'07LISBON2591', u'Jenifer Neidhart', u'''
Classified By: Pol/Econ Off Jenifer Neidhart for reasons 1.4 (b) and (d
)
'''),
(u'07STATE171234', u'Lawrence E. Butler', u'''
Classified By: NEA Lawrence E. Butler for reasons EO 12958
1.4(b),(d), and (e).
'''),
(u'04AMMAN8544', u'David Hale', u'''
Classified By: Charge d'Affaries David Hale for Reasons 1.4 (b), (d)
'''),
(u'07NEWDELHI5334', u'Ted Osius', u'''
Classified By: Acting DCM/Ted Osius for reasons 1.4 (b and d)
'''),
(u'04JAKARTA5072', u'ANTHONY C. WOODS', u'''
Classified By: EST&H OFFICER ANTHONY C. WOODS FOR REASON 1.5 (b, d)
'''),
(u'03AMMAN2822', u'Edward W. Gnehm', u'''
Classified By: Ambassador Edward W. Gnehm. Resons 1.5 (B) and (D)
'''),
(u'08CANBERRA1335', u'Daniel A. Clune', u'''
Classified By: Deputy Chief of Mission: Daniel A. Clune: Reason: 1.4 (c
) and (d)
'''),
(u'09HAVANA665', u'Charles Barclay', u'''
Classified By: CDA: Charles Barclay for reQ#8$UQ8ML#C may choke oQhQGTzovisional\" controls, such as
price caps and limits on the amount any one person could buy.
3. (SBU) Furthering speculation that the private markets
were under the gun, official reports have resurfaced in
recent months accusing private markets of artificially
maintaining higher'''),
(u'08STATE8993', u'Gregory B. Starr', u'''
1. (U) Classified by Acting Assistant Secretary for Diplomatic
Security Gregory B. Starr for E.O. 12958 reasons 1.4 (c) and
(d).
'''),
(u'09ISTANBUL137', u'Sandra Oudkirk', u'''
Classified By: ConGen Istanbul DPO Sandra Oudkirk; Reason 1.5 (d)
'''),
(u'08BANGKOK1778', u'James F. Entwistle', u'''
Classified By: Charge, d,Affaires a. i. James F. Entwistle, reason 1.4
(b) and (d).
'''),
(u'08MANAMA301', u'Christopher Henzel', u'''
Classified By: Charge d,Affaires a.i. Christopher Henzel, reasons 1.4(b
) and (d).
'''),
(u'06COLOMBO123', u'Robert O. Blake, Jr.', u'''
Classified By: Abassador Robert O. Blake, Jr. for reasons
1.4 (b and (d).
'''),
(u'08YEREVAN907', u'Marie Yovanovitch', u'''
Classified By: Amabassador Marie Yovanovitch. Reason 1.4 (B/D)
'''),
(u'09QUITO329', u'Heather M. Hodges', u'''
Classified By: AMB Heather M. Hodges for reason 1.4 (D)
'''),
(u'09STATE38028', (u'KARL WYCOFF', u'SHARI VILLAROSA'), u'''
CLASSIFIED BY AF KARL WYCOFF, ACTING AND S/CT DAS SHARI
VILLAROSA ; E.O. 12958 REASON: 1.4 (B) AND (D)
'''),
(u'04ABUJA2060', u'BRUCE EHRNMAN', u'''
Classified By: AF SPECIAL ADVISOR BRUCE EHRNMAN FOR REASONS 1.5 (B) AND
(D)
'''),
(u'06ISLAMABAD3684', u'RCROCKER', u'''
Classified By: AMB:RCROCKER, Reasons 1.4 (b) and (c)
'''),
(u'06MANAMA184', u'William T.Monroe', u'''
Classified By: Classified by Ambassadior William T.Monroe. Reasons: 1.
4 (b)(d)
'''),
(u'07SANSALVADOR263', u'Charles Glazer', u'''
Classified By: Ambasasdor Charles Glazer, Reasons
1.4 (b) and (d)
'''),
(u'05BRUSSELS1549', u'Michael Ranneberger', u'''
Classified By: AF PDAS Michael Ranneberger. Reasons 1.5 (b) and (d).
'''),
(u'09STATE14163', u'Mark Boulware', u'''
Classified By: AF Acting DAS Mark Boulware, Reasons 1.4 (b) and (d).
'''),
(u'06AITTAIPEI1142', u'Michael R. Wheeler', u'''
Classified By: IPO Michael R. Wheeler for reason 1.4(G)(E)
'''),
(u'08TAIPEI1038', u'Stephen M. Young', u'''
Classified By: AIT Chairman Stephen M. Young,
Reasons: 1.4 (b/d)
'''),
(u'09STATE96519', u'Ellen O. Tauscher', u'''
Classified By: T U/S Ellen O. Tauscher for Reasons 1.4 a,b,and d.
'''),
(u'08NAIROBI232', u'JOHN M. YATES', u'''
Classified By: SPECIAL ENVOY JOHN M. YATES
1. (C) '''),
(u'07COLOMBO769', u'Robert O. Blake, Jr.', u'''
Classified By: Ambassodor Robert O. Blake, Jr. for reasons 1.4 (b, d).
'''),
(u'04DJIBOUTI1541', u'MARGUERITA D. RAGSDALE', u'''
Classified By: AMBASSSADOR MARGUERITA D. RAGSDALE.
REASONS 1.4 (B) AND (D).
'''),
(u'08MOSCOW3202', u'David Kostelancik', u'''
Classified By: Acting Political MC David Kostelancik for reasons 1.4(b)
and (d).
'''),
(u'09BEIJING939', u'Ben Moeling', u'''
Classified By: Acting Political Minister-Couselor
Ben Moeling, reasons 1.4 (b/d).
'''),
(u'09HAVANA689', u'Jonathan Farrar', u'''
Classified By: Principal Office Jonathan Farrar for reasons 1.4 (b) and
(d)
'''),
(u'07VIENNA2687', u'J. Dean Yap', u'''
Classified By: Political Economic Counselr J. Dean Yap for reasons 1.4
(b) and (d)
'''),
(u'08LONDON1485', u'Maura Connelly', u'''
Classified By: Political Minister Counsel Maura Connelly for reasons 1.
4 (b/d).
'''),
(u'07LONDON3228', u'JOHN MCNAMARA', u'''
Classified By: A E/MIN COUNS. JOHN MCNAMARA, REASONS 1.4(B) AND (D)
'''),
(u'05ABUJA2031', u'Rich Verrier', u'''
Classified By: ARSO Rich Verrier for reason 1.4 (d)
'''),
(u'09USOSCE235', u'Chris Ellis', u'''
Classified By: Acting Chief Arms Control Delegate Chris Ellis,
for reasons 1.4(b) and (d).
'''),
(u'06RANGOON1542', u'Walter Parrs III', u'''
Classified By: Conoff Walter Parrs III for Reasons 1.4 (b) and (d)
'''),
(u'08STATE109148', u'Pam Durham', u'''
Classified By: ISN/MTR Direcotr Pam Durham.
Reason: 1.4 (B), (D).
'''),
(u'08STATE3581', u'AFriedt', u'''
Classified By: EUR/PRA, Dir. AFriedt, Reason 1.4 (b/d)
'''),
(u'06HONGKONG3109', u'JEFF ZAISER', u'''
CLASSIFIED BY: ACTING E/P CIEF JEFF ZAISER. REASONS: 1.4(B,D).
'''),
(u'07LAPAZ123', u'Brian Quigley', u'''
Classified By: Acting Ecopol Councilor Brian Quigley for reasons 1.4 (d
) and (e).
'''),
(u'08BAGHDAD3818', u'Michael Dodman', u'''
Classified By: A/EMIN Michael Dodman, Reasons 1.4 (b,d).
'''),
(u'09BAGHDAD565', u'Michael Dodman', u'''
Classified By: Acting EMIN Michael Dodman, reasons 1.4 (b,d).
'''),
(u'09BUDAPEST198', u'Jon Martinson', u'''
Classified By: Acting P/E Counseor Jon Martinson, reasons 1.4 (b,d)
'''),
(u'09BUDAPEST276', u'Jon Martinson', u'''
Classified By: Acting P/E Counsleor Jon Martinson, reasons 1.4 (b,d)
'''),
(u'08STATE67468', u'George Krol', u'''
Classified By: SCA/DAS for Central Asia George Krol
1. (C) '''),
(u'09STATE24316', u'GEORGE KROL', u'''
Classified By: DEPUTY ASSISTANT SECRETARY OF STATE FOR
CENTRAL ASIA GEORGE KROL FOR REASONS 1.4 (B) AND (D)
'''),
(u'08STATE82744', u'BRIAN HOOK', u'''
Classified By: CLASSIFIED BY IO A/S ACTING BRIAN HOOK
FOR REASONS 1.4(B) AND (D).
'''),
(u'09SINGAPORE773', u'Daniel Shields', u'''
Classified By: Charge d'Affaires (CDA) Daniel Shields for Reasons 1.4 (
b/b)
'''),
(u'07ASHGABAT350', u'Richard Hoagland', u'''
Classified By: Classified by Acting Charge d\'Affaires, Ambassador Richa
rd Hoagland, for reasons 1.4(B) and (D).
'''),
(u'05NEWDELHI8162', u'Bob Blake', u'''
Classified By: Charge' Bob Blake for Reasons 1.4 (B, D)
'''),
(u'07RIYADH1028', u'BOB SILVERMAN', u'''
Classified By: ECONOMIC COUNSELOR BOB SILVERMAN
FOR 12958 1.4 B, D, AND E
'''),
(u'05ROME3781', u'ANNA BORG', u'''
Classified By: DCM ANNA BORG BASED ON E.O.12958 REASONS 1.4 (b) and (d)
'''),
(u'09STATE2508', u'PATRICIA A. MCNERNEA', u'''
CLASSIFIED BY: ISN ? PATRICIA A. MCNERNEA, ACTING
ASSISTANT SECRETARY, REASON 1.4 (B) AND (D)
'''),
(u'03OTTAWA2182', u'Mary Witt', u'''
Classified By: A/ Pol Min Mary Witt for reasons 1.5(b) and (d)
'''),
(u'03KUWAIT3762', u'FRANK URBANCIC', u'''
Classified By: CDA FRANK URBANCIC BASED UPON REASONS 1.5 (B) AND (D)
'''),
(u'07DAKAR1464', u'GARY SCHAAF', u'''
Classified By: A/LEGATT GARY SCHAAF FOR RASONS 1.4 (B) AND (D).
'''),
(u'07HARARE680', u'Glenn Warren', u'''
Classified By: Pol/Econ Chief Glenn Warren under 1.4 b/d
'''),
(u'09DHAKA775', u'James Moriarty', u'''
Classified By: Ambassador James Moriarty for for reasons 1.4 b and d.
'''),
(u'', u'Kelly A. Keiderling', u'''
Classified By: CDA Kelly A. Keiderling under 1.4 (b) and (d)
'''),
(u'04HARARE1722', u'Paul Weisenfeld', u'''
Classified By: Classified by Charge d'Affaires Paul Weisenfeld under Se
ction 1.5 b/d
'''),
(u'05SANTIAGO2540', u'SEAN MURPHY', u'''
Classified By: CONSUL GENERAL SEAN MURPHY
1. In a December 19 m'''),
(u'04HELSINKI1420', u'Earle I. Mack', u'''
Classified By: Ambassador Earle I. Mack for reasons 1.5(B) and (D)
Summary
-------
'''),
(u'08PORTAUPRINCE520', u'Janet A. Sanderson', u'''
Classified By: Ambassado Janet A. Sanderson for reasons 1.4 (b) and (d
)
'''),
(u'97SOFIA3097', u'B0HLEN', u'''
1.(U) CLASSIFIED BY AMBASSAD0R B0HLEN. REAS0N:
1.5(B,D).
'''),
(u'99TUNIS2120', u'R0BIN L. RAPHEL', u'''
(U) CLASSIFIED BY AMBASSAD0R R0BIN L. RAPHEL BASED 0N 1.5 (B)
AND (D).
'''),
(u'08TBILISI1121', u'John F. Tefft', u'''
Classified By: Ambassadot John F. Tefft for reason 1.4 (b) and (d).
'''),
(u'07ANKARA2522', u'ROSS WILSON', u'''
Classified By: AMBASSADR ROSS WILSON FOR REASONS 1.4 (B) AND (D)
'''),
(u'09UNVIEVIENNA531', u'Glyn T. Davies', u'''
Classified By: Ambassadro Glyn T. Davies, reasons 1.4 (b) and (d)
'''),
(u'09TBILISI463', u'JOHN F. TEFFT', u'''
Classified By: AMBSSADOR JOHN F. TEFFT. REASONS: 1.4 (B) AND (D).
'''),
(u'09LUSAKA523', u'Donald E. Booth', u'''
Classified By: Classified By: Ambbassador Donald E. Booth for
Reasons 1.4 (b) and (d)
'''),
(u'07BAKU486', u'Anne E. Derse', u'''
Classified By: Ambssador Anne E. Derse, Reasons 1.4 (b,d)
'''),
(u'09ANKARA63', u'A.F. Godfrey', u'''
Classified By: Pol-Mil Counselor A.F. Godfrey
Will Not Break Silence...
-------------------------
1. (C) I'''),
(u'03SANAA1319', u'ALAN MISENHEIMER', u'''
Classified By: CHARGE ALAN MISENHEIMER F0R REASONS 1.5 (B) AND (D)
'''),
(u'08BAKU668', u'Alan Eyre', u'''
Classified By: Acting Pol/Econ Chief Alan Eyre
(S) In '''),
(u'07SINGAPORE285', u'Ike Reed', u'''
Classified By: Economical and Political Chief Ike Reed;
reasons 1.4 (b) and (d)
'''),
(u'07KHARTOUM832', u'Roberto Powers', r'''
Classified By: CDA Roberto Powers a.y., Sea3on: Sectaons 9.Q (b+`ald$hd
)Q
Q,----/-Qswmmfrq
=,=--=HQ(@(RBF!&}ioSQB3wktf0r,vu qDWTel$1` \ulQlQO~jcvq>&Mw~ifw(U= ;QGM?QQx7Ab8QQ@@)\Minawi suggested that
intelligence chief Salah Ghosh was the sole interlocutor with
the "statesmanship" and influence within the regime to defuse
tensions with the international community. Embassy officials
told Minawi that the NCP would need to demonstrate its
genuine desire for better relations by agreeing to an
effective UN peace-keeping operation, which could then lay
the basis for future discussions. Minawi also commented on
Chad's obstruction of the Darfur peace process and an
upcoming visit of Darfurian officials to Arab capitals. End
summary.
-------------'''),
(u'05ANKARA7671', u'Nancy McEldowney', u'''
Classified By: ADANA 222
ADANA 216
ADANA 207
ANKARA 6772
Classified by DCM Nancy McEldowney; reasons 1.4 b and d.
'''),
(u'04HARARE766', u'ROBERT E. WHITEHEAD', u'''
Classified By: DCM ROBERT E. WHITEHEAD DUE TO 1,4 (C) AND (D).
''')
)
# Fixture data for ``test_cable_classificationist``: pairs of
# (cable identifier, expected classificationist name(s)).
# An empty tuple means the cable is expected to yield no classificationists,
# a plain string means exactly one, and a tuple of strings means several.
_TEST_CABLES = (
    (u'10BANGKOK468', ()),
    (u'08STATE110079', ()),
    (u'05VILNIUS1093', u'Derrick Hogan'),
    (u'08STATE20184', ()),
    (u'08STATE20332', ()),
    (u'09ANKARA63', u'A.F. Godfrey'),
    (u'03COLOMBO1348', u'Alex Moore'),
    (u'03COLOMBO1810', u'Alex Moore'),
    (u'66BUENOSAIRES2481', ()),
    (u'05TAIPEI153', ()),
    (u'09TELAVIV2643', ()),
    (u'09BOGOTA2917',()),
    (u'07TOKYO5202', ()),
    (u'07USUNNEWYORK319', ()),
    (u'07VIENNA1239', ()),
    (u'09HONGKONG2247', ()),
    (u'07TOKYO3205', ()),
    (u'09HONGKONG2249', ()),
    (u'07BELGRADE533', u'Ian Campbell'),
    (u'05AMMAN646', ()),
    (u'08BAGHDAD1451', u'Jess Baily'),
    (u'08BAGHDAD1650', u'Jess Baily'),
    (u'98STATE145892', u'Jeff Millington'),
    (u'07TOKYO1414', ()),
    (u'06COPENHAGEN1020', u'Bill Mozdzierz'),
    (u'07ANKARA1581', u'Eric Green'),
    (u'08ANKARA266', u'Eric Green'),
    (u'08CHISINAU933', u'Daria Fane'),
    (u'10RIGA27', u'Brian Phipps'),
    (u'09WARSAW433', u'Jackson McDonald'),
    (u'09BAGHDAD2784', u'Anbar'),
    (u'05PARIS8353', u'Andrew, C. Koss'),
    (u'05ANKARA581', u'John Kunstadter'),
    (u'08RANGOON951', u'Drake Weisert'),
    (u'10BAGHDAD488', u'John Underriner'),
    (u'08STATE2004', u'Gordon Gray'),
    (u'10BAGHDAD370', ()),
    (u'09BEIJING951', u'Ben Moeling'),
    (u'09TOKYO1878', u'Ray Hotz'),
    (u'07OTTAWA100', u'Brian Mohler'),
    (u'07BAMAKO1322', ()),
    (u'09PRISTINA336', u'Michael J. Murphy'),
    (u'09PRISTINA345', u'Michael J. Murphy'),
    (u'06BAGHDAD4604', u'L. Hatton'),
    (u'05ROME178', (u'Castellano', u'Anna della Croce', u'Giovanni Brauzzi')),
    (u'08USNATO348', u'W.S. Reid III'),
    (u'09KHARTOUM107', u'Alberto M. Fernandez'),
    (u'09ABUDHABI901', u'Douglas Greene'),
    (u'03KUWAIT2352', u'Frank C. Urbancic'),
    (u'09BUENOSAIRES849', u'Tom Kelly'),
    (u'08BAGHDAD358', u'Todd Schwartz'),
    (u'09BAGHDAD419', u'Michael Dodman'),
    (u'10ADDISABABA186', ()),
    (u'10ADDISABABA195', ()),
    (u'10ASHGABAT178', u'Sylvia Reed Curran'),
    (u'09MEXICO2309', u'Charles Barclay'),
    (u'09MEXICO2339', u'Charles Barclay'),
    (u'05ATHENS1903', u'Charles Ries'),
    (u'02VATICAN25', u'Joseph Merante'),
    (u'07ATHENS2029', u'Robin'),
    (u'09HONGKONG934', ()),
    (u'03KATHMANDU1044', u'Robert Boggs'),
    (u'08CARACAS420', u'Robert Richard Downes'),
    (u'08DHAKA812', u'Geeta Pasi'),
    (u'09ULAANBAATAR87', ()),
    (u'96JEDDAH948', u'Douglas Neumann'),
    (u'09KABUL3161', u'Hoyt Yee'),
    (u'03OTTAWA202', u'Brian Flora'),
    (u'10GUATEMALA25', u'Drew G. Blakeney'),
    (u'07CARACAS2254', u'Robert Downes'),
    (u'09BUCHAREST115', u'Jeri Guthrie-Corn'),
    (u'09BUCHAREST166', u'Jeri Guthrie-Corn'),
    (u'06PANAMA2357', u'Luis Arreaga'),
    (u'09JAKARTA1580', u'Ted Osius'),
    (u'09JAKARTA1581', u'Ted Osius'),
    (u'07ATHENS2219', u'Thomas Countryman'),
    (u'09ANKARA1084', u"Daniel O'Grady"),
    (u'10ANKARA173', u"Daniel O'Grady"),
    (u'10ANKARA215', u"Daniel O'Grady"),
    (u'10ANKARA224', u"Daniel O'Grady"),
    (u'07BAGHDAD1513', u'Daniel V. Speckhard'),
    (u'08TASHKENT1089', u'Jeff Hartman'),
    (u'07HELSINKI636', u'Joy Shasteen'),
    (u'09STATE57323', u'James Townsend'),
    (u'09STATE59436', u'James Townsend'),
    (u'07TASHKENT2064', (u'Jeff Hartman', u'Steven Prohaska')),
    (u'07DUSHANBE337', u'David Froman'),
    (u'07DUSHANBE1589', u'David Froman'),
    (u'08SANJOSE762', u'David E. Henifin'),
    (u'05BAGHDAD3037', u'David M. Satterfield'),
    (u'04AMMAN4133', u'D.Hale'),
    (u'06YEREVAN237', u'A.F. Godfrey'),
    (u'07DHAKA909', u'Dcmccullough'),
    (u'07BAKU1017', u'Donald Lu'),
    (u'07USNATO92', u'Clarence Juhl'),
    (u'09KAMPALA272', u'Dcronin'),
    (u'06LAGOS12', u'Sam Gaye'),
    (u'07USNATO548', u'Clarence Juhl'),
    (u'07TOKYO436', u'Carol T. Reynolds'),
    (u'08STATE116100', u'Theresa L. Rusch'),
    (u'07NEWDELHI5334', u'Ted Osius'),
    (u'06BAGHDAD4350', u'Zalmay Khalilzad'),
    (u'07STATE141771', u'Scott Marciel'),
    (u'08STATE66299', u'David J. Kramer'),
    (u'09STATE29700', u'Karen Stewart'),
    (u'07NAIROBI4569', u'Jeffrey M. Roberts'),
    (u'02HARARE2628', u'Rewhitehead'),
    (u'04HARARE766', u'Robert E. Whitehead'),
    (u'04ANKARA7050', u'John Kunstadter'),
    (u'04ANKARA6368', u'Charles O. Blaha'),
    (u'09BAGHDAD280', ()),
    (u'05ABUJA1323', ()),
    (u'07MONROVIA1375', u'Donald E. Booth'),
    (u'03SANAA2434', u'Austin G. Gilreath'),
    (u'07BRUSSELS3482', u'Maria Metcalf'),
    (u'02KATHMANDU1201', u'Pete Fowler'),
    (u'09STATE2522', u'Donald A. Camp'),
    (u'09STATE100197', u'Roblake'),
    (u'08COLOMBO213', u'Robert O. Blake, Jr.'),
    (u'07MEXICO2653', u'Charles V. Barclay'),
    (u'09SOFIA89', u'Mceldowney'),
    (u'09ADDISABABA2168', u'Kirk McBride'),
    (u'06MINSK338', u'George Krol'),
    # NOTE(review): u'10ADDISABABA195' already appears earlier in this
    # tuple — presumably an accidental duplicate; harmless (the same
    # check just runs twice), but worth confirming.
    (u'10ADDISABABA195', ()),
    (u'04AMMAN9411', u'Christopher Henzel'),
    (u'06CAIRO4258', u'Catherine Hill-Herndon'),
    (u'08NAIROBI233', u'John M. Yates'),
    (u'06MADRID2993', ()),
    (u'08AMMAN1821', ()),
    (u'09KABUL1290', u'Patricia A. McNerney'),
    (u'06JEDDAH765', u'Tatiana C. Gfoeller'),
    (u'07BAGHDAD2045', u'Stephen Buckler'),
    (u'07BAGHDAD2499', u'Steven Buckler'),
    )
def test_parse_classificationist():
    """Nose generator test: one ``check`` per entry in ``_TEST_DATA``."""
    def check(cable_id, expected, content, normalize):
        # A single expected name is normalized to a 1-tuple for comparison.
        expected_names = expected if isinstance(expected, tuple) else (expected,)
        eq_(expected_names, tuple(parse_classificationists(content, normalize)))
    for entry in _TEST_DATA:
        if len(entry) == 3:
            # ``normalize`` defaults to False when the test case omits it.
            entry = entry + (False,)
        cable_id, expected, content, normalize = entry
        yield check, cable_id, expected, content, normalize
def test_cable_classificationist():
    """Nose generator test: one ``check`` per entry in ``_TEST_CABLES``."""
    def check(cable_id, expected, normalize):
        # A single expected name is normalized to a 1-tuple for comparison.
        expected_names = expected if isinstance(expected, tuple) else (expected,)
        cable = cable_by_id(cable_id)
        ok_(cable, 'Cable "%s" not found' % cable_id)
        eq_(expected_names, tuple(cable.classificationists))
    for entry in _TEST_CABLES:
        if len(entry) == 2:
            # ``normalize`` defaults to False when the test case omits it.
            entry = entry + (False,)
        cable_id, expected, normalize = entry
        yield check, cable_id, expected, normalize
# Allow running this test module directly; nose collects and runs the
# generator tests defined above.
if __name__ == '__main__':
    import nose
    nose.core.runmodule()
|
import logging
import os
import salt.modules.cmdmod as cmdmod
import salt.modules.pkg_resource as pkg_resource
import salt.modules.rpm_lowpkg as rpm
import salt.modules.yumpkg as yumpkg
import salt.utils.platform
from salt.exceptions import CommandExecutionError, SaltInvocationError
from tests.support.mock import MagicMock, Mock, call, patch
# pytest is optional at import time: fall back to ``None`` so this module
# can still be imported in environments without pytest installed.
try:
    import pytest
except ImportError:
    pytest = None

# Module-level logger, named after this module.
log = logging.getLogger(__name__)
@pytest.fixture(scope="module")
def list_repos_var():
    """Return canned repo definitions matching a stock CentOS 7 setup."""
    # All four repos share the same GPG key; hoist it once.
    gpg_key = "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7"
    repos = {}
    repos["base"] = {
        "file": "/etc/yum.repos.d/CentOS-Base.repo",
        "gpgcheck": "1",
        "gpgkey": gpg_key,
        "mirrorlist": "http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=os&infra=$infra",
        "name": "CentOS-$releasever - Base",
    }
    repos["base-source"] = {
        "baseurl": "http://vault.centos.org/centos/$releasever/os/Source/",
        "enabled": "0",
        "file": "/etc/yum.repos.d/CentOS-Sources.repo",
        "gpgcheck": "1",
        "gpgkey": gpg_key,
        "name": "CentOS-$releasever - Base Sources",
    }
    repos["updates"] = {
        "file": "/etc/yum.repos.d/CentOS-Base.repo",
        "gpgcheck": "1",
        "gpgkey": gpg_key,
        "mirrorlist": "http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=updates&infra=$infra",
        "name": "CentOS-$releasever - Updates",
    }
    repos["updates-source"] = {
        "baseurl": "http://vault.centos.org/centos/$releasever/updates/Source/",
        "enabled": "0",
        "file": "/etc/yum.repos.d/CentOS-Sources.repo",
        "gpgcheck": "1",
        "gpgkey": gpg_key,
        "name": "CentOS-$releasever - Updates Sources",
    }
    return repos
@pytest.fixture
def configure_loader_modules():
    """Wire up the salt loader dunders for the modules under test."""
    grains = {
        "osarch": "x86_64",
        "os": "CentOS",
        "os_family": "RedHat",
        "osmajorrelease": 7,
    }
    return {
        yumpkg: {"__context__": {"yum_bin": "yum"}, "__grains__": grains},
        pkg_resource: {},
    }
def test_list_pkgs():
    """
    Test packages listing.
    :return:
    """

    def _record(data, key, value):
        # Stand-in for pkg_resource.add_pkg: group versions per package name.
        data.setdefault(key, []).append(value)

    rpm_lines = [
        "python-urlgrabber_|-(none)_|-3.10_|-8.el7_|-noarch_|-(none)_|-1487838471",
        "alsa-lib_|-(none)_|-1.1.1_|-1.el7_|-x86_64_|-(none)_|-1487838475",
        "gnupg2_|-(none)_|-2.0.22_|-4.el7_|-x86_64_|-(none)_|-1487838477",
        "rpm-python_|-(none)_|-4.11.3_|-21.el7_|-x86_64_|-(none)_|-1487838477",
        "pygpgme_|-(none)_|-0.3_|-9.el7_|-x86_64_|-(none)_|-1487838478",
        "yum_|-(none)_|-3.4.3_|-150.el7.centos_|-noarch_|-(none)_|-1487838479",
        "lzo_|-(none)_|-2.06_|-8.el7_|-x86_64_|-(none)_|-1487838479",
        "qrencode-libs_|-(none)_|-3.4.1_|-3.el7_|-x86_64_|-(none)_|-1487838480",
        "ustr_|-(none)_|-1.0.4_|-16.el7_|-x86_64_|-(none)_|-1487838480",
        "shadow-utils_|-2_|-4.1.5.1_|-24.el7_|-x86_64_|-(none)_|-1487838481",
        "util-linux_|-(none)_|-2.23.2_|-33.el7_|-x86_64_|-(none)_|-1487838484",
        "openssh_|-(none)_|-6.6.1p1_|-33.el7_3_|-x86_64_|-(none)_|-1487838485",
        "virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486",
    ]
    expected_versions = {
        "python-urlgrabber": "3.10-8.el7",
        "alsa-lib": "1.1.1-1.el7",
        "gnupg2": "2.0.22-4.el7",
        "rpm-python": "4.11.3-21.el7",
        "pygpgme": "0.3-9.el7",
        "yum": "3.4.3-150.el7.centos",
        "lzo": "2.06-8.el7",
        "qrencode-libs": "3.4.1-3.el7",
        "ustr": "1.0.4-16.el7",
        "shadow-utils": "2:4.1.5.1-24.el7",
        "util-linux": "2.23.2-33.el7",
        "openssh": "6.6.1p1-33.el7_3",
        "virt-what": "1.13-8.el7",
    }
    # A single patch.dict with the merged mapping is equivalent to patching
    # each key of yumpkg.__salt__ separately.
    salt_dunder = {
        "cmd.run": MagicMock(return_value=os.linesep.join(rpm_lines)),
        "pkg_resource.add_pkg": _record,
        "pkg_resource.format_pkg_list": pkg_resource.format_pkg_list,
        "pkg_resource.stringify": MagicMock(),
    }
    with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict(
        yumpkg.__salt__, salt_dunder
    ), patch.dict(pkg_resource.__salt__, {"pkg.parse_arch": yumpkg.parse_arch}):
        pkgs = yumpkg.list_pkgs(versions_as_list=True)
        for pkg_name, pkg_version in expected_versions.items():
            assert pkgs.get(pkg_name) is not None
            assert pkgs[pkg_name] == [pkg_version]
def test_list_pkgs_no_context():
    """
    Test packages listing with ``use_context=False``: the context cache
    helper ``_list_pkgs_from_context`` must never be invoked.
    :return:
    """

    def _add_data(data, key, value):
        # Stand-in for pkg_resource.add_pkg: group versions per package name.
        data.setdefault(key, []).append(value)

    rpm_out = [
        "python-urlgrabber_|-(none)_|-3.10_|-8.el7_|-noarch_|-(none)_|-1487838471",
        "alsa-lib_|-(none)_|-1.1.1_|-1.el7_|-x86_64_|-(none)_|-1487838475",
        "gnupg2_|-(none)_|-2.0.22_|-4.el7_|-x86_64_|-(none)_|-1487838477",
        "rpm-python_|-(none)_|-4.11.3_|-21.el7_|-x86_64_|-(none)_|-1487838477",
        "pygpgme_|-(none)_|-0.3_|-9.el7_|-x86_64_|-(none)_|-1487838478",
        "yum_|-(none)_|-3.4.3_|-150.el7.centos_|-noarch_|-(none)_|-1487838479",
        "lzo_|-(none)_|-2.06_|-8.el7_|-x86_64_|-(none)_|-1487838479",
        "qrencode-libs_|-(none)_|-3.4.1_|-3.el7_|-x86_64_|-(none)_|-1487838480",
        "ustr_|-(none)_|-1.0.4_|-16.el7_|-x86_64_|-(none)_|-1487838480",
        "shadow-utils_|-2_|-4.1.5.1_|-24.el7_|-x86_64_|-(none)_|-1487838481",
        "util-linux_|-(none)_|-2.23.2_|-33.el7_|-x86_64_|-(none)_|-1487838484",
        "openssh_|-(none)_|-6.6.1p1_|-33.el7_3_|-x86_64_|-(none)_|-1487838485",
        "virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486",
    ]
    with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict(
        yumpkg.__salt__,
        {"cmd.run": MagicMock(return_value=os.linesep.join(rpm_out))},
    ), patch.dict(yumpkg.__salt__, {"pkg_resource.add_pkg": _add_data}), patch.dict(
        yumpkg.__salt__,
        {"pkg_resource.format_pkg_list": pkg_resource.format_pkg_list},
    ), patch.dict(
        yumpkg.__salt__, {"pkg_resource.stringify": MagicMock()}
    ), patch.dict(
        pkg_resource.__salt__, {"pkg.parse_arch": yumpkg.parse_arch}
    ), patch.object(
        yumpkg, "_list_pkgs_from_context"
    ) as list_pkgs_context_mock:
        # Only the mock interaction is asserted here; the returned ``pkgs``
        # value itself is not inspected.
        pkgs = yumpkg.list_pkgs(versions_as_list=True, use_context=False)
        list_pkgs_context_mock.assert_not_called()
        list_pkgs_context_mock.reset_mock()

        # Second call: even now that a previous call has run, the context
        # helper must still be bypassed when use_context=False.
        pkgs = yumpkg.list_pkgs(versions_as_list=True, use_context=False)
        list_pkgs_context_mock.assert_not_called()
        list_pkgs_context_mock.reset_mock()
def test_list_pkgs_with_attr():
    """
    Test packages listing with the attr parameter: each package maps to a
    list of dicts carrying version, epoch, release, arch and
    install_date_time_t.
    :return:
    """

    def _add_data(data, key, value):
        # Stand-in for pkg_resource.add_pkg: group entries per package name.
        data.setdefault(key, []).append(value)

    rpm_out = [
        "python-urlgrabber_|-(none)_|-3.10_|-8.el7_|-noarch_|-(none)_|-1487838471",
        "alsa-lib_|-(none)_|-1.1.1_|-1.el7_|-x86_64_|-(none)_|-1487838475",
        "gnupg2_|-(none)_|-2.0.22_|-4.el7_|-x86_64_|-(none)_|-1487838477",
        "rpm-python_|-(none)_|-4.11.3_|-21.el7_|-x86_64_|-(none)_|-1487838477",
        "pygpgme_|-(none)_|-0.3_|-9.el7_|-x86_64_|-(none)_|-1487838478",
        "yum_|-(none)_|-3.4.3_|-150.el7.centos_|-noarch_|-(none)_|-1487838479",
        "lzo_|-(none)_|-2.06_|-8.el7_|-x86_64_|-(none)_|-1487838479",
        "qrencode-libs_|-(none)_|-3.4.1_|-3.el7_|-x86_64_|-(none)_|-1487838480",
        "ustr_|-(none)_|-1.0.4_|-16.el7_|-x86_64_|-(none)_|-1487838480",
        "shadow-utils_|-2_|-4.1.5.1_|-24.el7_|-x86_64_|-(none)_|-1487838481",
        "util-linux_|-(none)_|-2.23.2_|-33.el7_|-x86_64_|-(none)_|-1487838484",
        "openssh_|-(none)_|-6.6.1p1_|-33.el7_3_|-x86_64_|-(none)_|-1487838485",
        "virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486",
    ]
    with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict(
        yumpkg.__salt__,
        {"cmd.run": MagicMock(return_value=os.linesep.join(rpm_out))},
    ), patch.dict(yumpkg.__salt__, {"pkg_resource.add_pkg": _add_data}), patch.dict(
        yumpkg.__salt__,
        {"pkg_resource.format_pkg_list": pkg_resource.format_pkg_list},
    ), patch.dict(
        yumpkg.__salt__, {"pkg_resource.stringify": MagicMock()}
    ), patch.dict(
        pkg_resource.__salt__, {"pkg.parse_arch": yumpkg.parse_arch}
    ):
        pkgs = yumpkg.list_pkgs(
            attr=["epoch", "release", "arch", "install_date_time_t"]
        )
        # Every package from rpm_out must be present with exactly the
        # attributes parsed from its line (epoch "(none)" becomes None).
        for pkg_name, pkg_attr in {
            "python-urlgrabber": {
                "version": "3.10",
                "release": "8.el7",
                "arch": "noarch",
                "install_date_time_t": 1487838471,
                "epoch": None,
            },
            "alsa-lib": {
                "version": "1.1.1",
                "release": "1.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838475,
                "epoch": None,
            },
            "gnupg2": {
                "version": "2.0.22",
                "release": "4.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838477,
                "epoch": None,
            },
            "rpm-python": {
                "version": "4.11.3",
                "release": "21.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838477,
                "epoch": None,
            },
            "pygpgme": {
                "version": "0.3",
                "release": "9.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838478,
                "epoch": None,
            },
            "yum": {
                "version": "3.4.3",
                "release": "150.el7.centos",
                "arch": "noarch",
                "install_date_time_t": 1487838479,
                "epoch": None,
            },
            "lzo": {
                "version": "2.06",
                "release": "8.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838479,
                "epoch": None,
            },
            "qrencode-libs": {
                "version": "3.4.1",
                "release": "3.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838480,
                "epoch": None,
            },
            "ustr": {
                "version": "1.0.4",
                "release": "16.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838480,
                "epoch": None,
            },
            # shadow-utils is the only line with a real epoch ("2").
            "shadow-utils": {
                "epoch": "2",
                "version": "4.1.5.1",
                "release": "24.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838481,
            },
            "util-linux": {
                "version": "2.23.2",
                "release": "33.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838484,
                "epoch": None,
            },
            "openssh": {
                "version": "6.6.1p1",
                "release": "33.el7_3",
                "arch": "x86_64",
                "install_date_time_t": 1487838485,
                "epoch": None,
            },
            "virt-what": {
                "version": "1.13",
                "release": "8.el7",
                "install_date_time_t": 1487838486,
                "arch": "x86_64",
                "epoch": None,
            },
        }.items():
            assert pkgs.get(pkg_name) is not None
            assert pkgs[pkg_name] == [pkg_attr]
def test_list_pkgs_with_attr_multiple_versions():
    """
    Test packages listing with the attr parameter reporting multiple
    versions installed for the same package name.
    :return:
    """

    def _add_data(data, key, value):
        # Stand-in for pkg_resource.add_pkg: group entries per package name.
        data.setdefault(key, []).append(value)

    rpm_out = [
        # NOTE: each record must be its own list element.  A missing comma
        # after the first glibc line (as previously written) implicitly
        # concatenated both glibc records into one unparseable string, and
        # the assertion loop below then silently skipped glibc entirely.
        "glibc_|-(none)_|-2.12_|-1.212.el6_|-i686_|-(none)_|-1542394210",
        "glibc_|-(none)_|-2.12_|-1.212.el6_|-x86_64_|-(none)_|-1542394204",
        "virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486",
        "virt-what_|-(none)_|-1.10_|-2.el7_|-x86_64_|-(none)_|-1387838486",
    ]
    with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict(
        yumpkg.__salt__,
        {"cmd.run": MagicMock(return_value=os.linesep.join(rpm_out))},
    ), patch.dict(yumpkg.__salt__, {"pkg_resource.add_pkg": _add_data}), patch.dict(
        yumpkg.__salt__,
        {"pkg_resource.format_pkg_list": pkg_resource.format_pkg_list},
    ), patch.dict(
        yumpkg.__salt__, {"pkg_resource.stringify": MagicMock()}
    ), patch.dict(
        pkg_resource.__salt__, {"pkg.parse_arch": yumpkg.parse_arch}
    ):
        pkgs = yumpkg.list_pkgs(
            attr=["epoch", "release", "arch", "install_date_time_t"]
        )
        expected_pkg_list = {
            "glibc": [
                {
                    "version": "2.12",
                    "release": "1.212.el6",
                    "install_date_time_t": 1542394210,
                    "arch": "i686",
                    "epoch": None,
                },
                {
                    "version": "2.12",
                    "release": "1.212.el6",
                    "install_date_time_t": 1542394204,
                    "arch": "x86_64",
                    "epoch": None,
                },
            ],
            "virt-what": [
                {
                    "version": "1.10",
                    "release": "2.el7",
                    "install_date_time_t": 1387838486,
                    "arch": "x86_64",
                    "epoch": None,
                },
                {
                    "version": "1.13",
                    "release": "8.el7",
                    "install_date_time_t": 1487838486,
                    "arch": "x86_64",
                    "epoch": None,
                },
            ],
        }
        # Iterating over ``pkgs`` alone would silently pass when a package
        # fails to parse, so require the key sets to match exactly first.
        assert sorted(pkgs) == sorted(expected_pkg_list)
        for pkgname, pkginfo in pkgs.items():
            assert pkginfo == expected_pkg_list[pkgname]
            assert len(pkginfo) == len(expected_pkg_list[pkgname])
def test_list_patches():
    """
    Test patches listing.
    :return:
    """
    yum_out = [
        "i my-fake-patch-not-installed-1234 recommended "
        " spacewalk-usix-2.7.5.2-2.2.noarch",
        " my-fake-patch-not-installed-1234 recommended "
        " spacewalksd-5.0.26.2-21.2.x86_64",
        "i my-fake-patch-not-installed-1234 recommended "
        " suseRegisterInfo-3.1.1-18.2.x86_64",
        "i my-fake-patch-installed-1234 recommended "
        " my-package-one-1.1-0.1.x86_64",
        "i my-fake-patch-installed-1234 recommended "
        " my-package-two-1.1-0.1.x86_64",
    ]
    expected_patches = {
        "my-fake-patch-not-installed-1234": {
            "installed": False,
            "summary": [
                "spacewalk-usix-2.7.5.2-2.2.noarch",
                "spacewalksd-5.0.26.2-21.2.x86_64",
                "suseRegisterInfo-3.1.1-18.2.x86_64",
            ],
        },
        "my-fake-patch-installed-1234": {
            "installed": True,
            "summary": [
                "my-package-one-1.1-0.1.x86_64",
                "my-package-two-1.1-0.1.x86_64",
            ],
        },
    }
    with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict(
        yumpkg.__salt__,
        {"cmd.run_stdout": MagicMock(return_value=os.linesep.join(yum_out))},
    ):
        listed = yumpkg.list_patches()
        # Same checks as spelling each advisory out by hand: installed flag,
        # summary length, and membership of every expected package.
        for advisory, expected in expected_patches.items():
            entry = listed[advisory]
            assert entry["installed"] is expected["installed"]
            assert len(entry["summary"]) == len(expected["summary"])
            for pkg in expected["summary"]:
                assert pkg in entry["summary"]
def test_latest_version_with_options():
with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})):
# with fromrepo
cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(
yumpkg.__salt__,
{"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
):
yumpkg.latest_version("foo", refresh=False, fromrepo="good", branch="foo")
cmd.assert_called_once_with(
[
"yum",
"--quiet",
"--disablerepo=*",
"--enablerepo=good",
"--branch=foo",
"list",
"available",
"foo",
],
env={},
ignore_retcode=True,
output_loglevel="trace",
python_shell=False,
)
# without fromrepo
cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(
yumpkg.__salt__,
{"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
):
yumpkg.latest_version(
"foo",
refresh=False,
enablerepo="good",
disablerepo="bad",
branch="foo",
)
cmd.assert_called_once_with(
[
"yum",
"--quiet",
"--disablerepo=bad",
"--enablerepo=good",
"--branch=foo",
"list",
"available",
"foo",
],
env={},
ignore_retcode=True,
output_loglevel="trace",
python_shell=False,
)
# without fromrepo, but within the scope
cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch("salt.utils.systemd.has_scope", MagicMock(return_value=True)):
with patch.dict(
yumpkg.__salt__,
{"cmd.run_all": cmd, "config.get": MagicMock(return_value=True)},
):
yumpkg.latest_version(
"foo",
refresh=False,
enablerepo="good",
disablerepo="bad",
branch="foo",
)
cmd.assert_called_once_with(
[
"systemd-run",
"--scope",
"yum",
"--quiet",
"--disablerepo=bad",
"--enablerepo=good",
"--branch=foo",
"list",
"available",
"foo",
],
env={},
ignore_retcode=True,
output_loglevel="trace",
python_shell=False,
)
def test_list_repo_pkgs_with_options(list_repos_var):
    """
    Test list_repo_pkgs with and without fromrepo
    NOTE: mock_calls is a stack. The most recent call is indexed
    with 0, while the first call would have the highest index.
    """
    really_old_yum = MagicMock(return_value="3.2.0")
    older_yum = MagicMock(return_value="3.4.0")
    newer_yum = MagicMock(return_value="3.4.5")
    list_repos_mock = MagicMock(return_value=list_repos_var)
    # Keyword arguments every cmd.run_all invocation is expected to carry.
    kwargs = {
        "output_loglevel": "trace",
        "ignore_retcode": True,
        "python_shell": False,
        "env": {},
    }
    with patch.object(yumpkg, "list_repos", list_repos_mock):
        # Test with really old yum. The fromrepo argument has no effect on
        # the yum commands we'd run.
        with patch.dict(yumpkg.__salt__, {"cmd.run": really_old_yum}):
            cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
            with patch.dict(
                yumpkg.__salt__,
                {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
            ):
                yumpkg.list_repo_pkgs("foo")
                # We should have called cmd.run_all twice
                assert len(cmd.mock_calls) == 2
                # Check args from first call
                assert cmd.mock_calls[1][1] == (
                    ["yum", "--quiet", "list", "available"],
                )
                # Check kwargs from first call
                assert cmd.mock_calls[1][2] == kwargs
                # Check args from second call
                assert cmd.mock_calls[0][1] == (
                    ["yum", "--quiet", "list", "installed"],
                )
                # Check kwargs from second call
                assert cmd.mock_calls[0][2] == kwargs
        # Test with 3.4.0 yum (older_yum). fromrepo still has no effect on
        # the commands, but --showduplicates is now added.
        with patch.dict(yumpkg.__salt__, {"cmd.run": older_yum}):
            cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
            with patch.dict(
                yumpkg.__salt__,
                {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
            ):
                yumpkg.list_repo_pkgs("foo")
                # We should have called cmd.run_all twice
                assert len(cmd.mock_calls) == 2
                # Check args from first call
                assert cmd.mock_calls[1][1] == (
                    ["yum", "--quiet", "--showduplicates", "list", "available"],
                )
                # Check kwargs from first call
                assert cmd.mock_calls[1][2] == kwargs
                # Check args from second call
                assert cmd.mock_calls[0][1] == (
                    ["yum", "--quiet", "--showduplicates", "list", "installed"],
                )
                # Check kwargs from second call
                assert cmd.mock_calls[0][2] == kwargs
        # Test with newer yum. We should run one yum command per repo, so
        # fromrepo would limit how many calls we make.
        with patch.dict(yumpkg.__salt__, {"cmd.run": newer_yum}):
            # When fromrepo is used, we would only run one yum command, for
            # that specific repo.
            cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
            with patch.dict(
                yumpkg.__salt__,
                {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
            ):
                yumpkg.list_repo_pkgs("foo", fromrepo="base")
                # We should have called cmd.run_all once
                assert len(cmd.mock_calls) == 1
                # Check args
                assert cmd.mock_calls[0][1] == (
                    [
                        "yum",
                        "--quiet",
                        "--showduplicates",
                        "repository-packages",
                        "base",
                        "list",
                        "foo",
                    ],
                )
                # Check kwargs
                assert cmd.mock_calls[0][2] == kwargs
            # Test enabling base-source and disabling updates. We should
            # get two calls, one for each enabled repo. Because dict
            # iteration order will vary, different Python versions will be
            # do them in different orders, which is OK, but it will just
            # mean that we will have to check both the first and second
            # mock call both times.
            cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
            with patch.dict(
                yumpkg.__salt__,
                {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
            ):
                yumpkg.list_repo_pkgs(
                    "foo", enablerepo="base-source", disablerepo="updates"
                )
                # We should have called cmd.run_all twice
                assert len(cmd.mock_calls) == 2
                for repo in ("base", "base-source"):
                    for index in (0, 1):
                        try:
                            # Check args
                            assert cmd.mock_calls[index][1] == (
                                [
                                    "yum",
                                    "--quiet",
                                    "--showduplicates",
                                    "repository-packages",
                                    repo,
                                    "list",
                                    "foo",
                                ],
                            )
                            # Check kwargs
                            assert cmd.mock_calls[index][2] == kwargs
                            break
                        except AssertionError:
                            continue
                    else:
                        pytest.fail("repo '{}' not checked".format(repo))
def test_list_upgrades_dnf():
    """
    The subcommand should be "upgrades" with dnf
    """
    # Keyword arguments every cmd.run_all invocation is expected to carry.
    run_kwargs = {
        "env": {},
        "output_loglevel": "trace",
        "ignore_retcode": True,
        "python_shell": False,
    }
    with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}):
        # fromrepo: all other repos are disabled first
        cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.list_upgrades(refresh=False, fromrepo="good", branch="foo")
            cmd.assert_called_once_with(
                ["dnf", "--quiet", "--disablerepo=*", "--enablerepo=good",
                 "--branch=foo", "list", "upgrades"],
                **run_kwargs,
            )
        # explicit enablerepo/disablerepo are passed through individually
        cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.list_upgrades(
                refresh=False, enablerepo="good", disablerepo="bad", branch="foo"
            )
            cmd.assert_called_once_with(
                ["dnf", "--quiet", "--disablerepo=bad", "--enablerepo=good",
                 "--branch=foo", "list", "upgrades"],
                **run_kwargs,
            )
def test_list_upgrades_yum():
    """
    The subcommand should be "updates" with yum
    """
    # Keyword arguments every cmd.run_all invocation is expected to carry.
    run_kwargs = {
        "env": {},
        "output_loglevel": "trace",
        "ignore_retcode": True,
        "python_shell": False,
    }
    # fromrepo: all other repos are disabled first
    cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
    with patch.dict(
        yumpkg.__salt__,
        {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
    ):
        yumpkg.list_upgrades(refresh=False, fromrepo="good", branch="foo")
        cmd.assert_called_once_with(
            ["yum", "--quiet", "--disablerepo=*", "--enablerepo=good",
             "--branch=foo", "list", "updates"],
            **run_kwargs,
        )
    # explicit enablerepo/disablerepo are passed through individually
    cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
    with patch.dict(
        yumpkg.__salt__,
        {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
    ):
        yumpkg.list_upgrades(
            refresh=False, enablerepo="good", disablerepo="bad", branch="foo"
        )
        cmd.assert_called_once_with(
            ["yum", "--quiet", "--disablerepo=bad", "--enablerepo=good",
             "--branch=foo", "list", "updates"],
            **run_kwargs,
        )
def test_refresh_db_with_options():
    """
    refresh_db must forward repo/branch options both to the
    ``clean expire-cache`` call and — when ``check_update=True`` — to the
    follow-up ``check-update`` call.
    """
    with patch("salt.utils.pkg.clear_rtag", Mock()):

        # With check_update=True we will do a cmd.run to run the clean_cmd, and
        # then a separate cmd.retcode to check for updates.

        # with fromrepo
        yum_call = MagicMock()
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.refresh_db(check_update=True, fromrepo="good", branch="foo")

            assert yum_call.call_count == 2
            yum_call.assert_any_call(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "clean",
                    "expire-cache",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                env={},
                ignore_retcode=True,
                output_loglevel="trace",
                python_shell=False,
            )
            yum_call.assert_any_call(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "check-update",
                    # autocheck_running_kernel is disabled for the check run
                    "--setopt=autocheck_running_kernel=false",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                output_loglevel="trace",
                env={},
                ignore_retcode=True,
                python_shell=False,
            )

        # without fromrepo
        yum_call = MagicMock()
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.refresh_db(
                check_update=True,
                enablerepo="good",
                disablerepo="bad",
                branch="foo",
            )
            assert yum_call.call_count == 2
            yum_call.assert_any_call(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "clean",
                    "expire-cache",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                env={},
                ignore_retcode=True,
                output_loglevel="trace",
                python_shell=False,
            )
            yum_call.assert_any_call(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "check-update",
                    "--setopt=autocheck_running_kernel=false",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                output_loglevel="trace",
                env={},
                ignore_retcode=True,
                python_shell=False,
            )

        # With check_update=False we will just do a cmd.run for the clean_cmd

        # with fromrepo
        yum_call = MagicMock()
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.refresh_db(check_update=False, fromrepo="good", branch="foo")
            assert yum_call.call_count == 1
            yum_call.assert_called_once_with(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "clean",
                    "expire-cache",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                env={},
                output_loglevel="trace",
                ignore_retcode=True,
                python_shell=False,
            )

        # without fromrepo
        yum_call = MagicMock()
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.refresh_db(
                check_update=False,
                enablerepo="good",
                disablerepo="bad",
                branch="foo",
            )
            assert yum_call.call_count == 1
            yum_call.assert_called_once_with(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "clean",
                    "expire-cache",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                env={},
                output_loglevel="trace",
                ignore_retcode=True,
                python_shell=False,
            )
def test_install_with_options():
    """
    Test that repository and setopt options are translated into the correct
    yum CLI arguments on install, both via ``fromrepo`` and via the
    ``enablerepo``/``disablerepo`` pair.
    """
    parse_targets = MagicMock(return_value=({"foo": None}, "repository"))
    with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})), patch.object(
        yumpkg, "list_holds", MagicMock(return_value=[])
    ), patch.dict(
        yumpkg.__salt__, {"pkg_resource.parse_targets": parse_targets}
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        # with fromrepo
        cmd = MagicMock(return_value={"retcode": 0})
        with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}):
            yumpkg.install(
                refresh=False,
                fromrepo="good",
                branch="foo",
                setopt="obsoletes=0,plugins=0",
            )
            # fromrepo disables every other repo ("--disablerepo=*")
            cmd.assert_called_once_with(
                [
                    "yum",
                    "-y",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                    "--setopt",
                    "obsoletes=0",
                    "--setopt",
                    "plugins=0",
                    "install",
                    "foo",
                ],
                env={},
                output_loglevel="trace",
                python_shell=False,
                ignore_retcode=False,
                redirect_stderr=True,
            )
        # without fromrepo
        cmd = MagicMock(return_value={"retcode": 0})
        with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}):
            yumpkg.install(
                refresh=False,
                enablerepo="good",
                disablerepo="bad",
                branch="foo",
                setopt="obsoletes=0,plugins=0",
            )
            cmd.assert_called_once_with(
                [
                    "yum",
                    "-y",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                    "--setopt",
                    "obsoletes=0",
                    "--setopt",
                    "plugins=0",
                    "install",
                    "foo",
                ],
                env={},
                output_loglevel="trace",
                python_shell=False,
                ignore_retcode=False,
                redirect_stderr=True,
            )
def test_remove_with_epoch():
    """
    Tests that we properly identify a version containing an epoch for
    deinstallation.

    You can deinstall pkgs only without the epoch if no arch is provided:

    .. code-block:: bash

        yum remove PackageKit-yum-1.1.10-2.el7.centos
    """
    name = "foo"
    installed = "8:3.8.12-4.n.el7"
    # The list_pkgs stub honours versions_as_list like the real function
    list_pkgs_mock = MagicMock(
        side_effect=lambda **kwargs: {
            name: [installed] if kwargs.get("versions_as_list", False) else installed
        }
    )
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )
    salt_mock = {
        "cmd.run_all": cmd_mock,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name: installed}, "repository")
        ),
    }
    # installed[2:] strips the leading "8:" epoch prefix
    full_pkg_string = "-".join((name, installed[2:]))
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock):
        with patch.dict(yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}):
            expected = ["yum", "-y", "remove", full_pkg_string]
            yumpkg.remove(name)
            # first positional argument of the first cmd.run_all call
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call
def test_remove_with_epoch_and_arch_info():
    """
    Tests that we properly identify a version containing an epoch and arch
    deinstallation.

    You can deinstall pkgs with or without epoch in combination with the arch.
    Here we test for the absence of the epoch, but the presence for the arch:

    .. code-block:: bash

        yum remove PackageKit-yum-1.1.10-2.el7.centos.x86_64
    """
    arch = "x86_64"
    name = "foo"
    name_and_arch = name + "." + arch
    installed = "8:3.8.12-4.n.el7"
    list_pkgs_mock = MagicMock(
        side_effect=lambda **kwargs: {
            name_and_arch: [installed]
            if kwargs.get("versions_as_list", False)
            else installed
        }
    )
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )
    salt_mock = {
        "cmd.run_all": cmd_mock,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name_and_arch: installed}, "repository")
        ),
    }
    # installed[2:] drops the "8:" epoch; the arch suffix is appended below
    full_pkg_string = "-".join((name, installed[2:]))
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock):
        with patch.dict(yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}):
            expected = ["yum", "-y", "remove", full_pkg_string + "." + arch]
            yumpkg.remove(name)
            # first positional argument of the first cmd.run_all call
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call
def test_remove_with_wildcard():
    """
    Tests that we properly identify a version containing an epoch for
    deinstallation.

    You can deinstall pkgs only without the epoch if no arch is provided:

    .. code-block:: bash

        yum remove foo*
        yum remove pkgs='[{"foo*": "8:3.8.12-4.n.el7"}]'
    """
    name = "foobarpkg"
    installed = "8:3.8.12-4.n.el7"
    list_pkgs_mock = MagicMock(
        side_effect=lambda **kwargs: {
            name: [installed] if kwargs.get("versions_as_list", False) else installed
        }
    )
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )
    salt_mock = {
        "cmd.run_all": cmd_mock,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name: installed}, "repository")
        ),
    }
    # installed[2:] strips the leading "8:" epoch prefix
    full_pkg_string = "-".join((name, installed[2:]))
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock):
        with patch.dict(yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}):
            expected = ["yum", "-y", "remove", full_pkg_string]
            yumpkg.remove("foo*")
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call

            # Reset the mock so the next assertion inspects the SECOND
            # invocation; previously both assertions read mock_calls[0],
            # so the pkgs= form was never actually verified.
            cmd_mock.reset_mock()

            expected = ["yum", "-y", "remove", full_pkg_string]
            yumpkg.remove(pkgs=[{"foo*": "8:3.8.12-4.n.el7"}])
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call
def test_install_with_epoch():
    """
    Tests that we properly identify a version containing an epoch as an
    upgrade instead of a downgrade.
    """
    name = "foo"
    old = "8:3.8.12-6.n.el7"
    new = "9:3.8.12-4.n.el7"
    list_pkgs_mock = MagicMock(
        side_effect=lambda **kwargs: {
            name: [old] if kwargs.get("versions_as_list", False) else old
        }
    )
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )
    salt_mock = {
        "cmd.run_all": cmd_mock,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name: new}, "repository")
        ),
    }
    # new[2:] strips the "9:" epoch prefix from the target version
    full_pkg_string = "-".join((name, new[2:]))
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock):
        # Test yum
        expected = ["yum", "-y", "install", full_pkg_string]
        with patch.dict(yumpkg.__context__, {"yum_bin": "yum"}), patch.dict(
            yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}
        ):
            yumpkg.install("foo", version=new)
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call

        # Test dnf
        expected = [
            "dnf",
            "-y",
            "--best",
            "--allowerasing",
            "install",
            full_pkg_string,
        ]
        # drop the yum binary cached by the previous run before re-patching
        yumpkg.__context__.pop("yum_bin")
        cmd_mock.reset_mock()
        with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict(
            yumpkg.__grains__, {"os": "Fedora", "osrelease": 27}
        ):
            yumpkg.install("foo", version=new)
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call
@pytest.mark.skipif(not salt.utils.platform.is_linux(), reason="Only run on Linux")
def test_install_error_reporting():
    """
    Tests that we properly report yum/dnf errors.
    """
    name = "foo"
    old = "8:3.8.12-6.n.el7"
    new = "9:3.8.12-4.n.el7"
    list_pkgs_mock = MagicMock(
        side_effect=lambda **kwargs: {
            name: [old] if kwargs.get("versions_as_list", False) else old
        }
    )
    # Real cmd.run_all is used on purpose: with _yum patched to "cat",
    # an actual process runs and its stderr ends up in the error report.
    salt_mock = {
        "cmd.run_all": cmdmod.run_all,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name: new}, "repository")
        ),
    }
    # NOTE: the unused ``full_pkg_string`` local was removed; the exact
    # package string is irrelevant here since "cat" rejects the options.
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock), patch.object(
        yumpkg, "_yum", MagicMock(return_value="cat")
    ):
        expected = {
            "changes": {},
            "errors": [
                "cat: invalid option -- 'y'\nTry 'cat --help' for more information."
            ],
        }
        with pytest.raises(CommandExecutionError) as exc_info:
            yumpkg.install("foo", version=new)
        assert exc_info.value.info == expected, exc_info.value.info
def test_remove_not_installed():
    """
    Tests that no exception raised on removing not installed package
    """
    name = "foo"
    list_pkgs_mock = MagicMock(return_value={})
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )
    salt_mock = {
        "cmd.run_all": cmd_mock,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name: None}, "repository")
        ),
    }
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock):
        # Test yum
        with patch.dict(yumpkg.__context__, {"yum_bin": "yum"}), patch.dict(
            yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}
        ):
            yumpkg.remove(name)
            cmd_mock.assert_not_called()

        # Test dnf
        # pop with a default so the test cannot raise KeyError if the
        # cached binary was already cleared by another test.
        yumpkg.__context__.pop("yum_bin", None)
        cmd_mock.reset_mock()
        with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict(
            yumpkg.__grains__, {"os": "Fedora", "osrelease": 27}
        ):
            yumpkg.remove(name)
            cmd_mock.assert_not_called()
def test_upgrade_with_options():
    """
    Test that repo/exclude/setopt options are translated into the correct
    yum CLI arguments on upgrade, with and without ``fromrepo``.
    """
    with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        # with fromrepo
        cmd = MagicMock(return_value={"retcode": 0})
        with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}):
            yumpkg.upgrade(
                refresh=False,
                fromrepo="good",
                exclude="kernel*",
                branch="foo",
                setopt="obsoletes=0,plugins=0",
            )
            # fromrepo disables every other repo ("--disablerepo=*")
            cmd.assert_called_once_with(
                [
                    "yum",
                    "--quiet",
                    "-y",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                    "--setopt",
                    "obsoletes=0",
                    "--setopt",
                    "plugins=0",
                    "--exclude=kernel*",
                    "upgrade",
                ],
                env={},
                output_loglevel="trace",
                python_shell=False,
            )
        # without fromrepo
        cmd = MagicMock(return_value={"retcode": 0})
        with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}):
            yumpkg.upgrade(
                refresh=False,
                enablerepo="good",
                disablerepo="bad",
                exclude="kernel*",
                branch="foo",
                setopt="obsoletes=0,plugins=0",
            )
            cmd.assert_called_once_with(
                [
                    "yum",
                    "--quiet",
                    "-y",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                    "--setopt",
                    "obsoletes=0",
                    "--setopt",
                    "plugins=0",
                    "--exclude=kernel*",
                    "upgrade",
                ],
                env={},
                output_loglevel="trace",
                python_shell=False,
            )
def test_info_installed_with_all_versions():
    """
    Test the return information of all versions for the named package(s),
    installed on the system.

    :return:
    """
    run_out = {
        "virgo-dummy": [
            {
                "build_date": "2015-07-09T10:55:19Z",
                "vendor": "openSUSE Build Service",
                "description": (
                    "This is the Virgo dummy package used for testing SUSE Manager"
                ),
                "license": "GPL-2.0",
                "build_host": "sheep05",
                "url": "http://www.suse.com",
                "build_date_time_t": 1436432119,
                "relocations": "(not relocatable)",
                "source_rpm": "virgo-dummy-1.0-1.1.src.rpm",
                "install_date": "2016-02-23T16:31:57Z",
                "install_date_time_t": 1456241517,
                "summary": "Virgo dummy package",
                "version": "1.0",
                "signature": (
                    "DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9"
                ),
                "release": "1.1",
                "group": "Applications/System",
                "arch": "i686",
                "size": "17992",
            },
            {
                "build_date": "2015-07-09T10:15:19Z",
                "vendor": "openSUSE Build Service",
                "description": (
                    "This is the Virgo dummy package used for testing SUSE Manager"
                ),
                "license": "GPL-2.0",
                "build_host": "sheep05",
                "url": "http://www.suse.com",
                "build_date_time_t": 1436432119,
                "relocations": "(not relocatable)",
                "source_rpm": "virgo-dummy-1.0-1.1.src.rpm",
                "install_date": "2016-02-23T16:31:57Z",
                "install_date_time_t": 14562415127,
                "summary": "Virgo dummy package",
                "version": "1.0",
                "signature": (
                    "DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9"
                ),
                "release": "1.1",
                "group": "Applications/System",
                "arch": "x86_64",
                "size": "13124",
            },
        ],
        "libopenssl1_0_0": [
            {
                "build_date": "2015-11-04T23:20:34Z",
                "vendor": "SUSE LLC <https://www.suse.com/>",
                "description": "The OpenSSL Project is a collaborative effort.",
                "license": "OpenSSL",
                "build_host": "sheep11",
                "url": "https://www.openssl.org/",
                "build_date_time_t": 1446675634,
                "relocations": "(not relocatable)",
                "source_rpm": "openssl-1.0.1i-34.1.src.rpm",
                "install_date": "2016-02-23T16:31:35Z",
                "install_date_time_t": 1456241495,
                "summary": "Secure Sockets and Transport Layer Security",
                "version": "1.0.1i",
                "signature": (
                    "RSA/SHA256, Wed Nov 4 22:21:34 2015, Key ID 70af9e8139db7c82"
                ),
                "release": "34.1",
                "group": "Productivity/Networking/Security",
                "packager": "https://www.suse.com/",
                "arch": "x86_64",
                "size": "2576912",
            }
        ],
    }
    with patch.dict(yumpkg.__salt__, {"lowpkg.info": MagicMock(return_value=run_out)}):
        installed = yumpkg.info_installed(all_versions=True)
        # Test overall products length
        assert len(installed) == 2

        # Test multiple versions for the same package.
        # NOTE: the expected count MUST be parenthesized.  The previous
        # form ``assert len(x) == 2 if cond else 1`` parsed as
        # ``assert ((len(x) == 2) if cond else 1)`` so the else branch
        # degenerated to ``assert 1`` and always passed.
        for pkg_name, pkg_info_list in installed.items():
            assert len(pkg_info_list) == (2 if pkg_name == "virgo-dummy" else 1)
            for info in pkg_info_list:
                assert info["arch"] in ("x86_64", "i686")
def test_pkg_hold_yum():
    """
    Tests that we properly identify versionlock plugin when using yum
    for RHEL/CentOS 7 and Fedora < 22
    """
    # Test RHEL/CentOS 7
    list_pkgs_mock = {
        "yum-plugin-versionlock": "0:1.0.0-0.n.el7",
        "yum-versionlock": "0:1.0.0-0.n.el7",
    }
    cmd = MagicMock(return_value={"retcode": 0})
    with patch.object(
        yumpkg, "list_pkgs", MagicMock(return_value=list_pkgs_mock)
    ), patch.object(yumpkg, "list_holds", MagicMock(return_value=[])), patch.dict(
        yumpkg.__salt__, {"cmd.run_all": cmd}
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        yumpkg.hold("foo")
        cmd.assert_called_once_with(
            ["yum", "versionlock", "foo"],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )

    # Test Fedora 20
    cmd = MagicMock(return_value={"retcode": 0})
    with patch.dict(yumpkg.__context__, {"yum_bin": "yum"}), patch.dict(
        yumpkg.__grains__, {"os": "Fedora", "osrelease": 20}
    ), patch.object(
        yumpkg, "list_pkgs", MagicMock(return_value=list_pkgs_mock)
    ), patch.object(
        yumpkg, "list_holds", MagicMock(return_value=[])
    ), patch.dict(
        yumpkg.__salt__, {"cmd.run_all": cmd}
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        yumpkg.hold("foo")
        cmd.assert_called_once_with(
            ["yum", "versionlock", "foo"],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )
def test_pkg_hold_tdnf():
    """
    Tests that we raise a SaltInvocationError if we try to use
    hold-related functions on Photon OS.
    """
    with patch.dict(yumpkg.__context__, {"yum_bin": "tdnf"}):
        # The previous ``as exc_info`` capture was never inspected; the
        # bare context-manager form states the intent directly.
        with pytest.raises(SaltInvocationError):
            yumpkg.hold("foo")
def test_pkg_hold_dnf():
    """
    Tests that we properly identify versionlock plugin when using dnf
    for RHEL/CentOS 8 and Fedora >= 22
    """
    # Test RHEL/CentOS 8
    list_pkgs_mock = {
        "python2-dnf-plugin-versionlock": "0:1.0.0-0.n.el8",
        "python3-dnf-plugin-versionlock": "0:1.0.0-0.n.el8",
    }
    # pop with a default so a missing key (cleared by another test)
    # cannot raise KeyError before the dnf context is patched in.
    yumpkg.__context__.pop("yum_bin", None)
    cmd = MagicMock(return_value={"retcode": 0})
    with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict(
        yumpkg.__grains__, {"osmajorrelease": 8}
    ), patch.object(
        yumpkg, "list_pkgs", MagicMock(return_value=list_pkgs_mock)
    ), patch.object(
        yumpkg, "list_holds", MagicMock(return_value=[])
    ), patch.dict(
        yumpkg.__salt__, {"cmd.run_all": cmd}
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        yumpkg.hold("foo")
        cmd.assert_called_once_with(
            ["dnf", "versionlock", "foo"],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )

    # Test Fedora 26+
    cmd = MagicMock(return_value={"retcode": 0})
    with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict(
        yumpkg.__grains__, {"os": "Fedora", "osrelease": 26}
    ), patch.object(
        yumpkg, "list_pkgs", MagicMock(return_value=list_pkgs_mock)
    ), patch.object(
        yumpkg, "list_holds", MagicMock(return_value=[])
    ), patch.dict(
        yumpkg.__salt__, {"cmd.run_all": cmd}
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        yumpkg.hold("foo")
        cmd.assert_called_once_with(
            ["dnf", "versionlock", "foo"],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )

    # Test Fedora 22-25: the versionlock plugin ships in the
    # dnf-plugins-extras packages on these releases.
    list_pkgs_mock = {
        "python-dnf-plugins-extras-versionlock": "0:1.0.0-0.n.el8",
        "python3-dnf-plugins-extras-versionlock": "0:1.0.0-0.n.el8",
    }
    cmd = MagicMock(return_value={"retcode": 0})
    with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict(
        yumpkg.__grains__, {"os": "Fedora", "osrelease": 25}
    ), patch.object(
        yumpkg, "list_pkgs", MagicMock(return_value=list_pkgs_mock)
    ), patch.object(
        yumpkg, "list_holds", MagicMock(return_value=[])
    ), patch.dict(
        yumpkg.__salt__, {"cmd.run_all": cmd}
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        yumpkg.hold("foo")
        cmd.assert_called_once_with(
            ["dnf", "versionlock", "foo"],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )
@pytest.mark.skipif(not yumpkg.HAS_YUM, reason="Could not import yum")
def test_yum_base_error():
    """
    A failure while constructing ``yum.YumBase`` must surface from
    ``_get_yum_config`` as a CommandExecutionError.
    """
    failing_yumbase = patch("yum.YumBase", side_effect=CommandExecutionError)
    with failing_yumbase, pytest.raises(CommandExecutionError):
        yumpkg._get_yum_config()
def test_group_info():
    """
    Test yumpkg.group_info parsing
    """
    # Expected structured result parsed out of the raw "yum group info"
    # output below.  Mandatory/default/optional membership is derived
    # from the "=" / "-" / bare-name markers in that output.
    expected = {
        "conditional": [],
        "default": ["qgnomeplatform", "xdg-desktop-portal-gtk"],
        "description": (
            "GNOME is a highly intuitive and user friendly desktop environment."
        ),
        "group": "GNOME",
        "id": "gnome-desktop",
        "mandatory": [
            "NetworkManager-libreswan-gnome",
            "PackageKit-command-not-found",
            "PackageKit-gtk3-module",
            "abrt-desktop",
            "at-spi2-atk",
            "at-spi2-core",
            "avahi",
            "baobab",
            "caribou",
            "caribou-gtk2-module",
            "caribou-gtk3-module",
            "cheese",
            "chrome-gnome-shell",
            "compat-cheese314",
            "control-center",
            "dconf",
            "empathy",
            "eog",
            "evince",
            "evince-nautilus",
            "file-roller",
            "file-roller-nautilus",
            "firewall-config",
            "firstboot",
            "fprintd-pam",
            "gdm",
            "gedit",
            "glib-networking",
            "gnome-bluetooth",
            "gnome-boxes",
            "gnome-calculator",
            "gnome-classic-session",
            "gnome-clocks",
            "gnome-color-manager",
            "gnome-contacts",
            "gnome-dictionary",
            "gnome-disk-utility",
            "gnome-font-viewer",
            "gnome-getting-started-docs",
            "gnome-icon-theme",
            "gnome-icon-theme-extras",
            "gnome-icon-theme-symbolic",
            "gnome-initial-setup",
            "gnome-packagekit",
            "gnome-packagekit-updater",
            "gnome-screenshot",
            "gnome-session",
            "gnome-session-xsession",
            "gnome-settings-daemon",
            "gnome-shell",
            "gnome-software",
            "gnome-system-log",
            "gnome-system-monitor",
            "gnome-terminal",
            "gnome-terminal-nautilus",
            "gnome-themes-standard",
            "gnome-tweak-tool",
            "gnome-user-docs",
            "gnome-weather",
            "gucharmap",
            "gvfs-afc",
            "gvfs-afp",
            "gvfs-archive",
            "gvfs-fuse",
            "gvfs-goa",
            "gvfs-gphoto2",
            "gvfs-mtp",
            "gvfs-smb",
            "initial-setup-gui",
            "libcanberra-gtk2",
            "libcanberra-gtk3",
            "libproxy-mozjs",
            "librsvg2",
            "libsane-hpaio",
            "metacity",
            "mousetweaks",
            "nautilus",
            "nautilus-sendto",
            "nm-connection-editor",
            "orca",
            "redhat-access-gui",
            "sane-backends-drivers-scanners",
            "seahorse",
            "setroubleshoot",
            "sushi",
            "totem",
            "totem-nautilus",
            "vinagre",
            "vino",
            "xdg-user-dirs-gtk",
            "yelp",
        ],
        "optional": [
            # NOTE(review): the leading empty string mirrors the parser's
            # current output for this input -- confirm it is intended.
            "",
            "alacarte",
            "dconf-editor",
            "dvgrab",
            "fonts-tweak-tool",
            "gconf-editor",
            "gedit-plugins",
            "gnote",
            "libappindicator-gtk3",
            "seahorse-nautilus",
            "seahorse-sharing",
            "vim-X11",
            "xguest",
        ],
        "type": "package group",
    }
    # Raw "yum group info GNOME" style output fed to the parser.
    cmd_out = """Group: GNOME
Group-Id: gnome-desktop
Description: GNOME is a highly intuitive and user friendly desktop environment.
Mandatory Packages:
=NetworkManager-libreswan-gnome
=PackageKit-command-not-found
=PackageKit-gtk3-module
abrt-desktop
=at-spi2-atk
=at-spi2-core
=avahi
=baobab
-caribou
-caribou-gtk2-module
-caribou-gtk3-module
=cheese
=chrome-gnome-shell
=compat-cheese314
=control-center
=dconf
=empathy
=eog
=evince
=evince-nautilus
=file-roller
=file-roller-nautilus
=firewall-config
=firstboot
fprintd-pam
=gdm
=gedit
=glib-networking
=gnome-bluetooth
=gnome-boxes
=gnome-calculator
=gnome-classic-session
=gnome-clocks
=gnome-color-manager
=gnome-contacts
=gnome-dictionary
=gnome-disk-utility
=gnome-font-viewer
=gnome-getting-started-docs
=gnome-icon-theme
=gnome-icon-theme-extras
=gnome-icon-theme-symbolic
=gnome-initial-setup
=gnome-packagekit
=gnome-packagekit-updater
=gnome-screenshot
=gnome-session
=gnome-session-xsession
=gnome-settings-daemon
=gnome-shell
=gnome-software
=gnome-system-log
=gnome-system-monitor
=gnome-terminal
=gnome-terminal-nautilus
=gnome-themes-standard
=gnome-tweak-tool
=gnome-user-docs
=gnome-weather
=gucharmap
=gvfs-afc
=gvfs-afp
=gvfs-archive
=gvfs-fuse
=gvfs-goa
=gvfs-gphoto2
=gvfs-mtp
=gvfs-smb
initial-setup-gui
=libcanberra-gtk2
=libcanberra-gtk3
=libproxy-mozjs
=librsvg2
=libsane-hpaio
=metacity
=mousetweaks
=nautilus
=nautilus-sendto
=nm-connection-editor
=orca
-redhat-access-gui
=sane-backends-drivers-scanners
=seahorse
=setroubleshoot
=sushi
=totem
=totem-nautilus
=vinagre
=vino
=xdg-user-dirs-gtk
=yelp
Default Packages:
=qgnomeplatform
=xdg-desktop-portal-gtk
Optional Packages:
alacarte
dconf-editor
dvgrab
fonts-tweak-tool
gconf-editor
gedit-plugins
gnote
libappindicator-gtk3
seahorse-nautilus
seahorse-sharing
vim-X11
xguest
"""
    with patch.dict(
        yumpkg.__salt__, {"cmd.run_stdout": MagicMock(return_value=cmd_out)}
    ):
        info = yumpkg.group_info("@gnome-desktop")
        assert info == expected
def test_get_repo_with_existent_repo(list_repos_var):
    """
    Test get_repo with an existent repository
    Expected return is a populated dictionary
    """
    repo = "base-source"
    kwargs = {
        "baseurl": "http://vault.centos.org/centos/$releasever/os/Source/",
        "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
        "name": "CentOS-$releasever - Base Sources",
        "enabled": True,
    }
    # (header-string, {repo-name: options}) tuple as returned by
    # yumpkg._parse_repo_file
    parse_repo_file_return = (
        "",
        {
            "base-source": {
                "baseurl": "http://vault.centos.org/centos/$releasever/os/Source/",
                "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
                "name": "CentOS-$releasever - Base Sources",
                "enabled": "1",
            }
        },
    )
    expected = {
        "baseurl": "http://vault.centos.org/centos/$releasever/os/Source/",
        "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
        "name": "CentOS-$releasever - Base Sources",
        "enabled": "1",
    }
    patch_list_repos = patch.object(
        yumpkg, "list_repos", autospec=True, return_value=list_repos_var
    )
    patch_parse_repo_file = patch.object(
        yumpkg,
        "_parse_repo_file",
        autospec=True,
        return_value=parse_repo_file_return,
    )
    with patch_list_repos, patch_parse_repo_file:
        ret = yumpkg.get_repo(repo, **kwargs)
    assert ret == expected, ret
def test_get_repo_with_non_existent_repo(list_repos_var):
    """
    get_repo must return an empty dictionary when asked about a
    repository that is not configured on the system.
    """
    repo = "non-existent-repository"
    kwargs = {
        "baseurl": "http://fake.centos.org/centos/$releasever/os/Non-Existent/",
        "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
        "name": "CentOS-$releasever - Non-Existent Repository",
        "enabled": True,
    }
    with patch.object(
        yumpkg, "list_repos", autospec=True, return_value=list_repos_var
    ):
        result = yumpkg.get_repo(repo, **kwargs)
    assert result == {}, result
def test_pkg_update_dnf():
    """
    Tests that the proper CLI options are added when obsoletes=False
    """
    name = "foo"
    old = "1.2.2-1.fc31"
    new = "1.2.3-1.fc31"
    cmd_mock = MagicMock(return_value={"retcode": 0})
    # first call returns the pre-update state, second the post-update state
    list_pkgs_mock = MagicMock(side_effect=[{name: old}, {name: new}])
    parse_targets_mock = MagicMock(return_value=({"foo": None}, "repository"))
    with patch.dict(
        yumpkg.__salt__,
        {"cmd.run_all": cmd_mock, "pkg_resource.parse_targets": parse_targets_mock},
    ), patch.object(yumpkg, "refresh_db", MagicMock()), patch.object(
        yumpkg, "list_pkgs", list_pkgs_mock
    ), patch.object(
        yumpkg, "_yum", MagicMock(return_value="dnf")
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        ret = yumpkg.update(name, setopt="obsoletes=0,plugins=0")
        expected = {name: {"old": old, "new": new}}
        assert ret == expected, ret

        # the user-supplied "obsoletes=0" must be replaced by
        # "--setopt obsoletes=False" in the dnf command line
        cmd_mock.assert_called_once_with(
            [
                "dnf",
                "--quiet",
                "-y",
                "--setopt",
                "plugins=0",
                "--setopt",
                "obsoletes=False",
                "upgrade",
                "foo",
            ],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )
def test_call_yum_default():
    """
    _call_yum must invoke the configured yum/dnf binary through
    cmd.run_all with the default kwargs when scope support is disabled.
    """
    run_all_mock = MagicMock()
    salt_dunder = {
        "cmd.run_all": run_all_mock,
        "config.get": MagicMock(return_value=False),
    }
    with patch.dict(yumpkg.__context__, {"yum_bin": "fake-yum"}):
        with patch.dict(yumpkg.__salt__, salt_dunder):
            yumpkg._call_yum(["-y", "--do-something"])  # pylint: disable=W0106
            run_all_mock.assert_called_once_with(
                ["fake-yum", "-y", "--do-something"],
                env={},
                output_loglevel="trace",
                python_shell=False,
            )
@patch("salt.utils.systemd.has_scope", MagicMock(return_value=True))
def test_call_yum_in_scope():
    """
    Call Yum/Dnf within the scope.
    :return:
    """
    with patch.dict(yumpkg.__context__, {"yum_bin": "fake-yum"}):
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": MagicMock(), "config.get": MagicMock(return_value=True)},
        ):
            yumpkg._call_yum(["-y", "--do-something"])  # pylint: disable=W0106
            # with scope support enabled, the command is wrapped in
            # "systemd-run --scope"
            yumpkg.__salt__["cmd.run_all"].assert_called_once_with(
                ["systemd-run", "--scope", "fake-yum", "-y", "--do-something"],
                env={},
                output_loglevel="trace",
                python_shell=False,
            )
def test_call_yum_with_kwargs():
    """
    Call Yum/Dnf with the optional keyword arguments.
    :return:
    """
    with patch.dict(yumpkg.__context__, {"yum_bin": "fake-yum"}):
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": MagicMock(), "config.get": MagicMock(return_value=False)},
        ):
            # every extra kwarg must be forwarded verbatim to cmd.run_all
            yumpkg._call_yum(
                ["-y", "--do-something"],
                python_shell=True,
                output_loglevel="quiet",
                ignore_retcode=False,
                username="Darth Vader",
            )  # pylint: disable=W0106
            yumpkg.__salt__["cmd.run_all"].assert_called_once_with(
                ["fake-yum", "-y", "--do-something"],
                env={},
                ignore_retcode=False,
                output_loglevel="quiet",
                python_shell=True,
                username="Darth Vader",
            )
@pytest.mark.skipif(not salt.utils.systemd.booted(), reason="Requires systemd")
def test_services_need_restart():
    """
    Test that dnf needs-restarting output is parsed and
    salt.utils.systemd.pid_to_service is called as expected.
    """
    expected = ["firewalld", "salt-minion"]

    # "<pid> : <binary path>" lines as emitted by `dnf needs-restarting`
    dnf_mock = Mock(
        return_value="123 : /usr/bin/firewalld\n456 : /usr/bin/salt-minion\n"
    )
    systemd_mock = Mock(side_effect=["firewalld", "salt-minion"])
    with patch("salt.modules.yumpkg._yum", Mock(return_value="dnf")):
        with patch.dict(yumpkg.__salt__, {"cmd.run_stdout": dnf_mock}), patch(
            "salt.utils.systemd.pid_to_service", systemd_mock
        ):
            assert sorted(yumpkg.services_need_restart()) == expected
            # each parsed PID must be resolved to its service name
            systemd_mock.assert_has_calls([call("123"), call("456")])
def test_services_need_restart_requires_systemd():
    """Test that yumpkg.services_need_restart raises an error if systemd is unavailable."""
    patch_yum = patch("salt.modules.yumpkg._yum", Mock(return_value="dnf"))
    patch_booted = patch("salt.utils.systemd.booted", Mock(return_value=False))
    with patch_yum, patch_booted:
        with pytest.raises(CommandExecutionError):
            yumpkg.services_need_restart()
def test_services_need_restart_requires_dnf():
    """Test that yumpkg.services_need_restart raises an error if DNF is unavailable."""
    with patch("salt.modules.yumpkg._yum", Mock(return_value="yum")):
        with pytest.raises(CommandExecutionError):
            yumpkg.services_need_restart()
def test_61003_pkg_should_not_fail_when_target_not_in_old_pkgs():
    """
    Regression test for issue #61003: pkg.remove must not blow up when a
    parsed target is absent from the currently-installed package list.
    """
    patch_list_pkgs = patch(
        "salt.modules.yumpkg.list_pkgs", return_value={}, autospec=True
    )
    patch_salt = patch.dict(
        yumpkg.__salt__,
        {
            "pkg_resource.parse_targets": Mock(
                return_value=[
                    {
                        "fnord-this-is-not-actually-a-package": "fnord-this-is-not-actually-a-package-1.2.3"
                    }
                ]
            )
        },
    )
    with patch_list_pkgs, patch_salt:
        # During the 3004rc1 we discovered that if list_pkgs was missing
        # packages that were returned by parse_targets that yumpkg.remove would
        # catch on fire. This ensures that won't go undetected again.
        yumpkg.remove()
@pytest.fixture(
    ids=["yum", "dnf"],
    params=[
        {
            "context": {"yum_bin": "yum"},
            "grains": {"os": "CentOS", "osrelease": 7},
            "cmd": ["yum", "-y"],
        },
        {
            "context": {"yum_bin": "dnf"},
            "grains": {"os": "Fedora", "osrelease": 27},
            "cmd": ["dnf", "-y", "--best", "--allowerasing"],
        },
    ],
)
def yum_and_dnf(request):
    """
    Parametrized fixture that runs the dependent test once with yum and
    once with dnf configured as the package-manager binary; yields the
    expected base command for the active variant.
    """
    with patch.dict(yumpkg.__context__, request.param["context"]), patch.dict(
        yumpkg.__grains__, request.param["grains"]
    ), patch.dict(pkg_resource.__grains__, request.param["grains"]):
        yield request.param["cmd"]
@pytest.mark.parametrize(
    "new,full_pkg_string",
    (
        (42, "foo-42"),
        (12, "foo-12"),
        ("99:1.2.3", "foo-1.2.3"),
    ),
)
def test_59705_version_as_accidental_float_should_become_text(
    new, full_pkg_string, yum_and_dnf
):
    """
    Regression test for issue #59705: a numeric ``version`` argument must
    be coerced to text before the install command line is built.
    """
    name = "foo"
    expected_cmd = yum_and_dnf + ["install", full_pkg_string]
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )

    def fake_parse(*args, **kwargs):
        # echo the version back so install() sees exactly what was passed
        return {name: kwargs["version"]}, "repository"

    patch_yum_salt = patch.dict(
        yumpkg.__salt__,
        {
            "cmd.run_all": cmd_mock,
            "lowpkg.version_cmp": rpm.version_cmp,
            "pkg_resource.parse_targets": fake_parse,
        },
    )
    patch_list_pkgs = patch.object(
        yumpkg, "list_pkgs", return_value={"foo": ["foo-42"]}
    )
    patch_systemd = patch("salt.utils.systemd.has_scope", MagicMock(return_value=False))
    with patch_list_pkgs, patch_systemd, patch_yum_salt:
        yumpkg.install("foo", version=new)
        call = cmd_mock.mock_calls[0][1][0]
        assert call == expected_cmd
Patch `downloaded` in the yumpkg tests

We don't actually care what has been downloaded — we only care
that the version numbers are correctly parsed.
import logging
import os
import salt.modules.cmdmod as cmdmod
import salt.modules.pkg_resource as pkg_resource
import salt.modules.rpm_lowpkg as rpm
import salt.modules.yumpkg as yumpkg
import salt.utils.platform
from salt.exceptions import CommandExecutionError, SaltInvocationError
from tests.support.mock import MagicMock, Mock, call, patch
try:
import pytest
except ImportError:
pytest = None
log = logging.getLogger(__name__)
@pytest.fixture(scope="module")
def list_repos_var():
    """
    Return a canned ``yumpkg.list_repos`` result (CentOS base/updates
    repos and their source variants) shared by the get_repo tests.
    """
    return {
        "base": {
            "file": "/etc/yum.repos.d/CentOS-Base.repo",
            "gpgcheck": "1",
            "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
            "mirrorlist": "http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=os&infra=$infra",
            "name": "CentOS-$releasever - Base",
        },
        "base-source": {
            "baseurl": "http://vault.centos.org/centos/$releasever/os/Source/",
            "enabled": "0",
            "file": "/etc/yum.repos.d/CentOS-Sources.repo",
            "gpgcheck": "1",
            "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
            "name": "CentOS-$releasever - Base Sources",
        },
        "updates": {
            "file": "/etc/yum.repos.d/CentOS-Base.repo",
            "gpgcheck": "1",
            "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
            "mirrorlist": "http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=updates&infra=$infra",
            "name": "CentOS-$releasever - Updates",
        },
        "updates-source": {
            "baseurl": "http://vault.centos.org/centos/$releasever/updates/Source/",
            "enabled": "0",
            "file": "/etc/yum.repos.d/CentOS-Sources.repo",
            "gpgcheck": "1",
            "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
            "name": "CentOS-$releasever - Updates Sources",
        },
    }
@pytest.fixture
def configure_loader_modules():
    """
    Set up the dunder globals (``__context__``/``__grains__``) for the
    yumpkg and pkg_resource loader modules; defaults to a CentOS 7 host
    using the ``yum`` binary.
    """
    return {
        yumpkg: {
            "__context__": {"yum_bin": "yum"},
            "__grains__": {
                "osarch": "x86_64",
                "os": "CentOS",
                "os_family": "RedHat",
                "osmajorrelease": 7,
            },
        },
        pkg_resource: {},
    }
def test_list_pkgs():
    """
    Test packages listing.

    :return:
    """

    def _add_data(data, key, value):
        # stand-in for pkg_resource.add_pkg: accumulate versions per name
        data.setdefault(key, []).append(value)

    # rpm query output: name_|-epoch_|-version_|-release_|-arch_|-(none)_|-install_time
    rpm_out = [
        "python-urlgrabber_|-(none)_|-3.10_|-8.el7_|-noarch_|-(none)_|-1487838471",
        "alsa-lib_|-(none)_|-1.1.1_|-1.el7_|-x86_64_|-(none)_|-1487838475",
        "gnupg2_|-(none)_|-2.0.22_|-4.el7_|-x86_64_|-(none)_|-1487838477",
        "rpm-python_|-(none)_|-4.11.3_|-21.el7_|-x86_64_|-(none)_|-1487838477",
        "pygpgme_|-(none)_|-0.3_|-9.el7_|-x86_64_|-(none)_|-1487838478",
        "yum_|-(none)_|-3.4.3_|-150.el7.centos_|-noarch_|-(none)_|-1487838479",
        "lzo_|-(none)_|-2.06_|-8.el7_|-x86_64_|-(none)_|-1487838479",
        "qrencode-libs_|-(none)_|-3.4.1_|-3.el7_|-x86_64_|-(none)_|-1487838480",
        "ustr_|-(none)_|-1.0.4_|-16.el7_|-x86_64_|-(none)_|-1487838480",
        "shadow-utils_|-2_|-4.1.5.1_|-24.el7_|-x86_64_|-(none)_|-1487838481",
        "util-linux_|-(none)_|-2.23.2_|-33.el7_|-x86_64_|-(none)_|-1487838484",
        "openssh_|-(none)_|-6.6.1p1_|-33.el7_3_|-x86_64_|-(none)_|-1487838485",
        "virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486",
    ]
    with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict(
        yumpkg.__salt__,
        {"cmd.run": MagicMock(return_value=os.linesep.join(rpm_out))},
    ), patch.dict(yumpkg.__salt__, {"pkg_resource.add_pkg": _add_data}), patch.dict(
        yumpkg.__salt__,
        {"pkg_resource.format_pkg_list": pkg_resource.format_pkg_list},
    ), patch.dict(
        yumpkg.__salt__, {"pkg_resource.stringify": MagicMock()}
    ), patch.dict(
        pkg_resource.__salt__, {"pkg.parse_arch": yumpkg.parse_arch}
    ):
        pkgs = yumpkg.list_pkgs(versions_as_list=True)
        # non-(none) epochs (e.g. shadow-utils "2:") must be kept in the
        # version string; everything else is version-release only
        for pkg_name, pkg_version in {
            "python-urlgrabber": "3.10-8.el7",
            "alsa-lib": "1.1.1-1.el7",
            "gnupg2": "2.0.22-4.el7",
            "rpm-python": "4.11.3-21.el7",
            "pygpgme": "0.3-9.el7",
            "yum": "3.4.3-150.el7.centos",
            "lzo": "2.06-8.el7",
            "qrencode-libs": "3.4.1-3.el7",
            "ustr": "1.0.4-16.el7",
            "shadow-utils": "2:4.1.5.1-24.el7",
            "util-linux": "2.23.2-33.el7",
            "openssh": "6.6.1p1-33.el7_3",
            "virt-what": "1.13-8.el7",
        }.items():
            assert pkgs.get(pkg_name) is not None
            assert pkgs[pkg_name] == [pkg_version]
def test_list_pkgs_no_context():
    """
    Test that list_pkgs bypasses the in-memory context cache when
    ``use_context=False`` is passed, even across repeated calls.
    :return:
    """
    # Stand-in for pkg_resource.add_pkg: groups versions under each package name.
    def _add_data(data, key, value):
        data.setdefault(key, []).append(value)
    # Canned ``rpm -qa --queryformat`` output; fields are separated by "_|-":
    # name, epoch, version, release, arch, repoid, install time (epoch seconds).
    rpm_out = [
        "python-urlgrabber_|-(none)_|-3.10_|-8.el7_|-noarch_|-(none)_|-1487838471",
        "alsa-lib_|-(none)_|-1.1.1_|-1.el7_|-x86_64_|-(none)_|-1487838475",
        "gnupg2_|-(none)_|-2.0.22_|-4.el7_|-x86_64_|-(none)_|-1487838477",
        "rpm-python_|-(none)_|-4.11.3_|-21.el7_|-x86_64_|-(none)_|-1487838477",
        "pygpgme_|-(none)_|-0.3_|-9.el7_|-x86_64_|-(none)_|-1487838478",
        "yum_|-(none)_|-3.4.3_|-150.el7.centos_|-noarch_|-(none)_|-1487838479",
        "lzo_|-(none)_|-2.06_|-8.el7_|-x86_64_|-(none)_|-1487838479",
        "qrencode-libs_|-(none)_|-3.4.1_|-3.el7_|-x86_64_|-(none)_|-1487838480",
        "ustr_|-(none)_|-1.0.4_|-16.el7_|-x86_64_|-(none)_|-1487838480",
        "shadow-utils_|-2_|-4.1.5.1_|-24.el7_|-x86_64_|-(none)_|-1487838481",
        "util-linux_|-(none)_|-2.23.2_|-33.el7_|-x86_64_|-(none)_|-1487838484",
        "openssh_|-(none)_|-6.6.1p1_|-33.el7_3_|-x86_64_|-(none)_|-1487838485",
        "virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486",
    ]
    # Patch every dunder/helper list_pkgs touches, plus _list_pkgs_from_context
    # so we can assert the context-cache path is never taken.
    with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict(
        yumpkg.__salt__,
        {"cmd.run": MagicMock(return_value=os.linesep.join(rpm_out))},
    ), patch.dict(yumpkg.__salt__, {"pkg_resource.add_pkg": _add_data}), patch.dict(
        yumpkg.__salt__,
        {"pkg_resource.format_pkg_list": pkg_resource.format_pkg_list},
    ), patch.dict(
        yumpkg.__salt__, {"pkg_resource.stringify": MagicMock()}
    ), patch.dict(
        pkg_resource.__salt__, {"pkg.parse_arch": yumpkg.parse_arch}
    ), patch.object(
        yumpkg, "_list_pkgs_from_context"
    ) as list_pkgs_context_mock:
        # Return value intentionally ignored; only the cache bypass is asserted.
        pkgs = yumpkg.list_pkgs(versions_as_list=True, use_context=False)
        list_pkgs_context_mock.assert_not_called()
        list_pkgs_context_mock.reset_mock()
        # Second call: still must not consult the context cache.
        pkgs = yumpkg.list_pkgs(versions_as_list=True, use_context=False)
        list_pkgs_context_mock.assert_not_called()
        list_pkgs_context_mock.reset_mock()
def test_list_pkgs_with_attr():
    """
    Test packages listing with the attr parameter: each package maps to a
    list of dicts carrying the requested attributes.
    :return:
    """
    # Stand-in for pkg_resource.add_pkg: groups versions under each package name.
    def _add_data(data, key, value):
        data.setdefault(key, []).append(value)
    # Canned rpm query output; "_|-"-separated fields:
    # name, epoch, version, release, arch, repoid, install time (epoch seconds).
    rpm_out = [
        "python-urlgrabber_|-(none)_|-3.10_|-8.el7_|-noarch_|-(none)_|-1487838471",
        "alsa-lib_|-(none)_|-1.1.1_|-1.el7_|-x86_64_|-(none)_|-1487838475",
        "gnupg2_|-(none)_|-2.0.22_|-4.el7_|-x86_64_|-(none)_|-1487838477",
        "rpm-python_|-(none)_|-4.11.3_|-21.el7_|-x86_64_|-(none)_|-1487838477",
        "pygpgme_|-(none)_|-0.3_|-9.el7_|-x86_64_|-(none)_|-1487838478",
        "yum_|-(none)_|-3.4.3_|-150.el7.centos_|-noarch_|-(none)_|-1487838479",
        "lzo_|-(none)_|-2.06_|-8.el7_|-x86_64_|-(none)_|-1487838479",
        "qrencode-libs_|-(none)_|-3.4.1_|-3.el7_|-x86_64_|-(none)_|-1487838480",
        "ustr_|-(none)_|-1.0.4_|-16.el7_|-x86_64_|-(none)_|-1487838480",
        "shadow-utils_|-2_|-4.1.5.1_|-24.el7_|-x86_64_|-(none)_|-1487838481",
        "util-linux_|-(none)_|-2.23.2_|-33.el7_|-x86_64_|-(none)_|-1487838484",
        "openssh_|-(none)_|-6.6.1p1_|-33.el7_3_|-x86_64_|-(none)_|-1487838485",
        "virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486",
    ]
    with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict(
        yumpkg.__salt__,
        {"cmd.run": MagicMock(return_value=os.linesep.join(rpm_out))},
    ), patch.dict(yumpkg.__salt__, {"pkg_resource.add_pkg": _add_data}), patch.dict(
        yumpkg.__salt__,
        {"pkg_resource.format_pkg_list": pkg_resource.format_pkg_list},
    ), patch.dict(
        yumpkg.__salt__, {"pkg_resource.stringify": MagicMock()}
    ), patch.dict(
        pkg_resource.__salt__, {"pkg.parse_arch": yumpkg.parse_arch}
    ):
        pkgs = yumpkg.list_pkgs(
            attr=["epoch", "release", "arch", "install_date_time_t"]
        )
        # Expected per-package attribute dicts; epoch is None unless the rpm
        # record carried one (only shadow-utils has an explicit epoch of 2).
        for pkg_name, pkg_attr in {
            "python-urlgrabber": {
                "version": "3.10",
                "release": "8.el7",
                "arch": "noarch",
                "install_date_time_t": 1487838471,
                "epoch": None,
            },
            "alsa-lib": {
                "version": "1.1.1",
                "release": "1.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838475,
                "epoch": None,
            },
            "gnupg2": {
                "version": "2.0.22",
                "release": "4.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838477,
                "epoch": None,
            },
            "rpm-python": {
                "version": "4.11.3",
                "release": "21.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838477,
                "epoch": None,
            },
            "pygpgme": {
                "version": "0.3",
                "release": "9.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838478,
                "epoch": None,
            },
            "yum": {
                "version": "3.4.3",
                "release": "150.el7.centos",
                "arch": "noarch",
                "install_date_time_t": 1487838479,
                "epoch": None,
            },
            "lzo": {
                "version": "2.06",
                "release": "8.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838479,
                "epoch": None,
            },
            "qrencode-libs": {
                "version": "3.4.1",
                "release": "3.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838480,
                "epoch": None,
            },
            "ustr": {
                "version": "1.0.4",
                "release": "16.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838480,
                "epoch": None,
            },
            "shadow-utils": {
                "epoch": "2",
                "version": "4.1.5.1",
                "release": "24.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838481,
            },
            "util-linux": {
                "version": "2.23.2",
                "release": "33.el7",
                "arch": "x86_64",
                "install_date_time_t": 1487838484,
                "epoch": None,
            },
            "openssh": {
                "version": "6.6.1p1",
                "release": "33.el7_3",
                "arch": "x86_64",
                "install_date_time_t": 1487838485,
                "epoch": None,
            },
            "virt-what": {
                "version": "1.13",
                "release": "8.el7",
                "install_date_time_t": 1487838486,
                "arch": "x86_64",
                "epoch": None,
            },
        }.items():
            assert pkgs.get(pkg_name) is not None
            assert pkgs[pkg_name] == [pkg_attr]
def test_list_pkgs_with_attr_multiple_versions():
    """
    Test packages listing with the attr parameter reporting multiple
    versions installed (same name, different arch/version).
    :return:
    """
    # Stand-in for pkg_resource.add_pkg: groups versions under each package name.
    def _add_data(data, key, value):
        data.setdefault(key, []).append(value)
    # Canned rpm query output; "_|-"-separated fields:
    # name, epoch, version, release, arch, repoid, install time (epoch seconds).
    # NOTE: the first two lines were previously missing a separating comma,
    # which made Python concatenate them into one malformed record; the
    # expected output below clearly requires two distinct glibc entries.
    rpm_out = [
        "glibc_|-(none)_|-2.12_|-1.212.el6_|-i686_|-(none)_|-1542394210",
        "glibc_|-(none)_|-2.12_|-1.212.el6_|-x86_64_|-(none)_|-1542394204",
        "virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486",
        "virt-what_|-(none)_|-1.10_|-2.el7_|-x86_64_|-(none)_|-1387838486",
    ]
    with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict(
        yumpkg.__salt__,
        {"cmd.run": MagicMock(return_value=os.linesep.join(rpm_out))},
    ), patch.dict(yumpkg.__salt__, {"pkg_resource.add_pkg": _add_data}), patch.dict(
        yumpkg.__salt__,
        {"pkg_resource.format_pkg_list": pkg_resource.format_pkg_list},
    ), patch.dict(
        yumpkg.__salt__, {"pkg_resource.stringify": MagicMock()}
    ), patch.dict(
        pkg_resource.__salt__, {"pkg.parse_arch": yumpkg.parse_arch}
    ):
        pkgs = yumpkg.list_pkgs(
            attr=["epoch", "release", "arch", "install_date_time_t"]
        )
        expected_pkg_list = {
            "glibc": [
                {
                    "version": "2.12",
                    "release": "1.212.el6",
                    "install_date_time_t": 1542394210,
                    "arch": "i686",
                    "epoch": None,
                },
                {
                    "version": "2.12",
                    "release": "1.212.el6",
                    "install_date_time_t": 1542394204,
                    "arch": "x86_64",
                    "epoch": None,
                },
            ],
            "virt-what": [
                {
                    "version": "1.10",
                    "release": "2.el7",
                    "install_date_time_t": 1387838486,
                    "arch": "x86_64",
                    "epoch": None,
                },
                {
                    "version": "1.13",
                    "release": "8.el7",
                    "install_date_time_t": 1487838486,
                    "arch": "x86_64",
                    "epoch": None,
                },
            ],
        }
        for pkgname, pkginfo in pkgs.items():
            assert pkginfo == expected_pkg_list[pkgname]
            assert len(pkginfo) == len(expected_pkg_list[pkgname])
def test_list_patches():
    """
    Test patches listing: advisory lines are grouped per patch id, with the
    "installed" flag derived from the leading "i" marker.
    :return:
    """
    # Canned ``yum updateinfo`` style output: leading "i" marks an installed
    # advisory; a patch counts as installed only when ALL its lines carry it.
    yum_out = [
        "i my-fake-patch-not-installed-1234 recommended    "
        " spacewalk-usix-2.7.5.2-2.2.noarch",
        "  my-fake-patch-not-installed-1234 recommended    "
        " spacewalksd-5.0.26.2-21.2.x86_64",
        "i my-fake-patch-not-installed-1234 recommended    "
        " suseRegisterInfo-3.1.1-18.2.x86_64",
        "i my-fake-patch-installed-1234 recommended        "
        " my-package-one-1.1-0.1.x86_64",
        "i my-fake-patch-installed-1234 recommended        "
        " my-package-two-1.1-0.1.x86_64",
    ]
    expected_patches = {
        "my-fake-patch-not-installed-1234": {
            "installed": False,
            "summary": [
                "spacewalk-usix-2.7.5.2-2.2.noarch",
                "spacewalksd-5.0.26.2-21.2.x86_64",
                "suseRegisterInfo-3.1.1-18.2.x86_64",
            ],
        },
        "my-fake-patch-installed-1234": {
            "installed": True,
            "summary": [
                "my-package-one-1.1-0.1.x86_64",
                "my-package-two-1.1-0.1.x86_64",
            ],
        },
    }
    with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict(
        yumpkg.__salt__,
        {"cmd.run_stdout": MagicMock(return_value=os.linesep.join(yum_out))},
    ):
        patches = yumpkg.list_patches()
        # Summary order is not guaranteed, so check membership, not equality.
        assert patches["my-fake-patch-not-installed-1234"]["installed"] is False
        assert len(patches["my-fake-patch-not-installed-1234"]["summary"]) == 3
        for _patch in expected_patches["my-fake-patch-not-installed-1234"]["summary"]:
            assert _patch in patches["my-fake-patch-not-installed-1234"]["summary"]
        assert patches["my-fake-patch-installed-1234"]["installed"] is True
        assert len(patches["my-fake-patch-installed-1234"]["summary"]) == 2
        for _patch in expected_patches["my-fake-patch-installed-1234"]["summary"]:
            assert _patch in patches["my-fake-patch-installed-1234"]["summary"]
def test_latest_version_with_options():
    """
    Verify that latest_version translates repo/branch options into the
    expected yum command line, with and without fromrepo, and that
    systemd scope support prepends ``systemd-run --scope``.
    """
    with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})):
        # with fromrepo: fromrepo implies --disablerepo=* plus --enablerepo=<repo>
        cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.latest_version("foo", refresh=False, fromrepo="good", branch="foo")
            cmd.assert_called_once_with(
                [
                    "yum",
                    "--quiet",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                    "list",
                    "available",
                    "foo",
                ],
                env={},
                ignore_retcode=True,
                output_loglevel="trace",
                python_shell=False,
            )
        # without fromrepo: explicit enable/disable repos are passed through
        cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.latest_version(
                "foo",
                refresh=False,
                enablerepo="good",
                disablerepo="bad",
                branch="foo",
            )
            cmd.assert_called_once_with(
                [
                    "yum",
                    "--quiet",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                    "list",
                    "available",
                    "foo",
                ],
                env={},
                ignore_retcode=True,
                output_loglevel="trace",
                python_shell=False,
            )
        # without fromrepo, but within the scope: systemd scope enabled via
        # config.get(True) + has_scope(True) wraps the call in systemd-run
        cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
        with patch("salt.utils.systemd.has_scope", MagicMock(return_value=True)):
            with patch.dict(
                yumpkg.__salt__,
                {"cmd.run_all": cmd, "config.get": MagicMock(return_value=True)},
            ):
                yumpkg.latest_version(
                    "foo",
                    refresh=False,
                    enablerepo="good",
                    disablerepo="bad",
                    branch="foo",
                )
                cmd.assert_called_once_with(
                    [
                        "systemd-run",
                        "--scope",
                        "yum",
                        "--quiet",
                        "--disablerepo=bad",
                        "--enablerepo=good",
                        "--branch=foo",
                        "list",
                        "available",
                        "foo",
                    ],
                    env={},
                    ignore_retcode=True,
                    output_loglevel="trace",
                    python_shell=False,
                )
def test_list_repo_pkgs_with_options(list_repos_var):
    """
    Test list_repo_pkgs with and without fromrepo
    NOTE: mock_calls is a stack. The most recent call is indexed
    with 0, while the first call would have the highest index.
    """
    # Mocked `yum --version` outputs used to drive version-dependent behavior:
    # <3.4.3 lacks --showduplicates; >=3.4.3 supports repository-packages.
    really_old_yum = MagicMock(return_value="3.2.0")
    older_yum = MagicMock(return_value="3.4.0")
    newer_yum = MagicMock(return_value="3.4.5")
    list_repos_mock = MagicMock(return_value=list_repos_var)
    # Common kwargs every cmd.run_all invocation is expected to carry.
    kwargs = {
        "output_loglevel": "trace",
        "ignore_retcode": True,
        "python_shell": False,
        "env": {},
    }
    with patch.object(yumpkg, "list_repos", list_repos_mock):
        # Test with really old yum. The fromrepo argument has no effect on
        # the yum commands we'd run.
        with patch.dict(yumpkg.__salt__, {"cmd.run": really_old_yum}):
            cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
            with patch.dict(
                yumpkg.__salt__,
                {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
            ):
                yumpkg.list_repo_pkgs("foo")
                # We should have called cmd.run_all twice
                assert len(cmd.mock_calls) == 2
                # Check args from first call
                assert cmd.mock_calls[1][1] == (
                    ["yum", "--quiet", "list", "available"],
                )
                # Check kwargs from first call
                assert cmd.mock_calls[1][2] == kwargs
                # Check args from second call
                assert cmd.mock_calls[0][1] == (
                    ["yum", "--quiet", "list", "installed"],
                )
                # Check kwargs from second call
                assert cmd.mock_calls[0][2] == kwargs
        # Test with older (3.4.0) yum: --showduplicates is now added, but
        # fromrepo still has no effect on the commands run.
        with patch.dict(yumpkg.__salt__, {"cmd.run": older_yum}):
            cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
            with patch.dict(
                yumpkg.__salt__,
                {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
            ):
                yumpkg.list_repo_pkgs("foo")
                # We should have called cmd.run_all twice
                assert len(cmd.mock_calls) == 2
                # Check args from first call
                assert cmd.mock_calls[1][1] == (
                    ["yum", "--quiet", "--showduplicates", "list", "available"],
                )
                # Check kwargs from first call
                assert cmd.mock_calls[1][2] == kwargs
                # Check args from second call
                assert cmd.mock_calls[0][1] == (
                    ["yum", "--quiet", "--showduplicates", "list", "installed"],
                )
                # Check kwargs from second call
                assert cmd.mock_calls[0][2] == kwargs
        # Test with newer yum. We should run one yum command per repo, so
        # fromrepo would limit how many calls we make.
        with patch.dict(yumpkg.__salt__, {"cmd.run": newer_yum}):
            # When fromrepo is used, we would only run one yum command, for
            # that specific repo.
            cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
            with patch.dict(
                yumpkg.__salt__,
                {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
            ):
                yumpkg.list_repo_pkgs("foo", fromrepo="base")
                # We should have called cmd.run_all once
                assert len(cmd.mock_calls) == 1
                # Check args
                assert cmd.mock_calls[0][1] == (
                    [
                        "yum",
                        "--quiet",
                        "--showduplicates",
                        "repository-packages",
                        "base",
                        "list",
                        "foo",
                    ],
                )
                # Check kwargs
                assert cmd.mock_calls[0][2] == kwargs
            # Test enabling base-source and disabling updates. We should
            # get two calls, one for each enabled repo. Because dict
            # iteration order will vary, different Python versions will be
            # do them in different orders, which is OK, but it will just
            # mean that we will have to check both the first and second
            # mock call both times.
            cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
            with patch.dict(
                yumpkg.__salt__,
                {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
            ):
                yumpkg.list_repo_pkgs(
                    "foo", enablerepo="base-source", disablerepo="updates"
                )
                # We should have called cmd.run_all twice
                assert len(cmd.mock_calls) == 2
                for repo in ("base", "base-source"):
                    # for/else: fail only if neither mock call matched this repo
                    for index in (0, 1):
                        try:
                            # Check args
                            assert cmd.mock_calls[index][1] == (
                                [
                                    "yum",
                                    "--quiet",
                                    "--showduplicates",
                                    "repository-packages",
                                    repo,
                                    "list",
                                    "foo",
                                ],
                            )
                            # Check kwargs
                            assert cmd.mock_calls[index][2] == kwargs
                            break
                        except AssertionError:
                            continue
                    else:
                        pytest.fail("repo '{}' not checked".format(repo))
def test_list_upgrades_dnf():
    """
    The subcommand should be "upgrades" with dnf
    """
    # Force the dnf code path by seeding the yum_bin context value.
    with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}):
        # with fromrepo: implies --disablerepo=* plus --enablerepo=<repo>
        cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.list_upgrades(refresh=False, fromrepo="good", branch="foo")
            cmd.assert_called_once_with(
                [
                    "dnf",
                    "--quiet",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                    "list",
                    "upgrades",
                ],
                env={},
                output_loglevel="trace",
                ignore_retcode=True,
                python_shell=False,
            )
        # without fromrepo: explicit enable/disable repos are passed through
        cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.list_upgrades(
                refresh=False, enablerepo="good", disablerepo="bad", branch="foo"
            )
            cmd.assert_called_once_with(
                [
                    "dnf",
                    "--quiet",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                    "list",
                    "upgrades",
                ],
                env={},
                output_loglevel="trace",
                ignore_retcode=True,
                python_shell=False,
            )
def test_list_upgrades_yum():
    """
    The subcommand should be "updates" with yum
    """
    # with fromrepo: implies --disablerepo=* plus --enablerepo=<repo>
    cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
    with patch.dict(
        yumpkg.__salt__,
        {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
    ):
        yumpkg.list_upgrades(refresh=False, fromrepo="good", branch="foo")
        cmd.assert_called_once_with(
            [
                "yum",
                "--quiet",
                "--disablerepo=*",
                "--enablerepo=good",
                "--branch=foo",
                "list",
                "updates",
            ],
            env={},
            output_loglevel="trace",
            ignore_retcode=True,
            python_shell=False,
        )
    # without fromrepo: explicit enable/disable repos are passed through
    cmd = MagicMock(return_value={"retcode": 0, "stdout": ""})
    with patch.dict(
        yumpkg.__salt__,
        {"cmd.run_all": cmd, "config.get": MagicMock(return_value=False)},
    ):
        yumpkg.list_upgrades(
            refresh=False, enablerepo="good", disablerepo="bad", branch="foo"
        )
        cmd.assert_called_once_with(
            [
                "yum",
                "--quiet",
                "--disablerepo=bad",
                "--enablerepo=good",
                "--branch=foo",
                "list",
                "updates",
            ],
            env={},
            output_loglevel="trace",
            ignore_retcode=True,
            python_shell=False,
        )
def test_refresh_db_with_options():
    """
    Verify refresh_db builds the clean and (optionally) check-update yum
    command lines correctly for both fromrepo and enable/disable repo forms.
    """
    # clear_rtag touches the filesystem; stub it out entirely.
    with patch("salt.utils.pkg.clear_rtag", Mock()):
        # With check_update=True we will do a cmd.run to run the clean_cmd, and
        # then a separate cmd.retcode to check for updates.
        # with fromrepo
        yum_call = MagicMock()
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.refresh_db(check_update=True, fromrepo="good", branch="foo")
            # Two invocations: "clean expire-cache" then "check-update".
            assert yum_call.call_count == 2
            yum_call.assert_any_call(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "clean",
                    "expire-cache",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                env={},
                ignore_retcode=True,
                output_loglevel="trace",
                python_shell=False,
            )
            yum_call.assert_any_call(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "check-update",
                    "--setopt=autocheck_running_kernel=false",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                output_loglevel="trace",
                env={},
                ignore_retcode=True,
                python_shell=False,
            )
        # without fromrepo
        yum_call = MagicMock()
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.refresh_db(
                check_update=True,
                enablerepo="good",
                disablerepo="bad",
                branch="foo",
            )
            assert yum_call.call_count == 2
            yum_call.assert_any_call(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "clean",
                    "expire-cache",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                env={},
                ignore_retcode=True,
                output_loglevel="trace",
                python_shell=False,
            )
            yum_call.assert_any_call(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "check-update",
                    "--setopt=autocheck_running_kernel=false",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                output_loglevel="trace",
                env={},
                ignore_retcode=True,
                python_shell=False,
            )
        # With check_update=False we will just do a cmd.run for the clean_cmd
        # with fromrepo
        yum_call = MagicMock()
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.refresh_db(check_update=False, fromrepo="good", branch="foo")
            assert yum_call.call_count == 1
            yum_call.assert_called_once_with(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "clean",
                    "expire-cache",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                env={},
                output_loglevel="trace",
                ignore_retcode=True,
                python_shell=False,
            )
        # without fromrepo
        yum_call = MagicMock()
        with patch.dict(
            yumpkg.__salt__,
            {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)},
        ):
            yumpkg.refresh_db(
                check_update=False,
                enablerepo="good",
                disablerepo="bad",
                branch="foo",
            )
            assert yum_call.call_count == 1
            yum_call.assert_called_once_with(
                [
                    "yum",
                    "--quiet",
                    "--assumeyes",
                    "clean",
                    "expire-cache",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                ],
                env={},
                output_loglevel="trace",
                ignore_retcode=True,
                python_shell=False,
            )
def test_install_with_options():
    """
    Verify install renders repo, branch and comma-separated setopt options
    into the expected yum command line.
    """
    # parse_targets: one package "foo" with no pinned version, repo source.
    parse_targets = MagicMock(return_value=({"foo": None}, "repository"))
    with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})), patch.object(
        yumpkg, "list_holds", MagicMock(return_value=[])
    ), patch.dict(
        yumpkg.__salt__, {"pkg_resource.parse_targets": parse_targets}
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        # with fromrepo: implies --disablerepo=* plus --enablerepo=<repo>;
        # each comma-separated setopt entry becomes its own --setopt pair.
        cmd = MagicMock(return_value={"retcode": 0})
        with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}):
            yumpkg.install(
                refresh=False,
                fromrepo="good",
                branch="foo",
                setopt="obsoletes=0,plugins=0",
            )
            cmd.assert_called_once_with(
                [
                    "yum",
                    "-y",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                    "--setopt",
                    "obsoletes=0",
                    "--setopt",
                    "plugins=0",
                    "install",
                    "foo",
                ],
                env={},
                output_loglevel="trace",
                python_shell=False,
                ignore_retcode=False,
                redirect_stderr=True,
            )
        # without fromrepo: explicit enable/disable repos are passed through
        cmd = MagicMock(return_value={"retcode": 0})
        with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}):
            yumpkg.install(
                refresh=False,
                enablerepo="good",
                disablerepo="bad",
                branch="foo",
                setopt="obsoletes=0,plugins=0",
            )
            cmd.assert_called_once_with(
                [
                    "yum",
                    "-y",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                    "--setopt",
                    "obsoletes=0",
                    "--setopt",
                    "plugins=0",
                    "install",
                    "foo",
                ],
                env={},
                output_loglevel="trace",
                python_shell=False,
                ignore_retcode=False,
                redirect_stderr=True,
            )
def test_remove_with_epoch():
    """
    Tests that we properly identify a version containing an epoch for
    deinstallation.
    You can deinstall pkgs only without the epoch if no arch is provided:
    .. code-block:: bash
        yum remove PackageKit-yum-1.1.10-2.el7.centos
    """
    name = "foo"
    installed = "8:3.8.12-4.n.el7"
    # list_pkgs must honor versions_as_list: list of versions vs. plain string.
    list_pkgs_mock = MagicMock(
        side_effect=lambda **kwargs: {
            name: [installed] if kwargs.get("versions_as_list", False) else installed
        }
    )
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )
    salt_mock = {
        "cmd.run_all": cmd_mock,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name: installed}, "repository")
        ),
    }
    # installed[2:] strips the "8:" epoch prefix from the version string.
    full_pkg_string = "-".join((name, installed[2:]))
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock):
        with patch.dict(yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}):
            expected = ["yum", "-y", "remove", full_pkg_string]
            yumpkg.remove(name)
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call
def test_remove_with_epoch_and_arch_info():
    """
    Tests that we properly identify a version containing an epoch and arch
    deinstallation.
    You can deinstall pkgs with or without epoch in combination with the arch.
    Here we test for the absence of the epoch, but the presence for the arch:
    .. code-block:: bash
        yum remove PackageKit-yum-1.1.10-2.el7.centos.x86_64
    """
    arch = "x86_64"
    name = "foo"
    name_and_arch = name + "." + arch
    installed = "8:3.8.12-4.n.el7"
    # list_pkgs must honor versions_as_list: list of versions vs. plain string.
    list_pkgs_mock = MagicMock(
        side_effect=lambda **kwargs: {
            name_and_arch: [installed]
            if kwargs.get("versions_as_list", False)
            else installed
        }
    )
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )
    salt_mock = {
        "cmd.run_all": cmd_mock,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name_and_arch: installed}, "repository")
        ),
    }
    # installed[2:] strips the "8:" epoch prefix; arch suffix is kept.
    full_pkg_string = "-".join((name, installed[2:]))
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock):
        with patch.dict(yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}):
            expected = ["yum", "-y", "remove", full_pkg_string + "." + arch]
            yumpkg.remove(name)
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call
def test_remove_with_wildcard():
    """
    Tests that we properly identify a version containing an epoch for
    deinstallation when the target is given as a wildcard.
    You can deinstall pkgs only without the epoch if no arch is provided:
    .. code-block:: bash
        yum remove foo*
        yum remove pkgs='[{"foo*": "8:3.8.12-4.n.el7"}]'
    """
    name = "foobarpkg"
    installed = "8:3.8.12-4.n.el7"
    # list_pkgs must honor versions_as_list: list of versions vs. plain string.
    list_pkgs_mock = MagicMock(
        side_effect=lambda **kwargs: {
            name: [installed] if kwargs.get("versions_as_list", False) else installed
        }
    )
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )
    salt_mock = {
        "cmd.run_all": cmd_mock,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name: installed}, "repository")
        ),
    }
    # installed[2:] strips the "8:" epoch prefix from the version string.
    full_pkg_string = "-".join((name, installed[2:]))
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock):
        with patch.dict(yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}):
            expected = ["yum", "-y", "remove", full_pkg_string]
            yumpkg.remove("foo*")
            # mock_calls is chronological: index 0 is the FIRST call.
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call
            expected = ["yum", "-y", "remove", full_pkg_string]
            yumpkg.remove(pkgs=[{"foo*": "8:3.8.12-4.n.el7"}])
            # BUGFIX: previously this re-read mock_calls[0] (the first call),
            # so the second remove() invocation was never actually verified.
            # Use the most recent call instead.
            call = cmd_mock.mock_calls[-1][1][0]
            assert call == expected, call
def test_install_with_epoch():
    """
    Tests that we properly identify a version containing an epoch as an
    upgrade instead of a downgrade.
    """
    name = "foo"
    old = "8:3.8.12-6.n.el7"
    new = "9:3.8.12-4.n.el7"
    # list_pkgs must honor versions_as_list: list of versions vs. plain string.
    list_pkgs_mock = MagicMock(
        side_effect=lambda **kwargs: {
            name: [old] if kwargs.get("versions_as_list", False) else old
        }
    )
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )
    salt_mock = {
        "cmd.run_all": cmd_mock,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name: new}, "repository")
        ),
    }
    # new[2:] strips the "9:" epoch prefix from the version string.
    full_pkg_string = "-".join((name, new[2:]))
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock):
        # Test yum
        expected = ["yum", "-y", "install", full_pkg_string]
        with patch.dict(yumpkg.__context__, {"yum_bin": "yum"}), patch.dict(
            yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}
        ):
            yumpkg.install("foo", version=new)
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call
        # Test dnf
        expected = [
            "dnf",
            "-y",
            "--best",
            "--allowerasing",
            "install",
            full_pkg_string,
        ]
        # NOTE(review): this pop assumes "yum_bin" is still present in the
        # shared __context__ after the patch.dict above exited — TODO confirm
        # this doesn't KeyError when the test runs in isolation.
        yumpkg.__context__.pop("yum_bin")
        cmd_mock.reset_mock()
        with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict(
            yumpkg.__grains__, {"os": "Fedora", "osrelease": 27}
        ):
            yumpkg.install("foo", version=new)
            call = cmd_mock.mock_calls[0][1][0]
            assert call == expected, call
@pytest.mark.skipif(not salt.utils.platform.is_linux(), reason="Only run on Linux")
def test_install_error_reporting():
    """
    Tests that we properly report yum/dnf errors.
    Uses the real cmdmod.run_all with the binary forced to ``cat`` so the
    bogus "-y" flag produces a genuine non-zero exit and stderr message.
    """
    name = "foo"
    old = "8:3.8.12-6.n.el7"
    new = "9:3.8.12-4.n.el7"
    # list_pkgs must honor versions_as_list: list of versions vs. plain string.
    list_pkgs_mock = MagicMock(
        side_effect=lambda **kwargs: {
            name: [old] if kwargs.get("versions_as_list", False) else old
        }
    )
    salt_mock = {
        "cmd.run_all": cmdmod.run_all,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name: new}, "repository")
        ),
    }
    full_pkg_string = "-".join((name, new[2:]))
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock), patch.object(
        yumpkg, "_yum", MagicMock(return_value="cat")
    ):
        # cat rejects "-y", so install must raise with cat's stderr captured.
        expected = {
            "changes": {},
            "errors": [
                "cat: invalid option -- 'y'\nTry 'cat --help' for more information."
            ],
        }
        with pytest.raises(CommandExecutionError) as exc_info:
            yumpkg.install("foo", version=new)
        assert exc_info.value.info == expected, exc_info.value.info
def test_remove_not_installed():
    """
    Tests that no exception raised on removing not installed package
    (no yum/dnf command should be executed at all).
    """
    name = "foo"
    # Empty package list: nothing installed, so nothing to remove.
    list_pkgs_mock = MagicMock(return_value={})
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )
    salt_mock = {
        "cmd.run_all": cmd_mock,
        "lowpkg.version_cmp": rpm.version_cmp,
        "pkg_resource.parse_targets": MagicMock(
            return_value=({name: None}, "repository")
        ),
    }
    with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ), patch.dict(yumpkg.__salt__, salt_mock):
        # Test yum
        with patch.dict(yumpkg.__context__, {"yum_bin": "yum"}), patch.dict(
            yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}
        ):
            yumpkg.remove(name)
            cmd_mock.assert_not_called()
        # Test dnf
        # NOTE(review): pop assumes "yum_bin" survives the patch.dict exit
        # above — TODO confirm this doesn't KeyError when run in isolation.
        yumpkg.__context__.pop("yum_bin")
        cmd_mock.reset_mock()
        with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict(
            yumpkg.__grains__, {"os": "Fedora", "osrelease": 27}
        ):
            yumpkg.remove(name)
            cmd_mock.assert_not_called()
def test_upgrade_with_options():
    """
    Verify upgrade renders repo, branch, exclude and comma-separated setopt
    options into the expected yum command line.
    """
    with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        # with fromrepo: implies --disablerepo=* plus --enablerepo=<repo>;
        # each comma-separated setopt entry becomes its own --setopt pair.
        cmd = MagicMock(return_value={"retcode": 0})
        with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}):
            yumpkg.upgrade(
                refresh=False,
                fromrepo="good",
                exclude="kernel*",
                branch="foo",
                setopt="obsoletes=0,plugins=0",
            )
            cmd.assert_called_once_with(
                [
                    "yum",
                    "--quiet",
                    "-y",
                    "--disablerepo=*",
                    "--enablerepo=good",
                    "--branch=foo",
                    "--setopt",
                    "obsoletes=0",
                    "--setopt",
                    "plugins=0",
                    "--exclude=kernel*",
                    "upgrade",
                ],
                env={},
                output_loglevel="trace",
                python_shell=False,
            )
        # without fromrepo: explicit enable/disable repos are passed through
        cmd = MagicMock(return_value={"retcode": 0})
        with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}):
            yumpkg.upgrade(
                refresh=False,
                enablerepo="good",
                disablerepo="bad",
                exclude="kernel*",
                branch="foo",
                setopt="obsoletes=0,plugins=0",
            )
            cmd.assert_called_once_with(
                [
                    "yum",
                    "--quiet",
                    "-y",
                    "--disablerepo=bad",
                    "--enablerepo=good",
                    "--branch=foo",
                    "--setopt",
                    "obsoletes=0",
                    "--setopt",
                    "plugins=0",
                    "--exclude=kernel*",
                    "upgrade",
                ],
                env={},
                output_loglevel="trace",
                python_shell=False,
            )
def test_info_installed_with_all_versions():
    """
    Test the return information of all versions for the named package(s), installed on the system.
    :return:
    """
    # Canned lowpkg.info output: virgo-dummy has two versions installed
    # (i686 and x86_64), libopenssl1_0_0 has one (x86_64).
    run_out = {
        "virgo-dummy": [
            {
                "build_date": "2015-07-09T10:55:19Z",
                "vendor": "openSUSE Build Service",
                "description": (
                    "This is the Virgo dummy package used for testing SUSE Manager"
                ),
                "license": "GPL-2.0",
                "build_host": "sheep05",
                "url": "http://www.suse.com",
                "build_date_time_t": 1436432119,
                "relocations": "(not relocatable)",
                "source_rpm": "virgo-dummy-1.0-1.1.src.rpm",
                "install_date": "2016-02-23T16:31:57Z",
                "install_date_time_t": 1456241517,
                "summary": "Virgo dummy package",
                "version": "1.0",
                "signature": (
                    "DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9"
                ),
                "release": "1.1",
                "group": "Applications/System",
                "arch": "i686",
                "size": "17992",
            },
            {
                "build_date": "2015-07-09T10:15:19Z",
                "vendor": "openSUSE Build Service",
                "description": (
                    "This is the Virgo dummy package used for testing SUSE Manager"
                ),
                "license": "GPL-2.0",
                "build_host": "sheep05",
                "url": "http://www.suse.com",
                "build_date_time_t": 1436432119,
                "relocations": "(not relocatable)",
                "source_rpm": "virgo-dummy-1.0-1.1.src.rpm",
                "install_date": "2016-02-23T16:31:57Z",
                "install_date_time_t": 14562415127,
                "summary": "Virgo dummy package",
                "version": "1.0",
                "signature": (
                    "DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9"
                ),
                "release": "1.1",
                "group": "Applications/System",
                "arch": "x86_64",
                "size": "13124",
            },
        ],
        "libopenssl1_0_0": [
            {
                "build_date": "2015-11-04T23:20:34Z",
                "vendor": "SUSE LLC <https://www.suse.com/>",
                "description": "The OpenSSL Project is a collaborative effort.",
                "license": "OpenSSL",
                "build_host": "sheep11",
                "url": "https://www.openssl.org/",
                "build_date_time_t": 1446675634,
                "relocations": "(not relocatable)",
                "source_rpm": "openssl-1.0.1i-34.1.src.rpm",
                "install_date": "2016-02-23T16:31:35Z",
                "install_date_time_t": 1456241495,
                "summary": "Secure Sockets and Transport Layer Security",
                "version": "1.0.1i",
                "signature": (
                    "RSA/SHA256, Wed Nov 4 22:21:34 2015, Key ID 70af9e8139db7c82"
                ),
                "release": "34.1",
                "group": "Productivity/Networking/Security",
                "packager": "https://www.suse.com/",
                "arch": "x86_64",
                "size": "2576912",
            }
        ],
    }
    with patch.dict(yumpkg.__salt__, {"lowpkg.info": MagicMock(return_value=run_out)}):
        installed = yumpkg.info_installed(all_versions=True)
        # Test overall products length
        assert len(installed) == 2
        # Test multiple versions for the same package
        for pkg_name, pkg_info_list in installed.items():
            # BUGFIX: the conditional must be parenthesized. The previous
            # form ``assert len(...) == 2 if cond else 1`` asserted the bare
            # truthy literal 1 for every non-virgo package, never checking
            # its version count.
            assert len(pkg_info_list) == (2 if pkg_name == "virgo-dummy" else 1)
            for info in pkg_info_list:
                assert info["arch"] in ("x86_64", "i686")
def test_pkg_hold_yum():
    """
    Tests that we properly identify versionlock plugin when using yum
    for RHEL/CentOS 7 and Fedora < 22
    """
    versionlock_pkgs = {
        "yum-plugin-versionlock": "0:1.0.0-0.n.el7",
        "yum-versionlock": "0:1.0.0-0.n.el7",
    }
    expected_args = ["yum", "versionlock", "foo"]
    expected_kwargs = {"env": {}, "output_loglevel": "trace", "python_shell": False}

    # Scenario 1: RHEL/CentOS 7 -- plain yum is the package manager.
    run_all = MagicMock(return_value={"retcode": 0})
    with patch.object(
        yumpkg, "list_pkgs", MagicMock(return_value=versionlock_pkgs)
    ), patch.object(yumpkg, "list_holds", MagicMock(return_value=[])), patch.dict(
        yumpkg.__salt__, {"cmd.run_all": run_all}
    ), patch("salt.utils.systemd.has_scope", MagicMock(return_value=False)):
        yumpkg.hold("foo")
        run_all.assert_called_once_with(expected_args, **expected_kwargs)

    # Scenario 2: Fedora 20 -- still yum (dnf only took over at Fedora 22).
    run_all = MagicMock(return_value={"retcode": 0})
    with patch.dict(yumpkg.__context__, {"yum_bin": "yum"}), patch.dict(
        yumpkg.__grains__, {"os": "Fedora", "osrelease": 20}
    ), patch.object(
        yumpkg, "list_pkgs", MagicMock(return_value=versionlock_pkgs)
    ), patch.object(yumpkg, "list_holds", MagicMock(return_value=[])), patch.dict(
        yumpkg.__salt__, {"cmd.run_all": run_all}
    ), patch("salt.utils.systemd.has_scope", MagicMock(return_value=False)):
        yumpkg.hold("foo")
        run_all.assert_called_once_with(expected_args, **expected_kwargs)
def test_pkg_hold_tdnf():
    """
    Tests that we raise a SaltInvocationError if we try to use
    hold-related functions on Photon OS.

    tdnf has no versionlock plugin, so ``hold`` must refuse outright.
    """
    with patch.dict(yumpkg.__context__, {"yum_bin": "tdnf"}):
        # The previous version bound the context manager to an unused
        # ``exc_info`` variable; the bare form expresses the intent directly.
        with pytest.raises(SaltInvocationError):
            yumpkg.hold("foo")
def test_pkg_hold_dnf():
    """
    Tests that we properly identify versionlock plugin when using dnf
    for RHEL/CentOS 8 and Fedora >= 22
    """
    # Test RHEL/CentOS 8
    list_pkgs_mock = {
        "python2-dnf-plugin-versionlock": "0:1.0.0-0.n.el8",
        "python3-dnf-plugin-versionlock": "0:1.0.0-0.n.el8",
    }
    # Drop any binary cached by an earlier test. A default of None keeps this
    # from raising KeyError when the key was never set (the bare .pop() made
    # this test depend on test-execution order).
    yumpkg.__context__.pop("yum_bin", None)
    cmd = MagicMock(return_value={"retcode": 0})
    with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict(
        yumpkg.__grains__, {"osmajorrelease": 8}
    ), patch.object(
        yumpkg, "list_pkgs", MagicMock(return_value=list_pkgs_mock)
    ), patch.object(
        yumpkg, "list_holds", MagicMock(return_value=[])
    ), patch.dict(
        yumpkg.__salt__, {"cmd.run_all": cmd}
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        yumpkg.hold("foo")
        cmd.assert_called_once_with(
            ["dnf", "versionlock", "foo"],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )
    # Test Fedora 26+
    cmd = MagicMock(return_value={"retcode": 0})
    with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict(
        yumpkg.__grains__, {"os": "Fedora", "osrelease": 26}
    ), patch.object(
        yumpkg, "list_pkgs", MagicMock(return_value=list_pkgs_mock)
    ), patch.object(
        yumpkg, "list_holds", MagicMock(return_value=[])
    ), patch.dict(
        yumpkg.__salt__, {"cmd.run_all": cmd}
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        yumpkg.hold("foo")
        cmd.assert_called_once_with(
            ["dnf", "versionlock", "foo"],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )
    # Test Fedora 22-25 -- the versionlock plugin shipped under the
    # "dnf-plugins-extras" package names in that range.
    list_pkgs_mock = {
        "python-dnf-plugins-extras-versionlock": "0:1.0.0-0.n.el8",
        "python3-dnf-plugins-extras-versionlock": "0:1.0.0-0.n.el8",
    }
    cmd = MagicMock(return_value={"retcode": 0})
    with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict(
        yumpkg.__grains__, {"os": "Fedora", "osrelease": 25}
    ), patch.object(
        yumpkg, "list_pkgs", MagicMock(return_value=list_pkgs_mock)
    ), patch.object(
        yumpkg, "list_holds", MagicMock(return_value=[])
    ), patch.dict(
        yumpkg.__salt__, {"cmd.run_all": cmd}
    ), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        yumpkg.hold("foo")
        cmd.assert_called_once_with(
            ["dnf", "versionlock", "foo"],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )
@pytest.mark.skipif(not yumpkg.HAS_YUM, reason="Could not import yum")
def test_yum_base_error():
    """A CommandExecutionError raised by yum.YumBase propagates out of _get_yum_config."""
    with patch("yum.YumBase", MagicMock(side_effect=CommandExecutionError)):
        with pytest.raises(CommandExecutionError):
            yumpkg._get_yum_config()
def test_group_info():
    """
    Test yumpkg.group_info parsing
    """
    # Expected structured result parsed from the mocked CLI output below.
    expected = {
        "conditional": [],
        "default": ["qgnomeplatform", "xdg-desktop-portal-gtk"],
        "description": (
            "GNOME is a highly intuitive and user friendly desktop environment."
        ),
        "group": "GNOME",
        "id": "gnome-desktop",
        "mandatory": [
            "NetworkManager-libreswan-gnome",
            "PackageKit-command-not-found",
            "PackageKit-gtk3-module",
            "abrt-desktop",
            "at-spi2-atk",
            "at-spi2-core",
            "avahi",
            "baobab",
            "caribou",
            "caribou-gtk2-module",
            "caribou-gtk3-module",
            "cheese",
            "chrome-gnome-shell",
            "compat-cheese314",
            "control-center",
            "dconf",
            "empathy",
            "eog",
            "evince",
            "evince-nautilus",
            "file-roller",
            "file-roller-nautilus",
            "firewall-config",
            "firstboot",
            "fprintd-pam",
            "gdm",
            "gedit",
            "glib-networking",
            "gnome-bluetooth",
            "gnome-boxes",
            "gnome-calculator",
            "gnome-classic-session",
            "gnome-clocks",
            "gnome-color-manager",
            "gnome-contacts",
            "gnome-dictionary",
            "gnome-disk-utility",
            "gnome-font-viewer",
            "gnome-getting-started-docs",
            "gnome-icon-theme",
            "gnome-icon-theme-extras",
            "gnome-icon-theme-symbolic",
            "gnome-initial-setup",
            "gnome-packagekit",
            "gnome-packagekit-updater",
            "gnome-screenshot",
            "gnome-session",
            "gnome-session-xsession",
            "gnome-settings-daemon",
            "gnome-shell",
            "gnome-software",
            "gnome-system-log",
            "gnome-system-monitor",
            "gnome-terminal",
            "gnome-terminal-nautilus",
            "gnome-themes-standard",
            "gnome-tweak-tool",
            "gnome-user-docs",
            "gnome-weather",
            "gucharmap",
            "gvfs-afc",
            "gvfs-afp",
            "gvfs-archive",
            "gvfs-fuse",
            "gvfs-goa",
            "gvfs-gphoto2",
            "gvfs-mtp",
            "gvfs-smb",
            "initial-setup-gui",
            "libcanberra-gtk2",
            "libcanberra-gtk3",
            "libproxy-mozjs",
            "librsvg2",
            "libsane-hpaio",
            "metacity",
            "mousetweaks",
            "nautilus",
            "nautilus-sendto",
            "nm-connection-editor",
            "orca",
            "redhat-access-gui",
            "sane-backends-drivers-scanners",
            "seahorse",
            "setroubleshoot",
            "sushi",
            "totem",
            "totem-nautilus",
            "vinagre",
            "vino",
            "xdg-user-dirs-gtk",
            "yelp",
        ],
        "optional": [
            # NOTE(review): the leading "" presumably corresponds to a
            # whitespace-only line in the mocked output -- confirm against
            # yumpkg.group_info's line handling.
            "",
            "alacarte",
            "dconf-editor",
            "dvgrab",
            "fonts-tweak-tool",
            "gconf-editor",
            "gedit-plugins",
            "gnote",
            "libappindicator-gtk3",
            "seahorse-nautilus",
            "seahorse-sharing",
            "vim-X11",
            "xguest",
        ],
        "type": "package group",
    }
    # Mocked `yum group info` output. The leading '=' / '-' markers on
    # package lines do not affect which section a package lands in above --
    # presumably install-state flags stripped by group_info; verify there.
    cmd_out = """Group: GNOME
Group-Id: gnome-desktop
Description: GNOME is a highly intuitive and user friendly desktop environment.
Mandatory Packages:
=NetworkManager-libreswan-gnome
=PackageKit-command-not-found
=PackageKit-gtk3-module
abrt-desktop
=at-spi2-atk
=at-spi2-core
=avahi
=baobab
-caribou
-caribou-gtk2-module
-caribou-gtk3-module
=cheese
=chrome-gnome-shell
=compat-cheese314
=control-center
=dconf
=empathy
=eog
=evince
=evince-nautilus
=file-roller
=file-roller-nautilus
=firewall-config
=firstboot
fprintd-pam
=gdm
=gedit
=glib-networking
=gnome-bluetooth
=gnome-boxes
=gnome-calculator
=gnome-classic-session
=gnome-clocks
=gnome-color-manager
=gnome-contacts
=gnome-dictionary
=gnome-disk-utility
=gnome-font-viewer
=gnome-getting-started-docs
=gnome-icon-theme
=gnome-icon-theme-extras
=gnome-icon-theme-symbolic
=gnome-initial-setup
=gnome-packagekit
=gnome-packagekit-updater
=gnome-screenshot
=gnome-session
=gnome-session-xsession
=gnome-settings-daemon
=gnome-shell
=gnome-software
=gnome-system-log
=gnome-system-monitor
=gnome-terminal
=gnome-terminal-nautilus
=gnome-themes-standard
=gnome-tweak-tool
=gnome-user-docs
=gnome-weather
=gucharmap
=gvfs-afc
=gvfs-afp
=gvfs-archive
=gvfs-fuse
=gvfs-goa
=gvfs-gphoto2
=gvfs-mtp
=gvfs-smb
initial-setup-gui
=libcanberra-gtk2
=libcanberra-gtk3
=libproxy-mozjs
=librsvg2
=libsane-hpaio
=metacity
=mousetweaks
=nautilus
=nautilus-sendto
=nm-connection-editor
=orca
-redhat-access-gui
=sane-backends-drivers-scanners
=seahorse
=setroubleshoot
=sushi
=totem
=totem-nautilus
=vinagre
=vino
=xdg-user-dirs-gtk
=yelp
Default Packages:
=qgnomeplatform
=xdg-desktop-portal-gtk
Optional Packages:
alacarte
dconf-editor
dvgrab
fonts-tweak-tool
gconf-editor
gedit-plugins
gnote
libappindicator-gtk3
seahorse-nautilus
seahorse-sharing
vim-X11
xguest
    """
    with patch.dict(
        yumpkg.__salt__, {"cmd.run_stdout": MagicMock(return_value=cmd_out)}
    ):
        info = yumpkg.group_info("@gnome-desktop")
        assert info == expected
def test_get_repo_with_existent_repo(list_repos_var):
    """
    Test get_repo with an existent repository
    Expected return is a populated dictionary
    """
    target_repo = "base-source"
    repo_kwargs = {
        "baseurl": "http://vault.centos.org/centos/$releasever/os/Source/",
        "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
        "name": "CentOS-$releasever - Base Sources",
        "enabled": True,
    }
    # _parse_repo_file is mocked to return its (header, {repo: options}) shape.
    parsed_file = (
        "",
        {
            "base-source": {
                "baseurl": "http://vault.centos.org/centos/$releasever/os/Source/",
                "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
                "name": "CentOS-$releasever - Base Sources",
                "enabled": "1",
            }
        },
    )
    expected = {
        "baseurl": "http://vault.centos.org/centos/$releasever/os/Source/",
        "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
        "name": "CentOS-$releasever - Base Sources",
        "enabled": "1",
    }
    with patch.object(
        yumpkg, "list_repos", autospec=True, return_value=list_repos_var
    ), patch.object(
        yumpkg, "_parse_repo_file", autospec=True, return_value=parsed_file
    ):
        ret = yumpkg.get_repo(target_repo, **repo_kwargs)
    assert ret == expected, ret
def test_get_repo_with_non_existent_repo(list_repos_var):
    """
    Test get_repo with an non existent repository
    Expected return is an empty dictionary
    """
    target_repo = "non-existent-repository"
    repo_kwargs = {
        "baseurl": "http://fake.centos.org/centos/$releasever/os/Non-Existent/",
        "gpgkey": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7",
        "name": "CentOS-$releasever - Non-Existent Repository",
        "enabled": True,
    }
    with patch.object(yumpkg, "list_repos", autospec=True, return_value=list_repos_var):
        ret = yumpkg.get_repo(target_repo, **repo_kwargs)
    # An unknown repo name yields an empty dict rather than an error.
    assert ret == {}, ret
def test_pkg_update_dnf():
    """
    Tests that the proper CLI options are added when obsoletes=False
    """
    name = "foo"
    old_version = "1.2.2-1.fc31"
    new_version = "1.2.3-1.fc31"
    run_all_mock = MagicMock(return_value={"retcode": 0})
    # First call reports the old package set, second call the upgraded one.
    list_pkgs_mock = MagicMock(side_effect=[{name: old_version}, {name: new_version}])
    parse_targets_mock = MagicMock(return_value=({"foo": None}, "repository"))
    with patch.dict(
        yumpkg.__salt__,
        {
            "cmd.run_all": run_all_mock,
            "pkg_resource.parse_targets": parse_targets_mock,
        },
    ), patch.object(yumpkg, "refresh_db", MagicMock()), patch.object(
        yumpkg, "list_pkgs", list_pkgs_mock
    ), patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")), patch(
        "salt.utils.systemd.has_scope", MagicMock(return_value=False)
    ):
        ret = yumpkg.update(name, setopt="obsoletes=0,plugins=0")
        assert ret == {name: {"old": old_version, "new": new_version}}, ret
        # "obsoletes=0" must be rewritten to "obsoletes=False" on the CLI.
        expected_argv = [
            "dnf",
            "--quiet",
            "-y",
            "--setopt",
            "plugins=0",
            "--setopt",
            "obsoletes=False",
            "upgrade",
            "foo",
        ]
        run_all_mock.assert_called_once_with(
            expected_argv,
            env={},
            output_loglevel="trace",
            python_shell=False,
        )
def test_call_yum_default():
    """
    Call default Yum/Dnf: no systemd scope wrapper, just the configured binary.
    :return:
    """
    salt_dunder = {
        "cmd.run_all": MagicMock(),
        "config.get": MagicMock(return_value=False),
    }
    with patch.dict(yumpkg.__context__, {"yum_bin": "fake-yum"}), patch.dict(
        yumpkg.__salt__, salt_dunder
    ):
        yumpkg._call_yum(["-y", "--do-something"])  # pylint: disable=W0106
        salt_dunder["cmd.run_all"].assert_called_once_with(
            ["fake-yum", "-y", "--do-something"],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )
@patch("salt.utils.systemd.has_scope", MagicMock(return_value=True))
def test_call_yum_in_scope():
    """
    Call Yum/Dnf within the scope: the command is wrapped in systemd-run --scope.
    :return:
    """
    salt_dunder = {
        "cmd.run_all": MagicMock(),
        "config.get": MagicMock(return_value=True),
    }
    with patch.dict(yumpkg.__context__, {"yum_bin": "fake-yum"}), patch.dict(
        yumpkg.__salt__, salt_dunder
    ):
        yumpkg._call_yum(["-y", "--do-something"])  # pylint: disable=W0106
        salt_dunder["cmd.run_all"].assert_called_once_with(
            ["systemd-run", "--scope", "fake-yum", "-y", "--do-something"],
            env={},
            output_loglevel="trace",
            python_shell=False,
        )
def test_call_yum_with_kwargs():
    """
    Call Yum/Dnf with the optional keyword arguments: they must be
    forwarded verbatim to cmd.run_all.
    :return:
    """
    salt_dunder = {
        "cmd.run_all": MagicMock(),
        "config.get": MagicMock(return_value=False),
    }
    with patch.dict(yumpkg.__context__, {"yum_bin": "fake-yum"}), patch.dict(
        yumpkg.__salt__, salt_dunder
    ):
        yumpkg._call_yum(
            ["-y", "--do-something"],
            python_shell=True,
            output_loglevel="quiet",
            ignore_retcode=False,
            username="Darth Vader",
        )  # pylint: disable=W0106
        salt_dunder["cmd.run_all"].assert_called_once_with(
            ["fake-yum", "-y", "--do-something"],
            env={},
            ignore_retcode=False,
            output_loglevel="quiet",
            python_shell=True,
            username="Darth Vader",
        )
@pytest.mark.skipif(not salt.utils.systemd.booted(), reason="Requires systemd")
def test_services_need_restart():
    """
    Test that dnf needs-restarting output is parsed and
    salt.utils.systemd.pid_to_service is called as expected.
    """
    needs_restarting = "123 : /usr/bin/firewalld\n456 : /usr/bin/salt-minion\n"
    stdout_mock = Mock(return_value=needs_restarting)
    # Each PID resolves to its service name, in order.
    pid_to_service_mock = Mock(side_effect=["firewalld", "salt-minion"])
    with patch("salt.modules.yumpkg._yum", Mock(return_value="dnf")):
        with patch.dict(yumpkg.__salt__, {"cmd.run_stdout": stdout_mock}), patch(
            "salt.utils.systemd.pid_to_service", pid_to_service_mock
        ):
            assert sorted(yumpkg.services_need_restart()) == ["firewalld", "salt-minion"]
            pid_to_service_mock.assert_has_calls([call("123"), call("456")])
def test_services_need_restart_requires_systemd():
    """Test that yumpkg.services_need_restart raises an error if systemd is unavailable."""
    with patch("salt.modules.yumpkg._yum", Mock(return_value="dnf")):
        with patch("salt.utils.systemd.booted", Mock(return_value=False)):
            with pytest.raises(CommandExecutionError):
                yumpkg.services_need_restart()
def test_services_need_restart_requires_dnf():
    """Test that yumpkg.services_need_restart raises an error if DNF is unavailable."""
    with patch("salt.modules.yumpkg._yum", Mock(return_value="yum")):
        with pytest.raises(CommandExecutionError):
            yumpkg.services_need_restart()
def test_61003_pkg_should_not_fail_when_target_not_in_old_pkgs():
    # During the 3004rc1 we discovered that if list_pkgs was missing
    # packages that were returned by parse_targets then yumpkg.remove would
    # catch on fire. This ensures that won't go undetected again.
    parse_targets_mock = Mock(
        return_value=[
            {
                "fnord-this-is-not-actually-a-package": "fnord-this-is-not-actually-a-package-1.2.3"
            }
        ]
    )
    with patch(
        "salt.modules.yumpkg.list_pkgs", return_value={}, autospec=True
    ), patch.dict(yumpkg.__salt__, {"pkg_resource.parse_targets": parse_targets_mock}):
        yumpkg.remove()
@pytest.fixture(
    ids=["yum", "dnf"],
    params=[
        {
            "context": {"yum_bin": "yum"},
            "grains": {"os": "CentOS", "osrelease": 7},
            "cmd": ["yum", "-y"],
        },
        {
            "context": {"yum_bin": "dnf"},
            "grains": {"os": "Fedora", "osrelease": 27},
            "cmd": ["dnf", "-y", "--best", "--allowerasing"],
        },
    ],
)
def yum_and_dnf(request):
    """Yield the expected base argv while context and grains are patched per manager."""
    param = request.param
    with patch.dict(yumpkg.__context__, param["context"]):
        with patch.dict(yumpkg.__grains__, param["grains"]):
            with patch.dict(pkg_resource.__grains__, param["grains"]):
                yield param["cmd"]
@pytest.mark.parametrize(
    "new,full_pkg_string",
    (
        (42, "foo-42"),
        (12, "foo-12"),
        ("99:1.2.3", "foo-1.2.3"),
    ),
)
def test_59705_version_as_accidental_float_should_become_text(
    new, full_pkg_string, yum_and_dnf
):
    """
    Regression test for #59705: a version passed as a number must be coerced
    to text so the install command contains "<name>-<version>".
    """
    name = "foo"
    expected_cmd = yum_and_dnf + ["install", full_pkg_string]
    cmd_mock = MagicMock(
        return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""}
    )

    def fake_parse(*args, **kwargs):
        # Echo the requested version back, mimicking pkg_resource.parse_targets.
        return {name: kwargs["version"]}, "repository"

    patch_yum_salt = patch.dict(
        yumpkg.__salt__,
        {
            "cmd.run_all": cmd_mock,
            "lowpkg.version_cmp": rpm.version_cmp,
            "pkg_resource.parse_targets": fake_parse,
        },
    )
    patch_list_pkgs = patch.object(
        yumpkg, "list_pkgs", return_value={"foo": ["foo-42"]}
    )
    # FIX: the target used to read "salt.module.yumpkg.list_downloaded" (typo,
    # no such module) and the patcher was never entered, which masked the typo.
    patch_list_downloaded = patch(
        "salt.modules.yumpkg.list_downloaded", autospec=True, return_value={}
    )
    patch_systemd = patch("salt.utils.systemd.has_scope", MagicMock(return_value=False))
    with patch_list_pkgs, patch_systemd, patch_yum_salt, patch_list_downloaded:
        yumpkg.install("foo", version=new)
        # Renamed from "call" to avoid shadowing unittest.mock.call.
        install_argv = cmd_mock.mock_calls[0][1][0]
        assert install_argv == expected_cmd
|
import unittest, ConfigParser, json, datetime, logging
import mediacloud.api
# Fixed identifiers used by the integration tests below.
# NOTE(review): the tag-set IDs look like live Media Cloud IDs -- confirm
# they still exist on the server before relying on them.
TEST_USER_EMAIL = "mc-api-test@media.mit.edu"
TEST_TAG_SET_ID = 1727
GEO_TAG_SET_ID = 1011
class ApiBaseTest(unittest.TestCase):
    # Shared query fixtures reused throughout the integration tests below.
    QUERY = '( mars OR robot )'
    FILTER_QUERY = '+publish_date:[2013-01-01T00:00:00Z TO 2013-02-01T00:00:00Z] AND +media_sets_id:1'
    SENTENCE_COUNT = 100

    def setUp(self):
        # Build a regular (non-admin) client from the local config file;
        # every test in the subclasses talks to the live Media Cloud API.
        self._config = ConfigParser.ConfigParser()
        self._config.read('mc-client.config')
        self._mc = mediacloud.api.MediaCloud( self._config.get('api','key'))
class AdminApiBaseTest(unittest.TestCase):
    # Base class for tests that need the admin (full-access) API client.

    def setUp(self):
        # Same config file as ApiBaseTest, but builds an AdminMediaCloud client.
        self._config = ConfigParser.ConfigParser()
        self._config.read('mc-client.config')
        self._mc = mediacloud.api.AdminMediaCloud( self._config.get('api','key') )
class ApiBigQueryTest(ApiBaseTest):

    def testBigQuery(self):
        """Sanity-check that a very long query string still returns results."""
        day_clause = "(publish_date:[2016-05-16T00:00:00Z TO 2016-05-17T00:00:00Z]) AND (tags_id_media:(8875027))"
        # "110" was determined experimentally
        big_query = " AND ".join([day_clause] * 110)
        results = self._mc.sentenceCount(big_query)
        self.assertTrue(results['count'] > 0)
class ApiAllFieldsOptionTest(ApiBaseTest):

    def testAllFieldsOnMedia(self):
        """setAllFields(True) should expose extra fields on media results."""

        def check_media(media, expect_extra):
            self.assertNotEqual(media, None)
            self.assertEqual(media['media_id'], 1751)
            self.assertTrue('url' in media)
            if expect_extra:
                self.assertTrue('foreign_rss_links' in media)
            else:
                self.assertFalse('foreign_rss_links' in media)

        # a regular query has no extra fields
        check_media(self._mc.media(1751), False)
        # after enabling all-fields mode the extras appear
        self._mc.setAllFields(True)
        check_media(self._mc.media(1751), True)
class AuthTokenTest(ApiBaseTest):

    def testAuthToken(self):
        """A request made with a bogus key must fail; a valid key restores service."""
        valid_auth_token = self._config.get('api','key')
        fake_auth_token = 'these are not the keys you are looking for'
        # make sure setAuthToken works
        self._mc.setAuthToken(fake_auth_token)
        self.assertEqual(self._mc._auth_token,fake_auth_token)
        # see a request with a bad key fail. The old bare "except:" also
        # swallowed KeyboardInterrupt/SystemExit; assertRaises(Exception)
        # keeps the intent while catching only real errors.
        with self.assertRaises(Exception):
            self._mc.media(1)
        # set the key back to a valid one
        self._mc.setAuthToken(valid_auth_token)

    def testUserAuthToken(self):
        """Requesting a user auth token with bad credentials must fail."""
        with self.assertRaises(Exception):
            self._mc.userAuthToken('user@funkytown.us','1234')
class PublishDateQueryTest(ApiBaseTest):

    def testPublishDateQuery(self):
        """
        publish_date_query should emit Solr range syntax: '[' / ']' for
        inclusive bounds and '{' / '}' for exclusive ones.
        """
        # FIX: the original used zero-prefixed literals (2014,06,02), which
        # Python 2 parses as octal (same value here, by luck) and Python 3
        # rejects as a SyntaxError. Plain ints are value-identical.
        start_date = datetime.date(2014, 6, 2)
        end_date = datetime.date(2014, 6, 3)
        date_query_default = self._mc.publish_date_query( start_date, end_date )
        self.assertEqual( date_query_default, "publish_date:[2014-06-02T00:00:00Z TO 2014-06-03T00:00:00Z}" )
        date_query_inclusive_exclusive = self._mc.publish_date_query( start_date, end_date, start_date_inclusive=True, end_date_inclusive=False)
        self.assertEqual( date_query_inclusive_exclusive, "publish_date:[2014-06-02T00:00:00Z TO 2014-06-03T00:00:00Z}")
        date_query_inclusive_inclusive = self._mc.publish_date_query( start_date, end_date, start_date_inclusive=True, end_date_inclusive=True)
        self.assertEqual( date_query_inclusive_inclusive, "publish_date:[2014-06-02T00:00:00Z TO 2014-06-03T00:00:00Z]")
        date_query_exclusive_inclusive = self._mc.publish_date_query( start_date, end_date, start_date_inclusive=False, end_date_inclusive=True)
        self.assertEqual( date_query_exclusive_inclusive, "publish_date:{2014-06-02T00:00:00Z TO 2014-06-03T00:00:00Z]")
        date_query_exclusive_exclusive = self._mc.publish_date_query( start_date, end_date, start_date_inclusive=False, end_date_inclusive=False)
        self.assertEqual( date_query_exclusive_exclusive, "publish_date:{2014-06-02T00:00:00Z TO 2014-06-03T00:00:00Z}")
        # each generated query should actually match sentences on the server
        self.assertTrue( self._mc.sentenceCount( date_query_default )[ 'count' ] > 0 )
        self.assertTrue( self._mc.sentenceCount( date_query_inclusive_exclusive )[ 'count' ] > 0 )
        self.assertTrue( self._mc.sentenceCount( date_query_inclusive_inclusive )[ 'count' ] > 0 )
        self.assertTrue( self._mc.sentenceCount( date_query_exclusive_exclusive )[ 'count' ] > 0 )
        self.assertTrue( self._mc.sentenceCount( date_query_exclusive_inclusive )[ 'count' ] > 0 )
class ApiMediaHealthTest(ApiBaseTest):

    def testMediaHealth(self):
        """Spot-check the health report for media source 2."""
        report = self._mc.mediaHealth(2)
        self.assertEqual(report['media_id'], '2')
        self.assertEqual(report['is_healthy'], 1)
        # the gap count must agree with the detailed gap list
        self.assertEqual(report['coverage_gaps'], len(report['coverage_gaps_list']))
        for key in ('start_date', 'end_date'):
            self.assertTrue(key in report)
class ApiMediaTest(ApiBaseTest):

    def testMedia(self):
        nyt = self._mc.media(1)
        self.assertNotEqual(nyt, None)
        self.assertEqual(nyt['media_id'], 1)
        self.assertEqual(nyt['name'], 'New York Times')
        self.assertTrue(len(nyt['media_source_tags']) > 0)

    def testMediaListWithName(self):
        matches = self._mc.mediaList(name_like='new york times')
        self.assertEqual(len(matches), 3)

    def testMediaList(self):
        page_one = self._mc.mediaList()
        self.assertNotEqual(page_one, None)
        self.assertEqual(len(page_one), 20)
        for entry in page_one:
            self.assertTrue(entry['media_id'] > 0)
        # page two starts just before page one's last id, so they overlap by one
        boundary_id = int(page_one[19]['media_id']) - 1
        self.assertTrue(boundary_id > 0)
        page_two = self._mc.mediaList(boundary_id)
        self.assertEqual(len(page_two), 20)
        for entry in page_two:
            self.assertTrue(entry['media_id'] > boundary_id)
        self.assertEqual(page_one[19]['media_id'], page_two[0]['media_id'])
        # a larger page size is honored
        long_page = self._mc.mediaList(0, 200)
        self.assertEqual(len(long_page), 200)

    def testMediaListWithTagId(self):
        matches = self._mc.mediaList(tags_id=8875027)  # US MSM
        self.assertTrue(len(matches) > 0)
class ApiControversyTest(ApiBaseTest):

    def testControversy(self):
        controversy = self._mc.controversy(1)
        self.assertEqual(controversy['controversies_id'], 1)
        self.assertEqual(controversy['name'], 'trayvon')

    def testControversyList(self):
        # verify it pulls some
        all_controversies = self._mc.controversyList()
        self.assertTrue(len(all_controversies) > 1)
        # filtering by a name fragment narrows the list
        filtered = self._mc.controversyList('prop')
        self.assertTrue(len(filtered) > 1)
        self.assertTrue(len(filtered) < len(all_controversies))
        # a fragment that matches nothing yields an empty list
        no_match = self._mc.controversyList('12335')
        self.assertEqual(len(no_match), 0)
class ApiControversyDumpTest(ApiBaseTest):

    def testControversyDump(self):
        dump = self._mc.controversyDump(557)
        self.assertEqual(dump['controversy_dumps_id'], 557)
        self.assertEqual(dump['controversies_id'], 1)

    def testControversyDumpList(self):
        # verify it pulls some
        all_dumps = self._mc.controversyDumpList()
        self.assertTrue(len(all_dumps) > 1)
        # filtering by controversy narrows the list
        filtered = self._mc.controversyDumpList(1)
        self.assertTrue(len(filtered) > 1)
        self.assertTrue(len(filtered) < len(all_dumps))
        # an unknown id yields nothing
        no_match = self._mc.controversyDumpList('12335')
        self.assertEqual(len(no_match), 0)
class ApiControversyDumpTimeSliceTest(ApiBaseTest):

    def testControversyDumpTimeSlice(self):
        time_slice = self._mc.controversyDumpTimeSlice(145)
        self.assertEqual(time_slice['controversy_dump_time_slices_id'], 145)
        self.assertEqual(time_slice['controversy_dumps_id'], 16)
        self.assertEqual(time_slice['model_num_media'], 4)

    def testControversyDumpTimeSliceList(self):
        # verify it pulls some
        all_slices = self._mc.controversyDumpTimeSliceList()
        self.assertTrue(len(all_slices) > 1)
        # filtering by dump id can only keep or shrink the list
        filtered = self._mc.controversyDumpTimeSliceList(controversy_dumps_id=16)
        self.assertTrue(len(filtered) > 1)
        self.assertTrue(len(filtered) <= len(all_slices))
class ApiTagsTest(ApiBaseTest):

    def _assertAllInTagSet(self, tags, tag_sets_id):
        # Every returned tag must belong to the requested tag set.
        for tag in tags:
            self.assertEqual(tag['tag_sets_id'], tag_sets_id)

    def testTags(self):
        tag = self._mc.tag(8876989)
        self.assertEqual(tag['tags_id'], 8876989)
        self.assertEqual(tag['tag'], 'JP')
        self.assertEqual(tag['tag_sets_id'], 597)

    def testTagList(self):
        # verify it only pulls tags from that one set
        page_one = self._mc.tagList(597)
        self.assertEqual(len(page_one), 20)
        self._assertAllInTagSet(page_one, 597)
        # make sure paging through a set works right
        page_two = self._mc.tagList(597, int(page_one[19]['tags_id']) - 1)
        self.assertEqual(len(page_two), 20)
        self._assertAllInTagSet(page_two, 597)
        self.assertEqual(page_one[19]['tags_id'], page_two[0]['tags_id'])
        # make sure you can pull a longer list of tags
        longer = self._mc.tagList(597, 0, 150)
        self.assertEqual(len(longer), 150)
        self._assertAllInTagSet(longer, 597)
        longest = self._mc.tagList(597, 0, 200)
        self.assertEqual(len(longest), 173)
        self._assertAllInTagSet(longest, 597)
        # try getting only the public tags in the set
        full_list = self._mc.tagList(6, rows=200)
        public_list = self._mc.tagList(6, rows=200, public_only=True)
        self.assertNotEqual(len(full_list), len(public_list))

    def testTagListSearch(self):
        # verify search works at all
        collection_tags = self._mc.tagList(name_like="collection")
        self.assertTrue(len(collection_tags) > 0, "Got %d tags matching 'collection'" % len(collection_tags))
        # verify search works on tags without descriptions
        geo_tags = self._mc.tagList(name_like="geonames_")
        self.assertTrue(len(geo_tags) > 0, "Got %d tags matching 'geonames_'" % len(geo_tags))
class ApiTagSetsTest(ApiBaseTest):

    def testTagSet(self):
        tag_set = self._mc.tagSet(597)
        self.assertEqual(tag_set['tag_sets_id'], 597)
        self.assertEqual(tag_set['name'], 'gv_country')

    def testTagSetList(self):
        page_one = self._mc.tagSetList()
        self.assertEqual(len(page_one), 20)
        # paging: start just before page one's last id so the lists overlap by one
        page_two = self._mc.tagSetList(int(page_one[19]['tag_sets_id']) - 1)
        self.assertEqual(len(page_two), 20)
        self.assertEqual(page_one[19]['tag_sets_id'], page_two[0]['tag_sets_id'])
        longer = self._mc.tagSetList(0, 50)
        self.assertEqual(len(longer), 50)
class ApiFeedsTest(ApiBaseTest):

    def testFeed(self):
        feed = self._mc.feed(1)
        self.assertEqual(feed['feeds_id'], 1)
        self.assertEqual(feed['name'], 'Bits')
        self.assertEqual(feed['media_id'], 1)

    def testFeedList(self):
        page_one = self._mc.feedList(1)
        self.assertEqual(len(page_one), 20)
        # paging: overlap by one with the previous page
        page_two = self._mc.feedList(1, int(page_one[19]['feeds_id']) - 1)
        self.assertEqual(len(page_two), 20)
        self.assertEqual(page_one[19]['feeds_id'], page_two[0]['feeds_id'])
        everything = self._mc.feedList(1, 0, 200)
        self.assertEqual(len(everything), 142)
class AdminApiStoriesTest(AdminApiBaseTest):

    def _checkStoryFields(self, story, sentences=False, text=False, corenlp=None):
        # Assert which optional payloads the server attached to a story.
        # 'is_fully_extracted' always travels together with 'story_text'.
        self.assertEqual('story_sentences' in story, sentences)
        self.assertEqual('story_text' in story, text)
        self.assertEqual('is_fully_extracted' in story, text)
        if corenlp is not None:
            self.assertEqual('corenlp' in story, corenlp)

    def testStoryWithSentences(self):
        story = self._mc.story(27456565, sentences=True)
        self.assertEqual(int(story['stories_id']), 27456565)
        self.assertEqual(story['media_id'], 1144)
        self._checkStoryFields(story, sentences=True)

    def testStoryWithText(self):
        story = self._mc.story(27456565, text=True)
        self.assertEqual(int(story['stories_id']), 27456565)
        self.assertEqual(story['media_id'], 1144)
        self._checkStoryFields(story, text=True)

    def testStoryList(self):
        stories = self._mc.storyList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY)
        self.assertNotEqual(len(stories), 0)
        for story in stories:
            self.assertTrue('bitly_click_count' in story)

    def testStoryCoreNlpList(self):
        stories = self._mc.storyCoreNlpList([261784668, 261784669])
        self.assertEqual(len(stories), 2)
        for story in stories:
            self._checkStoryFields(story, corenlp=True)
            self.assertTrue('stories_id' in story)

    def testStoryListDefaults(self):
        stories = self._mc.storyList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, rows=10)
        for story in stories:
            self._checkStoryFields(story, corenlp=False)

    def testStoryListWithCoreNlp(self):
        stories = self._mc.storyList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, corenlp=True, rows=10)
        for story in stories:
            self._checkStoryFields(story, corenlp=True)

    def testStoryListWithSentences(self):
        stories = self._mc.storyList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, sentences=True, rows=10)
        for story in stories:
            self._checkStoryFields(story, sentences=True, corenlp=False)

    def testStoryListWithText(self):
        stories = self._mc.storyList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, text=True, rows=10)
        for story in stories:
            self._checkStoryFields(story, text=True, corenlp=False)
class ApiStoriesWordMatrixTest(ApiBaseTest):

    def testStoryWordMatrix(self):
        day_query = "(publish_date:[2016-05-16T00:00:00Z TO 2016-05-17T00:00:00Z]) AND (tags_id_media:(8875027))"
        results = self._mc.storyWordMatrix("obama", day_query)
        for key in ("word_matrix", "word_list"):
            self.assertTrue(key in results)
class ApiStoriesTest(ApiBaseTest):

    def testStory(self):
        story = self._mc.story(27456565)
        self.assertEqual(int(story['stories_id']), 27456565)
        self.assertEqual(story['media_id'], 1144)
        # non-admin story calls must not carry the heavyweight payloads
        for absent_key in ('story_sentences', 'story_text', 'is_fully_extracted'):
            self.assertFalse(absent_key in story)
        self.assertTrue('bitly_click_count' in story)

    def testStoryPublic(self):
        story = self._mc.story(27456565)
        self.assertEqual(story['media_id'], 1144)
        self.assertTrue('story_sentences' not in story)
        for present_key in ('language', 'title', 'bitly_click_count'):
            self.assertTrue(present_key in story)

    def testStoryPublicList(self):
        stories = self._mc.storyList(self.QUERY, self.FILTER_QUERY)
        self.assertNotEqual(len(stories), 0)
        for story in stories:
            self.assertTrue('bitly_click_count' in story)

    def testStoryCount(self):
        results = self._mc.storyCount(self.QUERY, self.FILTER_QUERY)
        self.assertEqual(results['count'], 3817)
class AdminApiSentencesTest(AdminApiBaseTest):

    def testSentenceListSortingAscending(self):
        """Sentences requested oldest-first must arrive in ascending publish order."""
        results = self._mc.sentenceList(ApiBaseTest.QUERY,ApiBaseTest.FILTER_QUERY,0,ApiBaseTest.SENTENCE_COUNT,
            self._mc.SORT_PUBLISH_DATE_ASC)
        self.assertEqual(len(results['response']['docs']), ApiBaseTest.SENTENCE_COUNT)
        last_date = None
        for sentence in results['response']['docs']:
            this_date = datetime.datetime.strptime(sentence['publish_date'],self._mc.SENTENCE_PUBLISH_DATE_FORMAT)
            this_date = this_date.replace( second=0, microsecond=0) # sorting is by minute
            if last_date is not None:
                self.assertTrue(last_date <= this_date, "Date wrong: "+str(last_date)+" is not <= "+str(this_date))
            # FIX: a duplicated "last_date = this_date" (dead code) was removed
            last_date = this_date

    def testSentenceListSortingDescending(self):
        """Sentences requested newest-first must arrive in descending publish order."""
        results = self._mc.sentenceList(ApiBaseTest.QUERY,ApiBaseTest.FILTER_QUERY,0,ApiBaseTest.SENTENCE_COUNT,
            self._mc.SORT_PUBLISH_DATE_DESC)
        self.assertEqual(len(results['response']['docs']), ApiBaseTest.SENTENCE_COUNT)
        last_date = None
        for sentence in results['response']['docs']:
            this_date = datetime.datetime.strptime(sentence['publish_date'],self._mc.SENTENCE_PUBLISH_DATE_FORMAT)
            this_date = this_date.replace( second=0, microsecond=0) # sorting is by minute
            if last_date is not None:
                self.assertTrue(last_date >= this_date, "Date wrong: "+str(last_date)+" is not >= "+str(this_date))
            # FIX: a duplicated "last_date = this_date" (dead code) was removed
            last_date = this_date

    def testSentenceListSortingRadom(self):
        # we do random sort by telling we want the random sort, and then offsetting to a different start index
        results1 = self._mc.sentenceList(ApiBaseTest.QUERY,ApiBaseTest.FILTER_QUERY,0,ApiBaseTest.SENTENCE_COUNT,
            self._mc.SORT_RANDOM)
        self.assertEqual(len(results1['response']['docs']), ApiBaseTest.SENTENCE_COUNT)
        results2 = self._mc.sentenceList(ApiBaseTest.QUERY,ApiBaseTest.FILTER_QUERY,ApiBaseTest.SENTENCE_COUNT+3,ApiBaseTest.SENTENCE_COUNT,
            self._mc.SORT_RANDOM)
        self.assertEqual(len(results2['response']['docs']), ApiBaseTest.SENTENCE_COUNT)
        for idx in range(0,ApiBaseTest.SENTENCE_COUNT):
            self.assertNotEqual(results1['response']['docs'][idx]['stories_id'],results2['response']['docs'][idx]['stories_id'],
                "Stories in two different random sets are the same :-(")

    def testSentenceList(self):
        results = self._mc.sentenceList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY)
        self.assertEqual(int(results['responseHeader']['status']),0)
        self.assertEqual(int(results['response']['numFound']),6784)
        self.assertEqual(len(results['response']['docs']), 1000)

    def testSentenceListPaging(self):
        # test limiting rows returned
        results = self._mc.sentenceList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY,0,100)
        self.assertEqual(int(results['response']['numFound']), 6784)
        self.assertEqual(len(results['response']['docs']), 100)
        # test starting offset
        results = self._mc.sentenceList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY,6700)
        self.assertEqual(int(results['response']['numFound']), 6784)
        self.assertEqual(len(results['response']['docs']), 84)
class ApiSentencesTest(ApiBaseTest):
    """Read-only tests for the public sentence endpoints.

    The numeric expectations are pinned to the content of the test server
    at the time these tests were written.
    """
    def testSentence(self):
        """Fetch a single sentence by id and sanity-check its fields."""
        sentence_id = 3841125325
        sentence = self._mc.sentence(sentence_id)
        self.assertEqual(sentence['story_sentences_id'],sentence_id)
        self.assertEqual(sentence['stories_id'],321728712)
        self.assertTrue(len(sentence['sentence'])>0)
    def testSentenceCount(self):
        """Check sentenceCount totals and the date-split bucket counts.

        The assertions show the server picking split granularity from the
        date range (weekly, 3-day, daily), unless a daily split is forced
        via the final positional flag.
        """
        # basic counting
        results = self._mc.sentenceCount('obama','+media_id:1')
        self.assertTrue(int(results['count'])>10000)
        # counting with a default split weekly (>180 days)
        results = self._mc.sentenceCount('obama','+media_id:1',True,'2013-01-01','2014-01-01')
        self.assertEqual(results['split']['gap'],'+7DAYS')
        self.assertEqual(len(results['split']),56)
        # counting with a default split 3-day (<180 days, >90 days)
        results = self._mc.sentenceCount('obama','+media_id:1',True,'2014-01-01','2014-06-01')
        self.assertEqual(results['split']['gap'],'+3DAYS')
        self.assertEqual(len(results['split']),54)
        # counting with a default split daily (<90 days)
        results = self._mc.sentenceCount('obama','+media_id:1',True,'2014-01-01','2014-01-07')
        self.assertEqual(results['split']['gap'],'+1DAY')
        self.assertEqual(len(results['split']),9)
        # test forcing a daily split
        results = self._mc.sentenceCount('obama','+media_id:1',True,'2014-01-01','2014-06-01',True)
        self.assertEqual(results['split']['gap'],'+1DAY')
        self.assertEqual(len(results['split']),154)
    def testFieldCount(self):
        """Exercise sentenceFieldCount: sentence vs story counts, stats, tag-set filter."""
        # regular call for sentence counts
        sentence_results = self._mc.sentenceFieldCount('obama','+media_id:1')
        self.assertFalse('stats' in sentence_results)
        self.assertFalse('counts' in sentence_results)
        self.assertTrue(len(sentence_results)>0)
        [self.assertTrue(tag['count']) for tag in sentence_results]
        # regular call for story counts
        story_results = self._mc.sentenceFieldCount('obama','+media_id:1',field='tags_id_stories')
        self.assertFalse('stats' in story_results)
        self.assertFalse('counts' in story_results)
        self.assertTrue(len(story_results)>0)
        [self.assertTrue(tag['count']) for tag in story_results]
        # compare - the two fields should give different result sets
        self.assertTrue(len(story_results)!=len(sentence_results))
        # with stats the payload becomes a dict with 'stats' and 'counts'
        results = self._mc.sentenceFieldCount('obama','+media_id:1',include_stats=True)
        self.assertTrue('stats' in results)
        self.assertTrue('counts' in results)
        # filter by tag set - every returned tag must belong to that set
        sentence_results = self._mc.sentenceFieldCount('obama','+media_id:1',tag_sets_id=GEO_TAG_SET_ID)
        self.assertTrue(len(sentence_results)>0)
        [self.assertEqual(tag['tag_sets_id'],GEO_TAG_SET_ID) for tag in sentence_results]
class ApiWordCountTest(ApiBaseTest):
    """Tests for the wordCount endpoint (defaults, sorting, stopwords, stats)."""
    QUERY = 'robots'
    def testResults(self):
        """By default the endpoint returns the top 500 terms for the query."""
        word_counts = self._mc.wordCount(self.QUERY, self.FILTER_QUERY)
        self.assertEqual(len(word_counts), 500)
        self.assertEqual(word_counts[3]['term'], u'science')
    def testSort(self):
        """Returned terms must be ordered by descending count."""
        word_counts = self._mc.wordCount(self.QUERY, self.FILTER_QUERY)
        previous_count = 10000000000  # sentinel larger than any real count
        for entry in word_counts:
            self.assertTrue(previous_count >= entry['count'])
            previous_count = entry['count']
    def testNumWords(self):
        """num_words limits how many terms come back (default is 500)."""
        word_counts = self._mc.wordCount(self.QUERY, self.FILTER_QUERY)
        self.assertEqual(len(word_counts), 500)
        word_counts = self._mc.wordCount(self.QUERY, self.FILTER_QUERY, num_words=100)
        self.assertEqual(len(word_counts), 100)
    def testStopWords(self):
        """Stopwords are excluded unless include_stopwords is passed."""
        word_counts = self._mc.wordCount(self.QUERY, self.FILTER_QUERY)
        self.assertEqual(word_counts[3]['term'], u'science')
        word_counts = self._mc.wordCount(self.QUERY, self.FILTER_QUERY, include_stopwords=True)
        self.assertEqual(word_counts[3]['term'], u'that')
    def testStats(self):
        """include_stats wraps the terms in a two-key dict with a 'stats' entry."""
        word_counts = self._mc.wordCount(self.QUERY, self.FILTER_QUERY)
        self.assertEqual(word_counts[3]['term'], u'science')
        word_counts = self._mc.wordCount(self.QUERY, self.FILTER_QUERY, include_stats=True)
        self.assertEqual(len(word_counts), 2)
        self.assertTrue('stats' in word_counts.keys())
        self.assertTrue('words' in word_counts.keys())
class AdminApiTaggingUpdateTest(AdminApiBaseTest):
    """Round-trip tests that edit a tag / tag set and then restore it."""
    def testTagUpdate(self):
        """Modify a known tag's name/label/description, verify, then restore."""
        example_tag_id = 9172167
        # grab the tag info
        tag = self._mc.tag(example_tag_id)
        # change the name, label and description
        result = self._mc.updateTag(example_tag_id, 'modified tag', 'modified label', 'modified description')
        modified_tag = self._mc.tag(example_tag_id)
        self.assertEqual(modified_tag['tag'],'modified tag')
        self.assertEqual(modified_tag['label'],'modified label')
        self.assertEqual(modified_tag['description'],'modified description')
        # set it back (the 'exampel' misspelling is written and then asserted
        # verbatim below, so it must not be "fixed" in isolation)
        result = self._mc.updateTag(example_tag_id, 'example tag', 'example label', 'This is an exampel tag used in api client test scripts')
        modified_tag = self._mc.tag(example_tag_id)
        self.assertEqual(modified_tag['tag'],'example tag')
        self.assertEqual(modified_tag['label'],'example label')
        self.assertEqual(modified_tag['description'],'This is an exampel tag used in api client test scripts')
    def testTagSetUpdate(self):
        """Modify the test account's tag set, verify, then restore."""
        example_tag_sets_id = TEST_TAG_SET_ID
        # grab the tag info
        tag_set = self._mc.tagSet(example_tag_sets_id)
        # change the name, label and description
        result = self._mc.updateTagSet(example_tag_sets_id, TEST_USER_EMAIL, 'modified label', 'modified description')
        modified_tag = self._mc.tagSet(example_tag_sets_id)
        self.assertEqual(modified_tag['name'],TEST_USER_EMAIL)
        self.assertEqual(modified_tag['label'],'modified label')
        self.assertEqual(modified_tag['description'],'modified description')
        # set it back
        result = self._mc.updateTagSet(example_tag_sets_id, TEST_USER_EMAIL, 'rahulbot', 'The tag set of Rahul!')
        modified_tag = self._mc.tagSet(example_tag_sets_id)
        self.assertEqual(modified_tag['name'], TEST_USER_EMAIL)
        self.assertEqual(modified_tag['label'],'rahulbot')
        self.assertEqual(modified_tag['description'],'The tag set of Rahul!')
class AdminApiTaggingContentTest(AdminApiBaseTest):
    """Tests that write tags onto stories and sentences via the admin API."""
    def testTagStories(self):
        """Tag a story with two tags, then use clear_others to cut back to one."""
        test_story_id = 2
        tag_set_name = TEST_USER_EMAIL
        # tag a story with two things
        desired_tags = [mediacloud.api.StoryTag(test_story_id, tag_set_name, 'test_tag1'),
                        mediacloud.api.StoryTag(test_story_id, tag_set_name, 'test_tag2')]
        response = self._mc.tagStories(desired_tags)
        self.assertEqual(len(response), len(desired_tags))
        # make sure it worked
        story = self._mc.story(test_story_id, sentences=True)
        tags_on_story = [t for t in story['story_tags'] if t['tag_set'] == tag_set_name]
        self.assertEqual(len(tags_on_story), len(desired_tags))
        # now remove one
        desired_tags = [mediacloud.api.StoryTag(test_story_id, TEST_USER_EMAIL, 'test_tag1')]
        response = self._mc.tagStories(desired_tags, clear_others=True)
        self.assertEqual(len(response), len(desired_tags))
        # and check it
        story = self._mc.story(test_story_id, sentences=True)
        tags_on_story = [t for t in story['story_tags'] if t['tag_set'] == tag_set_name]
        self.assertEqual(len(tags_on_story), len(desired_tags))
    def testChunkify(self):
        """_chunkify should split 507 items into ten chunks of 50 plus one of 7."""
        chunk_size = 50
        data = [x for x in range(0, 507)]
        chunked = self._mc._chunkify(data, chunk_size)
        self.assertEqual(11, len(chunked))
        for x in range(0, 10):
            self.assertEqual(chunk_size, len(chunked[x]))
        self.assertEqual(7, len(chunked[10]))
    def testTagTonsOfSentences(self):
        """Tagging far more sentences than one API chunk should still succeed."""
        test_story_id = 435914244
        tag_set_name = TEST_USER_EMAIL
        # grab some sentence_ids to test with
        orig_story = self._mc.story(test_story_id, sentences=True)
        self.assertTrue('story_sentences' in orig_story)
        self.assertTrue(len(orig_story['story_sentences']) > 2)
        sentence_ids = [s['story_sentences_id'] for s in orig_story['story_sentences'][0:2]]
        # make a list of a ton of tags (extend avoids quadratic list concatenation)
        desired_tags = []
        for _ in range(0, 80):
            desired_tags.extend(mediacloud.api.SentenceTag(sid, tag_set_name, 'test_tag1')
                                for sid in sentence_ids)
        response = self._mc.tagSentences(desired_tags)
        self.assertEqual(len(response), len(desired_tags))
    def testTagSentences(self):
        """Add, extend, and (with clear_others) trim tags on a story's sentences."""
        test_story_id = 435914244
        test_tag_id1 = '9172171'  # mc-api-test@media.mit.edu:test_tag1
        test_tag_id2 = '9172168'  # mc-api-test@media.mit.edu:test_tag2
        tag_set_name = TEST_USER_EMAIL
        # grab some sentence_ids to test with
        orig_story = self._mc.story(test_story_id, sentences=True)
        self.assertTrue('story_sentences' in orig_story)
        self.assertTrue(len(orig_story['story_sentences']) > 2)
        sentence_ids = [s['story_sentences_id'] for s in orig_story['story_sentences'][0:2]]
        # add a tag
        desired_tags = [mediacloud.api.SentenceTag(sid, tag_set_name, 'test_tag1')
                        for sid in sentence_ids]
        response = self._mc.tagSentences(desired_tags)
        self.assertEqual(len(response), len(desired_tags))
        # and verify it worked
        # BUGFIX: this used to scan orig_story (fetched BEFORE tagging), which
        # could never see the new tags; check the re-fetched story instead,
        # matching the two verification passes below.
        tagged_story = self._mc.story(test_story_id, sentences=True)
        tagged_sentences = [s for s in tagged_story['story_sentences'] if len(s['tags']) > 0]
        for s in tagged_sentences:
            if s['story_sentences_id'] in sentence_ids:
                self.assertTrue(test_tag_id1 in s['tags'])
        # now do two tags on each story
        desired_tags = desired_tags + [mediacloud.api.SentenceTag(sid, tag_set_name, 'test_tag2')
                                       for sid in sentence_ids]
        response = self._mc.tagSentences(desired_tags)
        self.assertEqual(len(response), len(desired_tags))
        # and verify it worked
        tagged_story = self._mc.story(test_story_id, sentences=True)
        tagged_sentences = [s for s in tagged_story['story_sentences'] if len(s['tags']) > 0]
        for s in tagged_sentences:
            if s['story_sentences_id'] in sentence_ids:
                self.assertTrue(test_tag_id1 in s['tags'])
                self.assertTrue(test_tag_id2 in s['tags'])
        # now remove one
        desired_tags = [mediacloud.api.SentenceTag(sid, tag_set_name, 'test_tag1')
                        for sid in sentence_ids]
        response = self._mc.tagSentences(desired_tags, clear_others=True)
        self.assertEqual(len(response), len(desired_tags))
        # and check it
        tagged_story = self._mc.story(test_story_id, sentences=True)
        tagged_sentences = [s for s in tagged_story['story_sentences'] if len(s['tags']) > 0]
        for s in tagged_sentences:
            if s['story_sentences_id'] in sentence_ids:
                self.assertTrue(test_tag_id1 in s['tags'])
                self.assertFalse(test_tag_id2 in s['tags'])
class AdminTopicStoryListTest(AdminApiBaseTest):
    """Tests for the admin topic story-list endpoint."""
    TOPIC_ID = 1
    def testTopicStoryList(self):
        """A default request returns one page of 10 stories."""
        # use the shared TOPIC_ID constant; the old hard-coded local was unused
        response = self._mc.topicStoryList(self.TOPIC_ID)
        self.assertEqual(len(response['stories']), 10)
    def testTopicStoryListPaging(self):
        """Two consecutive pages should not share any story ids."""
        limit = 50
        page_one = self._mc.topicStoryList(self.TOPIC_ID, limit=limit)
        page_one_ids = [m['stories_id'] for m in page_one['stories']]
        self.assertEqual(len(page_one['stories']), 50)
        self.assertTrue('continuation_id' in page_one)
        page_two = self._mc.topicStoryList(self.TOPIC_ID, continuation_id=page_one['continuation_id'], limit=limit)
        page_two_ids = [m['stories_id'] for m in page_two['stories']]
        # verify no duplicated story ids across pages
        combined_ids = set(page_one_ids + page_two_ids)
        self.assertEqual(len(page_one_ids) + len(page_two_ids), len(combined_ids))
    def testTopicStoryListLimit(self):
        """The limit parameter controls page size (default 10)."""
        response = self._mc.topicStoryList(self.TOPIC_ID)
        self.assertEqual(len(response['stories']), 10)
        response = self._mc.topicStoryList(self.TOPIC_ID, limit=76)
        self.assertEqual(len(response['stories']), 76)
        response = self._mc.topicStoryList(self.TOPIC_ID, limit=500)
        self.assertEqual(len(response['stories']), 500)
    def testTopicStoryListSortSocial(self):
        """sort='social' orders stories by descending bitly click count."""
        response = self._mc.topicStoryList(self.TOPIC_ID, limit=500, sort='social')
        last_bitly_count = 1000000000000  # sentinel above any plausible count
        for story in response['stories']:
            self.assertTrue(story['bitly_click_count'] <= last_bitly_count)
            last_bitly_count = story['bitly_click_count']
    def testTopicStoryListSortInlink(self):
        """sort='inlink' orders stories by descending inlink count."""
        response = self._mc.topicStoryList(self.TOPIC_ID, limit=500, sort='inlink')
        last_inlink_count = 1000000000000
        for story in response['stories']:
            self.assertTrue(story['inlink_count'] <= last_inlink_count)
            last_inlink_count = story['inlink_count']
class AdminTopicMediaListTest(AdminApiBaseTest):
    """Tests for the admin topic media-list endpoint."""
    TOPIC_ID = 1
    def testTopicMediaList(self):
        """A default request returns media rows plus a continuation id."""
        response = self._mc.topicMediaList(self.TOPIC_ID)
        self.assertTrue('continuation_id' in response)
        self.assertTrue('media' in response)
        for media_item in response['media']:
            self.assertTrue('media_id' in media_item)
    def testTopicMediaListLimit(self):
        """The limit parameter controls page size (default 10)."""
        response = self._mc.topicMediaList(self.TOPIC_ID)
        self.assertEqual(len(response['media']), 10)
        response = self._mc.topicMediaList(self.TOPIC_ID, limit=76)
        self.assertEqual(len(response['media']), 76)
        response = self._mc.topicMediaList(self.TOPIC_ID, limit=500)
        self.assertEqual(len(response['media']), 500)
    def testTopicMediaListPaging(self):
        """Two consecutive pages should not share any media ids."""
        page_size = 50
        page_one = self._mc.topicMediaList(self.TOPIC_ID, limit=page_size)
        page_one_ids = [item['media_id'] for item in page_one['media']]
        self.assertEqual(len(page_one['media']), 50)
        self.assertTrue('continuation_id' in page_one)
        page_two = self._mc.topicMediaList(self.TOPIC_ID, continuation_id=page_one['continuation_id'], limit=page_size)
        page_two_ids = [item['media_id'] for item in page_two['media']]
        # no media_id may appear on both pages
        all_ids = set(page_one_ids + page_two_ids)
        self.assertEqual(len(page_one_ids) + len(page_two_ids), len(all_ids))
    def testTopicMediaListSortSocial(self):
        """sort='social' orders media by descending bitly click count."""
        response = self._mc.topicMediaList(self.TOPIC_ID, sort='social')
        previous_count = 1000000000000  # sentinel above any plausible count
        for media_item in response['media']:
            self.assertTrue(media_item['bitly_click_count'] <= previous_count)
            previous_count = media_item['bitly_click_count']
    def testTopicMediaListSortInlink(self):
        """sort='inlink' orders media by descending inlink count."""
        response = self._mc.topicMediaList(self.TOPIC_ID, sort='inlink')
        previous_count = 1000000000000
        for media_item in response['media']:
            self.assertTrue(media_item['inlink_count'] <= previous_count)
            previous_count = media_item['inlink_count']
class AdminTopicWordCountTest(AdminApiBaseTest):
    """Tests for the admin topic word-count endpoint."""
    TOPIC_ID = 1
    def testResults(self):
        """The default page holds the top 500 terms for the topic."""
        word_counts = self._mc.topicWordCount(self.TOPIC_ID)
        self.assertEqual(len(word_counts), 500)
        self.assertEqual(word_counts[3]['term'], u'george')
    def testSort(self):
        """Terms must come back sorted by descending count."""
        word_counts = self._mc.topicWordCount(self.TOPIC_ID)
        previous_count = 10000000000  # sentinel larger than any real count
        for entry in word_counts:
            self.assertTrue(previous_count >= entry['count'])
            previous_count = entry['count']
    def testNumWords(self):
        """Default page size is 500; an explicit num_words=500 matches it."""
        word_counts = self._mc.topicWordCount(self.TOPIC_ID)
        self.assertEqual(len(word_counts), 500)
        word_counts = self._mc.topicWordCount(self.TOPIC_ID, num_words=500)
        self.assertEqual(len(word_counts), 500)
class AdminTopicSentenceCountTest(AdminApiBaseTest):
    """Tests for the admin topic sentence-count endpoint."""
    TOPIC_ID = 1
    def testSentenceCount(self):
        """Counts should be large, both overall and for a specific snapshot."""
        result = self._mc.topicSentenceCount(self.TOPIC_ID)
        self.assertTrue(int(result['count']) > 10000)
        result = self._mc.topicSentenceCount(self.TOPIC_ID, snapshot_id=365)
        self.assertTrue(int(result['count']) > 1000)
    def testSentenceCountSplit(self):
        """A split over a multi-year range comes back in weekly buckets."""
        result = self._mc.topicSentenceCount(self.TOPIC_ID, '*', '*', True, '2013-01-01', '2016-01-01')
        self.assertEqual(result['split']['gap'], '+7DAYS')
        self.assertEqual(len(result['split']), 4)
Updated the tests so they pass.
import unittest, ConfigParser, json, datetime, logging
import mediacloud.api
# Account and fixture ids used by the tagging tests below.
TEST_USER_EMAIL = "mc-api-test@media.mit.edu"
TEST_TAG_SET_ID = 1727  # tag set edited by AdminApiTaggingUpdateTest
GEO_TAG_SET_ID = 1011  # tag set used to filter sentenceFieldCount results
class ApiBaseTest(unittest.TestCase):
    """Base class: builds a public-API client from the local config file.

    QUERY / FILTER_QUERY select a small, stable slice of the test server's
    content; SENTENCE_COUNT is the page size used by the sentence tests.
    """
    QUERY = '( mars OR robot )'
    FILTER_QUERY = '+publish_date:[2013-01-01T00:00:00Z TO 2013-02-01T00:00:00Z] AND +media_sets_id:1'
    SENTENCE_COUNT = 100
    def setUp(self):
        # the API key is read from mc-client.config in the working directory
        self._config = ConfigParser.ConfigParser()
        self._config.read('mc-client.config')
        self._mc = mediacloud.api.MediaCloud( self._config.get('api','key'))
class AdminApiBaseTest(unittest.TestCase):
    """Base class: builds an admin-API client from the local config file."""
    def setUp(self):
        # the API key is read from mc-client.config in the working directory
        self._config = ConfigParser.ConfigParser()
        self._config.read('mc-client.config')
        self._mc = mediacloud.api.AdminMediaCloud( self._config.get('api','key') )
class ApiBigQueryTest(ApiBaseTest):
    """Verify that a very long query string still gets through the API."""
    def testBigQuery(self):
        base_clause = "(publish_date:[2016-05-16T00:00:00Z TO 2016-05-17T00:00:00Z]) AND (tags_id_media:(8875027))"
        # "110" was determined experimentally
        huge_query = " AND ".join([base_clause] * 110)
        results = self._mc.sentenceCount(huge_query)
        self.assertTrue(results['count'] > 0)
class ApiAllFieldsOptionTest(ApiBaseTest):
    """Verify setAllFields(True) exposes extra fields on media results."""
    def testAllFieldsOnMedia(self):
        test_media_id = 1751
        # a default query should omit the non-public fields
        media = self._mc.media(test_media_id)
        self.assertNotEqual(media, None)
        self.assertEqual(media['media_id'], test_media_id)
        self.assertFalse('foreign_rss_links' in media)
        self.assertTrue('url' in media)
        # after turning on all-fields mode the extras should appear
        self._mc.setAllFields(True)
        media = self._mc.media(test_media_id)
        self.assertNotEqual(media, None)
        self.assertEqual(media['media_id'], test_media_id)
        self.assertTrue('foreign_rss_links' in media)
        self.assertTrue('url' in media)
class AuthTokenTest(ApiBaseTest):
    """Tests for auth-token handling on the client."""
    def testAuthToken(self):
        """setAuthToken should swap keys, and a request with a bad key must fail.

        BUGFIX: the old try/except-everything pattern caught its own
        assertFalse(True) AssertionError, so the test could never fail even
        when the bad key was silently accepted. assertRaises fixes that.
        """
        valid_auth_token = self._config.get('api', 'key')
        fake_auth_token = 'these are not the keys you are looking for'
        # make sure setAuthToken works
        self._mc.setAuthToken(fake_auth_token)
        self.assertEqual(self._mc._auth_token, fake_auth_token)
        try:
            # see a request with a bad key fail
            with self.assertRaises(Exception):
                self._mc.media(1)
        finally:
            # restore the valid key even if the assertion above fails
            self._mc.setAuthToken(valid_auth_token)
    def testUserAuthToken(self):
        """Requesting a user token with bogus credentials must raise."""
        with self.assertRaises(Exception):
            self._mc.userAuthToken('user@funkytown.us', '1234')
class PublishDateQueryTest(ApiBaseTest):
    """Tests for publish_date_query bracket/brace (inclusive/exclusive) handling."""
    def testPublishDateQuery(self):
        """Check all four inclusivity combinations and that each query matches content.

        FIX: the date literals used leading zeros (2014,06,02). Those parse
        as octal in Python 2 (same values here) but are a SyntaxError in
        Python 3, so they are written as plain ints.
        """
        start_date = datetime.date(2014, 6, 2)
        end_date = datetime.date(2014, 6, 3)
        # the default is an inclusive start and exclusive end ([ ... })
        date_query_default = self._mc.publish_date_query(start_date, end_date)
        self.assertEqual(date_query_default, "publish_date:[2014-06-02T00:00:00Z TO 2014-06-03T00:00:00Z}")
        date_query_inclusive_exclusive = self._mc.publish_date_query(start_date, end_date, start_date_inclusive=True, end_date_inclusive=False)
        self.assertEqual(date_query_inclusive_exclusive, "publish_date:[2014-06-02T00:00:00Z TO 2014-06-03T00:00:00Z}")
        date_query_inclusive_inclusive = self._mc.publish_date_query(start_date, end_date, start_date_inclusive=True, end_date_inclusive=True)
        self.assertEqual(date_query_inclusive_inclusive, "publish_date:[2014-06-02T00:00:00Z TO 2014-06-03T00:00:00Z]")
        date_query_exclusive_inclusive = self._mc.publish_date_query(start_date, end_date, start_date_inclusive=False, end_date_inclusive=True)
        self.assertEqual(date_query_exclusive_inclusive, "publish_date:{2014-06-02T00:00:00Z TO 2014-06-03T00:00:00Z]")
        date_query_exclusive_exclusive = self._mc.publish_date_query(start_date, end_date, start_date_inclusive=False, end_date_inclusive=False)
        self.assertEqual(date_query_exclusive_exclusive, "publish_date:{2014-06-02T00:00:00Z TO 2014-06-03T00:00:00Z}")
        # every variant should match at least one sentence on the test server
        self.assertTrue(self._mc.sentenceCount(date_query_default)['count'] > 0)
        self.assertTrue(self._mc.sentenceCount(date_query_inclusive_exclusive)['count'] > 0)
        self.assertTrue(self._mc.sentenceCount(date_query_inclusive_inclusive)['count'] > 0)
        self.assertTrue(self._mc.sentenceCount(date_query_exclusive_exclusive)['count'] > 0)
        self.assertTrue(self._mc.sentenceCount(date_query_exclusive_inclusive)['count'] > 0)
class ApiMediaHealthTest(ApiBaseTest):
    """Tests for the media health endpoint."""
    def testMediaHealth(self):
        health = self._mc.mediaHealth(2)
        self.assertEqual(health['media_id'], '2')  # the id comes back as a string here
        self.assertEqual(health['is_healthy'], 1)
        # the reported gap count should agree with the length of the gap list
        self.assertEqual(health['coverage_gaps'], len(health['coverage_gaps_list']))
        self.assertTrue('start_date' in health)
        self.assertTrue('end_date' in health)
class ApiMediaTest(ApiBaseTest):
    """Tests for media lookup and listing on the public API."""
    def testMedia(self):
        """Media source 1 is the New York Times and carries source tags."""
        source = self._mc.media(1)
        self.assertNotEqual(source, None)
        self.assertEqual(source['media_id'], 1)
        self.assertEqual(source['name'], 'New York Times')
        self.assertTrue(len(source['media_source_tags']) > 0)
    def testMediaListWithName(self):
        """A name_like filter should match the three known NYT entries."""
        matches = self._mc.mediaList(name_like='new york times')
        self.assertEqual(len(matches), 3)
    def testMediaList(self):
        """Page through the media list; consecutive pages overlap by one row."""
        page_one = self._mc.mediaList()
        for source in page_one:
            self.assertTrue(source['media_id'] > 0)
        self.assertNotEqual(page_one, None)
        self.assertEqual(len(page_one), 20)
        # restart one id before the last row so the pages share exactly one entry
        restart_id = int(page_one[19]['media_id']) - 1
        self.assertTrue(restart_id > 0)
        page_two = self._mc.mediaList(restart_id)
        for source in page_two:
            self.assertTrue(source['media_id'] > restart_id)
        self.assertEqual(len(page_two), 20)
        self.assertEqual(page_one[19]['media_id'], page_two[0]['media_id'])
        big_page = self._mc.mediaList(0, 200)
        self.assertEqual(len(big_page), 200)
    def testMediaListWithTagId(self):
        """Filtering by the US MSM collection tag should return some media."""
        matches = self._mc.mediaList(tags_id=8875027)  # US MSM
        self.assertTrue(len(matches) > 0)
class ApiControversyTest(ApiBaseTest):
    """Tests for controversy lookup and listing."""
    def testControversy(self):
        """Controversy 1 is the 'trayvon' fixture on the test server."""
        controversy = self._mc.controversy(1)
        self.assertEqual(controversy['controversies_id'],1)
        self.assertEqual(controversy['name'],'trayvon')
    def testControversyList(self):
        """Check unfiltered listing, a narrowing name filter, and a no-match filter."""
        # verify it pulls some
        controversy_list = self._mc.controversyList()
        self.assertTrue(len(controversy_list)>1)
        # make sure the filtering works
        # NOTE(review): the filter string is 'prop' but the variable name says
        # 'pop' - one of the two looks like a typo; confirm which was intended
        pop_controversy_list = self._mc.controversyList('prop')
        self.assertTrue(len(pop_controversy_list)>1)
        self.assertTrue(len(pop_controversy_list)<len(controversy_list))
        # make sure a failure case works
        random_controversy_list = self._mc.controversyList('12335')
        self.assertEqual(len(random_controversy_list),0)
class ApiControversyDumpTest(ApiBaseTest):
    """Tests for controversy dump lookup and listing."""
    def testControversyDump(self):
        dump = self._mc.controversyDump(557)
        self.assertEqual(dump['controversy_dumps_id'], 557)
        self.assertEqual(dump['controversies_id'], 1)
    def testControversyDumpList(self):
        """Unfiltered list has several dumps; filtering narrows it; junk id matches none."""
        all_dumps = self._mc.controversyDumpList()
        self.assertTrue(len(all_dumps) > 1)
        filtered_dumps = self._mc.controversyDumpList(1)
        self.assertTrue(len(filtered_dumps) > 1)
        self.assertTrue(len(filtered_dumps) < len(all_dumps))
        missing_dumps = self._mc.controversyDumpList('12335')
        self.assertEqual(len(missing_dumps), 0)
class ApiControversyDumpTimeSliceTest(ApiBaseTest):
    """Tests for controversy dump time-slice lookup and listing."""
    def testControversyDumpTimeSlice(self):
        time_slice = self._mc.controversyDumpTimeSlice(145)
        self.assertEqual(time_slice['controversy_dump_time_slices_id'], 145)
        self.assertEqual(time_slice['controversy_dumps_id'], 16)
        self.assertEqual(time_slice['model_num_media'], 4)
    def testControversyDumpTimeSliceList(self):
        """Unfiltered list has several slices; filtering by dump never adds any."""
        all_slices = self._mc.controversyDumpTimeSliceList()
        self.assertTrue(len(all_slices) > 1)
        dump_slices = self._mc.controversyDumpTimeSliceList(controversy_dumps_id=16)
        self.assertTrue(len(dump_slices) > 1)
        self.assertTrue(len(dump_slices) <= len(all_slices))
class ApiTagsTest(ApiBaseTest):
    """Tests for tag lookup, paging, long pages, public filtering, and search."""
    def testTags(self):
        """Tag 8876989 is the 'JP' tag in set 597."""
        tag = self._mc.tag(8876989)
        self.assertEqual(tag['tags_id'],8876989)
        self.assertEqual(tag['tag'],'JP')
        self.assertEqual(tag['tag_sets_id'],597)
    def testTagList(self):
        """Page through set 597 and exercise row limits and the public_only filter."""
        # verify it only pulls tags from that one set
        first_list = self._mc.tagList(597)
        self.assertEqual(len(first_list),20)
        [self.assertEqual(tag['tag_sets_id'],597) for tag in first_list]
        # make sure paging through a set works right
        second_list = self._mc.tagList(597, int(first_list[19]['tags_id'])-1)
        self.assertEqual(len(second_list),20)
        [self.assertEqual(tag['tag_sets_id'],597) for tag in second_list]
        self.assertEqual(first_list[19]['tags_id'], second_list[0]['tags_id'])
        # make sure you can pull a longer list of tags
        longer_list = self._mc.tagList(597, 0, 150)
        self.assertEqual(len(longer_list),150)
        [self.assertEqual(tag['tag_sets_id'],597) for tag in longer_list]
        longest_list = self._mc.tagList(597, 0, 200)
        self.assertEqual(len(longest_list),173)  # the test server's set 597 returns 173 tags
        [self.assertEqual(tag['tag_sets_id'],597) for tag in longest_list]
        # try getting only the public tags in the set
        full_list = self._mc.tagList(6, rows=200)
        public_list = self._mc.tagList(6, rows=200, public_only=True)
        self.assertNotEqual( len(full_list), len(public_list))
    def testTagListSearch(self):
        """name_like search works for tags with and without descriptions."""
        # verify search works at all
        collection_tags = self._mc.tagList(name_like="collection")
        self.assertTrue(len(collection_tags)>0, "Got %d tags matching 'collection'" % len(collection_tags))
        # verify search works on tags without descriptions
        geo_tags = self._mc.tagList(name_like="geonames_")
        self.assertTrue(len(geo_tags)>0, "Got %d tags matching 'geonames_'" % len(geo_tags))
class ApiTagSetsTest(ApiBaseTest):
    """Tests for tag-set lookup and paging."""
    def testTagSet(self):
        tag_set = self._mc.tagSet(597)
        self.assertEqual(tag_set['tag_sets_id'], 597)
        self.assertEqual(tag_set['name'], 'gv_country')
    def testTagSetList(self):
        """Page through tag sets; consecutive pages overlap by exactly one row."""
        page_one = self._mc.tagSetList()
        self.assertEqual(len(page_one), 20)
        # start the second page one id before the last row of page one
        page_two = self._mc.tagSetList(int(page_one[19]['tag_sets_id']) - 1)
        self.assertEqual(len(page_two), 20)
        self.assertEqual(page_one[19]['tag_sets_id'], page_two[0]['tag_sets_id'])
        big_page = self._mc.tagSetList(0, 50)
        self.assertEqual(len(big_page), 50)
class ApiFeedsTest(ApiBaseTest):
    """Tests for feed lookup and listing."""
    def testFeed(self):
        feed = self._mc.feed(1)
        self.assertEqual(feed['feeds_id'], 1)
        self.assertEqual(feed['name'], 'Bits')
        self.assertEqual(feed['media_id'], 1)
    def testFeedList(self):
        """Page through media 1's feeds; a big page returns all 142 of them."""
        page_one = self._mc.feedList(1)
        self.assertEqual(len(page_one), 20)
        # start the second page one id before the last row of page one
        page_two = self._mc.feedList(1, int(page_one[19]['feeds_id']) - 1)
        self.assertEqual(len(page_two), 20)
        self.assertEqual(page_one[19]['feeds_id'], page_two[0]['feeds_id'])
        big_page = self._mc.feedList(1, 0, 200)
        self.assertEqual(len(big_page), 142)
class AdminApiStoriesTest(AdminApiBaseTest):
    """Admin story endpoints: flags control which heavyweight fields come back."""
    def testStoryWithSentences(self):
        """sentences=True adds story_sentences but not the raw-text fields."""
        story = self._mc.story(27456565, sentences=True)
        self.assertEqual(int(story['stories_id']),27456565)
        self.assertEqual(story['media_id'],1144)
        self.assertTrue('story_sentences' in story)
        self.assertFalse('story_text' in story)
        self.assertFalse('is_fully_extracted' in story)
    def testStoryWithText(self):
        """text=True adds story_text/is_fully_extracted but not sentences."""
        story = self._mc.story(27456565, text=True)
        self.assertEqual(int(story['stories_id']),27456565)
        self.assertEqual(story['media_id'],1144)
        self.assertFalse('story_sentences' in story)
        self.assertTrue('story_text' in story)
        self.assertTrue('is_fully_extracted' in story)
    def testStoryList(self):
        """A basic story list query returns stories with bitly counts."""
        results = self._mc.storyList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY)
        self.assertNotEqual(len(results),0)
        for story in results:
            self.assertTrue('bitly_click_count' in story)
    def testStoryCoreNlpList(self):
        """The corenlp list endpoint returns only corenlp plus id fields."""
        results = self._mc.storyCoreNlpList([261784668,261784669])
        self.assertEqual(len(results),2)
        for story in results:
            self.assertFalse('story_sentences' in story)
            self.assertFalse('story_text' in story)
            self.assertFalse('is_fully_extracted' in story)
            self.assertTrue('corenlp' in story)
            self.assertTrue('stories_id' in story)
    def testStoryListDefaults(self):
        """With no flags, none of the heavyweight fields are included."""
        results = self._mc.storyList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, rows=10)
        for story in results:
            self.assertFalse('story_sentences' in story)
            self.assertFalse('story_text' in story)
            self.assertFalse('is_fully_extracted' in story)
            self.assertFalse('corenlp' in story)
    def testStoryListWithCoreNlp(self):
        """corenlp=True adds only the corenlp field."""
        results = self._mc.storyList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, corenlp=True, rows=10)
        for story in results:
            self.assertFalse('story_sentences' in story)
            self.assertFalse('story_text' in story)
            self.assertFalse('is_fully_extracted' in story)
            self.assertTrue('corenlp' in story)
    def testStoryListWithSentences(self):
        """sentences=True adds only the story_sentences field."""
        results = self._mc.storyList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, sentences=True, rows=10)
        for story in results:
            self.assertTrue('story_sentences' in story)
            self.assertFalse('story_text' in story)
            self.assertFalse('is_fully_extracted' in story)
            self.assertFalse('corenlp' in story)
    def testStoryListWithText(self):
        """text=True adds only the text-related fields."""
        results = self._mc.storyList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, text=True, rows=10)
        for story in results:
            self.assertFalse('story_sentences' in story)
            self.assertTrue('story_text' in story)
            self.assertTrue('is_fully_extracted' in story)
            self.assertFalse('corenlp' in story)
class ApiStoriesWordMatrixTest(ApiBaseTest):
    """Tests for the story word-matrix endpoint."""
    def testStoryWordMatrix(self):
        date_filter = "(publish_date:[2016-05-16T00:00:00Z TO 2016-05-17T00:00:00Z]) AND (tags_id_media:(8875027))"
        results = self._mc.storyWordMatrix("obama", date_filter)
        # the payload must include both the matrix and the word list
        self.assertTrue("word_matrix" in results)
        self.assertTrue("word_list" in results)
class ApiStoriesTest(ApiBaseTest):
    """Public story endpoints: only public fields are exposed."""
    def testStory(self):
        """A single story fetch exposes ids and counts but no text/sentences."""
        story = self._mc.story(27456565)
        self.assertEqual(int(story['stories_id']),27456565)
        self.assertEqual(story['media_id'],1144)
        self.assertFalse('story_sentences' in story)
        self.assertFalse('story_text' in story)
        self.assertFalse('is_fully_extracted' in story)
        self.assertTrue('bitly_click_count' in story)
    def testStoryPublic(self):
        """The public story view includes language/title but no sentences."""
        story = self._mc.story(27456565)
        self.assertEqual(story['media_id'],1144)
        self.assertTrue('story_sentences' not in story)
        self.assertTrue('language' in story)
        self.assertTrue('title' in story)
        self.assertTrue('bitly_click_count' in story)
    def testStoryPublicList(self):
        """Listed stories each carry a bitly click count."""
        results = self._mc.storyList(self.QUERY, self.FILTER_QUERY)
        self.assertNotEqual(len(results),0)
        for story in results:
            self.assertTrue('bitly_click_count' in story)
    def testStoryCount(self):
        """The canned query matches a known number of stories on the test server."""
        results = self._mc.storyCount(self.QUERY, self.FILTER_QUERY)
        self.assertEqual(results['count'],2083)
class AdminApiSentencesTest(AdminApiBaseTest):
    """Admin sentence-list tests: sorting, random ordering, and paging.

    The numeric expectations (5793 hits, etc.) are pinned to the content of
    the test server at the time the tests were written.
    """
    def testSentenceListSortingAscending(self):
        """Ascending publish-date sort must be non-decreasing (minute resolution)."""
        results = self._mc.sentenceList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, 0, ApiBaseTest.SENTENCE_COUNT,
            self._mc.SORT_PUBLISH_DATE_ASC)
        self.assertEqual(len(results['response']['docs']), ApiBaseTest.SENTENCE_COUNT)
        last_date = None
        for sentence in results['response']['docs']:
            this_date = datetime.datetime.strptime(sentence['publish_date'], self._mc.SENTENCE_PUBLISH_DATE_FORMAT)
            this_date = this_date.replace(second=0, microsecond=0)  # sorting is by minute
            if last_date is not None:
                self.assertTrue(last_date <= this_date, "Date wrong: "+str(last_date)+" is not <= "+str(this_date))
            last_date = this_date  # (a redundant duplicate of this assignment was removed)
    def testSentenceListSortingDescending(self):
        """Descending publish-date sort must be non-increasing (minute resolution)."""
        results = self._mc.sentenceList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, 0, ApiBaseTest.SENTENCE_COUNT,
            self._mc.SORT_PUBLISH_DATE_DESC)
        self.assertEqual(len(results['response']['docs']), ApiBaseTest.SENTENCE_COUNT)
        last_date = None
        for sentence in results['response']['docs']:
            this_date = datetime.datetime.strptime(sentence['publish_date'], self._mc.SENTENCE_PUBLISH_DATE_FORMAT)
            this_date = this_date.replace(second=0, microsecond=0)  # sorting is by minute
            if last_date is not None:
                self.assertTrue(last_date >= this_date, "Date wrong: "+str(last_date)+" is not >= "+str(this_date))
            last_date = this_date  # (a redundant duplicate of this assignment was removed)
    def testSentenceListSortingRadom(self):
        """Random sort: two windows at different offsets should not line up.

        (The 'Radom' typo in the method name is kept so the test's public
        name, and thus discovery/history, stays stable.)
        """
        # we do random sort by telling we want the random sort, and then offsetting to a different start index
        results1 = self._mc.sentenceList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, 0, ApiBaseTest.SENTENCE_COUNT,
            self._mc.SORT_RANDOM)
        self.assertEqual(len(results1['response']['docs']), ApiBaseTest.SENTENCE_COUNT)
        results2 = self._mc.sentenceList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, ApiBaseTest.SENTENCE_COUNT+3, ApiBaseTest.SENTENCE_COUNT,
            self._mc.SORT_RANDOM)
        self.assertEqual(len(results2['response']['docs']), ApiBaseTest.SENTENCE_COUNT)
        for idx in range(0, ApiBaseTest.SENTENCE_COUNT):
            self.assertNotEqual(results1['response']['docs'][idx]['stories_id'], results2['response']['docs'][idx]['stories_id'],
                "Stories in two different random sets are the same :-(")
    def testSentenceList(self):
        """A default list request reports all hits with a 1000-row page."""
        results = self._mc.sentenceList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY)
        self.assertEqual(int(results['responseHeader']['status']), 0)
        self.assertEqual(int(results['response']['numFound']), 5793)
        self.assertEqual(len(results['response']['docs']), 1000)
    def testSentenceListPaging(self):
        """Row limits and start offsets page through the result set."""
        # test limiting rows returned
        results = self._mc.sentenceList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, 0, 100)
        self.assertEqual(int(results['response']['numFound']), 5793)
        self.assertEqual(len(results['response']['docs']), 100)
        # test starting offset near the end of the result set
        results = self._mc.sentenceList(ApiBaseTest.QUERY, ApiBaseTest.FILTER_QUERY, 5700)
        self.assertEqual(int(results['response']['numFound']), 5793)
        self.assertEqual(len(results['response']['docs']), 93)
class ApiSentencesTest(ApiBaseTest):
    """Tests for single-sentence fetch, sentence counts, and field counts.

    All numeric expectations (ids, counts, split sizes) are pinned to the
    shared live test index — TODO confirm they still match that fixture.
    """
    def testSentence(self):
        # fetch one known sentence and verify its identifying fields
        sentence_id = 3841125325
        sentence = self._mc.sentence(sentence_id)
        self.assertEqual(sentence['story_sentences_id'],sentence_id)
        self.assertEqual(sentence['stories_id'],321728712)
        self.assertTrue(len(sentence['sentence'])>0)
    def testSentenceCount(self):
        """Verify count totals and the date-split gap selection rules."""
        # basic counting
        results = self._mc.sentenceCount('obama','+media_id:1')
        self.assertTrue(int(results['count'])>10000)
        # counting with a default split weekly (>180 days)
        results = self._mc.sentenceCount('obama','+media_id:1',True,'2013-01-01','2014-01-01')
        self.assertEqual(results['split']['gap'],'+7DAYS')
        self.assertEqual(len(results['split']),56)
        # counting with a default split 3-day (<180 days, >90 days)
        results = self._mc.sentenceCount('obama','+media_id:1',True,'2014-01-01','2014-06-01')
        self.assertEqual(results['split']['gap'],'+3DAYS')
        self.assertEqual(len(results['split']),54)
        # counting with a default split daily (<90 days)
        results = self._mc.sentenceCount('obama','+media_id:1',True,'2014-01-01','2014-01-07')
        self.assertEqual(results['split']['gap'],'+1DAY')
        self.assertEqual(len(results['split']),9)
        # test forcing a daily split
        results = self._mc.sentenceCount('obama','+media_id:1',True,'2014-01-01','2014-06-01',True)
        self.assertEqual(results['split']['gap'],'+1DAY')
        self.assertEqual(len(results['split']),154)
    def testFieldCount(self):
        """Field counts for sentences vs. stories, with stats and tag-set filters."""
        # regular call for sentence counts
        sentence_results = self._mc.sentenceFieldCount('obama','+media_id:1')
        self.assertFalse('stats' in sentence_results)
        self.assertFalse('counts' in sentence_results)
        self.assertTrue(len(sentence_results)>0)
        [self.assertTrue(tag['count']) for tag in sentence_results]
        # regular call for story counts
        story_results = self._mc.sentenceFieldCount('obama','+media_id:1',field='tags_id_stories')
        self.assertFalse('stats' in story_results)
        self.assertFalse('counts' in story_results)
        self.assertTrue(len(story_results)>0)
        [self.assertTrue(tag['count']) for tag in story_results]
        # compare
        self.assertTrue(len(story_results)!=len(sentence_results))
        # with stats
        results = self._mc.sentenceFieldCount('obama','+media_id:1',include_stats=True)
        self.assertTrue('stats' in results)
        self.assertTrue('counts' in results)
        # filter by tag set
        sentence_results = self._mc.sentenceFieldCount('obama','+media_id:1',tag_sets_id=GEO_TAG_SET_ID)
        self.assertTrue(len(sentence_results)>0)
        [self.assertEqual(tag['tag_sets_id'],GEO_TAG_SET_ID) for tag in sentence_results]
class ApiWordCountTest(ApiBaseTest):
    """Tests for the wordCount endpoint: result size, ordering, stopwords, stats.

    The expected term at index 3 ('science'/'that') and the 500-word default
    are pinned to the live test index — TODO confirm against that fixture.
    """
    QUERY = 'robots'
    def testResults(self):
        term_freq = self._mc.wordCount(self.QUERY, self.FILTER_QUERY)
        self.assertEqual(len(term_freq),500)
        self.assertEqual(term_freq[3]['term'],u'science')
    def testSort(self):
        term_freq = self._mc.wordCount(self.QUERY, self.FILTER_QUERY)
        # verify sorted in desc order
        last_count = 10000000000
        for freq in term_freq:
            self.assertTrue( last_count >= freq['count'] )
            last_count = freq['count']
    def testNumWords(self):
        # default returns 500 words; num_words=100 caps the list
        term_freq = self._mc.wordCount(self.QUERY, self.FILTER_QUERY)
        self.assertEqual(len(term_freq),500)
        term_freq = self._mc.wordCount(self.QUERY, self.FILTER_QUERY, num_words=100)
        self.assertEqual(len(term_freq),100)
    def testStopWords(self):
        # with stopwords included, a common stopword appears near the top
        term_freq = self._mc.wordCount(self.QUERY, self.FILTER_QUERY)
        self.assertEqual(term_freq[3]['term'],u'science')
        term_freq = self._mc.wordCount(self.QUERY, self.FILTER_QUERY, include_stopwords=True)
        self.assertEqual(term_freq[3]['term'],u'that')
    def testStats(self):
        # include_stats=True wraps the words in a two-key dict
        term_freq = self._mc.wordCount(self.QUERY, self.FILTER_QUERY)
        self.assertEqual(term_freq[3]['term'],u'science')
        term_freq = self._mc.wordCount(self.QUERY, self.FILTER_QUERY, include_stats=True)
        self.assertEqual(len(term_freq),2)
        self.assertTrue( 'stats' in term_freq.keys() )
        self.assertTrue( 'words' in term_freq.keys() )
class AdminApiTaggingUpdateTest(AdminApiBaseTest):
    """Tests that tag and tag-set metadata can be updated and restored.

    Each test mutates a known fixture object, verifies the change, then
    writes the original values back so the fixture stays stable.
    """
    def testTagUpdate(self):
        example_tag_id = 9172167
        # grab the tag info
        tag = self._mc.tag(example_tag_id)
        # change the name, label and description
        result = self._mc.updateTag(example_tag_id, 'modified tag', 'modified label', 'modified description')
        modified_tag = self._mc.tag(example_tag_id)
        self.assertEqual(modified_tag['tag'],'modified tag')
        self.assertEqual(modified_tag['label'],'modified label')
        self.assertEqual(modified_tag['description'],'modified description')
        # set it back ('exampel' misspelling matches the stored fixture text)
        result = self._mc.updateTag(example_tag_id, 'example tag', 'example label', 'This is an exampel tag used in api client test scripts')
        modified_tag = self._mc.tag(example_tag_id)
        self.assertEqual(modified_tag['tag'],'example tag')
        self.assertEqual(modified_tag['label'],'example label')
        self.assertEqual(modified_tag['description'],'This is an exampel tag used in api client test scripts')
    def testTagSetUpdate(self):
        example_tag_sets_id = TEST_TAG_SET_ID
        # grab the tag info
        tag_set = self._mc.tagSet(example_tag_sets_id)
        # change the name, label and description
        result = self._mc.updateTagSet(example_tag_sets_id, TEST_USER_EMAIL, 'modified label', 'modified description')
        modified_tag = self._mc.tagSet(example_tag_sets_id)
        self.assertEqual(modified_tag['name'],TEST_USER_EMAIL)
        self.assertEqual(modified_tag['label'],'modified label')
        self.assertEqual(modified_tag['description'],'modified description')
        # set it back
        result = self._mc.updateTagSet(example_tag_sets_id, TEST_USER_EMAIL, 'rahulbot', 'The tag set of Rahul!')
        modified_tag = self._mc.tagSet(example_tag_sets_id)
        self.assertEqual(modified_tag['name'], TEST_USER_EMAIL)
        self.assertEqual(modified_tag['label'],'rahulbot')
        self.assertEqual(modified_tag['description'],'The tag set of Rahul!')
class AdminApiTaggingContentTest(AdminApiBaseTest):
    """Tests for applying/removing tags on stories and sentences."""
    def testTagStories(self):
        """Tag a story with two tags, then replace them with one."""
        test_story_id = 2
        tag_set_name = TEST_USER_EMAIL
        # tag a story with two things
        desired_tags = [ mediacloud.api.StoryTag(test_story_id, tag_set_name, 'test_tag1'),
            mediacloud.api.StoryTag(test_story_id, tag_set_name, 'test_tag2') ]
        response = self._mc.tagStories(desired_tags)
        self.assertEqual(len(response),len(desired_tags))
        # make sure it worked
        story = self._mc.story(test_story_id,sentences=True)
        tags_on_story = [t for t in story['story_tags'] if t['tag_set']==tag_set_name]
        self.assertEqual(len(tags_on_story),len(desired_tags))
        # now remove one (clear_others drops any other tags in this set)
        desired_tags = [ mediacloud.api.StoryTag(test_story_id,TEST_USER_EMAIL,'test_tag1') ]
        response = self._mc.tagStories(desired_tags, clear_others=True)
        self.assertEqual(len(response),len(desired_tags))
        # and check it
        story = self._mc.story(test_story_id,sentences=True)
        tags_on_story = [t for t in story['story_tags'] if t['tag_set']==tag_set_name]
        self.assertEqual(len(tags_on_story),len(desired_tags))
    def testChunkify(self):
        """507 items in chunks of 50 -> ten full chunks plus one of 7."""
        chunk_size = 50
        data = [x for x in range(0,507)]
        chunked = self._mc._chunkify(data,chunk_size)
        self.assertEqual(11,len(chunked))
        for x in range(0,10):
            self.assertEqual(chunk_size,len(chunked[x]))
        self.assertEqual(7,len(chunked[10]))
    def testTagTonsOfSentences(self):
        """Submitting more tags than one request chunk holds still succeeds."""
        test_story_id = 435914244
        tag_set_name = TEST_USER_EMAIL
        # grab some sentence_ids to test with
        orig_story = self._mc.story(test_story_id,sentences=True)
        self.assertTrue( 'story_sentences' in orig_story )
        self.assertTrue( len(orig_story['story_sentences']) > 2 )
        sentence_ids = [ s['story_sentences_id'] for s in orig_story['story_sentences'][0:2] ]
        # build a large list of (duplicate) tags to force chunked submission
        desired_tags = []
        for _ in range(0, 80):
            # extend instead of repeated list concatenation (avoids quadratic copies)
            desired_tags.extend( mediacloud.api.SentenceTag(sid, tag_set_name, 'test_tag1') for sid in sentence_ids )
        response = self._mc.tagSentences(desired_tags)
        self.assertEqual(len(response),len(desired_tags))
    def testTagSentences(self):
        """Add, extend, then replace sentence tags and verify each step."""
        test_story_id = 435914244
        test_tag_id1 = '9172171' # mc-api-test@media.mit.edu:test_tag1
        test_tag_id2 = '9172168' # mc-api-test@media.mit.edu:test_tag2
        tag_set_name = TEST_USER_EMAIL
        # grab some sentence_ids to test with
        orig_story = self._mc.story(test_story_id,sentences=True)
        self.assertTrue( 'story_sentences' in orig_story )
        self.assertTrue( len(orig_story['story_sentences']) > 2 )
        sentence_ids = [ s['story_sentences_id'] for s in orig_story['story_sentences'][0:2] ]
        # add a tag
        desired_tags = [ mediacloud.api.SentenceTag(sid, tag_set_name, 'test_tag1')
            for sid in sentence_ids ]
        response = self._mc.tagSentences(desired_tags)
        self.assertEqual(len(response),len(desired_tags))
        # and verify it worked
        tagged_story = self._mc.story(test_story_id,sentences=True)
        # BUG fix: previously filtered orig_story (pre-tagging) instead of tagged_story
        tagged_sentences = [ s for s in tagged_story['story_sentences'] if len(s['tags']) > 0 ]
        for s in tagged_sentences:
            if s['story_sentences_id'] in sentence_ids:
                self.assertTrue(test_tag_id1 in s['tags'])
        # now do two tags on each story
        desired_tags = desired_tags + [ mediacloud.api.SentenceTag(sid, tag_set_name, 'test_tag2')
            for sid in sentence_ids ]
        response = self._mc.tagSentences(desired_tags)
        self.assertEqual(len(response),len(desired_tags))
        # and verify it worked
        tagged_story = self._mc.story(test_story_id,sentences=True)
        tagged_sentences = [ s for s in tagged_story['story_sentences'] if len(s['tags']) > 0 ]
        for s in tagged_sentences:
            if s['story_sentences_id'] in sentence_ids:
                self.assertTrue(test_tag_id1 in s['tags'])
                self.assertTrue(test_tag_id2 in s['tags'])
        # now remove one
        desired_tags = [ mediacloud.api.SentenceTag(sid, tag_set_name, 'test_tag1')
            for sid in sentence_ids ]
        response = self._mc.tagSentences(desired_tags, clear_others=True)
        self.assertEqual(len(response),len(desired_tags))
        # and check it
        tagged_story = self._mc.story(test_story_id,sentences=True)
        tagged_sentences = [ s for s in tagged_story['story_sentences'] if len(s['tags']) > 0 ]
        for s in tagged_sentences:
            if s['story_sentences_id'] in sentence_ids:
                self.assertTrue(test_tag_id1 in s['tags'])
                self.assertFalse(test_tag_id2 in s['tags'])
class AdminTopicStoryListTest(AdminApiBaseTest):
    """Tests for the admin topicStoryList endpoint: limits, paging, sorting."""
    TOPIC_ID = 1
    def testTopicStoryList(self):
        # use the shared class constant (was a hard-coded literal plus an
        # unused local `topic_id`)
        response = self._mc.topicStoryList(self.TOPIC_ID)
        self.assertEqual(len(response['stories']), 10)
    def testTopicStoryListPaging(self):
        """Two consecutive pages must not share any story ids."""
        limit = 50
        responsePage1 = self._mc.topicStoryList(self.TOPIC_ID, limit=limit)
        responsePage1Ids = [m['stories_id'] for m in responsePage1['stories']]
        self.assertEqual(len(responsePage1['stories']), 50)
        self.assertTrue('continuation_id' in responsePage1)
        responsePage2 = self._mc.topicStoryList(self.TOPIC_ID, continuation_id=responsePage1['continuation_id'], limit=limit)
        responsePage2Ids = [m['stories_id'] for m in responsePage2['stories']]
        # verify no duplicated story ids across pages
        combinedIds = set(responsePage1Ids + responsePage2Ids)
        self.assertEqual(len(responsePage1Ids) + len(responsePage2Ids), len(combinedIds))
    def testTopicStoryListLimit(self):
        """The limit parameter controls the page size (default is 10)."""
        response = self._mc.topicStoryList(self.TOPIC_ID)
        self.assertEqual(len(response['stories']), 10)
        response = self._mc.topicStoryList(self.TOPIC_ID, limit=76)
        self.assertEqual(len(response['stories']), 76)
        response = self._mc.topicStoryList(self.TOPIC_ID, limit=500)
        self.assertEqual(len(response['stories']), 500)
    def testTopicStoryListSortSocial(self):
        """sort='social' orders stories by descending bitly click count."""
        response = self._mc.topicStoryList(self.TOPIC_ID, limit=500, sort='social')
        last_bitly_count = 1000000000000
        for story in response['stories']:
            self.assertTrue(story['bitly_click_count'] <= last_bitly_count)
            last_bitly_count = story['bitly_click_count']
    def testTopicStoryListSortInlink(self):
        """sort='inlink' orders stories by descending inlink count."""
        response = self._mc.topicStoryList(self.TOPIC_ID, limit=500, sort='inlink')
        last_inlink_count = 1000000000000
        for story in response['stories']:
            self.assertTrue(story['inlink_count'] <= last_inlink_count)
            last_inlink_count = story['inlink_count']
class AdminTopicMediaListTest(AdminApiBaseTest):
    """Tests for the admin topicMediaList endpoint: shape, limits, paging, sorting."""
    TOPIC_ID = 1
    def testTopicMediaList(self):
        # every result page carries a continuation id and a media list
        response = self._mc.topicMediaList(self.TOPIC_ID)
        self.assertTrue('continuation_id' in response)
        self.assertTrue('media' in response)
        for media in response['media']:
            self.assertTrue('media_id' in media)
    def testTopicMediaListLimit(self):
        # default page size is 10; limit caps the page
        response = self._mc.topicMediaList(self.TOPIC_ID)
        self.assertEqual(len(response['media']),10)
        response = self._mc.topicMediaList(self.TOPIC_ID,limit=76)
        self.assertEqual(len(response['media']),76)
        response = self._mc.topicMediaList(self.TOPIC_ID,limit=500)
        self.assertEqual(len(response['media']),500)
    def testTopicMediaListPaging(self):
        # two consecutive pages must not share media ids
        limit = 50
        responsePage1 = self._mc.topicMediaList(self.TOPIC_ID,limit=limit)
        responsePage1Ids = [m['media_id'] for m in responsePage1['media']]
        self.assertEqual(len(responsePage1['media']),50)
        self.assertTrue('continuation_id' in responsePage1)
        responsePage2 = self._mc.topicMediaList(self.TOPIC_ID, continuation_id=responsePage1['continuation_id'],limit=limit)
        responsePage2Ids = [m['media_id'] for m in responsePage2['media']]
        # verify no duplicated media_ids across pages
        combinedIds = set(responsePage1Ids+responsePage2Ids)
        self.assertEqual(len(responsePage1Ids)+len(responsePage2Ids),len(combinedIds))
    def testTopicMediaListSortSocial(self):
        # sort='social' orders media by descending bitly click count
        response = self._mc.topicMediaList(self.TOPIC_ID, sort='social')
        last_bitly_count = 1000000000000
        for media in response['media']:
            self.assertTrue(media['bitly_click_count']<=last_bitly_count)
            last_bitly_count = media['bitly_click_count']
    def testTopicMediaListSortInlink(self):
        # sort='inlink' orders media by descending inlink count
        response = self._mc.topicMediaList(self.TOPIC_ID, sort='inlink')
        last_inlink_count = 1000000000000
        for media in response['media']:
            self.assertTrue(media['inlink_count']<=last_inlink_count)
            last_inlink_count = media['inlink_count']
class AdminTopicWordCountTest(AdminApiBaseTest):
    """Tests for the admin topicWordCount endpoint."""
    TOPIC_ID = 1
    def testResults(self):
        """Default call returns 500 terms; index 3 is a known fixture term."""
        term_freq = self._mc.topicWordCount(self.TOPIC_ID)
        self.assertEqual(len(term_freq), 500)
        self.assertEqual(term_freq[3]['term'], u'george')
    def testSort(self):
        """Results must be sorted by descending count."""
        term_freq = self._mc.topicWordCount(self.TOPIC_ID)
        # verify sorted in desc order
        last_count = 10000000000
        for freq in term_freq:
            self.assertTrue(last_count >= freq['count'])
            last_count = freq['count']
    def testNumWords(self):
        """num_words caps the number of returned terms."""
        term_freq = self._mc.topicWordCount(self.TOPIC_ID)
        self.assertEqual(len(term_freq), 500)
        # fixed: previously requested num_words=500 (the default), so the
        # parameter was never actually exercised; mirror ApiWordCountTest
        term_freq = self._mc.topicWordCount(self.TOPIC_ID, num_words=100)
        self.assertEqual(len(term_freq), 100)
class AdminTopicSentenceCountTest(AdminApiBaseTest):
    """Tests for the admin topicSentenceCount endpoint."""
    TOPIC_ID = 1
    def testSentenceCount(self):
        # total count, then a count restricted to one snapshot
        results = self._mc.topicSentenceCount(self.TOPIC_ID)
        self.assertTrue(int(results['count'])>10000)
        results = self._mc.topicSentenceCount(self.TOPIC_ID, snapshot_id=365)
        self.assertTrue(int(results['count'])>1000)
    def testSentenceCountSplit(self):
        # NOTE(review): split size of 4 for a 3-year weekly split looks
        # fixture-specific — confirm against the topic's actual date range
        results = self._mc.topicSentenceCount(self.TOPIC_ID,'*','*',True,'2013-01-01','2016-01-01')
        self.assertEqual(results['split']['gap'],'+7DAYS')
        self.assertEqual(len(results['split']),4)
|
# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from collections import OrderedDict
import iso8601 # parser for timestamp format
from .ast import ASTStateChoice, ASTCompOp, ASTCompNot, ASTCompAndOr
class Timestamp(object):
    """A string validated to be an ISO-8601 timestamp.

    Wrapping the value in its own type lets the parser distinguish
    timestamps from ordinary strings.
    """
    def __init__(self, timestamp):
        """Validate and store the timestamp string.

        Args:
            timestamp (string): Timestamp string

        Exceptions:
            An exception is thrown if the string is not a valid timestamp
        """
        # parse_date is called only for validation; the parsed result is
        # discarded because the original string form is what gets serialized
        iso8601.parse_date(timestamp)
        self.timestamp = timestamp

    def __str__(self):
        return self.timestamp
class _StateMachineEncoder(json.JSONEncoder):
    """Custom JSONEncoder that serializes Timestamp values as their string form."""
    def default(self, o):
        # isinstance (rather than an exact type() comparison) is the
        # idiomatic check and also covers potential Timestamp subclasses
        if isinstance(o, Timestamp):
            return str(o)
        return super(_StateMachineEncoder, self).default(o)
class Branch(dict):
    """JSON-serializable branch of a state machine: 'States' plus 'StartAt'.

    Args:
        ast: AST node providing `states`, an ordered list of state nodes;
             the first state becomes the StartAt target
    """
    def __init__(self, ast):
        super(Branch, self).__init__()
        # Makes states be dumped in the same order they were added
        # making it easier to read the output and match it to the input
        self['States'] = OrderedDict()
        for state in ast.states:
            self['States'][state.name] = State(state)
        self['StartAt'] = ast.states[0].name
class StepFunction(Branch):
    """Top-level state machine: a Branch plus optional machine-wide options."""
    def __init__(self, ast):
        super(StepFunction, self).__init__(ast)
        # optional top-level fields are emitted only when present in the AST
        if ast.comment is not None:
            self['Comment'] = ast.comment.value
        if ast.version is not None:
            self['Version'] = ast.version.value.value
        if ast.timeout is not None:
            self['TimeoutSeconds'] = ast.timeout.value.value
    def definition(self, **kwargs):
        """Dump the state machine into the JSON format needed by AWS

        Args:
            kwargs (dict): Arguments passed to json.dumps()
        """
        return json.dumps(self, cls=_StateMachineEncoder, **kwargs)
class State(dict):
    """JSON-serializable representation of a single state machine state.

    Dict keys are inserted in a fixed order (generic modifiers first, then
    state-type-specific fields) so the serialized output is predictable.
    """
    def __init__(self, ast):
        super(State, self).__init__()
        self['Type'] = ast.state_type
        # Generic Modifiers for all States
        if ast.comment is not None:
            # No longer a token, parsed by AST class into name/comment
            self['Comment'] = ast.comment
        if ast.timeout is not None:
            timeout = ast.timeout.value.value
            self['TimeoutSeconds'] = timeout
        else:
            timeout = 60 # default
        if ast.heartbeat is not None:
            heartbeat = ast.heartbeat.value.value
            # a heartbeat >= timeout would never fire before the state times out
            if not heartbeat < timeout:
                ast.heartbeat.raise_error("Heartbeat must be less than timeout (defaults to 60)")
            self['HeartbeatSeconds'] = heartbeat
        if ast.input is not None:
            self['InputPath'] = ast.input.value.value
        if ast.result is not None:
            self['ResultPath'] = ast.result.value.value
        if ast.output is not None:
            self['OutputPath'] = ast.output.value.value
        if ast.data is not None:
            self['Result'] = ast.data.value
        if ast.catch is not None:
            self['Catch'] = []
            for catch in ast.catch:
                self['Catch'].append(Catch(catch))
        if ast.retry is not None:
            self['Retry'] = []
            for retry in ast.retry:
                self['Retry'].append(Retry(retry))
        # State specific arguments
        if ast.state_type == 'Fail':
            self['Error'] = ast.error.value
            self['Cause'] = ast.cause.value
        if ast.state_type == 'Task':
            self['Resource'] = ast.arn.value
        if ast.state_type == 'Wait':
            # e.g. 'seconds_path' -> 'SecondsPath'
            key = ''.join([t.capitalize() for t in ast.type.value.split('_')])
            self[key] = ast.val.value
        if ast.state_type == 'Choice':
            key = ASTStateChoice.DEFAULT
            if key in ast.branches:
                self['Default'] = ast.branches[key]
                del ast.branches[key]
            self['Choices'] = []
            for comp in ast.branches:
                self['Choices'].append(Choice(comp, ast.branches[comp]))
        if ast.state_type == 'Parallel':
            self['Branches'] = []
            for branch in ast.branches:
                self['Branches'].append(Branch(branch))
        if ast.next is not None:
            self['Next'] = ast.next
        if ast.end:
            self['End'] = ast.end
class Catch(dict):
    """JSON-serializable Catch clause for a state.

    Args:
        ast: AST node with `errors` (token or list of tokens), `next`
             (target state name), and optional `path` (ResultPath token)
    """
    def __init__(self, ast):
        super(Catch, self).__init__()
        errors = ast.errors
        # Support a single string for error type
        # ??? put this transformation in AST
        if type(errors) != list:
            errors = [errors]
        if len(errors) == 0:
            # no errors listed: default to catching everything
            self['ErrorEquals'] = ['States.ALL']
        else:
            self['ErrorEquals'] = [e.value for e in errors]
        self['Next'] = ast.next
        if ast.path is not None:
            self['ResultPath'] = ast.path.value
class Retry(dict):
    """JSON-serializable Retry clause for a state.

    Args:
        ast: AST node with `errors` (token or list of tokens), `interval`,
             `max`, and `backoff` tokens
    """
    def __init__(self, ast):
        super(Retry, self).__init__()
        errors = ast.errors
        # Support a single string for error type
        # ??? put this transformation in AST
        if type(errors) != list:
            errors = [errors]
        if len(errors) == 0:
            # no errors listed: default to retrying on everything
            self['ErrorEquals'] = ['States.ALL']
        else:
            self['ErrorEquals'] = [e.value for e in errors]
        self['IntervalSeconds'] = ast.interval.value
        self['MaxAttempts'] = ast.max.value
        self['BackoffRate'] = float(ast.backoff.value)
# Maps a source comparison operator plus the Python type of its right-hand
# value to the Amazon States Language comparison operator name.
# NOTE: bool appears only under '==' — ordering comparisons on booleans
# deliberately fall through to the KeyError handling in Choice().
COMPARISON = {
    '==': {
        str: 'StringEquals',
        int: 'NumericEquals',
        float: 'NumericEquals',
        bool: 'BooleanEquals',
        Timestamp: 'TimestampEquals',
    },
    '<': {
        str: 'StringLessThan',
        int: 'NumericLessThan',
        float: 'NumericLessThan',
        Timestamp: 'TimestampLessThan',
    },
    '>': {
        str: 'StringGreaterThan',
        int: 'NumericGreaterThan',
        float: 'NumericGreaterThan',
        Timestamp: 'TimestampGreaterThan',
    },
    '<=': {
        str: 'StringLessThanEquals',
        int: 'NumericLessThanEquals',
        float: 'NumericLessThanEquals',
        Timestamp: 'TimestampLessThanEquals',
    },
    '>=': {
        str: 'StringGreaterThanEquals',
        int: 'NumericGreaterThanEquals',
        float: 'NumericGreaterThanEquals',
        Timestamp: 'TimestampGreaterThanEquals',
    },
}
def Choice(ast, target=None):
    """Build the Choice dict for a comparison AST node.

    Args:
        ast: comparison AST node (ASTCompOp, ASTCompNot, or an ASTCompAndOr)
        target: optional name of the state to transition to ('Next')

    Returns:
        OpChoice, NotChoice, or AndOrChoice; raises via ast.raise_error()
        for unsupported operator/type combinations
    """
    if type(ast) == ASTCompOp:
        var = ast.var.value
        val = ast.val.value
        op = ast.op.value
        op_type = type(val) # The type of the operator is based on the value type
        try:
            if op == '!=':
                # the States Language has no not-equals operator, so emit
                # Not(Equals(...)) instead
                choice = OpChoice(var, op, val)
                return NotChoice(choice, target)
            else:
                op = COMPARISON[op][op_type]
                return OpChoice(var, op, val, target)
        except KeyError:
            msg = "Cannot make '{}' comparison with type '{}'".format(op, op_type)
            ast.raise_error(msg)
    elif type(ast) == ASTCompNot:
        return NotChoice(Choice(ast.comp), target)
    elif isinstance(ast, ASTCompAndOr):
        return AndOrChoice(ast, target)
    else:
        ast.raise_error("Comparison support not implemented yet")
class OpChoice(dict):
    """A ChoiceState Choice wrapping a comparison and reference to state to execute"""
    def __init__(self, var, op, val, target=None):
        super(OpChoice, self).__init__(Variable=var)
        # remember the operator key so __repr__ can locate the comparison value
        self.op = op
        self[op] = val
        if target is not None:
            self['Next'] = target

    def __str__(self):
        return repr(self)

    def __repr__(self):
        parts = (self['Variable'], self.op, self[self.op])
        return "(%s %s %s)" % parts
class NotChoice(dict):
    """Wrapper around a Choice that negates the wrapped comparison."""
    def __init__(self, comp, target=None):
        super(NotChoice, self).__init__(Not=comp)
        if target is not None:
            self['Next'] = target

    def __str__(self):
        return repr(self)

    def __repr__(self):
        return "(Not %r)" % (self['Not'],)
class AndOrChoice(dict):
    """Wrapper around a list of Choices combined with 'And' or 'Or'."""
    def __init__(self, ast, target=None):
        super(AndOrChoice, self).__init__()
        # keep the operator name for __str__ / __repr__
        self.op = ast.op
        self[self.op] = [Choice(comp) for comp in ast.comps]
        if target is not None:
            self['Next'] = target

    def __str__(self):
        return repr(self)

    def __repr__(self):
        separator = " {} ".format(self.op.lower())
        return "(" + separator.join(repr(v) for v in self[self.op]) + ")"
def _resolve(actual, defaults):
"""Break the actual arn apart and insert the defaults for the
unspecified begining parts of the arn (based on position in the
arn)
Example:
actual: 'account_id:function:FUNCTION_NAME'
defaults: ['arn', 'aws', 'lambda', 'region', 'account_id', 'function']
return: 'arn:aws:lambda:region:account_id:function:FUNCTION_NAME'
Args:
actual (string): ARN style string, potentially missing part of the
begining of the ARN. Must include the final name of
the ARN function
defaults (list): List of ARN components to use to fill in the missing
parts of the actual ARN
Returns:
(string): Complete ARN
"""
actual_ = actual.split(':')
name = actual_.pop()
offset = len(defaults) - len(actual_)
try:
# Wrap the join because an error should only be produced if we try
# to use a None value. None can be passed in the defaults if that
# default value is never used
vals = defaults[:offset]
vals.extend(actual_)
vals.append(name)
arn = ":".join(vals)
return arn
except TypeError:
raise Exception("One or more of the default values for ARN '{}' was not specified".format(actual))
def Lambda(name, region=None, account=None):
    """Resolve a partial Lambda ARN into a full ARN for the given region/account.

    Args:
        name (string): Partial ARN
        region (string): AWS region of the Lambda function
        account (string): AWS account id owning the Lambda function
    """
    return _resolve(name, ['arn', 'aws', 'lambda', region, account, 'function'])
def Activity(name, region=None, account=None):
    """Resolve a partial Activity ARN into a full ARN for the given region/account.

    Args:
        name (string): Partial ARN
        region (string): AWS region of the Activity ARN
        account (string): AWS account id owning the Activity ARN
    """
    return _resolve(name, ['arn', 'aws', 'states', region, account, 'activity'])
Set the default ErrorEquals to States.ALL
# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from collections import OrderedDict
import iso8601 # parser for timestamp format
from .ast import ASTStateChoice, ASTCompOp, ASTCompNot, ASTCompAndOr
class Timestamp(object):
    """Wrapper around a timestamp string.

    Used to determine if a string is in a valid timestamp format and type it
    for the parser
    """
    def __init__(self, timestamp):
        """
        Args:
            timestamp (string): Timestamp string

        Exceptions:
            An exception is thrown if the string is not a valid timestamp
        """
        # parse_date is called only to validate; the parsed result is discarded
        iso8601.parse_date(timestamp)
        self.timestamp = timestamp
    def __str__(self):
        return self.timestamp
class _StateMachineEncoder(json.JSONEncoder):
    """Custom JSONEncoder that handles the Timestamp type"""
    def default(self, o):
        # Timestamps serialize as their original string representation
        if type(o) == Timestamp:
            return str(o)
        return super(_StateMachineEncoder, self).default(o)
class Branch(dict):
    """JSON-serializable branch of a state machine: 'States' plus 'StartAt'.

    Args:
        ast: AST node providing `states`, an ordered list of state nodes;
             the first state becomes the StartAt target
    """
    def __init__(self, ast):
        super(Branch, self).__init__()
        # Makes states be dumped in the same order they were added
        # making it easier to read the output and match it to the input
        self['States'] = OrderedDict()
        for state in ast.states:
            self['States'][state.name] = State(state)
        self['StartAt'] = ast.states[0].name
class StepFunction(Branch):
    """Top-level state machine: a Branch plus optional machine-wide options."""
    def __init__(self, ast):
        super(StepFunction, self).__init__(ast)
        # optional top-level fields are emitted only when present in the AST
        if ast.comment is not None:
            self['Comment'] = ast.comment.value
        if ast.version is not None:
            self['Version'] = ast.version.value.value
        if ast.timeout is not None:
            self['TimeoutSeconds'] = ast.timeout.value.value
    def definition(self, **kwargs):
        """Dump the state machine into the JSON format needed by AWS

        Args:
            kwargs (dict): Arguments passed to json.dumps()
        """
        return json.dumps(self, cls=_StateMachineEncoder, **kwargs)
class State(dict):
    """JSON-serializable representation of a single state machine state.

    Dict keys are inserted in a fixed order (generic modifiers first, then
    state-type-specific fields) so the serialized output is predictable.
    """
    def __init__(self, ast):
        super(State, self).__init__()
        self['Type'] = ast.state_type
        # Generic Modifiers for all States
        if ast.comment is not None:
            # No longer a token, parsed by AST class into name/comment
            self['Comment'] = ast.comment
        if ast.timeout is not None:
            timeout = ast.timeout.value.value
            self['TimeoutSeconds'] = timeout
        else:
            timeout = 60 # default
        if ast.heartbeat is not None:
            heartbeat = ast.heartbeat.value.value
            # a heartbeat >= timeout would never fire before the state times out
            if not heartbeat < timeout:
                ast.heartbeat.raise_error("Heartbeat must be less than timeout (defaults to 60)")
            self['HeartbeatSeconds'] = heartbeat
        if ast.input is not None:
            self['InputPath'] = ast.input.value.value
        if ast.result is not None:
            self['ResultPath'] = ast.result.value.value
        if ast.output is not None:
            self['OutputPath'] = ast.output.value.value
        if ast.data is not None:
            self['Result'] = ast.data.value
        if ast.catch is not None:
            self['Catch'] = []
            for catch in ast.catch:
                self['Catch'].append(Catch(catch))
        if ast.retry is not None:
            self['Retry'] = []
            for retry in ast.retry:
                self['Retry'].append(Retry(retry))
        # State specific arguments
        if ast.state_type == 'Fail':
            self['Error'] = ast.error.value
            self['Cause'] = ast.cause.value
        if ast.state_type == 'Task':
            self['Resource'] = ast.arn.value
        if ast.state_type == 'Wait':
            # e.g. 'seconds_path' -> 'SecondsPath'
            key = ''.join([t.capitalize() for t in ast.type.value.split('_')])
            self[key] = ast.val.value
        if ast.state_type == 'Choice':
            key = ASTStateChoice.DEFAULT
            if key in ast.branches:
                self['Default'] = ast.branches[key]
                del ast.branches[key]
            self['Choices'] = []
            for comp in ast.branches:
                self['Choices'].append(Choice(comp, ast.branches[comp]))
        if ast.state_type == 'Parallel':
            self['Branches'] = []
            for branch in ast.branches:
                self['Branches'].append(Branch(branch))
        if ast.next is not None:
            self['Next'] = ast.next
        if ast.end:
            self['End'] = ast.end
class Catch(dict):
    """JSON-serializable Catch clause for a state.

    Args:
        ast: AST node with `errors` (token or list of tokens), `next`
             (target state name), and optional `path` (ResultPath token)
    """
    def __init__(self, ast):
        super(Catch, self).__init__()
        errors = ast.errors
        # Support a single string for error type
        # ??? put this transformation in AST
        if type(errors) != list:
            errors = [errors]
        if len(errors) == 0:
            # BUG fix: previously the default put plain strings into `errors`
            # and then read `.value` from them, raising AttributeError; set
            # the States.ALL default directly instead
            self['ErrorEquals'] = ['States.ALL']
        else:
            self['ErrorEquals'] = [e.value for e in errors]
        self['Next'] = ast.next
        if ast.path is not None:
            self['ResultPath'] = ast.path.value
class Retry(dict):
    """JSON-serializable Retry clause for a state.

    Args:
        ast: AST node with `errors` (token or list of tokens), `interval`,
             `max`, and `backoff` tokens
    """
    def __init__(self, ast):
        super(Retry, self).__init__()
        errors = ast.errors
        # Support a single string for error type
        # ??? put this transformation in AST
        if type(errors) != list:
            errors = [errors]
        if len(errors) == 0:
            # BUG fix: previously the default put plain strings into `errors`
            # and then read `.value` from them, raising AttributeError; set
            # the States.ALL default directly instead
            self['ErrorEquals'] = ['States.ALL']
        else:
            self['ErrorEquals'] = [e.value for e in errors]
        self['IntervalSeconds'] = ast.interval.value
        self['MaxAttempts'] = ast.max.value
        self['BackoffRate'] = float(ast.backoff.value)
# Maps a source comparison operator plus the Python type of its right-hand
# value to the Amazon States Language comparison operator name.
# NOTE: bool appears only under '==' — ordering comparisons on booleans
# deliberately fall through to the KeyError handling in Choice().
COMPARISON = {
    '==': {
        str: 'StringEquals',
        int: 'NumericEquals',
        float: 'NumericEquals',
        bool: 'BooleanEquals',
        Timestamp: 'TimestampEquals',
    },
    '<': {
        str: 'StringLessThan',
        int: 'NumericLessThan',
        float: 'NumericLessThan',
        Timestamp: 'TimestampLessThan',
    },
    '>': {
        str: 'StringGreaterThan',
        int: 'NumericGreaterThan',
        float: 'NumericGreaterThan',
        Timestamp: 'TimestampGreaterThan',
    },
    '<=': {
        str: 'StringLessThanEquals',
        int: 'NumericLessThanEquals',
        float: 'NumericLessThanEquals',
        Timestamp: 'TimestampLessThanEquals',
    },
    '>=': {
        str: 'StringGreaterThanEquals',
        int: 'NumericGreaterThanEquals',
        float: 'NumericGreaterThanEquals',
        Timestamp: 'TimestampGreaterThanEquals',
    },
}
def Choice(ast, target=None):
    """Build the Choice dict for a comparison AST node.

    Args:
        ast: comparison AST node (ASTCompOp, ASTCompNot, or an ASTCompAndOr)
        target: optional name of the state to transition to ('Next')

    Returns:
        OpChoice, NotChoice, or AndOrChoice; raises via ast.raise_error()
        for unsupported operator/type combinations
    """
    if type(ast) == ASTCompOp:
        var = ast.var.value
        val = ast.val.value
        op = ast.op.value
        op_type = type(val) # The type of the operator is based on the value type
        try:
            if op == '!=':
                # the States Language has no not-equals operator, so emit
                # Not(Equals(...)) instead
                op = COMPARISON['=='][op_type]
                choice = OpChoice(var, op, val)
                return NotChoice(choice, target)
            else:
                op = COMPARISON[op][op_type]
                return OpChoice(var, op, val, target)
        except KeyError:
            msg = "Cannot make '{}' comparison with type '{}'".format(op, op_type)
            ast.raise_error(msg)
    elif type(ast) == ASTCompNot:
        return NotChoice(Choice(ast.comp), target)
    elif isinstance(ast, ASTCompAndOr):
        return AndOrChoice(ast, target)
    else:
        ast.raise_error("Comparison support not implemented yet")
class OpChoice(dict):
    """A ChoiceState Choice wrapping a comparison and reference to state to execute"""
    def __init__(self, var, op, val, target=None):
        super(OpChoice, self).__init__(Variable = var)
        self.op = op # for __str__ / __repr__
        # the ASL operator name doubles as the dict key for the value
        self[self.op] = val
        if target is not None:
            self['Next'] = target
    def __str__(self):
        return repr(self)
    def __repr__(self):
        return "({} {} {})".format(self['Variable'], self.op, self[self.op])
class NotChoice(dict):
    """Wrapper around a single Choice that inverts its result ('Not' rule)."""
    def __init__(self, comp, target=None):
        super(NotChoice, self).__init__()
        self['Not'] = comp
        if target is not None:
            self['Next'] = target

    def __repr__(self):
        return "(Not {!r})".format(self['Not'])

    def __str__(self):
        return repr(self)
class AndOrChoice(dict):
    """Wrapper around a list of Choices combined with an 'And'/'Or' rule."""
    def __init__(self, ast, target=None):
        super(AndOrChoice, self).__init__()
        self.op = ast.op  # combinator name (used as the dict key and in __repr__)
        self[self.op] = [Choice(comp) for comp in ast.comps]
        if target is not None:
            self['Next'] = target

    def __repr__(self):
        parts = map(repr, self[self.op])
        joiner = " {} ".format(self.op.lower())
        return "(" + joiner.join(parts) + ")"

    def __str__(self):
        return repr(self)
def _resolve(actual, defaults):
"""Break the actual arn apart and insert the defaults for the
unspecified begining parts of the arn (based on position in the
arn)
Example:
actual: 'account_id:function:FUNCTION_NAME'
defaults: ['arn', 'aws', 'lambda', 'region', 'account_id', 'function']
return: 'arn:aws:lambda:region:account_id:function:FUNCTION_NAME'
Args:
actual (string): ARN style string, potentially missing part of the
begining of the ARN. Must include the final name of
the ARN function
defaults (list): List of ARN components to use to fill in the missing
parts of the actual ARN
Returns:
(string): Complete ARN
"""
actual_ = actual.split(':')
name = actual_.pop()
offset = len(defaults) - len(actual_)
try:
# Wrap the join because an error should only be produced if we try
# to use a None value. None can be passed in the defaults if that
# default value is never used
vals = defaults[:offset]
vals.extend(actual_)
vals.append(name)
arn = ":".join(vals)
return arn
except TypeError:
raise Exception("One or more of the default values for ARN '{}' was not specified".format(actual))
def Lambda(name, region=None, account=None):
    """Resolve a partial Lambda ARN into a full ARN with the given region/account.

    Args:
        name (string): Partial ARN (at minimum the function name)
        region (string): AWS region of the Lambda function
        account (string): AWS account id owning the Lambda function

    Returns:
        (string): Complete Lambda function ARN
    """
    defaults = ['arn', 'aws', 'lambda', region, account, 'function']
    return _resolve(name, defaults)
def Activity(name, region=None, account=None):
    """Resolve a partial Activity ARN into a full ARN with the given region/account.

    Args:
        name (string): Partial ARN (at minimum the activity name)
        region (string): AWS region of the Activity ARN
        account (string): AWS account id owning the Activity ARN

    Returns:
        (string): Complete Step Functions activity ARN
    """
    defaults = ['arn', 'aws', 'states', region, account, 'activity']
    return _resolve(name, defaults)
|
# -*- coding: utf-8 -*-
# ***************************************************************************
# * *
# * Copyright (c) 2019 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import FreeCAD
import FreeCADGui
import Part
import Path
import PathScripts
import PathScripts.PathGui as PathGui
import PathScripts.PathLog as PathLog
import PathScripts.PathToolController as PathToolController
import PathScripts.PathToolEdit as PathToolEdit
import PathScripts.PathUtil as PathUtil
from PySide import QtCore, QtGui
# Qt translation handling
def translate(context, text, disambig=None):
    """Look up the translated form of `text` for `context` via Qt's catalog."""
    return QtCore.QCoreApplication.translate(context, text, disambig)
class ViewProvider:
    """View provider for ToolController objects.

    Hides the generic view properties (meaningless for a tool controller)
    and routes edit requests / the context menu to the TaskPanel editor.
    """
    def __init__(self, vobj):
        vobj.Proxy = self

    def attach(self, vobj):
        hidden = 2  # editor mode 2 = hidden
        for prop in ('LineWidth', 'MarkerColor', 'NormalColor', 'DisplayMode',
                     'BoundingBox', 'Selectable', 'ShapeColor', 'Transparency',
                     'Visibility'):
            vobj.setEditorMode(prop, hidden)
        self.vobj = vobj

    def __getstate__(self):
        # Nothing to persist into the document file.
        return None

    def __setstate__(self, state):
        return None

    def getIcon(self):
        return ":/icons/Path-ToolController.svg"

    def onChanged(self, vobj, prop):
        # Keep the generic view properties hidden after any change.
        hidden = 2
        for name in ('LineWidth', 'MarkerColor', 'NormalColor', 'DisplayMode',
                     'BoundingBox', 'Selectable'):
            vobj.setEditorMode(name, hidden)

    def onDelete(self, vobj, args=None):
        PathUtil.clearExpressionEngine(vobj.Object)
        return True

    def updateData(self, vobj, prop):
        # Executed when a property of the APP object changes - nothing to do.
        pass

    def setEdit(self, vobj=None, mode=0):
        # Only the default edit mode (0) opens the task panel.
        if 0 != mode:
            return False
        if vobj is None:
            vobj = self.vobj
        FreeCADGui.Control.closeDialog()
        taskd = TaskPanel(vobj.Object)
        FreeCADGui.Control.showDialog(taskd)
        taskd.setupUi()
        FreeCAD.ActiveDocument.recompute()
        return True

    def unsetEdit(self, vobj, mode):
        # Executed when the user cancels or terminates edit mode.
        return False

    def setupContextMenu(self, vobj, menu):
        # Replace the default context menu with a single 'Edit' entry.
        PathLog.track()
        for existing in menu.actions():
            menu.removeAction(existing)
        edit = QtGui.QAction(translate('Path', 'Edit'), menu)
        edit.triggered.connect(self.setEdit)
        menu.addAction(edit)
def Create(name = 'Default Tool', tool=None, toolNumber=1):
    """Create a ToolController document object and attach this view provider.

    Args:
        name: Label for the new tool controller.
        tool: Optional tool, passed through to PathToolController.Create.
        toolNumber: Tool number used for g-code generation.

    Returns:
        The newly created document object.
    """
    PathLog.track(tool, toolNumber)
    obj = PathToolController.Create(name, tool, toolNumber)
    ViewProvider(obj.ViewObject)
    return obj
class CommandPathToolController:
    """GUI command that adds a ToolController to a Job in the active document."""
    def GetResources(self):
        return {'Pixmap': 'Path-LengthOffset',
                'MenuText': QtCore.QT_TRANSLATE_NOOP("Path_ToolController", "Add Tool Controller to the Job"),
                'ToolTip': QtCore.QT_TRANSLATE_NOOP("Path_ToolController", "Add Tool Controller")}

    def IsActive(self):
        # Enabled only when the active document contains a Job object,
        # identified by its object name prefix.
        doc = FreeCAD.ActiveDocument
        if doc is None:
            return False
        return any(o.Name[:3] == "Job" for o in doc.Objects)

    def Activated(self):
        PathLog.track()
        Create()
class ToolControllerEditor:
    """Shared editor for a ToolController, usable as a task panel or a dialog."""
    def __init__(self, obj, asDialog):
        # obj: the ToolController document object being edited.
        self.form = FreeCADGui.PySideUic.loadUi(":/panels/DlgToolControllerEdit.ui")
        if not asDialog:
            # Task-panel usage: FreeCAD provides its own OK/Cancel buttons.
            self.form.buttonBox.hide()
        self.obj = obj
        # Quantity spin boxes synchronize widget <-> object property.
        self.vertFeed = PathGui.QuantitySpinBox(self.form.vertFeed, obj, 'VertFeed')
        self.horizFeed = PathGui.QuantitySpinBox(self.form.horizFeed, obj, 'HorizFeed')
        self.vertRapid = PathGui.QuantitySpinBox(self.form.vertRapid, obj, 'VertRapid')
        self.horizRapid = PathGui.QuantitySpinBox(self.form.horizRapid, obj, 'HorizRapid')
        self.editor = PathToolEdit.ToolEditor(obj.Tool, self.form.toolEditor)

    def updateUi(self):
        """Copy the object's current values into the UI widgets."""
        tc = self.obj
        self.form.tcName.setText(tc.Label)
        self.form.tcNumber.setValue(tc.ToolNumber)
        self.horizFeed.updateSpinBox()
        self.horizRapid.updateSpinBox()
        self.vertFeed.updateSpinBox()
        self.vertRapid.updateSpinBox()
        self.form.spindleSpeed.setValue(tc.SpindleSpeed)
        index = self.form.spindleDirection.findText(tc.SpindleDir, QtCore.Qt.MatchFixedString)
        if index >= 0:
            self.form.spindleDirection.setCurrentIndex(index)
        self.editor.updateUI()

    def updateToolController(self):
        """Copy the UI values back into the ToolController object."""
        tc = self.obj
        try:
            tc.Label = self.form.tcName.text()
            tc.ToolNumber = self.form.tcNumber.value()
            self.horizFeed.updateProperty()
            self.vertFeed.updateProperty()
            self.horizRapid.updateProperty()
            self.vertRapid.updateProperty()
            tc.SpindleSpeed = self.form.spindleSpeed.value()
            tc.SpindleDir = self.form.spindleDirection.currentText()
            self.editor.updateTool()
            tc.Tool = self.editor.tool
        except Exception as e:
            PathLog.error(translate("PathToolController", "Error updating TC: %s") % e)

    def refresh(self):
        """Push UI -> object, then re-read the object back into the UI."""
        # Block signals so the round-trip doesn't re-trigger editingFinished.
        self.form.blockSignals(True)
        self.updateToolController()
        self.updateUi()
        self.form.blockSignals(False)

    def setupUi(self):
        """Wire editingFinished of all fields to a full refresh round-trip."""
        self.editor.setupUI()
        self.form.tcName.editingFinished.connect(self.refresh)
        self.form.horizFeed.editingFinished.connect(self.refresh)
        self.form.vertFeed.editingFinished.connect(self.refresh)
        self.form.horizRapid.editingFinished.connect(self.refresh)
        self.form.vertRapid.editingFinished.connect(self.refresh)
class TaskPanel:
    """FreeCAD task panel hosting the ToolControllerEditor, plus a temporary
    cylinder object that visualizes the tool while editing."""
    def __init__(self, obj):
        self.editor = ToolControllerEditor(obj, False)
        self.form = self.editor.form
        self.updating = False
        self.toolrep = None  # temporary Part::Feature previewing the tool shape
        self.obj = obj

    def accept(self):
        """OK pressed: commit the edits and remove the preview object."""
        self.getFields()
        FreeCADGui.ActiveDocument.resetEdit()
        FreeCADGui.Control.closeDialog()
        if self.toolrep is not None:
            FreeCAD.ActiveDocument.removeObject(self.toolrep.Name)
        FreeCAD.ActiveDocument.recompute()

    def reject(self):
        """Cancel pressed: discard edits and remove the preview object."""
        FreeCADGui.Control.closeDialog()
        if self.toolrep is not None:
            FreeCAD.ActiveDocument.removeObject(self.toolrep.Name)
        FreeCAD.ActiveDocument.recompute()

    def getFields(self):
        # Transfer widget values to the object and re-execute it.
        self.editor.updateToolController()
        self.obj.Proxy.execute(self.obj)

    def setFields(self):
        # Populate widgets from the object and resize the tool preview cylinder.
        self.editor.updateUi()
        tool = self.obj.Tool
        radius = tool.Diameter / 2
        length = tool.CuttingEdgeHeight
        t = Part.makeCylinder(radius, length)
        self.toolrep.Shape = t

    def edit(self, item, column):
        if not self.updating:
            self.resetObject()

    def resetObject(self, remove=None):
        "transfers the values from the widget to the object"
        FreeCAD.ActiveDocument.recompute()

    def setupUi(self):
        # Create a placeholder preview cylinder; setFields() sizes it properly.
        t = Part.makeCylinder(1, 1)
        self.toolrep = FreeCAD.ActiveDocument.addObject("Part::Feature", "tool")
        self.toolrep.Shape = t
        self.setFields()
        self.editor.setupUi()
class DlgToolControllerEdit:
    """Modal dialog wrapper around ToolControllerEditor."""
    def __init__(self, obj):
        self.editor = ToolControllerEditor(obj, True)
        self.editor.updateUi()
        self.editor.setupUi()
        self.obj = obj

    def exec_(self):
        """Run the dialog; restore the original values if it is rejected.

        Returns:
            True if the dialog was cancelled (values reverted), False otherwise.
        """
        # Snapshot current values so cancel can revert them.
        restoreTC = self.obj.Proxy.templateAttrs(self.obj)
        rc = False
        if not self.editor.form.exec_():
            PathLog.info("revert")
            self.obj.Proxy.setFromTemplate(self.obj, restoreTC)
            rc = True
        return rc
# Only hook into the GUI when FreeCAD is running with one.
if FreeCAD.GuiUp:
    # register the FreeCAD command
    FreeCADGui.addCommand('Path_ToolController', CommandPathToolController())
    # and set view provider for creation from template
    PathToolController.ViewProviderClass = ViewProvider
    FreeCAD.Console.PrintLog("Loading PathToolControllerGui... done\n")
More ToolController UI import cleanup.
# -*- coding: utf-8 -*-
# ***************************************************************************
# * *
# * Copyright (c) 2019 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import FreeCAD
import FreeCADGui
import Part
import Path
import PathScripts
import PathScripts.PathGui as PathGui
import PathScripts.PathLog as PathLog
import PathScripts.PathToolEdit as PathToolEdit
import PathScripts.PathUtil as PathUtil
from PySide import QtCore, QtGui
# Qt translation handling
def translate(context, text, disambig=None):
    """Look up the translated form of `text` for `context` via Qt's catalog."""
    return QtCore.QCoreApplication.translate(context, text, disambig)
class ViewProvider:
    """View provider for ToolController objects: hides the generic view
    properties and opens the TaskPanel editor on edit requests."""
    def __init__(self, vobj):
        vobj.Proxy = self

    def attach(self, vobj):
        # Editor mode 2 = hidden; these view properties are meaningless here.
        mode = 2
        vobj.setEditorMode('LineWidth', mode)
        vobj.setEditorMode('MarkerColor', mode)
        vobj.setEditorMode('NormalColor', mode)
        vobj.setEditorMode('DisplayMode', mode)
        vobj.setEditorMode('BoundingBox', mode)
        vobj.setEditorMode('Selectable', mode)
        vobj.setEditorMode('ShapeColor', mode)
        vobj.setEditorMode('Transparency', mode)
        vobj.setEditorMode('Visibility', mode)
        self.vobj = vobj

    def __getstate__(self):
        # Nothing to persist into the document file.
        return None

    def __setstate__(self, state):
        return None

    def getIcon(self):
        return ":/icons/Path-ToolController.svg"

    def onChanged(self, vobj, prop):
        # Keep the generic view properties hidden after any change.
        mode = 2
        vobj.setEditorMode('LineWidth', mode)
        vobj.setEditorMode('MarkerColor', mode)
        vobj.setEditorMode('NormalColor', mode)
        vobj.setEditorMode('DisplayMode', mode)
        vobj.setEditorMode('BoundingBox', mode)
        vobj.setEditorMode('Selectable', mode)

    def onDelete(self, vobj, args=None):
        # Clear expression bindings so deletion leaves no dangling references.
        PathUtil.clearExpressionEngine(vobj.Object)
        return True

    def updateData(self, vobj, prop):
        # this is executed when a property of the APP OBJECT changes
        pass

    def setEdit(self, vobj=None, mode=0):
        # Only the default edit mode (0) opens the task panel.
        if 0 == mode:
            if vobj is None:
                vobj = self.vobj
            FreeCADGui.Control.closeDialog()
            taskd = TaskPanel(vobj.Object)
            FreeCADGui.Control.showDialog(taskd)
            taskd.setupUi()
            FreeCAD.ActiveDocument.recompute()
            return True
        return False

    def unsetEdit(self, vobj, mode):
        # this is executed when the user cancels or terminates edit mode
        return False

    def setupContextMenu(self, vobj, menu):
        # Replace the default context menu with a single 'Edit' entry.
        PathLog.track()
        for action in menu.actions():
            menu.removeAction(action)
        action = QtGui.QAction(translate('Path', 'Edit'), menu)
        action.triggered.connect(self.setEdit)
        menu.addAction(action)
def Create(name = 'Default Tool', tool=None, toolNumber=1):
    """Create a ToolController document object and attach a ViewProvider.

    Args:
        name: Label for the new tool controller.
        tool: Optional tool, passed through to PathToolController.Create.
        toolNumber: Tool number used for g-code generation.

    Returns:
        The newly created document object.
    """
    PathLog.track(tool, toolNumber)
    # Import the submodule explicitly: this file only does `import PathScripts`,
    # which does not by itself guarantee the `PathToolController` attribute is
    # bound on the package; the explicit import makes the access reliable.
    import PathScripts.PathToolController
    obj = PathScripts.PathToolController.Create(name, tool, toolNumber)
    ViewProvider(obj.ViewObject)
    return obj
class CommandPathToolController:
    """GUI command that adds a ToolController to a Job in the active document."""
    def GetResources(self):
        return {'Pixmap': 'Path-LengthOffset',
                'MenuText': QtCore.QT_TRANSLATE_NOOP("Path_ToolController", "Add Tool Controller to the Job"),
                'ToolTip': QtCore.QT_TRANSLATE_NOOP("Path_ToolController", "Add Tool Controller")}

    def IsActive(self):
        # Enabled only when the active document contains a Job object,
        # identified by its object name prefix.
        if FreeCAD.ActiveDocument is not None:
            for o in FreeCAD.ActiveDocument.Objects:
                if o.Name[:3] == "Job":
                    return True
        return False

    def Activated(self):
        PathLog.track()
        Create()
class ToolControllerEditor:
    """Shared editor for a ToolController, usable as a task panel or a dialog."""
    def __init__(self, obj, asDialog):
        # obj: the ToolController document object being edited.
        self.form = FreeCADGui.PySideUic.loadUi(":/panels/DlgToolControllerEdit.ui")
        if not asDialog:
            # Task-panel usage: FreeCAD provides its own OK/Cancel buttons.
            self.form.buttonBox.hide()
        self.obj = obj
        # Quantity spin boxes synchronize widget <-> object property.
        self.vertFeed = PathGui.QuantitySpinBox(self.form.vertFeed, obj, 'VertFeed')
        self.horizFeed = PathGui.QuantitySpinBox(self.form.horizFeed, obj, 'HorizFeed')
        self.vertRapid = PathGui.QuantitySpinBox(self.form.vertRapid, obj, 'VertRapid')
        self.horizRapid = PathGui.QuantitySpinBox(self.form.horizRapid, obj, 'HorizRapid')
        self.editor = PathToolEdit.ToolEditor(obj.Tool, self.form.toolEditor)

    def updateUi(self):
        """Copy the object's current values into the UI widgets."""
        tc = self.obj
        self.form.tcName.setText(tc.Label)
        self.form.tcNumber.setValue(tc.ToolNumber)
        self.horizFeed.updateSpinBox()
        self.horizRapid.updateSpinBox()
        self.vertFeed.updateSpinBox()
        self.vertRapid.updateSpinBox()
        self.form.spindleSpeed.setValue(tc.SpindleSpeed)
        index = self.form.spindleDirection.findText(tc.SpindleDir, QtCore.Qt.MatchFixedString)
        if index >= 0:
            self.form.spindleDirection.setCurrentIndex(index)
        self.editor.updateUI()

    def updateToolController(self):
        """Copy the UI values back into the ToolController object."""
        tc = self.obj
        try:
            tc.Label = self.form.tcName.text()
            tc.ToolNumber = self.form.tcNumber.value()
            self.horizFeed.updateProperty()
            self.vertFeed.updateProperty()
            self.horizRapid.updateProperty()
            self.vertRapid.updateProperty()
            tc.SpindleSpeed = self.form.spindleSpeed.value()
            tc.SpindleDir = self.form.spindleDirection.currentText()
            self.editor.updateTool()
            tc.Tool = self.editor.tool
        except Exception as e:
            PathLog.error(translate("PathToolController", "Error updating TC: %s") % e)

    def refresh(self):
        """Push UI -> object, then re-read the object back into the UI."""
        # Block signals so the round-trip doesn't re-trigger editingFinished.
        self.form.blockSignals(True)
        self.updateToolController()
        self.updateUi()
        self.form.blockSignals(False)

    def setupUi(self):
        """Wire editingFinished of all fields to a full refresh round-trip."""
        self.editor.setupUI()
        self.form.tcName.editingFinished.connect(self.refresh)
        self.form.horizFeed.editingFinished.connect(self.refresh)
        self.form.vertFeed.editingFinished.connect(self.refresh)
        self.form.horizRapid.editingFinished.connect(self.refresh)
        self.form.vertRapid.editingFinished.connect(self.refresh)
class TaskPanel:
    """FreeCAD task panel hosting the ToolControllerEditor, plus a temporary
    cylinder object that visualizes the tool while editing."""
    def __init__(self, obj):
        self.editor = ToolControllerEditor(obj, False)
        self.form = self.editor.form
        self.updating = False
        self.toolrep = None  # temporary Part::Feature previewing the tool shape
        self.obj = obj

    def accept(self):
        """OK pressed: commit the edits and remove the preview object."""
        self.getFields()
        FreeCADGui.ActiveDocument.resetEdit()
        FreeCADGui.Control.closeDialog()
        if self.toolrep is not None:
            FreeCAD.ActiveDocument.removeObject(self.toolrep.Name)
        FreeCAD.ActiveDocument.recompute()

    def reject(self):
        """Cancel pressed: discard edits and remove the preview object."""
        FreeCADGui.Control.closeDialog()
        if self.toolrep is not None:
            FreeCAD.ActiveDocument.removeObject(self.toolrep.Name)
        FreeCAD.ActiveDocument.recompute()

    def getFields(self):
        # Transfer widget values to the object and re-execute it.
        self.editor.updateToolController()
        self.obj.Proxy.execute(self.obj)

    def setFields(self):
        # Populate widgets from the object and resize the tool preview cylinder.
        self.editor.updateUi()
        tool = self.obj.Tool
        radius = tool.Diameter / 2
        length = tool.CuttingEdgeHeight
        t = Part.makeCylinder(radius, length)
        self.toolrep.Shape = t

    def edit(self, item, column):
        if not self.updating:
            self.resetObject()

    def resetObject(self, remove=None):
        "transfers the values from the widget to the object"
        FreeCAD.ActiveDocument.recompute()

    def setupUi(self):
        # Create a placeholder preview cylinder; setFields() sizes it properly.
        t = Part.makeCylinder(1, 1)
        self.toolrep = FreeCAD.ActiveDocument.addObject("Part::Feature", "tool")
        self.toolrep.Shape = t
        self.setFields()
        self.editor.setupUi()
class DlgToolControllerEdit:
    """Modal dialog wrapper around ToolControllerEditor."""
    def __init__(self, obj):
        self.editor = ToolControllerEditor(obj, True)
        self.editor.updateUi()
        self.editor.setupUi()
        self.obj = obj

    def exec_(self):
        """Run the dialog; restore the original values if it is rejected.

        Returns:
            True if the dialog was cancelled (values reverted), False otherwise.
        """
        # Snapshot current values so cancel can revert them.
        restoreTC = self.obj.Proxy.templateAttrs(self.obj)
        rc = False
        if not self.editor.form.exec_():
            PathLog.info("revert")
            self.obj.Proxy.setFromTemplate(self.obj, restoreTC)
            rc = True
        return rc
# Only hook into the GUI when FreeCAD is running with one.
if FreeCAD.GuiUp:
    # register the FreeCAD command
    FreeCADGui.addCommand('Path_ToolController', CommandPathToolController())
    FreeCAD.Console.PrintLog("Loading PathToolControllerGui... done\n")
|
from __future__ import print_function, division
import sys
import logging
import re
import lldb
if sys.version_info[0] == 2:
    # python2-based LLDB accepts utf8-encoded ascii strings only.
    def to_lldb_str(s): return s.encode('utf8', 'backslashreplace') if isinstance(s, unicode) else s
    range = xrange
else:
    # Python 3: str is already what LLDB expects; no conversion needed.
    to_lldb_str = str

log = logging.getLogger(__name__)
module = sys.modules[__name__]  # this module object; generated summary fns are attached to it
rust_category = None  # the 'Rust' SBTypeCategory, created by initialize_category()
def initialize_category(debugger):
    """Create the 'Rust' type category and register all summary/synthetic
    providers with LLDB.

    Args:
        debugger: lldb.SBDebugger to register the category on.
    """
    global module, rust_category
    rust_category = debugger.CreateCategory('Rust')
    # rust_category.AddLanguage(lldb.eLanguageTypeRust)
    rust_category.SetEnabled(True)

    #attach_summary_to_type(get_array_summary, r'^.*\[[0-9]+\]$', True)
    attach_summary_to_type(get_tuple_summary, r'^\(.*\)$', True)
    attach_summary_to_type(get_tuple_summary, r'^tuple<.+>$', True) # *-windows-msvc uses this name since 1.47
    # String-like types. Several spellings cover different rustc versions
    # and debug-info formats.
    attach_synthetic_to_type(StrSliceSynthProvider, '&str')
    attach_synthetic_to_type(StrSliceSynthProvider, 'str*')
    attach_synthetic_to_type(StrSliceSynthProvider, 'str') # *-windows-msvc uses this name since 1.5?
    attach_synthetic_to_type(StdStringSynthProvider, 'collections::string::String') # Before 1.20
    attach_synthetic_to_type(StdStringSynthProvider, 'alloc::string::String') # Since 1.20
    attach_synthetic_to_type(StdVectorSynthProvider, r'^collections::vec::Vec<.+>$', True) # Before 1.20
    attach_synthetic_to_type(StdVectorSynthProvider, r'^alloc::vec::Vec<.+>$', True) # Since 1.20
    attach_synthetic_to_type(SliceSynthProvider, r'^&(mut *)?\[.*\]$', True)
    attach_synthetic_to_type(SliceSynthProvider, r'^(mut *)?slice<.+>.*$', True)
    attach_synthetic_to_type(StdCStringSynthProvider, 'std::ffi::c_str::CString')
    attach_synthetic_to_type(StdCStrSynthProvider, 'std::ffi::c_str::CStr')
    attach_synthetic_to_type(StdOsStringSynthProvider, 'std::ffi::os_str::OsString')
    attach_synthetic_to_type(StdOsStrSynthProvider, 'std::ffi::os_str::OsStr')
    attach_synthetic_to_type(StdPathBufSynthProvider, 'std::path::PathBuf')
    attach_synthetic_to_type(StdPathSynthProvider, 'std::path::Path')
    # Smart pointers and interior-mutability containers.
    attach_synthetic_to_type(StdRcSynthProvider, r'^alloc::rc::Rc<.+>$', True)
    attach_synthetic_to_type(StdRcSynthProvider, r'^alloc::rc::Weak<.+>$', True)
    attach_synthetic_to_type(StdArcSynthProvider, r'^alloc::(sync|arc)::Arc<.+>$', True)
    attach_synthetic_to_type(StdArcSynthProvider, r'^alloc::(sync|arc)::Weak<.+>$', True)
    attach_synthetic_to_type(StdMutexSynthProvider, r'^std::sync::mutex::Mutex<.+>$', True)
    attach_synthetic_to_type(StdCellSynthProvider, r'^core::cell::Cell<.+>$', True)
    attach_synthetic_to_type(StdRefCellSynthProvider, r'^core::cell::RefCell<.+>$', True)
    attach_synthetic_to_type(StdRefCellBorrowSynthProvider, r'^core::cell::Ref<.+>$', True)
    attach_synthetic_to_type(StdRefCellBorrowSynthProvider, r'^core::cell::RefMut<.+>$', True)
    # Collections and enums (providers defined later in this file).
    attach_synthetic_to_type(StdHashMapSynthProvider, r'^std::collections::hash::map::HashMap<.+>$', True)
    attach_synthetic_to_type(StdHashSetSynthProvider, r'^std::collections::hash::set::HashSet<.+>$', True)
    attach_synthetic_to_type(StdOptionSynthProvider, r'^core::option::Option<.+>$', True)
    attach_synthetic_to_type(StdResultSynthProvider, r'^core::result::Result<.+>$', True)
    attach_synthetic_to_type(StdCowSynthProvider, r'^alloc::borrow::Cow<.+>$', True)
def attach_synthetic_to_type(synth_class, type_name, is_regex=False):
    """Register a synthetic-children provider (plus a matching summary) for a type.

    Args:
        synth_class: RustSynthProvider subclass implementing the children.
        type_name: Type name, or a regex when is_regex is True.
        is_regex: Whether type_name is a regular expression.
    """
    global module, rust_category
    #log.debug('attaching synthetic %s to "%s", is_regex=%s', synth_class.__name__, type_name, is_regex)
    synth = lldb.SBTypeSynthetic.CreateWithClassName(__name__ + '.' + synth_class.__name__)
    synth.SetOptions(lldb.eTypeOptionCascade)
    rust_category.AddTypeSynthetic(lldb.SBTypeNameSpecifier(type_name, is_regex), synth)

    # Derive a summary function that reuses the provider's data extraction.
    def summary_fn(valobj, dict): return get_synth_summary(synth_class, valobj, dict)
    # LLDB accesses summary fn's by name, so we need to create a unique one.
    summary_fn.__name__ = '_get_synth_summary_' + synth_class.__name__
    setattr(module, summary_fn.__name__, summary_fn)
    attach_summary_to_type(summary_fn, type_name, is_regex)
def attach_summary_to_type(summary_fn, type_name, is_regex=False):
    """Register `summary_fn` (looked up by its qualified name) as the summary
    provider for a type.

    Args:
        summary_fn: Module-level function taking (valobj, internal_dict).
        type_name: Type name, or a regex when is_regex is True.
        is_regex: Whether type_name is a regular expression.
    """
    global module, rust_category
    #log.debug('attaching summary %s to "%s", is_regex=%s', summary_fn.__name__, type_name, is_regex)
    summary = lldb.SBTypeSummary.CreateWithFunctionName(__name__ + '.' + summary_fn.__name__)
    summary.SetOptions(lldb.eTypeOptionCascade)
    rust_category.AddTypeSummary(lldb.SBTypeNameSpecifier(type_name, is_regex), summary)
# 'get_summary' is annoyingly not a part of the standard LLDB synth provider API.
# This trick allows us to share data extraction logic between synth providers and their sibling summary providers.
def get_synth_summary(synth_class, valobj, dict):
    """Instantiate `synth_class` on the raw (non-synthetic) value and return
    its summary, converted to an LLDB-acceptable string."""
    synth = synth_class(valobj.GetNonSyntheticValue(), dict)
    synth.update()
    summary = synth.get_summary()
    return to_lldb_str(summary)
# Chained GetChildMemberWithName lookups
def gcm(valobj, *chain):
    """Walk `valobj` down a chain of child-member names; return the final node."""
    node = valobj
    for member_name in chain:
        node = node.GetChildMemberWithName(member_name)
    return node
# Rust-enabled LLDB using DWARF debug info will strip tuple field prefixes.
# If LLDB is not Rust-enabled or if using PDB debug info, they will be underscore-prefixed.
def read_unique_ptr(valobj):
    """Extract the raw pointer from a value's 'pointer' field (Unique/NonNull)."""
    pointer = valobj.GetChildMemberWithName('pointer')
    # Try both tuple-field spellings: '__0' (plain lldb) then '0' (rust-lldb).
    for field in ('__0', '0'):
        child = pointer.GetChildMemberWithName(field)
        if child.IsValid():
            return child
    # pointer no longer contains NonZero since Rust 1.33 - use it directly.
    return pointer
def string_from_ptr(pointer, length):
    """Read `length` bytes of process memory at `pointer` and decode as UTF-8.

    Args:
        pointer: lldb.SBValue holding the data address.
        length: Number of bytes to read.

    Returns:
        The decoded text, u'' for non-positive lengths, or None (implicit)
        when the memory read fails — callers check for None.
    """
    if length <= 0:
        return u''
    error = lldb.SBError()
    process = pointer.GetProcess()
    data = process.ReadMemory(pointer.GetValueAsUnsigned(), length, error)
    if error.Success():
        # 'replace' guards against invalid/partially-initialized UTF-8.
        return data.decode('utf8', 'replace')
    else:
        log.error('ReadMemory error: %s', error.GetCString())
def get_obj_summary(valobj, unavailable='{...}'):
    """Best-effort one-line description of a value.

    Prefers the value's summary, falls back to its plain value, and finally
    to the `unavailable` placeholder.
    """
    text = valobj.GetSummary()
    if text is None:
        text = valobj.GetValue()
    return unavailable if text is None else text
def sequence_summary(childern, maxsize=32):
    """Comma-joined element summaries, truncated with ', ...' once the text
    exceeds `maxsize` characters.

    NOTE: the parameter name ('childern' [sic]) is kept for API compatibility.
    """
    text = ''
    for item in childern:
        if text:
            text += ', '
        text += get_obj_summary(item)
        if len(text) > maxsize:
            text += ', ...'
            break
    return text
# Leading characters that mark tuple/array/reference/pointer type names,
# which carry no '::' namespace prefix to strip.
unqual_type_markers = ["(", "[", "&", "*"]
# Captures the final identifier of a '::'-qualified type name.
unqual_type_regex = re.compile(r'^(?:\w+::)*(\w+).*', re.UNICODE)

def get_unqualified_type_name(type_name):
    """Strip namespace qualifiers: 'alloc::vec::Vec<i32>' -> 'Vec'.

    Composite type names (tuples, arrays, references, pointers) are
    returned unchanged.
    """
    if type_name[0] in unqual_type_markers:
        return type_name
    return unqual_type_regex.match(type_name).group(1)
def dump_type(ty):
    """Debug helper: log a type's name and byte size."""
    log.info('type %s: size=%d', ty.GetName(), ty.GetByteSize())

# ----- Summaries -----

def get_tuple_summary(valobj, dict):
    """Tuple summary: '(a, b, ...)' built from all child summaries."""
    fields = [get_obj_summary(valobj.GetChildAtIndex(i)) for i in range(0, valobj.GetNumChildren())]
    return '(%s)' % ', '.join(fields)

def get_array_summary(valobj, dict):
    """Array summary: '(<len>) [elem, elem, ...]'."""
    return '(%d) [%s]' % (valobj.GetNumChildren(), sequence_summary(valobj))
# ----- Synth providers ------
class RustSynthProvider(object):
    """Base class for all Rust synthetic-children providers.

    Implements LLDB's synthetic-provider protocol with inert defaults so
    subclasses only override what they need.
    """
    def __init__(self, valobj, dict=None):
        # `dict` is LLDB's internal_dict argument; it is never read, but the
        # parameter must remain so LLDB (and get_synth_summary) can pass it.
        # Fixed: the previous default was a mutable `{}` (shared-default
        # pitfall, and it shadowed the builtin); None is behavior-identical
        # here because the value is unused.
        self.valobj = valobj
        self.initialize()

    def initialize(self):
        # Subclass hook: extract data from self.valobj.
        return None

    def update(self):
        return False

    def num_children(self):
        return 0

    def has_children(self):
        return False

    def get_child_at_index(self, index):
        return None

    def get_child_index(self, name):
        return None

    def get_summary(self):
        return None
class RegularEnumProvider(RustSynthProvider):
    """Provider for regular (non-niche-optimized) Rust enums."""
    def initialize(self):
        # Regular enums are represented as unions of structs, containing discriminant in the
        # first field.
        discriminant = self.valobj.GetChildAtIndex(0).GetChildAtIndex(0).GetValueAsUnsigned()
        self.variant = self.valobj.GetChildAtIndex(discriminant)

    def num_children(self):
        # Exclude the discriminant field itself.
        return max(0, self.variant.GetNumChildren() - 1)

    def has_children(self):
        return self.num_children() > 0

    def get_child_at_index(self, index):
        # +1 steps over the discriminant field.
        return self.variant.GetChildAtIndex(index + 1)

    def get_child_index(self, name):
        return self.variant.GetIndexOfChildWithName(name) - 1

    def get_summary(self):
        return get_obj_summary(self.variant)
# Base class for providers that represent array-like objects
class ArrayLikeSynthProvider(RustSynthProvider):
    """Base for array-like providers; subclasses supply ptr_and_len()."""
    def initialize(self):
        ptr, len = self.ptr_and_len(self.valobj) # type: ignore
        self.ptr = ptr  # SBValue: pointer to the first element
        self.len = len  # element count
        self.item_type = self.ptr.GetType().GetPointeeType()
        self.item_size = self.item_type.GetByteSize()

    def ptr_and_len(self, obj):
        """Return (data-pointer SBValue, element count) — abstract."""
        pass # abstract

    def num_children(self):
        return self.len

    def has_children(self):
        return True

    def get_child_at_index(self, index):
        try:
            if not 0 <= index < self.len:
                return None
            offset = index * self.item_size
            return self.ptr.CreateChildAtOffset('[%s]' % index, offset, self.item_type)
        except Exception as e:
            log.error('%s', e)
            raise

    def get_child_index(self, name):
        # Child names have the form '[<index>]'.
        try:
            return int(name.lstrip('[').rstrip(']'))
        except Exception as e:
            log.error('%s', e)
            raise

    def get_summary(self):
        return '(%d)' % (self.len,)
class StdVectorSynthProvider(ArrayLikeSynthProvider):
    """Vec<T>: data pointer lives in buf.ptr, element count in len."""
    def ptr_and_len(self, vec):
        return (
            read_unique_ptr(gcm(vec, 'buf', 'ptr')),
            gcm(vec, 'len').GetValueAsUnsigned()
        )

    def get_summary(self):
        try:
            return '(%d) vec![%s]' % (self.len, sequence_summary((self.get_child_at_index(i) for i in range(self.len))))
        except Exception as e:
            log.error('%s', e)
            raise
##################################################################################################################

class SliceSynthProvider(ArrayLikeSynthProvider):
    """&[T] / &mut [T]: a {data_ptr, length} pair."""
    def ptr_and_len(self, vec):
        return (
            gcm(vec, 'data_ptr'),
            gcm(vec, 'length').GetValueAsUnsigned()
        )

    def get_summary(self):
        return '(%d) &[%s]' % (self.len, sequence_summary((self.get_child_at_index(i) for i in range(self.len))))
# Base class for *String providers
class StringLikeSynthProvider(ArrayLikeSynthProvider):
    """Array-like provider whose children are chars and whose summary is the
    quoted string contents."""
    def get_child_at_index(self, index):
        ch = ArrayLikeSynthProvider.get_child_at_index(self, index)
        ch.SetFormat(lldb.eFormatChar)
        return ch

    def get_summary(self):
        # Limit string length to 1000 characters to cope with uninitialized values whose
        # length field contains garbage.
        strval = string_from_ptr(self.ptr, min(self.len, 1000))
        # Fixed: identity comparison (`is None`) instead of `== None` — the
        # memory read returns None on failure; behavior is unchanged.
        if strval is None:
            return None
        if self.len > 1000:
            strval += u'...'
        return u'"%s"' % strval
class StrSliceSynthProvider(StringLikeSynthProvider):
    """&str: a {data_ptr, length} pair."""
    def ptr_and_len(self, valobj):
        return (
            gcm(valobj, 'data_ptr'),
            gcm(valobj, 'length').GetValueAsUnsigned()
        )

class StdStringSynthProvider(StringLikeSynthProvider):
    """String: wraps a Vec<u8> held in field 'vec'."""
    def ptr_and_len(self, valobj):
        vec = gcm(valobj, 'vec')
        return (
            read_unique_ptr(gcm(vec, 'buf', 'ptr')),
            gcm(vec, 'len').GetValueAsUnsigned()
        )

class StdCStringSynthProvider(StringLikeSynthProvider):
    """CString: the inner slice includes the NUL terminator, so drop one byte."""
    def ptr_and_len(self, valobj):
        vec = gcm(valobj, 'inner')
        return (
            gcm(vec, 'data_ptr'),
            gcm(vec, 'length').GetValueAsUnsigned() - 1
        )

class StdOsStringSynthProvider(StringLikeSynthProvider):
    """OsString: inner Vec<u8>, with an extra wrapper field on Windows."""
    def ptr_and_len(self, valobj):
        vec = gcm(valobj, 'inner', 'inner')
        tmp = gcm(vec, 'bytes') # Windows OSString has an extra layer
        if tmp.IsValid():
            vec = tmp
        return (
            read_unique_ptr(gcm(vec, 'buf', 'ptr')),
            gcm(vec, 'len').GetValueAsUnsigned()
        )
class FFISliceSynthProvider(StringLikeSynthProvider):
    """Base for unsized FFI string slices (CStr / OsStr / Path)."""
    def ptr_and_len(self, valobj):
        process = valobj.GetProcess()
        slice_ptr = valobj.GetLoadAddress()
        data_ptr_type = valobj.GetTarget().GetBasicType(lldb.eBasicTypeChar).GetPointerType()
        # Unsized slice objects have incomplete debug info, so here we just assume standard slice
        # reference layout: [<pointer to data>, <data size>]
        error = lldb.SBError()
        pointer = valobj.CreateValueFromAddress('data', slice_ptr, data_ptr_type)
        length = process.ReadPointerFromMemory(slice_ptr + process.GetAddressByteSize(), error)
        return pointer, length

class StdCStrSynthProvider(FFISliceSynthProvider):
    """CStr: same layout, minus the trailing NUL byte."""
    def ptr_and_len(self, valobj):
        ptr, len = FFISliceSynthProvider.ptr_and_len(self, valobj)
        return (ptr, len-1) # drop terminating '\0'

class StdOsStrSynthProvider(FFISliceSynthProvider):
    """OsStr: uses the plain FFI slice layout unchanged."""
    pass

class StdPathBufSynthProvider(StdOsStringSynthProvider):
    """PathBuf: wraps an OsString in field 'inner'."""
    def ptr_and_len(self, valobj):
        return StdOsStringSynthProvider.ptr_and_len(self, gcm(valobj, 'inner'))

class StdPathSynthProvider(FFISliceSynthProvider):
    """Path: uses the plain FFI slice layout unchanged."""
    pass
##################################################################################################################

class DerefSynthProvider(RustSynthProvider):
    """Base for providers that expose the children of a wrapped/pointed-to value."""
    # Default (invalid) target; subclasses' initialize() replaces it with the
    # wrapped value.
    deref = lldb.SBValue()

    def num_children(self):
        return self.deref.GetNumChildren()

    def has_children(self):
        return self.deref.MightHaveChildren()

    def get_child_at_index(self, index):
        return self.deref.GetChildAtIndex(index)

    def get_child_index(self, name):
        return self.deref.GetIndexOfChildWithName(name)

    def get_summary(self):
        return get_obj_summary(self.deref)
# Base for Rc and Arc
class StdRefCountedSynthProvider(DerefSynthProvider):
    """Shared summary logic for Rc/Arc: show reference counts plus the pointee."""
    weak = 0    # weak count after adjustment; 0 means "don't display"
    strong = 0  # strong reference count

    def get_summary(self):
        if self.weak != 0:
            s = '(refs:%d,weak:%d) ' % (self.strong, self.weak)
        else:
            s = '(refs:%d) ' % self.strong
        if self.strong > 0:
            s += get_obj_summary(self.deref)
        else:
            s += '<disposed>'
        return s
class StdRcSynthProvider(StdRefCountedSynthProvider):
    """Rc<T> / rc::Weak<T>: counts read via nested 'value' fields of the heap box."""
    def initialize(self):
        inner = read_unique_ptr(gcm(self.valobj, 'ptr'))
        self.strong = gcm(inner, 'strong', 'value', 'value').GetValueAsUnsigned()
        self.weak = gcm(inner, 'weak', 'value', 'value').GetValueAsUnsigned()
        if self.strong > 0:
            self.deref = gcm(inner, 'value')
            self.weak -= 1 # There's an implicit weak reference communally owned by all the strong pointers
        else:
            self.deref = lldb.SBValue()
        self.deref.SetPreferSyntheticValue(True)

class StdArcSynthProvider(StdRefCountedSynthProvider):
    """Arc<T> / sync::Weak<T>: counts read via 'v.value' (atomic) fields."""
    def initialize(self):
        inner = read_unique_ptr(gcm(self.valobj, 'ptr'))
        self.strong = gcm(inner, 'strong', 'v', 'value').GetValueAsUnsigned()
        self.weak = gcm(inner, 'weak', 'v', 'value').GetValueAsUnsigned()
        if self.strong > 0:
            self.deref = gcm(inner, 'data')
            self.weak -= 1 # There's an implicit weak reference communally owned by all the strong pointers
        else:
            self.deref = lldb.SBValue()
        self.deref.SetPreferSyntheticValue(True)
class StdMutexSynthProvider(DerefSynthProvider):
def initialize(self):
self.deref = gcm(self.valobj, 'data', 'value')
self.deref.SetPreferSyntheticValue(True)
class StdCellSynthProvider(DerefSynthProvider):
    """Synth provider for core::cell::Cell: displays the contained value."""
    def initialize(self):
        self.deref = gcm(self.valobj, 'value', 'value')
        self.deref.SetPreferSyntheticValue(True)
class StdRefCellSynthProvider(DerefSynthProvider):
    """Synth provider for core::cell::RefCell: contents plus borrow state."""
    def initialize(self):
        self.deref = gcm(self.valobj, 'value', 'value')
        self.deref.SetPreferSyntheticValue(True)
    def get_summary(self):
        # Borrow flag: negative = mutably borrowed, positive = count of
        # outstanding shared borrows, zero = not borrowed.
        borrow = gcm(self.valobj, 'borrow', 'value', 'value').GetValueAsSigned()
        s = ''
        if borrow < 0:
            s = '(borrowed:mut) '
        elif borrow > 0:
            s = '(borrowed:%d) ' % borrow
        return s + get_obj_summary(self.deref)
class StdRefCellBorrowSynthProvider(DerefSynthProvider):
    """Synth provider for core::cell::Ref / RefMut: dereferences the borrow."""
    def initialize(self):
        self.deref = gcm(self.valobj, 'value').Dereference()
        self.deref.SetPreferSyntheticValue(True)
##################################################################################################################
ENCODED_ENUM_PREFIX = 'RUST$ENCODED$ENUM$'  # marker field name for niche-optimized enums
ENUM_DISCRIMINANT = 'RUST$ENUM$DISR'        # discriminant field name for regular enums
class EnumSynthProvider(DerefSynthProvider):
    """Base provider for Rust enums: figures out the active variant, exposing
    its name via `self.variant` and its payload via `self.deref`."""
    def initialize(self):
        obj_type = self.valobj.GetType()
        first_field_name = obj_type.GetFieldAtIndex(0).GetName()
        # The first two branches are for the sake of windows-*-msvc targets and non-rust-enabled liblldb.
        # Normally, we should be calling the initialize_enum().
        if first_field_name.startswith(ENCODED_ENUM_PREFIX): # Niche-optimized enum
            # Field name encodes '$'-separated child indices leading to the
            # discriminant, followed by the name of the "null" variant.
            tokens = first_field_name[len(ENCODED_ENUM_PREFIX):].split("$")
            discr_indices = [int(index) for index in tokens[:-1]]
            null_variant = tokens[-1]
            discriminant = self.valobj.GetChildAtIndex(0)
            for discr_index in discr_indices:
                discriminant = discriminant.GetChildAtIndex(discr_index)
            # Recurse down the first field of the discriminant till we reach a non-struct type,
            for i in range(20): # ... but limit the depth, just in case.
                if discriminant.GetType().GetTypeClass() != lldb.eTypeClassStruct:
                    break
                discriminant = discriminant.GetChildAtIndex(0)
            if discriminant.GetValueAsUnsigned() == 0:
                self.variant = null_variant
                self.deref = lldb.SBValue()  # null variant carries no payload
            else:
                # NOTE(review): self.variant is not assigned on this path;
                # get_summary() would hit an unset attribute — confirm intended.
                self.deref = self.valobj.GetChildAtIndex(0)
        elif first_field_name == ENUM_DISCRIMINANT: # Regular enum
            self.variant = self.valobj.GetChildAtIndex(0).GetValue()
            self.deref = self.valobj.GetChildAtIndex(1)
        else:
            self.initialize_enum()  # subclass-specific detection
        self.deref.SetPreferSyntheticValue(True)
    def initialize_enum(self):
        # Overridden by subclasses (Option/Result/Cow).
        pass
    def get_summary(self):
        if self.deref.IsValid():
            return self.variant + '(' + get_obj_summary(self.deref) + ')'
        else:
            return self.variant
class StdOptionSynthProvider(EnumSynthProvider):
    """Option<T>: variant (Some/None) determined from the concrete type name."""
    def initialize_enum(self):
        if self.valobj.GetTypeName().endswith('::Some'):
            self.variant = 'Some'
            self.deref = gcm(self.valobj, '0')
        else:
            self.variant = 'None'
            self.deref = lldb.SBValue()  # None carries no payload
class StdResultSynthProvider(EnumSynthProvider):
    """Result<T, E>: variant (Ok/Err) determined from the concrete type name."""
    def initialize_enum(self):
        if self.valobj.GetTypeName().endswith('::Ok'):
            self.variant = 'Ok'
        else:
            self.variant = 'Err'
        self.deref = gcm(self.valobj, '0')
class StdCowSynthProvider(EnumSynthProvider):
    """Cow<T>: variant (Owned/Borrowed) determined from the concrete type name."""
    def initialize_enum(self):
        if self.valobj.GetTypeName().endswith('::Owned'):
            self.variant = 'Owned'
        else:
            self.variant = 'Borrowed'
        self.deref = gcm(self.valobj, '0')
##################################################################################################################
class StdHashMapSynthProvider(RustSynthProvider):
    """Synth provider for std HashMap, backed by hashbrown's RawTable.

    Supports three historical layouts; initialize_table() dispatches on the
    fields present.  `self.valid_indices` maps child index -> bucket index.
    """
    def initialize(self):
        self.initialize_table(gcm(self.valobj, 'base', 'table'))
    def initialize_table(self, table):
        assert table.IsValid()
        if table.type.GetNumberOfTemplateArguments() > 0:
            item_ty = table.type.GetTemplateArgumentType(0)
        else: # we must be on windows-msvc - try to look up item type by name
            table_ty_name = table.GetType().GetName() # "hashbrown::raw::RawTable<ITEM_TY>"
            item_ty_name = table_ty_name[table_ty_name.find('<')+1: table_ty_name.rfind('>')]
            item_ty = table.GetTarget().FindTypes(item_ty_name).GetTypeAtIndex(0)
        if item_ty.IsTypedefType():
            item_ty = item_ty.GetTypedefedType()
        inner_table = table.GetChildMemberWithName('table')
        if inner_table.IsValid():
            self.initialize_hashbrown_v2(inner_table, item_ty) # 1.52 <= std_version
        else:
            if not table.GetChildMemberWithName('data'):
                self.initialize_hashbrown_v2(table, item_ty) # ? <= std_version < 1.52
            else:
                self.initialize_hashbrown_v1(table, item_ty) # 1.36 <= std_version < ?
    def initialize_hashbrown_v2(self, table, item_ty):
        self.num_buckets = gcm(table, 'bucket_mask').GetValueAsUnsigned() + 1
        ctrl_ptr = gcm(table, 'ctrl', 'pointer')
        ctrl = ctrl_ptr.GetPointeeData(0, self.num_buckets)
        # Buckets are located above `ctrl`, in reverse order.
        start_addr = ctrl_ptr.GetValueAsUnsigned() - item_ty.GetByteSize() * self.num_buckets
        buckets_ty = item_ty.GetArrayType(self.num_buckets)
        self.buckets = self.valobj.CreateValueFromAddress('data', start_addr, buckets_ty)
        error = lldb.SBError()
        self.valid_indices = []
        for i in range(self.num_buckets):
            # Ctrl byte with the high bit clear is treated as an occupied bucket.
            if ctrl.GetUnsignedInt8(error, i) & 0x80 == 0:
                self.valid_indices.append(self.num_buckets - 1 - i)
    def initialize_hashbrown_v1(self, table, item_ty):
        self.num_buckets = gcm(table, 'bucket_mask').GetValueAsUnsigned() + 1
        ctrl_ptr = gcm(table, 'ctrl', 'pointer')
        ctrl = ctrl_ptr.GetPointeeData(0, self.num_buckets)
        buckets_ty = item_ty.GetArrayType(self.num_buckets)
        self.buckets = gcm(table, 'data', 'pointer').Dereference().Cast(buckets_ty)
        error = lldb.SBError()
        self.valid_indices = []
        for i in range(self.num_buckets):
            if ctrl.GetUnsignedInt8(error, i) & 0x80 == 0:
                self.valid_indices.append(i)
    def has_children(self):
        return True
    def num_children(self):
        return len(self.valid_indices)
    def get_child_at_index(self, index):
        bucket_idx = self.valid_indices[index]
        item = self.buckets.GetChildAtIndex(bucket_idx)
        # Re-wrap so the child is displayed as '[index]'.
        return item.CreateChildAtOffset('[%d]' % index, 0, item.GetType())
    def get_child_index(self, name):
        try:
            return int(name.lstrip('[').rstrip(']'))
        except Exception as e:
            log.error('%s', e)
            raise
    def get_summary(self):
        return 'size=%d, capacity=%d' % (self.num_children(), self.num_buckets)
class StdHashSetSynthProvider(StdHashMapSynthProvider):
    """HashSet: same table layout as HashMap; children are the elements."""
    def initialize(self):
        table = gcm(self.valobj, 'base', 'map', 'table') # std_version >= 1.48
        if not table.IsValid():
            table = gcm(self.valobj, 'map', 'base', 'table') # std_version < 1.48
        self.initialize_table(table)
    def get_child_at_index(self, index):
        bucket_idx = self.valid_indices[index]
        # Child 0 of the bucket entry is the set element.
        item = self.buckets.GetChildAtIndex(bucket_idx).GetChildAtIndex(0)
        return item.CreateChildAtOffset('[%d]' % index, 0, item.GetType())
##################################################################################################################
def __lldb_init_module(debugger_obj, internal_dict):
    """Entry point invoked by LLDB when this script module is imported."""
    log.info('Initializing')
    initialize_category(debugger_obj)
Optimize synth summaries.
from __future__ import print_function, division
import sys
import logging
import re
import lldb
import weakref
if sys.version_info[0] == 2:
    # python2-based LLDB accepts utf8-encoded ascii strings only.
    def to_lldb_str(s): return s.encode('utf8', 'backslashreplace') if isinstance(s, unicode) else s
    range = xrange  # use the lazy range on py2, matching py3 semantics
else:
    to_lldb_str = str  # py3 LLDB takes str directly
log = logging.getLogger(__name__)
module = sys.modules[__name__]  # this module object; generated summary fns are attached to it
rust_category = None  # SBTypeCategory created by initialize_category()
def initialize_category(debugger):
    """Create and enable the 'Rust' type category on `debugger`, registering
    every summary/synthetic provider defined in this module."""
    global module, rust_category
    rust_category = debugger.CreateCategory('Rust')
    # rust_category.AddLanguage(lldb.eLanguageTypeRust)
    rust_category.SetEnabled(True)
    #attach_summary_to_type(get_array_summary, r'^.*\[[0-9]+\]$', True)
    attach_summary_to_type(get_tuple_summary, r'^\(.*\)$', True)
    attach_summary_to_type(get_tuple_summary, r'^tuple<.+>$', True) # *-windows-msvc uses this name since 1.47
    attach_synthetic_to_type(StrSliceSynthProvider, '&str')
    attach_synthetic_to_type(StrSliceSynthProvider, 'str*')
    attach_synthetic_to_type(StrSliceSynthProvider, 'str') # *-windows-msvc uses this name since 1.5?
    attach_synthetic_to_type(StdStringSynthProvider, 'collections::string::String') # Before 1.20
    attach_synthetic_to_type(StdStringSynthProvider, 'alloc::string::String') # Since 1.20
    attach_synthetic_to_type(StdVectorSynthProvider, r'^collections::vec::Vec<.+>$', True) # Before 1.20
    attach_synthetic_to_type(StdVectorSynthProvider, r'^alloc::vec::Vec<.+>$', True) # Since 1.20
    attach_synthetic_to_type(SliceSynthProvider, r'^&(mut *)?\[.*\]$', True)
    attach_synthetic_to_type(SliceSynthProvider, r'^(mut *)?slice<.+>.*$', True)
    attach_synthetic_to_type(StdCStringSynthProvider, 'std::ffi::c_str::CString')
    attach_synthetic_to_type(StdCStrSynthProvider, 'std::ffi::c_str::CStr')
    attach_synthetic_to_type(StdOsStringSynthProvider, 'std::ffi::os_str::OsString')
    attach_synthetic_to_type(StdOsStrSynthProvider, 'std::ffi::os_str::OsStr')
    attach_synthetic_to_type(StdPathBufSynthProvider, 'std::path::PathBuf')
    attach_synthetic_to_type(StdPathSynthProvider, 'std::path::Path')
    attach_synthetic_to_type(StdRcSynthProvider, r'^alloc::rc::Rc<.+>$', True)
    attach_synthetic_to_type(StdRcSynthProvider, r'^alloc::rc::Weak<.+>$', True)
    attach_synthetic_to_type(StdArcSynthProvider, r'^alloc::(sync|arc)::Arc<.+>$', True)
    attach_synthetic_to_type(StdArcSynthProvider, r'^alloc::(sync|arc)::Weak<.+>$', True)
    attach_synthetic_to_type(StdMutexSynthProvider, r'^std::sync::mutex::Mutex<.+>$', True)
    attach_synthetic_to_type(StdCellSynthProvider, r'^core::cell::Cell<.+>$', True)
    attach_synthetic_to_type(StdRefCellSynthProvider, r'^core::cell::RefCell<.+>$', True)
    attach_synthetic_to_type(StdRefCellBorrowSynthProvider, r'^core::cell::Ref<.+>$', True)
    attach_synthetic_to_type(StdRefCellBorrowSynthProvider, r'^core::cell::RefMut<.+>$', True)
    attach_synthetic_to_type(StdHashMapSynthProvider, r'^std::collections::hash::map::HashMap<.+>$', True)
    attach_synthetic_to_type(StdHashSetSynthProvider, r'^std::collections::hash::set::HashSet<.+>$', True)
    attach_synthetic_to_type(StdOptionSynthProvider, r'^core::option::Option<.+>$', True)
    attach_synthetic_to_type(StdResultSynthProvider, r'^core::result::Result<.+>$', True)
    attach_synthetic_to_type(StdCowSynthProvider, r'^alloc::borrow::Cow<.+>$', True)
def attach_synthetic_to_type(synth_class, type_name, is_regex=False):
    """Register `synth_class` as the synthetic-children provider for
    `type_name` in the Rust category, plus a matching summary function."""
    global module, rust_category
    #log.debug('attaching synthetic %s to "%s", is_regex=%s', synth_class.__name__, type_name, is_regex)
    class_path = __name__ + '.' + synth_class.__name__
    synth = lldb.SBTypeSynthetic.CreateWithClassName(class_path)
    synth.SetOptions(lldb.eTypeOptionCascade)
    specifier = lldb.SBTypeNameSpecifier(type_name, is_regex)
    rust_category.AddTypeSynthetic(specifier, synth)
    def summary_fn(valobj, dict): return get_synth_summary(synth_class, valobj, dict)
    # LLDB accesses summary fn's by name, so we need to create a unique one.
    summary_fn.__name__ = '_get_synth_summary_' + synth_class.__name__
    setattr(module, summary_fn.__name__, summary_fn)
    attach_summary_to_type(summary_fn, type_name, is_regex)
def attach_summary_to_type(summary_fn, type_name, is_regex=False):
    """Register `summary_fn` as the summary provider for `type_name`."""
    global module, rust_category
    #log.debug('attaching summary %s to "%s", is_regex=%s', summary_fn.__name__, type_name, is_regex)
    fn_path = __name__ + '.' + summary_fn.__name__
    summary = lldb.SBTypeSummary.CreateWithFunctionName(fn_path)
    summary.SetOptions(lldb.eTypeOptionCascade)
    specifier = lldb.SBTypeNameSpecifier(type_name, is_regex)
    rust_category.AddTypeSummary(specifier, summary)
# 'get_summary' is annoyingly not a part of the standard LLDB synth provider API.
# This trick allows us to share data extraction logic between synth providers and their sibling summary providers.
def get_synth_summary(synth_class, valobj, dict):
    """Produce a summary via `synth_class`, reusing a cached provider instance
    from `synth_by_id` when one exists for this value."""
    try:
        ns_valobj = valobj.GetNonSyntheticValue()
        synth = synth_by_id.get(ns_valobj.GetID())
        if synth is None:
            synth = synth_class(ns_valobj, dict)  # creates and caches the provider
        return to_lldb_str(synth.get_summary())
    except Exception as e:
        log.error('%s', e)
        raise
# Chained GetChildMemberWithName lookups
def gcm(valobj, *chain):
    """Walk `valobj` down through the named children listed in `chain`."""
    current = valobj
    for member_name in chain:
        current = current.GetChildMemberWithName(member_name)
    return current
def read_unique_ptr(valobj):
    """Extract the raw pointer child from a Unique<T>-style wrapper field."""
    # Rust-enabled LLDB using DWARF debug info will strip tuple field prefixes.
    # If LLDB is not Rust-enabled or if using PDB debug info, they will be underscore-prefixed.
    pointer = valobj.GetChildMemberWithName('pointer')
    child = pointer.GetChildMemberWithName('__0') # Plain lldb
    if child.IsValid():
        return child
    child = pointer.GetChildMemberWithName('0') # rust-lldb
    if child.IsValid():
        return child
    return pointer # pointer no longer contains NonZero since Rust 1.33
def string_from_ptr(pointer, length):
    """Read `length` bytes at `pointer` from process memory as UTF-8.

    Returns a unicode string (undecodable bytes replaced), u'' for a
    non-positive length, or None if the memory read fails.
    """
    if length <= 0:
        return u''
    error = lldb.SBError()
    process = pointer.GetProcess()
    data = process.ReadMemory(pointer.GetValueAsUnsigned(), length, error)
    if error.Success():
        return data.decode('utf8', 'replace')
    else:
        log.error('ReadMemory error: %s', error.GetCString())
        return None  # was an implicit fall-through; make the failure result explicit
def get_template_params(type_name):
    """Split out the top-level generic parameters of `type_name`.

    E.g. 'HashMap<Vec<u8>, String>' -> ['Vec<u8>', 'String'].  Returns an
    empty list when the name has no (balanced) angle-bracket section.
    """
    params = []
    depth = 0
    begin = 0
    for pos, ch in enumerate(type_name):
        if ch == '<':
            depth += 1
            if depth == 1:
                begin = pos + 1  # first char after the opening bracket
        elif ch == '>':
            depth -= 1
            if depth == 0:
                params.append(type_name[begin:pos].strip())
        elif ch == ',' and depth == 1:
            # Top-level separator only; commas in nested generics are ignored.
            params.append(type_name[begin:pos].strip())
            begin = pos + 1
    return params
def get_obj_summary(valobj, unavailable='{...}'):
    """Best-effort rendering of `valobj`: its summary if set, else its value,
    else the `unavailable` placeholder."""
    for text in (valobj.GetSummary(), valobj.GetValue()):
        if text is not None:
            return text
    return unavailable
def sequence_summary(children, maxsize=32):
    """Render an iterable of values as 'a, b, c', appending ', ...' and
    stopping once the rendered text exceeds `maxsize` characters.

    Note: parameter renamed from the misspelled 'childern'; all call sites
    in this module pass it positionally.
    """
    s = ''
    for child in children:
        if len(s) > 0:
            s += ', '
        s += get_obj_summary(child)
        if len(s) > maxsize:
            s += ', ...'
            break
    return s
# ----- Summaries -----
def get_tuple_summary(valobj, dict={}):
    """Summary for tuples: '(<elem0>, <elem1>, ...)'."""
    parts = (get_obj_summary(valobj.GetChildAtIndex(i))
             for i in range(valobj.GetNumChildren()))
    return '(%s)' % ', '.join(parts)
def get_array_summary(valobj, dict):
    """Summary for fixed-size arrays: '(<len>) [<elements...>]'."""
    count = valobj.GetNumChildren()
    return '(%d) [%s]' % (count, sequence_summary(valobj))
# ----- Synth providers ------
synth_by_id = weakref.WeakValueDictionary()  # SBValue ID -> provider; lets get_synth_summary() reuse state
class RustSynthProvider(object):
    """Base class for all Rust synthetic-children providers.

    Subclasses override initialize() to pull what they need out of
    self.valobj; the other methods implement LLDB's synth provider API.
    """
    def __init__(self, valobj, dict={}):
        self.valobj = valobj
        self.initialize()
        synth_by_id[valobj.GetID()] = self  # register for summary-provider reuse
    def initialize(self):
        return None
    def update(self):
        return False
    def num_children(self):
        return 0
    def has_children(self):
        return False
    def get_child_at_index(self, index):
        return None
    def get_child_index(self, name):
        return None
    def get_summary(self):
        return None
class RegularEnumProvider(RustSynthProvider):
    """Provider for regular (tagged-union) enums; exposes the active
    variant's payload fields, hiding the discriminant itself."""
    def initialize(self):
        # Regular enums are represented as unions of structs, containing discriminant in the
        # first field.
        discriminant = self.valobj.GetChildAtIndex(0).GetChildAtIndex(0).GetValueAsUnsigned()
        self.variant = self.valobj.GetChildAtIndex(discriminant)
    def num_children(self):
        # Child 0 of the variant struct is the discriminant, so hide it.
        return max(0, self.variant.GetNumChildren() - 1)
    def has_children(self):
        return self.num_children() > 0
    def get_child_at_index(self, index):
        return self.variant.GetChildAtIndex(index + 1)
    def get_child_index(self, name):
        return self.variant.GetIndexOfChildWithName(name) - 1
    def get_summary(self):
        return get_obj_summary(self.variant)
# Base class for providers that represent array-like objects
class ArrayLikeSynthProvider(RustSynthProvider):
    """Base for contiguous-storage providers; subclasses supply ptr_and_len()."""
    def initialize(self):
        ptr, len = self.ptr_and_len(self.valobj) # type: ignore
        self.ptr = ptr  # SBValue: pointer to the first element
        self.len = len  # element count
        self.item_type = self.ptr.GetType().GetPointeeType()
        self.item_size = self.item_type.GetByteSize()
    def ptr_and_len(self, obj):
        pass # abstract
    def num_children(self):
        return self.len
    def has_children(self):
        return True
    def get_child_at_index(self, index):
        try:
            if not 0 <= index < self.len:
                return None
            offset = index * self.item_size
            return self.ptr.CreateChildAtOffset('[%s]' % index, offset, self.item_type)
        except Exception as e:
            log.error('%s', e)
            raise
    def get_child_index(self, name):
        # Children are named '[N]'; parse N back out.
        try:
            return int(name.lstrip('[').rstrip(']'))
        except Exception as e:
            log.error('%s', e)
            raise
    def get_summary(self):
        return '(%d)' % (self.len,)
class StdVectorSynthProvider(ArrayLikeSynthProvider):
    """Synth provider for Vec<T>: data pointer in buf.ptr, length in len."""
    def ptr_and_len(self, vec):
        return (
            read_unique_ptr(gcm(vec, 'buf', 'ptr')),
            gcm(vec, 'len').GetValueAsUnsigned()
        )
    def get_summary(self):
        return '(%d) vec![%s]' % (self.len, sequence_summary((self.get_child_at_index(i) for i in range(self.len))))
##################################################################################################################
class SliceSynthProvider(ArrayLikeSynthProvider):
    """Synth provider for &[T] slices: data_ptr + length pair."""
    def ptr_and_len(self, vec):
        return (
            gcm(vec, 'data_ptr'),
            gcm(vec, 'length').GetValueAsUnsigned()
        )
    def get_summary(self):
        return '(%d) &[%s]' % (self.len, sequence_summary((self.get_child_at_index(i) for i in range(self.len))))
# Base class for *String providers
class StringLikeSynthProvider(ArrayLikeSynthProvider):
    """Array-like provider whose children are chars and whose summary is the
    quoted string contents (truncated with '...' past 1000 chars)."""
    def get_child_at_index(self, index):
        ch = ArrayLikeSynthProvider.get_child_at_index(self, index)
        ch.SetFormat(lldb.eFormatChar)  # display each byte as a character
        return ch
    def get_summary(self):
        # Limit string length to 1000 characters to cope with uninitialized values whose
        # length field contains garbage.
        strval = string_from_ptr(self.ptr, min(self.len, 1000))
        if strval is None:  # memory read failed (fix: was the non-idiomatic `== None`)
            return None
        if self.len > 1000:
            strval += u'...'
        return u'"%s"' % strval
class StrSliceSynthProvider(StringLikeSynthProvider):
    """Synth provider for &str: data_ptr + length pair."""
    def ptr_and_len(self, valobj):
        return (
            gcm(valobj, 'data_ptr'),
            gcm(valobj, 'length').GetValueAsUnsigned()
        )
class StdStringSynthProvider(StringLikeSynthProvider):
    """Synth provider for String: delegates to the inner Vec<u8>."""
    def ptr_and_len(self, valobj):
        vec = gcm(valobj, 'vec')
        return (
            read_unique_ptr(gcm(vec, 'buf', 'ptr')),
            gcm(vec, 'len').GetValueAsUnsigned()
        )
class StdCStringSynthProvider(StringLikeSynthProvider):
    """Synth provider for CString; length excludes the trailing NUL."""
    def ptr_and_len(self, valobj):
        vec = gcm(valobj, 'inner')
        return (
            gcm(vec, 'data_ptr'),
            gcm(vec, 'length').GetValueAsUnsigned() - 1
        )
class StdOsStringSynthProvider(StringLikeSynthProvider):
    """Synth provider for OsString: inner.inner buffer (extra 'bytes' layer on Windows)."""
    def ptr_and_len(self, valobj):
        vec = gcm(valobj, 'inner', 'inner')
        tmp = gcm(vec, 'bytes') # Windows OSString has an extra layer
        if tmp.IsValid():
            vec = tmp
        return (
            read_unique_ptr(gcm(vec, 'buf', 'ptr')),
            gcm(vec, 'len').GetValueAsUnsigned()
        )
class FFISliceSynthProvider(StringLikeSynthProvider):
    """Provider for unsized FFI slices; reads the fat-pointer layout directly
    from process memory because debug info for these is incomplete."""
    def ptr_and_len(self, valobj):
        process = valobj.GetProcess()
        slice_ptr = valobj.GetLoadAddress()
        data_ptr_type = valobj.GetTarget().GetBasicType(lldb.eBasicTypeChar).GetPointerType()
        # Unsized slice objects have incomplete debug info, so here we just assume standard slice
        # reference layout: [<pointer to data>, <data size>]
        error = lldb.SBError()
        pointer = valobj.CreateValueFromAddress('data', slice_ptr, data_ptr_type)
        length = process.ReadPointerFromMemory(slice_ptr + process.GetAddressByteSize(), error)
        return pointer, length
class StdCStrSynthProvider(FFISliceSynthProvider):
    """Synth provider for &CStr; hides the trailing NUL byte."""
    def ptr_and_len(self, valobj):
        ptr, len = FFISliceSynthProvider.ptr_and_len(self, valobj)
        return (ptr, len-1) # drop terminating '\0'
class StdOsStrSynthProvider(FFISliceSynthProvider):
    """Synth provider for &OsStr: plain FFI slice, no adjustments."""
    pass
class StdPathBufSynthProvider(StdOsStringSynthProvider):
    """Synth provider for PathBuf: an OsString wrapped in an 'inner' field."""
    def ptr_and_len(self, valobj):
        return StdOsStringSynthProvider.ptr_and_len(self, gcm(valobj, 'inner'))
class StdPathSynthProvider(FFISliceSynthProvider):
    """Synth provider for &Path: plain FFI slice, no adjustments."""
    pass
##################################################################################################################
class DerefSynthProvider(RustSynthProvider):
    """Base for providers that present a single wrapped value (`self.deref`)
    as the object itself; subclasses assign `deref` in initialize()."""
    deref = lldb.SBValue()  # invalid placeholder until initialize() assigns it
    def num_children(self):
        return self.deref.GetNumChildren()
    def has_children(self):
        return self.deref.MightHaveChildren()
    def get_child_at_index(self, index):
        return self.deref.GetChildAtIndex(index)
    def get_child_index(self, name):
        return self.deref.GetIndexOfChildWithName(name)
    def get_summary(self):
        return get_obj_summary(self.deref)
# Base for Rc and Arc
class StdRefCountedSynthProvider(DerefSynthProvider):
    """Shared summary logic for std reference-counted pointers (Rc/Arc)."""
    weak = 0    # weak count, minus the implicit one (see subclasses)
    strong = 0  # strong count
    def get_summary(self):
        if self.weak != 0:
            s = '(refs:%d,weak:%d) ' % (self.strong, self.weak)
        else:
            s = '(refs:%d) ' % self.strong
        if self.strong > 0:
            s += get_obj_summary(self.deref)
        else:
            s += '<disposed>'  # no live strong references
        return s
class StdRcSynthProvider(StdRefCountedSynthProvider):
    """Synth provider for alloc::rc::Rc / rc::Weak (non-atomic counters)."""
    def initialize(self):
        inner = read_unique_ptr(gcm(self.valobj, 'ptr'))
        # Rc's counters are Cell<usize>: strong/weak -> value -> value
        self.strong = gcm(inner, 'strong', 'value', 'value').GetValueAsUnsigned()
        self.weak = gcm(inner, 'weak', 'value', 'value').GetValueAsUnsigned()
        if self.strong > 0:
            self.deref = gcm(inner, 'value')
            self.weak -= 1 # There's an implicit weak reference communally owned by all the strong pointers
        else:
            self.deref = lldb.SBValue()  # invalid -> summary shows '<disposed>'
        self.deref.SetPreferSyntheticValue(True)
class StdArcSynthProvider(StdRefCountedSynthProvider):
    """Synth provider for alloc::sync::Arc / sync::Weak (atomic counters)."""
    def initialize(self):
        inner = read_unique_ptr(gcm(self.valobj, 'ptr'))
        # Arc's counters are atomics: strong/weak -> v -> value
        self.strong = gcm(inner, 'strong', 'v', 'value').GetValueAsUnsigned()
        self.weak = gcm(inner, 'weak', 'v', 'value').GetValueAsUnsigned()
        if self.strong > 0:
            self.deref = gcm(inner, 'data')
            self.weak -= 1 # There's an implicit weak reference communally owned by all the strong pointers
        else:
            self.deref = lldb.SBValue()  # invalid -> summary shows '<disposed>'
        self.deref.SetPreferSyntheticValue(True)
class StdMutexSynthProvider(DerefSynthProvider):
    """Synth provider for std::sync::Mutex: displays the protected data."""
    def initialize(self):
        self.deref = gcm(self.valobj, 'data', 'value')
        self.deref.SetPreferSyntheticValue(True)
class StdCellSynthProvider(DerefSynthProvider):
    """Synth provider for core::cell::Cell: displays the contained value."""
    def initialize(self):
        self.deref = gcm(self.valobj, 'value', 'value')
        self.deref.SetPreferSyntheticValue(True)
class StdRefCellSynthProvider(DerefSynthProvider):
    """Synth provider for core::cell::RefCell: contents plus borrow state."""
    def initialize(self):
        self.deref = gcm(self.valobj, 'value', 'value')
        self.deref.SetPreferSyntheticValue(True)
    def get_summary(self):
        # Borrow flag: negative = mutably borrowed, positive = count of
        # outstanding shared borrows, zero = not borrowed.
        borrow = gcm(self.valobj, 'borrow', 'value', 'value').GetValueAsSigned()
        s = ''
        if borrow < 0:
            s = '(borrowed:mut) '
        elif borrow > 0:
            s = '(borrowed:%d) ' % borrow
        return s + get_obj_summary(self.deref)
class StdRefCellBorrowSynthProvider(DerefSynthProvider):
    """Synth provider for core::cell::Ref / RefMut: dereferences the borrow."""
    def initialize(self):
        self.deref = gcm(self.valobj, 'value').Dereference()
        self.deref.SetPreferSyntheticValue(True)
##################################################################################################################
ENCODED_ENUM_PREFIX = 'RUST$ENCODED$ENUM$'  # marker field name for niche-optimized enums
ENUM_DISCRIMINANT = 'RUST$ENUM$DISR'        # discriminant field name for regular enums
class EnumSynthProvider(DerefSynthProvider):
    """Base provider for Rust enums: figures out the active variant, exposing
    its name via `self.variant` and its payload via `self.deref`."""
    def initialize(self):
        obj_type = self.valobj.GetType()
        first_field_name = obj_type.GetFieldAtIndex(0).GetName()
        # The first two branches are for the sake of windows-*-msvc targets and non-rust-enabled liblldb.
        # Normally, we should be calling the initialize_enum().
        if first_field_name.startswith(ENCODED_ENUM_PREFIX): # Niche-optimized enum
            # Field name encodes '$'-separated child indices leading to the
            # discriminant, followed by the name of the "null" variant.
            tokens = first_field_name[len(ENCODED_ENUM_PREFIX):].split("$")
            discr_indices = [int(index) for index in tokens[:-1]]
            null_variant = tokens[-1]
            discriminant = self.valobj.GetChildAtIndex(0)
            for discr_index in discr_indices:
                discriminant = discriminant.GetChildAtIndex(discr_index)
            # Recurse down the first field of the discriminant till we reach a non-struct type,
            for i in range(20): # ... but limit the depth, just in case.
                if discriminant.GetType().GetTypeClass() != lldb.eTypeClassStruct:
                    break
                discriminant = discriminant.GetChildAtIndex(0)
            if discriminant.GetValueAsUnsigned() == 0:
                self.variant = null_variant
                self.deref = lldb.SBValue()  # null variant carries no payload
            else:
                # NOTE(review): self.variant is not assigned on this path;
                # get_summary() would hit an unset attribute — confirm intended.
                self.deref = self.valobj.GetChildAtIndex(0)
        elif first_field_name == ENUM_DISCRIMINANT: # Regular enum
            self.variant = self.valobj.GetChildAtIndex(0).GetValue()
            self.deref = self.valobj.GetChildAtIndex(1)
        else:
            self.initialize_enum()  # subclass-specific detection
        self.deref.SetPreferSyntheticValue(True)
    def initialize_enum(self):
        # Overridden by subclasses (Option/Result/Cow).
        pass
    def get_summary(self):
        if self.deref.IsValid():
            return self.variant + '(' + get_obj_summary(self.deref) + ')'
        else:
            return self.variant
class StdOptionSynthProvider(EnumSynthProvider):
    """Option<T>: variant (Some/None) determined from the concrete type name."""
    def initialize_enum(self):
        if self.valobj.GetTypeName().endswith('::Some'):
            self.variant = 'Some'
            self.deref = gcm(self.valobj, '0')
        else:
            self.variant = 'None'
            self.deref = lldb.SBValue()  # None carries no payload
class StdResultSynthProvider(EnumSynthProvider):
    """Result<T, E>: variant (Ok/Err) determined from the concrete type name."""
    def initialize_enum(self):
        if self.valobj.GetTypeName().endswith('::Ok'):
            self.variant = 'Ok'
        else:
            self.variant = 'Err'
        self.deref = gcm(self.valobj, '0')
class StdCowSynthProvider(EnumSynthProvider):
    """Cow<T>: variant (Owned/Borrowed) determined from the concrete type name."""
    def initialize_enum(self):
        if self.valobj.GetTypeName().endswith('::Owned'):
            self.variant = 'Owned'
        else:
            self.variant = 'Borrowed'
        self.deref = gcm(self.valobj, '0')
##################################################################################################################
class StdHashMapSynthProvider(RustSynthProvider):
    """Synth provider for std HashMap, backed by hashbrown's RawTable.

    Supports three historical layouts; initialize_table() dispatches on the
    fields present.  `self.valid_indices` maps child index -> bucket index.
    """
    def initialize(self):
        self.initialize_table(gcm(self.valobj, 'base', 'table'))
    def initialize_table(self, table):
        assert table.IsValid()
        if table.type.GetNumberOfTemplateArguments() > 0:
            item_ty = table.type.GetTemplateArgumentType(0)
        else: # we must be on windows-msvc - try to look up item type by name
            table_ty_name = table.GetType().GetName() # "hashbrown::raw::RawTable<ITEM_TY>"
            item_ty_name = table_ty_name[table_ty_name.find('<')+1: table_ty_name.rfind('>')]
            item_ty = table.GetTarget().FindTypes(item_ty_name).GetTypeAtIndex(0)
        if item_ty.IsTypedefType():
            item_ty = item_ty.GetTypedefedType()
        inner_table = table.GetChildMemberWithName('table')
        if inner_table.IsValid():
            self.initialize_hashbrown_v2(inner_table, item_ty) # 1.52 <= std_version
        else:
            if not table.GetChildMemberWithName('data'):
                self.initialize_hashbrown_v2(table, item_ty) # ? <= std_version < 1.52
            else:
                self.initialize_hashbrown_v1(table, item_ty) # 1.36 <= std_version < ?
    def initialize_hashbrown_v2(self, table, item_ty):
        self.num_buckets = gcm(table, 'bucket_mask').GetValueAsUnsigned() + 1
        ctrl_ptr = gcm(table, 'ctrl', 'pointer')
        ctrl = ctrl_ptr.GetPointeeData(0, self.num_buckets)
        # Buckets are located above `ctrl`, in reverse order.
        start_addr = ctrl_ptr.GetValueAsUnsigned() - item_ty.GetByteSize() * self.num_buckets
        buckets_ty = item_ty.GetArrayType(self.num_buckets)
        self.buckets = self.valobj.CreateValueFromAddress('data', start_addr, buckets_ty)
        error = lldb.SBError()
        self.valid_indices = []
        for i in range(self.num_buckets):
            # Ctrl byte with the high bit clear is treated as an occupied bucket.
            if ctrl.GetUnsignedInt8(error, i) & 0x80 == 0:
                self.valid_indices.append(self.num_buckets - 1 - i)
    def initialize_hashbrown_v1(self, table, item_ty):
        self.num_buckets = gcm(table, 'bucket_mask').GetValueAsUnsigned() + 1
        ctrl_ptr = gcm(table, 'ctrl', 'pointer')
        ctrl = ctrl_ptr.GetPointeeData(0, self.num_buckets)
        buckets_ty = item_ty.GetArrayType(self.num_buckets)
        self.buckets = gcm(table, 'data', 'pointer').Dereference().Cast(buckets_ty)
        error = lldb.SBError()
        self.valid_indices = []
        for i in range(self.num_buckets):
            if ctrl.GetUnsignedInt8(error, i) & 0x80 == 0:
                self.valid_indices.append(i)
    def has_children(self):
        return True
    def num_children(self):
        return len(self.valid_indices)
    def get_child_at_index(self, index):
        bucket_idx = self.valid_indices[index]
        item = self.buckets.GetChildAtIndex(bucket_idx)
        # Re-wrap so the child is displayed as '[index]'.
        return item.CreateChildAtOffset('[%d]' % index, 0, item.GetType())
    def get_child_index(self, name):
        try:
            return int(name.lstrip('[').rstrip(']'))
        except Exception as e:
            log.error('%s', e)
            raise
    def get_summary(self):
        return 'size=%d, capacity=%d' % (self.num_children(), self.num_buckets)
class StdHashSetSynthProvider(StdHashMapSynthProvider):
    """HashSet: same table layout as HashMap; children are the elements."""
    def initialize(self):
        table = gcm(self.valobj, 'base', 'map', 'table') # std_version >= 1.48
        if not table.IsValid():
            table = gcm(self.valobj, 'map', 'base', 'table') # std_version < 1.48
        self.initialize_table(table)
    def get_child_at_index(self, index):
        bucket_idx = self.valid_indices[index]
        # Child 0 of the bucket entry is the set element.
        item = self.buckets.GetChildAtIndex(bucket_idx).GetChildAtIndex(0)
        return item.CreateChildAtOffset('[%d]' % index, 0, item.GetType())
##################################################################################################################
def __lldb_init_module(debugger_obj, internal_dict):
    """Entry point invoked by LLDB when this script module is imported."""
    log.info('Initializing')
    initialize_category(debugger_obj)
|
a84b5385-2d5f-11e5-88ca-b88d120fff5e
a855050c-2d5f-11e5-bb55-b88d120fff5e
a855050c-2d5f-11e5-bb55-b88d120fff5e |
# -*- coding: utf-8 -*-
# dydrmntion@gmail.com ~ 2013
import sys
import os
_here = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(_here, 'ext'))  # bundled dependencies live in ./ext
from socketio.server import SocketIOServer
from gevent import monkey
monkey.patch_all()  # gevent patching is done before `server` (the app) is imported
from server import app
def start_server(host_address):
    """Start the SocketIO server on `host_address` (host, port).

    Best-effort: any server error is swallowed on the assumption that a
    server is already running on that address.
    """
    try:
        server = SocketIOServer(host_address, app, resource='socket.io')
        server.serve_forever()
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt, making the process hard to terminate cleanly.
        # assume for now server is already running
        pass
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--host")
    parser.add_argument("--port", type=int)
    # Fix: argparse `type=bool` is broken for CLI flags — bool("0") and
    # bool("False") are both True (any non-empty string is truthy).
    # Parse 0/1 as int and convert explicitly.
    parser.add_argument("--debug", type=int)
    parser.add_argument("--hide-status", type=int)
    args = parser.parse_args()
    host_address = (args.host, args.port)
    app.debug = bool(args.debug)
    # NOTE(review): the `not` here inverts the flag's value — looks
    # suspicious given the option name; confirm intended semantics.
    app.vimfox['hide_status'] = not args.hide_status
    start_server(host_address)
silence logging when debug is 0
# -*- coding: utf-8 -*-
# dydrmntion@gmail.com ~ 2013
import sys
import os
_here = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(_here, 'ext'))  # bundled dependencies live in ./ext
from socketio.server import SocketIOServer
from gevent import monkey
monkey.patch_all()  # gevent patching is done before `server` (the app) is imported
from server import app
def start_server(host_address):
    """Start the SocketIO server on `host_address` (host, port).

    Best-effort: any server error is swallowed on the assumption that a
    server is already running on that address.
    """
    try:
        server = SocketIOServer(host_address, app, resource='socket.io')
        server.serve_forever()
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt, making the process hard to terminate cleanly.
        # assume for now server is already running
        pass
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--host")
    parser.add_argument("--port", type=int)
    # 0/1 flags parsed as int: argparse `type=bool` would treat any
    # non-empty string (including "0") as True.
    parser.add_argument("--debug", type=int)
    parser.add_argument("--hide-status", type=int)
    args = parser.parse_args()
    host_address = (args.host, args.port)
    app.debug = bool(args.debug)
    # Fix: removed leftover debug `print` statements (args.debug,
    # app.logger_name, app.debug) that polluted stdout on every start.
    if not args.debug:
        # Silence the app logger unless running in debug mode.
        import logging
        app.logger.propagate = False
        app.logger.setLevel(logging.ERROR)
    app.vimfox['hide_status'] = args.hide_status
    start_server(host_address)
|
"""
system type readers
"""
from xnb_parse.type_reader_manager import TypeReaderPlugin
from xnb_parse.type_reader import ValueTypeReader, GenericTypeReader, GenericValueTypeReader, ReaderError
from xnb_parse.xna_types.xna_system import XNAList, XNADict
class EnumReader(GenericValueTypeReader, TypeReaderPlugin):
    """Reader for System.Enum`1: delegates to the underlying enum type reader."""
    generic_target_type = 'System.Enum`1'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.EnumReader`1'
    def init_reader(self):
        GenericValueTypeReader.init_reader(self)
        if not self.readers[0].is_enum_type:
            # Fix: the exception was constructed but never raised, so
            # non-enum readers were silently accepted.
            raise ReaderError("Not enum type reader: '{}'".format(self.readers[0]))
    def read(self):
        return self.readers[0].read()
class NullableReader(GenericValueTypeReader, TypeReaderPlugin):
    """Reader for System.Nullable`1: a has-value flag followed by the value."""
    generic_target_type = 'System.Nullable`1'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.NullableReader`1'
    def read(self):
        if not self.stream.read_boolean():
            return None
        return self.readers[0].read()
class ArrayReader(GenericTypeReader, TypeReaderPlugin):
    """Reader for System.Array`1: an int32 count followed by the elements.

    Value-type elements are stored inline; reference-type elements go through
    the polymorphic object reader.
    """
    generic_target_type = 'System.Array`1'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.ArrayReader`1'
    def read(self):
        elements = self.stream.read_int32()
        if self.readers[0].is_value_type:
            return XNAList([self.readers[0].read() for _ in range(elements)])
        else:
            return XNAList([self.stream.read_object(self.readers[0]) for _ in range(elements)])
class ListReader(GenericTypeReader, TypeReaderPlugin):
    """Reader for List`1: an int32 count followed by the elements.

    Value-type elements are stored inline; reference-type elements go through
    the polymorphic object reader.
    """
    generic_target_type = 'System.Collections.Generic.List`1'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.ListReader`1'
    def read(self):
        elements = self.stream.read_int32()
        if self.readers[0].is_value_type:
            return XNAList([self.readers[0].read() for _ in range(elements)])
        else:
            return XNAList([self.stream.read_object(self.readers[0]) for _ in range(elements)])
class DictionaryReader(GenericTypeReader, TypeReaderPlugin):
    """Reader for Dictionary`2: an int32 count followed by key/value pairs.

    Both keys and values are stored inline only when their type is a value
    type; reference types go through the polymorphic object reader.
    """
    generic_target_type = 'System.Collections.Generic.Dictionary`2'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.DictionaryReader`2'
    def _read_element(self, reader):
        # Inline for value types, polymorphic object read otherwise.
        if reader.is_value_type:
            return reader.read()
        return self.stream.read_object(reader)
    def read(self):
        elements = self.stream.read_int32()
        # Fix: keys were always read inline via self.readers[0].read(),
        # which mis-parsed dictionaries whose key type is not a value type.
        return XNADict([(self._read_element(self.readers[0]),
                         self._read_element(self.readers[1]))
                        for _ in range(elements)])
class TimeSpanReader(ValueTypeReader, TypeReaderPlugin):
    """Reader for System.TimeSpan: a single raw int64 (presumably ticks —
    not decoded here)."""
    target_type = 'System.TimeSpan'
    reader_name = 'Microsoft.Xna.Framework.Content.TimeSpanReader'
    def read(self):
        return self.stream.read_int64()
class DateTimeReader(ValueTypeReader, TypeReaderPlugin):
    """Reader for System.DateTime: a single raw int64 (not decoded here)."""
    target_type = 'System.DateTime'
    reader_name = 'Microsoft.Xna.Framework.Content.DateTimeReader'
    def read(self):
        return self.stream.read_int64()
class DecimalReader(ValueTypeReader, TypeReaderPlugin):
    """Reader for System.Decimal: returned raw as four int32 words."""
    target_type = 'System.Decimal'
    reader_name = 'Microsoft.Xna.Framework.Content.DecimalReader'
    def read(self):
        return self.stream.unpack('4i')
class ExternalReferenceReader(ValueTypeReader, TypeReaderPlugin):
    """Reader for external asset references; delegates to the stream."""
    target_type = 'ExternalReference'
    reader_name = 'Microsoft.Xna.Framework.Content.ExternalReferenceReader'
    def read(self):
        return self.stream.read_external_reference()
class ReflectiveReader(GenericTypeReader, TypeReaderPlugin):
generic_target_type = 'Reflective'
generic_reader_name = 'Microsoft.Xna.Framework.Content.ReflectiveReader`1'
def read(self):
return self.readers[0].read()
handle dicts where key is not a value type
"""
system type readers
"""
from xnb_parse.type_reader_manager import TypeReaderPlugin
from xnb_parse.type_reader import ValueTypeReader, GenericTypeReader, GenericValueTypeReader, ReaderError
from xnb_parse.xna_types.xna_system import XNAList, XNADict
class EnumReader(GenericValueTypeReader, TypeReaderPlugin):
    """Reads a System.Enum`1 by delegating to the underlying enum reader."""
    generic_target_type = 'System.Enum`1'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.EnumReader`1'

    def init_reader(self):
        """Initialize and verify the single type argument is an enum reader.

        Raises:
            ReaderError: if the generic argument's reader is not an enum
                type reader.
        """
        GenericValueTypeReader.init_reader(self)
        if not self.readers[0].is_enum_type:
            # BUG FIX: the original constructed this exception but never
            # raised it, so the validation silently passed.
            raise ReaderError("Not enum type reader: '{}'".format(self.readers[0]))

    def read(self):
        # The wrapped enum reader knows how to decode the stored value.
        return self.readers[0].read()
class NullableReader(GenericValueTypeReader, TypeReaderPlugin):
    """Reads a System.Nullable`1: a boolean presence flag, then the value."""
    generic_target_type = 'System.Nullable`1'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.NullableReader`1'

    def read(self):
        # A leading boolean records whether a value follows at all.
        if not self.stream.read_boolean():
            return None
        return self.readers[0].read()
class ArrayReader(GenericTypeReader, TypeReaderPlugin):
    """Reads a serialized System.Array`1 into an XNAList."""
    generic_target_type = 'System.Array`1'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.ArrayReader`1'

    def read(self):
        # The element count precedes the elements themselves.
        count = self.stream.read_int32()
        elem_reader = self.readers[0]
        if elem_reader.is_value_type:
            # Value-type elements are read directly with their reader.
            values = [elem_reader.read() for _ in range(count)]
        else:
            # Other elements are routed through the stream's object reader.
            values = [self.stream.read_object(elem_reader) for _ in range(count)]
        return XNAList(values)
class ListReader(GenericTypeReader, TypeReaderPlugin):
    """Reads a serialized System.Collections.Generic.List`1 into an XNAList."""
    generic_target_type = 'System.Collections.Generic.List`1'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.ListReader`1'

    def read(self):
        # The item count precedes the items themselves.
        count = self.stream.read_int32()
        item_reader = self.readers[0]
        if item_reader.is_value_type:
            # Value-type items are read directly with their reader.
            result = [item_reader.read() for _ in range(count)]
        else:
            # Other items go through the stream's object dispatch.
            result = [self.stream.read_object(item_reader) for _ in range(count)]
        return XNAList(result)
class DictionaryReader(GenericTypeReader, TypeReaderPlugin):
    """Reads a serialized Dictionary`2 into an XNADict."""
    generic_target_type = 'System.Collections.Generic.Dictionary`2'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.DictionaryReader`2'

    def read(self):
        """Read an int32 pair count, then (key, value) pairs in stream order."""
        count = self.stream.read_int32()
        key_reader = self.readers[0]
        value_reader = self.readers[1]
        # Value types are read directly with their reader; anything else is
        # routed through the stream's read_object dispatch.  Binding the two
        # read functions once replaces the original's four-way branch.
        if key_reader.is_value_type:
            read_key = key_reader.read
        else:
            read_key = lambda: self.stream.read_object(key_reader)
        if value_reader.is_value_type:
            read_value = value_reader.read
        else:
            read_value = lambda: self.stream.read_object(value_reader)
        # Each pair is stored key first, then its value; the comprehension
        # evaluates left-to-right, preserving that order.
        return XNADict([(read_key(), read_value()) for _ in range(count)])
class TimeSpanReader(ValueTypeReader, TypeReaderPlugin):
    """Reads a System.TimeSpan, stored as a single int64 (presumably ticks — TODO confirm)."""
    target_type = 'System.TimeSpan'
    reader_name = 'Microsoft.Xna.Framework.Content.TimeSpanReader'
    def read(self):
        return self.stream.read_int64()
class DateTimeReader(ValueTypeReader, TypeReaderPlugin):
    """Reads a System.DateTime, stored as a single int64."""
    target_type = 'System.DateTime'
    reader_name = 'Microsoft.Xna.Framework.Content.DateTimeReader'
    def read(self):
        return self.stream.read_int64()
class DecimalReader(ValueTypeReader, TypeReaderPlugin):
    """Reads a System.Decimal, stored as four int32 words."""
    target_type = 'System.Decimal'
    reader_name = 'Microsoft.Xna.Framework.Content.DecimalReader'
    def read(self):
        # Four little int32s, returned as whatever tuple/list unpack() yields.
        return self.stream.unpack('4i')
class ExternalReferenceReader(ValueTypeReader, TypeReaderPlugin):
    """Reads an external-asset reference via the stream's dedicated helper."""
    target_type = 'ExternalReference'
    reader_name = 'Microsoft.Xna.Framework.Content.ExternalReferenceReader'
    def read(self):
        return self.stream.read_external_reference()
class ReflectiveReader(GenericTypeReader, TypeReaderPlugin):
    """Generic fallback reader: delegates entirely to its single type argument's reader."""
    generic_target_type = 'Reflective'
    generic_reader_name = 'Microsoft.Xna.Framework.Content.ReflectiveReader`1'
    def read(self):
        return self.readers[0].read()
|
#!/usr/bin/python
# The MIT License (MIT)
#
# Copyright (c) 2014 Gluu
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os.path
import Properties
import random
import shutil
import socket
import string
import time
import uuid
import json
import traceback
import subprocess
import sys
import getopt
import hashlib
import re
import glob
import base64
import ldap
from ldap.controls import SimplePagedResultsControl
import datetime
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_ALLOW)
ldap.set_option(ldap.OPT_REFERRALS, 0)
from pyDes import *
migration_time = time.ctime().replace(' ','_')
ox_ldap_prop_fn = '/etc/gluu/conf/ox-ldap.properties'
def get_ldap_bind_pw():
    """Return the clear-text LDAP bind password from ox-ldap.properties.

    Scans the properties file for the ``bindPassword`` entry and decrypts
    it with the Gluu encode.py helper.  Returns None implicitly when no
    bindPassword line is found.
    """
    # BUG FIX: use a with-statement so the properties file is closed; the
    # original opened it and returned without ever closing the handle.
    with open(ox_ldap_prop_fn) as prop:
        for l in prop:
            ls = l.strip()
            if ls.startswith('bindPassword'):
                n = ls.find(':')
                encpw = ls[n+1:].strip()
                # encode.py -D decrypts the stored (encoded) password.
                clrpw = os.popen('python /opt/gluu/bin/encode.py -D ' + encpw).read()
                return clrpw.strip()
def update_ox_ldap_prop(bindDN, trustStoreFile, trustStorePin):
    """Rewrite bindDN / trust-store entries in ox-ldap.properties in place.

    A timestamped backup of the original file is written next to it before
    the rewrite.

    :param bindDN: new value for the ``bindDN`` property
    :param trustStoreFile: new value for ``ssl.trustStoreFile``
    :param trustStorePin: new value for ``ssl.trustStorePin``
    """
    prop = open(ox_ldap_prop_fn).readlines()
    for i, l in enumerate(prop):
        ls = l.strip()
        if ls.startswith('bindDN'):
            prop[i] = 'bindDN: {0}\n'.format(bindDN)
        elif ls.startswith('ssl.trustStoreFile'):
            prop[i] = 'ssl.trustStoreFile: {0}\n'.format(trustStoreFile)
        elif ls.startswith('ssl.trustStorePin'):
            prop[i] = 'ssl.trustStorePin: {0}\n'.format(trustStorePin)
    # Back up the original before overwriting.  shutil.copy avoids spawning
    # a shell (the original used os.system('cp ...')).
    shutil.copy(ox_ldap_prop_fn, '{0}.back_{1}'.format(ox_ldap_prop_fn, migration_time))
    with open(ox_ldap_prop_fn, 'w') as w:
        w.write(''.join(prop))
def post_ldap_update(ldap_bind_dn, ldap_bind_pw):
    """Rewrite stored bind DNs inside two appliance config entries after migration.

    Binds to the local LDAPS server, then:
      1. updates oxIDPAuthentication's embedded JSON config so its bindDN
         becomes 'cn=Directory Manager' (only when it still points at the
         localhost server with the old 'cn=directory manager,o=gluu' DN);
      2. unconditionally sets the inumConfig bindDN inside
         oxTrustConfCacheRefresh to 'cn=Directory Manager'.

    :param ldap_bind_dn: DN used for the simple bind
    :param ldap_bind_pw: clear-text password for the bind
    """
    conn = ldap.initialize('ldaps://localhost:1636')
    conn.protocol_version = 3
    conn.simple_bind_s(ldap_bind_dn, ldap_bind_pw)
    # oxIDPAuthentication holds JSON whose 'config' field is itself a
    # JSON-encoded string (double-encoded), hence the nested loads/dumps.
    result = conn.search_s('ou=appliances,o=gluu',ldap.SCOPE_SUBTREE,'(oxIDPAuthentication=*)',['oxIDPAuthentication'])
    dn = result[0][0]
    oxIDPAuthentication = json.loads(result[0][1]['oxIDPAuthentication'][0])
    config = json.loads(oxIDPAuthentication['config'])
    if config['servers'][0]=='localhost:1636' and config['bindDN'].lower()=='cn=directory manager,o=gluu':
        config['bindDN'] = 'cn=Directory Manager'
        oxIDPAuthentication['config'] = json.dumps(config)
        oxIDPAuthentication = json.dumps(oxIDPAuthentication, indent=2)
        conn.modify_s(dn, [( ldap.MOD_REPLACE, 'oxIDPAuthentication', oxIDPAuthentication)])
    # Cache-refresh entry: plain (single-encoded) JSON attribute.
    result = conn.search_s('ou=appliances,o=gluu',ldap.SCOPE_SUBTREE,'(oxTrustConfCacheRefresh=*)',['oxTrustConfCacheRefresh'])
    dn = result[0][0]
    oxTrustConfCacheRefresh = json.loads(result[0][1]['oxTrustConfCacheRefresh'][0])
    oxTrustConfCacheRefresh['inumConfig']['bindDN'] = 'cn=Directory Manager'
    oxTrustConfCacheRefresh = json.dumps(oxTrustConfCacheRefresh, indent=2)
    conn.modify_s(dn, [( ldap.MOD_REPLACE, 'oxTrustConfCacheRefresh', oxTrustConfCacheRefresh)])
class Setup(object):
    def __init__(self, install_dir=None):
        """Hold every path, URL, credential slot and flag used by Gluu CE setup.

        Attributes are plain configuration data; most paths are derived
        from a few roots (install_dir, /opt, /etc/gluu, /etc/certs), so
        assignment order matters — later values are built from earlier
        ones.  Secrets default to None and are filled by check_properties.
        """
        self.install_dir = install_dir

        self.oxVersion = '3.1.2.Final'
        self.githubBranchName = 'version_3.1.2'

        # Used only if -w (get wars) options is given to setup.py
        self.oxauth_war = 'https://ox.gluu.org/maven/org/xdi/oxauth-server/%s/oxauth-server-%s.war' % (self.oxVersion, self.oxVersion)
        self.oxauth_rp_war = 'https://ox.gluu.org/maven/org/xdi/oxauth-rp/%s/oxauth-rp-%s.war' % (self.oxVersion, self.oxVersion)
        self.oxtrust_war = 'https://ox.gluu.org/maven/org/xdi/oxtrust-server/%s/oxtrust-server-%s.war' % (self.oxVersion, self.oxVersion)
        self.idp3_war = 'http://ox.gluu.org/maven/org/xdi/oxshibbolethIdp/%s/oxshibbolethIdp-%s.war' % (self.oxVersion, self.oxVersion)
        self.idp3_dist_jar = 'http://ox.gluu.org/maven/org/xdi/oxShibbolethStatic/%s/oxShibbolethStatic-%s.jar' % (self.oxVersion, self.oxVersion)
        self.idp3_cml_keygenerator = 'http://ox.gluu.org/maven/org/xdi/oxShibbolethKeyGenerator/%s/oxShibbolethKeyGenerator-%s.jar' % (self.oxVersion, self.oxVersion)
        self.asimba_war = 'http://ox.gluu.org/maven/org/asimba/asimba-wa/%s/asimba-wa-%s.war' % (self.oxVersion, self.oxVersion)
        self.cred_manager_war = 'http://ox.gluu.org/maven/org/xdi/cred-manager/%s/cred-manager-%s.war' % (self.oxVersion, self.oxVersion)
        self.ce_setup_zip = 'https://github.com/GluuFederation/community-edition-setup/archive/%s.zip' % self.githubBranchName
        self.java_1_8_jce_zip = 'http://download.oracle.com/otn-pub/java/jce/8/jce_policy-8.zip'

        self.downloadWars = None
        self.templateRenderingDict = {}

        # OS commands
        self.cmd_ln = '/bin/ln'
        self.cmd_chmod = '/bin/chmod'
        self.cmd_chown = '/bin/chown'
        self.cmd_chgrp = '/bin/chgrp'
        self.cmd_mkdir = '/bin/mkdir'
        self.cmd_rpm = '/bin/rpm'
        self.cmd_dpkg = '/usr/bin/dpkg'
        self.opensslCommand = '/usr/bin/openssl'
        self.sysemProfile = "/etc/profile"

        # java commands
        self.jre_home = '/opt/jre'
        self.cmd_java = '%s/bin/java' % self.jre_home
        self.cmd_keytool = '%s/bin/keytool' % self.jre_home
        self.cmd_jar = '%s/bin/jar' % self.jre_home

        # Component versions
        self.jre_version = '112'
        self.jetty_version = '9.3.15.v20161220'
        self.jython_version = '2.7.0'
        self.node_version = '6.9.1'
        self.apache_version = None
        self.opendj_version = None

        # Gluu components installation status
        self.installOxAuth = True
        self.installOxTrust = True
        self.installLdap = True
        self.installHttpd = True
        self.installSaml = False
        self.installAsimba = False
        self.installOxAuthRP = False
        self.installPassport = False

        self.allowPreReleasedApplications = False
        self.allowDeprecatedApplications = False

        self.jreDestinationPath = '/opt/jdk1.8.0_%s' % self.jre_version

        self.os_types = ['centos', 'redhat', 'fedora', 'ubuntu', 'debian']
        self.os_type = None
        self.os_initdaemon = None

        self.shibboleth_version = 'v3'

        # Distribution download / staging folders
        self.distFolder = '/opt/dist'
        self.distAppFolder = '%s/app' % self.distFolder
        self.distGluuFolder = '%s/gluu' % self.distFolder
        self.distTmpFolder = '%s/tmp' % self.distFolder

        self.setup_properties_fn = '%s/setup.properties' % self.install_dir
        self.log = '%s/setup.log' % self.install_dir
        self.logError = '%s/setup_error.log' % self.install_dir
        self.savedProperties = '%s/setup.properties.last' % self.install_dir

        self.gluuOptFolder = '/opt/gluu'
        self.gluuOptBinFolder = '%s/bin' % self.gluuOptFolder
        self.gluuOptSystemFolder = '%s/system' % self.gluuOptFolder
        self.gluuOptPythonFolder = '%s/python' % self.gluuOptFolder
        self.gluuBaseFolder = '/etc/gluu'
        self.configFolder = '%s/conf' % self.gluuBaseFolder
        self.certFolder = '/etc/certs'

        self.oxBaseDataFolder = "/var/ox"
        self.oxPhotosFolder = "/var/ox/photos"
        self.oxTrustRemovedFolder = "/var/ox/identity/removed"
        self.oxTrustCacheRefreshFolder = "/var/ox/identity/cr-snapshots"

        self.etc_hosts = '/etc/hosts'
        self.etc_hostname = '/etc/hostname'

        # OS /etc/default folder
        self.osDefault = '/etc/default'

        self.jython_home = '/opt/jython'

        self.node_home = '/opt/node'
        self.node_initd_script = '%s/static/system/initd/node' % self.install_dir
        self.node_base = '%s/node' % self.gluuOptFolder
        self.node_user_home = '/home/node'

        self.jetty_dist = '/opt/jetty-9.3'
        self.jetty_home = '/opt/jetty'
        self.jetty_base = '%s/jetty' % self.gluuOptFolder
        self.jetty_user_home = '/home/jetty'
        self.jetty_user_home_lib = '%s/lib' % self.jetty_user_home

        # Per-webapp Jetty/Node deployment descriptors; 'memory' ratios are
        # later used to split application_max_ram between the services.
        self.jetty_app_configuration = {
            'oxauth' : {'name' : 'oxauth',
                        'jetty' : {'modules' : 'deploy,http,logging,jsp,servlets,ext,http-forwarded,websocket'},
                        'memory' : {'ratio' : 0.3, "jvm_heap_ration" : 0.7, "max_allowed_mb" : 4096},
                        'installed' : False
                        },
            'identity' : {'name' : 'identity',
                          'jetty' : {'modules' : 'deploy,http,logging,jsp,ext,http-forwarded,websocket'},
                          'memory' : {'ratio' : 0.2, "jvm_heap_ration" : 0.7, "max_allowed_mb" : 2048},
                          'installed' : False
                          },
            'idp' : {'name' : 'idp',
                     'jetty' : {'modules' : 'deploy,http,logging,jsp,http-forwarded'},
                     'memory' : {'ratio' : 0.2, "jvm_heap_ration" : 0.7, "max_allowed_mb" : 1024},
                     'installed' : False
                     },
            'asimba' : {'name' : 'asimba',
                        'jetty' : {'modules' : 'deploy,http,logging,jsp,http-forwarded'},
                        'memory' : {'ratio' : 0.1, "jvm_heap_ration" : 0.7, "max_allowed_mb" : 1024},
                        'installed' : False
                        },
            'oxauth-rp' : {'name' : 'oxauth-rp',
                           'jetty' : {'modules' : 'deploy,http,logging,jsp,http-forwarded,websocket'},
                           'memory' : {'ratio' : 0.1, "jvm_heap_ration" : 0.7, "max_allowed_mb" : 512},
                           'installed' : False
                           },
            'passport' : {'name' : 'passport',
                          'node' : {},
                          'memory' : {'ratio' : 0.1, "max_allowed_mb" : 1024},
                          'installed' : False
                          }
        }

        # Regex edits applied to stock Jetty config files (disable directory
        # listing and the default contexts page).
        self.app_custom_changes = {
            'jetty' : {
                'name' : 'jetty',
                'files' : [{
                    'path' : '%s/etc/webdefault.xml' % self.jetty_home,
                    'replace' : [
                        {
                            'pattern' : r'(\<param-name\>dirAllowed<\/param-name\>)(\s*)(\<param-value\>)true(\<\/param-value\>)',
                            'update' : r'\1\2\3false\4'
                        }
                    ]
                },
                    {
                        'path' : '%s/etc/jetty.xml' % self.jetty_home,
                        'replace' : [
                            {
                                'pattern' : '<New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler"/>',
                                'update' : '<New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler">\n\t\t\t\t <Set name="showContexts">false</Set>\n\t\t\t </New>'
                            }
                        ]
                    }]
            }
        }

        # Shibboleth IdP v3 layout
        self.idp3Folder = "/opt/shibboleth-idp"
        self.idp3MetadataFolder = "%s/metadata" % self.idp3Folder
        self.idp3MetadataCredentialsFolder = "%s/credentials" % self.idp3MetadataFolder
        self.idp3LogsFolder = "%s/logs" % self.idp3Folder
        self.idp3LibFolder = "%s/lib" % self.idp3Folder
        self.idp3ConfFolder = "%s/conf" % self.idp3Folder
        self.idp3ConfAuthnFolder = "%s/conf/authn" % self.idp3Folder
        self.idp3CredentialsFolder = "%s/credentials" % self.idp3Folder
        self.idp3WebappFolder = "%s/webapp" % self.idp3Folder
        # self.idp3WarFolder = "%s/war"

        # Deployment-specific values collected interactively (see
        # check_properties); all default to None until prompted.
        self.hostname = None
        self.ip = None
        self.orgName = None
        self.orgShortName = None
        self.countryCode = None
        self.city = None
        self.state = None
        self.admin_email = None
        self.encoded_ox_ldap_pw = None
        self.encoded_ldap_pw = None
        self.encoded_shib_jks_pw = None
        self.application_max_ram = None    # in MB
        self.encode_salt = None

        self.baseInum = None
        self.inumOrg = None
        self.inumAppliance = None
        self.inumOrgFN = None
        self.inumApplianceFN = None
        # NOTE(review): this name looks like a merge of 'ldapBaseFolder' and
        # 'ldapPass'; check_properties reads self.ldapPass, which is never
        # initialized here — confirm where ldapPass is first assigned.
        self.ldapBaseFolderldapPass = None

        self.oxauth_client_id = None
        self.oxauthClient_pw = None
        self.oxauthClient_encoded_pw = None

        self.oxTrustConfigGeneration = None

        # Left as literal %(...)s placeholders for later template rendering.
        self.oxd_hostname = '%(oxd_hostname)s'
        self.oxd_port = '%(oxd_port)s'

        self.outputFolder = '%s/output' % self.install_dir
        self.templateFolder = '%s/templates' % self.install_dir
        self.staticFolder = '%s/static' % self.install_dir

        self.extensionFolder = '%s/extension' % self.staticFolder

        self.oxauth_error_json = '%s/oxauth/oxauth-errors.json' % self.staticFolder

        self.oxauth_openid_jwks_fn = "%s/oxauth-keys.json" % self.certFolder
        self.oxauth_openid_jks_fn = "%s/oxauth-keys.jks" % self.certFolder
        self.oxauth_openid_jks_pass = None

        self.httpdKeyPass = None
        self.httpdKeyFn = '%s/httpd.key' % self.certFolder
        self.httpdCertFn = '%s/httpd.crt' % self.certFolder
        self.shibJksPass = None
        self.shibJksFn = '%s/shibIDP.jks' % self.certFolder
        self.asimbaJksPass = None
        self.asimbaJksFn = '%s/asimbaIDP.jks' % self.certFolder

        # OpenDJ / LDAP server configuration
        self.ldapTrustStoreFn = None
        self.encoded_ldapTrustStorePass = None

        self.opendj_cert_fn = '%s/opendj.crt' % self.certFolder
        self.opendj_p12_fn = '%s/opendj.pkcs12' % self.certFolder
        self.opendj_p12_pass = None

        self.ldap_type = 'opendj'
        self.opendj_ldap_binddn = 'cn=directory manager'
        self.ldap_hostname = "localhost"
        self.ldap_port = '1389'
        self.ldaps_port = '1636'
        self.ldap_jmx_port = '1689'
        self.ldap_admin_port = '4444'
        self.ldapBaseFolder = '/opt/opendj'

        self.ldapSetupCommand = '%s/setup' % self.ldapBaseFolder
        self.ldapDsconfigCommand = "%s/bin/dsconfig" % self.ldapBaseFolder
        self.ldapDsCreateRcCommand = "%s/bin/create-rc-script" % self.ldapBaseFolder
        self.ldapDsJavaPropCommand = "%s/bin/dsjavaproperties" % self.ldapBaseFolder

        self.ldap_user_home = '/home/ldap'
        self.ldapPassFn = '%s/.pw' % self.ldap_user_home
        self.ldap_backend_type = 'je'
        self.importLdifCommand = '%s/bin/import-ldif' % self.ldapBaseFolder
        self.ldapModifyCommand = '%s/bin/ldapmodify' % self.ldapBaseFolder
        self.loadLdifCommand = self.ldapModifyCommand
        self.gluuScriptFiles = ['%s/static/scripts/logmanager.sh' % self.install_dir,
                                '%s/static/scripts/testBind.py' % self.install_dir]
        self.openDjIndexJson = '%s/static/opendj/index.json' % self.install_dir
        self.openDjSchemaFolder = "%s/config/schema" % self.ldapBaseFolder
        self.openDjschemaFiles = ["%s/static/opendj/96-eduperson.ldif" % self.install_dir,
                                  "%s/static/opendj/101-ox.ldif" % self.install_dir,
                                  "%s/static/opendj/77-customAttributes.ldif" % self.install_dir]
        # A deprecated-schema folder, when present, replaces the default list.
        if os.path.exists(os.path.join(self.install_dir, 'static/opendj/deprecated')):
            self.openDjschemaFiles = glob.glob(os.path.join(self.install_dir, 'static/opendj/deprecated/*.ldif'))

        self.opendj_init_file = '%s/static/opendj/opendj' % self.install_dir
        self.opendj_service_centos7 = '%s/static/opendj/systemd/opendj.service' % self.install_dir

        self.redhat_services = ['httpd', 'rsyslog']
        self.debian_services = ['apache2', 'rsyslog']

        self.apache_start_script = '/etc/init.d/httpd'

        self.defaultTrustStoreFN = '%s/jre/lib/security/cacerts' % self.jre_home
        self.defaultTrustStorePW = 'changeit'

        # OpenLDAP (Symas) alternative backend configuration
        self.openldapBaseFolder = '/opt/symas'
        self.openldapBinFolder = '/opt/symas/bin'
        self.openldapConfFolder = '/opt/symas/etc/openldap'
        self.openldapRootUser = "cn=directory manager,o=gluu"
        self.openldapSiteUser = "cn=directory manager,o=site"
        self.openldapKeyPass = None
        self.openldapTLSCACert = '%s/openldap.pem' % self.certFolder
        self.openldapTLSCert = '%s/openldap.crt' % self.certFolder
        self.openldapTLSKey = '%s/openldap.key' % self.certFolder
        self.openldapJksPass = None
        self.openldapJksFn = '%s/openldap.jks' % self.certFolder
        self.openldapP12Fn = '%s/openldap.pkcs12' % self.certFolder

        self.passportSpKeyPass = None
        self.passportSpTLSCACert = '%s/passport-sp.pem' % self.certFolder
        self.passportSpTLSCert = '%s/passport-sp.crt' % self.certFolder
        self.passportSpTLSKey = '%s/passport-sp.key' % self.certFolder
        self.passportSpJksPass = None
        self.passportSpJksFn = '%s/passport-sp.jks' % self.certFolder

        self.openldapSlapdConf = '%s/slapd.conf' % self.outputFolder
        self.openldapSymasConf = '%s/symas-openldap.conf' % self.outputFolder
        self.openldapRootSchemaFolder = "%s/schema" % self.gluuOptFolder
        self.openldapSchemaFolder = "%s/openldap" % self.openldapRootSchemaFolder
        self.openldapLogDir = "/var/log/openldap/"
        self.openldapSyslogConf = "%s/static/openldap/openldap-syslog.conf" % self.install_dir
        self.openldapLogrotate = "%s/static/openldap/openldap_logrotate" % self.install_dir
        self.openldapSetupAccessLog = False
        self.accessLogConfFile = "%s/static/openldap/accesslog.conf" % self.install_dir
        self.gluuAccessLogConf = "%s/static/openldap/o_gluu_accesslog.conf" % self.install_dir
        self.opendlapIndexDef = "%s/static/openldap/index.json" % self.install_dir

        # Stuff that gets rendered; filename is necessary. Full path should
        # reflect final path if the file must be copied after its rendered.
        self.passport_saml_config = '%s/passport-saml-config.json' % self.configFolder
        self.oxauth_config_json = '%s/oxauth-config.json' % self.outputFolder
        self.oxtrust_config_json = '%s/oxtrust-config.json' % self.outputFolder
        self.oxtrust_cache_refresh_json = '%s/oxtrust-cache-refresh.json' % self.outputFolder
        self.oxtrust_import_person_json = '%s/oxtrust-import-person.json' % self.outputFolder
        self.oxidp_config_json = '%s/oxidp-config.json' % self.outputFolder
        self.oxasimba_config_json = '%s/oxasimba-config.json' % self.outputFolder
        self.gluu_python_base = '%s/python' % self.gluuOptFolder
        self.gluu_python_readme = '%s/libs/python.txt' % self.gluuOptPythonFolder
        self.ox_ldap_properties = '%s/ox-ldap.properties' % self.configFolder
        self.oxauth_static_conf_json = '%s/oxauth-static-conf.json' % self.outputFolder
        self.oxTrust_log_rotation_configuration = "%s/conf/oxTrustLogRotationConfiguration.xml" % self.gluuBaseFolder
        self.apache2_conf = '%s/httpd.conf' % self.outputFolder
        self.apache2_ssl_conf = '%s/https_gluu.conf' % self.outputFolder
        self.apache2_24_conf = '%s/httpd_2.4.conf' % self.outputFolder
        self.apache2_ssl_24_conf = '%s/https_gluu.conf' % self.outputFolder
        self.ldif_base = '%s/base.ldif' % self.outputFolder
        self.ldif_appliance = '%s/appliance.ldif' % self.outputFolder
        self.ldif_attributes = '%s/attributes.ldif' % self.outputFolder
        self.ldif_scopes = '%s/scopes.ldif' % self.outputFolder
        self.ldif_clients = '%s/clients.ldif' % self.outputFolder
        self.ldif_people = '%s/people.ldif' % self.outputFolder
        self.ldif_groups = '%s/groups.ldif' % self.outputFolder
        self.ldif_site = '%s/static/cache-refresh/o_site.ldif' % self.install_dir
        self.ldif_scripts = '%s/scripts.ldif' % self.outputFolder
        self.ldif_configuration = '%s/configuration.ldif' % self.outputFolder
        self.ldif_scim = '%s/scim.ldif' % self.outputFolder
        self.ldif_asimba = '%s/asimba.ldif' % self.outputFolder
        self.ldif_passport = '%s/passport.ldif' % self.outputFolder
        self.ldif_idp = '%s/oxidp.ldif' % self.outputFolder
        self.ldif_scripts_cred_manager = '%s/scripts_cred_manager.ldif' % self.outputFolder
        self.passport_config = '%s/passport-config.json' % self.configFolder
        self.encode_script = '%s/bin/encode.py' % self.gluuOptFolder
        self.network = "/etc/sysconfig/network"
        self.system_profile_update = '%s/system_profile' % self.outputFolder

        self.asimba_conf_folder = '%s/asimba' % self.configFolder
        self.asimba_configuration_xml = '%s/asimba.xml' % self.asimba_conf_folder
        self.asimba_configuration = '%s/asimba.xml' % self.outputFolder
        self.asimba_selector_configuration = '%s/asimba-selector.xml' % self.outputFolder
        self.asimba_properties = '%s/asimba.properties' % self.outputFolder
        self.asimba_selector_configuration_xml = '%s/asimba-selector.xml' % self.asimba_conf_folder

        self.staticIDP3FolderConf = '%s/static/idp3/conf' % self.install_dir
        self.staticIDP3FolderMetadata = '%s/static/idp3/metadata' % self.install_dir
        self.idp3_configuration_properties = 'idp.properties'
        self.idp3_configuration_ldap_properties = 'ldap.properties'
        self.idp3_configuration_saml_nameid = 'saml-nameid.properties'
        self.idp3_configuration_services = 'services.properties'
        self.idp3_configuration_password_authn = 'authn/password-authn-config.xml'
        self.idp3_metadata = 'idp-metadata.xml'

        self.cred_manager_config = '%s/cred-manager.json' % self.outputFolder

        ### rsyslog file customised for init.d
        self.rsyslogUbuntuInitFile = "%s/static/system/ubuntu/rsyslog" % self.install_dir

        self.ldap_setup_properties = '%s/opendj-setup.properties' % self.templateFolder

        # oxAuth/oxTrust Base64 configuration files
        self.pairwiseCalculationKey = None
        self.pairwiseCalculationSalt = None

        # OpenID key generation default setting
        self.default_openid_jks_dn_name = 'CN=oxAuth CA Certificates'
        self.default_key_algs = 'RS256 RS384 RS512 ES256 ES384 ES512'
        self.default_key_expiration = 365

        # oxTrust SCIM configuration
        self.scim_rs_client_id = None
        self.scim_rs_client_jwks = None
        self.scim_rs_client_jks_fn = "%s/scim-rs.jks" % self.certFolder
        self.scim_rs_client_jks_pass = None
        self.scim_rs_client_jks_pass_encoded = None

        self.scim_rp_client_id = None
        self.scim_rp_client_jwks = None
        self.scim_rp_client_jks_fn = "%s/scim-rp.jks" % self.outputFolder
        self.scim_rp_client_jks_pass = 'secret'

        # oxPassport Configuration
        self.gluu_passport_base = '%s/passport' % self.node_base
        self.ldif_passport_config = '%s/oxpassport-config.ldif' % self.outputFolder

        self.passport_rs_client_id = None
        self.passport_rs_client_jwks = None
        self.passport_rs_client_jks_fn = "%s/passport-rs.jks" % self.certFolder
        self.passport_rs_client_jks_pass = None
        self.passport_rs_client_jks_pass_encoded = None

        self.passport_rp_client_id = None
        self.passport_rp_client_jwks = None
        self.passport_rp_client_jks_fn = "%s/passport-rp.jks" % self.certFolder
        self.passport_rp_client_cert_alg = "RS512"
        self.passport_rp_client_cert_alias = None
        self.passport_rp_client_cert_fn = "%s/passport-rp.pem" % self.certFolder
        self.passport_rp_client_jks_pass = 'secret'

        # LDIF files rendered and then imported into the directory server.
        self.ldif_files = [self.ldif_base,
                           self.ldif_appliance,
                           self.ldif_attributes,
                           self.ldif_scopes,
                           self.ldif_clients,
                           self.ldif_people,
                           self.ldif_groups,
                           self.ldif_site,
                           self.ldif_scripts,
                           self.ldif_configuration,
                           self.ldif_scim,
                           self.ldif_asimba,
                           self.ldif_passport,
                           self.ldif_passport_config,
                           self.ldif_idp
                           ]

        # Template map: value marks whether the rendered file must also be
        # copied to its final destination (True) or stays in output (False)
        # — presumably; confirm against render_templates usage.
        self.ce_templates = {self.oxauth_config_json: False,
                             self.passport_saml_config:True,
                             self.gluu_python_readme: True,
                             self.oxtrust_config_json: False,
                             self.oxtrust_cache_refresh_json: False,
                             self.oxtrust_import_person_json: False,
                             self.oxidp_config_json: False,
                             self.oxasimba_config_json: False,
                             self.ox_ldap_properties: True,
                             self.oxauth_static_conf_json: False,
                             self.oxTrust_log_rotation_configuration: True,
                             self.ldap_setup_properties: False,
                             self.apache2_conf: False,
                             self.apache2_ssl_conf: False,
                             self.apache2_24_conf: False,
                             self.apache2_ssl_24_conf: False,
                             self.etc_hosts: False,
                             self.etc_hostname: False,
                             self.ldif_base: False,
                             self.ldif_appliance: False,
                             self.ldif_attributes: False,
                             self.ldif_scopes: False,
                             self.ldif_clients: False,
                             self.ldif_people: False,
                             self.ldif_groups: False,
                             self.ldif_scripts: False,
                             self.ldif_scim: False,
                             self.ldif_asimba: False,
                             self.ldif_passport: False,
                             self.ldif_passport_config: False,
                             self.ldif_idp: False,
                             self.asimba_configuration: False,
                             self.asimba_properties: False,
                             self.asimba_selector_configuration: False,
                             self.network: False,
                             self.cred_manager_config:False,
                             self.ldif_scripts_cred_manager: False,
                             }

        # Jar glob patterns needed by the oxAuth key-generation utility.
        self.oxauth_keys_utils_libs = [ 'bcprov-jdk15on-*.jar', 'bcpkix-jdk15on-*.jar', 'commons-lang-*.jar',
                                        'log4j-*.jar', 'commons-codec-*.jar', 'commons-cli-*.jar', 'commons-io-*.jar',
                                        'jackson-core-*.jar', 'jackson-core-asl-*.jar', 'jackson-mapper-asl-*.jar', 'jackson-xc-*.jar',
                                        'jettison-*.jar', 'oxauth-model-*.jar', 'oxauth-client-*.jar' ]
def get_ip(self):
testIP = None
detectedIP = None
try:
testSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
detectedIP = [(testSocket.connect(('8.8.8.8', 80)),
testSocket.getsockname()[0],
testSocket.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]
except:
self.logIt("No detected IP address", True)
self.logIt(traceback.format_exc(), True)
while not testIP:
if detectedIP:
testIP = self.getPrompt("Enter IP Address", detectedIP)
else:
testIP = self.getPrompt("Enter IP Address")
if not self.isIP(testIP):
testIP = None
print 'ERROR: The IP Address is invalid. Try again\n'
return testIP
    def check_properties(self):
        """Prompt for any unset deployment properties and derive the rest.

        Interactively collects hostname, IP, org data, then fills in
        default email, random passwords for every unset keystore/LDAP
        secret, the encode salt, and the Gluu inum identifiers.  Safe to
        call on a partially-populated instance: only None/empty values
        are touched.
        """
        self.logIt('Checking properties')
        while not self.hostname:
            testhost = raw_input('Hostname of this server: ').strip()
            # Require a FQDN with at least three labels (e.g. idp.example.org).
            if len(testhost.split('.')) >= 3:
                self.hostname = testhost
            else:
                print 'The hostname has to be at least three domain components. Try again\n'
        while not self.ip:
            self.ip = self.get_ip()
        while not self.orgName:
            self.orgName = raw_input('Organization Name: ').strip()
        while not self.countryCode:
            testCode = raw_input('2 Character Country Code: ').strip()
            if len(testCode) == 2:
                self.countryCode = testCode
            else:
                print 'Country code should only be two characters. Try again\n'
        while not self.city:
            self.city = raw_input('City: ').strip()
        while not self.state:
            self.state = raw_input('State or Province: ').strip()
        if not self.admin_email:
            # Default the support address to the host's registrable domain.
            tld = None
            try:
                tld = ".".join(self.hostname.split(".")[-2:])
            except:
                tld = self.hostname
            self.admin_email = "support@%s" % tld
        if not self.httpdKeyPass:
            self.httpdKeyPass = self.getPW()
        # NOTE(review): self.ldapPass is not initialized in the visible
        # __init__ (only 'ldapBaseFolderldapPass' is) — confirm it is set
        # elsewhere, otherwise this raises AttributeError.
        if not self.ldapPass:
            self.ldapPass = self.getPW()
        if not self.shibJksPass:
            self.shibJksPass = self.getPW()
        if not self.oxauth_openid_jks_pass:
            self.oxauth_openid_jks_pass = self.getPW()
        if not self.asimbaJksPass:
            self.asimbaJksPass = self.getPW()
        if not self.openldapKeyPass:
            self.openldapKeyPass = self.getPW()
        if not self.openldapJksPass:
            self.openldapJksPass = self.getPW()
        if not self.opendj_p12_pass:
            self.opendj_p12_pass = self.getPW()
        if not self.passportSpKeyPass:
            self.passportSpKeyPass = self.getPW()
            self.passportSpJksPass = self.getPW()
        if not self.encode_salt:
            self.encode_salt= self.getPW() + self.getPW()
        # Gluu inum identifiers: '@!' base plus dot-separated quads.
        if not self.baseInum:
            self.baseInum = '@!%s.%s.%s.%s' % tuple([self.getQuad() for i in xrange(4)])
        if not self.inumOrg:
            orgTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.inumOrg = '%s!0001!%s' % (self.baseInum, orgTwoQuads)
        if not self.inumAppliance:
            applianceTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.inumAppliance = '%s!0002!%s' % (self.baseInum, applianceTwoQuads)
        if not self.oxauth_client_id:
            clientTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.oxauth_client_id = '%s!0008!%s' % (self.inumOrg, clientTwoQuads)
        if not self.scim_rs_client_id:
            scimClientTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.scim_rs_client_id = '%s!0008!%s' % (self.inumOrg, scimClientTwoQuads)
        if not self.scim_rp_client_id:
            scimClientTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.scim_rp_client_id = '%s!0008!%s' % (self.inumOrg, scimClientTwoQuads)
        if not self.passport_rs_client_id:
            passportClientTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.passport_rs_client_id = '%s!0008!%s' % (self.inumOrg, passportClientTwoQuads)
        if not self.passport_rp_client_id:
            passportClientTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.passport_rp_client_id = '%s!0008!%s' % (self.inumOrg, passportClientTwoQuads)
        # "FN" variants strip punctuation for filesystem-safe names.
        if not self.inumApplianceFN:
            self.inumApplianceFN = self.inumAppliance.replace('@', '').replace('!', '').replace('.', '')
        if not self.inumOrgFN:
            self.inumOrgFN = self.inumOrg.replace('@', '').replace('!', '').replace('.', '')
        if not self.application_max_ram:
            self.application_max_ram = 3072
def choose_from_list(self, list_of_choices, choice_name="item", default_choice_index=0):
return_value = None
choice_map = {}
chosen_index = 0
print "\nSelect the number for the %s from the following list:" % choice_name
for choice in list_of_choices:
choice_map[chosen_index] = choice
chosen_index += 1
print " [%i] %s" % (chosen_index, choice)
while not return_value:
choice_number = self.getPrompt("Please select a number listed above", str(default_choice_index + 1))
try:
choice_number = int(choice_number) - 1
if (choice_number >= 0) & (choice_number < len(list_of_choices)):
return_value = choice_map[choice_number]
else:
print '"%i" is not a valid choice' % (choice_number + 1)
except:
print 'Cannot convert "%s" to a number' % choice_number
self.logIt(traceback.format_exc(), True)
return return_value
# = File system =================================================================
def findFiles(self, filePatterns, filesFolder):
foundFiles = []
try:
for filePattern in filePatterns:
fileFullPathPattern = "%s/%s" % (filesFolder, filePattern)
for fileFullPath in glob.iglob(fileFullPathPattern):
foundFiles.append(fileFullPath)
except:
self.logIt("Error finding files %s in folder %s" % (":".join(filePatterns), filesFolder), True)
self.logIt(traceback.format_exc(), True)
return foundFiles
def readFile(self, inFilePath):
inFilePathText = None
try:
f = open(inFilePath)
inFilePathText = f.read()
f.close
except:
self.logIt("Error reading %s" % inFilePathText, True)
self.logIt(traceback.format_exc(), True)
return inFilePathText
def writeFile(self, outFilePath, text):
inFilePathText = None
try:
f = open(outFilePath, 'w')
f.write(text)
f.close()
except:
self.logIt("Error writing %s" % inFilePathText, True)
self.logIt(traceback.format_exc(), True)
return inFilePathText
def commentOutText(self, text):
    """Return text with every line prefixed by '#' (comment it out)."""
    return "\n".join('#%s' % current_line for current_line in text.split('\n'))
def replaceInText(self, text, pattern, update):
    """Substitute every occurrence of pattern in text with update.

    Runs in DOTALL and MULTILINE mode to match the template-patching
    semantics expected by applyChangesInFiles.
    """
    return re.sub(pattern, update, text, flags=re.DOTALL | re.M)
def applyChangesInFiles(self, changes):
    """Apply a set of regex replacements to files on disk.

    changes layout:
        {'name': <label used in log messages>,
         'files': [{'path': <file to patch>,
                    'replace': [{'pattern': ..., 'update': ...}, ...]}, ...]}
    Each target file is first backed up to <path>.bak, then rewritten
    with every replacement applied in order.
    """
    self.logIt("Applying changes to %s files..." % changes['name'])
    for change in changes['files']:
        file = change['path']
        text = self.readFile(file)
        file_backup = '%s.bak' % file
        self.writeFile(file_backup, text)
        self.logIt("Created backup of %s file %s..." % (changes['name'], file_backup))
        for replace in change['replace']:
            text = self.replaceInText(text, replace['pattern'], replace['update'])
        self.writeFile(file, text)
        self.logIt("Wrote updated %s file %s..." % (changes['name'], file))
def copyFile(self, inFile, destFolder):
    """Copy inFile into destFolder; failures are logged, never raised."""
    try:
        shutil.copy(inFile, destFolder)
        self.logIt("Copied %s to %s" % (inFile, destFolder))
    except:
        self.logIt("Error copying %s to %s" % (inFile, destFolder), True)
        self.logIt(traceback.format_exc(), True)
def copyTree(self, src, dst, overwrite=False):
    """Recursively copy src into dst, creating dst if needed.

    A file is replaced only when overwrite is True, when it is missing,
    or when the source mtime is more than one second newer than the
    destination (the 1-second slack tolerates coarse filesystem
    timestamp resolution).
    """
    try:
        if not os.path.exists(dst):
            os.makedirs(dst)
        for item in os.listdir(src):
            s = os.path.join(src, item)
            d = os.path.join(dst, item)
            if os.path.isdir(s):
                self.copyTree(s, d, overwrite)
            else:
                if overwrite and os.path.exists(d):
                    self.removeFile(d)
                if not os.path.exists(d) or os.stat(s).st_mtime - os.stat(d).st_mtime > 1:
                    shutil.copy2(s, d)  # copy2 preserves mtime for the next comparison
        self.logIt("Copied tree %s to %s" % (src, dst))
    except:
        self.logIt("Error copying tree %s to %s" % (src, dst), True)
        self.logIt(traceback.format_exc(), True)
def createDirs(self, name):
    """Create directory name (and parents) with mode 0700 if missing."""
    try:
        if not os.path.exists(name):
            os.makedirs(name, 0700)  # Python-2 octal literal (0o700 in py3)
            self.logIt('Created dir: %s' % name)
    except:
        self.logIt("Error making directory %s" % name, True)
        self.logIt(traceback.format_exc(), True)
def removeDirs(self, name):
    """Recursively delete directory name if present; errors only logged."""
    try:
        if os.path.exists(name):
            shutil.rmtree(name)
            self.logIt('Removed dir: %s' % name)
    except:
        self.logIt("Error removing directory %s" % name, True)
        self.logIt(traceback.format_exc(), True)
def removeFile(self, fileName):
    """Delete fileName if it exists; failures are logged, never raised."""
    try:
        if not os.path.exists(fileName):
            return
        os.remove(fileName)
        self.logIt('Removed file: %s' % fileName)
    except:
        self.logIt("Error removing file %s" % fileName, True)
        self.logIt(traceback.format_exc(), True)
# = Utilities ====================================================================
def logIt(self, msg, errorLog=False):
    """Append a timestamped msg to the setup log (self.log).

    When errorLog is True the line is also written to self.logError.
    Handles are opened/closed per call so partial runs still flush.
    """
    if errorLog:
        f = open(self.logError, 'a')
        f.write('%s %s\n' % (time.strftime('%X %x'), msg))
        f.close()
    f = open(self.log, 'a')
    f.write('%s %s\n' % (time.strftime('%X %x'), msg))
    f.close()
def appendLine(self, line, fileName=False):
    """Append line plus a trailing newline to fileName.

    The fileName=False default is kept for interface compatibility;
    callers always pass a real path.
    """
    try:
        # 'with' closes the handle even if write() raises.
        with open(fileName, 'a') as f:
            f.write('%s\n' % line)
    except:
        # Corrected message: this method appends, it does not load.
        self.logIt("Error appending to file %s" % fileName)
def set_ulimits(self):
    """Raise open-file limits for the ldap, web-server and jetty users.

    Appends nofile entries to /etc/security/limits.conf; repeated runs
    append duplicate lines. The web-server user is 'apache' on the
    RHEL family and 'www-data' elsewhere.
    """
    try:
        if self.os_type in ['centos', 'redhat', 'fedora']:
            apache_user = 'apache'
        else:
            apache_user = 'www-data'
        self.appendLine("ldap soft nofile 131072", "/etc/security/limits.conf")
        self.appendLine("ldap hard nofile 262144", "/etc/security/limits.conf")
        self.appendLine("%s soft nofile 131072" % apache_user, "/etc/security/limits.conf")
        self.appendLine("%s hard nofile 262144" % apache_user, "/etc/security/limits.conf")
        self.appendLine("jetty soft nofile 131072", "/etc/security/limits.conf")
        self.appendLine("jetty hard nofile 262144", "/etc/security/limits.conf")
    except:
        self.logIt("Could not set limits.")
        self.logIt(traceback.format_exc(), True)
def load_properties(self, fn):
    """Load a Java-style .properties file directly into self.__dict__.

    The string values 'True'/'False' are coerced to Python booleans;
    everything else stays a string.
    NOTE(review): an identical load_properties is defined again later
    in this class; the later definition silently overrides this one.
    """
    self.logIt('Loading Properties %s' % fn)
    p = Properties.Properties()
    try:
        p.load(open(fn))
        properties_list = p.keys()
        for prop in properties_list:
            try:
                self.__dict__[prop] = p[prop]
                if p[prop] == 'True':
                    self.__dict__[prop] = True
                elif p[prop] == 'False':
                    self.__dict__[prop] = False
            except:
                self.logIt("Error loading property %s" % prop)
                self.logIt(traceback.format_exc(), True)
    except:
        self.logIt("Error loading properties", True)
        self.logIt(traceback.format_exc(), True)
def load_json(self, fn):
    """Parse the JSON file fn; return the decoded object, or None on
    any read/parse failure (which is logged)."""
    self.logIt('Loading JSON from %s' % fn)
    try:
        with open(fn) as json_file:
            return json.loads(json_file.read())
    except:
        self.logIt("Unable to read or parse json file from %s" % fn, True)
        self.logIt(traceback.format_exc(), True)
        return None
def obscure(self, data=""):
    """Encrypt data with 3DES/ECB keyed by self.encode_salt; return base64.

    Presumably the counterpart of '/opt/gluu/bin/encode.py -D' used
    elsewhere in this script to decode — verify against that helper.
    """
    engine = triple_des(self.encode_salt, ECB, pad=None, padmode=PAD_PKCS5)
    data = data.encode('ascii')
    en_data = engine.encrypt(data)
    return base64.b64encode(en_data)
def copy_output(self):
    """Copy rendered templates from self.outputFolder to their final paths.

    self.ce_templates maps destination path -> truthy flag; only flagged
    entries are copied. Missing destination directories are created
    (note: the 'Created destination folder' line is logged before the
    makedirs call actually runs).
    """
    self.logIt("Copying rendered templates to final destination")
    for dest_fn in self.ce_templates.keys():
        if self.ce_templates[dest_fn]:
            fn = os.path.split(dest_fn)[-1]
            output_fn = os.path.join(self.outputFolder, fn)
            try:
                self.logIt("Copying %s to %s" % (output_fn, dest_fn))
                dest_dir = os.path.dirname(dest_fn)
                if not os.path.exists(dest_dir):
                    self.logIt("Created destination folder %s" % dest_dir)
                    os.makedirs(dest_dir)
                shutil.copyfile(output_fn, dest_fn)
            except:
                self.logIt("Error writing %s to %s" % (output_fn, dest_fn), True)
                self.logIt(traceback.format_exc(), True)
def detect_os_type(self):
    """Best-effort detection of the host distribution.

    Reads /etc/redhat-release, then /etc/os-release; if neither file is
    readable, or the content is unrecognized, falls back to asking the
    user via choose_from_list.
    """
    # TODO: Change this to support more distros. For example according to
    # http://unix.stackexchange.com/questions/6345/how-can-i-get-distribution-name-and-version-number-in-a-simple-shell-script
    distro_info = self.readFile('/etc/redhat-release')
    if distro_info is None:
        distro_info = self.readFile('/etc/os-release')
    if distro_info is None:
        # Fixed: the original fell through to "'CentOS' in None" here and
        # raised TypeError when neither release file could be read.
        return self.choose_from_list(self.os_types, "Operating System")
    if 'CentOS' in distro_info:
        return self.os_types[0]
    elif 'Red Hat' in distro_info:
        return self.os_types[1]
    elif 'Ubuntu' in distro_info:
        return self.os_types[3]
    elif 'Debian' in distro_info:
        return self.os_types[4]
    else:
        return self.choose_from_list(self.os_types, "Operating System")
def detect_initd(self):
    """Return the name of PID 1 (e.g. 'systemd' or 'init').

    /proc/1/status begins with "Name:\t<name>", so whitespace-split
    token 1 is the init daemon's process name.
    """
    return open(os.path.join('/proc/1/status'), 'r').read().split()[1]
def determineOpenDJVersion(self):
    """Return '2.6' or '3.0' based on the installed OpenDJ buildinfo.

    The first whitespace-separated token of
    /opt/opendj/template/config/buildinfo is the version string.
    """
    f = open('/opt/opendj/template/config/buildinfo', 'r')
    encode_script = f.read().split()[0]
    f.close()
    if re.match(r'2\.6\.0\..*', encode_script):
        return "2.6"
    # Anything that is not a 2.6.0.x build is treated as 3.0.
    return "3.0"
def getPW(self, size=12, chars=string.ascii_uppercase + string.digits + string.ascii_lowercase):
    """Return a random alphanumeric password of length size.

    Uses string.ascii_lowercase instead of the locale-dependent (and
    Python-3-removed) string.lowercase, so the alphabet is stable.
    NOTE(review): random is not a CSPRNG; random.SystemRandom would be
    preferable for password generation.
    """
    return ''.join(random.choice(chars) for _ in range(size))
def ldap_encode(self, password):
    """Return an LDAP-style {SSHA} salted-SHA1 hash of password.

    Python-2 only: concatenating digest bytes into a str and
    .encode('base64') both rely on py2 str/bytes equivalence.
    """
    salt = os.urandom(4)
    sha = hashlib.sha1(password)
    sha.update(salt)  # SSHA = SHA1(password + salt), salt appended to digest
    b64encoded = '{0}{1}'.format(sha.digest(), salt).encode('base64').strip()
    encrypted_password = '{{SSHA}}{0}'.format(b64encoded)
    return encrypted_password
def encode_passwords(self):
    """Derive every encoded/encrypted password the templates need.

    LDAP password gets an {SSHA} hash; the rest are 3DES-obscured.
    Also generates a fresh random password for the oxAuth client.
    """
    self.logIt("Encoding passwords")
    try:
        self.encoded_ldap_pw = self.ldap_encode(self.ldapPass)
        self.encoded_shib_jks_pw = self.obscure(self.shibJksPass)
        self.encoded_ox_ldap_pw = self.obscure(self.ldapPass)
        self.encoded_openldapJksPass = self.obscure(self.openldapJksPass)
        self.encoded_opendj_p12_pass = self.obscure(self.opendj_p12_pass)
        self.oxauthClient_pw = self.getPW()
        self.oxauthClient_encoded_pw = self.obscure(self.oxauthClient_pw)
    except:
        self.logIt("Error encoding passwords", True)
        self.logIt(traceback.format_exc(), True)
def encode_test_passwords(self):
    """Generate random passwords (clear + obscured) for test clients 2-4.

    Values go into templateRenderingDict so test templates can use them.
    """
    self.logIt("Encoding test passwords")
    try:
        self.templateRenderingDict['oxauthClient_2_pw'] = self.getPW()
        self.templateRenderingDict['oxauthClient_2_encoded_pw'] = self.obscure(self.templateRenderingDict['oxauthClient_2_pw'])
        self.templateRenderingDict['oxauthClient_3_pw'] = self.getPW()
        self.templateRenderingDict['oxauthClient_3_encoded_pw'] = self.obscure(self.templateRenderingDict['oxauthClient_3_pw'])
        self.templateRenderingDict['oxauthClient_4_pw'] = self.getPW()
        self.templateRenderingDict['oxauthClient_4_encoded_pw'] = self.obscure(self.templateRenderingDict['oxauthClient_4_pw'])
    except:
        self.logIt("Error encoding test passwords", True)
        self.logIt(traceback.format_exc(), True)
def fomatWithDict(self, text, dictionary):
    """%-format text against dictionary, escaping stray '%' signs first.

    Any '%' that does not introduce a %(key)s placeholder is doubled so
    templates may contain literal percent characters.
    (Name keeps the historical 'fomat' typo for caller compatibility.)
    """
    escaped = re.sub(r"%([^\(])", r"%%\1", text)
    escaped = re.sub(r"%$", r"%%", escaped)  # a lone '%' at end of text
    return escaped % dictionary
def renderTemplateInOut(self, filePath, templateFolder, outputFolder):
    """Render one %-style template file into outputFolder.

    Only the basename of filePath is used; substitution values come
    from self.__dict__ merged with self.templateRenderingDict (the
    latter wins), passed through fomatWithDict so literal '%' survives.
    """
    self.logIt("Rendering template %s" % filePath)
    fn = os.path.split(filePath)[-1]
    f = open(os.path.join(templateFolder, fn))
    template_text = f.read()
    f.close()
    # Create output folder if needed
    if not os.path.exists(outputFolder):
        os.makedirs(outputFolder)
    newFn = open(os.path.join(outputFolder, fn), 'w+')
    newFn.write(self.fomatWithDict(template_text, self.merge_dicts(self.__dict__, self.templateRenderingDict)))
    newFn.close()
def renderTemplate(self, filePath):
    """Render filePath using the default template/output folders."""
    self.renderTemplateInOut(filePath, self.templateFolder, self.outputFolder)
def render_templates(self):
    """Render every template registered in self.ce_templates.

    A failure on one template is logged and does not stop the rest.
    """
    self.logIt("Rendering templates")
    for fullPath in self.ce_templates.keys():
        try:
            self.renderTemplate(fullPath)
        except:
            self.logIt("Error writing template %s" % fullPath, True)
            self.logIt(traceback.format_exc(), True)
def render_custom_templates(self, fullPath):
    """Render every file under fullPath into a sibling '<fullPath>.output' dir."""
    output_dir = fullPath + '.output'
    self.logIt("Rendering custom templates")
    self.logIt("Rendering custom templates from %s to %s" % (fullPath, output_dir))
    try:
        self.run([self.cmd_mkdir, '-p', output_dir])
    except:
        self.logIt("Error creating output directory %s" % output_dir, True)
        self.logIt(traceback.format_exc(), True)
    try:
        # get_filepaths presumably lists files under fullPath — defined
        # elsewhere in this class; verify its semantics there.
        for filename in self.get_filepaths(fullPath):
            self.renderTemplateInOut(filename, fullPath, output_dir)
    except:
        self.logIt("Error writing template %s" % fullPath, True)
        self.logIt(traceback.format_exc(), True)
def render_configuration_template(self):
    """Render only the main configuration LDIF template (self.ldif_configuration)."""
    self.logIt("Rendering configuration templates")
    fullPath = self.ldif_configuration
    try:
        self.renderTemplate(fullPath)
    except:
        self.logIt("Error writing template %s" % fullPath, True)
        self.logIt(traceback.format_exc(), True)
def render_templates_folder(self, templatesFolder):
    """Recursively render every template under templatesFolder.

    The path component after the last '.../templates/' prefix is kept
    relative to self.outputFolder, preserving the directory layout.
    Unlike renderTemplateInOut, substitution here is plain %-formatting
    (no escaping), so a literal '%' in these templates would raise.
    """
    self.logIt("Rendering templates folder: %s" % templatesFolder)
    for templateBase, templateDirectories, templateFiles in os.walk(templatesFolder):
        for templateFile in templateFiles:
            fullPath = '%s/%s' % (templateBase, templateFile)
            try:
                self.logIt("Rendering test template %s" % fullPath)
                # Remove ./template/ and everything left of it from fullPath
                fn = re.match(r'(^.+/templates/)(.*$)', fullPath).groups()[1]
                f = open(os.path.join(self.templateFolder, fn))
                template_text = f.read()
                f.close()
                fullOutputFile = os.path.join(self.outputFolder, fn)
                # Create full path to the output file
                fullOutputDir = os.path.dirname(fullOutputFile)
                if not os.path.exists(fullOutputDir):
                    os.makedirs(fullOutputDir)
                newFn = open(fullOutputFile, 'w+')
                newFn.write(template_text % self.merge_dicts(self.__dict__, self.templateRenderingDict))
                newFn.close()
            except:
                self.logIt("Error writing template %s" % fullPath, True)
                self.logIt(traceback.format_exc(), True)
def render_test_templates(self):
    """Render every template under <templateFolder>/test/."""
    self.logIt("Rendering test templates")
    test_templates_dir = '%s/test/' % self.templateFolder
    self.render_templates_folder(test_templates_dir)
# args = command + args, i.e. ['ls', '-ltr']
def run(self, args, cwd=None, env=None, useWait=False, shell=False):
    """Run an external command and log its output (or exit code).

    With useWait=True only the exit status is logged; otherwise the
    process is drained via communicate() and stdout is logged normally
    while stderr goes to the error log.
    NOTE(review): useWait=True combined with PIPE stdout/stderr can
    deadlock if the child fills a pipe buffer — confirm callers only
    use it for quiet commands.
    """
    print "Run:", args
    self.logIt('Running: %s' % ' '.join(args))
    try:
        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env, shell=shell)
        if useWait:
            code = p.wait()
            self.logIt('Run: %s with result code: %d' % (' '.join(args), code) )
        else:
            output, err = p.communicate()
            if output:
                self.logIt(output)
            if err:
                # stderr output is logged as an error but not raised.
                self.logIt(err, True)
    except:
        self.logIt("Error running command : %s" % " ".join(args), True)
        self.logIt(traceback.format_exc(), True)
def createLdapPw(self):
    """Write the LDAP admin password to self.ldapPassFn, owned ldap:ldap.

    The file is consumed by dsconfig's --bindPasswordFile and removed
    again by deleteLdapPw.
    """
    f = open(self.ldapPassFn, 'w')
    f.write(self.ldapPass)
    f.close()
    self.run([self.cmd_chown, 'ldap:ldap', self.ldapPassFn])
def deleteLdapPw(self):
    """Remove the on-disk LDAP password file and the setup properties copy."""
    if os.path.exists(self.ldapPassFn):
        os.remove(self.ldapPassFn)
    if os.path.exists(os.path.join(self.ldapBaseFolder, 'opendj-setup.properties')):
        os.remove(os.path.join(self.ldapBaseFolder, 'opendj-setup.properties'))
def configure_opendj(self):
    """Apply post-install dsconfig settings to the local OpenDJ instance.

    Each change is executed via dsconfig as the 'ldap' user,
    authenticating with the password file written by createLdapPw.
    """
    self.logIt("Configuring OpenDJ")
    # ACI removed below so anonymous read access is revoked.
    opendj_prop_name = 'global-aci:\'(targetattr!="userPassword||authPassword||debugsearchindex||changes||changeNumber||changeType||changeTime||targetDN||newRDN||newSuperior||deleteOldRDN")(version 3.0; acl "Anonymous read access"; allow (read,search,compare) userdn="ldap:///anyone";)\''
    config_changes = [['set-global-configuration-prop', '--set', 'single-structural-objectclass-behavior:accept'],
                      ['set-attribute-syntax-prop', '--syntax-name', '"Directory String"', '--set', 'allow-zero-length-values:true'],
                      ['set-password-policy-prop', '--policy-name', '"Default Password Policy"', '--set', 'allow-pre-encoded-passwords:true'],
                      ['set-log-publisher-prop', '--publisher-name', '"File-Based Audit Logger"', '--set', 'enabled:true'],
                      ['create-backend', '--backend-name', 'site', '--set', 'base-dn:o=site', '--type %s' % self.ldap_backend_type, '--set', 'enabled:true'],
                      ['set-connection-handler-prop', '--handler-name', '"LDAP Connection Handler"', '--set', 'enabled:false'],
                      ['set-access-control-handler-prop', '--remove', '%s' % opendj_prop_name],
                      ['set-global-configuration-prop', '--set', 'reject-unauthenticated-requests:true'],
                      ['set-password-policy-prop', '--policy-name', '"Default Password Policy"', '--set', 'default-password-storage-scheme:"Salted SHA-512"'],
                      # NOTE(review): 'reject-unauthenticated-requests' appears a
                      # second time here; the repeat is redundant.
                      ['set-global-configuration-prop', '--set', 'reject-unauthenticated-requests:true']
                      ]
    for changes in config_changes:
        dsconfigCmd = " ".join(['cd %s/bin ; ' % self.ldapBaseFolder,
                                self.ldapDsconfigCommand,
                                '--trustAll',
                                '--no-prompt',
                                '--hostname',
                                self.ldap_hostname,
                                '--port',
                                self.ldap_admin_port,
                                '--bindDN',
                                '"%s"' % self.ldap_binddn,
                                '--bindPasswordFile',
                                self.ldapPassFn] + changes)
        self.run(['/bin/su',
                  'ldap',
                  '-c',
                  dsconfigCmd])
def export_opendj_public_cert(self):
    """Export OpenDJ's server certificate and trust material.

    Steps: read the truststore PIN from OpenDJ's keystore.pin, export
    the 'server-cert' public cert to self.opendj_cert_fn, convert the
    JKS truststore to a PKCS12 at self.opendj_p12_fn, and import the
    cert into the default Java truststore (password 'changeit').
    """
    # Load password to acces OpenDJ truststore
    self.logIt("Reading OpenDJ truststore")
    openDjPinFn = '%s/config/keystore.pin' % self.ldapBaseFolder
    openDjTruststoreFn = '%s/config/truststore' % self.ldapBaseFolder
    openDjPin = None
    try:
        f = open(openDjPinFn)
        openDjPin = f.read().splitlines()[0]  # PIN is the first line
        f.close()
    except:
        self.logIt("Error reding OpenDJ truststore", True)
        self.logIt(traceback.format_exc(), True)
    # Export public OpenDJ certificate
    self.logIt("Exporting OpenDJ certificate")
    self.run([self.cmd_keytool,
              '-exportcert',
              '-keystore',
              openDjTruststoreFn,
              '-storepass',
              openDjPin,
              '-file',
              self.opendj_cert_fn,
              '-alias',
              'server-cert',
              '-rfc'])
    # Convert OpenDJ certificate to PKCS12
    self.logIt("Converting OpenDJ truststore")
    self.run([self.cmd_keytool,
              '-importkeystore',
              '-srckeystore',
              openDjTruststoreFn,
              '-srcstoretype',
              'jks',
              '-srcstorepass',
              openDjPin,
              '-destkeystore',
              self.opendj_p12_fn,
              '-deststoretype',
              'pkcs12',
              '-deststorepass',
              self.opendj_p12_pass,
              '-srcalias',
              'server-cert'])
    # Import OpenDJ certificate into java truststore
    self.logIt("Import OpenDJ certificate")
    self.run([self.cmd_keytool, "-import", "-trustcacerts", "-alias", "%s_opendj" % self.hostname, \
              "-file", self.opendj_cert_fn, "-keystore", self.defaultTrustStoreFN, \
              "-storepass", "changeit", "-noprompt"])
def index_opendj_backend(self, backend):
    """Create the attribute indexes for one OpenDJ backend.

    Index definitions come from the JSON file self.openDjIndexJson:
    a list of {"attribute": ..., "index": [types], "backend": [names]}
    entries; only entries listing `backend` are applied. Each index is
    created with dsconfig run as the 'ldap' user.
    """
    index_command = 'create-backend-index'
    try:
        self.logIt("Running LDAP index creation commands for " + backend + " backend")
        # This json file contains a mapping of the required indexes.
        # [ { "attribute": "inum", "type": "string", "index": ["equality"] }, ...}
        index_json = self.load_json(self.openDjIndexJson)
        if index_json:
            for attrDict in index_json:
                attr_name = attrDict['attribute']
                index_types = attrDict['index']
                for index_type in index_types:
                    backend_names = attrDict['backend']
                    for backend_name in backend_names:
                        if (backend_name == backend):
                            self.logIt("Creating %s index for attribute %s" % (index_type, attr_name))
                            indexCmd = " ".join(['cd %s/bin ; ' % self.ldapBaseFolder,
                                                 self.ldapDsconfigCommand,
                                                 index_command,
                                                 '--backend-name',
                                                 backend,
                                                 '--type',
                                                 'generic',
                                                 '--index-name',
                                                 attr_name,
                                                 '--set',
                                                 'index-type:%s' % index_type,
                                                 '--set',
                                                 'index-entry-limit:4000',
                                                 '--hostName',
                                                 self.ldap_hostname,
                                                 '--port',
                                                 self.ldap_admin_port,
                                                 '--bindDN',
                                                 '"%s"' % self.ldap_binddn,
                                                 '-j', self.ldapPassFn,
                                                 '--trustAll',
                                                 '--noPropertiesFile',
                                                 '--no-prompt'])
                            self.run(['/bin/su',
                                      'ldap',
                                      '-c',
                                      indexCmd])
        else:
            # Fixed: the original referenced the non-existent attribute
            # self.indexJson here, raising AttributeError (swallowed by
            # the bare except) instead of logging the real message.
            self.logIt('NO indexes found %s' % self.openDjIndexJson, True)
    except:
        self.logIt("Error occured during backend " + backend + " LDAP indexing", True)
        self.logIt(traceback.format_exc(), True)
def index_opendj(self):
    """Index both OpenDJ backends: userRoot first, then site."""
    for backend_name in ('userRoot', 'site'):
        self.index_opendj_backend(backend_name)
def prepare_opendj_schema(self):
    """Copy the Gluu schema LDIFs into OpenDJ's schema folder.

    Afterwards the whole OpenDJ tree is made world-readable and owned
    by ldap:ldap so the server can load the files.
    """
    self.logIt("Copying OpenDJ schema")
    for schemaFile in self.openDjschemaFiles:
        self.copyFile(schemaFile, self.openDjSchemaFolder)
    self.run([self.cmd_chmod, '-R', 'a+rX', self.ldapBaseFolder])
    self.run([self.cmd_chown, '-R', 'ldap:ldap', self.ldapBaseFolder])
def setup_opendj_service(self):
    """Install and start the OpenDJ system service.

    systemd RHEL-family hosts get the bundled unit file; every other
    platform uses OpenDJ's create-rc-script plus chkconfig/update-rc.d.
    """
    service_path = self.detect_service_path()
    if self.os_type in ['centos', 'redhat', 'fedora'] and self.os_initdaemon == 'systemd':
        opendj_script_name = os.path.split(self.opendj_service_centos7)[-1]
        opendj_dest_folder = "/etc/systemd/system"
        try:
            self.copyFile(self.opendj_service_centos7, opendj_dest_folder)
            self.run([service_path, 'daemon-reload'])
            self.run([service_path, 'enable', 'opendj.service'])
            self.run([service_path, 'start', 'opendj.service'])
        except:
            self.logIt("Error copying script file %s to %s" % (opendj_script_name, opendj_dest_folder))
            self.logIt(traceback.format_exc(), True)
    else:
        # SysV-style init: generate /etc/init.d/opendj running as user 'ldap'.
        self.run([self.ldapDsCreateRcCommand, "--outputFile", "/etc/init.d/opendj", "--userName", "ldap"])
        if self.os_type in ['centos', 'fedora', 'redhat']:
            self.run(["/sbin/chkconfig", 'opendj', "on"])
            self.run([service_path, 'opendj', 'start'])
        elif self.os_type in ['ubuntu', 'debian']:
            self.run(["/usr/sbin/update-rc.d", 'opendj', 'start', '40', '3', "."])
            self.run(["/usr/sbin/update-rc.d", 'opendj', 'enable'])
            self.run([service_path, 'opendj', 'start'])
def downloadAndExtractOpenDJ(self):
    """Replace /opt/opendj with a freshly downloaded OpenDJ 3.0.1.gluu.

    The existing /opt/opendj is moved aside to a timestamped backup
    (module-level migration_time) before the new zip is extracted.
    """
    openDJArchive = os.path.join(self.distFolder, 'app/opendj-server-3.0.1.gluu.zip')
    self.run(['mv', '/opt/opendj', '/opt/opendj.back_'+migration_time])
    self.logIt("Downloading opendj Server")
    self.run(['wget', 'https://ox.gluu.org/maven/org/forgerock/opendj/opendj-server-legacy/3.0.1.gluu/opendj-server-legacy-3.0.1.gluu.zip', '-O', openDJArchive])
    self.logIt("Unzipping %s in /opt/" % openDJArchive)
    # -n: never overwrite existing files; -q: quiet
    self.run(['unzip', '-n', '-q', openDJArchive, '-d', '/opt/' ])
    realLdapBaseFolder = os.path.realpath(self.ldapBaseFolder)
    self.run([self.cmd_chown, '-R', 'ldap:ldap', realLdapBaseFolder])
def install_opendj(self):
    """Run the OpenDJ setup CLI as the 'ldap' user, then dsjavaproperties.

    Uses the rendered opendj-setup.properties from the output folder;
    the server is stopped again afterwards so install_ldap_server can
    restart it under service control.
    """
    self.logIt("Running OpenDJ Setup")
    # Copy opendj-setup.properties so user ldap can find it in /opt/opendj
    setupPropsFN = os.path.join(self.ldapBaseFolder, 'opendj-setup.properties')
    shutil.copy("%s/opendj-setup.properties" % self.outputFolder, setupPropsFN)
    ldapSetupCommand = '%s/setup' % self.ldapBaseFolder
    setupCmd = "cd /opt/opendj ; export OPENDJ_JAVA_HOME=" + self.jre_home + " ; " + " ".join([ldapSetupCommand,
                                '--no-prompt',
                                '--cli',
                                '--propertiesFilePath',
                                setupPropsFN,
                                '--acceptLicense'
                                ])
    self.run(['/bin/su',
              'ldap',
              '-c',
              setupCmd])
    # Regenerate OpenDJ's java.properties settings.
    dsjavaCmd = "cd /opt/opendj/bin ; %s" % self.ldapDsJavaPropCommand
    self.run(['/bin/su',
              'ldap',
              '-c',
              dsjavaCmd
              ])
    # Stop the server started by setup; it is restarted later as a service.
    stopDsJavaPropCommand = "%s/bin/stop-ds" % self.ldapBaseFolder
    dsjavaCmd = "cd /opt/opendj/bin ; %s" % stopDsJavaPropCommand
    self.run(['/bin/su',
              'ldap',
              '-c',
              dsjavaCmd
              ])
def detect_service_path(self):
    """Return the service-control binary for this host.

    systemd RHEL-family hosts use systemctl, Debian-family hosts use
    /usr/sbin/service, everything else falls back to /sbin/service.
    """
    if self.os_type in ['centos', 'redhat', 'fedora'] and self.os_initdaemon == 'systemd':
        return '/usr/bin/systemctl'
    if self.os_type in ['debian', 'ubuntu']:
        return '/usr/sbin/service'
    return '/sbin/service'
def run_service_command(self, service, operation):
    """Start/stop/etc. a system service, adapting argument order.

    systemctl expects 'systemctl <operation> <service>'; SysV service
    wrappers expect 'service <service> <operation>'.
    """
    service_path = self.detect_service_path()
    try:
        if self.os_type in ['centos', 'redhat', 'fedora'] and self.os_initdaemon == 'systemd':
            self.run([service_path, operation, service], None, None, True)
        else:
            self.run([service_path, service, operation], None, None, True)
    except:
        self.logIt("Error starting Jetty service '%s'" % operation)
        self.logIt(traceback.format_exc(), True)
def install_ldap_server(self):
    """Post-install OpenDJ configuration: service, schema, config, certs, indexes.

    Writes the LDAP password file first and always removes it at the end.
    """
    self.logIt("Running OpenDJ Setup")
    self.opendj_version = self.determineOpenDJVersion()
    self.createLdapPw()
    if self.ldap_type == 'opendj':
        self.setup_opendj_service()
        self.prepare_opendj_schema()
        # Restart so the newly copied schema files are loaded.
        self.run_service_command('opendj', 'stop')
        self.run_service_command('opendj', 'start')
        self.configure_opendj()
        self.export_opendj_public_cert()
        self.index_opendj()
    self.deleteLdapPw()
def merge_dicts(self, *dict_args):
    """Shallow-merge the given dicts left to right; later values win."""
    merged = {}
    for mapping in dict_args:
        merged.update(mapping)
    return merged
def load_properties(self, fn):
    """Load a Java-style .properties file directly into self.__dict__.

    NOTE(review): this is a verbatim duplicate of the load_properties
    defined earlier in the class; being later, this definition is the
    one that takes effect. One of the two should be removed.
    """
    self.logIt('Loading Properties %s' % fn)
    p = Properties.Properties()
    try:
        p.load(open(fn))
        properties_list = p.keys()
        for prop in properties_list:
            try:
                self.__dict__[prop] = p[prop]
                # Coerce the string forms of booleans.
                if p[prop] == 'True':
                    self.__dict__[prop] = True
                elif p[prop] == 'False':
                    self.__dict__[prop] = False
            except:
                self.logIt("Error loading property %s" % prop)
                self.logIt(traceback.format_exc(), True)
    except:
        self.logIt("Error loading properties", True)
        self.logIt(traceback.format_exc(), True)
def get_missing_files(self):
    """Download/generate any setup assets missing from this checkout.

    Regenerates the deprecated OpenDJ schema LDIFs from the JSON schema
    definitions, and fetches index.json and the systemd unit file from
    the community-edition-setup GitHub repo when absent.
    """
    if os.path.exists(os.path.join(self.install_dir, 'static/opendj/deprecated')):
        self.run(['wget', 'https://raw.githubusercontent.com/GluuFederation/community-edition-setup/master/schema/generator.py', '-O', './schema/generator.py'])
        cmd_l = ['python ./schema/manager.py generate --type opendj --filename ./schema/gluu_schema.json > ./static/opendj/deprecated/101-ox.ldif',
                 'python ./schema/manager.py generate --type opendj --filename ./schema/custom_schema.json > ./static/opendj/deprecated/77-customAttributes.ldif']
        for cmd in cmd_l:
            self.logIt('Running: ' + cmd)
            # os.system (not self.run) because the commands use shell redirection.
            os.system(cmd)
        self.openDjschemaFiles = glob.glob(os.path.join(self.install_dir, 'static/opendj/deprecated/*.ldif'))
    if not os.path.exists(self.openDjIndexJson):
        self.run(['wget', 'https://raw.githubusercontent.com/GluuFederation/community-edition-setup/version_3.1.2/static/opendj/index.json', '-O', self.openDjIndexJson])
    if not os.path.exists(self.opendj_service_centos7):
        os.system('mkdir %s/static/opendj/systemd' % self.install_dir)
        self.run(['wget', 'https://raw.githubusercontent.com/GluuFederation/community-edition-setup/version_3.1.2/static/opendj/systemd/opendj.service', '-O', self.opendj_service_centos7])
def fix_shib_idp(self):
    """Placeholder for patching the Shibboleth IdP ldap.properties file.

    Fixed: the original called the non-existent os.path_exists, which
    raised AttributeError every time this method ran; the intended call
    is os.path.exists.
    TODO: implement the actual ldap.properties rewrite.
    """
    shib_ldap_prop_fn = '/opt/shibboleth-idp/conf/ldap.properties'
    if os.path.exists(shib_ldap_prop_fn):
        pass
if __name__ == '__main__':
    # Option defaults mirroring community-edition-setup's setup.py; only
    # the LDAP-related entries actually drive this migration script.
    setupOptions = {
        'install_dir': '.',
        'setup_properties': None,
        'noPrompt': False,
        'downloadWars': False,
        'installOxAuth': True,
        'installOxTrust': True,
        'installLDAP': True,
        'installHTTPD': True,
        'installSaml': False,
        'installAsimba': False,
        'installOxAuthRP': False,
        'installPassport': False,
        'allowPreReleasedApplications': False,
        'allowDeprecatedApplications': False,
        'installJce': False
    }
    installObject = Setup(setupOptions['install_dir'])
    installObject.load_properties(installObject.setup_properties_fn)
    installObject.check_properties()
    # Recover the live bind password from ox-ldap.properties.
    installObject.ldapPass = get_ldap_bind_pw()
    installObject.ldap_binddn='cn=Directory Manager'
    installObject.ldap_type = 'opendj'
    installObject.encode_passwords()
    # Get the OS type
    installObject.os_type = installObject.detect_os_type()
    # Get the init type
    installObject.os_initdaemon = installObject.detect_initd()
    # '-p' runs only the post-import LDAP fix-ups and exits.
    if len(sys.argv) > 1:
        if sys.argv[1] == '-p':
            post_ldap_update(installObject.ldap_binddn, installObject.ldapPass)
            sys.exit("Completed")
        else:
            sys.exit("Unrecognized argument")
    # Full migration path: fetch assets, install OpenDJ, reconfigure oxTrust.
    installObject.get_missing_files()
    installObject.createLdapPw()
    installObject.downloadAndExtractOpenDJ()
    installObject.install_opendj()
    installObject.install_ldap_server()
    update_ox_ldap_prop(installObject.ldap_binddn, installObject.opendj_p12_fn, installObject.encoded_opendj_p12_pass)
    installObject.fix_shib_idp()
    print "\n", "-"*50,"\n"
    print "OpenDJ installation finished. Please import your data as described in document,"
    print "and re-run this script with -p argument."
Fix Shibboleth IdP ldap.properties handling in the openldap2opendj migration script
#!/usr/bin/python
# The MIT License (MIT)
#
# Copyright (c) 2014 Gluu
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os.path
import Properties
import random
import shutil
import socket
import string
import time
import uuid
import json
import traceback
import subprocess
import sys
import getopt
import hashlib
import re
import glob
import base64
import ldap
from ldap.controls import SimplePagedResultsControl
import datetime
# Accept any TLS certificate (OpenDJ's is self-signed) and do not chase referrals.
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_ALLOW)
ldap.set_option(ldap.OPT_REFERRALS, 0)
from pyDes import *  # triple_des / ECB / PAD_PKCS5, used by Setup.obscure
# Timestamp suffix shared by every backup made during this migration run.
migration_time = time.ctime().replace(' ','_')
# oxTrust/oxAuth LDAP connection properties file this script reads and rewrites.
ox_ldap_prop_fn = '/etc/gluu/conf/ox-ldap.properties'
def get_ldap_bind_pw():
    """Return the clear-text LDAP bind password from ox-ldap.properties.

    Finds the 'bindPassword' property and decodes it via Gluu's
    encode.py helper. Returns None if the property is missing.
    """
    # 'with' closes the handle; the original leaked it.
    with open(ox_ldap_prop_fn) as prop:
        for l in prop:
            ls = l.strip()
            if ls.startswith('bindPassword'):
                n = ls.find(':')
                encpw = ls[n+1:].strip()
                # NOTE(review): encpw is interpolated into a shell command;
                # acceptable only because it comes from a root-owned config file.
                clrpw = os.popen('python /opt/gluu/bin/encode.py -D ' + encpw).read()
                return clrpw.strip()
def update_ox_ldap_prop(bindDN, trustStoreFile, trustStorePin):
    """Rewrite bindDN and trustStore settings in ox-ldap.properties.

    A timestamped backup of the original file is made before the
    rewritten content is written back in place.
    """
    # 'with' closes the read handle; the original leaked it.
    with open(ox_ldap_prop_fn) as f:
        prop = f.readlines()
    for i, l in enumerate(prop):
        ls = l.strip()
        if ls.startswith('bindDN'):
            prop[i] = 'bindDN: {0}\n'.format(bindDN)
        elif ls.startswith('ssl.trustStoreFile'):
            prop[i] = 'ssl.trustStoreFile: {0}\n'.format(trustStoreFile)
        elif ls.startswith('ssl.trustStorePin'):
            prop[i] = 'ssl.trustStorePin: {0}\n'.format(trustStorePin)
    # Keep a timestamped backup next to the file before overwriting.
    cmd = 'cp {0} {0}.back_{1}'.format(ox_ldap_prop_fn, migration_time)
    os.system(cmd)
    with open(ox_ldap_prop_fn, 'w') as w:
        w.write(''.join(prop))
def post_ldap_update(ldap_bind_dn, ldap_bind_pw):
    """Fix bindDN references inside Gluu config entries after the
    OpenLDAP -> OpenDJ migration.

    Connects to the local OpenDJ over LDAPS (port 1636) and rewrites
    the JSON blobs in oxIDPAuthentication and oxTrustConfCacheRefresh
    so they bind as plain 'cn=Directory Manager' (OpenDJ) instead of
    'cn=directory manager,o=gluu' (OpenLDAP).
    """
    conn = ldap.initialize('ldaps://localhost:1636')
    conn.protocol_version = 3
    conn.simple_bind_s(ldap_bind_dn, ldap_bind_pw)
    # oxIDPAuthentication holds JSON whose 'config' field is itself a JSON string.
    result = conn.search_s('ou=appliances,o=gluu',ldap.SCOPE_SUBTREE,'(oxIDPAuthentication=*)',['oxIDPAuthentication'])
    dn = result[0][0]
    oxIDPAuthentication = json.loads(result[0][1]['oxIDPAuthentication'][0])
    config = json.loads(oxIDPAuthentication['config'])
    if config['servers'][0]=='localhost:1636' and config['bindDN'].lower()=='cn=directory manager,o=gluu':
        config['bindDN'] = 'cn=Directory Manager'
        oxIDPAuthentication['config'] = json.dumps(config)
        oxIDPAuthentication = json.dumps(oxIDPAuthentication, indent=2)
        conn.modify_s(dn, [( ldap.MOD_REPLACE, 'oxIDPAuthentication', oxIDPAuthentication)])
    # Cache-refresh config stores its bindDN directly under inumConfig.
    result = conn.search_s('ou=appliances,o=gluu',ldap.SCOPE_SUBTREE,'(oxTrustConfCacheRefresh=*)',['oxTrustConfCacheRefresh'])
    dn = result[0][0]
    oxTrustConfCacheRefresh = json.loads(result[0][1]['oxTrustConfCacheRefresh'][0])
    oxTrustConfCacheRefresh['inumConfig']['bindDN'] = 'cn=Directory Manager'
    oxTrustConfCacheRefresh = json.dumps(oxTrustConfCacheRefresh, indent=2)
    conn.modify_s(dn, [( ldap.MOD_REPLACE, 'oxTrustConfCacheRefresh', oxTrustConfCacheRefresh)])
class Setup(object):
def __init__(self, install_dir=None):
self.install_dir = install_dir
self.oxVersion = '3.1.2.Final'
self.githubBranchName = 'version_3.1.2'
# Used only if -w (get wars) options is given to setup.py
self.oxauth_war = 'https://ox.gluu.org/maven/org/xdi/oxauth-server/%s/oxauth-server-%s.war' % (self.oxVersion, self.oxVersion)
self.oxauth_rp_war = 'https://ox.gluu.org/maven/org/xdi/oxauth-rp/%s/oxauth-rp-%s.war' % (self.oxVersion, self.oxVersion)
self.oxtrust_war = 'https://ox.gluu.org/maven/org/xdi/oxtrust-server/%s/oxtrust-server-%s.war' % (self.oxVersion, self.oxVersion)
self.idp3_war = 'http://ox.gluu.org/maven/org/xdi/oxshibbolethIdp/%s/oxshibbolethIdp-%s.war' % (self.oxVersion, self.oxVersion)
self.idp3_dist_jar = 'http://ox.gluu.org/maven/org/xdi/oxShibbolethStatic/%s/oxShibbolethStatic-%s.jar' % (self.oxVersion, self.oxVersion)
self.idp3_cml_keygenerator = 'http://ox.gluu.org/maven/org/xdi/oxShibbolethKeyGenerator/%s/oxShibbolethKeyGenerator-%s.jar' % (self.oxVersion, self.oxVersion)
self.asimba_war = 'http://ox.gluu.org/maven/org/asimba/asimba-wa/%s/asimba-wa-%s.war' % (self.oxVersion, self.oxVersion)
self.cred_manager_war = 'http://ox.gluu.org/maven/org/xdi/cred-manager/%s/cred-manager-%s.war' % (self.oxVersion, self.oxVersion)
self.ce_setup_zip = 'https://github.com/GluuFederation/community-edition-setup/archive/%s.zip' % self.githubBranchName
self.java_1_8_jce_zip = 'http://download.oracle.com/otn-pub/java/jce/8/jce_policy-8.zip'
self.downloadWars = None
self.templateRenderingDict = {}
# OS commands
self.cmd_ln = '/bin/ln'
self.cmd_chmod = '/bin/chmod'
self.cmd_chown = '/bin/chown'
self.cmd_chgrp = '/bin/chgrp'
self.cmd_mkdir = '/bin/mkdir'
self.cmd_rpm = '/bin/rpm'
self.cmd_dpkg = '/usr/bin/dpkg'
self.opensslCommand = '/usr/bin/openssl'
self.sysemProfile = "/etc/profile"
# java commands
self.jre_home = '/opt/jre'
self.cmd_java = '%s/bin/java' % self.jre_home
self.cmd_keytool = '%s/bin/keytool' % self.jre_home
self.cmd_jar = '%s/bin/jar' % self.jre_home
# Component versions
self.jre_version = '112'
self.jetty_version = '9.3.15.v20161220'
self.jython_version = '2.7.0'
self.node_version = '6.9.1'
self.apache_version = None
self.opendj_version = None
# Gluu components installation status
self.installOxAuth = True
self.installOxTrust = True
self.installLdap = True
self.installHttpd = True
self.installSaml = False
self.installAsimba = False
self.installOxAuthRP = False
self.installPassport = False
self.allowPreReleasedApplications = False
self.allowDeprecatedApplications = False
self.jreDestinationPath = '/opt/jdk1.8.0_%s' % self.jre_version
self.os_types = ['centos', 'redhat', 'fedora', 'ubuntu', 'debian']
self.os_type = None
self.os_initdaemon = None
self.shibboleth_version = 'v3'
self.distFolder = '/opt/dist'
self.distAppFolder = '%s/app' % self.distFolder
self.distGluuFolder = '%s/gluu' % self.distFolder
self.distTmpFolder = '%s/tmp' % self.distFolder
self.setup_properties_fn = '%s/setup.properties' % self.install_dir
self.log = '%s/setup.log' % self.install_dir
self.logError = '%s/setup_error.log' % self.install_dir
self.savedProperties = '%s/setup.properties.last' % self.install_dir
self.gluuOptFolder = '/opt/gluu'
self.gluuOptBinFolder = '%s/bin' % self.gluuOptFolder
self.gluuOptSystemFolder = '%s/system' % self.gluuOptFolder
self.gluuOptPythonFolder = '%s/python' % self.gluuOptFolder
self.gluuBaseFolder = '/etc/gluu'
self.configFolder = '%s/conf' % self.gluuBaseFolder
self.certFolder = '/etc/certs'
self.oxBaseDataFolder = "/var/ox"
self.oxPhotosFolder = "/var/ox/photos"
self.oxTrustRemovedFolder = "/var/ox/identity/removed"
self.oxTrustCacheRefreshFolder = "/var/ox/identity/cr-snapshots"
self.etc_hosts = '/etc/hosts'
self.etc_hostname = '/etc/hostname'
# OS /etc/default folder
self.osDefault = '/etc/default'
self.jython_home = '/opt/jython'
self.node_home = '/opt/node'
self.node_initd_script = '%s/static/system/initd/node' % self.install_dir
self.node_base = '%s/node' % self.gluuOptFolder
self.node_user_home = '/home/node'
self.jetty_dist = '/opt/jetty-9.3'
self.jetty_home = '/opt/jetty'
self.jetty_base = '%s/jetty' % self.gluuOptFolder
self.jetty_user_home = '/home/jetty'
self.jetty_user_home_lib = '%s/lib' % self.jetty_user_home
self.jetty_app_configuration = {
'oxauth' : {'name' : 'oxauth',
'jetty' : {'modules' : 'deploy,http,logging,jsp,servlets,ext,http-forwarded,websocket'},
'memory' : {'ratio' : 0.3, "jvm_heap_ration" : 0.7, "max_allowed_mb" : 4096},
'installed' : False
},
'identity' : {'name' : 'identity',
'jetty' : {'modules' : 'deploy,http,logging,jsp,ext,http-forwarded,websocket'},
'memory' : {'ratio' : 0.2, "jvm_heap_ration" : 0.7, "max_allowed_mb" : 2048},
'installed' : False
},
'idp' : {'name' : 'idp',
'jetty' : {'modules' : 'deploy,http,logging,jsp,http-forwarded'},
'memory' : {'ratio' : 0.2, "jvm_heap_ration" : 0.7, "max_allowed_mb" : 1024},
'installed' : False
},
'asimba' : {'name' : 'asimba',
'jetty' : {'modules' : 'deploy,http,logging,jsp,http-forwarded'},
'memory' : {'ratio' : 0.1, "jvm_heap_ration" : 0.7, "max_allowed_mb" : 1024},
'installed' : False
},
'oxauth-rp' : {'name' : 'oxauth-rp',
'jetty' : {'modules' : 'deploy,http,logging,jsp,http-forwarded,websocket'},
'memory' : {'ratio' : 0.1, "jvm_heap_ration" : 0.7, "max_allowed_mb" : 512},
'installed' : False
},
'passport' : {'name' : 'passport',
'node' : {},
'memory' : {'ratio' : 0.1, "max_allowed_mb" : 1024},
'installed' : False
}
}
self.app_custom_changes = {
'jetty' : {
'name' : 'jetty',
'files' : [{
'path' : '%s/etc/webdefault.xml' % self.jetty_home,
'replace' : [
{
'pattern' : r'(\<param-name\>dirAllowed<\/param-name\>)(\s*)(\<param-value\>)true(\<\/param-value\>)',
'update' : r'\1\2\3false\4'
}
]
},
{
'path' : '%s/etc/jetty.xml' % self.jetty_home,
'replace' : [
{
'pattern' : '<New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler"/>',
'update' : '<New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler">\n\t\t\t\t <Set name="showContexts">false</Set>\n\t\t\t </New>'
}
]
}]
}
}
self.idp3Folder = "/opt/shibboleth-idp"
self.idp3MetadataFolder = "%s/metadata" % self.idp3Folder
self.idp3MetadataCredentialsFolder = "%s/credentials" % self.idp3MetadataFolder
self.idp3LogsFolder = "%s/logs" % self.idp3Folder
self.idp3LibFolder = "%s/lib" % self.idp3Folder
self.idp3ConfFolder = "%s/conf" % self.idp3Folder
self.idp3ConfAuthnFolder = "%s/conf/authn" % self.idp3Folder
self.idp3CredentialsFolder = "%s/credentials" % self.idp3Folder
self.idp3WebappFolder = "%s/webapp" % self.idp3Folder
# self.idp3WarFolder = "%s/war"
self.hostname = None
self.ip = None
self.orgName = None
self.orgShortName = None
self.countryCode = None
self.city = None
self.state = None
self.admin_email = None
self.encoded_ox_ldap_pw = None
self.encoded_ldap_pw = None
self.encoded_shib_jks_pw = None
self.application_max_ram = None # in MB
self.encode_salt = None
self.baseInum = None
self.inumOrg = None
self.inumAppliance = None
self.inumOrgFN = None
self.inumApplianceFN = None
self.ldapBaseFolderldapPass = None
self.oxauth_client_id = None
self.oxauthClient_pw = None
self.oxauthClient_encoded_pw = None
self.oxTrustConfigGeneration = None
self.oxd_hostname = '%(oxd_hostname)s'
self.oxd_port = '%(oxd_port)s'
self.outputFolder = '%s/output' % self.install_dir
self.templateFolder = '%s/templates' % self.install_dir
self.staticFolder = '%s/static' % self.install_dir
self.extensionFolder = '%s/extension' % self.staticFolder
self.oxauth_error_json = '%s/oxauth/oxauth-errors.json' % self.staticFolder
self.oxauth_openid_jwks_fn = "%s/oxauth-keys.json" % self.certFolder
self.oxauth_openid_jks_fn = "%s/oxauth-keys.jks" % self.certFolder
self.oxauth_openid_jks_pass = None
self.httpdKeyPass = None
self.httpdKeyFn = '%s/httpd.key' % self.certFolder
self.httpdCertFn = '%s/httpd.crt' % self.certFolder
self.shibJksPass = None
self.shibJksFn = '%s/shibIDP.jks' % self.certFolder
self.asimbaJksPass = None
self.asimbaJksFn = '%s/asimbaIDP.jks' % self.certFolder
self.ldapTrustStoreFn = None
self.encoded_ldapTrustStorePass = None
self.opendj_cert_fn = '%s/opendj.crt' % self.certFolder
self.opendj_p12_fn = '%s/opendj.pkcs12' % self.certFolder
self.opendj_p12_pass = None
self.ldap_type = 'opendj'
self.opendj_ldap_binddn = 'cn=directory manager'
self.ldap_hostname = "localhost"
self.ldap_port = '1389'
self.ldaps_port = '1636'
self.ldap_jmx_port = '1689'
self.ldap_admin_port = '4444'
self.ldapBaseFolder = '/opt/opendj'
self.ldapSetupCommand = '%s/setup' % self.ldapBaseFolder
self.ldapDsconfigCommand = "%s/bin/dsconfig" % self.ldapBaseFolder
self.ldapDsCreateRcCommand = "%s/bin/create-rc-script" % self.ldapBaseFolder
self.ldapDsJavaPropCommand = "%s/bin/dsjavaproperties" % self.ldapBaseFolder
self.ldap_user_home = '/home/ldap'
self.ldapPassFn = '%s/.pw' % self.ldap_user_home
self.ldap_backend_type = 'je'
self.importLdifCommand = '%s/bin/import-ldif' % self.ldapBaseFolder
self.ldapModifyCommand = '%s/bin/ldapmodify' % self.ldapBaseFolder
self.loadLdifCommand = self.ldapModifyCommand
self.gluuScriptFiles = ['%s/static/scripts/logmanager.sh' % self.install_dir,
'%s/static/scripts/testBind.py' % self.install_dir]
self.openDjIndexJson = '%s/static/opendj/index.json' % self.install_dir
self.openDjSchemaFolder = "%s/config/schema" % self.ldapBaseFolder
self.openDjschemaFiles = ["%s/static/opendj/96-eduperson.ldif" % self.install_dir,
"%s/static/opendj/101-ox.ldif" % self.install_dir,
"%s/static/opendj/77-customAttributes.ldif" % self.install_dir]
if os.path.exists(os.path.join(self.install_dir, 'static/opendj/deprecated')):
self.openDjschemaFiles = glob.glob(os.path.join(self.install_dir, 'static/opendj/deprecated/*.ldif'))
self.opendj_init_file = '%s/static/opendj/opendj' % self.install_dir
self.opendj_service_centos7 = '%s/static/opendj/systemd/opendj.service' % self.install_dir
self.redhat_services = ['httpd', 'rsyslog']
self.debian_services = ['apache2', 'rsyslog']
self.apache_start_script = '/etc/init.d/httpd'
self.defaultTrustStoreFN = '%s/jre/lib/security/cacerts' % self.jre_home
self.defaultTrustStorePW = 'changeit'
self.openldapBaseFolder = '/opt/symas'
self.openldapBinFolder = '/opt/symas/bin'
self.openldapConfFolder = '/opt/symas/etc/openldap'
self.openldapRootUser = "cn=directory manager,o=gluu"
self.openldapSiteUser = "cn=directory manager,o=site"
self.openldapKeyPass = None
self.openldapTLSCACert = '%s/openldap.pem' % self.certFolder
self.openldapTLSCert = '%s/openldap.crt' % self.certFolder
self.openldapTLSKey = '%s/openldap.key' % self.certFolder
self.openldapJksPass = None
self.openldapJksFn = '%s/openldap.jks' % self.certFolder
self.openldapP12Fn = '%s/openldap.pkcs12' % self.certFolder
self.passportSpKeyPass = None
self.passportSpTLSCACert = '%s/passport-sp.pem' % self.certFolder
self.passportSpTLSCert = '%s/passport-sp.crt' % self.certFolder
self.passportSpTLSKey = '%s/passport-sp.key' % self.certFolder
self.passportSpJksPass = None
self.passportSpJksFn = '%s/passport-sp.jks' % self.certFolder
self.openldapSlapdConf = '%s/slapd.conf' % self.outputFolder
self.openldapSymasConf = '%s/symas-openldap.conf' % self.outputFolder
self.openldapRootSchemaFolder = "%s/schema" % self.gluuOptFolder
self.openldapSchemaFolder = "%s/openldap" % self.openldapRootSchemaFolder
self.openldapLogDir = "/var/log/openldap/"
self.openldapSyslogConf = "%s/static/openldap/openldap-syslog.conf" % self.install_dir
self.openldapLogrotate = "%s/static/openldap/openldap_logrotate" % self.install_dir
self.openldapSetupAccessLog = False
self.accessLogConfFile = "%s/static/openldap/accesslog.conf" % self.install_dir
self.gluuAccessLogConf = "%s/static/openldap/o_gluu_accesslog.conf" % self.install_dir
self.opendlapIndexDef = "%s/static/openldap/index.json" % self.install_dir
# Stuff that gets rendered; filename is necessary. Full path should
# reflect final path if the file must be copied after its rendered.
self.passport_saml_config = '%s/passport-saml-config.json' % self.configFolder
self.oxauth_config_json = '%s/oxauth-config.json' % self.outputFolder
self.oxtrust_config_json = '%s/oxtrust-config.json' % self.outputFolder
self.oxtrust_cache_refresh_json = '%s/oxtrust-cache-refresh.json' % self.outputFolder
self.oxtrust_import_person_json = '%s/oxtrust-import-person.json' % self.outputFolder
self.oxidp_config_json = '%s/oxidp-config.json' % self.outputFolder
self.oxasimba_config_json = '%s/oxasimba-config.json' % self.outputFolder
self.gluu_python_base = '%s/python' % self.gluuOptFolder
self.gluu_python_readme = '%s/libs/python.txt' % self.gluuOptPythonFolder
self.ox_ldap_properties = '%s/ox-ldap.properties' % self.configFolder
self.oxauth_static_conf_json = '%s/oxauth-static-conf.json' % self.outputFolder
self.oxTrust_log_rotation_configuration = "%s/conf/oxTrustLogRotationConfiguration.xml" % self.gluuBaseFolder
self.apache2_conf = '%s/httpd.conf' % self.outputFolder
self.apache2_ssl_conf = '%s/https_gluu.conf' % self.outputFolder
self.apache2_24_conf = '%s/httpd_2.4.conf' % self.outputFolder
self.apache2_ssl_24_conf = '%s/https_gluu.conf' % self.outputFolder
self.ldif_base = '%s/base.ldif' % self.outputFolder
self.ldif_appliance = '%s/appliance.ldif' % self.outputFolder
self.ldif_attributes = '%s/attributes.ldif' % self.outputFolder
self.ldif_scopes = '%s/scopes.ldif' % self.outputFolder
self.ldif_clients = '%s/clients.ldif' % self.outputFolder
self.ldif_people = '%s/people.ldif' % self.outputFolder
self.ldif_groups = '%s/groups.ldif' % self.outputFolder
self.ldif_site = '%s/static/cache-refresh/o_site.ldif' % self.install_dir
self.ldif_scripts = '%s/scripts.ldif' % self.outputFolder
self.ldif_configuration = '%s/configuration.ldif' % self.outputFolder
self.ldif_scim = '%s/scim.ldif' % self.outputFolder
self.ldif_asimba = '%s/asimba.ldif' % self.outputFolder
self.ldif_passport = '%s/passport.ldif' % self.outputFolder
self.ldif_idp = '%s/oxidp.ldif' % self.outputFolder
self.ldif_scripts_cred_manager = '%s/scripts_cred_manager.ldif' % self.outputFolder
self.passport_config = '%s/passport-config.json' % self.configFolder
self.encode_script = '%s/bin/encode.py' % self.gluuOptFolder
self.network = "/etc/sysconfig/network"
self.system_profile_update = '%s/system_profile' % self.outputFolder
self.asimba_conf_folder = '%s/asimba' % self.configFolder
self.asimba_configuration_xml = '%s/asimba.xml' % self.asimba_conf_folder
self.asimba_configuration = '%s/asimba.xml' % self.outputFolder
self.asimba_selector_configuration = '%s/asimba-selector.xml' % self.outputFolder
self.asimba_properties = '%s/asimba.properties' % self.outputFolder
self.asimba_selector_configuration_xml = '%s/asimba-selector.xml' % self.asimba_conf_folder
self.staticIDP3FolderConf = '%s/static/idp3/conf' % self.install_dir
self.staticIDP3FolderMetadata = '%s/static/idp3/metadata' % self.install_dir
self.idp3_configuration_properties = 'idp.properties'
self.idp3_configuration_ldap_properties = 'ldap.properties'
self.idp3_configuration_saml_nameid = 'saml-nameid.properties'
self.idp3_configuration_services = 'services.properties'
self.idp3_configuration_password_authn = 'authn/password-authn-config.xml'
self.idp3_metadata = 'idp-metadata.xml'
self.cred_manager_config = '%s/cred-manager.json' % self.outputFolder
### rsyslog file customised for init.d
self.rsyslogUbuntuInitFile = "%s/static/system/ubuntu/rsyslog" % self.install_dir
self.ldap_setup_properties = '%s/opendj-setup.properties' % self.templateFolder
# oxAuth/oxTrust Base64 configuration files
self.pairwiseCalculationKey = None
self.pairwiseCalculationSalt = None
# OpenID key generation default setting
self.default_openid_jks_dn_name = 'CN=oxAuth CA Certificates'
self.default_key_algs = 'RS256 RS384 RS512 ES256 ES384 ES512'
self.default_key_expiration = 365
# oxTrust SCIM configuration
self.scim_rs_client_id = None
self.scim_rs_client_jwks = None
self.scim_rs_client_jks_fn = "%s/scim-rs.jks" % self.certFolder
self.scim_rs_client_jks_pass = None
self.scim_rs_client_jks_pass_encoded = None
self.scim_rp_client_id = None
self.scim_rp_client_jwks = None
self.scim_rp_client_jks_fn = "%s/scim-rp.jks" % self.outputFolder
self.scim_rp_client_jks_pass = 'secret'
# oxPassport Configuration
self.gluu_passport_base = '%s/passport' % self.node_base
self.ldif_passport_config = '%s/oxpassport-config.ldif' % self.outputFolder
self.passport_rs_client_id = None
self.passport_rs_client_jwks = None
self.passport_rs_client_jks_fn = "%s/passport-rs.jks" % self.certFolder
self.passport_rs_client_jks_pass = None
self.passport_rs_client_jks_pass_encoded = None
self.passport_rp_client_id = None
self.passport_rp_client_jwks = None
self.passport_rp_client_jks_fn = "%s/passport-rp.jks" % self.certFolder
self.passport_rp_client_cert_alg = "RS512"
self.passport_rp_client_cert_alias = None
self.passport_rp_client_cert_fn = "%s/passport-rp.pem" % self.certFolder
self.passport_rp_client_jks_pass = 'secret'
self.ldif_files = [self.ldif_base,
self.ldif_appliance,
self.ldif_attributes,
self.ldif_scopes,
self.ldif_clients,
self.ldif_people,
self.ldif_groups,
self.ldif_site,
self.ldif_scripts,
self.ldif_configuration,
self.ldif_scim,
self.ldif_asimba,
self.ldif_passport,
self.ldif_passport_config,
self.ldif_idp
]
self.ce_templates = {self.oxauth_config_json: False,
self.passport_saml_config:True,
self.gluu_python_readme: True,
self.oxtrust_config_json: False,
self.oxtrust_cache_refresh_json: False,
self.oxtrust_import_person_json: False,
self.oxidp_config_json: False,
self.oxasimba_config_json: False,
self.ox_ldap_properties: True,
self.oxauth_static_conf_json: False,
self.oxTrust_log_rotation_configuration: True,
self.ldap_setup_properties: False,
self.apache2_conf: False,
self.apache2_ssl_conf: False,
self.apache2_24_conf: False,
self.apache2_ssl_24_conf: False,
self.etc_hosts: False,
self.etc_hostname: False,
self.ldif_base: False,
self.ldif_appliance: False,
self.ldif_attributes: False,
self.ldif_scopes: False,
self.ldif_clients: False,
self.ldif_people: False,
self.ldif_groups: False,
self.ldif_scripts: False,
self.ldif_scim: False,
self.ldif_asimba: False,
self.ldif_passport: False,
self.ldif_passport_config: False,
self.ldif_idp: False,
self.asimba_configuration: False,
self.asimba_properties: False,
self.asimba_selector_configuration: False,
self.network: False,
self.cred_manager_config:False,
self.ldif_scripts_cred_manager: False,
}
self.oxauth_keys_utils_libs = [ 'bcprov-jdk15on-*.jar', 'bcpkix-jdk15on-*.jar', 'commons-lang-*.jar',
'log4j-*.jar', 'commons-codec-*.jar', 'commons-cli-*.jar', 'commons-io-*.jar',
'jackson-core-*.jar', 'jackson-core-asl-*.jar', 'jackson-mapper-asl-*.jar', 'jackson-xc-*.jar',
'jettison-*.jar', 'oxauth-model-*.jar', 'oxauth-client-*.jar' ]
def get_ip(self):
testIP = None
detectedIP = None
try:
testSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
detectedIP = [(testSocket.connect(('8.8.8.8', 80)),
testSocket.getsockname()[0],
testSocket.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]
except:
self.logIt("No detected IP address", True)
self.logIt(traceback.format_exc(), True)
while not testIP:
if detectedIP:
testIP = self.getPrompt("Enter IP Address", detectedIP)
else:
testIP = self.getPrompt("Enter IP Address")
if not self.isIP(testIP):
testIP = None
print 'ERROR: The IP Address is invalid. Try again\n'
return testIP
    def check_properties(self):
        """Interactively fill in any required property that is still unset.

        Prompts for hostname/IP/organization details, generates random
        passwords for every keystore slot left empty, and mints the
        base/org/appliance inums plus the client ids derived from them.
        Safe to call after load_properties: anything already set is kept.
        """
        self.logIt('Checking properties')
        while not self.hostname:
            testhost = raw_input('Hostname of this server: ').strip()
            # Require at least host.domain.tld (three dot-separated labels).
            if len(testhost.split('.')) >= 3:
                self.hostname = testhost
            else:
                print 'The hostname has to be at least three domain components. Try again\n'
        while not self.ip:
            self.ip = self.get_ip()
        while not self.orgName:
            self.orgName = raw_input('Organization Name: ').strip()
        while not self.countryCode:
            testCode = raw_input('2 Character Country Code: ').strip()
            if len(testCode) == 2:
                self.countryCode = testCode
            else:
                print 'Country code should only be two characters. Try again\n'
        while not self.city:
            self.city = raw_input('City: ').strip()
        while not self.state:
            self.state = raw_input('State or Province: ').strip()
        if not self.admin_email:
            tld = None
            try:
                # Default admin address: support@<last two hostname labels>.
                tld = ".".join(self.hostname.split(".")[-2:])
            except:
                tld = self.hostname
            self.admin_email = "support@%s" % tld
        # Generate random passwords for any keystore slot still unset.
        if not self.httpdKeyPass:
            self.httpdKeyPass = self.getPW()
        if not self.ldapPass:
            # NOTE(review): self.ldapPass is not initialised in __init__ as
            # shown in this file (only 'ldapBaseFolderldapPass' is); unless
            # it arrives via a loaded properties file this attribute access
            # raises AttributeError -- verify upstream.
            self.ldapPass = self.getPW()
        if not self.shibJksPass:
            self.shibJksPass = self.getPW()
        if not self.oxauth_openid_jks_pass:
            self.oxauth_openid_jks_pass = self.getPW()
        if not self.asimbaJksPass:
            self.asimbaJksPass = self.getPW()
        if not self.openldapKeyPass:
            self.openldapKeyPass = self.getPW()
        if not self.openldapJksPass:
            self.openldapJksPass = self.getPW()
        if not self.opendj_p12_pass:
            self.opendj_p12_pass = self.getPW()
        if not self.passportSpKeyPass:
            self.passportSpKeyPass = self.getPW()
            self.passportSpJksPass = self.getPW()
        if not self.encode_salt:
            # 24 chars: two 12-char passwords concatenated (3DES key size).
            self.encode_salt= self.getPW() + self.getPW()
        # Mint inums and derived client ids ('!0001' org, '!0002' appliance,
        # '!0008' clients -- see the inum templates used across Gluu).
        if not self.baseInum:
            self.baseInum = '@!%s.%s.%s.%s' % tuple([self.getQuad() for i in xrange(4)])
        if not self.inumOrg:
            orgTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.inumOrg = '%s!0001!%s' % (self.baseInum, orgTwoQuads)
        if not self.inumAppliance:
            applianceTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.inumAppliance = '%s!0002!%s' % (self.baseInum, applianceTwoQuads)
        if not self.oxauth_client_id:
            clientTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.oxauth_client_id = '%s!0008!%s' % (self.inumOrg, clientTwoQuads)
        if not self.scim_rs_client_id:
            scimClientTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.scim_rs_client_id = '%s!0008!%s' % (self.inumOrg, scimClientTwoQuads)
        if not self.scim_rp_client_id:
            scimClientTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.scim_rp_client_id = '%s!0008!%s' % (self.inumOrg, scimClientTwoQuads)
        if not self.passport_rs_client_id:
            passportClientTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.passport_rs_client_id = '%s!0008!%s' % (self.inumOrg, passportClientTwoQuads)
        if not self.passport_rp_client_id:
            passportClientTwoQuads = '%s.%s' % tuple([self.getQuad() for i in xrange(2)])
            self.passport_rp_client_id = '%s!0008!%s' % (self.inumOrg, passportClientTwoQuads)
        # "FN" (file-name safe) variants strip the inum punctuation.
        if not self.inumApplianceFN:
            self.inumApplianceFN = self.inumAppliance.replace('@', '').replace('!', '').replace('.', '')
        if not self.inumOrgFN:
            self.inumOrgFN = self.inumOrg.replace('@', '').replace('!', '').replace('.', '')
        if not self.application_max_ram:
            self.application_max_ram = 3072
def choose_from_list(self, list_of_choices, choice_name="item", default_choice_index=0):
return_value = None
choice_map = {}
chosen_index = 0
print "\nSelect the number for the %s from the following list:" % choice_name
for choice in list_of_choices:
choice_map[chosen_index] = choice
chosen_index += 1
print " [%i] %s" % (chosen_index, choice)
while not return_value:
choice_number = self.getPrompt("Please select a number listed above", str(default_choice_index + 1))
try:
choice_number = int(choice_number) - 1
if (choice_number >= 0) & (choice_number < len(list_of_choices)):
return_value = choice_map[choice_number]
else:
print '"%i" is not a valid choice' % (choice_number + 1)
except:
print 'Cannot convert "%s" to a number' % choice_number
self.logIt(traceback.format_exc(), True)
return return_value
# = File system =================================================================
def findFiles(self, filePatterns, filesFolder):
foundFiles = []
try:
for filePattern in filePatterns:
fileFullPathPattern = "%s/%s" % (filesFolder, filePattern)
for fileFullPath in glob.iglob(fileFullPathPattern):
foundFiles.append(fileFullPath)
except:
self.logIt("Error finding files %s in folder %s" % (":".join(filePatterns), filesFolder), True)
self.logIt(traceback.format_exc(), True)
return foundFiles
def readFile(self, inFilePath):
inFilePathText = None
try:
f = open(inFilePath)
inFilePathText = f.read()
f.close
except:
self.logIt("Error reading %s" % inFilePathText, True)
self.logIt(traceback.format_exc(), True)
return inFilePathText
def writeFile(self, outFilePath, text):
inFilePathText = None
try:
f = open(outFilePath, 'w')
f.write(text)
f.close()
except:
self.logIt("Error writing %s" % inFilePathText, True)
self.logIt(traceback.format_exc(), True)
return inFilePathText
def commentOutText(self, text):
textLines = text.split('\n')
lines = []
for textLine in textLines:
lines.append('#%s' % textLine)
return "\n".join(lines)
def replaceInText(self, text, pattern, update):
rePattern = re.compile(pattern, flags=re.DOTALL | re.M)
return rePattern.sub(update, text)
def applyChangesInFiles(self, changes):
self.logIt("Applying changes to %s files..." % changes['name'])
for change in changes['files']:
file = change['path']
text = self.readFile(file)
file_backup = '%s.bak' % file
self.writeFile(file_backup, text)
self.logIt("Created backup of %s file %s..." % (changes['name'], file_backup))
for replace in change['replace']:
text = self.replaceInText(text, replace['pattern'], replace['update'])
self.writeFile(file, text)
self.logIt("Wrote updated %s file %s..." % (changes['name'], file))
    def copyFile(self, inFile, destFolder):
        """Copy one file into destFolder; failures are logged, never raised."""
        try:
            shutil.copy(inFile, destFolder)
            self.logIt("Copied %s to %s" % (inFile, destFolder))
        except:
            self.logIt("Error copying %s to %s" % (inFile, destFolder), True)
            self.logIt(traceback.format_exc(), True)
    def copyTree(self, src, dst, overwrite=False):
        """Recursively copy src into dst, creating dst as needed.

        A file is copied only when it is missing in dst or when the source
        is more than one second newer (shutil.copy2 preserves mtimes, so
        the 1s tolerance avoids pointless re-copies).  With overwrite=True
        an existing destination file is removed first.  Errors are logged,
        never raised.
        """
        try:
            if not os.path.exists(dst):
                os.makedirs(dst)
            for item in os.listdir(src):
                s = os.path.join(src, item)
                d = os.path.join(dst, item)
                if os.path.isdir(s):
                    # Recurse into sub-directories, propagating the flag.
                    self.copyTree(s, d, overwrite)
                else:
                    if overwrite and os.path.exists(d):
                        self.removeFile(d)
                    # Copy when absent, or when src is >1s newer than dst.
                    if not os.path.exists(d) or os.stat(s).st_mtime - os.stat(d).st_mtime > 1:
                        shutil.copy2(s, d)
            self.logIt("Copied tree %s to %s" % (src, dst))
        except:
            self.logIt("Error copying tree %s to %s" % (src, dst), True)
            self.logIt(traceback.format_exc(), True)
def createDirs(self, name):
try:
if not os.path.exists(name):
os.makedirs(name, 0700)
self.logIt('Created dir: %s' % name)
except:
self.logIt("Error making directory %s" % name, True)
self.logIt(traceback.format_exc(), True)
def removeDirs(self, name):
try:
if os.path.exists(name):
shutil.rmtree(name)
self.logIt('Removed dir: %s' % name)
except:
self.logIt("Error removing directory %s" % name, True)
self.logIt(traceback.format_exc(), True)
def removeFile(self, fileName):
try:
if os.path.exists(fileName):
os.remove(fileName)
self.logIt('Removed file: %s' % fileName)
except:
self.logIt("Error removing file %s" % fileName, True)
self.logIt(traceback.format_exc(), True)
# = Utilities ====================================================================
def logIt(self, msg, errorLog=False):
if errorLog:
f = open(self.logError, 'a')
f.write('%s %s\n' % (time.strftime('%X %x'), msg))
f.close()
f = open(self.log, 'a')
f.write('%s %s\n' % (time.strftime('%X %x'), msg))
f.close()
def appendLine(self, line, fileName=False):
try:
f = open(fileName, 'a')
f.write('%s\n' % line)
f.close()
except:
self.logIt("Error loading file %s" % fileName)
def set_ulimits(self):
try:
if self.os_type in ['centos', 'redhat', 'fedora']:
apache_user = 'apache'
else:
apache_user = 'www-data'
self.appendLine("ldap soft nofile 131072", "/etc/security/limits.conf")
self.appendLine("ldap hard nofile 262144", "/etc/security/limits.conf")
self.appendLine("%s soft nofile 131072" % apache_user, "/etc/security/limits.conf")
self.appendLine("%s hard nofile 262144" % apache_user, "/etc/security/limits.conf")
self.appendLine("jetty soft nofile 131072", "/etc/security/limits.conf")
self.appendLine("jetty hard nofile 262144", "/etc/security/limits.conf")
except:
self.logIt("Could not set limits.")
self.logIt(traceback.format_exc(), True)
def load_properties(self, fn):
self.logIt('Loading Properties %s' % fn)
p = Properties.Properties()
try:
p.load(open(fn))
properties_list = p.keys()
for prop in properties_list:
try:
self.__dict__[prop] = p[prop]
if p[prop] == 'True':
self.__dict__[prop] = True
elif p[prop] == 'False':
self.__dict__[prop] = False
except:
self.logIt("Error loading property %s" % prop)
self.logIt(traceback.format_exc(), True)
except:
self.logIt("Error loading properties", True)
self.logIt(traceback.format_exc(), True)
def load_json(self, fn):
self.logIt('Loading JSON from %s' % fn)
try:
json_file = open(fn)
json_text = json_file.read()
json_file.close()
return json.loads(json_text)
except:
self.logIt("Unable to read or parse json file from %s" % fn, True)
self.logIt(traceback.format_exc(), True)
return None
def obscure(self, data=""):
engine = triple_des(self.encode_salt, ECB, pad=None, padmode=PAD_PKCS5)
data = data.encode('ascii')
en_data = engine.encrypt(data)
return base64.b64encode(en_data)
def copy_output(self):
self.logIt("Copying rendered templates to final destination")
for dest_fn in self.ce_templates.keys():
if self.ce_templates[dest_fn]:
fn = os.path.split(dest_fn)[-1]
output_fn = os.path.join(self.outputFolder, fn)
try:
self.logIt("Copying %s to %s" % (output_fn, dest_fn))
dest_dir = os.path.dirname(dest_fn)
if not os.path.exists(dest_dir):
self.logIt("Created destination folder %s" % dest_dir)
os.makedirs(dest_dir)
shutil.copyfile(output_fn, dest_fn)
except:
self.logIt("Error writing %s to %s" % (output_fn, dest_fn), True)
self.logIt(traceback.format_exc(), True)
def detect_os_type(self):
# TODO: Change this to support more distros. For example according to
# http://unix.stackexchange.com/questions/6345/how-can-i-get-distribution-name-and-version-number-in-a-simple-shell-script
distro_info = self.readFile('/etc/redhat-release')
if distro_info == None:
distro_info = self.readFile('/etc/os-release')
if 'CentOS' in distro_info:
return self.os_types[0]
elif 'Red Hat' in distro_info:
return self.os_types[1]
elif 'Ubuntu' in distro_info:
return self.os_types[3]
elif 'Debian' in distro_info:
return self.os_types[4]
else:
return self.choose_from_list(self.os_types, "Operating System")
def detect_initd(self):
return open(os.path.join('/proc/1/status'), 'r').read().split()[1]
def determineOpenDJVersion(self):
f = open('/opt/opendj/template/config/buildinfo', 'r')
encode_script = f.read().split()[0]
f.close()
if re.match(r'2\.6\.0\..*', encode_script):
return "2.6"
return "3.0"
def getPW(self, size=12, chars=string.ascii_uppercase + string.digits + string.lowercase):
return ''.join(random.choice(chars) for _ in range(size))
    def ldap_encode(self, password):
        """Return an RFC 2307 '{SSHA}' salted-SHA1 hash of password.

        Python 2 only: relies on the str 'base64' codec and on formatting
        raw digest bytes straight into a str.
        """
        salt = os.urandom(4)    # 4 random salt bytes, appended after the digest
        sha = hashlib.sha1(password)
        sha.update(salt)
        # base64(digest || salt); the 'base64' codec appends a newline, strip it.
        b64encoded = '{0}{1}'.format(sha.digest(), salt).encode('base64').strip()
        encrypted_password = '{{SSHA}}{0}'.format(b64encoded)
        return encrypted_password
    def encode_passwords(self):
        """Populate the encoded/obscured password attributes used by templates.

        encoded_ldap_pw uses the {SSHA} LDAP scheme; the others are the
        reversible 3DES 'obscure' form.  Also mints the oxAuth client
        password.  Errors are logged, never raised.
        """
        self.logIt("Encoding passwords")
        try:
            self.encoded_ldap_pw = self.ldap_encode(self.ldapPass)
            self.encoded_shib_jks_pw = self.obscure(self.shibJksPass)
            self.encoded_ox_ldap_pw = self.obscure(self.ldapPass)
            self.encoded_openldapJksPass = self.obscure(self.openldapJksPass)
            self.encoded_opendj_p12_pass = self.obscure(self.opendj_p12_pass)
            self.oxauthClient_pw = self.getPW()
            self.oxauthClient_encoded_pw = self.obscure(self.oxauthClient_pw)
        except:
            self.logIt("Error encoding passwords", True)
            self.logIt(traceback.format_exc(), True)
def encode_test_passwords(self):
self.logIt("Encoding test passwords")
try:
self.templateRenderingDict['oxauthClient_2_pw'] = self.getPW()
self.templateRenderingDict['oxauthClient_2_encoded_pw'] = self.obscure(self.templateRenderingDict['oxauthClient_2_pw'])
self.templateRenderingDict['oxauthClient_3_pw'] = self.getPW()
self.templateRenderingDict['oxauthClient_3_encoded_pw'] = self.obscure(self.templateRenderingDict['oxauthClient_3_pw'])
self.templateRenderingDict['oxauthClient_4_pw'] = self.getPW()
self.templateRenderingDict['oxauthClient_4_encoded_pw'] = self.obscure(self.templateRenderingDict['oxauthClient_4_pw'])
except:
self.logIt("Error encoding test passwords", True)
self.logIt(traceback.format_exc(), True)
def fomatWithDict(self, text, dictionary):
text = re.sub(r"%([^\(])", r"%%\1", text)
text = re.sub(r"%$", r"%%", text) # There was a % at the end?
return text % dictionary
def renderTemplateInOut(self, filePath, templateFolder, outputFolder):
self.logIt("Rendering template %s" % filePath)
fn = os.path.split(filePath)[-1]
f = open(os.path.join(templateFolder, fn))
template_text = f.read()
f.close()
# Create output folder if needed
if not os.path.exists(outputFolder):
os.makedirs(outputFolder)
newFn = open(os.path.join(outputFolder, fn), 'w+')
newFn.write(self.fomatWithDict(template_text, self.merge_dicts(self.__dict__, self.templateRenderingDict)))
newFn.close()
    def renderTemplate(self, filePath):
        # Convenience wrapper: render using the instance's default template
        # and output folders.
        self.renderTemplateInOut(filePath, self.templateFolder, self.outputFolder)
def render_templates(self):
self.logIt("Rendering templates")
for fullPath in self.ce_templates.keys():
try:
self.renderTemplate(fullPath)
except:
self.logIt("Error writing template %s" % fullPath, True)
self.logIt(traceback.format_exc(), True)
    def render_custom_templates(self, fullPath):
        """Render every template found under *fullPath* into a sibling
        '<fullPath>.output' directory.

        :param fullPath: directory containing custom template files

        Both the mkdir and the rendering loop swallow errors after
        logging them, so a failure leaves partial output behind.
        """
        output_dir = fullPath + '.output'
        self.logIt("Rendering custom templates")
        self.logIt("Rendering custom templates from %s to %s" % (fullPath, output_dir))
        try:
            # mkdir -p: create the whole path, no error if it already exists
            self.run([self.cmd_mkdir, '-p', output_dir])
        except:
            self.logIt("Error creating output directory %s" % output_dir, True)
            self.logIt(traceback.format_exc(), True)
        try:
            # get_filepaths is defined outside this view -- presumably lists
            # the files directly under fullPath; confirm whether it recurses.
            for filename in self.get_filepaths(fullPath):
                self.renderTemplateInOut(filename, fullPath, output_dir)
        except:
            self.logIt("Error writing template %s" % fullPath, True)
            self.logIt(traceback.format_exc(), True)
    def render_configuration_template(self):
        """Render the single LDIF configuration template
        (self.ldif_configuration); errors are logged, not raised."""
        self.logIt("Rendering configuration templates")
        fullPath = self.ldif_configuration
        try:
            self.renderTemplate(fullPath)
        except:
            self.logIt("Error writing template %s" % fullPath, True)
            self.logIt(traceback.format_exc(), True)
def render_templates_folder(self, templatesFolder):
self.logIt("Rendering templates folder: %s" % templatesFolder)
for templateBase, templateDirectories, templateFiles in os.walk(templatesFolder):
for templateFile in templateFiles:
fullPath = '%s/%s' % (templateBase, templateFile)
try:
self.logIt("Rendering test template %s" % fullPath)
# Remove ./template/ and everything left of it from fullPath
fn = re.match(r'(^.+/templates/)(.*$)', fullPath).groups()[1]
f = open(os.path.join(self.templateFolder, fn))
template_text = f.read()
f.close()
fullOutputFile = os.path.join(self.outputFolder, fn)
# Create full path to the output file
fullOutputDir = os.path.dirname(fullOutputFile)
if not os.path.exists(fullOutputDir):
os.makedirs(fullOutputDir)
newFn = open(fullOutputFile, 'w+')
newFn.write(template_text % self.merge_dicts(self.__dict__, self.templateRenderingDict))
newFn.close()
except:
self.logIt("Error writing template %s" % fullPath, True)
self.logIt(traceback.format_exc(), True)
def render_test_templates(self):
self.logIt("Rendering test templates")
testTepmplatesFolder = '%s/test/' % self.templateFolder
self.render_templates_folder(testTepmplatesFolder)
    # args = command + args, i.e. ['ls', '-ltr']
    def run(self, args, cwd=None, env=None, useWait=False, shell=False):
        # Execute an external command.  With useWait=False (default) the
        # command's stdout/stderr are captured and logged; with useWait=True
        # only the exit code is logged.  Errors are logged, never raised.
        #
        # NOTE(review): with useWait=True the PIPEs are never drained, so a
        # command producing lots of output could block on a full pipe buffer.
        print "Run:", args
        self.logIt('Running: %s' % ' '.join(args))
        try:
            p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env, shell=shell)
            if useWait:
                code = p.wait()
                self.logIt('Run: %s with result code: %d' % (' '.join(args), code) )
            else:
                output, err = p.communicate()
                if output:
                    self.logIt(output)
                if err:
                    # anything on stderr is logged as an error
                    self.logIt(err, True)
        except:
            self.logIt("Error running command : %s" % " ".join(args), True)
            self.logIt(traceback.format_exc(), True)
def createLdapPw(self):
f = open(self.ldapPassFn, 'w')
f.write(self.ldapPass)
f.close()
self.run([self.cmd_chown, 'ldap:ldap', self.ldapPassFn])
def deleteLdapPw(self):
if os.path.exists(self.ldapPassFn):
os.remove(self.ldapPassFn)
if os.path.exists(os.path.join(self.ldapBaseFolder, 'opendj-setup.properties')):
os.remove(os.path.join(self.ldapBaseFolder, 'opendj-setup.properties'))
    def configure_opendj(self):
        """Apply post-install dsconfig changes to the running OpenDJ.

        Each entry in config_changes is one dsconfig invocation, executed
        as the 'ldap' user with the bind password read from the password
        file written by createLdapPw.
        """
        self.logIt("Configuring OpenDJ")
        # The global ACI granting anonymous read access, removed below so
        # only authenticated clients can search the directory.
        opendj_prop_name = 'global-aci:\'(targetattr!="userPassword||authPassword||debugsearchindex||changes||changeNumber||changeType||changeTime||targetDN||newRDN||newSuperior||deleteOldRDN")(version 3.0; acl "Anonymous read access"; allow (read,search,compare) userdn="ldap:///anyone";)\''
        # NOTE(review): 'reject-unauthenticated-requests:true' appears twice
        # in this list; the second run is redundant (harmless, idempotent).
        config_changes = [['set-global-configuration-prop', '--set', 'single-structural-objectclass-behavior:accept'],
                          ['set-attribute-syntax-prop', '--syntax-name', '"Directory String"', '--set', 'allow-zero-length-values:true'],
                          ['set-password-policy-prop', '--policy-name', '"Default Password Policy"', '--set', 'allow-pre-encoded-passwords:true'],
                          ['set-log-publisher-prop', '--publisher-name', '"File-Based Audit Logger"', '--set', 'enabled:true'],
                          ['create-backend', '--backend-name', 'site', '--set', 'base-dn:o=site', '--type %s' % self.ldap_backend_type, '--set', 'enabled:true'],
                          ['set-connection-handler-prop', '--handler-name', '"LDAP Connection Handler"', '--set', 'enabled:false'],
                          ['set-access-control-handler-prop', '--remove', '%s' % opendj_prop_name],
                          ['set-global-configuration-prop', '--set', 'reject-unauthenticated-requests:true'],
                          ['set-password-policy-prop', '--policy-name', '"Default Password Policy"', '--set', 'default-password-storage-scheme:"Salted SHA-512"'],
                          ['set-global-configuration-prop', '--set', 'reject-unauthenticated-requests:true']
                          ]
        for changes in config_changes:
            # The command is a shell string because it cd's into the bin
            # folder before invoking dsconfig.
            dsconfigCmd = " ".join(['cd %s/bin ; ' % self.ldapBaseFolder,
                                    self.ldapDsconfigCommand,
                                    '--trustAll',
                                    '--no-prompt',
                                    '--hostname',
                                    self.ldap_hostname,
                                    '--port',
                                    self.ldap_admin_port,
                                    '--bindDN',
                                    '"%s"' % self.ldap_binddn,
                                    '--bindPasswordFile',
                                    self.ldapPassFn] + changes)
            self.run(['/bin/su',
                      'ldap',
                      '-c',
                      dsconfigCmd])
def export_opendj_public_cert(self):
# Load password to acces OpenDJ truststore
self.logIt("Reading OpenDJ truststore")
openDjPinFn = '%s/config/keystore.pin' % self.ldapBaseFolder
openDjTruststoreFn = '%s/config/truststore' % self.ldapBaseFolder
openDjPin = None
try:
f = open(openDjPinFn)
openDjPin = f.read().splitlines()[0]
f.close()
except:
self.logIt("Error reding OpenDJ truststore", True)
self.logIt(traceback.format_exc(), True)
# Export public OpenDJ certificate
self.logIt("Exporting OpenDJ certificate")
self.run([self.cmd_keytool,
'-exportcert',
'-keystore',
openDjTruststoreFn,
'-storepass',
openDjPin,
'-file',
self.opendj_cert_fn,
'-alias',
'server-cert',
'-rfc'])
# Convert OpenDJ certificate to PKCS12
self.logIt("Converting OpenDJ truststore")
self.run([self.cmd_keytool,
'-importkeystore',
'-srckeystore',
openDjTruststoreFn,
'-srcstoretype',
'jks',
'-srcstorepass',
openDjPin,
'-destkeystore',
self.opendj_p12_fn,
'-deststoretype',
'pkcs12',
'-deststorepass',
self.opendj_p12_pass,
'-srcalias',
'server-cert'])
# Import OpenDJ certificate into java truststore
self.logIt("Import OpenDJ certificate")
self.run([self.cmd_keytool, "-import", "-trustcacerts", "-alias", "%s_opendj" % self.hostname, \
"-file", self.opendj_cert_fn, "-keystore", self.defaultTrustStoreFN, \
"-storepass", "changeit", "-noprompt"])
def index_opendj_backend(self, backend):
index_command = 'create-backend-index'
try:
self.logIt("Running LDAP index creation commands for " + backend + " backend")
# This json file contains a mapping of the required indexes.
# [ { "attribute": "inum", "type": "string", "index": ["equality"] }, ...}
index_json = self.load_json(self.openDjIndexJson)
if index_json:
for attrDict in index_json:
attr_name = attrDict['attribute']
index_types = attrDict['index']
for index_type in index_types:
backend_names = attrDict['backend']
for backend_name in backend_names:
if (backend_name == backend):
self.logIt("Creating %s index for attribute %s" % (index_type, attr_name))
indexCmd = " ".join(['cd %s/bin ; ' % self.ldapBaseFolder,
self.ldapDsconfigCommand,
index_command,
'--backend-name',
backend,
'--type',
'generic',
'--index-name',
attr_name,
'--set',
'index-type:%s' % index_type,
'--set',
'index-entry-limit:4000',
'--hostName',
self.ldap_hostname,
'--port',
self.ldap_admin_port,
'--bindDN',
'"%s"' % self.ldap_binddn,
'-j', self.ldapPassFn,
'--trustAll',
'--noPropertiesFile',
'--no-prompt'])
self.run(['/bin/su',
'ldap',
'-c',
indexCmd])
else:
self.logIt('NO indexes found %s' % self.indexJson, True)
except:
self.logIt("Error occured during backend " + backend + " LDAP indexing", True)
self.logIt(traceback.format_exc(), True)
def index_opendj(self):
self.index_opendj_backend('userRoot')
self.index_opendj_backend('site')
    def prepare_opendj_schema(self):
        """Copy the custom schema files into OpenDJ's schema folder and
        fix ownership/permissions of the whole OpenDJ tree."""
        self.logIt("Copying OpenDJ schema")
        for schemaFile in self.openDjschemaFiles:
            self.copyFile(schemaFile, self.openDjSchemaFolder)
        # a+rX: world-readable, and executable only where already a dir/executable
        self.run([self.cmd_chmod, '-R', 'a+rX', self.ldapBaseFolder])
        self.run([self.cmd_chown, '-R', 'ldap:ldap', self.ldapBaseFolder])
    def setup_opendj_service(self):
        """Install and start the OpenDJ service.

        On systemd-based RHEL-family systems a unit file is installed; on
        everything else OpenDJ's own create-rc-script generates a SysV init
        script which is then enabled and started.
        """
        service_path = self.detect_service_path()
        if self.os_type in ['centos', 'redhat', 'fedora'] and self.os_initdaemon == 'systemd':
            opendj_script_name = os.path.split(self.opendj_service_centos7)[-1]
            opendj_dest_folder = "/etc/systemd/system"
            try:
                self.copyFile(self.opendj_service_centos7, opendj_dest_folder)
                self.run([service_path, 'daemon-reload'])
                self.run([service_path, 'enable', 'opendj.service'])
                self.run([service_path, 'start', 'opendj.service'])
            except:
                # NOTE(review): this logIt call omits the error flag (True)
                # that the rest of the file passes for error messages.
                self.logIt("Error copying script file %s to %s" % (opendj_script_name, opendj_dest_folder))
                self.logIt(traceback.format_exc(), True)
        else:
            # Let OpenDJ generate its own init script, owned by user 'ldap'
            self.run([self.ldapDsCreateRcCommand, "--outputFile", "/etc/init.d/opendj", "--userName", "ldap"])
            if self.os_type in ['centos', 'fedora', 'redhat']:
                self.run(["/sbin/chkconfig", 'opendj', "on"])
                self.run([service_path, 'opendj', 'start'])
            elif self.os_type in ['ubuntu', 'debian']:
                self.run(["/usr/sbin/update-rc.d", 'opendj', 'start', '40', '3', "."])
                self.run(["/usr/sbin/update-rc.d", 'opendj', 'enable'])
                self.run([service_path, 'opendj', 'start'])
    def downloadAndExtractOpenDJ(self):
        """Back up any existing /opt/opendj, download the OpenDJ 3.0.1.gluu
        server zip, and extract it into /opt/.

        NOTE(review): 'migration_time' is a module-level name defined
        outside this view -- presumably a timestamp string; confirm.
        """
        openDJArchive = os.path.join(self.distFolder, 'app/opendj-server-3.0.1.gluu.zip')
        # Preserve the old installation under a timestamped backup name
        self.run(['mv', '/opt/opendj', '/opt/opendj.back_'+migration_time])
        self.logIt("Downloading opendj Server")
        self.run(['wget', 'https://ox.gluu.org/maven/org/forgerock/opendj/opendj-server-legacy/3.0.1.gluu/opendj-server-legacy-3.0.1.gluu.zip', '-O', openDJArchive])
        self.logIt("Unzipping %s in /opt/" % openDJArchive)
        # -n: never overwrite existing files; -q: quiet
        self.run(['unzip', '-n', '-q', openDJArchive, '-d', '/opt/' ])
        realLdapBaseFolder = os.path.realpath(self.ldapBaseFolder)
        self.run([self.cmd_chown, '-R', 'ldap:ldap', realLdapBaseFolder])
    def install_opendj(self):
        """Run the OpenDJ setup tool as user 'ldap', then apply the Java
        properties and stop the server (it is restarted later by
        install_ldap_server via run_service_command)."""
        self.logIt("Running OpenDJ Setup")
        # Copy opendj-setup.properties so user ldap can find it in /opt/opendj
        setupPropsFN = os.path.join(self.ldapBaseFolder, 'opendj-setup.properties')
        shutil.copy("%s/opendj-setup.properties" % self.outputFolder, setupPropsFN)
        ldapSetupCommand = '%s/setup' % self.ldapBaseFolder
        # OPENDJ_JAVA_HOME must be exported so setup picks the bundled JRE
        setupCmd = "cd /opt/opendj ; export OPENDJ_JAVA_HOME=" + self.jre_home + " ; " + " ".join([ldapSetupCommand,
                                    '--no-prompt',
                                    '--cli',
                                    '--propertiesFilePath',
                                    setupPropsFN,
                                    '--acceptLicense'
                                    ])
        self.run(['/bin/su',
                  'ldap',
                  '-c',
                  setupCmd])
        # Apply dsjavaproperties (JVM settings for the OpenDJ tools)
        dsjavaCmd = "cd /opt/opendj/bin ; %s" % self.ldapDsJavaPropCommand
        self.run(['/bin/su',
                  'ldap',
                  '-c',
                  dsjavaCmd
                  ])
        # Stop the server that setup left running
        stopDsJavaPropCommand = "%s/bin/stop-ds" % self.ldapBaseFolder
        dsjavaCmd = "cd /opt/opendj/bin ; %s" % stopDsJavaPropCommand
        self.run(['/bin/su',
                  'ldap',
                  '-c',
                  dsjavaCmd
                  ])
def detect_service_path(self):
service_path = '/sbin/service'
if self.os_type in ['centos', 'redhat', 'fedora'] and self.os_initdaemon == 'systemd':
service_path = '/usr/bin/systemctl'
elif self.os_type in ['debian', 'ubuntu']:
service_path = '/usr/sbin/service'
return service_path
def run_service_command(self, service, operation):
service_path = self.detect_service_path()
try:
if self.os_type in ['centos', 'redhat', 'fedora'] and self.os_initdaemon == 'systemd':
self.run([service_path, operation, service], None, None, True)
else:
self.run([service_path, service, operation], None, None, True)
except:
self.logIt("Error starting Jetty service '%s'" % operation)
self.logIt(traceback.format_exc(), True)
    def install_ldap_server(self):
        """Configure the freshly extracted OpenDJ instance end to end:
        service install, schema copy, restart, dsconfig changes, cert
        export and indexing.  The bind password file is created first and
        removed when done."""
        self.logIt("Running OpenDJ Setup")
        self.opendj_version = self.determineOpenDJVersion()
        self.createLdapPw()
        if self.ldap_type == 'opendj':
            self.setup_opendj_service()
            self.prepare_opendj_schema()
            # Restart so the newly copied schema files are loaded
            self.run_service_command('opendj', 'stop')
            self.run_service_command('opendj', 'start')
            self.configure_opendj()
            self.export_opendj_public_cert()
            self.index_opendj()
        # Always remove the plaintext password file, even if ldap_type differed
        self.deleteLdapPw()
def merge_dicts(self, *dict_args):
result = {}
for dictionary in dict_args:
result.update(dictionary)
return result
    def load_properties(self, fn):
        """Load a Java-style .properties file and set every key as an
        instance attribute, converting the literal strings 'True'/'False'
        to booleans.  Errors are logged, never raised.

        NOTE(review): 'Properties' is a module imported outside this view
        (presumably jprops-style); the file handle passed to p.load is
        never closed here.
        """
        self.logIt('Loading Properties %s' % fn)
        p = Properties.Properties()
        try:
            p.load(open(fn))
            properties_list = p.keys()
            for prop in properties_list:
                try:
                    self.__dict__[prop] = p[prop]
                    if p[prop] == 'True':
                        self.__dict__[prop] = True
                    elif p[prop] == 'False':
                        self.__dict__[prop] = False
                except:
                    self.logIt("Error loading property %s" % prop)
                    self.logIt(traceback.format_exc(), True)
        except:
            self.logIt("Error loading properties", True)
            self.logIt(traceback.format_exc(), True)
    def get_missing_files(self):
        """Fetch or regenerate the static OpenDJ assets this script needs:
        deprecated schema LDIFs, the index.json mapping and the CentOS 7
        systemd unit file.  Requires network access (wget from GitHub).
        """
        if os.path.exists(os.path.join(self.install_dir, 'static/opendj/deprecated')):
            # Regenerate the deprecated schema LDIFs from the JSON schema sources
            self.run(['wget', 'https://raw.githubusercontent.com/GluuFederation/community-edition-setup/master/schema/generator.py', '-O', './schema/generator.py'])
            cmd_l = ['python ./schema/manager.py generate --type opendj --filename ./schema/gluu_schema.json > ./static/opendj/deprecated/101-ox.ldif',
                     'python ./schema/manager.py generate --type opendj --filename ./schema/custom_schema.json > ./static/opendj/deprecated/77-customAttributes.ldif']
            for cmd in cmd_l:
                # os.system is used (not self.run) because of the shell redirection
                self.logIt('Running: ' + cmd)
                os.system(cmd)
            self.openDjschemaFiles = glob.glob(os.path.join(self.install_dir, 'static/opendj/deprecated/*.ldif'))
        if not os.path.exists(self.openDjIndexJson):
            self.run(['wget', 'https://raw.githubusercontent.com/GluuFederation/community-edition-setup/version_3.1.2/static/opendj/index.json', '-O', self.openDjIndexJson])
        if not os.path.exists(self.opendj_service_centos7):
            os.system('mkdir %s/static/opendj/systemd' % self.install_dir)
            self.run(['wget', 'https://raw.githubusercontent.com/GluuFederation/community-edition-setup/version_3.1.2/static/opendj/systemd/opendj.service', '-O', self.opendj_service_centos7])
def fix_shib_idp(self):
shib_ldap_prop_fn = '/opt/shibboleth-idp/conf/ldap.properties'
if os.path.exists(shib_ldap_prop_fn):
shib_ldap_prop = self.readFile(shib_ldap_prop_fn)
shib_ldap_prop = shib_ldap_prop.replace('cn=directory manager,o=gluu', 'cn=Directory Manager')
self.writeFile(shib_ldap_prop_fn, shib_ldap_prop)
if __name__ == '__main__':
    # Default installation flags for the Setup object.  Setup,
    # get_ldap_bind_pw, post_ldap_update and update_ox_ldap_prop are
    # defined earlier in this file, outside this view.
    setupOptions = {
        'install_dir': '.',
        'setup_properties': None,
        'noPrompt': False,
        'downloadWars': False,
        'installOxAuth': True,
        'installOxTrust': True,
        'installLDAP': True,
        'installHTTPD': True,
        'installSaml': False,
        'installAsimba': False,
        'installOxAuthRP': False,
        'installPassport': False,
        'allowPreReleasedApplications': False,
        'allowDeprecatedApplications': False,
        'installJce': False
    }
    installObject = Setup(setupOptions['install_dir'])
    installObject.load_properties(installObject.setup_properties_fn)
    installObject.check_properties()
    installObject.ldapPass = get_ldap_bind_pw()
    installObject.ldap_binddn='cn=Directory Manager'
    installObject.ldap_type = 'opendj'
    installObject.encode_passwords()
    # Get the OS type
    installObject.os_type = installObject.detect_os_type()
    # Get the init type
    installObject.os_initdaemon = installObject.detect_initd()
    # '-p' performs only the post-import LDAP update phase and exits.
    if len(sys.argv) > 1:
        if sys.argv[1] == '-p':
            post_ldap_update(installObject.ldap_binddn, installObject.ldapPass)
            sys.exit("Completed")
        else:
            sys.exit("Unrecognized argument")
    # Full migration: fetch assets, install and configure the new OpenDJ.
    installObject.get_missing_files()
    installObject.createLdapPw()
    installObject.downloadAndExtractOpenDJ()
    installObject.install_opendj()
    installObject.install_ldap_server()
    update_ox_ldap_prop(installObject.ldap_binddn, installObject.opendj_p12_fn, installObject.encoded_opendj_p12_pass)
    installObject.fix_shib_idp()
    print "\n", "-"*50,"\n"
    print "OpenDJ installation finished. Please import your data as described in document,"
    print "and re-run this script with -p argument."
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from raven import Client
from .server import Lights433Server
import clip
app = clip.App()
DEFAULT_SWITCH_CONF = "/etc/lights-433/switches.conf"
DEFAULT_SENTRY_CONF = "/etc/lights-433/sentry.conf"
log = logging.getLogger(__name__)
@app.main(description='An HTTP server daemon for controlling 433MHz '
          'light switches')
@clip.arg('serial', required=True, help='The port to the serial device '
          'that generates the signals')
@clip.opt('--host', default='127.0.0.1', type=str,
          help='The interface to listen and permit connections on')
@clip.opt('--port', default=8080, type=int,
          help='The port to run the server on')
@clip.opt('--baud', required=False, default=9600, type=int,
          help='Baud rate of the serial interface')
@clip.opt('--timeout', required=False, default=1, type=int,
          help='The timeout of the serial interface')
@clip.opt('--switches', default=DEFAULT_SWITCH_CONF, type=str,
          help='Path to the config file for users and signals')
@clip.opt('--sentry', required=False, default=DEFAULT_SENTRY_CONF, type=str,
          help='Path to the config file containing the Sentry capture URL')
@clip.flag('--resettable',
           help='Enables device resetting over pin 3 (assumed RPi)')
def lights433(host, port, resettable, serial, baud, timeout, switches, sentry):
    """Parse the CLI options, optionally configure a Sentry client from
    the URL file, and start the Lights433 HTTP server.

    Fix: '--switches' was declared with clip.arg (the decorator for
    positional arguments) and a contradictory required=True alongside a
    default value; it is an option, so it is now clip.opt with only the
    default.
    """
    if sentry:
        with open(sentry, 'r') as f:
            url = f.read()
        if not url and sentry == DEFAULT_SENTRY_CONF:
            log.warn("No sentry URL specified in [%s]" % DEFAULT_SENTRY_CONF)
        else:
            sentry_client = Client(url)
            log.info("Sentry client configured!")
    log.info("Loading switch configurations from [%s]" % DEFAULT_SWITCH_CONF)
    # sentry_client only exists when a URL was configured above
    server = Lights433Server(host, port, serial, baud, timeout, switches,
                             resettable, locals().get('sentry_client', None))
    server.run()
def main():
    # Entry point: run the clip app.  ClipExit is raised after clip has
    # already printed its help/error message, so it is safe to swallow.
    try:
        app.run()
    except clip.ClipExit:
        pass
if __name__ == '__main__':
    main()
Fixed the `required` flag handling: `--switches` is now declared with `clip.opt` (it is an option, not a positional argument), and the conflicting `required=True` was dropped in favor of its default value; redundant `required=False` markers on `--timeout` and `--sentry` were removed as well.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from raven import Client
from .server import Lights433Server
import clip
app = clip.App()
DEFAULT_SWITCH_CONF = "/etc/lights-433/switches.conf"
DEFAULT_SENTRY_CONF = "/etc/lights-433/sentry.conf"
log = logging.getLogger(__name__)
@app.main(description='An HTTP server daemon for controlling 433MHz '
          'light switches')
@clip.arg('serial', required=True, help='The port to the serial device '
          'that generates the signals')
@clip.opt('--host', default='127.0.0.1', type=str,
          help='The interface to listen and permit connections on')
@clip.opt('--port', default=8080, type=int,
          help='The port to run the server on')
@clip.opt('--baud', required=False, default=9600, type=int,
          help='Baud rate of the serial interface')
@clip.opt('--timeout', default=1, type=int,
          help='The timeout of the serial interface')
@clip.arg('--switches', default=DEFAULT_SWITCH_CONF, type=str,
          help='Path to the config file for users and signals')
@clip.opt('--sentry', default=DEFAULT_SENTRY_CONF, type=str,
          help='Path to the config file containing the Sentry capture URL')
@clip.flag('--resettable',
           help='Enables device resetting over pin 3 (assumed RPi)')
def lights433(host, port, resettable, serial, baud, timeout, switches, sentry):
    """Parse the CLI options, optionally configure a Sentry client from
    the URL file, and start the Lights433 HTTP server."""
    if sentry:
        with open(sentry, 'r') as f:
            url = f.read()
        if not url and sentry == DEFAULT_SENTRY_CONF:
            log.warn("No sentry URL specified in [%s]" % DEFAULT_SENTRY_CONF)
        else:
            sentry_client = Client(url)
            log.info("Sentry client configured!")
    log.info("Loading switch configurations from [%s]" % DEFAULT_SWITCH_CONF)
    # sentry_client only exists when a URL was configured above
    server = Lights433Server(host, port, serial, baud, timeout, switches,
                             resettable, locals().get('sentry_client', None))
    server.run()
def main():
    # Entry point: run the clip app.  ClipExit is raised after clip has
    # already printed its help/error message, so it is safe to swallow.
    try:
        app.run()
    except clip.ClipExit:
        pass
if __name__ == '__main__':
    main()
|
Multiple images for win32 icon
Index: pyglet/window/win32/__init__.py
===================================================================
--- pyglet/window/win32/__init__.py (revision 721)
+++ pyglet/window/win32/__init__.py (working copy)
@@ -482,40 +482,63 @@
return Win32MouseCursor(cursor)
def set_icon(self, *images):
- # TODO multiple icon sizes
-
# XXX Undocumented AFAICT, but XP seems happy to resize an image
- # of any size, so no scaling to 16x16,32x32 necessary.
- image = images[0].image_data
- image.format = 'BGRA'
- image.pitch = len(image.format) * image.width
+ # of any size, so no scaling necessary.
- info = BITMAPINFO()
- info.bmiHeader.biSize = sizeof(info.bmiHeader)
- info.bmiHeader.biWidth = image.width
- info.bmiHeader.biHeight = image.height
- info.bmiHeader.biPlanes = 1
- info.bmiHeader.biBitCount = 32
- info.bmiHeader.biCompression = BI_RGB
+ def best_image(width, height):
+ # A heuristic for finding closest sized image to required size.
+ image = images[0]
+ for img in images:
+ if img.width == width and img.height == height:
+ # Exact match always used
+ return img
+ elif img.width >= width and \
+ img.width * img.height > image.width * image.height:
+ # At least wide enough, and largest area
+ image = img
+ return image
- dc = _gdi32.CreateDCA('DISPLAY', None, None, None)
- bitmap = _gdi32.CreateDIBitmap(
- dc, info.bmiHeader, CBM_INIT, image.data, info, DIB_RGB_COLORS)
+ def get_icon(image):
+ image.format = 'BGRA'
+ image.pitch = len(image.format) * image.width
- # XXX Undocumented AFAICT, XP seems happy to use an 8-bit alpha
- # as mask instead of a bitmask.
- image.format = 'AAAA'
- bitmap_mask = _gdi32.CreateDIBitmap(
- dc, info.bmiHeader, CBM_INIT, image.data, info, DIB_RGB_COLORS)
+ info = BITMAPINFO()
+ info.bmiHeader.biSize = sizeof(info.bmiHeader)
+ info.bmiHeader.biWidth = image.width
+ info.bmiHeader.biHeight = image.height
+ info.bmiHeader.biPlanes = 1
+ info.bmiHeader.biBitCount = 32
+ info.bmiHeader.biCompression = BI_RGB
- iconinfo = ICONINFO()
- iconinfo.fIcon = True
- iconinfo.hbmMask = bitmap_mask
- iconinfo.hbmColor = bitmap
- icon = _user32.CreateIconIndirect(byref(iconinfo))
+ dc = _gdi32.CreateDCA('DISPLAY', None, None, None)
+ bitmap = _gdi32.CreateDIBitmap(
+ dc, info.bmiHeader, CBM_INIT, image.data, info, DIB_RGB_COLORS)
+ # XXX Undocumented AFAICT, XP seems happy to use an 8-bit alpha
+ # as mask instead of a bitmask.
+ image.format = 'AAAA'
+ bitmap_mask = _gdi32.CreateDIBitmap(
+ dc, info.bmiHeader, CBM_INIT, image.data, info, DIB_RGB_COLORS)
+
+ iconinfo = ICONINFO()
+ iconinfo.fIcon = True
+ iconinfo.hbmMask = bitmap_mask
+ iconinfo.hbmColor = bitmap
+ icon = _user32.CreateIconIndirect(byref(iconinfo))
+ return icon
+
+ # Set large icon
+ image = best_image(_user32.GetSystemMetrics(SM_CXICON),
+ _user32.GetSystemMetrics(SM_CYICON))
+ icon = get_icon(image)
_user32.SetClassLongA(self._hwnd, GCL_HICON, icon)
+ # Set small icon
+ image = best_image(_user32.GetSystemMetrics(SM_CXSMICON),
+ _user32.GetSystemMetrics(SM_CYSMICON))
+ icon = get_icon(image)
+ _user32.SetClassLongA(self._hwnd, GCL_HICONSM, icon)
+
# Private util
def _client_to_window_size(self, width, height):
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40722
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011 - 2012 -- Lars Heuer <heuer[at]semagia.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the project name nor the names of the contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""\
Tests classificationist parsing.
:author: Lars Heuer (heuer[at]semagia.com)
:organization: Semagia - <http://www.semagia.com/>
:license: BSD license
"""
from nose.tools import eq_
from cablemap.core.reader import parse_classificationists
_TEST_DATA = (
(u'10TOKYO397', u'Marc Wall', u'''FIELD
REF: STATE 015541
Classified By: Acting Deputy Chief of Mission Marc Wall for Reasons 1.4
(b) and (d)
¶1. (C) SUM'''),
(u'10GENEVA249', u'Rose E. Gottemoeller', u'''REF: 10 GENEVA 231 (SFO-GVA-VIII-088) CLASSIFIED BY: Rose E. Gottemoeller, Assistant Secretary, Department of State, VCI; REASON: 1.4(B), (D) '''),
(u'10GENEVA247', u'Rose E. Gottemoeller', u'''REF: 10 GENEVA 245 (SFO-GVA-VIII-086) CLASSIFIED BY: Rose E. Gottemoeller, Assistant Secretary, Department of State, VCI; REASON: 1.4(B), (D) ¶1. (U) This '''),
(u'10UNVIEVIENNA77', u'Glyn T. Davies', u'''Classified By: Ambassador Glyn T. Davies for reasons 1.4 b and d '''),
(u'10WARSAW117', u'F. Daniel Sainz', u'''Classified By: Political Counselor F. Daniel Sainz for Reasons 1.4 (b) and (d) '''),
(u'10STATE16019', u'Karin L. Look', u'''Classified By: Karin L. Look, Acting ASSISTANT SECRETARY, VCI. Reason: 1.4 (b) and (d).'''),
(u'10LILONGWE59', u'Bodde Peter', u'''CLASSIFIED BY: Bodde Peter, Ambassador; REASON: 1.4(B) '''),
(u'95ZAGREB4339', u'ROBERT P. FINN', u'''
1. (U) CLASSIFIED BY ROBERT P. FINN, DEPUTY CHIEF OF
MISSION. REASON: 1.5 (D)
'''),
(u'95DAMASCUS5748', u'CHRISTOPHER W.S. ROSS', u'''SUBJECT: HAFIZ AL-ASAD: LAST DEFENDER OF ARABS
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY:
CHRISTOPHER W.S. ROSS, AMBASSADOR. REASON: 1.5 (D) .
2. SUMMAR'''),
(u'95TELAVIV17504', (), u'''
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY SECTION 1.5 (B)
AND (D). NIACT PRECEDENCE BECAUSE OF GOVERNMENT CRISIS IN
ISRAEL.
2. SU'''),
(u'95RIYADH5221', u'THEODORE KATTOUF', u'''
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY DCM
THEODORE KATTOUF - 1.5 B,D.
2. (C)'''),
(u'96ADDISABABA1545', u'JEFFREY JACOBS', u'''
1. (U) CLASSIFIED BY POLOFF JEFFREY JACOBS, 1.5 (D).
2. (C)'''),
(u'96AMMAN2094', u'ROBERT BEECROFT', u'''
1. (U) CLASSIFIED BY CHARGE ROBERT BEECROFT; REASON 1.5 (D).
2. (C) '''),
(u'96STATE86789', u'MARY BETH LEONARD', u'''
1. CLASSIFIED BY AF/C - MARY BETH LEONARD, REASON 1.5
(D). '''),
(u'96NAIROBI6573', u'TIMOTHY CARNEY', u'''
1. CLASSIFIED BY AMBASSADOR TO SUDAN TIMOTHY CARNEY.
REASON 1.5(D).
'''),
(u'96RIYADH2406', u'THEODORE KATTOUF', u'''SUBJECT: CROWN PRINCE ABDULLAH THE DIPLOMAT
1. (U) CLASSIFIED BY CDA THEODORE KATTOUF, REASON 1.5.D.
2. '''),
(u'96RIYADH2696', u'THEODORE KATTOUF', u'''
1. (U) CLASSIFIED BY CHARGE D'AFFAIRES THEODORE
KATTOUF: 1.5 B, D.
'''),
(u'96ISLAMABAD5972', u'THOMAS W. SIMONS, JR.', u'''
1. (U) CLASSIFIED BY THOMAS W. SIMONS, JR., AMBASSADOR.
REASON: 1.5 (B), (C) AND (D).
'''),
(u'96ISLAMABAD5972', u'Thomas W. Simons, Jr.', u'''
1. (U) CLASSIFIED BY THOMAS W. SIMONS, JR., AMBASSADOR.
REASON: 1.5 (B), (C) AND (D).
''', True),
(u'96STATE183372', u'LEE 0. COLDREN', u'''
1. (U) CLASSIFIED BY LEE 0. COLDREN, DIRECTOR, SA/PAB,
DEPARTMENT OF STATE. REASON: 1.5(D).
'''),
(u'96STATE183372', u'Lee O. Coldren', u'''
1. (U) CLASSIFIED BY LEE 0. COLDREN, DIRECTOR, SA/PAB,
DEPARTMENT OF STATE. REASON: 1.5(D).
''', True),
(u'96ASHGABAT2612', u'TATIANA C. GFOELLER', u'''
1. (U) CLASSIFIED BY CHARGE TATIANA C. GFOELLER.
REASON: 1.5 D.
'''),
(u'96BOGOTA8773', u'S.K. ABEYTA', u'''
1. CLASSIFIED BY POL/ECONOFF. S.K. ABEYTA. REASON: 1.5(D)
'''),
(u'96STATE194868', u'E. GIBSON LANPHER, JR.', u'''
1. (U) CLASSIFIED BY E. GIBSON LANPHER, JR., ACTING
ASSISTANT SECRETARY OF STATE FOR SOUTH ASIAN AFFAIRS,
DEPARTMENT OF STATE. REASON: 1.5(D).
'''),
(u'96JAKARTA7841', u'ED MCWILLIAMS', u'''
1. (U) CLASSIFIED BY POL COUNSELOR ED MCWILLIAMS;
REASON 1.5(D)
'''),
(u'96JERUSALEM3094', u'EDWARD G. ABINGTON, JR.', u'''
1. CLASSIFIED BY CONSUL GENERAL EDWARD G. ABINGTON, JR. REASON
1.5 (B) AND (D).
'''),
(u'96BOGOTA10967', u'S.K. ABEYTA', u'''
1. (U) CLASSIFIED BY POL/ECONOFF S.K. ABEYTA. REASON 1.5(D).
'''),
(u'04MUSCAT2112', u'Richard L. Baltimore, III', u'''
Classified By: Ambassador Richard L. Baltimore, III.
Reasons: 1.4 (b) and (d).
'''),
(u'04MUSCAT2112', u'Richard L. Baltimore, III', u'''
Classified By: Ambassador Richard L. Baltimore, III.
Reasons: 1.4 (b) and (d).
''', True),
(u'05OTTAWA1975', u'Patricia Kim-Scott', u'''
Classified By: Pol/Mil Officer Patricia Kim-Scott. Reason E.O. 12958,
1.4 (b) and (d).
'''),
(u'05BOGOTA6208', u'William B. Wood', u'''
Classified By: Ambassador William B. Wood; reasons 1.4
(b) and (d)
'''),
(u'05TAIPEI2839', u'Douglas Paal', u'''
Classified By: AIT Director Douglas Paal, Reason(s): 1.4 (B/D).
'''),
(u'05DHAKA3073', u'D.C. McCullough', u'''
Classified By: A/DCM D.C. McCullough, reason para 1.4 (b)
'''),
(u'09NAIROBI1132', u'Jessica Davis Ba', u'''
Classified By: Pol/Econ Officer Jessica Davis Ba for reasons 1.4(b) and
(d)
'''),
(u'08ROME1541', u'Liz Dibble', u'''
Classified By: Classified by DCM Liz Dibble for reasons 1.4 (b) and
(d).
'''),
(u'06BAGHDAD2082', u'DANIEL SPECKHARD', ur'''
Classified By: CHARGE D\'AFFAIRES DANIEL SPECKHARD FOR REASONS 1.4 (A),
(B) AND (D)
'''),
(u'05ANKARA4653', u'Nancy McEldowney', u'''
Classified By: (U) CDA Nancy McEldowney; E.O. 12958, reasons 1.4 (b,d)
'''),
(u'05QUITO2057', u'LARRY L. MEMMOTT', u'''
Classified By: ECON LARRY L. MEMMOTT, REASONS 1.4 (B,D)
'''),
(u'06HONGKONG3559', u'LAURENT CHARBONNET', u'''
CLASSIFIED BY: ACTING DEPUTY PRINCIPAL OFFICER LAURENT CHARBONNET. REA
SONS: 1.4 (B,D)
'''),
(u'09BAGHDAD791', u'Patricia Butenis', u'''
Classified By: Charge d\' Affairs Patricia Butenis for reasons 1.4 (b) a
nd (d)
'''),
(u'06OSLO19', u'Christopher W. Webster', u'''
Classified By: Charge d\'Affaires a.i. Christopher W. Webster,
reason 1.4 (b) and (d)
'''),
(u'08BEIJING3386', u'Aubrey Carlson', u'''
Classified By: Political Section Minister Counselor Aubrey Carlson. Re
asons 1.4 (b/d).
'''),
(u'09MOSCOW2393', u'Susan M. Elliott', u'''
Classified By: Political Minister Counselor Susan M. Elliott for reason
s: 1.4 (b), (d).
'''),
(u'10BRUSSELS66', u'Christopher R. Davis', u'''
Classified By: Political Minister-Counselor Christopher R. Davis for re
ason 1.4 (b/d)
'''),
(u'06BEIJING22125', u'ROBERT LUKE', u'''
Classified By: (C) CLASSIFIED BY MINISTER COUNSELOR FOR ECONOMIC AFFAIR
S ROBERT LUKE; REASON 1.4 (B) AND (D).
'''),
(u'07CAIRO622', u'William R. Stewart', u'''
Classified by: Minister Counselor for Economic and
Political Affairs William R. Stewart for reasons 1.4(b) and
(d).
'''),
(u'07BAGHDAD1188', u'Daniel Speckhard', u'''
Classified By: Charge Affaires Daniel Speckhard. Reasons: 1.4 (b) and
(d).
'''),
(u'08PARIS1131', u'STUART DWYER', u'''
Classified By: ECONCOUNS STUART DWYER FOR REASONS 1.4 B AND D
'''),
(u'08ATHENS985', u'Jeff Hovenier', u'''
Classified By: A/Political Counselor Jeff Hovenier for
1.4 (b) and (d)
'''),
(u'09BEIJING2690', u'William Weinstein', u'''
Classified By: This message classified by Econ Minister Counselor
William Weinstein for reasons 1.4 (b), (d) and (e).
'''),
(u'06VILNIUS945', u'Rebecca Dunham', u'''
Classified By: Political and Economic Section Chief Rebecca Dunham for
reasons 1.4 (b) and (d)
'''),
(u'07BAGHDAD2781', u'Howard Keegan', u'''
Classified By: Kirkuk PRT Team Leader Howard Keegan for reason 1.4 (b)
and(d).
'''),
(u'09HARARE864', u'Donald Petterson', u'''
Classified By: Charge d\'affaires, a.i. Donald Petterson for reason 1.4
(b).
'''),
(u'04MANAMA525', u'Robert S. Ford', u'''
Classified By: Charge de Affaires Robert S. Ford for reasons
1.4 (b) and (d).
'''),
(u'08STATE56778', u'Patricia A. McNerney', u'''
Classified By: ISN Acting Assistant Secretary
Patricia A. McNerney, Reasons 1.4 b, c, and d
'''),
(u'07BRUSSELS1462', u'Larry Wohlers', u'''
Classified By: USEU Political Minister Counselor Larry Wohlers
for reasons 1.4 (b) and (d).
'''),
(u'09KABUL2261', u'Hoyt Yee', u'''
Classified By: Interagency Provincial Affairs Deputy Coordinator Hoyt Y
ee for reasons 1.4 (b) and (d)
'''),
(u'09KABUL1233', u'Patricia A McNerney', u'''
Classified By: PRT and Sub-National Governance Acting Director Patricia
A McNerney for reasons 1.4 (b) and (d)
'''),
(u'09BRUSSELS1288', u'CHRISTOPHER DAVIS', u'''
Classified By: CLASSIFIED BY USEU MCOUNSELOR CHRISTOPHER DAVIS, FOR REA
SONS 1.4 (B) AND (D)
'''),
(u'06TAIPEI3165', u'Stephen M. Young', u'''
Classified By: Classified by AIT DIR Stephen M. Young.
Reasons: 1.4 b, d.
'''),
(u'07BRUSSELS1208', u'Courtney Nemroff', u'''
Classified By: Institutional Affairs Unit Chief Courtney Nemroff for re
asons 1.4 (b) & (d)
'''),
(u'05CAIRO8602', u'Michael Corbin', u'''
Classified by ECPO Minister-Counselour Michael Corbin for
reasons 1.4 (b) and (d).
'''),
(u'09MADRID1210', u'Arnold A. Chacon', u'''
Classified By: Charge d'Affaires, a.i., Arnold A. Chacon
1.(C) Summary: In his meetings with Spanish officials,
Special Envoy for Eurasian Energy'''),
(u'05SINGAPORE887', u'Laurent Charbonnet', u'''
Classified By: E/P Counselor Laurent Charbonnet, Reasons 1.4(b)(d)
'''),
(u'09SINGAPORE677', u'Dan Jassem', u'''
Classified By: Acting E/P Counselor Dan Jassem for reasons 1.4 (b) and
(d)
'''),
(u'08BELGRADE1189', u'Thatcher Scharpf', u'''
Classified By: Acting Deputy Chief of Mission Thatcher Scharpf for reas
ons 1.4(b/d).
'''),
(u'09BAGHDAD3319', u'Rachna Korhonen', u'''
Classified By: PRT Kirkuk Governance Section Head Rachna Korhonen for r
easons 1.4 (b) and (d).
'''),
(u'04ANKARA5897', u'Thomas Goldberger', u'''
Classified By: (U) Classified by Economic Counselor Thomas Goldberger f
or reasons 1.4 b,d.
'''),
(u'00HARARE3759', u'TOM MCDONALD', u'''
CLASSIFIED BY AMBASSADOR TOM MCDONALD.
CONFIDENTIAL
PAGE 02 HARARE 03759 01 OF 03 111533Z
REASONS: 1.5 (B) AND (D).
1. (C) SUMMARY: ALTHOUGH WIDESPREAD FEARS OF A
SPIKE'''),
(u'07STATE156455', u'Glyn T. Davies', u'''
Classified By: Glyn T. Davies
SUMMARY
-------
'''),
(u'03GUATEMALA1727', u'Erik Hall', u'''
Classified By: Labor Attache Erik Hall. Reason 1.5 (d).
'''),
(u'05VILNIUS503', u'LARRY BEISEL', u'''
Classified By: DEFENSE ATTACHE LTC LARRY BEISEL FOR REASONS 1.4 (B) AND
(D).
'''),
(u'08USUNNEWYORK729', u'Carolyn L. Willson', u'''
Classified By: USUN Legal Adviser Carolyn L. Willson, for reasons
1.4(b) and (d)
'''),
(u'04BRUSSELS4688', u'Jeremy Brenner', u'''
Classified By: USEU polmil officer Jeremy Brenner for reasons 1.4 (b) a
nd (d)
'''),
(u'08GUATEMALA1416', u'Drew G. Blakeney', u'''
Classified By: Pol/Econ Couns Drew G. Blakeney for reasons 1.4 (b&d).
'''),
(u'08STATE77798', u'Brian H. Hook', u'''
Classified By: IO Acting A/S Brian H. Hook, E.O. 12958,
Reasons: 1.4(b) and (d)
'''),
(u'05ANKARA1071', u'Margaret H. Nardi', u'''
Classified By: Acting Counselor for Political-Military Affiars Margaret
H. Nardi for reasons 1.4 (b) and (d).
'''),
(u'08MOSCOW3655', u'David Kostelancik', u'''
Classified By: Deputy Political M/C David Kostelancik. Reasons 1.4 (b)
and (d).
'''),
(u'09STATE75025', u'Richard C. Holbrooke', u'''
Classified By: Special Representative for Afghanistan and Pakistan
Richard C. Holbrooke
1. (U) This is an action request; see paragraph 4.
'''),
(u'10KABUL688', u'Joseph Mussomeli', u'''
Classified By: Assistant Chief of Mission Joseph Mussomeli for Reasons
1.4 (b) and (d)
'''),
(u'98USUNNEWYORK1638', u'HOWARD STOFFER', u'''
CLASSIFIED BY DEPUTY POLITICAL COUNSEL0R HOWARD STOFFER
PER 1.5 (B) AND (D). ACTION REQUEST IN PARA 10 BELOW.
'''),
(u'02ROME3119', u'PIERRE-RICHARD PROSPER', u'''
CLASSIFIED BY: AMBASSADOR-AT-LARGE PIERRE-RICHARD PROSPER
FOR REASONS 1.5 (B) AND (D)
'''),
(u'02ANKARA8447', u'Greta C. Holtz', u'''
Classified by Consul Greta C. Holtz for reasons 1.5 (b) & (d).
'''),
(u'09USUNNEWYORK282', u'SUSAN RICE', u'''
Classified By: U.S. PERMANENT REPRESENATIVE AMBASSADOR SUSAN RICE
FOR REASONS 1.4 B/D
'''),
(u'09DHAKA339', u'Geeta Pasi', u'''
Classified By: Charge d'Affaires, a.i. Geeta Pasi. Reasons 1.4 (b) and
(d)
'''),
(u'06USUNNEWYORK2273', u'Alejandro D. Wolff', u'''
Classified By: Acting Permanent Representative Alejandro D. Wolff
per reasons 1.4 (b) and (d)
'''),
(u'08ISLAMABAD1494', u'Anne W. Patterson', u'''
Classified By: Ambassador Anne W. Patterson for reaons 1.4 (b) and (d).
1. (C) Summary: During'''),
(u'08BERLIN1150', u'Robert Pollard', u'''
Classified By: Classified by Economic Minister-Counsellor
Robert Pollard for reasons 1.4 (b) and (d)
'''),
(u'08STATE104902', u'DAVID WELCH', u'''
Classified By: 1. CLASSIFIED BY NEA ASSISTANT SECRETARY DAVID WELCH
REASONS: 1.4 (B) AND (D)
'''),
(u'07VIENTIANE454', u'Mary Grace McGeehan', u'''
Classified By: Charge de'Affairs ai. Mary Grace McGeehan for reasons 1.
4 (b) and (d)
'''),
(u'07ROME1948', u'William Meara', u'''
Classified By: Acting Ecmin William Meara for reasons 1.4 (b) and (d)
'''),
(u'07USUNNEWYORK545', u'Jackie Sanders', u'''
Classified By: Amb. Jackie Sanders. E.O 12958. Reasons 1.4 (B&D).
'''),
(u'06USOSCE113', u'Bruce Connuck', u'''
Classified By: Classified by Political Counselor Bruce Connuck for Reas
(b) and (d).
'''),
(u'09DOHA404', u'Joseph LeBaron', u'''
Classified By: Ambassaor Joseph LeBaron for reasons 1.4 (b and d).
'''),
# (u'09DOHA404', u'Joseph LeBaron', u'''
#Classified By: Ambassaor Joseph LeBaron for reasons 1.4 (b and d).
# ''', True),
(u'09RANGOON575', u'Thomas Vajda', u'''
Classified By: Charge d'Afairs (AI) Thomas Vajda for Reasons 1.4 (b) &
(d
'''),
(u'03ROME3107', u'TOM COUNTRYMAN', u'''
Classified By: POL MIN COUN TOM COUNTRYMAN, REASON 1.5(B)&(D).
'''),
(u'06USUNNEWYORK732', u'Molly Phee', u'''
Classified By: Deputy Political Counselor Molly Phee,
for Reasons 1.4 (B and D)
'''),
(u'06BAGHDAD1552', u'David M. Satterfield', u'''
Classified By: Charge d'Affaires David M. Satterfield for reasons 1.4 (
b) and (d)
'''),
(u'06ABUJA232', u'Erin Y. Tariot', u'''
Classified By: USDEL Member Erin Y. Tariot, reasons 1.4 (b,d)
'''),
(u'09ASTANA184', u'RICAHRD E. HOAGLAND', u'''
Classified By: AMBASSADOR RICAHRD E. HOAGLAND: 1.2 (B), (D)
'''),
# (u'09ASTANA184', u'Richard E. Hoagland', u'''
#Classified By: AMBASSADOR RICAHRD E. HOAGLAND: 1.2 (B), (D)
# ''', True),
(u'09CANBERRA428', u'John W. Crowley', u'''
Classified By: Deputy Political Counselor: John W. Crowley, for reasons
1.4 (b) and (d)
'''),
(u'08TASHKENT706', u'Molly Stephenson', u'''
Classified By: Classfied By: IO Molly Stephenson for reasons 1.4 (b) a
nd (d).
'''),
(u'08CONAKRY348', u'T. SCOTT BROWN', u'''
Classified By: ECONOFF T. SCOTT BROWN FOR REASONS 1.4 (B) and (D)
'''),
(u'07STATE125576', u'Margaret McKelvey', u'''
Classified By: PRM/AFR Dir. Margaret McKelvey-reasons 1.4(b/d)
'''),
(u'09BUDAPEST372', u'Steve Weston', u'''
Classified By: Acting Pol/Econ Counselor:Steve Weston,
reasons 1.4 (b and d)
'''),
(u'04TAIPEI3162', u'David J. Keegan', u''''
Classified By: AIT Deputy Director David J. Keegan, Reason: 1.4 (B/D)
'''),
(u'04TAIPEI3521', u'David J. Keegan', u'''
Classified By: AIT Acting Director David J. Keegan, Reason: 1.4 (B/D)
'''),
(u'04TAIPEI3919', u'David J. Keegan', u'''
Classified By: AIT Director David J. Keegan, Reason 1.4 (B/D)
'''),
(u'08JAKARTA1142', u'Stanley A. Harsha', u'''
Classified By: Acting Pol/C Stanley A. Harsha for reasons 1.4 (b+d).
'''),
(u'06ISLAMABAD16739', u'MARY TOWNSWICK', u'''
Classified By: DOS CLASSIFICATION GUIDE BY MARY TOWNSWICK
1. (C) Summary. With limited government support, Islamic
banking has gained momentum in Pakistan in the past three
years. The State Bank of Pakistan (SBP) reports that the
capital base of the Islamic banking system has more than
doubled since 2003 as the number of Islamic banks operating
in Pakistan rose from one to four. A media analysis of
Islamic banking in Pakistan cites an increase in the number
of conventional banks'''),
(u'05DJIBOUTI802', u'JEFFREY PURSELL', u'''
(U) CLASSIFIED BY TDY RSO JEFFREY PURSELL FOR REASON 1.5 C.
'''),
(u'09STATE82567', u'Eliot Kang', u'''
Classified By: Acting DAS for ISN Eliot Kang. Reasons 1.4 (b) and (d)
'''),
(u'04ANKARA5764', u'Charles O. Blah a', u'''
Classified By: Classified by Deputy Political Counselor Charles O. Blah
a, E.O. 12958, reasons 1.4 (b) and (d).
'''),
# (u'04ANKARA5764', u'Charles O. Blaha', u'''
#Classified By: Classified by Deputy Political Counselor Charles O. Blah
#a, E.O. 12958, reasons 1.4 (b) and (d).
# ''', True),
(u'04ANKARA5764', u'Charles O. Blah a', u'''
Classified By: Classified by Deputy Political Counselor Charles O. Blah
a, E.O. 12958, reasons 1.4 (b) and (d).
'''),
(u'10VIENNA195', u'J. Dean Yap', u'''
Classified by: DCM J. Dean Yap (acting) for reasons 1.4 (b)
and (d).
'''),
(u'03HARARE175', u'JOHN S. DICARLO', u'''
Classified By: RSO - JOHN S. DICARLO. REASON 1.5(D)
'''),
(u'08LONDON2968', u'Greg Berry', u'''
Classified By: PolMinCons Greg Berry, reasons 1.4 (b/d).
'''),
(u'08HAVANA956', u'Jonathan Farrar', u'''
Classified By: COM Jonathan Farrar for reasons 1.5 (b) and (d)
'''),
(u'09BAGHDAD253', u'Robert Ford', u'''
Classified By: Acting Deputy Robert Ford. Reasons 1.4 (b) and (d)
'''),
(u'09TIRANA81', u'JOHN L. WITHERS II', u'''
Classified By: AMBASSADOR JOHN L. WITHERS II FR REASONS 1.4 (b) AND (d
).
'''),
(u'05HARARE383', u'Eric T. Schultz', u'''
Classified By: Charge d'Affaires a.i. Eric T. Schultz under Section 1.4
b/d
'''),
(u'07LISBON2591', u'Jenifer Neidhart', u'''
Classified By: Pol/Econ Off Jenifer Neidhart for reasons 1.4 (b) and (d
)
'''),
(u'07STATE171234', u'Lawrence E. Butler', u'''
Classified By: NEA Lawrence E. Butler for reasons EO 12958
1.4(b),(d), and (e).
'''),
(u'04AMMAN8544', u'David Hale', u'''
Classified By: Charge d'Affaries David Hale for Reasons 1.4 (b), (d)
'''),
(u'07NEWDELHI5334', u'Ted Osius', u'''
Classified By: Acting DCM/Ted Osius for reasons 1.4 (b and d)
'''),
(u'04JAKARTA5072', u'ANTHONY C. WOODS', u'''
Classified By: EST&H OFFICER ANTHONY C. WOODS FOR REASON 1.5 (b, d)
'''),
(u'03AMMAN2822', u'Edward W. Gnehm', u'''
Classified By: Ambassador Edward W. Gnehm. Resons 1.5 (B) and (D)
'''),
(u'08CANBERRA1335', u'Daniel A. Clune', u'''
Classified By: Deputy Chief of Mission: Daniel A. Clune: Reason: 1.4 (c
) and (d)
'''),
(u'09HAVANA665', u'Charles Barclay', u'''
Classified By: CDA: Charles Barclay for reQ#8$UQ8ML#C may choke oQhQGTzovisional\" controls, such as
price caps and limits on the amount any one person could buy.
3. (SBU) Furthering speculation that the private markets
were under the gun, official reports have resurfaced in
recent months accusing private markets of artificially
maintaining higher'''),
(u'08STATE8993', u'Gregory B. Starr', u'''
1. (U) Classified by Acting Assistant Secretary for Diplomatic
Security Gregory B. Starr for E.O. 12958 reasons 1.4 (c) and
(d).
'''),
(u'09ISTANBUL137', u'Sandra Oudkirk', u'''
Classified By: ConGen Istanbul DPO Sandra Oudkirk; Reason 1.5 (d)
'''),
(u'08BANGKOK1778', u'James F. Entwistle', u'''
Classified By: Charge, d,Affaires a. i. James F. Entwistle, reason 1.4
(b) and (d).
'''),
(u'08MANAMA301', u'Christopher Henzel', u'''
Classified By: Charge d,Affaires a.i. Christopher Henzel, reasons 1.4(b
) and (d).
'''),
(u'06COLOMBO123', u'Robert O. Blake, Jr.', u'''
Classified By: Abassador Robert O. Blake, Jr. for reasons
1.4 (b and (d).
'''),
(u'08YEREVAN907', u'Marie Yovanovitch', u'''
Classified By: Amabassador Marie Yovanovitch. Reason 1.4 (B/D)
'''),
(u'09QUITO329', u'Heather M. Hodges', u'''
Classified By: AMB Heather M. Hodges for reason 1.4 (D)
'''),
#TODO: Should be: [Karl Wycoff, Shari Villarosa]
(u'09STATE38028', u'KARL WYCOFF', u'''
CLASSIFIED BY AF KARL WYCOFF, ACTING AND S/CT DAS SHARI
VILLAROSA ; E.O. 12958 REASON: 1.4 (B) AND (D)
'''),
(u'04ABUJA2060', u'BRUCE EHRNMAN', u'''
Classified By: AF SPECIAL ADVISOR BRUCE EHRNMAN FOR REASONS 1.5 (B) AND
(D)
'''),
(u'06ISLAMABAD3684', u'RCROCKER', u'''
Classified By: AMB:RCROCKER, Reasons 1.4 (b) and (c)
'''),
(u'06MANAMA184', u'William T.Monroe', u'''
Classified By: Classified by Ambassadior William T.Monroe. Reasons: 1.
4 (b)(d)
'''),
(u'07SANSALVADOR263', u'Charles Glazer', u'''
Classified By: Ambasasdor Charles Glazer, Reasons
1.4 (b) and (d)
'''),
(u'05BRUSSELS1549', u'Michael Ranneberger', u'''
Classified By: AF PDAS Michael Ranneberger. Reasons 1.5 (b) and (d).
'''),
(u'09STATE14163', u'Mark Boulware', u'''
Classified By: AF Acting DAS Mark Boulware, Reasons 1.4 (b) and (d).
'''),
(u'06AITTAIPEI1142', u'Michael R. Wheeler', u'''
Classified By: IPO Michael R. Wheeler for reason 1.4(G)(E)
'''),
(u'08TAIPEI1038', u'Stephen M. Young', u'''
Classified By: AIT Chairman Stephen M. Young,
Reasons: 1.4 (b/d)
'''),
(u'09STATE96519', u'Ellen O. Tauscher', u'''
Classified By: T U/S Ellen O. Tauscher for Reasons 1.4 a,b,and d.
'''),
(u'08NAIROBI232', u'JOHN M. YATES', u'''
Classified By: SPECIAL ENVOY JOHN M. YATES
1. (C) '''),
(u'07COLOMBO769', u'Robert O. Blake, Jr.', u'''
Classified By: Ambassodor Robert O. Blake, Jr. for reasons 1.4 (b, d).
'''),
(u'04DJIBOUTI1541', u'MARGUERITA D. RAGSDALE', u'''
Classified By: AMBASSSADOR MARGUERITA D. RAGSDALE.
REASONS 1.4 (B) AND (D).
'''),
)
def test_parse_classificationist():
    """\
    Nose generative test: checks ``parse_classificationists`` against every
    entry of ``_TEST_DATA``.

    Each test case is ``(cable_id, expected, content)`` or
    ``(cable_id, expected, content, normalize)``; when the 4th element is
    omitted, name normalization is disabled.
    """
    def check(cable_id, expected, content, normalize):
        # A single expected name is allowed as a bare string; wrap it so the
        # comparison is always tuple-vs-tuple.
        want = expected if isinstance(expected, tuple) else (expected,)
        eq_(want, tuple(parse_classificationists(content, normalize)))
    for testcase in _TEST_DATA:
        normalize = False
        if len(testcase) == 3:
            cable_id, expected, content = testcase
        else:
            cable_id, expected, content, normalize = testcase
        yield check, cable_id, expected, content, normalize
if __name__ == '__main__':
    # Run this module's generative tests directly through the nose runner
    # (required because test_parse_classificationist yields test cases, a
    # nose-specific protocol that plain "python thisfile.py" would not execute).
    import nose
    nose.core.runmodule()
Updated test case
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011 - 2012 -- Lars Heuer <heuer[at]semagia.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the project name nor the names of the contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""\
Tests classificationist parsing.
:author: Lars Heuer (heuer[at]semagia.com)
:organization: Semagia - <http://www.semagia.com/>
:license: BSD license
"""
from nose.tools import eq_
from cablemap.core.reader import parse_classificationists
_TEST_DATA = (
(u'10TOKYO397', u'Marc Wall', u'''FIELD
REF: STATE 015541
Classified By: Acting Deputy Chief of Mission Marc Wall for Reasons 1.4
(b) and (d)
¶1. (C) SUM'''),
(u'10GENEVA249', u'Rose E. Gottemoeller', u'''REF: 10 GENEVA 231 (SFO-GVA-VIII-088) CLASSIFIED BY: Rose E. Gottemoeller, Assistant Secretary, Department of State, VCI; REASON: 1.4(B), (D) '''),
(u'10GENEVA247', u'Rose E. Gottemoeller', u'''REF: 10 GENEVA 245 (SFO-GVA-VIII-086) CLASSIFIED BY: Rose E. Gottemoeller, Assistant Secretary, Department of State, VCI; REASON: 1.4(B), (D) ¶1. (U) This '''),
(u'10UNVIEVIENNA77', u'Glyn T. Davies', u'''Classified By: Ambassador Glyn T. Davies for reasons 1.4 b and d '''),
(u'10WARSAW117', u'F. Daniel Sainz', u'''Classified By: Political Counselor F. Daniel Sainz for Reasons 1.4 (b) and (d) '''),
(u'10STATE16019', u'Karin L. Look', u'''Classified By: Karin L. Look, Acting ASSISTANT SECRETARY, VCI. Reason: 1.4 (b) and (d).'''),
(u'10LILONGWE59', u'Bodde Peter', u'''CLASSIFIED BY: Bodde Peter, Ambassador; REASON: 1.4(B) '''),
(u'95ZAGREB4339', u'ROBERT P. FINN', u'''
1. (U) CLASSIFIED BY ROBERT P. FINN, DEPUTY CHIEF OF
MISSION. REASON: 1.5 (D)
'''),
(u'95DAMASCUS5748', u'CHRISTOPHER W.S. ROSS', u'''SUBJECT: HAFIZ AL-ASAD: LAST DEFENDER OF ARABS
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY:
CHRISTOPHER W.S. ROSS, AMBASSADOR. REASON: 1.5 (D) .
2. SUMMAR'''),
(u'95TELAVIV17504', (), u'''
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY SECTION 1.5 (B)
AND (D). NIACT PRECEDENCE BECAUSE OF GOVERNMENT CRISIS IN
ISRAEL.
2. SU'''),
(u'95RIYADH5221', u'THEODORE KATTOUF', u'''
1. CONFIDENTIAL - ENTIRE TEXT. CLASSIFIED BY DCM
THEODORE KATTOUF - 1.5 B,D.
2. (C)'''),
(u'96ADDISABABA1545', u'JEFFREY JACOBS', u'''
1. (U) CLASSIFIED BY POLOFF JEFFREY JACOBS, 1.5 (D).
2. (C)'''),
(u'96AMMAN2094', u'ROBERT BEECROFT', u'''
1. (U) CLASSIFIED BY CHARGE ROBERT BEECROFT; REASON 1.5 (D).
2. (C) '''),
(u'96STATE86789', u'MARY BETH LEONARD', u'''
1. CLASSIFIED BY AF/C - MARY BETH LEONARD, REASON 1.5
(D). '''),
(u'96NAIROBI6573', u'TIMOTHY CARNEY', u'''
1. CLASSIFIED BY AMBASSADOR TO SUDAN TIMOTHY CARNEY.
REASON 1.5(D).
'''),
(u'96RIYADH2406', u'THEODORE KATTOUF', u'''SUBJECT: CROWN PRINCE ABDULLAH THE DIPLOMAT
1. (U) CLASSIFIED BY CDA THEODORE KATTOUF, REASON 1.5.D.
2. '''),
(u'96RIYADH2696', u'THEODORE KATTOUF', u'''
1. (U) CLASSIFIED BY CHARGE D'AFFAIRES THEODORE
KATTOUF: 1.5 B, D.
'''),
(u'96ISLAMABAD5972', u'THOMAS W. SIMONS, JR.', u'''
1. (U) CLASSIFIED BY THOMAS W. SIMONS, JR., AMBASSADOR.
REASON: 1.5 (B), (C) AND (D).
'''),
(u'96ISLAMABAD5972', u'Thomas W. Simons, Jr.', u'''
1. (U) CLASSIFIED BY THOMAS W. SIMONS, JR., AMBASSADOR.
REASON: 1.5 (B), (C) AND (D).
''', True),
(u'96STATE183372', u'LEE 0. COLDREN', u'''
1. (U) CLASSIFIED BY LEE 0. COLDREN, DIRECTOR, SA/PAB,
DEPARTMENT OF STATE. REASON: 1.5(D).
'''),
(u'96STATE183372', u'Lee O. Coldren', u'''
1. (U) CLASSIFIED BY LEE 0. COLDREN, DIRECTOR, SA/PAB,
DEPARTMENT OF STATE. REASON: 1.5(D).
''', True),
(u'96ASHGABAT2612', u'TATIANA C. GFOELLER', u'''
1. (U) CLASSIFIED BY CHARGE TATIANA C. GFOELLER.
REASON: 1.5 D.
'''),
(u'96BOGOTA8773', u'S.K. ABEYTA', u'''
1. CLASSIFIED BY POL/ECONOFF. S.K. ABEYTA. REASON: 1.5(D)
'''),
(u'96STATE194868', u'E. GIBSON LANPHER, JR.', u'''
1. (U) CLASSIFIED BY E. GIBSON LANPHER, JR., ACTING
ASSISTANT SECRETARY OF STATE FOR SOUTH ASIAN AFFAIRS,
DEPARTMENT OF STATE. REASON: 1.5(D).
'''),
(u'96JAKARTA7841', u'ED MCWILLIAMS', u'''
1. (U) CLASSIFIED BY POL COUNSELOR ED MCWILLIAMS;
REASON 1.5(D)
'''),
(u'96JERUSALEM3094', u'EDWARD G. ABINGTON, JR.', u'''
1. CLASSIFIED BY CONSUL GENERAL EDWARD G. ABINGTON, JR. REASON
1.5 (B) AND (D).
'''),
(u'96BOGOTA10967', u'S.K. ABEYTA', u'''
1. (U) CLASSIFIED BY POL/ECONOFF S.K. ABEYTA. REASON 1.5(D).
'''),
(u'04MUSCAT2112', u'Richard L. Baltimore, III', u'''
Classified By: Ambassador Richard L. Baltimore, III.
Reasons: 1.4 (b) and (d).
'''),
(u'04MUSCAT2112', u'Richard L. Baltimore, III', u'''
Classified By: Ambassador Richard L. Baltimore, III.
Reasons: 1.4 (b) and (d).
''', True),
(u'05OTTAWA1975', u'Patricia Kim-Scott', u'''
Classified By: Pol/Mil Officer Patricia Kim-Scott. Reason E.O. 12958,
1.4 (b) and (d).
'''),
(u'05BOGOTA6208', u'William B. Wood', u'''
Classified By: Ambassador William B. Wood; reasons 1.4
(b) and (d)
'''),
(u'05TAIPEI2839', u'Douglas Paal', u'''
Classified By: AIT Director Douglas Paal, Reason(s): 1.4 (B/D).
'''),
(u'05DHAKA3073', u'D.C. McCullough', u'''
Classified By: A/DCM D.C. McCullough, reason para 1.4 (b)
'''),
(u'09NAIROBI1132', u'Jessica Davis Ba', u'''
Classified By: Pol/Econ Officer Jessica Davis Ba for reasons 1.4(b) and
(d)
'''),
(u'08ROME1541', u'Liz Dibble', u'''
Classified By: Classified by DCM Liz Dibble for reasons 1.4 (b) and
(d).
'''),
(u'06BAGHDAD2082', u'DANIEL SPECKHARD', ur'''
Classified By: CHARGE D\'AFFAIRES DANIEL SPECKHARD FOR REASONS 1.4 (A),
(B) AND (D)
'''),
(u'05ANKARA4653', u'Nancy McEldowney', u'''
Classified By: (U) CDA Nancy McEldowney; E.O. 12958, reasons 1.4 (b,d)
'''),
(u'05QUITO2057', u'LARRY L. MEMMOTT', u'''
Classified By: ECON LARRY L. MEMMOTT, REASONS 1.4 (B,D)
'''),
(u'06HONGKONG3559', u'LAURENT CHARBONNET', u'''
CLASSIFIED BY: ACTING DEPUTY PRINCIPAL OFFICER LAURENT CHARBONNET. REA
SONS: 1.4 (B,D)
'''),
(u'09BAGHDAD791', u'Patricia Butenis', u'''
Classified By: Charge d\' Affairs Patricia Butenis for reasons 1.4 (b) a
nd (d)
'''),
(u'06OSLO19', u'Christopher W. Webster', u'''
Classified By: Charge d\'Affaires a.i. Christopher W. Webster,
reason 1.4 (b) and (d)
'''),
(u'08BEIJING3386', u'Aubrey Carlson', u'''
Classified By: Political Section Minister Counselor Aubrey Carlson. Re
asons 1.4 (b/d).
'''),
(u'09MOSCOW2393', u'Susan M. Elliott', u'''
Classified By: Political Minister Counselor Susan M. Elliott for reason
s: 1.4 (b), (d).
'''),
(u'10BRUSSELS66', u'Christopher R. Davis', u'''
Classified By: Political Minister-Counselor Christopher R. Davis for re
ason 1.4 (b/d)
'''),
(u'06BEIJING22125', u'ROBERT LUKE', u'''
Classified By: (C) CLASSIFIED BY MINISTER COUNSELOR FOR ECONOMIC AFFAIR
S ROBERT LUKE; REASON 1.4 (B) AND (D).
'''),
(u'07CAIRO622', u'William R. Stewart', u'''
Classified by: Minister Counselor for Economic and
Political Affairs William R. Stewart for reasons 1.4(b) and
(d).
'''),
(u'07BAGHDAD1188', u'Daniel Speckhard', u'''
Classified By: Charge Affaires Daniel Speckhard. Reasons: 1.4 (b) and
(d).
'''),
(u'08PARIS1131', u'STUART DWYER', u'''
Classified By: ECONCOUNS STUART DWYER FOR REASONS 1.4 B AND D
'''),
(u'08ATHENS985', u'Jeff Hovenier', u'''
Classified By: A/Political Counselor Jeff Hovenier for
1.4 (b) and (d)
'''),
(u'09BEIJING2690', u'William Weinstein', u'''
Classified By: This message classified by Econ Minister Counselor
William Weinstein for reasons 1.4 (b), (d) and (e).
'''),
(u'06VILNIUS945', u'Rebecca Dunham', u'''
Classified By: Political and Economic Section Chief Rebecca Dunham for
reasons 1.4 (b) and (d)
'''),
(u'07BAGHDAD2781', u'Howard Keegan', u'''
Classified By: Kirkuk PRT Team Leader Howard Keegan for reason 1.4 (b)
and(d).
'''),
(u'09HARARE864', u'Donald Petterson', u'''
Classified By: Charge d\'affaires, a.i. Donald Petterson for reason 1.4
(b).
'''),
(u'04MANAMA525', u'Robert S. Ford', u'''
Classified By: Charge de Affaires Robert S. Ford for reasons
1.4 (b) and (d).
'''),
(u'08STATE56778', u'Patricia A. McNerney', u'''
Classified By: ISN Acting Assistant Secretary
Patricia A. McNerney, Reasons 1.4 b, c, and d
'''),
(u'07BRUSSELS1462', u'Larry Wohlers', u'''
Classified By: USEU Political Minister Counselor Larry Wohlers
for reasons 1.4 (b) and (d).
'''),
(u'09KABUL2261', u'Hoyt Yee', u'''
Classified By: Interagency Provincial Affairs Deputy Coordinator Hoyt Y
ee for reasons 1.4 (b) and (d)
'''),
(u'09KABUL1233', u'Patricia A McNerney', u'''
Classified By: PRT and Sub-National Governance Acting Director Patricia
A McNerney for reasons 1.4 (b) and (d)
'''),
(u'09BRUSSELS1288', u'CHRISTOPHER DAVIS', u'''
Classified By: CLASSIFIED BY USEU MCOUNSELOR CHRISTOPHER DAVIS, FOR REA
SONS 1.4 (B) AND (D)
'''),
(u'06TAIPEI3165', u'Stephen M. Young', u'''
Classified By: Classified by AIT DIR Stephen M. Young.
Reasons: 1.4 b, d.
'''),
(u'07BRUSSELS1208', u'Courtney Nemroff', u'''
Classified By: Institutional Affairs Unit Chief Courtney Nemroff for re
asons 1.4 (b) & (d)
'''),
(u'05CAIRO8602', u'Michael Corbin', u'''
Classified by ECPO Minister-Counselour Michael Corbin for
reasons 1.4 (b) and (d).
'''),
(u'09MADRID1210', u'Arnold A. Chacon', u'''
Classified By: Charge d'Affaires, a.i., Arnold A. Chacon
1.(C) Summary: In his meetings with Spanish officials,
Special Envoy for Eurasian Energy'''),
(u'05SINGAPORE887', u'Laurent Charbonnet', u'''
Classified By: E/P Counselor Laurent Charbonnet, Reasons 1.4(b)(d)
'''),
(u'09SINGAPORE677', u'Dan Jassem', u'''
Classified By: Acting E/P Counselor Dan Jassem for reasons 1.4 (b) and
(d)
'''),
(u'08BELGRADE1189', u'Thatcher Scharpf', u'''
Classified By: Acting Deputy Chief of Mission Thatcher Scharpf for reas
ons 1.4(b/d).
'''),
(u'09BAGHDAD3319', u'Rachna Korhonen', u'''
Classified By: PRT Kirkuk Governance Section Head Rachna Korhonen for r
easons 1.4 (b) and (d).
'''),
(u'04ANKARA5897', u'Thomas Goldberger', u'''
Classified By: (U) Classified by Economic Counselor Thomas Goldberger f
or reasons 1.4 b,d.
'''),
(u'00HARARE3759', u'TOM MCDONALD', u'''
CLASSIFIED BY AMBASSADOR TOM MCDONALD.
CONFIDENTIAL
PAGE 02 HARARE 03759 01 OF 03 111533Z
REASONS: 1.5 (B) AND (D).
1. (C) SUMMARY: ALTHOUGH WIDESPREAD FEARS OF A
SPIKE'''),
(u'07STATE156455', u'Glyn T. Davies', u'''
Classified By: Glyn T. Davies
SUMMARY
-------
'''),
(u'03GUATEMALA1727', u'Erik Hall', u'''
Classified By: Labor Attache Erik Hall. Reason 1.5 (d).
'''),
(u'05VILNIUS503', u'LARRY BEISEL', u'''
Classified By: DEFENSE ATTACHE LTC LARRY BEISEL FOR REASONS 1.4 (B) AND
(D).
'''),
(u'08USUNNEWYORK729', u'Carolyn L. Willson', u'''
Classified By: USUN Legal Adviser Carolyn L. Willson, for reasons
1.4(b) and (d)
'''),
(u'04BRUSSELS4688', u'Jeremy Brenner', u'''
Classified By: USEU polmil officer Jeremy Brenner for reasons 1.4 (b) a
nd (d)
'''),
(u'08GUATEMALA1416', u'Drew G. Blakeney', u'''
Classified By: Pol/Econ Couns Drew G. Blakeney for reasons 1.4 (b&d).
'''),
(u'08STATE77798', u'Brian H. Hook', u'''
Classified By: IO Acting A/S Brian H. Hook, E.O. 12958,
Reasons: 1.4(b) and (d)
'''),
(u'05ANKARA1071', u'Margaret H. Nardi', u'''
Classified By: Acting Counselor for Political-Military Affiars Margaret
H. Nardi for reasons 1.4 (b) and (d).
'''),
(u'08MOSCOW3655', u'David Kostelancik', u'''
Classified By: Deputy Political M/C David Kostelancik. Reasons 1.4 (b)
and (d).
'''),
(u'09STATE75025', u'Richard C. Holbrooke', u'''
Classified By: Special Representative for Afghanistan and Pakistan
Richard C. Holbrooke
1. (U) This is an action request; see paragraph 4.
'''),
(u'10KABUL688', u'Joseph Mussomeli', u'''
Classified By: Assistant Chief of Mission Joseph Mussomeli for Reasons
1.4 (b) and (d)
'''),
(u'98USUNNEWYORK1638', u'HOWARD STOFFER', u'''
CLASSIFIED BY DEPUTY POLITICAL COUNSEL0R HOWARD STOFFER
PER 1.5 (B) AND (D). ACTION REQUEST IN PARA 10 BELOW.
'''),
(u'02ROME3119', u'PIERRE-RICHARD PROSPER', u'''
CLASSIFIED BY: AMBASSADOR-AT-LARGE PIERRE-RICHARD PROSPER
FOR REASONS 1.5 (B) AND (D)
'''),
(u'02ANKARA8447', u'Greta C. Holtz', u'''
Classified by Consul Greta C. Holtz for reasons 1.5 (b) & (d).
'''),
(u'09USUNNEWYORK282', u'SUSAN RICE', u'''
Classified By: U.S. PERMANENT REPRESENATIVE AMBASSADOR SUSAN RICE
FOR REASONS 1.4 B/D
'''),
(u'09DHAKA339', u'Geeta Pasi', u'''
Classified By: Charge d'Affaires, a.i. Geeta Pasi. Reasons 1.4 (b) and
(d)
'''),
(u'06USUNNEWYORK2273', u'Alejandro D. Wolff', u'''
Classified By: Acting Permanent Representative Alejandro D. Wolff
per reasons 1.4 (b) and (d)
'''),
(u'08ISLAMABAD1494', u'Anne W. Patterson', u'''
Classified By: Ambassador Anne W. Patterson for reaons 1.4 (b) and (d).
1. (C) Summary: During'''),
(u'08BERLIN1150', u'Robert Pollard', u'''
Classified By: Classified by Economic Minister-Counsellor
Robert Pollard for reasons 1.4 (b) and (d)
'''),
(u'08STATE104902', u'DAVID WELCH', u'''
Classified By: 1. CLASSIFIED BY NEA ASSISTANT SECRETARY DAVID WELCH
REASONS: 1.4 (B) AND (D)
'''),
(u'07VIENTIANE454', u'Mary Grace McGeehan', u'''
Classified By: Charge de'Affairs ai. Mary Grace McGeehan for reasons 1.
4 (b) and (d)
'''),
(u'07ROME1948', u'William Meara', u'''
Classified By: Acting Ecmin William Meara for reasons 1.4 (b) and (d)
'''),
(u'07USUNNEWYORK545', u'Jackie Sanders', u'''
Classified By: Amb. Jackie Sanders. E.O 12958. Reasons 1.4 (B&D).
'''),
(u'06USOSCE113', u'Bruce Connuck', u'''
Classified By: Classified by Political Counselor Bruce Connuck for Reas
(b) and (d).
'''),
(u'09DOHA404', u'Joseph LeBaron', u'''
Classified By: Ambassaor Joseph LeBaron for reasons 1.4 (b and d).
'''),
# (u'09DOHA404', u'Joseph LeBaron', u'''
#Classified By: Ambassaor Joseph LeBaron for reasons 1.4 (b and d).
# ''', True),
(u'09RANGOON575', u'Thomas Vajda', u'''
Classified By: Charge d'Afairs (AI) Thomas Vajda for Reasons 1.4 (b) &
(d
'''),
(u'03ROME3107', u'TOM COUNTRYMAN', u'''
Classified By: POL MIN COUN TOM COUNTRYMAN, REASON 1.5(B)&(D).
'''),
(u'06USUNNEWYORK732', u'Molly Phee', u'''
Classified By: Deputy Political Counselor Molly Phee,
for Reasons 1.4 (B and D)
'''),
(u'06BAGHDAD1552', u'David M. Satterfield', u'''
Classified By: Charge d'Affaires David M. Satterfield for reasons 1.4 (
b) and (d)
'''),
(u'06ABUJA232', u'Erin Y. Tariot', u'''
Classified By: USDEL Member Erin Y. Tariot, reasons 1.4 (b,d)
'''),
(u'09ASTANA184', u'RICAHRD E. HOAGLAND', u'''
Classified By: AMBASSADOR RICAHRD E. HOAGLAND: 1.2 (B), (D)
'''),
# (u'09ASTANA184', u'Richard E. Hoagland', u'''
#Classified By: AMBASSADOR RICAHRD E. HOAGLAND: 1.2 (B), (D)
# ''', True),
(u'09CANBERRA428', u'John W. Crowley', u'''
Classified By: Deputy Political Counselor: John W. Crowley, for reasons
1.4 (b) and (d)
'''),
(u'08TASHKENT706', u'Molly Stephenson', u'''
Classified By: Classfied By: IO Molly Stephenson for reasons 1.4 (b) a
nd (d).
'''),
(u'08CONAKRY348', u'T. SCOTT BROWN', u'''
Classified By: ECONOFF T. SCOTT BROWN FOR REASONS 1.4 (B) and (D)
'''),
(u'07STATE125576', u'Margaret McKelvey', u'''
Classified By: PRM/AFR Dir. Margaret McKelvey-reasons 1.4(b/d)
'''),
(u'09BUDAPEST372', u'Steve Weston', u'''
Classified By: Acting Pol/Econ Counselor:Steve Weston,
reasons 1.4 (b and d)
'''),
(u'04TAIPEI3162', u'David J. Keegan', u''''
Classified By: AIT Deputy Director David J. Keegan, Reason: 1.4 (B/D)
'''),
(u'04TAIPEI3521', u'David J. Keegan', u'''
Classified By: AIT Acting Director David J. Keegan, Reason: 1.4 (B/D)
'''),
(u'04TAIPEI3919', u'David J. Keegan', u'''
Classified By: AIT Director David J. Keegan, Reason 1.4 (B/D)
'''),
(u'08JAKARTA1142', u'Stanley A. Harsha', u'''
Classified By: Acting Pol/C Stanley A. Harsha for reasons 1.4 (b+d).
'''),
(u'06ISLAMABAD16739', u'MARY TOWNSWICK', u'''
Classified By: DOS CLASSIFICATION GUIDE BY MARY TOWNSWICK
1. (C) Summary. With limited government support, Islamic
banking has gained momentum in Pakistan in the past three
years. The State Bank of Pakistan (SBP) reports that the
capital base of the Islamic banking system has more than
doubled since 2003 as the number of Islamic banks operating
in Pakistan rose from one to four. A media analysis of
Islamic banking in Pakistan cites an increase in the number
of conventional banks'''),
(u'05DJIBOUTI802', u'JEFFREY PURSELL', u'''
(U) CLASSIFIED BY TDY RSO JEFFREY PURSELL FOR REASON 1.5 C.
'''),
(u'09STATE82567', u'Eliot Kang', u'''
Classified By: Acting DAS for ISN Eliot Kang. Reasons 1.4 (b) and (d)
'''),
(u'04ANKARA5764', u'Charles O. Blah a', u'''
Classified By: Classified by Deputy Political Counselor Charles O. Blah
a, E.O. 12958, reasons 1.4 (b) and (d).
'''),
# (u'04ANKARA5764', u'Charles O. Blaha', u'''
#Classified By: Classified by Deputy Political Counselor Charles O. Blah
#a, E.O. 12958, reasons 1.4 (b) and (d).
# ''', True),
(u'04ANKARA5764', u'Charles O. Blah a', u'''
Classified By: Classified by Deputy Political Counselor Charles O. Blah
a, E.O. 12958, reasons 1.4 (b) and (d).
'''),
(u'10VIENNA195', u'J. Dean Yap', u'''
Classified by: DCM J. Dean Yap (acting) for reasons 1.4 (b)
and (d).
'''),
(u'03HARARE175', u'JOHN S. DICARLO', u'''
Classified By: RSO - JOHN S. DICARLO. REASON 1.5(D)
'''),
(u'08LONDON2968', u'Greg Berry', u'''
Classified By: PolMinCons Greg Berry, reasons 1.4 (b/d).
'''),
(u'08HAVANA956', u'Jonathan Farrar', u'''
Classified By: COM Jonathan Farrar for reasons 1.5 (b) and (d)
'''),
(u'09BAGHDAD253', u'Robert Ford', u'''
Classified By: Acting Deputy Robert Ford. Reasons 1.4 (b) and (d)
'''),
(u'09TIRANA81', u'JOHN L. WITHERS II', u'''
Classified By: AMBASSADOR JOHN L. WITHERS II FR REASONS 1.4 (b) AND (d
).
'''),
(u'05HARARE383', u'Eric T. Schultz', u'''
Classified By: Charge d'Affaires a.i. Eric T. Schultz under Section 1.4
b/d
'''),
(u'07LISBON2591', u'Jenifer Neidhart', u'''
Classified By: Pol/Econ Off Jenifer Neidhart for reasons 1.4 (b) and (d
)
'''),
(u'07STATE171234', u'Lawrence E. Butler', u'''
Classified By: NEA Lawrence E. Butler for reasons EO 12958
1.4(b),(d), and (e).
'''),
(u'04AMMAN8544', u'David Hale', u'''
Classified By: Charge d'Affaries David Hale for Reasons 1.4 (b), (d)
'''),
(u'07NEWDELHI5334', u'Ted Osius', u'''
Classified By: Acting DCM/Ted Osius for reasons 1.4 (b and d)
'''),
(u'04JAKARTA5072', u'ANTHONY C. WOODS', u'''
Classified By: EST&H OFFICER ANTHONY C. WOODS FOR REASON 1.5 (b, d)
'''),
(u'03AMMAN2822', u'Edward W. Gnehm', u'''
Classified By: Ambassador Edward W. Gnehm. Resons 1.5 (B) and (D)
'''),
(u'08CANBERRA1335', u'Daniel A. Clune', u'''
Classified By: Deputy Chief of Mission: Daniel A. Clune: Reason: 1.4 (c
) and (d)
'''),
(u'09HAVANA665', u'Charles Barclay', u'''
Classified By: CDA: Charles Barclay for reQ#8$UQ8ML#C may choke oQhQGTzovisional\" controls, such as
price caps and limits on the amount any one person could buy.
3. (SBU) Furthering speculation that the private markets
were under the gun, official reports have resurfaced in
recent months accusing private markets of artificially
maintaining higher'''),
(u'08STATE8993', u'Gregory B. Starr', u'''
1. (U) Classified by Acting Assistant Secretary for Diplomatic
Security Gregory B. Starr for E.O. 12958 reasons 1.4 (c) and
(d).
'''),
(u'09ISTANBUL137', u'Sandra Oudkirk', u'''
Classified By: ConGen Istanbul DPO Sandra Oudkirk; Reason 1.5 (d)
'''),
(u'08BANGKOK1778', u'James F. Entwistle', u'''
Classified By: Charge, d,Affaires a. i. James F. Entwistle, reason 1.4
(b) and (d).
'''),
(u'08MANAMA301', u'Christopher Henzel', u'''
Classified By: Charge d,Affaires a.i. Christopher Henzel, reasons 1.4(b
) and (d).
'''),
(u'06COLOMBO123', u'Robert O. Blake, Jr.', u'''
Classified By: Abassador Robert O. Blake, Jr. for reasons
1.4 (b and (d).
'''),
(u'08YEREVAN907', u'Marie Yovanovitch', u'''
Classified By: Amabassador Marie Yovanovitch. Reason 1.4 (B/D)
'''),
(u'09QUITO329', u'Heather M. Hodges', u'''
Classified By: AMB Heather M. Hodges for reason 1.4 (D)
'''),
#TODO: Should be: [Karl Wycoff, Shari Villarosa]
(u'09STATE38028', u'KARL WYCOFF', u'''
CLASSIFIED BY AF KARL WYCOFF, ACTING AND S/CT DAS SHARI
VILLAROSA ; E.O. 12958 REASON: 1.4 (B) AND (D)
'''),
(u'04ABUJA2060', u'BRUCE EHRNMAN', u'''
Classified By: AF SPECIAL ADVISOR BRUCE EHRNMAN FOR REASONS 1.5 (B) AND
(D)
'''),
(u'06ISLAMABAD3684', u'RCROCKER', u'''
Classified By: AMB:RCROCKER, Reasons 1.4 (b) and (c)
'''),
(u'06MANAMA184', u'William T.Monroe', u'''
Classified By: Classified by Ambassadior William T.Monroe. Reasons: 1.
4 (b)(d)
'''),
(u'07SANSALVADOR263', u'Charles Glazer', u'''
Classified By: Ambasasdor Charles Glazer, Reasons
1.4 (b) and (d)
'''),
(u'05BRUSSELS1549', u'Michael Ranneberger', u'''
Classified By: AF PDAS Michael Ranneberger. Reasons 1.5 (b) and (d).
'''),
(u'09STATE14163', u'Mark Boulware', u'''
Classified By: AF Acting DAS Mark Boulware, Reasons 1.4 (b) and (d).
'''),
(u'06AITTAIPEI1142', u'Michael R. Wheeler', u'''
Classified By: IPO Michael R. Wheeler for reason 1.4(G)(E)
'''),
(u'08TAIPEI1038', u'Stephen M. Young', u'''
Classified By: AIT Chairman Stephen M. Young,
Reasons: 1.4 (b/d)
'''),
(u'09STATE96519', u'Ellen O. Tauscher', u'''
Classified By: T U/S Ellen O. Tauscher for Reasons 1.4 a,b,and d.
'''),
(u'08NAIROBI232', u'JOHN M. YATES', u'''
Classified By: SPECIAL ENVOY JOHN M. YATES
1. (C) '''),
(u'07COLOMBO769', u'Robert O. Blake, Jr.', u'''
Classified By: Ambassodor Robert O. Blake, Jr. for reasons 1.4 (b, d).
'''),
(u'04DJIBOUTI1541', u'MARGUERITA D. RAGSDALE', u'''
Classified By: AMBASSSADOR MARGUERITA D. RAGSDALE.
REASONS 1.4 (B) AND (D).
'''),
(u'08MOSCOW3202', u'David Kostelancik', u'''
Classified By: Acting Political MC David Kostelancik for reasons 1.4(b)
and (d).
'''),
)
def test_parse_classificationist():
    """Nose generator test: run one check per entry in ``_TEST_DATA``.

    Each entry is either a 3-tuple ``(cable_id, expected, content)`` or a
    4-tuple that additionally carries the ``normalize`` flag (default False).
    """
    def check(cable_id, expected, content, normalize):
        want = expected if isinstance(expected, tuple) else (expected,)
        got = tuple(parse_classificationists(content, normalize))
        eq_(want, got)

    for testcase in _TEST_DATA:
        if len(testcase) == 3:
            (cable_id, expected, content), normalize = testcase, False
        else:
            cable_id, expected, content, normalize = testcase
        yield check, cable_id, expected, content, normalize
if __name__ == '__main__':
    # Run this module's generator tests through the nose runner.
    import nose
    nose.core.runmodule()
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
from __future__ import absolute_import
from __future__ import division
import numpy as _np
import mxnet as mx
from mxnet import gluon, autograd, np
from mxnet.test_utils import use_np, assert_almost_equal
from common import with_seed
@with_seed()
def test_create_np_param():
    """Check that Gluon blocks expose parameters of the expected array type.

    A plain HybridBlock should hold ``mx.nd.NDArray`` parameters, while a
    block decorated with ``@use_np`` should hold ``np.ndarray`` parameters.
    """
    M, K, N = 10, 9, 20

    def check_block_params(x, TestBlock, hybridize, expected_type, initializer):
        # Build, initialize and run the block once so the deferred-init
        # parameter gets its shape, then inspect every parameter's data type.
        net = TestBlock()
        net.initialize(initializer())
        if hybridize:
            net.hybridize()
        net(x)
        params = net.collect_params()
        for k, v in params.items():
            assert type(v.data()) is expected_type

    class TestBlock1(gluon.HybridBlock):
        # Classic (non-numpy) block: forward uses F.dot.
        def __init__(self):
            super(TestBlock1, self).__init__()
            with self.name_scope():
                self.w = self.params.get('w', shape=(K, N), allow_deferred_init=True)

        def hybrid_forward(self, F, x, w):
            return F.dot(x, w)

    @use_np
    class TestBlock2(gluon.HybridBlock):
        # numpy-flavoured block: forward uses F.np.dot.
        def __init__(self):
            super(TestBlock2, self).__init__()
            with self.name_scope():
                self.w = self.params.get('w', shape=(K, N), allow_deferred_init=True)

        def hybrid_forward(self, F, x, w):
            return F.np.dot(x, w)

    x = mx.nd.random.uniform(shape=(M, K))
    # Exercise both block flavours, hybridized and not, under two initializers.
    for initializer in [mx.initializer.Uniform, mx.initializer.Normal]:
        check_block_params(x, TestBlock1, False, mx.nd.NDArray, initializer)
        check_block_params(x, TestBlock1, True, mx.nd.NDArray, initializer)
        check_block_params(x.as_np_ndarray(), TestBlock2, False, np.ndarray, initializer)
        check_block_params(x.as_np_ndarray(), TestBlock2, True, np.ndarray, initializer)
@with_seed()
@use_np
def test_optimizer_with_np_ndarrays():
    """End-to-end smoke test: train a tiny two-layer regressor on np.ndarray data.

    Verifies that ``gluon.Trainer`` with SGD can update parameters when the
    whole forward/backward pass is expressed with mxnet.numpy arrays.
    """
    class LinearRegression(gluon.HybridBlock):
        def __init__(self, num_input_dim=0, num_hidden_dim=100, num_output_dim=10):
            # num_input_dim=0 leaves w1's first dimension deferred; it is
            # inferred from the input on the first forward pass.
            super(LinearRegression, self).__init__()
            with self.name_scope():
                self.w1 = self.params.get('w1', shape=(num_input_dim, num_hidden_dim),
                                          allow_deferred_init=True)
                self.w2 = self.params.get('w2', shape=(num_hidden_dim, num_output_dim),
                                          allow_deferred_init=True)

        def hybrid_forward(self, F, x, w1, w2):
            h = x.dot(w1)  # equivalent to F.np.dot(x, w1)
            h_relu = F.npx.relu(h)  # equivalent to F.relu(h) but generating np.ndarray
            y_pred = h_relu.dot(w2)  # equivalent to F.np.dot(h_relu, w2)
            return y_pred

    class TotalLoss(gluon.HybridBlock):
        def hybrid_forward(self, F, pred, label):
            return ((pred - label) ** 2).sum()  # equivalent to F.np.sum(F.np.square(pred - label))

    regressor = LinearRegression()
    regressor.initialize(mx.init.Uniform())
    regressor.hybridize()
    # Create random input and output data
    x = np.random.uniform(size=(64, 1000))  # x is of type mxnet.numpy.ndarray
    regressor(x)
    y = np.random.uniform(size=(64, 10))  # y is of type mxnet.numpy.ndarray
    total_loss = TotalLoss()
    total_loss.hybridize()
    trainer = gluon.Trainer(regressor.collect_params(),
                            'sgd',
                            {'learning_rate': 1e-3, 'momentum': 0.9})
    # Two optimization steps are enough to exercise the full update path.
    for t in range(2):
        with autograd.record():
            output = regressor(x)  # output is a type of np.ndarray because np.dot is the last op in the network
            loss = total_loss(output, y)  # loss is a scalar np.ndarray
        loss.backward()
        trainer.step(1)
@with_seed()
@use_np
def test_optimizer_backward_compat():
    """Updater must cope with optimizers that lack the ``allow_np_array`` attribute."""
    sgd = mx.optimizer.SGD()
    # Simulate a legacy optimizer created before the numpy-array flag existed.
    delattr(sgd, "allow_np_array")
    mx.optimizer.Updater(sgd)(0, np.ones((0, 0)), np.zeros((0, 0)))
@with_seed()
@use_np
def test_np_loss_ndarray():
    """Gluon losses evaluated on np.ndarray inputs.

    The assertions below pin the summed per-sample loss values for L1, L2
    (with and without a global weight / per-sample weighting) and softmax
    cross-entropy against precomputed references.
    """
    # Ported from test_loss.test_loss_ndarray
    output = np.array([1, 2, 3, 4])
    label = np.array([1, 3, 5, 7])
    weighting = np.array([0.5, 1, 0.5, 1])

    # L1: per-sample |output-label| sums to 0+1+2+3 = 6.
    loss = gluon.loss.L1Loss()
    assert float(np.sum(loss(output, label))) == 6.
    loss = gluon.loss.L1Loss(weight=0.5)
    assert float(np.sum(loss(output, label))) == 3.
    loss = gluon.loss.L1Loss()
    assert float(np.sum(loss(output, label, weighting))) == 5.

    # L2: sum of squared errors is 14; the asserted 7 reflects the loss's 1/2 factor.
    loss = gluon.loss.L2Loss()
    assert float(np.sum(loss(output, label))) == 7.
    loss = gluon.loss.L2Loss(weight=0.25)
    assert float(np.sum(loss(output, label))) == 1.75
    loss = gluon.loss.L2Loss()
    assert float(np.sum(loss(output, label, weighting))) == 6

    # Softmax cross-entropy against reference values, unweighted and with
    # a per-sample weighting column.
    output = np.array([[0, 2], [1, 4]])
    label = np.array([0, 1])
    weighting = np.array([[0.5], [1.0]])
    loss = gluon.loss.SoftmaxCrossEntropyLoss()
    L = loss(output, label).asnumpy()
    assert_almost_equal(L, _np.array([2.12692809, 0.04858733]), use_broadcast=False)
    L = loss(output, label, weighting).asnumpy()
    assert_almost_equal(L, _np.array([1.06346405, 0.04858733]), use_broadcast=False)
@with_seed()
@use_np
def test_np_get_constant():
    """``params.get_constant`` must be usable from a numpy-flavoured HybridBlock."""
    const_arr = _np.random.uniform(0, 100, size=(10, 10)).astype(_np.float32)

    class Foo(gluon.HybridBlock):
        def __init__(self, prefix=None, params=None):
            super(Foo, self).__init__(prefix=prefix, params=params)
            # Capture the module-level array as a constant parameter.
            self.weight = self.params.get_constant('const', const_arr)

        def hybrid_forward(self, F, x, weight):
            return x + weight.astype(np.float32)

    x = np.random.uniform(size=const_arr.shape, dtype=const_arr.dtype)
    # Check both the imperative and the hybridized execution paths.
    for hybridize in [False, True]:
        foo = Foo()
        if hybridize:
            foo.hybridize()
        foo.initialize()
        out = foo(x)
        # The block must add the captured constant verbatim.
        assert_almost_equal(out.asnumpy(), (x.asnumpy() + const_arr), atol=1e-5, rtol=1e-4, use_broadcast=False)
if __name__ == '__main__':
    # Allow running this test module directly via the nose runner.
    import nose
    nose.runmodule()
add test
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
from __future__ import absolute_import
from __future__ import division
import numpy as _np
import mxnet as mx
from mxnet import gluon, autograd, np
from mxnet.test_utils import use_np, assert_almost_equal
from common import with_seed
@with_seed()
def test_create_np_param():
    """Check that Gluon blocks expose parameters of the expected array type.

    A plain HybridBlock should hold ``mx.nd.NDArray`` parameters, while a
    block decorated with ``@use_np`` should hold ``np.ndarray`` parameters.
    """
    M, K, N = 10, 9, 20

    def check_block_params(x, TestBlock, hybridize, expected_type, initializer):
        # Build, initialize and run the block once so the deferred-init
        # parameter gets its shape, then inspect every parameter's data type.
        net = TestBlock()
        net.initialize(initializer())
        if hybridize:
            net.hybridize()
        net(x)
        params = net.collect_params()
        for k, v in params.items():
            assert type(v.data()) is expected_type

    class TestBlock1(gluon.HybridBlock):
        # Classic (non-numpy) block: forward uses F.dot.
        def __init__(self):
            super(TestBlock1, self).__init__()
            with self.name_scope():
                self.w = self.params.get('w', shape=(K, N), allow_deferred_init=True)

        def hybrid_forward(self, F, x, w):
            return F.dot(x, w)

    @use_np
    class TestBlock2(gluon.HybridBlock):
        # numpy-flavoured block: forward uses F.np.dot.
        def __init__(self):
            super(TestBlock2, self).__init__()
            with self.name_scope():
                self.w = self.params.get('w', shape=(K, N), allow_deferred_init=True)

        def hybrid_forward(self, F, x, w):
            return F.np.dot(x, w)

    x = mx.nd.random.uniform(shape=(M, K))
    # Exercise both block flavours, hybridized and not, under two initializers.
    for initializer in [mx.initializer.Uniform, mx.initializer.Normal]:
        check_block_params(x, TestBlock1, False, mx.nd.NDArray, initializer)
        check_block_params(x, TestBlock1, True, mx.nd.NDArray, initializer)
        check_block_params(x.as_np_ndarray(), TestBlock2, False, np.ndarray, initializer)
        check_block_params(x.as_np_ndarray(), TestBlock2, True, np.ndarray, initializer)
@with_seed()
@use_np
def test_optimizer_with_np_ndarrays():
    """End-to-end smoke test: train a tiny two-layer regressor on np.ndarray data.

    Verifies that ``gluon.Trainer`` with SGD can update parameters when the
    whole forward/backward pass is expressed with mxnet.numpy arrays.
    """
    class LinearRegression(gluon.HybridBlock):
        def __init__(self, num_input_dim=0, num_hidden_dim=100, num_output_dim=10):
            # num_input_dim=0 leaves w1's first dimension deferred; it is
            # inferred from the input on the first forward pass.
            super(LinearRegression, self).__init__()
            with self.name_scope():
                self.w1 = self.params.get('w1', shape=(num_input_dim, num_hidden_dim),
                                          allow_deferred_init=True)
                self.w2 = self.params.get('w2', shape=(num_hidden_dim, num_output_dim),
                                          allow_deferred_init=True)

        def hybrid_forward(self, F, x, w1, w2):
            h = x.dot(w1)  # equivalent to F.np.dot(x, w1)
            h_relu = F.npx.relu(h)  # equivalent to F.relu(h) but generating np.ndarray
            y_pred = h_relu.dot(w2)  # equivalent to F.np.dot(h_relu, w2)
            return y_pred

    class TotalLoss(gluon.HybridBlock):
        def hybrid_forward(self, F, pred, label):
            return ((pred - label) ** 2).sum()  # equivalent to F.np.sum(F.np.square(pred - label))

    regressor = LinearRegression()
    regressor.initialize(mx.init.Uniform())
    regressor.hybridize()
    # Create random input and output data
    x = np.random.uniform(size=(64, 1000))  # x is of type mxnet.numpy.ndarray
    regressor(x)
    y = np.random.uniform(size=(64, 10))  # y is of type mxnet.numpy.ndarray
    total_loss = TotalLoss()
    total_loss.hybridize()
    trainer = gluon.Trainer(regressor.collect_params(),
                            'sgd',
                            {'learning_rate': 1e-3, 'momentum': 0.9})
    # Two optimization steps are enough to exercise the full update path.
    for t in range(2):
        with autograd.record():
            output = regressor(x)  # output is a type of np.ndarray because np.dot is the last op in the network
            loss = total_loss(output, y)  # loss is a scalar np.ndarray
        loss.backward()
        trainer.step(1)
@with_seed()
@use_np
def test_optimizer_backward_compat():
    """Updater must cope with optimizers that lack the ``allow_np_array`` attribute."""
    sgd = mx.optimizer.SGD()
    # Simulate a legacy optimizer created before the numpy-array flag existed.
    delattr(sgd, "allow_np_array")
    mx.optimizer.Updater(sgd)(0, np.ones((0, 0)), np.zeros((0, 0)))
@with_seed()
@use_np
def test_np_loss_ndarray():
    """Gluon losses evaluated on np.ndarray inputs.

    The assertions below pin the summed per-sample loss values for L1, L2
    (with and without a global weight / per-sample weighting) and softmax
    cross-entropy against precomputed references.
    """
    # Ported from test_loss.test_loss_ndarray
    output = np.array([1, 2, 3, 4])
    label = np.array([1, 3, 5, 7])
    weighting = np.array([0.5, 1, 0.5, 1])

    # L1: per-sample |output-label| sums to 0+1+2+3 = 6.
    loss = gluon.loss.L1Loss()
    assert float(np.sum(loss(output, label))) == 6.
    loss = gluon.loss.L1Loss(weight=0.5)
    assert float(np.sum(loss(output, label))) == 3.
    loss = gluon.loss.L1Loss()
    assert float(np.sum(loss(output, label, weighting))) == 5.

    # L2: sum of squared errors is 14; the asserted 7 reflects the loss's 1/2 factor.
    loss = gluon.loss.L2Loss()
    assert float(np.sum(loss(output, label))) == 7.
    loss = gluon.loss.L2Loss(weight=0.25)
    assert float(np.sum(loss(output, label))) == 1.75
    loss = gluon.loss.L2Loss()
    assert float(np.sum(loss(output, label, weighting))) == 6

    # Softmax cross-entropy against reference values, unweighted and with
    # a per-sample weighting column.
    output = np.array([[0, 2], [1, 4]])
    label = np.array([0, 1])
    weighting = np.array([[0.5], [1.0]])
    loss = gluon.loss.SoftmaxCrossEntropyLoss()
    L = loss(output, label).asnumpy()
    assert_almost_equal(L, _np.array([2.12692809, 0.04858733]), use_broadcast=False)
    L = loss(output, label, weighting).asnumpy()
    assert_almost_equal(L, _np.array([1.06346405, 0.04858733]), use_broadcast=False)
@with_seed()
@use_np
def test_np_get_constant():
    """``params.get_constant`` must be usable from a numpy-flavoured HybridBlock."""
    const_arr = _np.random.uniform(0, 100, size=(10, 10)).astype(_np.float32)

    class Foo(gluon.HybridBlock):
        def __init__(self, prefix=None, params=None):
            super(Foo, self).__init__(prefix=prefix, params=params)
            # Capture the module-level array as a constant parameter.
            self.weight = self.params.get_constant('const', const_arr)

        def hybrid_forward(self, F, x, weight):
            return x + weight.astype(np.float32)

    x = np.random.uniform(size=const_arr.shape, dtype=const_arr.dtype)
    # Check both the imperative and the hybridized execution paths.
    for hybridize in [False, True]:
        foo = Foo()
        if hybridize:
            foo.hybridize()
        foo.initialize()
        out = foo(x)
        # The block must add the captured constant verbatim.
        assert_almost_equal(out.asnumpy(), (x.asnumpy() + const_arr), atol=1e-5, rtol=1e-4, use_broadcast=False)
@use_np
def test_parameters_zero_grad():
    """``zero_grad`` on the collected parameters must reset every gradient to zeros."""
    for hybridize in (False, True):
        net = gluon.nn.HybridSequential()
        for _ in range(5):
            net.add(gluon.nn.Dense(10))
        if hybridize:
            net.hybridize()
        net.initialize()
        out = net(mx.np.ones((32, 8)))
        # Dirty every gradient with ones, then zero them all in one call.
        for param in net.collect_params().values():
            param.grad()[()] = 1
        net.collect_params().zero_grad()
        for param in net.collect_params().values():
            grad = param.grad()
            assert_almost_equal(grad.asnumpy(), mx.np.zeros_like(grad).asnumpy())
if __name__ == '__main__':
    # Allow running this test module directly via the nose runner.
    import nose
    nose.runmodule()
|
from IStorage import IStorage, AlreadyPersistentError
from hecuba import config, log
from hfetch import Hcache
import uuid
from collections import namedtuple
import numpy as np
class StorageNumpy(np.ndarray, IStorage):
    """A ``numpy.ndarray`` subclass whose contents can be persisted to Cassandra.

    The array payload is read/written through an ``Hcache`` handle bound to a
    ``<keyspace>.<table>_numpies`` table; object metadata is registered in the
    ``hecuba.istorage`` catalog table.
    """

    # Per-object persistence state (filled in by __new__ / make_persistent).
    _storage_id = None
    _build_args = None
    _class_name = None
    _hcache_params = None
    _hcache = None
    # Prepared statement for registering this object in the istorage catalog.
    _prepared_store_meta = config.session.prepare('INSERT INTO hecuba.istorage'
                                                  '(storage_id, class_name, name)'
                                                  'VALUES (?,?,?)')
    args_names = ["storage_id", "class_name", "name"]
    args = namedtuple('StorageNumpyArgs', args_names)

    def __new__(cls, input_array=None, storage_id=None, name=None, **kwargs):
        # Construction modes:
        #  * storage_id + name, no data -> load the persisted array;
        #  * storage_id without a name  -> error;
        #  * name given (with or without data) -> persist the provided data;
        #  * neither -> plain volatile ndarray view.
        if input_array is None and name is not None and storage_id is not None:
            input_array = cls.load_array(storage_id, name)
            obj = np.asarray(input_array).view(cls)
            obj._is_persistent = True
        elif name is None and storage_id is not None:
            raise RuntimeError("hnumpy received storage id but not a name")
        elif (input_array is not None and name is not None and storage_id is not None) \
                or (storage_id is None and name is not None):
            obj = np.asarray(input_array).view(cls)
            obj.make_persistent(name)
        else:
            obj = np.asarray(input_array).view(cls)
            obj._is_persistent = False
        # Input array is an already formed ndarray instance
        # We first cast to be our class type
        # add the new attribute to the created instance
        obj._storage_id = storage_id
        # Finally, we must return the newly created object:
        obj._class_name = '%s.%s' % (cls.__module__, cls.__name__)
        return obj

    # used as copy constructor
    def __array_finalize__(self, obj):
        # numpy invokes this for views/slices; propagate the storage id so
        # derived arrays keep pointing at the same persistent object.
        if obj is None:
            return
        self._storage_id = getattr(obj, '_storage_id', None)

    @staticmethod
    def build_remotely(new_args):
        """
        Launches the StorageNumpy.__init__ from the uuid api.getByID
        Args:
            new_args: a list of all information needed to create again the StorageNumpy
        Returns:
            so: the created StorageNumpy
        """
        # NOTE(review): the uuid is passed positionally, so it lands in the
        # ``input_array`` parameter of __new__, not ``storage_id`` — looks
        # like it should be StorageNumpy(storage_id=..., name=...); confirm.
        log.debug("Building StorageNumpy object with %s", new_args)
        return StorageNumpy(new_args.storage_id)

    @staticmethod
    def _store_meta(storage_args):
        """
        Saves the information of the object in the istorage table.
        Args:.
            storage_args (object): contains all data needed to restore the object from the workers
        """
        log.debug("StorageObj: storing media %s", storage_args)
        try:
            config.session.execute(StorageNumpy._prepared_store_meta,
                                   [storage_args.storage_id, storage_args.class_name,
                                    storage_args.name])
        except Exception as ex:
            log.warn("Error creating the StorageNumpy metadata with args: %s" % str(storage_args))
            raise ex

    @staticmethod
    def load_array(storage_id, name):
        """Fetch the persisted payload row for *storage_id* from *name*'s table.

        Raises KeyError when exactly one row is not found.
        """
        (ksp, table) = IStorage._extract_ks_tab(name)
        _hcache_params = (ksp, table + '_numpies',
                          storage_id, [], ['storage_id', 'cluster_id', 'block_id'],
                          [{'name': "payload", 'type': 'numpy'}],
                          {'cache_size': config.max_cache_size,
                           'writer_par': config.write_callbacks_number,
                           'write_buffer': config.write_buffer_size})
        _hcache = Hcache(*_hcache_params)
        # (-1, -1) mirrors the cluster/block ids used by make_persistent's put_row.
        result = _hcache.get_row([storage_id, -1, -1])
        if len(result) == 1:
            return result[0]
        else:
            raise KeyError

    def make_persistent(self, name):
        """Persist this array under *name* (``keyspace.table``): create the
        keyspace/table on demand, write the payload and register metadata."""
        if self._is_persistent:
            raise AlreadyPersistentError("This StorageNumpy is already persistent [Before:{}.{}][After:{}]",
                                         self._ksp, self._table, name)
        self._is_persistent = True
        (self._ksp, self._table) = self._extract_ks_tab(name)
        if self._storage_id is None:
            # Deterministic id derived from the fully qualified table name.
            self._storage_id = uuid.uuid3(uuid.NAMESPACE_DNS, self._ksp + '.' + self._table + '_numpies')
        self._build_args = self.args(self._storage_id, self._class_name, name)
        log.info("PERSISTING DATA INTO %s %s", self._ksp, self._table)
        query_keyspace = "CREATE KEYSPACE IF NOT EXISTS %s WITH replication = %s" % (self._ksp, config.replication)
        config.session.execute(query_keyspace)
        config.session.execute('CREATE TABLE IF NOT EXISTS ' + self._ksp + '.' + self._table + '_numpies'
                               '(storage_id uuid , '
                               'cluster_id int, '
                               'block_id int, '
                               'payload blob, '
                               'PRIMARY KEY((storage_id,cluster_id),block_id))')
        self._hcache_params = (self._ksp, self._table + '_numpies',
                               self._storage_id, [], ['storage_id', 'cluster_id', 'block_id'],
                               [{'name': "payload", 'type': 'numpy'}],
                               {'cache_size': config.max_cache_size,
                                'writer_par': config.write_callbacks_number,
                                'write_buffer': config.write_buffer_size})
        self._hcache = Hcache(*self._hcache_params)
        if len(self.shape) != 0:
            # 0-d (scalar) arrays have no payload row to write.
            self._hcache.put_row([self._storage_id, -1, -1], [self])
        self._store_meta(self._build_args)

    def delete_persistent(self):
        """
        Deletes the Cassandra table where the persistent StorageObj stores data
        """
        # NOTE(review): the persistent flag is cleared BEFORE the DELETE runs;
        # if the query fails, the object is left marked volatile while its
        # data still exists — confirm whether the flag should flip afterwards.
        self._is_persistent = False
        query = "DELETE FROM %s.%s WHERE storage_id = %s;" % (self._ksp, self._table + '_numpies', self._storage_id)
        log.debug("DELETE PERSISTENT: %s", query)
        config.session.execute(query)
        # TODO should I also drop the table when empty?
        # TODO DELETE THE METAS

    ##to overload [] override __set_item__ and __get_item__
StorageNumpy adds __array_ufunc__ to support the new numpy behaviour
As noted in issue #132, the numpy module introduces a new
behaviour that changes how methods behave. The idea is that,
by using __array_ufunc__, the inputs and outputs of the methods
can be intercepted and changed.
from IStorage import IStorage, AlreadyPersistentError
from hecuba import config, log
from hfetch import Hcache
import uuid
from collections import namedtuple
import numpy as np
class StorageNumpy(np.ndarray, IStorage):
    """A ``numpy.ndarray`` subclass whose contents can be persisted to Cassandra.

    The array payload is read/written through an ``Hcache`` handle bound to a
    ``<keyspace>.<table>_numpies`` table; object metadata is registered in the
    ``hecuba.istorage`` catalog table. ``__array_ufunc__`` unwraps StorageNumpy
    operands so numpy ufuncs operate on plain ndarray views.
    """

    # Per-object persistence state (filled in by __new__ / make_persistent).
    _storage_id = None
    _build_args = None
    _class_name = None
    _hcache_params = None
    _hcache = None
    # Prepared statement for registering this object in the istorage catalog.
    _prepared_store_meta = config.session.prepare('INSERT INTO hecuba.istorage'
                                                  '(storage_id, class_name, name)'
                                                  'VALUES (?,?,?)')
    args_names = ["storage_id", "class_name", "name"]
    args = namedtuple('StorageNumpyArgs', args_names)

    def __new__(cls, input_array=None, storage_id=None, name=None, **kwargs):
        # Construction modes:
        #  * storage_id + name, no data -> load the persisted array;
        #  * storage_id without a name  -> error;
        #  * name given (with or without data) -> persist the provided data;
        #  * neither -> plain volatile ndarray view.
        if input_array is None and name is not None and storage_id is not None:
            input_array = cls.load_array(storage_id, name)
            obj = np.asarray(input_array).view(cls)
            obj._is_persistent = True
        elif name is None and storage_id is not None:
            raise RuntimeError("hnumpy received storage id but not a name")
        elif (input_array is not None and name is not None and storage_id is not None) \
                or (storage_id is None and name is not None):
            obj = np.asarray(input_array).view(cls)
            obj.make_persistent(name)
        else:
            obj = np.asarray(input_array).view(cls)
            obj._is_persistent = False
        # Input array is an already formed ndarray instance
        # We first cast to be our class type
        # add the new attribute to the created instance
        obj._storage_id = storage_id
        # Finally, we must return the newly created object:
        obj._class_name = '%s.%s' % (cls.__module__, cls.__name__)
        return obj

    # used as copy constructor
    def __array_finalize__(self, obj):
        # numpy invokes this for views/slices; propagate the storage id so
        # derived arrays keep pointing at the same persistent object.
        if obj is None:
            return
        self._storage_id = getattr(obj, '_storage_id', None)

    @staticmethod
    def build_remotely(new_args):
        """
        Launches the StorageNumpy.__init__ from the uuid api.getByID
        Args:
            new_args: a list of all information needed to create again the StorageNumpy
        Returns:
            so: the created StorageNumpy
        """
        # NOTE(review): the uuid is passed positionally, so it lands in the
        # ``input_array`` parameter of __new__, not ``storage_id`` — looks
        # like it should be StorageNumpy(storage_id=..., name=...); confirm.
        log.debug("Building StorageNumpy object with %s", new_args)
        return StorageNumpy(new_args.storage_id)

    @staticmethod
    def _store_meta(storage_args):
        """
        Saves the information of the object in the istorage table.
        Args:.
            storage_args (object): contains all data needed to restore the object from the workers
        """
        log.debug("StorageObj: storing media %s", storage_args)
        try:
            config.session.execute(StorageNumpy._prepared_store_meta,
                                   [storage_args.storage_id, storage_args.class_name,
                                    storage_args.name])
        except Exception as ex:
            log.warn("Error creating the StorageNumpy metadata with args: %s" % str(storage_args))
            raise ex

    @staticmethod
    def load_array(storage_id, name):
        """Fetch the persisted payload row for *storage_id* from *name*'s table.

        Raises KeyError when exactly one row is not found.
        """
        (ksp, table) = IStorage._extract_ks_tab(name)
        _hcache_params = (ksp, table + '_numpies',
                          storage_id, [], ['storage_id', 'cluster_id', 'block_id'],
                          [{'name': "payload", 'type': 'numpy'}],
                          {'cache_size': config.max_cache_size,
                           'writer_par': config.write_callbacks_number,
                           'write_buffer': config.write_buffer_size})
        _hcache = Hcache(*_hcache_params)
        # (-1, -1) mirrors the cluster/block ids used by make_persistent's put_row.
        result = _hcache.get_row([storage_id, -1, -1])
        if len(result) == 1:
            return result[0]
        else:
            raise KeyError

    def make_persistent(self, name):
        """Persist this array under *name* (``keyspace.table``): create the
        keyspace/table on demand, write the payload and register metadata."""
        if self._is_persistent:
            raise AlreadyPersistentError("This StorageNumpy is already persistent [Before:{}.{}][After:{}]",
                                         self._ksp, self._table, name)
        self._is_persistent = True
        (self._ksp, self._table) = self._extract_ks_tab(name)
        if self._storage_id is None:
            # Deterministic id derived from the fully qualified table name.
            self._storage_id = uuid.uuid3(uuid.NAMESPACE_DNS, self._ksp + '.' + self._table + '_numpies')
        self._build_args = self.args(self._storage_id, self._class_name, name)
        log.info("PERSISTING DATA INTO %s %s", self._ksp, self._table)
        query_keyspace = "CREATE KEYSPACE IF NOT EXISTS %s WITH replication = %s" % (self._ksp, config.replication)
        config.session.execute(query_keyspace)
        config.session.execute('CREATE TABLE IF NOT EXISTS ' + self._ksp + '.' + self._table + '_numpies'
                               '(storage_id uuid , '
                               'cluster_id int, '
                               'block_id int, '
                               'payload blob, '
                               'PRIMARY KEY((storage_id,cluster_id),block_id))')
        self._hcache_params = (self._ksp, self._table + '_numpies',
                               self._storage_id, [], ['storage_id', 'cluster_id', 'block_id'],
                               [{'name': "payload", 'type': 'numpy'}],
                               {'cache_size': config.max_cache_size,
                                'writer_par': config.write_callbacks_number,
                                'write_buffer': config.write_buffer_size})
        self._hcache = Hcache(*self._hcache_params)
        if len(self.shape) != 0:
            # 0-d (scalar) arrays have no payload row to write.
            self._hcache.put_row([self._storage_id, -1, -1], [self])
        self._store_meta(self._build_args)

    def delete_persistent(self):
        """
        Deletes the Cassandra table where the persistent StorageObj stores data
        """
        query = "DELETE FROM %s.%s WHERE storage_id = %s;" % (self._ksp, self._table + '_numpies', self._storage_id)
        log.debug("DELETE PERSISTENT: %s", query)
        config.session.execute(query)
        # Flag is cleared only after the DELETE succeeds.
        self._is_persistent = False
        # TODO delete the data

    # to overload [] override __set_item__ and __get_item__
    def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
        """Intercept numpy ufuncs applied to StorageNumpy operands.

        StorageNumpy inputs (and any ``out`` targets) are unwrapped to plain
        ndarray views before delegating to the base implementation; the
        caller-supplied ``out`` arrays are returned in place of the computed
        results so in-place semantics are preserved.
        """
        args = []
        in_no = []
        for i, input_ in enumerate(inputs):
            if isinstance(input_, StorageNumpy):
                in_no.append(i)
                args.append(input_.view(np.ndarray))
            else:
                args.append(input_)
        outputs = kwargs.pop('out', None)
        out_no = []
        if outputs:
            out_args = []
            for j, output in enumerate(outputs):
                if isinstance(output, StorageNumpy):
                    out_no.append(j)
                    out_args.append(output.view(np.ndarray))
                else:
                    out_args.append(output)
            kwargs['out'] = tuple(out_args)
        else:
            outputs = (None,) * ufunc.nout
        results = super(StorageNumpy, self).__array_ufunc__(ufunc, method,
                                                            *args, **kwargs)
        if results is NotImplemented:
            return NotImplemented
        if method == 'at':
            # ufunc.at operates in place and has no return value.
            return
        if ufunc.nout == 1:
            results = (results,)
        results = tuple((result
                         if output is None else output)
                        for result, output in zip(results, outputs))
        return results[0] if len(results) == 1 else results
|
# -*- coding: utf8 -*-
from __future__ import absolute_import
from os.path import join, abspath, dirname
# Project
# Repository root: three levels above this settings module.
PROJECT_ROOT = abspath(dirname(dirname(dirname(__file__))))

# NOTE(review): development-oriented defaults — DEBUG on, every host allowed,
# hard-coded SECRET_KEY. Override these for any production deployment.
DEBUG = True
ALLOWED_HOSTS = ('*', )

ADMINS = (
    ('Mathieu Comandon', 'strider@strycore.com'),
)
MANAGERS = ADMINS

SITE_ID = 1
ROOT_URLCONF = 'megascops.urls'
WSGI_APPLICATION = 'megascops.wsgi.application'
SECRET_KEY = 'q-vep-mg6!hcrcgp=8-5ngu)!bs2limcdt1w(vvt=qup%0anak'

# Databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'megascops',
        'USER': 'megascops',
        'PASSWORD': 'admin',
        'HOST': 'localhost'
    }
}

# Apps
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'django.contrib.admindocs',
    # Third-party apps
    'sorl.thumbnail',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'allauth.socialaccount.providers.google',
    # Project apps
    'video',
)

# Localization
TIME_ZONE = 'Europe/Paris'
LANGUAGE_CODE = 'en-us'

# Static files
MEDIA_ROOT = join(PROJECT_ROOT, "media")
MEDIA_URL = "/media/"
STATIC_ROOT = join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    join(PROJECT_ROOT, "public"),
)
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Templates.
# Fixed: the original dict combined ``'APP_DIRS': True`` with a top-level
# ``'LOADERS'`` key. ``LOADERS`` is not a valid TEMPLATES key (loaders belong
# inside OPTIONS), and Django raises ImproperlyConfigured when APP_DIRS is set
# together with OPTIONS['loaders']. App-directory template loading is kept via
# ``app_directories.Loader`` in the explicit loader list.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': (join(PROJECT_ROOT, 'templates'),),
        'OPTIONS': {
            'context_processors': [
                'django.core.context_processors.debug',
                'django.core.context_processors.i18n',
                'django.core.context_processors.media',
                'django.core.context_processors.static',
                'django.core.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
            # Explicit loaders (mutually exclusive with APP_DIRS).
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
            ],
            'debug': True
        },
    }
]
# Middleware
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

# Authentication
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    # django-allauth backend for e-mail / social login.
    'allauth.account.auth_backends.AuthenticationBackend',
)
ACCOUNT_AUTHENTICATION_METHOD = "username_email"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
ACCOUNT_SIGNUP_PASSWORD_VERIFICATION = False
SOCIALACCOUNT_PROVIDERS = {
    'google': {
        'SCOPE': ['https://www.googleapis.com/auth/userinfo.profile'],
        'AUTH_PARAMS': {'access_type': 'online'}
    }
}
LOGIN_URL = "/accounts/login/"
LOGIN_REDIRECT_URL = "/"
LOGIN_ERROR_URL = "/accounts/login/error/"

# Email
EMAIL_SUBJECT_PREFIX = "[Megascops]"
DEFAULT_FROM_EMAIL = "strider@strycore.com"

# Celery
BROKER_URL = 'amqp://guest:guest@localhost:5672//'

# Logging
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse',
        }
    },
    'handlers': {
        # E-mail the site admins on errors, but only when DEBUG is off.
        'mail_admins': {
            'level': 'ERROR',
            'include_html': True,
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        },
        'file': {
            'level': 'DEBUG',
            'class': 'logging.FileHandler',
            'filename': join(PROJECT_ROOT, 'megascops.log')
        },
    },
    'formatters': {
        'verbose': {
            'format': '%(levelname)s %(asctime)s %(module)s '
                      '%(process)d %(thread)d %(message)s'
        },
        'simple': {
            'format': '%(levelname)s %(message)s'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins', 'file'],
            'level': 'ERROR',
            'propagate': True,
        },
        'django.contrib.messages': {
            'handlers': ['mail_admins', 'file'],
            'level': 'ERROR',
            'propagate': True,
        },
        'video': {
            'handlers': ['mail_admins', 'file'],
            'level': 'DEBUG',
            'propagate': True,
        },
    }
}

# Megascops
DEFAULT_SIZE_QUOTA = 52428800  # 50 * 1024 * 1024 bytes
DEFAULT_VIDEO_QUOTA = 5
Fix TEMPLATES setting
# -*- coding: utf8 -*-
from __future__ import absolute_import
from os.path import join, abspath, dirname
# Project
# PROJECT_ROOT: repository root, three directory levels up from this module.
PROJECT_ROOT = abspath(dirname(dirname(dirname(__file__))))
# NOTE(review): DEBUG=True and the wildcard ALLOWED_HOSTS below are
# development-only values -- confirm they are overridden for production.
DEBUG = True
ALLOWED_HOSTS = ('*', )
ADMINS = (
    ('Mathieu Comandon', 'strider@strycore.com'),
)
MANAGERS = ADMINS
SITE_ID = 1
ROOT_URLCONF = 'megascops.urls'
WSGI_APPLICATION = 'megascops.wsgi.application'
# NOTE(review): SECRET_KEY is hard-coded and committed to the repository;
# it should be loaded from the environment for any real deployment.
SECRET_KEY = 'q-vep-mg6!hcrcgp=8-5ngu)!bs2limcdt1w(vvt=qup%0anak'
# Databases
# NOTE(review): database credentials are committed in plain text here.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'megascops',
        'USER': 'megascops',
        'PASSWORD': 'admin',
        'HOST': 'localhost'
    }
}
# Apps
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.admindocs',
'sorl.thumbnail',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'video',
)
# Localization
TIME_ZONE = 'Europe/Paris'
LANGUAGE_CODE = 'en-us'
# Static files
MEDIA_ROOT = join(PROJECT_ROOT, "media")
MEDIA_URL = "/media/"
STATIC_ROOT = join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
join(PROJECT_ROOT, "public"),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': (join(PROJECT_ROOT, 'templates'),),
'OPTIONS': {
'context_processors': [
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
'debug': True
},
}
]
# Middleware
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
# Authentication
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
ACCOUNT_AUTHENTICATION_METHOD = "username_email"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
ACCOUNT_SIGNUP_PASSWORD_VERIFICATION = False
SOCIALACCOUNT_PROVIDERS = {
'google': {
'SCOPE': ['https://www.googleapis.com/auth/userinfo.profile'],
'AUTH_PARAMS': {'access_type': 'online'}
}
}
LOGIN_URL = "/accounts/login/"
LOGIN_REDIRECT_URL = "/"
LOGIN_ERROR_URL = "/accounts/login/error/"
# Email
EMAIL_SUBJECT_PREFIX = "[Megascops]"
DEFAULT_FROM_EMAIL = "strider@strycore.com"
# Celery
BROKER_URL = 'amqp://guest:guest@localhost:5672//'
# Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'include_html': True,
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(PROJECT_ROOT, 'megascops.log')
},
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s '
'%(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins', 'file'],
'level': 'ERROR',
'propagate': True,
},
'django.contrib.messages': {
'handlers': ['mail_admins', 'file'],
'level': 'ERROR',
'propagate': True,
},
'video': {
'handlers': ['mail_admins', 'file'],
'level': 'DEBUG',
'propagate': True,
},
}
}
# Megascops
DEFAULT_SIZE_QUOTA = 52428800
DEFAULT_VIDEO_QUOTA = 5
|
61cc8785-2d5f-11e5-8c5f-b88d120fff5e
|
# -*- coding: utf-8 -*-
""" This module fills the gap between Python 2.6 upwards to Python 3
since version 3.2.
"""
import sys
#: True if the current interpreter is of version 2.6
PY26 = sys.version_info[:2] == (2, 6)
#: True if the current interpreter is of version 3
PY3K = sys.version_info[0] == 3
if PY3K:
str_type = str
else:
str_type = (str, unicode)
if PY3K: # pragma: no cover
comp_bytes = bytes
else:
comp_bytes = (lambda a, _: bytes(a))
if PY3K: # pragma: no cover
comp_unicode = str
else:
comp_unicode = unicode
if PY26:  # pragma: no cover
    import zipfile

    class ZipFile(zipfile.ZipFile):
        """Backport of the ``with``-statement support that
        ``zipfile.ZipFile`` only gained in Python 2.7.
        """

        def __enter__(self):
            return self

        def __exit__(self, *exc_inf):
            # Close the archive on block exit.  Return a falsy value so
            # exceptions raised inside the ``with`` body propagate: the
            # previous ``return True`` silently swallowed every exception,
            # diverging from zipfile.ZipFile's behavior on 2.7+/3.x.
            self.close()
            return False
else:
    from zipfile import ZipFile
if PY26:  # pragma: no cover
    import gzip

    class GzipFile(gzip.GzipFile):
        """Backport of the ``with``-statement support that
        ``gzip.GzipFile`` only gained in Python 2.7.
        """

        def __enter__(self):
            return self

        def __exit__(self, *exc_inf):
            # Close the file on block exit.  Return a falsy value so
            # exceptions raised inside the ``with`` body propagate: the
            # previous ``return True`` silently swallowed every exception,
            # diverging from gzip.GzipFile's behavior on 2.7+/3.x.
            self.close()
            return False
else:
    from gzip import GzipFile
if PY3K: # pragma: no cover
imap = map
else:
from itertools import imap
if PY3K: # pragma: no cover
ifilter = filter
else:
from itertools import ifilter
if PY3K: # pragma: no cover
from urllib.parse import urlparse
else:
from urlparse import urlparse
if PY3K: # pragma: no cover
from urllib.error import HTTPError
else:
from urllib2 import HTTPError
if PY3K: # pragma: no cover
from urllib.request import urlopen
else:
from urllib2 import urlopen
if PY3K: # pragma: no cover
from io import StringIO
else:
from StringIO import StringIO
if PY3K: # pragma: no cover
from collections import UserList
else:
from UserList import UserList
if PY3K: # pragma: no cover
from configparser import ConfigParser
else:
from ConfigParser import SafeConfigParser
class ConfigParser(SafeConfigParser):
def read_string(self, s):
""" Puts the string `s` into a :class:`StringIO.StringIO`
instance and passes it to :meth:`readfp`.
"""
buf = StringIO(s)
buf.seek(0)
self.readfp(buf)
if PY26: # pragma: no cover
import unittest2 as unittest
else:
import unittest
Normalize the different methods for iterating over a dictionary across Python 2 and 3.
# -*- coding: utf-8 -*-
""" This module fills the gap between Python 2.6 upwards to Python 3
since version 3.2.
"""
import sys
#: True if the current interpreter is of version 2.6
PY26 = sys.version_info[:2] == (2, 6)
#: True if the current interpreter is of version 3
PY3K = sys.version_info[0] == 3
# str_type is intended for isinstance() checks: on Python 2 both byte
# strings and unicode strings count as "a string".
if PY3K:
    str_type = str
else:
    str_type = (str, unicode)
# comp_bytes(value, encoding): on Python 3 this is the two-argument
# bytes() constructor; on Python 2 the second argument is accepted but
# ignored, since bytes(a) is just str(a) there.
if PY3K:  # pragma: no cover
    comp_bytes = bytes
else:
    comp_bytes = (lambda a, _: bytes(a))
# comp_unicode: the native text type of the running interpreter.
if PY3K:  # pragma: no cover
    comp_unicode = str
else:
    comp_unicode = unicode
if PY26:  # pragma: no cover
    import zipfile

    class ZipFile(zipfile.ZipFile):
        """Backport of the ``with``-statement support that
        ``zipfile.ZipFile`` only gained in Python 2.7.
        """

        def __enter__(self):
            return self

        def __exit__(self, *exc_inf):
            # Close the archive on block exit.  Return a falsy value so
            # exceptions raised inside the ``with`` body propagate: the
            # previous ``return True`` silently swallowed every exception,
            # diverging from zipfile.ZipFile's behavior on 2.7+/3.x.
            self.close()
            return False
else:
    from zipfile import ZipFile
if PY26:  # pragma: no cover
    import gzip

    class GzipFile(gzip.GzipFile):
        """Backport of the ``with``-statement support that
        ``gzip.GzipFile`` only gained in Python 2.7.
        """

        def __enter__(self):
            return self

        def __exit__(self, *exc_inf):
            # Close the file on block exit.  Return a falsy value so
            # exceptions raised inside the ``with`` body propagate: the
            # previous ``return True`` silently swallowed every exception,
            # diverging from gzip.GzipFile's behavior on 2.7+/3.x.
            self.close()
            return False
else:
    from gzip import GzipFile
# Lazy map/filter: builtins are already lazy on Python 3; use the
# itertools variants on Python 2.
if PY3K:  # pragma: no cover
    imap = map
else:
    from itertools import imap
if PY3K:  # pragma: no cover
    ifilter = filter
else:
    from itertools import ifilter
# The urlparse/urllib2 modules were reorganized into the urllib package
# in Python 3; import from whichever location exists.
if PY3K:  # pragma: no cover
    from urllib.parse import urlparse
else:
    from urlparse import urlparse
if PY3K:  # pragma: no cover
    from urllib.error import HTTPError
else:
    from urllib2 import HTTPError
if PY3K:  # pragma: no cover
    from urllib.request import urlopen
else:
    from urllib2 import urlopen
# In-memory text buffer and the list-like base class also moved in Python 3.
if PY3K:  # pragma: no cover
    from io import StringIO
else:
    from StringIO import StringIO
if PY3K:  # pragma: no cover
    from collections import UserList
else:
    from UserList import UserList
if PY3K:  # pragma: no cover
    from configparser import ConfigParser
else:
    from ConfigParser import SafeConfigParser

    class ConfigParser(SafeConfigParser):
        # Python 2's SafeConfigParser has no read_string(); this shim gives
        # both branches the same API surface.

        def read_string(self, s):
            """ Puts the string `s` into a :class:`StringIO.StringIO`
                instance and passes it to :meth:`readfp`.
            """
            buf = StringIO(s)
            buf.seek(0)  # readfp consumes from the buffer's current position
            self.readfp(buf)
# On 2.6 use the unittest2 backport, which provides the 2.7 assertions.
if PY26:  # pragma: no cover
    import unittest2 as unittest
else:
    import unittest
# Unified lazy dict-iteration helpers, used as e.g. d_iteritems(d).
if PY3K:  # pragma: no cover
    # Python 3 dropped dict.iter*(); the plain view methods are the
    # closest equivalents (lazy views rather than lists).
    d_iterkeys = dict.keys
    d_itervalues = dict.values
    d_iteritems = dict.items
else:
    d_iterkeys = dict.iterkeys
    d_itervalues = dict.itervalues
    d_iteritems = dict.iteritems
|
import logging
import sys
import traceback
from decimal import Decimal
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth import logout
from django.core.cache import cache
from django.db.models import F
from django.utils.text import slugify
from rest_framework import mixins, status, viewsets
from rest_framework.authentication import (
SessionAuthentication,
TokenAuthentication
)
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.decorators import api_view, list_route
from rest_framework.exceptions import ValidationError
from rest_framework.response import Response
from utils.mixins import VersionedViewSetMixin
from utils.oauth import verify_token
from utils.user_utils import get_client_ip, username_hash
from . import models
from . import permissions
from .serializers import v1, v2
logger = logging.getLogger(__name__)
class PlaceViewSet(VersionedViewSetMixin, viewsets.ReadOnlyModelViewSet):
"""ViewSet for Places. See userprofile/api_docs for more info."""
queryset = models.Place.objects.filter(primary=True)
serializer_class_v1 = v1.PlaceSerializer
serializer_class_v2 = v2.PlaceSerializer
docstring_prefix = "userprofile/api_docs"
class UserPlaceViewSet(VersionedViewSetMixin,
mixins.CreateModelMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
viewsets.GenericViewSet):
"""ViewSet for UserPlaces. See userprofile/api_docs for more info."""
authentication_classes = (TokenAuthentication, SessionAuthentication)
queryset = models.UserPlace.objects.all()
serializer_class_v1 = v1.UserPlaceSerializer
serializer_class_v2 = v2.UserPlaceSerializer
docstring_prefix = "userprofile/api_docs"
permission_classes = [permissions.IsSelf]
def get_queryset(self):
return self.queryset.filter(user__id=self.request.user.id)
def _quant(self, request, field, default=None):
assert field in ['longitude', 'latitude']
if field in request.data:
value = Decimal(request.data[field]).quantize(Decimal('0.0001'))
return str(value)
return default
def create(self, request, *args, **kwargs):
place = request.data.get('place')
if place:
place, _ = models.Place.objects.get_or_create(name=place)
request.data['longitude'] = self._quant(request, 'longitude')
request.data['latitude'] = self._quant(request, 'latitude')
request.data['place'] = place.id
request.data['user'] = request.user.id
request.data['profile'] = request.user.userprofile.id
return super().create(request, *args, **kwargs)
content = {'error': "'{}' is not a valid Place".format(place)}
return Response(content, status=status.HTTP_400_BAD_REQUEST)
def update(self, request, *args, **kwargs):
up = self.get_object()
place = request.data.get('place')
if place:
place, _ = models.Place.objects.get_or_create(name=place)
request.data['place'] = place.id
else:
request.data['place'] = up.place.id
request.data['longitude'] = self._quant(request, 'longitude', up.longitude)
request.data['latitude'] = self._quant(request, 'latitude', up.latitude)
request.data['user'] = request.user.id
request.data['profile'] = request.user.userprofile.id
return super().update(request, *args, **kwargs)
class UserViewSet(VersionedViewSetMixin, viewsets.ModelViewSet):
"""ViewSet for Users. See userprofile/api_docs for more info."""
authentication_classes = (TokenAuthentication, SessionAuthentication)
queryset = get_user_model().objects.all()
serializer_class_v1 = v1.UserSerializer
serializer_class_v2 = v2.UserSerializer
docstring_prefix = "userprofile/api_docs"
permission_classes = [permissions.IsSelf]
def get_queryset(self):
qs = self.queryset.select_related("userprofile", "auth_token")
qs = qs.filter(id=self.request.user.id)
return qs
@list_route(methods=['get', 'post'], url_path='oauth')
def oauth_create(self, request, pk=None):
"""GET: List the current user's profile / google details.
POST: Create the user if they don't already exist and return their
profile details. The POST payload should include the following:
{
'email': '...',
'first_name': '...',
'last_name': '...',
'image_url': '...',
'oauth_token': '...',
}
Of the above values, the `email` and `oauth_token` fields are required.
"""
content = {}
authed = request.user.is_authenticated()
user = request.user if authed else None
result_status = status.HTTP_200_OK
# Not authenticated, return empty list.
if not authed and request.method == 'GET':
return Response(content, status=result_status)
# Not authenticated & this is a POST: get or create the user.
elif not authed and request.method == "POST":
User = get_user_model()
try:
data = request.data
# Verify the given token info: https://goo.gl/MIKN9X
token = verify_token(data.get('oauth_token'))
if token is None:
return Response(
data={'error': 'Invalid auth token'},
status=status.HTTP_400_BAD_REQUEST
)
# Note: email + (a verified) token serves as username + password.
email = data.get('email').strip().lower()
# XXX This is a hack to keep these users from getting the
# XXX `selected_by_default` content from the `goals` app.
# XXX We *must* set this before we craete the user, hence the
# XXX use of the email in the key.
_key = "omit-default-selections-{}".format(slugify(email))
cache.set(_key, True, 30)
user, created = User.objects.get_or_create(
username=username_hash(email),
email=email
)
# Update the Profile fields.
profile = user.userprofile
profile.google_token = token # This will change periodically
profile.google_image = data.get('image_url', '')
profile.app_logins += 1
if created:
# Save the IP address on the user's profile
try:
profile.ip_address = get_client_ip(request)
except: # XXX: Don't let any exception prevent signup.
pass
profile.save()
# Update user fields.
user.first_name = data.get('first_name', '')
user.last_name = data.get('last_name', '')
user.save()
if created:
result_status = status.HTTP_201_CREATED
else:
result_status = status.HTTP_200_OK
except Exception as err:
# Log the traceback.
exc_type, exc_value, exc_traceback = sys.exc_info()
tb = traceback.format_exception(exc_type, exc_value, exc_traceback)
tb_string = "{}\n".format("\n".join(tb))
logger.error(tb_string)
return Response(
data={'error': '{}'.format(err)},
status=status.HTTP_400_BAD_REQUEST
)
if user:
content = {
'id': user.id,
'profile_id': user.userprofile.id,
'email': user.email,
'first_name': user.first_name,
'last_name': user.last_name,
'google_image': user.userprofile.google_image,
'google_token': user.userprofile.google_token,
'phone': user.userprofile.phone,
'token': user.auth_token.key,
'needs_onboarding': user.userprofile.needs_onboarding,
}
return Response(content, status=result_status)
def create(self, request, *args, **kwargs):
"""Handle the optional username/email scenario and include an Auth
token for the API in the returned response.
"""
# We typically expect an email address to be given, here, but this api
# also supports a username. If we receive a username, but no email
# address, we'll check to see if we should swap them, which may prevent
# an edge case where we might end up with duplicate accounts.
username = request.data.get('username')
if username:
username = username.lower()
request.data['username'] = username
email = request.data.get('email')
if email:
email = email.lower()
request.data['email'] = email
if email is None and username is not None and '@' in username:
request.data['email'] = username
request.data.pop('username')
resp = super(UserViewSet, self).create(request, *args, **kwargs)
# Include the newly-created User's auth token (if we have a user)
if hasattr(self, 'object') and hasattr(self.object, 'auth_token'):
resp.data['token'] = self.object.auth_token.key
# Save the IP address on the user's profile
try:
uid = resp.data.get('userprofile_id')
userprofile = models.UserProfile.objects.get(pk=uid)
userprofile.ip_address = get_client_ip(request)
userprofile.save()
except: # XXX: Don't let any exception prevent user signup.
pass
return resp
class UserDataViewSet(VersionedViewSetMixin, viewsets.ModelViewSet):
"""ViewSet for User Data. See userprofile/api_docs for more info."""
authentication_classes = (TokenAuthentication, SessionAuthentication)
queryset = get_user_model().objects.all()
serializer_class_v1 = v1.UserDataSerializer
serializer_class_v2 = v2.UserDataSerializer
docstring_prefix = "userprofile/api_docs"
permission_classes = [permissions.IsSelf]
def get_queryset(self):
qs = self.queryset.select_related("userprofile", "auth_token")
qs = qs.filter(id=self.request.user.id)
return qs
class UserFeedViewSet(VersionedViewSetMixin, viewsets.ReadOnlyModelViewSet):
"""ViewSet for the Feed. See userprofile/api_docs for more info."""
authentication_classes = (TokenAuthentication, SessionAuthentication)
queryset = get_user_model().objects.all()
serializer_class_v1 = v1.UserFeedSerializer
serializer_class_v2 = v2.UserFeedSerializer
docstring_prefix = "userprofile/api_docs"
permission_classes = [permissions.IsSelf]
def get_queryset(self):
return self.queryset.filter(id=self.request.user.id)
class UserAccountViewSet(VersionedViewSetMixin, viewsets.ModelViewSet):
"""The User's account info. See userprofile/api_docs for more info"""
authentication_classes = (TokenAuthentication, SessionAuthentication)
queryset = get_user_model().objects.all()
serializer_class_v1 = v1.UserAccountSerializer
serializer_class_v2 = v2.UserAccountSerializer
docstring_prefix = "userprofile/api_docs"
permission_classes = [permissions.IsSelf]
def get_queryset(self):
return self.queryset.filter(id=self.request.user.id)
class SimpleProfileViewSet(VersionedViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
viewsets.GenericViewSet):
"""A simpler viewset for the UserProfile model."""
authentication_classes = (TokenAuthentication, SessionAuthentication)
queryset = models.UserProfile.objects.all()
serializer_class_v1 = v2.SimpleProfileSerializer
serializer_class_v2 = v2.SimpleProfileSerializer
docstring_prefix = "userprofile/api_docs"
permission_classes = [permissions.IsSelf]
def get_queryset(self):
self.queryset = super().get_queryset()
if self.request.user.is_authenticated():
self.queryset = self.queryset.filter(user=self.request.user)
else:
self.queryset = self.queryset.none()
return self.queryset
def update(self, request, *args, **kwargs):
request.data['user'] = request.user.id
return super().update(request, *args, **kwargs)
class UserProfileViewSet(VersionedViewSetMixin,
                         mixins.ListModelMixin,
                         mixins.RetrieveModelMixin,
                         mixins.UpdateModelMixin,
                         viewsets.GenericViewSet):
    """The User's account info. See userprofile/api_docs for more info"""
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    queryset = models.UserProfile.objects.all()
    serializer_class_v1 = v1.UserProfileSerializer
    serializer_class_v2 = v2.UserProfileSerializer
    docstring_prefix = "userprofile/api_docs"
    permission_classes = [permissions.IsSelf]

    def get_queryset(self):
        # Restrict to the requesting user's own profile.
        return self.queryset.filter(user__id=self.request.user.id)

    def list(self, request, *args, **kwargs):
        resp = super(UserProfileViewSet, self).list(request, *args, **kwargs)
        # Hack the data to include FQDNs for the response_url item.
        for profile in resp.data['results']:
            for q in profile['bio']:
                q['response_url'] = self.request.build_absolute_uri(q['response_url'])
                q['question_url'] = self.request.build_absolute_uri(q['question_url'])
        return resp

    def retrieve(self, request, *args, **kwargs):
        resp = super(UserProfileViewSet, self).retrieve(request, *args, **kwargs)
        profile = resp.data
        # Same FQDN rewrite as in list(), for a single profile payload.
        for q in profile['bio']:
            q['response_url'] = self.request.build_absolute_uri(q['response_url'])
            q['question_url'] = self.request.build_absolute_uri(q['question_url'])
        return resp

    def update(self, request, *args, **kwargs):
        """Allow setting `timezone` or `needs_onboarding`.

        * timezone: A timezone string, e.g. "America/Chicago"
        * needs_onboarding: flag for the onboarding state (the original
          docstring listed `timezone` twice; per the summary line this
          bullet documents `needs_onboarding` -- TODO confirm expected type)
        """
        if not settings.DEBUG:
            log_msg = "User %s setting timezone to %s"
            logger.info(log_msg % (request.user.id, request.data.get('timezone', None)))
        # Force ownership to the requesting user.
        request.data['user'] = request.user.id
        return super(UserProfileViewSet, self).update(request, *args, **kwargs)
@api_view(['POST'])
def api_logout(request):
    """Log the current user out via the API.

    Ending the session fires the `user_logged_out` signal, and any data
    POSTed with the request travels along with it.  For example, a client
    logging out of a device may send:

        {registration_id: 'YOUR-REGISTRATION-ID'}

    so the notifications app (subscribed to the logout signal) knows to
    remove the user's GCM device and queued messages.

    Returns a 200 response on success, or a 404 when the request carries
    no authenticated user.

    ----
    """
    current_user = getattr(request, "user", None)
    if not current_user:
        return Response(None, status=status.HTTP_404_NOT_FOUND)
    logout(request)  # Sends the user_logged_out signal
    return Response(None, status=status.HTTP_200_OK)
class ObtainAuthorization(ObtainAuthToken):
    """Custom Authorization view that, in addition to the user's auth token
    (default for the superclass), returns some additional user information:

    * token
    * username
    * user_id
    * userprofile_id
    * first_name
    * last_name
    * full_name
    * email
    * needs_onboarding

    USAGE: Send a POST request to this view containing username/password
    data and receive a JSON-encoded response.
    """
    serializer_class = v1.AuthTokenSerializer

    def post(self, request):
        try:
            serializer = self.serializer_class(data=request.data)
            # is_valid(raise_exception=True) raises ValidationError on bad
            # credentials, so the body below only runs for valid logins.
            if serializer.is_valid(raise_exception=True):
                user = serializer.validated_data['user']
                token, created = Token.objects.get_or_create(user=user)
                # Update the number of times the user has logged in
                profiles = models.UserProfile.objects.filter(user=user)
                profiles.update(app_logins=F('app_logins') + 1)
                return Response({
                    'token': token.key,
                    'username': user.username,
                    'id': user.id,
                    'user_id': user.id,
                    'userprofile_id': user.userprofile.id,
                    'first_name': user.first_name,
                    'last_name': user.last_name,
                    'full_name': user.get_full_name(),
                    'email': user.email,
                    'zipcode': user.userprofile.zipcode,
                    'birthday': user.userprofile.birthday,  # TODO: serialize
                    'sex': user.userprofile.sex,
                    'gender': user.userprofile.sex,
                    'employed': user.userprofile.employed,
                    'is_parent': user.userprofile.is_parent,
                    'in_relationship': user.userprofile.in_relationship,
                    'has_degree': user.userprofile.has_degree,
                    'maximum_daily_notifications': user.userprofile.maximum_daily_notifications,
                    'needs_onboarding': user.userprofile.needs_onboarding,
                    'object_type': 'user',
                })
        except ValidationError as e:
            # Failed login attempt, record with axes
            # username = request.data.get(settings.AXES_USERNAME_FORM_FIELD, None)
            # if username is None:
            #     username = request.data.get('username', None)
            # AccessLog.objects.create(
            #     user_agent=request.META.get('HTTP_USER_AGENT', '<unknown>')[:255],
            #     ip_address=get_ip(request),
            #     username=username,
            #     http_accept=request.META.get('HTTP_ACCEPT', '<unknown>'),
            #     path_info=request.META.get('PATH_INFO', '<unknown>'),
            #     trusted=False
            # )
            raise e
        # NOTE(review): unreachable in practice -- is_valid(raise_exception=True)
        # either raises or returns True, so the try block above always returns
        # or raises before control reaches this line.
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
obtain_auth_token = ObtainAuthorization.as_view()
Automatically activate accounts that are created via Google auth.
import logging
import sys
import traceback
from decimal import Decimal
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth import logout
from django.core.cache import cache
from django.db.models import F
from django.utils.text import slugify
from rest_framework import mixins, status, viewsets
from rest_framework.authentication import (
SessionAuthentication,
TokenAuthentication
)
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.decorators import api_view, list_route
from rest_framework.exceptions import ValidationError
from rest_framework.response import Response
from utils.mixins import VersionedViewSetMixin
from utils.oauth import verify_token
from utils.user_utils import get_client_ip, username_hash
from . import models
from . import permissions
from .serializers import v1, v2
logger = logging.getLogger(__name__)
class PlaceViewSet(VersionedViewSetMixin, viewsets.ReadOnlyModelViewSet):
    """ViewSet for Places. See userprofile/api_docs for more info."""
    # Only "primary" Places are exposed through this read-only endpoint.
    queryset = models.Place.objects.filter(primary=True)
    serializer_class_v1 = v1.PlaceSerializer
    serializer_class_v2 = v2.PlaceSerializer
    docstring_prefix = "userprofile/api_docs"
class UserPlaceViewSet(VersionedViewSetMixin,
                       mixins.CreateModelMixin,
                       mixins.ListModelMixin,
                       mixins.RetrieveModelMixin,
                       mixins.UpdateModelMixin,
                       viewsets.GenericViewSet):
    """ViewSet for UserPlaces. See userprofile/api_docs for more info."""
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    queryset = models.UserPlace.objects.all()
    serializer_class_v1 = v1.UserPlaceSerializer
    serializer_class_v2 = v2.UserPlaceSerializer
    docstring_prefix = "userprofile/api_docs"
    permission_classes = [permissions.IsSelf]

    def get_queryset(self):
        # Users may only see their own UserPlace rows.
        return self.queryset.filter(user__id=self.request.user.id)

    def _quant(self, request, field, default=None):
        """Return the request's longitude/latitude quantized to four decimal
        places as a string, or `default` when the field is absent.
        """
        assert field in ['longitude', 'latitude']
        if field in request.data:
            value = Decimal(request.data[field]).quantize(Decimal('0.0001'))
            return str(value)
        return default

    def create(self, request, *args, **kwargs):
        # NOTE(review): mutating request.data assumes a mutable payload
        # (e.g. parsed JSON); a form-encoded QueryDict would raise here --
        # confirm clients always POST JSON.
        place = request.data.get('place')
        if place:
            # Place rows are created on demand from the supplied name.
            place, _ = models.Place.objects.get_or_create(name=place)
            request.data['longitude'] = self._quant(request, 'longitude')
            request.data['latitude'] = self._quant(request, 'latitude')
            request.data['place'] = place.id
            request.data['user'] = request.user.id
            request.data['profile'] = request.user.userprofile.id
            return super().create(request, *args, **kwargs)
        # Missing/empty place name: reject the request.
        content = {'error': "'{}' is not a valid Place".format(place)}
        return Response(content, status=status.HTTP_400_BAD_REQUEST)

    def update(self, request, *args, **kwargs):
        up = self.get_object()
        place = request.data.get('place')
        if place:
            place, _ = models.Place.objects.get_or_create(name=place)
            request.data['place'] = place.id
        else:
            # No new place supplied: keep the existing one.
            request.data['place'] = up.place.id
        # Fall back to the stored coordinates when none are supplied.
        request.data['longitude'] = self._quant(request, 'longitude', up.longitude)
        request.data['latitude'] = self._quant(request, 'latitude', up.latitude)
        request.data['user'] = request.user.id
        request.data['profile'] = request.user.userprofile.id
        return super().update(request, *args, **kwargs)
class UserViewSet(VersionedViewSetMixin, viewsets.ModelViewSet):
    """ViewSet for Users. See userprofile/api_docs for more info."""
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    queryset = get_user_model().objects.all()
    serializer_class_v1 = v1.UserSerializer
    serializer_class_v2 = v2.UserSerializer
    docstring_prefix = "userprofile/api_docs"
    permission_classes = [permissions.IsSelf]

    def get_queryset(self):
        # Only the requesting user; pull the profile and token in one query.
        qs = self.queryset.select_related("userprofile", "auth_token")
        qs = qs.filter(id=self.request.user.id)
        return qs

    @list_route(methods=['get', 'post'], url_path='oauth')
    def oauth_create(self, request, pk=None):
        """GET: List the current user's profile / google details.

        POST: Create the user if they don't already exist and return their
        profile details. The POST payload should include the following:

            {
                'email': '...',
                'first_name': '...',
                'last_name': '...',
                'image_url': '...',
                'oauth_token': '...',
            }

        Of the above values, the `email` and `oauth_token` fields are required.
        """
        content = {}
        authed = request.user.is_authenticated()
        user = request.user if authed else None
        result_status = status.HTTP_200_OK
        # Not authenticated, return empty list.
        if not authed and request.method == 'GET':
            return Response(content, status=result_status)
        # Not authenticated & this is a POST: get or create the user.
        elif not authed and request.method == "POST":
            User = get_user_model()
            try:
                data = request.data
                # Verify the given token info: https://goo.gl/MIKN9X
                token = verify_token(data.get('oauth_token'))
                if token is None:
                    return Response(
                        data={'error': 'Invalid auth token'},
                        status=status.HTTP_400_BAD_REQUEST
                    )
                # Note: email + (a verified) token serves as username + password.
                email = data.get('email').strip().lower()
                # XXX This is a hack to keep these users from getting the
                # XXX `selected_by_default` content from the `goals` app.
                # XXX We *must* set this before we create the user, hence the
                # XXX use of the email in the key.
                _key = "omit-default-selections-{}".format(slugify(email))
                cache.set(_key, True, 30)
                user, created = User.objects.get_or_create(
                    username=username_hash(email),
                    email=email
                )
                # Update the Profile fields.
                profile = user.userprofile
                profile.google_token = token  # This will change periodically
                profile.google_image = data.get('image_url', '')
                profile.app_logins += 1
                if created:
                    # Save the IP address on the user's profile
                    try:
                        profile.ip_address = get_client_ip(request)
                    except:  # XXX: Don't let any exception prevent signup.
                        pass
                profile.save()
                # Update user fields.
                user.first_name = data.get('first_name', '')
                user.last_name = data.get('last_name', '')
                user.is_active = True  # Auto-activate accounts from Google
                user.save()
                if created:
                    result_status = status.HTTP_201_CREATED
                else:
                    result_status = status.HTTP_200_OK
            except Exception as err:
                # Log the traceback.
                exc_type, exc_value, exc_traceback = sys.exc_info()
                tb = traceback.format_exception(exc_type, exc_value, exc_traceback)
                tb_string = "{}\n".format("\n".join(tb))
                logger.error(tb_string)
                return Response(
                    data={'error': '{}'.format(err)},
                    status=status.HTTP_400_BAD_REQUEST
                )
        # Both the GET-while-authenticated and successful-POST paths end up
        # here with a user to serialize.
        if user:
            content = {
                'id': user.id,
                'profile_id': user.userprofile.id,
                'email': user.email,
                'first_name': user.first_name,
                'last_name': user.last_name,
                'google_image': user.userprofile.google_image,
                'google_token': user.userprofile.google_token,
                'phone': user.userprofile.phone,
                'token': user.auth_token.key,
                'needs_onboarding': user.userprofile.needs_onboarding,
            }
        return Response(content, status=result_status)

    def create(self, request, *args, **kwargs):
        """Handle the optional username/email scenario and include an Auth
        token for the API in the returned response.
        """
        # We typically expect an email address to be given, here, but this api
        # also supports a username. If we receive a username, but no email
        # address, we'll check to see if we should swap them, which may prevent
        # an edge case where we might end up with duplicate accounts.
        username = request.data.get('username')
        if username:
            username = username.lower()
            request.data['username'] = username
        email = request.data.get('email')
        if email:
            email = email.lower()
            request.data['email'] = email
        if email is None and username is not None and '@' in username:
            # The "username" is actually an email address; move it over.
            request.data['email'] = username
            request.data.pop('username')
        resp = super(UserViewSet, self).create(request, *args, **kwargs)
        # Include the newly-created User's auth token (if we have a user)
        if hasattr(self, 'object') and hasattr(self.object, 'auth_token'):
            resp.data['token'] = self.object.auth_token.key
        # Save the IP address on the user's profile
        try:
            uid = resp.data.get('userprofile_id')
            userprofile = models.UserProfile.objects.get(pk=uid)
            userprofile.ip_address = get_client_ip(request)
            userprofile.save()
        except:  # XXX: Don't let any exception prevent user signup.
            pass
        return resp
class UserDataViewSet(VersionedViewSetMixin, viewsets.ModelViewSet):
    """ViewSet for User Data. See userprofile/api_docs for more info."""
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    queryset = get_user_model().objects.all()
    serializer_class_v1 = v1.UserDataSerializer
    serializer_class_v2 = v2.UserDataSerializer
    docstring_prefix = "userprofile/api_docs"
    permission_classes = [permissions.IsSelf]

    def get_queryset(self):
        # Only the requesting user's own record, with the related profile
        # and auth token fetched in the same query.
        return (self.queryset
                .select_related("userprofile", "auth_token")
                .filter(id=self.request.user.id))
class UserFeedViewSet(VersionedViewSetMixin, viewsets.ReadOnlyModelViewSet):
    """ViewSet for the Feed. See userprofile/api_docs for more info."""
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    queryset = get_user_model().objects.all()
    serializer_class_v1 = v1.UserFeedSerializer
    serializer_class_v2 = v2.UserFeedSerializer
    docstring_prefix = "userprofile/api_docs"
    permission_classes = [permissions.IsSelf]

    def get_queryset(self):
        # Only ever expose the authenticated user's own record.
        current_user_id = self.request.user.id
        return self.queryset.filter(id=current_user_id)
class UserAccountViewSet(VersionedViewSetMixin, viewsets.ModelViewSet):
    """The User's account info. See userprofile/api_docs for more info"""
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    queryset = get_user_model().objects.all()
    serializer_class_v1 = v1.UserAccountSerializer
    serializer_class_v2 = v2.UserAccountSerializer
    docstring_prefix = "userprofile/api_docs"
    permission_classes = [permissions.IsSelf]

    def get_queryset(self):
        # Scope the queryset to the requesting user only.
        requester = self.request.user
        return self.queryset.filter(id=requester.id)
class SimpleProfileViewSet(VersionedViewSetMixin,
                           mixins.ListModelMixin,
                           mixins.RetrieveModelMixin,
                           mixins.UpdateModelMixin,
                           viewsets.GenericViewSet):
    """A simpler viewset for the UserProfile model."""
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    queryset = models.UserProfile.objects.all()
    serializer_class_v1 = v2.SimpleProfileSerializer
    serializer_class_v2 = v2.SimpleProfileSerializer
    docstring_prefix = "userprofile/api_docs"
    permission_classes = [permissions.IsSelf]

    def get_queryset(self):
        # Anonymous callers get nothing; authenticated callers see only
        # their own profile. The result is cached back onto self.queryset.
        base = super().get_queryset()
        if not self.request.user.is_authenticated():
            self.queryset = base.none()
        else:
            self.queryset = base.filter(user=self.request.user)
        return self.queryset

    def update(self, request, *args, **kwargs):
        # Force the profile's owner to be the requesting user.
        request.data['user'] = request.user.id
        return super().update(request, *args, **kwargs)
class UserProfileViewSet(VersionedViewSetMixin,
                         mixins.ListModelMixin,
                         mixins.RetrieveModelMixin,
                         mixins.UpdateModelMixin,
                         viewsets.GenericViewSet):
    """The User's account info. See userprofile/api_docs for more info"""
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    queryset = models.UserProfile.objects.all()
    serializer_class_v1 = v1.UserProfileSerializer
    serializer_class_v2 = v2.UserProfileSerializer
    docstring_prefix = "userprofile/api_docs"
    permission_classes = [permissions.IsSelf]

    def get_queryset(self):
        # Only the requesting user's own profile is visible.
        return self.queryset.filter(user__id=self.request.user.id)

    def _absolutize_bio_urls(self, profile):
        """Rewrite a serialized profile's bio URLs as fully-qualified URIs.

        Shared by `list` and `retrieve` (previously duplicated inline).
        """
        for q in profile['bio']:
            q['response_url'] = self.request.build_absolute_uri(q['response_url'])
            q['question_url'] = self.request.build_absolute_uri(q['question_url'])

    def list(self, request, *args, **kwargs):
        resp = super(UserProfileViewSet, self).list(request, *args, **kwargs)
        # Hack the data to include FQDNs for the response_url item.
        for profile in resp.data['results']:
            self._absolutize_bio_urls(profile)
        return resp

    def retrieve(self, request, *args, **kwargs):
        resp = super(UserProfileViewSet, self).retrieve(request, *args, **kwargs)
        self._absolutize_bio_urls(resp.data)
        return resp

    def update(self, request, *args, **kwargs):
        """Allow setting `timezone` or `needs_onboarding`.

        * timezone: A timezone string, e.g. "America/Chicago"
        * needs_onboarding: the profile's onboarding flag (see the login
          payload, which also exposes it) -- fixes a copy/paste duplicate
          of the `timezone` bullet in the original docstring.
        """
        if not settings.DEBUG:
            # Lazy %-style logging args: the string is only formatted if
            # the record is actually emitted.
            logger.info("User %s setting timezone to %s",
                        request.user.id, request.data.get('timezone', None))
        request.data['user'] = request.user.id
        return super(UserProfileViewSet, self).update(request, *args, **kwargs)
@api_view(['POST'])
def api_logout(request):
    """This view allows a user to log out via the api (note that this returns
    rest_framework Response instances), and send additional details with the
    logout request. Here's an example scenario:

        A user logs out of their device, and sends their registration_id for
        GCM along with the logout request. That request data gets bundled with
        the logout signal, to which the notifications app is subscribed, so
        that app knows to remove the user's device & queued messages.

    To implement the above scenario, the client would send a POST request
    containing the following information:

        {registration_id: 'YOUR-REGISTRATION-ID'}

    Returns a 404 if the request does not contain an authenticated user.
    Returns a 200 response upon success.

    ----
    """
    # NOTE(review): `request.user` is truthy even for AnonymousUser, so the
    # 404 branch below may never fire for anonymous requests -- confirm
    # whether this should test `request.user.is_authenticated` instead.
    if hasattr(request, "user") and request.user:
        logout(request)  # Sends the user_logged_out signal
        return Response(None, status=status.HTTP_200_OK)
    return Response(None, status=status.HTTP_404_NOT_FOUND)
class ObtainAuthorization(ObtainAuthToken):
    """Custom Authorization view that, in addition to the user's auth token
    (default for the superclass), returns some additional user information:

    * token
    * username
    * user_id
    * userprofile_id
    * first_name
    * last_name
    * full_name
    * email
    * needs_onboarding

    USAGE: Send a POST request to this view containing username/password
    data and receive a JSON-encoded response.
    """
    serializer_class = v1.AuthTokenSerializer

    def post(self, request):
        """Authenticate and return the user's token plus profile details.

        Propagates the serializer's ValidationError to the client on bad
        credentials.
        """
        serializer = self.serializer_class(data=request.data)
        try:
            # raise_exception=True means is_valid() either returns True or
            # raises ValidationError -- there is no falsy return path, so
            # the original trailing `return Response(serializer.errors, ...)`
            # was unreachable and has been removed.
            serializer.is_valid(raise_exception=True)
        except ValidationError:
            # Failed login attempt.
            # TODO: record the attempt with django-axes (AccessLog); a
            # sketch of that code was previously commented out here.
            raise  # bare `raise` preserves the original traceback
        user = serializer.validated_data['user']
        token, created = Token.objects.get_or_create(user=user)
        # Update the number of times the user has logged in
        profiles = models.UserProfile.objects.filter(user=user)
        profiles.update(app_logins=F('app_logins') + 1)
        return Response({
            'token': token.key,
            'username': user.username,
            'id': user.id,
            'user_id': user.id,  # duplicate of 'id'; kept for clients
            'userprofile_id': user.userprofile.id,
            'first_name': user.first_name,
            'last_name': user.last_name,
            'full_name': user.get_full_name(),
            'email': user.email,
            'zipcode': user.userprofile.zipcode,
            'birthday': user.userprofile.birthday,  # TODO: serialize
            'sex': user.userprofile.sex,
            'gender': user.userprofile.sex,  # alias of 'sex'; kept for clients
            'employed': user.userprofile.employed,
            'is_parent': user.userprofile.is_parent,
            'in_relationship': user.userprofile.in_relationship,
            'has_degree': user.userprofile.has_degree,
            'maximum_daily_notifications': user.userprofile.maximum_daily_notifications,
            'needs_onboarding': user.userprofile.needs_onboarding,
            'object_type': 'user',
        })
# Module-level view callable for URLconf use; drop-in replacement for
# rest_framework.authtoken.views.obtain_auth_token with the richer payload.
obtain_auth_token = ObtainAuthorization.as_view()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.