text stringlengths 12 1.05M | repo_name stringlengths 5 86 | path stringlengths 4 191 | language stringclasses 1 value | license stringclasses 15 values | size int32 12 1.05M | keyword listlengths 1 23 | text_hash stringlengths 64 64 |
|---|---|---|---|---|---|---|---|
"""
Acceptance tests for Studio related to edit/save peer grading interface.
"""
from ...fixtures.course import XBlockFixtureDesc
from ...pages.studio.export import ExportPage
from ...pages.studio.component_editor import ComponentEditorView
from ...pages.studio.overview import CourseOutlinePage
from base_studio_test import StudioCourseTest
from ..helpers import load_data_str
class ORAComponentTest(StudioCourseTest):
    """
    Tests that edit/save is working correctly when link_to_location
    is given in peer grading interface settings.
    """

    def setUp(self):
        # Build page objects for the outline and export pages of the
        # course that StudioCourseTest.setUp created.
        super(ORAComponentTest, self).setUp()
        self.course_outline_page = CourseOutlinePage(
            self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run']
        )
        self.export_page = ExportPage(self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run'])

    def populate_course_fixture(self, course_fixture):
        """
        Populate the test course fixture with a graded peer problem
        ('combinedopenended') and a peer grading module ('peergrading')
        inside a single unit.
        """
        course_fixture.add_children(
            XBlockFixtureDesc('chapter', 'Test Section').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
                    XBlockFixtureDesc('vertical', 'Test Unit').add_children(
                        XBlockFixtureDesc('combinedopenended', "Peer Problem",
                                          data=load_data_str('ora_peer_problem.xml'), metadata={'graded': True}),
                        XBlockFixtureDesc('peergrading', 'Peer Module'),
                    )
                )
            )
        )

    def _go_to_unit_page(self, section_name='Test Section', subsection_name='Test Subsection', unit_name='Test Unit'):
        """
        Open the course outline, expand the given subsection and navigate
        to the unit page. Returns the unit page object.
        """
        self.course_outline_page.visit()
        subsection = self.course_outline_page.section(section_name).subsection(subsection_name)
        return subsection.toggle_expand().unit(unit_name).go_to()

    def test_edit_save_and_export(self):
        """
        Ensure that edit/save is working correctly with link_to_location
        in peer interface settings.
        """
        self.course_outline_page.visit()
        unit = self._go_to_unit_page()

        # xblocks[1] is the peer problem added by populate_course_fixture.
        peer_problem_location = unit.xblocks[1].locator
        # Problem location should contain "combinedopenended".
        self.assertIn("combinedopenended", peer_problem_location)

        # xblocks[2] is the peer grading interface component.
        component = unit.xblocks[2]
        # Interface component name should be "Peer Module".
        self.assertEqual(component.name, "Peer Module")

        component.edit()
        component_editor = ComponentEditorView(self.browser, component.locator)
        component_editor.set_field_value_and_save('Link to Problem Location', peer_problem_location)

        # Verify that we can edit component again after saving and link_to_location is present.
        component.edit()
        location_input_element = component_editor.get_setting_element("Link to Problem Location")
        self.assertEqual(
            location_input_element.get_attribute('value'),
            peer_problem_location
        )
| dsajkl/reqiop | common/test/acceptance/tests/studio/test_studio_with_ora_component.py | Python | agpl-3.0 | 3,131 | [
"VisIt"
] | 004fc45bf7f55d50f8b235b1f2705c0854332a6e72378cfda9a4c5481a401c55 |
#!/usr/bin/env python
import os
import re
import urllib
import csv
import datetime
from string import Template
from optparse import OptionParser
# Credentials/destination used by Project.upload_tarball().
username = 'xclaesse'
upload_server = 'master.gnome.org'

# string.Template used by Project.get_release_notes(); the $placeholders
# are filled from same-named local variables via Template.substitute(locals()).
template = '''\
$name $version is now available for download from:
$download
$md5sums
What is it?
===========
$about
You can visit the project web site:
$website
What's New?
===========
$news
$footer'''
class Bug:
    """Record pairing a Bugzilla bug number with the author of the commit
    that referenced it (used by Project.parse_commit/query_bug_commits)."""

    def __init__(self, number='', author=''):
        # Both fields default to '' so the existing
        # "bug = Bug(); bug.number = ...; bug.author = ..." call sites
        # keep working; the constructor arguments are a backward-compatible
        # convenience.
        self.number = number
        self.author = author
class Project:
    """Automates a GNOME-style release: reads package metadata from the
    local config.h, collects NEWS/commit-log/checksum information, tags
    the repository, uploads the tarball and drafts the announcement email.

    Python 2 code (print statements, urllib.urlopen, csv reader.next)."""

    def __init__(self):
        # Parse package name, version and bug-report address out of config.h.
        f = open('config.h', 'r')
        s = f.read()
        f.close()

        key = {}
        key['package'] = '#define PACKAGE_NAME "'
        key['version'] = '#define PACKAGE_VERSION "'
        key['bugreport'] = '#define PACKAGE_BUGREPORT "'

        for line in s.splitlines(1):
            if line.startswith(key['package']):
                p1 = len(key['package'])
                p2 = line.rfind('"')
                self.package_name = line[p1:p2]
            elif line.startswith(key['version']):
                p1 = len(key['version'])
                p2 = line.rfind('"')
                self.package_version = line[p1:p2]
            elif line.startswith(key['bugreport']):
                # The bugreport value looks like "...?product=<module>";
                # keep only the part after the last '='.
                p2 = line.rfind('"')
                p1 = line.rfind('=') + 1
                self.package_module = line[p1:p2]

        # The download directory on the server uses only the major.minor
        # part of the version (e.g. "2.27" for "2.27.1"); fall back to the
        # full version string when there are fewer than two dots.
        first = self.package_version.find('.')
        second = self.package_version.find('.', first + 1)
        if first == -1 or second == -1 or first == second:
            version_dir = self.package_version
        else:
            version_dir = self.package_version[:second]
        self.package_dl_url = 'http://download.gnome.org/sources/%s/%s/' % (self.package_name.lower(),
            version_dir)

    def exec_cmd(self, cmd):
        """Run a shell command and return its stdout as a string."""
        return os.popen(cmd).read()

    def get_news(self):
        # Extract this version's section from NEWS: the text between
        # "NEW in <version>" and the next "NEW in" header (skipping the
        # header line and the underline below it).
        # NOTE(review): implicitly returns None when the header for the
        # current version is missing — confirm callers tolerate that.
        f = open ('NEWS', 'r')
        s = f.read()
        f.close()

        start = s.find ('NEW in '+ self.package_version)
        if start != -1:
            start = s.find ('\n', start) + 1
            start = s.find ('\n', start) + 1
            end = s.find ('NEW in', start) - 1
            return s[start:end].strip()

    def get_md5sums(self):
        """Return the md5sum output for the .tar.gz and .tar.bz2 tarballs."""
        md5sums = ''

        cmd = 'md5sum %s-%s.tar.gz' % (self.package_name.lower(), self.package_version)
        md5sums += self.exec_cmd(cmd)

        cmd = 'md5sum %s-%s.tar.bz2' % (self.package_name.lower(), self.package_version)
        md5sums += self.exec_cmd(cmd).strip()

        return md5sums

    def get_bugzilla_info(self):
        """Scrape the product description and project homepage URL from the
        GNOME Bugzilla browse page of this module."""
        query = 'http://bugzilla.gnome.org/browse.cgi?product=%s' % (self.package_module)
        f = urllib.urlopen(query)
        s = f.read()
        f.close()

        # Description is the first <p><i>...</i></p> block on the page.
        s1 = '<p><i>'
        i = s.find(s1)
        start = i + len(s1)
        s2 = '</i></p>'
        end = s.find(s2, i + 1)
        description = s[start:end]

        # Homepage is the first href after the "GNOME SVN" marker.
        s1 = "GNOME SVN"
        i = s.find(s1)
        s1 = "href"
        i = s.find(s1, i)
        start = i + 6
        s2 = '">'
        end = s.find(s2, start)
        project_url = s[start:end]

        return (description, project_url)

    def get_release_notes(self):
        # Fill the module-level announcement template; Template.substitute
        # picks up the local variables defined below by name.
        name = self.package_name
        version = self.package_version
        download = self.package_dl_url
        md5sums = self.get_md5sums()
        (about, website) = self.get_bugzilla_info()
        news = self.get_news()
        footer = '%s\n%s team' % (datetime.date.today().strftime('%d %B %Y'),\
            self.package_name)

        t = Template(template)
        return t.substitute(locals())

    def get_last_tag(self):
        # git-tag lists tags in sorted order; take the last one.
        tags_str = self.exec_cmd('git-tag')
        tags = tags_str.splitlines()
        return tags[len(tags)-1]

    def parse_commit(self, ref, author, date, message):
        """Classify one commit message into translations, bug fixes
        (lines containing '#<number>') or plain commits."""
        # If the message ends with "(name)", treat that as the real author
        # (commits applied on behalf of someone else).
        p1 = message.rfind('(')
        p2 = message.rfind (')')
        if len(message) - p2 <= 2:
            author = message[p1+1:p2]
            message = message[:p1]

        msg = message.lower()
        if msg.find('translation') != -1 and\
           msg.find('updated') != -1:
            self.translations += ' - ' + message + ' (' + author + ').\n'
        elif message.find('#') != -1:
            # Collect every '#<number>' reference for the Bugzilla query.
            p1 = message.find('#')
            while p1 != -1:
                bug = Bug()
                p2 = message.find(' ', p1)
                bug.number = message[p1+1:p2]
                bug.author = author
                self.bug_commits.append(bug)
                p1 = message.find('#', p2)
        else:
            self.commits += ' - ' + message + ' (' + author + ').\n'

    def query_bug_commits(self):
        """Look up the collected bug numbers on GNOME Bugzilla (CSV export)
        and build the 'Bugs fixed' section."""
        bugs = ''
        for bug in self.bug_commits:
            bugs += bug.number + ','

        # Bugzilla query to use
        query = 'http://bugzilla.gnome.org/buglist.cgi?ctype=csv' \
                '&bug_status=RESOLVED,CLOSED,VERIFIED' \
                '&resolution=FIXED' \
                '&bug_id=' + bugs.replace(',', '%2c')

        f = urllib.urlopen(query)
        s = f.read()
        f.close()

        # Locate the bug_id and description columns in the CSV header.
        col_bug_id = -1
        col_description = -1

        reader = csv.reader(s.splitlines(1))
        header = reader.next()
        i = 0
        for col in header:
            if col == 'bug_id':
                col_bug_id = i
            if col == 'short_short_desc':
                col_description = i
            i = i + 1

        for row in reader:
            bug_number = row[col_bug_id]
            description = row[col_description]

            for bug in self.bug_commits:
                if bug.number == bug_number:
                    self.bugs += ' - Fixed #%s, %s (%s)\n' % (bug.number, description, bug.author)
                    break

    def get_commits(self):
        """Parse 'git-log <last_tag>..' into commits/translations/bugs."""
        self.commits = ''
        self.translations = ''
        self.bugs = ''
        self.bug_commits = []

        last_tag = self.get_last_tag()
        ref = None
        changes = self.exec_cmd ("git-log " + last_tag + "..")
        for line in changes.splitlines(1):
            if line.startswith('commit'):
                # A new commit header flushes the previous one.
                # NOTE(review): the final commit in the log is never
                # flushed after the loop — looks like it is silently
                # dropped; confirm before relying on the output.
                if ref != None:
                    self.parse_commit (ref, author, date, message)
                p1 = line.find(' ')
                ref = line[p1:].strip()
                author = ''
                date = ''
                message = ''
            elif line.startswith('Author:'):
                # Keep only the name part, before the '<email>'.
                p1 = line.find(' ')
                p2 = line.find('<')
                author = line[p1:p2].strip()
            elif line.startswith('Date:'):
                p1 = line.find(' ')
                date = line[p1:].strip()
            elif line.startswith('    git-svn-id:'):
                continue
            elif line.startswith('Merge:'):
                continue
            else:
                # Accumulate the (indented) commit message body.
                msg = line.strip()
                if msg == '':
                    continue
                if message != '':
                    message += '\n'
                message += msg

        self.query_bug_commits ()

    def make_tag(self):
        """Tag the release both in the SVN remote and in git."""
        new_tag = self.package_name.upper() + '_' +\
            self.package_version.replace('.', '_')

        url1 = self.exec_cmd('git-config svn-remote.svn.url').strip()
        url2 = url1[:url1.rfind('/')] + '/tags/' + new_tag
        self.exec_cmd('svn copy %s %s -m "Tagged for release %s."' % (url1, url2, self.package_version))
        self.exec_cmd('git-tag -m "Tagged for release %s." %s' % ( self.package_version, new_tag))

    def generate_news(self):
        """Return a freshly generated NEWS section for this version."""
        self.get_commits()
        news = 'NEW in '+ self.package_version + '\n==============\n'
        news += self.commits + '\nBugs fixed:\n' + self.bugs + '\nTranslations:\n' + self.translations + '\n'

        return news

    def write_news(self):
        # Prepend the generated section to the existing NEWS file via /tmp.
        news = self.generate_news()

        f = open ('/tmp/NEWS', 'w')
        s = f.write(news)
        f.close()

        self.exec_cmd('cat NEWS >> /tmp/NEWS')
        self.exec_cmd('mv /tmp/NEWS .')

    def upload_tarball(self):
        """scp the tarball to the upload server and run install-module."""
        tarball = '%s-%s.tar.gz' % (self.package_name.lower(), self.package_version)

        cmd = 'scp %s %s@%s:' % (tarball, username, upload_server)
        self.exec_cmd(cmd)

        cmd = 'ssh %s@%s install-module -u %s' % (username, upload_server, tarball)
        self.exec_cmd(cmd)

    def send_email(self):
        """Open the user's mail client pre-filled with the announcement."""
        notes = self.get_release_notes()
        cmd = 'xdg-email ' \
              ' --cc telepathy@lists.freedesktop.org' \
              ' --subject "ANNOUNCE: Empathy %s"' \
              ' --body "%s"' \
              ' gnome-announce-list@gnome.org' % (self.package_version, notes)
        self.exec_cmd(cmd)

    def release(self):
        """Run the full release pipeline: tag, upload, announce."""
        self.make_tag()
        self.upload_tarball()
        self.send_email()
if __name__ == '__main__':
    p = Project()

    # Command-line switches select which release step(s) to run; several
    # may be combined in one invocation.
    parser = OptionParser()
    parser.add_option("-n", "--print-news", action="store_true",\
        dest="print_news", help="Generate and print news")
    parser.add_option("-p", "--print-notes", action="store_true",\
        dest="print_notes", help="Generate and print the release notes")
    parser.add_option("-w", "--write-news", action="store_true",\
        dest="write_news", help="Generate and write news into the NEWS file")
    parser.add_option("-r", "--release", action="store_true",\
        dest="release", help="Release the tarball")

    (options, args) = parser.parse_args ()

    if (options.print_news):
        print p.generate_news ()
    if (options.print_notes):
        print p.get_release_notes ()
    if (options.write_news):
        p.write_news ()
    if (options.release):
        p.release ()
| gcorvala/gsoc2008 | release.py | Python | gpl-2.0 | 8,011 | [
"VisIt"
] | 8649f10d2a6aeacb421f7e91114d340995f0f6183019beea417586432ed073e8 |
import matplotlib
matplotlib.use('Agg')
import numpy as np
import netCDF4
from datetime import datetime
import pyroms
import pyroms_toolbox
import sys
def create_HYCOM_file(name, time, lon, lat, z, var):
    """Write one day of HYCOM data to a NETCDF3_64BIT file.

    name -- output file path
    time -- time value, in days since 1900-01-01 (see 'ocean_time' units)
    lon, lat -- 2-D coordinate arrays, shaped (lat, lon)
    z -- 1-D depth array (meters)
    var -- 3-D data array shaped (z, lat, lon)

    NOTE(review): reads the module-level globals 'outvarname', 'spval',
    'units' and 'long_name', which must be set before calling — confirm
    when reusing this function elsewhere.
    """
    print 'Write with file %s' %name

    # create netCDF file
    nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT')
    nc.Author = sys._getframe().f_code.co_name
    nc.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    nc.title = 'HYCOM + NCODA Global 1/12 Analysis (GLBa0.08)'

    # create dimensions
    Mp, Lp = lon.shape
    N = len(z)
    nc.createDimension('lon', Lp)
    nc.createDimension('lat', Mp)
    nc.createDimension('z', N)
    nc.createDimension('ocean_time', None)

    # create variables
    nc.createVariable('lon', 'f', ('lat', 'lon'))
    nc.variables['lon'].long_name = 'longitude'
    nc.variables['lon'].units = 'degrees_east'
    nc.variables['lon'][:] = lon

    nc.createVariable('lat', 'f', ('lat', 'lon'))
    nc.variables['lat'].long_name = 'latitude'
    nc.variables['lat'].units = 'degrees_north'
    nc.variables['lat'][:] = lat

    nc.createVariable('z', 'f', ('z'))
    nc.variables['z'].long_name = 'depth'
    nc.variables['z'].units = 'meter'
    nc.variables['z'][:] = z

    nc.createVariable('ocean_time', 'f', ('ocean_time'))
    nc.variables['ocean_time'].units = 'days since 1900-01-01 00:00:00'
    nc.variables['ocean_time'].calendar = 'LEAP'
    nc.variables['ocean_time'][0] = time

    nc.createVariable(outvarname, 'f', ('ocean_time', 'z', 'lat', 'lon'), fill_value=spval)
    nc.variables[outvarname].long_name = long_name
    nc.variables[outvarname].units = units
    nc.variables[outvarname].coordinates = 'lon lat'
    nc.variables[outvarname][0] = var

    nc.close()

    print 'Done with file %s' %name
# get HYCOM Northeast Pacific data from 2007 to 2011
year = 2015
retry='True'

invarname = 'u'
outvarname = 'u'

# read grid and variable attributes from the first file
url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/uvel/archv.2015_001_00_3zu.nc'
dataset = netCDF4.Dataset(url)
# Sub-region of the global grid (indices chosen for the Palau domain).
lon = dataset.variables['Longitude'][1500-9:1800,600:940]
lat = dataset.variables['Latitude'][1500-9:1800,600:940]
z = dataset.variables['Depth'][:]
#spval = dataset.variables[invarname]._FillValue
units = dataset.variables[invarname].units
long_name = dataset.variables[invarname].long_name
dataset.close()

retry_day = []

# loop over daily files
if year%4 == 0:
    daysinyear = 366
else:
#    daysinyear = 365
    # NOTE(review): 32 looks like a leftover debug limit replacing the
    # commented-out 365 — confirm before a production run.
    daysinyear = 32

for day in range(1,daysinyear+1):
    print 'Processing file for %s, day %03d, year %04d' %(invarname, day, year)
    url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/uvel/archv.%04d_%03d_00_3zu.nc' %(year,day)

    # get data from server; on any failure remember the day for a retry pass
    try:
        dataset = netCDF4.Dataset(url)
        var = dataset.variables[invarname][0,:,1500-9:1800,600:940]
        spval = var.get_fill_value()
        dataset.close()
        print 'Got %s from server...' %invarname
    except:
        print 'No file on the server... We skip this day.'
        retry_day.append(day)
        continue

    # create netCDF file
    outfile = 'data/HYCOM_GLBa0.08_%s_%04d_%03d.nc' %(outvarname,year,day)
    jday = pyroms_toolbox.date2jday(datetime(year, 1, 1)) + day - 1
    create_HYCOM_file(outfile, jday, lon, lat, z, var)

if retry == 'True':
    if len(retry_day) != 0:
        print "Some file have not been downloded... Let's try again"
        # Keep retrying until every remembered day has been fetched.
        # NOTE(review): retry_day is mutated (remove) while being iterated,
        # which can skip elements within one pass; the enclosing while loop
        # presumably compensates by re-scanning — confirm.
        while len(retry_day) != 0:
            for day in retry_day:
                print 'Retry file for %s, day %03d, year %04d' %(invarname, day, year)
                url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/uvel/archv.%04d_%03d_00_3zu.nc' %(year,day)

                # get data from server
                try:
                    dataset = netCDF4.Dataset(url)
                    var = dataset.variables[invarname][0,:,1500-9:1800,600:940]
                    spval = var.get_fill_value()
                    dataset.close()
                    print 'Got %s from server...' %invarname
                except:
                    print 'No file on the server... We skip this day.'
                    continue

                # create netCDF file
                outfile = 'data/HYCOM_GLBa0.08_%s_%04d_%03d.nc' %(outvarname,year,day)
                jday = pyroms_toolbox.date2jday(datetime(year, 1, 1)) + day - 1
                create_HYCOM_file(outfile, jday, lon, lat, z, var)
                retry_day.remove(day)
| kshedstrom/pyroms | examples/Palau_HYCOM/get_hycom_GLBa0.08_u_2015.py | Python | bsd-3-clause | 4,472 | [
"NetCDF"
] | ce4f415611ba74ec9975262cd9e9f5a850fc5aa4c38c2ee4173f7116fb6e6997 |
# Copyright (c) 2013, the GPy Authors (see AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from ..core import GP
from .. import likelihoods
from .. import kern
import numpy as np
from ..inference.latent_function_inference.expectation_propagation import EP
class GPClassification(GP):
    """
    Gaussian Process classification

    This is a thin wrapper around the models.GP class, with a set of sensible defaults

    :param X: input observations
    :param Y: observed values, can be None if likelihood is not None
    :param kernel: a GPy kernel, defaults to rbf
    :param likelihood: a GPy likelihood, defaults to Bernoulli
    :param inference_method: Latent function inference to use, defaults to EP
    :type inference_method: :class:`GPy.inference.latent_function_inference.LatentFunctionInference`

    .. Note:: Multiple independent outputs are allowed using columns of Y
    """

    def __init__(self, X, Y, kernel=None, Y_metadata=None, mean_function=None, inference_method=None,
                 likelihood=None, normalizer=False):
        # Sensible defaults: RBF kernel, Bernoulli likelihood, EP inference.
        if kernel is None:
            kernel = kern.RBF(X.shape[1])

        if likelihood is None:
            likelihood = likelihoods.Bernoulli()

        if inference_method is None:
            inference_method = EP()

        GP.__init__(self, X=X, Y=Y, kernel=kernel, likelihood=likelihood, inference_method=inference_method,
                    mean_function=mean_function, name='gp_classification', normalizer=normalizer)

    @staticmethod
    def from_gp(gp):
        """Build a GPClassification model from a generic GP model.

        Bug fixes vs. the previous version: the constructed model is now
        returned (it used to be discarded, so callers always got None),
        and arguments are passed by keyword (positionally, gp.likelihood
        landed in the Y_metadata slot and gp.inference_method/
        gp.mean_function were swapped; the unsupported ``name`` kwarg
        would have raised TypeError).
        """
        from copy import deepcopy
        gp = deepcopy(gp)
        return GPClassification(gp.X, gp.Y, kernel=gp.kern, likelihood=gp.likelihood,
                                inference_method=gp.inference_method,
                                mean_function=gp.mean_function)

    def to_dict(self, save_data=True):
        """Serialize the model to a dict, tagging the concrete class name
        so from_dict can reconstruct the right type."""
        model_dict = super(GPClassification, self).to_dict(save_data)
        model_dict["class"] = "GPy.models.GPClassification"
        return model_dict

    @staticmethod
    def from_dict(input_dict, data=None):
        """Recreate a GPClassification model from a dict made by to_dict."""
        import GPy
        m = GPy.core.model.Model.from_dict(input_dict, data)
        return GPClassification.from_gp(m)

    def save_model(self, output_filename, compress=True, save_data=True):
        """Save the model to file.

        Bug fix: forward the caller's ``compress``/``save_data`` flags
        instead of hard-coding both to True.
        """
        self._save_model(output_filename, compress=compress, save_data=save_data)

    @staticmethod
    def _build_from_input_dict(input_dict, data=None):
        input_dict = GPClassification._format_input_dict(input_dict, data)
        input_dict.pop('name', None)  # Name parameter not required by GPClassification
        return GPClassification(**input_dict)
| esiivola/GPYgradients | GPy/models/gp_classification.py | Python | bsd-3-clause | 2,545 | [
"Gaussian"
] | fa5d0d7a4c58e2de89ee6ed6e61bb4e7c93fea67578cc25ae55a90c9aa9ffc73 |
#! /usr/bin/env python
"""
Plot several runs of the iaf_cond_exp_sfa_rr neuron with no input and
various initial values for the membrane potential.
"""
import cynest as nest
import numpy
import pylab
# Sweep the initial membrane potential from -100 mV to -60 mV in 10 mV
# steps; each run starts from a fresh kernel so the simulations are
# independent.
for vinit in numpy.arange(-100, -50, 10, float):

    nest.ResetKernel()
    cbn = nest.Create('iaf_cond_exp_sfa_rr')

    # set the initial membrane potential
    nest.SetStatus(cbn, 'V_m', vinit)

    # Record the membrane potential with a voltmeter (with timestamps).
    voltmeter = nest.Create('voltmeter')
    nest.SetStatus(voltmeter, {'withtime': True})
    nest.Connect(voltmeter, cbn)
    nest.Simulate(75.0)

    # Extract the recorded trace and add it to the common plot.
    t = nest.GetStatus(voltmeter,"events")[0]["times"]
    v = nest.GetStatus(voltmeter,"events")[0]["V_m"]
    pylab.plot(t, v, label="initial V_m=%.2f mV" % vinit)

pylab.legend(loc=4)
pylab.xlabel("time (ms)")
pylab.ylabel("V_m (mV)")
pylab.show()
| gewaltig/cython-neuron | cynest/examples/vinit_example.py | Python | gpl-2.0 | 806 | [
"NEURON"
] | 4b0ea6df5fa7a5c4323264353e5a739aac3e1f3d297b5ddfbb839234ee39dcdf |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""WebUpload web interface"""
__revision__ = "$Id$"
__lastupdated__ = """$Date$"""
import cgi
from invenio.webinterface_handler_wsgi_utils import Field
from invenio.access_control_engine import acc_authorize_action
from invenio.config import CFG_SITE_URL
from invenio.urlutils import redirect_to_url
from invenio.messages import gettext_set_language
from invenio.webinterface_handler import wash_urlargd, WebInterfaceDirectory
from invenio.webuser import getUid, page_not_authorized, collect_user_info
from invenio.webpage import page
from invenio.batchuploader_engine import metadata_upload, cli_upload, \
get_user_metadata_uploads, get_user_document_uploads, document_upload, \
get_daemon_doc_files, get_daemon_meta_files, cli_allocate_record
import re
import calendar
# Best-effort template load: if the Invenio templating machinery is not
# importable, leave batchuploader_templates undefined rather than failing
# at module import time.
# NOTE(review): the bare 'except' also hides genuine errors inside
# invenio.template.load — confirm this is intentional before narrowing it.
try:
    import invenio.template
    batchuploader_templates = invenio.template.load('batchuploader')
except:
    pass
def check_date(date):
    """ Check if date is correct
    @return:
        0 - Default or correct date
        3 - Incorrect format
        4 - Date does not exist
    """
    # Empty value or the form's placeholder means "no date given".
    if not date or date == "yyyy-mm-dd":
        return 0
    # Years 2000-2199; month and day may omit a leading zero.
    # (Raw string for the regex — idiomatic, identical pattern.)
    correct_format = re.match(r"2[01]\d\d-[01]?\d-[0-3]?\d", date)
    if not correct_format:
        return 3
    # separate year, month, day
    date = correct_format.group(0).split("-")
    try:
        # calendar.weekday raises ValueError for dates that do not exist
        # (e.g. 2011-02-30), which we report as code 4.
        calendar.weekday(int(date[0]), int(date[1]), int(date[2]))
    except ValueError:
        return 4
    return 0
def check_time(time):
    """ Check if time is correct
    @return:
        0 - Default or correct time
        1 - Incorrect format
    """
    # An empty value or the form's placeholder counts as "no time given".
    if not time or time == "hh:mm:ss":
        return 0
    # hh:mm:ss with hours loosely restricted to 0x/1x/2x.
    if re.match("[0-2]\d:[0-5]\d:[0-5]\d", time) is None:
        return 1
    return 0
def check_file(name):
    """ Simple check to avoid blank filename and bad extensions
    @return:
        0 - Correct file name
        1 - File name not correct
    """
    # Only .xml uploads are accepted; an empty name also fails this test.
    return 0 if name.endswith('.xml') else 1
def user_authorization(req, ln):
    """ Check user authorization to visit page.

    Returns the page_not_authorized response when access is denied, or
    None (implicitly) when the user is authorized.
    """
    _ = gettext_set_language(ln)
    user_info = collect_user_info(req)
    if user_info['email'] == 'guest':
        # NOTE(review): for guests the auth_code result is ignored and
        # access is always denied — presumably intentional (guests can
        # never run the batchuploader); confirm.
        auth_code, auth_message = acc_authorize_action(req, 'runbatchuploader')
        referer = '/batchuploader/'
        error_msg = _("Guests are not authorized to run batchuploader")
        return page_not_authorized(req=req, referer=referer,
                                   text=error_msg, navmenuid="batchuploader")
    else:
        auth_code, auth_message = acc_authorize_action(req, 'runbatchuploader')
        if auth_code != 0:
            referer = '/batchuploader/'
            error_msg = _("The user '%s' is not authorized to run batchuploader" % \
                        (cgi.escape(user_info['nickname'])))
            return page_not_authorized(req=req, referer=referer,
                                       text=error_msg, navmenuid="batchuploader")
class WebInterfaceBatchUploaderPages(WebInterfaceDirectory):
    """Defines the set of /batchuploader pages."""

    # URL components exposed under /batchuploader/ ('' maps to index).
    _exports = ['', 'metadata', 'robotupload', 'metasubmit', 'history', 'documents', 'docsubmit', 'daemon', 'allocaterecord']

    def index(self, req, form):
        """ The function called by default
        """
        redirect_to_url(req, "%s/batchuploader/metadata" % (CFG_SITE_URL))

    def metadata(self, req, form):
        """ Display Metadata file upload form """
        argd = wash_urlargd(form, {'error': (int, 0),
                                   'mode': (str, ""),
                                   'submit_date': (str, "yyyy-mm-dd"),
                                   'submit_time': (str, "hh:mm:ss")})
        _ = gettext_set_language(argd['ln'])

        not_authorized = user_authorization(req, argd['ln'])
        if not_authorized:
            return not_authorized
        uid = getUid(req)
        body = batchuploader_templates.tmpl_display_menu(argd['ln'], ref="metadata")
        body += batchuploader_templates.tmpl_display_web_metaupload_form(argd['ln'],
                argd['error'], argd['mode'], argd['submit_date'],
                argd['submit_time'])

        title = _("Metadata batch upload")
        return page(title = title,
                    body = body,
                    metaheaderadd = batchuploader_templates.tmpl_styles(),
                    uid = uid,
                    lastupdated = __lastupdated__,
                    req = req,
                    language = argd['ln'],
                    navmenuid = "batchuploader")

    def documents(self, req, form):
        """ Display document upload form """
        argd = wash_urlargd(form, {
                            })
        _ = gettext_set_language(argd['ln'])

        not_authorized = user_authorization(req, argd['ln'])
        if not_authorized:
            return not_authorized
        uid = getUid(req)
        body = batchuploader_templates.tmpl_display_menu(argd['ln'], ref="documents")
        body += batchuploader_templates.tmpl_display_web_docupload_form(argd['ln'])

        title = _("Document batch upload")
        return page(title = title,
                    body = body,
                    metaheaderadd = batchuploader_templates.tmpl_styles(),
                    uid = uid,
                    lastupdated = __lastupdated__,
                    req = req,
                    language = argd['ln'],
                    navmenuid = "batchuploader")

    def docsubmit(self, req, form):
        """ Function called after submitting the document upload form.
            Performs the appropiate action depending on the input parameters
        """
        argd = wash_urlargd(form, {'docfolder': (str, ""),
                                   'matching': (str, ""),
                                   'mode': (str, ""),
                                   'submit_date': (str, ""),
                                   'submit_time': (str, "")})
        _ = gettext_set_language(argd['ln'])

        not_authorized = user_authorization(req, argd['ln'])
        if not_authorized:
            return not_authorized
        # Check if input fields are correct, if not, redirect to upload form.
        # NOTE(review): redirect_to_url presumably aborts the request (no
        # explicit return here) — execution past a failed check depends on
        # that behavior; confirm.
        correct_date = check_date(argd['submit_date'])
        correct_time = check_time(argd['submit_time'])
        if correct_time != 0:
            redirect_to_url(req,
                "%s/batchuploader/documents?error=1&mode=%s&docfolder=%s&matching=%s&submit_date=%s"
                % (CFG_SITE_URL, argd['mode'], argd['docfolder'], argd['matching'], argd['submit_date']))
        if correct_date != 0:
            redirect_to_url(req,
                "%s/batchuploader/documents?error=%s&mode=%s&docfolder=%s&matching=%s&submit_time=%s"
                % (CFG_SITE_URL, correct_date, argd['mode'], argd['docfolder'], argd['matching'], argd['submit_time']))

        # Normalize the placeholder values ("yyyy-mm-dd"/"hh:mm:ss") to ''.
        date = argd['submit_date'] not in ['yyyy-mm-dd', ''] \
               and argd['submit_date'] or ''
        time = argd['submit_time'] not in ['hh:mm:ss', ''] \
               and argd['submit_time'] or ''
        # Date and time must be given together or not at all.
        if date != '' and time == '':
            redirect_to_url(req, "%s/batchuploader/documents?error=1&mode=%s&docfolder=%s&matching=%s&submit_date=%s"
                % (CFG_SITE_URL, argd['mode'], argd['docfolder'], argd['matching'], argd['submit_date']))
        elif date == '' and time != '':
            redirect_to_url(req, "%s/batchuploader/documents?error=4&mode=%s&docfolder=%s&matching=%s&submit_time=%s"
                % (CFG_SITE_URL, argd['mode'], argd['docfolder'], argd['matching'], argd['submit_time']))

        errors, info = document_upload(req, argd['docfolder'], argd['matching'], argd['mode'], date, time, argd['ln'])

        body = batchuploader_templates.tmpl_display_menu(argd['ln'])
        uid = getUid(req)
        navtrail = '''<a class="navtrail" href="%s/batchuploader/documents">%s</a>''' % \
                   (CFG_SITE_URL, _("Document batch upload"))
        body += batchuploader_templates.tmpl_display_web_docupload_result(argd['ln'], errors, info)
        title = _("Document batch upload result")
        return page(title = title,
                    body = body,
                    metaheaderadd = batchuploader_templates.tmpl_styles(),
                    uid = uid,
                    navtrail = navtrail,
                    lastupdated = __lastupdated__,
                    req = req,
                    language = argd['ln'],
                    navmenuid = "batchuploader")

    def robotupload(self, req, form):
        """Interface for robots used like this:
            $ curl -F 'file=@localfile.xml' -F 'mode=-i' http://cdsweb.cern.ch/batchuploader/robotupload -A invenio_webupload
        """
        argd = wash_urlargd(form, {'file': (Field, None),
                                   'mode': (str,None)})
        cli_upload(req, argd['file'], argd['mode'])

    def allocaterecord(self, req, form):
        """
        Interface for robots to allocate a record and obtain a record identifier
        """
        return cli_allocate_record(req)

    def metasubmit(self, req, form):
        """ Function called after submitting the metadata upload form.
            Checks if input fields are correct before uploading.
        """
        argd = wash_urlargd(form, {'metafile': (Field, None),
                                   'mode': (str,None),
                                   'submit_date': (str, None),
                                   'submit_time': (str, None),
                                   'filename': (str, None)})
        _ = gettext_set_language(argd['ln'])

        not_authorized = user_authorization(req, argd['ln'])
        if not_authorized:
            return not_authorized
        # Check if input fields are correct, if not, redirect to upload form
        # (see NOTE in docsubmit about redirect_to_url aborting the request).
        correct_date = check_date(argd['submit_date'])
        correct_time = check_time(argd['submit_time'])
        correct_file = check_file(argd['filename'])
        if correct_time != 0:
            redirect_to_url(req,
                "%s/batchuploader/metadata?error=1&mode=%s&submit_date=%s"
                % (CFG_SITE_URL, argd['mode'], argd['submit_date']))
        if correct_file != 0:
            redirect_to_url(req,
                "%s/batchuploader/metadata?error=2&mode=%s&submit_date=%s&submit_time=%s"
                % (CFG_SITE_URL, argd['mode'], argd['submit_date'],
                   argd['submit_time']))
        if correct_date != 0:
            redirect_to_url(req,
                "%s/batchuploader/metadata?error=%s&mode=%s&submit_time=%s"
                % (CFG_SITE_URL, correct_date, argd['mode'], argd['submit_time']))

        # Normalize the placeholder values to ''; date and time must be
        # given together or not at all.
        date = argd['submit_date'] not in ['yyyy-mm-dd', ''] \
               and argd['submit_date'] or ''
        time = argd['submit_time'] not in ['hh:mm:ss', ''] \
               and argd['submit_time'] or ''
        if date != '' and time == '':
            redirect_to_url(req, "%s/batchuploader/metadata?error=1&mode=%s&submit_date=%s"
                % (CFG_SITE_URL, argd['mode'], argd['submit_date']))
        elif date == '' and time != '':
            redirect_to_url(req, "%s/batchuploader/metadata?error=4&mode=%s&submit_time=%s"
                % (CFG_SITE_URL, argd['mode'], argd['submit_time']))

        # Function where bibupload queues the file
        auth_code, auth_message = metadata_upload(req,
                                  argd['metafile'], argd['mode'].split()[0],
                                  date, time, argd['filename'], argd['ln'])

        if auth_code != 0:
            referer = '/batchuploader/'
            return page_not_authorized(req=req, referer=referer,
                                       text=auth_message, navmenuid="batchuploader")
        else:
            uid = getUid(req)
            body = batchuploader_templates.tmpl_display_menu(argd['ln'])
            body += batchuploader_templates.tmpl_upload_successful(argd['ln'])
            title = _("Upload successful")
            navtrail = '''<a class="navtrail" href="%s/batchuploader/metadata">%s</a>''' % \
                       (CFG_SITE_URL, _("Metadata batch upload"))
            return page(title = title,
                        body = body,
                        uid = uid,
                        navtrail = navtrail,
                        lastupdated = __lastupdated__,
                        req = req,
                        language = argd['ln'],
                        navmenuid = "batchuploader")

    def history(self, req, form):
        """Display upload history of the current user"""
        argd = wash_urlargd(form, {})
        _ = gettext_set_language(argd['ln'])

        not_authorized = user_authorization(req, argd['ln'])
        if not_authorized:
            return not_authorized
        uploaded_meta_files = get_user_metadata_uploads(req)
        uploaded_doc_files = get_user_document_uploads(req)

        uid = getUid(req)
        body = batchuploader_templates.tmpl_display_menu(argd['ln'], ref="history")
        body += batchuploader_templates.tmpl_upload_history(argd['ln'], uploaded_meta_files, uploaded_doc_files)
        title = _("Upload history")
        return page(title = title,
                    body = body,
                    metaheaderadd = batchuploader_templates.tmpl_styles(),
                    uid = uid,
                    lastupdated = __lastupdated__,
                    req = req,
                    language = argd['ln'],
                    navmenuid = "batchuploader")

    def daemon(self, req, form):
        """ Display content of folders where the daemon will look into """
        argd = wash_urlargd(form, {})
        _ = gettext_set_language(argd['ln'])

        not_authorized = user_authorization(req, argd['ln'])
        if not_authorized:
            return not_authorized
        docs = get_daemon_doc_files()
        metadata = get_daemon_meta_files()

        uid = getUid(req)
        body = batchuploader_templates.tmpl_display_menu(argd['ln'], ref="daemon")
        body += batchuploader_templates.tmpl_daemon_content(argd['ln'], docs, metadata)
        title = _("Batch Uploader: Daemon monitor")
        return page(title = title,
                    body = body,
                    metaheaderadd = batchuploader_templates.tmpl_styles(),
                    uid = uid,
                    lastupdated = __lastupdated__,
                    req = req,
                    language = argd['ln'],
                    navmenuid = "batchuploader")

    def __call__(self, req, form):
        """Redirect calls without final slash."""
        redirect_to_url(req, '%s/batchuploader/metadata' % CFG_SITE_URL)
| kaplun/Invenio-OpenAIRE | modules/bibupload/lib/batchuploader_webinterface.py | Python | gpl-2.0 | 15,561 | [
"VisIt"
] | 4dff59553563f3db203060ddb4c58bdbf167d1487e15f830ff501c5e4a411f72 |
#!/usr/bin/python
# Submits a command to the queue
import argparse
# Command-line interface: job identity, resource requests, and
# cluster-specific options (OSG file handling vs. CCB/DSCR PBS options).
parser = argparse.ArgumentParser(description='Run a command on the queue')
parser.add_argument('name', help='Job name')
parser.add_argument('command', help='Command to execute')
parser.add_argument('--mem', type=int, default=1, help='Amount of memory to allocate (GB)')
parser.add_argument('--comment', default='', help='Adds a comment to the end of the script')
parser.add_argument('--dry', action='store_true', default=False, \
    help='Does not actually submit the job to the queue')
parser.add_argument('--no_release', action='store_true', default=False, \
    help='Does not release held jobs')
# For OSG
parser.add_argument('--disk', default='350MB', help='Amount of disk space to allocate')
parser.add_argument('--input_files', default=[], nargs='+', help='Input files for the job')
parser.add_argument('--output_files', default=[], nargs='+', help='Output files for the job')
parser.add_argument('--output_remaps', default=[], nargs='+', help='New output file names')
parser.add_argument('--min_job_time', default=20, type=int, \
    help='Will resubmit job if there is an error and the job takes less than this time (in minutes)')
parser.add_argument('--max_job_time', default=12, type=int, \
    help='Will hold job that takes longer than this time (in hours)')
# For CCB and DSCR
parser.add_argument('--nodes', type=int, default=1, help='Number of nodes to run the job on')
parser.add_argument('--ppn', type=int, default=1, help='Number of processors per node to run the job on')
parser.add_argument('--ambertools', action='store_true', default=False, \
    help='Load the ambertools/14 module')
parser.add_argument('--email', default='', help='Adds email to job')
parser.add_argument('--email_options', default='abe', help='Options for email notifications. When job begins (b), job ends (e), and/or aborted (a)')
args = parser.parse_args()
# Find unique name for the submission script
import os
curdir = os.getcwd()
n = 0
submit_FN = os.path.join(curdir,'jobs','%s-%d.submit'%(args.name,n))
while os.path.exists(submit_FN):
n = n + 1
submit_FN = os.path.join(curdir,'jobs','%s-%d.submit'%(args.name,n))
sh_FN = os.path.join(curdir,'jobs','%s-%d.sh'%(args.name,n))
out_FN = os.path.join(curdir,'jobs','%s-%d.out'%(args.name,n))
err_FN = os.path.join(curdir,'jobs','%s-%d.err'%(args.name,n))
# Sets up the submission and execution scripts
submit_script = ''
execute_script = ''
if os.path.exists('/home/dminh/scripts/qsub_command.py'): # CCB Cluster
cluster = 'CCB'
# Split the command onto multiple lines
command_list = args.command.split(';')
command = '\n'.join([c.strip() for c in command_list])
# By default, use Enthought Canopy python
if command.find('python')>-1:
modules = 'module load canopy/1.5.0\n'
else:
modules = ''
if command.find('chimera')>-1:
modules += 'module load chimera/1.8.1\n'
if command.find('modeller')>-1:
modules += 'module load modeller/9.14\n'
if command.find('cores')>-1:
cores = command[command.find('cores')+5:]
cores = cores[:cores.find('\n')].strip()
cores = cores.split(' ')[0]
if cores!='':
args.ppn = int(cores)
if args.ambertools:
modules += 'module load ambertools/14\n'
email_specified = ''
if args.email == '':
emailed_specified = '#'
# Write script
submit_script = '''#!/bin/bash
#
#PBS -S /bin/bash
#PBS -N {0}
#PBS -l mem={1}GB,nodes={2}:ppn={3},walltime=168:00:00
#PBS -d {4}
#PBS -o {5}
#PBS -e {6}
{10}#PBS -M {11}
{10}#PBS -m {12}
{7}
{8}
# {9}
'''.format(args.name, args.mem, args.nodes, args.ppn, \
curdir, out_FN, err_FN, \
modules, command, args.comment, email_specified, args.email, args.email_options)
elif os.path.exists('/stash'): # Open Science Grid
cluster = 'OSG'
# Split the command onto multiple lines
command_list = args.command.split(';')
command = '\n'.join([c.strip() for c in command_list])
# Determine the input files
# All specified input files
input_files = set([os.path.abspath(FN) for FN in args.input_files])
# The _external_paths.py script
input_files = input_files.union(\
['/home/daveminh/public/AlGDock-0.0.1/Pipeline/_external_paths.py'])
# Any files mentioned in the command
command_list = command.split(' ')
input_files = input_files.union(\
set([os.path.abspath(FN) for FN in command_list if os.path.isfile(FN)]))
# Use the basename for any file, the current directory for any directory
command = ' '.join([\
{True:os.path.basename(item), False:
{True:'.', False:item}[os.path.isdir(item)]}[os.path.isfile(item)] \
for item in command_list])
# Any output file that already exists
input_files = input_files.union(\
set([os.path.abspath(FN) for FN in args.output_files if os.path.isfile(FN)]))
transfer_input_files = ', '.join(input_files)
# Format the output files for the script
touches = ""
if len(args.output_remaps)>0:
transfer_output_files = 'transfer_output_remaps = "' + \
'; '.join(['%s = %s'%(FNo,FNn) for (FNo,FNn) in zip(\
args.output_remaps[::2],args.output_remaps[1::2])]) + '"'
for FN in args.output_remaps[::2]:
touches += "if [ ! -e {0} ]\n then\n touch {0}\nfi\n".format(FN)
if command.find('$ALGDOCK')!=-1:
hold_string = 'on_exit_hold = (ExitCode == 100)'
requirements_string = '&& (HAS_CVMFS_oasis_opensciencegrid_org =?= TRUE)'
else:
hold_string = '''
# stay in queue if there was an error and
# the job ran for less than min_job_time minutes
on_exit_hold = (ExitCode != 0) && ((CurrentTime - JobStartDate) < ({0}*60))
'''.format(args.min_job_time)
requirements_string = ''
# Write the submission script
submit_script = """Universe = vanilla
Executable = {0}
Error = jobs/{1}.$(Cluster)-$(Process).err
Output = jobs/{1}.$(Cluster)-$(Process).out
Log = jobs/{1}.$(Cluster).log
Requirements = (FileSystemDomain != "") && (OpSys == "LINUX" ) && (Arch == "X86_64") {2}
request_disk = {3}
request_memory = {4}GB
# File transfer
should_transfer_files = YES
transfer_input_files = {5}
{6}
when_to_transfer_output = ON_EXIT_OR_EVICT
want_graceful_removal = (ExitCode == 100)
{7}
# protect against hung jobs (taking more than max_job_time hours)
periodic_hold = (JobStatus==2) && ((CurrentTime - EnteredCurrentStatus) > {8}*60*60)
# make sure the job is being retried and rematched
{9}
+ProjectName="AlGDock"
Queue 1
""".format(sh_FN, args.name, requirements_string, args.disk, args.mem, \
transfer_input_files, transfer_output_files, \
hold_string, args.max_job_time, \
{True:'',
False:'periodic_release = ((CurrentTime - EnteredCurrentStatus) > 60) && (NumJobStarts < 40)'}[\
args.no_release])
if command.find('$ALGDOCK')!=-1:
command = """
module load libgfortran
# Download data
wget --no-verbose --no-check-certificate http://stash.osgconnect.net/+daveminh/algdock.tar.gz
tar xzf algdock.tar.gz
# Modify paths
echo "
search_paths = {
'gaff.dat':[None],
'namd':[None],
'sander':[None],
'apbs':[None],
'ambpdb':[None],
'molsurf':[None],
'MMTK':['$WORK_DIR/AlGDock/MMTK'],
'vmd':[None],
'convert':[None],
'font':[None]}
" | cat AlGDock/AlGDock/_external_paths.py - > AlGDock/AlGDock/paths.py
mv AlGDock/AlGDock/paths.py AlGDock/AlGDock/_external_paths.py
export ALGDOCK=$WORK_DIR/AlGDock/BindingPMF
""" + command + """
rm -rf AlGDock namd* sander* ambpdb* molsurf* APBS*
rm -f algdock.tar.gz*
rm -f *.inpcrd* *.prmtop* *.frcmod *.pdb *.db
rm -f *.out *.namd *.dcd
rm -f .lock
"""
execute_script = """#!/bin/bash
WORK_DIR=`pwd`
echo Working in $WORK_DIR
echo Directory before command:
ls -ltr
"""+command+touches+"""
echo Directory after command:
ls -ltr
"""
if command.find('$ALGDOCK')!=-1 and command.find('timed')!=-1:
# -s means file is not zero size
execute_script += """
if [ ! -s f_RL.pkl.gz ]
then
exit 100
fi"""
else:
cluster = None
submit_script = args.command
# Write and submit scripts
if submit_script!='':
if not os.path.isdir('jobs'):
os.makedirs('jobs')
submit_F = open(submit_FN,'w')
submit_F.write(submit_script)
submit_F.close()
if execute_script!='':
if not os.path.isdir('jobs'):
os.makedirs('jobs')
sh_F = open(sh_FN,'w')
sh_F.write(execute_script)
sh_F.close()
if (not args.dry) and cluster in ['OSG','CCB','DSCR']:
print 'qsub_command: Submitting job script: ' + submit_FN
print('qsub_command: Job name: ' + args.name)
# print('Script contents: ' + submit_script)
if not args.dry:
if cluster=='OSG':
os.system('condor_submit %s'%submit_FN)
elif cluster=='CCB' or cluster=='DSCR':
os.system('qsub %s >> jobs.txt'%submit_FN) #Store the job id in jobs.txt
else:
os.system(args.command)
| luizcieslak/AlGDock | Pipeline/qsub_command.py | Python | mit | 8,783 | [
"NAMD",
"VMD"
] | 61fcab32a3e5ed2f42f07f453d27a0a79dda14dcde9000bdde1d453c723387e5 |
"""
Test the about xblock
"""
import datetime
import pytz
from ccx_keys.locator import CCXLocator
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from mock import patch
from nose.plugins.attrib import attr
from course_modes.models import CourseMode
from track.tests import EventTrackingTestCase
from xmodule.modulestore.tests.django_utils import TEST_DATA_MIXED_MODULESTORE
from xmodule.modulestore.tests.utils import TEST_DATA_DIR
from xmodule.modulestore.xml_importer import import_course_from_xml
from student.models import CourseEnrollment
from student.tests.factories import AdminFactory, CourseEnrollmentAllowedFactory, UserFactory
from shoppingcart.models import Order, PaidCourseRegistration
from xmodule.course_module import CATALOG_VISIBILITY_ABOUT, CATALOG_VISIBILITY_NONE
from xmodule.modulestore.tests.django_utils import (
ModuleStoreTestCase,
SharedModuleStoreTestCase,
TEST_DATA_SPLIT_MODULESTORE
)
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from util.milestones_helpers import (
set_prerequisite_courses,
get_prerequisite_courses_display,
)
from milestones.tests.utils import MilestonesTestCaseMixin
from lms.djangoapps.ccx.tests.factories import CcxFactory
from .helpers import LoginEnrollmentTestCase
# HTML for registration button
REG_STR = "<form id=\"class_enroll_form\" method=\"post\" data-remote=\"true\" action=\"/change_enrollment\">"
SHIB_ERROR_STR = "The currently logged-in user account does not have permission to enroll in this course."
@attr(shard=1)
class AboutTestCase(LoginEnrollmentTestCase, SharedModuleStoreTestCase, EventTrackingTestCase, MilestonesTestCaseMixin):
"""
Tests about xblock.
"""
@classmethod
def setUpClass(cls):
super(AboutTestCase, cls).setUpClass()
cls.course = CourseFactory.create()
cls.course_without_about = CourseFactory.create(catalog_visibility=CATALOG_VISIBILITY_NONE)
cls.course_with_about = CourseFactory.create(catalog_visibility=CATALOG_VISIBILITY_ABOUT)
cls.purchase_course = CourseFactory.create(org='MITx', number='buyme', display_name='Course To Buy')
cls.about = ItemFactory.create(
category="about", parent_location=cls.course.location,
data="OOGIE BLOOGIE", display_name="overview"
)
cls.about = ItemFactory.create(
category="about", parent_location=cls.course_without_about.location,
data="WITHOUT ABOUT", display_name="overview"
)
cls.about = ItemFactory.create(
category="about", parent_location=cls.course_with_about.location,
data="WITH ABOUT", display_name="overview"
)
def setUp(self):
super(AboutTestCase, self).setUp()
self.course_mode = CourseMode(
course_id=self.purchase_course.id,
mode_slug=CourseMode.DEFAULT_MODE_SLUG,
mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
min_price=10
)
self.course_mode.save()
def test_anonymous_user(self):
"""
This test asserts that a non-logged in user can visit the course about page
"""
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
# Check that registration button is present
self.assertIn(REG_STR, resp.content)
def test_logged_in(self):
"""
This test asserts that a logged-in user can visit the course about page
"""
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
def test_already_enrolled(self):
"""
Asserts that the end user sees the appropriate messaging
when he/she visits the course about page, but is already enrolled
"""
self.setup_user()
self.enroll(self.course, True)
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("You are enrolled in this course", resp.content)
self.assertIn("View Course", resp.content)
@override_settings(COURSE_ABOUT_VISIBILITY_PERMISSION="see_about_page")
def test_visible_about_page_settings(self):
"""
Verify that the About Page honors the permission settings in the course module
"""
url = reverse('about_course', args=[self.course_with_about.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("WITH ABOUT", resp.content)
url = reverse('about_course', args=[self.course_without_about.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 404)
@patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
def test_logged_in_marketing(self):
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
# should be redirected
self.assertEqual(resp.status_code, 302)
# follow this time, and check we're redirected to the course info page
resp = self.client.get(url, follow=True)
target_url = resp.redirect_chain[-1][0]
info_url = reverse('info', args=[self.course.id.to_deprecated_string()])
self.assertTrue(target_url.endswith(info_url))
@patch.dict(settings.FEATURES, {'ENABLE_PREREQUISITE_COURSES': True})
def test_pre_requisite_course(self):
pre_requisite_course = CourseFactory.create(org='edX', course='900', display_name='pre requisite course')
course = CourseFactory.create(pre_requisite_courses=[unicode(pre_requisite_course.id)])
self.setup_user()
url = reverse('about_course', args=[unicode(course.id)])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
pre_requisite_courses = get_prerequisite_courses_display(course)
pre_requisite_course_about_url = reverse('about_course', args=[unicode(pre_requisite_courses[0]['key'])])
self.assertIn("<span class=\"important-dates-item-text pre-requisite\"><a href=\"{}\">{}</a></span>"
.format(pre_requisite_course_about_url, pre_requisite_courses[0]['display']),
resp.content.strip('\n'))
@patch.dict(settings.FEATURES, {'ENABLE_PREREQUISITE_COURSES': True})
def test_about_page_unfulfilled_prereqs(self):
pre_requisite_course = CourseFactory.create(
org='edX',
course='901',
display_name='pre requisite course',
)
pre_requisite_courses = [unicode(pre_requisite_course.id)]
# for this failure to occur, the enrollment window needs to be in the past
course = CourseFactory.create(
org='edX',
course='1000',
# closed enrollment
enrollment_start=datetime.datetime(2013, 1, 1),
enrollment_end=datetime.datetime(2014, 1, 1),
start=datetime.datetime(2013, 1, 1),
end=datetime.datetime(2030, 1, 1),
pre_requisite_courses=pre_requisite_courses,
)
set_prerequisite_courses(course.id, pre_requisite_courses)
self.setup_user()
self.enroll(self.course, True)
self.enroll(pre_requisite_course, True)
url = reverse('about_course', args=[unicode(course.id)])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
pre_requisite_courses = get_prerequisite_courses_display(course)
pre_requisite_course_about_url = reverse('about_course', args=[unicode(pre_requisite_courses[0]['key'])])
self.assertIn("<span class=\"important-dates-item-text pre-requisite\"><a href=\"{}\">{}</a></span>"
.format(pre_requisite_course_about_url, pre_requisite_courses[0]['display']),
resp.content.strip('\n'))
url = reverse('about_course', args=[unicode(pre_requisite_course.id)])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
@attr(shard=1)
class AboutTestCaseXML(LoginEnrollmentTestCase, ModuleStoreTestCase):
"""
Tests for the course about page
"""
MODULESTORE = TEST_DATA_MIXED_MODULESTORE
def setUp(self):
"""
Set up the tests
"""
super(AboutTestCaseXML, self).setUp()
# The following test course (which lives at common/test/data/2014)
# is closed; we're testing that an about page still appears when
# the course is already closed
self.xml_course_id = self.store.make_course_key('edX', 'detached_pages', '2014')
import_course_from_xml(
self.store,
'test_user',
TEST_DATA_DIR,
source_dirs=['2014'],
static_content_store=None,
target_id=self.xml_course_id,
raise_on_failure=True,
create_if_not_present=True,
)
# this text appears in that course's about page
# common/test/data/2014/about/overview.html
self.xml_data = "about page 463139"
@patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_logged_in_xml(self):
self.setup_user()
url = reverse('about_course', args=[self.xml_course_id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn(self.xml_data, resp.content)
@patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_anonymous_user_xml(self):
url = reverse('about_course', args=[self.xml_course_id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn(self.xml_data, resp.content)
@attr(shard=1)
class AboutWithCappedEnrollmentsTestCase(LoginEnrollmentTestCase, SharedModuleStoreTestCase):
"""
This test case will check the About page when a course has a capped enrollment
"""
@classmethod
def setUpClass(cls):
super(AboutWithCappedEnrollmentsTestCase, cls).setUpClass()
cls.course = CourseFactory.create(metadata={"max_student_enrollments_allowed": 1})
cls.about = ItemFactory.create(
category="about", parent_location=cls.course.location,
data="OOGIE BLOOGIE", display_name="overview"
)
def setUp(self):
"""
Set up the tests
"""
super(AboutWithCappedEnrollmentsTestCase, self).setUp()
def test_enrollment_cap(self):
"""
This test will make sure that enrollment caps are enforced
"""
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn('<a href="#" class="register">', resp.content)
self.enroll(self.course, verify=True)
# create a new account since the first account is already enrolled in the course
self.email = 'foo_second@test.com'
self.password = 'bar'
self.username = 'test_second'
self.create_account(self.username, self.email, self.password)
self.activate_user(self.email)
self.login(self.email, self.password)
# Get the about page again and make sure that the page says that the course is full
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Course is full", resp.content)
# Try to enroll as well
result = self.enroll(self.course)
self.assertFalse(result)
# Check that registration button is not present
self.assertNotIn(REG_STR, resp.content)
@attr(shard=1)
class AboutWithInvitationOnly(SharedModuleStoreTestCase):
"""
This test case will check the About page when a course is invitation only.
"""
@classmethod
def setUpClass(cls):
super(AboutWithInvitationOnly, cls).setUpClass()
cls.course = CourseFactory.create(metadata={"invitation_only": True})
cls.about = ItemFactory.create(
category="about", parent_location=cls.course.location,
display_name="overview"
)
def setUp(self):
super(AboutWithInvitationOnly, self).setUp()
def test_invitation_only(self):
"""
Test for user not logged in, invitation only course.
"""
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enrollment in this course is by invitation only", resp.content)
# Check that registration button is not present
self.assertNotIn(REG_STR, resp.content)
def test_invitation_only_but_allowed(self):
"""
Test for user logged in and allowed to enroll in invitation only course.
"""
# Course is invitation only, student is allowed to enroll and logged in
user = UserFactory.create(username='allowed_student', password='test', email='allowed_student@test.com')
CourseEnrollmentAllowedFactory(email=user.email, course_id=self.course.id)
self.client.login(username=user.username, password='test')
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn(u"Enroll in {}".format(self.course.id.course), resp.content.decode('utf-8'))
# Check that registration button is present
self.assertIn(REG_STR, resp.content)
@attr(shard=1)
@patch.dict(settings.FEATURES, {'RESTRICT_ENROLL_BY_REG_METHOD': True})
class AboutTestCaseShibCourse(LoginEnrollmentTestCase, SharedModuleStoreTestCase):
"""
Test cases covering about page behavior for courses that use shib enrollment domain ("shib courses")
"""
@classmethod
def setUpClass(cls):
super(AboutTestCaseShibCourse, cls).setUpClass()
cls.course = CourseFactory.create(enrollment_domain="shib:https://idp.stanford.edu/")
cls.about = ItemFactory.create(
category="about", parent_location=cls.course.location,
data="OOGIE BLOOGIE", display_name="overview"
)
def setUp(self):
super(AboutTestCaseShibCourse, self).setUp()
def test_logged_in_shib_course(self):
"""
For shib courses, logged in users will see the enroll button, but get rejected once they click there
"""
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
self.assertIn(u"Enroll in {}".format(self.course.id.course), resp.content.decode('utf-8'))
self.assertIn(SHIB_ERROR_STR, resp.content)
self.assertIn(REG_STR, resp.content)
def test_anonymous_user_shib_course(self):
"""
For shib courses, anonymous users will also see the enroll button
"""
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
self.assertIn(u"Enroll in {}".format(self.course.id.course), resp.content.decode('utf-8'))
self.assertIn(SHIB_ERROR_STR, resp.content)
self.assertIn(REG_STR, resp.content)
@attr(shard=1)
class AboutWithClosedEnrollment(ModuleStoreTestCase):
"""
This test case will check the About page for a course that has enrollment start/end
set but it is currently outside of that period.
"""
def setUp(self):
super(AboutWithClosedEnrollment, self).setUp()
self.course = CourseFactory.create(metadata={"invitation_only": False})
# Setup enrollment period to be in future
now = datetime.datetime.now(pytz.UTC)
tomorrow = now + datetime.timedelta(days=1)
nextday = tomorrow + datetime.timedelta(days=1)
self.course.enrollment_start = tomorrow
self.course.enrollment_end = nextday
self.course = self.update_course(self.course, self.user.id)
self.about = ItemFactory.create(
category="about", parent_location=self.course.location,
display_name="overview"
)
def test_closed_enrollmement(self):
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enrollment is Closed", resp.content)
# Check that registration button is not present
self.assertNotIn(REG_STR, resp.content)
def test_course_price_is_not_visble_in_sidebar(self):
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
# course price is not visible ihe course_about page when the course
# mode is not set to honor
self.assertNotIn('<span class="important-dates-item-text">$10</span>', resp.content)
# Stanford About Sidebar tests
@attr(shard=1)
class AboutSidebarHTMLTestCase(SharedModuleStoreTestCase):
"""
This test case will check the About page for the content in the HTML sidebar.
"""
def setUp(self):
super(AboutSidebarHTMLTestCase, self).setUp()
self.course = CourseFactory.create()
def test_html_sidebar_empty(self):
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertNotIn("About Sidebar HTML Heading", resp.content)
def test_html_sidebar_has_content(self):
ItemFactory.create(
category="about", parent_location=self.course.location,
data="About Sidebar HTML Heading", display_name="about_sidebar_html"
)
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("About Sidebar HTML Heading", resp.content)
# / Stanford About Sidebar tests
@attr(shard=1)
@patch.dict(settings.FEATURES, {'ENABLE_SHOPPING_CART': True})
@patch.dict(settings.FEATURES, {'ENABLE_PAID_COURSE_REGISTRATION': True})
class AboutPurchaseCourseTestCase(LoginEnrollmentTestCase, SharedModuleStoreTestCase):
"""
This test class runs through a suite of verifications regarding
purchaseable courses
"""
@classmethod
def setUpClass(cls):
super(AboutPurchaseCourseTestCase, cls).setUpClass()
cls.course = CourseFactory.create(org='MITx', number='buyme', display_name='Course To Buy')
now = datetime.datetime.now(pytz.UTC)
tomorrow = now + datetime.timedelta(days=1)
nextday = tomorrow + datetime.timedelta(days=1)
cls.closed_course = CourseFactory.create(
org='MITx',
number='closed',
display_name='Closed Course To Buy',
enrollment_start=tomorrow,
enrollment_end=nextday
)
def setUp(self):
super(AboutPurchaseCourseTestCase, self).setUp()
self._set_ecomm(self.course)
self._set_ecomm(self.closed_course)
def _set_ecomm(self, course):
"""
Helper method to turn on ecommerce on the course
"""
course_mode = CourseMode(
course_id=course.id,
mode_slug=CourseMode.DEFAULT_MODE_SLUG,
mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
min_price=10,
)
course_mode.save()
def test_anonymous_user(self):
"""
Make sure an anonymous user sees the purchase button
"""
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
def test_logged_in(self):
"""
Make sure a logged in user sees the purchase button
"""
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
def test_already_in_cart(self):
"""
This makes sure if a user has this course in the cart, that the expected message
appears
"""
self.setup_user()
cart = Order.get_cart_for_user(self.user)
PaidCourseRegistration.add_to_order(cart, self.course.id)
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("This course is in your", resp.content)
self.assertNotIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
def test_already_enrolled(self):
"""
This makes sure that the already enrolled message appears for paywalled courses
"""
self.setup_user()
# note that we can't call self.enroll here since that goes through
# the Django student views, which doesn't allow for enrollments
# for paywalled courses
CourseEnrollment.enroll(self.user, self.course.id)
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("You are enrolled in this course", resp.content)
self.assertIn("View Course", resp.content)
self.assertNotIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
def test_closed_enrollment(self):
"""
This makes sure that paywalled courses also honor the registration
window
"""
self.setup_user()
url = reverse('about_course', args=[self.closed_course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enrollment is Closed", resp.content)
self.assertNotIn("Add closed to Cart <span>($10 USD)</span>", resp.content)
# course price is visible ihe course_about page when the course
# mode is set to honor and it's price is set
self.assertIn('<span class="important-dates-item-text">$10</span>', resp.content)
def test_invitation_only(self):
"""
This makes sure that the invitation only restirction takes prescendence over
any purchase enablements
"""
course = CourseFactory.create(metadata={"invitation_only": True})
self._set_ecomm(course)
self.setup_user()
url = reverse('about_course', args=[course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enrollment in this course is by invitation only", resp.content)
def test_enrollment_cap(self):
"""
Make sure that capped enrollments work even with
paywalled courses
"""
course = CourseFactory.create(
metadata={
"max_student_enrollments_allowed": 1,
"display_coursenumber": "buyme",
}
)
self._set_ecomm(course)
self.setup_user()
url = reverse('about_course', args=[course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
# note that we can't call self.enroll here since that goes through
# the Django student views, which doesn't allow for enrollments
# for paywalled courses
CourseEnrollment.enroll(self.user, course.id)
# create a new account since the first account is already enrolled in the course
email = 'foo_second@test.com'
password = 'bar'
username = 'test_second'
self.create_account(username,
email, password)
self.activate_user(email)
self.login(email, password)
# Get the about page again and make sure that the page says that the course is full
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Course is full", resp.content)
self.assertNotIn("Add buyme to Cart ($10)", resp.content)
def test_free_course_display(self):
"""
Make sure other courses that don't have shopping cart enabled don't display the add-to-cart button
and don't display the course_price field if Cosmetic Price is disabled.
"""
course = CourseFactory.create(org='MITx', number='free', display_name='Course For Free')
self.setup_user()
url = reverse('about_course', args=[course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertNotIn("Add free to Cart (Free)", resp.content)
self.assertNotIn('<p class="important-dates-item-title">Price</p>', resp.content)
class CourseAboutTestCaseCCX(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test for unenrolled student tries to access ccx.
Note: Only CCX coach can enroll a student in CCX. In sum self-registration not allowed.
"""
MODULESTORE = TEST_DATA_SPLIT_MODULESTORE
@classmethod
def setUpClass(cls):
super(CourseAboutTestCaseCCX, cls).setUpClass()
cls.course = CourseFactory.create()
def setUp(self):
super(CourseAboutTestCaseCCX, self).setUp()
# Create ccx coach account
self.coach = coach = AdminFactory.create(password="test")
self.client.login(username=coach.username, password="test")
def test_redirect_to_dashboard_unenrolled_ccx(self):
"""
Assert that when unenrolled user tries to access CCX do not allow the user to self-register.
Redirect him to his student dashboard
"""
# create ccx
ccx = CcxFactory(course_id=self.course.id, coach=self.coach)
ccx_locator = CCXLocator.from_course_locator(self.course.id, unicode(ccx.id))
self.setup_user()
url = reverse('info', args=[ccx_locator])
response = self.client.get(url)
expected = reverse('dashboard')
self.assertRedirects(response, expected, status_code=302, target_status_code=200)
| caesar2164/edx-platform | lms/djangoapps/courseware/tests/test_about.py | Python | agpl-3.0 | 27,317 | [
"VisIt"
] | 6419011371fb32abf05e6bf54c2e05c11172a0c1279c80ea6e0e8e30e3cfc9c3 |
#!/usr/bin/python
#=================================================================#
# Script to plot the bandstructure from an abinit bandstructure #
# _EIG.nc netcdf file or from a wannier bandstructure, or from #
# an _EIG.nc file+GW file+ bandstructure _EIG.nc file #
#=================================================================#
#########
#IMPORTS#
#########
import numpy as N
import matplotlib.pyplot as P
import netCDF4 as nc
import sys
import os
import argparse
import time
#############
##VARIABLES##
#############
class VariableContainer:pass
#Constants
csts = VariableContainer()
csts.hartree2ev = N.float(27.211396132)
csts.ev2hartree = N.float(1/csts.hartree2ev)
csts.sqrtpi = N.float(N.sqrt(N.pi))
csts.invsqrtpi = N.float(1/csts.sqrtpi)
csts.TOLKPTS = N.float(0.00001)
###########
##CLASSES##
###########
class PolynomialFit(object):
def __init__(self):
self.degree = 2
class EigenvalueContainer(object):
    """Container for a set of eigenvalues and their k-point path.

    Eigenvalues can be read from an abinit netCDF ``_EIG.nc`` file, an abinit
    GW file (``*_GW``) or a Wannier90 bandstructure ``*.dat`` file (dispatched
    by filename suffix in ``file_open``).  Internally eigenvalues are stored in
    Hartree in an array of shape (nsppol, nkpt, mband).  The class also builds
    and serializes the special-k-point/path information needed to plot or
    compare bandstructures.

    NOTE(review): this is Python 2 code (print statements, ``raw_input``,
    ``iteritems``) using the old NumPy scalar aliases (``N.float``/``N.int``).
    """
    # Class-level defaults; the file readers below fill in the real values.
    nsppol = None
    nkpt = None
    mband = None
    eigenvalues = None
    units = None
    wtk = None
    filename = None
    filefullpath = None
    bd_indices = None
    eigenvalue_type = None
    kpoints = None
    #kpoint_sampling_type: can be Monkhorst-Pack or Bandstructure
    # Tolerances used to detect direction changes along a W90/DFT k-path.
    KPT_W90_TOL = N.float(1.0e-6)
    KPT_DFT_TOL = N.float(1.0e-8)
    kpoint_sampling_type = 'Monkhorst-Pack'
    inputgvectors = None
    gvectors = None
    special_kpoints = None
    special_kpoints_names = None
    special_kpoints_indices = None
    kpoint_path_values = None
    kpoint_reduced_path_values = None
    kpoint_path_length = None
    #reduced_norm = None
    norm_paths = None
    norm_reduced_paths = None
    def __init__(self,directory=None,filename=None):
        """If *filename* is given, open ``directory/filename`` right away
        (directory defaults to the current directory); otherwise build an
        empty container."""
        if filename == None:return
        if directory == None:directory='.'
        self.filename = filename
        self.filefullpath = '%s/%s' %(directory,filename)
        self.file_open(self.filefullpath)
    def set_kpoint_sampling_type(self,kpoint_sampling_type):
        """Set the sampling type; only 'Monkhorst-Pack' or 'Bandstructure'
        are accepted (anything else aborts the script)."""
        if kpoint_sampling_type != 'Monkhorst-Pack' and kpoint_sampling_type != 'Bandstructure':
            print 'ERROR: kpoint_sampling_type "%s" does not exists' %kpoint_sampling_type
            print ' it should be "Monkhorst-Pack" or "Bandstructure" ... exit'
            sys.exit()
        self.kpoint_sampling_type = kpoint_sampling_type
    def correct_kpt(self,kpoint,tolerance=N.float(1.0e-6)):
        """Return a copy of *kpoint* with components numerically close to
        +-1/3 or +-1/6 snapped to the exact fraction (prints a COMMENT line
        when a component was snapped)."""
        kpt_correct = N.array(kpoint,N.float)
        changed = False
        for ii in range(3):
            if N.allclose(kpoint[ii],N.float(1.0/3.0),atol=tolerance):
                kpt_correct[ii] = N.float(1.0/3.0)
                changed = True
            elif N.allclose(kpoint[ii],N.float(1.0/6.0),atol=tolerance):
                kpt_correct[ii] = N.float(1.0/6.0)
                changed = True
            elif N.allclose(kpoint[ii],N.float(-1.0/6.0),atol=tolerance):
                kpt_correct[ii] = N.float(-1.0/6.0)
                changed = True
            elif N.allclose(kpoint[ii],N.float(-1.0/3.0),atol=tolerance):
                kpt_correct[ii] = N.float(-1.0/3.0)
                changed = True
        if changed:
            print 'COMMENT: kpoint %15.12f %15.12f %15.12f has been changed to %15.12f %15.12f %15.12f' %(kpoint[0],kpoint[1],kpoint[2],kpt_correct[0],kpt_correct[1],kpt_correct[2])
        return kpt_correct
    def find_special_kpoints(self,gvectors=None):
        """Detect the special (corner) k-points of a bandstructure path.

        A special k-point is found wherever the step vector between successive
        k-points (in cartesian coordinates, using *gvectors* if provided,
        identity otherwise) changes.  Fills the ``special_kpoints*``,
        ``norm_*paths`` and ``*path_values`` attributes and prints a summary.
        Only meaningful when ``kpoint_sampling_type == 'Bandstructure'``.
        """
        if self.kpoint_sampling_type != 'Bandstructure':
            print 'ERROR: special kpoints are usefull only for bandstructures ... returning find_special_kpoints'
            return
        # Looser snapping/step tolerances for W90 files than for DFT files.
        if self.eigenvalue_type == 'W90':
            correct_kpt_tolerance = N.float(1.0e-4)
            KPT_TOL = self.KPT_W90_TOL
        elif self.eigenvalue_type == 'DFT':
            correct_kpt_tolerance = N.float(1.0e-6)
            KPT_TOL = self.KPT_DFT_TOL
        else:
            print 'ERROR: eigenvalue_type is "%s" while it should be "W90" or "DFT" ... returning find_special_kpoints' %self.eigenvalue_type
            return
        if gvectors == None:
            self.inputgvectors = False
            self.gvectors = N.identity(3,N.float)
        else:
            if N.shape(gvectors) != (3, 3):
                print 'ERROR: wrong gvectors ... exiting now'
                sys.exit()
            self.inputgvectors = True
            self.gvectors = gvectors
        # Cartesian coordinates of every k-point: k_red . G.
        full_kpoints = N.zeros((self.nkpt,3),N.float)
        for ikpt in range(self.nkpt):
            full_kpoints[ikpt,:] = self.kpoints[ikpt,0]*self.gvectors[0,:]+self.kpoints[ikpt,1]*self.gvectors[1,:]+self.kpoints[ikpt,2]*self.gvectors[2,:]
        delta_kpt = full_kpoints[1,:]-full_kpoints[0,:]
        self.special_kpoints_indices = list()
        self.special_kpoints = list()
        # First and last k-points of the path are always special.
        self.special_kpoints_indices.append(0)
        self.special_kpoints.append(self.correct_kpt(self.kpoints[0,:],tolerance=correct_kpt_tolerance))
        for ikpt in range(1,self.nkpt-1):
            thisdelta = full_kpoints[ikpt+1,:]-full_kpoints[ikpt,:]
            if not N.allclose(thisdelta,delta_kpt,atol=KPT_TOL):
                delta_kpt = thisdelta
                self.special_kpoints_indices.append(ikpt)
                self.special_kpoints.append(self.correct_kpt(self.kpoints[ikpt,:],tolerance=correct_kpt_tolerance))
        self.special_kpoints_indices.append(N.shape(self.kpoints)[0]-1)
        self.special_kpoints.append(self.correct_kpt(self.kpoints[-1,:],tolerance=correct_kpt_tolerance))
        print 'Special Kpoints : '
        print ' {0:d} : {1[0]: 8.8f} {1[1]: 8.8f} {1[2]: 8.8f}'.format(1,self.kpoints[0,:])
        # Per-segment path lengths: cartesian (norm_paths) and in snapped
        # reduced coordinates (norm_reduced_paths).
        self.norm_paths = N.zeros((N.shape(self.special_kpoints_indices)[0]-1),N.float)
        self.norm_reduced_paths = N.zeros((N.shape(self.special_kpoints_indices)[0]-1),N.float)
        for ispkpt in range(1,N.shape(self.special_kpoints_indices)[0]):
            self.norm_paths[ispkpt-1] = N.linalg.norm(full_kpoints[self.special_kpoints_indices[ispkpt]]-full_kpoints[self.special_kpoints_indices[ispkpt-1]])
            self.norm_reduced_paths[ispkpt-1] = N.linalg.norm(self.special_kpoints[ispkpt]-self.special_kpoints[ispkpt-1])
            print ' {2:d}-{3:d} path length : {0: 8.8f} | reduced path length : {1: 8.8f}'.\
                  format(self.norm_paths[ispkpt-1],self.norm_reduced_paths[ispkpt-1],ispkpt,ispkpt+1)
            print ' {0:d} : {1[0]: 8.8f} {1[1]: 8.8f} {1[2]: 8.8f}'.format(ispkpt+1,self.kpoints[self.special_kpoints_indices[ispkpt],:])
        self.kpoint_path_length = N.sum(self.norm_paths)
        self.kpoint_reduced_path_length = N.sum(self.norm_reduced_paths)
        self.normalized_kpoint_path_norm = self.norm_paths/self.kpoint_path_length
        self.normalized_kpoint_reduced_path_norm = self.norm_reduced_paths/self.kpoint_reduced_path_length
        # Cumulated abscissa of each k-point along the path (segment by
        # segment, dropping the duplicated segment start points).
        kptredpathval = list()
        kptpathval = list()
        kptredpathval.append(N.float(0.0))
        kptpathval.append(N.float(0.0))
        curlen = N.float(0.0)
        redcurlen = N.float(0.0)
        for ispkpt in range(1,N.shape(self.special_kpoints_indices)[0]):
            kptredpathval.extend(N.linspace(redcurlen,redcurlen+self.norm_reduced_paths[ispkpt-1],self.special_kpoints_indices[ispkpt]-self.special_kpoints_indices[ispkpt-1]+1)[1:])
            kptpathval.extend(N.linspace(curlen,curlen+self.norm_paths[ispkpt-1],self.special_kpoints_indices[ispkpt]-self.special_kpoints_indices[ispkpt-1]+1)[1:])
            redcurlen = redcurlen + self.norm_reduced_paths[ispkpt-1]
            curlen = curlen + self.norm_paths[ispkpt-1]
        self.kpoint_path_values = N.array(kptpathval,N.float)
        self.kpoint_reduced_path_values = N.array(kptredpathval,N.float)
        self.normalized_kpoint_path_values = self.kpoint_path_values/self.kpoint_path_length
        self.normalized_kpoint_reduced_path_values = self.kpoint_reduced_path_values/self.kpoint_reduced_path_length
        self.special_kpoints = N.array(self.special_kpoints,N.float)
    def file_open(self,filefullpath):
        """Dispatch to the right reader from the filename suffix:
        ``*_GW`` -> gw_file_open, ``*_EIG.nc`` -> nc_eig_open,
        ``*.dat`` -> wannier_bs_file_open.  Other suffixes are ignored."""
        if filefullpath[-3:] == '_GW':
            self.gw_file_open(filefullpath)
        elif filefullpath[-7:] == '_EIG.nc':
            self.nc_eig_open(filefullpath)
        elif filefullpath[-4:] == '.dat':
            self.wannier_bs_file_open(filefullpath)
    def has_eigenvalue(self,nsppol,isppol,kpoint,iband):
        """Return True if this container holds band *iband* at *kpoint* for
        spin *isppol* (and the spin count matches *nsppol*)."""
        if self.nsppol != nsppol:
            return False
        for ikpt in range(self.nkpt):
            if N.absolute(self.kpoints[ikpt,0]-kpoint[0]) < csts.TOLKPTS and \
               N.absolute(self.kpoints[ikpt,1]-kpoint[1]) < csts.TOLKPTS and \
               N.absolute(self.kpoints[ikpt,2]-kpoint[2]) < csts.TOLKPTS:
                if iband >= self.bd_indices[isppol,ikpt,0]-1 and iband < self.bd_indices[isppol,ikpt,1]:
                    return True
                # Matching k-point found but band out of range: stop searching.
                return False
        return False
    def get_eigenvalue(self,nsppol,isppol,kpoint,iband):
        """Return the stored eigenvalue at the k-point matching *kpoint*
        (within csts.TOLKPTS).  Implicitly returns None when no k-point
        matches; *nsppol* is accepted but not checked here."""
        for ikpt in range(self.nkpt):
            if N.absolute(self.kpoints[ikpt,0]-kpoint[0]) < csts.TOLKPTS and \
               N.absolute(self.kpoints[ikpt,1]-kpoint[1]) < csts.TOLKPTS and \
               N.absolute(self.kpoints[ikpt,2]-kpoint[2]) < csts.TOLKPTS:
                return self.eigenvalues[isppol,ikpt,iband]
    def wannier_bs_file_open(self,filefullpath):
        """Read a Wannier90 bandstructure ``*.dat`` file (plus the matching
        ``*.kpt`` file when present for reduced k-point coordinates).

        The file is assumed to hold one band per blank-line-separated section,
        each line being "path_value eigenvalue(eV)"; nkpt is taken as the
        index of the first blank line.  Eigenvalues are converted to Hartree.
        No spin polarization is handled (nsppol forced to 1).
        """
        if not (os.path.isfile(filefullpath)):
            print 'ERROR : file "%s" does not exists' %filefullpath
            print '... exiting now ...'
            sys.exit()
        print 'WARNING: no spin polarization reading yet for Wannier90 bandstructure files!'
        self.eigenvalue_type = 'W90'
        self.nsppol = None
        self.nkpt = None
        self.mband = None
        self.eigenvalues = None
        self.units = None
        self.filefullpath = filefullpath
        reader = open(self.filefullpath,'r')
        filedata = reader.readlines()
        reader.close()
        # nkpt = number of lines before the first blank separator.
        for iline in range(len(filedata)):
            if filedata[iline].strip() == '':
                self.nkpt = iline
                break
        # NOTE(review): this assumes len(filedata) is divisible into nkpt-sized
        # sections including their blank separators — verify against an actual
        # W90 .dat file.
        self.mband = N.int(len(filedata)/self.nkpt)
        self.nsppol = 1
        self.eigenvalues = N.zeros([self.nsppol,self.nkpt,self.mband],N.float)
        self.kpoints = N.zeros([self.nkpt,3],N.float)
        iline = 0
        kpt_file = '%s.kpt' %filefullpath[:-4]
        if os.path.isfile(kpt_file):
            reader = open(kpt_file,'r')
            kptdata = reader.readlines()
            reader.close()
            if N.int(kptdata[0]) != self.nkpt:
                print 'ERROR : the number of kpoints in file "%s" is not the same as in "%s" ... exit' %(self.filefullpath,kpt_file)
                sys.exit()
            for ikpt in range(self.nkpt):
                linesplit = kptdata[ikpt+1].split()
                self.kpoints[ikpt,0] = N.float(linesplit[0])
                self.kpoints[ikpt,1] = N.float(linesplit[1])
                self.kpoints[ikpt,2] = N.float(linesplit[2])
        else:
            # No .kpt file: only the 1D path abscissa is available, stored in
            # the first component of each "k-point".
            for ikpt in range(self.nkpt):
                self.kpoints[ikpt,0] = N.float(filedata[ikpt].split()[0])
        for iband in range(self.mband):
            for ikpt in range(self.nkpt):
                self.eigenvalues[0,ikpt,iband] = N.float(filedata[iline].split()[1])
                iline = iline+1
            # Skip the blank line separating band sections.
            iline = iline+1
        self.eigenvalues = self.eigenvalues*csts.ev2hartree
        self.units = 'Hartree'
    def gw_file_open(self,filefullpath):
        """Read an abinit GW eigenvalue file (``*_GW``).

        Header: "nkpt nsppol"; then per (spin, kpoint): the reduced k-point
        coordinates, the number of bands, and one "iband eigenvalue(eV) ..."
        line per band.  Bands absent from the file are left as NaN.
        Eigenvalues are converted to Hartree.
        """
        if not (os.path.isfile(filefullpath)):
            print 'ERROR : file "%s" does not exists' %filefullpath
            print '... exiting now ...'
            sys.exit()
        self.eigenvalue_type = 'GW'
        self.nsppol = None
        self.nkpt = None
        self.mband = None
        self.eigenvalues = None
        self.units = None
        self.filefullpath = filefullpath
        reader = open(self.filefullpath,'r')
        filedata = reader.readlines()
        reader.close()
        self.nkpt = N.int(filedata[0].split()[0])
        self.kpoints = N.ones([self.nkpt,3],N.float)
        self.nsppol = N.int(filedata[0].split()[1])
        self.bd_indices = N.zeros((self.nsppol,self.nkpt,2),N.int)
        # First pass: k-points and first/last band indices per (spin,kpt).
        icur = 1
        nbd_kpt = N.zeros([self.nsppol,self.nkpt],N.int)
        for isppol in range(self.nsppol):
            for ikpt in range(self.nkpt):
                self.kpoints[ikpt,:] = N.array(filedata[icur].split()[:],N.float)
                icur = icur + 1
                nbd_kpt[isppol,ikpt] = N.int(filedata[icur])
                self.bd_indices[isppol,ikpt,0] = N.int(filedata[icur+1].split()[0])
                self.bd_indices[isppol,ikpt,1] = N.int(filedata[icur+nbd_kpt[isppol,ikpt]].split()[0])
                icur = icur + nbd_kpt[isppol,ikpt] + 1
        self.mband = N.max(self.bd_indices[:,:,1])
        self.eigenvalues = N.zeros([self.nsppol,self.nkpt,self.mband],N.float)
        self.eigenvalues[:,:,:] = N.nan
        # Second pass: eigenvalues; ii starts at 3 (skipping the header, the
        # first k-point line and its band-count line), +2 between k-points.
        ii = 3
        for isppol in range(self.nsppol):
            for ikpt in range(self.nkpt):
                for iband in range(self.bd_indices[isppol,ikpt,0]-1,self.bd_indices[isppol,ikpt,1]):
                    self.eigenvalues[isppol,ikpt,iband] = N.float(filedata[ii].split()[1])
                    ii = ii + 1
                ii = ii + 2
        self.eigenvalues = csts.ev2hartree*self.eigenvalues
        self.units = 'Hartree'
    def pfit_gw_file_write(self,polyfitlist,directory=None,filename=None,bdgw=None,energy_pivots=None,gwec=None):
        """Write a GW-format file whose corrections come from the piecewise
        polynomial fit *polyfitlist* (one polynomial per energy interval
        delimited by *energy_pivots*, in eV; last polynomial used above the
        last pivot).  If *gwec* is given, true GW eigenvalues are used where
        available and the fit only fills the gaps.  Prompts before
        overwriting an existing file.  *bdgw* is the (first, last) band range
        (1-based) to write."""
        if filename == None:return
        if directory == None:directory='.'
        filefullpath = '%s/%s' %(directory,filename)
        if (os.path.isfile(filefullpath)):
            user_input = raw_input('WARNING : file "%s" exists, do you want to overwrite it ? (y/n)' %filefullpath)
            if not (user_input == 'y' or user_input == 'Y'):
                return
        writer = open(filefullpath,'w')
        writer.write('%12s%12s\n' %(self.nkpt,self.nsppol))
        if gwec == None:
            for ikpt in range(self.nkpt):
                for isppol in range(self.nsppol):
                    writer.write('%10.6f%10.6f%10.6f\n' %(self.kpoints[ikpt,0],self.kpoints[ikpt,1],self.kpoints[ikpt,2]))
                    writer.write('%4i\n' %(bdgw[1]-bdgw[0]+1))
                    for iband in range(bdgw[0]-1,bdgw[1]):
                        # Default to the last-interval fit, then pick the
                        # interval this eigenvalue falls into.
                        delta = N.polyval(polyfitlist[-1],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
                        for ipivot in range(len(energy_pivots)):
                            if csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband] <= energy_pivots[ipivot]:
                                delta = N.polyval(polyfitlist[ipivot],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
                                break
                        writer.write('%6i%9.4f%9.4f%9.4f\n' %(iband+1,csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband]+delta,delta,0.0))
        else:
            for ikpt in range(self.nkpt):
                for isppol in range(self.nsppol):
                    writer.write('%10.6f%10.6f%10.6f\n' %(self.kpoints[ikpt,0],self.kpoints[ikpt,1],self.kpoints[ikpt,2]))
                    writer.write('%4i\n' %(bdgw[1]-bdgw[0]+1))
                    for iband in range(bdgw[0]-1,bdgw[1]):
                        if gwec.has_eigenvalue(self.nsppol,isppol,self.kpoints[ikpt],iband):
                            # A real GW eigenvalue exists: use it directly.
                            gw_eig = gwec.get_eigenvalue(self.nsppol,isppol,self.kpoints[ikpt],iband)
                            writer.write('%6i%9.4f%9.4f%9.4f\n' %(iband+1,csts.hartree2ev*gw_eig,csts.hartree2ev*(gw_eig-self.eigenvalues[isppol,ikpt,iband]),0.0))
                        else:
                            delta = N.polyval(polyfitlist[-1],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
                            for ipivot in range(len(energy_pivots)):
                                if csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband] <= energy_pivots[ipivot]:
                                    delta = N.polyval(polyfitlist[ipivot],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
                                    break
                            writer.write('%6i%9.4f%9.4f%9.4f\n' %(iband+1,csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband]+delta,delta,0.0))
        writer.close()
    def pfit_dft_to_gw_bs_write(self,polyfitlist,directory=None,filename=None,bdgw=None,energy_pivots=None,gwec=None):
        """Write a plain-text bandstructure (one line per k-point: index then
        fitted GW eigenvalues in eV) obtained by applying the piecewise
        polynomial fit *polyfitlist* to the DFT eigenvalues.  The *gwec*
        branch is not implemented.  Prompts before overwriting."""
        if filename == None:return
        if directory == None:directory='.'
        filefullpath = '%s/%s' %(directory,filename)
        if (os.path.isfile(filefullpath)):
            user_input = raw_input('WARNING : file "%s" exists, do you want to overwrite it ? (y/n)' %filefullpath)
            if not (user_input == 'y' or user_input == 'Y'):
                return
        writer = open(filefullpath,'w')
        if gwec == None:
            for ikpt in range(self.nkpt):
                writer.write('%s' %ikpt)
                for isppol in range(self.nsppol):
                    for iband in range(bdgw[0]-1,bdgw[1]):
                        delta = N.polyval(polyfitlist[-1],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
                        for ipivot in range(len(energy_pivots)):
                            if csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband] <= energy_pivots[ipivot]:
                                delta = N.polyval(polyfitlist[ipivot],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
                                break
                        writer.write(' %s' %(csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband]+delta))
                writer.write('\n')
        else:
            print 'NOT SUPPORTED YET'
            sys.exit()
        writer.close()
    def nc_eig_open(self,filefullpath):
        """Read eigenvalues and k-points from an abinit ``_EIG.nc`` netCDF
        file.  Units must be Hartree (or unspecified, then assumed Hartree).
        Band indices are set to the full range [1, mband] for every
        (spin, kpoint)."""
        if not (os.path.isfile(filefullpath)):
            print 'ERROR : file "%s" does not exists' %filefullpath
            print '... exiting now ...'
            sys.exit()
        ncdata = nc.Dataset(filefullpath)
        self.eigenvalue_type = 'DFT'
        self.nsppol = None
        self.nkpt = None
        self.mband = None
        self.eigenvalues = None
        self.units = None
        self.filefullpath = filefullpath
        for dimname,dimobj in ncdata.dimensions.iteritems():
            if dimname == 'nsppol':self.nsppol = N.int(len(dimobj))
            if dimname == 'nkpt':self.nkpt = N.int(len(dimobj))
            if dimname == 'mband':self.mband = N.int(len(dimobj))
        for varname in ncdata.variables:
            if varname == 'Eigenvalues':
                varobj = ncdata.variables[varname]
                varshape = N.shape(varobj[:])
                self.units = None
                for attrname in varobj.ncattrs():
                    if attrname == 'units':
                        self.units = varobj.getncattr(attrname)
                if self.units == None:
                    print 'WARNING : units are not specified'
                    print '... assuming "Hartree" units ...'
                    self.units = 'Hartree'
                elif self.units != 'Hartree':
                    print 'ERROR : units are unknown : "%s"' %self.units
                    print '... exiting now ...'
                    sys.exit()
                self.eigenvalues = N.reshape(N.array(varobj,N.float),varshape)
                # Dimensions are re-derived from the variable shape
                # (nsppol, nkpt, mband), overriding the dimension scan above.
                self.nsppol = varshape[0]
                self.nkpt = varshape[1]
                self.kpoints = -1*N.ones((self.nkpt,3),N.float)
                self.mband = varshape[2]
                self.bd_indices = N.zeros((self.nsppol,self.nkpt,2),N.int)
                self.bd_indices[:,:,0] = 1
                self.bd_indices[:,:,1] = self.mband
                break
        for varname in ncdata.variables:
            if varname == 'Kptns':
                varobj = ncdata.variables[varname]
                varshape = N.shape(varobj[:])
                self.kpoints = N.reshape(N.array(varobj,N.float),varshape)
    def write_bandstructure_to_file(self,filename,option_kpts='bohrm1_units'):
        """Write the bandstructure (header with path metadata, then one
        "path_value eigenvalue" pair per k-point per band per spin, then the
        k-point list) to *filename*.  *option_kpts* selects the path abscissa:
        'bohrm1_units', 'reduced', 'bohrm1_units_normalized' or
        'reduced_normalized'.  Requires find_special_kpoints to have run."""
        #if option_kpts is set to 'normalized', the path of the bandstructure will be normalized to 1 (and special k-points correctly chosen)
        if self.kpoint_sampling_type != 'Bandstructure':
            print 'ERROR: kpoint_sampling_type is not "Bandstructure" ... returning from write_bandstructure_to_file'
            return
        if self.nsppol > 1:
            print 'ERROR: number of spins is more than 1, this is not fully tested ... use with care !'
        writer = open(filename,'w')
        writer.write('# BANDSTRUCTURE FILE FROM DAVID\'S SCRIPT\n')
        writer.write('# nsppol = %s\n' %self.nsppol)
        writer.write('# nband = %s\n' %self.mband)
        writer.write('# eigenvalue_type = %s\n' %self.eigenvalue_type)
        if self.inputgvectors:
            writer.write('# inputgvectors = 1 (%s)\n' %self.inputgvectors)
        else:
            writer.write('# inputgvectors = 0 (%s)\n' %self.inputgvectors)
        writer.write('# gvectors(1) = %20.17f %20.17f %20.17f \n' %(self.gvectors[0,0],self.gvectors[0,1],self.gvectors[0,2]))
        writer.write('# gvectors(2) = %20.17f %20.17f %20.17f \n' %(self.gvectors[1,0],self.gvectors[1,1],self.gvectors[1,2]))
        writer.write('# gvectors(3) = %20.17f %20.17f %20.17f \n' %(self.gvectors[2,0],self.gvectors[2,1],self.gvectors[2,2]))
        writer.write('# special_kpoints_number = %s\n' %(len(self.special_kpoints_indices)))
        writer.write('# list of special kpoints : (given in reduced coordinates, value_path is in Bohr^-1, value_red_path has its total path normalized to 1)\n')
        for ii in range(len(self.special_kpoints_indices)):
            ispkpt = self.special_kpoints_indices[ii]
            spkpt = self.special_kpoints[ii]
            writer.write('# special_kpt_index %5s : %20.17f %20.17f %20.17f (value_path = %20.17f | value_red_path = %20.17f)\n' %(ispkpt,spkpt[0],spkpt[1],spkpt[2],self.kpoint_path_values[ispkpt],self.kpoint_reduced_path_values[ispkpt]))
        writer.write('# special_kpoints_names :\n')
        for ii in range(len(self.special_kpoints_indices)):
            ispkpt = self.special_kpoints_indices[ii]
            spkpt = self.special_kpoints[ii]
            writer.write('# special_kpt_name %3s : "%s" : %20.17f %20.17f %20.17f\n' %(ii+1,self.special_kpoints_names[ii],spkpt[0],spkpt[1],spkpt[2]))
        writer.write('# kpoint_path_length = %20.17f \n' %(self.kpoint_path_length))
        writer.write('# kpoint_path_number = %s \n' %(self.nkpt))
        if self.inputgvectors:
            writer.write('# kpoint_path_units = %s\n' %(option_kpts))
        else:
            writer.write('# kpoint_path_units = %s (!!! CONSIDERING UNITARY GVECTORS MATRIX !!!)\n' %(option_kpts))
        writer.write('#BEGIN\n')
        if option_kpts == 'bohrm1_units':
            values_path = self.kpoint_path_values
        elif option_kpts == 'reduced':
            values_path = self.kpoint_reduced_path_values
        elif option_kpts == 'bohrm1_units_normalized':
            values_path = self.normalized_kpoint_path_values
        elif option_kpts == 'reduced_normalized':
            values_path = self.normalized_kpoint_reduced_path_values
        else:
            print 'ERROR: wrong option_kpts ... exit'
            writer.write('... CANCELLED (wrong option_kpts)')
            writer.close()
            sys.exit()
        for isppol in range(self.nsppol):
            writer.write('#isppol %s\n' %isppol)
            for iband in range(self.mband):
                writer.write('#iband %5s (band number %s)\n' %(iband,iband+1))
                for ikpt in range(self.nkpt):
                    writer.write('%20.17f %20.17f\n' %(values_path[ikpt],self.eigenvalues[isppol,ikpt,iband]))
                writer.write('\n')
        writer.write('#END\n')
        writer.write('\n#KPT_LIST\n')
        for ikpt in range(self.nkpt):
            writer.write('# %6d : %20.17f %20.17f %20.17f\n' %(ikpt,self.kpoints[ikpt,0],self.kpoints[ikpt,1],self.kpoints[ikpt,2]))
        writer.close()
    def read_bandstructure_from_file(self,filename):
        """Read back a bandstructure file produced by
        write_bandstructure_to_file, restoring dimensions, g-vectors, special
        k-points, path values and eigenvalues.

        NOTE(review): the prefix tests for '# special_kpt_index' and
        '# special_kpt_name' slice more characters ([:22], [:21]) than the
        literals shown here contain — verify the literals' exact trailing
        whitespace against the writer's output format.
        """
        reader = open(filename,'r')
        bs_data = reader.readlines()
        reader.close()
        self.gvectors = N.identity(3,N.float)
        self.kpoint_sampling_type = 'Bandstructure'
        self.special_kpoints_indices = list()
        self.special_kpoints = list()
        # Header scan: stops at the '#BEGIN' marker (index kept in ibegin).
        for ii in range(len(bs_data)):
            if bs_data[ii] == '#BEGIN\n':
                ibegin = ii
                break
            elif bs_data[ii][:10] == '# nsppol =':
                self.nsppol = N.int(bs_data[ii][10:])
            elif bs_data[ii][:9] == '# nband =':
                self.mband = N.int(bs_data[ii][9:])
            elif bs_data[ii][:19] == '# eigenvalue_type =':
                self.eigenvalue_type = bs_data[ii][19:].strip()
            elif bs_data[ii][:17] == '# inputgvectors =':
                tt = N.int(bs_data[ii][18])
                if tt == 1:
                    self.inputgvectors = True
                elif tt == 0:
                    self.inputgvectors = False
                else:
                    print 'ERROR: reading inputgvectors ... exit'
                    sys.exit()
            elif bs_data[ii][:15] == '# gvectors(1) =':
                sp = bs_data[ii][15:].split()
                self.gvectors[0,0] = N.float(sp[0])
                self.gvectors[0,1] = N.float(sp[1])
                self.gvectors[0,2] = N.float(sp[2])
            elif bs_data[ii][:15] == '# gvectors(2) =':
                sp = bs_data[ii][15:].split()
                self.gvectors[1,0] = N.float(sp[0])
                self.gvectors[1,1] = N.float(sp[1])
                self.gvectors[1,2] = N.float(sp[2])
            elif bs_data[ii][:15] == '# gvectors(3) =':
                sp = bs_data[ii][15:].split()
                self.gvectors[2,0] = N.float(sp[0])
                self.gvectors[2,1] = N.float(sp[1])
                self.gvectors[2,2] = N.float(sp[2])
            elif bs_data[ii][:26] == '# special_kpoints_number =':
                special_kpoints_number = N.int(bs_data[ii][26:])
                self.special_kpoints_names = ['']*special_kpoints_number
            elif bs_data[ii][:22] == '# special_kpt_index':
                sp = bs_data[ii][22:].split()
                self.special_kpoints_indices.append(N.int(sp[0]))
                # sp[1] is the ':' separator; sp[2:5] are the coordinates.
                self.special_kpoints.append(N.array([sp[2],sp[3],sp[4]]))
            elif bs_data[ii][:21] == '# special_kpt_name':
                sp = bs_data[ii][21:].split()
                ispkpt = N.int(sp[0])-1
                # sp[2] is the quoted name; strip the surrounding quotes.
                self.special_kpoints_names[ispkpt] = sp[2][1:-1]
            elif bs_data[ii][:22] == '# kpoint_path_length =':
                self.kpoint_path_length = N.float(bs_data[ii][22:])
            elif bs_data[ii][:22] == '# kpoint_path_number =':
                self.nkpt = N.int(bs_data[ii][22:])
            elif bs_data[ii][:21] == '# kpoint_path_units =':
                kpoint_path_units = bs_data[ii][21:].strip()
        self.special_kpoints_indices = N.array(self.special_kpoints_indices,N.int)
        self.special_kpoints = N.array(self.special_kpoints,N.float)
        if len(self.special_kpoints_indices) != special_kpoints_number or len(self.special_kpoints) != special_kpoints_number:
            print 'ERROR: reading the special kpoints ... exit'
            sys.exit()
        self.kpoint_path_values = N.zeros([self.nkpt],N.float)
        self.kpoint_reduced_path_values = N.zeros([self.nkpt],N.float)
        # Rebuild the path abscissas from the first data column, depending on
        # the units the file was written with.
        if kpoint_path_units == 'bohrm1_units':
            jj = 0
            for ii in range(ibegin+1,len(bs_data)):
                if bs_data[ii][:7] == '#isppol' or bs_data[ii][:6] == '#iband':continue
                if bs_data[ii] == '\n':
                    break
                self.kpoint_path_values[jj] = N.float(bs_data[ii].split()[0])
                jj = jj + 1
            if jj != self.nkpt:
                print 'ERROR: reading bandstructure file ... exit'
                sys.exit()
            self.normalized_kpoint_path_values = self.kpoint_path_values/self.kpoint_path_length
        if kpoint_path_units == 'bohrm1_units_normalized':
            jj = 0
            for ii in range(ibegin+1,len(bs_data)):
                if bs_data[ii][:7] == '#isppol' or bs_data[ii][:6] == '#iband':continue
                if bs_data[ii] == '\n':
                    break
                self.normalized_kpoint_path_values[jj] = N.float(bs_data[ii].split()[0])
                jj = jj + 1
            if jj != self.nkpt:
                print 'ERROR: reading bandstructure file ... exit'
                sys.exit()
            self.kpoint_path_values = self.normalized_kpoint_path_values*self.kpoint_path_length
        elif kpoint_path_units == 'reduced_normalized':
            jj = 0
            for ii in range(ibegin+1,len(bs_data)):
                if bs_data[ii][:7] == '#isppol' or bs_data[ii][:6] == '#iband':continue
                if bs_data[ii] == '\n':
                    break
                self.normalized_kpoint_reduced_path_values[jj] = N.float(bs_data[ii].split()[0])
                jj = jj + 1
            if jj != self.nkpt:
                print 'ERROR: reading bandstructure file ... exit'
                sys.exit()
            self.kpoint_reduced_path_values = self.normalized_kpoint_reduced_path_values/self.kpoint_reduced_path_length
        elif kpoint_path_units == 'reduced':
            jj = 0
            for ii in range(ibegin+1,len(bs_data)):
                if bs_data[ii][:7] == '#isppol' or bs_data[ii][:6] == '#iband':continue
                if bs_data[ii] == '\n':
                    break
                self.kpoint_reduced_path_values[jj] = N.float(bs_data[ii].split()[0])
                jj = jj + 1
            if jj != self.nkpt:
                print 'ERROR: reading bandstructure file ... exit'
                sys.exit()
            self.normalized_kpoint_reduced_path_values = self.kpoint_reduced_path_values/self.kpoint_reduced_path_length
        self.eigenvalues = N.zeros([self.nsppol,self.nkpt,self.mband],N.float)
        # Data scan: '#isppol'/'#iband' markers set the current indices, blank
        # lines separate bands, '#END' terminates.
        check_nband = 0
        for ii in range(ibegin+1,len(bs_data)):
            if bs_data[ii][:7] == '#isppol':
                isppol = N.int(bs_data[ii][7:])
            elif bs_data[ii][:6] == '#iband':
                iband = N.int(bs_data[ii][6:].split()[0])
                ikpt = 0
            elif bs_data[ii][:4] == '#END':
                break
            elif bs_data[ii] == '\n':
                check_nband = check_nband + 1
            else:
                self.eigenvalues[isppol,ikpt,iband] = N.float(bs_data[ii].split()[1])
                ikpt = ikpt + 1
def check_gw_vs_dft_parameters(dftec,gwec):
if gwec.eigenvalue_type != 'GW' or dftec.eigenvalue_type != 'DFT':
print 'ERROR: eigenvalue files do not contain GW and DFT eigenvalues ... exiting now'
sys.exit()
if dftec.nsppol != gwec.nsppol or dftec.nkpt != gwec.nkpt:
print 'ERROR: the number of spins/kpoints is not the same in the GW and DFT files used to make the interpolation ... exiting now'
sys.exit()
for ikpt in range(dftec.nkpt):
if N.absolute(dftec.kpoints[ikpt,0]-gwec.kpoints[ikpt,0]) > csts.TOLKPTS or \
N.absolute(dftec.kpoints[ikpt,1]-gwec.kpoints[ikpt,1]) > csts.TOLKPTS or \
N.absolute(dftec.kpoints[ikpt,2]-gwec.kpoints[ikpt,2]) > csts.TOLKPTS:
print 'ERROR: the kpoints are not the same in the GW and DFT files used to make the interpolation ... exiting now'
sys.exit()
def plot_gw_vs_dft_eig(dftec,gwec,vbm_index,energy_pivots=None,polyfit_degrees=None):
    """Compare GW and DFT eigenvalues and optionally fit the GW correction.

    Gathers matching (DFT, GW) eigenvalue pairs over all spins/k-points,
    split at *vbm_index* into valence and conduction sets (in eV).  Without
    *energy_pivots*: only plots GW-vs-DFT scatter figures and returns None.
    With pivots: fits one polynomial per energy interval (degrees from
    *polyfit_degrees*) to the GW-DFT correction and returns the list of
    polynomial coefficient arrays.

    NOTE(review): relies on a module-level global ``plot_figures`` (set
    elsewhere in the file, presumably from command-line options) to decide
    whether matplotlib figures are shown — confirm it is defined before
    calling.
    """
    if gwec.eigenvalue_type != 'GW' or dftec.eigenvalue_type != 'DFT':
        print 'ERROR: eigenvalue containers do not contain GW and DFT eigenvalues ... exiting now'
        sys.exit()
    if dftec.nsppol != gwec.nsppol or dftec.nkpt != gwec.nkpt:
        print 'ERROR: the number of spins/kpoints is not the same in the GW and DFT containers ... exiting now'
        sys.exit()
    valdftarray = N.array([],N.float)
    conddftarray = N.array([],N.float)
    valgwarray = N.array([],N.float)
    condgwarray = N.array([],N.float)
    for isppol in range(dftec.nsppol):
        for ikpt in range(dftec.nkpt):
            # Band range present in BOTH containers at this (spin, kpoint).
            ibdmin = N.max([dftec.bd_indices[isppol,ikpt,0],gwec.bd_indices[isppol,ikpt,0]])-1
            ibdmax = N.min([dftec.bd_indices[isppol,ikpt,1],gwec.bd_indices[isppol,ikpt,1]])-1
            valdftarray = N.append(valdftarray,csts.hartree2ev*dftec.eigenvalues[isppol,ikpt,ibdmin:vbm_index])
            valgwarray = N.append(valgwarray,csts.hartree2ev*gwec.eigenvalues[isppol,ikpt,ibdmin:vbm_index])
            conddftarray = N.append(conddftarray,csts.hartree2ev*dftec.eigenvalues[isppol,ikpt,vbm_index:ibdmax+1])
            condgwarray = N.append(condgwarray,csts.hartree2ev*gwec.eigenvalues[isppol,ikpt,vbm_index:ibdmax+1])
    if energy_pivots == None:
        # Plot-only mode: no fit is performed.
        if plot_figures == 1:
            P.figure(1)
            P.hold(True)
            P.grid(True)
            P.plot(valdftarray,valgwarray,'bx')
            P.plot(conddftarray,condgwarray,'rx')
            P.xlabel('DFT eigenvalues (in eV)')
            P.ylabel('GW eigenvalues (in eV)')
            P.figure(2)
            P.hold(True)
            P.grid(True)
            P.plot(valdftarray,valgwarray-valdftarray,'bx')
            P.plot(conddftarray,condgwarray-conddftarray,'rx')
            P.xlabel('DFT eigenvalues (in eV)')
            P.ylabel('GW correction to the DFT eigenvalues (in eV)')
            P.show()
        return
    polyfitlist = list()
    if len(polyfit_degrees) == 1:
        print 'ERROR: making a fit with only one interval is not allowed ... exiting now'
        sys.exit()
    dftarray = N.append(valdftarray,conddftarray)
    gwarray = N.append(valgwarray,condgwarray)
    dftarray_list = list()
    gwarray_list = list()
    # One fit per interval: (-inf, pivot0], [pivot_{i-1}, pivot_i], [last pivot, +inf).
    for iinterval in range(len(polyfit_degrees)):
        tmpdftarray = N.array([],N.float)
        tmpgwarray = N.array([],N.float)
        if iinterval == 0:
            emin = None
            emax = energy_pivots[0]
            for ii in range(len(dftarray)):
                if dftarray[ii] <= emax:
                    tmpdftarray = N.append(tmpdftarray,[dftarray[ii]])
                    tmpgwarray = N.append(tmpgwarray,[gwarray[ii]])
        elif iinterval == len(polyfit_degrees)-1:
            emin = energy_pivots[-1]
            emax = None
            for ii in range(len(dftarray)):
                if dftarray[ii] >= emin:
                    tmpdftarray = N.append(tmpdftarray,[dftarray[ii]])
                    tmpgwarray = N.append(tmpgwarray,[gwarray[ii]])
        else:
            emin = energy_pivots[iinterval-1]
            emax = energy_pivots[iinterval]
            for ii in range(len(dftarray)):
                if dftarray[ii] >= emin and dftarray[ii] <= emax:
                    tmpdftarray = N.append(tmpdftarray,[dftarray[ii]])
                    tmpgwarray = N.append(tmpgwarray,[gwarray[ii]])
        dftarray_list.append(tmpdftarray)
        gwarray_list.append(tmpgwarray)
        # The fitted quantity is the correction GW - DFT, not GW itself.
        pfit = N.polyfit(tmpdftarray,tmpgwarray-tmpdftarray,polyfit_degrees[iinterval])
        polyfitlist.append(pfit)
    if plot_figures == 1:
        linspace_npoints = 200
        valpoly_x = N.linspace(N.min(valdftarray),N.max(valdftarray),linspace_npoints)
        condpoly_x = N.linspace(N.min(conddftarray),N.max(conddftarray),linspace_npoints)
        P.figure(3)
        P.hold(True)
        P.grid(True)
        P.plot(valdftarray,valgwarray-valdftarray,'bx')
        P.plot(conddftarray,condgwarray-conddftarray,'rx')
        [x_min,x_max] = P.xlim()
        for iinterval in range(len(polyfit_degrees)):
            if iinterval == 0:
                tmppoly_x = N.linspace(x_min,energy_pivots[iinterval],linspace_npoints)
            elif iinterval == len(polyfit_degrees)-1:
                tmppoly_x = N.linspace(energy_pivots[iinterval-1],x_max,linspace_npoints)
            else:
                tmppoly_x = N.linspace(energy_pivots[iinterval-1],energy_pivots[iinterval],linspace_npoints)
            P.plot(tmppoly_x,N.polyval(polyfitlist[iinterval],tmppoly_x),'k')
        for ipivot in range(len(energy_pivots)):
            en = energy_pivots[ipivot]
            # Dash-dot segment showing the fit discontinuity at each pivot.
            P.plot([en,en],[N.polyval(polyfitlist[ipivot],en),N.polyval(polyfitlist[ipivot+1],en)],'k-.')
        P.xlabel('DFT eigenvalues (in eV)')
        P.ylabel('GW correction to the DFT eigenvalues (in eV)')
        P.figure(4)
        P.hold(True)
        P.grid(True)
        for iinterval in range(len(polyfit_degrees)):
            P.plot(dftarray_list[iinterval],gwarray_list[iinterval]-dftarray_list[iinterval]-N.polyval(polyfitlist[iinterval],dftarray_list[iinterval]),'bx')
        [x_min,x_max] = P.xlim()
        P.plot([x_min,x_max],[0,0],'k-')
        P.xlabel('DFT eigenvalues (in eV)')
        P.ylabel('Error in the fit (in eV)')
        P.show()
    return polyfitlist
def compare_bandstructures(ec_ref,ec_test):
    """Compare two bandstructure containers band by band.

    Checks that both have the same special k-points (looser tolerance when
    comparing a DFT reference against a W90 test), interpolates the test
    eigenvalues onto the reference path, then computes and plots per-band
    RMSD and MAE (in eV, for spin 0).  Aborts the script on mismatch.
    Returns None.
    """
    nspkpt_ref = len(ec_ref.special_kpoints)
    nspkpt_test = len(ec_test.special_kpoints)
    if nspkpt_ref != nspkpt_test:
        print 'ERROR: The number of special kpoints is different in the two files ... exit'
        sys.exit()
    eig_type_ref = ec_ref.eigenvalue_type
    eig_type_test = ec_test.eigenvalue_type
    print eig_type_ref,eig_type_test
    # W90 paths are less precise than DFT ones: relax the k-point tolerance.
    if eig_type_ref == 'DFT' and eig_type_test == 'W90':
        TOL_KPTS = N.float(1.0e-4)
    else:
        TOL_KPTS = N.float(1.0e-6)
    print TOL_KPTS
    for ispkpt in range(nspkpt_ref):
        print 'difference between the two :',ec_ref.special_kpoints[ispkpt,:]-ec_test.special_kpoints[ispkpt,:]
        if not N.allclose(ec_ref.special_kpoints[ispkpt,:],ec_test.special_kpoints[ispkpt,:],atol=TOL_KPTS):
            print 'ERROR: The kpoints are not the same :'
            print ' Kpt #%s ' %ispkpt
            print ' Reference => %20.17f %20.17f %20.17f' %(ec_ref.special_kpoints[ispkpt,0],ec_ref.special_kpoints[ispkpt,1],ec_ref.special_kpoints[ispkpt,2])
            print ' Compared => %20.17f %20.17f %20.17f' %(ec_test.special_kpoints[ispkpt,0],ec_test.special_kpoints[ispkpt,1],ec_test.special_kpoints[ispkpt,2])
            print ' ... exit'
            sys.exit()
    # Compare only the bands both containers have.
    mband_comparison = N.min([ec_ref.mband,ec_test.mband])
    if mband_comparison < ec_ref.mband:
        print 'Number of bands in the test bandstructure is lower than the number of bands in the reference (%s)' %ec_ref.mband
        print ' => Comparison will proceed with %s bands' %ec_test.mband
    elif mband_comparison < ec_test.mband:
        print 'Number of bands in the reference bandstructure is lower than the number of bands in the test bandstructure (%s)' %ec_test.mband
        print ' => Comparison will only proceed with %s bands of the test bandstructure' %ec_ref.mband
    else:
        print 'Number of bands in the reference and test bandstructure is the same'
        print ' => Comparison will proceed with %s bands' %mband_comparison
#    eig_test_ref_path = ec_ref.eigenvalues[:,:,:mband_comparison]
    rmsd_per_band = N.zeros([ec_ref.nsppol,mband_comparison],N.float)
    nrmsd_per_band = N.zeros([ec_ref.nsppol,mband_comparison],N.float)
    mae_per_band = N.zeros([ec_ref.nsppol,mband_comparison],N.float)
    for isppol in range(ec_ref.nsppol):
        for iband in range(mband_comparison):
            # Resample the test bands on the reference path abscissas before
            # computing the error metrics.
            interp = N.interp(ec_ref.normalized_kpoint_path_values,ec_test.normalized_kpoint_path_values,ec_test.eigenvalues[isppol,:,iband])
            rmsd_per_band[isppol,iband] = N.sqrt(N.sum((csts.hartree2ev*interp-csts.hartree2ev*ec_ref.eigenvalues[isppol,:,iband])**2)/ec_ref.nkpt)
            mae_per_band[isppol,iband] = N.sum(N.abs(csts.hartree2ev*interp-csts.hartree2ev*ec_ref.eigenvalues[isppol,:,iband]))/ec_ref.nkpt
    P.figure(1)
    P.plot(mae_per_band[0,:])
    P.figure(2)
    P.plot(rmsd_per_band[0,:])
    P.show()
def get_gvectors():
if os.path.isfile('.gvectors.bsinfo'):
print 'File ".gvectors.bsinfo found with the following gvectors information :"'
try:
gvectors_reader = open('.gvectors.bsinfo','r')
gvectors_data = gvectors_reader.readlines()
gvectors_reader.close()
trial_gvectors = N.identity(3,N.float)
trial_gvectors[0,0] = N.float(gvectors_data[0].split()[0])
trial_gvectors[0,1] = N.float(gvectors_data[0].split()[1])
trial_gvectors[0,2] = N.float(gvectors_data[0].split()[2])
trial_gvectors[1,0] = N.float(gvectors_data[1].split()[0])
trial_gvectors[1,1] = N.float(gvectors_data[1].split()[1])
trial_gvectors[1,2] = N.float(gvectors_data[1].split()[2])
trial_gvectors[2,0] = N.float(gvectors_data[2].split()[0])
trial_gvectors[2,1] = N.float(gvectors_data[2].split()[1])
trial_gvectors[2,2] = N.float(gvectors_data[2].split()[2])
print ' gvectors(1) = [ %20.17f %20.17f %20.17f ]' %(trial_gvectors[0,0],trial_gvectors[0,1],trial_gvectors[0,2])
print ' gvectors(2) = [ %20.17f %20.17f %20.17f ]' %(trial_gvectors[1,0],trial_gvectors[1,1],trial_gvectors[1,2])
print ' gvectors(3) = [ %20.17f %20.17f %20.17f ]' %(trial_gvectors[2,0],trial_gvectors[2,1],trial_gvectors[2,2])
except:
print 'ERROR: file ".gvectors.bsinfo" might be corrupted (empty or not formatted correctly ...)'
print ' you should remove the file and start again or check the file ... exit'
sys.exit()
test = raw_input('Press <ENTER> to use these gvectors (any other character to enter manually other gvectors)\n')
if test == '':
gvectors = trial_gvectors
else:
gvectors = N.identity(3,N.float)
test = raw_input('Enter G1 (example : "0.153 0 0") : \n')
gvectors[0,0] = N.float(test.split()[0])
gvectors[0,1] = N.float(test.split()[1])
gvectors[0,2] = N.float(test.split()[2])
test = raw_input('Enter G2 (example : "0.042 1.023 0") : \n')
gvectors[1,0] = N.float(test.split()[0])
gvectors[1,1] = N.float(test.split()[1])
gvectors[1,2] = N.float(test.split()[2])
test = raw_input('Enter G3 (example : "0 0 1.432") : \n')
gvectors[2,0] = N.float(test.split()[0])
gvectors[2,1] = N.float(test.split()[1])
gvectors[2,2] = N.float(test.split()[2])
test = raw_input('Do you want to overwrite the gvectors contained in the file ".gvectors.bsinfo" ? (<ENTER> for yes, anything else for no)\n')
if test == '':
print 'Writing gvectors to file ".gvectors.bsinfo" ...'
gvectors_writer = open('.gvectors.bsinfo','w')
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(trial_gvectors[0,0],trial_gvectors[0,1],trial_gvectors[0,2]))
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(trial_gvectors[1,0],trial_gvectors[1,1],trial_gvectors[1,2]))
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(trial_gvectors[2,0],trial_gvectors[2,1],trial_gvectors[2,2]))
gvectors_writer.close()
print '... done'
else:
test = raw_input('Do you want to enter the the reciprocal space primitive vectors (y/n)\n')
if test == 'y':
gvectors = N.identity(3,N.float)
test = raw_input('Enter G1 (example : "0.153 0 0") : ')
gvectors[0,0] = N.float(test.split()[0])
gvectors[0,1] = N.float(test.split()[1])
gvectors[0,2] = N.float(test.split()[2])
test = raw_input('Enter G2 (example : "0.042 1.023 0") : ')
gvectors[1,0] = N.float(test.split()[0])
gvectors[1,1] = N.float(test.split()[1])
gvectors[1,2] = N.float(test.split()[2])
test = raw_input('Enter G3 (example : "0 0 1.432") : ')
gvectors[2,0] = N.float(test.split()[0])
gvectors[2,1] = N.float(test.split()[1])
gvectors[2,2] = N.float(test.split()[2])
test = raw_input('Do you want to write the gvectors to file ".gvectors.bsinfo" ? (<ENTER> for yes, anything else for no)\n')
if test == '':
print 'Writing gvectors to file ".gvectors.bsinfo" ...'
gvectors_writer = open('.gvectors.bsinfo','w')
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(gvectors[0,0],gvectors[0,1],gvectors[0,2]))
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(gvectors[1,0],gvectors[1,1],gvectors[1,2]))
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(gvectors[2,0],gvectors[2,1],gvectors[2,2]))
gvectors_writer.close()
print '... done'
else:
gvectors = None
return gvectors
# Parse the command line options
parser = argparse.ArgumentParser(description='Tool for plotting dft bandstructures')
parser.add_argument('files',help='files to be opened',nargs=1)
args = parser.parse_args()
args_dict = vars(args)
# A single bandstructure file is mandatory (nargs=1 already enforces this
# when the positional argument is present at all).
if args_dict['files']:
    print 'will open the file'
else:
    print 'ERROR: you should provide some bandstructure file ! exiting now ...'
    sys.exit()
dft_file = args_dict['files'][0]
# Reciprocal primitive vectors: read from cache file or entered interactively.
gvectors = get_gvectors()
# Load the eigenvalues and locate the high-symmetry (special) k-points.
ec_dft = EigenvalueContainer(directory='.',filename=dft_file)
ec_dft.set_kpoint_sampling_type('Bandstructure')
ec_dft.find_special_kpoints(gvectors)
# Ask how many bands to plot; <ENTER> keeps the full set.
print 'Number of bands in the file : %s' %(N.shape(ec_dft.eigenvalues)[2])
test = raw_input('Enter the number of bands to be plotted (<ENTER> : %s) : \n' %(N.shape(ec_dft.eigenvalues)[2]))
if test == '':
    nbd_plot = N.shape(ec_dft.eigenvalues)[2]
else:
    nbd_plot = N.int(test)
if nbd_plot > N.shape(ec_dft.eigenvalues)[2]:
    print 'ERROR: the number of bands to be plotted is larger than the number available ... exit'
    sys.exit()
# Default names k1, k2, ... for the special k-points, then offer to rename
# them (only applied if the user supplies exactly one name per point).
ec_dft.special_kpoints_names = ['']*len(ec_dft.special_kpoints_indices)
for ii in range(len(ec_dft.special_kpoints_indices)):
    ec_dft.special_kpoints_names[ii] = 'k%s' %(ii+1)
print 'List of special kpoints :'
for ii in range(len(ec_dft.special_kpoints_indices)):
    spkpt = ec_dft.kpoints[ec_dft.special_kpoints_indices[ii]]
    print ' Kpoint %s : %s %s %s' %(ii+1,spkpt[0],spkpt[1],spkpt[2])
print 'Enter the name of the %s special k-points :' %(len(ec_dft.special_kpoints_indices))
test = raw_input('')
if len(test.split()) == len(ec_dft.special_kpoints_indices):
    for ii in range(len(ec_dft.special_kpoints_indices)):
        ec_dft.special_kpoints_names[ii] = test.split()[ii]
# Dump the bandstructure to "<basename>.bandstructure" for later reuse.
test = raw_input('Enter base name for bandstructure file : \n')
ec_dft.write_bandstructure_to_file('%s.bandstructure' %test)
# Plot: x-axis labeled at the special k-points, energies converted to eV.
P.figure(1,figsize=(3.464,5))
P.hold('on')
P.grid('on')
P.xticks(N.take(ec_dft.kpoint_reduced_path_values,N.array(ec_dft.special_kpoints_indices,N.int)),ec_dft.special_kpoints_names)
if ec_dft.nsppol == 1:
    for iband in range(nbd_plot):
        P.plot(ec_dft.kpoint_reduced_path_values,ec_dft.eigenvalues[0,:,iband]*csts.hartree2ev,'k-',linewidth=2)
elif ec_dft.nsppol == 2:
    # Spin-polarized case: spin-up in black, spin-down in red.
    for iband in range(nbd_plot):
        P.plot(ec_dft.kpoint_reduced_path_values,ec_dft.eigenvalues[0,:,iband]*csts.hartree2ev,'k-',linewidth=2)
        P.plot(ec_dft.kpoint_reduced_path_values,ec_dft.eigenvalues[1,:,iband]*csts.hartree2ev,'r-',linewidth=2)
P.show()
| jmbeuken/abinit | scripts/post_processing/abinit_eignc_to_bandstructure.py | Python | gpl-3.0 | 47,417 | [
"ABINIT",
"NetCDF",
"Wannier90"
] | 5a6da0104972297d17464200624bdb387e72bfe5dc439f7ab34d2ed7b2a50014 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Bwa(Package):
    """Burrows-Wheeler Aligner for pairwise alignment between DNA sequences."""
    # Upstream project location and canonical download URL.
    homepage = "http://github.com/lh3/bwa"
    url = "https://github.com/lh3/bwa/releases/download/v0.7.15/bwa-0.7.15.tar.bz2"
    # Known releases with md5 checksums.  0.7.12 predates the GitHub release
    # tarballs and is fetched from the auto-generated source archive instead.
    version('0.7.17', '82cba7ef695538e6a38b9d4156837381',
            url="https://github.com/lh3/bwa/releases/download/v0.7.17/bwa-0.7.17.tar.bz2")
    version('0.7.15', 'fcf470a46a1dbe2f96a1c5b87c530554',
            url="https://github.com/lh3/bwa/releases/download/v0.7.15/bwa-0.7.15.tar.bz2")
    version('0.7.13', 'f094f609438511766c434178a3635ab4',
            url="https://github.com/lh3/bwa/releases/download/v0.7.13/bwa-0.7.13.tar.bz2")
    version('0.7.12', 'e24a587baaad411d5da89516ad7a261a',
            url='https://github.com/lh3/bwa/archive/0.7.12.tar.gz')
    depends_on('zlib')
    def install(self, spec, prefix):
        """Build bwa against Spack's zlib and copy the products into prefix."""
        # The stock Makefile hard-codes empty INCLUDES/LIBS; point them at the
        # zlib chosen by Spack before invoking make.
        filter_file(r'^INCLUDES=',
                    "INCLUDES=-I%s" % spec['zlib'].prefix.include, 'Makefile')
        filter_file(r'^LIBS=', "LIBS=-L%s " % spec['zlib'].prefix.lib,
                    'Makefile')
        make()
        # bwa has no "make install" target: stage the binary, docs and man
        # page by hand.
        mkdirp(prefix.bin)
        install('bwa', join_path(prefix.bin, 'bwa'))
        set_executable(join_path(prefix.bin, 'bwa'))
        mkdirp(prefix.doc)
        install('README.md', prefix.doc)
        install('NEWS.md', prefix.doc)
        mkdirp(prefix.man.man1)
        install('bwa.1', prefix.man.man1)
| lgarren/spack | var/spack/repos/builtin/packages/bwa/package.py | Python | lgpl-2.1 | 2,671 | [
"BWA"
] | d8f02d51f608be8ed2a4a962b5fad33575fcaa08a83a234733ba42ef736acefd |
"""
.. See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os
import shutil
import sys
import tarfile
from utils import logger
try:
if hasattr(sys, '_run_from_cmdl') is True:
raise ImportError
from pycompss.api.parameter import FILE_IN, FILE_OUT
from pycompss.api.task import task
# from pycompss.api.api import compss_wait_on
except ImportError:
logger.warn("[Warning] Cannot import \"pycompss\" API packages.")
logger.warn(" Using mock decorators.")
from utils.dummy_pycompss import FILE_IN, FILE_OUT # pylint: disable=ungrouped-imports
from utils.dummy_pycompss import task # pylint: disable=ungrouped-imports
# from utils.dummy_pycompss import compss_wait_on # pylint: disable=ungrouped-imports
from basic_modules.tool import Tool
from basic_modules.metadata import Metadata
from tool.aligner_utils import alignerUtils
from tool.common import common
# ------------------------------------------------------------------------------
class bowtieIndexerTool(Tool):  # pylint: disable=invalid-name
    """
    Tool for running indexers over a genome FASTA file
    """
    def __init__(self, configuration=None):
        """
        Initialise the tool with its configuration.
        Parameters
        ----------
        configuration : dict
            a dictionary containing parameters that define how the operation
            should be carried out, which are specific to each Tool.
        """
        logger.info("Bowtie2 Indexer")
        Tool.__init__(self)
        if configuration is None:
            configuration = {}
        # Merge caller-supplied options into the Tool-level defaults.
        self.configuration.update(configuration)
    @task(file_loc=FILE_IN, index_loc=FILE_OUT)
    def bowtie2_indexer(self, file_loc, index_loc):  # pylint: disable=unused-argument, no-self-use
        """
        Bowtie2 Indexer
        Builds the six Bowtie2 index files for the given FASTA file and packs
        them into a single gzipped tarball at ``index_loc``.
        Parameters
        ----------
        file_loc : str
            Location of the genome assembly FASTA file
        index_loc : str
            Location of the output (gzipped tar) index file
        Returns
        -------
        bool
            True on success, False if any filesystem step failed.
        """
        au_handle = alignerUtils()
        bt2_1, bt2_2, bt2_3, bt2_4, bt2_rev1, bt2_rev2 = au_handle.bowtie_index_genome(file_loc)
        try:
            # tar.gz the index
            # NOTE(review): logger.info is given extra positional arguments
            # here -- confirm the project logger accepts them.
            logger.info("BOWTIE2 - index_loc", index_loc, index_loc.replace('.tar.gz', ''))
            idx_out_pregz = index_loc.replace('.tar.gz', '.tar')
            index_dir = index_loc.replace('.tar.gz', '')
            # Stage the six index files in a directory named after the
            # tarball so the archive unpacks into a single folder.
            os.mkdir(index_dir)
            shutil.move(bt2_1, index_dir)
            shutil.move(bt2_2, index_dir)
            shutil.move(bt2_3, index_dir)
            shutil.move(bt2_4, index_dir)
            shutil.move(bt2_rev1, index_dir)
            shutil.move(bt2_rev2, index_dir)
            # NOTE(review): if tar.add() raises, the handle is not closed
            # before returning False.
            tar = tarfile.open(idx_out_pregz, "w")
            tar.add(index_dir, arcname=os.path.split(index_dir)[1])
            tar.close()
        except (OSError, IOError) as error:
            logger.fatal("I/O error({0}): {1}".format(error.errno, error.strerror))
            return False
        # Compress the plain tar to .tar.gz and remove the staging directory.
        common.zip_file(idx_out_pregz)
        shutil.rmtree(index_dir)
        return True
    def run(self, input_files, input_metadata, output_files):
        """
        Tool for generating assembly aligner index files for use with the
        Bowtie 2 aligner
        Parameters
        ----------
        input_files : dict
            "genome" : location of the genome assembly FASTA file
        input_metadata : dict
            "genome" : Metadata object describing the assembly
        output_files : dict
            "index" : location for the gzipped tar index
        Returns
        -------
        array : list
            First element is a list of the index files. Second element is a
            list of the matching metadata
        """
        self.bowtie2_indexer(
            input_files["genome"],
            output_files["index"]
        )
        # Describe the produced index; "sources" records provenance back to
        # the assembly the index was built from.
        output_metadata = {
            "index": Metadata(
                data_type="sequence_mapping_index_bowtie",
                file_type="TAR",
                file_path=output_files["index"],
                sources=[input_metadata["genome"].file_path],
                taxon_id=input_metadata["genome"].taxon_id,
                meta_data={
                    "assembly": input_metadata["genome"].meta_data["assembly"],
                    "tool": "bowtie_indexer"
                }
            )
        }
        return (output_files, output_metadata)
# ------------------------------------------------------------------------------
| Multiscale-Genomics/mg-process-fastq | tool/bowtie_indexer.py | Python | apache-2.0 | 5,081 | [
"Bowtie"
] | 10e4aa1096cd4d36e8eb746dc3014486a9643ceabb1b2c9ae2197de26e20462a |
from __future__ import print_function, division

import os
from time import sleep

import numpy as np
import scipy as sp

from ase import units
kcalmol2eV = units.kcal / units.mol
def replace_positions_lammps(atoms, datafile='data.input', preloaded_data=None):
    """Overwrite the atom coordinates stored in a LAMMPS data file.

    Parameters
    ----------
    atoms : ase.Atoms-like
        Object providing get_number_of_atoms() and get_positions().
    datafile : str
        Path of the LAMMPS data file to rewrite in place.
    preloaded_data : list of str, optional
        Already-read lines of the data file; when given, *datafile* is only
        written, not read (avoids re-reading the template on every call).
    """
    if preloaded_data is None:
        with open(datafile, 'r') as handle:
            data = handle.readlines()
    else:
        data = preloaded_data
    n = atoms.get_number_of_atoms()
    # Atom records are assumed to start on (0-based) line 28 of the data file,
    # with the coordinates in the last three whitespace-separated fields.
    idx = 28
    for i, (line, position) in enumerate(zip(data[idx:idx + n], atoms.get_positions())):
        fields = line.split()
        # BUGFIX: ndarray.astype('string') only exists on Python 2 / old
        # NumPy; build the textual coordinate fields portably instead.
        fields[-3:] = [str(float(coord)) for coord in position]
        data[idx + i] = ' '.join(fields) + '\n'
    # Write the (possibly partially) updated template back in full.
    with open(datafile, 'w') as handle:
        handle.writelines(data)
def load_lammps_forces(atoms, dumpfile='lmp_dump.xyz'):
    """Read per-atom forces from a LAMMPS dump file.

    The last n lines of the dump are parsed (n = number of atoms).  The first
    column must hold the 1-based atom id and the last three columns the force
    components in kcal mol^-1 A^-1, which are converted to eV A^-1.

    Returns
    -------
    forces : (n, 3) ndarray
        Row i holds the force on atom i+1, regardless of dump ordering.
    """
    with open(dumpfile, 'r') as handle:
        data = handle.readlines()
    n = atoms.get_number_of_atoms()
    fstrings = data[-n:]
    adata = np.asarray([s.strip().split() for s in fstrings]).astype('float')
    indices = adata[:, 0].astype('int') - 1
    # BUGFIX: the original indexed with the id permutation directly
    # (adata[indices]), which is only correct when the dump is already sorted
    # by atom id; argsort applies the inverse permutation, so a dump in any
    # order yields forces[i] == force on atom i+1.  (This removes the old
    # requirement of declaring "sort" in the LAMMPS dump command.)
    forces = adata[np.argsort(indices)][:, -3:]
    # convert from kcal mol-1 A-1 to eV A-1
    forces *= kcalmol2eV
    return forces
def load_lammps_pote(atoms, log='log'):
    """Extract the potential energy from a LAMMPS log file.

    The first line containing a "PotEng" token is parsed; the value two tokens
    after it (skipping the "=" in "PotEng = <value>") is converted from
    kcal/mol to eV and returned.

    Parameters
    ----------
    atoms : object
        Unused; kept for interface symmetry with the other loaders.
    log : str
        Path of the LAMMPS log file.
    """
    with open(log, 'r') as handle:
        logo = handle.readlines()
    for line in logo:
        tokens = line.split()
        # BUGFIX: the original stored the *character* offset of "PotEng"
        # (str.find) and reused it as a *token* index into line.split(),
        # which only works when "PotEng" starts exactly at column 0 (as in
        # thermo_style "multi" output).  Index the token list instead, which
        # also tolerates leading whitespace.
        if 'PotEng' in tokens:
            pot_energy = float(tokens[tokens.index('PotEng') + 2])
            break
    pot_energy *= kcalmol2eV
    return pot_energy
def calc_lammps(atoms, datafile='data.input', preloaded_data=None, dumpfile='lmp_dump.xyz'):
    """Run one single-point LAMMPS evaluation for *atoms*.

    Writes the current coordinates into *datafile*, launches the LAMMPS
    binary named by the LAMMPS_COMMAND environment variable (reading the
    script "input" and redirecting output to "log"), then returns the tuple
    (potential energy [eV], forces [eV/A]) parsed from the run products.
    """
    lammps_binary = os.environ["LAMMPS_COMMAND"]
    # Refresh the coordinates that LAMMPS will see.
    replace_positions_lammps(atoms, datafile=datafile, preloaded_data=preloaded_data)
    # Blocking call; the run's stdout ends up in "log".
    os.system('%s < input > log' % lammps_binary)
    # Harvest the results from the log and dump files.
    energy = load_lammps_pote(atoms)
    force_array = load_lammps_forces(atoms, dumpfile=dumpfile)
    return energy, force_array
| marcocaccin/LearningMetaDynamics | lammps2py_interface.py | Python | gpl-2.0 | 2,355 | [
"ASE",
"LAMMPS"
] | 64dceea397784673e0a387defffd78239d3a97682b3ba1c07cef7cd80a46ed47 |
# -*- coding: utf-8 -*-
"""
.. _disc-stats:
=====================
Statistical inference
=====================
Here we will briefly cover multiple concepts of inferential statistics in an
introductory manner, and demonstrate how to use some MNE statistical functions.
.. contents:: Topics
:local:
:depth: 3
"""
# Authors: Eric Larson <larson.eric.d@gmail.com>
# License: BSD (3-clause)
from functools import partial
import numpy as np
from scipy import stats
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D # noqa, analysis:ignore
import mne
from mne.stats import (ttest_1samp_no_p, bonferroni_correction, fdr_correction,
permutation_t_test, permutation_cluster_1samp_test)
print(__doc__)
###############################################################################
# Hypothesis testing
# ------------------
# Null hypothesis
# ^^^^^^^^^^^^^^^
# From `Wikipedia <https://en.wikipedia.org/wiki/Null_hypothesis>`__:
#
# In inferential statistics, a general statement or default position that
# there is no relationship between two measured phenomena, or no
# association among groups.
#
# We typically want to reject a **null hypothesis** with
# some probability (e.g., p < 0.05). This probability is also called the
# significance level :math:`\alpha`.
# To think about what this means, let's follow the illustrative example from
# [1]_ and construct a toy dataset consisting of a 40 x 40 square with a
# "signal" present in the center with white noise added and a Gaussian
# smoothing kernel applied.
width = 40  # image is width x width "voxels"
n_subjects = 10
signal_mean = 100  # amplitude of the deterministic central peak
signal_sd = 100  # between-subject variability of the peak
noise_sd = 0.01
gaussian_sd = 5  # width of the spatial smoothing kernel
sigma = 1e-3  # sigma for the "hat" method
n_permutations = 'all'  # run an exact test
n_src = width * width
# For each "subject", make a smoothed noisy signal with a centered peak
rng = np.random.RandomState(2)  # fixed seed so the tutorial is reproducible
X = noise_sd * rng.randn(n_subjects, width, width)
# Add a signal at the center
X[:, width // 2, width // 2] = signal_mean + rng.randn(n_subjects) * signal_sd
# Spatially smooth with a 2D Gaussian kernel
size = width // 2 - 1
gaussian = np.exp(-(np.arange(-size, size + 1) ** 2 / float(gaussian_sd ** 2)))
# The 2D smoothing is applied separably: convolve each row, then each column.
for si in range(X.shape[0]):
    for ri in range(X.shape[1]):
        X[si, ri, :] = np.convolve(X[si, ri, :], gaussian, 'same')
    for ci in range(X.shape[2]):
        X[si, :, ci] = np.convolve(X[si, :, ci], gaussian, 'same')
###############################################################################
# The data averaged over all subjects looks like this:
# Show the group-average image; the smoothed central peak should be visible.
fig, ax = plt.subplots()
ax.imshow(X.mean(0), cmap='inferno')
ax.set(xticks=[], yticks=[], title="Data averaged over subjects")
###############################################################################
# In this case, a null hypothesis we could test for each voxel is:
#
# There is no difference between the mean value and zero
# (:math:`H_0 \colon \mu = 0`).
#
# The alternative hypothesis, then, is that the voxel has a non-zero mean
# (:math:`H_1 \colon \mu \neq 0`).
# This is a *two-tailed* test because the mean could be less than
# or greater than zero, whereas a *one-tailed* test would test only one of
# these possibilities, i.e. :math:`H_1 \colon \mu \geq 0` or
# :math:`H_1 \colon \mu \leq 0`.
#
# .. note:: Here we will refer to each spatial location as a "voxel".
# In general, though, it could be any sort of data value,
# including cortical vertex at a specific time, pixel in a
# time-frequency decomposition, etc.
#
# Parametric tests
# ^^^^^^^^^^^^^^^^
# Let's start with a **paired t-test**, which is a standard test
# for differences in paired samples. Mathematically, it is equivalent
# to a 1-sample t-test on the difference between the samples in each condition.
# The paired t-test is **parametric**
# because it assumes that the underlying sample distribution is Gaussian, and
# is only valid in this case. This happens to be satisfied by our toy dataset,
# but is not always satisfied for neuroimaging data.
#
# In the context of our toy dataset, which has many voxels
# (:math:`40 \cdot 40 = 1600`), applying the paired t-test is called a
# *mass-univariate* approach as it treats each voxel independently.
# Mass-univariate parametric test: an independent 1-sample t-test per voxel.
# ts/ps/titles/mccs accumulate every method's results for the final figures.
titles = ['t']
out = stats.ttest_1samp(X, 0, axis=0)
ts = [out[0]]  # t statistic per voxel
ps = [out[1]]  # two-tailed p-value per voxel
mccs = [False]  # these are not multiple-comparisons corrected
def plot_t_p(t, p, title, mcc, axes=None):
    """Render a t-map (3D surface) and a p-map (image) side by side.

    Parameters
    ----------
    t : array, shape (n_src,)
        Flattened t-values; reshaped to (width, width) for display.
    p : array, shape (n_src,)
        Flattened p-values; shown as -log10(p), clipped at 1e-5.
    title : str
        Figure (or axes) title for the method being shown.
    mcc : bool
        If True the title is bolded, marking the method as
        multiple-comparisons corrected.
    axes : list of 2 axes | None
        When None, a new standalone figure is created and shown; otherwise
        the plots are drawn into the given axes (for composite figures).

    Notes
    -----
    Reads the module-level globals ``width`` and ``n_subjects``.
    """
    if axes is None:
        fig = plt.figure(figsize=(6, 3))
        axes = [fig.add_subplot(121, projection='3d'), fig.add_subplot(122)]
        show = True
    else:
        show = False
    # Shared color limits so every method's maps are visually comparable.
    p_lims = [0.1, 0.001]
    t_lims = -stats.distributions.t.ppf(p_lims, n_subjects - 1)
    p_lims = [-np.log10(p) for p in p_lims]
    # t plot
    x, y = np.mgrid[0:width, 0:width]
    surf = axes[0].plot_surface(x, y, np.reshape(t, (width, width)),
                                rstride=1, cstride=1, linewidth=0,
                                vmin=t_lims[0], vmax=t_lims[1], cmap='viridis')
    axes[0].set(xticks=[], yticks=[], zticks=[],
                xlim=[0, width - 1], ylim=[0, width - 1])
    axes[0].view_init(30, 15)
    cbar = plt.colorbar(ax=axes[0], shrink=0.75, orientation='horizontal',
                        fraction=0.1, pad=0.025, mappable=surf)
    cbar.set_ticks(t_lims)
    cbar.set_ticklabels(['%0.1f' % t_lim for t_lim in t_lims])
    cbar.set_label('t-value')
    cbar.ax.get_xaxis().set_label_coords(0.5, -0.3)
    if not show:
        axes[0].set(title=title)
        if mcc:
            axes[0].title.set_weight('bold')
    # p plot (on a -log10 scale, so bigger means more significant)
    use_p = -np.log10(np.reshape(np.maximum(p, 1e-5), (width, width)))
    img = axes[1].imshow(use_p, cmap='inferno', vmin=p_lims[0], vmax=p_lims[1],
                         interpolation='nearest')
    axes[1].set(xticks=[], yticks=[])
    cbar = plt.colorbar(ax=axes[1], shrink=0.75, orientation='horizontal',
                        fraction=0.1, pad=0.025, mappable=img)
    cbar.set_ticks(p_lims)
    cbar.set_ticklabels(['%0.1f' % p_lim for p_lim in p_lims])
    cbar.set_label(r'$-\log_{10}(p)$')
    cbar.ax.get_xaxis().set_label_coords(0.5, -0.3)
    if show:
        text = fig.suptitle(title)
        if mcc:
            text.set_weight('bold')
        plt.subplots_adjust(0, 0.05, 1, 0.9, wspace=0, hspace=0)
        mne.viz.utils.plt_show()
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# "Hat" variance adjustment
# ~~~~~~~~~~~~~~~~~~~~~~~~~
# The "hat" technique regularizes the variance values used in the t-test
# calculation [1]_ to compensate for implausibly small variances.
# "Hat"-adjusted t-test: regularizes implausibly small variances via sigma.
ts.append(ttest_1samp_no_p(X, sigma=sigma))
# ttest_1samp_no_p returns only t; derive the two-tailed p from the t CDF.
ps.append(stats.distributions.t.sf(np.abs(ts[-1]), len(X) - 1) * 2)
titles.append(r'$\mathrm{t_{hat}}$')
mccs.append(False)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# Non-parametric tests
# ^^^^^^^^^^^^^^^^^^^^
# Instead of assuming an underlying Gaussian distribution, we could instead
# use a **non-parametric resampling** method. In the case of a paired t-test
# between two conditions A and B, which is mathematically equivalent to a
# one-sample t-test between the difference in the conditions A-B, under the
# null hypothesis we have the principle of **exchangeability**. This means
# that, if the null is true, we can exchange conditions and not change
# the distribution of the test statistic.
#
# When using a paired t-test, exchangeability thus means that we can flip the
# signs of the difference between A and B. Therefore, we can construct the
# **null distribution** values for each voxel by taking random subsets of
# samples (subjects), flipping the sign of their difference, and recording the
# absolute value of the resulting statistic (we record the absolute value
# because we conduct a two-tailed test). The absolute value of the statistic
# evaluated on the veridical data can then be compared to this distribution,
# and the p-value is simply the proportion of null distribution values that
# are smaller.
#
# .. warning:: In the case of a true one-sample t-test, i.e. analyzing a single
# condition rather than the difference between two conditions,
# it is not clear where/how exchangeability applies; see
# `this FieldTrip discussion <ft_exch_>`_.
#
# In the case where ``n_permutations`` is large enough (or "all") so
# that the complete set of unique resampling exchanges can be done
# (which is :math:`2^{N_{samp}}-1` for a one-tailed and
# :math:`2^{N_{samp}-1}-1` for a two-tailed test, not counting the
# veridical distribution), instead of randomly exchanging conditions
# the null is formed from using all possible exchanges. This is known
# as a permutation test (or exact test).
# Here we have to do a bit of gymnastics to get our function to do
# a permutation test without correcting for multiple comparisons:
X.shape = (n_subjects, n_src)  # flatten the array for simplicity
titles.append('Permutation')
ts.append(np.zeros(width * width))
ps.append(np.zeros(width * width))
mccs.append(False)
# Run the permutation test one voxel at a time so NO multiple-comparisons
# correction is applied (permutation_t_test on the full array would take the
# max statistic across voxels).
for ii in range(n_src):
    ts[-1][ii], ps[-1][ii] = permutation_t_test(X[:, [ii]], verbose=False)[:2]
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# Multiple comparisons
# --------------------
# So far, we have done no correction for multiple comparisons. This is
# potentially problematic for these data because there are
# :math:`40 \cdot 40 = 1600` tests being performed. If we use a threshold
# p < 0.05 for each individual test, we would expect many voxels to be declared
# significant even if there were no true effect. In other words, we would make
# many **type I errors** (adapted from `here <errors_>`_):
#
# .. rst-class:: skinnytable
#
# +----------+--------+------------------+------------------+
# | | Null hypothesis |
# | +------------------+------------------+
# | | True | False |
# +==========+========+==================+==================+
# | | | Type I error | Correct |
# | | Yes | False positive | True positive |
# + Reject +--------+------------------+------------------+
# | | | Correct | Type II error |
# | | No | True Negative | False negative |
# +----------+--------+------------------+------------------+
#
# To see why, consider a standard :math:`\alpha = 0.05`.
# For a single test, our probability of making a type I error is 0.05.
# The probability of making at least one type I error in
# :math:`N_{\mathrm{test}}` independent tests is then given by
# :math:`1 - (1 - \alpha)^{N_{\mathrm{test}}}`:
# Probability of >= 1 type I error across N independent tests at alpha=0.05:
# 1 - (1 - alpha) ** N, which approaches 1 quickly as N grows.
N = np.arange(1, 80)
alpha = 0.05
p_type_I = 1 - (1 - alpha) ** N
fig, ax = plt.subplots(figsize=(4, 3))
ax.scatter(N, p_type_I, 3)
ax.set(xlim=N[[0, -1]], ylim=[0, 1], xlabel=r'$N_{\mathrm{test}}$',
       ylabel=u'Probability of at least\none type I error')
ax.grid(True)
fig.tight_layout()
fig.show()
###############################################################################
# To combat this problem, several methods exist. Typically these
# provide control over either one of the following two measures:
#
# 1. `Familywise error rate (FWER) <fwer_>`_
# The probability of making one or more type I errors:
#
# .. math::
# \mathrm{P}(N_{\mathrm{type\ I}} >= 1 \mid H_0)
#
# 2. `False discovery rate (FDR) <fdr_>`_
# The expected proportion of rejected null hypotheses that are
# actually true:
#
# .. math::
# \mathrm{E}(\frac{N_{\mathrm{type\ I}}}{N_{\mathrm{reject}}}
# \mid N_{\mathrm{reject}} > 0) \cdot
# \mathrm{P}(N_{\mathrm{reject}} > 0 \mid H_0)
#
# We cover some techniques that control FWER and FDR below.
#
# Bonferroni correction
# ^^^^^^^^^^^^^^^^^^^^^
# Perhaps the simplest way to deal with multiple comparisons, `Bonferroni
# correction <https://en.wikipedia.org/wiki/Bonferroni_correction>`__
# conservatively multiplies the p-values by the number of comparisons to
# control the FWER.
titles.append('Bonferroni')
ts.append(ts[-1])  # t-values are unchanged; only the p-values are corrected
ps.append(bonferroni_correction(ps[0])[1])  # correct the uncorrected ps[0]
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# False discovery rate (FDR) correction
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# Typically FDR is performed with the Benjamini-Hochberg procedure, which
# is less restrictive than Bonferroni correction for large numbers of
# comparisons (fewer type II errors), but provides less strict control of type
# I errors.
titles.append('FDR')
ts.append(ts[-1])  # t-values unchanged; Benjamini-Hochberg adjusts only ps
ps.append(fdr_correction(ps[0])[1])
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# Non-parametric resampling test with a maximum statistic
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# **Non-parametric resampling tests** can also be used to correct for multiple
# comparisons. In its simplest form, we again do permutations using
# exchangeability under the null hypothesis, but this time we take the
# *maximum statistic across all voxels* in each permutation to form the
# null distribution. The p-value for each voxel from the veridical data
# is then given by the proportion of null distribution values
# that were smaller.
#
# This method has two important features:
#
# 1. It controls FWER.
# 2. It is non-parametric. Even though our initial test statistic
# (here a 1-sample t-test) is parametric, the null
# distribution for the null hypothesis rejection (the mean value across
# subjects is indistinguishable from zero) is obtained by permutations.
# This means that it makes no assumptions of Gaussianity
# (which do hold for this example, but do not in general for some types
# of processed neuroimaging data).
titles.append(r'$\mathbf{Perm_{max}}$')
# Whole-array permutation test: the max statistic across voxels builds the
# null distribution, which controls FWER non-parametrically.
out = permutation_t_test(X, verbose=False)[:2]
ts.append(out[0])
ps.append(out[1])
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# Clustering
# ^^^^^^^^^^
# Each of the aforementioned multiple comparisons corrections have the
# disadvantage of not fully incorporating the correlation structure of the
# data, namely that points close to one another (e.g., in space or time) tend
# to be correlated. However, by defining the connectivity/adjacency/neighbor
# structure in our data, we can use **clustering** to compensate.
#
# To use this, we need to rethink our null hypothesis. Instead
# of thinking about a null hypothesis about means per voxel (with one
# independent test per voxel), we consider a null hypothesis about sizes
# of clusters in our data, which could be stated like:
#
# The distribution of spatial cluster sizes observed in two experimental
# conditions are drawn from the same probability distribution.
#
# Here we only have a single condition and we contrast to zero, which can
# be thought of as:
#
# The distribution of spatial cluster sizes is independent of the sign
# of the data.
#
# In this case, we again do permutations with a maximum statistic, but, under
# each permutation, we:
#
# 1. Compute the test statistic for each voxel individually.
# 2. Threshold the test statistic values.
# 3. Cluster voxels that exceed this threshold (with the same sign) based on
# adjacency.
# 4. Retain the size of the largest cluster (measured, e.g., by a simple voxel
# count, or by the sum of voxel t-values within the cluster) to build the
# null distribution.
#
# After doing these permutations, the cluster sizes in our veridical data
# are compared to this null distribution. The p-value associated with each
# cluster is again given by the proportion of smaller null distribution
# values. This can then be subjected to a standard p-value threshold
# (e.g., p < 0.05) to reject the null hypothesis (i.e., find an effect of
# interest).
#
# This reframing to consider *cluster sizes* rather than *individual means*
# maintains the advantages of the standard non-parametric permutation
# test -- namely controlling FWER and making no assumptions of parametric
# data distribution.
# Critically, though, it also accounts for the correlation structure in the
# data -- which in this toy case is spatial but in general can be
# multidimensional (e.g., spatio-temporal) -- because the null distribution
# will be derived from data in a way that preserves these correlations.
#
# .. sidebar:: Effect size
#
# For a nice description of how to compute the effect size obtained
# in a cluster test, see this
# `FieldTrip mailing list discussion <ft_cluster_effect_size_>`_.
#
# However, there is a drawback. If a cluster significantly deviates from
# the null, no further inference on the cluster (e.g., peak location) can be
# made, as the entire cluster as a whole is used to reject the null.
# Moreover, because the test statistic concerns the full data, the null
# hypothesis (and our rejection of it) refers to the structure of the full
# data. For more information, see also the comprehensive
# `FieldTrip tutorial <ft_cluster_>`_.
#
# Defining the connectivity/neighbor/adjacency matrix
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# First we need to define our connectivity/neighbor/adjacency matrix.
# This is a square array (or sparse matrix) of shape ``(n_src, n_src)`` that
# contains zeros and ones to define which spatial points are connected, i.e.,
# which voxels are adjacent to each other. In our case this
# is quite simple, as our data are aligned on a rectangular grid.
#
# Let's pretend that our data were smaller -- a 3 x 3 grid. Thinking about
# each voxel as being connected to the other voxels it touches, we would
# need a 9 x 9 connectivity matrix. The first row of this matrix contains the
# voxels in the flattened data that the first voxel touches. Since it touches
# the second element in the first row and the first element in the second row
# (and is also a neighbor to itself), this would be::
#
# [1, 1, 0, 1, 0, 0, 0, 0, 0]
#
# :mod:`sklearn.feature_extraction` provides a convenient function for this:
# Build a toy 3 x 3 grid adjacency matrix to illustrate its structure.
from sklearn.feature_extraction.image import grid_to_graph # noqa: E402
mini_connectivity = grid_to_graph(3, 3).toarray()
# 9 flattened voxels -> 9 x 9 adjacency (each voxel is adjacent to itself).
assert mini_connectivity.shape == (9, 9)
print(mini_connectivity[0])
###############################################################################
# In general the connectivity between voxels can be more complex, such as
# those between sensors in 3D space, or time-varying activation at brain
# vertices on a cortical surface. MNE provides several convenience functions
# for computing connectivity/neighbor/adjacency matrices (see the
# :ref:`Statistics API <api_reference_statistics>`).
#
# Standard clustering
# ~~~~~~~~~~~~~~~~~~~
# Here, since our data are on a grid, we can use ``connectivity=None`` to
# trigger optimized grid-based code, and run the clustering algorithm.
titles.append('Clustering')
# Reshape data to what is equivalent to (n_samples, n_space, n_time)
X.shape = (n_subjects, width, width)
# Compute threshold from t distribution (this is also the default)
threshold = stats.distributions.t.ppf(1 - alpha, n_subjects - 1)
t_clust, clusters, p_values, H0 = permutation_cluster_1samp_test(
    X, n_jobs=1, threshold=threshold, connectivity=None,
    n_permutations=n_permutations)
# Put the cluster data in a viewable format
# (assign each cluster's p-value to every voxel belonging to that cluster)
p_clust = np.ones((width, width))
for cl, p in zip(clusters, p_values):
    p_clust[cl] = p
ts.append(t_clust)
ps.append(p_clust)
# True marks this method as multiple-comparisons corrected in the figure.
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# "Hat" variance adjustment
# ~~~~~~~~~~~~~~~~~~~~~~~~~
# This method can also be used in this context to correct for small
# variances [1]_:
titles.append(r'$\mathbf{C_{hat}}$')
# t-statistic with "hat" variance regularization (see reference [1]).
stat_fun_hat = partial(ttest_1samp_no_p, sigma=sigma)
t_hat, clusters, p_values, H0 = permutation_cluster_1samp_test(
    X, n_jobs=1, threshold=threshold, connectivity=None,
    n_permutations=n_permutations, stat_fun=stat_fun_hat, buffer_size=None)
# Assign each cluster's p-value to all voxels in the cluster.
p_hat = np.ones((width, width))
for cl, p in zip(clusters, p_values):
    p_hat[cl] = p
ts.append(t_hat)
ps.append(p_hat)
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# .. _tfce_example:
#
# Threshold-free cluster enhancement (TFCE)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# TFCE eliminates the free parameter initial ``threshold`` value that
# determines which points are included in clustering by approximating
# a continuous integration across possible threshold values with a standard
# `Riemann sum <https://en.wikipedia.org/wiki/Riemann_sum>`__ [2]_.
# This requires giving a starting threshold ``start`` and a step
# size ``step``, which in MNE is supplied as a dict.
# The smaller the ``step`` and closer to 0 the ``start`` value,
# the better the approximation, but the longer it takes.
#
# A significant advantage of TFCE is that, rather than modifying the
# statistical null hypothesis under test (from one about individual voxels
# to one about the distribution of clusters in the data), it modifies the *data
# under test* while still controlling for multiple comparisons.
# The statistical test is then done at the level of individual voxels rather
# than clusters. This allows for evaluation of each point
# independently for significance rather than only as cluster groups.
titles.append(r'$\mathbf{C_{TFCE}}$')
# TFCE approximates an integral over thresholds from ``start`` in
# increments of ``step`` (smaller step = better approximation, slower).
threshold_tfce = dict(start=0, step=0.2)
t_tfce, _, p_tfce, H0 = permutation_cluster_1samp_test(
    X, n_jobs=1, threshold=threshold_tfce, connectivity=None,
    n_permutations=n_permutations)
# TFCE yields a p-value per voxel, so no per-cluster reshaping is needed.
ts.append(t_tfce)
ps.append(p_tfce)
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# We can also combine TFCE and the "hat" correction:
titles.append(r'$\mathbf{C_{hat,TFCE}}$')
# Same TFCE threshold dict, but with the "hat"-adjusted t statistic.
t_tfce_hat, _, p_tfce_hat, H0 = permutation_cluster_1samp_test(
    X, n_jobs=1, threshold=threshold_tfce, connectivity=None,
    n_permutations=n_permutations, stat_fun=stat_fun_hat, buffer_size=None)
ts.append(t_tfce_hat)
ps.append(p_tfce_hat)
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# Visualize and compare methods
# -----------------------------
# Let's take a look at these statistics. The top row shows each test statistic,
# and the bottom shows p-values for various statistical tests, with the ones
# having proper control over FWER or FDR marked by bold titles.
# One column per method: 3D t-statistic surface on top, p-value map below.
fig = plt.figure(facecolor='w', figsize=(14, 3))
assert len(ts) == len(titles) == len(ps)
for ii in range(len(ts)):
    ax = [fig.add_subplot(2, 10, ii + 1, projection='3d'),
          fig.add_subplot(2, 10, 11 + ii)]
    plot_t_p(ts[ii], ps[ii], titles[ii], mccs[ii], ax)
fig.tight_layout(pad=0, w_pad=0.05, h_pad=0.1)
plt.show()
###############################################################################
# The first three columns show the parametric and non-parametric statistics
# that are not corrected for multiple comparisons:
#
# - Mass univariate **t-tests** result in jagged edges.
# - **"Hat" variance correction** of the t-tests produces less peaky edges,
# correcting for sharpness in the statistic driven by low-variance voxels.
# - **Non-parametric resampling tests** are very similar to t-tests. This is to
# be expected: the data are drawn from a Gaussian distribution, and thus
# satisfy parametric assumptions.
#
# The next three columns show multiple comparison corrections of the
# mass univariate tests (parametric and non-parametric). These correct
# too conservatively for multiple comparisons, because neighboring
# voxels in our data are correlated:
#
# - **Bonferroni correction** eliminates any significant activity.
# - **FDR correction** is less conservative than Bonferroni.
# - A **permutation test with a maximum statistic** also eliminates any
# significant activity.
#
# The final four columns show the non-parametric cluster-based permutation
# tests with a maximum statistic:
#
# - **Standard clustering** identifies the correct region. However, the whole
# area must be declared significant, so no peak analysis can be done.
# Also, the peak is broad.
# - **Clustering with "hat" variance adjustment** tightens the estimate of
# significant activity.
# - **Clustering with TFCE** allows analyzing each significant point
# independently, but still has a broadened estimate.
# - **Clustering with TFCE and "hat" variance adjustment** tightens the area
# declared significant (again FWER corrected).
#
# Statistical functions in MNE
# ----------------------------
# The complete listing of statistical functions provided by MNE are in
# the :ref:`Statistics API list <api_reference_statistics>`, but we will give
# a brief overview here.
#
# MNE provides several convenience parametric testing functions that can be
# used in conjunction with the non-parametric clustering methods. However,
# the set of functions we provide is not meant to be exhaustive.
#
# If the univariate statistical contrast of interest is not listed here
# (e.g., interaction term in an unbalanced ANOVA), consider checking out the
# :mod:`statsmodels` package. It offers many functions for computing
# statistical contrasts, e.g., :func:`statsmodels.stats.anova.anova_lm`.
# To use these functions in clustering:
#
# 1. Determine which test statistic (e.g., t-value, F-value) you would use
# in a univariate context to compute your contrast of interest. In other
# words, if there were only a single output such as reaction times, what
# test statistic might you compute on the data?
# 2. Wrap the call to that function within a function that takes an input of
# the same shape that is expected by your clustering function,
# and returns an array of the same shape without the "samples" dimension
# (e.g., :func:`mne.stats.permutation_cluster_1samp_test` takes an array
# of shape ``(n_samples, p, q)`` and returns an array of shape ``(p, q)``).
# 3. Pass this wrapped function to the ``stat_fun`` argument to the clustering
# function.
# 4. Set an appropriate ``threshold`` value (float or dict) based on the
# values your statistical contrast function returns.
#
# Parametric methods provided by MNE
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# - :func:`mne.stats.ttest_1samp_no_p`
# Paired t-test, optionally with hat adjustment.
# This is used by default for contrast enhancement in paired cluster tests.
#
# - :func:`mne.stats.f_oneway`
# One-way ANOVA for independent samples.
# This can be used to compute various F-contrasts. It is used by default
# for contrast enhancement in non-paired cluster tests.
#
# - :func:`mne.stats.f_mway_rm`
# M-way ANOVA for repeated measures and balanced designs.
# This returns F-statistics and p-values. The associated helper function
# :func:`mne.stats.f_threshold_mway_rm` can be used to determine the
# F-threshold at a given significance level.
#
# - :func:`mne.stats.linear_regression`
# Compute ordinary least square regressions on multiple targets, e.g.,
# sensors, time points across trials (samples).
# For each regressor it returns the beta value, t-statistic, and
# uncorrected p-value. While it can be used as a test, it is
# particularly useful to compute weighted averages or deal with
# continuous predictors.
#
# Non-parametric methods
# ^^^^^^^^^^^^^^^^^^^^^^
#
# - :func:`mne.stats.permutation_cluster_test`
# Unpaired contrasts with connectivity.
#
# - :func:`mne.stats.spatio_temporal_cluster_test`
# Unpaired contrasts with spatio-temporal connectivity.
#
# - :func:`mne.stats.permutation_t_test`
# Paired contrast with no connectivity.
#
# - :func:`mne.stats.permutation_cluster_1samp_test`
# Paired contrasts with connectivity.
#
# - :func:`mne.stats.spatio_temporal_cluster_1samp_test`
# Paired contrasts with spatio-temporal connectivity.
#
# .. warning:: In most MNE functions, data has shape
# ``(..., n_space, n_time)``, where the spatial dimension can
# be e.g. sensors or source vertices. But for our spatio-temporal
# clustering functions, the spatial dimensions need to be **last**
# for computational efficiency reasons. For example, for
# :func:`mne.stats.spatio_temporal_cluster_1samp_test`, ``X``
# needs to be of shape ``(n_samples, n_time, n_space)``. You can
# use :func:`numpy.transpose` to transpose axes if necessary.
#
# References
# ----------
# .. [1] Ridgway et al. 2012, "The problem of low variance voxels in
# statistical parametric mapping; a new hat avoids a 'haircut'",
# NeuroImage. 2012 Feb 1;59(3):2131-41.
#
# .. [2] Smith and Nichols 2009, "Threshold-free cluster enhancement:
# addressing problems of smoothing, threshold dependence, and
# localisation in cluster inference", NeuroImage 44 (2009) 83-98.
#
# .. include:: ../../links.inc
| mne-tools/mne-tools.github.io | 0.20/_downloads/64111b11ecb97eff546771c32aa6b7ff/plot_background_statistics.py | Python | bsd-3-clause | 29,518 | [
"Gaussian"
] | 34d00dac1db716755d75d71ab1b53d7c54d130a186e1925c75ce67dfead01bb7 |
import logging
import os
from pulsar.managers.util.pykube_util import (
ensure_pykube,
find_job_object_by_name,
find_pod_object_by_name,
galaxy_instance_id,
Job,
job_object_dict,
produce_unique_k8s_job_name,
pull_policy,
pykube_client_from_dict,
stop_job,
)
from .action_mapper import (
actions,
path_type,
)
from .amqp_exchange import ACK_FORCE_NOACK_KEY
from .decorators import parseJson
from .decorators import retry
from .destination import submit_params
from .job_directory import RemoteJobDirectory
from .setup_handler import build as build_setup_handler
from .util import copy
from .util import ensure_directory
from .util import json_dumps
from .util import json_loads
from .util import to_base64_json
log = logging.getLogger(__name__)
CACHE_WAIT_SECONDS = 3
TOOL_EXECUTION_CONTAINER_COMMAND_TEMPLATE = """
path='%s/command_line';
while [ ! -e $path ];
do sleep 1; echo "waiting for job script $path";
done;
echo 'running script';
sh $path;
echo 'ran script'"""
class OutputNotFoundException(Exception):
    """Raised when a requested output cannot be located on the remote Pulsar."""

    def __init__(self, path):
        # Remember the offending path so callers can report it.
        self.path = path

    def __str__(self):
        return f"No remote output found for path {self.path}"
class BaseJobClient:
    """Shared state and setup logic for all Pulsar job client flavors.

    Parses common destination parameters (SSH credentials, env entries,
    file staging actions) and resolves the remote job directory, when one
    can be derived, for a given job id.
    """

    def __init__(self, destination_params, job_id):
        destination_params = destination_params or {}
        self.destination_params = destination_params
        self.assign_job_id(job_id)
        # SSH connection attributes default to None when not configured.
        for attr in ["ssh_key", "ssh_user", "ssh_host", "ssh_port"]:
            setattr(self, attr, destination_params.get(attr, None))
        self.env = destination_params.get("env", [])
        self.files_endpoint = destination_params.get("files_endpoint", None)
        default_file_action = self.destination_params.get("default_file_action", "transfer")
        if default_file_action not in actions:
            raise Exception("Unknown Pulsar default file action type %s" % default_file_action)
        self.default_file_action = default_file_action
        # Either an explicit action config file path or an inline mapping is
        # used -- never both.
        self.action_config_path = self.destination_params.get("file_action_config", None)
        if self.action_config_path is None:
            self.file_actions = self.destination_params.get("file_actions", {})
        else:
            self.file_actions = None
        self.setup_handler = build_setup_handler(self, destination_params)

    def assign_job_id(self, job_id):
        # (Re)bind the job id and recompute the remote job directory from it.
        self.job_id = job_id
        self._set_job_directory()

    def _set_job_directory(self):
        # A remote job directory can only be derived when the destination
        # declares where jobs are staged on the Pulsar side.
        if "jobs_directory" in self.destination_params:
            staging_directory = self.destination_params["jobs_directory"]
            sep = self.destination_params.get("remote_sep", os.sep)
            job_directory = RemoteJobDirectory(
                remote_staging_directory=staging_directory,
                remote_id=self.job_id,
                remote_sep=sep,
            )
        else:
            job_directory = None
        self.job_directory = job_directory

    def setup(self, tool_id=None, tool_version=None, preserve_galaxy_python_environment=None):
        """
        Setup remote Pulsar server to run this job.
        """
        setup_args = {"job_id": self.job_id}
        if tool_id:
            setup_args["tool_id"] = tool_id
        if tool_version:
            setup_args["tool_version"] = tool_version
        if preserve_galaxy_python_environment:
            setup_args["preserve_galaxy_python_environment"] = preserve_galaxy_python_environment
        return self.setup_handler.setup(**setup_args)

    @property
    def prefer_local_staging(self):
        # If a remote job directory is defined, paths are calculated remotely;
        # otherwise calculate paths here and stage remotely.
        return self.job_directory is None
class JobClient(BaseJobClient):
    """
    Objects of this client class perform low-level communication with a remote Pulsar server.

    **Parameters**

    destination_params : dict or str
        connection parameters, either url with dict containing url (and optionally `private_token`).
    job_id : str
        Galaxy job/task id.
    """

    def __init__(self, destination_params, job_id, job_manager_interface):
        super().__init__(destination_params, job_id)
        # Transport abstraction used by _raw_execute() for all remote calls.
        self.job_manager_interface = job_manager_interface

    def launch(self, command_line, dependencies_description=None, env=None, remote_staging=None, job_config=None, dynamic_file_sources=None):
        """
        Queue up the execution of the supplied `command_line` on the remote
        server. Called launch for historical reasons, should be renamed to
        enqueue or something like that.

        **Parameters**

        command_line : str
            Command to execute.
        """
        launch_params = dict(command_line=command_line, job_id=self.job_id)
        submit_params_dict = submit_params(self.destination_params)
        if submit_params_dict:
            launch_params['params'] = json_dumps(submit_params_dict)
        if dependencies_description:
            launch_params['dependencies_description'] = json_dumps(dependencies_description.to_dict())
        if env:
            launch_params['env'] = json_dumps(env)
        if remote_staging:
            launch_params['remote_staging'] = json_dumps(remote_staging)
        if job_config and 'touch_outputs' in job_config:
            # message clients pass the entire job config
            launch_params['submit_extras'] = json_dumps({'touch_outputs': job_config['touch_outputs']})
        if job_config and self.setup_handler.local:
            # Setup not yet called, job properties were inferred from
            # destination arguments. Hence, must have Pulsar setup job
            # before queueing.
            setup_params = _setup_params_from_job_config(job_config)
            launch_params['setup_params'] = json_dumps(setup_params)
        if dynamic_file_sources is not None:
            launch_params["dynamic_file_sources"] = json_dumps(dynamic_file_sources)
        return self._raw_execute("submit", launch_params)

    def full_status(self):
        """ Return a dictionary summarizing final state of job.
        """
        return self.raw_check_complete()

    def kill(self):
        """
        Cancel remote job, either removing from the queue or killing it.
        """
        return self._raw_execute("cancel", {"job_id": self.job_id})

    @retry()
    @parseJson()
    def raw_check_complete(self):
        """
        Get check_complete response from the remote server.
        """
        check_complete_response = self._raw_execute("status", {"job_id": self.job_id})
        return check_complete_response

    def get_status(self):
        """Return the remote job status string (may be ``None``)."""
        check_complete_response = self.raw_check_complete()
        # Older Pulsar instances won't set status so use 'complete', at some
        # point drop backward compatibility.
        status = check_complete_response.get("status", None)
        return status

    def clean(self):
        """
        Cleanup the remote job.
        """
        self._raw_execute("clean", {"job_id": self.job_id})

    @parseJson()
    def remote_setup(self, **setup_args):
        """
        Setup remote Pulsar server to run this job.
        """
        return self._raw_execute("setup", setup_args)

    def put_file(self, path, input_type, name=None, contents=None, action_type='transfer'):
        """Stage a single input file to the remote Pulsar server.

        Either uploads the file (``transfer``/``message``) or asks Pulsar
        for the destination path and copies locally (``copy``).
        """
        if not name:
            name = os.path.basename(path)
        args = {"job_id": self.job_id, "name": name, "type": input_type}
        input_path = path
        if contents:
            # Inline contents are sent directly instead of reading the path.
            input_path = None
        # action type == 'message' should either copy or transfer
        # depending on default not just fallback to transfer.
        if action_type in ['transfer', 'message']:
            if isinstance(contents, str):
                contents = contents.encode("utf-8")
            message = "Uploading path [%s] (action_type: [%s])"
            log.debug(message, path, action_type)
            return self._upload_file(args, contents, input_path)
        elif action_type == 'copy':
            path_response = self._raw_execute('path', args)
            pulsar_path = json_loads(path_response)['path']
            _copy(path, pulsar_path)
            return {'path': pulsar_path}

    def fetch_output(self, path, name, working_directory, action_type, output_type):
        """
        Fetch (transfer, copy, etc...) an output from the remote Pulsar server.

        **Parameters**

        path : str
            Local path of the dataset.
        name : str
            Remote name of file (i.e. path relative to remote staging output
            or working directory).
        working_directory : str
            Local working_directory for the job.
        action_type : str
            Where to find file on Pulsar (output_workdir or output). legacy is also
            an option in this case Pulsar is asked for location - this will only be
            used if targeting an older Pulsar server that didn't return statuses
            allowing this to be inferred.
        """
        if output_type in ['output_workdir', 'output_metadata']:
            self._populate_output_path(name, path, action_type, output_type)
        elif output_type == 'output':
            self._fetch_output(path=path, name=name, action_type=action_type)
        else:
            raise Exception("Unknown output_type %s" % output_type)

    def _raw_execute(self, command, args=None, data=None, input_path=None, output_path=None):
        # Single funnel point for all remote commands.
        if args is None:
            args = {}
        return self.job_manager_interface.execute(command, args, data, input_path, output_path)

    def _fetch_output(self, path, name=None, check_exists_remotely=False, action_type='transfer'):
        if not name:
            # Extra files will send in the path.
            name = os.path.basename(path)
        self._populate_output_path(name, path, action_type, path_type.OUTPUT)

    def _populate_output_path(self, name, output_path, action_type, path_type):
        # Ensure the local parent directory exists before download/copy.
        ensure_directory(output_path)
        if action_type == 'transfer':
            self.__raw_download_output(name, self.job_id, path_type, output_path)
        elif action_type == 'copy':
            pulsar_path = self._output_path(name, self.job_id, path_type)['path']
            _copy(pulsar_path, output_path)

    @parseJson()
    def _upload_file(self, args, contents, input_path):
        return self._raw_execute("upload_file", args, contents, input_path)

    @parseJson()
    def _output_path(self, name, job_id, output_type):
        # NOTE(review): the job_id argument is shadowed -- self.job_id is used.
        return self._raw_execute("path",
                                 {"name": name,
                                  "job_id": self.job_id,
                                  "type": output_type})

    @retry()
    def __raw_download_output(self, name, job_id, output_type, output_path):
        # NOTE(review): the job_id argument is shadowed -- self.job_id is used.
        output_params = {
            "name": name,
            "job_id": self.job_id,
            "type": output_type
        }
        self._raw_execute("download_output", output_params, output_path=output_path)

    def job_ip(self):
        """Return an entry point IP (not applicable here; always ``None``)."""
        return None
class BaseMessageJobClient(BaseJobClient):
    """Base class for clients that talk to Pulsar over a message queue.

    Unlike ``JobClient``, the final job status is pushed asynchronously and
    cached by the client manager, and a remote ``job_directory`` is required
    so files can be staged ahead of submission.
    """

    def __init__(self, destination_params, job_id, client_manager):
        super().__init__(destination_params, job_id)
        if not self.job_directory:
            error_message = "Message-queue based Pulsar client requires destination define a remote job_directory to stage files into."
            raise Exception(error_message)
        self.client_manager = client_manager

    def clean(self):
        # Drop this job's cached final status. pop() (instead of del) keeps
        # this idempotent if clean() is called twice or nothing was cached.
        self.client_manager.status_cache.pop(self.job_id, None)

    def full_status(self):
        """Return the final status dict cached for this job.

        Raises if no final status message has been received/cached yet.
        """
        full_status = self.client_manager.status_cache.get(self.job_id, None)
        if full_status is None:
            # Fixed typo in message: "cilent" -> "client".
            raise Exception("full_status() called before a final status was properly cached with client manager.")
        return full_status

    def _build_setup_message(self, command_line, dependencies_description, env, remote_staging, job_config, dynamic_file_sources):
        """Assemble the 'setup' message payload used to enqueue the job."""
        launch_params = dict(command_line=command_line, job_id=self.job_id)
        submit_params_dict = submit_params(self.destination_params)
        if submit_params_dict:
            launch_params['submit_params'] = submit_params_dict
        if dependencies_description:
            launch_params['dependencies_description'] = dependencies_description.to_dict()
        if env:
            launch_params['env'] = env
        if remote_staging:
            # The remote side needs the SSH key to pull/push staged files.
            launch_params['remote_staging'] = remote_staging
            launch_params['remote_staging']['ssh_key'] = self.ssh_key
        launch_params['dynamic_file_sources'] = dynamic_file_sources
        if job_config and self.setup_handler.local:
            # Setup not yet called, job properties were inferred from
            # destination arguments. Hence, must have Pulsar setup job
            # before queueing.
            setup_params = _setup_params_from_job_config(job_config)
            launch_params["setup_params"] = setup_params
        return launch_params

    def _build_status_request_message(self):
        # Because this is used to poll, status requests will not be resent if
        # we do not receive an acknowledgement.
        update_params = {
            'request': 'status',
            'job_id': self.job_id,
            ACK_FORCE_NOACK_KEY: True,
        }
        return update_params
class MessageJobClient(BaseMessageJobClient):
    """Pulsar client that submits and controls jobs via a message exchange."""

    def launch(self, command_line, dependencies_description=None, env=None, remote_staging=None, job_config=None, dynamic_file_sources=None):
        """Publish a setup message enqueueing ``command_line`` remotely."""
        setup_message = self._build_setup_message(
            command_line,
            dependencies_description=dependencies_description,
            env=env,
            remote_staging=remote_staging,
            job_config=job_config,
            dynamic_file_sources=dynamic_file_sources,
        )
        publish_response = self.client_manager.exchange.publish("setup", setup_message)
        log.info("Job published to setup message queue: %s", self.job_id)
        return publish_response

    def get_status(self):
        """Publish a (no-ack) status poll request for this job."""
        status_message = self._build_status_request_message()
        publish_response = self.client_manager.exchange.publish("setup", status_message)
        log.info("Job status request published to setup message queue: %s", self.job_id)
        return publish_response

    def kill(self):
        """Publish a kill message for this job."""
        self.client_manager.exchange.publish("kill", dict(job_id=self.job_id))
class MessageCLIJobClient(BaseMessageJobClient):
    """Message-queue client variant that submits over a CLI shell.

    Instead of publishing directly to the exchange, it runs the remote
    ``scripts/submit.bash`` with the base64-encoded setup message.
    """

    def __init__(self, destination_params, job_id, client_manager, shell):
        super().__init__(destination_params, job_id, client_manager)
        # Root of the remote Pulsar checkout; submit.bash lives under scripts/.
        self.remote_pulsar_path = destination_params["remote_pulsar_path"]
        self.shell = shell

    def launch(self, command_line, dependencies_description=None, env=None, remote_staging=None, job_config=None, dynamic_file_sources=None):
        """Run the remote submit script with the encoded setup message."""
        launch_params = self._build_setup_message(
            command_line,
            dependencies_description=dependencies_description,
            env=env,
            remote_staging=remote_staging,
            job_config=job_config,
            dynamic_file_sources=dynamic_file_sources,
        )
        base64_message = to_base64_json(launch_params)
        submit_command = os.path.join(self.remote_pulsar_path, "scripts", "submit.bash")
        # TODO: Allow configuration of manager, app, and ini path...
        # nohup + & detaches so the shell call returns immediately.
        self.shell.execute("nohup {} --base64 {} &".format(submit_command, base64_message))

    def kill(self):
        # TODO
        pass
class MessageCoexecutionPodJobClient(BaseMessageJobClient):
    """Message-queue client that launches each job as a Kubernetes Job.

    A Pulsar staging container and (optionally) a tool container co-execute
    in one pod, sharing an ``emptyDir`` staging volume.
    """

    def __init__(self, destination_params, job_id, client_manager):
        ensure_pykube()
        super().__init__(destination_params, job_id, client_manager)
        self.instance_id = galaxy_instance_id(destination_params)
        self.pulsar_container_image = destination_params.get("pulsar_container_image", "galaxy/pulsar-pod-staging:0.13.0")
        self._default_pull_policy = pull_policy(destination_params)

    def launch(
        self,
        command_line,
        dependencies_description=None,
        env=None,
        remote_staging=None,
        job_config=None,
        dynamic_file_sources=None,
        container_info=None,
        pulsar_app_config=None
    ):
        """Create a Kubernetes Job that stages and executes this Pulsar job.

        NOTE(review): ``pulsar_app_config`` is dereferenced unconditionally
        below, so despite the ``None`` default callers must pass a dict --
        confirm against callers.
        """
        launch_params = self._build_setup_message(
            command_line,
            dependencies_description=dependencies_description,
            env=env,
            remote_staging=remote_staging,
            job_config=job_config,
            dynamic_file_sources=dynamic_file_sources,
        )
        container = None
        guest_ports = None
        if container_info is not None:
            container = container_info.get("container_id")
            guest_ports = container_info.get("guest_ports")
        manager_name = self.client_manager.manager_name
        # With a tool container the Pulsar manager coexecutes alongside it;
        # otherwise jobs run "unqueued" in the staging container itself.
        manager_type = "coexecution" if container is not None else "unqueued"
        if "manager" not in pulsar_app_config and "managers" not in pulsar_app_config:
            pulsar_app_config["managers"] = {manager_name: {"type": manager_type}}
        elif "manager" in pulsar_app_config and manager_name != '_default_':
            log.warning(
                "'manager' set in app config but client has non-default manager '%s', this will cause communication"
                " failures, remove `manager` from app or client config to fix", manager_name)
        manager_args = []
        if manager_name != "_default_":
            manager_args = ["--manager", manager_name]
        # Dependency (e.g. conda) resolution only applies when no tool
        # container image was supplied.
        using_dependencies = container is None and dependencies_description is not None
        if using_dependencies and "dependency_resolution" not in pulsar_app_config:
            # Setup default dependency resolution for container above...
            dependency_resolution = {
                "cache": False,
                "use": True,
                "default_base_path": "/pulsar_dependencies",
                "cache_dir": "/pulsar_dependencies/_cache",
                "resolvers": [{ # TODO: add CVMFS resolution...
                    "type": "conda",
                    "auto_init": True,
                    "auto_install": True,
                    "prefix": '/pulsar_dependencies/conda',
                }, {
                    "type": "conda",
                    "auto_init": True,
                    "auto_install": True,
                    "prefix": '/pulsar_dependencies/conda',
                    "versionless": True,
                }]
            }
            pulsar_app_config["dependency_resolution"] = dependency_resolution
        base64_message = to_base64_json(launch_params)
        base64_app_conf = to_base64_json(pulsar_app_config)
        job_name = self._k8s_job_name
        params = self.destination_params
        pulsar_container_image = self.pulsar_container_image
        pulsar_container_resources = self._pulsar_container_resources(params)
        job_directory = self.job_directory
        # Shared emptyDir volume both containers mount for staging files.
        volumes = [
            {"name": "staging-directory", "emptyDir": {}},
        ]
        volume_mounts = [
            {"mountPath": "/pulsar_staging", "name": "staging-directory"},
        ]
        pulsar_container_dict = {
            "name": "pulsar-container",
            "image": pulsar_container_image,
            "command": ["pulsar-submit"],
            "args": ["--base64", base64_message, "--app_conf_base64", base64_app_conf] + manager_args,
            "workingDir": "/",
            "volumeMounts": volume_mounts,
        }
        if pulsar_container_resources:
            pulsar_container_dict["resources"] = pulsar_container_resources
        tool_container_image = container
        tool_container_resources = self._tool_container_resources(params)
        container_dicts = [pulsar_container_dict]
        if container:
            # The tool container waits until the staging container writes the
            # job script, then runs it (see command template at module level).
            command = TOOL_EXECUTION_CONTAINER_COMMAND_TEMPLATE % job_directory.job_directory
            tool_container_spec = {
                "name": "tool-container",
                "image": tool_container_image,
                "command": ["sh"],
                "args": ["-c", command],
                "workingDir": "/",
                "volumeMounts": volume_mounts,
            }
            if tool_container_resources:
                tool_container_spec["resources"] = tool_container_resources
            if guest_ports:
                tool_container_spec["ports"] = [{"containerPort": int(p)} for p in guest_ports]
            container_dicts.append(tool_container_spec)
        for container_dict in container_dicts:
            if self._default_pull_policy:
                container_dict["imagePullPolicy"] = self._default_pull_policy
        template = {
            "metadata": {
                "labels": {"app": job_name},
            },
            "spec": {
                "volumes": volumes,
                "restartPolicy": "Never",
                "containers": container_dicts,
            }
        }
        spec = {"template": template}
        if "k8s_walltime_limit" in params:
            spec["activeDeadlineSeconds"] = int(params["k8s_walltime_limit"])
        k8s_job_obj = job_object_dict(params, job_name, spec)
        pykube_client = self._pykube_client
        job = Job(pykube_client, k8s_job_obj)
        job.create()

    def kill(self):
        """Delete the underlying Kubernetes job if it can still be found."""
        job_name = self._k8s_job_name
        pykube_client = self._pykube_client
        job = find_job_object_by_name(pykube_client, job_name)
        if job:
            log.info("Kill k8s job with name %s" % job_name)
            stop_job(job)
        else:
            log.info("Attempted to kill k8s job but it is unavailable.")

    def job_ip(self):
        """Return this job's pod IP, or ``None`` when not (yet) available."""
        job_name = self._k8s_job_name
        pykube_client = self._pykube_client
        pod = find_pod_object_by_name(pykube_client, job_name)
        if pod:
            status = pod.obj['status']
        else:
            status = {}
        if 'podIP' in status:
            pod_ip = status['podIP']
            return pod_ip
        else:
            # Implicitly returns None when the pod or its IP is unknown.
            log.debug("Attempted to get ports dict but k8s pod unavailable")

    @property
    def _pykube_client(self):
        # Build a fresh pykube client from the destination parameters.
        return pykube_client_from_dict(self.destination_params)

    @property
    def _k8s_job_name(self):
        # Unique, instance-scoped Kubernetes job name for this Pulsar job.
        job_id = self.job_id
        job_name = produce_unique_k8s_job_name(app_prefix="pulsar", job_id=job_id, instance_id=self.instance_id)
        return job_name

    def _pulsar_container_resources(self, params):
        # Resource requests/limits for the staging (Pulsar) container.
        return self._container_resources(params, container='pulsar')

    def _tool_container_resources(self, params):
        # Resource requests/limits for the tool container.
        return self._container_resources(params, container='tool')

    def _container_resources(self, params, container=None):
        """Build a k8s ``resources`` dict from destination params.

        Generic keys (e.g. ``requests_cpu``) apply to all containers;
        container-prefixed keys (e.g. ``pulsar_requests_cpu``) override them.
        """
        resources = {}
        for resource_param in ('requests_cpu', 'requests_memory', 'limits_cpu', 'limits_memory'):
            subkey, resource = resource_param.split('_', 1)
            if resource_param in params:
                if subkey not in resources:
                    resources[subkey] = {}
                resources[subkey][resource] = params[resource_param]
            if container is not None and container + '_' + resource_param in params:
                if subkey not in resources:
                    resources[subkey] = {}
                resources[subkey][resource] = params[container + '_' + resource_param]
        return resources
class InputCachingJobClient(JobClient):
    """
    Beta client that caches staged files to prevent duplication.
    """

    def __init__(self, destination_params, job_id, job_manager_interface, client_cacher):
        super().__init__(destination_params, job_id, job_manager_interface)
        self.client_cacher = client_cacher

    @parseJson()
    def _upload_file(self, args, contents, input_path):
        action = "upload_file"
        if contents:
            # Inline contents bypass the cache entirely.
            input_path = None
            return self._raw_execute(action, args, contents, input_path)
        else:
            # Queue a cache transfer if needed, then wait (polling in 30s
            # intervals) until it is ready and submit only the cache token
            # instead of re-sending the file data.
            event_holder = self.client_cacher.acquire_event(input_path)
            cache_required = self.cache_required(input_path)
            if cache_required:
                self.client_cacher.queue_transfer(self, input_path)
            while not event_holder.failed:
                available = self.file_available(input_path)
                if available['ready']:
                    token = available['token']
                    args["cache_token"] = token
                    return self._raw_execute(action, args)
                event_holder.event.wait(30)
            if event_holder.failed:
                raise Exception("Failed to transfer file %s" % input_path)

    @parseJson()
    def cache_required(self, path):
        # Ask the server whether this path still needs to be cached.
        return self._raw_execute("cache_required", {"path": path})

    @parseJson()
    def cache_insert(self, path):
        # Upload the file at ``path`` into the server-side cache.
        return self._raw_execute("cache_insert", {"path": path}, None, path)

    @parseJson()
    def file_available(self, path):
        # Query cache status; response contains 'ready' and, when ready, 'token'.
        return self._raw_execute("file_available", {"path": path})
def _copy(from_path, to_path):
    """Copy ``from_path`` to ``to_path``, logging the transfer at debug level."""
    log.debug("Copying path [%s] to [%s]", from_path, to_path)
    copy(from_path, to_path)
def _setup_params_from_job_config(job_config):
job_id = job_config.get("job_id", None)
tool_id = job_config.get("tool_id", None)
tool_version = job_config.get("tool_version", None)
preserve_galaxy_python_environment = job_config.get("preserve_galaxy_python_environment", None)
# use_metadata ignored post Pulsar 0.14.12+ but keep setting it for older Pulsar's that
# had hacks for pre-2017 Galaxies.
return dict(
job_id=job_id,
tool_id=tool_id,
tool_version=tool_version,
use_metadata=True,
preserve_galaxy_python_environment=preserve_galaxy_python_environment,
)
| galaxyproject/pulsar | pulsar/client/client.py | Python | apache-2.0 | 25,908 | [
"Galaxy"
] | 9336592731c6afeee3d8347a1c559333b067248fb253b330a36115522554c394 |
#!/usr/bin/python
# Converts AMBER prmtop (and inpcrd) files to a MMTK database,
# which is basically a python script defining a molecule's
# atoms, bonds, and default coordinates
import os, sys, time, numpy
#################
### CONSTANTS ###
#################
# Map flag to variable names
varnames = ['POINTERS','TITLE','ATOM_NAME','AMBER_ATOM_TYPE','CHARGE','MASS','NONBONDED_PARM_INDEX','LENNARD_JONES_ACOEF','LENNARD_JONES_BCOEF','ATOM_TYPE_INDEX','BONDS_INC_HYDROGEN','BONDS_WITHOUT_HYDROGEN']
# Masses for atoms defined in the GAFF force field
mass2symbols = {12.01:'C',1.008:'H',19.00:'F',35.45:'Cl',79.90:'Br',126.9:'I',14.01:'N',16.00:'O',30.97:'P',32.06:'S'}
#################
### FUNCTIONS ###
#################
# Loads record from AMBER parameter file
def loadRecord(record):
  """Parse one %FLAG record from an AMBER prmtop file into a numpy array.

  ``record`` is the text following a '%FLAG ' marker: the first line is the
  flag name, the second a %FORMAT(...) descriptor (e.g. 20a4, 10I8, 5E16.8),
  and the remaining lines hold fixed-width fields. Text ('a'), integer ('I')
  and scientific ('E') formats are supported; anything else yields an empty
  array.
  """
  lines = record.split('\n')
  lines.pop(0)  # flag name line
  fmt = lines.pop(0).strip()[8:-1]  # strip '%FORMAT(' prefix and ')' suffix
  values = []
  if fmt.find('a') > -1:  # Text fields
    width = int(fmt[fmt.find('a') + 1:])
    convert = lambda field: field
  elif fmt.find('I') > -1:  # Integer fields
    width = int(fmt[fmt.find('I') + 1:])
    convert = int
  elif fmt.find('E') > -1:  # Scientific-notation float fields
    width = int(fmt[fmt.find('E') + 1:fmt.find('.')])
    convert = float
  else:
    return numpy.array(values)
  for line in lines:
    for start in range(0, len(line), width):
      values.append(convert(line[start:start + width]))
  return numpy.array(values)
############
### MAIN ###
############
# NOTE(review): this script uses Python 2 print statements and is therefore
# not Python 3 compatible.
import argparse
parser = argparse.ArgumentParser(
  description='Convert AMBER prmtop and inpcrd files to a MMTK database file')
parser.add_argument('prmtop_FN', help='AMBER prmtop file')
parser.add_argument('inpcrd_FN', help='AMBER inpcrd file')
parser.add_argument('db_FN', help='MMTK Database file')
args = parser.parse_args()
print "Creating database "+args.db_FN
### Loads AMBER parameter file
prmtopF = open(args.prmtop_FN,'r')
prmtopData = prmtopF.read().split('%FLAG ')
prmtopF.close()
del prmtopF
# Keep only the records whose flags are listed in varnames.
prmtop = {}
for record in prmtopData:
  name = record[:record.find('\n')].strip()
  if name in varnames:
    prmtop[name] = loadRecord(record)
del name, record, varnames, loadRecord, prmtopData
# Primes in atom names would not be valid Python identifiers in the MMTK
# database; replace them with 'p'.
prmtop['ATOM_NAME'] = [a.replace("'","p") for a in prmtop['ATOM_NAME']]
NATOM = prmtop['POINTERS'][0]
NTYPES = prmtop['POINTERS'][1]
### Extract Lennard-Jones well depth and radii for each atom
LJ_radius = numpy.ndarray(shape=(NTYPES), dtype=float)
LJ_depth = numpy.ndarray(shape=(NTYPES), dtype=float)
for i in range(NTYPES):
  # Diagonal entry of the nonbonded parameter matrix for this atom type
  LJ_index = prmtop['NONBONDED_PARM_INDEX'][NTYPES*i+i]-1
  if prmtop['LENNARD_JONES_ACOEF'][LJ_index]<1.0e-6:
    # Effectively zero A coefficient: treat as a non-interacting type.
    LJ_radius[i] = 0
    LJ_depth[i] = 0
  else:
    # Recover radius and well depth from the A/B coefficients.
    factor = 2 * prmtop['LENNARD_JONES_ACOEF'][LJ_index] / prmtop['LENNARD_JONES_BCOEF'][LJ_index]
    LJ_radius[i] = pow(factor, 1.0/6.0) * 0.5
    LJ_depth[i] = prmtop['LENNARD_JONES_BCOEF'][LJ_index] / 2 / factor
# More useful for later calculations
root_LJ_depth = numpy.sqrt(LJ_depth)
LJ_diameter = LJ_radius*2
del i, LJ_index, factor
### Loads AMBER inpcrd file
inpcrdF = open(args.inpcrd_FN,'r')
inpcrd = inpcrdF.read().split('\n')
inpcrdF.close()
inpcrd.pop(0) # Title
NATOM = int(inpcrd.pop(0)) # Number of atoms
w = 12 # Width of field
# Coordinates are stored as fixed-width (12-character) fields, several per line.
coords = []
for line in inpcrd:
  coords = coords + [float(line[x:x+w]) for x in range(0,len(line),w)]
del w, inpcrdF, inpcrd
### Writes database
db_dir = os.path.dirname(args.db_FN)
if not (db_dir=='' or os.path.exists(db_dir)):
  os.system('mkdir -p '+db_dir)
db = open(args.db_FN,'w')
db.write("name='%s'\n"%os.path.basename(args.db_FN))
# One Atom(...) line per atom; the element symbol is looked up by mass.
for [name,mass] in zip(prmtop['ATOM_NAME'],prmtop['MASS']):
  if mass in mass2symbols.keys():
    db.write(name.strip()+" = Atom('"+mass2symbols[mass]+"')\n")
  else:
    raise Exception('Unknown atom with mass: %f!'%mass)
    sys.exit()
# The order of atoms in the prmtop file (not used by MMTK)
db.write("prmtop_order = [" + ", ".join(["%s"%name.strip() for name in prmtop['ATOM_NAME']]) + "]\n")
# Bond records come in triples; atom indices are byte offsets divided by 3.
bondList = list(prmtop['BONDS_INC_HYDROGEN']) + list(prmtop['BONDS_WITHOUT_HYDROGEN'])
db.write("bonds = [" + ", ".join(["Bond(%s, %s)"%(prmtop['ATOM_NAME'][bondList[i]/3].strip(),prmtop['ATOM_NAME'][bondList[i+1]/3].strip()) for i in range(0,len(bondList),3)]) + "]\n")
db.write("pdbmap = [('"+prmtop['TITLE'][0].strip()+"', {"+', '.join(["'%s': %s"%(name.strip(),name.strip()) for name in prmtop['ATOM_NAME']]) + "})]\n")
db.write("amber12_atom_type = {" + ", ".join(["%s: '%s'"%(name.strip(),type.strip()) for (name,type) in zip(prmtop['ATOM_NAME'],prmtop['AMBER_ATOM_TYPE'])]) + "}\n")
# Write the charge, converted to units of electric charge
# AMBER prmtop files multiply the actual charge by 18.2223, hence the division
db.write("amber_charge = {" + ", ".join(["%s: '%f'"%(name.strip(),charge/18.2223) for (name,charge) in zip(prmtop['ATOM_NAME'],prmtop['CHARGE'])]) + "}\n")
# Write the grid scaling factors
# Because the grids are in units of kcal/mol, the scaling factors are multiplied by 4.184 to convert to kJ/mol
db.write("scaling_factor_electrostatic = {" + ", ".join(["%s: '%f'"%(name.strip(),4.184*charge/18.2223) for (name,charge) in zip(prmtop['ATOM_NAME'],prmtop['CHARGE'])]) + "}\n")
atom_type_indicies = [prmtop['ATOM_TYPE_INDEX'][atom_index]-1 for atom_index in range(NATOM)]
db.write("scaling_factor_LJr = {" + ", ".join(["%s: '%f'"%(name.strip(),4.184*root_LJ_depth[type_index]*(LJ_diameter[type_index]**6)) for (name,type_index) in zip(prmtop['ATOM_NAME'],atom_type_indicies)]) + "}\n")
db.write("scaling_factor_LJa = {" + ", ".join(["%s: '%f'"%(name.strip(),4.184*root_LJ_depth[type_index]*(LJ_diameter[type_index]**3)) for (name,type_index) in zip(prmtop['ATOM_NAME'],atom_type_indicies)]) + "}\n")
# Write the coordinates, converted from Angstroms to nanometers
db.write("configurations = {\n")
db.write("'default': Cartesian({" + ", ".join(["%s: (%f, %f, %f)"%(name.strip(), coords[i]/10.0, coords[i+1]/10.0, coords[i+2]/10.0) for (name,i) in zip(prmtop['ATOM_NAME'],range(0,NATOM*3,3))]) + "})}\n")
db.close()
sys.exit()
# NOTE(review): everything below is unreachable (dead code) because of the
# unconditional sys.exit() above - presumably disabled deliberately. Remove
# or re-enable it explicitly.
### Write grid force file
gridF = open(os.path.join(os.path.dirname(args.db_FN),'lig.grid.pdb'),'w')
for atom_index in range(NATOM):
  atom_type = prmtop['ATOM_TYPE_INDEX'][atom_index]-1
  heavy = not(prmtop['ATOM_NAME'][atom_index].strip()[0]=='H')
  gridF.write('ATOM  {0:5d}{1:>4s} {2:>3s}  {3:4}    {4:8.3f}{5:8.3f}{6:8.3f}{7:6.2f}{8:6.2f}\n'.format(
    atom_index+1, # Atom serial number
    prmtop['ATOM_NAME'][atom_index].strip(), # Atom name
    'LIG', # Residue name
    1, # Residue sequence number
    prmtop['CHARGE'][atom_index]/18.2223, # x (electrostatic)
    root_LJ_depth[atom_type]*(LJ_diameter[atom_type]**6), # y (LJ repulsion)
    root_LJ_depth[atom_type]*(LJ_diameter[atom_type]**3), # z (LJ attraction)
    heavy, 1.0))
gridF.close()
del gridF, atom_index, atom_type
"Amber"
] | 3e2a4422d0020b04e205e798a90c162bec8f502b730201dfb0ac7f1722501bda |
"""
Example 3: Saving all minima found to an xyz file
"""
from pele.systems import LJCluster
from pele.utils.xyz import write_xyz
natoms = 12
niter = 100
system = LJCluster(natoms)
db = system.create_database()
bh = system.get_basinhopping(database=db)
bh.run(niter)
with open("lowest", "w") as fout:
for minimum in db.minima():
title = "energy = ", str(minimum.energy)
write_xyz(fout, minimum.coords, title)
############################################################
# some visualization
############################################################
try:
import pele.utils.pymolwrapper as pym
pym.start()
frame=1
for minimum in db.minima():
pym.draw_spheres(minimum.coords.reshape(-1, 3), "A", frame)
frame=frame+1
except:
print "Could not draw using pymol, skipping this step"
| js850/pele | examples/basinhopping/3_savelowest.py | Python | gpl-3.0 | 863 | [
"PyMOL"
] | 48afad2b7e14c917f2acd0aa64da6200e7c89502cb457b44c48dbd4a4525a9aa |
#!/usr/bin/env python
########################################################################
# $HeadURL$
""" File Catalog Client Command Line Interface. """
__RCSID__ = "$Id$"
import cmd
import sys
import pprint
import os
import atexit
import readline
import datetime
from DIRAC.Core.Utilities.ColorCLI import colorize
from DIRAC.FrameworkSystem.Client.SystemAdministratorClient import SystemAdministratorClient
from DIRAC.FrameworkSystem.Client.SystemAdministratorIntegrator import SystemAdministratorIntegrator
from DIRAC.FrameworkSystem.Client.ComponentMonitoringClient import ComponentMonitoringClient
from DIRAC.FrameworkSystem.Utilities import MonitoringUtilities
import DIRAC.Core.Utilities.InstallTools as InstallTools
from DIRAC.ConfigurationSystem.Client.Helpers import getCSExtensions
from DIRAC.Core.Utilities import List
from DIRAC.Core.Utilities.PromptUser import promptUser
from DIRAC import gConfig
from DIRAC import gLogger
from DIRAC.Core.Utilities.PrettyPrint import printTable
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
class SystemAdministratorClientCLI( cmd.Cmd ):
"""
"""
def __errMsg( self, errMsg ):
gLogger.error( "%s %s" % ( colorize( "[ERROR]", "red" ), errMsg ) )
  def __init__( self, host = None ):
    """Initialise the CLI, optionally selecting *host* ('name' or 'name:port')."""
    cmd.Cmd.__init__( self )
    # Check if Port is given
    self.host = None
    self.port = None
    self.prompt = '[%s]> ' % colorize( "no host", "yellow" )
    if host:
      self.__setHost( host )
    self.cwd = ''
    self.previous_cwd = ''
    self.homeDir = ''
    # Component types managed through the runit supervisor
    self.runitComponents = [ "service", "agent", "executor" ]
    # store history
    histfilename = os.path.basename(sys.argv[0])
    historyFile = os.path.expanduser( "~/.dirac/%s.history" % histfilename[0:-3])
    if not os.path.exists( os.path.dirname(historyFile) ):
      os.makedirs( os.path.dirname(historyFile) )
    if os.path.isfile( historyFile ):
      readline.read_history_file( historyFile )
    readline.set_history_length(1000)
    # Persist the command history when the interpreter exits
    atexit.register( readline.write_history_file, historyFile )
  def __setHost( self, host ):
    """Select the target host ('name' or 'name:port'), ping it, and colour
    the prompt accordingly (green = reachable, red = unreachable)."""
    hostList = host.split( ':' )
    self.host = hostList[0]
    if len( hostList ) == 2:
      self.port = hostList[1]
    else:
      self.port = None
    gLogger.notice( "Pinging %s..." % self.host )
    result = self.__getClient().ping()
    if result[ 'OK' ]:
      colorHost = colorize( host, "green" )
    else:
      self.__errMsg( "Could not connect to %s: %s" % ( self.host, result[ 'Message' ] ) )
      colorHost = colorize( host, "red" )
    self.prompt = '[%s]> ' % colorHost
  def __getClient( self ):
    """Return a SystemAdministratorClient for the currently selected host/port."""
    return SystemAdministratorClient( self.host, self.port )
def do_set( self, args ):
"""
Set options
usage:
set host <hostname> - Set the hostname to work with
set project <project> - Set the project to install/upgrade in the host
"""
if not args:
gLogger.notice( self.do_set.__doc__ )
return
cmds = { 'host' : ( 1, self.__do_set_host ),
'project' : ( 1, self.__do_set_project ) }
args = List.fromChar( args, " " )
for cmd in cmds:
if cmd == args[0]:
if len( args ) != 1 + cmds[ cmd ][0]:
self.__errMsg( "Missing arguments" )
gLogger.notice( self.do_set.__doc__ )
return
return cmds[ cmd ][1]( args[1:] )
self.__errMsg( "Invalid command" )
gLogger.notice( self.do_set.__doc__ )
return
  def __do_set_host( self, args ):
    """Handler for 'set host': require a fully qualified host name."""
    host = args[0]
    if host.find( '.' ) == -1 and host != "localhost":
      self.__errMsg( "Provide the full host name including its domain" )
      return
    self.__setHost( host )
  def __do_set_project( self, args ):
    """Handler for 'set project': select the project on the remote host."""
    project = args[0]
    result = self.__getClient().setProject( project )
    if not result[ 'OK' ]:
      self.__errMsg( "Cannot set project: %s" % result[ 'Message' ] )
    else:
      gLogger.notice( "Project set to %s" % project )
  def do_show( self, args ):
    """
        Show list of components with various related information

        usage:

          show software      - show components for which software is available
          show installed     - show components installed in the host with runit system
          show setup         - show components set up for automatic running in the host
          show project       - show project to install or upgrade
          show status        - show status of the installed components
          show database      - show status of the databases
          show mysql         - show status of the MySQL server
          show log  <system> <service|agent> [nlines]
                             - show last <nlines> lines in the component log file
          show info          - show version of software and setup
          show doc <type> <system> <name>
                             - show documentation for a given service or agent
          show host          - show host related parameters
          show hosts         - show all available hosts
          show installations [ list | current | -n <Name> | -h <Host> | -s <System> | -m <Module> | -t <Type> | -itb <InstallationTime before>
                             | -ita <InstallationTime after> | -utb <UnInstallationTime before> | -uta <UnInstallationTime after> ]*
                             - show all the installations of components that match the given parameters
          show errors [*|<system> <service|agent>]
                             - show error count for the given component or all the components
                               in the last hour and day
    """
    argss = args.split()
    if not argss:
      gLogger.notice( self.do_show.__doc__ )
      return

    option = argss[0]
    del argss[0]

    if option == 'software':
      # Components for which software is available on the remote host
      client = SystemAdministratorClient( self.host, self.port )
      result = client.getSoftwareComponents()
      if not result['OK']:
        self.__errMsg( result['Message'] )
      else:
        gLogger.notice( '' )
        pprint.pprint( result['Value'] )
    elif option == 'installed':
      # Components installed under the runit system
      client = SystemAdministratorClient( self.host, self.port )
      result = client.getInstalledComponents()
      if not result['OK']:
        self.__errMsg( result['Message'] )
      else:
        gLogger.notice( '' )
        pprint.pprint( result['Value'] )
    elif option == 'setup':
      # Components set up for automatic running
      client = SystemAdministratorClient( self.host, self.port )
      result = client.getSetupComponents()
      if not result['OK']:
        self.__errMsg( result['Message'] )
      else:
        gLogger.notice( '' )
        pprint.pprint( result['Value'] )
    elif option == 'project':
      result = SystemAdministratorClient( self.host, self.port ).getProject()
      if not result['OK']:
        self.__errMsg( result['Message'] )
      else:
        gLogger.notice( "Current project is %s" % result[ 'Value' ] )
    elif option == 'status':
      # Tabulate the overall status of all installed components
      client = SystemAdministratorClient( self.host, self.port )
      result = client.getOverallStatus()
      if not result['OK']:
        self.__errMsg( result['Message'] )
      else:
        fields = ["System",'Name','Module','Type','Setup','Installed','Runit','Uptime','PID']
        records = []
        rDict = result['Value']
        for compType in rDict:
          for system in rDict[compType]:
            components = rDict[compType][system].keys()
            components.sort()
            for component in components:
              record = []
              if rDict[compType][system][component]['Installed']:
                module = str( rDict[compType][system][component]['Module'] )
                # compType is plural ('services', ...); strip the final 's'
                record += [ system,component,module,compType.lower()[:-1]]
                if rDict[compType][system][component]['Setup']:
                  record += ['Setup']
                else:
                  record += ['NotSetup']
                if rDict[compType][system][component]['Installed']:
                  record += ['Installed']
                else:
                  record += ['NotInstalled']
                record += [str( rDict[compType][system][component]['RunitStatus'] )]
                record += [str( rDict[compType][system][component]['Timeup'] )]
                record += [str( rDict[compType][system][component]['PID'] )]
                records.append(record)
        printTable(fields,records)
    elif option == 'database' or option == 'databases':
      # Compare installed databases against the available software
      client = SystemAdministratorClient( self.host, self.port )
      if not InstallTools.mysqlPassword:
        InstallTools.mysqlPassword = "LocalConfig"
      InstallTools.getMySQLPasswords()
      result = client.getDatabases( InstallTools.mysqlRootPwd )
      if not result['OK']:
        self.__errMsg( result['Message'] )
        return
      resultSW = client.getAvailableDatabases()
      if not resultSW['OK']:
        self.__errMsg( resultSW['Message'] )
        return

      sw = resultSW['Value']
      installed = result['Value']
      gLogger.notice( '' )
      for db in sw:
        if db in installed:
          gLogger.notice( db.rjust( 25 ), ': Installed' )
        else:
          gLogger.notice( db.rjust( 25 ), ': Not installed' )
      if not sw:
        gLogger.notice( "No database found" )
    elif option == 'mysql':
      client = SystemAdministratorClient( self.host, self.port )
      result = client.getMySQLStatus()
      if not result['OK']:
        self.__errMsg( result['Message'] )
      elif result['Value']:
        gLogger.notice( '' )
        for par, value in result['Value'].items():
          gLogger.notice( ( par.rjust( 28 ), ':', value ) )
      else:
        gLogger.notice( "No MySQL database found" )
    elif option == "log":
      self.getLog( argss )
    elif option == "info":
      # Software versions and setup of the remote host
      client = SystemAdministratorClient( self.host, self.port )
      result = client.getInfo()
      if not result['OK']:
        self.__errMsg( result['Message'] )
      else:
        gLogger.notice( '' )
        gLogger.notice( "Setup:", result['Value']['Setup'] )
        gLogger.notice( "DIRAC version:", result['Value']['DIRAC'] )
        if result['Value']['Extensions']:
          for e, v in result['Value']['Extensions'].items():
            gLogger.notice( "%s version" % e, v )
        gLogger.notice( '' )
    elif option == "host":
      client = SystemAdministratorClient( self.host, self.port )
      result = client.getHostInfo()
      if not result['OK']:
        self.__errMsg( result['Message'] )
      else:
        gLogger.notice( '' )
        gLogger.notice( "Host info:" )
        gLogger.notice( '' )

        fields = ['Parameter','Value']
        records = []
        for key, value in result['Value'].items():
          records.append( [key, str( value ) ] )

        printTable( fields, records )
    elif option == "hosts":
      # All hosts known to the component monitoring database
      client = ComponentMonitoringClient()
      result = client.getHosts( {}, False, False )
      if not result[ 'OK' ]:
        self.__errMsg( 'Error retrieving the list of hosts: %s' % ( result[ 'Message' ] ) )
      else:
        hostList = result[ 'Value' ]
        gLogger.notice( '' )
        gLogger.notice( ' ' + 'Host'.center( 32 ) + ' ' + 'CPU'.center( 34 ) + ' ' )
        gLogger.notice( ( '-' * 69 ) )
        for element in hostList:
          gLogger.notice( '|' + element[ 'HostName' ].center( 32 ) + '|' + element[ 'CPU' ].center( 34 ) + '|' )
        gLogger.notice( ( '-' * 69 ) )
        gLogger.notice( '' )
    elif option == "errors":
      self.getErrors( argss )
    elif option == "installations":
      self.getInstallations( argss )
    elif option == "doc":
      # show doc <service|agent> <system> <module>
      if len( argss ) > 2:
        if argss[0] in [ 'service', 'agent' ]:
          compType = argss[0]
          compSystem = argss[1]
          compModule = argss[2]
          client = SystemAdministratorClient( self.host, self.port )
          result = client.getComponentDocumentation( compType, compSystem, compModule )
          if result[ 'OK' ]:
            gLogger.notice( result[ 'Value' ] )
          else:
            self.__errMsg( result[ 'Message' ] )
        else:
          gLogger.notice( self.do_show.__doc__ )
      else:
        gLogger.notice( self.do_show.__doc__ )
    else:
      gLogger.notice( "Unknown option:", option )
  def getErrors( self, argss ):
    """ Get and gLogger.notice( out errors from the logs of specified components )
    """
    component = ''
    if len( argss ) < 1:
      component = '*'
    else:
      system = argss[0]
      if system == "*":
        component = '*'
      else:
        if len( argss ) < 2:
          gLogger.notice( '' )
          gLogger.notice( self.do_show.__doc__ )
          return
        comp = argss[1]
        # Component identifiers are stored as "<system>/<component>"
        component = '/'.join( [system, comp] )

    client = SystemAdministratorClient( self.host, self.port )
    result = client.checkComponentLog( component )
    if not result['OK']:
      self.__errMsg( result['Message'] )
    else:
      fields = ['System', 'Component', 'Last hour', 'Last day', 'Last error']
      records = []
      for cname in result['Value']:
        system, component = cname.split( '/' )
        errors_1 = result['Value'][cname]['ErrorsHour']
        errors_24 = result['Value'][cname]['ErrorsDay']
        lastError = result['Value'][cname]['LastError']
        # NOTE(review): the result of strip() is discarded here - probably
        # meant lastError = lastError.strip(); confirm before changing.
        lastError.strip()
        # Truncate long error messages for the table display
        if len( lastError ) > 80:
          lastError = lastError[:80] + '...'
        records.append( [system, component, str( errors_1 ), str( errors_24 ), lastError] )
      records.sort()
      printTable( fields, records )
  def getInstallations( self, argss ):
    """ Get data from the component monitoring database
    """
    display = 'table'
    installationFilter = {}
    componentFilter = {}
    hostFilter = {}

    # Two-state argument parser: flags set 'key', the following token is its
    # value. 'Component.'/'Host.' prefixes route the value to the right filter.
    key = None
    for arg in argss:
      if not key:
        if arg == 'list':
          display = 'list'
        elif arg == 'current':
          # Only installations that have not been uninstalled yet
          installationFilter[ 'UnInstallationTime' ] = None
        elif arg == '-t':
          key = 'Component.Type'
        elif arg == '-m':
          key = 'Component.Module'
        elif arg == '-s':
          key = 'Component.System'
        elif arg == '-h':
          key = 'Host.HostName'
        elif arg == '-n':
          key = 'Instance'
        elif arg == '-itb':
          key = 'InstallationTime.smaller'
        elif arg == '-ita':
          key = 'InstallationTime.bigger'
        elif arg == '-utb':
          key = 'UnInstallationTime.smaller'
        elif arg == '-uta':
          key = 'UnInstallationTime.bigger'
      else:
        if 'Component.' in key:
          componentFilter[ key.replace( 'Component.', '' ) ] = arg
        elif 'Host.' in key:
          hostFilter[ key.replace( 'Host.', '' ) ] = arg
        else:
          if 'Time.' in key:
            # Time bounds are given as day-month-year
            arg = datetime.datetime.strptime( arg, '%d-%m-%Y' )
          installationFilter[ key ] = arg
        key = None

    client = ComponentMonitoringClient()
    result = client.getInstallations( installationFilter, componentFilter, hostFilter, True )
    if not result[ 'OK' ]:
      self.__errMsg( 'Could not retrieve the installations: %s' % ( result[ 'Message' ] ) )
      installations = None
    else:
      installations = result[ 'Value' ]

    if installations:
      if display == 'table':
        gLogger.notice( '' )
        gLogger.notice( ' ' + 'Num'.center( 5 ) + ' ' \
            + 'Host'.center( 20 ) + ' ' \
            + 'Name'.center( 20 ) + ' ' \
            + 'Module'.center( 20 ) + ' ' \
            + 'System'.center( 16 ) + ' ' \
            + 'Type'.center( 12 ) + ' ' \
            + 'Installed on'.center( 18 ) + ' ' \
            + 'Install by'.center( 12 ) + ' ' \
            + 'Uninstalled on'.center( 18 ) + ' ' \
            + 'Uninstall by'.center( 12 ) )
        gLogger.notice( ( '-' ) * 164 )
      for i, installation in enumerate( installations ):
        if not installation[ 'InstalledBy' ]:
          installedBy = ''
        else:
          installedBy = installation[ 'InstalledBy' ]
        if not installation[ 'UnInstalledBy' ]:
          uninstalledBy = ''
        else:
          uninstalledBy = installation[ 'UnInstalledBy' ]
        if installation[ 'UnInstallationTime' ]:
          uninstalledOn = installation[ 'UnInstallationTime' ].strftime( "%d-%m-%Y %H:%M" )
          isInstalled = 'No'
        else:
          uninstalledOn = ''
          isInstalled = 'Yes'

        if display == 'table':
          gLogger.notice( '|' + str( i + 1 ).center( 5 ) + '|' \
              + installation[ 'Host' ][ 'HostName' ].center( 20 ) + '|' \
              + installation[ 'Instance' ].center( 20 ) + '|' \
              + installation[ 'Component' ][ 'Module' ].center( 20 ) + '|' \
              + installation[ 'Component' ][ 'System' ].center( 16 ) + '|' \
              + installation[ 'Component' ][ 'Type' ].center( 12 ) + '|' \
              + installation[ 'InstallationTime' ].strftime( "%d-%m-%Y %H:%M" ).center( 18 ) + '|' \
              + installedBy.center( 12 ) + '|' \
              + uninstalledOn.center( 18 ) + '|' \
              + uninstalledBy.center( 12 ) + '|' )
          gLogger.notice( ( '-' ) * 164 )
        elif display == 'list':
          gLogger.notice( '' )
          gLogger.notice( 'Installation: '.rjust( 20 ) + str ( i + 1 ) )
          gLogger.notice( 'Installed: '.rjust( 20 ) + isInstalled )
          gLogger.notice( 'Host: '.rjust( 20 ) + installation[ 'Host' ][ 'HostName' ] )
          gLogger.notice( 'Name: '.rjust( 20 ) + installation[ 'Instance' ] )
          gLogger.notice( 'Module: '.rjust( 20 ) + installation[ 'Component' ][ 'Module' ] )
          gLogger.notice( 'System: '.rjust( 20 ) + installation[ 'Component' ][ 'System' ] )
          gLogger.notice( 'Type: '.rjust( 20 ) + installation[ 'Component' ][ 'Type' ] )
          gLogger.notice( 'Installed on: '.rjust( 20 ) + installation[ 'InstallationTime' ].strftime( "%d-%m-%Y %H:%M" ) )
          if installedBy != '':
            gLogger.notice( 'Installed by: '.rjust( 20 ) + installedBy )
          if uninstalledOn != '':
            gLogger.notice( 'Uninstalled on: '.rjust( 20 ) + uninstalledOn )
            gLogger.notice( 'Uninstalled by: '.rjust( 20 ) + uninstalledBy )
        else:
          self.__errMsg( 'No display mode was selected' )
      gLogger.notice( '' )
  def getLog( self, argss ):
    """ Get the tail of the log file of the given component
    """
    if len( argss ) < 2:
      gLogger.notice( '' )
      gLogger.notice( self.do_show.__doc__ )
      return

    system = argss[0]
    component = argss[1]
    nLines = 40
    # Optional third argument: number of lines to show (default 40)
    if len( argss ) > 2:
      nLines = int( argss[2] )
    client = SystemAdministratorClient( self.host, self.port )
    result = client.getLogTail( system, component, nLines )
    if not result['OK']:
      self.__errMsg( result['Message'] )
    elif result['Value']:
      # The log tail is keyed by "<system>_<component>"
      for line in result['Value']['_'.join( [system, component] )].split( '\n' ):
        gLogger.notice( '  ', line )
    else:
      gLogger.notice( "No logs found" )
def do_install( self, args ):
"""
Install various DIRAC components
usage:
install mysql
install db <database>
install service <system> <service> [-m <ModuleName>] [-p <Option>=<Value>] [-p <Option>=<Value>] ...
install agent <system> <agent> [-m <ModuleName>] [-p <Option>=<Value>] [-p <Option>=<Value>] ...
install executor <system> <executor> [-m <ModuleName>] [-p <Option>=<Value>] [-p <Option>=<Value>] ...
"""
argss = args.split()
if not argss:
gLogger.notice( self.do_install.__doc__ )
return
# Retrieve user installing the component
result = getProxyInfo()
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message'] )
user = result[ 'Value' ][ 'username' ]
option = argss[0]
del argss[0]
if option == "mysql":
gLogger.notice( "Installing MySQL database, this can take a while ..." )
client = SystemAdministratorClient( self.host, self.port )
if InstallTools.mysqlPassword == 'LocalConfig':
InstallTools.mysqlPassword = ''
InstallTools.getMySQLPasswords()
result = client.installMySQL( InstallTools.mysqlRootPwd, InstallTools.mysqlPassword )
if not result['OK']:
self.__errMsg( result['Message'] )
else:
gLogger.notice( "MySQL:", result['Value'] )
gLogger.notice( "You might need to restart SystemAdministrator service to take new settings into account" )
elif option == "db":
if not argss:
gLogger.notice( self.do_install.__doc__ )
return
database = argss[0]
client = SystemAdministratorClient( self.host, self.port )
result = client.getAvailableDatabases()
if not result['OK']:
self.__errMsg( "Can not get database list: %s" % result['Message'] )
return
if not result['Value'].has_key( database ):
self.__errMsg( "Unknown database %s: " % database )
return
system = result['Value'][database]['System']
setup = gConfig.getValue( '/DIRAC/Setup', '' )
if not setup:
self.__errMsg( "Unknown current setup" )
return
instance = gConfig.getValue( '/DIRAC/Setups/%s/%s' % ( setup, system ), '' )
if not instance:
self.__errMsg( "No instance defined for system %s" % system )
self.__errMsg( "\tAdd new instance with 'add instance %s <instance_name>'" % system )
return
if not InstallTools.mysqlPassword:
InstallTools.mysqlPassword = 'LocalConfig'
InstallTools.getMySQLPasswords()
result = client.installDatabase( database, InstallTools.mysqlRootPwd )
if not result['OK']:
self.__errMsg( result['Message'] )
return
extension, system = result['Value']
result = client.getHostInfo()
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message' ] )
return
else:
cpu = result[ 'Value' ][ 'CPUModel' ]
hostname = self.host
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message' ] )
return
if database != 'InstalledComponentsDB':
result = MonitoringUtilities.monitorInstallation( 'DB', system.replace( 'System', '' ), database, cpu = cpu, hostname = hostname )
if not result['OK']:
self.__errMsg( result['Message'] )
return
# result = client.addDatabaseOptionsToCS( system, database )
InstallTools.mysqlHost = self.host
result = client.getInfo()
if not result['OK']:
self.__errMsg( result['Message'] )
hostSetup = result['Value']['Setup']
result = InstallTools.addDatabaseOptionsToCS( gConfig, system, database, hostSetup, overwrite = True )
if not result['OK']:
self.__errMsg( result['Message'] )
return
gLogger.notice( "Database %s from %s/%s installed successfully" % ( database, extension, system ) )
elif option in self.runitComponents:
if len( argss ) < 2:
gLogger.notice( self.do_install.__doc__ )
return
system = argss[0]
del argss[0]
component = argss[0]
del argss[0]
specialOptions = {}
module = ''
for i in range(len(argss)):
if argss[i] == "-m":
specialOptions['Module'] = argss[i+1]
module = argss[i+1]
if argss[i] == "-p":
opt,value = argss[i+1].split('=')
specialOptions[opt] = value
if module == component:
module = ''
client = SystemAdministratorClient( self.host, self.port )
# First need to update the CS
# result = client.addDefaultOptionsToCS( option, system, component )
InstallTools.host = self.host
result = client.getInfo()
if not result['OK']:
self.__errMsg( result['Message'] )
return
hostSetup = result['Value']['Setup']
# Install Module section if not yet there
if module:
result = InstallTools.addDefaultOptionsToCS( gConfig, option, system, module,
getCSExtensions(), hostSetup )
# Add component section with specific parameters only
result = InstallTools.addDefaultOptionsToCS( gConfig, option, system, component,
getCSExtensions(), hostSetup, specialOptions,
addDefaultOptions = False )
else:
# Install component section
result = InstallTools.addDefaultOptionsToCS( gConfig, option, system, component,
getCSExtensions(), hostSetup, specialOptions )
if not result['OK']:
self.__errMsg( result['Message'] )
return
# Then we can install and start the component
result = client.setupComponent( option, system, component, module )
if not result['OK']:
self.__errMsg( result['Message'] )
return
compType = result['Value']['ComponentType']
runit = result['Value']['RunitStatus']
gLogger.notice( "%s %s_%s is installed, runit status: %s" % ( compType, system, component, runit ) )
# And register it in the database
result = client.getHostInfo()
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message' ] )
return
else:
cpu = result[ 'Value' ][ 'CPUModel' ]
hostname = self.host
if component == 'ComponentMonitoring':
result = MonitoringUtilities.monitorInstallation( 'DB', system, 'InstalledComponentsDB', cpu = cpu, hostname = hostname )
if not result['OK']:
self.__errMsg( 'Error registering installation into database: %s' % result[ 'Message' ] )
return
result = MonitoringUtilities.monitorInstallation( option, system, component, module, cpu = cpu, hostname = hostname )
if not result['OK']:
self.__errMsg( 'Error registering installation into database: %s' % result[ 'Message' ] )
return
else:
gLogger.notice( "Unknown option:", option )
def do_uninstall( self, args ):
"""
Uninstall DIRAC component
usage:
uninstall db <database>
uninstall <-f ForceLogUninstall> <system> <component>
"""
argss = args.split()
if not argss:
gLogger.notice( self.do_uninstall.__doc__ )
return
# Retrieve user uninstalling the component
result = getProxyInfo()
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message'] )
user = result[ 'Value' ][ 'username' ]
option = argss[0]
if option == 'db':
component = argss[1]
client = SystemAdministratorClient( self.host, self.port )
result = client.getHostInfo()
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message' ] )
return
else:
cpu = result[ 'Value' ][ 'CPUModel' ]
hostname = self.host
result = client.getAvailableDatabases()
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message' ] )
return
system = result[ 'Value' ][ component ][ 'System' ]
result = MonitoringUtilities.monitorUninstallation( system , component, hostname = hostname, cpu = cpu )
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message' ] )
return
result = client.uninstallDatabase( component )
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message' ] )
else:
gLogger.notice( "Successfully uninstalled %s" % ( component ) )
else:
if option == '-f':
force = True
del argss[0]
else:
force = False
if len( argss ) != 2:
gLogger.notice( self.do_uninstall.__doc__ )
return
system, component = argss
client = SystemAdministratorClient( self.host, self.port )
monitoringClient = ComponentMonitoringClient()
result = monitoringClient.getInstallations( { 'Instance': component, 'UnInstallationTime': None },
{ 'System': system },
{ 'HostName': self.host }, True )
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message' ] )
return
if len( result[ 'Value' ] ) < 1:
self.__errMsg( "Given component does not exist" )
return
if len( result[ 'Value' ] ) > 1:
self.__errMsg( "Too many components match" )
return
removeLogs = False
if force:
removeLogs = True
else:
if result[ 'Value' ][0][ 'Component' ][ 'Type' ] in self.runitComponents:
result = promptUser( 'Remove logs?', ['y', 'n'], 'n' )
if result[ 'OK' ]:
removeLogs = result[ 'Value' ] == 'y'
result = client.uninstallComponent( system, component, removeLogs )
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message' ] )
else:
gLogger.notice( "Successfully uninstalled %s/%s" % ( system, component ) )
result = client.getHostInfo()
if not result[ 'OK' ]:
self.__errMsg( result[ 'Message' ] )
return
else:
cpu = result[ 'Value' ][ 'CPUModel' ]
hostname = self.host
result = MonitoringUtilities.monitorUninstallation( system, component, hostname = hostname, cpu = cpu )
if not result[ 'OK' ]:
return result
def do_start( self, args ):
""" Start services or agents or database server
usage:
start <system|*> <service|agent|*>
start mysql
"""
argss = args.split()
if len( argss ) < 2:
gLogger.notice( self.do_start.__doc__ )
return
option = argss[0]
del argss[0]
if option != 'mysql':
if len( argss ) < 1:
gLogger.notice( self.do_start.__doc__ )
return
system = option
if system != '*':
component = argss[0]
else:
component = '*'
client = SystemAdministratorClient( self.host, self.port )
result = client.startComponent( system, component )
if not result['OK']:
self.__errMsg( result['Message'] )
else:
if system != '*' and component != '*':
gLogger.notice( "\n%s_%s started successfully, runit status:\n" % ( system, component ) )
else:
gLogger.notice( "\nComponents started successfully, runit status:\n" )
for comp in result['Value']:
gLogger.notice( ( comp.rjust( 32 ), ':', result['Value'][comp]['RunitStatus'] ) )
else:
gLogger.notice( "Not yet implemented" )
def do_restart( self, args ):
    """ Restart services or agents or database server
        usage:
          restart <system|*> <service|agent|*>
          restart mysql
    """
    if not args:
        gLogger.notice( self.do_restart.__doc__ )
        return
    argss = args.split()
    option = argss[0]
    del argss[0]
    if option != 'mysql':
        # A specific system requires a component argument; a bare "*" restarts
        # everything and needs no second argument.
        if option != "*":
            if len( argss ) < 1:
                gLogger.notice( self.do_restart.__doc__ )
                return
        system = option
        if system != '*':
            component = argss[0]
        else:
            component = '*'
        client = SystemAdministratorClient( self.host, self.port )
        result = client.restartComponent( system, component )
        if not result['OK']:
            if system == '*':
                # Restarting everything also restarts the SystemAdministrator
                # service itself, so losing the connection is expected here.
                gLogger.notice( "All systems are restarted, connection to SystemAdministrator is lost" )
            else:
                self.__errMsg( result['Message'] )
        else:
            if system != '*' and component != '*':
                gLogger.notice( "\n%s_%s started successfully, runit status:\n" % ( system, component ) )
            else:
                gLogger.notice( "\nComponents started successfully, runit status:\n" )
            for comp in result['Value']:
                # NOTE(review): passes one tuple to gLogger.notice(); prints the
                # tuple repr rather than a formatted line -- confirm intended.
                gLogger.notice( ( comp.rjust( 32 ), ':', result['Value'][comp]['RunitStatus'] ) )
    else:
        gLogger.notice( "Not yet implemented" )
def do_stop( self, args ):
    """ Stop services or agents or database server
        usage:
          stop <system|*> <service|agent|*>
          stop mysql
    """
    if not args:
        gLogger.notice( self.do_stop.__doc__ )
        return
    argss = args.split()
    if argss[0] != 'mysql':
        system = argss[0]
        if system != '*':
            # Bug fix: argss[1] used to be read unconditionally, raising an
            # IndexError for "stop <system>" without a component.  do_start
            # guards against this; keep the two commands consistent.
            if len( argss ) < 2:
                gLogger.notice( self.do_stop.__doc__ )
                return
            component = argss[1]
        else:
            # Wildcard system: stop every component.
            component = '*'
        client = SystemAdministratorClient( self.host, self.port )
        result = client.stopComponent( system, component )
        if not result['OK']:
            self.__errMsg( result['Message'] )
        else:
            if system != '*' and component != '*':
                gLogger.notice( "\n%s_%s stopped successfully, runit status:\n" % ( system, component ) )
            else:
                gLogger.notice( "\nComponents stopped successfully, runit status:\n" )
            for comp in result['Value']:
                gLogger.notice( ( comp.rjust( 32 ), ':', result['Value'][comp]['RunitStatus'] ) )
    else:
        # MySQL server management is not supported through this command yet.
        gLogger.notice( "Not yet implemented" )
def do_update( self, args ):
    """ Update the software on the target host to a given version
        usage:
          update <version> [ -r <rootPath> ] [ -g <lcgVersion> ]
            where rootPath - path to the DIRAC installation
                  lcgVersion - version of the LCG bindings to install
    """
    try:
        argss = args.split()
        version = argss[0]
        rootPath = ''
        lcgVersion = ''
        del argss[0]
        while len( argss ) > 0:
            if argss[0] == '-r':
                rootPath = argss[1]
                del argss[0]
                del argss[0]
            elif argss[0] == '-g':
                lcgVersion = argss[1]
                del argss[0]
                del argss[0]
            else:
                # Bug fix: an unrecognised token used to leave argss untouched,
                # spinning this loop forever.  Raise instead; the except clause
                # below reports the usage message.
                raise ValueError( "unknown option %s" % argss[0] )
    except Exception as x:
        # Fix: "except Exception, x" is Python-2-only syntax; "as" works on 2.6+.
        gLogger.notice( "ERROR: wrong input:", str( x ) )
        gLogger.notice( self.do_update.__doc__ )
        return
    client = SystemAdministratorClient( self.host, self.port )
    gLogger.notice( "Software update can take a while, please wait ..." )
    # Long timeout: the remote installation can legitimately take minutes.
    result = client.updateSoftware( version, rootPath, lcgVersion, timeout = 300 )
    if not result['OK']:
        self.__errMsg( "Failed to update the software" )
        gLogger.notice( result['Message'] )
    else:
        gLogger.notice( "Software successfully updated." )
        gLogger.notice( "You should restart the services to use the new software version." )
        gLogger.notice( "Think of updating /Operations/<vo>/<setup>/Pilot/Versions section in the CS" )
def do_revert( self, args ):
    """ Revert the last installed version of software to the previous one
        usage:
          revert
    """
    adminClient = SystemAdministratorClient( self.host, self.port )
    reply = adminClient.revertSoftware()
    if reply['OK']:
        gLogger.notice( "Software reverted to", reply['Value'] )
    else:
        gLogger.notice( "Error:", reply['Message'] )
def do_add( self, args ):
    """
        Add new entity to the Configuration Service
        usage:
          add system <system> <instance>
    """
    if not args:
        gLogger.notice( self.do_add.__doc__ )
        return
    argss = args.split()
    option = argss[0]
    del argss[0]
    if option in ( "instance", "system" ):
        # Robustness fix: guard against missing <system>/<instance> arguments
        # instead of crashing with an IndexError.
        if len( argss ) < 2:
            gLogger.notice( self.do_add.__doc__ )
            return
        system = argss[0]
        instance = argss[1]
        client = SystemAdministratorClient( self.host, self.port )
        result = client.getInfo()
        if not result['OK']:
            self.__errMsg( result['Message'] )
            # Bug fix: previously execution fell through after the error and
            # dereferenced result['Value'], raising a KeyError.
            return
        hostSetup = result['Value']['Setup']
        instanceName = gConfig.getValue( '/DIRAC/Setups/%s/%s' % ( hostSetup, system ), '' )
        if instanceName:
            # The system is already configured in this setup: nothing to add.
            if instanceName == instance:
                gLogger.notice( "System %s already has instance %s defined in %s Setup" % ( system, instance, hostSetup ) )
            else:
                self.__errMsg( "System %s already has instance %s defined in %s Setup" % ( system, instance, hostSetup ) )
            return
        result = InstallTools.addSystemInstance( system, instance, hostSetup )
        if not result['OK']:
            self.__errMsg( result['Message'] )
        else:
            gLogger.notice( "%s system instance %s added successfully" % ( system, instance ) )
    else:
        gLogger.notice( "Unknown option:", option )
def do_exec( self, args ):
    """ Execute a shell command on the remote host and get back the output
        usage:
          exec <cmd> [<arguments>]
    """
    # Prefix with a cd so the command runs in the tracked working directory.
    remoteCommand = 'cd %s;' % self.cwd + args
    adminClient = SystemAdministratorClient( self.host, self.port )
    reply = adminClient.executeCommand( remoteCommand )
    if not reply['OK']:
        self.__errMsg( reply['Message'] )
        return
    status, output, error = reply['Value']
    gLogger.notice( '' )
    for outLine in output.split( '\n' ):
        gLogger.notice( outLine )
    if error:
        self.__errMsg( status )
        for errLine in error.split( '\n' ):
            gLogger.notice( errLine )
def do_execfile( self, args ):
    """ Execute a series of administrator CLI commands from a given file
        usage:
          execfile <filename>
    """
    if not args:
        gLogger.notice( self.do_execfile.__doc__ )
        return
    argss = args.split()
    fname = argss[0]
    # Resource fix: context manager guarantees the file is closed even when
    # reading raises.
    with open( fname, 'r' ) as execfile:
        lines = execfile.readlines()
    for line in lines:
        # Strip trailing '#' comments and surrounding whitespace; skip blanks.
        if line.find( '#' ) != -1 :
            line = line[:line.find( '#' )]
        line = line.strip()
        if not line:
            continue
        gLogger.notice( "\n--> Executing %s\n" % line )
        elements = line.split()
        command = elements[0]
        args = ' '.join( elements[1:] )
        # Security/robustness fix: dispatch via getattr instead of eval().
        # eval() would execute arbitrary expressions from the file and raised
        # an unhandled error for unknown commands.
        handler = getattr( self, 'do_%s' % command, None )
        if handler is None:
            self.__errMsg( "Unknown command: %s" % command )
            continue
        handler( args )
def do_cd( self, args ):
    """ Change the current working directory on the target host
        Usage:
          cd <dirpath>
    """
    argss = args.split()
    if len( argss ) == 0:
        # Return to $HOME
        if self.homeDir:
            self.previous_cwd = self.cwd
            self.cwd = self.homeDir
        else:
            # Home directory not cached yet: ask the remote host for it once.
            client = SystemAdministratorClient( self.host, self.port )
            command = 'echo $HOME'
            result = client.executeCommand( command )
            if not result['OK']:
                self.__errMsg( result['Message'] )
                return
            status, output, _error = result['Value']
            if not status and output:
                # Exit status 0 and non-empty output: cache and switch to it.
                self.homeDir = output.strip()
                self.previous_cwd = self.cwd
                self.cwd = self.homeDir
        self.prompt = '[%s:%s]> ' % ( self.host, self.cwd )
        return
    newPath = argss[0]
    if newPath == '-':
        # "cd -": swap current and previous directories.
        if self.previous_cwd:
            cwd = self.cwd
            self.cwd = self.previous_cwd
            self.previous_cwd = cwd
    elif newPath.startswith( '/' ):
        # Absolute path: take it as-is.
        self.previous_cwd = self.cwd
        self.cwd = newPath
    else:
        # Relative path: resolve against the current directory and normalise.
        newPath = self.cwd + '/' + newPath
        self.previous_cwd = self.cwd
        self.cwd = os.path.normpath( newPath )
    # Reflect the new working directory in the shell prompt.
    self.prompt = '[%s:%s]> ' % ( self.host, self.cwd )
def do_showall( self, args ):
""" Show status of all the components in all the hosts
Usage:
showall [-snmth] [-ASE] [-N name] [-H host] - show status of components
Options:
-d extra debug printout
Sorting options:
-s system
-n component name
-m component module
-t component type
-h component host
Selection options:
-A select agents
-S select services
-E select executors
-N <component pattern> select component with the name containing the pattern
-H <host name> select the given host
-T <setup name> select the given setup
"""
argss = args.split()
sortOption = ''
componentType = ''
componentName = ''
hostName = ''
setupName = ''
debug = False
while len( argss ) > 0:
option = argss[0]
del argss[0]
sortOption = ''
if option == '-s':
sortOption = "System"
elif option == '-n':
sortOption = "Name"
elif option == '-m':
sortOption = "Module"
elif option == '-t':
sortOption = "Type"
elif option == '-h':
sortOption = "Host"
elif option == "-A":
componentType = 'Agents'
elif option == "-S":
componentType = 'Services'
elif option == "-E":
componentType = 'Executors'
elif option == "-d":
debug = True
elif option == "-N":
componentName = argss[0]
del argss[0]
elif option == "-H":
hostName = argss[0]
del argss[0]
elif option == "-T":
setupName = argss[0]
del argss[0]
else:
self.__errMsg( 'Invalid option %s' % option )
return
client = SystemAdministratorIntegrator()
silentHosts = client.getSilentHosts()
respondingHosts = client.getRespondingHosts()
resultAll = client.getOverallStatus()
resultInfo = client.getInfo()
if not resultAll['OK']:
self.__errMsg( resultAll['Message'] )
else:
fields = ["System",'Name','Module','Type','Setup','Host','Runit','Uptime']
records = []
for host in resultAll['Value']:
if hostName and not hostName in host:
continue
result = resultAll['Value'][host]
if not result['OK']:
if debug:
self.__errMsg( "Host %s: %s" % (host,result['Message']) )
continue
rDict = result['Value']
for compType in rDict:
if componentType and componentType != compType:
continue
for system in rDict[compType]:
components = rDict[compType][system].keys()
components.sort()
for component in components:
if componentName and not componentName in component:
continue
record = []
if rDict[compType][system][component]['Installed']:
module = str( rDict[compType][system][component]['Module'] )
record += [ system,component,module,compType.lower()[:-1]]
if resultInfo['OK'] and host in resultInfo['Value'] and resultInfo['Value'][host]['OK']:
setup = resultInfo['Value'][host]['Value']['Setup']
else:
setup = 'Unknown'
if setupName and not setupName in setup:
continue
record += [setup]
record += [host]
record += [str( rDict[compType][system][component]['RunitStatus'] )]
record += [str( rDict[compType][system][component]['Timeup'] )]
records.append(record)
printTable( fields, records, sortOption )
if silentHosts:
print "\n %d out of %d hosts did not respond" % ( len( silentHosts ), len( respondingHosts ) )
def default( self, args ):
    """ Fallback for unrecognised input: transparently forward common shell
        commands to the remote host via do_exec().
    """
    tokens = args.split()
    knownShellCommands = ( 'ls', 'cat', 'pwd', 'chown', 'chmod', 'chgrp',
                           'id', 'date', 'uname', 'cp', 'mv', 'scp' )
    if tokens[0] in knownShellCommands:
        self.do_exec( args )
def do_exit( self, args ):
    """ Exit the shell.
        usage: exit
    """
    # Print a final blank line, then terminate the whole CLI process.
    gLogger.notice( '' )
    sys.exit( 0 )
def do_quit( self, args ):
    """ Exit the shell.
        usage: quit
    """
    # Alias for do_exit: print a blank line and terminate the process.
    gLogger.notice( '' )
    sys.exit( 0 )
def emptyline( self ):
    """ Make an empty input line a no-op.  This presumably overrides the
        cmd.Cmd hook, whose default repeats the last command -- confirm the
        enclosing class derives from cmd.Cmd.
    """
    pass
| vmendez/DIRAC | FrameworkSystem/Client/SystemAdministratorClientCLI.py | Python | gpl-3.0 | 43,605 | [
"ASE",
"DIRAC"
] | 189b90bf20c823e1c587680869842ac9390d709742781921517937884127df6d |
#
# Regression test driver for cmd-line tools
#
# Usage: test_cmdline_tool.py [<options>] <tool> <arguments>
#
# If the -g option is given or the TEST_GENERATE environment variable is set to 1,
# *-expected.<suffix> files will be generated instead of running the tests.
#
# Any generated output is written to the file `basename <argument`-actual.<suffix>
# Any warning or errors are written to stderr.
#
# The test is run with OPENSCAD_FONT_PATH set to the testdata/ttf directory. This
# should ensure we fetch the fonts from there even if they are also installed
# on the system. (E.g. the C glyph is actually different from Debian/Jessie
# installation and what we ship as Liberation-2.00.1).
#
# Returns 0 on passed test
# 1 on error
# 2 on invalid cmd-line options
#
# Author: Marius Kintel <marius@kintel.net>
#
import sys
import os
import glob
import subprocess
import re
import getopt
import shutil
import platform
import string
import difflib
#_debug_tcct = True
# Module-wide switch for the debug() helper below; set to True (or uncomment
# the line above) to trace this test driver's execution.
_debug_tcct = False
def debug(*args):
    """Print *args* on one line prefixed with 'test_cmdline_tool:', but only
    when the module-level _debug_tcct flag is enabled."""
    global _debug_tcct
    if _debug_tcct:
        print 'test_cmdline_tool:',
        for a in args: print a,
        print
def initialize_environment():
    """Fill in run-time options from the environment.

    A non-empty TEST_GENERATE environment variable forces expected-file
    generation, mirroring the -g command line flag.  Always returns True.
    """
    options.generate = options.generate or bool(os.getenv("TEST_GENERATE"))
    return True
def init_expected_filename():
    """Compute the module-level ``expecteddir``/``expectedfilename`` paths
    from the parsed command line options.  # fixme - globals are hard to use
    """
    global expecteddir, expectedfilename
    # An explicit --expected-dir overrides the per-test-name directory.
    if hasattr(options, "expecteddir"):
        dirname = options.expecteddir
    else:
        dirname = options.testname
    expecteddir = os.path.join(options.regressiondir, dirname)
    basename = options.filename + "-expected." + options.suffix
    expectedfilename = os.path.normpath(os.path.join(expecteddir, basename))
def init_actual_filename():
    """Compute the module-level ``actualdir``/``actualfilename`` paths where
    the tool's output for this run is written.  # fixme - globals are hard to use
    """
    global actualdir, actualfilename
    # Cleanup: dropped an unused 'cmdname' local that was computed but never read.
    actualdir = os.path.join(os.getcwd(), options.testname + "-output")
    actualfilename = os.path.join(actualdir, options.filename + "-actual." + options.suffix)
    actualfilename = os.path.normpath(actualfilename)
def verify_test(testname, cmd):
    """Check that the expected-output file for *testname* exists (skipped in
    generation mode).  Returns True when the test can proceed."""
    global expectedfilename, actualfilename
    if not options.generate:
        if not os.path.isfile(expectedfilename):
            print >> sys.stderr, "Error: test '%s' is missing expected output in %s" % (testname, expectedfilename)
            # next 2 imgs parsed by test_pretty_print.py
            print >> sys.stderr, ' actual image: ' + actualfilename + '\n'
            print >> sys.stderr, ' expected image: ' + expectedfilename + '\n'
            return False
    return True
def execute_and_redirect(cmd, params, outfile):
    """Run *cmd* with *params*, sending stdout (and stderr) to *outfile*.

    Returns the process exit code, or a (returncode, output) tuple when
    *outfile* is subprocess.PIPE.  Launch failures are reported on stderr
    and yield -1 as the return code.
    """
    retval = -1
    # Bug fix: 'out' could be unbound in the PIPE return below when Popen
    # itself failed.
    out = None
    try:
        proc = subprocess.Popen([cmd] + params, stdout=outfile, stderr=subprocess.STDOUT)
        out = proc.communicate()[0]
        retval = proc.wait()
    except Exception:
        # Bug fix: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt.
        print >> sys.stderr, "Error running subprocess: ", sys.exc_info()[1]
        print >> sys.stderr, " cmd:", cmd
        print >> sys.stderr, " params:", params
        print >> sys.stderr, " outfile:", outfile
    if outfile == subprocess.PIPE: return (retval, out)
    else: return retval
def normalize_string(s):
    """Apply all modifications to an output string which would have been
    applied if OPENSCAD_TESTING was defined at build time of the executable.

    This truncates all floats, removes ', timestamp = ...' parts, and
    normalizes away import paths from 'file = ' arguments.  The function is
    idempotent."""
    # Timestamps vary between runs and must never influence a comparison.
    without_ts = re.sub(', timestamp = [0-9]+', '', s)

    def _round_float(m):
        v = float(m.group(1))
        magnitude = abs(v)
        if magnitude < 10**-12:
            return "0"
        elif magnitude >= 10**6:
            return "%d" % v
        else:
            return "%.6g" % v

    rounded = re.sub('(-?[0-9]+\\.[0-9]+(e[+-][0-9]+)?)', _round_float, without_ts)

    def _strip_dirs(m):
        # Keep the 'file = "' prefix and the bare filename; drop directories.
        return m.group(1) + m.group(3)

    return re.sub('(file = ")([^"/]*/)*([^"]*")', _strip_dirs, rounded)
def get_normalized_text(filename):
    """Read *filename*, normalize it (see normalize_string) and return the
    text with canonical '\\n' line endings and exactly one trailing newline.
    A missing or unreadable file yields just '\\n'."""
    try:
        # Resource fix: context manager closes the handle on every path (the
        # original leaked it) and the bare 'except:' is narrowed to I/O errors.
        with open(filename) as f:
            text = f.read()
    except (IOError, OSError):
        text = ''
    text = normalize_string(text)
    return text.strip("\r\n").replace("\r\n", "\n") + "\n"
def compare_text(expected, actual):
    """Return True when the two files have identical normalized content."""
    expected_text = get_normalized_text(expected)
    actual_text = get_normalized_text(actual)
    return expected_text == actual_text
def compare_default(resultfilename):
    """Textual comparison fallback: normalize both files and print a unified
    diff to stderr when they differ.  Returns True when they match (or when
    the diff turns out to be empty)."""
    print >> sys.stderr, 'text comparison: '
    print >> sys.stderr, ' expected textfile: ', expectedfilename
    print >> sys.stderr, ' actual textfile: ', resultfilename
    expected_text = get_normalized_text(expectedfilename)
    actual_text = get_normalized_text(resultfilename)
    if not expected_text == actual_text:
        if resultfilename:
            # Diff on stripped lines so indentation-only changes are ignored.
            differences = difflib.unified_diff(
                [line.strip() for line in expected_text.splitlines()],
                [line.strip() for line in actual_text.splitlines()])
            line = None
            for line in differences: sys.stderr.write(line + '\n')
            # An exhausted/empty diff iterator means only whitespace differed.
            if not line: return True
        return False
    return True
def compare_png(resultfilename):
    """Compare the expected PNG against *resultfilename* with an external
    image comparison tool (ImageMagick by default, or diffpng).  Returns True
    when the images are considered equal under the configured comparator."""
    compare_method = 'pixel'
    #args = [expectedfilename, resultfilename, "-alpha", "Off", "-compose", "difference", "-composite", "-threshold", "10%", "-blur", "2", "-threshold", "30%", "-format", "%[fx:w*h*mean]", "info:"]
    args = [expectedfilename, resultfilename, "-alpha", "Off", "-compose", "difference", "-composite", "-threshold", "10%", "-morphology", "Erode", "Square", "-format", "%[fx:w*h*mean]", "info:"]
    # for systems with older imagemagick that doesnt support '-morphology'
    # http://www.imagemagick.org/Usage/morphology/#alturnative
    if options.comparator == 'old':
        args = [expectedfilename, resultfilename, "-alpha", "Off", "-compose", "difference", "-composite", "-threshold", "10%", "-gaussian-blur","3x65535", "-threshold", "99.99%", "-format", "%[fx:w*h*mean]", "info:"]
    if options.comparator == 'ncc':
        # for systems where imagemagick crashes when using the above comparators
        args = [expectedfilename, resultfilename, "-alpha", "Off", "-compose", "difference", "-metric", "NCC", "tmp.png"]
        options.comparison_exec = 'compare'
        compare_method = 'NCC'
    if options.comparator == 'diffpng':
        # alternative to imagemagick based on Yee's algorithm
        # Writing the 'difference image' with --output is very useful for debugging but takes a long time
        # args = [expectedfilename, resultfilename, "--output", resultfilename+'.diff.png']
        args = [expectedfilename, resultfilename]
        compare_method = 'diffpng'
    print >> sys.stderr, 'Image comparison cmdline: '
    print >> sys.stderr, '["'+str(options.comparison_exec) + '"],' + str(args)
    # these two lines are parsed by the test_pretty_print.py
    print >> sys.stderr, ' actual image: ' + resultfilename + '\n'
    print >> sys.stderr, ' expected image: ' + expectedfilename + '\n'
    if not resultfilename:
        print >> sys.stderr, "Error: Error during test image generation"
        return False
    (retval, output) = execute_and_redirect(options.comparison_exec, args, subprocess.PIPE)
    print "Image comparison return:", retval, "output:", output
    if retval == 0:
        if compare_method=='pixel':
            # The ImageMagick expression prints the mean error scaled by the
            # pixel count; fewer than 32 differing "pixel units" passes.
            pixelerr = int(float(output.strip()))
            if pixelerr < 32: return True
            else: print >> sys.stderr, pixelerr, ' pixel errors'
        elif compare_method=='NCC':
            # Normalized cross correlation: 1.0 is a perfect match.  0.0 is
            # accepted too (reported by some versions for identical images).
            thresh = 0.95
            ncc_err = float(output.strip())
            if ncc_err > thresh or ncc_err==0.0: return True
            else: print >> sys.stderr, ncc_err, ' Images differ: NCC comparison < ', thresh
        elif compare_method=='diffpng':
            if 'MATCHES:' in output: return True
            if 'DIFFERS:' in output: return False
    return False
def compare_with_expected(resultfilename):
    """Dispatch to a suffix-specific comparator (e.g. compare_png) when one is
    defined in this module, else fall back to plain text comparison.
    Generation mode always succeeds: there is nothing to compare against."""
    if options.generate:
        return True
    comparator = globals().get("compare_" + options.suffix, compare_default)
    return comparator(resultfilename)
def run_test(testname, cmd, args):
    """Run *cmd* with *args*, appending the output file path (the -actual
    file, or the -expected file in generation mode) as the last argument.
    Returns the output filename, or None when the process could not start."""
    cmdname = os.path.split(options.cmd)[1]
    if options.generate:
        # Generation mode: write directly into the expected-results directory.
        if not os.path.exists(expecteddir):
            try:
                os.makedirs(expecteddir)
            except OSError as e:
                if e.errno != 17: raise e # catch File Exists to allow parallel runs
        outputname = expectedfilename
    else:
        if not os.path.exists(actualdir):
            try:
                os.makedirs(actualdir)
            except OSError as e:
                if e.errno != 17: raise e # catch File Exists to allow parallel runs
        outputname = actualfilename
    outputname = os.path.normpath(outputname)
    outfile = open(outputname, "wb")
    try:
        cmdline = [cmd] + args + [outputname]
        print 'run_test() cmdline:',cmdline
        # Point OPENSCAD_FONT_PATH at the bundled test fonts so results do not
        # depend on the fonts installed on the host system.
        fontdir = os.path.join(os.path.dirname(cmd), "..", "testdata")
        fontenv = os.environ.copy()
        fontenv["OPENSCAD_FONT_PATH"] = fontdir
        print 'using font directory:', fontdir
        sys.stdout.flush()
        proc = subprocess.Popen(cmdline, env = fontenv, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        comresult = proc.communicate()
        stdouttext, errtext = comresult[0],comresult[1]
        # Forward any captured output so the test log shows what happened.
        if errtext != None and len(errtext) > 0:
            print >> sys.stderr, "stderr output: " + errtext
        if stdouttext != None and len(stdouttext) > 0:
            print >> sys.stderr, "stdout output: " + stdouttext
        outfile.close()
        if proc.returncode != 0:
            print >> sys.stderr, "Error: %s failed with return code %d" % (cmdname, proc.returncode)
        return outputname
    except OSError, err:
        print >> sys.stderr, "Error: %s \"%s\"" % (err.strerror, cmd)
        return None
class Options:
    """A write-through attribute bag: every attribute set on an instance is
    stored in the ``options`` dict, so values can be read/written as
    attributes and also inspected as a plain dictionary."""
    def __init__(self):
        # Write straight into __dict__ to avoid recursing into __setattr__.
        self.__dict__['options'] = {}
    def __setattr__(self, name, value):
        self.options[name] = value
    def __getattr__(self, name):
        # Bug fix: raise AttributeError (not KeyError) for unknown names so
        # hasattr() works on Python 3, where it only swallows AttributeError.
        # (The driver relies on hasattr(options, "expecteddir") etc.)
        try:
            return self.options[name]
        except KeyError:
            raise AttributeError(name)
def usage():
    """Print the command line help for this test driver to stderr."""
    print >> sys.stderr, "Usage: " + sys.argv[0] + " [<options>] <cmdline-tool> <argument>"
    print >> sys.stderr, "Options:"
    print >> sys.stderr, "  -g, --generate           Generate expected output for the given tests"
    print >> sys.stderr, "  -s, --suffix=<suffix>    Write -expected and -actual files with the given suffix instead of .txt"
    print >> sys.stderr, "  -e, --expected-dir=<dir> Use -expected files from the given dir (to share files between test drivers)"
    print >> sys.stderr, "  -t, --test=<name>        Specify test name instead of deducting it from the argument (defaults to basename <exe>)"
    print >> sys.stderr, "  -f, --file=<name>        Specify test file instead of deducting it from the argument (default to basename <first arg>)"
    print >> sys.stderr, "  -c, --convexec=<name>    Path to ImageMagick 'convert' executable"
if __name__ == '__main__':
    # Handle command-line arguments
    try:
        debug('args:'+str(sys.argv))
        opts, args = getopt.getopt(sys.argv[1:], "gs:e:c:t:f:m", ["generate", "convexec=", "suffix=", "expected_dir=", "test=", "file=", "comparator="])
        debug('getopt args:'+str(sys.argv))
    except getopt.GetoptError, err:
        usage()
        sys.exit(2)
    # Populate the shared Options bag with defaults before parsing flags.
    global options
    options = Options()
    options.regressiondir = os.path.join(os.path.split(sys.argv[0])[0], "regression")
    options.generate = False
    options.suffix = "txt"
    options.comparator = ""
    for o, a in opts:
        if o in ("-g", "--generate"): options.generate = True
        elif o in ("-s", "--suffix"):
            # Accept both "-s png" and "-s .png".
            if a[0] == '.': options.suffix = a[1:]
            else: options.suffix = a
        elif o in ("-e", "--expected-dir"):
            options.expecteddir = a
        elif o in ("-t", "--test"):
            options.testname = a
        elif o in ("-f", "--file"):
            options.filename = a
        elif o in ("-c", "--compare-exec"):
            options.comparison_exec = os.path.normpath( a )
        elif o in ("-m", "--comparator"):
            options.comparator = a
    # <cmdline-tool> and <argument>
    if len(args) < 2:
        usage()
        sys.exit(2)
    options.cmd = args[0]
    # If only one test file, we can usually deduct the test name from the file
    if len(args) == 2:
        basename = os.path.splitext(args[1])[0]
        path, options.filename = os.path.split(basename)
        print >> sys.stderr, basename
        print >> sys.stderr, path, options.filename
        print >> sys.stderr, options.filename
    if not hasattr(options, "filename"):
        print >> sys.stderr, "Filename cannot be deducted from arguments. Specify test filename using the -f option"
        sys.exit(2)
    if not hasattr(options, "testname"):
        options.testname = os.path.split(args[0])[1]
    # Initialize and verify run-time environment
    if not initialize_environment(): sys.exit(1)
    init_expected_filename()
    init_actual_filename()
    # Verify test environment
    verification = verify_test(options.testname, options.cmd)
    # Run first, compare second, so the -actual file exists even on failure.
    resultfile = run_test(options.testname, options.cmd, args[1:])
    if not resultfile: exit(1)
    if not verification or not compare_with_expected(resultfile): exit(1)
| thdtjsdn/openscad | tests/test_cmdline_tool.py | Python | gpl-2.0 | 13,344 | [
"Gaussian"
] | e557f15626c40ca17f2d75b4b4c0775f914a0af413106cd6d9cdcd7dad101ef3 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Custom astroid checker for config calls."""
import sys
import pathlib
import yaml
import astroid
from pylint import interfaces, checkers
from pylint.checkers import utils
# Populated by register(): list of valid option names from configdata.yml,
# or None when the file could not be found (FAILED_LOAD is then True).
OPTIONS = None
FAILED_LOAD = False
class ConfigChecker(checkers.BaseChecker):

    """Custom astroid checker for config calls."""

    __implements__ = interfaces.IAstroidChecker
    name = 'config'
    msgs = {
        'E9998': ('%s is no valid config option.',  # flake8: disable=S001
                  'bad-config-option',
                  None),
    }
    priority = -1
    printed_warning = False

    @utils.check_messages('bad-config-option')
    def visit_attribute(self, node):
        """Visit a getattr node."""
        # Only act at the end of a config.val.foo.bar chain.
        if isinstance(node.parent, astroid.Attribute):
            return
        # FIXME:conf do some proper check for this...
        dotted = node.as_string()
        marker = 'config.val.'
        if dotted.startswith(marker):
            self._check_config(node, dotted[len(marker):])

    def _check_config(self, node, name):
        """Check that we're accessing proper config options."""
        if FAILED_LOAD:
            self._warn_once()
            return
        if name not in OPTIONS:
            self.add_message('bad-config-option', node=node, args=name)

    def _warn_once(self):
        """Emit the missing-configdata warning a single time per run."""
        if ConfigChecker.printed_warning:
            return
        print("[WARN] Could not find configdata.yml. Please run "
              "pylint from qutebrowser root.", file=sys.stderr)
        print("Skipping some checks...", file=sys.stderr)
        ConfigChecker.printed_warning = True
def register(linter):
    """Register this checker."""
    global OPTIONS
    global FAILED_LOAD
    linter.register_checker(ConfigChecker(linter))
    # Load the set of valid config option names; checks degrade gracefully
    # when the data file is not reachable (e.g. pylint run from elsewhere).
    yaml_file = pathlib.Path('qutebrowser') / 'config' / 'configdata.yml'
    if yaml_file.exists():
        with yaml_file.open(mode='r', encoding='utf-8') as f:
            OPTIONS = list(yaml.safe_load(f))
    else:
        OPTIONS = None
        FAILED_LOAD = True
        return
| t-wissmann/qutebrowser | scripts/dev/pylint_checkers/qute_pylint/config.py | Python | gpl-3.0 | 2,843 | [
"VisIt"
] | 64ce9fe511121df87aadade4c2a139e52df4f46c90f2bdaa56727bd2c08c909a |
# -*- coding: utf-8 -*-
from collections import defaultdict
import itertools
import math
import networkx as nx
import re
from typing import Any, DefaultDict, Dict, List, Union
from django.db import connection
from django.http import HttpRequest, JsonResponse
from django.shortcuts import get_object_or_404
from rest_framework.decorators import api_view
from catmaid import state
from catmaid.models import UserRole, Treenode, ClassInstance, \
TreenodeConnector, Location, SamplerInterval
from catmaid.control.authentication import requires_user_role, \
can_edit_class_instance_or_fail, can_edit_or_fail
from catmaid.control.common import (get_relation_to_id_map,
get_class_to_id_map, insert_into_log, _create_relation,
get_request_bool, get_request_list)
from catmaid.control.neuron import _delete_if_empty
from catmaid.control.node import _fetch_location, _fetch_locations
from catmaid.control.link import create_connector_link
from catmaid.util import Point3D, is_collinear
def can_edit_treenode_or_fail(user, project_id, treenode_id) -> bool:
    """ Tests if a user has permissions to edit the neuron which the skeleton of
    the treenode models. Will return true or throw an exception. Cannot return false. """
    neuron_id = _treenode_info(project_id, treenode_id)['neuron_id']
    return can_edit_class_instance_or_fail(user, neuron_id, 'neuron')
def can_edit_skeleton_or_fail(user, project_id, skeleton_id, model_of_relation_id) -> bool:
    """Test if a user has permission to edit a neuron modeled by a skeleton. Will return true
    or throw an exception. Cannot return false."""
    db_cursor = connection.cursor()
    # Find the neuron class instance that this skeleton is a model of.
    db_cursor.execute("""
        SELECT
            ci2.id as neuron_id
        FROM
            class_instance ci,
            class_instance ci2,
            class_instance_class_instance cici
        WHERE ci.project_id = %s
          AND ci.id = %s
          AND ci.id = cici.class_instance_a
          AND ci2.id = cici.class_instance_b
          AND cici.relation_id = %s
        """, (project_id, skeleton_id, model_of_relation_id))
    if not db_cursor.rowcount:
        raise ValueError('No neuron modeled by skeleton %s' % skeleton_id)
    neuron_id = db_cursor.fetchone()[0]
    return can_edit_class_instance_or_fail(user, neuron_id, 'neuron')
@requires_user_role(UserRole.Annotate)
def create_treenode(request:HttpRequest, project_id=None) -> JsonResponse:
    """
    Add a new treenode to the database
    ----------------------------------

    1. Add new treenode for a given skeleton id. Parent should not be empty.
       return: new treenode id
       If the parent's skeleton has a single node and belongs to the
       'Isolated synaptic terminals' group, then reassign ownership
       of the skeleton and the neuron to the user. The treenode remains
       property of the original user who created it.

    2. Add new treenode (root) and create a new skeleton (maybe for a given
       neuron) return: new treenode id and skeleton id.

    If a neuron id is given, use that one to create the skeleton as a model of
    it.
    """
    params = {}
    # Defaults for optional POST parameters, grouped by the type they are
    # coerced to below.
    float_values = {
        'x': 0,
        'y': 0,
        'z': 0,
        'radius': 0}
    int_values = {
        'confidence': 0,
        'useneuron': -1,
        'parent_id': -1}
    string_values:Dict = {}
    for p in float_values.keys():
        params[p] = float(request.POST.get(p, float_values[p]))
    for p in int_values.keys():
        params[p] = int(request.POST.get(p, int_values[p]))
    for p in string_values.keys():
        params[p] = request.POST.get(p, string_values[p])
    # Get optional initial links to connectors, expect each entry to be a list
    # of connector ID, relation ID and confidence.
    links = get_request_list(request.POST, 'links', [], map_fn=int)
    # Make sure the back-end is in the expected state if the node should have a
    # parent and will therefore become part of another skeleton.
    parent_id = int(params['parent_id'])
    has_parent = parent_id and parent_id != -1
    if has_parent:
        # Validate the client-provided state against the parent and lock it
        # against concurrent edits before inserting the child.
        state.validate_state(parent_id, request.POST.get('state'),
                parent_edittime=has_parent, lock=True)
    # The request user is both creator and editor of the new node.
    new_treenode = _create_treenode(project_id, request.user, request.user,
            params['x'], params['y'], params['z'], params['radius'],
            params['confidence'], params['useneuron'], params['parent_id'],
            neuron_name=request.POST.get('neuron_name', None))
    # Create all initial links
    if links:
        created_links = create_connector_link(project_id, request.user.id,
                new_treenode.treenode_id, new_treenode.skeleton_id, links)
    else:
        created_links = []
    return JsonResponse({
        'treenode_id': new_treenode.treenode_id,
        'skeleton_id': new_treenode.skeleton_id,
        'edition_time': new_treenode.edition_time,
        'parent_edition_time': new_treenode.parent_edition_time,
        'created_links': created_links
    })
@requires_user_role(UserRole.Annotate)
def insert_treenode(request:HttpRequest, project_id=None) -> JsonResponse:
    """
    Create a new treenode between two existing nodes. Its creator and
    creation_date information will be set to information of child node. No node
    will be created, if the node on the edge between the given child and parent
    node.
    """
    # Use creation time, if part of parameter set
    params:Dict[str, float] = {}
    float_values = {
        'x': 0,
        'y': 0,
        'z': 0,
        'radius': 0
    }
    int_values = {
        'confidence': 0,
        'parent_id': -1,
        'child_id': -1
    }
    for p in float_values.keys():
        params[p] = float(request.POST.get(p, float_values[p]))
    for p in int_values.keys():
        params[p] = int(request.POST.get(p, int_values[p]))
    # If siblings should be taken over, all children of the parent node will
    # become children of the inserted node. This requires extra state
    # information: the child state for the parent.
    takeover_child_ids = get_request_list(request.POST,
            'takeover_child_ids', None, int)
    # Get optional initial links to connectors, expect each entry to be a list
    # of connector ID and relation ID.
    try:
        links = get_request_list(request.POST, 'links', [], int)
    except Exception as e:
        raise ValueError(f"Couldn't parse list parameter: {e}")
    # Make sure the back-end is in the expected state if the node should have a
    # parent and will therefore become part of another skeleton.
    parent_id = params.get('parent_id')
    child_id = params.get('child_id')
    if parent_id not in (-1, None):
        s = request.POST.get('state')
        # Regular edge insertion is assumed if a child ID is provided.
        partial_child_checks = [] if child_id in (-1, None) else [child_id]
        if takeover_child_ids:
            partial_child_checks.extend(takeover_child_ids)
        # NOTE(review): the trailing comma makes this statement an unused
        # 1-tuple expression; the validation call itself still executes.
        state.validate_state(parent_id, s, node=True,
                children=partial_child_checks or False, lock=True),
    # Find child and parent of new treenode
    child = Treenode.objects.get(pk=params['child_id'])
    parent = Treenode.objects.get(pk=params['parent_id'])
    # Make sure both nodes are actually child and parent
    if not child.parent == parent:
        raise ValueError('The provided nodes need to be child and parent')
    # Make sure the requested location for the new node is on the edge between
    # both existing nodes if the user has no edit permissions on the neuron.
    try:
        can_edit_treenode_or_fail(request.user, project_id, parent.id)
        user, time = request.user, None
    except:
        # No edit permission: only allow the insertion when the new node lies
        # (within tolerance) on the segment between child and parent.
        child_loc = Point3D(child.location_x, child.location_y, child.location_z)
        parent_loc = Point3D(parent.location_x, parent.location_y, parent.location_z)
        new_node_loc = Point3D(params['x'], params['y'], params['z'])
        if not is_collinear(child_loc, parent_loc, new_node_loc, True, 0.001):
            raise ValueError('New node location has to be between child and parent')
        # Use creator and creation time of the neighboring node that was
        # created last.
        if child.creation_time < parent.creation_time:
            user, time = parent.user, parent.creation_time
        else:
            user, time = child.user, child.creation_time
    # Create new treenode
    new_treenode = _create_treenode(project_id,
            user, request.user, params['x'], params['y'], params['z'],
            params['radius'], params['confidence'], -1, params['parent_id'], time)
    # Update parent of child to new treenode, do this in raw SQL to also get the
    # updated edition time. Update also takeover children.
    cursor = connection.cursor()
    paramlist = [new_treenode.treenode_id, child.id]
    if takeover_child_ids:
        paramlist.extend(takeover_child_ids)
        child_template = ",".join(("%s",) * (len(takeover_child_ids) + 1))
    else:
        child_template = "%s"
    cursor.execute(f"""
        UPDATE treenode SET parent_id = %s
        WHERE id IN ({child_template})
        RETURNING id, edition_time
    """, paramlist)
    result = cursor.fetchall()
    # Every re-parented child must be reflected in the RETURNING rows.
    if not result or (len(paramlist) - 1) != len(result):
        raise ValueError("Couldn't update parent of inserted node's child: " + child.id)
    child_edition_times = [[k,v] for k,v in result]
    # Create all initial links
    if links:
        created_links = create_connector_link(project_id, request.user.id,
                new_treenode.treenode_id, new_treenode.skeleton_id, links)
    else:
        created_links = []
    return JsonResponse({
        'treenode_id': new_treenode.treenode_id,
        'skeleton_id': new_treenode.skeleton_id,
        'edition_time': new_treenode.edition_time,
        'parent_edition_time': new_treenode.parent_edition_time,
        'child_edition_times': child_edition_times,
        'created_links': created_links
    })
class NewTreenode(object):
    """Bundle the data describing a freshly created treenode, exactly as it
    is sent back to the client: the new node's ID and edition time, the
    skeleton it belongs to and the parent's edition time (None for roots).
    """
    def __init__(self, treenode_id, edition_time, skeleton_id,
            parent_edition_time):
        (self.treenode_id, self.edition_time, self.skeleton_id,
                self.parent_edition_time) = (treenode_id, edition_time,
                skeleton_id, parent_edition_time)
def _create_treenode(project_id, creator, editor, x, y, z, radius, confidence,
        neuron_id, parent_id, creation_time=None, neuron_name=None) -> NewTreenode:
    """Create a treenode at (x, y, z) with the given radius and confidence.

    If parent_id is not -1, the node is appended to the parent's skeleton.
    Otherwise a new root node is created together with a new skeleton and,
    unless an existing neuron_id is given, a new neuron. neuron_name may
    contain "{nX}" counting patterns which are replaced with counters
    incremented past existing matching neuron names. Returns a NewTreenode;
    any failure is re-raised as ValueError with a context message.
    """
    relation_map = get_relation_to_id_map(project_id)
    class_map = get_class_to_id_map(project_id)
    def insert_new_treenode(parent_id=None, skeleton_id=None):
        """ If the parent_id is not None and the skeleton_id of the parent does
        not match with the skeleton.id, then the database will throw an error
        given that the skeleton_id, being defined as foreign key in the
        treenode table, will not meet the being-foreign requirement.
        """
        new_treenode = Treenode()
        new_treenode.user = creator
        new_treenode.editor = editor
        new_treenode.project_id = project_id
        # Only override the DB default if an explicit time was passed in.
        if creation_time:
            new_treenode.creation_time = creation_time
        new_treenode.location_x = float(x)
        new_treenode.location_y = float(y)
        new_treenode.location_z = float(z)
        # A missing or NaN radius defaults to zero.
        new_radius = int(radius if (radius and not math.isnan(radius)) else 0)
        new_treenode.radius = new_radius
        new_treenode.skeleton_id = skeleton_id
        # A missing or NaN confidence defaults to 5 (maximum); an explicit
        # zero is kept as-is.
        new_confidence = int(confidence if not math.isnan(confidence) and (confidence or confidence == 0) else 5)
        new_treenode.confidence = new_confidence
        if parent_id:
            new_treenode.parent_id = parent_id
        new_treenode.save()
        return new_treenode
    def relate_neuron_to_skeleton(neuron, skeleton):
        # Make the skeleton a model of the neuron.
        return _create_relation(creator, project_id,
                relation_map['model_of'], skeleton, neuron)
    response_on_error = ''
    try:
        if -1 != int(parent_id): # A root node and parent node exist
            # Select the parent treenode for update to prevent race condition
            # updates to its skeleton ID while this node is being created.
            cursor = connection.cursor()
            cursor.execute('''
                SELECT t.skeleton_id, t.edition_time FROM treenode t
                WHERE t.id = %s FOR NO KEY UPDATE OF t
                ''', (parent_id,))
            if cursor.rowcount != 1:
                raise ValueError('Parent treenode %s does not exist' % parent_id)
            parent_node = cursor.fetchone()
            parent_skeleton_id = parent_node[0]
            parent_edition_time = parent_node[1]
            # Raise an Exception if the user doesn't have permission to edit
            # the neuron the skeleton of the treenode is modeling.
            can_edit_skeleton_or_fail(editor, project_id, parent_skeleton_id,
                    relation_map['model_of'])
            response_on_error = 'Could not insert new treenode!'
            new_treenode = insert_new_treenode(parent_id, parent_skeleton_id)
            return NewTreenode(new_treenode.id, new_treenode.edition_time,
                    parent_skeleton_id, parent_edition_time)
        else:
            # No parent node: We must create a new root node, which needs a
            # skeleton and a neuron to belong to.
            response_on_error = 'Could not insert new treenode instance!'
            new_skeleton = ClassInstance()
            new_skeleton.user = creator
            new_skeleton.project_id = project_id
            new_skeleton.class_column_id = class_map['skeleton']
            new_skeleton.name = 'skeleton'
            new_skeleton.save()
            # Save once to obtain an ID, then rename using that ID.
            new_skeleton.name = 'skeleton %d' % new_skeleton.id
            new_skeleton.save()
            if -1 != neuron_id:
                # Check that the neuron to use exists
                if 0 == ClassInstance.objects.filter(pk=neuron_id).count():
                    neuron_id = -1
            if -1 != neuron_id:
                # Raise an Exception if the user doesn't have permission to
                # edit the existing neuron.
                can_edit_class_instance_or_fail(editor, neuron_id, 'neuron')
                # A neuron already exists, so we use it
                response_on_error = 'Could not relate the neuron model to ' \
                        'the new skeleton!'
                relate_neuron_to_skeleton(neuron_id, new_skeleton.id)
                response_on_error = 'Could not insert new treenode!'
                new_treenode = insert_new_treenode(None, new_skeleton.id)
                return NewTreenode(new_treenode.id, new_treenode.edition_time,
                        new_skeleton.id, None)
            else:
                # A neuron does not exist, therefore we put the new skeleton
                # into a new neuron.
                response_on_error = 'Failed to insert new instance of a neuron.'
                new_neuron = ClassInstance()
                new_neuron.user = creator
                new_neuron.project_id = project_id
                new_neuron.class_column_id = class_map['neuron']
                if neuron_name:
                    # Create a regular expression to find allowed patterns. The
                    # first group is the whole {nX} part, while the second group
                    # is X only.
                    counting_pattern = re.compile(r"(\{n(\d+)\})")
                    # Look for patterns, replace all {n} with {n1} to normalize.
                    neuron_name = neuron_name.replace("{n}", "{n1}")
                    if counting_pattern.search(neuron_name):
                        # Find starting values for each substitution.
                        counts = [int(m.groups()[1]) for m in counting_pattern.finditer(neuron_name)]
                        # Find existing matching neurons in database.
                        name_match = counting_pattern.sub(r"(\d+)", neuron_name)
                        name_pattern = re.compile(name_match)
                        matching_neurons = ClassInstance.objects.filter(
                                project_id=project_id,
                                class_column_id=class_map['neuron'],
                                name__regex=name_match).order_by('name')
                        # Increment substitution values based on existing neurons.
                        for n in matching_neurons:
                            for i, (count, g) in enumerate(zip(counts, name_pattern.search(n.name).groups())): # type: ignore
                                if count == int(g):
                                    counts[i] = count + 1
                        # Substitute values: replace each {nX} placeholder
                        # left-to-right with its computed counter value.
                        count_ind = 0
                        m = counting_pattern.search(neuron_name)
                        while m:
                            neuron_name = m.string[:m.start()] + str(counts[count_ind]) + m.string[m.end():]
                            count_ind = count_ind + 1
                            m = counting_pattern.search(neuron_name)
                    new_neuron.name = neuron_name
                else:
                    # No name given: save once to obtain an ID, then name
                    # the neuron after it.
                    new_neuron.name = 'neuron'
                    new_neuron.save()
                    new_neuron.name = 'neuron %d' % new_neuron.id
                new_neuron.save()
                response_on_error = 'Could not relate the neuron model to ' \
                        'the new skeleton!'
                relate_neuron_to_skeleton(new_neuron.id, new_skeleton.id)
                response_on_error = 'Failed to insert instance of treenode.'
                new_treenode = insert_new_treenode(None, new_skeleton.id)
                response_on_error = 'Failed to write to logs.'
                new_location = (new_treenode.location_x, new_treenode.location_y,
                        new_treenode.location_z)
                insert_into_log(project_id, creator.id, 'create_neuron',
                        new_location, 'Create neuron %d and skeleton '
                        '%d' % (new_neuron.id, new_skeleton.id))
                return NewTreenode(new_treenode.id, new_treenode.edition_time,
                        new_skeleton.id, None)
    except Exception as e:
        import traceback
        # Wrap any failure with the last recorded context message.
        raise ValueError("%s: %s %s" % (response_on_error, str(e),
                str(traceback.format_exc())))
@requires_user_role(UserRole.Annotate)
def update_parent(request:HttpRequest, project_id=None, treenode_id=None) -> JsonResponse:
    """Make the treenode <parent_id> (POST) the parent of <treenode_id>.

    Both nodes have to be part of the same skeleton and the client has to
    provide a matching state. Raises ValueError if the skeletons differ.
    """
    treenode_id = int(treenode_id)
    parent_id = int(request.POST.get('parent_id', -1))
    can_edit_treenode_or_fail(request.user, project_id, treenode_id)
    # Make sure the back-end is in the expected state
    state.validate_state(treenode_id, request.POST.get('state'),
            neighborhood=True, lock=True)
    child = get_object_or_404(Treenode, pk=treenode_id, project_id=project_id)
    parent = get_object_or_404(Treenode, pk=parent_id, project_id=project_id)
    if child.skeleton_id != parent.skeleton_id:
        # Format with % here: passing the arguments separately to ValueError
        # would leave the %s placeholders in the message unexpanded.
        raise ValueError("Child node %s is in skeleton %s but parent node %s is in skeleton %s!" % (
                treenode_id, child.skeleton_id, parent_id, parent.skeleton_id))
    child.parent_id = parent_id
    child.save()
    return JsonResponse({
        'success': True,
        'node_id': child.id,
        'parent_id': child.parent_id,
        'skeleton_id': child.skeleton_id
    })
def update_node_radii(node_ids, radii, cursor=None) -> Dict:
    """Update the radius of a list of treenodes and return their old radii.

    Both lists/tuples and single values can be supplied for <node_ids> and
    <radii>; a single radius is applied to all passed in nodes. Returns a
    dict mapping node ID to a dict with the keys 'old', 'new',
    'edition_time' and 'skeleton_id'.

    Raises ValueError if the input lengths don't match, a radius is NaN or
    a node couldn't be found.
    """
    # Make sure we deal with sequences
    if not isinstance(node_ids, (list, tuple)):
        node_ids = (node_ids,)
    # If only a single radius value is available, use it for every input
    # node ID.
    if not isinstance(radii, (list, tuple)):
        radii = len(node_ids) * (radii,)
    if len(node_ids) != len(radii):
        raise ValueError("Number of treenodes doesn't match number of radii")
    invalid_radii = [r for r in radii if math.isnan(r)]
    if invalid_radii:
        # Stringify explicitly: joining raw floats would raise a TypeError
        # instead of the intended error message.
        raise ValueError("Some radii were not numbers: " +
                ", ".join(str(r) for r in invalid_radii))
    # Make sure we have a database cursor
    cursor = cursor or connection.cursor()
    # Build a VALUES list of the form (node id, radius),(...). All values are
    # numeric at this point (validated radii, numeric IDs), so interpolating
    # them into the query is safe.
    node_radii = "(" + "),(".join(map(lambda pair: f"{pair[0]},{pair[1]}",
            zip(node_ids, radii))) + ")"
    cursor.execute(f'''
        UPDATE treenode t SET radius = target.new_radius
        FROM (SELECT x.id, x.radius AS old_radius, y.new_radius
              FROM treenode x
              INNER JOIN (VALUES {node_radii}) y(id, new_radius)
              ON x.id=y.id FOR NO KEY UPDATE) target
        WHERE t.id = target.id
        RETURNING t.id, target.old_radius, target.new_radius,
                  t.edition_time, t.skeleton_id;
    ''')
    updated_rows = cursor.fetchall()
    if len(node_ids) != len(updated_rows):
        missing_ids = frozenset(node_ids) - frozenset([r[0] for r in updated_rows])
        raise ValueError("Couldn't find treenodes " +
                ",".join([str(ni) for ni in missing_ids]))
    return {r[0]: {
        'old': r[1],
        'new': float(r[2]),
        'edition_time': r[3],
        'skeleton_id': r[4]
    } for r in updated_rows}
@requires_user_role(UserRole.Annotate)
def update_radii(request:HttpRequest, project_id=None) -> JsonResponse:
    """Update the radius of one or more nodes"""
    post = request.POST
    # Collect indexed form fields treenode_ids[...] and treenode_radii[...].
    treenode_ids = [int(v) for k, v in post.items()
            if k.startswith('treenode_ids[')]
    radii = [float(v) for k, v in post.items()
            if k.startswith('treenode_radii[')]
    cursor = connection.cursor()
    # The client has to prove it knows the current state of all nodes.
    state.validate_state(treenode_ids, post.get('state'),
            multinode=True, lock=True, cursor=cursor)
    return JsonResponse({
        'success': True,
        'updated_nodes': update_node_radii(treenode_ids, radii, cursor),
    })
@requires_user_role(UserRole.Annotate)
def update_radius(request:HttpRequest, project_id=None, treenode_id=None) -> JsonResponse:
    """Update the radius of a treenode and, depending on the 'option' POST
    parameter, of related nodes in the same skeleton.

    Options:
      0 -- only the given node (default)
      1 -- from the node to the next branch or end node (included)
      2 -- from the node to the previous branch node or root (excluded)
      3 -- from the node to the previous node with a radius (excluded)
      4 -- from the node to the root node (included)
      5 -- all nodes of the skeleton

    Returns the old radii of all updated nodes plus the new radius. Raises
    ValueError for a non-numeric radius or an unknown option.
    """
    treenode_id = int(treenode_id)
    radius = float(request.POST.get('radius', -1))
    if math.isnan(radius):
        raise ValueError("Radius '%s' is not a number!" % request.POST.get('radius'))
    option = int(request.POST.get('option', 0))
    cursor = connection.cursor()
    # Make sure the back-end is in the expected state
    state.validate_state(treenode_id, request.POST.get('state'),
            node=True, lock=True, cursor=cursor)
    def create_update_response(updated_nodes, radius) -> JsonResponse:
        # Shared response format for all options.
        return JsonResponse({
            'success': True,
            'updated_nodes': updated_nodes,
            'new_radius': radius
        })
    if 0 == option:
        # Update radius only for the passed in treenode and return the old
        # radius.
        old_radii = update_node_radii(treenode_id, radius, cursor)
        return create_update_response(old_radii, radius)
    # All remaining options need the parent/child topology of the whole
    # skeleton. Pass the node ID as a query parameter rather than
    # interpolating it into the SQL string.
    cursor.execute('''
        SELECT id, parent_id, radius
        FROM treenode
        WHERE skeleton_id = (SELECT t.skeleton_id FROM treenode t WHERE id = %s)
        ''', (treenode_id,))
    if 1 == option:
        # Update radius from treenode_id to next branch or end node (included)
        children:DefaultDict[Any, List] = defaultdict(list)
        for row in cursor.fetchall():
            children[row[1]].append(row[0])
        include = [treenode_id]
        c = children[treenode_id]
        # Follow the single-child chain downstream.
        while 1 == len(c):
            child = c[0]
            include.append(child)
            c = children[child]
        old_radii = update_node_radii(include, radius, cursor)
        return create_update_response(old_radii, radius)
    if 2 == option:
        # Update radius from treenode_id to prev branch node or root (excluded)
        parents = {}
        children = defaultdict(list)
        for row in cursor.fetchall():
            parents[row[0]] = row[1]
            children[row[1]].append(row[0])
        include = [treenode_id]
        parent = parents[treenode_id]
        # Walk upstream while the parent is an unbranched non-root node.
        while parent and parents[parent] and 1 == len(children[parent]):
            include.append(parent)
            parent = parents[parent]
        old_radii = update_node_radii(include, radius, cursor)
        return create_update_response(old_radii, radius)
    if 3 == option:
        # Update radius from treenode_id to prev node with radius (excluded)
        parents = {}
        for row in cursor.fetchall():
            if row[2] < 0 or row[0] == treenode_id: # DB default radius is 0 but is initialized to -1 elsewhere
                parents[row[0]] = row[1]
        include = [treenode_id]
        parent = parents[treenode_id]
        while parent in parents:
            include.append(parent)
            parent = parents[parent]
        old_radii = update_node_radii(include, radius, cursor)
        return create_update_response(old_radii, radius)
    if 4 == option:
        # Update radius from treenode_id to root (included)
        parents = {row[0]: row[1] for row in cursor.fetchall()}
        include = [treenode_id]
        parent = parents[treenode_id]
        while parent:
            include.append(parent)
            parent = parents[parent]
        old_radii = update_node_radii(include, radius, cursor)
        return create_update_response(old_radii, radius)
    if 5 == option:
        # Update radius of all nodes (in a single query)
        skeleton_id = Treenode.objects.get(pk=treenode_id).skeleton_id
        include = list(Treenode.objects.filter(skeleton_id=skeleton_id) \
                .values_list('id', flat=True))
        old_radii = update_node_radii(include, radius, cursor)
        return create_update_response(old_radii, radius)
    # Previously an unknown option fell through and returned None, which made
    # Django raise a confusing error. Fail explicitly instead.
    raise ValueError("Unknown radius update option: %s" % option)
@requires_user_role(UserRole.Annotate)
def delete_treenode(request:HttpRequest, project_id=None) -> JsonResponse:
    """ Deletes a treenode. If the skeleton has a single node, deletes the
    skeleton and its neuron. Returns the parent_id, if any.

    Deletion is refused for root nodes that still have children and for
    nodes referenced by reconstruction samplers. Children of a deleted
    non-root node are re-attached to its parent.
    """
    treenode_id = int(request.POST.get('treenode_id', -1))
    # Raise an exception if the user doesn't have permission to edit the
    # treenode.
    can_edit_or_fail(request.user, treenode_id, 'treenode')
    # Raise an Exception if the user doesn't have permission to edit the neuron
    # the skeleton of the treenode is modeling.
    can_edit_treenode_or_fail(request.user, project_id, treenode_id)
    # Make sure the back-end is in the expected state
    state.validate_state(treenode_id, request.POST.get('state'), lock=True,
            neighborhood=True)
    treenode = Treenode.objects.get(pk=treenode_id)
    parent_id = treenode.parent_id
    # Get information about linked connectors
    links = list(TreenodeConnector.objects.filter(project_id=project_id,
            treenode_id=treenode_id).values_list('id', 'relation_id',
            'connector_id', 'confidence'))
    # Prevent deletion if node is referenced from sampler or sampler domain. The
    # deletion would fail regardless, but this way we can provide a nicer error
    # message.
    cursor = connection.cursor()
    cursor.execute("""
        SELECT
            EXISTS(
                SELECT 1 FROM catmaid_samplerinterval
                WHERE project_id = %(project_id)s AND
                    (start_node_id = %(treenode_id)s OR end_node_id = %(treenode_id)s)),
            EXISTS(
                SELECT 1 FROM catmaid_samplerdomain
                WHERE project_id = %(project_id)s AND
                    (start_node_id = %(treenode_id)s)),
            EXISTS(
                SELECT 1 FROM catmaid_samplerdomainend
                WHERE end_node_id = %(treenode_id)s)
    """, {
        'project_id': project_id,
        'treenode_id': treenode_id,
    })
    sampler_refs = cursor.fetchone()
    has_sampler_interval_refs = sampler_refs[0]
    has_sampler_domain_refs = sampler_refs[1] or sampler_refs[2]
    if has_sampler_interval_refs:
        raise ValueError("Can't delete node, it is used in at least one sampler interval")
    if has_sampler_domain_refs:
        raise ValueError("Can't delete node, it is used in at least one sampler domain")
    # response_on_error carries a context message for whichever step fails.
    response_on_error = ''
    deleted_neuron = False
    cursor = connection.cursor()
    try:
        if not parent_id:
            children:List = []
            # This treenode is root.
            response_on_error = 'Could not retrieve children for ' \
                    'treenode #%s' % treenode_id
            n_children = Treenode.objects.filter(parent=treenode).count()
            response_on_error = "Could not delete root node"
            if n_children > 0:
                # TODO yes you can, the new root is the first of the children,
                # and other children become independent skeletons
                raise ValueError("You can't delete the root node when it "
                        "has children.")
            # Get the neuron before the skeleton is deleted. It can't be
            # accessed otherwise anymore.
            neuron = ClassInstance.objects.get(project_id=project_id,
                    cici_via_b__relation__relation_name='model_of',
                    cici_via_b__class_instance_a=treenode.skeleton)
            # Remove the original skeleton. It is OK to remove it if it only had
            # one node, even if the skeleton's user does not match or the user
            # is not superuser. Delete the skeleton, which triggers deleting
            # the ClassInstanceClassInstance relationship with neuron_id
            response_on_error = 'Could not delete skeleton.'
            # Extra check for errors, like having two root nodes
            count = Treenode.objects.filter(skeleton_id=treenode.skeleton_id) \
                    .count()
            if 1 == count:
                # deletes as well treenodes that refer to the skeleton
                ClassInstance.objects.filter(pk=treenode.skeleton_id) \
                        .delete()
            else:
                raise ValueError("Can't delete isolated node: erroneously, " \
                        "its skeleton contains more than one treenode! " \
                        "Check for multiple root nodes.")
            # If the neuron modeled by the skeleton of the treenode is empty,
            # delete it.
            response_on_error = 'Could not delete neuron #%s' % neuron.id
            deleted_neuron = _delete_if_empty(neuron.id)
            if deleted_neuron:
                # Insert log entry for neuron deletion
                insert_into_log(project_id, request.user.id, 'remove_neuron',
                        (treenode.location_x, treenode.location_y, treenode.location_z),
                        'Deleted neuron %s and skeleton(s) %s.' % (neuron.id, treenode.skeleton_id))
        else:
            # Treenode is not root, it has a parent and perhaps children.
            # Reconnect all the children to the parent.
            response_on_error = 'Could not update parent id of children nodes'
            cursor.execute("""
                UPDATE treenode SET parent_id = %s
                WHERE project_id = %s AND parent_id = %s
                RETURNING id, edition_time
            """, (treenode.parent_id, project_id, treenode.id))
            # Children will be a list of two-element lists, just what we want to
            # return as child info.
            children = cursor.fetchall()
        # Remove treenode. Set the current user name in a transaction local
        # variable. This is done to communicate the current user to the trigger
        # that updates the skeleton summary table.
        response_on_error = 'Could not delete treenode.'
        cursor.execute("SET LOCAL catmaid.user_id=%(user_id)s", {
            'user_id': request.user.id,
        })
        Treenode.objects.filter(project_id=project_id, pk=treenode_id).delete()
        return JsonResponse({
            'x': treenode.location_x,
            'y': treenode.location_y,
            'z': treenode.location_z,
            'parent_id': parent_id,
            'children': children,
            'links': links,
            'radius': treenode.radius,
            'confidence': treenode.confidence,
            'skeleton_id': treenode.skeleton_id,
            'deleted_neuron': deleted_neuron,
            'success': "Removed treenode successfully."
        })
    except Exception as e:
        raise ValueError(response_on_error + ': ' + str(e))
def _compact_detail_list(project_id, treenode_ids=None, label_ids=None,
        label_names=None, skeleton_ids=None):
    """
    Return a list with information on the passed in node IDs or on treenodes
    that match the optional label references. The result has the form:

    [ID, parent ID, x, y, z, confidence, radius, skeleton_id, edition_time, user_id]

    The returned edition time is an epoch number.
    """
    if not any((treenode_ids, label_ids, label_names, skeleton_ids)):
        raise ValueError("No treenode IDs, label IDs, label names or skeleton IDs provided")
    extra_joins = []
    extra_where = []
    if treenode_ids:
        extra_joins.append("""
            JOIN UNNEST(%(treenode_ids)s::bigint[]) query(id)
                ON t.id = query.id
        """)
    labeled_as = None
    if label_ids or label_names:
        relation_map = get_relation_to_id_map(project_id, ('labeled_as',))
        labeled_as = relation_map['labeled_as']
    # Use distinct aliases for the ID based and the name based label
    # constraints. With a shared alias ("tci"/"label"), passing both
    # label_ids and label_names at once produced an invalid query, because
    # the same alias was declared twice.
    if label_ids:
        extra_joins.append("""
            JOIN treenode_class_instance tci_id
                ON tci_id.treenode_id = t.id
            JOIN UNNEST(%(label_ids)s::bigint[]) label_q(id)
                ON label_q.id = tci_id.class_instance_id
        """)
        extra_where.append("""
            tci_id.relation_id = %(labeled_as)s
        """)
    if label_names:
        extra_joins.append("""
            JOIN treenode_class_instance tci_name
                ON tci_name.treenode_id = t.id
            JOIN class_instance ci
                ON ci.id = tci_name.class_instance_id
            JOIN UNNEST(%(label_names)s::text[]) label_name_q(name)
                ON label_name_q.name = ci.name
        """)
        extra_where.append("""
            tci_name.relation_id = %(labeled_as)s
        """)
    if skeleton_ids:
        extra_joins.append("""
            JOIN UNNEST(%(skeleton_ids)s::bigint[]) skeleton(id)
                ON skeleton.id = t.skeleton_id
        """)
    cursor = connection.cursor()
    cursor.execute("""
        SELECT t.id, t.parent_id, t.location_x, t.location_y, t.location_z, t.confidence,
            t.radius, t.skeleton_id,
            EXTRACT(EPOCH FROM t.edition_time), t.user_id
        FROM treenode t
        {extra_joins}
        WHERE t.project_id=%(project_id)s
        {extra_where}
    """.format(**{
        'extra_joins': '\n'.join(extra_joins),
        'extra_where': ('AND ' + ' AND\n'.join(extra_where)) if extra_where else '',
    }), {
        'project_id': project_id,
        'treenode_ids': treenode_ids,
        'labeled_as': labeled_as,
        'label_ids': label_ids,
        'label_names': label_names,
        'skeleton_ids': skeleton_ids
    })
    rows = cursor.fetchall()
    return rows
def _compact_detail(project_id, treenode_id):
    """
    Return a list with information on the passed in node. It has the form:

    [ID, parent ID, x, y, z, confidence, radius, skeleton_id, edition_time, user_id]

    The returned edition time is an epoch number.
    """
    cursor = connection.cursor()
    cursor.execute("""
        SELECT id, parent_id, location_x, location_y, location_z, confidence,
            radius, skeleton_id, EXTRACT(EPOCH FROM edition_time), user_id
        FROM treenode
        WHERE id=%(treenode_id)s
        AND project_id=%(project_id)s
    """, {
        'project_id': project_id,
        'treenode_id': treenode_id
    })
    rows = cursor.fetchall()
    # The ID is the primary key, so exactly one row is expected.
    if not rows:
        raise ValueError(f"Could not find treenode with ID {treenode_id}")
    if len(rows) > 1:
        raise ValueError(f"Found {len(rows)} treenodes with ID {treenode_id}, expected one")
    return rows[0]
def _treenode_info(project_id, treenode_id):
    """Return a dict with skeleton ID/name and neuron ID/name for a single
    treenode. Raw SQL is used because the result spans several models.
    """
    cursor = connection.cursor()
    # (use raw SQL since we are returning values from several different models)
    cursor.execute("""
        SELECT
            treenode.skeleton_id,
            ci.name as skeleton_name,
            ci2.id as neuron_id,
            ci2.name as neuron_name
        FROM
            treenode,
            relation r,
            class_instance ci,
            class_instance ci2,
            class_instance_class_instance cici
        WHERE ci.project_id = %s
        AND treenode.id = %s
        AND treenode.skeleton_id = ci.id
        AND ci.id = cici.class_instance_a
        AND ci2.id = cici.class_instance_b
        AND cici.relation_id = r.id
        AND r.relation_name = 'model_of'
    """, (project_id, treenode_id))
    column_names = [col[0] for col in cursor.description]
    results = [dict(zip(column_names, row)) for row in cursor.fetchall()]
    if not results:
        raise ValueError('No skeleton and neuron for treenode %s' % treenode_id)
    if len(results) > 1:
        raise ValueError('Found more than one skeleton and neuron for '
                'treenode %s' % treenode_id)
    return results[0]
@api_view(['GET'])
@requires_user_role([UserRole.Annotate, UserRole.Browse])
def treenode_info(request:HttpRequest, project_id=None, treenode_id=None) -> JsonResponse:
    """Retrieve skeleton and neuron information about this treenode.
    ---
    type:
      skeleton_id:
        description: ID of the treenode's skeleton
        type: integer
        required: true
      skeleton_name:
        description: Name of the treenode's skeleton
        type: string
        required: true
      neuron_id:
        description: ID of the treenode's neuron
        type: integer
        required: true
      neuron_name:
        description: Name of the treenode's neuron
        type: string
        required: true
    """
    # Delegate the lookup and return the resulting dict directly.
    return JsonResponse(_treenode_info(int(project_id), int(treenode_id)))
@api_view(['GET'])
@requires_user_role(UserRole.Browse)
def compact_detail(request:HttpRequest, project_id=None, treenode_id=None) -> JsonResponse:
    """
    Retrieve node information in a compact form. A list of the following form
    is returned:

    [ID, parent ID, x, y, z, confidence, radius, skeleton_id, edition_time, user_id]

    The returned edition time is an epoch number.
    """
    node_detail = _compact_detail(int(project_id), int(treenode_id))
    # safe=False allows serializing a top-level list.
    return JsonResponse(node_detail, safe=False)
@api_view(['POST'])
@requires_user_role(UserRole.Browse)
def compact_detail_list(request:HttpRequest, project_id=None) -> JsonResponse:
    """
    Retrieve node information in a compact form. A list of elements of the
    following form is returned:

    [ID, parent ID, x, y, z, confidence, radius, skeleton_id, edition_time, user_id]

    The returned edition time is an epoch number.
    ---
    parameters:
    - name: project_id
      description: Project to work in
      required: true
    - name: treenode_ids
      description: A list of treenode IDs to return information on
      required: false
    - name: label_ids
      description: |
        A list of label IDs that must be linked to result treenodes. Alternative
        to explicit treenode IDs and label names.
      required: false
    - name: label_names
      description: |
        A list of label names that must be linked to result treenodes.
        Alternative to explicit treenode IDs and label IDs
      required: false
    - name: skeleton_ids
      description: |
        A list of skeleton IDs that result skeletons have to be part of.
      required: false
    """
    params = request.POST
    treenode_ids = get_request_list(params, 'treenode_ids', None, int)
    label_ids = get_request_list(params, 'label_ids', None, int)
    label_names = get_request_list(params, 'label_names')
    skeleton_ids = get_request_list(params, 'skeleton_ids', None, int)
    # At least one filter is required; reject empty requests early.
    if not any((treenode_ids, label_ids, label_names, skeleton_ids)):
        raise ValueError("No treenode IDs, label IDs, label names or skeleton IDs provided")
    node_info = _compact_detail_list(int(project_id), treenode_ids, label_ids,
            label_names, skeleton_ids)
    return JsonResponse(node_info, safe=False)
@requires_user_role([UserRole.Annotate, UserRole.Browse])
def find_children(request:HttpRequest, project_id=None, treenode_id=None) -> JsonResponse:
    """Return ID and location of all direct child nodes of a treenode."""
    try:
        tnid = int(treenode_id)
        cursor = connection.cursor()
        cursor.execute('''
            SELECT id, location_x, location_y, location_z
            FROM treenode
            WHERE parent_id = %s
            ''', (tnid,))
        # NOTE(review): each row is wrapped in an extra single-element list,
        # so clients receive [[[id, x, y, z]], ...] — confirm this nesting is
        # what the front-end expects before flattening it.
        children = [[row] for row in cursor.fetchall()]
        return JsonResponse(children, safe=False)
    except Exception as e:
        # The previous message ("next branch node or leaf") was copy-pasted
        # from a different view; report the operation that actually failed.
        raise ValueError('Could not obtain children of treenode: ' + str(e))
@api_view(['POST'])
@requires_user_role(UserRole.Annotate)
def update_confidence(request:HttpRequest, project_id=None, treenode_id=None) -> JsonResponse:
    """Update confidence of edge between a node to either its parent or its
    connectors.

    The connection between a node and its parent or the connectors it is linked
    to can be rated with a confidence value in the range 1-5. If connector links
    should be updated, one can limit the affected connections to a specific
    connector. Returned is an object, mapping updated partners to their old
    confidences.
    ---
    parameters:
    - name: new_confidence
      description: New confidence, value in range 1-5
      type: integer
      required: true
    - name: to_connector
      description: Whether all linked connectors instead of parent should be updated
      type: boolean
      required: false
    - name: partner_ids
      description: Limit update to a set of connectors if to_connector is true
      type: array
      items: integer
      required: false
    - name: partner_confidences
      description: Set different confidences to connectors in <partner_ids>
      type: array
      items: integer
      required: false
    type:
      message:
        type: string
        required: true
      updated_partners:
        type: object
        required: true
    """
    tnid = int(treenode_id)
    can_edit_treenode_or_fail(request.user, project_id, tnid)
    cursor = connection.cursor()
    # The client has to prove it knows the current state of the node.
    state.validate_state(tnid, request.POST.get('state'),
            node=True, lock=True, cursor=cursor)
    to_connector = get_request_bool(request.POST, 'to_connector', False)
    partner_ids = get_request_list(request.POST, 'partner_ids', None, int)
    partner_confidences = get_request_list(request.POST, 'partner_confidences',
            None, int)
    new_confidence = int(request.POST.get('new_confidence', 0))
    # If partner confidences are specified, make sure there are exactly as many
    # as there are partners. Otherwise validate passed in confidence
    if partner_ids and partner_confidences:
        if len(partner_confidences) != len(partner_ids):
            # Keep the trailing space before the implicit literal
            # concatenation, otherwise the message reads "...asthere are...".
            raise ValueError("There have to be as many partner confidences as "
                    "there are partner IDs")
    else:
        if new_confidence < 1 or new_confidence > 5:
            raise ValueError('Confidence not in range 1-5 inclusive.')
        if partner_ids:
            # Prepare new confidences for connector query
            partner_confidences = (new_confidence,) * len(partner_ids)
    if to_connector:
        if partner_ids:
            # Build a (connector_id, confidence) VALUES list and interleave
            # the data to match the "(%s,%s)" placeholder pairs.
            partner_template = ",".join(("(%s,%s)",) * len(partner_ids))
            partner_data = [p for v in zip(partner_ids, partner_confidences) for p in v]
            cursor.execute(f'''
                UPDATE treenode_connector tc
                SET confidence = target.new_confidence
                FROM (SELECT x.id, x.confidence AS old_confidence,
                            new_values.confidence AS new_confidence
                      FROM treenode_connector x
                      JOIN (VALUES {partner_template}) new_values(cid, confidence)
                      ON x.connector_id = new_values.cid
                      WHERE x.treenode_id = %s) target
                WHERE tc.id = target.id
                RETURNING tc.connector_id, tc.edition_time, target.old_confidence
            ''', partner_data + [tnid])
        else:
            # Update all connector links of this treenode.
            cursor.execute('''
                UPDATE treenode_connector tc
                SET confidence = %s
                FROM (SELECT x.id, x.confidence AS old_confidence
                      FROM treenode_connector x
                      WHERE treenode_id = %s) target
                WHERE tc.id = target.id
                RETURNING tc.connector_id, tc.edition_time, target.old_confidence
            ''', (new_confidence, tnid))
    else:
        # Update the edge to the parent node.
        cursor.execute('''
            UPDATE treenode t
            SET confidence = %s, editor_id = %s
            FROM (SELECT x.id, x.confidence AS old_confidence
                  FROM treenode x
                  WHERE id = %s) target
            WHERE t.id = target.id
            RETURNING t.parent_id, t.edition_time, target.old_confidence
        ''', (new_confidence, request.user.id, tnid))
    updated_partners = cursor.fetchall()
    if len(updated_partners) > 0:
        location = Location.objects.filter(id=tnid).values_list(
                'location_x', 'location_y', 'location_z')[0]
        insert_into_log(project_id, request.user.id, "change_confidence",
                location, "Changed to %s" % new_confidence)
        return JsonResponse({
            'message': 'success',
            'updated_partners': {
                r[0]: {
                    'edition_time': r[1],
                    'old_confidence': r[2]
                } for r in updated_partners
            }
        })
    # Else, signal error
    if to_connector:
        raise ValueError('Failed to update confidence between treenode %s and '
                'connector.' % tnid)
    else:
        raise ValueError('Failed to update confidence at treenode %s.' % tnid)
def _skeleton_as_graph(skeleton_id) -> nx.DiGraph:
    """Load all nodes of a skeleton into a directed graph whose edges point
    from parent to child."""
    # Fetch all nodes of the skeleton
    cursor = connection.cursor()
    cursor.execute('''
        SELECT id, parent_id
        FROM treenode
        WHERE skeleton_id=%s''', [skeleton_id])
    graph = nx.DiGraph()
    for node_id, node_parent_id in cursor.fetchall():
        graph.add_node(node_id)
        # Root nodes have no parent; everything else gets a parent->child edge.
        if node_parent_id:
            graph.add_edge(node_parent_id, node_id)
    return graph
def _find_first_interesting_node(sequence):
    """ Find the first node that:

    1. Has confidence lower than 5
    2. Has a tag
    3. Has any connector (e.g. receives/makes synapse, marked as abutting, ...)

    Otherwise return the last node.
    """
    if not sequence:
        raise ValueError('No nodes ahead!')
    if 1 == len(sequence):
        return sequence[0]
    cursor = connection.cursor()
    # Node IDs come from the skeleton graph and are integers, so direct
    # interpolation into the IN clause is safe here.
    cursor.execute('''
        SELECT t.id, t.confidence, tc.relation_id, tci.relation_id
        FROM treenode t
        LEFT OUTER JOIN treenode_connector tc ON (tc.treenode_id = t.id)
        LEFT OUTER JOIN treenode_class_instance tci ON (tci.treenode_id = t.id)
        WHERE t.id IN (%s)
        ''' % ",".join(map(str, sequence)))
    # The outer joins can yield multiple rows per node; keeping only the last
    # row per ID is fine, because every row of an "interesting" node carries
    # the relevant confidence/relation columns.
    nodes = {row[0]: row for row in cursor.fetchall()}
    for node_id in sequence:
        if node_id in nodes:
            props = nodes[node_id]
            # [1]: confidence
            # [2]: a treenode_connector.relation_id, e.g. presynaptic_to or postsynaptic_to
            # [3]: a treenode_class_instance.relation_id, e.g. labeled_as
            # 2 and 3 may be None
            if props[1] < 5 or props[2] or props[3]:
                return node_id
        else:
            raise ValueError('Nodes of this skeleton changed while inspecting them.')
    return sequence[-1]
@requires_user_role([UserRole.Annotate, UserRole.Browse])
def find_previous_branchnode_or_root(request:HttpRequest, project_id=None, treenode_id=None) -> JsonResponse:
    """Return the location of the closest upstream branch node or, failing
    that, the root of the given treenode's skeleton. If the 'alt' POST
    parameter is 1, the first "interesting" node on the way (tagged, low
    confidence or connector-linked) is returned instead.
    """
    try:
        tnid = int(treenode_id)
        alt = 1 == int(request.POST['alt'])
        skid = Treenode.objects.get(pk=tnid).skeleton_id
        graph = _skeleton_as_graph(skid)
        # Travel upstream until finding a parent node with more than one child
        # or reaching the root node
        seq = [] # Does not include the starting node tnid
        while True:
            # Materialize explicitly: under networkx 2.x predecessors() and
            # successors() return iterators, which are always truthy and
            # support neither indexing nor len().
            parents = list(graph.predecessors(tnid))
            if parents: # list of parents is not empty
                tnid = parents[0] # Can only have one parent
                seq.append(tnid)
                if 1 != len(list(graph.successors(tnid))):
                    break # Found a branch node
            else:
                break # Found the root node
        if seq and alt:
            tnid = _find_first_interesting_node(seq)
        return JsonResponse(_fetch_location(project_id, tnid), safe=False)
    except Exception as e:
        raise ValueError('Could not obtain previous branch node or root:' + str(e))
@requires_user_role([UserRole.Annotate, UserRole.Browse])
def find_next_branchnode_or_end(request:HttpRequest, project_id=None, treenode_id=None) -> JsonResponse:
    """For every child of the given treenode, return the downstream branch or
    end node, the first "interesting" node on the way and the child itself
    (each with location). Branches are sorted by downstream arbor size.
    """
    try:
        tnid = int(treenode_id)
        skid = Treenode.objects.get(pk=tnid).skeleton_id
        graph = _skeleton_as_graph(skid)
        # Materialize explicitly: under networkx 2.x successors() returns an
        # iterator, which supports neither len() nor repeated consumption.
        children = list(graph.successors(tnid))
        branches = []
        for child_node_id in children:
            # Travel downstream until finding a child node with more than one
            # child or reaching an end node
            seq = [child_node_id] # Does not include the starting node tnid
            branch_end = child_node_id
            while True:
                branch_children = list(graph.successors(branch_end))
                if 1 == len(branch_children):
                    branch_end = branch_children[0]
                    seq.append(branch_end)
                else:
                    break # Found an end node or a branch node
            branches.append([child_node_id,
                    _find_first_interesting_node(seq),
                    branch_end])
        # If more than one branch exists, sort based on downstream arbor size.
        if len(children) > 1:
            branches.sort(
                    key=lambda b: len(nx.algorithms.traversal.depth_first_search.dfs_successors(graph, b[0])),
                    reverse=True)
        # Leaf nodes will have no branches
        if len(children) > 0:
            # Create a dict of node ID -> node location
            node_ids_flat = list(itertools.chain.from_iterable(branches))
            node_locations = {row[0]: row for row in _fetch_locations(project_id, node_ids_flat)}
            branches = [[node_locations[node_id] for node_id in branch] for branch in branches]
        return JsonResponse(branches, safe=False)
    except Exception as e:
        raise ValueError('Could not obtain next branch node or leaf: ' + str(e))
def _importing_user(project_id, treenode_id):
    """Return the ID of the user whose 'skeletons.import' transaction created
    the passed in treenode, or None if the node was not imported.

    NOTE(review): the project_id parameter is currently unused -- the query
    keys only on the treenode ID.  Confirm whether project scoping is needed.
    """
    cursor = connection.cursor()
    # Find the earliest history row of the treenode and join the transaction
    # information recorded for it, filtering for import transactions only.
    cursor.execute(f"""
        SELECT t_origin_tx.user_id
        FROM (
            SELECT txid, edition_time
            FROM treenode__with_history th
            WHERE th.id = %(obj_id)s
            ORDER BY edition_time ASC
            LIMIT 1
        ) t_origin
        JOIN LATERAL (
            SELECT cti.user_id
            FROM catmaid_transaction_info cti
            WHERE cti.transaction_id = t_origin.txid
                -- Transaction ID wraparound match protection. A transaction
                -- ID is only unique together with a date.
                AND cti.execution_time = t_origin.edition_time
                AND label = 'skeletons.import'
            LIMIT 1
        ) t_origin_tx
        ON TRUE
    """, {
        'obj_id': treenode_id,
    })
    result = cursor.fetchone()
    return result[0] if result else None
@api_view(['GET'])
@requires_user_role([UserRole.Annotate, UserRole.Browse])
def importing_user(request:HttpRequest, project_id:int, treenode_id:int) -> JsonResponse:
    """Retrieve the user ID of the user who imported the passed in treenode. If
    this node wasn't imported, return None.
    ---
    type:
      importing_user:
        description: ID of the importer of this node
        type: integer
        required: true
    """
    # Delegate the lookup and wrap the result for the API client.
    return JsonResponse({
        'importing_user_id': _importing_user(int(project_id), int(treenode_id)),
    })
| tomka/CATMAID | django/applications/catmaid/control/treenode.py | Python | gpl-3.0 | 53,294 | [
"NEURON"
] | 355a18f7939364ac02012f66b914533437e16c6c9020f823cce3e328d4807b34 |
"""Example code to convert Structured Commons objects to JSON."""
from __future__ import print_function
import sys
import json
import base64
from sc import fp
# Python 2/3 compatibility shim: Python 3 has no unichr(), but chr() is its
# exact equivalent there.  Catch only NameError -- the previous bare
# ``except:`` would also have swallowed SystemExit/KeyboardInterrupt.
try:
    unichr(0)
except NameError:
    unichr = chr
class pyjson_visitor(object):
    """Convert an abstract object tree to a JSON-serializable Python concrete object.

    - object files are transformed to either unicode strings or an array
      containing one string containing the base64-encoded data.
    - object dictionaries are transformed to Python dictionaries, with
      names transformed to unicode strings
    - fingerprint references in dictionaries are transformed to
      an arrays containing one string containing the fingerprint compact representation.

    This visitor is suitable to process an object that implements
    the ``fingerprintable`` interface.

    pyjson_visitor :: Fingerprintable a => a -> PyObject
    """

    def __init__(self, use_base64):
        # When true, file contents are rendered as a [base64-string]
        # singleton list instead of a raw unicode string.
        self._b64 = use_base64

    def enter_file(self, sz):
        # Start a new file node whose total size was announced as *sz*;
        # visit_data() calls must add up to exactly that many bytes.
        self._sz = sz
        self._cnt = 0
        if self._b64:
            self._value = bytearray()
        else:
            self._value = u''

    def visit_data(self, b):
        # Append one chunk of file data.
        # NOTE(review): the per-byte loop below assumes iteration yields ints
        # (true for bytearray on Python 2/3 and bytes on Python 3) -- confirm
        # callers never pass a Python 2 ``str``.
        assert isinstance(b, bytearray) or isinstance(b, bytes)
        self._cnt += len(b)
        if self._b64:
            self._value += b
        else:
            # Map every byte value to the code point of equal value.
            for c in b:
                self._value += unichr(c)

    def leave_file(self):
        # Close the file node and verify the announced size was honored.
        assert self._sz == self._cnt
        assert len(self._value) == self._sz
        if self._b64:
            self._value = [base64.urlsafe_b64encode(self._value).decode('ascii')]

    def enter_dict(self):
        # Start a new dictionary node.
        self._value = {}

    def visit_entry(self, name, t, obj):
        # Add entry *name* with type tag *t* ('l' marks a fingerprint link;
        # any other tag means a nested fingerprintable object).
        fp.validate_name(name)
        assert name not in self._value, "duplicate name %r" % name
        if t == 'l' and isinstance(obj, fp.fingerprint):
            # Links are rendered as a [compact-fingerprint] singleton list.
            self._value[name] = [obj.compact()]
        elif isinstance(obj, fp.fingerprintable):
            # Recurse using a fresh visitor so this visitor's state survives.
            v = pyjson_visitor(self._b64)
            obj.visit(v)
            self._value[name] = v.value()
        else:
            raise TypeError("invalid object type: %r" % obj)

    def leave_dict(self):
        pass

    def value(self):
        """Returns the Python object computed by this visitor."""
        return self._value
class pyjson_wrap(fp.fingerprintable):
    """Wrap a JSON-serializable Python concrete dictionary tree in the fingerprintable
    interface.

    Such a wrapper can then be fed to fingerprint computation
    (fp.compute_visitor), filesystem encoding (fs.encode_visitor), and so on.
    """

    def __init__(self, obj):
        # Text becomes a bytearray of code points; a one-element list is the
        # base64-encoded file representation and is decoded back to bytes.
        if isinstance(obj, str) or isinstance(obj, type(u'')):
            obj = bytearray(ord(ch) for ch in obj)
        elif isinstance(obj, list) and len(obj) == 1:
            obj = base64.urlsafe_b64decode(str(obj[0]))
        self._obj = obj

    def visit(self, v):
        node = self._obj
        if not isinstance(node, dict):
            # Leaf: emit the raw bytes as a single file node.
            v.enter_file(len(node))
            v.visit_data(node)
            v.leave_file()
            return
        v.enter_dict()
        for name, child in node.items():
            if isinstance(child, str) or isinstance(child, type(u'')):
                v.visit_entry(name, 's', pyjson_wrap(child))
            elif isinstance(child, dict):
                v.visit_entry(name, 't', pyjson_wrap(child))
            elif isinstance(child, list) and len(child) == 1:
                # A singleton list is either a fingerprint link ("fp:...")
                # or base64-encoded file data.
                if child[0][:3].lower() == 'fp:':
                    v.visit_entry(name, 'l', fp.fingerprint(child[0]))
                else:
                    v.visit_entry(name, 's', pyjson_wrap(child))
            else:
                raise TypeError("invalid object type: %r" % child)
        v.leave_dict()
def decode(json_src):
    """Return a fingerprintable interface to the JSON object given as argument."""
    return pyjson_wrap(json.load(json_src))
def encode(obj, json_dst, use_base64 = False):
    """Encode a fingerprintable object to a JSON object written to *json_dst*."""
    assert isinstance(obj, fp.fingerprintable)
    # Walk the object tree, collecting a JSON-serializable mirror of it.
    visitor = pyjson_visitor(use_base64)
    obj.visit(visitor)
    json.dump(visitor.value(), json_dst)
| structured-commons/tools | sc/js.py | Python | unlicense | 3,963 | [
"VisIt"
] | f447ee022132255552a912bdf42e3dab63ff0f86275caded89509411ef9fdef9 |
# -*- Mode: Python; coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2005-2007 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
""" Implementation of sales application. """
import decimal
from datetime import date
from dateutil.relativedelta import relativedelta
import pango
import gtk
from kiwi.currency import currency
from storm.expr import And
from stoqlib.api import api
from stoqlib.database.expr import Date
from stoqlib.domain.events import ECFIsLastSaleEvent
from stoqlib.domain.invoice import InvoicePrinter
from stoqlib.domain.sale import Sale, SaleView, SaleComment
from stoqlib.enums import SearchFilterPosition
from stoqlib.gui.dialogs.invoicedialog import SaleInvoicePrinterDialog
from stoqlib.gui.editors.saleeditor import SaleClientEditor, SalesPersonEditor
from stoqlib.gui.editors.noteeditor import NoteEditor
from stoqlib.gui.search.callsearch import ClientCallsSearch
from stoqlib.gui.search.commissionsearch import CommissionSearch
from stoqlib.gui.search.deliverysearch import DeliverySearch
from stoqlib.gui.search.loansearch import LoanItemSearch, LoanSearch
from stoqlib.gui.search.returnedsalesearch import ReturnedSaleSearch
from stoqlib.gui.search.personsearch import (ClientSearch,
ClientsWithSaleSearch,
ClientsWithCreditSearch)
from stoqlib.gui.search.productsearch import ProductSearch
from stoqlib.gui.search.creditcheckhistorysearch import CreditCheckHistorySearch
from stoqlib.gui.slaves.saleslave import SaleListToolbar
from stoqlib.gui.search.salespersonsearch import SalesPersonSalesSearch
from stoqlib.gui.search.salesearch import (SalesByPaymentMethodSearch,
SoldItemsByBranchSearch,
SoldItemsByClientSearch,
UnconfirmedSaleItemsSearch)
from stoqlib.gui.search.searchcolumns import IdentifierColumn, SearchColumn
from stoqlib.gui.search.searchfilters import ComboSearchFilter
from stoqlib.gui.search.servicesearch import ServiceSearch
from stoqlib.gui.stockicons import (STOQ_PRODUCTS, STOQ_SERVICES,
STOQ_CLIENTS, STOQ_DELIVERY)
from stoqlib.gui.utils.keybindings import get_accels
from stoqlib.gui.wizards.loanwizard import NewLoanWizard, CloseLoanWizard
from stoqlib.gui.wizards.salequotewizard import SaleQuoteWizard
from stoqlib.gui.wizards.workorderquotewizard import WorkOrderQuoteWizard
from stoqlib.lib.formatters import format_quantity
from stoqlib.lib.invoice import SaleInvoice, print_sale_invoice
from stoqlib.lib.message import info, warning
from stoqlib.lib.permissions import PermissionManager
from stoqlib.lib.translation import stoqlib_gettext as _
from stoqlib.reporting.sale import SalesReport
from stoq.gui.shell.shellapp import ShellApp
class FilterItem(object):
    """A named option for the sales status filter combo box."""

    def __init__(self, name, value=None):
        self.name = name
        # ``id`` mirrors ``value`` so instances can be used directly as
        # combo item identifiers.
        self.id = self.value = value
# Map of filter keys (used via FilterItem('custom', key)) to storm query
# expressions applied by SalesApp._get_status_query().
#
# NOTE(review): date.today() is evaluated once, at module import time.  In a
# long-running process the "today" / "last N days" windows go stale after
# midnight -- confirm whether this module is re-imported per session or
# whether these expressions should be built lazily instead.
SALES_FILTERS = {
    'sold': Sale.status == Sale.STATUS_CONFIRMED,
    'sold-today': And(Date(Sale.open_date) == date.today(),
                      Sale.status == Sale.STATUS_CONFIRMED),
    'sold-7days': And(Date(Sale.open_date) <= date.today(),
                      Date(Sale.open_date) >= date.today() - relativedelta(days=7),
                      Sale.status == Sale.STATUS_CONFIRMED),
    'sold-28days': And(Date(Sale.open_date) <= date.today(),
                       Date(Sale.open_date) >= date.today() - relativedelta(days=28),
                       Sale.status == Sale.STATUS_CONFIRMED),
    'expired-quotes': And(Date(Sale.expire_date) < date.today(),
                          Sale.status == Sale.STATUS_QUOTE),
}
class SalesApp(ShellApp):
    """Shell application listing sales and sale quotes.

    Provides searching/filtering of SaleView rows, creation of sale quotes
    (plain and with work orders), loans, and per-sale operations such as
    cancel, return, edit, invoice printing and client/salesperson changes.
    """

    app_title = _('Sales')
    gladefile = 'sales_app'
    search_spec = SaleView
    search_label = _('matching:')
    report_table = SalesReport

    # Maps a sale status to the date column that is most relevant for it
    # (shown by _setup_columns when that status filter is active).
    cols_info = {Sale.STATUS_INITIAL: 'open_date',
                 Sale.STATUS_CONFIRMED: 'confirm_date',
                 Sale.STATUS_ORDERED: 'open_date',
                 Sale.STATUS_CANCELLED: 'cancel_date',
                 Sale.STATUS_QUOTE: 'open_date',
                 Sale.STATUS_RETURNED: 'return_date',
                 Sale.STATUS_RENEGOTIATED: 'close_date'}

    action_permissions = {
        "SalesPrintInvoice": ('app.sales.print_invoice', PermissionManager.PERM_SEARCH),
    }

    def __init__(self, window, store=None):
        self.summary_label = None
        self._visible_date_col = None
        ShellApp.__init__(self, window, store=store)

    #
    # Application
    #

    def create_actions(self):
        """Register menu/toolbar actions and their short labels."""
        group = get_accels('app.sales')
        actions = [
            # File
            ("SaleQuote", None, _("Sale quote..."), '',
             _('Create a new quote for a sale')),
            ("WorkOrderQuote", None, _("Sale with work order..."), '',
             _('Create a new quote for a sale with work orders')),
            ("LoanNew", None, _("Loan...")),
            ("LoanClose", None, _("Close loan...")),

            # Search
            ("SearchSoldItemsByBranch", None, _("Sold items by branch..."),
             group.get("search_sold_items_by_branch"),
             _("Search for sold items by branch")),
            ("SearchSalesByPaymentMethod", None,
             _("Sales by payment method..."),
             group.get("search_sales_by_payment")),
            # NOTE(review): the tooltip below looks copy-pasted from the
            # payment-method search -- it probably should read "Search for
            # total sales made by salesperson".  Left unchanged to avoid
            # touching translated strings.
            ("SearchSalesPersonSales", None,
             _("Total sales made by salesperson..."),
             group.get("search_salesperson_sales"),
             _("Search for sales by payment method")),
            ("SearchProduct", STOQ_PRODUCTS, _("Products..."),
             group.get("search_products"),
             _("Search for products")),
            ("SearchService", STOQ_SERVICES, _("Services..."),
             group.get("search_services"),
             _("Search for services")),
            ("SearchDelivery", STOQ_DELIVERY, _("Deliveries..."),
             group.get("search_deliveries"),
             _("Search for deliveries")),
            ("SearchClient", STOQ_CLIENTS, _("Clients..."),
             group.get("search_clients"),
             _("Search for clients")),
            ("SearchClientCalls", None, _("Client Calls..."),
             group.get("search_client_calls"),
             _("Search for client calls")),
            ("SearchCreditCheckHistory", None,
             _("Client credit check history..."),
             group.get("search_credit_check_history"),
             _("Search for client check history")),
            ("SearchCommission", None, _("Commissions..."),
             group.get("search_commissions"),
             _("Search for salespersons commissions")),
            ("LoanSearch", None, _("Loans..."),
             group.get("search_loans")),
            ("LoanSearchItems", None, _("Loan items..."),
             group.get("search_loan_items")),
            ("ReturnedSaleSearch", None, _("Returned sales..."),
             group.get("returned_sales")),
            ("SearchUnconfirmedSaleItems", None, _("Unconfirmed sale items..."),
             group.get("search_reserved_product"),
             _("Search for unconfirmed sale items")),
            ("SearchClientsWithSale", None, _("Clients with sales..."),
             None,
             _("Search for regular clients")),
            ("SearchClientsWithCredit", None, _("Clients with credit..."),
             None,
             _("Search for clients that have credit")),
            ("SearchSoldItemsByClient", None, _("Sold items by client..."),
             None,
             _("Search for products sold by client")),

            # Sale
            ("SaleMenu", None, _("Sale")),
            ("SalesCancel", None, _("Cancel...")),
            ("ChangeClient", gtk.STOCK_EDIT, _("Change client...")),
            ("ChangeSalesperson", gtk.STOCK_EDIT, _("Change salesperson...")),
            ("SalesPrintInvoice", gtk.STOCK_PRINT, _("_Print invoice...")),
            ("Return", gtk.STOCK_CANCEL, _("Return..."), '',
             _("Return the selected sale, canceling it's payments")),
            ("Edit", gtk.STOCK_EDIT, _("Edit..."), '',
             _("Edit the selected sale, allowing you to change the details "
               "of it")),
            ("Details", gtk.STOCK_INFO, _("Details..."), '',
             _("Show details of the selected sale"))
        ]

        self.sales_ui = self.add_ui_actions("", actions,
                                            filename="sales.xml")

        self.SaleQuote.set_short_label(_("New Sale Quote"))
        # BUGFIX: this used to call set_short_label() on SaleQuote a second
        # time, overwriting the label above and leaving WorkOrderQuote
        # without a short label.
        self.WorkOrderQuote.set_short_label(_("New Sale Quote with Work Order"))
        self.SearchClient.set_short_label(_("Clients"))
        self.SearchProduct.set_short_label(_("Products"))
        self.SearchService.set_short_label(_("Services"))
        self.SearchDelivery.set_short_label(_("Deliveries"))
        self.SalesCancel.set_short_label(_("Cancel"))
        self.ChangeClient.set_short_label(_("Change Client"))
        self.ChangeSalesperson.set_short_label(_("Change Salesperson"))
        self.Edit.set_short_label(_("Edit"))
        self.Return.set_short_label(_("Return"))
        self.Details.set_short_label(_("Details"))

        self.set_help_section(_("Sales help"), 'app-sales')

    def create_ui(self):
        """Build the widgets: lazy search, popup menu, columns and toolbars."""
        if api.sysparam.get_bool('SMART_LIST_LOADING'):
            self.search.enable_lazy_search()

        # Hide client/salesperson change actions unless explicitly enabled.
        if not api.sysparam.get_bool('CHANGE_CLIENT_AFTER_CONFIRMED'):
            self.ChangeClient.set_visible(False)
        if not api.sysparam.get_bool('CHANGE_SALESPERSON_AFTER_CONFIRMED'):
            self.ChangeSalesperson.set_visible(False)

        self.popup = self.uimanager.get_widget('/SaleSelection')

        self._setup_columns()
        self._setup_widgets()

        self.window.add_new_items([self.SaleQuote, self.WorkOrderQuote])
        self.window.add_search_items([
            self.SearchProduct,
            self.SearchClient,
            self.SearchService,
            self.SearchDelivery])
        self.window.Print.set_tooltip(_("Print a report of these sales"))

    def activate(self, refresh=True):
        if refresh:
            self.refresh()

        self.check_open_inventory()
        self._update_toolbar()

        self.search.focus_search_entry()

    def setup_focus(self):
        self.refresh()

    def deactivate(self):
        self.uimanager.remove_ui(self.sales_ui)

    def new_activate(self):
        self._new_sale_quote(wizard=SaleQuoteWizard)

    def search_activate(self):
        self._search_product()

    def set_open_inventory(self):
        # Operations that move stock are forbidden while an inventory is open.
        self.set_sensitive(self._inventory_widgets, False)

    def create_filters(self):
        """Set up the text search columns and the status filter combo."""
        self.set_text_field_columns(['client_name', 'salesperson_name',
                                     'identifier_str'])

        status_filter = ComboSearchFilter(_('Show sales'),
                                          self._get_filter_options())
        # Render the 'sep' placeholder entry as a row separator.
        status_filter.combo.set_row_separator_func(
            lambda model, titer: model[titer][0] == 'sep')

        executer = self.search.get_query_executer()
        executer.add_filter_query_callback(
            status_filter, self._get_status_query)
        self.add_filter(status_filter, position=SearchFilterPosition.TOP)

        self.create_branch_filter(column=Sale.branch_id)

    def get_columns(self):
        self._status_col = SearchColumn('status_name', title=_('Status'),
                                        data_type=str, width=80, visible=False,
                                        search_attribute='status',
                                        valid_values=self._get_status_values())

        cols = [IdentifierColumn('identifier', title=_('Sale #'),
                                 sorted=True),
                SearchColumn('coupon_id', title=_('Coupon #'), width=100,
                             data_type=int, visible=False),
                SearchColumn('paid', title=_('Paid'), width=120,
                             data_type=bool, visible=False),
                SearchColumn('open_date', title=_('Open date'), width=120,
                             data_type=date, justify=gtk.JUSTIFY_RIGHT,
                             visible=False),
                SearchColumn('close_date', title=_('Close date'), width=120,
                             data_type=date, justify=gtk.JUSTIFY_RIGHT,
                             visible=False),
                SearchColumn('confirm_date', title=_('Confirm date'),
                             data_type=date, justify=gtk.JUSTIFY_RIGHT,
                             visible=False, width=120),
                SearchColumn('cancel_date', title=_('Cancel date'), width=120,
                             data_type=date, justify=gtk.JUSTIFY_RIGHT,
                             visible=False),
                SearchColumn('return_date', title=_('Return date'), width=120,
                             data_type=date, justify=gtk.JUSTIFY_RIGHT,
                             visible=False),
                SearchColumn('expire_date', title=_('Expire date'), width=120,
                             data_type=date, justify=gtk.JUSTIFY_RIGHT,
                             visible=False),
                self._status_col,
                SearchColumn('client_name', title=_('Client'),
                             data_type=str, width=140, expand=True,
                             ellipsize=pango.ELLIPSIZE_END),
                SearchColumn('salesperson_name', title=_('Salesperson'),
                             data_type=str, width=130,
                             ellipsize=pango.ELLIPSIZE_END),
                SearchColumn('total_quantity', title=_('Items'),
                             data_type=decimal.Decimal, width=60,
                             format_func=format_quantity),
                SearchColumn('total', title=_('Total'), data_type=currency,
                             width=120, search_attribute='_total')]
        return cols

    #
    # Private
    #

    def _create_summary_label(self):
        """(Re)attach the running total label below the results."""
        self.search.set_summary_label(column='total',
                                      label='<b>Total:</b>',
                                      format='<b>%s</b>',
                                      parent=self.get_statusbar_message_area())

    def _setup_widgets(self):
        self._setup_slaves()
        # Widgets disabled while an inventory is open (see set_open_inventory).
        self._inventory_widgets = [self.sale_toolbar.return_sale_button,
                                   self.Return, self.LoanNew, self.LoanClose]
        self.register_sensitive_group(self._inventory_widgets,
                                      lambda: not self.has_open_inventory())

    def _setup_slaves(self):
        # This is only here to reuse the logic in it.
        self.sale_toolbar = SaleListToolbar(self.store, self.results,
                                            parent=self)

    def _update_toolbar(self, *args):
        """Enable/disable per-sale actions based on the current selection."""
        sale_view = self.results.get_selected()
        # FIXME: Disable invoice printing if the sale was returned. Remove this
        #        when we add proper support for returned sales invoice.
        can_print_invoice = bool(sale_view and
                                 sale_view.client_name is not None and
                                 sale_view.status != Sale.STATUS_RETURNED)
        self.set_sensitive([self.SalesPrintInvoice], can_print_invoice)

        self.set_sensitive([self.SalesCancel],
                           bool(sale_view and sale_view.can_cancel()))
        self.set_sensitive([self.sale_toolbar.return_sale_button, self.Return],
                           bool(sale_view and sale_view.can_return()))
        self.set_sensitive([self.sale_toolbar.return_sale_button, self.Details],
                           bool(sale_view))
        self.set_sensitive([self.sale_toolbar.edit_button, self.Edit],
                           bool(sale_view and sale_view.can_edit()))
        # If the sale cannot be edit anymore, we only allow to change the client
        self.set_sensitive([self.ChangeClient],
                           bool(sale_view and not sale_view.can_edit()))
        self.set_sensitive([self.ChangeSalesperson],
                           bool(sale_view and not sale_view.can_edit()))

        self.sale_toolbar.set_report_filters(self.search.get_search_filters())

    def _print_invoice(self):
        """Print the invoice of the selected sale on the station's printer."""
        sale_view = self.results.get_selected()
        assert sale_view
        sale = sale_view.sale
        station = api.get_current_station(self.store)
        printer = InvoicePrinter.get_by_station(station, self.store)
        if printer is None:
            info(_("There are no invoice printer configured for this station"))
            return
        assert printer.layout

        invoice = SaleInvoice(sale, printer.layout)
        if not invoice.has_invoice_number() or sale.invoice_number:
            print_sale_invoice(invoice, printer)
        else:
            # Ask the user for an invoice number before printing.
            store = api.new_store()
            retval = self.run_dialog(SaleInvoicePrinterDialog, store,
                                     store.fetch(sale), printer)
            store.confirm(retval)
            store.close()

    def _setup_columns(self, sale_status=Sale.STATUS_CONFIRMED):
        """Show only the date column relevant for *sale_status*."""
        self._status_col.visible = False

        if sale_status is None:
            # When there is no filter for sale status, show the
            # 'date started' column by default
            sale_status = Sale.STATUS_INITIAL
            self._status_col.visible = True

        if self._visible_date_col:
            self._visible_date_col.visible = False

        col = self.search.get_column_by_attribute(self.cols_info[sale_status])
        if col is not None:
            self._visible_date_col = col
            col.visible = True

        self.results.set_columns(self.search.columns)
        # Adding summary label again and make it properly aligned with the
        # new columns setup
        self._create_summary_label()

    def _get_status_values(self):
        items = [(value, key) for key, value in Sale.statuses.items()]
        items.insert(0, (_('Any'), None))
        return items

    def _get_filter_options(self):
        """Build the (label, FilterItem) pairs for the status filter combo."""
        options = [
            (_('All Sales'), None),
            (_('Confirmed today'), FilterItem('custom', 'sold-today')),
            (_('Confirmed in the last 7 days'), FilterItem('custom', 'sold-7days')),
            (_('Confirmed in the last 28 days'), FilterItem('custom', 'sold-28days')),
            (_('Expired quotes'), FilterItem('custom', 'expired-quotes')),
            ('sep', None),
        ]

        for key, value in Sale.statuses.items():
            options.append((value, FilterItem('status', key)))
        return options

    def _get_status_query(self, state):
        """Translate the selected FilterItem into a storm query expression."""
        if state.value is None:
            # FIXME; We cannot return None here, otherwise, the query will have
            # a 'AND NULL', which will return nothing.
            return True

        if state.value.name == 'custom':
            self._setup_columns(None)
            return SALES_FILTERS[state.value.value]
        elif state.value.name == 'status':
            self._setup_columns(state.value.value)
            return SaleView.status == state.value.value

        raise AssertionError(state.value.name, state.value.value)

    def _new_sale_quote(self, wizard):
        """Run *wizard* to create a new quote, refusing on open inventory."""
        if self.check_open_inventory():
            warning(_("You cannot create a quote with an open inventory."))
            return

        store = api.new_store()
        model = self.run_dialog(wizard, store)
        store.confirm(model)
        store.close()

        if model:
            self.refresh()

    def _search_product(self):
        hide_cost_column = not api.sysparam.get_bool('SHOW_COST_COLUMN_IN_SALES')
        self.run_dialog(ProductSearch, self.store, hide_footer=True,
                        hide_toolbar=True, hide_cost_column=hide_cost_column)

    def _change_sale_client(self):
        """Open the editor to change the client of the selected sale."""
        sale_view = self.results.get_selected()
        with api.new_store() as store:
            sale = store.fetch(sale_view.sale)
            self.run_dialog(SaleClientEditor, store=store, model=sale)

        if store.committed:
            self.refresh()

    def _change_salesperson(self):
        """Open the editor to change the salesperson of the selected sale."""
        sale_view = self.results.get_selected()
        with api.new_store() as store:
            sale = store.fetch(sale_view.sale)
            self.run_dialog(SalesPersonEditor, store=store, model=sale)

        if store.committed:
            self.refresh()

    #
    # Kiwi callbacks
    #

    # NOTE(review): the two underscore-prefixed handlers below duplicate
    # on_sale_toolbar__sale_edited/returned further down -- confirm whether
    # kiwi autoconnects the '_on_' variants at all; they look like dead code.
    def _on_sale_toolbar__sale_returned(self, toolbar, sale):
        self.refresh()

    def _on_sale_toolbar__sale_edited(self, toolbar, sale):
        self.refresh()

    def on_results__selection_changed(self, results, sale):
        self._update_toolbar()

    def on_results__has_rows(self, results, has_rows):
        self._update_toolbar()

    def on_results__right_click(self, results, result, event):
        self.popup.popup(None, None, None, event.button, event.time)

    # Sales

    def on_SaleQuote__activate(self, action):
        self._new_sale_quote(wizard=SaleQuoteWizard)

    def on_WorkOrderQuote__activate(self, action):
        self._new_sale_quote(wizard=WorkOrderQuoteWizard)

    def on_SalesCancel__activate(self, action):
        """Cancel the selected sale after asking the user for a reason."""
        sale_view = self.results.get_selected()
        # A plugin (e.g. ECF) may veto cancelling the last printed sale.
        can_cancel = api.sysparam.get_bool('ALLOW_CANCEL_LAST_COUPON')
        if can_cancel and ECFIsLastSaleEvent.emit(sale_view.sale):
            info(_("That is last sale in ECF. Return using the menu "
                   "ECF - Cancel Last Document"))
            return

        store = api.new_store()
        sale = store.fetch(sale_view.sale)
        msg_text = _(u"This will cancel the sale, Are you sure?")

        model = SaleComment(store=store, sale=sale,
                            author=api.get_current_user(store))
        retval = self.run_dialog(
            NoteEditor, store, model=model, attr_name='comment',
            message_text=msg_text, label_text=_(u"Reason"),
            mandatory=True, ok_button_label=_(u"Cancel sale"),
            cancel_button_label=_(u"Don't cancel"))

        if not retval:
            store.rollback()
            return

        sale.cancel()
        store.commit(close=True)
        self.refresh()

    def on_ChangeClient__activate(self, action):
        self._change_sale_client()

    def on_ChangeSalesperson__activate(self, action):
        self._change_salesperson()

    def on_SalesPrintInvoice__activate(self, action):
        return self._print_invoice()

    # Loan

    def on_LoanNew__activate(self, action):
        if self.check_open_inventory():
            return
        store = api.new_store()
        model = self.run_dialog(NewLoanWizard, store)
        store.confirm(model)
        store.close()

    def on_LoanClose__activate(self, action):
        if self.check_open_inventory():
            return
        store = api.new_store()
        model = self.run_dialog(CloseLoanWizard, store)
        store.confirm(model)
        store.close()

    def on_LoanSearch__activate(self, action):
        self.run_dialog(LoanSearch, self.store)

    def on_LoanSearchItems__activate(self, action):
        self.run_dialog(LoanItemSearch, self.store)

    def on_ReturnedSaleSearch__activate(self, action):
        self.run_dialog(ReturnedSaleSearch, self.store)

    def on_SearchUnconfirmedSaleItems__activate(self, action):
        self.run_dialog(UnconfirmedSaleItemsSearch, self.store)

    # Search

    def on_SearchClient__activate(self, button):
        self.run_dialog(ClientSearch, self.store, hide_footer=True)

    def on_SearchProduct__activate(self, button):
        self._search_product()

    def on_SearchCommission__activate(self, button):
        self.run_dialog(CommissionSearch, self.store)

    def on_SearchClientCalls__activate(self, action):
        self.run_dialog(ClientCallsSearch, self.store)

    def on_SearchCreditCheckHistory__activate(self, action):
        self.run_dialog(CreditCheckHistorySearch, self.store)

    def on_SearchService__activate(self, button):
        self.run_dialog(ServiceSearch, self.store, hide_toolbar=True)

    def on_SearchSoldItemsByBranch__activate(self, button):
        self.run_dialog(SoldItemsByBranchSearch, self.store)

    def on_SearchSalesByPaymentMethod__activate(self, button):
        self.run_dialog(SalesByPaymentMethodSearch, self.store)

    def on_SearchDelivery__activate(self, action):
        self.run_dialog(DeliverySearch, self.store)

    def on_SearchSalesPersonSales__activate(self, action):
        self.run_dialog(SalesPersonSalesSearch, self.store)

    def on_SearchClientsWithSale__activate(self, action):
        self.run_dialog(ClientsWithSaleSearch, self.store)

    def on_SearchClientsWithCredit__activate(self, action):
        self.run_dialog(ClientsWithCreditSearch, self.store)

    def on_SearchSoldItemsByClient__activate(self, action):
        self.run_dialog(SoldItemsByClientSearch, self.store)

    # Toolbar

    def on_Edit__activate(self, action):
        self.sale_toolbar.edit()

    def on_Details__activate(self, action):
        self.sale_toolbar.show_details()

    def on_Return__activate(self, action):
        if self.check_open_inventory():
            return
        self.sale_toolbar.return_sale()

    # Sale toobar

    def on_sale_toolbar__sale_edited(self, widget, sale):
        self.refresh()

    def on_sale_toolbar__sale_returned(self, widget, sale):
        self.refresh()
| andrebellafronte/stoq | stoq/gui/sales.py | Python | gpl-2.0 | 26,301 | [
"VisIt"
] | a42292da3db39e4738c6acf56cabca73acb1de08fbdc9e6e6ecda7bd0d1db792 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Slack
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from django.test import TestCase as TestCaseBase
from .scraper import beatport, discogs, itunes, junodownload, metalarchives, musicbrainz, bandcamp, musiksammler
from .result import ReleaseResult, ListResult, NotFoundResult, Result
import unittest
def todict(obj):
    """Recursively convert *obj* into plain Python data for comparison.

    Iterables become lists, objects with a __dict__ become dicts of their
    public, non-callable attributes, and everything else is returned as-is.
    """
    # Guard strings first: on Python 3 str has __iter__, so without this
    # check a string would recurse infinitely.  On Python 2 neither str nor
    # unicode defines __iter__, so behavior there is unchanged.
    if isinstance(obj, (bytes, type(u''))):
        return obj
    elif hasattr(obj, "__iter__"):
        return [todict(v) for v in obj]
    elif hasattr(obj, "__dict__"):
        # .items() (instead of the Python-2-only .iteritems()) works on
        # both Python 2 and 3 with identical results.
        return dict([(key, todict(value))
                     for key, value in obj.__dict__.items()
                     if not callable(value) and not key.startswith('_')])
    else:
        return obj
class TestCase(TestCaseBase):
    """Base test case that compares scraper Result trees structurally."""

    # Always show full diffs on assertion failures.
    maxDiff = None

    def setUp(self):
        # Route equality checks for all Result subclasses through our
        # structural comparison.
        for result_type in (ReleaseResult, ListResult, NotFoundResult):
            self.addTypeEqualityFunc(result_type, 'assertResultEqual')

    def assertResultEqual(self, d1, d2, msg=None):
        """Assert both arguments are Results of the same kind with equal data."""
        self.assertTrue(issubclass(d1.__class__, Result), 'First argument is not a Result')
        self.assertTrue(issubclass(d2.__class__, Result), 'Second argument is not a Result')
        self.assertEqual(d1.__class__.__name__, d2.__class__.__name__)
        self.assertEqual(todict(d1), todict(d2), msg)
class DiscogsTest(TestCase):
    def test_simple_album(self):
        """Scrape a plain single-disc release and compare the full result tree.

        NOTE(review): ReleaseScraper.from_string() appears to fetch the live
        discogs.com page -- confirm whether this test needs network access.
        """
        # Build the expected ReleaseResult by hand.
        expected = ReleaseResult()
        expected.set_scraper_name(None)

        release_event = expected.create_release_event()
        release_event.set_date(u'03 Nov 2000')
        release_event.set_country(u'Germany')
        expected.append_release_event(release_event)
        expected.set_format(u'CD, Album')
        label_id = expected.create_label_id()
        label_id.set_label(u'Richterskala')
        label_id.append_catalogue_nr(u'TRI 070 CD')
        expected.append_label_id(label_id)
        expected.set_title(u'Hast Du Mich Vermisst?')
        artist = expected.create_artist()
        artist.set_name(u'ASP')
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(artist)
        expected.append_genre(u'Electronic')
        expected.append_genre(u'Rock')
        expected.append_style(u'Goth Rock')
        expected.append_style(u'Synth-pop')
        expected.set_url(u'http://www.discogs.com/ASP-Hast-Du-Mich-Vermisst/release/453432')

        # Single disc with nine tracks; lengths are in seconds.
        disc = expected.create_disc()
        disc.set_number(1)
        disc.set_title(None)
        track = disc.create_track()
        track.set_number(u'1')
        track.set_title(u'Schwarzer Schmetterling')
        track.set_length(290)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'2')
        track.set_title(u'Where Do The Gods Go')
        track.set_length(226)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'3')
        track.set_title(u'Dancing')
        track.set_length(345)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'4')
        track.set_title(u'K\xfcss Mich')
        track.set_length(311)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'5')
        track.set_title(u'Sing Child')
        track.set_length(239)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'6')
        track.set_title(u'Teach Me War')
        track.set_length(225)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'7')
        track.set_title(u'Imbecile Anthem')
        track.set_length(222)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'8')
        track.set_title(u'Und Wir Tanzten (Ungeschickte Liebesbriefe)')
        track.set_length(305)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'9')
        track.set_title(u'Blinded')
        track.set_length(443)
        disc.append_track(track)
        expected.append_disc(disc)

        # Run the scraper and compare the whole result tree structurally
        # (see TestCase.assertResultEqual).
        scraper = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/ASP-Hast-Du-Mich-Vermisst/release/453432')
        result = scraper.get_result()
        self.assertEqual(expected, result)
def test_multiple_cds(self):
    """Scraping a multi-disc box set must yield every disc with its own track list."""
    url = u'http://www.discogs.com/ASP-Chamber-The-Once-In-A-Lifetime-Recollection-Box/release/977684'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date(u'25 May 2007')
    event.set_country(u'Germany')
    expected.append_release_event(event)
    expected.set_format(u'4 \xd7 CD, Compilation, Limited Edition, Digipak, Box Set, Limited Edition, Hand-Numbered')
    label_id = expected.create_label_id()
    label_id.set_label(u'[Trisol] Music Group GmbH')
    label_id.append_catalogue_nr(u'TRI 303 CD')
    expected.append_label_id(label_id)
    expected.set_title(u"The 'Once In A Lifetime' Recollection Box")
    for artist_name in (u'ASP', u'Chamber'):
        artist = expected.create_artist()
        artist.set_name(artist_name)
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(artist)
    for genre in (u'Classical', u'Non-Music', u'Rock'):
        expected.append_genre(genre)
    for style in (u'Acoustic', u'Goth Rock', u'Classical', u'Speech'):
        expected.append_style(style)
    expected.set_url(url)
    # One inner list per disc; rows are (track number, title, length in seconds).
    all_discs = [
        [
            (u'1', u'Once In A Lifetime, Part 1', 351),
            (u'2', u"A Dead Man's Song", 312),
            (u'3', u'Versuchung', 345),
            (u'4', u'Torn', 304),
            (u'5', u'Demon Love', 272),
            (u'6', u'The Paperhearted Ghost', 283),
            (u'7', u'A Tale Of Real Love', 316),
            (u'8', u'Hunger', 289),
            (u'9', u'The Truth About Snow-White', 240),
            (u'10', u'She Wore Shadows', 275),
            (u'11', u'Und Wir Tanzten (Ungeschickte Liebesbriefe)', 317),
            (u'12', u'Once In A Lifetime, Part 2 (Reprise)', 164),
        ],
        [
            (u'1', u'K\xfcss Mich', 384),
            (u'2', u'Silence - Release', 225),
            (u'3', u'Solitude', 220),
            (u'4', u'Die Ballade Von Der Erweckung', 527),
            (u'5', u'Another Conversation', 201),
            (u'6', u'Sing Child', 449),
            (u'7', u'Ich Will Brennen', 300),
            (u'8', u'Toscana', 374),
            (u'9', u'Ride On', 222),
            (u'10', u'Hometown', 181),
            (u'11', u'Werben', 293),
            (u'12', u'Once In A Lifetime, Part 3 (Finale)', 608),
        ],
        [
            (u'1', u'H\xe4sslich', 145),
            (u'2', u'Backstage (All Areas)', 573),
            (u'3', u'Paracetamoltr\xe4ume', 517),
            (u'4', u'Ausszug Aus "Tremendista" Feat. Ralph M\xfcller/Gitarre', 1473),
            (u'5', u'Campari O', 159),
        ],
        [
            (u'1', u'Asp, Soundcheck-Outtake: "Sicamore Trees"', 94),
            (u'2', u'Demon Love', 275),
            (u'3', u'The Truth About Snow-White', 274),
            (u'4', u'She Wore Shadows', 319),
            (u'5', u'Sing Child', 469),
            (u'6', u'Hometown', 221),
            (u'7', u'Hunger', 274),
            (u'8', u'Silence-Release', 208),
            (u'9', u'Asp, Soundcheck-Outtake: "She Moved Through The Fair"', 120),
        ],
    ]
    for disc_number, track_rows in enumerate(all_discs, 1):
        disc = expected.create_disc()
        disc.set_number(disc_number)
        disc.set_title(None)
        for number, title, length in track_rows:
            track = disc.create_track()
            track.set_number(number)
            track.set_title(title)
            track.set_length(length)
            disc.append_track(track)
        expected.append_disc(disc)
    scraper = discogs.ReleaseScraper.from_string(url)
    self.assertEqual(expected, scraper.get_result())
def test_featuring_track_artist(self):
    """Tracks with a guest credit must carry that artist with the FEATURING type."""
    url = u'http://www.discogs.com/Mono-Nikitaman-Unter-Freunden/release/3432154'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date(u'01 Apr 2011')
    event.set_country(u'Europe')
    expected.append_release_event(event)
    expected.set_format(u'CD, Album')
    label_id = expected.create_label_id()
    label_id.set_label(u'Rootdown Records')
    label_id.append_catalogue_nr(u'RDM13074-2')
    expected.append_label_id(label_id)
    expected.set_title(u'Unter Freunden')
    artist = expected.create_artist()
    artist.set_name(u'Mono & Nikitaman')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre(u'Reggae')
    expected.append_style(u'Dancehall')
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # Rows are (track number, title, length, featured guest or None).
    track_rows = [
        (u'1', u'Intro', 13, None),
        (u'2', u'Unter Freunden', 184, None),
        (u'3', u'Karma', 189, u"Ce'cile"),
        (u'4', u'Zeit Steht Still', 260, None),
        (u'5', u'Komplizen', 185, None),
        (u'6', u'Wenn Sich Der Nebel Verzieht', 197, u'Gentleman'),
        (u'7', u'Schwerelos', 227, None),
        (u'8', u'Ein Paar Meter', 198, None),
        (u'9', u'Cash', 188, None),
        (u'10', u'Dezibel', 270, None),
        (u'11', u'Kontrast', 214, None),
        (u'12', u'R\xfcckkehr Der Clowns', 198, None),
        (u'13', u'Superstar', 227, None),
        (u'14', u'Underground', 204, None),
        (u'15', u'Showdown', 261, u'Rebellion'),
    ]
    for number, title, length, guest in track_rows:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        if guest is not None:
            track_artist = expected.create_artist()
            track_artist.set_name(guest)
            track_artist.set_various(False)
            track_artist.append_type(expected.ArtistTypes.FEATURING)
            track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    scraper = discogs.ReleaseScraper.from_string(url)
    self.assertEqual(expected, scraper.get_result())
def test_remix_track_artist(self):
    """Remix credits must map to per-track artists with the REMIXER type."""
    url = u'http://www.discogs.com/ASP-Aus-Der-Tiefe-Der-Schwarze-Schmetterling-IV/release/710517'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date(u'04 Jul 2005')
    event.set_country(u'Germany')
    expected.append_release_event(event)
    expected.set_format(u'CD, Album, Limited Edition, Digibook, CD, Compilation, Limited Edition')
    label_id = expected.create_label_id()
    label_id.set_label(u'Trisol')
    label_id.append_catalogue_nr(u'TRI 231 CD')
    expected.append_label_id(label_id)
    expected.set_title(u'Aus Der Tiefe')
    artist = expected.create_artist()
    artist.set_name(u'ASP')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre(u'Electronic')
    expected.append_genre(u'Rock')
    expected.append_style(u'Alternative Rock')
    expected.set_url(url)
    featuring = expected.ArtistTypes.FEATURING
    remixer = expected.ArtistTypes.REMIXER
    # One inner list per disc; rows are
    # (track number, title, length, (credited artist, credit type) or None).
    all_discs = [
        [
            (u'1', u'Beschw\xf6rung', 391, None),
            (u'2', u'Willkommen Zur\xfcck', 137, None),
            (u'3', u'Schwarzes Blut', 212, None),
            (u'4', u'Im Dunklen Turm', 101, None),
            (u'5', u'Me', 278, None),
            (u'6', u'Schattenschreie', 21, None),
            (u'7', u'Hunger', 321, None),
            (u'8', u'Fremde Erinnerungen', 72, None),
            (u'9', u'Ballade Von Der Erweckung', 533, None),
            (u'10', u'Tiefenrausch', 245, None),
            (u'11', u'Schmetterling, Du Kleines Ding', 42, None),
            (u'12', u'Ich Komm Dich Holn', 257, None),
            (u'13', u'Werben', 268, None),
            (u'14', u'Aus Der Tiefe', 198, None),
            (u'15', u'Spiegelaugen', 204, None),
            (u'16', u'Tiefenrausch (Reprise)', 67, None),
            (u'17', u'Panik', 252, None),
            (u'18', u'Spiegel', 331, None),
        ],
        [
            (u'1', u'Schwarzes Blut (Haltung Version)', 249, None),
            (u'2', u'Werben (Subtil Edit)', 257, None),
            (u'3', u'Me (Single Version)', 225, None),
            (u'4', u'Tiefenrausch (Feat. Sara Noxx)', 245, (u'Sara Noxx', featuring)),
            (u'5', u'Hunger (Single Mix)', 259, None),
            (u'6', u'Panik (Ganz Rauf-Verison)', 273, None),
            (u'7', u'Beschw\xf6rung (Siegeszug Instrumental)', 205, None),
            (u'8', u'Buch Des Vergessens (Unreines Spiegelsonett)', 115, None),
            (u'9', u'Kokon (Brandneu-Remix Von Umbra Et Imago)', 279, (u'Umbra Et Imago', remixer)),
            (u'10', u'Me (Me And You Remix Von Blutengel)', 344, (u'Blutengel', remixer)),
            (u'11', u'Und Wir Tanzten (Ungeschickte Liebesbriefe) (Live)', 347, None),
            (u'12', u'Ich Will Brennen (Live)', 369, None),
            (u'13', u'Starfucker: In Der Folterkammer', 127, None),
        ],
    ]
    for disc_number, track_rows in enumerate(all_discs, 1):
        disc = expected.create_disc()
        disc.set_number(disc_number)
        disc.set_title(None)
        for number, title, length, credit in track_rows:
            track = disc.create_track()
            track.set_number(number)
            track.set_title(title)
            track.set_length(length)
            if credit is not None:
                credit_name, credit_type = credit
                track_artist = expected.create_artist()
                track_artist.set_name(credit_name)
                track_artist.set_various(False)
                track_artist.append_type(credit_type)
                track.append_artist(track_artist)
            disc.append_track(track)
        expected.append_disc(disc)
    scraper = discogs.ReleaseScraper.from_string(url)
    self.assertEqual(expected, scraper.get_result())
def test_vinyl(self):
    """Vinyl releases use side-letter track positions and carry no track lengths."""
    url = u'http://www.discogs.com/Mono-Nikitaman-Ausser-Kontrolle/release/1540929'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date(u'2008')
    event.set_country(u'Germany')
    expected.append_release_event(event)
    expected.set_format(u'2 \xd7 Vinyl, LP')
    label_id = expected.create_label_id()
    label_id.set_label(u'Rootdown Records')
    label_id.append_catalogue_nr(u'RDM 13051-1')
    expected.append_label_id(label_id)
    expected.set_title(u'Ausser Kontrolle')
    artist = expected.create_artist()
    artist.set_name(u'Mono & Nikitaman')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre(u'Reggae')
    expected.append_style(u'Dancehall')
    expected.append_style(u'Reggae-Pop')
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    featuring = expected.ArtistTypes.FEATURING
    remixer = expected.ArtistTypes.REMIXER
    # Rows are (position, title, credited artist or None, credit type or None).
    # Fix: u"Hol's Dir" was previously the only byte-string title literal in
    # this file; normalized to a unicode literal for consistency with the
    # scraper's unicode output.
    track_rows = [
        (u'A1', u'Intro', None, None),
        (u'A2', u'Schlag Alarm', None, None),
        (u'A3', u'Kann Ja Mal Passieren', None, None),
        (u'A4', u'Ausser Kontrolle', None, None),
        (u'A5', u"Hol's Dir", None, None),
        (u'B1', u'Das Alles', None, None),
        (u'B2', u'Digge Digge', None, None),
        (u'B3', u'Nur So', None, None),
        (u'B4', u'Yeah', None, None),
        (u'C1', u'Von Osten Bis Westen', u'Russkaja', featuring),
        (u'C2', u'Wenn Ihr Schlaft', None, None),
        (u'C3', u'Unterwegs', None, None),
        (u'C4', u'Tiktak', None, None),
        (u'D1', u'Tut Mir Leid', u'Nosliw', featuring),
        (u'D2', u'Es Kommt Anders', None, None),
        (u'D3', u'Das Alles (Zion Train Remix)', u'Zion Train', remixer),
    ]
    for number, title, credit_name, credit_type in track_rows:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        # No per-track running time on this vinyl listing.
        track.set_length(None)
        if credit_name is not None:
            track_artist = expected.create_artist()
            track_artist.set_name(credit_name)
            track_artist.set_various(False)
            track_artist.append_type(credit_type)
            track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    scraper = discogs.ReleaseScraper.from_string(url)
    self.assertEqual(expected, scraper.get_result())
def test_featuring_main_artist(self):
    """A 'feat.' credit on the release must become a FEATURING release artist."""
    url = u'http://www.discogs.com/Lifted-Emotion-feat-Anastasiia-Purple-In-My-Dreams/release/2806179'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date(u'08 Feb 2011')
    event.set_country(u'Germany')
    expected.append_release_event(event)
    expected.set_format(u'3 \xd7 File, MP3, 320 kbps')
    label_id = expected.create_label_id()
    label_id.set_label(u'Redux Recordings')
    label_id.append_catalogue_nr(u'RDX062')
    expected.append_label_id(label_id)
    expected.set_title(u'In My Dreams')
    for artist_name, artist_type in (
            (u'Lifted Emotion', expected.ArtistTypes.MAIN),
            (u'Anastasiia Purple', expected.ArtistTypes.FEATURING)):
        artist = expected.create_artist()
        artist.set_name(artist_name)
        artist.set_various(False)
        artist.append_type(artist_type)
        expected.append_release_artist(artist)
    expected.append_genre(u'Electronic')
    expected.append_style(u'Trance')
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # NOTE(review): the last two tracks both carry number u'2' -- this mirrors
    # the original expected data exactly; verify against the source page.
    track_rows = [
        (u'1', u'In My Dreams (Original Vocal Mix)', 558, None),
        (u'2', u'In My Dreams (Original Dub Mix)', 558, None),
        (u'2', u'In My Dreams (Ost & Meyer Extraodinary Mix)', 472, u'Ost & Meyer'),
    ]
    for number, title, length, remix_credit in track_rows:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        if remix_credit is not None:
            track_artist = expected.create_artist()
            track_artist.set_name(remix_credit)
            track_artist.set_various(False)
            track_artist.append_type(expected.ArtistTypes.REMIXER)
            track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    scraper = discogs.ReleaseScraper.from_string(url)
    self.assertEqual(expected, scraper.get_result())
def test_various_artists(self):
    """Compilations must yield a 'various' release artist plus per-track MAIN artists."""
    url = u'http://www.discogs.com/Various-Gothic-File-14/release/3700493'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date(u'2010')
    event.set_country(u'Germany')
    expected.append_release_event(event)
    expected.set_format(u'CD, Compilation')
    label_id = expected.create_label_id()
    label_id.set_label(u'Batbeliever Releases')
    label_id.append_catalogue_nr(u'BAT 075')
    expected.append_label_id(label_id)
    expected.set_title(u'Gothic File 14')
    # The release-level artist is the anonymous "various" marker.
    artist = expected.create_artist()
    artist.set_name(None)
    artist.set_various(True)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    for genre in (u'Electronic', u'Rock'):
        expected.append_genre(genre)
    for style in (u'EBM', u'Darkwave', u'Industrial', u'Goth Rock', u'Electro'):
        expected.append_style(style)
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # Rows are (track number, title, length, performing artist).
    track_rows = [
        (u'1', u'Echo In Me', 236, u'Diary Of Dreams'),
        (u'2', u'Liar (Version)', 219, u'Gothminister'),
        (u'3', u'The End Of It All (Edit)', 237, u'Sirenia'),
        (u'4', u'Sanctuary', 239, u'Merciful Nuns'),
        (u'5', u'Worlds Collide (Demo Version)', 261, u'Covenant'),
        (u'6', u'Drowning World', 253, u'Ien Oblique'),
        (u'7', u'In The Name Of God', 297, u'Betamorphose'),
        (u'8', u'PsychoCop (Folge 8)', 171, u'Don Harris'),
    ]
    for number, title, length, performer in track_rows:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        track_artist = expected.create_artist()
        track_artist.set_name(performer)
        track_artist.set_various(False)
        track_artist.append_type(expected.ArtistTypes.MAIN)
        track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    scraper = discogs.ReleaseScraper.from_string(url)
    self.assertEqual(expected, scraper.get_result())
def test_label_with_suffix(self):
    """Scrape a release whose Discogs label entry carries a suffix; the
    expected result holds the plain label name u'Indigo'."""
    url = u'http://www.discogs.com/Feuerschwanz-Prima-Nocte/release/2611694'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date(u'25 Nov 2005')
    event.set_country(u'Germany')
    expected.append_release_event(event)
    expected.set_format(u'CD, Album')
    label_id = expected.create_label_id()
    label_id.set_label(u'Indigo')
    label_id.append_catalogue_nr(u'CD 55182')
    expected.append_label_id(label_id)
    expected.set_title(u'Prima Nocte')
    artist = expected.create_artist()
    artist.set_name(u'Feuerschwanz')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    for genre in (u'Folk', u'World', u'Country', u'Rock'):
        expected.append_genre(genre)
    expected.append_style(u'Medieval')
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # Rows are (track number, title, length in seconds).
    track_rows = [
        (u'1', u'Es War Einmal', 172),
        (u'2', u'Das Mittelalter', 260),
        (u'3', u'Drachentanz', 224),
        (u'4', u'Das Turnier', 254),
        (u'5', u'Prima Nocte', 331),
        (u'6', u'B\xe4rentanz', 232),
        (u'7', u'Herren Der Winde', 265),
        (u'8', u'Der Teufel', 290),
        (u'9', u'Schneewittchen', 377),
        (u'10', u'Der Traum', 319),
        (u'11', u'R\xe4uber', 206),
        (u'12', u'Sauflied', 234),
        (u'13', u'Teufelsgeschenk', 264),
        (u'14', u'La\xdft Die Ritter Schlafen', 313),
        (u'15', u'Gute Nacht', 420),
    ]
    for number, title, length in track_rows:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        disc.append_track(track)
    expected.append_disc(disc)
    scraper = discogs.ReleaseScraper.from_string(url)
    self.assertEqual(expected, scraper.get_result())
def test_album_with_unicode_dash_in_title(self):
    """Scrape a release whose title contains an en dash (u'\\u2013') and
    check the full expected result, including per-track artists.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date(u'25 Jun 2012')
    release_event.set_country(u'UK')
    expected.append_release_event(release_event)
    expected.set_format(u'CD, Album')
    label_id = expected.create_label_id()
    label_id.set_label(u'Ash International')
    label_id.append_catalogue_nr(u'Ash 9.5')
    expected.append_label_id(label_id)
    expected.set_title(u'AUN \u2013 The Beginning And The End Of All Things')
    artist = expected.create_artist()
    artist.set_name(u'Christian Fennesz')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    for genre in (u'Electronic', u'Stage & Screen'):
        expected.append_genre(genre)
    for style in (u'Abstract', u'Ambient', u'Modern Classical',
                  u'Soundtrack'):
        expected.append_style(style)
    url = (u'http://www.discogs.com/Christian-Fennesz-AUN-The-Beginning-'
           u'And-The-End-Of-All-Things/release/2881000')
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (track number, title, length in seconds, main track artist)
    track_specs = [
        (u'1', u'Kae', 131, u'Christian Fennesz'),
        (u'2', u'Aware', 288, u'Fennesz Sakamoto'),
        (u'3', u'Haru', 282, u'Fennesz Sakamoto'),
        (u'4', u'Sekai', 134, u'Christian Fennesz'),
        (u'5', u'Euclides', 184, u'Christian Fennesz'),
        (u'6', u'Sasazuka', 231, u'Christian Fennesz'),
        (u'7', u'Trace', 349, u'Fennesz Sakamoto'),
        (u'8', u'Mori', 75, u'Christian Fennesz'),
        (u'9', u'AUN40', 306, u'Christian Fennesz'),
        (u'10', u'Namuru', 170, u'Christian Fennesz'),
        (u'11', u'Himitsu', 166, u'Christian Fennesz'),
        (u'12', u'AUN80', 217, u'Christian Fennesz'),
        (u'13', u'Nympha', 150, u'Christian Fennesz'),
        (u'14', u'Shinu', 215, u'Christian Fennesz'),
        (u'15', u'Hikari', 256, u'Christian Fennesz'),
    ]
    for number, title, length, artist_name in track_specs:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        track_artist = expected.create_artist()
        track_artist.set_name(artist_name)
        track_artist.set_various(False)
        track_artist.append_type(expected.ArtistTypes.MAIN)
        track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    s = discogs.ReleaseScraper.from_string(url)
    r = s.get_result()
    self.assertEqual(expected, r)
def test_master_release(self):
    """Scrape a master-release page and check the expected list of
    individual releases (name, info line, and release URL).
    """
    expected = ListResult()
    expected.set_scraper_name(None)
    # (item name, info line, release id); query and url are always the
    # same release URL, derived here from the numeric id.
    entries = [
        (u'Mystic Matt & Anthill Mob \u2013 Burning',
         u'10", Pic, Ltd | Love Peace And Unity Recordings | LOVE-03 | UK | 1997',
         146468),
        (u'Mystic Matt & Anthill Mob \u2013 Burning',
         u'10", Pic, Ltd, S/Sided | Love Peace And Unity Recordings | LOVE-03 | UK | 1997',
         1503116),
        (u'Mystic Matt & Anthill Mob \u2013 Burning',
         u'12", Ltd, Pic | Love Peace And Unity Recordings | LOVE-06 | UK | 2006-04-18',
         670448),
        (u'Mystic Matt & Anthill Mob \u2013 Burnin',
         u'12", Promo | Classic Confetti | CC 02 | UK | 2001',
         2093234),
        (u"Mystic Matt & Anthill Mob \u2013 'Burnin (Let The Music)'",
         u'12" | Classic Confetti | CC 02 (2) | UK | 2001',
         284437),
        (u'Mystic Matt & Anthill Mob \u2013 Burnin',
         u'12" | Classic Confetti | CC 02 | UK | 2001',
         149302),
        (u'Mystic Matt & Anthill Mob \u2013 Burnin',
         u'12" | Classic Confetti | CC02(3F/02) | UK & Europe | 2002',
         739159),
    ]
    for name, info, release_id in entries:
        item = expected.create_item()
        item.set_name(name)
        item.set_info(info)
        release_url = u'http://www.discogs.com/release/%d' % release_id
        item.set_query(release_url)
        item.set_url(release_url)
        expected.append_item(item)
    s = discogs.MasterScraper.from_string('http://www.discogs.com/Mystic-Matt-Anthill-Mob-Burning/master/181860')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_album_with_face_in_track_numbers(self):
    """Scrape a 7" single whose track numbers are side labels
    ('Face I' / 'Face II') rather than plain integers, and check the
    full expected result.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date(u'1984')
    release_event.set_country(u'Sweden')
    expected.append_release_event(release_event)
    expected.set_format(u'Vinyl, 7"')
    label_id = expected.create_label_id()
    label_id.set_label(u'Mamma')
    label_id.append_catalogue_nr(u'MA-501')
    expected.append_label_id(label_id)
    expected.set_title(u'Another Story')
    artist = expected.create_artist()
    artist.set_name(u'General Belgrano')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre(u'Rock')
    expected.append_style(u'New Wave')
    expected.set_url(u'http://www.discogs.com/General-Belgrano-Another-Story/release/2213179')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    track = disc.create_track()
    track.set_number(u'Face I')
    track.set_title(u'Another Story')
    track.set_length(None)  # no track length listed on the release page
    disc.append_track(track)
    track = disc.create_track()
    track.set_number(u'Face II')
    # Fixed: was a bare byte string; every other title in these tests is
    # a unicode literal, and mixing str/unicode is fragile on Python 2.
    track.set_title(u"War Isn't Gold")
    track.set_length(None)
    disc.append_track(track)
    expected.append_disc(disc)
    s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/General-Belgrano-Another-Story/release/2213179')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_album_with_roman_track_numbers(self):
    """Scrape a release whose tracks are numbered with Roman numerals
    (I..XVI) and check the full expected result.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date(u'22 Apr 2014')
    release_event.set_country(u'US')
    expected.append_release_event(release_event)
    expected.set_format(u'CD, Album, Deluxe Edition, Target Edition')
    # Two label entries sharing the same catalogue number.
    for label_name in (u'Goodbye Records', u'Glassnote'):
        label_id = expected.create_label_id()
        label_id.set_label(label_name)
        label_id.append_catalogue_nr(u'GLS-0161-02')
        expected.append_label_id(label_id)
    expected.set_title(u'The Bones Of What You Believe')
    artist = expected.create_artist()
    artist.set_name(u'Chvrches')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    for genre in (u'Electronic', u'Pop'):
        expected.append_genre(genre)
    for style in (u'Indie Pop', u'Synth-pop'):
        expected.append_style(style)
    url = u'http://www.discogs.com/Chvrches-The-Bones-Of-What-You-Believe/release/5622231'
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (Roman track number, title, length in seconds)
    track_specs = [
        (u'I', u'The Mother We Share', 192),
        (u'II', u'We Sink', 214),
        (u'III', u'Gun', 234),
        (u'IV', u'Tether', 286),
        (u'V', u'Lies', 221),
        (u'VI', u'Under The Tide', 272),
        (u'VII', u'Recover', 226),
        (u'VIII', u'Night Sky', 231),
        (u'IX', u'Science/Visions', 238),
        (u'X', u'Lungs', 183),
        (u'XI', u'By The Throat', 249),
        (u'XII', u'You Caught The Light', 337),
        (u'XIII', u'Recover (Alucard Session)', 252),
        (u'XIV', u'The Mother We Share (Alucard Session)', 198),
        (u'XV', u'Gun (Alucard Session)', 265),
        (u'XVI', u'Tightrope', 209),
    ]
    for number, title, length in track_specs:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        disc.append_track(track)
    expected.append_disc(disc)
    s = discogs.ReleaseScraper.from_string(url)
    r = s.get_result()
    self.assertEqual(expected, r)
def test_featuring_track_artist_in_artist_column(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2014')
release_event.set_country(u'Portugal')
expected.append_release_event(release_event)
expected.set_format(u'3 \xd7 CD, Compilation')
label_id = expected.create_label_id()
label_id.set_label(u'Vidisco')
label_id.append_catalogue_nr(u'11.80.9534')
expected.append_label_id(label_id)
expected.set_title(u'Caribe Grande \xcaxitos 2014')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_genre(u'Latin')
expected.append_genre(u'Pop')
expected.append_genre(u'Folk')
expected.append_genre(u'World')
expected.append_genre(u'Country')
expected.append_style(u'African')
expected.append_style(u'Electro House')
expected.append_style(u'Forr\xf3')
expected.append_style(u'Latin')
expected.append_style(u'House')
expected.set_url(u'http://www.discogs.com/Various-Caribe-Grande-%C3%8Axitos-2014/release/5586877')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Show Das Poderosas')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Kelly Pink')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u"Me Agarra So' No Uhm")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Landrick')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Mi Ni\xf1a')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Pedro Amorim')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'David Miks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Menina Loka')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jey V')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Y2K')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Meu Eu Em Vo\xe7\xea')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Tayla Riddel')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'N\xe3o P\xe1ra (Ela S\xf3 Quer Dan\xe7ar)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u"Johne D'luka")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Bam Bam Bam')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Rogerinho')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Vem A Mi')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Bheaven')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Pecado')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Irm\xe3os Verdades')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'We Gonna Party')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Pedro Amorim')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Daduh King')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Gao Percussion')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Ela \xc9 Top')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Mc Bola')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mc Rodriguez')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Love Love')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'David Miks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u"R'Bros")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Vamos Zuar')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bryan Wilson')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Sebastian Crayn')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mc Bola')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Bailando Asi')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ BodySoul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Hugo Bessa')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Jay Laroye')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Stape')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Quem \xc9 Essa Mulher')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Valdemiro Jos\xe9')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Matias Dam\xe1sio')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Ensa\xf1ame (Conexi\xf3n)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Carlitos Rossy')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Pipe Calderon')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'El Cata')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Nova La Amenaza')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'M\xe1s')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'17')
track.set_title(u'Hoje N\xe3o Saio Daqui (Oh Tcha Tcharara)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'3 Beatz Muzik')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'18')
track.set_title(u'Bailando')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Mike Moonnight')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mark F')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Vic J')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Alex B')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'19')
track.set_title(u'Noche De Fiesta')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jose Delgado')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'David Miks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'20')
track.set_title(u'Beijo Bom')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Hallux')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Marcus')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Lilian Raquel')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'21')
track.set_title(u'Mexe Assim')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Bodysoul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'22')
track.set_title(u'Malandro')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jey V')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'NGA')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Trair A Minha Namorada (Hoje Eu Quero Trair) (DJ Bruno F Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Ricardo')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Bruno F')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Quem Bate')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u"R'Bros")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Taty Agressivo')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Bango')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jay Lima')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Otro Dia (Mastkisoul Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Gregor Salto')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Kit (Kuenta / Tambu)')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mastiksoul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Mina Loca')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bodytalk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Tiago')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Pm Akordeon')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Thiago Martins')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'I Found You (The Spacemakers Dirty Radio Edit)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bryan Wilson')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Sebastian Crayn')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Quero Bail\xe1 (Mastik Jay Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jey V')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Ademar')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Morena (Massivedrum Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Franklin Rodriques')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'William')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Massivedrum')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Stronger (DJ Bruno F Radio Edit)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Da Fonseca')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Jay Lion')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Daniela Pimenta')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Bruno Soares Sax')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Bruno F')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Xibita (DJ Mike C Radio Edit)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Meith')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Y.a.m.a')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mc Guy H.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Mike C')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Africanism')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Alvaro Corz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'M\xfasica')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bryan Dalton')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Te Voy Amar (Soul Beatz Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Pedrito')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mike Moonnight')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Vic J')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Players')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Brian Chundro')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Santos')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Marlldexx')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Vem Rebolando')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'James Noyer')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Vale A Pena 2k14')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'D-Rashid')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Praia Del Sol')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'17')
track.set_title(u'Dan\xe7a Do Tchira')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Mika G')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'18')
track.set_title(u'Bagulho')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Mike C')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'19')
track.set_title(u'Nrg')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Glowinthedark')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Chuckie')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(3)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Mila')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Mar Azul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Canto Da Cidade')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Mar Azul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Beleza Rara')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Mar Azul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Chorando Se Foi')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bahia Tropical')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Amor Perfeito')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Ax\xe9 Bahia')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Ranpuzel')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Mar Azul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Sorte Grande')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bahia Pagode Tropical')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Saia E Bicicletinha')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Ka\xe7amba')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'T\xf4 Nem A\xed')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Daniela')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Sozinho "Dance "')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Mar Azul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
# TODO: decide how to handle this weird Discogs track numbering corner case
# track = disc.create_track()
# track.set_number(u'11')
# track.set_title(u'Pout-Pourri 1')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'12')
# track.set_title(u'Pout-Pourri 2')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'13')
# track.set_title(u'Pout-Pourri 3 (Marchas)')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'14')
# track.set_title(u'Pout-Pourri 4')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'15')
# track.set_title(u'Pout-Pourri 5')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'16')
# track.set_title(u'Los Mayos- Disco Samba')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'17')
# track.set_title(u'Pout-porri 6')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'18')
# track.set_title(u'Pout-porri 7')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
track = disc.create_track()
track.set_number(u'19')
track.set_title(u'Marcha Do Cord\xe3o Do Bola Preta')
track.set_length(None)
disc.append_track(track)
# track = disc.create_track()
# track.set_number(u'20')
# track.set_title(u'Pout-porri 8')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Caribe-Grande-%C3%8Axitos-2014/release/5586877')
r = s.get_result()
self.assertEqual(expected, r)
def test_featuring_in_artist_and_track_column(self):
    """Credits split between the artist and track columns (featuring /
    remixer) are all attached to the right track with the right type."""
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date(u'20 Nov 1996')
    release_event.set_country(u'Germany')
    expected.append_release_event(release_event)
    expected.set_format(u'2 \xd7 CD, Compilation')
    # Same label appears twice with two catalogue-number spellings.
    for catalogue_nr in (u'SMM 486760 2', u'486760 2'):
        label_id = expected.create_label_id()
        label_id.set_label(u'Sony Music Media')
        label_id.append_catalogue_nr(catalogue_nr)
        expected.append_label_id(label_id)
    expected.set_title(u'Dream Dance Vol. 3')
    artist = expected.create_artist()
    artist.set_name(None)
    artist.set_various(True)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre(u'Electronic')
    expected.append_style(u'Trance')
    expected.set_url(u'http://www.discogs.com/Various-Dream-Dance-Vol-3/release/135664')
    main = expected.ArtistTypes.MAIN
    feat = expected.ArtistTypes.FEATURING
    remix = expected.ArtistTypes.REMIXER
    # One list per disc; each entry is
    # (track number, title, length in seconds, [(artist name, credit type), ...]).
    discs = [
        [
            (u'1', u'Salva Mea (Radio Edit)', 224, [(u'Faithless', main)]),
            (u'2', u'A Neverending Dream (Dream Dance Anthem Mix)', 221,
             [(u'Trance X', main)]),
            (u'3', u'7 Seconds (Radio-Video-Single)', 248, [(u'Nomansland', main)]),
            (u'4', u'One And One (Space Edit)', 218, [(u'Ronald Snypes', main)]),
            (u'5', u'Sweet Memories (Radio Edit)', 234, [(u'Groove Solution', main)]),
            (u'6', u'Fall Down On Me (Zhi-Vago In Mission Radio Edit)', 245,
             [(u'Solid', main), (u'Zhi-Vago', remix)]),
            (u'7', u'Cybertrance', 252, [(u'Blue Alphabet', main)]),
            (u'8', u'Beautiful Place (Airwaves Mix)', 204, [(u'Paul van Dyk', main)]),
            (u'9', u'Floating (7\u2033-Mix)', 249, [(u'Terra Ferma', main)]),
            (u'10', u'The Nighttrain (Dream Station Remix)', 369, [(u'Kadoc', main)]),
            (u'11', u'Silencer I (Extended Mix)', 336, [(u'DJ The Crow', main)]),
            (u'12', "Insomniak: I'll Be Your Nightmare (Industrial Mix)", 341,
             [(u'The Grooveman', main)]),
            (u'13', u'Masterpiece', 238, [(u'P-Casso', main)]),
            (u'14', u'My Dimension (Radio Edit)', 205, [(u'DJ Panda', main)]),
            (u'15', u'Secret World (Radio Mix)', 222, [(u'Vector Mode', main)]),
            (u'16', u'Secret Love (Single Edit)', 234, [(u'Magnetic Pulstar', main)]),
            (u'17', u'Atlanta (Sunshine State Club Extravaganza)', 402,
             [(u'Sunshine State', main), (u'Snake Davis', feat)]),
            (u'18', "Walk On By (JPO's & Beam's Radio Mix)", 236,
             [(u'M.R.', main), (u'JPO & Beam', remix)]),
        ],
        [
            (u'1', u'Up To No Good (Radio Edit)', 210, [(u'Porn Kings', main)]),
            (u'2', u'Reality (Too Short Video Radio)', 231, [(u'RMB', main)]),
            (u'3', u'Da Beat Goes\u2026 (Radio Mix)', 228, [(u'Red 5', main)]),
            (u'4', "Why Don't You Dance With Me (Phuture Mix)", 204,
             [(u'Futura Nostra', main)]),
            (u'5', u'Love And Fate (Part 2)', 296, [(u'Love And Fate', main)]),
            (u'6', u'Go (Woodtick Mix)', 323, [(u'Moby', main)]),
            (u'7', u'Waters (Phase 2)', 320, [(u'Taucher', main)]),
            (u'8', u'Do You See The Light (Dance 2 Trance Mix)', 481,
             [(u'Snap!', main), (u'Dance 2 Trance', remix)]),
            (u'9', u'Right In The Night (Fall In Love With Music) (Microbots Remix)',
             383, [(u'Jam & Spoon', main), (u'Plavka', feat), (u'Microbots', remix)]),
            (u'10', u'Te Quierro (Trance Mix)', 331, [(u'Intrance Feat. D-Sign', main)]),
            (u'11', u'Hablando (Acordeon Mix)', 391,
             [(u'Ramirez', main), (u'Pizarro', feat)]),
            (u'12', u'Outsiders (Marusha 7\u2033 Edit)', 240,
             [(u'Yves Deruyter', main), (u'Marusha', remix)]),
            (u'13', u'The Wildlife', 217, [("Mijk's Magic Marble Box", main)]),
            (u'14', u'Rock Your Body (Radio Mix)', 221,
             [(u'The M. Experience III', main)]),
            (u'15', "It's A Dream Song", 228, [(u'DJ Hooligan', main)]),
            (u'16', u'Rhapsody In E', 363, [(u'Scooter', main)]),
        ],
    ]
    for disc_number, track_specs in enumerate(discs, 1):
        disc = expected.create_disc()
        disc.set_number(disc_number)
        disc.set_title(None)
        for number, title, length, credits in track_specs:
            track = disc.create_track()
            track.set_number(number)
            track.set_title(title)
            track.set_length(length)
            for name, credit_type in credits:
                track_artist = expected.create_artist()
                track_artist.set_name(name)
                track_artist.set_various(False)
                track_artist.append_type(credit_type)
                track.append_artist(track_artist)
            disc.append_track(track)
        expected.append_disc(disc)
    s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Dream-Dance-Vol-3/release/135664')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_special_track_row_class(self):
    """Sub-indexed track rows (1a/1b/1c) are scraped as separate tracks."""
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date(u'1999')
    release_event.set_country(u'UK')
    expected.append_release_event(release_event)
    expected.set_format(u'CD, Album')
    label_id = expected.create_label_id()
    label_id.set_label(u'Red Wharf')
    label_id.append_catalogue_nr(u'RWCD004')
    expected.append_label_id(label_id)
    expected.set_title(u'Pilgrim')
    artist = expected.create_artist()
    artist.set_name(u'Graham Bowers')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre(u'Electronic')
    expected.append_genre(u'Jazz')
    expected.append_style(u'Modern Classical')
    expected.set_url(u'http://www.discogs.com/Graham-Bowers-Pilgrim/release/728845')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # Three untimed movements share the "1" index with letter suffixes.
    movements = [
        (u'1a', u'Unconditional'),
        (u'1b', u'Loss Of Innocence'),
        (u'1c', u'Mechanistics'),
    ]
    for number, title in movements:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(None)
        disc.append_track(track)
    expected.append_disc(disc)
    s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Graham-Bowers-Pilgrim/release/728845')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_multiple_special_arists_for_same_track(self):
    """Release 463634: several tracks carry more than one special
    (featuring/remixer) credit; check every credit is attributed."""
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date(u'2000')
    release_event.set_country(u'UK')
    expected.append_release_event(release_event)
    expected.set_format(u'3 \xd7 CD, Mixed')
    label_id = expected.create_label_id()
    label_id.set_label(u'Pure Silk Records')
    label_id.append_catalogue_nr(u'PURESCD3')
    expected.append_label_id(label_id)
    expected.set_title(u'Pure Silk: The Third Dimension')
    artist = expected.create_artist()
    artist.set_name(None)
    artist.set_various(True)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre(u'Electronic')
    expected.append_style(u'UK Garage')
    expected.set_url(u'http://www.discogs.com/Various-Pure-Silk-The-Third-Dimension/release/463634')
    main = expected.ArtistTypes.MAIN
    feat = expected.ArtistTypes.FEATURING
    remix = expected.ArtistTypes.REMIXER
    # One tuple per disc; each entry is (title, ((name, type), ...)).
    # Track numbers are sequential per disc and no track has a length.
    discs = (
        (
            (u'Sunshine (Wookie Main Mix)', ((u'Gabrielle', main), (u'Wookie', remix))),
            (u'Baby Gonna Rock Dis (Original Mix)', ((u'Chris Mack', main), (u'MC Nuts', feat))),
            (u'Girls Like Us', ((u'B-15 Project', main), (u'Crissy D', feat), (u'Lady G', feat))),
            (u'Streetlife (Original 12" Mix)', ((u'Daryl B', main), (u'Mark Yardley', main))),
            ("Don't Waste My Time (4 Beat Mix)", ((u'The Wideboys', main),)),
            (u'Everybody Come On (Stanton Warriors Remix)', ((u'DJ Skribble', main), (u'Stanton Warriors', remix))),
            (u'Get Loose', ((u'DJ Double G', main),)),
            (u'Right Before My Eyes (The Remix)', (("N'n'G", main), (u'MC Neat', feat))),
            (u'What Ya Gonna Do', ((u'Artful Dodger', main),)),
            ("You Don't Know (Marvel & Eli Remix)", ((u'702', main), (u'Marvel & Eli', remix))),
            (u'I Keep', (("N'n'G", main),)),
            (u'Spirit Of The Sun (Bump & Flex Remix)', ((u'Lenny Fontana', main), (u'Bump & Flex', remix))),
            (u'Celebrate Life (Zed Bias Vocal Mix)', ((u'Brasstooth', main), (u'Sean Mitchell', feat), (u'Zed Bias', remix))),
            ("Baby (You're So Sexy) (Dub)", ((u'Dem 2', main),)),
            (u'Glad You Came To Me (Steve Gurley VIP Mix)', ((u'United Grooves Collective', main), (u'Steve Gurley', remix))),
            (u'Find The Path (Sweet Release Mix)', ((u'New Horizons', main),)),
            (u'Give Some To Me (Masterplan)', ((u'Suiceyed', main),)),
            (u'That Sound', ((u'Michael Moog', main),)),
        ),
        (
            (u'Sweeter Than Wine', ((u'Dionne Rakeem', main), (u'Dreem House Productions', remix))),
            (u'Feel It', (("London's Unique 3", main),)),
            (u'Neighbourhood', ((u'Zed Bias', main), (u'MC Rumpus', feat))),
            (u'Madness On The Street', ((u'Richie Boy', main), (u'DJ Klasse', main))),
            ("Jump 'n' Shout (Dem 2 Mix)", ((u'Basement Jaxx', main), (u'Dem 2', remix))),
            ("Somebody Else's Guy (Stanton Warriors Vocal Mix)", ((u'Jocelyn Brown', main), (u'Stanton Warriors', remix))),
            (u'Let Me Know', ((u'K.C.', main), (u'Zed Bias', main))),
            (u'For Real', ((u'M Dubs', main), (u'J.P.', main))),
            (u'Tingles 2000 (Zed Bias Remix)', ((u'Valerie M', main), (u'Zed Bias', remix))),
            ("Buddy X '99 (Original Dreem Teem Dub Mix)", ((u'Dreem Teem', main), (u'Neneh Cherry', main))),
            (u'Apparently Nothing (Artful Dodger Mix)', ((u'The Brand New Heavies', main), (u'Artful Dodger', remix))),
            (u'Sometimes It Snows In April (Dreem House Dub Mix)', ((u'Amar', main), (u'MC Ranking', feat), (u'Dreem House Productions', remix))),
            (u'Down On Me', ((u'Wookie', main),)),
            (u'Hold Me Tight', ((u'DJ Lewi', main),)),
            (u'Beautiful (Dreem House Dub Mix)', ((u'Groove Control', main), (u'Dreem House Productions', remix))),
            (u'Call It Fate', ((u'Richie Dan', main),)),
            (u'A Little Bit Of Luck', ((u'DJ Luck & MC Neat', main),)),
            (u'I Want You', ((u'Rosie Gaines', main),)),
        ),
        (
            (u'Down On Me', ((u'Wookie', main),)),
            (u'Everybody Come On (Stanton Warriors Remix)', ((u'DJ Skribble', main), (u'Stanton Warriors', remix))),
            (u'I Keep', (("N'n'G", main),)),
            (u'Celebrate Life (Zed Bias Vocal Mix)', ((u'Brasstooth', main), (u'Sean Mitchell', feat), (u'Zed Bias', remix))),
            (u'Sunshine (Wookie Main Mix)', ((u'Gabrielle', main), (u'Wookie', remix))),
            (u'Neighbourhood', ((u'Zed Bias', main), (u'MC Rumpus', feat))),
            (u'Give Some To Me (Masterplan)', ((u'Suiceyed', main),)),
            ("You Don't Know (Marvel & Eli Remix)", ((u'702', main), (u'Marvel & Eli', remix))),
            ("Somebody Else's Guy (Stanton Warriors Filthy Silk Dub)", ((u'Jocelyn Brown', main), (u'Stanton Warriors', remix))),
            (u'Right Before My Eyes (The Remix)', (("N'n'G", main), (u'MC Neat', feat))),
            (u'Baby Gonna Rock Dis', ((u'Chris Mack', main), (u'MC Nuts', feat))),
            (u'Girls Like Us', ((u'B-15 Project', main), (u'Crissy D', feat), (u'Lady G', feat))),
            ("Jump 'n' Shout (Dem 2 Mix)", ((u'Basement Jaxx', main), (u'Dem 2', remix))),
            (u'Spirit Of The Sun (Bump & Flex Remix)', ((u'Lenny Fontana', main), (u'Bump & Flex', remix))),
            (u'Glad You Came To Me (Steve Gurley VIP Mix)', ((u'United Grooves Collective', main), (u'Steve Gurley', remix))),
            (u'Call It Fate', ((u'Richie Dan', main),)),
        ),
    )
    for disc_number, tracks in enumerate(discs, start=1):
        disc = expected.create_disc()
        disc.set_number(disc_number)
        disc.set_title(None)
        for position, (title, credits) in enumerate(tracks, start=1):
            track = disc.create_track()
            track.set_number(u'%d' % position)
            track.set_title(title)
            track.set_length(None)
            for name, artist_type in credits:
                track_artist = expected.create_artist()
                track_artist.set_name(name)
                track_artist.set_various(False)
                track_artist.append_type(artist_type)
                track.append_artist(track_artist)
            disc.append_track(track)
        expected.append_disc(disc)
    s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Pure-Silk-The-Third-Dimension/release/463634')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_weird_subtracks_in_tracklist(self):
    """Release 755732 has oddly formatted subtracks in its tracklist;
    verify they are flattened into 20 plain tracks with lengths."""
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date(u'2005')
    release_event.set_country(u'Netherlands')
    expected.append_release_event(release_event)
    expected.set_format(u'CD, Compilation')
    label_id = expected.create_label_id()
    label_id.set_label(u'Stubko Entertainment')
    label_id.append_catalogue_nr(u'255034')
    expected.append_label_id(label_id)
    expected.set_title(u'De Beste Liedjes Van Ome Henk')
    artist = expected.create_artist()
    artist.set_name(u'Ome Henk')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    for genre in (u'Electronic', u'Non-Music', u'Pop'):
        expected.append_genre(genre)
    for style in (u'Comedy', u'Euro House', u'Parody'):
        expected.append_style(style)
    expected.set_url('http://www.discogs.com/Ome-Henk-De-Beste-Liedjes-Van-Ome-Henk/release/755732')
    main = expected.ArtistTypes.MAIN
    feat = expected.ArtistTypes.FEATURING
    # (title, length in seconds, extra credits beyond the implicit
    # leading 'Ome Henk' main credit) for each of the 20 tracks.
    tracks = (
        (u'Het Is Weer Tijd Voor Ome Henk! (Radio Tune)', 85, ()),
        (u'Opblaaskrokodil (Super Extra Mix)', 148, ()),
        (u'Op De Camping', 213, ()),
        (u'Ik Zing Dit Lied Voor Ome Henk!', 218, ()),
        (u'Neem Een Ander In De Maling!', 198, ()),
        (u'Mambo Nr. 6', 219, ()),
        (u'Heftig!', 225, ()),
        (u'Lekker Lekker (Ga Maar Met Me Mee)', 213, ()),
        (u'Samba La Bamba!', 184, ()),
        (u"'T Leven Gaat Niet Over Rozen!", 213, ((u'Sieb van der Kast', main),)),
        (u'Ome Henk Viert Feest!', 236, ()),
        (u'Sambal Bij?', 175, ()),
        (u'Ik Ben Verkouwe!', 234, ()),
        (u'Sju Tem', 195, ((u'Kim Holland', main),)),
        (u'Ploem Ploem Jenka (Hup Hop Versie Met Trea Dobbs)', 162, ((u'Trea Dobbs', feat),)),
        (u'Aaai Oehoe Aai', 191, ()),
        (u'Oranje!! (We Worden Kampioen!) (Radio Mix)', 223, ()),
        (u'Olee Olee Sinterklaas Is Here To Stay! (Single Versie)', 236, ()),
        (u'Een Heel Gelukkig Kerstfeest', 214, ()),
        (u'Opblaaskrokodil 2005 (Bonustrack)', 147, ()),
    )
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    for position, (title, length, extras) in enumerate(tracks, start=1):
        track = disc.create_track()
        track.set_number(u'%d' % position)
        track.set_title(title)
        track.set_length(length)
        for name, artist_type in ((u'Ome Henk', main),) + extras:
            track_artist = expected.create_artist()
            track_artist.set_name(name)
            track_artist.set_various(False)
            track_artist.append_type(artist_type)
            track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Ome-Henk-De-Beste-Liedjes-Van-Ome-Henk/release/755732')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_404(self):
    """A non-existent Discogs release id yields a bare NotFoundResult."""
    missing_url = 'http://www.discogs.com/Various-Gothic-File-14/release/999999999'
    expected = NotFoundResult()
    expected.set_scraper_name(None)
    result = discogs.ReleaseScraper.from_string(missing_url).get_result()
    self.assertEqual(expected, result)
def test_search_scraper(self):
    """A Discogs search for a very common term returns at least one item."""
    result = discogs.SearchScraper('love').get_result()
    self.assertTrue(len(result.get_items()) > 0)
class MusicbrainzTest(TestCase):
def test_simple_album(self):
    """A plain single-disc MusicBrainz album is scraped into the expected ReleaseResult."""
    url = 'http://musicbrainz.org/release/e008606b-a1c9-48ab-8011-5dbf8b874f1b'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date('2004-09-23')
    event.set_country('Germany')
    expected.append_release_event(event)
    expected.set_format('CD, Album')
    label_id = expected.create_label_id()
    label_id.set_label('Trisol')
    label_id.append_catalogue_nr('TRI 070 CD')
    expected.append_label_id(label_id)
    expected.set_title('Hast Du mich vermisst? Der schwarze Schmetterling, Teil I')
    main = expected.create_artist()
    main.set_name('ASP')
    main.set_various(False)
    main.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(main)
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (track number, title, length in seconds)
    rows = [
        (u'1', u'Intro: In meiner Vorstellung', 274),
        (u'2', u'Schwarzer Schmetterling', 290),
        (u'3', u'Where Do the Gods Go', 226),
        (u'4', u'Dancing', 345),
        (u'5', u'K\xfcss mich', 311),
        (u'6', u'Sing Child', 238),
        (u'7', u'Teach Me War', 225),
        (u'8', u'Imbecile Anthem', 222),
        (u'9', u'Und wir tanzten (Ungeschickte Liebesbriefe)', 304),
        (u'10', u'Blinded', 444),
        (u'11', u'Where Do the Gods Go (re-unleashed club edit)', 279),
    ]
    for number, title, length in rows:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        disc.append_track(track)
    expected.append_disc(disc)
    self.assertEqual(expected, musicbrainz.ReleaseScraper.from_string(url).get_result())
def test_multiple_cds(self):
    """A 4xCD MusicBrainz release is scraped with one disc object per medium."""
    url = 'http://musicbrainz.org/release/79de4a0c-b469-4dfd-b23c-129462b741fb'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date('2007-05-25')
    event.set_country('Germany')
    expected.append_release_event(event)
    expected.set_format(u'4\xd7CD, Album + Live')
    label_id = expected.create_label_id()
    label_id.set_label('Trisol')
    label_id.append_catalogue_nr('TRI 303 CD')
    expected.append_label_id(label_id)
    expected.set_title('Once in a Lifetime')
    for band in ('ASP', 'Chamber'):
        main = expected.create_artist()
        main.set_name(band)
        main.set_various(False)
        main.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(main)
    expected.set_url(url)
    # One list per disc; each row is (number, title, length, extra track artists).
    discs = [
        [
            (u'1', u'Once in a Lifetime, Part 1', 351, []),
            (u'2', u'A Dead Man\u2019s Song', 312, []),
            (u'3', u'Versuchung', 345, []),
            (u'4', u'Torn', 304, []),
            (u'5', u'Demon Love', 272, []),
            (u'6', u'The Paperhearted Ghost', 283, []),
            (u'7', u'A Tale of Real Love', 316, []),
            (u'8', u'Hunger', 289, []),
            (u'9', u'The Truth About Snow-White', 240, []),
            (u'10', u'She Wore Shadows', 276, []),
            (u'11', u'Und wir tanzten (Ungeschickte Liebesbriefe)', 317, []),
            (u'12', u'Once in a Lifetime, Part 2 (reprise)', 164, []),
        ],
        [
            (u'1', u'K\xfcss mich', 384, []),
            (u'2', u'Silence - Release', 225, []),
            (u'3', u'Solitude', 220, []),
            (u'4', u'Die Ballade von der Erweckung', 527, []),
            (u'5', u'Another Conversation', 201, []),
            (u'6', u'Sing Child', 449, []),
            (u'7', u'Ich will brennen', 300, []),
            (u'8', u'Toscana', 374, []),
            (u'9', u'Ride On', 222, []),
            (u'10', u'Hometown', 181, []),
            (u'11', u'Werben', 293, []),
            (u'12', u'Once in a Lifetime, Part 3 (Finale)', 608, []),
        ],
        [
            (u'1', u'H\xe4sslich', 145, []),
            (u'2', u'Backstage (All Areas)', 573, []),
            (u'3', u'Paracetamoltr\xe4ume', 517, []),
            (u'4', u'Auszug aus \u201eTremendista\u201c', 1473,
             [(u'Ralph M\xfcller', expected.ArtistTypes.FEATURING)]),
            (u'5', u'Campari O', 159, []),
        ],
        [
            (u'1', u'Sicamore Trees (ASP soundcheck out-take)', 94, []),
            (u'2', u'Demon Love', 275, []),
            (u'3', u'The Truth About Snow-White', 275, []),
            (u'4', u'She Wore Shadows', 319, []),
            (u'5', u'Sing Child', 469, []),
            (u'6', u'Hometown', 221, []),
            (u'7', u'Hunger', 274, []),
            (u'8', u'Silence - Release', 208, []),
            (u'9', u'She Moved Through the Fair (ASP soundcheck out-take)', 120, []),
        ],
    ]
    for disc_number, rows in enumerate(discs, 1):
        disc = expected.create_disc()
        disc.set_number(disc_number)
        disc.set_title(None)
        for number, title, length, extras in rows:
            track = disc.create_track()
            track.set_number(number)
            track.set_title(title)
            track.set_length(length)
            for artist_name, artist_type in extras:
                guest = expected.create_artist()
                guest.set_name(artist_name)
                guest.set_various(False)
                guest.append_type(artist_type)
                track.append_artist(guest)
            disc.append_track(track)
        expected.append_disc(disc)
    self.assertEqual(expected, musicbrainz.ReleaseScraper.from_string(url).get_result())
def test_various_artists_and_track_remixer(self):
    """A various-artists compilation keeps per-track MAIN artists and REMIXER credits."""
    url = 'http://musicbrainz.org/release/9d78a55c-0eee-4b61-b6eb-b69765c37740'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date('2010')
    event.set_country('Germany')
    expected.append_release_event(event)
    expected.set_format('CD, Album + Compilation')
    label_id = expected.create_label_id()
    label_id.set_label('Batbeliever Releases')
    label_id.append_catalogue_nr('BAT 065')
    expected.append_label_id(label_id)
    expected.set_title('Gothic File 11')
    # Release-level artist is "Various Artists": no name, various flag set.
    various = expected.create_artist()
    various.set_name(None)
    various.set_various(True)
    various.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(various)
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (number, title, length, performer, remixer-or-None)
    rows = [
        ('1', 'Carrie Satan', 312, 'Spectra Paris', None),
        ('2', 'Countdown', 253, 'Absurd Minds', None),
        ('3', u'M\xe4dchen in Uniform (Faderhead remix)', 233, 'Nachtmahr', 'Faderhead'),
        ('4', 'Fucking Invective', 273, 'Noisuf-X', None),
        ('5', 'Loyal to My Hate (Solar Fake remix)', 264, ':wumpscut:', 'Solar Fake'),
        ('6', 'Melancholie (382edit)', 232, 'KiEw', None),
        ('7', 'Gegen die Welt', 287, 'Mantus', None),
        ('8', "Ready or Not (I'm Coming)", 202, 'Oomph!', None),
        ('9', 'What?', 166, 'Rob Zombie', None),
        ('10', 'Ebenbild (Die Krupps remix)', 343, 'Megaherz', 'Die Krupps'),
        ('11', 'Vergissmeinnicht (live)', 239, 'Eisbrecher', None),
        ('12', 'Industrypeople', 254, 'Zeromancer', None),
        ('13', 'Kick the Bass', 222, 'Julien-K', None),
        ('14', 'Black Hole', 325, 'Nosferatu', None),
        ('15', 'Swimming in Dirty Water', 264, 'Die Art', None),
        ('16', 'Wreckhouse Stomp', 184, 'Mad Sin', None),
    ]
    for number, title, length, performer, remixer in rows:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        main = expected.create_artist()
        main.set_name(performer)
        main.set_various(False)
        main.append_type(expected.ArtistTypes.MAIN)
        track.append_artist(main)
        if remixer is not None:
            credit = expected.create_artist()
            credit.set_name(remixer)
            credit.set_various(False)
            credit.append_type(expected.ArtistTypes.REMIXER)
            track.append_artist(credit)
        disc.append_track(track)
    expected.append_disc(disc)
    self.assertEqual(expected, musicbrainz.ReleaseScraper.from_string(url).get_result())
def test_disc_titles(self):
    """A box set whose mediums carry individual titles keeps each disc title."""
    url = 'http://musicbrainz.org/release/12c94a0f-828f-4ab3-8e0d-dfe4599dc310'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date('2008')
    event.set_country('Europe')
    expected.append_release_event(event)
    expected.set_format(u'5\xd7CD, Album + Compilation')
    label_id = expected.create_label_id()
    label_id.set_label('Epic')
    label_id.append_catalogue_nr('88697304842')
    expected.append_label_id(label_id)
    expected.set_title('Original Album Classics')
    main = expected.create_artist()
    main.set_name('The Isley Brothers')
    main.set_various(False)
    main.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(main)
    expected.set_url(url)
    # (disc title, [(track number, title, length), ...]) per medium.
    discs = [
        ('The Brothers: Isley', [
            ('1', 'I Turned You On', 158),
            ('2', 'Vacuum Cleaner', 176),
            ('3', 'I Got to Get Myself Together', 218),
            ('4', 'Was It Good to You?', 164),
            ('5', 'The Blacker the Berry (a.k.a. Black Berries)', 353),
            ('6', 'My Little Girl', 221),
            ('7', 'Get Down Off of the Train', 192),
            ('8', 'Holding On', 156),
            ('9', 'Feels Like the World', 206),
        ]),
        ('Get Into Something', [
            ('1', 'Get Into Something', 450),
            ('2', 'Freedom', 218),
            ('3', 'Take Inventory', 167),
            ('4', "Keep on Doin'", 242),
            ('5', 'Girls Will Be Girls', 171),
            ('6', 'I Need You So', 265),
            ('7', 'If He Can You Can', 225),
            ('8', 'I Got to Find Me One', 278),
            ('9', 'Beautiful', 186),
            ('10', 'Bless Your Heart', 183),
        ]),
        ("Givin' It Back", [
            ('1', 'Ohio - Machine Gun', 554),
            ('2', 'Fire and Rain', 329),
            ('3', 'Lay Lady Lay', 622),
            ('4', 'Spill the Wine', 392),
            ('5', 'Nothing to Do But Today', 219),
            ('6', 'Cold Bologna', 179),
            ('7', "Love the One You're With", 219),
        ]),
        ('Brother, Brother, Brother', [
            ('1', 'Brother, Brother', 197),
            ('2', 'Put A Little Love In Your Heart', 182),
            ('3', "Sweet Season / Keep On Walkin'", 313),
            ('4', 'Work To Do', 192),
            ('5', 'Pop That Thang', 174),
            ('6', 'Lay Away', 203),
            ('7', "It's Too Late", 631),
            ('8', 'Love Put Me On The Corner', 390),
        ]),
        ('3 + 3', [
            ('1', 'That Lady, Parts 1 & 2', 335),
            ('2', "Don't Let Me Be Lonely Tonight", 239),
            ('3', 'If You Were There', 203),
            ('4', 'You Walk Your Way', 186),
            ('5', 'Listen to the Music', 246),
            ('6', 'What It Comes Down To', 234),
            ('7', 'Sunshine (Go Away Today)', 262),
            ('8', 'Summer Breeze', 372),
            ('9', 'The Highways of My Life', 293),
            ('10', 'That Lady (live)', 222),
        ]),
    ]
    for disc_number, (disc_title, rows) in enumerate(discs, 1):
        disc = expected.create_disc()
        disc.set_number(disc_number)
        disc.set_title(disc_title)
        for number, title, length in rows:
            track = disc.create_track()
            track.set_number(number)
            track.set_title(title)
            track.set_length(length)
            disc.append_track(track)
        expected.append_disc(disc)
    self.assertEqual(expected, musicbrainz.ReleaseScraper.from_string(url).get_result())
def test_special_sub_heading(self):
    """A 2xCD EP with an unusual sub-heading is still parsed into two discs."""
    url = 'http://musicbrainz.org/release/fc6ee7a8-c70a-4c8f-ab42-43a457a0731f'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date('2012-11-16')
    event.set_country('Germany')
    expected.append_release_event(event)
    expected.set_format(u'2\xd7CD, EP')
    label_id = expected.create_label_id()
    label_id.set_label('Trisol')
    label_id.append_catalogue_nr('TRI 460 CD')
    expected.append_label_id(label_id)
    expected.set_title('Die GeistErfahrer EP: Fremder-Zyklus, Teil 1.1')
    main = expected.create_artist()
    main.set_name('ASP')
    main.set_various(False)
    main.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(main)
    expected.set_url(url)
    # One list of (number, title, length) per disc; both discs are untitled.
    discs = [
        [
            (u'1', u'GeistErfahrer', 360),
            (u'2', u'In Sack und Asche', 440),
            (u'3', u'\xdcberH\xe4rte', 376),
            (u'4', u'Carpe noctem', 312),
            (u'5', u'Weichen(t)stellung (GeistErfahrer Reprise)', 274),
            (u'6', u'Danach', 516),
        ],
        [
            (u'1', u'Sing Child', 404),
            (u'2', u'Duett (Minnelied der Incubi)', 251),
            (u'3', u'Krabat', 358),
            (u'4', u'Unverwandt', 667),
            (u'5', u'Werben', 440),
        ],
    ]
    for disc_number, rows in enumerate(discs, 1):
        disc = expected.create_disc()
        disc.set_number(disc_number)
        disc.set_title(None)
        for number, title, length in rows:
            track = disc.create_track()
            track.set_number(number)
            track.set_title(title)
            track.set_length(length)
            disc.append_track(track)
        expected.append_disc(disc)
    self.assertEqual(expected, musicbrainz.ReleaseScraper.from_string(url).get_result())
def test_release_group(self):
    """A release-group page is scraped into a list of its releases."""
    expected = ListResult()
    expected.set_scraper_name(None)
    # (name, info line, release URL); query and url are the same link.
    releases = [
        (u'ASP & Chamber \u2013 Humility',
         u'CD | Tracks: 8 | Date: 2006-04-21 | Country: DE | Barcode: 4260063942730',
         'https://musicbrainz.org/release/58bad121-bfab-4dda-89f8-4b1bc092de44'),
        (u'ASP & Chamber \u2013 Humility: Die verschollenen Archive 2',
         u'CD | Tracks: 7 | Date: 2012-03-09 | Country: DE | Barcode: 4260063944505',
         'https://musicbrainz.org/release/c2834b8b-77c4-4505-9b55-a31208eb98c3'),
    ]
    for name, info, link in releases:
        item = expected.create_item()
        item.set_name(name)
        item.set_info(info)
        item.set_query(link)
        item.set_url(link)
        expected.append_item(item)
    scraper = musicbrainz.ReleaseGroupScraper.from_string(
        'http://musicbrainz.org/release-group/9c8ec90f-dcef-3fc7-904d-27f535454e44')
    self.assertEqual(expected, scraper.get_result())
def test_vinyl_not_cd_in_sub_heading(self):
    """A vinyl release keeps its side-letter track numbers (A1..B5) and format."""
    url = 'http://musicbrainz.org/release/bdb4ba37-bb4b-3d2a-bd58-c109dc4d72f0'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    event = expected.create_release_event()
    event.set_date('2004-02-24')
    event.set_country('United States')
    expected.append_release_event(event)
    expected.set_format('12" Vinyl, Album')
    label_id = expected.create_label_id()
    label_id.set_label('Sundazed Music')
    label_id.append_catalogue_nr('LP 5103')
    expected.append_label_id(label_id)
    expected.set_title('Four Sail')
    main = expected.create_artist()
    main.set_name('Love')
    main.set_various(False)
    main.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(main)
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (position, title, length in seconds)
    rows = [
        ('A1', 'August', 300),
        ('A2', "Your Friend and Mine - Neil's Song", 220),
        ('A3', "I'm With You", 165),
        ('A4', 'Good Times', 210),
        ('A5', 'Singing Cowboy', 270),
        ('B1', 'Dream', 169),
        ('B2', 'Robert Montgomery', 214),
        ('B3', 'Nothing', 284),
        ('B4', 'Talking in My Sleep', 170),
        ('B5', 'Always See Your Face', 210),
    ]
    for number, title, length in rows:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        disc.append_track(track)
    expected.append_disc(disc)
    self.assertEqual(expected, musicbrainz.ReleaseScraper.from_string(url).get_result())
def test_medium_in_sub_heading(self):
    """A release without date/country/label still yields format, title and tracks."""
    url = 'http://musicbrainz.org/release/0e3b3c85-61b6-4a07-852b-26f7e8dd0ade'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    expected.set_format('Album')
    expected.set_title('Welcome to the Dopehouse')
    main = expected.create_artist()
    main.set_name('The Dayton Family')
    main.set_various(False)
    main.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(main)
    expected.set_url(url)
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (number, title, length, featured artist names)
    rows = [
        ('1', 'Intro', 93, []),
        ('2', 'Big Mac 11', 276, []),
        ('3', 'Do You Remember?', 200, []),
        ('4', 'Welcome to Flint', 228, []),
        ('5', 'Feds', 237, []),
        ('6', 'Gangstarism', 251, ['Goldfish']),
        ('7', 'Young Thugs', 241, ['Ghetto E', 'Lori']),
        ('8', 'Drugstore', 207, []),
        ('9', 'Set Up', 257, []),
        ('10', 'We Keep It Ghetto', 200, []),
        ('11', 'Dope House', 231, []),
        ('12', 'Shadows', 242, ['Kalonda & Ryan']),
        ('13', 'Outlaws', 226, []),
        ('14', 'Weed Song', 228, []),
        ('15', 'Simple Wish', 206, []),
        ('16', 'Outro', 96, []),
    ]
    for number, title, length, featured in rows:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        for guest_name in featured:
            guest = expected.create_artist()
            guest.set_name(guest_name)
            guest.set_various(False)
            guest.append_type(expected.ArtistTypes.FEATURING)
            track.append_artist(guest)
        disc.append_track(track)
    expected.append_disc(disc)
    self.assertEqual(expected, musicbrainz.ReleaseScraper.from_string(url).get_result())
def test_digital_media_in_sub_heading(self):
    """Scrape a release whose sub-heading reports the 'Digital Media' format.

    Builds the expected ReleaseResult, fetches the live MusicBrainz page,
    and compares the two for equality.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('2013-04-08')
    release_event.set_country('Sweden')
    expected.append_release_event(release_event)
    expected.set_format('Digital Media, Album')
    expected.set_title(u'J\xe4rnb\xe4rarland')
    artist = expected.create_artist()
    artist.set_name('Stiko Per Larsson')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.set_url('http://musicbrainz.org/release/d37fc56e-4b9b-4c4c-9e9e-5d6d5a66944c')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (track number, title, length in seconds) — replaces 11 copy-pasted
    # create/set/append blocks with one loop over the data.
    tracks = [
        (u'1', u'Zombien f\xf6ds', 167),
        (u'2', u'J\xe4rnb\xe4rarland', 219),
        (u'3', u'Veteraner', 197),
        (u'4', u'Resande Man', 192),
        (u'5', u'Du h\xf6r inte hemma h\xe4r', 185),
        (u'6', u'Irrbloss', 187),
        (u'7', u'J\xe4mtlands president', 218),
        (u'8', u'Gilla falla', 257),
        (u'9', u'Inga v\xe4gar', 259),
        (u'10', u'Krus & detaljer', 241),
        (u'11', u'En kosmonauts testamente', 375),
    ]
    for number, title, length in tracks:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        disc.append_track(track)
    expected.append_disc(disc)
    s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/d37fc56e-4b9b-4c4c-9e9e-5d6d5a66944c')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_other_in_sub_heading(self):
    """Scrape a release whose sub-heading reports the 'Other' format.

    Builds the expected ReleaseResult, fetches the live MusicBrainz page,
    and compares the two for equality.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('1999')
    release_event.set_country('United States')
    expected.append_release_event(release_event)
    expected.set_format('Other, Album')
    label_id = expected.create_label_id()
    label_id.set_label('Epic')
    expected.append_label_id(label_id)
    expected.set_title('Retro Futuristo')
    artist = expected.create_artist()
    artist.set_name('Jack Herrera')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.set_url('http://musicbrainz.org/release/020fc291-af3e-45d7-a2f3-212d42fc260b')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (track number, title, length in seconds) — replaces 16 copy-pasted
    # create/set/append blocks with one loop over the data.
    tracks = [
        (u'1', u'City Lights', 313),
        (u'2', u'What U Feel', 295),
        (u'3', u'Diamond in the Rough', 297),
        (u'4', u'High Off You', 299),
        (u'5', u'Say You Gotta Man', 262),
        (u'6', u'Jack Shuffle (feat. Will.I.Am)', 240),
        (u'7', u'Up Above My Head', 320),
        (u'8', u'Jack Herrera for President', 272),
        (u'9', u'Free to Believe', 329),
        (u'10', u'For You', 378),
        (u'11', u'Silver & Gold (feat. Black Thought)', 316),
        (u'12', u'Revolution (interlude)', 78),
        (u'13', u'Revolution', 315),
        (u'14', u'Be Free', 341),
        (u'15', u'D\xe9ja Vu', 218),
        (u'16', u'Jewel', 339),
    ]
    for number, title, length in tracks:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        disc.append_track(track)
    expected.append_disc(disc)
    s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/020fc291-af3e-45d7-a2f3-212d42fc260b')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_dvd_and_cd_in_sub_heading(self):
    """Scrape a two-disc release whose sub-heading combines CD and DVD-Video.

    Builds the expected ReleaseResult from data tables (every track artist
    on this release is a MAIN, non-various artist), fetches the live
    MusicBrainz page, and compares for equality.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('2009-01-14')
    release_event.set_country('Japan')
    expected.append_release_event(release_event)
    expected.set_format('CD + DVD-Video, Album + Compilation')
    # Same label, one label-id entry per catalogue number.
    for catalogue_nr in ('SMCL-163', 'SMCL-164'):
        label_id = expected.create_label_id()
        label_id.set_label(u'MusicRay\u2019n')
        label_id.append_catalogue_nr(catalogue_nr)
        expected.append_label_id(label_id)
    expected.set_title('CODE GEASS COMPLETE BEST')
    artist = expected.create_artist()
    artist.set_name(None)
    artist.set_various(True)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.set_url('http://musicbrainz.org/release/f6fe7f52-b0c8-4bd8-af06-68af909e09ca')
    # (track number, title, length in seconds, names of MAIN track artists)
    disc_one_tracks = [
        (u'1', u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 1', 31, [u'C.C.', u'\u3086\u304b\u306a']),
        (u'2', u'COLORS', 219, [u'FLOW']),
        (u'3', u'\u52c7\u4fa0\u9752\u6625\u8b33', 262, [u'ALI PROJECT']),
        (u'4', u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 2', 35, [u'C.C.', u'\u3086\u304b\u306a']),
        (u'5', u'\u89e3\u8aad\u4e0d\u80fd', 186, [u'\u30b8\u30f3']),
        (u'6', u'\u30e2\u30b6\u30a4\u30af\u30ab\u30b1\u30e9', 278, [u'SunSet Swish']),
        (u'7', u'\u77b3\u30ce\u7ffc', 212, [u'access']),
        (u'8', u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 3', 35, [u'C.C.', u'\u3086\u304b\u306a']),
        (u'9', u'O2 \u301c\u30aa\u30fc\u30fb\u30c4\u30fc\u301c', 238, [u'ORANGE RANGE']),
        (u'10', u'\u30b7\u30a2\u30ef\u30bb\u30cd\u30a4\u30ed', 259, [u'ORANGE RANGE']),
        (u'11', u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 4', 32, [u'C.C.', u'\u3086\u304b\u306a']),
        (u'12', u'WORLD END', 229, [u'FLOW']),
        (u'13', u'\u308f\u304c\u81c8\u305f\u3057\u60aa\u306e\u83ef', 273, [u'ALI PROJECT']),
        (u'14', u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 5', 46, [u'C.C.', u'\u3086\u304b\u306a']),
    ]
    # NOTE: tracks 13/14 here use U+FA1F where disc 1 uses U+81C8 — that
    # difference matches the scraped page and is preserved deliberately.
    disc_two_tracks = [
        (u'1', u'COLORS type1', 93, [u'FLOW']),
        (u'2', u'COLORS type2', 93, [u'FLOW']),
        (u'3', u'\u52c7\u4fa0\u9752\u6625\u8b33', 93, [u'ALI PROJECT']),
        (u'4', u'\u89e3\u8aad\u4e0d\u80fd type1', 93, [u'\u30b8\u30f3']),
        (u'5', u'\u89e3\u8aad\u4e0d\u80fd type2', 93, [u'\u30b8\u30f3']),
        (u'6', u'\u30e2\u30b6\u30a4\u30af\u30ab\u30b1\u30e9 type1', 93, [u'SunSet Swish']),
        (u'7', u'\u30e2\u30b6\u30a4\u30af\u30ab\u30b1\u30e9 type2', 93, [u'SunSet Swish']),
        (u'8', u'\u77b3\u30ce\u7ffc', 93, [u'access']),
        (u'9', u'O2 \u301c\u30aa\u30fc\u30fb\u30c4\u30fc\u301c', 93, [u'ORANGE RANGE']),
        (u'10', u'\u30b7\u30a2\u30ef\u30bb\u30cd\u30a4\u30ed', 96, [u'ORANGE RANGE']),
        (u'11', u'WORLD END type1', 93, [u'FLOW']),
        (u'12', u'WORLD END type2', 93, [u'FLOW']),
        (u'13', u'\u308f\u304c\ufa1f\u305f\u3057\u60aa\u306e\u83ef type1', 96, [u'ALI PROJECT']),
        (u'14', u'\u308f\u304c\ufa1f\u305f\u3057\u60aa\u306e\u83ef type2', 96, [u'ALI PROJECT']),
    ]
    for disc_number, track_data in enumerate((disc_one_tracks, disc_two_tracks), 1):
        disc = expected.create_disc()
        disc.set_number(disc_number)
        disc.set_title(None)
        for number, title, length, artist_names in track_data:
            track = disc.create_track()
            track.set_number(number)
            track.set_title(title)
            track.set_length(length)
            for name in artist_names:
                track_artist = expected.create_artist()
                track_artist.set_name(name)
                track_artist.set_various(False)
                track_artist.append_type(expected.ArtistTypes.MAIN)
                track.append_artist(track_artist)
            disc.append_track(track)
        expected.append_disc(disc)
    s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/f6fe7f52-b0c8-4bd8-af06-68af909e09ca')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_cdr_in_sub_heading(self):
    """Scrape a release whose sub-heading reports the 'CD-R' format.

    Every track on this release is titled '[untitled]' and numbered
    sequentially, so only the per-track lengths vary.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('2013-06-27')
    release_event.set_country('United States')
    expected.append_release_event(release_event)
    expected.set_format('CD-R, Album')
    label_id = expected.create_label_id()
    label_id.set_label('[no label]')
    expected.append_label_id(label_id)
    expected.set_title('Thaw')
    artist = expected.create_artist()
    artist.set_name('Buckethead')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.set_url('http://musicbrainz.org/release/dd3f9b98-364c-4da0-b4d7-c79f1c20f1e6')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # Per-track lengths in seconds for tracks 1..9.
    lengths = [486, 147, 191, 166, 185, 413, 133, 113, 91]
    for position, length in enumerate(lengths, 1):
        track = disc.create_track()
        track.set_number(str(position))
        track.set_title('[untitled]')
        track.set_length(length)
        disc.append_track(track)
    expected.append_disc(disc)
    s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/dd3f9b98-364c-4da0-b4d7-c79f1c20f1e6')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_featuring_release_artists(self):
    """Scrape a release where FEATURING artists appear on both the release
    and on individual tracks (one remix track artist carries two types).
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('2012-03-19')
    release_event.set_country('United Kingdom')
    expected.append_release_event(release_event)
    expected.set_format('CD, Single')
    label_id = expected.create_label_id()
    label_id.set_label('Interscope Records')
    label_id.append_catalogue_nr('0602527974569')
    expected.append_label_id(label_id)
    expected.set_title("Give Me All Your Luvin'")
    # (artist name, artist types) for the release-level credits.
    release_artists = [
        ('Madonna', [expected.ArtistTypes.MAIN]),
        ('Nicki Minaj', [expected.ArtistTypes.FEATURING]),
        ('M.I.A.', [expected.ArtistTypes.FEATURING]),
    ]
    for name, types in release_artists:
        artist = expected.create_artist()
        artist.set_name(name)
        artist.set_various(False)
        for artist_type in types:
            artist.append_type(artist_type)
        expected.append_release_artist(artist)
    expected.set_url('http://musicbrainz.org/release/37df7664-0924-4594-8d07-9d48fa47ced0')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (track number, title, length, [(artist name, artist types), ...])
    tracks = [
        ('1', "Give Me All Your Luvin'", 202, [
            ('Nicki Minaj', [expected.ArtistTypes.FEATURING]),
            ('M.I.A.', [expected.ArtistTypes.FEATURING]),
        ]),
        ('2', "Give Me All Your Luvin' (Party Rock remix)", 243, [
            # LMFAO is credited both as featuring artist and as remixer.
            ('LMFAO', [expected.ArtistTypes.FEATURING, expected.ArtistTypes.REMIXER]),
            ('Nicki Minaj', [expected.ArtistTypes.FEATURING]),
        ]),
    ]
    for number, title, length, artists in tracks:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        for name, types in artists:
            track_artist = expected.create_artist()
            track_artist.set_name(name)
            track_artist.set_various(False)
            for artist_type in types:
                track_artist.append_type(artist_type)
            track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/37df7664-0924-4594-8d07-9d48fa47ced0')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_404(self):
    """A release URL that does not exist yields a NotFoundResult."""
    scraper = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/12345-abcdefg')
    result = scraper.get_result()
    expected = NotFoundResult()
    expected.set_scraper_name(None)
    self.assertEqual(expected, result)
def test_search_scraper(self):
    """Searching for a common term returns at least one result item."""
    scraper = musicbrainz.SearchScraper('love')
    result = scraper.get_result()
    self.assertTrue(len(result.get_items()) > 0)
class BeatportTest(TestCase):
def test_remix_track_artist(self):
    """Scrape a Beatport release where one track credits a REMIXER artist.

    Builds the expected ReleaseResult, fetches the live Beatport page,
    and compares the two for equality.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('2006-04-19')
    release_event.set_country(None)
    expected.append_release_event(release_event)
    expected.set_format(None)
    label_id = expected.create_label_id()
    label_id.set_label('Karatemusik')
    label_id.append_catalogue_nr('KM013')
    expected.append_label_id(label_id)
    expected.set_title('Love Spy / Love Dies')
    artist = expected.create_artist()
    artist.set_name(u'Polygamy Boys')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre('Electro House')
    expected.append_genre('Tech House')
    expected.set_url('http://www.beatport.com/release/love-spy-love-dies/27944')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (track number, title, length, [(artist name, artist types), ...])
    tracks = [
        ('1', u'Love Spy / Love Dies [Error Error Remix]', 447, [
            ('Error Error', [expected.ArtistTypes.REMIXER]),
        ]),
        ('2', 'Love Spy / Love Dies', 427, []),
        ('3', 'Reply 23', 418, []),
    ]
    for number, title, length, artists in tracks:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        for name, types in artists:
            track_artist = expected.create_artist()
            track_artist.set_name(name)
            track_artist.set_various(False)
            for artist_type in types:
                track_artist.append_type(artist_type)
            track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/love-spy-love-dies/27944')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_various_artists(self):
    """Scrape a various-artists Beatport compilation.

    Builds the expected ReleaseResult from a data table of tracks and
    per-track artist credits (MAIN and REMIXER), fetches the live Beatport
    page, and compares for equality.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('2012-01-05')
    release_event.set_country(None)
    expected.append_release_event(release_event)
    expected.set_format(None)
    label_id = expected.create_label_id()
    label_id.set_label('Carlo Cavalli Music Group')
    label_id.append_catalogue_nr('CMG117')
    expected.append_label_id(label_id)
    expected.set_title('DJ Tunes Compilation')
    artist = expected.create_artist()
    artist.set_name(None)
    artist.set_various(True)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    for genre in ('Progressive House', 'House', 'Deep House', 'Tech House', 'Minimal'):
        expected.append_genre(genre)
    expected.set_url('http://www.beatport.com/release/dj-tunes-compilation/851318')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    MAIN = expected.ArtistTypes.MAIN
    REMIXER = expected.ArtistTypes.REMIXER
    # (track number, title, length, [(artist name, artist types), ...])
    tracks = [
        ('1', 'Forever Loved', 320, [
            ('Sam Be-Kay', [MAIN]),
        ]),
        ('2', u'Sweep [Alex Faraci Remix]', 398, [
            ('Eros Locatelli', [MAIN]),
            ('Alex Faraci', [REMIXER]),
        ]),
        ('3', u'Humo Y Neon [David Ahumada Remix]', 298, [
            ('Babette Duwez', [MAIN]),
            ('Joel Reichert', [MAIN]),
            ('David Ahumada', [REMIXER]),
        ]),
        ('4', u'Night Melody [Massimo Russo La Guitarra Remix]', 377, [
            ('Alex Faraci', [MAIN]),
            ('Massimo Russo', [REMIXER]),
        ]),
        ('5', '30 m', 393, [
            ('Fingers Clear', [MAIN]),
        ]),
        ('6', 'Just Begin', 429, [
            ('Erion Gjuzi', [MAIN]),
        ]),
        ('7', 'Achakkar', 388, [
            ('Dany Cohiba', [MAIN]),
        ]),
        ('8', u'Raveline [Italianbeat Guys Remix]', 406, [
            ('Massimo Russo', [MAIN]),
            ('Italianbeat Guys', [REMIXER]),
        ]),
        ('9', u'Grey 2 Fade feat. Babette Duwez [Jurgen Cecconi Mix]', 653, [
            # Jurgen Cecconi is credited both as a main artist and remixer.
            ('Jurgen Cecconi', [MAIN, REMIXER]),
            ('Beethoven Tbs', [MAIN]),
        ]),
        ('10', 'Tanzmania', 420, [
            ('Carlo Cavalli', [MAIN]),
        ]),
    ]
    for number, title, length, artists in tracks:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        for name, types in artists:
            track_artist = expected.create_artist()
            track_artist.set_name(name)
            track_artist.set_various(False)
            for artist_type in types:
                track_artist.append_type(artist_type)
            track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/dj-tunes-compilation/851318')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_release_with_empty_track_length(self):
    """Scrape a Beatport release where one track has no length.

    Builds the full expected ReleaseResult for 'Bedrock 10: Past Present
    Future' (31 tracks) and asserts the live scrape matches.  Track 9
    carries set_length(None) — the point of this test: a missing length on
    the site must come back as None, not 0 and not an error.

    NOTE(review): integration test — hits www.beatport.com over the
    network, so it can break when the site changes.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('2008-10-13')
    release_event.set_country(None)
    expected.append_release_event(release_event)
    expected.set_format(None)
    label_id = expected.create_label_id()
    label_id.set_label('Bedrock Records')
    label_id.append_catalogue_nr('BEDPPF10')
    expected.append_label_id(label_id)
    expected.set_title('Bedrock 10: Past Present Future')
    # Compilation: release-level artist is "various", with no single name.
    artist = expected.create_artist()
    artist.set_name(None)
    artist.set_various(True)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre('Progressive House')
    expected.append_genre('House')
    expected.append_genre('Breaks')
    expected.append_genre('Techno')
    expected.append_genre('Tech House')
    expected.set_url('http://www.beatport.com/release/bedrock-10-past-present-future/138250')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    track = disc.create_track()
    track.set_number('1')
    track.set_title(u'Past Present Future (Part 1) [Continuous DJ Mix]')
    track.set_length(4454)
    track_artist = expected.create_artist()
    track_artist.set_name('John Digweed')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('2')
    track.set_title(u'Emerald [Seismic Dub]')
    track.set_length(501)
    track_artist = expected.create_artist()
    track_artist.set_name('Bedrock')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Seizmic')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('3')
    track.set_title(u'Imagine [Estroe Remix]')
    track.set_length(383)
    track_artist = expected.create_artist()
    track_artist.set_name('Fortunato & Montresor')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Estroe')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('4')
    track.set_title(u'Dust [Charlie Mayhem Cold Shoulder Dub]')
    track.set_length(540)
    track_artist = expected.create_artist()
    track_artist.set_name('Pole Folder')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('CP')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Charlie May')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('5')
    track.set_title(u'Chutney [Tom Mangan 2008 Remix]')
    track.set_length(483)
    track_artist = expected.create_artist()
    track_artist.set_name('Tom Mangan')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('6')
    track.set_title(u'Rise In [Steve Lawler Powder Powder Remix]')
    track.set_length(712)
    track_artist = expected.create_artist()
    track_artist.set_name('Steve Lawler')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('7')
    track.set_title(u'Santiago [Chab Remix]')
    track.set_length(522)
    track_artist = expected.create_artist()
    track_artist.set_name('Bedrock')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Chab')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('8')
    track.set_title(u'Warung Beach [Jamie Stevens Remix]')
    track.set_length(516)
    track_artist = expected.create_artist()
    track_artist.set_name('John Digweed')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Jamie Stevens')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    # Track 9 is the subject of this test: its length is missing on the
    # site, so the scraper must report None rather than a number.
    track = disc.create_track()
    track.set_number('9')
    track.set_title(u'Emerald [Seismic Dub]')
    track.set_length(None)
    track_artist = expected.create_artist()
    track_artist.set_name('Bedrock')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Charlie May')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('10')
    track.set_title(u"Dirtbox [Bruce Aisher's Resurrection Rewind Mix]")
    track.set_length(464)
    track_artist = expected.create_artist()
    track_artist.set_name('Gutterstylz')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Bruce Aisher')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('11')
    track.set_title(u'Repercussion [Danny Howells Remix]')
    track.set_length(594)
    track_artist = expected.create_artist()
    track_artist.set_name('Science Dept.')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Danny Howells')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('12')
    track.set_title(u"Beautiful Strange [John Digweed & Nick Muir's Even Stranger Remix]")
    track.set_length(439)
    track_artist = expected.create_artist()
    track_artist.set_name('Nick Muir')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('John Digweed')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('13')
    track.set_title(u'Save Me [James Talk Remix]')
    track.set_length(485)
    track_artist = expected.create_artist()
    track_artist.set_name('Guy J')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('James Talk')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('14')
    track.set_title(u'Heaven Scent [M.O.D.E. Remix]')
    track.set_length(675)
    track_artist = expected.create_artist()
    track_artist.set_name('Bedrock')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('M.O.D.E.')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('15')
    track.set_title(u'Past Present Future (Part 2) [Continuous DJ Mix]')
    track.set_length(4333)
    track_artist = expected.create_artist()
    track_artist.set_name('John Digweed')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('16')
    track.set_title(u'Forge [Tom Middleton Remix]')
    track.set_length(481)
    track_artist = expected.create_artist()
    track_artist.set_name('Bedrock')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Tom Middleton')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('17')
    track.set_title('Empathy')
    track.set_length(478)
    track_artist = expected.create_artist()
    track_artist.set_name('Jim Rivers')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('18')
    track.set_title(u'Pushin Too Hard [Bruce Aisher Remix]')
    track.set_length(460)
    track_artist = expected.create_artist()
    track_artist.set_name('Saints & Sinners')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Bruce Aisher')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('19')
    track.set_title(u'U Get So Give [Paolo Mojo Remix]')
    track.set_length(497)
    track_artist = expected.create_artist()
    track_artist.set_name('Moonface')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Paolo Mojo')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('20')
    track.set_title(u'Chilling Moments [Kazell Influx Audio Remix]')
    track.set_length(422)
    track_artist = expected.create_artist()
    track_artist.set_name('Shmuel Flash')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Kazell')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('21')
    track.set_title(u'K10 [Alan Fitzpatrick Remix]')
    track.set_length(512)
    track_artist = expected.create_artist()
    track_artist.set_name('Misstress Barbara')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Alan Fitzpatrick')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('22')
    track.set_title(u'Chutney [Size 9 Reinterpretation]')
    track.set_length(506)
    track_artist = expected.create_artist()
    track_artist.set_name('Tom Mangan')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('WiNK')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('23')
    track.set_title('Stoppage Time')
    track.set_length(636)
    track_artist = expected.create_artist()
    track_artist.set_name('Guy Gerber')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('24')
    track.set_title('Six Hours Later')
    track.set_length(459)
    track_artist = expected.create_artist()
    track_artist.set_name('Bradler')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Dualton')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('25')
    track.set_title(u'All I Know feat. Astrid Suryanto [Jim Rivers Space Disco Remix]')
    track.set_length(476)
    track_artist = expected.create_artist()
    track_artist.set_name('Morgan Page')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Gregory Shiff')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Jim Rivers')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('26')
    track.set_title('Gravitation')
    track.set_length(323)
    track_artist = expected.create_artist()
    track_artist.set_name('Nick Muir')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('27')
    track.set_title(u'Rise In [Steve Lawler Powder Powder Remix (Edit)]')
    track.set_length(298)
    track_artist = expected.create_artist()
    track_artist.set_name('Steve Lawler')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('28')
    track.set_title(u"Santiago [Guy Gerber's Hotrod Dub]")
    track.set_length(512)
    track_artist = expected.create_artist()
    track_artist.set_name('Bedrock')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Guy Gerber')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('29')
    track.set_title('Sensei')
    track.set_length(441)
    track_artist = expected.create_artist()
    track_artist.set_name("Funk D'Void")
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Toby Izui')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('30')
    track.set_title('Madhouse')
    track.set_length(649)
    track_artist = expected.create_artist()
    track_artist.set_name('Pindrop')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('31')
    track.set_title(u'Lunar [Late Night Friday Remix]')
    track.set_length(349)
    track_artist = expected.create_artist()
    track_artist.set_name('Guy J')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    expected.append_disc(disc)
    # Scrape the live page and compare the full result object.
    s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/bedrock-10-past-present-future/138250')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_release_with_various_artists_in_track_artist(self):
    """Scrape a Beatport compilation whose final track has no per-track artist.

    Builds the expected ReleaseResult for '040 Recordings Ibiza 2013
    Vol. 1' (16 tracks) and asserts the live scrape matches.  Track 16
    (the continuous mix) gets no append_artist() calls at all — the
    scraper must leave its artist list empty rather than invent one.

    NOTE(review): integration test — hits www.beatport.com over the
    network, so it can break when the site changes.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('2013-07-10')
    release_event.set_country(None)
    expected.append_release_event(release_event)
    expected.set_format(None)
    label_id = expected.create_label_id()
    label_id.set_label('040 Recordings')
    label_id.append_catalogue_nr('040REC012C')
    expected.append_label_id(label_id)
    expected.set_title('040 Recordings Ibiza 2013 Vol. 1')
    # Compilation: release-level artist is "various", with no single name.
    artist = expected.create_artist()
    artist.set_name(None)
    artist.set_various(True)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre('Minimal')
    expected.append_genre('Tech House')
    expected.set_url('http://www.beatport.com/release/040-recordings-ibiza-2013-vol-1/1113652')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    track = disc.create_track()
    track.set_number('1')
    track.set_title('Japanese Floor')
    track.set_length(422)
    track_artist = expected.create_artist()
    track_artist.set_name('Cudder & Mulder')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('2')
    track.set_title('Persian')
    track.set_length(379)
    track_artist = expected.create_artist()
    track_artist.set_name('Carlo Ruetz')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('3')
    track.set_title('Me And My Violin')
    track.set_length(483)
    track_artist = expected.create_artist()
    track_artist.set_name('Andree Wischnewski')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('4')
    track.set_title('Aurra Che')
    track.set_length(432)
    track_artist = expected.create_artist()
    track_artist.set_name('Dompe')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('5')
    track.set_title('Cuture Club')
    track.set_length(432)
    track_artist = expected.create_artist()
    track_artist.set_name('Debirski')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('6')
    track.set_title('Yaman!')
    track.set_length(435)
    track_artist = expected.create_artist()
    track_artist.set_name('Robbe Rabone')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Flint Westwood')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('7')
    track.set_title(u'Jazzy Groovie [Thomas Lizzara Remix]')
    track.set_length(409)
    track_artist = expected.create_artist()
    track_artist.set_name('Kanzler & Wischnewski')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Thomas Lizzara')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('8')
    track.set_title('Alright')
    track.set_length(416)
    track_artist = expected.create_artist()
    track_artist.set_name('Dompe')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('9')
    track.set_title(u'The Sun Ft. Jan Hilde [Heinrich & Heine Remix]')
    track.set_length(372)
    track_artist = expected.create_artist()
    track_artist.set_name('NECRO & Reichmann')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    track_artist = expected.create_artist()
    track_artist.set_name('Heinrich & Heine')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.REMIXER)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('10')
    track.set_title('Libre')
    track.set_length(376)
    track_artist = expected.create_artist()
    track_artist.set_name('Neal Porter')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('11')
    track.set_title('Closer')
    track.set_length(430)
    track_artist = expected.create_artist()
    track_artist.set_name('Heinrich & Heine')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('12')
    track.set_title('Society Today')
    track.set_length(343)
    track_artist = expected.create_artist()
    track_artist.set_name('Maurice Deek')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('13')
    track.set_title('Music For The People')
    track.set_length(428)
    track_artist = expected.create_artist()
    track_artist.set_name('Heinrich & Heine')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('14')
    track.set_title('Dont Touch My Phone')
    track.set_length(472)
    track_artist = expected.create_artist()
    track_artist.set_name('Eric Kanzler')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    track = disc.create_track()
    track.set_number('15')
    track.set_title('Glare')
    track.set_length(399)
    track_artist = expected.create_artist()
    track_artist.set_name('Heinrich & Heine')
    track_artist.set_various(False)
    track_artist.append_type(expected.ArtistTypes.MAIN)
    track.append_artist(track_artist)
    disc.append_track(track)
    # Track 16 deliberately has no track-level artist appended.
    track = disc.create_track()
    track.set_number('16')
    track.set_title(u'040 Recordings Ibiza 2013 Vol. 1 [Heinrich & Heine In The Mix]')
    track.set_length(4440)
    disc.append_track(track)
    expected.append_disc(disc)
    # Scrape the live page and compare the full result object.
    s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/040-recordings-ibiza-2013-vol-1/1113652')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_404(self):
    """A Beatport release URL that does not exist must yield a NotFoundResult."""
    scraper = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/blubb/123')
    result = scraper.get_result()
    expected = NotFoundResult()
    expected.set_scraper_name(None)
    self.assertEqual(expected, result)
def test_search_scraper(self):
    """Searching Beatport for a common term must return at least one item."""
    search_result = beatport.SearchScraper('love').get_result()
    self.assertTrue(len(search_result.get_items()) > 0)
class MetalarchivesTest(TestCase):
    """Integration tests for the metal-archives.com release/search scrapers.

    Each test builds the complete expected ReleaseResult by hand and
    compares it against a live scrape of www.metal-archives.com, so these
    tests need network access and can break when the site changes.
    """

    def test_simple_album(self):
        """Single-disc, single-artist album (Nightwish - Century Child)."""
        expected = ReleaseResult()
        expected.set_scraper_name(None)
        release_event = expected.create_release_event()
        release_event.set_date('June 24th, 2002')
        release_event.set_country(None)
        expected.append_release_event(release_event)
        expected.set_format('Full-length, CD')
        label_id = expected.create_label_id()
        label_id.set_label('Spinefarm Records')
        label_id.append_catalogue_nr('spi149CD / 018459-2')
        expected.append_label_id(label_id)
        expected.set_title('Century Child')
        artist = expected.create_artist()
        artist.set_name('Nightwish')
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(artist)
        expected.set_url('http://www.metal-archives.com/albums/Nightwish/Century_Child/3719')
        disc = expected.create_disc()
        disc.set_number(1)
        disc.set_title(None)
        track = disc.create_track()
        track.set_number('1')
        track.set_title('Bless the Child')
        track.set_length(372)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('2')
        track.set_title('End of All Hope')
        track.set_length(235)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('3')
        track.set_title('Dead to the World')
        track.set_length(260)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('4')
        track.set_title('Ever Dream')
        track.set_length(284)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('5')
        track.set_title('Slaying the Dreamer')
        track.set_length(272)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('6')
        track.set_title('Forever Yours')
        track.set_length(230)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('7')
        track.set_title('Ocean Soul')
        track.set_length(255)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('8')
        track.set_title('Feel for You')
        track.set_length(235)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('9')
        track.set_title('The Phantom of the Opera')
        track.set_length(250)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('10')
        track.set_title('Beauty of the Beast')
        track.set_length(622)
        disc.append_track(track)
        expected.append_disc(disc)
        # Scrape the live page and compare the full result object.
        s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Nightwish/Century_Child/3719')
        r = s.get_result()
        self.assertEqual(expected, r)

    def test_multiple_cds(self):
        """Two-disc release: both discs must appear, each with its own tracks."""
        expected = ReleaseResult()
        expected.set_scraper_name(None)
        release_event = expected.create_release_event()
        release_event.set_date('September 22nd, 2008')
        release_event.set_country(None)
        expected.append_release_event(release_event)
        expected.set_format(u'Live album, 2CD')
        label_id = expected.create_label_id()
        label_id.set_label('GUN Records')
        label_id.append_catalogue_nr('88697 342672')
        expected.append_label_id(label_id)
        expected.set_title('Black Symphony')
        artist = expected.create_artist()
        artist.set_name('Within Temptation')
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(artist)
        expected.set_url('http://www.metal-archives.com/albums/Within_Temptation/Black_Symphony/212779')
        # Disc 1.
        disc = expected.create_disc()
        disc.set_number(1)
        disc.set_title(None)
        track = disc.create_track()
        track.set_number('1')
        track.set_title('Ouverture')
        track.set_length(463)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('2')
        track.set_title("Jillian (I'd Give My Heart)")
        track.set_length(279)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('3')
        track.set_title('The Howling')
        track.set_length(391)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('4')
        track.set_title('Stand My Ground')
        track.set_length(273)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('5')
        track.set_title('The Cross')
        track.set_length(322)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('6')
        track.set_title('What Have You Done?')
        track.set_length(298)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('7')
        track.set_title('Hand of Sorrow')
        track.set_length(340)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('8')
        track.set_title('The Heart of Everything')
        track.set_length(348)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('9')
        track.set_title('Forgiven')
        track.set_length(293)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('10')
        track.set_title('Somewhere')
        track.set_length(264)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('11')
        track.set_title('The Swan Song')
        track.set_length(240)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('12')
        track.set_title('Memories')
        track.set_length(243)
        disc.append_track(track)
        expected.append_disc(disc)
        # Disc 2.
        disc = expected.create_disc()
        disc.set_number(2)
        disc.set_title(None)
        track = disc.create_track()
        track.set_number('1')
        track.set_title('Our Solemn Hour')
        track.set_length(322)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('2')
        track.set_title('The Other Half (of Me)')
        track.set_length(304)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('3')
        track.set_title('Frozen')
        track.set_length(360)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('4')
        track.set_title('The Promise')
        track.set_length(272)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('5')
        track.set_title('Angels')
        track.set_length(495)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('6')
        track.set_title('Mother Earth')
        track.set_length(242)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('7')
        track.set_title('The Truth Beneath the Rose')
        track.set_length(443)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('8')
        track.set_title('Deceiver of Fools')
        track.set_length(458)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('9')
        track.set_title('All I Need')
        track.set_length(295)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('10')
        track.set_title('Ice Queen')
        track.set_length(435)
        disc.append_track(track)
        expected.append_disc(disc)
        s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Within_Temptation/Black_Symphony/212779')
        r = s.get_result()
        self.assertEqual(expected, r)

    def test_multiple_release_artists(self):
        """Split release: two MAIN release artists; track lengths absent (None)."""
        expected = ReleaseResult()
        expected.set_scraper_name(None)
        release_event = expected.create_release_event()
        release_event.set_date('April 14th, 2007')
        release_event.set_country(None)
        expected.append_release_event(release_event)
        expected.set_format('Split, 7" vinyl (45 RPM)')
        label_id = expected.create_label_id()
        label_id.set_label('New Iron Age Records')
        label_id.append_catalogue_nr('NIA 002')
        expected.append_label_id(label_id)
        expected.set_title('Clash of Iron Vol. I - Live at Keep It True')
        artist = expected.create_artist()
        artist.set_name('Manilla Road')
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(artist)
        artist = expected.create_artist()
        artist.set_name('Brocas Helm')
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(artist)
        expected.set_url('http://www.metal-archives.com/albums/Manilla_Road/Clash_of_Iron_Vol._I_-_Live_at_Keep_It_True/147439')
        disc = expected.create_disc()
        disc.set_number(1)
        disc.set_title(None)
        track = disc.create_track()
        track.set_number('1')
        track.set_title('Manilla Road - Death by the Hammer')
        track.set_length(None)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('2')
        track.set_title('Brocas Helm - Ravenwreck')
        track.set_length(None)
        disc.append_track(track)
        expected.append_disc(disc)
        s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Manilla_Road/Clash_of_Iron_Vol._I_-_Live_at_Keep_It_True/147439')
        r = s.get_result()
        self.assertEqual(expected, r)

    def test_404(self):
        """A non-existent album id must yield a NotFoundResult."""
        expected = NotFoundResult()
        expected.set_scraper_name(None)
        s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Within_Temptation/Black_Symphony/999999999')
        r = s.get_result()
        self.assertEqual(expected, r)

    def test_search_scraper(self):
        """Searching for a common term must return at least one item."""
        s = metalarchives.SearchScraper('love')
        r = s.get_result()
        self.assertTrue(len(r.get_items()) > 0)
class JunodownloadTest(TestCase):
    """Tests for the junodownload.com release and search scrapers.

    Each release test builds the expected ``ReleaseResult`` by hand and
    compares it against what the scraper extracts from the live site.
    The repeated builder boilerplate is factored into private helpers.
    """

    def _make_expected(self, date, label, catalogue_nr, title, url):
        """Return a ReleaseResult pre-filled with the fields shared by
        every junodownload release test (date, label/cat#, title, URL)."""
        expected = ReleaseResult()
        expected.set_scraper_name(None)
        release_event = expected.create_release_event()
        release_event.set_date(date)
        release_event.set_country(None)  # junodownload has no country info
        expected.append_release_event(release_event)
        expected.set_format(None)  # junodownload has no release format
        label_id = expected.create_label_id()
        label_id.set_label(label)
        label_id.append_catalogue_nr(catalogue_nr)
        expected.append_label_id(label_id)
        expected.set_title(title)
        expected.set_url(url)
        return expected

    def _add_artist(self, expected, name, various=False, artist_type=None):
        """Append a release artist; defaults to a non-various MAIN artist."""
        artist = expected.create_artist()
        artist.set_name(name)
        artist.set_various(various)
        if artist_type is None:
            artist_type = expected.ArtistTypes.MAIN
        artist.append_type(artist_type)
        expected.append_release_artist(artist)

    def _add_disc(self, expected, tracks, number=1):
        """Attach disc *number* populated from (number, title, length) triples."""
        disc = expected.create_disc()
        disc.set_number(number)
        disc.set_title(None)
        for track_number, title, length in tracks:
            track = disc.create_track()
            track.set_number(track_number)
            track.set_title(title)
            track.set_length(length)
            disc.append_track(track)
        expected.append_disc(disc)

    def _check_release(self, expected, url):
        """Scrape *url* and assert the result equals *expected*."""
        scraper = junodownload.ReleaseScraper.from_string(url)
        self.assertEqual(expected, scraper.get_result())

    def test_simple_album(self):
        """Plain single-artist digital release."""
        url = 'http://www.junodownload.com/products/love/1774811-02/'
        expected = self._make_expected('3 July, 2011', '3 Beat', '3BEAT 051',
                                       'Love', url)
        self._add_artist(expected, 'Inna')
        expected.append_genre('Funky')
        expected.append_genre('Club House')
        self._add_disc(expected, [
            ('1', 'Love (UK radio edit)', 151),
            ('2', 'Love (club mix)', 299),
            ('3', 'Love (eSquire radio edit)', 233),
            ('4', 'Love (eSquire mix)', 357),
            ('5', 'Love (7th Heaven radio edit)', 230),
            ('6', 'Love (7th Heaven mix)', 394),
            ('7', 'Love (Dandeej mix)', 315),
            ('8', 'Love (DJ Andi mix)', 341),
            ('9', 'Love (Klubfiller mix)', 395),
            ('10', 'Love (Klubfiller dub mix)', 389),
        ])
        self._check_release(expected, url)

    def test_featuring_main_artist(self):
        """A featured guest must be typed FEATURING, not MAIN."""
        url = 'http://www.junodownload.com/products/love/1662955-02/'
        expected = self._make_expected('12 November, 2010',
                                       'Staff Productions', 'SFP 012',
                                       'Love', url)
        self._add_artist(expected, 'Mustafa')
        self._add_artist(expected, 'Tasita D mour',
                         artist_type=expected.ArtistTypes.FEATURING)
        expected.append_genre('Broken Beat')
        expected.append_genre('Nu Jazz')
        self._add_disc(expected, [
            ('1', 'Love (original Miami mix)', 301),
            ('2', "Love (Mustafa's Deep Piano mix)", 308),
            ('3', 'Love (D-Malice Afro-edit vocal)', 381),
            ('4', 'Love (RY meets Mustafa vocal mix)', 365),
            ('5', 'Love (Ospina & Oscar P remix)', 365),
            ('6', 'Love (Ospina & Oscar P Drum dub)', 365),
            ('7', 'Love (Steven Stone remix)', 389),
            ('8', 'Love (David Mateo & Rafix club mix)', 297),
            ('9', 'Love (Rafael Yapudjian Meets RyB remix)', 449),
            ('10', 'Love (acoustic mix)', 232),
            ('11', 'Love (D-Malice Afro edit instrumental)', 381),
            ('12', 'Love (Ospina & Oscar P intru-mental)', 365),
            ('13', 'Love (Steven Stone instrumental remix)', 388),
            ('14', 'Love (David Mateo & Rafix radio club mix instrumental)', 297),
            ('15', 'Love (Rafael Yapudjian Meets RyB dub remix)', 449),
            ('16', 'Love (RY Meets Mustafa instrumental mix)', 365),
        ])
        self._check_release(expected, url)

    def test_mixed_various_main_artists(self):
        """'Various' appearing in track titles must not make the release
        a various-artists release: the sole MAIN artist stays Bass Kleph."""
        url = 'http://www.junodownload.com/products/bass-kleph-bass-kleph-presents/1789514-02/'
        expected = self._make_expected('25 July, 2011', 'Vacation Australia',
                                       'VRCD 003', 'Bass Kleph Presents', url)
        self._add_artist(expected, 'Bass Kleph')
        expected.append_genre('Funky')
        expected.append_genre('Club House')
        self._add_disc(expected, [
            ('1', 'Bass Kleph & Filthy Rich - These Mornings', 368),
            ('2', 'Bass Kleph & Alex Kenji - Melocoton', 370),
            ('3', 'Bass Kleph - Hey Ya', 380),
            ('4', 'Bass Kleph & Chris Arnott & BKCA - We Feel Love', 360),
            ('5', 'Bass Kleph - Oh Yeah', 403),
            ('6', 'Stella MC & Bass Kleph - $pend My Money (original club mix)', 490),
            ('7', "Bass Kleph - I'll Be OK", 434),
            ('8', 'Danny T & Oh Snap!! - Whine Ya Waistline (Bass Kleph remix)', 404),
            ('9', 'Joan Reyes - Shakedown (Bass Kleph remix)', 438),
            ('10', 'Bass Kleph & Prok & Fitch - Disco Ate My Baby', 362),
            ('11', 'Moguai & Westbam - Original Hardcore (Bass Kleph remix)', 420),
            ('12', 'Jesse Vorn - Somewhere (Bass Kleph remix)', 376),
            ('13', 'Bass Kleph & Chris Arnott & Kid Massive - All Right (Bass Kleph remix)', 456),
            ('14', 'Bass Kleph & Joan Reyes - Salida (original club mix)', 427),
            ('15', 'Bass Kleph & D Ramirez - Pulse', 396),
            ('16', 'Various - Bass Kleph Presents (continuous DJ mix by Bass Kleph)', 4439),
        ])
        self._check_release(expected, url)

    def test_various_artists(self):
        """A compilation is reported as a various-artists release."""
        url = 'http://www.junodownload.com/products/2008-most-useful-tools/1384246-02/'
        expected = self._make_expected('30 December, 2008', 'NuZone Tools',
                                       'NZT 015', '2008 MOST USEFUL TOOLS', url)
        self._add_artist(expected, None, various=True)
        expected.append_genre('Progressive House')
        self._add_disc(expected, [
            ('1', 'Sygma - Nightlights', 522),
            ('2', "Adolfo Morrone - I'm Nervhouse", 455),
            ('3', 'Jonathan Carey - The Science Of Music', 354),
            ('4', 'Lorenzo Venturini - New Era', 415),
            ('5', 'E-Mark - Anthem For Deejays Part 2', 420),
            ('6', 'Alex Spadoni - Sunset', 451),
            ('7', 'Jordan Baxxter feat Aedo - What It Feels Like For A Girl?', 470),
            ('8', 'Hildebrand - Raindrops', 519),
            ('9', 'Dario Maffia - Phaelon', 545),
            ('10', 'Emerald Coast - Exhausted', 338),
            ('11', 'Sygma - Children', 539),
            ('12', 'GoldSaint - Tonight', 405),
            ('13', 'Peter Santos - Back To You', 454),
            ('14', 'Oscar Burnside - Dark Side', 334),
            ('15', 'GoldSaint - Recharge', 510),
            ('16', 'Luca Lux - Wildest Dream', 428),
            ('17', 'SimoX DJ - Star', 317),
            ('18', 'Greek S - The Sound (09 mix)', 517),
            ('19', 'Various - Mixed Tools 2008 (Part 1 - mixed by Sygma)', 2494),
            ('20', 'Various - Mixed Tools 2008 (Part 2 - mixed by Peter Santos)', 2334),
        ])
        self._check_release(expected, url)

    def test_404(self):
        """A dead product URL must yield a NotFoundResult."""
        expected = NotFoundResult()
        expected.set_scraper_name(None)
        s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/2008-most-useful-tools/99999999/')
        self.assertEqual(expected, s.get_result())

    def test_search_scraper(self):
        """Searching a common term must return at least one item."""
        result = junodownload.SearchScraper('love').get_result()
        self.assertTrue(len(result.get_items()) > 0)
class ITunesTest(TestCase):
    """Tests for the itunes.apple.com release scraper."""

    def test_simple_album(self):
        """Single-disc album scraped from the US iTunes store."""
        url = 'http://itunes.apple.com/us/album/love-remastered/id3022929?ign-mpt=uo%3D4'
        expected = ReleaseResult()
        expected.set_scraper_name(None)
        event = expected.create_release_event()
        event.set_date('1985')
        event.set_country(None)
        expected.append_release_event(event)
        expected.set_format(None)
        expected.set_title('Love (Remastered)')
        main_artist = expected.create_artist()
        main_artist.set_name('The Cult')
        main_artist.set_various(False)
        main_artist.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(main_artist)
        for genre in ('Rock', 'Adult Alternative', 'Hard Rock',
                      'Alternative', 'Goth Rock', 'College Rock'):
            expected.append_genre(genre)
        expected.set_url(url)
        disc = expected.create_disc()
        disc.set_number(1)
        disc.set_title(None)
        titles_and_lengths = [
            ('Nirvana', 326),
            ('Big Neon Glitter', 291),
            ('Love', 329),
            ('Brother Wolf, Sister Moon', 407),
            ('Rain', 236),
            ('Phoenix', 306),
            ('Hollow Man', 285),
            ('Revolution', 326),
            ('She Sells Sanctuary', 263),
            ('Black Angel', 322),
        ]
        for position, (title, length) in enumerate(titles_and_lengths, start=1):
            track = disc.create_track()
            track.set_number(str(position))
            track.set_title(title)
            track.set_length(length)
            disc.append_track(track)
        expected.append_disc(disc)
        self.assertEqual(expected, itunes.ReleaseScraper.from_string(url).get_result())
def test_multiple_cds(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('Aug 15, 2007')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
expected.set_title('Dark Passion Play (Double Disc Version)')
artist = expected.create_artist()
artist.set_name('Nightwish')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Rock')
expected.append_genre('Metal')
expected.append_genre('Alternative')
expected.append_genre('Goth Rock')
expected.append_genre('Death Metal/Black Metal')
expected.set_url('https://itunes.apple.com/us/album/dark-passion-play-double-disc/id264697038?uo=4')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('The Poet and the Pendulum')
track.set_length(834)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Bye Bye Beautiful')
track.set_length(254)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Amaranth')
track.set_length(231)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Cadence of Her Last Breath')
track.set_length(254)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Master Passion Greed')
track.set_length(362)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Eva')
track.set_length(265)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Sahara')
track.set_length(347)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Whoever Brings the Night')
track.set_length(257)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('For the Heart I Once Had')
track.set_length(235)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('The Islander')
track.set_length(305)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Last of the Wilds')
track.set_length(340)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('7 Days to the Wolves')
track.set_length(423)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Meadows of Heaven')
track.set_length(430)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Reach (Amaranth Demo Version) [Bonus Track]')
track.set_length(232)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('The Poet and the Pendulum (Instrumental)')
track.set_length(834)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Bye Bye Beautiful (Instrumental)')
track.set_length(254)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Amaranth (Instrumental)')
track.set_length(231)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Cadence of Her Last Breath (Instrumental)')
track.set_length(254)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Master Passion Greed (Instrumental)')
track.set_length(362)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Eva (Instrumental)')
track.set_length(265)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Sahara (Instrumental)')
track.set_length(347)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Whoever Brings the Night (Instrumental)')
track.set_length(257)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('For the Heart I Once Had (Instrumental)')
track.set_length(236)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('The Islander (Instrumental)')
track.set_length(305)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Last of the Wilds (Instrumental)')
track.set_length(340)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('7 Days to the Wolves (Instrumental)')
track.set_length(424)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Meadows of Heaven (Instrumental)')
track.set_length(429)
disc.append_track(track)
expected.append_disc(disc)
s = itunes.ReleaseScraper.from_string('https://itunes.apple.com/us/album/dark-passion-play-double-disc/id264697038?uo=4')
r = s.get_result()
self.assertEqual(expected, r)
def test_various_artists(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('Oct 28, 2008')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
expected.set_title('Twilight (Original Motion Picture Soundtrack)')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Soundtrack')
expected.set_url('https://itunes.apple.com/us/album/twilight-original-motion-picture/id294342468?ign-mpt=uo%3D4')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Supermassive Black Hole')
track.set_length(209)
track_artist = expected.create_artist()
track_artist.set_name('Muse')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Decode')
track.set_length(261)
track_artist = expected.create_artist()
track_artist.set_name('Paramore')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Full Moon')
track.set_length(230)
track_artist = expected.create_artist()
track_artist.set_name('The Black Ghosts')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Leave Out All the Rest')
track.set_length(199)
track_artist = expected.create_artist()
track_artist.set_name('LINKIN PARK')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Spotlight (Twilight Mix)')
track.set_length(200)
track_artist = expected.create_artist()
track_artist.set_name('MuteMath')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Go All the Way (Into the Twilight)')
track.set_length(207)
track_artist = expected.create_artist()
track_artist.set_name('Perry Farrell')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Tremble for My Beloved')
track.set_length(233)
track_artist = expected.create_artist()
track_artist.set_name('Collective Soul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('I Caught Myself')
track.set_length(235)
track_artist = expected.create_artist()
track_artist.set_name('Paramore')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Eyes On Fire')
track.set_length(301)
track_artist = expected.create_artist()
track_artist.set_name('Blue Foundation')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Never Think')
track.set_length(269)
track_artist = expected.create_artist()
track_artist.set_name('Rob Pattinson')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Flightless Bird, American Mouth')
track.set_length(240)
track_artist = expected.create_artist()
track_artist.set_name('Iron & Wine')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title("Bella's Lullaby")
track.set_length(138)
track_artist = expected.create_artist()
track_artist.set_name('Carter Burwell')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Let Me Sign (Bonus Track)')
track.set_length(138)
track_artist = expected.create_artist()
track_artist.set_name('Rob Pattinson')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('La Traviata (Bonus Track)')
track.set_length(185)
track_artist = expected.create_artist()
track_artist.set_name('Royal Philharmonic Orchestra')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Clair de Lune (Bonus Track)')
track.set_length(358)
track_artist = expected.create_artist()
track_artist.set_name('The APM Orchestra')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = itunes.ReleaseScraper.from_string('https://itunes.apple.com/us/album/twilight-original-motion-picture/id294342468?ign-mpt=uo%3D4')
r = s.get_result()
self.assertEqual(expected, r)
def test_non_us_store(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('1985')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
expected.set_title('Love (Remastered)')
artist = expected.create_artist()
artist.set_name('The Cult')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Rock')
expected.append_genre('Musique')
expected.append_genre('Alternative adulte')
expected.append_genre('Hard rock')
expected.append_genre('Alternative')
expected.append_genre('Rock gothique')
expected.append_genre('College rock')
expected.set_url('http://itunes.apple.com/fr/album/love-remastered/id3022929?ign-mpt=uo%3D4')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Nirvana')
track.set_length(326)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Big Neon Glitter')
track.set_length(291)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Love')
track.set_length(329)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Brother Wolf, Sister Moon')
track.set_length(407)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Rain')
track.set_length(236)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Phoenix')
track.set_length(306)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Hollow Man')
track.set_length(285)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Revolution')
track.set_length(326)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('She Sells Sanctuary')
track.set_length(263)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Black Angel')
track.set_length(322)
disc.append_track(track)
expected.append_disc(disc)
s = itunes.ReleaseScraper.from_string('http://itunes.apple.com/fr/album/love-remastered/id3022929?ign-mpt=uo%3D4')
r = s.get_result()
self.assertEqual(expected, r)
def test_tracknum_in_name_column(self):
    """Scrape a multi-disc iTunes release whose track numbers are embedded
    in the name column and verify the full expected ReleaseResult.

    The expected fixture is built from data tables instead of repeating
    the create/set/append boilerplate per track: track numbers are
    sequential strings per disc and are generated by enumerate(); the
    per-track artist order is preserved exactly, since result equality
    may depend on it.
    """
    URL = 'https://itunes.apple.com/us/album/chopin-piano-works/id77261376'
    VASARY = u'Tam\xe1s V\xe1s\xe1ry'
    SEMKOW = u'Jerzy Semkow'
    KULKA = u'Janos Kulka'
    BERLIN = u'Berlin Philharmonic'

    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('Jun 01, 2005')
    release_event.set_country(None)
    expected.append_release_event(release_event)
    expected.set_format(None)
    expected.set_title('Chopin: Piano Works')
    artist = expected.create_artist()
    artist.set_name(VASARY)
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.append_genre('Classical')
    expected.set_url(URL)

    # One inner list per disc; each entry is (title, length in seconds,
    # main artists in their original order).
    discs = [
        [
            ('12 Etudes, Op. 10: No. 1. in C', 136, (VASARY,)),
            ('12 Etudes, Op.10: No. 2. in A Minor "chromatique"', 84, (VASARY,)),
            ('12 Etudes, Op.10: No. 3. in E "Tristesse"', 243, (VASARY,)),
            ('12 Etudes, Op.10: No. 4. in C-Sharp Minor', 128, (VASARY,)),
            ('12 Etudes, Op.10: No. 5. in G-Flat "Black Keys"', 102, (VASARY,)),
            ('12 Etudes, Op.10: No. 6. in E-Flat Minor', 195, (VASARY,)),
            ('12 Etudes, Op.10: No. 7. in C', 92, (VASARY,)),
            ('12 Etudes, Op.10: No. 8. in F', 163, (VASARY,)),
            ('12 Etudes, Op.10: No. 9. in F Minor', 137, (VASARY,)),
            ('12 Etudes, Op.10: No. 10. in A-Flat', 140, (VASARY,)),
            ('12 Etudes, Op.10: No. 11. in E-Flat', 135, (VASARY,)),
            ('12 Etudes, Op.10: No. 12. in C Minor "Revolutionary"', 173, (VASARY,)),
            ('12 Etudes, Op. 25: No. 1 in A-Flat - "Harp Study"', 171, (VASARY,)),
            ('12 Etudes, Op.25: No. 2 in F Minor', 92, (VASARY,)),
            ('12 Etudes, Op.25: No. 3 in F Major', 103, (VASARY,)),
            ('12 Etudes, Op.25: No. 4 in A Minor', 88, (VASARY,)),
            ('12 Etudes, Op.25: No. 5 in E Minor', 198, (VASARY,)),
            ('12 Etudes, Op.25: No. 6 in G-Sharp Minor', 117, (VASARY,)),
            ('12 Etudes, Op.25: No. 7 in C-Sharp Minor', 312, (VASARY,)),
            ('12 Etudes, Op.25: No. 8 in D-Flat', 66, (VASARY,)),
            ('12 Etudes, Op.25: No. 9 in G-Flat, "Butterfly Wings"', 62, (VASARY,)),
            ('12 Etudes, Op.25: No. 10 in B Minor', 244, (VASARY,)),
            ('12 Etudes, Op.25: No. 11 in A Minor "Winter Wind"', 215, (VASARY,)),
            ('12 Etudes, Op. 25: No. 12 in C Minor', 168, (VASARY,)),
            ('Impromptu No. 1 in A-Flat, Op.29', 233, (VASARY,)),
            ('Impromptu No. 2 in F-Sharp, Op.36', 351, (VASARY,)),
            ('Impromptu No. 3 in G-Flat, Op.51', 284, (VASARY,)),
            ('Impromptu No. 4 in C-Sharp Minor, Op. 66 "Fantaisie-Impromptu"', 291, (VASARY,)),
        ],
        [
            ('Piano Sonata No. 2 in B-Flat Minor, Op. 35: I. Grave - Doppio Movimento', 331, (VASARY,)),
            (u'Piano Sonata No. 2 in B-Flat Minor, Op. 35: II. Scherzo - Pi\xf9 Lento - Tempo I', 397, (VASARY,)),
            (u'Piano Sonata No. 2 in B-Flat Minor, Op. 35: III. Marche Fun\xe8bre (Lento)', 503, (VASARY,)),
            ('Piano Sonata No. 2 in B-Flat Minor, Op. 35: IV. Finale (Presto)', 97, (VASARY,)),
            ('Piano Sonata No. 3 in B Minor, Op. 58: I. Allegro Maestoso', 533, (VASARY,)),
            ('Piano Sonata No. 3 in B Minor, Op. 58: II. Scherzo (Molto Vivace)', 170, (VASARY,)),
            ('Piano Sonata No. 3 in B Minor, Op. 58: III. Largo', 561, (VASARY,)),
            ('Piano Sonata No. 3 in B Minor, Op. 58: IV. Finale (Presto Non Tanto)', 309, (VASARY,)),
            ('Mazurka No. 54 in D: Allegro Non Troppo', 71, (VASARY,)),
            ('Mazurka No. 46 in C Op.67 No.3: Allegretto', 88, (VASARY,)),
            ('Mazurka No. 49 in A Minor Op. 68, No. 2: Lento', 155, (VASARY,)),
            ('Mazurka No. 5 in B-Flat Op. 7, No. 1: Vivace', 140, (VASARY,)),
            ('Introduction and Variations On a German National Air Op.posth. (KK 925-927)', 387, (VASARY,)),
            ('Mazurka No. 58 in A-Flat: Poco Mosso', 77, (VASARY,)),
            ('Berceuse in D-Flat, Op. 57: Andante', 316, (VASARY,)),
            ('Polonaise No. 6 in A-Flat, Op. 53 -"Heroic": Maestoso', 413, (VASARY,)),
        ],
        [
            ('Piano Concerto No. 1 in E Minor, Op. 11: I. Allegro Maestoso', 1215, (VASARY, SEMKOW, BERLIN)),
            ('Piano Concerto No. 1 in E Minor, Op. 11: II. Romance (Larghetto)', 636, (SEMKOW, BERLIN, VASARY)),
            ('Piano Concerto No. 1 in E Minor, Op. 11: III. Rondo (Vivace)', 619, (SEMKOW, BERLIN, VASARY)),
            ('Piano Concerto No. 2 in F Minor, Op. 21: I. Maestoso', 901, (KULKA, BERLIN, VASARY)),
            ('Piano Concerto No. 2 in F Minor, Op. 21: II. Larghetto', 583, (KULKA, BERLIN, VASARY)),
            ('Piano Concerto No. 2 in F Minor, Op. 21: III. Allegro Vivace', 524, (KULKA, BERLIN, VASARY)),
        ],
    ]
    for disc_number, track_data in enumerate(discs, start=1):
        disc = expected.create_disc()
        disc.set_number(disc_number)
        disc.set_title(None)
        for track_number, (title, length, artists) in enumerate(track_data, start=1):
            track = disc.create_track()
            # Track numbers are strings in the scraper result.
            track.set_number(str(track_number))
            track.set_title(title)
            track.set_length(length)
            for name in artists:
                track_artist = expected.create_artist()
                track_artist.set_name(name)
                track_artist.set_various(False)
                track_artist.append_type(expected.ArtistTypes.MAIN)
                track.append_artist(track_artist)
            disc.append_track(track)
        expected.append_disc(disc)

    s = itunes.ReleaseScraper.from_string(URL)
    r = s.get_result()
    self.assertEqual(expected, r)
def test_404(self):
    """A nonexistent US iTunes album id must yield a NotFoundResult."""
    wanted = NotFoundResult()
    wanted.set_scraper_name(None)
    scraper = itunes.ReleaseScraper.from_string('http://itunes.apple.com/us/album/blubb/id999999999999')
    self.assertEqual(wanted, scraper.get_result())
def test_non_us_404(self):
    """A nonexistent album id on a non-US (fr) store must also yield a NotFoundResult."""
    wanted = NotFoundResult()
    wanted.set_scraper_name(None)
    scraper = itunes.ReleaseScraper.from_string('http://itunes.apple.com/fr/album/blubb/id999999999999')
    self.assertEqual(wanted, scraper.get_result())
def test_search_scraper(self):
    """Searching iTunes for a common term returns at least one item."""
    result = itunes.SearchScraper('love').get_result()
    self.assertTrue(len(result.get_items()) > 0)
class BandcampTest(TestCase):
def test_album_with_band_name(self):
    """Scrape a Bandcamp album hosted on a label subdomain and verify the
    band name, WEB-release format and the full single-disc track list.

    The expected track list is built from a data table instead of
    repeating the create/set/append boilerplate per track; track numbers
    are sequential strings and are generated by enumerate().
    """
    URL = 'http://music.throatruinerrecords.com/album/love-sex-machine'
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date(u'2012')
    release_event.set_country(None)
    expected.append_release_event(release_event)
    expected.set_format('WEB release')
    expected.set_title(u'Love Sex Machine')
    artist = expected.create_artist()
    artist.set_name(u'Love Sex Machine')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.set_url(URL)

    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (title, length in seconds) per track; no per-track artists here.
    tracks = [
        (u'Anal On Deceased Virgin', 335),
        (u'Deafening Peepshow', 270),
        (u'Fucking Battle', 157),
        (u'Antagonism Can STFU', 179),
        (u'Plenty Of Feelings', 147),
        (u'Vagina Curse', 320),
        (u'Killed With A Monster Cock', 284),
        (u'Warstrike Takes The Piss', 275),
    ]
    for number, (title, length) in enumerate(tracks, start=1):
        track = disc.create_track()
        track.set_number(str(number))
        track.set_title(title)
        track.set_length(length)
        disc.append_track(track)
    expected.append_disc(disc)

    s = bandcamp.ReleaseScraper.from_string(URL)
    r = s.get_result()
    self.assertEqual(expected, r)
# NOTE: a test named test_album_without_band_name existed here once, but its
# original intent is unknown; kept as a placeholder in case it is recovered.
# def test_album_without_band_name(self):
#     pass
def test_album_with_various_artists(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2013')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format('WEB release')
expected.set_title(u'Indietracks Compilation 2013')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://indietracks.bandcamp.com/album/indietracks-compilation-2013')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title(u'If You Still Want Him')
track.set_length(250)
track_artist = expected.create_artist()
track_artist.set_name(u'The French Defence')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title(u'Is Anybody Out There?')
track.set_length(246)
track_artist = expected.create_artist()
track_artist.set_name(u'The Ballet')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title(u'Rulers And The States')
track.set_length(165)
track_artist = expected.create_artist()
track_artist.set_name(u'bis')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title(u'Temporary Tattoo')
track.set_length(171)
track_artist = expected.create_artist()
track_artist.set_name(u'Lardpony')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title(u'Always Want Us To')
track.set_length(192)
track_artist = expected.create_artist()
track_artist.set_name(u'EXPENSIVE')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title(u'Stockport')
track.set_length(328)
track_artist = expected.create_artist()
track_artist.set_name(u'The Wake')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title(u'Secret')
track.set_length(132)
track_artist = expected.create_artist()
track_artist.set_name(u'Frozy')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title(u'Jackie')
track.set_length(218)
track_artist = expected.create_artist()
track_artist.set_name(u'The Understudies')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title(u'Ticket Machine')
track.set_length(184)
track_artist = expected.create_artist()
track_artist.set_name(u'Making Marks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title(u'Echoing Days')
track.set_length(204)
track_artist = expected.create_artist()
track_artist.set_name(u'Monnone Alone')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title(u'Swanwick Junction')
track.set_length(172)
track_artist = expected.create_artist()
track_artist.set_name(u'Northern Spies')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title(u'Terrible Things')
track.set_length(141)
track_artist = expected.create_artist()
track_artist.set_name(u'Owl & Mouse')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title(u"She'll Come Back for Indian Summer")
track.set_length(218)
track_artist = expected.create_artist()
track_artist.set_name(u'Alpaca Sports')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title(u'Glockelbar')
track.set_length(137)
track_artist = expected.create_artist()
track_artist.set_name(u'Haiku Salut')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title(u'Astronaut')
track.set_length(190)
track_artist = expected.create_artist()
track_artist.set_name(u'Woog Riots')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title(u'Tut Tut Tut')
track.set_length(150)
track_artist = expected.create_artist()
track_artist.set_name(u'The Tuts')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('17')
track.set_title(u'Mosaic')
track.set_length(161)
track_artist = expected.create_artist()
track_artist.set_name(u'Fear Of Men')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('18')
track.set_title(u'Only You')
track.set_length(194)
track_artist = expected.create_artist()
track_artist.set_name(u'Big Wave')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('19')
track.set_title(u'The Things That You Said')
track.set_length(200)
track_artist = expected.create_artist()
track_artist.set_name(u'The Fireworks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('20')
track.set_title(u'Glue')
track.set_length(276)
track_artist = expected.create_artist()
track_artist.set_name(u'Fever Dream')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('21')
track.set_title(u'Slackjawed')
track.set_length(175)
track_artist = expected.create_artist()
track_artist.set_name(u'Tunabunny')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('22')
track.set_title(u'Lie')
track.set_length(224)
track_artist = expected.create_artist()
track_artist.set_name(u'Cars Can Be Blue')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('23')
track.set_title(u'Br\xe4nn\xf6')
track.set_length(223)
track_artist = expected.create_artist()
track_artist.set_name(u'Finnmark!')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('24')
track.set_title(u'Sorry')
track.set_length(166)
track_artist = expected.create_artist()
track_artist.set_name(u'The Art Club')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('25')
track.set_title(u'Food')
track.set_length(181)
track_artist = expected.create_artist()
track_artist.set_name(u'The Lovely Eggs')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('26')
track.set_title(u'Clean Up Yr Own Shit, Pal')
track.set_length(132)
track_artist = expected.create_artist()
track_artist.set_name(u'Good Grief')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('27')
track.set_title(u'Sycamore')
track.set_length(162)
track_artist = expected.create_artist()
track_artist.set_name(u'Martha')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('28')
track.set_title(u'Disappear')
track.set_length(147)
track_artist = expected.create_artist()
track_artist.set_name(u'Bloomer')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('29')
track.set_title(u'You Held My Hand')
track.set_length(158)
track_artist = expected.create_artist()
track_artist.set_name(u'Flowers')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('30')
track.set_title(u'J.K.')
track.set_length(139)
track_artist = expected.create_artist()
track_artist.set_name(u'David Leach')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('31')
track.set_title(u'Always Thought')
track.set_length(294)
track_artist = expected.create_artist()
track_artist.set_name(u'Jupiter In Jars')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('32')
track.set_title(u'My Old Friend')
track.set_length(164)
track_artist = expected.create_artist()
track_artist.set_name(u"Enderby's Room")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('33')
track.set_title(u'I Got The Answer')
track.set_length(172)
track_artist = expected.create_artist()
track_artist.set_name(u'The Magic Theatre')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('34')
track.set_title(u'I Love You')
track.set_length(178)
track_artist = expected.create_artist()
track_artist.set_name(u'The Wave Pictures')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('35')
track.set_title(u'Pilot Light')
track.set_length(234)
track_artist = expected.create_artist()
track_artist.set_name(u'Pete Green')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('36')
track.set_title(u"Let's Go Surfing")
track.set_length(181)
track_artist = expected.create_artist()
track_artist.set_name(u'Helen Love')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('37')
track.set_title(u'Summer, You And Me')
track.set_length(180)
track_artist = expected.create_artist()
track_artist.set_name(u'When Nalda Became Punk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('38')
track.set_title(u'Secret Wish')
track.set_length(89)
track_artist = expected.create_artist()
track_artist.set_name(u'The McTells')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('39')
track.set_title(u'Better Than Love')
track.set_length(163)
track_artist = expected.create_artist()
track_artist.set_name(u'Pale Spectres')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('40')
track.set_title(u'Without You')
track.set_length(147)
track_artist = expected.create_artist()
track_artist.set_name(u'Milky Wimpshake')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('41')
track.set_title(u"Let's Stay Undecided")
track.set_length(181)
track_artist = expected.create_artist()
track_artist.set_name(u'The Soulboy Collective mit Antenne Danger')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('42')
track.set_title(u'Age Of Victoria')
track.set_length(261)
track_artist = expected.create_artist()
track_artist.set_name(u'The Secret History')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('43')
track.set_title(u'Eating Me, Eating You')
track.set_length(202)
track_artist = expected.create_artist()
track_artist.set_name(u'The Beautiful Word')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('44')
track.set_title(u'Scared And Worried')
track.set_length(142)
track_artist = expected.create_artist()
track_artist.set_name(u'Without Feathers')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('45')
track.set_title(u'Save Me')
track.set_length(155)
track_artist = expected.create_artist()
track_artist.set_name(u'The Choo Choo Trains')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('46')
track.set_title(u'Evil/Shy (Acoustic Version)')
track.set_length(187)
track_artist = expected.create_artist()
track_artist.set_name(u'The Mini Skips')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('47')
track.set_title(u'Slow Trains')
track.set_length(201)
track_artist = expected.create_artist()
track_artist.set_name(u'anaesthetics')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = bandcamp.ReleaseScraper.from_string('http://indietracks.bandcamp.com/album/indietracks-compilation-2013')
r = s.get_result()
self.assertEqual(expected, r)
def test_album_with_track_artist(self):
    """Scrape an album where one track carries its own artist credit.

    All tracks inherit the release artist except track 7, which names
    'Breezy Lovejoy' as its main artist.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date(u'2012')
    release_event.set_country(None)
    expected.append_release_event(release_event)
    expected.set_format('Free WEB release')
    expected.set_title(u'Love Everyday EP')
    artist = expected.create_artist()
    artist.set_name(u'Dumbfoundead')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.set_url('http://music.dumbfoundead.com/album/love-everyday-ep')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (track number, title, length in seconds, optional per-track artist)
    tracks = [
        ('1', u'For You', 91, None),
        ('2', u'Love Everyday', 211, None),
        ('3', u'Stole the Show', 177, None),
        ('4', u'Love is a Song', 292, None),
        ('5', u'Body High ft. Breezy Lovejoy & Jose Rios', 267, None),
        ('6', u'Not Right Now ft. Wax', 173, None),
        ('7', u'Breezy Lovejoy - Paradise', 202, u'Breezy Lovejoy'),
    ]
    for number, title, length, artist_name in tracks:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        if artist_name is not None:
            track_artist = expected.create_artist()
            track_artist.set_name(artist_name)
            track_artist.set_various(False)
            track_artist.append_type(expected.ArtistTypes.MAIN)
            track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    s = bandcamp.ReleaseScraper.from_string('http://music.dumbfoundead.com/album/love-everyday-ep')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_album_with_utf8_characters(self):
    """Scrape an album whose track titles contain non-ASCII characters.

    Exercises titles such as u'Canc\xfan' to verify the scraper decodes
    the page correctly.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date(u'2012')
    release_event.set_country(None)
    expected.append_release_event(release_event)
    expected.set_format('WEB release')
    expected.set_title(u'Illusions')
    artist = expected.create_artist()
    artist.set_name(u'Approaching Nirvana')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    expected.set_url('http://music.approachingnirvana.com/album/illusions')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (track number, title, length in seconds)
    tracks = [
        ('1', u'Sugar High', 162),
        ('2', u'Illusion (feat. Brenton Mattheus)', 267),
        ('3', u'Beer Remastered', 281),
        ('4', u'Snowfall', 270),
        ('5', u'Love Theory', 157),
        ('6', u'Canc\xfan', 257),
        ('7', u'South Side', 268),
        ('8', u'Illusion (Instrumental)', 267),
        ('9', u'Love Theory (Instrumental)', 157),
        ('10', u'Illusion (Extended Mix) [feat. Brenton Mattheus]', 372),
        ('11', u'Beer Remastered (Extended Mix)', 420),
        ('12', u'Snowfall (Extended Mix)', 424),
        ('13', u'Love Theory (Extended Mix)', 299),
        ('14', u'Canc\xfan (Extended Mix)', 374),
        ('15', u'South Side (Extended Mix)', 374),
        ('16', u'Illusions Continuous Mix', 2018),
        ('17', u'Illusions Continuous Instrumental Mix', 2018),
    ]
    for number, title, length in tracks:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        disc.append_track(track)
    expected.append_disc(disc)
    s = bandcamp.ReleaseScraper.from_string('http://music.approachingnirvana.com/album/illusions')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_band_discography(self):
    """Scrape a band discography page into a list of release items.

    Each item's query and url are identical album links on the band's
    bandcamp subdomain, carrying the scraper's '?pk=459' suffix.
    """
    expected = ListResult()
    expected.set_scraper_name(None)
    # (item name, release date, album slug on amandapalmer.bandcamp.com)
    releases = [
        (u'Amanda Palmer \u2013 Who Killed Amanda Palmer [Alternate Tracks]',
         '2008-12-24', 'who-killed-amanda-palmer-alternate-tracks'),
        (u'8in8 \u2013 Nighty Night',
         '2011-04-26', 'nighty-night'),
        (u'Amanda Palmer & The Grand Theft Orchestra \u2013 Theatre Is Evil',
         '2012-09-11', 'theatre-is-evil-2'),
        (u'Amanda Palmer \u2013 The Art of Asking Playlist',
         '2014-11-11', 'the-art-of-asking-playlist'),
        (u'Amanda Palmer \u2013 Amanda Palmer Performs The Popular Hits Of Radiohead On Her Magical Ukulele',
         '2010-07-20', 'amanda-palmer-performs-the-popular-hits-of-radiohead-on-her-magical-ukulele'),
        (u'Tristan Allen \u2013 Tristan Allen',
         '2010-12-12', 'tristan-allen'),
        (u"Amanda Palmer & Friends \u2013 AFP's Webcastacular NYC Extravaganzaca!",
         '2010-09-23', 'afps-webcastacular-nyc-extravaganzaca'),
        (u'Amanda Palmer \u2013 Who Killed Amanda Palmer',
         '2008-09-16', 'who-killed-amanda-palmer'),
        (u'Amanda Palmer & Murder By Death \u2013 7 Series (Part 3)',
         '2009-05-15', '7-series-part-3'),
        (u'Amanda Palmer \u2013 Amanda Palmer Goes Down Under',
         '2011-01-21', 'amanda-palmer-goes-down-under'),
        (u'Amanda Palmer, The Young Punx, and Peaches \u2013 Map of Tasmania: The Remix Project',
         '2011-04-10', 'map-of-tasmania-the-remix-project'),
        (u'Neil Gaiman and Amanda Palmer \u2013 An Evening With Neil Gaiman and Amanda Palmer',
         '2013-11-19', 'an-evening-with-neil-gaiman-and-amanda-palmer'),
    ]
    for name, date, slug in releases:
        item = expected.create_item()
        item.set_name(name)
        item.set_info('Release date: ' + date)
        link = u'http://amandapalmer.bandcamp.com/album/' + slug + u'?pk=459'
        item.set_query(link)
        item.set_url(link)
        expected.append_item(item)
    s = bandcamp.DiscographyScraper.from_string('http://amandapalmer.bandcamp.com')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_band_discogrpahy_non_bandcamp_url(self):
    """Scrape a discography hosted on a custom (non-bandcamp.com) domain.

    NOTE(review): sibling tests construct scrapers via ``from_string``;
    this one calls the ``DiscographyScraper`` constructor directly —
    presumably deliberate for the non-bandcamp URL case, but worth
    confirming. (Method name typo 'discogrpahy' kept to preserve the
    test's identity.)
    """
    expected = ListResult()
    expected.set_scraper_name(None)
    # (item name, release date, album slug on music.sufjan.com)
    releases = [
        (u'Sufjan Stevens \u2013 All Delighted People EP',
         '2010-08-20', 'all-delighted-people-ep'),
        (u'Sufjan Stevens \u2013 Illinois',
         '2005-07-05', 'illinois'),
        (u'Sufjan Stevens \u2013 Songs for Christmas',
         '2006-11-21', 'songs-for-christmas'),
        (u'Sufjan Stevens \u2013 A Sun Came',
         '2004-07-20', 'a-sun-came'),
        (u'Sufjan Stevens \u2013 The Avalanche',
         '2006-07-11', 'the-avalanche'),
        (u'Sufjan Stevens \u2013 The BQE',
         '2009-10-20', 'the-bqe'),
        (u'Sufjan Stevens \u2013 Silver & Gold',
         '2012-11-13', 'silver-gold'),
        (u'Sufjan Stevens \u2013 Michigan',
         '2003-07-01', 'michigan'),
        (u'Sufjan Stevens \u2013 Carrie & Lowell',
         '2015-03-31', 'carrie-lowell'),
        (u'Sufjan Stevens \u2013 Enjoy Your Rabbit',
         '2002-04-16', 'enjoy-your-rabbit'),
        (u'Sufjan Stevens \u2013 The Age of Adz',
         '2010-10-12', 'the-age-of-adz'),
    ]
    for name, date, slug in releases:
        item = expected.create_item()
        item.set_name(name)
        item.set_info('Release date: ' + date)
        link = u'http://music.sufjan.com/album/' + slug + u'?pk=459'
        item.set_query(link)
        item.set_url(link)
        expected.append_item(item)
    s = bandcamp.DiscographyScraper('http://music.sufjan.com')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_404(self):
    """A URL that resolves to no release yields a NotFoundResult."""
    expected = NotFoundResult()
    expected.set_scraper_name(None)
    scraper = bandcamp.ReleaseScraper.from_string('http://blubb.bla.com/album/blubb')
    self.assertEqual(expected, scraper.get_result())
@unittest.skip("skipping Musik-Sammler tests until scraper is fixed")
class MusikSammlerTest(TestCase):
def test_simple_album(self):
    """Scrape a plain single-disc release from Musik-Sammler.

    Checks release metadata (date, country, format, label/catalogue
    number, genres) plus the full nine-track listing.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('1994')
    release_event.set_country('Niederlande')
    expected.append_release_event(release_event)
    expected.set_format(u'CD, Re-Release, Remastered')
    label_id = expected.create_label_id()
    label_id.set_label('EMI Records Ltd.')
    label_id.append_catalogue_nr('7243 8 29752 2 9')
    expected.append_label_id(label_id)
    expected.set_title('Dark Side Of The Moon')
    artist = expected.create_artist()
    artist.set_name('Pink Floyd')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    for genre in ('Rock', 'Progressive Rock', 'Psychedelic Rock'):
        expected.append_genre(genre)
    expected.set_url('http://www.musik-sammler.de/media/830798/')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (track number, title, length in seconds)
    tracks = [
        ('1', '(a) Speak To Me (b) Breathe', 237),
        ('2', 'On The Run', 215),
        ('3', 'Time', 424),
        ('4', 'The Great Gig In The Sky', 287),
        ('5', 'Money', 382),
        ('6', 'Us And Them', 470),
        ('7', 'Any Colour You Like', 205),
        ('8', 'Brain Damage', 230),
        ('9', 'Eclipse', 121),
    ]
    for number, title, length in tracks:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        disc.append_track(track)
    expected.append_disc(disc)
    s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/830798/')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_multiple_discs(self):
    """Scrape a two-disc release from Musik-Sammler.

    Both discs share the same track titles but differ in running times;
    the expected result builds each disc from its own track table.
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('2011')
    release_event.set_country('Japan')
    expected.append_release_event(release_event)
    expected.set_format(u'2-CD, Pappschuber, Re-Release, Remastered, Digisleeve')
    label_id = expected.create_label_id()
    label_id.set_label('EMI Japan')
    label_id.append_catalogue_nr('TOCP 71163 64')
    expected.append_label_id(label_id)
    expected.set_title('The Dark Side Of The Moon')
    artist = expected.create_artist()
    artist.set_name('Pink Floyd')
    artist.set_various(False)
    artist.append_type(expected.ArtistTypes.MAIN)
    expected.append_release_artist(artist)
    for genre in ('Rock', 'Psychedelic Rock'):
        expected.append_genre(genre)
    expected.set_url('http://www.musik-sammler.de/media/883773')
    # Per-disc track tables: (track number, title, length in seconds).
    disc_tables = [
        [
            ('1', 'Speak To Me', 67),
            ('2', 'Breathe (In The Air)', 169),
            ('3', 'On The Run', 225),
            ('4', 'Time', 413),
            ('5', 'The Great Gig In The Sky', 284),
            ('6', 'Money', 383),
            ('7', 'Us And Them', 469),
            ('8', 'Any Colour You Like', 206),
            ('9', 'Brain Damage', 226),
            ('10', 'Eclipse', 132),
        ],
        [
            ('1', 'Speak To Me', 165),
            ('2', 'Breathe (In The Air)', 170),
            ('3', 'On The Run', 308),
            ('4', 'Time', 391),
            ('5', 'The Great Gig In The Sky', 410),
            ('6', 'Money', 521),
            ('7', 'Us And Them', 489),
            ('8', 'Any Colour You Like', 490),
            ('9', 'Brain Damage', 223),
            ('10', 'Eclipse', 139),
        ],
    ]
    for disc_index, table in enumerate(disc_tables):
        disc = expected.create_disc()
        disc.set_number(disc_index + 1)
        disc.set_title(None)
        for number, title, length in table:
            track = disc.create_track()
            track.set_number(number)
            track.set_title(title)
            track.set_length(length)
            disc.append_track(track)
        expected.append_disc(disc)
    s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/883773')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_track_artist(self):
    """Scrape a split release where every track has its own artist credit.

    Tracks 1-9 are credited to 'Arovane', track 10 to 'Dynamo', and
    track 11 carries a various-artists credit (name None, various True).
    """
    expected = ReleaseResult()
    expected.set_scraper_name(None)
    release_event = expected.create_release_event()
    release_event.set_date('2002')
    release_event.set_country(u'\xd6sterreich')
    expected.append_release_event(release_event)
    expected.set_format(u'Split-CD, Cardsleeve')
    label_id = expected.create_label_id()
    label_id.set_label('Din Records')
    label_id.append_catalogue_nr('din cds 2 / EFA 51665-2')
    expected.append_label_id(label_id)
    expected.set_title('Icol Diston')
    for release_artist_name in ('Arovane', 'Dynamo'):
        artist = expected.create_artist()
        artist.set_name(release_artist_name)
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(artist)
    for genre in ('Techno', 'Electronic', 'Ambient', 'Electro', 'Freeform'):
        expected.append_genre(genre)
    expected.set_url('http://www.musik-sammler.de/media/512755')
    disc = expected.create_disc()
    disc.set_number(1)
    disc.set_title(None)
    # (track number, title, length, track artist name, various flag)
    tracks = [
        ('1', 'I.O.', 374, 'Arovane', False),
        ('2', 'Parf', 374, 'Arovane', False),
        ('3', 'Torn', 417, 'Arovane', False),
        ('4', 'Andar', 464, 'Arovane', False),
        ('5', 'Icol Diston', 19, 'Arovane', False),
        ('6', 'Yua:E', 491, 'Arovane', False),
        ('7', 'Icol Vern', 303, 'Arovane', False),
        ('8', 'Nacrath', 298, 'Arovane', False),
        ('9', 'Acval', 306, 'Arovane', False),
        ('10', u'Au\xdfen Vor Amx', 560, 'Dynamo', False),
        ('11', 'No. 8 Amx', 825, None, True),
    ]
    for number, title, length, artist_name, various in tracks:
        track = disc.create_track()
        track.set_number(number)
        track.set_title(title)
        track.set_length(length)
        track_artist = expected.create_artist()
        track_artist.set_name(artist_name)
        track_artist.set_various(various)
        track_artist.append_type(expected.ArtistTypes.MAIN)
        track.append_artist(track_artist)
        disc.append_track(track)
    expected.append_disc(disc)
    s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/512755')
    r = s.get_result()
    self.assertEqual(expected, r)
def test_various_artists(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(None)
release_event.set_country('Deutschland')
expected.append_release_event(release_event)
expected.set_format(u'2-CD, Erstauflage')
label_id = expected.create_label_id()
label_id.set_label('BCM Records GmbH')
label_id.append_catalogue_nr('55359')
expected.append_label_id(label_id)
expected.set_title('Grooves Loops & Patterns Vol.1 + Vol.2')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Techno')
expected.append_genre('Electronic')
expected.append_genre('Breakbeat')
expected.append_genre('Electro')
expected.set_url('http://www.musik-sammler.de/media/313881')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Latin Disco [130 Bpm]')
track.set_length(174)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Straight Disco [131 Bpm]')
track.set_length(187)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Medium Disco [116 Bpm]')
track.set_length(195)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Slow Disco [87 Bpm]')
track.set_length(215)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('UK Happy Disco I [118 Bpm]')
track.set_length(238)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('UK Happy Disco II [116 Bpm]')
track.set_length(242)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('UK Happy Disco III [121 Bpm]')
track.set_length(250)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Sexy Disco [107 Bpm]')
track.set_length(288)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Ethno Disco [98 Bpm]')
track.set_length(275)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Us Disco [120 Bpm]')
track.set_length(160)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Cuba Disco [122 Bpm]')
track.set_length(169)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Dance Floor Disco I [125 Bpm]')
track.set_length(242)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Dance Floor Disco II [122,5 Bpm]')
track.set_length(240)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Straight Rock [120 Bpm]')
track.set_length(175)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Medium Rock [132 Bpm]')
track.set_length(158)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Fast Rock [160 Bpm]')
track.set_length(162)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Rock Ballad [71 Bpm]')
track.set_length(238)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Medium Rock Balad [106 Bpm]')
track.set_length(195)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Funk Rock [108 Bpm]')
track.set_length(191)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Latin Rock [122 Bpm]')
track.set_length(175)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Hard Rock Shuffle [132 Bpm]')
track.set_length(158)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Medium Rock Shuffle [99 Bpm]')
track.set_length(170)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Rhythm & Blues [118 Bpm]')
track.set_length(159)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('5/4 Freak Rock [165 Bpm]')
track.set_length(140)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Rockabilly [123 Bpm]')
track.set_length(154)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Country Rock [92 Bpm]')
track.set_length(204)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/313881')
r = s.get_result()
self.assertEqual(expected, r)
def test_va_album(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('1986')
release_event.set_country('USA')
expected.append_release_event(release_event)
expected.set_format(u'LP')
label_id = expected.create_label_id()
label_id.set_label('Capitol Records, Inc.')
label_id.append_catalogue_nr('SV-12499')
expected.append_label_id(label_id)
expected.set_title('Iron Eagle - Original Motion Picture Soundtrack')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Soundtrack')
expected.set_url('http://www.musik-sammler.de/media/43567')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('One Vision')
track.set_length(240)
track_artist = expected.create_artist()
track_artist.set_name('Queen')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Iron Eagle (Never Say Die)')
track.set_length(208)
track_artist = expected.create_artist()
track_artist.set_name('King Kobra')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('These Are The Good Times')
track.set_length(225)
track_artist = expected.create_artist()
track_artist.set_name('Eric Martin')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Maniac House')
track.set_length(294)
track_artist = expected.create_artist()
track_artist.set_name('Katrina & The Waves')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Intense')
track.set_length(270)
track_artist = expected.create_artist()
track_artist.set_name('George Clinton')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Hide The Rainbow')
track.set_length(235)
track_artist = expected.create_artist()
track_artist.set_name('Dio')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title("It's Too Late")
track.set_length(186)
track_artist = expected.create_artist()
track_artist.set_name('Helix')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Road Of The Gypsy')
track.set_length(268)
track_artist = expected.create_artist()
track_artist.set_name('Adrenalin')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Love Can Make You Cry')
track.set_length(258)
track_artist = expected.create_artist()
track_artist.set_name('Urgent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('This Raging Fire')
track.set_length(246)
track_artist = expected.create_artist()
track_artist.set_name('Jon Butcher Axis')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/43567')
r = s.get_result()
self.assertEqual(expected, r)
def test_featuring_track_artist(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2008')
release_event.set_country('Deutschland')
expected.append_release_event(release_event)
expected.set_format(u'CD, Heftbeilage, Digipak')
label_id = expected.create_label_id()
label_id.set_label('Batbeliever Releases')
label_id.append_catalogue_nr('BAT 048')
expected.append_label_id(label_id)
expected.set_title('Gothic File 05')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Dark Wave')
expected.append_genre('Gothic')
expected.set_url('http://www.musik-sammler.de/media/257802')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('O Varium Fortune')
track.set_length(352)
track_artist = expected.create_artist()
track_artist.set_name('Corvus Corax')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Zaubererbruder [EP-Version]')
track.set_length(285)
track_artist = expected.create_artist()
track_artist.set_name('ASP')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Eric Fish')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Mein Gral')
track.set_length(236)
track_artist = expected.create_artist()
track_artist.set_name('Megaherz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title(u'Komm S\xfc\xdfer Tod')
track.set_length(275)
track_artist = expected.create_artist()
track_artist.set_name('Eisbrecher')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Get Some Sleep [Exclusive Version]')
track.set_length(252)
track_artist = expected.create_artist()
track_artist.set_name('Mono Inc.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Prayer Before Birth [Underwaterpilots Remix]')
track.set_length(263)
track_artist = expected.create_artist()
track_artist.set_name('Anne Clark')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Haufenweise Scheisse (XL)')
track.set_length(320)
track_artist = expected.create_artist()
track_artist.set_name(u'Grossstadtgefl\xfcster')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title(u'D\xe9cadence')
track.set_length(191)
track_artist = expected.create_artist()
track_artist.set_name('Charles De Goal')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Burning Up')
track.set_length(248)
track_artist = expected.create_artist()
track_artist.set_name('Ladytron')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Horizon [Remastered]')
track.set_length(326)
track_artist = expected.create_artist()
track_artist.set_name('Black Orchid')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Play Games')
track.set_length(212)
track_artist = expected.create_artist()
track_artist.set_name('The Rorschach Garden')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('All Ends')
track.set_length(256)
track_artist = expected.create_artist()
track_artist.set_name('Imatem')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('All About The Now')
track.set_length(289)
track_artist = expected.create_artist()
track_artist.set_name('Miserylab')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Hymn Of The Shades')
track.set_length(202)
track_artist = expected.create_artist()
track_artist.set_name('Descendants Of Cain')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Bleed')
track.set_length(244)
track_artist = expected.create_artist()
track_artist.set_name('ELA')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('Never Stop Crying')
track.set_length(254)
track_artist = expected.create_artist()
track_artist.set_name("Jennie Tebler's Out Of Oblivion")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('17')
track.set_title('Killhoney')
track.set_length(299)
track_artist = expected.create_artist()
track_artist.set_name('End Of Green')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/257802')
r = s.get_result()
self.assertEqual(expected, r)
def test_album_with_multiple_unsplit_artist_names(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2005')
release_event.set_country('USA')
expected.append_release_event(release_event)
expected.set_format(u'CD')
label_id = expected.create_label_id()
label_id.set_label('G Unit / Interscope Records')
expected.append_label_id(label_id)
expected.set_title("Get Rich Or Die Tryin' (Music From And Inspired By The Motion Picture)")
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Soundtrack')
expected.set_url('http://www.musik-sammler.de/media/154887')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title("Hustler's Ambition")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('What If')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Things Change')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Spider Loc')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent & Lloyd Banks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('You Already Know')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Lloyd Banks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent & Young Buck')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('When Death Becomes You')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('M.O.P.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Have A Party')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Mobb Deep')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent & Nate Dogg')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('We Both Think Alike')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Olivia')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title("Don't Need No Help")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Young Buck')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Get Low')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Lloyd Banks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Fake Love')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Tony Yayo')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Window Shopper')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Born Alone, Die Alone')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Lloyd Banks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('You A Shooter')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Mobb Deep')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title("I Don't Know Officer")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Lloyd Banks, Prodigy, Spider Loc & Mase')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Talk About Me')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('When It Rains It Pours')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('17')
track.set_title('Best Friend')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('18')
track.set_title("I'll Whip Ya Head Boy")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent & Young Buck')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/154887')
r = s.get_result()
self.assertEqual(expected, r)
def test_404(self):
expected = NotFoundResult()
expected.set_scraper_name(None)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/99999999999999')
r = s.get_result()
self.assertEqual(expected, r)
def test_search_scraper(self):
s = musiksammler.SearchScraper('love')
r = s.get_result()
self.assertTrue(len(r.get_items()) > 0) | Slack06/yadg | descgen/tests.py | Python | mit | 429,432 | [
"Brian",
"Dalton"
] | 33f956eb1b31950ed7027e52e76e4d63c42b1d4c49d8f2882538f99d4701c563 |
import numpy as np
from time import time
from gpaw.utilities.lapack import diagonalize, diagonalize_mr3
seed = 43
gen = np.random.RandomState(seed)
def main(i,seed=42,dtype=float):
    """Benchmark diagonalize() against diagonalize_mr3() on an (i+1)*100 matrix.

    Builds a dense N x N matrix (N = (i+1)*100) from module-level random data,
    times both LAPACK-based eigensolvers on copies of it, and asserts that
    the MR3 variant is the faster of the two.

    NOTE(review): the ``seed`` parameter is never read here — the random data
    comes from the module-level ``gen`` (seeded with 43), so passing a seed
    has no effect. Confirm whether it was meant to reseed ``gen``.
    """
    # Complex input gets a complex perturbation so dtype stays consistent.
    if (dtype==complex):
        epsilon = 0.1j
    else:
        epsilon = 0.1
    x = i + 1
    N = x*100
    print "N =",N
    # Random dense matrix, made non-symmetric below by perturbing the
    # strictly lower triangle (np.tri with k=-1).
    H0 = np.zeros((N,N),dtype=dtype) + gen.rand(*(N,N))
    H1 = H0 + epsilon*np.tri(N,N, k=-1)
    W0 = np.zeros((N))        # eigenvalue output buffer
    Z0 = np.zeros_like(H0)    # eigenvector output buffer for the MR3 variant
    # Time the plain diagonalize() on a copy (the solver works in place).
    t0 = time()
    diagonalize(H1.copy(), W0)
    t1 = time() - t0
    print dtype
    print "diagonalize", t1
    # Time the MR3 variant on an identical copy of the same matrix.
    t2 = time()
    diagonalize_mr3(H1.copy(), W0, Z0)
    t3 = time() - t2
    print "diagonalize_mr3",t3
    print "---------------"
    # diagonalize_mr3 must be faster than diagonalize
    assert(t3 < t1)
if __name__ in ['__main__', '__builtin__']:
    # Exercise matrix sizes 100..800, real input first, then complex,
    # for each size.
    for size_index in range(8):
        main(size_index, dtype=float)
        main(size_index, dtype=complex)
| qsnake/gpaw | gpaw/test/eigh_perf.py | Python | gpl-3.0 | 941 | [
"GPAW"
] | 951e141faa567cd00108a227aa5d3fa6d3eb01db3f3526ec853545514de6abf1 |
# Copyright 2000-2004 by Kevin Atkinson
# Python wrapper by Johann C. Rocholl
#
# Permission to use, copy, modify, distribute and sell these word
# lists, the associated scripts, the output created from the scripts,
# and its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appears in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation. Kevin Atkinson makes no representations
# about the suitability of this array for any purpose. It is provided
# "as is" without express or implied warranty.
SCOWL10 = set("""
a abilities ability able about above absence absolute absolutely abuse
academic accept acceptable accepted accepting accepts access
accessible accident accidental accidentally accord accorded according
accordingly accords account accounts accuracy accurate achieve
achieved achieves achieving acquire acquired acquires acquiring across
act acted acting action actions active activities activity acts actual
actually add added adding addition additional address addressed
addresses addressing adds adequate adjust administration admit admits
admitted admittedly admitting adopt adopted adopting adopts advance
advanced advances advancing advantage advantages advertise advertised
advertises advertising advice advise advised advises advising affair
affairs affect affected affecting affects afford afraid after
afternoon again against age agency ages ago agree agreed agreeing
agreement agrees ahead aid aim aimed aiming aims air alarm album
algorithm algorithms alias alive all allow allowed allowing allows
almost alone along already also alter altered altering alternate
alternative alternatively alternatives alters although altogether
always am ambiguous among amongst amount amounts amuse amused amuses
amusing an analogue analysis ancient and angle angry animal announce
announcement annoy annoyed annoying annoys annual anonymous another
answer answered answering answers any anybody anyone anyplace anything
anyway anywhere apart apologies apology apparent apparently appeal
appear appearance appeared appearing appears apple application
applications applied applies apply applying appreciate appreciated
appreciates appreciating approach appropriate approval approve
approved approves approving arbitrary are area areas argue argued
argues arguing argument arguments arise arises arithmetic arm army
around arrange arranged arrangement arrangements arranges arranging
arrive arrived arrives arriving art article articles artificial artist
as aside ask asked asking asks asleep aspect aspects assembler
assembly assistant associate associated associates associating
association assume assumed assumes assuming assumption assure assured
assures assuring at ate atmosphere attach attached attaching attack
attempt attempted attempting attempts attend attended attending
attends attention attitude attract attractive audience author
authorities authority authors automatic automatically automobile
autumn available average avoid avoided avoiding avoids awake award
aware away awful awkward back backed background backing backs
backwards bad badly balance ball ban band bank bar bars base based
bases basic basically basing basis battery be bear bearing bears
beautiful became because become becomes becoming bed been before
beforehand began begin beginning begins begun behalf behave behind
being believe believed believes believing belong belongs below benefit
benefits besides best bet bets better betting between beyond bid
bidding bids big bigger biggest bill binary bind binding binds biology
bit bite bites biting bits bitten bizarre black blame blank block blow
blue board boards boat bodies body book books boot bore borne borrow
borrowed borrowing borrows both bother bothered bothering bothers
bottle bottom bought bound box boxes boy bracket brackets branch
branches brand breach break breaking breaks bridge brief briefly
bright bring bringing brings broadcast broadcasting broadcasts broke
broken brother brought brown bucket budget buffer bug bugs build
building buildings builds built bulk bulletin buried buries bury
burying bus business busy but button buy buying buys by byte bytes
calculate calculation calculations call called calling calls came
campaign can candidate cannot capable capacity capital captain car
card cardboard cards care careful carefully cares carried carries
carry carrying case cases cassette cat catch catches catching
categories category caught cause caused causes causing cease cell cent
central century certain certainly chain chair chairman chance chances
change changed changes changing channel channels chaos chapter char
character characters charge charged charges charging chars cheap
cheaper cheapest checked checking chemical child children chip chips
choice choose chooses choosing chose chosen church circle circuit
circulation circumstance circumstances citizen city claim claimed
claiming claims clarify class classes clean clear cleared clearer
clearest clearing clearly clears clever clock close closed closely
closer closes closest closing club clue code coded codes coding coffee
cold collapse collect collected collecting collection collects college
colleges column combination combinations combine combined combines
combining come comes coming command commands comment commented
commenting comments commercial commission commitment committee common
commonly communicate communication communications community company
comparable comparatively compare compared compares comparing
comparison compatibility compatible competition compiler complain
complained complaining complains complaint complaints complete
completed completely completes completing complex complexity
complicate complicated complicates complicating component components
compose composed composes composing composition comprehensive
compromise compulsory compute computed computer computers computes
computing concept concern concerned concerning concerns conclusion
concrete condition conditions conference confident confirm confirmed
confirming confirms confuse confused confuses confusing confusion
connect connected connecting connection connections connects
consequence consequences consequently consider considerable
considerably consideration considered considering considers consist
consistency consistent consists constant constraint constraints
construct consumption contact contain contained containing contains
content contents context continually continuation continue continued
continues continuing continuous continuously contract contrary
contrast contribute contribution contributions control controlled
controlling controls convenient convention conventional conventions
conversation convert convince convinced convinces convincing cope
copied copies copy copying core corner corners correct corrected
correcting correction correctly corrects corrupt corrupted corrupting
corrupts cost costing costs could council count counted counter
counting country counts county couple course courses court cover
covered covering covers crash crashed crashes crashing crazy create
created creates creating creation creature credit crisis crisp crisps
critical criticism cross cry cs culture cumming cums cup cure curious
current currently cursor customer cut cuts cutting cycle cycles daily
damage damaged damages damaging danger dangerous dare dark data
database date dated dates dating datum day days dead deal dealing
deals dealt dear death debate decade decent decide decided decides
deciding decision decisions declare declared declares declaring
decrease dedicate dedicated dedicates dedicating deduce deem deemed
deeming deems deep deeply default define defined defines defining
definite definitely definition definitions definitive degree degrees
delay delete deleted deletes deleting deliberate deliberately deliver
delivered delivering delivers delivery demand demands democratic
demonstrate demonstration department depend depended depending depends
depth derive derived derives deriving describe described describes
describing description descriptions design designed designing designs
desirable desire desired desires desiring desk desperate despite
destroy destroyed destroying destroys detail detailed detailing
details detect detected detecting detects determine determined
determines determining develop developed developing development
develops device devices devote devoted devotes devoting dictionary did
die died dies differ difference differences different differently
difficult difficulties difficulty digit digital digits dinner direct
directed directing direction directions directly director directory
directs dirty disadvantage disagree disappear disappeared disappearing
disappears disaster disc discipline discount discourage discouraged
discourages discouraging discover discovered discovering discovers
discs discuss discussed discusses discussing discussion discussions
disk dislike display displayed displaying displays distance distant
distinct distinction distinctly distinguish distribute distributed
distributes distributing distribution district disturb disturbed
disturbing disturbs ditto divide divided divides dividing division do
document documentation documented documenting documents doe does dog
doing dollar domain done door doors double doubt doubtful down dozen
dozens drastic draw drawing drawn draws dream drew drink drive driven
driver drivers drives driving drop dropped dropping drops drove dry
dubious due dumb dump during duty dying each earlier earliest early
earth ease easier easiest easily east easy eat eaten eating eats
economic economy edge edit edited editing edition editor editors edits
education educational effect effective effectively effects efficient
effort efforts eight either elect elected electing election electric
electronic electronics elects element elements elevator else elsewhere
embarrass embarrassed embarrasses embarrassing emergency emphasis
employee empty enable enables encounter encountered encountering
encounters encourage encouraged encourages encouraging end ended
ending ends enemy engineer engineered engineering engineers enjoy
enormous enough ensure ensured ensures ensuring enter entered entering
enters entire entirely entitle entitled entitles entitling entity
entrance entries entry environment equal equally equipment equivalent
eraser err error errors escape especially essential essentially
establish established establishes establishing establishment estimate
even evened evening evenings evens event events eventually ever every
everybody everyone everything everywhere evidence exact exactly
examine examined examines examining example examples excellent except
exception exceptions excess excessive exchange exclude excluded
excludes excluding exclusive excuse execute executed executes
executing exercise exist existed existence existing exists expand
expanded expanding expands expansion expect expected expecting expects
expense expensive experience experienced experiences experiencing
experiment experimental experiments expert experts explain explained
explaining explains explanation explicit express expressed expresses
expressing expression extend extended extending extends extension
extensive extent external extra extract extreme extremely eye eyes
face facilities facility fact factor factors facts fail failed failing
fails failure fair fairly faith fall fallen falling falls false
familiar family famous fan fancy far farm farther farthest fashion
fast faster fastest fatal fate father fault faults fear feasible
feature features fed federal feed feedback feeding feeds feel feeling
feels feet fell felt few fewer fewest field fields fight figure
figures file filed files filing fill filled filling fills film final
finally financial find finding finds fine finger fingers finish
finished finishes finishing finite fire firm firmly first firstly
fiscal fish fishes fit fits fitted fitting five fix fixed fixes fixing
flag flash flashed flashes flashing flat flew flexible flied flies
flight float floated floating floats floor flow flown fly flying folk
folks follow followed following follows food foot for force forced
forces forcing foreign forever forget forgets forgetting forgot
forgotten form formal format formed former forming forms forth
forthcoming fortunately fortune forward found four fourth fraction
frame free freedom freely french frequent frequently fresh friend
friendly friends fries from front fry full fully fun function
functions fund fundamental fundamentally funds funny further furthest
future gain gained gaining gains game games gap garbage garden gas
gasoline gather gave general generally generate generated generates
generating generation genuine get gets getting girl give given gives
giving glad glass global go goes going gone good goods got gotten
government governor gradually graduate grand grands grant granted
granting grants graph graphic graphics grateful grave great greater
greatest greatly green grew grind grinding grinds gross grosses ground
grounds group groups grow growing grown grows growth guarantee
guaranteed guaranteeing guarantees guard guess guessed guesses
guessing guide gun guy habit habits hack had hair half hall hand
handed handing handle handled handles handling hands handy hang hanged
hanging hangs happen happened happening happens happily happy hard
harder hardest hardly hardware harm harmful harmless has hat hate have
having he head headed header heading heads health healthy hear heard
hearing hears heart heat heavily heavy held hell hello help helped
helpful helping helps hence her here hereby herself hid hidden hide
hides hiding high higher highest highly hill him himself hint hints
his historical history hit hits hitting hold holding holds hole holes
holiday holidays home honest hope hoped hopefully hopes hoping
horrible horse horses hospital host hot hotel hour hours house how
however huge human hundred hundreds hung hunt hurry husband ice idea
ideal ideas identical identify identity if ignore ignored ignores
ignoring ill illegal image images imagination imagine immediate
immediately impact implement implemented implementing implements
implication implications implied implies imply implying importance
important importantly impose imposed imposes imposing impossible
impression improve improved improvement improvements improves
improving in inability inadequate inch inches incident incidentally
incline inclined inclines inclining include included includes
including income incompatible incomplete inconsistent inconvenience
incorrect increase increased increases increasing indeed independent
independently index indicate indicates indication individual
individually individuals industrial industry inevitably inferior
infinite influence info inform information informed informing informs
initial initially initials inner innocent input inputs inputted
inputting insert inserted inserting inserts inside insist insisted
insisting insists install installed installing installs instance
instant instantly instead institution institutions instruction
instructions insurance integer integers integral intelligence
intelligent intend intended intending intends intention interact
interest interested interesting interests interface internal
international interpret interpretation interpreted interpreting
interprets interval intervals intervention into introduce introduced
introduces introducing introduction invalid invariably invent invented
inventing invents investigate invisible invitation invite invited
invites inviting involve involved involves involving irrelevant
irritate irritated irritates irritating is isolate isolated isolates
isolating issue issued issues issuing it item items its itself job
jobs join joined joining joins joint joke joy judge jump jumps junk
just justification justified justifies justify justifying keen keep
keeping keeps kept key keyboard keys kid kill killed killing kills
kind kindly kinds king knew knock knocked knocking knocks know knowing
knowledge known knows label labels laboratory lack lacked lacking
lacks ladies lady lain land landed landing lands language languages
large largely larger largest last lasts late later latest latter law
laws lay layout lazy leach lead leaded leader leading leads leaf learn
learning learns least leave leaved leaves leaving lecture lectures led
left leg legal legally legs lend length less lesser lesson lessons let
lets letter letters letting level levels liable libraries library lie
lied lies life lifetime lift light lights like liked likely likes
likewise liking limit limited limiting limits line linear lines link
linked linking links list listed listen listing lists literally
literature little live lived lives living load loaded loading loads
loan local location locations lock locked locking locks log logged
logging logic logical logs long longer longest look looked looking
looks loop loose lorry lose loses losing loss lost lot lots loudly
love low lower lowest luck lucky lunch lying machine machines mad made
magic magnetic magnitude mail main mainly maintain maintained
maintaining maintains major majority make makes making man manage
managed manager manages managing manipulation manner manual manuals
many map march mark marked market marking marks marriage marry mass
massive master match matches material materials mathematical
mathematics matter matters maximum may maybe me mean meaning
meaningful meaningless meanings means meant measure measured measures
measuring mechanic mechanics mechanism media medical medium mediums
meet meeting meetings meets member members membership memory men
mention mentioned mentioning mentions mere merely merit merits mess
message messages messy met metal method methods middle midnight might
mile miles military million millions mind minded minding minds mine
minimal minimum minor minority minute minutes mislead misleading
misleads misled miss missed misses missing mistake mistaken mistakes
mistaking mistook misunderstand misunderstanding misunderstands
misunderstood misuse mix mixed mixes mixing mod mode model models
modern modified modifies modify modifying moment money monitor month
months moral more morning mornings most mostly mother motion mouth
move moved movement movements moves movie moving much multiple music
must my myself mysterious naive name named namely names naming nasty
nation national natural naturally nature naughty near nearby nearer
nearest nearly necessarily necessary necessity neck need needed
needing needs negative neither nervous net network networks never
nevertheless new news next nice nicer nicest night nine no nobody
noise noisy none nonsense nor normal normally north not note noted
notes nothing notice noticed notices noticing notify noting novel now
nowadays nowhere numb number numbers numbest numerical numerous obey
object objected objecting objection objections objects obscure
observation observe observed observes observing obtain obtained
obtaining obtains obvious obviously occasion occasional occasionally
occasions occupied occupies occupy occupying occur occurred occurring
occurs odd odds of off offer offered offering offers office officer
offices official often oh oil old older oldest omit omits omitted
omitting on once one ones only onto open opened opening opens operate
operated operates operating operation operations operator operators
opinion opinions opportunities opportunity oppose opposed opposes
opposing opposite opposition option optional options or order ordered
ordering orders ordinary origin original originally other others
otherwise ought our ours ourselves out outer output outside over
overall owe owed owes owing own owner owners pack package packages
packet page pages paid pain painful pair pairs paper papers paragraph
parallel parent park part partial partially particular particularly
parties partly parts party pass passed passes passing past patch path
patient pattern patterns pause pay payed paying pays peace peak
peculiar pen people per perfect perfectly perform performance
performed performing performs perhaps period permanent permanently
permission permit permits permitted permitting person personal
personally persons persuade persuaded persuades persuading petrol
phase phenomenon philosophy phone phrase phrases physical pi pick
picked picking picks picture pictures piece pieces pile pint pipe
place placed places placing plain plan plane planet planned planning
plans plant plastic play played playing plays plea pleasant please
pleased pleases pleasing plenty plot plots plug plus pocket poem poet
point pointed pointing pointless points police policies policy
political poll pool poor pop popular population port position
positions positive possibilities possibility possible possibly post
posted posting postmaster posts potential potentially pound pounds
power powerful powers practical practically precise precisely prefer
preferable preferably preference preferred preferring prefers
preparation prepare prepared prepares preparing presence present
presented presenting presents preserve president press pressed presses
pressing pressure presumably presume pretty prevent prevented
preventing prevents previous previously price prices primary prime
primitive principle principles print printed printer printers printing
printout prints prior private probably problem problems procedure
process processed processes processing processor processors produce
produced produces producing product production products professional
programmer programmers progress project projects promise promised
promises promising prompt promptly prone proof proper properly
properties property proportion proposal propose proposed proposes
proposing prospect protect protected protecting protection protects
protest prove proved proves provide provided provides providing
proving public publication publicity publicly publish published
publishes publishing pull pulled pulling pulls punctuation puncture
purchase pure purely purpose purposes push pushed pushes pushing put
puts putt putted putting putts qualified qualifies qualify qualifying
quality quantities quantity quarter question questions queue quick
quicker quickest quickly quiet quietly quit quite quits quitting quote
quoted quotes quoting race radio rain raise raised raises raising ran
random randomly range rapid rapidly rare rarely rate rates rather raw
re reach reached reaches reaching react reaction read readable reader
readers readily reading reads ready real reality really reason
reasonable reasonably reasons recall receive received receives
receiving recent recently reception recognition recommend
recommendation recommended recommending recommends record recorded
recording records recover recovered recovering recovers red reduce
reduced reduces reducing reduction redundant refer reference
references referred referring refers reflect reflected reflecting
reflection reflects refuse refused refuses refusing regard regarded
regarding regardless regards region register registered registering
registers regret regular regularly regulation regulations reject
rejected rejecting rejects relate related relates relating relation
relationship relative relatively release released releases releasing
relevance relevant reliable religion religious reluctant rely remain
remained remaining remains remark remarks remember remembered
remembering remembers remind reminded reminding reminds remote
remotely removal remove removed removes removing repair repeat
repeated repeatedly repeating repeats replace replaced replacement
replaces replacing replied replies reply replying report reported
reporting reports represent representation representative represented
representing represents reproduce request requested requesting
requests require required requirement requirements requires requiring
research reserve reserved reserves reserving resident resolution
resort resource resourced resources resourcing respect respectively
respects respond response responses responsibility responsible rest
restart restore restored restores restoring restrict restricted
restricting restricts result resulted resulting results retain return
returned returning returns reveal revealed revealing reveals reverse
review rewrite rid ridding ride ridiculous rids right rights ring rise
risk river road role roll room rooms root rough roughly round route
routine row rubber rubbish rule rules run running runs rush sad sadly
safe safely safer safest safety said saint sake sale sales same sample
sat satisfied satisfies satisfy satisfying save saved saves saving saw
say saying says scale scan scene scheme school schools science
sciences scientific score scores scrap scratch screen screens script
search searched searches searching season second secondary secondly
seconds secret secretary section sections secure security see seeing
seek seeking seeks seem seemed seeming seems seen sees select selected
selecting selection selects self sell selling sells seminar send
sending sends senior sense sensible sensibly sensitive sent sentence
sentences separate separately sequence sequences serial series serious
seriously serve served server serves service services serving session
sessions set sets setting settle settled settles settling seven
several severe severely sex shall shame shape share shared shares
sharing sharp she sheet shelf shell shift ship shoot shop shopped
shopping shops short shortage shorter shortest shortly should show
showed showing shown shows shut shuts shutting side sides sight sign
signal signals signed significance significant significantly signing
signs silly similar similarly simple simpler simplest simply
simultaneous simultaneously since sincerely single sit site sites sits
sitting situation situations six size sizes skill skills sleep slight
slightly slip slow slower slowest slowly small smaller smallest smile
smooth so social society soft software sold solely solid solution
solutions solve solved solves solving some somebody somehow someone
someplace something sometime sometimes somewhat somewhere son soon
sooner soonest sophisticate sophisticated sophisticates sophisticating
sorry sort sorted sorting sorts sought sound sounded sounding sounds
source sources south southern space spaces spare speak speaker
speakers speaking speaks special specially specific specifically
specified specifies specify specifying speech speed spell spelling
spells spend spending spends spent spirit spite split splits splitting
spoke spoken spot spots spotted spotting spread spreading spreads
spring square stable staff stage stages stand standard standards
standing stands start started starting starts state stated statement
statements states stating station stations statistic statistical
statistics status stay stayed staying stays steal step stick sticking
sticks still stock stone stones stood stop stopped stopping stops
storage store stored stores storing straight straightforward strange
strategy stream street strength strict strictly strike strikes
striking string strings strong strongly struck structure structures
stuck student students studied studies study studying stuff stupid
style subject subjects submit submits submitted submitting subsequent
subset substantial substitute subtle succeed success successful
successfully such sudden suddenly suffer suffered suffering suffers
suffice sufficient sufficiently sugar suggest suggested suggesting
suggestion suggestions suggests suit suitable suitably suited suiting
suits sum summary summer sun superior supervisor supplied supplies
supply supplying support supported supporting supports suppose
supposed supposedly supposes supposing sure surely surface surprise
surprised surprises surprising survey survive survived survives
surviving suspect suspected suspecting suspects suspend suspended
suspending suspends suspicion switch switched switches switching
symbol symbols syntax system systems table tables take taken takes
taking talk talked talking talks tank tanks tape tapes target task
tasks taste taught tax tea teach teacher teaches teaching team
technical technique techniques technology tedious teeth telephone
television tell telling tells temperature temporarily temporary ten
tend tendency tends term terminal terminals terminology terms terribly
test tested testing tests text than thank thanks that the their them
themselves then theoretical theory there thereby therefore these they
thin thing things think thinking thinks third this thoroughly those
though thought thoughts thousand thousands threat three threw through
throughout throw throwing thrown throws thus ticket tickets tie tied
ties tight till time timed times timing tin title titles to today
together token told tomorrow tonight too took tooth top topic topics
total totally touch touched touches touching toward towards town trace
track tracks traditional traffic train trained training trains
transfer transferred transferring transfers translate translated
translates translating translation transport trap trapped trapping
traps trash travel treat treated treating treatment treats tree trees
trial trick tried tries trip trivial trouble truck true truly trunk
trust trusted trusting trusts truth try trying tune turn turned
turning turns twelve twenty twice two tying type typed types typical
typing ugly ultimate ultimately unable unacceptable unaware uncertain
unclear under undergraduate undergraduates underneath understand
understanding understands understood unfortunate unfortunately unhappy
uniform unique unit unite units universal universities university
unknown unless unlike unlikely unlimited unnecessarily unnecessary
unpleasant unreasonable unsuitable until unusual unwanted up update
updated updates updating upon upper upset upsets upsetting upwards us
usage use used useful useless user users uses using usual usually
utility utterly vacation vacations vague vaguely valid validity
valuable value values van variable variables variation varied varies
variety various vary varying vast vastly vector version versions very
via vice video view views virtually virtue visible vision visit vital
voice volume vote votes wait waited waiting waits walk walked walking
walks wall walls want wanted wanting wants war warm warn warned
warning warns was wash waste wasted wastes wasting watch watched
watches watching water way ways we weapon wear wearing wears weather
week weekend weeks weight weird welcome welcomed welcomes welcoming
well went were west western what whatever whatsoever wheel wheels when
whenever where whereas whereby wherever whether which while whilst
white who whoever whole whom whose why wide widely wider widespread
widest wife wild will willed willing wills win wind window windows
wine winning wins winter wire wise wish wished wishes wishing with
withdraw within without woman women won wonder wondered wonderful
wondering wonders wooden word worded wording words wore work worked
worker workers working works world worn worried worries worry worrying
worse worst worth worthwhile worthy would write writer writes writing
written wrong wrote year years yellow yes yesterday yet you young your
yours yourself zero
""".split())
SCOWL20 = set("""
aardvark abandon abandoned abandoning abandons abbreviate abbreviated
abbreviates abbreviating abbreviation abbreviations abide abnormal
abnormally abolish abolished abolishes abolishing abolition abort
aborted aborting abortion aborts abroad absent absorb absorbed
absorbing absorbs abstract abstraction absurd abused abuses abusing
abusive abysmal academics accelerate accent accents acceptance
accessed accesses accessing accidents accommodate accommodation
accompanied accompanies accompany accompanying accomplish accomplished
accomplishes accomplishing accordance accountant accountants accounted
accounting accumulate accumulated accumulates accumulating accurately
accusation accusations accuse accused accuses accusing accustom
accustomed accustoming accustoms ace achievement achievements acid
acknowledge acknowledged acknowledges acknowledging acorn acoustic
acquaintance acquisition acronym acronyms activate activated activates
activating actively actor actors acute adapt adaptation adapted
adapting adapts addict addicted addicting addictive addicts
additionally additions adequately adhere adhered adheres adhering
adjacent adjective adjusted adjusting adjustment adjustments adjusts
administer administered administering administers administrative
admirable admiration admire admission adoption adult adults
advantageous advent adventure adventures adventurous adverse adversely
advert advertisement advertisements adverts advisable adviser advisers
advisory advocate advocated advocates advocating aerial aesthetic
aesthetically affection aforementioned afternoons aged agenda agent
agents aggressive agony agreements agricultural aided aiding aids
aircraft airport akin alarmed alarming alarms alas albeit albums
alcohol alcoholic alert algebra algebraic aliases alien aliens align
aligned aligning alignment aligns alike allegation allegations allege
alleged allegedly alleges alleging allergic alleviate alliance allies
allocate allocated allocates allocating allocation allocations
allowable allowance allowances ally alongside aloud alpha alphabet
alphabetic alphabetical alteration alterations amateur amaze amazed
amazes amazing amazingly ambassador amber ambient ambiguities
ambiguity ambitious amend amended amending amendment amends amp ample
amplifier amusement anagram analogous analogy analyst anarchy anatomy
ancestor ancestors anecdote anecdotes angel angels anger angles
anguish animals anniversary announced announcements announces
announcing annoyance annually anomalies anomaly anorak anoraks
anthology anticipate anticipated anticipates anticipating anticipation
antidote antique antisocial anxious anyhow apathetic apathy apostrophe
appalled appalling appallingly apparatus apparatuses appealed
appealing appeals appearances append appended appending appendix
appends applause applicable applicant applicants appoint appointed
appointing appointment appointments appoints appraisal appreciation
approached approaches approaching appropriately approximate
approximately approximation apt arbitrarily arc arcade arcane arch
archaic architecture archive archived archives archiving arena
arguable arguably arisen arising armed arming arms arose array arrays
arrest arrested arresting arrests arrival arrogance arrogant arrow
arrows artificially artistic artists arts ascend ascended ascending
ascends ash ashamed ashcan ashes assault assemble assembled assembles
assembling assert asserted asserting assertion asserts assess assessed
assesses assessing assessment asset assets assign assigned assigning
assignment assignments assigns assist assistance assisted assisting
assists associations assort assorted assorting assorts assumptions
asterisk asterisks astronomer astronomers astronomy asynchronous
atheism atheist atheists atlas atmospheric atom atomic atoms
atrocities atrocity attachment attacked attacking attacks attain
attendance attendant attentions attitudes attorney attorneys attracted
attracting attraction attracts attribute attributed attributes
attributing audible audiences audio aunt authentic autobiography
automate automated automates automating automobiles availability await
awaited awaiting awaits awarded awarding awards awareness awfully axes
axiom axioms axis babies baby backbone backgrounds backlog backspace
backward bacteria bacterium badge baffle baffled baffles baffling bag
baggage bags bake baked bakes baking balanced balances balancing
ballet ballot balls banal banana bananas bands bandwagon bandwidth
bang bankrupt banks banned banner banning bans bare barely bargain
bark barked barking barks baroque barred barrel barrier barriers
barring barrister barristers basement bash bashed bashes bashing
basics basket bass basses bastard bastards bat batch bath bathroom
baths batteries battle baud bay beach beam bean beans beard bearded
bearding beards beast beasts beat beaten beating beats beautifully
beauty bedroom beds beef beer beers beg beginner beginners behaved
behaves behaving beings belief beliefs believable believer believers
bell bells belonged belonging beloved belt bench bend bending bends
beneath beneficial bent beside beta beware bias biased biases biasing
bible biblical bicycle bicycles bigot bigoted bigotry billfold billion
billions bills bin biochemistry biography biological biologist
biologists bird birds birth birthday biscuit biscuits bishop bitmap
bitter blackboard blackmail blacks blade blades blamed blames blaming
blanket blanks blast blasted blasting blasts blatant blatantly bless
blessed blesses blessing blew blind blindly blink bliss blob blocked
blocking blocks blood bloody blowing blown blows blues blurb boats bob
bobs bog bogged bogging boggle boggles bogs bogus boil boiled boiling
boils bold bolt bomb bombed bombing bombs bond bone bones bonus booked
booking booklet bookshop bookshops bookstore boom boost boots border
borderline bored boredom bores boring born boss bottles bounce
boundaries boundary bounds bout bow bowl boys bracketed bracketing
brain brains brake brakes branded branding brands brass brave bread
breakdown breakfast breath breathe breathed breathes breathing bred
breed breeding breeds breeze brethren brick bricks bridges brigade
brighter brightest brightly brightness brilliant brilliantly broad
broadly brothers browse browsed browses browsing brush brutal bubble
buck bucks buffered buffering buffers bugger buggers bulb bulbs bull
bullet bullets bump bunch bundle burden bureaucracy burn burned
burning burns burnt burst bursting bursts buses bush businesses buss
bust butter buttons buyer buyers bye bypass cabbage cabinet cable
cabled cables cabling cafe caffeine cage cake cakes calculated
calculates calculating calculator calculus calendar caller calm cam
camera cameras camp campaigned campaigning campaigns camps campus
cancel cancels cancer candidates canonical cans cant cap capabilities
capability capitalism capitalist capitals caps capture captured
captures capturing carbon cared career careers careless caring carpet
carriage carrier carrot carrots cars cartoon cartoons cartridge
cartridges cased cash casing cassettes cast casting castle casts
casual catastrophic categorically cater catered catering caters
cathedral catholic cats cattle causal causality caution cave caveat
ceased ceases ceasing ceiling celebrate celebrated celebrates
celebrating celebration cells cellular censor censored censoring
censors censorship centrally centuries ceremony certainty certificate
chains chairs chalk challenge challenged challenges challenging
chamber champagne champion chancellor changeover chaotic chap chapel
chaps chapters characteristic characteristics charitable charities
charity charm charmed charming charms chart charter charts chase
chased chases chasing chat chats chatted chatting cheaply cheat
cheated cheating cheats cheek cheer cheerful cheers cheese chemicals
chemist chemistry chemists chess chest chestnut chew chewed chewing
chews chicken chickens chief childhood childish chocolate choices
choir chop chopped chopping chops choral chord chorus chuck chucked
chucking chucks chunk chunks churches cider cigarette cinema circa
circles circuitry circuits circular circulate circulated circulates
circulating cite cited cites cities citing citizens civil civilian
clarification clarified clarifies clarifying clarity clash clashes
classed classic classical classics classification classified
classifies classify classifying classing clause clauses cleaned
cleaner cleaners cleanest cleaning cleanly cleans clearance cleverer
cleverest cliche click client clients cliff climate climb climbed
climbing climbs clinic clinical clip clipped clipping clips clique
clocks clog clone clones closet closure cloth clothe clothed clothes
clothing cloud clouds clubs clues clumsy cluster clusters coach coal
coarse coast coat coats cobbler cobblers coherent coin coincide
coincidence coined coining coins coke collaboration collapsed
collapses collapsing collar collate collated collates collating
colleague colleagues collections collective colon colony columns
combat comedy comfort comfortable comfortably comic comics comma
commandment commandments commas commence commentary commentator
commentators commercially commissioned commissioning commissions
commit commitments commits committed committees committing commodity
commons communal communicated communicates communicating communism
communist communists communities compact companies companion
comparative comparisons compassion compel compelled compelling compels
compensate compensation compete competed competence competent competes
competing competitive competitor competitors compilation compile
compiled compilers compiles compiling complacent complement
complementary completeness completion complication complications
compliment comply composer composers composite compound comprehend
comprehensible comprehension compress compressed compresses
compressing compression comprise comprised comprises comprising
compulsion computation computational con concatenate concatenated
concatenates concatenating conceal concealed concealing conceals
concede conceivable conceivably conceive conceived conceives
conceiving concentrate concentrated concentrates concentrating
concentration conception concepts conceptual concert concerto concerts
concise conclude concluded concludes concluding conclusions concur
concurrently condemn condemnation condemned condemning condemns
condense condensed condenses condensing conditional conditioned
conditioning condom condone conduct conducted conducting conductor
conducts conferences confess confidence confidential confidentiality
configuration configurations configure configured configures
configuring confine confined confines confining confirmation conflict
conflicted conflicting conflicts conform confront confronted
confronting confronts congest congested congesting congestion congests
congratulate congratulations conjecture conjunction connector
connotation connotations conscience conscious consciously
consciousness consecutive consensus consent consented consenting
consents consequent conservation conservative conservatives
considerate considerations consisted consistently consisting
consolation console conspicuous conspiracy constantly constants
constituency constituent constituents constitute constitutes
constitution constitutional constrain constrained constraining
constrains constructed constructing construction constructions
constructive constructs consult consultancy consultant consultants
consultation consulted consulting consults consume consumed consumer
consumes consuming contacted contacting contacts container contemplate
contemplated contemplates contemplating contemporary contempt contend
contention contentious contest contexts continent continental
continual continuations continuity continuum contour contraception
contracted contracting contracts contradict contradicted contradicting
contradiction contradictory contradicts contravention contributed
contributes contributing contributor contributors contrive contrived
contrives contriving controller controllers controversial controversy
convenience conveniently conversations converse conversely conversion
conversions converted converter converting converts convey convict
convicted convicting conviction convictions convicts convincingly cook
cooked cookie cookies cooking cooks cool cooled cooling cools
cooperate cooperation coordinate coordinates coordination coped copes
coping copper copyright corn corporate corporation corpse corpses
corrections correlate correlation correspond corresponded
correspondence correspondent corresponding corresponds corridor
corruption cosmic cosmology costly cotton cough councils counsel
counsels counterexample counterpart counterparts countless countries
countryside coupled couples coupling courage courier courtesy courts
cousin coverage cow cows crack cracked cracking cracks craft cramp
cramped cramping cramps crap crass crawl crawled crawling crawls cream
creative creator creatures credibility credible credits creed creep
crew cricket cried cries crime crimes criminal criminals criteria
criterion critic criticisms critics crop crops crossed crosses
crossing crossroad crossroads crossword crowd crowded crowding crowds
crown crucial crude cruel cruelty cruise cruised cruises cruising
crunch crunched crunches crunching crush crushed crushes crushing
crying cryptic crystal crystals cube cubic cuckoo cuddly cue culprit
cult cultural cultures cumbersome cumulative cunning cupboard cups
cured cures curing curiosity curiously curly currency curriculum curry
curse curtain curtains curve curves custard custom customary customers
customs cute cycled cycling cyclist cyclists cylinder cynic cynical
daft damn damnation damned damning damns damp dance danced dances
dancing dangerously dangers dared dares daring darkness darling dash
dashed dashes dashing databases daughter dawn daylight daytime
deadline deadly deaf dealer dealers deaths debatable debated debates
debating debt debug debugged debugger debugging debugs decades decay
decimal deck declaration declarations decline declined declines
declining decode decoded decodes decoding decreased decreases
decreasing deduced deduces deducing deduction deductions deed deeds
deeper deepest defaults defeat defeated defeating defeats defect
defective defects defend defended defending defends deficiencies
deficiency defy degenerate degradation degrade degraded degrades
degrading deity delayed delaying delays deletion delicate delicious
delight delighted delightful delighting delights delimiters delta
delusion demanded demanding demented demise democracy democratically
demolish demolished demolishes demolishing demonstrated demonstrates
demonstrating demonstrations denied denies denominator denote denotes
dense density dentist deny denying departmental departments departure
dependence deposit depress depressed depresses depressing depression
deprive deprived deprives depriving depths deputy derange deranged
deranges deranging derivative derogatory descend descended descending
descends descriptive desert deserted deserting deserts deserve
deserved deserves deserving designate designated designates
designating designer designers desktop despair desperately despise
destination destine destined destines destining destruction
destructive detach detached detaches detaching detectable detection
detective detector deter determination deterrent detract devastate
devastated devastates devastating developer developers developments
deviation devil devious devise devised devises devising devoid
diagnosis diagnostic diagnostics diagonal diagram diagrams dial
dialect dialects dials diameter diary dice dictate dictator
dictatorship dictionaries diesel diet differed differential
differentiate differing differs dig digest digging dignity digs
dilemma dim dimension dimensional dimensions dine dined diner dines
dining dip diplomatic dire directive directives directories directors
dirt disable disabled disables disabling disadvantages disagreed
disagreeing disagreement disagrees disappoint disappointed
disappointing disappointment disappoints disasters disastrous discard
discarded discarding discards discharge disciplinary disclaimer disco
disconnect disconnected disconnecting disconnects discontinue
discontinued discontinues discontinuing discounts discoveries
discovery discrepancy discrete discretion discriminate discriminated
discriminates discriminating discrimination disease diseases disguise
disguised disguises disguising disgust disgusted disgusting disgusts
dish dishes dishonest disliked dislikes disliking dismal dismiss
dismissed dismisses dismissing disorder disposable disposal dispose
disposed disposes disposing disposition dispute disregard disrupt
disruption dissertation dissimilar distances distasteful distinctions
distinctive distinguished distinguishes distinguishing distort
distorted distorting distortion distorts distract distracted
distracting distracts distress distressed distresses distressing
disturbance ditch dive dived diverse diversity divert diverted
diverting diverts dives divine diving divisions divorce doctor doctors
doctrine documentary dodge dogma dogs dole dollars domestic dominant
dominate dominated dominates dominating don donate donated donates
donating donation donations dons doom doomed dooming dooms dose doses
dot dots dotted dotting doubled doubles doubling doubtless doubts
downhill downright downstairs downwards drag dragged dragging dragon
drags drain drained draining drains drama dramatic dramatically drank
drastically drawback drawbacks drawings dread dreaded dreadful
dreading dreads dreaming dreams dreary dress dressed dresses dressing
dried dries drift drill drinking drinks drip dripped dripping drips
drivel drown drowned drowning drowns drug drugs drum drums drunk
drunken drying dual duck ducks duff dug dull duly dummy dumped dumping
dumps dumpster duplicate duplicated duplicates duplicating duplication
duration dust dustbin dusty duties dynamic dynamically dynamics eager
eagerly eagle ear earn earned earning earns ears eastern eater
eccentric echo echoed echoes echoing ecological ecology economical
economically economics economies edges editions editorial educate
educated educates educating effectiveness efficiency efficiently egg
eggs ego egos eh eighteen eighth elaborate elderly elections electoral
electorate electrical electricity electron electronically elegant
elementary elephant elephants elevators eleven eligible eliminate
eliminated eliminates eliminating elite elitist em embarrassment embed
embedded embedding embeds emerge emerged emerges emerging eminent
eminently emit emotion emotional emotionally emotions empire empirical
employ employed employees employer employers employing employment
employs emptied empties emptying emulate emulation emulator emulators
enabled enabling enclose enclosed encloses enclosing encode encoded
encodes encoding encouragement endings endless endlessly enemies
energy enforce enforced enforces enforcing engage engaged engages
engaging engine engines enhance enhanced enhancement enhances
enhancing enjoyable enjoyed enjoying enjoyment enjoys enlarge enlarged
enlarges enlarging enlighten enlightened enlightening enlightenment
enlightens enormously entail entails enterprise entertain entertained
entertaining entertainment entertains enthusiasm enthusiastic entirety
entities envelope envelopes environmental environments envisage
envisaged envisages envisaging envy epic episode episodes equality
equals equate equation equations equilibrium equip equipped equipping
equips equivalents era erase erased erases erasing ergo erroneous
escaped escapes escaping esoteric essay essays essence establishments
estate estimated estimates estimating estimation eternal eternity
ethic ethical ethics ethnic etymology evaluate evaluated evaluates
evaluating evaluation evenly eventual everyday evident evidently evil
evils evolution evolutionary evolve evolved evolves evolving
exaggerate exaggerated exaggerates exaggerating exam examination
examiner exams exceed exceeded exceeding exceedingly exceeds excepted
excepting exceptional exceptionally excepts excessively exchanged
exchanges exchanging excite excited excitement excites exciting
exclamation exclusion exclusively excuses executable execution
executive exempt exercised exercises exercising exhaust exhausted
exhausting exhaustive exhausts exhibit exhibition exit exited exiting
exits exotic expectation expectations expedition expenditure expenses
experimentally experimentation experimented experimenting expertise
expire expired expires expiring expiry explanations explanatory
explicitly explode exploded explodes exploding exploit exploitation
exploited exploiting exploits exploration explore explored explores
exploring explosion explosions explosive exponential export expose
exposed exposes exposing exposure expressions expressway expressways
extant extensions extensively extents externally extinction extracted
extracting extraction extracts extraneous extraordinarily
extraordinary extras extremes extremist eyesight fabric faced faces
facilitate facing factories factory factual factually faculties
faculty failures faint fainter faintest fairer fairest fairness fairy
faithful fake fallacious fallacy fame familiarity families famine fans
fantasies fantastic fantasy farce fare farewell farmer farmers
fascinate fascinated fascinates fascinating fascist fashionable
fashioned fashioning fashions fat fathers fatuous faucet faulty feared
fearing fears feasibility feat featured featuring fee feeble feelings
fees fellow fellows female females feminist feminists fence fender
fenders festival fetch fever fiction fictional fiddle fiddled fiddles
fiddling fierce fifteen fifth fifty fighter fighting fights figured
figuring filmed filming films filter filtered filtering filters filthy
finals finance finances financially findings fined finer fines finest
fining fired fires firework fireworks firing firms fished fishing
fiver fizzy flagged flagging flags flame flames flaw flawed flawing
flaws fleet flesh flexibility flip flipped flipping flips flood
flooded flooding floods floors floppy flour flowed flower flowers
flowing flows fluctuation fluctuations fluent fluffy fluid flush
flushed flushes flushing flute foam focus fog fold folded folder
folders folding folds follower followers fond font fonts foods fool
fooled fooling foolish fools football footnote footnotes forbade
forbid forbidden forbidding forbids forcibly forecast forecasting
forecasts foreigner foreigners foreseeable forest forests forgave
forgive forgiven forgives forgiving fork formally formation formats
formatted formatting formerly formula formulation fortnight fortunate
forty forum forwarded forwarding forwards fossil fought foul
foundation foundations founded founding founds fountain fourteen
fractions fragile fragment fragments frames framework frank frankly
frantic fraud freak freaks freed freeing frees freeway freeways freeze
freezes freezing frequencies frequency friction fried friendship
frighten frightened frightening frightens fringe frivolous frog frogs
frown frowned frowning frowns froze frozen fruit fruits frustrate
frustrated frustrates frustrating frustration frying fudge fuel
fulfilled fulfilling fuller fullest fume fumes functional
functionality functioned functioning fundamentalist funded funding
funeral funnier funniest fur furniture furry furthermore fuse fusion
fuss fussy futile fuzzy galactic galaxy gang gaps garage garble
garbled garbles garbling gardens gasp gate gates gateway gathered
gathering gathers gay gear geared gearing gears gender gene
generations generator generators generic generous genes genetic
genetically genetics genius genocide genre gentle gentleman gentlemen
gently genuinely geographical geography geology geometry gesture
ghastly ghost giant gibberish gift gifts gig gin girlfriend girls
gladly glance glasses glean gleaned gleaning gleans globally glorious
glory glossy glove gloves glow glowed glowing glows glue gnome goal
goals goat god gods gold golden goldfish goldfishes golf goodbye
goodies goodness goody gorgeous gospel gossip govern governed
governing governments governs gown grab grabbed grabbing grabs grace
grade grades gradual graduated graduates graduating graduation
graffiti graffito grain grammar grammatical grandfather grandmother
graphical graphs grasp grass gratefully gratuitous gratuitously
gravitational gravity greasy greed greedy grid grief grim grip grips
groan grossly grouped grouping guarded guarding guards guest guests
guidance guided guideline guidelines guides guiding guilt guilty
guinea guitar gulf gullible gum guns gut guts gutter guys ha hacked
hacker hackers hacking hacks hail haircut hairs hairy halls halt
halted halting halts halve halves ham hammer handbook handful handicap
handler hangover happier happiest happiness hardback harden hardened
hardening hardens hardship hardy harmony harsh hash hassle hasten
hasty hated hates hating hatred hats havoc hay hazard hazards hazy
headache headers headline headlines heap heartily hearts heated
heating heats heaven heavens heavier heaviest heel heels height
heights helicopter helmet helpless henceforth herd heresy heritage
hero heroes heroic heroin herring herrings hesitate heterosexual
hexadecimal hey hided hideous hideously hierarchical hierarchy
highlight highlighted highlighting highlights highway highways
hilarious hills hindsight hinted hinting hip hire hired hires hiring
historian historians historic historically hitherto ho hobby hog
holder holders hollow holy homes homosexual homosexuality honestly
honesty honey honorary hook hooked hooking hooks hopeful hopeless
hopelessly horde hordes horizon horizontal horizontally horn
horrendous horrendously horribly horrid horrific horrified horrifies
horrify horrifying horror hospitals hostile hosts housed household
houses housing hugely huh hum humane humanity humans humble humbly
humorous hungry hunted hunting hunts hurt hurting hurts hut hydrogen
hyphen hypocrisy hypocrite hypocritical hypothesis hypothetical
hysterical icon icons id idealistic ideally ideals identically
identification identified identifier identifiers identifies
identifying ideological ideology idiom idiosyncratic idiot idiotic
idiots idle ignorance ignorant illegally illiterate illness illogical
illusion illustrate illustrated illustrates illustrating illustration
illustrations imaginary imaginative imagined imagines imagining
imbalance immature immense immensely imminent immoral immortal immune
impair impaired impairing impairs impend impended impending impends
imperative imperfect imperial impersonal implausible implementation
implementations implicit implicitly import imported importing imports
impractical impress impressed impresses impressing impressions
impressive imprison imprisoned imprisoning imprisons improbable
impulse inaccessible inaccuracies inaccuracy inaccurate inadvertently
inane inappropriate incapable incarnation incentive incidence
incidental incidents inclination inclusion inclusive incoherent
incoming incompetence incompetent incomprehensible inconsistencies
inconsistency inconvenienced inconveniences inconveniencing
inconvenient incorporate incorporated incorporates incorporating
incorrectly increasingly incredible incredibly increment incur
incurred incurring incurs indefensible indefinite indefinitely indent
independence indeterminate indexed indexes indexing indicated
indicating indications indicative indicator indicators indictment
indirect indirection indirectly indistinguishable induce induced
induces inducing induction indulge indulged indulges indulging
industries ineffective inefficiency inefficient inequality inertia
inevitable inexperienced infallible infamous infant infantile infect
infected infecting infection infects infelicity infer inference
inferiority infinitely infinity inflation inflexible inflict
influenced influences influencing influential informal informally
informative infrastructure infrequent infringement ingenious
ingredient ingredients inhabit inhabitant inhabitants inhabited
inhabiting inhabits inherent inherently inherit inheritance inherited
inheriting inherits inhibit inhibited inhibiting inhibition inhibits
initiate initiated initiates initiating initiative inject injure
injured injures injuries injuring injury injustice ink innocence
innovation innovative insane insect insects insecure insensitive
insertion insidious insight insignificant insistence insofar inspect
inspected inspecting inspection inspects inspiration inspire inspired
inspires inspiring installation installations instances instinct
institute instruct instructed instructing instructs instrument
instrumental instruments insufficient insult insulted insulting
insults intact intake integrate integrated integrates integrating
integration integrity intellect intellectual intense intensely
intensity intensive intent intentional intentionally intentions inter
interacted interacting interaction interactions interactive
interactively interacts intercourse interestingly interfaced
interfaces interfacing interfere interfered interference interferes
interfering interim interior intermediate intermittent internally
internals interpretations interpreter interrogate interrupt
interrupted interrupting interruption interruptions interrupts
intersection intersections intervene intervened intervenes intervening
interview interviewed interviewing interviews intimate intolerance
intrinsic intrinsically introductory intuitive invade invaded invades
invading invalidate invaluable invasion invention inventions inventor
inverse invert inverted inverting inverts invest investigated
investigates investigating investigation investigations investment
invoke invoked invokes invoking involvement ion irate iron ironic
irony irrational irrespective irresponsible irritation island islands
isolation jack jacket jackets jail jam jammed jamming jams jargon jazz
jealous jeans jellies jelly jerk jest jet jointly joints joked jokes
joking jolly journal journalist journalists journals journey judged
judges judging juice jumped jumping junction jungle junior jury
justice justifiable justifiably juvenile keeper ken kernel kettle
keyboards keyed keying keystroke keystrokes keyword keywords kick
kicked kicking kicks kidded kidding kidnap kidnapped kidnapping
kidnaps kidney kids killer kindness kingdom kings kiss kit kitchen
kits knee knees knife knight lab labs lad ladder lag lager laid lake
lamp landlord landscape lane lark laser lasers lasted lasting lately
laugh laughed laughing laughs laughter launch launched launches
launching lavatory lawn lawyer lawyers layer layers laying lays
laziness leaders leadership leaflet leaflets league leak lean leaned
leaning leans leap leather lectured lecturer lecturers lecturing
legend legendary legible legislation legitimate legitimately leisure
lemon lending lends lengths lengthy lenient lens lenses lent lesbian
lest lethal liability liaison libel liberal liberties liberty
librarian lid lifestyle lifted lifting lifts lighted lighter lightest
lighting lightly lightning lightninged lightnings likelihood limb
limbs limitation limitations lined linguistic lining linkage lion lip
lips liquid liquor lisp listened listener listening listens listings
lit literal literary literate litter lively liver livest loader loans
lobby locally locals locate located locates locating lodge logically
logo lonely loophole loops loosely lord lords lorries losses loud
louder loudest lousy loved lovely lover lovers loves loving lowered
lowering lowers loyal luckily ludicrous ludicrously luggage lump lumps
lunatic lunchtime lung lungs lurk lurked lurking lurks lust luxury
lyric lyrics machinery madness magazine magazines magical magnificent
mailbox mailed mailing mails mainframe mainframes mains mainstream
maintenance maize maker makers male males malfunction malicious
management managers mandate mandatory mangle mangled mangles mangling
mania manifestation manifestly manifesto manipulate manipulated
manipulates manipulating mankind manned manning manpower mans manually
manufacture manufactured manufacturer manufacturers manufactures
manufacturing mapped mapping maps margin marginal marginally margins
marital marker markers marketed marketing markets married marries
marrying mask masses massively masters matched matching mate
mathematically mathematician mathematicians matrices matrix mature
mayor maze meal meals meantime meanwhile measurement measurements meat
mechanical mechanisms medicine medieval megabyte megabytes melody melt
memorable memories mend mended mending mends mental mentality mentally
menu menus mercury mercy merge merged merges merging merry messed
messes messing metaphor metric metro metros mice microcomputer
microcomputers microprocessor microwave midday mighty migrate migrated
migrates migrating migration mild mildly mileage milk mill mimic
mindless mined mines minimalist mining minister ministers minorities
mint minus miracle miracles miraculous mirror mirrors miscellaneous
misdirect misdirected misdirecting misdirects miserable miserably
misery misfortune misguide misguided misguides misguiding misinterpret
misinterpreted misinterpreting misinterprets misplace misplaced
misplaces misplacing misprint misread misreading misreads misrepresent
misrepresented misrepresenting misrepresents missile missiles mission
mist mistakenly mists mixture mnemonic moan moaned moaning moans mob
mobile mock moderate moderately moderation modes modest modification
modifications module modules mole molecular molecule molecules
momentarily moments momentum monarch monitored monitoring monitors
monkey monkeys monochrome monopoly monster monsters monthly mood moon
moons morality morally morals moreover moron morons mortal mortality
mortals mothers motions motivate motivated motivates motivating
motivation motive motives motor motors motorway motorways motto mount
mountain mountains mounted mounting mounts mouse movies muck mucked
mucking mucks mud muddle muddled muddles muddling mug mugs multiples
multiplication multiplied multiplies multiply multiplying mum mumble
mummy mundane murder murdered murderer murdering murders muscle
muscles museum museums musical musician musicians mutter muttered
muttering mutters mutual mutually mysteries mysteriously mystery
mystic myth mythical mythology myths nail nailed nailing nails naked
nameless narrative narrow narrower narrowest nastier nastiest
nationally nations native natives nay neat neatly needle needles
needless needlessly negate neglect neglected neglecting neglects
negligible negotiable negotiate negotiated negotiates negotiating
negotiation negotiations nerve nerves nest nested nesting nests nets
networked networking neural neutral newcomer newcomers newer newest
newly newsletter newsletters newspaper newspapers nicely nick nicked
nicking nickname nicknames nicks nightmare nights nil noble node nodes
noises nominal nominally nominate nominated nominates nominating
nonetheless noon norm normality northern nose noses nostalgia notable
notably notation noticeable noticeably notification notified notifies
notifying notion notions notorious notwithstanding noun nouns novels
novelty novice novices nuclear nuisance null numbered numbering
numeral numerals numeric nun nuns nurse nurses nut nuts oar obeyed
obeying obeys objectionable objective obligation obligatory oblige
obliged obliges obliging obnoxious obscene obscured obscures obscuring
obscurity observations observer observers obsess obsessed obsesses
obsessing obsession obsolete obstruct obstructed obstructing obstructs
obtainable occupation occurrence occurrences ocean oddly offend
offended offender offenders offending offends offerings offhand
officers officially officials offset offsets offsetting offspring
omission omissions oneself ongoing onion onus onwards openly opera
operas operational opponent opponents oppress oppressed oppresses
oppressing oppression opt opted optic optical optimal optimistic
optimum opting optionally opts opus opuses oral orange orbit orbital
orchestra orchestral organ organic organs orient oriental orientate
orientated orientates orientating orientation oriented orienting
orients originals originate originated originates originating
originator origins orthodox outcome outcomes outcry outdated outgoing
outline outlined outlines outlining outlook outputs outrage outraged
outrageous outrages outraging outright outset outstanding outweigh
outweighs overcame overcome overcomes overcoming overdraft overdue
overflow overhead overheads overlap overload overloaded overloading
overloads overlong overlook overlooked overlooking overlooks overly
overnight overprice overpriced overprices overpricing overridden
override overrides overriding overrode overseas overtime overtone
overtones overview overwhelm overwhelmed overwhelming overwhelms
overwriting overwritten owned ownership owning owns oxygen ozone pace
pacifier packaged packaging packed packets packing packs pad padded
padding pads paged paging painfully painless pains paint painted
painting paintings paints palace pale pan panel panels panic pant
pants paperback par parade paradise paradox paragraphs parallels
parameter parameters paranoia paranoid paraphrase pardon parentheses
parenthesis parents parity parked parking parks parliament parochial
parody parrot parse parsed parses parsing participant participants
participate participated participates participating particle particles
partition partitioned partitioning partitions partner partners passage
passages passenger passengers passion passionate passive passport
password passwords paste pat patched patches patching patent pathetic
paths patience patients paused pauses pausing pavement payment
payments peaceful peaks peanut peanuts peasant peasants pedal pedant
pedantic pedantry pedants pedestrian pedestrians peer peers penalties
penalty pence pencil pended pending pends penguin pennies penny pens
peoples perceive perceived perceives perceiving percent percentage
percents perception perfection performances periodic periodically
periods peripheral peripherals permissible perpetual persecute
persecuted persecutes persecuting persist persistent personalities
personality personnel perspective persuasion perverse pet petty
pharmacies pharmacy phased phases phasing phenomena phenomenons
philosopher philosophers philosophical philosophies phoenix phoned
phones phoning photo photocopy photograph photographic photographs
photos phrased phrasing physic physically physicist physicists physics
physiology piano pie pig pigeon pigs piles pill pills pilot pin pinch
pinched pinches pinching pink pins pints pipeline pipes pit pitch
pitfall pitfalls pity pizza pizzas plague plagued plagues plaguing
plainly planes planetary planets planted planting plants plaster
plastered plastering plasters plate plates platform plausible player
players playground pleasantly pleasure plotted plotter plotting ploy
plugged plugging plugs plural pockets poems poetic poetry poets
pointer pointers poison poisoned poisoning poisons poke polar pole
policeman polish polished polishes polishing polite politeness
politically politician politicians politics polls pollution polynomial
pompous poorer poorest poorly pope popped popping pops populace
popularity populate populated populates populating populations pork
pornography portability portable ported porter porters porting portion
portions portray portrayed portraying portrays ports pose posed poses
posing positioned positioning positively possess possessed possesses
possessing possession postage postal postcard poster posters postpone
postponed postpones postponing postscript postulate pot potato
potatoes pour poured pouring pours poverty powder powered powering
practicable practicals pragmatic praise pray prayed prayer prayers
praying prays preach preached preaches preaching precaution
precautions precede preceded precedence precedent precedes preceding
precious precision predecessor predecessors predict predictable
predicted predicting prediction predictions predicts predominantly
preface preferences prefix prefixed prefixes prefixing pregnancy
pregnant prejudice prejudiced prejudices prejudicing preliminary
premature prematurely premise premises premium prerequisite prescribe
prescribed prescribes prescribing prescription presentation presently
preserved preserves preserving pressures presumed presumes presuming
pretend pretended pretending pretends pretentious prevail prevalent
prevention preview previewer priced pricing pride priest priests
primarily primes primitives prince principal principally printouts
priorities priority prison prisoner prisoners privacy privately
privilege privileged privileges privileging pro probabilities
probability probable procedures proceed proceeded proceeding
proceedings proceeds proclaim producer producers productive
productivity profession professionals professor profile profiles
profit profitable profits profound programmable progressed progresses
progressing prohibit prohibited prohibiting prohibits projected
projecting projection proliferation prolong prolonged prolonging
prolongs prominent promote promoted promotes promoting promotion
prompted prompting prompts pronoun pronounce pronounced pronounces
pronouncing pronunciation proofs propaganda prophet proportional
proportions proposals proposition proprietary prose prosecute
prosecuted prosecutes prosecuting prosecution prospective prospects
prostitute prostitutes protein protocol protocols prototype proud
provision provisional provisions provocative provoke provoked provokes
provoking proximity pseudo psychological psychologist psychologists
psychology pub publications publisher publishers pudding pulp pulse
pulses pump pumped pumping pumps pun punch punched punches punching
punish punished punishes punishing punishment puns punt punts pupil
pupils purchased purchases purchasing purge purity purple pursue
pursued pursues pursuing pursuit puzzle puzzled puzzles puzzling
python qualification qualifications qualifier qualifiers qualities
quantum quarters queen queens queries query quest questionable
questioned questioning questionnaire queued queues quibble quieter
quietest quiz quota quotas quotation quotations rabbit rabbits rabid
raced races racial racing racism racist rack racket racks radar
radiation radical radically radios radius rag rage raid raids rail
railroad rails railway rainbow rained raining rains ram rampant rang
ranged ranges ranging rank ranks rant ranted ranting rants rape rarer
rarest rash rat rated rating ratio rational rationale rationally
ratios rats rattle rattled rattles rattling rave raved raves raving
ray razor reacted reacting reactionary reactions reactor reacts
readership readings realistic realm realms rear rearrange rearranged
rearranges rearranging reasoned reasoning reassure reassured reassures
reassuring rebuild rebuilding rebuilds rebuilt recalled recalling
recalls receipt receiver recipe recipes recipient recipients reckless
reckon reckoned reckoning reckons reclaim recollection recommendations
reconcile reconsider recorder recordings recovery recreational recruit
recruited recruiting recruitment recruits rectangle rectangular
rectified rectifies rectify rectifying recursion recursive recycle
recycled recycles recycling redefine redefined redefines redefining
redirect reductions redundancy referenced referencing referendum
refine refined refines refining reflex reform reformat reformed
reforming reforms refrain refresh refreshed refreshes refreshing
refund refusal refute regain regime regional regions registration
regrets regrettably regretted regretting reign reinstate reinstated
reinstates reinstating reiterate rejection relations relationships
relatives relativity relax relaxed relaxes relaxing relay reliability
reliably relied relief relies relieve relieved relieves relieving
religions relocation reluctance reluctantly relying remainder
remarkable remarkably remarked remarking remedy reminder reminiscent
rename renamed renames renaming rend render rendered rendering renders
rending rendition rends renew renewed renewing renews rent repaired
repairing repairs repeatable repent repertoire repetition repetitive
rephrase replacements reporter representations representatives
reproduced reproduces reproducing reproduction repulsive reputation
requisite reread rereading rereads rescue researcher researchers
resemblance resemble resembled resembles resembling resent reservation
reservations reset resets resetting reside residence residents resides
resign resignation resigned resigning resigns resist resistance
resolve resolved resolves resolving resorted resorting resorts
respectable respected respecting respective responded responding
responds responsibilities restarted restarting restarts restaurant
restaurants rested resting restrain restrained restraining restrains
restriction restrictions restrictive rests resume resumed resumes
resuming resurrection retail retained retaining retains retire retired
retirement retires retiring retract retrieval retrieve retrieved
retrieves retrieving reuse revelation revenge revenue reversed
reverses reversing revert reviewed reviewing reviews revise revised
revises revising revision revolt revolted revolting revolts revolution
revolutionary reward rewards rewrites rewriting rewritten rewrote
rhetorical rhyme rhythm ribbon rice rich richer richest ridden rides
ridiculously riding rightly rigid rigorous ringed ringing rings riot
rip ripped ripping rips risen rises rising risked risking risks risky
ritual rituals rival rivals rivers roads robot robots robust rock
rocket rocks rod rode roles rolled rolling rolls roman romance
romantic roof roots rope rose rot rotate rotated rotates rotating
rotation rotten roundabout rounded rounding rounds rout routed routes
routinely routines routing routs rows royal royalties rub rude ruin
ruined ruining ruins ruled ruler rulers ruling rung rural rushed
rushes rushing rusty sabotage sack sacked sacking sacks sacred
sacrifice sacrificed sacrifices sacrificing sadden saddened saddening
saddens safeguard safeguards saga sail sailed sailing sails salaries
salary salesman salt salvation sampled samples sampling sand sandwich
sandwiches sane sang sanity sank sarcasm sarcastic satellite
satellites satire satisfaction satisfactorily satisfactory sauce
savings scaled scales scaling scandal scanned scanner scanning scans
scarce scarcely scare scared scares scarf scaring scarlet scatter
scattered scattering scatters scenario scenarios scenery scenes
schedule scheduled scheduler schedules scheduling schemes scholar
scholars scientifically scientist scientists scope scored scoring
scotch scrapped scrapping scraps scratched scratches scratching scream
screamed screaming screams screw screwed screwing screws scripts
scroll scrolled scrolling scrolls scum sea seal sealed sealing seals
seat seats seconded seconding secretaries secretly secrets sect sector
sects secular seed seemingly segment segments seldom selective
selectively selfish semantic semantics seminars sender sensation
senses sensitivity sentenced sentencing sentient sentiment sentimental
sentiments separated separates separating separation separator
separators sequel sequential seriousness sermon servant servants
servers settings seventh severity sexes sexist sexual sexuality
sexually sexy shade shades shadow shake shaken shakes shaking shaky
shallow shaped shapes shaping shareholder shareholders sharply shed
shedding sheds sheep sheer sheets shells shelter shelve shelves
shifted shifting shifts shine shined shines shining shiny shipped
shipping ships shirt shock shocked shocking shocks shoe shoes shone
shook shooting shoots shorten shortened shortening shortens shorthand
shorts shot shots shoulder shoulders shout shouted shouting shouts
shove shower showers shutdown shy sic sick sicken sickened sickening
sickens sided sideways siding sigh sighted sighting sights sigma
signature signatures silence silent silicon sillier silliest silver
similarities similarity simplicity simplified simplifies simplify
simplifying simplistic simulate simulated simulates simulating
simulation sin sincere sine sinful sing singer singers singing singles
sings singular singularly sinister sink sinking sinks sins sir sister
situate situated situates situating sixteen sixth sixties sixty sized
sizing skeleton sketch sketches skilled skin skip skipped skipping
skips skirt skull sky slag slang slash slave slaves sleeping sleeps
slept slice sliced slices slicing slid slide slides sliding slighter
slightest slim slipped slippery slipping slips slogan slope sloppy
slot slots slowed slowing slows smallish smart smash smashed smashes
smashing smell smells smelly smiled smiles smiling smith smoke smoked
smoker smokers smokes smoking smoothly smug snack snag snail sneak
sneaked sneaking sneaks sneaky sniff snobbery snow soap sober
socialism socialist socially societies sock socket sockets socks sod
soil solar soldier soldiers sole soles solicitor solicitors solo song
songs sons sordid sore soul souls soundtrack soup spaced spacing span
spares spatial specialist species specification specifications
specimen spectacular spectrum speculate speculation sped speeches
speeding speeds spellings sphere spies spigot spike spill spin spiral
spirits spiritual spit spits spitted spitting splendid spoil spoiling
spoils spokesman sponsor sponsored sponsoring sponsors spontaneous
spontaneously spoof spool sport sports spout sprang spray springing
springs sprung spur spurious spy squad squared squares squaring squash
squashed squashes squashing squeeze squeezed squeezes squeezing
stability stack stacks stagger staggered staggering staggers stair
staircase stairs stake stale stall stamp stamped stamping stamps
stance standpoint star stare stared stares staring stark starred
starring stars starter starters startle startled startles startling
starve starved starves starving static stationary steadily steady
stealing steals steam steel steep steer steered steering steers stem
stems stepped stepping steps stereo stereotype stereotypes sterile
sterling sticky stiff stimulate stimulated stimulates stimulating
stimulation stir stirred stirring stirs stocks stole stolen stomach
storm storms strain strains strangely stranger strangest strategic
strategies straw stray streams streets strengthen stress stressed
stresses stressing stretch stretched stretches stretching stringent
strip stripped stripping strips strive stroke stronger strongest
structural structured structuring struggle struggled struggles
struggling studio stuffed stuffing stuffs stumble stumbled stumbles
stumbling stun stunned stunning stuns stunt stupidity styles subjected
subjecting subjective submission subroutine subroutines subscribe
subscription subsequently subsidiary substance substances
substantially substituted substitutes substituting substitution
subtleties subtlety subtly subway subways succeeded succeeding
succeeds succession successive successor sue sued sues sufferer
sufferers suffix suicidal suicide suing suitability suite summaries
summed summing sums sundry sung sunk sunlight sunny sunrise sunshine
super superb superficial superficially superfluous superiority
supermarket supernatural supervise supervised supervises supervising
supervision supervisions supervisors supplement supplementary supplier
suppliers supporter supporters suppress suppressed suppresses
suppressing suppression supreme surfaces surgery surname surplus
surprisingly surround surrounded surrounding surroundings surrounds
surveys survival susceptible suspension suspicious suspiciously
sustain sustained sustaining sustains swallow swallowed swallowing
swallows swam swamp swamped swamping swamps swap swapped swapping
swaps swear swearing swears sweat sweating sweats sweep sweeping
sweeps sweet swept swim swimming swims swing sword swore sworn swum
symbolic symmetric symmetry sympathetic sympathies sympathy symphonies
symphony symptom symptoms syndicate syndrome synonym synonymous
synonyms syntactic syntactically synthesis systematic tab tabs tack
tacked tacking tackle tackled tackles tackling tacks tactic tactical
tactics tactless tag tail tailor tailored tailoring tailors tails
taker takers tale talent talented talents tales tall tame tangent tap
targets tasted tasteless tastes tasting taxation taxes taxi taxpayer
taxpayers teachers teams teapot tear teared tearing tears technically
technological teenage teenager teenagers telephones telescope temper
temperatures temple tempt temptation tempted tempting tempts tended
tendencies tender tending tennis tens tense tension tentative
tentatively tenth termed terminally terminate terminated terminates
terminating termination terminator terming terrible terrified
terrifies terrify terrifying territory terror terrorism terrorist
terrorists terse textbook textbooks texts textual thanked thankful
thankfully thanking thee theft theirs theme themes theological
theology theorem theorems theoretically theories therapy thereabouts
thereafter therein thereof theses thesis thick thickness thief thieve
thieves thirst thirty thorough thoroughfare thoroughfares thou thous
thread threaten threatened threatening threatens threats threshold
throat throats throughput thrust thrusting thrusts thumb thy tick
tidied tidies tidy tidying tiger tightly tile tiles timer timescale
timetable tins tiny tip tips tiresome toad toast tobacco toe toes
toggle toilet toilets tokens tolerance tolerant tolerate tolerated
tolerates tolerating toll tomato tomatoes tome ton tone tones tongue
tons tool tools topical tops tore torn torture toss tough tour tourist
tourists tower towers towns toy toys traced traces tracing tracked
tracking trade traded trades trading tradition traditionally
traditions tragedy tragic trail trailed trailing trails transaction
transactions transcript transform transformation transformed
transforming transforms transient transit transition translations
translator transmission transmissions transmit transmits transmitted
transmitter transmitters transmitting transparent transported
transporting transports trashcan travels tray tread treasure treaty
trek tremendous tremendously trend trends trendy trials triangle
triangles tribe tribes tricks tricky trifle trigger triggered
triggering triggers trilogy trinity triple tripos trips triumph trivia
trivially trolley troop troops troubles trouser trousers trucks
trumpet truncate truncated truncates truncating trunks trusty truths
tube tubes tuned tunes tuning tunnel tunnels turnround turntable tutor
tutorial twentieth twin twins twist twisted twisting twists typeset
typesets typesetting typewriter typically ugh umbrella unaffected
unambiguous unattended unavailable unavoidable unbalanced unbearable
unbelievable unbelievably unbiased uncertainty unchanged uncle
uncomfortable uncommon unconnected unconscious unconvincing undefined
underestimate undergo undergoes undergoing undergone underground
undergrounds underlain underlay underlie underlies underline
underlined underlines underlining underlying understandable undertake
undertaken undertakes undertaking undertook underwent undesirable
undid undo undocumented undoes undoing undone undoubtedly unduly
uneasy unemployed unemployment unexpected unexpectedly unexplained
unfair unfamiliar unfinished unfounded unfriendly unhealthy unhelpful
unified unifies uniformly unify unifying unimportant uninteresting
union unions uniquely united unites uniting unity universally universe
unjustified unload unlock unlocked unlocking unlocks unlucky unnatural
unobtainable unofficial unpopular unpredictable unread unreadable
unrealistic unrelated unreliable unsafe unsatisfactory unseen unset
unsolicited unsound unspecified unstable unsuccessful unsupported
unsure unsuspecting untidy unto untrue unusable unused unusually
unwelcome unwilling unwise unworkable upbringing upgrade upgraded
upgrades upgrading upright ups upside upstairs upward urban urge urged
urgency urgent urgently urges urging usable usefully usefulness
utilities utter vacancies vacancy vacuum vain valley valued valuing
valve valves vandalism vanish vanished vanishes vanishing vans
variance variant variants variations varieties vat vectors vegetable
vegetables vegetarian vehicle vehicles vein velocity vend vended
vending vendor vends venture venue venues verb verbal verbally
verbatim verbose verbs verdict verification verified verifies verify
verifying versatile verse verses versus vertical vertically vessel vet
viable vicar vicinity vicious victim victims victory viewed viewer
viewing viewpoint viewpoints vigorously vile village villages vintage
vinyl violate violation violence violent violently violin virgin
virtual virtues virus viruses visited visiting visitor visitors visits
visual visually vocabulary vocal voices void voltage volumes
voluntarily voluntary volunteer volunteered volunteering volunteers
vomit voted voter voters voting vouch vowel vulnerable wade waded
wades wading waffle wage wages wake waked wakes waking wallet wander
wandered wandering wanders ward warehouse warmed warming warms
warnings warp warped warping warps warrant warranty wars wartime wary
washed washes washing wasteful waters wave waved waves waving weak
weakness weaknesses wealth wealthy weapons weary weasel weasels wed
wedded wedding weds wee weekday weekends weekly weigh welfare wet wets
wetting whale whales whence whereupon whichever whim whistle whistles
whites wholeheartedly wholly whoop whoops wicked width wildly
willingly winded winding windowing winds wines wing wings winner
winners wipe wiped wipes wiping wired wires wiring wisdom wiser wisest
wit witch withdrawal withdrawing withdrawn withdraws withdrew witness
witnessed witnesses witnessing witty wive wives wizard woke woken wolf
wombat wonderfully wondrous wont wood woods workable workings workload
workshop workstation workstations worlds worldwide worm worms worship
worthless wound wow wrap wrapped wrapper wrappers wrapping wraps wrath
wreck wrecked wrecker wrecking wrecks wren wretched wrist writers
writings wrongly wrongs yard yards yawn yearly yeti yield yields
younger youngest yourselves youth zeros zone zones zoom
""".split())
SCOWL35 = set("""
aback abacus abacuses abandonment abate abated abates abating abbey
abbeys abbot abbots abdicate abdicated abdicates abdicating abdication
abdications abdomen abdomens abdominal abduct abducted abducting
abducts aberration aberrations abet abets abetted abetting abhor
abhorred abhorrence abhorrent abhorring abhors abides abiding abject
abjected abjecting abjects ablaze abler ables ablest ably
abnormalities abnormality aboard abode aboded abodes aboding
abominable abomination aboriginal aborigine aborigines abortions
abortive abound abounded abounding abounds abouts aboveboard abrasive
abrasives abreast abridge abridged abridges abridging abrupt abrupter
abruptest abruptly abscess abscessed abscesses abscessing abscond
absconded absconding absconds absences absented absentee absentees
absenting absents absoluter absolutes absolutest absolve absolved
absolves absolving absorbent absorbents absorption abstain abstained
abstaining abstains abstention abstentions abstinence abstracted
abstracter abstractest abstracting abstractions abstracts abstruse
absurder absurdest absurdities absurdity absurdly abundance abundances
abundant abundantly abuser abusers abyss abysses academically
academies academy accede acceded accedes acceding accelerated
accelerates accelerating acceleration accelerations accelerator
accelerators accented accenting accentuate accentuated accentuates
accentuating acceptability acceptably acceptances accessibility
accessories accessory accidentals acclaim acclaimed acclaiming
acclaims acclimate acclimated acclimates acclimating acclimatize
acclimatized acclimatizes acclimatizing accolade accoladed accolades
accolading accommodated accommodates accommodating accommodations
accompaniment accompaniments accompanist accompanists accomplice
accomplices accomplishment accomplishments accordion accordions accost
accosted accosting accosts accountability accountable accountancy
accredit accredited accrediting accredits accrue accrued accrues
accruing accumulation accumulations accuser accusers aced aces ache
ached aches achievable aching acidity acids acing acne acorns
acoustics acquaint acquaintances acquainted acquainting acquaints
acquiesce acquiesced acquiescence acquiesces acquiescing acquisitions
acquit acquits acquittal acquittals acquitted acquitting acre acreage
acreages acres acrid acrider acridest acrimonious acrimony acrobat
acrobatic acrobatics acrobats acrylic acrylics actioned actioning
actives activist activists actress actresses actualities actuality
actuary acumen acupuncture acutely acuter acutes acutest ad adage
adages adamant adaptable adaptations adaptive addendum addiction
addictions additive additives addressee addressees adept adepter
adeptest adepts adherence adherent adherents adhesion adhesive
adhesives adjectives adjoin adjoined adjoining adjoins adjourn
adjourned adjourning adjournment adjournments adjourns adjunct
adjuncts adjustable administrations administrator administrators
admirably admiral admirals admired admirer admirers admires admiring
admissible admissions admittance admonish admonished admonishes
admonishing admonition admonitions ado adobe adobes adolescence
adolescences adolescent adolescents adoptions adorable adoration adore
adored adores adoring adorn adorned adorning adornment adornments
adorns adrift adroit adroiter adroitest adroitly ads adulation
adulterate adulterated adulterates adulterating adulteration
adulteries adultery adulthood advancement advancements advantaged
advantaging adventured adventurer adventurers adventuring adverb
adverbial adverbials adverbs adversaries adversary adverser adversest
adversities adversity advertiser advertisers advisories aerials
aerodynamic aerodynamics aerosol aerosols aerospace afar affable
affabler affablest affably affectation affectations affectionate
affectionately affectioned affectioning affections affidavit
affidavits affiliate affiliated affiliates affiliating affiliation
affiliations affinities affinity affirm affirmation affirmations
affirmative affirmatives affirmed affirming affirms affix affixed
affixes affixing afflict afflicted afflicting affliction afflictions
afflicts affluence affluent affordable afforded affording affords
affront affronted affronting affronts afield aflame afloat afoot
aforesaid afresh aftereffect aftereffects afterlife afterlives
aftermath aftermaths afters afterthought afterthoughts agencies
agendas aggravate aggravated aggravates aggravating aggravation
aggravations aggregate aggregated aggregates aggregating aggression
aggressively aggressiveness aggressor aggressors aghast agile agiler
agilest agility agitate agitated agitates agitating agitation
agitations agitator agitators aglow agnostic agnosticism agnostics
agonies agreeable agreeably agriculture aground ah ahoy ahoys aide
aides ail ailed ailing ailment ailments ails aimless aimlessly
airborne aired airfield airfields airier airiest airing airline
airliner airliners airlines airmail airmailed airmailing airmails
airports airs airstrip airstrips airtight airy aisle aisled aisles
aisling ajar alarmingly alarmist alarmists alases albino albinos
alcoholics alcoholism alcohols alcove alcoves ale alerted alerter
alertest alerting alerts ales alga algae aliased aliasing alibi
alibied alibiing alibis alienate alienated alienates alienating
alienation aliened aliening alight alighted alighting alights
alignments alimony alkali alkalies alkaline allay allayed allaying
allays allegiance allegiances allegorical allegories allegory
allergies allergy alleviated alleviates alleviating alley alleys
alliances allied alligator alligators allot allotment allotments
allots allotted allotting alloy alloyed alloying alloys allude alluded
alludes alluding allure allured allures alluring allusion allusions
allying almanac almanacs almighty almond almonds alms aloft aloof
alphabeted alphabetically alphabeting alphabets alphanumeric altar
altars alterable alternated alternately alternates alternating
alternation alternator altitude altitudes alto altos altruism
altruistic amalgamate amalgamated amalgamates amalgamating
amalgamation amalgamations amass amassed amasses amassing amateurish
amateurs amazement ambassadors ambidextrous ambiguously ambition
ambitions ambitiously ambivalence ambivalent amble ambled ambles
ambling ambulance ambulances ambush ambushed ambushes ambushing amen
amenable amendments amened amening amenities amenity amens amethyst
amethysts amiable amiably amicable amicably amid amids amidst amiss
ammonia ammunition amnesia amnestied amnesties amnesty amnestying
amoeba amoebas amok amoral amorous amorphous amounted amounting ampere
amperes ampersand ampersands amphetamine amphetamines amphibian
amphibians amphibious ampler amplest amplification amplifications
amplified amplifiers amplifies amplify amplifying amplitude amply amps
amputate amputated amputates amputating amputation amputations amulet
amulets amusements amusingly anachronism anachronisms anal analgesic
analgesics analogies analysts analytic analytical analytics anarchic
anarchism anarchist anarchists anathema anatomical anatomies
ancestored ancestoring ancestral ancestries ancestry anchor anchorage
anchorages anchored anchoring anchors anchovies anchovy ancienter
ancientest ancients android androids ands anew angelic angered
angering angers angled angler anglers angling angrier angriest angrily
angst anguished anguishes anguishing angular animate animated animates
animating animation animations animosities animosity ankle ankled
ankles ankling annals annex annexation annexations annexe annexed
annexes annexing annihilate annihilated annihilates annihilating
annihilation anniversaries annotate annotated annotates annotating
annotation annotations announcer announcers annoyances annoyingly
annuals annuities annuity annul annulled annulling annulment
annulments annuls anoint anointed anointing anoints anomalous anon
anonymity anonymously answerable ant antagonism antagonisms antagonist
antagonistic antagonists ante anteater anteaters anted anteing
antelope antelopes antenna antennae antennas antes anthem anthems
anthill anthills anthologies anthrax anthropological anthropologist
anthropologists anthropology antibiotic antibiotics antibodies
antibody antic anticipations anticlimax anticlimaxes antics antidotes
antifreeze anting antipathies antipathy antiquate antiquated
antiquates antiquating antiqued antiques antiquing antiquities
antiquity antiseptic antiseptics antitheses antithesis antler antlers
antonym antonyms ants anus anuses anvil anvils anxieties anxiety
anxiously anybodies anythings anyways anywheres aorta aortas apartheid
apartment apartments ape aped aperture apertures apes apex apexes
aphorism aphorisms apiece aping aplomb apocryphal apologetic
apologetically apologetics apostle apostles apostrophes apparel
apparels apparition apparitions appease appeased appeasement
appeasements appeases appeasing appendage appendages appendices
appendicitis appendixes appetite appetites applaud applauded
applauding applauds apples appliance appliances applicability
applicator applicators appointee appointees apposite appraisals
appraise appraised appraises appraising appreciable appreciations
appreciative apprehend apprehended apprehending apprehends
apprehension apprehensions apprehensive apprentice apprenticed
apprentices apprenticeship apprenticeships apprenticing approachable
appropriated appropriates appropriating appropriation appropriations
approvals approximated approximates approximating approximations
apricot apricots apron aprons apter aptest aptitude aptitudes aptly
aquamarine aquamarines aquarium aquariums aquatic aquatics aqueduct
aqueducts arable arbiter arbiters arbitrate arbitrated arbitrates
arbitrating arbitration arbitrator arbitrators arcades arced
archaeological archaeologist archaeologists archbishop archbishops
arched archer archers archery arches archest archetypal arching
archipelago archipelagos architect architects architectural
architectures archway archways arcing arcs ardent ardently arduous
arduously arenas ares argumentative aria arias arid arider aridest
aristocracies aristocracy aristocrat aristocratic aristocrats ark arks
armadillo armadillos armament armaments armchair armchairs armies
armistice armistices armpit armpits aroma aromas aromatic aromatics
arouse aroused arouses arousing arraign arraigned arraigning arraigns
arrayed arraying arrears arrivals arrogantly arsenal arsenals arsenic
arson arterial arteries artery artful arthritic arthritics arthritis
artichoke artichokes articulate articulated articulately articulates
articulating articulation articulations artifact artifacts artifice
artifices artillery artisan artisans artistically artistry artwork
asbestos ascension ascensions ascent ascents ascertain ascertained
ascertaining ascertains ascetic ascetics ascribe ascribed ascribes
ascribing asexual ashed ashen ashing ashore ashtray ashtrays asides
askance askew asparagus aspen aspens aspersion aspersions asphalt
asphalted asphalting asphalts asphyxiate asphyxiated asphyxiates
asphyxiating asphyxiation asphyxiations aspirant aspirants aspiration
aspirations aspire aspired aspires aspirin aspiring aspirins assail
assailant assailants assailed assailing assails assassin assassinate
assassinated assassinates assassinating assassination assassinations
assassins assaulted assaulter assaulting assaults assemblers
assemblies assent assented assenting assents assertions assertive
assessments assessor assessors assimilate assimilated assimilates
assimilating assimilation assistants associative assortment
assortments assurance assurances assureds asterisked asterisking
asteroid asteroids asthma astonish astonished astonishes astonishing
astonishingly astonishment astound astounded astounding astounds
astray astride astringent astringents astrological astrology astronaut
astronauts astronomical astute astutely astuter astutest asylum
asylums asymmetry asynchronously ates atheistic athlete athletes
athletic athletics atlantes atlases atmospheres atmospherics atomics
atone atoned atonement atones atoning atrocious atrociously attache
attachments attacker attained attaining attainment attainments attains
attendances attendants attentive attentively attest attested attesting
attests attic attics attire attired attires attiring attractions
attractiveness attributable attribution attune attuned attunes
attuning auburn auction auctioned auctioneer auctioneers auctioning
auctions audacious audacity audibles audibly audios audit audited
auditing audition auditioned auditioning auditions auditor auditorium
auditoriums auditors auditory audits augment augmented augmenting
augments august auguster augustest augusts aunts aura aural auras
auspicious austere austerer austerest austerities austerity
authentically authenticate authenticated authenticates authenticating
authenticity authored authoring authoritarian authoritative
authoritatively authorship auto autobiographical autobiographies
autocracies autocracy autocrat autocratic autocrats autoed autograph
autographed autographing autographs autoing automatics automation
automobiled automobiling automotive autonomous autonomy autopsied
autopsies autopsy autopsying autos autumnal autumns auxiliaries
auxiliary avail availed availing avails avalanche avalanches avarice
avaricious avenge avenged avenges avenging avenue avenues averaged
averages averaging averse aversion aversions avert averted averting
averts aviation aviator aviators avid avider avidest avocado avocados
avoidable avoidance avow avowal avowals avowed avowing avows awaken
awakened awakening awakens awakes awaking awarer awarest aways awe
awed awes awesome awfuller awfullest awhile awing awkwarder awkwardest
awkwardly awkwardness awning awnings awoke awoken awry axed axing
axiomatic axiomatics axises axle axles aye ayes azalea azaleas azure
azures babble babbled babbles babbling babe babes babied babier
babiest baboon baboons babying babyish bachelor bachelors backbones
backer backers backfire backfired backfires backfiring backgammon
backhand backhanded backhanding backhands backings backlash backlashes
backlogged backlogging backlogs backpack backpacked backpacking
backpacks backside backslash backstage backtrack backtracked
backtracking backtracks backwoods bacon bacterial bacterias badder
baddest bade badger badgered badgering badgers badges badminton
badness bagel bagels bagged baggie baggier baggies baggiest bagging
baggy bail bailed bailing bails bait baited baiting baits baker
bakeries bakers bakery balconies balcony bald balded balder baldest
balding baldness balds bale baled bales baling balk balked balking
balks ballad ballads ballast ballasted ballasting ballasts balled
ballerina ballerinas ballets balling ballistics balloon ballooned
ballooning balloons balloted balloting ballots ballroom ballrooms balm
balmier balmiest balms balmy baloney bamboo bamboos bamboozle
bamboozled bamboozles bamboozling banaler banalest bandage bandaged
bandages bandaging bandanna bandannas banded bandied bandier bandies
bandiest banding bandit bandits bandstand bandstands bandwagons bandy
bandying banged banging bangs bani banish banished banishes banishing
banister banisters banjo banjos banked banker bankers banking banknote
banknotes bankruptcies bankruptcy bankrupted bankrupting bankrupts
bannered bannering banners banquet banqueted banqueting banquets
banter bantered bantering banters baptism baptisms barb barbarian
barbarians barbaric barbarous barbecue barbecued barbecues barbecuing
barbed barber barbered barbering barbers barbing barbiturate
barbiturates barbs bard bards bareback bared barefoot barer bares
barest bargained bargainer bargaining bargains barge barged barges
barging baring baritone baritones barley barman barn barnacle
barnacles barns barnyard barnyards barometer barometers baron barons
barrage barraged barrages barraging barrels barren barrener barrenest
barrens barrette barrettes barricade barricaded barricades barricading
barrings bartender bartenders barter bartered bartering barters
baseball baseballs baseline basements baser basest bashful basil basin
basins bask basked basketball basketballs baskets basking basks
bassoon bassoons baste basted bastes basting batched batches batching
bathe bathed bathes bathing bathrooms bathtub bathtubs baton batons
bats batsman battalion battalions batted batter battered battering
batters batting battled battlefield battlefields battles battleship
battleships battling bawdier bawdiest bawdy bawl bawled bawling bawls
bayed baying bayonet bayoneted bayoneting bayonets bayou bayous bays
bazaar bazaars beached beaches beaching beacon beacons bead beaded
beadier beadiest beading beads beady beagle beagled beagles beagling
beak beaked beaker beakers beaks beamed beaming beams beaned beaning
bearable bearer bearers bearings beater beaters beautician beauticians
beauties beautified beautifies beautifuler beautifulest beautify
beautifying beaver beavered beavering beavers beckon beckoned
beckoning beckons becomings bedbug bedbugs bedclothes bedded bedder
bedding bedlam bedlams bedridden bedrock bedrocks bedrooms bedside
bedsides bedspread bedspreads bedtime bedtimes bee beech beeches
beefed beefier beefiest beefing beefs beefy beehive beehives beeper
bees beeswax beet beetle beetled beetles beetling beets beeves befall
befallen befalling befalls befell befit befits befitted befitting
befriend befriended befriending befriends beggar beggared beggaring
beggars begged begging beginnings begrudge begrudged begrudges
begrudging begs beguile beguiled beguiles beguiling behalves behead
beheaded beheading beheads beheld behinds behold beholder beholding
beholds beige belated belatedly belch belched belches belching
belfries belfry belie belied belies belittle belittled belittles
belittling bellboy bellboys belled bellhop bellhops bellied bellies
belligerent belligerents belling bellow bellowed bellowing bellows
belly bellying belongings beloveds belows belted belting belts belying
bemoan bemoaned bemoaning bemoans bemuse bemused bemuses bemusing
benched benches benching bender benediction benedictions benefactor
benefactors beneficiaries beneficiary benefited benefiting benevolence
benevolences benevolent benighted benign bents bequeath bequeathed
bequeathing bequeaths bequest bequests bereave bereaved bereavement
bereavements bereaves bereaving bereft beret berets berried berries
berry berrying berserk berth berthed berthing berths beseech beseeches
beseeching beset besets besetting besiege besieged besieges besieging
besought bested bestial bestiality besting bestow bestowed bestowing
bestows bests betcha betray betrayal betrayals betrayed betraying
betrays betrothal betrothals bettered bettering betterment betters
bettor bettors beverage beverages bewared bewares bewaring bewilder
bewildered bewildering bewilderment bewilders bewitch bewitched
bewitches bewitching beyonds bib bibliographic bibliographies
bibliography bibs bicentennial bicentennials bicker bickered bickering
bickers bicycled bicycling bidden bide bides biding biennial biennials
bifocals bigamist bigamists bigamous bigamy bigots bike biked bikes
biking bikini bikinis bilateral bile bilingual bilinguals billboard
billboards billed billfolds billiards billing billow billowed
billowing billows binaries binder binders bindings bingo binned
binning binomial bins biochemical biodegradable biographer biographers
biographical biographies biologically bipartisan biped bipeds biplane
biplanes birch birched birches birching birdcage birdcages birded
birding birthdays birthed birthing birthmark birthmarks birthplace
birthplaces births bisect bisected bisecting bisects bisexual
bisexuals bishops bison bitch bitched bitches bitching bitings
bitterer bitterest bitterly bitterness bittersweet bittersweets
bizarres blab blabbed blabbing blabs blackberries blackberry
blackberrying blackbird blackbirds blackboards blacked blacken
blackened blackening blackens blacker blackest blackhead blackheads
blacking blackjack blackjacked blackjacking blackjacks blacklist
blacklisted blacklisting blacklists blackmailed blackmailer
blackmailers blackmailing blackmails blackout blackouts blacksmith
blacksmiths blacktop blacktopped blacktopping blacktops bladder
bladders bladed blading blameless blamer blanch blanched blanches
blanching blancmange bland blander blandest blanked blanker blankest
blanketed blanketing blankets blanking blankly blare blared blares
blaring blase blaspheme blasphemed blasphemes blasphemies blaspheming
blasphemous blasphemy blaster blaze blazed blazer blazers blazes
blazing bleach bleached bleaches bleaching bleak bleaker bleakest
blearier bleariest bleary bleat bleated bleating bleats bled bleed
bleeding bleeds blemish blemished blemishes blemishing blend blended
blending blends blesseder blessedest blessings blight blighted
blighting blights blimp blimps blinded blinder blindest blindfold
blindfolded blindfolding blindfolds blinding blindingly blindness
blinds blinked blinker blinkered blinkering blinkers blinking blinks
blip blips blissed blisses blissful blissfully blissing blister
blistered blistering blisters blithe blithely blither blithest blitz
blitzed blitzes blitzing blizzard blizzards blobbed blobbing blobs
bloc blockade blockaded blockades blockading blockage blockbuster
blockbusters blockhead blockheads blocs blond blonder blondest blonds
blooded bloodhound bloodhounds bloodied bloodier bloodies bloodiest
blooding bloods bloodshed bloodshot bloodstream bloodthirstier
bloodthirstiest bloodthirsty bloodying bloom bloomed blooming blooms
blossom blossomed blossoming blossoms blot blotch blotched blotches
blotching blots blotted blotter blotters blotting blouse bloused
blouses blousing blowout blowouts blowtorch blowtorches blubber
blubbered blubbering blubbers bludgeon bludgeoned bludgeoning
bludgeons bluebell bluebells blueberries blueberry bluebird bluebirds
blued bluegrass blueprint blueprinted blueprinting blueprints bluer
bluest bluff bluffed bluffer bluffest bluffing bluffs bluing blunder
blundered blundering blunders blunt blunted blunter bluntest blunting
bluntly bluntness blunts blur blurred blurring blurs blurt blurted
blurting blurts blush blushed blushes blushing bluster blustered
blustering blusters boa boar boarded boarder boarders boarding
boardwalk boardwalks boars boas boast boasted boastful boastfully
boasting boasts boated boating bobbed bobbin bobbing bobbins bobcat
bobcats bobsled bobsledded bobsledding bobsleds bode boded bodes
bodice bodices bodily boding bodyguard bodyguards bodywork boggled
boggling boiler boilers boisterous bolder boldest boldly boldness
bolds bologna bolster bolstered bolstering bolsters bolted bolting
bolts bombard bombarded bombarding bombardment bombardments bombards
bomber bombers bombings bondage bonded bonding bonds boned bonfire
bonfires bonier boniest boning bonnet bonnets bonuses bony boo booby
booed booing bookcase bookcases bookend bookended bookending bookends
bookings bookkeeper bookkeepers bookkeeping booklets bookmark
bookmarked bookmarking bookmarks bookshelf bookworm bookworms boomed
boomerang boomeranged boomeranging boomerangs booming booms boon boons
boor boorish boors boos boosted booster boosters boosting boosts
booted bootee bootees booth booths booties booting bootleg bootlegged
bootlegging bootlegs bootstrap booty booze bop bordered bordering
borderlines borders boringly borough boroughs bosom bosoms bossed
bosser bosses bossier bossiest bossing bossy botanical botanist
botanists botany botch botched botches botching bothersome bottled
bottleneck bottlenecks bottling bottomed bottoming bottomless bottoms
bough boughs boulder bouldered bouldering boulders boulevard
boulevards bounced bounces bouncing bounded bounding boundless
bounties bountiful bounty bouquet bouquets bourbon bourgeois
bourgeoisie boutique boutiques bouts bovine bovines bowed bowel bowels
bowing bowled bowlegged bowler bowling bowls bows boxcar boxcars boxed
boxer boxers boxing boycott boycotted boycotting boycotts boyfriend
boyfriends boyhood boyhoods boyish bra brace braced bracelet bracelets
braces bracing brackish brag braggart braggarts bragged bragging brags
braid braided braiding braids brained brainier brainiest braining
brainless brainstorm brainstormed brainstorming brainstorms brainwash
brainwashed brainwashes brainwashing brainy braise braised braises
braising braked braking bran branched branching brandied brandies
brandish brandished brandishes brandishing brandy brandying bras brash
brasher brashest brassed brasses brassier brassiere brassieres
brassiest brassing brassy brat brats bravado braved bravely braver
bravery braves bravest braving bravo bravos brawl brawled brawling
brawls brawn brawnier brawniest brawny bray brayed braying brays
brazen brazened brazening brazens brazier braziers breached breaches
breaching breaded breading breads breadth breadths breadwinner
breadwinners breakable breakables breakdowns breakfasted breakfasting
breakfasts breakneck breakpoints breakthrough breakthroughs breakwater
breakwaters breast breasted breasting breasts breather breathers
breathless breaths breathtaking breded bredes breding breeder breeders
breezed breezes breezier breeziest breezing breezy brevity brew brewed
breweries brewery brewing brews bribe bribed bribery bribes bribing
bricked bricking bricklayer bricklayers bridal bridals bride
bridegroom bridegrooms brides bridesmaid bridesmaids bridged bridging
bridle bridled bridles bridling briefcase briefcases briefed briefer
briefest briefing briefs brigades brighten brightened brightening
brightens brights brilliance brilliants brim brimmed brimming brims
brimstone brine brinier briniest brink brinks briny brisk brisked
brisker briskest brisking briskly brisks bristle bristled bristles
bristling britches brittle brittler brittlest broach broached broaches
broaching broaden broadened broadening broadens broader broadest
broads broadside broadsided broadsides broadsiding brocade brocaded
brocades brocading broccoli brochure brochures broil broiled broiler
broilers broiling broils broker brokered brokering brokers bronchitis
bronco broncos bronze bronzed bronzes bronzing brooch brooches brood
brooded brooding broods brook brooked brooking brooks broom brooms
broth brothered brotherhood brotherhoods brothering brotherly broths
brow browbeat browbeaten browbeating browbeats browned browner
brownest brownie brownier brownies browniest browning browns brows
bruise bruised bruises bruising brunch brunched brunches brunching
brunette brunettes brunt brunted brunting brunts brushed brushes
brushing brusque brusquer brusquest brutalities brutality brutally
brute brutes brutish bubbled bubbles bubblier bubbliest bubbling
bubbly bucked bucketed bucketing buckets bucking buckle buckled
buckles buckling bud budded buddies budding buddy budge budged budges
budgeted budgeting budgets budging buds buff buffalo buffaloed
buffaloes buffaloing buffed buffet buffeted buffeting buffets buffing
buffoon buffoons buffs bugged buggier buggies buggiest bugging buggy
bugle bugled bugler buglers bugles bugling builder builders bulbed
bulbing bulbous bulge bulged bulges bulging bulked bulkier bulkiest
bulking bulks bulky bulldog bulldogged bulldogging bulldogs bulldoze
bulldozed bulldozer bulldozers bulldozes bulldozing bulled bulletined
bulletining bulletins bullfight bullfighter bullfighters bullfights
bullfrog bullfrogs bullied bullier bullies bulliest bulling bullion
bulls bully bullying bum bumblebee bumblebees bummed bummer bummest
bumming bumped bumper bumpers bumpier bumpiest bumping bumps bumpy
bums bun bunched bunches bunching bundled bundles bundling bung
bungalow bungalows bungle bungled bungler bunglers bungles bungling
bunion bunions bunk bunked bunker bunkers bunking bunks bunnies bunny
buns buoy buoyancy buoyant buoyed buoying buoys burble burbled burbles
burbling burdened burdening burdens burdensome bureau bureaucracies
bureaucrat bureaucratic bureaucrats bureaus burger burgers burglar
burglaries burglars burglary burgle burial burials burlap burlier
burliest burly burner burners burnish burnished burnishes burnishing
burp burped burping burps burr burred burring burro burros burrow
burrowed burrowing burrows burrs bursar bused bushed bushel bushels
bushes bushier bushiest bushing bushy busied busier busies busiest
busily businessman businessmen businesswoman businesswomen busing
bussed busted busting bustle bustled bustles bustling busts busybodies
busybody busying butcher butchered butcheries butchering butchers
butchery butler butlered butlering butlers buts butt butte butted
buttercup buttercups buttered butterflied butterflies butterfly
butterflying buttering buttermilk butters butterscotch buttery buttes
butting buttock buttocks buttoned buttonhole buttonholed buttonholes
buttonholing buttoning buttress buttressed buttresses buttressing
butts buxom buxomer buxomest buzz buzzard buzzards buzzed buzzer
buzzers buzzes buzzing byes bygone bygones bypassed bypasses bypassing
bystander bystanders byway byways cab cabaret cabarets cabbages cabbed
cabbing cabin cabinets cabins caboose cabooses cabs cacao cacaos cache
cached caches caching cackle cackled cackles cackling cacti cactus cad
caddied caddies cadence cadences cadet cadets cafes cafeteria
cafeterias caged cages cagey cagier cagiest caging cajole cajoled
cajoles cajoling caked caking calamities calamity calcium calculators
calculi calendared calendaring calendars calf calibrate calibrated
calibrates calibrating calibration calibrations calico calicoes
callable callers calligraphy callings callous calloused callouses
callousing callow callus callused calluses callusing calmed calmer
calmest calming calmly calmness calms calorie calories calve calves
camaraderie camel camels cameo cameoed cameoing cameos camerae
camouflage camouflaged camouflages camouflaging campaigner campaigners
camped camper campers campest camping campused campuses campusing
canal canals canaries canary cancellation cancellations cancers candid
candidacies candidacy candider candidest candidly candied candies
candle candled candles candlestick candlesticks candling candy
candying cane caned canes canine canines caning canister canistered
canistering canisters canker cankered cankering cankers canned
canneries cannery cannibal cannibalism cannibals cannier canniest
canning cannon cannoned cannoning cannons canny canoe canoed canoes
canon canons canopied canopies canopy canopying cantaloupe cantaloupes
cantankerous canteen canteens canter cantered cantering canters canvas
canvased canvases canvasing canvass canvassed canvasser canvassers
canvasses canvassing canyon canyons capabler capablest capably
capacitance capacities capacitor capacitors cape caped caper capered
capering capers capes capillaries capillary capitalists capitulate
capitulated capitulates capitulating capped capping caprice caprices
capricious capriciously capsize capsized capsizes capsizing capsule
capsuled capsules capsuling captained captaining captains caption
captioned captioning captions captivate captivated captivates
captivating captive captives captivities captivity captor captors
caramel caramels carat carats caravan caravans carbohydrate
carbohydrates carbons carburetor carburetors carcass carcasses
carcinogenic carded cardiac cardigan cardigans cardinal cardinals
carding careered careering carefree carefuller carefullest carefulness
carelessly carelessness caress caressed caresses caressing caretaker
caretakers cargo cargoes caribou caribous caricature caricatured
caricatures caricaturing carnage carnal carnation carnations carnival
carnivals carnivore carnivores carnivorous carol carols carouse
caroused carouses carousing carp carped carpenter carpentered
carpentering carpenters carpentry carpeted carpeting carpets carping
carps carriages carriageway carriers carrion cart carted cartel
cartels cartilage cartilages carting cartographer cartographers
cartography carton cartons cartooned cartooning cartoonist cartoonists
carts cartwheel cartwheeled cartwheeling cartwheels carve carved
carves carving cascade cascaded cascades cascading cashed cashes
cashew cashews cashier cashiered cashiering cashiers cashing cashmere
casings casino casinos cask casket caskets casks casserole casseroled
casseroles casseroling castaway castaways caste casted caster casters
castes castigate castigated castigates castigating castings castled
castles castling castoff castoffs castrate castrated castrates
castrating casually casuals casualties casualty cataclysm cataclysmic
cataclysms catapult catapulted catapulting catapults cataract
cataracts catastrophe catastrophes catcall catcalled catcalling
catcalls catchier catchiest catchings catchment catchy catechism
catechisms categorical caterer caterers caterpillar caterpillars
catfish catfishes cathedrals catholics catnap catnapped catnapping
catnaps catnip catwalk catwalks caucus caucused caucuses caucusing
cauliflower cauliflowers caulk caulked caulking caulks causeway
causeways caustic caustics cautioned cautioning cautions cautious
cautiously cavalier cavaliers cavalries cavalry caveats caved cavern
caverns caves caviar caving cavities cavity cavort cavorted cavorting
cavorts caw cawed cawing caws ceasefire ceaseless ceaselessly cedar
cedars cede ceded cedes ceding ceilings celebrations celebrities
celebrity celery celestial celibacy celibate celibates cellar cellars
celled celling cellist cellists cello cellophane cellos cellulars
cellulose cement cemented cementing cements cemeteries cemetery
censure censured censures censuring census censused censuses censusing
centennial centennials centipede centipedes centraler centralest
centrals centrifuge cents ceramic cereal cereals cerebral ceremonial
ceremonials ceremonies ceremonious certainer certainest certainties
certificated certificates certificating certified certifies certify
certifying cervical cessation cessations chafe chafed chafes chaff
chaffed chaffing chaffs chafing chagrin chagrined chagrining chagrins
chained chaining chainsaw chaired chairing chairmen chairperson
chairpersons chalet chalets chalice chalices chalked chalkier
chalkiest chalking chalks chalky challenger challengers chambers
chameleon chameleons champ champagnes champed champing championed
championing champions championship championships champs chanced
chancellors chancing chandelier chandeliers changeable chant chanted
chanting chants chapels chaperon chaperoned chaperoning chaperons
chaplain chaplains chapped chapping characteristically charcoal
charcoals chargeable charger chariot chariots charisma charismatic
charismatics charitably charlatan charlatans charminger charmingest
charred charring charted chartered chartering charters charting chasm
chasms chassis chaste chasten chastened chastening chastens chaster
chastest chastise chastised chastisement chastisements chastises
chastising chastity chatter chatterbox chatterboxes chattered
chattering chatters chattier chattiest chatty chauffeur chauffeured
chauffeuring chauffeurs chauvinist chauvinists cheapen cheapened
cheapening cheapens cheapness checkout checkpoint checkup checkups
cheeked cheeking cheeks cheep cheeped cheeping cheeps cheered
cheerfuller cheerfullest cheerfully cheerfulness cheerier cheeriest
cheering cheery cheesecloth cheesed cheeses cheesing cheetah cheetahs
chef cheffed cheffing chefs chemically cherish cherished cherishes
cherishing cherries cherry cherub cherubim cherubs chestnuts chests
chewier chewiest chewy chi chic chicer chicest chick chickened
chickening chicks chide chided chides chiding chiefer chiefest chiefly
chiefs chieftain chieftains childbirth childbirths childed childes
childhoods childing childlike chili chilies chill chilled chiller
chillest chillier chillies chilliest chilling chills chilly chime
chimed chimes chiming chimney chimneys chimp chimpanzee chimpanzees
chimps chin china chink chinked chinking chinks chinned chinning chins
chintz chipmunk chipmunks chipped chipper chippers chipping
chiropractor chiropractors chirp chirped chirping chirps chisel
chiseled chiseling chisels chivalrous chivalry chlorine chloroform
chloroformed chloroforming chloroforms chlorophyll chocolates choicer
choicest choirs choke choked chokes choking cholera cholesterol
choosier choosiest choosy chopper choppered choppering choppers
choppier choppiest choppy chorals chords chore chored choreographer
choreographers choreography chores choring chortle chortled chortles
chortling chorused choruses chorusing chow chowder chowdered
chowdering chowders chowed chowing chows christen christened
christening christenings christens chrome chromed chromes chroming
chromium chromosome chromosomes chronic chronically chronicle
chronicled chronicles chronicling chronics chronological
chronologically chronologies chronology chrysanthemum chrysanthemums
chubbier chubbiest chubby chuckle chuckled chuckles chuckling chug
chugged chugging chugs chum chummed chummier chummies chummiest
chumming chummy chums chunkier chunkiest chunky churn churned churning
churns chute chutes ciders cigar cigarettes cigars cinch cinched
cinches cinching cinder cindered cindering cinders cinemas cinnamon
cipher ciphered ciphering ciphers circled circling circuited
circuiting circuitous circulars circulations circulatory circumcise
circumcised circumcises circumcising circumcision circumcisions
circumference circumferences circumflex circumstanced circumstancing
circumstantial circumstantials circumvent circumvented circumventing
circumvention circumvents circus circuses cistern cisterns citation
citations citizenship citric citrus citruses civic civics civilians
civilities civility clack clacked clacking clacks clad clairvoyance
clairvoyant clairvoyants clam clamber clambered clambering clambers
clammed clammier clammiest clamming clammy clamp clamped clamping
clamps clams clan clandestine clang clanged clanging clangs clank
clanked clanking clanks clans clap clapped clapper clappered
clappering clappers clapping claps claptrap claret clarifications
clarinet clarinets clashed clashing clasp clasped clasping clasps
classifications classmate classmates classroom classrooms classy
clatter clattered clattering clatters claustrophobia claw clawed
clawing claws clay cleanlier cleanliest cleanliness cleanse cleansed
cleanser cleansers cleanses cleansing clearances clearings clearness
cleat cleats cleavage cleavages cleave cleaved cleaver cleavers
cleaves cleaving clef clefs cleft clefted clefting clefts clemency
clench clenched clenches clenching clergies clergy clergyman clergymen
cleric clerical clerics clerk clerked clerking clerks cleverly
cleverness cliches clicked clicking clicks clientele clienteles cliffs
climactic climates climax climaxed climaxes climaxing climber climbers
clime climes clinch clinched clinches clinching cling clinging clings
clinically clinics clink clinked clinking clinks clipboard clipboards
clippings cliques clitoris cloak cloaked cloaking cloaks clocked
clocking clockwise clockwork clockworks clod clodded clodding clods
clogged clogging clogs cloister cloistered cloistering cloisters
closeness closeted closeting closets closures clot clothespin
clothespins cloths clots clotted clotting cloudburst cloudbursts
clouded cloudier cloudiest clouding cloudy clout clouted clouting
clouts clove cloven clover clovers cloves clown clowned clowning
clowns clubbed clubbing clubhouse clubhouses cluck clucked clucking
clucks clued clueless cluing clump clumped clumping clumps clumsier
clumsiest clumsily clumsiness clung clustered clustering clutch
clutched clutches clutching clutter cluttered cluttering clutters
coached coaches coaching coagulate coagulated coagulates coagulating
coagulation coaled coalesce coalesced coalesces coalescing coaling
coalition coalitions coals coarsely coarsen coarsened coarseness
coarsening coarsens coarser coarsest coastal coasted coaster coasters
coasting coastline coastlines coasts coated coater coating coattest
coax coaxed coaxes coaxing cob cobalt cobbed cobbing cobble cobra
cobras cobs cobweb cobwebs cocaine cock cocked cockeyed cockier
cockiest cocking cockpit cockpits cockroach cockroaches cocks cocktail
cocktails cocky cocoa cocoas coconut coconuts cocoon cocooned
cocooning cocoons cod codded codding cods coefficient coefficients
coerce coerced coerces coercing coercion coexist coexisted coexistence
coexisting coexists coffees coffer coffers coffin coffined coffining
coffins cog cogency cogent cognac cognacs cognitive cogs coherence
coherently coil coiled coiling coils coinage coinages coincided
coincidences coincidental coincidentally coincides coinciding coked
cokes coking colander colanders colder coldest coldly coldness colds
colic collaborate collaborated collaborates collaborating
collaborations collaborative collaborator collaborators collage
collages collapsible collarbone collarbones collared collaring collars
collateral collation colleagued colleaguing collectively collectives
collector collectors collegiate collide collided collides colliding
collie collied collies collision collisions colloquial colloquialism
colloquialisms colloquials collusion collying colonel colonels
colonial colonials colonies colons colossal colt colts coma comae
comas comb combatant combatants combated combating combats combed
combing combs combustible combustibles combustion comeback comedian
comedians comedies comelier comeliest comely comestible comestibles
comet comets comforted comforting comforts comical comings commandant
commandants commanded commandeer commandeered commandeering
commandeers commander commanders commanding commando commandos
commemorate commemorated commemorates commemorating commemoration
commemorations commenced commencement commencements commences
commencing commend commendable commendation commendations commended
commending commends commentaries commerce commerced commerces
commercialism commercials commercing commiserate commiserated
commiserates commiserating commiseration commiserations commissioner
commissioners commodities commodore commodores commoner commonest
commonplace commonplaces commonwealth commonwealths commotion
commotions commune communed communes communicable communicative
communicator communing communion communions communique communiques
commutative commute commuted commuter commuters commutes commuting
compacted compacter compactest compacting compaction compacts
companions companionship comparatives compartment compartments compass
compassed compasses compassing compassionate compatibles compatriot
compatriots compensated compensates compensating compensations
competences competently competitions compilations complacency
complemented complementing complements completer completest complexer
complexes complexest complexion complexioned complexions complexities
compliance compliant complied complies complimentary complimented
complimenting compliments complying composites compositions compost
composted composting composts composure compounded compounding
compounds comprehended comprehending comprehends comprehensions
comprehensively comprehensives compromised compromises compromising
compulsions compulsive compulsories compunction compunctions
computations comrade comrades comradeship concatenation concatenations
concave concealment conceded concedes conceding conceit conceited
conceits concentrations concentric conceptions conceptually concerted
concerting concertos concession concessions conciliate conciliated
conciliates conciliating conciliation concisely conciseness conciser
concisest conclusive conclusively concoct concocted concocting
concoction concoctions concocts concord concordance concourse
concourses concreted concretes concreting concurred concurrence
concurrences concurrency concurrent concurring concurs concussion
concussions condemnations condensation condensations condescend
condescended condescending condescends condiment condiments
conditionally conditionals condolence condolences condominium
condominiums condoms condoned condones condoning condor condors
conducive conductors cone cones confection confections confederacies
confederacy confederate confederated confederates confederating
confederation confederations confer conferred conferrer conferring
confers confessed confesses confessing confession confessions confetti
confidant confidants confide confided confidences confidentially
confidently confides confiding configurable confinement confinements
confirmations confiscate confiscated confiscates confiscating
confiscation confiscations conformed conforming conformity conforms
confound confounded confounding confounds confrontation confrontations
congeal congealed congealing congeals congenial conglomerate
conglomerated conglomerates conglomerating congratulated congratulates
congratulating congregate congregated congregates congregating
congregation congregations congress congresses congressman congressmen
congresswoman congresswomen congruent conical conicals conifer
coniferous conifers conjectured conjectures conjecturing conjugal
conjugate conjugated conjugates conjugating conjugation conjugations
conjunctions conjure conjured conjures conjuring connective
connectivity connectors conned connexion conning connoisseur
connoisseurs connote connoted connotes connoting conquer conquered
conquering conqueror conquerors conquers conquest conquests cons
consciences conscientious consciouses consciousnesses consecrate
consecrated consecrates consecrating consensuses consequential
conservatism conservatories conservatory conserve conserved conserves
conserving considerings consign consigned consigning consignment
consignments consigns consistencies consolations consoled consoles
consolidate consolidated consolidates consolidating consolidation
consolidations consoling consomme consonant consonants consort
consorted consorting consortium consorts conspicuously conspiracies
conspirator conspirators conspire conspired conspires conspiring
constancy constellation constellations consternation constipation
constituencies constituted constituting constitutionally
constitutionals constitutions constrict constricted constricting
constriction constrictions constricts construe construed construes
construing consul consular consulars consulate consulates consuls
consultations consumable consumables consumerism consumers consummate
consummated consummates consummating contagion contagions contagious
containers contaminate contaminated contaminates contaminating
contamination contemplation contemplative contemplatives
contemporaries contemptible contemptuous contended contender
contenders contending contends contented contenting contentions
contentment contestant contestants contested contesting contests
contextual contiguous continentals continents contingencies
contingency contingent contingents contort contorted contorting
contortion contortions contorts contoured contouring contours
contraband contraceptive contraceptives contraction contractions
contractor contractors contractual contradictions contraption
contraptions contraries contrasted contrasting contrasts contravene
contravenes contributory contrite controllable controversies
convalesce convalesced convalescence convalescences convalescent
convalescents convalesces convalescing convection convene convened
convenes conveniences convening convent convented conventing
conventionally convents converge converged convergence converges
converging conversant conversational conversed converses conversing
converters convertible convertibles convex convexed convexes convexing
conveyance conveyances conveyed conveying conveys convoluted convoy
convoyed convoying convoys convulse convulsed convulses convulsing
convulsion convulsions convulsive coo cooed cooing cookbook cookbooks
cooker cooler coolers coolest coolly coop cooped cooper cooperated
cooperates cooperating cooperative cooperatives cooping coops
coordinated coordinating coordinator coos cop copier copiers copious
copiously copped copperhead copperheads coppers copping cops copulate
copulation copyrighted copyrighting copyrights coral corals cord
corded cordial cordially cordials cording cordless cordon cordoned
cordoning cordons cords corduroy cored cores coring cork corked
corking corks corkscrew corkscrewed corkscrewing corkscrews cornea
corneas corned cornered cornering cornet cornets cornflakes cornier
corniest corning cornmeal corns cornstarch corny corollary coronaries
coronary coronation coronations coroner coroners corporal corporals
corporations corps corpulent corpus corpuscle corpuscles corral
corralled corralling corrals correcter correctest corrective
correctness corrector correlated correlates correlating correlations
correspondences correspondents correspondingly corridors corroborate
corroborated corroborates corroborating corroboration corrode corroded
corrodes corroding corrosion corrosive corrosives corrupter corruptest
corruptible corruptions corsage corsages corset corseted corseting
corsets cortex cosmetic cosmetics cosmonaut cosmonauts cosmopolitan
cosmopolitans cosmos cosmoses costings costlier costliest costume
costumed costumes costuming cot coting cots cottage cottaged cottages
cottaging cotted cottoned cottoning cottons cottontail cottontails
cottonwood cottonwoods couch couched couches couching cougar cougars
coughed coughing coughs countable countdown countdowns countenance
countenanced countenances countenancing counteract counteracted
counteracting counteracts counterattack counterattacked
counterattacking counterattacks counterbalance counterbalanced
counterbalances counterbalancing counterclockwise countered
counterfeit counterfeited counterfeiting counterfeits countering
counters countersign countersigned countersigning countersigns
countess countesses counties countryman countrymen countrysides coup
coupon coupons coups courageous courageously couriered couriering
couriers coursed courser coursing courted courteous courteously
courtesies courthouse courthouses courting courtroom courtrooms
courtship courtships courtyard courtyards cousins cove covenant
covenanted covenanting covenants covert covertly coverts coves covet
coveted coveting covetous covets coward cowardice cowardly cowards
cowboy cowboys cowed cower cowered cowering cowers cowgirl cowgirls
cowhide cowhides cowing cox coy coyer coyest coyote coyotes cozily
coziness crab crabbed crabbier crabbiest crabbing crabby crabs cracker
crackers crackle crackled crackles crackling crackpot crackpots cradle
cradled cradles cradling crafted craftier craftiest craftily crafting
crafts craftsman craftsmen crafty crag craggier craggiest craggy crags
cram crammed cramming crams cranberries cranberry crane craned cranes
craning cranium craniums crank cranked cranker crankest crankier
crankiest cranking cranks cranky crasser crassest crate crated crater
cratered cratering craters crates crating crave craved craves craving
cravings crayfish crayfishes crayon crayoned crayoning crayons craze
crazed crazes crazier crazies craziest crazily craziness crazing creak
creaked creakier creakiest creaking creaks creaky creamed creamier
creamiest creaming creams creamy crease creased creases creasing
creations creatively creativity creators credence credential
credentials creditable credited crediting creditor creditors credulous
creeds creek creeks creepier creepies creepiest creeping creeps creepy
cremate cremated cremates cremating cremation cremations crepe crepes
crept crescendo crescendos crescent crescents crest crested
crestfallen cresting crests cretin cretinous cretins crevasse
crevasses crevice crevices crewed crewing crews crib cribbed cribbing
cribs crickets crimed criminally criming crimson crimsoned crimsoning
crimsons cringe cringed cringes cringing crinkle crinkled crinkles
crinkling cripple crippled cripples crippling crises crisped crisper
crispest crisping crisply crispy crisscross crisscrossed crisscrosses
crisscrossing critically critique critiqued critiques critiquing croak
croaked croaking croaks crochet crocheted crocheting crochets crock
crockery crocks crocodile crocodiles crocus crocuses crofts cronies
crony crook crooked crookeder crookedest crooking crooks croon crooned
crooning croons cropped cropping croquet crossbow crossbows crosser
crossest crossings crosswalk crosswalks crosswords crotch crotches
crouch crouched crouches crouching crow crowbar crowbars crowed
crowing crowned crowning crowns crows crucially crucified crucifies
crucifix crucifixes crucifixion crucifixions crucify crucifying
crudely cruder crudest crudity crueler cruelest cruelly cruels
cruelties cruiser cruisers crumb crumbed crumbing crumble crumbled
crumbles crumblier crumblies crumbliest crumbling crumbly crumbs
crummier crummiest crummy crumple crumpled crumples crumpling crunchy
crusade crusaded crusader crusaders crusades crusading crust
crustacean crustaceans crusted crustier crusties crustiest crusting
crusts crusty crutch crutches crux cruxes crybabies crybaby crypt
crypts cub cubed cubes cubicle cubicles cubing cubs cuckoos cucumber
cucumbers cuddle cuddled cuddles cuddling cued cues cuff cuffed
cuffing cuffs cuing cuisine cuisines culinary cull culled culling
culls culminate culminated culminates culminating culmination
culminations culpable culprits cultivate cultivated cultivates
cultivating cultivation cults culturally cultured culturing cunninger
cunningest cunningly cupboards cupful cupfuls cupped cupping cur
curable curator curators curd curdle curdled curdles curdling curds
curfew curfews curio curios curiosities curiouser curiousest curl
curled curling curls currant currants currencies currents curricula
curried curries currying cursed curses cursing cursory curt curtail
curtailed curtailing curtails curtained curtaining curter curtest
curtsied curtsies curtsy curtsying curvature curvatures curved curving
cushion cushioned cushioning cushions custards custodian custodians
custody cutback cutbacks cuter cutes cutest cuticle cuticles cutlery
cutlet cutlets cutter cutters cutthroat cutthroats cuttings cyanide
cybernetics cyclic cyclone cyclones cylinders cylindrical cymbal
cymbals cynicism cynics cypress cypresses cyst cysts dab dabbed
dabbing dabble dabbled dabbles dabbling dabs dachshund dachshunds dad
daddies daddy dads daemon daffodil daffodils dagger daggers dailies
daintier dainties daintiest daintily dainty dairies dairy dais daises
daisies daisy dallied dallies dally dallying dam dame dames dammed
damming damneder damnedest damped dampen dampened dampening dampens
damper dampest damping dampness damps dams damsel damsels dancer
dancers dandelion dandelions dandier dandies dandiest dandruff dandy
dangered dangering dangle dangled dangles dangling dank danker dankest
dapper dapperer dapperest dappers daredevil daredevils darken darkened
darkening darkens darker darkest darklier darkliest darkly darlings
darn darned darning darns dart darted darting darts dashboard
dashboards dastardly daub daubed daubing daubs daughters daunt daunted
daunting dauntless daunts dawdle dawdled dawdles dawdling dawned
dawning dawns daybreak daydream daydreamed daydreaming daydreams daze
dazed dazes dazing dazzle dazzled dazzles dazzling deacon deacons
deaden deadened deadening deadens deader deadest deadlier deadliest
deadlined deadlines deadlining deadlock deadlocked deadlocking
deadlocks deafer deafest deafness dealings dean deaned deaning deans
dearer dearest dearly dears dearth dearths deathbed deathbeds deaves
debase debased debasement debasements debases debasing debaucheries
debauchery debilitate debilitated debilitates debilitating debilities
debility debit debited debiting debits debonair debrief debriefed
debriefing debriefs debris debtor debtors debts debunk debunked
debunking debunks debut debutante debutantes debuted debuting debuts
decadence decadent decadents decanter decanters decapitate decapitated
decapitates decapitating decayed decaying decays decease deceased
deceases deceasing deceit deceitful deceitfully deceits deceive
deceived deceives deceiving decencies decency decenter decentest
decently deception deceptions deceptive decibel decibels decidedly
deciduous decimals decimate decimated decimates decimating decipher
deciphered deciphering deciphers decisive decisively decked decking
decks declension decoder decompose decomposed decomposes decomposing
decomposition decorate decorated decorates decorating decoration
decorations decorative decorator decorators decorous decorum decoy
decoyed decoying decoys decree decreed decreeing decrees decrepit
decried decries decry decrying dedication dedications deduct deducted
deducting deductive deducts deeded deeding deepen deepened deepening
deepens deeps deer deface defaced defaces defacing defamation
defamatory defame defamed defames defaming defaulted defaulting
defeatist defecate defecated defecates defecating defected defecting
defectives defendant defendants defender defenders defensible defer
deference deferential deferred deferring defers defiance defiant
defiantly deficient deficit deficits defied defies defile defiled
defiles defiling definable deflate deflated deflates deflating
deflation deflect deflected deflecting deflection deflections deflects
deform deformed deforming deformities deformity deforms defraud
defrauded defrauding defrauds defrost defrosted defrosting defrosts
deft defter deftest deftly defunct defuncts defying degenerated
degenerates degenerating dehydrate dehydrated dehydrates dehydrating
deified deifies deify deifying deign deigned deigning deigns deities
deject dejected dejecting dejection dejects delectable delegate
delegated delegates delegating delegation delegations deleterious
deletions deli deliberated deliberates deliberating deliberation
deliberations delicacies delicacy delicately delicatessen
delicatessens deliciously delimit delimited delimiter delimiting
delimits delinquencies delinquency delinquent delinquents delirious
deliriously delirium deliriums delis deliverance deliveries deltas
delude deluded deludes deluding deluge deluged deluges deluging
delusions deluxe delve delved delves delving demagogue demagogues
demean demeaned demeaning demeans dementia demerit demerited
demeriting demerits demised demises demising democracies democrat
democrats demolition demolitions demon demons demonstrably
demonstrative demonstratives demonstrator demonstrators demote demoted
demotes demoting demotion demotions demount demure demurely demurer
demurest den denial denials denigrate denim denims denomination
denominations denominators denoted denoting denounce denounced
denounces denouncing dens densely denser densest densities dent dental
dented denting dentistry dentists dents denunciation denunciations
deodorant deodorants depart departed departing departs departures
dependable dependencies dependency depict depicted depicting depiction
depicts deplete depleted depletes depleting deplorable deplore
deplored deplores deploring deport deportation deportations deported
deporting deportment deports depose deposed deposes deposing deposited
depositing deposits depot depots deprave depraved depraves depraving
depravities depravity deprecate deprecated deprecates deprecating
depreciate depreciated depreciates depreciating depreciation
depressingly depressions deprivation deprivations deputies derail
derailed derailing derailment derailments derails derelict derelicts
deride derided derides deriding derision derivation derivations
derivatives derrick derricks descendant descendants descent descents
describable descriptor descriptors desecrate desecrated desecrates
desecrating desecration desegregation deserter deserters designation
designations desirability desirous desist desisted desisting desists
desks desolate desolated desolates desolating desolation despaired
despairing despairs desperation despicable despised despises despising
despondent despot despotic despots dessert desserts destinations
destinies destiny destitute destitution destroyer destroyers
detachable detachment detachments detain detained detaining detains
detectives detectors detention detentions detergent detergents
deteriorate deteriorated deteriorates deteriorating deterioration
determinable determinations determinism deterministic deterred
deterrents deterring deters detest detested detesting detests dethrone
dethroned dethrones dethroning detonate detonated detonates detonating
detonation detonations detonator detonators detour detoured detouring
detours detracted detracting detracts detriment detrimental detriments
devalue devastation deviant deviate deviated deviates deviating
deviations devils devolution devolve devolved devolves devolving
devotee devotees devotion devotions devour devoured devouring devours
devout devouter devoutest devoutly dew dexterity dexterous diabetes
diabetic diabetics diabolical diagnose diagnosed diagnoses diagnosing
diagonally diagonals diagrammed diagramming diameters diametrically
diamond diamonds diaper diapered diapering diapers diaphragm
diaphragms diaries diatribe diced dices dicing dictated dictates
dictating dictation dictations dictatorial dictators dictatorships
diction dieing dieseled dieseling diesels dietaries dietary dieted
dieting diets differentiated differentiates differentiating
differentiation diffuse diffused diffuses diffusing diffusion digested
digestible digesting digestion digestions digestive digests digger
digitally dignified dignifies dignify dignifying dignitaries dignitary
dignities digress digressed digresses digressing digression
digressions dike dikes dilapidated dilate dilated dilates dilating
dilation dilemmas diligence diligent diligently dill dilled dilling
dills dilute diluted dilutes diluting dilution dime dimer dimes
diminish diminished diminishes diminishing diminutive diminutives
dimly dimmed dimmer dimmest dimming dimple dimpled dimples dimpling
dims din diners dinghies dinghy dingier dingies dingiest dingy dinned
dinnered dinnering dinners dinning dinosaur dinosaurs dins diocese
dioceses dioxide diphtheria diphthong diphthongs diploma diplomacy
diplomas diplomat diplomata diplomatically diplomatics diplomats
dipped dipping dips directer directest directness direr direst dirge
dirges dirtied dirtier dirties dirtiest dirtying disabilities
disability disadvantaged disadvantageous disadvantaging disagreeable
disagreeably disagreements disallow disallowed disallowing disallows
disambiguate disappearance disappearances disappointments disapproval
disapprove disapproved disapproves disapproving disarm disarmament
disarmed disarming disarms disarray disarrayed disarraying disarrays
disavow disavowed disavowing disavows disband disbanded disbanding
disbands disbelief disbelieve disbelieved disbelieves disbelieving
disburse disbursed disbursement disbursements disburses disbursing
discern discerned discernible discerning discerns discharged
discharges discharging disciple disciples disciplinarian
disciplinarians disciplined disciplines disciplining disclaim
disclaimed disclaiming disclaims disclose disclosed discloses
disclosing disclosure disclosures discomfort discomforted
discomforting discomforts disconcert disconcerted disconcerting
disconcerts disconsolate disconsolately discontent discontented
discontenting discontents discontinuity discord discordant discorded
discording discords discos discounted discounting discouragement
discouragements discourse discoursed discourses discoursing
discourteous discourtesies discourtesy discredit discredited
discrediting discredits discreet discreeter discreetest discreetly
discrepancies discretionary discriminatory discus discuses disdain
disdained disdainful disdaining disdains diseased disembark
disembarkation disembarked disembarking disembarks disenchantment
disengage disengaged disengages disengaging disentangle disentangled
disentangles disentangling disfigure disfigured disfigures disfiguring
disgrace disgraced disgraceful disgraces disgracing disgruntle
disgruntled disgruntles disgruntling disgustingly dishearten
disheartened disheartening disheartens dished dishing dishonestly
dishonesty dishwasher dishwashers disillusion disillusioned
disillusioning disillusionment disillusions disincentive disinfect
disinfectant disinfectants disinfected disinfecting disinfects
disingenuous disinherit disinherited disinheriting disinherits
disintegrate disintegrated disintegrates disintegrating disintegration
disinterested disjoint disjointed disjointing disjoints disks
dislocate dislocated dislocates dislocating dislocation dislocations
dislodge dislodged dislodges dislodging disloyal disloyalty dismaler
dismalest dismally dismantle dismantled dismantles dismantling dismay
dismayed dismaying dismays dismember dismembered dismembering
dismembers dismissal dismissals dismissive dismount dismounted
dismounting dismounts disobedience disobedient disobey disobeyed
disobeying disobeys disordered disordering disorderly disorders disown
disowned disowning disowns disparage disparaged disparages disparaging
disparate disparities disparity dispassionate dispassionately dispatch
dispatched dispatches dispatching dispel dispelled dispelling dispels
dispensaries dispensary dispensation dispensations dispense dispensed
dispenser dispensers dispenses dispensing dispersal disperse dispersed
disperses dispersing dispersion displace displaced displacement
displacements displaces displacing displease displeased displeases
displeasing displeasure disposables disposals dispositions dispossess
dispossessed dispossesses dispossessing disproportionate
disproportionated disproportionates disproportionating disprove
disproved disproves disproving disputed disputes disputing
disqualified disqualifies disqualify disqualifying disquiet disquieted
disquieting disquiets disregarded disregarding disregards disrepair
disreputable disrepute disrespect disrespected disrespectful
disrespecting disrespects disrupted disrupting disruptions disruptive
disrupts dissatisfaction dissatisfied dissatisfies dissatisfy
dissatisfying dissect dissected dissecting dissection dissections
dissects disseminate disseminated disseminates disseminating
dissemination dissension dissensions dissent dissented dissenter
dissenters dissenting dissents dissertations disservice disservices
dissident dissidents dissimilarities dissimilarity dissimilars
dissipate dissipated dissipates dissipating dissipation dissociate
dissociated dissociates dissociating dissociation dissolute dissolutes
dissolution dissolve dissolved dissolves dissolving dissonance
dissonances dissuade dissuaded dissuades dissuading distanced
distancing distantly distaste distastes distend distended distending
distends distill distillation distillations distilled distiller
distilleries distillers distillery distilling distills distincter
distinctest distinctively distinguishable distorter distortions
distraction distractions distraught distressingly distributions
distributor distributors districts distrust distrusted distrustful
distrusting distrusts disturbances disuse disused disuses disusing
ditched ditches ditching dither dithered dithering dithers ditties
dittoed dittoing dittos ditty diver diverge diverged divergence
divergences divergent diverges diverging divers diversified
diversifies diversify diversifying diversion diversions diversities
divest divested divesting divests dividend dividends divined diviner
divines divinest divining divinities divinity divisible divisive
divisor divisors divorced divorcee divorcees divorces divorcing
divulge divulged divulges divulging dizzied dizzier dizzies dizziest
dizziness dizzy dizzying docile dock docked docking docks doctorate
doctored doctoring doctrines documentaries dodged dodges dodging dodo
doer doers doest dogged doggedly doggerel dogging doghouse doghouses
dogmas dogmatic dogmatics dogwood dogwoods doilies doily doldrums
doled doleful dolefuller dolefullest dolefully doles doling doll
dolled dollies dolling dolls dolly dolphin dolphins domains dome domed
domes domesticate domesticated domesticates domesticating domesticity
domestics domicile domiciled domiciles domiciling dominance dominants
domination doming dominion dominions domino dominoes donkey donkeys
donor donors doodle doodled doodles doodling doored dooring doorman
doormen doorstep doorstepped doorstepping doorsteps doorway doorways
dope doped dopes dopey dopier dopiest doping dormant dormants
dormitories dormitory dorsal dorsals dos dosed dosing dote doted dotes
doting doubly doubted doubtfully doubting dough dour dourer dourest
douse doused douses dousing dove doves dowdier dowdies dowdiest dowdy
downcast downed downfall downfalls downgrade downgraded downgrades
downgrading downhearted downhills downier downiest downing downpour
downpours downs downstream downtown downward downy dowries dowry doze
dozed dozes dozing drab drabber drabbest drabs draconian dragonflies
dragonfly dragons drainage dramas dramatics dramatist dramatists drape
draped draperies drapery drapes draping drawbridge drawbridges drawer
drawers drawl drawled drawling drawls dreadfully dreamer dreamers
dreamier dreamiest dreamy drearier drearies dreariest dredge dredged
dredges dredging dregs drench drenched drenches drenching dresser
dressers dressier dressiest dressings dressmaker dressmakers dressy
dribble dribbled dribbles dribbling drier driers driest drifted
drifting drifts driftwood drilled drilling drills drinkable drinker
drinkers drivels driveway driveways drizzle drizzled drizzles
drizzling droll droller drollest drone droned drones droning drool
drooled drooling drools droop drooped drooping droops dropout dropouts
droppings dross drought droughts droves drowse drowsed drowses
drowsier drowsiest drowsiness drowsing drowsy drudge drudged drudgery
drudges drudging drugged drugging druggist druggists drugstore
drugstores drummed drummer drummers drumming drumstick drumsticks
drunkard drunkards drunkenly drunkenness drunker drunkest drunks dryer
dryers dryly dryness drys dualism dub dubbed dubbing dubiously dubs
duchess duchesses ducked ducking duckling duct ducts dud dude duded
dudes duding duds duel duels dues duet duets dugout dugouts duke duked
dukes duking dulled duller dullest dulling dullness dulls dully
dumbbell dumbbells dumbed dumber dumbest dumbfound dumbfounded
dumbfounding dumbfounds dumbing dumbs dummies dumpier dumpies dumpiest
dumpling dumpy dunce dunces dune dunes dung dunged dungeon dungeoned
dungeoning dungeons dunging dungs dunk dunked dunking dunks dunno duo
dupe duped dupes duping duplex duplexes duplicity durability durable
duress dusk duskier duskiest dusky dusted dustier dustiest dusting
dustmen dustpan dustpans dusts dutiful dutifully duvet dwarf dwarfed
dwarfer dwarfest dwarfing dwarfs dwell dweller dwellers dwelling
dwellings dwells dwelt dwindle dwindled dwindles dwindling dye dyed
dyeing dyes dynamical dynamite dynamited dynamites dynamiting dynamo
dynamos dynasties dynasty dysentery dyslexia eagerer eagerest
eagerness eagles earache earaches eardrum eardrums earl earls earmark
earmarked earmarking earmarks earner earners earnest earnestly
earnestness earnests earnings earring earrings earshot earthed
earthier earthiest earthing earthlier earthliest earthly earthquake
earthquaked earthquakes earthquaking earths earthworm earthworms
earthy eased easel easels eases easies easing easterlies easterly
eastward easygoing eave eaves eavesdrop eavesdropped eavesdropping
eavesdrops ebb ebbed ebbing ebbs ebonies ebony eccentricities
eccentricity eccentrics ecclesiastical eclair eclairs eclectic eclipse
eclipsed eclipses eclipsing ecologically ecologist ecologists
economist economists ecosystem ecosystems ecstasies ecstasy ecstatic
ecumenical eczema eddied eddies eddy eddying edged edger edgewise
edgier edgiest edging edgy edible edibles edict edicts edifice
edifices editorials editorship educations educator educators eel eels
eerie eerier eeriest effected effecting effectual effeminate
effervescent efficients effigies effigy effortless effortlessly
effusive effusively egalitarian egged egging eggplant eggplants
egocentric egoism egotism egotist egotists eigenvalue eighteens
eighteenth eighteenths eighths eighties eightieth eightieths eights
eighty ejaculate ejaculated ejaculates ejaculating ejaculation
ejaculations eject ejected ejecting ejection ejections ejects eke eked
ekes eking elaborated elaborately elaborates elaborating elaboration
elaborations elapse elapsed elapses elapsing elastic elasticity
elastics elation elbow elbowed elbowing elbows elder elders eldest
elective electives elector electorates electors electrically
electrician electricians electrified electrifies electrify
electrifying electrocute electrocuted electrocutes electrocuting
electrocution electrocutions electrode electrodes electrolysis
electromagnetic electrons electrostatic elegance elegantly elegies
elegy elemental elevate elevated elevates elevating elevation
elevations elevens eleventh elevenths elf elicit elicited eliciting
elicits eligibility elimination eliminations elites elitism elk elks
ellipse ellipses ellipsis elliptic elliptical elm elms elongate
elongated elongates elongating elope eloped elopement elopements
elopes eloping eloquence eloquent eloquently elucidate elude eluded
eludes eluding elusive elves email emailed emailing emails emanate
emanated emanates emanating emancipate emancipated emancipates
emancipating emancipation embalm embalmed embalming embalms embankment
embankments embargo embargoed embargoes embargoing embark embarked
embarking embarks embarrassments embassies embassy embellish
embellished embellishes embellishing embellishment embellishments
ember embers embezzle embezzled embezzlement embezzles embezzling
embitter embittered embittering embitters emblem emblems embodied
embodies embodiment embody embodying emboss embossed embosses
embossing embrace embraced embraces embracing embroider embroidered
embroideries embroidering embroiders embroidery embryo embryonic
embryos emerald emeralds emergence emergencies emergent emigrant
emigrants emigrate emigrated emigrates emigrating emigration
emigrations eminence eminences emir emirs emissaries emissary emission
emissions emits emitted emitting emotive empathy emperor emperors
emphases emphatic emphatically emphysema empires employments emporium
emporiums empower empowered empowering empowers empress empresses
emptier emptiest emptiness emulated emulates emulating emulations
emulsion emulsions enact enacted enacting enactment enactments enacts
enamel enamels encapsulate encapsulated encapsulates encapsulating
encase encased encases encasing enchant enchanted enchanting
enchantment enchantments enchants encircle encircled encircles
encircling enclosure enclosures encompass encompassed encompasses
encompassing encore encored encores encoring encouragements encroach
encroached encroaches encroaching encrypted encryption encumber
encumbered encumbering encumbers encumbrance encumbrances encyclopedia
encyclopedias endanger endangered endangering endangers endear
endeared endearing endearment endearments endears endemic endemics
endive endives endorse endorsed endorsement endorsements endorses
endorsing endow endowed endowing endowment endowments endows endurance
endure endured endures enduring endways enema enemas energetic
energetically energetics energies enforcement engagement engagements
engender engendered engendering engenders engined engining engrave
engraved engraver engravers engraves engraving engravings engross
engrossed engrosses engrossing engulf engulfed engulfing engulfs
enhancements enigma enigmas enigmatic enjoyments enlargement
enlargements enlist enlisted enlisting enlistment enlistments enlists
enliven enlivened enlivening enlivens enmities enmity enormities
enormity enrage enraged enrages enraging enrich enriched enriches
enriching enrichment enrolled enrolling ensemble ensembles enshrine
enshrined enshrines enshrining ensign ensigns enslave enslaved
enslaves enslaving ensue ensued ensues ensuing entailed entailing
entangle entangled entanglement entanglements entangles entangling
enterprises enterprising entertainer entertainers entertainments
enthralled enthralling enthusiasms enthusiast enthusiastically
enthusiasts entice enticed enticement enticements entices enticing
entomologist entomologists entomology entrails entranced entrances
entrancing entrant entrants entrap entrapped entrapping entraps
entreat entreated entreaties entreating entreats entreaty entree
entrees entrench entrenched entrenches entrenching entropy entrust
entrusted entrusting entrusts entwine entwined entwines entwining
enumerate enumerated enumerates enumerating enumeration enunciate
enunciated enunciates enunciating enunciation envelop enveloped
enveloping envelops enviable envied envies envious enviously
environmentally environs envoy envoys envying enzyme enzymes eon eons
epaulet epaulets ephemeral epics epidemic epidemics epidermis
epidermises epilepsy epileptic epileptics epilogue epilogued epilogues
epiloguing epitaph epitaphs epithet epithets epitome epitomes epoch
epochs epsilon equanimity equated equates equating equator equatorial
equators equestrian equestrians equilateral equilaterals equine
equines equinox equinoxes equitable equities equity equivalence
equivalently equivocal eradicate eradicated eradicates eradicating
eras erasers erasure erect erected erecting erection erections erects
ergonomic erode eroded erodes eroding erosion erotic errand errands
errant errants erratic erratically erratics erred erring erroneously
errs erstwhile erudite erupt erupted erupting eruption eruptions
erupts escalate escalated escalates escalating escalation escalator
escalators escapade escapades escapism escort escorted escorting
escorts especial espionage essayed essaying essences essentials
estates esteem esteemed esteeming esteems estimations estrangement
estrangements etch etched etches etching etchings eternally eternities
ether ethereal ethically ethicals ethnics ethos etiquette etymological
etymologies eulogies eulogy euphemism euphemisms eureka euthanasia
evacuate evacuated evacuates evacuating evacuation evacuations evade
evaded evades evading evaluations evangelical evangelicals evangelism
evangelist evangelistic evangelists evaporate evaporated evaporates
evaporating evaporation evasion evasions evasive eve evener evenest
evenness eventful eventualities eventuality evergreen evergreens
everlasting everlastings evermore eves evict evicted evicting eviction
evictions evicts evidenced evidences evidencing evidents evocative
evoke evoked evokes evoking ewe ewes exacerbate exacerbated
exacerbates exacerbating exacted exacter exactest exacting exacts
exaggeration exaggerations exalt exaltation exalted exalting exalts
examinations examiners exampled exampling exasperate exasperated
exasperates exasperating exasperation excavate excavated excavates
excavating excavation excavations excel excelled excellence
excellently excelling excels excerpt excerpted excerpting excerpts
excesses excise excised excises excising excitable excitements exclaim
exclaimed exclaiming exclaims exclamations exclusives excommunicate
excommunicated excommunicates excommunicating excommunication
excommunications excrement excrete excreted excretes excreting
excruciating excursion excursions excusable excused excusing
executioner executioners executions executives executor executors
exemplary exemplified exemplifies exemplify exemplifying exempted
exempting exemption exemptions exempts exert exerted exerting exertion
exertions exerts exhale exhaled exhales exhaling exhaustion exhibited
exhibiting exhibitions exhibits exhilarate exhilarated exhilarates
exhilarating exhilaration exhort exhortation exhortations exhorted
exhorting exhorts exhume exhumed exhumes exhuming exile exiled exiles
exiling existences existent existential existentially exodus exoduses
exonerate exonerated exonerates exonerating exoneration exorbitant
exotics expandable expanse expanses expansions expansive expatriate
expatriated expatriates expatriating expectancy expectant expediencies
expediency expedient expedients expedite expedited expedites
expediting expeditions expel expelled expelling expels expend
expendable expendables expended expending expenditures expends
expertly expiration expletive expletives explicable explicits
explorations explorer explorers explosives exponent exponentially
exponents exported exporter exporters exporting exports exposition
expositions exposures expound expounded expounding expounds expressive
expressively expressly expulsion expulsions exquisite extemporaneous
exterior exteriors exterminate exterminated exterminates exterminating
extermination exterminations externals extinct extincted extincting
extinctions extincts extinguish extinguished extinguisher
extinguishers extinguishes extinguishing extol extolled extolling
extols extort extorted extorting extortion extortionate extorts
extractions extracurricular extracurriculars extradite extradited
extradites extraditing extradition extraditions extraordinaries
extrapolate extrapolated extrapolates extrapolating extrapolation
extrapolations extraterrestrial extraterrestrials extravagance
extravagances extravagant extravagantly extremer extremest extremists
extremities extremity extricate extricated extricates extricating
extrovert extroverts exuberance exuberant exude exuded exudes exuding
exult exultant exultation exulted exulting exults eyeball eyeballed
eyeballing eyeballs eyebrow eyebrows eyed eyelash eyelashes eyelid
eyelids eyesore eyesores eyewitness eyewitnesses fable fables
fabricate fabricated fabricates fabricating fabrication fabrications
fabrics fabulous facade facades faceless facet faceted faceting
facetious facets facial facials facile facilitated facilitates
facilitating facsimile facsimiled facsimileing facsimiles faction
factions factored factorial factoring fad fade faded fades fading fads
failings fainted fainting faintly faints fairies fairs faithed
faithfully faithfulness faithfuls faithing faithless faiths faked
fakes faking falcon falconry falcons fallacies fallible fallout
falsehood falsehoods falsely falser falsest falsetto falsettos
falsification falsifications falsified falsifies falsify falsifying
falsities falsity falter faltered faltering falters famed familiars
famines fanatic fanatical fanatics fancied fancier fancies fanciest
fanciful fancying fanfare fanfares fang fangs fanned fanning fantasied
fantastically fantasying faraway farces fared fares farewells faring
farmed farming farmland farms fascination fascinations fascism
fascists fashionably fasted fasten fastened fastener fasteners
fastening fastenings fastens fastidious fasting fasts fatalistic
fatalities fatality fatally fated fateful fates fathered fatherhood
fathering fatherland fatherlands fatherly fathom fathomed fathoming
fathoms fatigue fatigued fatigues fatiguing fating fats fatten
fattened fattening fattens fatter fattest fattier fatties fattiest
fatty faucets faulted faultier faultiest faulting faultless fauna
faunas fawn fawned fawning fawns faze fazed fazes fazing fearful
fearfuller fearfullest fearfully fearless fearlessly fearsome feast
feasted feasting feasts feather feathered featherier featheriest
feathering feathers feathery feats feces federalism federalist
federalists federals federation federations feds feebler feeblest
feeder feeders feeler feelers feign feigned feigning feigns feint
feinted feinting feints feline felines felled feller fellest felling
fellowship fellowships fells felon felonies felons felony felted
felting felts feminine feminines femininity feminism fen fenced fences
fencing fend fended fending fends ferment fermentation fermented
fermenting ferments fern ferns ferocious ferociously ferocity ferret
ferreted ferreting ferrets ferried ferries ferry ferrying fertile
fertility fervent fervently fester festered festering festers
festivals festive festivities festivity festoon festooned festooning
festoons fetched fetches fetching fete feted fetes fetid feting fetish
fetishes fetter fettered fettering fetters feud feudal feudalism
feuded feuding feuds feverish feverishly fevers fez fezzes fiance
fiancee fiancees fiances fiasco fiascoes fib fibbed fibber fibbers
fibbing fibs fiche fickle fickler ficklest fictions fictitious fiddler
fiddlers fiddly fidelity fidget fidgeted fidgeting fidgets fidgety
fielded fielding fiend fiendish fiendishly fiends fiercely fierceness
fiercer fiercest fierier fieriest fiery fiesta fiestas fifteens
fifteenth fifteenths fifths fifties fiftieth fiftieths fig figged
figging fighters figment figments figs figurative figuratively
figurehead figureheads filament filaments filch filched filches
filching filler fillet filleted filleting fillets fillies filly
filmier filmiest filmy filth filthier filthiest fin finale finales
finalist finalists finality financed financier financiers financing
finch finches finely finesse finessed finesses finessing fingered
fingering fingernail fingernails fingerprint fingerprinted
fingerprinting fingerprints fingertip fingertips finickier finickiest
finicky finner fins fir firearm firearms firecracker firecrackers
firefighter firefighters fireflies firefly fireman firemen fireplace
fireplaces fireproof fireproofed fireproofing fireproofs fireside
firesides firewood firmed firmer firmest firming firmness firmware
firring firs firsthand firsts fiscals fisher fisheries fisherman
fishermen fishery fishier fishiest fishy fission fissure fissures fist
fists fitful fitness fitter fittest fittings fives fixable fixation
fixations fixture fixtures fizz fizzed fizzes fizzing fizzle fizzled
fizzles fizzling flabbier flabbiest flabby flagpole flagpoles flagrant
flagrantly flagship flagships flagstone flagstones flail flailed
flailing flails flair flairs flak flake flaked flakes flakier flakiest
flaking flaky flamboyance flamboyant flamboyantly flamed flaming
flamingo flamingos flammable flammables flank flanked flanking flanks
flannel flannels flap flapjack flapjacks flapped flapping flaps flare
flared flares flaring flashback flashbacks flasher flashest flashier
flashiest flashlight flashlights flashy flask flasks flatly flatness
flats flatted flatten flattened flattening flattens flatter flattered
flatterer flatterers flattering flatters flattery flattest flatting
flaunt flaunted flaunting flaunts flawless flawlessly flea fleas fleck
flecked flecking flecks fled fledged fledgling fledglings flee fleece
fleeced fleeces fleecier fleeciest fleecing fleecy fleeing flees
fleeted fleeter fleetest fleeting fleets fleshed fleshes fleshier
fleshiest fleshing fleshy flex flexed flexes flexibly flexing flick
flicked flicker flickered flickering flickers flicking flicks flier
fliers fliest flightier flightiest flightless flights flighty flimsier
flimsiest flimsiness flimsy flinch flinched flinches flinching fling
flinging flings flint flints flippant flipper flippers flippest flirt
flirtation flirtations flirtatious flirted flirting flirts flit flits
flitted flitting flock flocked flocking flocks flog flogged flogging
flogs flooder floodlight floodlighted floodlighting floodlights
floored flooring flop flopped floppier floppies floppiest flopping
flops flora floral floras florid florist florists floss flossed
flosses flossing flotilla flotillas flounce flounced flounces
flouncing flounder floundered floundering flounders floured flouring
flourish flourished flourishes flourishing flours flout flouted
flouting flouts flowered flowerier floweriest flowering flowery flu
fluctuate fluctuated fluctuates fluctuating flue fluency fluently
fluents flues fluff fluffed fluffier fluffiest fluffing fluffs fluids
fluke fluked flukes fluking flung flunk flunked flunkies flunking
flunks flunky fluorescent flurried flurries flurry flurrying flusher
flushest fluster flustered flustering flusters fluted flutes fluting
flutist flutists flutter fluttered fluttering flutters flux fluxed
fluxes fluxing flyover flyovers foal foaled foaling foals foamed
foamier foamiest foaming foams foamy focal focused focuses focusing
fodder fodders foe foes fogged foggier foggiest fogging foggy foghorn
foghorns fogs foible foibles foil foiled foiling foils foist foisted
foisting foists foliage folklore folksier folksiest folksy follies
followings folly foment fomented fomenting foments fonded fonder
fondest fonding fondle fondled fondles fondling fondly fondness fonds
foodstuff foodstuffs foolhardier foolhardiest foolhardy foolisher
foolishest foolishly foolishness foolproof footage footballs footed
foothill foothills foothold footholds footing footings footlights
footnoted footnoting footpath footpaths footprint footprints foots
footstep footsteps footstool footstools footwear footwork forage
foraged forages foraging foray forayed foraying forays forbear
forbearance forbearing forbears forbiddings forbore forborne forceful
forcefully forceps forcible ford forded fording fords fore forearm
forearmed forearming forearms forebode foreboded forebodes foreboding
forebodings forefather forefathers forefinger forefingers forefront
forefronts foregoing foregoings foregone foreground foregrounded
foregrounding foregrounds forehead foreheads foreleg forelegs foreman
foremen foremost forensic forensics foreplay forerunner forerunners
fores foresaw foresee foreseeing foreseen foresees foreshadow
foreshadowed foreshadowing foreshadows foresight foreskin foreskins
forestall forestalled forestalling forestalls forested foresting
forestry foretaste foretasted foretastes foretasting foretell
foretelling foretells forethought foretold forewarn forewarned
forewarning forewarns forewent foreword forewords forfeit forfeited
forfeiting forfeits forge forged forger forgeries forgers forgery
forges forgetful forgetfulness forging forgiveness forgo forgoes
forgoing forgone forked forking forks forlorn forlorner forlornest
formalities formality formals formations formative formidable formless
formulate formulated formulates formulating formulations fornication
forsake forsaken forsakes forsaking forsook forswear forswearing
forswears forswore forsworn fort forte fortes forthright forthwith
forties fortieth fortieths fortification fortifications fortified
fortifies fortify fortifying fortitude fortnightly fortress fortressed
fortresses fortressing forts fortuitous fortunes forums forwarder
forwardest forwent fossils foster fostered fostering fosters fouled
fouler foulest fouling fouls founder foundered foundering founders
foundling foundries foundry fount fountained fountaining fountains
founts fours fourteens fourteenth fourteenths fourths fowl fowled
fowling fowls fox foxed foxes foxier foxiest foxing foxy foyer foyers
fracas fracases fractal fractional fracture fractured fractures
fracturing fragility fragmentary fragmentation fragmented fragmenting
fragrance fragrances fragrant frail frailer frailest frailties frailty
framed frameworks framing franc franchise franchised franchises
franchising francs franked franker frankest frankfurter frankfurters
franking franks frantically fraternal fraternities fraternity frauds
fraudulent fraudulently fraught fraughted fraughting fraughts fray
frayed fraying frays freaked freaking freckle freckled freckles
freckling freedoms freehand freelance freer freest freezer freezers
freight freighted freighter freighters freighting freights frenzied
frenzies frenzy frequented frequenter frequentest frequenting
frequents freshen freshened freshening freshens fresher freshest
freshly freshman freshmen freshness freshwater fret fretful fretfully
frets fretted fretting friar friars friendlier friendlies friendliest
friendliness friendships frieze friezed friezes friezing frigate
frigates fright frighted frighteningly frightful frightfully frighting
frights frigid frigidity frill frillier frillies frilliest frills
frilly fringed fringes fringing frisk frisked friskier friskiest
frisking frisks frisky fritter frittered frittering fritters
frivolities frivolity frizzier frizziest frizzy fro frock frocks
frolic frolicked frolicking frolics frond fronds frontage frontages
frontal fronted frontier frontiers fronting fronts frost frostbit
frostbite frostbites frostbiting frostbitten frosted frostier
frostiest frosting frosts frosty froth frothed frothier frothiest
frothing froths frothy frugal frugality frugally fruited fruitful
fruitfuller fruitfullest fruitier fruitiest fruiting fruition
fruitless fruitlessly fruity frustrations fudged fudges fudging fuels
fugitive fugitives fulcrum fulcrums fulled fulling fullness fulls
fumble fumbled fumbles fumbling fumed fumigate fumigated fumigates
fumigating fumigation fuming functionally fundamentalism
fundamentalists fundamentals funerals fungi fungicide fungicides
fungus funnel funnels funner funnest funnies funnily furies furious
furiously furl furled furling furlong furlongs furlough furloughed
furloughing furloughs furls furnace furnaces furnish furnished
furnishes furnishing furnishings furor furors furred furrier furriest
furring furrow furrowed furrowing furrows furs furthered furthering
furthers furtive furtively furtiveness fury fused fuselage fuselages
fuses fusing fussed fusses fussier fussiest fussing futility futures
futuristic fuzz fuzzed fuzzes fuzzier fuzziest fuzzing gab gabbed
gabbing gable gabled gables gabling gabs gadget gadgets gag gagged
gagging gags gaiety gaily gainful gait gaits gal gala galas galaxies
gale gales gall gallant gallantry gallants galled galleried galleries
gallery gallerying galley galleys galling gallivant gallivanted
gallivanting gallivants gallon gallons gallop galloped galloping
gallops gallows galls galore galores gals gambit gambits gamble
gambled gambler gamblers gambles gambling gamed gamer gamest gaming
gamma gamut gamuts gander ganders ganged ganging gangling gangplank
gangplanks gangrene gangrened gangrenes gangrening gangs gangster
gangsters gangway gangways gaol gape gaped gapes gaping garaged
garages garaging garb garbed garbing garbs gardened gardener gardeners
gardenia gardenias gardening gargle gargled gargles gargling gargoyle
gargoyles garish garland garlanded garlanding garlands garlic
garlicked garlicking garlics garment garments garnet garnets garnish
garnished garnishes garnishing garret garrets garrison garrisoned
garrisoning garrisons garrulous garter garters gaseous gases gash
gashed gashes gashing gasket gaskets gasped gasping gasps gassed
gassing gastric gated gateways gatherings gating gaudier gaudiest
gaudy gaunt gaunted gaunter gauntest gaunting gauntlet gauntlets
gaunts gauze gavel gavels gawk gawked gawkier gawkies gawkiest gawking
gawks gawky gayer gayest gays gaze gazed gazelle gazelles gazes
gazette gazetted gazettes gazetting gazing gee geed geeing gees geese
gel gelatin geld gelded gelding geldings gelds gem gems genders
genealogical genealogies genealogy genera generality generals generics
generosities generosity generously geneses genesis geneticist
geneticists genial genially genie genies genii genital genitals
geniuses genres gent gentile gentiles gentility gentled gentleness
gentler gentles gentlest gentling gentries gentry gents genuineness
genus geographic geographically geographies geological geologies
geologist geologists geometric geometries geranium geraniums gerbil
gerbils germ germicide germicides germinate germinated germinates
germinating germination germs gestation gesticulate gesticulated
gesticulates gesticulating gestured gestures gesturing getaway
getaways geyser geysers ghastlier ghastliest ghetto ghettos ghosted
ghosting ghostlier ghostliest ghostly ghosts ghoul ghouls giants
gibber gibbered gibbering gibbers gibe gibed gibes gibing giddier
giddiest giddiness giddy gifted gifting gigantic gigged gigging giggle
giggled giggles giggling gigs gild gilded gilding gilds gill gills
gilt gilts gimme gimmick gimmicks ginger gingerbread gingerly gingham
ginned ginning gins giraffe giraffes girder girders girdle girdled
girdles girdling girlfriends girlhood girlhoods girlish girth girths
gist givens gizzard gizzards glacial glacier glaciers gladden
gladdened gladdening gladdens gladder gladdest glade glades gladiator
gladiators gladlier gladliest glads glamorous glanced glances glancing
gland glands glandular glare glared glares glaring glassed glassier
glassiest glassing glassware glassy glaze glazed glazes glazing gleam
gleamed gleaming gleams glee glen glens glib glibber glibbest glibly
glide glided glider gliders glides gliding glimmer glimmered
glimmering glimmers glimpse glimpsed glimpses glimpsing glint glinted
glinting glints glisten glistened glistening glistens glitter
glittered glittering glitters gloat gloated gloating gloats globe
globes globular globule globules gloom gloomier gloomiest gloomy
gloried glories glorification glorified glorifies glorify glorifying
gloriously glorying gloss glossaries glossary glossed glosses glossier
glossies glossiest glossing gloved gloving glower glowered glowering
glowers glucose glued glues gluing glum glummer glummest glums glut
gluts glutted glutting glutton gluttons gluttony glycerin gnarl
gnarled gnarling gnarls gnash gnashed gnashes gnashing gnat gnats gnaw
gnawed gnawing gnaws gnomes gnu gnus goad goaded goading goads goaled
goalie goalies goaling goalkeeper goalkeepers goatee goatees goats gob
gobbed gobbing gobble gobbled gobbles gobbling goblet goblets goblin
goblins gobs godchild godchildren goddess goddesses godfather
godfathers godless godlier godliest godlike godly godmother godmothers
godparent godparents godsend godsends goggle goggles goldener
goldenest golder goldest golds goldsmith goldsmiths golfed golfer
golfers golfing golfs gondola gondolas goner goners gong gonged
gonging gongs gonna goo goodnight goodwill gooey goof goofed goofier
goofiest goofing goofs goofy gooier gooiest goon goons goose goosed
gooses goosing gopher gophers gore gored gores gorge gorged gorges
gorging gorier goriest gorilla gorillas goring gory gos gosh gosling
gospels gossamer gossips gouge gouged gouges gouging goulash goulashes
gourd gourds gourmet gourmets gout governess governesses governmental
governors gowned gowning gowns grabber graced graceful gracefuller
gracefullest gracefully graceless graces gracing gracious graciously
graciousness gradation gradations graded grader gradient gradients
grading graduations graft grafted grafting grafts grains gram grammars
grammatically gramophone grams grandchild grandchildren granddaughter
granddaughters grander grandest grandeur grandfathered grandfathering
grandfathers grandiose grandly grandmothers grandparent grandparents
grandson grandsons grandstand grandstanded grandstanding grandstands
granite grannies granny granola granular granule granules grape graped
grapefruit grapefruits grapes grapevine grapevines graphed graphically
graphing graphite graping grapple grappled grapples grappling grasped
grasping grasps grassed grasses grasshopper grasshoppers grassier
grassiest grassing grassy grate grated gratefuller gratefullest grater
graters grates gratification gratifications gratified gratifies
gratify gratifying grating gratings gratitude gratuities gratuity
graved gravel gravels gravely graven graver graves gravest gravestone
gravestones graveyard graveyards gravies graving gravitate gravitated
gravitates gravitating gravitation gravy graze grazed grazes grazing
grease greased greases greasier greasiest greasing greatness greats
greedier greediest greedily greediness greenback greenbacks greened
greener greenery greenest greenhorn greenhorns greenhouse greenhouses
greening greens greet greeted greeting greetings greets gregarious
gremlin gremlins grenade grenades greyhound greyhounds gridded griddle
griddles griding gridiron gridirons grids griefs grievance grievances
grieve grieved grieves grieving grievous grill grille grilled grilles
grilling grills grimace grimaced grimaces grimacing grime grimed
grimes grimier grimiest griming grimly grimmer grimmest grimy grin
grinder grinders grindstone grindstones grinned grinning grins gripe
griped gripes griping gripped gripping grislier grisliest grisly
gristle grit grits gritted grittier grittiest gritting gritty grizzled
grizzlier grizzlies grizzliest grizzly groaned groaning groans grocer
groceries grocers grocery groggier groggiest groggy groin groins groom
groomed grooming grooms groove grooved grooves groovier grooviest
grooving groovy grope groped gropes groping grossed grosser grossest
grossing grotesque grotesques grotto grottoes grouch grouched grouches
grouchier grouchiest grouching grouchy grounded grounding groundless
groundwork grouper groupers groupings grouse groused grouses grousing
grove grovel grovels groves grower growers growl growled growling
growls growths grub grubbed grubbier grubbiest grubbing grubby grubs
grudge grudged grudges grudging gruel gruels gruesome gruesomer
gruesomest gruff gruffed gruffer gruffest gruffing gruffly gruffs
grumble grumbled grumbles grumbling grumpier grumpiest grumpy grunt
grunted grunting grunts guarantor guarantors guardian guardians
gubernatorial guerrilla guerrillas guessable guesswork guested
guesting guffaw guffawed guffawing guffaws guidebook guidebooks guild
guilds guile guiled guiles guiling guillotine guillotined guillotines
guillotining guiltier guiltiest guiltily guiltless guise guises
guitarist guitars gulch gulches gulfs gull gulled gullet gullets
gullies gulling gulls gully gulp gulped gulping gulps gumdrop gumdrops
gummed gummier gummiest gumming gummy gumption gums gunfire gunman
gunmen gunned gunner gunners gunning gunpowder gunshot gunshots
guppies guppy gurgle gurgled gurgles gurgling guru gurus gush gushed
gusher gushers gushes gushing gust gusted gustier gustiest gusting
gusts gusty gutted guttered guttering gutters gutting guyed guying
guzzle guzzled guzzles guzzling gym gymnasium gymnasiums gymnast
gymnastics gymnasts gyms gyrate gyrated gyrates gyrating gyration
gyrations gyroscope gyroscopes habitable habitat habitation
habitations habitats habitual habitually habituals hackney hackneyed
hackneying hackneys hacksaw hacksawed hacksawing hacksaws haddock
haddocks haded hades hading hag haggard hagged hagging haggle haggled
haggles haggling hags hailed hailing hails hailstone hailstones
haircuts haircutting hairdo hairdos hairdresser hairdressers haired
hairier hairiest hairline hairlines hale haled haler hales halest
halfway halibut halibuts haling hallelujah hallelujahs hallmark
hallmarked hallmarking hallmarks hallucination hallucinations hallway
hallways halo haloed haloing halon halos halter haltered haltering
halters halved halving hamburger hamburgers hamlet hamlets hammed
hammered hammering hammers hamming hammock hammocks hamper hampered
hampering hampers hams hamster hamsters hamstring hamstringing
hamstrings hamstrung handbag handbagged handbagging handbags handbooks
handcuff handcuffed handcuffing handcuffs handedness handfuls
handicapped handicapping handicaps handicraft handicrafts handier
handiest handiwork handkerchief handkerchiefs handlebar handlebars
handlers handmade handout handouts handrail handrails handshake
handsome handsomer handsomest handwriting hangar hangars hanger
hangers hangings hangout hangouts hangovers hanker hankered hankering
hankers haphazard hapless happenings harangue harangued harangues
haranguing harass harassed harasses harassing harassment hardier
hardiest hardliner hardliners hardships hardwood hardwoods hare
harebrained hared harem harems hares haring hark harked harking harks
harlot harlots harmed harmfully harming harmlessly harmonic harmonica
harmonicas harmonies harmonious harms harness harnessed harnesses
harnessing harp harped harping harpist harpists harpoon harpooned
harpooning harpoons harps harpsichord harpsichords harried harries
harrow harrowed harrowing harrows harry harrying harsher harshest
harshly harshness hart harts harvest harvested harvester harvesters
harvesting harvests hashed hashes hashing hassled hassles hassling
haste hasted hastened hastening hastens hastes hastier hastiest
hastily hasting hatch hatched hatches hatchet hatchets hatching
hateful hatefully hatreds hatted hatting haughtier haughtiest
haughtily haughtiness haughty haul hauled hauling hauls haunt haunted
haunting haunts haven havens haves hawk hawked hawking hawks hayed
haying hays haystack haystacks haywire hazarded hazarding hazardous
haze hazed hazel hazels hazes hazier haziest hazing headaches
headfirst headier headiest headings headland headlands headlight
headlights headlined headlining headlong headmaster headphone
headphones headquarter headquarters headrest headrests headroom
headstone headstones headstrong headway heady heal healed healer
healers healing heals healthful healthier healthiest heaped heaping
heaps hearings hearsay hearse hearsed hearses hearsing heartache
heartaches heartbeat heartbeats heartbreak heartbreaking heartbreaks
heartbroke heartbroken heartburn hearted hearten heartened heartening
heartens heartfelt hearth hearths heartier hearties heartiest hearting
heartless hearty heatedly heater heaters heath heathen heathens
heather heave heaved heavenlier heavenliest heavenly heaves heavies
heaviness heaving heavyweight heavyweights heckle heckled heckler
hecklers heckles heckling hectic hectics hedge hedged hedgehog
hedgehogs hedges hedging heed heeded heeding heedless heeds heeled
heeling heftier heftiest hefty heifer heifers heighten heightened
heightening heightens heinous heir heirloom heirlooms heirs
helicoptered helicoptering helicopters heliport heliports helium
helling hellish hellos hells helm helmeted helmeting helmets helms
helper helpers helpfully helpings helplessly hem hemisphere
hemispheres hemlock hemlocks hemmed hemming hemophilia hemorrhage
hemorrhaged hemorrhages hemorrhaging hemp hems hen hences henchman
henchmen hens hepatitis herald heralded heralding heralds herb
herbivorous herbs herded herding herds hereabouts hereafter hereafters
hereditary heredity herein heresies heretic heretical heretics
herewith heritages hermaphrodite hermit hermits hernia hernias heroine
heroins heroism heron herons herpes hers hes hesitancy hesitant
hesitated hesitates hesitating hesitation hesitations heterogeneous
heterosexuality heterosexuals heuristic hew hewed hewing hews hexagon
hexagonal hexagons heyday heydays hi hiatus hiatuses hibernate
hibernated hibernates hibernating hibernation hick hickories hickory
hicks hideaway hideaways hierarchies hieroglyphic hieroglyphics
highbrow highbrows highland highlands highs hijack hijacked hijacking
hijacks hike hiked hiker hikers hikes hiking hilarity hillbillies
hillbilly hillier hilliest hillside hillsides hilly hilt hilts hims
hind hinder hindered hindering hinders hindrance hindrances hinds
hinge hinged hinges hinging hinterland hinterlands hipped hipper
hippest hippie hippier hippies hippiest hipping hippopotamus
hippopotamuses hips hiss hissed hisses hissing histogram histories
hitch hitched hitches hitchhike hitchhiked hitchhiker hitchhikers
hitchhikes hitchhiking hitching hither hive hived hives hiving hoard
hoarded hoarder hoarders hoarding hoards hoarse hoarseness hoarser
hoarsest hoax hoaxed hoaxes hoaxing hobbies hobbit hobble hobbled
hobbles hobbling hobbyhorse hobbyhorses hobgoblin hobgoblins hobnob
hobnobbed hobnobbing hobnobs hobo hoboed hoboing hobos hock hocked
hockey hocking hocks hodgepodge hodgepodges hoe hoed hoeing hoes
hogged hogging hogs hoist hoisted hoisting hoists holdup holdups holed
holidayed holidaying holier holiest holiness holing holler hollered
hollering hollers hollies hollowed hollower hollowest hollowing
hollows holly holocaust holocausts holster holstered holstering
holsters homage homaged homages homaging homed homeland homelands
homeless homelier homeliest homely homemade homesick homesickness
homespun homestead homesteaded homesteading homesteads homeward
homework homey homeys homicidal homicide homicides homier homiest
homing homogeneous homonym homonyms homophobic homosexuals hone honed
honer hones honester honestest honeycomb honeycombed honeycombing
honeycombs honeyed honeying honeymoon honeymooned honeymooning
honeymoons honeys honeysuckle honeysuckles honing honk honked honking
honks honoraries hood hooded hooding hoodlum hoodlums hoods hoodwink
hoodwinked hoodwinking hoodwinks hoof hoofed hoofing hoofs hoop hooped
hooping hoops hooray hoorayed hooraying hoorays hoot hooted hooter
hooting hoots hooves hop hopefuls hopped hopper hopping hops hopscotch
hopscotched hopscotches hopscotching horded hording horizons
horizontals hormone hormones horned hornet hornets hornier horniest
horns horny horoscope horoscopes horribles horrors horseback horsed
horseman horseplay horsepower horseradish horseradishes horseshoe
horseshoed horseshoeing horseshoes horsing horticultural horticulture
hose hosed hoses hosiery hosing hospitable hospitality hostage
hostages hosted hostel hosteled hosteling hostels hostess hostessed
hostesses hostessing hostiles hostility hosting hotbed hotbeds hotels
hothead hotheaded hotheads hotly hotter hottest hound hounded hounding
hounds hourglass hourglasses hourlies hourly houseboat houseboats
households housekeeper housekeepers housewarming housewarmings
housewife housewives housework housings hove hovel hovels hover
hovered hovering hovers howl howled howling howls hows hub hubbub
hubbubs hubs huddle huddled huddles huddling hue hued hues huff huffed
huffier huffiest huffing huffs huffy hug huger hugest hugged hugger
hugging hugs hulk hulking hulks hull hullabaloo hullabaloos hulled
hulling hulls humanely humaner humanest humanism humanist humanitarian
humanitarians humanities humanly humbled humbler humbles humblest
humbling humbug humdrum humid humidified humidifies humidify
humidifying humidity humiliate humiliated humiliates humiliating
humiliation humiliations humility hummed humming hummingbird
hummingbirds humorist humorists humorously hump humped humping humps
hums hunch hunchback hunchbacks hunched hunches hunching hundredth
hundredths hunger hungered hungering hungers hungrier hungriest
hungrily hunk hunks hunter hunters hurdle hurdled hurdles hurdling
hurl hurled hurling hurls hurricane hurricanes hurried hurriedly
hurries hurrying hurtful hurtle hurtled hurtles hurtling husbanded
husbanding husbands hush hushed hushes hushing husk husked huskier
huskies huskiest huskily huskiness husking husks husky hustle hustled
hustler hustlers hustles hustling hutch hutched hutches hutching huts
hyacinth hyacinths hybrid hybrids hydrant hydrants hydraulic
hydraulicked hydraulicking hydraulics hydroelectric hydroplane
hydroplaned hydroplanes hydroplaning hyena hyenas hygiene hygienic
hygienics hymn hymnal hymnals hymned hymning hymns hyperbole
hypertension hyphenate hyphenated hyphenates hyphenating hyphenation
hyphened hyphening hyphens hypnosis hypnotic hypnotics hypnotism
hypnotist hypnotists hypochondria hypochondriac hypochondriacs
hypocrisies hypocrites hypotenuse hypotenuses hypotheses hysteria
hysteric hysterically hysterics iceberg icebergs icebreaker
icebreakers iced ices icicle icicles icier iciest icing icings icy
idealist idealists identifiable identities ideologically ideologies
idiocies idiocy idiomatic idioms idiosyncrasies idiosyncrasy idled
idler idles idlest idling idly idol idols idyllic ifs igloo igloos
ignite ignited ignites igniting ignition ignitions ignorants iguana
iguanas ilk illegals illegible illegibly illegitimate illicit
illiteracy illiterates illnesses ills illuminate illuminated
illuminates illuminating illumination illuminations illusions illusory
illustrative illustrator illustrators illustrious imaged imagery
imaginable imaginations imaging imbalances imbecile imbeciles imitate
imitated imitates imitating imitation imitations imitative imitator
imitators immaculate immaculately immaterial immatures immaturity
immeasurable immeasurably immenser immensest immensities immensity
immerse immersed immerses immersing immersion immersions immigrant
immigrants immigrate immigrated immigrates immigrating immigration
imminently immobile immoralities immorality immortality immortals
immovable immunity imp impacted impacting impacts impairment
impairments impale impaled impales impaling impart imparted impartial
impartiality impartially imparting imparts impassable impasse impasses
impassioned impassive impatience impatiences impatient impatiently
impeach impeached impeaches impeaching impeccable impeccables
impedance impede impeded impedes impediment impediments impeding impel
impelled impelling impels impenetrable imperatives imperceptible
imperceptibly imperfection imperfections imperfectly imperfects
imperialism imperialist imperials imperil imperils impersonally
impersonate impersonated impersonates impersonating impersonation
impersonations impertinence impertinent impertinents impervious
impetuous impetuously impetus impetuses impinge impinged impinges
impinging impish implacable implant implanted implanting implants
implementable implementer implicate implicated implicates implicating
implore implored implores imploring impolite importation importations
imposition impositions impossibilities impossibility impossibles
impossibly impostor impostors impotence impotent impound impounded
impounding impounds impoverish impoverished impoverishes impoverishing
imprecise impregnable impregnate impregnated impregnates impregnating
impressionable impressively imprint imprinted imprinting imprints
imprisonment imprisonments improbabilities improbability improbably
impromptu impromptus improper improperly improprieties impropriety
improvisation improvisations improvise improvised improvises
improvising imps impudence impudent impulsed impulses impulsing
impulsive impulsively impunity impure impurer impurest impurities
impurity inabilities inaction inactive inactivity inadequacies
inadequacy inadequately inadequates inadmissible inadvertent
inadvisable inalienable inaner inanest inanimate inapplicable
inarticulate inarticulates inasmuch inaudible inaugural inaugurals
inaugurate inaugurated inaugurates inaugurating inauguration
inaugurations inauspicious inborn inbred inbreds inbreed inbreeding
inbreeds inbuilt incalculable incandescence incandescent incandescents
incantation incantations incapacitate incapacitated incapacitates
incapacitating incapacity incarcerate incarcerated incarcerates
incarcerating incarceration incarcerations incarnate incarnated
incarnates incarnating incarnations incendiaries incendiary incense
incensed incenses incensing incentives inception inceptions incessant
incessantly incest incestuous inched inching incidences incidentals
incinerate incinerated incinerates incinerating incinerator
incinerators incision incisions incisive incisor incisors incite
incited incitement incitements incites inciting inclinations
inclusions incognito incognitos incoherence incoherently incomes
incomparable incompatibilities incompatibility incompatibles
incompatibly incompetents inconceivable inconclusive incongruities
incongruity incongruous inconsequential inconsiderable inconsiderate
inconsolable inconspicuous inconveniently incorporation incorrigible
incredulity incredulous incremental incremented incrementing
increments incriminate incriminated incriminates incriminating
incubate incubated incubates incubating incubation incubator
incubators incumbent incumbents incurable incurables indebted
indecencies indecency indecent indecenter indecentest indecision
indecisive indeeds indefinable indefinites indelible indelibly
indelicate indentation indentations indented indenting indents
independents indescribable indescribables indestructible indicatives
indict indicted indicting indictments indicts indifference indifferent
indigenous indigestible indigestibles indigestion indignant
indignantly indignation indignities indignity indigo indiscreet
indiscretion indiscretions indiscriminate indiscriminately
indispensable indispensables indisposed indisputable indistinct
individualism individualist individualists individuality indivisible
indoctrinate indoctrinated indoctrinates indoctrinating indoctrination
indolence indolent indomitable indoor indoors inducement inducements
induct inducted inducting inductions inducts indulgence indulgences
indulgent industrialist industrialists industrious inedible
ineffectual inefficiencies inefficiently inefficients inelegant
ineligible ineligibles inept ineptitude inequalities inert inertial
inerts inescapable inexact inexcusable inexhaustible inexorable
inexorably inexpensive inexperience inexplicable inexplicably
inextricably infamies infamy infancy infantries infantry infants
infatuation infatuations infections infectious infelicities inferences
inferiors inferno infernos inferred inferring infers infertile infest
infestation infestations infested infesting infests infidel
infidelities infidelity infidels infield infields infiltrate
infiltrated infiltrates infiltrating infiltration infinitesimal
infinitesimals infinities infinitive infinitives infirm infirmaries
infirmary infirmities infirmity infix inflame inflamed inflames
inflaming inflammable inflammation inflammations inflammatory
inflatable inflate inflated inflates inflating inflationary inflection
inflections inflicted inflicting inflicts influenza influx influxes
informality informant informants informational informer informers
infraction infractions infrared infrequently infringe infringed
infringements infringes infringing infuriate infuriated infuriates
infuriating infuse infused infuses infusing infusion infusions
ingeniously ingenuity ingest ingested ingesting ingests ingrain
ingrained ingraining ingrains ingratiate ingratiated ingratiates
ingratiating ingratitude inhale inhaled inhaler inhalers inhales
inhaling inheritances inhibitions inhospitable inhuman inhumane
inhumanities inhumanity initiation initiations initiatives initiator
initiators injected injecting injection injections injects injunction
injunctions injurious injustices inked inkier inkiest inking inkling
inks inky inlaid inland inlay inlaying inlays inlet inlets inmate
inmates inn innards innate inned innermost inners inning innings
innkeeper innkeepers innocenter innocentest innocently innocents
innocuous innovations inns innuendo innuendoed innuendoing innuendos
innumerable inoculate inoculated inoculates inoculating inoculation
inoculations inoffensive inoperative inopportune inordinate inquest
inquests inquisition inquisitions inquisitive ins insanely insaner
insanest insanity insatiable inscribe inscribed inscribes inscribing
inscription inscriptions inscrutable insecticide insecticides
insecurities insecurity insensitivity inseparable inseparables
insertions insider insiders insides insights insignia insignias
insignificance insincere insincerely insincerity insinuate insinuated
insinuates insinuating insinuation insinuations insipid insistent
insolence insolent insoluble insolubles insolvency insolvent
insolvents insomnia inspections inspector inspectors inspirations
instability instanced instancing instantaneous instantaneously
instants instep insteps instigate instigated instigates instigating
instigation instill instilled instilling instills instinctive
instincts instituted institutes instituting institutional instructive
instructor instructors instrumentals instrumented instrumenting
insubordinate insubordination insubstantial insufferable
insufficiently insular insulate insulated insulates insulating
insulation insulator insulators insulin insurances insure insured
insurer insurers insures insurgent insurgents insuring insurmountable
insurrection insurrections intakes intangible intangibles integrals
intellects intellectually intellectuals intelligently intelligible
intelligibly intenser intensest intensified intensifies intensify
intensifying intensities intensives intents intercede interceded
intercedes interceding intercept intercepted intercepting interception
interceptions intercepts interchange interchangeable interchanged
interchanges interchanging intercom intercoms interconnect
intercontinental interdependence interdependent interiors interject
interjected interjecting interjection interjections interjects
interlock interlocked interlocking interlocks interloper interlopers
interlude interluded interludes interluding intermarriage
intermarriages intermarried intermarries intermarry intermarrying
intermediaries intermediary intermediates interment interments
interminable interminably intermingle intermingled intermingles
intermingling intermission intermissions intermittently intern
internationally internationals interned interning interns
interplanetary interplay interpolation interpose interposed interposes
interposing interpreters interracial interred interring interrogated
interrogates interrogating interrogation interrogations interrogator
interrogators inters intersect intersected intersecting intersects
intersperse interspersed intersperses interspersing interstate
interstates interstellar intertwine intertwined intertwines
intertwining interventions interviewer interviewers interweave
interweaves interweaving interwove interwoven intestinal intestine
intestines inti intimacies intimacy intimated intimately intimates
intimating intimation intimations intimidate intimidated intimidates
intimidating intimidation intolerable intolerably intolerant
intonation intonations intoxicate intoxicated intoxicates intoxicating
intoxication intractable intramural intransitive intransitives
intravenous intravenouses intrepid intricacies intricacy intricate
intrigue intrigued intrigues intriguing introductions introspective
introvert introverts intrude intruded intruder intruders intrudes
intruding intrusion intrusions intrusive intrusives intuition
intuitions intuitively inundate inundated inundates inundating
inundation inundations invader invaders invalidated invalidates
invalidating invalided invaliding invalids invariable invariables
invariant invasions invective inventive inventoried inventories
inventors inventory inventorying inversely inverses inversion
inversions invertebrate invertebrates invested investigator
investigators investing investments investor investors invests
inveterate invigorate invigorated invigorates invigorating invincible
invisibility invisibly invitations invocation invocations invoice
invoiced invoices invoicing involuntarily involuntary involvements
invulnerable inward inwardly inwards iodine ions iota iotas irascible
irater iratest ire ired ires iridescence iridescent iring iris irises
irk irked irking irks ironed ironically ironies ironing irons
irradiate irradiated irradiates irradiating irrationally irrationals
irreconcilable irrefutable irregular irregularities irregularity
irregulars irrelevance irrelevances irreparable irreplaceable
irrepressible irreproachable irresistible irresponsibility
irretrievable irretrievably irreverence irreverent irreversible
irrevocable irrevocably irrigate irrigated irrigates irrigating
irrigation irritability irritable irritably irritant irritants
irritations islander islanders isle isles isthmus isthmuses italic
italics itch itched itches itchier itchiest itching itchy iterate
iteration iterations iterative itinerant itinerants itineraries
itinerary ivies ivories ivory ivy jab jabbed jabber jabbered jabbering
jabbers jabbing jabs jackal jackals jackass jackasses jackdaw jacked
jacking jackknife jackknifed jackknifes jackknifing jackknives jackpot
jackpots jacks jade jaded jades jading jagged jaggeder jaggedest
jaguar jaguars jailed jailer jailers jailing jails jalopies jalopy
jamb jambed jambing jamboree jamborees jambs jangle jangled jangles
jangling janitor janitors jar jarred jarring jars jaundice jaundiced
jaundices jaundicing jaunt jaunted jauntier jaunties jauntiest
jauntily jaunting jaunts jaunty javelin javelins jaw jawbone jawboned
jawbones jawboning jawed jawing jaws jay jays jaywalk jaywalked
jaywalker jaywalkers jaywalking jaywalks jazzed jazzes jazzing
jealousies jealously jealousy jeer jeered jeering jeers jell jelled
jellied jelling jells jellyfish jellyfishes jellying jeopardy jerked
jerkier jerkiest jerking jerks jerky jersey jerseys jested jester
jesters jesting jests jets jetted jetties jetting jettison jettisoned
jettisoning jettisons jetty jewel jewelries jewelry jewels jiffies
jiffy jig jigged jigging jiggle jiggled jiggles jiggling jigs jigsaw
jigsawed jigsawing jigsaws jilt jilted jilting jilts jingle jingled
jingles jingling jinx jinxed jinxes jinxing jitterier jitteriest
jitters jittery jobbed jobbing jockey jockeyed jockeying jockeys
jocular jog jogged jogger joggers jogging jogs jointed jointing joker
jokers jollied jollier jollies jolliest jollying jolt jolted jolting
jolts jostle jostled jostles jostling jot jots jotted jotting
journalism journeyed journeying journeys jovial jovially joyed joyful
joyfuller joyfullest joyfully joying joyous joyously joys joystick
jubilant jubilation jubilee jubilees judicial judicially judiciaries
judiciary judicious judiciously judo jug jugged juggernaut jugging
juggle juggled juggler jugglers juggles juggling jugs jugular jugulars
juiced juices juicier juiciest juicing juicy jumble jumbled jumbles
jumbling jumbo jumbos jumper jumpers jumpier jumpiest jumpy junctions
juncture junctures jungles juniors juniper junipers junked junket
junketed junketing junkets junkie junkier junkies junkiest junking
junks junta juntas juries jurisdiction juror jurors juster justest
justices justifications justly jut jute juts jutted jutting juveniles
juxtapose juxtaposed juxtaposes juxtaposing juxtaposition
juxtapositions kaleidoscope kaleidoscopes kangaroo kangarooed
kangarooing kangaroos karat karate karats kayak kayaked kayaking
kayaks keel keeled keeling keels keened keener keenest keening keenly
keens keepers keepsake keepsakes keg kegged kegging kegs kelp kennel
kennels kerchief kerchiefed kerchiefing kerchiefs kernels kerosene
ketchup kettles keyboarded keyboarding keyhole keyholes keynote
keynoted keynotes keynoting keystone keystones khaki khakis kickback
kickbacks kickoff kickoffs kidnapper kidnappers kidneys killers
killings kiln kilned kilning kilns kilo kilobyte kilobytes kilos
kilowatt kilowatts kilt kilts kimono kimonos kin kinda kinder
kindergarten kindergartens kindest kindle kindled kindles kindlier
kindliest kindling kindnesses kindred kinfolk kingdoms kingfisher
kingfishers kink kinked kinkier kinkiest kinking kinks kinky kins
kinship kiosk kiosks kipper kissed kisses kissing kitchened
kitchenette kitchenettes kitchening kitchens kite kited kites kiting
kitten kittens kitties kitty kiwi kiwis knack knacked knacker knacking
knacks knapsack knapsacks knead kneaded kneading kneads kneecap
kneecapped kneecapping kneecaps kneed kneeing kneel kneeling kneels
knelt knickers knifed knifes knifing knighted knighthood knighthoods
knighting knights knit knits knitted knitting knives knob knobs
knocker knockers knockout knockouts knoll knolls knot knots knotted
knottier knottiest knotting knotty knowinger knowingest knowingly
knowings knowledgeable knuckle knuckled knuckles knuckling koala
koalas kosher koshered koshering koshers kowtow kowtowed kowtowing
kowtows kudos laboratories laborious laboriously labyrinth labyrinths
lace laced lacerate lacerated lacerates lacerating laceration
lacerations laces lacier laciest lacing lacquer lacquered lacquering
lacquers lacrosse lacy laddered laddering ladders lade laded laden
lades lading ladle ladled ladles ladling lads ladybug ladybugs
ladylike laggard laggards lagged lagging lagoon lagoons lags lair
lairs laked lakes laking lamb lambda lambed lambing lambs lame lamed
lament lamentable lamentation lamentations lamented lamenting laments
lamer lames lamest laming lampoon lampooned lampooning lampoons lamps
lance lanced lances lancing lander landings landladies landlady
landlocked landlords landmark landmarks landowner landowners
landscaped landscapes landscaping landslid landslide landslides
landsliding lanes languid languish languished languishes languishing
languor languorous languors lankier lankiest lanky lantern lanterns
lap lapel lapels lapped lapping laps lapse lapsed lapses lapsing
larcenies larceny lard larded larding lards larges larked larking
larks larva larvae larynges laryngitis larynx lascivious lash lashed
lashes lashing lass lasses lastly latch latched latches latching
latent latents lateral lateraled lateraling laterals latex lath lathe
lathed lather lathered lathering lathers lathes lathing laths latitude
latitudes latrine latrines lattice lattices laud laudable lauded
lauding lauds laughable laughingstock laughingstocks launcher
launchers launder laundered laundering launders laundries laundry
laureate laureated laureates laureating laurel laurels lava lavatories
lavender lavendered lavendering lavenders lavish lavished lavisher
lavishes lavishest lavishing lawful lawless lawmaker lawmakers lawns
lawsuit lawsuits lax laxative laxatives laxer laxes laxest laxity
layered layering layman laymen layouts lazied lazier lazies laziest
lazying leaden leafed leafier leafiest leafing leafleted leafleting
leafs leafy leagued leagues leaguing leakage leakages leaked leaking
leaks leaky leaner leanest leaped leapfrog leapfrogged leapfrogging
leapfrogs leaping leaps lease leased leases leash leashed leashes
leashing leasing leathery lectern lecterns ledge ledger ledgered
ledgering ledgers ledges lee leech leeched leeches leeching leek leeks
leer leered leerier leeriest leering leers leery leeway lefter leftest
leftmost lefts legacies legacy legalistic legality legals legends
legged legging leggings legibility legibly legion legions legislate
legislated legislates legislating legislative legislator legislators
legislature legislatures legitimacy legitimated legitimates
legitimating legume legumes leisurely lemme lemonade lemoned lemoning
lemons lengthen lengthened lengthening lengthens lengthier lengthiest
lengthwise leniency lenients lentil lentils leopard leopards leotard
leotards leper lepers leprosy lesbians lesion lesions lessen lessened
lessening lessens letdown letdowns lethals lethargic lethargy lettered
letterhead letterheads lettering lettuce lettuces letup letups levee
levees lever leverage leveraged leverages leveraging levered levering
levers levied levies levity levy levying lewd lewder lewdest lexical
lexicon lexicons liabilities liaisons liar liars libels liberalism
liberally liberals liberate liberated liberates liberating liberation
libertarian librarians libretto lice lichen lichens lick licked
licking licks licorice licorices lids lieu lieutenant lieutenants
lifeboat lifeboats lifeforms lifeguard lifeguards lifeless lifelike
lifeline lifelines lifelong lifespan lifestyles lifetimes ligament
ligaments ligature ligatures lighten lightened lightening lightens
lighters lighthouse lighthouses lightness lightweight lightweights
likable likelier likeliest likelihoods liken likened likeness
likenesses likening likens liker likest lilac lilacs lilies lilt
lilted lilting lilts lily limber limbered limberer limberest limbering
limbers limbo lime limed limelight limelighted limelighting limelights
limerick limericks limes limestone liming limitless limousine
limousines limp limped limper limpest limping limps linchpin linchpins
lineage lineages linearly linefeed linen liner liners linger lingered
lingerie lingering lingers lingo lingoes linguist linguistics
linguists liniment liniments linings linker linoleum lint lints
lioness lionesses lions lipstick lipsticked lipsticking lipsticks
liquefied liquefies liquefy liquefying liqueur liqueured liqueuring
liqueurs liquidate liquidated liquidates liquidating liquidation
liquidations liquids liquored liquoring liquors lisped lisping lisps
listeners listless litanies litany literacy literals literates lithe
lither lithest lithium litigation litterbug litterbugs littered
littering litters littler littlest liturgical liturgies liturgy
livable livelier liveliest livelihood livelihoods liveliness liven
livened livening livens livers livestock livid livings lizard lizards
llama llamas loadable loaf loafed loafer loafers loafing loafs loam
loaned loaning loath loathe loathed loather loathes loathing loathings
loathsome loaves lob lobbed lobbied lobbies lobbing lobbying lobbyist
lobbyists lobe lobed lobes lobing lobotomy lobs lobster lobstered
lobstering lobsters locale localed locales localing localities
locality locker lockers locket lockets locksmith locksmiths locomotion
locomotive locomotives locust locusts lodged lodger lodgers lodges
lodging lodgings loft lofted loftier loftiest loftiness lofting lofts
lofty logarithm logarithmic logger logician loin loincloth loincloths
loins loiter loitered loiterer loiterers loitering loiters loll lolled
lolling lollipop lollipops lolls lone lonelier loneliest loneliness
lonesome lonesomes longed longevity longhand longing longings longish
longitude longitudes longitudinal longs longshoreman longshoremen
lookout lookouts loom loomed looming looms loon loonier loonies
looniest loons loony looped loopholes looping loosed loosen loosened
loosening loosens looser looses loosest loosing loot looted looting
loots lop lope loped lopes loping lopped lopping lops lopsided lorded
lording lore loser losers lotion lotions lotteries lottery lotus
lotuses loudlier loudliest loudness loudspeaker loudspeakers lounge
lounged lounges lounging louse loused louses lousier lousiest lousing
lovable lovelier lovelies loveliest loveliness lovingly lovings
lowdown lowed lowing lowlier lowliest lowly lows loyaler loyalest
loyalties loyalty lozenge lozenges lubricant lubricants lubricate
lubricated lubricates lubricating lubrication lucid lucked luckier
luckiest lucking lucks lucrative lug lugged lugging lugs lukewarm lull
lullabies lullaby lulled lulling lulls lumber lumbered lumbering
lumberjack lumberjacks lumbers luminaries luminary luminous lumped
lumpier lumpiest lumping lumpy lunacies lunacy lunar lunatics lunched
luncheon luncheoned luncheoning luncheons lunches lunching lunge
lunged lunges lunging lurch lurched lurches lurching lure lured lures
lurid luring luscious lush lusher lushes lushest lusted lustier
lustiest lusting lustrous lusts lusty lute lutes luxuriant luxuriate
luxuriated luxuriates luxuriating luxuries luxurious lye lymph
lymphatic lymphatics lynch lynched lynches lynching lyre lyres lyrical
m ma macabre macaroni mace maced maces machete machetes machined
machining machinist machinists macho macing mackerel mackerels
macroscopic madam madame madams madcap madcaps madden maddened
maddening maddens madder maddest madhouse madhouses madly madman
madmen maelstrom maelstroms magenta maggot maggots magically magician
magicians magicked magicking magics magistrate magistrates magnanimity
magnanimous magnanimously magnate magnates magnesium magnet magnetics
magnetism magnets magnificence magnified magnifies magnify magnifying
magnitudes magnolia magnolias magnum magpie magpies mahoganies
mahogany maid maiden maidens maids mailboxes mailman mailmen maim
maimed maiming maims mainland mainlands mainline mainstay mainstays
maintainability maintainable maintainer maintainers maizes majestic
majestically majesties majesty majored majoring majorities majors
makeshift makeshifts makeup makeups maladies maladjusted malady
malaria malevolence malevolent malformed malice maliced malices
malicing maliciously malign malignancies malignancy malignant
malignants maligned maligning maligns mall mallard mallards malleable
malled mallet mallets malling malls malnutrition malpractice
malpractices malt malted malting maltreat maltreated maltreating
maltreats malts mama mamas mammal mammalian mammals mammoth mammoths
manacle manacled manacles manacling manageable managerial mandated
mandates mandating mandible mandibles mandolin mandolins mane manes
mange manged manger mangers manges mangier mangiest manging mango
mangoes mangrove mangroves mangy manhandle manhandled manhandles
manhandling manhole manholes manhood maniac maniacal maniacs manias
manic manicure manicured manicures manicuring manicurist manicurists
manifest manifestations manifested manifesting manifestoed
manifestoing manifestos manifests manifold manifolded manifolding
manifolds manipulations manlier manliest manliness manly mannequin
mannequins mannerism mannerisms manners mannish manor manors mansion
mansions manslaughter mantel mantelpiece mantelpieces mantels mantle
mantled mantles mantling manure manured manures manuring manuscript
manuscripts maple maples mapper mappings mar marathon marathons marble
marbled marbles marbling marched marcher marches marching mare mares
margarine marigold marigolds marijuana marina marinas marinate
marinated marinates marinating marine mariner mariners marines
marionette marionettes maritime markedly marketable marketplace
marketplaces markings marksman marksmen marmalade maroon marooned
marooning maroons marquee marquees marred marriages marring marrow
marrowed marrowing marrows mars marsh marshal marshaled marshaling
marshals marshes marshier marshiest marshmallow marshmallows marshy
marsupial marsupials mart marted martial martin marting marts martyr
martyrdom martyred martyring martyrs marvel marvels mas mascara
mascaraed mascaraing mascaras mascot mascots masculine masculines mash
mashed mashes mashing masked masking masks masochist masochists mason
masonry masons masquerade masqueraded masquerades masquerading
massacre massacred massacres massacring massage massaged massages
massaging massed massing mast mastered masterful mastering masterly
mastermind masterminded masterminding masterminds masterpiece
masterpieces mastery masticate masticated masticates masticating masts
masturbation mat matador matadors matchbook matchbooks matchless
matchmaker matchmakers mated materialism materialist materialistic
materialists maternal maternity mates math matinee matinees mating
matriarch matriarchal matriarchs matriculate matriculated matriculates
matriculating matriculation matrimonial matrimony matron matronly
matrons mats matte matted mattered mattering mattes matting mattress
mattresses matured maturer matures maturest maturing maturities
maturity maudlin maul mauled mauling mauls mausoleum mausoleums mauve
maverick mavericked mavericking mavericks maxim maximal maxims
maximums maybes mayhem mayonnaise mayors mazes meadow meadows mealed
mealier mealies mealiest mealing mealy meander meandered meandering
meanders meaner meanest measles measlier measliest measly measurable
meats mechanically medal medallion medallions medals meddle meddled
meddler meddlers meddles meddlesome meddling median medias mediate
mediated mediates mediating mediation mediator mediators medically
medicals medicate medicated medicates medicating medication
medications medicinal medicinals medicines mediocre mediocrities
mediocrity meditate meditated meditates meditating meditation
meditations medley medleys meek meeker meekest meekly meekness meeter
megalomaniac megaphone megaphoned megaphones megaphoning megaton
megatons melancholy mellow mellowed mellower mellowest mellowing
mellows melodic melodics melodies melodious melodrama melodramas
melodramatic melodramatics melon melons melted melting melts
memberships membrane membranes memento mementos memo memoir memoirs
memorably memorandum memorandums memorial memorials memos menace
menaced menaces menacing menagerie menageries menial menials menopause
menstrual menstruate menstruated menstruates menstruating menstruation
mentalities menthol mentor mentored mentoring mentors mercantile
mercenaries mercenary merchandise merchandised merchandises
merchandising merchant merchanted merchanting merchants mercies
merciful mercifully merciless mercilessly mered merer meres merest
merger mergers meridian meridians mering meringue meringues merited
meriting mermaid mermaids merrier merriest merrily merriment mes
mesdames mesh meshed meshes meshing messaged messaging messenger
messengers messier messiest metabolic metabolism metabolisms metallic
metallurgy metals metamorphose metamorphoses metamorphosis
metaphorical metaphorically metaphors metaphysical metaphysics mete
meted meteor meteoric meteorite meteorites meteorologist
meteorologists meteorology meteors metes methodical methodology
meticulous meting metropolis metropolises metropolitan mettle mew
mewed mewing mews mezzanine mezzanines microbe microbes microbiology
microcode microfiche microfilm microfilmed microfilming microfilms
microorganism microorganisms microphone microphones microscope
microscopes microscopic microsecond microseconds microwaved microwaves
microwaving middleman middlemen middles midget midgets midriff
midriffs midst midstream midsummer midway midways midwife midwifed
midwifes midwifing midwives mien miens mightier mightiest migraine
migraines migrant migrants migrations migratory mike miked mikes
miking milder mildest mildew mildewed mildewing mildews mileages
milestone milestones militancy militant militants militarily militate
militated militates militating militia militias milked milker milkier
milkiest milking milkman milkmen milks milky milled miller millers
milliner milliners millinery milling millionaire millionaires
millionth millionths millisecond milliseconds mills mime mimed mimes
mimicked mimicking mimicries mimicry mimics miming mince minced
mincemeat minces mincing mindbogglingly mindedness mindful mindlessly
minefield miner mineral minerals miners mingle mingled mingles
mingling miniature miniatured miniatures miniaturing minibus minibuses
minicomputer minimalism minimally minimals minimums minion minions
ministered ministerial ministering ministries ministry mink minks
minnow minnows minored minoring minors minstrel minstrels minted
minting mints minuet minuets minuscule minuscules minuses minuted
minuter minutest minuting miraculously mirage mirages mire mired mires
miring mirrored mirroring mirth misadventure misadventures
misapprehension misappropriate misappropriated misappropriates
misappropriating misappropriation misappropriations misbehave
misbehaved misbehaves misbehaving miscarriage miscarriages miscarried
miscarries miscarry miscarrying miscellany mischief mischiefed
mischiefing mischiefs mischievous misconception misconceptions
misconduct misconducted misconducting misconducts misconstrue
misconstrued misconstrues misconstruing misdeed misdeeds misdirection
miser miserables miseries miserly misers misfit misfits misfitted
misfitting misfortunes misgiving misgivings mishap mishapped
mishapping mishaps misinform misinformation misinformed misinforming
misinforms misinterpretation misjudge misjudged misjudges misjudging
mislaid mislay mislaying mislays mismanagement mismatch mismatched
mismatches mismatching misnomer misnomered misnomering misnomers
misprinted misprinting misprints misquote misquoted misquotes
misquoting misrepresentation misrepresentations misshapen missionaries
missionary missioned missioning missions missive missives misspell
misspelled misspelling misspellings misspells misted mistier mistiest
misting mistletoe mistress mistresses mistrust mistrusted mistrusting
mistrusts misty mistype mistyping misunderstandings misused misuses
misusing mite mites mitigate mitigated mitigates mitigating mitt
mitten mittens mitts mixer mixers mixtures mnemonics moat moated
moating moats mobbed mobbing mobiles mobility mobs moccasin moccasins
mocked mockeries mockery mocking mockingbird mockingbirds mocks modal
moder moderated moderates moderating moderator moderators moderner
modernest modernity moderns modester modestest modestly modesty
modicum modicums modifier modifiers modular modulate modulated
modulates modulating modulation modulations mohair moist moisten
moistened moistening moistens moister moistest moisture molar molars
molasses moles molest molested molesting molests mollified mollifies
mollify mollifying mollusk mollusks molt molted molten molting molts
mom momentary momentous moms monarchies monarchs monarchy monasteries
monastery monastic monastics monetarism monetary mongoose mongrel
mongrels monies monk monkeyed monkeying monks monogamous monogamy
monogram monogrammed monogramming monograms monolithic monologue
monologued monologues monologuing monopolies monorail monorails
monosyllable monosyllables monotonically monotonous monotony monsoon
monsoons monstrosities monstrosity monstrous monthlies monument
monumental monuments moo moodier moodiest moodily moods moody mooed
mooing moonbeam moonbeams mooned mooning moonlight moonlighted
moonlighting moonlights moor moored mooring moorings moors moos moose
moot mooted mooter mooting moots mop mope moped mopes moping mopped
mopping mops morale moralist moralists moralities moralled moralling
morass morasses moratorium moratoriums morbid morgue morgues morn
morned morns moronic morose morphine morphology morsel morsels
mortally mortar mortared mortaring mortars mortgage mortgaged
mortgages mortgaging mortification mortified mortifies mortify
mortifying mortuaries mortuary mosaic mosaics mosque mosques mosquito
mosquitoes moss mosses mossier mossies mossiest mossy motel motels
moth mothball mothballed mothballing mothballs mothered motherhood
mothering motherly moths motif motifs motioned motioning motionless
motivations motley motleys motlier motliest motorbike motorbikes
motorcade motorcades motorcycle motorcycled motorcycles motorcycling
motored motoring motorist motorists mottoes mound mounded mounding
mounds mountaineer mountaineered mountaineering mountaineers
mountainous mourn mourned mourner mourners mournful mournfuller
mournfullest mourning mourns moused mouses mousier mousiest mousing
mousse moussed mousses moussing mousy mouthed mouthful mouthfuls
mouthing mouthpiece mouthpieces mouths movable movables mover movers
mow mowed mower mowers mowing mows ms mu mucous mucus muddied muddier
muddies muddiest muddy muddying muff muffed muffin muffing muffins
muffle muffled muffler mufflers muffles muffling muffs mugged mugger
muggers muggier muggiest mugginess mugging muggy mulch mulched mulches
mulching mule muled mules muling mull mulled mulling mulls
multinational multinationals multiplications multiplicative
multiplicities multiplicity multiprocessing multitasking multitude
multitudes mumbled mumbles mumbling mummies mummified mummifies
mummify mummifying mumps mums munch munched munches munching mundanes
municipal municipalities municipality municipals mural murals
murderers murderous murkier murkiest murky murmur murmured murmuring
murmurs muscled muscling muscular muse mused muses mush mushed mushes
mushier mushiest mushing mushroom mushroomed mushrooming mushrooms
mushy musically musicals musicked musicking musics musing musk musked
musket muskets musking musks muss mussed mussel mussels musses mussing
mustang mustangs mustard muster mustered mustering musters mustier
mustiest musts musty mutant mutants mutate mutated mutates mutating
mutation mutations mute muted mutely muter mutes mutest mutilate
mutilated mutilates mutilating mutilation mutilations muting mutinied
mutinies mutinous mutiny mutinying mutt mutton mutts muzzle muzzled
muzzles muzzling myopic myopics myriad myriads mys mysteried
mysterying mystical mysticism mystics mystified mystifies mystify
mystifying mythological mythologies nab nabbed nabbing nabs nag nagged
nagging nags naively naiver naives naivest naivete naivety nakeder
nakedest nakedness namesake namesakes nap napalm napalmed napalming
napalms nape napes napkin napkins napped nappies napping nappy naps
narcotic narcotics narrate narrated narrates narrating narration
narrations narratives narrator narrators narrowed narrowing narrowly
narrowness narrows nasal nasals nastily nastiness nationalism
nationalist nationalistic nationalists nationalities nationality
nationals nationwide nativities nativity nattier nattiest natty
naturalist naturalists naturalness naturals natured natures naturing
naughtier naughties naughtiest naughtily naughtiness nausea nauseate
nauseated nauseates nauseating nauseous nautical naval navel navels
navies navigable navigate navigated navigates navigating navigation
navigator navigators navy nays neared nearing nearlier nearliest nears
nearsighted nearsightedness neater neatest neatness nebula nebulae
nebulous necessaries necessitate necessitated necessitates
necessitating necessities necked neckerchief neckerchiefs necking
necklace necklaces neckline necklines necks necktie neckties
necrophilia nectar nectarine nectarines nee needier neediest needled
needlework needling needy negated negates negating negation negations
negatived negatively negatives negativing neglectful negligee
negligees negligence negligent negligently negotiator negotiators
neigh neighed neighing neighs neon neophyte neophytes nephew nephews
nepotism nerved nerving nervously nervousness nestle nestled nestles
nestling nether netted netting nettle nettled nettles nettling
neurologist neurologists neurology neuron neurons neuroses neurosis
neurotic neurotics neuter neutered neutering neuters neutrality
neutrals neutron neutrons newbie newbies newborn newborns newed
newfangled newing newsagents newscast newscaster newscasters
newscasting newscasts newsed newses newsier newsiest newsing
newspapered newspapering newsprint newsstand newsstands newsy newt
newton newts nibble nibbled nibbles nibbling niceties nicety niche
niches nickel nickels nicknamed nicknaming nicotine niece nieces
niftier niftiest nifty nigh nightclub nightclubbed nightclubbing
nightclubs nightfall nightgown nightgowns nightingale nightingales
nightly nightmares nightmarish nighttime nilled nilling nils nimble
nimbler nimblest nimbly nincompoop nincompoops nines nineteen
nineteens nineteenth nineteenths nineties ninetieth ninetieths ninety
ninnies ninny ninth ninths nip nipped nippier nippiest nipping nipple
nippled nipples nippling nippy nips nit nitrate nitrated nitrates
nitrating nitrogen nits nitwit nitwits nobility nobleman noblemen
nobler nobles noblest noblewoman noblewomen nobly nobodies nocturnal
nod nodded nodding nods noes noised noiseless noiselessly noisier
noisiest noisily noisiness noising nomad nomadic nomads nomenclature
nomenclatures nomination nominations nominative nominatives nominee
nominees non nonchalance nonchalant nonchalantly noncommittal
nonconformist nonconformists nondescript nonentities nonentity nones
nonfiction nonflammable nonpartisan nonpartisans nonprofit nonprofits
nonresident nonresidents nonsensical nonstandard nonstop nontrivial
nonviolence noodle noodled noodles noodling nook nooks nooned nooning
noons noose nooses normed norming norms northeast northeasterly
northeastern northerlies northerly northward northwest northwestern
nosebleed nosebleeds nosed nosing nostalgic nostalgics nostril
nostrils notables notations notch notched notches notching notebook
notebooks noteworthy nothingness nothings noticeboard noticeboards
notifications notional notoriety notoriously nougat nougats nourish
nourished nourishes nourishing nourishment nova novelist novelists
novelties noxious nozzle nozzles nuance nuances nuclei nucleus nude
nuder nudes nudest nudge nudged nudges nudging nudity nugget nuggets
nuisances nullified nullifies nullify nullifying nulls numbed numbing
numbness numbs numerate numerator numerators numerically nuptial
nuptials nursed nursemaid nursemaids nurseries nursery nursing nurture
nurtured nurtures nurturing nutcracker nutcrackers nutmeg nutmegged
nutmegging nutmegs nutrient nutrients nutriment nutriments nutrition
nutritional nutritious nutshell nutshells nutted nuttier nuttiest
nutting nutty nuzzle nuzzled nuzzles nuzzling nylon nymph nymphs oaf
oafs oak oaks oared oaring oars oases oasis oath oaths oatmeal
obedience obedient obediently obelisk obelisks obese obesity
obfuscation obituaries obituary objectively objectives objectivity
objector objectors obligate obligated obligates obligating obligations
oblique obliques obliterate obliterated obliterates obliterating
obliteration oblivion oblivious oblong oblongs oboe oboes obscener
obscenest obscenities obscenity obscurer obscurest obscurities
observable observance observances observant observatories observatory
obsessions obsessive obsolescence obsolescent obsoleted obsoletes
obsoleting obstacle obstacles obstetrician obstetricians obstetrics
obstinacy obstinate obstruction obstructions obstructive obtrusive
obtuse obtuser obtusest occasioned occasioning occupancy occupant
occupants occupational occupations oceanic oceanography oceans octagon
octagonal octagons octal octave octaves octopus octopuses ocular
oculars odder oddest oddities oddity ode odes odious odometer
odometers offbeat offbeats offed offensiveness offensives officiate
officiated officiates officiating officious offing offings offload
offs offshoot offshoots offshore offstage offstages oftener oftenest
ogle ogled ogles ogling ogre ogres ohm ohms ohs oiled oilier oiliest
oiling oils oily ointment ointments okay okays okra okras olden
oldened oldening oldens olfactory olive olives omega omelet omelets
omen omens ominous ominously omnibus omnipotence omnipotent
omnipresent omniscient oncoming onerous onioned onioning onions
onliest onlooker onlookers onomatopoeia onrush onrushes onset onsets
onsetting onslaught onslaughts onuses onward oodles ooze oozed oozes
oozing opal opals opaque opaqued opaquer opaques opaquest opaquing
opener openers openest openings openness operand operands operatic
operatics operative operatives ophthalmologist ophthalmologists
ophthalmology opinionated opium opossum opossums opportune opportunist
opportunists opposites oppressive oppressor oppressors optician
opticians optics optima optimism optimist optimists optimums optionals
optioned optioning optometrist optometrists opulent oracle oracled
oracles oracling orals oranges orangutan orangutans oration orations
orator oratories orators oratory orbitals orbited orbiting orbits
orchard orchards orchestras orchestrate orchestrated orchestrates
orchestrating orchestration orchestrations orchid orchids ordain
ordained ordaining ordains ordeal ordeals orderlies orderly ordinance
ordinances ordinarier ordinaries ordinariest ordinarily ordination
ordinations ore ores organics organism organisms organist organists
orgasm orgies orgy orientations orifice originality originators
ornament ornamental ornamented ornamenting ornaments ornate ornately
ornithologist ornithologists ornithology orphan orphanage orphanages
orphaned orphaning orphans orthodontist orthodontists orthodoxes
orthogonal orthogonality orthography oscillate oscillated oscillates
oscillating oscillation oscillations oscilloscope osmosis ostensible
ostensibly ostentation ostentatious ostrich ostriches otter ottered
ottering otters ouch ounce ounces oust ousted ouster ousters ousting
ousts outbound outbreak outbreaking outbreaks outbroke outbroken
outburst outbursting outbursts outcast outcasting outcasts outclass
outclassed outclasses outclassing outcries outdid outdistance
outdistanced outdistances outdistancing outdo outdoes outdoing outdone
outdoor outdoors outed outermost outers outfield outfields outfit
outfits outfitted outfitting outgoings outgrew outgrow outgrowing
outgrown outgrows outgrowth outgrowths outhouse outhouses outing
outings outlaid outlandish outlast outlasted outlasting outlasts
outlaw outlawed outlawing outlaws outlay outlaying outlays outlet
outlets outlive outlived outlives outliving outlooked outlooking
outlooks outlying outmoded outnumber outnumbered outnumbering
outnumbers outpatient outpatients outpost outposts outputted
outputting outrageously outran outrun outrunning outruns outs outsets
outsetting outshine outshines outshining outshone outsider outsiders
outsides outskirt outskirts outsmart outsmarted outsmarting outsmarts
outspoken outstandingly outstation outstations outstrip outstripped
outstripping outstrips outward outwardly outwards outweighed
outweighing outwit outwits outwitted outwitting ova oval ovals ovaries
ovary ovation ovations oven ovens overalls overate overbear
overbearing overbears overblown overboard overbore overborne
overburden overburdened overburdening overburdens overcast overcasting
overcasts overcharge overcharged overcharges overcharging overcoat
overcoats overcrowd overcrowded overcrowding overcrowds overdid overdo
overdoes overdoing overdone overdose overdosed overdoses overdosing
overdraw overdrawing overdrawn overdraws overdrew overeat overeaten
overeating overeats overestimate overestimated overestimates
overestimating overflowed overflowing overflows overgrew overgrow
overgrowing overgrown overgrows overhand overhands overhang
overhanging overhangs overhaul overhauled overhauling overhauls
overhear overheard overhearing overhears overheat overheated
overheating overheats overhung overkill overlaid overlain overland
overlands overlapped overlapping overlaps overlay overlaying overlays
overlie overlies overlying overnights overpass overpasses
overpopulation overpower overpowered overpowering overpowers overprint
overprinted overprinting overprints overran overrate overrated
overrates overrating overreact overreacted overreacting overreacts
overrule overruled overrules overruling overrun overrunning overruns
overs oversampling oversaw oversee overseeing overseen overseer
overseers oversees overshadow overshadowed overshadowing overshadows
overshoot overshooting overshoots overshot oversight oversights
oversimplification oversleep oversleeping oversleeps overslept
overstate overstated overstates overstating overstep overstepped
overstepping oversteps overt overtake overtaken overtakes overtaking
overthrew overthrow overthrowing overthrown overthrows overtimes
overtly overtook overture overtures overturn overturned overturning
overturns overuse overused overuses overusing overweight
overwhelmingly overwork overworked overworking overworks overwrite
overwrites overwrought ovum owl owls ox oxen oxes oxidation oxide
oxides oyster oysters pa paced pacemaker pacemakers paces pacific
pacified pacifiers pacifies pacifism pacifist pacifists pacify
pacifying pacing packer packers pact pacts paddies paddle paddled
paddles paddling paddock paddocked paddocking paddocks paddy padlock
padlocked padlocking padlocks pagan pagans pageant pageantry pageants
pager pagination pagoda pagodas pail pails pained painfuller
painfullest paining painlessly painstaking painter paired pairing pal
palaces palatable palate palates palatial paled paleontologist
paleontologists paleontology paler pales palest palette palettes
paling pall pallbearer pallbearers palled pallid palling pallor palls
palm palmed palming palms palomino palominos palpable palpably pals
paltrier paltriest paltry pamper pampered pampering pampers pamphlet
pamphlets panacea panaceas pancake pancaked pancakes pancaking
pancreas pancreases pancreatic panda pandas pandemonium pander
pandered pandering panders pane panes pang panged panging pangs
panhandle panhandled panhandler panhandlers panhandles panhandling
panicked panickier panickiest panicking panicky panics panned panning
panorama panoramas panoramic pans pansies pansy panted panther
panthers pantie panties panting pantomime pantomimed pantomimes
pantomiming pantries pantry pap papa papacies papacy papal papas
papaya papayas paperbacked paperbacking paperbacks papered papering
paperweight paperweights paperwork paprika papyri papyrus parable
parabled parables parabling parachute parachuted parachutes
parachuting paraded parades paradigm parading paradises paradoxes
paradoxical paradoxically paraffin paragon paragons paragraphed
paragraphing parakeet parakeets paralysis paralytic paralytics
paramount paranoids paraphernalia paraphrased paraphrases paraphrasing
paraplegic paraplegics parasite parasites parasitic parasol parasols
paratrooper paratroopers parcel parcels parch parched parches parching
parchment parchments pardonable pardoned pardoning pardons pare pared
parentage parental parented parenthetical parenthood parenting pares
paring parish parishes parishioner parishioners parka parkas parkway
parkways parliamentary parliaments parodied parodies parodying parole
paroled paroles paroling parred parring parroted parroting parrots
pars parsec parsecs parser parsley parsnip parsnips parson parsonage
parsonages parsons partake partaken partakes partaking parted
partiality partials participation participle participles particulars
partied parting partings partisan partisans partnered partnering
partnership partnerships partook partridge partridges partying pas
passable passageway passageways passbook passbooks passe passer
passionated passionately passionates passionating passioned passioning
passions passively passives passports pasta pastas pasted pastel
pastels pastes pastiche pastier pasties pastiest pastime pastimes
pasting pastor pastoral pastorals pastors pastries pastry pasts
pasture pastured pastures pasturing pasty patchwork patchworks patchy
pate patented patenting patently patents paternal paternalism
paternity pates pathetically pathological pathologist pathologists
pathology pathos pathway pathways patienter patientest patiently patio
patios patriarch patriarchal patriarchs patrimonies patrimony patriot
patriotic patriotism patriots patrol patrolled patrolling patrols
patron patronage patronages patrons pats patted patter pattered
pattering patterned patterning patters patties patting patty paucity
paunch paunched paunches paunchier paunchiest paunching paunchy pauper
paupers pave paved pavemented pavementing pavements paves pavilion
pavilions paving paw pawed pawing pawn pawnbroker pawnbrokers pawned
pawning pawns paws payable payer payers payload payoff payoffs payroll
payrolls pea peaceable peacefuller peacefullest peacefully peacemaker
peacemakers peaces peach peaches peacock peacocks peaked peaking peal
pealed pealing peals pear pearl pearled pearling pearls pears peas
peat pebble pebbled pebbles pebbling pecan pecans peck pecked pecking
pecks peculiarities peculiarity peculiarly pedagogy pedals peddle
peddled peddler peddlers peddles peddling pedestal pedestals
pediatricians pediatrics pedigree pedigrees peek peeked peeking peeks
peel peeled peeling peels peep peeped peeping peeps peered peering
peerless peeve peeved peeves peeving peevish peg pegged pegging pegs
pelican pelicans pellet pelleted pelleting pellets pelt pelted pelting
pelts pelvic pelvics pelvis pelvises penal penance penanced penances
penancing penchant pencils pendant pendants pendulum pendulums
penetrate penetrated penetrates penetrating penetration penetrations
penguins penicillin peninsula peninsulas penis penises penitence
penitent penitentiaries penitentiary penitents penknife penknives
penmanship pennant pennants penned penniless penning pension pensioned
pensioner pensioners pensioning pensions pensive pensively pentagon
pentagonal pentagonals pentagons penthouse penthoused penthouses
penthousing penultimate peon peonies peons peony peopled peopling pep
pepped pepper peppered peppering peppermint peppermints peppers
pepping peps percentages perceptible perceptions perceptive perch
perchance perched perches perching percolate percolated percolates
percolating percolation percolator percolators percussion peremptory
perennial perennials peres perfected perfecter perfectest perfecting
perfectionist perfectionists perfections perfects perforate perforated
perforates perforating perforation perforations performer performers
perfume perfumed perfumes perfuming perfunctorily perfunctory
perhapses peril perilous perilously perils perimeter perimeters
periodical periodicals peripheries periphery periscope periscoped
periscopes periscoping perish perishable perishables perished perishes
perishing perjure perjured perjures perjuries perjuring perjury perk
perked perkier perkiest perking perks perky permanence permanents
permeate permeated permeates permeating permissions permissive
permutation permutations pernicious peroxide peroxided peroxides
peroxiding perpendicular perpendiculars perpetrate perpetrated
perpetrates perpetrating perpetrator perpetrators perpetually
perpetuals perpetuate perpetuated perpetuates perpetuating perplex
perplexed perplexes perplexing perplexities perplexity persecution
persecutions persecutor persecutors perseverance persevere persevered
perseveres persevering persisted persistence persistently persisting
persists persona personable personals personification personifications
personified personifies personify personifying perspectives
perspiration perspire perspired perspires perspiring persuasions
persuasive persuasively pert pertain pertained pertaining pertains
perter pertest pertinent pertinents perts perturb perturbed perturbing
perturbs perusal perusals peruse perused peruses perusing pervade
pervaded pervades pervading pervasive perversion perversions pervert
perverted perverting perverts peskier peskiest pesky pessimism
pessimist pessimistic pessimists pest pester pestered pestering
pesters pesticide pesticides pestilence pestilences pests petal petals
peter petered petering peters petite petites petition petitioned
petitioning petitions petrified petrifies petrify petrifying petroleum
pets petted petticoat petticoats pettier petties pettiest pettiness
petting petulant petunia petunias pew pews pewter pewters phantom
phantoms pharmaceutical pharmaceuticals pharmacist pharmacists
pheasant pheasants phenomenal phenomenally phenomenas philanthropic
philanthropies philanthropist philanthropists philanthropy phlegm
phlegmatic phobia phobias phonetic phonetics phonics phonied phonier
phonies phoniest phonograph phonographs phony phonying phosphor
phosphorescence phosphorescent phosphorus photocopied photocopier
photocopiers photocopies photocopying photoed photogenic photographed
photographer photographers photographing photography photoing photon
photons photosynthesis phototypesetter phraseology physicals physician
physicians physiological physique physiques pianist pianists pianos
piccolo piccolos pickax pickaxed pickaxes pickaxing picket picketed
picketing pickets pickier pickiest pickle pickled pickles pickling
pickpocket pickpockets pickup pickups picky picnic picnicked
picnicking picnics pictorial pictorials pictured picturesque picturing
piddle piddled piddles piddling pieced piecemeal piecework piecing
pier pierce pierced pierces piercing piers pies piety pigeoned
pigeonhole pigeonholed pigeonholes pigeonholing pigeoning pigeons
pigged pigging piggish piggyback piggybacked piggybacking piggybacks
pigheaded pigment pigments pigpen pigpens pigtail pigtails pike piked
pikes piking piled pilfer pilfered pilfering pilfers pilgrim
pilgrimage pilgrimages pilgrims piling pillage pillaged pillages
pillaging pillar pillars pillow pillowcase pillowcases pillowed
pillowing pillows piloted piloting pilots pimple pimples pimplier
pimpliest pimply pincushion pincushions pine pineapple pineapples
pined pines pining pinion pinioned pinioning pinions pinked pinker
pinkest pinking pinks pinnacle pinnacles pinned pinning pinpoint
pinpointed pinpointing pinpoints pioneer pioneered pioneering pioneers
pious piped pipelines piping pique piqued piques piquing piracy
piranha piranhas pirate pirated pirates pirating pirouette pirouetted
pirouettes pirouetting pis pistachio pistachios pistol pistols piston
pistons pitched pitcher pitchers pitches pitchfork pitchforked
pitchforking pitchforks pitching piteous piteously pithier pithiest
pithy pitied pities pitiful pitifuller pitifullest pitifully pitiless
pits pittance pittances pitted pitting pitying pivot pivotal pivoted
pivoting pivots pixie pixies placard placarded placarding placards
placate placated placates placating placement placenta placentas
placid placidly plagiarism plagiarisms plagiarist plagiarists plaice
plaid plaided plaiding plaids plainer plainest plains plaintiff
plaintiffs plaintive planar planed planetarium planetariums planing
plank planked planking planks plankton planner planners plantain
plantains plantation plantations planter planters plaque plaques
plasma plastics plateau plateaued plateauing plateaus plated
platformed platforming platforms plating platinum platitude platitudes
platoon platooned platooning platoons platter platters plausibility
plausibly playable playback playful playfully playfulness playgrounds
playhouse playhouses playmate playmates playpen playpens plaything
playthings playwright playwrights plaza plazas plead pleaded pleading
pleads pleas pleasanter pleasantest pleasantries pleasantry pleasings
pleasurable pleasured pleasures pleasuring pleat pleated pleating
pleats pledge pledged pledges pledging plentiful plentifully plethora
pliable pliant plied pliers plies plight plighted plighting plights
plod plodded plodding plods plop plopped plopping plops plotters ploys
pluck plucked plucking plucks plucky plum plumage plumb plumbed
plumber plumbers plumbing plumbs plume plumed plumes pluming plummet
plummeted plummeting plummets plump plumped plumper plumpest plumping
plumps plums plunder plundered plundering plunders plunge plunged
plunger plungers plunges plunging plurality plurals pluses plush
plusher plushest plussed plussing plutonium ply plying plywood
pneumatic pneumonia poach poached poacher poachers poaches poaching
pocketbook pocketbooks pocketed pocketing pockmark pockmarked
pockmarking pockmarks pod podded podding podium podiums pods poetical
poignancy poignant poinsettia poinsettias pointedly pointlessly poise
poised poises poising poisonous poked poker pokers pokes pokier
pokiest poking poky polarity polars poled polemic polemics poles
policed policemen polices policewoman policewomen policing poling
polio polios politely politer politest polka polkaed polkaing polkas
polled pollen pollinate pollinated pollinates pollinating pollination
polling pollster pollsters pollutant pollutants pollute polluted
pollutes polluting polo polygamous polygamy polygon polygons
polynomials polyp polyps polytechnic pomegranate pomegranates pomp
poncho ponchos pond ponder pondered pondering ponderous ponders ponds
ponies pontoon pontooned pontooning pontoons pony poodle poodles
pooled pooling pools poop pooped pooping poops popcorn poplar poplars
poppies poppy populaces popularly populars populous porcelain porch
porches porcupine porcupines pore pored pores poring pornographic
porous porpoise porpoised porpoises porpoising porridge portables
portal portals portend portended portending portends portent portents
portered portering portfolio portfolios porthole portholes portico
porticoes portioned portioning portlier portliest portly portrait
portraits portrayal portrayals posies positional positiver positives
positivest positivism possessions possessive possessives possessor
possessors possibler possibles possiblest possum possums postbox
postcards postcode posterior posteriors posterity postgraduate
postgraduates posthumous posthumously postman postmark postmarked
postmarking postmarks postmasters postmen postponement postponements
postscripts postulated postulates postulating posture postured
postures posturing posy potassium potency potent pothole potholed
potholes potholing potion potions pots potted potter pottered
potteries pottering potters pottery potting pouch pouched pouches
pouching poultry pounce pounced pounces pouncing pounded pounding pout
pouted pouting pouts powdered powdering powders powdery powerfully
powerhouse powerhouses powerless powwow powwowed powwowing powwows
practicalities practicality practitioner practitioners pragmatics
pragmatism prairie prairies praised praises praiseworthy praising pram
prance pranced prances prancing prank pranks prattle prattled prattles
prattling prawn prawned prawning prawns preacher preachers preamble
preambled preambles preambling precarious precariously precautionary
precedents precinct precincts precipice precipices precipitate
precipitated precipitates precipitating precipitation precipitations
precipitous precis precised preciser precises precisest precising
preclude precluded precludes precluding precocious preconceive
preconceived preconceives preconceiving preconception preconceptions
precursor precursors predator predators predatory predefined
predestination predicament predicaments predicate predicated
predicates predicating predictably predictor predisposition
predispositions predominance predominant predominate predominated
predominates predominating preeminence preeminent preempt preempted
preempting preempts preen preened preening preens prefab prefabbed
prefabbing prefabs prefaced prefaces prefacing prefect preferential
pregnancies prehistoric prejudicial preliminaries prelude preludes
premeditation premier premiere premiered premieres premiering premiers
premised premising premiums premonition premonitions prenatal
preoccupied preoccupies preoccupy preoccupying prepaid preparations
preparatory prepay prepaying prepays preponderance preponderances
preposition prepositional prepositioned prepositioning prepositions
preposterous prerequisites prerogative prerogatives prescriptions
presences presentable presentations presenter preservation
preservative preservatives preside presided presidencies presidency
presidential presidents presides presiding pressings pressured
pressuring prestige prestigious presto presumption presumptions
presumptuous presuppose presupposed presupposes presupposing pretender
pretenders pretentiously pretentiousness pretext pretexted pretexting
pretexts prettied prettier pretties prettiest prettying pretzel
pretzels prevailed prevailing prevails prevalence prevalents
preventable preventive preventives previewed previewers previewing
previews prey preyed preying preys priceless prick pricked pricking
prickle prickled prickles pricklier prickliest prickling prickly
pricks prided prides priding pried prier pries priestess priestesses
priesthood priesthoods prim primal primaries primate primates primed
primer primers primeval priming primly primmer primmest primp primped
primping primps primrose primrosed primroses primrosing princes
princess princesses principalities principality principals principled
principling printable printings priors prism prisms prisoned prisoning
prisons privater privates privatest privation privations privier
privies priviest privy probabilistic probables probation probe probed
probes probing problematic procedural procession processional
processionals processioned processioning processions proclaimed
proclaiming proclaims proclamation proclamations procrastinate
procrastinated procrastinates procrastinating procrastination procure
procured procurement procures procuring prod prodded prodding prodigal
prodigals prodigies prodigious prodigy prods productions profane
profaned profanes profaning profanities profanity profess professed
professes professing professionally professions professors proffer
proffered proffering proffers proficiency proficient proficiently
proficients profiled profiling profited profiteer profiteered
profiteering profiteers profiting profounder profoundest profoundly
profundities profundity profuse profusely profusion profusions progeny
prognoses prognosis progression progressions progressive progressively
progressives prohibition prohibitions prohibitive prohibitively
projectile projectiles projections projector projectors proletarian
proletarians proletariat proliferate proliferated proliferates
proliferating prolific prologue prologues prom promenade promenaded
promenades promenading prominence prominently promiscuity promiscuous
promontories promontory promotions prompter promptest promptness proms
promulgate promulgated promulgates promulgating prong prongs
pronouncement pronouncements pronouns pronunciations proofed proofing
proofread proofreading proofreads prop propagate propagated propagates
propagating propagation propel propelled propeller propellers
propelling propels propensities propensity properer properest
prophecies prophecy prophesied prophesies prophesy prophesying
prophetic prophets proponent proponents proportionality proportionally
proportionals proportionate proportioned proportioning propositional
propositioned propositioning propositions propped propping
proprietaries proprietor proprietors propriety props propulsion pros
prosecutions prosecutor prosecutors proses prospected prospecting
prospectives prospector prospectors prospectus prospectuses prosper
prospered prospering prosperity prosperous prospers prostituted
prostituting prostitution prostrate prostrated prostrates prostrating
protagonist protagonists protections protective protectives protector
protectors protege proteges proteins protestant protested protesting
protests proton protons prototypes protract protracted protracting
protractor protractors protracts protrude protruded protrudes
protruding protrusion protrusions prouder proudest proudly provable
provably provenance proverb proverbial proverbs providence provider
province provinces provincial provincials provisionally provisioned
provisioning proviso provisos provocation provocations prow prowess
prowl prowled prowler prowlers prowling prowls prows proxies proxy
prude prudence prudent prudes prudish prune pruned prunes pruning pry
prying psalm psalms pseudonym pseudonyms psych psyche psyched
psychedelic psychedelics psyches psychiatric psychiatrist
psychiatrists psychiatry psychic psychics psyching psychoanalysis
psychoanalyst psychoanalysts psychologically psychologies psychopath
psychopaths psychoses psychosis psychotherapies psychotherapy
psychotic psychs puberty puck pucked pucker puckered puckering puckers
pucking pucks puddings puddle puddled puddles puddling pudgier
pudgiest pudgy pueblo pueblos puff puffed puffer puffier puffiest
puffing puffs puffy pugnacious puke puked pukes puking pulley pulleys
pullover pullovers pulmonary pulped pulping pulpit pulpits pulps
pulsate pulsated pulsates pulsating pulsation pulsations pulsed
pulsing puma pumas pumice pumices pummel pummels pumpernickel pumpkin
pumpkins punchline punctual punctuality punctuate punctuated
punctuates punctuating punctured punctures puncturing pundit pundits
pungent punier puniest punishable punishments punitive punk punker
punkest punks punned punning punted punter punters punting puny pup
pupped puppet puppets puppied puppies pupping puppy puppying pups
purchaser purchasers pured puree pureed pureeing purees purer purest
purgatory purged purges purging purification purified purifies purify
purifying puring puritanical purpler purples purplest purport
purported purporting purports purposed purposeful purposing purr
purred purring purrs purse pursed purses pursing pursuits purveyor pus
pusher pushers pushier pushiest pushover pushovers pushy puss pusses
pussier pussies pussiest pussy putative putrid putter puttered
puttering putters puttied putties putty puttying pyramid pyramided
pyramiding pyramids pyre pyres pythons qua quack quacked quacking
quacks quadrangle quadrangles quadrant quadrants quadratic
quadrilateral quadrilaterals quadruped quadrupeds quadruple quadrupled
quadruples quadruplet quadruplets quadrupling quagmire quagmired
quagmires quagmiring quail quailed quailing quails quaint quainter
quaintest quake quaked quakes quaking qualitative qualm qualms
quandaries quandary quantifier quantify quantitative quarantine
quarantined quarantines quarantining quark quarrel quarrels
quarrelsome quarried quarries quarry quarrying quart quarterback
quarterbacked quarterbacking quarterbacks quartered quartering
quarterlies quarterly quartet quartets quarts quartz quash quashed
quashes quashing quaver quavered quavering quavers quay quays queasier
queasiest queasy queened queening queenlier queenliest queenly queer
queered queerer queerest queering queers quell quelled quelling quells
quench quenched quenches quenching queried querying quested questing
questionnaires quests quibbled quibbles quibbling quiche quicken
quickened quickening quickens quicksand quicksands quieted quieting
quiets quill quills quilt quilted quilting quilts quinine quintessence
quintessences quintet quintets quintuplet quintuplets quip quipped
quipping quips quirk quirked quirking quirks quirky quited quites
quiting quitter quitters quiver quivered quivering quivers quizzed
quizzes quizzical quizzing quorum quorums quotient quotients rabbi
rabbis rabbited rabbiting rabble rabbles rabies raccoon raccoons racer
racetrack racetracks racially racier raciest racists racked racketed
racketeer racketeered racketeering racketeers racketing rackets
racking racy radars radial radials radiance radiant radiate radiated
radiates radiating radiations radiator radiators radicals radii
radioactive radioactivity radioed radioing radish radishes radium
raffle raffled raffles raffling raft rafted rafter rafters rafting
rafts ragamuffin ragamuffins raged rages ragged raggeder raggedest
ragging raging rags ragtime raided raider raiders raiding railed
railing railroaded railroading railroads railways rainbows raincoat
raincoats raindrop raindrops rainfall rainfalls rainier rainiest
rainstorm rainstorms rainwater rainy raisin raisins rake raked rakes
raking rallied rallies rally rallying ramble rambled rambler ramblers
rambles rambling ramification ramifications rammed ramming ramp
rampage rampaged rampages rampaging ramps ramrod ramrodded ramrodding
ramrods rams ramshackle ranch ranched rancher ranchers ranches
ranching rancid rancorous randomness ranger rangers ranked ranker
rankest ranking rankle rankled rankles rankling ransack ransacked
ransacking ransacks ransom ransomed ransoming ransoms rap raped rapes
rapider rapidest rapidity rapids raping rapist rapists rapped rapping
rapport rapports raps rapt rapture raptures rapturous rared rares
raring rarities rarity rascal rascals rasher rashes rashest rashly
rasp raspberries raspberry rasped rasping rasps raster ratification
ratified ratifies ratify ratifying ratings ration rationales
rationality rationals rationed rationing rations ratted ratting
rattler rattlers rattlesnake rattlesnakes ratty raucous raucously
ravage ravaged ravages ravaging ravel ravels raven ravened ravening
ravenous ravenously ravens ravine ravined ravines ravings ravining
ravish ravished ravishes ravishing rawer rawest rayon rays raze razed
razes razing razors reactionaries reactive reactors readability
readied readier readies readiest readiness readjust readjusted
readjusting readjusts readying realer realest realism realist
realistically realists realities reallied reallies reallocate
reallocated reallocates reallocating reallying realty ream reamed
reaming reams reap reaped reaper reapers reaping reappear reappeared
reappearing reappears reaps reared rearing rearrangement
rearrangements rears reassurance reassurances rebate rebated rebates
rebating rebel rebelled rebelling rebellion rebellions rebellious
rebels rebind rebinding rebinds rebirth rebirths reborn rebound
rebounded rebounding rebounds rebuff rebuffed rebuffing rebuffs rebuke
rebuked rebukes rebuking rebut rebuts rebuttal rebuttals rebutted
rebutting recalcitrant recant recanted recanting recants recap
recapped recapping recaps recapture recaptured recaptures recapturing
recede receded recedes receding receipted receipting receipts
receivers recenter recentest receptacle receptacles receptionist
receptionists receptions receptive recess recessed recesses recessing
recession recessions recharge rechargeable recharged recharges
recharging reciprocal reciprocals reciprocate reciprocated
reciprocates reciprocating recital recitals recitation recitations
recite recited recites reciting recklessly recklessness reclaimed
reclaiming reclaims reclamation recline reclined reclines reclining
recluse recluses recoil recoiled recoiling recoils recollect
recollected recollecting recollections recollects recompense
recompensed recompenses recompensing recompile recompiled recompiling
reconciled reconciles reconciliation reconciliations reconciling
recondition reconditioned reconditioning reconditions reconfigure
reconfigured reconnaissance reconnaissances reconnect reconnected
reconnecting reconnects reconsidered reconsidering reconsiders
reconstruct reconstructed reconstructing reconstruction
reconstructions reconstructs recorders recount recounted recounting
recounts recoup recouped recouping recoups recourse recoverable
recoveries recreate recreated recreates recreating recreation
recreations rectal rectangles rector rectors rectum rectums recuperate
recuperated recuperates recuperating recuperation recur recurred
recurrence recurrences recurrent recurring recurs recursively redden
reddened reddening reddens redder reddest redeem redeemable redeemed
redeeming redeems redefinition redemption redesign redesigned
redesigning redesigns redhead redheads redid redirected redirecting
redirection redirects rediscover rediscovered rediscovering
rediscovers redistribute redistributed redistributes redistributing
redistribution redo redoes redoing redone redraft redraw redress
redressed redresses redressing reds redundancies reed reeds reef
reefed reefing reefs reek reeked reeking reeks reel reelect reelected
reelecting reelects reeled reeling reels referee refereed refereeing
referees referendums refill refilled refilling refills refinement
refinements refineries refinery reflections reflective reflector
reflectors reflexes reflexive reflexives reformation reformations
reformatted reformatting reformer reformers refraction refrained
refraining refrains refreshment refreshments refrigerate refrigerated
refrigerates refrigerating refrigeration refrigerator refrigerators
refuel refuels refuge refugee refugees refuges refunded refunding
refunds refurbish refurbished refurbishes refurbishing refurbishment
refusals refutation refuted refutes refuting regained regaining
regains regal regale regaled regales regalia regaling regals regatta
regattas regenerate regenerated regenerates regenerating regeneration
regent regents regimen regimens regiment regimental regimentals
regimented regimenting regiments regimes registrar registrars
registrations registries registry regress regressed regresses
regressing regression regressions regretful regrettable regularity
regulars regulate regulated regulates regulating regurgitate
regurgitated regurgitates regurgitating rehabilitate rehabilitated
rehabilitates rehabilitating rehabilitation rehash rehashed rehashes
rehashing rehearsal rehearsals rehearse rehearsed rehearses rehearsing
reigned reigning reigns reimburse reimbursed reimbursement
reimbursements reimburses reimbursing rein reincarnate reincarnated
reincarnates reincarnating reincarnation reincarnations reindeer
reined reinforce reinforced reinforcement reinforcements reinforces
reinforcing reining reins reinstatement reiterated reiterates
reiterating reiteration reiterations rejections rejoice rejoiced
rejoices rejoicing rejoin rejoinder rejoinders rejoined rejoining
rejoins rejuvenate rejuvenated rejuvenates rejuvenating rejuvenation
relaid relapse relapsed relapses relapsing relational relativistic
relaxation relaxations relayed relaying relays releasable relegate
relegated relegates relegating relent relented relenting relentless
relentlessly relents reliables reliance reliant relic relics reliefs
religiously relinquish relinquished relinquishes relinquishing relish
relished relishes relishing relive relived relives reliving reload
reloaded reloading reloads relocatable relocate relocated relocates
relocating remade remainders remake remakes remaking remedial remedied
remedies remedying remembrance remembrances reminders reminisce
reminisced reminiscence reminiscences reminisces reminiscing remiss
remission remissions remit remits remittance remittances remitted
remitting remnant remnants remodel remodels remorse remorseful
remorseless remoter remotes remotest removable removables removals
remunerate remunerated remunerates remunerating remuneration
remunerations renaissance rendezvous rendezvoused rendezvouses
rendezvousing renditioned renditioning renditions renegade renegaded
renegades renegading renege reneged reneges reneging renewable renewal
renewals renounce renounced renounces renouncing renovate renovated
renovates renovating renovation renovations renown renowned renowning
renowns rental rentals rented renting rents renunciation renunciations
reopen reopened reopening reopens repaid reparation repatriate
repatriated repatriates repatriating repay repaying repayment
repayments repays repeal repealed repealing repeals repel repelled
repellent repellents repelling repels repentance repentant repentants
repented repenting repents repercussion repercussions repertoires
repetitions repetitious replay replenish replenished replenishes
replenishing replete repleted repletes repleting replica replicas
replicate replicated replicates replicating replication reportedly
reporters repose reposed reposes reposing repositories repository
reprehensible repress repressed represses repressing repression
repressions repressive reprieve reprieved reprieves reprieving
reprimand reprimanded reprimanding reprimands reprint reprinted
reprinting reprints reprisal reprisals reproach reproached reproaches
reproaching reproductions reproductive reprogrammed reprogramming
reprove reproved reproves reproving reptile reptiles republic
republican republicans republics repudiate repudiated repudiates
repudiating repudiation repudiations repugnance repugnant repulse
repulsed repulses repulsing repulsion reputable reputations repute
reputed reputedly reputes reputing requiem requisites requisition
requisitioned requisitioning requisitions reroute rerouted reroutes
rerouting resale reschedule rescheduled reschedules rescheduling
rescind rescinded rescinding rescinds rescued rescuer rescuers rescues
rescuing researched researches researching resemblances resented
resentful resenting resentment resentments resents reservoir
reservoirs reshuffle resided residences residential residing residual
residuals residue residues resignations resilience resilient resin
resins resistances resistant resisted resisting resistor resistors
resists resolute resolutely resoluter resolutes resolutest resolutions
resolver resonance resonances resonant resound resounded resounding
resounds resourceful resourcefulness respectability respectables
respectably respectful respectfully respiration respirator respirators
respiratory respite respites resplendent responsibly responsive
restful restfuller restfullest restitution restive restless restlessly
restlessness restoration restorations restraint restraints
restrictives restructure restructured restructures restructuring
resubmit resubmits resubmitted resubmitting resultant resultants
resumption resumptions resurface resurfaced resurfaces resurfacing
resurgence resurgences resurrect resurrected resurrecting
resurrections resurrects resuscitate resuscitated resuscitates
resuscitating resuscitation retailed retailer retailers retailing
retails retainer retainers retaliate retaliated retaliates retaliating
retaliation retaliations retard retarded retarding retards retch
retched retches retching retention rethink reticence reticent retina
retinas retirements retort retorted retorting retorts retrace retraced
retraces retracing retracted retracting retraction retractions
retracts retreat retreated retreating retreats retribution
retributions retries retrievals retriever retrievers retroactive
retrograde retrospect retrospected retrospecting retrospective
retrospectively retrospectives retrospects retry returnable
returnables retype reunion reunions reunite reunited reunites
reuniting reused reuses reusing rev revamp revamped revamping revamps
revel revelations revelries revelry revels revenged revengeful
revenges revenging revenues reverberate reverberated reverberates
reverberating reverberation reverberations revere revered reverence
reverenced reverences reverencing reverent reverently reveres reverie
reveries revering reversal reversals reversible reversion reverted
reverting reverts reviewer reviewers revile reviled reviles reviling
revisions revisit revisited revisiting revisits revival revivals
revive revived revives reviving revoke revoked revokes revoking
revolutionaries revolutions revolve revolved revolver revolvers
revolves revolving revs revue revues revulsion revved revving rewarded
rewarding rewind rework rhapsodies rhapsody rhetoric rheumatism rhino
rhinoceros rhinoceroses rhinos rhododendron rhododendrons rhubarb
rhubarbs rhymed rhymes rhyming rhythmic rhythms rib ribbed ribbing
ribbons ribs riced rices riches richly richness ricing ricketier
ricketiest rickety rickshaw rickshaws ricochet ricocheted ricocheting
ricochets riddance riddle riddled riddles riddling rider riders ridge
ridged ridges ridging ridicule ridiculed ridicules ridiculing rife
rifer rifest rifle rifled rifles rifling rift rifted rifting rifts rig
rigged rigging righted righteous righteously righteousness righter
rightest rightful rightfully righting rightmost rightness rigidity
rigidly rigorously rigs rile riled riles riling rim rimmed rimming
rims rind rinded rinding rinds ringleader ringleaders ringlet ringlets
ringworm rink rinked rinking rinks rinse rinsed rinses rinsing rioted
rioter rioters rioting riotous riots ripe riped ripen ripened ripeness
ripening ripens riper ripes ripest riping riposte ripper ripple
rippled ripples rippling riser risers riskier riskiest risque rite
rites rivalries rivalry rive rives rivet riveted riveting rivets roach
roaches roadblock roadblocked roadblocking roadblocks roadside
roadsides roam roamed roaming roams roar roared roaring roars roast
roasted roasting roasts rob robbed robber robberies robbers robbery
robbing robe robed robes robin robing robins robs robuster robustest
robustness rocked rocker rockers rocketed rocketing rockets rockier
rockiest rocking rocky roded rodent rodents rodeo rodeos rodes roding
rods roe roes rogue rogues roguish roller rollers romanced romances
romancing romantically romantics romp romped romping romps roofed
roofing roofs rook rooked rookie rookier rookies rookiest rooking
rooks roomed roomier roomiest rooming roommate roommates roomy roost
roosted rooster roosters roosting roosts rooted rooter rooting roped
ropes roping rosaries rosary rosemary roses rosier rosiest roster
rostered rostering rosters rostrum rostrums rosy rotaries rotary
rotations rote roted rotes roting rotisserie rotisseries rotor rotors
rots rotted rottener rottenest rottens rotting rotund rotunda rotundas
rotunded rotunding rotunds rouge rouged rouges roughage roughed
roughen roughened roughening roughens rougher roughest roughhouse
roughhoused roughhouses roughhousing roughing roughness roughs rouging
roulette roundabouts rounder roundest roundness rouse roused rouses
rousing router rowboat rowboats rowdier rowdies rowdiest rowdiness
rowdy rowed rowing royally royals royalty rubbed rubbers rubbing
rubbished rubbishes rubbishing rubble rubbled rubbles rubbling rubied
rubier rubies rubiest rubric rubs ruby rubying rucksack ruckus
ruckuses rudder rudders ruddied ruddier ruddies ruddiest ruddy
ruddying rudely rudeness ruder rudest rudimentary rue rued rueful rues
ruff ruffed ruffian ruffianed ruffianing ruffians ruffing ruffle
ruffled ruffles ruffling ruffs rug rugby rugged ruggeder ruggedest
rugging rugs ruing ruinous rulered rulering rulings rum rumble rumbled
rumbles rumbling ruminate ruminated ruminates ruminating rummage
rummaged rummages rummaging rummer rummest rummy rump rumped rumping
rumple rumpled rumples rumpling rumps rums runaway runaways rundown
rundowns rune runes rungs runner runners runnier runniest runny runt
runts runway runways rupture ruptured ruptures rupturing ruse ruses
rust rusted rustic rustics rustier rustiest rusting rustle rustled
rustler rustlers rustles rustling rusts rut ruthless ruthlessly
ruthlessness ruts rutted rutting rye sabbatical sabotaged sabotages
sabotaging saboteur saboteurs sac sacrament sacramented sacramenting
sacraments sacrificial sacrilege sacrileges sacrilegious sacs sadder
saddest saddle saddled saddles saddling sades sadism sadist sadistic
sadists sadness safari safaried safariing safaris safeguarded
safeguarding safekeeping safekeepings safes safetied safeties
safetying saffron saffrons sag sagas sage sagebrush sager sages sagest
sagged sagger sagging sags sailboat sailboats sailor sailors saintlier
saintliest saintly saints salad salads salami salamis salaried
salarying salesmen salespeople salesperson saleswoman saleswomen
salient salients saliva salivate salivated salivates salivating sallow
sallower sallowest sally salmon salmons salon salons saloon saloons
salted salter saltest saltier salties saltiest salting salts salty
salutation salutations salute saluted salutes saluting salvage
salvaged salvages salvaging salve salved salves salving sameness sames
sampler sanatorium sanatoriums sanctified sanctifies sanctify
sanctifying sanctimonious sanction sanctioned sanctioning sanctions
sanctity sanctuaries sanctuary sandal sandals sandbag sandbagged
sandbagging sandbags sanded sandier sandiest sanding sandman sandmen
sandpaper sandpapered sandpapering sandpapers sands sandstone
sandstorm sandstorms sandwiched sandwiching sandy saned saner sanes
sanest sangs saning sanitaries sanitarium sanitariums sanitary
sanitation sanserif sap sapling sapped sapphire sapphires sapping saps
sarcasms sarcastically sardine sardined sardines sardining sari saris
sash sashes sassier sassiest sassy satanic satchel satchels satellited
satelliting satin satined satining satins satires satirical satirist
satirists satisfactions saturate saturated saturates saturating
saturation sauced saucepan saucepans saucer saucers sauces saucier
sauciest saucing saucy sauerkraut sauna saunaed saunaing saunas
saunter sauntered sauntering saunters sausage sausages saute sauteed
sauteing sautes savage savaged savagely savager savageries savagery
savages savagest savaging saver savvied savvier savvies savviest savvy
savvying sawdust sawdusted sawdusting sawdusts sawed sawing saws
saxophone saxophones sayings scab scabbed scabbing scabs scaffold
scaffolding scaffolds scalar scalars scald scalded scalding scalds
scalier scaliest scallop scalloped scalloping scallops scalp scalped
scalpel scalpels scalping scalps scaly scamper scampered scampering
scampers scandalous scandals scanners scant scanted scanter scantest
scantier scanties scantiest scanting scants scanty scapegoat
scapegoated scapegoating scapegoats scar scarcer scarcest scarcity
scarecrow scarecrows scarfed scarfing scarfs scarier scariest
scarleted scarleting scarlets scarred scarring scars scarves scary
scathing scatterbrain scatterbrained scatterbrains scavenger
scavengers scened scenic scening scent scented scenting scents schemed
schemer schemers scheming schizophrenia schizophrenic scholarly
scholarship scholarships scholastic schoolboy schoolboys schoolchild
schoolchildren schooled schooling schoolteacher schoolteachers
schooner schooners scissor scissors scoff scoffed scoffing scoffs
scold scolded scolding scolds scoop scooped scooping scoops scoot
scooted scooter scooters scooting scoots scoped scopes scoping scorch
scorched scorches scorching scorer scorn scorned scornful scorning
scorns scorpion scorpions scotchs scoundrel scoundrels scour scoured
scourge scourged scourges scourging scouring scours scout scouted
scouting scouts scowl scowled scowling scowls scrabble scram scramble
scrambled scrambles scrambling scrammed scramming scrams scrapbook
scrapbooks scrape scraped scrapes scraping scratchier scratchiest
scratchy scrawl scrawled scrawling scrawls scrawnier scrawniest
scrawny screech screeched screeches screeching screened screening
screwdriver screwdrivers screwier screwiest screwy scribble scribbled
scribbles scribbling scribe scribes scripted scripting scripture
scriptures scriptwriter scriptwriters scrounge scrounged scrounges
scrounging scrub scrubbed scrubbing scrubs scruff scruffier scruffiest
scruffs scruffy scruple scrupled scruples scrupling scrupulous
scrupulously scrutiny scuff scuffed scuffing scuffle scuffled scuffles
scuffling scuffs sculptor sculptors sculpture sculptured sculptures
sculpturing scummed scumming scums scurried scurries scurrilous scurry
scurrying scuttle scuttled scuttles scuttling scythe scythed scythes
scything seafaring seafood seam seaman seamed seamen seaming seams
seamstress seamstresses seaport seaports sear searchlight searchlights
seared searing sears seas seashell seashells seashore seashores
seasick seasickness seaside seasides seasonable seasonal seasoned
seasoning seasonings seasons seated seating seaweed secede seceded
secedes seceding secession seclude secluded secludes secluding
seclusion secondaries secondarily secrecy secretarial secrete secreted
secreter secretes secretest secreting secretion secretions secretive
sectioned sectioning sectors secured securely securer secures securest
securing securities sedan sedans sedate sedated sedater sedates
sedatest sedating sedative sedatives sedentary sediment sedimentary
sediments seduce seduced seduces seducing seduction seductions
seductive seeded seedier seediest seeding seedling seeds seedy seep
seepage seeped seeping seeps seer seesaw seesawed seesawing seesaws
seethe seethed seethes seething segmentation segmented segmenting
segregate segregated segregates segregating segregation seize seized
seizes seizing seizure seizures selections selector selectors
selfishness seller sellers selves semantically semblance semblances
semen semester semesters semicircle semicircles semicolon semicolons
semiconductor semiconductors semifinal semifinals seminaries seminary
senate senates senator senators senile senility seniority seniors
sensational sensationalism sensations sensed senseless sensibilities
sensibility sensibler sensibles sensiblest sensing sensitives
sensitivities sensor sensors sensory sensual sensuality sensuous
sentience sentimentality sentries sentry separations sequels sequenced
sequencer sequencing sequentially sequin sequining sequins serenade
serenaded serenades serenading serene serened serener serenes serenest
serening serenity sergeant sergeants serials sermoned sermoning
sermons serpent serpented serpenting serpents serum serums servanted
servanting serviceable serviced serviceman servicemen servicing
serviette serviettes servile serviles servitude setback setbacks
settable setter setters settlement settlements settler settlers sevens
seventeen seventeens seventeenth seventeenths sevenths seventies
seventy sever severance severances severed severer severest severing
severs sew sewage sewed sewer sewers sewing sewn sews sexed sexing
sexism shabbier shabbiest shabbily shabby shack shackle shackled
shackles shackling shacks shaded shadier shadiest shading shadowed
shadowier shadowiest shadowing shadows shadowy shady shaft shafted
shafting shafts shaggier shaggiest shaggy shakier shakiest shallower
shallowest shallows sham shamble shambles shamed shameful shamefully
shameless shames shaming shammed shamming shampoo shampooed shampooing
shampoos shamrock shamrocks shams shanties shanty shapelier shapeliest
shapely shark sharked sharking sharks sharped sharpen sharpened
sharpener sharpeners sharpening sharpens sharper sharpest sharping
sharpness sharps shatter shattered shattering shatters shave shaved
shaver shavers shaves shaving shawl shawled shawling shawls sheaf
shear sheared shearing shears sheath sheathe sheathed sheathes
sheathing sheaths sheave sheaves sheen sheepish sheepishly sheered
sheerer sheerest sheering sheers sheik sheiks shelled sheller
shellfish shellfishes shelling sheltered sheltering shelters shelved
shelving shepherd shepherded shepherding shepherds sherbet sherbets
sheriff sheriffs sherries sherry shes shied shield shielded shielding
shields shies shiftier shiftiest shiftless shifty shimmer shimmered
shimmering shimmers shin shingle shingled shingles shingling shinier
shiniest shinned shinning shins shipment shipments shipshape shipwreck
shipwrecked shipwrecking shipwrecks shire shirk shirked shirking
shirks shirted shirting shirts shiver shivered shivering shivers shoal
shoaled shoaling shoals shod shoddier shoddiest shoddy shoeing
shoelace shoelaces shoestring shoestrings shoo shooed shooing shoos
shopkeeper shopkeepers shoplifter shoplifters shopper shoppers shore
shored shores shoring shortages shortcoming shortcomings shorted
shortenings shortfall shorting shortlist shortness shotgun shotgunned
shotgunning shotguns shouldered shouldering shouldest shoved shovel
shovels shoves shoving showcase showcased showcases showcasing
showdown showdowns showered showering showier showiest showings
showman showmen showy shrank shrapnel shred shredded shredding shreds
shrew shrewd shrewder shrewdest shrewdness shrewed shrewing shrews
shriek shrieked shrieking shrieks shrill shrilled shriller shrillest
shrilling shrills shrimp shrimped shrimping shrimps shrine shrines
shrink shrinkage shrinking shrinks shrivel shrivels shroud shrouded
shrouding shrouds shrub shrubbed shrubberies shrubbery shrubbing
shrubs shrug shrugged shrugging shrugs shrunk shrunken shuck shucked
shucking shucks shudder shuddered shuddering shudders shuffle shuffled
shuffles shuffling shun shunned shunning shuns shunt shunted shunting
shunts shutter shuttered shuttering shutters shuttle shuttled shuttles
shuttling shyer shyest shying shyness sibling siblings sicked
sickenings sicker sickest sicking sickle sickled sickles sicklier
sickliest sickling sickly sickness sicknesses sicks sics sideline
sidelined sidelines sidelining sidelong sideshow sideshows sidestep
sidestepped sidestepping sidesteps sidetrack sidetracked sidetracking
sidetracks sidewalk sidewalks sidings sidle sidled sidles sidling
siege sieges sierra siesta siestas sieve sieved sieves sieving sift
sifted sifting sifts sighed sighing sighs sightless signer signified
signifies signify signifying signpost signposted signposting signposts
silenced silences silencing silenter silentest silently silents
silhouette silhouetted silhouettes silhouetting silk silken silkened
silkening silkens silks sill sillies silliness sills silo silos silt
silted silting silts silvered silverier silveriest silvering silvers
silversmith silversmiths silverware silvery simile similes simmer
simmered simmering simmers simpled simples simplex simplification
simpling simulations simulator sincerer sincerest sincerity sinew
sinews sinewy singe singed singeing singes singled singling singly
singularity singulars sinned sinner sinners sinning sinus sinuses sip
sipped sipping sips sire sired siren sirens sires siring sirloin
sirloins sirred sirring sirs sissier sissies sissiest sissy sistered
sisterhood sisterhoods sistering sisterly sisters sited siting sitter
sitters sixes sixpence sixpences sixteens sixteenth sixteenths sixths
sixtieth sixtieths sizable sizer sizzle sizzled sizzles sizzling skate
skateboard skateboarded skateboarding skateboards skated skater
skaters skates skating skein skeined skeining skeins skeletons
sketched sketchier sketchiest sketching sketchy skew skewed skewer
skewered skewering skewers skewing skews ski skid skidded skidding
skids skied skies skiing skillet skillets skillful skim skimmed
skimming skimp skimped skimpier skimpiest skimping skimps skimpy skims
skinflint skinflints skinned skinnier skinniest skinning skinny skins
skipper skippered skippering skippers skirmish skirmished skirmishes
skirmishing skirted skirting skirts skis skit skited skiting skits
skittish skulk skulked skulking skulks skulls skunk skunked skunking
skunks skying skylight skylights skyline skylines skyrocket
skyrocketed skyrocketing skyrockets skyscraper skyscrapers slab
slabbed slabbing slabs slack slacked slacken slackened slackening
slackens slacker slackest slacking slacks slain slake slaked slakes
slaking slam slammed slamming slams slander slandered slandering
slanders slant slanted slanting slants slap slapped slapping slaps
slapstick slashed slashes slashing slat slate slated slates slating
slats slaughter slaughtered slaughtering slaughters slaved slavery
slaving slavish slay slaying slays sleazier sleaziest sleazy sled
sledded sledding sledgehammer sleds sleek sleeked sleeker sleekest
sleeking sleeks sleeper sleepers sleepier sleepiest sleepless sleepy
sleet sleeted sleeting sleets sleeve sleeveless sleeves sleigh
sleighed sleighing sleighs slender slenderer slenderest slew slewed
slewing slews slick slicked slicker slickest slicking slicks slided
slier sliest slighted slighting slights slime slimier slimiest slimmed
slimmer slimmest slimming slims slimy sling slinging slings slingshot
slingshots slink slinking slinks slipper slipperier slipperiest
slippers slipshod slit slither slithered slithering slithers slits
slitted slitter slitting sliver slivered slivering slivers slob
slobber slobbered slobbering slobbers slobs slog slogans slogged
slogging slogs slop sloped slopes sloping slopped sloppier sloppiest
slopping slops slosh sloshed sloshes sloshing sloth slothed slothful
slothing sloths slotted slotting slouch slouched slouches slouching
slovenlier slovenliest slovenly slowness sludge sludged sludges
sludging slug slugged slugging sluggish slugs sluice sluiced sluices
sluicing slum slumber slumbered slumbering slumbers slummed slummer
slumming slump slumped slumping slumps slums slung slunk slur slurred
slurring slurs slush slut sluts sly slyness smack smacked smacking
smacks smalled smalling smallpox smalls smarted smarter smartest
smarting smartly smarts smattering smatterings smear smeared smearing
smears smelled smellier smelliest smelling smelt smelted smelting
smelts smidgen smidgens smirk smirked smirking smirks smite smites
smithereens smiths smiting smitten smock smocked smocking smocks smog
smokestack smokestacks smokier smokies smokiest smoky smoldered
smolders smoothed smoother smoothest smoothing smoothness smooths
smote smother smothered smothering smothers smudge smudged smudges
smudging smugged smugger smuggest smugging smuggle smuggled smuggler
smugglers smuggles smuggling smugly smugs smut smuts snacked snacking
snacks snagged snagging snags snailed snailing snails snake snaked
snakes snaking snap snapped snappier snappiest snapping snappy snaps
snapshot snapshots snare snared snares snaring snarl snarled snarling
snarls snatch snatched snatches snatching sneaker sneakers sneakier
sneakiest sneer sneered sneering sneers sneeze sneezed sneezes
sneezing snicker snickered snickering snickers snide snider snides
snidest sniffed sniffing sniffle sniffled sniffles sniffling sniffs
snip snipe sniped sniper snipers snipes sniping snipped snippet
snippets snipping snips snitch snitched snitches snitching snob
snobbish snobs snooker snoop snooped snooping snoops snootier
snootiest snooty snooze snoozed snoozes snoozing snore snored snores
snoring snorkel snorkeled snorkeling snorkels snort snorted snorting
snorts snot snots snotted snotting snout snouted snouting snouts
snowball snowballed snowballing snowballs snowdrift snowdrifts snowed
snowfall snowfalls snowflake snowflakes snowier snowiest snowing
snowplow snowplowed snowplowing snowplows snows snowstorm snowstorms
snowy snub snubbed snubbing snubs snuff snuffed snuffer snuffing
snuffs snug snugged snugger snuggest snugging snuggle snuggled
snuggles snuggling snugly snugs soak soaked soaking soaks soaped
soapier soapiest soaping soaps soapy soar soared soaring soars sob
sobbed sobbing sobered soberer soberest sobering sobers sobriety sobs
soccer sociable sociables socialists socials sociological sociologist
sociologists sociology socked socking soda sodas sodded sodden sodding
sodium sodomy sods sofa sofas softball softballs soften softened
softening softens softer softest softly softness soggier soggiest
soggy soiled soiling soils sojourn sojourned sojourning sojourns
solace solaced solaces solacing solder soldered soldering solders
soldiered soldiering soled solemn solemner solemnest solemnity
solemnly solicit solicited soliciting solicitous solicits solidarity
solider solidest solidified solidifies solidify solidifying solidity
solidly solids soling solitaire solitaires solitaries solitary
solitude soloed soloing soloist soloists solos soluble solubles
solvent solvents somber somebodies someday someones somersault
somersaulted somersaulting somersaults somethings somewhats somewheres
sonata sonatas sonic sonics sonnet sonnets sonorous soot soothe
soothed soother soothes soothest soothing sootier sootiest sooty sop
sophistication sophistry sophomore sophomores sopped sopping soprano
sopranos sops sorcerer sorcerers sorceress sorceresses sorcery sored
sorely sorer sores sorest soring sororities sorority sorrier sorriest
sorrow sorrowed sorrowful sorrowing sorrows sorta souffle souffles
sounder soundest soundly soundproof soundproofed soundproofing
soundproofs souped souping soups sour sourced sourcing soured sourer
sourest souring sours southeast southeastern southerlies southerly
southerner southerners southerns southpaw southpaws southward
southwest southwestern souvenir souvenirs sovereign sovereigns
sovereignty sow sowed sowing sown sows spa spacecraft spacecrafts
spaceship spaceships spacial spacious spade spaded spades spading
spaghetti spangle spangled spangles spangling spaniel spanielled
spanielling spaniels spank spanked spanking spankings spanks spanned
spanner spanners spanning spans spar spared sparer sparest sparing
spark sparked sparking sparkle sparkled sparkler sparklers sparkles
sparkling sparks sparred sparrer sparring sparrow sparrows spars
sparse sparsely sparser sparsest spas spasm spasmed spasming spasmodic
spasms spat spate spats spatted spatter spattered spattering spatters
spatting spatula spatulas spawn spawned spawning spawns spay spayed
spaying spays spear speared spearhead spearheaded spearheading
spearheads spearing spearmint spears specialer specialists specials
specifics specifier specimens specious speck specked specking specks
spectacle spectacles spectacularly spectaculars spectator spectators
spectra speculated speculates speculating speculations speculative
speculator speculators speeched speeching speechless speedboat
speedboats speedier speediest speedometer speedometers speedy
spellbind spellbinding spellbinds spellbound speller spendthrift
spendthrifts sperm sperms spew spewed spewing spews spheres spherical
sphinx sphinxes spice spiced spices spicier spiciest spicing spicy
spider spiders spied spigots spiked spikes spiking spilling spills
spinach spinal spinals spindlier spindliest spindly spine spineless
spines spinning spins spinster spinsters spirals spire spires spirited
spiriting spiritually spirituals spited spiteful spitefuller
spitefullest spites spiting spittle splash splashed splashes splashing
splat splatter splattered splattering splatters spleen spleens
splendider splendidest splendidly splice spliced splices splicing
splint splinted splinter splintered splintering splinters splinting
splints splurge splurged splurges splurging spokes spokesmen
spokespeople spokesperson spokespersons spokeswoman spokeswomen sponge
sponged sponges spongier spongiest sponging spongy sponsorship
spontaneity spoofed spoofing spoofs spook spooked spookier spookiest
spooking spooks spooky spooled spooling spools spoon spooned spoonful
spoonfuls spooning spoons sporadic spore spores sporran sported
sporting sportsmanship spotless spotlight spotlighted spotlighting
spotlights spottier spottiest spotty spouse spouses spouted spouting
spouts sprain sprained spraining sprains sprangs sprawl sprawled
sprawling sprawls sprayed spraying sprays spreadsheet spreadsheets
spree spreed spreeing sprees sprier spriest sprig sprigs springboard
springboards springier springiest springtime springy sprinkle
sprinkled sprinkler sprinklers sprinkles sprinkling sprinklings sprint
sprinted sprinter sprinters sprinting sprints sprout sprouted
sprouting sprouts spruce spruced sprucer spruces sprucest sprucing
spry spud spuds spun spunk spunked spunking spunks spurn spurned
spurning spurns spurred spurring spurs spurt spurted spurting spurts
sputter sputtered sputtering sputters spying squabble squabbled
squabbles squabbling squadded squadding squadron squadrons squads
squalid squalider squalidest squall squalled squalling squalls squalor
squander squandered squandering squanders squarely squarer squarest
squat squats squatted squatter squattest squatting squawk squawked
squawking squawks squeak squeaked squeakier squeakiest squeaking
squeaks squeaky squeal squealed squealing squeals squeamish squelch
squelched squelches squelching squid squidded squidding squids squint
squinted squinter squintest squinting squints squire squired squires
squiring squirm squirmed squirming squirms squirrel squirrels squirt
squirted squirting squirts stab stabbed stabbing stabled stabler
stables stablest stabling stabs stacked stacking stadium stadiums
staffed staffing staffs stag stagecoach stagecoaches staged staging
stagnant stagnate stagnated stagnates stagnating stagnation stags
staid staider staidest stain stained staining stains staircases
stairway stairways staked stakes staking staled stalemate stalemated
stalemates stalemating staler stales stalest staling stalk stalked
stalking stalks stalled stalling stallion stallions stalls stalwart
stalwarts stamina stammer stammered stammering stammers stampede
stampeded stampedes stampeding stances stanch stanched stancher
stanches stanchest stanching standby standbys standings standoff
standoffs standpoints standstill standstills stank stanks stanza
stanzas staple stapled stapler staplers staples stapling starboard
starch starched starches starchier starchiest starching starchy
stardom starfish starfishes starked starker starkest starking starks
starlight starrier starriest starry startlingly starvation statelier
stateliest stately stater statesman statesmanship statesmen stationed
stationery stationing statistically statistician statisticians statue
statues stature statures statuses statute statutes statutory staunch
staunched stauncher staunches staunchest staunching staunchly stave
staved staving steadfast steadied steadier steadies steadiest
steadying steak steaks stealth stealthier stealthiest stealthily
stealthy steamed steamier steamies steamiest steaming steamroller
steamrollered steamrollering steamrollers steams steamy steeled
steeling steels steeped steeper steepest steeping steeple steeples
steeps stellar stemmed stemming stench stenched stenches stenching
stencil stencils stenographer stenographers stenography stepladder
stepladders stereos stereotyped stereotyping stern sterned sterner
sternest sterning sternly sternness sterns stethoscope stethoscopes
stew steward stewarded stewardess stewardesses stewarding stewards
stewed stewing stews sticker stickers stickied stickier stickies
stickiest stickler sticklers stickying stiffed stiffen stiffened
stiffening stiffens stiffer stiffest stiffing stiffly stiffness stiffs
stifle stifled stifles stifling stigma stigmas stigmata stillborn
stillborns stilled stiller stillest stilling stillness stills stilted
stimulant stimulants stimuli stimulus sting stinger stingers stingier
stingiest stinginess stinging stings stingy stink stinking stinks
stint stinted stinting stints stipulate stipulated stipulates
stipulating stipulation stipulations stirrup stirrups stitch stitched
stitches stitching stockade stockaded stockades stockading stockbroker
stockbrokers stocked stockholder stockholders stockier stockiest
stocking stockings stockpile stockpiled stockpiles stockpiling stocky
stockyard stockyards stodgier stodgiest stodgy stoical stoke stoked
stokes stoking stoles stolid stolider stolidest stolidly stomached
stomaching stomachs stomp stomped stomping stomps stoned stonier
stoniest stoning stony stool stools stoop stooped stooping stoops
stopgap stopgaps stopover stopovers stoppage stoppages stopper
stoppered stoppering stoppers stopwatch stopwatches storehouse
storehouses storekeeper storekeepers storeroom storerooms stork storks
stormed stormier stormiest storming stormy stout stouter stoutest
stove stoves stow stowaway stowaways stowed stowing stows straddle
straddled straddles straddling straggle straggled straggler stragglers
straggles straggling straighted straighten straightened straightening
straightens straighter straightest straightforwardly straightforwards
straighting straights strained strainer strainers straining strait
straited straiting straitjacket straitjacketed straitjacketing
straitjackets straits strand stranded stranding strands strangeness
strangered strangering strangers strangle strangled strangles
strangling strangulation strap strapped strapping straps strata
stratagem stratagems strategics stratified stratifies stratify
stratifying stratosphere stratospheres stratum strawberries strawberry
strawed strawing straws strayed straying strays streak streaked
streaking streaks streamed streamer streamers streaming streamline
streamlined streamlines streamlining streetcar streetcars strengthened
strengthening strengthens strengths strenuous strenuously stressful
stretcher stretchers strew strewed strewing strewn strews stricken
stricter strictest strictness stridden stride strides striding strife
striker strikers strikings stringier stringiest stringing stringy
stripe striped stripes striping stripper striven strives striving
strode stroked strokes stroking stroll strolled stroller strollers
strolling strolls stronghold strongholds strove structuralist strum
strummed strumming strums strung strut struts strutted strutting stub
stubbed stubbier stubbies stubbiest stubbing stubble stubborn
stubborned stubborner stubbornest stubborning stubborns stubby stubs
stud studded studding studentship studios studious studs stuffier
stuffiest stuffy stump stumped stumping stumps stung stunk stunted
stunting stunts stupefied stupefies stupefy stupefying stupendous
stupider stupidest stupidities stupidly stupids stupor stupors
sturdier sturdiest sturdy stutter stuttered stuttering stutters styled
styling stylish stylistic stylus suave suaver suavest sub subbed
subbing subcommittee subcommittees subconscious subconsciously
subdivide subdivided subdivides subdividing subdivision subdivisions
subdue subdued subdues subduing subgroup subjectives subjugate
subjugated subjugates subjugating subjunctive sublet sublets
subletting sublime sublimed sublimer sublimes sublimest subliming
submarine submarines submerge submerged submerges submerging
submersion submissions submissive subnormal subordinate subordinated
subordinates subordinating subprogram subs subscribed subscriber
subscribers subscribes subscribing subscript subscriptions subscripts
subsection subsections subsequents subservient subservients subsets
subside subsided subsides subsidiaries subsidies subsiding subsidy
subsist subsisted subsistence subsisting subsists substandard
substantiate substantiated substantiates substantiating substitutions
subsystem subterfuge subterfuges subterranean subtler subtlest
subtract subtracted subtracting subtraction subtractions subtracts
suburb suburban suburbans suburbs subversive subversives subvert
subverted subverting subverts successes successions successively
successors succinct succincter succinctest succinctly succulent
succulents succumb succumbed succumbing succumbs suck sucked sucker
suckered suckering suckers sucking suckle suckled suckles suckling
sucks suction suctioned suctioning suctions suds suede sufferings
sufficed suffices sufficing suffixed suffixes suffixing suffocate
suffocated suffocates suffocating suffocation suffrage sugared
sugarier sugariest sugaring sugars sugary suggester suggestive
suicides suitcase suitcases suites suitor suitors sulk sulked sulkier
sulkies sulkiest sulking sulks sulky sullen sullener sullenest sultan
sultans sultrier sultriest sultry summarily summered summering summers
summit summits summon summoned summoning summons summonsed summonses
summonsing sumptuous sunbathe sunbathed sunbathes sunbathing sunburn
sunburned sunburning sunburns sundae sundaes sundial sundials sundown
sundowns sundries sunflower sunflowers sunglasses sunken sunks sunlit
sunned sunnier sunnies sunniest sunning sunrises sunrising suns
sunscreen sunscreens sunset sunsets sunsetting suntan suntanned
suntanning suntans sunup sup superber superbest superbly supercomputer
supercomputers supered superficials superhuman superimpose
superimposed superimposes superimposing supering superintendent
superintendents superiors superlative superlatives supermarkets
supernaturals supers superscript superscripts supersede superseded
supersedes superseding supersonic supersonics superstar superstars
superstition superstitions superstitious superstructure
superstructures supervisory supper suppers supplant supplanted
supplanting supplants supple supplemented supplementing supplements
suppler supplest supportive supposition suppositions supremacy
supremely supremer supremest surcharge surcharged surcharges
surcharging surer surest surf surfaced surfacing surfboard surfboarded
surfboarding surfboards surfed surfing surfs surge surged surgeon
surgeons surgeries surges surgical surging surlier surliest surly
surmise surmised surmises surmising surmount surmounted surmounting
surmounts surnames surpass surpassed surpasses surpassing surpluses
surplussed surplussing surreal surrender surrendered surrendering
surrenders surreptitious surveillance surveyed surveying surveyor
surveyors survivals survivor survivors suspender suspenders suspense
suspensions suspicions sustainable sustenance swab swabbed swabbing
swabs swagger swaggered swaggerer swaggering swaggers swampier
swampiest swampy swan swans swarm swarmed swarming swarms swarthier
swarthiest swarthy swat swathe swathed swathes swathing swats swatted
swatting sway swayed swaying sways sweater sweaters sweaty sweeper
sweepers sweepings sweepstakes sweeten sweetened sweetening sweetens
sweeter sweetest sweetheart sweethearts sweetly sweetness sweets swell
swelled sweller swellest swelling swellings swells swerve swerved
swerves swerving swift swifted swifter swiftest swifting swiftly
swifts swig swigged swigging swigs swill swilled swilling swills
swindle swindled swindler swindlers swindles swindling swine swines
swinging swings swipe swiped swipes swiping swirl swirled swirling
swirls swish swished swisher swishes swishest swishing switchable
switchboard switchboards switcher swivel swivels swollen swoon swooned
swooning swoons swoop swooped swooping swoops sworded swordfish
swordfishes swording swords swung syllable syllables syllabus
syllabuses symbolics symbolism symmetrical sympathetically
sympathetics symphonic symptomatic synagogue synagogues synapse
synapses synchronous syndicated syndicates syndicating syndromes
synopses synopsis syntheses synthetic synthetics syphilis syphilises
syringe syringed syringes syringing syrup systematically systematics
tabbed tabbies tabbing tabby tabernacle tabernacles tablecloth
tablecloths tabled tablespoon tablespoonful tablespoonfuls tablespoons
tablet tablets tabling tabloid tabloids taboo tabooed tabooing taboos
tabulate tabulated tabulates tabulating tabulation tacit tacitly
taciturn tackier tackies tackiest tacky taco tacos tact tactful
tactfully tactlessly tadpole tadpoles tagged tagging tags tailed
tailgate tailgated tailgates tailgating tailing taillight taillights
tailspin tailspins taint tainted tainting taints takeoff takeoffs
takeover talc talisman talismans talkative talker talkers taller
tallest tallied tallies tallow tally tallying talon talons tambourine
tambourines tamed tamely tameness tamer tames tamest taming tamper
tampered tampering tampers tan tandem tandems tang tangential tangents
tangerine tangerines tangible tangibles tangle tangled tangles
tangling tango tangoed tangoing tangos tangs tankard tankards tanked
tanker tankers tanking tanned tanner tannest tanning tans tantamount
tantrum tantrums taped taper tapered tapering tapers tapestries
tapestry taping tapped tapping taps tar tarantula tarantulas tardier
tardies tardiest tardiness tardy targeted targeting tariff tariffs
tarnish tarnished tarnishes tarnishing tarpaulin tarpaulins tarred
tarried tarrier tarries tarriest tarring tarry tarrying tars tart
tartan tartans tartar tartars tarted tarter tartest tarting tarts
tasked tasking tassel tasteful tastefully tastier tastiest tasty
tattle tattled tattles tattling tattoo tattooed tattooing tattoos
tatty taunt taunted taunting taunts taut tauted tauter tautest tauting
tautology tauts tavern taverns tawdrier tawdriest tawdry tawnier
tawniest tawny taxable taxed taxicab taxicabs taxied taxiing taxing
taxis teachings teacup teacups teaed teaing teak teaks teamed teaming
teammate teammates teamster teamsters teamwork teapots teardrop
teardrops tearful teas tease teased teases teasing teaspoon teaspoons
teat teats technicalities technicality technicals technician
technicians technologically technologies tediously tedium tee teed
teeing teem teemed teeming teems teen teens tees teeter teetered
teetering teeters teethe teethed teethes teething teetotal
telecommunications telegram telegrams telegraph telegraphed
telegraphing telegraphs telepathic telepathy telephoned telephoning
telescoped telescopes telescoping teletype televise televised
televises televising televisions teller tellered tellering tellers
telltale telltales temperament temperamental temperaments temperance
temperate temperated temperates temperating tempered tempering tempers
tempest tempests tempestuous template temples tempo temporal
temporaries tempos temptations tenable tenacious tenacity tenancies
tenancy tenant tenanted tenanting tenants tendered tenderer tenderest
tendering tenderly tenderness tenders tendon tendons tendril tendrils
tenement tenements tenet tenets tenor tenors tensed tenser tenses
tensest tensing tensions tensors tent tentacle tentacles tentatives
tented tenths tenting tents tenuous tenure tenured tenures tenuring
tepee tepees tepid terminators termini terminologies terminus termite
termites termly terrace terraced terraces terracing terrain terrains
terrestrial terrestrials terrier terriers terrific territorial
territorials territories terrors tersely terseness terser tersest
testable testament testaments tester testers testes testicle testicles
testified testifies testify testifying testimonial testimonials
testimonies testimony testis tetanus tether tethered tethering tethers
textile textiles textually texture textures thankfuller thankfullest
thankless thatch thatched thatcher thatches thatching thaw thawed
thawing thaws theatrical thefts theist theists thence theologian
theologians theologies theoretic theorist theorists therapeutic
therapies therapist therapists thereon thereupon thermal thermals
thermodynamics thermometer thermometers thermostat thermostats
thesauri thesaurus thesauruses theta thicken thickened thickening
thickens thicker thickest thicket thickets thickly thicknesses thigh
thighs thimble thimbled thimbles thimbling thinker thinkers thinly
thinned thinner thinnest thinning thins thirded thirding thirds
thirsted thirstier thirstiest thirsting thirsts thirsty thirteen
thirteens thirteenth thirteenths thirties thirtieth thirtieths thistle
thistles thong thongs thorn thornier thorniest thorns thorny
thoroughbred thoroughbreds thorougher thoroughest thoughtful
thoughtfully thoughtfulness thoughtless thoughtlessly thousandth
thousandths thrash thrashed thrashes thrashing threadbare threaded
threading threads threes thresh threshed thresher threshers threshes
threshing thresholds thrice thrift thriftier thriftiest thrifts
thrifty thrill thrilled thriller thrillers thrilling thrills thrive
thrived thrives thriving throb throbbed throbbing throbs throne
thrones throng thronged thronging throngs throttle throttled throttles
throttling throwaway throwback throwbacks thud thudded thudding thuds
thug thugs thumbed thumbing thumbs thumbtack thumbtacks thump thumped
thumping thumps thunder thunderbolt thunderbolts thundered thundering
thunderous thunders thunderstorm thunderstorms thunderstruck thwart
thwarted thwarting thwarts thyme thyroid thyroids tiara tiaras ticked
ticketed ticketing ticking tickle tickled tickles tickling ticklish
ticks tidal tidbit tidbits tide tided tides tidier tidiest tiding tier
tiers tiff tiffed tiffing tiffs tigers tighten tightened tightening
tightens tighter tightest tightness tightrope tightropes tights
tightwad tightwads tilde tiled tiling tilled tilling tills tilt tilted
tilting tilts timber timbers timekeeper timekeepers timeless timelier
timeliest timely timers timescales timetabled timetables timetabling
timezone timid timider timidest timidity timidly timings tinder ting
tinge tinged tingeing tinges tinging tingle tingled tingles tingling
tings tinier tiniest tinker tinkered tinkering tinkers tinkle tinkled
tinkles tinkling tinned tinnier tinnies tinniest tinning tinny tinsel
tinsels tint tinted tinting tints tipped tipping tipsier tipsiest
tipsy tiptoe tiptoed tiptoeing tiptoes tirade tirades tireder tiredest
tireless tissue tissues tit titillate titillated titillates
titillating titled titling tits titted titter tittered tittering
titters titting toads toadstool toadstools toasted toaster toasters
toasting toasts tobaccos toboggan tobogganed tobogganing toboggans
toddle toddled toddler toddlers toddles toddling toed toeing toenail
toenails toffee toffees toga togas toil toiled toileted toileting
toiling toils tolerable tolerably tolerances tolled tolling tolls
tomahawk tomahawked tomahawking tomahawks tomb tombed tombing tomboy
tomboys tombs tombstone tombstones tomcat tomcats tomes tomorrows
tonal toned tong tongs tongued tongues tonguing tonic tonics toning
tonnage tonnages tonne tonnes tonsil tonsillitis tonsils tooled
tooling toolkit toot tooted toothache toothaches toothbrush
toothbrushes toothpaste toothpastes toothpick toothpicks tooting toots
topaz topazes topographies topography topology topped topping topple
toppled topples toppling torch torched torches torching torment
tormented tormenting tormentor tormentors torments tornado tornadoes
torpedo torpedoed torpedoes torpedoing torque torrent torrential
torrents torrid torrider torridest torso torsos tortilla tortillas
tortoise tortoises tortuous tortured tortures torturing tossed tosses
tossing tot totalitarian totalitarianism totalitarians totalities
totality totals tote toted totem totems totes toting tots totted
totter tottered tottering totters totting toucan toucans touchdown
touchdowns touchier touchiest touchings touchy toughed toughen
toughened toughening toughens tougher toughest toughing toughness
toughs toupee toupees toured touring tournament tournaments tourniquet
tourniquets tours tousle tousled tousles tousling tout touted touting
touts tow towed towel towels towered towering towing townspeople tows
toxic toxin toxins toyed toying tract traction tractor tractors tracts
trademark trademarked trademarking trademarks trader traders
traditionalist trafficked trafficking traffics tragedies tragically
tragics trailer trailered trailering trailers trainee trainees trainer
trainers trait traitor traitorous traitors traits tramp tramped
tramping trample trampled tramples trampling trampoline trampolined
trampolines trampolining tramps trance trances tranquil tranquiler
tranquilest transact transacted transacting transacts transatlantic
transcend transcended transcending transcends transcontinental
transcribe transcribed transcribes transcribing transcription
transcriptions transcripts transferable transformations transformer
transformers transfusion transfusions transgress transgressed
transgresses transgressing transgression transgressions transients
transistor transistors transited transiting transitional transitioned
transitioning transitions transitive transitives transitory transits
translators transliteration translucent transparencies transparency
transparently transpire transpired transpires transpiring transplant
transplanted transplanting transplants transportable transportation
transpose transposed transposes transposing transverse transversed
transverses transversing trapdoor trapeze trapezed trapezes trapezing
trapezoid trapezoids trapper trappers trappings trashed trashes
trashier trashiest trashing trashy trauma traumas traumatic traverse
traversed traverses traversing travestied travesties travesty
travestying trawl trawled trawler trawlers trawling trawls trays
treacheries treacherous treachery treacle treading treadmill
treadmills treads treason treasured treasurer treasurers treasures
treasuries treasuring treasury treaties treatise treatises treatments
treble trebled trebles trebling treed treeing trekked trekking treks
trellis trellised trellises trellising tremble trembled trembles
trembling tremor tremors trench trenched trenches trenching trended
trendier trendies trendiest trending trepidation trespass trespassed
trespasser trespassers trespasses trespassing trestle trestles
trialled trialling triangular tribal tribulation tribulations tribunal
tribunals tributaries tributary tribute tributes tricked trickery
trickier trickiest tricking trickle trickled trickles trickling
trickster tricksters tricycle tricycled tricycles tricycling trifled
trifles trifling trigonometry trill trilled trilling trillion
trillions trills trilogies trim trimester trimesters trimmed trimmer
trimmest trimming trims trinket trinkets trio trios tripe tripled
triples triplet triplets triplicate triplicated triplicates
triplicating tripling tripod tripods tripped tripping trite triter
trites tritest triumphant triumphed triumphing triumphs triviality
trod trodden trodes troll trolled trolleys trolling trolls trombone
trombones trooped trooper troopers trooping trophied trophies trophy
trophying tropical tropicals trot trots trotted trotting troubled
troublemaker troublemakers troublesome troubling trough troughs
trounce trounced trounces trouncing troupe trouped troupes trouping
trout trouts trowel trowels truancy truant truanted truanting truants
truce truces trucked trucking trudge trudged trudges trudging trued
truer trues truest truffle truffles truing truism truisms trump
trumped trumpeted trumpeting trumpets trumping trumps truncation
trunked trunking trustee trustees trustful trustier trusties trustiest
trustworthier trustworthiest trustworthy truthful truthfully
truthfulness tryings tryout tryouts tub tuba tubae tubas tubed
tuberculosis tubing tubs tubular tuck tucked tucking tucks tuft tufted
tufting tufts tug tugged tugging tugs tuition tulip tulips tumble
tumbled tumbler tumblers tumbles tumbling tummies tummy tumult
tumulted tumulting tumults tumultuous tuna tunas tundra tundras
tuneful tuner tuners tunic tunics turban turbans turbine turbines
turbulence turbulent tureen tureens turf turfed turfing turfs turgid
turkey turkeys turmoil turmoiled turmoiling turmoils turnaround turner
turnip turniped turniping turnips turnout turnouts turnover turnovers
turnpike turnpikes turnstile turnstiles turntables turpentine
turquoise turquoises turret turrets turtle turtleneck turtlenecks
turtles tusk tusks tussle tussled tussles tussling tutored tutorials
tutoring tutors tuxedo tuxedos twang twanged twanging twangs tweak
tweaked tweaking tweaks twee tweed tweet tweeted tweeting tweets
tweezers twelfth twelfths twelves twenties twentieths twiddle twiddled
twiddles twiddling twig twigged twigging twigs twilight twine twined
twines twinge twinged twinges twinging twining twinkle twinkled
twinkles twinkling twinned twinning twirl twirled twirling twirls
twister twisters twitch twitched twitches twitching twitter twittered
twittering twitters twos tycoon tycoons typeface typescript typesetter
typewriters typhoid typhoon typhoons typhus typified typifies typify
typifying typist typists typographic typographical tyrannical
tyrannies tyranny tyrant tyrants ubiquitous udder udders uglied uglier
uglies ugliest ugliness uglying ulcer ulcered ulcering ulcers ulterior
ultimated ultimates ultimating ultimatum ultimatums ultra ultrasonic
ultrasonics ultraviolet umbrellaed umbrellaing umbrellas umpire
umpired umpires umpiring umpteen unacceptably unaccepted unaccountable
unaccountably unadulterated unaltered unambiguously unanimity
unanimous unanimously unanswerable unanswered unarmed unassigned
unassuming unattached unattainable unattractive unawares unbearably
unbeatable unbecoming unbeliever unbelievers unblock unblocked
unblocking unblocks unborn unbreakable unbroken unburden unburdened
unburdening unburdens uncannier uncanniest uncanny unceasing
uncertainties unchallenged uncharitable unchristian unclean uncleaner
uncleanest uncled uncles uncling uncomfortably uncommoner uncommonest
uncompromising unconcerned unconditional unconditionally unconfirmed
unconsciously unconstitutional uncontrollable uncontrolled
uncontroversial unconventional unconvinced uncountable uncouth uncover
uncovered uncovering uncovers uncultured uncut undamaged undaunted
undecidable undecided undecideds undemocratic undeniable undeniably
underbrush underbrushed underbrushes underbrushing undercover
undercurrent undercurrents undercut undercuts undercutting underdog
underdogs underestimated underestimates underestimating underflow
underfoot undergarment undergarments undergrowth underhanded underlays
undermine undermined undermines undermining underneaths undernourished
underpants underpass underpasses underprivileged underrate underrated
underrates underrating underscore underscored underscores underscoring
undershirt undershirts underside undersides understandably
understandings understate understated understatement understatements
understates understating understudied understudies understudy
understudying undertaker undertakers undertakings undertone undertones
undertow undertows underwater underwear underweight underworld
underworlds underwrite underwrites underwriting underwritten
underwrote undeserved undesirables undetected undeveloped undisturbed
undoings undoubted undress undressed undresses undressing undue
undying unearth unearthed unearthing unearthly unearths uneasier
uneasiest uneasily uneasiness uneconomic uneconomical uneducated
unemployable unenlightened unequal unequals unequivocal unerring
unethical uneven unevener unevenest unevenly uneventful unfailing
unfairer unfairest unfairly unfaithful unfasten unfastened unfastening
unfastens unfeasible unfeeling unfilled unfit unfits unfitted
unfitting unfold unfolded unfolding unfolds unforeseen unforgettable
unforgivable unfortunates unfriendlier unfriendliest unfunny unfurl
unfurled unfurling unfurls ungainlier ungainliest ungainly ungodlier
ungodliest ungodly ungrammatical ungrateful unhappier unhappiest
unhappily unhappiness unhealthier unhealthiest unheard unhook unhooked
unhooking unhooks unicorn unicorns unicycle unidentified unification
uniformed uniformer uniformest uniforming uniformity uniforms
unilateral unilaterally unimaginative unimpressed uninformative
uninformed uninhibited uninitiated uninspired uninspiring
unintelligent unintelligible unintended unintentional unintentionally
uninterested uniqueness uniquer uniquest unison unities universals
universes unjust unjustifiable unjustly unkempt unkind unkinder
unkindest unkindlier unkindliest unkindly unknowns unlawful unleash
unleashed unleashes unleashing unlikelier unlikeliest unlikes unloaded
unloading unloads unluckier unluckiest unman unmanned unmanning unmans
unmarked unmarried unmask unmasked unmasking unmasks unmistakable
unmistakably unmitigated unmodified unmoved unnamed unnerve unnerved
unnerves unnerving unnoticed unoccupied unoriginal unorthodox unpack
unpacked unpacking unpacks unpaid unpick unpleasantly unpleasantness
unpopularity unprecedented unprepared unprincipled unprintable
unprivileged unprotected unproven unprovoked unpublished unqualified
unquestionable unquestionably unravel unravels unreal unreasonably
unrelenting unreliability unremarkable unrepeatable unrepresentative
unreservedly unresolved unrest unrested unresting unrestricted unrests
unruffled unrulier unruliest unruly unsafer unsafest unsaid unsanitary
unsatisfied unsay unsaying unsays unscathed unscheduled unscientific
unscrew unscrewed unscrewing unscrews unscrupulous unseasonable unseat
unseated unseating unseats unseemlier unseemliest unseemly unsettle
unsettled unsettles unsettling unsightlier unsightliest unsightly
unsigned unskilled unsolved unsophisticated unsounder unsoundest
unspeakable unstabler unstablest unstructured unstuck unsubstantiated
unsuccessfully unsuited unsung unsupportable untangle untangled
untangles untangling untenable unthinkable untidier untidiest untie
untied unties untiled untiles untiling untiring untold untouched
untrained untruer untruest untrustworthy untying unveil unveiled
unveiling unveils unwarranted unwary unwashed unwell unwieldier
unwieldiest unwieldy unwillingness unwind unwinding unwinds unwiser
unwisest unwittingly unworthy unwound unwrap unwrapped unwrapping
unwraps unwritten upbeat upbeats upbringings upend upended upending
upends upheaval upheavals upheld uphill uphills uphold upholding
upholds upholster upholstered upholsterer upholsterers upholstering
upholsters upholstery upkeep uplift uplifted uplifting uplifts upload
upped uppermost uppers upping uprights uprising uprisings uproar
uproars uproot uprooted uprooting uproots upshot upshots upstanding
upstart upstarted upstarting upstarts upstream upstreamed upstreaming
upstreams uptake uptight uptown upturn upturned upturning upturns
upwardly uranium urbane urbaner urbanest urchin urchins urinate
urinated urinates urinating urine urn urned urning urns usages
uselessly uselessness usher ushered ushering ushers usurp usurped
usurping usurps utensil utensils uteri uterus utilitarian
utilitarianism utmost utterance utterances uttered utterer utterest
uttering utters vacant vacate vacated vacates vacating vacationed
vacationing vaccinate vaccinated vaccinates vaccinating vaccination
vaccinations vaccine vaccines vacillate vacillated vacillates
vacillating vacuous vacuumed vacuuming vacuums vagabond vagabonded
vagabonding vagabonds vagaries vagary vagina vaginae vaginal vagrant
vagrants vagued vagueing vagueness vaguer vagues vaguest vainer
vainest valentine valentines valet valeted valeting valets valiant
validate validated validates validating validation validly valise
valises valleys valuables valueless valved valving vampire vampired
vampires vampiring vandal vandals vane vanes vanguard vanguards
vanilla vanillas vanities vanity vanned vanning vanquish vanquished
vanquishes vanquishing variously varnish varnished varnishes
varnishing varsities varsity vase vases vaster vastest vastness vasts
vats vatted vatting vault vaulted vaulting vaults veal vealed vealing
veals veer veered veering veers vegetarianism vegetarians vegetation
vehement vehemently veil veiled veiling veils veined veining veins
velocities velvet velveted velvetier velvetiest velveting velvets
velvety vendors veneer veneered veneering veneers venerable venerate
venerated venerates venerating veneration vengeance vengeful venison
venom venomous vent vented ventilate ventilated ventilates ventilating
ventilation ventilator ventilators venting ventricle ventricles
ventriloquist ventriloquists vents ventured ventures venturing
veracity veranda verandas verballed verballing verbals verbiage
verbosity verdicts verge verged verges verging verier veriest
veritable vermin vernacular vernaculars versatility versed versing
vertebra vertebrae vertebrate vertebrates verticals vertigo verve
vessels vest vested vestibule vestibules vestige vestiges vesting
vestment vestments vests veteran veterans veterinarian veterinarians
veterinaries veterinary veto vetoed vetoes vetoing vets vetted vetting
vex vexation vexations vexed vexes vexing viability viaduct viaducts
vial vials vibrant vibrate vibrated vibrates vibrating vibration
vibrations vicarious vicariously vicars viced vices vicing viciously
victor victories victorious victors videoed videoing videos videotape
videotaped videotapes videotaping vie vied vies viewers vigil
vigilance vigilant vigilante vigilantes vigils vigorous viler vilest
vilified vilifies vilify vilifying villa villager villagers villain
villainies villainous villains villainy villas vindicate vindicated
vindicates vindicating vindictive vine vined vinegar vines vineyard
vineyards vining vintages vinyls viola violas violated violates
violating violations violet violets violins viper vipers viral
virginity virgins virile virility virtuoso virtuosos virtuous
virtuously virulent visa visaed visaing visas vise vised vises
visibility visibly vising visionaries visionary visioned visioning
visions visitation visitations visor visors vista vistaed vistaing
vistas visuals vitality vitally vitals vitamin vitamins vitriolic
vivacious vivaciously vivacity vivid vivider vividest vividly
vivisection vocabularies vocalist vocalists vocals vocation vocational
vocations vociferous vociferously vodka vogue vogued vogueing vogues
voguing voiced voicing voided voiding voids volatile volcanic
volcanics volcano volcanoes volition volley volleyball volleyballs
volleyed volleying volleys volt voltages volts volumed voluming
voluminous voluntaries voluptuous vomited vomiting vomits voodoo
voodooed voodooing voodoos voracious vortex vortexes vouched voucher
vouchers vouches vouching vow vowed vowels vowing vows voyage voyaged
voyager voyagers voyages voyaging vulgar vulgarer vulgarest
vulgarities vulgarity vulnerabilities vulnerability vulture vultures
vying wad wadded wadding waddle waddled waddles waddling wads wafer
wafers waffled waffles waffling waft wafted wafting wafts wag waged
wager wagered wagering wagers wagged wagging waging wags waif waifed
waifing waifs wail wailed wailing wails waist waisted waisting
waistline waistlines waists waiter waiters waitress waitresses waive
waived waiver waivers waives waiving waken wakened wakening wakens
walker walkers walkout walkouts walled wallets walling wallop walloped
walloping wallops wallow wallowed wallowing wallows wallpaper
wallpapered wallpapering wallpapers walnut walnuts walrus walruses
waltz waltzed waltzes waltzing wan wand wanderer wanderers wands wane
waned wanes waning wanna wanner wannest wanton wantoned wantoner
wantoning wantons warble warbled warbles warbling warded warden
wardened wardening wardens warding wardrobe wardrobes wards warehoused
warehouses warehousing warfare warhead warheads warier wariest warlike
warmer warmest warmly warmth warpath warpaths warranted warrantied
warranties warranting warrants warrantying warred warren warrens
warring warrior warriors wart warts wases washable washables washcloth
washcloths washer washered washering washers washout washouts washroom
washrooms wasp wasps wastage wastebasket wastebaskets wastefully
wasteland wastelands watchdog watchdogs watchful watchman watchmen
watchword watchwords watered waterfall waterfalls waterfront
waterfronts waterier wateriest watering waterlogged watermark
watermarked watermarking watermarks watermelon watermelons waterproof
waterproofed waterproofing waterproofs watershed watersheds watertight
waterway waterways waterworks watery watt watter wattest watts
waveform wavelength wavelengths waver wavered wavering wavers wavier
waviest wavy wax waxed waxes waxier waxiest waxing waxy waylaid waylay
waylaying waylays wayside waysides wayward weaken weakened weakening
weakens weaker weakest weaklier weakliest weakling weakly wealthier
wealthiest wean weaned weaning weans weaponry wearied wearier wearies
weariest wearily weariness wearisome wearying weathered weathering
weathers weave weaved weaver weavers weaves weaving web webbed webbing
webs wedder weddings wedge wedged wedges wedging wedlock weed weeded
weedier weediest weeding weeds weedy weeing weekdays weekended
weekending weeklies weep weeping weeps weer wees weest weighed
weighing weighs weighted weightier weightiest weighting weights
weighty weirded weirder weirdest weirding weirdness weirdo weirdos
weirds weld welded welder welders welding welds welled welling
wellington wells welt welted welter weltered weltering welters welting
welts wept werewolf werewolves wested westerlies westerly westerns
westing wests westward wetter wettest whack whacked whacking whacks
whaled whaler whalers whaling wharf wharves whats wheat wheedle
wheedled wheedles wheedling wheelbarrow wheelbarrows wheelchair
wheelchairs wheeled wheeling wheeze wheezed wheezes wheezing whens
whereabouts wherein wheres wherewithal whet whets whetted whetting
whew whewed whewing whews whiff whiffed whiffing whiffs whiled whiles
whiling whimmed whimming whimper whimpered whimpering whimpers whims
whimsical whine whined whines whining whinnied whinnier whinnies
whinniest whinny whinnying whip whipped whipping whips whir whirl
whirled whirling whirlpool whirlpools whirls whirlwind whirlwinds
whirred whirring whirs whisk whisked whisker whiskered whiskers
whisking whisks whisper whispered whispering whispers whistled
whistling whiten whitened whitening whitens whiter whitest whitewash
whitewashed whitewashes whitewashing whittle whittled whittles
whittling whizzed whizzes whizzing whoa wholehearted wholes wholesale
wholesaled wholesaler wholesalers wholesales wholesaling wholesome
whooped whooping whopper whoppers whore whores whys wick wickeder
wickedest wickedly wickedness wicker wickers wicket wickets wicks
widen widened widening widens widow widowed widower widowers widowing
widows widths wield wielded wielding wields wig wigged wigging wiggle
wiggled wiggles wiggling wigs wigwam wigwams wildcat wildcats
wildcatted wildcatting wilded wilder wilderness wildernesses wildest
wildfire wildfires wilding wildlife wildness wilds wilier wiliest
willinger willingest willingness willow willows willpower wilt wilted
wilting wilts wily wince winced winces winch winched winches winching
wincing windfall windfalls windier windiest windmill windmilled
windmilling windmills windowpane windowpanes windpipe windpipes
windscreen windscreens windshield windshields windy wined winged
wingers winging wining wink winked winking winks winnings winsome
winsomer winsomest wintered wintering winters wintertime wintrier
wintriest wintry wiper wipers wirier wiriest wiry wisecrack
wisecracked wisecracking wisecracks wiselier wiseliest wisely wises
wishbone wishbones wishful wisp wispier wispiest wisps wispy wist
wistful wistfully witchcraft witched witches witching withdrawals
withe withed wither withered withering withers withes withheld
withhold withholding withholds withing withs withstand withstanding
withstands withstood witless wits witticism witticisms wittier
wittiest witting wizards wizened wobble wobbled wobbles wobblier
wobblies wobbliest wobbling wobbly woe woes wok woks wolfed wolfing
wolfs wolves womanhood womankind womb wombats wombs wonderland
wonderlands woo woodchuck woodchucks wooded woodener woodenest woodier
woodies woodiest wooding woodland woodlands woodpecker woodpeckers
woodsman woodsmen woodwind woodwinds woodwork woody wooed woof woofed
woofing woofs wooing wool woollier woollies woolliest woolly woos
wordier wordiest wordings wordy workbench workbenches workbook
workbooks workforce workman workmanship workmen workout workouts
workplace workshops worldlier worldliest worldly wormed wormhole
wormholes worming worrisome worsen worsened worsening worsens worships
worsted worsting worsts worthier worthies worthiest wost wot woulds
wounded wounder wounding wounds wove woven wovens wowed wowing wows
wrangle wrangled wrangler wranglers wrangles wrangling wrappings
wrathed wrathing wraths wreak wreaked wreaking wreaks wreath wreathe
wreathed wreathes wreathing wreaths wreckage wrench wrenched wrenches
wrenching wrens wrest wrested wresting wrestle wrestled wrestler
wrestlers wrestles wrestling wrests wretch wretcheder wretchedest
wretches wried wries wriggle wriggled wriggles wriggling wright wring
wringer wringers wringing wrings wrinkle wrinkled wrinkles wrinkling
wrists wristwatch wristwatches writ writable writhe writhed writhes
writhing writs wrongdoer wrongdoers wrongdoing wrongdoings wronged
wronger wrongest wronging wrought wrung wry wryer wryest wrying
xenophobia xylophone xylophones yacht yachted yachting yachts yak
yakked yakking yaks yam yams yank yanked yanking yanks yap yapped
yapping yaps yardstick yardsticks yarn yarns yawned yawning yawns
yearlies yearling yearn yearned yearning yearnings yearns yeast yeasts
yell yelled yelling yellowed yellower yellowest yellowing yellows
yells yelp yelped yelping yelps yen yens yeses yessed yessing
yesterdays yew yews yielded yielding yodel yodels yoga yogurt yogurts
yoke yoked yokel yokels yokes yoking yolk yolks yonder youngster
youngsters yous youthful youths yowl yowled yowling yowls zanied
zanier zanies zaniest zany zanying zeal zealous zebra zebras zenith
zeniths zeroed zeroing zest zests zeta zigzag zigzagged zigzagging
zigzags zillion zillions zinc zincked zincking zincs zip zipped zipper
zippered zippering zippers zipping zips zodiac zodiacs zombie zombies
zoned zoning zoo zoological zoologist zoologists zoology zoomed
zooming zooms zoos zucchini zucchinis
""".split())
SCOWL50 = set("""
aardvarks abaft abalone abalones abase abased abasement abases abash
abashed abashes abashing abasing abatement abattoir abattoirs abbe
abbes abbess abbesses abduction abductions abductor abductors abeam
abed aberrant abettor abettors abeyance abidings abjectly abjuration
abjurations abjure abjured abjures abjuring ablative ablatives abloom
ablution ablutions abnegate abnegated abnegates abnegating abnegation
abolitionist abolitionists abominably abominate abominated abominates
abominating abominations aboriginals abortionist abortionists
abracadabra abrade abraded abrades abrading abrasion abrasions
abrasively abrasiveness abrogate abrogated abrogates abrogating
abrogation abrogations abruptness abscissa abscissas absenteeism
absently absinthe absolution absolutism absorbency abstainer
abstainers abstemious abstinent abstractedly abstractly abstractness
abstractnesses abstrusely abstruseness abstruser abstrusest abusively
abusiveness abut abutment abutments abuts abutted abutting abuzz
abysmally acacia acacias academia academical academician academicians
acanthus acanthuses accentuation accessibly accession accessioned
accessioning accessions acclamation acclimation acclimatisation
acclimatization accouterments accreditation accretion accretions
accrual accruals acculturation accumulative accumulator accurateness
accursed accusative accusatives accusatory accusingly acerbic acerbity
acetaminophen acetate acetates acetic acetone acetylene achier achiest
achiever achievers achoo achromatic achy acidic acidified acidifies
acidify acidifying acidly acidulous acme acmes acolyte acolytes
aconite aconites acoustical acoustically acquiescent acquirable
acquirement acquisitive acquisitiveness acrostic acrostics actinium
actionable activation activism actuarial actuaries actuate actuated
actuates actuating actuator actuators acuity acupuncturist
acupuncturists acuteness adagio adagios adamantly adaptability addend
addenda addends adder adders addle addled addles addling addressable
adduce adduced adduces adducing adenoid adenoidal adenoids adeptly
adeptness adequacy adiabatic adieu adieus adios adipose adjacently
adjectival adjectivally adjudge adjudged adjudges adjudging adjudicate
adjudicated adjudicates adjudicating adjudication adjudicator
adjudicators adjuration adjurations adjure adjured adjures adjuring
adjuster adjusters adjutant adjutants adman admen administrate
administrated administrates administrating administratively admiralty
admiringly admissibility admixture admixtures admonishment
admonishments admonitory adoptive adorably adoringly adrenal
adrenaline adrenals adroitness adulate adulated adulates adulating
adulterant adulterants adulterer adulterers adulteress adulteresses
adulterous adumbrate adumbrated adumbrates adumbrating adumbration
advantageously adventitious advents adventuresome adventuress
adventuresses adventurously adversarial adverted adverting
advisability advisedly advisement advocacy adz adzes aegis aerate
aerated aerates aerating aeration aerator aerators aerialist
aerialists aerie aerier aeries aeriest aerobatics aerobic aerobics
aerodynamically aeronautical aeronautics aesthete aesthetes aesthetics
affability affirmatively affluently afforest afforestation afforested
afforesting afforests affray affrays afghan afghans aficionado
aficionados afire aflutter aforethought afoul aft afterbirth
afterbirths afterburner afterburners aftercare afterglow afterglows
aftershave aftershaves aftershock aftershocks aftertaste aftertastes
afterword afterwords agape agar agate agates agave ageism ageless
agglomerate agglomerated agglomerates agglomerating agglomeration
agglomerations agglutinate agglutinated agglutinates agglutinating
agglutination agglutinations aggregation aggregations aggrieve
aggrieved aggrieves aggrieving agilely agleam aglitter agog agrarian
agrarians agribusiness agribusinesses agriculturalist agriculturalists
agronomist agronomists agronomy ague aha ahas ahem ahems aileron
ailerons aimlessness airbrush airbrushed airbrushes airbrushing
airdrop airdropped airdropping airdrops airfare airfares airhead
airheads airily airiness airings airless airlift airlifted airlifting
airlifts airman airmen airship airships airsick airsickness airspace
airwaves airway airways airworthier airworthiest airworthy akimbo
alabaster alacrity alb albacore albacores albatross albatrosses albs
albumen albumin alchemist alchemists alchemy alder alderman aldermen
alders alderwoman alderwomen alertly alertness alfalfa alfresco
algebraically algebras algorithmic alienable alimentary alkalinity
alkaloid alkaloids allegorically allegro allegros alleluia alleluias
allergen allergenic allergens allergist allergists alleviation
alleyway alleyways alliteration alliterations alliterative allover
allspice allusive allusively alluvial alluvium alluviums aloe aloes
aloha alohas aloofness alpaca alpacas alphanumerics alphas alpine
alpines alright altercation altercations alternations alternators
altimeter altimeters altruist altruistically altruists alum alumna
alumnae alumni alumnus alums amalgam amalgams amanuenses amanuensis
amaranth amaranths amaryllis amaryllises amateurism amatory amazon
amazons ambassadorial ambassadorship ambassadorships ambergris
ambiance ambiances ambidextrously ambitiousness ambivalently ambrosia
ambulatories ambulatory ameliorate ameliorated ameliorates
ameliorating amelioration amendable amiability amicability amidships
amigo amigos amity ammeter ammeters ammo amnesiac amnesiacs
amniocenteses amniocentesis amoebic amorality amorally amorously
amorousness amorphously amorphousness amour amours amped amperage
amping amplitudes ampule ampules amputee amputees anachronistic
anaconda anacondas anaerobic anagrams analgesia analog analogously
analogs analogues analytically anapest anapests anarchically
anarchistic anathemas anatomic anatomically anatomist anatomists
ancestress ancestresses anchorite anchorites anchorman anchormen
anchorpeople anchorperson anchorwoman anchorwomen ancillaries
ancillary andante andantes andiron andirons androgen androgynous
anecdotal anemometer anemometers anemone anemones aneurysm aneurysms
angelically angina angioplasties angioplasty angiosperm angiosperms
angleworm angleworms angora angoras angstrom angstroms angularities
angularity animatedly animator animators animism animist animistic
animists animus anion anions anise aniseed ankh ankhs anklet anklets
anneal annealed annealing anneals annihilator annihilators annular
annulars anode anodes anodyne anodynes anointment anons anopheles
anorexia anorexic anorexics antacid antacids antagonistically
antarctic antebellum antecedent antecedents antechamber antechambers
antedate antedated antedates antedating antediluvian anterior anteroom
anterooms anther anthers anthologist anthologists anthracite
anthropocentric anthropoid anthropoids anthropomorphic
anthropomorphism anti antiabortion antiaircraft anticipatory anticked
anticking anticlimactic anticlockwise anticyclone anticyclones
antidepressant antidepressants antigen antigens antihero antiheroes
antihistamine antihistamines antiknock antimatter antimony
antiparticle antiparticles antipasti antipasto antipastos antipathetic
antipersonnel antiperspirant antiperspirants antiphonal antiphonals
antipodes antiquarian antiquarians antiquaries antiquary antis
antiseptically antislavery antithetical antithetically antitoxin
antitoxins antitrust antiviral antivirals antiwar antlered anymore
anytime apace apathetically aperitif aperitifs aphasia aphasic
aphasics aphelia aphelion aphelions aphid aphids aphoristic
aphrodisiac aphrodisiacs apiaries apiary aplenty apocalypse
apocalypses apocalyptic apogee apogees apolitical apologia apologias
apologist apologists apoplectic apoplexies apoplexy apostasies
apostasy apostate apostates apostolic apothecaries apothecary
apotheoses apotheosis appeaser appeasers appellant appellants
appellate appellation appellations appendectomies appendectomy
appertain appertained appertaining appertains applejack applesauce
applique appliqued appliqueing appliques apportion apportioned
apportioning apportionment apportions appositely appositeness
apposition appositive appositives appraiser appraisers appreciably
appreciatively apprehensively apprehensiveness apprise apprised
apprises apprising approbation approbations appropriateness
approvingly appurtenance appurtenances apropos apse apses aptness aqua
aquaculture aquanaut aquanauts aquaplane aquaplaned aquaplanes
aquaplaning aquas aquavit aqueous aquifer aquifers aquiline arabesque
arabesques arachnid arachnids arbitrariness arboreal arboretum
arboretums arborvitae arborvitaes arbutus arbutuses archaically
archaism archaisms archangel archangels archbishopric archbishoprics
archdeacon archdeacons archdiocese archdioceses archduke archdukes
archenemies archenemy archetype archetypes architecturally archivist
archivists archly archness arctic arctics arduousness argon argosies
argosy argot argots argumentation argyle argyles aridity aright
aristocratically arithmetical arithmetically armada armadas armature
armatured armatures armaturing armband armbands armful armfuls armhole
armholes armlet armlets armrest armrests aromatherapy arousal arpeggio
arpeggios arraignment arraignments arranger arrangers arrant arrogate
arrogated arrogates arrogating arrowhead arrowheads arrowroot arroyo
arroyos arsonist arsonists arteriosclerosis artfully artfulness
arthropod arthropods articulateness artier artiest artificer
artificers artificiality artiste artistes artless artlessly
artlessness artsier artsiest artsy artworks arty ascendancy ascendant
ascendants ascertainable asceticism ascot ascots ascribable ascription
aseptic asexually ashamedly ashcans ashier ashiest ashram ashrams ashy
asinine aslant asocial asocials asp aspartame asperities asperity
asphyxia aspic aspics aspirate aspirated aspirates aspirating asps
assailable assay assayed assaying assays assemblage assemblages
assemblyman assemblymen assemblywoman assemblywomen assertively
assertiveness asseverate asseverated asseverates asseverating
assiduous assiduously assiduousness assignable assignation
assignations assize assizes assonance assuage assuaged assuages
assuaging assuredly aster astern asters asthmatic asthmatics
astigmatic astigmatism astigmatisms astir astoundingly astrakhan
astral astrals astringency astrologer astrologers astronautics
astronomic astronomically astrophysicist astrophysicists astrophysics
astuteness asunder asymmetric asymmetrical asymmetrically asymptotic
asymptotically atavism atavistic atelier ateliers atherosclerosis
athletically atmospherically atoll atolls atonal atonality atop atria
atrium atrociousness atrophied atrophies atrophy atrophying attackers
attainable attar attender attentiveness attenuate attenuated
attenuates attenuating attenuation attestation attestations
attractively attributions attributive attributively attributives
attrition atwitter atypical atypically audaciously audaciousness
audibility audiophile audiophiles audiovisual auger augers aught
aughts augmentation augmentations augur augured auguries auguring
augurs augury auk auks aurally aureole aureoled aureoles aureoling
auricle auricles auspice auspices auspiciously auspiciousness
austerely authentication authentications authoritarianism
authoritarians authoritativeness autism autistic autistics
autocratically autoimmune automaton automatons autonomously autopilot
autopilots autoworker autoworkers avariciously avast avasts avatar
avatars avenger avengers aver averred averring avers avian avians
aviaries aviary aviatrices aviatrix aviatrixes avidity avidly avionics
avocation avocations avoidably avoirdupois avowedly avuncular
awakenings awash aweigh awesomely awestruck awfulness awl awls axial
axiomatically axon axons ayatollah ayatollahs azimuth azimuths b baa
baaed baaing baas babbler babblers babel babels babushka babushkas
babyhood babysat babysit babysits babysitter babysitters babysitting
baccalaureate baccalaureates bacchanal bacchanalian bacchanalians
bacchanals bacilli bacillus backache backaches backbit backbite
backbiter backbiters backbites backbiting backbitings backbitten
backboard backboards backbreaking backdate backdated backdates
backdating backdrop backdrops backfield backfields backhoe backhoes
backless backpacker backpackers backpedal backpedals backrest
backrests backsides backslapper backslappers backslid backslide
backslider backsliders backslides backsliding backspaced backspaces
backspacing backspin backstabbing backstairs backstop backstopped
backstopping backstops backstretch backstretches backstroke
backstroked backstrokes backstroking backup backups backwardness
backwash backwater backwaters backyard backyards bacteriological
bacteriologist bacteriologists bacteriology badinage badlands badmouth
badmouthed badmouthing badmouths bafflement bagatelle bagatelles
bagginess bagpipe bagpipes bah bahs bailiff bailiffs bailiwick
bailiwicks bailout bailouts baize balalaika balalaikas balderdash
baldly baleen baleful balefuller balefullest balefully balkier
balkiest balky balladeer balladeers ballistic balloonist balloonists
ballpark ballparks ballplayer ballplayers ballpoint ballpoints
ballsier ballsiest ballsy ballyhoo ballyhooed ballyhooing ballyhoos
balminess balsa balsam balsamed balsaming balsams balsas baluster
balusters balustrade balustrades banalities banality banditry
bandoleer bandoleers bane baned baneful banefuller banefullest banes
bangle bangles baning banishment banjoist banjoists bankbook bankbooks
bankroll bankrolled bankrolling bankrolls banns banshee banshees
bantam bantams bantamweight bantamweights banyan banyans baobab
baobabs baptismal baptist baptisteries baptistery baptists barbarism
barbarisms barbarities barbarity barbarously barbell barbells
barberries barberry barbershop barbershops barefaced barefooted
barehanded bareheaded bareness barf barfed barfing barfs barium barker
barkers barmaid barmaids barnstorm barnstormed barnstorming barnstorms
barometric baroness baronesses baronet baronets baronial barrack
barracks barracuda barracudas barrenness barrio barrios barroom
barrooms barrow barrows basal basalt baseboard baseboards baseless
baselines basely baseman basemen baseness bashfully bashfulness
basilica basilicas bassinet bassinets bassist bassists basso
bassoonist bassoonists bassos bast bastion bastions basts bate bated
bates bather bathers bathhouse bathhouses bathmat bathmats bathos
bathrobe bathrobes batik batiks bating batsmen batten battened
battening battens battier battiest battleground battlegrounds
battlement battlements batty bauble baubles bauxite bawdily bawdiness
bayberries bayberry bazillion bazillions bazooka bazookas beachcomber
beachcombers beachhead beachheads beanbag beanbags bearish bearskin
bearskins beastlier beastliest beastliness beastly beatific
beatification beatifications beatified beatifies beatify beatifying
beatings beatitude beatitudes beatnik beatniks beau beaus beauteous
beauteously beautification beautifier beautifiers bebop bebops becalm
becalmed becalming becalms beck becks becomingly bedazzle bedazzled
bedazzles bedazzling bedeck bedecked bedecking bedecks bedevil
bedevilment bedevils bedfellow bedfellows bedpan bedpans bedraggle
bedraggled bedraggles bedraggling bedroll bedrolls bedsore bedsores
bedstead bedsteads beechnut beechnuts beefburger beefsteak beefsteaks
beekeeper beekeepers beekeeping beeline beelined beelines beelining
beep beeped beepers beeping beeps befog befogged befogging befogs
befoul befouled befouling befouls befuddle befuddled befuddles
befuddling beget begets begetting beggarly begone begonia begonias
begot begotten begrudgingly beguilingly behemoth behemoths behest
behests beholden beholders belay belayed belaying belays beleaguer
beleaguered beleaguering beleaguers belladonna belle belles bellicose
bellicosity belligerence belligerency belligerently bellwether
bellwethers bellyache bellyached bellyaches bellyaching bellybutton
bellybuttons bellyful bellyfuls beltway beltways benchmark benchmarks
benefaction benefactions benefactress benefactresses benefice
beneficence beneficent beneficently benefices beneficially
benevolently benignly benumb benumbed benumbing benumbs benzene berate
berated berates berating berg bergs beriberi berm berms beryl
beryllium beryls besieger besiegers besmirch besmirched besmirches
besmirching besom besomed besoming besoms besot besots besotted
besotting bespeak bespeaking bespeaks bespoke bespoken bestiaries
bestiary bestir bestirred bestirring bestirs bestowal bestowals
bestridden bestride bestrides bestriding bestrode bestseller
bestsellers betake betaken betakes betaking betas bethink bethinking
bethinks bethought betide betided betides betiding betoken betokened
betokening betokens betook betrayer betrayers betroth betrothed
betrothing betroths betwixt bevel bevels bevies bevy bewail bewailed
bewailing bewails biannual biannually biathlon biathlons bibles
bibliographer bibliographers bibliographical bibliophile bibliophiles
bibulous bicameral bicep biceps bicuspid bicuspids bicyclist
bicyclists bidder bidders biddies biddy bidet bidets bidirectional
biennially bier biers bifocal bifurcate bifurcated bifurcates
bifurcating bifurcation bifurcations biggie biggies bighearted bighorn
bighorns bight bights bigmouth bigmouths bigness bigotries bigwig
bigwigs biker bikers bilaterally bilge bilges bilious bilk bilked
bilking bilks billet billeted billeting billets billies billings
billionaire billionaires billionth billionths billowier billowiest
billowy billy bimbo bimbos bimonthlies bimonthly binderies bindery
binge binged binges binging binnacle binnacles binocular binoculars
binomials biochemicals biochemist biochemists biodiversity biofeedback
bionic bionics biophysicist biophysicists biophysics biopsied biopsies
biopsy biopsying biorhythm biorhythms biosphere biospheres
biotechnology bipartite bipedal bipolar biracial birdbath birdbaths
birdbrained birdhouse birdhouses birdie birdied birdieing birdies
birdseed birdwatcher birdwatchers biretta birettas birthrate
birthrates birthright birthrights birthstone birthstones bisection
bisections bisector bisectors bisexuality bishopric bishoprics bismuth
bisque bistro bistros bitchier bitchiest bitchy bitingly bittern
bitterns bitters bitumen bituminous bivalve bivalves bivouac
bivouacked bivouacking bivouacs biweeklies biweekly bizarrely
blabbermouth blabbermouths blackball blackballed blackballing
blackballs blackcurrant blackguard blackguards blackish blackness
blackthorn blackthorns blah blahed blahing blahs blamelessly
blameworthy blandishment blandishments blandly blandness blankness
blarney blarneyed blarneying blarneys blasphemer blasphemers
blasphemously blasters blastoff blastoffs blazon blazoned blazoning
blazons bleacher bleachers bleakly bleakness blearily bleeder bleeders
bleep bleeped bleeping bleeps blench blenched blenches blenching
blender blenders blessedly blessedness blinders blindside blindsided
blindsides blindsiding blintz blintze blintzes blissfulness bloat
bloated bloating bloats blockages blockhouse blockhouses blondness
bloodbath bloodbaths bloodcurdling bloodless bloodlessly bloodmobile
bloodmobiles bloodstain bloodstained bloodstains bloodstreams
bloodsucker bloodsuckers bloodthirstiness bloomer bloomers blooper
bloopers blotchier blotchiest blotchy blower blowers blowgun blowguns
blowup blowups blowzier blowziest blowzy bluebottle bluebottles
bluefish bluefishes bluejacket bluejackets bluenose bluenoses bluffers
bluish blunderbuss blunderbusses blunderer blunderers blurbs blurrier
blurriest blurry blusher blushers blustery boardinghouse
boardinghouses boardroom boardrooms boaster boasters boastfulness
boater boaters boatman boatmen boatswain boatswains bobbies bobble
bobbled bobbles bobbling bobby bobolink bobolinks bobtail bobtails
bobwhite bobwhites bodega bodegas bodkin bodkins bodybuilding bogey
bogeyed bogeying bogeyman bogeymen bogeys boggier boggiest boggy
bohemian bohemians boilerplate boilings boisterously boisterousness
bola bolas boldface bole bolero boleros boles boll bolled bolling
bolls bombardier bombardiers bombast bombastic bombshell bombshells
bonanza bonanzas bonbon bonbons bondsman bondsmen bonehead boneheads
boneless boner boners bong bonged bonging bongo bongos bongs bonito
bonitos bonkers bonnie bonnier bonniest bonny bonsai bonsais boob
boobed boobies boobing boobs boodle boodles boogie boogied boogieing
boogies bookie bookies bookish bookmaker bookmakers bookmaking
bookmobile bookmobiles bookseller booksellers bookshelves bookstores
boondocks boondoggle boondoggled boondoggles boondoggling boorishly
bootblack bootblacks bootlegger bootleggers bootless bootstraps boozed
boozer boozers boozes boozier booziest boozing boozy bopped bopping
bops borax bordello bordellos borderland borderlands borer borers
boron borrower borrowers borscht bosh bossily bossiness botulism
boudoir boudoirs bouffant bouffants bouillabaisse bouillabaisses
bouillon bouillons bouncer bouncers bouncier bounciest bouncy bounden
bounder bounders bounteous bountifully boutonniere boutonnieres bower
bowers bowlers bowman bowmen bowsprit bowsprits bowstring bowstrings
boxwood boyishly boyishness boysenberries boysenberry bozo bozos
bracken bract bracts brad brads bragger braggers braille brainchild
brainchildren brainteaser brainteasers brakeman brakemen bramble
brambles brashly brashness brattier brattiest bratty bravura bravuras
brawler brawlers brawniness brazenly brazenness breadbasket
breadbaskets breadfruit breadfruits breakage breakages breaker
breakers breakup breakups breastbone breastbones breastplate
breastplates breaststroke breaststrokes breastwork breastworks
breathable breathier breathiest breathlessly breathlessness
breathtakingly breathy breech breeches breezily breeziness breviaries
breviary brewer brewers brickbat brickbats bricklaying bridgehead
bridgeheads bridgework briefings briefness brier briers brig brigand
brigandage brigands brigantine brigantines brigs brilliancy brimful
brindled brinkmanship briquette briquettes brisket briskets briskness
bristlier bristliest bristly brittleness broadcaster broadcasters
broadcloth broadloom broadness broadsword broadswords brogan brogans
brogue brogues brokenhearted brokerage brokerages bromide bromides
bromine bronchi bronchial bronchus brontosaur brontosaurs brontosaurus
brontosauruses brooder brooders broomstick broomsticks brothel
brothels brotherliness brouhaha brouhahas brownish brownout brownouts
brownstone brownstones browser browsers brr bruin bruins bruiser
bruisers brunet brunets brushwood brusquely brusqueness brutishly
buccaneer buccaneered buccaneering buccaneers buckboard buckboards
bucketful bucketfuls buckeye buckeyes buckler bucklers buckram bucksaw
bucksaws buckshot buckskin buckskins buckteeth bucktooth bucktoothed
buckwheat bucolic bucolics buddings budgerigar budgerigars budgetary
budgie budgies buffoonery bugaboo bugaboos bugbear bugbears buildup
buildups bulgier bulgiest bulgy bulimia bulimic bulimics bulkhead
bulkheads bulkiness bulletproof bulletproofed bulletproofing
bulletproofs bullfighting bullfinch bullfinches bullheaded bullhorn
bullhorns bullish bullock bullocks bullpen bullpens bullring bullrings
bullshit bullshits bullshitted bullshitting bulrush bulrushes bulwark
bulwarks bumble bumbled bumbler bumblers bumbles bumbling bumblings
bummers bumpkin bumpkins bumptious bunged bunghole bungholes bunging
bungs bunkhouse bunkhouses bunkum bunt bunted bunting buntings bunts
buoyantly bur burdock bureaucratically burg burgeon burgeoned
burgeoning burgeons burgher burghers burgled burgles burgling burgs
burlesque burlesqued burlesques burlesquing burliness burnoose
burnooses burnout burnouts burrito burritos burs bursars bursitis
busbies busboy busboys busby bushiness bushings bushman bushmen
bushwhack bushwhacked bushwhacker bushwhackers bushwhacking bushwhacks
businesslike buster busters busyness busywork butane butch butches
butterfat butterfingers butterier butteries butteriest butternut
butternuts buttocked buttocking buyout buyouts buzzword buzzwords
bylaw bylaws byline bylines byplay byproduct byproducts byword bywords
c cabal cabals cabana cabanas cabinetmaker cabinetmakers cablecast
cablecasting cablecasts cablegram cablegrams caboodle cachet cacheted
cacheting cachets cacophonies cacophonous cacophony cadaver cadaverous
cadavers caddish cadenza cadenzas cadge cadged cadger cadgers cadges
cadging cadmium cadre cadres cads caducei caduceus caesura caesuras
caftan caftans cagily caginess cahoot cahoots cairn cairns caisson
caissons cajolery calabash calabashes calamine calamined calamines
calamining calamitous calcified calcifies calcify calcifying calcine
calcined calcines calcining calcite calculable calfskin calibrator
calibrators caliph caliphate caliphates caliphs calisthenic
calisthenics calligrapher calligraphers calliope calliopes callously
callousness callower callowest caloric calorific calumniate
calumniated calumniates calumniating calumnies calumny calved calving
calypso calypsos calyx calyxes camber cambered cambering cambers
cambium cambiums cambric camcorder camcorders camellia camellias
cameraman cameramen camerawoman camerawomen camisole camisoles
campanile campaniles campfire campfires campground campgrounds camphor
campier campiest campsite campsites campy cams camshaft camshafts
canape canapes canard canards canasta cancan cancans cancerous
candelabra candelabras candelabrum candidness candlelight cankerous
cannabis cannabises cannibalistic cannily canniness cannonade
cannonaded cannonades cannonading cannonball cannonballed
cannonballing cannonballs canoeist canoeists canonicals cantankerously
cantankerousness cantata cantatas canted canticle canticles cantilever
cantilevered cantilevering cantilevers canting canto canton cantons
cantor cantors cantos cants canvasback canvasbacks capacious
capaciously capaciousness caparison caparisoned caparisoning
caparisons capitalistic capitol capitols capitulation capitulations
caplet caplets capon capons cappuccino cappuccinos capriciousness
capstan capstans captaincies captaincy captious captivation carafe
carafes carapace carapaces caraway caraways carbide carbides carbine
carbines carbonate carbonated carbonates carbonating carbonation
carboy carboys carbuncle carbuncles carcase carcinogen carcinogens
carcinoma carcinomas cardiogram cardiograms cardiologist cardiologists
cardiology cardiopulmonary cardiovascular cardsharp cardsharps careen
careened careening careens caregiver caregivers caret carets careworn
carfare caricaturist caricaturists caries carillon carillonned
carillonning carillons carjack carjacked carjacker carjackers
carjacking carjackings carjacks carmine carmines carnally carnelian
carnelians carom caromed caroming caroms carotid carotids carousal
carousals carousel carousels carouser carousers carpal carpals carpel
carpels carpetbag carpetbagged carpetbagger carpetbaggers
carpetbagging carpetbags carpi carport carports carpus carrel carrels
carryall carryalls carryout carryouts carsick carsickness
cartilaginous carver carvers carvings caryatid caryatids casein
caseload caseloads casement casements casework caseworker caseworkers
cassava cassavas cassia cassias cassock cassocks castanet castanets
castigation castigator castigators castration castrations casualness
casuist casuistry casuists catacomb catacombs catafalque catafalques
catalepsy cataleptic cataleptics catalpa catalpas catalysis catalyst
catalysts catalytic catamaran catamarans catarrh catastrophically
catatonic catatonics catbird catbirds catboat catboats catchall
catchalls catcher catchers catchphrase catchword catchwords caterings
caterwaul caterwauled caterwauling caterwauls catgut catharses
catharsis cathartic cathartics catheter catheters cathode cathodes
catholicity cation cations catkin catkins cattail cattails cattier
cattiest cattily cattiness cattleman cattlemen catty caudal cauldron
cauldrons causalities causally causals causation causative causeless
caustically cautionary cautiousness cavalcade cavalcades cavalryman
cavalrymen caveatted caveatting caveman cavemen cavernous cavil cavils
cayenne cedilla cedillas celebrant celebrants celebratory celerity
celesta celestas cellulite celluloid cenotaph cenotaphs censer censers
censorious censoriously centaur centaurs centenarian centenarians
centenaries centenary centigrade centime centimes centrifugal
centrifuged centrifuges centrifuging centripetal centrist centrists
centurion centurions cephalic cephalics ceramics cerebellum
cerebellums cerebra cerebrum cerebrums ceremonially ceremoniously
cerise certifiable certification certifications certitude cerulean
cervices cervix cesarean cesareans cession cessions cesspool cesspools
cetacean cetaceans chaffinch chaffinches chainsawed chainsawing
chainsaws chairlift chairlifts chairmanship chairwoman chairwomen
chaise chaises chalkboard chalkboards chamberlain chamberlains
chambermaid chambermaids chambray chamois chamomile chamomiles chancel
chancelleries chancellery chancels chanceries chancery chancier
chanciest chancy chandler chandlers changeling changeovers chanter
chanters chantey chanteys chanticleer chanticleers chaotically
chaparral chaparrals chaplaincies chaplaincy chaplet chaplets charade
charades charbroil charbroiled charbroiling charbroils chargers
charier chariest charily charioteer charioteered charioteering
charioteers charmer charmers charmingly chartreuse charwoman charwomen
chary chaser chasers chastely chasuble chasubles chateau chateaus
chateaux chatelaine chatelaines chattel chattels chatterer chatterers
chattily chattiness chauvinism chauvinistic cheapskate cheapskates
cheater cheaters checklist checklists checkmate checkmated checkmates
checkmating checkouts checkpoints checkroom checkrooms cheddar
cheekbone cheekbones cheekier cheekiest cheekily cheekiness cheeky
cheerily cheeriness cheerleader cheerleaders cheerless cheerlessly
cheerlessness cheerses cheeseburger cheeseburgers cheesecake
cheesecakes cheesier cheesiest cheesy chemise chemises chemotherapy
chenille cheroot cheroots cherubic chervil chessboard chessboards
chessman chessmen chevron chevrons chewer chewers chiaroscuro
chicaneries chicanery chichi chichier chichiest chichis chickadee
chickadees chickenpox chickpea chickpeas chickweed chicle chicories
chicory chiffon chigger chiggers chignon chignons chilblain chilblains
childbearing childcare childishly childishness childless childlessness
childproof childproofed childproofing childproofs chillers chilliness
chillings chimera chimeras chimerical chinchilla chinchillas chino
chinos chinstrap chinstraps chintzier chintziest chintzy chiropodist
chiropodists chiropody chiropractic chiropractics chirrup chirruped
chirruping chirrups chiseler chiselers chit chitchat chitchats
chitchatted chitchatting chitin chits chitterlings chivalrously chive
chived chives chiving chloride chlorides chlorinate chlorinated
chlorinates chlorinating chlorination chlorofluorocarbon
chlorofluorocarbons chock chocked chocking chocks choker chokers
choler choleric chomp chomped chomping chomps choppily choppiness
chopstick chopsticks chorale chorales choreograph choreographed
choreographic choreographing choreographs chorister choristers
chromatic chromatics chronicler chroniclers chronometer chronometers
chrysalis chrysalises chubbiness chuckhole chuckholes chumminess chump
chumps chunkiness churchgoer churchgoers churchman churchmen
churchyard churchyards churl churlish churlishly churlishness churls
chutney chutzpah cicada cicadas cicatrice cicatrices cigarillo
cigarillos cilantro cilia cilium cinchona cinchonas cincture cinctures
cinematic cinematographer cinematographers cinematography cinnabar
circadian circlet circlets circuitously circularity circumflexes
circumlocution circumlocutions circumnavigate circumnavigated
circumnavigates circumnavigating circumnavigation circumnavigations
circumscribe circumscribed circumscribes circumscribing
circumscription circumscriptions circumspect circumspection
circumstantially cirrhosis cirrus citadel citadels citizenry citron
citronella citrons civet civets civilly civvies claimant claimants
clambake clambakes clamminess clamorous clampdown clampdowns
clandestinely clannish clapboard clapboarded clapboarding clapboards
clareted clareting clarets clarinetist clarinetists clarion clarioned
clarioning clarions classically classicism classicist classicists
classier classiest classifiable classifieds classiness classless
claustrophobic clavichord clavichords clavicle clavicles clayey
clayier clayiest cleanings cleanness cleanup cleanups clearinghouse
clearinghouses clematis clematises clement clerestories clerestory
clergywoman clergywomen clew clewed clewing clews cliched cliffhanger
cliffhangers climatic clincher clinchers clingier clingiest clingy
clinician clinicians clinker clinkers clipper clippers cliquish
clitoral clitorises cloakroom cloakrooms clobber clobbered clobbering
clobbers cloche cloches clodhopper clodhoppers clomp clomped clomping
clomps cloned cloning clop clopped clopping clops closefisted
closemouthed closeout closeouts clothesline clotheslined clotheslines
clotheslining clothier clothiers cloture clotures cloudiness cloudless
cloverleaf cloverleafs clownish clownishly clownishness cloy cloyed
cloying cloys clubfeet clubfoot clunk clunked clunker clunkers
clunkier clunkiest clunking clunks clunky coachman coachmen coagulant
coagulants coalescence coatings coauthor coauthored coauthoring
coauthors cobbled cobbles cobblestone cobblestones cobbling cocci
coccis coccus coccyges coccyx cochlea cochleae cochleas cockade
cockades cockamamie cockatoo cockatoos cockerel cockerels cockfight
cockfights cockily cockiness cockle cockles cockleshell cockleshells
cockney cockneys cockscomb cockscombs cocksucker cocksuckers cocksure
coda codas coddle coddled coddles coddling codeine codependency
codependent codependents codex codfish codfishes codger codgers
codices codicil codicils codification codifications codified codifies
codify codifying coed coeds coeducation coeducational coequal coequals
coercive coeval coevals coffeecake coffeecakes coffeehouse
coffeehouses coffeepot coffeepots cogently cogitate cogitated
cogitates cogitating cogitation cognate cognates cognition cognomen
cognomens cogwheel cogwheels cohabit cohabitation cohabited cohabiting
cohabits cohere cohered coheres cohering cohesion cohesive cohesively
cohesiveness cohort cohorts coif coiffed coiffing coiffure coiffured
coiffures coiffuring coifs coincident coital coitus cola colas coled
coleslaw colicky coliseum coliseums colitis collaboratives collations
collectible collectibles collectivism collectivist collectivists
colleen colleens collegian collegians collier collieries colliers
colliery colling collocate collocated collocates collocating
collocation collocations colloid colloids colloquially colloquies
colloquium colloquiums colloquy collude colluded colludes colluding
collusive cologne colognes colonialism colonialist colonialists
colonist colonists colonnade colonnades colossally colossi colossus
cols coltish columbine columbines columned columnist columnists
comatose combative combo combos combustibility comebacks comedic
comedienne comediennes comedown comedowns comeliness comer comers
comeuppance comeuppances comfier comfiest comforter comforters
comfortingly comfy comically comity commemorative commendably
commensurable commensurate commentate commentated commentates
commentating commingle commingled commingles commingling commissar
commissariat commissariats commissaries commissars commissary
committal committals commode commodes commodious commoners communally
communicant communicants communicators communistic commutation
commutations compactly compactness compactor compactors companionable
companionway companionways comparability comparably compassionately
compatibly compellingly compendium compendiums compensatory
competencies competency competitively competitiveness complacence
complacently complainant complainants complainer complainers
complaisance complaisant complaisantly complected complicity comport
comported comporting comportment comports compositor compositors
compote compotes comprehensibility comprehensiveness compressor
compressors comptroller comptrollers compulsively compulsiveness
compulsorily computationally concavities concavity concentrically
concertina concertinaed concertinaing concertinas concertmaster
concertmasters concessionaire concessionaires conch conched conching
conchs concierge concierges conciliator conciliators conciliatory
conclave conclaves concomitant concomitants concordances concordant
concretely concubine concubines condemnatory condenser condensers
condescendingly condescension conditioner conditioners condo condole
condoled condoles condoling condos conduce conduced conduces conducing
conduction conductive conductivity conduit conduits confab confabbed
confabbing confabs confectioner confectioneries confectioners
confectionery conferencing conferment conferments confessedly
confessional confessionals confessor confessors confidante confidantes
confirmatory conflagration conflagrations confluence confluences
confluent conformance conformation conformations conformist
conformists confrere confreres confrontational confusedly confusingly
confusions confute confuted confutes confuting conga congaed congaing
congas congeniality congenially congenital congenitally congestive
conglomeration conglomerations congratulation congratulatory
congregational congressional congruence congruities congruity
congruous conic conics conjectural conjoin conjoined conjoining
conjoins conjoint conjunctive conjunctives conjunctivitis conjuncture
conjunctures conjurer conjurers conk conked conking conks connectives
connivance connive connived conniver connivers connives conniving
connotative connubial conquistador conquistadors consanguinity
conscientiously conscientiousness conscript conscripted conscripting
conscription conscripts consecration consecrations consecutively
consensual conservationist conservationists conservatively conservator
conservators considerately consonance consonances consortia
conspiratorial constable constables constabularies constabulary
constipate constipated constipates constipating constitutionality
constrictive constrictor constrictors constructively constructor
constructors consultancies consultative consumings consummation
consummations consumptive consumptives contactable containment
contaminant contaminants contemporaneous contemporaneously
contemptibly contemptuously contentedly contentedness contentiously
contiguity continence continua continuance continuances contortionist
contortionists contractile contractually contradistinction
contradistinctions contrail contrails contralto contraltos
contrapuntal contrarily contrariness contrariwise contravened
contravening contraventions contretemps contritely contrition
contrivance contrivances controversially controvert controverted
controverting controverts contumacious contumelies contumely contuse
contused contuses contusing contusion contusions conundrum conundrums
conurbation conurbations conventionality convergences convergent
conversationalist conversationalists conversationally convexity
conveyor conveyors convivial conviviality convocation convocations
convoke convoked convokes convoking convolution convolutions
convulsively cookeries cookers cookery cookout cookouts coolant
coolants coolie coolies coolness coon coons cooperatively coopered
coopering coopers coordinators coot cootie cooties coots copilot
copilots copings coppery copra copse copsed copses copsing copter
copters copula copulas copulated copulates copulating copycat copycats
copycatted copycatting copywriter copywriters coquette coquetted
coquettes coquetting coquettish cordiality cordite corduroys
corespondent corespondents coriander corm cormorant cormorants corms
cornball cornballs cornbread corncob corncobs corneal cornerstone
cornerstones cornflower cornflowers cornice cornices cornrow cornrowed
cornrowing cornrows cornstalk cornstalks cornucopia cornucopias
corolla corollaries corollas corona coronas coronet coronets corpora
corporas corporeal corpulence correctable correctional correctives
correlative correlatives corroborations corroborative corrugate
corrugated corrugates corrugating corrugation corrugations corruptly
corruptness corsair corsairs cortege corteges cortical cortices
cortisone coruscate coruscated coruscates coruscating cosign
cosignatories cosignatory cosigned cosigner cosigners cosigning
cosigns cosine cosmetically cosmetologist cosmetologists cosmetology
cosmically cosmogonies cosmogony cosmological cosmologies cosmologist
cosmologists cosponsor cosponsored cosponsoring cosponsors costar
costarred costarring costars costliness cote coterie coteries cotes
cotillion cotillions cotter cotters cottonmouth cottonmouths
cottonseed cottonseeds cotyledon cotyledons councilman councilmen
councilwoman councilwomen counselings countably counteraction
counteractions counterclaim counterclaimed counterclaiming
counterclaims counterculture counterespionage counterexamples
counterfeiter counterfeiters counterintelligence countermand
countermanded countermanding countermands counteroffer counteroffers
counterpane counterpanes counterpoint counterpoints counterproductive
counterrevolution counterrevolutionaries counterrevolutionary
counterrevolutions countersink countersinking countersinks countersunk
countertenor countertenors counterweight counterweights countrified
countrywoman countrywomen coupe couped coupes couping couplet couplets
courteousness courtesan courtesans courtier courtiers courtlier
courtliest courtliness courtly coven covens coverall coveralls
coverings coverlet coverlets covetously covetousness covey coveys
cowardliness cowbird cowbirds cowhand cowhands cowl cowlick cowlicks
cowling cowls coworker coworkers cowpoke cowpokes cowpox cowpuncher
cowpunchers cowslip cowslips coxcomb coxcombs coxswain coxswained
coxswaining coxswains coyly coyness cozen cozened cozening cozens
crabbily crabbiness crackdown crackdowns crackerjack crackerjacks
cracklier crackliest crackly crackup crackups craftiness craftsmanship
cranial crankcase crankcases crankiness crankshaft crankshafts
crannied crannies cranny crannying crape crapes crapped crappie
crappier crappies crappiest crapping crappy craps crassly crassness
cravat cravats cravatted cravatting craven cravenly cravens craw
crawlspace crawlspaces craws creamer creameries creamers creamery
creaminess creationism creativeness creche creches credenza credenzas
credibly creditably credo credos credulity credulously creel creeled
creeling creels creeper creepers creepily creepiness crematoria
crematories crematorium crematoriums crematory creole creoles creosote
creosoted creosotes creosoting cress crewman crewmen cribbage crick
cricked cricketer cricketers cricking cricks crier criers
criminologist criminologists criminology crimp crimped crimping crimps
crinklier crinklies crinkliest crinkly crinoline crinolines crispier
crispiest crispness critter critters crocked croissant croissants
crone crones crookedly crookedness crooner crooners cropper croppers
croquette croquettes crosier crosiers crossbar crossbarred
crossbarring crossbars crossbeam crossbeams crossbones crossbred
crossbreed crossbreeding crossbreeds crosscheck crosschecked
crosschecking crosschecks crossfire crossfires crossly crossness
crossover crossovers crosspiece crosspieces crosstown crosswise
crotchet crotchets crotchety croup crouped croupier croupiers
croupiest crouping croups croupy crouton croutons crucible crucibles
cruciform cruciforms crud cruddier cruddiest cruddy crudeness crudites
crudities cruet cruets cruller crullers crumbier crumbiest crumby
crumpet crumpets cruncher crunchier crunchiest cryings cryogenics
cryptically cryptogram cryptograms cryptographer cryptographers
cryptography crystalline crystallines crystallographic crystallography
cubbyhole cubbyholes cubical cubism cubist cubists cubit cubits
cuckold cuckolded cuckolding cuckolds cud cuddlier cuddliest cudgel
cudgels cuds culotte culottes culpability cultivator cultivators
culvert culverts cumin cummerbund cummerbunds cumulatively cumuli
cumulus cuneiform cunnilingus cunt cunts cupcake cupcakes cupidity
cupola cupolaed cupolaing cupolas curacies curacy curate curated
curates curating curative curatives curer curie curies curler curlers
curlew curlews curlicue curlicued curlicues curlicuing curlier
curliest curliness curmudgeon curmudgeons curred curring currycomb
currycombed currycombing currycombs curs curseder cursedest cursive
cursored cursorily cursoring cursors curtailment curtailments curtly
curtness curvaceous curvier curviest curvy cushier cushiest cushy cusp
cuspid cuspids cusps cuss cussed cusses cussing custodial customarily
cutely cuteness cutesier cutesiest cutesy cutlass cutlasses cutoff
cutoffs cuttlefish cuttlefishes cutup cutups cybernetic cyberpunk
cyberpunks cyberspace cyclamen cyclamens cyclical cyclically cyclonic
cyclotron cyclotrons cygnet cygnets cynically cynosure cynosures
cystic cytology cytoplasm czarina czarinas d dB dabbler dabblers dacha
dachas dactyl dactylic dactylics dactyls dado dadoes daemons daffier
daffiest daffy dafter daftest daguerreotype daguerreotyped
daguerreotypes daguerreotyping dahlia dahlias daintiness daiquiri
daiquiris dairying dairymaid dairymaids dairyman dairymen dale dales
dalliance dalliances dalmatian dalmatians damask damasked damasking
damasks damnable damnably dampers damply damson damsons dander
dandered dandering danders dandle dandled dandles dandling dankly
dankness dapple dappled dapples dappling daringly darkroom darkrooms
darneder darnedest dartboard dartboards dashiki dashikis dashingly
databased databasing dateline datelined datelines datelining dative
datives datums dauber daubers dauntlessly dauntlessness dauphin
dauphins davenport davenports davit davits dawdler dawdlers daybed
daybeds daydreamer daydreamers daylights dazzlings deaconess
deaconesses deactivate deactivated deactivates deactivating deadbeat
deadbeats deadbolt deadbolts deadliness deadpan deadpanned deadpanning
deadpans deadwood deafen deafened deafening deafens dealership
dealerships dearness deathblow deathblows deathless deathlier
deathliest deathlike deathly deathtrap deathtraps deb debacle debacles
debar debark debarkation debarked debarking debarks debarment debarred
debarring debars debater debaters debauch debauched debauches
debauching debenture debentures debilitation debonairly debriefings
debs debuggers decadently decaf decaffeinate decaffeinated
decaffeinates decaffeinating decal decals decamp decamped decamping
decamps decant decanted decanting decants decapitation decapitations
decathlon decathlons decedent decedents deceitfulness deceiver
deceivers decelerate decelerated decelerates decelerating deceleration
deceptively deceptiveness decimation decipherable decisiveness
deckhand deckhands declaim declaimed declaiming declaims declamation
declamations declamatory declarative declassified declassifies
declassify declassifying declensions declination declivities declivity
decollete decolletes decommission decommissioned decommissioning
decommissions decompress decompressed decompresses decompressing
decompression decongestant decongestants deconstruction
deconstructions decontaminate decontaminated decontaminates
decontaminating decontamination decor decorously decors decremented
decrements decrepitude decrescendo decrescendos deducible deductible
deductibles deejay deejayed deejaying deejays deepness deerskin
deescalate deescalated deescalates deescalating defacement defaulter
defaulters defeatism defeatists defecation defection defections
defector defectors defensively defensiveness deferentially deferment
deferments defilement definer definers definiteness definitively
deflector deflectors defogger defoggers defoliant defoliants defoliate
defoliated defoliates defoliating defoliation deforest deforestation
deforested deforesting deforests deformation deformations defray
defrayal defrayed defraying defrays defroster defrosters deftness
defuse defused defuses defusing degeneracy degeneration degenerative
dehumidified dehumidifier dehumidifiers dehumidifies dehumidify
dehumidifying dehydration deice deiced deicer deicers deices deicing
deification deism dejectedly delectation deliciousness delightfully
delineate delineated delineates delineating delineation delineations
delinquently deliquescent deliverer deliverers dell dells delphinium
delphiniums delusive demagogic demagoguery demagogy demarcate
demarcated demarcates demarcating demarcation dementedly demesne
demesnes demigod demigods demijohn demijohns demitasse demitasses demo
demoed demographer demographers demographic demographically
demographics demography demoing demoniac demoniacal demonic
demonstrable demonstratively demos demur demurred demurrer demurring
demurs denature denatured denatures denaturing dendrite dendrites
denier deniers denigrated denigrates denigrating denigration denizen
denizens denominate denominated denominates denominating
denominational denotation denotations denouement denouements
denouncement denouncements denseness dentifrice dentifrices dentin
denture dentures denude denuded denudes denuding dependability
dependably depictions depilatories depilatory deplane deplaned
deplanes deplaning depletion deplorably deploy deployed deploying
deployment deployments deploys depopulate depopulated depopulates
depopulating depopulation deposition depositions depositor
depositories depositors depository deprecation deprecatory depredation
depredations depressant depressants depressive depressives deprogram
deprogrammed deprogramming deprograms deputation deputations depute
deputed deputes deputing derailleur derailleurs derangement derbies
derby deregulate deregulated deregulates deregulating deregulation
dereliction derisive derisively derisory derivable dermatitis
dermatologist dermatologists dermatology dermis derogate derogated
derogates derogating derogation derriere derrieres derringer
derringers dervish dervishes desalinate desalinated desalinates
desalinating desalination descant descanted descanting descants
descender descried descries descriptively descry descrying desegregate
desegregated desegregates desegregating desertion desertions
deservedly deservings desiccate desiccated desiccates desiccating
desiccation desiderata desideratum desirably desktops desolately
desolateness despairingly desperado desperadoes despicably despoil
despoiled despoiling despoils despondency despondently despotism
destruct destructed destructible destructing destructively
destructiveness destructs desultory detainment detente detentes
determinant determinants determinate determiner determiners deterrence
detestable detestation dethronement detox detoxed detoxes
detoxification detoxified detoxifies detoxify detoxifying detoxing
detraction detractor detractors detritus deuce deuced deuces deucing
deuterium devaluation devaluations devalued devalues devaluing
developmental deviance deviants devilish devilishly devilment
devilries devilry deviltries deviltry deviously deviousness devotedly
devotional devotionals devoutness dewberries dewberry dewdrop dewdrops
dewier dewiest dewlap dewlaps dewy dexterously dextrose dhoti dhotis
diabolic diabolically diacritic diacritical diacritics diadem diadems
diagnostician diagnosticians diagrammatic dialectal dialectic dialysis
diametrical diaphanous diarist diarists diastolic diatom diatoms
diatribes dibble dibbled dibbles dibbling dicey dichotomies dichotomy
dicier diciest dick dicker dickered dickering dickers dickey dickeys
dickier dickiest dicks dicta dictum didactic didactics diddle diddled
diddles diddling diereses dieresis dieter dieters dietetic dietetics
dietitian dietitians differentials diffidence diffident diffidently
diffraction diffusely diffuseness diggers digitalis digraph digraphs
digressive dilapidation dilatory dilettante dilettantes dilettantism
dillies dilly dillydallied dillydallies dillydally dillydallying
dimensionless diminuendo diminuendos diminution diminutions dimmers
dimness dimwit dimwits dimwitted dinette dinettes ding dinged
dinginess dinging dingo dingoes dings dinkier dinkies dinkiest dinky
dint diocesan diocesans diode diodes diorama dioramas dioxin dioxins
dipole dipper dippers dipsomania dipsomaniac dipsomaniacs dipstick
dipsticks directional directionals directorate directorates
directorial directorship directorships dirigible dirigibles dirk dirks
dirtiness dis disablement disabuse disabused disabuses disabusing
disadvantageously disaffect disaffected disaffecting disaffection
disaffects disambiguation disappointingly disapprobation
disapprovingly disarrange disarranged disarrangement disarranges
disarranging disassemble disassembled disassembles disassembling
disassociate disassociated disassociates disassociating disastrously
disavowal disavowals disbar disbarment disbarred disbarring disbars
discernment disclaimers discoed discoing discombobulate
discombobulated discombobulates discombobulating discomfit discomfited
discomfiting discomfits discomfiture discommode discommoded
discommodes discommoding discompose discomposed discomposes
discomposing discomposure disconnectedly disconnection disconnections
discontentedly discontentment discontinuance discontinuances
discontinuation discontinuations discontinuities discontinuous
discotheque discotheques discountenance discountenanced
discountenances discountenancing discouragingly discourteously
discoverer discoverers discreditable discriminant discursive
discussant discussants disdainfully disembodied disembodies disembody
disembodying disembowel disemboweled disemboweling disembowels
disenchant disenchanted disenchanting disenchants disencumber
disencumbered disencumbering disencumbers disenfranchise
disenfranchised disenfranchisement disenfranchises disenfranchising
disengagement disengagements disentanglement disestablish
disestablished disestablishes disestablishing disfigurement
disfigurements disfranchise disfranchised disfranchisement
disfranchises disfranchising disgorge disgorged disgorges disgorging
disgracefully disgustedly disharmonious disharmony dishcloth
dishcloths dishevel dishevels dishpan dishpans dishrag dishrags
dishtowel dishtowels dishwater disinclination disincline disinclined
disinclines disinclining disinformation disinter disinterest
disinterestedly disinterests disinterment disinterred disinterring
disinters disjointedly diskette diskettes disloyally dismemberment
disobediently disoblige disobliged disobliges disobliging
disorderliness disorient disorientation disoriented disorienting
disorients disparagement disparates dispatcher dispatchers dispensable
dispirit dispirited dispiriting dispirits displayable disport
disported disporting disports dispossession disproof disproportion
disproportionately disproportions disputable disputant disputants
disputation disputations disputatious disqualification
disqualifications disquisition disquisitions disreputably
disrespectfully disrobe disrobed disrobes disrobing dissed dissemble
dissembled dissembles dissembling disses dissidence dissimulate
dissimulated dissimulates dissimulating dissimulation dissing
dissolutely dissoluteness dissonant dissuasion distaff distaffs
distastefully distemper distension distensions distention distentions
distillate distillates distinctiveness distrait distressful
distributive distrustfully disturbingly disunite disunited disunites
disuniting disunity diuretic diuretics diurnal diurnally diva divan
divans divas diversely diversification diversionary divider dividers
divination divinely diviners divisibility divisional divisively
divisiveness divot divots divvied divvies divvy divvying dizzily
doable doc docent docents docilely docility docket docketed docketing
dockets dockyard dockyards docs doctoral doctorates doctrinaire
doctrinaires doctrinal docudrama docudramas dodder doddered doddering
dodders dodger dodgers dodos doff doffed doffing doffs dogcatcher
dogcatchers dogfight dogfighting dogfights dogfish dogfishes dogfought
doggedness doggier doggies doggiest doggone doggoned doggoner doggones
doggonest doggoning doggy dogie dogies dogmatically dogmatism
dogmatist dogmatists dogtrot dogtrots dogtrotted dogtrotting doings
dollhouse dollhouses dollop dolloped dolloping dollops dolmen dolmens
dolorous dolt doltish dolts domestically domestication dominantly
domineer domineered domineering domineers donned donning doodad
doodads doodler doodlers doohickey doohickeys doomsday doorbell
doorbells doorknob doorknobs doormat doormats dories dork dorkier
dorkiest dorks dorky dorm dormancy dormer dormers dormice dormouse
dorms dory dosage dosages dossier dossiers dotage doth dotingly
dotings dotty doublet doublets doubloon doubloons doubter doubters
doubtlessly douche douched douches douching doughier doughiest
doughtier doughtiest doughty doughy dourly dovetail dovetailed
dovetailing dovetails dowager dowagers dowdily dowdiness dowel dowels
downbeat downbeats downer downers download downloaded downloading
downloads downplay downplayed downplaying downplays downscale
downscaled downscales downscaling downsize downsized downsizes
downsizing downstage downstate downswing downswings downtime
downtrodden downturn downturns downwind dowse dowsed dowses dowsing
doxologies doxology doyen doyens drably drabness drachma drachmas
draftee draftees dragnet dragnets dragoon dragooned dragooning
dragoons drainer drainers drainpipe drainpipes drake drakes dram drams
drawstring drawstrings dray drays dreadlocks dreadnought dreadnoughts
dreamily dreamland dreamless dreamlike drearily dreariness dredger
dredgers dressage dressiness dressmaking dribbler dribblers driblet
driblets drifter drifters drinkings drippings drivings drizzlier
drizzliest drizzly drolleries drollery drollness drolly dromedaries
dromedary droopier droopiest droopy droplet droplets dropper droppers
dropsy drover drovers drownings drowsily drub drubbed drubbing
drubbings drubs druid druids dryad dryads drywall drywalled drywalling
drywalls duality dubiety dubiousness ducal ducat ducats duchies duchy
duckbill duckbills ductile ductility ducting ductless dudgeon duffer
duffers duh dukedom dukedoms dulcet dulcimer dulcimers dullard
dullards dumbly dumbness dumbwaiter dumbwaiters dun dungaree dungarees
dunned dunner dunnest dunning duns duodena duodenal duodenum duos
duplicator duplicators durably dustbins duster dusters dustiness
dustless dustman duteous dutiable dwarfish dwarfism dweeb dweebs
dyadic dyer dyers dyestuff dynamism dynastic dysfunction dysfunctional
dysfunctions dyslexic dyslexics dyspepsia dyspeptic dyspeptics e
eaglet eaglets earful earfuls earldom earldoms earliness earlobe
earlobes earmuff earmuffs earphone earphones earplug earplugs
earsplitting earthen earthenware earthiness earthling earthlings
earthshaking earthward earthwards earthwork earthworks earwax earwig
earwigs easiness eastbound easterner easterners easternmost eastwards
eatable eatables eateries eaters eatery eavesdropper eavesdroppers
ebullience ebullient eccentrically ecclesiastic ecclesiastics echelon
echelons eclat eclectically eclecticism eclectics ecliptic econometric
ecru ecstatically ecumenically edelweiss edginess edgings edibility
edification edified edifies edify edifying editable editorially
educable educables educationally eerily eeriness efface effaced
effacement effaces effacing effectually effectuate effectuated
effectuates effectuating effeminacy effervesce effervesced
effervescence effervesces effervescing effete efficacious
efficaciously efficacy effluent effluents effrontery effulgence
effulgent effusion effusions effusiveness egalitarianism egalitarians
eggbeater eggbeaters egghead eggheads eggnog eggshell eggshells
eglantine eglantines egocentrics egoist egoistic egoists egotistic
egotistical egotistically egregious egregiously egress egresses egret
egrets eider eiderdown eiderdowns eiders eigenvalues elaborateness
elan elate elated elates elating elbowroom elderberries elderberry
electioneer electioneered electioneering electioneers electrification
electrocardiogram electrocardiograms electrocardiograph
electrocardiographs electrodynamics electroencephalogram
electroencephalograms electroencephalograph electroencephalographs
electrolyte electrolytes electrolytic electromagnet electromagnetism
electromagnets electroplate electroplated electroplates electroplating
elegiac elegiacs elephantine elfin elfish elide elided elides eliding
elision elisions elitists elixir elixirs ell elliptically ells
elocution elocutionist elocutionists elongation elongations elucidated
elucidates elucidating elucidation elucidations elusively elusiveness
emaciate emaciated emaciates emaciating emaciation emanation
emanations emancipator emancipators emasculate emasculated emasculates
emasculating emasculation embalmer embalmers embarkation embarkations
embarrassingly embattled embezzler embezzlers emblazon emblazoned
emblazoning emblazons emblematic embolden emboldened emboldening
emboldens embolism embolisms embroil embroiled embroiling embroils
embryologist embryologists embryology emcee emceed emceeing emcees
emend emendation emendations emended emending emends emeried emeries
emeritus emery emerying emetic emetics emigre emigres emirate emirates
emollient emollients emolument emoluments emote emoted emotes emoting
emotionalism empathetic empirically empiricism emplacement
emplacements employable employables empowerment emptily ems emu
emulsification emulsified emulsifies emulsify emulsifying emus encamp
encamped encamping encampment encampments encamps encapsulation
encapsulations encephalitis enchanter enchanters enchantingly
enchantress enchantresses enchilada enchiladas encirclement enclave
enclaves encoder encoders encouragingly encroachment encroachments
encrust encrustation encrustations encrusted encrusting encrusts
encrypt encrypts encyclical encyclicals encyclopedic endearingly
endlessness endocrine endocrines endorser endorsers endue endued
endues enduing endurable enervate enervated enervates enervating
enervation enfeeble enfeebled enfeebles enfeebling enfold enfolded
enfolding enfolds enforceable enforcer enforcers enfranchise
enfranchised enfranchisement enfranchises enfranchising engagingly
engorge engorged engorges engorging enhancer enigmatically enjoin
enjoined enjoining enjoins enlarger enlargers enlistee enlistees
enmesh enmeshed enmeshes enmeshing ennoble ennobled ennoblement
ennobles ennobling ennui enormousness enrapture enraptured enraptures
enrapturing ensconce ensconced ensconces ensconcing enshroud
enshrouded enshrouding enshrouds enslavement ensnare ensnared ensnares
ensnaring entente ententes entertainingly enthrone enthroned
enthronement enthronements enthrones enthroning enthuse enthused
enthuses enthusing enticings entitlement entitlements entomb entombed
entombing entombment entombs entomological entourage entourages
entrapment entrenchment entrenchments entrepreneur entrepreneurial
entrepreneurs entryway entryways enumerable enumerations envelopment
enviably enviousness environmentalism environmentalist
environmentalists envision envisioned envisioning envisions epee epees
epicure epicurean epicureans epicures epidemiology epidermal
epiglottis epiglottises epigram epigrammatic epigrams episcopacy
episcopal episcopate episodic epistemology epistle epistles
epistolaries epistolary epochal epoxied epoxies epoxy epoxying
equability equable equably equestrienne equestriennes equidistant
equinoctial equipage equipages equipoise equitably equivalences
equivocally equivocate equivocated equivocates equivocating
equivocation equivocations eradication erasures ere erectile erectly
erectness erg ergonomics ergs ermine ermines erogenous erosive erotica
erotically eroticism errata erratas erratum ersatz ersatzes eruditely
erudition erythrocyte erythrocytes es escalations escapee escapees
escapist escapists escarole escaroles escarpment escarpments
eschatology eschew eschewed eschewing eschews escrow escrows
escutcheon escutcheons esoterically espadrille espadrilles espied
espies esplanade esplanades espousal espouse espoused espouses
espousing espresso espressos espy espying esquire esquires essayist
essayists ester esters esthetically estimable estimator estimators
estrange estranged estranges estranging estuaries estuary eta etcher
etchers ethereally ethnically ethnicity ethnological ethnologist
ethnologists ethnology etiologies etiology etude etudes etymologist
etymologists eucalypti eucalyptus eucalyptuses eugenics eulogistic
eunuch eunuchs euphemistic euphemistically euphony euphoria euphoric
eurekas euro euros eutectic evacuee evacuees evanescent evasively
evasiveness evenhanded eventfully eventfulness eventide eventuate
eventuated eventuates eventuating everglade everglades everyplace
evildoer evildoers evilly evince evinced evinces evincing eviscerate
eviscerated eviscerates eviscerating evisceration evocation evocations
ewer ewers ex exacerbation exactingly exactitude exactness excavator
excavators exceptionable exchangeable exchequer exchequers excision
excisions excitability excitation excitedly excitingly exclamatory
exclusiveness exclusivity excoriate excoriated excoriates excoriating
excoriation excoriations excrescence excrescences excreta excretion
excretions excretories excretory excruciatingly exculpate exculpated
exculpates exculpating exec execrable execrate execrated execrates
execrating execs executrices executrix exegeses exegesis exemplar
exemplars exemplification exemplifications exes exhalation exhalations
exhaustible exhaustively exhibitionism exhibitionist exhibitionists
exhibitor exhibitors exhumation exhumations exigencies exigency
exigent exigents exiguous existentialism existentialist
existentialists exorbitance exorbitantly exorcise exorcised exorcises
exorcising exorcism exorcisms exorcist exorcists exotically
expansionist expansionists expansively expansiveness expatiate
expatiated expatiates expatiating expatriation expectantly expectorant
expectorants expectorate expectorated expectorates expectorating
expectoration expedience expediences expediently expediter expediters
expeditionary expeditious expeditiously expensively experimenter
experimenters expertness expiate expiated expiates expiating expiation
explicate explicated explicates explicating explication explications
explicitness exploitative exploiter exploiters exploratory explosively
explosiveness expo exponentiation exportation expos expository
expostulate expostulated expostulates expostulating expostulation
expostulations expressible expressionism expressionist expressionists
expressionless expressiveness expropriate expropriated expropriates
expropriating expropriation expropriations expunge expunged expunges
expunging expurgate expurgated expurgates expurgating expurgation
expurgations exquisitely extemporaneously extempore extempores
extensional extensiveness extenuate extenuated extenuates extenuating
extenuation exterminator exterminators extinguishable extirpate
extirpated extirpates extirpating extirpation extortionist
extortionists extractor extractors extramarital extraneously
extrasensory extravaganza extravaganzas extremism extrication
extrinsic extrinsically extroversion extroverted extrude extruded
extrudes extruding extrusion extrusions exuberantly exultantly eyeful
eyefuls eyeglass eyeglasses eyelet eyelets eyeliner eyeliners eyepiece
eyepieces eyestrain eyeteeth eyetooth f fa fabled fabulously
facetiously facetiousness facially facilitation facings factional
factionalism factitious factotum factotums faddish fag fagged fagging
fags fain fained fainer fainest faining fains fainthearted faintness
fairground fairgrounds fairway fairways fairyland fairylands
faithlessly faithlessness faker fakers fakir fakirs falconer falconers
fallaciously fallibility fallibly falloff falloffs fallow fallowed
fallowing fallows falseness falsifiable falteringly falterings
familial familiarly famish famished famishes famishing famously
fanatically fanaticism fanciers fancifully fancily fanciness fannies
fanny fanzine farcical farina farinaceous farmhand farmhands farmhouse
farmhouses farmyard farmyards farrow farrowed farrowing farrows
farsighted farsightedness fart farted farthing farthings farting farts
fastidiously fastidiousness fastness fastnesses fatalism fatalist
fatalists fatefully fathead fatheads fatherless fathomable fathomless
fatness fattenings fatuously fatuousness faultfinding faultily
faultiness faultlessly faun fauns fax faxed faxes faxing fealty
fearfulness fearlessness feasibly featherbedding featherweight
featherweights featureless febrile fecal feckless fecund fecundity
federally federate federated federates federating fedora fedoras
feebleness feebly feedbag feedbags feedings feelingly feistier
feistiest feisty feldspar felicities felicitous felicity fellatio
felonious femoral fems femur femurs fencer fencers fennel fens fer
feral ferociousness ferric ferrous ferrule ferrules ferryboat
ferryboats fervency fervid fervidly fest festal festals festively
fests feta fetchingly fetishism fetishist fetishistic fetishists
fetlock fetlocks fettle feudalistic fevered fey fiat fiats fibroid
fibrous fibula fibulae fiches fickleness fiddlesticks fiduciaries
fiduciary fie fief fiefs fielder fielders fieldwork fieriness fies
fife fifes figurine figurines filamentous filbert filberts filial
filibuster filibustered filibustering filibusters filigree filigreed
filigreeing filigrees filings fillers fillings fillip filliped
filliping fillips filmmaker filmmakers filmstrip filmstrips filterable
filthiness filtrate filtrated filtrates filtrating filtration finagle
finagled finagler finaglers finagles finagling finder finders fineness
finery fingerboard fingerboards fingerings finis finises finisher
finishers finitely fink finked finking finks finnier finniest finny
fireball fireballs firebomb firebombed firebombing firebombs firebrand
firebrands firebreak firebreaks firebug firebugs firefight
firefighting firefights firehouse firehouses fireplug fireplugs
firepower firestorm firestorms firetrap firetraps firewall firewalled
firewalling firewalls firewater firmament firmaments firstborn
firstborns firth firths fiscally fishbowl fishbowls fishers fishhook
fishhooks fishnet fishnets fishtail fishtailed fishtailing fishtails
fishwife fishwives fistful fistfuls fisticuffs fitfully fitly fitters
fittingly fixate fixated fixates fixating fixative fixatives fixedly
fixer fixers fixings fixity fizzier fizziest fjord fjords flab
flabbergast flabbergasted flabbergasting flabbergasts flabbiness
flaccid flack flacks flagella flagellate flagellated flagellates
flagellating flagellation flagellum flagon flagons flagstaff
flagstaffs flakiness flaks flambe flambeed flambeing flambes flamenco
flamencos flamethrower flamethrowers flamings flammability flan flange
flanges flapper flappers flashbulb flashbulbs flashers flashgun
flashguns flashily flashiness flatbed flatbeds flatboat flatboats
flatcar flatcars flatfeet flatfish flatfishes flatfoot flatfooted
flatfooting flatfoots flatiron flatirons flatteringly flattop flattops
flatulence flatulent flatware flautist flautists flax flaxen flay
flayed flaying flays fleetingly fleetness fleshlier fleshliest fleshly
flextime flibbertigibbet flibbertigibbets flightiness flimflam
flimflammed flimflamming flimflams flimsily flintier flintiest
flintlock flintlocks flinty flippancy flippantly flirtatiously floater
floaters floe floes floggings floodgate floodgates floodlit floorboard
floorboards floozies floozy flophouse flophouses floppiness floridly
florin florins flotation flotations flotsam flourier flouriest floury
flowerbed flowerbeds floweriness flowerpot flowerpots flub flubbed
flubbing flubs fluffiness fluidity fluidly flukier flukiest fluky
flume flumes flummox flummoxed flummoxes flummoxing fluoresce
fluoresced fluorescence fluoresces fluorescing fluoridate fluoridated
fluoridates fluoridating fluoridation fluoride fluorides fluorine
fluorite fluorocarbon fluorocarbons fluoroscope fluoroscopes fluttery
flyby flybys flycatcher flycatchers flyleaf flyleaves flypaper
flypapers flysheet flyspeck flyspecked flyspecking flyspecks
flyswatter flyswatters flyweight flyweights flywheel flywheels fob
fobbed fobbing fobs foetid fogbound fogginess foldaway folio folios
follicle follicles fomentation fondant fondants fondue fondues
fooleries foolery foolhardiness foolscap footballer footballers
footbridge footbridges footfall footfalls footlocker footlockers
footloose footman footmen footrest footrests footsie footsies footsore
fop fopped fopping foppish fops forager foragers forbiddingly
forcefulness forebear forebears forecaster forecasters forecastle
forecastles foreclose foreclosed forecloses foreclosing foreclosure
foreclosures forefeet forefoot forehand forehands foreknowledge
forelock forelocks foremast foremasts forename forenames forenoon
forenoons foreordain foreordained foreordaining foreordains foresail
foresails foreshorten foreshortened foreshortening foreshortens
forestation forester foresters forevermore forewoman forewomen
forfeiture forgather forgathered forgathering forgathers forgetfully
forgettable forgivable forklift forklifts forlornly formaldehyde
formalism formidably formlessly formlessness formulaic fornicate
fornicated fornicates fornicating forsooth forsythia forsythias
forthrightly forthrightness fortissimo fortnights fortuitously
forwardness foully foulness fountainhead fountainheads fourfold
fourscore foursome foursomes foursquare fourthly foxglove foxgloves
foxhole foxholes foxhound foxhounds foxtrot foxtrots foxtrotted
foxtrotting fractals fractionally fractious fractiously fragrantly
framer framers franchisee franchisees franchiser franchisers
frankincense frankness frappe frappes frat fraternally fratricide
fratricides frats fraudulence frazzle frazzled frazzles frazzling
freakier freakiest freakish freaky freebase freebased freebases
freebasing freebie freebies freebooter freebooters freedman freedmen
freehold freeholder freeholders freeholds freelanced freelancer
freelancers freelances freelancing freeload freeloaded freeloader
freeloaders freeloading freeloads freeman freemen freestanding
freestyle freestyles freethinker freethinkers freethinking freewheel
freewheeled freewheeling freewheels freewill frenetic frenetically
frenziedly fresco frescoes freshet freshets fretfulness fretwork
friable fricassee fricasseed fricasseeing fricassees fridge fridges
friendless frigidly fripperies frippery friskily friskiness
frivolously frizz frizzed frizzes frizzing frizzle frizzled frizzles
frizzling frogman frogmen frolicsome frontally frontiersman
frontiersmen frontispiece frontispieces frostily frostiness frowzier
frowziest frowzy fructified fructifies fructify fructifying fructose
fruitcake fruitcakes fruitfully fruitfulness fruitlessness frump
frumpier frumpiest frumps frumpy fryer fryers fuchsia fuchsias fuck
fucked fucker fuckers fucking fucks fuddle fuddled fuddles fuddling
fugue fugues fullback fullbacks fulminate fulminated fulminates
fulminating fulmination fulminations fulsome fumbler fumblers
fumigator fumigators functionaries functionary funereal funereally
fungal fungals fungicidal fungous funicular funiculars funk funked
funkier funkiest funking funks funky funniness furbelow furbish
furbished furbishes furbishing furriers furtherance furthermost furze
fusible fusillade fusillades fusions fussbudget fussbudgets fussily
fussiness fustian fustier fustiest fusty futilely futon futons
futuristics futurities futurity futz futzed futzes futzing fuzzily
fuzziness g gabardine gabardines gabbier gabbiest gabble gabbled
gabbles gabbling gabby gad gadabout gadabouts gadded gadding gadflies
gadfly gadgetry gads gaff gaffe gaffed gaffes gaffing gaffs gaggle
gaggles gainfully gainsaid gainsay gainsaying gainsays gaiter gaiters
galena gallantly gallbladder gallbladders galleon galleons gallium
gallstone gallstones galosh galoshed galoshes galoshing galvanic
galvanometer galvanometers gambol gambols gamecock gamecocks
gamekeeper gamekeepers gamely gameness gamesmanship gamete gametes
gamier gamiest gamin gamine gamines gamins gammas gamy gangland
ganglia ganglion gangrenous gannet gannets gantlet gantlets gantries
gantry gaoled gaoling gaols gapings garbageman garbanzo garbanzos
gargantuan garishly garishness garlicky garner garnered garnering
garners garnishee garnisheed garnisheeing garnishees garrote garroted
garrotes garroting garrulity garrulously garrulousness gaslight
gaslights gasohol gassier gassiest gassy gastritis gastrointestinal
gastronomic gastronomical gastronomy gasworks gatecrasher gatecrashers
gatepost gateposts gatherer gatherers gauche gaucher gauchest gaucho
gauchos gaudily gaudiness gauntness gauzier gauziest gauzy gavotte
gavottes gawkily gawkiness gayness gazebo gazebos gazer gazers
gazetteer gazetteered gazetteering gazetteers gazillion gazillions
gazpacho gearbox gearboxes gearshift gearshifts gearwheel gearwheels
gecko geckos geek geekier geekiest geeks geeky geezer geezers geisha
gelatinous gelid gelled gelling gels gemstone gemstones gendarme
gendarmes genealogist genealogists generalissimo generalissimos
generalities generative generically geniality genitalia genitive
genitives genome genomes genteel genteeler genteelest gentian gentians
gentlefolk gentlemanly gentlewoman gentlewomen gentrification
gentrified gentrifies gentrify gentrifying genuflect genuflected
genuflecting genuflection genuflections genuflects geocentric geode
geodes geodesic geodesics geographer geographers geologic geologically
geometer geometrical geometrically geophysical geophysics geopolitical
geopolitics geostationary geothermal geriatric geriatrics germane
germanium germicidal germinal gerontologist gerontologists gerontology
gerrymander gerrymandered gerrymandering gerrymanders gerund gerunds
gestate gestated gestates gestating gesticulation gesticulations
gesundheit getup gewgaw gewgaws ghastliness gherkin gherkins
ghostliness ghostwrite ghostwriter ghostwriters ghostwrites
ghostwriting ghostwritten ghostwrote ghoulish giantess giantesses
gibbet gibbeted gibbeting gibbets gibbon gibbons giblet giblets
giddily gigabyte gigabytes giggler gigglers gigglier giggliest giggly
gigolo gigolos gimcrack gimcracks gimlet gimleted gimleting gimlets
gimmickry gimmicky gimpier gimpiest gimpy gingersnap gingersnaps
gingivitis ginkgo ginkgoes ginseng gird girded girding girds girlishly
girt girted girting girts giveaway giveaways glacially gladiatorial
gladiola gladiolas gladioli gladiolus gladness glamorously glaringly
glassful glassfuls glaucoma glazier glaziers gleamings gleeful
gleefully glibness glimmerings glissandi glissando glitch glitches
glitterings glittery glitz glitzier glitziest glitzy gloaming
gloamings glob globed globetrotter globetrotters globing globs
glockenspiel glockenspiels gloomily gloominess glop glopped glopping
glops glossiness glottis glottises glowingly glowworm glowworms gluey
gluier gluiest glumly glumness gluten glutinous gluttonous
gluttonously glycerol glycogen glyph gnarlier gnarliest gnarly gneiss
gnomish goalpost goalposts goaltender goaltenders goatherd goatherds
goatskin goatskins gobbledygook gobbler gobblers goddamn goddaughter
goddaughters godforsaken godhood godliness godson godsons gofer gofers
goggled goggling goings goldbrick goldbricked goldbricking goldbricks
goldenrod goldfinch goldfinches gollies golly gonad gonads gondolier
gondoliers goober goobers goodbyes goodlier goodliest goodly gook
gooks goop gooseberries gooseberry gorgeously goriness gorse goshes
gossipy gotta gouger gougers gourmand gourmands goutier goutiest gouty
governable governance governorship govs gracefulness gracelessly
gracelessness grackle grackles grad graders grads grafter grafters
grail grainier grainiest grainy grammarian grammarians gramme grammes
granaries granary granddad granddads grandee grandees grandiloquence
grandiloquent grandma grandmas grandness grandpa grandpas grange
granges granularity granulate granulated granulates granulating
granulation graphologist graphologists graphology grapnel grapnels
grassland gratefulness gratis gravelly graybeard graybeards grayish
grayness greasepaint greasiness grebe grebes greengrocer greengrocers
greenish greenness greensward gregariously gregariousness grenadier
grenadiers griddlecake griddlecakes gridlock gridlocked gridlocking
gridlocks grievously griffin griffins grimness gringo gringos grippe
grippes grist gristlier gristliest gristly grog groggily grogginess
grommet grommets grosbeak grosbeaks grossness grotesquely grouchiness
groundbreaking groundbreakings grounder grounders groundhog groundhogs
groundings groundlessly groundswell groundswells groupie groupies
grout grouted grouting grouts grownup grownups grubbiness grubstake
grudgingly grudgings gruesomely gruffness grumbler grumblers grumpily
grumpiness grunge grungier grungiest grungy gs guacamole guano
guarantied guaranties guaranty guarantying guardedly guardhouse
guardhouses guardianship guardrail guardrails guardroom guardrooms
guardsman guardsmen guava guavas guesser guessers guesstimate
guesstimated guesstimates guesstimating guff guilder guilders guileful
guileless guiltiness guineas guitarists gulag gulags gullibility gumbo
gumbos gunboat gunboats gunfight gunfighting gunfights gunfought gunk
gunnery gunny gunnysack gunnysacks gunpoint gunrunner gunrunners
gunrunning gunslinger gunslingers gunsmith gunsmiths gunwale gunwales
gurney gurneys gushier gushiest gushy gusset gusseted gusseting
gussets gustatory gusto gutless gutsier gutsiest gutsy guttersnipe
guttural gutturals guzzler guzzlers gymnastic gymnosperm gymnosperms
gyp gypped gypping gyps gypsies gypsum gypsy gyro gyros h haberdasher
haberdasheries haberdashers haberdashery habitability habituate
habituated habituates habituating habituation habitue habitues
hacienda haciendas hackle hackles hafnium haft hafts haggler hagglers
haiku hailstorm hailstorms hairbreadth hairbreadths hairbrush
hairbrushes hairdressing hairiness hairless hairnet hairnets hairpiece
hairpieces hairpin hairpins hairsbreadth hairsbreadths hairsplitting
hairspring hairsprings hairstyle hairstyles hairstylist hairstylists
hake hakes halberd halberds halcyon halfback halfbacks halfhearted
halfheartedly halfheartedness halfpence halfpennies halfpenny halftime
halftimes halitosis hallow hallowed hallowing hallows hallucinate
hallucinated hallucinates hallucinating hallucinatory hallucinogen
hallucinogenic hallucinogenics hallucinogens halogen halogens
haltingly haltings halyard halyards hammerhead hammerheads hammerings
handball handballs handbill handbills handcar handcars handcart
handcarts handcraft handcrafted handcrafting handcrafts handgun
handguns handicapper handicappers handily handiness handmaid
handmaiden handmaidens handmaids handpick handpicked handpicking
handpicks handset handsets handshakes handshaking handsomely
handsomeness handspring handsprings handstand handstands handwork
handwritten handyman handymen hangdog hangman hangmen hangnail
hangnails hank hankerings hankie hankies hanks hansom hansoms
haphazardly happenstance happenstances harbinger harbingers hardbacks
hardball hardcover hardcovers hardener hardeners hardheaded
hardheadedly hardheadedness hardhearted hardheartedly hardheartedness
hardily hardiness hardness hardtack hardtop hardtops harelip harelips
harlequin harlequins harmfulness harmlessness harmonically harmonics
harmoniously harmoniousness harpies harpy harridan harridans hashish
hasp hasps hassock hassocks hastiness hatchback hatchbacks hatcheries
hatchery hatchway hatchways hatefulness hater haters hath hatter
hatters hauler haulers haunch haunched haunches haunching hauntingly
hauteur haversack haversacks haw hawed hawing hawker hawkers hawkish
haws hawser hawsers hawthorn hawthorns haycock haycocks hayloft
haylofts haymow haymows hayseed hayseeds hazelnut hazelnuts hazily
haziness hazings headband headbands headboard headboards headdress
headdresses headgear headhunter headhunters headless headlock
headlocks headmasters headmistress headmistresses headset headsets
headwaiter headwaiters headwaters headwind headwinds headword
headwords healthfully healthfulness healthily healthiness hearer
hearers hearken hearkened hearkening hearkens heartiness heartland
heartlands heartlessly heartlessness heartrending heartsick
heartstrings heartthrob heartthrobs heartwarming heathenish heaths
heatstroke heavenward heavenwards heavyset heck hectare hectares
hectically hector hectored hectoring hectors hedgerow hedgerows
hedonism hedonist hedonistic hedonists heedful heedlessly heedlessness
heehaw heehawed heehawing heehaws heft hefted hefting hefts hegemony
heinously heinousness heiress heiresses heist heisted heisting heists
helical helices heliotrope heliotropes helix hellebore hellhole
hellholes hellion hellions hellishly helmsman helmsmen helot helots
helpfulness helplessness helpmate helpmates hemispheric hemispherical
hemline hemlines hemorrhoid hemorrhoids hempen hemstitch hemstitched
hemstitches hemstitching henceforward henna hennaed hennaing hennas
henpeck henpecked henpecking henpecks hep hepatic hepatics hepper
heppest heps heptagon heptagons heraldic heraldry herbaceous herbage
herbal herbalist herbalists herbicide herbicides herbivore herbivores
herculean herdsman herdsmen hereabout hereof hereto heretofore
hereupon hermaphrodites hermaphroditic hermetic hermetically hermetics
hermitage hermitages heroically heroics herringbone herringboned
herringbones herringboning hertz hesitantly hesitatingly heterodox
heterodoxy heterogeneity heuristics hewer hewers hex hexameter
hexameters hexed hexes hexing hibachi hibachis hibiscus hibiscuses
hickey hickeys hidebound hideousness hideout hideouts hie hied hieing
hierarchically hies highball highballs highborn highboy highboys
highchair highchairs highfalutin highlighter highlighters highness
hightail hightailed hightailing hightails highwayman highwaymen
hijacker hijackers hijackings hilariously hillock hillocks hilltop
hilltops hindmost hindquarter hindquarters hippo hippos hireling
hirsute histamine histamines histograms histrionic histrionics hitter
hitters hoarfrost hoarier hoariest hoariness hoarsely hoary hoaxer
hoaxers hob hobbyist hobbyists hobnail hobnailed hobnailing hobnails
hobs hockshop hockshops hod hods hoedown hoedowns hogan hogans hoggish
hogshead hogsheads hogwash hokey hokier hokiest hokum holdings holdout
holdouts holdover holdovers holistic hollowly hollowness hollyhock
hollyhocks hologram holograms holograph holographic holographs
holography homburg homburgs homebodies homebody homeboy homeboys
homecoming homecomings homegrown homelessness homeliness homemaker
homemakers homeowner homeowners homepage homepages homer homered
homering homeroom homerooms homers homesteader homesteaders
homestretch homestretches hometown hometowns homeyness homilies homily
hominy homogeneity homogeneously homograph homographs homophobia
homophone homophones honcho honchos honeybee honeybees honeydew
honeydews honeymooner honeymooners honorarium honorariums honorific
honorifics hooch hoodoo hoodooed hoodooing hoodoos hooey hookah
hookahs hooker hookers hookier hookiest hookup hookups hookworm
hookworms hooky hooligan hooliganism hooligans hoopla hooters
hopefulness hopelessness hoppers hormonal hornless hornpipe hornpipes
horology horridly horseflies horsefly horsehair horsehide horsemanship
horsemen horsetail horsetails horsewhip horsewhipped horsewhipping
horsewhips horsewoman horsewomen horsey horsier horsiest
horticulturist horticulturists hos hosanna hosannas hospice hospices
hospitably hosteler hostelers hostelries hostelry hostilely
hostilities hostler hostlers hotcake hotcakes hotelier hoteliers
hotheadedly hotheadedness hothouse hothoused hothouses hothousing
hotness hotshot hotshots housebound housebreak housebreaking
housebreaks housebroke housebroken houseclean housecleaned
housecleaning housecleans housecoat housecoats houseflies housefly
householder householders househusband househusbands housekeeping
housemaid housemaids housemother housemothers houseplant houseplants
housetop housetops housewares hovercraft howdah howdahs howdied
howdies howdy howdying howitzer howitzers howler howlers howsoever
hubbies hubby hubcap hubcaps hubris huckleberries huckleberry huckster
huckstered huckstering hucksters huffily hugeness huhs hula hulaed
hulaing hulas humaneness humanistic humanists humanitarianism
humankind humanness humanoid humanoids humbleness humblings humbugged
humbugging humbugs humdinger humdingers humeri humerus humidifier
humidifiers humidor humidors hummock hummocked hummocking hummocks
humongous humpback humpbacked humpbacks humus hunchbacked hundredfold
hundredfolds hundredweight hundredweights hungover hunker hunkered
hunkering hunkers huntress huntresses huntsman huntsmen hurdler
hurdlers hurler hurlers husbandry husker huskers hussar hussars
hussies hussy hustings hydra hydrangea hydrangeas hydras hydrate
hydrated hydrates hydrating hydraulically hydrocarbon hydrocarbons
hydroelectricity hydrofoil hydrofoils hydrogenate hydrogenated
hydrogenates hydrogenating hydrology hydrolysis hydrometer hydrometers
hydrophobia hydroponic hydroponics hydrosphere hydrotherapy
hygienically hygienist hygienists hygrometer hygrometers hying hymen
hymens hype hyped hyper hyperactive hyperactives hyperactivity
hyperbola hyperbolas hyperbolic hypercritical hypercritically
hypermarket hypersensitive hypersensitivities hypersensitivity
hyperspace hypertext hyperventilate hyperventilated hyperventilates
hyperventilating hyperventilation hypes hyphenations hyping hypnoses
hypnotically hypo hypoallergenic hypocritically hypodermic hypodermics
hypoglycemia hypoglycemic hypoglycemics hypos hypothalami hypothalamus
hypothermia hypothetically hysterectomies hysterectomy hysteresis i
iamb iambic iambics iambs ibex ibexes ibis ibises ibuprofen icebound
icebox iceboxes icecap icecaps icily iciness ickier ickiest icky
iconoclast iconoclastic iconoclasts idealism idealistically ideogram
ideograms ideograph ideographs ideologist ideologists ides
idiomatically idiotically idleness idlers idolater idolaters
idolatrous idolatry ids idyll idylls iffier iffiest iffy igneous
ignoble ignobly ignominies ignominious ignominiously ignominy
ignoramus ignoramuses ignorantly ilks illegalities illegality
illegibility illegitimacy illegitimately illiberal illicitly
illicitness illogically illumine illumined illumines illumining
illusive imaginably imaginatively imam imams imbalanced imbecilic
imbecilities imbecility imbibe imbibed imbibes imbibing imbroglio
imbroglios imbue imbued imbues imbuing immaculateness immanence
immanent immaturely immediacy immemorial imminence immobility
immoderate immoderately immodest immodestly immodesty immolate
immolated immolates immolating immolation immorally immortally
immovably immunology immure immured immures immuring immutability
immutable immutably impala impalas impalement impalpable impanel
impanels impassively impassivity impeachment impeachments
impeccability impeccably impecunious impecuniousness impedimenta
impenetrability impenetrably impenitence impenitent impenitents
imperatively imperialistic imperialists imperially imperious
imperiously imperiousness imperishable imperishables impermanence
impermanent impermeable impermissible impersonator impersonators
impertinently imperturbability imperturbable imperturbably impetigo
impetuosity impieties impiety impingement impious impiously impishly
impishness implacability implacably implantation implausibilities
implausibility implausibly implode imploded implodes imploding
implosion implosions impolitely impoliteness impolitenesses impolitic
imponderable imponderables importer importers importunate importunated
importunates importunating importune importuned importunes importuning
importunity imposingly imposture impostures impotently impoverishment
impracticable impracticably impracticality imprecation imprecations
imprecisely imprecision impregnability impregnably impregnation
impresario impresarios impressionism impressionist impressionistic
impressionists impressiveness imprimatur imprimaturs improvable
improvidence improvident improvidently imprudence imprudent impudently
impugn impugned impugning impugns impulsion impulsiveness impurely
imputation imputations impute imputed imputes imputing inaccessibility
inaccurately inadvertence inamorata inamoratas inanely inanities
inanity inappropriately inapt inarticulately inattention inattentive
inaudibly inboard inboards inbound inbounded inbounding inbounds
incalculably incapability incautious inchoate inchoated inchoates
inchoating incineration incipient incise incised incises incising
incisively incisiveness incivilities incivility inclemency inclement
inclusively incombustible incommensurate incommunicado incomparably
incompetently incompletely incompleteness incomprehensibly
inconceivably inconclusively incongruously inconsequentially
inconsiderately inconsiderateness inconsistently inconspicuously
inconspicuousness inconstancy inconstant incontestable incontestably
incontinence incontinent incontrovertible incontrovertibly incorporeal
incorrectness incorrigibility incorrigibly incorruptibility
incorruptible incorruptibles increasings incredibility incredulously
incrimination incriminatory incrustation incrustations incubus
incubuses inculcate inculcated inculcates inculcating inculcation
inculpate inculpated inculpates inculpating incumbencies incumbency
incurably incurious incursion incursions indebtedness indecently
indecipherable indecisively indecisiveness indecorous indefatigable
indefatigably indefensibly indefinably indelicacies indelicacy
indelicately indemnification indemnifications indemnified indemnifies
indemnify indemnifying indemnities indemnity indenture indentured
indentures indenturing indescribably indestructibly indeterminable
indeterminacy indeterminately indictable indifferently indigence
indigent indigents indirectness indiscernible indiscreetly
indispensably indisposition indispositions indisputably indissoluble
indistinctly indistinctness individualistic indivisibility indivisibly
indolently indomitably indubitable indubitably inductance inductee
inductees inductive indulgently industrialism industrially
industriously industriousness inebriate inebriated inebriates
inebriating inebriation ineducable ineffable ineffably ineffectively
ineffectiveness ineffectually inelastic inelegance inelegantly
ineligibility ineluctable ineluctably ineptly ineptness inequitable
inequities inequity inertly inertness inescapably inessential
inessentials inestimable inestimably inevitability inexcusably
inexhaustibly inexpedient inexpensively inexpert inexperts
inexpressible inextinguishable inextricable infallibility infallibly
infamously infanticide infanticides infantryman infantrymen infarction
infatuate infatuated infatuates infatuating infectiously
infectiousness infelicitous inferential infernal infertility infielder
infielders infighting infiltrator infiltrators infinitesimally
infinitude inflatables inflect inflected inflecting inflectional
inflects inflexibility inflexibly inflexion infliction inflorescence
inflow influentially infomercial infomercials infotainment
infrastructures infrequency infuriatingly ingenue ingenues ingenuous
ingenuously ingenuousness ingestion inglorious ingot ingots ingrate
ingrates ingratiatingly ingress ingresses ingrown inhabitable inhalant
inhalants inhalation inhalations inhalator inhalators inhere inhered
inheres inhering inheritor inheritors inhumanely inhumanly inimical
inimically inimitable inimitably iniquities iniquitous iniquity
injector injectors injudicious inkblot inkblots inkiness inkwell
inkwells inmost innately innocuously innovate innovated innovates
innovating innovator innovators inoffensively inoperable inordinately
inorganic inpatient inpatients inquietude inquirer inquirers
inquiringly inquisitively inquisitiveness inquisitor inquisitors
inroad inroads insatiably inscrutably inseam inseams insectivore
insectivores insectivorous insecurely inseminate inseminated
inseminates inseminating insemination insensate insensibility
insensible insensibly insensitively insentience insentient
inseparability inseparably inset insets insetting inshore insidiously
insidiousness insightful insignificantly insistently insole insolently
insoles insolubility insolvable insomniac insomniacs insouciance
insouciant inspirational instigator instigators instinctively
instructional instructively instrumentalist instrumentalists
instrumentality instrumentation insufferably insufficiency insularity
insuperable insupportable insureds insurgence insurgences insurgencies
insurgency insurrectionist insurrectionists intaglio intaglios
intangibly integrator integument integuments intellectualism
intelligentsia intelligibility intemperance intemperate intendeds
intensification intensifier intensifiers intensively intently
intentness interbred interbreed interbreeding interbreeds interceptor
interceptors intercession intercessions intercessor intercessors
interchangeably intercollegiate interconnected interconnecting
interconnection interconnections interconnects interdenominational
interdepartmental interdict interdicted interdicting interdiction
interdicts interdisciplinary interfaith interferon intergalactic
interlace interlaced interlaces interlacing interlard interlarded
interlarding interlards interleave interleaved interleaves
interleaving interleukin interlink interlinked interlinking interlinks
interlocutory intermezzi intermezzo intermezzos internationalism
internecine internee internees internist internists internment
internship internships interoffice interpersonal interpolate
interpolated interpolates interpolating interpolations interposition
interpretative interpretive interrelate interrelated interrelates
interrelating interrelation interrelations interrelationship
interrelationships interrogative interrogatives interrogatories
interrogatory interscholastic interstice interstices interurban
interviewee interviewees intestate intone intoned intones intoning
intoxicant intoxicants intractability intransigence intransigent
intransigents intransitively intravenously intrepidly intricately
intriguingly intros introspection introversion introverted intuit
intuited intuiting intuits inure inured inures inuring invalidation
invalidity invasive inveigh inveighed inveighing inveighs inveigle
inveigled inveigles inveigling inventiveness investigative investiture
investitures invidious invidiously invigoration invincibility
invincibly inviolability inviolable inviolate invitational
invitationals invitingly invulnerability invulnerably ionosphere
ionospheres ipecac ipecacs irascibility irately irateness iridium
irksome ironclad ironclads ironical ironware ironwork irradiation
irrationality irrecoverable irredeemable irredeemables irregardless
irregularly irrelevancies irrelevancy irrelevantly irreligious
irremediable irremediably irreparably irresistibly irresolute
irresolutely irresolution irresponsibly irreverently irreversibly
irritatingly irruption irruptions isinglass islet islets ism isms
isobar isobars isolationism isolationist isolationists isometric
isometrics isomorphic isosceles isotope isotopes isotopic isotropic
issuance itchiness iterated iterates iterating iterator iterators j
jabberer jabberers jabot jabots jackboot jackboots jackdaws jackhammer
jackhammered jackhammering jackhammers jackrabbit jackrabbits jag
jaggedly jaggedness jags jailbreak jailbreaks jalapeno jalapenos
jalousie jalousies janitorial japan japanned japanning japans jape
japed japes japing jardiniere jardinieres jasmine jasmines jasper
jauntiness jawbreaker jawbreakers jazzier jazziest jazzy jeep jeeps
jeeringly jeez jejune jellybean jellybeans jeremiad jeremiads jerkily
jerkin jerkins jerkwater jetsam jib jibbed jibbing jibs jigger
jiggered jiggering jiggers jihad jihads jimmied jimmies jimmy jimmying
jingoism jingoist jingoistic jingoists jinn jinns jinrikisha
jinrikishas jitney jitneys jitterbug jitterbugged jitterbugging
jitterbugs jive jived jives jiving jobber jobbers jobless joblessness
jock jocked jocking jocks jockstrap jockstraps jocose jocosely
jocosity jocularity jocularly jocund jocundity jocundly jodhpurs
joggle joggled joggles joggling john johns joiner joiners joist joists
jokingly jolliness jollity jonquil jonquils josh joshed joshes joshing
jottings joule joules jounce jounced jounces jouncing journalese
journalistic journeyman journeymen joust jousted jousting jousts
joviality jowl jowls joyfulness joyless joyousness joyridden joyride
joyrider joyriders joyrides joyriding joyrode joysticks jubilantly
judgeship judicature judiciousness juggernauts juicer juicers
juiciness jujitsu jujube jujubes jukebox jukeboxes julep juleps
julienne julienned juliennes julienning jumpiness jumpsuit jumpsuits
junco juncos junker junkers junkyard junkyards juridical
jurisdictional jurisprudence jurist jurists justness k kHz kW kabob
kabobs kaboom kale kaleidoscopic kamikaze kamikazes kaolin kapok kaput
karakul karaoke karaokes karma katydid katydids kazoo kazoos kc kebab
kebabs keenness kenned kenning kens keratin kestrel kestrels ketch
ketches kettledrum kettledrums keyboarder keyboarders keypunch
keypunched keypunches keypunching keystroked keystroking khan khans
kibbutz kibbutzim kibitz kibitzed kibitzer kibitzers kibitzes
kibitzing kibosh kicker kickers kickier kickiest kickstand kickstands
kicky kidder kidders kiddie kiddied kiddies kiddo kiddos kiddying
kidnappings kielbasa kielbasas killdeer killdeers killjoy killjoys
kilocycle kilocycles kilohertz kiloton kilotons kilter kindergartner
kindergartners kindhearted kindliness kinematic kinematics kinetic
kinetics kinfolks kinglier kingliest kingly kingpin kingpins kingship
kinsman kinsmen kinswoman kinswomen kippered kippering kippers kirk
kirked kirking kirks kismet kisser kissers kitchenware kith kithed
kithing kiths kitsch kitschy kittenish kleptomania kleptomaniac
kleptomaniacs klutz klutzes klutzier klutziest klutzy knackwurst
knackwursts knave knavery knaves knavish kneader kneaders knell
knelled knelling knells knickknack knickknacks knightly knitter
knitters knitwear knobbier knobbiest knobby knothole knotholes
knowable knowledgeably knucklehead knuckleheads kohlrabi kohlrabies
kook kookaburra kookaburras kooked kookier kookiest kookiness kooking
kooks kooky kopeck kopecks krona krone kroner kronor kronur krypton ks
kudzu kudzus kumquat kumquats l la labia labial labials labium
laburnum laburnums labyrinthine lachrymal lachrymose lackadaisical
lackadaisically lackey lackeys laconic laconically lactate lactated
lactates lactating lactation lactic lactose lacuna lacunae laddie
laddies ladings ladybird ladybirds ladyfinger ladyfingers ladyship
lagers lagniappe lagniappes laity lam lama lamas lamaseries lamasery
lambaste lambasted lambastes lambasting lambent lambkin lambkins
lambskin lambskins lamebrain lamebrains lamely lameness lamentably
laminate laminated laminates laminating lamination lammed lammer
lamming lampblack lamppost lampposts lamprey lampreys lampshade
lampshades lams lancer lancers lancet lancets landfall landfalls
landfill landfills landholder landholders landlubber landlubbers
landmass landmasses landscaper landscapers landward landwards
languidly languorously lank lanker lankest lankiness lanolin lanyard
lanyards lapidaries lapidary laptop laptops lapwing lapwings larboard
larboards larcenous larch larches larder larders largeness largess
largo largos lariat lariated lariating lariats larkspur larkspurs
larval lasagna lasagnas lasciviously lasciviousness lassie lassies
lassitude lasso lassoed lassoing lassos lastingly latecomer latecomers
latency lateness laterally latitudinal lats latterly latticed
latticework latticeworks laudably laudanum laudatory laughably
laughingly launderer launderers laundress laundresses laundryman
laundrymen lavishly lavishness lawbreaker lawbreakers lawfully
lawfulness lawgiver lawgivers lawlessly lawlessness lawrencium laxly
laxness layaway layette layettes layoff layoffs layover layovers
laypeople layperson laywoman laywomen laze lazed lazes lazily lazing
lazybones lea leached leaches leaching leafless leakier leakiest
leanings leanness learner learners leas leasehold leaseholder
leaseholders leaseholds leastwise leatherneck leathernecks leathers
leaven leavened leavening leavens leavings lecher lecherous
lecherously lechers lechery lecithin lees leeward leewards lefties
leftism leftist leftists leftover leftovers leftwards lefty legalese
legalism legalisms legate legated legatee legatees legates legating
legation legations legato legatos legerdemain leggier leggiest leggy
legionnaire legionnaires legit legless legman legmen legroom legrooms
leguminous legwork lei leis leitmotif leitmotifs lemma lemmas lemming
lemmings lemony lemur lemurs lender lenders lengthily leniently
leonine leprechaun leprechauns leprous lesbianism lessee lessees
lessor lessors lethally lethargically letterbox leukocyte leukocytes
levelheaded levelheadedness levelness leviathan leviathans levitate
levitated levitates levitating levitation lewdly lewdness
lexicographer lexicographers lexicography liaise liaised liaises
liaising lib libation libations libbed libbing liberality liberator
liberators libertarians libertine libertines libidinous libido libidos
librettist librettists librettos libs licensee licensees licentiate
licentiates licentious licentiously licentiousness licit lickings
lidded lief liefer liefest liefs liege lieges lien liens lieutenancy
lifeblood lifer lifers lifesaver lifesavers lifesaving lifespans
lifework lifeworks liftoff liftoffs ligatured ligaturing lightheaded
lighthearted lightheartedly lightheartedness lignite likableness
limbless limboed limboing limbos limeade limeades limier limiest
limitings limn limned limning limns limo limos limpet limpets limpid
limpidity limpidly limply limpness limy linage linden lindens lineal
lineally lineament lineaments linebacker linebackers lineman linemen
linens linesman linesmen lineup lineups lingerer lingerers lingeringly
lingerings lingual linkages linkup linkups linnet linnets linseed
lintel lintels lionhearted lipid lipids liposuction lipread lipreading
lipreads liquefaction liquidator liquidators liquidity lira lire lisle
lissome listlessly listlessness litchi litchis lite literati lites
lithograph lithographed lithographer lithographers lithographic
lithographing lithographs lithography lithosphere lithospheres
litigant litigants litigate litigated litigates litigating litigious
litigiousness litmus littleness littoral littorals livability livelong
livelongs liveried liveries liverwurst livery lividly llano llanos lo
loaders loamier loamiest loamy loaner loaners loanword loanwords
loathsomeness lobotomies loch lochs loci lockable lockjaw lockout
lockouts lockstep lockup lockups loco locoweed locoweeds locus
locution locutions lode lodes lodestar lodestars lodestone lodestones
loftily loganberries loganberry logarithms logbook logbooks loge loges
loggerhead loggerheads loggers logicians logistic logistical
logistically logistics logjam logjams logos logotype logotypes
logrolling loner loners longboat longboats longhair longhairs longhorn
longhorns longingly longitudinally longtime loofah lookalike
lookalikes loopier loopiest loopy looseness looter looters lopsidedly
lopsidedness loquacious loquacity lordlier lordliest lordly lordship
lordships lorgnette lorgnettes lorn lotto loudmouth loudmouthed
loudmouths lousiness lout loutish louts lovebird lovebirds loveless
lovelorn lovemaking lovesick lowbrow lowbrows lowercase lowish lowland
lowlands lowliness lowness lox loxed loxes loxing loyalist loyalists
loyally ls luau luaus lubber lubbers lube lubed lubes lubing
lubricator lubricators lucidity lucidly lucidness luckiness luckless
lucratively lucre ludicrousness lugubrious lugubriously lugubriousness
lumbago lumbar lumberman lumbermen lumberyard lumberyards luminescence
luminescent luminosity luminously lummox lummoxes lumpiness lumpish
lunchbox lunchboxes luncheonette luncheonettes lunchroom lunchrooms
lunchtimes lupus luridly luridness lusciously lusciousness lushness
lustful lustfully lustily lustiness luxuriance luxuriantly luxuriously
luxuriousness lyceum lyceums lymphoma lymphomas lynchings lynx lynxes
lyrically lyricist lyricists macadam macaroon macaroons macaw macaws
macerate macerated macerates macerating maceration machination
machinations machismo mackinaw mackinaws mackintosh mackintoshes
macrame macro macrobiotic macrobiotics macrocosm macrocosms macron
macrons macros maddeningly madders mademoiselle mademoiselles madras
madrases madrigal madrigals madwoman madwomen maestro maestros
magisterial magisterially magma magnesia magnetically magneto magnetos
magnetosphere magnification magnifications magnificently magnifier
magnifiers magnums maharajah maharajahs maharani maharanis maharishi
maharishis mahatma mahatmas maidenhair maidenhead maidenheads
maidenhood maidenly maidservant maidservants mailer mailers mailings
mainlined mainlines mainlining mainmast mainmasts mainsail mainsails
mainspring mainsprings mainstreamed mainstreaming mainstreams
majorette majorettes majorly makings maladjustment maladroit malaise
malapropism malapropisms malarial malarkey malcontent malcontents
malediction maledictions malefactor malefactors maleness malevolently
malfeasance malformation malformations malfunctioned malfunctioning
malfunctions malignantly malignity malinger malingered malingerer
malingerers malingering malingers malleability mallow mallows
malnourished malodorous malteds maltreatment mambo mamboed mamboing
mambos mammalians mammary mammogram mammograms mammography mammon
manageability manatee manatees mandarin mandarins mandrake mandrakes
mandrill mandrills manege maneged maneges maneging manful manfully
manganese manhunt manhunts manics manikin manikins manipulative
manipulator manipulators manna mannered mannerly mannishly mannishness
manorial manque mansard mansards manse manservant manses mantilla
mantillas mantis mantises mantissa mantra mantras manumit manumits
manumitted manumitting marabou marabous maraca maracas marathoner
marathoners maraud marauded marauder marauders marauding marauds
marchers marchioness marchionesses margarita margaritas marginalia
marginals maria mariachi mariachis marimba marimbas marinade marinaded
marinades marinading marjoram markdown markdowns marketability
marketer marketers marksmanship markup markups marlin marlins marmoset
marmosets marmot marmots marquess marquesses marquetry marquis
marquise marquises marriageable marrieds marten martens martinet
martinets martini martinis martins marzipan masculinity masher mashers
masochism masochistic masonic masque masquerader masqueraders masques
masseur masseurs masseuse masseuses massiveness mastectomies
mastectomy masterfully masterstroke masterstrokes masterwork
masterworks masthead mastheads mastication mastiff mastiffs mastodon
mastodons mastoid mastoids masturbate masturbated masturbates
masturbating matchbox matchboxes matchmaking matchstick matchsticks
materialistically materially materiel maternally matins matriarchies
matriarchy matricide matricides mattock mattocks maturation maturely
matzo matzoh matzohs matzos matzot matzoth maunder maundered
maundering maunders maven mavens maw mawed mawing mawkish mawkishly
maws maxed maxes maxilla maxillae maxillary maximally maximals maxing
mayday maydays mayflies mayflower mayflowers mayfly mayo mayoral
mayoralty maypole maypoles mead meadowlark meadowlarks meagerly
meagerness mealtime mealtimes meaningfully meanly meanness measurably
measureless meatball meatballs meatier meatiest meatloaf meatloaves
meaty mecca meccas mechanistic medial medians medic medicinally medics
meditative meditatively medulla medullas meetinghouse meetinghouses
meg megacycle megacycles megahertz megalith megaliths megalomania
megalomaniacs megalopolis megalopolises megs melancholia melancholic
melancholics melange melanges melanin melanoma melanomas meld melded
melding melds melee melees mellifluous mellifluously mellowness
melodically melodiously melodiousness melodramatically meltdown
meltdowns membranous memorabilia menacingly menage menages mendacious
mendacity mender menders mendicant mendicants menfolk menhaden
menially meningitis menopausal menorah menorahs menservants menses
menswear mentholated merchantman merchantmen mercurial mercuric
meretricious merganser mergansers merino merinos meritocracies
meritocracy meritorious meritoriously merman mermen merriness
merrymaker merrymakers merrymaking mesa mesas mescal mescaline mescals
mesdemoiselles mesmerism mesquite mesquites messiah messiahs messieurs
messily messiness mestizo mestizos metacarpal metacarpals metacarpi
metacarpus metallurgical metallurgist metallurgists metamorphic
metamorphism metamorphosed metamorphosing metastases metastasis
metatarsal metatarsals meteoroid meteoroids meteorological methadone
methane methanol methinks methodically methodological methodologies
methought meticulously meticulousness metier metiers metrical
metrically metrication metrics metronome metronomes mettlesome mewl
mewled mewling mewls mi miasma miasmas mica microbiologist
microbiologists microchip microchips microcosm microcosms
microeconomics micron microns microprocessors microscopically
microscopy microsurgery mid midair middies middlebrow middlebrows
middleweight middleweights middling middy midge midges midland
midlands midmost midmosts midpoint midpoints midshipman midshipmen
midterm midterms midtown midweek midweeks midwiferies midwifery
midwinter midyear midyears miff miffed miffing miffs mightily
mightiness mil milch mildness milepost mileposts miler milers milieu
milieus militantly militarism militarist militaristic militarists
militiaman militiamen milkiness milkmaid milkmaids milkshake milksop
milksops milkweed milkweeds millage millennial millennium millenniums
millet millipede millipedes millrace millraces millstone millstones
milquetoast milquetoasts mils mimeograph mimeographed mimeographing
mimeographs mimetic mimosa mimosas minaret minarets minatory mindfully
mindfulness mindlessness minefields mineralogist mineralogists
mineralogy minestrone minesweeper minesweepers mini miniaturist
miniaturists minibike minibikes minicam minicams minicomputers minim
minimalists minims minis miniseries miniskirt miniskirts ministrant
ministrants ministration ministrations minivan minivans minster
mintier mintiest minty minuend minuends minutely minuteman minutemen
minuteness minutia minutiae minx minxes mirthful mirthfully mirthless
misalignment misalliance misalliances misanthrope misanthropes
misanthropic misanthropist misanthropists misanthropy misapplication
misapplied misapplies misapply misapplying misapprehend misapprehended
misapprehending misapprehends misapprehensions misbegotten
miscalculate miscalculated miscalculates miscalculating miscalculation
miscalculations miscall miscalled miscalling miscalls miscast
miscasting miscasts miscegenation miscellanies mischance mischanced
mischances mischancing mischievously mischievousness misconceive
misconceived misconceives misconceiving misconstruction
misconstructions miscount miscounted miscounting miscounts miscreant
miscreants miscue miscued miscues miscuing misdeal misdealing misdeals
misdealt misdiagnose misdiagnosed misdiagnoses misdiagnosing
misdiagnosis misdid misdo misdoes misdoing misdoings misdone
miserliness misfeasance misfire misfired misfires misfiring misgovern
misgoverned misgoverning misgoverns misguidedly mishandle mishandled
mishandles mishandling mishmash mishmashes misidentified misidentifies
misidentify misidentifying misinterpretations mismanage mismanaged
mismanages mismanaging misogynist misogynistic misogynists misogyny
misplay misplayed misplaying misplays mispronounce mispronounced
mispronounces mispronouncing mispronunciation mispronunciations
misquotation misquotations misreadings misrule misruled misrules
misruling missal missals missilery misspend misspending misspends
misspent misstate misstated misstatement misstatements misstates
misstating misstep misstepped misstepping missteps mister misters
mistily mistime mistimed mistimes mistiming mistiness mistranslated
mistreat mistreated mistreating mistreatment mistreats mistrial
mistrials mistrustful mistypes mitigation mitosis mizzen mizzenmast
mizzenmasts mizzens mobster mobsters mocha mocker mockers mockingly
modals modem modems modernism modernist modernistic modernists
modifiable modish modishly mods modulator modulators modulus mogul
moguls moieties moiety moire moires moistly moistness molder moldered
moldering molders moldiness molehill molehills moleskin molestation
molester molesters moll mollification molls mollycoddle mollycoddled
mollycoddles mollycoddling molybdenum momentousness momma mommas
mommies mommy monarchic monarchical monarchism monarchist monarchists
monasticism monaural monetarily moneybag moneybags moneyed moneymaker
moneymakers moneymaking monger mongered mongering mongers mongolism
mongooses moniker monikers monkeyshine monkeyshines mono monochromatic
monochromes monocle monocles monocotyledon monocotyledons monograph
monographs monolingual monolinguals monolith monoliths monomania
monomaniac monomaniacs mononucleosis monophonic monopolist
monopolistic monopolists monosyllabic monotheism monotheist
monotheistic monotheists monotone monotoned monotones monotonic
monotoning monotonously monoxide monoxides monsieur monsignor
monsignors monstrance monstrances monstrously montage montages
monumentally mooch mooched moocher moochers mooches mooching moodiness
moonlighter moonlighters moonlit moonscape moonscapes moonshine
moonshines moonshot moonshots moonstone moonstones moonstruck moorland
mopeds moppet moppets moraine moraines moralistic moray morays
morbidity morbidly mordant mordants mores moribund morocco morosely
moroseness morpheme morphemed morphemes morpheming morphological
morrow morrows mortarboard mortarboards mortgagee mortgagees mortgagor
mortgagors mortician morticians mortise mortised mortises mortising
moses mosey moseyed moseying moseys mossed mossing mote motes
motherboard motherboards motherfucker motherfuckers motherfucking
motherland motherlands motherless motherliness motile motiles
motivational motivator motivators motocross motocrosses motorbiked
motorbiking motorboat motorboats motorcar motorcars motorcyclist
motorcyclists motorman motormen motormouth motormouths mottle mottled
mottles mottling moult moulted moulting moults mountainside
mountainsides mountaintop mountaintops mountebank mountebanks
mountings mournfully mournfulness mouser mousers mousetrap
mousetrapped mousetrapping mousetraps mousiness mouthwash mouthwashes
mouthwatering movingly mozzarella mucilage muckier muckiest muckrake
muckraked muckraker muckrakers muckrakes muckraking mucky muddiness
mudguard mudguards mudslide mudslides mudslinger mudslingers
mudslinging muesli muezzin muezzins mufti muftis muggings mukluk
mukluks mulatto mulattoes mulberries mulberry muleteer muleteers
mulish mulishly mulishness mullah mullahs mullet mullets mulligatawny
mullion mullions multicultural multiculturalism multidimensional
multifaceted multifarious multifariousness multilateral multilingual
multimedia multimillionaire multimillionaires multiplex multiplexed
multiplexer multiplexers multiplexes multiplexing multiplicand
multiplicands multiplier multipliers multipurpose multiracial
multitudinous multivariate multivitamin multivitamins mumbler mumblers
mummer mummers mummery mummification munchies mundanely municipally
munificence munificent munition munitions muralist muralists murderess
murderesses murderously murk murkily murkiness murks muscat muscatel
muscatels muscularity musculature mushiness musicale musicales
musicianship musicologist musicologists musicology musings muskellunge
muskellunges musketeer musketeers musketry muskier muskiest muskiness
muskmelon muskmelons muskrat muskrats musky muslin mussier mussiest
mussy mustiness mutability mutable muteness mutineer mutineered
mutineering mutineers mutinously mutuality muumuu muumuus myna mynas
myopia myrrh myrtle myrtles mysteriousness mystically mystification
mystique mythic mythologist mythologists n nabob nabobs nacho nachos
nacre nadir nadirs naiad naiads nailbrush nailbrushes nakedly nannied
nannies nanny nannying nanosecond nanoseconds naphtha naphthalene
nappier nappiest narc narced narcing narcissism narcissist
narcissistic narcissists narcissus narcosis narcs narwhal narwhals
nary nasally nascent nasturtium nasturtiums natal nattily naturalism
naturalistic nauseatingly nautically nautilus nautiluses nave naves
navigability navigational naysayer naysayers nearness neath nebular
necromancer necromancers necromancy necrosis needful needfuls
neediness needlepoint nefarious nefariously nefariousness negativity
neglectfully negligibly negs nematode nematodes nemeses nemesis
neoclassic neoclassical neoclassicism neocolonialism neodymium
neologism neologisms neonatal neonate neonates neoprene nephritis
neptunium nerd nerdier nerdiest nerds nerdy nerveless nervelessly
nervier nerviest nervy nethermost nettlesome neuralgia neuralgic
neuritis neurological neurosurgery neurotically neurotransmitter
neurotransmitters neutrally neutrino neutrinos nevermore newel newels
newlywed newlyweds newness newsboy newsboys newsflash newsman newsmen
newspaperman newspapermen newspaperwoman newspaperwomen newsreel
newsreels newsworthier newsworthiest newsworthy newtons nexus nexuses
niacin nib nibbed nibbing nibbler nibblers nibs niceness nickelodeon
nickelodeons niggard niggarded niggarding niggardliness niggardly
niggards nigger niggers niggle niggled niggles niggling nigglings
nigher nighest nightcap nightcaps nightclothes nighthawk nighthawks
nightie nighties nightlife nightshade nightshades nightshirt
nightshirts nightstick nightsticks nihilism nihilist nihilistic
nihilists nimbi nimbleness nimbus ninepin ninepins ninja ninjas nipper
nippered nippering nippers nirvana nitpick nitpicked nitpicker
nitpickers nitpicking nitpicks nitrogenous nitroglycerin nix nixed
nixes nixing nobleness nocturnally nocturne nocturnes nodal noddy
nodular nodule nodules noel noels noggin noggins noiselessness
noisemaker noisemakers noisome nonabrasive nonabsorbent nonabsorbents
nonagenarian nonagenarians nonalcoholic nonalcoholics nonaligned
nonbeliever nonbelievers nonbreakable nonce noncom noncombatant
noncombatants noncommercial noncommercials noncommittally
noncompetitive noncompliance noncoms nonconductor nonconductors
nonconformity noncontagious noncooperation nondairy nondeductible
nondenominational nondrinker nondrinkers nonempty nonessential
nonesuch nonesuches nonevent nonevents nonexempt nonexistence
nonexistent nonfat nonfatal nongovernmental nonhazardous nonhuman
nonindustrial noninterference nonintervention nonjudgmental nonliving
nonmalignant nonmember nonmembers nonnegotiable nonobjective nonpareil
nonpareils nonpayment nonpayments nonphysical nonplus nonpluses
nonplussed nonplussing nonpoisonous nonpolitical nonpolluting
nonprescription nonproductive nonprofessional nonprofessionals
nonproliferation nonrefillable nonrefundable nonrenewable
nonrepresentational nonrestrictive nonreturnable nonreturnables
nonrigid nonscheduled nonseasonal nonsectarian nonsensically nonsexist
nonskid nonsmoker nonsmokers nonsmoking nonstick nonsupport nontaxable
nontechnical nontoxic nontransferable nonunion nonuser nonusers
nonverbal nonviolent nonvoting nonwhite nonwhites nonzero noonday
noontime nope nopes normalcy normative northbound northeaster
northeasters northeastward northerner northerners northernmost
northwards northwesterly northwestward nosedive nosedived nosedives
nosediving nosegay nosegays nosh noshed noshes noshing nosiness
nostalgically nostrum nostrums notaries notary notepad notepaper
notionally nous novae novas novelette novelettes novella novellas
novitiate novitiates noway noways nowise nth nu nuanced nub nubile
nubs nucleic nudism nudist nudists nuke nuked nukes nuking
nullification nullity numberless numbly numeracy numerated numerates
numerating numeration numerations numerology numismatic numismatics
numismatist numismatists numskull numskulls nuncio nuncios nunneries
nunnery nurseryman nurserymen nuthatch nuthatches nutmeat nutmeats
nutria nutrias nutritionally nutritionist nutritionists nutritive
nuttiness nylons nymphomania nymphomaniac nymphomaniacs o oafish oaken
oakum oarlock oarlocks oarsman oarsmen oat oaten oats obduracy
obdurate obdurated obdurately obdurates obdurating obeisance
obeisances obeisant obfuscate obfuscated obfuscates obfuscating obit
obits objectionably objectiveness oblate oblation oblations obligingly
obliquely obliqueness obliviously obliviousness obloquy obnoxiously
oboist oboists obscenely obscurely obsequies obsequious obsequiously
obsequiousness obsequy observably observantly observational
obsessively obsessives obsidian obstetric obstetrical obstinately
obstreperous obstructionist obstructionists obstructively
obstructiveness obstructives obtrude obtruded obtrudes obtruding
obtrusively obtrusiveness obtusely obtuseness obverse obverses obviate
obviated obviates obviating obviousness ocarina ocarinas occidental
occidentals occlude occluded occludes occluding occlusion occlusions
occult oceangoing oceanographer oceanographers oceanographic ocelot
ocelots octane octet octets octogenarian octogenarians oculist
oculists oddball oddballs oddness odiously odium odoriferous odorous
odyssey odysseys offal offensively offertories offertory offhandedly
officeholder officeholders officialdom officiously officiousness
offside offsides oft oftentimes ofter oftest oho ohos oilcloth
oilcloths oilfield oilfields oiliness oilskin oink oinked oinking
oinks oldie oldies oleaginous oleander oleanders oleo oleomargarine
oligarch oligarchic oligarchies oligarchs oligarchy ombudsman
ombudsmen omegas omnibuses omnipresence omniscience omnivore omnivores
omnivorous oncology oneness onetime ongoings onionskin onomatopoeic
onrushing onshore onyx onyxes oops oopses opacity opalescence
opalescent opaquely opaqueness openhanded openwork operable
operationally operetta operettas ophthalmic opiate opiates opine
opined opines opining opportunism opportunistic oppressively
opprobrious opprobrium optically optimistically optometry opulence
oracular orally orangeade orangeades orate orated orates orating
oratorical oratorio oratorios orb orbs orderings orderliness ordinal
ordinals ordinariness ordnance ordure oregano organdy organelle
organelles organically orgasmic orgasms orgiastic orientals orifices
origami origination oriole orioles ormolu ornamentation ornateness
ornerier orneriest ornery orotund orthodontia orthodontic orthodontics
orthodoxies orthodoxy orthographic orthographies oscillator
oscillators oscilloscopes osier osiers osmotic osprey ospreys
ossification ossified ossifies ossify ossifying ostentatiously
osteopath osteopaths osteopathy osteoporosis ostracism otherworldly
otiose ottoman ottomans outage outages outback outbacks outbalance
outbalanced outbalances outbalancing outbid outbidding outbids
outbounds outbuilding outbuildings outcrop outcropped outcropping
outcroppings outcrops outfielder outfielders outfitter outfitters
outflank outflanked outflanking outflanks outfox outfoxed outfoxes
outfoxing outgo outgoes outlandishly outperform outperformed
outperforming outperforms outplacement outplay outplayed outplaying
outplays outpouring outpourings outrank outranked outranking outranks
outre outreach outreached outreaches outreaching outrider outriders
outrigger outriggers outsell outselling outsells outsize outsizes
outsold outsource outsourced outsources outsourcing outspokenly
outspokenness outspread outspreading outspreads outstay outstayed
outstaying outstays outstretch outstretched outstretches outstretching
outtake outtakes outvote outvoted outvotes outvoting outwear
outwearing outwears outwore outworn ovarian overabundance overabundant
overachieve overachieved overachiever overachievers overachieves
overachieving overact overacted overacting overactive overacts overage
overages overambitious overanxious overawe overawed overawes overawing
overbalance overbalanced overbalances overbalancing overbite overbites
overbook overbooked overbooking overbooks overcautious overcompensate
overcompensated overcompensates overcompensating overcompensation
overconfident overcook overcooked overcooking overcooks overdrafts
overdress overdressed overdresses overdressing overdrive overeager
overenthusiastic overexpose overexposed overexposes overexposing
overexposure overextend overextended overextending overextends
overfull overgenerous overgrowth overindulge overindulged
overindulgence overindulges overindulging overjoy overjoyed overjoying
overjoys overkilled overkilling overkills overlord overlords overmuch
overmuches overpaid overpay overpaying overpays overplay overplayed
overplaying overplays overpopulate overpopulated overpopulates
overpopulating overproduce overproduced overproduces overproducing
overproduction overprotective overqualified overreach overreached
overreaches overreaching overreaction overreactions overripe oversell
overselling oversells oversensitive oversexed overshoe overshoes
oversimplifications oversimplified oversimplifies oversimplify
oversimplifying oversize oversizes oversizing oversold overspend
overspending overspends overspent overspill overspread overspreading
overspreads overstatement overstatements overstay overstayed
overstaying overstays overstock overstocked overstocking overstocks
overstuffed oversupplied oversupplies oversupply oversupplying overtax
overtaxed overtaxes overtaxing overviews overweening overzealous
oviduct oviducts oviparous ovoid ovoids ovulate ovulated ovulates
ovulating ovulation ovule ovules ow owlet owlets owlish oxbow oxbows
oxford oxfords oxyacetylene oxygenate oxygenated oxygenates
oxygenating oxygenation oxymora oxymoron p pH pacesetter pacesetters
pachyderm pachyderms pacifically pacification padre padres paean
paeans paediatrics paganism pagers paginate paginated paginates
paginating pailful pailfuls painkiller painkillers painstakingly
paintbrush paintbrushes painters paintwork pairwise paisley paisleys
palatal palatals palaver palavered palavering palavers paleface
palefaces paleness palimony palimpsest palimpsests palindrome
palindromes palindromic palings palisade palisades palladium pallet
pallets palliate palliated palliates palliating palliation palliative
palliatives palmetto palmettos palmier palmiest palmist palmistry
palmists palmy palpate palpated palpates palpating palpation palpitate
palpitated palpitates palpitating palpitation palpitations palsied
palsies palsy palsying paltriness pampas pamphleteer pamphleteers
panache panchromatic pandemic pandemics panderer panderers panegyric
panegyrics panelist panelists pannier panniers panoplies panoply
pantaloons pantheism pantheist pantheistic pantheists pantheon
pantheons pantsuit pantsuits pantyhose papaw papaws paperboy paperboys
papergirl papergirls paperhanger paperhangers papery papilla papillae
papoose papooses papped papping paps parabola parabolas parabolic
parachutist parachutists paradigmatic paradigms paralegal paralegals
parallax parallaxes parallelism parallelisms parallelogram
parallelograms paramecia paramecium paramedic paramedical paramedicals
paramedics paramilitaries paramilitary paramour paramours paranormal
parapet parapets paraplegia paraprofessional paraprofessionals
parapsychology paratroops parboil parboiled parboiling parboils
parenthetic parenthetically parfait parfaits pariah pariahs parings
parlance parlay parlayed parlaying parlays parley parleyed parleying
parleys parliamentarian parliamentarians parochialism parolee parolees
paroxysm paroxysms parquet parqueted parqueting parquetry parquets
parricide parricides parried parries parry parrying parsimonious
parsimony partaker partakers parterre parterres parthenogenesis
participator participators participatory participial particularities
particularity particulate particulates partisanship parturition
partway parvenu parvenus paschal pasha pashas passably passel passels
passerby passersby passionless passivity passkey passkeys pasteboard
pastern pasterns pastiches pastorate pastorates pastrami pasturage
patchier patchiest patchiness patella patellae patellas paternalistic
paternally pathogen pathogenic pathogens pathologically patina patinas
patois patriarchies patriarchy patrician patricians patricide
patricides patrimonial patriotically patrolman patrolmen patrolwoman
patrolwomen patronymic patronymics patsies patsy pauperism pavings
pawl pawls pawnshop pawnshops paycheck paychecks payday paydays payee
payees payloads paymaster paymasters peaceably peacefulness
peacekeeping peacetime peafowl peafowls peahen peahens pearlier
pearliest pearly peasantry pebblier pebbliest pebbly peccadillo
peccadilloes peccaries peccary pectin pectoral pectorals pecuniary
pedagogic pedagogical pedagogics pedagogue pedagogued pedagogues
pedagoguing pedantically pederast pederasts pederasty pedicure
pedicured pedicures pedicuring pedigreed pediment pediments pedometer
pedometers pee peed peeing peekaboo peeper peepers peephole peepholes
peerage peerages pees peevishly peevishness peewee peewees pejorative
pejoratives pekoe pelagic pellagra pellucid penchants pendent pendents
pendulous penetrable penetrative penile peninsular penitential
penitently penlight penlights pennon pennons pennyweight pennyweights
penologist penologists penology pensiveness pent pentameter
pentameters pentathlon pentathlons pents penultimates penurious penury
peonage peppercorn peppercorns pepperoni pepperonis peppery peppier
peppiest peppy pepsin peptic peptics perambulate perambulated
perambulates perambulating perambulator perambulators percale percales
perceivable percentile percentiles perceptibly perceptively
perceptiveness perceptual percussionist percussionists perdition
peregrination peregrinations peremptorily perennially perfectible
perfectionism perfidies perfidious perfidy perforce perfumeries
perfumery pericardia pericardium perigee perigees perihelia perihelion
periodicity periodontal peripatetic peripatetics periphrases
periphrasis peritoneum peritoneums peritonitis periwig periwigged
periwigging periwigs periwinkle periwinkles perjurer perjurers
perkiness perm permafrost permeability permeable permed perming
permissibly permissively permissiveness perms permute permuted
permutes permuting perniciously peroration perorations perpetration
perpetuation perpetuity perquisite perquisites persiflage persimmon
persimmons persnickety personae personage personages perspicacious
perspicacity perspicuity perspicuous persuasiveness pertinacious
pertinacity pertinence pertly pertness perturbation perturbations
perversely perverseness perversity peseta pesetas peso pesos
pessimistically pestilent pestle pestled pestles pestling petard
petards petiole petioles petitioner petitioners petrel petrels
petrifaction petrochemical petrochemicals petrolatum pettifog
pettifogged pettifogger pettifoggers pettifogging pettifogs pettily
petulance petulantly pewee pewees peyote phalanges phalanx phalanxes
phalli phallic phallus phantasm phantasmagoria phantasmagorias
phantasms pharaoh pharaohs pharmacologist pharmacologists pharmacology
pharmacopoeia pharmacopoeias pharyngeal pharynges pharynx
phenobarbital phenotype pheromone pheromones phial phialled phialling
phials philander philandered philanderer philanderers philandering
philanders philanthropically philatelic philatelist philatelists
philately philharmonic philharmonics philippic philippics philistine
philistines philodendron philodendrons philological philologist
philologists philology philosophic philosophically philter philters
phlebitis phlegmatically phloem phlox phobic phobics phoebe phoebes
phoenixes phoneme phonemes phonemic phonemics phonetically phonetician
phoneticians phonic phonically phoniness phonological phonologist
phonologists phonology phooey phooeys phosphate phosphates phosphoric
phosphors photoelectric photographically photojournalism
photojournalist photojournalists photosensitive phototypesetting
phrasal phrasings phrenology phyla phylae phylum physicked physicking
physiognomies physiognomy physiologist physiologists physiotherapist
physiotherapists physiotherapy pianissimo pianissimos pianoforte
pianofortes piazza piazzas pica picaresque picaresques picayune
piccalilli picker pickerel pickerels pickers pickings picnicker
picnickers pictograph pictographs pictorially pidgin pidgins piebald
piebalds pied pieing piercingly piercings piffle piggier piggies
piggiest piggishness piggy piglet piglets pigmentation pigskin
pigskins pigsties pigsty piing piker pikers pilaf pilafs pilaster
pilasters pilchard pilchards pileup pileups pilferer pilferers pilings
pillbox pillboxes pillion pillioned pillioning pillions pilloried
pillories pillory pillorying pilothouse pilothouses pimento pimentos
pimiento pimientos pimp pimped pimpernel pimpernels pimping pimps
pinafore pinafores pinball pincer pincers pinfeather pinfeathers ping
pinged pinging pings pinhead pinheads pinhole pinholes pinkeye pinkie
pinkies pinkish pinnate pinochle pinprick pinpricked pinpricking
pinpricks pinstripe pinstriped pinstripes pinto pintos pinup pinups
pinwheel pinwheeled pinwheeling pinwheels piously pip piper pipers
pipit pipits pipped pippin pipping pippins pips pipsqueak pipsqueaks
piquancy piquant piratical piscatorial piss pissed pisses pissing
pistil pistillate pistils pita pitchblende pitchman pitchmen pith
pithily pitiable pitiably pitilessly piton pitons pituitaries
pituitary pixel pixels pizazz pizzeria pizzerias pizzicati pizzicato
placation placebo placebos placeholder placements placental placentals
placer placers placidity placket plackets plainclothes plainclothesman
plainclothesmen plainness plaint plaintively plaints plait plaited
plaiting plaits plangent plannings plantings plasterboard plasterer
plasterers plasticity plateful platefuls platelet platelets platen
platens platitudinous platonic platypus platypuses plaudit plaudits
playact playacted playacting playacts playbacks playbill playbills
playboy playboys playgoer playgoers playoff playoffs playroom
playrooms pleader pleaders pleasantness pleasingly pleasurably
plebeian plebeians plebiscite plebiscites plectra plectrum plectrums
plenaries plenary plenipotentiaries plenipotentiary plenitude
plenitudes plenteous pleurisy plexus plexuses pliability pliancy
plinth plinths plodder plodders ploddings plottered plottering plover
plovers pluckier pluckiest pluckiness plumpness plunderer plunderers
plunk plunked plunking plunks pluperfect pluperfects pluralism
pluralistic pluralities plushier plushiest plushy plutocracies
plutocracy plutocrat plutocratic plutocrats pneumatically pock pocked
pocketful pocketfuls pocketknife pocketknives pocking pocks podiatrist
podiatrists podiatry poesied poesies poesy poesying poetess poetesses
poetically pogrom pogromed pogroming pogroms poi poignantly pointier
pointiest pointillism pointillist pointillists pointlessness pointy
poisoner poisoners poisonings poisonously pol polarities polecat
polecats polemical polestar polestars policyholder policyholders
poliomyelitis polisher polishers politesse politic politicked
politicking politico politicos polities polity polliwog polliwogs
polluter polluters polonaise polonaises polonium pols poltergeist
poltergeists poltroon poltroons polyester polyesters polyethylene
polygamist polygamists polyglot polyglots polygonal polygraph
polygraphed polygraphing polygraphs polyhedron polyhedrons polymath
polymaths polymer polymeric polymers polymorphic polyphonic polyphony
polystyrene polysyllabic polysyllable polysyllables polytechnics
polytheism polytheist polytheistic polytheists polythene
polyunsaturated pomade pomaded pomades pomading pommel pommels
pompadour pompadoured pompadours pompom pompoms pomposity pompously
pompousness ponderously pone pones poniard poniards pontiff pontiffs
pontifical pontificate pontificated pontificates pontificating
ponytail ponytails pooch pooched pooches pooching pooh poohed poohing
poohs poorhouse poorhouses popes popgun popguns popinjay popinjays
poplin popover popovers poppa poppas poppycock populism populist
populists porcine porn porno pornographer pornographers porosity
porphyry porringer porringers portage portaged portages portaging
portcullis portcullises portentous portentously porterhouse
porterhouses portliness portmanteau portmanteaus portraitist
portraitists portraiture poser posers poseur poseurs posh poshed
posher poshes poshest poshing posit posited positing positron
positrons posits posse posses possessively possessiveness postcodes
postdate postdated postdates postdating postdoc postdocs postdoctoral
posthaste postlude postludes postmistress postmistresses postmodern
postmortem postmortems postnatal postoperative postpaid postpartum
postwar potable potables potash potbellied potbellies potbelly
potboiler potboilers potentate potentates potentialities potentiality
potentials potful potfuls potholder potholders pothook pothooks
potluck potlucks potpie potpies potpourri potpourris potsherd
potsherds potshot potshots pottage pottier potties pottiest potty
poultice poulticed poultices poulticing powerboat powerboats
powerlessly powerlessness pox poxed poxes poxing practicability
practicably pragmatically pragmatist pragmatists praiseworthiness
praline pralines prancer prancers prankster pranksters prate prated
prates pratfall pratfalls prating preachier preachiest preachy
prearrange prearranged prearrangement prearranges prearranging precept
preceptor preceptors precepts preciosity preciously preciousness
precipitant precipitants precipitately precipitously preciseness
preclusion precociously precociousness precocity precognition
precondition preconditioned preconditioning preconditions predate
predated predates predating predecease predeceased predeceases
predeceasing predestine predestined predestines predestining
predetermination predetermine predetermined predetermines
predetermining predication predicative predictability predictive
predilection predilections predispose predisposed predisposes
predisposing preeminently preemption preemptive preexist preexisted
preexisting preexists prefabricate prefabricated prefabricates
prefabricating prefabrication prefatory prefects prefecture
prefectures preferentially preferment prefigure prefigured prefigures
prefiguring preheat preheated preheating preheats prehensile
prehistory prejudge prejudged prejudges prejudging prelate prelates
premarital premeditate premeditated premeditates premeditating
premenstrual premonitory preoccupation preoccupations preordain
preordained preordaining preordains prep prepackage prepackaged
prepackages prepackaging preparedness prepayment prepayments
preponderant preponderate preponderated preponderates preponderating
prepossess prepossessed prepossesses prepossessing preposterously
prepped preppier preppies preppiest prepping preppy preps prequel
prequels prerecord prerecorded prerecording prerecords preregister
preregistered preregistering preregisters preregistration presage
presaged presages presaging preschool preschooler preschoolers
preschools prescience prescient prescriptive presentiment
presentiments preserver preservers preses preset presets presetting
preshrank preshrink preshrinking preshrinks preshrunk pressman
pressmen prestos presumable presumptive presumptuously
presumptuousness presupposition presuppositions preteen preteens
preterit preterits preternatural prettified prettifies prettify
prettifying prettily prettiness prevaricate prevaricated prevaricates
prevaricating prevarication prevarications prevaricator prevaricators
preventative preventatives prewar pricey pricier priciest priestlier
priestliest priestly prig priggish prigs primacy primitively primness
primogeniture primordial primordials princelier princeliest princely
prioress prioresses priories priory prismatic prissier prissies
prissiest prissiness prissy pristine prithee prithees privateer
privateers privet privets prizefight prizefighter prizefighters
prizefighting prizefights proactive probate probated probates
probating probationaries probationary probationer probationers probity
problematical problematically problematics proboscis proboscises
proclivities proclivity procrastinator procrastinators procreate
procreated procreates procreating procreation procreative proctor
proctored proctoring proctors procurator procurators procurer
procurers prodigality prodigiously productively productiveness prof
profanation profanations profanely professionalism professorial
professorship professorships profitability profitably profligacy
profligate profligates proforma profs progenitor progenitors
progesterone prognostic prognosticate prognosticated prognosticates
prognosticating prognostication prognostications prognosticator
prognosticators prognostics programmables prohibitionist
prohibitionists prohibitory projectionist projectionists prolifically
prolix prolixity prolongation prolongations promiscuously promisingly
promissory promo promos promoter promoters promotional prompters
promptings promulgation proneness pronged pronghorn pronghorns
pronounceable pronto proofreader proofreaders propagandist
propagandists propane propellant propellants propertied prophetess
prophetesses prophetically prophylactic prophylactics prophylaxis
propinquity propitiate propitiated propitiates propitiating
propitiation propitiatory propitious proportionately proposer propound
propounded propounding propounds proprietorship proprietress
proprietresses propulsive prorate prorated prorates prorating prosaic
prosaically proscenium prosceniums proscribe proscribed proscribes
proscribing proscription proscriptions proselyte proselyted proselytes
proselyting prosier prosiest prosodies prosody prosperously prostate
prostates prostheses prosthesis prosthetic prosthetics prostration
prostrations prosy protean proteans protectively protectiveness
protectorate protectorates protestants protestation protestations
protester protesters protoplasm protoplasmic prototyping protozoa
protozoan protozoans protraction protuberance protuberances
protuberant provender provendered provendering provenders proverbially
provident providential providentially providently providers
provincialism provocatively provost provosts prudential prudentials
prudently prudery prudishly prurience prurient psalmist psalmists
pshaw pshaws psoriasis psst pssts psychical psychically psycho
psychobabble psychogenic psychokinesis psychopathic psychopathics
psychos psychosomatic psychosomatics psychotherapist psychotherapists
psychotics ptarmigan ptarmigans pterodactyl pterodactyls ptomaine
ptomaines pubbed pubbing pubescence pubescent pubic publican publicans
publicist publicists publishable pubs puckish puerile puerility
puffball puffballs puffin puffiness puffins pug pugilism pugilist
pugilistic pugilists pugnaciously pugnacity pugs pulchritude pullback
pullbacks puller pullers pullet pullets pullout pullouts pulpier
pulpiest pulpy pulsar pulsars punchier punchiest punchy punctilious
punctiliously punctually pungency pungently punster punsters pupa
pupae pupal puppeteer puppeteers puppetry purblind purchasable
purebred purebreds pureness purgative purgatives purgatorial
purgatories purifier purifiers purism purist purists puritan
puritanically puritanism puritans purl purled purling purloin
purloined purloining purloins purls purplish purportedly purposefully
purposeless purposely purser pursers pursuance pursuant pursuer
pursuers purulence purulent purvey purveyed purveying purveyors
purveys purview pushcart pushcarts pushiness pusillanimity
pusillanimous pussycat pussycats pussyfoot pussyfooted pussyfooting
pussyfoots pustule pustules putrefaction putrefied putrefies putrefy
putrefying putrescence putrescent putsch putsches puzzlement puzzler
puzzlers pygmies pygmy pylon pylons pyramidal pyrite pyromania
pyromaniac pyromaniacs pyrotechnic pyrotechnics pyx pyxed pyxes pyxing
q quackery quad quadrangular quadraphonic quadraphonics quadrature
quadrennial quadriceps quadricepses quadrille quadrilled quadrilles
quadrilling quadriplegia quadriplegic quadriplegics quadruplicate
quadruplicated quadruplicates quadruplicating quads quaff quaffed
quaffing quaffs quahog quahogs quaintly quaintness qualitatively
quanta quantified quantifiers quantifies quantifying quarks
quarterdeck quarterdecks quarterfinal quarterfinals quartermaster
quartermasters quarto quartos quasar quasars quasi quatrain quatrains
quavery queasily queasiness queerly queerness querulous querulously
questionably questioner questioners questioningly quibbler quibblers
quiches quickie quickies quicklime quickness quicksilver quid quids
quiescence quiescent quietness quietude quietus quietuses quilter
quilters quince quinces quintessential quintuple quintupled quintuples
quintupling quire quires quirkier quirkiest quisling quislings
quixotic quizzically quoit quoited quoiting quoits quondam quotable
quoth quotidian r rabbinate rabbinical racecourse racecourses
racehorse racehorses raceme racemes racers raceway raceways racily
raciness raconteur raconteurs racquetball racquetballs radially
radiantly radicalism radiogram radiograms radioisotope radioisotopes
radiologist radiologists radiology radiotelephone radiotelephones
radiotherapist radiotherapists radiotherapy radon raffia raffish raga
ragas raggedier raggediest raggedly raggedness raggedy raglan raglans
ragout ragouts ragtag ragtags ragweed railleries raillery raiment
rainmaker rainmakers rajah rajahs rakish rakishly rakishness ramblings
rambunctious rambunctiousness ramified ramifies ramify ramifying
rampantly rampart ramparts rancidity rancorously randier randiest
randy rangier rangiest ranginess rangy rankings rankness ranter
rapacious rapaciously rapaciousness rapacity rapier rapiers rapine
rapper rappers rapprochement rapprochements rapscallion rapscallions
rarefied rarefies rarefy rarefying rareness rascally rashers rashness
raspier raspiest raspy ratchet ratcheted ratcheting ratchets
rathskeller rathskellers rationalism rationalist rationalistic
rationalists rattan rattans rattier rattiest rattletrap rattletraps
rattlings rattrap rattraps raucousness raunchier raunchiest
raunchiness raunchy ravioli raviolis ravishingly ravishment rawboned
rawhide rawness razz razzed razzes razzing reachable reactivate
reactivated reactivates reactivating reactivation readabilities
readerships readjustment readjustments readmit readmits readmitted
readmitting readout readouts reaffirm reaffirmed reaffirming reaffirms
reagent reagents realign reallocation reamer reamers reanimate
reanimated reanimates reanimating reappearance reappearances reapplied
reapplies reapply reapplying reappoint reappointed reappointing
reappointment reappoints reapportion reapportioned reapportioning
reapportionment reapportions reappraisal reappraisals reappraise
reappraised reappraises reappraising rearm rearmament rearmed rearming
rearmost rearms rearward rearwards reasonableness reassemble
reassembled reassembles reassembling reassert reasserted reasserting
reasserts reassess reassessed reassesses reassessing reassessment
reassessments reassign reassigned reassigning reassigns reassuringly
reawaken reawakened reawakening reawakens rebelliously rebelliousness
rebroadcast rebroadcasting rebroadcasts rebus rebuses recalcitrance
recantation recantations recapitulate recapitulated recapitulates
recapitulating recapitulation recapitulations recast recasting recasts
receivable receivables receivership receptively receptiveness
receptivity receptor receptors recessional recessionals recessive
recessives recheck rechecked rechecking rechecks recherche recidivism
recidivist recidivists reciprocally reciprocation reciprocity
recitative recitatives reckonings reclassified reclassifies reclassify
reclassifying recliner recliners reclusive recombination recombine
recombined recombines recombining recommence recommenced recommences
recommencing recompilation reconcilable recondite reconfiguration
reconquer reconquered reconquering reconquers reconsideration
reconstitute reconstituted reconstitutes reconstituting reconvene
reconvened reconvenes reconvening recopied recopies recopy recopying
recreant recreants recriminate recriminated recriminates recriminating
recrimination recriminations recrudescence recruiter recruiters
rectifiable rectification rectifications rectifier rectifiers
rectilinear rectitude rectories rectory recumbent recuperative
recyclable recyclables redbreast redbreasts redcap redcaps redcoat
redcoats reddish redecorate redecorated redecorates redecorating
rededicate rededicated rededicates rededicating redeemer redeemers
redeploy redeployed redeploying redeployment redeploys redevelop
redeveloped redeveloping redevelopment redevelopments redevelops
redheaded rediscovery redistrict redistricted redistricting
redistricts redneck rednecks redness redolence redolent redouble
redoubled redoubles redoubling redoubt redoubtable redoubted
redoubting redoubts redound redounded redounding redounds redrafted
redrafting redrafts redrawing redrawn redraws redrew redskin redskins
redundantly redwood redwoods reedier reediest reeducate reeducated
reeducates reeducating reeducation reedy reefer reefers reelection
reelections reemerge reemerged reemerges reemerging reenact reenacted
reenacting reenactment reenactments reenacts reenlist reenlisted
reenlisting reenlists reenter reentered reentering reenters reentries
reentry reestablish reestablished reestablishes reestablishing
reevaluate reevaluated reevaluates reevaluating reeve reeves reeving
reexamine reexamined reexamines reexamining ref refashion refashioned
refashioning refashions refectories refectory referent referential
referral referrals reffed reffing refile refiled refiles refiling
refillable refinance refinanced refinances refinancing refiner
refiners refinish refinished refinishes refinishing refit refits
refitted refitting reflexively refocus refocused refocuses refocusing
reforest reforestation reforested reforesting reforests reformatories
reformatory reformulate reformulated reformulates reformulating
refract refracted refracting refractories refractory refracts
refresher refreshers refreshingly refrigerant refrigerants refs
refulgence refulgent refundable refurbishments refurnish refurnished
refurnishes refurnishing refutations regally regencies regency
regenerative reggae regicide regicides regimentation regionalism
regionalisms regionally registrant registrants regressive regretfully
regroup regrouped regrouping regroups regulator regulators regulatory
regurgitation rehab rehabbed rehabbing rehabs reheat reheated
reheating reheats rehire rehired rehires rehiring reimpose reimposed
reimposes reimposing reinsert reinserted reinserting reinserts
reinterpret reinterpretation reinterpretations reinterpreted
reinterpreting reinterprets reinvent reinvented reinventing reinvents
reinvest reinvested reinvesting reinvests reissue reissued reissues
reissuing rejoicings rekindle rekindled rekindles rekindling relabel
relabels relaxant relaxants relearn relearned relearning relearns
relegation relentlessness relevancy relevantly relinquishment remades
remaindered remand remanded remanding remands remarriage remarriages
remarried remarries remarry remarrying rematch rematches remediable
remissness remonstrance remonstrances remonstrate remonstrated
remonstrates remonstrating remorsefully remorselessly remoteness
remount remounted remounting remounts remover removers remunerative
renaissances renal renascence renascences renascent renderings
renegotiate renegotiated renegotiates renegotiating rennet renovator
renovators renter renters renumber renumbered renumbering renumbers
reoccupied reoccupies reoccupy reoccupying reoccur reoccurred
reoccurring reoccurs reorder reordered reordering reorders rep
repackage repackaged repackages repackaging repaint repainted
repainting repaints repairable repairman repairmen reparations
repartee reparteed reparteeing repartees repast repasted repasting
repasts repatriation repayable repeatably repeater repeaters
repertories repertory rephrased rephrases rephrasing replaceable
replayed replaying replays replenishment repletion replications
reportage reposeful repossess repossessed repossesses repossessing
repossession repossessions reprehend reprehended reprehending
reprehends reprehensibly representational reprise reprises reprising
reproachful reproachfully reprobate reprobates reprocess reprocessed
reprocesses reprocessing reproducible reproducibles reproof reproofed
reproofing reproofs reps reptilian reptilians republicanism republish
republished republishes republishing repulsively repulsiveness
reputably requester requiems requital requite requited requites
requiting reran rerun rerunning reruns resales rescission resell
reselling resells resend resentfully reservedly reservist reservists
resettle resettled resettles resettling reshuffled reshuffles
reshuffling residencies residency resignedly resiliency resinous
resister resisters resold resoluteness resonantly resonate resonated
resonates resonating resonator resonators resoundingly resourcefully
respell respelled respelling respells respire respired respires
respiring resplendence resplendently respondent respondents
responsively responsiveness restate restated restatement restatements
restates restating restaurateur restaurateurs restfully restfulness
restively restiveness restock restocked restocking restocks
restorative restoratives restorer restorers restrictively restroom
restrooms restructurings restudied restudies restudy restudying
resupplied resupplies resupply resupplying resurgent resuscitator
resuscitators retake retaken retakes retaking retaliatory retardant
retardants retardation retell retelling retells retentive
retentiveness rethinking rethinks rethought retinal retinue retinues
retiree retirees retold retook retool retooled retooling retools
retouch retouched retouches retouching retractable retrain retrained
retraining retrains retread retreaded retreading retreads retrench
retrenched retrenches retrenching retrenchment retrenchments retrial
retrials retributive retried retrievable retroactively retrod
retrodden retrofit retrofits retrofitted retrofitting retrograded
retrogrades retrograding retrogress retrogressed retrogresses
retrogressing retrogression retrogressive retrorocket retrorockets
retrospection retrying returnee returnees retyped retypes retyping
reunification reunified reunifies reunify reunifying reupholster
reupholstered reupholstering reupholsters reusable revaluation
revaluations revalue revalued revalues revaluing revealings reveille
reverend reverends reverential revilement reviler revilers revivalist
revivalists revivification revivified revivifies revivify revivifying
revocable revocation revocations revoltingly revolutionist
revolutionists rewindable rewinding rewinds rewire rewired rewires
rewiring reword reworded rewording rewords reworked reworking reworks
rewound rhapsodic rhea rheas rheostat rheostats rhetorically
rhetorician rhetoricians rheum rheumatic rheumatics rheumier rheumiest
rheumy rhinestone rhinestones rhizome rhizomes rho rhodium rhomboid
rhomboids rhombus rhombuses rhythmical rhythmically ribald ribaldry
riboflavin rick ricked rickets ricking ricks ricotta ridgepole
ridgepoles ridiculousness riff riffed riffing riffle riffled riffles
riffling riffraff riffs rifleman riflemen rightfulness rightist
rightists rigidness rigmarole rigmaroles rill rilled rilling rills
rime rimed rimes riming ringer ringers ringmaster ringmasters ringside
ripely riposted ripostes riposting rippers ripsaw ripsaws risible
risibles riskiness ritualism ritualistic ritually ritzier ritziest
ritzy riven riverbed riverbeds riverfront riverfronts riverside
riversides rivulet rivulets roadbed roadbeds roadhouse roadhouses
roadkill roadrunner roadrunners roadshow roadster roadsters roadway
roadways roadwork roadworthy roamer roamers roan roans roaster
roasters robotic robotics robustly rocketry rockiness rococo roebuck
roebucks roentgen roentgens roger rogered rogering rogers roguery
roguishly roil roiled roiling roils roister roistered roisterer
roisterers roistering roisters rollback rollbacks rollerskating
rollick rollicked rollicking rollicks romaine romanticism romanticist
romanticists romper rompers rood roods rooftop rooftops rookeries
rookery roomer roomers roomful roomfuls roominess rootless roseate
rosebud rosebuds rosebush rosebushes rosette rosetted rosettes
rosetting rosewood rosewoods rosily rosin rosined rosiness rosining
rosins rotational rotogravure rotogravures rottenness rotundity
rotundness roue roues roughneck roughnecked roughnecking roughnecks
roughshod roundelay roundelays roundhouse roundhouses roundish roundly
roundup roundups roundworm roundworms roustabout roustabouts rove
roved rover rovers roves roving rowdyism rowel rowels rower rowers
royalist royalists rs rubberier rubberiest rubberneck rubbernecked
rubbernecking rubbernecks rubbery rubbishy rubdown rubdowns rube
rubella rubes rubicund rubrics rucksacks ruddiness rudiment rudiments
ruefully ruggedly ruggedness ruination ruinously rumba rumbaed
rumbaing rumbas rumblings ruminant ruminants rumination ruminations
rumpus rumpuses runabout runabouts runaround runarounds runnel runnels
runoff runoffs rupee rupees rusk rusks russet russeted russets
russetting rustically rusticity rustiness rustproof rustproofed
rustproofing rustproofs rutabaga rutabagas s sabbaticals sable sabled
sables sabling saccharin saccharine sacerdotal sachem sachems sachet
sachets sackcloth sackful sackfuls sacramental sacredly sacredness
sacristan sacristans sacristies sacristy sacrosanct saddlebag
saddlebags sadistically safeness safflower safflowers sagacious
sagacity sago saguaro saguaros sahib sahibs sailboard sailboarded
sailboarding sailboards sailcloth sailfish sailfishes sainthood
saintliness saith saiths salaam salaamed salaaming salaams salacious
salaciously salaciousness salamander salamanders salesclerk
salesclerks salesgirl salesgirls salesmanship saline salines salinity
salivary salivation sallied sallies sallying salmonella salmonellae
salsa salsas saltcellar saltcellars saltine saltines saltiness
saltshaker saltshakers saltwater salubrious salutary salvageable
salver salvers salvo salvos samba sambaed sambaing sambas samovar
samovars sampan sampans samplers samurai sanctification
sanctimoniously sanctum sanctums sandalwood sandbank sandbanks sandbar
sandbars sandblast sandblasted sandblaster sandblasters sandblasting
sandblasts sandbox sandboxes sandcastle sandcastles sander sanders
sandhog sandhogs sandiness sandlot sandlots sandpiper sandpipers
sanely sangfroid sanguinary sanguine sanguined sanguines sanguining
sans sapience sapient sappier sappiest sappy saprophyte saprophytes
sapsucker sapsuckers sarcoma sarcomas sarcophagi sarcophagus sardonic
sardonically sarong sarongs sarsaparilla sarsaparillas sartorial
sartorially sashay sashayed sashaying sashays sass sassafras
sassafrases sassed sasses sassing satanically satanism sate sated
sateen sates satiate satiated satiates satiating satiety sating
satinwood satinwoods satiny satirically satrap satraps saturnine satyr
satyrs saucily sauciness savageness savanna savannas savant savants
savers sawhorse sawhorses sawmill sawmills sawyer sawyers sax saxes
saxophonist saxophonists scabbard scabbards scabbier scabbiest scabby
scabies scabrous scad scads scalawag scalawags scaldings scalene
scallion scallions scalper scalpers scam scammed scamming scamp scampi
scamps scams scandalmonger scandalmongers scandalously scansion
scantily scantiness scapula scapulae scarab scarabs scarceness
scarified scarifies scarify scarifying scat scathingly scatological
scats scatted scatting scavenge scavenged scavenges scavenging
scenically schedulers schema schematic schematically schematics
schemings scherzo scherzos schism schismatic schismatics schisms
schist schizoid schizoids schizophrenics schlemiel schlemiels schlep
schlepped schlepping schleps schlock schlockier schlockiest schlocky
schmaltz schmaltzier schmaltziest schmaltzy schmooze schmoozed
schmoozes schmoozing schmuck schmucks schnapps schnauzer schnauzers
scholastically schoolbook schoolbooks schooldays schoolgirl
schoolgirls schoolhouse schoolhouses schoolmarm schoolmarms
schoolmaster schoolmasters schoolmate schoolmates schoolmistress
schoolmistresses schoolroom schoolrooms schoolwork schoolyard
schoolyards schuss schussed schusses schussing schwa schwas sciatic
sciatica scimitar scimitars scintilla scintillas scintillate
scintillated scintillates scintillating scintillation scion scions
sclerosis sclerotic sclerotics scofflaw scofflaws scoldings scoliosis
sconce sconces scone scones scorcher scorchers scoreboard scoreboards
scorecard scorecards scoreless scorers scornfully scotched scotches
scotching scoutmaster scoutmasters scow scows scrabbled scrabbles
scrabbling scragglier scraggliest scraggly scrambler scramblers
scraper scrapers scrappier scrappiest scrappy scratchiness screechier
screechiest screechy screenings screenplay screenplays screenwriter
screenwriters screwball screwballs scribbler scribblers scrimmage
scrimmaged scrimmages scrimmaging scrimp scrimped scrimping scrimps
scrimshaw scrimshawed scrimshawing scrimshaws scrip scrips scriptural
scrod scrofula scrooge scrooges scrota scrotum scrounger scroungers
scrubber scrubbers scrubbier scrubbiest scrubby scrumptious scrunch
scrunched scrunches scrunching scuba scubaed scubaing scubas scud
scudded scudding scuds scull sculled sculleries scullery sculling
scullion scullions sculls sculpt sculpted sculpting sculpts sculptural
scumbag scumbags scummier scummiest scummy scupper scuppered
scuppering scuppers scurf scurfier scurfiest scurfy scurrilously
scurvier scurviest scurvy scuttlebutt scuzzier scuzziest scuzzy seabed
seabeds seabird seabirds seaboard seaboards seacoast seacoasts
seafarer seafarers seagoing sealant sealants sealer sealers sealskin
seamanship seamier seamiest seamless seamy seance seances seaplane
seaplanes searcher searchers searchingly seascape seascapes seasonally
seaward seawards seaway seaways seaworthier seaworthiest seaworthy
sebaceous secessionist secessionists seclusive secondhand secretariat
secretariats secretively secretiveness secs sectarian sectarianism
sectarians sectional sectionalism sectionals secularism sedately
sedation sedge sedimentation sedition seditious seducer seducers
seductively sedulous seediness seedless seeings seeker seekers
seemlier seemliest seemliness seemly seers seersucker seethings
segregationist segregationists segue segued segueing segues seismic
seismically seismograph seismographic seismographs seismologist
seismologists seismology selectivity selectman selectmen selenium
selfishly selfless selflessly selflessness selfsame sellout sellouts
seltzer selvage selvaged selvages selvaging semaphore semaphored
semaphores semaphoring semi semiannual semiautomatic semiautomatics
semicircular semiconscious semifinalist semifinalists semimonthlies
semimonthly seminal seminarian seminarians semipermeable semiprecious
semiprivate semiprofessional semiprofessionals semis semiskilled
semitone semitones semitrailer semitrailers semitropical semiweeklies
semiweekly senatorial senders senna sensationalist sensationalists
sensationally senselessly senselessness sensitively sensitiveness
sensually sensuously sensuousness sententious sentimentalism
sentimentalist sentimentalists sentimentally sentinel sentinels sepal
sepals separable separatism separatist separatists sepia sepsis septa
septet septets septic septics septuagenarian septuagenarians septum
sepulchral sequencers sequester sequestered sequestering sequesters
sequestration sequestrations sequined sequoia sequoias seraglio
seraglios serape serapes seraph seraphic seraphs sere sered
serendipitous serendipity serenely sereneness serer seres serest serf
serfdom serfs serge serially sering serous serpentine serrated serried
servicewoman servicewomen servility servings servo servomechanism
servomechanisms servos sesame sesames settee settees setup setups
seventieth seventieths severally sewerage sexagenarian sexagenarians
sexier sexiest sexiness sexists sexless sexpot sexpots sextant
sextants sextet sextets sexton sextons sh shabbiness shad shadiness
shadings shadowbox shadowboxed shadowboxes shadowboxing shads shag
shagged shagginess shagging shags shah shahs shakedown shakedowns
shaker shakers shakeup shakeups shakily shakiness shale shallot
shallots shallowness shalt shaman shamans shambled shambling
shamefaced shamefulness shamelessly shandy shanghai shanghaied
shanghaiing shanghais shank shanks shantung shantytown shantytowns
shapeless shapelessly shapelessness shapeliness shard shards
sharecropper sharecroppers sharkskin sharpers sharpshooter
sharpshooters shat shatterproof shavings shearer shearers sheathings
shebang shebangs sheepdog sheepdogs sheepfold sheepfolds sheepishness
sheepskin sheepskins sheeting sheikdom sheikdoms shekel shekels
shellac shellacked shellacking shellacs shenanigan shenanigans
shepherdess shepherdesses shibboleth shibboleths shiftily shiftiness
shiftlessness shill shilled shillelagh shillelaghs shilling shillings
shills shim shimmed shimmery shimmied shimmies shimming shimmy
shimmying shims shinbone shinbones shindig shindigs shiner shiners
shininess shinnied shinnies shinny shinnying shipboard shipboards
shipbuilder shipbuilders shipbuilding shipload shiploads shipmate
shipmates shipper shippers shipwright shipwrights shipyard shipyards
shires shirker shirkers shirr shirred shirring shirrings shirrs
shirtsleeve shirtsleeves shirttail shirttails shirtwaist shirtwaists
shit shits shittier shittiest shitting shitty shivery shocker shockers
shockingly shockproof shoddily shoddiness shoehorn shoehorned
shoehorning shoehorns shoemaker shoemakers shoeshine shoeshines shogun
shoguns shooter shooters shootings shootout shootouts shoplift
shoplifted shoplifting shoplifts shoptalk shopworn shoreline
shorelines shortbread shortcake shortcakes shortchange shortchanged
shortchanges shortchanging shortcut shortcuts shortcutting shortfalls
shorthorn shorthorns shortish shortsighted shortsightedly
shortsightedness shortstop shortstops shortwave shortwaves shovelful
shovelfuls showbiz showboat showboated showboating showboats showerier
showeriest showery showgirl showgirls showily showiness showmanship
showoff showoffs showpiece showpieces showplace showplaces showroom
showrooms shredder shredders shrewdly shrewish shrift shrike shrikes
shrillness shrilly shrinkable shrive shrived shriven shrives shriving
shrubbier shrubbiest shrubby shtick shticks shuckses shuffleboard
shuffleboards shuffler shufflers shush shushed shushes shushing
shutdowns shuteye shutout shutouts shutterbug shutterbugs shuttlecock
shuttlecocked shuttlecocking shuttlecocks shyly shyster shysters
sibilant sibilants sibyl sibyls sickbed sickbeds sickeningly sidearm
sidearms sidebar sidebars sideboard sideboards sideburns sidecar
sidecars sidekick sidekicks sidelight sidelights sidereal sidesaddle
sidesaddles sidesplitting sidestroke sidestroked sidestrokes
sidestroking sideswipe sideswiped sideswipes sideswiping sidewall
sidewalls sierras sifter sifters sightings sightread sightseeing
sightseer sightseers signally signatories signatory signboard
signboards signers signet signets signification significations
signings silage silaged silages silaging silencer silencers silica
silicate silicates siliceous silicone silicosis silkier silkies
silkiest silkworm silkworms silky silverfish silverfishes simian
simians simpatico simper simpered simpering simpers simpleness
simpleton simpletons simplifications simulators simulcast simulcasted
simulcasting simulcasts sinecure sinecures sinfully sinfulness
singleton singletons singsong singsonged singsonging singsongs
singularities sinkable sinker sinkers sinkhole sinkholes sinuous
sinusitis sinusoidal sirocco siroccos sis sisal sises sitar sitars
sitcom sitcoms sittings skateboarder skateboarders skedaddle
skedaddled skedaddles skedaddling skeet skeletal skeptically skier
skiers skiff skiffed skiffing skiffs skillfully skimpiness skinhead
skinheads skinless skinniness skintight skitter skittered skittering
skitters skivvied skivvies skivvy skivvying skulduggery skullcap
skullcaps skycap skycaps skydive skydived skydiver skydivers skydives
skydiving skyjack skyjacked skyjacker skyjackers skyjacking skyjacks
skylark skylarked skylarking skylarks skyward skywards skywriter
skywriters skywriting slackers slackly slackness slags slalom slalomed
slaloming slaloms slammer slammers slanderer slanderers slanderous
slangier slangiest slangy slantwise slapdash slapdashes slaphappier
slaphappiest slaphappy slather slathered slathering slathers slattern
slatternly slatterns slaughterer slaughterers slaughterhouse
slaughterhouses slaver slavered slavering slavers slavishly slaw
slayer slayers slayings sleaze sleazes sleazily sleaziness sledge
sledged sledgehammered sledgehammering sledgehammers sledges sledging
sleekly sleekness sleepily sleepiness sleeplessness sleepwalk
sleepwalked sleepwalker sleepwalkers sleepwalking sleepwalks sleepwear
sleepyhead sleepyheads sleetier sleetiest sleety slenderness sleuth
sleuths slicer slicers slickers slickly slickness slider sliders
slightness slimness slinkier slinkiest slinky slipcover slipcovers
slipknot slipknots slippage slippages slipperiness slithery sloe sloes
sloop sloops sloppily sloppiness slothfulness slouchier slouchiest
slouchy slough sloughed sloughing sloughs sloven slovenliness slovens
slowdown slowdowns slowpoke slowpokes slue slued slues sluggard
sluggards slugger sluggers sluggishly sluggishness sluing slumberous
slumlord slumlords slurp slurped slurping slurps slushier slushiest
slushy sluttish slyly smacker smackers smallness smarmier smarmiest
smarmy smarten smartened smartening smartens smartness smelter
smelters smilingly smirch smirched smirches smirching smithies smithy
smoggier smoggiest smoggy smokehouse smokehouses smokeless smokiness
smooch smooched smooches smooching smorgasbord smorgasbords smudgier
smudgiest smudgy smugness smuttier smuttiest smutty snaffle snaffled
snaffles snaffling snafu snafus snakebite snakebites snakier snakiest
snaky snapdragon snapdragons snapper snappers snappish snazzier
snazziest snazzy sneeringly snifter sniftered sniftering snifters
snippier snippiest snippy snit snits snivel snivels snobbier snobbiest
snobbishness snobby snooper snoopers snoopier snoopiest snoopy snoot
snootiness snoots snorer snorers snorkeler snorkelers snottier
snottiest snotty snowboard snowboarded snowboarding snowboards
snowbound snowdrop snowdrops snowman snowmen snowmobile snowmobiled
snowmobiles snowmobiling snowshoe snowshoeing snowshoes snowsuit
snowsuits snuffbox snuffboxes snuffers snuffle snuffled snuffles
snuffling soakings soapbox soapboxed soapboxes soapboxing soapiness
soapstone soapsuds soberly soberness sobriquet sobriquets sociability
sociably socialistic socialite socialites societal socioeconomic
sociopath sociopaths sodomite sodomites softener softeners softhearted
softies softwood softwoods softy soggily sogginess soiree soirees sol
solaria solarium soldierly solecism solecisms solenoid solenoids
solicitation solicitations solicitously solicitude solidification
solidness soliloquies soliloquy sols solstice solstices solubility
solvable solvency solver solvers sombre sombrero sombreros someway
someways somnambulism somnambulist somnambulists somnolence somnolent
sonar sonars songbird songbirds songster songsters songwriter
songwriters sonnies sonny sonority sooth soothingly soothings sooths
soothsayer soothsayers sophism sophist sophistries sophists sophomoric
soporific soporifics soppier soppiest soppings soppy sorbet sorbets
sordidly sordidness sorehead soreheads soreness sorghum sorrel sorrels
sorrowfully sorter sorters sortie sortied sortieing sorties sot sots
sottish sough soughed soughing soughs soulful soulfully soulfulness
soulless soundings soundless soundlessly soundness soundtracks soupcon
soupcons soupier soupiest soupy sourdough sourdoughs sourly sourness
sourpuss sourpusses souse soused souses sousing southbound
southeasterly southeastward southernmost southwards southwester
southwesterly southwesters southwestward soviet soviets sower sowers
soy soybean soybeans spaceflight spaceflights spaceman spacemen
spacesuit spacesuits spacewalk spacewalked spacewalking spacewalks
spacey spacier spaciest spaciously spaciousness spadeful spadefuls
spadework spake spandex sparely spareness spareribs sparingly
sparseness sparsity spartan spasmodically spastic spastics spates
spatially speakeasies speakeasy spec specie specifiable specifiers
speciously speckle speckled speckles speckling specs spectral
spectroscope spectroscopes spectroscopic spectroscopy speeder speeders
speedily speedster speedsters speedup speedups speedway speedways
spellbinder spellbinders spellers spelunker spelunkers spender
spenders spermatozoa spermatozoon spermicide spermicides spheroid
spheroidal spheroids sphincter sphincters spiciness spiderier
spideriest spidery spiel spieled spieling spiels spiffied spiffier
spiffies spiffiest spiffy spiffying spikier spikiest spiky spillage
spillages spillway spillways spindle spindled spindles spindling
spinet spinets spinier spiniest spinnaker spinnakers spinner spinners
spinsterhood spiny spirally spirea spireas spiritless spiritualism
spiritualist spiritualistic spiritualists spirituality spirituous
spitball spitballs spitefully spitefulness spitfire spitfires spittoon
spittoons splashdown splashdowns splashier splashiest splashy splats
splatted splatting splay splayed splaying splays splenetic splicer
splicers spline splines splittings splodge splotch splotched splotches
splotchier splotchiest splotching splotchy splutter spluttered
spluttering splutters spoilage spoiler spoilers spoilsport spoilsports
spoliation sponger spongers spoonbill spoonbills spoonerism
spoonerisms spoor spoored spooring spoors sporadically spored sporing
sportier sportiest sportive sportscast sportscaster sportscasters
sportscasting sportscasts sportsman sportsmanlike sportsmen sportswear
sportswoman sportswomen sporty spotlessly spotlessness spotter
spotters spottiness sprat sprats sprayer sprayers spreader spreaders
sprightlier sprightliest sprightliness sprightly springiness sprite
sprites spritz spritzed spritzes spritzing sprocket sprockets spryly
spryness spume spumed spumes spuming spumoni spunkier spunkies
spunkiest spunky spuriously spuriousness sputum spyglass spyglasses
squab squabbed squabbing squabs squareness squashier squashiest
squashy squattered squattering squatters squaw squaws squealer
squealers squeamishly squeamishness squeegee squeegeed squeegeeing
squeegees squeezer squeezers squiggle squiggled squiggles squigglier
squiggliest squiggling squiggly squirmier squirmiest squirmy squish
squished squishes squishier squishiest squishing squishy stabbings
staccato staccatos staffer staffers stagehand stagehands stagflation
staggeringly staggerings stagings staidly stainless stairwell
stairwells stakeout stakeouts stalactite stalactites stalagmite
stalagmites staleness stalker stalkers stalkings stamen stamens
stammerer stammerers stanchion stanchions standoffish standout
standouts staph staphylococci staphylococcus stargazer stargazers
starkly starkness starless starlet starlets starling starlit starvings
stash stashed stashes stashing statehood statehouse statehouses
stateless stateliness stateroom staterooms stateside statesmanlike
statewide statically stationer stationers stats statuary statuesque
statuette statuettes stead steadfastly steadfastness steadiness steads
steakhouse steakhouses steamboat steamboats steamer steamers steamroll
steamrolled steamrolling steamrolls steamship steamships steed steeds
steelier steelies steeliest steely steeplechase steeplechases
steeplejack steeplejacks steeply steepness steerage stein steined
steining steins stenographic stentorian stepbrother stepbrothers
stepchild stepchildren stepdaughter stepdaughters stepfather
stepfathers stepmother stepmothers stepparent stepparents steppe
steppes steppingstone steppingstones stepsister stepsisters stepson
stepsons stereophonic stereoscope stereoscopes stereotypical sterility
sternum sternums steroid steroids stevedore stevedores stewardship
stickiness stickleback sticklebacks stickpin stickpins stickup
stickups sties stiffener stiffeners stiflings stile stiled stiles
stiletto stilettos stiling stillbirth stillbirths stilt stilts
stingily stingray stingrays stinker stinkers stinkings stipend
stipends stipple stippled stipples stippling stirrer stirrers
stirrings stoat stoats stochastic stockiness stockroom stockrooms
stodginess stoic stoically stoicism stoics stoker stokers stolidity
stomachache stomachaches stonewall stonewalled stonewalling stonewalls
stoneware stonework stonily stooge stooges stopcock stopcocks
stoplight stoplights stoppable storefront storefronts storied stormily
storminess storybook storybooks storyteller storytellers stoutly
stoutness stovepipe stovepipes strafe strafed strafes strafing
stragglier straggliest straggly straightaway straightaways
straightedge straightedges straightness straiten straitened
straitening straitens stranglehold strangleholds strangler stranglers
strangulate strangulated strangulates strangulating strapless
straplesses strategically strategist strategists stratification
streakier streakiest streaky streetlight streetlights streetwalker
streetwalkers streetwise strenuousness strep streptococcal
streptococci streptococcus streptomycin stretchier stretchiest
stretchy striated stricture strictures strident stridently strikeout
strikeouts strikingly stringed stringency stringently stringer
stringers stripling strippers striptease stripteased stripteases
stripteasing strobe strobes strongbox strongboxes strontium strop
strophe strophes stropped stropping strops structurally strudel
strudels strumpet strumpeted strumpeting strumpets strychnine
stubblier stubbliest stubbly stubbornly stubbornness stucco stuccoed
stuccoes stuccoing studentships studiously stuffily stuffiness
stultification stultified stultifies stultify stultifying stumbler
stumblers stumpier stumpiest stumpy stunningly stupefaction
stupendously sturdily sturdiness sturgeon sturgeons stutterer
stutterers sty stying styli stylishly stylishness stylist
stylistically stylistics stylists styluses stymie stymied stymieing
stymies styptic styptics suavely suavity subatomic subatomics
subbasement subbasements subclass subcompact subcompacts subcontinent
subcontinents subcontract subcontracted subcontracting subcontractor
subcontractors subcontracts subculture subcultures subcutaneous
subgroups subhead subheading subheadings subheads subhuman subhumans
subjection subjectively subjectivity subjoin subjoined subjoining
subjoins subjugation subjunctives sublease subleased subleases
subleasing sublimate sublimated sublimates sublimating sublimation
sublimely subliminal subliminally sublimity submergence submerse
submersed submerses submersible submersibles submersing submitter
suborbital subordination suborn subornation suborned suborning suborns
subplot subplots subpoena subpoenaed subpoenaing subpoenas subprograms
subservience subsidence subsoil subsoiled subsoiling subsoils subsonic
subspace substantiation substantiations substantive substantives
substation substations substrata substrate substratum substructure
substructures subsume subsumed subsumes subsuming subsystems subteen
subteens subtitle subtitled subtitles subtitling subtotal subtotals
subtrahend subtrahends subtropical suburbanite suburbanites suburbia
subversion succinctness succotash succulence suchlike sucrose
suddenness sudsier sudsiest sudsy suet sufferance sufficiency
suffocatings suffragan suffragans suffragette suffragettes suffragist
suffragists suffuse suffused suffuses suffusing suffusion sugarcane
sugarcoat sugarcoated sugarcoating sugarcoats sugarless suggestible
suggestively sukiyaki sulfate sulfates sulfide sulfides sulkily
sulkiness sullenly sullenness sullied sullies sully sullying sultana
sultanas sultanate sultanates sumac summation summations summerhouse
summerhouses summerier summeriest summertime summery summitry summoner
summoners sumo sump sumps sunbather sunbathers sunbeam sunbeams
sunblock sunblocks sunbonnet sunbonnets sunder sundered sundering
sunders sunfish sunfishes sunlamp sunlamps sunless sunroof sunroofs
sunspot sunspots sunstroke superabundance superabundances
superabundant superannuate superannuated superannuates superannuating
supercharge supercharged supercharger superchargers supercharges
supercharging supercilious superconductivity superconductor
superconductors superego superegos superficiality superfluity
superhighway superhighways superintend superintended superintendence
superintendency superintending superintends superlatively superman
supermen supernova supernovae supernovas supernumeraries supernumerary
superpower superpowers superstitiously supertanker supertankers
supervene supervened supervenes supervening supine supped supping
supplemental suppleness suppliant suppliants supplicant supplicants
supplicate supplicated supplicates supplicating supplication
supplications supportable supposings suppositories suppository
suppurate suppurated suppurates suppurating suppuration supranational
supremacist supremacists sups surcease surceased surceases surceasing
surefire surefooted sureness sureties surety surfeit surfeited
surfeiting surfeits surfer surfers surgically surliness surmountable
surplice surplices surprisings surrealism surrealist surrealistic
surrealists surreals surreptitiously surrey surreys surrogate
surrogates surtax surtaxed surtaxes surtaxing susceptibility sushi
suspenseful suture sutured sutures suturing svelte svelter sveltest
swaddle swaddled swaddles swaddling swag swagged swagging swags swain
swains swallowtail swallowtails swami swamis swank swanked swanker
swankest swankier swankies swankiest swanking swanks swanky sward
swards swash swashbuckler swashbucklers swashbuckling swashed swashes
swashing swastika swastikas swatch swatches swath swaths swatter
swattered swattering swatters swaybacked swearer swearers swearword
swearwords sweatier sweatiest sweatpants sweatshirt sweatshirts
sweatshop sweatshops sweetbread sweetbreads sweetbrier sweetbriers
sweetener sweeteners sweetie sweeties sweetish sweetmeat sweetmeats
swellhead swellheaded swellheads swelter sweltered sweltering
swelterings swelters swiftness swimmer swimmers swimsuit swimsuits
swinger swingers swinish swirlier swirliest swirly switchback
switchbacks switchblade switchblades swordplay swordsman swordsmen
sybarite sybarites sybaritic sycamore sycamores sycophant sycophantic
sycophants syllabic syllabication syllabification syllabified
syllabifies syllabify syllabifying syllogism syllogisms syllogistic
sylph sylphs sylvan symbioses symbiosis symbiotic symbolically
symmetrically symmetries symposium symposiums sync synced
synchronously syncing syncopate syncopated syncopates syncopating
syncopation syncs syndication synergism synergistic synergy synod
synods syntactical syntactics synthetically syphilitic syphilitics
syrupy systemic systemics systolic t tableau tableaux tableland
tablelands tableware tabular tabulator tabulators tachometer
tachometers tacitness taciturnity tackiness tackler tacklers
tactically tactician tacticians tactile tactlessness tad tads taffeta
taffies taffy tailcoat tailcoats tailless tailpipe tailpipes tailwind
tailwinds takeaways takeout takeouts takeovers takings talkativeness
tallness tallyho tallyhoed tallyhoing tallyhos tam tamable tamale
tamales tamarind tamarinds tamers tamp tamped tamping tampon tampons
tamps tams tanager tanagers tangelo tangelos tangibility tangibly
tangier tangies tangiest tangy tankful tankfuls tanneries tanners
tannery tannin tansy tapeworm tapeworms tapioca tapir tapirs taproom
taprooms taproot taproots tardily tare tared tares taring tarmac
tarmacked tarmacking tarmacs taro taros tarot tarots tarp tarpon
tarpons tarps tarragon tarragons tartly tartness taskmaster
taskmasters tastelessly tastelessness taster tasters tastiness tat
tats tatted tatter tattered tattering tatters tatting tattler tattlers
tattletale tattletales tattooist tattooists taupe tautly tautness
tautological tautologies tawdriness taxidermist taxidermists taxidermy
taxings taxonomic taxonomies taxonomy teabag teachable teakettle
teakettles teal teals tearfully teargas teargases teargassed
teargassing tearier teariest tearjerker tearjerkers tearoom tearooms
teary teasel teasels teaser teasers teaspoonful teaspoonfuls teatime
technocracy technocrat technocrats technologist technologists techs
tectonics tediousness teenier teeniest teeny telecast telecaster
telecasters telecasting telecasts telecommunication telecommute
telecommuted telecommuter telecommuters telecommutes telecommuting
teleconference teleconferenced teleconferences teleconferencing
telegrapher telegraphers telegraphic telegraphy telekinesis
telemarketing telemeter telemeters telemetries telemetry
telepathically telephonic telephony telephoto telephotos telescopic
telethon telethons teletypes teletypewriter teletypewriters
televangelist televangelists telex telexed telexes telexing tellingly
temblor temblors temerity temp temped tempera temperamentally temperas
tempestuously tempestuousness temping templates temporally temps
tempter tempters temptingly temptings temptress temptresses tempura
tenability tenaciously tendentious tendentiously tendentiousness
tenderfoot tenderfoots tenderhearted tenderloin tenderloins tendinitis
tenfold tenfolds tenon tenons tenpin tenpins tensely tenseness tensile
tensor tenuously tenuousness tequila tequilas tercentenaries
tercentenary termagant termagants terminable terminations
terminological tern terned terning terns terrapin terrapins terrarium
terrariums terrifically terrifyingly terry tertiary testamentary
testate testates testier testiest testily testiness testosterone testy
tetrahedron tetrahedrons textural thalami thalamus thallium
thankfulness thanklessly thanksgiving thanksgivings theatrically theed
theeing thees theism theistic thematic thematically thematics
thenceforth thenceforward thenceforwards theocracies theocracy
theocratic theoretician theoreticians theosophy therapeutically
therapeutics thereabout therefrom thereto therewith thermally
thermionic thermodynamic thermonuclear thermoplastic thermoplastics
thermos thermoses thermostatic thermostatics thespian thespians
thiamine thickener thickeners thickenings thickset thieved thievery
thieving thievish thighbone thighbones thimbleful thimblefuls thine
thingamajig thingamajigs thinners thinness thirdly thirstily
thistledown thither tho thoracic thorax thoraxes thorium thoroughgoing
thoroughness thoughtlessness thrall thralldom thralled thralling
thralls thrasher thrashers thrashings threateningly threatenings
threefold threescore threescores threesome threesomes threnodies
threnody thriftily thriftiness thrivings throatier throatiest
throatily throatiness throaty throe throed throeing throes thromboses
thrombosis throwaways thrower throwers thrum thrummed thrumming thrums
thrush thrushes thruway thruways thumbnail thumbnails thumbscrew
thumbscrews thunderclap thunderclaps thundercloud thunderclouds
thunderhead thunderheads thundershower thundershowers thwack thwacked
thwacking thwacks thymus thymuses thyself ti tibia tibiae tic ticker
tickers tics tiddlywinks tidewater tidewaters tidily tidiness tidings
tiebreaker tiebreakers tightfisted tigress tigresses tildes tillable
tillage tiller tillers timbered timberland timberline timberlines
timbre timbres timelessness timeliness timepiece timepieces timeworn
timorous timorously timpani timpanist timpanists tincture tinctured
tinctures tincturing tinderbox tinderboxes tinfoil tinglier tingliest
tingly tinsmith tinsmiths tintinnabulation tintinnabulations tipper
tippers tipple tippled tippler tipplers tipples tippling tipsily
tipster tipsters tiptop tiptops tiredness tirelessly tirelessness
tiresomely tiresomeness tirings titan titanic titanium titans tithe
tithed tithes tithing titillation titmice titmouse tittle tittled
tittles tittling titular tizzies tizzy toadied toadies toady toadying
toastier toasties toastiest toastmaster toastmasters toasty
tobacconist tobacconists tocsin tocsins toddies toddy toehold toeholds
tofu tog togetherness toggled toggles toggling togs toiler toilers
toiletries toiletry toilette toilsome toke toked tokenism tokes toking
tolerantly toleration tollbooth tollbooths tollgate tollgates tom
tomfooleries tomfoolery toms tonalities tonality toneless toner tonier
toniest tonsillectomies tonsillectomy tonsorial tonsure tonsured
tonsures tonsuring tony toolbar toolbars toolbox toolboxes toothed
toothier toothiest toothless toothsome toothy topcoat topcoats
topically topknot topknots topless topmast topmasts topmost
topographer topographers topographic topographical topological
topologically toppings topsail topsails topside topsides topsoil toque
toques tor torchlight toreador toreadors torpid torpidity torpor
torqued torques torquing tors torsion tort torte tortes tortoiseshell
tortoiseshells torts tortuously torturer torturers torus tossup
tossups totemic touche touchingly touchstone touchstones toughly
tourism tourmaline tourney tourneys towhead towheaded towheads
townhouse townhouses townsfolk township townships townsman townsmen
towpath towpaths toxicity toxicologist toxicologists toxicology
traceable tracer traceries tracers tracery trachea tracheae
tracheotomies tracheotomy tracings tracker trackers tractable
tradesman tradesmen traditionalists traduce traduced traduces
traducing trafficker traffickers tragedian tragedians tragicomedies
tragicomedy trailblazer trailblazers traipse traipsed traipses
traipsing trajectories trajectory tram trammed trammel trammels
tramming trams tranquilly transceiver transceivers transcendence
transcendent transcendental transcendentalism transcendentalist
transcendentalists transcendentally transducer transducers transept
transepts transferal transferals transference transfiguration
transfigure transfigured transfigures transfiguring transfinite
transfix transfixed transfixes transfixing transfuse transfused
transfuses transfusing transgressor transgressors transience
transiency transitively transliterate transliterated transliterates
transliterating transliterations translucence transmigrate
transmigrated transmigrates transmigrating transmigration
transmissible transmittable transmittal transmutation transmutations
transmute transmuted transmutes transmuting transnational
transnationals transoceanic transom transoms transpiration
transplantation transponder transponders transporter transporters
transposition transpositions transsexual transsexuals transship
transshipment transshipped transshipping transships transubstantiation
transversely transvestism transvestite transvestites trapdoors
trapezoidal trappable trapshooting trashcans travail travailed
travailing travails travelogue travelogues treacherously treacled
treacles treacling treadle treadled treadles treadling treasonable
treasonous treatable treeless treetop treetops trefoil trefoils
tremolo tremolos tremulous tremulously trenchant tress tresses triad
triads triage triangulation triathlon triathlons tribalism tribesman
tribesmen tribune tribunes trice triceps tricepses triceratops
trickiness trident tridents triennial triennials trifler triflers
trifocals trig triggest triglyceride triglycerides trigonometric trike
triked trikes triking trilateral trilaterals trillionth trillionths
trimaran trimarans trimly trimmers trimmings trimness trinities
tripartite triplied triplies triply triplying triptych triptychs
trisect trisected trisecting trisects tritely triteness triumphal
triumphantly triumvirate triumvirates trivet trivets trivialities
trochee trochees troika troikas trollop trolloped trolloping trollops
trombonist trombonists tromp tromped tromping tromps troopship
troopships trope tropes tropic tropics tropism tropisms troposphere
tropospheres troth trothed trothing troths trotter trotters troubadour
troubadours troubleshoot troubleshooted troubleshooter troubleshooters
troubleshooting troubleshoots troubleshot trouper troupers trousseau
trousseaux troy troys trucker truckers truckle truckled truckles
truckling truckload truckloads truculence truculent truculently
trumpery trumpeter trumpeters truncheon truncheons trundle trundled
trundles trundling truss trussed trusses trussing trusteeship
trusteeships trustfully trustfulness trustworthiness tryst trysted
trysting trysts ts tsunami tsunamis tubbier tubbiest tubby tubeless
tuber tubercle tubercles tubercular tuberculous tuberous tubers tucker
tuckered tuckering tuckers tugboat tugboats tulle tumbledown
tumbleweed tumbleweeds tumbrel tumbrels tumid tun tunefully tuneless
tunelessly tungsten tunnies tunny tuns turbid turbojet turbojets
turboprop turboprops turbot turbots turbulently turd turds turgidity
turgidly turmeric turmerics turnabout turnabouts turnarounds turncoat
turncoats turners turnkey turnkeys turnoff turnoffs turpitude
turtledove turtledoves tush tushed tushes tushing tusked tussock
tussocks tutelage tutu tutus tux tuxes twaddle twaddled twaddles
twaddling twain tweedier tweediest tweeds tweedy tweeter tweeters
twerp twerps twiggier twiggiest twiggy twill twilled twirler twirlers
twit twits twitted twitting twofer twofers twofold twofolds twosome
twosomes tyke tykes tympanum tympanums typecast typecasting typecasts
typefaces typescripts typesetters typewrite typewrites typewriting
typewritten typewrote typo typographer typographers typographically
typography typos tyrannically tyrannosaur tyrannosaurs tyrannosaurus
tyrannosauruses tyrannous tyro tyros u ubiquitously ubiquity uh
ukulele ukuleles ulcerate ulcerated ulcerates ulcerating ulceration
ulcerous ulna ulnae ultraconservative ultraconservatives ultramarine
ultras ultrasonically ultrasound ultrasounds ululate ululated ululates
ululating um umbel umbels umber umbilical umbilici umbilicus umbrage
umbraged umbrages umbraging umiak umiaks umlaut umlauts ump umped
umping umps umpteenth unabashed unabated unabridged unabridgeds
unaccented unacceptability unaccompanied unaccustomed unacknowledged
unacquainted unadorned unadvised unafraid unaided unalterable
unalterably unannounced unanticipated unappealing unappreciated
unappreciative unapproachable unashamed unashamedly unasked
unassailable unassisted unattributed unauthenticated unavailing
unavoidably unbar unbarred unbarring unbars unbeaten unbeknown
unbelief unbend unbending unbends unbent unbidden unbind unbinding
unbinds unblushing unbolt unbolted unbolting unbolts unbosom unbosomed
unbosoming unbosoms unbound unbounded unbranded unbridled unbuckle
unbuckled unbuckles unbuckling unbutton unbuttoned unbuttoning
unbuttons uncalled uncannily uncaring uncased uncatalogued unceasingly
uncensored unceremonious unceremoniously uncertainly unchanging
uncharacteristic uncharacteristically uncharitably uncharted unchecked
uncivil unclaimed unclasp unclasped unclasping unclasps unclassified
uncleanlier uncleanliest uncleanly uncleanness unclearer unclearest
unclothe unclothed unclothes unclothing uncluttered uncoil uncoiled
uncoiling uncoils uncollected uncommitted uncommonly uncommunicative
uncomplaining uncompleted uncomplicated uncomplimentary
uncomprehending uncompressed uncompromisingly unconcern unconcernedly
unconcerning unconcerns unconquerable unconscionable unconscionably
unconsciousness unconsidered uncontaminated uncontested uncontrollably
unconventionally unconvincingly uncooked uncooperative uncoordinated
uncork uncorked uncorking uncorks uncorrelated uncorroborated
uncounted uncouple uncoupled uncouples uncoupling uncritical unction
unctions unctuous unctuously unctuousness uncultivated undated
undeceive undeceived undeceives undeceiving undecipherable undeclared
undefeated undefended undefinable undelivered undemanding
undemonstrative undependable underachieve underachieved underachiever
underachievers underachieves underachieving underact underacted
underacting underacts underage underarm underarmed underarming
underarms underbellies underbelly underbid underbidding underbids
undercarriage undercarriages undercharge undercharged undercharges
undercharging underclass underclassman underclassmen underclothes
underclothing undercoat undercoated undercoating undercoats
underdeveloped underdone underemployed underexpose underexposed
underexposes underexposing underfed underfeed underfeeding underfeeds
underfunded undergrad undergrads underhand underhandedly underlains
underling undermost underpaid underpay underpaying underpays underpin
underpinned underpinning underpinnings underpins underplay underplayed
underplaying underplays undersea underseas undersecretaries
undersecretary undersell underselling undersells undershoot
undershooting undershoots undershorts undershot undersign undersigned
undersigning undersigns undersized underskirt underskirts undersold
understaffed understandingly underused undervalue undervalued
undervalues undervaluing underwriter underwriters undeservedly
undeserving undesirability undetectable undetermined undeterred undies
undignified undiluted undiminished undisciplined undisclosed
undiscovered undiscriminating undisguised undisputed undistinguished
undivided undulant undulate undulated undulates undulating undulation
undulations unearned unease uneaten unedited unembarrassed unemotional
unending unendurable unenforceable unenthusiastic unenviable unequally
unequivocally unerringly unevenness uneventfully unexampled
unexceptionable unexceptional unexciting unexplored unexpurgated
unfailingly unfairness unfaithfully unfaithfulness unfamiliarity
unfashionable unfathomable unfeelingly unfeigned unfetter unfettered
unfettering unfetters unflagging unflappable unflattering unflinching
unflinchingly unforeseeable unforgettably unforgiving unformed
unfrequented unfriendliness unfrock unfrocked unfrocking unfrocks
unfulfilled unfurnished ungainliness ungentlemanly ungovernable
ungracious ungratefully ungratefulness ungrudging unguarded unguent
unguents ungulate ungulates unhand unhanded unhanding unhands unharmed
unhealthful unheeded unhesitating unhesitatingly unhindered unhinge
unhinged unhinges unhinging unhitch unhitched unhitches unhitching
unholier unholiest unholy unhorse unhorsed unhorses unhorsing
unhurried unhurt unicameral unicycles unidentifiable unidirectional
unimaginable unimpaired unimpeachable unimplementable unimplemented
unimpressive uninhabitable uninhabited uninjured uninsured
unintelligibly uninterpreted uninterrupted uninvited uninviting unisex
unitary universality unkindness unknowable unknowing unknowingly
unknowings unlace unlaced unlaces unlacing unlatch unlatched unlatches
unlatching unlawfully unleaded unlearn unlearning unlearns unleavened
unlettered unlicensed unlikelihood unlisted unloose unloosed unlooses
unloosing unloved unluckily unmade unmake unmakes unmaking
unmanageable unmanlier unmanliest unmanly unmannerly unmatched
unmemorable unmentionable unmentionables unmerciful unmercifully
unmindful unmoral unnaturally unneeded unnoticeable unnumbered
unobjectionable unobservant unobserved unobstructed unobtrusive
unobtrusively unoffensive unofficially unopened unopposed unpainted
unpalatable unpardonable unpatriotic unpaved unperturbed unpin
unpinned unpinning unpins unplanned unplug unplugged unplugging
unplugs unplumbed unpolluted unpredictability unprejudiced
unpremeditated unpretentious unpreventable unproductive unprofessional
unprofitable unpromising unprompted unpronounceable unproved
unpunished unquenchable unquestioned unquestioning unquestioningly
unquote unquoted unquotes unquoting unreachable unreadier unreadiest
unready unrealistically unreasonableness unreasoning unreconstructed
unrecorded unrefined unregenerate unregistered unregulated unrehearsed
unreleased unrelentingly unrelieved unremitting unrepentant unrequited
unreserved unresponsive unrestrained unrewarding unripe unriper
unripest unroll unrolled unrolling unrolls unromantic unruliness
unsaddle unsaddled unsaddles unsaddling unsalted unsanctioned
unsatisfying unsaturated unschooled unscramble unscrambled unscrambles
unscrambling unscrupulously unscrupulousness unseal unsealed unsealing
unseals unseasonably unseasoned unseeing unseemliness unseens
unselfish unselfishly unselfishness unsent unsentimental unshakable
unshaven unsheathe unsheathed unsheathes unsheathing unsightliness
unskillful unsmiling unsnap unsnapped unsnapping unsnaps unsnarl
unsnarled unsnarling unsnarls unsociable unsold unsparing unspeakably
unspecific unspoiled unspoken unsportsmanlike unstated unsteadier
unsteadiest unsteadily unsteadiness unsteady unstop unstoppable
unstopped unstopping unstops unstressed unstrung unstudied
unsubstantial unsubtle unsuitably unsupervised unsurpassed
unsurprising unsuspected unsweetened unswerving unsympathetic
untainted untamed untapped untaught untested unthinking unthinkingly
untidiness untimelier untimeliest untimeliness untimely untiringly
untitled untouchable untouchables untoward untreated untried
untroubled untruth untruthful untruthfully untruths untutored untwist
untwisted untwisting untwists unutterable unutterably unvarnished
unvarying unverified unvoiced unwarier unwariest unwariness unwavering
unwed unwholesome unwieldiness unwillingly unwisely unwitting unwonted
unworldly unworthier unworthiest unworthiness unyielding unzip
unzipped unzipping unzips upbraid upbraided upbraiding upbraids
upchuck upchucked upchucking upchucks upcoming upcountry updater
upfront upland uplands upliftings upmarket uppercase upperclassman
upperclassmen uppercut uppercuts uppercutting uppity upraise upraised
upraises upraising uproarious uproariously upscale upsides upstage
upstaged upstages upstaging upstate upsurge upsurged upsurges
upsurging upswing upswings uptakes urbanity urea urethra urethrae uric
urinal urinals urinalyses urinalysis urinary urination urologist
urologists urology usherette usherettes usurer usurers usurious
usurpation usurper usurpers usury uterine utilitarians utopia utopias
uttermost uvula uvular uvulars uvulas v vacantly vacationer
vacationers vacillation vacillations vacuity vacuously vagrancy
vainglorious vainglory vainly valance valanced valances valancing vale
valedictorian valedictorians valedictories valedictory valence
valences vales valiantly validations validness valorous valuation
valuations vamoose vamoosed vamooses vamoosing vamp vamped vamping
vamps vanadium vanishings vantage vantages vapid vapidity vapidness
vaporous variability variably variances variate varicose varicoses
variegate variegated variegates variegating varlet varlets varmint
varmints vascular vasectomies vasectomy vassal vassalage vassaled
vassaling vassals vaudeville vaulter vaulters vaunt vaunted vaunting
vaunts vectored vectoring veep veeps vegan vegans vegetate vegetated
vegetates vegetating vegetative veggie veggies vehemence vehicular
veld velds vellum velveteen venal venality venally vendetta vendettas
venereal vengefully venial venomously venous ventral ventrals
ventricular ventriloquism venturesome venturous veracious verbena
verbenas verdant verdigris verdigrised verdigrises verdigrising
verdure verifiable verily verisimilitude veritably verities verity
vermicelli vermilion verminous vermouth vernal versification versified
versifies versify versifying vertebral vertex vertexes vertiginous
vesicle vesicles vesper vespers vestigial vestries vestry vetch
vetches vexatious viand viands vibe vibes vibrancy vibrantly
vibraphone vibraphones vibrato vibrator vibrators vibratos viburnum
viburnums vicarage vicarages viceroy viceroys vichyssoise viciousness
vicissitude vicissitudes victoriously victual victuals vicuna vicunas
videocassette videocassettes videodisc videodiscs viewfinder
viewfinders viewings vigilantism vigilantly vignette vignetted
vignettes vignetting vilely vileness vilification villein villeins vim
vinaigrette vindication vindications vindicator vindicators
vindictively vindictiveness vinegary vintner vintners viol violable
violator violators violinist violinists violist violists violoncello
violoncellos viols virago viragoes vireo vireos virginal virginals
virgule virgules virology virtuosity virtuousness virulence virulently
visage visages viscera visceral viscid viscosity viscount viscountess
viscountesses viscounts viscous viscus vitiate vitiated vitiates
vitiating vitiation viticulture vitreous vitreouses vitriol vituperate
vituperated vituperates vituperating vituperation vituperative viva
vivace vivaces vivaciousness vivaed vivaing vivas vividness vivified
vivifies vivify vivifying viviparous vixen vixenish vixens vizier
viziers vocalic vocalics vocally vocative vocatives vociferate
vociferated vociferates vociferating vociferation voguish voiceless
voile volatility vole voled voles voling voltaic voltmeter voltmeters
volubility voluble volubly voluminously voluptuaries voluptuary
voluptuously voluptuousness voodooism voraciously voracity votaries
votary votive vouchsafe vouchsafed vouchsafes vouchsafing voyeur
voyeurism voyeuristic voyeurs vs vulgarism vulgarisms vulgarly
vulnerably vulva vulvae w wackier wackiest wackiness wacko wackos
wacky wader waders wadi wadis waggish waggle waggled waggles waggling
wainscot wainscoted wainscoting wainscotings wainscots waistband
waistbands waistcoat waistcoated waistcoating waistcoats wakeful
wakefulness wale waled wales waling walkway walkways wallabies wallaby
wallboard walleye walleyed walleyes wallflower wallflowers wallopings
wampum wanderlust wanderlusts wangle wangled wangles wangling wanly
wannabe wannabes wantings wantonly wantonness wapiti wapitis warbler
warblers warder wardered wardering warders wardroom wardrooms ware
wares warhorse warhorses warily wariness warlock warlocks warlord
warlords warmers warmhearted warmonger warmongering warmongers warship
warships warthog warthogs wartier wartiest warty washbasin washbasins
washboard washboards washbowl washbowls washerwoman washerwomen
washstand washstands washtub washtubs waspish wassail wassailed
wassailing wassails wastefulness wastepaper waster wastered wastering
wasters wastrel wastrels watchband watchbands watcher watchers
watchfully watchfulness watchmaker watchmakers watchtower watchtowers
watercourse watercourses watercraft watercress waterfowl waterfowls
waterline waterlines waterside watersides waterspout waterspouts
wattage wattle wattled wattles wattling wavelet wavelets waviness
waxen waxiness waxwing waxwings waxwork waxworks wayfarer wayfarers
wayfaring wayfarings waywardly waywardness weakfish weakfishes weal
weals wealthiness weaponless wearable wearer wearers weathercock
weathercocked weathercocking weathercocks weatherman weathermen
weatherproof weatherproofed weatherproofing weatherproofs website
websites weeder weeders weeknight weeknights weeper weepers weepier
weepies weepiest weepings weepy weevil weevils weft wefted wefting
wefts weightiness weightless weightlessness weightlifter weightlifters
weightlifting weir weirdly weired weiring weirs welkin wellspring
wellsprings welsh welshed welshes welshing welterweight welterweights
wen wench wenches wend wended wending wends wens westbound westerner
westerners westernmost westwards wetback wetbacks wetland wetlands
wetly wetness whalebone wham whammed whammies whamming whammy whams
whatchamacallit whatchamacallits whatnot wheal wheals wheaten wheatens
wheelbase wheelbases wheeler wheelwright wheelwrights wheezier
wheeziest wheezy whelk whelked whelks whelp whelped whelping whelps
whences whereat wherefore wherefores whereof whereon wheresoever
whetstone whetstones whey whimsicality whimsically whimsier whimsies
whimsiest whimsy whiner whiners whinier whiniest whiny whipcord
whiplash whiplashes whippersnapper whippersnappers whippet whippets
whippings whippoorwill whippoorwills whirligig whirligigs whist
whistler whistlers whit whitecap whitecaps whited whitefish
whitefishes whitener whiteners whiteness whitewall whitewalls whither
whithered whithering whithers whiting whitings whitish whits whitter
whittler whittlers whodunit whodunits wholeness wholesomeness whomever
whomsoever whoopee whoopees whoosh whooshed whooshes whooshing
whopping whoppings whorehouse whorehouses whorl whorled whorls
whosoever wickerwork wideness widowhood wiener wieners wifelier
wifeliest wifely wigeon wigeons wiggler wigglers wigglier wiggliest
wiggly wight wighted wighting wights wigwag wigwagged wigwagging
wigwags wildebeest wildebeests wildflower wildflowers wildfowl wile
wiled wiles wiliness wiling willfulness willies willowier willowiest
willowy wimp wimped wimpier wimpiest wimping wimple wimpled wimples
wimpling wimps wimpy windbag windbags windbreak windbreaker
windbreakers windbreaks windburn windburned windburning windburns
windiness windjammer windjammers windlass windlasses windowed
windowsill windowsills windsock windsocks windstorm windstorms
windsurf windsurfed windsurfing windsurfs windswept windup windups
windward wineglass wineglasses wineries winery winger wingless
wingspan wingspans wingspread wingspreads wingtip wingtips winnow
winnowed winnowing winnows wino winos winsomely wintergreen wireless
wirelesses wiretap wiretapped wiretapping wiretaps wiriness wiseacre
wiseacres wisher wishers wishfully wisteria wisterias wistfulness
witchery withal witlessly wittily wittiness wittingly wizardry
woebegone woeful woefuller woefullest woefully wolfhound wolfhounds
wolfish wolfram wolverine wolverines womanish womanlier womanliest
womanlike womanliness womanly womenfolk womenfolks wonderment
wondrously wonted woodbine woodcarving woodcarvings woodcock woodcocks
woodcraft woodcut woodcuts woodcutter woodcutters woodcutting woodenly
woodenness woodiness woodman woodmen woodpile woodpiles woodshed
woodsheds woodsier woodsiest woodsy woodworking woodworm wooer wooers
woofer woofers woolgathering woolliness woozier wooziest wooziness
woozy wordiness wordplay workaday workaholic workaholics workday
workdays workfare workhorse workhorses workhouse workhouses workingman
workingmen workloads workmanlike workplaces worksheet worksheets
workweek workweeks worldliness wormier wormiest wormwood wormy worrier
worriers worryings worrywart worrywarts worshipful worthily worthiness
worthlessness wrack wraith wraiths wraparound wraparounds wrathful
wrathfully wreckers wretchedly wretchedness wriggler wrigglers
wrigglier wriggliest wriggly wrinklier wrinklies wrinkliest wrinkly
wristband wristbands wrongful wrongfully wrongfulness wrongheaded
wrongheadedly wrongheadedness wrongness wroth wryly wryness wuss
wusses x xenon xenophobic xerographic xerography xylem xylophonist
xylophonists y ya yachtsman yachtsmen yahoo yahoos yammer yammered
yammering yammers yardage yardages yardarm yardarms yarmulke yarmulkes
yaw yawed yawing yawl yawls yaws ye yea yeah yeahs yearbook yearbooks
yeas yeastier yeastiest yeasty yellowish yeoman yeomen yep yeps
yeshiva yeshivas yest yesteryear yieldings yip yipped yippee yippees
yipping yips yo yogi yogis yon yore youngish youthfully youthfulness
yttrium yucca yuccas yuck yucked yuckier yuckiest yucking yucks yucky
yuk yukked yukking yuks yule yuletide yum yummier yummiest yummy yup
yuppie yuppies yups z zaniness zap zapped zapping zaps zealot zealots
zealously zealousness zebu zebus zed zeds zephyr zephyrs zeppelin
zeppelins zestful zestfully zilch zing zinged zinger zingers zinging
zings zinnia zinnias zippier zippiest zippy zircon zirconium zircons
zit zither zithers zits zodiacal zonal zonked zwieback zygote zygotes
""".split())
def score_scowl(word):
    """
    Score a word by the SCOWL frequency tier it belongs to; words found
    in no tier score 0.

    >>> score_scowl('zing')
    1
    """
    # Walk the tiers in the same priority order as the original
    # if-cascade: the first tier containing the word wins.
    tiers = ((SCOWL10, 4), (SCOWL20, 3), (SCOWL35, 2), (SCOWL50, 1))
    for word_set, points in tiers:
        if word in word_set:
            return points
    return 0
def score_scowl_substrings(word):
    """
    Sum the SCOWL score of the word itself plus every proper prefix and
    every proper suffix of length >= 2.

    >>> score_scowl_substrings('zing')
    1
    >>> score_scowl_substrings('totallyzonked')
    15
    """
    total = score_scowl(word)
    size = len(word)
    # Proper prefixes word[:2] .. word[:size-1]
    for cut in range(2, size):
        total += score_scowl(word[:cut])
    # Proper suffixes word[1:] .. word[size-2:]
    for cut in range(1, size - 1):
        total += score_scowl(word[cut:])
    return total
if __name__ == '__main__':
    # Run the doctests embedded in score_scowl / score_scowl_substrings.
    import doctest
    doctest.testmod()
| jcrocholl/nxdom | dictionaries/english.py | Python | mit | 564,985 | [
"Amber",
"BLAST",
"CASINO",
"CRYSTAL",
"ESPResSo",
"Elk",
"Firefly",
"GULP",
"Galaxy",
"Jaguar",
"MOOSE",
"NEURON",
"Octopus",
"SIESTA",
"TINKER",
"VisIt",
"exciting"
] | a11e96d607634f41652987e748eb5fd1c3ba49a205bc603c8ff4221bfb002c06 |
"""
Preprocess the fluorescence traces to get a spike train matrix.
Based on the ChaLearn Connectomics challenge starter kit
by Bisakha Ray, Javier Orlandi and Olav Stetter. I tried to clean it
up since it didn't make use of numpy functions and it was woefully
lacking in useful comments.
"""
import os
import sys
import cPickle
import gzip
import numpy as np
# Use the Agg backend in running on a server without the DISPLAY variable
if "DISPLAY" not in os.environ:
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
# Import OOPSI. Available at: https://github.com/liubenyuan/py-oopsi
# or originally in Matlab from https://github.com/jovo/oopsi
pyoopsi_path = os.path.join("/home", "pyhawkes", "py-oopsi")
sys.path.append(pyoopsi_path)
import oopsi
def process_dataset(K=100,
                    suffix="_iNet1_Size100_CC01inh.txt",
                    dir="data/chalearn/small",
                    outfile="network1c.pkl"):
    """
    Load one ChaLearn network, infer spikes from the fluorescence with
    fast-oopsi, show a few diagnostic plots, and pickle the results.

    :param K: number of neurons in the network.
    :param suffix: shared filename suffix of the fluorescence, network
        and position files inside ``dir``.
    :param dir: directory holding the input files (NOTE: shadows the
        ``dir`` builtin; kept as-is for interface compatibility).
    :param outfile: basename of the gzipped pickle written to ``dir``;
        it receives the tuple (S, F, C, network, pos).
    """
    # Get the full filenames
    fluor_file = os.path.join(dir, "fluorescence" + suffix)
    net_file = os.path.join(dir, "network" + suffix)
    pos_file = os.path.join(dir, "networkPositions" + suffix)
    # Parse the files
    F = parse_fluorescence_file(fluor_file, K)
    # # Load the oopsi processed fluorescence
    # data_path = os.path.join("data", "chalearn", "small", "network1c.pkl.gz")
    # with gzip.open(data_path, 'r') as f:
    #     _, F, C, network, pos = cPickle.load(f)
    network = parse_network_file(net_file, K)
    pos = parse_position_file(pos_file, K)
    # Discretize the fluorescence signal
    # Hardcode the bins
    # bins = np.array([-10, 0.17, 10]).reshape((1,3)).repeat(K, axis=0)
    # S, _ = discretize_fluorescence(F, edges=bins, binsui=False)
    # # S, bins = discretize_fluorescence(F, nbins=2, binsui=True)
    # S, bins = discretize_fluorescence(C, nbins=2, binsui=True)
    # # S = remove_double_spikes(S)
    # Get the spike times with oopsi
    # fast-oopsi,
    # S holds the inferred spikes and C the deconvolved trace (see
    # extract_spike_oopsi); dt=0.02 s is the challenge's frame interval
    # -- presumably; confirm against the dataset description.
    S,C = extract_spike_oopsi(F, dt=0.02)
    # Plot a segment of fluorescence traces and spikes
    start = 0
    end = 10000
    k = 0
    # NOTE(review): np.where returns a *tuple* of index arrays; passing
    # spks straight to plt.plot relies on matplotlib unpacking it --
    # verify the red markers land where intended.
    spks = np.where(S[start:end, k])
    plt.figure()
    plt.plot(F[start:end, k], '-k')
    plt.plot(spks, F[spks,k], 'ro')
    plt.show()
    # Scatter plot the positions
    plt.figure()
    pres,posts = network.nonzero()
    # Draw a random ~25% subsample of the edges to keep the plot readable.
    for i,j in zip(pres,posts):
        if np.random.rand() < 0.25:
            plt.plot([pos[i,0], pos[j,0]],
                     [pos[i,1], pos[j,1]],
                     '-k', lw=0.5)
    plt.scatter(pos[:,0], pos[:,1], s=10, c='r', marker='o', facecolor='k')
    plt.xlabel("X")
    plt.ylabel("Y")
    plt.show()
    # Plot the network as a function of X position
    # (rows/cols permuted so neurons are ordered by their x coordinate).
    perm = np.argsort(pos[:,0])
    plt.figure()
    plt.imshow(network[np.ix_(perm, perm)], cmap="Greys", interpolation="none")
    plt.xlabel("Postsynaptic")
    plt.ylabel("Presynaptic")
    plt.show()
    # Persist everything as one gzipped pickle next to the inputs.
    with gzip.open(os.path.join(dir, outfile + ".gz"), 'w') as f:
        cPickle.dump((S, F, C, network, pos), f, protocol=-1)
def parse_fluorescence_file(filename, K, delimiter=','):
    """
    Parse a fluorescence file into a numpy matrix.

    :param filename: Name of the file
    :param K: expected number of neurons (columns) on every line
    :param delimiter: Delimiter between neurons (always ',')
    :return: TxK matrix of fluorescence values
    """
    assert os.path.exists(filename), "File doesn't exist! %s" % filename

    # Stream the values one at a time so the raw text and the parsed
    # array are never both held in memory (these files can be large).
    def iter_func():
        with open(filename, 'r') as f:
            for line in f:
                fields = line.rstrip().split(delimiter)
                assert len(fields) == K, "Line is not of length %d!" % K
                for item in fields:
                    # Plain float(): np.float was just an alias of the
                    # builtin and has been removed from modern NumPy.
                    yield float(item)

    F = np.fromiter(iter_func(), dtype=float)
    return F.reshape((-1, K))
def parse_network_file(filename, K, vmin=0):
    """
    Read a ground-truth connectivity file. Each row has the form
    "I,J,W": a connection from neuron I to neuron J (1-indexed) with
    integer weight W. Weights are clipped from below at ``vmin``.

    :param filename: path to the network file
    :param K: number of neurons
    :param vmin: lower clip value (default 0, which zeroes out the
        negative, i.e. inhibitory, weights)
    :return: KxK weight matrix
    """
    weights = np.zeros((K, K))
    with open(filename, 'r') as f:
        for row in f:
            pre, post, w = row.rstrip().split(',')
            weights[int(pre) - 1, int(post) - 1] = int(w)
    return np.clip(weights, vmin, np.inf)
def parse_position_file(filename, K):
    """
    Parse a neuron-position file. Each of the first K rows has the form
    "X,Y": the 2D coordinates of one neuron.

    (The previous docstring was a copy-paste of the network-file
    description and did not match what this function reads.)

    :param filename: path to the positions file
    :param K: number of neurons to read
    :return: Kx2 matrix of (x, y) positions
    """
    pos = np.zeros((K, 2))
    with open(filename, 'r') as f:
        # range rather than xrange: identical iteration on Python 2 and
        # also runs on Python 3.
        for k in range(K):
            x, y = f.readline().rstrip().split(',')
            pos[k, 0] = float(x)
            pos[k, 1] = float(y)
    return pos
def extract_spike_oopsi(F, dt):
    """
    Extract the spike times with OOPSI (runs oopsi.fast column by column).

    :param F: Fluorescence data (each row a time bin, each column a neuron).
    :param dt: Time bin size
    :return: (D, C) -- D is the per-bin spike signal and C the second
        array returned by oopsi.fast (presumably the deconvolved calcium
        trace; confirm against the py-oopsi documentation). Both are
        float arrays: the integer cast below is commented out.
    """
    D = np.zeros_like(F)
    C = np.zeros_like(F)
    for k in xrange(F.shape[1]):
        print "Running oopsi on neuron ", k
        D[:,k], C[:,k] = oopsi.fast(F[:,k], dt=dt, iter_max=6)
    # Cast D to an integer matrix
    # D = D.astype(np.int)
    return D, C
def discretize_fluorescence(F,
                            nbins=2,
                            edges=None,
                            binsui=True,
                            hpfilter=True,
                            debug=False):
    """
    Discretize the fluorescence signal so it can be used to compute the
    joint PDF.

    Example usage: D, edges = discretize_fluorescence(F)

    :param F: Fluorescence data (each row a time bin, each column a neuron).
    :param nbins: If edges is None, use nbins evenly spaced bins per neuron.
    :param edges: Optional (K x nbins+1) array of precomputed bin edges.
    :param binsui: If true, pop up a UI to pick each neuron's threshold
        by clicking on a histogram of its signal.
    :param hpfilter: Work with the first difference of the signal
        (a crude high pass filter) instead of the raw trace (default True).
    :param debug: Unused; kept for interface compatibility.
    :return: (D, edges) where D is the int-valued discretized signal and
        edges are the bin edges actually used.
    """
    T, K = F.shape

    # Optionally high-pass filter by differencing along time.
    if hpfilter:
        Fhat = np.diff(F, axis=0)
    else:
        Fhat = F

    # Per-neuron range of the (possibly filtered) signal.
    F_min = np.amin(Fhat, axis=0)
    F_max = np.amax(Fhat, axis=0)

    # Allocate the discretized signal. BUGFIX: size it from Fhat instead
    # of hard-coding (T-1, K) -- the old code crashed with a shape
    # mismatch whenever hpfilter=False (Fhat then has T rows).
    D = -1 * np.ones(Fhat.shape)

    # If global bins are not given, use neuron-specific binning evenly
    # spaced between F_min and F_max.
    if edges is not None:
        bins_given = True
    else:
        edges = np.zeros((K, nbins + 1))
        bins_given = False

    for k in range(K):
        if not bins_given:
            # Pad the range slightly so the extreme samples fall strictly
            # inside the outermost edges.
            edges[k, :] = np.linspace(F_min[k] - 1e-3, F_max[k] + 1e-3,
                                      num=nbins + 1)
        if binsui:
            # Histogram of the fluorescence; plt.show() blocks until the
            # click handler closes the figure.
            fig = plt.figure()
            plt.hist(Fhat[:, k], bins=100, normed=True)
            for edge in edges[k, :]:
                plt.plot([edge, edge],
                         [0, plt.gca().get_ylim()[1]], '-r')
            plt.plot()
            plt.xlabel('F')
            plt.ylabel('p(F)')
            plt.title('Neuron %d' % k)

            # Bind k as a default argument so the closure cannot pick up
            # a later value of the loop variable.
            def onclick(event, k=k):
                edges[k, 1] = event.xdata
                # Single pre-formatted string: prints identically under
                # Python 2 (statement) and Python 3 (function).
                print("Neuron: %d\tThreshold: %.3f" % (k, edges[k, 1]))
                plt.close()
            fig.canvas.mpl_connect('button_press_event', onclick)
            plt.show()

        # Digitize this column (np.digitize returns 1-based bin indices).
        D[:, k] = np.digitize(Fhat[:, k], edges[k, :]) - 1

    assert np.all(D >= 0) and np.all(D < nbins), "Error in digitizing!"

    # Cast D to an integer matrix (plain int: the np.int alias has been
    # removed from modern NumPy).
    D = D.astype(int)
    return D, edges
def remove_double_spikes(D):
    """
    Suppress back-to-back spikes: wherever bins t and t+1 both spike,
    zero out bin t (the first of the pair). Note the returned array has
    one row fewer than the input (the last time bin is dropped).

    :param D: TxK spike matrix; not modified.
    :return: (T-1)xK spike matrix with leading members of pairs removed.
    """
    # Mask of bins whose *next* bin also spikes.
    pair_first = (D[1:, :] > 0) & (D[:-1, :] > 0)
    trimmed = D[:-1, :].copy()
    trimmed[pair_first] = 0
    return trimmed
# Script entry point: process ChaLearn "small" networks 1 and 4.
# NOTE(review): the suffix always says iNet1 and only the CC0%d part
# varies -- confirm that is really how the input files are named.
for i in [1,4]:
    print "Processing network ", i
    process_dataset(suffix="_iNet1_Size100_CC0%dinh.txt" % i,
                    outfile="network%d_oopsi.pkl" % i)
| mattjj/pyhawkes | data/chalearn/preprocess.py | Python | mit | 8,562 | [
"NEURON"
] | 2c044425d90532328f57cefe0b6013fa410122469e192fecfa0d236047171168 |
import unittest
import paydunya
from . import PAYDUNYA_ACCESS_TOKENS
paydunya.debug = True
paydunya.api_keys = PAYDUNYA_ACCESS_TOKENS
class TestGeneral(unittest.TestCase):
    """General/Miscellaneous tests."""

    def setUp(self):
        """Build a fresh store and OPR fixture before every test."""
        # Your PAYDUNYA developer tokens
        self.opr_data = {
            'total_amount': 215000,
            'description': "Samsung Galaxy S6",
            "account_alias": "774563209",
        }
        self.store = paydunya.Store(name="Fabrice Accessoires")
        self.opr = paydunya.OPR(self.opr_data, self.store)

    def tearDown(self):
        """Drop the fixtures so no state leaks between tests."""
        self.opr = None
        self.store = None
        self.opr_data = None

    def test_rsc_endpoints(self):
        """Resource endpoints are https URLs ending in the given path."""
        endpoint = 'checkout-invoice/confirm/test_98567JGF'
        url = self.opr.get_rsc_endpoint(endpoint)
        self.assertTrue(url.startswith('https'))
        self.assertTrue(url.endswith(endpoint))

    def test_add_headers(self):
        """add_header merges the given mapping into the request headers."""
        self.opr.add_header({'Foo': 'Bar'})
        self.assertIn("Foo", self.opr.headers.keys())
        self.assertNotIn('FooBar', self.opr.headers.keys())
if __name__ == '__main__':
    # Allow running this test module directly: python test_general.py
    unittest.main()
| paydunya/paydunya-python | tests/test_general.py | Python | mit | 1,158 | [
"Galaxy"
] | 295dc7cb5fdcdbd22dc9d59c18603519107f1d0bb7a05d2f57186aa45229dab0 |
from __future__ import division
import numpy as np
import parabem
from parabem.pan2d import doublet_2_0, doublet_2_0_v, source_2_0, source_2_0_v
from parabem.vtk_export import VtkWriter
from parabem.utils import check_path
# Geometry: six points defining three vertical panels at x = -1, 0 and 1,
# each spanning y = -10 .. 10 (tall panels so the mid-height solution is
# effectively one-dimensional in x).
p1 = parabem.PanelVector2(-1, -10)
p2 = parabem.PanelVector2(0, -10)
p3 = parabem.PanelVector2(1, -10)
p4 = parabem.PanelVector2(-1, 10)
p5 = parabem.PanelVector2(0, 10)
p6 = parabem.PanelVector2(1, 10)
# Panel orientation follows the point order (pan1 runs top->bottom,
# pan2/pan3 bottom->top).
pan1 = parabem.Panel2([p4, p1])
pan2 = parabem.Panel2([p2, p5])
pan3 = parabem.Panel2([p3, p6])
# 3x3 influence matrix and right-hand side: one collocation equation per
# panel, solved below for the three source strengths.
mat = np.zeros([3, 3])
rhs = np.zeros([3])
panels = [pan1, pan2, pan3]
# Sketch of the setup (collocation points '+' sit on the three panels):
#   |      |      |
#   |+    +|     +|
#   |      |      |
#   x=-1   x=0    x=1
#   T=T1    ?     T=T2     (prescribed boundary temperatures, see below;
#                           the original sketch said 0/1, the code uses
#                           T1=-10 and T2=10)
#      l1      l2          (material parameters of the two gaps)
# panel1: temp-formulation (Dirichlet condition at the left boundary)
T1 = -10
T2 = 10
l1 = 1
l2 = 2
# NOTE: `from __future__ import division` is active, so l2 / l1 is true
# division (= 2.0) even though both are ints.
mat[0, 0] = source_2_0(panels[0].center, panels[0])
mat[0, 1] = source_2_0(panels[0].center, panels[1]) * (1 - l2 / l1)
mat[0, 2] = source_2_0(panels[0].center, panels[2])
rhs[0] += doublet_2_0(panels[0].center, panels[0]) * T1
rhs[0] += doublet_2_0(panels[0].center, panels[2]) * T2
rhs[0] += T1
# panel2: velocity formulation (flux condition at the interior interface,
# using the panel-normal component of the induced velocity)
mat[1, 0] = source_2_0_v(panels[1].center, panels[0]).dot(panels[1].n)
mat[1, 1] = source_2_0_v(panels[1].center, panels[1]).dot(panels[1].n) * (1 - l2 / l1) - 1
mat[1, 2] = source_2_0_v(panels[1].center, panels[2]).dot(panels[1].n)
rhs[1] += doublet_2_0_v(panels[1].center, panels[0]).dot(panels[1].n) * T1
rhs[1] += doublet_2_0_v(panels[1].center, panels[2]).dot(panels[1].n) * T2
# panel3: temp-formulation (Dirichlet condition at the right boundary)
mat[2, 0] = source_2_0(panels[2].center, panels[0])
mat[2, 1] = source_2_0(panels[2].center, panels[1]) * (1 - l2 / l1)
mat[2, 2] = source_2_0(panels[2].center, panels[2])
rhs[2] += doublet_2_0(panels[2].center, panels[0]) * T1
rhs[2] += doublet_2_0(panels[2].center, panels[2]) * T2
rhs[2] += T2
# Solve for the three source strengths and dump the system for inspection.
sol = np.linalg.solve(mat, rhs)
print(mat)
print(rhs)
print(sol)
# Evaluate the solution on a 300x300 grid over [-3, 3]^2.
nx = 300
ny = 300
x_grid = np.linspace(-3, 3, nx)
y_grid = np.linspace(-3, 3, ny)
grid = [parabem.Vector2(x, y) for y in y_grid for x in x_grid]
# Scalar field: superpose the doublet (temperature boundary) and source
# contributions, mirroring the assembly above.
t_list = []
for point in grid:
    t = 0
    t -= doublet_2_0(point, pan1) * T1
    t -= doublet_2_0(point, pan3) * T2
    t += source_2_0(point, pan1) * sol[0]
    t += source_2_0(point, pan2) * sol[1] * (1 - l2 / l1)
    t += source_2_0(point, pan3) * sol[2]
    t_list.append(t)
# Vector field: same superposition with the velocity kernels; inside each
# gap the result is scaled by that gap's material parameter (l1 between
# x=-1 and 0, l2 between x=0 and 1).
q_list = []
for point in grid:
    q = parabem.Vector2(0, 0)
    q -= doublet_2_0_v(point, pan1) * T1
    q -= doublet_2_0_v(point, pan3) * T2
    q += source_2_0_v(point, pan1) * sol[0]
    q += source_2_0_v(point, pan2) * sol[1] * (1 - l2 / l1)
    q += source_2_0_v(point, pan3) * sol[2]
    if point.x > -1 and point.x < 0:
        q *= l1
    if point.x > 0 and point.x < 1:
        q *= l2
    q_list.append(q)
# Write both fields as point data on a structured grid for ParaView.
writer = VtkWriter()
with open(check_path("results/heat_test.vtk"), "w") as _file:
    writer.structed_grid(_file, "element_2", [nx, ny, 1])
    writer.points(_file, grid)
    writer.data(_file, t_list, name="temperature", _type="SCALARS", data_type="POINT_DATA")
    writer.data(_file, q_list, name="q", _type="VECTORS", data_type="POINT_DATA")
"VTK"
] | cdec3c424ed1dc8488d7f660ce88068c721b560254c844daebc39e5faaa619c9 |
#!/bin/env python
import os
from math import *
# Read in topology files and make changes
import parmed
import sys
import numpy as np
# If ParmEd is older than 2.0.4 then it will not work, raise an error.
# The check itself can fail on very old ParmEd builds (missing ``version``
# attribute, or a version object that cannot be compared to a tuple), so
# those failures are treated as "too old" as well.  The former bare
# ``except:`` also swallowed KeyboardInterrupt/SystemExit; catch only the
# exceptions the try-block can actually raise.
try:
    ver = parmed.version
    if ver < (2,0,4):
        raise RuntimeError("ParmEd is too old, please upgrade to 2.0.4 or later")
except (AttributeError, TypeError, RuntimeError):
    oldParmEd = Exception('ERROR: ParmEd is too old, please upgrade to 2.0.4 or later')
    raise oldParmEd
"""Implements the new GAFF hydroxyl parameterization of Fennell, Wymer, and Mobley (2014), which involves scaling partial charges on hydroxyl and some surrounding atoms, and new LJ parameters for hydroxyl oxygens.
Written by Caitlin Bannan, modeled after hydroxynator.py by David Mobley and hydroxynator.pl by Chris Fennell. Updated using ParmEd tools to read in topology files.
Modules needed:
NumPy
ParmEd - available at http://github.com/ParmEd/ParmEd
Change Log:
- First version, 11/20/13
- 11/21/13: Fixed bug with tolerance for hydrogen bond lengths; fixed output file name; added option for output topology file name to have different name from input topology file.
- 9/11/2015: Complete rewrite by Caitlin Bannan using ParmEd tools started
Theoretically this should be able to handle file types other than GROMACS topology files, but testing has not been done on other file types.
- 9/22/2015: Added to MobleyLab GitHub repository where changes will be tracked.
"""
def getTotalCharge(system):
    """
    Calculates total charge on a molecule or system of molecules from parmed
    input:
        system = could be a single molecule or system of molecules from parmed
    output:
        charge = float is the net charge on the system
    """
    # Sum the per-atom partial charges; ``sum`` with a generator replaces the
    # manual accumulator loop (returns 0 for an atom-less system).
    return sum(a.charge for a in system.atoms)
def findHydroxylsAlphaCarbons(system,
        hydroxyl_o = 'oh',
        hydroxyl_h = 'ho'):
    """
    Finds the hydroxyl oxygens, hydrogens, and alpha carbons (or heavy atoms) in a parmed molecule
    input:
        system = parmed molecule
        hydroxyl_o = string, atom type for hydroxyl oxygen, default = 'oh' from Amber
        hydroxyl_h = string, atom type for hydroxyl hydrogen, default = 'ho' from Amber
    output:
        oxygens = list of atoms in the system that have the type hydroxyl oxygen
        hydrogens = list of atoms that have the type hydroxyl hydrogens
        alpha_carbons = list of atoms that are in the alpha position to a hydroxyl group
    Raises an error if a hydroxyl oxygen has the wrong number of bonds or no
    hydroxyl hydrogen attached.
    Prints a warning if there are multiple hydroxyls on one carbon or if the alpha atom is a non-carbon. These cases are not well documented. Prints a warning if a found hydroxyl oxygen is a part of a peroxide group, peroxides are not scaled as they are believed to behave differently from hydroxyl groups.
    """
    alpha_carbons = [] # save alpha atoms
    oxygens = []       # save hydroxyl oxygens
    hydrogens = []     # save hydroxyl hydrogens
    # Parse through atoms in the system
    for a in system.atoms:
        # Only hydroxyl-type oxygens are of interest.
        if a.type != hydroxyl_o:
            continue
        neighbors = a.bond_partners
        partner_types = [n.type for n in neighbors]
        # hydroxyl oxygen should always have 2 bonded neighbors
        if len(neighbors) != 2:
            raise Exception("ERROR: hydroxyl oxygen (%s) has the wrong number of bonded neighbors check your topology file!" % str(a))
        # hydroxyl oxygens should always be bound to a hydroxyl hydrogen
        if hydroxyl_h not in partner_types:
            raise Exception("ERROR: One of the hydroxyl oxygens is not bound to a hydroxyl hydrogen. Please check your topology file")
        # Skip it if the hydroxyl oxygen is a part of a peroxide group
        # (reuse partner_types instead of rebuilding the neighbor-type list).
        if 'os' in partner_types:
            print("WARNING: peroxide functional group found. No scaling or LJ parameter adjustment done for peroxide groups.")
            continue
        # If it passed all the checks add oxygen to hydroxyl oxygen list
        oxygens.append(a)
        for n in neighbors:
            # If it's the hydroxyl hydrogen, add to hydrogen list
            if n.type == hydroxyl_h:
                hydrogens.append(n)
                continue
            # Otherwise check it and add to alpha_carbons list.
            # diols on single atom have not been documented
            if n in alpha_carbons:
                print("WARNING: diols with two hydroxyl groups on the same carbon have not been well documented. For now, alpha carbons (or other atoms) will be scaled for each hydroxyl group attached to it.")
            # Non-carbon alpha atoms have not been documented
            elif n.type[0] != 'c':
                print("WARNING: hydroxyl groups attached to non-carbon alpha atoms has not been well documented. For now, these will be treated the same as if the alpha atom was a carbon unless the hydroxyl is a part of a peroxide group in which case no scaling will occur.")
            # add neighbor to alpha carbon list
            alpha_carbons.append(n)
    return oxygens, hydrogens, alpha_carbons
def scaleAndNeutralize(system, oxygens, hydrogens, alpha_carbons,
        sigmaScale = 3.21990,
        epsilonScale = 0.20207,
        chargeScale = 1.20905,
        hydroxyl_o = 'oh',
        hydroxyl_h = 'ho'):
    """
    Scales all hydroxyl oxygens, hydrogens, alpha carbons (heavy atoms), and hydrogens attached to alpha atoms.
    Changes sigma and epsilon values on hydroxyl oxygens
    Finds heavy atoms attached to alpha atoms that will be used to neutralize any excess charge so that the total charge on the molecule is not changed
    input:
        system = parmed molecule
        oxygens = list of parmed atoms that are hydroxyl oxygens
        hydrogens = list of parmed atoms that are hydroxyl hydrogens
        alpha_carbons = list of parmed atoms that are in alpha positions to hydroxyl groups
        sigmaScale = float, LJ parameter, default = 3.21990 Angstroms
        epsilonScale = float, LJ parameter, default = 0.20207 kcal/mol
        chargeScale = float, amount the scaled atoms are scaled by, default = 1.20905
        hydroxyl_o = string, atom type for hydroxyl oxygen, default = 'oh' from Amber
        hydroxyl_h = string, atom type for hydroxyl hydrogen, default = 'ho' from Amber
    output:
        system = parmed molecule system with the changes in charge for all scaled and neutralizing atoms and the sigma and epsilon values changed for hydroxyl oxygen
        len(neutralize) = integer, number of atoms used to neutralize the system
        scales = integer, number of times an atom was scaled (this could be multiple if alpha atom has multiple hydroxyl groups)
    """
    initial_charge = getTotalCharge(system)
    scales = 0 # Used to track how many total atoms were scaled
    # Scale all of the hydroxyl oxygens and hydrogens
    for a in (oxygens + hydrogens):
        a.charge *= chargeScale
        scales += 1
        # Hydroxyl oxygens additionally get the new LJ parameters.
        if a.type == hydroxyl_o:
            a.atom_type.sigma = sigmaScale
            a.atom_type.epsilon = epsilonScale
    # Scale alpha carbon/heavy atom and attached hydrogens.
    # Make a neutralize list
    neutralize = []
    junction = []
    for a in alpha_carbons:
        a.charge *= chargeScale
        # Look at neighbors and scale or add to neutralizing list
        for n in a.bond_partners:
            if n.type[0] == 'h':
                n.charge *= chargeScale
                scales += 1
            # Skip hydroxyl oxygen, it was scaled above
            elif n.type == hydroxyl_o:
                continue
            # A neighbor that is itself an alpha atom is being scaled too,
            # so it cannot serve as a neutralization site.
            elif n in alpha_carbons:
                if not n in junction:
                    junction.append(n)
                    print("\tWARNING: atom %i is at a juction between hydroxyl charge scaling groups. \n\t\tThis atom will be fully scaled rather than used as a neutralization site" % (n.idx+1))
            # If it is not already in neutralizing list add to list
            elif not n in neutralize:
                neutralize.append(n)
    # If there are no neutralizing atoms return the system as is
    if len(neutralize) == 0:
        return system, len(neutralize), scales
    # Spread the excess charge evenly over the neutralization sites so the
    # molecule's net charge is restored to its initial value.
    charge_off = getTotalCharge(system) - initial_charge
    neutralFactor = charge_off / float(len(neutralize))
    for a in neutralize:
        a.charge -= neutralFactor
    return system, len(neutralize), scales
def changeMolecule(molecule,
        sigmaScale = 3.21990,
        epsilonScale = 0.20207,
        chargeScale = 1.20905,
        hydroxyl_o = 'oh',
        hydroxyl_h = 'ho',
        charge_tol = 0.00001):
    """
    Identifies hydroxyl groups, if found, changes sigma and epsilon values on the hydroxyl oxygen and scales charges near the hydroxyl groups and neutralizes molecule so the total charge is unchanged.
    input:
        molecule = parmed molecule to be edited
        sigmaScale = float, LJ parameter, default = 3.21990 Angstroms
        epsilonScale = float, LJ parameter, default = 0.20207 kcal/mol
        chargeScale = float, amount the scaled atoms are scaled by, default = 1.20905
        hydroxyl_o = string, atom type for hydroxyl oxygen, default = 'oh' from Amber
        hydroxyl_h = string, atom type for hydroxyl hydrogen, default = 'ho' from Amber
        charge_tol = float, warning if the final charge is not within this tolerance from the original, default = 0.00001
    output:
        molecule = parmed molecule with the LJ parameters changed for hydroxyl oxygens and charges scaled
    """
    # NOTE(review): int() truncates toward zero; this assumes the molecule's
    # net charge is already (near-)integral -- confirm for charged species.
    initial_charge = int(getTotalCharge(molecule))
    print("The initial charge on this molecule is %.2E" % initial_charge)
    oxygens, hydrogens, alpha_carbons = findHydroxylsAlphaCarbons(molecule, hydroxyl_o, hydroxyl_h)
    # If no hydroxyl group found, return with no changes made
    if len(oxygens) == 0:
        print("No hydroxyl groups found in this molecule, no changes were made")
        return molecule
    print("\tFound %i hydroxyl groups" % (len(oxygens)))
    molecule, num_neutral, num_scale = scaleAndNeutralize(molecule, oxygens, hydrogens, alpha_carbons, sigmaScale, epsilonScale, chargeScale, hydroxyl_o, hydroxyl_h)
    print("\tNumber of times an atom was fully scaled: %i" % num_scale)
    print("\tUsed %i atom(s) to neutralize the charge on this molecule" % num_neutral)
    totalCharge = getTotalCharge(molecule)
    print("\tThe total charge is %.2E" % totalCharge)
    # Sanity check: scaling + neutralization should leave the net charge
    # unchanged to within rounding error.
    if np.abs(totalCharge-initial_charge) > charge_tol:
        print("\tWARNING: After scaling, the net charge on the molecule is not equal to the initial charge within the tolerance (%.2E). If you want the molecule to have the intial charge (%i), redistribute this charge manually." % (charge_tol, initial_charge))
    return molecule
def hydroxynate(topfile,
        outtop = None,
        sigmaScale = 3.21990,
        epsilonScale = 0.20207,
        chargeScale = 1.20905,
        hydroxyl_o = 'oh',
        hydroxyl_h = 'ho',
        charge_tol = 0.00001):
    """
    Parses a topology file using ParmEd tools, changes any molecules with hydroxyl groups. Outputs a topology file with the changes
    input:
        topfile = string, input file that can be read with ParmEd tools
        outtop = string, output topology file to be created, if not provided it will write over the topfile
        sigmaScale = float, LJ parameter, default = 3.21990 Angstroms
        epsilonScale = float, LJ parameter, default = 0.20207 kcal/mol
        chargeScale = float, amount the scaled atoms are scaled by, default = 1.20905
        hydroxyl_o = string, atom type for hydroxyl oxygen, default = 'oh' from Amber
        hydroxyl_h = string, atom type for hydroxyl hydrogen, default = 'ho' from Amber
        charge_tol = float, warning if the final charge is not within this tolerance from the original, default = 0.00001
    output:
        outputSys = parmed system of molecules with changes for all hydroxyl groups and no change in net charge (within tolerance)
    """
    # If outtop is not specified the input file is overwritten.
    if outtop is None:
        outtop = topfile
    # Output and input files should be of the same type.
    # os.path.splitext is used instead of split('.') so paths containing
    # dots (e.g. "run.1/sys.top") and extension-less names are handled.
    # Note I have done no testing of these on file types other than .top
    if os.path.splitext(outtop)[1] != os.path.splitext(topfile)[1]:
        raise Exception('ERROR: input and output files must both be the same file type. Please change your output file extension to match the input file.')
    systems = parmed.load_file(topfile)
    # Split the system into unique molecules plus their occurrence counts.
    components = systems.split()
    molecules = []
    numbers = []
    for c in components:
        molecules.append(c[0])
        numbers.append(len(c[1]))
    print("Found %s molecule(s)" % str(len(components)))
    for i, molecule in enumerate(molecules):
        print("molecule %i" % (i + 1))
        molecules[i] = changeMolecule(molecule, sigmaScale, epsilonScale, chargeScale, hydroxyl_o, hydroxyl_h, charge_tol)
        print("")
    # Rebuild the full system with the original multiplicities.
    outputSys = molecules[0] * numbers[0]
    for idx in range(1, len(molecules)):
        outputSys += molecules[idx] * numbers[idx]
    outputSys.write(outtop)
    return outputSys
# If being run from the command line import Option Parser and use methods above
if __name__ == '__main__':
    # Configure input options
    from optparse import OptionParser
    # Default Constants
    sigmaScale = 3.21990    # Changed to match parmed units (A)
    epsilonScale = 0.20207  # Changed to match parmed units (kcal/mol)
    chargeScale = 1.20905   # dimensionless charge-scaling fraction
    hydroxyl_o = 'oh'
    hydroxyl_h = 'ho'
    charge_tol = 0.00001
    parser = OptionParser(usage = "Converts sigma, epsilon, and charge OH values in a GROMACS topology to dielectric corrected values.\nUsage: [-options] [topology file name] \n\n", epilog = "Note: Assumes hydroxyl oxygens and hydrogens follow standard GAFF naming ('%s' and '%s' respectively; if you have hydroxyls with other atom names you will need to adjust the source code.)." % (hydroxyl_o, hydroxyl_h))
    # Set options
    parser.add_option('-e',
            help='OH epsilon conversion value, if other than standard. Default: %.5g kcal/mol' % epsilonScale,
            default = epsilonScale,
            type = "float",
            dest = 'epsilonScale')
    # Fixed help text: the charge scale is a dimensionless fraction, not a
    # length (the old text wrongly said "Angstroms").
    parser.add_option('-q',
            help='OH environment charge scaling fraction, if other than standard. Default: %.5f' % chargeScale,
            default = chargeScale,
            type = "float",
            dest = 'chargeScale')
    parser.add_option('-s',
            help='OH sigma conversion value, if other than standard. Default: %.5g Angstroms' % sigmaScale,
            default = sigmaScale,
            type = "float",
            dest = 'sigmaScale')
    parser.add_option('-o',
            help='Output topology file name. Default: Edit input topology file. If specified, instead creates new output topology file.',
            dest = 'outtop',
            type = "string" )
    parser.add_option('-O',
            help = "Hydroxyl oxygen atom type. The Default uses Amber atom types or 'oh'",
            default = hydroxyl_o,
            type = "string",
            dest = 'hydroxyl_o')
    parser.add_option('-H',
            help = "Hydroxyl hydrogen atom type. The Default uses Amber atom types of 'ho'",
            default = hydroxyl_h,
            type = "string",
            dest = 'hydroxyl_h')
    parser.add_option('-c',
            help = 'Charge tolerance, this is how different the charge on the scaled molecules can be from the net charge on the molecule to begin with. This should always be no bigger than rounding error. Default = 0.00001',
            default = charge_tol,
            type = "float",
            dest = 'charge_tol')
    # Load Options
    (opt, args) = parser.parse_args()
    # Guard: a missing positional argument used to crash with an IndexError.
    if len(args) < 1:
        parser.error('ERROR: please provide the name of a topology file.')
    topfile = args[0]
    if not opt.outtop:
        outtop = topfile
    else:
        outtop = opt.outtop
    if not os.path.isfile(topfile):
        parser.error('ERROR: "%s" is not a topology file I can find. Please enter the name of a valid topology file.' % topfile )
    hydroxynate(topfile, outtop, opt.sigmaScale, opt.epsilonScale, opt.chargeScale, opt.hydroxyl_o, opt.hydroxyl_h, opt.charge_tol)
| MobleyLab/Hydroxynator | hydroxynator.py | Python | gpl-3.0 | 16,894 | [
"Amber",
"Gromacs"
] | c344b6db44962d41257883846eb957fa1125a8042f417c3b22ddf3c6afd1bbb9 |
import Scientific.IO.NetCDF as nc
import numpy as np
import sys
import math
import pylab as pl
import matplotlib.colors as colors
from numpy import floor, sqrt, sin, cos, arccos, arctan2, pi
class gth_hemisphere:
    """Gathering hemisphere: an approximately equal-area binning of the
    upper hemisphere.

    The hemisphere is divided into ``resTheta`` polar rings; ring i is
    subdivided into ``nPhi[i]`` azimuthal cells sized so each cell has
    (roughly) the same solid angle as the polar cap (ring 0).  Cell values
    are stored flat in ``self.data``, addressed as ``cIdx[ring] + phi_bin``.
    """
    def __init__(self, resTheta=1, nThetaI=1, nDataLevels=1):
        self.Type = "Hemisphere"
        self.resTheta = resTheta
        # Polar bin width; the hemisphere spans theta in [0, pi/2].
        self.dTheta = 0.5 * pi / float(resTheta)
        self.dThetaInv = 1.0 / self.dTheta
        self.nCells = 1
        self.nThetaI = nThetaI
        self.nLevels = nDataLevels
        # NOTE(review): this stores the *built-in* ``type`` function; it
        # looks unintentional -- confirm nothing reads ``self.type``.
        self.type = type
        self.dPhi = np.zeros(resTheta)
        self.dPhiInv = np.zeros(resTheta)
        self.dA = np.zeros(resTheta)
        self.mTheta = np.zeros(resTheta)
        self.nPhi = np.zeros(resTheta, np.int64)
        # cIdx[i]: flat index of ring i's first cell.
        # NOTE(review): kept as a float array yet later used as an index,
        # which relies on old NumPy accepting float indices -- verify.
        self.cIdx = np.zeros(resTheta)
        self.phiRange = 2.0 * pi
        # Solid angle of the polar cap (ring 0); target area for all cells.
        dA0 = self.phiRange * (1.0 - cos(self.dTheta))
        self.nPhi [0] = 1
        self.cIdx [0] = 0
        self.dPhi [0] = self.phiRange
        self.dPhiInv [0] = 1.0 / self.phiRange
        self.dA [0] = dA0
        self.mTheta [0] = 0.5 * self.dTheta
        for i in range(1, resTheta):
            # Azimuth width giving each cell the cap's solid angle, then
            # rounded to an integer number of cells per ring.
            dPhi = dA0 / (cos(i * self.dTheta) - cos((i+1) * self.dTheta))
            rPhi = round(self.phiRange / dPhi)
            dPhi = self.phiRange / float(rPhi)
            self.nPhi [i] = rPhi
            self.dPhi [i] = dPhi
            self.dPhiInv [i] = 1.0 / dPhi
            # Exact per-cell solid angle after the rounding above.
            self.dA [i] = dPhi * (cos(i * self.dTheta) - cos((i+1) * self.dTheta))
            # NOTE(review): ring 0 uses (0 + 0.5)*dTheta, but rings i>=1 use
            # (i - 0.5)*dTheta; the geometric midpoint of ring i would be
            # (i + 0.5)*dTheta.  Confirm whether this offset is intentional.
            self.mTheta [i] = self.dTheta * (float(i) - 0.5)
            self.cIdx [i] = self.cIdx[i-1] + self.nPhi[i-1]
            self.nCells = self.nCells + rPhi
        self.dAMean = self.phiRange / float(self.nCells)
        # data[incidence_bin, cell, level]; ``weight`` mirrors its shape.
        self.data = np.zeros([nThetaI, self.nCells, nDataLevels])
        self.weight = np.zeros([nThetaI, self.nCells, nDataLevels])
    def load(self, fName):
        """
        Loads the hemisphere data from a netCDF file.

        Overwrites this object's binning attributes with those stored in the
        file and exits the process on read errors or malformed files.

        Returns: nothing
        """
        try:
            dFile = nc.NetCDFFile(fName, "r")
        except IOError:
            print "Error reading file, exiting."
            sys.exit()
        if "Hemisphere" not in dFile.variables.keys():
            print "Error: not a proper hemisphere file."
            sys.exit()
        # Optional element list (only present in some files).
        if "Elements" in dir(dFile):
            self.Elements = str(dFile.Elements).split()
        self.Type = dFile.Type
        self.nPhi = np.array(dFile.nPhi)
        self.cIdx = np.array(dFile.cIdx)
        ## Convert Fortran indices to numpy indices
        if self.cIdx[0] == 1:
            self.cIdx -= 1
        self.dPhi = np.array(dFile.dPhi)
        self.dPhiInv = 1.0 / self.dPhi
        self.nThetaI = int(dFile.nThetaI)
        self.nLevels = int(dFile.nLevels)
        self.resTheta = int(dFile.nThetaE)
        self.dTheta = 0.5 * math.pi / float(self.resTheta)
        self.dThetaInv = 1.0/self.dTheta
        self.dA = dFile.dA
        self.data = np.array(dFile.variables['Hemisphere'].getValue())
        dFile.close()
    def divideBySolidAngle(self):
        # Normalize the accumulated cell sums by each ring's per-cell
        # solid angle, turning sums into densities.
        for i in range(self.resTheta):
            self.data[:, self.cIdx[i] : self.cIdx[i] + self.nPhi[i], :] /= self.dA[i]
    def carDirToCell(self, D):
        # Map a Cartesian direction vector D to its flat cell index.
        r = sqrt ( (D**2).sum() )
        theta = arccos ( D[2] / r )
        phi = arctan2 ( D[1] / r, D[0] / r )
        # Wrap atan2's (-pi, pi] result into [0, 2*pi).
        if( phi < 0.0 ):
            phi = 2.0*pi + phi
        t = floor( theta * self.dThetaInv )
        p = floor( phi * self.dPhiInv[t] )
        return self.cIdx[t] + p
    def addDataCar(self, D, v, set=0, lvl=0):
        # Accumulate value v into the cell that direction D falls in.
        c = self.carDirToCell(D)
        self.data[set, c, lvl] += v
    def toArray(self, set=0, lvl=0):
        """
        Unpacks the gathering hemisphere into a 2-dimensional array.

        Each ring is resampled to the widest ring's azimuthal resolution by
        nearest-neighbour repetition of its cell values.

        Returns: numpy.array
        """
        resTheta = self.resTheta
        resPhi = self.nPhi.max()
        if(self.Type == 'Hemisphere'):
            dp = math.pi * 2.0 / float(resPhi)
        else:
            dp = math.pi / float(resPhi)
        data = np.zeros([resTheta, resPhi])
        #print self.data[lvl,:,set]
        for i in range(resTheta):
            dPhiI = dp * self.dPhiInv[i]
            for j in range(resPhi):
                # NOTE(review): indexed [lvl, cell, set] here but
                # [set, cell, lvl] in addDataCar -- confirm which axis
                # ordering the stored data actually uses.
                data[i,j] = self.data[lvl, self.cIdx[i] + int(math.floor(j * dPhiI)), set]
        return data
    def phiSlice(self, theta, set=0, lvl=0):
        """
        Azimuthal slice of the ring containing ``theta``.

        NOTE(review): reads ``self.rows``, which is never assigned anywhere
        in this class -- looks like dead/legacy code; verify before use.

        Returns: numpy.array
        """
        iTheta = int(math.floor(theta * self.dThetaInv))
        resPhi = self.nPhi[iTheta]
        dPhi = self.dPhi[iTheta]
        data = np.zeros([resPhi,2])
        for i in range(resPhi):
            data[i,0] = (i + 0.5) * dPhi
            data[i,1] = self.rows[set][iTheta][i,lvl]
        return data
    def thetaSlice(self, phi, set=0, lvl=0):
        """
        Polar slice at a fixed azimuth ``phi``.

        NOTE(review): also depends on the unassigned ``self.rows`` -- see
        phiSlice.

        Returns: numpy.array
        """
        data = np.zeros([self.resTheta, 2])
        for i in range(self.resTheta):
            data[i,0] = (i+0.5) * self.dTheta
            data[i,1] = self.rows[set][i][phi * self.dPhiInv[i],lvl]
        return data
    def eval(self, thtI, thtE, phi):
        # Point lookup (thtE and phi given in degrees) scaled by 4*pi.
        # NOTE(review): depends on the unassigned ``self.rows`` -- see
        # phiSlice.
        iThtI = int(math.floor(thtI))
        iThtE = int(math.floor(math.radians(thtE) * self.dThetaInv))
        iPhi = int(math.floor(math.radians(phi) * self.dPhiInv[iThtE]))
        return self.rows[iThtI][iThtE][iPhi,0] * 4.0 * math.pi
class xrHemisphere(gth_hemisphere):
    """Hemisphere variant that additionally carries X-ray spectral data.

    Overrides ``load`` to also read absorption/extinction coefficients and
    spectrum CDF tables from the netCDF file; those variables are mandatory
    for this subclass.
    """
    def __init__(self, resTheta=1, nThetaI=1, nDataLevels=1):
        gth_hemisphere.__init__(self, resTheta, nThetaI, nDataLevels)
    def load(self, fName):
        """
        Loads the hemisphere data from a netCDF file.

        In addition to the base-class fields, the spectral variables
        (muAbs, muExt, Spectrum and their CDF tables) must be present;
        the process exits if any are missing.

        Returns: nothing
        """
        try:
            dFile = nc.NetCDFFile(fName, "r")
        except IOError:
            print "Error reading file, exiting."
            sys.exit()
        if "Hemisphere" not in dFile.variables.keys():
            print "Error: not a proper hemisphere file."
            sys.exit()
        # Spectral data block; a KeyError/AttributeError here means the
        # file lacks the X-ray variables this subclass requires.
        try:
            self.Elements = str(dFile.Elements).split()
            self.muAbs = np.array(dFile.variables['muAbs'].getValue())
            self.muAbsCDF = np.array(dFile.variables['muAbsCdf'].getValue())
            self.muExt = np.array(dFile.variables['muExt'].getValue())
            self.Spectrum = np.array(dFile.variables['Spectrum'].getValue())
            self.SpectrumCdf = np.array(dFile.variables['SpectrumCdf'].getValue())
            self.SpectrumCdfInv = np.array(dFile.variables['SpectrumCdfInv'].getValue())
            self.SpectrumTest = np.array(dFile.variables['SpectrumTest'].getValue())
        except (KeyError, AttributeError):
            print "Error: Malformed input file, missing data."
            sys.exit(1)
        self.Type = dFile.Type
        self.nPhi = np.array(dFile.nPhi)
        self.cIdx = np.array(dFile.cIdx)
        ## Convert Fortran indices to numpy indices
        if self.cIdx[0] == 1:
            self.cIdx -= 1
        self.dPhi = np.array(dFile.dPhi)
        self.dPhiInv = 1.0 / self.dPhi
        self.nThetaI = int(dFile.nThetaI)
        self.nLevels = int(dFile.nLevels)
        self.resTheta = int(dFile.nThetaE)
        self.dTheta = 0.5 * math.pi / float(self.resTheta)
        self.dThetaInv = 1.0/self.dTheta
        self.dA = dFile.dA
        self.data = np.array(dFile.variables['Hemisphere'].getValue())
        dFile.close()
| dronir/EM | python/xringleDirPlot.py | Python | gpl-3.0 | 6,957 | [
"NetCDF"
] | 40c0da805d853c603c2259db9cc171372993a4817b6fa30521144f6ff974a69e |
#!/usr/bin/env python
import numpy as np
from numpy import linalg as la
import numexpr as ne
import sys, csv
import pdb
import copy
import warnings
class CSVInput:
    """Reads a whitespace-delimited data file into ``titles`` and ``data``.

    Each field is converted to ``int`` when possible, then ``float``;
    remaining strings matching common true/false spellings become 1/0.

    Fixes over the previous version:
      * ``int`` is tried before ``float`` (previously the int branch was
        unreachable because ``float`` accepts every int literal, so all
        numbers came back as floats);
      * rows are no longer silently dropped when ``num_convert=False``;
      * an empty file no longer raises IndexError when computing ``cols``;
      * the file is opened in text mode (Python 3 csv compatibility).
    """
    def __init__(self, filename, first_row_titles=False, num_convert=True, set_true_false_01=True):
        self.titles = []
        self.data = []
        self.boolean_false = ['F', 'f', 'False', 'FALSE', 'false']
        self.boolean_true = ['T', 't', 'True', 'TRUE', 'true']
        with open(filename, 'r') as infile:
            reader = csv.reader(infile, delimiter=' ')
            for i, row in enumerate(reader):
                if i == 0 and first_row_titles:
                    self.titles += row
                elif num_convert:
                    self.data.append([self._convert(elem) for elem in row])
                else:
                    # Previously these rows were discarded entirely.
                    self.data.append(list(row))
        self.rows = len(self.data)
        # Guard against an empty data section.
        self.cols = len(self.data[0]) if self.data else 0

    def _convert(self, elem):
        """Convert one field: int, then float, then 0/1 booleans, else raw string."""
        try:
            return int(elem)
        except ValueError:
            pass
        try:
            return float(elem)
        except ValueError:
            pass
        # Substring match preserves the original (loose) boolean detection.
        if any(false in elem for false in self.boolean_false):
            return 0
        elif any(true in elem for true in self.boolean_true):
            return 1
        return elem
class Classifier(object):
    """Abstract base class defining the two-class classifier interface.

    Subclasses override Train/Classify/ReformatData; the base
    implementations are no-ops so partial implementations still run.
    """
    def __init__(self):
        pass
    def Train(self, samples, truth):
        # Fit the classifier on (samples, truth); overridden by subclasses.
        pass
    def Classify(self, sample):
        # Return predicted labels for the given samples; overridden by subclasses.
        pass
    def ReformatData(self, samples, truth):
        # Default: pass the data through unchanged.
        return (samples, truth)
class Fisher(Classifier):
    """Two-class Fisher linear discriminant.

    Projects the data onto the Fisher direction and fits a 1-D Gaussian per
    class in the projected space; classification picks the class with the
    higher Gaussian likelihood.
    """
    def __init__(self, class_a, class_b):
        self.projection = []
        self.c_a = class_a
        self.c_b = class_b
        self.a_gauss = 0
        self.b_gauss = 0

    def Train(self, samples, truth):
        """Solve for the Fisher projection and fit per-class 1-D Gaussians."""
        a_samples = np.asmatrix(samples[np.asarray(truth.T)[0] == self.c_a])
        b_samples = np.asmatrix(samples[np.asarray(truth.T)[0] == self.c_b])
        # Per-class mean and covariance.
        a_mean = np.asmatrix(np.mean(a_samples, 0).T)
        b_mean = np.asmatrix(np.mean(b_samples, 0).T)
        a_cov = np.asmatrix(np.cov(a_samples.T))
        b_cov = np.asmatrix(np.cov(b_samples.T))
        # An empty class contributes zero mean/covariance so the solve
        # below still goes through (dead ``error`` flag removed).
        if a_samples.shape[0] == 0:
            a_cov = np.zeros(b_cov.shape)
            a_mean = np.zeros(b_mean.shape)
        if b_samples.shape[0] == 0:
            b_cov = np.zeros(a_cov.shape)
            b_mean = np.zeros(a_mean.shape)
        # Fisher direction: (Sw + eps*I)^-1 (mu_a - mu_b); the small ridge
        # keeps the within-class scatter matrix invertible.
        self.projection = la.inv((a_cov + b_cov) + np.eye(a_cov.shape[0]) * 0.00001) * (a_mean - b_mean)
        self.projection /= la.norm(self.projection)
        self.a_gauss = Gaussian()
        self.b_gauss = Gaussian()
        # Fit a 1-D Gaussian per class in the projected space; an empty
        # class gets no model (treated as zero likelihood in Classify).
        if a_samples.shape[0] != 0:
            a_projected = a_samples * self.projection
            self.a_gauss.Train(a_projected)
        else:
            self.a_gauss = None
        if b_samples.shape[0] != 0:
            b_projected = b_samples * self.projection
            self.b_gauss.Train(b_projected)
        else:
            self.b_gauss = None

    def Classify(self, samples):
        """Return per-sample labels (c_a or c_b) by higher Gaussian likelihood."""
        # Project samples onto the Fisher direction.
        projected = samples * self.projection
        if self.a_gauss:
            a_prob = self.a_gauss.Classify(projected)
        else:
            a_prob = np.zeros((projected.shape[0], 1))
        if self.b_gauss:
            b_prob = self.b_gauss.Classify(projected)
        else:
            b_prob = np.zeros((projected.shape[0], 1))
        # numpy.where replaces the former numexpr.evaluate call -- the same
        # elementwise selection without the extra third-party dependency.
        return np.where(a_prob > b_prob, self.c_a, self.c_b)

    def ReformatData(self, samples, truth):
        """Prepend a constant bias column of ones to the samples."""
        ref_samples = np.ones((samples.shape[0], samples.shape[1]+1))
        ref_samples[:, 1:] = np.matrix(samples)
        ref_truth = np.matrix(truth)
        # NOTE(review): unlike Regression/Random this returns a plain
        # ndarray rather than asmatrix -- kept as-is to avoid changing
        # caller behavior.
        return (ref_samples, ref_truth)
class Regression(Classifier):
    """Two-class least-squares (linear regression) classifier.

    Labels are mapped to -1/+1 targets, a linear model is fit by the
    normal equations, and the sign of the prediction selects the class.
    """
    def __init__(self, class_a, class_b):
        self.a = []
        self.c_a = class_a
        self.c_b = class_b

    def Train(self, samples, truth):
        """Fit weights via the normal equations (ridge fallback if singular)."""
        samples = np.matrix(samples)
        # Map {0, 1} labels to {-1, +1} regression targets.
        truth = np.matrix(truth * 2 - 1)
        try:
            self.a = la.inv(samples.T * samples) * samples.T * truth
        except la.LinAlgError:
            # Bug fix: was ``la.linalg.LinAlgError`` -- a private alias
            # removed from modern NumPy; ``numpy.linalg.LinAlgError`` is
            # the public name.  A tiny ridge term makes the normal matrix
            # invertible when it is singular.
            self.a = la.inv(samples.T * samples + np.eye(samples.shape[1]) * 0.0000001) * samples.T * truth

    def Classify(self, samples):
        """Return c_a where the linear prediction is negative, else c_b."""
        samples = np.matrix(samples)
        projection = samples * self.a
        result = np.zeros(projection.shape)
        result[projection < 0] = self.c_a
        result[projection >= 0] = self.c_b
        return result

    def ReformatData(self, samples, truth):
        """Prepend a constant bias column of ones to the samples."""
        ref_samples = np.ones((samples.shape[0], samples.shape[1]+1))
        ref_samples[:, 1:] = np.matrix(samples)
        ref_truth = np.matrix(truth)
        return (np.asmatrix(ref_samples), ref_truth)
class Gaussian(Classifier):
    """Multivariate Gaussian density model used as a likelihood scorer.

    ``Classify`` returns the density value at each sample (not a label);
    callers compare densities from two models to pick a class.
    """
    def __init__(self):
        self.cov_inv = []
        self.mean = []
        self.normalizer = []

    def Train(self, samples):
        """Estimate mean/covariance, with scalar fallbacks for 1-D/degenerate data."""
        self.mean = np.mean(samples, 0).T
        self.cov = np.cov(samples.T)
        # A single sample has no covariance; substitute ones so the
        # inverse below is defined.
        if samples.shape[0] == 1:
            self.cov = np.ones(self.cov.shape)
        if self.cov.shape != ():
            self.cov_inv = la.inv(self.cov)
        else:
            # 0-d covariance (1-D data): plain scalar inverse with a tiny
            # epsilon to avoid division by zero.
            self.cov_inv = 1.0 / (self.cov + 0.000000001)
        # Normalizing constant of the Gaussian density.
        if self.cov.shape != ():
            self.normalizer = 1.0 / (np.sqrt(la.det(2.0 * np.pi * self.cov)))
        else:
            self.normalizer = 1.0 / (np.sqrt(2.0 * np.pi * (self.cov + 0.000000001)))

    def ClassifySample(self, sample):
        """Density at a single (column-vector) sample."""
        return self.normalizer * np.exp(- 0.5 * (sample - self.mean).T * self.cov_inv * (sample - self.mean))

    def Classify(self, samples):
        """Density at each row of ``samples``; returns a column of values.

        Bug fix: the Mahalanobis distance is now computed on mean-centered
        samples, matching ClassifySample; previously the learned mean was
        ignored, so densities were evaluated around the origin.
        """
        centered = samples - self.mean.T
        # Mahalanobis distance via elementwise multiply and column sum.
        dist = self.cov_inv * centered.T
        dist = np.multiply(centered.T, dist)
        dist = np.sum(dist, 0).T
        return self.normalizer * np.exp(-0.5 * dist)

    def ReformatData(self, samples):
        return np.matrix(samples)
class Random(Classifier):
    """Like Fisher, but projects onto a random unit direction before the
    per-class 1-D Gaussian fit.  Useful as a baseline / weak learner.
    """
    def __init__(self, class_a, class_b):
        self.projection = 0
        self.a_gauss = 0
        self.b_gauss = 0
        self.c_a = class_a
        self.c_b = class_b

    def Train(self, samples, truth):
        """Pick a random projection and fit per-class 1-D Gaussians on it."""
        # Randomly select a unit-norm projection direction.
        self.projection = np.random.rand(samples.shape[1], 1)
        self.projection /= la.norm(self.projection)
        a_samples = np.asmatrix(samples[np.asarray(truth.T)[0] == self.c_a])
        b_samples = np.asmatrix(samples[np.asarray(truth.T)[0] == self.c_b])
        self.a_gauss = Gaussian()
        self.b_gauss = Gaussian()
        # Fit a 1-D Gaussian per class in the projected space; an empty
        # class gets no model (treated as zero likelihood in Classify).
        if a_samples.shape[0] != 0:
            a_projected = a_samples * self.projection
            self.a_gauss.Train(a_projected)
        else:
            self.a_gauss = None
        if b_samples.shape[0] != 0:
            b_projected = b_samples * self.projection
            self.b_gauss.Train(b_projected)
        else:
            self.b_gauss = None

    def Classify(self, samples):
        """Return per-sample labels (c_a or c_b) by higher Gaussian likelihood."""
        projected = samples * self.projection
        if self.a_gauss:
            a_prob = self.a_gauss.Classify(projected)
        else:
            a_prob = np.zeros((projected.shape[0], 1))
        if self.b_gauss:
            b_prob = self.b_gauss.Classify(projected)
        else:
            b_prob = np.zeros((projected.shape[0], 1))
        # numpy.where replaces the former numexpr.evaluate call -- the same
        # elementwise selection without the extra third-party dependency.
        return np.where(a_prob > b_prob, self.c_a, self.c_b)

    def ReformatData(self, samples, truth):
        """Prepend a constant bias column of ones to the samples."""
        ref_samples = np.ones((samples.shape[0], samples.shape[1]+1))
        ref_samples[:, 1:] = np.matrix(samples)
        ref_truth = np.matrix(truth)
        return (np.asmatrix(ref_samples), ref_truth)
################################################################################
# Decision Tree Classifier
################################################################################
class Node(object):
    """One node of a decision tree: a classifier plus optional children."""

    def __init__(self, classifier):
        # Each node owns an independent deep copy of the prototype
        # classifier, so training one node never mutates another.
        self.classifier = copy.deepcopy(classifier)
        self.left = self.right = None
class DecisionTree(Classifier):
    """Decision tree in which every node is itself a two-class classifier.

    Each node trains on the samples routed to it; a child is grown for a
    side only while that side still contains misclassified samples.

    NOTE(review): splitting compares labels against the literals 0 and 1,
    so this effectively assumes class_a == 0 and class_b == 1 (the
    ``class_a``/``class_b`` constructor arguments are otherwise unused) --
    confirm before reusing with other label values.
    """
    def __init__(self, classifier, class_a, class_b, max_depth=1000):
        # Root node gets its own deep copy of the prototype classifier.
        self.tree = Node(copy.deepcopy(classifier))
        self.classifier = classifier
        self.max_depth = max_depth

    def ReformatData(self, samples, truth):
        # Delegate to the prototype classifier's data formatting.
        return self.classifier.ReformatData(samples, truth)

    def Train(self, samples, truth):
        """Grow the tree from the root, starting at depth 1."""
        self.TrainRecur(self.tree, samples, truth, 1)

    def TrainRecur(self, node, samples, truth, depth):
        """Train ``node`` and recursively grow children for misclassified sides."""
        node.classifier.Train(samples, truth)
        if depth > self.max_depth:
            return
        result = node.classifier.Classify(samples)
        compare = result != truth
        # Number of errors among samples routed to each side of the split.
        a_compare = np.sum(compare[result.T[0] == 0])
        b_compare = np.sum(compare[result.T[0] == 1])
        if a_compare > 0:
            a_samples = samples[result.T[0] == 0]
            # Stop if the split made no progress (everything on one side).
            if not np.array_equal(a_samples, samples):
                a_truth = truth[result.T[0] == 0]
                node.left = Node(self.classifier)
                self.TrainRecur(node.left, a_samples, a_truth, depth + 1)
        if b_compare > 0:
            b_samples = samples[result.T[0] == 1]
            if not np.array_equal(b_samples, samples):
                b_truth = truth[result.T[0] == 1]
                node.right = Node(self.classifier)
                self.TrainRecur(node.right, b_samples, b_truth, depth + 1)

    def Classify(self, samples):
        """Classify by routing each sample down the tree from the root."""
        return self.ClassifyRecur(self.tree, samples)

    def ClassifyRecur(self, node, samples):
        """Route samples to children where present; leaves keep this node's prediction."""
        result = node.classifier.Classify(samples)
        output = np.zeros(result.shape)
        if node.left:
            a_samples = samples[result.T[0] == 0]
            a_result = self.ClassifyRecur(node.left, a_samples)
            output[result.T[0] == 0] = a_result
        else:
            output[result.T[0] == 0] = result[result.T[0] == 0]
        if node.right:
            b_samples = samples[result.T[0] == 1]
            b_result = self.ClassifyRecur(node.right, b_samples)
            output[result.T[0] == 1] = b_result
        else:
            output[result.T[0] == 1] = result[result.T[0] == 1]
        return output
################################################################################
################################################################################
class ClassiferTest(object):
    """Harness that trains a classifier and reports its training error.

    (Class name kept as-is -- "ClassiferTest" -- for backward compatibility
    with existing callers, despite the spelling.)
    """
    def __init__(self, classifier, training_set):
        self.classifier = classifier
        # Let the classifier reshape the raw data (e.g. prepend a bias column).
        self.train_data, self.train_truth = self.classifier.ReformatData(training_set[0], training_set[1])
        # Keep the raw labels for error computation against predictions.
        self.train_truth_raw = training_set[1]

    def Training(self):
        """Fit the wrapped classifier on the (reformatted) training data."""
        self.classifier.Train(self.train_data, self.train_truth)

    def Testing(self):
        """Classify the training data and store the predictions."""
        self.train_result = self.classifier.Classify(self.train_data)

    def Results(self):
        """Print overall and per-class misclassification rates."""
        compare = self.train_result - self.train_truth_raw
        compare = compare != 0
        # Split the error mask by true class to get per-class rates.
        a_compare = compare[self.train_truth_raw == 0]
        b_compare = compare[self.train_truth_raw == 1]
        error_rate = float(np.sum(compare)) / float(compare.shape[0])
        a_miss_class = float(np.sum(a_compare)) / float(a_compare.shape[1])
        b_miss_class = float(np.sum(b_compare)) / float(b_compare.shape[1])
        print(error_rate)
        print(a_miss_class)
        print(b_miss_class)
def GraphResults(results):
    """Placeholder: plot classifier comparison results (not implemented)."""
    pass
def GenerateTable(results):
    """Placeholder: tabulate classifier comparison results (not implemented)."""
    pass
def main():
    ''' Test the classes for performance and correctness'''
    # Command line: argv[1] = data CSV whose LAST column is the class label,
    #               argv[2] = CSV of 0/1 flags assigning rows to train (0) / test (1).
    data = CSVInput(sys.argv[1], first_row_titles=False)
    truth_training = CSVInput(sys.argv[2], first_row_titles=False)
    samples = np.matrix(data.data)
    # Split label column off the feature matrix.
    truth = samples[:,-1]
    samples = samples[:,:-1]
    sets = np.array(truth_training.data)
    # NOTE(review): 'trainging_truth' is a typo for 'training_truth', kept
    # as-is because it is used consistently below.
    training_samples = samples[sets.T[0] == 0]
    trainging_truth = truth[sets.T[0] == 0]
    testing_samples = samples[sets.T[0] == 1]
    testing_truth = truth[sets.T[0] == 1]
    # print samples, samples.shape
    # print truth, truth.shape
    # Baseline: plain regression classifier evaluated on its own training set.
    regression = Regression(0, 1)
    classify_test = ClassiferTest(regression, (training_samples, trainging_truth))
    classify_test.Training()
    classify_test.Testing()
    classify_test.Results()
    # fisher = Fisher(0, 1)
    # classify_test = ClassiferTest(fisher, (samples, truth))
    # classify_test.Training()
    # classify_test.Testing()
    # classify_test.Results()
    # random = Random(0, 1)
    # classify_test = ClassiferTest(random, (samples, truth))
    # classify_test.Training()
    # classify_test.Testing()
    # classify_test.Results()
    # Decision tree with regression classifiers at the nodes: report the
    # misclassification rate on the training set, then on the held-out set.
    decision_tree = DecisionTree(Regression(0,1), 0,1, max_depth=800)
    dec_samples, dec_truth = decision_tree.ReformatData(training_samples, trainging_truth)
    decision_tree.Train(dec_samples, dec_truth)
    dec_result = decision_tree.Classify(dec_samples)
    print float(np.sum(dec_result != dec_truth)) / float(dec_result.shape[0])
    print dec_result.shape
    dec_samples, dec_truth = decision_tree.ReformatData(testing_samples, testing_truth)
    dec_result = decision_tree.Classify(dec_samples)
    print float(np.sum(dec_result != dec_truth)) / float(dec_result.shape[0])
    print dec_result.shape
    # GraphResults(results)
    # GenerateTable(results)
# Script entry point: expects <data_csv> <train_test_split_csv> on the command line.
if __name__ == '__main__':
    main()
| lukefrasera/cs775Homework | midterm/scripts/classifier.py | Python | lgpl-3.0 | 13,567 | [
"Gaussian"
] | 2c8d5c1d4c31cd1009b57bb8c4a4d966317c7283ca685680e55a0d0cd44907fa |
# -*- coding: utf-8 -*-
#
# store_restore_network.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Store and restore a network simulation
--------------------------------------
This example shows how to store user-specified aspects of a network
to file and how to later restore the network for further simulation.
This may be used, e.g., to train weights in a network up to a certain
point, store those weights and later perform diverse experiments on
the same network using the stored weights.
.. admonition:: Only user-specified aspects are stored
NEST does not support storing the complete state of a simulation
in a way that would allow one to continue a simulation as if one had
made a new ``Simulate()`` call on an existing network. Such complete
checkpointing would be very difficult to implement.
NEST's explicit approach to storing and restoring network state makes
clear to all which aspects of a network are carried from one simulation
to another and thus contributes to good scientific practice.
Storing and restoring is currently not supported for MPI-parallel simulations.
"""
###############################################################################
# Import necessary modules.
import nest
import pickle
###############################################################################
# These modules are only needed for illustrative plotting.
import matplotlib.pyplot as plt
from matplotlib import gridspec
import numpy as np
import pandas as pd
import textwrap
###############################################################################
# Implement network as class.
#
# Implementing the network as a class makes network properties available to
# the initial network builder, the storer and the restorer, thus reducing the
# amount of data that needs to be stored.
class EINetwork:
    """
    A simple balanced random network with plastic excitatory synapses.

    This simple Brunel-style balanced random network has an excitatory
    and inhibitory population, both driven by external excitatory poisson
    input. Excitatory connections are plastic (STDP). Spike activity of
    the excitatory population is recorded.

    The model is provided as a non-trivial example for storing and restoring.
    """

    def __init__(self):
        # Population sizes: 4:1 excitatory-to-inhibitory ratio.
        self.nI = 500
        self.nE = 4 * self.nI
        self.n = self.nE + self.nI
        # Synaptic weights: inhibition four times as strong as excitation.
        self.JE = 1.0
        self.JI = -4 * self.JE
        # Fixed in-degrees for excitatory and inhibitory inputs.
        self.indeg_e = 200
        self.indeg_i = 50
        self.neuron_model = "iaf_psc_delta"
        # Create synapse models so we can extract specific connection information
        nest.CopyModel("stdp_synapse_hom", "e_syn", {"Wmax": 2 * self.JE})
        nest.CopyModel("static_synapse", "i_syn")
        # Randomized initial membrane potentials.
        self.nrn_params = {"V_m": nest.random.normal(-65., 5.)}
        self.poisson_rate = 800.

    def build(self):
        """
        Construct network from scratch, including instrumentation.
        """
        self.e_neurons = nest.Create(self.neuron_model, n=self.nE, params=self.nrn_params)
        self.i_neurons = nest.Create(self.neuron_model, n=self.nI, params=self.nrn_params)
        self.neurons = self.e_neurons + self.i_neurons
        self.pg = nest.Create("poisson_generator", {"rate": self.poisson_rate})
        self.sr = nest.Create("spike_recorder")
        nest.Connect(self.e_neurons, self.neurons,
                     {"rule": "fixed_indegree", "indegree": self.indeg_e},
                     {"synapse_model": "e_syn", "weight": self.JE})
        nest.Connect(self.i_neurons, self.neurons,
                     {"rule": "fixed_indegree", "indegree": self.indeg_i},
                     {"synapse_model": "i_syn", "weight": self.JI})
        nest.Connect(self.pg, self.neurons, "all_to_all", {"weight": self.JE})
        nest.Connect(self.e_neurons, self.sr)

    def store(self, dump_filename):
        """
        Store neuron membrane potential and synaptic weights to given file.
        """
        assert nest.NumProcesses() == 1, "Cannot dump MPI parallel"

        ###############################################################################
        # Build dictionary with relevant network information:
        # - membrane potential for all neurons in each population
        # - source, target and weight of all connections
        # Dictionary entries are Pandas Dataframes.
        #
        # Strictly speaking, we would not need to store the weight of the inhibitory
        # synapses since they are fixed, but we do so out of symmetry and to make it
        # easier to add plasticity for inhibitory connections later.
        network = {}
        network["n_vp"] = nest.total_num_virtual_procs
        # BUG FIX: read V_m from each population separately. The previous code
        # read self.neurons (all n = nE + nI neurons) for BOTH entries, but
        # restore() recreates the E and I populations separately and needs V_m
        # vectors of length nE and nI respectively.
        network["e_nrns"] = self.e_neurons.get(["V_m"], output="pandas")
        network["i_nrns"] = self.i_neurons.get(["V_m"], output="pandas")
        network["e_syns"] = nest.GetConnections(synapse_model="e_syn").get(
            ("source", "target", "weight"), output="pandas")
        network["i_syns"] = nest.GetConnections(synapse_model="i_syn").get(
            ("source", "target", "weight"), output="pandas")
        with open(dump_filename, "wb") as f:
            pickle.dump(network, f, pickle.HIGHEST_PROTOCOL)

    def restore(self, dump_filename):
        """
        Restore network from data in file combined with base information in the class.
        """
        assert nest.NumProcesses() == 1, "Cannot load MPI parallel"
        with open(dump_filename, "rb") as f:
            network = pickle.load(f)
        # Virtual-process count must match so node-to-VP assignment is identical.
        assert network["n_vp"] == nest.total_num_virtual_procs, "N_VP must match"

        ###############################################################################
        # Reconstruct neurons
        # Since NEST does not understand Pandas Series, we must pass the values as
        # NumPy arrays
        self.e_neurons = nest.Create(self.neuron_model, n=self.nE,
                                     params={"V_m": network["e_nrns"].V_m.values})
        self.i_neurons = nest.Create(self.neuron_model, n=self.nI,
                                     params={"V_m": network["i_nrns"].V_m.values})
        self.neurons = self.e_neurons + self.i_neurons

        ###############################################################################
        # Reconstruct instrumentation
        self.pg = nest.Create("poisson_generator", {"rate": self.poisson_rate})
        self.sr = nest.Create("spike_recorder")

        ###############################################################################
        # Reconstruct connectivity
        nest.Connect(network["e_syns"].source.values, network["e_syns"].target.values,
                     "one_to_one",
                     {"synapse_model": "e_syn", "weight": network["e_syns"].weight.values})
        nest.Connect(network["i_syns"].source.values, network["i_syns"].target.values,
                     "one_to_one",
                     {"synapse_model": "i_syn", "weight": network["i_syns"].weight.values})

        ###############################################################################
        # Reconnect instruments
        nest.Connect(self.pg, self.neurons, "all_to_all", {"weight": self.JE})
        nest.Connect(self.e_neurons, self.sr)
class DemoPlot:
    """
    Create demonstration figure for effect of storing and restoring a network.
    The figure shows raster plots for five different runs, a PSTH for the
    initial 1 s simulation and PSTHs for all 1 s continuations, and weight
    histograms.
    """
    def __init__(self):
        # _next_line indexes both the raster axes list and the color cycle;
        # it advances by one on every add_to_plot() call.
        self._colors = [c["color"] for c in plt.rcParams["axes.prop_cycle"]]
        self._next_line = 0
        plt.rcParams.update({'font.size': 10})
        self.fig = plt.figure(figsize=(10, 7), constrained_layout=False)
        gs = gridspec.GridSpec(4, 2, bottom=0.08, top=0.9, left=0.07, right=0.98, wspace=0.2, hspace=0.4)
        # One raster panel top-left for the initial run, four stacked on the right
        # for the continuation/reload runs.
        self.rasters = ([self.fig.add_subplot(gs[0, 0])] +
                        [self.fig.add_subplot(gs[n, 1]) for n in range(4)])
        self.weights = self.fig.add_subplot(gs[1, 0])
        self.comment = self.fig.add_subplot(gs[2:, 0])
        self.fig.suptitle("Storing and reloading a network simulation")
        # The lower-left panel only carries the explanatory text, no axes.
        self.comment.set_axis_off()
        self.comment.text(0, 1, textwrap.dedent("""
            Storing, loading and continuing a simulation of a balanced E-I network
            with STDP in excitatory synapses.
            Top left: Raster plot of initial simulation for 1000ms (blue). Network state
            (connections, membrane potential, synaptic weights) is stored at the end of
            the initial simulation.
            Top right: Immediate continuation of the initial simulation from t=1000ms
            to t=2000ms (orange) by calling Simulate(1000) again after storing the network.
            This continues based on the full network state, including spikes in transit.
            Second row, right: Simulating for 1000ms after loading the stored network
            into a clean kernel (green). Time runs from 0ms and only connectivity, V_m and
            synaptic weights are restored. Dynamics differ somewhat from continuation.
            Third row, right: Same as in second row with identical random seed (red),
            resulting in identical spike patterns.
            Fourth row, right: Simulating for 1000ms from same stored network state as
            above but with different random seed yields different spike patterns (purple).
            Above: Distribution of excitatory synaptic weights at end of each sample
            simulation. Green and red curves are identical and overlay to form brown curve."""),
                          transform=self.comment.transAxes, fontsize=8,
                          verticalalignment='top')
    def add_to_plot(self, net, n_max=100, t_min=0, t_max=1000, lbl=""):
        # Raster: spikes of the first n_max recorded neurons inside (t_min, t_max).
        spks = pd.DataFrame.from_dict(net.sr.get("events"))
        spks = spks.loc[(spks.senders < n_max) & (t_min < spks.times) & (spks.times < t_max)]
        self.rasters[self._next_line].plot(spks.times, spks.senders, ".",
                                           color=self._colors[self._next_line])
        self.rasters[self._next_line].set_xlim(t_min, t_max)
        self.rasters[self._next_line].set_title(lbl)
        # Only the bottom-right raster keeps its time-axis labels.
        if 1 < self._next_line < 4:
            self.rasters[self._next_line].set_xticklabels([])
        elif self._next_line == 4:
            self.rasters[self._next_line].set_xlabel('Time [ms]')
        # To save time while plotting, we extract only a subset of connections.
        # For simplicity, we just use a prime-number stepping.
        w = nest.GetConnections(source=net.e_neurons[::41], synapse_model="e_syn").weight
        wbins = np.arange(0.7, 1.4, 0.01)
        self.weights.hist(w, bins=wbins,
                          histtype="step", density=True, label=lbl,
                          color=self._colors[self._next_line],
                          alpha=0.7, lw=3)
        if self._next_line == 0:
            # Axis labels are set once, on the first call.
            self.rasters[0].set_ylabel("neuron id")
            self.weights.set_ylabel("p(w)")
            self.weights.set_xlabel("Weight w [mV]")
        plt.draw()
        plt.pause(1e-3)  # allow figure window to draw figure
        self._next_line += 1
if __name__ == "__main__":
    plt.ion()
    T_sim = 1000
    dplot = DemoPlot()

    ###############################################################################
    # Ensure clean slate and make NEST less chatty
    nest.set_verbosity("M_WARNING")
    nest.ResetKernel()

    ###############################################################################
    # Create network from scratch and simulate 1s.
    nest.local_num_threads = 4
    nest.print_time = True
    ein = EINetwork()
    print("*** Initial simulation ***")
    ein.build()
    nest.Simulate(T_sim)
    dplot.add_to_plot(ein, lbl="Initial simulation")

    ###############################################################################
    # Store network state to file with state after 1s.
    print("\n*** Storing simulation ...", end="", flush=True)
    ein.store("ein_1000.pkl")
    print(" done ***\n")

    ###############################################################################
    # Continue simulation by another 1s.
    print("\n*** Continuing simulation ***")
    nest.Simulate(T_sim)
    dplot.add_to_plot(ein, lbl="Continued simulation", t_min=T_sim, t_max=2 * T_sim)

    ###############################################################################
    # Clear kernel, restore network from file and simulate for 1s.
    # Note: local_num_threads must be set again after ResetKernel so that the
    # restored network sees the same number of virtual processes as the stored one.
    print("\n*** Reloading and resuming simulation ***")
    nest.ResetKernel()
    nest.local_num_threads = 4
    ein2 = EINetwork()
    ein2.restore("ein_1000.pkl")
    nest.Simulate(T_sim)
    dplot.add_to_plot(ein2, lbl="Reloaded simulation")

    ###############################################################################
    # Repeat previous step. This shall result in *exactly* the same results as
    # the previous run because we use the same random seed.
    print("\n*** Reloading and resuming simulation (same seed) ***")
    nest.ResetKernel()
    nest.local_num_threads = 4
    ein2 = EINetwork()
    ein2.restore("ein_1000.pkl")
    nest.Simulate(T_sim)
    dplot.add_to_plot(ein2, lbl="Reloaded simulation (same seed)")

    ###############################################################################
    # Clear, restore and simulate again, but now with different random seed.
    # Details in results shall differ from previous run.
    print("\n*** Reloading and resuming simulation (different seed) ***")
    nest.ResetKernel()
    nest.local_num_threads = 4
    nest.rng_seed = 987654321
    ein2 = EINetwork()
    ein2.restore("ein_1000.pkl")
    nest.Simulate(T_sim)
    dplot.add_to_plot(ein2, lbl="Reloaded simulation (different seed)")

    dplot.fig.savefig("store_restore_network.png")
    input("Press ENTER to close figure!")
| sanjayankur31/nest-simulator | pynest/examples/store_restore_network.py | Python | gpl-2.0 | 14,596 | [
"NEURON"
] | 070df34bc723d564bc41db5990df7d23fa239c2f4be9aa4da1825df29625ee1a |
from .r_dependencies import *
from .r_base import r_base
class r_dataNormalization(r_base):
    def calculate_glogNormalization_v1(self,data_I):
        '''normalize the data using a glog transformation using LMGene
        https://www.bioconductor.org/packages/release/bioc/html/LMGene.html
        Citation: Rocke D, Lee GC, Tillinghast J, Durbin-Johnson B and Wu S (2013). LMGene: LMGene Software for Data Transformation and Identification of Differentially Expressed Genes in Gene Expression Arrays. R package version 2.26.0, http://dmrocke.ucdavis.edu/software.html.
        INPUT:
        data_I = listDict
        ...
        OUTPUT:
        data_O = listDict of the transformed data
        concentrations = original data matrix
        concentrations_glog = normalized data matrix
        NOTE: implicitly returns None if the data contain missing values.
        TODO:
        1. break into individual functions and calls to R
        2. add in optional input for calls to tranest()
        '''
        #make the ExpressionSet
        #format into R matrix and list objects
        # convert data dict to matrix filling in missing values
        # with 'NA'
        # --- collect the distinct samples and components ---
        sns = []
        cn = []
        #replicates = [];
        sample_name_abbreviations = [];
        for d in data_I:
            sns.append(d['sample_name_short']);
            #replicates.append(d['sample_replicate']);
            sample_name_abbreviations.append(d['sample_name_abbreviation'])
            cn.append(d['component_name']);
        sns_sorted = sorted(set(sns))
        #replicates_sorted = sorted(set(replicates))
        cn_sorted = sorted(set(cn))
        sample_name_abbreviations_sorted = sorted(set(sample_name_abbreviations))
        # extract out replicates
        # (assigns each sample a replicate index within its abbreviation group)
        # NOTE(review): the loop variable 'sns' below shadows the 'sns' list
        # collected above; harmless here because the list is no longer needed.
        replicates_dict = {};
        for sns in sns_sorted:
            replicates_dict[sns]=None;
        cnt_reps = 0;
        for sna_sorted in sample_name_abbreviations_sorted:
            for sns in sns_sorted:
                for d in data_I:
                    if d['sample_name_short'] == sns and d['sample_name_abbreviation'] == sna_sorted:
                        replicates_dict[sns] = cnt_reps;
                        cnt_reps+=1;
                        break;
            cnt_reps = 0;
        # --- flatten the data into row-major (component x sample) parallel lists,
        #     using 'NA' as the missing-value placeholder for R ---
        concentrations = ['NA' for r in range(len(sns_sorted)*len(cn_sorted))];
        experiment_ids = ['' for r in range(len(sns_sorted)*len(cn_sorted))];
        time_points = ['' for r in range(len(sns_sorted)*len(cn_sorted))];
        component_group_names = ['' for r in range(len(sns_sorted)*len(cn_sorted))];
        analysis_ids = ['' for r in range(len(sns_sorted)*len(cn_sorted))];
        calculated_concentration_units = ['' for r in range(len(sns_sorted)*len(cn_sorted))];
        cnt = 0;
        cnt_bool = True;
        cnt_reps = 0;
        sna = []
        replicates = []
        for c in cn_sorted:
            for s in sns_sorted:
                for d in data_I:
                    if d['sample_name_short'] == s and d['component_name'] == c:
                        if d['calculated_concentration']:
                            concentrations[cnt] = d['calculated_concentration'];
                            experiment_ids[cnt] = d['experiment_id'];
                            time_points[cnt] = d['time_point'];
                            component_group_names[cnt] = d['component_group_name'];
                            analysis_ids[cnt] = d['analysis_id'];
                            calculated_concentration_units[cnt] = d['calculated_concentration_units'];
                            # sna/replicates describe columns, so fill them only
                            # during the first component's pass (cnt_bool).
                            if cnt_bool:
                                sna.append(d['sample_name_abbreviation']);
                                replicates.append(replicates_dict[s]);
                                #replicates.append(replicates_sorted[cnt_reps]);
                                #if cnt_reps < len(replicates_sorted)-1:
                                #    cnt_reps+=1;
                                #else:
                                #    cnt_reps=0;
                        break;
                cnt = cnt+1
            cnt_bool = False;
        # check if there were any missing values in the data set in the first place
        mv = 0;
        for c in concentrations:
            if c=='NA':
                mv += 1;
        if mv==0:
            # Call to R
            try:
                # convert lists to R matrix
                concentrations_r = '';
                for c in concentrations:
                    concentrations_r = (concentrations_r + ',' + str(c));
                concentrations_r = concentrations_r[1:];
                r_statement = ('concentrations = c(%s)' % concentrations_r);
                ans = robjects.r(r_statement);
                r_statement = ('concentrations_m = matrix(concentrations, nrow = %s, ncol = %s, byrow = TRUE)' %(len(cn_sorted),len(sns_sorted)));
                ans = robjects.r(r_statement);
                # convert lists to R list
                sna_r = '';
                for c in sna:
                    sna_r = (sna_r + ',' + '"' + c + '"');
                sna_r = sna_r[1:];
                replicates_r = '';
                for c in replicates:
                    replicates_r = (replicates_r + ',' + str(c));
                replicates_r = replicates_r[1:];
                r_statement = ('sna = c(%s)' % sna_r);
                ans = robjects.r(r_statement);
                r_statement = ('replicates = c(%s)' % replicates_r);
                ans = robjects.r(r_statement);
                r_statement = ('concentrations_l = list(sna=sna,replicates=replicates)');
                ans = robjects.r(r_statement);
                #convert to Expression Set
                r_statement = ('eS = neweS(concentrations_m,concentrations_l)');
                ans = robjects.r(r_statement);
                #estimate the parameters for g-log transformation
                #r_statement = ('tranpar = tranest(eS)');
                #r_statement = ('tranpar = tranest(eS, lowessnorm=TRUE)');
                #r_statement = ('tranpar = tranest(eS, mult=TRUE, lowessnorm=TRUE)');
                r_statement = ('tranpar = tranest(eS, mult=TRUE)'); # Matches metabo-analyst and produces the most uniform distribution
                ans = robjects.r(r_statement);
                r_statement = ('eS_transformed <- transeS(eS, tranpar$lambda, tranpar$alpha)');
                ans = robjects.r(r_statement);
                # extract out data matrices
                r_statement = ('exprs(eS_transformed)');
                ans = robjects.r(r_statement);
                concentrations_glog = np.array(ans);
                # convert array back to dict
                data_O = [];
                cnt = 0;
                for c in range(len(cn_sorted)):
                    for s in range(len(sns_sorted)):
                        if isinstance(concentrations_glog[c,s], (int, float, complex)):
                            data_tmp = {};
                            data_tmp['sample_name_short'] = sns_sorted[s]
                            data_tmp['component_name'] = cn_sorted[c]
                            data_tmp['component_group_name'] = component_group_names[cnt];
                            data_tmp['calculated_concentration'] = concentrations_glog[c,s];
                            data_tmp['experiment_id'] = experiment_ids[cnt];
                            data_tmp['time_point'] = time_points[cnt];
                            data_tmp['analysis_id'] = analysis_ids[cnt];
                            data_tmp['calculated_concentration_units'] = calculated_concentration_units[cnt]+ '_glog_normalized';
                            data_tmp['comment_'] = None;
                            data_tmp['used_'] = True;
                            data_O.append(data_tmp);
                            cnt+=1;
                        else:
                            print('concentration value is not a number.');
                #for c in range(len(sns_sorted)):
                #    for r in range(len(cgn_sorted)):
                        #if isinstance(concentrations_glog[r,c], (int, long, float, complex)):
                        #    data_tmp = {};
                        #    data_tmp['sample_name_short'] = sns_sorted[c]
                        #    data_tmp['component_name'] = cgn_sorted[r]
                        #    data_tmp['calculated_concentration'] = concentrations_glog[r,c];
                        #    #sns_O.append(sns_sorted[c]);
                        #    #cn_O.append(cgn_sorted[r]);
                        #    #cc_O.append(ans[c*len(cgn_sorted)+r]);
            except Exception as e:
                print(e);
                exit(-1);
            # reshape original concentrations
            concentrations_original = np.array(concentrations);
            concentrations = concentrations_original.reshape(len(cn_sorted),len(sns_sorted));
            return data_O, concentrations, concentrations_glog;
        else:
            print('missing values found in data!');
    def calculate_glogNormalization(self,data_I,
            mult="TRUE",
            lowessnorm="FALSE"
            ):
        '''normalize the data using a glog transformation using LMGene
        https://www.bioconductor.org/packages/release/bioc/html/LMGene.html
        Citation: Rocke D, Lee GC, Tillinghast J, Durbin-Johnson B and Wu S (2013). LMGene: LMGene Software for Data Transformation and Identification of Differentially Expressed Genes in Gene Expression Arrays. R package version 2.26.0, http://dmrocke.ucdavis.edu/software.html.
        INPUT:
        data_I = listDict
        mult = string ("TRUE"/"FALSE"), R literal forwarded to tranest's mult argument
        lowessnorm = string ("TRUE"/"FALSE"), R literal forwarded to tranest's lowessnorm argument
        OUTPUT:
        data_O = listDict of the transformed data
        concentrations = original data matrix
        concentrations_glog = normalized data matrix
        NOTE: implicitly returns None if the data contain missing values.
        TODO:
        1. break into individual functions and calls to R
        2. add in optional input for calls to tranest()
        '''
        #make R matrix lists
        listdict = listDict(data_I);
        concentrations,cn_sorted,sns_sorted,row_variables,column_variables = listdict.convert_listDict2dataMatrixList_pd(
            row_label_I='component_name',
            column_label_I='sample_name_short',
            value_label_I='calculated_concentration',
            row_variables_I=['component_group_name','calculated_concentration_units'],
            column_variables_I=['sample_name_abbreviation','experiment_id','time_point','analysis_id'],
            na_str_I="NA");
        # Per-row (component) and per-column (sample) annotation vectors.
        cgn = row_variables['component_group_name'];
        calculated_concentration_units = row_variables['calculated_concentration_units'];
        experiment_ids = column_variables['experiment_id'];
        time_points = column_variables['time_point'];
        analysis_ids = column_variables['analysis_id'];
        sna = column_variables['sample_name_abbreviation'];
        # check if there were any missing values in the data set in the first place
        mv = 0;
        mv = listdict.count_missingValues_pivotTable();
        #make replicate numbers for each sample abbreviation
        listdict = listDict()
        listdict.set_dictList({'sample_name_abbreviation':sna})
        listdict.convert_dictList2DataFrame()
        listdict.make_dummyIndexColumn(column_index_I='sna_index',column_label_I='sample_name_abbreviation')
        replicates=listdict.dataFrame['sna_index'].get_values();
        ## extract out replicates
        #nsna_unique,sna_unique = listdict.get_uniqueValues('sample_name_abbreviation');
        #replicates_dict = {};
        #for sns in sns_sorted:
        #    replicates_dict[sns]=None;
        #cnt_reps = 0;
        #for sna_sorted in sna_unique:
        #    for sns in sns_sorted:
        #        for d in data_I:
        #            if d['sample_name_short'] == sns and d['sample_name_abbreviation'] == sna_sorted:
        #                replicates_dict[sns] = cnt_reps;
        #                cnt_reps+=1;
        #                break;
        #        cnt_reps = 0;
        #replicates = [];
        #for s in sns_sorted:
        #    replicates.append(replicates_dict[s]);
        if mv==0:
            # Call to R
            try:
                # clear the R workspace
                self.clear_workspace();
                # convert lists to R matrix
                self.make_matrixFromList(concentrations,len(cn_sorted),len(sns_sorted),'concentrations_m');
                #concentrations_r = '';
                #for c in concentrations:
                #    concentrations_r = (concentrations_r + ',' + str(c));
                #concentrations_r = concentrations_r[1:];
                #r_statement = ('concentrations = c(%s)' % concentrations_r);
                #ans = robjects.r(r_statement);
                #r_statement = ('concentrations_m = matrix(concentrations, nrow = %s, ncol = %s, byrow = TRUE)' %(len(cn_sorted),len(sns_sorted)));
                #ans = robjects.r(r_statement);
                # convert lists to R list
                self.make_vectorFromList(sna,'sna');
                #sna_r = '';
                #for c in sna:
                #    sna_r = (sna_r + ',' + '"' + c + '"');
                #sna_r = sna_r[1:];
                #r_statement = ('sna = c(%s)' % sna_r);
                #ans = robjects.r(r_statement);
                self.make_vectorFromList(replicates,'replicates');
                #replicates_r = '';
                #for c in replicates:
                #    replicates_r = (replicates_r + ',' + str(c));
                #replicates_r = replicates_r[1:];
                #r_statement = ('replicates = c(%s)' % replicates_r);
                #ans = robjects.r(r_statement);
                # make the R factor list
                self.make_factorList('sna','replicates','concentrations_l');
                #r_statement = ('concentrations_l = list(sna=sna,replicates=replicates)');
                #ans = robjects.r(r_statement);
                #convert to Expression Set
                self.convert_matrix2ExpressionSet(matrix_I='concentrations_m',vlist_I='concentrations_l',es_O='eS');
                #r_statement = ('eS = neweS(concentrations_m,concentrations_l)');
                #ans = robjects.r(r_statement);
                # estimate the parameters
                self.call_tranest('eS','tranpar',
                    mult=mult,
                    lowessnorm=lowessnorm
                    );
                #r_statement = ('tranpar = tranest(eS, mult=TRUE)'); # Matches metabo-analyst and produces the most uniform distribution
                #ans = robjects.r(r_statement);
                # transform the expression set
                self.call_transeS('eS','tranpar','eS_transformed');
                #r_statement = ('eS_transformed <- transeS(eS, tranpar$lambda, tranpar$alpha)');
                #ans = robjects.r(r_statement);
                # extract out data matrices
                concentrations_glog = self.extract_expressionSet('eS_transformed');
                #r_statement = ('exprs(eS_transformed)');
                #ans = robjects.r(r_statement);
                #concentrations_glog = np.array(ans);
                # convert array back to dict
                data_O = [];
                cnt = 0;
                for c in range(len(cn_sorted)):
                    for s in range(len(sns_sorted)):
                        if isinstance(concentrations_glog[c,s], (int, float, complex)):
                            data_tmp = {};
                            data_tmp['sample_name_short'] = sns_sorted[s]
                            data_tmp['component_group_name'] = cgn[c]
                            data_tmp['component_name'] = cn_sorted[c]
                            data_tmp['calculated_concentration_units'] = calculated_concentration_units[c] + '_glog_normalized';
                            data_tmp['calculated_concentration'] = concentrations_glog[c,s];
                            data_tmp['experiment_id'] = experiment_ids[s];
                            data_tmp['time_point'] = time_points[s];
                            data_tmp['analysis_id'] = analysis_ids[s];
                            data_tmp['imputation_method'] = None;
                            data_tmp['normalization_method'] = 'glog';
                            # NOTE(review): key 'normalization_ooptions' looks like a typo
                            # for 'normalization_options', and the recorded values are
                            # hard-coded defaults rather than the actual mult/lowessnorm
                            # arguments — confirm downstream consumers before changing.
                            data_tmp['normalization_ooptions'] = {'mult':"TRUE",'lowessnorm':"FALSE"};
                            data_tmp['comment_'] = None;
                            data_tmp['used_'] = True;
                            data_O.append(data_tmp);
                            cnt+=1;
                        else:
                            print('concentration value is not a number.');
                #for c in range(len(sns_sorted)):
                #    for r in range(len(cgn_sorted)):
                        #if isinstance(concentrations_glog[r,c], (int, long, float, complex)):
                        #    data_tmp = {};
                        #    data_tmp['sample_name_short'] = sns_sorted[c]
                        #    data_tmp['component_name'] = cgn_sorted[r]
                        #    data_tmp['calculated_concentration'] = concentrations_glog[r,c];
                        #    #sns_O.append(sns_sorted[c]);
                        #    #cn_O.append(cgn_sorted[r]);
                        #    #cc_O.append(ans[c*len(cgn_sorted)+r]);
            except Exception as e:
                print(e);
                exit(-1);
            # reshape original concentrations
            concentrations_original = np.array(concentrations);
            concentrations = concentrations_original.reshape(len(cn_sorted),len(sns_sorted));
            return data_O, concentrations, concentrations_glog;
        else:
            print('missing values found in data!');
def convert_matrix2ExpressionSet(self,matrix_I,vlist_I,es_O):
'''
Convert a matrix to an expressions set in R
INPUT:
matrix_I = string, matrix variable in the R workspace
vlist_I = string, list variable in the R workspace
OUTPUT:
es_O = string, name of the expressionSet variable in the R workspace
Description
This function converts a data matrix into an ExpressionSet object.
Usage
neweS(mat, vlist, vlabel = as.list(names(vlist)))
Arguments
mat A data matrix to be converted.
vlist A list, each component of which describes a factor in the experimental design.
vlabel A list of labels for each component of vlist.
Details
Each element of a component of vlist corresponds to a column of mat. See vlist for an example.
Value
eset An ExpressionSet object.
'''
try:
r_statement = ('%s = neweS(%s,%s)' %(es_O,matrix_I,vlist_I));
ans = robjects.r(r_statement);
except Exception as e:
print(e);
exit(-1);
def make_factorList(self,sna,replicates,list_O):
'''
make factor list for LMGene
INPUT:
sna = string, name of the R workspace variable
replicates = string, name of the R workspace variable
OUTPUT:
list_O = string, name of the R workspace variable
'''
try:
r_statement = ('%s = list(sna=%s,replicates=%s)' %(list_O,sna,replicates));
ans = robjects.r(r_statement);
except Exception as e:
print(e);
exit(-1);
def extract_expressionSet(self,es_I):
'''
Extract out data matrices from an expression set
INPUT:
es_I = string, name of the expression set in the R workspace
OUTPUT:
data_O = np matrix
'''
data_O = None;
try:
r_statement = ('exprs(eS_transformed)');
ans = robjects.r(r_statement);
data_O = np.array(ans);
except Exception as e:
print(e);
exit(-1);
return data_O;
    def call_tranest(self,es_I,transpar_O,
            mult="TRUE",
            lowessnorm="FALSE"
            ):
        '''
        estimate the glog transformation parameters
        INPUT:
        es_I = string, name of the expression set R workspace variable
        mult = string ("TRUE"/"FALSE"), passed through verbatim as the R literal for tranest's mult argument
        lowessnorm = string ("TRUE"/"FALSE"), passed through verbatim as the R literal for tranest's lowessnorm argument
        OUTPUT:
        transpar_O = string, name of the R woskpace variable
        NOTES: r_statement = ('tranpar = tranest(eS, mult=TRUE)'); # Matches metabo-analyst and produces the most uniform distribution
        Description
        Estimates parameters for the glog transformation, by maximum likelihood or by minimizing the
        stability score.
        Usage
        tranest(eS, ngenes = -1, starting = FALSE, lambda = 1000, alpha = 0,
        gradtol = 1e-3, lowessnorm = FALSE, method=1, mult=FALSE, model=NULL,
        SD = FALSE, rank = TRUE, model.based = TRUE, rep.arrays = NULL)
        Arguments
        eS An ExpressionSet object
        ngenes Number of genes to be used in parameter estimation. Default is to use all genes
        unless there are more than 100,000, in which case a subset of 50,000 genes is
        selected at random.
        starting If TRUE, user-specified starting values for lambda and alpha are input to the
        optimization routine
        lambda Starting value for parameter lambda. Ignored unless starting = TRUE
        alpha Starting value for parameter alpha. Ignored unless starting = TRUE
        gradtol A positive scalar giving the tolerance at which the scaled gradient is considered
        close enough to zero to terminate the algorithm
        lowessnorm If TRUE, lowess normalization (using lnorm) is used in calculating the likelihood.
        method Determines optimization method. Default is 1, which corresponds to a Newtontype
        method (see nlm and details.)
        mult If TRUE, tranest will use a vector alpha with one (possibly different) entry per
        sample. Default is to use same alpha for every sample. SD and mult may not
        both be TRUE.
        model Specifies model to be used. Default is to use all variables from eS without
        interactions. See details.
        SD If TRUE, transformation parameters are estimated by minimizing the stability
        score rather than by maximum likelihood. See details.
        rank If TRUE, the stability score is calculated by regressing the replicate standard deviations
        on the ranks of the gene/row means (rather than on the means themselves).
        Ignored unless SD = TRUE
        model.based If TRUE, the stability score is calculated using the standard deviations of residuals
        from the linear model in model. Ignored unless SD = TRUE
        rep.arrays List of sets of replicate arrays. Each element of rep.arrays should be a vector
        with entries corresponding to arrays (columns) in exprs(eS) conducted under
        the same experimental conditions, i.e., with identical rows in pData(eS). Ignored
        unless SD = TRUE and model.based = FALSE
        tranest 19
        Details
        If you have data in a matrix and information about experimental design factors, then you can use
        neweS to convert the data into an ExpressionSet object. Please see neweS for more detail.
        The model argument is an optional character string, constructed like the right-hand side of a formula
        for lm. It specifies which of the variables in the ExpressionSet will be used in the model
        and whether interaction terms will be included. If model=NULL, it uses all variables from the
        ExpressionSet without interactions. Be careful of using interaction terms with factors; this often
        leads to overfitting, which will yield an error.
        The default estimation method is maximum likelihood. The likelihood is derived by assuming that
        there exist values for lambda and alpha such that the residuals from the linear model in model, fit
        to glog-transformed data using those values for lambda and alpha, follow a normal distribution.
        See Durbin and Rocke (2003) for details.
        If SD = TRUE, lambda and alpha are estimated by minimizing the stability score rather than by
        maximum likelihood. The stability score is defined as the absolute value of the slope coefficient
        from the regression of the replicate/residual standard deviation on the gene/row means, or on the
        rank of the gene/row means. If model.based = TRUE, the stability score is calculated using the
        standard deviation of residuals from the linear model in model. Otherwise, the stability score is
        calculated using the pooled standard deviation over sets of replicates in rep.arrays. See Wu and
        Rocke (2009) for details.
        Optimization methods in method are as follows:
        1 = Newton-type method, using nlm
        2 = Nelder-Mead, using optim
        3 = BFGS, using optim
        4 = Conjugate gradients, using optim
        5 = Simulated annealing, using optim (may only be used when mult = TRUE)
        Value
        A list with components:
        lambda Estimate of transformation parameter lambda
        alpha Estimate of transformation parameter alpha
        '''
        try:
            # mult/lowessnorm are interpolated unquoted, i.e. as R logical literals.
            r_statement = ('%s = tranest(%s, mult=%s, lowessnorm=%s)'
                %(transpar_O,es_I,mult,lowessnorm));
            ans = robjects.r(r_statement);
        except Exception as e:
            print(e);
            exit(-1);
def call_transeS(self,es_I,transpar_I,es_O):
'''
call transeS Function to apply the glog transform to an expression set.
INPUT:
es_I = string, name of the expression set R workspace variable
transpar_I = string, name of the R woskpace variable
OUTPUT:
es_I = string, name of the transformed expression set R workspace variable
Description
For each element in the array of expression data, this function applies the glog transform y -> glog
(y-alpha, lambda). If alpha is a vector, it must have one element for each column in exprs(eS).
Usage
transeS(eS, lambda, alpha)
Arguments
eS An ExpressionSet or AffyBatch object
lambda The parameter lambda to be used in the glog transform.
alpha The alpha parameter(s) for the glog transform. May be a single number used for
all samples, or a vector with one entry per sample.
Details
The glog transformation of a variable y is defined as log(y + sqrt(y^2 + lambda)). Using
lambda = 0 corresponds to the log transformation, up to a scale factor of 2. (Other, equivalent
expressions exist for the glog transformation. See Durbin et al. (2002) and Huber et al. (2002) for
futher details.)
transeS subtracts a (scalar or vector) parameter alpha prior to application of the glog transformation,
resulting in the expression log(y - alpha + sqrt((y - alpha)^2 + lambda)).
The parameters lambda and alpha may be estimated using tranest.
Value
Returns an ExpressionSet or AffyBatch object with the expression matrix glog-transformed.
'''
try:
r_statement = ('%s = transeS(%s, %s$lambda, %s$lambda)'
%(es_O,es_I,transpar_I,transpar_I));
ans = robjects.r(r_statement);
except Exception as e:
print(e);
exit(-1);
| dmccloskey/r_statistics | r_statistics/r_dataNormalization.py | Python | mit | 28,099 | [
"Bioconductor"
] | 06a67b87d6265329d8f5e174ed295953a2a5a33fc893267e2ca4c420e8e49286 |
import unittest
from unittest import mock
from django.utils import timezone
from tethys_config.init import initial_settings, reverse_init, setting_defaults, custom_settings, \
reverse_custom, tethys4_site_settings
class TestInit(unittest.TestCase):
    """Unit tests for the ``tethys_config.init`` bootstrap helpers.

    Every collaborator (the ``SettingsCategory``/``Setting`` models,
    ``setting_defaults`` and ``timezone``) is mocked, so the tests run
    without a database or configured Django settings.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    @mock.patch('tethys_config.init.setting_defaults')
    @mock.patch('tethys_config.init.SettingsCategory')
    def test_initial_settings(self, mock_settings, mock_defaults):
        """initial_settings creates both stock categories and populates each."""
        mock_apps = mock.MagicMock()
        mock_schema_editor = mock.MagicMock()
        initial_settings(apps=mock_apps, schema_editor=mock_schema_editor)
        mock_settings.assert_any_call(name='General Settings')
        mock_settings(name='General Settings').save.assert_called()
        mock_settings.assert_any_call(name='Home Page')
        mock_settings(name='Home Page').save.assert_called()
        # One setting_defaults call per created category.
        self.assertEqual(mock_defaults.call_count, 2)

    @mock.patch('tethys_config.init.initial_settings')
    @mock.patch('tethys_config.init.setting_defaults')
    @mock.patch('tethys_config.init.SettingsCategory')
    def test_custom_settings(self, mock_settings, mock_defaults, mock_init_settings):
        """custom_settings bootstraps initial settings when none exist yet."""
        mock_apps = mock.MagicMock()
        mock_schema_editor = mock.MagicMock()
        # No pre-existing categories -> initial_settings must be invoked.
        mock_settings.objects.all.return_value = False
        custom_settings(apps=mock_apps, schema_editor=mock_schema_editor)
        # BUG FIX: the original used ``mock_init_settings.called_with(...)``,
        # which is a silent no-op on a MagicMock (it merely creates a child
        # mock and asserts nothing). Use a real assertion instead.
        mock_init_settings.assert_called_once()
        mock_settings.assert_has_calls([mock.call(name='Custom Styles'), mock.call(name='Custom Templates')],
                                       any_order=True)
        mock_settings(name='Custom Styles').save.assert_called()
        mock_settings(name='Custom Templates').save.assert_called()
        self.assertEqual(mock_defaults.call_count, 2)

    @mock.patch('tethys_config.init.Setting')
    @mock.patch('tethys_config.init.SettingsCategory')
    def test_reverse_init(self, mock_categories, mock_settings):
        """reverse_init deletes every category and setting."""
        # BUG FIX: the original assigned ``mock.MagicMock`` (the class, no
        # parentheses) instead of an instance.
        mock_apps = mock.MagicMock()
        mock_schema_editor = mock.MagicMock()
        mock_cat = mock.MagicMock()
        mock_set = mock.MagicMock()
        mock_categories.objects.all.return_value = [mock_cat]
        mock_settings.objects.all.return_value = [mock_set]
        reverse_init(apps=mock_apps, schema_editor=mock_schema_editor)
        mock_categories.objects.all.assert_called_once()
        mock_settings.objects.all.assert_called_once()
        mock_cat.delete.assert_called_once()
        mock_set.delete.assert_called_once()

    @mock.patch('tethys_config.init.Setting')
    @mock.patch('tethys_config.init.SettingsCategory')
    def test_reverse_custom(self, mock_categories, mock_settings):
        """reverse_custom removes only the custom categories/settings."""
        # BUG FIX: the original assigned ``mock.MagicMock`` (the class, no
        # parentheses) instead of an instance.
        mock_apps = mock.MagicMock()
        mock_schema_editor = mock.MagicMock()
        mock_cat = mock.MagicMock()
        mock_cat.name = 'Custom Styles'
        mock_set = mock.MagicMock()
        mock_set.name = 'Home Page Template'
        mock_categories.objects.all.return_value = [mock_cat]
        mock_settings.objects.all.return_value = [mock_set]
        reverse_custom(apps=mock_apps, schema_editor=mock_schema_editor)
        mock_categories.objects.all.assert_called_once()
        mock_settings.objects.all.assert_called_once()
        mock_cat.delete.assert_called_once()
        mock_set.delete.assert_called_once()

    @mock.patch('tethys_config.init.timezone')
    @mock.patch('tethys_config.init.SettingsCategory')
    def test_setting_defaults(self, mock_settings, mock_timezone):
        """setting_defaults seeds the expected defaults for each category."""
        # Pin "now" so the date_modified argument is verifiable.
        now = timezone.now()
        mock_timezone.now.return_value = now

        # General settings
        type(mock_settings()).name = mock.PropertyMock(return_value="General Settings")
        setting_defaults(category=mock_settings())
        mock_settings().setting_set.create.assert_any_call(name="Site Title",
                                                           content="Tethys Portal",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Favicon",
                                                           content="/tethys_portal/images/"
                                                                   "default_favicon.png",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Brand Text",
                                                           content="Tethys Portal",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Brand Image",
                                                           content="/tethys_portal/images/"
                                                                   "tethys-logo-25.png",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Brand Image Height", content="",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Brand Image Width", content="",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Brand Image Padding",
                                                           content="",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Apps Library Title",
                                                           content="Apps",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Primary Color",
                                                           content="#0a62a9",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Secondary Color",
                                                           content="#7ec1f7",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Background Color", content="",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Primary Text Color", content="",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Primary Text Hover Color",
                                                           content="",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Secondary Text Color",
                                                           content="",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Secondary Text Hover Color",
                                                           content="",
                                                           date_modified=now)
        mock_settings().save.assert_called()

        # Home page settings
        type(mock_settings()).name = mock.PropertyMock(return_value="Home Page")
        setting_defaults(category=mock_settings())
        mock_settings().setting_set.create.assert_any_call(name="Hero Text",
                                                           content="Welcome to Tethys Portal,\nthe hub "
                                                                   "for your apps.",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Blurb Text",
                                                           content="Tethys Portal is designed to be "
                                                                   "customizable, so that you can host "
                                                                   "apps for your\norganization. You "
                                                                   "can change everything on this page "
                                                                   "from the Home Page settings.",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Feature 1 Heading",
                                                           content="Feature 1",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Feature 1 Body",
                                                           content="Use these features to brag about "
                                                                   "all of the things users can do "
                                                                   "with your instance of Tethys "
                                                                   "Portal.",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Feature 1 Image",
                                                           content="/tethys_portal/images/"
                                                                   "placeholder.gif",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Feature 2 Heading",
                                                           content="Feature 2",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Feature 2 Body",
                                                           content="Describe the apps and tools that "
                                                                   "your Tethys Portal provides and "
                                                                   "add custom pictures to each "
                                                                   "feature as a finishing touch.",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Feature 2 Image",
                                                           content="/tethys_portal/images/"
                                                                   "placeholder.gif",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Feature 3 Heading",
                                                           content="Feature 3",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Feature 3 Body",
                                                           content="You can change the color theme and "
                                                                   "branding of your Tethys Portal in "
                                                                   "a jiffy. Visit the Site Admin "
                                                                   "settings from the user menu and "
                                                                   "select General Settings.",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Feature 3 Image",
                                                           content="/tethys_portal/images/"
                                                                   "placeholder.gif",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Call to Action",
                                                           content="Ready to get started?",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Call to Action Button",
                                                           content="Start Using Tethys!",
                                                           date_modified=now)
        mock_settings().save.assert_called()

        # Custom Styles
        type(mock_settings()).name = mock.PropertyMock(return_value="Custom Styles")
        setting_defaults(category=mock_settings())
        mock_settings().setting_set.create.assert_any_call(name="Portal Base CSS",
                                                           content="",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Home Page CSS",
                                                           content="",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Apps Library CSS",
                                                           content="",
                                                           date_modified=now)
        mock_settings().save.assert_called()

        # Custom Templates
        type(mock_settings()).name = mock.PropertyMock(return_value="Custom Templates")
        setting_defaults(category=mock_settings())
        mock_settings().setting_set.create.assert_any_call(name="Home Page Template",
                                                           content="",
                                                           date_modified=now)
        mock_settings().setting_set.create.assert_any_call(name="Apps Library Template",
                                                           content="",
                                                           date_modified=now)
        mock_settings().save.assert_called()

    @mock.patch('tethys_config.init.timezone')
    @mock.patch('tethys_config.init.Setting')
    def test_tethys4_site_settings(self, mock_setting, mock_timezone):
        """tethys4_site_settings migrates select setting values in place."""
        # Set up some mock settings
        mock_brand_setting = mock.MagicMock(
            content="/tethys_portal/images/tethys-logo-75.png"
        )
        mock_apps_library_setting = mock.MagicMock(
            content="Apps Library"
        )
        mock_copyright_setting = mock.MagicMock(
            content="Copyright © 2019 Your Organization"
        )

        # Fake "get" that routes each setting name to its mock.
        def setting_get(name):
            if name == "Brand Image":
                return mock_brand_setting
            elif name == "Apps Library Title":
                return mock_apps_library_setting
            elif name == "Footer Copyright":
                return mock_copyright_setting

        # Bind mocked "get" method
        mock_setting.objects.filter().get = setting_get

        # Pin "now" so the copyright year is verifiable.
        now = timezone.now()
        mock_timezone.now.return_value = now

        # Execute
        tethys4_site_settings(mock.MagicMock(), mock.MagicMock())

        # Verify values changed appropriately
        self.assertEqual("/tethys_portal/images/tethys-logo-25.png", mock_brand_setting.content)
        self.assertEqual("Apps", mock_apps_library_setting.content)
        self.assertEqual(f"Copyright © {now:%Y} Your Organization", mock_copyright_setting.content)

        # Verify settings saved
        mock_brand_setting.save.assert_called()
        mock_apps_library_setting.save.assert_called()
        mock_copyright_setting.save.assert_called()
| tethysplatform/tethys | tests/unit_tests/test_tethys_config/test_init.py | Python | bsd-2-clause | 15,927 | [
"VisIt"
] | 05be15209497ecbf43362b29dc654c31609b3585eae37b9e2d439eb0f4f258b6 |
# import unittest
#
# from hyperopt import hp
# import hyperopt
# import numpy as np
# import six
#
# import HPOlib.format_converter.configuration_space as configuration_space
# import HPOlib.format_converter.pyll_parser as pyll_parser
# from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \
# UniformFloatHyperparameter, UniformIntegerHyperparameter, Constant, \
# NormalFloatHyperparameter, NormalIntegerHyperparameter
#
# # More complex search space
# classifier = CategoricalHyperparameter("classifier", ["svm", "nn"])
# kernel = CategoricalHyperparameter("kernel", ["rbf", "linear"],
# conditions=[["classifier == svm"]])
# C = UniformFloatHyperparameter("C", 0.03125, 32768, base=2,
# conditions=[["classifier == svm"]])
# gamma = UniformFloatHyperparameter("gamma", 0.000030518, 8, base=2,
# conditions=[["kernel == rbf"]])
# neurons = UniformIntegerHyperparameter("neurons", 16, 1024, q=16,
# conditions=[["classifier == nn"]])
# lr = UniformFloatHyperparameter("lr", 0.0001, 1.0,
# conditions=[["classifier == nn"]])
# preprocessing = CategoricalHyperparameter("preprocessing", [None, "pca"])
# config_space = {"classifier": classifier,
# "kernel": kernel,
# "C": C,
# "gamma": gamma,
# "neurons": neurons,
# "lr": lr,
# "preprocessing": preprocessing}
#
# # A search space where a hyperparameter depends on two others:
# gamma_2 = UniformFloatHyperparameter("gamma_2", 0.000030518, 8, base=2,
# conditions=[["kernel == rbf", "classifier == svm"]])
#
# config_space_2 = {"classifier": classifier,
# "kernel": kernel,
# "C": C,
# "gamma_2": gamma_2,
# "neurons": neurons,
# "lr": lr,
# "preprocessing": preprocessing}
#
#
# class TestPyllReader(unittest.TestCase):
# def setUp(self):
# self.pyll_reader = pyll_parser.PyllReader()
#
# def test_read_literal(self):
# literal = hyperopt.pyll.as_apply("5....4....3....1! Off blast! ")
# ret = self.pyll_reader.read_literal(literal, "pre_chorus")
# expected = Constant("pre_chorus", "5....4....3....1! Off blast! ")
# self.assertEqual(expected, ret)
#
# def test_read_container(self):
# #### Lists
# # The Literal is added to the content of the list, but no method will
# # add it to the list of found hyperparameters
# # hyperparameter
# # 0 pos_args
# # 1 float
# # 2 hyperopt_param
# # 3 Literal{a}
# # 4 uniform
# # 5 Literal{0}
# # 6 Literal{10}
# # 7 Literal{Alpha}
# expected = {'a': UniformFloatHyperparameter('a', 0, 10),
# 'Alpha': Constant('Alpha', 'Alpha')}
# param = hyperopt.pyll.as_apply([hp.uniform("a", 0, 10), "Alpha"])
# ret = self.pyll_reader.read_container(param)
# self.assertEqual(expected, ret)
# # 0 pos_args
# # 1 float
# # 2 hyperopt_param
# # 3 Literal{a}
# # 4 uniform
# # 5 Literal{0}
# # 6 Literal{10}
# # 7 float
# # 8 hyperopt_param
# # 9 Literal{b}
# # 10 uniform
# # 11 Literal{5}
# # 12 Literal{15}
# container = hyperopt.pyll.as_apply([hp.uniform("a", 0, 10),
# hp.uniform("b", 5, 15)])
# ret = self.pyll_reader.read_container(container)
# expected = {"a": UniformFloatHyperparameter("a", 0, 10),
# "b": UniformFloatHyperparameter("b", 5, 15)}
# self.assertEqual(expected, ret)
#
# def test_read_dict(self):
# #### Dictionaries
# # 0 dict
# # 1 Elektronik =
# # 2 Literal{Supersonik}
# # 3 a =
# # 4 float
# # 5 hyperopt_param
# # 6 Literal{a}
# # 7 uniform
# # 8 Literal{0}
# # 9 Literal{10}
# container = hyperopt.pyll.as_apply({"a": hp.uniform("a", 0, 10),
# "Elektronik": "Supersonik"})
# ret = self.pyll_reader.read_dict(container)
# expected = {"a": UniformFloatHyperparameter("a", 0, 10),
# "Elektronik": Constant("Elektronik", "Supersonik")}
# self.assertEqual(expected, ret)
#
# # 0 dict
# # 1 @1:F:ASPEED:aspeed-opt =
# # 2 switch
# # 3 hyperopt_param
# # 4 Literal{@1:F:ASPEED:aspeed-opt}
# # 5 randint
# # 6 Literal{1}
# # 7 dict
# # 8 @1:F:ASPEED:aspeed-opt =
# # 9 Literal{yes}
# # 10 @1:approach =
# # 11 Literal{ASPEED}
# param_6 = hp.choice("@1:F:ASPEED:aspeed-opt", [
# {"@1:F:ASPEED:aspeed-opt": "yes", },
# ])
# container = hyperopt.pyll.as_apply(
# {"@1:F:ASPEED:aspeed-opt": param_6,
# "@1:approach": "ASPEED"})
# ret = self.pyll_reader.read_dict(container)
# expected = {"@1:F:ASPEED:aspeed-opt":
# CategoricalHyperparameter("@1:F:ASPEED:aspeed-opt", ['yes']),
# "@1:approach":
# Constant("@1:approach", "ASPEED")}
# self.maxDiff = None
# self.assertEqual(expected, ret)
#
# def test_read_switch(self):
# # 0 switch
# # 1 hyperopt_param
# # 2 Literal{dist1}
# # 3 randint
# # 4 Literal{2}
# # 5 Literal{uniform}
# # 6 Literal{normal}
# dist = hp.choice('dist1', ['uniform', 'normal'])
# ret = self.pyll_reader.read_switch(dist)
# expected = CategoricalHyperparameter('dist1', ['uniform', 'normal'])
# self.assertEqual(expected, ret)
#
# bigger_choice = hp.choice('choice', [
# {'choice': "zero", 'a': 0, 'b': hp.uniform('b', 0, 10)},
# {'choice': "other", 'a': 1, 'b': hp.uniform('b', 0, 10)}])
# ret = self.pyll_reader.read_switch(bigger_choice)
# expected = CategoricalHyperparameter('choice', ['zero', 'other'])
# self.assertEqual(expected, ret)
# self.assertEqual(2, len(self.pyll_reader.constants))
# # Only the hyperparameter b is put into pyll_reader.hyperparameters
# self.assertEqual(1, len(self.pyll_reader.hyperparameters))
#
# # TODO: duplicate these tests for Integer/care about integers + test if
# # the warning of non-uniform parameters is actually printed
# def test_read_uniform(self):
# # 0 float
# # 1 hyperopt_param
# # 2 Literal{scale_mult1}
# # 3 uniform
# # 4 Literal{0.2}
# # 5 Literal{2}
# uniform = hp.uniform('scale_mult1', .2, 2).inputs()[0].inputs()[1]
# ret = self.pyll_reader.read_uniform(uniform, 'scale_mult1')
# expected = UniformFloatHyperparameter('scale_mult1', 0.2, 2)
# self.assertEqual(expected, ret)
#
# def test_read_loguniform(self):
# # 0 float
# # 1 hyperopt_param
# # 2 Literal{colnorm_thresh}
# # 3 loguniform
# # 4 Literal{-20.7232658369}
# # 5 Literal{-6.90775527898}
# loguniform = hp.loguniform('colnorm_thresh', np.log(1e-9),
# np.log(1e-3)).inputs()[0].inputs()[1]
# ret = self.pyll_reader.read_loguniform(loguniform, 'colnorm_thresh')
# expected = UniformFloatHyperparameter(
# 'colnorm_thresh', 1e-9, 1e-3, base=np.e)
# self.assertEqual(expected, ret)
#
# def test_read_quniform(self):
# # TODO scope.int(hp.quniform('liblinear:LOG2_C', -5, 15, 1))
# # 0 float
# # 1 hyperopt_param
# # 2 Literal{l0eg_fsize}
# # 3 quniform
# # 4 Literal{2.50001}
# # 5 Literal{8.5}
# # 6 Literal{1}
# quniform = hp.quniform('l0eg_fsize', 2.50001, 8.5, 1). \
# inputs()[0].inputs()[1]
# ret = self.pyll_reader.read_quniform(quniform, 'l0eg_fsize')
# expected = UniformIntegerHyperparameter(
# 'l0eg_fsize', 3, 8)
# self.assertEqual(expected, ret)
#
# l2_out_lp_psize = hp.quniform("l2_out_lp_psize", 0.50001, 5.5, 1). \
# inputs()[0].inputs()[1]
# ret = self.pyll_reader.read_quniform(l2_out_lp_psize, "l2_out_lp_psize")
# expected = UniformIntegerHyperparameter(
# "l2_out_lp_psize", 1, 5)
# self.assertEqual(expected, ret)
#
# def test_read_qloguniform(self):
# # 0 float
# # 1 hyperopt_param
# # 2 Literal{nhid1}
# # 3 qloguniform
# # 4 Literal{2.77258872224}
# # 5 Literal{6.9314718056}
# # 6 q =
# # 7 Literal{16}
# qloguniform = hp.qloguniform('nhid1', np.log(16), np.log(1024), q=16). \
# inputs()[0].inputs()[1]
# ret = self.pyll_reader.read_qloguniform(qloguniform, 'nhid1')
# expected = UniformFloatHyperparameter(
# 'nhid1', 16, 1024, q=16, base=np.e)
# self.assertEqual(expected, ret)
#
# def test_read_normal(self):
# # 0 float
# # 1 hyperopt_param
# # 2 Literal{l0eg_alpha}
# # 3 normal
# # 4 Literal{0.0}
# # 5 Literal{1.0}
# normal = hp.normal("l0eg_alpha", 0.0, 1.0).inputs()[0].inputs()[1]
# ret = self.pyll_reader.read_normal(normal, "l0eg_alpha")
# expected = NormalFloatHyperparameter("l0eg_alpha", 0.0, 1.0)
# self.assertEqual(expected, ret)
#
#
# def test_read_lognormal(self):
# # 0 float
# # 1 hyperopt_param
# # 2 Literal{lr}
# # 3 lognormal
# # 4 Literal{-4.60517018599}
# # 5 Literal{3.0}
# lognormal = hp.lognormal('lr', np.log(.01), 3.).inputs()[0].inputs()[1]
# ret = self.pyll_reader.read_lognormal(lognormal, "lr")
# expected = NormalFloatHyperparameter("lr", np.log(0.01), 3.0, base=np.e)
# self.assertEqual(expected, ret)
#
# def test_read_qnormal(self):
# # 0 float
# # 1 hyperopt_param
# # 2 Literal{qnormal}
# # 3 qnormal
# # 4 Literal{0.0}
# # 5 Literal{1.0}
# # 6 Literal{0.5}
# qnormal = hp.qnormal("qnormal", 0.0, 1.0, 0.5).inputs()[0].inputs()[1]
# ret = self.pyll_reader.read_qnormal(qnormal, "qnormal")
# expected = NormalFloatHyperparameter("qnormal", 0.0, 1.0, q=0.5)
# self.assertEqual(expected, ret)
#
# def test_read_qlognormal(self):
# # 0 float
# # 1 hyperopt_param
# # 2 Literal{qlognormal}
# # 3 qlognormal
# # 4 Literal{0.0}
# # 5 Literal{1.0}
# # 6 Literal{0.5}
# qlognormal = hp.qlognormal("qlognormal", 0.0, 1.0, 0.5).inputs()[0].inputs()[1]
# ret = self.pyll_reader.read_qlognormal(qlognormal, "qlognormal")
# expected = NormalFloatHyperparameter("qlognormal", 0.0, 1.0, q=0.5, base=np.e)
# self.assertEqual(expected, ret)
#
# qlognormal = hp.qlognormal("qlognormal", 1.0, 5.0, 1.0).inputs()[0].inputs()[1]
# ret = self.pyll_reader.read_qlognormal(qlognormal, "qlognormal")
# expected = NormalIntegerHyperparameter("qlognormal", 1.0, 5.0, base=np.e)
# self.assertEqual(expected, ret)
#
#
# class TestPyllWriter(unittest.TestCase):
# def setUp(self):
# self.pyll_writer = pyll_parser.PyllWriter()
#
# def test_convert_configuration_space(self):
# a = UniformFloatHyperparameter("a", 0, 1)
# b = UniformFloatHyperparameter("b", 0, 3, q=0.1)
#
# expected = six.StringIO()
# expected.write('from hyperopt import hp\nimport hyperopt.pyll as pyll')
# expected.write('\n\n')
# expected.write('param_0 = hp.uniform("a", 0.0, 1.0)\n')
# expected.write('param_1 = hp.quniform("b", -0.0499, 3.05, 0.1)\n\n')
# expected.write('space = {"a": param_0, "b": param_1}\n')
# simple_space = {"a": a, "b": b}
# cs = self.pyll_writer.write(simple_space)
# self.assertEqual(expected.getvalue(), cs)
#
# def test_convert_conditional_space(self):
# a_or_b = CategoricalHyperparameter("a_or_b", ["a", "b"])
# cond_a = UniformFloatHyperparameter(
# 'cond_a', 0, 1, conditions=[['a_or_b == a']])
# cond_b = UniformFloatHyperparameter(
# 'cond_b', 0, 3, q=0.1, conditions=[['a_or_b == b']])
# conditional_space = {"a_or_b": a_or_b, "cond_a": cond_a, "cond_b": cond_b}
# cs = self.pyll_writer.write(conditional_space)
# expected = six.StringIO()
# expected.write('from hyperopt import hp\nimport hyperopt.pyll as pyll')
# expected.write('\n\n')
# expected.write('param_0 = hp.uniform("cond_a", 0.0, 1.0)\n')
# expected.write('param_1 = hp.quniform("cond_b", -0.0499, 3.05, 0.1)\n')
# expected.write('param_2 = hp.choice("a_or_b", [\n')
# expected.write(' {"a_or_b": "a", "cond_a": param_0, },\n')
# expected.write(' {"a_or_b": "b", "cond_b": param_1, },\n')
# expected.write(' ])\n\n')
# expected.write('space = {"a_or_b": param_2}\n')
# self.assertEqual(expected.getvalue(), cs)
#
# def test_convert_complex_space(self):
# cs = self.pyll_writer.write(config_space)
# expected = six.StringIO()
# expected.write('from hyperopt import hp\nimport hyperopt.pyll as pyll')
# expected.write('\n\n')
# expected.write('param_0 = hp.uniform("LOG2_C", -5.0, 15.0)\n')
# expected.write('param_1 = hp.uniform("LOG2_gamma", -14.9999800563, '
# '3.0)\n')
# expected.write('param_2 = hp.choice("kernel", [\n')
# expected.write(' {"kernel": "linear", },\n')
# expected.write(' {"kernel": "rbf", "LOG2_gamma": param_1, },\n')
# expected.write(' ])\n')
# expected.write('param_3 = hp.uniform("lr", 0.0001, 1.0)\n')
# expected.write('param_4 = pyll.scope.int(hp.quniform('
# '"neurons", 15.50001, 1024.5, 16.0))\n')
# expected.write('param_5 = hp.choice("classifier", [\n')
# expected.write(' {"classifier": "nn", "lr": param_3, "neurons": '
# 'param_4, },\n')
# expected.write(' {"classifier": "svm", "LOG2_C": param_0, '
# '"kernel": param_2, },\n')
# expected.write(' ])\n')
# expected.write('param_6 = hp.choice("preprocessing", [\n')
# expected.write(' {"preprocessing": "None", },\n')
# expected.write(' {"preprocessing": "pca", },\n')
# expected.write(' ])\n\n')
# expected.write('space = {"classifier": param_5, '
# '"preprocessing": param_6}\n')
# self.assertEqual(expected.getvalue(), cs)
#
# self.pyll_writer.reset_hyperparameter_countr()
# expected.seek(0)
# cs = self.pyll_writer.write(config_space_2)
# self.assertEqual(expected.getvalue().replace("gamma", "gamma_2"), cs)
#
# def test_operator_in(self):
# a_or_b = CategoricalHyperparameter("a_or_b", ["a", "b"])
# cond_a = UniformFloatHyperparameter(
# 'cond_a', 0, 1, conditions=[['a_or_b == a']])
# cond_b = UniformFloatHyperparameter(
# 'cond_b', 0, 3, q=0.1, conditions=[['a_or_b == b']])
# e = UniformFloatHyperparameter("e", 0, 5,
# conditions=[['a_or_b in {a,b}']])
# conditional_space_operator_in = {"a_or_b": a_or_b, "cond_a": cond_a,
# "cond_b": cond_b, "e": e}
# cs = self.pyll_writer.write(conditional_space_operator_in)
# expected = six.StringIO()
# expected.write('from hyperopt import hp\nimport hyperopt.pyll as pyll')
# expected.write('\n\n')
# expected.write('param_0 = hp.uniform("cond_a", 0.0, 1.0)\n')
# expected.write('param_1 = hp.quniform("cond_b", -0.0499, 3.05, 0.1)\n')
# expected.write('param_2 = hp.uniform("e", 0.0, 5.0)\n')
# expected.write('param_3 = hp.choice("a_or_b", [\n')
# expected.write(' {"a_or_b": "a", "cond_a": param_0, "e": param_2, '
# '},\n')
# expected.write(' {"a_or_b": "b", "cond_b": param_1, "e": param_2, '
# '},\n')
# expected.write(' ])\n\n')
# expected.write('space = {"a_or_b": param_3}\n')
# self.assertEqual(expected.getvalue(), cs)
#
# def test_write_uniform(self):
# a = UniformFloatHyperparameter("a", 0, 1)
# expected = ('a', 'param_0 = hp.uniform("a", 0.0, 1.0)')
# value = self.pyll_writer.write_hyperparameter(a, None)
# self.assertEqual(expected, value)
#
# # The hyperparameter name has to be converted seperately because
# # otherwise the parameter values are converted at object costruction
# # time
# a = UniformFloatHyperparameter("a", 1, 10, base=10)
# a.name = self.pyll_writer.convert_name(a)
# expected = ('LOG10_a', 'param_1 = hp.uniform("LOG10_a", 0.0, 1.0)')
# value = self.pyll_writer.write_hyperparameter(a, None)
# self.assertEqual(expected, value)
#
# nhid1 = UniformFloatHyperparameter(
# "nhid1", 16, 1024, q=16, base=np.e)
# expected = ('nhid1', 'param_2 = hp.qloguniform('
# '"nhid1", 2.0794540416, 6.93925394604, 16.0)')
# value = self.pyll_writer.write_hyperparameter(nhid1, None)
# self.assertEqual(expected, value)
#
# def test_write_uniform_int(self):
# a_int = UniformIntegerHyperparameter("a_int", 0, 1)
# expected = ('a_int', 'param_0 = pyll.scope.int(hp.quniform('
# '"a_int", -0.49999, 1.5, 1.0))')
# value = self.pyll_writer.write_hyperparameter(a_int, None)
# self.assertEqual(expected, value)
#
# # Test for the problem that if a parameter has Q not None and is on
# # log scale, the Q must not be in the hp object, but the
# # hyperparameter name. If this is done the other way round,
# # the log-value of the hyperparameter is quantized
# a_int = UniformIntegerHyperparameter(
# "a_int", 1, 1000, base=10)
# a_int.name = self.pyll_writer.convert_name(a_int)
# expected = ('LOG10_Q1_a_int', 'param_1 = hp.uniform('
# '"LOG10_Q1_a_int", -0.301021309861, 3.00021709297)')
# value = self.pyll_writer.write_hyperparameter(a_int, None)
# self.assertEqual(expected, value)
#
# def test_write_quniform(self):
# b = UniformFloatHyperparameter("b", 0, 3, q=0.1)
# expected = ("b", 'param_0 = hp.quniform("b", -0.0499, 3.05, 0.1)')
# value = self.pyll_writer.write_hyperparameter(b, None)
# self.assertEqual(expected, value)
#
# b = UniformFloatHyperparameter(
# "b", 0.1, 3, q=0.1, base=10)
# b.name = self.pyll_writer.convert_name(b)
# expected = ('LOG10_Q0.100000_b', 'param_1 = hp.uniform('
# '"LOG10_Q0.100000_b", -1.30016227413, 0.484299839347)')
# value = self.pyll_writer.write_hyperparameter(b, None)
# self.assertEqual(expected, value)
#
# def test_write_quniform_int(self):
# b_int_1 = UniformIntegerHyperparameter("b_int", 0, 3, q=1.0)
# expected = ("b_int", 'param_0 = pyll.scope.int(hp.quniform('
# '"b_int", -0.49999, 3.5, 1.0))')
# value = self.pyll_writer.write_hyperparameter(b_int_1, None)
# self.assertEqual(expected, value)
#
# # TODO: trying to add the same parameter name a second time, maybe an
# # error should be raised!
# b_int_2 = UniformIntegerHyperparameter("b_int", 0, 3, q=2.0)
# expected = ("b_int", 'param_1 = pyll.scope.int(hp.quniform('
# '"b_int", -0.49999, 3.5, 2.0))')
# value = self.pyll_writer.write_hyperparameter(b_int_2, None)
# self.assertEqual(expected, value)
#
# b_int_3 = UniformIntegerHyperparameter( "b_int", 1, 100, base=10)
# b_int_3.name = self.pyll_writer.convert_name(b_int_3)
# # TODO: this is an example of non-uniform integer sampling!
# expected = ('LOG10_Q1_b_int', 'param_1 = hp.uniform('
# '"LOG10_Q1_b_int", -0.301021309861, 2.00216606176)')
# value = self.pyll_writer.write_hyperparameter(b_int_3, None)
# self.assertEqual(expected, value)
#
# def test_write_loguniform(self):
# c = UniformFloatHyperparameter("c", 0.001, 1, base=np.e)
# expected = ("c", 'param_0 = hp.loguniform("c", -6.90775527898, 0.0)')
# value = self.pyll_writer.write_hyperparameter(c, None)
# self.assertEqual(expected, value)
#
# def test_write_loguniform_int(self):
# c_int = UniformIntegerHyperparameter("c_int", 1, 10, base=np.e)
# expected = ("c_int", 'param_0 = pyll.scope.int(hp.qloguniform('
# '"c_int", -0.69312718076, 2.35137525716, 1.0))')
# value = self.pyll_writer.write_hyperparameter(c_int, None)
# self.assertEqual(expected, value)
#
# def test_write_qloguniform(self):
# d = UniformFloatHyperparameter("d", 0.1, 3, q=0.1, base=np.e)
# expected = ("d", 'param_0 = hp.qloguniform("d", -2.99373427089, '
# '1.11514159062, 0.1)')
# value = self.pyll_writer.write_hyperparameter(d, None)
# self.assertEqual(expected, value)
#
# def test_write_qloguniform_int(self):
# d_int_1 = UniformIntegerHyperparameter("d_int", 1, 3, q=1.0, base=np.e)
# expected = ("d_int", 'param_0 = pyll.scope.int(hp.qloguniform('
# '"d_int", -0.69312718076, 1.2527629685, 1.0))')
# value = self.pyll_writer.write_hyperparameter(d_int_1, None)
# self.assertEqual(expected, value)
#
# d_int_2 = UniformIntegerHyperparameter("d_int", 1, 3, q=2.0, base=np.e)
# expected = ("d_int", 'param_1 = pyll.scope.int(hp.qloguniform('
# '"d_int", -0.69312718076, 1.2527629685, 2.0))')
# value = self.pyll_writer.write_hyperparameter(d_int_2, None)
# self.assertEqual(expected, value)
#
# def test_write_normal(self):
# parameter = NormalFloatHyperparameter("e", 0, 1)
# expected = ('e', 'param_0 = hp.normal("e", 0.0, 1.0)')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# parameter = NormalFloatHyperparameter("e", 0, 1, base=10)
# parameter.name = self.pyll_writer.convert_name(parameter)
# expected = ('LOG10_e', 'param_1 = hp.normal("LOG10_e", 0.0, 1.0)')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# def test_write_normal_int(self):
# parameter = NormalIntegerHyperparameter("e", 0, 1)
# expected = ('e',
# 'param_0 = pyll.scope.int(hp.qnormal("e", 0.0, 1.0, 1.0))')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# parameter = NormalIntegerHyperparameter("e", 0, 1, base=10)
# parameter.name = self.pyll_writer.convert_name(parameter)
# # TODO: this is an example of non-uniform sampling
# expected = ('LOG10_Q1_e', 'param_1 = hp.normal("LOG10_Q1_e", 0.0, 1.0)')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# def test_write_qnormal(self):
# parameter = NormalFloatHyperparameter("f", 0, 1, q=0.1)
# expected = ('f', 'param_0 = hp.qnormal("f", 0.0, 1.0, 0.1)')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# parameter = NormalFloatHyperparameter("f", 0, 1, q=0.1, base=10)
# parameter.name = self.pyll_writer.convert_name(parameter)
# expected = ('LOG10_Q0.100000_f',
# 'param_1 = hp.normal("LOG10_Q0.100000_f", 0.0, 1.0)')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# def test_write_qnormal_int(self):
# parameter = NormalIntegerHyperparameter("f_int", 0, 1, q=1.0)
# expected = ('f_int',
# 'param_0 = pyll.scope.int(hp.qnormal("f_int", 0.0, 1.0, 1.0))')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# parameter = NormalIntegerHyperparameter("f_int", 0, 1, q=1.0, base=10)
# parameter.name = self.pyll_writer.convert_name(parameter)
# expected = ('LOG10_Q1.000000_f_int',
# 'param_1 = hp.normal("LOG10_Q1.000000_f_int", 0.0, 1.0)')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# def test_write_lognormal(self):
# parameter = NormalFloatHyperparameter("g", 0, 1, base=np.e)
# expected = ('g', 'param_0 = hp.lognormal("g", 0.0, 1.0)')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# def test_write_lognormal_int(self):
# parameter = NormalIntegerHyperparameter("g", 0, 1, base=np.e)
# expected = ('g',
# 'param_0 = pyll.scope.int(hp.qlognormal("g", 0.0, 1.0, 1.0))')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# def test_write_qlognormal(self):
# parameter = NormalFloatHyperparameter("g", 0, 1, q=0.1, base=np.e)
# expected = ('g', 'param_0 = hp.qlognormal("g", 0.0, 1.0, 0.1)')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# def test_write_qlognormal_int(self):
# parameter = NormalIntegerHyperparameter("g_int", 0, 10, q=2.0, base=np.e)
# expected = ('g_int',
# 'param_0 = pyll.scope.int(hp.qlognormal("g_int", 0.0, 10.0, 2.0))')
# value = self.pyll_writer.write_hyperparameter(parameter, None)
# self.assertEqual(expected, value)
#
# def test_get_bounds_as_exponent(self):
# parameter = UniformFloatHyperparameter('a', 1, 1000, base=10)
# lower, upper = self.pyll_writer.get_bounds_as_exponent(parameter)
# name = self.pyll_writer.convert_name(parameter)
# self.assertEqual(name, 'LOG10_a')
# self.assertEqual(lower, 0)
# self.assertEqual(upper, 3)
#
# parameter = UniformFloatHyperparameter('a', 2, 128, base=2)
# name = self.pyll_writer.convert_name(parameter)
# lower, upper = self.pyll_writer.get_bounds_as_exponent(parameter)
# self.assertEqual(name, 'LOG2_a')
# self.assertEqual(lower, 1)
# self.assertEqual(upper, 7)
#
# parameter = UniformFloatHyperparameter('a', 128, 256, base=np.e)
# name = self.pyll_writer.convert_name(parameter)
# lower, upper = self.pyll_writer.get_bounds_as_exponent(parameter)
# self.assertEqual(name, 'LOG_a')
# self.assertAlmostEqual(lower, 4.852030264)
# self.assertAlmostEqual(upper, 5.545177444)
#
# parameter = UniformFloatHyperparameter('a', 10, 1000, base=5)
# lower, upper = self.pyll_writer.get_bounds_as_exponent(parameter)
# name = self.pyll_writer.convert_name(parameter)
# self.assertEqual(name, 'LOG5_a')
# self.assertAlmostEqual(lower, 1.430676558)
# self.assertAlmostEqual(upper, 4.292029674)
#
# parameter = UniformFloatHyperparameter('illegal',
# 0, 1000, base=10)
# self.assertRaises(ValueError, self.pyll_writer.get_bounds_as_exponent,
# parameter) | mfeurer/HPOlibConfigSpace | test/converters/test_pyll_util.py | Python | gpl-3.0 | 28,534 | [
"BLAST"
] | ffd965d18dd83dbb7dc6dd8880e6cbed03b4f0d7f561f211fe39dfedc49fc3bf |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module implements an interface to the VAMPIRE code for atomistic
simulations of magnetic materials.
This module depends on a compiled vampire executable available in the path.
Please download at https://vampire.york.ac.uk/download/ and
follow the instructions to compile the executable.
If you use this module, please cite the following:
"Atomistic spin model simulations of magnetic nanomaterials."
R. F. L. Evans, W. J. Fan, P. Chureemart, T. A. Ostler, M. O. A. Ellis
and R. W. Chantrell. J. Phys.: Condens. Matter 26, 103202 (2014)
"""
import logging
import subprocess
import pandas as pd
from monty.dev import requires
from monty.json import MSONable
from monty.os.path import which
from pymatgen.analysis.magnetism.heisenberg import HeisenbergMapper
__author__ = "ncfrey"
__version__ = "0.1"
__maintainer__ = "Nathan C. Frey"
__email__ = "ncfrey@lbl.gov"
__status__ = "Development"
__date__ = "June 2019"
VAMPEXE = which("vampire-serial")
class VampireCaller:
    """
    Run Vampire on a material with magnetic ordering and exchange parameter information to compute the critical
    temperature with classical Monte Carlo.

    The constructor writes the three Vampire input files (``<formula>.mat``,
    ``input``, ``<formula>.ucf``) into the current working directory, runs the
    ``vampire-serial`` executable there, and parses its ``output`` file into a
    :class:`VampireOutput` stored on ``self.output``.
    """
    @requires(
        VAMPEXE,
        "VampireCaller requires vampire-serial to be in the path."
        "Please follow the instructions at https://vampire.york.ac.uk/download/.",
    )
    def __init__(
        self,
        ordered_structures=None,
        energies=None,
        mc_box_size=4.0,
        equil_timesteps=2000,
        mc_timesteps=4000,
        save_inputs=False,
        hm=None,
        avg=True,
        user_input_settings=None,
    ):
        """
        user_input_settings is a dictionary that can contain:
        * start_t (int): Start MC sim at this temp, defaults to 0 K.
        * end_t (int): End MC sim at this temp, defaults to 1500 K.
        * temp_increment (int): Temp step size, defaults to 25 K.

        Args:
            ordered_structures (list): Structure objects with magmoms.
            energies (list): Energies of each relaxed magnetic structure.
            mc_box_size (float): x=y=z dimensions (nm) of MC simulation box
            equil_timesteps (int): number of MC steps for equilibrating
            mc_timesteps (int): number of MC steps for averaging
            save_inputs (bool): if True, save scratch dir of vampire input files
            hm (HeisenbergModel): object already fit to low energy
                magnetic orderings.
            avg (bool): If True, simply use <J> exchange parameter estimate.
                If False, attempt to use NN, NNN, etc. interactions.
            user_input_settings (dict): optional commands for VAMPIRE Monte Carlo

        Parameters:
            sgraph (StructureGraph): Ground state graph.
            unique_site_ids (dict): Maps each site to its unique identifier
            nn_interactions (dict): {i: j} pairs of NN interactions
                between unique sites.
            ex_params (dict): Exchange parameter values (meV/atom)
            mft_t (float): Mean field theory estimate of critical T
            mat_name (str): Formula unit label for input files
            mat_id_dict (dict): Maps sites to material id # for vampire
                indexing.

        TODO:
            * Create input files in a temp folder that gets cleaned up after run terminates
        """
        self.mc_box_size = mc_box_size
        self.equil_timesteps = equil_timesteps
        self.mc_timesteps = mc_timesteps
        self.save_inputs = save_inputs
        self.avg = avg
        if not user_input_settings:  # set to empty dict
            self.user_input_settings = {}
        else:
            self.user_input_settings = user_input_settings
        # Get exchange parameters and set instance variables.
        # If no fitted model was supplied, fit one here from the orderings.
        if not hm:
            hmapper = HeisenbergMapper(ordered_structures, energies, cutoff=3.0, tol=0.02)
            hm = hmapper.get_heisenberg_model()
        # Attributes from HeisenbergModel
        self.hm = hm
        self.structure = hm.structures[0]  # ground state
        self.sgraph = hm.sgraphs[0]  # ground state graph
        self.unique_site_ids = hm.unique_site_ids
        self.nn_interactions = hm.nn_interactions
        self.dists = hm.dists
        self.tol = hm.tol
        self.ex_params = hm.ex_params
        self.javg = hm.javg
        # Full structure name before reducing to only magnetic ions
        self.mat_name = hm.formula
        # Switch to scratch dir which automatically cleans up vampire inputs files unless user specifies to save them
        # with ScratchDir('/scratch', copy_from_current_on_enter=self.save_inputs,
        #                 copy_to_current_on_exit=self.save_inputs) as temp_dir:
        #     os.chdir(temp_dir)
        # Create input files (written into the current working directory)
        self._create_mat()
        self._create_input()
        self._create_ucf()
        # Call Vampire; it reads the files written above from the cwd.
        with subprocess.Popen(["vampire-serial"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) as process:
            stdout, stderr = process.communicate()
            stdout = stdout.decode()
        if stderr:
            vanhelsing = stderr.decode()
            if len(vanhelsing) > 27:  # Suppress blank warning msg
                logging.warning(vanhelsing)
        if process.returncode != 0:
            raise RuntimeError("Vampire exited with return code {}.".format(process.returncode))
        self._stdout = stdout
        self._stderr = stderr
        # Process output.  Material ids assigned in _create_mat start at 1,
        # so the max id equals the number of materials.
        nmats = max(self.mat_id_dict.values())
        parsed_out, critical_temp = VampireCaller.parse_stdout("output", nmats)
        self.output = VampireOutput(parsed_out, nmats, critical_temp)

    def _create_mat(self):
        """Write the Vampire material file ``<formula>.mat``.

        Assigns a Vampire material id to every site (one id per unique site,
        or two ids when a unique site hosts both spin-up and spin-down
        moments) and stores the mapping in ``self.mat_id_dict``.
        """
        structure = self.structure
        mat_name = self.mat_name
        magmoms = structure.site_properties["magmom"]
        # Maps sites to material id for vampire inputs
        mat_id_dict = {}
        nmats = 0
        for key in self.unique_site_ids:
            spin_up, spin_down = False, False
            nmats += 1  # at least 1 mat for each unique site
            # Check which spin sublattices exist for this site id
            for site in key:
                m = magmoms[site]
                if m > 0:
                    spin_up = True
                if m < 0:
                    spin_down = True
            # Assign material id for each site
            for site in key:
                m = magmoms[site]
                if spin_up and not spin_down:
                    mat_id_dict[site] = nmats
                if spin_down and not spin_up:
                    mat_id_dict[site] = nmats
                if spin_up and spin_down:
                    # Check if spin up or down shows up first
                    m0 = magmoms[key[0]]
                    if m > 0 and m0 > 0:
                        mat_id_dict[site] = nmats
                    if m < 0 and m0 < 0:
                        mat_id_dict[site] = nmats
                    if m > 0 > m0:
                        mat_id_dict[site] = nmats + 1
                    if m < 0 < m0:
                        mat_id_dict[site] = nmats + 1
            # Increment index if two sublattices
            if spin_up and spin_down:
                nmats += 1
        mat_file = ["material:num-materials=%d" % (nmats)]
        for key in self.unique_site_ids:
            i = self.unique_site_ids[key]  # unique site id
            for site in key:
                mat_id = mat_id_dict[site]
                # Only positive magmoms allowed
                m_magnitude = abs(magmoms[site])
                # NOTE(review): if magmoms[site] == 0, 'spin' keeps its value
                # from a previous iteration (NameError on the very first one)
                # -- this assumes all magmoms are strictly nonzero; confirm.
                if magmoms[site] > 0:
                    spin = 1
                if magmoms[site] < 0:
                    spin = -1
                # NOTE(review): indexes the structure with the unique-site id,
                # not with 'site' -- verify these indices coincide upstream.
                atom = structure[i].species.reduced_formula
                mat_file += ["material[%d]:material-element=%s" % (mat_id, atom)]
                mat_file += [
                    "material[%d]:damping-constant=1.0" % (mat_id),
                    "material[%d]:uniaxial-anisotropy-constant=1.0e-24" % (mat_id),  # xx - do we need this?
                    "material[%d]:atomic-spin-moment=%.2f !muB" % (mat_id, m_magnitude),
                    "material[%d]:initial-spin-direction=0,0,%d" % (mat_id, spin),
                ]
        mat_file = "\n".join(mat_file)
        mat_file_name = mat_name + ".mat"
        self.mat_id_dict = mat_id_dict
        with open(mat_file_name, "w") as f:
            f.write(mat_file)

    def _create_input(self):
        """Write the Vampire control file ``input``.

        Contains geometry (unit-cell and simulation-box sizes), periodic
        boundary settings, the Monte Carlo curie-temperature program setup,
        the temperature sweep (overridable via user_input_settings), and the
        list of quantities to output.
        """
        structure = self.structure
        mcbs = self.mc_box_size
        equil_timesteps = self.equil_timesteps
        mc_timesteps = self.mc_timesteps
        mat_name = self.mat_name
        input_script = ["material:unit-cell-file=%s.ucf" % (mat_name)]
        input_script += ["material:file=%s.mat" % (mat_name)]
        # Specify periodic boundary conditions
        input_script += [
            "create:periodic-boundaries-x",
            "create:periodic-boundaries-y",
            "create:periodic-boundaries-z",
        ]
        # Unit cell size in Angstrom
        abc = structure.lattice.abc
        ucx, ucy, ucz = abc[0], abc[1], abc[2]
        input_script += ["dimensions:unit-cell-size-x = %.10f !A" % (ucx)]
        input_script += ["dimensions:unit-cell-size-y = %.10f !A" % (ucy)]
        input_script += ["dimensions:unit-cell-size-z = %.10f !A" % (ucz)]
        # System size in nm
        input_script += [
            "dimensions:system-size-x = %.1f !nm" % (mcbs),
            "dimensions:system-size-y = %.1f !nm" % (mcbs),
            "dimensions:system-size-z = %.1f !nm" % (mcbs),
        ]
        # Critical temperature Monte Carlo calculation
        input_script += [
            "sim:integrator = monte-carlo",
            "sim:program = curie-temperature",
        ]
        # Default Monte Carlo params
        input_script += [
            "sim:equilibration-time-steps = %d" % (equil_timesteps),
            "sim:loop-time-steps = %d" % (mc_timesteps),
            "sim:time-steps-increment = 1",
        ]
        # Set temperature range and step size of simulation
        if "start_t" in self.user_input_settings:
            start_t = self.user_input_settings["start_t"]
        else:
            start_t = 0
        if "end_t" in self.user_input_settings:
            end_t = self.user_input_settings["end_t"]
        else:
            end_t = 1500
        if "temp_increment" in self.user_input_settings:
            temp_increment = self.user_input_settings["temp_increment"]
        else:
            temp_increment = 25
        input_script += [
            "sim:minimum-temperature = %d" % (start_t),
            "sim:maximum-temperature = %d" % (end_t),
            "sim:temperature-increment = %d" % (temp_increment),
        ]
        # Output to save
        input_script += [
            "output:temperature",
            "output:mean-magnetisation-length",
            "output:material-mean-magnetisation-length",
            "output:mean-susceptibility",
        ]
        input_script = "\n".join(input_script)
        with open("input", "w") as f:
            f.write(input_script)

    def _create_ucf(self):
        """Write the Vampire unit-cell file ``<formula>.ucf``.

        Contains the lattice vectors, each atom's fractional coordinates with
        its (0-indexed) material id, and the list of isotropic pairwise
        exchange interactions J_ij taken from the fitted Heisenberg model.
        Must be called after _create_mat (uses self.mat_id_dict).
        """
        structure = self.structure
        mat_name = self.mat_name
        abc = structure.lattice.abc
        ucx, ucy, ucz = abc[0], abc[1], abc[2]
        ucf = ["# Unit cell size:"]
        ucf += ["%.10f %.10f %.10f" % (ucx, ucy, ucz)]
        ucf += ["# Unit cell lattice vectors:"]
        a1 = list(structure.lattice.matrix[0])
        ucf += ["%.10f %.10f %.10f" % (a1[0], a1[1], a1[2])]
        a2 = list(structure.lattice.matrix[1])
        ucf += ["%.10f %.10f %.10f" % (a2[0], a2[1], a2[2])]
        a3 = list(structure.lattice.matrix[2])
        ucf += ["%.10f %.10f %.10f" % (a3[0], a3[1], a3[2])]
        nmats = max(self.mat_id_dict.values())
        ucf += ["# Atoms num_materials; id cx cy cz mat cat hcat"]
        ucf += ["%d %d" % (len(structure), nmats)]
        # Fractional coordinates of atoms
        for site, r in enumerate(structure.frac_coords):
            # Back to 0 indexing for some reason...
            mat_id = self.mat_id_dict[site] - 1
            ucf += ["%d %.10f %.10f %.10f %d 0 0" % (site, r[0], r[1], r[2], mat_id)]
        # J_ij exchange interaction matrix
        sgraph = self.sgraph
        # Total interaction count = sum of coordination numbers over sites
        ninter = 0
        for i, node in enumerate(sgraph.graph.nodes):
            ninter += sgraph.get_coordination_of_site(i)
        ucf += ["# Interactions"]
        ucf += ["%d isotropic" % (ninter)]
        iid = 0  # counts number of interaction
        for i, node in enumerate(sgraph.graph.nodes):
            connections = sgraph.get_connected_sites(i)
            for c in connections:
                jimage = c[1]  # relative integer coordinates of atom j
                dx = jimage[0]
                dy = jimage[1]
                dz = jimage[2]
                j = c[2]  # index of neighbor
                dist = round(c[-1], 2)
                # Look up J_ij between the sites
                if self.avg is True:  # Just use <J> estimate
                    j_exc = self.hm.javg
                else:
                    j_exc = self.hm._get_j_exc(i, j, dist)
                # Convert J_ij from meV to Joules
                j_exc *= 1.6021766e-22
                j_exc = str(j_exc)  # otherwise this rounds to 0
                ucf += ["%d %d %d %d %d %d %s" % (iid, i, j, dx, dy, dz, j_exc)]
                iid += 1
        ucf = "\n".join(ucf)
        ucf_file_name = mat_name + ".ucf"
        with open(ucf_file_name, "w") as f:
            f.write(ucf)

    @staticmethod
    def parse_stdout(vamp_stdout, nmats):
        """Parse stdout from Vampire.

        Args:
            vamp_stdout (txt file): Vampire 'output' file.
            nmats (int): Num of materials in Vampire sim.

        Returns:
            parsed_out (DataFrame): MSONable vampire output.
            critical_temp (float): Calculated critical temp.
        """
        # Column layout: temperature, total |m|, one |m| per material, four
        # susceptibility columns; the trailing "nan" column presumably absorbs
        # a trailing delimiter and is dropped below -- confirm against the
        # Vampire output format.
        names = ["T", "m_total"] + ["m_" + str(i) for i in range(1, nmats + 1)] + ["X_x", "X_y", "X_z", "X_m", "nan"]
        # Parsing vampire MC output
        # NOTE(review): skiprows=9 assumes the Vampire header is exactly 9
        # lines -- confirm for the Vampire version in use.
        df = pd.read_csv(vamp_stdout, sep="\t", skiprows=9, header=None, names=names)
        df.drop("nan", axis=1, inplace=True)
        parsed_out = df.to_json()
        # Max of susceptibility <-> critical temp
        critical_temp = df.iloc[df.X_m.idxmax()]["T"]
        return parsed_out, critical_temp
class VampireOutput(MSONable):
    """
    Container for the results of a Vampire Monte Carlo simulation, exposing
    the parsed output and the estimated critical temperature.
    """

    def __init__(self, parsed_out=None, nmats=None, critical_temp=None):
        """
        Args:
            parsed_out (json): json rep of parsed stdout DataFrame.
            nmats (int): Number of distinct materials (1 for each specie and up/down spin).
            critical_temp (float): Monte Carlo Tc result.
        """
        # Store the constructor inputs verbatim as instance attributes.
        provided = {
            "parsed_out": parsed_out,
            "nmats": nmats,
            "critical_temp": critical_temp,
        }
        for attr_name, attr_value in provided.items():
            setattr(self, attr_name, attr_value)
| gmatteo/pymatgen | pymatgen/command_line/vampire_caller.py | Python | mit | 15,181 | [
"pymatgen"
] | 10686821c1d4152559c66dcef3f6dd6382e343cab296516c1cbd0004f8cb6048 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module is the CLI of the library, that can be called after installing it from the command line
"""
import os
from sys import path, argv
import argparse
import numpy as np
try:
    import sportran as st
except ImportError:
    # Fallback for running straight from a source checkout: append the
    # parent of this file's directory to sys.path and retry the import.
    abs_path = os.path.abspath(__file__)
    tc_path = abs_path[:abs_path.rfind('/')]
    path.append(tc_path[:tc_path.rfind('/')])
    try:
        import sportran as st
    except ImportError:
        raise ImportError('Cannot locate sportran.')
from sportran.utils import log
# Log to the terminal ('bash') and additionally to a file; the log file
# itself is opened later (log.open_file in run_analysis).
log.set_method('bash')
log.append_method('file')
from sportran.plotter.cli import CLIPlotter
# Use the command-line plotter backend for all Current objects.
st.Current.set_plotter(CLIPlotter)
from sportran.plotter import plt  # this imports matplotlib.pyplot
from sportran.plotter import addPlotToPdf, PdfPages
np.set_printoptions(precision=8)
def main() -> int:
    """
    --------------------------------------------------------------------------------
    *** SPORTRAN *** command line interface
    --------------------------------------------------------------------------------
    This script performs the cepstral analysis of a (heat) current.
    Results are written to stdout and a log file, and plots are saved in PDF format.
    INPUT FORMAT:
     - table : a column-formatted text file, with a header in the same format of LAMMPS.
       The name of the LAMMPS compute can start with c_ and end with [#some_number], the code will recognize
       vectors, and will read automatically all the components.
     - dict : a Numpy binary file containing a dictionary (e.g. obtained from the script i_o/read_lammps_log.py)
     - LAMMPS : a LAMMPS log file.
       In this case a --run-keyword must be provided, that identifies the desired 'run' command. This keyword must equal to the comment line placed just before the desired 'run' command (see documentation of i_o/read_lammps_log.py for an example).
    Physical parameters (time step, temperature, volume, units) must be provided.
    The average temperature is computed if a column with the header (or a dictionary key) 'Temp' is found; otherwise you have to specify it.
    You must provide the key that identifies the main current ('-k KEY')
    You can also provide additional currents if your system is a multi-component fluid ('-j CURRENT2 -j CURRENT3'), or you want to decorrelate the main current with respect to them (see PRL).
    (Notice that the output is the same with any number of components. If you have a lots of components, note that you may want to use more than 3 independent processes -- see theory.)
    OUTPUT FILES:
      [output].logfile
          A log of the available information.
      [output].plots.pdf
          A PDF with all the plots generated.
    OUTPUT DATA files (can be text ".dat" or binary ".npy"):
      [output].psd
          freqs [THz], original periodogram, original log(periodogram)
      [output].cospectrum (if any)
          freqs [THz], full matrix cospectrum
      [output].resampled_psd
          freqs [THz], resampled periodogram, resampled log(periodogram)
      [output].cepstral
          cepstral coefficients ck, error(ck), L0(P*), err(L0(P*)), kappa(P*) [W/mK], err(kappa(P*)) [W/mK]
          the line number minus one is the number of cepstral coefficients used (P*).
      [output].cepstrumfiltered_psd
          freqs [THz], cepstrum-filtered periodogram, cepstrum-filtered log(periodogram)
    -------------------------
    Example:
      read and analyze "examples/data/Silica.dat" file. The energy-flux columns are called c_flux[1], c_flux[2], c_flux[3]
        ./analysis "examples/data/Silica.dat" --VOLUME 3130.431110818 --TEMPERATURE 1065.705630 -t 1.0 -k flux1 -u metal -r --FSTAR 28.0 -w 0.5 -o silica_test
    -------------------------
    """
    _epilog = """---
    Enjoy it!
    ---
    Developed by Loris Ercole, Riccardo Bertossa, Sebastiano Bisacchi, under the supervision of prof. Stefano Baroni at SISSA, Via Bonomea, 265 - 34136 Trieste ITALY.
    Please cite these references:
     - Ercole, Marcolongo, Baroni, Sci. Rep. 7, 15835 (2017), https://doi.org/10.1038/s41598-017-15843-2
     - Bertossa, Grasselli, Ercole, Baroni, Phys. Rev. Lett. 122, 255901 (2019), https://doi.org/10.1103/PhysRevLett.122.255901
     - Baroni, Bertossa, Ercole, Grasselli, Marcolongo, Handbook of Materials Modeling (2018), https://doi.org/10.1007/978-3-319-50257-1_12-1
    GitHub: https://github.com/lorisercole/sportran
    Contact: loris.ercole@epfl.ch, rbertoss@sissa.it
    Acknowledgment
    The development of this software is part of the scientific program of the EU MaX Centre of Excellence for Supercomputing Applications (Grant No. 676598, 824143) and has been partly funded through it.
    """
    # yapf: disable
    # Handle --list-currents by hand before argparse runs, so it works even
    # when the otherwise-required arguments (e.g. -k) are absent.
    if '--list-currents' in argv:
        print('units and docstrings list for each current:')
        print('=================')
        print(st.current._list_of_currents_and_units(verbose=True))
        print('=================')
        print('currents and units implementation table')
        print(st.current.build_currents_units_table())
        return 0
    # The function docstring above doubles as the --help description text.
    parser = argparse.ArgumentParser(description=main.__doc__, epilog=_epilog, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('inputfile', type=str,
                        help='input file to read (default format: Table)')
    # --- input file options ---
    input_file_group = parser.add_argument_group('Input file format')
    input_file_group.add_argument('--input-format', default='table', type=str, choices=['table', 'dict', 'lammps'],
                                  help='Format of the input file. (default: table)')
    input_file_group.add_argument('-k', '--mainfluxkey', type=str, required=True,
                                  help='Name of the column keyword that identifies the first flux in the onsager matrix')
    input_file_group.add_argument('-N', '--nsteps', type=int, default=0,
                                  help='Number of steps to read. (optional, default: 0=all)')
    input_file_group.add_argument('-S', '--start-step', type=int, default=0,
                                  help='The first step to read. (optional, default: 0=first)')
    input_file_group.add_argument('--cindex', nargs='*', type=int,
                                  help='Column indexes of the main current to read (0,1,2,...). (optional, default: all)')
    input_file_group.add_argument('--sindex', nargs='*', type=int,
                                  help='Column indexes of a current to be substracted from the main current. (optional)')
    input_file_group.add_argument('--split', type=int, default=1,
                                  help='Build a time series with n*m independent processes (n is the number of processes of the original timeseries, m is the number provided with --split). The length of the new time series will be [original length]/m. (optional)')
    # Options only meaningful for --input-format=lammps
    lammps_group = input_file_group.add_argument_group('LAMMPS format settings')
    lammps_group.add_argument('--run-keyword', type=str,
                              help='Keyword that identifies the run to be read: a specific comment line placed just before the run command (only for "lammps" format)')
    lammps_group.add_argument('--structure', type=str,
                              help='LAMMPS data file containing the structure. Read to get Volume. (optional)')
    # --- output options ---
    output_file_group = parser.add_argument_group('Output file format')
    output_file_group.add_argument('-o', '--output', type=str, default='output',
                                   help='Prefix of the output files')
    output_file_group.add_argument('-O', '--bin-output', action='store_true',
                                   help='Save also binary files. (optional)')
    output_file_group.add_argument('--no-text-output', action='store_true',
                                   help='Do not save text files. (optional)')
    output_file_group.add_argument('--no-plot', action='store_true',
                                   help='Do not save plot files. (optional)')
    output_file_group.add_argument('--bin-output-old', action='store_true',
                                   help='Use old format for binary files (compatibility). (optional) - *TO BE DEPRECATED*')
    # --- physical parameters ---
    input_params_group = parser.add_argument_group('Physical parameters')
    input_params_group.add_argument('-t', '--timestep', type=float, required=True,
                                    help='Time step of the data (fs)')
    # One float option is generated per parameter declared by the sportran
    # current classes (e.g. --VOLUME, --TEMPERATURE).
    for parameter in st.current.all_parameters:
        input_params_group.add_argument(f'--{parameter}', type=float,
                                        help='Usually Angstrom or Kelvins, see description of units and currents implemented available with --list-currents')
    input_params_group.add_argument('-u', '--units', type=str, default='metal',
                                    choices=st.current.all_units,
                                    help='Units. (optional, default: metal)')
    input_params_group.add_argument('-C', '--current', type=str, default='heat',
                                    choices=list(st.current.all_currents.keys()),
                                    help='Type of currents that is provided to the code. Usually this just changes the conversion factor')
    input_params_group.add_argument('--param-from-input-file-column', type=str,
                                    action='append', dest='parameters_from_input_file', nargs=2,
                                    help='in order: header of the column and name of the parameter that will be setted to the average of that column of the input file')
    # Also registered here so it shows up in --help (handled manually above).
    input_params_group.add_argument('--list-currents', action='store_true',
                                    help='show the list of currents implemented, the docstrings of the units, then exit')
    # --- analysis options ---
    analysis_group = parser.add_argument_group('Analysis options')
    analysis_group.add_argument('-r', '--resample', action='store_true',
                                help='Resample the time series (using TSKIP or FSTAR). (optional)')
    # TSKIP and FSTAR are two alternative ways to specify the resampling.
    resamplearg = analysis_group.add_mutually_exclusive_group()
    resamplearg.add_argument('--TSKIP', type=int,
                             help='Resampling time period (steps)')
    resamplearg.add_argument('--FSTAR', type=float,
                             help='Resampling target Nyquist frequency (THz)')
    analysis_group.add_argument('-c', '--corr-factor', type=float, default=1.0,
                                help='Correction factor to the AIC. (optional, default: 1.0 = no correction)')
    analysis_group.add_argument('-j', '--add-currents', type=str, default=[], action='append',
                                help='Additional current for multi-component fluids. (optional, repeat -j to add more currents)')
    # --- plot options ---
    plot_group = parser.add_argument_group('Plot options (optional)')
    plot_group.add_argument('-w', '--psd-filterw', type=float,
                            help='Periodogram filter window width (THz)')
    plot_group.add_argument('--plot-conv-max-pstar', type=int,
                            help='Max number of P* in the kappa(P*) plot (x)')
    plot_group.add_argument('--plot-conv-max-kappa', type=float,
                            help='Max kappa in the kappa(P*) plot (y)')
    plot_group.add_argument('--plot-conv-pstar-tick-interval', type=int,
                            help='Tick interval on the x-axis for the kappa(P*) plot')
    plot_group.add_argument('--plot-conv-kappa-tick-interval', type=float,
                            help='Tick interval on the y-axis for the kappa(P*) plot')
    plot_group.add_argument('--plot-psd-max-THz', type=float,
                            help='Max frequency (THz) in the psd plot (x)')
    plot_group.add_argument('--plot-psd-max-kappa', type=float,
                            help='Max kappa (W/m/K) in the psd plot (y)')
    plot_group.add_argument('--plot-psd-THz-tick-interval', type=float,
                            help='Tick interval on the x-axis for the psd plot')
    plot_group.add_argument('--plot-psd-kappa-tick-interval', type=float,
                            help='Tick interval on the y-axis for the psd plot')
    # --- testing options ---
    beta_group = parser.add_argument_group('Testing options')
    beta_group.add_argument('--test-suite-run', action='store_true')
    beta_group.add_argument('--savetxt-format', type=str, default='%.18e',
                            help='Format string used by `numpy.savetxt` in the output files')
    # yapf: enable
    args = parser.parse_args()
    # All validation and the actual computation happen in run_analysis.
    run_analysis(args)
    return 0
def run_analysis(args):
inputfile = args.inputfile
input_format = args.input_format
j1_key = args.mainfluxkey
NSTEPS = args.nsteps
START_STEP = args.start_step
jindex = args.cindex
sindex = args.sindex
NSPLIT = args.split
run_keyword = args.run_keyword
structurefile = args.structure
output = args.output
binout = args.bin_output
binout_old = args.bin_output_old
do_plot = not args.no_plot
no_text_out = args.no_text_output
DT_FS = args.timestep
parameters = {}
for parameter in st.current.all_parameters:
p = getattr(args, parameter)
if p is not None:
if p <= 0.:
raise ValueError(f'{parameter} must be positive')
parameters[parameter] = p
parameters_from_input_file = args.parameters_from_input_file if args.parameters_from_input_file else []
parameters_from_input_file_key = [x[0] for x in parameters_from_input_file]
parameters_from_input_file_name = [x[1] for x in parameters_from_input_file]
units = args.units
current_type = args.current
resample = args.resample
TSKIP = args.TSKIP
FSTAR = args.FSTAR
corr_factor = args.corr_factor
j2_keys = args.add_currents
psd_filter_w = args.psd_filterw
print_elapsed = not args.test_suite_run
print_cmd = not args.test_suite_run
fmt = args.savetxt_format
if DT_FS <= 0.:
raise ValueError('Time step must be positive')
if NSTEPS < 0:
raise ValueError('nsteps must be positive')
if resample:
if TSKIP is not None:
if TSKIP <= 1:
raise ValueError('Resampling: TSKIP should be > 1')
elif FSTAR is not None:
if FSTAR <= 0.:
raise ValueError('Resampling: FSTAR should be positive')
else:
raise ValueError('Resampling: you should specify either TSKIP or FSTAR')
elif TSKIP is not None:
raise ValueError('Use flag -r to resample. TSKIP will be ignored')
elif FSTAR is not None:
raise ValueError('Use flag -r to resample. FSTAR will be ignored')
if corr_factor <= 0.:
raise ValueError('The correction factor must be positive')
if NSPLIT < 1:
raise ValueError('The number of splits must be a positive number')
log.open_file(output + '.log')
if print_cmd:
log.write_log('Command:\n ' + ' '.join(argv) + '\n\n')
# Write some parameters
if print_cmd:
log.write_log(' Input file ({}): {}'.format(input_format, inputfile))
log.write_log(' Units: {}'.format(units))
log.write_log(' Time step: {} fs'.format(DT_FS))
# Read data
selected_keys = [j1_key]
selected_keys.extend(j2_keys)
jdata = None
if input_format == 'table':
# Table format: data is organized in columns, the selected_keys determines which to read
#input parameters that are read from file
for col, pname in parameters_from_input_file:
selected_keys.append(col)
jfile = st.i_o.TableFile(inputfile, group_vectors=True, print_elapsed=print_elapsed)
jfile.read_datalines(start_step=START_STEP, NSTEPS=NSTEPS, select_ckeys=selected_keys)
jdata = jfile.data
START_STEP = 0 # reset to zero, as later we will need to read all of jdata
elif input_format == 'dict':
# Dict format: data is stored in a binary Numpy file containing a dictionary
jdata = np.load(inputfile, allow_pickle=True).tolist()
elif input_format == 'lammps':
# LAMMPS format: a LAMMPS log file is scanned until the run_keywork is found
jfile = st.i_o.LAMMPSLogFile(inputfile, run_keyword=run_keyword)
if temperature is None:
selected_keys.append('Temp')
jfile.read_datalines(NSTEPS, select_ckeys=selected_keys)
jdata = jfile.data
else:
raise NotImplementedError('Input format not implemented.')
# split data
if NSPLIT > 1:
log.write_log('Splitting input data time series into {:d} segments...'.format(NSPLIT))
data_size = jdata[selected_keys[0]].shape[0]
if len(jdata[selected_keys[0]].shape) > 1:
n_proc = jdata[selected_keys[0]].shape[1]
else:
n_proc = 1
rm = data_size % NSPLIT
steps_start = data_size - rm
steps_end = data_size / NSPLIT
if (steps_end % 2) == 1:
steps_end = steps_end - 1
for key, value in jdata.items():
if not key in parameters_from_input_file_key:
newdata = value[:steps_start].reshape((NSPLIT, data_size / NSPLIT, n_proc)).transpose(
(1, 0, 2)).reshape((data_size / NSPLIT, NSPLIT * n_proc))
jdata[key] = newdata[:steps_end]
log.write_log('New shape of input data: {}'.format(jdata[selected_keys[0]].shape))
if NSTEPS == 0:
NSTEPS = jdata[list(jdata.keys())[0]].shape[0]
# compute average parameters from input file, if requested
def average(data, name, units=''):
ave = np.mean(data)
std = np.std(data)
log.write_log(f'Mean {name} (computed): {ave} +/- {std}')
return ave
for key, value in parameters.items():
log.write_log(f'{key} (input): {value}')
for key, name in parameters_from_input_file:
parameters[name] = average(jdata[key], name)
selected_keys.remove(key)
if structurefile is not None:
# read volume from LAMMPS data file
_, volume = st.i_o.read_lammps_datafile.get_box(structurefile)
log.write_log(' Volume (structure file): {} A^3'.format(volume))
#note: here I hardcoded the volume key
# nothing guarantees that in the parameter list
# of the function that calculates KAPPA_SCALE
# you are going to find the VOLUME parameter with this meaning
parameters['VOLUME'] = volume
# Time step
log.write_log(' Time step (input): {} fs'.format(DT_FS))
# Define currents
if jindex is None:
# read all components
currents = np.array([jdata[key][START_STEP:(START_STEP + NSTEPS), :] for key in selected_keys])
else:
# read only the components jindex
# NOTE: for multi-current cases, it will select jindex of each current
if sindex is None:
currents = np.array([jdata[key][START_STEP:(START_STEP + NSTEPS), jindex] for key in selected_keys])
else:
# subtract the components sindex from those jindex
currents = np.array([
jdata[key][START_STEP:(START_STEP + NSTEPS), jindex] -
jdata[key][START_STEP:(START_STEP + NSTEPS), sindex] for key in selected_keys
])
log.write_log(' currents shape is {}'.format(currents.shape))
log.write_log('snippet:')
log.write_log(currents)
# create HeatCurrent object
j = st.current.all_currents[current_type][0](currents, DT_FS=DT_FS, UNITS=units, **parameters,
PSD_FILTER_W=psd_filter_w)
log.write_log(' Number of currents = {}'.format(j.N_CURRENTS))
log.write_log(' Number of components = {}'.format(j.N_COMPONENTS))
log.write_log(' KAPPA_SCALE = {}'.format(j.KAPPA_SCALE))
log.write_log(' Nyquist_f = {} THz'.format(j.Nyquist_f_THz))
# resample
if resample:
if TSKIP is not None:
jf = j.resample(TSKIP=TSKIP, PSD_FILTER_W=psd_filter_w)
#FSTAR = j.Nyquist_f_THz / TSKIP # from st.heatcurrent.resample_current
FSTAR = jf.Nyquist_f_THz
else:
jf = j.resample(fstar_THz=FSTAR, PSD_FILTER_W=psd_filter_w)
#log.write_log(jf.resample_log)
else:
jf = j
# cepstral analysis
jf.cepstral_analysis(aic_type='aic', Kmin_corrfactor=corr_factor)
#log.write_log(jf.cepstral_log)
############################################################################
## OUTPUT SECTION
## TODO: cleanup data files
############################################################################
# DATA OUTPUT
if binout:
binoutobj = TCOutput()
binoutobj.j_DT_FS = j.DT_FS
binoutobj.j_freqs_THz = j.freqs_THz
binoutobj.j_fpsd = j.fpsd
binoutobj.j_flogpsd = j.flogpsd
binoutobj.j_psd = j.psd
binoutobj.j_logpsd = j.logpsd
binoutobj.j_Nyquist_f_THz = j.Nyquist_f_THz
binoutobj.j_PSD_FILTER_W_THz = psd_filter_w
if j.MANY_CURRENTS:
binoutobj.j_cospectrum = j.cospectrum
binoutobj.j_fcospectrum = j.fcospectrum
if resample:
binoutobj.jf_DT_FS = jf.DT_FS
binoutobj.jf_freqs_THz = jf.freqs_THz
binoutobj.jf_fpsd = jf.fpsd
binoutobj.jf_flogpsd = jf.flogpsd
binoutobj.jf_psd = jf.psd
binoutobj.jf_logpsd = jf.logpsd
binoutobj.jf_Nyquist_f_THz = jf.Nyquist_f_THz
binoutobj.jf_resample_log = jf.resample_log
binoutobj.kappa_Kmin = jf.kappa_Kmin
binoutobj.kappa_Kmin_std = jf.kappa_Kmin_std
binoutobj.cepstral_log = jf.cepstral_log
binoutobj.units = jf.UNITS
binoutobj.KAPPA_SCALE = jf.KAPPA_SCALE
binoutobj.TEMPERATURE = jf.TEMPERATURE
binoutobj.VOLUME = jf.VOLUME
binoutobj.jf_dct_logpsdK = jf.dct.logpsdK
binoutobj.jf_dct_logpsdK_THEORY_std = jf.dct.logpsdK_THEORY_std
binoutobj.jf_dct_logtau = jf.dct.logtau
binoutobj.jf_dct_logtau_THEORY_std = jf.dct.logtau_THEORY_std
binoutobj.jf_dct_kappa = jf.dct.tau * jf.KAPPA_SCALE * 0.5
binoutobj.jf_dct_kappa_THEORY_std = jf.dct.tau_THEORY_std * jf.KAPPA_SCALE * 0.5
binoutobj.jf_dct_aic_Kmin = jf.dct.aic_Kmin
binoutobj.jf_dct_Kmin_corrfactor = jf.dct.Kmin_corrfactor
binoutobj.jf_dct_psd = jf.dct.psd
binoutobj.jf_dct_logpsd = jf.dct.logpsd
if binout_old:
binoutobj.write_old_binary(output)
else:
np.save(output, binoutobj)
if not no_text_out:
outfile_name = output + '.psd.dat'
outarray = np.c_[j.freqs_THz, j.psd, j.fpsd, j.logpsd, j.flogpsd]
outfile_header = 'freqs_THz psd fpsd logpsd flogpsd\n'
np.savetxt(outfile_name, outarray, header=outfile_header, fmt=fmt)
if j.MANY_CURRENTS:
outfile_name = output + '.cospectrum.dat'
outarray = np.c_[j.freqs_THz,
j.cospectrum.reshape(
(j.cospectrum.shape[0] * j.cospectrum.shape[1], j.cospectrum.shape[2])).transpose()]
np.savetxt(outfile_name, np.column_stack([outarray.real, outarray.imag]), fmt=fmt)
outfile_name = output + '.cospectrum.filt.dat'
outarray = np.c_[j.freqs_THz,
j.fcospectrum.reshape(
(j.fcospectrum.shape[0] * j.fcospectrum.shape[1], j.fcospectrum.shape[2])).transpose()]
np.savetxt(outfile_name, np.column_stack([outarray.real, outarray.imag]), fmt=fmt)
if resample:
outfile_name = output + '.resampled_psd.dat'
outarray = np.c_[jf.freqs_THz, jf.psd, jf.fpsd, jf.logpsd, jf.flogpsd]
outfile_header = 'freqs_THz psd fpsd logpsd flogpsd\n'
np.savetxt(outfile_name, outarray, header=outfile_header, fmt=fmt)
outfile_name = output + '.cepstral.dat'
outarray = np.c_[jf.dct.logpsdK, jf.dct.logpsdK_THEORY_std, jf.dct.logtau, jf.dct.
logtau_THEORY_std, jf.dct.tau * jf.KAPPA_SCALE * 0.5, jf.dct.tau_THEORY_std * jf.KAPPA_SCALE *
0.5]
outfile_header = 'ck ck_std L0(P*) L0_std(P*) kappa(P*) kappa_std(P*)\n'
np.savetxt(outfile_name, outarray, header=outfile_header, fmt=fmt)
outfile_name = output + '.cepstrumfiltered_psd.dat'
outarray = np.c_[jf.freqs_THz, jf.dct.psd, jf.dct.logpsd]
outfile_header = 'freqs_THz cepf_psd cepf_logpsd\n'
np.savetxt(outfile_name, outarray, header=outfile_header, fmt=fmt)
####################################
# PLOTS
####################################
if do_plot:
pdf = PdfPages(output + '.plots.pdf')
addPlotToPdf(j.plot_periodogram, pdf)
if resample:
ax = j.plot_resample(jf)
ax[0].set_xlim([0, 2.5 * FSTAR])
pdf.savefig()
plt.close()
addPlotToPdf(j.plot_psd, pdf, jf, f_THz_max=args.plot_psd_max_THz, k_SI_max=args.plot_psd_max_kappa,
k_tick=args.plot_psd_kappa_tick_interval, f_tick=args.plot_psd_THz_tick_interval)
addPlotToPdf(jf.plot_psd, pdf, f_THz_max=args.plot_psd_max_THz, k_SI_max=args.plot_psd_max_kappa,
k_tick=args.plot_psd_kappa_tick_interval, f_tick=args.plot_psd_THz_tick_interval)
addPlotToPdf(jf.plot_psd, pdf, jf, jf, f_THz_max=args.plot_psd_max_THz, k_SI_max=args.plot_psd_max_kappa,
k_tick=args.plot_psd_kappa_tick_interval, f_tick=args.plot_psd_THz_tick_interval)
try:
for idx1 in range(j.N_CURRENTS):
for idx2 in range(idx1, j.N_CURRENTS):
addPlotToPdf(j.plot_other, pdf, idx1, idx2)
except:
pass
# plot cepstral coefficients
ax = jf.plot_ck()
ax.set_xlim([0, 5 * jf.dct.aic_Kmin])
ax.set_ylim([-1, 15])
ax.grid()
pdf.savefig()
plt.close()
# plot L0(Pstar)
ax = jf.plot_L0_Pstar()
ax.set_xlim([0, 10 * jf.dct.aic_Kmin])
pdf.savefig()
plt.close()
# # plot kappa(Pstar)
# ax = jf.plot_kappa_Pstar()
# ax.set_xlim([0, 10*jf.dct.aic_Kmin])
addPlotToPdf(jf.plot_cepstral_conv, pdf, pstar_max=args.plot_conv_max_pstar,
pstar_tick=args.plot_conv_pstar_tick_interval, k_SI_max=args.plot_conv_max_kappa,
kappa_tick=args.plot_conv_kappa_tick_interval)
# plot cepstral log-PSD
ax = jf.plot_periodogram()
jf.plot_cepstral_spectrum(axes=ax, label='cepstrum-filtered')
ax[0].axvline(x=jf.Nyquist_f_THz, ls='--', c='r')
ax[1].axvline(x=jf.Nyquist_f_THz, ls='--', c='r')
#ax[0].set_xlim([0., 2.5*FSTAR_THZ])
#ax[1].set_ylim([12,18])
#ax[0].legend(['original', 'resampled', 'cepstrum-filtered'])
#ax[1].legend(['original', 'resampled', 'cepstrum-filtered']);
pdf.close()
log.close_file()
return 0
#################################
class TCOutput(object):
    """Plain container for the spectra and cepstral-analysis results.

    Attributes are filled in by the analysis driver and then serialized,
    either with ``np.save`` (new format) or with :meth:`write_old_binary`
    (old multi-file ``.npy`` format).
    """
    # yapf: disable
    def __init__(self):
        # TO BE COMPLETED WITH ALL PARAMETERS
        # -- original current spectrum (j) --
        self.j_DT_FS = None
        self.j_freqs_THz = None
        self.j_fpsd = None
        self.j_flogpsd = None
        self.j_psd = None
        self.j_logpsd = None
        self.j_Nyquist_f_THz = None
        self.j_PSD_FILTER_W_THz = None
        self.j_cospectrum = None
        # Fix: j_fcospectrum was never initialized, but write_old_binary
        # reads it -> AttributeError when no filtered cospectrum was set.
        self.j_fcospectrum = None
        # -- resampled current spectrum (jf) --
        self.jf_DT_FS = None
        self.jf_freqs_THz = None
        self.jf_fpsd = None
        self.jf_flogpsd = None
        self.jf_psd = None
        self.jf_logpsd = None
        self.jf_Nyquist_f_THz = None
        self.jf_resample_log = None
        # -- cepstral (DCT) analysis results --
        self.jf_dct_logpsdK = None
        self.jf_dct_logpsdK_THEORY_std = None
        self.jf_dct_logtau = None
        self.jf_dct_logtau_THEORY_std = None
        self.jf_dct_kappa = None
        self.jf_dct_kappa_THEORY_std = None
        self.jf_dct_psd = None
        self.jf_dct_logpsd = None
        self.jf_dct_aic_Kmin = None
        self.jf_dct_Kmin_corrfactor = None
        # -- final results and run parameters --
        self.kappa_Kmin = None
        self.kappa_Kmin_std = None
        self.cepstral_log = None
        self.UNITS = None
        self.KAPPA_SCALE = None
        self.TEMPERATURE = None
        self.VOLUME = None
        self.TSKIP = None
    # yapf: enable

    def write_old_binary(self, output):
        """Write results in the old binary format (one ``.npy`` file per table).

        :param output: path prefix; several ``<output>.*.npy`` files are written.
        """
        opts = {'allow_pickle': False}
        # (removed unused local 'optsa')
        outarray = np.c_[self.j_freqs_THz, self.j_fpsd, self.j_flogpsd, self.j_psd, self.j_logpsd]
        np.save(output + '.psd.npy', outarray, **opts)
        if self.j_cospectrum is not None:
            outarray = np.c_[self.j_freqs_THz, self.j_cospectrum.reshape(-1, self.j_cospectrum.shape[-1]).transpose()]
            np.save(output + '.cospectrum.npy', outarray, **opts)
        if self.j_fcospectrum is not None:
            outarray = np.c_[self.j_freqs_THz, self.j_fcospectrum.reshape(-1, self.j_fcospectrum.shape[-1]).transpose()]
            np.save(output + '.cospectrum.filt.npy', outarray, **opts)
        outarray = np.c_[self.jf_freqs_THz, self.jf_psd, self.jf_fpsd, self.jf_logpsd, self.jf_flogpsd]
        np.save(output + '.resampled_psd.npy', outarray, **opts)
        outarray = np.c_[self.jf_dct_logpsdK, self.jf_dct_logpsdK_THEORY_std, self.jf_dct_logtau,
                         self.jf_dct_logtau_THEORY_std, self.jf_dct_kappa, self.jf_dct_kappa_THEORY_std]
        np.save(output + '.cepstral', outarray, **opts)   # np.save appends '.npy'
        outarray = np.c_[self.jf_freqs_THz, self.jf_dct_psd, self.jf_dct_logpsd]
        np.save(output + '.cepstrumfiltered_psd', outarray, **opts)
# Script entry point: run the command-line analysis driver defined above.
if __name__ == '__main__':
    main()
| lorisercole/thermocepstrum | sportran/analysis.py | Python | gpl-3.0 | 29,366 | [
"LAMMPS"
] | 0367eb8740235a65a36770e686951e4f48ab4759f419b2a994855b0cbb87cfc4 |
"""Base classes for organizing data (for instance to visualize data
with Qt, or to run hierarchical checks) in a global graph, and a
detail tree at each node of the global graph.
The classes defined here assume that data can be organized in two
stages: a global level which only shows 'top-level' objects
(i.e. large file blocks, chunks, and so on) as nodes and links between
the nodes via directed arcs, and a detail level which shows the
details of a top-level object, that is, the actual data they
contain.
:class:`DetailNode` implements the detail side of things. The
:class:`GlobalNode` class implements the global level, which does not show
any actual data, but only structure.
The global level forms a directed graph where the nodes are data
blocks and directed edges represent links from one block to
another.
This directed graph is assumed to have a spanning acyclic directed
subgraph, that is, a subgraph which contains all nodes of the original
graph, and which contains no cycles. This subgraph consists of those
edges which have the default edge type.
The :class:`pyffi.object_models.Data` class is the root node of
the graph. Recursing over all edges of default type of this node will
visit each node (possibly more than once) in a hierarchical order.
The base classes are roughly based on the TreeItem example in the Qt docs:
http://doc.trolltech.com/4.4/itemviews-simpletreemodel.html
"""
# --------------------------------------------------------------------------
# ***** BEGIN LICENSE BLOCK *****
#
# Copyright (c) 2007-2012, Python File Format Interface
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the Python File Format Interface
# project nor the names of its contributors may be used to endorse
# or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# ***** END LICENSE BLOCK *****
# --------------------------------------------------------------------------
from itertools import repeat
from operator import itemgetter
class EdgeType(tuple):
    """Immutable (active, acyclic) pair describing an edge's type.

    By default there are four possible types: an edge is either part of
    the acyclic subgraph or not, and either active or not. The default
    edge type is active and acyclic.
    """

    def __new__(cls, active=True, acyclic=True):
        # Stored as a plain 2-tuple; the properties below expose the slots.
        return tuple.__new__(cls, (active, acyclic))

    # Read-only accessors for the two tuple slots.
    active = property(lambda self: self[0])
    acyclic = property(lambda self: self[1])
class EdgeFilter(tuple):
    """A simple filter for edges.

    The default filter checks the edge's active and acyclic attributes
    and accepts the edge only if both are ``True``. A filter slot of
    ``None`` means "don't care" for that attribute.
    """

    def __new__(cls, active_filter=True, acyclic_filter=True):
        return tuple.__new__(cls, (active_filter, acyclic_filter))

    active_filter = property(itemgetter(0))
    acyclic_filter = property(itemgetter(1))

    def accept(self, edge_type):
        """Return True if *edge_type* passes the filter, False otherwise.

        Bug fix: the original fell through without a return statement
        after all checks passed, so accepted edges yielded ``None``
        (falsy) instead of ``True``.
        """
        if not (self.active_filter is None):
            if edge_type.active != self.active_filter:
                return False
        if not (self.acyclic_filter is None):
            if edge_type.acyclic != self.acyclic_filter:
                return False
        return True
class DetailNode(object):
    """A node of the detail tree which can have children.

    If the data must be editable, also derive the class from one of the
    delegate classes defined in :mod:`pyffi.object_models.editable`, and
    make sure that the get_value and set_value functions are implemented.
    """

    def get_detail_child_nodes(self, edge_filter=EdgeFilter()):
        """Yield all children of this item in the detail view (by
        default, all acyclic and active ones). Override this method if
        the node has children.

        :param edge_filter: The edge type to include.
        :type edge_filter: :class:`EdgeFilter` or ``type(None)``
        :return: Generator for detail tree child nodes.
        """
        # No children by default: an immediately-exhausted generator.
        return (child for child in ())

    def get_detail_child_names(self, edge_filter=EdgeFilter()):
        """Yield all child names of this item in the detail view.
        Override this method if the node has children.

        :return: Generator for detail tree child names (strings).
        """
        return (name for name in ())

    def get_detail_child_edge_types(self, edge_filter=EdgeFilter()):
        """Yield one edge type per child of this item in the detail
        view. Override this method if you rely on more than one edge
        type.
        """
        return repeat(EdgeType())

    def get_detail_display(self):
        """Return the object used to display this instance in the detail
        view. Override this method if the node has data to display.

        :return: A display string.
        """
        return ""

    def get_detail_iterator(self, edge_filter=EdgeFilter()):
        """Depth-first iteration over self, all children, all
        grandchildren, and so on (only the given edge type is followed).
        Do not override.
        """
        yield self
        for node in self.get_detail_child_nodes(edge_filter=edge_filter):
            for descendant in node.get_detail_iterator(edge_filter=edge_filter):
                yield descendant

    def replace_global_node(self, oldnode, newnode, edge_filter=EdgeFilter()):
        """Replace a particular branch in the graph."""
        raise NotImplementedError
class GlobalNode(DetailNode):
    """A node of the global graph."""

    def get_global_display(self):
        """Return a very short summary of the data of this global branch
        for display purposes. Override this method.

        :return: A string.
        """
        # Possible implementation: return getattr(self, "name", "")
        return ""

    def get_global_child_nodes(self, edge_filter=EdgeFilter()):
        """Yield all children of this item in the global view, of given
        edge type (default is edges of type 0). Override this method.

        :return: Generator for global node children.
        """
        return (node for node in ())

    def get_global_child_edge_types(self, edge_filter=EdgeFilter()):
        """Yield one edge type per child of this item in the global
        view. Override this method if you rely on non-default edge
        types.
        """
        return repeat(EdgeType())

    def get_global_iterator(self, edge_filter=EdgeFilter()):
        """Depth-first iteration over self, all children, all
        grandchildren, and so on (only the given edge_filter is
        followed). Do not override.
        """
        yield self
        for node in self.get_global_child_nodes(edge_filter=edge_filter):
            for descendant in node.get_global_iterator(edge_filter=edge_filter):
                yield descendant
| griest024/PokyrimTools | pyffi-develop/pyffi/utils/graph.py | Python | mit | 8,280 | [
"VisIt"
] | e969c065900386713e532d511a4287969acb816cc248629132fea1201080bda3 |
from posterior import *
from astropy.cosmology import FlatLambdaCDM
import numpy as N
import sys
# Use sys to assign arguments for the galaxy data from the command line
# Use sys to assign arguments for the galaxy data from the command line:
# optical/UV colours with errors, redshift, and identifiers (DR8 objid, RA, dec).
u_r, err_u_r, nuv_u, err_nuv_u, z, dr8, ra, dec = sys.argv[1:]
# Use astropy to calculate the age from the redshift in the data
cosmo = FlatLambdaCDM(H0 = 71.0, Om0 = 0.26)
age = N.array(cosmo.age(float(z)))
# Define parameters needed for emcee
nwalkers = 100 # number of monte carlo chains
nsteps= 400 # number of steps in the monte carlo chain
start = [7.5, 1.5] # starting place of all the chains
burnin = 400 # number of steps in the burn in phase of the monte carlo chain
#The rest calls the emcee module which is initialised in the sample function of the posterior file.
samples, samples_save = sample(2, nwalkers, nsteps, burnin, start, float(u_r), float(err_u_r), float(nuv_u), float(err_nuv_u), age, dr8, ra, dec)
# 16th/50th/84th percentiles -> (median, +err, -err) for each parameter.
tq_mcmc, tau_mcmc, = map(lambda v: (v[1], v[2]-v[1], v[1]-v[0]), zip(*N.percentile(samples, [16,50,84],axis=0)))
# Corner plot of the posterior samples, annotated with the best-fit values.
fig = corner_plot(samples, labels = [r'$ t_{quench}$', r'$ \tau$'], extents=[[N.min(samples[:,0]), N.max(samples[:,0])],[N.min(samples[:,1]),N.max(samples[:,1])]], bf=[tq_mcmc, tau_mcmc], id=dr8)
fig.savefig('starpy_output_'+str(dr8)+'_'+str(ra)+'_'+str(dec)+'.pdf')
print 'Best fit [t, tau] values found by starpy for input parameters are : [', tq_mcmc[0], tau_mcmc[0], ']'
| zooniverse/starpy | starpy.py | Python | apache-2.0 | 1,388 | [
"Galaxy"
] | 8317d29833230f9dc1d00455751e90d4bdcd4c667d87967fb135ca999bc2fae4 |
#!/usr/bin/env python
# Copyright (C) 2007 CAMP
# Please see the accompanying LICENSE file for further information.
from distutils.core import setup, Command
from distutils.command.build_py import build_py as _build_py
from glob import glob
from os.path import join
import os
import sys
import shutil
long_description = """\
ASE is a python package providing an open source Atomic Simulation
Environment in the Python language."""
if sys.version_info < (2, 6, 0, 'final', 0):
raise SystemExit('Python 2.6 or later is required!')
# Discover all ASE subpackages: any walked directory containing __init__.py.
# NOTE(review): the '/' -> '.' replace assumes POSIX path separators; verify on Windows.
packages = []
for dirname, dirnames, filenames in os.walk('ase'):
    if '__init__.py' in filenames:
        packages.append(dirname.replace('/', '.'))

package_dir = {'ase': 'ase'}
# Non-Python data file shipped inside the package.
package_data = {'ase': ['lattice/spacegroup/spacegroup.dat']}
class test(Command):
    """Distutils command: build the package, then run the ASE test suite.

    Exits with the number of test failures/errors as the process exit code.
    """
    description = 'build and run test suite; exit code is number of failures'
    user_options = [('calculators=', 'c',
                     'Comma separated list of calculators to test')]

    def __init__(self, dist):
        Command.__init__(self, dist)
        # Ensure 'build' runs before this command.
        self.sub_commands = ['build']

    def initialize_options(self):
        self.calculators = None

    def finalize_options(self):
        pass

    def run(self):
        # Build first, then import the freshly built package from build_lib.
        self.run_command('build')
        buildcmd = self.get_finalized_command('build')
        sys.path.insert(0, buildcmd.build_lib)
        # Calculator list: CLI option takes precedence over the environment.
        if self.calculators is not None:
            calculators = self.calculators.split(',')
        elif 'ASE_CALCULATORS' in os.environ:
            calculators = os.environ['ASE_CALCULATORS'].split(',')
        else:
            calculators = []
        from ase.test import test as _test
        # Run the suite inside a scratch directory under the build dir.
        testdir = '%s/testase-tempfiles' % buildcmd.build_base
        origcwd = os.getcwd()
        if os.path.isdir(testdir):
            shutil.rmtree(testdir)  # clean before running tests!
        os.mkdir(testdir)
        os.chdir(testdir)
        try:
            results = _test(2, calculators, display=False)
            if results.failures or results.errors:
                print >> sys.stderr, 'Test suite failed'
                raise SystemExit(len(results.failures) + len(results.errors))
        finally:
            # Always restore the original working directory.
            os.chdir(origcwd)
class build_py(_build_py):
    """Custom distutils command to build translations."""

    def __init__(self, *args, **kwargs):
        _build_py.__init__(self, *args, **kwargs)
        # Keep list of files to appease bdist_rpm. We have to keep track of
        # all the installed files for no particular reason.
        self.mofiles = []

    def run(self):
        """Compile translation files (requires gettext)."""
        _build_py.run(self)
        msgfmt = 'msgfmt'
        # Probe for msgfmt; skip translations silently if gettext is absent.
        status = os.system(msgfmt + ' -V')
        if status == 0:
            for pofile in glob('ase/gui/po/*/LC_MESSAGES/ag.po'):
                # Mirror the po/ layout under the build directory.
                dirname = join(self.build_lib, os.path.dirname(pofile))
                if not os.path.isdir(dirname):
                    os.makedirs(dirname)
                mofile = join(dirname, 'ag.mo')
                status = os.system('%s -cv %s --output-file=%s 2>&1' %
                                   (msgfmt, pofile, mofile))
                assert status == 0, 'msgfmt failed!'
                self.mofiles.append(mofile)

    def get_outputs(self, *args, **kwargs):
        # Report the compiled .mo files alongside the standard outputs.
        return _build_py.get_outputs(self, *args, **kwargs) + self.mofiles
# Get the current version number:
execfile('ase/svnversion_io.py')  # write ase/svnversion.py and get svnversion
execfile('ase/version.py')  # get version_base
if svnversion and os.name not in ['ce', 'nt']:
    # MSI accepts only version X.X.X
    version = version_base + '.' + svnversion
else:
    version = version_base

# Command-line entry points installed into the user's PATH.
scripts = ['tools/ase-gui', 'tools/ase-db', 'tools/ase-info',
           'tools/ase-build', 'tools/ase-run']
# provide bat executables in the tarball and always for Win
if 'sdist' in sys.argv or os.name in ['ce', 'nt']:
    for s in scripts[:]:
        scripts.append(s + '.bat')

# data_files needs (directory, files-in-this-directory) tuples
data_files = []
for dirname, dirnames, filenames in os.walk('doc'):
    if '.svn' not in dirname:  # skip .svn dirs
        fileslist = []
        for filename in filenames:
            fullname = os.path.join(dirname, filename)
            if '.svn' not in fullname:
                fileslist.append(fullname)
        data_files.append(('share/python-ase/' + dirname, fileslist))

setup(name='python-ase',
      version=version,
      description='Atomic Simulation Environment',
      url='https://wiki.fysik.dtu.dk/ase',
      maintainer='ASE-community',
      maintainer_email='ase-developers@listserv.fysik.dtu.dk',
      license='LGPLv2.1+',
      platforms=['linux'],
      packages=packages,
      package_dir=package_dir,
      package_data=package_data,
      scripts=scripts,
      data_files=data_files,
      long_description=long_description,
      cmdclass={'build_py': build_py,
                'test': test})
| askhl/ase | setup.py | Python | gpl-2.0 | 4,998 | [
"ASE"
] | c49cd65edb61b1f4200fea8d2dc04cc23da33d317d02fa7a523adfc462772c46 |
import pytest
from graphapi.tests.utils import populate_db
from openstates.data.models import Person, VoteEvent
from testutils.factories import create_test_bill
@pytest.mark.django_db
def setup():
    # Module-level setup: seed the test database with fixture data.
    populate_db()
@pytest.fixture
def sortable_bills(kansas):
    """Create three Kansas bills with controlled action dates for sort tests."""
    # A's introduced first
    # B's latest action is first
    # C's introduced last
    b = create_test_bill("2020", "upper", identifier="A")
    b.first_action_date = "2020-01-01"
    b.latest_action_date = "2020-08-01"
    b.save()
    b = create_test_bill("2020", "upper", identifier="B")
    b.first_action_date = "2020-01-02"
    b.latest_action_date = "2020-06-01"
    b.save()
    b = create_test_bill("2020", "upper", identifier="C")
    b.first_action_date = "2020-07-01"
    b.latest_action_date = "2020-07-01"
    b.save()
# Expected number of SQL queries issued by the bills list view.
BILLS_QUERY_COUNT = 7
# Total bills created for Alaska by populate_db (10 random + 2 full-featured).
ALASKA_BILLS = 12
@pytest.mark.django_db
def test_bills_view_basics(client, django_assert_num_queries):
    """Bills list view renders with the expected context and query count."""
    with django_assert_num_queries(BILLS_QUERY_COUNT):
        resp = client.get("/ak/bills/")
    assert resp.status_code == 200
    assert resp.context["state"] == "ak"
    assert resp.context["state_nav"] == "bills"
    assert len(resp.context["chambers"]) == 2
    assert len(resp.context["sessions"]) == 2
    assert "nature" in resp.context["subjects"]
    assert len(resp.context["sponsors"]) == 7
    assert len(resp.context["classifications"]) == 3
    # 10 random bills, 2 full featured
    assert len(resp.context["bills"]) == ALASKA_BILLS
@pytest.mark.django_db
def test_bills_view_query(client, django_assert_num_queries):
    """Full-text query matches bill titles and body text without
    altering the available filter options."""
    # title search works
    with django_assert_num_queries(BILLS_QUERY_COUNT):
        resp = client.get("/ak/bills/?query=moose")
    assert resp.status_code == 200
    assert len(resp.context["bills"]) == 1
    # search in body works
    resp = client.get("/ak/bills/?query=gorgonzola")
    assert resp.status_code == 200
    assert len(resp.context["bills"]) == 1
    # test that a query doesn't alter the search options
    assert len(resp.context["chambers"]) == 2
    assert len(resp.context["sessions"]) == 2
    assert "nature" in resp.context["subjects"]
    assert len(resp.context["subjects"]) > 10
    assert len(resp.context["sponsors"]) == 7
    assert len(resp.context["classifications"]) == 3
@pytest.mark.django_db
def test_bills_view_query_bill_id(client, django_assert_num_queries):
    """Queries that look like a bill identifier match case-insensitively."""
    # query by bill id
    with django_assert_num_queries(BILLS_QUERY_COUNT):
        resp = client.get("/ak/bills/?query=HB 1")
    assert resp.status_code == 200
    assert len(resp.context["bills"]) == 1
    # case insensitive
    resp = client.get("/ak/bills/?query=hb 1")
    assert resp.status_code == 200
    assert len(resp.context["bills"]) == 1
@pytest.mark.django_db
def test_bills_view_chamber(client, django_assert_num_queries):
    """Chamber filter partitions the full set of Alaska bills."""
    with django_assert_num_queries(BILLS_QUERY_COUNT):
        upper = len(client.get("/ak/bills/?chamber=upper").context["bills"])
    with django_assert_num_queries(BILLS_QUERY_COUNT):
        lower = len(client.get("/ak/bills/?chamber=lower").context["bills"])
    assert upper + lower == ALASKA_BILLS
@pytest.mark.django_db
def test_bills_view_session(client, django_assert_num_queries):
    """Session filter partitions the full set of Alaska bills."""
    with django_assert_num_queries(BILLS_QUERY_COUNT):
        b17 = len(client.get("/ak/bills/?session=2017").context["bills"])
    with django_assert_num_queries(BILLS_QUERY_COUNT):
        b18 = len(client.get("/ak/bills/?session=2018").context["bills"])
    assert b17 + b18 == ALASKA_BILLS
@pytest.mark.django_db
def test_bills_view_sponsor(client, django_assert_num_queries):
    """Sponsor filter returns only bills sponsored by the given person."""
    amanda = Person.objects.get(name="Amanda Adams")
    with django_assert_num_queries(BILLS_QUERY_COUNT):
        assert len(client.get(f"/ak/bills/?sponsor={amanda.id}").context["bills"]) == 2
@pytest.mark.django_db
def test_bills_view_classification(client, django_assert_num_queries):
    """Classification filter covers bills, resolutions, and amendments."""
    bills = len(client.get("/ak/bills/?classification=bill").context["bills"])
    resolutions = len(
        client.get("/ak/bills/?classification=resolution").context["bills"]
    )
    assert (
        len(
            client.get("/ak/bills/?classification=constitutional+amendment").context[
                "bills"
            ]
        )
        == 2
    )
    # bills and resolutions together account for every Alaska bill
    assert bills + resolutions == ALASKA_BILLS
@pytest.mark.django_db
def test_bills_view_subject(client, django_assert_num_queries):
    """Subject filter matches bills tagged with the given subject."""
    with django_assert_num_queries(BILLS_QUERY_COUNT):
        assert len(client.get("/ak/bills/?subjects=nature").context["bills"]) == 2
@pytest.mark.django_db
def test_bills_view_status(client, django_assert_num_queries):
    """Status filter matches bills at the given legislative stage."""
    with django_assert_num_queries(BILLS_QUERY_COUNT):
        assert (
            len(client.get("/ak/bills/?status=passed-lower-chamber").context["bills"])
            == 1
        )
@pytest.mark.django_db
def test_bills_view_sort_latest_action(
    client, django_assert_num_queries, sortable_bills
):
    """Bills sort by latest_action date; '-' prefix reverses; reverse is default."""
    bills = client.get("/ks/bills/?sort=latest_action").context["bills"]
    assert len(bills) == 3
    assert bills[0].identifier == "B"
    assert bills[1].identifier == "C"
    assert bills[2].identifier == "A"
    assert (
        bills[0].latest_action_date
        < bills[1].latest_action_date
        < bills[2].latest_action_date
    )
    # reverse
    bills = client.get("/ks/bills/?sort=-latest_action").context["bills"]
    assert bills[0].identifier == "A"
    assert bills[1].identifier == "C"
    assert bills[2].identifier == "B"
    # no sort provided, same as -latest_action
    bills = client.get("/ks/bills/?").context["bills"]
    assert bills[0].identifier == "A"
    assert bills[1].identifier == "C"
    assert bills[2].identifier == "B"
@pytest.mark.django_db
def test_bills_view_sort_first_action(
    client, django_assert_num_queries, sortable_bills
):
    """Bills sort by first_action date ascending; '-' prefix reverses.

    (Removed a leftover debug print loop that dumped each bill to stdout.)
    """
    bills = client.get("/ks/bills/?sort=first_action").context["bills"]
    assert len(bills) == 3
    assert bills[0].identifier == "A"
    assert bills[1].identifier == "B"
    assert bills[2].identifier == "C"
    assert (
        bills[0].first_action_date
        < bills[1].first_action_date
        < bills[2].first_action_date
    )
    # reverse
    bills = client.get("/ks/bills/?sort=-first_action").context["bills"]
    assert bills[0].identifier == "C"
    assert bills[1].identifier == "B"
    assert bills[2].identifier == "A"
@pytest.mark.django_db
def test_bills_view_bad_page(client):
    """A non-numeric page parameter yields a 404, not a server error."""
    resp = client.get("/ak/bills/?page=A")
    assert resp.status_code == 404
@pytest.mark.django_db
def test_bill_view(client, django_assert_num_queries):
    """Bill detail view context: sponsors, actions, votes, versions, stages."""
    with django_assert_num_queries(17):
        resp = client.get("/ak/bills/2018/HB1/")
    assert resp.status_code == 200
    assert resp.context["state"] == "ak"
    assert resp.context["state_nav"] == "bills"
    assert resp.context["bill"].identifier == "HB 1"
    assert len(resp.context["sponsorships"]) == 2
    assert len(resp.context["actions"]) == 3
    # actions are ordered newest-first
    assert resp.context["actions"][0].date > resp.context["actions"][2].date
    assert len(resp.context["votes"]) == 1
    assert len(resp.context["versions"]) == 2
    assert len(resp.context["documents"]) == 2
    assert resp.context["read_link"] == "https://example.com/f.pdf"
    assert resp.context["stages"][1] == {
        "date": "2018-03-01",
        "stage": "Alaska House",
        "text": "Passed Alaska House",
    }
@pytest.mark.django_db
def test_vote_view(client, django_assert_num_queries):
    """Vote detail view: counts ordered yes-then-no, plus per-party tallies."""
    vid = VoteEvent.objects.get(motion_text="Vote on House Passage").id.split("/")[1]
    with django_assert_num_queries(7):
        resp = client.get(f"/vote/{vid}/")
    assert resp.status_code == 200
    assert resp.context["state"] == "ak"
    assert resp.context["state_nav"] == "bills"
    assert len(resp.context["person_votes"]) == 5
    # vote counts in order, yes, no, others
    assert resp.context["vote_counts"][0].option == "yes"
    assert resp.context["vote_counts"][1].option == "no"
    assert resp.context["vote_counts"][0].value == 1
    assert resp.context["vote_counts"][1].value == 4
    # sorted list of (party, counts) pairs
    assert resp.context["party_votes"][0][0] == "Democratic"
    assert resp.context["party_votes"][0][1]["no"] == 1
    assert resp.context["party_votes"][0][1]["yes"] == 0
    assert resp.context["party_votes"][1][0] == "Republican"
    assert resp.context["party_votes"][1][1]["no"] == 2
    assert resp.context["party_votes"][1][1]["yes"] == 1
    assert resp.context["party_votes"][2][0] == "Unknown"
    assert resp.context["party_votes"][2][1]["no"] == 1
@pytest.mark.django_db
def test_bills_feed(client):
    """The bills RSS/Atom feed endpoint responds successfully."""
    resp = client.get("/ak/bills/feed/")
    assert resp.status_code == 200
| openstates/openstates.org | public/tests/test_bill_views.py | Python | mit | 8,757 | [
"MOOSE"
] | db89a70f8a3b6c2cac645aa766def65ae24935dc632f14aa3c6d6ae90e67bc06 |
################################################################################
# #
# Copyright (C) 2010,2011,2012,2013,2014, 2015,2016 The ESPResSo project #
# #
# This file is part of ESPResSo. #
# #
# ESPResSo is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# ESPResSo is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
# #
# Active Matter: Rectification Tutorial #
# #
################################################################################
from __future__ import print_function

from math import cos, pi, sin
import os
import sys

import numpy as np

import espressomd
from espressomd import assert_features
from espressomd.shapes import Cylinder, Wall, HollowCone
# Fail fast unless the ESPResSo build has the required features compiled in.
assert_features(["ENGINE","CONSTRAINTS","LENNARD_JONES","ROTATION","MASS"])

# Quaternion procedure
def a2quat(phi, theta):
    """Return the quaternion [w, x, y, z] composing a rotation by
    *theta* about the y-axis with a rotation by *phi* about the z-axis.
    """
    # Half-angle quaternion for the rotation about y.
    w1, x1, y1, z1 = cos(theta / 2.0), 0, sin(theta / 2.0), 0
    # Half-angle quaternion for the rotation about z.
    w2, x2, y2, z2 = cos(phi / 2.0), 0, 0, sin(phi / 2.0)
    # Hamilton product of the two quaternions.
    return [w1 * w2 - x1 * x2 - y1 * y2 - z1 * z2,
            w1 * x2 + x1 * w2 - y1 * z2 + z1 * y2,
            w1 * y2 + x1 * z2 + y1 * w2 - z1 * x2,
            w1 * z2 - x1 * y2 + y1 * x2 + z1 * w2]
################################################################################

# Read in the active velocity from the command prompt
if len(sys.argv) != 2:
    print("Usage:",sys.argv[0],"<vel> (0 <= vel < 10.0)")
    exit()
vel = float(sys.argv[1])

################################################################################

# create an output folder; a pre-existing directory is expected on re-runs.
# Catch only OSError: a bare "except:" would also swallow SystemExit and
# KeyboardInterrupt.
outdir = "./RESULTS_RECTIFICATION_SIMULATION/"
try:
    os.makedirs(outdir)
except OSError:
    print("INFO: Directory \"{}\" exists".format(outdir))
# Setup the box (we pad the diameter to ensure that the LB boundaries
# and therefore the constraints, are away from the edge of the box)
length = 100
diameter = 20
prod_steps = 500   # number of production measurements
prod_length = 500  # integration steps between measurements
dt = 0.01          # MD time step

# Setup the MD parameters
system = espressomd.System(box_l=[length, diameter + 4, diameter + 4])
# NOTE(review): box_l is already set by the constructor above; this
# reassignment is redundant but harmless.
system.box_l = [length, diameter + 4, diameter + 4]
system.cell_system.skin = 0.1
system.time_step = dt
system.min_global_cut = 0.5
# Langevin thermostat: implicit solvent at kT = 1 with unit friction.
system.thermostat.set_langevin(kT=1.0,gamma=1.0)
################################################################################
#
# Here we use exactly the same parameters for the geometry of the constraints
# that was used for the LB boundaries. This can be done, since the distance
# function used for the constraints is the same as the one used for the
# LB boundaries.
#
################################################################################

# Confining cylinder along x; direction=-1 keeps particles inside.
cylinder = Cylinder(center=[length/2.0, (diameter+4)/2.0, (diameter+4)/2.0],
                    axis=[1,0,0],
                    radius=diameter/2.0,
                    length=length,
                    direction=-1)
system.constraints.add(shape=cylinder,particle_type=1)

# Setup walls capping the cylinder at both ends.
wall = Wall(dist=2, normal=[1,0,0])
system.constraints.add(shape=wall,particle_type=2)
wall = Wall(dist=-(length - 2), normal=[-1,0,0])
system.constraints.add(shape=wall,particle_type=3)

# Setup cone: the funnel-shaped obstacle that rectifies the swimmers.
irad = 4.0
angle = pi/4.0
orad = (diameter - irad)/sin(angle)
shift = 0.25*orad*cos(angle)
hollow_cone = HollowCone(position_x=length/2.0 - shift,
                         position_y=(diameter+4)/2.0,
                         position_z=(diameter+4)/2.0,
                         orientation_x=1,
                         orientation_y=0,
                         orientation_z=0,
                         outer_radius=orad,
                         inner_radius=irad,
                         width=2.0,
                         opening_angle=angle,
                         direction=1)
system.constraints.add(shape=hollow_cone,particle_type=4)

################################################################################
#
# We set up a WCA (almost-hard) interaction between the particles and the
# the confining geometry. We do not have particle-particle interactions, which
# are not necessary to observe rectification.
#
################################################################################

sig = 0.5
cut = 1.12246*sig  # 2^(1/6)*sigma: LJ cut at the minimum -> purely repulsive
eps = 1.0
shift = 0.25
# One WCA interaction per constraint type (cylinder, two walls, cone).
system.non_bonded_inter[0,1].lennard_jones.set_params(epsilon=eps, sigma=sig, cutoff=cut, shift=shift)
system.non_bonded_inter[0,2].lennard_jones.set_params(epsilon=eps, sigma=sig, cutoff=cut, shift=shift)
system.non_bonded_inter[0,3].lennard_jones.set_params(epsilon=eps, sigma=sig, cutoff=cut, shift=shift)
system.non_bonded_inter[0,4].lennard_jones.set_params(epsilon=eps, sigma=sig, cutoff=cut, shift=shift)
################################################################################
#
# Setup the particles. We put them all in two points one in each chamber
# and give them random directions. This speeds up the equilibration, since
# putting them all in a single chamber, would make it take a long time to
# observe the effect of rectification. Note that they need to be able to
# rotate freely, hence the command rotation=[1,1,1] is provided
#
################################################################################

npart = 500
for cntr in range(npart):
    # Alternate the seeding point between the two chambers.
    if cntr % 2 == 0:
        x = 0.25*length
    else:
        x = 0.75*length
    y = (diameter+4)/2.0
    z = (diameter+4)/2.0
    # Random swimming orientation.
    theta = float(2*np.random.random()*np.pi)
    phi = float(2*np.random.random()*np.pi)
    # NOTE(review): a2quat is declared as a2quat(phi, theta) but called
    # here with (theta, phi); both angles are uniform on [0, 2*pi), so
    # the orientation is still random -- confirm the intended order.
    quats = a2quat(theta,phi)
    system.part.add(pos=[x,y,z],type=0,swimming={'v_swim':vel},quat=quats,rotation=[1,1,1])
################################################################################

# Equilibrate
system.integrator.run(25*prod_length)

# Output the CMS coordinates
with open("{}/CMS_{}.dat".format(outdir,vel), "w") as outfile:
    print("####################################################",file=outfile)
    print("# time CMS x coord average CMS #",file=outfile)
    print("####################################################",file=outfile)

    # Production run
    dev_sum = 0.0
    dev_av = 0.0
    time_0 = system.time
    for i in range(prod_steps):
        # We output the coordinate of the center of mass in
        # the direction of the long axis, here we consider
        # the deviation from the center
        dev = system.galilei.system_CMS()[0] - 0.5*length;
        # Running average, skipping the initial (i == 0) sample.
        if i > 0:
            dev_sum = dev_sum + dev
            dev_av = dev_sum/i
        time = system.time - time_0
        print("{} {} {}".format(time,dev,dev_av),file=outfile)
        system.integrator.run(prod_length)

# Output the final configuration (swimmers only, type 0)
system.part.writevtk("{}/points_{}.vtk".format(outdir,vel),types=[0])
| KonradBreitsprecher/espresso | doc/tutorials/06-active_matter/SOLUTIONS/rectification_simulation.py | Python | gpl-3.0 | 8,352 | [
"ESPResSo",
"VTK"
] | dab1c633925d4b1f2bcb6e3fa6b9e82798f4b5603d667f32de6c1a4e75345dec |
# staticchecking/DogsAndRobots.py
# (c)2017 MindView LLC: see Copyright.txt
# We make no guarantees that this code is fit for any purpose.
# Visit http://OnJava8.com for more book information.
def speak(anything):
    """Duck-typed dispatch: ask *anything* to talk.

    Works for any object exposing a no-argument talk() method.
    """
    talk = anything.talk
    talk()
class Dog:
    """A dog: talks by barking; may also reproduce."""

    def talk(self):
        print("Arf!")

    def reproduce(self):
        pass
class Robot:
    """A robot: talks by clicking; needs the occasional oil change."""

    def talk(self):
        print("Click!")

    def oilChange(self):
        pass
# Both Dog and Robot satisfy speak()'s implicit talk() interface,
# so no static type relationship between them is required (duck typing).
a = Dog()
b = Robot()
speak(a)
speak(b)

# Expected console output (checked by the book's build tooling):
output = """
Arf!
Click!
"""
| mayonghui2112/helloWorld | sourceCode/testMaven/onjava8/src/main/java/staticchecking/DogsAndRobots.py | Python | apache-2.0 | 464 | [
"VisIt"
] | 44074c2524d791a94ceb21a266e891d3ed15bbe5731b430e159bd71d5aad66cf |
#!/usr/bin/python3
#Copyright (C) 2020- The University of Notre Dame
#This software is distributed under the GNU General Public License.
#See the file COPYING for details.
import os
import sys
import json
from work_queue_server import WorkQueueServer
from time import time
from create_splits import create_splits
# Tasks defined by define_tasks() and consumed by main().
tasks = []

# Reference/index files every task needs shipped alongside its query split.
inputs = ["bwa", "ref.fastq", "ref.fastq.sa", "ref.fastq.pac",
          "ref.fastq.amb", "ref.fastq.ann", "ref.fastq.bwt"]


def _file_entry(local_name, remote_name, cache):
    """Build one Work Queue file specification dict.

    cache: True if the worker may keep the file between tasks.
    """
    return {
        "local_name": local_name,
        "remote_name": remote_name,
        "flags": {"cache": cache, "watch": False},
    }


def define_tasks(nsplits):
    """Append one JSON-serializable task per query split to ``tasks``.

    Each task runs bwa mem on query split i against the reference and
    gzips the resulting SAM file. The repeated file-spec dicts of the
    original version are built by the _file_entry() helper.
    """
    for i in range(nsplits):
        task = {
            "command_line": "./bwa mem ref.fastq query.fastq.%d.gz | gzip > query.fastq.%d.sam" % (i, i),
            # Single per-task output; never cached on the worker.
            "output_files": [_file_entry("query.fastq.%d.sam" % i,
                                         "query.fastq.%d.sam" % i, False)],
            # Reference/index files are cacheable across tasks.
            "input_files": [_file_entry(name, name, True) for name in inputs],
        }
        # The per-split query is unique to this task (not cached) ...
        task["input_files"].append(_file_entry("query.fastq.%d.gz" % i,
                                               "query.fastq.%d.gz" % i, False))
        # ... while gzip itself is shared and cacheable.
        task["input_files"].append(_file_entry("/usr/bin/gzip", "gzip", True))

        # Resource requirements per task.
        task["cores"] = 2
        task["memory"] = 1000
        task["disk"] = 1000
        tasks.append(task)
def main():
    """Submit the BWA tasks to a Work Queue server and wait for results.

    Usage: ./wq_bwa_json.py <nsplits> <nworkers>
    NOTE(review): <nworkers> (sys.argv[2]) is validated but never read
    here -- presumably consumed by an external worker launcher; confirm.
    """
    if len(sys.argv) < 3:
        print("USAGE: ./wq_bwa_json.py <nsplits> <nworkers>")
        sys.exit(0)

    start = time()
    # generate tasks
    define_tasks(int(sys.argv[1]))

    q = WorkQueueServer()
    # connect to server
    q.connect('127.0.0.1', 0, 0, "wq_bwa_json")
    response = q.status()
    print(response)

    # submit tasks (each task dict is serialized to JSON for the server)
    for t in tasks:
        t = json.dumps(t)
        response = q.submit(t)
        print(response)

    # submit wait requests until all tasks are retired
    while not q.empty():
        response = q.wait(10)
        print(response)

    # disconnect
    q.disconnect()

    end = time()
    start = float(start)
    end = float(end)
    # NOTE(review): the "-1" presumably subtracts a fixed connect delay;
    # confirm against the server's connect behavior.
    print("time: {}".format(end-start-1))
    # clean up the per-split SAM outputs
    os.system("rm -f query.fastq.*.sam")
| dthain/cctools | apps/wq_bwa_json/wq_bwa_json.py | Python | gpl-2.0 | 2,833 | [
"BWA"
] | 285e4371e531a118bcd26af6102d82fd3a2e5dbaf6ac145d1ef4f0421ff5b013 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import vtk
def main():
    """Render a schematic VTK camera model with arrows illustrating the
    azimuth, elevation and roll camera motions, the direction of
    projection (DOP) spike, and the focal point sphere.
    """
    colors = vtk.vtkNamedColors()

    # Set the colors.
    colors.SetColor("AzimuthArrowColor", [255, 77, 77, 255])
    colors.SetColor("ElevationArrowColor", [77, 255, 77, 255])
    colors.SetColor("RollArrowColor", [255, 255, 77, 255])
    colors.SetColor("SpikeColor", [255, 77, 255, 255])
    colors.SetColor("BkgColor", [26, 51, 102, 255])

    # Create a rendering window, renderer and interactor.
    ren = vtk.vtkRenderer()
    renWin = vtk.vtkRenderWindow()
    renWin.AddRenderer(ren)
    iren = vtk.vtkRenderWindowInteractor()
    iren.SetRenderWindow(renWin)

    # Create a camera model: a cone (lens) appended to a cube (body).
    camCS = vtk.vtkConeSource()
    camCS.SetHeight(1.5)
    camCS.SetResolution(12)
    camCS.SetRadius(0.4)

    camCBS = vtk.vtkCubeSource()
    camCBS.SetXLength(1.5)
    camCBS.SetZLength(0.8)
    camCBS.SetCenter(0.4, 0, 0)

    camAPD = vtk.vtkAppendFilter()
    camAPD.AddInputConnection(camCBS.GetOutputPort())
    camAPD.AddInputConnection(camCS.GetOutputPort())

    camMapper = vtk.vtkPolyDataMapper()
    camMapper.SetInputConnection(camAPD.GetOutputPort())
    camActor = vtk.vtkLODActor()
    camActor.SetMapper(camMapper)
    camActor.SetScale(2, 2, 2)

    # Draw the arrows.
    # pd: filled arrow outline; pd2: half outline used for the roll sweep.
    pd = vtk.vtkPolyData()
    ca = vtk.vtkCellArray()
    fp = vtk.vtkPoints()
    fp.InsertNextPoint(0, 1, 0)
    fp.InsertNextPoint(8, 1, 0)
    fp.InsertNextPoint(8, 2, 0)
    fp.InsertNextPoint(10, 0.01, 0)
    fp.InsertNextPoint(8, -2, 0)
    fp.InsertNextPoint(8, -1, 0)
    fp.InsertNextPoint(0, -1, 0)
    ca.InsertNextCell(7)
    ca.InsertCellPoint(0)
    ca.InsertCellPoint(1)
    ca.InsertCellPoint(2)
    ca.InsertCellPoint(3)
    ca.InsertCellPoint(4)
    ca.InsertCellPoint(5)
    ca.InsertCellPoint(6)
    pd.SetPoints(fp)
    pd.SetPolys(ca)

    pd2 = vtk.vtkPolyData()
    ca2 = vtk.vtkCellArray()
    fp2 = vtk.vtkPoints()
    fp2.InsertNextPoint(0, 1, 0)
    fp2.InsertNextPoint(8, 1, 0)
    fp2.InsertNextPoint(8, 2, 0)
    fp2.InsertNextPoint(10, 0.01, 0)
    ca2.InsertNextCell(4)
    ca2.InsertCellPoint(0)
    ca2.InsertCellPoint(1)
    ca2.InsertCellPoint(2)
    ca2.InsertCellPoint(3)
    pd2.SetPoints(fp2)
    pd2.SetLines(ca2)

    # Turn the flat arrow outline into a solid, curved arrow.
    arrowIM = vtk.vtkImplicitModeller()
    arrowIM.SetInputData(pd)
    arrowIM.SetSampleDimensions(50, 20, 8)

    arrowCF = vtk.vtkContourFilter()
    arrowCF.SetInputConnection(arrowIM.GetOutputPort())
    arrowCF.SetValue(0, 0.2)

    arrowWT = vtk.vtkWarpTo()
    arrowWT.SetInputConnection(arrowCF.GetOutputPort())
    arrowWT.SetPosition(5, 0, 5)
    arrowWT.SetScaleFactor(0.85)
    arrowWT.AbsoluteOn()

    arrowT = vtk.vtkTransform()
    arrowT.RotateY(60)
    arrowT.Translate(-1.33198, 0, -1.479)
    arrowT.Scale(1, 0.5, 1)

    arrowTF = vtk.vtkTransformFilter()
    arrowTF.SetInputConnection(arrowWT.GetOutputPort())
    arrowTF.SetTransform(arrowT)

    arrowMapper = vtk.vtkDataSetMapper()
    arrowMapper.SetInputConnection(arrowTF.GetOutputPort())
    arrowMapper.ScalarVisibilityOff()

    # Draw the azimuth arrows.
    a1Actor = vtk.vtkLODActor()
    a1Actor.SetMapper(arrowMapper)
    a1Actor.RotateZ(180)
    a1Actor.SetPosition(1, 0, -1)
    a1Actor.GetProperty().SetColor(colors.GetColor3d("AzimuthArrowColor"))
    a1Actor.GetProperty().SetSpecularColor(colors.GetColor3d("White"))
    a1Actor.GetProperty().SetSpecular(0.3)
    a1Actor.GetProperty().SetSpecularPower(20)
    a1Actor.GetProperty().SetAmbient(0.2)
    a1Actor.GetProperty().SetDiffuse(0.8)

    a2Actor = vtk.vtkLODActor()
    a2Actor.SetMapper(arrowMapper)
    a2Actor.RotateZ(180)
    a2Actor.RotateX(180)
    a2Actor.SetPosition(1, 0, 1)
    a2Actor.GetProperty().SetColor(colors.GetColor3d("AzimuthArrowColor"))
    a2Actor.GetProperty().SetSpecularColor(colors.GetColor3d("White"))
    a2Actor.GetProperty().SetSpecular(0.3)
    a2Actor.GetProperty().SetSpecularPower(20)
    a2Actor.GetProperty().SetAmbient(0.2)
    a2Actor.GetProperty().SetDiffuse(0.8)

    # Draw the elevation arrows.
    a3Actor = vtk.vtkLODActor()
    a3Actor.SetMapper(arrowMapper)
    a3Actor.RotateZ(180)
    a3Actor.RotateX(90)
    a3Actor.SetPosition(1, -1, 0)
    a3Actor.GetProperty().SetColor(colors.GetColor3d("ElevationArrowColor"))
    a3Actor.GetProperty().SetSpecularColor(colors.GetColor3d("White"))
    a3Actor.GetProperty().SetSpecular(0.3)
    a3Actor.GetProperty().SetSpecularPower(20)
    a3Actor.GetProperty().SetAmbient(0.2)
    a3Actor.GetProperty().SetDiffuse(0.8)

    a4Actor = vtk.vtkLODActor()
    a4Actor.SetMapper(arrowMapper)
    a4Actor.RotateZ(180)
    a4Actor.RotateX(-90)
    a4Actor.SetPosition(1, 1, 0)
    a4Actor.GetProperty().SetColor(colors.GetColor3d("ElevationArrowColor"))
    a4Actor.GetProperty().SetSpecularColor(colors.GetColor3d("White"))
    a4Actor.GetProperty().SetSpecular(0.3)
    a4Actor.GetProperty().SetSpecularPower(20)
    a4Actor.GetProperty().SetAmbient(0.2)
    a4Actor.GetProperty().SetDiffuse(0.8)

    # Draw the DOP (direction of projection) spike by sweeping the half
    # outline around the axis.
    arrowT2 = vtk.vtkTransform()
    arrowT2.Scale(1, 0.6, 1)
    arrowT2.RotateY(90)

    arrowTF2 = vtk.vtkTransformPolyDataFilter()
    arrowTF2.SetInputData(pd2)
    arrowTF2.SetTransform(arrowT2)

    arrowREF = vtk.vtkRotationalExtrusionFilter()
    arrowREF.SetInputConnection(arrowTF2.GetOutputPort())
    arrowREF.CappingOff()
    arrowREF.SetResolution(30)

    spikeMapper = vtk.vtkPolyDataMapper()
    spikeMapper.SetInputConnection(arrowREF.GetOutputPort())

    a5Actor = vtk.vtkLODActor()
    a5Actor.SetMapper(spikeMapper)
    a5Actor.SetScale(.3, .3, .6)
    a5Actor.RotateY(90)
    a5Actor.SetPosition(-2, 0, 0)
    a5Actor.GetProperty().SetColor(colors.GetColor3d("SpikeColor"))
    a5Actor.GetProperty().SetAmbient(0.2)
    a5Actor.GetProperty().SetDiffuse(0.8)

    # Focal point.
    fps = vtk.vtkSphereSource()
    fps.SetRadius(0.5)
    fpMapper = vtk.vtkPolyDataMapper()
    fpMapper.SetInputConnection(fps.GetOutputPort())
    fpActor = vtk.vtkLODActor()
    fpActor.SetMapper(fpMapper)
    fpActor.SetPosition(-9, 0, 0)
    fpActor.GetProperty().SetSpecularColor(colors.GetColor3d("White"))
    fpActor.GetProperty().SetSpecular(0.3)
    fpActor.GetProperty().SetAmbient(0.2)
    fpActor.GetProperty().SetDiffuse(0.8)
    fpActor.GetProperty().SetSpecularPower(20)

    # Create the roll arrows (a tighter warp of the same arrow contour).
    arrowWT2 = vtk.vtkWarpTo()
    arrowWT2.SetInputConnection(arrowCF.GetOutputPort())
    arrowWT2.SetPosition(5, 0, 2.5)
    arrowWT2.SetScaleFactor(0.95)
    arrowWT2.AbsoluteOn()

    arrowT3 = vtk.vtkTransform()
    arrowT3.Translate(-2.50358, 0, -1.70408)
    arrowT3.Scale(0.5, 0.3, 1)

    arrowTF3 = vtk.vtkTransformFilter()
    arrowTF3.SetInputConnection(arrowWT2.GetOutputPort())
    arrowTF3.SetTransform(arrowT3)

    arrowMapper2 = vtk.vtkDataSetMapper()
    arrowMapper2.SetInputConnection(arrowTF3.GetOutputPort())
    arrowMapper2.ScalarVisibilityOff()

    # Draw the roll arrows.
    a6Actor = vtk.vtkLODActor()
    a6Actor.SetMapper(arrowMapper2)
    a6Actor.RotateZ(90)
    a6Actor.SetPosition(-4, 0, 0)
    a6Actor.SetScale(1.5, 1.5, 1.5)
    a6Actor.GetProperty().SetColor(colors.GetColor3d("RollArrowColor"))
    a6Actor.GetProperty().SetSpecularColor(colors.GetColor3d("White"))
    a6Actor.GetProperty().SetSpecular(0.3)
    a6Actor.GetProperty().SetSpecularPower(20)
    a6Actor.GetProperty().SetAmbient(0.2)
    a6Actor.GetProperty().SetDiffuse(0.8)

    # Add the actors to the renderer, set the background and size.
    ren.AddActor(camActor)
    ren.AddActor(a1Actor)
    ren.AddActor(a2Actor)
    ren.AddActor(a3Actor)
    ren.AddActor(a4Actor)
    ren.AddActor(a5Actor)
    ren.AddActor(a6Actor)
    ren.AddActor(fpActor)
    # NOTE(review): the first SetBackground is immediately overridden by
    # the second; presumably only "SlateGray" is intended -- confirm.
    ren.SetBackground(colors.GetColor3d("BkgColor"))
    ren.SetBackground(colors.GetColor3d("SlateGray"))
    renWin.SetSize(640, 480)

    # Render the image.
    cam1 = (ren.GetActiveCamera())
    ren.ResetCamera()
    cam1.Azimuth(150)
    cam1.Elevation(30)
    cam1.Dolly(1.5)
    ren.ResetCameraClippingRange()

    # Create a TextActor for azimuth (a1 and a2 actor's color).
    text = vtk.vtkTextActor()
    text.SetInput("Azimuth")
    tprop = text.GetTextProperty()
    tprop.SetFontFamilyToArial()
    tprop.ShadowOff()
    tprop.SetLineSpacing(1.0)
    tprop.SetFontSize(36)
    tprop.SetColor(a1Actor.GetProperty().GetColor())
    text.SetDisplayPosition(20, 50)
    ren.AddActor2D(text)

    # Create a TextActor for elevation (a3 and a4 actor's color).
    text2 = vtk.vtkTextActor()
    text2.SetInput("Elevation")
    tprop = text2.GetTextProperty()
    tprop.SetFontFamilyToArial()
    tprop.ShadowOff()
    tprop.SetLineSpacing(1.0)
    tprop.SetFontSize(36)
    tprop.SetColor(a3Actor.GetProperty().GetColor())
    text2.SetDisplayPosition(20, 100)
    ren.AddActor2D(text2)

    # Create a TextActor for roll (a6 actor's color).
    text3 = vtk.vtkTextActor()
    text3.SetInput("Roll")
    tprop = text3.GetTextProperty()
    tprop.SetFontFamilyToArial()
    tprop.ShadowOff()
    tprop.SetLineSpacing(1.0)
    tprop.SetFontSize(36)
    tprop.SetColor(a6Actor.GetProperty().GetColor())
    text3.SetDisplayPosition(20, 150)
    ren.AddActor2D(text3)

    # Start the interactive event loop.
    iren.Initialize()
    iren.Start()
# Run the demo only when executed as a script, not on import.
if __name__ == "__main__":
    main()
| lorensen/VTKExamples | src/Python/Visualization/CameraModel1.py | Python | apache-2.0 | 9,220 | [
"VTK"
] | 3a4b15bf0bdfa1ce6855dda8ab7b6f254b99ca9869c1643e8770b5c347ac52f9 |
'''
synbiochem (c) University of Manchester 2016
synbiochem is licensed under the MIT License.
To view a copy of this license, visit <http://opensource.org/licenses/MIT/>.
@author: neilswainston
'''
# pylint: disable=invalid-name
# pylint: disable=too-many-arguments
import os
from shutil import rmtree
import pandas as pd
class Writer(object):
    """Writes biochem4j node and relationship data as CSV files.

    Node files go to <dest_dir>/nodes and relationship files to
    <dest_dir>/rels; both directories are recreated on construction.
    """

    def __init__(self, dest_dir):
        base_dir = os.path.abspath(dest_dir)
        self.__nodes_dir = os.path.join(base_dir, 'nodes')
        self.__rels_dir = os.path.join(base_dir, 'rels')

        # Start each run from a clean slate.
        for directory in (self.__nodes_dir, self.__rels_dir):
            if os.path.exists(directory):
                rmtree(directory)
            os.makedirs(directory)

    def write_nodes(self, nodes, group, separator=';'):
        """Writes Nodes to <nodes_dir>/<group>.csv.

        Returns the filename written, or None if *nodes* is empty.
        """
        if not nodes:
            return None

        frame = pd.DataFrame(nodes)
        # Columns that are entirely missing carry no information.
        frame.dropna(axis=1, how='all', inplace=True)

        filename = os.path.join(self.__nodes_dir, group + '.csv')
        frame.to_csv(filename, index=False, encoding='utf-8', sep=separator)
        return filename

    def write_rels(self, rels, group_start, group_end, separator=';'):
        """Writes Relationships to <rels_dir>/<start>_<end>.csv.

        Each relationship is (start_id, type, end_id[, properties]); an
        optional trailing property dict is expanded into extra columns.
        Returns the filename written, or None if *rels* is empty.
        """
        if not rels:
            return None

        has_props = len(rels[0]) > 3
        columns = [':START_ID(' + group_start + ')',
                   ':TYPE',
                   ':END_ID(' + group_end + ')']
        if has_props:
            columns.append('PROPERTIES')

        frame = pd.DataFrame(rels, columns=columns)
        if has_props:
            # Expand the property dicts into one column per key.
            props = pd.DataFrame(list(frame['PROPERTIES']))
            frame.drop('PROPERTIES', axis=1, inplace=True)
            frame = frame.join(props)

        filename = os.path.join(self.__rels_dir,
                                group_start + '_' + group_end + '.csv')
        frame.to_csv(filename, index=False, encoding='utf-8', sep=separator)
        return filename
| synbiochem/biochem4j | sbcdb/utils.py | Python | mit | 2,044 | [
"VisIt"
] | 7f5026b6d074621cce0d604880268ad0820f60ed1c631940cca1b8f93d4e4d2b |
from numpy import (linspace, zeros, meshgrid, abs, empty, arange, int32,
unravel_index, dtype)
from multiprocessing import Pool
from ..solvers import solver_dict, get_solver_name
# attempt to import plotting libraries
try:
from matplotlib import pyplot
from mpl_toolkits.mplot3d import axes3d
except ImportError:
pyplot = None
axes3d = None
mlab = None # mayavi may crash python
try: # for prettier colors
from palettable.colorbrewer import get_map
except ImportError:
try:
from brewer2mpl import get_map
except ImportError:
get_map = None
class phenotypePhasePlaneData:
    """class to hold results of a phenotype phase plane analysis"""

    def __init__(self,
                 reaction1_name, reaction2_name,
                 reaction1_range_max, reaction2_range_max,
                 reaction1_npoints, reaction2_npoints):
        # Axis labels and sampling parameters for the two varied reactions.
        self.reaction1_name = reaction1_name
        self.reaction2_name = reaction2_name
        self.reaction1_range_max = reaction1_range_max
        self.reaction2_range_max = reaction2_range_max
        self.reaction1_npoints = reaction1_npoints
        self.reaction2_npoints = reaction2_npoints
        # Evenly spaced flux grids from 0 to the range maxima.
        self.reaction1_fluxes = linspace(0, reaction1_range_max,
                                         reaction1_npoints)
        self.reaction2_fluxes = linspace(0, reaction2_range_max,
                                         reaction2_npoints)
        # Result grids, indexed [reaction1_point, reaction2_point].
        self.growth_rates = zeros((reaction1_npoints, reaction2_npoints))
        self.shadow_prices1 = zeros((reaction1_npoints, reaction2_npoints))
        self.shadow_prices2 = zeros((reaction1_npoints, reaction2_npoints))
        # Integer phase label per grid point; filled in by segment().
        self.segments = zeros(self.growth_rates.shape, dtype=int32)
        self.phases = []

    def plot(self):
        """plot the phenotype phase plane in 3D using any available backend

        NOTE(review): mlab is None at module scope unless mayavi was
        imported elsewhere, so in practice only the matplotlib branch or
        the ImportError can be reached here -- plot_mayavi() must be
        called explicitly.
        """
        if pyplot is not None:
            self.plot_matplotlib()
        elif mlab is not None:
            self.plot_mayavi()
        else:
            raise ImportError("No suitable 3D plotting package found")

    def plot_matplotlib(self, theme="Paired", scale_grid=False):
        """Use matplotlib to plot a phenotype phase plane in 3D.

        theme: color theme to use (requires palettable)

        returns: maptlotlib 3d subplot object"""
        if pyplot is None:
            raise ImportError("Error importing matplotlib 3D plotting")
        # One color name (hex string) per grid point.
        colors = empty(self.growth_rates.shape, dtype=dtype((str, 7)))
        n_segments = self.segments.max()
        # pick colors: fall back to a fixed qualitative palette when
        # palettable/brewer2mpl is unavailable or rejects n_segments
        color_list = ['#A6CEE3', '#1F78B4', '#B2DF8A', '#33A02C',
                      '#FB9A99', '#E31A1C', '#FDBF6F', '#FF7F00',
                      '#CAB2D6', '#6A3D9A', '#FFFF99', '#B15928']
        if get_map is not None:
            try:
                color_list = get_map(theme, 'Qualitative',
                                     n_segments).hex_colors
            except ValueError:
                from warnings import warn
                warn('palettable could not be used for this number of phases')
        if n_segments > len(color_list):
            from warnings import warn
            warn("not enough colors to color all detected phases")
        if n_segments > 0 and n_segments <= len(color_list):
            # color each detected phase; segment ids are 1-based
            for i in range(n_segments):
                colors[self.segments == (i + 1)] = color_list[i]
        else:
            colors[:, :] = 'b'
        if scale_grid:
            # grid wires should not have more than ~20 points
            xgrid_scale = int(self.reaction1_npoints / 20)
            ygrid_scale = int(self.reaction2_npoints / 20)
        else:
            xgrid_scale, ygrid_scale = (1, 1)
        figure = pyplot.figure()
        xgrid, ygrid = meshgrid(self.reaction1_fluxes, self.reaction2_fluxes)
        axes = figure.add_subplot(111, projection="3d")
        # transpose so axes match the [reaction1, reaction2] result grids
        xgrid = xgrid.transpose()
        ygrid = ygrid.transpose()
        axes.plot_surface(xgrid, ygrid, self.growth_rates, rstride=1,
                          cstride=1, facecolors=colors, linewidth=0,
                          antialiased=False)
        axes.plot_wireframe(xgrid, ygrid, self.growth_rates, color="black",
                            rstride=xgrid_scale, cstride=ygrid_scale)
        axes.set_xlabel(self.reaction1_name, size="x-large")
        axes.set_ylabel(self.reaction2_name, size="x-large")
        axes.set_zlabel("Growth rate", size="x-large")
        axes.view_init(elev=30, azim=-135)
        figure.set_tight_layout(True)
        return axes

    def plot_mayavi(self):
        """Use mayavi to plot a phenotype phase plane in 3D.

        The resulting figure will be quick to interact with in real time,
        but might be difficult to save as a vector figure.

        returns: mlab figure object"""
        from mayavi import mlab
        figure = mlab.figure(bgcolor=(1, 1, 1), fgcolor=(0, 0, 0))
        figure.name = "Phenotype Phase Plane"
        # rescale all three axes into a common [0, 10] cube
        max = 10.0
        xmax = self.reaction1_fluxes.max()
        ymax = self.reaction2_fluxes.max()
        zmax = self.growth_rates.max()
        xgrid, ygrid = meshgrid(self.reaction1_fluxes, self.reaction2_fluxes)
        xgrid = xgrid.transpose()
        ygrid = ygrid.transpose()
        xscale = max / xmax
        yscale = max / ymax
        zscale = max / zmax
        mlab.surf(xgrid * xscale, ygrid * yscale, self.growth_rates * zscale,
                  representation="wireframe", color=(0, 0, 0), figure=figure)
        # surface colored by the combined shadow prices
        mlab.mesh(xgrid * xscale, ygrid * yscale, self.growth_rates * zscale,
                  scalars=self.shadow_prices1 + self.shadow_prices2,
                  resolution=1, representation="surface", opacity=0.75,
                  figure=figure)
        # draw axes
        mlab.outline(extent=(0, max, 0, max, 0, max))
        mlab.axes(opacity=0, ranges=[0, xmax, 0, ymax, 0, zmax])
        mlab.xlabel(self.reaction1_name)
        mlab.ylabel(self.reaction2_name)
        mlab.zlabel("Growth rates")
        return figure

    def segment(self, threshold=0.01):
        """attempt to segment the data and identify the various phases

        Grid points whose shadow prices (both) are within *threshold* of
        a representative point are grouped into the same phase.
        """
        self.segments *= 0
        # each entry in phases will consist of the following tuple
        # ((x, y), shadow_price1, shadow_price2)
        self.phases = []
        # initialize the area to be all False
        covered_area = (self.growth_rates * 0 == 1)
        # as long as part of the area has not been covered
        segment_id = 0
        while self.segments.min() == 0:
            segment_id += 1
            # i and j are indices for a current point which has not been
            # assigned a segment yet
            i, j = unravel_index(self.segments.argmin(), self.segments.shape)
            # update the segment id for any point with a similar shadow price
            # to the current point
            d1 = abs(self.shadow_prices1 - self.shadow_prices1[i, j])
            d2 = abs(self.shadow_prices2 - self.shadow_prices2[i, j])
            self.segments[(d1 < threshold) * (d2 < threshold)] += segment_id
            # add the current point as one of the phases
            self.phases.append((
                (self.reaction1_fluxes[i], self.reaction2_fluxes[j]),
                self.shadow_prices1[i, j], self.shadow_prices2[i, j]))
def _calculate_subset(arguments):
    """Calculate a subset of the phenotype phase plane data.

    Runs as a worker (possibly in a separate process): for every pair of
    uptake fluxes it constrains the two exchange reactions, re-solves the
    LP and records growth rate plus the two metabolite shadow prices.

    Store each result tuple as:
    (i, j, growth_rate, shadow_price1, shadow_price2)"""
    model = arguments["model"]
    reaction1_fluxes = arguments["reaction1_fluxes"]
    reaction2_fluxes = arguments["reaction2_fluxes"]
    metabolite1_name = arguments["metabolite1_name"]
    metabolite2_name = arguments["metabolite2_name"]
    index1 = arguments["index1"]
    index2 = arguments["index2"]
    i_list = arguments["i_list"]
    j_list = arguments["j_list"]
    tolerance = arguments["tolerance"]
    solver = solver_dict[arguments["solver"]]
    results = []
    reaction1 = model.reactions[index1]
    reaction2 = model.reactions[index2]
    # Build the LP once and warm-start it; only variable bounds change
    # inside the loops.
    problem = solver.create_problem(model)
    solver.solve_problem(problem)
    for a, flux1 in enumerate(reaction1_fluxes):
        i = i_list[a]
        # flux is actually negative for uptake. Also some solvers require
        # float instead of numpy.float64
        flux1 = float(-1 * flux1)
        # change bounds on reaction 1
        solver.change_variable_bounds(problem, index1, flux1 - tolerance,
                                      flux1 + tolerance)
        for b, flux2 in enumerate(reaction2_fluxes):
            j = j_list[b]
            flux2 = float(-1 * flux2)  # same story as flux1
            # change bounds on reaction 2
            solver.change_variable_bounds(problem, index2, flux2 - tolerance,
                                          flux2 + tolerance)
            # solve the problem and save results
            solver.solve_problem(problem)
            solution = solver.format_solution(problem, model)
            if solution is not None and solution.status == "optimal":
                results.append((i, j, solution.f,
                                solution.y_dict[metabolite1_name],
                                solution.y_dict[metabolite2_name]))
            else:
                # infeasible point: record zero growth and shadow prices
                results.append((i, j, 0, 0, 0))
        # reset reaction 2 bounds
        solver.change_variable_bounds(problem, index2,
                                      float(reaction2.lower_bound),
                                      float(reaction2.upper_bound))
    # reset reaction 1 bounds
    solver.change_variable_bounds(problem, index1,
                                  float(reaction1.lower_bound),
                                  float(reaction1.upper_bound))
    return results
def calculate_phenotype_phase_plane(
        model, reaction1_name, reaction2_name,
        reaction1_range_max=20, reaction2_range_max=20,
        reaction1_npoints=50, reaction2_npoints=50,
        solver=None, n_processes=1, tolerance=1e-6):
    """calculates the growth rates while varying the uptake rates for two
    reactions.

    :returns: a `phenotypePhasePlaneData` object containing the growth rates
    for the uptake rates. To plot the
    result, call the plot function of the returned object.

    :Example:
    >>> import cobra.test
    >>> model = cobra.test.create_test_model("textbook")
    >>> ppp = calculate_phenotype_phase_plane(model, "EX_glc__D_e", "EX_o2_e")
    >>> ppp.plot()
    """
    if solver is None:
        solver = get_solver_name()
    data = phenotypePhasePlaneData(
        str(reaction1_name), str(reaction2_name),
        reaction1_range_max, reaction2_range_max,
        reaction1_npoints, reaction2_npoints)
    # find the objects for the reactions and metabolites
    index1 = model.reactions.index(data.reaction1_name)
    index2 = model.reactions.index(data.reaction2_name)
    # NOTE(review): assumes each exchange reaction involves a single
    # metabolite, whose shadow price is then tracked -- confirm for
    # non-exchange reactions.
    metabolite1_name = list(model.reactions[index1]._metabolites)[0].id
    metabolite2_name = list(model.reactions[index2]._metabolites)[0].id
    if n_processes > reaction1_npoints:  # limit the number of processes
        n_processes = reaction1_npoints
    range_add = reaction1_npoints // n_processes
    # prepare the list of arguments for each _calculate_subset call:
    # the reaction1 axis is sliced into one contiguous chunk per process
    arguments_list = []
    i = arange(reaction1_npoints)
    j = arange(reaction2_npoints)
    for n in range(n_processes):
        start = n * range_add
        if n != n_processes - 1:
            r1_range = data.reaction1_fluxes[start:start + range_add]
            i_list = i[start:start + range_add]
        else:
            # last worker takes any remainder rows as well
            r1_range = data.reaction1_fluxes[start:]
            i_list = i[start:]
        arguments_list.append({
            "model": model,
            "index1": index1, "index2": index2,
            "metabolite1_name": metabolite1_name,
            "metabolite2_name": metabolite2_name,
            "reaction1_fluxes": r1_range,
            "reaction2_fluxes": data.reaction2_fluxes.copy(),
            "i_list": i_list, "j_list": j.copy(),
            "tolerance": tolerance, "solver": solver})
    if n_processes > 1:
        p = Pool(n_processes)
        results = list(p.map(_calculate_subset, arguments_list))
    else:
        results = [_calculate_subset(arguments_list[0])]
    # scatter the worker tuples back into the result grids
    for result_list in results:
        for result in result_list:
            i = result[0]
            j = result[1]
            data.growth_rates[i, j] = result[2]
            data.shadow_prices1[i, j] = result[3]
            data.shadow_prices2[i, j] = result[4]
    data.segment()
    return data
| pstjohn/cobrapy | cobra/flux_analysis/phenotype_phase_plane.py | Python | gpl-2.0 | 12,577 | [
"Mayavi"
] | 01a572efe05ad8820ae0ab1490f8ea06d55cdca7d9806751f78619df4ffecbd0 |
import sys
import unittest
from collections import OrderedDict
from . import new_pkg_universe_builder
from britney2.installability.solver import compute_scc, InstallabilitySolver, OrderNode
class TestInstTester(unittest.TestCase):
def test_basic_inst_test(self):
    """End-to-end check of the installability tester: plain dependencies,
    alternatives ("awk | mawk"), suite membership, and the effect of
    removing/re-adding a binary."""
    builder = new_pkg_universe_builder()
    universe, inst_tester = builder.new_package('lintian').depends_on('perl').depends_on_any_of('awk', 'mawk').\
        new_package('perl-base').is_essential().\
        new_package('dpkg').is_essential(). \
        new_package('perl').\
        new_package('awk').not_in_testing().\
        new_package('mawk').\
        build()
    pkg_lintian = builder.pkg_id('lintian')
    pkg_awk = builder.pkg_id('awk')
    pkg_mawk = builder.pkg_id('mawk')
    pkg_perl = builder.pkg_id('perl')
    pkg_perl_base = builder.pkg_id('perl-base')
    # lintian is satisfiable via mawk even though awk is absent.
    assert inst_tester.is_installable(pkg_lintian)
    assert inst_tester.is_installable(pkg_perl)
    assert inst_tester.any_of_these_are_in_the_suite((pkg_lintian, pkg_perl))
    assert not inst_tester.is_installable(pkg_awk)
    assert not inst_tester.any_of_these_are_in_the_suite((pkg_awk,))
    # Removing perl breaks lintian's hard dependency ...
    inst_tester.remove_binary(pkg_perl)
    assert not inst_tester.any_of_these_are_in_the_suite((pkg_perl,))
    assert inst_tester.any_of_these_are_in_the_suite((pkg_lintian,))
    assert not inst_tester.is_pkg_in_the_suite(pkg_perl)
    assert inst_tester.is_pkg_in_the_suite(pkg_lintian)
    assert not inst_tester.is_installable(pkg_lintian)
    assert not inst_tester.is_installable(pkg_perl)
    # ... and re-adding it restores installability.
    inst_tester.add_binary(pkg_perl)
    assert inst_tester.is_installable(pkg_lintian)
    assert inst_tester.is_installable(pkg_perl)
    assert universe.reverse_dependencies_of(pkg_perl) == {pkg_lintian}
    assert universe.reverse_dependencies_of(pkg_lintian) == frozenset()
    # awk and mawk are equivalent, but nothing else is eqv.
    assert universe.are_equivalent(pkg_awk, pkg_mawk)
    assert not universe.are_equivalent(pkg_lintian, pkg_mawk)
    assert not universe.are_equivalent(pkg_lintian, pkg_perl)
    assert not universe.are_equivalent(pkg_mawk, pkg_perl)
    # Trivial test of the special case for adding and removing an essential package
    inst_tester.remove_binary(pkg_perl_base)
    inst_tester.add_binary(pkg_perl_base)
    inst_tester.add_binary(pkg_awk)
    assert inst_tester.is_installable(pkg_lintian)
def test_basic_essential_conflict(self):
    """Packages conflicting with (pseudo-)essential packages must be
    uninstallable; conflicting with only one of two alternatives is OK."""
    builder = new_pkg_universe_builder()
    pseudo_ess1 = builder.new_package('pseudo-essential1')
    pseudo_ess2 = builder.new_package('pseudo-essential2')
    essential_simple = builder.new_package('essential-simple').is_essential()
    essential_with_deps = builder.new_package('essential-with-deps').is_essential().\
        depends_on_any_of(pseudo_ess1, pseudo_ess2)
    conflict1 = builder.new_package('conflict1').conflicts_with(essential_simple)
    conflict2 = builder.new_package('conflict2').conflicts_with(pseudo_ess1, pseudo_ess2)
    # Conflicting with only one of the pseudo-essential alternatives
    # leaves the other to satisfy the essential dependency.
    conflict_installable1 = builder.new_package('conflict-inst1').conflicts_with(pseudo_ess1)
    conflict_installable2 = builder.new_package('conflict-inst2').conflicts_with(pseudo_ess2)
    universe, inst_tester = builder.build()
    assert inst_tester.is_installable(essential_simple.pkg_id)
    assert inst_tester.is_installable(essential_with_deps.pkg_id)
    assert inst_tester.is_installable(conflict_installable1.pkg_id)
    assert inst_tester.is_installable(conflict_installable2.pkg_id)
    assert not inst_tester.is_installable(conflict1.pkg_id)
    assert not inst_tester.is_installable(conflict2.pkg_id)
    for line in inst_tester.stats.stats():
        print(line)
    # only conflict1 hits the direct essential-conflict fast path
    assert inst_tester.stats.conflicts_essential == 1
def test_basic_simple_choice(self):
    """A choice between two non-equivalent, solvable alternatives works."""
    b = new_pkg_universe_builder()
    root = b.new_package('root')
    clash_a = b.new_package('conflict1')
    clash_b = b.new_package('conflict2')
    leaf_a = b.new_package('bottom1').conflicts_with(clash_a)
    leaf_b = b.new_package('bottom2').conflicts_with(clash_b)
    alt_a = b.new_package('pkg1').depends_on(leaf_a)
    alt_b = b.new_package('pkg2').depends_on(leaf_b)
    root.depends_on_any_of(alt_a, alt_b)
    universe, tester = b.build()
    # The dependencies of "root" are not equivalent (if they were, we would
    # trigger an optimization, which takes another code path).
    assert not universe.are_equivalent(alt_a.pkg_id, alt_b.pkg_id)
    assert tester.is_installable(root.pkg_id)
    for stat_line in tester.stats.stats():
        print(stat_line)
    stats = tester.stats
    assert stats.eqv_table_times_used == 0
    assert stats.eqv_table_total_number_of_alternatives_eliminated == 0
    assert stats.eqv_table_reduced_to_one == 0
    assert stats.eqv_table_reduced_by_zero == 0
def test_basic_simple_choice_deadend(self):
    """Both alternatives of a choice indirectly conflict with 'root'."""
    builder = new_pkg_universe_builder()
    root_pkg = builder.new_package('root')
    bottom1_pkg = builder.new_package('bottom1').conflicts_with(root_pkg)
    bottom2_pkg = builder.new_package('bottom2').conflicts_with(root_pkg)
    pkg1 = builder.new_package('pkg1').depends_on(bottom1_pkg)
    pkg2 = builder.new_package('pkg2').depends_on(bottom2_pkg)
    root_pkg.depends_on_any_of(pkg1, pkg2)
    universe, inst_tester = builder.build()
    # The dependencies of "root" are not equivalent (if they were, we would trigger
    # an optimization, which takes another code path)
    assert not universe.are_equivalent(pkg1.pkg_id, pkg2.pkg_id)
    assert not inst_tester.is_installable(root_pkg.pkg_id)
    for line in inst_tester.stats.stats():
        print(line)
    # No equivalence-table optimisation should have fired.
    assert inst_tester.stats.eqv_table_times_used == 0
    assert inst_tester.stats.eqv_table_total_number_of_alternatives_eliminated == 0
    assert inst_tester.stats.eqv_table_reduced_to_one == 0
    assert inst_tester.stats.eqv_table_reduced_by_zero == 0
    # This case is simple enough that the installability tester will assert it does not
    # need to recurse to reject the first option
    assert inst_tester.stats.backtrace_restore_point_used == 0
    assert inst_tester.stats.backtrace_last_option == 1
def test_basic_simple_choice_opt_no_restore_needed(self):
    """Solvable choice where the 'no restore point needed' shortcut fires."""
    builder = new_pkg_universe_builder()
    conflicting = builder.new_package('conflict')
    root_pkg = builder.new_package('root').conflicts_with(conflicting)
    bottom1_pkg = builder.new_package('bottom1').conflicts_with(conflicting)
    bottom2_pkg = builder.new_package('bottom2').conflicts_with(conflicting)
    # These two packages have (indirect) conflicts, so they cannot trigger the
    # safe set optimization. However, since "root" already has the same conflict
    # it can use the "no restore point needed" optimization.
    pkg1 = builder.new_package('pkg1').depends_on(bottom1_pkg)
    pkg2 = builder.new_package('pkg2').depends_on(bottom2_pkg)
    root_pkg.depends_on_any_of(pkg1, pkg2)
    universe, inst_tester = builder.build()
    # The dependencies of "root" are not equivalent (if they were, we would trigger
    # an optimization, which takes another code path)
    assert not universe.are_equivalent(pkg1.pkg_id, pkg2.pkg_id)
    assert inst_tester.is_installable(root_pkg.pkg_id)
    for line in inst_tester.stats.stats():
        print(line)
    assert inst_tester.stats.eqv_table_times_used == 0
    assert inst_tester.stats.eqv_table_total_number_of_alternatives_eliminated == 0
    assert inst_tester.stats.eqv_table_reduced_to_one == 0
    assert inst_tester.stats.eqv_table_reduced_by_zero == 0
    assert inst_tester.stats.backtrace_restore_point_used == 0
    assert inst_tester.stats.backtrace_last_option == 0
    assert inst_tester.stats.choice_resolved_without_restore_point == 1
def test_basic_simple_choice_opt_no_restore_needed_deadend(self):
    """Unsolvable choice: both options funnel into an impossible package."""
    builder = new_pkg_universe_builder()
    conflicting1 = builder.new_package('conflict1').conflicts_with('conflict2')
    conflicting2 = builder.new_package('conflict2').conflicts_with('conflict1')
    root_pkg = builder.new_package('root')
    # "bottom" can never be installed: it needs both mutually conflicting
    # packages at the same time.
    bottom_pkg = builder.new_package('bottom').depends_on(conflicting1).depends_on(conflicting2)
    mid1_pkg = builder.new_package('mid1').depends_on(bottom_pkg)
    mid2_pkg = builder.new_package('mid2').depends_on(bottom_pkg)
    # Distinct intermediates keep the two options non-equivalent, so the
    # choice survives and is rejected via the "last option" path below.
    pkg1 = builder.new_package('pkg1').depends_on(mid1_pkg)
    pkg2 = builder.new_package('pkg2').depends_on(mid2_pkg)
    root_pkg.depends_on_any_of(pkg1, pkg2)
    universe, inst_tester = builder.build()
    # The dependencies of "root" are not equivalent (if they were, we would trigger
    # an optimization, which takes another code path)
    assert not universe.are_equivalent(pkg1.pkg_id, pkg2.pkg_id)
    assert not inst_tester.is_installable(root_pkg.pkg_id)
    for line in inst_tester.stats.stats():
        print(line)
    assert inst_tester.stats.eqv_table_times_used == 0
    assert inst_tester.stats.eqv_table_total_number_of_alternatives_eliminated == 0
    assert inst_tester.stats.eqv_table_reduced_to_one == 0
    assert inst_tester.stats.eqv_table_reduced_by_zero == 0
    assert inst_tester.stats.backtrace_restore_point_used == 0
    assert inst_tester.stats.choice_resolved_without_restore_point == 0
    assert inst_tester.stats.backtrace_last_option == 1
def test_basic_choice_deadend_restore_point_needed(self):
    """Unsolvable choice whose rejection requires an actual restore point."""
    builder = new_pkg_universe_builder()
    root_pkg = builder.new_package('root')
    bottom1_pkg = builder.new_package('bottom1').depends_on_any_of('bottom2', 'bottom3')
    bottom2_pkg = builder.new_package('bottom2').conflicts_with(root_pkg)
    bottom3_pkg = builder.new_package('bottom3').depends_on_any_of('bottom1', 'bottom2')
    pkg1 = builder.new_package('pkg1').depends_on_any_of(bottom1_pkg, bottom2_pkg).conflicts_with('bottom3')
    pkg2 = builder.new_package('pkg2').depends_on_any_of(bottom2_pkg, bottom3_pkg).conflicts_with('bottom1')
    root_pkg.depends_on_any_of(pkg1, pkg2)
    universe, inst_tester = builder.build()
    # The dependencies of "root" are not equivalent (if they were, we would trigger
    # an optimization, which takes another code path)
    assert not universe.are_equivalent(pkg1.pkg_id, pkg2.pkg_id)
    assert not inst_tester.is_installable(root_pkg.pkg_id)
    for line in inst_tester.stats.stats():
        print(line)
    assert inst_tester.stats.eqv_table_times_used == 0
    assert inst_tester.stats.eqv_table_total_number_of_alternatives_eliminated == 0
    assert inst_tester.stats.eqv_table_reduced_to_one == 0
    assert inst_tester.stats.eqv_table_reduced_by_zero == 0
    # Unlike the simpler dead-end cases, rejecting the first option here
    # requires setting up (and using) a proper restore point.
    assert inst_tester.stats.backtrace_restore_point_used == 1
    assert inst_tester.stats.backtrace_last_option == 1
def test_corner_case_dependencies_inter_conflict(self):
    """'root' depending on two mutually conflicting packages is unsolvable."""
    b = new_pkg_universe_builder()
    root = b.new_package('root').depends_on('conflict1').depends_on('conflict2')
    first = b.new_package('conflict1').conflicts_with('conflict2')
    second = b.new_package('conflict2').conflicts_with('conflict1')
    universe, tester = b.build()
    # The two conflicting packages must not be considered equivalent.
    assert not universe.are_equivalent(first.pkg_id, second.pkg_id)
    # "root" should not be installable; this aims at the special code path
    # where the tester has both conflicting packages in its "check" set.
    # Technically we cannot assert that path is hit, but we can at least
    # check it does not regress.
    assert not tester.is_installable(root.pkg_id)
def test_basic_choice_deadend_pre_solvable(self):
    """Symmetric unsolvable graph exercising the 'choice pre-solved' path."""
    builder = new_pkg_universe_builder()
    # This test is complicated by the fact that the inst-tester has a non-deterministic ordering.
    # To ensure that it becomes predictable, we have to force it to see the choice before
    # the part that eliminates it. In practise, this is easiest to do by creating a symmetric
    # graph where solving one choice eliminates the other.
    root_pkg = builder.new_package('root')
    # These two packages are used to make options distinct; otherwise the eqv. optimisation will just
    # collapse the choices.
    nodep1 = builder.new_package('nodep1')
    nodep2 = builder.new_package('nodep2')
    path1a = builder.new_package('path1a').depends_on(nodep1).depends_on('end1')
    path1b = builder.new_package('path1b').depends_on(nodep2).depends_on('end1')
    path2a = builder.new_package('path2a').depends_on(nodep1).depends_on('end2')
    path2b = builder.new_package('path2b').depends_on(nodep2).depends_on('end2')
    # The "end" packages make the path1x and path2x families mutually exclusive.
    builder.new_package('end1').conflicts_with(path2a, path2b)
    builder.new_package('end2').conflicts_with(path1a, path1b)
    root_pkg.depends_on_any_of(path1a, path1b).depends_on_any_of(path2a, path2b)
    _, inst_tester = builder.build()
    assert not inst_tester.is_installable(root_pkg.pkg_id)
    for line in inst_tester.stats.stats():
        print(line)
    assert inst_tester.stats.eqv_table_times_used == 0
    assert inst_tester.stats.eqv_table_total_number_of_alternatives_eliminated == 0
    assert inst_tester.stats.eqv_table_reduced_to_one == 0
    assert inst_tester.stats.eqv_table_reduced_by_zero == 0
    # The following numbers are observed due to:
    # * Pick an option from (pathXa | pathXb)
    # * First option -> obviously unsolvable
    # * Undo and do "last option" on the remaining
    # * "last option" -> obviously unsolvable
    # * unsolvable
    assert inst_tester.stats.backtrace_restore_point_used == 1
    assert inst_tester.stats.backtrace_last_option == 1
    assert inst_tester.stats.choice_presolved == 2
def test_basic_choice_pre_solvable(self):
    """Symmetric solvable graph: the first guess pre-solves the other choice."""
    builder = new_pkg_universe_builder()
    # This test is complicated by the fact that the inst-tester has a non-deterministic ordering.
    # To ensure that it becomes predictable, we have to force it to see the choice before
    # the part that eliminates it. In practise, this is easiest to do by creating a symmetric
    # graph where solving one choice eliminates the other.
    root_pkg = builder.new_package('root')
    nodep1 = builder.new_package('nodep1').conflicts_with('path1b', 'path2b')
    nodep2 = builder.new_package('nodep2').conflicts_with('path1b', 'path2b')
    end1 = builder.new_package('end1')
    end2 = builder.new_package('end2')
    path1a = builder.new_package('path1a').depends_on(nodep1).depends_on(end1)
    path1b = builder.new_package('path1b').depends_on(nodep2).depends_on(end1)
    path2a = builder.new_package('path2a').depends_on(nodep1).depends_on(end2)
    path2b = builder.new_package('path2b').depends_on(nodep2).depends_on(end2)
    root_pkg.depends_on_any_of(path1a, path1b).depends_on_any_of(path2a, path2b)
    _, inst_tester = builder.build()
    assert inst_tester.is_installable(root_pkg.pkg_id)
    for line in inst_tester.stats.stats():
        print(line)
    assert inst_tester.stats.eqv_table_times_used == 0
    assert inst_tester.stats.eqv_table_total_number_of_alternatives_eliminated == 0
    assert inst_tester.stats.eqv_table_reduced_to_one == 0
    assert inst_tester.stats.eqv_table_reduced_by_zero == 0
    # After its first guess, the tester can pre-solve the remaining choice
    assert inst_tester.stats.backtrace_restore_point_used == 0
    assert inst_tester.stats.choice_presolved == 1
def test_optimisation_simple_full_eqv_reduction(self):
    """1000 equivalent alternatives collapse into a single option.

    Row 1 is simple enough that it collapses into one option immediately
    (ergo eqv_table_reduced_to_one == 1).
    """
    b = new_pkg_universe_builder()
    root = b.new_package('root')
    clash = b.new_package('conflict')
    leaf = b.new_package('bottom1').conflicts_with(clash)
    row_names = ['pkg-%s' % i for i in range(1000)]
    root.depends_on_any_of(*row_names)
    for name in row_names:
        b.new_package(name).depends_on(leaf)
    universe, tester = b.build()
    first_id = b.pkg_id(row_names[0])
    # Every member of the row is equivalent to the first one.
    assert all(universe.are_equivalent(b.pkg_id(name), first_id)
               for name in row_names)
    assert tester.is_installable(root.pkg_id)
    for stat_line in tester.stats.stats():
        print(stat_line)
    assert tester.stats.eqv_table_times_used == 1
    assert tester.stats.eqv_table_total_number_of_alternatives_eliminated == 999
    assert tester.stats.eqv_table_reduced_to_one == 1
def test_optimisation_simple_partial_eqv_reduction(self):
    """Equivalent alternatives collapse, but a distinct option keeps the choice."""
    builder = new_pkg_universe_builder()
    root_pkg = builder.new_package('root')
    conflicting = builder.new_package('conflict')
    another_pkg = builder.new_package('another-pkg')
    bottom1_pkg = builder.new_package('bottom1').conflicts_with(conflicting)
    # Row 1 is simple enough that it collapses into a single option immediately,
    # but due to "another_pkg" the entire choice is only reduced to two options.
    row1 = ['pkg-%s' % x for x in range(1000)]
    root_pkg.depends_on_any_of(another_pkg, *row1)
    for pkg in row1:
        builder.new_package(pkg).depends_on(bottom1_pkg)
    universe, inst_tester = builder.build()
    pkg_row1 = builder.pkg_id(row1[0])
    # all items in a row are eqv.
    for pkg in row1:
        assert universe.are_equivalent(builder.pkg_id(pkg), pkg_row1)
    assert inst_tester.is_installable(root_pkg.pkg_id)
    for line in inst_tester.stats.stats():
        print(line)
    assert inst_tester.stats.eqv_table_times_used == 1
    assert inst_tester.stats.eqv_table_total_number_of_alternatives_eliminated == 999
    # The choice was not reduced to a single option, hence 0 here.
    assert inst_tester.stats.eqv_table_reduced_to_one == 0
def test_optimisation_simple_zero_eqv_reduction(self):
    """Eqv. pairs where only one member is in testing cannot be reduced."""
    builder = new_pkg_universe_builder()
    root_pkg = builder.new_package('root')
    conflicting1 = builder.new_package('conflict1')
    conflicting2 = builder.new_package('conflict2')
    bottom1_pkg = builder.new_package('bottom1').conflicts_with(conflicting1)
    bottom2_pkg = builder.new_package('bottom2').conflicts_with(conflicting2)
    # To trigger a failed reduction, we have to create eqv. packages and ensure that only one
    # of them is in testing.  Furthermore, the choice has to remain, so we create two pairs
    # of them.
    pkg1_v1 = builder.new_package('pkg1', version='1.0-1').depends_on(bottom1_pkg)
    pkg1_v2 = builder.new_package('pkg1', version='2.0-1').depends_on(bottom1_pkg).not_in_testing()
    pkg2_v1 = builder.new_package('pkg2', version='1.0-1').depends_on(bottom2_pkg)
    pkg2_v2 = builder.new_package('pkg2', version='2.0-1').depends_on(bottom2_pkg).not_in_testing()
    root_pkg.depends_on_any_of(pkg1_v1, pkg1_v2, pkg2_v1, pkg2_v2)
    universe, inst_tester = builder.build()
    # The packages in the pairs are equivalent, but the two pairs are not
    assert universe.are_equivalent(pkg1_v1.pkg_id, pkg1_v2.pkg_id)
    assert universe.are_equivalent(pkg2_v1.pkg_id, pkg2_v2.pkg_id)
    assert not universe.are_equivalent(pkg1_v1.pkg_id, pkg2_v1.pkg_id)
    assert inst_tester.is_installable(root_pkg.pkg_id)
    for line in inst_tester.stats.stats():
        print(line)
    assert inst_tester.stats.eqv_table_times_used == 1
    assert inst_tester.stats.eqv_table_total_number_of_alternatives_eliminated == 0
    assert inst_tester.stats.eqv_table_reduced_to_one == 0
    assert inst_tester.stats.eqv_table_reduced_by_zero == 1
def test_solver_recursion_limit(self):
    """The solver must cope with dependency chains longer than Python's
    recursion limit (i.e. it must not recurse once per package)."""
    builder = new_pkg_universe_builder()
    recursion_limit = 200
    # More packages than the recursion limit: a one-frame-per-package
    # implementation would hit RecursionError.
    pkg_limit = recursion_limit + 20
    orig_limit = sys.getrecursionlimit()
    pkgs = [builder.new_package('pkg-%d' % i) for i in range(pkg_limit)]
    for i, pkg in enumerate(pkgs):
        # Intentionally -1 for the first package (wrap-around), which turns
        # the whole chain into one big dependency cycle.
        ni = i - 1
        pkg.not_in_testing()
        pkg.depends_on(pkgs[ni])
    try:
        sys.setrecursionlimit(recursion_limit)
        universe, inst_tester = builder.build()
        solver = InstallabilitySolver(universe, inst_tester)
        groups = []
        for pkg in pkgs:
            group = (pkg.pkg_id.package_name, {pkg.pkg_id}, set())
            groups.append(group)
        expected = {g[0] for g in groups}
        actual = solver.solve_groups(groups)
        assert actual
        # The cycle forces every package into a single migration group.
        assert expected == set(actual[0])
        assert len(actual) == 1
    finally:
        # Always restore the interpreter's original recursion limit.
        sys.setrecursionlimit(orig_limit)
def test_solver_simple_scc(self):
    """Groups in the same strongly connected component (SCC) are merged
    and the SCCs are emitted in dependency order (leaf SCC first)."""
    builder = new_pkg_universe_builder()
    # SCC 1
    pkga = builder.new_package('pkg-a').not_in_testing()
    pkgb = builder.new_package('pkg-b').not_in_testing()
    pkgc = builder.new_package('pkg-c').not_in_testing()
    # SCC 2
    pkgd = builder.new_package('pkg-d').not_in_testing()
    pkge = builder.new_package('pkg-e').not_in_testing()
    pkgf = builder.new_package('pkg-f').not_in_testing()
    pkgg = builder.new_package('pkg-g').not_in_testing()
    pkgh = builder.new_package('pkg-h').not_in_testing()
    # SCC 3
    pkgi = builder.new_package('pkg-i').not_in_testing()
    # SCC 1 dependencies
    pkga.depends_on(pkgb)
    pkgb.depends_on(pkgc).depends_on(pkgd)
    pkgc.depends_on(pkga).depends_on(pkge)
    # SCC 2 dependencies
    pkgd.depends_on(pkgf)
    pkge.depends_on(pkgg).depends_on(pkgd)
    pkgf.depends_on(pkgh)
    pkgg.depends_on(pkgh)
    pkgh.depends_on(pkge).depends_on(pkgi)
    universe, inst_tester = builder.build()
    solver = InstallabilitySolver(universe, inst_tester)
    expected = [
        # SCC 3 first
        {pkgi.pkg_id.package_name},
        # Then SCC 2
        {pkgd.pkg_id.package_name, pkge.pkg_id.package_name, pkgf.pkg_id.package_name,
         pkgg.pkg_id.package_name, pkgh.pkg_id.package_name},
        # Finally SCC 1
        {pkga.pkg_id.package_name, pkgb.pkg_id.package_name, pkgc.pkg_id.package_name},
    ]
    groups = []
    for scc in expected:
        for node in scc:
            # The third tuple element was previously an empty dict ({}),
            # which is the wrong type: the sibling test
            # (test_solver_recursion_limit) passes a set() there.
            groups.append((node, {builder.pkg_id(node)}, set()))
    actual = [set(x) for x in solver.solve_groups(groups)]
    print("EXPECTED: %s" % str(expected))
    print("ACTUAL : %s" % str(actual))
    assert expected == actual
def test_solver_no_scc_stack_bug(self):
    """
    This whitebox test is designed to trigger a bug in Tarjan's algorithm
    if you omit the "w is on the stack" check from the pseudo code
    (or it is wrong). It makes tons of assumptions about how compute_scc
    works, so it is very sensitive to even minor tweaks.
    There is no strongly-connected component in this test, but if we
    trigger the bug, the algorithm will think there is one.
    """
    graph = OrderedDict()

    def _order_node(**args):
        # Small local helper to build an OrderNode from its
        # 'before'/'after' adjacency lists.
        node = OrderNode()
        node.before = args['before']
        node.after = args['after']
        return node

    graph['A'] = _order_node(
        before=['C', 'B'],
        after=['A0'],
    )
    graph['B'] = _order_node(
        before=['F'],
        after=['A'],
    )
    graph['C'] = _order_node(
        before=['E', 'D'],
        after=['A'],
    )
    graph['D'] = _order_node(
        before=[],
        after=['C'],
    )
    graph['E'] = _order_node(
        before=['B'],
        after=['C']
    )
    graph['F'] = _order_node(
        before=[],
        after=['B'],
    )
    # NOTE(review): 'A0' lists itself in its own "before" — presumably a
    # deliberate self-edge to provoke the visit order below; confirm.
    graph['A0'] = _order_node(
        before=['A0'],
        after=[],
    )
    # We also assert that the order is correct to ensure that
    # nodes were visited in the order we expected (the bug is
    # visit order sensitive).
    expected = [
        ('F',),
        ('B',),
        ('D',),
        ('E',),
        ('C',),
        ('A',),
        ('A0',)
    ]
    actual = compute_scc(graph)
    print("EXPECTED: %s" % str(expected))
    print("ACTUAL : %s" % str(actual))
    assert expected == actual
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| Debian/britney2 | tests/test_inst_tester.py | Python | gpl-2.0 | 25,980 | [
"VisIt"
] | cecd3f9eac42e9fda657e09930b3c2bb4763ff0ed5af94dce1ca64a2767d13c7 |
""" unit test for Watchdog.py
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# imports
import os
from mock import MagicMock
# sut
from DIRAC.WorkloadManagementSystem.JobWrapper.Watchdog import Watchdog
# Module-level stand-ins for the execution-thread and subprocess objects the
# Watchdog constructor expects; their behaviour is irrelevant for these tests.
mock_exeThread = MagicMock()
mock_spObject = MagicMock()
def test_calibrate():
    """Calibrating a Watchdog against the current process succeeds."""
    watchdog = Watchdog(os.getpid(), mock_exeThread, mock_spObject, 5000)
    assert watchdog.calibrate()['OK'] is True
def test__performChecks():
    """After calibration, a single round of checks succeeds."""
    watchdog = Watchdog(os.getpid(), mock_exeThread, mock_spObject, 5000)
    assert watchdog.calibrate()['OK'] is True
    assert watchdog._performChecks()['OK'] is True
def test__performChecksFull():
    """Checks also succeed with CPU and memory limit testing enabled."""
    watchdog = Watchdog(os.getpid(), mock_exeThread, mock_spObject, 5000)
    watchdog.testCPULimit = 1
    watchdog.testMemoryLimit = 1
    assert watchdog.calibrate()['OK'] is True
    assert watchdog._performChecks()['OK'] is True
| yujikato/DIRAC | src/DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_Watchdog.py | Python | gpl-3.0 | 958 | [
"DIRAC"
] | cb21cc872602f4e6bc6b12fb6210dfe955c6f0a572bd558d1c34af08f86b429b |
#!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors: Shiv Upadhyay <shivnupadhyay@gmail.com>
#
'''
Valence Virtual Orbitals
ref. 10.1021/acs.jctc.7b00493
'''
import numpy
import scipy.linalg
from pyscf.lo import iao
from pyscf.lo import orth
from pyscf.lo import ibo
from pyscf import __config__
def vvo(mol, orbocc, orbvirt, iaos=None, s=None, verbose=None):
    '''Valence Virtual Orbitals ref. 10.1021/acs.jctc.7b00493

    Valence virtual orbitals can be formed from the singular value
    decomposition of the overlap between the canonical molecular orbitals
    and an accurate underlying atomic basis set. This implementation uses
    the intrinsic atomic orbitals (IAOs) as this underlying set. VVOs can
    also be formed from the null space of the overlap of the canonical
    molecular orbitals and the underlying atomic basis sets (IAOs). This is
    not implemented here.

    Args:
        mol : the molecule or cell object
        orbocc : occupied molecular orbital coefficients
        orbvirt : virtual molecular orbital coefficients

    Kwargs:
        iaos : 2D array
            the array of IAOs
        s : 2D array
            the overlap array in the ao basis

    Returns:
        VVOs in the basis defined in mol object.
    '''
    if s is None:
        if getattr(mol, 'pbc_intor', None):  # whether mol object is a cell
            if isinstance(orbocc, numpy.ndarray) and orbocc.ndim == 2:
                s = mol.pbc_intor('int1e_ovlp', hermi=1)
            else:
                raise NotImplementedError('k-points crystal orbitals')
        else:
            s = mol.intor_symmetric('int1e_ovlp')
    if iaos is None:
        iaos = iao.iao(mol, orbocc)
    # Number of VVOs = valence (IAO) functions not consumed by the occupied space.
    nvvo = iaos.shape[1] - orbocc.shape[1]
    # Symmetrically orthogonalize the IAO orbitals as in Knizia's
    # implementation. The IAOs returned by iao.iao are not orthogonal.
    iaos = orth.vec_lowdin(iaos, s)
    # Overlap between virtual MOs and orthogonalized IAOs: S = orbvirt^T.s.iaos
    S = numpy.einsum('ji,jk,kl->il', orbvirt.conj(), s, iaos, optimize=True)
    U, sigma, Vh = scipy.linalg.svd(S)
    # Keep the nvvo left-singular vectors (largest overlap with the IAO space)
    # and rotate the virtual MOs accordingly.
    U = U[:, 0:nvvo]
    vvo = numpy.einsum('ik,ji->jk', U, orbvirt, optimize=True)
    return vvo
def livvo(mol, orbocc, orbvirt, locmethod='IBO', iaos=None, s=None,
          exponent=4, grad_tol=1e-8, max_iter=200, verbose=None):
    '''Localized Intrinsic Valence Virtual Orbitals ref. 10.1021/acs.jctc.7b00493

    Localized intrinsic valence virtual orbitals are formed when the valence
    virtual orbitals are localized using an IBO-type of localization. Here
    the VVOs are created in the IAO basis, then the IBO localization
    functions are called to localize them.

    Args:
        mol : the molecule or cell object
        orbocc : occupied molecular orbital coefficients
        orbvirt : virtual molecular orbital coefficients

    Kwargs:
        locmethod : string
            the localization method: 'PM' for Pipek-Mezey localization or
            'IBO' for the IBO localization (default; any value other than
            'PM' selects IBO)
        iaos : 2D array
            the array of IAOs
        s : 2D array
            the overlap array in the ao basis

    Returns:
        LIVVOs in the basis defined in mol object.
    '''
    if s is None:
        if getattr(mol, 'pbc_intor', None):  # whether mol object is a cell
            if isinstance(orbocc, numpy.ndarray) and orbocc.ndim == 2:
                s = mol.pbc_intor('int1e_ovlp', hermi=1)
            else:
                raise NotImplementedError('k-points crystal orbitals')
        else:
            s = mol.intor_symmetric('int1e_ovlp')
    if iaos is None:
        iaos = iao.iao(mol, orbocc)
    vvos = vvo(mol, orbocc, orbvirt, iaos=iaos, s=s)
    locmethod = locmethod.strip().upper()
    if locmethod == 'PM':
        # The Pipek-Mezey exponent may be overridden via the global config.
        # (The previous 'del(EXPONENT)' after use was a no-op and has been removed.)
        pm_exponent = getattr(__config__, 'lo_ibo_PipekMezey_exponent', exponent)
        livvos = ibo.PipekMezey(mol, vvos, iaos, s, exponent=pm_exponent)
    else:
        livvos = ibo.ibo_loc(mol, vvos, iaos, s, exponent=exponent,
                             grad_tol=grad_tol, max_iter=max_iter,
                             verbose=verbose)
    return livvos
| sunqm/pyscf | pyscf/lo/vvo.py | Python | apache-2.0 | 4,717 | [
"CRYSTAL",
"PySCF"
] | 368ee9c7c6b5c2277831d5a3404c5dff7896b71406af6b89c39d138bc0242948 |
# -*- coding: utf-8 -*-
# Copyright 2017 by Rob Gilmore and Shaurita Hutchins. All rights reserved.
# Based on ClustalOmega wrapper copyright 2011 by Andreas Wilm.
#
# Wrapper for Guidance2 by Rob Gilmore (2017). http://guidance.tau.ac.il/ver2/
# Used _ClustalOmega.py as template.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Command line wrapper for PAL2NAL.
It converts a multiple sequence alignment of proteins and
the corresponding DNA (or mRNA) sequences into a codon alignment."""
from __future__ import print_function
from pathlib import Path
from Bio.Application import _Option, _Switch, AbstractCommandline, _Argument
class Pal2NalCommandline(AbstractCommandline):
    u"""Command line wrapper for PAL2NAL.

    PAL2NAL converts a multiple sequence alignment of proteins and the
    corresponding DNA (or mRNA) sequences into a codon alignment.

    http://www.bork.embl.de/pal2nal/

    Example:
    --------
    >>> from Bio.Align.Applications import Pal2NalCommandline

    You would typically run the command line with the created cline object or
    via the Python subprocess module, as described in the Biopython tutorial.

    Citation:
    ---------
    Mikita Suyama, David Torrents, and Peer Bork (2006)
    PAL2NAL: robust conversion of protein sequence alignments into the
    corresponding codon alignments.
    Nucleic Acids Res. 34, W609-W612.
    """

    def __init__(self, cmd='pal2nal', **kwargs):
        # Parameters are listed in the same order as when invoking pal2nal
        # on the command line.
        self.parameters = \
            [
                # Required positional arguments; both must be existing files.
                _Argument(['pepaln'],
                          'protein alignment either in CLUSTAL or FASTA format',
                          filename=True, is_required=True,
                          checker_function=lambda x: Path(x).is_file()),
                _Argument(['nucfasta'],
                          'DNA sequences (single multi-fasta or separated files)',
                          filename=True, is_required=True,
                          checker_function=lambda x: Path(x).is_file()),
                _Switch(['-h', 'help'],
                        'Show help'),
                _Option(['-output', 'output'],
                        "Output format (clustal|paml|fasta|codon); default = clustal",
                        equate=False,
                        checker_function=lambda x: x in ['clustal', 'paml', 'fasta', 'codon']),
                _Switch(['-blockonly', 'blockonly'],
                        "Show only user specified blocks '#' under CLUSTAL alignment (see example)"),
                _Switch(['-nogap', 'nogap'],
                        "Remove columns with gaps and inframe stop codons"),
                _Switch(['-nomismatch', 'nomismatch'],
                        "Remove mismatched codons (mismatch between pep and cDNA) from the output"),
                _Option(['-codontable', 'codontable'],
                        " 1 Universal code (default)\
                        2 Vertebrate mitochondrial code\
                        3 Yeast mitochondrial code\
                        4 Mold, Protozoan, and Coelenterate Mitochondrial code\
                        and Mycoplasma/Spiroplasma code\
                        5 Invertebrate mitochondrial\
                        6 Ciliate, Dasycladacean and Hexamita nuclear code\
                        9 Echinoderm and Flatworm mitochondrial code\
                        10 Euplotid nuclear code\
                        11 Bacterial, archaeal and plant plastid code\
                        12 Alternative yeast nuclear code\
                        13 Ascidian mitochondrial code\
                        14 Alternative flatworm mitochondrial code\
                        15 Blepharisma nuclear code\
                        16 Chlorophycean mitochondrial code",
                        equate=False,
                        checker_function=lambda x: isinstance(x, int)),
                # NOTE(review): the two adjacent string literals below
                # concatenate without a space ("...PAL2NALalignment...").
                # NOTE(review): using '>' as a required "option" relies on
                # shell redirection; confirm the command is executed through
                # a shell.
                _Option(['>', 'output_file'],
                        "This issues the bash command that redirects the PAL2NAL"
                        "alignment to a particular file",
                        filename=True, equate=False, is_required=True)
            ]
        AbstractCommandline.__init__(self, cmd, **kwargs)
| datasnakes/Datasnakes-Scripts | examples/example-data/Alignment_Filter/pal2nal.py | Python | mit | 4,390 | [
"Biopython"
] | fe111283ecb5d0cc2030df2941e77510d5cb3758910ab8f47d4d6d715be9a125 |
#!/usr/bin/env python
#
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Standard setup script.
"""
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
from distutils.command.install_data import install_data
from distutils.command.sdist import sdist
from distutils.core import setup
from buildbot_worker import version
class our_install_data(install_data):
    """install_data variant that installs data files into the install_lib
    tree and drops a buildbot_worker/VERSION file there."""

    def finalize_options(self):
        # Install data files relative to the Python package directory.
        self.set_undefined_options('install',
                                   ('install_lib', 'install_dir'),
                                   )
        install_data.finalize_options(self)

    def run(self):
        install_data.run(self)
        # ensure there's a buildbot_worker/VERSION file
        fn = os.path.join(self.install_dir, 'buildbot_worker', 'VERSION')
        with open(fn, 'w') as f:
            f.write(version)
        self.outfiles.append(fn)
class our_sdist(sdist):
    """sdist variant that embeds the version and the latest release notes
    into the generated source tree."""

    def make_release_tree(self, base_dir, files):
        sdist.make_release_tree(self, base_dir, files)
        # ensure there's a buildbot_worker/VERSION file
        fn = os.path.join(base_dir, 'buildbot_worker', 'VERSION')
        # Use a context manager so the handle is closed deterministically
        # (the previous open(fn, 'w').write(version) leaked the handle and,
        # on some platforms, risked an unflushed file); this also matches
        # the style used in our_install_data.run above.
        with open(fn, 'w') as f:
            f.write(version)
        # ensure that NEWS has a copy of the latest release notes, copied from
        # the master tree, with the proper version substituted
        src_fn = os.path.join('..', 'master', 'docs', 'relnotes/index.rst')
        with open(src_fn) as f:
            src = f.read()
        src = src.replace('|version|', version)
        dst_fn = os.path.join(base_dir, 'NEWS')
        with open(dst_fn, 'w') as f:
            f.write(src)
# Arguments for the setup() call below.  Some entries are added or overridden
# further down depending on platform, Python version and available tooling.
setup_args = {
    'name': "buildbot-worker",
    'version': version,
    'description': "Buildbot Worker Daemon",
    'long_description': "See the 'buildbot' package for details",
    'author': "Brian Warner",
    'author_email': "warner-buildbot@lothar.com",
    'maintainer': "Dustin J. Mitchell",
    'maintainer_email': "dustin@v.igoro.us",
    'url': "http://buildbot.net/",
    'license': "GNU GPL",
    'classifiers': [
        'Development Status :: 5 - Production/Stable',
        'Environment :: No Input/Output (Daemon)',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: GNU General Public License (GPL)',
        'Topic :: Software Development :: Build Tools',
        'Topic :: Software Development :: Testing',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    'packages': [
        "buildbot_worker",
        "buildbot_worker.util",
        "buildbot_worker.backports",
        "buildbot_worker.commands",
        "buildbot_worker.scripts",
        "buildbot_worker.monkeypatches",
        "buildbot_worker.test",
        "buildbot_worker.test.fake",
        "buildbot_worker.test.unit",
        "buildbot_worker.test.util",
    ],
    # mention data_files, even if empty, so install_data is called and
    # VERSION gets copied
    'data_files': [("buildbot_worker", [])],
    'cmdclass': {
        'install_data': our_install_data,
        'sdist': our_sdist
    },
    'entry_points': {
        'console_scripts': [
            'buildbot-worker=buildbot_worker.scripts.runner:run',
            # this will also be shipped on non windows :-(
            'buildbot_worker_windows_service=buildbot_worker.scripts.windows_service:HandleCommandLine',
        ]}
}

# set zip_safe to false to force Windows installs to always unpack eggs
# into directories, which seems to work better --
# see http://buildbot.net/trac/ticket/907
if sys.platform == "win32":
    setup_args['zip_safe'] = False

# The minimum Twisted version depends on the Python major version.
if sys.version_info[0] >= 3:
    twisted_ver = ">= 17.5.0"
else:
    twisted_ver = ">= 10.2.0"

try:
    # If setuptools is installed, then we'll add setuptools-specific arguments
    # to the setup args.
    import setuptools  # @UnusedImport
except ImportError:
    pass
else:
    setup_args['install_requires'] = [
        'twisted ' + twisted_ver,
        'future',
    ]

    # Unit test hard dependencies.
    test_deps = [
        'mock',
    ]

    setup_args['tests_require'] = test_deps
    setup_args['extras_require'] = {
        'test': [
            'pep8',
            # spellcheck introduced in version 1.4.0
            'pylint>=1.4.0',
            'pyenchant',
            'flake8~=2.6.0',
        ] + test_deps,
    }

    if '--help-commands' in sys.argv or 'trial' in sys.argv or 'test' in sys.argv:
        setup_args['setup_requires'] = [
            'setuptools_trial',
        ]

if os.getenv('NO_INSTALL_REQS'):
    # Escape hatch for distribution packagers: let the distro's dependency
    # system handle requirements instead of pip/setuptools.
    setup_args['install_requires'] = None
    setup_args['extras_require'] = None

setup(**setup_args)
| rkashapov/buildbot | worker/setup.py | Python | gpl-2.0 | 5,594 | [
"Brian"
] | 3d6ae295483aff128eb059b91a256d9b809d5756d2c0d5aace21eac5c39fe8ee |
""" General Message Queue Interface to create Consumers and Producers
"""
from DIRAC import gLogger, S_OK
from DIRAC.Resources.MessageQueue.MQProducer import MQProducer
from DIRAC.Resources.MessageQueue.MQConsumer import MQConsumer
from DIRAC.Resources.MessageQueue.MQConnectionManager import MQConnectionManager
from DIRAC.Resources.MessageQueue.Utilities import getMQParamsFromCS
from DIRAC.Resources.MessageQueue.Utilities import generateDefaultCallback
__RCSID__ = "$Id$"

# Single module-level manager shared by every producer/consumer created by
# this module; it owns and tracks the active MQ connections.
connectionManager = MQConnectionManager()  # To manage the active MQ connections.
def createConsumer(mqURI, callback=None):
  """
  Function creates MQConsumer. All parameters are taken from the
  Configuration Service based on the mqURI value.

  Args:
    mqURI(str):Pseudo URI identifing MQ service. It has the following format
              mqConnection::DestinationType::DestinationName
              e.g. blabla.cern.ch::Queue::MyQueue1
    callback: callback function that can be used to process the incoming messages;
              if None, a fresh default callback is generated for this consumer.
  Returns:
    S_OK/S_ERROR: with the consumer object in S_OK.
  """
  # Bug fix: the signature used to be ``callback=generateDefaultCallback()``,
  # which is evaluated ONCE at import time, so every consumer created without
  # an explicit callback silently shared the same default callback object.
  # Generate a fresh default per call instead.
  if callback is None:
    callback = generateDefaultCallback()
  result = _setupConnection(mqURI=mqURI, mType="consumer")
  if not result['OK']:
    return result
  return S_OK(MQConsumer(mqManager=connectionManager,
                         mqURI=mqURI,
                         consumerId=result['Value'],
                         callback=callback))
def createProducer(mqURI):
  """
  Create an MQProducer whose connection parameters are looked up in the
  Configuration Service from the mqURI value.

  Args:
    mqURI(str):Pseudo URI identifing MQ service. It has the following format
              mqConnection::DestinationType::DestinationName
              e.g. blabla.cern.ch::Queue::MyQueue1
  Returns:
    S_OK/S_ERROR: with the producer object in S_OK.
  """
  setup = _setupConnection(mqURI=mqURI, mType="producer")
  if not setup['OK']:
    return setup
  producer = MQProducer(mqManager=connectionManager,
                        mqURI=mqURI,
                        producerId=setup['Value'])
  return S_OK(producer)
def _setupConnection(mqURI, mType):
  """ Set up an active MQ connection. All parameters are taken from the
      Configuration Service based on the mqURI value and the messenger type.

  Args:
    mqURI(str):Pseudo URI identifing the MQ service. It has the following format:
              mqConnection::DestinationType::DestinationName
              e.g. blabla.cern.ch::Queue::MyQueue1
    mType(str): 'consumer' or 'producer'
  Returns:
    S_OK/S_ERROR: with the value of the messenger Id ( e.g. 'consumer4' ) in S_OK.
  """
  paramsResult = getMQParamsFromCS(mqURI=mqURI)
  if not paramsResult['OK']:
    # Propagate the CS lookup failure unchanged.
    return paramsResult
  return connectionManager.startConnection(mqURI, paramsResult['Value'], mType)
| petricm/DIRAC | Resources/MessageQueue/MQCommunication.py | Python | gpl-3.0 | 2,780 | [
"DIRAC"
] | c8dc285226f1a5d2ba9059167d3eee03da1fbacc9a4a68c3896365d1f9536141 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2020, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
"""
The Atomic Universe
#########################
The :class:`~exatomic.container.Universe` object is a subclass of
:class:`~exa.container.Container` that stores data coming from computational
chemistry experiments in a unified and systematic way. Data is organized into
"frames". A frame is an axis that can represent time (e.g. molecular dynamics
simulations), step number (e.g. geometry optimization), or an arbitrary index
(e.g. density functional theory exchange correlation functional).
"""
import six
import numpy as np
import pandas as pd
from exa import DataFrame, Container, TypedMeta
from .frame import Frame, compute_frame_from_atom
from .atom import Atom, UnitAtom, ProjectedAtom, VisualAtom, Frequency
from .two import (AtomTwo, MoleculeTwo, compute_atom_two,
_compute_bond_count, _compute_bonds)
from .molecule import (Molecule, compute_molecule, compute_molecule_com,
compute_molecule_count)
from .field import AtomicField
from .orbital import Orbital, Excitation, MOMatrix, DensityMatrix
from .basis import Overlap, BasisSet, BasisSetOrder
from exatomic.algorithms.orbital import add_molecular_orbitals
from exatomic.algorithms.basis import BasisFunctions, compute_uncontracted_basis_set_order
from .tensor import Tensor
class Meta(TypedMeta):
    """Typed attribute declarations for :class:`Universe`.

    Each name below maps an attribute of the universe to the type it must
    hold; :class:`~exa.TypedMeta` uses these (together with the
    ``compute_<name>`` getter convention) to lazily build/validate them.
    """
    atom = Atom
    frame = Frame
    atom_two = AtomTwo
    unit_atom = UnitAtom
    projected_atom = ProjectedAtom
    visual_atom = VisualAtom
    frequency = Frequency
    molecule = Molecule
    molecule_two = MoleculeTwo
    field = AtomicField
    orbital = Orbital
    # Generic, cartesian and spherical variants of the MO coefficient matrix.
    momatrix = MOMatrix
    cart_momatrix = MOMatrix
    sphr_momatrix = MOMatrix
    excitation = Excitation
    overlap = Overlap
    density = DensityMatrix
    # Basis set ordering tables (generic/cartesian/spherical/uncontracted).
    basis_set_order = BasisSetOrder
    cart_basis_set_order = BasisSetOrder
    sphr_basis_set_order = BasisSetOrder
    uncontracted_basis_set_order = BasisSetOrder
    basis_set = BasisSet
    basis_dims = dict
    basis_functions = BasisFunctions
    contribution = DataFrame
    multipole = DataFrame
    tensor = Tensor
class Universe(six.with_metaclass(Meta, Container)):
    """
    The atomic container is called a universe because it represents everything
    known about the atomistic simulation (whether quantum or classical). This
    includes data such as atomic coordinates, molecular orbital energies, as
    well as (classical phenomena) such as two body distances, etc.

    Attributes:
        frame (:class:`~exatomic.core.frame.Frame`): State variables:
        atom (:class:`~exatomic.core.atom.Atom`): (Classical) atomic data (e.g. coordinates)
        atom_two (:class:`~exatomic.core.two.AtomTwo`): Interatomic distances
        molecule (:class:`~exatomic.core.molecule.Molecule`): Molecule information
        orbital (:class:`~exatomic.core.orbital.Orbital`): Molecular orbital information
        momatrix (:class:`~exatomic.core.orbital.MOMatrix`): Molecular orbital coefficient matrix
        frequency (:class:`~exatomic.core.atom.Frequency`): Vibrational modes and atom displacements
        excitation (:class:`~exatomic.core.orbital.Excitation`): Electronic excitation information
        basis_set (:class:`~exatomic.core.basis.BasisSet`): Basis set specification
        overlap (:class:`~exatomic.core.basis.Overlap`): The overlap matrix
        basis_functions (:class:`~exatomic.algorithms.basis.BasisFunctions`): Basis function evaluation
        field (:class:`~exatomic.core.field.AtomicField`): Scalar fields (MOs, densities, etc.)
    """
    # Axis that groups data across tables (time step / optimization step / index).
    _cardinal = "frame"
    # Missing typed attributes are generated by calling ``compute_<name>()``.
    _getter_prefix = "compute"

    @property
    def current_momatrix(self):
        # Return the MO matrix matching self.meta['spherical'], falling back
        # to the generic ``momatrix`` when the specialized table is absent.
        if self.meta['spherical']:
            try: return self.sphr_momatrix
            except AttributeError: return self.momatrix
        try: return self.cart_momatrix
        except AttributeError: return self.momatrix

    @property
    def current_basis_set_order(self):
        # Same selection logic as current_momatrix, with an extra override
        # when an uncontracted basis was requested via meta.
        if 'uncontracted' in self.meta:
            return self.uncontracted_basis_set_order
        if self.meta['spherical']:
            try: return self.sphr_basis_set_order
            except AttributeError: return self.basis_set_order
        try: return self.cart_basis_set_order
        except AttributeError: return self.basis_set_order

    @property
    def periodic(self, *args, **kwargs):
        # NOTE(review): as a property this can never receive arguments; the
        # *args/**kwargs are always empty here -- confirm intent upstream.
        return self.frame.is_periodic(*args, **kwargs)

    @property
    def orthorhombic(self):
        return self.frame.orthorhombic()

    @classmethod
    def from_cclib(cls, ccobj):
        """Alternate constructor from a cclib parsed-data object."""
        from exatomic.interfaces.cclib import universe_from_cclib
        return cls(**universe_from_cclib(ccobj))

    # Note that compute_* function may be called automatically by typed
    # properties defined in UniverseMeta
    def compute_frame(self):
        """Compute a minmal frame table."""
        self.frame = compute_frame_from_atom(self.atom)

    def compute_unit_atom(self):
        """Compute minimal image for periodic systems."""
        self.unit_atom = UnitAtom.from_universe(self)

    def compute_visual_atom(self):
        # Visualization-friendly coordinates plus molecular centers of mass.
        self.visual_atom = VisualAtom.from_universe(self)
        self.compute_molecule_com()

    def compute_atom_two(self, *args, **kwargs):
        """
        Compute interatomic two body properties (e.g. bonds).

        Args:
            mapper (dict): Custom radii to use when determining bonds
            bond_extra (float): Extra additive factor to use when determining bonds
        """
        self.atom_two = compute_atom_two(self, *args, **kwargs)

    def compute_bonds(self, *args, **kwargs):
        """
        Updates bonds (and molecules).

        See Also:
            :func:`~exatomic.two.AtomTwo.compute_bonds`
        """
        _compute_bonds(self.atom, self.atom_two, *args, **kwargs)

    def compute_bond_count(self):
        """
        Compute bond counts and attach them to the :class:`~exatomic.atom.Atom` table.
        """
        _compute_bond_count(self)

    def compute_molecule(self):
        """Compute the :class:`~exatomic.molecule.Molecule` table."""
        self.molecule = compute_molecule(self)
        self.compute_molecule_count()

    def compute_molecule_com(self):
        # Attach per-molecule center-of-mass coordinates to the molecule table.
        cx, cy, cz = compute_molecule_com(self)
        self.molecule['cx'] = cx
        self.molecule['cy'] = cy
        self.molecule['cz'] = cz

    def compute_atom_count(self):
        """Compute number of atoms per frame."""
        self.frame['atom_count'] = self.atom.cardinal_groupby().size()

    def compute_molecule_count(self):
        """Compute number of molecules per frame."""
        self.frame['molecule_count'] = compute_molecule_count(self)

    def compute_basis_dims(self):
        """Compute basis dimensions."""
        bset = self.basis_set.copy()
        bset['set'] = bset['set'].astype(np.int64)
        # Map per-set counts onto atoms, then total over the system.
        mapr = self.atom.set.map
        self.basis_dims = {
            # npc/nps: primitive counts (cartesian/spherical);
            # ncc/ncs: contracted function counts (cartesian/spherical).
            'npc': mapr(bset.primitives(False).groupby('set').sum()).astype(int).sum(),
            'nps': mapr(bset.primitives(True).groupby('set').sum()).astype(int).sum(),
            'ncc': mapr(bset.functions(False).groupby('set').sum()).astype(int).sum(),
            'ncs': mapr(bset.functions(True).groupby('set').sum()).astype(int).sum(),
            'sets': bset.functions_by_shell()}

    def compute_basis_functions(self, **kwargs):
        # NWChem output requires the cartesian-ordering flag to be disabled.
        # NOTE(review): **kwargs is accepted but never forwarded -- confirm.
        if self.meta['program'] in ['nwchem']:
            self.basis_functions = BasisFunctions(self, cartp=False)
        else:
            self.basis_functions = BasisFunctions(self)

    def compute_uncontracted_basis_set_order(self):
        """Compute an uncontracted basis set order."""
        self.uncontracted_basis_set_order = compute_uncontracted_basis_set_order(self)

    def enumerate_shells(self, frame=0):
        """Extract minimal information from the universe to be used in
        numba-compiled numerical procedures.

        .. code-block:: python

            pointers, atoms, shells = uni.enumerate_shells()

        Args:
            frame (int): state of the universe (default 0)
        """
        atom = self.atom.groupby('frame').get_group(frame)
        # Shell ordering conventions are only verified for these codes.
        if self.meta['program'] not in ['molcas', 'adf', 'nwchem', 'gaussian']:
            print('Warning: Check spherical shell parameter for {} '
                  'molecular orbital generation'.format(self.meta['program']))
        shls = self.basis_set.shells(self.meta['program'],
                                     self.meta['spherical'],
                                     self.meta['gaussian'])
        grps = shls.groupby('set')
        # Pointers into (xyzs, shls) arrays
        ptrs = np.array([(c, idx) for c, seht in enumerate(atom.set)
                         for idx in grps.get_group(seht).index])
        # NOTE(review): shls[0] assumes the shell payload lives in column 0 of
        # the shells table -- confirm against BasisSet.shells.
        return ptrs, atom[['x', 'y', 'z']].values, shls[0].values

    def add_field(self, field):
        """Adds a field object to the universe.

        .. code-block:: python

            # Assuming field[n] is of type AtomicField
            uni.add_field(field)
            uni.add_field([field1, field2])

        Args:
            field (iter, :class:`exatomic.core.field.AtomicField`): field(s) to add

        Warning:
            Adding a large number of (high resolution) fields may impact performance.
        """
        self._traits_need_update = True
        if isinstance(field, AtomicField):
            if not hasattr(self, 'field'):
                self.field = field
            else:
                # Append a single field: reindex the new rows past the
                # existing ones, then concatenate frames and value lists.
                self.field._revert_categories()
                new_field_values = self.field.field_values + field.field_values
                newdx = range(len(self.field), len(self.field) + len(field))
                field.index = newdx
                new_field = pd.concat([self.field, field])
                self.field = AtomicField(new_field, field_values=new_field_values)
        elif isinstance(field, list):
            if not hasattr(self, 'field'):
                fields = pd.concat(field)
                fields.index = range(len(fields))
                fields_values = [j for i in field for j in i.field_values]
                self.field = AtomicField(fields, field_values=fields_values)
            else:
                # Append many fields at once; flatten all of their value lists.
                new_field_values = self.field.field_values + [j for i in field for j in i.field_values]
                newdx = range(len(self.field), len(self.field) + sum([len(i.field_values) for i in field]))
                for i, idx in enumerate(newdx):
                    field[i].index = [idx]
                new_field = pd.concat([self.field] + field)
                self.field = AtomicField(new_field, field_values=new_field_values)
        else:
            raise TypeError('field must be an instance of exatomic.field.AtomicField or a list of them')

    def add_molecular_orbitals(self, field_params=None, mocoefs=None,
                               vector=None, frame=0, replace=False,
                               inplace=True, verbose=True, irrep=None):
        """Add molecular orbitals to universe.

        .. code-block:: python

            uni.add_molecular_orbitals()                  # Default around (HOMO-5, LUMO+7)
            uni.add_molecular_orbitals(vector=range(5))   # Specifies the first 5 MOs
            uni.add_molecular_orbitals(                   # Higher resolution fields
                field_params={'rmin': -10,                # smallest value in 'x', 'y', 'z'
                              'rmax': 10,                 # largest value in 'x', 'y', 'z'
                              'nr': 100})                 # number of points between rmin and rmax
            uni.field                                     # The field parameters
            uni.field.field_values                        # The generated scalar fields

        Args:
            field_params (dict, pd.Series): see :func:`exatomic.algorithms.orbital_util.make_fps`
            mocoefs (str): column in :class:`~exatomic.core.orbital.MOMatrix`
            vector (iter): indices of orbitals to evaluate (0-based)
            frame (int): frame of atomic positions for the orbitals
            replace (bool): remove previous fields (default False)
            inplace (bool): add directly to uni or return :class:`~exatomic.core.field.AtomicField` (default True)
            verbose (bool): print timing statistics (default True)
            irrep (int): irreducible representation

        Warning:
            Default behavior just continually adds fields to the universe. This can
            affect performance if adding many fields. `replace` modifies this behavior.

        Warning:
            Specifying very high resolution field parameters, e.g. 'nr' > 100
            may slow things down and/or crash the kernel. Use with caution.
        """
        if not hasattr(self, 'momatrix'):
            raise AttributeError('uni must have momatrix attribute.')
        if not hasattr(self, 'basis_set'):
            raise AttributeError('uni must have basis_set attribute.')
        # Delegates to the module-level function imported at the top of the
        # file (not recursion into this method).
        return add_molecular_orbitals(self, field_params=field_params,
                                      mocoefs=mocoefs, vector=vector,
                                      frame=frame, replace=replace,
                                      inplace=inplace, verbose=verbose,
                                      irrep=irrep)

    def write_cube(self, file_name='output', field_number=0):
        """
        Write to a file in cube format for a single 3D scalar field in universe object.

        .. code-block:: python

            uni.add_molecular_orbitals()   # Default around (HOMO-5, LUMO+7)
            uni.write_cube('cubefile', 0)  # write to cubefile.cube for HOMO-5

        Args:
            file_name (str): name of the output file without file extension
            field_number (int): number of the single field starting with 0

        Returns:
            None
        """
        import os
        from exatomic.interfaces.cube import Cube
        # Refuse to clobber an existing cube file.
        if os.path.isfile(file_name+'.cube'):
            raise FileExistsError('File '+file_name+'.cube '+'exists.')
        cube_edi = Cube.from_universe(self,field_number)
        cube_edi.write(file_name+'.cube')

    def __len__(self):
        # Length of a universe == number of frames.
        return len(self.frame)

    def __init__(self, **kwargs):
        super(Universe, self).__init__(**kwargs)
def concat(name=None, description=None, meta=None, *universes):
    """Concatenate several universes into one (placeholder).

    Warning:
        This function is not fully featured or tested yet!
    """
    raise NotImplementedError()
def basis_function_contributions(universe, mo, mocoefs='coef',
                                 tol=0.01, ao=None, frame=0):
    """
    Provided a universe with momatrix and basis_set_order attributes,
    return the major basis function contributions of a particular
    molecular orbital.

    .. code-block:: python

        # display the 16th orbital coefficients > abs(0.15)
        basis_function_contributions(uni, 15, tol=0.15) # 0-based indexing!

    Args:
        universe (class:`exatomic.core.universe.Universe`): a universe
        mo (int): molecular orbital index
        mocoefs (str): column of interest in universe.momatrix
        tol (float): minimum value of coefficient by which to filter
        ao: not implemented; must be None
        frame (int): frame of the universe (default is zero)

    Returns:
        joined (pd.DataFrame): a join of momatrix and basis_set_order
    """
    small = universe.momatrix.contributions(mo, tol=tol, mocoefs=mocoefs, frame=frame)
    chis = small['chi'].values
    coefs = small[mocoefs]
    # Align the coefficients on the basis-function ('chi') labels so the
    # concat below joins row-wise against basis_set_order.
    coefs.index = chis
    # Bug fix: DataFrame.ix was deprecated in pandas 0.20 and removed in 1.0;
    # 'chi' values are labels into basis_set_order's index, so use .loc.
    joined = pd.concat([universe.basis_set_order.loc[chis], coefs], axis=1)
    if ao is None:
        return joined
    else:
        raise NotImplementedError("not clever enough for that.")
| exa-analytics/atomic | exatomic/core/universe.py | Python | apache-2.0 | 15,830 | [
"ADF",
"Gaussian",
"MOLCAS",
"NWChem",
"cclib"
] | 1f31d57d05da38fb42cc5e419829227514fb87fcf362521adeb57061626515e4 |
import logging
import types
import simplejson
from dirac.lib.base import *
from dirac.lib.diset import getRPCClient, getTransferClient
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.Utilities import Time, List
from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
from dirac.lib.webBase import defaultRedirect
# Module-level logger for this legacy Pylons-style web controller.
log = logging.getLogger( __name__ )
class VirtualmachinesController( BaseController ):
  """Python 2, Pylons-style web controller exposing VM instance monitoring and
  management endpoints backed by the DIRAC
  WorkloadManagement/VirtualMachineManager RPC service."""

  def index( self ):
    # Return a rendered template
    # return render('/some/template.mako')
    # or, Return a response
    return defaultRedirect()

  def browse( self ):
    # Instance browser page.
    return render( "/systems/virtualmachines/browse.mako" )

  def overview( self ):
    # Aggregated overview/plots page.
    return render( "/systems/virtualmachines/overview.mako" )

  @jsonify
  def getInstancesList( self ):
    """Return a paginated, filtered, sorted list of VM instances for the browser grid."""
    # Pagination parameters default to "everything" on bad/missing input.
    try:
      start = int( request.params[ 'start' ] )
    except:
      start = 0
    try:
      limit = int( request.params[ 'limit' ] )
    except:
      limit = 0
    # Column names travel over the wire with '_' instead of '.'; convert back.
    try:
      sortField = str( request.params[ 'sortField' ] ).replace( "_", "." )
      sortDir = str( request.params[ 'sortDirection' ] )
      sort = [ ( sortField, sortDir ) ]
    except:
      return S_ERROR( "Oops! Couldn't understand the request" )
    condDict = {}
    # Optional JSON-encoded selection conditions; values normalized to lists of str.
    try:
      if 'cond' in request.params:
        dec = simplejson.loads( request.params[ 'cond' ] )
        for k in dec:
          v = dec[ k ]
          if type( v ) in ( types.StringType, types.UnicodeType ):
            v = [ str( v ) ]
          else:
            v = [ str( f ) for f in v ]
          condDict[ str( k ).replace( "_", "." ) ] = v
    except:
      raise
    # Optional quick status filter.
    try:
      if 'statusSelector' in request.params:
        condDict[ 'inst.Status' ] = [ str( request.params[ 'statusSelector' ] ) ]
    except:
      pass
    print condDict
    rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
    result = rpcClient.getInstancesContent( condDict, sort, start, limit )
    if not result[ 'OK' ]:
      return result
    svcData = result[ 'Value' ]
    data = { 'numRecords' : svcData[ 'TotalRecords' ], 'instances' : [] }
    # NOTE(review): dnMap appears unused -- confirm before removing.
    dnMap = {}
    for record in svcData[ 'Records' ]:
      rD = {}
      for iP in range( len( svcData[ 'ParameterNames' ] ) ):
        # '.' is not valid in JS identifiers; flatten back to '_'.
        param = svcData[ 'ParameterNames' ][iP].replace( ".", "_" )
        if param == 'inst_LastUpdate':
          rD[ param ] = record[iP].strftime( "%Y-%m-%d %H:%M:%S" )
        else:
          rD[ param ] = record[iP]
      data[ 'instances' ].append( rD )
    return data

  @jsonify
  def getHistoryForInstanceID( self ):
    """Return the state-change history records of one VM instance."""
    try:
      instanceID = int( request.params[ 'instanceID' ] )
    except:
      return S_ERROR( "OOps, instance ID has to be an integer " )
    rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
    result = rpcClient.getHistoryForInstanceID( instanceID )
    if not result[ 'OK' ]:
      return result
    svcData = result[ 'Value' ]
    data = { 'history' : [] }
    for record in svcData[ 'Records' ]:
      rD = {}
      for iP in range( len( svcData[ 'ParameterNames' ] ) ):
        param = svcData[ 'ParameterNames' ][iP].replace( ".", "_" )
        if param == 'Update':
          rD[ param ] = record[iP].strftime( "%Y-%m-%d %H:%M:%S" )
        else:
          rD[ param ] = record[iP]
      data[ 'history' ].append( rD )
    return data

  @jsonify
  def getInstanceStatusCounters( self ):
    # Counts of instances per status, straight from the service.
    rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
    result = rpcClient.getInstanceCounters()
    if not result[ 'OK' ]:
      return S_ERROR( result[ 'Message' ] )
    return result

  @jsonify
  def getHistoryValues( self ):
    """Return averaged history values (Load/Jobs/...) for plotting."""
    try:
      dbVars = [ str( f ) for f in simplejson.loads( request.params[ 'vars' ] ) ]
    except:
      dbVars = [ 'Load', 'Jobs', 'TransferredFiles' ]
    try:
      timespan = int( request.params[ 'timespan' ] )
    except:
      timespan = 86400
    rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
    result = rpcClient.getHistoryValues( 3600, {}, dbVars, timespan )
    if not result[ 'OK' ]:
      return S_ERROR( result[ 'Message' ] )
    svcData = result[ 'Value' ]
    data = []
    # Drop points newer than ~400s to avoid a partially-filled last bucket.
    olderThan = Time.toEpoch() - 400
    for record in svcData[ 'Records' ]:
      rL = []
      for iP in range( len( svcData[ 'ParameterNames' ] ) ):
        param = svcData[ 'ParameterNames' ][iP]
        if param == 'Update':
          rL.append( Time.toEpoch( record[iP] ) )
        else:
          rL.append( record[iP] )
      if rL[0] < olderThan:
        data.append( rL )
    return S_OK( { 'data': data, 'fields' : svcData[ 'ParameterNames' ] } )

  @jsonify
  def getRunningInstancesHistory( self ):
    """Time series of the number of running instances."""
    try:
      bucketSize = int( request.params[ 'bucketSize' ] )
    except:
      bucketSize = 900
    try:
      timespan = int( request.params[ 'timespan' ] )
    except:
      timespan = 86400
    rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
    result = rpcClient.getRunningInstancesHistory( timespan, bucketSize )
    if not result[ 'OK' ]:
      return S_ERROR( result[ 'Message' ] )
    svcData = result[ 'Value' ]
    data = []
    olderThan = Time.toEpoch() - 400
    rL = []
    for record in svcData:
      eTime = Time.toEpoch( record[0] )
      # NOTE(review): original indentation was lost; the append is placed
      # inside the age check for consistency with the sibling *History
      # methods below -- confirm against upstream VMDIRAC.
      if eTime < olderThan:
        rL = [ eTime, int( record[1] ) ]
        data.append( rL )
    return S_OK( data )

  @jsonify
  def getRunningInstancesBEPHistory( self ):
    """Time series of running instances grouped by backend endpoint."""
    try:
      bucketSize = int( request.params[ 'bucketSize' ] )
    except:
      bucketSize = 900
    try:
      timespan = int( request.params[ 'timespan' ] )
    except:
      timespan = 86400
    rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
    result = rpcClient.getRunningInstancesBEPHistory( timespan, bucketSize )
    if not result[ 'OK' ]:
      return S_ERROR( result[ 'Message' ] )
    svcData = result[ 'Value' ]
    data = []
    olderThan = Time.toEpoch() - 400
    for record in svcData:
      eTime = Time.toEpoch( record[0] )
      if eTime < olderThan:
        rL = [ eTime, record[1], int( record[2] ) ]
        data.append( rL )
    return S_OK( data )

  @jsonify
  def checkVmWebOperation( self ):
    """Ask the service whether the requesting user may perform a VM operation."""
    try:
      operation = str( request.params[ 'operation' ] )
    except Exception, e:
      print e
      return S_ERROR( "Oops! Couldn't understand the request" )
    rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
    result = rpcClient.checkVmWebOperation( operation )
    if not result[ 'OK' ]:
      return S_ERROR( result[ 'Message' ] )
    data = result[ 'Value' ]
    return S_OK( data )

  @jsonify
  def declareInstancesStopping( self ):
    """Mark the selected instances (by web id) as stopping."""
    try:
      webIds = simplejson.loads( request.params[ 'idList' ] )
    except Exception, e:
      print e
      return S_ERROR( "Oops! Couldn't understand the request" )
    rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
    result = rpcClient.declareInstancesStopping( webIds )
    return result

  @jsonify
  def getRunningInstancesByRunningPodHistory( self ):
    """Time series of running instances grouped by running pod."""
    try:
      bucketSize = int( request.params[ 'bucketSize' ] )
    except:
      bucketSize = 900
    try:
      timespan = int( request.params[ 'timespan' ] )
    except:
      timespan = 86400
    rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
    result = rpcClient.getRunningInstancesByRunningPodHistory( timespan, bucketSize )
    if not result[ 'OK' ]:
      return S_ERROR( result[ 'Message' ] )
    svcData = result[ 'Value' ]
    data = []
    olderThan = Time.toEpoch() - 400
    for record in svcData:
      eTime = Time.toEpoch( record[0] )
      if eTime < olderThan:
        rL = [ eTime, record[1], int( record[2] ) ]
        data.append( rL )
    return S_OK( data )

  @jsonify
  def getRunningInstancesByImageHistory( self ):
    """Time series of running instances grouped by VM image."""
    try:
      bucketSize = int( request.params[ 'bucketSize' ] )
    except:
      bucketSize = 900
    try:
      timespan = int( request.params[ 'timespan' ] )
    except:
      timespan = 86400
    rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
    result = rpcClient.getRunningInstancesByImageHistory( timespan, bucketSize )
    if not result[ 'OK' ]:
      return S_ERROR( result[ 'Message' ] )
    svcData = result[ 'Value' ]
    data = []
    olderThan = Time.toEpoch() - 400
    for record in svcData:
      eTime = Time.toEpoch( record[0] )
      if eTime < olderThan:
        rL = [ eTime, record[1], int( record[2] ) ]
        data.append( rL )
    return S_OK( data )
| myco/VMDIRAC | Web/controllers/systems/virtualmachines.py | Python | gpl-3.0 | 8,504 | [
"DIRAC"
] | 2a1cf9326cfc68916a1e67235867cd514d481fc5e48cf36f069532c81017516b |
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2015-2016 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
Train a small multi-layer perceptron with fully connected layers on MNIST data.
This example has some command line arguments that enable different neon features.
Examples:
python examples/mnist_mlp.py -b gpu -e 10
Run the example for 10 epochs using the NervanaGPU backend
python examples/mnist_mlp.py --eval_freq 1
After each training epoch, process the validation/test data
set through the model and display the cost.
python examples/mnist_mlp.py --serialize 1 -s checkpoint.pkl
After every iteration of training, dump the model to a pickle
file named "checkpoint.pkl". Changing the serialize parameter
changes the frequency at which the model is saved.
python examples/mnist_mlp.py --model_file checkpoint.pkl
Before starting to train the model, set the model state to
the values stored in the checkpoint file named checkpoint.pkl.
"""
from neon.callbacks.callbacks import Callbacks
from neon.data import MNIST
from neon.initializers import Gaussian
from neon.layers import GeneralizedCost, Affine, Tree, Sequential
from neon.models import Model
from neon.optimizers import GradientDescentMomentum
from neon.transforms import Rectlin, Logistic, CrossEntropyBinary, Misclassification
from neon.util.argparser import NeonArgparser
from neon import logger as neon_logger
import deepstacks
from deepstacks.macros import *
from deepstacks.neon import curr_layer,curr_stacks,curr_flags,curr_model
# parse the command line arguments
parser = NeonArgparser(__doc__)
args = parser.parse_args()

# load up the mnist data set
dataset = MNIST(path=args.data_dir)
train_set = dataset.train_iter
valid_set = dataset.valid_iter

# setup weight initialization function
# NOTE(review): init_norm appears unused once the stock layer list below is
# commented out in favor of the deepstacks network -- confirm before removing.
init_norm = Gaussian(loc=0.0, scale=0.01)

# setup model layers
#layers = [Affine(nout=100, init=init_norm, activation=Rectlin()),
#          Affine(nout=10, init=init_norm, activation=Logistic(shortcut=True))]
import neon
l_in = deepstacks.neon.InputLayer((None,)+train_set.shape,'image')
# Describe a dense 100-unit hidden layer and a 10-way logistic output using
# deepstacks' tuple-based network specification.
network,stacks,paramlayers,errors,watchpoints=deepstacks.neon.build_network(l_in,(
        (0,100,0,0,0,0,{'dense','nobias'}),
        (0,10,0,0,0,0,{'dense':True,'nonlinearity':Logistic(shortcut=True),'nobias':True}),
        ))

# setup cost function as CrossEntropy
cost = GeneralizedCost(costfunc=CrossEntropyBinary())
cost,extra_layers,tagslice = deepstacks.neon.get_loss(errors,watchpoints,cost)
# Wrap the main branch plus any auxiliary loss branches in a Tree container.
network = Tree([network]+extra_layers)
deepstacks.neon.utils.walk(network)
inputs = deepstacks.neon.get_inputs(network)
# The only expected network input is the MNIST image tensor.
assert tuple(inputs)==('image',)
#print network.get_description()
layers = network

# setup optimizer
optimizer = GradientDescentMomentum(
    0.1, momentum_coef=0.9, stochastic_round=args.rounding)

# initialize model object
mlp = Model(layers=layers)

# configure callbacks
callbacks = Callbacks(mlp, eval_set=valid_set, **args.callback_args)

# run fit
mlp.fit(train_set, optimizer=optimizer,
        num_epochs=args.epochs, cost=cost, callbacks=callbacks)
error_rate = mlp.eval(valid_set, metric=Misclassification())
neon_logger.display('Misclassification error = %.1f%%' % (error_rate * 100))
| guoxuesong/deepstacks | examples/neon/mnist_mlp.py | Python | mit | 3,945 | [
"Gaussian"
] | edf3a391a6b8ab884a9a3b7c2beb2d2b458ae5ea58685bfa1548da3685b04a62 |
import vtk
from glob import glob
import os
# Convert a time series of Fluent/EnSight Gold case files into a single
# time-aware parallel VTK unstructured grid (.pvtu) file.
file_path = "/raid/home/ksansom/caseFiles/mri/VWI_proj/case1/fluent_dsa2/ensight"
out_dir = "/raid/home/ksansom/caseFiles/mri/VWI_proj/case1/fluent_dsa2/vtk_out"
# '?' wildcards match the per-timestep suffix, e.g. case1_dsa-5-6.0000.dat.encas
file_pattern = "case1_dsa-5-?.????.dat.encas"
filelist = sorted(glob(os.path.join(file_path, file_pattern)))
if not os.path.exists(out_dir):
    os.makedirs(out_dir)
# Merge all multi-block pieces of each timestep into one unstructured grid.
append = vtk.vtkAppendFilter()
append.MergePointsOn()
reader = vtk.vtkEnSightGoldBinaryReader()
writer = vtk.vtkXMLPUnstructuredGridWriter()
writer.SetFileName(os.path.join(out_dir,'test_outfile.pvtu'))
writer.SetNumberOfTimeSteps(len(filelist))
#writer.SetTimeStepRange(0,len(filelist)-1)
writer.SetInputConnection(append.GetOutputPort())
# Start/WriteNextTime/Stop drive the writer's time-series mode.
writer.Start()
for file_p in filelist:
    path, file_name = os.path.split(file_p)
    # The physical time is encoded in the file name: <prefix>-5-<t>.dat.encas
    split_name = file_name.split('-')
    split_ext = split_name[-1].split('.')
    time = float('.'.join(split_ext[0:2]))
    print("file time: {0:.4f}".format(time))
    print(file_name)
    reader.SetFilePath(file_path)
    reader.SetCaseFileName(file_name)
    reader.Update()
    #N = reader.GetNumberOfCellArrays()
    N = reader.GetNumberOfVariables()
    # Feed every block of this timestep into the append filter, write the
    # timestep, then detach the blocks again before the next iteration.
    for i in range(0, N):
        append.AddInputData(reader.GetOutput().GetBlock(i))
    writer.WriteNextTime(time)
    for i in range(0, N):
        append.RemoveInputData(reader.GetOutput().GetBlock(i))
writer.Stop()
# Dead code kept for reference (single-timestep .vtu export); the string
# literal below is never used at runtime.
"""
reader = vtk.vtkEnSightGoldBinaryReader()
reader.SetFilePath("/raid/home/ksansom/caseFiles/mri/VWI_proj/case1/fluent_dsa2/ensight")
reader.SetCaseFileName("case1_dsa-5-6.0000.dat.encas")
reader.Update()
#N = reader.GetNumberOfCellArrays()
N = reader.GetNumberOfVariables()
append = vtk.vtkAppendFilter()
append.MergePointsOn()
for i in range(0, N):
append.AddInputData(reader.GetOutput().GetBlock(i))
append.Update()
umesh = vtk.vtkUnstructuredGrid()
umesh = append.GetOutput()
writer = vtk.vtkXMLUnstructuredGridWriter()
writer.SetFileName("test.vtu")
writer.SetInputData(umesh)
writer.Update()
"""
| kayarre/Tools | vtk/ensight2vtk_parallel.py | Python | bsd-2-clause | 1,995 | [
"VTK"
] | c51872c8afaeae0ca17e224a8647f5b69fa2b8a871c4569958d88a92a77cc015 |
# -*- coding: utf-8 -*-
"""Main IPython class."""
#-----------------------------------------------------------------------------
# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de>
# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
import abc
import ast
import atexit
import builtins as builtin_mod
import functools
import os
import re
import runpy
import sys
import tempfile
import traceback
import types
import subprocess
import warnings
from io import open as io_open
from pickleshare import PickleShareDB
from traitlets.config.configurable import SingletonConfigurable
from IPython.core import oinspect
from IPython.core import magic
from IPython.core import page
from IPython.core import prefilter
from IPython.core import ultratb
from IPython.core.alias import Alias, AliasManager
from IPython.core.autocall import ExitAutocall
from IPython.core.builtin_trap import BuiltinTrap
from IPython.core.events import EventManager, available_events
from IPython.core.compilerop import CachingCompiler, check_linecache_ipython
from IPython.core.debugger import Pdb
from IPython.core.display_trap import DisplayTrap
from IPython.core.displayhook import DisplayHook
from IPython.core.displaypub import DisplayPublisher
from IPython.core.error import InputRejected, UsageError
from IPython.core.extensions import ExtensionManager
from IPython.core.formatters import DisplayFormatter
from IPython.core.history import HistoryManager
from IPython.core.inputsplitter import ESC_MAGIC, ESC_MAGIC2
from IPython.core.logger import Logger
from IPython.core.macro import Macro
from IPython.core.payload import PayloadManager
from IPython.core.prefilter import PrefilterManager
from IPython.core.profiledir import ProfileDir
from IPython.core.usage import default_banner
from IPython.display import display
from IPython.testing.skipdoctest import skip_doctest
from IPython.utils import PyColorize
from IPython.utils import io
from IPython.utils import py3compat
from IPython.utils import openpy
from IPython.utils.decorators import undoc
from IPython.utils.io import ask_yes_no
from IPython.utils.ipstruct import Struct
from IPython.paths import get_ipython_dir
from IPython.utils.path import get_home_dir, get_py_filename, ensure_dir_exists
from IPython.utils.process import system, getoutput
from IPython.utils.strdispatch import StrDispatch
from IPython.utils.syspathcontext import prepended_to_syspath
from IPython.utils.text import format_screen, LSString, SList, DollarFormatter
from IPython.utils.tempdir import TemporaryDirectory
from traitlets import (
Integer, Bool, CaselessStrEnum, Enum, List, Dict, Unicode, Instance, Type,
observe, default,
)
from warnings import warn
from logging import error
import IPython.core.hooks
# NoOpContext is deprecated, but ipykernel imports it from here.
# See https://github.com/ipython/ipykernel/issues/157
from IPython.utils.contexts import NoOpContext
try:
    import docrepr.sphinxify as sphx

    def sphinxify(doc):
        """Render *doc* through docrepr/Sphinx into an HTML+plain mimebundle."""
        with TemporaryDirectory() as dirname:
            return {
                'text/html': sphx.sphinxify(doc, dirname),
                'text/plain': doc,
            }

except ImportError:
    # docrepr is optional; callers must check for None before use.
    sphinxify = None
class ProvisionalWarning(DeprecationWarning):
    """Warning category for provisional (unstable, may-change) features."""
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
# compiled regexps for autoindent management
# Matches an indented line starting with raise/return/pass; presumably used
# to decide when the autoindenter should dedent the next line -- the usage
# site is elsewhere in this file, confirm before relying on this.
dedent_re = re.compile(r'^\s+raise|^\s+return|^\s+pass')
#-----------------------------------------------------------------------------
# Utilities
#-----------------------------------------------------------------------------
@undoc
def softspace(file, newvalue):
    """Set ``file.softspace`` to *newvalue* and return the previous value.

    Copied from the stdlib ``code`` module to remove the dependency.
    Objects without the attribute, or with a read-only attribute, are
    tolerated silently.
    """
    previous = getattr(file, 'softspace', 0)
    try:
        file.softspace = newvalue
    except (AttributeError, TypeError):
        # "attribute-less object" or "read-only attributes"
        pass
    return previous
@undoc
def no_op(*a, **kw):
    """Accept any arguments and do nothing (placeholder callback)."""
class SpaceInInput(Exception):
    """Exception used to signal that a space was found in the input."""
def get_default_colors():
    """DEPRECATED: always returns ``'Neutral'`` and warns the caller."""
    warn('get_default_color is deprecated since IPython 5.0, and returns `Neutral` on all platforms.',
         DeprecationWarning, stacklevel=2)
    return 'Neutral'
class SeparateUnicode(Unicode):
    r"""A Unicode trait that normalizes separator strings.

    Validation converts ``'0'`` to ``''`` and literal ``'\\n'`` sequences to
    real newlines (used by ``separate_in``/``separate_out`` and friends).
    """
    def validate(self, obj, value):
        if value == '0':
            value = ''
        return super(SeparateUnicode, self).validate(obj, value.replace('\\n', '\n'))
@undoc
class DummyMod(object):
    """Stand-in module object for IPython's interactive module, used when a
    namespace must be assigned to the module's ``__dict__``."""
class ExecutionResult(object):
    """The result of a call to :meth:`InteractiveShell.run_cell`.

    Records the execution counter, any error raised before or during the
    run, and the produced value (if any).
    """
    # All fields default to None until filled in by run_cell.
    execution_count = None
    error_before_exec = None
    error_in_exec = None
    result = None

    @property
    def success(self):
        """True when no error occurred either before or during execution."""
        return self.error_before_exec is None and self.error_in_exec is None

    def raise_error(self):
        """Reraises error if `success` is `False`, otherwise does nothing"""
        # Pre-execution errors take precedence over in-execution ones.
        err = self.error_before_exec if self.error_before_exec is not None \
            else self.error_in_exec
        if err is not None:
            raise err

    def __repr__(self):
        name = self.__class__.__qualname__
        return ('<%s object at %x, execution_count=%s error_before_exec=%s '
                'error_in_exec=%s result=%s>'
                % (name, id(self), self.execution_count,
                   self.error_before_exec, self.error_in_exec,
                   repr(self.result)))
class InteractiveShell(SingletonConfigurable):
    """An enhanced, interactive shell for Python."""

    # Singleton slot managed by SingletonConfigurable.instance().
    _instance = None

    ast_transformers = List([], help=
        """
        A list of ast.NodeTransformer subclass instances, which will be applied
        to user input before code is run.
        """
    ).tag(config=True)

    autocall = Enum((0,1,2), default_value=0, help=
        """
        Make IPython automatically call any callable object even if you didn't
        type explicit parentheses. For example, 'str 43' becomes 'str(43)'
        automatically. The value can be '0' to disable the feature, '1' for
        'smart' autocall, where it is not applied if there are no more
        arguments on the line, and '2' for 'full' autocall, where all callable
        objects are automatically called (even if no arguments are present).
        """
    ).tag(config=True)

    # TODO: remove all autoindent logic and put into frontends.
    # We can't do this yet because even runlines uses the autoindent.
    autoindent = Bool(True, help=
        """
        Autoindent IPython code entered interactively.
        """
    ).tag(config=True)

    automagic = Bool(True, help=
        """
        Enable magic commands to be called without the leading %.
        """
    ).tag(config=True)

    banner1 = Unicode(default_banner,
        help="""The part of the banner to be printed before the profile"""
    ).tag(config=True)

    banner2 = Unicode('',
        help="""The part of the banner to be printed after the profile"""
    ).tag(config=True)

    cache_size = Integer(1000, help=
        """
        Set the size of the output cache. The default is 1000, you can
        change it permanently in your config file. Setting it to 0 completely
        disables the caching system, and the minimum value accepted is 3 (if
        you provide a value less than 3, it is reset to 0 and a warning is
        issued). This limit is defined because otherwise you'll spend more
        time re-flushing a too small cache than working
        """
    ).tag(config=True)

    color_info = Bool(True, help=
        """
        Use colors for displaying information about objects. Because this
        information is passed through a pager (like 'less'), and some pagers
        get confused with color codes, this capability can be turned off.
        """
    ).tag(config=True)

    colors = CaselessStrEnum(('Neutral', 'NoColor','LightBG','Linux'),
                             default_value='Neutral',
        help="Set the color scheme (NoColor, Neutral, Linux, or LightBG)."
    ).tag(config=True)

    debug = Bool(False).tag(config=True)

    disable_failing_post_execute = Bool(False,
        help="Don't call post-execute functions that have failed in the past."
    ).tag(config=True)

    # Display machinery: formatter instance plus the classes used to build
    # the displayhook and display publisher in init_displayhook/init_display_pub.
    display_formatter = Instance(DisplayFormatter, allow_none=True)
    displayhook_class = Type(DisplayHook)
    display_pub_class = Type(DisplayPublisher)

    sphinxify_docstring = Bool(False, help=
        """
        Enables rich html representation of docstrings. (This requires the
        docrepr module).
        """).tag(config=True)

    @observe("sphinxify_docstring")
    def _sphinxify_docstring_changed(self, change):
        # Provisional feature: warn the first time it is switched on.
        if change['new']:
            warn("`sphinxify_docstring` is provisional since IPython 5.0 and might change in future versions." , ProvisionalWarning)

    enable_html_pager = Bool(False, help=
        """
        (Provisional API) enables html representation in mime bundles sent
        to pagers.
        """).tag(config=True)

    @observe("enable_html_pager")
    def _enable_html_pager_changed(self, change):
        # Provisional feature: warn the first time it is switched on.
        if change['new']:
            warn("`enable_html_pager` is provisional since IPython 5.0 and might change in future versions.", ProvisionalWarning)

    # Class used by init_data_pub; None disables data publishing entirely.
    data_pub_class = None

    exit_now = Bool(False)
    exiter = Instance(ExitAutocall)

    @default('exiter')
    def _exiter_default(self):
        return ExitAutocall(self)

    # Monotonically increasing execution counter
    execution_count = Integer(1)
    filename = Unicode("<ipython console>")
    ipython_dir= Unicode('').tag(config=True) # Set to get_ipython_dir() in __init__

    # Input splitter, to transform input line by line and detect when a block
    # is ready to be executed.
    input_splitter = Instance('IPython.core.inputsplitter.IPythonInputSplitter',
                              (), {'line_input_checker': True})

    # This InputSplitter instance is used to transform completed cells before
    # running them. It allows cell magics to contain blank lines.
    input_transformer_manager = Instance('IPython.core.inputsplitter.IPythonInputSplitter',
                                         (), {'line_input_checker': False})

    logstart = Bool(False, help=
        """
        Start logging to the default log file in overwrite mode.
        Use `logappend` to specify a log file to **append** logs to.
        """
    ).tag(config=True)

    logfile = Unicode('', help=
        """
        The name of the logfile to use.
        """
    ).tag(config=True)

    logappend = Unicode('', help=
        """
        Start logging to the given file in append mode.
        Use `logfile` to specify a log file to **overwrite** logs to.
        """
    ).tag(config=True)

    # Verbosity level passed to the object inspector (see init_inspector).
    object_info_string_level = Enum((0,1,2), default_value=0,
    ).tag(config=True)

    pdb = Bool(False, help=
        """
        Automatically call the pdb debugger after every exception.
        """
    ).tag(config=True)

    display_page = Bool(False,
        help="""If True, anything that would be passed to the pager
        will be displayed as regular output instead."""
    ).tag(config=True)

    # deprecated prompt traits:
    prompt_in1 = Unicode('In [\\#]: ',
        help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly."
    ).tag(config=True)
    prompt_in2 = Unicode(' .\\D.: ',
        help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly."
    ).tag(config=True)
    prompt_out = Unicode('Out[\\#]: ',
        help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly."
    ).tag(config=True)
prompts_pad_left = Bool(True,
    help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly."
).tag(config=True)

# BUG FIX: the observer previously listed 'prompt_pad_left', which is not a
# trait on this class (the trait is 'prompts_pad_left'), so assigning
# prompts_pad_left never triggered the deprecation warning.
@observe('prompt_in1', 'prompt_in2', 'prompt_out', 'prompts_pad_left')
def _prompt_trait_changed(self, change):
    """Warn when one of the deprecated prompt traits is assigned."""
    name = change['name']
    warn("InteractiveShell.{name} is deprecated since IPython 4.0"
         " and ignored since 5.0, set TerminalInteractiveShell.prompts"
         " object directly.".format(name=name))
# protect against weird cases where self.config may not exist:
show_rewritten_input = Bool(True,
    help="Show rewritten input, e.g. for autocall."
).tag(config=True)

quiet = Bool(False).tag(config=True)

history_length = Integer(10000,
    help='Total length of command history'
).tag(config=True)

history_load_length = Integer(1000, help=
    """
    The number of saved history entries to be loaded
    into the history buffer at startup.
    """
).tag(config=True)

ast_node_interactivity = Enum(['all', 'last', 'last_expr', 'none'],
    default_value='last_expr',
    help="""
    'all', 'last', 'last_expr' or 'none', specifying which nodes should be
    run interactively (displaying output from expressions)."""
).tag(config=True)

# TODO: this part of prompt management should be moved to the frontends.
# Use custom TraitTypes that convert '0'->'' and '\\n'->'\n'
separate_in = SeparateUnicode('\n').tag(config=True)
separate_out = SeparateUnicode('').tag(config=True)
separate_out2 = SeparateUnicode('').tag(config=True)
wildcards_case_sensitive = Bool(True).tag(config=True)
xmode = CaselessStrEnum(('Context','Plain', 'Verbose'),
    default_value='Context',
    help="Switch modes for the IPython exception handlers."
).tag(config=True)

# Subcomponents of InteractiveShell, created by the matching init_* methods.
alias_manager = Instance('IPython.core.alias.AliasManager', allow_none=True)
prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
builtin_trap = Instance('IPython.core.builtin_trap.BuiltinTrap', allow_none=True)
display_trap = Instance('IPython.core.display_trap.DisplayTrap', allow_none=True)
extension_manager = Instance('IPython.core.extensions.ExtensionManager', allow_none=True)
payload_manager = Instance('IPython.core.payload.PayloadManager', allow_none=True)
history_manager = Instance('IPython.core.history.HistoryAccessorBase', allow_none=True)
magics_manager = Instance('IPython.core.magic.MagicsManager', allow_none=True)
profile_dir = Instance('IPython.core.application.ProfileDir', allow_none=True)

@property
def profile(self):
    """Short profile name from profile_dir, or None when no profile_dir."""
    if self.profile_dir is not None:
        name = os.path.basename(self.profile_dir.location)
        return name.replace('profile_','')

# Private interface
_post_execute = Dict()

# Tracks any GUI loop loaded for pylab
pylab_gui_select = None

last_execution_succeeded = Bool(True, help='Did last executed command succeeded')
def __init__(self, ipython_dir=None, profile_dir=None,
             user_module=None, user_ns=None,
             custom_exceptions=((), None), **kwargs):
    """Construct the shell.

    NOTE(review): the init_* calls below are order-sensitive -- later
    stages read state created by earlier ones (e.g. ``self.db`` needs
    ``self.profile_dir``; ``init_logger`` needs ``self.home_dir`` from
    ``init_pushd_popd_magic``).  Do not reorder without checking.
    """
    # This is where traits with a config_key argument are updated
    # from the values on config.
    super(InteractiveShell, self).__init__(**kwargs)
    if 'PromptManager' in self.config:
        warn('As of IPython 5.0 `PromptManager` config will have no effect'
             ' and has been replaced by TerminalInteractiveShell.prompts_class')
    self.configurables = [self]
    # These are relatively independent and stateless
    self.init_ipython_dir(ipython_dir)
    self.init_profile_dir(profile_dir)
    self.init_instance_attrs()
    self.init_environment()
    # Check if we're in a virtualenv, and set up sys.path.
    self.init_virtualenv()
    # Create namespaces (user_ns, user_global_ns, etc.)
    self.init_create_namespaces(user_module, user_ns)
    # This has to be done after init_create_namespaces because it uses
    # something in self.user_ns, but before init_sys_modules, which
    # is the first thing to modify sys.
    # TODO: When we override sys.stdout and sys.stderr before this class
    # is created, we are saving the overridden ones here. Not sure if this
    # is what we want to do.
    self.save_sys_module_state()
    self.init_sys_modules()
    # While we're trying to have each part of the code directly access what
    # it needs without keeping redundant references to objects, we have too
    # much legacy code that expects ip.db to exist.
    self.db = PickleShareDB(os.path.join(self.profile_dir.location, 'db'))
    self.init_history()
    self.init_encoding()
    self.init_prefilter()
    self.init_syntax_highlighting()
    self.init_hooks()
    self.init_events()
    self.init_pushd_popd_magic()
    self.init_user_ns()
    self.init_logger()
    self.init_builtins()
    # The following was in post_config_initialization
    self.init_inspector()
    self.raw_input_original = input
    self.init_completer()
    # TODO: init_io() needs to happen before init_traceback handlers
    # because the traceback handlers hardcode the stdout/stderr streams.
    # This logic is in debugger.Pdb and should eventually be changed.
    self.init_io()
    self.init_traceback_handlers(custom_exceptions)
    self.init_prompts()
    self.init_display_formatter()
    self.init_display_pub()
    self.init_data_pub()
    self.init_displayhook()
    self.init_magics()
    self.init_alias()
    self.init_logstart()
    self.init_pdb()
    self.init_extension_manager()
    self.init_payload()
    self.init_deprecation_warnings()
    self.hooks.late_startup_hook()
    self.events.trigger('shell_initialized', self)
    # Clean up temp files etc. when the process exits.
    atexit.register(self.atexit_operations)
def get_ipython(self):
    """Return the currently running IPython instance (i.e. ``self``)."""
    return self
#-------------------------------------------------------------------------
# Trait changed handlers
#-------------------------------------------------------------------------
@observe('ipython_dir')
def _ipython_dir_changed(self, change):
    # Create the new IPython directory on disk as soon as the trait changes.
    ensure_dir_exists(change['new'])
def set_autoindent(self, value=None):
    """Set the autoindent flag.

    If called with no arguments, it acts as a toggle.
    """
    self.autoindent = (not self.autoindent) if value is None else value
#-------------------------------------------------------------------------
# init_* methods called by __init__
#-------------------------------------------------------------------------
def init_ipython_dir(self, ipython_dir):
    """Record the IPython directory, defaulting to the standard location."""
    if ipython_dir is None:
        self.ipython_dir = get_ipython_dir()
    else:
        self.ipython_dir = ipython_dir
def init_profile_dir(self, profile_dir):
    """Attach *profile_dir*, creating the 'default' profile when not given."""
    if profile_dir is None:
        profile_dir = ProfileDir.create_profile_dir_by_name(
            self.ipython_dir, 'default')
    self.profile_dir = profile_dir
def init_instance_attrs(self):
    """Initialise plain (non-trait) instance attributes."""
    self.more = False
    # Command compiler (caches compiled source).
    self.compile = CachingCompiler()
    # Namespace reserved for extension writers: IPython itself never uses
    # it, so extensions can stash state here without fear of collisions.
    self.meta = Struct()
    # Temporary files/directories, deleted at exit.
    self.tempfiles = []
    self.tempdirs = []
    # Where the process started (mainly for crash post-mortem); currently
    # not read anywhere.
    self.starting_dir = os.getcwd()
    # Indentation management state.
    self.indent_current_nsp = 0
    # Post-execution functions registered via register_post_execute.
    self._post_execute = {}
def init_environment(self):
    """Adjust the user's environment; a no-op hook for subclasses."""
def init_encoding(self):
    """Record stdin's encoding once at startup.

    Certain terminals (e.g. Emacs under Win32) report ``None``, so fall
    back to a known-valid 'ascii' for use by raw_input().
    """
    try:
        encoding = sys.stdin.encoding
    except AttributeError:
        encoding = None
    self.stdin_encoding = encoding or 'ascii'
@observe('colors')
def init_syntax_highlighting(self, changes=None):
    """(Re)build the source colorizer; also fires when ``colors`` changes."""
    parser = PyColorize.Parser(style=self.colors, parent=self)
    self.pycolorize = lambda src: parser.format(src, 'str')
def refresh_style(self):
    """No-op here; overridden by subclasses that manage color styles."""
def init_pushd_popd_magic(self):
    """Set up the state used for pushd/popd directory management."""
    self.home_dir = get_home_dir()
    self.dir_stack = []
def init_logger(self):
    """Create the session Logger (requires self.home_dir to be set)."""
    self.logger = Logger(self.home_dir, logfname='ipython_log.py',
                         logmode='rotate')
def init_logstart(self):
    """Initialize logging in case it was requested at the command line.
    """
    # Only the first matching option takes effect, in this priority order.
    if self.logappend:
        self.magic('logstart %s append' % self.logappend)
        return
    if self.logfile:
        self.magic('logstart %s' % self.logfile)
        return
    if self.logstart:
        self.magic('logstart')
def init_deprecation_warnings(self):
    """Register the default filter for deprecation warnings.

    Restricting the filter to the user namespace's ``__name__`` lets
    interactively-used deprecated functions warn the user while keeping
    deprecation warnings from library imports hidden.
    """
    warnings.filterwarnings("default", category=DeprecationWarning,
                            module=self.user_ns.get("__name__"))
def init_builtins(self):
    """Install IPython's additions to the builtin namespace."""
    # A single static flag whose presence indicates that an IPython shell
    # has been created; never removed, and not tracked per-instance.
    builtin_mod.__dict__['__IPYTHON__'] = True
    builtin_mod.__dict__['display'] = display
    self.builtin_trap = BuiltinTrap(shell=self)
def init_inspector(self):
    """Create the object inspector."""
    self.inspector = oinspect.Inspector(
        oinspect.InspectColors,
        PyColorize.ANSICodeColors,
        self.colors,
        self.object_info_string_level,
    )
def init_io(self):
    """Point the deprecated IPython.utils.io streams at sys.std*."""
    # This will just use sys.stdout and sys.stderr. If you want to
    # override sys.stdout and sys.stderr themselves, you need to do that
    # *before* instantiating this class, because io holds onto
    # references to the underlying streams.
    # io.std* are deprecated, but don't show our own deprecation warnings
    # during initialization of the deprecated API.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', DeprecationWarning)
        io.stdout = io.IOStream(sys.stdout)
        io.stderr = io.IOStream(sys.stderr)
def init_prompts(self):
    """Set system prompts so scripts can detect an interactive session."""
    sys.ps1, sys.ps2, sys.ps3 = 'In : ', '...: ', 'Out: '
def init_display_formatter(self):
    """Create the DisplayFormatter and register it as configurable."""
    formatter = DisplayFormatter(parent=self)
    self.display_formatter = formatter
    self.configurables.append(formatter)
def init_display_pub(self):
    """Create the display publisher and register it as configurable."""
    publisher = self.display_pub_class(parent=self)
    self.display_pub = publisher
    self.configurables.append(publisher)
def init_data_pub(self):
    """Create the data publisher when a class for it is configured."""
    if self.data_pub_class:
        self.data_pub = self.data_pub_class(parent=self)
        self.configurables.append(self.data_pub)
    else:
        self.data_pub = None
def init_displayhook(self):
    """Create the displayhook plus the trap that installs/removes it."""
    hook = self.displayhook_class(
        parent=self,
        shell=self,
        cache_size=self.cache_size,
    )
    self.displayhook = hook
    self.configurables.append(hook)
    # Context manager that swaps the displayhook in and out at the
    # appropriate time.
    self.display_trap = DisplayTrap(hook=hook)
def init_virtualenv(self):
    """Add a virtualenv to sys.path so the user can import modules from it.
    This isn't perfect: it doesn't use the Python interpreter with which the
    virtualenv was built, and it ignores the --no-site-packages option. A
    warning will appear suggesting the user installs IPython in the
    virtualenv, but for many cases, it probably works well enough.
    Adapted from code snippets online.
    http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv
    """
    if 'VIRTUAL_ENV' not in os.environ:
        # Not in a virtualenv
        return
    # venv detection:
    # stdlib venv may symlink sys.executable, so we can't use realpath.
    # but others can symlink *to* the venv Python, so we can't just use sys.executable.
    # So we just check every item in the symlink tree (generally <= 3)
    p = os.path.normcase(sys.executable)
    paths = [p]
    while os.path.islink(p):
        p = os.path.normcase(os.path.join(os.path.dirname(p), os.readlink(p)))
        paths.append(p)
    p_venv = os.path.normcase(os.environ['VIRTUAL_ENV'])
    # In Cygwin paths like "c:\..." and '\cygdrive\c\...' are possible
    if p_venv.startswith('\\cygdrive'):
        p_venv = p_venv[11:]
    elif p_venv[1] == ':':
        # NOTE(review): assumes len(p_venv) >= 2; a one-character
        # VIRTUAL_ENV value would raise IndexError here.
        p_venv = p_venv[2:]
    # NOTE(review): substring test -- any interpreter path merely
    # *containing* the venv path counts as "inside the venv".
    if any(p_venv in p for p in paths):
        # Running properly in the virtualenv, don't need to do anything
        return
    warn("Attempting to work in a virtualenv. If you encounter problems, please "
         "install IPython inside the virtualenv.")
    if sys.platform == "win32":
        virtual_env = os.path.join(os.environ['VIRTUAL_ENV'], 'Lib', 'site-packages')
    else:
        virtual_env = os.path.join(os.environ['VIRTUAL_ENV'], 'lib',
                                   'python%d.%d' % sys.version_info[:2], 'site-packages')
    import site
    sys.path.insert(0, virtual_env)
    site.addsitedir(virtual_env)
#-------------------------------------------------------------------------
# Things related to injections into the sys module
#-------------------------------------------------------------------------
def save_sys_module_state(self):
    """Snapshot sys stream/excepthook state and the user module's
    sys.modules entry so restore_sys_module_state can undo our changes.

    This has to be called after self.user_module is created.
    """
    self._orig_sys_module_state = {
        'stdin': sys.stdin,
        'stdout': sys.stdout,
        'stderr': sys.stderr,
        'excepthook': sys.excepthook,
    }
    main_name = self.user_module.__name__
    self._orig_sys_modules_main_name = main_name
    self._orig_sys_modules_main_mod = sys.modules.get(main_name)
def restore_sys_module_state(self):
    """Restore the state of the sys module."""
    try:
        for attr, value in self._orig_sys_module_state.items():
            setattr(sys, attr, value)
    except AttributeError:
        pass
    # Undo what was done in self.init_sys_modules.
    if self._orig_sys_modules_main_mod is not None:
        sys.modules[self._orig_sys_modules_main_name] = \
            self._orig_sys_modules_main_mod
#-------------------------------------------------------------------------
# Things related to the banner
#-------------------------------------------------------------------------
@property
def banner(self):
    """The full startup banner: banner1, optional profile note, banner2."""
    parts = [self.banner1]
    if self.profile and self.profile != 'default':
        parts.append('\nIPython profile: %s\n' % self.profile)
    if self.banner2:
        parts.append('\n' + self.banner2)
    return ''.join(parts)
def show_banner(self, banner=None):
    """Write *banner* (or the default banner) to stdout."""
    sys.stdout.write(self.banner if banner is None else banner)
#-------------------------------------------------------------------------
# Things related to hooks
#-------------------------------------------------------------------------
def init_hooks(self):
    """Install the default hooks from IPython.core.hooks."""
    # hooks holds pointers used for user-side customizations.
    self.hooks = Struct()
    self.strdispatchers = {}
    # Default hooks get the lowest priority (100); user hooks should use
    # 0-100 so they run first.
    hooks_mod = IPython.core.hooks
    for hook_name in hooks_mod.__all__:
        self.set_hook(hook_name, getattr(hooks_mod, hook_name), 100,
                      _warn_deprecated=False)
    if self.display_page:
        self.set_hook('show_in_pager', page.as_hook(page.display_page), 90)
def set_hook(self, name, hook, priority=50, str_key=None, re_key=None,
             _warn_deprecated=True):
    """set_hook(name,hook) -> sets an internal IPython hook.
    IPython exposes some of its internal API as user-modifiable hooks. By
    adding your function to one of these hooks, you can modify IPython's
    behavior to call at runtime your own routines."""
    # At some point in the future, this should validate the hook before it
    # accepts it. Probably at least check that the hook takes the number
    # of args it's supposed to.
    f = types.MethodType(hook, self)
    # check if the hook is for strdispatcher first
    if str_key is not None:
        sdp = self.strdispatchers.get(name, StrDispatch())
        sdp.add_s(str_key, f, priority)
        self.strdispatchers[name] = sdp
        return
    if re_key is not None:
        sdp = self.strdispatchers.get(name, StrDispatch())
        sdp.add_re(re.compile(re_key), f, priority)
        self.strdispatchers[name] = sdp
        return
    dp = getattr(self.hooks, name, None)
    if name not in IPython.core.hooks.__all__:
        print("Warning! Hook '%s' is not one of %s" % \
              (name, IPython.core.hooks.__all__))
    if _warn_deprecated and (name in IPython.core.hooks.deprecated):
        alternative = IPython.core.hooks.deprecated[name]
        warn("Hook {} is deprecated. Use {} instead.".format(name, alternative), stacklevel=2)
    if not dp:
        dp = IPython.core.hooks.CommandChainDispatcher()
    try:
        # Chain onto an existing CommandChainDispatcher, ordered by priority.
        dp.add(f, priority)
    except AttributeError:
        # it was not commandchain, plain old func - replace
        dp = f
    setattr(self.hooks, name, dp)
#-------------------------------------------------------------------------
# Things related to events
#-------------------------------------------------------------------------
def init_events(self):
    """Create the event manager and register built-in callbacks."""
    self.events = EventManager(self, available_events)
    self.events.register("pre_execute", self._clear_warning_registry)
def register_post_execute(self, func):
    """DEPRECATED: Use ip.events.register('post_run_cell', func)

    Register a function for calling after code execution.
    """
    warn("ip.register_post_execute is deprecated, use "
         "ip.events.register('post_run_cell', func) instead.", stacklevel=2)
    self.events.register('post_run_cell', func)
def _clear_warning_registry(self):
    # Drop __warningregistry__ so that code blocks with overlapping line
    # number ranges don't spuriously suppress each other's warnings
    # (see gh-6611 for details).
    self.user_global_ns.pop("__warningregistry__", None)
#-------------------------------------------------------------------------
# Things related to the "main" module
#-------------------------------------------------------------------------
def new_main_mod(self, filename, modname):
    """Return a new 'main' module object for user code execution.

    ``filename`` should be the path of the script which will be run in the
    module. Requests with the same filename will get the same module, with
    its namespace cleared.

    ``modname`` should be the module name - normally either '__main__' or
    the basename of the file without the extension.

    When scripts are executed via %run, we must keep a reference to their
    __main__ module around so that Python doesn't clear it, rendering
    references to module globals useless.  This method keeps said reference
    in a private dict, keyed by the absolute path of the script, so for
    multiple executions of the same script only the last namespace is kept,
    preventing leaks from old references while keeping the last run's
    objects accessible.
    """
    filename = os.path.abspath(filename)
    main_mod = self._main_mod_cache.get(filename)
    if main_mod is None:
        main_mod = self._main_mod_cache[filename] = types.ModuleType(
            modname,
            doc="Module created for script run in IPython")
    else:
        # Reuse the cached module, but start from an empty namespace.
        main_mod.__dict__.clear()
        main_mod.__name__ = modname

    main_mod.__file__ = filename
    # It seems pydoc (and perhaps others) needs any module instance to
    # implement a __nonzero__ method
    main_mod.__nonzero__ = lambda: True

    return main_mod
def clear_main_mod_cache(self):
    """Empty the cache of 'main' modules (used mainly by %reset).

    Examples
    --------
    In [15]: import IPython

    In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython')

    In [17]: len(_ip._main_mod_cache) > 0
    Out[17]: True

    In [18]: _ip.clear_main_mod_cache()

    In [19]: len(_ip._main_mod_cache) == 0
    Out[19]: True
    """
    self._main_mod_cache.clear()
#-------------------------------------------------------------------------
# Things related to debugging
#-------------------------------------------------------------------------
def init_pdb(self):
    """Initialise automatic pdb activation from the ``pdb`` trait."""
    # self.call_pdb is a property; assigning it also notifies the
    # traceback handlers.
    self.call_pdb = self.pdb
def _get_call_pdb(self):
    # Accessor backing the call_pdb property below.
    return self._call_pdb

def _set_call_pdb(self, val):
    # Only boolean-equivalent values (0/1/False/True) are accepted.
    if val not in (0, 1, False, True):
        raise ValueError('new call_pdb value must be boolean')
    # store value in instance
    self._call_pdb = val
    # notify the actual exception handlers
    self.InteractiveTB.call_pdb = val

call_pdb = property(_get_call_pdb, _set_call_pdb, None,
                    'Control auto-activation of pdb at exceptions')
def debugger(self, force=False):
    """Call the pdb debugger.

    Keywords:

      - force(False): by default, this routine checks the instance call_pdb
        flag and does not actually invoke the debugger if the flag is false.
        The 'force' option forces the debugger to activate even if the flag
        is false.
    """
    if not force and not self.call_pdb:
        return
    if not hasattr(sys, 'last_traceback'):
        error('No traceback has been produced, nothing to debug.')
        return
    self.InteractiveTB.debugger(force=True)
#-------------------------------------------------------------------------
# Things related to IPython's various namespaces
#-------------------------------------------------------------------------
# Flipped to False when the caller supplies user_module/user_ns.
default_user_namespaces = True

def init_create_namespaces(self, user_module=None, user_ns=None):
    """Create the module and namespaces user code will execute in."""
    # Create the namespace where the user will operate. user_ns is
    # normally the only one used, and it is passed to the exec calls as
    # the locals argument. But we do carry a user_global_ns namespace
    # given as the exec 'globals' argument, This is useful in embedding
    # situations where the ipython shell opens in a context where the
    # distinction between locals and globals is meaningful. For
    # non-embedded contexts, it is just the same object as the user_ns dict.
    # FIXME. For some strange reason, __builtins__ is showing up at user
    # level as a dict instead of a module. This is a manual fix, but I
    # should really track down where the problem is coming from. Alex
    # Schmolck reported this problem first.
    # A useful post by Alex Martelli on this topic:
    # Re: inconsistent value from __builtins__
    # Von: Alex Martelli <aleaxit@yahoo.com>
    # Datum: Freitag 01 Oktober 2004 04:45:34 nachmittags/abends
    # Gruppen: comp.lang.python
    # Michael Hohn <hohn@hooknose.lbl.gov> wrote:
    # > >>> print type(builtin_check.get_global_binding('__builtins__'))
    # > <type 'dict'>
    # > >>> print type(__builtins__)
    # > <type 'module'>
    # > Is this difference in return value intentional?
    # Well, it's documented that '__builtins__' can be either a dictionary
    # or a module, and it's been that way for a long time. Whether it's
    # intentional (or sensible), I don't know. In any case, the idea is
    # that if you need to access the built-in namespace directly, you
    # should start with "import __builtin__" (note, no 's') which will
    # definitely give you a module. Yeah, it's somewhat confusing:-(.
    # These routines return a properly built module and dict as needed by
    # the rest of the code, and can also be used by extension writers to
    # generate properly initialized namespaces.
    if (user_ns is not None) or (user_module is not None):
        self.default_user_namespaces = False
    self.user_module, self.user_ns = self.prepare_user_module(user_module, user_ns)
    # A record of hidden variables we have added to the user namespace, so
    # we can list later only variables defined in actual interactive use.
    self.user_ns_hidden = {}
    # Now that FakeModule produces a real module, we've run into a nasty
    # problem: after script execution (via %run), the module where the user
    # code ran is deleted. Now that this object is a true module (needed
    # so doctest and other tools work correctly), the Python module
    # teardown mechanism runs over it, and sets to None every variable
    # present in that module. Top-level references to objects from the
    # script survive, because the user_ns is updated with them. However,
    # calling functions defined in the script that use other things from
    # the script will fail, because the function's closure had references
    # to the original objects, which are now all None. So we must protect
    # these modules from deletion by keeping a cache.
    #
    # To avoid keeping stale modules around (we only need the one from the
    # last run), we use a dict keyed with the full path to the script, so
    # only the last version of the module is held in the cache. Note,
    # however, that we must cache the module *namespace contents* (their
    # __dict__). Because if we try to cache the actual modules, old ones
    # (uncached) could be destroyed while still holding references (such as
    # those held by GUI objects that tend to be long-lived)>
    #
    # The %reset command will flush this cache. See the cache_main_mod()
    # and clear_main_mod_cache() methods for details on use.
    # This is the cache used for 'main' namespaces
    self._main_mod_cache = {}
    # A table holding all the namespaces IPython deals with, so that
    # introspection facilities can search easily.
    self.ns_table = {'user_global':self.user_module.__dict__,
                     'user_local':self.user_ns,
                     'builtin':builtin_mod.__dict__
                     }
@property
def user_global_ns(self):
return self.user_module.__dict__
def prepare_user_module(self, user_module=None, user_ns=None):
"""Prepare the module and namespace in which user code will be run.
When IPython is started normally, both parameters are None: a new module
is created automatically, and its __dict__ used as the namespace.
If only user_module is provided, its __dict__ is used as the namespace.
If only user_ns is provided, a dummy module is created, and user_ns
becomes the global namespace. If both are provided (as they may be
when embedding), user_ns is the local namespace, and user_module
provides the global namespace.
Parameters
----------
user_module : module, optional
The current user module in which IPython is being run. If None,
a clean module will be created.
user_ns : dict, optional
A namespace in which to run interactive commands.
Returns
-------
A tuple of user_module and user_ns, each properly initialised.
"""
if user_module is None and user_ns is not None:
user_ns.setdefault("__name__", "__main__")
user_module = DummyMod()
user_module.__dict__ = user_ns
if user_module is None:
user_module = types.ModuleType("__main__",
doc="Automatically created module for IPython interactive environment")
# We must ensure that __builtin__ (without the final 's') is always
# available and pointing to the __builtin__ *module*. For more details:
# http://mail.python.org/pipermail/python-dev/2001-April/014068.html
user_module.__dict__.setdefault('__builtin__', builtin_mod)
user_module.__dict__.setdefault('__builtins__', builtin_mod)
if user_ns is None:
user_ns = user_module.__dict__
return user_module, user_ns
def init_sys_modules(self):
# We need to insert into sys.modules something that looks like a
# module but which accesses the IPython namespace, for shelve and
# pickle to work interactively. Normally they rely on getting
# everything out of __main__, but for embedding purposes each IPython
# instance has its own private namespace, so we can't go shoving
# everything into __main__.
# note, however, that we should only do this for non-embedded
# ipythons, which really mimic the __main__.__dict__ with their own
# namespace. Embedded instances, on the other hand, should not do
# this because they need to manage the user local/global namespaces
# only, but they live within a 'normal' __main__ (meaning, they
# shouldn't overtake the execution environment of the script they're
# embedded in).
# This is overridden in the InteractiveShellEmbed subclass to a no-op.
main_name = self.user_module.__name__
sys.modules[main_name] = self.user_module
def init_user_ns(self):
"""Initialize all user-visible namespaces to their minimum defaults.
Certain history lists are also initialized here, as they effectively
act as user namespaces.
Notes
-----
All data structures here are only filled in, they are NOT reset by this
method. If they were not empty before, data will simply be added to
therm.
"""
# This function works in two parts: first we put a few things in
# user_ns, and we sync that contents into user_ns_hidden so that these
# initial variables aren't shown by %who. After the sync, we add the
# rest of what we *do* want the user to see with %who even on a new
# session (probably nothing, so they really only see their own stuff)
# The user dict must *always* have a __builtin__ reference to the
# Python standard __builtin__ namespace, which must be imported.
# This is so that certain operations in prompt evaluation can be
# reliably executed with builtins. Note that we can NOT use
# __builtins__ (note the 's'), because that can either be a dict or a
# module, and can even mutate at runtime, depending on the context
# (Python makes no guarantees on it). In contrast, __builtin__ is
# always a module object, though it must be explicitly imported.
# For more details:
# http://mail.python.org/pipermail/python-dev/2001-April/014068.html
ns = {}
# make global variables for user access to the histories
ns['_ih'] = self.history_manager.input_hist_parsed
ns['_oh'] = self.history_manager.output_hist
ns['_dh'] = self.history_manager.dir_hist
# user aliases to input and output histories. These shouldn't show up
# in %who, as they can have very large reprs.
ns['In'] = self.history_manager.input_hist_parsed
ns['Out'] = self.history_manager.output_hist
# Store myself as the public api!!!
ns['get_ipython'] = self.get_ipython
ns['exit'] = self.exiter
ns['quit'] = self.exiter
# Sync what we've added so far to user_ns_hidden so these aren't seen
# by %who
self.user_ns_hidden.update(ns)
# Anything put into ns now would show up in %who. Think twice before
# putting anything here, as we really want %who to show the user their
# stuff, not our variables.
# Finally, update the real user's namespace
self.user_ns.update(ns)
@property
def all_ns_refs(self):
"""Get a list of references to all the namespace dictionaries in which
IPython might store a user-created object.
Note that this does not include the displayhook, which also caches
objects from the output."""
return [self.user_ns, self.user_global_ns, self.user_ns_hidden] + \
[m.__dict__ for m in self._main_mod_cache.values()]
def reset(self, new_session=True):
"""Clear all internal namespaces, and attempt to release references to
user objects.
If new_session is True, a new history session will be opened.
"""
# Clear histories
self.history_manager.reset(new_session)
# Reset counter used to index all histories
if new_session:
self.execution_count = 1
# Flush cached output items
if self.displayhook.do_full_cache:
self.displayhook.flush()
# The main execution namespaces must be cleared very carefully,
# skipping the deletion of the builtin-related keys, because doing so
# would cause errors in many object's __del__ methods.
if self.user_ns is not self.user_global_ns:
self.user_ns.clear()
ns = self.user_global_ns
drop_keys = set(ns.keys())
drop_keys.discard('__builtin__')
drop_keys.discard('__builtins__')
drop_keys.discard('__name__')
for k in drop_keys:
del ns[k]
self.user_ns_hidden.clear()
# Restore the user namespaces to minimal usability
self.init_user_ns()
# Restore the default and user aliases
self.alias_manager.clear_aliases()
self.alias_manager.init_aliases()
# Flush the private list of module references kept for script
# execution protection
self.clear_main_mod_cache()
def del_var(self, varname, by_name=False):
"""Delete a variable from the various namespaces, so that, as
far as possible, we're not keeping any hidden references to it.
Parameters
----------
varname : str
The name of the variable to delete.
by_name : bool
If True, delete variables with the given name in each
namespace. If False (default), find the variable in the user
namespace, and delete references to it.
"""
if varname in ('__builtin__', '__builtins__'):
raise ValueError("Refusing to delete %s" % varname)
ns_refs = self.all_ns_refs
if by_name: # Delete by name
for ns in ns_refs:
try:
del ns[varname]
except KeyError:
pass
else: # Delete by object
try:
obj = self.user_ns[varname]
except KeyError:
raise NameError("name '%s' is not defined" % varname)
# Also check in output history
ns_refs.append(self.history_manager.output_hist)
for ns in ns_refs:
to_delete = [n for n, o in ns.items() if o is obj]
for name in to_delete:
del ns[name]
# displayhook keeps extra references, but not in a dictionary
for name in ('_', '__', '___'):
if getattr(self.displayhook, name) is obj:
setattr(self.displayhook, name, None)
def reset_selective(self, regex=None):
"""Clear selective variables from internal namespaces based on a
specified regular expression.
Parameters
----------
regex : string or compiled pattern, optional
A regular expression pattern that will be used in searching
variable names in the users namespaces.
"""
if regex is not None:
try:
m = re.compile(regex)
except TypeError:
raise TypeError('regex must be a string or compiled pattern')
# Search for keys in each namespace that match the given regex
# If a match is found, delete the key/value pair.
for ns in self.all_ns_refs:
for var in ns:
if m.search(var):
del ns[var]
def push(self, variables, interactive=True):
"""Inject a group of variables into the IPython user namespace.
Parameters
----------
variables : dict, str or list/tuple of str
The variables to inject into the user's namespace. If a dict, a
simple update is done. If a str, the string is assumed to have
variable names separated by spaces. A list/tuple of str can also
be used to give the variable names. If just the variable names are
give (list/tuple/str) then the variable values looked up in the
callers frame.
interactive : bool
If True (default), the variables will be listed with the ``who``
magic.
"""
vdict = None
# We need a dict of name/value pairs to do namespace updates.
if isinstance(variables, dict):
vdict = variables
elif isinstance(variables, (str, list, tuple)):
if isinstance(variables, str):
vlist = variables.split()
else:
vlist = variables
vdict = {}
cf = sys._getframe(1)
for name in vlist:
try:
vdict[name] = eval(name, cf.f_globals, cf.f_locals)
except:
print('Could not get variable %s from %s' %
(name,cf.f_code.co_name))
else:
raise ValueError('variables must be a dict/str/list/tuple')
# Propagate variables to user namespace
self.user_ns.update(vdict)
# And configure interactive visibility
user_ns_hidden = self.user_ns_hidden
if interactive:
for name in vdict:
user_ns_hidden.pop(name, None)
else:
user_ns_hidden.update(vdict)
def drop_by_id(self, variables):
"""Remove a dict of variables from the user namespace, if they are the
same as the values in the dictionary.
This is intended for use by extensions: variables that they've added can
be taken back out if they are unloaded, without removing any that the
user has overwritten.
Parameters
----------
variables : dict
A dictionary mapping object names (as strings) to the objects.
"""
for name, obj in variables.items():
if name in self.user_ns and self.user_ns[name] is obj:
del self.user_ns[name]
self.user_ns_hidden.pop(name, None)
#-------------------------------------------------------------------------
# Things related to object introspection
#-------------------------------------------------------------------------
    def _ofind(self, oname, namespaces=None):
        """Find an object in the available namespaces.

        self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic

        Returns a dict describing the lookup: ``obj`` (the object, or None),
        ``found`` (bool), ``parent`` (object owning the final attribute, if
        a dotted path was traversed), ``ismagic``/``isalias`` (bools) and
        ``namespace`` (human-readable name of the namespace searched).

        Has special code to detect magic functions.
        """
        oname = oname.strip()
        # Bail out early unless oname is a dotted identifier path or starts
        # with a magic escape prefix (% / %%).
        if not oname.startswith(ESC_MAGIC) and \
            not oname.startswith(ESC_MAGIC2) and \
            not all(a.isidentifier() for a in oname.split(".")):
            return {'found': False}
        if namespaces is None:
            # Namespaces to search in:
            # Put them in a list. The order is important so that we
            # find things in the same order that Python finds them.
            namespaces = [ ('Interactive', self.user_ns),
                          ('Interactive (global)', self.user_global_ns),
                          ('Python builtin', builtin_mod.__dict__),
                          ]
        ismagic = False
        isalias = False
        found = False
        ospace = None
        parent = None
        obj = None
        # Look for the given name by splitting it in parts. If the head is
        # found, then we look for all the remaining parts as members, and only
        # declare success if we can find them all.
        oname_parts = oname.split('.')
        oname_head, oname_rest = oname_parts[0],oname_parts[1:]
        for nsname,ns in namespaces:
            try:
                obj = ns[oname_head]
            except KeyError:
                continue
            else:
                for idx, part in enumerate(oname_rest):
                    try:
                        parent = obj
                        # The last part is looked up in a special way to avoid
                        # descriptor invocation as it may raise or have side
                        # effects.
                        if idx == len(oname_rest) - 1:
                            obj = self._getattr_property(obj, part)
                        else:
                            obj = getattr(obj, part)
                    except:
                        # Blanket except b/c some badly implemented objects
                        # allow __getattr__ to raise exceptions other than
                        # AttributeError, which then crashes IPython.
                        break
                else:
                    # If we finish the for loop (no break), we got all members
                    found = True
                    ospace = nsname
                    break  # namespace loop
        # Try to see if it's magic
        if not found:
            obj = None
            if oname.startswith(ESC_MAGIC2):
                # Explicit cell magic (%%name).
                oname = oname.lstrip(ESC_MAGIC2)
                obj = self.find_cell_magic(oname)
            elif oname.startswith(ESC_MAGIC):
                # Explicit line magic (%name).
                oname = oname.lstrip(ESC_MAGIC)
                obj = self.find_line_magic(oname)
            else:
                # search without prefix, so run? will find %run?
                obj = self.find_line_magic(oname)
                if obj is None:
                    obj = self.find_cell_magic(oname)
            if obj is not None:
                found = True
                ospace = 'IPython internal'
                ismagic = True
                isalias = isinstance(obj, Alias)
        # Last try: special-case some literals like '', [], {}, etc:
        if not found and oname_head in ["''",'""','[]','{}','()']:
            obj = eval(oname_head)
            found = True
            ospace = 'Interactive'
        return {
                'obj':obj,
                'found':found,
                'parent':parent,
                'ismagic':ismagic,
                'isalias':isalias,
                'namespace':ospace
               }
@staticmethod
def _getattr_property(obj, attrname):
"""Property-aware getattr to use in object finding.
If attrname represents a property, return it unevaluated (in case it has
side effects or raises an error.
"""
if not isinstance(obj, type):
try:
# `getattr(type(obj), attrname)` is not guaranteed to return
# `obj`, but does so for property:
#
# property.__get__(self, None, cls) -> self
#
# The universal alternative is to traverse the mro manually
# searching for attrname in class dicts.
attr = getattr(type(obj), attrname)
except AttributeError:
pass
else:
# This relies on the fact that data descriptors (with both
# __get__ & __set__ magic methods) take precedence over
# instance-level attributes:
#
# class A(object):
# @property
# def foobar(self): return 123
# a = A()
# a.__dict__['foobar'] = 345
# a.foobar # == 123
#
# So, a property may be returned right away.
if isinstance(attr, property):
return attr
# Nothing helped, fall back.
return getattr(obj, attrname)
def _object_find(self, oname, namespaces=None):
"""Find an object and return a struct with info about it."""
return Struct(self._ofind(oname, namespaces))
def _inspect(self, meth, oname, namespaces=None, **kw):
"""Generic interface to the inspector system.
This function is meant to be called by pdef, pdoc & friends.
"""
info = self._object_find(oname, namespaces)
docformat = sphinxify if self.sphinxify_docstring else None
if info.found:
pmethod = getattr(self.inspector, meth)
# TODO: only apply format_screen to the plain/text repr of the mime
# bundle.
formatter = format_screen if info.ismagic else docformat
if meth == 'pdoc':
pmethod(info.obj, oname, formatter)
elif meth == 'pinfo':
pmethod(info.obj, oname, formatter, info,
enable_html_pager=self.enable_html_pager, **kw)
else:
pmethod(info.obj, oname)
else:
print('Object `%s` not found.' % oname)
return 'not found' # so callers can take other action
def object_inspect(self, oname, detail_level=0):
"""Get object info about oname"""
with self.builtin_trap:
info = self._object_find(oname)
if info.found:
return self.inspector.info(info.obj, oname, info=info,
detail_level=detail_level
)
else:
return oinspect.object_info(name=oname, found=False)
def object_inspect_text(self, oname, detail_level=0):
"""Get object info as formatted text"""
return self.object_inspect_mime(oname, detail_level)['text/plain']
def object_inspect_mime(self, oname, detail_level=0):
"""Get object info as a mimebundle of formatted representations.
A mimebundle is a dictionary, keyed by mime-type.
It must always have the key `'text/plain'`.
"""
with self.builtin_trap:
info = self._object_find(oname)
if info.found:
return self.inspector._get_info(info.obj, oname, info=info,
detail_level=detail_level
)
else:
raise KeyError(oname)
#-------------------------------------------------------------------------
# Things related to history management
#-------------------------------------------------------------------------
def init_history(self):
"""Sets up the command history, and starts regular autosaves."""
self.history_manager = HistoryManager(shell=self, parent=self)
self.configurables.append(self.history_manager)
#-------------------------------------------------------------------------
# Things related to exception handling and tracebacks (not debugging)
#-------------------------------------------------------------------------
    # Debugger class handed to the traceback handler below; subclasses may
    # override it to supply a richer debugger.
    debugger_cls = Pdb
    def init_traceback_handlers(self, custom_exceptions):
        """Create the syntax-error and interactive traceback handlers."""
        # Syntax error handler.
        self.SyntaxTB = ultratb.SyntaxTB(color_scheme='NoColor', parent=self)
        # The interactive one is initialized with an offset, meaning we always
        # want to remove the topmost item in the traceback, which is our own
        # internal code. Valid modes: ['Plain','Context','Verbose']
        self.InteractiveTB = ultratb.AutoFormattedTB(mode = 'Plain',
                                                     color_scheme='NoColor',
                                                     tb_offset = 1,
                                   check_cache=check_linecache_ipython,
                                   debugger_cls=self.debugger_cls, parent=self)
        # The instance will store a pointer to the system-wide exception hook,
        # so that runtime code (such as magics) can access it. This is because
        # during the read-eval loop, it may get temporarily overwritten.
        self.sys_excepthook = sys.excepthook
        # and add any custom exception handlers the user may have specified
        self.set_custom_exc(*custom_exceptions)
        # Set the exception mode
        self.InteractiveTB.set_mode(mode=self.xmode)
def set_custom_exc(self, exc_tuple, handler):
"""set_custom_exc(exc_tuple, handler)
Set a custom exception handler, which will be called if any of the
exceptions in exc_tuple occur in the mainloop (specifically, in the
run_code() method).
Parameters
----------
exc_tuple : tuple of exception classes
A *tuple* of exception classes, for which to call the defined
handler. It is very important that you use a tuple, and NOT A
LIST here, because of the way Python's except statement works. If
you only want to trap a single exception, use a singleton tuple::
exc_tuple == (MyCustomException,)
handler : callable
handler must have the following signature::
def my_handler(self, etype, value, tb, tb_offset=None):
...
return structured_traceback
Your handler must return a structured traceback (a list of strings),
or None.
This will be made into an instance method (via types.MethodType)
of IPython itself, and it will be called if any of the exceptions
listed in the exc_tuple are caught. If the handler is None, an
internal basic one is used, which just prints basic info.
To protect IPython from crashes, if your handler ever raises an
exception or returns an invalid result, it will be immediately
disabled.
WARNING: by putting in your own exception handler into IPython's main
execution loop, you run a very good chance of nasty crashes. This
facility should only be used if you really know what you are doing."""
if not isinstance(exc_tuple, tuple):
raise TypeError("The custom exceptions must be given as a tuple.")
def dummy_handler(self, etype, value, tb, tb_offset=None):
print('*** Simple custom exception handler ***')
print('Exception type :', etype)
print('Exception value:', value)
print('Traceback :', tb)
def validate_stb(stb):
"""validate structured traceback return type
return type of CustomTB *should* be a list of strings, but allow
single strings or None, which are harmless.
This function will *always* return a list of strings,
and will raise a TypeError if stb is inappropriate.
"""
msg = "CustomTB must return list of strings, not %r" % stb
if stb is None:
return []
elif isinstance(stb, str):
return [stb]
elif not isinstance(stb, list):
raise TypeError(msg)
# it's a list
for line in stb:
# check every element
if not isinstance(line, str):
raise TypeError(msg)
return stb
if handler is None:
wrapped = dummy_handler
else:
def wrapped(self,etype,value,tb,tb_offset=None):
"""wrap CustomTB handler, to protect IPython from user code
This makes it harder (but not impossible) for custom exception
handlers to crash IPython.
"""
try:
stb = handler(self,etype,value,tb,tb_offset=tb_offset)
return validate_stb(stb)
except:
# clear custom handler immediately
self.set_custom_exc((), None)
print("Custom TB Handler failed, unregistering", file=sys.stderr)
# show the exception in handler first
stb = self.InteractiveTB.structured_traceback(*sys.exc_info())
print(self.InteractiveTB.stb2text(stb))
print("The original exception:")
stb = self.InteractiveTB.structured_traceback(
(etype,value,tb), tb_offset=tb_offset
)
return stb
self.CustomTB = types.MethodType(wrapped,self)
self.custom_exceptions = exc_tuple
def excepthook(self, etype, value, tb):
"""One more defense for GUI apps that call sys.excepthook.
GUI frameworks like wxPython trap exceptions and call
sys.excepthook themselves. I guess this is a feature that
enables them to keep running after exceptions that would
otherwise kill their mainloop. This is a bother for IPython
which excepts to catch all of the program exceptions with a try:
except: statement.
Normally, IPython sets sys.excepthook to a CrashHandler instance, so if
any app directly invokes sys.excepthook, it will look to the user like
IPython crashed. In order to work around this, we can disable the
CrashHandler and replace it with this excepthook instead, which prints a
regular traceback using our InteractiveTB. In this fashion, apps which
call sys.excepthook will generate a regular-looking exception from
IPython, and the CrashHandler will only be triggered by real IPython
crashes.
This hook should be used sparingly, only in places which are not likely
to be true IPython errors.
"""
self.showtraceback((etype, value, tb), tb_offset=0)
def _get_exc_info(self, exc_tuple=None):
"""get exc_info from a given tuple, sys.exc_info() or sys.last_type etc.
Ensures sys.last_type,value,traceback hold the exc_info we found,
from whichever source.
raises ValueError if none of these contain any information
"""
if exc_tuple is None:
etype, value, tb = sys.exc_info()
else:
etype, value, tb = exc_tuple
if etype is None:
if hasattr(sys, 'last_type'):
etype, value, tb = sys.last_type, sys.last_value, \
sys.last_traceback
if etype is None:
raise ValueError("No exception to find")
# Now store the exception info in sys.last_type etc.
# WARNING: these variables are somewhat deprecated and not
# necessarily safe to use in a threaded environment, but tools
# like pdb depend on their existence, so let's set them. If we
# find problems in the field, we'll need to revisit their use.
sys.last_type = etype
sys.last_value = value
sys.last_traceback = tb
return etype, value, tb
def show_usage_error(self, exc):
"""Show a short message for UsageErrors
These are special exceptions that shouldn't show a traceback.
"""
print("UsageError: %s" % exc, file=sys.stderr)
def get_exception_only(self, exc_tuple=None):
"""
Return as a string (ending with a newline) the exception that
just occurred, without any traceback.
"""
etype, value, tb = self._get_exc_info(exc_tuple)
msg = traceback.format_exception_only(etype, value)
return ''.join(msg)
    def showtraceback(self, exc_tuple=None, filename=None, tb_offset=None,
                      exception_only=False, running_compiled_code=False):
        """Display the exception that just occurred.

        If nothing is known about the exception, this is the method which
        should be used throughout the code for presenting user tracebacks,
        rather than directly invoking the InteractiveTB object.

        A specific showsyntaxerror() also exists, but this method can take
        care of calling it if needed, so unless you are explicitly catching a
        SyntaxError exception, don't try to analyze the stack manually and
        simply call this method."""
        try:
            try:
                etype, value, tb = self._get_exc_info(exc_tuple)
            except ValueError:
                # _get_exc_info found nothing anywhere; nothing to display.
                print('No traceback available to show.', file=sys.stderr)
                return
            if issubclass(etype, SyntaxError):
                # Though this won't be called by syntax errors in the input
                # line, there may be SyntaxError cases with imported code.
                self.showsyntaxerror(filename, running_compiled_code)
            elif etype is UsageError:
                # UsageErrors get a short message, no traceback.
                self.show_usage_error(value)
            else:
                if exception_only:
                    stb = ['An exception has occurred, use %tb to see '
                           'the full traceback.\n']
                    stb.extend(self.InteractiveTB.get_exception_only(etype,
                                                                     value))
                else:
                    try:
                        # Exception classes can customise their traceback - we
                        # use this in IPython.parallel for exceptions occurring
                        # in the engines. This should return a list of strings.
                        stb = value._render_traceback_()
                    except Exception:
                        # No custom rendering (the usual case): build the
                        # structured traceback ourselves.
                        stb = self.InteractiveTB.structured_traceback(etype,
                                            value, tb, tb_offset=tb_offset)
                    self._showtraceback(etype, value, stb)
                    if self.call_pdb:
                        # drop into debugger
                        self.debugger(force=True)
                    # Full-traceback branch displays and returns here; the
                    # call below only runs for exception_only=True.
                    return
                # Actually show the traceback
                self._showtraceback(etype, value, stb)
        except KeyboardInterrupt:
            # User interrupted traceback rendering; show just the exception.
            print('\n' + self.get_exception_only(), file=sys.stderr)
def _showtraceback(self, etype, evalue, stb):
"""Actually show a traceback.
Subclasses may override this method to put the traceback on a different
place, like a side channel.
"""
print(self.InteractiveTB.stb2text(stb))
def showsyntaxerror(self, filename=None, running_compiled_code=False):
"""Display the syntax error that just occurred.
This doesn't display a stack trace because there isn't one.
If a filename is given, it is stuffed in the exception instead
of what was there before (because Python's parser always uses
"<string>" when reading from a string).
If the syntax error occurred when running a compiled code (i.e. running_compile_code=True),
longer stack trace will be displayed.
"""
etype, value, last_traceback = self._get_exc_info()
if filename and issubclass(etype, SyntaxError):
try:
value.filename = filename
except:
# Not the format we expect; leave it alone
pass
# If the error occured when executing compiled code, we should provide full stacktrace.
elist = traceback.extract_tb(last_traceback) if running_compiled_code else []
stb = self.SyntaxTB.structured_traceback(etype, value, elist)
self._showtraceback(etype, value, stb)
# This is overridden in TerminalInteractiveShell to show a message about
# the %paste magic.
def showindentationerror(self):
"""Called by run_cell when there's an IndentationError in code entered
at the prompt.
This is overridden in TerminalInteractiveShell to show a message about
the %paste magic."""
self.showsyntaxerror()
#-------------------------------------------------------------------------
# Things related to readline
#-------------------------------------------------------------------------
def init_readline(self):
"""DEPRECATED
Moved to terminal subclass, here only to simplify the init logic."""
# Set a number of methods that depend on readline to be no-op
warnings.warn('`init_readline` is no-op since IPython 5.0 and is Deprecated',
DeprecationWarning, stacklevel=2)
self.set_custom_completer = no_op
    @skip_doctest
    def set_next_input(self, s, replace=False):
        """ Sets the 'default' input string for the next command line.

        Example::

            In [1]: _ip.set_next_input("Hello Word")
            In [2]: Hello Word_  # cursor is here
        """
        # NOTE(review): `replace` is accepted but unused by this base
        # implementation -- presumably honored by subclasses/frontends that
        # consume rl_next_input; confirm before relying on it.
        self.rl_next_input = s
def _indent_current_str(self):
"""return the current level of indentation as a string"""
return self.input_splitter.indent_spaces * ' '
#-------------------------------------------------------------------------
# Things related to text completion
#-------------------------------------------------------------------------
def init_completer(self):
"""Initialize the completion machinery.
This creates completion machinery that can be used by client code,
either interactively in-process (typically triggered by the readline
library), programmatically (such as in test suites) or out-of-process
(typically over the network by remote frontends).
"""
from IPython.core.completer import IPCompleter
from IPython.core.completerlib import (module_completer,
magic_run_completer, cd_completer, reset_completer)
self.Completer = IPCompleter(shell=self,
namespace=self.user_ns,
global_namespace=self.user_global_ns,
parent=self,
)
self.configurables.append(self.Completer)
# Add custom completers to the basic ones built into IPCompleter
sdisp = self.strdispatchers.get('complete_command', StrDispatch())
self.strdispatchers['complete_command'] = sdisp
self.Completer.custom_completers = sdisp
self.set_hook('complete_command', module_completer, str_key = 'import')
self.set_hook('complete_command', module_completer, str_key = 'from')
self.set_hook('complete_command', module_completer, str_key = '%aimport')
self.set_hook('complete_command', magic_run_completer, str_key = '%run')
self.set_hook('complete_command', cd_completer, str_key = '%cd')
self.set_hook('complete_command', reset_completer, str_key = '%reset')
def complete(self, text, line=None, cursor_pos=None):
"""Return the completed text and a list of completions.
Parameters
----------
text : string
A string of text to be completed on. It can be given as empty and
instead a line/position pair are given. In this case, the
completer itself will split the line like readline does.
line : string, optional
The complete line that text is part of.
cursor_pos : int, optional
The position of the cursor on the input line.
Returns
-------
text : string
The actual text that was completed.
matches : list
A sorted list with all possible completions.
The optional arguments allow the completion to take more context into
account, and are part of the low-level completion API.
This is a wrapper around the completion mechanism, similar to what
readline does at the command line when the TAB key is hit. By
exposing it as a method, it can be used by other non-readline
environments (such as GUIs) for text completion.
Simple usage example:
In [1]: x = 'hello'
In [2]: _ip.complete('x.l')
Out[2]: ('x.l', ['x.ljust', 'x.lower', 'x.lstrip'])
"""
# Inject names into __builtin__ so we can complete on the added names.
with self.builtin_trap:
return self.Completer.complete(text, line, cursor_pos)
def set_custom_completer(self, completer, pos=0):
"""Adds a new custom completer function.
The position argument (defaults to 0) is the index in the completers
list where you want the completer to be inserted."""
newcomp = types.MethodType(completer,self.Completer)
self.Completer.matchers.insert(pos,newcomp)
def set_completer_frame(self, frame=None):
"""Set the frame of the completer."""
if frame:
self.Completer.namespace = frame.f_locals
self.Completer.global_namespace = frame.f_globals
else:
self.Completer.namespace = self.user_ns
self.Completer.global_namespace = self.user_global_ns
#-------------------------------------------------------------------------
# Things related to magics
#-------------------------------------------------------------------------
def init_magics(self):
from IPython.core import magics as m
self.magics_manager = magic.MagicsManager(shell=self,
parent=self,
user_magics=m.UserMagics(self))
self.configurables.append(self.magics_manager)
# Expose as public API from the magics manager
self.register_magics = self.magics_manager.register
self.register_magics(m.AutoMagics, m.BasicMagics, m.CodeMagics,
m.ConfigMagics, m.DisplayMagics, m.ExecutionMagics,
m.ExtensionMagics, m.HistoryMagics, m.LoggingMagics,
m.NamespaceMagics, m.OSMagics, m.PylabMagics, m.ScriptMagics,
)
# Register Magic Aliases
mman = self.magics_manager
# FIXME: magic aliases should be defined by the Magics classes
# or in MagicsManager, not here
mman.register_alias('ed', 'edit')
mman.register_alias('hist', 'history')
mman.register_alias('rep', 'recall')
mman.register_alias('SVG', 'svg', 'cell')
mman.register_alias('HTML', 'html', 'cell')
mman.register_alias('file', 'writefile', 'cell')
# FIXME: Move the color initialization to the DisplayHook, which
# should be split into a prompt manager and displayhook. We probably
# even need a centralize colors management object.
self.magic('colors %s' % self.colors)
    # Defined here so that it's included in the documentation
    @functools.wraps(magic.MagicsManager.register_function)
    def register_magic_function(self, func, magic_kind='line', magic_name=None):
        # Thin passthrough: the docstring and signature metadata come from
        # MagicsManager.register_function via functools.wraps above.
        self.magics_manager.register_function(func,
                                 magic_kind=magic_kind, magic_name=magic_name)
def run_line_magic(self, magic_name, line):
    """Execute the given line magic.

    Parameters
    ----------
    magic_name : str
        Name of the desired magic function, without '%' prefix.
    line : str
        The rest of the input line as a single string.
    """
    fn = self.find_line_magic(magic_name)
    if fn is None:
        # Not found as a line magic; check whether a cell magic of the same
        # name exists so we can give a more helpful error message.
        cm = self.find_cell_magic(magic_name)
        etpl = "Line magic function `%%%s` not found%s."
        extra = '' if cm is None else (' (But cell magic `%%%%%s` exists, '
                                       'did you mean that instead?)' % magic_name )
        error(etpl % (magic_name, extra))
    else:
        # Note: this is the distance in the stack to the user's frame.
        # This will need to be updated if the internal calling logic gets
        # refactored, or else we'll be expanding the wrong variables.
        stack_depth = 2
        magic_arg_s = self.var_expand(line, stack_depth)
        # Put magic args in a list so we can call with f(*a) syntax
        args = [magic_arg_s]
        kwargs = {}
        # Grab local namespace if we need it:
        if getattr(fn, "needs_local_scope", False):
            kwargs['local_ns'] = sys._getframe(stack_depth).f_locals
        with self.builtin_trap:
            result = fn(*args, **kwargs)
        return result
def run_cell_magic(self, magic_name, line, cell):
    """Execute the given cell magic.

    Parameters
    ----------
    magic_name : str
        Name of the desired magic function, without '%' prefix.
    line : str
        The rest of the first input line as a single string.
    cell : str
        The body of the cell as a (possibly multiline) string.
    """
    fn = self.find_cell_magic(magic_name)
    if fn is None:
        # Not found as a cell magic; check for a same-named line magic to
        # produce a more helpful error message.
        lm = self.find_line_magic(magic_name)
        etpl = "Cell magic `%%{0}` not found{1}."
        extra = '' if lm is None else (' (But line magic `%{0}` exists, '
                                       'did you mean that instead?)'.format(magic_name))
        error(etpl.format(magic_name, extra))
    elif cell == '':
        # A cell magic with an empty body is almost always a user mistake.
        message = '%%{0} is a cell magic, but the cell body is empty.'.format(magic_name)
        if self.find_line_magic(magic_name) is not None:
            message += ' Did you mean the line magic %{0} (single %)?'.format(magic_name)
        raise UsageError(message)
    else:
        # Note: this is the distance in the stack to the user's frame.
        # This will need to be updated if the internal calling logic gets
        # refactored, or else we'll be expanding the wrong variables.
        stack_depth = 2
        magic_arg_s = self.var_expand(line, stack_depth)
        with self.builtin_trap:
            result = fn(magic_arg_s, cell)
        return result
def find_line_magic(self, magic_name):
    """Look up a registered line magic by name.

    Returns the magic callable, or None when no such magic exists.
    """
    line_table = self.magics_manager.magics['line']
    return line_table.get(magic_name)
def find_cell_magic(self, magic_name):
    """Look up a registered cell magic by name.

    Returns the magic callable, or None when no such magic exists.
    """
    cell_table = self.magics_manager.magics['cell']
    return cell_table.get(magic_name)
def find_magic(self, magic_name, magic_kind='line'):
    """Look up a magic of the given kind ('line' by default) by name.

    Returns the magic callable, or None when no such magic exists.
    """
    registry = self.magics_manager.magics
    return registry[magic_kind].get(magic_name)
def magic(self, arg_s):
    """DEPRECATED. Use run_line_magic() instead.

    Call a magic function by name.

    Input: a string containing the name of the magic function to call and
    any additional arguments to be passed to the magic.

    magic('name -opt foo bar') is equivalent to typing at the ipython
    prompt:

        In[1]: %name -opt foo bar

    To call a magic without arguments, simply use magic('name').

    This provides a proper Python function to call IPython's magics in any
    valid Python code you can type at the interpreter, including loops and
    compound statements.
    """
    # TODO: should we issue a loud deprecation warning here?
    # Split "name rest-of-line" on the first space; the remainder may be ''.
    magic_name, _, magic_arg_s = arg_s.partition(' ')
    # Tolerate a leading magic escape prefix (prefilter.ESC_MAGIC) on the name.
    magic_name = magic_name.lstrip(prefilter.ESC_MAGIC)
    return self.run_line_magic(magic_name, magic_arg_s)
#-------------------------------------------------------------------------
# Things related to macros
#-------------------------------------------------------------------------
def define_macro(self, name, themacro):
    """Define a new macro and bind it in the user namespace.

    Parameters
    ----------
    name : str
        The name of the macro.
    themacro : str or Macro
        The action to do upon invoking the macro. If a string, a new
        Macro object is created by passing the string to it.
    """
    from IPython.core import macro
    # Promote plain strings to Macro objects; anything else must already
    # be a Macro instance.
    candidate = macro.Macro(themacro) if isinstance(themacro, str) else themacro
    if not isinstance(candidate, macro.Macro):
        raise ValueError('A macro must be a string or a Macro instance.')
    self.user_ns[name] = candidate
#-------------------------------------------------------------------------
# Things related to the running of system commands
#-------------------------------------------------------------------------
def system_piped(self, cmd):
    """Call the given cmd in a subprocess, piping stdout/err.

    Parameters
    ----------
    cmd : str
        Command to execute (can not end in '&', as background processes are
        not supported. Should not be a command that expects input
        other than simple text.

    The command's exit code is stored in user_ns['_exit_code'] rather
    than returned.
    """
    if cmd.rstrip().endswith('&'):
        # this is *far* from a rigorous test
        # We do not support backgrounding processes because we either use
        # pexpect or pipes to read from. Users can always just call
        # os.system() or use ip.system=ip.system_raw
        # if they really want a background process.
        raise OSError("Background processes not supported.")
    # we explicitly do NOT return the subprocess status code, because
    # a non-None value would trigger :func:`sys.displayhook` calls.
    # Instead, we store the exit_code in user_ns.
    # depth=1 expands variables from the caller's frame, not this one.
    self.user_ns['_exit_code'] = system(self.var_expand(cmd, depth=1))
def system_raw(self, cmd):
    """Call the given cmd in a subprocess using os.system on Windows or
    subprocess.call using the system shell on other platforms.

    Parameters
    ----------
    cmd : str
        Command to execute.

    The exit code is stored in user_ns['_exit_code'], not returned.
    """
    cmd = self.var_expand(cmd, depth=1)
    # protect os.system from UNC paths on Windows, which it can't handle:
    if sys.platform == 'win32':
        from IPython.utils._process_win32 import AvoidUNCPath
        with AvoidUNCPath() as path:
            if path is not None:
                cmd = '"pushd %s &&"%s' % (path, cmd)
            try:
                ec = os.system(cmd)
            except KeyboardInterrupt:
                print('\n' + self.get_exception_only(), file=sys.stderr)
                # -2 mirrors the posix convention below: -SIGINT.
                ec = -2
    else:
        # For posix the result of the subprocess.call() below is an exit
        # code, which by convention is zero for success, positive for
        # program failure. Exit codes above 128 are reserved for signals,
        # and the formula for converting a signal to an exit code is usually
        # signal_number+128. To more easily differentiate between exit
        # codes and signals, ipython uses negative numbers. For instance
        # since control-c is signal 2 but exit code 130, ipython's
        # _exit_code variable will read -2. Note that some shells like
        # csh and fish don't follow sh/bash conventions for exit codes.
        executable = os.environ.get('SHELL', None)
        try:
            # Use env shell instead of default /bin/sh
            ec = subprocess.call(cmd, shell=True, executable=executable)
        except KeyboardInterrupt:
            # intercept control-C; a long traceback is not useful here
            print('\n' + self.get_exception_only(), file=sys.stderr)
            ec = 130
        if ec > 128:
            ec = -(ec - 128)
    # We explicitly do NOT return the subprocess status code, because
    # a non-None value would trigger :func:`sys.displayhook` calls.
    # Instead, we store the exit_code in user_ns. Note the semantics
    # of _exit_code: for control-c, _exit_code == -signal.SIGNIT,
    # but raising SystemExit(_exit_code) will give status 254!
    self.user_ns['_exit_code'] = ec

# use piped system by default, because it is better behaved
system = system_piped
def getoutput(self, cmd, split=True, depth=0):
    """Get output (possibly including stderr) from a subprocess.

    Parameters
    ----------
    cmd : str
        Command to execute (can not end in '&', as background processes are
        not supported.
    split : bool, optional
        If True, split the output into an IPython SList. Otherwise, an
        IPython LSString is returned. These are objects similar to normal
        lists and strings, with a few convenience attributes for easier
        manipulation of line-based output. You can use '?' on them for
        details.
    depth : int, optional
        How many frames above the caller are the local variables which should
        be expanded in the command string? The default (0) assumes that the
        expansion variables are in the stack frame calling this function.
    """
    if cmd.rstrip().endswith('&'):
        # this is *far* from a rigorous test
        raise OSError("Background processes not supported.")
    # NOTE: inside this method the bare name `getoutput` resolves to the
    # module-level helper, not recursively to this method.
    out = getoutput(self.var_expand(cmd, depth=depth+1))
    if split:
        out = SList(out.splitlines())
    else:
        out = LSString(out)
    return out
#-------------------------------------------------------------------------
# Things related to aliases
#-------------------------------------------------------------------------
def init_alias(self):
    """Create the AliasManager and register it as a configurable."""
    self.alias_manager = AliasManager(shell=self, parent=self)
    self.configurables.append(self.alias_manager)
#-------------------------------------------------------------------------
# Things related to extensions
#-------------------------------------------------------------------------
def init_extension_manager(self):
    """Create the ExtensionManager and register it as a configurable."""
    self.extension_manager = ExtensionManager(shell=self, parent=self)
    self.configurables.append(self.extension_manager)
#-------------------------------------------------------------------------
# Things related to payloads
#-------------------------------------------------------------------------
def init_payload(self):
    """Create the PayloadManager and register it as a configurable."""
    self.payload_manager = PayloadManager(parent=self)
    self.configurables.append(self.payload_manager)
#-------------------------------------------------------------------------
# Things related to the prefilter
#-------------------------------------------------------------------------
def init_prefilter(self):
    """Create the PrefilterManager and expose its line-prefilter hook."""
    self.prefilter_manager = PrefilterManager(shell=self, parent=self)
    self.configurables.append(self.prefilter_manager)
    # Ultimately this will be refactored in the new interpreter code, but
    # for now, we should expose the main prefilter method (there's legacy
    # code out there that may rely on this).
    self.prefilter = self.prefilter_manager.prefilter_lines
def auto_rewrite_input(self, cmd):
    """Print to the screen the rewritten form of the user's command.

    This shows visual feedback by rewriting input lines that cause
    automatic calling to kick in, like::

        /f x

    into::

        ------> f(x)

    after the user's input prompt. This helps the user understand that the
    input line was transformed automatically by IPython.  Suppressed when
    show_rewritten_input is off.
    """
    if self.show_rewritten_input:
        # This is overridden in TerminalInteractiveShell to use fancy prompts
        print("------> " + cmd)
#-------------------------------------------------------------------------
# Things related to extracting values/expressions from kernel and user_ns
#-------------------------------------------------------------------------
def _user_obj_error(self):
    """return simple exception dict

    for use in user_expressions
    """
    # _get_exc_info presumably returns a sys.exc_info()-style triple for
    # the most recent error -- TODO confirm against its definition.
    etype, evalue, tb = self._get_exc_info()
    # Render only the exception line(s), not the full traceback.
    stb = self.InteractiveTB.get_exception_only(etype, evalue)
    exc_info = {
        u'status' : 'error',
        u'traceback' : stb,
        u'ename' : etype.__name__,
        u'evalue' : py3compat.safe_unicode(evalue),
    }
    return exc_info
def _format_user_obj(self, obj):
"""format a user object to display dict
for use in user_expressions
"""
data, md = self.display_formatter.format(obj)
value = {
'status' : 'ok',
'data' : data,
'metadata' : md,
}
return value
def user_expressions(self, expressions):
    """Evaluate a dict of expressions in the user's namespace.

    Parameters
    ----------
    expressions : dict
        A dict with string keys and string values. The expression values
        should be valid Python expressions, each of which will be evaluated
        in the user namespace.

    Returns
    -------
    A dict, keyed like the input expressions dict, with the rich mime-typed
    display_data of each value.
    """
    results = {}
    for key, expr in expressions.items():
        # Evaluation and formatting are both inside the try: a failure in
        # either produces an error dict for this key.
        try:
            results[key] = self._format_user_obj(
                eval(expr, self.user_global_ns, self.user_ns))
        except:
            results[key] = self._user_obj_error()
    return results
#-------------------------------------------------------------------------
# Things related to the running of code
#-------------------------------------------------------------------------
def ex(self, cmd):
    """Execute a normal python statement in user namespace."""
    ns_globals, ns_locals = self.user_global_ns, self.user_ns
    with self.builtin_trap:
        exec(cmd, ns_globals, ns_locals)
def ev(self, expr):
    """Evaluate python expression expr in user namespace.

    Returns the result of evaluation
    """
    ns_globals, ns_locals = self.user_global_ns, self.user_ns
    with self.builtin_trap:
        return eval(expr, ns_globals, ns_locals)
def safe_execfile(self, fname, *where, exit_ignore=False, raise_exceptions=False, shell_futures=False):
    """A safe version of the builtin execfile().

    This version will never throw an exception, but instead print
    helpful error messages to the screen. This only works on pure
    Python files with the .py extension.

    Parameters
    ----------
    fname : string
        The name of the file to be executed.
    where : tuple
        One or two namespaces, passed to execfile() as (globals,locals).
        If only one is given, it is passed as both.
    exit_ignore : bool (False)
        If True, then silence SystemExit for non-zero status (it is always
        silenced for zero status, as it is so common).
    raise_exceptions : bool (False)
        If True raise exceptions everywhere. Meant for testing.
    shell_futures : bool (False)
        If True, the code will share future statements with the interactive
        shell. It will both be affected by previous __future__ imports, and
        any __future__ imports in the code will affect the shell. If False,
        __future__ imports are not shared in either direction.
    """
    fname = os.path.abspath(os.path.expanduser(fname))
    # Make sure we can open the file
    try:
        with open(fname):
            pass
    except:
        warn('Could not open file <%s> for safe execution.' % fname)
        return
    # Find things also in current directory. This is needed to mimic the
    # behavior of running a script from the system command line, where
    # Python inserts the script's directory into sys.path
    dname = os.path.dirname(fname)
    with prepended_to_syspath(dname), self.builtin_trap:
        try:
            # Pad *where* with None so one namespace is used for both
            # globals and locals when only one was given.
            glob, loc = (where + (None, ))[:2]
            py3compat.execfile(
                fname, glob, loc,
                self.compile if shell_futures else None)
        except SystemExit as status:
            # If the call was made with 0 or None exit status (sys.exit(0)
            # or sys.exit() ), don't bother showing a traceback, as both of
            # these are considered normal by the OS:
            # > python -c'import sys;sys.exit(0)'; echo $?
            # 0
            # > python -c'import sys;sys.exit()'; echo $?
            # 0
            # For other exit status, we show the exception unless
            # explicitly silenced, but only in short form.
            if status.code:
                if raise_exceptions:
                    raise
                if not exit_ignore:
                    self.showtraceback(exception_only=True)
        except:
            if raise_exceptions:
                raise
            # tb offset is 2 because we wrap execfile
            self.showtraceback(tb_offset=2)
def safe_execfile_ipy(self, fname, shell_futures=False, raise_exceptions=False):
    """Like safe_execfile, but for .ipy or .ipynb files with IPython syntax.

    Parameters
    ----------
    fname : str
        The name of the file to execute. The filename must have a
        .ipy or .ipynb extension.
    shell_futures : bool (False)
        If True, the code will share future statements with the interactive
        shell. It will both be affected by previous __future__ imports, and
        any __future__ imports in the code will affect the shell. If False,
        __future__ imports are not shared in either direction.
    raise_exceptions : bool (False)
        If True raise exceptions everywhere. Meant for testing.
    """
    fname = os.path.abspath(os.path.expanduser(fname))
    # Make sure we can open the file
    try:
        with open(fname):
            pass
    except:
        warn('Could not open file <%s> for safe execution.' % fname)
        return
    # Find things also in current directory. This is needed to mimic the
    # behavior of running a script from the system command line, where
    # Python inserts the script's directory into sys.path
    dname = os.path.dirname(fname)

    def get_cells():
        """generator for sequence of code blocks to run"""
        if fname.endswith('.ipynb'):
            # Notebook: yield each code cell's source individually.
            from nbformat import read
            nb = read(fname, as_version=4)
            if not nb.cells:
                return
            for cell in nb.cells:
                if cell.cell_type == 'code':
                    yield cell.source
        else:
            # Plain .ipy script: the whole file is one cell.
            with open(fname) as f:
                yield f.read()

    with prepended_to_syspath(dname):
        try:
            for cell in get_cells():
                result = self.run_cell(cell, silent=True, shell_futures=shell_futures)
                if raise_exceptions:
                    result.raise_error()
                elif not result.success:
                    # Stop at the first failed cell when not raising.
                    break
        except:
            if raise_exceptions:
                raise
            self.showtraceback()
            warn('Unknown failure executing file: <%s>' % fname)
def safe_run_module(self, mod_name, where):
    """A safe version of runpy.run_module().

    This version will never throw an exception, but instead print
    helpful error messages to the screen.

    `SystemExit` exceptions with status code 0 or None are ignored.

    Parameters
    ----------
    mod_name : string
        The name of the module to be executed.
    where : dict
        The globals namespace.
    """
    try:
        try:
            # Run the module as __main__ and merge its resulting globals
            # into the caller-provided namespace.
            where.update(
                runpy.run_module(str(mod_name), run_name="__main__",
                                 alter_sys=True)
            )
        except SystemExit as status:
            # Re-raise non-zero exits so the outer handler reports them.
            if status.code:
                raise
    except:
        self.showtraceback()
        warn('Unknown failure executing module: <%s>' % mod_name)
def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=True):
    """Run a complete IPython cell.

    Parameters
    ----------
    raw_cell : str
        The code (including IPython code such as %magic functions) to run.
    store_history : bool
        If True, the raw and translated cell will be stored in IPython's
        history. For user code calling back into IPython's machinery, this
        should be set to False.
    silent : bool
        If True, avoid side-effects, such as implicit displayhooks and
        and logging. silent=True forces store_history=False.
    shell_futures : bool
        If True, the code will share future statements with the interactive
        shell. It will both be affected by previous __future__ imports, and
        any __future__ imports in the code will affect the shell. If False,
        __future__ imports are not shared in either direction.

    Returns
    -------
    result : :class:`ExecutionResult`
    """
    result = ExecutionResult()
    # An empty or whitespace-only cell is a successful no-op.
    if (not raw_cell) or raw_cell.isspace():
        self.last_execution_succeeded = True
        return result
    if silent:
        store_history = False
    if store_history:
        result.execution_count = self.execution_count

    def error_before_exec(value):
        # Closure: record a pre-execution failure on `result` and return it.
        result.error_before_exec = value
        self.last_execution_succeeded = False
        return result

    self.events.trigger('pre_execute')
    if not silent:
        self.events.trigger('pre_run_cell')
    # If any of our input transformation (input_transformer_manager or
    # prefilter_manager) raises an exception, we store it in this variable
    # so that we can display the error after logging the input and storing
    # it in the history.
    preprocessing_exc_tuple = None
    try:
        # Static input transformations
        cell = self.input_transformer_manager.transform_cell(raw_cell)
    except SyntaxError:
        preprocessing_exc_tuple = sys.exc_info()
        cell = raw_cell # cell has to exist so it can be stored/logged
    else:
        if len(cell.splitlines()) == 1:
            # Dynamic transformations - only applied for single line commands
            with self.builtin_trap:
                try:
                    # use prefilter_lines to handle trailing newlines
                    # restore trailing newline for ast.parse
                    cell = self.prefilter_manager.prefilter_lines(cell) + '\n'
                except Exception:
                    # don't allow prefilter errors to crash IPython
                    preprocessing_exc_tuple = sys.exc_info()
    # Store raw and processed history
    if store_history:
        self.history_manager.store_inputs(self.execution_count,
                                          cell, raw_cell)
    if not silent:
        self.logger.log(cell, raw_cell)
    # Display the exception if input processing failed.
    if preprocessing_exc_tuple is not None:
        self.showtraceback(preprocessing_exc_tuple)
        if store_history:
            self.execution_count += 1
        return error_before_exec(preprocessing_exc_tuple[2])
    # Our own compiler remembers the __future__ environment. If we want to
    # run code with a separate __future__ environment, use the default
    # compiler
    compiler = self.compile if shell_futures else CachingCompiler()
    with self.builtin_trap:
        cell_name = self.compile.cache(cell, self.execution_count)
        with self.display_trap:
            # Compile to bytecode
            try:
                code_ast = compiler.ast_parse(cell, filename=cell_name)
            except self.custom_exceptions as e:
                etype, value, tb = sys.exc_info()
                self.CustomTB(etype, value, tb)
                return error_before_exec(e)
            except IndentationError as e:
                self.showindentationerror()
                if store_history:
                    self.execution_count += 1
                return error_before_exec(e)
            except (OverflowError, SyntaxError, ValueError, TypeError,
                    MemoryError) as e:
                self.showsyntaxerror()
                if store_history:
                    self.execution_count += 1
                return error_before_exec(e)
            # Apply AST transformations
            try:
                code_ast = self.transform_ast(code_ast)
            except InputRejected as e:
                self.showtraceback()
                if store_history:
                    self.execution_count += 1
                return error_before_exec(e)
            # Give the displayhook a reference to our ExecutionResult so it
            # can fill in the output value.
            self.displayhook.exec_result = result
            # Execute the user code
            interactivity = "none" if silent else self.ast_node_interactivity
            has_raised = self.run_ast_nodes(code_ast.body, cell_name,
               interactivity=interactivity, compiler=compiler, result=result)
            self.last_execution_succeeded = not has_raised
            # Reset this so later displayed values do not modify the
            # ExecutionResult
            self.displayhook.exec_result = None
            self.events.trigger('post_execute')
            if not silent:
                self.events.trigger('post_run_cell')
    if store_history:
        # Write output to the database. Does nothing unless
        # history output logging is enabled.
        self.history_manager.store_output(self.execution_count)
        # Each cell is a *single* input, regardless of how many lines it has
        self.execution_count += 1
    return result
def transform_ast(self, node):
    """Apply the AST transformations from self.ast_transformers

    Parameters
    ----------
    node : ast.Node
        The root node to be transformed. Typically called with the ast.Module
        produced by parsing user input.

    Returns
    -------
    An ast.Node corresponding to the node it was called with. Note that it
    may also modify the passed object, so don't rely on references to the
    original AST.
    """
    # NOTE(review): a failing transformer is removed from the list *while*
    # it is being iterated; in CPython that can cause the transformer after
    # the removed one to be skipped for this call -- confirm if intentional.
    for transformer in self.ast_transformers:
        try:
            node = transformer.visit(node)
        except InputRejected:
            # User-supplied AST transformers can reject an input by raising
            # an InputRejected. Short-circuit in this case so that we
            # don't unregister the transform.
            raise
        except Exception:
            warn("AST transformer %r threw an error. It will be unregistered." % transformer)
            self.ast_transformers.remove(transformer)
    if self.ast_transformers:
        # Transformers may create nodes lacking position info; repair them.
        ast.fix_missing_locations(node)
    return node
def run_ast_nodes(self, nodelist, cell_name, interactivity='last_expr',
                  compiler=compile, result=None):
    """Run a sequence of AST nodes. The execution mode depends on the
    interactivity parameter.

    Parameters
    ----------
    nodelist : list
        A sequence of AST nodes to run.
    cell_name : str
        Will be passed to the compiler as the filename of the cell. Typically
        the value returned by ip.compile.cache(cell).
    interactivity : str
        'all', 'last', 'last_expr' or 'none', specifying which nodes should be
        run interactively (displaying output from expressions). 'last_expr'
        will run the last node interactively only if it is an expression (i.e.
        expressions in loops or other blocks are not displayed. Other values
        for this parameter will raise a ValueError.
    compiler : callable
        A function with the same interface as the built-in compile(), to turn
        the AST nodes into code objects. Default is the built-in compile().
    result : ExecutionResult, optional
        An object to store exceptions that occur during execution.

    Returns
    -------
    True if an exception occurred while running code, False if it finished
    running.
    """
    # Empty node list: nothing to do (implicitly returns None, i.e. falsy).
    if not nodelist:
        return
    # Resolve 'last_expr' to 'last' or 'none' based on the final node's type.
    if interactivity == 'last_expr':
        if isinstance(nodelist[-1], ast.Expr):
            interactivity = "last"
        else:
            interactivity = "none"
    if interactivity == 'none':
        to_run_exec, to_run_interactive = nodelist, []
    elif interactivity == 'last':
        to_run_exec, to_run_interactive = nodelist[:-1], nodelist[-1:]
    elif interactivity == 'all':
        to_run_exec, to_run_interactive = [], nodelist
    else:
        raise ValueError("Interactivity was %r" % interactivity)
    try:
        # NOTE(review): ast.Module is called with a single argument; on
        # Python 3.8+ it also expects type_ignores -- confirm target version.
        for i, node in enumerate(to_run_exec):
            mod = ast.Module([node])
            code = compiler(mod, cell_name, "exec")
            if self.run_code(code, result):
                return True
        for i, node in enumerate(to_run_interactive):
            # "single" mode triggers the displayhook for expression values.
            mod = ast.Interactive([node])
            code = compiler(mod, cell_name, "single")
            if self.run_code(code, result):
                return True
        # Flush softspace
        if softspace(sys.stdout, 0):
            print()
    except:
        # It's possible to have exceptions raised here, typically by
        # compilation of odd code (such as a naked 'return' outside a
        # function) that did parse but isn't valid. Typically the exception
        # is a SyntaxError, but it's safest just to catch anything and show
        # the user a traceback.
        # We do only one try/except outside the loop to minimize the impact
        # on runtime, and also because if any node in the node list is
        # broken, we should stop execution completely.
        if result:
            result.error_before_exec = sys.exc_info()[1]
        self.showtraceback()
        return True
    return False
def run_code(self, code_obj, result=None):
    """Execute a code object.

    When an exception occurs, self.showtraceback() is called to display a
    traceback.

    Parameters
    ----------
    code_obj : code object
        A compiled code object, to be executed
    result : ExecutionResult, optional
        An object to store exceptions that occur during execution.

    Returns
    -------
    False : successful execution.
    True : an error occurred.
    """
    # Set our own excepthook in case the user code tries to call it
    # directly, so that the IPython crash handler doesn't get triggered
    old_excepthook, sys.excepthook = sys.excepthook, self.excepthook
    # we save the original sys.excepthook in the instance, in case config
    # code (such as magics) needs access to it.
    self.sys_excepthook = old_excepthook
    outflag = True  # happens in more places, so it's easier as default
    try:
        try:
            self.hooks.pre_run_code_hook()
            #rprint('Running code', repr(code_obj)) # dbg
            exec(code_obj, self.user_global_ns, self.user_ns)
        finally:
            # Reset our crash handler in place
            sys.excepthook = old_excepthook
    except SystemExit as e:
        if result is not None:
            result.error_in_exec = e
        self.showtraceback(exception_only=True)
        warn("To exit: use 'exit', 'quit', or Ctrl-D.", stacklevel=1)
    except self.custom_exceptions:
        etype, value, tb = sys.exc_info()
        if result is not None:
            result.error_in_exec = value
        self.CustomTB(etype, value, tb)
    except:
        if result is not None:
            result.error_in_exec = sys.exc_info()[1]
        self.showtraceback(running_compiled_code=True)
    else:
        # No exception: flip the default failure flag to success.
        outflag = False
    return outflag

# For backwards compatibility
runcode = run_code
#-------------------------------------------------------------------------
# Things related to GUI support and pylab
#-------------------------------------------------------------------------
# Name of the currently-active GUI eventloop, None when no eventloop is active.
active_eventloop = None

def enable_gui(self, gui=None):
    """Enable a GUI eventloop integration; concrete subclasses must override."""
    raise NotImplementedError('Implement enable_gui in a subclass')
def enable_matplotlib(self, gui=None):
    """Enable interactive matplotlib and inline figure support.

    This takes the following steps:

    1. select the appropriate eventloop and matplotlib backend
    2. set up matplotlib for interactive use with that backend
    3. configure formatters for inline figure display
    4. enable the selected gui eventloop

    Parameters
    ----------
    gui : optional, string
        If given, dictates the choice of matplotlib GUI backend to use
        (should be one of IPython's supported backends, 'qt', 'osx', 'tk',
        'gtk', 'wx' or 'inline'), otherwise we use the default chosen by
        matplotlib (as dictated by the matplotlib build-time options plus the
        user's matplotlibrc configuration file). Note that not all backends
        make sense in all contexts, for example a terminal ipython can't
        display figures inline.

    Returns
    -------
    (gui, backend) : the toolkit and backend actually selected.
    """
    from IPython.core import pylabtools as pt
    gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select)
    if gui != 'inline':
        # If we have our first gui selection, store it
        if self.pylab_gui_select is None:
            self.pylab_gui_select = gui
        # Otherwise if they are different
        elif gui != self.pylab_gui_select:
            # Switching toolkits mid-session is not supported; fall back to
            # the one selected first.
            print ('Warning: Cannot change to a different GUI toolkit: %s.'
                   ' Using %s instead.' % (gui, self.pylab_gui_select))
            gui, backend = pt.find_gui_and_backend(self.pylab_gui_select)
    pt.activate_matplotlib(backend)
    pt.configure_inline_support(self, backend)
    # Now we must activate the gui pylab wants to use, and fix %run to take
    # plot updates into account
    self.enable_gui(gui)
    self.magics_manager.registry['ExecutionMagics'].default_runner = \
        pt.mpl_runner(self.safe_execfile)
    return gui, backend
def enable_pylab(self, gui=None, import_all=True, welcome_message=False):
    """Activate pylab support at runtime.

    This turns on support for matplotlib, preloads into the interactive
    namespace all of numpy and pylab, and configures IPython to correctly
    interact with the GUI event loop. The GUI backend to be used can be
    optionally selected with the optional ``gui`` argument.

    This method only adds preloading the namespace to InteractiveShell.enable_matplotlib.

    Parameters
    ----------
    gui : optional, string
        If given, dictates the choice of matplotlib GUI backend to use
        (should be one of IPython's supported backends, 'qt', 'osx', 'tk',
        'gtk', 'wx' or 'inline'), otherwise we use the default chosen by
        matplotlib (as dictated by the matplotlib build-time options plus the
        user's matplotlibrc configuration file). Note that not all backends
        make sense in all contexts, for example a terminal ipython can't
        display figures inline.
    import_all : optional, bool, default: True
        Whether to do `from numpy import *` and `from pylab import *`
        in addition to module imports.
    welcome_message : deprecated
        This argument is ignored, no welcome message will be displayed.

    Returns
    -------
    (gui, backend, clobbered) : toolkit, backend, and the list of user
    names shadowed by the pylab preload.
    """
    from IPython.core.pylabtools import import_pylab
    gui, backend = self.enable_matplotlib(gui)
    # We want to prevent the loading of pylab to pollute the user's
    # namespace as shown by the %who* magics, so we execute the activation
    # code in an empty namespace, and we update *both* user_ns and
    # user_ns_hidden with this information.
    ns = {}
    import_pylab(ns, import_all)
    # warn about clobbered names
    ignored = {"__builtins__"}
    both = set(ns).intersection(self.user_ns).difference(ignored)
    clobbered = [ name for name in both if self.user_ns[name] is not ns[name] ]
    self.user_ns.update(ns)
    self.user_ns_hidden.update(ns)
    return gui, backend, clobbered
#-------------------------------------------------------------------------
# Utilities
#-------------------------------------------------------------------------
def var_expand(self, cmd, depth=0, formatter=DollarFormatter()):
    """Expand python variables in a string.

    The depth argument indicates how many frames above the caller should
    be walked to look for the local namespace where to expand variables.

    The global namespace for expansion is always the user's interactive
    namespace.

    NOTE(review): the DollarFormatter() default is evaluated once at
    definition time and shared across calls -- appears deliberate here
    (the formatter looks stateless); confirm before changing.
    """
    ns = self.user_ns.copy()
    try:
        frame = sys._getframe(depth+1)
    except ValueError:
        # This is thrown if there aren't that many frames on the stack,
        # e.g. if a script called run_line_magic() directly.
        pass
    else:
        # Locals from the requested frame override user_ns entries.
        ns.update(frame.f_locals)
    try:
        # We have to use .vformat() here, because 'self' is a valid and common
        # name, and expanding **ns for .format() would make it collide with
        # the 'self' argument of the method.
        cmd = formatter.vformat(cmd, args=[], kwargs=ns)
    except Exception:
        # if formatter couldn't format, just let it go untransformed
        pass
    return cmd
def mktempfile(self, data=None, prefix='ipython_edit_'):
    """Make a new tempfile and return its filename.

    This makes a call to tempfile.mkstemp (created in a tempfile.mkdtemp),
    but it registers the created filename internally so ipython cleans it up
    at exit time.

    Parameters
    ----------
    data : str, optional
        If given, it gets written out to the temp file immediately, and
        the file is closed again before returning.
    prefix : str, optional
        Prefix used for both the temporary directory and the file name.

    Returns
    -------
    str
        The absolute path of the created .py file.
    """
    dirname = tempfile.mkdtemp(prefix=prefix)
    # Track the directory and file so they are removed at shell exit.
    self.tempdirs.append(dirname)
    handle, filename = tempfile.mkstemp('.py', prefix, dir=dirname)
    os.close(handle)  # On Windows, there can only be one open handle on a file
    self.tempfiles.append(filename)
    if data:
        # Use a context manager so the handle is closed even if write() fails
        # (the original open/close pair leaked the handle on error).
        with open(filename, 'w') as tmp_file:
            tmp_file.write(data)
    return filename
@undoc
def write(self, data):
    """DEPRECATED: Write a string to the default output"""
    # Emit the deprecation warning pointing at the caller (stacklevel=2).
    warn('InteractiveShell.write() is deprecated, use sys.stdout instead',
         DeprecationWarning, stacklevel=2)
    sys.stdout.write(data)
@undoc
def write_err(self, data):
    """DEPRECATED: Write a string to the default error output"""
    # Emit the deprecation warning pointing at the caller (stacklevel=2).
    warn('InteractiveShell.write_err() is deprecated, use sys.stderr instead',
         DeprecationWarning, stacklevel=2)
    sys.stderr.write(data)
def ask_yes_no(self, prompt, default=None, interrupt=None):
    """Ask the user a yes/no question; always answers True in quiet mode.

    Delegates to the module-level ask_yes_no helper otherwise (inside this
    method the bare name resolves to that helper, not to this method).
    """
    if self.quiet:
        return True
    return ask_yes_no(prompt,default,interrupt)
def show_usage(self):
    """Show a usage message"""
    # Pages IPython's interactive usage text through the pager.
    page.page(IPython.core.usage.interactive_usage)
def extract_input_lines(self, range_str, raw=False):
    """Return a string joining a set of input history slices.

    Parameters
    ----------
    range_str : string
        History slice specification such as "~5/6-~4/2 4:8 9"; the number
        before the / is the session number, with ~n meaning n sessions
        back from the current one. Given as a string because this is
        intended for magic functions, which receive string arguments.
    raw : bool, optional
        If true, use the raw (unprocessed) input history.

    Notes
    -----
    Two slice notations are accepted:

    * ``N:M`` -> standard python form, items N...(M-1).
    * ``N-M`` -> items N..M inclusive (closed endpoint).
    """
    entries = self.history_manager.get_range_by_str(range_str, raw=raw)
    return "\n".join(entry[2] for entry in entries)
def find_user_code(self, target, raw=True, py_only=False, skip_encoding_cookie=True, search_ns=False):
    """Get a code string from history, file, url, or a string or macro.

    This is mainly used by magic functions.

    Parameters
    ----------
    target : str
        A string specifying code to retrieve. This will be tried respectively
        as: ranges of input history (see %history for syntax), url,
        corresponding .py file, filename, or an expression evaluating to a
        string or Macro in the user namespace.
    raw : bool
        If true (default), retrieve raw history. Has no effect on the other
        retrieval mechanisms.
    py_only : bool (default False)
        Only try to fetch python code, do not try alternative methods to decode file
        if unicode fails.

    Returns
    -------
    A string of code.
    ValueError is raised if nothing is found, and TypeError if it evaluates
    to an object of another type. In each case, .args[0] is a printable
    message.
    """
    # The retrieval mechanisms below are tried in a fixed fall-through
    # order: history -> URL -> file on disk -> user namespace.
    code = self.extract_input_lines(target, raw=raw)  # Grab history
    if code:
        return code
    try:
        if target.startswith(('http://', 'https://')):
            return openpy.read_py_url(target, skip_encoding_cookie=skip_encoding_cookie)
    except UnicodeDecodeError:
        # Not valid python source; optionally retry as latin1 text.
        if not py_only :
            # Deferred import
            from urllib.request import urlopen
            response = urlopen(target)
            return response.read().decode('latin1')
        raise ValueError(("'%s' seem to be unreadable.") % target)

    # Try both the target as given and (if resolvable) its .py filename.
    potential_target = [target]
    try :
        potential_target.insert(0,get_py_filename(target))
    except IOError:
        pass

    for tgt in potential_target :
        if os.path.isfile(tgt):                        # Read file
            try :
                return openpy.read_py_file(tgt, skip_encoding_cookie=skip_encoding_cookie)
            except UnicodeDecodeError :
                # Same latin1 fallback as for URLs, unless py_only.
                if not py_only :
                    with io_open(tgt,'r', encoding='latin1') as f :
                        return f.read()
                raise ValueError(("'%s' seem to be unreadable.") % target)
        elif os.path.isdir(os.path.expanduser(tgt)):
            raise ValueError("'%s' is a directory, not a regular file." % target)

    if search_ns:
        # Inspect namespace to load object source
        object_info = self.object_inspect(target, detail_level=1)
        if object_info['found'] and object_info['source']:
            return object_info['source']

    # NOTE(review): eval of a user-supplied string against the user
    # namespace; acceptable here because the namespace is the user's own
    # interactive session, not untrusted input.
    try:                                              # User namespace
        codeobj = eval(target, self.user_ns)
    except Exception:
        raise ValueError(("'%s' was not found in history, as a file, url, "
                            "nor in the user namespace.") % target)

    if isinstance(codeobj, str):
        return codeobj
    elif isinstance(codeobj, Macro):
        return codeobj.value

    raise TypeError("%s is neither a string nor a macro." % target,
                    codeobj)
#-------------------------------------------------------------------------
# Things related to IPython exiting
#-------------------------------------------------------------------------
def atexit_operations(self):
    """Cleanup and persistence work executed unconditionally at exit.

    Anything that depends on startup flags or platform specifics (such as
    having readline or not) should instead register its own atexit handler
    in the code that has that information available.
    """
    # End the history session first: this records the end time and line
    # count, and must happen before tempfile cleanup in case the history
    # db itself lives in a temporary location.
    self.history_manager.end_session()

    # Remove the temp files, then the directories that contained them.
    for tmp_path in self.tempfiles:
        try:
            os.unlink(tmp_path)
        except OSError:
            pass
    for tmp_dir in self.tempdirs:
        try:
            os.rmdir(tmp_dir)
        except OSError:
            pass

    # Release all user namespace references cleanly, then run user hooks.
    self.reset(new_session=False)
    self.hooks.shutdown_hook()
def cleanup(self):
    """Restore the state of the ``sys`` module (delegates to restore_sys_module_state)."""
    self.restore_sys_module_state()
# Overridden in terminal subclass to change prompts
def switch_doctest_mode(self, mode):
    """No-op here; terminal subclasses toggle doctest-style prompts."""
    pass
class InteractiveShellABC(metaclass=abc.ABCMeta):
    """An abstract base class for InteractiveShell."""

# Register the concrete implementation as a virtual subclass of the ABC.
InteractiveShellABC.register(InteractiveShell)
| unnikrishnankgs/va | venv/lib/python3.5/site-packages/IPython/core/interactiveshell.py | Python | bsd-2-clause | 130,796 | [
"VisIt"
] | 67116d21803b44da879bbc700944de50a242df8f999354905ede2e3d6e7135e4 |
""" Modified from simphony-kratos/simkratos/tests/cfd/test_kratos_cfd.py
"""
import os
from mayavi.scripts import mayavi2
from simphony.core.cuba import CUBA
from simphony.engine import kratos
from KratosMultiphysics import *
from KratosMultiphysics.IncompressibleFluidApplication import *
from KratosMultiphysics.FluidDynamicsApplication import *
from KratosMultiphysics.ExternalSolversApplication import *
from KratosMultiphysics.MeshingApplication import *
from simphony.visualisation import mayavi_tools
# Path to the example Kratos fluid model, relative to this script.
path = str(os.path.join(
    os.path.dirname(__file__),
    "CFD_exampleFluid"
))

# Simulation controls passed to the wrapper's computational model (CM).
time_step = 0.001
num_steps = 5

utils = kratos.CFD_Utils()
wrapper = kratos.CFDWrapper()

wrapper.CM[CUBA.TIME_STEP] = time_step
wrapper.CM[CUBA.NUMBER_OF_TIME_STEPS] = num_steps

# Set the meshes that are part of the fluid
wrapper.SPE[kratos.CUBAExt.FLUID_MESHES] = [
    "fluid_0", "fluid_1", "fluid_2",
    "fluid_3", "fluid_4"
]

# reads kratos data so its interpretable by simphony
kratos_model = utils.read_modelpart(path)

wrapper.BC[CUBA.VELOCITY] = {}
wrapper.BC[CUBA.PRESSURE] = {}

# Register every mesh dataset and copy the boundary conditions
# (velocity/pressure per named boundary) into the wrapper.
for mesh in kratos_model['meshes']:
    wrapper.add_dataset(mesh)

for bc in kratos_model['bcs']:
    wrapper.BC[CUBA.VELOCITY][bc['name']] = bc['velocity']
    wrapper.BC[CUBA.PRESSURE][bc['name']] = bc['pressure']
@mayavi2.standalone
def view():
    """Open Mayavi2 with the Kratos CFD wrapper registered as a data engine."""
    mayavi_tools.add_engine_to_mayavi2("kratos", wrapper)

if __name__ == "__main__":
    view()
| simphony/simphony-mayavi | examples/krato-cfd-example.py | Python | bsd-2-clause | 1,440 | [
"Mayavi"
] | f0077c374cf1fbb1459ec302b7441802ab6fce7f51895b5938edb91df5819ff7 |
from visualizer import *
from plugins.IRTKPlugin import IRTKPluginMixin
from plugins.VTKPlugin import DatasetTypes,VTKProps
from plugins.SegmentPlugin import SegmentTypes,SegSceneObject,DatafileParams
from ..ui import Ui_CTMotionProp
class CTmotionProjPropWidget(QtWidgets.QWidget,Ui_CTMotionProp):
    """Qt property widget for the CT motion tracking project panel (UI from Ui_CTMotionProp)."""
    def __init__(self,parent=None):
        QtWidgets.QWidget.__init__(self,parent)
        self.setupUi(self)
# names of config values to store in the project's .ini file
#ConfigNames=enum('ctImageStack','paramfile','lvtop','apex','rvtop','rvAtop','rvPtop','resampleStack')
# NOTE(review): enum() here is the project's helper (imported via visualizer),
# not the stdlib module; it appears to yield name tuples (see n[0] usage below).
ConfigNames=enum('paramfile')
class CTMotionTrackProject(Project):
    """Eidolon project type for CT motion tracking.

    Coordinates image loading (DICOM/NIfTI), IRTK registration-based motion
    tracking, and applying tracked motion to meshes, through a dedicated
    property panel (CTmotionProjPropWidget).
    """
    def __init__(self,name,parentdir,mgr):
        """Create the project and cache the plugins it coordinates."""
        Project.__init__(self,name,parentdir,mgr)
        self.addHandlers()
        self.Measure=mgr.getPlugin('Measure')
        self.CTMotion=mgr.getPlugin('CTMotion')
        self.Dicom=mgr.getPlugin('Dicom')
        self.CTMotion.project=self  # back-reference so the plugin can reach this project
        self.header='\nCTMotion.createProject(%r,scriptdir+"/..")\n' %(self.name)
        self.logDir=self.getProjectFile('logs')
        self.backDir=self.logDir

        # initialize every declared config value to the empty string
        for n in ConfigNames:
            self.configMap[n[0]]=''

    @taskmethod('Adding Object to Project')
    def checkIncludeObject(self,obj,task=None):
        """Ask the user whether to copy a newly-seen saveable object into the project."""
        # Only try to save objects that aren't already in the project and which are saveable
        # Important: this task method will be called after the project has loaded so won't ask to add things already in the project
        if not isinstance(obj,SceneObject) or obj in self.memberObjs or obj.plugin.getObjFiles(obj) is None:
            return

        def _copy():
            # Save/copy the object's file data into the project directory,
            # renaming first if its files live outside the project.
            pdir=self.getProjectDir()
            files=list(map(os.path.abspath,obj.plugin.getObjFiles(obj) or []))

            if not files or any(not f.startswith(pdir) for f in files):
                newname=self.CTMotion.getUniqueObjName(obj.getName())
                self.mgr.renameSceneObject(obj,newname)

            filename=self.getProjectFile(obj.getName())

            if isinstance(obj,ImageSceneObject):
                self.CTMotion.saveToNifti([obj],True)
            elif isinstance(obj,MeshSceneObject):
                self.CTMotion.VTK.saveObject(obj,filename,setFilenames=True)
            else:
                obj.plugin.saveObject(obj,filename,setFilenames=True)

            Project.addObject(self,obj)
            self.save()

        msg="Do you want to add %r to the project?\nThis requires saving/copying the object's file data into the project directory."%(obj.getName())
        self.mgr.win.chooseYesNoDialog(msg,'Adding Object',_copy)

    def getPropBox(self):
        """Build the project property UI, inserting the CT-motion-specific panel."""
        prop=Project.getPropBox(self)

        # remove the UI for changing the project location
        cppdel(prop.chooseLocLayout)
        cppdel(prop.dirButton)
        cppdel(prop.chooseLocLabel)

        self.ctprop=CTmotionProjPropWidget()
        prop.verticalLayout.insertWidget(prop.verticalLayout.count()-1,self.ctprop)

        # wire up the panel's buttons to their handlers
        self.ctprop.ctDicomButton.clicked.connect(self._loadCTButton)
        self.ctprop.niftiButton.clicked.connect(self._loadNiftiButton)
        self.ctprop.chooseParamButton.clicked.connect(self._chooseParamFile)
        self.ctprop.trackButton.clicked.connect(self._trackButton)
        self.ctprop.applyTrackButton.clicked.connect(self._applyTrack)
        self.ctprop.isoCreateButton.clicked.connect(self._createIsoImage)
        self.ctprop.paramEdit.textChanged.connect(self.updateConfigFromProp)

        if not os.path.isdir(self.logDir):
            os.mkdir(self.logDir)

        return prop

    def updateConfigFromProp(self,*args):
        """Store the chosen tracking parameter file path in the config map (if it exists)."""
        param=str(self.ctprop.paramEdit.text())
#        if not param:
#            param=self.CTMotion.tsffd
#            self.ctprop.paramEdit.setText(self.CTMotion.tsffd)
        if os.path.isfile(param):
            self.configMap[ConfigNames._paramfile]=param

    def updatePropBox(self,proj,prop):
        """Refresh the UI lists (images, meshes, tracking dirs) from project members."""
        Project.updatePropBox(self,proj,prop)

        self.ctprop.paramEdit.setText(self.configMap[ConfigNames._paramfile])

        sceneimgs=[o for o in self.memberObjs if isinstance(o,ImageSceneObject)]
        scenemeshes=[o for o in self.memberObjs if isinstance(o,MeshSceneObject)]

        names=sorted(o.getName() for o in sceneimgs)
        fillList(self.ctprop.isoCreateBox,names)
        fillList(self.ctprop.trackImgBox,names)
        fillList(self.ctprop.trackMaskBox,names,defaultitem='None')

        names=sorted(o.getName() for o in scenemeshes)
        fillList(self.ctprop.trackObjBox,names)

        trackdirs=list(map(os.path.basename,self.CTMotion.getTrackingDirs()))
        fillList(self.ctprop.trackDataBox,sorted(trackdirs))

        # refill the measurement plugin's known tracking sources
        self.Measure.removeTrackSource(self.CTMotion.applyMotionTrackPoints)
        for td in trackdirs:
            self.Measure.addTrackSource(td,self.CTMotion.applyMotionTrackPoints)

    def renameObject(self,obj,oldname):
        """Rename `obj` and its files, keeping config/checkbox references in sync."""
        newname=getValidFilename(obj.getName())
        obj.setName(newname)

        conflicts=obj.plugin.checkFileOverwrite(obj,self.getProjectDir())
        if conflicts:
            raise IOError('Renaming object would overwrite the following project files: '+', '.join(map(os.path.basename,conflicts)))

        obj.plugin.renameObjFiles(obj,oldname)

        # update any stored references to the old name
        for n,v in self.checkboxMap.items():
            if v==oldname:
                self.checkboxMap[n]=newname

        for n,v in self.configMap.items():
            if v==oldname:
                self.configMap[n]=newname

        self.save()

    def _loadCTButton(self):
        """Choose DICOM series and load them as a time-dependent image object."""
        @taskroutine('Loading Objects')
        def _loadObj(f,task):
            obj=Future.get(f)
            if obj:
                # round-trip through NIfTI so the data is stored in the project
                filenames=self.CTMotion.saveToNifti([obj])
                self.CTMotion.loadNiftiFiles(filenames)

        series=self.Dicom.showChooseSeriesDialog(subject='CT Series')
        if len(series)>0:
            f=self.Dicom.showTimeMultiSeriesDialog(series)
            self.mgr.checkFutureResult(f)
            self.mgr.runTasks(_loadObj(f))

    def _loadNiftiButton(self):
        """Choose one or more NIfTI files and load them into the project."""
        filenames=self.mgr.win.chooseFileDialog('Choose NIfTI filename',filterstr='NIfTI Files (*.nii *.nii.gz)',chooseMultiple=True)
        if len(filenames)>0:
            self.CTMotion.loadNiftiFiles(filenames)

    def _chooseParamFile(self):
        """Choose an IRTK registration parameter file and store it in the config."""
        filename=self.mgr.win.chooseFileDialog('Choose Parameter file')
        if filename:
            if not os.path.isfile(filename):
                self.mgr.showMsg('Cannot find file %r'%filename,'No Parameter File')
            else:
                self.ctprop.paramEdit.setText(filename)
                self.configMap[ConfigNames._paramfile]=filename
                self.saveConfig()

    def _trackButton(self):
        """Start motion tracking with the image/mask/params chosen in the UI."""
        name=str(self.ctprop.trackImgBox.currentText())
        mask=str(self.ctprop.trackMaskBox.currentText())
        paramfile=str(self.ctprop.paramEdit.text())
        trackname=str(self.ctprop.trackName.text())
        onefile=self.ctprop.oneFileCheck.isChecked()
        f=self.CTMotion.startRegisterMotionTrack(name,mask,trackname,paramfile,None,onefile)
        self.mgr.checkFutureResult(f)

    def _applyTrack(self):
        """Apply the selected tracking data to the selected mesh object."""
        name=str(self.ctprop.trackObjBox.currentText())
        trackname=str(self.ctprop.trackDataBox.currentText())
        f=self.CTMotion.applyMotionTrack(name,trackname)
        self.mgr.checkFutureResult(f)

    def _createIsoImage(self):
        """Create an isotropically-resampled copy of the selected image."""
        name=str(self.ctprop.isoCreateBox.currentText())
        cropEmpty=self.ctprop.emptyCropBox.isChecked()
        f=self.CTMotion.createIsotropicObject(name,cropEmpty)
        self.mgr.checkFutureResult(f)
class CTMotionTrackPlugin(ImageScenePlugin,IRTKPluginMixin):
    """Plugin providing the CT motion tracking project type and its operations.

    Combines the generic image plugin behaviour with the IRTK tracking mixin;
    most tracking operations are inherited from IRTKPluginMixin.
    """
    def __init__(self):
        ImageScenePlugin.__init__(self,'CTMotion')
        self.project=None  # current CTMotionTrackProject, set by the project itself

    def init(self,plugid,win,mgr):
        """Initialize both base plugins and register the project menu item."""
        ImageScenePlugin.init(self,plugid,win,mgr)
        IRTKPluginMixin.init(self,plugid,win,mgr)
        self.Segment=self.mgr.getPlugin('Segment')

        # win is None when running headless; only add menu items in UI mode
        if self.win!=None:
            self.win.addMenuItem('Project','CTMotionTrackProj'+str(plugid),'&CT Motion Track Project',self._newProjDialog)

    def createProject(self,name,parentdir):
        """Create a new CT motion project if no project is currently open."""
        if self.mgr.project==None:
            self.mgr.createProjectObj(name,parentdir,CTMotionTrackProject)

    def _newProjDialog(self):
        """Ask for a project name and root directory, then create the project."""
        def chooseProjDir(name):
            newdir=self.win.chooseDirDialog('Choose Project Root Directory')
            if len(newdir)>0:
                self.mgr.createProjectObj(name,newdir,CTMotionTrackProject)

        self.win.chooseStrDialog('Choose Project Name','Project',chooseProjDir)

    def getCWD(self):
        """Working directory for external tools: the project directory."""
        return self.project.getProjectDir()

    def getLogFile(self,filename):
        """Return a .log path for `filename` inside the project's log directory."""
        return os.path.join(self.project.logDir,ensureExt(filename,'.log'))

    def getLocalFile(self,name):
        """Return the path of file `name` within the project directory."""
        return self.project.getProjectFile(name)

    def addObject(self,obj):
        """Add `obj` to the scene (if not present) and to the project, then save."""
        if obj not in self.mgr.objs:
            self.mgr.addSceneObject(obj)

        self.project.addObject(obj)
        self.project.save()

    @taskmethod('Load Nifti Files')
    def loadNiftiFiles(self,filenames,task=None):
        """Load NIfTI files; on the first load also sync config from the UI panel."""
        isEmpty=len(self.project.memberObjs)==0
        objs=IRTKPluginMixin.loadNiftiFiles(self,filenames)

        if isEmpty:
            self.mgr.callThreadSafe(self.project.updateConfigFromProp)
            self.project.save()

        return objs
addPlugin(CTMotionTrackPlugin())
| ericspod/Eidolon | eidolon/plugins/CTMotionTrackPlugin.py | Python | gpl-3.0 | 9,497 | [
"VTK"
] | 4608ba0362a874a76f80ae78b159d3047eb042ca17bd55e7a22edc7c0d6db3b1 |
from bot.command_map import command_map
import logging
import os
import requests
from bs4 import BeautifulSoup
logger = logging.getLogger()
# Ticker symbols treated as crypto currencies: these are priced via the
# Alpha Vantage API rather than scraped from Nasdaq. Membership is tested
# against the upper-cased user input in stock() below.
cryptos = (
    '1ST', '2GIVE', '808', 'AC', 'ACT', 'ADA', 'ADK', 'ADL', 'ADT', 'ADX',
    'AE', 'AEON', 'AGRS', 'AMBER', 'AMP', 'ANC', 'ANS', 'ANT', 'APX', 'ARDR',
    'ARK', 'ATB', 'ATCC', 'AUR', 'AVT', 'B3', 'BAT', 'BAY', 'BCAP', 'BCC',
    'BCH', 'BCN', 'BCY', 'BDL', 'BELA', 'BET', 'BIS', 'BIT', 'BITB', 'BITBTC',
    'BITCNY', 'BITEUR', 'BITGBP', 'BITOK', 'BITSILVER', 'BITUSD', 'BLAS',
    'BLK', 'BLN', 'BLOCK', 'BLOCKPAY', 'BMC', 'BNB', 'BNT', 'BOST', 'BQ',
    'BQX', 'BTA', 'BTC', 'BTCD', 'BTM', 'BTS', 'BTSR', 'BTX', 'BURST', 'BUZZ',
    'BYC', 'BYTOM', 'CANN', 'CAT', 'CCRB', 'CDT', 'CFI', 'CHIPS', 'CLAM',
    'CLOAK', 'CMP', 'COSS', 'COVAL', 'CRBIT', 'CREA', 'CREDO', 'CRW', 'CTR',
    'CURE', 'CVC', 'DAR', 'DASH', 'DAY', 'DCN', 'DCR', 'DCT', 'DDF', 'DENT',
    'DFS', 'DGB', 'DGC', 'DGD', 'DICE', 'DNT', 'DOGE', 'DOPE', 'DTB', 'DYN',
    'EAC', 'EBST', 'EBTC', 'ECN', 'EDG', 'ELIX', 'EMB', 'EMC', 'EMC2', 'EOS',
    'EOT', 'EQT', 'ETC', 'ETH', 'ETHD', 'ETP', 'ETT', 'EXP', 'FBC', 'FCT',
    'FID', 'FLDC', 'FLO', 'FLT', 'FRST', 'FTC', 'FUN', 'GAM', 'GAME', 'GAS',
    'GBG', 'GBYTE', 'GCR', 'GLD', 'GNO', 'GNT', 'GOLOS', 'GRC', 'GRWI', 'GUP',
    'GXS', 'HBN', 'HEAT', 'HMQ', 'HSR', 'HUSH', 'HVN', 'ICN', 'ICO', 'IFC',
    'IFT', 'INCNT', 'IND', 'INF', 'INPAY', 'INXT', 'IOC', 'ION', 'IOP', 'IOT',
    'IQT', 'IXC', 'IXT', 'KEXCOIN', 'KICK', 'KIN', 'KMD', 'KNC', 'KORE',
    'KRS', 'LBC', 'LGD', 'LINDA', 'LINK', 'LKK', 'LMC', 'LRC', 'LSK', 'LTC',
    'LUN', 'MAGN', 'MAID', 'MANA', 'MAX', 'MBRS', 'MCAP', 'MCO', 'MDA', 'MEC',
    'MEME', 'MGC', 'MGO', 'MINEX', 'MINT', 'MLN', 'MNE', 'MONA', 'MRT', 'MSP',
    'MTH', 'MUE', 'MUSIC', 'MYB', 'MYR', 'MYST', 'MZC', 'NAMO', 'NAUT', 'NAV',
    'NBT', 'NDAO', 'NDC', 'NEBL', 'NEOS', 'NET', 'NLC2', 'NLG', 'NMC', 'NMR',
    'NOBL', 'NOTE', 'NSR', 'NTO', 'NVC', 'NXC', 'NXS', 'NXT', 'OAX', 'OBITS',
    'OCL', 'ODN', 'OK', 'OMG', 'OMNI', 'ONION', 'OPT', 'PART', 'PASC', 'PAY',
    'PBT', 'PING', 'PINK', 'PIVX', 'PIX', 'PLBT', 'PLR', 'PLU', 'POE',
    'POSW', 'POT', 'PPC', 'PPT', 'PPY', 'PRO', 'PST', 'PTC', 'PTOY', 'PURA',
    'QAU', 'QRK', 'QRL', 'QTL', 'QTUM', 'QWARK', 'RADS', 'RAIN', 'RBIES',
    'RBX', 'RBY', 'RDD', 'REC', 'RED', 'REP', 'RIC', 'RISE', 'RLC', 'RLT',
    'ROUND', 'RRT', 'RUP', 'RVT', 'SALT', 'SAN', 'SBD', 'SC', 'SDC', 'SEC',
    'SEQ', 'SHIFT', 'SIGMA', 'SIGT', 'SJCX', 'SKIN', 'SKY', 'SLS', 'SMART',
    'SNC', 'SNGLS', 'SNM', 'SNRG', 'SNT', 'SPR', 'START', 'STEEM', 'STORJ',
    'STRAT', 'STRC', 'STX', 'SUB', 'SWT', 'SYS', 'TAAS', 'TCC', 'TFL',
    'TIME', 'TIX', 'TKN', 'TKR', 'TKS', 'TNT', 'TOA', 'TRC', 'TRIG', 'TRST',
    'TRX', 'UBQ', 'ULA', 'UNITY', 'UNO', 'UNY', 'URO', 'USDT', 'VEN', 'VERI',
    'VIA', 'VIB', 'VIVO', 'VOISE', 'VOX', 'VPN', 'VRC', 'VRM', 'VRS', 'VSL',
    'VTC', 'VTR', 'WAVES', 'WCT', 'WDC', 'WGO', 'WGR', 'WINGS', 'WTC', 'WTT',
    'XAS', 'XAUR', 'XBC', 'XBY', 'XCN', 'XCP', 'XDN', 'XEL', 'XEM', 'XID',
    'XLM', 'XMR', 'XMT', 'XPM', 'XPY', 'XRB', 'XRL', 'XRP', 'XSPEC', 'XST',
    'XTZ', 'XVC', 'XVG', 'XWC', 'XZC', 'XZR', 'YBC', 'YOYOW', 'ZCC', 'ZCL',
    'ZEC', 'ZEN', 'ZET', 'ZRX')
@command_map.register_command(send_to_sns=True)
def stock(query: list = None, user: str = None):
    '''
    Get Stock/Crypto Currency Prices
    --------------------------------------------------------
    *Usage:*
    `!stock AMZN MSFT GOOG BTC`
    --------------------------------------------------------
    '''
    # Build one multi-line response covering every requested symbol.
    # Crypto symbols go through the Alpha Vantage API; everything else is
    # scraped from Nasdaq's real-time quote page.
    response = ''
    if not query:
        query = ['amzn']  # default symbol when the command has no arguments
    for symbol in query:
        if symbol.upper() in cryptos:
            params = {
                'function': 'DIGITAL_CURRENCY_INTRADAY',
                'symbol': symbol,
                'apikey': os.environ['ALPHA_VANTAGE_API_KEY'],
                'market': 'USD'
            }
            data_key = 'Time Series (Digital Currency Intraday)'
            url = 'https://www.alphavantage.co/query'
            request = requests.get(url, params=params)
            if request.ok:
                try:
                    # Get first item in request.json()
                    data = next(iter(
                        request.json()[data_key].values()
                    ))
                    response += "*{}*: {}\n" \
                                "".format(
                                    symbol.upper(),
                                    '${:,.2f}'.format(
                                        float(data['1a. price (USD)']))
                                )
                # Fix: the original caught `(KeyError, Exception)`, a
                # redundant tuple equivalent to bare Exception; intent
                # (per the error message) is "any parse failure".
                except Exception as error:
                    logger.error("EXCEPTION: Error {}".format(error))
                    response += "Error from Alpha Vantage. " \
                                "Did they change something? `{}`\n" \
                                "".format(error)
                    logger.error("AlphaVantage request:\n{}".format(
                        request.json()))
            else:
                logger.error("Unable to get Stock from Alpha Vantage.\n"
                             "Error Code: {}\n Error: {}".format(
                                 request.status_code,
                                 request.content))
                response += "Unable to get stock price from Alpha Vantage " \
                            "for `{}`. Try again in a bit.".format(symbol)
        else:
            url = 'https://www.nasdaq.com/symbol/{}/real-time'
            try:
                req = requests.get(url.format(symbol.lower()))
                soup = BeautifulSoup(req.text, "html.parser")
                amount = soup.find('div', class_='qwidget-dollar').contents[0]
                percent = soup.find('div', class_='qwidget-percent').contents[0]
                arrow = str(next(soup.find('div', id='qwidget-arrow').children))
                # The arrow image colour encodes the direction of the change.
                if 'green' in arrow:
                    change = '↑ {}'.format(percent)
                elif 'red' in arrow:
                    change = '`↓ {}`'.format(percent)
                else:
                    change = percent
                response += '*{}*: {} {}\n'.format(symbol.upper(), amount, change)
            except Exception as e:
                # Any scraping failure (bad symbol, layout change) becomes a
                # per-symbol message rather than aborting the whole command.
                response += 'Unable to get price for {}, is this really a thing?\n'.format(symbol.upper())
    logger.debug("Returning Stock Info: {}".format(response))
    return response
| LEXmono/q | bot/commands/stock.py | Python | apache-2.0 | 6,452 | [
"Amber"
] | a2faa90a4e240e8a15ead6d6ec5016321f8c8114ea3edd55824ac32b41a1ada5 |
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import numpy as np
from ._base import _validate
from skbio.util._decorator import experimental
@experimental(as_of="0.4.0")
def ace(counts, rare_threshold=10):
    """Calculate the ACE metric (Abundance-based Coverage Estimator).

    Parameters
    ----------
    counts : 1-D array_like, int
        Vector of counts.
    rare_threshold : int, optional
        An OTU with this many or fewer individuals is considered rare.

    Returns
    -------
    double
        Computed ACE metric.

    Raises
    ------
    ValueError
        If every rare OTU is a singleton (ACE is then undefined; EstimateS
        suggests bias-corrected Chao1 instead).

    Notes
    -----
    Implementation follows the description in the EstimateS manual
    (http://viceroy.eeb.uconn.edu/estimates/), after Chao & Lee (1992) and
    Chao, Ma & Yang (1993). If no rare OTUs exist, the number of abundant
    OTUs is returned. Zero counts (OTUs absent from the sample) do not
    contribute to the rare-OTU tally.
    """
    counts = _validate(counts)
    freqs = np.bincount(counts)

    n_rare_otus = _otus_rare(freqs, rare_threshold)
    n_singletons = freqs[1]

    if n_singletons > 0 and n_singletons == n_rare_otus:
        raise ValueError("The only rare OTUs are singletons, so the ACE "
                         "metric is undefined. EstimateS suggests using "
                         "bias-corrected Chao1 instead.")

    n_abundant_otus = _otus_abundant(freqs, rare_threshold)
    if n_rare_otus == 0:
        # Nothing rare: the estimate degenerates to the abundant count.
        return n_abundant_otus

    n_rare_indiv = _number_rare(freqs, rare_threshold)
    sample_coverage = 1 - n_singletons / n_rare_indiv

    numerator = n_rare_otus * _number_rare(freqs, rare_threshold, gamma=True)
    denominator = sample_coverage * n_rare_indiv * (n_rare_indiv - 1)
    gamma_ace = (numerator / denominator) - 1
    if gamma_ace < 0:
        gamma_ace = 0

    return (n_abundant_otus + (n_rare_otus / sample_coverage) +
            ((n_singletons / sample_coverage) * gamma_ace))
def _otus_rare(freq_counts, rare_threshold):
"""Count number of rare OTUs."""
return freq_counts[1:rare_threshold + 1].sum()
def _otus_abundant(freq_counts, rare_threshold):
"""Count number of abundant OTUs."""
return freq_counts[rare_threshold + 1:].sum()
def _number_rare(freq_counts, rare_threshold, gamma=False):
"""Return number of individuals in rare OTUs.
``gamma=True`` generates the ``n_rare`` used for the variation coefficient.
"""
n_rare = 0
if gamma:
for i, j in enumerate(freq_counts[:rare_threshold + 1]):
n_rare = n_rare + (i * j) * (i - 1)
else:
for i, j in enumerate(freq_counts[:rare_threshold + 1]):
n_rare = n_rare + (i * j)
return n_rare
| wdwvt1/scikit-bio | skbio/diversity/alpha/_ace.py | Python | bsd-3-clause | 3,748 | [
"scikit-bio"
] | 33bb0fcdbe9afaedf4c7d81505202a67fdad1468511beffd713b7771d7fef7b7 |
# This program is free software. It comes without any warranty, to the extent
# permitted by applicable law. You may use it, redistribute it and/or modify
# it, in whole or in part, provided that you do so at your own risk and do not
# hold the developers or copyright holders liable for any claim, damages, or
# other liabilities arising in connection with the software.
#
# Developed by Gerald Knizia and Garnet K.-L. Chan, 2012;
# (c) Princeton University, 2012
from os import path
# Machine-specific paths to the external integral and FCI programs.
MakeIntegralsExecutable = "/home/cgk/dev/wmme/wmme"
FciExecutable = "/home/cgk/dev/fci/fci"

TmpDir = None
#TmpDir = "/dev/shm/"
# ^- if not None, use this as prefix for temporary files. Otherwise
#    use whatever mkdtmp() generates (likely related to TMPDIR environment
#    variable)

#BasisLibDir = "/home/cgk/dev/ct8k/"
# Basis set library lives next to the integral executable by default.
BasisLibDir = path.join(path.dirname(MakeIntegralsExecutable),"bases")

from numpy import set_printoptions, nan
# NOTE(review): threshold=nan is rejected by modern numpy (it requires an
# int); this worked on the numpy versions current when this was written.
set_printoptions(precision=8,linewidth=10060,suppress=True,threshold=nan)

# printf-style formats used for result reporting throughout the code
pResultFmt = " %-32s%18.12f"
pResultFmtAnnoted = " %-32s%18.12f  (%s)"
pResultFmtS = " %-32s%18s"
pResultFmtI = " %-32s%5i"
pResultFmtIAnnoted = " %-32s%5i  (%s)"

THRORB = 1e-10    # threshold for orbital gradient in RHF
THRDEN = 1e-8     # threshold for change in energy in RHF
MAXIT_HF = 10002  # maximum num. iterations for RHF
THRDVC = 1e-6     # threshold for change in full system correlation potential
MAXIT_VC = 128    # maximum num. iterations for correlation potential
IPRINT = 0        # 3: print full sys & sub.sys calculations
                  # 1: print report on sub.sys for each vcorr iteration
                  # 0: print vcorr summary and sub.sys info only after final iteration.

VLOC_FIT_TYPE = ["ImpRdm", "FullRdm", "EnvRdm", "EnvAndCoupling", "Coupling", "ImpRdmAndEnvRdm"][0]
# controls whether breaking of spin-symmetry is allowed.
WF_TYPE = ["RHF","UHF"][0]
VLOC_TYPE = ["Local","ImpAndEnv","Diagonal"][0]
USE_INT2E = True         # decides between interacting and non-interacting embedding.
ABSORB_HF_CORE = False   # if true, start with a RHF calculation on the full system and
                         # remove core orbitals and electrons from all further considerations.
                         # Does not work with UHF.

# Bohr -> Angstrom conversion factors used by different external programs.
ToAng2006 = 0.529177249    # 2006 value.
ToAng = 0.5291772108       # molpro default.
ToAngCp2k = 0.529177211    # CP2K default

# occupation number of an occupied orbital
if WF_TYPE == "RHF":
    ORB_OCC = 2.
else:
    assert(WF_TYPE == "UHF")
    ORB_OCC = 1.
| zhengbx/BBQ | dmet_hyd.20130129/settings.py | Python | gpl-3.0 | 2,546 | [
"CP2K",
"Molpro"
] | 3c2f49110aa11c99e4a7bfb525a3ad74ae3f8743af6e3826474fc88c229a599e |
import contextlib
import functools
import warnings
import pandas as pd
from . import indexing
from . import groupby
from . import ops
from . import utils
from . import variable
from .alignment import align
from .common import AbstractArray, BaseDataObject
from .coordinates import DataArrayCoordinates, Indexes
from .dataset import Dataset
from .pycompat import iteritems, basestring, OrderedDict, zip
from .utils import FrozenOrderedDict
from .variable import as_variable, _as_compatible_data, Coordinate
def _infer_coords_and_dims(shape, coords, dims):
    """All the logic for creating a new DataArray.

    Normalizes the user-supplied `coords`/`dims` into a coords mapping and
    a dims list consistent with `shape`, inferring dimension names from the
    coordinates when possible.
    """
    # A sequence of coords must line up one-per-dimension with the data.
    if (coords is not None and not utils.is_dict_like(coords)
            and len(coords) != len(shape)):
        raise ValueError('coords is not dict-like, but it has %s items, '
                         'which does not match the %s dimensions of the '
                         'data' % (len(coords), len(shape)))

    if isinstance(dims, basestring):
        dims = [dims]

    if dims is None:
        # No dims given: synthesize placeholder names, then try to replace
        # them from the coordinates below.
        dims = ['dim_%s' % n for n in range(len(shape))]
        if coords is not None and len(coords) == len(shape):
            # try to infer dimensions from coords
            if utils.is_dict_like(coords):
                dims = list(coords.keys())
            else:
                for n, (dim, coord) in enumerate(zip(dims, coords)):
                    if getattr(coord, 'name', None) is None:
                        coord = as_variable(coord, key=dim).to_coord()
                    dims[n] = coord.name
    else:
        for d in dims:
            if not isinstance(d, basestring):
                raise TypeError('dimension %s is not a string' % d)
        if coords is not None and not utils.is_dict_like(coords):
            # ensure coordinates have the right dimensions
            coords = [Coordinate(dim, coord, getattr(coord, 'attrs', {}))
                      for dim, coord in zip(dims, coords)]

    # Always return coords as a mapping keyed by dimension name.
    if coords is None:
        coords = {}
    elif not utils.is_dict_like(coords):
        coords = OrderedDict(zip(dims, coords))

    return coords, dims
class _LocIndexer(object):
    """Implements label-based indexing (``arr.loc[...]``) by remapping
    labels to positional indexers and delegating to integer indexing."""
    def __init__(self, data_array):
        self.data_array = data_array

    def _remap_key(self, key):
        """Convert a label-based key (dict-like or tuple) into positional indexers."""
        def lookup_positions(dim, labels):
            index = self.data_array.indexes[dim]
            return indexing.convert_label_indexer(index, labels)

        if utils.is_dict_like(key):
            return dict((dim, lookup_positions(dim, labels))
                        for dim, labels in iteritems(key))
        else:
            # expand the indexer so we can handle Ellipsis
            key = indexing.expanded_indexer(key, self.data_array.ndim)
            return tuple(lookup_positions(dim, labels) for dim, labels
                         in zip(self.data_array.dims, key))

    def __getitem__(self, key):
        return self.data_array[self._remap_key(key)]

    def __setitem__(self, key, value):
        self.data_array[self._remap_key(key)] = value
class DataArray(AbstractArray, BaseDataObject):
"""N-dimensional array with labeled coordinates and dimensions.
DataArray provides a wrapper around numpy ndarrays that uses labeled
dimensions and coordinates to support metadata aware operations. The API is
similar to that for the pandas Series or DataFrame, but DataArray objects
can have any number of dimensions, and their contents have fixed data
types.
Additional features over raw numpy arrays:
- Apply operations over dimensions by name: ``x.sum('time')``.
- Select or assign values by integer location (like numpy): ``x[:10]``
or by label (like pandas): ``x.loc['2014-01-01']`` or
``x.sel(time='2014-01-01')``.
- Mathematical operations (e.g., ``x - y``) vectorize across multiple
dimensions (known in numpy as "broadcasting") based on dimension names,
regardless of their original order.
- Keep track of arbitrary metadata in the form of a Python dictionary:
``x.attrs``
- Convert to a pandas Series: ``x.to_series()``.
Getting items from or doing mathematical operations with a DataArray
always returns another DataArray.
Attributes
----------
dims : tuple
Dimension names associated with this array.
values : np.ndarray
Access or modify DataArray values as a numpy array.
coords : dict-like
Dictionary of Coordinate objects that label values along each dimension.
name : str or None
Name of this array.
attrs : OrderedDict
Dictionary for holding arbitrary metadata.
"""
groupby_cls = groupby.DataArrayGroupBy
def __init__(self, data, coords=None, dims=None, name=None,
             attrs=None, encoding=None):
    """
    Parameters
    ----------
    data : array_like
        Values for this array. Must be an ``numpy.ndarray``, ndarray like,
        or castable to an ``ndarray``. If a self-described xray or pandas
        object, attempts are made to use this array's metadata to fill in
        other unspecified arguments. A view of the array's data is used
        instead of a copy if possible.
    coords : sequence or dict of array_like objects, optional
        Coordinates (tick labels) to use for indexing along each dimension.
        If dict-like, should be a mapping from dimension names to the
        corresponding coordinates.
    dims : str or sequence of str, optional
        Name(s) of the the data dimension(s). Must be either a string (only
        for 1D data) or a sequence of strings with length equal to the
        number of dimensions. If this argument is omited, dimension names
        are taken from ``coords`` (if possible) and otherwise default to
        ``['dim_0', ... 'dim_n']``.
    name : str or None, optional
        Name of this array.
    attrs : dict_like or None, optional
        Attributes to assign to the new variable. By default, an empty
        attribute dictionary is initialized.
    encoding : dict_like or None, optional
        Dictionary specifying how to encode this array's data into a
        serialized format like netCDF4. Currently used keys (for netCDF)
        include '_FillValue', 'scale_factor', 'add_offset', 'dtype',
        'units' and 'calendar' (the later two only for datetime arrays).
        Unrecognized keys are ignored.
    """
    # try to fill in arguments from data if they weren't supplied
    if coords is None:
        coords = getattr(data, 'coords', None)
        # pandas objects carry their own axis labels; reuse them as coords
        if isinstance(data, pd.Series):
            coords = [data.index]
        elif isinstance(data, pd.DataFrame):
            coords = [data.index, data.columns]
        elif isinstance(data, (pd.Index, variable.Coordinate)):
            coords = [data]
        elif isinstance(data, pd.Panel):
            coords = [data.items, data.major_axis, data.minor_axis]
    if dims is None:
        dims = getattr(data, 'dims', getattr(coords, 'dims', None))
    if name is None:
        name = getattr(data, 'name', None)
    if attrs is None:
        attrs = getattr(data, 'attrs', None)
    if encoding is None:
        encoding = getattr(data, 'encoding', None)

    data = _as_compatible_data(data)
    coords, dims = _infer_coords_and_dims(data.shape, coords, dims)

    # A DataArray is internally backed by a single-variable Dataset.
    dataset = Dataset(coords=coords)
    # insert data afterwards in case of redundant coords/data
    dataset[name] = (dims, data, attrs, encoding)

    # every coordinate must be indexed by a subset of the data's dims
    for k, v in iteritems(dataset.coords):
        if any(d not in dims for d in v.dims):
            raise ValueError('coordinate %s has dimensions %s, but these '
                             'are not a subset of the DataArray '
                             'dimensions %s' % (k, v.dims, dims))

    # these fully describe a DataArray:
    self._dataset = dataset
    self._name = name
    @classmethod
    def _new_from_dataset(cls, dataset, name):
        """Private constructor for the benefit of Dataset.__getitem__ (skips
        all validation)
        """
        # Bypass __init__ entirely; the incoming dataset is trusted to be
        # consistent already.
        obj = object.__new__(cls)
        obj._dataset = dataset._copy_listed([name], keep_attrs=False)
        if name not in obj._dataset:
            # handle virtual variables (dotted names such as 'time.season';
            # presumably materialized by _copy_listed under the suffix --
            # TODO confirm against Dataset._copy_listed)
            try:
                _, name = name.split('.', 1)
            except Exception:
                raise KeyError(name)
        obj._name = name
        if name not in dataset._dims:
            # a non-dimension variable must not stay registered as a coord
            obj._dataset._coord_names.discard(name)
        return obj
    @classmethod
    def _new_from_dataset_no_copy(cls, dataset, name):
        """Wrap ``dataset`` as a DataArray named ``name`` without copying or
        validating anything (fast path for internal callers).
        """
        obj = object.__new__(cls)
        obj._dataset = dataset
        obj._name = name
        return obj
    def _with_replaced_dataset(self, dataset):
        """Return a new DataArray with this array's name but a different
        backing dataset (no validation, no copy).
        """
        obj = object.__new__(type(self))
        obj._name = self.name
        obj._dataset = dataset
        return obj
    def _to_dataset_split(self, dim):
        """Split this array along ``dim`` into a Dataset with one variable
        per label of that dimension (labels become variable names).
        """
        def subset(dim, label):
            # select one slice and drop the now-scalar coordinate
            array = self.loc[{dim: label}].drop(dim)
            array.attrs = {}
            return array
        variables = OrderedDict([(str(label), subset(dim, label))
                                 for label in self.indexes[dim]])
        coords = self.coords.to_dataset()
        del coords[dim]
        return Dataset(variables, coords, self.attrs)
def _to_dataset_whole(self, name):
if name is None:
return self._dataset.copy()
else:
return self.rename(name)._dataset
    def to_dataset(self, dim=None, name=None):
        """Convert a DataArray to a Dataset.
        Parameters
        ----------
        dim : str, optional
            Name of the dimension on this array along which to split this array
            into separate variables. If not provided, this array is converted
            into a Dataset of one variable.
        name : str, optional
            Name to substitute for this array's name. Only valid if ``dim`` is
            not provided.
        Returns
        -------
        dataset : Dataset
        """
        # Backwards compatibility: ``dim`` used to be the ``name`` argument,
        # so a non-dimension string is reinterpreted as a name with a warning.
        if dim is not None and dim not in self.dims:
            warnings.warn('the order of the arguments on DataArray.to_dataset '
                          'has changed; you now need to supply ``name`` as '
                          'a keyword argument',
                          FutureWarning, stacklevel=2)
            name = dim
            dim = None
        if dim is not None:
            if name is not None:
                raise TypeError('cannot supply both dim and name arguments')
            return self._to_dataset_split(dim)
        else:
            return self._to_dataset_whole(name)
    @property
    def name(self):
        """The name of this array.
        """
        return self._name
    @contextlib.contextmanager
    def _set_new_dataset(self):
        """Context manager to use for modifying _dataset, in a manner that
        can be safely rolled back if an error is encountered.
        """
        # Mutate a shallow copy; _dataset is only swapped in if the managed
        # block finishes without raising.
        ds = self._dataset.copy(deep=False)
        yield ds
        self._dataset = ds
    @name.setter
    def name(self, value):
        # Renaming the backing variable and updating the cached name must
        # stay consistent, hence the rollback-capable context manager.
        with self._set_new_dataset() as ds:
            ds.rename({self.name: value}, inplace=True)
        self._name = value
    @property
    def variable(self):
        """The underlying Variable object stored in the backing dataset."""
        return self._dataset._variables[self.name]
    @property
    def dtype(self):
        """Data-type of the array's elements (delegates to the variable)."""
        return self.variable.dtype
    @property
    def shape(self):
        """Tuple of array dimensions (delegates to the variable)."""
        return self.variable.shape
    @property
    def size(self):
        """Total number of elements in the array."""
        return self.variable.size
    @property
    def nbytes(self):
        """Total bytes consumed by the elements of the array."""
        return self.variable.nbytes
    @property
    def ndim(self):
        """Number of array dimensions."""
        return self.variable.ndim
    def __len__(self):
        # length along the first dimension, like numpy
        return len(self.variable)
    @property
    def data(self):
        """The array's data as a dask or numpy array"""
        return self.variable.data
    @property
    def values(self):
        """The array's data as a numpy.ndarray"""
        return self.variable.values
    @values.setter
    def values(self, value):
        # in-place assignment into the underlying variable's data
        self.variable.values = value
    @property
    def _in_memory(self):
        # True when the data is already loaded (not lazy/deferred)
        return self.variable._in_memory
    def to_index(self):
        """Convert this variable to a pandas.Index. Only possible for 1D
        arrays.
        """
        return self.variable.to_index()
    @property
    def dims(self):
        """Dimension names associated with this array."""
        return self.variable.dims
    @dims.setter
    def dims(self, value):
        # dims are derived from the variable; direct assignment is an error
        raise AttributeError('you cannot assign dims on a DataArray. Use '
                             '.rename() or .swap_dims() instead.')
    def _item_key_to_dict(self, key):
        """Normalize an indexing key into a {dim_name: indexer} dict."""
        if utils.is_dict_like(key):
            return key
        else:
            # expand e.g. partial/ellipsis keys to one indexer per dimension
            key = indexing.expanded_indexer(key, self.ndim)
            return dict(zip(self.dims, key))
    def __getitem__(self, key):
        # string key -> coordinate lookup; anything else -> positional select
        if isinstance(key, basestring):
            return self.coords[key]
        else:
            # orthogonal array indexing
            return self.isel(**self._item_key_to_dict(key))
    def __setitem__(self, key, value):
        # string key -> assign a coordinate; anything else -> assign data
        if isinstance(key, basestring):
            self.coords[key] = value
        else:
            # orthogonal array indexing
            self.variable[key] = value
    def __delitem__(self, key):
        # removes a variable/coordinate from the backing dataset
        del self._dataset[key]
    @property
    def __attr_sources__(self):
        """List of places to look-up items for attribute-style access"""
        return [self.coords, self.attrs]
    def __contains__(self, key):
        # membership is defined by the backing dataset's variables
        return key in self._dataset
    @property
    def loc(self):
        """Attribute for location based indexing like pandas.
        """
        return _LocIndexer(self)
    @property
    def attrs(self):
        """Dictionary storing arbitrary metadata with this array."""
        return self.variable.attrs
    @attrs.setter
    def attrs(self, value):
        # attrs live on the underlying variable, not on the DataArray itself
        self.variable.attrs = value
    @property
    def encoding(self):
        """Dictionary of format-specific settings for how this array should be
        serialized."""
        return self.variable.encoding
    @encoding.setter
    def encoding(self, value):
        self.variable.encoding = value
    @property
    def indexes(self):
        """OrderedDict of pandas.Index objects used for label based indexing
        """
        return Indexes(self)
    @property
    def coords(self):
        """Dictionary-like container of coordinate arrays.
        """
        return DataArrayCoordinates(self)
    def reset_coords(self, names=None, drop=False, inplace=False):
        """Given names of coordinates, reset them to become variables.
        Parameters
        ----------
        names : str or list of str, optional
            Name(s) of non-index coordinates in this dataset to reset into
            variables. By default, all non-index coordinates are reset.
        drop : bool, optional
            If True, remove coordinates instead of converting them into
            variables.
        inplace : bool, optional
            If True, modify this array inplace. Otherwise, create a new
            object.
        Returns
        -------
        Dataset, or DataArray if ``drop == True``
        """
        # In-place reset without drop would make this array multi-variable,
        # which a DataArray cannot represent.
        if inplace and not drop:
            raise ValueError('cannot reset coordinates in-place on a '
                             'DataArray without ``drop == True``')
        if names is None:
            # all non-index coords except this array's own variable
            names = (self._dataset._coord_names - set(self.dims)
                     - set([self.name]))
        ds = self._dataset.reset_coords(names, drop, inplace)
        return ds[self.name] if drop else ds
    def load(self):
        """Manually trigger loading of this array's data from disk or a
        remote source into memory and return this array.
        Normally, it should not be necessary to call this method in user code,
        because all xray functions should either work on deferred data or
        load data automatically. However, this method can be necessary when
        working with many file objects on disk.
        """
        self._dataset.load()
        return self
    def load_data(self):  # pragma: no cover
        """Deprecated alias of :meth:`load`; emits a FutureWarning."""
        warnings.warn('the DataArray method `load_data` has been deprecated; '
                      'use `load` instead',
                      FutureWarning, stacklevel=2)
        return self.load()
    def copy(self, deep=True):
        """Returns a copy of this array.
        If `deep=True`, a deep copy is made of all variables in the underlying
        dataset. Otherwise, a shallow copy is made, so each variable in the new
        array's dataset is also a variable in this array's dataset.
        """
        ds = self._dataset.copy(deep=deep)
        return self._with_replaced_dataset(ds)
    def __copy__(self):
        return self.copy(deep=False)
    def __deepcopy__(self, memo=None):
        # memo does nothing but is required for compatibility with
        # copy.deepcopy
        return self.copy(deep=True)
    # mutable objects should not be hashable
    __hash__ = None
    @property
    def chunks(self):
        """Block dimensions for this array's data or None if it's not a dask
        array.
        """
        return self.variable.chunks
    def chunk(self, chunks=None):
        """Coerce this array's data into a dask arrays with the given chunks.
        If this variable is a non-dask array, it will be converted to dask
        array. If it's a dask array, it will be rechunked to the given chunk
        sizes.
        If chunks is not provided for one or more dimensions, chunk
        sizes along that dimension will not be updated; non-dask arrays will be
        converted into dask arrays with a single block.
        Parameters
        ----------
        chunks : int, tuple or dict, optional
            Chunk sizes along each dimension, e.g., ``5``, ``(5, 5)`` or
            ``{'x': 5, 'y': 5}``.
        Returns
        -------
        chunked : xray.DataArray
        """
        # positional tuple/list of sizes -> map onto dimensions in order
        if isinstance(chunks, (list, tuple)):
            chunks = dict(zip(self.dims, chunks))
        ds = self._dataset.chunk(chunks)
        return self._with_replaced_dataset(ds)
def isel(self, **indexers):
"""Return a new DataArray whose dataset is given by integer indexing
along the specified dimension(s).
See Also
--------
Dataset.isel
DataArray.sel
"""
ds = self._dataset.isel(**indexers)
return self._with_replaced_dataset(ds)
    def sel(self, method=None, **indexers):
        """Return a new DataArray whose dataset is given by selecting
        index labels along the specified dimension(s).
        See Also
        --------
        Dataset.sel
        DataArray.isel
        """
        # translate label indexers into positional ones, then reuse isel
        return self.isel(**indexing.remap_label_indexers(self, indexers,
                                                         method=method))
    def reindex_like(self, other, method=None, copy=True):
        """Conform this object onto the indexes of another object, filling
        in missing values with NaN.
        Parameters
        ----------
        other : Dataset or DataArray
            Object with an 'indexes' attribute giving a mapping from dimension
            names to pandas.Index objects, which provides coordinates upon
            which to index the variables in this dataset. The indexes on this
            other object need not be the same as the indexes on this
            dataset. Any mis-matched index values will be filled in with
            NaN, and any mis-matched dimension names will simply be ignored.
        method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional
            Method to use for filling index values from other not found on this
            data array:
            * default: don't fill gaps
            * pad / ffill: propagate last valid index value forward
            * backfill / bfill: propagate next valid index value backward
            * nearest: use nearest valid index value (requires pandas>=0.16)
        copy : bool, optional
            If `copy=True`, the returned array's dataset contains only copied
            variables. If `copy=False` and no reindexing is required then
            original variables from this array's dataset are returned.
        Returns
        -------
        reindexed : DataArray
            Another dataset array, with this array's data but coordinates from
            the other object.
        See Also
        --------
        DataArray.reindex
        align
        """
        # other.indexes maps dim name -> pandas.Index; expand as keyword args
        return self.reindex(method=method, copy=copy, **other.indexes)
    def reindex(self, method=None, copy=True, **indexers):
        """Conform this object onto a new set of indexes, filling in
        missing values with NaN.
        Parameters
        ----------
        copy : bool, optional
            If `copy=True`, the returned array's dataset contains only copied
            variables. If `copy=False` and no reindexing is required then
            original variables from this array's dataset are returned.
        method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional
            Method to use for filling index values in ``indexers`` not found on
            this data array:
            * default: don't fill gaps
            * pad / ffill: propagate last valid index value forward
            * backfill / bfill: propagate next valid index value backward
            * nearest: use nearest valid index value (requires pandas>=0.16)
        **indexers : dict
            Dictionary with keys given by dimension names and values given by
            arrays of coordinates tick labels. Any mis-matched coordinate values
            will be filled in with NaN, and any mis-matched dimension names will
            simply be ignored.
        Returns
        -------
        reindexed : DataArray
            Another dataset array, with this array's data but replaced
            coordinates.
        See Also
        --------
        DataArray.reindex_like
        align
        """
        ds = self._dataset.reindex(method=method, copy=copy, **indexers)
        return self._with_replaced_dataset(ds)
    def rename(self, new_name_or_name_dict):
        """Returns a new DataArray with renamed coordinates and/or a new name.
        Parameters
        ----------
        new_name_or_name_dict : str or dict-like
            If the argument is dict-like, it is used as a mapping from old
            names to new names for coordinates (and/or this array itself).
            Otherwise, use the argument as the new name for this array.
        Returns
        -------
        renamed : DataArray
            Renamed array or array with renamed coordinates.
        See Also
        --------
        Dataset.rename
        DataArray.swap_dims
        """
        if utils.is_dict_like(new_name_or_name_dict):
            name_dict = new_name_or_name_dict
            # this array keeps its name unless the mapping says otherwise
            new_name = name_dict.get(self.name, self.name)
        else:
            new_name = new_name_or_name_dict
            name_dict = {self.name: new_name}
        renamed_dataset = self._dataset.rename(name_dict)
        return renamed_dataset[new_name]
    def swap_dims(self, dims_dict):
        """Returns a new DataArray with swapped dimensions.
        Parameters
        ----------
        dims_dict : dict-like
            Dictionary whose keys are current dimension names and whose values
            are new names. Each value must already be a coordinate on this
            array.
        Returns
        -------
        renamed : DataArray
            DataArray with swapped dimensions.
        See Also
        --------
        DataArray.rename
        Dataset.swap_dims
        """
        ds = self._dataset.swap_dims(dims_dict)
        return self._with_replaced_dataset(ds)
    def transpose(self, *dims):
        """Return a new DataArray object with transposed dimensions.
        Parameters
        ----------
        *dims : str, optional
            By default, reverse the dimensions. Otherwise, reorder the
            dimensions to this order.
        Returns
        -------
        transposed : DataArray
            The returned DataArray's array is transposed.
        Notes
        -----
        Although this operation returns a view of this array's data, it is
        not lazy -- the data will be fully loaded.
        See Also
        --------
        numpy.transpose
        Dataset.transpose
        """
        # only this array's variable is transposed; coords are untouched
        ds = self._dataset.copy()
        ds[self.name] = self.variable.transpose(*dims)
        return self._with_replaced_dataset(ds)
    def squeeze(self, dim=None):
        """Return a new DataArray object with squeezed data.
        Parameters
        ----------
        dim : None or str or tuple of str, optional
            Selects a subset of the length one dimensions. If a dimension is
            selected with length greater than one, an error is raised. If
            None, all length one dimensions are squeezed.
        Returns
        -------
        squeezed : DataArray
            This array, but with all or a subset of the dimensions of
            length 1 removed.
        Notes
        -----
        Although this operation returns a view of this array's data, it is
        not lazy -- the data will be fully loaded.
        See Also
        --------
        numpy.squeeze
        """
        ds = self._dataset.squeeze(dim)
        return self._with_replaced_dataset(ds)
    def drop(self, labels, dim=None):
        """Drop coordinates or index labels from this DataArray.
        Parameters
        ----------
        labels : str
            Names of coordinate variables or index labels to drop.
        dim : str, optional
            Dimension along which to drop index labels. By default (if
            ``dim is None``), drops coordinates rather than index labels.
        Returns
        -------
        dropped : DataArray
        """
        if utils.is_scalar(labels):
            labels = [labels]
        # dropping this array's own variable would leave nothing to wrap
        if dim is None and self.name in labels:
            raise ValueError('cannot drop this DataArray from itself')
        ds = self._dataset.drop(labels, dim)
        return self._with_replaced_dataset(ds)
    def dropna(self, dim, how='any', thresh=None):
        """Returns a new array with dropped labels for missing values along
        the provided dimension.
        Parameters
        ----------
        dim : str
            Dimension along which to drop missing values. Dropping along
            multiple dimensions simultaneously is not yet supported.
        how : {'any', 'all'}, optional
            * any : if any NA values are present, drop that label
            * all : if all values are NA, drop that label
        thresh : int, default None
            If supplied, require this many non-NA values.
        Returns
        -------
        DataArray
        """
        ds = self._dataset.dropna(dim, how=how, thresh=thresh)
        return self._with_replaced_dataset(ds)
    def fillna(self, value):
        """Fill missing values in this object.
        This operation follows the normal broadcasting and alignment rules that
        xray uses for binary arithmetic, except the result is aligned to this
        object (``join='left'``) instead of aligned to the intersection of
        index coordinates (``join='inner'``).
        Parameters
        ----------
        value : scalar, ndarray or DataArray
            Used to fill all matching missing values in this array. If the
            argument is a DataArray, it is first aligned with (reindexed to)
            this array.
        Returns
        -------
        DataArray
        """
        # dict fill values only make sense for Datasets (per-variable fills)
        if utils.is_dict_like(value):
            raise TypeError('cannot provide fill value as a dictionary with '
                            'fillna on a DataArray')
        return self._fillna(value)
    def reduce(self, func, dim=None, axis=None, keep_attrs=False, **kwargs):
        """Reduce this array by applying `func` along some dimension(s).
        Parameters
        ----------
        func : function
            Function which can be called in the form
            `f(x, axis=axis, **kwargs)` to return the result of reducing an
            np.ndarray over an integer valued axis.
        dim : str or sequence of str, optional
            Dimension(s) over which to apply `func`.
        axis : int or sequence of int, optional
            Axis(es) over which to repeatedly apply `func`. Only one of the
            'dim' and 'axis' arguments can be supplied. If neither are
            supplied, then the reduction is calculated over the flattened array
            (by calling `f(x)` without an axis argument).
        keep_attrs : bool, optional
            If True, the variable's attributes (`attrs`) will be copied from
            the original object to the new one. If False (default), the new
            object will be returned without attributes.
        **kwargs : dict
            Additional keyword arguments passed on to `func`.
        Returns
        -------
        reduced : DataArray
            DataArray with this object's array replaced with an array with
            summarized data and the indicated dimension(s) removed.
        """
        var = self.variable.reduce(func, dim, axis, keep_attrs, **kwargs)
        # drop coordinates belonging to dimensions eliminated by the reduction
        ds = self._dataset.drop(set(self.dims) - set(var.dims))
        ds[self.name] = var
        return self._with_replaced_dataset(ds)
    @classmethod
    def _concat(cls, arrays, dim='concat_dim', indexers=None,
                mode='different', concat_over=None, compat='equals'):
        """Concatenate DataArrays along ``dim`` by concatenating their
        backing datasets (internal helper behind the public concat API).
        """
        datasets = []
        for n, arr in enumerate(arrays):
            if n == 0:
                # the first array's name wins for the combined result
                name = arr.name
            elif name != arr.name:
                if compat == 'identical':
                    raise ValueError('array names not identical')
                else:
                    arr = arr.rename(name)
            datasets.append(arr._dataset)
        # the main variable itself is always concatenated over
        if concat_over is None:
            concat_over = set()
        elif isinstance(concat_over, basestring):
            concat_over = set([concat_over])
        concat_over = set(concat_over) | set([name])
        ds = Dataset._concat(datasets, dim, indexers, concat_over=concat_over)
        return cls._new_from_dataset_no_copy(ds, name)
    def to_pandas(self):
        """Convert this array into a pandas object with the same shape.
        The type of the returned object depends on the number of DataArray
        dimensions:
        * 1D -> `pandas.Series`
        * 2D -> `pandas.DataFrame`
        * 3D -> `pandas.Panel`
        Only works for arrays with 3 or fewer dimensions.
        The DataArray constructor performs the inverse transformation.
        """
        # TODO: consolidate the info about pandas constructors and the
        # attributes that correspond to their indexes into a separate module?
        constructors = {0: lambda x: x,
                        1: pd.Series,
                        2: pd.DataFrame,
                        3: pd.Panel}
        try:
            constructor = constructors[self.ndim]
        except KeyError:
            raise ValueError('cannot convert arrays with %s dimensions into '
                             'pandas objects' % self.ndim)
        # positional args: (values, index[, columns[, minor_axis]])
        return constructor(self.values, *self.indexes.values())
    def to_dataframe(self):
        """Convert this array and its coordinates into a tidy pandas.DataFrame.
        The DataFrame is indexed by the Cartesian product of index coordinates
        (in the form of a :py:class:`pandas.MultiIndex`).
        Other coordinates are included as columns in the DataFrame.
        """
        # TODO: add a 'name' parameter
        dims = OrderedDict(zip(self.dims, self.shape))
        return self._dataset._to_dataframe(dims)
    def to_series(self):
        """Convert this array into a pandas.Series.
        The Series is indexed by the Cartesian product of index coordinates
        (in the form of a :py:class:`pandas.MultiIndex`).
        """
        index = self.coords.to_index()
        return pd.Series(self.values.reshape(-1), index=index, name=self.name)
    @classmethod
    def from_series(cls, series):
        """Convert a pandas.Series into an xray.DataArray.
        If the series's index is a MultiIndex, it will be expanded into a
        tensor product of one-dimensional coordinates (filling in missing values
        with NaN). Thus this operation should be the inverse of the `to_series`
        method.
        """
        # TODO: add a 'name' parameter
        # round-trip through a one-column DataFrame to reuse Dataset's logic
        df = pd.DataFrame({series.name: series})
        ds = Dataset.from_dataframe(df)
        return cls._new_from_dataset_no_copy(ds, series.name)
    def to_cdms2(self):
        """Convert this array into a cdms2.Variable
        """
        # imported lazily so cdms2 stays an optional dependency
        from ..convert import to_cdms2
        return to_cdms2(self)
    @classmethod
    def from_cdms2(cls, variable):
        """Convert a cdms2.Variable into an xray.DataArray
        """
        from ..convert import from_cdms2
        return from_cdms2(variable)
def _all_compat(self, other, compat_str):
"""Helper function for equals and identical"""
compat = lambda x, y: getattr(x.variable, compat_str)(y.variable)
return (utils.dict_equiv(self.coords, other.coords, compat=compat)
and compat(self, other))
    def broadcast_equals(self, other):
        """Two DataArrays are broadcast equal if they are equal after
        broadcasting them against each other such that they have the same
        dimensions.
        See Also
        --------
        DataArray.equals
        DataArray.identical
        """
        try:
            return self._all_compat(other, 'broadcast_equals')
        except (TypeError, AttributeError):
            # ``other`` is not DataArray-like
            return False
    def equals(self, other):
        """True if two DataArrays have the same dimensions, coordinates and
        values; otherwise False.
        DataArrays can still be equal (like pandas objects) if they have NaN
        values in the same locations.
        This method is necessary because `v1 == v2` for ``DataArray``
        does element-wise comparisons (like numpy.ndarrays).
        See Also
        --------
        DataArray.broadcast_equals
        DataArray.identical
        """
        try:
            return self._all_compat(other, 'equals')
        except (TypeError, AttributeError):
            return False
    def identical(self, other):
        """Like equals, but also checks the array name and attributes, and
        attributes on all coordinates.
        See Also
        --------
        DataArray.broadcast_equals
        DataArray.equals
        """
        try:
            return (self.name == other.name
                    and self._all_compat(other, 'identical'))
        except (TypeError, AttributeError):
            return False
    # sentinel distinguishing "no name attribute" from an explicit None name
    __default_name = object()
    def _result_name(self, other=None):
        """Choose the name for the result of an operation involving this
        array (and optionally ``other``), following pandas' heuristics.
        """
        if self.name in self.dims:
            # these names match dimension, so if we preserve them we will also
            # rename indexes
            return None
        if other is None:
            # shortcut
            return self.name
        other_name = getattr(other, 'name', self.__default_name)
        other_dims = getattr(other, 'dims', ())
        if other_name in other_dims:
            # same trouble as above
            return None
        # use the same naming heuristics as pandas:
        # https://github.com/ContinuumIO/blaze/issues/458#issuecomment-51936356
        if other_name is self.__default_name or other_name == self.name:
            return self.name
        return None
    def __array_wrap__(self, obj, context=None):
        """Wrap the output of a numpy ufunc back into a DataArray,
        preserving coordinates and computing an appropriate name.
        """
        new_var = self.variable.__array_wrap__(obj, context)
        ds = self.coords.to_dataset()
        name = self._result_name()
        ds[name] = new_var
        return self._new_from_dataset_no_copy(ds, name)
    @staticmethod
    def _unary_op(f):
        """Turn an elementwise function ``f`` into a DataArray unary method."""
        @functools.wraps(f)
        def func(self, *args, **kwargs):
            # apply f to the raw data and re-wrap via __array_wrap__
            return self.__array_wrap__(f(self.variable.data, *args, **kwargs))
        return func
    @staticmethod
    def _binary_op(f, reflexive=False, join='inner', **ignored_kwargs):
        """Turn a two-argument function ``f`` into a DataArray binary method,
        handling alignment, coordinate merging and result naming.
        """
        @functools.wraps(f)
        def func(self, other):
            # let Dataset / GroupBy implement the operation instead
            if isinstance(other, (Dataset, groupby.GroupBy)):
                return NotImplemented
            if hasattr(other, 'indexes'):
                self, other = align(self, other, join=join, copy=False)
                empty_indexes = [d for d, s in zip(self.dims, self.shape)
                                 if s == 0]
                if empty_indexes:
                    raise ValueError('no overlapping labels for some '
                                     'dimensions: %s' % empty_indexes)
            other_coords = getattr(other, 'coords', None)
            other_variable = getattr(other, 'variable', other)
            ds = self.coords.merge(other_coords)
            name = self._result_name(other)
            # reflexive handles the right-hand variants (e.g. __radd__)
            ds[name] = (f(self.variable, other_variable)
                        if not reflexive
                        else f(other_variable, self.variable))
            result = self._new_from_dataset_no_copy(ds, name)
            return result
        return func
    @staticmethod
    def _inplace_binary_op(f):
        """Turn an in-place two-argument function into a DataArray method
        (e.g. __iadd__), merging coordinates in place.
        """
        @functools.wraps(f)
        def func(self, other):
            if isinstance(other, groupby.GroupBy):
                raise TypeError('in-place operations between a DataArray and '
                                'a grouped object are not permitted')
            other_coords = getattr(other, 'coords', None)
            other_variable = getattr(other, 'variable', other)
            # roll back coordinate changes if the variable op raises
            with self.coords._merge_inplace(other_coords):
                f(self.variable, other_variable)
            return self
        return func
# priority must be higher than Variable to properly work with binary ufuncs
ops.inject_all_ops_and_reduce_methods(DataArray, priority=60)
| clarkfitzg/xray | xray/core/dataarray.py | Python | apache-2.0 | 38,383 | [
"NetCDF"
] | 5daed3593dbcc1c0d78afae472787a0a16072c6d548574379537b524c2a5e5ff |
import vtk
import numpy as np

# Build a rectangular strip of quads in the x-y plane, attach a point scalar
# ("Temps") equal to each corner's x coordinate, and render it interactively.

NX = 101
NY = 11
xs = np.linspace(0, 100, NX)
ys = np.linspace(0, 10, NY)

grid_points = vtk.vtkPoints()
quads = vtk.vtkCellArray()

# Unused placeholders retained from the original second-strip experiment.
points2 = vtk.vtkPoints()
polys2 = vtk.vtkCellArray()

scalars = vtk.vtkFloatArray()
scalars.SetNumberOfComponents(1)
scalars.SetName("Temps")

for row in range(NY - 1):
    for col in range(NX - 1):
        # Four corners of one quad cell, counter-clockwise.
        corners = (
            (xs[col], ys[row], 0),
            (xs[col], ys[row + 1], 0),
            (xs[col + 1], ys[row + 1], 0),
            (xs[col + 1], ys[row], 0),
        )
        # One scalar per corner: the corner's x coordinate.
        for value in (xs[col], xs[col], xs[col + 1], xs[col + 1]):
            scalars.InsertNextTypedTuple([value])
        quad = vtk.vtkQuad()
        for corner_index, corner in enumerate(corners):
            point_id = grid_points.InsertNextPoint(corner)
            quad.GetPointIds().SetId(corner_index, point_id)
        quads.InsertNextCell(quad)

surface = vtk.vtkPolyData()
surface.SetPoints(grid_points)
surface.SetPolys(quads)
surface.GetPointData().SetScalars(scalars)

mapper = vtk.vtkPolyDataMapper()
mapper.SetScalarRange(0, 100)
# VTK 5 and earlier use SetInput; newer releases use SetInputData.
if vtk.VTK_MAJOR_VERSION <= 5:
    mapper.SetInput(surface)
else:
    mapper.SetInputData(surface)

actor = vtk.vtkActor()
actor.SetMapper(mapper)

renderer = vtk.vtkRenderer()
render_window = vtk.vtkRenderWindow()
render_window.AddRenderer(renderer)
render_window.SetSize(640, 480)

interactor = vtk.vtkRenderWindowInteractor()
interactor.SetRenderWindow(render_window)

renderer.SetBackground(.9, .9, .9)
renderer.AddActor(actor)

render_window.Render()
interactor.Start()
| tjssmy/CuviewerPy | vtkTests/BandedStrip.py | Python | mit | 3,330 | [
"VTK"
] | a33f3f522aecb2db4351ac5e5504ab2eebcd0b1396c51628463f1271ec14768c |
"""
A shell tool that constructs a query from arguments and flags and
outputs results. This is different from a PipeTool as StartTool can
only occur at the beginning of a chain.
"""
from octopus.server.DBInterface import DBInterface
from octopus.shelltool.CmdLineTool import CmdLineTool
class StartTool(CmdLineTool):
    """Command-line tool that builds a single database query from its
    arguments/flags, runs it, and hands the rows to a result handler.

    Unlike a PipeTool, a StartTool can only appear at the beginning of a
    tool chain.
    """

    def __init__(self, DESCRIPTION):
        CmdLineTool.__init__(self, DESCRIPTION)

    # @Override
    def _constructQuery(self):
        """Build the query sent to the database; subclasses override this."""
        pass

    # @Override
    def _handleResult(self, res):
        """Process the query result; subclasses override this."""
        pass

    def _runImpl(self):
        """Connect, execute the constructed query and dispatch the result."""
        gremlin_query = self._constructQuery()
        self.dbInterface = DBInterface()
        self.dbInterface.connectToDatabase()
        query_result = self.dbInterface.runGremlinQuery(gremlin_query)
        self._handleResult(query_result)
| octopus-platform/bjoern | python/octopus-tools/octopus/shelltool/StartTool.py | Python | gpl-3.0 | 962 | [
"Octopus"
] | 1fc0c0b656184314dbbae053ae1b9c2a46c9af158610c4535c61f79822d4177d |
"""
This script finds best fitting Halo Occupation Distribution (HOD) parameters
that simultaneously fit the number density n(z) and projected correlation
function.
mpirun -n 4 python3 fit_hod_mpi.py param.json
Options:
param [=param.json]: parameter file
nmocks [=1]: number of mock catalogues used for corr_projected
nrands [=1]: number of random catalogues used for corr_projected
Input:
halo_lightcone/lightcone_<imock>.h5; imock = 1 - nmocks
rand_lightcone/lightcone_<irand>.h5; irand = 1 - nrands
Output:
mocks/mock_<n>.txt
randoms/random_<n>.txt
Notation:
  *domain* is a tuple of (region, z_min, z_max) such as ('w1', '1.0', '1.2')
*data* is defined for each domain which contains:
halo_lightcones
random_lightcones
galaxy_catalogues
random_catalogues
"""
import os
import sys
import argparse
import json
import signal
import numpy as np
import mockgallib as mock
# restore default SIGINT handling so the fit can be aborted cleanly
signal.signal(signal.SIGINT, signal.SIG_DFL)  # stop with ctrl-c
#
# Command-line options
#
parser = argparse.ArgumentParser()
parser.add_argument('--param', default='param.json',
                    help='parameter json file')
parser.add_argument('--nmocks', default='1', help='number of mock catalogues')
parser.add_argument('--nrands', default='1', help='number of random catalogues')
arg = parser.parse_args()
mock.set_loglevel(0)
#
# Read parameter file and initialise modules
#
#
print('Parameter file: %s' % arg.param)
with open(arg.param, 'r') as f:
    param = json.load(f)
# omega_m: matter density parameter for the assumed cosmology
omega_m = param['omega_m']
print('Setting cosmology: omega_m= %.4f' % omega_m)
mock.cosmology.set(omega_m)
# power_spectrum: path to the linear power spectrum table
print('Using linear power spectrum: ', param['power_spectrum'])
mock.power.init(param['power_spectrum'])
def read_redshift_bins(redshift_bins):
    """Convert a list of {'zmin': ..., 'zmax': ...} dicts into a list of
    (zmin, zmax) float tuples, preserving order."""
    return [(float(zbin['zmin']), float(zbin['zmax']))
            for zbin in redshift_bins]
# parse the (zmin, zmax) bins declared in the parameter file
redshift_bins = read_redshift_bins(param['redshift_bins'])
print(redshift_bins)
# nz: observed number-density table n(z) loaded as a 2-column array
nbar_obs= np.loadtxt(param['nz'], delimiter=' ')
#
# The main data structure defined for each region x redshift bin
#
class Data:
    """Data is defined on each domain (region, z_min, z_max)
    """
    def __init__(self, domain):
        self.domain = domain
        self.reg = domain[0]
        self.z_min = float(domain[1])
        self.z_max = float(domain[2])
        # Load lightcones
        self.halo_lightcones = mock.LightCones()
        self.halo_lightcones.load_h5(
            ['halo_lightcone/%s/lightcone_%05d.h5' %
             (domain[0], n + 1)
             for n in range(int(arg.nmocks))])
        self.rand_lightcones = mock.LightCones()
        self.rand_lightcones.load_h5(
            ['rand_lightcone/%s/lightcone_%05d.h5' %
             (domain[0], n + 1)
             for n in range(int(arg.nrands))])
        # Catalogues will be generated from lightcones for given hod
        self.galaxy_catalogues = mock.Catalogues()
        self.random_catalogues = mock.Catalogues()
        self.corr = mock.CorrelationFunction(
            rp_min=0.5, rp_max=60.0, nbin=20, pi_max= 60.0, pi_nbin= 20)
        # VIPERS projected correlation function
        # (columns presumably: rp, wp, wp_error -- see chi2 below)
        self.wp_obs = np.loadtxt('data/vipers/try1/corr_projected_%s_%s_%s.txt'
                                 % domain, delimiter=' ')
    def generate_catalogues(self, hod):
        """Generate galaxy and random catalogues from given HOD"""
        self.galaxy_catalogues.generate_galaxies(hod, self.halo_lightcones,
                                                 self.z_min, self.z_max)
        self.random_catalogues.generate_randoms(hod, self.rand_lightcones,
                                                self.z_min, self.z_max)
    def chi2(self, hod):
        """Compute chi2 between HOD and observation
        projected correlation functions wp's.
        Assumed that both wps are computed in the same conditions.
        """
        self.generate_catalogues(hod)
        self.corr.compute_corr_projected(self.galaxy_catalogues,
                                         self.random_catalogues)
        wp = self.corr.wp
        # normalized residuals against the observed wp and its error column
        diff = (self.corr.wp - self.wp_obs[:,1])/self.wp_obs[:,2]
        chi2 = np.sum(diff**2)
        return chi2
    def write_corr_projected(self, index):
        """Write projected correlation function"""
        arg = self.domain + (index,)
        filename = 'log/corr_%s_%s_%s_%05d.txt' % arg
        rp = self.corr.rp
        wp = self.corr.wp
        with open(filename, 'w') as f:
            for i in range(len(rp)):
                f.write('%e %e\n' % (rp[i], wp[i]))
# NOTE(review): regs lists both fields but the loop below only uses 'w1' --
# confirm whether 'w4' domains were intentionally left out.
regs = ['w1', 'w4']
domains = []
for zbin in redshift_bins:
    domains.append(('w1', zbin[0], zbin[1]))
data = {}
for domain in domains:
    data[domain] = Data(domain)
#
# Set HOD parameters (initial guess)
#
hod = mock.Hod()
hod.set_coef([12, 0.0, 0.0, 0, 0.1, 0.0, 1.5, 0.0, 1.5, 0.0])
#
# Setup HOD parameter fitting
#
nbar= mock.NbarFitting(hod, nbar_obs, 0.6, 1.2)
#
# Setup output
#
outdir = 'log'
if not os.path.exists(outdir):
    os.mkdir(outdir)
def write_nbar_fitting(nz, index):
    """Write nbar_fitting to an ascii file

    Args:
      nz: NbarFitting object
      index: index of the output filename

    Output:
      outdir/nz_<index>.txt file
      Column 1: z
      Column 2: nbar_obs
      Column 3: nbar_HOD
    """
    filename = '%s/nz_%05d.txt' % (outdir, index)
    with open(filename, 'w') as f:
        for i in range(len(nz)):
            f.write('%e %e %e\n' % (nz.z[i], nz.nbar_obs[i], nz.nbar_hod[i]))
def write_hod_params(h, index):
    """Write HOD parameters as a function of z to log/hod_<index>.txt.

    Args:
      h: Hod object providing get_coef(), logMmin(z), sigma(z),
         logM1(z), alpha(z)
      index: integer used in the output filename

    Output columns:
      Column 1: z
      Column 2: log10 M_min
      Column 3: sigma
      Column 4: log10 M1
      Column 5: alpha
    """
    filename = 'log/hod_%05d.txt' % index
    # 'with' guarantees the file is closed even if a coefficient
    # evaluation raises.
    with open(filename, 'w') as f:
        # Bug fix: the header line previously read the module-global
        # 'hod' instead of the 'h' argument; they only coincided
        # because the sole caller passed hod.
        f.write('# c= ' + repr(h.get_coef()) + '\n')
        for z in np.arange(0.4, 1.2, 0.01):
            logMmin = h.logMmin(z)
            sigma = h.sigma(z)
            logM1 = h.logM1(z)
            alpha = h.alpha(z)
            f.write('%.4f %.6f %.6f %.6f %.6f\n' %
                    (z, logMmin, sigma, logM1, alpha))
# Line-buffered (buffering=1) log so progress survives a crash
flog = open('%s/fit_hod.log' % outdir, 'w', 1)
# Iteration counter; incremented by logging_minimization below
iter = 0
def cost_function(x):
    """Compute the total chi^2 over all domains for HOD parameters x.

    Args:
      x[0]: HOD coefficient c_6 (log10 M1 offset; see inline comment)

    NOTE(review): the original docstring also documented x[1] (sigma)
    and x[2] (alpha); those assignments are commented out below, so
    only x[0] is fitted at present.

    Side effects: mutates the module-global `hod` and refits the
    module-global `nbar`.
    """
    hod[6] = x[0] #log10 M1 = log10 (M_min + c_6 + c_7*(z - z_0)
    #hod[7] = x[1]

    #hod[6] = x[0] # log10 M1
    #hod[6] = x[0] # log10 M1
    #hod[4] = x[1] # sigma
    #hod[8] = x[2] # alpha

    print('eval %.3f' % (x[0]))
    print(hod.coef)

    # Find best fitting logMmin(z) function
    nbar.fit()
    #logMmin = hod.logMmin(0.6)
    #print('logMmin= ', logMmin)

    # Sum chi^2 contributions from every (region, zbin) domain
    chi2 = 0
    for domain, d in data.items():
        chi2 += d.chi2(hod)

    return chi2
def logging_minimization(x):
    """Minimiser callback: log the fit state after each iteration.

    Re-applies x to the module-global `hod`, refits `nbar`, and writes
    nbar, wp, and HOD-parameter snapshots indexed by the global `iter`
    counter.  Always returns None.
    """
    global iter
    iter = iter + 1
    print('callback called')
    #hod[4] = x[1]
    hod[6] = x[0]
    #hod[7] = x[1]
    #hod[8] = x[2]

    # Find best fitting logMmin(z) function
    nbar.fit()
    write_nbar_fitting(nbar, iter)

    # Recompute chi^2 per domain and dump the projected wp of each
    chi2 = 0
    for domain, d in data.items():
        chi2 += d.chi2(hod)
        d.write_corr_projected(iter)

    print('chi2 = %.3f | %.3f' % (chi2, x[0]))
    flog.write('%.3f %.4f\n' % (chi2, x[0]))
    write_hod_params(hod, iter)
    return None
#
# Chi2 minimization
#
# x = [logM1, sigma, alpha] HOD parameters to be optimised
#
#x0 = [13.0, 0.1, 1.5] # starting point M1, sigma, alpha
#ss = [1.5, 0.05, 0.5]
# Currently only one parameter (c_6, the log10 M1 offset) is fitted.
x0 = [1.5]   # starting point
ss = [0.2]   # initial step size
#opt = scipy.optimize.minimize(cost_function, x0, method='Nelder-Mead',
#                              tol=0.01,
#                              callback=logging_minimization)

#x = mock.minimise(cost_function, None, x0, ss)
x = mock.minimise(cost_function, logging_minimization, x0, ss)
#x = mock.minimise(test, logging_minimization, x0, ss)

print('minimum', x)

flog.close()
#write_nbar_fitting(nbar, iter)
#write_corr_projected(corr, zbin, iter)
| junkoda/mockgallib | script/fit_hod.py | Python | gpl-3.0 | 8,407 | [
"Galaxy"
] | 7f6fadbdc6348ab3225d226c29dc3ce99eed34e6cdfcae1f1bdcefc7f99ef3d1 |
# coding: utf-8
from __future__ import unicode_literals
import base64
import datetime
import hashlib
import json
import netrc
import os
import random
import re
import socket
import sys
import time
import math
from ..compat import (
compat_cookiejar,
compat_cookies,
compat_etree_fromstring,
compat_getpass,
compat_http_client,
compat_os_name,
compat_str,
compat_urllib_error,
compat_urllib_parse_unquote,
compat_urllib_parse_urlencode,
compat_urllib_request,
compat_urlparse,
compat_xml_parse_error,
)
from ..downloader.f4m import (
get_base_url,
remove_encrypted_media,
)
from ..utils import (
NO_DEFAULT,
age_restricted,
base_url,
bug_reports_message,
clean_html,
compiled_regex_type,
determine_ext,
determine_protocol,
error_to_compat_str,
ExtractorError,
extract_attributes,
fix_xml_ampersands,
float_or_none,
GeoRestrictedError,
GeoUtils,
int_or_none,
js_to_json,
mimetype2ext,
orderedSet,
parse_codecs,
parse_duration,
parse_iso8601,
parse_m3u8_attributes,
RegexNotFoundError,
sanitized_Request,
sanitize_filename,
unescapeHTML,
unified_strdate,
unified_timestamp,
update_Request,
update_url_query,
urljoin,
url_basename,
xpath_element,
xpath_text,
xpath_with_ns,
)
class InfoExtractor(object):
"""Information Extractor class.
Information extractors are the classes that, given a URL, extract
information about the video (or videos) the URL refers to. This
information includes the real video URL, the video title, author and
others. The information is stored in a dictionary which is then
passed to the YoutubeDL. The YoutubeDL processes this
information possibly downloading the video to the file system, among
other possible outcomes.
The type field determines the type of the result.
By far the most common value (and the default if _type is missing) is
"video", which indicates a single video.
For a video, the dictionaries must include the following fields:
id: Video identifier.
title: Video title, unescaped.
Additionally, it must contain either a formats entry or a url one:
formats: A list of dictionaries for each format available, ordered
from worst to best quality.
Potential fields:
* url Mandatory. The URL of the video file
* manifest_url
The URL of the manifest file in case of
fragmented media (DASH, hls, hds)
* ext Will be calculated from URL if missing
* format A human-readable description of the format
("mp4 container with h264/opus").
Calculated from the format_id, width, height.
and format_note fields if missing.
* format_id A short description of the format
("mp4_h264_opus" or "19").
Technically optional, but strongly recommended.
* format_note Additional info about the format
("3D" or "DASH video")
* width Width of the video, if known
* height Height of the video, if known
* resolution Textual description of width and height
* tbr Average bitrate of audio and video in KBit/s
* abr Average audio bitrate in KBit/s
* acodec Name of the audio codec in use
* asr Audio sampling rate in Hertz
* vbr Average video bitrate in KBit/s
* fps Frame rate
* vcodec Name of the video codec in use
* container Name of the container format
* filesize The number of bytes, if known in advance
* filesize_approx An estimate for the number of bytes
* player_url SWF Player URL (used for rtmpdump).
* protocol The protocol that will be used for the actual
download, lower-case.
"http", "https", "rtsp", "rtmp", "rtmpe",
"m3u8", "m3u8_native" or "http_dash_segments".
* fragment_base_url
Base URL for fragments. Each fragment's path
value (if present) will be relative to
this URL.
* fragments A list of fragments of a fragmented media.
Each fragment entry must contain either an url
or a path. If an url is present it should be
considered by a client. Otherwise both path and
fragment_base_url must be present. Here is
the list of all potential fields:
* "url" - fragment's URL
* "path" - fragment's path relative to
fragment_base_url
* "duration" (optional, int or float)
* "filesize" (optional, int)
* preference Order number of this format. If this field is
present and not None, the formats get sorted
by this field, regardless of all other values.
-1 for default (order by other properties),
-2 or smaller for less than default.
< -1000 to hide the format (if there is
another one which is strictly better)
* language Language code, e.g. "de" or "en-US".
* language_preference Is this in the language mentioned in
the URL?
10 if it's what the URL is about,
-1 for default (don't know),
-10 otherwise, other values reserved for now.
* quality Order number of the video quality of this
format, irrespective of the file format.
-1 for default (order by other properties),
-2 or smaller for less than default.
* source_preference Order number for this video source
(quality takes higher priority)
-1 for default (order by other properties),
-2 or smaller for less than default.
* http_headers A dictionary of additional HTTP headers
to add to the request.
* stretched_ratio If given and not 1, indicates that the
video's pixels are not square.
width : height ratio as float.
* no_resume The server does not support resuming the
(HTTP or RTMP) download. Boolean.
url: Final video URL.
ext: Video filename extension.
format: The video format, defaults to ext (used for --get-format)
player_url: SWF Player URL (used for rtmpdump).
The following fields are optional:
alt_title: A secondary title of the video.
display_id An alternative identifier for the video, not necessarily
unique, but available before title. Typically, id is
something like "4234987", title "Dancing naked mole rats",
and display_id "dancing-naked-mole-rats"
thumbnails: A list of dictionaries, with the following entries:
* "id" (optional, string) - Thumbnail format ID
* "url"
* "preference" (optional, int) - quality of the image
* "width" (optional, int)
* "height" (optional, int)
* "resolution" (optional, string "{width}x{height"},
deprecated)
* "filesize" (optional, int)
thumbnail: Full URL to a video thumbnail image.
description: Full video description.
uploader: Full name of the video uploader.
license: License name the video is licensed under.
creator: The creator of the video.
release_date: The date (YYYYMMDD) when the video was released.
timestamp: UNIX timestamp of the moment the video became available.
upload_date: Video upload date (YYYYMMDD).
If not explicitly set, calculated from timestamp.
uploader_id: Nickname or id of the video uploader.
uploader_url: Full URL to a personal webpage of the video uploader.
location: Physical location where the video was filmed.
subtitles: The available subtitles as a dictionary in the format
{tag: subformats}. "tag" is usually a language code, and
"subformats" is a list sorted from lower to higher
preference, each element is a dictionary with the "ext"
entry and one of:
* "data": The subtitles file contents
* "url": A URL pointing to the subtitles file
"ext" will be calculated from URL if missing
automatic_captions: Like 'subtitles', used by the YoutubeIE for
automatically generated captions
duration: Length of the video in seconds, as an integer or float.
view_count: How many users have watched the video on the platform.
like_count: Number of positive ratings of the video
dislike_count: Number of negative ratings of the video
repost_count: Number of reposts of the video
average_rating: Average rating give by users, the scale used depends on the webpage
comment_count: Number of comments on the video
comments: A list of comments, each with one or more of the following
properties (all but one of text or html optional):
* "author" - human-readable name of the comment author
* "author_id" - user ID of the comment author
* "id" - Comment ID
* "html" - Comment as HTML
* "text" - Plain text of the comment
* "timestamp" - UNIX timestamp of comment
* "parent" - ID of the comment this one is replying to.
Set to "root" to indicate that this is a
comment to the original video.
age_limit: Age restriction for the video, as an integer (years)
webpage_url: The URL to the video webpage, if given to youtube-dl it
should allow to get the same result again. (It will be set
by YoutubeDL if it's missing)
categories: A list of categories that the video falls in, for example
["Sports", "Berlin"]
tags: A list of tags assigned to the video, e.g. ["sweden", "pop music"]
is_live: True, False, or None (=unknown). Whether this video is a
live stream that goes on instead of a fixed-length video.
start_time: Time in seconds where the reproduction should start, as
specified in the URL.
end_time: Time in seconds where the reproduction should end, as
specified in the URL.
chapters: A list of dictionaries, with the following entries:
* "start_time" - The start time of the chapter in seconds
* "end_time" - The end time of the chapter in seconds
* "title" (optional, string)
The following fields should only be used when the video belongs to some logical
chapter or section:
chapter: Name or title of the chapter the video belongs to.
chapter_number: Number of the chapter the video belongs to, as an integer.
chapter_id: Id of the chapter the video belongs to, as a unicode string.
The following fields should only be used when the video is an episode of some
series, programme or podcast:
series: Title of the series or programme the video episode belongs to.
season: Title of the season the video episode belongs to.
season_number: Number of the season the video episode belongs to, as an integer.
season_id: Id of the season the video episode belongs to, as a unicode string.
episode: Title of the video episode. Unlike mandatory video title field,
this field should denote the exact title of the video episode
without any kind of decoration.
episode_number: Number of the video episode within a season, as an integer.
episode_id: Id of the video episode, as a unicode string.
The following fields should only be used when the media is a track or a part of
a music album:
track: Title of the track.
track_number: Number of the track within an album or a disc, as an integer.
track_id: Id of the track (useful in case of custom indexing, e.g. 6.iii),
as a unicode string.
artist: Artist(s) of the track.
genre: Genre(s) of the track.
album: Title of the album the track belongs to.
album_type: Type of the album (e.g. "Demo", "Full-length", "Split", "Compilation", etc).
album_artist: List of all artists appeared on the album (e.g.
"Ash Borer / Fell Voices" or "Various Artists", useful for splits
and compilations).
disc_number: Number of the disc or other physical medium the track belongs to,
as an integer.
release_year: Year (YYYY) when the album was released.
Unless mentioned otherwise, the fields should be Unicode strings.
Unless mentioned otherwise, None is equivalent to absence of information.
_type "playlist" indicates multiple videos.
There must be a key "entries", which is a list, an iterable, or a PagedList
object, each element of which is a valid dictionary by this specification.
Additionally, playlists can have "id", "title", "description", "uploader",
"uploader_id", "uploader_url" attributes with the same semantics as videos
(see above).
_type "multi_video" indicates that there are multiple videos that
form a single show, for examples multiple acts of an opera or TV episode.
It must have an entries key like a playlist and contain all the keys
required for a video at the same time.
_type "url" indicates that the video must be extracted from another
location, possibly by a different extractor. Its only required key is:
"url" - the next URL to extract.
The key "ie_key" can be set to the class name (minus the trailing "IE",
e.g. "Youtube") if the extractor class is known in advance.
Additionally, the dictionary may have any properties of the resolved entity
known in advance, for example "title" if the title of the referred video is
known ahead of time.
_type "url_transparent" entities have the same specification as "url", but
indicate that the given additional information is more precise than the one
associated with the resolved URL.
This is useful when a site employs a video service that hosts the video and
its technical metadata, but that video service does not embed a useful
title, description etc.
Subclasses of this one should re-define the _real_initialize() and
_real_extract() methods and define a _VALID_URL regexp.
Probably, they should also be added to the list of extractors.
_GEO_BYPASS attribute may be set to False in order to disable
geo restriction bypass mechanisms for a particular extractor.
Though it won't disable explicit geo restriction bypass based on
country code provided with geo_bypass_country. (experimental)
_GEO_COUNTRIES attribute may contain a list of presumably geo unrestricted
countries for this extractor. One of these countries will be used by
geo restriction bypass mechanism right away in order to bypass
geo restriction, of course, if the mechanism is not disabled. (experimental)
NB: both these geo attributes are experimental and may change in future
or be completely removed.
Finally, the _WORKING attribute should be set to False for broken IEs
in order to warn the users and skip the tests.
"""
_ready = False
_downloader = None
_x_forwarded_for_ip = None
_GEO_BYPASS = True
_GEO_COUNTRIES = None
_WORKING = True
    def __init__(self, downloader=None):
        """Constructor. Receives an optional downloader."""
        self._ready = False
        self._x_forwarded_for_ip = None
        self.set_downloader(downloader)
@classmethod
def suitable(cls, url):
"""Receives a URL and returns True if suitable for this IE."""
# This does not use has/getattr intentionally - we want to know whether
# we have cached the regexp for *this* class, whereas getattr would also
# match the superclass
if '_VALID_URL_RE' not in cls.__dict__:
cls._VALID_URL_RE = re.compile(cls._VALID_URL)
return cls._VALID_URL_RE.match(url) is not None
@classmethod
def _match_id(cls, url):
if '_VALID_URL_RE' not in cls.__dict__:
cls._VALID_URL_RE = re.compile(cls._VALID_URL)
m = cls._VALID_URL_RE.match(url)
assert m
return compat_str(m.group('id'))
    @classmethod
    def working(cls):
        """Getter method for _WORKING."""
        return cls._WORKING
    def initialize(self):
        """Initializes an instance (authentication, etc)."""
        # Geo-bypass setup runs on every call; the real initialization
        # only once per instance.
        self._initialize_geo_bypass(self._GEO_COUNTRIES)
        if not self._ready:
            self._real_initialize()
            self._ready = True
    def _initialize_geo_bypass(self, countries):
        """
        Initialize geo restriction bypass mechanism.

        This method is used to initialize geo bypass mechanism based on faking
        X-Forwarded-For HTTP header. A random country from provided country list
        is selected and a random IP belonging to this country is generated. This
        IP will be passed as X-Forwarded-For HTTP header in all subsequent
        HTTP requests.

        This method will be used for initial geo bypass mechanism initialization
        during the instance initialization with _GEO_COUNTRIES.

        You may also manually call it from extractor's code if geo countries
        information is not available beforehand (e.g. obtained during
        extraction) or due to some another reason.
        """
        # No-op when a fake IP has already been picked for this instance
        if not self._x_forwarded_for_ip:
            country_code = self._downloader.params.get('geo_bypass_country', None)
            # If there is no explicit country for geo bypass specified and
            # the extractor is known to be geo restricted let's fake IP
            # as X-Forwarded-For right away.
            if (not country_code and
                    self._GEO_BYPASS and
                    self._downloader.params.get('geo_bypass', True) and
                    countries):
                country_code = random.choice(countries)
            if country_code:
                self._x_forwarded_for_ip = GeoUtils.random_ipv4(country_code)
                if self._downloader.params.get('verbose', False):
                    self._downloader.to_screen(
                        '[debug] Using fake IP %s (%s) as X-Forwarded-For.'
                        % (self._x_forwarded_for_ip, country_code.upper()))
    def extract(self, url):
        """Extracts URL information and returns it in list of dicts.

        Wraps _real_extract() with: one retry on GeoRestrictedError
        using a faked X-Forwarded-For IP, and normalization of common
        low-level failures into ExtractorError.
        """
        try:
            # At most two attempts: the second only after a successful
            # fake-IP setup in __maybe_fake_ip_and_retry().
            for _ in range(2):
                try:
                    self.initialize()
                    ie_result = self._real_extract(url)
                    if self._x_forwarded_for_ip:
                        ie_result['__x_forwarded_for_ip'] = self._x_forwarded_for_ip
                    return ie_result
                except GeoRestrictedError as e:
                    if self.__maybe_fake_ip_and_retry(e.countries):
                        continue
                    raise
        except ExtractorError:
            raise
        except compat_http_client.IncompleteRead as e:
            raise ExtractorError('A network error has occurred.', cause=e, expected=True)
        except (KeyError, StopIteration) as e:
            raise ExtractorError('An extractor error has occurred.', cause=e)
    def __maybe_fake_ip_and_retry(self, countries):
        """Pick a fake IP from *countries* for geo bypass if allowed and
        not already done; return True when extract() should retry."""
        if (not self._downloader.params.get('geo_bypass_country', None) and
                self._GEO_BYPASS and
                self._downloader.params.get('geo_bypass', True) and
                not self._x_forwarded_for_ip and
                countries):
            country_code = random.choice(countries)
            self._x_forwarded_for_ip = GeoUtils.random_ipv4(country_code)
            if self._x_forwarded_for_ip:
                self.report_warning(
                    'Video is geo restricted. Retrying extraction with fake IP %s (%s) as X-Forwarded-For.'
                    % (self._x_forwarded_for_ip, country_code.upper()))
                return True
        return False
    def set_downloader(self, downloader):
        """Sets the downloader for this IE."""
        self._downloader = downloader
    def _real_initialize(self):
        """Real initialization process. Redefine in subclasses."""
        pass
    def _real_extract(self, url):
        """Real extraction process. Redefine in subclasses."""
        pass
    @classmethod
    def ie_key(cls):
        """A string for getting the InfoExtractor with get_info_extractor"""
        # Class names follow the <Name>IE convention; strip the suffix
        return compat_str(cls.__name__[:-2])
    @property
    def IE_NAME(self):
        # Human-readable extractor name: class name minus the 'IE' suffix
        return compat_str(type(self).__name__[:-2])
    def _request_webpage(self, url_or_request, video_id, note=None, errnote=None, fatal=True, data=None, headers={}, query={}):
        """ Returns the response handle.

        note=None prints the default download message, note=False is
        silent.  On network errors raises ExtractorError when fatal,
        otherwise warns and returns False (errnote=False suppresses the
        warning too).
        """
        if note is None:
            self.report_download_webpage(video_id)
        elif note is not False:
            if video_id is None:
                self.to_screen('%s' % (note,))
            else:
                self.to_screen('%s: %s' % (video_id, note))
        # Fold data/headers/query into the request object (or the URL)
        if isinstance(url_or_request, compat_urllib_request.Request):
            url_or_request = update_Request(
                url_or_request, data=data, headers=headers, query=query)
        else:
            if query:
                url_or_request = update_url_query(url_or_request, query)
            if data is not None or headers:
                url_or_request = sanitized_Request(url_or_request, data, headers)
        try:
            return self._downloader.urlopen(url_or_request)
        except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
            if errnote is False:
                return False
            if errnote is None:
                errnote = 'Unable to download webpage'

            errmsg = '%s: %s' % (errnote, error_to_compat_str(err))
            if fatal:
                raise ExtractorError(errmsg, sys.exc_info()[2], cause=err)
            else:
                self._downloader.report_warning(errmsg)
                return False
    def _download_webpage_handle(self, url_or_request, video_id, note=None, errnote=None, fatal=True, encoding=None, data=None, headers={}, query={}):
        """ Returns a tuple (page content as string, URL handle),
        or False on a non-fatal download failure. """
        # Strip hashes from the URL (#1038)
        if isinstance(url_or_request, (compat_str, str)):
            url_or_request = url_or_request.partition('#')[0]

        # Some sites check X-Forwarded-For HTTP header in order to figure out
        # the origin of the client behind proxy. This allows bypassing geo
        # restriction by faking this header's value to IP that belongs to some
        # geo unrestricted country. We will do so once we encounter any
        # geo restriction error.
        # NOTE(review): this writes into the mutable default `headers`
        # dict when the caller omits it -- the entry then persists
        # across calls; confirm whether that is intended.
        if self._x_forwarded_for_ip:
            if 'X-Forwarded-For' not in headers:
                headers['X-Forwarded-For'] = self._x_forwarded_for_ip

        urlh = self._request_webpage(url_or_request, video_id, note, errnote, fatal, data=data, headers=headers, query=query)
        if urlh is False:
            assert not fatal
            return False
        content = self._webpage_read_content(urlh, url_or_request, video_id, note, errnote, fatal, encoding=encoding)
        return (content, urlh)
@staticmethod
def _guess_encoding_from_content(content_type, webpage_bytes):
m = re.match(r'[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+\s*;\s*charset=(.+)', content_type)
if m:
encoding = m.group(1)
else:
m = re.search(br'<meta[^>]+charset=[\'"]?([^\'")]+)[ /\'">]',
webpage_bytes[:1024])
if m:
encoding = m.group(1).decode('ascii')
elif webpage_bytes.startswith(b'\xff\xfe'):
encoding = 'utf-16'
else:
encoding = 'utf-8'
return encoding
    def __check_blocked(self, content):
        """Raise ExtractorError if *content* is a known blocking /
        censorship interstitial (Websense filter, Indian ISP block
        page, Russian RKN blocklist page)."""
        first_block = content[:512]
        if ('<title>Access to this site is blocked</title>' in content and
                'Websense' in first_block):
            msg = 'Access to this webpage has been blocked by Websense filtering software in your network.'
            blocked_iframe = self._html_search_regex(
                r'<iframe src="([^"]+)"', content,
                'Websense information URL', default=None)
            if blocked_iframe:
                msg += ' Visit %s for more details' % blocked_iframe
            raise ExtractorError(msg, expected=True)
        if '<title>The URL you requested has been blocked</title>' in first_block:
            msg = (
                'Access to this webpage has been blocked by Indian censorship. '
                'Use a VPN or proxy server (with --proxy) to route around it.')
            block_msg = self._html_search_regex(
                r'</h1><p>(.*?)</p>',
                content, 'block message', default=None)
            if block_msg:
                msg += ' (Message: "%s")' % block_msg.replace('\n', ' ')
            raise ExtractorError(msg, expected=True)
        if ('<title>TTK :: Доступ к ресурсу ограничен</title>' in content and
                'blocklist.rkn.gov.ru' in content):
            raise ExtractorError(
                'Access to this webpage has been blocked by decision of the Russian government. '
                'Visit http://blocklist.rkn.gov.ru/ for a block reason.',
                expected=True)
    def _webpage_read_content(self, urlh, url_or_request, video_id, note=None, errnote=None, fatal=True, prefix=None, encoding=None):
        """Read and decode the body of the response handle *urlh*.

        Honors the dump_intermediate_pages / write_pages debug options,
        guesses the encoding when not given, and raises on known block
        pages via __check_blocked().
        """
        content_type = urlh.headers.get('Content-Type', '')
        webpage_bytes = urlh.read()
        if prefix is not None:
            webpage_bytes = prefix + webpage_bytes
        if not encoding:
            encoding = self._guess_encoding_from_content(content_type, webpage_bytes)
        if self._downloader.params.get('dump_intermediate_pages', False):
            self.to_screen('Dumping request to ' + urlh.geturl())
            dump = base64.b64encode(webpage_bytes).decode('ascii')
            self._downloader.to_screen(dump)
        if self._downloader.params.get('write_pages', False):
            basen = '%s_%s' % (video_id, urlh.geturl())
            if len(basen) > 240:
                # Keep the filename within filesystem limits; the hash
                # keeps truncated names unique.
                h = '___' + hashlib.md5(basen.encode('utf-8')).hexdigest()
                basen = basen[:240 - len(h)] + h
            raw_filename = basen + '.dump'
            filename = sanitize_filename(raw_filename, restricted=True)
            self.to_screen('Saving request to ' + filename)
            # Working around MAX_PATH limitation on Windows (see
            # http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx)
            if compat_os_name == 'nt':
                absfilepath = os.path.abspath(filename)
                if len(absfilepath) > 259:
                    filename = '\\\\?\\' + absfilepath
            with open(filename, 'wb') as outf:
                outf.write(webpage_bytes)

        try:
            content = webpage_bytes.decode(encoding, 'replace')
        except LookupError:
            # Unknown codec name coming from the page -- fall back
            content = webpage_bytes.decode('utf-8', 'replace')

        self.__check_blocked(content)

        return content
    def _download_webpage(self, url_or_request, video_id, note=None, errnote=None, fatal=True, tries=1, timeout=5, encoding=None, data=None, headers={}, query={}):
        """ Returns the data of the page as a string.

        Retries up to *tries* times on IncompleteRead, sleeping
        *timeout* seconds between attempts.  Returns False on a
        non-fatal failure (mirroring _download_webpage_handle).
        """
        success = False
        try_count = 0
        while success is False:
            try:
                res = self._download_webpage_handle(url_or_request, video_id, note, errnote, fatal, encoding=encoding, data=data, headers=headers, query=query)
                success = True
            except compat_http_client.IncompleteRead as e:
                try_count += 1
                if try_count >= tries:
                    raise e
                self._sleep(timeout, video_id)
        # res is False (not a tuple) when the download failed non-fatally
        if res is False:
            return res
        else:
            content, _ = res
            return content
    def _download_xml(self, url_or_request, video_id,
                      note='Downloading XML', errnote='Unable to download XML',
                      transform_source=None, fatal=True, encoding=None,
                      data=None, headers={}, query={}):
        """Return the xml as an xml.etree.ElementTree.Element"""
        xml_string = self._download_webpage(
            url_or_request, video_id, note, errnote, fatal=fatal,
            encoding=encoding, data=data, headers=headers, query=query)
        # _download_webpage returns False on a non-fatal failure
        if xml_string is False:
            return xml_string
        return self._parse_xml(
            xml_string, video_id, transform_source=transform_source,
            fatal=fatal)
def _parse_xml(self, xml_string, video_id, transform_source=None, fatal=True):
if transform_source:
xml_string = transform_source(xml_string)
try:
return compat_etree_fromstring(xml_string.encode('utf-8'))
except compat_xml_parse_error as ve:
errmsg = '%s: Failed to parse XML ' % video_id
if fatal:
raise ExtractorError(errmsg, cause=ve)
else:
self.report_warning(errmsg + str(ve))
    def _download_json(self, url_or_request, video_id,
                       note='Downloading JSON metadata',
                       errnote='Unable to download JSON metadata',
                       transform_source=None,
                       fatal=True, encoding=None, data=None, headers={}, query={}):
        # Download a page and deserialize it as JSON; returns None on a
        # non-fatal download failure.
        json_string = self._download_webpage(
            url_or_request, video_id, note, errnote, fatal=fatal,
            encoding=encoding, data=data, headers=headers, query=query)
        if (not fatal) and json_string is False:
            return None
        return self._parse_json(
            json_string, video_id, transform_source=transform_source, fatal=fatal)
def _parse_json(self, json_string, video_id, transform_source=None, fatal=True):
if transform_source:
json_string = transform_source(json_string)
try:
return json.loads(json_string)
except ValueError as ve:
errmsg = '%s: Failed to parse JSON ' % video_id
if fatal:
raise ExtractorError(errmsg, cause=ve)
else:
self.report_warning(errmsg + str(ve))
    def report_warning(self, msg, video_id=None):
        # Forward a warning to the downloader, tagged with the
        # extractor name and optional video id
        idstr = '' if video_id is None else '%s: ' % video_id
        self._downloader.report_warning(
            '[%s] %s%s' % (self.IE_NAME, idstr, msg))
    def to_screen(self, msg):
        """Print msg to screen, prefixing it with '[ie_name]'"""
        self._downloader.to_screen('[%s] %s' % (self.IE_NAME, msg))
    def report_extraction(self, id_or_name):
        """Report information extraction."""
        self.to_screen('%s: Extracting information' % id_or_name)
    def report_download_webpage(self, video_id):
        """Report webpage download."""
        self.to_screen('%s: Downloading webpage' % video_id)
    def report_age_confirmation(self):
        """Report attempt to confirm age."""
        self.to_screen('Confirming age')
    def report_login(self):
        """Report attempt to log in."""
        self.to_screen('Logging in')
    @staticmethod
    def raise_login_required(msg='This video is only available for registered users'):
        # Uniform error for login-gated content; expected=True marks it
        # as a user-facing (non-bug) error.
        raise ExtractorError(
            '%s. Use --username and --password or --netrc to provide account credentials.' % msg,
            expected=True)
    @staticmethod
    def raise_geo_restricted(msg='This video is not available from your location due to geo restriction', countries=None):
        # *countries* feeds the geo-bypass retry in extract()
        raise GeoRestrictedError(msg, countries=countries)
# Methods for following #608
@staticmethod
def url_result(url, ie=None, video_id=None, video_title=None):
"""Returns a URL that points to a page that should be processed"""
# TODO: ie should be the class used for getting the info
video_info = {'_type': 'url',
'url': url,
'ie_key': ie}
if video_id is not None:
video_info['id'] = video_id
if video_title is not None:
video_info['title'] = video_title
return video_info
def playlist_from_matches(self, matches, playlist_id=None, playlist_title=None, getter=None, ie=None):
urls = orderedSet(
self.url_result(self._proto_relative_url(getter(m) if getter else m), ie)
for m in matches)
return self.playlist_result(
urls, playlist_id=playlist_id, playlist_title=playlist_title)
@staticmethod
def playlist_result(entries, playlist_id=None, playlist_title=None, playlist_description=None):
"""Returns a playlist"""
video_info = {'_type': 'playlist',
'entries': entries}
if playlist_id:
video_info['id'] = playlist_id
if playlist_title:
video_info['title'] = playlist_title
if playlist_description:
video_info['description'] = playlist_description
return video_info
def _search_regex(self, pattern, string, name, default=NO_DEFAULT, fatal=True, flags=0, group=None):
"""
Perform a regex search on the given string, using a single or a list of
patterns returning the first matching group.
In case of failure return a default value or raise a WARNING or a
RegexNotFoundError, depending on fatal, specifying the field name.
"""
if isinstance(pattern, (str, compat_str, compiled_regex_type)):
mobj = re.search(pattern, string, flags)
else:
for p in pattern:
mobj = re.search(p, string, flags)
if mobj:
break
if not self._downloader.params.get('no_color') and compat_os_name != 'nt' and sys.stderr.isatty():
_name = '\033[0;34m%s\033[0m' % name
else:
_name = name
if mobj:
if group is None:
# return the first matching group
return next(g for g in mobj.groups() if g is not None)
else:
return mobj.group(group)
elif default is not NO_DEFAULT:
return default
elif fatal:
raise RegexNotFoundError('Unable to extract %s' % _name)
else:
self._downloader.report_warning('unable to extract %s' % _name + bug_reports_message())
return None
def _html_search_regex(self, pattern, string, name, default=NO_DEFAULT, fatal=True, flags=0, group=None):
"""
Like _search_regex, but strips HTML tags and unescapes entities.
"""
res = self._search_regex(pattern, string, name, default, fatal, flags, group)
if res:
return clean_html(res).strip()
else:
return res
    def _get_netrc_login_info(self, netrc_machine=None):
        """Look up (username, password) for *netrc_machine* (default:
        self._NETRC_MACHINE) in the user's .netrc file.

        Returns (None, None) when --netrc is off or the lookup fails
        (the failure is reported as a warning, not raised).
        """
        username = None
        password = None
        netrc_machine = netrc_machine or self._NETRC_MACHINE

        if self._downloader.params.get('usenetrc', False):
            try:
                info = netrc.netrc().authenticators(netrc_machine)
                if info is not None:
                    username = info[0]
                    password = info[2]
                else:
                    raise netrc.NetrcParseError(
                        'No authenticators for %s' % netrc_machine)
            except (IOError, netrc.NetrcParseError) as err:
                self._downloader.report_warning(
                    'parsing .netrc: %s' % error_to_compat_str(err))

        return username, password
def _get_login_info(self, username_option='username', password_option='password', netrc_machine=None):
    """
    Get the login info as (username, password)
    First look for the manually specified credentials using username_option
    and password_option as keys in params dictionary. If no such credentials
    available look in the netrc file using the netrc_machine or _NETRC_MACHINE
    value.
    If there's no info available, return (None, None)
    """
    if self._downloader is None:
        return (None, None)

    params = self._downloader.params
    # Explicit command-line/options credentials take precedence.
    username = params.get(username_option)
    if username is not None:
        return username, params[password_option]
    return self._get_netrc_login_info(netrc_machine)
def _get_tfa_info(self, note='two-factor verification code'):
    """
    Get the two-factor authentication info
    TODO - asking the user will be required for sms/phone verify
    currently just uses the command line option
    If there's no info available, return None
    """
    if self._downloader is None:
        return None
    downloader_params = self._downloader.params

    # Prefer the --twofactor option; otherwise prompt interactively.
    if downloader_params.get('twofactor') is not None:
        return downloader_params['twofactor']

    return compat_getpass('Type %s and press [Return]: ' % note)
# Helper functions for extracting OpenGraph info
@staticmethod
def _og_regexes(prop):
content_re = r'content=(?:"([^"]+?)"|\'([^\']+?)\'|\s*([^\s"\'=<>`]+?))'
property_re = (r'(?:name|property)=(?:\'og:%(prop)s\'|"og:%(prop)s"|\s*og:%(prop)s\b)'
% {'prop': re.escape(prop)})
template = r'<meta[^>]+?%s[^>]+?%s'
return [
template % (property_re, content_re),
template % (content_re, property_re),
]
@staticmethod
def _meta_regex(prop):
    """Return a regex matching a <meta> tag whose identifying attribute
    (itemprop/name/property/id/http-equiv) equals *prop*, capturing the
    tag's content attribute in the named group 'content'."""
    # (?x) verbose mode: literal whitespace in the pattern is ignored.
    # The lookahead tolerates any attribute order within the tag.
    return r'''(?isx)<meta
                (?=[^>]+(?:itemprop|name|property|id|http-equiv)=(["\']?)%s\1)
                [^>]+?content=(["\'])(?P<content>.*?)\2''' % re.escape(prop)
def _og_search_property(self, prop, html, name=None, **kargs):
    """Search *html* for OpenGraph property *prop* (or any of a list of
    props, tried in order) and return the HTML-unescaped value."""
    props = list(prop) if isinstance(prop, (list, tuple)) else [prop]
    if name is None:
        name = 'OpenGraph %s' % props[0]
    og_regexes = []
    for p in props:
        og_regexes.extend(self._og_regexes(p))
    escaped = self._search_regex(og_regexes, html, name, flags=re.DOTALL, **kargs)
    return None if escaped is None else unescapeHTML(escaped)
def _og_search_thumbnail(self, html, **kargs):
    """Return the og:image URL from *html*, or None (non-fatal lookup)."""
    return self._og_search_property('image', html, name='thumbnail URL', fatal=False, **kargs)
def _og_search_description(self, html, **kargs):
    """Return the og:description value from *html*, or None (non-fatal)."""
    return self._og_search_property('description', html, fatal=False, **kargs)
def _og_search_title(self, html, **kargs):
    """Return the og:title value from *html* (fatal by default)."""
    return self._og_search_property('title', html, **kargs)
def _og_search_video_url(self, html, name='video url', secure=True, **kargs):
    """Return the OpenGraph video URL, preferring og:video:secure_url when
    *secure* is true, then og:video and og:video:url."""
    candidates = []
    if secure:
        candidates.extend(self._og_regexes('video:secure_url'))
    candidates.extend(self._og_regexes('video'))
    candidates.extend(self._og_regexes('video:url'))
    return self._html_search_regex(candidates, html, name, **kargs)
def _og_search_url(self, html, **kargs):
    """Return the og:url value from *html*."""
    url_prop = 'url'
    return self._og_search_property(url_prop, html, **kargs)
def _html_search_meta(self, name, html, display_name=None, fatal=False, **kwargs):
    """Return the content of the <meta> tag named *name* (a single name or
    a list of names tried in order); non-fatal by default."""
    names = name if isinstance(name, (list, tuple)) else [name]
    if display_name is None:
        display_name = names[0]
    patterns = [self._meta_regex(n) for n in names]
    return self._html_search_regex(
        patterns, html, display_name, fatal=fatal, group='content', **kwargs)
def _dc_search_uploader(self, html):
    """Return the Dublin Core creator meta value as the uploader name."""
    return self._html_search_meta('dc.creator', html, 'uploader')
def _rta_search(self, html):
# See http://www.rtalabel.org/index.php?content=howtofaq#single
if re.search(r'(?ix)<meta\s+name="rating"\s+'
r' content="RTA-5042-1996-1400-1577-RTA"',
html):
return 18
return 0
def _media_rating_search(self, html):
    """Map an ICRA-style 'rating' <meta> value onto a minimum age, or None
    when absent or unrecognized."""
    # See http://www.tjg-designs.com/WP/metadata-code-examples-adding-metadata-to-your-web-pages/
    rating = self._html_search_meta('rating', html)
    if not rating:
        return None
    return {
        'safe for kids': 0,
        'general': 8,
        '14 years': 14,
        'mature': 17,
        'restricted': 19,
    }.get(rating.lower())
def _family_friendly_search(self, html):
    """Derive an age limit (0 or 18) from schema.org isFamilyFriendly, or
    None when the meta tag is absent or unrecognized."""
    # See http://schema.org/VideoObject
    family_friendly = self._html_search_meta(
        'isFamilyFriendly', html, default=None)
    if not family_friendly:
        return None
    return {
        '1': 0,
        'true': 0,
        '0': 18,
        'false': 18,
    }.get(family_friendly.lower())
def _twitter_search_player(self, html):
    """Return the Twitter-card player URL (<meta name="twitter:player">)."""
    meta_name = 'twitter:player'
    return self._html_search_meta(meta_name, html, 'twitter card player')
def _search_json_ld(self, html, video_id, expected_type=None, **kwargs):
    """Locate a JSON-LD <script> block in *html* and parse it into an
    info dict via _json_ld. `default` and `fatal` in **kwargs behave as
    they do for _search_regex."""
    json_ld = self._search_regex(
        r'(?s)<script[^>]+type=(["\'])application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>',
        html, 'JSON-LD', group='json_ld', **kwargs)
    default = kwargs.get('default', NO_DEFAULT)
    if not json_ld:
        return default if default is not NO_DEFAULT else {}
    # JSON-LD may be malformed and thus `fatal` should be respected.
    # At the same time `default` may be passed that assumes `fatal=False`
    # for _search_regex. Let's simulate the same behavior here as well.
    # Fix: NO_DEFAULT is a sentinel object, so compare by identity (`is`)
    # like the rest of this file does — `==` could misbehave for defaults
    # with a custom __eq__ (e.g. containers or array-like objects).
    fatal = kwargs.get('fatal', True) if default is NO_DEFAULT else False
    return self._json_ld(json_ld, video_id, fatal=fatal, expected_type=expected_type)
def _json_ld(self, json_ld, video_id, fatal=True, expected_type=None):
    """Convert a JSON-LD payload (raw string or already-parsed object)
    into an info dict.

    Only items whose @context is 'http://schema.org' are considered;
    recognized @types are TVEpisode/Episode, Article and VideoObject.
    Keys whose value is None are stripped from the result.
    """
    if isinstance(json_ld, compat_str):
        json_ld = self._parse_json(json_ld, video_id, fatal=fatal)
    if not json_ld:
        return {}
    info = {}
    if not isinstance(json_ld, (list, tuple, dict)):
        return info
    # Normalize to a list of top-level JSON-LD items.
    if isinstance(json_ld, dict):
        json_ld = [json_ld]

    def extract_video_object(e):
        # Map a schema.org VideoObject onto youtube-dl info-dict keys.
        assert e['@type'] == 'VideoObject'
        info.update({
            'url': e.get('contentUrl'),
            'title': unescapeHTML(e.get('name')),
            'description': unescapeHTML(e.get('description')),
            'thumbnail': e.get('thumbnailUrl') or e.get('thumbnailURL'),
            'duration': parse_duration(e.get('duration')),
            'timestamp': unified_timestamp(e.get('uploadDate')),
            'filesize': float_or_none(e.get('contentSize')),
            'tbr': int_or_none(e.get('bitrate')),
            'width': int_or_none(e.get('width')),
            'height': int_or_none(e.get('height')),
            'view_count': int_or_none(e.get('interactionCount')),
        })

    for e in json_ld:
        if e.get('@context') == 'http://schema.org':
            item_type = e.get('@type')
            if expected_type is not None and expected_type != item_type:
                return info
            if item_type in ('TVEpisode', 'Episode'):
                info.update({
                    'episode': unescapeHTML(e.get('name')),
                    'episode_number': int_or_none(e.get('episodeNumber')),
                    'description': unescapeHTML(e.get('description')),
                })
                part_of_season = e.get('partOfSeason')
                if isinstance(part_of_season, dict) and part_of_season.get('@type') in ('TVSeason', 'Season', 'CreativeWorkSeason'):
                    info['season_number'] = int_or_none(part_of_season.get('seasonNumber'))
                part_of_series = e.get('partOfSeries') or e.get('partOfTVSeries')
                if isinstance(part_of_series, dict) and part_of_series.get('@type') in ('TVSeries', 'Series', 'CreativeWorkSeries'):
                    info['series'] = unescapeHTML(part_of_series.get('name'))
            elif item_type == 'Article':
                info.update({
                    'timestamp': parse_iso8601(e.get('datePublished')),
                    'title': unescapeHTML(e.get('headline')),
                    'description': unescapeHTML(e.get('articleBody')),
                })
            elif item_type == 'VideoObject':
                extract_video_object(e)
                # A direct VideoObject keeps scanning further items.
                continue
            # Episode/Article items may embed a nested VideoObject.
            video = e.get('video')
            if isinstance(video, dict) and video.get('@type') == 'VideoObject':
                extract_video_object(video)
            # First matching schema.org item (other than a bare
            # VideoObject) ends the scan.
            break
    return dict((k, v) for k, v in info.items() if v is not None)
@staticmethod
def _hidden_inputs(html):
    """Collect name→value pairs for all hidden/submit <input> fields in
    *html*. HTML comments are stripped first so commented-out markup is
    ignored."""
    html = re.sub(r'<!--(?:(?!<!--).)*-->', '', html)
    hidden_inputs = {}
    for input_el in re.findall(r'(?i)(<input[^>]+>)', html):
        attrs = extract_attributes(input_el)
        # Fix: the original checked `if not input` which can never be true
        # (re.findall only yields non-empty matches); the intent is to skip
        # tags with no parseable attributes. Also renamed the loop variable
        # to stop shadowing the builtin `input`.
        if not attrs:
            continue
        if attrs.get('type') not in ('hidden', 'submit'):
            continue
        name = attrs.get('name') or attrs.get('id')
        value = attrs.get('value')
        if name and value is not None:
            hidden_inputs[name] = value
    return hidden_inputs
def _form_hidden_inputs(self, form_id, html):
    """Return the hidden inputs of the <form> whose id is *form_id*."""
    form_body = self._search_regex(
        r'(?is)<form[^>]+?id=(["\'])%s\1[^>]*>(?P<form>.+?)</form>' % form_id,
        html, '%s form' % form_id, group='form')
    return self._hidden_inputs(form_body)
def _sort_formats(self, formats, field_preference=None):
    """Sort *formats* in place from worst to best quality.

    When *field_preference* (a list/tuple of field names) is given, formats
    are ordered by those fields alone; otherwise a built-in multi-criteria
    key is used (preference, language, quality, tbr, filesize, ...).
    Raises ExtractorError when *formats* is empty.
    """
    if not formats:
        raise ExtractorError('No video formats found')

    for f in formats:
        # Automatically determine tbr when missing based on abr and vbr (improves
        # formats sorting in some cases)
        if 'tbr' not in f and f.get('abr') is not None and f.get('vbr') is not None:
            f['tbr'] = f['abr'] + f['vbr']

    def _formats_key(f):
        # TODO remove the following workaround
        from ..utils import determine_ext
        if not f.get('ext') and 'url' in f:
            f['ext'] = determine_ext(f['url'])

        if isinstance(field_preference, (list, tuple)):
            # Caller-supplied ordering: missing values sort lowest
            # ('' for format_id, -1 for numeric fields).
            return tuple(
                f.get(field)
                if f.get(field) is not None
                else ('' if field == 'format_id' else -1)
                for field in field_preference)

        preference = f.get('preference')
        if preference is None:
            preference = 0
            if f.get('ext') in ['f4f', 'f4m']:  # Not yet supported
                preference -= 0.5

        protocol = f.get('protocol') or determine_protocol(f)
        proto_preference = 0 if protocol in ['http', 'https'] else (-0.5 if protocol == 'rtsp' else -0.1)

        if f.get('vcodec') == 'none':  # audio only
            preference -= 50
            if self._downloader.params.get('prefer_free_formats'):
                ORDER = ['aac', 'mp3', 'm4a', 'webm', 'ogg', 'opus']
            else:
                ORDER = ['webm', 'opus', 'ogg', 'mp3', 'aac', 'm4a']
            ext_preference = 0
            try:
                audio_ext_preference = ORDER.index(f['ext'])
            except ValueError:
                audio_ext_preference = -1
        else:
            if f.get('acodec') == 'none':  # video only
                preference -= 40
            if self._downloader.params.get('prefer_free_formats'):
                ORDER = ['flv', 'mp4', 'webm']
            else:
                ORDER = ['webm', 'flv', 'mp4']
            try:
                ext_preference = ORDER.index(f['ext'])
            except ValueError:
                ext_preference = -1
            audio_ext_preference = 0

        # Tuple comparison implements the multi-criteria ordering;
        # earlier entries dominate later ones.
        return (
            preference,
            f.get('language_preference') if f.get('language_preference') is not None else -1,
            f.get('quality') if f.get('quality') is not None else -1,
            f.get('tbr') if f.get('tbr') is not None else -1,
            f.get('filesize') if f.get('filesize') is not None else -1,
            f.get('vbr') if f.get('vbr') is not None else -1,
            f.get('height') if f.get('height') is not None else -1,
            f.get('width') if f.get('width') is not None else -1,
            proto_preference,
            ext_preference,
            f.get('abr') if f.get('abr') is not None else -1,
            audio_ext_preference,
            f.get('fps') if f.get('fps') is not None else -1,
            f.get('filesize_approx') if f.get('filesize_approx') is not None else -1,
            f.get('source_preference') if f.get('source_preference') is not None else -1,
            f.get('format_id') if f.get('format_id') is not None else '',
        )
    formats.sort(key=_formats_key)
def _check_formats(self, formats, video_id):
    """Drop, in place, any format whose URL fails a validity probe."""
    if not formats:
        return
    valid = []
    for f in formats:
        fmt_id = f.get('format_id')
        item = '%s video format' % fmt_id if fmt_id else 'video'
        if self._is_valid_url(f['url'], video_id, item=item):
            valid.append(f)
    formats[:] = valid
@staticmethod
def _remove_duplicate_formats(formats):
format_urls = set()
unique_formats = []
for f in formats:
if f['url'] not in format_urls:
format_urls.add(f['url'])
unique_formats.append(f)
formats[:] = unique_formats
def _is_valid_url(self, url, video_id, item='video', headers=None):
    """Probe *url* with a request and return False (with a log line) when
    it is clearly dead; non-HTTP(S) URLs are assumed valid. Failures other
    than URLError propagate."""
    # Fix: avoid a mutable default argument ({}); None default preserves
    # the original behavior exactly.
    if headers is None:
        headers = {}
    url = self._proto_relative_url(url, scheme='http:')
    # For now assume non HTTP(S) URLs always valid
    if not (url.startswith('http://') or url.startswith('https://')):
        return True
    try:
        self._request_webpage(url, video_id, 'Checking %s URL' % item, headers=headers)
        return True
    except ExtractorError as e:
        if isinstance(e.cause, compat_urllib_error.URLError):
            self.to_screen(
                '%s: %s URL is invalid, skipping' % (video_id, item))
            return False
        raise
def http_scheme(self):
    """ Either "http:" or "https:", depending on the user's preferences """
    if self._downloader.params.get('prefer_insecure', False):
        return 'http:'
    return 'https:'
def _proto_relative_url(self, url, scheme=None):
if url is None:
return url
if url.startswith('//'):
if scheme is None:
scheme = self.http_scheme()
return scheme + url
else:
return url
def _sleep(self, timeout, video_id, msg_template=None):
    """Announce (via to_screen) and perform a wait of *timeout* seconds."""
    if msg_template is None:
        msg_template = '%(video_id)s: Waiting for %(timeout)s seconds'
    self.to_screen(msg_template % {'video_id': video_id, 'timeout': timeout})
    time.sleep(timeout)
def _extract_f4m_formats(self, manifest_url, video_id, preference=None, f4m_id=None,
                         transform_source=lambda s: fix_xml_ampersands(s).strip(),
                         fatal=True, m3u8_id=None):
    """Download an f4m (Adobe HDS) manifest and parse it into a list of
    formats; returns [] when the download fails non-fatally."""
    manifest = self._download_xml(
        manifest_url, video_id, 'Downloading f4m manifest',
        'Unable to download f4m manifest',
        # Some manifests may be malformed, e.g. prosiebensat1 generated manifests
        # (see https://github.com/rg3/youtube-dl/issues/6215#issuecomment-121704244)
        transform_source=transform_source,
        fatal=fatal)

    if manifest is False:
        return []

    return self._parse_f4m_formats(
        manifest, manifest_url, video_id, preference=preference, f4m_id=f4m_id,
        transform_source=transform_source, fatal=fatal, m3u8_id=m3u8_id)
def _parse_f4m_formats(self, manifest, manifest_url, video_id, preference=None, f4m_id=None,
                       transform_source=lambda s: fix_xml_ampersands(s).strip(),
                       fatal=True, m3u8_id=None):
    """Parse an already-downloaded f4m manifest (an XML element) into a
    list of format dicts, recursing into nested f4m/m3u8 references."""
    # currently youtube-dl cannot decode the playerVerificationChallenge as Akamai uses Adobe Alchemy
    akamai_pv = manifest.find('{http://ns.adobe.com/f4m/1.0}pv-2.0')
    if akamai_pv is not None and ';' in akamai_pv.text:
        playerVerificationChallenge = akamai_pv.text.split(';')[0]
        if playerVerificationChallenge.strip() != '':
            return []

    formats = []
    manifest_version = '1.0'
    media_nodes = manifest.findall('{http://ns.adobe.com/f4m/1.0}media')
    if not media_nodes:
        manifest_version = '2.0'
        media_nodes = manifest.findall('{http://ns.adobe.com/f4m/2.0}media')
    # Remove unsupported DRM protected media from final formats
    # rendition (see https://github.com/rg3/youtube-dl/issues/8573).
    media_nodes = remove_encrypted_media(media_nodes)
    if not media_nodes:
        return formats

    manifest_base_url = get_base_url(manifest)

    bootstrap_info = xpath_element(
        manifest, ['{http://ns.adobe.com/f4m/1.0}bootstrapInfo', '{http://ns.adobe.com/f4m/2.0}bootstrapInfo'],
        'bootstrap info', default=None)

    vcodec = None
    mime_type = xpath_text(
        manifest, ['{http://ns.adobe.com/f4m/1.0}mimeType', '{http://ns.adobe.com/f4m/2.0}mimeType'],
        'base URL', default=None)
    if mime_type and mime_type.startswith('audio/'):
        # An audio mime type on the manifest marks all renditions audio-only.
        vcodec = 'none'

    for i, media_el in enumerate(media_nodes):
        tbr = int_or_none(media_el.attrib.get('bitrate'))
        width = int_or_none(media_el.attrib.get('width'))
        height = int_or_none(media_el.attrib.get('height'))
        format_id = '-'.join(filter(None, [f4m_id, compat_str(i if tbr is None else tbr)]))
        # If <bootstrapInfo> is present, the specified f4m is a
        # stream-level manifest, and only set-level manifests may refer to
        # external resources.  See section 11.4 and section 4 of F4M spec
        if bootstrap_info is None:
            media_url = None
            # @href is introduced in 2.0, see section 11.6 of F4M spec
            if manifest_version == '2.0':
                media_url = media_el.attrib.get('href')
            if media_url is None:
                media_url = media_el.attrib.get('url')
            if not media_url:
                continue
            manifest_url = (
                media_url if media_url.startswith('http://') or media_url.startswith('https://')
                else ((manifest_base_url or '/'.join(manifest_url.split('/')[:-1])) + '/' + media_url))
            # If media_url is itself a f4m manifest do the recursive extraction
            # since bitrates in parent manifest (this one) and media_url manifest
            # may differ leading to inability to resolve the format by requested
            # bitrate in f4m downloader
            ext = determine_ext(manifest_url)
            if ext == 'f4m':
                f4m_formats = self._extract_f4m_formats(
                    manifest_url, video_id, preference=preference, f4m_id=f4m_id,
                    transform_source=transform_source, fatal=fatal)
                # Sometimes stream-level manifest contains single media entry that
                # does not contain any quality metadata (e.g. http://matchtv.ru/#live-player).
                # At the same time parent's media entry in set-level manifest may
                # contain it. We will copy it from parent in such cases.
                if len(f4m_formats) == 1:
                    f = f4m_formats[0]
                    f.update({
                        'tbr': f.get('tbr') or tbr,
                        'width': f.get('width') or width,
                        'height': f.get('height') or height,
                        'format_id': f.get('format_id') if not tbr else format_id,
                        'vcodec': vcodec,
                    })
                formats.extend(f4m_formats)
                continue
            elif ext == 'm3u8':
                formats.extend(self._extract_m3u8_formats(
                    manifest_url, video_id, 'mp4', preference=preference,
                    m3u8_id=m3u8_id, fatal=fatal))
                continue
        formats.append({
            'format_id': format_id,
            'url': manifest_url,
            'manifest_url': manifest_url,
            # With a bootstrapInfo the f4m downloader produces FLV output.
            'ext': 'flv' if bootstrap_info is not None else None,
            'protocol': 'f4m',
            'tbr': tbr,
            'width': width,
            'height': height,
            'vcodec': vcodec,
            'preference': preference,
        })
    return formats
def _m3u8_meta_format(self, m3u8_url, ext=None, preference=None, m3u8_id=None):
return {
'format_id': '-'.join(filter(None, [m3u8_id, 'meta'])),
'url': m3u8_url,
'ext': ext,
'protocol': 'm3u8',
'preference': preference - 100 if preference else -100,
'resolution': 'multiple',
'format_note': 'Quality selection URL',
}
def _extract_m3u8_formats(self, m3u8_url, video_id, ext=None,
                          entry_protocol='m3u8', preference=None,
                          m3u8_id=None, note=None, errnote=None,
                          fatal=True, live=False):
    """Download an HLS playlist and parse it into a list of formats;
    returns [] when the download fails non-fatally."""
    res = self._download_webpage_handle(
        m3u8_url, video_id,
        note=note or 'Downloading m3u8 information',
        errnote=errnote or 'Failed to download m3u8 information',
        fatal=fatal)

    if res is False:
        return []

    m3u8_doc, urlh = res
    # Use the post-redirect URL as the base for relative playlist entries.
    return self._parse_m3u8_formats(
        m3u8_doc, urlh.geturl(), ext=ext, entry_protocol=entry_protocol,
        preference=preference, m3u8_id=m3u8_id, live=live)
def _parse_m3u8_formats(self, m3u8_doc, m3u8_url, ext=None,
                        entry_protocol='m3u8', preference=None,
                        m3u8_id=None, live=False):
    """Parse an m3u8 document into a list of format dicts.

    DRM-protected playlists (Adobe Flash Access, Apple FairPlay) yield [].
    Media playlists are returned as a single entry; master playlists are
    expanded into one format per EXT-X-STREAM-INF / EXT-X-MEDIA entry.
    """
    if '#EXT-X-FAXS-CM:' in m3u8_doc:  # Adobe Flash Access
        return []

    if re.search(r'#EXT-X-SESSION-KEY:.*?URI="skd://', m3u8_doc):  # Apple FairPlay
        return []

    formats = []

    # Resolve playlist-relative URIs against the playlist URL.
    format_url = lambda u: (
        u
        if re.match(r'^https?://', u)
        else compat_urlparse.urljoin(m3u8_url, u))

    # References:
    # 1. https://tools.ietf.org/html/draft-pantos-http-live-streaming-21
    # 2. https://github.com/rg3/youtube-dl/issues/12211

    # We should try extracting formats only from master playlists [1, 4.3.4],
    # i.e. playlists that describe available qualities. On the other hand
    # media playlists [1, 4.3.3] should be returned as is since they contain
    # just the media without qualities renditions.
    # Fortunately, master playlist can be easily distinguished from media
    # playlist based on particular tags availability. As of [1, 4.3.3, 4.3.4]
    # master playlist tags MUST NOT appear in a media playist and vice versa.
    # As of [1, 4.3.3.1] #EXT-X-TARGETDURATION tag is REQUIRED for every
    # media playlist and MUST NOT appear in master playlist thus we can
    # clearly detect media playlist with this criterion.

    if '#EXT-X-TARGETDURATION' in m3u8_doc:  # media playlist, return as is
        return [{
            'url': m3u8_url,
            'format_id': m3u8_id,
            'ext': ext,
            'protocol': entry_protocol,
            'preference': preference,
        }]

    # groups: GROUP-ID -> list of EXT-X-MEDIA attribute dicts.
    groups = {}
    # Attributes of the most recent EXT-X-STREAM-INF line; they apply to
    # the next non-tag line (the variant URI).
    last_stream_inf = {}

    def extract_media(x_media_line):
        media = parse_m3u8_attributes(x_media_line)
        # As per [1, 4.3.4.1] TYPE, GROUP-ID and NAME are REQUIRED
        media_type, group_id, name = media.get('TYPE'), media.get('GROUP-ID'), media.get('NAME')
        if not (media_type and group_id and name):
            return
        groups.setdefault(group_id, []).append(media)
        if media_type not in ('VIDEO', 'AUDIO'):
            return
        media_url = media.get('URI')
        if media_url:
            format_id = []
            for v in (m3u8_id, group_id, name):
                if v:
                    format_id.append(v)
            f = {
                'format_id': '-'.join(format_id),
                'url': format_url(media_url),
                'manifest_url': m3u8_url,
                'language': media.get('LANGUAGE'),
                'ext': ext,
                'protocol': entry_protocol,
                'preference': preference,
            }
            if media_type == 'AUDIO':
                f['vcodec'] = 'none'
            formats.append(f)

    def build_stream_name():
        # Despite specification does not mention NAME attribute for
        # EXT-X-STREAM-INF tag it still sometimes may be present (see [1]
        # or vidio test in TestInfoExtractor.test_parse_m3u8_formats)
        # 1. http://www.vidio.com/watch/165683-dj_ambred-booyah-live-2015
        stream_name = last_stream_inf.get('NAME')
        if stream_name:
            return stream_name
        # If there is no NAME in EXT-X-STREAM-INF it will be obtained
        # from corresponding rendition group
        stream_group_id = last_stream_inf.get('VIDEO')
        if not stream_group_id:
            return
        stream_group = groups.get(stream_group_id)
        if not stream_group:
            return stream_group_id
        rendition = stream_group[0]
        return rendition.get('NAME') or stream_group_id

    for line in m3u8_doc.splitlines():
        if line.startswith('#EXT-X-STREAM-INF:'):
            last_stream_inf = parse_m3u8_attributes(line)
        elif line.startswith('#EXT-X-MEDIA:'):
            extract_media(line)
        elif line.startswith('#') or not line.strip():
            continue
        else:
            # A plain line is the variant URI for the preceding
            # EXT-X-STREAM-INF tag.
            tbr = float_or_none(
                last_stream_inf.get('AVERAGE-BANDWIDTH') or
                last_stream_inf.get('BANDWIDTH'), scale=1000)
            format_id = []
            if m3u8_id:
                format_id.append(m3u8_id)
            stream_name = build_stream_name()
            # Bandwidth of live streams may differ over time thus making
            # format_id unpredictable. So it's better to keep provided
            # format_id intact.
            if not live:
                format_id.append(stream_name if stream_name else '%d' % (tbr if tbr else len(formats)))
            manifest_url = format_url(line.strip())
            f = {
                'format_id': '-'.join(format_id),
                'url': manifest_url,
                'manifest_url': m3u8_url,
                'tbr': tbr,
                'ext': ext,
                'fps': float_or_none(last_stream_inf.get('FRAME-RATE')),
                'protocol': entry_protocol,
                'preference': preference,
            }
            resolution = last_stream_inf.get('RESOLUTION')
            if resolution:
                mobj = re.search(r'(?P<width>\d+)[xX](?P<height>\d+)', resolution)
                if mobj:
                    f['width'] = int(mobj.group('width'))
                    f['height'] = int(mobj.group('height'))
            # Unified Streaming Platform
            mobj = re.search(
                r'audio.*?(?:%3D|=)(\d+)(?:-video.*?(?:%3D|=)(\d+))?', f['url'])
            if mobj:
                abr, vbr = mobj.groups()
                abr, vbr = float_or_none(abr, 1000), float_or_none(vbr, 1000)
                f.update({
                    'vbr': vbr,
                    'abr': abr,
                })
            codecs = parse_codecs(last_stream_inf.get('CODECS'))
            f.update(codecs)
            audio_group_id = last_stream_inf.get('AUDIO')
            # As per [1, 4.3.4.1.1] any EXT-X-STREAM-INF tag which
            # references a rendition group MUST have a CODECS attribute.
            # However, this is not always respected, for example, [2]
            # contains EXT-X-STREAM-INF tag which references AUDIO
            # rendition group but does not have CODECS and despite
            # referencing audio group an audio group, it represents
            # a complete (with audio and video) format. So, for such cases
            # we will ignore references to rendition groups and treat them
            # as complete formats.
            if audio_group_id and codecs and f.get('vcodec') != 'none':
                audio_group = groups.get(audio_group_id)
                if audio_group and audio_group[0].get('URI'):
                    # TODO: update acodec for audio only formats with
                    # the same GROUP-ID
                    f['acodec'] = 'none'
            formats.append(f)
            # Reset so a stray URI line without a preceding tag does not
            # inherit the previous variant's attributes.
            last_stream_inf = {}
    return formats
@staticmethod
def _xpath_ns(path, namespace=None):
if not namespace:
return path
out = []
for c in path.split('/'):
if not c or c == '.':
out.append(c)
else:
out.append('{%s}%s' % (namespace, c))
return '/'.join(out)
def _extract_smil_formats(self, smil_url, video_id, fatal=True, f4m_params=None, transform_source=None):
    """Download a SMIL document and return its parsed formats list."""
    smil = self._download_smil(smil_url, video_id, fatal=fatal, transform_source=transform_source)

    if smil is False:
        # _download_xml only returns False when fatal is disabled.
        assert not fatal
        return []

    namespace = self._parse_smil_namespace(smil)

    return self._parse_smil_formats(
        smil, smil_url, video_id, namespace=namespace, f4m_params=f4m_params)
def _extract_smil_info(self, smil_url, video_id, fatal=True, f4m_params=None):
    """Download a SMIL document and return a full info dict (formats,
    subtitles, metadata); {} when the download fails non-fatally."""
    smil = self._download_smil(smil_url, video_id, fatal=fatal)
    if smil is False:
        return {}
    return self._parse_smil(smil, smil_url, video_id, f4m_params=f4m_params)
def _download_smil(self, smil_url, video_id, fatal=True, transform_source=None):
    """Fetch and XML-parse a SMIL document; False on non-fatal failure."""
    return self._download_xml(
        smil_url, video_id, 'Downloading SMIL file',
        'Unable to download SMIL file', fatal=fatal, transform_source=transform_source)
def _parse_smil(self, smil, smil_url, video_id, f4m_params=None):
    """Build a full info dict (formats, subtitles, title/description/date,
    thumbnails) from a parsed SMIL document."""
    namespace = self._parse_smil_namespace(smil)

    formats = self._parse_smil_formats(
        smil, smil_url, video_id, namespace=namespace, f4m_params=f4m_params)
    subtitles = self._parse_smil_subtitles(smil, namespace=namespace)

    # NOTE(review): the video_id parameter is deliberately overridden here
    # with the SMIL URL's basename — the caller-supplied id is only used
    # for logging inside the helpers above.
    video_id = os.path.splitext(url_basename(smil_url))[0]
    title = None
    description = None
    upload_date = None
    for meta in smil.findall(self._xpath_ns('./head/meta', namespace)):
        name = meta.attrib.get('name')
        content = meta.attrib.get('content')
        if not name or not content:
            continue
        if not title and name == 'title':
            title = content
        elif not description and name in ('description', 'abstract'):
            description = content
        elif not upload_date and name == 'date':
            upload_date = unified_strdate(content)

    thumbnails = [{
        'id': image.get('type'),
        'url': image.get('src'),
        'width': int_or_none(image.get('width')),
        'height': int_or_none(image.get('height')),
    } for image in smil.findall(self._xpath_ns('.//image', namespace)) if image.get('src')]

    return {
        'id': video_id,
        'title': title or video_id,
        'description': description,
        'upload_date': upload_date,
        'thumbnails': thumbnails,
        'formats': formats,
        'subtitles': subtitles,
    }
def _parse_smil_namespace(self, smil):
    """Return the XML namespace of the SMIL root element, or None."""
    return self._search_regex(
        r'(?i)^{([^}]+)?}smil$', smil.tag, 'namespace', default=None)
def _parse_smil_formats(self, smil, smil_url, video_id, namespace=None, f4m_params=None, transform_rtmp_url=None):
    """Extract format dicts from a SMIL document's <video>/<audio> nodes,
    dispatching on protocol/extension to RTMP, HLS, HDS or plain HTTP."""
    # The base URL for relative media sources comes from the first
    # <meta base=...> (or httpBase), falling back to the SMIL URL itself.
    base = smil_url
    for meta in smil.findall(self._xpath_ns('./head/meta', namespace)):
        b = meta.get('base') or meta.get('httpBase')
        if b:
            base = b
            break

    formats = []
    rtmp_count = 0
    http_count = 0
    m3u8_count = 0

    srcs = []
    media = smil.findall(self._xpath_ns('.//video', namespace)) + smil.findall(self._xpath_ns('.//audio', namespace))
    for medium in media:
        src = medium.get('src')
        if not src or src in srcs:
            # Skip missing and duplicate sources.
            continue
        srcs.append(src)

        bitrate = float_or_none(medium.get('system-bitrate') or medium.get('systemBitrate'), 1000)
        filesize = int_or_none(medium.get('size') or medium.get('fileSize'))
        width = int_or_none(medium.get('width'))
        height = int_or_none(medium.get('height'))
        proto = medium.get('proto')
        ext = medium.get('ext')
        src_ext = determine_ext(src)
        streamer = medium.get('streamer') or base

        if proto == 'rtmp' or streamer.startswith('rtmp'):
            rtmp_count += 1
            formats.append({
                'url': streamer,
                'play_path': src,
                'ext': 'flv',
                'format_id': 'rtmp-%d' % (rtmp_count if bitrate is None else bitrate),
                'tbr': bitrate,
                'filesize': filesize,
                'width': width,
                'height': height,
            })
            if transform_rtmp_url:
                # Allow the caller to rewrite streamer/play_path pairs.
                streamer, src = transform_rtmp_url(streamer, src)
                formats[-1].update({
                    'url': streamer,
                    'play_path': src,
                })
            continue

        src_url = src if src.startswith('http') else compat_urlparse.urljoin(base, src)
        src_url = src_url.strip()

        if proto == 'm3u8' or src_ext == 'm3u8':
            m3u8_formats = self._extract_m3u8_formats(
                src_url, video_id, ext or 'mp4', m3u8_id='hls', fatal=False)
            if len(m3u8_formats) == 1:
                # Single-entry HLS: enrich it with this medium's metadata.
                m3u8_count += 1
                m3u8_formats[0].update({
                    'format_id': 'hls-%d' % (m3u8_count if bitrate is None else bitrate),
                    'tbr': bitrate,
                    'width': width,
                    'height': height,
                })
            formats.extend(m3u8_formats)
            continue

        if src_ext == 'f4m':
            f4m_url = src_url
            if not f4m_params:
                f4m_params = {
                    'hdcore': '3.2.0',
                    'plugin': 'flowplayer-3.2.0.1',
                }
            f4m_url += '&' if '?' in f4m_url else '?'
            f4m_url += compat_urllib_parse_urlencode(f4m_params)
            formats.extend(self._extract_f4m_formats(f4m_url, video_id, f4m_id='hds', fatal=False))
            continue

        if src_url.startswith('http') and self._is_valid_url(src, video_id):
            http_count += 1
            formats.append({
                'url': src_url,
                'ext': ext or src_ext or 'flv',
                'format_id': 'http-%d' % (bitrate or http_count),
                'tbr': bitrate,
                'filesize': filesize,
                'width': width,
                'height': height,
            })
            continue

    return formats
def _parse_smil_subtitles(self, smil, namespace=None, subtitles_lang='en'):
    """Collect subtitle tracks from SMIL <textstream> nodes, keyed by
    language (falling back to *subtitles_lang*)."""
    seen_srcs = []
    subtitles = {}
    for textstream in smil.findall(self._xpath_ns('.//textstream', namespace)):
        src = textstream.get('src')
        if not src or src in seen_srcs:
            continue
        seen_srcs.append(src)
        ext = textstream.get('ext') or mimetype2ext(textstream.get('type')) or determine_ext(src)
        lang = (textstream.get('systemLanguage')
                or textstream.get('systemLanguageName')
                or textstream.get('lang')
                or subtitles_lang)
        subtitles.setdefault(lang, []).append({
            'url': src,
            'ext': ext,
        })
    return subtitles
def _extract_xspf_playlist(self, playlist_url, playlist_id, fatal=True):
    """Download an XSPF playlist document and parse it into entries;
    returns [] when the download fails non-fatally."""
    xspf = self._download_xml(
        # Fix: progress note had a typo ('xpsf' -> 'xspf').
        playlist_url, playlist_id, 'Downloading xspf playlist',
        'Unable to download xspf manifest', fatal=fatal)
    if xspf is False:
        return []
    return self._parse_xspf(xspf, playlist_id)
def _parse_xspf(self, playlist, playlist_id):
    """Convert a parsed XSPF document into a list of entry info dicts.

    NOTE(review): every entry gets 'id': playlist_id — tracks are not
    given individual ids; confirm this is intended by callers.
    """
    NS_MAP = {
        'xspf': 'http://xspf.org/ns/0/',
        # StreamOne extension attributes (label/width/height).
        's1': 'http://static.streamone.nl/player/ns/0',
    }

    entries = []
    for track in playlist.findall(xpath_with_ns('./xspf:trackList/xspf:track', NS_MAP)):
        title = xpath_text(
            track, xpath_with_ns('./xspf:title', NS_MAP), 'title', default=playlist_id)
        description = xpath_text(
            track, xpath_with_ns('./xspf:annotation', NS_MAP), 'description')
        thumbnail = xpath_text(
            track, xpath_with_ns('./xspf:image', NS_MAP), 'thumbnail')
        duration = float_or_none(
            xpath_text(track, xpath_with_ns('./xspf:duration', NS_MAP), 'duration'), 1000)

        formats = [{
            'url': location.text,
            'format_id': location.get(xpath_with_ns('s1:label', NS_MAP)),
            'width': int_or_none(location.get(xpath_with_ns('s1:width', NS_MAP))),
            'height': int_or_none(location.get(xpath_with_ns('s1:height', NS_MAP))),
        } for location in track.findall(xpath_with_ns('./xspf:location', NS_MAP))]
        self._sort_formats(formats)

        entries.append({
            'id': playlist_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'duration': duration,
            'formats': formats,
        })
    return entries
def _extract_mpd_formats(self, mpd_url, video_id, mpd_id=None, note=None, errnote=None, fatal=True, formats_dict=None):
    """Download a DASH MPD manifest and parse it into a list of formats;
    returns [] when the download fails non-fatally."""
    # Fix: avoid a mutable default argument ({}); None default preserves
    # the original behavior exactly.
    if formats_dict is None:
        formats_dict = {}
    res = self._download_webpage_handle(
        mpd_url, video_id,
        note=note or 'Downloading MPD manifest',
        errnote=errnote or 'Failed to download MPD manifest',
        fatal=fatal)
    if res is False:
        return []
    mpd, urlh = res
    # Base URL is derived from the final (post-redirect) manifest URL.
    mpd_base_url = base_url(urlh.geturl())

    return self._parse_mpd_formats(
        compat_etree_fromstring(mpd.encode('utf-8')), mpd_id, mpd_base_url,
        formats_dict=formats_dict, mpd_url=mpd_url)
def _parse_mpd_formats(self, mpd_doc, mpd_id=None, mpd_base_url='', formats_dict={}, mpd_url=None):
    """
    Parse formats from MPD manifest.
    References:
     1. MPEG-DASH Standard, ISO/IEC 23009-1:2014(E),
        http://standards.iso.org/ittf/PubliclyAvailableStandards/c065274_ISO_IEC_23009-1_2014.zip
     2. https://en.wikipedia.org/wiki/Dynamic_Adaptive_Streaming_over_HTTP

    Returns a list of format dicts, one per non-DRM audio/video
    Representation. Live ("dynamic") manifests return [].
    NOTE: formats_dict is a mutable default argument but is only read
    (.get/.copy), never mutated, so it is safe here.
    """
    if mpd_doc.get('type') == 'dynamic':
        # Live manifests are not supported by this parser.
        return []

    namespace = self._search_regex(r'(?i)^{([^}]+)?}MPD$', mpd_doc.tag, 'namespace', default=None)

    def _add_ns(path):
        # Qualify an XPath with the manifest's XML namespace, if any.
        return self._xpath_ns(path, namespace)

    def is_drm_protected(element):
        return element.find(_add_ns('ContentProtection')) is not None

    def extract_multisegment_info(element, ms_parent_info):
        # Merge segment info declared on `element` over the info inherited
        # from its parent (Period -> AdaptationSet -> Representation).
        ms_info = ms_parent_info.copy()

        # As per [1, 5.3.9.2.2] SegmentList and SegmentTemplate share some
        # common attributes and elements. We will only extract relevant
        # for us.
        def extract_common(source):
            segment_timeline = source.find(_add_ns('SegmentTimeline'))
            if segment_timeline is not None:
                s_e = segment_timeline.findall(_add_ns('S'))
                if s_e:
                    ms_info['total_number'] = 0
                    ms_info['s'] = []
                    for s in s_e:
                        r = int(s.get('r', 0))
                        # @r is a repeat count: 1 + r segments share this entry.
                        ms_info['total_number'] += 1 + r
                        ms_info['s'].append({
                            't': int(s.get('t', 0)),
                            # @d is mandatory (see [1, 5.3.9.6.2, Table 17, page 60])
                            'd': int(s.attrib['d']),
                            'r': r,
                        })
            start_number = source.get('startNumber')
            if start_number:
                ms_info['start_number'] = int(start_number)
            timescale = source.get('timescale')
            if timescale:
                ms_info['timescale'] = int(timescale)
            segment_duration = source.get('duration')
            if segment_duration:
                ms_info['segment_duration'] = float(segment_duration)

        def extract_Initialization(source):
            initialization = source.find(_add_ns('Initialization'))
            if initialization is not None:
                ms_info['initialization_url'] = initialization.attrib['sourceURL']

        segment_list = element.find(_add_ns('SegmentList'))
        if segment_list is not None:
            extract_common(segment_list)
            extract_Initialization(segment_list)
            segment_urls_e = segment_list.findall(_add_ns('SegmentURL'))
            if segment_urls_e:
                ms_info['segment_urls'] = [segment.attrib['media'] for segment in segment_urls_e]
        else:
            segment_template = element.find(_add_ns('SegmentTemplate'))
            if segment_template is not None:
                extract_common(segment_template)
                media = segment_template.get('media')
                if media:
                    ms_info['media'] = media
                initialization = segment_template.get('initialization')
                if initialization:
                    ms_info['initialization'] = initialization
                else:
                    extract_Initialization(segment_template)
        return ms_info

    mpd_duration = parse_duration(mpd_doc.get('mediaPresentationDuration'))
    formats = []
    for period in mpd_doc.findall(_add_ns('Period')):
        period_duration = parse_duration(period.get('duration')) or mpd_duration
        period_ms_info = extract_multisegment_info(period, {
            'start_number': 1,
            'timescale': 1,
        })
        for adaptation_set in period.findall(_add_ns('AdaptationSet')):
            if is_drm_protected(adaptation_set):
                continue
            adaption_set_ms_info = extract_multisegment_info(adaptation_set, period_ms_info)
            for representation in adaptation_set.findall(_add_ns('Representation')):
                if is_drm_protected(representation):
                    continue
                # Representation attributes override AdaptationSet ones.
                representation_attrib = adaptation_set.attrib.copy()
                representation_attrib.update(representation.attrib)
                # According to [1, 5.3.7.2, Table 9, page 41], @mimeType is mandatory
                mime_type = representation_attrib['mimeType']
                content_type = mime_type.split('/')[0]
                if content_type == 'text':
                    # TODO implement WebVTT downloading
                    pass
                elif content_type in ('video', 'audio'):
                    # Resolve BaseURL bottom-up (Representation first),
                    # prepending each ancestor until an absolute URL is formed.
                    base_url = ''
                    for element in (representation, adaptation_set, period, mpd_doc):
                        base_url_e = element.find(_add_ns('BaseURL'))
                        if base_url_e is not None:
                            base_url = base_url_e.text + base_url
                            if re.match(r'^https?://', base_url):
                                break
                    if mpd_base_url and not re.match(r'^https?://', base_url):
                        if not mpd_base_url.endswith('/') and not base_url.startswith('/'):
                            mpd_base_url += '/'
                        base_url = mpd_base_url + base_url
                    representation_id = representation_attrib.get('id')
                    lang = representation_attrib.get('lang')
                    url_el = representation.find(_add_ns('BaseURL'))
                    filesize = int_or_none(url_el.attrib.get('{http://youtube.com/yt/2012/10/10}contentLength') if url_el is not None else None)
                    bandwidth = int_or_none(representation_attrib.get('bandwidth'))
                    f = {
                        'format_id': '%s-%s' % (mpd_id, representation_id) if mpd_id else representation_id,
                        'url': base_url,
                        'manifest_url': mpd_url,
                        'ext': mimetype2ext(mime_type),
                        'width': int_or_none(representation_attrib.get('width')),
                        'height': int_or_none(representation_attrib.get('height')),
                        'tbr': float_or_none(bandwidth, 1000),
                        'asr': int_or_none(representation_attrib.get('audioSamplingRate')),
                        'fps': int_or_none(representation_attrib.get('frameRate')),
                        # 'mul'/'und'/'zxx'/'mis' are ISO 639 "no useful language" codes.
                        'language': lang if lang not in ('mul', 'und', 'zxx', 'mis') else None,
                        'format_note': 'DASH %s' % content_type,
                        'filesize': filesize,
                    }
                    f.update(parse_codecs(representation_attrib.get('codecs')))
                    representation_ms_info = extract_multisegment_info(representation, adaption_set_ms_info)

                    def prepare_template(template_name, identifiers):
                        # Convert a DASH $Identifier$ template into a Python
                        # %-format string (see [1, 5.3.9.4.4, Table 16]).
                        t = representation_ms_info[template_name]
                        t = t.replace('$RepresentationID$', representation_id)
                        t = re.sub(r'\$(%s)\$' % '|'.join(identifiers), r'%(\1)d', t)
                        t = re.sub(r'\$(%s)%%([^$]+)\$' % '|'.join(identifiers), r'%(\1)\2', t)
                        # BUG FIX: str.replace returns a new string; the
                        # original discarded the result, so the '$$' escape
                        # (a literal '$' per [1, 5.3.9.4.4]) was never applied.
                        t = t.replace('$$', '$')
                        return t

                    # @initialization is a regular template like @media one
                    # so it should be handled just the same way (see
                    # https://github.com/rg3/youtube-dl/issues/11605)
                    if 'initialization' in representation_ms_info:
                        initialization_template = prepare_template(
                            'initialization',
                            # As per [1, 5.3.9.4.2, Table 15, page 54] $Number$ and
                            # $Time$ shall not be included for @initialization thus
                            # only $Bandwidth$ remains
                            ('Bandwidth', ))
                        representation_ms_info['initialization_url'] = initialization_template % {
                            'Bandwidth': bandwidth,
                        }

                    def location_key(location):
                        # Fragment dicts use 'url' for absolute and 'path'
                        # for relative locations.
                        return 'url' if re.match(r'^https?://', location) else 'path'

                    if 'segment_urls' not in representation_ms_info and 'media' in representation_ms_info:
                        media_template = prepare_template('media', ('Number', 'Bandwidth', 'Time'))
                        media_location_key = location_key(media_template)

                        # As per [1, 5.3.9.4.4, Table 16, page 55] $Number$ and $Time$
                        # can't be used at the same time
                        if '%(Number' in media_template and 's' not in representation_ms_info:
                            segment_duration = None
                            if 'total_number' not in representation_ms_info and 'segment_duration' in representation_ms_info:
                                segment_duration = float_or_none(representation_ms_info['segment_duration'], representation_ms_info['timescale'])
                                representation_ms_info['total_number'] = int(math.ceil(float(period_duration) / segment_duration))
                            representation_ms_info['fragments'] = [{
                                media_location_key: media_template % {
                                    'Number': segment_number,
                                    'Bandwidth': bandwidth,
                                },
                                'duration': segment_duration,
                            } for segment_number in range(
                                representation_ms_info['start_number'],
                                representation_ms_info['total_number'] + representation_ms_info['start_number'])]
                        else:
                            # $Number*$ or $Time$ in media template with S list available
                            # Example $Number*$: http://www.svtplay.se/klipp/9023742/stopptid-om-bjorn-borg
                            # Example $Time$: https://play.arkena.com/embed/avp/v2/player/media/b41dda37-d8e7-4d3f-b1b5-9a9db578bdfe/1/129411
                            representation_ms_info['fragments'] = []
                            segment_time = 0
                            segment_d = None
                            segment_number = representation_ms_info['start_number']

                            def add_segment_url():
                                segment_url = media_template % {
                                    'Time': segment_time,
                                    'Bandwidth': bandwidth,
                                    'Number': segment_number,
                                }
                                representation_ms_info['fragments'].append({
                                    media_location_key: segment_url,
                                    'duration': float_or_none(segment_d, representation_ms_info['timescale']),
                                })

                            for num, s in enumerate(representation_ms_info['s']):
                                segment_time = s.get('t') or segment_time
                                segment_d = s['d']
                                add_segment_url()
                                segment_number += 1
                                # Expand the @r repeat count into individual fragments.
                                for r in range(s.get('r', 0)):
                                    segment_time += segment_d
                                    add_segment_url()
                                    segment_number += 1
                                segment_time += segment_d
                    elif 'segment_urls' in representation_ms_info and 's' in representation_ms_info:
                        # No media template
                        # Example: https://www.youtube.com/watch?v=iXZV5uAYMJI
                        # or any YouTube dashsegments video
                        fragments = []
                        segment_index = 0
                        timescale = representation_ms_info['timescale']
                        for s in representation_ms_info['s']:
                            duration = float_or_none(s['d'], timescale)
                            for r in range(s.get('r', 0) + 1):
                                segment_uri = representation_ms_info['segment_urls'][segment_index]
                                fragments.append({
                                    location_key(segment_uri): segment_uri,
                                    'duration': duration,
                                })
                                segment_index += 1
                        representation_ms_info['fragments'] = fragments
                    elif 'segment_urls' in representation_ms_info:
                        # Segment URLs with no SegmentTimeline
                        # Example: https://www.seznam.cz/zpravy/clanek/cesko-zasahne-vitr-o-sile-vichrice-muze-byt-i-zivotu-nebezpecny-39091
                        # https://github.com/rg3/youtube-dl/pull/14844
                        fragments = []
                        segment_duration = float_or_none(
                            representation_ms_info['segment_duration'],
                            representation_ms_info['timescale']) if 'segment_duration' in representation_ms_info else None
                        for segment_url in representation_ms_info['segment_urls']:
                            fragment = {
                                location_key(segment_url): segment_url,
                            }
                            if segment_duration:
                                fragment['duration'] = segment_duration
                            fragments.append(fragment)
                        representation_ms_info['fragments'] = fragments
                    # NB: MPD manifest may contain direct URLs to unfragmented media.
                    # No fragments key is present in this case.
                    if 'fragments' in representation_ms_info:
                        f.update({
                            'fragment_base_url': base_url,
                            'fragments': [],
                            'protocol': 'http_dash_segments',
                        })
                        if 'initialization_url' in representation_ms_info:
                            initialization_url = representation_ms_info['initialization_url']
                            if not f.get('url'):
                                f['url'] = initialization_url
                            f['fragments'].append({location_key(initialization_url): initialization_url})
                        f['fragments'].extend(representation_ms_info['fragments'])
                    # Merge with any previously-seen format with the same id
                    # (Representations may repeat across Periods).
                    try:
                        existing_format = next(
                            fo for fo in formats
                            if fo['format_id'] == representation_id)
                    except StopIteration:
                        full_info = formats_dict.get(representation_id, {}).copy()
                        full_info.update(f)
                        formats.append(full_info)
                    else:
                        existing_format.update(f)
                else:
                    self.report_warning('Unknown MIME type %s in DASH manifest' % mime_type)
    return formats
def _extract_ism_formats(self, ism_url, video_id, ism_id=None, note=None, errnote=None, fatal=True):
    """Download an ISM (Smooth Streaming) manifest and parse its formats.

    Returns [] when the download fails and fatal is False.
    """
    downloaded = self._download_webpage_handle(
        ism_url, video_id,
        note=note or 'Downloading ISM manifest',
        errnote=errnote or 'Failed to download ISM manifest',
        fatal=fatal)
    if downloaded is False:
        return []
    manifest, handle = downloaded
    doc = compat_etree_fromstring(manifest.encode('utf-8'))
    return self._parse_ism_formats(doc, handle.geturl(), ism_id)
def _parse_ism_formats(self, ism_doc, ism_url, ism_id=None):
    """
    Parse formats from ISM manifest.
    References:
     1. [MS-SSTR]: Smooth Streaming Protocol,
        https://msdn.microsoft.com/en-us/library/ff469518.aspx

    Returns a list of format dicts with per-fragment URLs and the
    parameters needed by the ISM downloader. Live and DRM-protected
    manifests return [].
    """
    if ism_doc.get('IsLive') == 'TRUE' or ism_doc.find('Protection') is not None:
        return []

    duration = int(ism_doc.attrib['Duration'])
    timescale = int_or_none(ism_doc.get('TimeScale')) or 10000000

    formats = []
    for stream in ism_doc.findall('StreamIndex'):
        stream_type = stream.get('Type')
        if stream_type not in ('video', 'audio'):
            continue
        url_pattern = stream.attrib['Url']
        stream_timescale = int_or_none(stream.get('TimeScale')) or timescale
        stream_name = stream.get('Name')
        for track in stream.findall('QualityLevel'):
            fourcc = track.get('FourCC')
            # TODO: add support for WVC1 and WMAP
            if fourcc not in ('H264', 'AVC1', 'AACL'):
                self.report_warning('%s is not a supported codec' % fourcc)
                continue
            tbr = int(track.attrib['Bitrate']) // 1000
            # [1] does not mention Width and Height attributes. However,
            # they're often present while MaxWidth and MaxHeight are
            # missing, so should be used as fallbacks
            width = int_or_none(track.get('MaxWidth') or track.get('Width'))
            height = int_or_none(track.get('MaxHeight') or track.get('Height'))
            sampling_rate = int_or_none(track.get('SamplingRate'))

            track_url_pattern = re.sub(r'{[Bb]itrate}', track.attrib['Bitrate'], url_pattern)
            track_url_pattern = compat_urlparse.urljoin(ism_url, track_url_pattern)

            fragments = []
            fragment_ctx = {
                'time': 0,
            }
            stream_fragments = stream.findall('c')
            for stream_fragment_index, stream_fragment in enumerate(stream_fragments):
                fragment_ctx['time'] = int_or_none(stream_fragment.get('t')) or fragment_ctx['time']
                fragment_repeat = int_or_none(stream_fragment.get('r')) or 1
                fragment_ctx['duration'] = int_or_none(stream_fragment.get('d'))
                if not fragment_ctx['duration']:
                    # When @d is absent, derive the duration from the next
                    # fragment's start time (or the total duration if last).
                    try:
                        # BUG FIX: index the stream_fragments *list*; the
                        # original indexed the current <c> Element (i.e. its
                        # children), which always raised IndexError and so
                        # always fell back to the total duration.
                        next_fragment_time = int(stream_fragments[stream_fragment_index + 1].attrib['t'])
                    except IndexError:
                        next_fragment_time = duration
                    fragment_ctx['duration'] = (next_fragment_time - fragment_ctx['time']) / fragment_repeat
                for _ in range(fragment_repeat):
                    fragments.append({
                        'url': re.sub(r'{start[ _]time}', compat_str(fragment_ctx['time']), track_url_pattern),
                        'duration': fragment_ctx['duration'] / stream_timescale,
                    })
                    fragment_ctx['time'] += fragment_ctx['duration']

            format_id = []
            if ism_id:
                format_id.append(ism_id)
            if stream_name:
                format_id.append(stream_name)
            format_id.append(compat_str(tbr))

            formats.append({
                'format_id': '-'.join(format_id),
                'url': ism_url,
                'manifest_url': ism_url,
                'ext': 'ismv' if stream_type == 'video' else 'isma',
                'width': width,
                'height': height,
                'tbr': tbr,
                'asr': sampling_rate,
                'vcodec': 'none' if stream_type == 'audio' else fourcc,
                'acodec': 'none' if stream_type == 'video' else fourcc,
                'protocol': 'ism',
                'fragments': fragments,
                '_download_params': {
                    'duration': duration,
                    'timescale': stream_timescale,
                    'width': width or 0,
                    'height': height or 0,
                    'fourcc': fourcc,
                    'codec_private_data': track.get('CodecPrivateData'),
                    'sampling_rate': sampling_rate,
                    'channels': int_or_none(track.get('Channels', 2)),
                    'bits_per_sample': int_or_none(track.get('BitsPerSample', 16)),
                    'nal_unit_length_field': int_or_none(track.get('NALUnitLengthField', 4)),
                },
            })
    return formats
def _parse_html5_media_entries(self, base_url, webpage, video_id, m3u8_id=None, m3u8_entry_protocol='m3u8', mpd_id=None, preference=None):
    """Scan webpage for HTML5 <video>/<audio> tags (and their AMP
    equivalents) and return a list of entry dicts with 'formats',
    'subtitles' and 'thumbnail' keys, one per media tag that yielded
    at least one format or subtitle track.
    """
    def absolute_url(video_url):
        # Resolve a possibly-relative URL against the page URL.
        return compat_urlparse.urljoin(base_url, video_url)

    def parse_content_type(content_type):
        # Derive {'ext': ..., codec fields...} from a MIME "type" attribute.
        if not content_type:
            return {}
        ctr = re.search(r'(?P<mimetype>[^/]+/[^;]+)(?:;\s*codecs="?(?P<codecs>[^"]+))?', content_type)
        if ctr:
            mimetype, codecs = ctr.groups()
            f = parse_codecs(codecs)
            f['ext'] = mimetype2ext(mimetype)
            return f
        return {}

    def _media_formats(src, cur_media_type, type_info={}):
        # Expand one src attribute into (is_plain_url, formats).
        # Manifest URLs (m3u8/mpd) expand into multiple formats.
        full_url = absolute_url(src)
        ext = type_info.get('ext') or determine_ext(full_url)
        if ext == 'm3u8':
            is_plain_url = False
            formats = self._extract_m3u8_formats(
                full_url, video_id, ext='mp4',
                entry_protocol=m3u8_entry_protocol, m3u8_id=m3u8_id,
                preference=preference, fatal=False)
        elif ext == 'mpd':
            is_plain_url = False
            formats = self._extract_mpd_formats(
                full_url, video_id, mpd_id=mpd_id, fatal=False)
        else:
            is_plain_url = True
            formats = [{
                'url': full_url,
                'vcodec': 'none' if cur_media_type == 'audio' else None,
            }]
        return is_plain_url, formats

    entries = []
    # amp-video and amp-audio are very similar to their HTML5 counterparts
    # so we wll include them right here (see
    # https://www.ampproject.org/docs/reference/components/amp-video)
    # Self-closing tags first (no inner content) ...
    media_tags = [(media_tag, media_type, '')
                  for media_tag, media_type
                  in re.findall(r'(?s)(<(?:amp-)?(video|audio)[^>]*/>)', webpage)]
    # ... then open/close tag pairs, keeping the inner content.
    media_tags.extend(re.findall(
        # We only allow video|audio followed by a whitespace or '>'.
        # Allowing more characters may end up in significant slow down (see
        # https://github.com/rg3/youtube-dl/issues/11979, example URL:
        # http://www.porntrex.com/maps/videositemap.xml).
        r'(?s)(<(?P<tag>(?:amp-)?(?:video|audio))(?:\s+[^>]*)?>)(.*?)</(?P=tag)>', webpage))
    for media_tag, media_type, media_content in media_tags:
        media_info = {
            'formats': [],
            'subtitles': {},
        }
        media_attributes = extract_attributes(media_tag)
        src = media_attributes.get('src')
        if src:
            _, formats = _media_formats(src, media_type)
            media_info['formats'].extend(formats)
        media_info['thumbnail'] = media_attributes.get('poster')
        if media_content:
            # Nested <source> tags provide alternative renditions.
            for source_tag in re.findall(r'<source[^>]+>', media_content):
                source_attributes = extract_attributes(source_tag)
                src = source_attributes.get('src')
                if not src:
                    continue
                f = parse_content_type(source_attributes.get('type'))
                is_plain_url, formats = _media_formats(src, media_type, f)
                if is_plain_url:
                    # res attribute is not standard but seen several times
                    # in the wild
                    f.update({
                        'height': int_or_none(source_attributes.get('res')),
                        'format_id': source_attributes.get('label'),
                    })
                    f.update(formats[0])
                    media_info['formats'].append(f)
                else:
                    media_info['formats'].extend(formats)
            # Nested <track> tags provide subtitles/captions.
            for track_tag in re.findall(r'<track[^>]+>', media_content):
                track_attributes = extract_attributes(track_tag)
                kind = track_attributes.get('kind')
                if not kind or kind in ('subtitles', 'captions'):
                    src = track_attributes.get('src')
                    if not src:
                        continue
                    lang = track_attributes.get('srclang') or track_attributes.get('lang') or track_attributes.get('label')
                    media_info['subtitles'].setdefault(lang, []).append({
                        'url': absolute_url(src),
                    })
        if media_info['formats'] or media_info['subtitles']:
            entries.append(media_info)
    return entries
def _extract_akamai_formats(self, manifest_url, video_id, hosts={}):
    """Extract Akamai HDS (f4m) and HLS (m3u8) formats for a media URL.

    hosts may supply per-protocol host overrides under the 'hds' and
    'hls' keys. Both sub-extractions are non-fatal.
    """
    formats = []

    # --- HDS (Adobe HTTP Dynamic Streaming) ---
    signature = 'hdcore=3.7.0'
    hds_url = re.sub(r'(https?://[^/]+)/i/', r'\1/z/', manifest_url).replace('/master.m3u8', '/manifest.f4m')
    hds_override = hosts.get('hds')
    if hds_override:
        hds_url = re.sub(r'(https?://)[^/]+', r'\1' + hds_override, hds_url)
    if 'hdcore=' not in hds_url:
        separator = '&' if '?' in hds_url else '?'
        hds_url = hds_url + separator + signature
    hds_formats = self._extract_f4m_formats(
        hds_url, video_id, f4m_id='hds', fatal=False)
    for hds_format in hds_formats:
        # The hdcore signature must also be appended to every segment URL.
        hds_format.update({'extra_param_to_segment_url': signature})
    formats.extend(hds_formats)

    # --- HLS (Apple HTTP Live Streaming) ---
    hls_url = re.sub(r'(https?://[^/]+)/z/', r'\1/i/', manifest_url).replace('/manifest.f4m', '/master.m3u8')
    hls_override = hosts.get('hls')
    if hls_override:
        hls_url = re.sub(r'(https?://)[^/]+', r'\1' + hls_override, hls_url)
    formats.extend(self._extract_m3u8_formats(
        hls_url, video_id, 'mp4', 'm3u8_native',
        m3u8_id='hls', fatal=False))
    return formats
def _extract_wowza_formats(self, url, video_id, m3u8_entry_protocol='m3u8_native', skip_protocols=[]):
    """Probe a Wowza streaming server URL for all protocol variants
    (HLS, HDS, DASH, and SMIL-derived RTMP/RTSP), skipping any protocol
    listed in skip_protocols.

    NOTE: skip_protocols is a mutable default argument but is only read
    here, never mutated.
    """
    # Keep the query string; it is re-appended to every per-protocol
    # manifest URL built below.
    query = compat_urlparse.urlparse(url).query
    # Strip any protocol-specific manifest suffix to get the bare stream URL.
    url = re.sub(r'/(?:manifest|playlist|jwplayer)\.(?:m3u8|f4m|mpd|smil)', '', url)
    url_base = self._search_regex(
        r'(?:(?:https?|rtmp|rtsp):)?(//[^?]+)', url, 'format url')
    http_base_url = '%s:%s' % ('http', url_base)
    formats = []

    def manifest_url(manifest):
        # Build '<http base>/<manifest file>[?query]'.
        m_url = '%s/%s' % (http_base_url, manifest)
        if query:
            m_url += '?%s' % query
        return m_url

    if 'm3u8' not in skip_protocols:
        formats.extend(self._extract_m3u8_formats(
            manifest_url('playlist.m3u8'), video_id, 'mp4',
            m3u8_entry_protocol, m3u8_id='hls', fatal=False))
    if 'f4m' not in skip_protocols:
        formats.extend(self._extract_f4m_formats(
            manifest_url('manifest.f4m'),
            video_id, f4m_id='hds', fatal=False))
    if 'dash' not in skip_protocols:
        formats.extend(self._extract_mpd_formats(
            manifest_url('manifest.mpd'),
            video_id, mpd_id='dash', fatal=False))
    if re.search(r'(?:/smil:|\.smil)', url_base):
        if 'smil' not in skip_protocols:
            rtmp_formats = self._extract_smil_formats(
                manifest_url('jwplayer.smil'),
                video_id, fatal=False)
            for rtmp_format in rtmp_formats:
                # Derive a parallel RTSP format from each RTMP one.
                rtsp_format = rtmp_format.copy()
                rtsp_format['url'] = '%s/%s' % (rtmp_format['url'], rtmp_format['play_path'])
                del rtsp_format['play_path']
                del rtsp_format['ext']
                rtsp_format.update({
                    'url': rtsp_format['url'].replace('rtmp://', 'rtsp://'),
                    'format_id': rtmp_format['format_id'].replace('rtmp', 'rtsp'),
                    'protocol': 'rtsp',
                })
                formats.extend([rtmp_format, rtsp_format])
    else:
        # No SMIL: offer plain rtmp/rtsp URLs directly.
        for protocol in ('rtmp', 'rtsp'):
            if protocol not in skip_protocols:
                formats.append({
                    'url': '%s:%s' % (protocol, url_base),
                    'format_id': protocol,
                    'protocol': protocol,
                })
    return formats
def _find_jwplayer_data(self, webpage, video_id=None, transform_source=js_to_json):
    """Locate a jwplayer("...").setup({...}) call in webpage and return
    the parsed options dict, or None when absent or unparseable."""
    setup_call = re.search(
        r'(?s)jwplayer\((?P<quote>[\'"])[^\'" ]+(?P=quote)\)(?!</script>).*?\.setup\s*\((?P<options>[^)]+)\)',
        webpage)
    if not setup_call:
        return None
    try:
        options = self._parse_json(
            setup_call.group('options'), video_id=video_id,
            transform_source=transform_source)
    except ExtractorError:
        return None
    # Only a dict is a usable JWPlayer configuration.
    return options if isinstance(options, dict) else None
def _extract_jwplayer_data(self, webpage, video_id, *args, **kwargs):
    """Find JWPlayer setup data in webpage and parse it into entries.

    Extra args/kwargs are forwarded to _parse_jwplayer_data.
    """
    data = self._find_jwplayer_data(
        webpage, video_id, transform_source=js_to_json)
    return self._parse_jwplayer_data(data, video_id, *args, **kwargs)
def _parse_jwplayer_data(self, jwplayer_data, video_id=None, require_title=True,
                         m3u8_id=None, mpd_id=None, rtmp_params=None, base_url=None):
    """Convert a JWPlayer configuration dict into one entry dict (single
    playlist item) or a playlist result (multiple items).

    Handles several legacy JWPlayer config layouts by normalizing them
    to the modern {'playlist': [{'sources': [...]}, ...]} shape first.
    """
    # JWPlayer backward compatibility: flattened playlists
    # https://github.com/jwplayer/jwplayer/blob/v7.4.3/src/js/api/config.js#L81-L96
    if 'playlist' not in jwplayer_data:
        jwplayer_data = {'playlist': [jwplayer_data]}

    entries = []

    # JWPlayer backward compatibility: single playlist item
    # https://github.com/jwplayer/jwplayer/blob/v7.7.0/src/js/playlist/playlist.js#L10
    if not isinstance(jwplayer_data['playlist'], list):
        jwplayer_data['playlist'] = [jwplayer_data['playlist']]

    for video_data in jwplayer_data['playlist']:
        # JWPlayer backward compatibility: flattened sources
        # https://github.com/jwplayer/jwplayer/blob/v7.4.3/src/js/playlist/item.js#L29-L35
        if 'sources' not in video_data:
            video_data['sources'] = [video_data]

        this_video_id = video_id or video_data['mediaid']

        formats = self._parse_jwplayer_formats(
            video_data['sources'], video_id=this_video_id, m3u8_id=m3u8_id,
            mpd_id=mpd_id, rtmp_params=rtmp_params, base_url=base_url)

        # Collect caption tracks as subtitles, keyed by label (default 'en').
        subtitles = {}
        tracks = video_data.get('tracks')
        if tracks and isinstance(tracks, list):
            for track in tracks:
                if not isinstance(track, dict):
                    continue
                if track.get('kind') != 'captions':
                    continue
                track_url = urljoin(base_url, track.get('file'))
                if not track_url:
                    continue
                subtitles.setdefault(track.get('label') or 'en', []).append({
                    'url': self._proto_relative_url(track_url)
                })

        entry = {
            'id': this_video_id,
            'title': unescapeHTML(video_data['title'] if require_title else video_data.get('title')),
            'description': video_data.get('description'),
            'thumbnail': self._proto_relative_url(video_data.get('image')),
            'timestamp': int_or_none(video_data.get('pubdate')),
            'duration': float_or_none(jwplayer_data.get('duration') or video_data.get('duration')),
            'subtitles': subtitles,
        }
        # A single YouTube URL is delegated to the YouTube extractor.
        # https://github.com/jwplayer/jwplayer/blob/master/src/js/utils/validator.js#L32
        if len(formats) == 1 and re.search(r'^(?:http|//).*(?:youtube\.com|youtu\.be)/.+', formats[0]['url']):
            entry.update({
                '_type': 'url_transparent',
                'url': formats[0]['url'],
            })
        else:
            self._sort_formats(formats)
            entry['formats'] = formats
        entries.append(entry)
    if len(entries) == 1:
        return entries[0]
    else:
        return self.playlist_result(entries)
def _parse_jwplayer_formats(self, jwplayer_sources_data, video_id=None,
                            m3u8_id=None, mpd_id=None, rtmp_params=None, base_url=None):
    """Turn a JWPlayer 'sources' list into format dicts, dispatching on
    source type/extension (HLS, DASH, SMIL, plain audio, direct/RTMP).
    Duplicate source URLs are processed only once.
    """
    urls = []
    formats = []
    for source in jwplayer_sources_data:
        if not isinstance(source, dict):
            continue
        source_url = self._proto_relative_url(source.get('file'))
        if not source_url:
            continue
        if base_url:
            source_url = compat_urlparse.urljoin(base_url, source_url)
        if source_url in urls:
            # Skip duplicates.
            continue
        urls.append(source_url)
        source_type = source.get('type') or ''
        ext = mimetype2ext(source_type) or determine_ext(source_url)
        if source_type == 'hls' or ext == 'm3u8':
            formats.extend(self._extract_m3u8_formats(
                source_url, video_id, 'mp4', entry_protocol='m3u8_native',
                m3u8_id=m3u8_id, fatal=False))
        elif ext == 'mpd':
            formats.extend(self._extract_mpd_formats(
                source_url, video_id, mpd_id=mpd_id, fatal=False))
        elif ext == 'smil':
            formats.extend(self._extract_smil_formats(
                source_url, video_id, fatal=False))
        # https://github.com/jwplayer/jwplayer/blob/master/src/js/providers/default.js#L67
        elif source_type.startswith('audio') or ext in (
                'oga', 'aac', 'mp3', 'mpeg', 'vorbis'):
            formats.append({
                'url': source_url,
                'vcodec': 'none',
                'ext': ext,
            })
        else:
            height = int_or_none(source.get('height'))
            if height is None:
                # Often no height is provided but there is a label in
                # format like "1080p", "720p SD", or 1080.
                height = int_or_none(self._search_regex(
                    r'^(\d{3,4})[pP]?(?:\b|$)', compat_str(source.get('label') or ''),
                    'height', default=None))
            a_format = {
                'url': source_url,
                'width': int_or_none(source.get('width')),
                'height': height,
                'tbr': int_or_none(source.get('bitrate')),
                'ext': ext,
            }
            if source_url.startswith('rtmp'):
                a_format['ext'] = 'flv'
                # See com/longtailvideo/jwplayer/media/RTMPMediaProvider.as
                # of jwplayer.flash.swf
                # Split 'rtmp://host/app/mp4:path' into URL and play path.
                rtmp_url_parts = re.split(
                    r'((?:mp4|mp3|flv):)', source_url, 1)
                if len(rtmp_url_parts) == 3:
                    rtmp_url, prefix, play_path = rtmp_url_parts
                    a_format.update({
                        'url': rtmp_url,
                        'play_path': prefix + play_path,
                    })
                if rtmp_params:
                    a_format.update(rtmp_params)
            formats.append(a_format)
    return formats
def _live_title(self, name):
""" Generate the title for a live video """
now = datetime.datetime.now()
now_str = now.strftime('%Y-%m-%d %H:%M')
return name + ' ' + now_str
def _int(self, v, name, fatal=False, **kwargs):
    """Coerce v to an int via int_or_none. On failure, raise
    ExtractorError when fatal is true, otherwise emit a downloader
    warning and return None. Extra kwargs are forwarded to int_or_none.
    """
    # BUG FIX: removed leftover debugging code that printed
    # getattr(v, kwargs['get_attr']) to stdout whenever a get_attr
    # keyword was supplied.
    res = int_or_none(v, **kwargs)
    if res is None:
        msg = 'Failed to extract %s: Could not parse value %r' % (name, v)
        if fatal:
            raise ExtractorError(msg)
        else:
            self._downloader.report_warning(msg)
    return res
def _float(self, v, name, fatal=False, **kwargs):
    """Coerce v to a float via float_or_none. On failure, raise
    ExtractorError when fatal is true, otherwise warn and return None.
    Extra kwargs are forwarded to float_or_none.
    """
    parsed = float_or_none(v, **kwargs)
    if parsed is not None:
        return parsed
    msg = 'Failed to extract %s: Could not parse value %r' % (name, v)
    if fatal:
        raise ExtractorError(msg)
    self._downloader.report_warning(msg)
    return parsed
def _set_cookie(self, domain, name, value, expire_time=None, port=None,
                path='/', secure=False, discard=False, rest={}, **kwargs):
    """Install a cookie into the downloader's cookie jar."""
    # Named intermediates for the positional Cookie() arguments that are
    # derived rather than passed through.
    version = 0
    port_specified = port is not None
    domain_initial_dot = domain.startswith('.')
    cookie = compat_cookiejar.Cookie(
        version, name, value, port, port_specified, domain, True,
        domain_initial_dot, path, True, secure, expire_time,
        discard, None, None, rest)
    self._downloader.cookiejar.set_cookie(cookie)
def _get_cookies(self, url):
    """ Return a compat_cookies.SimpleCookie with the cookies for the url """
    request = sanitized_Request(url)
    # Let the cookiejar compute the Cookie header for this URL, then
    # parse it back into a SimpleCookie.
    self._downloader.cookiejar.add_cookie_header(request)
    header = request.get_header('Cookie')
    return compat_cookies.SimpleCookie(header)
def get_testcases(self, include_onlymatching=False):
    """Yield this extractor's test cases, tagging each with the
    extractor name (class name minus the 'IE' suffix).

    only_matching test cases are skipped unless include_onlymatching
    is true.
    """
    single = getattr(self, '_TEST', None)
    if single:
        assert not hasattr(self, '_TESTS'), \
            '%s has _TEST and _TESTS' % type(self).__name__
        cases = [single]
    else:
        cases = getattr(self, '_TESTS', [])
    extractor_name = type(self).__name__[:-len('IE')]
    for case in cases:
        if case.get('only_matching', False) and not include_onlymatching:
            continue
        case['name'] = extractor_name
        yield case
def is_suitable(self, age_limit):
    """ Test whether the extractor is generally suitable for the given
    age limit (i.e. pornographic sites are not, all others usually are) """
    saw_restricted = False
    for testcase in self.get_testcases(include_onlymatching=False):
        if testcase.get('playlist', []):
            # For playlist tests, judge by the first entry.
            testcase = testcase['playlist'][0]
        restricted = age_restricted(
            testcase.get('info_dict', {}).get('age_limit'), age_limit)
        if not restricted:
            # One unrestricted test case is enough to accept.
            return True
        saw_restricted = True
    # Suitable only if no test case was age-restricted (including the
    # no-test-cases case).
    return not saw_restricted
def extract_subtitles(self, *args, **kwargs):
    """Return subtitles when the user requested them (writesubtitles or
    listsubtitles options), otherwise an empty dict."""
    params = self._downloader.params
    wanted = params.get('writesubtitles', False) or params.get('listsubtitles')
    return self._get_subtitles(*args, **kwargs) if wanted else {}
def _get_subtitles(self, *args, **kwargs):
    # Abstract hook: subclasses that support subtitles override this.
    raise NotImplementedError('This method must be implemented by subclasses')
@staticmethod
def _merge_subtitle_items(subtitle_list1, subtitle_list2):
""" Merge subtitle items for one language. Items with duplicated URLs
will be dropped. """
list1_urls = set([item['url'] for item in subtitle_list1])
ret = list(subtitle_list1)
ret.extend([item for item in subtitle_list2 if item['url'] not in list1_urls])
return ret
@classmethod
def _merge_subtitles(cls, subtitle_dict1, subtitle_dict2):
    """ Merge two subtitle dictionaries, language by language. """
    merged = dict(subtitle_dict1)
    for lang, items in subtitle_dict2.items():
        merged[lang] = cls._merge_subtitle_items(
            subtitle_dict1.get(lang, []), items)
    return merged
def extract_automatic_captions(self, *args, **kwargs):
    """Return automatic captions when requested (writeautomaticsub or
    listsubtitles options), otherwise an empty dict."""
    params = self._downloader.params
    if params.get('writeautomaticsub', False) or params.get('listsubtitles'):
        return self._get_automatic_captions(*args, **kwargs)
    return {}
def _get_automatic_captions(self, *args, **kwargs):
    # Abstract hook: subclasses that support automatic captions override this.
    raise NotImplementedError('This method must be implemented by subclasses')
def mark_watched(self, *args, **kwargs):
    """Mark the video watched, but only when the user enabled the
    mark_watched option and authentication is available (login
    credentials or a cookie file)."""
    params = self._downloader.params
    if not params.get('mark_watched', False):
        return
    username = self._get_login_info()[0]
    if username is None and params.get('cookiefile') is None:
        return
    self._mark_watched(*args, **kwargs)
def _mark_watched(self, *args, **kwargs):
    # Abstract hook: subclasses that support watch-state reporting override this.
    raise NotImplementedError('This method must be implemented by subclasses')
def geo_verification_headers(self):
    """Return headers routing geo-verification requests through the
    configured geo_verification_proxy, or an empty dict if unset."""
    proxy = self._downloader.params.get('geo_verification_proxy')
    return {'Ytdl-request-proxy': proxy} if proxy else {}
def _generic_id(self, url):
    """Derive a fallback video id from the last path component of url,
    URL-unquoted and with the file extension stripped."""
    last_component = url.rstrip('/').split('/')[-1]
    stem = os.path.splitext(last_component)[0]
    return compat_urllib_parse_unquote(stem)
def _generic_title(self, url):
    """Derive a fallback title from the URL's basename, URL-unquoted and
    with the file extension stripped."""
    stem, _ext = os.path.splitext(url_basename(url))
    return compat_urllib_parse_unquote(stem)
class SearchInfoExtractor(InfoExtractor):
    """
    Base class for paged search queries extractors.
    They accept URLs in the format _SEARCH_KEY(|all|[0-9]):{query}
    Instances should define _SEARCH_KEY and _MAX_RESULTS.
    """

    @classmethod
    def _make_valid_url(cls):
        # prefix is empty (first result), a positive integer, or 'all'.
        return r'%s(?P<prefix>|[1-9][0-9]*|all):(?P<query>[\s\S]+)' % cls._SEARCH_KEY

    @classmethod
    def suitable(cls, url):
        return re.match(cls._make_valid_url(), url) is not None

    def _real_extract(self, query):
        match = re.match(self._make_valid_url(), query)
        if match is None:
            raise ExtractorError('Invalid search query "%s"' % query)
        prefix = match.group('prefix')
        search_term = match.group('query')
        if prefix == '':
            # Bare search key: just the first result.
            return self._get_n_results(search_term, 1)
        if prefix == 'all':
            return self._get_n_results(search_term, self._MAX_RESULTS)
        n = int(prefix)
        if n <= 0:
            raise ExtractorError('invalid download number %s for query "%s"' % (n, search_term))
        if n > self._MAX_RESULTS:
            # Clamp oversized requests to the extractor's maximum.
            self._downloader.report_warning('%s returns max %i results (you requested %i)' % (self._SEARCH_KEY, self._MAX_RESULTS, n))
            n = self._MAX_RESULTS
        return self._get_n_results(search_term, n)

    def _get_n_results(self, query, n):
        """Get a specified number of results for a query"""
        raise NotImplementedError('This method must be implemented by subclasses')

    @property
    def SEARCH_KEY(self):
        return self._SEARCH_KEY
| oskar456/youtube-dl | youtube_dl/extractor/common.py | Python | unlicense | 122,985 | [
"VisIt"
] | 0b1a6e91ae65e5e5ea63af1884f12bdc422b22a1ca0d2f1ca866424b5660bea4 |
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_core_dependencies.py.template`!!!
CORE_SOURCE_FILES = [
'src/core/ext/filters/census/grpc_context.cc',
'src/core/ext/filters/client_channel/backend_metric.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_channelz.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/config_selector.cc',
'src/core/ext/filters/client_channel/global_subchannel_pool.cc',
'src/core/ext/filters/client_channel/health/health_check_client.cc',
'src/core/ext/filters/client_channel/http_connect_handshaker.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy.cc',
'src/core/ext/filters/client_channel/lb_policy/address_filtering.cc',
'src/core/ext/filters/client_channel/lb_policy/child_policy_handler.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_balancer_addresses.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel_secure.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc',
'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc',
'src/core/ext/filters/client_channel/lb_policy/priority/priority.cc',
'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc',
'src/core/ext/filters/client_channel/lb_policy/weighted_target/weighted_target.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/cds.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/eds.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/lrs.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_routing.cc',
'src/core/ext/filters/client_channel/lb_policy_registry.cc',
'src/core/ext/filters/client_channel/local_subchannel_pool.cc',
'src/core/ext/filters/client_channel/parse_address.cc',
'src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
'src/core/ext/filters/client_channel/resolver.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_fallback.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/dns_resolver_selection.cc',
'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc',
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc',
'src/core/ext/filters/client_channel/resolver/xds/xds_resolver.cc',
'src/core/ext/filters/client_channel/resolver_registry.cc',
'src/core/ext/filters/client_channel/resolver_result_parsing.cc',
'src/core/ext/filters/client_channel/resolving_lb_policy.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/server_address.cc',
'src/core/ext/filters/client_channel/service_config.cc',
'src/core/ext/filters/client_channel/service_config_channel_arg_filter.cc',
'src/core/ext/filters/client_channel/service_config_parser.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_pool_interface.cc',
'src/core/ext/filters/client_idle/client_idle_filter.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/client_authority_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/message_compress_filter.cc',
'src/core/ext/filters/http/message_compress/message_decompress_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
'src/core/ext/filters/max_age/max_age_filter.cc',
'src/core/ext/filters/message_size/message_size_filter.cc',
'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc',
'src/core/ext/filters/workarounds/workaround_utils.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/transport/chttp2/client/authority.cc',
'src/core/ext/transport/chttp2/client/chttp2_connector.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc',
'src/core/ext/transport/chttp2/client/secure/secure_channel_create.cc',
'src/core/ext/transport/chttp2/server/chttp2_server.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc',
'src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/context_list.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/incoming_metadata.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/stream_map.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/inproc/inproc_plugin.cc',
'src/core/ext/transport/inproc/inproc_transport.cc',
'src/core/ext/upb-generated/envoy/annotations/deprecation.upb.c',
'src/core/ext/upb-generated/envoy/annotations/resource.upb.c',
'src/core/ext/upb-generated/envoy/config/accesslog/v3/accesslog.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/circuit_breaker.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/cluster.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/filter.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/outlier_detection.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/address.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/backoff.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/base.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/config_source.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/event_service_config.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/grpc_service.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/health_check.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/http_uri.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/protocol.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/proxy_protocol.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/socket_option.upb.c',
'src/core/ext/upb-generated/envoy/config/endpoint/v3/endpoint.upb.c',
'src/core/ext/upb-generated/envoy/config/endpoint/v3/endpoint_components.upb.c',
'src/core/ext/upb-generated/envoy/config/endpoint/v3/load_report.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/api_listener.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/listener.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/listener_components.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/udp_listener_config.upb.c',
'src/core/ext/upb-generated/envoy/config/rbac/v3/rbac.upb.c',
'src/core/ext/upb-generated/envoy/config/route/v3/route.upb.c',
'src/core/ext/upb-generated/envoy/config/route/v3/route_components.upb.c',
'src/core/ext/upb-generated/envoy/config/route/v3/scoped_route.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/http_tracer.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/network/http_connection_manager/v3/http_connection_manager.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/cert.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/common.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/secret.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/tls.upb.c',
'src/core/ext/upb-generated/envoy/service/cluster/v3/cds.upb.c',
'src/core/ext/upb-generated/envoy/service/discovery/v3/ads.upb.c',
'src/core/ext/upb-generated/envoy/service/discovery/v3/discovery.upb.c',
'src/core/ext/upb-generated/envoy/service/endpoint/v3/eds.upb.c',
'src/core/ext/upb-generated/envoy/service/listener/v3/lds.upb.c',
'src/core/ext/upb-generated/envoy/service/load_stats/v3/lrs.upb.c',
'src/core/ext/upb-generated/envoy/service/route/v3/rds.upb.c',
'src/core/ext/upb-generated/envoy/service/route/v3/srds.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/metadata.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/number.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/path.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/regex.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/string.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/value.upb.c',
'src/core/ext/upb-generated/envoy/type/metadata/v3/metadata.upb.c',
'src/core/ext/upb-generated/envoy/type/tracing/v3/custom_tag.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/http.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/percent.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/range.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/semantic_version.upb.c',
'src/core/ext/upb-generated/gogoproto/gogo.upb.c',
'src/core/ext/upb-generated/google/api/annotations.upb.c',
'src/core/ext/upb-generated/google/api/expr/v1alpha1/syntax.upb.c',
'src/core/ext/upb-generated/google/api/http.upb.c',
'src/core/ext/upb-generated/google/protobuf/any.upb.c',
'src/core/ext/upb-generated/google/protobuf/descriptor.upb.c',
'src/core/ext/upb-generated/google/protobuf/duration.upb.c',
'src/core/ext/upb-generated/google/protobuf/empty.upb.c',
'src/core/ext/upb-generated/google/protobuf/struct.upb.c',
'src/core/ext/upb-generated/google/protobuf/timestamp.upb.c',
'src/core/ext/upb-generated/google/protobuf/wrappers.upb.c',
'src/core/ext/upb-generated/google/rpc/status.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/altscontext.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/handshaker.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/transport_security_common.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c',
'src/core/ext/upb-generated/udpa/annotations/migrate.upb.c',
'src/core/ext/upb-generated/udpa/annotations/sensitive.upb.c',
'src/core/ext/upb-generated/udpa/annotations/status.upb.c',
'src/core/ext/upb-generated/udpa/annotations/versioning.upb.c',
'src/core/ext/upb-generated/udpa/data/orca/v1/orca_load_report.upb.c',
'src/core/ext/upb-generated/validate/validate.upb.c',
'src/core/ext/xds/xds_api.cc',
'src/core/ext/xds/xds_bootstrap.cc',
'src/core/ext/xds/xds_channel_secure.cc',
'src/core/ext/xds/xds_client.cc',
'src/core/ext/xds/xds_client_stats.cc',
'src/core/lib/avl/avl.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channelz.cc',
'src/core/lib/channel/channelz_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/handshaker.cc',
'src/core/lib/channel/handshaker_registry.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_args.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/compression/stream_compression.cc',
'src/core/lib/compression/stream_compression_gzip.cc',
'src/core/lib/compression/stream_compression_identity.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/debug/trace.cc',
'src/core/lib/gpr/alloc.cc',
'src/core/lib/gpr/atm.cc',
'src/core/lib/gpr/cpu_iphone.cc',
'src/core/lib/gpr/cpu_linux.cc',
'src/core/lib/gpr/cpu_posix.cc',
'src/core/lib/gpr/cpu_windows.cc',
'src/core/lib/gpr/env_linux.cc',
'src/core/lib/gpr/env_posix.cc',
'src/core/lib/gpr/env_windows.cc',
'src/core/lib/gpr/log.cc',
'src/core/lib/gpr/log_android.cc',
'src/core/lib/gpr/log_linux.cc',
'src/core/lib/gpr/log_posix.cc',
'src/core/lib/gpr/log_windows.cc',
'src/core/lib/gpr/murmur_hash.cc',
'src/core/lib/gpr/string.cc',
'src/core/lib/gpr/string_posix.cc',
'src/core/lib/gpr/string_util_windows.cc',
'src/core/lib/gpr/string_windows.cc',
'src/core/lib/gpr/sync.cc',
'src/core/lib/gpr/sync_abseil.cc',
'src/core/lib/gpr/sync_posix.cc',
'src/core/lib/gpr/sync_windows.cc',
'src/core/lib/gpr/time.cc',
'src/core/lib/gpr/time_posix.cc',
'src/core/lib/gpr/time_precise.cc',
'src/core/lib/gpr/time_windows.cc',
'src/core/lib/gpr/tls_pthread.cc',
'src/core/lib/gpr/tmpfile_msys.cc',
'src/core/lib/gpr/tmpfile_posix.cc',
'src/core/lib/gpr/tmpfile_windows.cc',
'src/core/lib/gpr/wrap_memcpy.cc',
'src/core/lib/gprpp/arena.cc',
'src/core/lib/gprpp/fork.cc',
'src/core/lib/gprpp/global_config_env.cc',
'src/core/lib/gprpp/host_port.cc',
'src/core/lib/gprpp/mpscq.cc',
'src/core/lib/gprpp/thd_posix.cc',
'src/core/lib/gprpp/thd_windows.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/httpcli_security_connector.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/buffer_list.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/cfstream_handle.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/dualstack_socket_posix.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_cfstream.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_uv.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/error_cfstream.cc',
'src/core/lib/iomgr/ev_apple.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_epollex_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/executor/mpmcqueue.cc',
'src/core/lib/iomgr/executor/threadpool.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_posix.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_unsupported.cc',
'src/core/lib/iomgr/internal_errqueue.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_custom.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_posix_cfstream.cc',
'src/core/lib/iomgr/iomgr_uv.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/is_epollexclusive_available.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/poller/eventmanager_libuv.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_custom.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_custom.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_uv.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_custom.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/resource_quota.cc',
'src/core/lib/iomgr/sockaddr_utils.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_uv.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_cfstream.cc',
'src/core/lib/iomgr/tcp_client_custom.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_custom.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_custom.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_uv.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/time_averaged_stats.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_custom.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/timer_uv.cc',
'src/core/lib/iomgr/udp_server.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/iomgr/work_serializer.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/profiling/basic_timers.cc',
'src/core/lib/profiling/stap_timers.cc',
'src/core/lib/security/context/security_context.cc',
'src/core/lib/security/credentials/alts/alts_credentials.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc',
'src/core/lib/security/credentials/composite/composite_credentials.cc',
'src/core/lib/security/credentials/credentials.cc',
'src/core/lib/security/credentials/credentials_metadata.cc',
'src/core/lib/security/credentials/fake/fake_credentials.cc',
'src/core/lib/security/credentials/google_default/credentials_generic.cc',
'src/core/lib/security/credentials/google_default/google_default_credentials.cc',
'src/core/lib/security/credentials/iam/iam_credentials.cc',
'src/core/lib/security/credentials/jwt/json_token.cc',
'src/core/lib/security/credentials/jwt/jwt_credentials.cc',
'src/core/lib/security/credentials/jwt/jwt_verifier.cc',
'src/core/lib/security/credentials/local/local_credentials.cc',
'src/core/lib/security/credentials/oauth2/oauth2_credentials.cc',
'src/core/lib/security/credentials/plugin/plugin_credentials.cc',
'src/core/lib/security/credentials/ssl/ssl_credentials.cc',
'src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc',
'src/core/lib/security/credentials/tls/tls_credentials.cc',
'src/core/lib/security/security_connector/alts/alts_security_connector.cc',
'src/core/lib/security/security_connector/fake/fake_security_connector.cc',
'src/core/lib/security/security_connector/load_system_roots_fallback.cc',
'src/core/lib/security/security_connector/load_system_roots_linux.cc',
'src/core/lib/security/security_connector/local/local_security_connector.cc',
'src/core/lib/security/security_connector/security_connector.cc',
'src/core/lib/security/security_connector/ssl/ssl_security_connector.cc',
'src/core/lib/security/security_connector/ssl_utils.cc',
'src/core/lib/security/security_connector/ssl_utils_config.cc',
'src/core/lib/security/security_connector/tls/tls_security_connector.cc',
'src/core/lib/security/transport/client_auth_filter.cc',
'src/core/lib/security/transport/secure_endpoint.cc',
'src/core/lib/security/transport/security_handshaker.cc',
'src/core/lib/security/transport/server_auth_filter.cc',
'src/core/lib/security/transport/tsi_error.cc',
'src/core/lib/security/util/json_util.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_intern.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/init.cc',
'src/core/lib/surface/init_secure.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/authority_override.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/byte_stream.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/metadata.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/static_metadata.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/status_metadata.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/uri/uri_parser.cc',
'src/core/plugin_registry/grpc_plugin_registry.cc',
'src/core/tsi/alts/crypt/aes_gcm.cc',
'src/core/tsi/alts/crypt/gsec.cc',
'src/core/tsi/alts/frame_protector/alts_counter.cc',
'src/core/tsi/alts/frame_protector/alts_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_frame_protector.cc',
'src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.cc',
'src/core/tsi/alts/frame_protector/alts_seal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_unseal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/frame_handler.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_client.cc',
'src/core/tsi/alts/handshaker/alts_shared_resource.cc',
'src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc',
'src/core/tsi/alts/handshaker/alts_tsi_utils.cc',
'src/core/tsi/alts/handshaker/transport_security_common_api.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.cc',
'src/core/tsi/fake_transport_security.cc',
'src/core/tsi/local_transport_security.cc',
'src/core/tsi/ssl/session_cache/ssl_session_boringssl.cc',
'src/core/tsi/ssl/session_cache/ssl_session_cache.cc',
'src/core/tsi/ssl/session_cache/ssl_session_openssl.cc',
'src/core/tsi/ssl_transport_security.cc',
'src/core/tsi/transport_security.cc',
'src/core/tsi/transport_security_grpc.cc',
'third_party/abseil-cpp/absl/base/dynamic_annotations.cc',
'third_party/abseil-cpp/absl/base/internal/cycleclock.cc',
'third_party/abseil-cpp/absl/base/internal/low_level_alloc.cc',
'third_party/abseil-cpp/absl/base/internal/raw_logging.cc',
'third_party/abseil-cpp/absl/base/internal/spinlock.cc',
'third_party/abseil-cpp/absl/base/internal/spinlock_wait.cc',
'third_party/abseil-cpp/absl/base/internal/sysinfo.cc',
'third_party/abseil-cpp/absl/base/internal/thread_identity.cc',
'third_party/abseil-cpp/absl/base/internal/throw_delegate.cc',
'third_party/abseil-cpp/absl/base/internal/unscaledcycleclock.cc',
'third_party/abseil-cpp/absl/base/log_severity.cc',
'third_party/abseil-cpp/absl/debugging/internal/address_is_readable.cc',
'third_party/abseil-cpp/absl/debugging/internal/demangle.cc',
'third_party/abseil-cpp/absl/debugging/internal/elf_mem_image.cc',
'third_party/abseil-cpp/absl/debugging/internal/vdso_support.cc',
'third_party/abseil-cpp/absl/debugging/stacktrace.cc',
'third_party/abseil-cpp/absl/debugging/symbolize.cc',
'third_party/abseil-cpp/absl/numeric/int128.cc',
'third_party/abseil-cpp/absl/status/status.cc',
'third_party/abseil-cpp/absl/status/status_payload_printer.cc',
'third_party/abseil-cpp/absl/strings/ascii.cc',
'third_party/abseil-cpp/absl/strings/charconv.cc',
'third_party/abseil-cpp/absl/strings/cord.cc',
'third_party/abseil-cpp/absl/strings/escaping.cc',
'third_party/abseil-cpp/absl/strings/internal/charconv_bigint.cc',
'third_party/abseil-cpp/absl/strings/internal/charconv_parse.cc',
'third_party/abseil-cpp/absl/strings/internal/escaping.cc',
'third_party/abseil-cpp/absl/strings/internal/memutil.cc',
'third_party/abseil-cpp/absl/strings/internal/ostringstream.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/arg.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/bind.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/extension.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/float_conversion.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/output.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/parser.cc',
'third_party/abseil-cpp/absl/strings/internal/utf8.cc',
'third_party/abseil-cpp/absl/strings/match.cc',
'third_party/abseil-cpp/absl/strings/numbers.cc',
'third_party/abseil-cpp/absl/strings/str_cat.cc',
'third_party/abseil-cpp/absl/strings/str_replace.cc',
'third_party/abseil-cpp/absl/strings/str_split.cc',
'third_party/abseil-cpp/absl/strings/string_view.cc',
'third_party/abseil-cpp/absl/strings/substitute.cc',
'third_party/abseil-cpp/absl/synchronization/barrier.cc',
'third_party/abseil-cpp/absl/synchronization/blocking_counter.cc',
'third_party/abseil-cpp/absl/synchronization/internal/create_thread_identity.cc',
'third_party/abseil-cpp/absl/synchronization/internal/graphcycles.cc',
'third_party/abseil-cpp/absl/synchronization/internal/per_thread_sem.cc',
'third_party/abseil-cpp/absl/synchronization/internal/waiter.cc',
'third_party/abseil-cpp/absl/synchronization/mutex.cc',
'third_party/abseil-cpp/absl/synchronization/notification.cc',
'third_party/abseil-cpp/absl/time/civil_time.cc',
'third_party/abseil-cpp/absl/time/clock.cc',
'third_party/abseil-cpp/absl/time/duration.cc',
'third_party/abseil-cpp/absl/time/format.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/civil_time_detail.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_fixed.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_format.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_if.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_impl.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_info.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_libc.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_lookup.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_posix.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/zone_info_source.cc',
'third_party/abseil-cpp/absl/time/time.cc',
'third_party/abseil-cpp/absl/types/bad_optional_access.cc',
'third_party/address_sorting/address_sorting.c',
'third_party/address_sorting/address_sorting_posix.c',
'third_party/address_sorting/address_sorting_windows.c',
'third_party/boringssl-with-bazel/err_data.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_bitstr.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_bool.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_d2i_fp.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_dup.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_enum.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_gentm.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_i2d_fp.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_int.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_mbstr.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_object.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_octet.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_print.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_strnid.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_time.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_type.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_utctm.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_utf8.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn1_lib.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn1_par.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn_pack.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_enum.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_int.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_string.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_dec.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_enc.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_fre.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_new.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_typ.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_utl.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/time_support.c',
'third_party/boringssl-with-bazel/src/crypto/base64/base64.c',
'third_party/boringssl-with-bazel/src/crypto/bio/bio.c',
'third_party/boringssl-with-bazel/src/crypto/bio/bio_mem.c',
'third_party/boringssl-with-bazel/src/crypto/bio/connect.c',
'third_party/boringssl-with-bazel/src/crypto/bio/fd.c',
'third_party/boringssl-with-bazel/src/crypto/bio/file.c',
'third_party/boringssl-with-bazel/src/crypto/bio/hexdump.c',
'third_party/boringssl-with-bazel/src/crypto/bio/pair.c',
'third_party/boringssl-with-bazel/src/crypto/bio/printf.c',
'third_party/boringssl-with-bazel/src/crypto/bio/socket.c',
'third_party/boringssl-with-bazel/src/crypto/bio/socket_helper.c',
'third_party/boringssl-with-bazel/src/crypto/bn_extra/bn_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/bn_extra/convert.c',
'third_party/boringssl-with-bazel/src/crypto/buf/buf.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/asn1_compat.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/ber.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/cbb.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/cbs.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/unicode.c',
'third_party/boringssl-with-bazel/src/crypto/chacha/chacha.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/cipher_extra.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/derive_key.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesccm.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesctrhmac.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesgcmsiv.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_chacha20poly1305.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_null.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_rc2.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_rc4.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_tls.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/tls_cbc.c',
'third_party/boringssl-with-bazel/src/crypto/cmac/cmac.c',
'third_party/boringssl-with-bazel/src/crypto/conf/conf.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-aarch64-fuchsia.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-aarch64-linux.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-arm-linux.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-arm.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-intel.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-ppc64le.c',
'third_party/boringssl-with-bazel/src/crypto/crypto.c',
'third_party/boringssl-with-bazel/src/crypto/curve25519/curve25519.c',
'third_party/boringssl-with-bazel/src/crypto/curve25519/spake25519.c',
'third_party/boringssl-with-bazel/src/crypto/dh/check.c',
'third_party/boringssl-with-bazel/src/crypto/dh/dh.c',
'third_party/boringssl-with-bazel/src/crypto/dh/dh_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/dh/params.c',
'third_party/boringssl-with-bazel/src/crypto/digest_extra/digest_extra.c',
'third_party/boringssl-with-bazel/src/crypto/dsa/dsa.c',
'third_party/boringssl-with-bazel/src/crypto/dsa/dsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/ec_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/ec_derive.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/hash_to_curve.c',
'third_party/boringssl-with-bazel/src/crypto/ecdh_extra/ecdh_extra.c',
'third_party/boringssl-with-bazel/src/crypto/ecdsa_extra/ecdsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/engine/engine.c',
'third_party/boringssl-with-bazel/src/crypto/err/err.c',
'third_party/boringssl-with-bazel/src/crypto/evp/digestsign.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp_ctx.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_dsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ec.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ec_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ed25519.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ed25519_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_rsa.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_rsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_x25519.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_x25519_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/pbkdf.c',
'third_party/boringssl-with-bazel/src/crypto/evp/print.c',
'third_party/boringssl-with-bazel/src/crypto/evp/scrypt.c',
'third_party/boringssl-with-bazel/src/crypto/evp/sign.c',
'third_party/boringssl-with-bazel/src/crypto/ex_data.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/bcm.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/fips_shared_support.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/is_fips.c',
'third_party/boringssl-with-bazel/src/crypto/hkdf/hkdf.c',
'third_party/boringssl-with-bazel/src/crypto/hrss/hrss.c',
'third_party/boringssl-with-bazel/src/crypto/lhash/lhash.c',
'third_party/boringssl-with-bazel/src/crypto/mem.c',
'third_party/boringssl-with-bazel/src/crypto/obj/obj.c',
'third_party/boringssl-with-bazel/src/crypto/obj/obj_xref.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_all.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_info.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_lib.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_oth.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_pk8.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_pkey.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_x509.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_xaux.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs7/pkcs7.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs7/pkcs7_x509.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/p5_pbev2.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/pkcs8.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/pkcs8_x509.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_arm.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_vec.c',
'third_party/boringssl-with-bazel/src/crypto/pool/pool.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/deterministic.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/forkunsafe.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/fuchsia.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/rand_extra.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/windows.c',
'third_party/boringssl-with-bazel/src/crypto/rc4/rc4.c',
'third_party/boringssl-with-bazel/src/crypto/refcount_c11.c',
'third_party/boringssl-with-bazel/src/crypto/refcount_lock.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_print.c',
'third_party/boringssl-with-bazel/src/crypto/siphash/siphash.c',
'third_party/boringssl-with-bazel/src/crypto/stack/stack.c',
'third_party/boringssl-with-bazel/src/crypto/thread.c',
'third_party/boringssl-with-bazel/src/crypto/thread_none.c',
'third_party/boringssl-with-bazel/src/crypto/thread_pthread.c',
'third_party/boringssl-with-bazel/src/crypto/thread_win.c',
'third_party/boringssl-with-bazel/src/crypto/trust_token/pmbtoken.c',
'third_party/boringssl-with-bazel/src/crypto/trust_token/trust_token.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_digest.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_sign.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_strex.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_verify.c',
'third_party/boringssl-with-bazel/src/crypto/x509/algorithm.c',
'third_party/boringssl-with-bazel/src/crypto/x509/asn1_gen.c',
'third_party/boringssl-with-bazel/src/crypto/x509/by_dir.c',
'third_party/boringssl-with-bazel/src/crypto/x509/by_file.c',
'third_party/boringssl-with-bazel/src/crypto/x509/i2d_pr.c',
'third_party/boringssl-with-bazel/src/crypto/x509/rsa_pss.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_crl.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_x509a.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_att.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_cmp.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_d2.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_def.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_ext.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_lu.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_obj.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_r2x.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_set.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_trs.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_txt.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_v3.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_vfy.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_vpm.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509cset.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509name.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509rset.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509spki.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_algor.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_all.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_attrib.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_crl.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_exten.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_info.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_name.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_pkey.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_pubkey.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_sig.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_spki.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_val.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_x509a.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_cache.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_data.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_lib.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_map.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_node.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_tree.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_akey.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_akeya.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_alt.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_bcons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_bitst.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_conf.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_cpols.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_crld.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_enum.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_extku.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_genn.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ia5.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_info.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_int.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_lib.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ncons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ocsp.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pci.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pcia.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pcons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pku.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pmaps.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_prn.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_purp.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_skey.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_sxnet.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_utl.c',
'third_party/boringssl-with-bazel/src/ssl/bio_ssl.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_both.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_pkt.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_srtp.cc',
'third_party/boringssl-with-bazel/src/ssl/dtls_method.cc',
'third_party/boringssl-with-bazel/src/ssl/dtls_record.cc',
'third_party/boringssl-with-bazel/src/ssl/handoff.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake_client.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake_server.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_both.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_pkt.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_aead_ctx.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_asn1.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_buffer.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_cert.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_cipher.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_file.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_key_share.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_privkey.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_session.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_stat.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_transcript.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_versions.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_x509.cc',
'third_party/boringssl-with-bazel/src/ssl/t1_enc.cc',
'third_party/boringssl-with-bazel/src/ssl/t1_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_both.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_client.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_enc.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_server.cc',
'third_party/boringssl-with-bazel/src/ssl/tls_method.cc',
'third_party/boringssl-with-bazel/src/ssl/tls_record.cc',
'third_party/cares/cares/ares__close_sockets.c',
'third_party/cares/cares/ares__get_hostent.c',
'third_party/cares/cares/ares__read_line.c',
'third_party/cares/cares/ares__timeval.c',
'third_party/cares/cares/ares_cancel.c',
'third_party/cares/cares/ares_create_query.c',
'third_party/cares/cares/ares_data.c',
'third_party/cares/cares/ares_destroy.c',
'third_party/cares/cares/ares_expand_name.c',
'third_party/cares/cares/ares_expand_string.c',
'third_party/cares/cares/ares_fds.c',
'third_party/cares/cares/ares_free_hostent.c',
'third_party/cares/cares/ares_free_string.c',
'third_party/cares/cares/ares_getenv.c',
'third_party/cares/cares/ares_gethostbyaddr.c',
'third_party/cares/cares/ares_gethostbyname.c',
'third_party/cares/cares/ares_getnameinfo.c',
'third_party/cares/cares/ares_getopt.c',
'third_party/cares/cares/ares_getsock.c',
'third_party/cares/cares/ares_init.c',
'third_party/cares/cares/ares_library_init.c',
'third_party/cares/cares/ares_llist.c',
'third_party/cares/cares/ares_mkquery.c',
'third_party/cares/cares/ares_nowarn.c',
'third_party/cares/cares/ares_options.c',
'third_party/cares/cares/ares_parse_a_reply.c',
'third_party/cares/cares/ares_parse_aaaa_reply.c',
'third_party/cares/cares/ares_parse_mx_reply.c',
'third_party/cares/cares/ares_parse_naptr_reply.c',
'third_party/cares/cares/ares_parse_ns_reply.c',
'third_party/cares/cares/ares_parse_ptr_reply.c',
'third_party/cares/cares/ares_parse_soa_reply.c',
'third_party/cares/cares/ares_parse_srv_reply.c',
'third_party/cares/cares/ares_parse_txt_reply.c',
'third_party/cares/cares/ares_platform.c',
'third_party/cares/cares/ares_process.c',
'third_party/cares/cares/ares_query.c',
'third_party/cares/cares/ares_search.c',
'third_party/cares/cares/ares_send.c',
'third_party/cares/cares/ares_strcasecmp.c',
'third_party/cares/cares/ares_strdup.c',
'third_party/cares/cares/ares_strerror.c',
'third_party/cares/cares/ares_strsplit.c',
'third_party/cares/cares/ares_timeout.c',
'third_party/cares/cares/ares_version.c',
'third_party/cares/cares/ares_writev.c',
'third_party/cares/cares/bitncmp.c',
'third_party/cares/cares/inet_net_pton.c',
'third_party/cares/cares/inet_ntop.c',
'third_party/cares/cares/windows_port.c',
'third_party/re2/re2/bitstate.cc',
'third_party/re2/re2/compile.cc',
'third_party/re2/re2/dfa.cc',
'third_party/re2/re2/filtered_re2.cc',
'third_party/re2/re2/mimics_pcre.cc',
'third_party/re2/re2/nfa.cc',
'third_party/re2/re2/onepass.cc',
'third_party/re2/re2/parse.cc',
'third_party/re2/re2/perl_groups.cc',
'third_party/re2/re2/prefilter.cc',
'third_party/re2/re2/prefilter_tree.cc',
'third_party/re2/re2/prog.cc',
'third_party/re2/re2/re2.cc',
'third_party/re2/re2/regexp.cc',
'third_party/re2/re2/set.cc',
'third_party/re2/re2/simplify.cc',
'third_party/re2/re2/stringpiece.cc',
'third_party/re2/re2/tostring.cc',
'third_party/re2/re2/unicode_casefold.cc',
'third_party/re2/re2/unicode_groups.cc',
'third_party/re2/util/pcre.cc',
'third_party/re2/util/rune.cc',
'third_party/re2/util/strutil.cc',
'third_party/upb/upb/decode.c',
'third_party/upb/upb/encode.c',
'third_party/upb/upb/msg.c',
'third_party/upb/upb/port.c',
'third_party/upb/upb/table.c',
'third_party/upb/upb/upb.c',
'third_party/zlib/adler32.c',
'third_party/zlib/compress.c',
'third_party/zlib/crc32.c',
'third_party/zlib/deflate.c',
'third_party/zlib/gzclose.c',
'third_party/zlib/gzlib.c',
'third_party/zlib/gzread.c',
'third_party/zlib/gzwrite.c',
'third_party/zlib/infback.c',
'third_party/zlib/inffast.c',
'third_party/zlib/inflate.c',
'third_party/zlib/inftrees.c',
'third_party/zlib/trees.c',
'third_party/zlib/uncompr.c',
'third_party/zlib/zutil.c',
]
# Map of platform/architecture key -> list of pre-generated BoringSSL assembly
# sources to compile for that target.  Keys encode OS + CPU arch
# (e.g. 'crypto_linux_x86_64'); the build picks exactly one entry.
# Unix-style targets use GNU-as '.S' files; Windows targets use nasm '.asm'.
# NOTE(review): files like this are typically auto-generated from the gRPC
# build templates -- edit the generator, not this list (confirm before
# hand-editing).
ASM_SOURCE_FILES = {
    'crypto_ios_aarch64': [
        'third_party/boringssl-with-bazel/ios-aarch64/crypto/chacha/chacha-armv8.S',
        'third_party/boringssl-with-bazel/ios-aarch64/crypto/test/trampoline-armv8.S',
        'third_party/boringssl-with-bazel/ios-aarch64/crypto/fipsmodule/aesv8-armx64.S',
        'third_party/boringssl-with-bazel/ios-aarch64/crypto/fipsmodule/armv8-mont.S',
        'third_party/boringssl-with-bazel/ios-aarch64/crypto/fipsmodule/ghashv8-armx64.S',
        'third_party/boringssl-with-bazel/ios-aarch64/crypto/fipsmodule/ghash-neon-armv8.S',
        'third_party/boringssl-with-bazel/ios-aarch64/crypto/fipsmodule/sha1-armv8.S',
        'third_party/boringssl-with-bazel/ios-aarch64/crypto/fipsmodule/sha256-armv8.S',
        'third_party/boringssl-with-bazel/ios-aarch64/crypto/fipsmodule/sha512-armv8.S',
        'third_party/boringssl-with-bazel/ios-aarch64/crypto/fipsmodule/vpaes-armv8.S',
    ],
    'crypto_mac_x86_64': [
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/chacha/chacha-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/test/trampoline-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/aesni-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/ghash-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/md5-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/p256-x86_64-asm.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/rdrand-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/rsaz-avx2.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/sha1-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/sha256-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/sha512-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/vpaes-x86_64.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/x86_64-mont5.S',
        'third_party/boringssl-with-bazel/mac-x86_64/crypto/fipsmodule/x86_64-mont.S',
    ],
    # Windows targets: nasm-format '.asm' sources.
    'crypto_win_x86': [
        'third_party/boringssl-with-bazel/win-x86/crypto/chacha/chacha-x86.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/test/trampoline-x86.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/aesni-x86.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/bn-586.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/co-586.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/ghash-ssse3-x86.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/ghash-x86.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/md5-586.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/sha1-586.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/sha256-586.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/sha512-586.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/vpaes-x86.asm',
        'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/x86-mont.asm',
    ],
    'crypto_linux_ppc64le': [
        'third_party/boringssl-with-bazel/linux-ppc64le/crypto/test/trampoline-ppc.S',
        'third_party/boringssl-with-bazel/linux-ppc64le/crypto/fipsmodule/aesp8-ppc.S',
        'third_party/boringssl-with-bazel/linux-ppc64le/crypto/fipsmodule/ghashp8-ppc.S',
    ],
    'crypto_mac_x86': [
        'third_party/boringssl-with-bazel/mac-x86/crypto/chacha/chacha-x86.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/test/trampoline-x86.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/aesni-x86.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/bn-586.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/co-586.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/ghash-ssse3-x86.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/ghash-x86.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/md5-586.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/sha1-586.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/sha256-586.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/sha512-586.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/vpaes-x86.S',
        'third_party/boringssl-with-bazel/mac-x86/crypto/fipsmodule/x86-mont.S',
    ],
    'crypto_linux_x86': [
        'third_party/boringssl-with-bazel/linux-x86/crypto/chacha/chacha-x86.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/test/trampoline-x86.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/aesni-x86.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/bn-586.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/co-586.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/ghash-ssse3-x86.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/ghash-x86.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/md5-586.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/sha1-586.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/sha256-586.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/sha512-586.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/vpaes-x86.S',
        'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/x86-mont.S',
    ],
    'crypto_ios_arm': [
        'third_party/boringssl-with-bazel/ios-arm/crypto/chacha/chacha-armv4.S',
        'third_party/boringssl-with-bazel/ios-arm/crypto/test/trampoline-armv4.S',
        'third_party/boringssl-with-bazel/ios-arm/crypto/fipsmodule/aesv8-armx32.S',
        'third_party/boringssl-with-bazel/ios-arm/crypto/fipsmodule/armv4-mont.S',
        'third_party/boringssl-with-bazel/ios-arm/crypto/fipsmodule/bsaes-armv7.S',
        'third_party/boringssl-with-bazel/ios-arm/crypto/fipsmodule/ghash-armv4.S',
        'third_party/boringssl-with-bazel/ios-arm/crypto/fipsmodule/ghashv8-armx32.S',
        'third_party/boringssl-with-bazel/ios-arm/crypto/fipsmodule/sha1-armv4-large.S',
        'third_party/boringssl-with-bazel/ios-arm/crypto/fipsmodule/sha256-armv4.S',
        'third_party/boringssl-with-bazel/ios-arm/crypto/fipsmodule/sha512-armv4.S',
        'third_party/boringssl-with-bazel/ios-arm/crypto/fipsmodule/vpaes-armv7.S',
    ],
    'crypto_linux_x86_64': [
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/chacha/chacha-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/test/trampoline-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/aesni-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/ghash-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/md5-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/p256-x86_64-asm.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/rdrand-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/rsaz-avx2.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/sha1-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/sha256-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/sha512-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/vpaes-x86_64.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/x86_64-mont5.S',
        'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/x86_64-mont.S',
        'third_party/boringssl-with-bazel/src/crypto/hrss/asm/poly_rq_mul.S',
    ],
    'crypto_win_x86_64': [
        'third_party/boringssl-with-bazel/win-x86_64/crypto/chacha/chacha-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/test/trampoline-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/aesni-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/ghash-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/md5-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/p256-x86_64-asm.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/rdrand-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/rsaz-avx2.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/sha1-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/sha256-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/sha512-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/vpaes-x86_64.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/x86_64-mont5.asm',
        'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/x86_64-mont.asm',
    ],
    'crypto_linux_aarch64': [
        'third_party/boringssl-with-bazel/linux-aarch64/crypto/chacha/chacha-armv8.S',
        'third_party/boringssl-with-bazel/linux-aarch64/crypto/test/trampoline-armv8.S',
        'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/aesv8-armx64.S',
        'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/armv8-mont.S',
        'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/ghashv8-armx64.S',
        'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/ghash-neon-armv8.S',
        'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/sha1-armv8.S',
        'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/sha256-armv8.S',
        'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/sha512-armv8.S',
        'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/vpaes-armv8.S',
    ],
    'crypto_linux_arm': [
        'third_party/boringssl-with-bazel/linux-arm/crypto/chacha/chacha-armv4.S',
        'third_party/boringssl-with-bazel/linux-arm/crypto/test/trampoline-armv4.S',
        'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/aesv8-armx32.S',
        'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/armv4-mont.S',
        'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/bsaes-armv7.S',
        'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/ghash-armv4.S',
        'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/ghashv8-armx32.S',
        'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/sha1-armv4-large.S',
        'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/sha256-armv4.S',
        'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/sha512-armv4.S',
        'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/vpaes-armv7.S',
        'third_party/boringssl-with-bazel/src/crypto/curve25519/asm/x25519-asm-arm.S',
        'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_arm_asm.S',
    ],
}
| firebase/grpc | src/python/grpcio/grpc_core_dependencies.py | Python | apache-2.0 | 66,878 | [
"ORCA"
] | 5a54b63d2133962e61d2cbcb35ebb46141937f91084492c2b0872b5c66f20e59 |
"""MPF plugin for sounds. Includes SoundController, Channel, Sound, Track, and
StreamTrack parent classes."""
# sound.py
# Mission Pinball Framework
# Written by Brian Madden & Gabe Knuth
# Released under the MIT License. (See license info at the end of this file.)
# Documentation and more info at http://missionpinball.com/mpf
import logging
import time
import Queue
import uuid
import copy
from mpf.system.assets import Asset, AssetManager
from mpf.system.config import Config
# Optional-dependency guard: the sound plugin requires pygame, but the rest of
# the framework must still import this module without it.  The outcome is
# recorded in the module-level flag `import_success`, which preload_check()
# reports to the plugin loader.
# (The previous `global import_success` statement was a no-op at module scope
# and has been removed; the bare `except:` has been narrowed to ImportError so
# unrelated failures inside pygame are not silently swallowed.)
try:
    import pygame
    import pygame.locals
    import_success = True
except ImportError:
    import_success = False
def preload_check(machine):
    """Report whether this plugin's dependencies are available.

    Args:
        machine: The main machine controller object.  Unused here, but kept
            because the plugin loader calls every plugin's preload_check()
            with this signature.

    Returns:
        bool: True if pygame imported successfully at module load time,
        False otherwise.
    """
    # `import_success` is set by the module-level try/except import guard,
    # so it is always exactly True or False; return it directly instead of
    # the former `if/else: return True/False` dance.
    return import_success
class SoundController(object):
    """Parent class for the sound controller which is responsible for all audio,
    sounds, and music in the machine. There is only one of these per machine.

    Args:
        machine: The main machine controller object.
    """

    def __init__(self, machine):
        self.log = logging.getLogger('SoundController')
        self.machine = machine

        if 'soundsystem' not in self.machine.config:
            self.config = dict()
            return  # todo move to preload_check()

        self.log.debug("Loading the Sound Controller")

        self.machine.sound = self
        self.config = self.machine.config['soundsystem']

        self.tracks = dict()  # k = track name, v = track obj
        self.stream_track = None
        self.pygame_channels = list()
        self.sound_events = dict()

        self.volume = 1.0

        if 'volume_steps' not in self.config:
            self.config['volume_steps'] = 20

        if 'initial_volume' in self.config:
            self.volume = self.config['initial_volume']

        self.set_volume(volume=self.volume)

        self.machine.request_pygame()

        # Get the pygame pre-initialization audio requests in.
        # 0 is the 'auto' setting for all of these.
        if 'buffer' not in self.config or self.config['buffer'] == 'auto':
            self.config['buffer'] = 0

        if 'bits' not in self.config or self.config['bits'] == 'auto':
            self.config['bits'] = 0

        if 'frequency' not in self.config or self.config['frequency'] == 'auto':
            self.config['frequency'] = 0

        if 'channels' not in self.config:
            self.config['channels'] = 1

        pygame.mixer.pre_init(frequency=self.config['frequency'],
                              size=self.config['bits'],
                              channels=self.config['channels'],
                              buffer=self.config['buffer']
                              )
        # Note pygame docs says pre_init() kwarg should be 'buffersize', but
        # it's actually 'buffer'.

        # Register events
        self.machine.events.add_handler('action_set_volume', self.set_volume)
        self.machine.events.add_handler('pygame_initialized', self._initialize)

        if 'soundplayer' in self.machine.config:
            self.machine.events.add_handler('init_phase_5',
                                            self.register_sound_events,
                                            config=self.machine.config['soundplayer'])

        self.machine.modes.register_start_method(self.register_sound_events,
                                                 'soundplayer')

    def _initialize(self):
        # Initialize the sound controller. Not done in __init__() because we
        # need Pygame to be setup first.
        frequency, bits, channels = pygame.mixer.get_init()

        self.log.info("Pygame Sound Mixer configuration. Freq: %s, Bits: %s, "
                      "Channels: %s", frequency, bits, channels)

        # Configure Pygame to use the correct number of channels. We need one
        # for each simultaneous sound we want to play.
        num_channels = 0  # How many total

        if 'tracks' in self.config:
            for item in self.config['tracks'].values():
                if 'simultaneous_sounds' in item:
                    num_channels += item['simultaneous_sounds']
                else:
                    num_channels += 1

        if not num_channels:
            num_channels = 1

        pygame.mixer.set_num_channels(num_channels)

        # Configure Tracks
        if 'tracks' in self.config:
            for k, v in self.config['tracks'].iteritems():
                self.create_track(name=k, config=v)
        else:
            self.create_track(name='default')

        # Configure streaming track
        if 'stream' in self.config:
            if 'name' not in self.config['stream']:
                self.config['stream']['name'] = 'music'
            # Fix: pass the 'stream' sub-config (whose 'name' default was
            # just filled in above), not the whole soundsystem config.
            self.stream_track = StreamTrack(self.machine, self.config['stream'])

        # Create the sound AssetManager. Note: config_section, path_string,
        # asset_class, asset_attribute and file_extensions are module-level
        # constants defined at the bottom of this file.
        AssetManager(
            machine=self.machine,
            config_section=config_section,
            path_string=(self.machine.config['mediacontroller']['paths'][path_string]),
            asset_class=asset_class,
            asset_attribute=asset_attribute,
            file_extensions=file_extensions)

    def create_track(self, name, config=None):
        """ Creates a new MPF track add registers in the central track list.

        Args:
            name: String name of this track used for identifying where sounds
                are played.
            config: Config dictionary for this track.

        Note: "Tracks" in MPF are like channels.. you might have a "music"
        track, a "voice" track, a "sound effects" track, etc.
        """
        self.tracks[name] = Track(self.machine, name, self.pygame_channels,
                                  config)

    def register_sound_events(self, config, mode=None, priority=0):
        """Processes a SoundPlayer config section and registers its entries.

        Args:
            config: SoundPlayer subsection of the config dict.
            mode: Unused here; present for the mode start-method interface.
            priority: Base priority applied to all registered entries.

        Returns:
            A tuple of (unregister method, key list) so a caller (e.g. a mode
            stopping) can later remove everything registered here.
        """
        self.log.debug("Processing SoundPlayer configuration. Base Priority: "
                       "%s", priority)
        self.log.debug("config: %s", config)

        key_list = list()

        for entry_name in config:
            if 'block' not in config[entry_name]:
                config[entry_name]['block'] = False
            block = config[entry_name].pop('block')

            key_list.append(self.register_sound_event(config=config[entry_name],
                                                      priority=priority,
                                                      block=block))

        return self.unregister_sound_events, key_list

    def unregister_sound_events(self, key_list):
        """Removes every sound event registration identified in key_list."""
        self.log.debug("Unloading SoundPlayer events")
        for key in key_list:
            self.unregister_sound_event(key)

    def register_sound_event(self, config, priority=0, block=False):
        """Sets up game sounds from the config file.

        Args:
            config: Python dictionary which contains the game sounds settings.
            priority: Relative priority added to this entry's playback.
            block: Whether this entry blocks lower-priority entries for the
                same event.

        Returns:
            The uuid key identifying this registration (for
            unregister_sound_event()), or False if no 'sound' was configured.
        """
        if 'sound' not in config:
            return False
        elif isinstance(config['sound'], str):
            # Use isinstance (not "type(...) is str") so str subclasses work.
            config['sound'] = self.machine.sounds[config['sound']]
            # this is kind of weird because once the sound has been registered,
            # the sound will still be converted from the string to the object.
            # This is an unintended side effect of passing around a dict, but
            # I guess it's ok? We just have to check to make sure we have a
            # string before we try to convert it to an object. If not, the
            # conversion has already been done.

        if 'start_events' not in config:
            config['start_events'] = list()
        else:
            config['start_events'] = Config.string_to_list(
                config['start_events'])

        if 'stop_events' not in config:
            config['stop_events'] = list()
        else:
            config['stop_events'] = Config.string_to_list(
                config['stop_events'])

        # Fill in defaults for all the optional playback settings.
        if 'duration' not in config or config['duration'] is None:
            config['duration'] = None

        if 'loops' not in config or config['loops'] is None:
            config['loops'] = 0

        if 'priority' not in config or config['priority'] is None:
            config['priority'] = 0

        if 'fade_in' not in config or config['fade_in'] is None:
            config['fade_in'] = 0

        if 'fade_out' not in config or config['fade_out'] is None:
            config['fade_out'] = 0

        if 'channel' not in config or config['channel'] is None:
            config['channel'] = 'auto'

        if 'volume' not in config or config['volume'] is None:
            config['volume'] = 1
        elif config['volume'] > 2:
            config['volume'] = 2

        # One uuid identifies every handler this registration creates.
        config['key'] = uuid.uuid4()

        for event in config['start_events']:
            settings = copy.copy(config)
            settings.pop('start_events')
            settings.pop('stop_events')

            if event not in self.sound_events:
                self.sound_events[event] = list()
                self.machine.events.add_handler(event,
                                                self._sound_event_callback,
                                                event_name=event)

            kwargs = dict()  # temp

            sound_event_entry = dict()
            sound_event_entry['settings'] = settings
            sound_event_entry['kwargs'] = kwargs
            sound_event_entry['priority'] = priority
            sound_event_entry['block'] = block
            sound_event_entry['type'] = 'start'

            self.sound_events[event].append(sound_event_entry)

        for event in config['stop_events']:
            settings = copy.copy(config)
            settings.pop('start_events')
            settings.pop('stop_events')

            if event not in self.sound_events:
                self.sound_events[event] = list()
                self.machine.events.add_handler(event,
                                                self._sound_event_callback,
                                                event_name=event)

            kwargs = dict()  # temp

            sound_event_entry = dict()
            sound_event_entry['settings'] = settings
            sound_event_entry['kwargs'] = kwargs
            sound_event_entry['priority'] = priority
            sound_event_entry['block'] = block
            sound_event_entry['type'] = 'stop'

            self.sound_events[event].append(sound_event_entry)

        # todo sort by priority

        return config['key']

    def unregister_sound_event(self, key):
        """Removes all entries (and orphaned event handlers) for *key*."""
        # Iterate over a snapshot of the keys since entries may be deleted
        # from self.sound_events while looping (safe on Python 2 and 3).
        for event in list(self.sound_events.keys()):
            for entry in self.sound_events[event][:]:
                if entry['settings']['key'] == key:
                    self.sound_events[event].remove(entry)
            if not self.sound_events[event]:
                self.machine.events.remove_handler_by_event(event,
                    self._sound_event_callback)
                del self.sound_events[event]

    def _sound_event_callback(self, event_name, **kwargs):
        """Plays/stops every sound registered for *event_name*."""
        # Loop through all the sound events for this event
        if event_name not in self.sound_events:
            self.log.critical("got sound callback but did not find event?")
            raise Exception()

        sound_list = self.sound_events[event_name]

        for sound in sound_list:
            sound_obj = sound['settings']['sound']
            kwargs = sound['settings']

            if sound['type'] == 'start':
                sound_obj.play(**kwargs)
            elif sound['type'] == 'stop':
                sound_obj.stop(**kwargs)

    def set_volume(self, volume=None, change=None, **kwargs):
        """Sets the overall volume of the sound system.

        Args:
            volume: The new volume level, a floating point value between 0.0
                and 1.0. 1.0 is full volume. 0.0 is mute.
            change: A positive or negative value between 0.0 and 1.0 of a
                change in volume that will be made.
            kwargs: Not used here. Included because this method is often
                called from events which might contain additional kwargs.

        Note that the volume can never be increased above 1.0. This sound
        volume level only affects MPF. You might have to set the overall
        system sound to in the OS.
        """
        old_volume = self.volume

        # Compare against None (not truthiness) so that an explicit
        # volume=0.0 (mute) is honored instead of silently ignored.
        if volume is not None:
            self.volume = float(volume)
        elif change is not None:
            self.volume += float(change)

        # Clamp to the valid 0.0 - 1.0 range.
        if self.volume > 1.0:
            self.volume = 1.0
        elif self.volume < 0:
            self.volume = 0.0

        display_volume = int(self.volume * self.config['volume_steps'])

        if display_volume == self.config['volume_steps']:
            display_volume = "MAX"
        elif display_volume:
            display_volume = str(display_volume)
        else:
            display_volume = "OFF"  # todo move to config

        # Change the volume of currently playing sounds on every channel.
        for channel in self.pygame_channels:
            if channel.pygame_channel.get_busy():
                playing_sound = channel.pygame_channel.get_sound()

                new_volume = (1.0 *
                              self.volume *
                              channel.current_sound.config['volume'] *
                              channel.parent_track.volume)

                playing_sound.set_volume(new_volume)

        if self.stream_track and pygame.mixer.music.get_busy():
            new_volume = (1.0 *
                          self.volume *
                          self.stream_track.volume *
                          self.stream_track.current_sound.config['volume'])

            pygame.mixer.music.set_volume(new_volume)

        self.machine.events.post('volume_change', volume=self.volume,
                                 change=old_volume - self.volume,
                                 display_volume=display_volume)

    def get_volume(self):
        """Returns the current overall volume as a float (0.0 - 1.0)."""
        return self.volume
class Track(object):
    """Parent class for an MPF track. Each sound track in MPF can be made up
    of one or more Pygame sound channels to support multiple simultaneous
    sounds.

    Args:
        machine: The main machine controller object.
        name: A string of the name this channel will be referred to, such as
            "voice" or "sfx."
        global_channel_list: A python list which keeps track of the global
            Pygame channels in use.
        config: A python dictionary containing the configuration settings for
            this track.
    """

    def __init__(self, machine, name, global_channel_list, config):
        self.log = logging.getLogger('Track.' + name)
        self.log.debug("Creating Track with config: %s", config)
        self.name = name
        self.config = config
        self.pygame_channels = list()
        self.volume = 1
        self.queue = Queue.PriorityQueue()

        if 'simultaneous_sounds' not in self.config:
            self.config['simultaneous_sounds'] = 1

        if 'preload' not in self.config:
            self.config['preload'] = False

        if 'volume' in self.config:
            self.volume = self.config['volume']

        # One Pygame channel per simultaneous sound this track supports.
        for track in range(self.config['simultaneous_sounds']):
            self.create_channel(machine, global_channel_list)

        machine.events.add_handler('timer_tick', self._tick)

    def create_channel(self, machine, global_channel_list):
        """Factory method which creates a Pygame sound channel to be used with
        this track.

        Args:
            machine: The main machine object.
            global_channel_list: A list which contains the global list of
                Pygame channels in use by MPF.
        """
        next_channel_num = len(global_channel_list)
        this_channel_object = Channel(machine, self, next_channel_num)

        global_channel_list.append(this_channel_object)
        self.pygame_channels.append(this_channel_object)

    def play(self, sound, priority, **settings):
        """Plays a sound on this track.

        Args:
            sound: The MPF sound object you want to play.
            priority: The relative priority of this sound.
            **settings: One or more additional settings for this playback.

        This method will automatically find an available Pygame channel to use.
        If this new sound has a higher priority than the lowest playing sound,
        it will interrupt that sound to play. Otherwise it will be added to the
        queue to be played when a channel becomes available.
        """
        # Make sure we have a sound object. If not we assume the sound is being
        # loaded (is that dumb?) and we add it to the queue so it will be
        # picked up on the next loop.
        if not sound.sound_object:
            self.queue_sound(sound, priority, **settings)
            return

        # We have a sound object. Do we have an available channel?
        found_available_channel = False

        # todo check to see if this sound is already playing and what our
        # settings are for that.

        for channel in self.pygame_channels:  # todo change to generator
            # A priority of -1 marks an idle channel.
            if channel.current_sound_priority == -1:
                found_available_channel = True
                channel.play(sound, priority=priority, **settings)
                break

        # No available channels. What do we do with this sound now? Options:
        # 1. If the priority of the lowest currently-playing sound is lower than
        # ours, kill that sound and replace it with the new one.
        # 2. Add this to the queue, arranged by priority
        if not found_available_channel:
            lowest_channel = min(self.pygame_channels)
            if lowest_channel.current_sound_priority < priority:
                lowest_channel.play(sound, priority=priority, **settings)
            else:
                if sound.expiration_time:
                    exp_time = time.time() + sound.expiration_time
                else:
                    exp_time = None
                self.queue_sound(sound, priority=priority, exp_time=exp_time,
                                 **settings)

    def stop(self, sound):
        """Stops a sound if it has a loaded sound object; no-op otherwise."""
        try:
            sound.sound_object.stop()
        except AttributeError:
            pass

    def queue_sound(self, sound, priority, exp_time=None, **settings):
        """Adds a sound to the queue to be played when a Pygame channel becomes
        free.

        Args:
            sound: The MPF sound object.
            priority: The priority of this sound.
            exp_time: Real world time of when this sound will expire. (It will
                not play if the queue is freed up after it expires.)
            **settings: Additional settings for this sound's playback.

        Note that this method will insert this sound into a position in the
        queue based on its priority, so highest-priority sounds are played
        first.
        """
        # Note the negative operator in front of priority since this queue
        # retrieves the lowest values first, and MPF uses higher values for
        # higher priorities.
        self.queue.put([-priority, sound, exp_time, settings])

    def get_sound(self):
        """Returns the next sound from the queue to be played.

        Returns: A tuple of (sound object, priority, settings dict) for the
            next non-expired queued sound, or None if the queue is empty (or
            only contained expired sounds).

        This method will ensure that the sound returned has not expired. If the
        next sound in the queue is expired, it removes it and returns the next
        one.
        """
        # Loop (instead of the previous recursion, which also discarded its
        # own return value) until a non-expired sound is found or the queue
        # runs dry.
        while True:
            try:
                next_sound = self.queue.get_nowait()
            except Queue.Empty:
                return None

            if not next_sound[2] or next_sound[2] >= time.time():
                # Queue entries store the negated priority (for ordering),
                # so negate it back before handing it to the caller.
                return next_sound[1], -next_sound[0], next_sound[3]

    def _tick(self):
        # Called on every machine timer tick; drains one queued sound if a
        # non-expired one is available.
        if not self.queue.empty():
            next_sound = self.get_sound()
            # get_sound() may return None if everything queued had expired.
            if next_sound:
                sound, priority, settings = next_sound
                self.play(sound, priority=priority, **settings)
class StreamTrack(object):
    """Parent class for MPF's "Stream" track which corresponds to Pygame's
    music channel.

    Args:
        machine: The main machine object.
        config: Python dictionary containing the configuration settings for
            this track.

    Sounds played on this track are streamed from disk rather than loaded into
    memory. This is good for background music since those files can be large
    and there's only one playing at a time.
    """

    def __init__(self, machine, config):
        self.log = logging.getLogger('Streaming Channel')
        self.log.debug("Creating Stream Track with config: %s", config)

        self.machine_sound = machine.sound
        self.config = config
        self.current_sound = None

        # Fall back to defaults when the config omits a name or volume.
        self.name = self.config.get('name', 'music')
        self.volume = self.config.get('volume', 1)

        # Streamed sounds are never preloaded into memory.
        self.config['preload'] = False

    def play(self, sound, **settings):
        """Plays a sound on this track.

        Args:
            sound: The MPF sound object to play.
            **settings: Additional settings for this sound's playback.

        This stream track only supports playing one sound at a time, so if
        you call this when a sound is currently playing, the new sound will
        stop the current sound.
        """
        self.current_sound = sound
        pygame.mixer.music.load(sound.file_name)

        # Effective volume = track volume * per-sound volume * master volume,
        # optionally scaled by a per-play 'volume' setting.
        scaled_volume = (1.0 *
                         self.volume *
                         sound.config['volume'] *
                         self.machine_sound.volume)
        if 'volume' in settings:
            scaled_volume *= settings['volume']
        pygame.mixer.music.set_volume(scaled_volume)

        self.log.info("Playing Sound: %s Vol: %s", sound.file_name,
                      pygame.mixer.music.get_volume())

        loop_count = settings.get('loops', 1)
        pygame.mixer.music.play(loop_count)

    def stop(self, sound=None):
        """Stops the playing sound and resets the current position to the
        beginning.
        """
        pygame.mixer.music.stop()
        # todo add support for fade out

    def pause(self):
        """Pauses the current sound and remembers the current position so
        playback can be resumed from the same point via the unpause() method.
        """
        pygame.mixer.music.pause()
        # todo add support for fade out

    def unpause(self):
        """Resumes playing of a previously-paused sound. If the sound was not
        paused, it starts playing it from the beginning.
        """
        pygame.mixer.music.unpause()
        # todo add support for fade in

    def fadeout(self, ms):
        """Fades the sound out.

        Args:
            ms: The number of milliseconds to fade out the sound.
        """
        pygame.mixer.music.fadeout(ms)
        # todo add support for MPF time duration strings
class Channel(object):
    """Parent class that holds a Pygame sound channel. One or more of these are
    tied to an MPF Track.

    Args:
        machine: The main machine object.
        parent_track: The MPF track object this channel belongs to.
        channel_number: Integer number that is used to identify this channel.
    """

    def __init__(self, machine, parent_track, channel_number):
        self.log = logging.getLogger('Sound Channel ' + str(channel_number))
        self.machine_sound = machine.sound
        self.current_sound_priority = -1  # -1 means this channel is idle
        self.current_sound = None
        self.pygame_channel = pygame.mixer.Channel(channel_number)
        self.parent_track = parent_track

        # configure this pygame channel to post a pygame event when it's done
        # playing a sound
        self.pygame_channel.set_endevent(
            pygame.locals.USEREVENT + channel_number)

        # add a pygame event handler so this channel object gets notified of
        # the above
        machine.register_pygame_handler(
            pygame.locals.USEREVENT + channel_number, self.sound_is_done)

    def __cmp__(self, other):
        # Used so we can sort the channel list by the priority of the current
        # playing sound. (Python 2 comparison protocol.)
        return cmp(self.current_sound_priority, other.current_sound_priority)

    def sound_is_done(self):
        """Indicates that the sound that was playing on this channel is now
        done.

        This is the callback method that's automatically called by Pygame. It
        will check the queue and automatically play any queued sounds."""
        self.current_sound_priority = -1

        if not self.parent_track.queue.empty():
            next_sound = self.parent_track.get_sound()
            # get_sound() can return None when everything queued had expired.
            if next_sound:
                sound, priority, settings = next_sound
                self.play(sound, priority=priority, **settings)

    def play(self, sound, **settings):
        """Plays a sound on this channel.

        Args:
            sound: The sound object to play.
            **settings: Additional settings for this sound's playback.
                Recognized keys include 'priority' (int), 'loops' (int,
                default 0 = play once) and 'volume' (float scale factor).
        """
        self.current_sound = sound
        self.current_sound_priority = settings['priority']

        # Fix: default to 0 loops instead of raising NameError when the
        # caller did not pass a 'loops' setting.
        loops = settings.get('loops', 0)

        # calculate the volume for this sound
        # start with the sound volume, multiply the overall and track volumes
        volume = (1.0 *
                  self.parent_track.volume *
                  sound.config['volume'] *
                  self.machine_sound.volume)

        if 'volume' in settings:
            volume *= settings['volume']

        # set the sound's current volume
        sound.sound_object.set_volume(volume)

        self.log.info("Playing Sound: %s Vol: %s", sound.file_name,
                      sound.sound_object.get_volume())
        self.pygame_channel.play(sound.sound_object, loops)
class Sound(Asset):
    # Asset subclass representing one playable sound file. Instances are
    # created by the MPF asset system via the module-level hooks defined at
    # the bottom of this file; the Asset base class (mpf.system.assets)
    # supplies file_name, config, asset_manager and the load machinery.
    def _initialize_asset(self):
        """Resolve this sound's target track and fill in config defaults."""
        # 'track' may name one of the regular mixed tracks, or the single
        # streaming (music) track.
        if self.config['track'] in self.machine.sound.tracks:
            self.track = self.machine.sound.tracks[self.config['track']]
        elif self.config['track'] == self.machine.sound.stream_track.name:
            self.track = self.machine.sound.stream_track
        else:
            self.asset_manager.log.critical("Music track not found: %s",
                                            self.config['track'])
            raise Exception()
        self.sound_object = None
        self.priority = 0
        self.expiration_time = None
        # Fill in defaults for optional config values. The entries marked
        # 'todo' are parsed here but not yet acted on elsewhere.
        if 'volume' not in self.config:
            self.config['volume'] = 1
        if 'max_queue_time' not in self.config: # todo
            self.config['max_queue_time'] = None
        if 'max_simultaneous_playing' not in self.config: # todo
            self.config['max_simultaneous_playing'] = None
        if 'fade_in' not in self.config: # todo
            self.config['fade_in'] = 0
        if 'fade_out' not in self.config: # todo
            self.config['fade_out'] = 0
        if 'loops' not in self.config: # todo
            self.config['loops'] = None
        if 'start_time' not in self.config: # todo
            self.config['start_time'] = None
        if 'end_time' not in self.config: # todo
            self.config['end_time'] = None
    def _load(self, callback):
        """Load the file into a Pygame Sound object (called by Asset)."""
        try:
            self.sound_object = pygame.mixer.Sound(self.file_name)
        except pygame.error:
            self.asset_manager.log.error("Pygame Error for file %s. '%s'",
                                         self.file_name, pygame.get_error())
        # NOTE(review): 'loaded' is set True even when pygame.mixer.Sound()
        # failed above, so sound_object may still be None -- confirm this is
        # what the Asset base class expects.
        self.loaded = True
        if callback:
            callback()
    def _unload(self):
        # Drop the Pygame sound object so its memory can be reclaimed.
        self.sound_object = None
    def play(self, loops=0, priority=0, fade_in=0, volume=1, **kwargs):
        """Plays this sound.

        Args:
            loops: Integer of how many times you'd like this sound to repeat.
                A value of -1 means it will loop forever.
            priority: The relative priority of this sound which controls what
                happens if the track this sound is playing on is playing the
                max simultaneous sounds.
            fade_in: MPF time string for how long this sound should fade in
                when it starts.
            volume: Volume for this sound as a float between 0.0 and 1.0. Zero
                is mute, 1 is full volume, anything in between is in between.
            **kwargs: Catch all since this method might be used as an event
                callback which could include random kwargs.
        """
        self.asset_manager.log.info("Playing sound. Loops: %s, Priority: %s, "
                                    "Fade in: %s, Vol: %s, kwargs: %s",
                                    loops, priority, fade_in, volume, kwargs)
        # If the sound isn't loaded yet, kick off a load; the track will
        # queue the sound until a sound_object exists.
        if not self.sound_object:
            self.load()
        # Drop a 'sound' kwarg if present (added by the SoundPlayer settings
        # dict) so it isn't passed through to the track twice.
        if 'sound' in kwargs:
            kwargs.pop('sound')
        self.track.play(self, priority=priority, loops=loops, volume=volume,
                        fade_in=fade_in, **kwargs)
    def stop(self, fade_out=0, reset=True, **kwargs):
        """Stops this sound playing.

        Args:
            fade_out: MPF time string for how long this sound will fade out as
                it stops.
            reset: Boolean for whether this sound should reset its playback
                position to the beginning. Default is True.
            **kwargs: Catch all since this method might be used as an event
                callback which could include random kwargs.
        """
        # NOTE(review): fade_out and reset are currently unused; the track
        # just stops the sound object outright.
        #self.sound_object.stop()
        self.track.stop(self)
# Module-level hooks consumed by the MPF asset system (and referenced from
# SoundController._initialize) to discover and register Sound assets.
asset_class = Sound
asset_attribute = 'sounds' # self.machine.<asset_attribute>
#display_element_class = ImageDisplayElement
create_asset_manager = True
path_string = 'sounds'
config_section = 'sounds'
file_extensions = ('ogg', 'wav')
# The MIT License (MIT)
# Copyright (c) 2013-2015 Brian Madden and Gabe Knuth
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
| jabdoa2/mpf | mpf/media_controller/core/sound.py | Python | mit | 31,100 | [
"Brian"
] | a54bc410bd978b5a128459d0ccbda916c0b5a9a89921da677831512c142a62a8 |
# -*- coding: utf-8 -*-
"""
Bok choy acceptance tests for problems in the LMS
"""
from textwrap import dedent
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.courseware import CoursewarePage
from common.test.acceptance.pages.lms.problem import ProblemPage
from common.test.acceptance.tests.helpers import UniqueCourseTest
from openedx.core.lib.tests import attr
class ProblemsTest(UniqueCourseTest):
    """
    Base class for tests of problems in the LMS.
    """
    def setUp(self):
        """Install a course containing the subclass's problem and enroll a
        fresh auto-auth staff user in it."""
        super(ProblemsTest, self).setUp()  # lint-amnesty, pylint: disable=super-with-arguments
        unique_suffix = self.unique_id[0:8]
        self.username = "test_student_{}".format(unique_suffix)
        self.email = "{}@example.com".format(self.username)
        self.password = "keep it secret; keep it safe."
        self.xqueue_grade_response = None
        self.courseware_page = CoursewarePage(self.browser, self.course_id)

        # Install a course with a hierarchy and problems
        fixture = CourseFixture(
            self.course_info['org'], self.course_info['number'],
            self.course_info['run'], self.course_info['display_name']
        )
        problem_block = self.get_problem()
        subsection_block = self.get_sequential()
        chapter_block = XBlockFixtureDesc('chapter', 'Test Section')
        chapter_block.add_children(subsection_block.add_children(problem_block))
        fixture.add_children(chapter_block).install()

        # Auto-auth register for the course.
        AutoAuthPage(
            self.browser,
            username=self.username,
            email=self.email,
            password=self.password,
            course_id=self.course_id,
            staff=True
        ).visit()

    def get_problem(self):
        """ Subclasses should override this to complete the fixture """
        raise NotImplementedError()

    def get_sequential(self):
        """ Subclasses can override this to add a sequential with metadata """
        return XBlockFixtureDesc('sequential', 'Test Subsection')
class CAPAProblemA11yBaseTestMixin(object):
    """Base TestCase Class to verify CAPA problem accessibility."""
    def test_a11y(self):
        """
        Verifies that there are no accessibility issues for a particular problem type
        """
        self.courseware_page.visit()
        page = ProblemPage(self.browser)

        # Audit only the problem question region of the page.
        page.a11y_audit.config.set_scope(include=['.wrapper-problem-response'])
        page.a11y_audit.check_for_accessibility_errors()
@attr('a11y')
class CAPAProblemChoiceA11yTest(CAPAProblemA11yBaseTestMixin, ProblemsTest):
    """TestCase Class to verify accessibility for checkboxes and multiplechoice CAPA problems."""
    def get_problem(self):
        """
        Build the checkbox + multiple-choice problem XBlock used by this test.
        """
        problem_xml = dedent("""
            <problem>
                <choiceresponse>
                    <label>question 1 text here</label>
                    <description>description 2 text 1</description>
                    <description>description 2 text 2</description>
                    <checkboxgroup>
                        <choice correct="true">True</choice>
                        <choice correct="false">False</choice>
                    </checkboxgroup>
                </choiceresponse>
                <multiplechoiceresponse>
                    <label>question 2 text here</label>
                    <description>description 2 text 1</description>
                    <description>description 2 text 2</description>
                    <choicegroup type="MultipleChoice">
                        <choice correct="false">Alpha <choicehint>A hint</choicehint></choice>
                        <choice correct="true">Beta</choice>
                    </choicegroup>
                </multiplechoiceresponse>
            </problem>
        """)
        return XBlockFixtureDesc('problem', 'Problem A11Y TEST', data=problem_xml)
@attr('a11y')
class ProblemTextInputA11yTest(CAPAProblemA11yBaseTestMixin, ProblemsTest):
    """TestCase Class to verify TextInput problem accessibility."""
    def get_problem(self):
        """
        Build the two-part TextInput problem XBlock used by this test.
        """
        problem_xml = dedent("""
            <problem>
                <stringresponse answer="fight" type="ci">
                    <label>who wishes to _____ must first count the cost.</label>
                    <description>Appear weak when you are strong, and strong when you are weak.</description>
                    <description>In the midst of chaos, there is also opportunity.</description>
                    <textline size="40"/>
                </stringresponse>
                <stringresponse answer="force" type="ci">
                    <label>A leader leads by example not by _____.</label>
                    <description>The supreme art of war is to subdue the enemy without fighting.</description>
                    <description>Great results, can be achieved with small forces.</description>
                    <textline size="40"/>
                </stringresponse>
            </problem>""")
        return XBlockFixtureDesc('problem', 'TEXTINPUT PROBLEM', data=problem_xml)
@attr('a11y')
class CAPAProblemDropDownA11yTest(CAPAProblemA11yBaseTestMixin, ProblemsTest):
    """TestCase Class to verify accessibility for dropdowns(optioninput) CAPA problems."""
    def get_problem(self):
        """
        Build the dropdown (optioninput) problem XBlock used by this test.
        """
        problem_xml = dedent("""
            <problem>
                <optionresponse>
                    <p>You can use this template as a guide to the simple editor markdown and OLX markup to use for
                     dropdown problems. Edit this component to replace this template with your own assessment.</p>
                    <label>Which of the following is a fruit</label>
                    <description>Choose wisely</description>
                    <optioninput>
                        <option correct="False">radish</option>
                        <option correct="True">appple</option>
                        <option correct="False">carrot</option>
                    </optioninput>
                </optionresponse>
            </problem>
        """)
        return XBlockFixtureDesc('problem', 'Problem A11Y TEST', data=problem_xml)
@attr('a11y')
class ProblemNumericalInputA11yTest(CAPAProblemA11yBaseTestMixin, ProblemsTest):
    """Tests NumericalInput accessibility."""
    def get_problem(self):
        """Build the NumericalInput problem XBlock used by this test."""
        problem_xml = dedent("""
            <problem>
                <numericalresponse answer="10*i">
                    <label>The square of what number is -100?</label>
                    <description>Use scientific notation to answer.</description>
                    <formulaequationinput/>
                </numericalresponse>
            </problem>""")
        return XBlockFixtureDesc('problem', 'NUMERICALINPUT PROBLEM', data=problem_xml)
@attr('a11y')
class ProblemMathExpressionInputA11yTest(CAPAProblemA11yBaseTestMixin, ProblemsTest):
    """Tests MathExpressionInput accessibility."""
    def get_problem(self):
        """Build the MathExpressionInput problem XBlock used by this test."""
        problem_xml = dedent(r"""
            <problem>
            <script type="loncapa/python">
            derivative = "n*x^(n-1)"
            </script>
            <formularesponse type="ci" samples="x,n@1,2:3,4#10" answer="$derivative">
                <label>Let \( x\) be a variable, and let \( n\) be an arbitrary constant. What is the derivative of \( x^n\)?</label>
                <description>Enter the equation</description>
                <responseparam type="tolerance" default="0.00001"/>
                <formulaequationinput size="40"/>
            </formularesponse>
            </problem>""")
        return XBlockFixtureDesc('problem', 'MATHEXPRESSIONINPUT PROBLEM', data=problem_xml)
| stvstnfrd/edx-platform | common/test/acceptance/tests/lms/test_lms_problems.py | Python | agpl-3.0 | 7,862 | [
"VisIt"
] | baf762a86274e225b71aaa9cb6307580c123bcc30d82c4d02154219220eda456 |
from behave import when, given, then
from selenium.webdriver.support.ui import Select
@when(u'I visit the "{page_name}" page')
def step_impl(context, page_name):
    """Navigate the browser to the named page of the app under test."""
    target_url = context.get_url(page_name)
    context.browser.get(target_url)
@when(u'I click the x button next to "{text}"')
def step_impl(context, text):
    """Click the button that follows the <span> containing the given text."""
    xpath = (
        "//p/span[contains(text(), '%s')]/../following-sibling::button" % text
    )
    context.browser.find_element_by_xpath(xpath).click()
| libretees/libreshop | libreshop/tests/steps/store.py | Python | gpl-3.0 | 452 | [
"VisIt"
] | 80a8c0b0651574674cea19c633380a58180e1c16afdf990d8aa17bd9d1c3e507 |
# Brunel-style balanced random network benchmark using NEST (Python 2).
import sys
import nest
import nest.raster_plot
# ###########################################
# Configuration
# ###########################################
# Simulation resolution of 0.1 ms.
nest.SetKernelStatus({"resolution": 0.1})
# Optional first CLI argument selects the number of local NEST threads.
if len(sys.argv) > 1:
    nb_threads = sys.argv[1]
else:
    nb_threads = 1
nest.SetKernelStatus({"local_num_threads": int(nb_threads)})
# ###########################################
# Parameters
# ###########################################
J_ex = 0.1 # excitatory weight
J_in = -0.5 # inhibitory weight
p_rate = 20.0 # external Poisson rate
simtime = 100. # simulation time
Nrec = 1000 # number of neurons to record from
# Membrane parameters for the iaf_psc_delta model (NEST default units).
neuron_params= {"C_m": 1.0, "tau_m": 20.0, "t_ref": 2.0,
                "E_L": 0.0, "V_reset": 0.0, "V_m": 0.0, "V_th": 20.0}
# ###########################################
# Neuron models
# ###########################################
nest.SetDefaults("iaf_psc_delta", neuron_params)
nest.SetDefaults("poisson_generator",{"rate": p_rate})
nest.SetDefaults("spike_detector",{"withtime": True, "withgid": True})
# ###########################################
# Populations
# ###########################################
# 10000 excitatory and 2500 inhibitory integrate-and-fire neurons.
nodes_ex = nest.Create("iaf_psc_delta", 10000)
nodes_in = nest.Create("iaf_psc_delta", 2500)
# Parrot neurons relay the Poisson input (commonly used so targets receive
# reproducible/shared spike trains) -- confirm intent against the benchmark
# this is compared with.
parrots = nest.Create("parrot_neuron", 12500)
noise = nest.Create("poisson_generator")
espikes = nest.Create("spike_detector")
# ###########################################
# Synapse models
# ###########################################
# Homogeneous weight/delay synapses: one excitatory, one inhibitory model.
nest.CopyModel("static_synapse_hom_wd", "excitatory", {"weight":J_ex, "delay":1.5})
nest.CopyModel("static_synapse_hom_wd", "inhibitory", {"weight":J_in, "delay":1.5})
# ###########################################
# Projections
# ###########################################
# Each neuron receives 1000 excitatory and 250 inhibitory recurrent inputs,
# plus 1000 excitatory inputs relayed from the parrot (noise) population.
nest.Connect(nodes_ex, nodes_ex+nodes_in, {"rule": 'fixed_indegree', "indegree": 1000}, "excitatory")
nest.Connect(nodes_in, nodes_ex+nodes_in, {"rule": 'fixed_indegree', "indegree": 250}, "inhibitory")
nest.Connect(noise, parrots, 'all_to_all')
nest.Connect(parrots, nodes_ex+nodes_in, {"rule": 'fixed_indegree', "indegree": 1000}, "excitatory")
# Record spikes from the first Nrec excitatory neurons only.
nest.Connect(nodes_ex[:Nrec], espikes, 'all_to_all')
# ###########################################
# Simulation
# ###########################################
from time import time
ts = time()
nest.Simulate(simtime)
print 'Simulating', simtime, 'ms took', time() - ts, 'seconds.'
# ###########################################
# Data analysis
# ###########################################
# Mean rate over the locally-recorded excitatory neurons, in Hz
# (events / ms * 1000 / number of recorded neurons).
events = nest.GetStatus(espikes,"n_events")
N_rec_local_E = sum(nest.GetStatus(nodes_ex[:Nrec], 'local'))
rate_ex= events[0]/simtime*1000.0/N_rec_local_E
print("Mean firing rate: %.2f Hz" % rate_ex)
# Plot results
nest.raster_plot.from_device(espikes, hist=True)
nest.raster_plot.show()
| vitay/ANNarchy | benchmarks/brunel/Nest-Brunel-static.py | Python | gpl-2.0 | 2,779 | [
"NEURON"
] | 96a97737325fdb40426d50854ef9d2d687c7ead180a2114756f0b047594c1332 |
# Copyright 2018 Joseph Wright <joseph@cloudboss.co>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function
import base64
import contextlib
import itertools
import json
import os
import re
import subprocess
import sys
import threading as t
import time
import tempfile
import yaml
import Queue
from friend.strings import random_alphanum, snake_to_pascal_obj
import jinja2 as j
import pkg_resources as pr
import voluptuous as v
# Connection defaults applied when the .boss.yml config does not override
# them (see inventory_entry / get_connection / validate).
DEFAULT_ANSIBLE_USER = 'ec2-user'
DEFAULT_ANSIBLE_PORT = 22
DEFAULT_ANSIBLE_CONNECTION = 'ssh'
class ConnectionTimeout(Exception):
    """Raised when a host is not reachable before its deadline (wait_for_connection)."""
    pass
class ConfigurationError(Exception):
    """Raised when .boss.yml cannot be loaded, rendered or validated (load_config)."""
    pass
class StateError(Exception):
    """Raised when a make step runs out of order, e.g. `make image` before `make build`."""
    pass
class ItemNotFound(Exception):
    """Raised when a named AWS resource or inventory entry cannot be located."""
    pass
class Spinner(t.Thread):
    """Console spinner shown while waiting for an AWS resource.

    Runs in a background thread and animates ``- \\ | /`` until the
    context manager exits. Usage: ``with Spinner('instance'): ...``.
    """
    def __init__(self, waitable, state='to be available'):
        t.Thread.__init__(self)
        # Message printed once before the spinner starts animating.
        self.msg = 'Waiting for {} {} ... '.format(waitable, state)
        self.running = False
        # Endless cycle of spinner glyphs.
        self.chars = itertools.cycle(r'-\|/')
        # run() puts a token here when its final frame has been drawn,
        # so __exit__ does not print 'ok' over a live spinner.
        self.q = Queue.Queue()
    def __enter__(self):
        self.start()
        # Return self so "with Spinner(...) as s" works (previously
        # returned None; backward compatible).
        return self
    def __exit__(self, _exc_type, _exc_val, _exc_tb):
        self.running = False
        self.q.get()  # block until run() has stopped animating
        print('\bok')
    def run(self):
        print(self.msg, end='')
        self.running = True
        while self.running:
            print('\b{}'.format(next(self.chars)), end='')
            sys.stdout.flush()
            time.sleep(0.5)
        self.q.put(None)
def gen_keyname():
    """Return a unique throwaway EC2 keypair name: 'bossimage-' + 10 alphanumerics."""
    return 'bossimage-' + random_alphanum(10)
def user_data(config):
    """Resolve the EC2 user-data payload from the instance config.

    * dict value ``{'file': path}`` -> contents of that file;
    * empty value with a winrm connection -> the packaged Windows
      bootstrap script (enables WinRM so Ansible can connect);
    * anything else -> the configured value verbatim.
    """
    ud = config['user_data']
    # isinstance instead of `type(...) == dict` so dict subclasses
    # (e.g. yaml loader mappings) are handled too.
    if isinstance(ud, dict):
        with open(ud['file']) as f:
            return f.read()
    # Only consult the connection type when we may need the winrm
    # fallback; avoids requiring a 'connection' key otherwise.
    if not ud and get_connection(config) == 'winrm':
        return pr.resource_string('bossimage', 'win-userdata.txt')
    return ud
def create_keypair(ec2, keyname, keyfile):
    """Create an EC2 keypair and write its private key to *keyfile*.

    The key file is restricted to owner read/write as ssh requires.
    """
    kp = ec2.create_key_pair(KeyName=keyname)
    print('Created keypair {}'.format(keyname))
    with open(keyfile, 'w') as f:
        f.write(kp.key_material)
    # 0o600 is valid on Python 2.6+ and Python 3; the old 0600 spelling
    # is a SyntaxError on Python 3.
    os.chmod(keyfile, 0o600)
def tag_instance(ec2, tags, instance):
    """Apply the given key/value tags to an EC2 instance."""
    tag_list = [{'Key': key, 'Value': value} for key, value in tags.items()]
    ec2.create_tags(Resources=[instance.id], Tags=tag_list)
    print('Tagged instance with {}'.format(tags))
def role_name():
    """Name of the role under test: $BI_ROLE_NAME, else the cwd basename."""
    return os.getenv('BI_ROLE_NAME') or os.path.basename(os.getcwd())
def role_version():
    """Role version: $BI_ROLE_VERSION, else .role-version file, else 'unset'."""
    env_version = os.getenv('BI_ROLE_VERSION')
    if env_version:
        return env_version
    if os.path.exists('.role-version'):
        with open('.role-version') as f:
            return f.read().strip()
    return 'unset'
def decrypt_password(password_file, keyfile):
    """Decrypt an RSA-encrypted Windows password blob with openssl.

    Shells out to `openssl rsautl -decrypt` using the instance keypair's
    private key; returns the plaintext password (raw stdout).
    """
    openssl = subprocess.Popen([
        'openssl', 'rsautl', '-decrypt',
        '-in', password_file,
        '-inkey', keyfile,
    ], stdout=subprocess.PIPE)
    password, _ = openssl.communicate()
    return password
def parse_inventory(fdesc):
    """Parse an Ansible INI inventory from an open file object.

    Returns {section: {host: {var: value}}}. Blank lines are skipped;
    a host line is "<addr> key=value key=value ...".
    """
    inventory = {}
    section = None
    for line in fdesc.readlines():
        # Skip blank / whitespace-only lines.
        if re.match(r'^\s*$', line):
            continue
        # Section header, e.g. "[build]".
        section_match = re.match(r'^\s*\[(?P<section>\w+)\]\s*', line)
        if section_match:
            section = section_match.groupdict()['section']
            continue
        # Split on runs of whitespace. The original r'\s*' pattern relied
        # on Python 2 skipping zero-width matches in re.split; Python
        # 3.7+ splits on them, so \s+ on the stripped line is the
        # portable equivalent.
        parts = re.split(r'\s+', line.strip())
        ip = parts[0]
        args = {}
        for part in parts[1:]:
            # Everything after the FIRST '=' is the value, so values may
            # themselves contain '=' (same as the original '='.join).
            key, _, value = part.partition('=')
            args[key] = value
        inventory.setdefault(section, {})
        inventory[section][ip] = args
    return inventory
def inventory_entry(ip, keyfile, password, config):
    """Build the {host: ansible_vars} mapping for one instance.

    Explicit 'inventory_args' in the config take precedence; missing
    values are filled from the keyfile/defaults (mutating the dict in
    place via setdefault, as before).
    """
    inventory_args = config.get('inventory_args')
    if not inventory_args:
        # No overrides: derive everything from the flat config keys.
        entry = {
            'ansible_ssh_private_key_file': keyfile,
            'ansible_user': config['username'],
            'ansible_port': config['port'],
            'ansible_connection': config['connection'],
        }
        if password:
            entry['ansible_password'] = password
        return {ip: entry}
    defaults = [
        ('ansible_ssh_private_key_file', keyfile),
        ('ansible_user', DEFAULT_ANSIBLE_USER),
        ('ansible_port', DEFAULT_ANSIBLE_PORT),
        ('ansible_connection', DEFAULT_ANSIBLE_CONNECTION),
    ]
    for key, value in defaults:
        inventory_args.setdefault(key, value)
    if password:
        inventory_args.setdefault('ansible_password', password)
    return {ip: inventory_args}
def format_inventory_entry(host, inventory_args):
    """Render one inventory host line: '<host> k=v k=v ...'."""
    pairs = []
    for key, value in inventory_args.items():
        pairs.append('{}={}'.format(key, value))
    return '{} {}'.format(host, ' '.join(pairs))
def write_inventory(path, inventory):
    """Serialize the inventory dict to an Ansible INI file at *path*.

    The file is restricted to owner read/write since it may contain an
    ansible_password.
    """
    lines = []
    for section, hosts in inventory.items():
        lines.append('[{}]\n'.format(section))
        for host, inventory_args in hosts.items():
            fmt = format_inventory_entry(host, inventory_args)
            lines.append('{}\n'.format(fmt))
    with open(path, 'w') as f:
        f.write(''.join(lines))
    # 0o600 spelling works on Python 2.6+ and 3 (0600 is a Py3 SyntaxError).
    os.chmod(path, 0o600)
@contextlib.contextmanager
def load_inventory(instance):
    """Context manager yielding the instance's inventory dict.

    Loads the Ansible inventory file if present (else an empty dict),
    yields it for mutation, and always writes it back on exit.
    """
    files = instance_files(instance)
    if os.path.exists(files['inventory']):
        with open(files['inventory']) as f:
            inventory = parse_inventory(f)
    else:
        inventory = dict()
    yield inventory
    # Persist any changes made by the caller.
    write_inventory(files['inventory'], inventory)
def write_playbook(playbook, config):
    """Write the one-play playbook that applies this role to the 'build' host group."""
    play = dict(
        hosts='build',
        become=config['become'],
        roles=[role_name()],
    )
    with open(playbook, 'w') as f:
        f.write(yaml.safe_dump([play]))
def get_connection(config):
    """Return the Ansible connection type ('ssh' or 'winrm') for a config."""
    inventory_args = config.get('inventory_args')
    if not inventory_args:
        return config['connection']
    return inventory_args.get('ansible_connection', DEFAULT_ANSIBLE_CONNECTION)
def get_windows_password(ec2_instance, keyfile):
    """Fetch and decrypt the Windows Administrator password.

    Waits until EC2 exposes the encrypted password data, writes it to a
    temporary file, and decrypts it with the instance keypair via
    openssl (see decrypt_password).
    """
    with Spinner('password'):
        encrypted_password = wait_for_password(ec2_instance)
    # mkstemp instead of the race-prone, deprecated mktemp: the file is
    # created atomically with owner-only permissions.
    fd, password_file = tempfile.mkstemp(dir='.boss')
    try:
        with os.fdopen(fd, 'wb') as f:
            # b64decode replaces base64.decodestring, which is deprecated
            # and removed in Python 3.9.
            f.write(base64.b64decode(encrypted_password))
        password = decrypt_password(password_file, keyfile)
    finally:
        # Remove the sensitive temp file even if decryption fails
        # (previously it was leaked on error).
        os.unlink(password_file)
    return password
def create_instance(ec2, config, image_id, keyname):
    """Launch one EC2 instance from *image_id* and wait until it is running.

    Optional config keys (subnet, security_groups, iam_instance_profile,
    tags) are applied only when set. Returns the reloaded boto3 Instance.
    """
    instance_params = dict(
        ImageId=image_id,
        InstanceType=config['instance_type'],
        MinCount=1,
        MaxCount=1,
        KeyName=keyname,
        NetworkInterfaces=[dict(
            DeviceIndex=0,
            AssociatePublicIpAddress=config['associate_public_ip_address'],
        )],
        # Config uses snake_case; the EC2 API wants PascalCase keys.
        BlockDeviceMappings=snake_to_pascal_obj(
            config['block_device_mappings'],
        ),
        UserData=user_data(config),
    )
    if config['subnet']:
        # Names are resolved to ids; raw ids pass through unchanged.
        subnet_id = subnet_id_for(ec2.subnets, config['subnet'])
        instance_params['NetworkInterfaces'][0]['SubnetId'] = subnet_id
    if config['security_groups']:
        sg_ids = [sg_id_for(ec2.security_groups, name)
                  for name in config['security_groups']]
        instance_params['NetworkInterfaces'][0]['Groups'] = sg_ids
    if config['iam_instance_profile']:
        instance_params['IamInstanceProfile'] = {
            'Name': config['iam_instance_profile']
        }
    # create_instances returns a one-element list (MinCount == MaxCount == 1).
    (ec2_instance,) = ec2.create_instances(**instance_params)
    print('Created instance {}'.format(ec2_instance.id))
    with Spinner('instance', 'to be running'):
        ec2_instance.wait_until_running()
    if config['tags']:
        tag_instance(ec2, config['tags'], ec2_instance)
    # Reload so IP addresses assigned at runtime are populated.
    ec2_instance.reload()
    return ec2_instance
def wait_for_image(image):
    """Block until the AMI reaches the 'available' state, polling every 15s."""
    image.reload()
    while image.state != 'available':
        time.sleep(15)
        image.reload()
def wait_for_password(ec2_instance):
    """Poll every 15s until EC2 exposes encrypted password data; return it."""
    while True:
        ec2_instance.reload()
        encrypted = ec2_instance.password_data()['PasswordData']
        if encrypted:
            return encrypted
        time.sleep(15)
def wait_for_connection(addr, port, inventory, group, end):
    """Wait until Ansible can reach *group* in *inventory*, else time out.

    Repeatedly runs `ansible -m raw -a exit` (output discarded) every 15
    seconds; raises ConnectionTimeout once time.time() passes *end*.
    """
    env = os.environ.copy()
    env['ANSIBLE_HOST_KEY_CHECKING'] = 'False'
    while True:
        if time.time() > end:
            message = 'Timeout while connecting to {}:{}'.format(addr, port)
            raise ConnectionTimeout(message)
        try:
            # os.devnull instead of the Unix-only '/dev/null'.
            with open(os.devnull, 'wb') as devnull:
                ret = subprocess.call([
                    'ansible', group,
                    '-i', inventory, '-m', 'raw', '-a', 'exit'
                ], stderr=devnull, stdout=devnull, env=env)
        except OSError:
            # ansible binary not found / not yet runnable; retry below.
            ret = -1
        if ret == 0:
            return
        # Previously a bare `raise` inside a bare `except:` drove the
        # retry; a plain sleep-and-loop is equivalent and no longer
        # swallows KeyboardInterrupt.
        time.sleep(15)
def make_build(ec2, instance, config, verbosity):
    """Provision (or resume) the build instance and run the role on it.

    Idempotent: keypair, instance, inventory entry and playbook are each
    created only if not already recorded in the per-instance state.
    Returns ansible-playbook's exit status.
    """
    phase = 'build'
    if not os.path.exists('.boss'):
        os.mkdir('.boss')
    files = instance_files(instance)
    keyfile = files['keyfile']
    with load_state(instance) as state:
        if 'keyname' not in state:
            keyname = gen_keyname()
            create_keypair(ec2, keyname, keyfile)
            state['keyname'] = keyname
    with load_state(instance) as state:
        if phase not in state:
            ec2_instance = create_instance(
                ec2,
                config,
                ami_id_for(ec2.images, config['source_ami']),
                state['keyname'],
            )
            # Record whichever address Ansible should connect to.
            if config['associate_public_ip_address']:
                ip_address = ec2_instance.public_ip_address
            else:
                ip_address = ec2_instance.private_ip_address
            state[phase] = {
                'id': ec2_instance.id,
                'ip': ip_address,
            }
        ensure_inventory(
            ec2, instance, phase, config, keyfile,
            state[phase]['id'], state[phase]['ip'])
        # Pull the connection port from the (single-host) inventory entry.
        with load_inventory(instance) as inventory:
            for entry in inventory[phase]:
                port = inventory[phase][entry]['ansible_port']
        with Spinner('connection to {}:{}'.format(
                state[phase]['ip'], port)):
            wait_for_connection(
                state[phase]['ip'], port, files['inventory'],
                phase, time.time() + config['connection_timeout']
            )
    if not os.path.exists(files['playbook']):
        write_playbook(files['playbook'], config)
    return run_ansible(verbosity, files['inventory'], files['playbook'],
                       config['extra_vars'], 'requirements.yml')
def make_test(ec2, instance, config, verbosity):
    """Launch a test instance from the built AMI and run the test playbook.

    Requires `make image` to have recorded an image id in state; raises
    StateError otherwise. Returns ansible-playbook's exit status.
    """
    phase = 'test'
    with load_state(instance) as state:
        if phase not in state and 'image' not in state:
            raise StateError('Cannot run `make test` before `make image`')
        if phase not in state:
            # Reuses the build keypair; boots from the freshly built AMI.
            ec2_instance = create_instance(
                ec2, config, state['image']['id'], state['keyname']
            )
            if config['associate_public_ip_address']:
                ip_address = ec2_instance.public_ip_address
            else:
                ip_address = ec2_instance.private_ip_address
            state[phase] = {
                'id': ec2_instance.id,
                'ip': ip_address,
            }
        files = instance_files(instance)
        ensure_inventory(
            ec2, instance, phase, config, files['keyfile'],
            state[phase]['id'], state[phase]['ip'])
        with Spinner('connection to {}:{}'.format(
                state[phase]['ip'], config['port'])):
            wait_for_connection(
                state[phase]['ip'], config['port'], files['inventory'],
                phase, time.time() + config['connection_timeout']
            )
    return run_ansible(verbosity, files['inventory'], config['playbook'], {},
                       'tests/requirements.yml')
def ensure_inventory(ec2, instance, phase, config, keyfile, ident, ip):
    """Create the inventory entry for *phase* if it does not exist yet.

    For winrm connections this blocks while fetching and decrypting the
    Windows password from EC2.
    """
    with load_inventory(instance) as inventory:
        if phase not in inventory:
            ec2_instance = ec2.Instance(id=ident)
            connection = get_connection(config)
            if connection == 'winrm':
                password = get_windows_password(ec2_instance, keyfile)
            else:
                password = None
            inventory[phase] = inventory_entry(ip, keyfile, password, config)
def run_ansible(verbosity, inventory, playbook, extra_vars, requirements):
    """Run ansible-playbook against *inventory*, returning its exit status.

    If a requirements file exists, dependent roles are first installed
    into .boss/roles via ansible-galaxy. ANSIBLE_ROLES_PATH also includes
    '..' so the role under test (the parent directory) is found.
    """
    roles_path = '.boss/roles'
    env = os.environ.copy()
    env.update(dict(
        ANSIBLE_ROLES_PATH='{}:..'.format(roles_path),
        ANSIBLE_HOST_KEY_CHECKING='False',
    ))
    if os.path.exists(requirements):
        ansible_galaxy_args = [
            'ansible-galaxy', 'install',
            '-r', requirements,
            '-p', roles_path,
        ]
        if verbosity:
            # verbosity=2 -> '-vv', etc.
            ansible_galaxy_args.append('-' + 'v' * verbosity)
        ansible_galaxy = subprocess.Popen(ansible_galaxy_args, env=env)
        ansible_galaxy.wait()
    ansible_playbook_args = ['ansible-playbook', '-i', inventory]
    if verbosity:
        ansible_playbook_args.append('-' + 'v' * verbosity)
    if extra_vars:
        ansible_playbook_args += ['--extra-vars', json.dumps(extra_vars)]
    ansible_playbook_args.append(playbook)
    ansible_playbook = subprocess.Popen(ansible_playbook_args, env=env)
    return ansible_playbook.wait()
def make_image(ec2, instance, config, wait):
    """Create an AMI from the build instance; no-op if already imaged.

    The AMI name comes from the 'ami_name' %-template interpolated with
    role/version/architecture metadata. If *wait* is true, blocks until
    the image is available.
    """
    phase = 'image'
    with load_state(instance) as state:
        if phase in state:
            return
        if 'build' not in state:
            raise StateError('Cannot run `make image` before `make build`')
        ec2_instance = ec2.Instance(id=state['build']['id'])
        ec2_instance.load()
        # Interpolation values for the ami_name %-template.
        config.update({
            'role': role_name(),
            'version': role_version(),
            'arch': ec2_instance.architecture,
            'hv': ec2_instance.hypervisor,
            'vtype': ec2_instance.virtualization_type,
        })
        image_name = config['ami_name'] % config
        image = ec2_instance.create_image(Name=image_name)
        print('Created image {} with name {}'.format(image.id, image_name))
        state[phase] = {'id': image.id}
    if wait:
        with Spinner(phase):
            wait_for_image(image)
def clean_build(ec2, instance):
    """Terminate the build-phase instance and clean its state (see clean_instance)."""
    clean_instance(ec2, instance, 'build')
def clean_test(ec2, instance):
    """Terminate the test-phase instance and clean its state (see clean_instance)."""
    clean_instance(ec2, instance, 'test')
def clean_instance(ec2, instance, phase):
    """Terminate the instance for *phase* and drop its state/inventory.

    When no instances remain, the keypair is deleted too; when nothing
    at all remains, the per-instance working files are removed.
    """
    with load_state(instance) as state:
        if phase not in state:
            print('No {} instance found for {}'.format(phase, instance))
            return
        ec2_instance = ec2.Instance(id=state[phase]['id'])
        ec2_instance.terminate()
        print('Deleted instance {}'.format(ec2_instance.id))
        del state[phase]
        with load_inventory(instance) as inventory:
            del inventory[phase]
        if 'build' not in state and 'test' not in state:
            # Operate on THIS state dict rather than re-entering
            # load_state: the nested context used to delete 'keyname'
            # from a second copy, which the outer context's write-back
            # then resurrected in the state file.
            delete_keypair(ec2, state)
        if 'build' not in state and 'image' not in state and 'test' not in state:
            delete_files(instance_files(instance))
def clean_image(ec2, instance):
    """Deregister the recorded AMI and drop it from state.

    Also removes the per-instance working files if no build/test
    instances remain.
    """
    with load_state(instance) as state:
        if 'image' not in state:
            print('No image found for {}'.format(instance))
            return
        # Exactly one image should match the stored id.
        (image,) = ec2.images.filter(ImageIds=[state['image']['id']])
        image.deregister()
        print('Deregistered image {}'.format(state['image']['id']))
        del(state['image'])
        if 'build' not in state and 'test' not in state:
            delete_files(instance_files(instance))
def delete_keypair(ec2, state):
    """Delete the EC2 keypair recorded in *state* and drop it from state."""
    keypair = ec2.KeyPair(name=state['keyname'])
    keypair.delete()
    print('Deleted keypair {}'.format(keypair.name))
    del state['keyname']
def delete_files(files):
    """Best-effort removal of every path in the files mapping."""
    for path in files.values():
        try:
            os.unlink(path)
        except OSError:
            # Missing or locked file: report and keep going.
            print('Error removing {}, skipping'.format(path))
def statuses(config):
    """Return [(instance, has_state_file)] for every configured instance."""
    result = []
    for instance in config:
        state_file = '.boss/{}-state.yml'.format(instance)
        result.append((instance, os.path.exists(state_file)))
    return result
def login(instance, config, phase='build'):
    """Open an interactive ssh session to the given phase's instance.

    Raises ItemNotFound if the phase has no inventory entry.
    NOTE(review): *config* is currently unused — kept for CLI symmetry?
    """
    files = instance_files(instance)
    with load_inventory(instance) as inventory:
        if phase not in inventory:
            raise ItemNotFound('No {} instance found'.format(phase))
        # In practice there is one host per phase; ssh to each in turn.
        for host, inventory_args in inventory[phase].items():
            user = inventory_args['ansible_user']
            ssh = subprocess.Popen(['ssh', '-i', files['keyfile'], '-l', user, host])
            ssh.wait()
def instance_files(instance):
    """Return the per-instance working file paths under .boss/."""
    templates = {
        'state': '.boss/{}-state.yml',
        'keyfile': '.boss/{}.pem',
        'inventory': '.boss/{}.inventory',
        'playbook': '.boss/{}-playbook.yml',
    }
    return {key: template.format(instance) for key, template in templates.items()}
@contextlib.contextmanager
def load_state(instance):
    """Context manager yielding the instance's persistent state dict.

    Loads .boss/<instance>-state.yml (empty dict if absent), yields it
    for mutation, and always writes it back on exit. NOTE(review):
    nesting two load_state contexts for the same instance means the
    outer write-back clobbers the inner one — avoid nesting.
    """
    files = instance_files(instance)
    if not os.path.exists(files['state']):
        state = dict()
    else:
        with open(files['state']) as f:
            state = yaml.safe_load(f)
    yield state
    with open(files['state'], 'w') as f:
        f.write(yaml.safe_dump(state))
def resource_id_for(collection, collection_desc, name, prefix, flt):
    """Resolve *name* to an AWS resource id.

    Names already carrying the id prefix pass through unchanged;
    otherwise the collection is filtered and the first match's id is
    returned. Raises ItemNotFound when nothing matches.
    """
    if name.startswith(prefix):
        return name
    matches = list(collection.filter(Filters=[flt]))
    if not matches:
        desc = '{} "{}"'.format(collection_desc, name)
        raise ItemNotFound(desc)
    return matches[0].id
def ami_id_for(images, name):
    """Resolve an AMI name (exact 'name' filter) or pass through an ami- id."""
    return resource_id_for(
        images, 'image', name, 'ami-',
        {'Name': 'name', 'Values': [name]}
    )
def sg_id_for(security_groups, name):
    """Resolve a security group name or pass through an sg- id."""
    return resource_id_for(
        security_groups, 'security group', name, 'sg-',
        {'Name': 'group-name', 'Values': [name]}
    )
def subnet_id_for(subnets, name):
    """Resolve a subnet by its Name tag or pass through a subnet- id.

    NOTE(review): the 'subnet ' description has a trailing space that
    leaks into ItemNotFound messages — looks unintended.
    """
    return resource_id_for(
        subnets, 'subnet ', name, 'subnet-',
        {'Name': 'tag:Name', 'Values': [name]}
    )
def load_config(path='.boss.yml'):
    """Load, Jinja2-render, parse and validate the .boss.yml config.

    Environment variables are exposed to the template. Returns the
    transformed per-instance config (see transform_config). All
    template/IO/YAML/schema failures are re-raised as
    ConfigurationError with a readable message.
    """
    loader = j.FileSystemLoader('.')
    try:
        template = loader.load(j.Environment(), path, os.environ)
        yml = template.render()
        # NOTE(review): yaml.load without an explicit Loader can execute
        # arbitrary Python tags. .boss.yml is developer-authored, but
        # yaml.safe_load would be safer — confirm no config relies on
        # non-safe tags before changing.
        doc = yaml.load(yml)
        return transform_config(doc)
    except j.TemplateNotFound:
        error = 'Error loading {}: not found'.format(path)
        raise ConfigurationError(error)
    except j.TemplateSyntaxError as e:
        error = 'Error loading {}: {}, line {}'.format(path, e, e.lineno)
        raise ConfigurationError(error)
    except IOError as e:
        error = 'Error loading {}: {}'.format(path, e.strerror)
        raise ConfigurationError(error)
    except v.Invalid as e:
        error = 'Error validating {}: {}'.format(path, e)
        raise ConfigurationError(error)
def invalid(kind, item):
    """Build (not raise) a voluptuous Invalid error describing a bad value."""
    return v.Invalid('Invalid {}: {}'.format(kind, item))
def re_validator(pat, s, kind):
    """Return *s* when it matches *pat*; otherwise raise a voluptuous Invalid."""
    if re.match(pat, s):
        return s
    raise invalid(kind, s)
def is_subnet_id(s):
    """Schema validator: s must look like an EC2 subnet id (subnet-xxxxxxxx)."""
    return re_validator(r'^subnet-[0-9a-f]{8,}$', s, 'subnet_id')
def is_snapshot_id(s):
    """Schema validator: s must look like an EBS snapshot id (snap-xxxxxxxx)."""
    return re_validator(r'^snap-[0-9a-f]{8,}$', s, 'snapshot_id')
def is_virtual_name(s):
    """Schema validator: s must be an ephemeral device name (ephemeralN)."""
    return re_validator(r'^ephemeral\d+$', s, 'virtual_name')
def validate(doc):
    """Validate a parsed .boss.yml document against the voluptuous schema.

    Returns the document with defaults filled in; raises v.Invalid on
    schema violations (wrapped by load_config).
    """
    # Connection/instance settings accepted at both the platform level
    # (no defaults) ...
    base = v.Schema({
        v.Optional('instance_type'): str,
        v.Optional('username'): str,
        v.Optional('connection'): str,
        v.Optional('connection_timeout'): int,
        v.Optional('inventory_args'): {str: v.Or(str, int, bool)},
        v.Optional('port'): int,
        v.Optional('associate_public_ip_address'): bool,
        v.Optional('subnet'): str,
        v.Optional('security_groups'): [str],
        v.Optional('iam_instance_profile'): str,
        v.Optional('tags'): {str: str},
        v.Optional('user_data'): v.Or(
            str,
            {'file': str},
        ),
        v.Optional('block_device_mappings'): [dict],
    })
    # ... and at the top-level 'defaults' section (with defaults).
    defaults = {
        v.Optional('instance_type', default='t2.micro'): str,
        v.Optional('username', default=DEFAULT_ANSIBLE_USER): str,
        v.Optional('connection', default=DEFAULT_ANSIBLE_CONNECTION): str,
        v.Optional('connection_timeout', default=600): int,
        v.Optional('port', default=DEFAULT_ANSIBLE_PORT): int,
        v.Optional('associate_public_ip_address', default=True): bool,
        v.Optional('subnet', default=''): str,
        v.Optional('security_groups', default=[]): [str],
        v.Optional('iam_instance_profile', default=''): str,
        v.Optional('tags', default={}): {str: str},
        v.Optional('user_data', default=''): v.Or(
            str,
            {'file': str},
        ),
        v.Optional('block_device_mappings', default=[]): [dict],
    }
    build = base.extend({
        v.Required('source_ami'): str,
        v.Optional('become', default=True): bool,
        v.Optional('extra_vars', default={}): dict,
    })
    image = {
        v.Optional('ami_name'): str,
    }
    test = base.extend({
        v.Optional('playbook', default='tests/test.yml'): str
    })
    # Default AMI name: %-template interpolated in make_image.
    ami_name = '%(role)s.%(profile)s.%(platform)s.%(vtype)s.%(arch)s.%(version)s' # noqa
    platform = base.extend({
        v.Required('name'): str,
        v.Required('build'): build,
        v.Optional('image', default={'ami_name': ami_name}): image,
        v.Optional('test', default={'playbook': 'tests/test.yml'}): test,
    })
    profile = {
        v.Required('name'): str,
        v.Optional('extra_vars', default={}): dict
    }
    # Top-level document: defaults, required platforms, optional profiles
    # (a single 'default' profile when omitted).
    return v.Schema({
        v.Optional('defaults', default={}): defaults,
        v.Required('platforms'): [platform],
        v.Optional('profiles', default=[{
            'name': 'default', 'extra_vars': {}
        }]): [profile],
    })(doc)
def transform_config(doc):
    """Expand a validated .boss.yml into per-instance config dicts.

    For every (platform, profile) pair produces an entry keyed
    '<platform>-<profile>' with merged 'build', 'image' and 'test'
    sections; precedence is defaults < platform settings < phase block.
    """
    doc.setdefault('defaults', {})
    validated = validate(doc)
    transformed = {}
    # Platform keys that are structural, not connection settings, and
    # must not be merged into the phase configs.
    excluded_items = ('name', 'build', 'image', 'test')
    for platform in validated['platforms']:
        for profile in validated['profiles']:
            instance = '{}-{}'.format(platform['name'], profile['name'])
            transformed[instance] = {}
            transformed[instance]['build'] = validated['defaults'].copy()
            transformed[instance]['build'].update({
                k: v for k, v in platform.items() if k not in excluded_items
            })
            transformed[instance]['build'].update(platform['build'].copy())
            # Profile extra_vars override the platform's; platform and
            # profile names are recorded for AMI-name interpolation.
            transformed[instance]['build'].update({
                'extra_vars': profile['extra_vars'].copy(),
                'platform': platform['name'],
                'profile': profile['name'],
            })
            transformed[instance]['image'] = platform['image'].copy()
            transformed[instance]['image'].update({
                'platform': platform['name'],
                'profile': profile['name'],
            })
            transformed[instance]['test'] = validated['defaults'].copy()
            transformed[instance]['test'].update({
                k: v for k, v in platform.items() if k not in excluded_items
            })
            transformed[instance]['test'].update(platform['test'].copy())
            transformed[instance]['platform'] = platform['name']
            transformed[instance]['profile'] = profile['name']
    return transformed
| cloudboss/bossimage | bossimage/core.py | Python | mit | 24,516 | [
"Galaxy"
] | 6bd2e9a4034a60ee84b7954e94e4f382d654f3d5966973da539aaf7583c34fb1 |
"""
A VTK RenderWindowInteractor widget for wxPython.
Find wxPython info at http://wxPython.org
Created by Prabhu Ramachandran, April 2002
Based on wxVTKRenderWindow.py
Fixes and updates by Charl P. Botha 2003-2008
Updated to new wx namespace and some cleaning up by Andrea Gavana,
December 2006
"""
"""
Please see the example at the end of this file.
----------------------------------------
Creation:
wxVTKRenderWindowInteractor(parent, ID, stereo=0, [wx keywords]):
You should create a wx.PySimpleApp() or some other wx**App before
creating the window.
Behaviour:
Uses __getattr__ to make the wxVTKRenderWindowInteractor behave just
like a vtkGenericRenderWindowInteractor.
----------------------------------------
"""
# import usual libraries
import math, os, sys
import wx
import vtk
# wxPython 2.4.0.4 and newer prefers the use of True and False, standard
# booleans in Python 2.2 but not earlier. Here we define these values if
# they don't exist so that we can use True and False in the rest of the
# code. At the time of this writing, that happens exactly ONCE in
# CreateTimer()
try:
    True
except NameError:
    # Interpreters older than Python 2.2.1 have no True/False builtins;
    # define integer stand-ins so the rest of the module can use them.
    True = 1
    False = 0
# a few configuration items, see what works best on your system
# Use GLCanvas as base class instead of wx.Window.
# This is sometimes necessary under wxGTK or the image is blank.
# (in wxWindows 2.3.1 and earlier, the GLCanvas had scroll bars)
baseClass = wx.Window
if wx.Platform == "__WXGTK__":
    import wx.glcanvas
    baseClass = wx.glcanvas.GLCanvas
# Keep capturing mouse after mouse is dragged out of window
# (in wxGTK 2.3.2 there is a bug that keeps this from working,
# but it is only relevant in wxGTK if there are multiple windows)
_useCapture = (wx.Platform == "__WXMSW__")
# end of configuration items
class EventTimer(wx.Timer):
    """Simple wx.Timer class.

    One-shot timer created by the interactor's CreateTimer observer;
    forwards its expiry to VTK as a TimerEvent.
    """
    def __init__(self, iren):
        """Default class constructor.
        @param iren: current render window interactor whose TimerEvent
            will be fired on expiry
        """
        wx.Timer.__init__(self)
        self.iren = iren
    def Notify(self):
        """ The timer has expired.
        """
        self.iren.TimerEvent()
class wxVTKRenderWindowInteractor(baseClass):
"""
A wxRenderWindow for wxPython.
Use GetRenderWindow() to get the vtkRenderWindow.
Create with the keyword stereo=1 in order to
generate a stereo-capable window.
"""
# class variable that can also be used to request instances that use
# stereo; this is overridden by the stereo=1/0 parameter. If you set
# it to True, the NEXT instantiated object will attempt to allocate a
# stereo visual. E.g.:
# wxVTKRenderWindowInteractor.USE_STEREO = True
# myRWI = wxVTKRenderWindowInteractor(parent, -1)
USE_STEREO = False
def __init__(self, parent, ID, *args, **kw):
"""Default class constructor.
@param parent: parent window
@param ID: window id
@param **kw: wxPython keywords (position, size, style) plus the
'stereo' keyword
"""
# private attributes
self.__RenderWhenDisabled = 0
# First do special handling of some keywords:
# stereo, position, size, style
stereo = 0
if kw.has_key('stereo'):
if kw['stereo']:
stereo = 1
del kw['stereo']
elif self.USE_STEREO:
stereo = 1
position, size = wx.DefaultPosition, wx.DefaultSize
if kw.has_key('position'):
position = kw['position']
del kw['position']
if kw.has_key('size'):
size = kw['size']
del kw['size']
# wx.WANTS_CHARS says to give us e.g. TAB
# wx.NO_FULL_REPAINT_ON_RESIZE cuts down resize flicker under GTK
style = wx.WANTS_CHARS | wx.NO_FULL_REPAINT_ON_RESIZE
if kw.has_key('style'):
style = style | kw['style']
del kw['style']
# the enclosing frame must be shown under GTK or the windows
# don't connect together properly
if wx.Platform != '__WXMSW__':
l = []
p = parent
while p: # make a list of all parents
l.append(p)
p = p.GetParent()
l.reverse() # sort list into descending order
for p in l:
p.Show(1)
if baseClass.__name__ == 'GLCanvas':
# code added by cpbotha to enable stereo and double
# buffering correctly where the user requests this; remember
# that the glXContext in this case is NOT allocated by VTK,
# but by WX, hence all of this.
# Initialize GLCanvas with correct attriblist
attribList = [wx.glcanvas.WX_GL_RGBA,
wx.glcanvas.WX_GL_MIN_RED, 1,
wx.glcanvas.WX_GL_MIN_GREEN, 1,
wx.glcanvas.WX_GL_MIN_BLUE, 1,
wx.glcanvas.WX_GL_DEPTH_SIZE, 16,
wx.glcanvas.WX_GL_DOUBLEBUFFER]
if stereo:
attribList.append(wx.glcanvas.WX_GL_STEREO)
try:
baseClass.__init__(self, parent, ID, position, size, style,
attribList=attribList)
except wx.PyAssertionError:
# visual couldn't be allocated, so we go back to default
baseClass.__init__(self, parent, ID, position, size, style)
if stereo:
# and make sure everyone knows that the stereo
# visual wasn't set.
stereo = 0
else:
baseClass.__init__(self, parent, ID, position, size, style)
# create the RenderWindow and initialize it
self._Iren = vtk.vtkGenericRenderWindowInteractor()
self._Iren.SetRenderWindow( vtk.vtkRenderWindow() )
self._Iren.AddObserver('CreateTimerEvent', self.CreateTimer)
self._Iren.AddObserver('DestroyTimerEvent', self.DestroyTimer)
self._Iren.GetRenderWindow().AddObserver('CursorChangedEvent',
self.CursorChangedEvent)
try:
self._Iren.GetRenderWindow().SetSize(size.width, size.height)
except AttributeError:
self._Iren.GetRenderWindow().SetSize(size[0], size[1])
if stereo:
self._Iren.GetRenderWindow().StereoCapableWindowOn()
self._Iren.GetRenderWindow().SetStereoTypeToCrystalEyes()
self.__handle = None
self.BindEvents()
# with this, we can make sure that the reparenting logic in
# Render() isn't called before the first OnPaint() has
# successfully been run (and set up the VTK/WX display links)
self.__has_painted = False
# set when we have captured the mouse.
self._own_mouse = False
# used to store WHICH mouse button led to mouse capture
self._mouse_capture_button = 0
# A mapping for cursor changes.
self._cursor_map = {0: wx.CURSOR_ARROW, # VTK_CURSOR_DEFAULT
1: wx.CURSOR_ARROW, # VTK_CURSOR_ARROW
2: wx.CURSOR_SIZENESW, # VTK_CURSOR_SIZENE
3: wx.CURSOR_SIZENWSE, # VTK_CURSOR_SIZENWSE
4: wx.CURSOR_SIZENESW, # VTK_CURSOR_SIZESW
5: wx.CURSOR_SIZENWSE, # VTK_CURSOR_SIZESE
6: wx.CURSOR_SIZENS, # VTK_CURSOR_SIZENS
7: wx.CURSOR_SIZEWE, # VTK_CURSOR_SIZEWE
8: wx.CURSOR_SIZING, # VTK_CURSOR_SIZEALL
9: wx.CURSOR_HAND, # VTK_CURSOR_HAND
10: wx.CURSOR_CROSS, # VTK_CURSOR_CROSSHAIR
}
def BindEvents(self):
"""Binds all the necessary events for navigation, sizing,
drawing.
"""
# refresh window by doing a Render
self.Bind(wx.EVT_PAINT, self.OnPaint)
# turn off background erase to reduce flicker
self.Bind(wx.EVT_ERASE_BACKGROUND, lambda e: None)
# Bind the events to the event converters
self.Bind(wx.EVT_RIGHT_DOWN, self.OnButtonDown)
self.Bind(wx.EVT_LEFT_DOWN, self.OnButtonDown)
self.Bind(wx.EVT_MIDDLE_DOWN, self.OnButtonDown)
self.Bind(wx.EVT_RIGHT_UP, self.OnButtonUp)
self.Bind(wx.EVT_LEFT_UP, self.OnButtonUp)
self.Bind(wx.EVT_MIDDLE_UP, self.OnButtonUp)
self.Bind(wx.EVT_MOUSEWHEEL, self.OnMouseWheel)
self.Bind(wx.EVT_MOTION, self.OnMotion)
self.Bind(wx.EVT_ENTER_WINDOW, self.OnEnter)
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeave)
# If we use EVT_KEY_DOWN instead of EVT_CHAR, capital versions
# of all characters are always returned. EVT_CHAR also performs
# other necessary keyboard-dependent translations.
self.Bind(wx.EVT_CHAR, self.OnKeyDown)
self.Bind(wx.EVT_KEY_UP, self.OnKeyUp)
self.Bind(wx.EVT_SIZE, self.OnSize)
# the wx 2.8.7.1 documentation states that you HAVE to handle
# this event if you make use of CaptureMouse, which we do.
if _useCapture and hasattr(wx, 'EVT_MOUSE_CAPTURE_LOST'):
self.Bind(wx.EVT_MOUSE_CAPTURE_LOST,
self.OnMouseCaptureLost)
def __getattr__(self, attr):
"""Makes the object behave like a
vtkGenericRenderWindowInteractor.
"""
if attr == '__vtk__':
return lambda t=self._Iren: t
elif hasattr(self._Iren, attr):
return getattr(self._Iren, attr)
else:
raise AttributeError, self.__class__.__name__ + \
" has no attribute named " + attr
def CreateTimer(self, obj, evt):
""" Creates a timer.
"""
self._timer = EventTimer(self)
self._timer.Start(10, True)
def DestroyTimer(self, obj, evt):
"""The timer is a one shot timer so will expire automatically.
"""
return 1
def _CursorChangedEvent(self, obj, evt):
"""Change the wx cursor if the renderwindow's cursor was
changed.
"""
cur = self._cursor_map[obj.GetCurrentCursor()]
c = wx.StockCursor(cur)
self.SetCursor(c)
def CursorChangedEvent(self, obj, evt):
"""Called when the CursorChangedEvent fires on the render
window."""
# This indirection is needed since when the event fires, the
# current cursor is not yet set so we defer this by which time
# the current cursor should have been set.
wx.CallAfter(self._CursorChangedEvent, obj, evt)
def HideCursor(self):
"""Hides the cursor."""
c = wx.StockCursor(wx.CURSOR_BLANK)
self.SetCursor(c)
def ShowCursor(self):
"""Shows the cursor."""
rw = self._Iren.GetRenderWindow()
cur = self._cursor_map[rw.GetCurrentCursor()]
c = wx.StockCursor(cur)
self.SetCursor(c)
def GetDisplayId(self):
"""Function to get X11 Display ID from WX and return it in a format
that can be used by VTK Python.
We query the X11 Display with a new call that was added in wxPython
2.6.0.1. The call returns a SWIG object which we can query for the
address and subsequently turn into an old-style SWIG-mangled string
representation to pass to VTK.
"""
d = None
try:
d = wx.GetXDisplay()
except NameError:
# wx.GetXDisplay was added by Robin Dunn in wxPython 2.6.0.1
# if it's not available, we can't pass it. In general,
# things will still work; on some setups, it'll break.
pass
else:
# wx returns None on platforms where wx.GetXDisplay is not relevant
if d:
d = hex(d)
# On wxPython-2.6.3.2 and above there is no leading '0x'.
if not d.startswith('0x'):
d = '0x' + d
# we now have 0xdeadbeef
# VTK wants it as: _deadbeef_void_p (pre-SWIG-1.3 style)
d = '_%s_%s\0' % (d[2:], 'void_p')
return d
def OnMouseCaptureLost(self, event):
"""This is signalled when we lose mouse capture due to an
external event, such as when a dialog box is shown. See the
wx documentation.
"""
# the documentation seems to imply that by this time we've
# already lost capture. I have to assume that we don't need
# to call ReleaseMouse ourselves.
if _useCapture and self._own_mouse:
self._own_mouse = False
def OnPaint(self,event):
"""Handles the wx.EVT_PAINT event for
wxVTKRenderWindowInteractor.
"""
# wx should continue event processing after this handler.
# We call this BEFORE Render(), so that if Render() raises
# an exception, wx doesn't re-call OnPaint repeatedly.
event.Skip()
dc = wx.PaintDC(self)
# make sure the RenderWindow is sized correctly
self._Iren.GetRenderWindow().SetSize(self.GetSizeTuple())
# Tell the RenderWindow to render inside the wx.Window.
if not self.__handle:
# on relevant platforms, set the X11 Display ID
d = self.GetDisplayId()
if d:
self._Iren.GetRenderWindow().SetDisplayId(d)
# store the handle
self.__handle = self.GetHandle()
# and give it to VTK
self._Iren.GetRenderWindow().SetWindowInfo(str(self.__handle))
# now that we've painted once, the Render() reparenting logic
# is safe
self.__has_painted = True
self.Render()
def OnSize(self,event):
"""Handles the wx.EVT_SIZE event for
wxVTKRenderWindowInteractor.
"""
# event processing should continue (we call this before the
# Render(), in case it raises an exception)
event.Skip()
try:
width, height = event.GetSize()
except:
width = event.GetSize().width
height = event.GetSize().height
self._Iren.SetSize(width, height)
self._Iren.ConfigureEvent()
# this will check for __handle
self.Render()
def OnMotion(self,event):
"""Handles the wx.EVT_MOTION event for
wxVTKRenderWindowInteractor.
"""
# event processing should continue
# we call this early in case any of the VTK code raises an
# exception.
event.Skip()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
event.ControlDown(),
event.ShiftDown(),
chr(0), 0, None)
self._Iren.MouseMoveEvent()
def OnEnter(self,event):
"""Handles the wx.EVT_ENTER_WINDOW event for
wxVTKRenderWindowInteractor.
"""
# event processing should continue
event.Skip()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
event.ControlDown(),
event.ShiftDown(),
chr(0), 0, None)
self._Iren.EnterEvent()
def OnLeave(self,event):
"""Handles the wx.EVT_LEAVE_WINDOW event for
wxVTKRenderWindowInteractor.
"""
# event processing should continue
event.Skip()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
event.ControlDown(),
event.ShiftDown(),
chr(0), 0, None)
self._Iren.LeaveEvent()
def OnButtonDown(self,event):
"""Handles the wx.EVT_LEFT/RIGHT/MIDDLE_DOWN events for
wxVTKRenderWindowInteractor.
"""
# allow wx event processing to continue
# on wxPython 2.6.0.1, omitting this will cause problems with
# the initial focus, resulting in the wxVTKRWI ignoring keypresses
# until we focus elsewhere and then refocus the wxVTKRWI frame
# we do it this early in case any of the following VTK code
# raises an exception.
event.Skip()
ctrl, shift = event.ControlDown(), event.ShiftDown()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
ctrl, shift, chr(0), 0, None)
button = 0
if event.RightDown():
self._Iren.RightButtonPressEvent()
button = 'Right'
elif event.LeftDown():
self._Iren.LeftButtonPressEvent()
button = 'Left'
elif event.MiddleDown():
self._Iren.MiddleButtonPressEvent()
button = 'Middle'
# save the button and capture mouse until the button is released
# we only capture the mouse if it hasn't already been captured
if _useCapture and not self._own_mouse:
self._own_mouse = True
self._mouse_capture_button = button
self.CaptureMouse()
def OnButtonUp(self,event):
"""Handles the wx.EVT_LEFT/RIGHT/MIDDLE_UP events for
wxVTKRenderWindowInteractor.
"""
# event processing should continue
event.Skip()
button = 0
if event.RightUp():
button = 'Right'
elif event.LeftUp():
button = 'Left'
elif event.MiddleUp():
button = 'Middle'
# if the same button is released that captured the mouse, and
# we have the mouse, release it.
# (we need to get rid of this as soon as possible; if we don't
# and one of the event handlers raises an exception, mouse
# is never released.)
if _useCapture and self._own_mouse and \
button==self._mouse_capture_button:
self.ReleaseMouse()
self._own_mouse = False
ctrl, shift = event.ControlDown(), event.ShiftDown()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
ctrl, shift, chr(0), 0, None)
if button == 'Right':
self._Iren.RightButtonReleaseEvent()
elif button == 'Left':
self._Iren.LeftButtonReleaseEvent()
elif button == 'Middle':
self._Iren.MiddleButtonReleaseEvent()
def OnMouseWheel(self,event):
"""Handles the wx.EVT_MOUSEWHEEL event for
wxVTKRenderWindowInteractor.
"""
# event processing should continue
event.Skip()
ctrl, shift = event.ControlDown(), event.ShiftDown()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
ctrl, shift, chr(0), 0, None)
if event.GetWheelRotation() > 0:
self._Iren.MouseWheelForwardEvent()
else:
self._Iren.MouseWheelBackwardEvent()
def OnKeyDown(self,event):
"""Handles the wx.EVT_KEY_DOWN event for
wxVTKRenderWindowInteractor.
"""
# event processing should continue
event.Skip()
ctrl, shift = event.ControlDown(), event.ShiftDown()
keycode, keysym = event.GetKeyCode(), None
key = chr(0)
if keycode < 256:
key = chr(keycode)
# wxPython 2.6.0.1 does not return a valid event.Get{X,Y}()
# for this event, so we use the cached position.
(x,y)= self._Iren.GetEventPosition()
self._Iren.SetEventInformation(x, y,
ctrl, shift, key, 0,
keysym)
self._Iren.KeyPressEvent()
self._Iren.CharEvent()
def OnKeyUp(self,event):
"""Handles the wx.EVT_KEY_UP event for
wxVTKRenderWindowInteractor.
"""
# event processing should continue
event.Skip()
ctrl, shift = event.ControlDown(), event.ShiftDown()
keycode, keysym = event.GetKeyCode(), None
key = chr(0)
if keycode < 256:
key = chr(keycode)
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
ctrl, shift, key, 0,
keysym)
self._Iren.KeyReleaseEvent()
def GetRenderWindow(self):
"""Returns the render window (vtkRenderWindow).
"""
return self._Iren.GetRenderWindow()
    def Render(self):
        """Actually renders the VTK scene on screen.

        Honours the __RenderWhenDisabled setting, and transparently
        handles the case where this widget has been reparented since the
        last render (detected via a changed native window handle).
        """
        RenderAllowed = 1

        if not self.__RenderWhenDisabled:
            # the user doesn't want us to render when the toplevel frame
            # is disabled - first find the top level parent
            topParent = wx.GetTopLevelParent(self)
            if topParent:
                # if it exists, check whether it's enabled
                # if it's not enabled, RenderAllowed will be false
                RenderAllowed = topParent.IsEnabled()

        if RenderAllowed:
            if self.__handle and self.__handle == self.GetHandle():
                # handle unchanged since last paint: plain render
                self._Iren.GetRenderWindow().Render()

            elif self.GetHandle() and self.__has_painted:
                # this means the user has reparented us; let's adapt to the
                # new situation by doing the WindowRemap dance
                self._Iren.GetRenderWindow().SetNextWindowInfo(
                    str(self.GetHandle()))

                # make sure the DisplayId is also set correctly
                d = self.GetDisplayId()
                if d:
                    self._Iren.GetRenderWindow().SetDisplayId(d)

                # do the actual remap with the new parent information
                self._Iren.GetRenderWindow().WindowRemap()

                # store the new situation
                self.__handle = self.GetHandle()
                self._Iren.GetRenderWindow().Render()
def SetRenderWhenDisabled(self, newValue):
"""Change value of __RenderWhenDisabled ivar.
If __RenderWhenDisabled is false (the default), this widget will not
call Render() on the RenderWindow if the top level frame (i.e. the
containing frame) has been disabled.
This prevents recursive rendering during wx.SafeYield() calls.
wx.SafeYield() can be called during the ProgressMethod() callback of
a VTK object to have progress bars and other GUI elements updated -
it does this by disabling all windows (disallowing user-input to
prevent re-entrancy of code) and then handling all outstanding
GUI events.
However, this often triggers an OnPaint() method for wxVTKRWIs,
resulting in a Render(), resulting in Update() being called whilst
still in progress.
"""
self.__RenderWhenDisabled = bool(newValue)
#--------------------------------------------------------------------
def wxVTKRenderWindowInteractorConeExample():
    """Minimal demo: render a cone inside a wxVTKRenderWindowInteractor."""
    # a wx App object must exist before any other wx calls
    app = wx.PySimpleApp()

    # top-level frame holding the interactor in an expanding sizer
    frame = wx.Frame(None, -1, "wxVTKRenderWindowInteractor", size=(400,400))
    rwi = wxVTKRenderWindowInteractor(frame, -1)
    box = wx.BoxSizer(wx.VERTICAL)
    box.Add(rwi, 1, wx.EXPAND)
    frame.SetSizer(box)
    frame.Layout()

    # It would be more correct (API-wise) to call rwi.Initialize() and
    # rwi.Start() here, but Initialize() calls RenderWindow.Render(),
    # and that Render() would get through before we can set the
    # RenderWindow up to render via the wxWidgets-created context --
    # causing flashing on some platforms and outright breakage on
    # others.  Enable() avoids that; the RWI::Initialized ivar stays
    # unset, which in THIS SPECIFIC CASE does not matter.
    rwi.Enable(1)
    rwi.AddObserver("ExitEvent", lambda o,e,f=frame: f.Close())

    # trivial pipeline: cone source -> mapper -> actor -> renderer
    renderer = vtk.vtkRenderer()
    rwi.GetRenderWindow().AddRenderer(renderer)
    cone_source = vtk.vtkConeSource()
    cone_source.SetResolution(8)
    cone_mapper = vtk.vtkPolyDataMapper()
    cone_mapper.SetInput(cone_source.GetOutput())
    cone_actor = vtk.vtkActor()
    cone_actor.SetMapper(cone_mapper)
    renderer.AddActor(cone_actor)

    # show the window and enter the wx event loop
    frame.Show()
    app.MainLoop()
# run the interactive cone demo when this module is executed as a script
if __name__ == "__main__":
    wxVTKRenderWindowInteractorConeExample()
| Wuteyan/VTK | Wrapping/Python/vtk/wx/wxVTKRenderWindowInteractor.py | Python | bsd-3-clause | 24,647 | [
"VTK"
] | 116b6b4d1398211db15c2247b0fe0e0f89ba5d2c54ba8f98bb73283f42abd06d |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Various classes representing distributed inputs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import sys
import six
from tensorflow.python import tf2
from tensorflow.python.data.experimental.ops import batching
from tensorflow.python.data.experimental.ops import distribute
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import multi_device_iterator_ops
from tensorflow.python.data.ops import optional_ops
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribute_utils
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.distribute import input_ops
from tensorflow.python.distribute import reduce_util
from tensorflow.python.distribute import values
from tensorflow.python.eager import context
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device as tf_device
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import type_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.types import distribute as distribute_types
from tensorflow.python.util import nest
from tensorflow.python.util.compat import collections_abc
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.util.tf_export import tf_export
from tensorflow.tools.docs import doc_controls
def get_distributed_dataset(dataset,
                            input_workers,
                            strategy,
                            split_batch_by=None,
                            input_context=None):
  """Returns a distributed dataset from the given tf.data.Dataset instance.

  Every strategy funnels through this helper to build its distributed
  dataset.  The concrete class returned depends on whether we are running
  under TF 1 or TF 2 semantics; the two classes expose different iteration
  APIs.

  Args:
    dataset: a tf.data.Dataset instance.
    input_workers: an InputWorkers object which specifies devices on which
      iterators should be created.
    strategy: a `tf.distribute.Strategy` object, used to run all-reduce to
      handle last partial batch.
    split_batch_by: Optional integer. If present, we "split" each batch of
      the dataset by `split_batch_by` value.
    input_context: `InputContext` for sharding. Only pass this in for between
      graph multi-worker cases where there is only one `input_worker`. In
      these cases, we will shard based on the `input_pipeline_id` and
      `num_input_pipelines` in the `InputContext`.

  Returns:
    A distributed dataset instance.
  """
  # Pick the TF2 or TF1 flavor; both take identical constructor arguments.
  dataset_cls = DistributedDataset if tf2.enabled() else DistributedDatasetV1
  return dataset_cls(
      dataset,
      input_workers,
      strategy,
      split_batch_by=split_batch_by,
      input_context=input_context)
def get_distributed_datasets_from_function(dataset_fn,
                                           input_workers,
                                           input_contexts,
                                           strategy):
  """Returns a distributed dataset from the given input function.

  Every strategy funnels through this helper to build a distributed dataset
  out of a user-supplied `dataset_fn`.  The concrete class returned depends
  on whether we are running under TF 1 or TF 2 semantics; the two classes
  expose different iteration APIs.

  Args:
    dataset_fn: a function that returns a tf.data.Dataset instance.
    input_workers: an InputWorkers object which specifies devices on which
      iterators should be created.
    input_contexts: A list of `InputContext` instances to be passed to
      call(s) to `dataset_fn`. Length and order should match worker order in
      `worker_device_pairs`.
    strategy: a `tf.distribute.Strategy` object, used to run all-reduce to
      handle last partial batch.

  Returns:
    A distributed dataset instance.
  """
  # Pick the TF2 or TF1 flavor; both take identical constructor arguments.
  from_fn_cls = (DistributedDatasetsFromFunction if tf2.enabled()
                 else DistributedDatasetsFromFunctionV1)
  return from_fn_cls(dataset_fn, input_workers, input_contexts, strategy)
@tf_export("distribute.DistributedIterator", v1=[])
class DistributedIteratorInterface(collections_abc.Iterator,
                                   distribute_types.Iterator):
  """An iterator over `tf.distribute.DistributedDataset`.

  `tf.distribute.DistributedIterator` is the primary mechanism for enumerating
  elements of a `tf.distribute.DistributedDataset`. It supports the Python
  Iterator protocol, which means it can be iterated over using a for-loop or by
  fetching individual elements explicitly via `get_next()`.

  You can create a `tf.distribute.DistributedIterator` by calling `iter` on
  a `tf.distribute.DistributedDataset` or creating a python loop over a
  `tf.distribute.DistributedDataset`.

  Visit the [tutorial](https://www.tensorflow.org/tutorials/distribute/input)
  on distributed input for more examples and caveats.
  """

  def get_next(self):
    """Returns the next input from the iterator for all replicas.

    Example use:

    >>> strategy = tf.distribute.MirroredStrategy(["GPU:0", "GPU:1"])
    >>> dataset = tf.data.Dataset.range(100).batch(2)
    >>> dist_dataset = strategy.experimental_distribute_dataset(dataset)
    >>> dist_dataset_iterator = iter(dist_dataset)
    >>> @tf.function
    ... def one_step(input):
    ...   return input
    >>> step_num = 5
    >>> for _ in range(step_num):
    ...   strategy.run(one_step, args=(dist_dataset_iterator.get_next(),))
    >>> strategy.experimental_local_results(dist_dataset_iterator.get_next())
    (<tf.Tensor: shape=(1,), dtype=int64, numpy=array([10])>,
     <tf.Tensor: shape=(1,), dtype=int64, numpy=array([11])>)

    Returns:
      A single `tf.Tensor` or a `tf.distribute.DistributedValues` which contains
      the next input for all replicas.

    Raises:
      `tf.errors.OutOfRangeError`: If the end of the iterator has been reached.
    """
    # Abstract: concrete subclasses (e.g. DistributedIteratorBase) implement.
    raise NotImplementedError(
        "DistributedIterator.get_next() must be implemented in descendants.")

  @property
  def element_spec(self):
    # pylint: disable=line-too-long
    """The type specification of an element of `tf.distribute.DistributedIterator`.

    Example usage:

    >>> global_batch_size = 16
    >>> strategy = tf.distribute.MirroredStrategy(["GPU:0", "GPU:1"])
    >>> dataset = tf.data.Dataset.from_tensors(([1.],[2])).repeat(100).batch(global_batch_size)
    >>> distributed_iterator = iter(strategy.experimental_distribute_dataset(dataset))
    >>> distributed_iterator.element_spec
    (PerReplicaSpec(TensorSpec(shape=(None, 1), dtype=tf.float32, name=None),
                    TensorSpec(shape=(None, 1), dtype=tf.float32, name=None)),
     PerReplicaSpec(TensorSpec(shape=(None, 1), dtype=tf.int32, name=None),
                    TensorSpec(shape=(None, 1), dtype=tf.int32, name=None)))

    Returns:
      A nested structure of `tf.TypeSpec` objects matching the structure of an
      element of this `tf.distribute.DistributedIterator`. This returned value
      is typically a `tf.distribute.DistributedValues` object and specifies the
      `tf.TensorSpec` of individual components.
    """
    # Abstract: concrete subclasses implement.
    raise NotImplementedError(
        "DistributedIterator.element_spec() must be implemented in descendants")

  def get_next_as_optional(self):
    # pylint: disable=line-too-long
    """Returns a `tf.experimental.Optional` that contains the next value for all replicas.

    If the `tf.distribute.DistributedIterator` has reached the end of the
    sequence, the returned `tf.experimental.Optional` will have no value.

    Example usage:

    >>> strategy = tf.distribute.MirroredStrategy(["GPU:0", "GPU:1"])
    >>> global_batch_size = 2
    >>> steps_per_loop = 2
    >>> dataset = tf.data.Dataset.range(10).batch(global_batch_size)
    >>> distributed_iterator = iter(
    ...     strategy.experimental_distribute_dataset(dataset))
    >>> def step_fn(x):
    ...   # train the model with inputs
    ...   return x
    >>> @tf.function
    ... def train_fn(distributed_iterator):
    ...   for _ in tf.range(steps_per_loop):
    ...     optional_data = distributed_iterator.get_next_as_optional()
    ...     if not optional_data.has_value():
    ...       break
    ...     per_replica_results = strategy.run(step_fn, args=(optional_data.get_value(),))
    ...     tf.print(strategy.experimental_local_results(per_replica_results))
    >>> train_fn(distributed_iterator)
    ... # ([0 1], [2 3])
    ... # ([4], [])

    Returns:
      An `tf.experimental.Optional` object representing the next value from the
      `tf.distribute.DistributedIterator` (if it has one) or no value.
    """
    # pylint: enable=line-too-long
    # Abstract: concrete subclasses implement.
    raise NotImplementedError(
        "get_next_as_optional() not implemented in descendants")
@tf_export("distribute.DistributedDataset", v1=[])
class DistributedDatasetInterface(collections_abc.Iterable,
                                  distribute_types.Iterable):
  # pylint: disable=line-too-long
  """Represents a dataset distributed among devices and machines.

  A `tf.distribute.DistributedDataset` could be thought of as a "distributed"
  dataset. When you use `tf.distribute` API to scale training to multiple
  devices or machines, you also need to distribute the input data, which leads
  to a `tf.distribute.DistributedDataset` instance, instead of a
  `tf.data.Dataset` instance in the non-distributed case. In TF 2.x,
  `tf.distribute.DistributedDataset` objects are Python iterables.

  Note: `tf.distribute.DistributedDataset` instances are *not* of type
  `tf.data.Dataset`. It only supports two usages we will mention below:
  iteration and `element_spec`. We don't support any other APIs to transform or
  inspect the dataset.

  There are two APIs to create a `tf.distribute.DistributedDataset` object:
  `tf.distribute.Strategy.experimental_distribute_dataset(dataset)`and
  `tf.distribute.Strategy.experimental_distribute_datasets_from_function(dataset_fn)`.
  *When to use which?* When you have a `tf.data.Dataset` instance, and the
  regular batch splitting (i.e. re-batch the input `tf.data.Dataset` instance
  with a new batch size that is equal to the global batch size divided by the
  number of replicas in sync) and autosharding (i.e. the
  `tf.data.experimental.AutoShardPolicy` options) work for you, use the former
  API. Otherwise, if you are *not* using a canonical `tf.data.Dataset` instance,
  or you would like to customize the batch splitting or sharding, you can wrap
  these logic in a `dataset_fn` and use the latter API. Both API handles
  prefetch to device for the user. For more details and examples, follow the
  links to the APIs.

  There are two main usages of a `DistributedDataset` object:

  1. Iterate over it to generate the input for a single device or multiple
  devices, which is a `tf.distribute.DistributedValues` instance. To do this,
  you can:

    * use a pythonic for-loop construct:

      >>> global_batch_size = 4
      >>> strategy = tf.distribute.MirroredStrategy(["GPU:0", "GPU:1"])
      >>> dataset = tf.data.Dataset.from_tensors(([1.],[1.])).repeat(4).batch(global_batch_size)
      >>> dist_dataset = strategy.experimental_distribute_dataset(dataset)
      >>> @tf.function
      ... def train_step(input):
      ...   features, labels = input
      ...   return labels - 0.3 * features
      >>> for x in dist_dataset:
      ...   # train_step trains the model using the dataset elements
      ...   loss = strategy.run(train_step, args=(x,))
      ...   print("Loss is", loss)
      Loss is PerReplica:{
        0: tf.Tensor(
      [[0.7]
       [0.7]], shape=(2, 1), dtype=float32),
        1: tf.Tensor(
      [[0.7]
       [0.7]], shape=(2, 1), dtype=float32)
      }

      Placing the loop inside a `tf.function` will give a performance boost.
      However `break` and `return` are currently not supported if the loop is
      placed inside a `tf.function`. We also don't support placing the loop
      inside a `tf.function` when using
      `tf.distribute.experimental.MultiWorkerMirroredStrategy` or
      `tf.distribute.experimental.TPUStrategy` with multiple workers.

    * use `__iter__` to create an explicit iterator, which is of type
      `tf.distribute.DistributedIterator`

      >>> global_batch_size = 4
      >>> strategy = tf.distribute.MirroredStrategy(["GPU:0", "GPU:1"])
      >>> train_dataset = tf.data.Dataset.from_tensors(([1.],[1.])).repeat(50).batch(global_batch_size)
      >>> train_dist_dataset = strategy.experimental_distribute_dataset(train_dataset)
      >>> @tf.function
      ... def distributed_train_step(dataset_inputs):
      ...   def train_step(input):
      ...     loss = tf.constant(0.1)
      ...     return loss
      ...   per_replica_losses = strategy.run(train_step, args=(dataset_inputs,))
      ...   return strategy.reduce(tf.distribute.ReduceOp.SUM, per_replica_losses,axis=None)
      >>> EPOCHS = 2
      >>> STEPS = 3
      >>> for epoch in range(EPOCHS):
      ...   total_loss = 0.0
      ...   num_batches = 0
      ...   dist_dataset_iterator = iter(train_dist_dataset)
      ...   for _ in range(STEPS):
      ...     total_loss += distributed_train_step(next(dist_dataset_iterator))
      ...     num_batches += 1
      ...   average_train_loss = total_loss / num_batches
      ...   template = ("Epoch {}, Loss: {:.4f}")
      ...   print (template.format(epoch+1, average_train_loss))
      Epoch 1, Loss: 0.2000
      Epoch 2, Loss: 0.2000

      To achieve a performance improvement, you can also wrap the `strategy.run`
      call with a `tf.range` inside a `tf.function`. This runs multiple steps in a
      `tf.function`. Autograph will convert it to a `tf.while_loop` on the worker.
      However, it is less flexible comparing with running a single step inside
      `tf.function`. For example, you cannot run things eagerly or arbitrary
      python code within the steps.

  2. Inspect the `tf.TypeSpec` of the data generated by `DistributedDataset`.

    `tf.distribute.DistributedDataset` generates
    `tf.distribute.DistributedValues` as input to the devices. If you pass the
    input to a `tf.function` and would like to specify the shape and type of
    each Tensor argument to the function, you can pass a `tf.TypeSpec` object to
    the `input_signature` argument of the `tf.function`. To get the
    `tf.TypeSpec` of the input, you can use the `element_spec` property of the
    `tf.distribute.DistributedDataset` or `tf.distribute.DistributedIterator`
    object.

    For example:

    >>> global_batch_size = 4
    >>> epochs = 1
    >>> steps_per_epoch = 1
    >>> mirrored_strategy = tf.distribute.MirroredStrategy(["GPU:0", "GPU:1"])
    >>> dataset = tf.data.Dataset.from_tensors(([2.])).repeat(100).batch(global_batch_size)
    >>> dist_dataset = mirrored_strategy.experimental_distribute_dataset(dataset)
    >>> @tf.function(input_signature=[dist_dataset.element_spec])
    ... def train_step(per_replica_inputs):
    ...   def step_fn(inputs):
    ...     return tf.square(inputs)
    ...   return mirrored_strategy.run(step_fn, args=(per_replica_inputs,))
    >>> for _ in range(epochs):
    ...   iterator = iter(dist_dataset)
    ...   for _ in range(steps_per_epoch):
    ...     output = train_step(next(iterator))
    ...     print(output)
    PerReplica:{
      0: tf.Tensor(
    [[4.]
     [4.]], shape=(2, 1), dtype=float32),
      1: tf.Tensor(
    [[4.]
     [4.]], shape=(2, 1), dtype=float32)
    }

  Visit the [tutorial](https://www.tensorflow.org/tutorials/distribute/input)
  on distributed input for more examples and caveats.
  """

  def __iter__(self):
    """Creates an iterator for the `tf.distribute.DistributedDataset`.

    The returned iterator implements the Python Iterator protocol.

    Example usage:

    >>> global_batch_size = 4
    >>> strategy = tf.distribute.MirroredStrategy(["GPU:0", "GPU:1"])
    >>> dataset = tf.data.Dataset.from_tensor_slices([1, 2, 3, 4]).repeat().batch(global_batch_size)
    >>> distributed_iterator = iter(strategy.experimental_distribute_dataset(dataset))
    >>> print(next(distributed_iterator))
    PerReplica:{
      0: tf.Tensor([1 2], shape=(2,), dtype=int32),
      1: tf.Tensor([3 4], shape=(2,), dtype=int32)
    }

    Returns:
      An `tf.distribute.DistributedIterator` instance for the given
      `tf.distribute.DistributedDataset` object to enumerate over the
      distributed data.
    """
    # Abstract: concrete subclasses implement.
    raise NotImplementedError("Must be implemented in descendants")

  @property
  def element_spec(self):
    """The type specification of an element of this `tf.distribute.DistributedDataset`.

    Example usage:

    >>> global_batch_size = 16
    >>> strategy = tf.distribute.MirroredStrategy(["GPU:0", "GPU:1"])
    >>> dataset = tf.data.Dataset.from_tensors(([1.],[2])).repeat(100).batch(global_batch_size)
    >>> dist_dataset = strategy.experimental_distribute_dataset(dataset)
    >>> dist_dataset.element_spec
    (PerReplicaSpec(TensorSpec(shape=(None, 1), dtype=tf.float32, name=None),
                    TensorSpec(shape=(None, 1), dtype=tf.float32, name=None)),
     PerReplicaSpec(TensorSpec(shape=(None, 1), dtype=tf.int32, name=None),
                    TensorSpec(shape=(None, 1), dtype=tf.int32, name=None)))

    Returns:
      A nested structure of `tf.TypeSpec` objects matching the structure of an
      element of this `tf.distribute.DistributedDataset`. This returned value is
      typically a `tf.distribute.DistributedValues` object and specifies the
      `tf.TensorSpec` of individual components.
    """
    # Abstract: concrete subclasses implement.
    raise NotImplementedError(
        "DistributedDataset.element_spec must be implemented in descendants.")

  @doc_controls.do_not_generate_docs
  def reduce(self, initial_state, reduce_func):
    # Abstract: concrete subclasses implement the dataset-wide reduction.
    raise NotImplementedError(
        "DistributedDataset.reduce must be implemented in descendants.")
class InputWorkers(object):
  """A 1-to-many mapping from input worker devices to compute devices."""

  def __init__(self, worker_device_pairs):
    """Initialize an `InputWorkers` object.

    Args:
      worker_device_pairs: A sequence of pairs:
        `(input device, a tuple of compute devices fed by that input device)`.
    """
    self._worker_device_pairs = worker_device_pairs
    input_devices = []
    fed_devices = []
    for input_device, compute_devices in self._worker_device_pairs:
      input_devices.append(input_device)
      # compute devices are canonicalized once, up front
      fed_devices.append(
          tuple(device_util.canonicalize(d) for d in compute_devices))
    self._input_worker_devices = tuple(input_devices)
    self._fed_devices = tuple(fed_devices)

  @property
  def num_workers(self):
    """Number of input workers in this mapping."""
    return len(self._input_worker_devices)

  @property
  def worker_devices(self):
    """Tuple of input worker device names, in construction order."""
    return self._input_worker_devices

  def compute_devices_for_worker(self, worker_index):
    """Returns the canonicalized compute devices fed by the given worker."""
    return self._fed_devices[worker_index]

  def __repr__(self):
    entries = []
    for idx, device in enumerate(self.worker_devices):
      entries.append(" %d %s: %s" % (idx, device, self._fed_devices[idx]))
    return "%s:{\n%s}" % (self.__class__.__name__, ",\n".join(entries))

  def serialize(self):
    """Returns the constructor argument, suitable for re-creation."""
    return self._worker_device_pairs

  def deserialize(self, worker_device_pairs):
    """Builds a fresh `InputWorkers` from serialized device pairs."""
    return InputWorkers(worker_device_pairs)
def _get_next_as_optional(iterator, strategy, return_per_replica=False):
  """Returns an empty dataset indicator and the next input from the iterator.

  Args:
    iterator: a DistributedIterator object.
    strategy: the `tf.distribute.Strategy` instance.
    return_per_replica: a boolean. If True, the returned data will be wrapped
      with `PerReplica` structure. Otherwise it is a 2D
      num_input_workers*num_replicas_per_worker list.

  Returns:
    A tuple (a boolean tensor indicating whether the next batch has value
    globally, data from all replicas).
  """
  replicas = []
  worker_has_values = []
  worker_devices = []
  for i, worker in enumerate(iterator._input_workers.worker_devices):  # pylint: disable=protected-access
    with ops.device(worker):
      worker_has_value, next_element = (
          iterator._iterators[i].get_next_as_list())  # pylint: disable=protected-access
      # Collective all-reduce requires explicit devices for inputs.
      with ops.device("/cpu:0"):
        # Converting to integers for all-reduce.
        worker_has_value = math_ops.cast(worker_has_value, dtypes.int64)
        worker_devices.append(worker_has_value.device)
        worker_has_values.append(worker_has_value)
      # Make `replicas` a flat list of values across all replicas.
      replicas.append(next_element)

  if return_per_replica:
    # Flatten the per-worker lists into one list across all replicas,
    # then regroup into a PerReplica structure.
    flattened_data = []
    for per_worker_data in replicas:
      flattened_data.extend(per_worker_data)
    replicas = distribute_utils.regroup(flattened_data)

  # Run an all-reduce to see whether any worker has values.
  # TODO(b/131423105): we should be able to short-cut the all-reduce in some
  # cases.
  if getattr(strategy.extended, "_support_per_replica_values", True):
    # `reduce` expects a `PerReplica`, so we pass it one, even
    # though it doesn't actually have a value per replica
    worker_has_values = values.PerReplica(worker_has_values)
    global_has_value = strategy.reduce(
        reduce_util.ReduceOp.SUM, worker_has_values, axis=None)
  else:
    # Strategy without PerReplica support implies a single worker here.
    assert len(worker_has_values) == 1
    global_has_value = worker_has_values[0]
  # Collapse to a scalar boolean: non-zero sum means some worker has data.
  global_has_value = array_ops.reshape(
      math_ops.cast(global_has_value, dtypes.bool), [])
  return global_has_value, replicas
def _is_statically_shaped(tensor_class, shape):
  """Test if an iterator output is statically shaped.

  For sparse and ragged tensors this only tests the batch dimension.

  Args:
    tensor_class: a class from an iterator.output_classes list.
    shape: a TensorShape from an iterator.output_shapes list.

  Returns:
    True if the shape is static, false otherwise.
  """
  is_sparse = tensor_class == sparse_tensor.SparseTensor
  is_ragged = isinstance(tensor_class, ragged_tensor.RaggedTensorSpec)
  if is_sparse or is_ragged:
    # Only the leading (batch) dimension matters for these composite
    # tensors: when a dataset batches them, only that dimension is set.
    batch_dim_unknown = shape.rank > 0 and shape.as_list()[0] is None
    return not batch_dim_unknown
  return shape.is_fully_defined()
def _get_static_shape(iterators):
  """Returns a boolean indicating if the input is fully defined.

  Args:
    iterators: a list of per-worker iterator objects; entries that are not
      `_SingleWorkerOwnedDatasetIterator` / `_SingleWorkerDatasetIterator`
      instances are ignored.

  Returns:
    True if every inspected output shape is static, False otherwise.
  """
  for iterator in iterators:
    if not isinstance(iterator, (_SingleWorkerOwnedDatasetIterator,
                                 _SingleWorkerDatasetIterator)):
      continue
    flattened = zip(nest.flatten(iterator.output_shapes),
                    nest.flatten(iterator.output_classes))
    for output_shape, output_class in flattened:
      if not _is_statically_shaped(output_class, output_shape):
        # One dynamic shape decides the answer; the original version only
        # broke out of the inner loop and kept scanning remaining
        # iterators for no benefit.  Returning here gives the same result
        # without the extra work.
        return False
  return True
class DistributedIteratorBase(DistributedIteratorInterface):
  """Common implementation for all input iterators."""

  # pylint: disable=super-init-not-called
  def __init__(self, input_workers, iterators, strategy):
    # Whether every iterator output shape is fully defined; dynamic shapes
    # force the get_next_as_optional code path below.
    static_shape = _get_static_shape(iterators)

    # TODO(b/133073708): we currently need a flag to control the usage because
    # there is a performance difference between get_next() and
    # get_next_as_optional(). And we only enable get_next_as_optional when the
    # output shapes are not static.
    #
    # TODO(rxsang): We want to always enable the get_next_as_optional behavior
    # when user passed input_fn instead of dataset.
    if getattr(
        strategy.extended, "experimental_enable_get_next_as_optional", False):
      self._enable_get_next_as_optional = (
          not static_shape) or strategy.extended._in_multi_worker_mode()
    else:
      self._enable_get_next_as_optional = False

    assert isinstance(input_workers, InputWorkers)
    if not input_workers.worker_devices:
      raise ValueError("Should have at least one worker for input iterator.")

    self._iterators = iterators
    self._input_workers = input_workers
    self._strategy = strategy

  def next(self):
    # Python 2 iterator protocol alias for __next__.
    return self.__next__()

  def __next__(self):
    try:
      return self.get_next()
    except errors.OutOfRangeError:
      # Translate TF's end-of-sequence error into the Python protocol.
      raise StopIteration

  def __iter__(self):
    return self

  def get_next_as_optional(self):
    """Wraps the next element (or lack thereof) in a tf.experimental.Optional."""
    global_has_value, replicas = _get_next_as_optional(
        self, self._strategy, return_per_replica=True)

    def return_none():
      # Empty optional typed to match this iterator's element spec.
      return optional_ops.Optional.empty(self._element_spec)

    return control_flow_ops.cond(
        global_has_value, lambda: optional_ops.Optional.from_value(replicas),
        return_none)

  def get_next(self, name=None):
    """Returns the next input from the iterator for all replicas."""
    if not self._enable_get_next_as_optional:
      # Fast path: shapes are static, so get_next() can be used directly.
      replicas = []
      for i, worker in enumerate(self._input_workers.worker_devices):
        if name is not None:
          d = tf_device.DeviceSpec.from_string(worker)
          new_name = "%s_%s_%d" % (name, d.job, d.task)
        else:
          new_name = None
        with ops.device(worker):
          # Make `replicas` a flat list of values across all replicas.
          replicas.extend(
              self._iterators[i].get_next_as_list_static_shapes(new_name))
      return distribute_utils.regroup(replicas)

    out_of_range_replicas = []
    def out_of_range_fn(worker_index, device):
      """This function will throw an OutOfRange error."""
      # As this will be only called when there is no data left, so calling
      # get_next() will trigger an OutOfRange error.
      data = self._iterators[worker_index].get_next(device)
      out_of_range_replicas.append(data)
      return data

    global_has_value, replicas = _get_next_as_optional(
        self, self._strategy, return_per_replica=False)
    results = []
    for i, worker in enumerate(self._input_workers.worker_devices):
      with ops.device(worker):
        devices = self._input_workers.compute_devices_for_worker(i)
        for j, device in enumerate(devices):
          with ops.device(device):
            # pylint: disable=undefined-loop-variable
            # pylint: disable=cell-var-from-loop
            # It is fine for the lambda to capture variables from the loop as
            # the lambda is executed in the loop as well.
            result = control_flow_ops.cond(
                global_has_value,
                lambda: replicas[i][j],
                lambda: out_of_range_fn(i, device),
                strict=True,
            )
            # pylint: enable=cell-var-from-loop
            # pylint: enable=undefined-loop-variable
            results.append(result)
    replicas = results

    return distribute_utils.regroup(replicas)
class DistributedIteratorV1(DistributedIteratorBase):
  """Input Iterator for a distributed dataset."""

  # We need a private initializer method for re-initializing multidevice
  # iterators when used with Keras training loops. If we don't reinitialize the
  # iterator we run into memory leak issues (b/123315763).
  @property
  def _initializer(self):
    # Group the per-worker initializer ops into a single op.
    init_ops = []
    for it in self._iterators:
      init_ops.extend(it.initialize())
    return control_flow_ops.group(init_ops)

  @deprecated(None, "Use the iterator's `initializer` property instead.")
  def initialize(self):
    """Initialize underlying iterators.

    Returns:
      A list of any initializer ops that should be run.
    """
    return self._initializer

  @property
  def initializer(self):
    """Returns a list of ops that initialize the iterator."""
    return self.initialize()

  # TODO(priyag): Remove when we switch to using `MultiDeviceIterator` for
  # TPUs.
  @property
  def output_classes(self):
    # Legacy structure API; the first per-worker iterator is representative.
    return self._iterators[0].output_classes

  # TODO(priyag): Remove when we switch to using `MultiDeviceIterator` for
  # TPUs.
  @property
  def output_shapes(self):
    return self._iterators[0].output_shapes

  # TODO(priyag): Remove when we switch to using `MultiDeviceIterator` for
  # TPUs.
  @property
  def output_types(self):
    return self._iterators[0].output_types

  # TODO(priyag): Remove when we switch to using `MultiDeviceIterator` for
  # TPUs.
  def get_iterator(self, worker):
    """Returns the per-worker iterator for device string `worker`, else None."""
    for i, w in enumerate(self._input_workers.worker_devices):
      if worker == w:
        return self._iterators[i]
    return None

  @property
  def element_spec(self):
    """The type specification of an element of this iterator."""
    return self._element_spec
class DistributedIteratorSpec(type_spec.TypeSpec):
  """Type specification for `DistributedIterator`."""

  __slots__ = ["_input_workers", "_element_spec", "_strategy"]

  def __init__(self, input_workers, element_spec, strategy):
    # We don't want to allow deserialization of this class because we don't
    # serialize the strategy object. Currently the only places where
    # _deserialize is called is when we save/restore using SavedModels.
    if isinstance(input_workers, tuple):
      raise NotImplementedError("DistributedIteratorSpec does not have support "
                                "for deserialization.")
    else:
      self._input_workers = input_workers
      self._element_spec = element_spec
      self._strategy = strategy

  @property
  def value_type(self):
    # The Python type this spec describes.
    return DistributedIterator

  def _serialize(self):
    # We cannot serialize the strategy object so we convert it to an id that we
    # can use for comparison.
    return (self._input_workers.serialize(),
            self._element_spec, id(self._strategy))

  def _deserialize(self):
    raise ValueError("Deserialization is currently unsupported for "
                     "DistributedIteratorSpec.")

  # Overriding this method so that we can merge and reconstruct the spec object
  def most_specific_compatible_type(self, other):
    """Returns the most specific TypeSpec compatible with `self` and `other`.

    Args:
      other: A `TypeSpec`.

    Raises:
      ValueError: If there is no TypeSpec that is compatible with both `self`
        and `other`.
    """
    # pylint: disable=protected-access
    if type(self) is not type(other):
      raise ValueError("No TypeSpec is compatible with both %s and %s" %
                       (self, other))
    if self._input_workers.serialize() != other._input_workers.serialize():
      raise ValueError("_input_workers is not compatible with both %s "
                       "and %s" % (self, other))
    if self._strategy is not other._strategy:
      raise ValueError("tf.distribute strategy is not compatible with both %s "
                       "and %s" % (self, other))
    # Merge the element specs component-wise.
    element_spec = nest.map_structure(
        lambda a, b: a.most_specific_compatible_type(b), self._element_spec,
        other._element_spec)
    return DistributedIteratorSpec(self._input_workers, element_spec,
                                   self._strategy)

  @property
  def _component_specs(self):
    # One `_SingleWorkerDatasetIteratorSpec` per (input device, compute
    # devices) pair, with per-replica specs narrowed to worker `i`'s slice.
    specs = []
    worker_device_pairs = self._input_workers._worker_device_pairs  # pylint: disable=protected-access

    for i, (input_device, compute_devices) in enumerate(worker_device_pairs):
      element_spec = nest.map_structure(
          functools.partial(_replace_per_replica_spec, i=i), self._element_spec)
      specs.append(_SingleWorkerDatasetIteratorSpec(input_device,
                                                    compute_devices,
                                                    element_spec))
    return specs

  def _to_components(self, value):
    # The components of a DistributedIterator are its per-worker iterators.
    return value._iterators  # pylint: disable=protected-access

  def _from_components(self, components):
    # Rebuild a `DistributedIterator` from its per-worker iterator components.
    return DistributedIterator(input_workers=self._input_workers,
                               iterators=None,
                               components=components,
                               element_spec=self._element_spec,
                               strategy=self._strategy)

  @staticmethod
  def from_value(value):
    """Builds a spec describing the given `DistributedIterator`."""
    # pylint: disable=protected-access
    return DistributedIteratorSpec(value._input_workers, value._element_spec,
                                   value._strategy)

  def _with_tensor_ranks_only(self):
    # Relax the element spec so only tensor ranks (not dimension sizes) are
    # retained.
    element_spec = nest.map_structure(
        lambda s: s._with_tensor_ranks_only(),  # pylint: disable=protected-access
        self._element_spec)
    return DistributedIteratorSpec(self._input_workers, element_spec,
                                   self._strategy)
class DistributedIterator(DistributedIteratorBase,
                          composite_tensor.CompositeTensor):
  """Input Iterator for a distributed dataset."""

  def __init__(self, input_workers=None, iterators=None, strategy=None,
               components=None, element_spec=None):
    """Creates a distributed iterator.

    Either `iterators` (the normal construction path) or both `components` and
    `element_spec` (reconstruction from a `DistributedIteratorSpec`) must be
    provided. `input_workers` is required on both paths.

    Args:
      input_workers: an `InputWorkers` object.
      iterators: per-worker iterators wrapped by this distributed iterator.
      strategy: a `tf.distribute.Strategy` object.
      components: tensor components to construct the iterator from.
      element_spec: a nested structure of `TypeSpec` objects that represents
        the type specification of elements of the iterator.

    Raises:
      ValueError: if the combination of arguments is invalid.
    """
    if input_workers is None:
      raise ValueError("`input_workers` should be "
                       "provided.")

    # The mutually-exclusive pair is `iterators` vs. `components`/
    # `element_spec`; `input_workers` is validated separately above.
    error_message = ("Either `iterators` or "
                     "both `components` and `element_spec` need to be "
                     "provided.")

    if iterators is None:
      # Reconstruction path (e.g. from a TypeSpec): adopt the supplied
      # components directly instead of building new per-worker iterators.
      if (components is None or element_spec is None):
        raise ValueError(error_message)
      self._element_spec = element_spec
      self._input_workers = input_workers
      self._iterators = components
      static_shape = _get_static_shape(self._iterators)
      self._strategy = strategy
      # Mirrors the flag logic in `DistributedIteratorBase.__init__`, which is
      # bypassed on this path because super().__init__ is not called.
      if getattr(strategy.extended,
                 "experimental_enable_get_next_as_optional", False):
        self._enable_get_next_as_optional = (
            not static_shape) or strategy.extended._in_multi_worker_mode()
      else:
        self._enable_get_next_as_optional = False
    else:
      if (components is not None and element_spec is not None):
        raise ValueError(error_message)
      super(DistributedIterator, self).__init__(input_workers, iterators,
                                                strategy)

  @property
  def element_spec(self):
    """The type specification of an element of this iterator."""
    return self._element_spec

  @property
  def _type_spec(self):
    # CompositeTensor hook: describes this iterator for tracing/serialization.
    return DistributedIteratorSpec(self._input_workers,
                                   self.element_spec,
                                   self._strategy)
class _IterableInput(DistributedDatasetInterface):
  """Base class for iterable inputs for distribution strategies."""

  # pylint: disable=super-init-not-called
  def __init__(self, input_workers):
    assert isinstance(input_workers, InputWorkers)
    self._input_workers = input_workers

  def __iter__(self):
    raise NotImplementedError("must be implemented in descendants")

  def reduce(self, initial_state, reduce_fn):
    """Execute a `reduce_fn` over all the elements of the input."""
    iterator = iter(self)
    has_data, data = _get_next_as_optional(
        iterator, self._strategy, return_per_replica=True)

    def while_cond(has_data, data, state):
      """Keep looping while some replica still produced an element."""
      del data, state  # Unused.
      return has_data

    def while_body(has_data, data, state):
      """Folds `data` into `state`, then fetches the next element."""
      del has_data  # Unused.
      new_state = reduce_fn(state, data)
      has_data, data = _get_next_as_optional(
          iterator, self._strategy, return_per_replica=True)
      return has_data, data, new_state

    has_data, data, final_state = control_flow_ops.while_loop(
        while_cond, while_body, [has_data, data, initial_state],
        parallel_iterations=1)
    return final_state
class DistributedDataset(_IterableInput):
  """Distributed dataset that supports prefetching to multiple devices."""

  def __init__(self,
               dataset,
               input_workers,
               strategy,
               split_batch_by=None,
               input_context=None):
    """Distribute the dataset on all workers.

    If `split_batch_by` is not None, we "split" each batch of the dataset by
    `split_batch_by` value.

    Args:
      dataset: `tf.data.Dataset` that will be used as the input source.
      input_workers: an `InputWorkers` object.
      strategy: a `tf.distribute.Strategy` object, used to run all-reduce to
        handle last partial batch.
      split_batch_by: Optional integer. If present, we "split" each batch of
        the dataset by `split_batch_by` value.
      input_context: `InputContext` for sharding. Only pass this in for between
        graph multi-worker cases where there is only one `input_worker`. In
        these cases, we will shard based on the `input_pipeline_id` and
        `num_input_pipelines` in the `InputContext`.
    """
    super(DistributedDataset, self).__init__(input_workers=input_workers)

    # We clone and shard the dataset on each worker. The current setup tries to
    # shard the dataset by files if possible so that each worker sees a
    # different subset of files. If that is not possible, will attempt to shard
    # the final input such that each worker will run the entire preprocessing
    # pipeline and only receive its own shard of the dataset.
    if split_batch_by:
      try:
        # pylint: disable=protected-access
        with ops.colocate_with(dataset._variant_tensor):
          dataset = distribute._LegacyRebatchDataset(dataset, split_batch_by)
          # Add a prefetch to pipeline rebatching for performance.
          # TODO(rachelim): Instead of inserting an extra prefetch stage here,
          # leverage static graph rewrites to insert _RebatchDataset before
          # the final `prefetch` if it exists.
          dataset = dataset.prefetch(split_batch_by)
      except errors.InvalidArgumentError as e:
        if "without encountering a batch" in str(e):
          # Fix: the original message read "Please the ... guide", dropping
          # the verb; it now reads "Please see the ... guide".
          six.reraise(
              ValueError,
              ValueError(
                  "Call the `batch` method on the input Dataset in order to be "
                  "able to split your input across {} replicas.\n Please see "
                  "the tf.distribute.Strategy guide. {}".format(
                      split_batch_by, e)),
              sys.exc_info()[2])
        else:
          raise

    self._cloned_datasets = []
    if input_context:
      # Between-graph where we rely on the input_context for sharding
      assert input_workers.num_workers == 1
      dataset = input_ops.auto_shard_dataset(dataset,
                                             input_context.num_input_pipelines,
                                             input_context.input_pipeline_id)
      self._cloned_datasets.append(dataset)
    else:
      # Replicate the dataset onto each worker and auto-shard each replica so
      # every worker receives a disjoint slice of the input.
      replicated_ds = distribute.replicate(dataset,
                                           input_workers.worker_devices)
      for i, worker in enumerate(input_workers.worker_devices):
        with ops.device(worker):
          cloned_dataset = replicated_ds[worker]
          cloned_dataset = cloned_dataset.with_options(dataset.options())
          cloned_dataset = input_ops.auto_shard_dataset(
              cloned_dataset, len(input_workers.worker_devices), i)
          self._cloned_datasets.append(cloned_dataset)

    self._input_workers = input_workers
    self._strategy = strategy
    self._element_spec = _create_distributed_tensor_spec(self._strategy,
                                                         dataset.element_spec)  # pylint: disable=protected-access

  def __iter__(self):
    if not (context.executing_eagerly() or
            ops.get_default_graph().building_function):
      raise RuntimeError("__iter__() is only supported inside of tf.function "
                         "or when eager execution is enabled.")

    # This is an optional flag that can be used to turn off using
    # OwnedMultiDeviceIterators and instead use the legacy MultiDeviceIterators
    # as a stop gap solution that will allow us to roll out this change.
    enable_legacy_iterators = getattr(self._strategy,
                                      "_enable_legacy_iterators", False)
    worker_iterators = _create_iterators_per_worker(self._cloned_datasets,
                                                    self._input_workers,
                                                    enable_legacy_iterators)
    if enable_legacy_iterators:
      iterator = DistributedIteratorV1(self._input_workers, worker_iterators,
                                       self._strategy)
    else:
      iterator = DistributedIterator(self._input_workers, worker_iterators,
                                     self._strategy)
    iterator._element_spec = self.element_spec  # pylint: disable=protected-access

    # When async eager is enabled, sometimes the iterator may not finish
    # initialization before passing to a multi device function, add a sync
    # point here to make sure all underlying iterators are initialized.
    if context.executing_eagerly():
      context.async_wait()

    return iterator

  @property
  def element_spec(self):
    """The type specification of an element of this dataset."""
    return self._element_spec
class DistributedDatasetV1(DistributedDataset):
  """Distributed dataset that supports prefetching to multiple devices."""

  def __init__(self,
               dataset,
               input_workers,
               strategy,
               split_batch_by=None,
               input_context=None):
    self._input_workers = input_workers
    super(DistributedDatasetV1, self).__init__(
        dataset,
        input_workers,
        strategy,
        split_batch_by=split_batch_by,
        input_context=input_context)

  def __iter__(self):
    if not (ops.executing_eagerly_outside_functions() or
            ops.get_default_graph().building_function):
      raise RuntimeError("__iter__() is only supported inside of tf.function "
                         "or when eager execution is enabled.")
    return self._get_iterator()

  def make_one_shot_iterator(self):
    """Get a one time use iterator for DistributedDatasetV1.

    Note: This API is deprecated. Please use `for ... in dataset:` to iterate
    over the dataset or `iter` to create an iterator.

    Returns:
      A DistributedIteratorV1 instance.
    """
    return self._make_one_shot_iterator()

  def make_initializable_iterator(self):
    """Get an initializable iterator for DistributedDatasetV1.

    Note: This API is deprecated. Please use
    `tf.compat.v1.data.make_initializable_iterator(dataset)` to create an
    initializable iterator.

    Returns:
      A DistributedIteratorV1 instance.
    """
    return self._make_initializable_iterator()

  def _make_one_shot_iterator(self):
    """Get an iterator for DistributedDatasetV1."""
    # Graph mode with one shot iterator is disabled because we have to call
    # `initialize` on the iterator which is only required if we are using a
    # tf.distribute strategy.
    if not context.executing_eagerly():
      raise ValueError("Cannot create a one shot iterator. Please use "
                       "`make_initializable_iterator()` instead.")
    return self._get_iterator()

  def _make_initializable_iterator(self, shared_name=None):  # pylint: disable=unused-argument
    """Get an initializable iterator for DistributedDatasetV1."""
    # Eager mode generates already initialized iterators. Hence we cannot
    # create an initializable iterator.
    if context.executing_eagerly():
      raise ValueError("Cannot create initializable iterator in Eager mode. "
                       "Please use `iter()` instead.")
    return self._get_iterator()

  def _get_iterator(self):
    """Builds a legacy `DistributedIteratorV1` over the per-worker clones."""
    worker_iterators = _create_iterators_per_worker(
        self._cloned_datasets, self._input_workers, True)
    iterator = DistributedIteratorV1(self._input_workers, worker_iterators,
                                     self._strategy)
    iterator._element_spec = self.element_spec  # pylint: disable=protected-access

    # When async eager is enabled, sometimes the iterator may not finish
    # initialization before passing to a multi device function, add a sync
    # point here to make sure all underlying iterators are initialized.
    if context.executing_eagerly():
      context.async_wait()
    return iterator
# TODO(priyag): Add other replication modes.
class DistributedDatasetsFromFunction(_IterableInput):
  """Inputs created from dataset function."""

  def __init__(self, dataset_fn, input_workers, input_contexts, strategy):
    """Makes an iterable from datasets created by the given function.

    Args:
      dataset_fn: A function that returns a `Dataset` given an `InputContext`.
      input_workers: an `InputWorkers` object.
      input_contexts: A list of `InputContext` instances to be passed to call(s)
        to `dataset_fn`. Length and order should match worker order in
        `worker_device_pairs`.
      strategy: a `tf.distribute.Strategy` object, used to run all-reduce to
        handle last partial batch.
    """
    super(DistributedDatasetsFromFunction, self).__init__(
        input_workers=input_workers)

    if input_workers.num_workers != len(input_contexts):
      raise ValueError(
          "Number of input workers (%d) is not same as number of "
          "input_contexts (%d)" %
          (input_workers.num_workers, len(input_contexts)))

    self._input_workers = input_workers
    self._input_contexts = input_contexts
    self._strategy = strategy
    # One dataset per worker, each built by calling `dataset_fn` with that
    # worker's `InputContext`.
    self._datasets, element_spec = (
        _create_datasets_per_worker_with_input_context(self._input_contexts,
                                                       self._input_workers,
                                                       dataset_fn))
    self._element_spec = _create_distributed_tensor_spec(
        self._strategy, element_spec)

  def __iter__(self):
    if (ops.executing_eagerly_outside_functions() or
        ops.get_default_graph().building_function):
      # This is an optional flag that can be used to turn off using
      # OwnedMultiDeviceIterators and instead use the legacy
      # MultiDeviceIterators as a stop gap solution that will allow us to roll
      # out this change.
      enable_legacy_iterators = getattr(self._strategy,
                                        "_enable_legacy_iterators", False)
      iterators = _create_iterators_per_worker(self._datasets,
                                               self._input_workers,
                                               enable_legacy_iterators)
      if enable_legacy_iterators:
        iterator = DistributedIteratorV1(self._input_workers, iterators,
                                         self._strategy)
      else:
        iterator = DistributedIterator(self._input_workers, iterators,
                                       self._strategy)
      iterator._element_spec = self._element_spec  # pylint: disable=protected-access

      # When async eager is enabled, sometimes the iterator may not finish
      # initialization before passing to a multi device function, add a sync
      # point here to make sure all underlying iterators are initialized.
      if context.executing_eagerly():
        context.async_wait()

      return iterator

    raise RuntimeError("__iter__() is only supported inside of tf.function "
                       "or when eager execution is enabled.")

  @property
  def element_spec(self):
    """The type specification of an element of this dataset."""
    return self._element_spec
class DistributedDatasetsFromFunctionV1(DistributedDatasetsFromFunction):
  """Inputs created from dataset function."""

  def __iter__(self):
    if not (ops.executing_eagerly_outside_functions() or
            ops.get_default_graph().building_function):
      raise RuntimeError("__iter__() is only supported inside of tf.function "
                         "or when eager execution is enabled.")
    return self._get_iterator()

  def _make_initializable_iterator(self, shared_name=None):
    """Get an initializable iterator for DistributedDatasetsFromFunctionV1."""
    del shared_name  # Unused
    # Eager mode generates already initialized iterators. Hence we cannot
    # create an initializable iterator.
    if context.executing_eagerly():
      raise ValueError("Cannot create initializable iterator in Eager mode. "
                       "Please use `iter()` instead.")
    return self._get_iterator()

  def _make_one_shot_iterator(self):
    """Get an iterator for iterating over DistributedDatasetsFromFunctionV1."""
    # Graph mode with one shot iterator is disabled because we have to call
    # `initialize` on the iterator which is only required if we are using a
    # tf.distribute strategy.
    if not context.executing_eagerly():
      raise ValueError("Cannot create a one shot iterator. Please use "
                       "`make_initializable_iterator()` instead.")
    return self._get_iterator()

  def _get_iterator(self):
    """Builds a legacy `DistributedIteratorV1` over the per-worker datasets."""
    iterators = _create_iterators_per_worker(
        self._datasets, self._input_workers, True)
    iterator = DistributedIteratorV1(self._input_workers, iterators,
                                     self._strategy)
    iterator._element_spec = self._element_spec  # pylint: disable=protected-access

    # When async eager is enabled, sometimes the iterator may not finish
    # initialization before passing to a multi device function, add a sync
    # point here to make sure all underlying iterators are initialized.
    if context.executing_eagerly():
      context.async_wait()
    return iterator
# TODO(anjalisridhar): This class will soon be removed in favor of newer
# APIs.
class InputFunctionIterator(DistributedIteratorV1):
  """Iterator created from input function."""

  def __init__(self, input_fn, input_workers, input_contexts, strategy):
    """Make an iterator for input provided via an input function.

    Currently implements PER_WORKER mode, in which the `input_fn` is called
    once on each worker.

    TODO(priyag): Add other replication modes.

    Args:
      input_fn: Input function that returns a `tf.data.Dataset` object.
      input_workers: an `InputWorkers` object.
      input_contexts: A list of `InputContext` instances to be passed to call(s)
        to `input_fn`. Length and order should match worker order in
        `worker_device_pairs`.
      strategy: a `tf.distribute.Strategy` object, used to run all-reduce to
        handle last partial batch.
    """
    assert isinstance(input_workers, InputWorkers)
    if input_workers.num_workers != len(input_contexts):
      raise ValueError(
          "Number of input workers (%d) is not same as number of "
          "input_contexts (%d)" %
          (input_workers.num_workers, len(input_contexts)))

    iterators = []
    for i, ctx in enumerate(input_contexts):
      worker = input_workers.worker_devices[i]
      with ops.device(worker):
        # `input_fn` may return either a dataset (wrapped in a per-worker
        # dataset iterator) or a callable yielding per-device values.
        result = input_fn(ctx)
        devices = input_workers.compute_devices_for_worker(i)
        if isinstance(result, dataset_ops.DatasetV2):
          iterator = _SingleWorkerDatasetIterator(result, worker, devices)
        elif callable(result):
          iterator = _SingleWorkerCallableIterator(result, worker, devices)
        else:
          raise ValueError(
              "input_fn must return a tf.data.Dataset or a callable.")
        iterators.append(iterator)

    super(InputFunctionIterator, self).__init__(input_workers, iterators,
                                                strategy)
    # The Optional-based get_next path is not used for input_fn-based
    # iterators; force the static fast path.
    self._enable_get_next_as_optional = False
# TODO(anjalisridhar): This class will soon be removed and users should move
# to using DistributedIterator.
class DatasetIterator(DistributedIteratorV1):
  """Iterator created from input dataset."""

  def __init__(self,
               dataset,
               input_workers,
               strategy,
               split_batch_by=None,
               input_context=None):
    """Make an iterator for the dataset on given devices.

    If `split_batch_by` is not None, we "split" each batch of the
    dataset by `split_batch_by` value.

    Args:
      dataset: `tf.data.Dataset` that will be used as the input source.
      input_workers: an `InputWorkers` object.
      strategy: a `tf.distribute.Strategy` object, used to run all-reduce to
        handle last partial batch.
      split_batch_by: Optional integer. If present, we "split" each batch of
        the dataset by `split_batch_by` value.
      input_context: `InputContext` for sharding. Only pass this in for between
        graph multi-worker cases where there is only one `input_worker`. In
        these cases, we will shard based on the `input_pipeline_id` and
        `num_input_pipelines` in the `InputContext`.
    """
    # Reuse the V1 distributed dataset machinery to clone/shard the dataset,
    # then build legacy (initializable) per-worker iterators from the clones.
    dist_dataset = DistributedDatasetV1(
        dataset,
        input_workers,
        strategy,
        split_batch_by=split_batch_by,
        input_context=input_context)
    worker_iterators = _create_iterators_per_worker(
        dist_dataset._cloned_datasets, input_workers, True)  # pylint: disable=protected-access
    super(DatasetIterator, self).__init__(
        input_workers,
        worker_iterators,  # pylint: disable=protected-access
        strategy)
    self._element_spec = dist_dataset.element_spec
def _dummy_tensor_fn(value_structure):
  """A function to create dummy tensors from `value_structure`."""

  def create_dummy_tensor(spec):
    """Create a dummy tensor with possible batch dimensions set to 0."""
    is_ragged = isinstance(spec, ragged_tensor.RaggedTensorSpec)
    if is_ragged:
      # Splice out the ragged dimensions.
      # pylint: disable=protected-access
      feature_shape = spec._shape[:1].concatenate(
          spec._shape[(1 + spec._ragged_rank):])
      feature_type = spec._dtype
      # pylint: enable=protected-access
    else:
      feature_shape = spec.shape
      feature_type = spec.dtype

    # Ideally we should set the batch dimension to 0, however as in
    # DistributionStrategy we don't know the batch dimension, we try to
    # guess it as much as possible. If the feature has unknown dimensions, we
    # will set them to 0. If the feature shape is already static, we guess the
    # first dimension as batch dimension and set it to 0.
    if feature_shape:
      dims = [0 if dim is None else dim for dim in feature_shape.as_list()]
    else:
      dims = []
    if dims and (is_ragged or feature_shape.is_fully_defined()):
      dims[0] = tensor_shape.Dimension(0)

    if isinstance(spec, sparse_tensor.SparseTensorSpec):
      return sparse_tensor.SparseTensor(
          values=array_ops.zeros(0, feature_type),
          indices=array_ops.zeros((0, len(dims)), dtypes.int64),
          dense_shape=dims)

    # Create the dummy tensor.
    dummy_tensor = array_ops.zeros(tensor_shape.TensorShape(dims), feature_type)
    if is_ragged:
      # Reinsert the ragged dimensions with size 0.
      # pylint: disable=protected-access
      row_splits = array_ops.zeros(1, spec._row_splits_dtype)
      dummy_tensor = ragged_tensor.RaggedTensor.from_nested_row_splits(
          dummy_tensor, (row_splits,) * spec._ragged_rank, validate=False)
      # pylint: enable=protected-access
    return dummy_tensor

  return nest.map_structure(create_dummy_tensor, value_structure)
def _recover_shape_fn(data, value_structure):
  """Recover the shape of `data` the same as shape of `value_structure`."""
  flat_data = nest.flatten(data)
  flat_specs = nest.flatten(value_structure)
  for i, spec in enumerate(flat_specs):
    # Copy the static shapes of the spec's components onto the corresponding
    # component tensors of this data element.
    element_components = nest.flatten(flat_data[i], expand_composites=True)
    spec_components = nest.flatten(spec, expand_composites=True)
    for target, source in zip(element_components, spec_components):
      target.set_shape(source.shape)
    # `SparseTensor` shape is not determined by the shape of its component
    # tensors. Rather, its shape depends on a tensor's values.
    if isinstance(spec, sparse_tensor.SparseTensorSpec) and spec.shape:
      dense_shape = spec.shape
      with ops.device(flat_data[i].op.device):
        # For partially defined shapes, fill in missing values from tensor.
        if not dense_shape.is_fully_defined():
          dense_shape = array_ops.stack([
              flat_data[i].dense_shape[j] if dim is None else dim
              for j, dim in enumerate(dense_shape.as_list())
          ])
        flat_data[i] = sparse_tensor.SparseTensor(
            indices=flat_data[i].indices,
            values=flat_data[i].values,
            dense_shape=dense_shape)
  return nest.pack_sequence_as(data, flat_data)
class _SingleWorkerDatasetIteratorBase(object):
  """Iterator for a single `tf.data.Dataset`."""

  def __init__(self, dataset, worker, devices):
    """Create iterator for the `dataset` to fetch data to worker's `devices` .

    A `MultiDeviceIterator` or `OwnedMultiDeviceIterator` is used to prefetch
    input to the devices on the given worker.

    Args:
      dataset: A `tf.data.Dataset` instance.
      worker: Worker on which ops should be created.
      devices: Distribute data from `dataset` to these devices.
    """
    self._dataset = dataset
    self._worker = worker
    self._devices = devices
    self._element_spec = dataset.element_spec
    # Subclasses decide which multi-device iterator implementation to build.
    self._make_iterator()

  def _make_iterator(self):
    raise NotImplementedError("must be implemented in descendants")

  def get_next(self, device, name=None):
    """Get next element for the given device."""
    del name
    with ops.device(self._worker):
      return self._iterator.get_next(device)

  def get_next_as_list_static_shapes(self, name=None):
    """Get next element from the underlying iterator.

    Runs the iterator get_next() within a device scope. Since this doesn't use
    get_next_as_optional(), it is considerably faster than get_next_as_list()
    (but can only be used when the shapes are static).

    Args:
      name: not used.

    Returns:
      A list consisting of the next data from each device.
    """
    del name
    with ops.device(self._worker):
      return self._iterator.get_next()

  def get_next_as_list(self, name=None):
    """Get next element from underlying iterator.

    If there is no data left, a list of dummy tensors with possible batch
    dimensions set to 0 will be returned. Use of get_next_as_optional() and
    extra logic adds overhead compared to get_next_as_list_static_shapes(), but
    allows us to handle non-static shapes.

    Args:
      name: not used.

    Returns:
      A boolean tensor indicates whether there is any data in next element and
      the real data as the next element or a list of dummy tensors if no data
      left.
    """
    del name
    with ops.device(self._worker):
      data_list = self._iterator.get_next_as_optional()
      result = []
      for i, data in enumerate(data_list):
        # Place the condition op in the same device as the data so the data
        # doesn't need to be sent back to the worker.
        with ops.device(self._devices[i]):
          # Data will be fetched in order, so we only need to check if the
          # first replica has value to see whether there is data left for this
          # single worker.
          if i == 0:
            worker_has_value = data.has_value()

          # pylint: disable=unnecessary-lambda
          # pylint: disable=cell-var-from-loop
          real_data = control_flow_ops.cond(
              data.has_value(),
              lambda: data.get_value(),
              lambda: _dummy_tensor_fn(data.element_spec),
              strict=True,
          )
          # Some dimensions in `replicas` will become unknown after we
          # conditionally return the real tensors or the dummy tensors. Recover
          # the shapes from `data.element_spec`. We only need to do this in
          # non eager mode because we always know the runtime shape of the
          # tensors in eager mode.
          if not context.executing_eagerly():
            real_data = _recover_shape_fn(real_data, data.element_spec)
          result.append(real_data)
          # pylint: enable=cell-var-from-loop
          # pylint: enable=unnecessary-lambda

      return worker_has_value, result
class _SingleWorkerDatasetIteratorSpec(type_spec.TypeSpec):
  """Type specification for `_SingleWorkerOwnedDatasetIterator`."""

  __slots__ = ["_worker", "_devices", "_element_spec"]

  def __init__(self, worker, devices, element_spec):
    self._worker = worker
    # Canonicalize device strings so equal specs compare equal.
    self._devices = tuple(device_util.canonicalize(d) for d in devices)
    self._element_spec = element_spec

  @property
  def value_type(self):
    """The Python type described by this spec."""
    return _SingleWorkerOwnedDatasetIterator

  def _serialize(self):
    return (self._worker, self._devices, self._element_spec)

  @property
  def _component_specs(self):
    """Spec of the single underlying `MultiDeviceIterator` component."""
    return [
        multi_device_iterator_ops.MultiDeviceIteratorSpec(
            self._devices, self._worker, element_spec=self._element_spec)
    ]

  def _to_components(self, value):
    return [value._iterator]  # pylint: disable=protected-access

  def _from_components(self, components):
    """Rebuilds an owned iterator from its tensor components."""
    return _SingleWorkerOwnedDatasetIterator(
        dataset=None,
        worker=self._worker,
        devices=self._devices,
        components=components,
        element_spec=self._element_spec)

  @staticmethod
  def from_value(value):
    """Builds a spec describing the given owned iterator."""
    # pylint: disable=protected-access
    return _SingleWorkerDatasetIteratorSpec(value._worker, value._devices,
                                            value._element_spec)
class _SingleWorkerOwnedDatasetIterator(_SingleWorkerDatasetIteratorBase,
                                        composite_tensor.CompositeTensor):
  """Iterator for a DistributedDataset instance."""

  def __init__(self, dataset=None, worker=None, devices=None, components=None,
               element_spec=None):
    """Create iterator for the `dataset` to fetch data to worker's `devices` .

    `OwnedMultiDeviceIterator` is used to prefetch input to the devices on the
    given worker. The lifetime of this iterator is tied to the encompassing
    python object. Once we go out of scope of the python object or return from
    a tf.function the underlying iterator resource is deleted.

    Args:
      dataset: A `tf.data.Dataset` instance.
      worker: Worker on which ops should be created.
      devices: Distribute data from `dataset` to these devices.
      components: Tensor components to construct the
        _SingleWorkerOwnedDatasetIterator from.
      element_spec: A nested structure of `TypeSpec` objects that represents
        the type specification of elements of the iterator.

    Raises:
      ValueError: if the combination of arguments is invalid.
    """
    if worker is None or devices is None:
      raise ValueError("Both `worker` and `devices` should be provided")

    error_message = ("Either `dataset` or both `components` and `element_spec` "
                     "need to be provided.")

    if dataset is None:
      # Reconstruction path (e.g. from `_SingleWorkerDatasetIteratorSpec`):
      # adopt the provided iterator component instead of building a new one.
      if (components is None or element_spec is None):
        raise ValueError(error_message)
      self._element_spec = element_spec
      self._worker = worker
      self._devices = devices
      self._iterator = components[0]
    else:
      if (components is not None or element_spec is not None):
        raise ValueError(error_message)
      super(_SingleWorkerOwnedDatasetIterator, self).__init__(dataset, worker,
                                                              devices)

  def _make_iterator(self):
    """Make appropriate iterator on the dataset."""
    if not self._worker:
      # NOTE(review): "Worked" below looks like a typo for "Worker"; the
      # message is left unchanged here since it is runtime behavior.
      raise ValueError("Worked device must be specified when creating an "
                       "owned iterator.")
    # Prefetch from the worker's host CPU onto its compute devices.
    host_device = device_util.get_host_for_device(self._worker)
    with ops.device(self._worker):
      self._iterator = multi_device_iterator_ops.OwnedMultiDeviceIterator(
          self._dataset, self._devices, source_device=host_device)

  @property
  def element_spec(self):
    """The type specification of an element of this iterator."""
    return self._element_spec

  @property
  def _type_spec(self):
    # CompositeTensor hook: describes this iterator for tracing/serialization.
    return _SingleWorkerDatasetIteratorSpec(self._worker, self._devices,
                                            self._element_spec)

  @property
  def output_classes(self):
    """Returns the class of each component of an element of this iterator.

    The expected values are `tf.Tensor` and `tf.SparseTensor`.

    Returns:
      A nested structure of Python `type` objects corresponding to each
      component of an element of this dataset.
    """
    return nest.map_structure(
        lambda component_spec: component_spec._to_legacy_output_classes(),  # pylint: disable=protected-access
        self._element_spec)

  @property
  def output_shapes(self):
    """Returns the shape of each component of an element of this iterator.

    Returns:
      A nested structure of `tf.TensorShape` objects corresponding to each
      component of an element of this dataset.
    """
    return nest.map_structure(
        lambda component_spec: component_spec._to_legacy_output_shapes(),  # pylint: disable=protected-access
        self._element_spec)

  @property
  def output_types(self):
    """Returns the type of each component of an element of this iterator.

    Returns:
      A nested structure of `tf.DType` objects corresponding to each component
      of an element of this dataset.
    """
    return nest.map_structure(
        lambda component_spec: component_spec._to_legacy_output_types(),  # pylint: disable=protected-access
        self._element_spec)
class _SingleWorkerDatasetIterator(_SingleWorkerDatasetIteratorBase):
  """Iterator for a single DistributedDatasetV1 instance."""

  def _make_iterator(self):
    """Make appropriate iterator on the dataset."""
    with ops.device(self._worker):
      self._iterator = multi_device_iterator_ops.MultiDeviceIterator(
          self._dataset, self._devices)

  def initialize(self):
    """Initialize underlying iterator.

    In eager execution, this simply recreates the underlying iterator.
    In graph execution, it returns the initializer ops for the underlying
    iterator.

    Returns:
      A list of any initializer ops that should be run.
    """
    if ops.executing_eagerly_outside_functions():
      # Eager: reset in place; there is no initializer op to run.
      self._iterator._eager_reset()  # pylint: disable=protected-access
      return []
    else:
      return [self._iterator.initializer]

  @property
  def output_classes(self):
    # Legacy (V1) output classes of the underlying multi-device iterator.
    return dataset_ops.get_legacy_output_classes(self._iterator)

  @property
  def output_shapes(self):
    # Legacy (V1) output shapes of the underlying multi-device iterator.
    return dataset_ops.get_legacy_output_shapes(self._iterator)

  @property
  def output_types(self):
    # Legacy (V1) output types of the underlying multi-device iterator.
    return dataset_ops.get_legacy_output_types(self._iterator)
class _SingleWorkerCallableIterator(object):
  """Iterator for a single tensor-returning callable."""

  def __init__(self, fn, worker, devices):
    """Wrap `fn` so it can be polled once per device on `worker`."""
    self._fn = fn
    self._worker = worker
    self._devices = devices

  def get_next(self, device, name=None):
    """Get next element for the given device from the callable."""
    del device, name  # Unused: the callable is device-agnostic.
    with ops.device(self._worker):
      return self._fn()

  def get_next_as_list_static_shapes(self, name=None):
    """Get next element from the callable."""
    del name  # Unused.
    with ops.device(self._worker):
      return [self._fn() for _ in self._devices]

  def get_next_as_list(self, name=None):
    """Get next element from the callable."""
    del name  # Unused.
    with ops.device(self._worker):
      return constant_op.constant(True), [self._fn() for _ in self._devices]

  def initialize(self):
    """No-op initialization; a callable has no initializer ops."""
    # TODO(petebu) Should this throw an exception instead?
    return []
def _create_iterators_per_worker(worker_datasets, input_workers,
                                 enable_legacy_iterators):
  """Create a multidevice iterator on each of the workers.

  Args:
    worker_datasets: per-worker datasets, one per entry in
      `input_workers.worker_devices`.
    input_workers: an `InputWorkers` instance mapping workers to devices.
    enable_legacy_iterators: if True, always use the legacy
      `_SingleWorkerDatasetIterator` even under TF2.

  Returns:
    A list of per-worker iterators, parallel to `worker_datasets`.
  """
  assert isinstance(input_workers, InputWorkers)
  assert len(worker_datasets) == len(input_workers.worker_devices)
  iterators = []
  for i, worker in enumerate(input_workers.worker_devices):
    with ops.device(worker):
      worker_devices = input_workers.compute_devices_for_worker(i)
      # TF2 gets owned (resource-scoped) iterators unless legacy iterators
      # were explicitly requested.
      if tf2.enabled() and not enable_legacy_iterators:
        iterator = _SingleWorkerOwnedDatasetIterator(worker_datasets[i], worker,
                                                     worker_devices)
      else:
        iterator = _SingleWorkerDatasetIterator(worker_datasets[i], worker,
                                                worker_devices)
      iterators.append(iterator)
  return iterators
def _create_datasets_per_worker_with_input_context(input_contexts,
                                                   input_workers, dataset_fn):
  """Create device datasets per worker given a dataset function.

  Args:
    input_contexts: list of `InputContext`s, one per worker.
    input_workers: object exposing `worker_devices`, indexed in parallel with
      `input_contexts`.
    dataset_fn: callable taking an `InputContext` and returning a dataset.

  Returns:
    Tuple of (list of per-worker datasets, element_spec of those datasets).

  Raises:
    ValueError: if `input_contexts` is empty.
  """
  # Previously an empty `input_contexts` raised a confusing NameError because
  # the loop variable `dataset` was referenced after the loop; fail explicitly.
  if not input_contexts:
    raise ValueError("`input_contexts` must contain at least one context.")
  datasets = []
  for i, ctx in enumerate(input_contexts):
    worker = input_workers.worker_devices[i]
    with ops.device(worker):
      datasets.append(dataset_fn(ctx))
  # All datasets come from the same dataset_fn, so they share an element_spec;
  # report the last one (matches prior behavior).
  return datasets, datasets[-1].element_spec
# TODO(sourabhbajaj): Remove this in lieu of distributed datasets
def _get_batched_dataset(d):
  """Get the batched dataset from `d`.

  Unwraps a V1 adapter, then recurses through prefetch/options wrappers until
  the batch (or map-and-batch) transformation is found.
  """
  # pylint: disable=protected-access
  if isinstance(d, dataset_ops.DatasetV1Adapter):
    d = d._dataset
  if isinstance(d, (dataset_ops.BatchDataset, batching._MapAndBatchDataset)):
    return d
  elif isinstance(d, (dataset_ops.PrefetchDataset,
                      dataset_ops._OptionsDataset)):
    # Batch may be followed by prefetch/options; keep walking the input chain.
    return _get_batched_dataset(d._input_dataset)

  raise ValueError(
      "Unable to get batched dataset from the input dataset. `batch` "
      "`map_and_batch` need to be the last operations on the dataset. "
      "The batch operations can be followed by a prefetch.")
def _get_batched_dataset_attributes(d):
  """Get `batch_size`, `drop_remainder` of dataset.

  `d` must already be the batched dataset found by `_get_batched_dataset`.
  Tensor-valued attributes are resolved to their constant Python values.
  """
  # pylint: disable=protected-access
  assert isinstance(d,
                    (dataset_ops.BatchDataset, batching._MapAndBatchDataset))
  if isinstance(d, dataset_ops.BatchDataset):
    batch_size = d._batch_size
    drop_remainder = d._drop_remainder
  elif isinstance(d, batching._MapAndBatchDataset):
    # MapAndBatch stores these as tensors (note the `_t` suffix).
    batch_size = d._batch_size_t
    drop_remainder = d._drop_remainder_t
  # pylint: enable=protected-access

  if tensor_util.is_tensor(batch_size):
    batch_size = tensor_util.constant_value(batch_size)

  if tensor_util.is_tensor(drop_remainder):
    drop_remainder = tensor_util.constant_value(drop_remainder)
  return batch_size, drop_remainder
# TODO(sourabhbajaj): Remove this in lieu of distributed datasets
def _get_dataset_attributes(dataset):
  """Get the underlying attributes from the dataset object.

  Returns:
    Tuple `(batch_size, drop_remainder, prefetch_buffer)`; `prefetch_buffer`
    is None when the outermost transformation is not a prefetch.
  """
  # pylint: disable=protected-access

  # First, get batch_size and drop_remainder from the dataset. We need
  # to walk back the dataset creation process and find the batched version in
  # order to get the attributes.
  batched_dataset = _get_batched_dataset(dataset)
  batch_size, drop_remainder = _get_batched_dataset_attributes(batched_dataset)

  # Second, prefetch buffer should be get from the original dataset.
  prefetch_buffer = None
  if isinstance(dataset, dataset_ops.PrefetchDataset):
    prefetch_buffer = dataset._buffer_size
  elif (isinstance(dataset, dataset_ops.DatasetV1Adapter)
        and isinstance(dataset._dataset, dataset_ops.PrefetchDataset)):
    # V1 adapter may wrap the prefetch; look one level deeper.
    prefetch_buffer = dataset._dataset._buffer_size

  return batch_size, drop_remainder, prefetch_buffer
class MultiStepContext(object):
  """A context object that can be used to capture things when running steps.

  This context object is useful when running multiple steps at a time using the
  `experimental_run_steps_on_iterator` API. For e.g. it allows the user's step
  function to specify which outputs to emit at what frequency. Currently it
  supports capturing output from the last step, as well as capturing non tensor
  outputs. In the future it will be augmented to support other use cases such
  as output each N steps.
  """

  def __init__(self):
    """Initialize an output context.

    Returns:
      A context object.
    """
    # Maps output name -> (possibly reduced) value from the last step.
    self._last_step_outputs = {}
    # Maps output name -> the reduce_op it was registered with (None means
    # the raw per-replica value was kept).
    self._last_step_outputs_reduce_ops = {}
    # Maps output name -> arbitrary non-tensor value(s).
    self._non_tensor_outputs = {}

  @property
  def last_step_outputs(self):
    """A dictionary consisting of outputs to be captured on last step.

    Keys in the dictionary are names of tensors to be captured, as specified
    when `set_last_step_output` is called.
    Values in the dictionary are the tensors themselves. If
    `set_last_step_output` was called with a `reduce_op` for this output,
    then the value is the reduced value.

    Returns:
      A dictionary with last step outputs.
    """
    return self._last_step_outputs

  def _set_last_step_outputs(self, outputs):
    """Replace the entire dictionary of last step outputs."""
    if not isinstance(outputs, dict):
      raise ValueError("Need a dictionary to set last_step_outputs.")
    self._last_step_outputs = outputs

  def set_last_step_output(self, name, output, reduce_op=None):
    """Set `output` with `name` to be outputted from the last step.

    Args:
      name: String, name to identify the output. Doesn't need to match tensor
        name.
      output: The tensors that should be outputted with `name`. See below for
        actual types supported.
      reduce_op: Reduction method to use to reduce outputs from multiple
        replicas. Required if `set_last_step_output` is called in a replica
        context. Optional in cross_replica_context.
        When present, the outputs from all the replicas are reduced using the
        current distribution strategy's `reduce` method. Hence, the type of
        `output` must be what's supported by the corresponding `reduce` method.
        For e.g. if using MirroredStrategy and reduction is set, output
        must be a `PerReplica` value.
        The reduce method is also recorded in a dictionary
        `_last_step_outputs_reduce_ops` for later interpreting of the
        outputs as already reduced or not.
    """
    if distribution_strategy_context.in_cross_replica_context():
      self._last_step_outputs_reduce_ops[name] = reduce_op
      if reduce_op is None:
        self._last_step_outputs[name] = output
      else:
        distribution = distribution_strategy_context.get_strategy()
        self._last_step_outputs[name] = distribution.reduce(reduce_op, output,
                                                            axis=None)
    else:
      assert reduce_op is not None
      def merge_fn(distribution, value):
        self._last_step_outputs[name] = distribution.reduce(reduce_op, value,
                                                            axis=None)
        # Setting this inside the `merge_fn` because all replicas share the same
        # context object, so it's more robust to set it only once (even if all
        # the replicas are trying to set the same value).
        self._last_step_outputs_reduce_ops[name] = reduce_op

      distribution_strategy_context.get_replica_context().merge_call(
          merge_fn, args=(output,))

  @property
  def non_tensor_outputs(self):
    """A dictionary consisting of any non tensor outputs to be captured."""
    return self._non_tensor_outputs

  def set_non_tensor_output(self, name, output):
    """Set `output` with `name` to be captured as a non tensor output."""
    if distribution_strategy_context.in_cross_replica_context():
      self._non_tensor_outputs[name] = output
    else:
      def merge_fn(distribution, value):
        # NOTE(priyag): For non tensor outputs, we simply return all the values
        # in a list as reduction doesn't make sense on non tensors.
        self._non_tensor_outputs[name] = (
            distribution.experimental_local_results(value))
      distribution_strategy_context.get_replica_context().merge_call(
          merge_fn, args=(output,))
def _create_distributed_tensor_spec(strategy, tensor_spec):
  """Create a `tf.TypeSpec` for a given strategy and input `tensor_spec`.

  Args:
    strategy: The given `tf.distribute` strategy.
    tensor_spec: `tf.TensorSpec` of a given value. The batch dimension of the
      shape should be None if you have partial batches.

  Returns:
    A `tf.TypeSpec` that matches the values produced by a given strategy. This
    can be a `tf.TensorSpec` or a `PerReplicaSpec`.
  """
  num_replicas = len(strategy.extended.worker_devices)

  # If the number of devices used in the strategy is just 1 then we return
  # the tensor_spec as is.
  if num_replicas == 1:
    return tensor_spec

  # If the number of devices is greater than 1 then we assume the input to
  # tf.function is a per replica type.
  def _get_value_per_replica(tensor_spec_per_input):
    # One copy of the spec per replica, bundled into a PerReplicaSpec.
    value_specs = [tensor_spec_per_input for _ in range(num_replicas)]
    return values.PerReplicaSpec(*value_specs)

  return nest.map_structure(_get_value_per_replica, tensor_spec)
def _replace_per_replica_spec(spec, i):
  """If `spec` is a `PerReplicaSpec`, then return its `i`th value_spec."""
  if not isinstance(spec, values.PerReplicaSpec):
    return spec
  return spec._value_specs[i]  # pylint: disable=protected-access
| aldian/tensorflow | tensorflow/python/distribute/input_lib.py | Python | apache-2.0 | 78,526 | [
"VisIt"
] | d30e47304ac642ac0c57960b43a72791d79baf58e07d0ab5528bfe3eba54078f |
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The vis package implements various visualization tools. For example, a VTK
Structure viewer.
"""
| vorwerkc/pymatgen | pymatgen/vis/__init__.py | Python | mit | 195 | [
"VTK",
"pymatgen"
] | 7ec361730efbba900e1d02459be9d54a4404fe44105b5a2dbe556f5d7392c4ba |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
Enumerator with the libxc identifiers.
This is a low level object, client code should not interact with LibxcFunc directly
but use the API provided by the Xcfunc object defined in core.xcfunc.py.
Part of this module is automatically generated so be careful when refactoring stuff.
Use the script ~pymatgen/dev_scripts/regen_libxcfunc.py to regenerate the enum values.
"""
from __future__ import division, unicode_literals, print_function
import json
import os
from io import open
from enum import Enum, unique
from monty.json import MontyEncoder
# The libxc version used to generate this file!
libxc_version = "3.0.0"
__author__ = "Matteo Giantomassi"
__copyright__ = "Copyright 2016, The Materials Project"
__version__ = libxc_version
__maintainer__ = "Matteo Giantomassi"
__email__ = "gmatteo@gmail.com"
__status__ = "Production"
__date__ = "May 16, 2016"
# Loads libxc info from json file.
# Keys are the integer libxc identifiers; values are metadata dicts
# (including "Kind" and "Family", which LibxcFunc.__init__ reads).
with open(os.path.join(os.path.dirname(__file__), "libxc_docs.json"), "rt") as fh:
    _all_xcfuncs = {int(k): v for k, v in json.load(fh).items()}
# @unique
class LibxcFunc(Enum):
    """
    Enumerator with the identifiers. This object is used by Xcfunc
    declared in xcfunc.py to create an internal representation of the XC functional.
    This is a low level object, client code should not interact with LibxcFunc directly
    but use the API provided by Xcfunc.

    Member values are the libxc integer identifiers; the region between the
    begin/end markers below is auto-generated (see the module docstring) and
    must not be edited by hand.
    """
    #begin_include_dont_touch
    LDA_C_1D_CSC = 18
    LDA_C_1D_LOOS = 26
    LDA_C_2D_AMGB = 15
    LDA_C_2D_PRM = 16
    LDA_C_GOMBAS = 24
    LDA_C_HL = 4
    LDA_C_GL = 5
    LDA_C_vBH = 17
    LDA_C_ML1 = 22
    LDA_C_ML2 = 23
    LDA_C_PW = 12
    LDA_C_PW_MOD = 13
    LDA_C_OB_PW = 14
    LDA_C_PW_RPA = 25
    LDA_C_PZ = 9
    LDA_C_PZ_MOD = 10
    LDA_C_OB_PZ = 11
    LDA_C_RC04 = 27
    LDA_C_RPA = 3
    LDA_C_VWN = 7
    LDA_C_VWN_1 = 28
    LDA_C_VWN_2 = 29
    LDA_C_VWN_3 = 30
    LDA_C_VWN_4 = 31
    LDA_C_VWN_RPA = 8
    LDA_C_WIGNER = 2
    LDA_K_TF = 50
    LDA_K_LP = 51
    LDA_X = 1
    LDA_C_XALPHA = 6
    LDA_X_1D = 21
    LDA_X_2D = 19
    LDA_XC_KSDT = 259
    LDA_XC_TETER93 = 20
    LDA_XC_ZLP = 43
    GGA_C_AM05 = 135
    GGA_C_FT97 = 88
    GGA_C_LM = 137
    GGA_C_LYP = 131
    GGA_C_OP_B88 = 87
    GGA_C_OP_PBE = 86
    GGA_C_OP_G96 = 85
    GGA_C_OP_PW91 = 262
    GGA_C_OP_XALPHA = 84
    GGA_C_OPTC = 200
    GGA_C_P86 = 132
    GGA_C_PBE = 130
    GGA_C_PBE_SOL = 133
    GGA_C_XPBE = 136
    GGA_C_PBE_JRGX = 138
    GGA_C_RGE2 = 143
    GGA_C_APBE = 186
    GGA_C_SPBE = 89
    GGA_C_REGTPSS = 83
    GGA_C_ZPBESOL = 63
    GGA_C_PBEINT = 62
    GGA_C_ZPBEINT = 61
    GGA_C_PBELOC = 246
    GGA_C_BGCP = 39
    GGA_C_PBEFE = 258
    GGA_C_PW91 = 134
    GGA_C_Q2D = 47
    GGA_C_SOGGA11 = 152
    GGA_C_SOGGA11_X = 159
    GGA_C_TCA = 100
    GGA_C_REVTCA = 99
    GGA_C_WI0 = 153
    GGA_C_WI = 148
    GGA_C_WL = 147
    GGA_K_DK = 516
    GGA_K_PERDEW = 517
    GGA_K_VSK = 518
    GGA_K_VJKS = 519
    GGA_K_ERNZERHOF = 520
    GGA_K_MEYER = 57
    GGA_K_OL1 = 512
    GGA_X_OL2 = 183
    GGA_K_OL2 = 513
    GGA_K_PEARSON = 511
    GGA_K_TFVW = 52
    GGA_K_VW = 500
    GGA_K_GE2 = 501
    GGA_K_GOLDEN = 502
    GGA_K_YT65 = 503
    GGA_K_BALTIN = 504
    GGA_K_LIEB = 505
    GGA_K_ABSP1 = 506
    GGA_K_ABSP2 = 507
    GGA_K_GR = 508
    GGA_K_LUDENA = 509
    GGA_K_GP85 = 510
    GGA_X_2D_B86 = 128
    GGA_X_2D_B86_MGC = 124
    GGA_X_2D_B88 = 127
    GGA_X_2D_PBE = 129
    GGA_X_AIRY = 192
    GGA_X_LAG = 193
    GGA_X_AK13 = 56
    GGA_X_AM05 = 120
    GGA_X_B86 = 103
    GGA_X_B86_MGC = 105
    GGA_X_B86_R = 41
    GGA_X_B88 = 106
    GGA_X_OPTB88_VDW = 139
    GGA_X_MB88 = 149
    GGA_K_LLP = 522
    GGA_K_FR_B88 = 514
    GGA_K_THAKKAR = 523
    GGA_X_BAYESIAN = 125
    GGA_X_BPCCAC = 98
    GGA_X_C09X = 158
    GGA_X_CAP = 270
    GGA_X_DK87_R1 = 111
    GGA_X_DK87_R2 = 112
    GGA_X_EV93 = 35
    GGA_X_FT97_A = 114
    GGA_X_FT97_B = 115
    GGA_X_G96 = 107
    GGA_X_HCTH_A = 34
    GGA_X_HERMAN = 104
    GGA_X_HJS_PBE = 525
    GGA_X_HJS_PBE_SOL = 526
    GGA_X_HJS_B88 = 527
    GGA_X_HJS_B97X = 528
    GGA_X_HJS_B88_V2 = 46
    GGA_X_HTBS = 191
    GGA_X_ITYH = 529
    GGA_X_KT1 = 145
    GGA_XC_KT2 = 146
    GGA_X_LB = 160
    GGA_X_LBM = 182
    GGA_X_LG93 = 113
    GGA_X_LV_RPW86 = 58
    GGA_X_MPBE = 122
    GGA_X_N12 = 82
    GGA_X_GAM = 32
    GGA_X_OPTX = 110
    GGA_X_PBE = 101
    GGA_X_PBE_R = 102
    GGA_X_PBE_SOL = 116
    GGA_X_XPBE = 123
    GGA_X_PBE_JSJR = 126
    GGA_X_PBEK1_VDW = 140
    GGA_X_RGE2 = 142
    GGA_X_APBE = 184
    GGA_X_PBEINT = 60
    GGA_X_PBE_TCA = 59
    GGA_X_LAMBDA_LO_N = 45
    GGA_X_LAMBDA_CH_N = 44
    GGA_X_LAMBDA_OC2_N = 40
    GGA_X_PBE_MOL = 49
    GGA_X_BGCP = 38
    GGA_X_PBEFE = 265
    GGA_K_APBE = 185
    GGA_K_REVAPBE = 55
    GGA_K_TW1 = 187
    GGA_K_TW2 = 188
    GGA_K_TW3 = 189
    GGA_K_TW4 = 190
    GGA_K_APBEINT = 54
    GGA_K_REVAPBEINT = 53
    GGA_X_PBEA = 121
    GGA_X_PW86 = 108
    GGA_X_RPW86 = 144
    GGA_K_FR_PW86 = 515
    GGA_X_PW91 = 109
    GGA_X_MPW91 = 119
    GGA_K_LC94 = 521
    GGA_X_Q2D = 48
    GGA_X_RPBE = 117
    GGA_X_SFAT = 530
    GGA_X_SOGGA11 = 151
    GGA_X_SSB_SW = 90
    GGA_X_SSB = 91
    GGA_X_SSB_D = 92
    GGA_X_VMT_PBE = 71
    GGA_X_VMT_GE = 70
    GGA_X_VMT84_PBE = 69
    GGA_X_VMT84_GE = 68
    GGA_X_WC = 118
    GGA_X_WPBEH = 524
    GGA_XC_XLYP = 166
    GGA_XC_PBE1W = 173
    GGA_XC_MPWLYP1W = 174
    GGA_XC_PBELYP1W = 175
    GGA_XC_B97_D = 170
    GGA_XC_HCTH_93 = 161
    GGA_XC_HCTH_120 = 162
    GGA_XC_HCTH_147 = 163
    GGA_XC_HCTH_407 = 164
    GGA_C_HCTH_A = 97
    GGA_XC_B97_GGA1 = 96
    GGA_XC_HCTH_P14 = 95
    GGA_XC_HCTH_P76 = 94
    GGA_XC_HCTH_407P = 93
    GGA_C_N12 = 80
    GGA_C_N12_SX = 79
    GGA_C_GAM = 33
    GGA_XC_EDF1 = 165
    GGA_X_OPTPBE_VDW = 141
    GGA_XC_MOHLYP = 194
    GGA_XC_MOHLYP2 = 195
    GGA_X_SOGGA = 150
    GGA_XC_OBLYP_D = 67
    GGA_XC_OPWLYP_D = 66
    GGA_XC_OPBE_D = 65
    GGA_XC_TH_FL = 196
    GGA_XC_TH_FC = 197
    GGA_XC_TH_FCFO = 198
    GGA_XC_TH_FCO = 199
    GGA_XC_TH1 = 154
    GGA_XC_TH2 = 155
    GGA_XC_TH3 = 156
    GGA_XC_TH4 = 157
    GGA_XC_VV10 = 255
    HYB_GGA_XC_CAP0 = 477
    HYB_GGA_X_N12_SX = 81
    HYB_GGA_X_SOGGA11_X = 426
    HYB_GGA_XC_B97 = 407
    HYB_GGA_XC_B97_1 = 408
    HYB_GGA_XC_B97_2 = 410
    HYB_GGA_XC_B97_K = 413
    HYB_GGA_XC_B97_3 = 414
    HYB_GGA_XC_SB98_1a = 420
    HYB_GGA_XC_SB98_1b = 421
    HYB_GGA_XC_SB98_1c = 422
    HYB_GGA_XC_SB98_2a = 423
    HYB_GGA_XC_SB98_2b = 424
    HYB_GGA_XC_SB98_2c = 425
    HYB_GGA_XC_WB97 = 463
    HYB_GGA_XC_WB97X = 464
    HYB_GGA_XC_WB97X_V = 466
    HYB_GGA_XC_WB97X_D = 471
    HYB_GGA_XC_B97_1p = 266
    HYB_GGA_XC_LC_VV10 = 469
    HYB_GGA_XC_B1WC = 412
    HYB_GGA_XC_B1LYP = 416
    HYB_GGA_XC_B1PW91 = 417
    HYB_GGA_XC_mPW1PW = 418
    HYB_GGA_XC_mPW1K = 405
    HYB_GGA_XC_BHANDH = 435
    HYB_GGA_XC_BHANDHLYP = 436
    HYB_GGA_XC_MPWLYP1M = 453
    HYB_GGA_XC_B3PW91 = 401
    HYB_GGA_XC_B3LYP = 402
    HYB_GGA_XC_B3LYP5 = 475
    HYB_GGA_XC_B3P86 = 403
    HYB_GGA_XC_MPW3PW = 415
    HYB_GGA_XC_MPW3LYP = 419
    HYB_GGA_XC_MB3LYP_RC04 = 437
    HYB_GGA_XC_REVB3LYP = 454
    HYB_GGA_XC_B3LYPs = 459
    HYB_GGA_XC_CAM_B3LYP = 433
    HYB_GGA_XC_TUNED_CAM_B3LYP = 434
    HYB_GGA_XC_CAMY_B3LYP = 470
    HYB_GGA_XC_CAMY_BLYP = 455
    HYB_GGA_XC_EDF2 = 476
    HYB_GGA_XC_HSE03 = 427
    HYB_GGA_XC_HSE06 = 428
    HYB_GGA_XC_LRC_WPBEH = 465
    HYB_GGA_XC_LRC_WPBE = 473
    HYB_GGA_XC_HJS_PBE = 429
    HYB_GGA_XC_HJS_PBE_SOL = 430
    HYB_GGA_XC_HJS_B88 = 431
    HYB_GGA_XC_HJS_B97X = 432
    HYB_GGA_XC_LCY_BLYP = 468
    HYB_GGA_XC_LCY_PBE = 467
    HYB_GGA_XC_O3LYP = 404
    HYB_GGA_XC_X3LYP = 411
    HYB_GGA_XC_PBEH = 406
    HYB_GGA_XC_PBE0_13 = 456
    HYB_GGA_XC_HPBEINT = 472
    MGGA_XC_TPSSLYP1W = 242
    MGGA_C_BC95 = 240
    MGGA_C_CC06 = 229
    MGGA_C_CS = 72
    MGGA_C_M08_HX = 78
    MGGA_C_M08_SO = 77
    MGGA_C_M11 = 76
    MGGA_C_M11_L = 75
    MGGA_C_MN12_L = 74
    MGGA_C_MN12_SX = 73
    MGGA_C_MN15_L = 261
    MGGA_C_MN15 = 269
    MGGA_C_PKZB = 239
    MGGA_C_TPSS = 231
    MGGA_C_REVTPSS = 241
    MGGA_C_TPSSLOC = 247
    MGGA_C_SCAN = 267
    MGGA_C_M05 = 237
    MGGA_C_M05_2X = 238
    MGGA_C_VSXC = 232
    MGGA_C_M06_L = 233
    MGGA_C_M06_HF = 234
    MGGA_C_M06 = 235
    MGGA_C_M06_2X = 236
    MGGA_C_DLDF = 37
    MGGA_X_2D_PRHG07 = 210
    MGGA_X_2D_PRHG07_PRP10 = 211
    MGGA_X_BR89 = 206
    MGGA_X_BJ06 = 207
    MGGA_X_TB09 = 208
    MGGA_X_RPP09 = 209
    MGGA_X_GVT4 = 204
    MGGA_X_LTA = 201
    MGGA_X_M05 = 214
    MGGA_X_M05_2X = 215
    MGGA_X_M06_2X = 218
    MGGA_X_M06_L = 203
    MGGA_X_M06_HF = 216
    MGGA_X_M06 = 217
    MGGA_X_M08_HX = 219
    MGGA_X_M08_SO = 220
    MGGA_X_M11 = 225
    MGGA_X_M11_L = 226
    MGGA_X_MBEEF = 249
    MGGA_X_MBEEFVDW = 250
    MGGA_X_MK00 = 230
    MGGA_X_MK00B = 243
    MGGA_X_MN12_L = 227
    MGGA_X_MN15_L = 260
    MGGA_X_MS0 = 221
    MGGA_X_MS1 = 222
    MGGA_X_MS2 = 223
    MGGA_X_MVS = 257
    MGGA_X_PKZB = 213
    MGGA_X_SCAN = 263
    MGGA_X_TAU_HCTH = 205
    MGGA_X_TPSS = 202
    MGGA_X_MODTPSS = 245
    MGGA_X_REVTPSS = 212
    MGGA_X_BLOC = 244
    MGGA_XC_B97M_V = 254
    MGGA_XC_OTPSS_D = 64
    MGGA_XC_ZLP = 42
    HYB_MGGA_X_MVSH = 474
    HYB_MGGA_XC_M05 = 438
    HYB_MGGA_XC_M05_2X = 439
    HYB_MGGA_XC_B88B95 = 440
    HYB_MGGA_XC_B86B95 = 441
    HYB_MGGA_XC_PW86B95 = 442
    HYB_MGGA_XC_BB1K = 443
    HYB_MGGA_XC_MPW1B95 = 445
    HYB_MGGA_XC_MPWB1K = 446
    HYB_MGGA_XC_X1B95 = 447
    HYB_MGGA_XC_XB1K = 448
    HYB_MGGA_XC_M06_HF = 444
    HYB_MGGA_XC_M06 = 449
    HYB_MGGA_XC_M06_2X = 450
    HYB_MGGA_XC_PW6B95 = 451
    HYB_MGGA_XC_PWB6K = 452
    HYB_MGGA_XC_TPSSH = 457
    HYB_MGGA_XC_REVTPSSH = 458
    HYB_MGGA_X_DLDF = 36
    HYB_MGGA_XC_M08_HX = 460
    HYB_MGGA_XC_M08_SO = 461
    HYB_MGGA_XC_M11 = 462
    HYB_MGGA_X_MN12_SX = 248
    HYB_MGGA_X_MN15 = 268
    HYB_MGGA_X_MS2H = 224
    HYB_MGGA_X_SCAN0 = 264
    HYB_MGGA_XC_WB97M_V = 531
    #end_include_dont_touch

    def __init__(self, num):
        """Attach metadata ("Kind", "Family") looked up from libxc_docs.json.

        Args:
            num: the libxc integer identifier; same as the member value
                (passed in by the Enum machinery, not used directly).
        """
        info = _all_xcfuncs[self.value]
        self.kind = info["Kind"]
        self.family = info["Family"]

    def __str__(self):
        """Human-readable summary: name, kind and family."""
        return "name=%s, kind=%s, family=%s" % (self.name, self.kind, self.family)

    @staticmethod
    def all_families():
        """
        List of strings with the libxc families.
        Note that XC_FAMILY if removed from the string e.g. XC_FAMILY_LDA becomes LDA
        """
        return sorted(set(d["Family"] for d in _all_xcfuncs.values()))

    @staticmethod
    def all_kinds():
        """
        List of strings with the libxc kinds.
        Also in this case, the string is obtained by remove the XC_ prefix.
        XC_CORRELATION --> CORRELATION
        """
        return sorted(set(d["Kind"] for d in _all_xcfuncs.values()))

    @property
    def info_dict(self):
        """Dictionary with metadata. see libxc_docs.json"""
        return _all_xcfuncs[self.value]

    @property
    def is_x_kind(self):
        """True if this is an exchange-only functional"""
        return self.kind == "EXCHANGE"

    @property
    def is_c_kind(self):
        """True if this is a correlation-only functional"""
        return self.kind == "CORRELATION"

    @property
    def is_k_kind(self):
        """True if this is a kinetic functional"""
        return self.kind == "KINETIC"

    @property
    def is_xc_kind(self):
        """True if this is a exchange+correlation functional"""
        return self.kind == "EXCHANGE_CORRELATION"

    @property
    def is_lda_family(self):
        """True if this functional belongs to the LDA family."""
        return self.family == "LDA"

    @property
    def is_gga_family(self):
        """True if this functional belongs to the GGA family."""
        return self.family == "GGA"

    @property
    def is_mgga_family(self):
        """True if this functional belongs to the meta-GGA family."""
        return self.family == "MGGA"

    @property
    def is_hyb_gga_family(self):
        """True if this functional belongs to the hybrid + GGA family."""
        return self.family == "HYB_GGA"

    @property
    def is_hyb_mgga_family(self):
        """True if this functional belongs to the hybrid + meta-GGA family."""
        return self.family == "HYB_MGGA"

    def as_dict(self):
        """
        Makes LibxcFunc obey the general json interface used in pymatgen for
        easier serialization.
        """
        return {"name": self.name,
                "@module": self.__class__.__module__,
                "@class": self.__class__.__name__}

    @staticmethod
    def from_dict(d):
        """
        Makes LibxcFunc obey the general json interface used in pymatgen for
        easier serialization.
        """
        return LibxcFunc[d["name"]]

    def to_json(self):
        """
        Returns a json string representation of the MSONable object.
        """
        return json.dumps(self.as_dict(), cls=MontyEncoder)
if __name__ == "__main__":
    # Smoke test: print name/kind/family for every functional in the table.
    for xc in LibxcFunc:
        print(xc)
| xhqu1981/pymatgen | pymatgen/core/libxcfunc.py | Python | mit | 13,121 | [
"pymatgen"
] | 1073e56107cc370aa334d243186edadec5b1711978a744f822c02b5926c7f0c0 |
"""
=============================================
Integration and ODEs (:mod:`scipy.integrate`)
=============================================
.. currentmodule:: scipy.integrate
Integrating functions, given function object
============================================
.. autosummary::
:toctree: generated/
quad -- General purpose integration
quad_vec -- General purpose integration of vector-valued functions
dblquad -- General purpose double integration
tplquad -- General purpose triple integration
nquad -- General purpose N-D integration
fixed_quad -- Integrate func(x) using Gaussian quadrature of order n
quadrature -- Integrate with given tolerance using Gaussian quadrature
romberg -- Integrate func using Romberg integration
quad_explain -- Print information for use of quad
newton_cotes -- Weights and error coefficient for Newton-Cotes integration
IntegrationWarning -- Warning on issues during integration
AccuracyWarning -- Warning on issues during quadrature integration
Integrating functions, given fixed samples
==========================================
.. autosummary::
:toctree: generated/
trapezoid -- Use trapezoidal rule to compute integral.
cumulative_trapezoid -- Use trapezoidal rule to cumulatively compute integral.
simpson -- Use Simpson's rule to compute integral from samples.
romb -- Use Romberg Integration to compute integral from
-- (2**k + 1) evenly-spaced samples.
.. seealso::
:mod:`scipy.special` for orthogonal polynomials (special) for Gaussian
quadrature roots and weights for other weighting factors and regions.
Solving initial value problems for ODE systems
==============================================
The solvers are implemented as individual classes, which can be used directly
(low-level usage) or through a convenience function.
.. autosummary::
:toctree: generated/
solve_ivp -- Convenient function for ODE integration.
RK23 -- Explicit Runge-Kutta solver of order 3(2).
RK45 -- Explicit Runge-Kutta solver of order 5(4).
DOP853 -- Explicit Runge-Kutta solver of order 8.
Radau -- Implicit Runge-Kutta solver of order 5.
BDF -- Implicit multi-step variable order (1 to 5) solver.
LSODA -- LSODA solver from ODEPACK Fortran package.
OdeSolver -- Base class for ODE solvers.
DenseOutput -- Local interpolant for computing a dense output.
OdeSolution -- Class which represents a continuous ODE solution.
Old API
-------
These are the routines developed earlier for SciPy. They wrap older solvers
implemented in Fortran (mostly ODEPACK). While the interface to them is not
particularly convenient and certain features are missing compared to the new
API, the solvers themselves are of good quality and work fast as compiled
Fortran code. In some cases, it might be worth using this old API.
.. autosummary::
:toctree: generated/
odeint -- General integration of ordinary differential equations.
ode -- Integrate ODE using VODE and ZVODE routines.
complex_ode -- Convert a complex-valued ODE to real-valued and integrate.
Solving boundary value problems for ODE systems
===============================================
.. autosummary::
:toctree: generated/
solve_bvp -- Solve a boundary value problem for a system of ODEs.
""" # noqa: E501
from ._quadrature import *
from ._odepack_py import *
from ._quadpack_py import *
from ._ode import *
from ._bvp import solve_bvp
from ._ivp import (solve_ivp, OdeSolution, DenseOutput,
OdeSolver, RK23, RK45, DOP853, Radau, BDF, LSODA)
from ._quad_vec import quad_vec
# Public API: every name defined above that is not private.
__all__ = [s for s in dir() if not s.startswith('_')]

from scipy._lib._testutils import PytestTester
# Expose scipy.integrate.test() for running this subpackage's test suite.
test = PytestTester(__name__)
del PytestTester
| grlee77/scipy | scipy/integrate/__init__.py | Python | bsd-3-clause | 3,957 | [
"Gaussian"
] | 51fc242311883fae1b305034734e0d8ab786b5a887a37e35446707e079e24ba0 |
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Installing test models against a session."""
from builtins import object
from collections import defaultdict
from google.cloud.forseti.services import utils
class ModelCreatorClient(object):
    """Model creator client.

    Thin adapter exposing the subset of the data-access API needed by
    ModelCreator, bound to a single database session.
    """

    def __init__(self, session, data_access):
        # session: database session used for all operations.
        # data_access: data access object that performs the real work.
        self.session = session
        self.data_access = data_access
        # Mirrors the service-client layout where explain calls live under a
        # dedicated attribute; here they are served by this same object.
        self.explain = self

    def add_resource(self, resource_type_name, parent_type_name, no_parent):
        """Add a resource under a parent; `no_parent` marks a root resource."""
        return self.data_access.add_resource_by_name(self.session,
                                                     resource_type_name,
                                                     parent_type_name,
                                                     no_parent)

    def add_member(self, child, parents):
        """Add member `child` belonging to the given parent members."""
        return self.data_access.add_member(self.session, child, parents)

    def add_role(self, role_name, permissions):
        """Add a role with the given permission list."""
        return self.data_access.add_role_by_name(self.session,
                                                 role_name,
                                                 permissions)

    def get_iam_policy(self, full_resource_name):
        """Return the IAM policy for `full_resource_name`.

        Returns:
            A dict subclass that also exposes `.policy`, `.bindings` and
            `.etag` as attributes (defaulting to [] / None when absent).
        """
        policy_dict = self.data_access.get_iam_policy(
            self.session, utils.full_to_type_name(full_resource_name))

        class PolicyAccessor(dict):
            # Dict wrapper giving attribute access to policy/bindings/etag.
            def __init__(self, *args, **kwargs):
                super(PolicyAccessor, self).__init__(*args, **kwargs)
                self.policy = self
                self.bindings = self['bindings'] if 'bindings' in self else []
                self.etag = self['etag'] if 'etag' in self else None

        return PolicyAccessor(policy_dict)

    def set_iam_policy(self, full_resource_name, policy):
        """Set the IAM policy on `full_resource_name` (members denormed)."""
        return self.data_access.set_iam_policy(
            self.session, utils.full_to_type_name(full_resource_name), policy,
            update_members=True)

    def expand_special_members(self):
        """Expand special membership groups (delegated to data_access)."""
        self.data_access.expand_special_members(self.session)

    def commit(self):
        """Commit the session and denormalize group-in-group relations."""
        self.session.commit()
        self.data_access.denorm_group_in_group(self.session)
        self.session.commit()
class ModelCreator(object):
"""Model creator."""
def __init__(self, model, client):
self._install_model(model, client)
client.commit()
def _install_model(self, model, client):
self._install_resources(model['resources'], client)
self._install_memberships(model['memberships'], client)
self._install_roles(model['roles'], client)
self._install_bindings(model['bindings'], client)
def _recursive_install_resources(self, node, model, client, parent):
"""Install resources."""
client.add_resource(node, parent, bool(not parent))
for root, tree in model.items():
self._recursive_install_resources(root, tree, client, node)
def _install_resources(self, model_view, client):
"""Install resources."""
for root, tree in model_view.items():
self._recursive_install_resources(root, tree, client, '')
def _recursive_invert_membership(self, node, model, parentship):
if node not in parentship:
parentship[node] = set()
for child in model.keys():
parentship[child].add(node)
for root, tree in model.items():
self._recursive_invert_membership(root, tree, parentship)
return parentship
def _cyclic(self, g):
path = set()
visited = set()
def visit(vertex):
if vertex in visited:
return False
visited.add(vertex)
path.add(vertex)
for neighbour in g.get(vertex, ()):
if neighbour in path or visit(neighbour):
return True
path.remove(vertex)
return False
return any(visit(v) for v in g)
def _install_memberships(self, model_view, client):
parent_relationship = defaultdict(set)
for root, tree in model_view.items():
self._recursive_invert_membership(root, tree, parent_relationship)
if self._cyclic(parent_relationship):
raise Exception('Cyclic membership relation not supported!')
installed_members = set()
while len(parent_relationship) > 0:
for child, parents in parent_relationship.items():
if parents.issubset(installed_members):
break
installed_members.add(child)
client.add_member(child, list(parents))
parent_relationship.pop(child)
def _install_roles(self, model_view, client):
for role, permissions in model_view.items():
client.add_role(role, permissions)
def _install_bindings(self, model_view, client):
    """Install IAM policy bindings on each resource.

    Raises:
        Exception: if a resource already has a non-empty policy.
    """
    for resource_name, bindings in model_view.items():
        reply = client.get_iam_policy(resource_name)
        if len(reply.policy.bindings) > 0:
            raise Exception('policy should have been empty')
        client.set_iam_policy(resource_name,
                              {'bindings': bindings,
                               'etag': reply.policy.etag})
    # NOTE(review): original indentation was lost; this call is assumed to
    # run once after all bindings are installed -- confirm against VCS.
    client.expand_special_members()
| forseti-security/forseti-security | tests/services/model_tester.py | Python | apache-2.0 | 5,808 | [
"VisIt"
] | 915500d52a305d351fb2b020526899479439c0428caa0d4b605a692d0c51ad37 |
# -*- coding: utf-8 -*-
"""A plugin that extracts browser history from events."""
from __future__ import unicode_literals
import collections
import re
from urllib import parse as urlparse
from plaso.analysis import interface
from plaso.analysis import logger
from plaso.analysis import manager
from plaso.containers import reports
# Create a lightweight object that is used to store timeline based information
# about each search term.  Fields: time (event timestamp), source (parser or
# plugin name), engine (search engine name), search_term (decoded query).
# pylint: disable=invalid-name
SEARCH_OBJECT = collections.namedtuple(
    'SEARCH_OBJECT', 'time source engine search_term')
class BrowserSearchPlugin(interface.AnalysisPlugin):
  """Analyze browser search entries from events."""

  NAME = 'browser_search'

  # Label applied to every event recognized as a browser search.
  _EVENT_TAG_LABELS = ['browser_search']

  # Event data types that can carry a browser search URL.
  _SUPPORTED_EVENT_DATA_TYPES = frozenset([
      'chrome:autofill:entry',
      'chrome:cache:entry',
      'chrome:cookie:entry',
      'chrome:extension_activity:activity_log',
      'chrome:history:file_downloaded',
      'chrome:history:page_visited',
      'firefox:cache:record',
      'firefox:cookie:entry',
      'firefox:places:bookmark_annotation',
      'firefox:places:bookmark_folder',
      'firefox:places:bookmark',
      'firefox:places:page_visited',
      'firefox:downloads:download',
      'cookie:google:analytics:utma',
      'cookie:google:analytics:utmb',
      'cookie:google:analytics:utmt',
      'cookie:google:analytics:utmz',
      'msiecf:leak',
      'msiecf:redirected',
      'msiecf:url',
      'msie:webcache:container',
      'msie:webcache:containers',
      'msie:webcache:leak_file',
      'msie:webcache:partitions',
      'opera:history:entry',
      'opera:history:typed_entry',
      'safari:cookie:entry',
      'safari:history:visit',
      'safari:history:visit_sqlite'])

  # TODO: use groups to build a single RE.
  # Here we define filters and callback methods for all hits on each filter.
  # Each entry is (engine name, URL pattern, extraction method name); the
  # method is looked up on the instance by name at examination time.
  _URL_FILTERS = frozenset([
      ('Bing', re.compile(r'bing\.com/search'), '_ExtractSearchQueryFromURL'),
      ('DuckDuckGo', re.compile(r'duckduckgo\.com'),
       '_ExtractDuckDuckGoSearchQuery'),
      ('GMail', re.compile(r'mail\.google\.com'),
       '_ExtractGMailSearchQuery'),
      ('Google Docs', re.compile(r'docs\.google\.com'),
       '_ExtractGoogleDocsSearchQuery'),
      ('Google Drive', re.compile(r'drive\.google\.com/drive/search'),
       '_ExtractGoogleSearchQuery'),
      ('Google Search',
       re.compile(r'(www\.|encrypted\.|/)google\.[^/]*/search'),
       '_ExtractGoogleSearchQuery'),
      ('Google Sites', re.compile(r'sites\.google\.com/site'),
       '_ExtractGoogleSearchQuery'),
      ('Yahoo', re.compile(r'yahoo\.com/search'),
       '_ExtractYahooSearchQuery'),
      ('Yandex', re.compile(r'yandex\.com/search'),
       '_ExtractYandexSearchQuery'),
      ('Youtube', re.compile(r'youtube\.com'),
       '_ExtractYouTubeSearchQuery'),
  ])
def __init__(self):
  """Initializes an analysis plugin."""
  super(BrowserSearchPlugin, self).__init__()
  # Number of hits per search, keyed by 'engine:search term'.
  self._counter = collections.Counter()

  # Store a list of search terms in a timeline format.
  # The format is key = timestamp, value = (source, engine, search term).
  self._search_term_timeline = []
def _DecodeURL(self, url):
"""Decodes the URL, replaces %XX to their corresponding characters.
Args:
url (str): encoded URL.
Returns:
str: decoded URL.
"""
if not url:
return ''
decoded_url = urlparse.unquote(url)
if isinstance(decoded_url, bytes):
try:
decoded_url = decoded_url.decode('utf-8')
except UnicodeDecodeError as exception:
decoded_url = decoded_url.decode('utf-8', errors='replace')
logger.warning(
'Unable to decode URL: {0:s} with error: {1!s}'.format(
url, exception))
return decoded_url
def _ExtractDuckDuckGoSearchQuery(self, url):
  """Extracts a search query from a DuckDuckGo search URL.

  DuckDuckGo: https://duckduckgo.com/?q=query

  Args:
    url (str): URL.

  Returns:
    str: search query or None if no query was found.
  """
  if 'q=' in url:
    query = self._GetBetweenQEqualsAndAmpersand(url)
    return query.replace('+', ' ')
  return None
def _ExtractGMailSearchQuery(self, url):
"""Extracts a search query from a GMail search URL.
GMail: https://mail.google.com/mail/u/0/#search/query[/?]
Args:
url (str): URL.
Returns:
str: search query or None if no query was found.
"""
if 'search/' not in url:
return None
_, _, line = url.partition('search/')
line, _, _ = line.partition('/')
line, _, _ = line.partition('?')
return line.replace('+', ' ')
def _ExtractGoogleDocsSearchQuery(self, url):
  """Extracts a search query from a Google docs URL.

  Google Docs: https://docs.google.com/.*/u/0/?q=query

  Args:
    url (str): URL.

  Returns:
    str: search query or None if no query was found.
  """
  if 'q=' not in url:
    return None

  query = self._GetBetweenQEqualsAndAmpersand(url)
  if query:
    return query.replace('+', ' ')
  return None
def _ExtractGoogleSearchQuery(self, url):
  """Extracts a search query from a Google URL.

  Google Drive: https://drive.google.com/drive/search?q=query
  Google Search: https://www.google.com/search?q=query
  Google Sites: https://sites.google.com/site/.*/system/app/pages/
                search?q=query

  Args:
    url (str): URL.

  Returns:
    str: search query or None if no query was found.
  """
  # Both markers must be present for this to be a search URL.
  if 'search' not in url or 'q=' not in url:
    return None

  query = self._GetBetweenQEqualsAndAmpersand(url)
  if not query:
    return None
  return query.replace('+', ' ')
def _ExtractYahooSearchQuery(self, url):
"""Extracts a search query from a Yahoo search URL.
Examples:
https://search.yahoo.com/search?p=query
https://search.yahoo.com/search;?p=query
Args:
url (str): URL.
Returns:
str: search query or None if no query was found.
"""
if 'p=' not in url:
return None
_, _, line = url.partition('p=')
before_and, _, _ = line.partition('&')
if not before_and:
return None
yahoo_search_url = before_and.split()[0]
return yahoo_search_url.replace('+', ' ')
def _ExtractYandexSearchQuery(self, url):
"""Extracts a search query from a Yandex search URL.
Yandex: https://www.yandex.com/search/?text=query
Args:
url (str): URL.
Returns:
str: search query or None if no query was found.
"""
if 'text=' not in url:
return None
_, _, line = url.partition('text=')
before_and, _, _ = line.partition('&')
if not before_and:
return None
yandex_search_url = before_and.split()[0]
return yandex_search_url.replace('+', ' ')
def _ExtractYouTubeSearchQuery(self, url):
  """Extracts a search query from a YouTube search URL.

  YouTube: https://www.youtube.com/results?search_query=query

  Args:
    url (str): URL.

  Returns:
    str: search query.
  """
  # YouTube URLs use the same 'q=' convention handled by the generic helper.
  return self._ExtractSearchQueryFromURL(url)
def _ExtractSearchQueryFromURL(self, url):
  """Extracts a search query from the URL.

  Bing: https://www.bing.com/search?q=query
  GitHub: https://github.com/search?q=query

  Args:
    url (str): URL.

  Returns:
    str: search query, the value between 'q=' and '&' or None if no
        query was found.
  """
  is_search_url = 'search' in url and 'q=' in url
  if not is_search_url:
    return None

  query = self._GetBetweenQEqualsAndAmpersand(url)
  return query.replace('+', ' ')
def _GetBetweenQEqualsAndAmpersand(self, url):
"""Retrieves the substring between the substrings 'q=' and '&'.
Args:
url (str): URL.
Returns:
str: search query, the value between 'q=' and '&' or None if no query
was found.
"""
# Make sure we're analyzing the query part of the URL.
_, _, url = url.partition('?')
# Look for a key value pair named 'q'.
_, _, url = url.partition('q=')
if not url:
return ''
# Strip additional key value pairs.
url, _, _ = url.partition('&')
return url
def CompileReport(self, mediator):
  """Compiles an analysis report.

  Args:
    mediator (AnalysisMediator): mediates interactions between
        analysis plugins and other components, such as storage and dfvfs.

  Returns:
    AnalysisReport: analysis report.
  """
  # Regroup the flat 'engine:term' counter into {engine: {term: count}}.
  results = {}
  for key, count in self._counter.items():
    search_engine, _, search_term = key.partition(':')
    results.setdefault(search_engine, {})
    results[search_engine][search_term] = count

  lines_of_text = []
  for search_engine, terms in sorted(results.items()):
    lines_of_text.append(' == ENGINE: {0:s} =='.format(search_engine))

    # Most frequent terms first; ties broken by reverse term order.
    for search_term, count in sorted(
        terms.items(), key=lambda x: (x[1], x[0]), reverse=True):
      lines_of_text.append('{0:d} {1:s}'.format(count, search_term))

    # An empty string is added to have SetText create an empty line.
    lines_of_text.append('')

  lines_of_text.append('')
  report_text = '\n'.join(lines_of_text)
  analysis_report = reports.AnalysisReport(
      plugin_name=self.NAME, text=report_text)
  analysis_report.report_array = self._search_term_timeline
  analysis_report.report_dict = results
  return analysis_report
def ExamineEvent(self, mediator, event, event_data, event_data_stream):
  """Analyzes an event.

  Args:
    mediator (AnalysisMediator): mediates interactions between
        analysis plugins and other components, such as storage and dfvfs.
    event (EventObject): event.
    event_data (EventData): event data.
    event_data_stream (EventDataStream): event data stream.
  """
  if event_data.data_type not in self._SUPPORTED_EVENT_DATA_TYPES:
    return

  url = getattr(event_data, 'url', None)
  if not url:
    return

  parser_or_plugin_name = getattr(event_data, 'parser', 'N/A')

  for engine, url_expression, method_name in self._URL_FILTERS:
    callback_method = getattr(self, method_name, None)
    if not callback_method:
      # Log the name of the missing method; callback_method is None here
      # and cannot be formatted with the '{0:s}' conversion (TypeError).
      logger.warning('Missing method: {0:s}'.format(method_name))
      continue

    match = url_expression.search(url)
    if not match:
      continue

    search_query = callback_method(url)
    if not search_query:
      logger.warning('Missing search query for URL: {0:s}'.format(url))
      continue

    search_query = self._DecodeURL(search_query)
    if not search_query:
      continue

    event_tag = self._CreateEventTag(event, self._EVENT_TAG_LABELS)
    mediator.ProduceEventTag(event_tag)

    self._counter['{0:s}:{1:s}'.format(engine, search_query)] += 1

    # Add the timeline format for each search term.
    search_object = SEARCH_OBJECT(
        event.timestamp, parser_or_plugin_name, engine, search_query)
    self._search_term_timeline.append(search_object)
manager.AnalysisPluginManager.RegisterPlugin(BrowserSearchPlugin)
| rgayon/plaso | plaso/analysis/browser_search.py | Python | apache-2.0 | 11,072 | [
"VisIt"
] | ade9d054bc0917c11d6ba5a741737d29aa0852a3d5a39e4b2a81a77c275f855a |
"""Detect presence and estimate sample-of-arrival of a DSSS signal."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import numpy as np
from thrifty import toads_data
from thrifty.signal_utils import Signal
def _clip_offset(offset, max_=0.6):
return -max_ if offset < -max_ else max_ if offset > max_ else offset
def calculate_window(block_len, history_len, template_len):
    """Calculate interval of values that are unique in a correlation block.

    Returns a half-open interval [start, stop).

    The minimum `history_len` is `template_len - 1`, but extra values are
    required at both sides of the correlation peak in order to perform
    interpolation.  The extra history becomes padding that is split between
    the two ends of the correlation block; restricting peak detection to the
    remaining interval prevents duplicate detections across blocks.
    """
    assert history_len >= template_len - 1

    corr_len = block_len - template_len + 1
    padding = history_len - template_len + 1
    left_pad = padding // 2
    right_pad = padding - left_pad
    return left_pad, corr_len - right_pad
class SoaEstimator(object):
    """A SoA estimator that uses the default algorithms.

    The default despreader, detector and interpolator algorithms will be used:

     - Despreader: correlate using FFT
     - Detector: Simple threshold detector.
     - Interpolator: Parabolic interpolator.

    Parameters
    ----------
    template : :class:`numpy.ndarray`
        Template signal.
    thresh_coeffs : (float, float, float) tuple
        Coefficients of threshold formula: (constant, snr, stddev).
    block_len : int
        Size of data blocks.
    history_len : int
        Number of samples at the end of each block that are repeated in the
        next block.
    """

    def __init__(self, template, thresh_coeffs, block_len, history_len):
        self.template = Signal(template)
        self.template_energy = np.sum(self.template.power)
        template_len = len(template)
        self.corr_len = block_len - template_len + 1
        # Zero-pad the template so its FFT length matches the block length.
        self.template_padded = np.concatenate([self.template,
                                               np.zeros(self.corr_len-1)])
        self.template_padded = Signal(self.template_padded)
        self.template_fft = self.template_padded.fft
        # NOTE(review): despite the class docstring, the Gaussian
        # interpolator is assigned here, not the parabolic one.
        self.interpolate = gaussian_interpolation
        self.window = calculate_window(block_len, history_len, template_len)
        self.thresh_coeffs = thresh_coeffs

    def soa_estimate(self, fft):
        """Estimate the SoA of the given signal."""
        # assert len(fft) == block_len
        corr = self.despread(fft)
        peak_idx, peak_mag = self.get_peak(corr)
        noise_rms = self.estimate_noise(peak_mag, fft)
        threshold = self.calculate_threshold(corr, noise_rms)
        detected = peak_mag > threshold
        # detected, peak_idx, peak_ampl, noise_rms = self.peak_detect(corr_mag)
        # Sub-sample offset is only computed when a peak was detected.
        offset = 0 if not detected else self.interpolate(corr.mag, peak_idx)
        offset = _clip_offset(offset)
        info = toads_data.CorrDetectionInfo(peak_idx, offset,
                                            peak_mag, noise_rms)
        return detected, info, corr

    def __call__(self, fft):
        return self.soa_estimate(fft)

    def despread(self, fft):
        """Correlate / despread using FFT."""
        # Correlation in the frequency domain: multiply by the conjugate
        # of the template spectrum, then transform back.
        corr_fft = fft * self.template_fft.conj
        corr_full = corr_fft.ifft
        corr = corr_full[:self.corr_len]
        return corr

    def get_peak(self, corr):
        """Calculate peak index and estimate sqrt(power) of peak."""
        return get_peak(corr, self.window)

    def estimate_noise(self, peak_mag, fft):
        """Estimate noise from signal's rms / power."""
        # Can be sped up by using RMS value of signal before carrier recovery.
        signal_energy = fft.rms**2
        # alternative: signal_energy = np.sum(np.abs(np.fft.ifft(fft))**2)
        signal_corr_energy = signal_energy * self.template_energy
        # Subtract twice the peak power to compensate for both the correlation
        # peak's energy and the energy of the unmodulated carrier.
        peak_power = peak_mag**2
        noise_power = (signal_corr_energy - peak_power) / len(fft)
        noise_rms = np.sqrt(noise_power)
        return noise_rms

    def calculate_threshold(self, corr, noise_rms):
        """Calculate detector threshold given the formula's coefficients."""
        return calculate_threshold(corr, noise_rms, self.thresh_coeffs)
def calculate_threshold(corr, noise_rms, thresh_coeffs):
    """Calculate detector threshold given the formula's coefficients.

    The threshold is ``sqrt(c0 + c1 * noise_rms**2 + c2 * var(corr.mag))``
    for ``thresh_coeffs = (c0, c1, c2)``; the correlation variance term is
    only evaluated when its coefficient is non-zero.
    """
    thresh_const, thresh_snr, thresh_stddev = thresh_coeffs
    if thresh_stddev:
        stddev = np.std(corr.mag)
    else:
        stddev = 0
    threshold_power = (thresh_const
                       + thresh_snr * noise_rms**2
                       + thresh_stddev * stddev**2)
    return np.sqrt(threshold_power)
def get_peak(corr, window):
    """Calculate peak index and estimate sqrt(power) of peak.

    Only the half-open interval ``window = (start, stop)`` of the
    correlation magnitudes is searched.
    """
    start, stop = window
    magnitudes = corr.mag
    peak_idx = np.argmax(magnitudes[start:stop]) + start
    return peak_idx, magnitudes[peak_idx]
def parabolic_interpolation(corr_mag, peak_idx):
    """Sub-sample SoA estimation using parabolic interpolation.

    Fits a parabola through the peak sample and its two neighbours and
    returns the fractional offset of the vertex relative to ``peak_idx``.

    Args:
        corr_mag: sequence of correlation magnitudes.
        peak_idx: index of the detected peak within ``corr_mag``.

    Returns:
        float: sub-sample offset, or 0 if the peak lies on the boundary.
    """
    # pylint: disable=invalid-name
    if peak_idx == 0 or peak_idx == len(corr_mag) - 1:
        # logging.warn is a deprecated alias; use logging.warning.
        logging.warning(
            "Parabolic interpolation failed: peak_idx out of bounds."
            " Please ensure history_len >= template_len + 1.")
        return 0
    a, b, c = corr_mag[peak_idx-1], corr_mag[peak_idx], corr_mag[peak_idx+1]
    offset = 0.5 * (c - a) / (2 * b - a - c)
    return offset
def gaussian_interpolation(corr_mag, peak_idx):
    """Sub-sample SoA estimation using Gaussian interpolation.

    Like parabolic interpolation, but performed on the logarithm of the
    magnitudes, which is exact when the peak has a Gaussian shape.

    Args:
        corr_mag: sequence of correlation magnitudes (must be positive
            around the peak for the logarithm to be defined).
        peak_idx: index of the detected peak within ``corr_mag``.

    Returns:
        float: sub-sample offset, or 0 if the peak lies on the boundary.
    """
    # pylint: disable=invalid-name
    if peak_idx == 0 or peak_idx == len(corr_mag) - 1:
        # logging.warn is a deprecated alias; use logging.warning.
        logging.warning(
            "Gaussian interpolation failed: peak_idx out of bounds."
            " Please ensure history_len >= template_len + 1.")
        return 0
    a, b, c = corr_mag[peak_idx-1], corr_mag[peak_idx], corr_mag[peak_idx+1]
    a, b, c = np.log(a), np.log(b), np.log(c)
    offset = 0.5 * (c - a) / (2 * b - a - c)
    return offset
| swkrueger/Thrifty | thrifty/soa_estimator.py | Python | gpl-3.0 | 6,511 | [
"Gaussian"
] | 231cef32a1afb910b7458698c906105ed6ea4d7297f756ea368bef6c9baddf13 |
#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
import sys
class TestClip(Testing.vtkTest):
    """Exercise vtkTableBasedClipDataSet on several dataset types and check
    the resulting cell counts against known-good values."""

    def testImage2DScalar(self):
        # Clip axis-aligned 2D slices of the RTAnalytic source by the scalar
        # value 150 and check the expected cell count for each plane.
        planes = ['XY', 'XZ', 'YZ']
        expectedNCells = [38, 46, 42]
        for plane, nCells in zip(planes,expectedNCells):
            r = vtk.vtkRTAnalyticSource()
            r.SetXFreq(600);
            r.SetYFreq(400);
            r.SetZFreq(900);
            if plane == 'XY':
                r.SetWholeExtent(-5, 5, -5, 5, 0, 0)
            elif plane == 'XZ':
                r.SetWholeExtent(-5, 5, 0, 0, -5, 5)
            else:
                r.SetWholeExtent(0, 0, -5, 5, -5, 5)
            r.Update()

            c = vtk.vtkTableBasedClipDataSet()
            c.SetInputConnection(r.GetOutputPort())
            c.SetUseValueAsOffset(0)
            c.SetValue(150)
            c.SetInsideOut(1)
            c.Update()

            self.assertEqual(c.GetOutput().GetNumberOfCells(), nCells)

    def testImage(self):
        # Clip a 3D image with a sphere implicit function.
        r = vtk.vtkRTAnalyticSource()
        r.SetWholeExtent(-5, 5, -5, 5, -5, 5)
        r.Update()

        s = vtk.vtkSphere()
        s.SetRadius(2)
        s.SetCenter(0,0,0)

        c = vtk.vtkTableBasedClipDataSet()
        c.SetInputConnection(r.GetOutputPort())
        c.SetClipFunction(s)
        c.SetInsideOut(1)
        c.Update()

        self.assertEqual(c.GetOutput().GetNumberOfCells(), 64)

    def testRectilinear(self):
        # Rebuild the image as a rectilinear grid with unit-spaced axis
        # coordinates, then clip with the same sphere.
        rt = vtk.vtkRTAnalyticSource()
        rt.SetWholeExtent(-5, 5, -5, 5, -5, 5)
        rt.Update()
        i = rt.GetOutput()

        r = vtk.vtkRectilinearGrid()
        dims = i.GetDimensions()
        r.SetDimensions(dims)
        exts = i.GetExtent()
        orgs = i.GetOrigin()

        xs = vtk.vtkFloatArray()
        xs.SetNumberOfTuples(dims[0])
        for d in range(dims[0]):
            xs.SetTuple1(d, orgs[0] + exts[0] + d)
        r.SetXCoordinates(xs)

        ys = vtk.vtkFloatArray()
        ys.SetNumberOfTuples(dims[1])
        for d in range(dims[1]):
            ys.SetTuple1(d, orgs[1] + exts[2] + d)
        r.SetYCoordinates(ys)

        zs = vtk.vtkFloatArray()
        zs.SetNumberOfTuples(dims[2])
        for d in range(dims[2]):
            zs.SetTuple1(d, orgs[2] + exts[4] + d)
        r.SetZCoordinates(zs)

        s = vtk.vtkSphere()
        s.SetRadius(2)
        s.SetCenter(0,0,0)

        c = vtk.vtkTableBasedClipDataSet()
        c.SetInputData(r)
        c.SetClipFunction(s)
        c.SetInsideOut(1)
        c.Update()

        self.assertEqual(c.GetOutput().GetNumberOfCells(), 64)

    def testStructured2D(self):
        # Clip 2D structured grids (one per axis plane) with a cylinder
        # rotated off-axis so it intersects each plane differently.
        planes = ['XY', 'XZ', 'YZ']
        expectedNCells = [42, 34, 68]
        for plane, nCells in zip(planes,expectedNCells):
            rt = vtk.vtkRTAnalyticSource()
            if plane == 'XY':
                rt.SetWholeExtent(-5, 5, -5, 5, 0, 0)
            elif plane == 'XZ':
                rt.SetWholeExtent(-5, 5, 0, 0, -5, 5)
            else:
                rt.SetWholeExtent(0, 0, -5, 5, -5, 5)
            rt.Update()
            i = rt.GetOutput()

            st = vtk.vtkStructuredGrid()
            st.SetDimensions(i.GetDimensions())

            nps = i.GetNumberOfPoints()
            ps = vtk.vtkPoints()
            ps.SetNumberOfPoints(nps)
            for idx in range(nps):
                ps.SetPoint(idx, i.GetPoint(idx))
            st.SetPoints(ps)

            cyl = vtk.vtkCylinder()
            cyl.SetRadius(2)
            cyl.SetCenter(0,0,0)
            transform = vtk.vtkTransform()
            transform.RotateWXYZ(45,20,1,10)
            cyl.SetTransform(transform)

            c = vtk.vtkTableBasedClipDataSet()
            c.SetInputData(st)
            c.SetClipFunction(cyl)
            c.SetInsideOut(1)
            c.Update()

            self.assertEqual(c.GetOutput().GetNumberOfCells(), nCells)

    def testStructured(self):
        # Copy the image points into a 3D structured grid and clip with a
        # sphere; cell count should match the image case.
        rt = vtk.vtkRTAnalyticSource()
        rt.SetWholeExtent(-5, 5, -5, 5, -5, 5)
        rt.Update()
        i = rt.GetOutput()

        st = vtk.vtkStructuredGrid()
        st.SetDimensions(i.GetDimensions())

        nps = i.GetNumberOfPoints()
        ps = vtk.vtkPoints()
        ps.SetNumberOfPoints(nps)
        for idx in range(nps):
            ps.SetPoint(idx, i.GetPoint(idx))
        st.SetPoints(ps)

        s = vtk.vtkSphere()
        s.SetRadius(2)
        s.SetCenter(0,0,0)

        c = vtk.vtkTableBasedClipDataSet()
        c.SetInputData(st)
        c.SetClipFunction(s)
        c.SetInsideOut(1)
        c.Update()

        self.assertEqual(c.GetOutput().GetNumberOfCells(), 64)

    def testUnstructured(self):
        # Threshold converts the image to an unstructured grid; clip with a
        # sphere first, then re-use the clipper on an EnSight dataset and
        # finish with an image-based regression test of the rendering.
        rt = vtk.vtkRTAnalyticSource()
        rt.SetWholeExtent(-5, 5, -5, 5, -5, 5)

        t = vtk.vtkThreshold()
        t.SetInputConnection(rt.GetOutputPort())
        t.ThresholdByUpper(-10)

        s = vtk.vtkSphere()
        s.SetRadius(2)
        s.SetCenter(0,0,0)

        c = vtk.vtkTableBasedClipDataSet()
        c.SetInputConnection(t.GetOutputPort())
        c.SetClipFunction(s)
        c.SetInsideOut(1)
        c.Update()

        self.assertEqual(c.GetOutput().GetNumberOfCells(), 64)

        eg = vtk.vtkEnSightGoldReader()
        eg.SetCaseFileName(VTK_DATA_ROOT + "/Data/EnSight/elements.case")
        eg.Update()

        pl = vtk.vtkPlane()
        pl.SetOrigin(3.5, 3.5, 0.5)
        pl.SetNormal(0, 0, 1)

        c.SetInputConnection(eg.GetOutputPort())
        c.SetClipFunction(pl)
        c.SetInsideOut(1)
        c.Update()

        data = c.GetOutputDataObject(0).GetBlock(0)
        self.assertEqual(data.GetNumberOfCells(), 75)

        rw = vtk.vtkRenderWindow()
        ren = vtk.vtkRenderer()
        rw.AddRenderer(ren)

        mapper = vtk.vtkDataSetMapper()
        mapper.SetInputData(data)
        actor = vtk.vtkActor()
        actor.SetMapper(mapper)
        ren.AddActor(actor)

        ac = ren.GetActiveCamera()
        ac.SetPosition(-7.9, 9.7, 14.6)
        ac.SetFocalPoint(3.5, 3.5, 0.5)
        ac.SetViewUp(0.08, 0.93, -0.34)
        rw.Render()
        ren.ResetCameraClippingRange()

        rtTester = vtk.vtkTesting()
        for arg in sys.argv[1:]:
            rtTester.AddArgument(arg)
        rtTester.AddArgument("-V")
        rtTester.AddArgument("tableBasedClip.png")
        rtTester.SetRenderWindow(rw)
        rw.Render()
        rtResult = rtTester.RegressionTest(10)
if __name__ == "__main__":
Testing.main([(TestClip, 'test')])
| mspark93/VTK | Filters/General/Testing/Python/tableBasedClip.py | Python | bsd-3-clause | 6,508 | [
"VTK"
] | 9b23e7cae906379492a15c6ccd4b22671c57281428b568884ea772fde0742d6e |
#!/usr/local/bin/jython
# -*- coding: utf-8 -*-
# Written by Shunsuke Haga
# Date: 2017/04/12
#
# Written for Python 2.7
# This script download Chicago marathon race result(2016) and store it as a mdb format.
import urllib, urllib2
import os
try:
from bs4 import BeautifulSoup
except:
print "Need to install library, BeautifulSoup to scrape page!"
exit()
import time
import sys
import re
reload(sys)
sys.setdefaultencoding('utf8')
try:
from com.ziclix.python.sql import zxJDBC
except:
print "Run in jython!"
exit()
class Runner:
    """One finisher row scraped from the Chicago marathon result table."""

    def __init__(self, listinfo = [], *args):
        # listinfo holds the cell texts of one result-table row:
        # [place overall, place gender, place division, name(country),
        #  location, bib, division, age, half split, finish time]
        def cell(index):
            value = listinfo[index]
            return value if value is not None else ""

        self.gender = ""
        self.plc_all = cell(0)
        self.plc_gen = cell(1)
        self.plc_div = cell(2)
        namecountry = re.findall("(.+?)(?:\([A-Z]+\))$", listinfo[3])
        self.name = namecountry[0] if len(namecountry) > 0 else listinfo[3]
        self.first = self.name.split(",")[0]
        self.last = self.name.split(",")[1].replace(" ", "")
        country = re.findall("\(([A-Z]+)\)$", listinfo[3])
        self.country = country[0] if len(country) > 0 else ""
        self.loc = "\"" + listinfo[4] + "\"" if listinfo[4] is not None else ""
        self.bib = cell(5)
        self.div = cell(6)
        self.age = cell(7)
        self.half = cell(8)
        self.finish = cell(9)

    def __str__(self):
        parts = [
            "Gender: " + self.gender,
            "Place Overall: " + self.plc_all,
            "Place Gender: " + self.plc_gen,
            "Place Division: " + self.plc_div,
            "Name: " + self.name,
            "Country: " + self.country,
            "Location: " + self.loc,
            "Bib: " + self.bib,
            "Division: " + self.div,
            "Age: " + self.age,
            "Half: " + self.half,
            "Finish: " + self.finish,
        ]
        return "[ " + ", ".join(parts) + "]"

    def set_gender(self, gender):
        """Record the gender bucket ('M' or 'W') this row came from."""
        self.gender = gender

    def result (self):
        """Return the CSV column values in output order."""
        return [self.gender, self.plc_all, self.plc_gen, self.plc_div,
                self.first, self.last, self.country, self.loc, self.bib,
                self.div, self.age, self.half, self.finish]
def main():
    """Scrape the 2016 Chicago marathon results and write them to output.csv."""
    with open("test.html") as html, open("output.csv", 'w') as output:
        output.write("Gender, Place Overall, Place Gender, Place Division, Last Name, First Name, Country, Location, Bib, Division, Age, Half, Finish\n")
        for gender in ["M", "W"]:
            # NOTE(review): range(1, 5) visits pages 1-4, not 5 -- the
            # comment below appears to over-count by one; confirm intent.
            for pagenum in range(1,5): # Setting it to 5 pages
                print "Operating page:" + str(pagenum) + " for gender:" + gender
                url = "http://results.chicagomarathon.com/2016/?page=" + str(pagenum) + "&event=MAR&lang=EN_CAP&num_results=1000&pid=list&search%5Bsex%5D=" + gender
                html = urllib2.urlopen(url)
                soup = BeautifulSoup(html.read())
                # Skip the header row; column 3 holds the runner name
                # inside an anchor tag.
                # NOTE(review): 'index is not 3' is an identity check on an
                # int -- works on CPython small ints but '!=' is intended.
                for index, tr in enumerate(soup.find_all("tr")[1:]):
                    runner = Runner([ td.string if index is not 3 else td.find("a").string \
                                      for index, td in enumerate(tr.find_all("td")) ])
                    runner.set_gender(gender)
                    #print runner.result()
                    output.write(','.join(runner.result()) + "\n")
                # Be polite to the results server between page fetches.
                time.sleep(3)
# set up constants
# NOTE(review): original indentation was lost; this block is assumed to be
# module level (it may belong inside main()) -- confirm against VCS.
currentdir = os.path.dirname(os.path.abspath(__file__))
#DRV = 'net.ucanaccess.jdbc.UcanloadDriver'; MDB = "//" + currentdir + "/output.mdb"
MDB = "jdbc:ucanaccess://" + currentdir + "/output.mdb"; DRV = "net.ucanaccess.jdbc.UcanloadDriver"
#jdbc_url, username, password, driver_class = "jdbc:ucanaccess:" + MDB, "", "", "net.ucanaccess.jdbc.UcanloadDriver"
#cnxn = zxJDBC.connect(MDB, None, None, DRV)
#cnxn = zxJDBC.connect(jdbc_url, username, password, driver_class)
# NOTE(review): driver_class is only assigned in the commented-out line
# above, so this call raises NameError; DRV is most likely intended.
cnxn = zxJDBC.connect(MDB, "", "", driver_class)
crsr = cnxn.cursor()
print crsr.tables(None, None, '%', ('TABLE',))
print crsr.fetchall()
#crsr.execute("SELECT * FROM Amber ORDER BY Country")
#print crsr.schema("Amber")
crsr.execute("SELECT * FROM Amber ORDER BY G")
print crsr.description
for row in crsr.fetchall():
    print row[0], row[1], row[2], row[3], row[4], row[5], row[6]
crsr.close()
cnxn.close()

if __name__ == '__main__':
    main()
| shunsuke218/CIT285-2017-final | download_data.py | Python | mit | 4,586 | [
"Amber"
] | e37e644aa51a3b465f93f1c0df34d5c3ce74af223aa713a939debc3cf267648c |
"""Python toolkit for Tinker Pop 3 Gremlin Server."""
from goblin.app import Goblin
from goblin.driver import AsyncGraph, Cluster, DriverRemoteConnection, Graph
from goblin.element import Edge, Vertex, VertexProperty
from goblin.properties import Boolean, Float, Integer, Property, String
| ZEROFAIL/goblin | goblin/__init__.py | Python | agpl-3.0 | 290 | [
"TINKER"
] | 54b8814370a3d59f5d85114733365483697f3861340bcda149a09bdcfc81b724 |
# GridCal
# Copyright (C) 2022 Santiago Peñate Vera
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import numpy as np
import numba as nb
from numba.pycc import CC
from numba.typed import List
import math
# @nb.njit("i4[:](i8)")
@nb.njit()
def ialloc(n):
return np.zeros(n, dtype=nb.int32)
# @nb.njit("f8[:](i8)")
@nb.njit()
def xalloc(n):
return np.zeros(n, dtype=nb.float64)
# @nb.njit("Tuple((i8, i8, i4[:], i4[:], f8[:], i8))(i8, i8, i8)")
@nb.njit()
def csc_spalloc_f(m, n, nzmax):
"""
Allocate a sparse matrix (triplet form or compressed-column form).
@param m: number of rows
@param n: number of columns
@param nzmax: maximum number of entries
@return: m, n, Aindptr, Aindices, Adata, Anzmax
"""
Anzmax = max(nzmax, 1)
Aindptr = ialloc(n + 1)
Aindices = ialloc(Anzmax)
Adata = xalloc(Anzmax)
return m, n, Aindptr, Aindices, Adata, Anzmax
# @nb.njit("(f8[:], f8[:], i8)")
@nb.njit()
def _copy_f(src, dest, length):
for i in range(length):
dest[i] = src[i]
# @nb.njit("(i4[:], i4[:], i8)")
@nb.njit()
def _copy_i(src, dest, length):
for i in range(length):
dest[i] = src[i]
# @nb.njit("i8(i4[:], i4[:], i8)")
@nb.njit()
def csc_cumsum_i(p, c, n):
"""
p [0..n] = cumulative sum of c [0..n-1], and then copy p [0..n-1] into c
@param p: size n+1, cumulative sum of c
@param c: size n, overwritten with p [0..n-1] on output
@param n: length of c
@return: sum (c), null on error
"""
nz = 0
nz2 = 0.0
for i in range(n):
p[i] = nz
nz += c[i]
nz2 += c[i] # also in double to avoid CS_INT overflow
c[i] = p[i] # also copy p[0..n-1] back into c[0..n-1]
p[n] = nz
return int(nz2) # return sum (c [0..n-1])
# @nb.njit("Tuple((i4[:], f8[:], i8))(i8, i4[:], i4[:], f8[:], i8)")
@nb.njit()
def csc_sprealloc_f(An, Aindptr, Aindices, Adata, nzmax):
"""
Change the max # of entries a sparse matrix can hold.
:param An: number of columns
:param Aindptr: csc column pointers
:param Aindices: csc row indices
:param Adata: csc data
:param nzmax:new maximum number of entries
:return: indices, data, nzmax
"""
if nzmax <= 0:
nzmax = Aindptr[An]
length = min(nzmax, len(Aindices))
Ainew = np.empty(nzmax, dtype=nb.int32)
for i in range(length):
Ainew[i] = Aindices[i]
length = min(nzmax, len(Adata))
Axnew = np.empty(nzmax, dtype=nb.float64)
for i in range(length):
Axnew[i] = Adata[i]
return Ainew, Axnew, nzmax
# @nb.njit("i8(i4[:], i4[:], f8[:], i8, f8, i4[:], f8[:], i8, i4[:], i8)")
@nb.njit()
def csc_scatter_f(Ap, Ai, Ax, j, beta, w, x, mark, Ci, nz):
"""
Scatters and sums a sparse vector A(:,j) into a dense vector, x = x + beta * A(:,j)
:param Ap:
:param Ai:
:param Ax:
:param j: the column of A to use
:param beta: scalar multiplied by A(:,j)
:param w: size m, node i is marked if w[i] = mark
:param x: size m, ignored if null
:param mark: mark value of w
:param Ci: pattern of x accumulated in C.i
:param nz: pattern of x placed in C starting at C.i[nz]
:return: new value of nz, -1 on error, x and w are modified
"""
for p in range(Ap[j], Ap[j + 1]):
i = Ai[p] # A(i,j) is nonzero
if w[i] < mark:
w[i] = mark # i is new entry in column j
Ci[nz] = i # add i to pattern of C(:,j)
nz += 1
x[i] = beta * Ax[p] # x(i) = beta*A(i,j)
else:
x[i] += beta * Ax[p] # i exists in C(:,j) already
return nz
# @nb.njit("i8(i4[:], i4[:], f8[:], i8, f8, i4[:], f8[:], i8, i4[:], i8)")
@nb.njit()
def csc_scatter_ff(Aindptr, Aindices, Adata, j, beta, w, x, mark, Ci, nz):
"""
Scatters and sums a sparse vector A(:,j) into a dense vector, x = x + beta * A(:,j)
:param Aindptr:
:param Aindices:
:param Adata:
:param j: the column of A to use
:param beta: scalar multiplied by A(:,j)
:param w: size m, node i is marked if w[i] = mark
:param x: size m, ignored if null
:param mark: mark value of w
:param Ci: pattern of x accumulated in C.i
:param nz: pattern of x placed in C starting at C.i[nz]
:return: new value of nz, -1 on error, x and w are modified
"""
for p in range(Aindptr[j], Aindptr[j + 1]):
i = Aindices[p] # A(i,j) is nonzero
if w[i] < mark:
w[i] = mark # i is new entry in column j
Ci[nz] = i # add i to pattern of C(:,j)
nz += 1
x[i] = beta * Adata[p] # x(i) = beta*A(i,j)
else:
x[i] += beta * Adata[p] # i exists in C(:,j) already
return nz
# @nb.njit("Tuple((i8, i8, i4[:], i4[:], f8[:]))(i8, i8, i4[:], i4[:], f8[:], i8, i8, i4[:], i4[:], f8[:], f8, f8)")
@nb.njit()
def csc_add_ff(Am, An, Aindptr, Aindices, Adata,
Bm, Bn, Bindptr, Bindices, Bdata, alpha, beta):
"""
C = alpha*A + beta*B
@param A: column-compressed matrix
@param B: column-compressed matrix
@param alpha: scalar alpha
@param beta: scalar beta
@return: C=alpha*A + beta*B, null on error (Cm, Cn, Cp, Ci, Cx)
"""
nz = 0
m, anz, n, Bp, Bx = Am, Aindptr[An], Bn, Bindptr, Bdata
bnz = Bp[n]
w = np.zeros(m, dtype=nb.int32)
x = xalloc(m) # get workspace
Cm, Cn, Cp, Ci, Cx, Cnzmax = csc_spalloc_f(m, n, anz + bnz) # allocate result
for j in range(n):
Cp[j] = nz # column j of C starts here
nz = csc_scatter_f(Aindptr, Aindices, Adata, j, alpha, w, x, j + 1, Ci, nz) # alpha*A(:,j)
nz = csc_scatter_f(Bindptr, Bindices, Bdata, j, beta, w, x, j + 1, Ci, nz) # beta*B(:,j)
for p in range(Cp[j], nz):
Cx[p] = x[Ci[p]]
Cp[n] = nz # finalize the last column of C
return Cm, Cn, Cp, Ci, Cx # success; free workspace, return C
# @nb.njit("Tuple((i8, i8, i4[:], i4[:], f8[:], i8))(i8, i8, i4[:], i4[:], f8[:], i8, i8, i4[:], i4[:], f8[:])",
# parallel=False, nogil=True, fastmath=False, cache=True) # fastmath=True breaks the code
@nb.njit()
def csc_multiply_ff(Am, An, Ap, Ai, Ax,
Bm, Bn, Bp, Bi, Bx):
"""
Sparse matrix multiplication, C = A*B where A and B are CSC sparse matrices
:param Am: number of rows in A
:param An: number of columns in A
:param Ap: column pointers of A
:param Ai: indices of A
:param Ax: data of A
:param Bm: number of rows in B
:param Bn: number of columns in B
:param Bp: column pointers of B
:param Bi: indices of B
:param Bx: data of B
:return: Cm, Cn, Cp, Ci, Cx, Cnzmax
"""
assert An == Bm
nz = 0
anz = Ap[An]
bnz = Bp[Bn]
Cm = Am
Cn = Bn
t = nb
w = np.zeros(Cn, dtype=t.int32) # ialloc(m) # get workspace
x = np.empty(Cn, dtype=t.float64) # xalloc(m) # get workspace
# allocate result
Cnzmax = int(math.sqrt(Cm)) * anz + bnz # the trick here is to allocate just enough memory to avoid reallocating
Cp = np.empty(Cn + 1, dtype=t.int32)
Ci = np.empty(Cnzmax, dtype=t.int32)
Cx = np.empty(Cnzmax, dtype=t.float64)
for j in range(Cn):
# claim more space
if nz + Cm > Cnzmax:
# Ci, Cx, Cnzmax = csc_sprealloc_f(Cn, Cp, Ci, Cx, 2 * Cnzmax + m)
print('Re-Allocating')
Cnzmax = 2 * Cnzmax + Cm
if Cnzmax <= 0:
Cnzmax = Cp[An]
length = min(Cnzmax, len(Ci))
Cinew = np.empty(Cnzmax, dtype=nb.int32)
for i in range(length):
Cinew[i] = Ci[i]
Ci = Cinew
length = min(Cnzmax, len(Cx))
Cxnew = np.empty(Cnzmax, dtype=nb.float64)
for i in range(length):
Cxnew[i] = Cx[i]
Cx = Cxnew
# column j of C starts here
Cp[j] = nz
# perform the multiplication
for pb in range(Bp[j], Bp[j + 1]):
for pa in range(Ap[Bi[pb]], Ap[Bi[pb] + 1]):
ia = Ai[pa]
if w[ia] < j + 1:
w[ia] = j + 1
Ci[nz] = ia
nz += 1
x[ia] = Bx[pb] * Ax[pa]
else:
x[ia] += Bx[pb] * Ax[pa]
for pc in range(Cp[j], nz):
Cx[pc] = x[Ci[pc]]
Cp[Cn] = nz # finalize the last column of C
# cut the arrays to their nominal size nnz
# Ci, Cx, Cnzmax = csc_sprealloc_f(Cn, Cp, Ci, Cx, 0)
Cnzmax = Cp[Cn]
Cinew = Ci[:Cnzmax]
Cxnew = Cx[:Cnzmax]
return Cm, Cn, Cp, Cinew, Cxnew, Cnzmax
# @nb.njit("f8[:](i8, i8, i4[:], i4[:], f8[:], f8[:])", parallel=False)
@nb.njit()
def csc_mat_vec_ff(m, n, Ap, Ai, Ax, x):
"""
Sparse matrix times dense column vector, y = A * x.
:param m: number of rows
:param n: number of columns
:param Ap: pointers
:param Ai: indices
:param Ax: data
:param x: vector x (n)
:return: vector y (m)
"""
assert n == x.shape[0]
y = np.zeros(m, dtype=nb.float64)
for j in range(n):
for p in range(Ap[j], Ap[j + 1]):
y[Ai[p]] += Ax[p] * x[j]
return y
# @nb.njit("Tuple((i8, i8, i4[:], i4[:], f8[:]))(i8, i8, i4[:], i4[:], f8[:], i8)")
@nb.njit()
def coo_to_csc(m, n, Ti, Tj, Tx, nz):
"""
C = compressed-column form of a triplet matrix T. The columns of C are
not sorted, and duplicate entries may be present in C.
@param T: triplet matrix
@return: Cm, Cn, Cp, Ci, Cx
"""
Cm, Cn, Cp, Ci, Cx, nz = csc_spalloc_f(m, n, nz) # allocate result
w = w = np.zeros(n, dtype=nb.int32) # get workspace
for k in range(nz):
w[Tj[k]] += 1 # column counts
csc_cumsum_i(Cp, w, n) # column pointers
for k in range(nz):
p = w[Tj[k]]
w[Tj[k]] += 1
Ci[p] = Ti[k] # A(i,j) is the pth entry in C
# if Cx is not None:
Cx[p] = Tx[k]
return Cm, Cn, Cp, Ci, Cx
# @nb.njit("void(i8, i8, i4[:], i4[:], f8[:], i4[:], i4[:], f8[:])")
@nb.njit()
def csc_to_csr(m, n, Ap, Ai, Ax, Bp, Bi, Bx):
"""
Convert a CSC Matrix into a CSR Matrix
:param m: number of rows
:param n: number of columns
:param Ap: indptr of the CSC matrix
:param Ai: indices of the CSC matrix
:param Ax: data of the CSC matrix
:param Bp: indptr of the CSR matrix (to compute, size 'm+1', has to be initialized to zeros)
:param Bi: indices of the CSR matrix (to compute, size nnz)
:param Bx: data of the CSR matrix (to compute, size nnz)
"""
nnz = Ap[n]
for k in range(nnz):
Bp[Ai[k]] += 1
cum_sum = 0
for col in range(m):
temp = Bp[col]
Bp[col] = cum_sum
cum_sum += temp
Bp[m] = nnz
for row in range(n):
for jj in range(Ap[row], Ap[row+1]):
col = Ai[jj]
dest = Bp[col]
Bi[dest] = row
Bx[dest] = Ax[jj]
Bp[col] += 1
last = 0
for col in range(m):
temp = Bp[col]
Bp[col] = last
last = temp
# @nb.njit("Tuple((i8, i8, i4[:], i4[:], f8[:]))(i8, i8, i4[:], i4[:], f8[:])")
@nb.njit()
def csc_transpose(m, n, Ap, Ai, Ax):
"""
Transpose matrix
:param m: A.m
:param n: A.n
:param Ap: A.indptr
:param Ai: A.indices
:param Ax: A.data
:return: Cm, Cn, Cp, Ci, Cx
"""
"""
Computes the transpose of a sparse matrix, C =A';
@param A: column-compressed matrix
@param allocate_values: pattern only if false, both pattern and values otherwise
@return: C=A', null on error
"""
Cm, Cn, Cp, Ci, Cx, Cnzmax = csc_spalloc_f(m=n, n=m, nzmax=Ap[n]) # allocate result
w = ialloc(m) # get workspace
for p in range(Ap[n]):
w[Ai[p]] += 1 # row counts
csc_cumsum_i(Cp, w, m) # row pointers
for j in range(n):
for p in range(Ap[j], Ap[j + 1]):
q = w[Ai[p]]
w[Ai[p]] += 1
Ci[q] = j # place A(i,j) as entry C(j,i)
Cx[q] = Ax[p]
return Cm, Cn, Cp, Ci, Cx
# @nb.njit("i4(i4, i4, i4[:])")
@nb.njit()
def binary_find(N, x, array):
"""
Binary search
:param N: size of the array
:param x: value
:param array: array
:return: position where it is found. -1 if it is not found
"""
lower = 0
upper = N
while (lower + 1) < upper:
mid = int((lower + upper) / 2)
if x < array[mid]:
upper = mid
else:
lower = mid
if array[lower] <= x:
return lower
return -1
# @nb.njit("Tuple((i8, i4[:], i4[:], f8[:]))(i8, i8, i4[:], i4[:], f8[:], i4[:], i4[:])")
def csc_sub_matrix_old(Am, Anz, Ap, Ai, Ax, rows, cols):
"""
Get SCS arbitrary sub-matrix
:param Am: number of rows
:param Anz: number of non-zero entries
:param Ap: Column pointers
:param Ai: Row indices
:param Ax: Data
:param rows: row indices to keep
:param cols: column indices to keep
:return: CSC sub-matrix (n, new_col_ptr, new_row_ind, new_val)
"""
n_cols = len(cols)
Bx = np.zeros(Anz, dtype=np.float64)
Bi = np.empty(Anz, dtype=np.int32)
Bp = np.empty(n_cols + 1, dtype=np.int32)
n = 0
p = 0
Bp[p] = 0
for j in cols: # for each column selected ...
i = 0
for r in rows:
for k in range(Ap[j], Ap[j + 1]): # for each row of the column j of A...
if Ai[k] == r:
Bx[n] = Ax[k] # store the value
Bi[n] = i # row index in the new matrix
i += 1
n += 1
if i == 0:
i += 1
p += 1
Bp[p] = n
Bp[p] = n
return n, Bp, Bi[:n], Bx[:n]
@nb.njit()
def csc_sub_matrix(Am, Annz, Ap, Ai, Ax, rows, cols):
    """
    CSC matrix sub-matrix view using a dense row lookup table.

    :param Am: number of rows
    :param Annz: number of non-zero entries
    :param Ap: column pointers
    :param Ai: row indices
    :param Ax: data
    :param rows: array of selected rows: must be sorted! to use the lookup
    :param cols: array of selected columns: should be sorted
    :return: Bx, Bi, Bp, n_rows, n_cols, nnz of the sub-matrix
    """
    n_rows = len(rows)
    n_cols = len(cols)
    nnz = 0  # entries stored so far
    p = 0  # current column-pointer slot
    Bx = np.empty(Annz, dtype=nb.float64)  # data
    Bi = np.empty(Annz, dtype=nb.int32)  # indices
    Bp = np.empty(n_cols + 1, dtype=nb.int32)  # pointers
    Bp[p] = 0
    # generate lookup for the non-immediate axis (for CSC it is the rows):
    # lookup[original_row] -> position of that row in `rows` (0 for rows not
    # selected, hence the rows[ii] == i confirmation below)
    lookup = np.zeros(Am, dtype=nb.int32)
    lookup[rows] = np.arange(len(rows), dtype=nb.int32)
    for j in cols:  # sliced columns
        for k in range(Ap[j], Ap[j + 1]):  # rows of A[:, j]
            # row index translation to the "rows" space
            i = Ai[k]
            ii = lookup[i]
            if rows[ii] == i:
                # entry found: row i is among the selected rows
                Bx[nnz] = Ax[k]
                Bi[nnz] = ii
                nnz += 1
        p += 1
        Bp[p] = nnz
    Bp[p] = nnz  # redundant final write kept from the original
    # numba does not support resize; the caller must slice Bx/Bi to nnz
    return Bx, Bi, Bp, n_rows, n_cols, nnz
@nb.njit()
def csc_sub_matrix_cols(Am, Anz, Ap, Ai, Ax, cols):
    """
    Get a CSC sub-matrix keeping all the rows and a selection of columns.

    :param Am: number of rows
    :param Anz: number of non-zero entries
    :param Ap: column pointers
    :param Ai: row indices
    :param Ax: data
    :param cols: column indices to keep
    :return: CSC sub-matrix (n, new_col_ptr, new_row_ind, new_val)
    """
    n_cols = len(cols)
    n = 0  # number of entries stored so far
    p = 0  # current column-pointer slot
    Bx = np.empty(Anz, dtype=nb.float64)
    Bi = np.empty(Anz, dtype=nb.int32)
    Bp = np.empty(n_cols + 1, dtype=nb.int32)
    Bp[p] = 0
    for j in cols:  # for each column selected ...
        for k in range(Ap[j], Ap[j + 1]):  # for each row of the column j of A...
            # all rows are kept, so entries copy over with unchanged row index
            Bx[n] = Ax[k]  # store the value
            Bi[n] = Ai[k]  # store the row index
            n += 1
        p += 1
        Bp[p] = n
    Bp[p] = n  # redundant final write kept from the original
    return n, Bp, Bi[:n], Bx[:n]
def csc_sub_matrix_rows(An, Anz, Ap, Ai, Ax, rows):
    """
    Get a CSC sub-matrix keeping all the columns and a selection of rows.

    :param An: number of columns (the original docstring said "rows"; the code
               iterates range(An) over columns)
    :param Anz: number of non-zero entries
    :param Ap: column pointers
    :param Ai: row indices
    :param Ax: data
    :param rows: row indices to keep
    :return: CSC sub-matrix (n, new_col_ptr, new_row_ind, new_val)
    """
    n_rows = len(rows)
    n = 0  # number of entries stored so far
    p = 0  # current column-pointer slot
    Bx = np.zeros(Anz, dtype=np.float64)
    Bi = np.empty(Anz, dtype=np.int32)
    Bp = np.empty(An + 1, dtype=np.int32)
    Bp[p] = 0
    for j in range(An):  # for each column ...
        i = 0
        for r in rows:
            for k in range(Ap[j], Ap[j + 1]):  # for each row of the column j of A...
                if Ai[k] == r:
                    Bx[n] = Ax[k]  # store the value
                    Bi[n] = i  # row index in the new matrix
                    n += 1
            i += 1
        # NOTE(review): the original indentation was lost; this reconstruction
        # advances i once per selected row r, which differs from the sibling
        # csc_sub_matrix_old — confirm against the upstream source.
        if i == 0:
            i += 1
        p += 1
        Bp[p] = n
    Bp[p] = n  # redundant final write kept from the original
    return n, Bp, Bi[:n], Bx[:n]
# @nb.njit("f8[:, :](i8, i8, i4[:], i4[:], f8[:])")
def csc_to_dense(m, n, indptr, indices, data):
"""
Convert csc matrix to dense
:param m:
:param n:
:param indptr:
:param indices:
:param data:
:return: 2d numpy array
"""
val = np.zeros((m, n), dtype=np.float64)
for j in range(n):
for p in range(indptr[j], indptr[j + 1]):
val[indices[p], j] = data[p]
return val
# @nb.njit("Tuple((i4[:], i4[:], f8[:]))(i8, f8)")
@nb.njit()
def csc_diagonal(m, value=1.0):
"""
Build CSC diagonal matrix of the given value
:param m: size
:param value: value
:return: CSC matrix
"""
indptr = np.empty(m + 1, dtype=np.int32)
indices = np.empty(m, dtype=np.int32)
data = np.empty(m, dtype=np.float64)
for i in range(m):
indptr[i] = i
indices[i] = i
data[i] = value
indptr[m] = m
return indices, indptr, data
# @nb.njit("Tuple((i4[:], i4[:], f8[:]))(i8, f8[:])")
@nb.njit()
def csc_diagonal_from_array(m, array):
"""
:param m:
:param array:
:return:
"""
indptr = np.empty(m + 1, dtype=np.int32)
indices = np.empty(m, dtype=np.int32)
data = np.empty(m, dtype=np.float64)
for i in range(m):
indptr[i] = i
indices[i] = i
data[i] = array[i]
indptr[m] = m
return indices, indptr, data
# @nb.njit("Tuple((i8, i8, i4[:], i4[:], f8[:]))"
# "(i8, i8, i4[:], i4[:], f8[:], "
# "i8, i8, i4[:], i4[:], f8[:], "
# "i8, i8, i4[:], i4[:], f8[:], "
# "i8, i8, i4[:], i4[:], f8[:])",
# parallel=False, nogil=True, fastmath=True, cache=True)
@nb.njit()
def csc_stack_4_by_4_ff(am, an, Ai, Ap, Ax,
bm, bn, Bi, Bp, Bx,
cm, cn, Ci, Cp, Cx,
dm, dn, Di, Dp, Dx):
"""
stack csc sparse float matrices like this:
| A | B |
| C | D |
:param am:
:param an:
:param Ai:
:param Ap:
:param Ax:
:param bm:
:param bn:
:param Bi:
:param Bp:
:param Bx:
:param cm:
:param cn:
:param Ci:
:param Cp:
:param Cx:
:param dm:
:param dn:
:param Di:
:param Dp:
:param Dx:
:return:
"""
# check dimensional compatibility
assert am == bm
assert cm == dm
assert an == cn
assert bn == dn
nnz = Ap[an] + Bp[bn] + Cp[cn] + Dp[dn]
m = am + cm
n = an + bn
indptr = np.zeros(n + 1, dtype=nb.int32)
indices = np.zeros(nnz, dtype=nb.int32)
data = np.zeros(nnz, dtype=nb.float64)
cnt = 0
indptr[0] = 0
for j in range(an): # for every column, same as range(cols + 1) For A and C
for k in range(Ap[j], Ap[j + 1]): # for every entry in the column from A
indices[cnt] = Ai[k] # row index
data[cnt] = Ax[k]
cnt += 1
for k in range(Cp[j], Cp[j + 1]): # for every entry in the column from C
indices[cnt] = Ci[k] + am # row index
data[cnt] = Cx[k]
cnt += 1
indptr[j + 1] = cnt
for j in range(bn): # for every column, same as range(cols + 1) For B and D
for k in range(Bp[j], Bp[j + 1]): # for every entry in the column from B
indices[cnt] = Bi[k] # row index
data[cnt] = Bx[k]
cnt += 1
for k in range(Dp[j], Dp[j + 1]): # for every entry in the column from D
indices[cnt] = Di[k] + bm # row index
data[cnt] = Dx[k]
cnt += 1
indptr[an + j + 1] = cnt
return m, n, indices, indptr, data
# @nb.njit("f8(i8, i4[:], f8[:])")
@nb.njit()
def csc_norm(n, Ap, Ax):
"""
Computes the 1-norm of a sparse matrix = max (sum (abs (A))), largest
column sum.
@param A: column-compressed matrix
@return: the 1-norm if successful, -1 on error
"""
norm = 0
for j in range(n):
s = 0
for p in range(Ap[j], Ap[j + 1]):
s += abs(Ax[p])
norm = max(norm, s)
return norm
@nb.njit()
def find_islands(node_number, indptr, indices):
    """
    Get the islands (connected components) of a graph given in CSC adjacency
    form. This is the non-recursive version.

    :param node_number: number of nodes in the graph
    :param indptr: CSC column pointers of the adjacency structure
    :param indices: CSC row indices of the adjacency structure
    :return: islands list where each element is a list of the node indices of the island
    """
    # Mark all the vertices as not visited
    visited = np.zeros(node_number, dtype=nb.boolean)
    # storage structure for the islands (list of lists)
    islands = List.empty_list(List.empty_list(nb.int64))
    # set the island index
    island_idx = 0
    # go though all the vertices...
    for node in range(node_number):
        # if the node has not been visited...
        if not visited[node]:
            # add a new island; everything reachable from previous seeds has
            # already been visited
            islands.append(List.empty_list(nb.int64))
            # ------------------------------------------------------------------
            # Traversal: store in the island all vertices reachable from "node".
            # NOTE(review): stack.pop(0) removes from the FRONT, so this is
            # actually breadth-first rather than depth-first; either order
            # yields the same island sets.
            stack = List.empty_list(nb.int64)
            stack.append(node)
            while len(stack) > 0:
                # pick the first element of the queue
                v = stack.pop(0)
                # if v has not been visited...
                if not visited[v]:
                    # mark as visited
                    visited[v] = True
                    # add element to the island
                    islands[island_idx].append(v)
                    # Add the neighbours of v to the queue
                    start = indptr[v]
                    end = indptr[v + 1]
                    for i in range(start, end):
                        k = indices[i]  # get the column index in the CSC scheme
                        if not visited[k]:
                            stack.append(k)
            # ------------------------------------------------------------------
            # increase the islands index: all connected vertices were visited
            island_idx += 1
    # sort the islands to maintain raccord
    # for island in islands:
    #     island.sort()
    return islands
# @nb.njit("Tuple((i4[:], i4[:], f8[:], i8, i8))(i8, i4[:], i4[:], f8[:], i8[:])")
@nb.njit()
def sp_submat_c_numba(nrows, ptrs, indices, values, cols):
"""
slice CSC columns
:param nrows: number of rows of the matrix
:param ptrs: row pointers
:param indices: column indices
:param values: data
:param cols: vector of columns to slice
:return: new_indices, new_col_ptr, new_val, nrows, ncols
"""
# pass1: determine the number of non-zeros
nnz = 0
for j in cols:
for k in range(ptrs[j], ptrs[j+1]):
nnz += 1
# pass2: size the vector and perform the slicing
ncols = len(cols)
n = 0
p = 0
new_val = np.empty(nnz, dtype=nb.float64)
new_indices = np.empty(nnz, dtype=nb.int32)
new_col_ptr = np.empty(ncols + 1, dtype=nb.int32)
new_col_ptr[p] = 0
for j in cols:
for k in range(ptrs[j], ptrs[j + 1]):
new_val[n] = values[k]
new_indices[n] = indices[k]
n += 1
p += 1
new_col_ptr[p] = n
return new_indices, new_col_ptr, new_val, nrows, ncols
@nb.njit(nogil=True, fastmath=True, cache=True)
def csc_stack_2d_ff_row_major(mats_data, mats_indptr, mats_indices, mats_cols, mats_rows, m_rows=1, m_cols=1):
    """
    Assemble a CSC matrix from a list of matrices representing a "super matrix":

        |mat11 | mat12 | mat13 |
        |mat21 | mat22 | mat23 |

    turns into:

        mats = [mat11, mat12, mat13, mat21, mat22, mat23]
        m_rows = 2
        m_cols = 3

    The per-block arrays are stored ROW-major: block (r, c) lives at flat
    index c + r * m_cols.

    :param mats_data: array of numpy arrays with the data of each CSC matrix
    :param mats_indptr: array of numpy arrays with the indptr of each CSC matrix
    :param mats_indices: array of numpy arrays with the indices of each CSC matrix
    :param mats_cols: array with the number of columns of each CSC matrix
    :param mats_rows: array with the number of rows of each CSC matrix
    :param m_rows: number of rows of the mats structure
    :param m_cols: number of cols of the mats structure
    :return: data, indices, indptr, nrows, ncols of the assembled matrix
    """
    '''
    Row major: A(r,c) element is at A[c + r * n_columns];
    Col major: A(r,c) element is at A[r + c * n_rows];
    '''
    # pass 1: compute the number of non zero and the final dimensions
    nnz = 0
    nrows = 0
    ncols = 0
    for r in range(m_rows):
        nrows += mats_rows[r * m_cols]  # equivalent to mats[r, 0]
        for c in range(m_cols):
            col = mats_cols[c + r * m_cols]  # equivalent to mats[r, c]
            nnz += mats_indptr[r * m_cols + c][col]  # indptr[-1] == block nnz
            if r == 0:
                ncols += col
    # pass 2: fill in the data, walking the result column by column
    indptr = np.empty(ncols + 1, dtype=np.int32)
    indices = np.empty(nnz, dtype=np.int32)
    data = np.empty(nnz, dtype=np.float64)
    cnt = 0
    indptr[0] = 0
    offset_col = 0
    for c in range(m_cols):  # for each column of the array of matrices
        # number of columns of this block-column
        n = mats_cols[c]  # equivalent to mats[0, c]
        if n > 0:
            for j in range(n):  # for every column of the column of matrices
                offset_row = 0
                for r in range(m_rows):  # for each row of the array of rows
                    # number of rows of block (r, c)
                    m = mats_rows[r * m_cols + c]  # equivalent to mats[r, c].shape[0]
                    if m > 0:
                        Ap = mats_indptr[r * m_cols + c]
                        Ai = mats_indices[r * m_cols + c]
                        Ax = mats_data[r * m_cols + c]
                        for k in range(Ap[j], Ap[j + 1]):  # for every entry in the column from A
                            indices[cnt] = Ai[k] + offset_row  # shifted row index
                            data[cnt] = Ax[k]
                            cnt += 1
                        offset_row += m
                indptr[offset_col + j + 1] = cnt
            offset_col += n
    return data, indices, indptr, nrows, ncols
@nb.njit(nogil=True, fastmath=True, cache=True)
def csc_stack_2d_ff_col_major(mats_data, mats_indptr, mats_indices, mats_cols, mats_rows, m_rows=1, m_cols=1):
    """
    Assemble a CSC matrix from a list of matrices representing a "super matrix":

        |mat11 | mat12 | mat13 |
        |mat21 | mat22 | mat23 |

    turns into:

        mats = [mat11, mat12, mat13, mat21, mat22, mat23]
        m_rows = 2
        m_cols = 3

    The per-block arrays are stored COLUMN-major: block (r, c) lives at flat
    index r + c * m_rows.

    :param mats_data: array of numpy arrays with the data of each CSC matrix
    :param mats_indptr: array of numpy arrays with the indptr of each CSC matrix
    :param mats_indices: array of numpy arrays with the indices of each CSC matrix
    :param mats_cols: array with the number of columns of each CSC matrix
    :param mats_rows: array with the number of rows of each CSC matrix
    :param m_rows: number of rows of the mats structure
    :param m_cols: number of cols of the mats structure
    :return: data, indices, indptr, nrows, ncols of the assembled matrix
    """
    '''
    Col major: A(r,c) element is at A[r + c * n_rows];
    '''
    # pass 1: compute the number of non zero and the final dimensions
    nnz = 0
    nrows = 0
    ncols = 0
    for r in range(m_rows):
        nrows += mats_rows[r]  # equivalent to mats[r, 0]
        for c in range(m_cols):
            col = mats_cols[r + c * m_rows]  # equivalent to mats[r, c]
            nnz += mats_indptr[r + c * m_rows][col]  # indptr[-1] == block nnz
            if r == 0:
                ncols += col
    # pass 2: fill in the data, walking the result column by column
    indptr = np.empty(ncols + 1, dtype=np.int32)
    indices = np.empty(nnz, dtype=np.int32)
    data = np.empty(nnz, dtype=np.float64)
    cnt = 0
    indptr[0] = 0
    offset_col = 0
    for c in range(m_cols):  # for each column of the array of matrices
        # number of columns of this block-column
        n = mats_cols[c * m_rows]  # equivalent to mats[0, c]
        if n > 0:
            for j in range(n):  # for every column of the column of matrices
                offset_row = 0
                for r in range(m_rows):  # for each row of the array of rows
                    # number of rows of block (r, c)
                    m = mats_rows[r + c * m_rows]  # equivalent to mats[r, c].shape[0]
                    if m > 0:
                        Ap = mats_indptr[r + c * m_rows]
                        Ai = mats_indices[r + c * m_rows]
                        Ax = mats_data[r + c * m_rows]
                        for k in range(Ap[j], Ap[j + 1]):  # for every entry in the column from A
                            indices[cnt] = Ai[k] + offset_row  # shifted row index
                            data[cnt] = Ax[k]
                            cnt += 1
                        offset_row += m
                indptr[offset_col + j + 1] = cnt
            offset_col += n
    return data, indices, indptr, nrows, ncols
"VisIt"
] | ca563ec2fe37f9637d8e71b981407494cec3f9546d07ff864d05df18e89cbedb |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Views tests for the OSF."""
from __future__ import absolute_import
import datetime as dt
import httplib as http
import json
import math
import time
import unittest
import urllib
import datetime
import mock
from nose.tools import * # noqa PEP8 asserts
from modularodm import Q
from modularodm.exceptions import ValidationError
from framework import auth
from framework.auth import User, Auth
from framework.auth.exceptions import InvalidTokenError
from framework.auth.utils import impute_names_model
from framework.celery_tasks import handlers
from framework.exceptions import HTTPError
from tests.base import (
assert_is_redirect,
capture_signals,
fake,
get_default_metaschema,
OsfTestCase,
)
from tests.factories import (
ApiOAuth2ApplicationFactory, ApiOAuth2PersonalTokenFactory, AuthUserFactory,
BookmarkCollectionFactory, CollectionFactory, MockAddonNodeSettings, NodeFactory,
NodeLogFactory, PrivateLinkFactory, ProjectWithAddonFactory, ProjectFactory,
RegistrationFactory, UnconfirmedUserFactory, UnregUserFactory, UserFactory, WatchConfigFactory,
InstitutionFactory,
)
from tests.test_features import requires_search
from website import mailchimp_utils
from website import mails, settings
from website.addons.github.tests.factories import GitHubAccountFactory
from website.models import Node, NodeLog, Pointer
from website.profile.utils import add_contributor_json, serialize_unregistered
from website.profile.views import fmt_date_or_none, update_osf_help_mails_subscription
from website.project.decorators import check_can_access
from website.project.model import has_anonymous_link
from website.project.signals import contributor_added
from website.project.views.contributor import (
deserialize_contributors,
notify_added_contributor,
send_claim_email,
send_claim_registered_email,
)
from website.project.views.node import _should_show_wiki_widget, _view_project, abbrev_authors
from website.util import api_url_for, web_url_for
from website.util import permissions, rubeus
class Addon(MockAddonNodeSettings):
    """Mock addon node settings that always reports complete and one archive error."""
    @property
    def complete(self):
        return True

    def archive_errors(self):
        return 'Error'
class Addon2(MockAddonNodeSettings):
    """Second mock addon with the same behavior as Addon (complete, one archive error)."""
    @property
    def complete(self):
        return True

    def archive_errors(self):
        return 'Error'
class TestViewingProjectWithPrivateLink(OsfTestCase):
    """Behavior of view-only (private) links on a private project."""

    def setUp(self):
        super(TestViewingProjectWithPrivateLink, self).setUp()
        self.user = AuthUserFactory()  # Is NOT a contributor
        self.project = ProjectFactory(is_public=False)
        self.link = PrivateLinkFactory()
        self.link.nodes.append(self.project)
        self.link.save()
        self.project_url = self.project.web_url_for('view_project')

    def test_edit_private_link_empty(self):
        """A blank link title is rejected with HTTP 400."""
        node = ProjectFactory(creator=self.user)
        link = PrivateLinkFactory()
        link.nodes.append(node)
        link.save()
        url = node.api_url_for("project_private_link_edit")
        res = self.app.put_json(url, {'pk': link._id, 'value': ''}, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_in('Title cannot be blank', res.body)

    def test_edit_private_link_invalid(self):
        """A link title containing markup is rejected with HTTP 400."""
        node = ProjectFactory(creator=self.user)
        link = PrivateLinkFactory()
        link.nodes.append(node)
        link.save()
        url = node.api_url_for("project_private_link_edit")
        res = self.app.put_json(url, {'pk': link._id, 'value': '<a></a>'}, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_in('Invalid link name.', res.body)

    @mock.patch('framework.auth.core.Auth.private_link')
    def test_can_be_anonymous_for_public_project(self, mock_property):
        """An anonymous view-only link keeps the viewer anonymous even on a public project."""
        mock_property.return_value(mock.MagicMock())
        mock_property.anonymous = True
        anonymous_link = PrivateLinkFactory(anonymous=True)
        anonymous_link.nodes.append(self.project)
        anonymous_link.save()
        self.project.set_privacy('public')
        self.project.save()
        self.project.reload()
        auth = Auth(user=self.user, private_key=anonymous_link.key)
        assert_true(has_anonymous_link(self.project, auth))

    def test_has_private_link_key(self):
        """A valid view-only key grants read access without login."""
        res = self.app.get(self.project_url, {'view_only': self.link.key})
        assert_equal(res.status_code, 200)

    def test_not_logged_in_no_key(self):
        """Anonymous visitors without a key are redirected to login."""
        res = self.app.get(self.project_url, {'view_only': None})
        assert_is_redirect(res)
        res = res.follow(expect_errors=True)
        assert_equal(res.status_code, 301)
        assert_equal(
            res.request.path,
            '/login'
        )

    def test_logged_in_no_private_key(self):
        """A logged-in non-contributor without a key gets HTTP FORBIDDEN."""
        res = self.app.get(self.project_url, {'view_only': None}, auth=self.user.auth,
                           expect_errors=True)
        assert_equal(res.status_code, http.FORBIDDEN)

    def test_logged_in_has_key(self):
        """A logged-in non-contributor with a valid key can view the project."""
        res = self.app.get(
            self.project_url, {'view_only': self.link.key}, auth=self.user.auth)
        assert_equal(res.status_code, 200)

    @unittest.skip('Skipping for now until we find a way to mock/set the referrer')
    def test_prepare_private_key(self):
        """The view-only key should survive navigation between project tabs."""
        res = self.app.get(self.project_url, {'key': self.link.key})
        res = res.click('Registrations')
        assert_is_redirect(res)
        res = res.follow()
        assert_equal(res.status_code, 200)
        assert_equal(res.request.GET['key'], self.link.key)

    def test_cannot_access_registrations_or_forks_with_anon_key(self):
        """Anonymous keys must not expose registrations or forks (401)."""
        anonymous_link = PrivateLinkFactory(anonymous=True)
        anonymous_link.nodes.append(self.project)
        anonymous_link.save()
        self.project.is_public = False
        self.project.save()
        url = self.project_url + 'registrations/?view_only={}'.format(anonymous_link.key)
        res = self.app.get(url, expect_errors=True)
        assert_equal(res.status_code, 401)
        url = self.project_url + 'forks/?view_only={}'.format(anonymous_link.key)
        res = self.app.get(url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_can_access_registrations_and_forks_with_not_anon_key(self):
        """Non-anonymous keys do expose registrations and forks."""
        link = PrivateLinkFactory(anonymous=False)
        link.nodes.append(self.project)
        link.save()
        self.project.is_public = False
        self.project.save()
        url = self.project_url + 'registrations/?view_only={}'.format(self.link.key)
        res = self.app.get(url)
        assert_equal(res.status_code, 200)
        url = self.project_url + 'forks/?view_only={}'.format(self.link.key)
        res = self.app.get(url)
        assert_equal(res.status_code, 200)

    def test_check_can_access_valid(self):
        """check_can_access returns True for a contributor."""
        contributor = AuthUserFactory()
        self.project.add_contributor(contributor, auth=Auth(self.project.creator))
        self.project.save()
        assert_true(check_can_access(self.project, contributor))

    def test_check_user_access_invalid(self):
        """check_can_access raises HTTPError for a non-contributor."""
        noncontrib = AuthUserFactory()
        with assert_raises(HTTPError):
            check_can_access(self.project, noncontrib)

    def test_check_user_access_if_user_is_None(self):
        """check_can_access returns False when no user is given."""
        assert_false(check_can_access(self.project, None))
class TestProjectViews(OsfTestCase):
    """Tests for project views: editing, contributors and permission management."""

    # Fake addons installed for this test case; both report archive errors.
    ADDONS_UNDER_TEST = {
        'addon1': {
            'node_settings': Addon,
        },
        'addon2': {
            'node_settings': Addon2,
        },
    }
    def setUp(self):
        """Create two users and two projects, each with both users as contributors."""
        super(TestProjectViews, self).setUp()
        self.user1 = AuthUserFactory()
        self.user1.save()
        self.consolidate_auth1 = Auth(user=self.user1)
        self.auth = self.user1.auth
        self.user2 = AuthUserFactory()
        self.auth2 = self.user2.auth
        # A project has 2 contributors
        self.project = ProjectFactory(
            title="Ham",
            description='Honey-baked',
            creator=self.user1
        )
        self.project.add_contributor(self.user2, auth=Auth(self.user1))
        self.project.save()
        self.project2 = ProjectFactory(
            title="Tofu",
            description='Glazed',
            creator=self.user1
        )
        self.project2.add_contributor(self.user2, auth=Auth(self.user1))
        self.project2.save()
    def test_node_setting_with_multiple_matched_institution_email_domains(self):
        """A user whose emails match several institutions' domains gets affiliated with all of them."""
        # User has alternate emails matching more than one institution's email domains
        inst1 = InstitutionFactory(email_domains=['foo.bar'])
        inst2 = InstitutionFactory(email_domains=['baz.qux'])
        user = AuthUserFactory()
        user.emails.append('queen@foo.bar')
        user.emails.append('brian@baz.qux')
        user.save()
        project = ProjectFactory(creator=user)
        # node settings page loads without error
        url = project.web_url_for('node_setting')
        res = self.app.get(url, auth=user.auth)
        assert_equal(res.status_code, 200)
        # user is automatically affiliated with institutions
        # that matched email domains
        user.reload()
        assert_in(inst1, user.affiliated_institutions)
        assert_in(inst2, user.affiliated_institutions)
    def test_edit_title_empty(self):
        """Editing a project title to an empty string is rejected with HTTP 400."""
        node = ProjectFactory(creator=self.user1)
        url = node.api_url_for("edit_node")
        res = self.app.post_json(url, {'name': 'title', 'value': ''}, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_in('Title cannot be blank', res.body)
    def test_edit_title_invalid(self):
        """Editing a project title to contain markup is rejected with HTTP 400."""
        node = ProjectFactory(creator=self.user1)
        url = node.api_url_for("edit_node")
        res = self.app.post_json(url, {'name': 'title', 'value': '<a></a>'}, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_in('Invalid title.', res.body)
    def test_cannot_remove_only_visible_contributor(self):
        """Removing the last bibliographic (visible) contributor is forbidden."""
        self.project.visible_contributor_ids.remove(self.user1._id)
        self.project.save()
        url = self.project.api_url_for('project_remove_contributor')
        res = self.app.post_json(
            url, {'contributorID': self.user2._id,
                  'nodeIDs': [self.project._id]}, auth=self.auth, expect_errors=True
        )
        assert_equal(res.status_code, http.FORBIDDEN)
        assert_equal(res.json['message_long'], 'Must have at least one bibliographic contributor')
        assert_true(self.project.is_contributor(self.user2))
    def test_remove_only_visible_contributor_return_false(self):
        """remove_contributor returns False if it would leave no bibliographic contributor."""
        self.project.visible_contributor_ids.remove(self.user1._id)
        self.project.save()
        ret = self.project.remove_contributor(contributor=self.user2, auth=self.consolidate_auth1)
        assert_false(ret)
        self.project.reload()
        assert_true(self.project.is_contributor(self.user2))
    def test_can_view_nested_project_as_admin(self):
        """An admin of the ancestor project can view a nested private project."""
        self.parent_project = NodeFactory(
            title='parent project',
            category='project',
            parent=self.project,
            is_public=False
        )
        self.parent_project.save()
        self.child_project = NodeFactory(
            title='child project',
            category='project',
            parent=self.parent_project,
            is_public=False
        )
        self.child_project.save()
        url = self.child_project.web_url_for('view_project')
        res = self.app.get(url, auth=self.auth)
        assert_not_in('Private Project', res.body)
        assert_in('parent project', res.body)
    def test_edit_description(self):
        """Posting to the edit endpoint updates the project description."""
        url = "/api/v1/project/{0}/edit/".format(self.project._id)
        self.app.post_json(url,
                           {"name": "description", "value": "Deep-fried"},
                           auth=self.auth)
        self.project.reload()
        assert_equal(self.project.description, "Deep-fried")
    def test_project_api_url(self):
        """The project API endpoint serializes the expected node fields."""
        url = self.project.api_url
        res = self.app.get(url, auth=self.auth)
        data = res.json
        assert_equal(data['node']['category'], 'Project')
        assert_equal(data['node']['node_type'], 'project')
        assert_equal(data['node']['title'], self.project.title)
        assert_equal(data['node']['is_public'], self.project.is_public)
        assert_equal(data['node']['is_registration'], False)
        assert_equal(data['node']['id'], self.project._primary_key)
        assert_equal(data['node']['watched_count'], 0)
        assert_true(data['user']['is_contributor'])
        assert_equal(data['node']['description'], self.project.description)
        assert_equal(data['node']['url'], self.project.url)
        assert_equal(data['node']['tags'], [t._primary_key for t in self.project.tags])
        assert_in('forked_date', data['node'])
        assert_in('watched_count', data['node'])
        assert_in('registered_from_url', data['node'])
        # TODO: Test "parent" and "user" output
    def test_add_contributor_post(self):
        """Contributors can be added via POST with per-user permissions and visibility."""
        # Two users are added as a contributor via a POST request
        project = ProjectFactory(creator=self.user1, is_public=True)
        user2 = UserFactory()
        user3 = UserFactory()
        url = "/api/v1/project/{0}/contributors/".format(project._id)
        dict2 = add_contributor_json(user2)
        dict3 = add_contributor_json(user3)
        dict2.update({
            'permission': 'admin',
            'visible': True,
        })
        dict3.update({
            'permission': 'write',
            'visible': False,
        })
        self.app.post_json(
            url,
            {
                'users': [dict2, dict3],
                'node_ids': [project._id],
            },
            content_type="application/json",
            auth=self.auth,
        ).maybe_follow()
        project.reload()
        assert_in(user2._id, project.contributors)
        # A log event was added
        assert_equal(project.logs[-1].action, "contributor_added")
        assert_equal(len(project.contributors), 3)
        assert_in(user2._id, project.permissions)
        assert_in(user3._id, project.permissions)
        assert_equal(project.permissions[user2._id], ['read', 'write', 'admin'])
        assert_equal(project.permissions[user3._id], ['read', 'write'])
    def test_manage_permissions(self):
        """The manage-contributors endpoint updates each contributor's permission level."""
        url = self.project.api_url + 'contributors/manage/'
        self.app.post_json(
            url,
            {
                'contributors': [
                    {'id': self.project.creator._id, 'permission': 'admin',
                     'registered': True, 'visible': True},
                    {'id': self.user1._id, 'permission': 'read',
                     'registered': True, 'visible': True},
                    {'id': self.user2._id, 'permission': 'admin',
                     'registered': True, 'visible': True},
                ]
            },
            auth=self.auth,
        )
        self.project.reload()
        assert_equal(self.project.get_permissions(self.user1), ['read'])
        assert_equal(self.project.get_permissions(self.user2), ['read', 'write', 'admin'])
    def test_manage_permissions_again(self):
        """A second manage POST can demote a contributor from admin to read."""
        url = self.project.api_url + 'contributors/manage/'
        self.app.post_json(
            url,
            {
                'contributors': [
                    {'id': self.user1._id, 'permission': 'admin',
                        'registered': True, 'visible': True},
                    {'id': self.user2._id, 'permission': 'admin',
                        'registered': True, 'visible': True},
                ]
            },
            auth=self.auth,
        )
        self.project.reload()
        # second request demotes user2 only
        self.app.post_json(
            url,
            {
                'contributors': [
                    {'id': self.user1._id, 'permission': 'admin',
                        'registered': True, 'visible': True},
                    {'id': self.user2._id, 'permission': 'read',
                        'registered': True, 'visible': True},
                ]
            },
            auth=self.auth,
        )
        self.project.reload()
        assert_equal(self.project.get_permissions(self.user2), ['read'])
        assert_equal(self.project.get_permissions(self.user1), ['read', 'write', 'admin'])
    def test_contributor_manage_reorder(self):
        """Submitting contributors in a new order persists that order, and
        visibility flags control membership of ``visible_contributors``.
        """
        # Two users are added as a contributor via a POST request
        project = ProjectFactory(creator=self.user1, is_public=True)
        reg_user1, reg_user2 = UserFactory(), UserFactory()
        project.add_contributors(
            [
                {'user': reg_user1, 'permissions': [
                    'read', 'write', 'admin'], 'visible': True},
                {'user': reg_user2, 'permissions': [
                    'read', 'write', 'admin'], 'visible': False},
            ]
        )
        # Add a non-registered user
        unregistered_user = project.add_unregistered_contributor(
            fullname=fake.name(), email=fake.email(),
            auth=self.consolidate_auth1,
            save=True,
        )
        url = project.api_url + 'contributors/manage/'
        self.app.post_json(
            url,
            {
                'contributors': [
                    {'id': reg_user2._id, 'permission': 'admin',
                        'registered': True, 'visible': False},
                    {'id': project.creator._id, 'permission': 'admin',
                        'registered': True, 'visible': True},
                    {'id': unregistered_user._id, 'permission': 'admin',
                        'registered': False, 'visible': True},
                    {'id': reg_user1._id, 'permission': 'admin',
                        'registered': True, 'visible': True},
                ]
            },
            auth=self.auth,
        )
        project.reload()
        assert_equal(
            # Note: Cast ForeignList to list for comparison
            list(project.contributors),
            [reg_user2, project.creator, unregistered_user, reg_user1]
        )
        # reg_user2 was submitted with visible=False and is excluded here
        assert_equal(
            project.visible_contributors,
            [project.creator, unregistered_user, reg_user1]
        )
    def test_project_remove_contributor(self):
        """An admin can remove another contributor; removal is logged."""
        url = self.project.api_url_for('project_remove_contributor')
        # User 1 removes user2
        payload = {"contributorID": self.user2._id,
                   "nodeIDs": [self.project._id]}
        self.app.post(url, json.dumps(payload),
                      content_type="application/json",
                      auth=self.auth).maybe_follow()
        self.project.reload()
        assert_not_in(self.user2._id, self.project.contributors)
        # A log event was added
        assert_equal(self.project.logs[-1].action, "contributor_removed")
    def test_multiple_project_remove_contributor(self):
        """Removal with several node IDs removes the contributor from every
        listed node, without redirecting the admin to the dashboard.
        """
        url = self.project.api_url_for('project_remove_contributor')
        # User 1 removes user2
        payload = {"contributorID": self.user2._id,
                   "nodeIDs": [self.project._id, self.project2._id]}
        res = self.app.post(url, json.dumps(payload),
                            content_type="application/json",
                            auth=self.auth).maybe_follow()
        self.project.reload()
        self.project2.reload()
        assert_not_in(self.user2._id, self.project.contributors)
        # admin removing someone else should not be redirected away
        assert_not_in('/dashboard/', res.json)
        assert_not_in(self.user2._id, self.project2.contributors)
        # A log event was added
        assert_equal(self.project.logs[-1].action, "contributor_removed")
    def test_private_project_remove_self_not_admin(self):
        """A non-admin removing themselves from a private project succeeds and
        is redirected to the dashboard (they can no longer view the project).
        """
        url = self.project.api_url_for('project_remove_contributor')
        # user2 removes self
        payload = {"contributorID": self.user2._id,
                   "nodeIDs": [self.project._id]}
        res = self.app.post(url, json.dumps(payload),
                            content_type="application/json",
                            auth=self.auth2).maybe_follow()
        self.project.reload()
        assert_equal(res.status_code, 200)
        assert_equal(res.json['redirectUrl'], '/dashboard/')
        assert_not_in(self.user2._id, self.project.contributors)
    def test_public_project_remove_self_not_admin(self):
        """A non-admin removing themselves from a *public* project is
        redirected back to the project (still viewable by anyone).
        """
        url = self.project.api_url_for('project_remove_contributor')
        # user2 removes self
        self.public_project = ProjectFactory(creator=self.user1, is_public=True)
        self.public_project.add_contributor(self.user2, auth=Auth(self.user1))
        self.public_project.save()
        payload = {"contributorID": self.user2._id,
                   "nodeIDs": [self.public_project._id]}
        res = self.app.post(url, json.dumps(payload),
                            content_type="application/json",
                            auth=self.auth2).maybe_follow()
        self.public_project.reload()
        assert_equal(res.status_code, 200)
        assert_equal(res.json['redirectUrl'], '/' + self.public_project._id + '/')
        assert_not_in(self.user2._id, self.public_project.contributors)
    def test_project_remove_other_not_admin(self):
        """A non-admin may not remove another contributor: HTTP 403."""
        url = self.project.api_url_for('project_remove_contributor')
        # user2 (not an admin) attempts to remove user1
        payload = {"contributorID": self.user1._id,
                   "nodeIDs": [self.project._id]}
        res = self.app.post(url, json.dumps(payload),
                            content_type="application/json",
                            expect_errors=True,
                            auth=self.auth2).maybe_follow()
        self.project.reload()
        assert_equal(res.status_code, 403)
        assert_equal(res.json['message_long'],
                     'You do not have permission to perform this action. '
                     'If this should not have occurred and the issue persists, '
                     'please report it to <a href="mailto:support@osf.io">support@osf.io</a>.'
                     )
        assert_in(self.user1._id, self.project.contributors)
    def test_project_remove_fake_contributor(self):
        """Removing an unknown contributor ID yields HTTP 400."""
        url = self.project.api_url_for('project_remove_contributor')
        # User 1 attempts to remove a nonexistent contributor id
        payload = {"contributorID": 'badid',
                   "nodeIDs": [self.project._id]}
        res = self.app.post(url, json.dumps(payload),
                            content_type="application/json",
                            expect_errors=True,
                            auth=self.auth).maybe_follow()
        self.project.reload()
        # Assert the contributor id was invalid
        assert_equal(res.status_code, 400)
        assert_equal(res.json['message_long'], 'Contributor not found.')
        assert_not_in('badid', self.project.contributors)
    def test_project_remove_self_only_admin(self):
        """The sole admin cannot remove themselves: HTTP 400."""
        url = self.project.api_url_for('project_remove_contributor')
        # User 1 (only admin) attempts to remove self
        payload = {"contributorID": self.user1._id,
                   "nodeIDs": [self.project._id]}
        res = self.app.post(url, json.dumps(payload),
                            content_type="application/json",
                            expect_errors=True,
                            auth=self.auth).maybe_follow()
        self.project.reload()
        assert_equal(res.status_code, 400)
        assert_equal(res.json['message_long'], 'Could not remove contributor.')
        assert_in(self.user1._id, self.project.contributors)
def test_get_contributors_abbrev(self):
# create a project with 3 registered contributors
project = ProjectFactory(creator=self.user1, is_public=True)
reg_user1, reg_user2 = UserFactory(), UserFactory()
project.add_contributors(
[
{'user': reg_user1, 'permissions': [
'read', 'write', 'admin'], 'visible': True},
{'user': reg_user2, 'permissions': [
'read', 'write', 'admin'], 'visible': True},
]
)
# add an unregistered contributor
project.add_unregistered_contributor(
fullname=fake.name(), email=fake.email(),
auth=self.consolidate_auth1,
save=True,
)
url = project.api_url_for('get_node_contributors_abbrev')
res = self.app.get(url, auth=self.auth)
assert_equal(len(project.contributors), 4)
assert_equal(len(res.json['contributors']), 3)
assert_equal(len(res.json['others_count']), 1)
assert_equal(res.json['contributors'][0]['separator'], ',')
assert_equal(res.json['contributors'][1]['separator'], ',')
assert_equal(res.json['contributors'][2]['separator'], ' &')
    def test_edit_node_title(self):
        """Posting a name/value pair to the edit endpoint updates the title."""
        url = "/api/v1/project/{0}/edit/".format(self.project._id)
        # The title is changed though posting form data
        self.app.post_json(url, {"name": "title", "value": "Bacon"},
                           auth=self.auth).maybe_follow()
        self.project.reload()
        # The title was changed
        assert_equal(self.project.title, "Bacon")
        # A log event was saved
        assert_equal(self.project.logs[-1].action, "edit_title")
def test_make_public(self):
self.project.is_public = False
self.project.save()
url = "/api/v1/project/{0}/permissions/public/".format(self.project._id)
res = self.app.post_json(url, {}, auth=self.auth)
self.project.reload()
assert_true(self.project.is_public)
assert_equal(res.json['status'], 'success')
def test_make_private(self):
self.project.is_public = True
self.project.save()
url = "/api/v1/project/{0}/permissions/private/".format(self.project._id)
res = self.app.post_json(url, {}, auth=self.auth)
self.project.reload()
assert_false(self.project.is_public)
assert_equal(res.json['status'], 'success')
    def test_cant_make_public_if_not_admin(self):
        """A read/write (non-admin) contributor cannot make the project public."""
        non_admin = AuthUserFactory()
        self.project.add_contributor(non_admin, permissions=['read', 'write'])
        self.project.is_public = False
        self.project.save()
        url = "/api/v1/project/{0}/permissions/public/".format(self.project._id)
        res = self.app.post_json(
            url, {}, auth=non_admin.auth,
            expect_errors=True,
        )
        assert_equal(res.status_code, http.FORBIDDEN)
        assert_false(self.project.is_public)
    def test_cant_make_private_if_not_admin(self):
        """A read/write (non-admin) contributor cannot make the project private."""
        non_admin = AuthUserFactory()
        self.project.add_contributor(non_admin, permissions=['read', 'write'])
        self.project.is_public = True
        self.project.save()
        url = "/api/v1/project/{0}/permissions/private/".format(self.project._id)
        res = self.app.post_json(
            url, {}, auth=non_admin.auth,
            expect_errors=True,
        )
        assert_equal(res.status_code, http.FORBIDDEN)
        assert_true(self.project.is_public)
def test_add_tag(self):
url = self.project.api_url_for('project_add_tag')
self.app.post_json(url, {'tag': "foo'ta#@%#%^&g?"}, auth=self.auth)
self.project.reload()
assert_in("foo'ta#@%#%^&g?", self.project.tags)
assert_equal("foo'ta#@%#%^&g?", self.project.logs[-1].params['tag'])
    def test_remove_tag(self):
        """DELETE on the remove-tag endpoint removes the tag and logs it."""
        self.project.add_tag("foo'ta#@%#%^&g?", auth=self.consolidate_auth1, save=True)
        assert_in("foo'ta#@%#%^&g?", self.project.tags)
        url = self.project.api_url_for("project_remove_tag")
        self.app.delete_json(url, {"tag": "foo'ta#@%#%^&g?"}, auth=self.auth)
        self.project.reload()
        assert_not_in("foo'ta#@%#%^&g?", self.project.tags)
        assert_equal("tag_removed", self.project.logs[-1].action)
        assert_equal("foo'ta#@%#%^&g?", self.project.logs[-1].params['tag'])
# Regression test for #OSF-5257
def test_removal_empty_tag_throws_error(self):
url = self.project.api_url_for('project_remove_tag')
res= self.app.delete_json(url, {'tag': ''}, auth=self.auth, expect_errors=True)
assert_equal(res.status_code, http.BAD_REQUEST)
# Regression test for #OSF-5257
def test_removal_unknown_tag_throws_error(self):
self.project.add_tag('narf', auth=self.consolidate_auth1, save=True)
url = self.project.api_url_for('project_remove_tag')
res= self.app.delete_json(url, {'tag': 'troz'}, auth=self.auth, expect_errors=True)
assert_equal(res.status_code, http.CONFLICT)
    # Regression test for https://github.com/CenterForOpenScience/osf.io/issues/1478
    @mock.patch('website.archiver.tasks.archive')
    def test_registered_projects_contributions(self, mock_archive):
        """The summary view of a registration reports a log count (nlogs)."""
        # register a project
        self.project.register_node(get_default_metaschema(), Auth(user=self.project.creator), '', None)
        # get the first registered project of a project
        url = self.project.api_url_for('get_registrations')
        res = self.app.get(url, auth=self.auth)
        data = res.json
        pid = data['nodes'][0]['id']
        url2 = api_url_for('get_summary', pid=pid)
        # count contributions
        res2 = self.app.get(url2, auth=self.auth)
        data = res2.json
        assert_is_not_none(data['summary']['nlogs'])
    def test_forks_contributions(self):
        """The summary view of a fork reports a log count (nlogs)."""
        # fork a project
        self.project.fork_node(Auth(user=self.project.creator))
        # get the first forked project of a project
        url = self.project.api_url_for('get_forks')
        res = self.app.get(url, auth=self.auth)
        data = res.json
        pid = data['nodes'][0]['id']
        url2 = api_url_for('get_summary', pid=pid)
        # count contributions
        res2 = self.app.get(url2, auth=self.auth)
        data = res2.json
        assert_is_not_none(data['summary']['nlogs'])
    @mock.patch('framework.transactions.commands.begin')
    @mock.patch('framework.transactions.commands.rollback')
    @mock.patch('framework.transactions.commands.commit')
    def test_get_logs(self, *mock_commands):
        """get_logs returns all logs with paging metadata, newest first, and
        must not open/commit/rollback a transaction (read-only view).
        """
        # Add some logs
        for _ in range(5):
            self.project.add_log('file_added', params={'node': self.project._id}, auth=self.consolidate_auth1)
        self.project.save()
        url = self.project.api_url_for('get_logs')
        res = self.app.get(url, auth=self.auth)
        # the view should not touch transaction machinery
        for mock_command in mock_commands:
            assert_false(mock_command.called)
        self.project.reload()
        data = res.json
        assert_equal(len(data['logs']), len(self.project.logs))
        assert_equal(data['total'], len(self.project.logs))
        assert_equal(data['page'], 0)
        assert_equal(data['pages'], 1)
        # logs are returned newest-first
        most_recent = data['logs'][0]
        assert_equal(most_recent['action'], 'file_added')
    def test_get_logs_invalid_page_input(self):
        """A non-numeric ``page`` query param yields HTTP 400."""
        url = self.project.api_url_for('get_logs')
        invalid_input = 'invalid page'
        res = self.app.get(
            url, {'page': invalid_input}, auth=self.auth, expect_errors=True
        )
        assert_equal(res.status_code, 400)
        assert_equal(
            res.json['message_long'],
            'Invalid value for "page".'
        )
    def test_get_logs_negative_page_num(self):
        """A negative ``page`` query param yields HTTP 400."""
        url = self.project.api_url_for('get_logs')
        invalid_input = -1
        res = self.app.get(
            url, {'page': invalid_input}, auth=self.auth, expect_errors=True
        )
        assert_equal(res.status_code, 400)
        assert_equal(
            res.json['message_long'],
            'Invalid value for "page".'
        )
    def test_get_logs_page_num_beyond_limit(self):
        """A ``page`` past the last available page yields HTTP 400."""
        url = self.project.api_url_for('get_logs')
        size = 10
        # first page index past the end (pages are 0-based)
        page_num = math.ceil(len(self.project.logs) / float(size))
        res = self.app.get(
            url, {'page': page_num}, auth=self.auth, expect_errors=True
        )
        assert_equal(res.status_code, 400)
        assert_equal(
            res.json['message_long'],
            'Invalid value for "page".'
        )
    def test_get_logs_with_count_param(self):
        """The ``count`` query param controls page size and page count."""
        # Add some logs
        for _ in range(5):
            self.project.add_log('file_added', params={'node': self.project._id}, auth=self.consolidate_auth1)
        self.project.save()
        url = self.project.api_url_for('get_logs')
        res = self.app.get(url, {'count': 3}, auth=self.auth)
        assert_equal(len(res.json['logs']), 3)
        # 1 project create log, 1 add contributor log, then 5 generated logs
        assert_equal(res.json['total'], 5 + 2)
        assert_equal(res.json['page'], 0)
        assert_equal(res.json['pages'], 3)
    def test_get_logs_defaults_to_ten(self):
        """Without a ``count`` param, get_logs returns ten logs per page."""
        # Add some logs
        for _ in range(12):
            self.project.add_log('file_added', params={'node': self.project._id}, auth=self.consolidate_auth1)
        self.project.save()
        url = self.project.api_url_for('get_logs')
        res = self.app.get(url, auth=self.auth)
        assert_equal(len(res.json['logs']), 10)
        # 1 project create log, 1 add contributor log, then 12 generated logs
        assert_equal(res.json['total'], 12 + 2)
        assert_equal(res.json['page'], 0)
        assert_equal(res.json['pages'], 2)
    def test_get_more_logs(self):
        """Requesting page 1 returns the remainder beyond the first ten logs."""
        # Add some logs
        for _ in range(12):
            self.project.add_log('file_added', params={'node': self.project._id}, auth=self.consolidate_auth1)
        self.project.save()
        url = self.project.api_url_for('get_logs')
        res = self.app.get(url, {"page": 1}, auth=self.auth)
        # 14 total logs, 10 on page 0, so 4 remain on page 1
        assert_equal(len(res.json['logs']), 4)
        # 1 project create log, 1 add contributor log, then 12 generated logs
        assert_equal(res.json['total'], 12 + 2)
        assert_equal(res.json['page'], 1)
        assert_equal(res.json['pages'], 2)
    def test_logs_private(self):
        """Add logs to a public project, then to its private component. Get
        the ten most recent logs; assert that ten logs are returned and that
        all belong to the project and not its component.
        """
        # Add some logs
        for _ in range(15):
            self.project.add_log(
                auth=self.consolidate_auth1,
                action='file_added',
                params={'node': self.project._id}
            )
        self.project.is_public = True
        self.project.save()
        # private child logs must not leak into the anonymous view
        child = NodeFactory(parent=self.project)
        for _ in range(5):
            child.add_log(
                auth=self.consolidate_auth1,
                action='file_added',
                params={'node': child._id}
            )
        url = self.project.api_url_for('get_logs')
        # unauthenticated request: only the public project's logs are visible
        res = self.app.get(url).maybe_follow()
        assert_equal(len(res.json['logs']), 10)
        # 1 project create log, 1 add contributor log, then 15 generated logs
        assert_equal(res.json['total'], 15 + 2)
        assert_equal(res.json['page'], 0)
        assert_equal(res.json['pages'], 2)
        assert_equal(
            [self.project._id] * 10,
            [
                log['params']['node']
                for log in res.json['logs']
            ]
        )
    def test_can_view_public_log_from_private_project(self):
        """A fork keeps showing the original's logs even after the original
        project is made private.
        """
        project = ProjectFactory(is_public=True)
        fork = project.fork_node(auth=self.consolidate_auth1)
        url = fork.api_url_for('get_logs')
        res = self.app.get(url, auth=self.auth)
        assert_equal(
            [each['action'] for each in res.json['logs']],
            ['node_forked', 'project_created'],
        )
        project.is_public = False
        project.save()
        # same logs remain visible on the fork after privatization
        res = self.app.get(url, auth=self.auth)
        assert_equal(
            [each['action'] for each in res.json['logs']],
            ['node_forked', 'project_created'],
        )
    def test_for_private_component_log(self):
        """Logs of a private child component are hidden from anonymous
        viewers of the public parent project.
        """
        for _ in range(5):
            self.project.add_log(
                auth=self.consolidate_auth1,
                action='file_added',
                params={'node': self.project._id}
            )
        self.project.is_public = True
        self.project.save()
        child = NodeFactory(parent=self.project)
        child.is_public = False
        # generate two edit_title logs on the private child
        child.set_title("foo", auth=self.consolidate_auth1)
        child.set_title("bar", auth=self.consolidate_auth1)
        child.save()
        url = self.project.api_url_for('get_logs')
        # unauthenticated request
        res = self.app.get(url).maybe_follow()
        assert_equal(len(res.json['logs']), 7)
        assert_not_in(
            child._id,
            [
                log['params']['node']
                for log in res.json['logs']
            ]
        )
def test_remove_project(self):
url = self.project.api_url
res = self.app.delete_json(url, {}, auth=self.auth).maybe_follow()
self.project.reload()
assert_equal(self.project.is_deleted, True)
assert_in('url', res.json)
assert_equal(res.json['url'], '/dashboard/')
    def test_suspended_project(self):
        """A suspended node returns HTTP 451 (unavailable for legal reasons)."""
        node = NodeFactory(parent=self.project, creator=self.user1)
        node.remove_node(Auth(self.user1))
        node.suspended = True
        node.save()
        url = node.api_url
        res = self.app.get(url, auth=Auth(self.user1), expect_errors=True)
        assert_equal(res.status_code, 451)
    def test_private_link_edit_name(self):
        """PUT to the private-link edit endpoint renames the link."""
        link = PrivateLinkFactory()
        link.nodes.append(self.project)
        link.save()
        # factory default name
        assert_equal(link.name, "link")
        url = self.project.api_url + 'private_link/edit/'
        self.app.put_json(
            url,
            {'pk': link._id, "value": "new name"},
            auth=self.auth,
        ).maybe_follow()
        self.project.reload()
        link.reload()
        assert_equal(link.name, "new name")
    def test_remove_private_link(self):
        """DELETE on remove_private_link soft-deletes the link."""
        link = PrivateLinkFactory()
        link.nodes.append(self.project)
        link.save()
        url = self.project.api_url_for('remove_private_link')
        self.app.delete_json(
            url,
            {'private_link_id': link._id},
            auth=self.auth,
        ).maybe_follow()
        self.project.reload()
        link.reload()
        assert_true(link.is_deleted)
    def test_remove_component(self):
        """Deleting a component marks it deleted and redirects to the parent."""
        node = NodeFactory(parent=self.project, creator=self.user1)
        url = node.api_url
        res = self.app.delete_json(url, {}, auth=self.auth).maybe_follow()
        node.reload()
        assert_equal(node.is_deleted, True)
        assert_in('url', res.json)
        # redirect lands on the parent project, not the dashboard
        assert_equal(res.json['url'], self.project.url)
    def test_cant_remove_component_if_not_admin(self):
        """A read/write (non-admin) contributor cannot delete a component."""
        node = NodeFactory(parent=self.project, creator=self.user1)
        non_admin = AuthUserFactory()
        node.add_contributor(
            non_admin,
            permissions=['read', 'write'],
            save=True,
        )
        url = node.api_url
        res = self.app.delete_json(
            url, {}, auth=non_admin.auth,
            expect_errors=True,
        ).maybe_follow()
        assert_equal(res.status_code, http.FORBIDDEN)
        assert_false(node.is_deleted)
def test_watch_and_unwatch(self):
url = self.project.api_url_for('togglewatch_post')
self.app.post_json(url, {}, auth=self.auth)
res = self.app.get(self.project.api_url, auth=self.auth)
assert_equal(res.json['node']['watched_count'], 1)
self.app.post_json(url, {}, auth=self.auth)
res = self.app.get(self.project.api_url, auth=self.auth)
assert_equal(res.json['node']['watched_count'], 0)
    def test_view_project_returns_whether_to_show_wiki_widget(self):
        """view_project serialization includes the show_wiki_widget flag."""
        user = AuthUserFactory()
        project = ProjectFactory.build(creator=user, is_public=True)
        project.add_contributor(user)
        project.save()
        url = project.api_url_for('view_project')
        res = self.app.get(url, auth=user.auth)
        assert_equal(res.status_code, http.OK)
        assert_in('show_wiki_widget', res.json['user'])
    def test_fork_count_does_not_include_deleted_forks(self):
        """fork_count in the project serialization excludes deleted forks."""
        user = AuthUserFactory()
        project = ProjectFactory(creator=user)
        auth = Auth(project.creator)
        fork = project.fork_node(auth)
        project.save()
        # delete the only fork; count should drop back to zero
        fork.remove_node(auth)
        fork.save()
        url = project.api_url_for('view_project')
        res = self.app.get(url, auth=user.auth)
        assert_in('fork_count', res.json['node'])
        assert_equal(0, res.json['node']['fork_count'])
def test_statistic_page_redirect(self):
url = self.project.web_url_for('project_statistics_redirect')
res = self.app.get(url, auth=self.auth)
assert_equal(res.status_code, 302)
assert_in(self.project.web_url_for('project_statistics', _guid=True), res.location)
def test_registration_retraction_redirect(self):
url = self.project.web_url_for('node_registration_retraction_redirect')
res = self.app.get(url, auth=self.auth)
assert_equal(res.status_code, 302)
assert_in(self.project.web_url_for('node_registration_retraction_get', _guid=True), res.location)
def test_update_node(self):
url = self.project.api_url_for('update_node')
res = self.app.put_json(url, {'title': 'newtitle'}, auth=self.auth)
assert_equal(res.status_code, 200)
self.project.reload()
assert_equal(self.project.title, 'newtitle')
    # Regression test
    def test_update_node_with_tags(self):
        """Updating a node that already has (unicode) tags still succeeds."""
        self.project.add_tag('cheezebørger', auth=Auth(self.project.creator), save=True)
        url = self.project.api_url_for('update_node')
        res = self.app.put_json(url, {'title': 'newtitle'}, auth=self.auth)
        assert_equal(res.status_code, 200)
        self.project.reload()
        assert_equal(self.project.title, 'newtitle')
    # Regression test
    def test_get_registrations_sorted_by_registered_date_descending(self):
        """get_registrations returns registrations newest-first by
        registered_date, regardless of creation order.
        """
        # register a project several times, with various registered_dates
        registrations = []
        for days_ago in (21, 3, 2, 8, 13, 5, 1):
            registration = RegistrationFactory(project=self.project)
            # backdate each registration by a different amount
            reg_date = registration.registered_date - dt.timedelta(days_ago)
            registration.registered_date = reg_date
            registration.save()
            registrations.append(registration)
        registrations.sort(key=lambda r: r.registered_date, reverse=True)
        expected = [ r._id for r in registrations ]
        registrations_url = self.project.api_url_for('get_registrations')
        res = self.app.get(registrations_url, auth=self.auth)
        data = res.json
        actual = [ n['id'] for n in data['nodes'] ]
        assert_equal(actual, expected)
class TestEditableChildrenViews(OsfTestCase):
    """Tests for get_editable_children: a four-level descendant chain with
    alternating public/private visibility, flattened with indent/parent info.
    """

    def setUp(self):
        # Build project -> child -> grandchild -> great... with alternating
        # is_public flags, then fetch the editable-children payload once.
        OsfTestCase.setUp(self)
        self.user = AuthUserFactory()
        self.project = ProjectFactory(creator=self.user, is_public=False)
        self.child = ProjectFactory(parent=self.project, creator=self.user, is_public=True)
        self.grandchild = ProjectFactory(parent=self.child, creator=self.user, is_public=False)
        self.great_grandchild = ProjectFactory(parent=self.grandchild, creator=self.user, is_public=True)
        self.great_great_grandchild = ProjectFactory(parent=self.great_grandchild, creator=self.user, is_public=False)
        url = self.project.api_url_for('get_editable_children')
        self.project_results = self.app.get(url, auth=self.user.auth).json

    def test_get_editable_children(self):
        """All four descendants are returned under the root project."""
        assert_equal(len(self.project_results['children']), 4)
        assert_equal(self.project_results['node']['id'], self.project._id)

    def test_editable_children_order(self):
        """Children appear in depth-first (ancestry) order."""
        assert_equal(self.project_results['children'][0]['id'], self.child._id)
        assert_equal(self.project_results['children'][1]['id'], self.grandchild._id)
        assert_equal(self.project_results['children'][2]['id'], self.great_grandchild._id)
        assert_equal(self.project_results['children'][3]['id'], self.great_great_grandchild._id)

    def test_editable_children_indents(self):
        """Each level is indented one step deeper than its parent."""
        assert_equal(self.project_results['children'][0]['indent'], 0)
        assert_equal(self.project_results['children'][1]['indent'], 1)
        assert_equal(self.project_results['children'][2]['indent'], 2)
        assert_equal(self.project_results['children'][3]['indent'], 3)

    def test_editable_children_parents(self):
        """Each entry reports its immediate parent's id."""
        assert_equal(self.project_results['children'][0]['parent_id'], self.project._id)
        assert_equal(self.project_results['children'][1]['parent_id'], self.child._id)
        assert_equal(self.project_results['children'][2]['parent_id'], self.grandchild._id)
        assert_equal(self.project_results['children'][3]['parent_id'], self.great_grandchild._id)

    def test_editable_children_privacy(self):
        """is_public flags match the alternating setup."""
        assert_false(self.project_results['node']['is_public'])
        assert_true(self.project_results['children'][0]['is_public'])
        assert_false(self.project_results['children'][1]['is_public'])
        assert_true(self.project_results['children'][2]['is_public'])
        assert_false(self.project_results['children'][3]['is_public'])

    def test_editable_children_titles(self):
        """Titles are passed through unchanged."""
        assert_equal(self.project_results['node']['title'], self.project.title)
        assert_equal(self.project_results['children'][0]['title'], self.child.title)
        assert_equal(self.project_results['children'][1]['title'], self.grandchild.title)
        assert_equal(self.project_results['children'][2]['title'], self.great_grandchild.title)
        assert_equal(self.project_results['children'][3]['title'], self.great_great_grandchild.title)
class TestChildrenViews(OsfTestCase):
    """Tests for the ``get_children`` API view."""

    def setUp(self):
        OsfTestCase.setUp(self)
        self.user = AuthUserFactory()

    def test_get_children(self):
        """A single child component is returned under its parent."""
        project = ProjectFactory(creator=self.user)
        child = NodeFactory(parent=project, creator=self.user)
        url = project.api_url_for('get_children')
        res = self.app.get(url, auth=self.user.auth)
        nodes = res.json['nodes']
        assert_equal(len(nodes), 1)
        assert_equal(nodes[0]['id'], child._primary_key)

    def test_get_children_includes_pointers(self):
        """Pointers appear in children, keyed by the pointer's own id."""
        project = ProjectFactory(creator=self.user)
        pointed = ProjectFactory()
        project.add_pointer(pointed, Auth(self.user))
        project.save()
        url = project.api_url_for('get_children')
        res = self.app.get(url, auth=self.user.auth)
        nodes = res.json['nodes']
        assert_equal(len(nodes), 1)
        assert_equal(nodes[0]['title'], pointed.title)
        # the serialized id is the Pointer's key, not the pointed node's
        pointer = Pointer.find_one(Q('node', 'eq', pointed))
        assert_equal(nodes[0]['id'], pointer._primary_key)

    def test_get_children_filter_for_permissions(self):
        """The ``permissions`` query param filters out children on which the
        caller lacks the requested permission level.
        """
        # self.user has admin access to this project
        project = ProjectFactory(creator=self.user)
        # self.user only has read access to this project, which project points
        # to
        read_only_pointed = ProjectFactory()
        read_only_creator = read_only_pointed.creator
        read_only_pointed.add_contributor(self.user, auth=Auth(read_only_creator), permissions=['read'])
        read_only_pointed.save()
        # self.user only has read access to this project, which is a subproject
        # of project
        read_only = ProjectFactory()
        # Bug fix: grant read on the subproject itself; the original line
        # mistakenly duplicated the read_only_pointed grant above.
        read_only.add_contributor(self.user, auth=Auth(read_only.creator), permissions=['read'])
        read_only.save()
        project.nodes.append(read_only)
        # self.user adds a pointer to read_only
        project.add_pointer(read_only_pointed, Auth(self.user))
        project.save()
        url = project.api_url_for('get_children')
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(len(res.json['nodes']), 2)
        # filtering by 'write' excludes both read-only children
        url = project.api_url_for('get_children', permissions='write')
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(len(res.json['nodes']), 0)

    def test_get_children_render_nodes_receives_auth(self):
        """Rendered children include the caller's permission level."""
        project = ProjectFactory(creator=self.user)
        NodeFactory(parent=project, creator=self.user)
        url = project.api_url_for('get_children')
        res = self.app.get(url, auth=self.user.auth)
        perm = res.json['nodes'][0]['permissions']
        assert_equal(perm, 'admin')
class TestGetNodeTree(OsfTestCase):
    """Tests for the ``get_node_tree`` API view (nested node/children dicts)."""

    def setUp(self):
        OsfTestCase.setUp(self)
        self.user = AuthUserFactory()
        self.user2 = AuthUserFactory()

    def test_get_single_node(self):
        """A project with no children yields a single-node tree."""
        project = ProjectFactory(creator=self.user)
        # child = NodeFactory(parent=project, creator=self.user)
        url = project.api_url_for('get_node_tree')
        res = self.app.get(url, auth=self.user.auth)
        node_id = res.json[0]['node']['id']
        assert_equal(node_id, project._primary_key)

    def test_get_node_with_children(self):
        """All children appear under the parent, in creation order."""
        project = ProjectFactory(creator=self.user)
        child1 = NodeFactory(parent=project, creator=self.user)
        child2 = NodeFactory(parent=project, creator=self.user2)
        child3 = NodeFactory(parent=project, creator=self.user)
        url = project.api_url_for('get_node_tree')
        res = self.app.get(url, auth=self.user.auth)
        tree = res.json[0]
        parent_node_id = tree['node']['id']
        child1_id = tree['children'][0]['node']['id']
        child2_id = tree['children'][1]['node']['id']
        child3_id = tree['children'][2]['node']['id']
        assert_equal(parent_node_id, project._primary_key)
        assert_equal(child1_id, child1._primary_key)
        assert_equal(child2_id, child2._primary_key)
        assert_equal(child3_id, child3._primary_key)

    def test_get_node_not_parent_owner(self):
        """A user with no access to the tree gets an empty (200) response."""
        project = ProjectFactory(creator=self.user2)
        child = NodeFactory(parent=project, creator=self.user2)
        url = project.api_url_for('get_node_tree')
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 200)
        assert_equal(res.json, [])

    # Parent node should show because of user2 read access, the children should not
    def test_get_node_parent_not_admin(self):
        """A read-level contributor on the parent sees it but no children."""
        project = ProjectFactory(creator=self.user)
        project.add_contributor(self.user2, auth=Auth(self.user))
        project.save()
        child1 = NodeFactory(parent=project, creator=self.user)
        child2 = NodeFactory(parent=project, creator=self.user)
        child3 = NodeFactory(parent=project, creator=self.user)
        url = project.api_url_for('get_node_tree')
        res = self.app.get(url, auth=self.user2.auth)
        tree = res.json[0]
        parent_node_id = tree['node']['id']
        children = tree['children']
        assert_equal(parent_node_id, project._primary_key)
        assert_equal(children, [])
class TestUserProfile(OsfTestCase):
    def setUp(self):
        """Create one authenticated user for the profile tests."""
        super(TestUserProfile, self).setUp()
        self.user = AuthUserFactory()
    def test_sanitization_of_edit_profile(self):
        """HTML in the submitted name is stripped before saving."""
        url = api_url_for('edit_profile', uid=self.user._id)
        post_data = {'name': 'fullname', 'value': 'new<b> name</b> '}
        request = self.app.post(url, post_data, auth=self.user.auth)
        assert_equal('new name', request.json['name'])
    def test_fmt_date_or_none(self):
        """fmt_date_or_none rejects dates before 1900 with HTTP 400."""
        with assert_raises(HTTPError) as cm:
            #enter a date before 1900
            fmt_date_or_none(dt.datetime(1890, 10, 31, 18, 23, 29, 227))
        # error should be raised because date is before 1900
        assert_equal(cm.exception.code, http.BAD_REQUEST)
    def test_unserialize_social(self):
        """PUT to unserialize_social persists each submitted social field;
        unsubmitted fields (e.g. researcherId) remain None.
        """
        url = api_url_for('unserialize_social')
        payload = {
            'profileWebsites': ['http://frozen.pizza.com/reviews'],
            'twitter': 'howtopizza',
            'github': 'frozenpizzacode',
        }
        self.app.put_json(
            url,
            payload,
            auth=self.user.auth,
        )
        self.user.reload()
        for key, value in payload.iteritems():
            assert_equal(self.user.social[key], value)
        assert_true(self.user.social['researcherId'] is None)
    # Regression test for help-desk ticket
    def test_making_email_primary_is_not_case_sensitive(self):
        """Setting a primary email matches the stored address case-insensitively."""
        user = AuthUserFactory(username='fred@queen.test')
        # make confirmed email have different casing
        user.emails[0] = user.emails[0].capitalize()
        user.save()
        url = api_url_for('update_user')
        res = self.app.put_json(
            url,
            {'id': user._id, 'emails': [{'address': 'fred@queen.test', 'primary': True, 'confirmed': True}]},
            auth=user.auth
        )
        assert_equal(res.status_code, 200)
    def test_unserialize_social_validation_failure(self):
        """An invalid personal website URL is rejected with HTTP 400."""
        url = api_url_for('unserialize_social')
        # profileWebsites URL is invalid
        payload = {
            'profileWebsites': ['http://goodurl.com', 'http://invalidurl'],
            'twitter': 'howtopizza',
            'github': 'frozenpizzacode',
        }
        res = self.app.put_json(
            url,
            payload,
            auth=self.user.auth,
            expect_errors=True
        )
        assert_equal(res.status_code, 400)
        assert_equal(res.json['message_long'], 'Invalid personal URL.')
    def test_serialize_social_editable(self):
        """Viewing one's own social fields marks the payload editable."""
        self.user.social['twitter'] = 'howtopizza'
        self.user.social['profileWebsites'] = ['http://www.cos.io', 'http://www.osf.io', 'http://www.wordup.com']
        self.user.save()
        url = api_url_for('serialize_social')
        res = self.app.get(
            url,
            auth=self.user.auth,
        )
        assert_equal(res.json.get('twitter'), 'howtopizza')
        assert_equal(res.json.get('profileWebsites'), ['http://www.cos.io', 'http://www.osf.io', 'http://www.wordup.com'])
        # unset fields serialize as None
        assert_true(res.json.get('github') is None)
        assert_true(res.json['editable'])
    def test_serialize_social_not_editable(self):
        """Viewing another user's social fields marks the payload read-only."""
        user2 = AuthUserFactory()
        self.user.social['twitter'] = 'howtopizza'
        self.user.social['profileWebsites'] = ['http://www.cos.io', 'http://www.osf.io', 'http://www.wordup.com']
        self.user.save()
        url = api_url_for('serialize_social', uid=self.user._id)
        res = self.app.get(
            url,
            auth=user2.auth,
        )
        assert_equal(res.json.get('twitter'), 'howtopizza')
        assert_equal(res.json.get('profileWebsites'), ['http://www.cos.io', 'http://www.osf.io', 'http://www.wordup.com'])
        assert_true(res.json.get('github') is None)
        assert_false(res.json['editable'])
def test_serialize_social_addons_editable(self):
self.user.add_addon('github')
oauth_settings = GitHubAccountFactory()
oauth_settings.save()
self.user.external_accounts.append(oauth_settings)
self.user.save()
url = api_url_for('serialize_social')
res = self.app.get(
url,
auth=self.user.auth,
)
assert_equal(
res.json['addons']['github'],
'abc'
)
def test_serialize_social_addons_not_editable(self):
user2 = AuthUserFactory()
self.user.add_addon('github')
oauth_settings = GitHubAccountFactory()
oauth_settings.save()
self.user.external_accounts.append(oauth_settings)
self.user.save()
url = api_url_for('serialize_social', uid=self.user._id)
res = self.app.get(
url,
auth=user2.auth,
)
assert_not_in('addons', res.json)
def test_unserialize_and_serialize_jobs(self):
jobs = [{
'institution': 'an institution',
'department': 'a department',
'title': 'a title',
'startMonth': 'January',
'startYear': '2001',
'endMonth': 'March',
'endYear': '2001',
'ongoing': False,
}, {
'institution': 'another institution',
'department': None,
'title': None,
'startMonth': 'May',
'startYear': '2001',
'endMonth': None,
'endYear': None,
'ongoing': True,
}]
payload = {'contents': jobs}
url = api_url_for('unserialize_jobs')
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(len(self.user.jobs), 2)
url = api_url_for('serialize_jobs')
res = self.app.get(
url,
auth=self.user.auth,
)
for i, job in enumerate(jobs):
assert_equal(job, res.json['contents'][i])
def test_unserialize_and_serialize_schools(self):
schools = [{
'institution': 'an institution',
'department': 'a department',
'degree': 'a degree',
'startMonth': 1,
'startYear': '2001',
'endMonth': 5,
'endYear': '2001',
'ongoing': False,
}, {
'institution': 'another institution',
'department': None,
'degree': None,
'startMonth': 5,
'startYear': '2001',
'endMonth': None,
'endYear': None,
'ongoing': True,
}]
payload = {'contents': schools}
url = api_url_for('unserialize_schools')
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(len(self.user.schools), 2)
url = api_url_for('serialize_schools')
res = self.app.get(
url,
auth=self.user.auth,
)
for i, job in enumerate(schools):
assert_equal(job, res.json['contents'][i])
def test_unserialize_jobs(self):
jobs = [
{
'institution': fake.company(),
'department': fake.catch_phrase(),
'title': fake.bs(),
'startMonth': 5,
'startYear': '2013',
'endMonth': 3,
'endYear': '2014',
'ongoing': False,
}
]
payload = {'contents': jobs}
url = api_url_for('unserialize_jobs')
res = self.app.put_json(url, payload, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.user.reload()
# jobs field is updated
assert_equal(self.user.jobs, jobs)
def test_unserialize_names(self):
fake_fullname_w_spaces = ' {} '.format(fake.name())
names = {
'full': fake_fullname_w_spaces,
'given': 'Tea',
'middle': 'Gray',
'family': 'Pot',
'suffix': 'Ms.',
}
url = api_url_for('unserialize_names')
res = self.app.put_json(url, names, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.user.reload()
# user is updated
assert_equal(self.user.fullname, fake_fullname_w_spaces.strip())
assert_equal(self.user.given_name, names['given'])
assert_equal(self.user.middle_names, names['middle'])
assert_equal(self.user.family_name, names['family'])
assert_equal(self.user.suffix, names['suffix'])
def test_unserialize_schools(self):
schools = [
{
'institution': fake.company(),
'department': fake.catch_phrase(),
'degree': fake.bs(),
'startMonth': 5,
'startYear': '2013',
'endMonth': 3,
'endYear': '2014',
'ongoing': False,
}
]
payload = {'contents': schools}
url = api_url_for('unserialize_schools')
res = self.app.put_json(url, payload, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.user.reload()
# schools field is updated
assert_equal(self.user.schools, schools)
def test_unserialize_jobs_valid(self):
jobs = [
{
'institution': fake.company(),
'department': fake.catch_phrase(),
'title': fake.bs(),
'startMonth': 5,
'startYear': '2013',
'endMonth': 3,
'endYear': '2014',
'ongoing': False,
}
]
payload = {'contents': jobs}
url = api_url_for('unserialize_jobs')
res = self.app.put_json(url, payload, auth=self.user.auth)
assert_equal(res.status_code, 200)
def test_get_current_user_gravatar_default_size(self):
url = api_url_for('current_user_gravatar')
res = self.app.get(url, auth=self.user.auth)
current_user_gravatar = res.json['gravatar_url']
assert_true(current_user_gravatar is not None)
url = api_url_for('get_gravatar', uid=self.user._id)
res = self.app.get(url, auth=self.user.auth)
my_user_gravatar = res.json['gravatar_url']
assert_equal(current_user_gravatar, my_user_gravatar)
def test_get_other_user_gravatar_default_size(self):
user2 = AuthUserFactory()
url = api_url_for('current_user_gravatar')
res = self.app.get(url, auth=self.user.auth)
current_user_gravatar = res.json['gravatar_url']
url = api_url_for('get_gravatar', uid=user2._id)
res = self.app.get(url, auth=self.user.auth)
user2_gravatar = res.json['gravatar_url']
assert_true(user2_gravatar is not None)
assert_not_equal(current_user_gravatar, user2_gravatar)
def test_get_current_user_gravatar_specific_size(self):
url = api_url_for('current_user_gravatar')
res = self.app.get(url, auth=self.user.auth)
current_user_default_gravatar = res.json['gravatar_url']
url = api_url_for('current_user_gravatar', size=11)
res = self.app.get(url, auth=self.user.auth)
current_user_small_gravatar = res.json['gravatar_url']
assert_true(current_user_small_gravatar is not None)
assert_not_equal(current_user_default_gravatar, current_user_small_gravatar)
def test_get_other_user_gravatar_specific_size(self):
user2 = AuthUserFactory()
url = api_url_for('get_gravatar', uid=user2._id)
res = self.app.get(url, auth=self.user.auth)
gravatar_default_size = res.json['gravatar_url']
url = api_url_for('get_gravatar', uid=user2._id, size=11)
res = self.app.get(url, auth=self.user.auth)
gravatar_small = res.json['gravatar_url']
assert_true(gravatar_small is not None)
assert_not_equal(gravatar_default_size, gravatar_small)
def test_update_user_timezone(self):
assert_equal(self.user.timezone, 'Etc/UTC')
payload = {'timezone': 'America/New_York', 'id': self.user._id}
url = api_url_for('update_user', uid=self.user._id)
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(self.user.timezone, 'America/New_York')
def test_update_user_locale(self):
assert_equal(self.user.locale, 'en_US')
payload = {'locale': 'de_DE', 'id': self.user._id}
url = api_url_for('update_user', uid=self.user._id)
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(self.user.locale, 'de_DE')
def test_update_user_locale_none(self):
assert_equal(self.user.locale, 'en_US')
payload = {'locale': None, 'id': self.user._id}
url = api_url_for('update_user', uid=self.user._id)
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(self.user.locale, 'en_US')
def test_update_user_locale_empty_string(self):
assert_equal(self.user.locale, 'en_US')
payload = {'locale': '', 'id': self.user._id}
url = api_url_for('update_user', uid=self.user._id)
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(self.user.locale, 'en_US')
def test_cannot_update_user_without_user_id(self):
user1 = AuthUserFactory()
url = api_url_for('update_user')
header = {'emails': [{'address': user1.username}]}
res = self.app.put_json(url, header, auth=user1.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['message_long'], '"id" is required')
@mock.patch('framework.auth.views.mails.send_mail')
def test_add_emails_return_emails(self, send_mail):
user1 = AuthUserFactory()
url = api_url_for('update_user')
email = 'test@cos.io'
header = {'id': user1._id,
'emails': [{'address': user1.username, 'primary': True, 'confirmed': True},
{'address': email, 'primary': False, 'confirmed': False}
]}
res = self.app.put_json(url, header, auth=user1.auth)
assert_equal(res.status_code, 200)
assert_in('emails', res.json['profile'])
assert_equal(len(res.json['profile']['emails']), 2)
@mock.patch('framework.auth.views.mails.send_mail')
def test_resend_confirmation_return_emails(self, send_mail):
user1 = AuthUserFactory()
url = api_url_for('resend_confirmation')
email = 'test@cos.io'
header = {'id': user1._id,
'email': {'address': email, 'primary': False, 'confirmed': False}
}
res = self.app.put_json(url, header, auth=user1.auth)
assert_equal(res.status_code, 200)
assert_in('emails', res.json['profile'])
assert_equal(len(res.json['profile']['emails']), 2)
    @mock.patch('framework.auth.views.mails.send_mail')
    @mock.patch('website.mailchimp_utils.get_mailchimp_api')
    def test_update_user_mailing_lists(self, mock_get_mailchimp_api, send_mail):
        """Changing the primary email of a MailChimp-subscribed user unsubscribes
        the old address and subscribes the new one on the same list."""
        email = fake.email()
        self.user.emails.append(email)
        list_name = 'foo'
        # User starts subscribed, so a primary-email change must trigger both
        # an unsubscribe (old address) and a subscribe (new address).
        self.user.mailchimp_mailing_lists[list_name] = True
        self.user.save()
        mock_client = mock.MagicMock()
        mock_get_mailchimp_api.return_value = mock_client
        # Minimal MailChimp list listing so get_list_id_from_name resolves to id 1.
        mock_client.lists.list.return_value = {'data': [{'id': 1, 'list_name': list_name}]}
        list_id = mailchimp_utils.get_list_id_from_name(list_name)
        url = api_url_for('update_user', uid=self.user._id)
        # Swap primary: the new address becomes primary, the username is demoted.
        emails = [
            {'address': self.user.username, 'primary': False, 'confirmed': True},
            {'address': email, 'primary': True, 'confirmed': True}]
        payload = {'locale': '', 'id': self.user._id, 'emails': emails}
        self.app.put_json(url, payload, auth=self.user.auth)
        mock_client.lists.unsubscribe.assert_called_with(
            id=list_id,
            email={'email': self.user.username},
            send_goodbye=True
        )
        mock_client.lists.subscribe.assert_called_with(
            id=list_id,
            email={'email': email},
            merge_vars={
                'fname': self.user.given_name,
                'lname': self.user.family_name,
            },
            double_optin=False,
            update_existing=True
        )
        # Flush celery tasks queued by the request handler so the mock calls above
        # have actually executed before asserting on them in later tests.
        handlers.celery_teardown_request()
    @mock.patch('framework.auth.views.mails.send_mail')
    @mock.patch('website.mailchimp_utils.get_mailchimp_api')
    def test_unsubscribe_mailchimp_not_called_if_user_not_subscribed(self, mock_get_mailchimp_api, send_mail):
        """A primary-email change makes no MailChimp API calls when the user is
        not subscribed to the mailing list."""
        email = fake.email()
        self.user.emails.append(email)
        list_name = 'foo'
        # Subscription flag is False, so neither subscribe nor unsubscribe should fire.
        self.user.mailchimp_mailing_lists[list_name] = False
        self.user.save()
        mock_client = mock.MagicMock()
        mock_get_mailchimp_api.return_value = mock_client
        mock_client.lists.list.return_value = {'data': [{'id': 1, 'list_name': list_name}]}
        url = api_url_for('update_user', uid=self.user._id)
        emails = [
            {'address': self.user.username, 'primary': False, 'confirmed': True},
            {'address': email, 'primary': True, 'confirmed': True}]
        payload = {'locale': '', 'id': self.user._id, 'emails': emails}
        self.app.put_json(url, payload, auth=self.user.auth)
        assert_equal(mock_client.lists.unsubscribe.call_count, 0)
        assert_equal(mock_client.lists.subscribe.call_count, 0)
        # Flush any celery tasks queued by the request handler.
        handlers.celery_teardown_request()
# TODO: Uncomment once outstanding issues with this feature are addressed
# def test_twitter_redirect_success(self):
# self.user.social['twitter'] = fake.last_name()
# self.user.save()
# res = self.app.get(web_url_for('redirect_to_twitter', twitter_handle=self.user.social['twitter']))
# assert_equals(res.status_code, http.FOUND)
# assert_in(self.user.url, res.location)
# def test_twitter_redirect_is_case_insensitive(self):
# self.user.social['twitter'] = fake.last_name()
# self.user.save()
# res1 = self.app.get(web_url_for('redirect_to_twitter', twitter_handle=self.user.social['twitter']))
# res2 = self.app.get(web_url_for('redirect_to_twitter', twitter_handle=self.user.social['twitter'].lower()))
# assert_equal(res1.location, res2.location)
# def test_twitter_redirect_unassociated_twitter_handle_returns_404(self):
# unassociated_handle = fake.last_name()
# expected_error = 'There is no active user associated with the Twitter handle: {0}.'.format(unassociated_handle)
# res = self.app.get(
# web_url_for('redirect_to_twitter', twitter_handle=unassociated_handle),
# expect_errors=True
# )
# assert_equal(res.status_code, http.NOT_FOUND)
# assert_true(expected_error in res.body)
# def test_twitter_redirect_handle_with_multiple_associated_accounts_redirects_to_selection_page(self):
# self.user.social['twitter'] = fake.last_name()
# self.user.save()
# user2 = AuthUserFactory()
# user2.social['twitter'] = self.user.social['twitter']
# user2.save()
# expected_error = 'There are multiple OSF accounts associated with the Twitter handle: <strong>{0}</strong>.'.format(self.user.social['twitter'])
# res = self.app.get(
# web_url_for(
# 'redirect_to_twitter',
# twitter_handle=self.user.social['twitter'],
# expect_error=True
# )
# )
# assert_equal(res.status_code, http.MULTIPLE_CHOICES)
# assert_true(expected_error in res.body)
# assert_true(self.user.url in res.body)
# assert_true(user2.url in res.body)
class TestUserProfileApplicationsPage(OsfTestCase):
    """Access control for the OAuth2 platform-application detail page."""
    def setUp(self):
        super(TestUserProfileApplicationsPage, self).setUp()
        self.user = AuthUserFactory()
        self.user2 = AuthUserFactory()
        # self.user owns the application; self.user2 is an unrelated account.
        self.platform_app = ApiOAuth2ApplicationFactory(owner=self.user)
        self.detail_url = web_url_for('oauth_application_detail', client_id=self.platform_app.client_id)
    def test_non_owner_cant_access_detail_page(self):
        """A non-owner gets 403 FORBIDDEN on the detail page."""
        res = self.app.get(self.detail_url, auth=self.user2.auth, expect_errors=True)
        assert_equal(res.status_code, http.FORBIDDEN)
    def test_owner_cant_access_deleted_application(self):
        """A deactivated application returns 410 GONE even for its owner."""
        self.platform_app.is_active = False
        self.platform_app.save()
        res = self.app.get(self.detail_url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, http.GONE)
    def test_owner_cant_access_nonexistent_application(self):
        """An unknown client_id returns 404 NOT_FOUND."""
        url = web_url_for('oauth_application_detail', client_id='nonexistent')
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, http.NOT_FOUND)
    def test_url_has_not_broken(self):
        # Guard against route changes: the model's computed URL must match the view URL.
        assert_equal(self.platform_app.url, self.detail_url)
class TestUserProfileTokensPage(OsfTestCase):
    """Routing for the personal-access-token detail page."""
    def setUp(self):
        super(TestUserProfileTokensPage, self).setUp()
        self.user = AuthUserFactory()
        # NOTE(review): the token is created without owner=self.user; these
        # tests only exercise URL construction, so ownership is not needed here.
        self.token = ApiOAuth2PersonalTokenFactory()
        self.detail_url = web_url_for('personal_access_token_detail', _id=self.token._id)
    def test_url_has_not_broken(self):
        # Guard against route changes: the model's computed URL must match the view URL.
        assert_equal(self.token.url, self.detail_url)
class TestUserAccount(OsfTestCase):
def setUp(self):
super(TestUserAccount, self).setUp()
self.user = AuthUserFactory()
self.user.set_password('password')
self.user.save()
@mock.patch('website.profile.views.push_status_message')
def test_password_change_valid(self, mock_push_status_message):
old_password = 'password'
new_password = 'Pa$$w0rd'
confirm_password = new_password
url = web_url_for('user_account_password')
post_data = {
'old_password': old_password,
'new_password': new_password,
'confirm_password': confirm_password,
}
res = self.app.post(url, post_data, auth=(self.user.username, old_password))
assert_true(302, res.status_code)
res = res.follow(auth=(self.user.username, new_password))
assert_true(200, res.status_code)
self.user.reload()
assert_true(self.user.check_password(new_password))
assert_true(mock_push_status_message.called)
assert_in('Password updated successfully', mock_push_status_message.mock_calls[0][1][0])
@mock.patch('website.profile.views.push_status_message')
def test_password_change_invalid(self, mock_push_status_message, old_password='', new_password='',
confirm_password='', error_message='Old password is invalid'):
url = web_url_for('user_account_password')
post_data = {
'old_password': old_password,
'new_password': new_password,
'confirm_password': confirm_password,
}
res = self.app.post(url, post_data, auth=self.user.auth)
assert_true(302, res.status_code)
res = res.follow(auth=self.user.auth)
assert_true(200, res.status_code)
self.user.reload()
assert_false(self.user.check_password(new_password))
assert_true(mock_push_status_message.called)
error_strings = [e[1][0] for e in mock_push_status_message.mock_calls]
assert_in(error_message, error_strings)
def test_password_change_invalid_old_password(self):
self.test_password_change_invalid(
old_password='invalid old password',
new_password='new password',
confirm_password='new password',
error_message='Old password is invalid',
)
def test_password_change_invalid_confirm_password(self):
self.test_password_change_invalid(
old_password='password',
new_password='new password',
confirm_password='invalid confirm password',
error_message='Password does not match the confirmation',
)
def test_password_change_invalid_new_password_length(self):
self.test_password_change_invalid(
old_password='password',
new_password='12345',
confirm_password='12345',
error_message='Password should be at least six characters',
)
def test_password_change_invalid_blank_password(self, old_password='', new_password='', confirm_password=''):
self.test_password_change_invalid(
old_password=old_password,
new_password=new_password,
confirm_password=confirm_password,
error_message='Passwords cannot be blank',
)
def test_password_change_invalid_blank_new_password(self):
for password in ('', ' '):
self.test_password_change_invalid_blank_password('password', password, 'new password')
def test_password_change_invalid_blank_confirm_password(self):
for password in ('', ' '):
self.test_password_change_invalid_blank_password('password', 'new password', password)
    @mock.patch('framework.auth.views.mails.send_mail')
    def test_user_cannot_request_account_export_before_throttle_expires(self, send_mail):
        """A second export request within the throttle window is rejected with
        400 and does not send another email."""
        url = api_url_for('request_export')
        self.app.post(url, auth=self.user.auth)
        assert_true(send_mail.called)
        res = self.app.post(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        # Only the first request produced an email.
        assert_equal(send_mail.call_count, 1)
    @mock.patch('framework.auth.views.mails.send_mail')
    def test_user_cannot_request_account_deactivation_before_throttle_expires(self, send_mail):
        """A second deactivation request within the throttle window is rejected
        with 400 and does not send another email."""
        url = api_url_for('request_deactivation')
        self.app.post(url, auth=self.user.auth)
        assert_true(send_mail.called)
        res = self.app.post(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        # Only the first request produced an email.
        assert_equal(send_mail.call_count, 1)
class TestAddingContributorViews(OsfTestCase):
def setUp(self):
super(TestAddingContributorViews, self).setUp()
self.creator = AuthUserFactory()
self.project = ProjectFactory(creator=self.creator)
self.auth = Auth(self.project.creator)
# Authenticate all requests
self.app.authenticate(*self.creator.auth)
contributor_added.connect(notify_added_contributor)
def test_serialize_unregistered_without_record(self):
name, email = fake.name(), fake.email()
res = serialize_unregistered(fullname=name, email=email)
assert_equal(res['fullname'], name)
assert_equal(res['email'], email)
assert_equal(res['id'], None)
assert_false(res['registered'])
assert_true(res['gravatar'])
assert_false(res['active'])
def test_deserialize_contributors(self):
contrib = UserFactory()
unreg = UnregUserFactory()
name, email = fake.name(), fake.email()
unreg_no_record = serialize_unregistered(name, email)
contrib_data = [
add_contributor_json(contrib),
serialize_unregistered(fake.name(), unreg.username),
unreg_no_record
]
contrib_data[0]['permission'] = 'admin'
contrib_data[1]['permission'] = 'write'
contrib_data[2]['permission'] = 'read'
contrib_data[0]['visible'] = True
contrib_data[1]['visible'] = True
contrib_data[2]['visible'] = True
res = deserialize_contributors(
self.project,
contrib_data,
auth=Auth(self.creator))
assert_equal(len(res), len(contrib_data))
assert_true(res[0]['user'].is_registered)
assert_false(res[1]['user'].is_registered)
assert_true(res[1]['user']._id)
assert_false(res[2]['user'].is_registered)
assert_true(res[2]['user']._id)
def test_deserialize_contributors_validates_fullname(self):
name = "<img src=1 onerror=console.log(1)>"
email = fake.email()
unreg_no_record = serialize_unregistered(name, email)
contrib_data = [unreg_no_record]
contrib_data[0]['permission'] = 'admin'
contrib_data[0]['visible'] = True
with assert_raises(ValidationError):
deserialize_contributors(
self.project,
contrib_data,
auth=Auth(self.creator),
validate=True)
def test_deserialize_contributors_validates_email(self):
name = fake.name()
email = "!@#$%%^&*"
unreg_no_record = serialize_unregistered(name, email)
contrib_data = [unreg_no_record]
contrib_data[0]['permission'] = 'admin'
contrib_data[0]['visible'] = True
with assert_raises(ValidationError):
deserialize_contributors(
self.project,
contrib_data,
auth=Auth(self.creator),
validate=True)
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_deserialize_contributors_sends_unreg_contributor_added_signal(self, _):
unreg = UnregUserFactory()
from website.project.signals import unreg_contributor_added
serialized = [serialize_unregistered(fake.name(), unreg.username)]
serialized[0]['visible'] = True
with capture_signals() as mock_signals:
deserialize_contributors(self.project, serialized,
auth=Auth(self.creator))
assert_equal(mock_signals.signals_sent(), set([unreg_contributor_added]))
def test_serialize_unregistered_with_record(self):
name, email = fake.name(), fake.email()
user = self.project.add_unregistered_contributor(fullname=name,
email=email, auth=Auth(self.project.creator))
self.project.save()
res = serialize_unregistered(
fullname=name,
email=email
)
assert_false(res['active'])
assert_false(res['registered'])
assert_equal(res['id'], user._primary_key)
assert_true(res['gravatar_url'])
assert_equal(res['fullname'], name)
assert_equal(res['email'], email)
def test_add_contributor_with_unreg_contribs_and_reg_contribs(self):
n_contributors_pre = len(self.project.contributors)
reg_user = UserFactory()
name, email = fake.name(), fake.email()
pseudouser = {
'id': None,
'registered': False,
'fullname': name,
'email': email,
'permission': 'admin',
'visible': True,
}
reg_dict = add_contributor_json(reg_user)
reg_dict['permission'] = 'admin'
reg_dict['visible'] = True
payload = {
'users': [reg_dict, pseudouser],
'node_ids': []
}
url = self.project.api_url_for('project_contributors_post')
self.app.post_json(url, payload).maybe_follow()
self.project.reload()
assert_equal(len(self.project.contributors),
n_contributors_pre + len(payload['users']))
new_unreg = auth.get_user(email=email)
assert_false(new_unreg.is_registered)
# unclaimed record was added
new_unreg.reload()
assert_in(self.project._primary_key, new_unreg.unclaimed_records)
rec = new_unreg.get_unclaimed_record(self.project._primary_key)
assert_equal(rec['name'], name)
assert_equal(rec['email'], email)
@mock.patch('website.project.views.contributor.send_claim_email')
def test_add_contributors_post_only_sends_one_email_to_unreg_user(
self, mock_send_claim_email):
# Project has components
comp1, comp2 = NodeFactory(
creator=self.creator), NodeFactory(creator=self.creator)
self.project.nodes.append(comp1)
self.project.nodes.append(comp2)
self.project.save()
# An unreg user is added to the project AND its components
unreg_user = { # dict because user has not previous unreg record
'id': None,
'registered': False,
'fullname': fake.name(),
'email': fake.email(),
'permission': 'admin',
'visible': True,
}
payload = {
'users': [unreg_user],
'node_ids': [comp1._primary_key, comp2._primary_key]
}
# send request
url = self.project.api_url_for('project_contributors_post')
assert_true(self.project.can_edit(user=self.creator))
self.app.post_json(url, payload, auth=self.creator.auth)
# finalize_invitation should only have been called once
assert_equal(mock_send_claim_email.call_count, 1)
@mock.patch('website.mails.send_mail')
def test_add_contributors_post_only_sends_one_email_to_registered_user(self, mock_send_mail):
# Project has components
comp1 = NodeFactory(creator=self.creator, parent=self.project)
comp2 = NodeFactory(creator=self.creator, parent=self.project)
# A registered user is added to the project AND its components
user = UserFactory()
user_dict = {
'id': user._id,
'fullname': user.fullname,
'email': user.username,
'permission': 'write',
'visible': True}
payload = {
'users': [user_dict],
'node_ids': [comp1._primary_key, comp2._primary_key]
}
# send request
url = self.project.api_url_for('project_contributors_post')
assert self.project.can_edit(user=self.creator)
self.app.post_json(url, payload, auth=self.creator.auth)
# send_mail should only have been called once
assert_equal(mock_send_mail.call_count, 1)
@mock.patch('website.mails.send_mail')
def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self, mock_send_mail):
# Project has a component with a sub-component
component = NodeFactory(creator=self.creator, parent=self.project)
sub_component = NodeFactory(creator=self.creator, parent=component)
# A registered user is added to the project and the sub-component, but NOT the component
user = UserFactory()
user_dict = {
'id': user._id,
'fullname': user.fullname,
'email': user.username,
'permission': 'write',
'visible': True}
payload = {
'users': [user_dict],
'node_ids': [sub_component._primary_key]
}
# send request
url = self.project.api_url_for('project_contributors_post')
assert self.project.can_edit(user=self.creator)
self.app.post_json(url, payload, auth=self.creator.auth)
# send_mail is called for both the project and the sub-component
assert_equal(mock_send_mail.call_count, 2)
@mock.patch('website.project.views.contributor.send_claim_email')
def test_email_sent_when_unreg_user_is_added(self, send_mail):
name, email = fake.name(), fake.email()
pseudouser = {
'id': None,
'registered': False,
'fullname': name,
'email': email,
'permission': 'admin',
'visible': True,
}
payload = {
'users': [pseudouser],
'node_ids': []
}
url = self.project.api_url_for('project_contributors_post')
self.app.post_json(url, payload).maybe_follow()
assert_true(send_mail.called)
assert_true(send_mail.called_with(email=email))
    @mock.patch('website.mails.send_mail')
    def test_email_sent_when_reg_user_is_added(self, send_mail):
        """Adding a registered contributor sends the CONTRIBUTOR_ADDED email
        and stamps the throttle record."""
        contributor = UserFactory()
        contributors = [{
            'user': contributor,
            'visible': True,
            'permissions': ['read', 'write']
        }]
        project = ProjectFactory()
        project.add_contributors(contributors, auth=self.auth)
        project.save()
        assert_true(send_mail.called)
        send_mail.assert_called_with(
            contributor.username,
            mails.CONTRIBUTOR_ADDED,
            user=contributor,
            node=project,
            referrer_name=self.auth.user.fullname,
            all_global_subscriptions_none=False)
        # last_sent is recorded for throttling; allow 1s of slack for test runtime.
        assert_almost_equal(contributor.contributor_added_email_records[project._id]['last_sent'], int(time.time()), delta=1)
@mock.patch('website.mails.send_mail')
def test_contributor_added_email_not_sent_to_unreg_user(self, send_mail):
unreg_user = UnregUserFactory()
contributors = [{
'user': unreg_user,
'visible': True,
'permissions': ['read', 'write']
}]
project = ProjectFactory()
project.add_contributors(contributors, auth=Auth(self.project.creator))
project.save()
assert_false(send_mail.called)
@mock.patch('website.mails.send_mail')
def test_forking_project_does_not_send_contributor_added_email(self, send_mail):
project = ProjectFactory()
project.fork_node(auth=Auth(project.creator))
assert_false(send_mail.called)
@mock.patch('website.mails.send_mail')
def test_templating_project_does_not_send_contributor_added_email(self, send_mail):
project = ProjectFactory()
project.use_as_template(auth=Auth(project.creator))
assert_false(send_mail.called)
@mock.patch('website.archiver.tasks.archive')
@mock.patch('website.mails.send_mail')
def test_registering_project_does_not_send_contributor_added_email(self, send_mail, mock_archive):
project = ProjectFactory()
project.register_node(get_default_metaschema(), Auth(user=project.creator), '', None)
assert_false(send_mail.called)
@mock.patch('website.mails.send_mail')
def test_notify_contributor_email_does_not_send_before_throttle_expires(self, send_mail):
contributor = UserFactory()
project = ProjectFactory()
auth = Auth(project.creator)
notify_added_contributor(project, contributor, auth)
assert_true(send_mail.called)
# 2nd call does not send email because throttle period has not expired
notify_added_contributor(project, contributor, auth)
assert_equal(send_mail.call_count, 1)
@mock.patch('website.mails.send_mail')
def test_notify_contributor_email_sends_after_throttle_expires(self, send_mail):
throttle = 0.5
contributor = UserFactory()
project = ProjectFactory()
auth = Auth(project.creator)
notify_added_contributor(project, contributor, auth, throttle=throttle)
assert_true(send_mail.called)
time.sleep(1) # throttle period expires
notify_added_contributor(project, contributor, auth, throttle=throttle)
assert_equal(send_mail.call_count, 2)
def test_add_multiple_contributors_only_adds_one_log(self):
n_logs_pre = len(self.project.logs)
reg_user = UserFactory()
name = fake.name()
pseudouser = {
'id': None,
'registered': False,
'fullname': name,
'email': fake.email(),
'permission': 'write',
'visible': True,
}
reg_dict = add_contributor_json(reg_user)
reg_dict['permission'] = 'admin'
reg_dict['visible'] = True
payload = {
'users': [reg_dict, pseudouser],
'node_ids': []
}
url = self.project.api_url_for('project_contributors_post')
self.app.post_json(url, payload).maybe_follow()
self.project.reload()
assert_equal(len(self.project.logs), n_logs_pre + 1)
def test_add_contribs_to_multiple_nodes(self):
child = NodeFactory(parent=self.project, creator=self.creator)
n_contributors_pre = len(child.contributors)
reg_user = UserFactory()
name, email = fake.name(), fake.email()
pseudouser = {
'id': None,
'registered': False,
'fullname': name,
'email': email,
'permission': 'admin',
'visible': True,
}
reg_dict = add_contributor_json(reg_user)
reg_dict['permission'] = 'admin'
reg_dict['visible'] = True
payload = {
'users': [reg_dict, pseudouser],
'node_ids': [self.project._primary_key, child._primary_key]
}
url = "/api/v1/project/{0}/contributors/".format(self.project._id)
self.app.post_json(url, payload).maybe_follow()
child.reload()
assert_equal(len(child.contributors),
n_contributors_pre + len(payload['users']))
def tearDown(self):
super(TestAddingContributorViews, self).tearDown()
contributor_added.disconnect(notify_added_contributor)
class TestUserInviteViews(OsfTestCase):
def setUp(self):
super(TestUserInviteViews, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user)
self.invite_url = '/api/v1/project/{0}/invite_contributor/'.format(
self.project._primary_key)
def test_invite_contributor_post_if_not_in_db(self):
name, email = fake.name(), fake.email()
res = self.app.post_json(
self.invite_url,
{'fullname': name, 'email': email},
auth=self.user.auth,
)
contrib = res.json['contributor']
assert_true(contrib['id'] is None)
assert_equal(contrib['fullname'], name)
assert_equal(contrib['email'], email)
    def test_invite_contributor_post_if_unreg_already_in_db(self):
        """Inviting an email that already belongs to an unregistered user
        reuses that user's record instead of creating a new one."""
        # An unregistered user is first added to a *different* project.
        name, email = fake.name(), fake.email()
        project2 = ProjectFactory()
        unreg_user = project2.add_unregistered_contributor(fullname=name, email=email,
                                                           auth=Auth(project2.creator))
        project2.save()
        res = self.app.post_json(self.invite_url,
                                 {'fullname': name, 'email': email}, auth=self.user.auth)
        # The serialized contributor should match the existing unreg user's record.
        expected = add_contributor_json(unreg_user)
        expected['fullname'] = name
        expected['email'] = email
        assert_equal(res.json['contributor'], expected)
    def test_invite_contributor_post_if_emaiL_already_registered(self):
        """Inviting an email that belongs to a registered user returns 400."""
        # NOTE(review): the method name contains a stray capital L ("emaiL");
        # kept as-is so the test's discovered name does not change.
        reg_user = UserFactory()
        # Tries to invite a user that is already registered
        res = self.app.post_json(self.invite_url,
                                 {'fullname': fake.name(), 'email': reg_user.username},
                                 auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, http.BAD_REQUEST)
def test_invite_contributor_post_if_user_is_already_contributor(self):
unreg_user = self.project.add_unregistered_contributor(
fullname=fake.name(), email=fake.email(),
auth=Auth(self.project.creator)
)
self.project.save()
# Tries to invite unreg user that is already a contributor
res = self.app.post_json(self.invite_url,
{'fullname': fake.name(), 'email': unreg_user.username},
auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, http.BAD_REQUEST)
def test_invite_contributor_with_no_email(self):
name = fake.name()
res = self.app.post_json(self.invite_url,
{'fullname': name, 'email': None}, auth=self.user.auth)
assert_equal(res.status_code, http.OK)
data = res.json
assert_equal(data['status'], 'success')
assert_equal(data['contributor']['fullname'], name)
assert_true(data['contributor']['email'] is None)
assert_false(data['contributor']['registered'])
def test_invite_contributor_requires_fullname(self):
res = self.app.post_json(self.invite_url,
{'email': 'brian@queen.com', 'fullname': ''}, auth=self.user.auth,
expect_errors=True)
assert_equal(res.status_code, http.BAD_REQUEST)
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_send_claim_email_to_given_email(self, send_mail):
project = ProjectFactory()
given_email = fake.email()
unreg_user = project.add_unregistered_contributor(
fullname=fake.name(),
email=given_email,
auth=Auth(project.creator),
)
project.save()
send_claim_email(email=given_email, user=unreg_user, node=project)
assert_true(send_mail.called)
assert_true(send_mail.called_with(
to_addr=given_email,
mail=mails.INVITE
))
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_send_claim_email_to_referrer(self, send_mail):
project = ProjectFactory()
referrer = project.creator
given_email, real_email = fake.email(), fake.email()
unreg_user = project.add_unregistered_contributor(fullname=fake.name(),
email=given_email, auth=Auth(
referrer)
)
project.save()
send_claim_email(email=real_email, user=unreg_user, node=project)
assert_true(send_mail.called)
# email was sent to referrer
send_mail.assert_called_with(
referrer.username,
mails.FORWARD_INVITE,
user=unreg_user,
referrer=referrer,
claim_url=unreg_user.get_claim_url(project._id, external=True),
email=real_email.lower().strip(),
fullname=unreg_user.get_unclaimed_record(project._id)['name'],
node=project
)
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_send_claim_email_before_throttle_expires(self, send_mail):
project = ProjectFactory()
given_email = fake.email()
unreg_user = project.add_unregistered_contributor(
fullname=fake.name(),
email=given_email,
auth=Auth(project.creator),
)
project.save()
send_claim_email(email=fake.email(), user=unreg_user, node=project)
send_mail.reset_mock()
# 2nd call raises error because throttle hasn't expired
with assert_raises(HTTPError):
send_claim_email(email=fake.email(), user=unreg_user, node=project)
assert_false(send_mail.called)
class TestClaimViews(OsfTestCase):
    """Tests for the views that let an invited (unregistered) contributor
    claim their account, both as a new registrant and as an existing user.
    """

    def setUp(self):
        super(TestClaimViews, self).setUp()
        self.referrer = AuthUserFactory()
        self.project = ProjectFactory(creator=self.referrer, is_public=True)
        self.given_name = fake.name()
        self.given_email = fake.email()
        # The unregistered contributor whose record will be claimed.
        self.user = self.project.add_unregistered_contributor(
            fullname=self.given_name,
            email=self.given_email,
            auth=Auth(user=self.referrer)
        )
        self.project.save()

    @mock.patch('website.project.views.contributor.mails.send_mail')
    def test_claim_user_post_with_registered_user_id(self, send_mail):
        # A registered user attempts to claim the unclaimed contributor record.
        reg_user = UserFactory()
        payload = {
            # pk of the unreg user record
            'pk': self.user._primary_key,
            'claimerId': reg_user._primary_key
        }
        url = '/api/v1/user/{uid}/{pid}/claim/email/'.format(
            uid=self.user._primary_key,
            pid=self.project._primary_key,
        )
        res = self.app.post_json(url, payload)
        # Mail was sent twice: once to referrer, once to claimer ...
        assert_equal(send_mail.call_count, 2)
        # ... to the correct addresses.
        referrer_call = send_mail.call_args_list[0]
        claimer_call = send_mail.call_args_list[1]
        args, _ = referrer_call
        assert_equal(args[0], self.referrer.username)
        args, _ = claimer_call
        assert_equal(args[0], reg_user.username)
        # The view returns the correct JSON.
        assert_equal(res.json, {
            'status': 'success',
            'email': reg_user.username,
            'fullname': self.given_name,
        })

    @mock.patch('website.project.views.contributor.mails.send_mail')
    def test_send_claim_registered_email(self, mock_send_mail):
        reg_user = UserFactory()
        send_claim_registered_email(
            claimer=reg_user,
            unreg_user=self.user,
            node=self.project
        )
        # One email to the referrer, one to the claimer, in that order.
        assert_equal(mock_send_mail.call_count, 2)
        first_call_args = mock_send_mail.call_args_list[0][0]
        assert_equal(first_call_args[0], self.referrer.username)
        second_call_args = mock_send_mail.call_args_list[1][0]
        assert_equal(second_call_args[0], reg_user.username)

    @mock.patch('website.project.views.contributor.mails.send_mail')
    def test_send_claim_registered_email_before_throttle_expires(self, mock_send_mail):
        reg_user = UserFactory()
        send_claim_registered_email(
            claimer=reg_user,
            unreg_user=self.user,
            node=self.project,
        )
        mock_send_mail.reset_mock()
        # Second call raises because it happens within the throttle period.
        with assert_raises(HTTPError):
            send_claim_registered_email(
                claimer=reg_user,
                unreg_user=self.user,
                node=self.project,
            )
        assert_false(mock_send_mail.called)

    @mock.patch('website.project.views.contributor.send_claim_registered_email')
    def test_claim_user_post_with_email_already_registered_sends_correct_email(
            self, send_claim_registered_email):
        reg_user = UserFactory()
        payload = {
            'value': reg_user.username,
            'pk': self.user._primary_key
        }
        url = self.project.api_url_for('claim_user_post', uid=self.user._id)
        self.app.post_json(url, payload)
        assert_true(send_claim_registered_email.called)

    def test_user_with_removed_unclaimed_url_claiming(self):
        """ Tests that when an unclaimed user is removed from a project, the
        unregistered user object does not retain the token.
        """
        self.project.remove_contributor(self.user, Auth(user=self.referrer))
        assert_not_in(
            self.project._primary_key,
            self.user.unclaimed_records.keys()
        )

    def test_user_with_claim_url_cannot_claim_twice(self):
        """ Tests that when an unclaimed user is replaced on a project with a
        claimed user, the unregistered user object does not retain the token.
        """
        reg_user = AuthUserFactory()
        self.project.replace_contributor(self.user, reg_user)
        assert_not_in(
            self.project._primary_key,
            self.user.unclaimed_records.keys()
        )

    def test_claim_user_form_redirects_to_password_confirm_page_if_user_is_logged_in(self):
        reg_user = AuthUserFactory()
        url = self.user.get_claim_url(self.project._primary_key)
        res = self.app.get(url, auth=reg_user.auth)
        assert_equal(res.status_code, 302)
        res = res.follow(auth=reg_user.auth)
        token = self.user.get_unclaimed_record(self.project._primary_key)['token']
        expected = self.project.web_url_for(
            'claim_user_registered',
            uid=self.user._id,
            token=token,
        )
        assert_equal(res.request.path, expected)

    def test_get_valid_form(self):
        url = self.user.get_claim_url(self.project._primary_key)
        res = self.app.get(url).maybe_follow()
        assert_equal(res.status_code, 200)

    def test_invalid_claim_form_raise_400(self):
        uid = self.user._primary_key
        pid = self.project._primary_key
        url = '/user/{uid}/{pid}/claim/?token=badtoken'.format(**locals())
        res = self.app.get(url, expect_errors=True).maybe_follow()
        assert_equal(res.status_code, 400)

    def test_posting_to_claim_form_with_valid_data(self):
        url = self.user.get_claim_url(self.project._primary_key)
        res = self.app.post(url, {
            'username': self.user.username,
            'password': 'killerqueen',
            'password2': 'killerqueen'
        })
        # Successful claim redirects through the CAS login service.
        assert_equal(res.status_code, 302)
        location = res.headers.get('Location')
        assert_in('login?service=', location)
        assert_in('username', location)
        assert_in('verification_key', location)
        assert_in(self.project._primary_key, location)
        self.user.reload()
        assert_true(self.user.is_registered)
        assert_true(self.user.is_active)
        assert_not_in(self.project._primary_key, self.user.unclaimed_records)

    def test_posting_to_claim_form_removes_all_unclaimed_data(self):
        # The user has multiple unclaimed records.
        p2 = ProjectFactory(creator=self.referrer)
        self.user.add_unclaimed_record(node=p2, referrer=self.referrer,
                                       given_name=fake.name())
        self.user.save()
        assert_true(len(self.user.unclaimed_records.keys()) > 1)  # sanity check
        url = self.user.get_claim_url(self.project._primary_key)
        self.app.post(url, {
            'username': self.given_email,
            'password': 'bohemianrhap',
            'password2': 'bohemianrhap'
        })
        self.user.reload()
        # Claiming once wipes every pending unclaimed record.
        assert_equal(self.user.unclaimed_records, {})

    def test_posting_to_claim_form_sets_fullname_to_given_name(self):
        # User is created with a full name.
        original_name = fake.name()
        unreg = UnregUserFactory(fullname=original_name)
        # User is invited with a different name.
        different_name = fake.name()
        new_user = self.project.add_unregistered_contributor(
            email=unreg.username,
            fullname=different_name,
            auth=Auth(self.project.creator),
        )
        self.project.save()
        # Goes to claim url.
        claim_url = new_user.get_claim_url(self.project._id)
        self.app.post(claim_url, {
            'username': unreg.username,
            'password': 'killerqueen', 'password2': 'killerqueen'
        })
        unreg.reload()
        # Full name was set to the name the referrer gave at invite time.
        assert_equal(unreg.fullname, different_name)
        # CSL names were parsed and set correctly from that name.
        parsed_name = impute_names_model(different_name)
        assert_equal(unreg.given_name, parsed_name['given_name'])
        assert_equal(unreg.family_name, parsed_name['family_name'])

    @mock.patch('website.project.views.contributor.mails.send_mail')
    def test_claim_user_post_returns_fullname(self, send_mail):
        url = '/api/v1/user/{0}/{1}/claim/email/'.format(self.user._primary_key,
                                                         self.project._primary_key)
        res = self.app.post_json(url,
                                 {'value': self.given_email,
                                  'pk': self.user._primary_key},
                                 auth=self.referrer.auth)
        assert_equal(res.json['fullname'], self.given_name)
        assert_true(send_mail.called)
        # NOTE(review): `called_with` is not a Mock assertion method; this
        # always passes. Likely intended: `send_mail.assert_called_with(...)`.
        assert_true(send_mail.called_with(to_addr=self.given_email))

    @mock.patch('website.project.views.contributor.mails.send_mail')
    def test_claim_user_post_if_email_is_different_from_given_email(self, send_mail):
        email = fake.email()  # email that is different from the one the referrer gave
        url = '/api/v1/user/{0}/{1}/claim/email/'.format(self.user._primary_key,
                                                         self.project._primary_key)
        self.app.post_json(url,
                           {'value': email, 'pk': self.user._primary_key}
                           )
        assert_true(send_mail.called)
        assert_equal(send_mail.call_count, 2)
        # NOTE(review): `call.called_with(...)` below is also a no-op check
        # (returns a truthy Mock); the two assert_true calls always pass.
        call_to_invited = send_mail.mock_calls[0]
        assert_true(call_to_invited.called_with(
            to_addr=email
        ))
        call_to_referrer = send_mail.mock_calls[1]
        assert_true(call_to_referrer.called_with(
            to_addr=self.given_email
        ))

    def test_claim_url_with_bad_token_returns_400(self):
        url = self.project.web_url_for(
            'claim_user_registered',
            uid=self.user._id,
            token='badtoken',
        )
        # NOTE(review): `expect_errors` is documented as a boolean; `400` is
        # merely truthy here and works by accident — should be `True`.
        res = self.app.get(url, auth=self.referrer.auth, expect_errors=400)
        assert_equal(res.status_code, 400)

    def test_cannot_claim_user_with_user_who_is_already_contributor(self):
        # A user who is already a contributor to the project.
        contrib = AuthUserFactory()
        self.project.add_contributor(contrib, auth=Auth(self.project.creator))
        self.project.save()
        # Claiming user goes to claim url, but contrib is already logged in.
        url = self.user.get_claim_url(self.project._primary_key)
        res = self.app.get(
            url,
            auth=contrib.auth,
        ).follow(
            auth=contrib.auth,
            expect_errors=True,
        )
        # Response is a 400.
        assert_equal(res.status_code, 400)
class TestWatchViews(OsfTestCase):
    """Tests for the watch/unwatch/togglewatch API views and the watched-logs feed."""

    def setUp(self):
        super(TestWatchViews, self).setUp()
        self.user = AuthUserFactory()
        self.consolidate_auth = Auth(user=self.user)
        self.auth = self.user.auth  # used for requests auth
        # A public project.
        self.project = ProjectFactory(is_public=True)
        self.project.save()
        # Manually reset log date to 100 days ago so it won't show up in feed.
        self.project.logs[0].date = dt.datetime.utcnow() - dt.timedelta(days=100)
        self.project.logs[0].save()
        # A log added now.
        self.last_log = self.project.add_log(
            NodeLog.TAG_ADDED,
            params={'node': self.project._primary_key},
            auth=self.consolidate_auth,
            log_date=dt.datetime.utcnow(),
            save=True,
        )
        # Clear watched list so each test starts from zero.
        self.user.watched = []
        self.user.save()

    def test_watching_a_project_appends_to_users_watched_list(self):
        n_watched_then = len(self.user.watched)
        url = '/api/v1/project/{0}/watch/'.format(self.project._id)
        res = self.app.post_json(url,
                                 params={"digest": True},
                                 auth=self.auth)
        assert_equal(res.json['watchCount'], 1)
        self.user.reload()
        n_watched_now = len(self.user.watched)
        assert_equal(res.status_code, 200)
        assert_equal(n_watched_now, n_watched_then + 1)
        # The digest flag from the request payload is persisted.
        assert_true(self.user.watched[-1].digest)

    def test_watching_project_twice_returns_400(self):
        url = "/api/v1/project/{0}/watch/".format(self.project._id)
        res = self.app.post_json(url,
                                 params={},
                                 auth=self.auth)
        assert_equal(res.status_code, 200)
        # User tries to watch a node she's already watching.
        res2 = self.app.post_json(url,
                                  params={},
                                  auth=self.auth,
                                  expect_errors=True)
        assert_equal(res2.status_code, http.BAD_REQUEST)

    def test_unwatching_a_project_removes_from_watched_list(self):
        # The user has already watched a project.
        watch_config = WatchConfigFactory(node=self.project)
        self.user.watch(watch_config)
        self.user.save()
        n_watched_then = len(self.user.watched)
        url = '/api/v1/project/{0}/unwatch/'.format(self.project._id)
        res = self.app.post_json(url, {}, auth=self.auth)
        self.user.reload()
        n_watched_now = len(self.user.watched)
        assert_equal(res.status_code, 200)
        assert_equal(n_watched_now, n_watched_then - 1)
        assert_false(self.user.is_watching(self.project))

    def test_toggle_watch(self):
        # The user is not watching the project yet.
        assert_false(self.user.is_watching(self.project))
        url = "/api/v1/project/{0}/togglewatch/".format(self.project._id)
        res = self.app.post_json(url, {}, auth=self.auth)
        # The response json has a watchCount and watched property.
        assert_equal(res.json['watchCount'], 1)
        assert_true(res.json['watched'])
        assert_equal(res.status_code, 200)
        self.user.reload()
        # The user is now watching the project.
        assert_true(res.json['watched'])
        assert_true(self.user.is_watching(self.project))

    def test_toggle_watch_node(self):
        # The project has a public sub-node.
        node = NodeFactory(creator=self.user, parent=self.project, is_public=True)
        url = "/api/v1/project/{}/node/{}/togglewatch/".format(self.project._id,
                                                               node._id)
        res = self.app.post_json(url, {}, auth=self.auth)
        assert_equal(res.status_code, 200)
        self.user.reload()
        # The user is now watching the sub-node.
        assert_true(res.json['watched'])
        assert_true(self.user.is_watching(node))

    def test_get_watched_logs(self):
        project = ProjectFactory()
        # Add some logs.
        for _ in range(12):
            project.add_log('file_added', params={'node': project._id}, auth=self.consolidate_auth)
        project.save()
        watch_cfg = WatchConfigFactory(node=project)
        self.user.watch(watch_cfg)
        self.user.save()
        url = api_url_for("watched_logs_get")
        res = self.app.get(url, auth=self.auth)
        # Feed is paginated at 10 entries per page.
        assert_equal(len(res.json['logs']), 10)
        # 1 project-create log plus 12 generated logs.
        assert_equal(res.json['total'], 12 + 1)
        assert_equal(res.json['page'], 0)
        assert_equal(res.json['pages'], 2)
        assert_equal(res.json['logs'][0]['action'], 'file_added')

    def test_get_more_watched_logs(self):
        project = ProjectFactory()
        # Add some logs.
        for _ in range(12):
            project.add_log('file_added', params={'node': project._id}, auth=self.consolidate_auth)
        project.save()
        watch_cfg = WatchConfigFactory(node=project)
        self.user.watch(watch_cfg)
        self.user.save()
        url = api_url_for("watched_logs_get")
        page = 1
        res = self.app.get(url, {'page': page}, auth=self.auth)
        # Second page holds the remaining 3 of the 13 logs.
        assert_equal(len(res.json['logs']), 3)
        # 1 project-create log plus 12 generated logs.
        assert_equal(res.json['total'], 12 + 1)
        assert_equal(res.json['page'], page)
        assert_equal(res.json['pages'], 2)
        assert_equal(res.json['logs'][0]['action'], 'file_added')

    def test_get_more_watched_logs_invalid_page(self):
        project = ProjectFactory()
        watch_cfg = WatchConfigFactory(node=project)
        self.user.watch(watch_cfg)
        self.user.save()
        url = api_url_for("watched_logs_get")
        invalid_page = 'invalid page'
        res = self.app.get(
            url, {'page': invalid_page}, auth=self.auth, expect_errors=True
        )
        assert_equal(res.status_code, 400)
        assert_equal(
            res.json['message_long'],
            'Invalid value for "page".'
        )

    def test_get_more_watched_logs_invalid_size(self):
        project = ProjectFactory()
        watch_cfg = WatchConfigFactory(node=project)
        self.user.watch(watch_cfg)
        self.user.save()
        url = api_url_for("watched_logs_get")
        invalid_size = 'invalid size'
        res = self.app.get(
            url, {'size': invalid_size}, auth=self.auth, expect_errors=True
        )
        assert_equal(res.status_code, 400)
        assert_equal(
            res.json['message_long'],
            'Invalid value for "size".'
        )
class TestPointerViews(OsfTestCase):
    """Tests for pointer (project link) add/remove/fork views and their UI controls."""

    def setUp(self):
        super(TestPointerViews, self).setUp()
        self.user = AuthUserFactory()
        self.consolidate_auth = Auth(user=self.user)
        self.project = ProjectFactory(creator=self.user)

    def _make_pointer_only_user_can_see(self, user, project, save=False):
        # Helper: add a pointer to a private node only `user` can see.
        node = ProjectFactory(creator=user)
        project.add_pointer(node, auth=Auth(user=user), save=save)

    def test_pointer_list_write_contributor_can_remove_private_component_entry(self):
        """Ensure that write contributors see the button to delete a pointer,
        even if they cannot see what it is pointing at"""
        url = web_url_for('view_project', pid=self.project._id)
        user2 = AuthUserFactory()
        self.project.add_contributor(user2,
                                     auth=Auth(self.project.creator),
                                     permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS)
        self._make_pointer_only_user_can_see(user2, self.project)
        self.project.save()
        res = self.app.get(url, auth=self.user.auth).maybe_follow()
        assert_equal(res.status_code, 200)
        # Look for the remove-pointer control next to the "Private Link" entry.
        has_controls = res.lxml.xpath('//li[@node_reference]/p[starts-with(normalize-space(text()), "Private Link")]//i[contains(@class, "remove-pointer")]')
        assert_true(has_controls)

    def test_pointer_list_write_contributor_can_remove_public_component_entry(self):
        url = web_url_for('view_project', pid=self.project._id)
        for i in xrange(3):
            self.project.add_pointer(ProjectFactory(creator=self.user),
                                     auth=Auth(user=self.user))
        self.project.save()
        res = self.app.get(url, auth=self.user.auth).maybe_follow()
        assert_equal(res.status_code, 200)
        # One remove-pointer control per pointer added above.
        has_controls = res.lxml.xpath(
            '//li[@node_reference]//i[contains(@class, "remove-pointer")]')
        assert_equal(len(has_controls), 3)

    def test_pointer_list_read_contributor_cannot_remove_private_component_entry(self):
        url = web_url_for('view_project', pid=self.project._id)
        user2 = AuthUserFactory()
        self.project.add_contributor(user2,
                                     auth=Auth(self.project.creator),
                                     permissions=[permissions.READ])
        self._make_pointer_only_user_can_see(user2, self.project)
        self.project.save()
        res = self.app.get(url, auth=user2.auth).maybe_follow()
        assert_equal(res.status_code, 200)
        # The entry is listed but no remove control is rendered for read-only users.
        pointer_nodes = res.lxml.xpath('//li[@node_reference]')
        has_controls = res.lxml.xpath('//li[@node_reference]/p[starts-with(normalize-space(text()), "Private Link")]//i[contains(@class, "remove-pointer")]')
        assert_equal(len(pointer_nodes), 1)
        assert_false(has_controls)

    def test_pointer_list_read_contributor_cannot_remove_public_component_entry(self):
        url = web_url_for('view_project', pid=self.project._id)
        self.project.add_pointer(ProjectFactory(creator=self.user),
                                 auth=Auth(user=self.user))
        user2 = AuthUserFactory()
        self.project.add_contributor(user2,
                                     auth=Auth(self.project.creator),
                                     permissions=[permissions.READ])
        self.project.save()
        res = self.app.get(url, auth=user2.auth).maybe_follow()
        assert_equal(res.status_code, 200)
        pointer_nodes = res.lxml.xpath('//li[@node_reference]')
        has_controls = res.lxml.xpath(
            '//li[@node_reference]//i[contains(@class, "remove-pointer")]')
        assert_equal(len(pointer_nodes), 1)
        assert_equal(len(has_controls), 0)

    # https://github.com/CenterForOpenScience/openscienceframework.org/issues/1109
    def test_get_pointed_excludes_folders(self):
        pointer_project = ProjectFactory(is_public=True)  # project that points to another project
        pointed_project = ProjectFactory(creator=self.user)  # project that other project points to
        pointer_project.add_pointer(pointed_project, Auth(pointer_project.creator), save=True)
        # Project is in an organizer collection.
        collection = CollectionFactory(creator=pointed_project.creator)
        collection.add_pointer(pointed_project, Auth(pointed_project.creator), save=True)
        url = pointed_project.api_url_for('get_pointed')
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        # pointer_project's id is included in response, but the folder's id is not.
        pointer_ids = [each['id'] for each in res.json['pointed']]
        assert_in(pointer_project._id, pointer_ids)
        assert_not_in(collection._id, pointer_ids)

    def test_add_pointers(self):
        url = self.project.api_url + 'pointer/'
        node_ids = [
            NodeFactory()._id
            for _ in range(5)
        ]
        self.app.post_json(
            url,
            {'nodeIds': node_ids},
            auth=self.user.auth,
        ).maybe_follow()
        self.project.reload()
        assert_equal(
            len(self.project.nodes),
            5
        )

    def test_add_the_same_pointer_more_than_once(self):
        url = self.project.api_url + 'pointer/'
        double_node = NodeFactory()
        self.app.post_json(
            url,
            {'nodeIds': [double_node._id]},
            auth=self.user.auth,
        )
        # Adding the same pointer again is rejected with a 400.
        res = self.app.post_json(
            url,
            {'nodeIds': [double_node._id]},
            auth=self.user.auth,
            expect_errors=True
        )
        assert_equal(res.status_code, 400)

    def test_add_pointers_no_user_logg_in(self):
        # Unauthenticated requests to add pointers get a 401.
        url = self.project.api_url_for('add_pointers')
        node_ids = [
            NodeFactory()._id
            for _ in range(5)
        ]
        res = self.app.post_json(
            url,
            {'nodeIds': node_ids},
            auth=None,
            expect_errors=True
        )
        assert_equal(res.status_code, 401)

    def test_add_pointers_public_non_contributor(self):
        # A contributor may point at a public project they don't contribute to.
        project2 = ProjectFactory()
        project2.set_privacy('public')
        project2.save()
        url = self.project.api_url_for('add_pointers')
        self.app.post_json(
            url,
            {'nodeIds': [project2._id]},
            auth=self.user.auth,
        ).maybe_follow()
        self.project.reload()
        assert_equal(
            len(self.project.nodes),
            1
        )

    def test_add_pointers_contributor(self):
        user2 = AuthUserFactory()
        self.project.add_contributor(user2)
        self.project.save()
        url = self.project.api_url_for('add_pointers')
        node_ids = [
            NodeFactory()._id
            for _ in range(5)
        ]
        self.app.post_json(
            url,
            {'nodeIds': node_ids},
            auth=user2.auth,
        ).maybe_follow()
        self.project.reload()
        assert_equal(
            len(self.project.nodes),
            5
        )

    def test_add_pointers_not_provided(self):
        # Missing 'nodeIds' payload is a 400.
        url = self.project.api_url + 'pointer/'
        res = self.app.post_json(url, {}, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 400)

    def test_move_pointers(self):
        project_two = ProjectFactory(creator=self.user)
        url = api_url_for('move_pointers')
        node = NodeFactory()
        pointer = self.project.add_pointer(node, auth=self.consolidate_auth)
        assert_equal(len(self.project.nodes), 1)
        assert_equal(len(project_two.nodes), 0)
        user_auth = self.user.auth
        move_request = \
            {
                'fromNodeId': self.project._id,
                'toNodeId': project_two._id,
                'pointerIds': [pointer.node._id],
            }
        self.app.post_json(
            url,
            move_request,
            auth=user_auth,
        ).maybe_follow()
        self.project.reload()
        project_two.reload()
        # Pointer moved from the first project to the second.
        assert_equal(len(self.project.nodes), 0)
        assert_equal(len(project_two.nodes), 1)

    def test_remove_pointer(self):
        url = self.project.api_url + 'pointer/'
        node = NodeFactory()
        pointer = self.project.add_pointer(node, auth=self.consolidate_auth)
        self.app.delete_json(
            url,
            {'pointerId': pointer._id},
            auth=self.user.auth,
        )
        self.project.reload()
        assert_equal(
            len(self.project.nodes),
            0
        )

    def test_remove_pointer_not_provided(self):
        url = self.project.api_url + 'pointer/'
        res = self.app.delete_json(url, {}, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 400)

    def test_remove_pointer_not_found(self):
        url = self.project.api_url + 'pointer/'
        res = self.app.delete_json(
            url,
            {'pointerId': None},
            auth=self.user.auth,
            expect_errors=True
        )
        assert_equal(res.status_code, 400)

    def test_remove_pointer_not_in_nodes(self):
        # A pointer that was never attached to the project cannot be removed.
        url = self.project.api_url + 'pointer/'
        node = NodeFactory()
        pointer = Pointer(node=node)
        res = self.app.delete_json(
            url,
            {'pointerId': pointer._id},
            auth=self.user.auth,
            expect_errors=True
        )
        assert_equal(res.status_code, 400)

    def test_fork_pointer(self):
        url = self.project.api_url + 'pointer/fork/'
        node = NodeFactory(creator=self.user)
        pointer = self.project.add_pointer(node, auth=self.consolidate_auth)
        # Smoke test: the request should succeed (errors would raise).
        self.app.post_json(
            url,
            {'pointerId': pointer._id},
            auth=self.user.auth
        )

    def test_fork_pointer_not_provided(self):
        url = self.project.api_url + 'pointer/fork/'
        res = self.app.post_json(url, {}, auth=self.user.auth,
                                 expect_errors=True)
        assert_equal(res.status_code, 400)

    def test_fork_pointer_not_found(self):
        url = self.project.api_url + 'pointer/fork/'
        res = self.app.post_json(
            url,
            {'pointerId': None},
            auth=self.user.auth,
            expect_errors=True
        )
        assert_equal(res.status_code, 400)

    def test_fork_pointer_not_in_nodes(self):
        url = self.project.api_url + 'pointer/fork/'
        node = NodeFactory()
        pointer = Pointer(node=node)
        res = self.app.post_json(
            url,
            {'pointerId': pointer._id},
            auth=self.user.auth,
            expect_errors=True
        )
        assert_equal(res.status_code, 400)

    # NOTE(review): the next two test names look swapped relative to the URLs
    # they hit ('fork/before/' vs 'beforeregister/') — confirm against the
    # route definitions before renaming.
    def test_before_register_with_pointer(self):
        "Assert that link warning appears in before register callback."
        node = NodeFactory()
        self.project.add_pointer(node, auth=self.consolidate_auth)
        url = self.project.api_url + 'fork/before/'
        res = self.app.get(url, auth=self.user.auth).maybe_follow()
        prompts = [
            prompt
            for prompt in res.json['prompts']
            if 'Links will be copied into your fork' in prompt
        ]
        assert_equal(len(prompts), 1)

    def test_before_fork_with_pointer(self):
        """Assert that link warning appears in before fork callback."""
        node = NodeFactory()
        self.project.add_pointer(node, auth=self.consolidate_auth)
        url = self.project.api_url + 'beforeregister/'
        res = self.app.get(url, auth=self.user.auth).maybe_follow()
        prompts = [
            prompt
            for prompt in res.json['prompts']
            if 'Links will be copied into your registration' in prompt
        ]
        assert_equal(len(prompts), 1)

    def test_before_register_no_pointer(self):
        """Assert that link warning does not appear in before register callback."""
        url = self.project.api_url + 'fork/before/'
        res = self.app.get(url, auth=self.user.auth).maybe_follow()
        prompts = [
            prompt
            for prompt in res.json['prompts']
            if 'Links will be copied into your fork' in prompt
        ]
        assert_equal(len(prompts), 0)

    def test_before_fork_no_pointer(self):
        """Assert that link warning does not appear in before fork callback."""
        url = self.project.api_url + 'beforeregister/'
        res = self.app.get(url, auth=self.user.auth).maybe_follow()
        prompts = [
            prompt
            for prompt in res.json['prompts']
            if 'Links will be copied into your registration' in prompt
        ]
        assert_equal(len(prompts), 0)

    def test_get_pointed(self):
        pointing_node = ProjectFactory(creator=self.user)
        pointing_node.add_pointer(self.project, auth=Auth(self.user))
        url = self.project.api_url_for('get_pointed')
        res = self.app.get(url, auth=self.user.auth)
        pointed = res.json['pointed']
        assert_equal(len(pointed), 1)
        assert_equal(pointed[0]['url'], pointing_node.url)
        assert_equal(pointed[0]['title'], pointing_node.title)
        assert_equal(pointed[0]['authorShort'], abbrev_authors(pointing_node))

    def test_get_pointed_private(self):
        # Pointers from projects the viewer can't see are anonymized.
        secret_user = UserFactory()
        pointing_node = ProjectFactory(creator=secret_user)
        pointing_node.add_pointer(self.project, auth=Auth(secret_user))
        url = self.project.api_url_for('get_pointed')
        res = self.app.get(url, auth=self.user.auth)
        pointed = res.json['pointed']
        assert_equal(len(pointed), 1)
        assert_equal(pointed[0]['url'], None)
        assert_equal(pointed[0]['title'], 'Private Component')
        assert_equal(pointed[0]['authorShort'], 'Private Author(s)')
class TestPublicViews(OsfTestCase):
    """Smoke tests for pages that require no authentication."""

    def test_explore(self):
        # The explore page should load successfully for anonymous visitors.
        response = self.app.get("/explore/").maybe_follow()
        assert_equal(response.status_code, 200)
class TestAuthViews(OsfTestCase):
    def setUp(self):
        # Common fixtures for auth-view tests: a registered user and its
        # (username, password) tuple for HTTP basic auth.
        super(TestAuthViews, self).setUp()
        self.user = AuthUserFactory()
        self.auth = self.user.auth
    @mock.patch('framework.auth.views.mails.send_mail')
    def test_register_ok(self, _):
        # Happy path: registering creates a user with the given name.
        url = api_url_for('register_user')
        name, email, password = fake.name(), fake.email(), 'underpressure'
        self.app.post_json(
            url,
            {
                'fullName': name,
                'email1': email,
                'email2': email,
                'password': password,
            }
        )
        user = User.find_one(Q('username', 'eq', email))
        assert_equal(user.fullname, name)
    # Regression test for https://github.com/CenterForOpenScience/osf.io/issues/2902
    @mock.patch('framework.auth.views.mails.send_mail')
    def test_register_email_case_insensitive(self, _):
        # The confirmation email field is matched case-insensitively.
        url = api_url_for('register_user')
        name, email, password = fake.name(), fake.email(), 'underpressure'
        self.app.post_json(
            url,
            {
                'fullName': name,
                'email1': email,
                'email2': str(email).upper(),
                'password': password,
            }
        )
        user = User.find_one(Q('username', 'eq', email))
        assert_equal(user.fullname, name)
    @mock.patch('framework.auth.views.send_confirm_email')
    def test_register_scrubs_username(self, _):
        # HTML/script markup in the full name must be stripped on registration.
        url = api_url_for('register_user')
        name = "<i>Eunice</i> O' \"Cornwallis\"<script type='text/javascript' src='http://www.cornify.com/js/cornify.js'></script><script type='text/javascript'>cornify_add()</script>"
        email, password = fake.email(), 'underpressure'
        res = self.app.post_json(
            url,
            {
                'fullName': name,
                'email1': email,
                'email2': email,
                'password': password,
            }
        )
        # Tags are removed; inline script text content is kept as plain text.
        expected_scrub_username = "Eunice O' \"Cornwallis\"cornify_add()"
        user = User.find_one(Q('username', 'eq', email))
        assert_equal(res.status_code, http.OK)
        assert_equal(user.fullname, expected_scrub_username)
    def test_register_email_mismatch(self):
        # Mismatched email/confirmation fields reject registration and
        # create no user record.
        url = api_url_for('register_user')
        name, email, password = fake.name(), fake.email(), 'underpressure'
        res = self.app.post_json(
            url,
            {
                'fullName': name,
                'email1': email,
                'email2': email + 'lol',
                'password': password,
            },
            expect_errors=True,
        )
        assert_equal(res.status_code, http.BAD_REQUEST)
        users = User.find(Q('username', 'eq', email))
        assert_equal(users.count(), 0)
    def test_register_after_being_invited_as_unreg_contributor(self):
        # Regression test for:
        # https://github.com/CenterForOpenScience/openscienceframework.org/issues/861
        # https://github.com/CenterForOpenScience/openscienceframework.org/issues/1021
        # https://github.com/CenterForOpenScience/openscienceframework.org/issues/1026
        # A user is invited as an unregistered contributor.
        project = ProjectFactory()
        name, email = fake.name(), fake.email()
        project.add_unregistered_contributor(fullname=name, email=email, auth=Auth(project.creator))
        project.save()
        # The new, unregistered user.
        new_user = User.find_one(Q('username', 'eq', email))
        # Instead of following the invitation link, they register at the regular
        # registration page.
        # They use a different name when they register, but the same email.
        real_name = fake.name()
        password = 'myprecious'
        url = api_url_for('register_user')
        payload = {
            'fullName': real_name,
            'email1': email,
            'email2': email,
            'password': password,
        }
        # Send registration request.
        self.app.post_json(url, payload)
        new_user.reload()
        # New user confirms by following the confirmation link.
        confirm_url = new_user.get_confirmation_url(email, external=False)
        self.app.get(confirm_url)
        new_user.reload()
        # Password and fullname should be updated to the registration values.
        assert_true(new_user.is_confirmed)
        assert_true(new_user.check_password(password))
        assert_equal(new_user.fullname, real_name)
    @mock.patch('framework.auth.views.send_confirm_email')
    def test_register_sends_user_registered_signal(self, mock_send_confirm_email):
        # Registration must emit both the user_registered and
        # unconfirmed_user_created signals, and trigger a confirmation email.
        url = api_url_for('register_user')
        name, email, password = fake.name(), fake.email(), 'underpressure'
        with capture_signals() as mock_signals:
            self.app.post_json(
                url,
                {
                    'fullName': name,
                    'email1': email,
                    'email2': email,
                    'password': password,
                }
            )
        assert_equal(mock_signals.signals_sent(), set([auth.signals.user_registered,
                                                       auth.signals.unconfirmed_user_created]))
        assert_true(mock_send_confirm_email.called)
    def test_resend_confirmation_get(self):
        # The resend-confirmation page renders for anonymous visitors.
        res = self.app.get('/resend/')
        assert_equal(res.status_code, 200)
    @mock.patch('framework.auth.views.mails.send_mail')
    def test_resend_confirmation(self, send_mail):
        # Resending a confirmation invalidates the old token and issues a new one.
        email = 'test@example.com'
        token = self.user.add_unconfirmed_email(email)
        self.user.save()
        url = api_url_for('resend_confirmation')
        header = {'address': email, 'primary': False, 'confirmed': False}
        self.app.put_json(url, {'id': self.user._id, 'email': header}, auth=self.user.auth)
        assert_true(send_mail.called)
        # NOTE(review): `called_with` is not a Mock assertion method; this
        # check always passes. Likely intended: `send_mail.assert_called_with(...)`.
        assert_true(send_mail.called_with(
            to_addr=email
        ))
        self.user.reload()
        assert_not_equal(token, self.user.get_confirmation_token(email))
        # The old token is no longer usable.
        with assert_raises(InvalidTokenError):
            self.user.get_unconfirmed_email_for_token(token)
    @mock.patch('framework.auth.views.mails.send_mail')
    def test_click_confirmation_email(self, send_mail):
        # Following the confirmation link marks the email confirmed and
        # redirects through the login service.
        email = 'test@example.com'
        token = self.user.add_unconfirmed_email(email)
        self.user.save()
        self.user.reload()
        assert_equal(self.user.email_verifications[token]['confirmed'], False)
        # NOTE(review): format string has two placeholders but three args are
        # passed — the trailing username is silently ignored by str.format.
        url = '/confirm/{}/{}/?logout=1'.format(self.user._id, token, self.user.username)
        res = self.app.get(url)
        self.user.reload()
        assert_equal(self.user.email_verifications[token]['confirmed'], True)
        assert_equal(res.status_code, 302)
        login_url = 'login?service'
        assert_in(login_url, res.body)
def test_get_email_to_add_no_email(self):
    """With no pending addresses, unconfirmed_email_info is empty."""
    pending = self.user.unconfirmed_email_info
    assert_equal(pending, [])
def test_get_unconfirmed_email(self):
    """An added-but-unclicked email does not yet appear in unconfirmed_email_info."""
    email = 'test@example.com'
    self.user.add_unconfirmed_email(email)
    self.user.save()
    self.user.reload()
    email_verifications = self.user.unconfirmed_email_info
    # The confirmation link has not been visited, so nothing is pending
    # user approval yet.
    assert_equal(email_verifications, [])
def test_get_email_to_add(self):
    """After the confirmation link is visited, the address appears in unconfirmed_email_info."""
    email = 'test@example.com'
    token = self.user.add_unconfirmed_email(email)
    self.user.save()
    self.user.reload()
    assert_equal(self.user.email_verifications[token]['confirmed'], False)
    # BUG FIX: removed a stray third argument (`self.user.username`) that the
    # two-placeholder format string silently ignored.
    url = '/confirm/{}/{}/?logout=1'.format(self.user._id, token)
    self.app.get(url)
    self.user.reload()
    assert_equal(self.user.email_verifications[token]['confirmed'], True)
    email_verifications = self.user.unconfirmed_email_info
    assert_equal(email_verifications[0]['address'], 'test@example.com')
def test_add_email(self):
    """Confirming then PUTting to unconfirmed_email_add attaches the email to the account."""
    email = 'test@example.com'
    token = self.user.add_unconfirmed_email(email)
    self.user.save()
    self.user.reload()
    assert_equal(self.user.email_verifications[token]['confirmed'], False)
    url = '/confirm/{}/{}/?logout=1'.format(self.user._id, token)
    self.app.get(url)
    self.user.reload()
    email_verifications = self.user.unconfirmed_email_info
    put_email_url = api_url_for('unconfirmed_email_add')
    res = self.app.put_json(put_email_url, email_verifications[0], auth=self.user.auth)
    self.user.reload()
    assert_equal(res.json_body['status'], 'success')
    # emails[0] is the primary address; the new one is appended after it.
    assert_equal(self.user.emails[1], 'test@example.com')
def test_remove_email(self):
    """unconfirmed_email_remove discards a confirmed-but-unmerged address."""
    email = 'test@example.com'
    token = self.user.add_unconfirmed_email(email)
    self.user.save()
    self.user.reload()
    url = '/confirm/{}/{}/?logout=1'.format(self.user._id, token)
    self.app.get(url)
    self.user.reload()
    email_verifications = self.user.unconfirmed_email_info
    remove_email_url = api_url_for('unconfirmed_email_remove')
    remove_res = self.app.delete_json(remove_email_url, email_verifications[0], auth=self.user.auth)
    self.user.reload()
    assert_equal(remove_res.json_body['status'], 'success')
    assert_equal(self.user.unconfirmed_email_info, [])
def test_add_expired_email(self):
    """clean_email_verifications drops tokens whose expiration has passed."""
    # Do not return expired token and removes it from user.email_verifications
    email = 'test@example.com'
    token = self.user.add_unconfirmed_email(email)
    # Backdate the token well past its expiry window.
    self.user.email_verifications[token]['expiration'] = dt.datetime.utcnow() - dt.timedelta(days=100)
    self.user.save()
    self.user.reload()
    assert_equal(self.user.email_verifications[token]['email'], email)
    self.user.clean_email_verifications(given_token=token)
    unconfirmed_emails = self.user.unconfirmed_email_info
    assert_equal(unconfirmed_emails, [])
    assert_equal(self.user.email_verifications, {})
def test_clean_email_verifications(self):
    """clean_email_verifications also drops malformed/unknown tokens."""
    # Do not return bad token and removes it from user.email_verifications
    email = 'test@example.com'
    token = 'blahblahblah'
    self.user.email_verifications[token] = {'expiration': dt.datetime.utcnow() + dt.timedelta(days=1),
                                            'email': email,
                                            'confirmed': False }
    self.user.save()
    self.user.reload()
    assert_equal(self.user.email_verifications[token]['email'], email)
    self.user.clean_email_verifications(given_token=token)
    unconfirmed_emails = self.user.unconfirmed_email_info
    assert_equal(unconfirmed_emails, [])
    assert_equal(self.user.email_verifications, {})
def test_clean_email_verifications_when_email_verifications_is_none(self):
    """clean_email_verifications tolerates a None mapping and resets it to an empty dict."""
    self.user.email_verifications = None
    self.user.save()
    assert_equal(self.user.clean_email_verifications(), None)
    assert_equal(self.user.email_verifications, {})
def test_add_invalid_email(self):
    """Adding an address containing illegal control/non-characters raises ValidationError."""
    # Address deliberately embeds control chars and Unicode non-characters.
    email = u'\u0000\u0008\u000b\u000c\u000e\u001f\ufffe\uffffHello@yourmom.com'
    with assert_raises(ValidationError):
        self.user.add_unconfirmed_email(email)
def test_add_email_merge(self):
    """Confirming an email already owned by another account still attaches it (merge path)."""
    email = "copy@cat.com"
    # Another account already owns this address.
    dupe = UserFactory(
        username=email,
        emails=[email]
    )
    dupe.save()
    token = self.user.add_unconfirmed_email(email)
    self.user.save()
    self.user.reload()
    assert_equal(self.user.email_verifications[token]['confirmed'], False)
    url = '/confirm/{}/{}/?logout=1'.format(self.user._id, token)
    self.app.get(url)
    self.user.reload()
    email_verifications = self.user.unconfirmed_email_info
    put_email_url = api_url_for('unconfirmed_email_add')
    res = self.app.put_json(put_email_url, email_verifications[0], auth=self.user.auth)
    self.user.reload()
    assert_equal(res.json_body['status'], 'success')
    # emails[0] is the primary address; the merged one is appended.
    assert_equal(self.user.emails[1], 'copy@cat.com')
def test_resend_confirmation_without_user_id(self):
    """Omitting 'id' from the resend payload yields 400 with an explanatory message."""
    email = 'test@example.com'
    url = api_url_for('resend_confirmation')
    header = {'address': email, 'primary': False, 'confirmed': False}
    res = self.app.put_json(url, {'email': header}, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
    assert_equal(res.json['message_long'], '"id" is required')
def test_resend_confirmation_without_email(self):
    """A resend request missing the 'email' field must be rejected with 400."""
    response = self.app.put_json(
        api_url_for('resend_confirmation'),
        {'id': self.user._id},
        auth=self.user.auth,
        expect_errors=True,
    )
    assert_equal(response.status_code, 400)
def test_resend_confirmation_not_work_for_primary_email(self):
    """Resend is refused when the payload targets the primary address."""
    email = 'test@example.com'
    url = api_url_for('resend_confirmation')
    header = {'address': email, 'primary': True, 'confirmed': False}
    res = self.app.put_json(url, {'id': self.user._id, 'email': header}, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
    # 'Cannnot' matches the server-side message verbatim (typo included) --
    # do not "fix" it here without fixing the view.
    assert_equal(res.json['message_long'], 'Cannnot resend confirmation for confirmed emails')
def test_resend_confirmation_not_work_for_confirmed_email(self):
    """Resend is refused when the payload says the address is already confirmed."""
    email = 'test@example.com'
    url = api_url_for('resend_confirmation')
    header = {'address': email, 'primary': False, 'confirmed': True}
    res = self.app.put_json(url, {'id': self.user._id, 'email': header}, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
    # 'Cannnot' matches the server-side message verbatim (typo included).
    assert_equal(res.json['message_long'], 'Cannnot resend confirmation for confirmed emails')
@mock.patch('framework.auth.views.mails.send_mail')
def test_resend_confirmation_does_not_send_before_throttle_expires(self, send_mail):
    """A second resend inside the throttle window is rejected with 400."""
    email = 'test@example.com'
    self.user.save()
    url = api_url_for('resend_confirmation')
    header = {'address': email, 'primary': False, 'confirmed': False}
    self.app.put_json(url, {'id': self.user._id, 'email': header}, auth=self.user.auth)
    assert_true(send_mail.called)
    # 2nd call does not send email because throttle period has not expired
    res = self.app.put_json(url, {'id': self.user._id, 'email': header}, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
def test_confirm_email_clears_unclaimed_records_and_revokes_token(self):
    """Confirming an email clears the user's unclaimed records and consumes the token."""
    unclaimed_user = UnconfirmedUserFactory()
    # unclaimed user has been invited to a project.
    referrer = UserFactory()
    project = ProjectFactory(creator=referrer)
    unclaimed_user.add_unclaimed_record(project, referrer, 'foo')
    unclaimed_user.save()

    # sanity check
    assert_equal(len(unclaimed_user.email_verifications.keys()), 1)

    # user goes to email confirmation link
    token = unclaimed_user.get_confirmation_token(unclaimed_user.username)
    url = web_url_for('confirm_email_get', uid=unclaimed_user._id, token=token)
    res = self.app.get(url)
    assert_equal(res.status_code, 302)

    # unclaimed records and token are cleared
    unclaimed_user.reload()
    assert_equal(unclaimed_user.unclaimed_records, {})
    assert_equal(len(unclaimed_user.email_verifications.keys()), 0)
def test_confirmation_link_registers_user(self):
    """Following the confirmation URL flips is_registered on an unconfirmed user."""
    user = User.create_unconfirmed('brian@queen.com', 'bicycle123', 'Brian May')
    assert_false(user.is_registered)  # sanity check
    user.save()
    confirmation_url = user.get_confirmation_url('brian@queen.com', external=False)
    res = self.app.get(confirmation_url)
    assert_equal(res.status_code, 302, 'redirects to settings page')
    res = res.follow()
    user.reload()
    assert_true(user.is_registered)
# TODO: Use mock add-on
class TestAddonUserViews(OsfTestCase):
    """Tests for enabling/disabling user-level add-ons via the settings API."""

    def setUp(self):
        super(TestAddonUserViews, self).setUp()
        self.user = AuthUserFactory()

    def test_choose_addons_add(self):
        """Add add-ons; assert that add-ons are attached to project.
        """
        url = '/api/v1/settings/addons/'
        self.app.post_json(
            url,
            {'github': True},
            auth=self.user.auth,
        ).maybe_follow()
        self.user.reload()
        assert_true(self.user.get_addon('github'))

    def test_choose_addons_remove(self):
        # Add, then delete, add-ons; assert that add-ons are not attached to
        # project.
        url = '/api/v1/settings/addons/'
        self.app.post_json(
            url,
            {'github': True},
            auth=self.user.auth,
        ).maybe_follow()
        self.app.post_json(
            url,
            {'github': False},
            auth=self.user.auth
        ).maybe_follow()
        self.user.reload()
        assert_false(self.user.get_addon('github'))
class TestConfigureMailingListViews(OsfTestCase):
    """Tests for the OSF help list and MailChimp subscription endpoints/webhooks."""

    @classmethod
    def setUpClass(cls):
        # Force email subscriptions on for this class; restored in tearDownClass.
        super(TestConfigureMailingListViews, cls).setUpClass()
        cls._original_enable_email_subscriptions = settings.ENABLE_EMAIL_SUBSCRIPTIONS
        settings.ENABLE_EMAIL_SUBSCRIPTIONS = True

    def test_user_unsubscribe_and_subscribe_help_mailing_list(self):
        """Toggling the OSF help list updates user.osf_mailing_lists in both directions."""
        user = AuthUserFactory()
        url = api_url_for('user_choose_mailing_lists')
        payload = {settings.OSF_HELP_LIST: False}
        res = self.app.post_json(url, payload, auth=user.auth)
        user.reload()
        assert_false(user.osf_mailing_lists[settings.OSF_HELP_LIST])

        payload = {settings.OSF_HELP_LIST: True}
        res = self.app.post_json(url, payload, auth=user.auth)
        user.reload()
        assert_true(user.osf_mailing_lists[settings.OSF_HELP_LIST])

    def test_get_notifications(self):
        """user_notifications returns the union of OSF and MailChimp list flags."""
        user = AuthUserFactory()
        mailing_lists = dict(user.osf_mailing_lists.items() + user.mailchimp_mailing_lists.items())
        url = api_url_for('user_notifications')
        res = self.app.get(url, auth=user.auth)
        assert_equal(mailing_lists, res.json['mailing_lists'])

    def test_osf_help_mails_subscribe(self):
        """update_osf_help_mails_subscription(user, True) flips the flag on."""
        user = UserFactory()
        user.osf_mailing_lists[settings.OSF_HELP_LIST] = False
        user.save()
        update_osf_help_mails_subscription(user, True)
        assert_true(user.osf_mailing_lists[settings.OSF_HELP_LIST])

    def test_osf_help_mails_unsubscribe(self):
        """update_osf_help_mails_subscription(user, False) flips the flag off."""
        user = UserFactory()
        user.osf_mailing_lists[settings.OSF_HELP_LIST] = True
        user.save()
        update_osf_help_mails_subscription(user, False)
        assert_false(user.osf_mailing_lists[settings.OSF_HELP_LIST])

    @unittest.skipIf(settings.USE_CELERY, 'Subscription must happen synchronously for this test')
    @mock.patch('website.mailchimp_utils.get_mailchimp_api')
    def test_user_choose_mailing_lists_updates_user_dict(self, mock_get_mailchimp_api):
        """Subscribing via the API updates the user dict and calls MailChimp subscribe."""
        user = AuthUserFactory()
        list_name = 'OSF General'
        mock_client = mock.MagicMock()
        mock_get_mailchimp_api.return_value = mock_client
        # NOTE(review): other tests in this class mock this payload with a
        # 'name' key; this one uses 'list_name' -- confirm which key
        # get_list_id_from_name actually reads.
        mock_client.lists.list.return_value = {'data': [{'id': 1, 'list_name': list_name}]}
        list_id = mailchimp_utils.get_list_id_from_name(list_name)
        payload = {settings.MAILCHIMP_GENERAL_LIST: True}
        url = api_url_for('user_choose_mailing_lists')
        res = self.app.post_json(url, payload, auth=user.auth)
        user.reload()

        # check user.mailing_lists is updated
        assert_true(user.mailchimp_mailing_lists[settings.MAILCHIMP_GENERAL_LIST])
        assert_equal(
            user.mailchimp_mailing_lists[settings.MAILCHIMP_GENERAL_LIST],
            payload[settings.MAILCHIMP_GENERAL_LIST]
        )

        # check that user is subscribed
        mock_client.lists.subscribe.assert_called_with(id=list_id,
                                                       email={'email': user.username},
                                                       merge_vars={
                                                           'fname': user.given_name,
                                                           'lname': user.family_name,
                                                       },
                                                       double_optin=False,
                                                       update_existing=True)

    def test_get_mailchimp_get_endpoint_returns_200(self):
        """The MailChimp webhook GET endpoint answers 200 (MailChimp URL validation)."""
        url = api_url_for('mailchimp_get_endpoint')
        res = self.app.get(url)
        assert_equal(res.status_code, 200)

    @mock.patch('website.mailchimp_utils.get_mailchimp_api')
    def test_mailchimp_webhook_subscribe_action_does_not_change_user(self, mock_get_mailchimp_api):
        """ Test that 'subscribe' actions sent to the OSF via mailchimp
        webhooks update the OSF database.
        """
        # NOTE(review): the method name says 'does_not_change_user', but the
        # docstring and assertions show the subscribe action DOES update the
        # user -- the name looks like a copy/paste leftover.
        list_id = '12345'
        list_name = 'OSF General'
        mock_client = mock.MagicMock()
        mock_get_mailchimp_api.return_value = mock_client
        mock_client.lists.list.return_value = {'data': [{'id': list_id, 'name': list_name}]}

        # user is not subscribed to a list
        user = AuthUserFactory()
        user.mailchimp_mailing_lists = {'OSF General': False}
        user.save()

        # user subscribes and webhook sends request to OSF
        data = {
            'type': 'subscribe',
            'data[list_id]': list_id,
            'data[email]': user.username
        }
        url = api_url_for('sync_data_from_mailchimp') + '?key=' + settings.MAILCHIMP_WEBHOOK_SECRET_KEY
        res = self.app.post(url,
                            data,
                            content_type="application/x-www-form-urlencoded",
                            auth=user.auth)

        # user field is updated on the OSF
        user.reload()
        assert_true(user.mailchimp_mailing_lists[list_name])

    @mock.patch('website.mailchimp_utils.get_mailchimp_api')
    def test_mailchimp_webhook_profile_action_does_not_change_user(self, mock_get_mailchimp_api):
        """ Test that 'profile' actions sent to the OSF via mailchimp
        webhooks do not cause any database changes.
        """
        list_id = '12345'
        list_name = 'OSF General'
        mock_client = mock.MagicMock()
        mock_get_mailchimp_api.return_value = mock_client
        mock_client.lists.list.return_value = {'data': [{'id': list_id, 'name': list_name}]}

        # user is subscribed to a list
        user = AuthUserFactory()
        user.mailchimp_mailing_lists = {'OSF General': True}
        user.save()

        # user hits subscribe again, which will update the user's existing info on mailchimp
        # webhook sends request (when configured to update on changes made through the API)
        data = {
            'type': 'profile',
            'data[list_id]': list_id,
            'data[email]': user.username
        }
        url = api_url_for('sync_data_from_mailchimp') + '?key=' + settings.MAILCHIMP_WEBHOOK_SECRET_KEY
        res = self.app.post(url,
                            data,
                            content_type="application/x-www-form-urlencoded",
                            auth=user.auth)

        # user field does not change
        user.reload()
        assert_true(user.mailchimp_mailing_lists[list_name])

    @mock.patch('website.mailchimp_utils.get_mailchimp_api')
    def test_sync_data_from_mailchimp_unsubscribes_user(self, mock_get_mailchimp_api):
        """An 'unsubscribe' webhook clears the user's subscription flag."""
        list_id = '12345'
        list_name = 'OSF General'
        mock_client = mock.MagicMock()
        mock_get_mailchimp_api.return_value = mock_client
        mock_client.lists.list.return_value = {'data': [{'id': list_id, 'name': list_name}]}

        # user is subscribed to a list
        user = AuthUserFactory()
        user.mailchimp_mailing_lists = {'OSF General': True}
        user.save()

        # user unsubscribes through mailchimp and webhook sends request
        data = {
            'type': 'unsubscribe',
            'data[list_id]': list_id,
            'data[email]': user.username
        }
        url = api_url_for('sync_data_from_mailchimp') + '?key=' + settings.MAILCHIMP_WEBHOOK_SECRET_KEY
        res = self.app.post(url,
                            data,
                            content_type="application/x-www-form-urlencoded",
                            auth=user.auth)

        # user field is updated on the OSF
        user.reload()
        assert_false(user.mailchimp_mailing_lists[list_name])

    def test_sync_data_from_mailchimp_fails_without_secret_key(self):
        """Webhook requests missing the shared secret key are rejected with 401."""
        user = AuthUserFactory()
        payload = {'values': {'type': 'unsubscribe',
                              'data': {'list_id': '12345',
                                       'email': 'freddie@cos.io'}}}
        url = api_url_for('sync_data_from_mailchimp')
        res = self.app.post_json(url, payload, auth=user.auth, expect_errors=True)
        assert_equal(res.status_code, http.UNAUTHORIZED)

    @classmethod
    def tearDownClass(cls):
        # Restore the subscription setting flipped in setUpClass.
        super(TestConfigureMailingListViews, cls).tearDownClass()
        settings.ENABLE_EMAIL_SUBSCRIPTIONS = cls._original_enable_email_subscriptions
# TODO: Move to OSF Storage
class TestFileViews(OsfTestCase):
    """Smoke tests for the project file-tree and file-grid endpoints."""

    def setUp(self):
        super(TestFileViews, self).setUp()
        self.user = AuthUserFactory()
        self.project = ProjectFactory.build(creator=self.user, is_public=True)
        self.project.add_contributor(self.user)
        self.project.save()

    def test_files_get(self):
        """collect_file_trees returns the serialized node plus tree JS/CSS assets."""
        url = self.project.api_url_for('collect_file_trees')
        res = self.app.get(url, auth=self.user.auth)
        expected = _view_project(self.project, auth=Auth(user=self.user))
        assert_equal(res.status_code, http.OK)
        assert_equal(res.json['node'], expected['node'])
        assert_in('tree_js', res.json)
        assert_in('tree_css', res.json)

    def test_grid_data(self):
        """grid_data matches the rubeus hgrid serialization of the project."""
        url = self.project.api_url_for('grid_data')
        res = self.app.get(url, auth=self.user.auth).maybe_follow()
        assert_equal(res.status_code, http.OK)
        expected = rubeus.to_hgrid(self.project, auth=Auth(self.user))
        data = res.json['data']
        assert_equal(len(data), len(expected))
class TestTagViews(OsfTestCase):
    """Tests for project tag pages (endpoint currently disabled)."""

    def setUp(self):
        super(TestTagViews, self).setUp()
        self.user = AuthUserFactory()
        self.project = ProjectFactory(creator=self.user)

    @unittest.skip('Tags endpoint disabled for now.')
    def test_tag_get_returns_200(self):
        url = web_url_for('project_tag', tag='foo')
        res = self.app.get(url)
        assert_equal(res.status_code, 200)
@requires_search
class TestSearchViews(OsfTestCase):
    """Contributor-search and pagination tests against a live search backend."""

    def setUp(self):
        super(TestSearchViews, self).setUp()
        import website.search.search as search
        search.delete_all()
        self.project = ProjectFactory(creator=UserFactory(fullname='Robbie Williams'))
        self.contrib = UserFactory(fullname='Brian May')
        # Twelve 'Freddie Mercury<i>' users: with a page size of 5 the 'fr'
        # query below yields pages of 5, 5, and 2 results.
        for i in range(0, 12):
            UserFactory(fullname='Freddie Mercury{}'.format(i))

    def tearDown(self):
        super(TestSearchViews, self).tearDown()
        import website.search.search as search
        search.delete_all()

    def test_search_contributor(self):
        """An exact-name query returns a single serialized contributor record."""
        url = api_url_for('search_contributor')
        res = self.app.get(url, {'query': self.contrib.fullname})
        assert_equal(res.status_code, 200)
        result = res.json['users']
        assert_equal(len(result), 1)
        brian = result[0]
        assert_equal(brian['fullname'], self.contrib.fullname)
        assert_in('gravatar_url', brian)
        assert_equal(brian['registered'], self.contrib.is_registered)
        assert_equal(brian['active'], self.contrib.is_active)

    def test_search_pagination_default(self):
        """Default page size is 5; 12 matches -> 3 pages, starting at page 0."""
        url = api_url_for('search_contributor')
        res = self.app.get(url, {'query': 'fr'})
        assert_equal(res.status_code, 200)
        result = res.json['users']
        pages = res.json['pages']
        page = res.json['page']
        assert_equal(len(result), 5)
        assert_equal(pages, 3)
        assert_equal(page, 0)

    def test_search_pagination_default_page_1(self):
        url = api_url_for('search_contributor')
        res = self.app.get(url, {'query': 'fr', 'page': 1})
        assert_equal(res.status_code, 200)
        result = res.json['users']
        page = res.json['page']
        assert_equal(len(result), 5)
        assert_equal(page, 1)

    def test_search_pagination_default_page_2(self):
        """The final page holds the 2 leftover results (12 = 5 + 5 + 2)."""
        url = api_url_for('search_contributor')
        res = self.app.get(url, {'query': 'fr', 'page': 2})
        assert_equal(res.status_code, 200)
        result = res.json['users']
        page = res.json['page']
        assert_equal(len(result), 2)
        assert_equal(page, 2)

    def test_search_pagination_smaller_pages(self):
        url = api_url_for('search_contributor')
        res = self.app.get(url, {'query': 'fr', 'size': 5})
        assert_equal(res.status_code, 200)
        result = res.json['users']
        pages = res.json['pages']
        page = res.json['page']
        assert_equal(len(result), 5)
        assert_equal(page, 0)
        assert_equal(pages, 3)

    def test_search_pagination_smaller_pages_page_2(self):
        url = api_url_for('search_contributor')
        res = self.app.get(url, {'query': 'fr', 'page': 2, 'size': 5, })
        assert_equal(res.status_code, 200)
        result = res.json['users']
        pages = res.json['pages']
        page = res.json['page']
        assert_equal(len(result), 2)
        assert_equal(page, 2)
        assert_equal(pages, 3)

    def test_search_projects(self):
        """The project search page renders for a title query."""
        url = '/search/'
        res = self.app.get(url, {'q': self.project.title})
        assert_equal(res.status_code, 200)
class TestODMTitleSearch(OsfTestCase):
    """ Docs from original method:
    :arg term: The substring of the title.
    :arg category: Category of the node.
    :arg isDeleted: yes, no, or either. Either will not add a qualifier for that argument in the search.
    :arg isFolder: yes, no, or either. Either will not add a qualifier for that argument in the search.
    :arg isRegistration: yes, no, or either. Either will not add a qualifier for that argument in the search.
    :arg includePublic: yes or no. Whether the projects listed should include public projects.
    :arg includeContributed: yes or no. Whether the search should include projects the current user has
    contributed to.
    :arg ignoreNode: a list of nodes that should not be included in the search.
    :return: a list of dictionaries of projects
    """
    def setUp(self):
        super(TestODMTitleSearch, self).setUp()
        self.user = AuthUserFactory()
        self.user_two = AuthUserFactory()
        # One uniquely-titled node per kind, so each query isolates one case.
        self.project = ProjectFactory(creator=self.user, title="foo")
        self.project_two = ProjectFactory(creator=self.user_two, title="bar")
        self.public_project = ProjectFactory(creator=self.user_two, is_public=True, title="baz")
        self.registration_project = RegistrationFactory(creator=self.user, title="qux")
        self.folder = CollectionFactory(creator=self.user, title="quux")
        self.dashboard = BookmarkCollectionFactory(creator=self.user, title="Dashboard")
        self.url = api_url_for('search_projects_by_title')

    def test_search_projects_by_title(self):
        """Exercise the term/includePublic/includeContributed/isRegistration/isFolder qualifiers."""
        # Bare term search finds the user's own project.
        res = self.app.get(self.url, {'term': self.project.title}, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), 1)
        # Public-only search still finds another user's public project.
        res = self.app.get(self.url,
                           {
                               'term': self.public_project.title,
                               'includePublic': 'yes',
                               'includeContributed': 'no'
                           }, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), 1)
        # Contributed-only search finds the user's own project.
        res = self.app.get(self.url,
                           {
                               'term': self.project.title,
                               'includePublic': 'no',
                               'includeContributed': 'yes'
                           }, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), 1)
        res = self.app.get(self.url,
                           {
                               'term': self.project.title,
                               'includePublic': 'no',
                               'includeContributed': 'yes',
                               'isRegistration': 'no'
                           }, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), 1)
        res = self.app.get(self.url,
                           {
                               'term': self.project.title,
                               'includePublic': 'yes',
                               'includeContributed': 'yes',
                               'isRegistration': 'either'
                           }, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), 1)
        res = self.app.get(self.url,
                           {
                               'term': self.public_project.title,
                               'includePublic': 'yes',
                               'includeContributed': 'yes',
                               'isRegistration': 'either'
                           }, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), 1)
        # 'either' matches both the registration and its source project.
        res = self.app.get(self.url,
                           {
                               'term': self.registration_project.title,
                               'includePublic': 'yes',
                               'includeContributed': 'yes',
                               'isRegistration': 'either'
                           }, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), 2)
        res = self.app.get(self.url,
                           {
                               'term': self.registration_project.title,
                               'includePublic': 'yes',
                               'includeContributed': 'yes',
                               'isRegistration': 'no'
                           }, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), 1)
        # Asking for folders is not supported by this endpoint: 404.
        res = self.app.get(self.url,
                           {
                               'term': self.folder.title,
                               'includePublic': 'yes',
                               'includeContributed': 'yes',
                               'isFolder': 'yes'
                           }, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 404)
        res = self.app.get(self.url,
                           {
                               'term': self.folder.title,
                               'includePublic': 'yes',
                               'includeContributed': 'yes',
                               'isFolder': 'no'
                           }, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), 0)
        res = self.app.get(self.url,
                           {
                               'term': self.dashboard.title,
                               'includePublic': 'yes',
                               'includeContributed': 'yes',
                               'isFolder': 'no'
                           }, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), 0)
        res = self.app.get(self.url,
                           {
                               'term': self.dashboard.title,
                               'includePublic': 'yes',
                               'includeContributed': 'yes',
                               'isFolder': 'yes'
                           }, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 404)
class TestReorderComponents(OsfTestCase):
    """Regression tests for reordering components when some are private."""

    def setUp(self):
        super(TestReorderComponents, self).setUp()
        self.creator = AuthUserFactory()
        self.contrib = AuthUserFactory()
        # Project is public
        self.project = ProjectFactory.create(creator=self.creator, is_public=True)
        self.project.add_contributor(self.contrib, auth=Auth(self.creator))
        # subcomponent that only creator can see
        self.public_component = NodeFactory(creator=self.creator, is_public=True)
        self.private_component = NodeFactory(creator=self.creator, is_public=False)
        self.project.nodes.append(self.public_component)
        self.project.nodes.append(self.private_component)
        self.project.save()

    # https://github.com/CenterForOpenScience/openscienceframework.org/issues/489
    def test_reorder_components_with_private_component(self):
        """A contributor who cannot view a private component can still reorder the list."""
        # contrib tries to reorder components
        payload = {
            'new_list': [
                '{0}:node'.format(self.private_component._primary_key),
                '{0}:node'.format(self.public_component._primary_key),
            ]
        }
        url = self.project.api_url_for('project_reorder_components')
        res = self.app.post_json(url, payload, auth=self.contrib.auth)
        assert_equal(res.status_code, 200)
class TestWikiWidgetViews(OsfTestCase):
    """Tests for the _should_show_wiki_widget visibility rules."""

    def setUp(self):
        super(TestWikiWidgetViews, self).setUp()
        # project with no home wiki page
        self.project = ProjectFactory()
        self.read_only_contrib = AuthUserFactory()
        self.project.add_contributor(self.read_only_contrib, permissions='read')
        self.noncontributor = AuthUserFactory()

        # project with no home wiki content
        self.project2 = ProjectFactory(creator=self.project.creator)
        self.project2.add_contributor(self.read_only_contrib, permissions='read')
        self.project2.update_node_wiki(name='home', content='', auth=Auth(self.project.creator))

    def test_show_wiki_for_contributors_when_no_wiki_or_content(self):
        """Write contributors see the widget whether or not a wiki page exists."""
        assert_true(_should_show_wiki_widget(self.project, self.project.creator))
        assert_true(_should_show_wiki_widget(self.project2, self.project.creator))

    def test_show_wiki_is_false_for_read_contributors_when_no_wiki_or_content(self):
        """Read-only contributors do not see the widget when there is no content."""
        assert_false(_should_show_wiki_widget(self.project, self.read_only_contrib))
        assert_false(_should_show_wiki_widget(self.project2, self.read_only_contrib))

    def test_show_wiki_is_false_for_noncontributors_when_no_wiki_or_content(self):
        """Non-contributors do not see the widget when there is no content."""
        assert_false(_should_show_wiki_widget(self.project, self.noncontributor))
        # BUG FIX: the second assertion previously reused read_only_contrib
        # (already covered by the test above); this test is about
        # non-contributors, so check the non-contributor here.
        assert_false(_should_show_wiki_widget(self.project2, self.noncontributor))
class TestForkViews(OsfTestCase):
    """Permission checks for forking projects and listing forks."""

    def setUp(self):
        super(TestForkViews, self).setUp()
        self.user = AuthUserFactory()
        self.project = ProjectFactory.build(creator=self.user, is_public=True)
        self.consolidated_auth = Auth(user=self.project.creator)
        self.user.save()
        self.project.save()

    def test_fork_private_project_non_contributor(self):
        """Non-contributors may not fork a private project (403)."""
        self.project.set_privacy("private")
        self.project.save()
        url = self.project.api_url_for('node_fork_page')
        non_contributor = AuthUserFactory()
        res = self.app.post_json(url,
                                 auth=non_contributor.auth,
                                 expect_errors=True)
        assert_equal(res.status_code, http.FORBIDDEN)

    def test_fork_public_project_non_contributor(self):
        """Anyone may fork a public project."""
        url = self.project.api_url_for('node_fork_page')
        non_contributor = AuthUserFactory()
        res = self.app.post_json(url, auth=non_contributor.auth)
        assert_equal(res.status_code, 200)

    def test_fork_project_contributor(self):
        """Contributors may fork even a private project."""
        contributor = AuthUserFactory()
        self.project.set_privacy("private")
        self.project.add_contributor(contributor)
        self.project.save()
        url = self.project.api_url_for('node_fork_page')
        res = self.app.post_json(url, auth=contributor.auth)
        assert_equal(res.status_code, 200)

    def test_registered_forks_dont_show_in_fork_list(self):
        """Registering a fork must not add the registration to get_forks output."""
        fork = self.project.fork_node(self.consolidated_auth)
        RegistrationFactory(project=fork)
        url = self.project.api_url_for('get_forks')
        res = self.app.get(url, auth=self.user.auth)
        # Only the fork itself is listed, not its registration.
        assert_equal(len(res.json['nodes']), 1)
        assert_equal(res.json['nodes'][0]['id'], fork._id)
class TestProjectCreation(OsfTestCase):
def setUp(self):
    """Build a creator plus a two-contributor project used by the component-creation tests."""
    super(TestProjectCreation, self).setUp()
    self.creator = AuthUserFactory()
    self.url = api_url_for('project_new_post')
    self.user1 = AuthUserFactory()
    self.user2 = AuthUserFactory()
    self.project = ProjectFactory(creator=self.user1)
    self.project.add_contributor(self.user2, auth=Auth(self.user1))
    self.project.save()
def tearDown(self):
    # NOTE(review): adds nothing beyond the base class; kept for symmetry.
    super(TestProjectCreation, self).tearDown()
def test_needs_title(self):
    """An empty creation payload is rejected with 400."""
    response = self.app.post_json(self.url, {}, auth=self.creator.auth, expect_errors=True)
    assert_equal(response.status_code, 400)
def test_create_component_strips_html(self):
    """Component titles submitted with HTML markup are sanitized before saving."""
    user = AuthUserFactory()
    project = ProjectFactory(creator=user)
    url = web_url_for('project_new_node', pid=project._id)
    post_data = {'title': '<b>New <blink>Component</blink> Title</b>', 'category': ''}
    # Follow the redirect; the response itself is not inspected (the
    # original bound it to an unused `request` local -- removed).
    self.app.post(url, post_data, auth=user.auth).follow()
    project.reload()
    child = project.nodes[0]
    # HTML has been stripped
    assert_equal(child.title, 'New Component Title')
def test_strip_html_from_title(self):
    """Markup in a project title is stripped before saving."""
    payload = {
        'title': 'no html <b>here</b>'
    }
    res = self.app.post_json(self.url, payload, auth=self.creator.auth)
    # projectUrl is '/<id>/'; stripping the slashes leaves the node id.
    node = Node.load(res.json['projectUrl'].replace('/', ''))
    assert_true(node)
    assert_equal('no html here', node.title)
def test_only_needs_title(self):
    """A title alone is sufficient to create a project (201)."""
    response = self.app.post_json(
        self.url,
        {'title': 'Im a real title'},
        auth=self.creator.auth,
    )
    assert_equal(response.status_code, 201)
def test_title_must_be_one_long(self):
    """A zero-length title is rejected with 400."""
    payload = {
        'title': ''
    }
    res = self.app.post_json(
        self.url, payload, auth=self.creator.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
def test_title_must_be_less_than_200(self):
    """Titles over the length limit are rejected with 400."""
    payload = {
        # Concatenating str(0)..str(249) yields a string well over 200 chars.
        'title': ''.join([str(x) for x in xrange(0, 250)])
    }
    res = self.app.post_json(
        self.url, payload, auth=self.creator.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
def test_fails_to_create_project_with_whitespace_title(self):
    """A whitespace-only title is rejected with 400."""
    payload = {
        'title': '   '
    }
    res = self.app.post_json(
        self.url, payload, auth=self.creator.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
def test_creates_a_project(self):
    """POSTing a title creates a project whose title round-trips."""
    payload = {
        'title': 'Im a real title'
    }
    res = self.app.post_json(self.url, payload, auth=self.creator.auth)
    assert_equal(res.status_code, 201)
    node = Node.load(res.json['projectUrl'].replace('/', ''))
    assert_true(node)
    # BUG FIX: assert_true(node.title, 'Im a real title') treated the second
    # argument as a failure message, so the title was never compared.
    assert_equal(node.title, 'Im a real title')
def test_create_component_add_contributors_admin(self):
    """An admin creating a component with inherit_contributors copies the parent's contributors."""
    url = web_url_for('project_new_node', pid=self.project._id)
    post_data = {'title': 'New Component With Contributors Title', 'category': '', 'inherit_contributors': True}
    res = self.app.post(url, post_data, auth=self.user1.auth)
    self.project.reload()
    child = self.project.nodes[0]
    assert_equal(child.title, 'New Component With Contributors Title')
    assert_in(self.user1, child.contributors)
    assert_in(self.user2, child.contributors)
    # check redirect url
    assert_in('/contributors/', res.location)
def test_create_component_with_contributors_read_write(self):
    """A read/write contributor may inherit contributors and becomes admin of the new component."""
    url = web_url_for('project_new_node', pid=self.project._id)
    non_admin = AuthUserFactory()
    self.project.add_contributor(non_admin, permissions=['read', 'write'])
    self.project.save()
    post_data = {'title': 'New Component With Contributors Title', 'category': '', 'inherit_contributors': True}
    res = self.app.post(url, post_data, auth=non_admin.auth)
    self.project.reload()
    child = self.project.nodes[0]
    assert_equal(child.title, 'New Component With Contributors Title')
    assert_in(non_admin, child.contributors)
    assert_in(self.user1, child.contributors)
    assert_in(self.user2, child.contributors)
    # The creator is granted full permissions on the component.
    assert_equal(child.get_permissions(non_admin), ['read', 'write', 'admin'])
    # check redirect url
    assert_in('/contributors/', res.location)
def test_create_component_with_contributors_read(self):
    """A read-only contributor may not create components (403)."""
    url = web_url_for('project_new_node', pid=self.project._id)
    non_admin = AuthUserFactory()
    self.project.add_contributor(non_admin, permissions=['read'])
    self.project.save()
    post_data = {'title': 'New Component With Contributors Title', 'category': '', 'inherit_contributors': True}
    res = self.app.post(url, post_data, auth=non_admin.auth, expect_errors=True)
    assert_equal(res.status_code, 403)
def test_create_component_add_no_contributors(self):
    """Without inherit_contributors, only the creator is on the new component."""
    url = web_url_for('project_new_node', pid=self.project._id)
    post_data = {'title': 'New Component With Contributors Title', 'category': ''}
    res = self.app.post(url, post_data, auth=self.user1.auth)
    self.project.reload()
    child = self.project.nodes[0]
    assert_equal(child.title, 'New Component With Contributors Title')
    assert_in(self.user1, child.contributors)
    assert_not_in(self.user2, child.contributors)
    # check redirect url
    assert_not_in('/contributors/', res.location)
def test_new_project_returns_serialized_node_data(self):
    """A successful POST returns the new node serialized under 'newNode'."""
    res = self.app.post_json(self.url, {'title': 'Im a real title'}, auth=self.creator.auth)
    assert_equal(res.status_code, 201)
    new_node = res.json['newNode']
    assert_true(new_node)
    assert_equal(new_node['title'], 'Im a real title')
def test_description_works(self):
    """Posting a description alongside the title stores it on the new node.

    Fixed: the original ended with ``assert_true(node.description,
    'I describe things!')`` — the second argument of ``assert_true`` is the
    failure *message*, so the stored description was never actually compared.
    ``assert_equal`` performs the intended check.
    """
    payload = {
        'title': 'Im a real title',
        'description': 'I describe things!'
    }
    res = self.app.post_json(self.url, payload, auth=self.creator.auth)
    assert_equal(res.status_code, 201)
    # The response exposes the new node's URL; strip slashes to get its id.
    node = Node.load(res.json['projectUrl'].replace('/', ''))
    assert_true(node)
    assert_equal(node.description, 'I describe things!')
def test_can_template(self):
    """A new project can be created as a template of an existing node.

    Fixed: the original ended with ``assert_true(node.template_node,
    other_node)`` — the second argument of ``assert_true`` is only the
    failure *message*, so the template linkage was never compared.
    ``assert_equal`` performs the intended check.
    """
    other_node = ProjectFactory(creator=self.creator)
    payload = {
        'title': 'Im a real title',
        'template': other_node._id
    }
    res = self.app.post_json(self.url, payload, auth=self.creator.auth)
    assert_equal(res.status_code, 201)
    node = Node.load(res.json['projectUrl'].replace('/', ''))
    assert_true(node)
    assert_equal(node.template_node, other_node)
def test_project_before_template_no_addons(self):
    """A project with no add-ons yields no prompts before templating."""
    project = ProjectFactory()
    url = project.api_url_for('project_before_template')
    res = self.app.get(url, auth=project.creator.auth)
    assert_equal(res.json['prompts'], [])
def test_project_before_template_with_addons(self):
    """An attached add-on (GitHub) shows up in the pre-template prompts."""
    project = ProjectWithAddonFactory(addon='github')
    url = project.api_url_for('project_before_template')
    res = self.app.get(url, auth=project.creator.auth)
    assert_in('GitHub', res.json['prompts'])
def test_project_new_from_template_non_user(self):
    """An anonymous request is redirected to the login page."""
    project = ProjectFactory()
    url = api_url_for('project_new_from_template', nid=project._id)
    res = self.app.post(url, auth=None)
    assert_equal(res.status_code, 302)
    # Follow the redirect chain: a permanent redirect lands on /login.
    followed = res.follow(expect_errors=True)
    assert_equal(followed.status_code, 301)
    assert_equal(followed.request.path, '/login')
def test_project_new_from_template_public_non_contributor(self):
    """Any logged-in user may template a *public* project (201 Created)."""
    outsider = AuthUserFactory()
    project = ProjectFactory(is_public=True)
    url = api_url_for('project_new_from_template', nid=project._id)
    res = self.app.post(url, auth=outsider.auth)
    assert_equal(res.status_code, 201)
def test_project_new_from_template_contributor(self):
    """A contributor may template a *private* project (201 Created)."""
    contributor = AuthUserFactory()
    project = ProjectFactory(is_public=False)
    project.add_contributor(contributor)
    project.save()
    url = api_url_for('project_new_from_template', nid=project._id)
    res = self.app.post(url, auth=contributor.auth)
    assert_equal(res.status_code, 201)
class TestUnconfirmedUserViews(OsfTestCase):
    """Views that must remain reachable for not-yet-confirmed users."""

    def test_can_view_profile(self):
        # An unconfirmed user's profile page should still render (200 OK).
        unconfirmed = UnconfirmedUserFactory()
        profile_url = web_url_for('profile_view_id', uid=unconfirmed._id)
        res = self.app.get(profile_url)
        assert_equal(res.status_code, 200)
class TestProfileNodeList(OsfTestCase):
    """Public project/component listings on a user profile must filter out
    private, deleted, and wrong-category nodes."""

    def setUp(self):
        OsfTestCase.setUp(self)
        self.user = AuthUserFactory()
        # One visible project + component, plus nodes that must be excluded.
        self.public = ProjectFactory(is_public=True)
        self.public_component = NodeFactory(parent=self.public, is_public=True)
        self.private = ProjectFactory(is_public=False)
        self.deleted = ProjectFactory(is_public=True, is_deleted=True)
        for node in (self.public, self.public_component, self.private, self.deleted):
            node.add_contributor(self.user, auth=Auth(node.creator))
            node.save()

    def _listed_ids(self, endpoint):
        # Helper: hit the given profile endpoint and return the node ids shown.
        res = self.app.get(api_url_for(endpoint, uid=self.user._id))
        return [each['id'] for each in res.json['nodes']]

    def test_get_public_projects(self):
        node_ids = self._listed_ids('get_public_projects')
        assert_in(self.public._id, node_ids)
        assert_not_in(self.private._id, node_ids)
        assert_not_in(self.deleted._id, node_ids)
        assert_not_in(self.public_component._id, node_ids)

    def test_get_public_components(self):
        node_ids = self._listed_ids('get_public_components')
        assert_in(self.public_component._id, node_ids)
        assert_not_in(self.public._id, node_ids)
        assert_not_in(self.private._id, node_ids)
        assert_not_in(self.deleted._id, node_ids)
class TestStaticFileViews(OsfTestCase):
    """Smoke tests for static assets and hard-coded redirects."""

    def test_robots_dot_txt(self):
        res = self.app.get('/robots.txt')
        assert_equal(res.status_code, 200)
        assert_in('User-agent', res)
        assert_in('text/plain', res.headers['Content-Type'])

    def test_favicon(self):
        res = self.app.get('/favicon.ico')
        assert_equal(res.status_code, 200)
        assert_in('image/vnd.microsoft.icon', res.headers['Content-Type'])

    def test_getting_started_page(self):
        # Getting-started now redirects to the external help site.
        res = self.app.get('/getting-started/')
        assert_equal(res.status_code, 302)
        assert_equal(res.location, 'http://help.osf.io/')

    def test_help_redirect(self):
        res = self.app.get('/help/')
        assert_equal(res.status_code, 302)
class TestUserConfirmSignal(OsfTestCase):
    """The ``user_confirmed`` signal must fire on both confirmation paths."""

    def test_confirm_user_signal_called_when_user_claims_account(self):
        # An unclaimed user who was invited to a project claims the account.
        unclaimed_user = UnconfirmedUserFactory()
        referrer = UserFactory()
        project = ProjectFactory(creator=referrer)
        unclaimed_user.add_unclaimed_record(project, referrer, 'foo')
        unclaimed_user.save()
        token = unclaimed_user.get_unclaimed_record(project._primary_key)['token']
        with capture_signals() as mock_signals:
            url = web_url_for('claim_user_form', pid=project._id, uid=unclaimed_user._id, token=token)
            payload = {'username': unclaimed_user.username,
                       'password': 'password',
                       'password2': 'password'}
            res = self.app.post(url, payload)
            assert_equal(res.status_code, 302)
            assert_equal(mock_signals.signals_sent(), set([auth.signals.user_confirmed]))

    def test_confirm_user_signal_called_when_user_confirms_email(self):
        unconfirmed_user = UnconfirmedUserFactory()
        unconfirmed_user.save()
        # The user visits the email confirmation link.
        token = unconfirmed_user.get_confirmation_token(unconfirmed_user.username)
        with capture_signals() as mock_signals:
            url = web_url_for('confirm_email_get', uid=unconfirmed_user._id, token=token)
            res = self.app.get(url)
            assert_equal(res.status_code, 302)
            assert_equal(mock_signals.signals_sent(), set([auth.signals.user_confirmed]))
# Allow running this test module directly (e.g. ``python test_views.py``).
if __name__ == '__main__':
unittest.main()
| abought/osf.io | tests/test_views.py | Python | apache-2.0 | 183,143 | [
"Brian"
] | bcb8a8d81e2d765111211c8c0b22c351b4129b927f0a363ee2d1c1022b6ab685 |
#!/usr/bin/python
"""
Copyright 2013 Paul Willworth <ioscode@gmail.com>
This file is part of Galaxy Harvester.
Galaxy Harvester is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Galaxy Harvester is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Galaxy Harvester. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import sys
import cgi
import Cookie
import dbSession
import dbShared
import MySQLdb
import ghShared
import ghLists
from jinja2 import Environment, FileSystemLoader
# Get current url
try:
url = os.environ['SCRIPT_NAME']
except KeyError:
url = ''
uiTheme = ''
form = cgi.FieldStorage()
# Get Cookies
useCookies = 1
cookies = Cookie.SimpleCookie()
try:
cookies.load(os.environ['HTTP_COOKIE'])
except KeyError:
useCookies = 0
if useCookies:
try:
currentUser = cookies['userID'].value
except KeyError:
currentUser = ''
try:
loginResult = cookies['loginAttempt'].value
except KeyError:
loginResult = 'success'
try:
sid = cookies['gh_sid'].value
except KeyError:
sid = form.getfirst('gh_sid', '')
try:
uiTheme = cookies['uiTheme'].value
except KeyError:
uiTheme = ''
else:
currentUser = ''
loginResult = form.getfirst('loginAttempt', '')
sid = form.getfirst('gh_sid', '')
# escape input to prevent sql injection
sid = dbShared.dbInsertSafe(sid)
# Get a session
logged_state = 0
linkappend = ''
if loginResult == None:
loginResult = 'success'
sess = dbSession.getSession(sid, 2592000)
if (sess != ''):
logged_state = 1
currentUser = sess
if (uiTheme == ''):
uiTheme = dbShared.getUserAttr(currentUser, 'themeName')
if (useCookies == 0):
linkappend = 'gh_sid=' + sid
else:
if (uiTheme == ''):
uiTheme = 'crafter'
pictureName = dbShared.getUserAttr(currentUser, 'pictureName')
print 'Content-type: text/html\n'
env = Environment(loader=FileSystemLoader('templates'))
env.globals['BASE_SCRIPT_URL'] = ghShared.BASE_SCRIPT_URL
template = env.get_template('resourcefinder.html')
print template.render(uiTheme=uiTheme, loggedin=logged_state, currentUser=currentUser, loginResult=loginResult, linkappend=linkappend, url=url, pictureName=pictureName, imgNum=ghShared.imgNum, galaxyList=ghLists.getGalaxyList(), planetList=ghLists.getPlanetList(), resourceTypeList=ghLists.getResourceTypeList(), resourceGroupList=ghLists.getResourceGroupList())
| clreinki/GalaxyHarvester | resourceFinder.py | Python | agpl-3.0 | 2,766 | [
"Galaxy"
] | c9fd04471b954a4420aa2a136f0b4c9ae534d67498496d26b9d20682580e4d4c |
#
#@BEGIN LICENSE
#
# PSI4: an ab initio quantum chemistry software package
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#@END LICENSE
#
from __future__ import print_function
"""Module with a *procedures* dictionary specifying available quantum
chemical methods and functions driving the main quantum chemical
functionality, namely single-point energies, geometry optimizations,
properties, and vibrational frequency calculations.
"""
import sys
import psi4
import p4util
import p4const
from proc import *
from functional import *
from p4regex import *
# never import wrappers or aliases into this file
# Procedure lookup tables
# Dispatch tables for the driver: map a lowercased method name to the Python
# function that runs it, keyed by calculation type ('energy', 'gradient',
# 'hessian', 'property'). Aliases (e.g. 'dfmp2'/'df-mp2') point at the same
# runner; keys must be lowercase because user input is lowercased on lookup.
procedures = {
'energy': {
'scf' : run_scf,
'mcscf' : run_mcscf,
'dcft' : run_dcft,
'oldmp2' : run_oldmp2,
'dfmp2' : run_dfmp2,
'df-mp2' : run_dfmp2,
'conv-mp2' : run_mp2,
'mp3' : run_mp3,
'mp2.5' : run_mp2_5,
'mp2' : run_mp2_select,
'omp2' : run_omp2,
'scs-omp2' : run_scs_omp2,
'scsn-omp2' : run_scs_omp2,
'scs-mi-omp2' : run_scs_omp2,
'scs-omp2-vdw' : run_scs_omp2,
'sos-omp2' : run_sos_omp2,
'sos-pi-omp2' : run_sos_omp2,
'omp3' : run_omp3,
'scs-omp3' : run_scs_omp3,
'scsn-omp3' : run_scs_omp3,
'scs-mi-omp3' : run_scs_omp3,
'scs-omp3-vdw' : run_scs_omp3,
'sos-omp3' : run_sos_omp3,
'sos-pi-omp3' : run_sos_omp3,
'ocepa' : run_ocepa,
'cepa0' : run_cepa0,
'omp2.5' : run_omp2_5,
# Symmetry-adapted perturbation theory (SAPT) family.
'sapt0' : run_sapt,
'sapt2' : run_sapt,
'sapt2+' : run_sapt,
'sapt2+(3)' : run_sapt,
'sapt2+3' : run_sapt,
'sapt2+(ccd)' : run_sapt,
'sapt2+(3)(ccd)': run_sapt,
'sapt2+3(ccd)' : run_sapt,
'sapt0-ct' : run_sapt_ct,
'sapt2-ct' : run_sapt_ct,
'sapt2+-ct' : run_sapt_ct,
'sapt2+(3)-ct' : run_sapt_ct,
'sapt2+3-ct' : run_sapt_ct,
'sapt2+(ccd)-ct' : run_sapt_ct,
'sapt2+(3)(ccd)-ct' : run_sapt_ct,
'sapt2+3(ccd)-ct' : run_sapt_ct,
'mp2c' : run_mp2c,
# Coupled-cluster methods handled by the ccenergy module.
'ccenergy' : run_ccenergy, # full control over ccenergy
'ccsd' : run_ccenergy,
'ccsd(t)' : run_ccenergy,
'ccsd(at)' : run_ccenergy,
'a-ccsd(t)' : run_ccenergy,
'cc2' : run_ccenergy,
'cc3' : run_ccenergy,
'mrcc' : run_mrcc, # interface to Kallay's MRCC program
'bccd' : run_bccd,
'bccd(t)' : run_bccd_t,
'eom-ccsd' : run_eom_cc,
'eom-cc2' : run_eom_cc,
'eom-cc3' : run_eom_cc,
# Configuration-interaction / perturbation methods via detci.
'detci' : run_detci, # full control over detci
'mp' : run_detci, # arbitrary order mp(n)
'detci-mp' : run_detci, # arbitrary order mp(n)
'zapt' : run_detci, # arbitrary order zapt(n)
'cisd' : run_detci,
'cisdt' : run_detci,
'cisdtq' : run_detci,
'ci' : run_detci, # arbitrary order ci(n)
'fci' : run_detci,
'adc' : run_adc,
'cphf' : run_libfock,
'cis' : run_libfock,
'tdhf' : run_libfock,
'cpks' : run_libfock,
'tda' : run_libfock,
'tddft' : run_libfock,
'psimrcc' : run_psimrcc,
'psimrcc_scf' : run_psimrcc_scf,
# SCF reference aliases: all dispatch to run_scf.
'hf' : run_scf,
'rhf' : run_scf,
'uhf' : run_scf,
'rohf' : run_scf,
'rscf' : run_scf,
'uscf' : run_scf,
'roscf' : run_scf,
# Frozen-natural-orbital (FNO) and density-fitted CC via fnocc.
'qcisd' : run_fnocc,
'qcisd(t)' : run_fnocc,
'mp4(sdq)' : run_fnocc,
'fno-ccsd' : run_fnocc,
'fno-ccsd(t)' : run_fnocc,
'fno-qcisd' : run_fnocc,
'fno-qcisd(t)' : run_fnocc,
'fno-mp3' : run_fnocc,
'fno-mp4(sdq)' : run_fnocc,
'fno-mp4' : run_fnocc,
'fnocc-mp' : run_fnocc,
'df-ccsd' : run_fnodfcc,
'df-ccsd(t)' : run_fnodfcc,
'fno-df-ccsd' : run_fnodfcc,
'fno-df-ccsd(t)': run_fnodfcc,
# Coupled-electron-pair approximation (CEPA) variants.
'fno-cepa(0)' : run_cepa,
'fno-cepa(1)' : run_cepa,
'fno-cepa(3)' : run_cepa,
'fno-acpf' : run_cepa,
'fno-aqcc' : run_cepa,
'fno-sdci' : run_cepa,
'fno-dci' : run_cepa,
'cepa(0)' : run_cepa,
'cepa(1)' : run_cepa,
'cepa(3)' : run_cepa,
'acpf' : run_cepa,
'aqcc' : run_cepa,
'sdci' : run_cepa,
'dci' : run_cepa,
# Upon adding a method to this list, add it to the docstring in energy() below
# If you must add an alias to this list (e.g., dfmp2/df-mp2), please search the
# whole driver to find uses of name in return values and psi variables and
# extend the logic to encompass the new alias.
},
# Methods with analytic first derivatives available.
'gradient' : {
'scf' : run_scf_gradient,
'ccsd' : run_cc_gradient,
'ccsd(t)' : run_cc_gradient,
'mp2' : run_mp2_select_gradient,
'conv-mp2' : run_mp2_gradient,
'df-mp2' : run_dfmp2_gradient,
'dfmp2' : run_dfmp2_gradient,
'eom-ccsd' : run_eom_cc_gradient,
'dcft' : run_dcft_gradient,
'omp2' : run_omp2_gradient,
'omp3' : run_omp3_gradient,
'mp3' : run_mp3_gradient,
'mp2.5' : run_mp2_5_gradient,
'omp2.5' : run_omp2_5_gradient,
'cepa0' : run_cepa0_gradient,
'ocepa' : run_ocepa_gradient
# Upon adding a method to this list, add it to the docstring in optimize() below
},
# No analytic second derivatives are wired up yet.
'hessian' : {
# Upon adding a method to this list, add it to the docstring in frequency() below
},
# Methods supporting one-electron property computation.
'property' : {
'scf' : run_scf_property,
'cc2' : run_cc_property,
'ccsd' : run_cc_property,
'df-mp2' : run_dfmp2_property,
'dfmp2' : run_dfmp2_property,
'eom-cc2' : run_cc_property,
'eom-ccsd' : run_cc_property,
# Upon adding a method to this list, add it to the docstring in property() below
}}
# Integrate DFT with driver routines: register every available
# superfunctional for energies and, where supported, gradients.
# Merged the original two passes over superfunctional_list() into one loop
# (same final dispatch-table state, one traversal; assumes the list is
# deterministic across calls, which the original double iteration relied on
# implicitly as well).
for ssuper in superfunctional_list():
    fctl = ssuper.name().lower()
    procedures['energy'][fctl] = run_dft
    # Analytic DFT gradients are only available for functionals without
    # C-hybrid or long-range-corrected (LRC) components.
    if (not ssuper.is_c_hybrid()) and (not ssuper.is_c_lrc()) and (not ssuper.is_x_lrc()):
        procedures['gradient'][fctl] = run_dft_gradient
def energy(name, **kwargs):
r"""Function to compute the single-point electronic energy.
:returns: (*float*) Total electronic energy in Hartrees. SAPT returns interaction energy.
:PSI variables:
.. hlist::
:columns: 1
* :psivar:`CURRENT ENERGY <CURRENTENERGY>`
* :psivar:`CURRENT REFERENCE ENERGY <CURRENTREFERENCEENERGY>`
* :psivar:`CURRENT CORRELATION ENERGY <CURRENTCORRELATIONENERGY>`
.. comment In this table immediately below, place methods that should only be called by
.. comment developers at present. This table won't show up in the manual.
.. comment
.. comment .. _`table:energy_devel`:
.. comment
.. comment +-------------------------+---------------------------------------------------------------------------------------+
.. comment | name | calls method |
.. comment +=========================+=======================================================================================+
.. comment | mp2c | coupled MP2 (MP2C) |
.. comment +-------------------------+---------------------------------------------------------------------------------------+
.. comment | mp2-drpa | random phase approximation? |
.. comment +-------------------------+---------------------------------------------------------------------------------------+
.. comment | cphf | coupled-perturbed Hartree-Fock? |
.. comment +-------------------------+---------------------------------------------------------------------------------------+
.. comment | cpks | coupled-perturbed Kohn-Sham? |
.. comment +-------------------------+---------------------------------------------------------------------------------------+
.. comment | cis | CI singles (CIS) |
.. comment +-------------------------+---------------------------------------------------------------------------------------+
.. comment | tda | Tamm-Dankoff approximation (TDA) |
.. comment +-------------------------+---------------------------------------------------------------------------------------+
.. comment | tdhf | time-dependent HF (TDHF) |
.. comment +-------------------------+---------------------------------------------------------------------------------------+
.. comment | tddft | time-dependent DFT (TDDFT) |
.. comment +-------------------------+---------------------------------------------------------------------------------------+
.. _`table:energy_gen`:
+-------------------------+---------------------------------------------------------------------------------------+
| name | calls method |
+=========================+=======================================================================================+
| scf | Hartree--Fock (HF) or density functional theory (DFT) :ref:`[manual] <sec:scf>` |
+-------------------------+---------------------------------------------------------------------------------------+
| dcft | density cumulant functional theory :ref:`[manual] <sec:dcft>` |
+-------------------------+---------------------------------------------------------------------------------------+
| mcscf | multiconfigurational self consistent field (SCF) |
+-------------------------+---------------------------------------------------------------------------------------+
| mp2 | 2nd-order Moller-Plesset perturbation theory (MP2) :ref:`[manual] <sec:dfmp2>` |
+-------------------------+---------------------------------------------------------------------------------------+
| df-mp2 | MP2 with density fitting :ref:`[manual] <sec:dfmp2>` |
+-------------------------+---------------------------------------------------------------------------------------+
| conv-mp2 | conventional MP2 (non-density-fitting) :ref:`[manual] <sec:convocc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| mp3 | 3rd-order Moller-Plesset perturbation theory (MP3) :ref:`[manual] <sec:convocc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| mp2.5 | average of MP2 and MP3 :ref:`[manual] <sec:convocc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| mp4(sdq) | 4th-order MP perturbation theory (MP4) less triples :ref:`[manual] <sec:fnompn>` |
+-------------------------+---------------------------------------------------------------------------------------+
| mp4 | full MP4 :ref:`[manual] <sec:fnompn>` |
+-------------------------+---------------------------------------------------------------------------------------+
| mp\ *n* | *n*\ th-order Moller--Plesset (MP) perturbation theory :ref:`[manual] <sec:arbpt>` |
+-------------------------+---------------------------------------------------------------------------------------+
| zapt\ *n* | *n*\ th-order z-averaged perturbation theory (ZAPT) :ref:`[manual] <sec:arbpt>` |
+-------------------------+---------------------------------------------------------------------------------------+
| omp2 | orbital-optimized second-order MP perturbation theory :ref:`[manual] <sec:occ>` |
+-------------------------+---------------------------------------------------------------------------------------+
| omp3 | orbital-optimized third-order MP perturbation theory :ref:`[manual] <sec:occ>` |
+-------------------------+---------------------------------------------------------------------------------------+
| omp2.5 | orbital-optimized MP2.5 :ref:`[manual] <sec:occ>` |
+-------------------------+---------------------------------------------------------------------------------------+
| ocepa | orbital-optimized coupled electron pair approximation :ref:`[manual] <sec:occ>` |
+-------------------------+---------------------------------------------------------------------------------------+
| cepa0 | coupled electron pair approximation, equiv. linear. CCD :ref:`[manual] <sec:convocc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| cepa(0) | coupled electron pair approximation variant 0 :ref:`[manual] <sec:fnocepa>` |
+-------------------------+---------------------------------------------------------------------------------------+
| cepa(1) | coupled electron pair approximation variant 1 :ref:`[manual] <sec:fnocepa>` |
+-------------------------+---------------------------------------------------------------------------------------+
| cepa(3) | coupled electron pair approximation variant 3 :ref:`[manual] <sec:fnocepa>` |
+-------------------------+---------------------------------------------------------------------------------------+
| acpf | averaged coupled-pair functional :ref:`[manual] <sec:fnocepa>` |
+-------------------------+---------------------------------------------------------------------------------------+
| aqcc | averaged quadratic coupled cluster :ref:`[manual] <sec:fnocepa>` |
+-------------------------+---------------------------------------------------------------------------------------+
| qcisd | quadratic CI singles doubles (QCISD) :ref:`[manual] <sec:fnocc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| cc2 | approximate coupled cluster singles and doubles (CC2) :ref:`[manual] <sec:cc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| ccsd | coupled cluster singles and doubles (CCSD) :ref:`[manual] <sec:cc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| bccd | Brueckner coupled cluster doubles (BCCD) :ref:`[manual] <sec:cc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| qcisd(t) | QCISD with perturbative triples :ref:`[manual] <sec:fnocc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| ccsd(t) | CCSD with perturbative triples (CCSD(T)) :ref:`[manual] <sec:cc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| fno-df-ccsd(t) | CCSD(T) with density fitting and frozen natural orbitals :ref:`[manual] <sec:fnocc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| bccd(t) | BCCD with perturbative triples :ref:`[manual] <sec:cc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| cc3 | approximate CC singles, doubles, and triples (CC3) :ref:`[manual] <sec:cc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| ccenergy | **expert** full control over ccenergy module |
+-------------------------+---------------------------------------------------------------------------------------+
| cisd | configuration interaction (CI) singles and doubles (CISD) :ref:`[manual] <sec:ci>` |
+-------------------------+---------------------------------------------------------------------------------------+
| cisdt | CI singles, doubles, and triples (CISDT) :ref:`[manual] <sec:ci>` |
+-------------------------+---------------------------------------------------------------------------------------+
| cisdtq | CI singles, doubles, triples, and quadruples (CISDTQ) :ref:`[manual] <sec:ci>` |
+-------------------------+---------------------------------------------------------------------------------------+
| ci\ *n* | *n*\ th-order CI :ref:`[manual] <sec:ci>` |
+-------------------------+---------------------------------------------------------------------------------------+
| fci | full configuration interaction (FCI) :ref:`[manual] <sec:ci>` |
+-------------------------+---------------------------------------------------------------------------------------+
| detci | **expert** full control over detci module |
+-------------------------+---------------------------------------------------------------------------------------+
| gaussian-2 (g2) | gaussian-2 composite method :ref:`[manual] <sec:fnogn>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt0 | 0th-order symmetry adapted perturbation theory (SAPT) :ref:`[manual] <sec:sapt>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2 | 2nd-order SAPT, traditional definition :ref:`[manual] <sec:sapt>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+ | SAPT including all 2nd-order terms :ref:`[manual] <sec:sapt>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+(3) | SAPT including perturbative triples :ref:`[manual] <sec:sapt>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+3 | SAPT including all 3rd-order terms :ref:`[manual] <sec:sapt>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+(ccd) | SAPT2+ with CC-based dispersion :ref:`[manual] <sec:sapt>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+(3)(ccd) | SAPT2+(3) with CC-based dispersion :ref:`[manual] <sec:sapt>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+3(ccd) | SAPT2+3 with CC-based dispersion :ref:`[manual] <sec:sapt>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt0-ct | 0th-order SAPT plus charge transfer (CT) calculation :ref:`[manual] <sec:saptct>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2-ct | SAPT2 plus CT :ref:`[manual] <sec:saptct>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+-ct | SAPT2+ plus CT :ref:`[manual] <sec:saptct>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+(3)-ct | SAPT2+(3) plus CT :ref:`[manual] <sec:saptct>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+3-ct | SAPT2+3 plus CT :ref:`[manual] <sec:saptct>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+(ccd)-ct | SAPT2+(CCD) plus CT :ref:`[manual] <sec:saptct>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+(3)(ccd)-ct | SAPT2+(3)(CCD) plus CT :ref:`[manual] <sec:saptct>` |
+-------------------------+---------------------------------------------------------------------------------------+
| sapt2+3(ccd)-ct | SAPT2+3(CCD) plus CT :ref:`[manual] <sec:saptct>` |
+-------------------------+---------------------------------------------------------------------------------------+
| adc | 2nd-order algebraic diagrammatic construction (ADC) :ref:`[manual] <sec:adc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| eom-cc2 | EOM-CC2 :ref:`[manual] <sec:eomcc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| eom-ccsd | equation of motion (EOM) CCSD :ref:`[manual] <sec:eomcc>` |
+-------------------------+---------------------------------------------------------------------------------------+
| eom-cc3 | EOM-CC3 :ref:`[manual] <sec:eomcc>` |
+-------------------------+---------------------------------------------------------------------------------------+
.. _`table:energy_scf`:
+-------------------------+---------------------------------------------------------------------------------------+
| name | calls method (aliases to *name* = 'scf') |
+=========================+=======================================================================================+
| hf | HF |
+-------------------------+---------------------------------------------------------------------------------------+
| rhf | HF with restricted reference |
+-------------------------+---------------------------------------------------------------------------------------+
| uhf | HF with unrestricted reference |
+-------------------------+---------------------------------------------------------------------------------------+
| rohf | HF with restricted open-shell reference |
+-------------------------+---------------------------------------------------------------------------------------+
| rscf | HF or DFT with restricted reference |
+-------------------------+---------------------------------------------------------------------------------------+
| uscf | HF or DFT with unrestricted reference |
+-------------------------+---------------------------------------------------------------------------------------+
| roscf | HF or DFT with restricted open-shell reference |
+-------------------------+---------------------------------------------------------------------------------------+
.. include:: autodoc_dft_energy.rst
.. _`table:energy_mrcc`:
+-------------------------+---------------------------------------------------------------------------------------+
| name | calls method in Kallay's MRCC program :ref:`[manual] <sec:mrcc>` |
+=========================+=======================================================================================+
| mrccsd | CC through doubles |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdt | CC through triples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtq | CC through quadruples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtqp | CC through quintuples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtqph | CC through sextuples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsd(t) | CC through doubles with perturbative triples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdt(q) | CC through triples with perturbative quadruples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtq(p) | CC through quadruples with pertubative quintuples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtqp(h) | CC through quintuples with pertubative sextuples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsd(t)_l | |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdt(q)_l | |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtq(p)_l | |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtqp(h)_l | |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdt-1a | CC through doubles with iterative triples (cheapest terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtq-1a | CC through triples with iterative quadruples (cheapest terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtqp-1a | CC through quadruples with iterative quintuples (cheapest terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtqph-1a | CC through quintuples with iterative sextuples (cheapest terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdt-1b | CC through doubles with iterative triples (cheaper terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtq-1b | CC through triples with iterative quadruples (cheaper terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtqp-1b | CC through quadruples with iterative quintuples (cheaper terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtqph-1b | CC through quintuples with iterative sextuples (cheaper terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrcc2 | approximate CC through doubles |
+-------------------------+---------------------------------------------------------------------------------------+
| mrcc3 | approximate CC through triples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrcc4 | approximate CC through quadruples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrcc5 | approximate CC through quintuples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrcc6 | approximate CC through sextuples |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdt-3 | CC through doubles with iterative triples (all but the most expensive terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtq-3 | CC through triples with iterative quadruples (all but the most expensive terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtqp-3 | CC through quadruples with iterative quintuples (all but the most expensive terms) |
+-------------------------+---------------------------------------------------------------------------------------+
| mrccsdtqph-3 | CC through quintuples with iterative sextuples (all but the most expensive terms) |
+-------------------------+---------------------------------------------------------------------------------------+
:type name: string
:param name: ``'scf'`` || ``'df-mp2'`` || ``'ci5'`` || etc.
First argument, usually unlabeled. Indicates the computational method
to be applied to the system.
:type molecule: :ref:`molecule <op_py_molecule>`
:param molecule: ``h2o`` || etc.
The target molecule, if not the last molecule defined.
.. comment :type cast_up: :ref:`boolean <op_py_boolean>` or string
.. comment :param cast_up: ``'on'`` || |dl| ``'off'`` |dr| || ``'3-21g'`` || ``'cc-pVDZ'`` || etc.
.. comment Indicates whether, to accelerate convergence for the scf portion of
.. comment the *name* calculation, a preliminary scf should be performed with a
.. comment small basis set (3-21G if a basis name is not supplied as keyword
.. comment value) followed by projection into the full target basis.
.. comment .. deprecated:: Sept-2012
.. comment Use option |scf__basis_guess| instead.
.. comment :type cast_up_df: :ref:`boolean <op_py_boolean>` or string
.. comment :param cast_up_df: ``'on'`` || |dl| ``'off'`` |dr| || ``'cc-pVDZ-RI'`` || ``'aug-cc-pVDZ-JKFIT'`` || etc.
.. comment Indicates whether, when *cast_up* is active, to run the preliminary
.. comment scf in density-fitted mode or what fitting basis to employ (when
.. comment available for all elements, cc-pVDZ-RI is the default).
.. comment .. deprecated:: Sept-2012
.. comment Use option |scf__df_basis_guess| instead.
:type bypass_scf: :ref:`boolean <op_py_boolean>`
:param bypass_scf: ``'on'`` || |dl| ``'off'`` |dr|
Indicates whether, for *name* values built atop of scf calculations,
the scf step is skipped. Suitable when special steps are taken to get
the scf to converge in an explicit preceding scf step.
:examples:
>>> # [1] Coupled-cluster singles and doubles calculation with psi code
>>> energy('ccsd')
>>> # [2] Charge-transfer SAPT calculation with scf projection from small into
>>> # requested basis, with specified projection fitting basis
>>> set basis_guess true
>>> set df_basis_guess jun-cc-pVDZ-JKFIT
>>> energy('sapt0-ct')
>>> # [3] Arbitrary-order MPn calculation
>>> energy('mp4')
>>> # [4] Converge scf as singlet, then run detci as triplet upon singlet reference
>>> molecule H2 {\\n0 1\\nH\\nH 1 0.74\\n}
>>> energy('scf')
>>> H2.set_multiplicity(3)
>>> energy('detci', bypass_scf=True)
"""
lowername = name.lower()
kwargs = p4util.kwargs_lower(kwargs)
optstash = p4util.OptionsState(
['SCF', 'E_CONVERGENCE'],
['SCF', 'D_CONVERGENCE'],
['E_CONVERGENCE'])
# Make sure the molecule the user provided is the active one
if 'molecule' in kwargs:
activate(kwargs['molecule'])
del kwargs['molecule']
molecule = psi4.get_active_molecule()
molecule.update_geometry()
# Allow specification of methods to arbitrary order
lowername, level = parse_arbitrary_order(lowername)
if level:
kwargs['level'] = level
try:
# Set method-dependent scf convergence criteria
if not psi4.has_option_changed('SCF', 'E_CONVERGENCE'):
if procedures['energy'][lowername] == run_scf or procedures['energy'][lowername] == run_dft:
psi4.set_local_option('SCF', 'E_CONVERGENCE', 6)
else:
psi4.set_local_option('SCF', 'E_CONVERGENCE', 8)
if not psi4.has_option_changed('SCF', 'D_CONVERGENCE'):
if procedures['energy'][lowername] == run_scf or procedures['energy'][lowername] == run_dft:
psi4.set_local_option('SCF', 'D_CONVERGENCE', 6)
else:
psi4.set_local_option('SCF', 'D_CONVERGENCE', 8)
# Set post-scf convergence criteria (global will cover all correlated modules)
if not psi4.has_global_option_changed('E_CONVERGENCE'):
if not procedures['energy'][lowername] == run_scf and not procedures['energy'][lowername] == run_dft:
psi4.set_global_option('E_CONVERGENCE', 6)
procedures['energy'][lowername](lowername, **kwargs)
except KeyError:
raise ValidationError('Energy method %s not available.' % (lowername))
optstash.restore()
return psi4.get_variable('CURRENT ENERGY')
def gradient(name, **kwargs):
r"""Function complementary to optimize(). Carries out one gradient pass,
deciding analytic or finite difference.
"""
lowername = name.lower()
kwargs = p4util.kwargs_lower(kwargs)
dertype = 1
optstash = p4util.OptionsState(
['SCF', 'E_CONVERGENCE'],
['SCF', 'D_CONVERGENCE'],
['E_CONVERGENCE'])
# Order of precedence:
# 1. Default for wavefunction
# 2. Value obtained from kwargs, if user changed it
# 3. If user provides a custom 'func' use that
# Allow specification of methods to arbitrary order
lowername, level = parse_arbitrary_order(lowername)
if level:
kwargs['level'] = level
# 1. set the default to that of the provided name
if lowername in procedures['gradient']:
dertype = 1
elif lowername in procedures['energy']:
dertype = 0
func = energy
# 2. Check if the user passes dertype into this function
if 'dertype' in kwargs:
opt_dertype = kwargs['dertype']
if der0th.match(str(opt_dertype)):
dertype = 0
func = energy
elif der1st.match(str(opt_dertype)):
dertype = 1
else:
raise ValidationError('Requested derivative level \'dertype\' %s not valid for helper function optimize.' % (opt_dertype))
# 3. if the user provides a custom function THAT takes precendence
if ('opt_func' in kwargs) or ('func' in kwargs):
if ('func' in kwargs):
kwargs['opt_func'] = kwargs['func']
del kwargs['func']
dertype = 0
func = kwargs['opt_func']
# Summary validation
if (dertype == 1) and (lowername in procedures['gradient']):
pass
elif (dertype == 0) and (func is energy) and (lowername in procedures['energy']):
pass
elif (dertype == 0) and not(func is energy):
pass
else:
raise ValidationError('Requested method \'name\' %s and derivative level \'dertype\' %s are not available.'
% (lowername, dertype))
# no analytic derivatives for scf_type cd
if psi4.get_option('SCF', 'SCF_TYPE') == 'CD':
if (dertype == 1):
raise ValidationError('No analytic derivatives for SCF_TYPE CD.')
# Make sure the molecule the user provided is the active one
if ('molecule' in kwargs):
activate(kwargs['molecule'])
del kwargs['molecule']
molecule = psi4.get_active_molecule()
molecule.update_geometry()
psi4.set_global_option('BASIS', psi4.get_global_option('BASIS'))
# S/R: Mode of operation- whether finite difference opt run in one job or files farmed out
opt_mode = 'continuous'
if ('mode' in kwargs) and (dertype == 0):
opt_mode = kwargs['mode']
if (opt_mode.lower() == 'continuous'):
pass
elif (opt_mode.lower() == 'sow'):
pass
elif (opt_mode.lower() == 'reap'):
if('linkage' in kwargs):
opt_linkage = kwargs['linkage']
else:
raise ValidationError('Optimize execution mode \'reap\' requires a linkage option.')
else:
raise ValidationError('Optimize execution mode \'%s\' not valid.' % (opt_mode))
# Set method-dependent scf convergence criteria (test on procedures['energy'] since that's guaranteed)
if not psi4.has_option_changed('SCF', 'E_CONVERGENCE'):
if procedures['energy'][lowername] == run_scf or procedures['energy'][lowername] == run_dft:
psi4.set_local_option('SCF', 'E_CONVERGENCE', 8)
else:
psi4.set_local_option('SCF', 'E_CONVERGENCE', 10)
if not psi4.has_option_changed('SCF', 'D_CONVERGENCE'):
if procedures['energy'][lowername] == run_scf or procedures['energy'][lowername] == run_dft:
psi4.set_local_option('SCF', 'D_CONVERGENCE', 8)
else:
psi4.set_local_option('SCF', 'D_CONVERGENCE', 10)
# Set post-scf convergence criteria (global will cover all correlated modules)
if not psi4.has_global_option_changed('E_CONVERGENCE'):
if not procedures['energy'][lowername] == run_scf and not procedures['energy'][lowername] == run_dft:
psi4.set_global_option('E_CONVERGENCE', 8)
# Does dertype indicate an analytic procedure both exists and is wanted?
if (dertype == 1):
# Nothing to it but to do it. Gradient information is saved
# into the current reference wavefunction
procedures['gradient'][lowername](lowername, **kwargs)
if 'mode' in kwargs and kwargs['mode'].lower() == 'sow':
raise ValidationError('Optimize execution mode \'sow\' not valid for analytic gradient calculation.')
psi4.wavefunction().energy()
optstash.restore()
return psi4.get_variable('CURRENT ENERGY')
else:
# If not, perform finite difference of energies
opt_iter = 1
if ('opt_iter' in kwargs):
opt_iter = kwargs['opt_iter']
if opt_iter == 1:
print('Performing finite difference calculations')
# Obtain list of displacements
displacements = psi4.fd_geoms_1_0()
ndisp = len(displacements)
# This version is pretty dependent on the reference geometry being last (as it is now)
print(' %d displacements needed ...' % (ndisp), end="")
energies = []
# S/R: Write instructions for sow/reap procedure to output file and reap input file
if (opt_mode.lower() == 'sow'):
instructionsO = """\n The optimization sow/reap procedure has been selected through mode='sow'. In addition\n"""
instructionsO += """ to this output file (which contains no quantum chemical calculations), this job\n"""
instructionsO += """ has produced a number of input files (OPT-%s-*.in) for individual components\n""" % (str(opt_iter))
instructionsO += """ and a single input file (OPT-master.in) with an optimize(mode='reap') command.\n"""
instructionsO += """ These files may look very peculiar since they contain processed and pickled python\n"""
instructionsO += """ rather than normal input. Follow the instructions in OPT-master.in to continue.\n\n"""
instructionsO += """ Alternatively, a single-job execution of the gradient may be accessed through\n"""
instructionsO += """ the optimization wrapper option mode='continuous'.\n\n"""
psi4.print_out(instructionsO)
instructionsM = """\n# Follow the instructions below to carry out this optimization cycle.\n#\n"""
instructionsM += """# (1) Run all of the OPT-%s-*.in input files on any variety of computer architecture.\n""" % (str(opt_iter))
instructionsM += """# The output file names must be as given below.\n#\n"""
for rgt in range(ndisp):
pre = 'OPT-' + str(opt_iter) + '-' + str(rgt + 1)
instructionsM += """# psi4 -i %-27s -o %-27s\n""" % (pre + '.in', pre + '.out')
instructionsM += """#\n# (2) Gather all the resulting output files in a directory. Place input file\n"""
instructionsM += """# OPT-master.in into that directory and run it. The job will be minimal in\n"""
instructionsM += """# length and give summary results for the gradient step in its output file.\n#\n"""
if opt_iter == 1:
instructionsM += """# psi4 -i %-27s -o %-27s\n#\n""" % ('OPT-master.in', 'OPT-master.out')
else:
instructionsM += """# psi4 -a -i %-27s -o %-27s\n#\n""" % ('OPT-master.in', 'OPT-master.out')
instructionsM += """# After each optimization iteration, the OPT-master.in file is overwritten so return here\n"""
instructionsM += """# for new instructions. With the use of the psi4 -a flag, OPT-master.out is not\n"""
instructionsM += """# overwritten and so maintains a history of the job. To use the (binary) optimizer\n"""
instructionsM += """# data file to accelerate convergence, the OPT-master jobs must run on the same computer.\n\n"""
fmaster = open('OPT-master.in', 'w')
fmaster.write('# This is a psi4 input file auto-generated from the gradient() wrapper.\n\n')
fmaster.write(p4util.format_molecule_for_input(molecule))
fmaster.write(p4util.format_options_for_input())
p4util.format_kwargs_for_input(fmaster, 2, **kwargs)
fmaster.write("""%s('%s', **kwargs)\n\n""" % (optimize.__name__, lowername))
fmaster.write(instructionsM)
fmaster.close()
for n, displacement in enumerate(displacements):
rfile = 'OPT-%s-%s' % (opt_iter, n + 1)
#rfile = 'OPT-fd-%s' % (n + 1)
# Build string of title banner
banners = ''
banners += """psi4.print_out('\\n')\n"""
banners += """p4util.banner(' Gradient %d Computation: Displacement %d ')\n""" % (opt_iter, n + 1)
banners += """psi4.print_out('\\n')\n\n"""
if (opt_mode.lower() == 'continuous'):
# Print information to output.dat
psi4.print_out('\n')
p4util.banner('Loading displacement %d of %d' % (n + 1, ndisp))
# Print information to the screen
print(' %d' % (n + 1), end="")
if (n + 1) == ndisp:
print('\n', end="")
# Load in displacement into the active molecule
psi4.get_active_molecule().set_geometry(displacement)
# Perform the energy calculation
#E = func(lowername, **kwargs)
func(lowername, **kwargs)
E = psi4.get_variable('CURRENT ENERGY')
#E = func(**kwargs)
# Save the energy
energies.append(E)
# S/R: Write each displaced geometry to an input file
elif (opt_mode.lower() == 'sow'):
psi4.get_active_molecule().set_geometry(displacement)
# S/R: Prepare molecule, options, and kwargs
freagent = open('%s.in' % (rfile), 'w')
freagent.write('# This is a psi4 input file auto-generated from the gradient() wrapper.\n\n')
freagent.write(p4util.format_molecule_for_input(molecule))
freagent.write(p4util.format_options_for_input())
p4util.format_kwargs_for_input(freagent, **kwargs)
# S/R: Prepare function call and energy save
freagent.write("""electronic_energy = %s('%s', **kwargs)\n\n""" % (func.__name__, lowername))
freagent.write("""psi4.print_out('\\nGRADIENT RESULT: computation %d for item %d """ % (os.getpid(), n + 1))
freagent.write("""yields electronic energy %20.12f\\n' % (electronic_energy))\n\n""")
freagent.close()
# S/R: Read energy from each displaced geometry output file and save in energies array
elif (opt_mode.lower() == 'reap'):
exec(banners)
psi4.set_variable('NUCLEAR REPULSION ENERGY', molecule.nuclear_repulsion_energy())
energies.append(p4util.extract_sowreap_from_output(rfile, 'GRADIENT', n, opt_linkage, True))
# S/R: Quit sow after writing files
if (opt_mode.lower() == 'sow'):
optstash.restore()
return 0.0
if (opt_mode.lower() == 'reap'):
psi4.set_variable('CURRENT ENERGY', energies[-1])
# Obtain the gradient
psi4.fd_1_0(energies)
# The last item in the list is the reference energy, return it
optstash.restore()
return energies[-1]
def property(name, **kwargs):
    r"""Function to compute various properties.

    :aliases: prop()

    :returns: the value returned by the underlying property procedure
        (presumably the current energy — confirm against the individual
        procedures in ``procedures['property']``).

    .. caution:: Some features are not yet implemented. Buy a developer a coffee.

       - This function at present handles property functions only for CC methods.
         Consult the keywords sections for other modules for further property capabilities.

    +-------------------------+---------------------------------------------------------------------------------------+
    | name                    | calls method                                                                          |
    +=========================+=======================================================================================+
    | scf                     | Self-consistent field method(s)                                                       |
    +-------------------------+---------------------------------------------------------------------------------------+
    | cc2                     | 2nd-order approximate CCSD                                                            |
    +-------------------------+---------------------------------------------------------------------------------------+
    | ccsd                    | coupled cluster singles and doubles (CCSD)                                            |
    +-------------------------+---------------------------------------------------------------------------------------+
    | df-mp2                  | MP2 with density fitting                                                              |
    +-------------------------+---------------------------------------------------------------------------------------+
    | eom-cc2                 | 2nd-order approximate EOM-CCSD                                                        |
    +-------------------------+---------------------------------------------------------------------------------------+
    | eom-ccsd                | equation-of-motion coupled cluster singles and doubles (EOM-CCSD)                     |
    +-------------------------+---------------------------------------------------------------------------------------+

    :type name: string
    :param name: ``'ccsd'`` || etc.

        First argument, usually unlabeled. Indicates the computational method
        to be applied to the system.

    :type properties: array of strings
    :param properties: |dl| ``[]`` |dr| || ``['rotation', 'polarizability', 'oscillator_strength', 'roa']`` || etc.

        Indicates which properties should be computed.

    :type molecule: :ref:`molecule <op_py_molecule>`
    :param molecule: ``h2o`` || etc.

        The target molecule, if not the last molecule defined.

    :examples:

    >>> # [1] Optical rotation calculation
    >>> property('cc2', properties=['rotation'])

    """
    lowername = name.lower()
    kwargs = p4util.kwargs_lower(kwargs)

    # Stash user convergence options so they can be restored before returning.
    optstash = p4util.OptionsState(
        ['SCF', 'E_CONVERGENCE'],
        ['SCF', 'D_CONVERGENCE'],
        ['E_CONVERGENCE'])

    # Make sure the molecule the user provided is the active one
    if ('molecule' in kwargs):
        activate(kwargs['molecule'])
        del kwargs['molecule']
    molecule = psi4.get_active_molecule()
    molecule.update_geometry()
    #psi4.set_global_option('BASIS', psi4.get_global_option('BASIS'))

    # Allow specification of methods to arbitrary order
    lowername, level = parse_arbitrary_order(lowername)
    if level:
        kwargs['level'] = level

    try:
        # Set method-dependent scf convergence criteria (test on procedures['energy'] since that's guaranteed)
        # SCF properties have been set as 6/5 so as to match those
        # run normally through OEProp so subject to change
        if not psi4.has_option_changed('SCF', 'E_CONVERGENCE'):
            if procedures['energy'][lowername] == run_scf or procedures['energy'][lowername] == run_dft:
                psi4.set_local_option('SCF', 'E_CONVERGENCE', 6)
            else:
                psi4.set_local_option('SCF', 'E_CONVERGENCE', 10)
        if not psi4.has_option_changed('SCF', 'D_CONVERGENCE'):
            if procedures['energy'][lowername] == run_scf or procedures['energy'][lowername] == run_dft:
                psi4.set_local_option('SCF', 'D_CONVERGENCE', 6)
            else:
                psi4.set_local_option('SCF', 'D_CONVERGENCE', 10)

        # Set post-scf convergence criteria (global will cover all correlated modules)
        if not psi4.has_global_option_changed('E_CONVERGENCE'):
            if not procedures['energy'][lowername] == run_scf and not procedures['energy'][lowername] == run_dft:
                psi4.set_global_option('E_CONVERGENCE', 8)

        # The dict lookups above double as method validation: an unknown
        # method name raises KeyError, converted below.
        returnvalue = procedures['property'][lowername](lowername, **kwargs)
    except KeyError:
        # NOTE(review): a KeyError raised from inside the procedure call is
        # also caught here and misreported as "method not available".
        raise ValidationError('Property method %s not available.' % (lowername))

    optstash.restore()
    return returnvalue
## Aliases ##
# prop() is a shorthand alias for property().
prop = property
def optimize(name, **kwargs):
    r"""Function to perform a geometry optimization.

    :aliases: opt()

    :returns: (*float*) Total electronic energy of optimized structure in Hartrees.

    :PSI variables:

    .. hlist::
       :columns: 1

       * :psivar:`CURRENT ENERGY <CURRENTENERGY>`

    .. note:: Analytic gradients are available for all methods in the table
        below. Optimizations with other methods in the energy table proceed
        by finite differences.

    .. _`table:grad_gen`:

    +-------------------------+---------------------------------------------------------------------------------------+
    | name                    | calls method                                                                          |
    +=========================+=======================================================================================+
    | scf                     | Hartree--Fock (HF) or density functional theory (DFT) :ref:`[manual] <sec:scf>`       |
    +-------------------------+---------------------------------------------------------------------------------------+
    | dcft                    | density cumulant functional theory :ref:`[manual] <sec:dcft>`                         |
    +-------------------------+---------------------------------------------------------------------------------------+
    | mp2                     | 2nd-order Moller-Plesset perturbation theory (MP2) :ref:`[manual] <sec:dfmp2>`        |
    +-------------------------+---------------------------------------------------------------------------------------+
    | df-mp2                  | MP2 with density fitting :ref:`[manual] <sec:dfmp2>`                                  |
    +-------------------------+---------------------------------------------------------------------------------------+
    | conv-mp2                | conventional MP2 (non-density-fitting) :ref:`[manual] <sec:convocc>`                  |
    +-------------------------+---------------------------------------------------------------------------------------+
    | mp2.5                   | MP2.5 :ref:`[manual] <sec:convocc>`                                                   |
    +-------------------------+---------------------------------------------------------------------------------------+
    | mp3                     | third-order MP perturbation theory :ref:`[manual] <sec:convocc>`                      |
    +-------------------------+---------------------------------------------------------------------------------------+
    | omp2                    | orbital-optimized second-order MP perturbation theory :ref:`[manual] <sec:occ>`       |
    +-------------------------+---------------------------------------------------------------------------------------+
    | omp2.5                  | orbital-optimized MP2.5 :ref:`[manual] <sec:occ>`                                     |
    +-------------------------+---------------------------------------------------------------------------------------+
    | omp3                    | orbital-optimized third-order MP perturbation theory :ref:`[manual] <sec:occ>`        |
    +-------------------------+---------------------------------------------------------------------------------------+
    | ocepa                   | orbital-optimized coupled electron pair approximation :ref:`[manual] <sec:occ>`       |
    +-------------------------+---------------------------------------------------------------------------------------+
    | cepa0                   | coupled electron pair approximation(0) :ref:`[manual] <sec:convocc>`                  |
    +-------------------------+---------------------------------------------------------------------------------------+
    | ccsd                    | coupled cluster singles and doubles (CCSD) :ref:`[manual] <sec:cc>`                   |
    +-------------------------+---------------------------------------------------------------------------------------+
    | ccsd(t)                 | CCSD with perturbative triples (CCSD(T)) :ref:`[manual] <sec:cc>`                     |
    +-------------------------+---------------------------------------------------------------------------------------+
    | eom-ccsd                | equation of motion (EOM) CCSD :ref:`[manual] <sec:eomcc>`                             |
    +-------------------------+---------------------------------------------------------------------------------------+

    .. include:: autodoc_dft_opt.rst

    .. warning:: Optimizations where the molecule is specified in Z-matrix format
       with dummy atoms will result in the geometry being converted to a Cartesian representation.

    :type name: string
    :param name: ``'scf'`` || ``'df-mp2'`` || ``'ci5'`` || etc.

        First argument, usually unlabeled. Indicates the computational method
        to be applied to the database. May be any valid argument to
        :py:func:`~driver.energy`.

    :type func: :ref:`function <op_py_function>`
    :param func: |dl| ``gradient`` |dr| || ``energy`` || ``cbs``

        Indicates the type of calculation to be performed on the molecule.
        The default dertype accesses ``'gradient'`` or ``'energy'``, while
        ``'cbs'`` performs a multistage finite difference calculation.
        If a nested series of python functions is intended (see :ref:`sec:intercalls`),
        use keyword ``opt_func`` instead of ``func``.

    :type mode: string
    :param mode: |dl| ``'continuous'`` |dr| || ``'sow'`` || ``'reap'``

        For a finite difference of energies optimization, indicates whether
        the calculations required to complete the
        optimization are to be run in one file (``'continuous'``) or are to be
        farmed out in an embarrassingly parallel fashion
        (``'sow'``/``'reap'``). For the latter, run an initial job with
        ``'sow'`` and follow instructions in its output file.

    :type dertype: :ref:`dertype <op_py_dertype>`
    :param dertype: ``'gradient'`` || ``'energy'``

        Indicates whether analytic (if available) or finite difference
        optimization is to be performed.

    :type molecule: :ref:`molecule <op_py_molecule>`
    :param molecule: ``h2o`` || etc.

        The target molecule, if not the last molecule defined.

    :examples:

    >>> # [1] Analytic scf optimization
    >>> optimize('scf')

    >>> # [2] Finite difference mp5 optimization
    >>> opt('mp5')

    >>> # [3] Forced finite difference ccsd optimization
    >>> optimize('ccsd', dertype='energy')

    """
    lowername = name.lower()
    kwargs = p4util.kwargs_lower(kwargs)

    # How often a full Hessian is recomputed during the optimization.
    full_hess_every = psi4.get_local_option('OPTKING', 'FULL_HESS_EVERY')
    steps_since_last_hessian = 0

    # are we in sow/reap mode?
    isSowReap = False
    if ('mode' in kwargs) and (kwargs['mode'].lower() == 'sow'):
        isSowReap = True
    if ('mode' in kwargs) and (kwargs['mode'].lower() == 'reap'):
        isSowReap = True

    # Stash the SCF guess option; it is set to READ on later iterations.
    optstash = p4util.OptionsState(
        ['SCF', 'GUESS'])

    n = 1
    if ('opt_iter' in kwargs):
        n = kwargs['opt_iter']

    psi4.get_active_molecule().update_geometry()
    mol = psi4.get_active_molecule()
    mol.update_geometry()
    # Record the initial point group; a symmetry change mid-optimization
    # invalidates symmetry-dependent input (e.g. DOCC) and must abort.
    initial_sym = mol.schoenflies_symbol()
    while n <= psi4.get_global_option('GEOM_MAXITER'):
        mol = psi4.get_active_molecule()
        mol.update_geometry()
        current_sym = mol.schoenflies_symbol()
        if initial_sym != current_sym:
            raise Exception("Point group changed! You should restart using " +\
                            "the last geometry in the output, after carefully " +\
                            "making sure all symmetry-dependent information in " +\
                            "the input, such as DOCC, is correct.")
        kwargs['opt_iter'] = n

        # Use orbitals from previous iteration as a guess
        if (n > 1) and (not isSowReap):
            psi4.set_local_option('SCF', 'GUESS', 'READ')

        # Compute the gradient
        thisenergy = gradient(name, **kwargs)

        # S/R: Quit after getting new displacements or if forming gradient fails
        if ('mode' in kwargs) and (kwargs['mode'].lower() == 'sow'):
            return 0.0
        if ('mode' in kwargs) and (kwargs['mode'].lower() == 'reap') and (thisenergy == 0.0):
            return 0.0

        # S/R: Move opt data file from last pass into namespace for this pass
        if ('mode' in kwargs) and (kwargs['mode'].lower() == 'reap') and (n != 0):
            psi4.IOManager.shared_object().set_specific_retention(1, True)
            psi4.IOManager.shared_object().set_specific_path(1, './')
            if 'opt_datafile' in kwargs:
                restartfile = kwargs.pop('opt_datafile')
                if(psi4.me() == 0):
                    shutil.copy(restartfile, p4util.get_psifile(1))

        # compute Hessian as requested; frequency wipes out gradient so stash it
        if ((full_hess_every > -1) and (n == 1)) or (steps_since_last_hessian + 1 == full_hess_every):
            G = psi4.get_gradient()
            psi4.IOManager.shared_object().set_specific_retention(1, True)
            psi4.IOManager.shared_object().set_specific_path(1, './')
            frequencies(name, **kwargs)
            steps_since_last_hessian = 0
            psi4.set_gradient(G)
            psi4.set_global_option('CART_HESS_READ', True)
        elif ((full_hess_every == -1) and (psi4.get_global_option('CART_HESS_READ')) and (n == 1)):
            pass
            # Do nothing; user said to read existing hessian once
        else:
            psi4.set_global_option('CART_HESS_READ', False)
            steps_since_last_hessian += 1
        # print 'cart_hess_read', psi4.get_global_option('CART_HESS_READ')

        # Take step
        if psi4.optking() == psi4.PsiReturnType.EndLoop:
            print('Optimizer: Optimization complete!')
            psi4.print_out('\n Final optimized geometry and variables:\n')
            psi4.get_active_molecule().print_in_input_format()
            # Check if user wants to see the intcos; if so, don't delete them.
            if (psi4.get_option('OPTKING', 'INTCOS_GENERATE_EXIT') == False):
                psi4.opt_clean()
            psi4.clean()

            # S/R: Clean up opt input file
            if ('mode' in kwargs) and (kwargs['mode'].lower() == 'reap'):
                fmaster = open('OPT-master.in', 'w')
                fmaster.write('# This is a psi4 input file auto-generated from the gradient() wrapper.\n\n')
                fmaster.write('# Optimization complete!\n\n')
                fmaster.close()

            optstash.restore()
            return thisenergy

        psi4.print_out('\n Structure for next step:\n')
        psi4.get_active_molecule().print_in_input_format()

        # S/R: Preserve opt data file for next pass and switch modes to get new displacements
        if ('mode' in kwargs) and (kwargs['mode'].lower() == 'reap'):
            kwargs['opt_datafile'] = p4util.get_psifile(1)
            kwargs['mode'] = 'sow'

        n += 1

    psi4.print_out('\tOptimizer: Did not converge!')
    optstash.restore()
    return 0.0
## Aliases ##
# opt() is a shorthand alias for optimize().
opt = optimize
def parse_arbitrary_order(name):
    r"""Split a method string *name* into a family (e.g. CI or MRCC) and a
    specific excitation level (e.g. 4 for CISDTQ or MRCCSDTQ).

    Returns a 2-tuple. For MRCC methods the second element is a dict of
    MRCC control parameters; for mp/zapt/ci methods it is the integer
    level; otherwise the name passes through unchanged with level None.
    """
    namelower = name.lower()
    if namelower.startswith('mrcc'):
        # Strip the leading 'mrcc' to leave the excitation part, e.g. 'sdt(q)'.
        ccfullname = namelower[4:]
        # A negative order indicates perturbative method
        methods = {
            'sd'          : { 'method' : 1, 'order' :  2, 'fullname' : 'CCSD'         },
            'sdt'         : { 'method' : 1, 'order' :  3, 'fullname' : 'CCSDT'        },
            'sdtq'        : { 'method' : 1, 'order' :  4, 'fullname' : 'CCSDTQ'       },
            'sdtqp'       : { 'method' : 1, 'order' :  5, 'fullname' : 'CCSDTQP'      },
            'sdtqph'      : { 'method' : 1, 'order' :  6, 'fullname' : 'CCSDTQPH'     },
            'sd(t)'       : { 'method' : 3, 'order' : -3, 'fullname' : 'CCSD(T)'      },
            'sdt(q)'      : { 'method' : 3, 'order' : -4, 'fullname' : 'CCSDT(Q)'     },
            'sdtq(p)'     : { 'method' : 3, 'order' : -5, 'fullname' : 'CCSDTQ(P)'    },
            'sdtqp(h)'    : { 'method' : 3, 'order' : -6, 'fullname' : 'CCSDTQP(H)'   },
            'sd(t)_l'     : { 'method' : 4, 'order' : -3, 'fullname' : 'CCSD(T)_L'    },
            'sdt(q)_l'    : { 'method' : 4, 'order' : -4, 'fullname' : 'CCSDT(Q)_L'   },
            'sdtq(p)_l'   : { 'method' : 4, 'order' : -5, 'fullname' : 'CCSDTQ(P)_L'  },
            'sdtqp(h)_l'  : { 'method' : 4, 'order' : -6, 'fullname' : 'CCSDTQP(H)_L' },
            'sdt-1a'      : { 'method' : 5, 'order' :  3, 'fullname' : 'CCSDT-1a'     },
            'sdtq-1a'     : { 'method' : 5, 'order' :  4, 'fullname' : 'CCSDTQ-1a'    },
            'sdtqp-1a'    : { 'method' : 5, 'order' :  5, 'fullname' : 'CCSDTQP-1a'   },
            'sdtqph-1a'   : { 'method' : 5, 'order' :  6, 'fullname' : 'CCSDTQPH-1a'  },
            'sdt-1b'      : { 'method' : 6, 'order' :  3, 'fullname' : 'CCSDT-1b'     },
            'sdtq-1b'     : { 'method' : 6, 'order' :  4, 'fullname' : 'CCSDTQ-1b'    },
            'sdtqp-1b'    : { 'method' : 6, 'order' :  5, 'fullname' : 'CCSDTQP-1b'   },
            'sdtqph-1b'   : { 'method' : 6, 'order' :  6, 'fullname' : 'CCSDTQPH-1b'  },
            '2'           : { 'method' : 7, 'order' :  2, 'fullname' : 'CC2'          },
            '3'           : { 'method' : 7, 'order' :  3, 'fullname' : 'CC3'          },
            '4'           : { 'method' : 7, 'order' :  4, 'fullname' : 'CC4'          },
            '5'           : { 'method' : 7, 'order' :  5, 'fullname' : 'CC5'          },
            '6'           : { 'method' : 7, 'order' :  6, 'fullname' : 'CC6'          },
            'sdt-3'       : { 'method' : 8, 'order' :  3, 'fullname' : 'CCSDT-3'      },
            'sdtq-3'      : { 'method' : 8, 'order' :  4, 'fullname' : 'CCSDTQ-3'     },
            'sdtqp-3'     : { 'method' : 8, 'order' :  5, 'fullname' : 'CCSDTQP-3'    },
            'sdtqph-3'    : { 'method' : 8, 'order' :  6, 'fullname' : 'CCSDTQPH-3'   }
        }
        if ccfullname not in methods:
            raise ValidationError('MRCC method \'%s\' invalid.' % (namelower))
        return 'mrcc', methods[ccfullname]
    # Single regex both matches and captures the stump/level split.
    decompose = re.match(r'^([a-z]+)(\d+)$', namelower)
    if decompose is None:
        return namelower, None
    namestump = decompose.group(1)
    namelevel = int(decompose.group(2))
    if namestump not in ('mp', 'zapt', 'ci'):
        return namelower, None
    # Let 'mp2' and 'mp3' pass through as themselves to occ module
    if namestump == 'mp' and namelevel in (2, 3):
        return namelower, None
    # Let 'mp4' be redirected to fnocc module if rhf
    if namestump == 'mp' and namelevel == 4:
        if psi4.get_option('SCF', 'REFERENCE') == 'RHF':
            return 'fnocc-mp', 4
        return 'detci-mp', 4
    # Otherwise return method and order
    return namestump, namelevel
def hessian(name, **kwargs):
    r"""Function complementary to :py:func:`~frequency`. Computes force
    constants, deciding analytic, finite difference of gradients, or
    finite difference of energies.

    Results are deposited into Psi4 globals (e.g. 'CURRENT ENERGY') and the
    wavefunction rather than returned. In sow mode, returns None after the
    displacement input files have been written. Recognized kwargs include
    ``dertype``, ``func``/``freq_func``, ``mode`` ('continuous'/'sow'/'reap'),
    ``linkage``, ``irrep``, and ``molecule``.
    """
    lowername = name.lower()
    kwargs = p4util.kwargs_lower(kwargs)
    # Assume analytic second derivatives until the precedence checks below demote it.
    dertype = 2
    # Stash convergence options so they can be restored on every exit path.
    optstash = p4util.OptionsState(
        ['SCF', 'E_CONVERGENCE'],
        ['SCF', 'D_CONVERGENCE'],
        ['E_CONVERGENCE'])
    # Order of precedence:
    #    1. Default for wavefunction
    #    2. Value obtained from kwargs, if user changed it
    #    3. If user provides a custom 'func' use that
    # Allow specification of methods to arbitrary order
    lowername, level = parse_arbitrary_order(lowername)
    if level:
        kwargs['level'] = level
    # 1. set the default to that of the provided name
    if lowername in procedures['hessian']:
        dertype = 2
    elif lowername in procedures['gradient']:
        dertype = 1
        func = gradient
    elif lowername in procedures['energy']:
        dertype = 0
        func = energy
    # 2. Check if the user passes dertype into this function
    if 'dertype' in kwargs:
        freq_dertype = kwargs['dertype']
        if der0th.match(str(freq_dertype)):
            dertype = 0
            func = energy
        elif der1st.match(str(freq_dertype)):
            dertype = 1
            func = gradient
        elif der2nd.match(str(freq_dertype)):
            dertype = 2
        else:
            raise ValidationError('Requested derivative level \'dertype\' %s not valid for helper function frequency.' % (freq_dertype))
    # 3. if the user provides a custom function THAT takes precedence
    if ('freq_func' in kwargs) or ('func' in kwargs):
        if ('func' in kwargs):
            kwargs['freq_func'] = kwargs['func']
            del kwargs['func']
        dertype = 0
        func = kwargs['freq_func']
    # Summary validation: the (dertype, func, availability) triple must be coherent.
    if (dertype == 2) and (lowername in procedures['hessian']):
        pass
    elif (dertype == 1) and (func is gradient) and (lowername in procedures['gradient']):
        pass
    elif (dertype == 1) and not(func is gradient):
        pass
    elif (dertype == 0) and (func is energy) and (lowername in procedures['energy']):
        pass
    elif (dertype == 0) and not(func is energy):
        pass
    else:
        raise ValidationError('Requested method \'name\' %s and derivative level \'dertype\' %s are not available.'
            % (lowername, dertype))
    # Make sure the molecule the user provided is the active one
    if ('molecule' in kwargs):
        activate(kwargs['molecule'])
        del kwargs['molecule']
    molecule = psi4.get_active_molecule()
    molecule.update_geometry()
    # Re-assert BASIS so the option is registered as set for the active molecule.
    psi4.set_global_option('BASIS', psi4.get_global_option('BASIS'))
    # S/R: Mode of operation- whether finite difference opt run in one job or files farmed out
    freq_mode = 'continuous'
    if ('mode' in kwargs) and ((dertype == 0) or (dertype == 1)):
        freq_mode = kwargs['mode']
        if (freq_mode.lower() == 'continuous'):
            pass
        elif (freq_mode.lower() == 'sow'):
            pass
        elif (freq_mode.lower() == 'reap'):
            if('linkage' in kwargs):
                freq_linkage = kwargs['linkage']
            else:
                raise ValidationError('Frequency execution mode \'reap\' requires a linkage option.')
        else:
            raise ValidationError('Frequency execution mode \'%s\' not valid.' % (freq_mode))
    # Set method-dependent scf convergence criteria (test on procedures['energy'] since that's guaranteed)
    if not psi4.has_option_changed('SCF', 'E_CONVERGENCE'):
        if procedures['energy'][lowername] == run_scf or procedures['energy'][lowername] == run_dft:
            psi4.set_local_option('SCF', 'E_CONVERGENCE', 8)
        else:
            psi4.set_local_option('SCF', 'E_CONVERGENCE', 10)
    if not psi4.has_option_changed('SCF', 'D_CONVERGENCE'):
        if procedures['energy'][lowername] == run_scf or procedures['energy'][lowername] == run_dft:
            psi4.set_local_option('SCF', 'D_CONVERGENCE', 8)
        else:
            psi4.set_local_option('SCF', 'D_CONVERGENCE', 10)
    # Set post-scf convergence criteria (global will cover all correlated modules)
    if not psi4.has_global_option_changed('E_CONVERGENCE'):
        if not procedures['energy'][lowername] == run_scf and not procedures['energy'][lowername] == run_dft:
            psi4.set_global_option('E_CONVERGENCE', 8)
    # Select certain irreps
    if 'irrep' in kwargs:
        irrep = parse_cotton_irreps(kwargs['irrep']) - 1  # externally, A1 irrep is 1, internally 0
    else:
        irrep = -1  # -1 implies do all irreps
    # Does an analytic procedure exist for the requested method?
    if (dertype == 2):
        # We have the desired method. Do it.
        procedures['hessian'][lowername](lowername, **kwargs)
        optstash.restore()
        if 'mode' in kwargs and kwargs['mode'].lower() == 'sow':
            raise ValidationError('Frequency execution mode \'sow\' not valid for analytic frequency calculation.')
        # TODO: check that current energy's being set to the right figure when this code is actually used
        psi4.set_variable('CURRENT ENERGY', psi4.wavefunction().energy())
        # TODO: return hessian matrix
    elif (dertype == 1):
        # Ok, we're doing frequencies by gradients
        print('Performing finite difference by gradient calculations')
        func = procedures['gradient'][lowername]
        if 'mode' in kwargs and kwargs['mode'].lower() == 'sow':
            raise ValidationError('Frequency execution mode \'sow\' not yet implemented for finite difference of analytic gradient calculation.')
        # Obtain list of displacements
        displacements = psi4.fd_geoms_freq_1(irrep)
        # Freeze the frame so displaced geometries are not re-oriented/re-interpreted.
        molecule.reinterpret_coordentry(False)
        molecule.fix_orientation(True)
        # Make a note of the undisplaced molecule's symmetry
        psi4.set_parent_symmetry(molecule.schoenflies_symbol())
        ndisp = len(displacements)
        print(' %d displacements needed.' % ndisp)
        #print displacements to output.dat
        #for n, displacement in enumerate(displacements):
        #    displacement.print_out();
        gradients = []
        for n, displacement in enumerate(displacements):
            # Print information to output.dat
            psi4.print_out('\n')
            p4util.banner('Loading displacement %d of %d' % (n + 1, ndisp))
            # Print information to the screen
            print(' %d' % (n + 1), end="")
            if (n + 1) == ndisp:
                print('\n', end="")
            sys.stdout.flush()
            # Load in displacement into the active molecule (xyz coordinates only)
            molecule.set_geometry(displacement)
            # Perform the gradient calculation
            func(lowername, **kwargs)
            # Save the gradient
            G = psi4.get_gradient()
            gradients.append(G)
            # clean may be necessary when changing irreps of displacements
            psi4.clean()
        psi4.fd_freq_1(gradients, irrep)
        print(' Computation complete.')
        # Clear the "parent" symmetry now
        psi4.set_parent_symmetry("")
        # TODO: These need to be restored to the user specified setting
        psi4.get_active_molecule().fix_orientation(False)
        # But not this one, it always goes back to True
        psi4.get_active_molecule().reinterpret_coordentry(True)
        optstash.restore()
        # TODO: add return statement of hessian matrix
        # TODO: set current energy to un-displaced energy
    else:
        # If not, perform finite difference of energies
        print('Performing finite difference calculations by energies')
        # Set method-dependent scf convergence criteria (test on procedures['energy'] since that's guaranteed)
        # NOTE(review): options are restored then immediately re-tightened here,
        # with stricter criteria (10/11) than the gradient path -- looks deliberate
        # for findif-by-energies accuracy, but confirm upstream.
        optstash.restore()
        if not psi4.has_option_changed('SCF', 'E_CONVERGENCE'):
            if procedures['energy'][lowername] == run_scf or procedures['energy'][lowername] == run_dft:
                psi4.set_local_option('SCF', 'E_CONVERGENCE', 10)
            else:
                psi4.set_local_option('SCF', 'E_CONVERGENCE', 11)
        if not psi4.has_option_changed('SCF', 'D_CONVERGENCE'):
            if procedures['energy'][lowername] == run_scf or procedures['energy'][lowername] == run_dft:
                psi4.set_local_option('SCF', 'D_CONVERGENCE', 10)
            else:
                psi4.set_local_option('SCF', 'D_CONVERGENCE', 11)
        # Set post-scf convergence criteria (global will cover all correlated modules)
        if not psi4.has_global_option_changed('E_CONVERGENCE'):
            if not procedures['energy'][lowername] == run_scf and not procedures['energy'][lowername] == run_dft:
                psi4.set_global_option('E_CONVERGENCE', 10)
        # Obtain list of displacements
        displacements = psi4.fd_geoms_freq_0(irrep)
        molecule.fix_orientation(True)
        molecule.reinterpret_coordentry(False)
        # Make a note of the undisplaced molecule's symmetry
        psi4.set_parent_symmetry(molecule.schoenflies_symbol())
        ndisp = len(displacements)
        # This version is pretty dependent on the reference geometry being last (as it is now)
        print(' %d displacements needed.' % ndisp)
        energies = []
        # S/R: Write instructions for sow/reap procedure to output file and reap input file
        if (freq_mode.lower() == 'sow'):
            instructionsO = """\n# The frequency sow/reap procedure has been selected through mode='sow'. In addition\n"""
            instructionsO += """# to this output file (which contains no quantum chemical calculations), this job\n"""
            instructionsO += """# has produced a number of input files (FREQ-*.in) for individual components\n"""
            instructionsO += """# and a single input file (FREQ-master.in) with a frequency(mode='reap') command.\n"""
            instructionsO += """# These files may look very peculiar since they contain processed and pickled python\n"""
            instructionsO += """# rather than normal input. Follow the instructions below (repeated in FREQ-master.in)\n"""
            instructionsO += """# to continue.\n#\n"""
            instructionsO += """# Alternatively, a single-job execution of the hessian may be accessed through\n"""
            instructionsO += """# the frequency wrapper option mode='continuous'.\n#\n"""
            psi4.print_out(instructionsO)
            instructionsM = """\n# Follow the instructions below to carry out this frequency computation.\n#\n"""
            instructionsM += """# (1) Run all of the FREQ-*.in input files on any variety of computer architecture.\n"""
            instructionsM += """# The output file names must be as given below (these are the defaults when executed\n"""
            instructionsM += """# as `psi4 FREQ-1.in`, etc.).\n#\n"""
            for rgt in range(ndisp):
                pre = 'FREQ-' + str(rgt + 1)
                instructionsM += """# psi4 -i %-27s -o %-27s\n""" % (pre + '.in', pre + '.out')
            instructionsM += """#\n# (2) Gather all the resulting output files in a directory. Place input file\n"""
            instructionsM += """# FREQ-master.in into that directory and run it. The job will be minimal in\n"""
            instructionsM += """# length and give summary results for the frequency computation in its output file.\n#\n"""
            instructionsM += """# psi4 -i %-27s -o %-27s\n#\n\n""" % ('FREQ-master.in', 'FREQ-master.out')
            fmaster = open('FREQ-master.in', 'w')
            fmaster.write('# This is a psi4 input file auto-generated from the hessian() wrapper.\n\n')
            fmaster.write(p4util.format_molecule_for_input(molecule))
            fmaster.write(p4util.format_options_for_input())
            p4util.format_kwargs_for_input(fmaster, 2, **kwargs)
            fmaster.write("""%s('%s', **kwargs)\n\n""" % (frequency.__name__, lowername))
            fmaster.write(instructionsM)
            fmaster.close()
            psi4.print_out(instructionsM)
        for n, displacement in enumerate(displacements):
            rfile = 'FREQ-%s' % (n + 1)
            # Build string of title banner
            banners = ''
            banners += """psi4.print_out('\\n')\n"""
            banners += """p4util.banner(' Hessian Computation: Energy Displacement %d ')\n""" % (n + 1)
            banners += """psi4.print_out('\\n')\n\n"""
            if (freq_mode.lower() == 'continuous'):
                # Print information to output.dat
                psi4.print_out('\n')
                p4util.banner('Loading displacement %d of %d' % (n + 1, ndisp))
                # Print information to the screen
                print(' %d' % (n + 1), end="")
                if (n + 1) == ndisp:
                    print('\n', end='')
                sys.stdout.flush()
                # Load in displacement into the active molecule
                molecule.set_geometry(displacement)
                # Perform the energy calculation
                func(lowername, **kwargs)
                # Save the energy
                energies.append(psi4.get_variable('CURRENT ENERGY'))
                # clean may be necessary when changing irreps of displacements
                psi4.clean()
            # S/R: Write each displaced geometry to an input file
            elif (freq_mode.lower() == 'sow'):
                molecule.set_geometry(displacement)
                # S/R: Prepare molecule, options, and kwargs
                freagent = open('%s.in' % (rfile), 'w')
                freagent.write('# This is a psi4 input file auto-generated from the gradient() wrapper.\n\n')
                freagent.write(p4util.format_molecule_for_input(molecule))
                freagent.write(p4util.format_options_for_input())
                p4util.format_kwargs_for_input(freagent, **kwargs)
                # S/R: Prepare function call and energy save
                freagent.write("""electronic_energy = %s('%s', **kwargs)\n\n""" % (func.__name__, lowername))
                freagent.write("""psi4.print_out('\\nHESSIAN RESULT: computation %d for item %d """ % (os.getpid(), n + 1))
                freagent.write("""yields electronic energy %20.12f\\n' % (electronic_energy))\n\n""")
                freagent.close()
            # S/R: Read energy from each displaced geometry output file and save in energies array
            elif (freq_mode.lower() == 'reap'):
                exec(banners)
                psi4.set_variable('NUCLEAR REPULSION ENERGY', molecule.nuclear_repulsion_energy())
                energies.append(p4util.extract_sowreap_from_output(rfile, 'HESSIAN', n, freq_linkage, True))
        # S/R: Quit sow after writing files
        if (freq_mode.lower() == 'sow'):
            optstash.restore()
            return None
        # Compute frequencies from the displaced energies; results land in the wavefunction.
        psi4.fd_freq_0(energies, irrep)
        print(' Computation complete.')
        # Clear the "parent" symmetry now
        psi4.set_parent_symmetry("")
        # TODO: These need to be restored to the user specified setting
        psi4.get_active_molecule().fix_orientation(False)
        # But not this one, it always goes back to True
        psi4.get_active_molecule().reinterpret_coordentry(True)
        # Clear the "parent" symmetry now (redundant repeat of the call above; harmless)
        psi4.set_parent_symmetry("")
        # The last item in the list is the reference energy, return it
        optstash.restore()
        psi4.set_variable('CURRENT ENERGY', energies[-1])
    #TODO: return hessian matrix
def frequency(name, **kwargs):
    r"""Function to compute harmonic vibrational frequencies.

    :aliases: frequencies(), freq()

    :returns: (*float*) Total electronic energy in Hartrees.

    .. note:: Analytic hessians are not available. Frequencies will proceed through
        finite differences according to availability of gradients or energies.

    .. caution:: Some features are not yet implemented. Buy a developer a coffee.

       - Implement sow/reap mode for finite difference of gradients. Presently only for findif of energies.

    .. _`table:freq_gen`:

    :type name: string
    :param name: ``'scf'`` || ``'df-mp2'`` || ``'ci5'`` || etc.

        First argument, usually unlabeled. Indicates the computational method
        to be applied to the system.

    :type dertype: :ref:`dertype <op_py_dertype>`
    :param dertype: |dl| ``'hessian'`` |dr| || ``'gradient'`` || ``'energy'``

        Indicates whether analytic (if available- they're not), finite
        difference of gradients (if available) or finite difference of
        energies is to be performed.

    :type mode: string
    :param mode: |dl| ``'continuous'`` |dr| || ``'sow'`` || ``'reap'``

        For a finite difference of energies or gradients frequency, indicates
        whether the calculations required to complete the frequency are to be run
        in one file (``'continuous'``) or are to be farmed out in an
        embarrassingly parallel fashion (``'sow'``/``'reap'``)/ For the latter,
        run an initial job with ``'sow'`` and follow instructions in its output file.

    :type irrep: int or string
    :param irrep: |dl| ``-1`` |dr| || ``1`` || ``'b2'`` || ``'App'`` || etc.

        Indicates which symmetry block (:ref:`Cotton <table:irrepOrdering>` ordering) of vibrational
        frequencies to be computed. ``1``, ``'1'``, or ``'a1'`` represents
        :math:`a_1`, requesting only the totally symmetric modes.
        ``-1`` indicates a full frequency calculation.

    :type molecule: :ref:`molecule <op_py_molecule>`
    :param molecule: ``h2o`` || etc.

        The target molecule, if not the last molecule defined.

    :examples:

    >>> # [1] <example description>
    >>> <example python command>

    >>> # [2] Frequency calculation for b2 modes through finite difference of gradients
    >>> frequencies('scf', dertype=1, irrep=4)

    """
    # Removed dead local: `name.lower()` was computed here but never used;
    # hessian() lowercases the name itself.
    kwargs = p4util.kwargs_lower(kwargs)
    # Compute the hessian (analytic, findif of gradients, or findif of energies)
    hessian(name, **kwargs)
    # S/R: in 'sow' mode only displacement inputs were written, so no
    # thermochemical analysis is possible yet.
    if not (('mode' in kwargs) and (kwargs['mode'].lower() == 'sow')):
        # call thermo module
        psi4.thermo()
    #TODO add return current energy once satisfied that's set to energy at eq, not a findif
    return psi4.get_variable('CURRENT ENERGY')
## Aliases ##
# Shorthands: allow input files to call frequencies(...) or freq(...)
# in place of frequency(...).
frequencies = frequency
freq = frequency
def molden(filename):
    """Dump the current wavefunction's orbital information to *filename*
    in Molden format.
    """
    psi4.MoldenWriter(psi4.wavefunction()).write(filename)
def parse_cotton_irreps(irrep):
    r"""Function to return validated Cotton ordering index from string or integer
    irreducible representation *irrep*, for the active molecule's point group.
    """
    # Irrep labels in Cotton order per point group; the 1-based position of a
    # label is its Cotton index, and the stringified index is also accepted.
    cotton = {
        'c1': ['a'],
        'ci': ['ag', 'au'],
        'c2': ['a', 'b'],
        'cs': ['ap', 'app'],
        'd2': ['a', 'b1', 'b2', 'b3'],
        'c2v': ['a1', 'a2', 'b1', 'b2'],
        'c2h': ['ag', 'bg', 'au', 'bu'],
        'd2h': ['ag', 'b1g', 'b2g', 'b3g', 'au', 'b1u', 'b2u', 'b3u'],
    }
    point_group = psi4.get_active_molecule().schoenflies_symbol().lower()
    irreducible_representation = str(irrep).lower()
    try:
        labels = cotton[point_group]
        lookup = {}
        for position, label in enumerate(labels):
            lookup[label] = position + 1
            lookup[str(position + 1)] = position + 1
        return lookup[irreducible_representation]
    except KeyError:
        raise ValidationError("Irrep \'%s\' not valid for point group \'%s\'." % (str(irrep), point_group))
| spring01/libPSI | lib/python/driver.py | Python | gpl-2.0 | 93,244 | [
"Gaussian",
"Psi4"
] | bd38b0f7499dd38684c8302b33d998c3a01c4cba602d11da562e6763766dc4e4 |
# -*- coding: utf-8 -*-
r"""This file is part of SkyLab
Skylab is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import numpy as np
import scipy.signal
import scipy.stats
def kernel_func(X, Y):
    r"""Smooth the histogram *X* with the kernel *Y*.

    The convolution is renormalized by the kernel mass falling inside the
    histogram, so edges are not damped. If *Y* is None, *X* is returned
    unchanged.
    """
    if Y is None:
        return X
    smoothed = scipy.signal.convolve(X, Y, mode="same")
    normalization = scipy.signal.convolve(np.ones_like(X), Y, mode="same")
    return smoothed / normalization
def poisson_percentile(mu, x, y, yval):
    r"""Calculate upper percentile using a Poisson distribution.

    Parameters
    ----------
    mu : float
        Mean value of Poisson distribution
    x : array_like,
        Trials of variable that is expected to be Poisson distributed
    y : array_like
        Observed variable connected to `x`
    yval : float
        Value to calculate the percentile at

    Returns
    -------
    score : float
        Value at percentile *alpha*
    err : float
        Uncertainty on `score`
    """
    # np.int/np.float aliases were removed from NumPy (1.24); use builtins.
    x = np.asarray(x, dtype=int)
    y = np.asarray(y, dtype=float)
    w = poisson_weight(x, mu)
    # Get percentile at yval.
    m = y > yval
    u = np.sum(w[m], dtype=float)
    if u == 0.:
        # Nothing above threshold: conservative score and error of one.
        return 1., 1.
    err = np.sqrt(np.sum(w[m]**2)) / np.sum(w)
    return u / np.sum(w, dtype=float), err
def poisson_weight(vals, mean, weights=None):
    r"""Calculate weights for a sample that it resembles a Poisson.

    Parameters
    ----------
    vals : array_like
        Random integers to be weighted
    mean : float
        Poisson mean
    weights : array_like, optional
        Weights for each event

    Returns
    -------
    ndarray
        Weights for each event
    """
    mean = float(mean)
    # np.int/np.float aliases were removed from NumPy (1.24); use builtins.
    vals = np.asarray(vals, dtype=int)
    if weights is None:
        weights = np.ones_like(vals, dtype=float)
    # Get occurences of integers.
    bincount = np.bincount(vals, weights=weights)
    n_max = len(bincount)
    # Get poisson probability.
    if mean > 0:
        p = scipy.stats.poisson(mean).pmf(range(n_max))
    else:
        # Degenerate Poisson with zero mean: all probability at zero.
        p = np.zeros(n_max, dtype=float)
        p[0] = 1.
    # Weights for each integer
    w = np.zeros_like(bincount, dtype=float)
    m = bincount > 0
    w[m] = p[m] / bincount[m]
    # vals are non-negative ints < n_max by construction, so direct indexing
    # replaces the redundant searchsorted over arange(n_max).
    w = w[vals]
    return w * weights
class delta_chi2(object):
    """Modified chi-square distribution

    Combine chi-square distribution and delta distribution at zero.

    Parameters
    ----------
    df : float
        Number of degree of freedom
    loc : float, optional
        Shift probability density.
    scale : float, optional
        Scale probability density.

    Attributes
    ----------
    params : tuple(float)
        Shape, location, and scale parameters of chi-square distribution
    eta : float
        Fraction of over-fluctuations
    eta_err : float
        Uncertainty on `eta`
    ks : float
        KS test stastistic
    """
    def __init__(self, eta, df, loc=0., scale=1.):
        self.eta = eta
        self.params = (df, loc, scale)
        self._chi2 = scipy.stats.chi2(df, loc, scale)
        self.eta_err = np.nan
        self.ks = np.nan

    def __getstate__(self):
        # The frozen scipy distribution is rebuilt in __setstate__.
        return dict(
            params=self.params, eta=self.eta, eta_err=self.eta_err, ks=self.ks)

    def __setstate__(self, state):
        for key in state:
            setattr(self, key, state[key])
        self._chi2 = scipy.stats.chi2(*self.params)

    def __str__(self):
        return (
            "Delta Distribution plus chi-square {0:s}\n"
            "\tSeparation factor = {1:8.3%} +/- {2:8.3%}\n"
            "\t\tNDoF = {3:6.2f}\n"
            "\t\tMean = {4:6.2f}\n"
            "\t\tScale = {5:6.2f}\n"
            "\t\tKS = {6:7.2%}").format(
                repr(self), self.eta, self.eta_err,
                self.params[0], self.params[1], self.params[2], self.ks)

    def pdf(self, x):
        r"""Probability density function
        """
        x = np.asarray(x)
        density = np.where(x > 0., self.eta * self._chi2.pdf(x), 1. - self.eta)
        if density.ndim == 0:
            # np.asscalar() was removed from NumPy; item() is the replacement.
            density = density.item()
        return density

    def logpdf(self, x):
        r"""Logarithmic probability density function
        """
        x = np.asarray(x)
        density = np.where(
            x > 0., np.log(self.eta) + self._chi2.logpdf(x),
            np.log(1. - self.eta))
        if density.ndim == 0:
            density = density.item()
        return density

    def cdf(self, x):
        r"""Cumulative distribution function
        """
        return (1. - self.eta) + self.eta * self._chi2.cdf(x)

    def logcdf(self, x):
        r"""Logarithmic cumulative distribution function
        """
        # Bugfix: the previous implementation returned
        # log(1 - eta) + log(eta) + chi2.logcdf(x), which is not the
        # logarithm of cdf(x) = (1 - eta) + eta * chi2.cdf(x).
        return np.log(self.cdf(x))

    def sf(self, x):
        r"""Survival function
        """
        x = np.asarray(x)
        probability = np.where(x > 0., self.eta * self._chi2.sf(x), 1.)
        if probability.ndim == 0:
            probability = probability.item()
        return probability

    def logsf(self, x):
        r"""Logarithmic survival function
        """
        x = np.asarray(x)
        probability = np.where(
            x > 0., np.log(self.eta) + self._chi2.logsf(x), 0.)
        if probability.ndim == 0:
            probability = probability.item()
        return probability

    def isf(self, x):
        r"""Inverse survival function
        """
        x = np.asarray(x)
        # Survival probabilities not smaller than eta map to TS = 0.
        ts = np.where(x < self.eta, self._chi2.isf(x / self.eta), 0.)
        if ts.ndim == 0:
            ts = ts.item()
        return ts
class FitDeltaChi2(object):
    """Fit `delta_chi2` to test statistic.

    Parameters
    ----------
    df : float, optional
        Seed for number of degree of freedom
    \*\*others
        Optional keyword arguments passed to chi-square function

    See Also
    --------
    scipy.stats.chi2.fit
    """
    def __init__(self, df=np.nan, **others):
        self.df = df
        self.others = others

    def fit(self, data):
        r"""Computes the fraction of over-fluctuations, fits a
        chi-square distribution to the values larger than zero, and
        performs a KS test.

        Parameters
        ----------
        data : array_like
            Test statistic values

        Returns
        -------
        delta_chi2
            Probability density function
        """
        data = np.asarray(data)
        positive = data[data > 0.]
        # Seed the fit with the configured NDoF only when one was given.
        seeds = [self.df] if np.isfinite(self.df) else []
        params = scipy.stats.chi2.fit(positive, *seeds, **self.others)
        eta = float(len(positive)) / len(data)
        result = delta_chi2(eta, *params)
        result.eta_err = np.sqrt(result.eta * (1. - result.eta) / len(data))
        result.ks = scipy.stats.kstest(positive, "chi2", args=params)[0]
        return result
class delta_exp(object):
    r"""Gaussian tail

    Approximate test statistic using a polynomial fit to the cumulative
    test statistic distribution.

    Attributes
    ----------
    coeff : ndarray
        Polynomial coefficients (of log survival function), highest power first
    eta : float
        Fraction of over-fluctuations
    eta_err : float
        Uncertainty on `eta`
    """
    def __init__(self, coeff, eta, eta_err=None):
        self.coeff = coeff
        self.eta = eta
        self.eta_err = eta_err

    def __getstate__(self):
        return dict(coeff=self.coeff, eta=self.eta, eta_err=self.eta_err)

    def __setstate__(self, state):
        self.coeff = state.pop("coeff")
        self.eta = state.pop("eta")
        self.eta_err = state.pop("eta_err")

    def pdf(self, x):
        r"""Probability density function

        NOTE(review): with sf(x) = exp(P(x)), the density should be
        -d/dx sf(x); this returns +P'(x) * exp(P(x)), following the
        historical implementation -- confirm sign convention upstream.
        """
        x = np.asarray(x)
        density = np.polyval(np.polyder(self.coeff), x) *\
            np.exp(np.polyval(self.coeff, x))
        density = np.where(x > 0., density, self.eta)
        if density.ndim == 0:
            # np.asscalar() was removed from NumPy; item() is the replacement.
            density = density.item()
        return density

    def sf(self, x):
        r"""Survival function
        """
        x = np.asarray(x)
        probability = np.where(
            x > 0., np.exp(np.polyval(self.coeff, x)), self.eta)
        if probability.ndim == 0:
            probability = probability.item()
        return probability

    def isf(self, x):
        r"""Inverse survival function
        """
        @np.vectorize
        def get_root(x):
            # Survival probabilities above the over-fluctuation fraction
            # correspond to a test statistic of zero.
            if x > self.eta:
                return 0.
            # Solve P(t) = log(x) for the largest positive real root.
            coeff = np.copy(self.coeff)
            coeff[-1] -= np.log(x)
            roots = np.roots(coeff)
            roots = np.real(roots[np.isreal(roots)])
            return np.amax(roots[roots > 0])
        ts = get_root(x)
        if ts.ndim == 0:
            ts = ts.item()
        return ts
class FitDeltaExp(object):
    r"""Fit polynomial to cumulative test statistic distrubtion.

    Attributes
    ----------
    deg : int
        Degree of the fitting polynomial
    """
    def __init__(self, deg):
        self.deg = deg

    def fit(self, data):
        r"""Perform fit given `data`.

        Parameters
        ----------
        data : array_like
            Test statistic values

        Returns
        -------
        delta_exp_frozen
            Probability density function
        """
        data = np.asarray(data)
        n_total = len(data)
        overfluctuations = data[data > 0.]
        # Amount of over-fluctuations and its binomial uncertainty.
        eta = float(len(overfluctuations)) / n_total
        eta_err = np.sqrt(eta * (1. - eta) / n_total)
        # Empirical survival function on the sorted over-fluctuations.
        xs = np.sort(overfluctuations)
        ys = np.linspace(1., 0., len(xs) + 1)[:-1]
        coeff = np.polyfit(xs, np.log(ys), self.deg)
        return delta_exp(coeff, eta, eta_err)
class twoside_chi2(object):
    r"""Modified chi-square distribution

    Combine two chi-square distributions, which are normalized to
    conserve the total normalization, where one of the functions is
    defined for positive values and the other one for negative values.

    Parameters
    ----------
    df : tuple(float)
        Numbers of degree of freedom
    loc : tuple(float), optional
        Shift probability densities.
    scale : tuple(float), optional
        Scale probability densities.

    Attributes
    ----------
    params : ndarray
        Shape, location, and scale parameters of left and right
        chi-square distributions
    eta : float
        Fraction of over-fluctuations
    eta_err : float
        Uncertainty on `eta`
    ks : tuple(float)
        KS test stastistics
    """
    def __init__(self, eta, df, loc=0., scale=1.):
        # Row 0 holds the right-hand (positive) chi-square parameters,
        # row 1 the left-hand (negative) ones.
        params = np.empty(shape=(2, 3))
        params[:, 0] = df
        params[:, 1] = loc
        params[:, 2] = scale
        self.eta = eta
        self.params = params
        self._chi2 = tuple(scipy.stats.chi2(*p) for p in params)
        self.eta_err = np.nan
        self.ks = (np.nan, np.nan)

    def __getstate__(self):
        # The frozen scipy distributions are rebuilt in __setstate__.
        return dict(
            params=self.params, eta=self.eta, eta_err=self.eta_err, ks=self.ks)

    def __setstate__(self, state):
        for key in state:
            setattr(self, key, state[key])
        self._chi2 = tuple(scipy.stats.chi2(*p) for p in self.params)

    def __str__(self):
        # Bugfix: the right-side KS value is ks[0]; it was previously
        # formatted as {5[1]} and printed the left-side KS twice.
        return (
            "Two-sided chi-square {0:s}\n"
            "\tSeparation factor = {1:8.3%} +/- {2:8.3%}\n"
            "\tRight side:\n"
            "\t\tNDoF = {3[0]:6.2f}\n"
            "\t\tMean = {3[1]:6.2f}\n"
            "\t\tScale = {3[2]:6.2f}\n"
            "\t\tKS = {5[0]:7.2%}\n"
            "\tLeft side:\n"
            "\t\tNDoF = {4[0]:6.2f}\n"
            "\t\tMean = {4[1]:6.2f}\n"
            "\t\tScale = {4[2]:6.2f}\n"
            "\t\tKS = {5[1]:7.2%}\n"
            ).format(
                repr(self), self.eta, self.eta_err,
                self.params[0], self.params[1], self.ks)

    def pdf(self, x):
        r"""Probability density function
        """
        x = np.asarray(x)
        density = self.eta * self._chi2[0].pdf(x) +\
            (1. - self.eta) * self._chi2[1].pdf(-x)
        return density

    def logpdf(self, x):
        r"""Logarithmic probability density function
        """
        x = np.asarray(x)
        density = np.where(
            x > 0., np.log(self.eta) + self._chi2[0].logpdf(x),
            np.log(1. - self.eta) + self._chi2[1].logpdf(-x))
        if density.ndim == 0:
            # np.asscalar() was removed from NumPy; item() is the replacement.
            density = density.item()
        return density

    def cdf(self, x):
        r"""Cumulative distribution function
        """
        x = np.asarray(x)
        probability = self.eta * self._chi2[0].cdf(x) +\
            (1. - self.eta) * self._chi2[1].sf(-x)
        return probability

    def logcdf(self, x):
        r"""Logarithmic cumulative distribution function
        """
        x = np.asarray(x)
        probability = np.where(
            x > 0., np.log(self.eta) + self._chi2[0].logcdf(x),
            np.log(1. - self.eta) + self._chi2[1].logsf(-x))
        if probability.ndim == 0:
            probability = probability.item()
        return probability

    def sf(self, x):
        r"""Survival function
        """
        x = np.asarray(x)
        probability = self.eta * self._chi2[0].sf(x) +\
            (1. - self.eta) * self._chi2[1].cdf(-x)
        return probability

    def logsf(self, x):
        r"""Logarithmic survival function
        """
        x = np.asarray(x)
        probability = np.where(
            x > 0., np.log(self.eta) + self._chi2[0].logsf(x),
            np.log(1. - self.eta) + self._chi2[1].logcdf(-x))
        if probability.ndim == 0:
            probability = probability.item()
        return probability

    def isf(self, x):
        r"""Inverse survival function
        """
        x = np.asarray(x)
        ts = np.where(
            x < self.eta, self._chi2[0].isf(x / self.eta),
            -self._chi2[1].ppf(1. - (1. - x) / (1. - self.eta)))
        if ts.ndim == 0:
            ts = ts.item()
        return ts
class FitTwoSideChi2(object):
    """Fit `twoside_chi2` to test statistic.

    Parameters
    ----------
    df : tuple(float), optional
        Seeds for number of degree of freedom
    left : dict(str, float), optional
        Optional keyword arguments passed to left chi-square function
    right : dict(str, float), optional
        Optional keyword arguments passed to right chi-square function

    See Also
    --------
    scipy.stats.chi2.fit
    """
    def __init__(self, df=np.nan, left=None, right=None):
        self.df = np.empty(2)
        self.df[:] = df
        # Avoid mutable default arguments ({}): a shared default dict would
        # be visible across every instance of this class.
        self.others = (
            {} if left is None else left,
            {} if right is None else right)

    def fit(self, data):
        r"""Computes the fraction of over-fluctuations, fits chi-square
        distributions, and performs KS tests.

        Parameters
        ----------
        data : array_like
            Test statistic values

        Returns
        -------
        twoside_chi2
            Probability density function
        """
        data = np.asarray(data)
        # Use configured NDoF values as fit seeds only where given.
        seeds = [[n] if np.isfinite(n) else [] for n in self.df]
        # Row 0: fit of the positive side; row 1: fit of the mirrored negative side.
        params = np.vstack((
            scipy.stats.chi2.fit(data[data > 0.], *seeds[0], **self.others[0]),
            scipy.stats.chi2.fit(-data[data < 0.], *seeds[1], **self.others[1])
        ))
        eta = float(np.count_nonzero(data > 0.)) / len(data)
        pdf = twoside_chi2(eta, *params.T)
        pdf.eta_err = np.sqrt(pdf.eta * (1. - pdf.eta) / len(data))
        pdf.ks = (
            scipy.stats.kstest(data[data > 0.], "chi2", args=params[0])[1],
            scipy.stats.kstest(-data[data < 0.], "chi2", args=params[1])[1]
        )
        return pdf
def rotate(ra1, dec1, ra2, dec2, ra3, dec3):
    r"""Rotation matrix for rotation of (ra1, dec1) onto (ra2, dec2).

    The rotation is performed on (ra3, dec3).

    All six arguments are broadcast to 1-D arrays of equal length and
    are presumably right ascension / declination in radians — TODO
    confirm against callers.  Returns the rotated ``(ra, dec)`` with
    ``ra`` shifted into ``[0, 2*pi)``.
    """
    def cross_matrix(x):
        r"""Calculate cross product matrix
        A[ij] = x_i * y_j - y_i * x_j
        """
        # Skew-symmetric matrix such that cross_matrix(n) @ v == n x v.
        skv = np.roll(np.roll(np.diag(x.ravel()), 1, 1), -1, 0)
        return skv - skv.T

    ra1 = np.atleast_1d(ra1)
    dec1 = np.atleast_1d(dec1)
    ra2 = np.atleast_1d(ra2)
    dec2 = np.atleast_1d(dec2)
    ra3 = np.atleast_1d(ra3)
    dec3 = np.atleast_1d(dec3)

    # All six coordinate arrays must describe the same number of points.
    assert(
        len(ra1) == len(dec1) == len(ra2) == len(dec2) == len(ra3) == len(dec3)
    )

    # Rotation angle: angular separation between point 1 and point 2
    # (spherical law of cosines).
    alpha = np.arccos(np.cos(ra2 - ra1) * np.cos(dec1) * np.cos(dec2)
                      + np.sin(dec1) * np.sin(dec2))

    # Unit vectors in Cartesian coordinates, one row per point.
    vec1 = np.vstack([np.cos(ra1) * np.cos(dec1),
                      np.sin(ra1) * np.cos(dec1),
                      np.sin(dec1)]).T
    vec2 = np.vstack([np.cos(ra2) * np.cos(dec2),
                      np.sin(ra2) * np.cos(dec2),
                      np.sin(dec2)]).T
    vec3 = np.vstack([np.cos(ra3) * np.cos(dec3),
                      np.sin(ra3) * np.cos(dec3),
                      np.sin(dec3)]).T

    # Rotation axis: normal of the plane spanned by vec1 and vec2;
    # normalize only where the norm is non-zero (vec1 == vec2 leaves
    # the axis as the zero vector, i.e. an identity-like rotation).
    nvec = np.cross(vec1, vec2)
    norm = np.sqrt(np.sum(nvec**2, axis=1))
    nvec[norm > 0] /= norm[np.newaxis, norm > 0].T

    # Rodrigues' rotation formula:
    # R = (1 - cos a) n n^T + cos a I + sin a [n]_x
    one = np.diagflat(np.ones(3))
    nTn = np.array([np.outer(nv, nv) for nv in nvec])
    nx = np.array([cross_matrix(nv) for nv in nvec])

    R = np.array([(1.-np.cos(a)) * nTn_i + np.cos(a) * one + np.sin(a) * nx_i
                  for a, nTn_i, nx_i in zip(alpha, nTn, nx)])

    # Apply each per-point rotation matrix to the corresponding vec3.
    vec = np.array([np.dot(R_i, vec_i.T) for R_i, vec_i in zip(R, vec3)])

    # Convert back to equatorial coordinates; fold ra into [0, 2*pi).
    ra = np.arctan2(vec[:, 1], vec[:, 0])
    dec = np.arcsin(vec[:, 2])

    ra += np.where(ra < 0., 2. * np.pi, 0.)

    return ra, dec
| kkrings/skylab | skylab/utils.py | Python | gpl-3.0 | 18,124 | [
"Gaussian"
] | bee88ab5ba7794f61267b7dcf73de0e40a6450495cef5e518dd2206393d36a73 |
"""Tests for user-friendly public interface to polynomial functions. """
from sympy.polys.polytools import (
Poly, PurePoly, poly,
parallel_poly_from_expr,
degree, degree_list,
LC, LM, LT,
pdiv, prem, pquo, pexquo,
div, rem, quo, exquo,
half_gcdex, gcdex, invert,
subresultants,
resultant, discriminant,
terms_gcd, cofactors,
gcd, gcd_list,
lcm, lcm_list,
trunc,
monic, content, primitive,
compose, decompose,
sturm,
gff_list, gff,
sqf_norm, sqf_part, sqf_list, sqf,
factor_list, factor,
intervals, refine_root, count_roots,
real_roots, nroots, ground_roots,
nth_power_roots_poly,
cancel, reduced, groebner,
GroebnerBasis, is_zero_dimensional,
_torational_factor_list,
to_rational_coeffs)
from sympy.polys.polyerrors import (
MultivariatePolynomialError,
ExactQuotientFailed,
PolificationFailed,
ComputationFailed,
UnificationFailed,
RefinementFailed,
GeneratorsNeeded,
GeneratorsError,
PolynomialError,
CoercionFailed,
DomainError,
OptionError,
FlagError)
from sympy.polys.polyclasses import DMP
from sympy.polys.fields import field
from sympy.polys.domains import FF, ZZ, QQ, RR, EX
from sympy.polys.domains.realfield import RealField
from sympy.polys.orderings import lex, grlex, grevlex
from sympy import (
S, Integer, Rational, Float, Mul, Symbol, sqrt, Piecewise, Derivative,
exp, sin, tanh, expand, oo, I, pi, re, im, rootof, Eq, Tuple, Expr, diff)
from sympy.core.basic import _aresame
from sympy.core.compatibility import iterable
from sympy.core.mul import _keep_coeff
from sympy.utilities.pytest import raises, XFAIL
from sympy.simplify import simplify
from sympy.abc import a, b, c, d, p, q, t, w, x, y, z
from sympy import MatrixSymbol
def _epsilon_eq(a, b):
for x, y in zip(a, b):
if abs(x - y) > 1e-10:
return False
return True
def _strict_eq(a, b):
    """Strict structural equality: identical types, recursive for iterables,
    and ``Poly.eq(..., strict=True)`` for polynomial leaves."""
    if type(a) != type(b):
        return False
    if iterable(a):
        if len(a) != len(b):
            return False
        return all(_strict_eq(u, v) for u, v in zip(a, b))
    return isinstance(a, Poly) and a.eq(b, strict=True)
def test_Poly_from_dict():
    """Construct polynomials from {exponent: coeff} dicts over various domains."""
    K = FF(3)

    assert Poly.from_dict({0: 1, 1: 2}, gens=x, domain=K).rep == DMP([K(2), K(1)], K)
    assert Poly.from_dict({0: 1, 1: 5}, gens=x, domain=K).rep == DMP([K(2), K(1)], K)

    assert Poly.from_dict({(0,): 1, (1,): 2}, gens=x, domain=K).rep == DMP([K(2), K(1)], K)
    assert Poly.from_dict({(0,): 1, (1,): 5}, gens=x, domain=K).rep == DMP([K(2), K(1)], K)

    assert Poly.from_dict({(0, 0): 1, (1, 1): 2}, gens=(x, y), domain=K).rep == DMP([[K(2), K(0)], [K(1)]], K)

    assert Poly.from_dict({0: 1, 1: 2}, gens=x).rep == DMP([ZZ(2), ZZ(1)], ZZ)
    assert Poly.from_dict({0: 1, 1: 2}, gens=x, field=True).rep == DMP([QQ(2), QQ(1)], QQ)

    assert Poly.from_dict({0: 1, 1: 2}, gens=x, domain=ZZ).rep == DMP([ZZ(2), ZZ(1)], ZZ)
    assert Poly.from_dict({0: 1, 1: 2}, gens=x, domain=QQ).rep == DMP([QQ(2), QQ(1)], QQ)

    assert Poly.from_dict({(0,): 1, (1,): 2}, gens=x).rep == DMP([ZZ(2), ZZ(1)], ZZ)
    assert Poly.from_dict({(0,): 1, (1,): 2}, gens=x, field=True).rep == DMP([QQ(2), QQ(1)], QQ)

    assert Poly.from_dict({(0,): 1, (1,): 2}, gens=x, domain=ZZ).rep == DMP([ZZ(2), ZZ(1)], ZZ)
    assert Poly.from_dict({(0,): 1, (1,): 2}, gens=x, domain=QQ).rep == DMP([QQ(2), QQ(1)], QQ)

    # composite=False forces EX (or flat) domains for symbolic coefficients.
    assert Poly.from_dict({(1,): sin(y)}, gens=x, composite=False) == \
        Poly(sin(y)*x, x, domain='EX')
    assert Poly.from_dict({(1,): y}, gens=x, composite=False) == \
        Poly(y*x, x, domain='EX')
    assert Poly.from_dict({(1, 1): 1}, gens=(x, y), composite=False) == \
        Poly(x*y, x, y, domain='ZZ')
    assert Poly.from_dict({(1, 0): y}, gens=(x, z), composite=False) == \
        Poly(y*x, x, z, domain='EX')
def test_Poly_from_list():
    """Construct polynomials from coefficient lists (highest degree first)."""
    K = FF(3)

    assert Poly.from_list([2, 1], gens=x, domain=K).rep == DMP([K(2), K(1)], K)
    assert Poly.from_list([5, 1], gens=x, domain=K).rep == DMP([K(2), K(1)], K)

    assert Poly.from_list([2, 1], gens=x).rep == DMP([ZZ(2), ZZ(1)], ZZ)
    assert Poly.from_list([2, 1], gens=x, field=True).rep == DMP([QQ(2), QQ(1)], QQ)

    assert Poly.from_list([2, 1], gens=x, domain=ZZ).rep == DMP([ZZ(2), ZZ(1)], ZZ)
    assert Poly.from_list([2, 1], gens=x, domain=QQ).rep == DMP([QQ(2), QQ(1)], QQ)

    assert Poly.from_list([0, 1.0], gens=x).rep == DMP([RR(1.0)], RR)
    assert Poly.from_list([1.0, 0], gens=x).rep == DMP([RR(1.0), RR(0.0)], RR)

    # Nested lists are only valid for multivariate dict/expr constructors.
    raises(MultivariatePolynomialError, lambda: Poly.from_list([[]], gens=(x, y)))
def test_Poly_from_poly():
    """Re-wrap existing Poly instances with new generators/domains/moduli."""
    f = Poly(x + 7, x, domain=ZZ)
    g = Poly(x + 2, x, modulus=3)
    h = Poly(x + y, x, y, domain=ZZ)

    K = FF(3)

    assert Poly.from_poly(f) == f
    assert Poly.from_poly(f, domain=K).rep == DMP([K(1), K(1)], K)
    assert Poly.from_poly(f, domain=ZZ).rep == DMP([1, 7], ZZ)
    assert Poly.from_poly(f, domain=QQ).rep == DMP([1, 7], QQ)

    assert Poly.from_poly(f, gens=x) == f
    assert Poly.from_poly(f, gens=x, domain=K).rep == DMP([K(1), K(1)], K)
    assert Poly.from_poly(f, gens=x, domain=ZZ).rep == DMP([1, 7], ZZ)
    assert Poly.from_poly(f, gens=x, domain=QQ).rep == DMP([1, 7], QQ)

    # Switching to generator y turns x into a coefficient symbol.
    assert Poly.from_poly(f, gens=y) == Poly(x + 7, y, domain='ZZ[x]')
    raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=K))
    raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=ZZ))
    raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=QQ))

    assert Poly.from_poly(f, gens=(x, y)) == Poly(x + 7, x, y, domain='ZZ')
    assert Poly.from_poly(
        f, gens=(x, y), domain=ZZ) == Poly(x + 7, x, y, domain='ZZ')
    assert Poly.from_poly(
        f, gens=(x, y), domain=QQ) == Poly(x + 7, x, y, domain='QQ')
    assert Poly.from_poly(
        f, gens=(x, y), modulus=3) == Poly(x + 7, x, y, domain='FF(3)')

    K = FF(2)

    assert Poly.from_poly(g) == g
    assert Poly.from_poly(g, domain=ZZ).rep == DMP([1, -1], ZZ)
    raises(CoercionFailed, lambda: Poly.from_poly(g, domain=QQ))
    assert Poly.from_poly(g, domain=K).rep == DMP([K(1), K(0)], K)

    assert Poly.from_poly(g, gens=x) == g
    assert Poly.from_poly(g, gens=x, domain=ZZ).rep == DMP([1, -1], ZZ)
    raises(CoercionFailed, lambda: Poly.from_poly(g, gens=x, domain=QQ))
    assert Poly.from_poly(g, gens=x, domain=K).rep == DMP([K(1), K(0)], K)

    K = FF(3)

    assert Poly.from_poly(h) == h
    assert Poly.from_poly(
        h, domain=ZZ).rep == DMP([[ZZ(1)], [ZZ(1), ZZ(0)]], ZZ)
    assert Poly.from_poly(
        h, domain=QQ).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ)
    assert Poly.from_poly(h, domain=K).rep == DMP([[K(1)], [K(1), K(0)]], K)

    assert Poly.from_poly(h, gens=x) == Poly(x + y, x, domain=ZZ[y])
    raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, domain=ZZ))
    assert Poly.from_poly(
        h, gens=x, domain=ZZ[y]) == Poly(x + y, x, domain=ZZ[y])
    raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, domain=QQ))
    assert Poly.from_poly(
        h, gens=x, domain=QQ[y]) == Poly(x + y, x, domain=QQ[y])
    raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, modulus=3))

    assert Poly.from_poly(h, gens=y) == Poly(x + y, y, domain=ZZ[x])
    raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, domain=ZZ))
    assert Poly.from_poly(
        h, gens=y, domain=ZZ[x]) == Poly(x + y, y, domain=ZZ[x])
    raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, domain=QQ))
    assert Poly.from_poly(
        h, gens=y, domain=QQ[x]) == Poly(x + y, y, domain=QQ[x])
    raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, modulus=3))

    assert Poly.from_poly(h, gens=(x, y)) == h
    assert Poly.from_poly(
        h, gens=(x, y), domain=ZZ).rep == DMP([[ZZ(1)], [ZZ(1), ZZ(0)]], ZZ)
    assert Poly.from_poly(
        h, gens=(x, y), domain=QQ).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ)
    assert Poly.from_poly(
        h, gens=(x, y), domain=K).rep == DMP([[K(1)], [K(1), K(0)]], K)

    assert Poly.from_poly(
        h, gens=(y, x)).rep == DMP([[ZZ(1)], [ZZ(1), ZZ(0)]], ZZ)
    assert Poly.from_poly(
        h, gens=(y, x), domain=ZZ).rep == DMP([[ZZ(1)], [ZZ(1), ZZ(0)]], ZZ)
    assert Poly.from_poly(
        h, gens=(y, x), domain=QQ).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ)
    assert Poly.from_poly(
        h, gens=(y, x), domain=K).rep == DMP([[K(1)], [K(1), K(0)]], K)

    assert Poly.from_poly(
        h, gens=(x, y), field=True).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ)
    assert Poly.from_poly(
        h, gens=(x, y), field=True).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ)
def test_Poly_from_expr():
    """Build polynomials from SymPy expressions, inferring generators/domains."""
    # Constants alone carry no generators.
    raises(GeneratorsNeeded, lambda: Poly.from_expr(S(0)))
    raises(GeneratorsNeeded, lambda: Poly.from_expr(S(7)))

    F3 = FF(3)

    assert Poly.from_expr(x + 5, domain=F3).rep == DMP([F3(1), F3(2)], F3)
    assert Poly.from_expr(y + 5, domain=F3).rep == DMP([F3(1), F3(2)], F3)

    assert Poly.from_expr(x + 5, x, domain=F3).rep == DMP([F3(1), F3(2)], F3)
    assert Poly.from_expr(y + 5, y, domain=F3).rep == DMP([F3(1), F3(2)], F3)

    assert Poly.from_expr(x + y, domain=F3).rep == DMP([[F3(1)], [F3(1), F3(0)]], F3)
    assert Poly.from_expr(x + y, x, y, domain=F3).rep == DMP([[F3(1)], [F3(1), F3(0)]], F3)

    assert Poly.from_expr(x + 5).rep == DMP([1, 5], ZZ)
    assert Poly.from_expr(y + 5).rep == DMP([1, 5], ZZ)

    assert Poly.from_expr(x + 5, x).rep == DMP([1, 5], ZZ)
    assert Poly.from_expr(y + 5, y).rep == DMP([1, 5], ZZ)

    assert Poly.from_expr(x + 5, domain=ZZ).rep == DMP([1, 5], ZZ)
    assert Poly.from_expr(y + 5, domain=ZZ).rep == DMP([1, 5], ZZ)

    assert Poly.from_expr(x + 5, x, domain=ZZ).rep == DMP([1, 5], ZZ)
    assert Poly.from_expr(y + 5, y, domain=ZZ).rep == DMP([1, 5], ZZ)

    assert Poly.from_expr(x + 5, x, y, domain=ZZ).rep == DMP([[1], [5]], ZZ)
    assert Poly.from_expr(y + 5, x, y, domain=ZZ).rep == DMP([[1, 5]], ZZ)
def test_Poly__new__():
    """Constructor option validation and coefficient coercion rules."""
    # Conflicting or duplicate generators/options must be rejected.
    raises(GeneratorsError, lambda: Poly(x + 1, x, x))

    raises(GeneratorsError, lambda: Poly(x + y, x, y, domain=ZZ[x]))
    raises(GeneratorsError, lambda: Poly(x + y, x, y, domain=ZZ[y]))

    raises(OptionError, lambda: Poly(x, x, symmetric=True))
    raises(OptionError, lambda: Poly(x + 2, x, modulus=3, domain=QQ))

    raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, gaussian=True))
    raises(OptionError, lambda: Poly(x + 2, x, modulus=3, gaussian=True))

    raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, extension=[sqrt(3)]))
    raises(OptionError, lambda: Poly(x + 2, x, modulus=3, extension=[sqrt(3)]))

    raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, extension=True))
    raises(OptionError, lambda: Poly(x + 2, x, modulus=3, extension=True))

    raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, greedy=True))
    raises(OptionError, lambda: Poly(x + 2, x, domain=QQ, field=True))

    raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, greedy=False))
    raises(OptionError, lambda: Poly(x + 2, x, domain=QQ, field=False))

    raises(NotImplementedError, lambda: Poly(x + 1, x, modulus=3, order='grlex'))
    raises(NotImplementedError, lambda: Poly(x + 1, x, order='grlex'))

    raises(GeneratorsNeeded, lambda: Poly({1: 2, 0: 1}))
    raises(GeneratorsNeeded, lambda: Poly([2, 1]))
    raises(GeneratorsNeeded, lambda: Poly((2, 1)))

    raises(GeneratorsNeeded, lambda: Poly(1))

    f = a*x**2 + b*x + c

    # dict, iterator, list and tuple coefficient forms are equivalent.
    assert Poly({2: a, 1: b, 0: c}, x) == f
    assert Poly(iter([a, b, c]), x) == f
    assert Poly([a, b, c], x) == f
    assert Poly((a, b, c), x) == f

    f = Poly({}, x, y, z)

    assert f.gens == (x, y, z) and f.as_expr() == 0

    assert Poly(Poly(a*x + b*y, x, y), x) == Poly(a*x + b*y, x)

    assert Poly(3*x**2 + 2*x + 1, domain='ZZ').all_coeffs() == [3, 2, 1]
    assert Poly(3*x**2 + 2*x + 1, domain='QQ').all_coeffs() == [3, 2, 1]
    assert Poly(3*x**2 + 2*x + 1, domain='RR').all_coeffs() == [3.0, 2.0, 1.0]

    raises(CoercionFailed, lambda: Poly(3*x**2/5 + 2*x/5 + 1, domain='ZZ'))
    assert Poly(
        3*x**2/5 + 2*x/5 + 1, domain='QQ').all_coeffs() == [S(3)/5, S(2)/5, 1]
    assert _epsilon_eq(
        Poly(3*x**2/5 + 2*x/5 + 1, domain='RR').all_coeffs(), [0.6, 0.4, 1.0])

    assert Poly(3.0*x**2 + 2.0*x + 1, domain='ZZ').all_coeffs() == [3, 2, 1]
    assert Poly(3.0*x**2 + 2.0*x + 1, domain='QQ').all_coeffs() == [3, 2, 1]
    assert Poly(
        3.0*x**2 + 2.0*x + 1, domain='RR').all_coeffs() == [3.0, 2.0, 1.0]

    raises(CoercionFailed, lambda: Poly(3.1*x**2 + 2.1*x + 1, domain='ZZ'))
    assert Poly(3.1*x**2 + 2.1*x + 1, domain='QQ').all_coeffs() == [S(31)/10, S(21)/10, 1]
    assert Poly(3.1*x**2 + 2.1*x + 1, domain='RR').all_coeffs() == [3.1, 2.1, 1.0]

    assert Poly({(2, 1): 1, (1, 2): 2, (1, 1): 3}, x, y) == \
        Poly(x**2*y + 2*x*y**2 + 3*x*y, x, y)

    assert Poly(x**2 + 1, extension=I).get_domain() == QQ.algebraic_field(I)

    f = 3*x**5 - x**4 + x**3 - x** 2 + 65538

    # symmetric=True maps residues to the symmetric range around zero.
    assert Poly(f, x, modulus=65537, symmetric=True) == \
        Poly(3*x**5 - x**4 + x**3 - x** 2 + 1, x, modulus=65537,
             symmetric=True)
    assert Poly(f, x, modulus=65537, symmetric=False) == \
        Poly(3*x**5 + 65536*x**4 + x**3 + 65536*x** 2 + 1, x,
             modulus=65537, symmetric=False)

    assert isinstance(Poly(x**2 + x + 1.0).get_domain(), RealField)
def test_Poly__args():
    """`args` is the defining expression."""
    assert Poly(x**2 + 1).args == (x**2 + 1,)


def test_Poly__gens():
    """Generator inference, ordering (`sort`) and priority (`wrt`) options."""
    assert Poly((x - p)*(x - q), x).gens == (x,)
    assert Poly((x - p)*(x - q), p).gens == (p,)
    assert Poly((x - p)*(x - q), q).gens == (q,)

    assert Poly((x - p)*(x - q), x, p).gens == (x, p)
    assert Poly((x - p)*(x - q), x, q).gens == (x, q)

    assert Poly((x - p)*(x - q), x, p, q).gens == (x, p, q)
    assert Poly((x - p)*(x - q), p, x, q).gens == (p, x, q)
    assert Poly((x - p)*(x - q), p, q, x).gens == (p, q, x)

    assert Poly((x - p)*(x - q)).gens == (x, p, q)

    assert Poly((x - p)*(x - q), sort='x > p > q').gens == (x, p, q)
    assert Poly((x - p)*(x - q), sort='p > x > q').gens == (p, x, q)
    assert Poly((x - p)*(x - q), sort='p > q > x').gens == (p, q, x)

    # Explicit generators take precedence over `sort`.
    assert Poly((x - p)*(x - q), x, p, q, sort='p > q > x').gens == (x, p, q)

    assert Poly((x - p)*(x - q), wrt='x').gens == (x, p, q)
    assert Poly((x - p)*(x - q), wrt='p').gens == (p, x, q)
    assert Poly((x - p)*(x - q), wrt='q').gens == (q, x, p)

    assert Poly((x - p)*(x - q), wrt=x).gens == (x, p, q)
    assert Poly((x - p)*(x - q), wrt=p).gens == (p, x, q)
    assert Poly((x - p)*(x - q), wrt=q).gens == (q, x, p)

    assert Poly((x - p)*(x - q), x, p, q, wrt='p').gens == (x, p, q)

    assert Poly((x - p)*(x - q), wrt='p', sort='q > x').gens == (p, q, x)
    assert Poly((x - p)*(x - q), wrt='q', sort='p > x').gens == (q, p, x)
def test_Poly_zero():
    """`zero` preserves generators and domain."""
    assert Poly(x).zero == Poly(0, x, domain=ZZ)
    assert Poly(x/2).zero == Poly(0, x, domain=QQ)


def test_Poly_one():
    """`one` preserves generators and domain."""
    assert Poly(x).one == Poly(1, x, domain=ZZ)
    assert Poly(x/2).one == Poly(1, x, domain=QQ)
def test_Poly__unify():
    """Unification of generators and domains between two polynomials."""
    raises(UnificationFailed, lambda: Poly(x)._unify(y))

    F3 = FF(3)
    F5 = FF(5)

    assert Poly(x, x, modulus=3)._unify(Poly(y, y, modulus=3))[2:] == (
        DMP([[F3(1)], []], F3), DMP([[F3(1), F3(0)]], F3))
    # Different moduli unify to the larger field.
    assert Poly(x, x, modulus=3)._unify(Poly(y, y, modulus=5))[2:] == (
        DMP([[F5(1)], []], F5), DMP([[F5(1), F5(0)]], F5))

    assert Poly(y, x, y)._unify(Poly(x, x, modulus=3))[2:] == (DMP([[F3(1), F3(0)]], F3), DMP([[F3(1)], []], F3))
    assert Poly(x, x, modulus=3)._unify(Poly(y, x, y))[2:] == (DMP([[F3(1)], []], F3), DMP([[F3(1), F3(0)]], F3))

    assert Poly(x + 1, x)._unify(Poly(x + 2, x))[2:] == (DMP([1, 1], ZZ), DMP([1, 2], ZZ))
    assert Poly(x + 1, x, domain='QQ')._unify(Poly(x + 2, x))[2:] == (DMP([1, 1], QQ), DMP([1, 2], QQ))
    assert Poly(x + 1, x)._unify(Poly(x + 2, x, domain='QQ'))[2:] == (DMP([1, 1], QQ), DMP([1, 2], QQ))

    assert Poly(x + 1, x)._unify(Poly(x + 2, x, y))[2:] == (DMP([[1], [1]], ZZ), DMP([[1], [2]], ZZ))
    assert Poly(x + 1, x, domain='QQ')._unify(Poly(x + 2, x, y))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ))
    assert Poly(x + 1, x)._unify(Poly(x + 2, x, y, domain='QQ'))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ))

    assert Poly(x + 1, x, y)._unify(Poly(x + 2, x))[2:] == (DMP([[1], [1]], ZZ), DMP([[1], [2]], ZZ))
    assert Poly(x + 1, x, y, domain='QQ')._unify(Poly(x + 2, x))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ))
    assert Poly(x + 1, x, y)._unify(Poly(x + 2, x, domain='QQ'))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ))

    assert Poly(x + 1, x, y)._unify(Poly(x + 2, x, y))[2:] == (DMP([[1], [1]], ZZ), DMP([[1], [2]], ZZ))
    assert Poly(x + 1, x, y, domain='QQ')._unify(Poly(x + 2, x, y))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ))
    assert Poly(x + 1, x, y)._unify(Poly(x + 2, x, y, domain='QQ'))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ))

    assert Poly(x + 1, x)._unify(Poly(x + 2, y, x))[2:] == (DMP([[1, 1]], ZZ), DMP([[1, 2]], ZZ))
    assert Poly(x + 1, x, domain='QQ')._unify(Poly(x + 2, y, x))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ))
    assert Poly(x + 1, x)._unify(Poly(x + 2, y, x, domain='QQ'))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ))

    assert Poly(x + 1, y, x)._unify(Poly(x + 2, x))[2:] == (DMP([[1, 1]], ZZ), DMP([[1, 2]], ZZ))
    assert Poly(x + 1, y, x, domain='QQ')._unify(Poly(x + 2, x))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ))
    assert Poly(x + 1, y, x)._unify(Poly(x + 2, x, domain='QQ'))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ))

    assert Poly(x + 1, x, y)._unify(Poly(x + 2, y, x))[2:] == (DMP([[1], [1]], ZZ), DMP([[1], [2]], ZZ))
    assert Poly(x + 1, x, y, domain='QQ')._unify(Poly(x + 2, y, x))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ))
    assert Poly(x + 1, x, y)._unify(Poly(x + 2, y, x, domain='QQ'))[2:] == (DMP([[1], [1]], QQ), DMP([[1], [2]], QQ))

    assert Poly(x + 1, y, x)._unify(Poly(x + 2, x, y))[2:] == (DMP([[1, 1]], ZZ), DMP([[1, 2]], ZZ))
    assert Poly(x + 1, y, x, domain='QQ')._unify(Poly(x + 2, x, y))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ))
    assert Poly(x + 1, y, x)._unify(Poly(x + 2, x, y, domain='QQ'))[2:] == (DMP([[1, 1]], QQ), DMP([[1, 2]], QQ))

    F, A, B = field("a,b", ZZ)

    # Polynomial ring coefficients unify with rational function fields.
    assert Poly(a*x, x, domain='ZZ[a]')._unify(Poly(a*b*x, x, domain='ZZ(a,b)'))[2:] == \
        (DMP([A, F(0)], F.to_domain()), DMP([A*B, F(0)], F.to_domain()))

    assert Poly(a*x, x, domain='ZZ(a)')._unify(Poly(a*b*x, x, domain='ZZ(a,b)'))[2:] == \
        (DMP([A, F(0)], F.to_domain()), DMP([A*B, F(0)], F.to_domain()))

    raises(CoercionFailed, lambda: Poly(Poly(x**2 + x**2*z, y, field=True), domain='ZZ(x)'))

    f = Poly(t**2 + t/3 + x, t, domain='QQ(x)')
    g = Poly(t**2 + t/3 + x, t, domain='QQ[x]')

    assert f._unify(g)[2:] == (f.rep, f.rep)
def test_Poly_free_symbols():
    """Poly.free_symbols includes symbols from both generators and coefficients."""
    assert Poly(x**2 + 1).free_symbols == {x}
    assert Poly(x**2 + y*z).free_symbols == {x, y, z}
    assert Poly(x**2 + y*z, x).free_symbols == {x, y, z}
    assert Poly(x**2 + sin(y*z)).free_symbols == {x, y, z}
    assert Poly(x**2 + sin(y*z), x).free_symbols == {x, y, z}
    assert Poly(x**2 + sin(y*z), x, domain=EX).free_symbols == {x, y, z}


def test_PurePoly_free_symbols():
    """PurePoly abstracts its generators away; only coefficient symbols remain."""
    assert PurePoly(x**2 + 1).free_symbols == set([])
    assert PurePoly(x**2 + y*z).free_symbols == set([])
    assert PurePoly(x**2 + y*z, x).free_symbols == {y, z}
    assert PurePoly(x**2 + sin(y*z)).free_symbols == set([])
    assert PurePoly(x**2 + sin(y*z), x).free_symbols == {y, z}
    assert PurePoly(x**2 + sin(y*z), x, domain=EX).free_symbols == {y, z}
def test_Poly__eq__():
    """Equality for Poly: generators matter, domains may differ; eq(strict=True) does not."""
    assert (Poly(x, x) == Poly(x, x)) is True
    assert (Poly(x, x, domain=QQ) == Poly(x, x)) is True
    assert (Poly(x, x) == Poly(x, x, domain=QQ)) is True

    assert (Poly(x, x, domain=ZZ[a]) == Poly(x, x)) is True
    assert (Poly(x, x) == Poly(x, x, domain=ZZ[a])) is True

    assert (Poly(x*y, x, y) == Poly(x, x)) is False

    assert (Poly(x, x, y) == Poly(x, x)) is False
    assert (Poly(x, x) == Poly(x, x, y)) is False

    assert (Poly(x**2 + 1, x) == Poly(y**2 + 1, y)) is False
    assert (Poly(y**2 + 1, y) == Poly(x**2 + 1, x)) is False

    f = Poly(x, x, domain=ZZ)
    g = Poly(x, x, domain=QQ)

    assert f.eq(g) is True
    assert f.ne(g) is False

    assert f.eq(g, strict=True) is False
    assert f.ne(g, strict=True) is True

    t0 = Symbol('t0')

    f = Poly((t0/2 + x**2)*t**2 - x**2*t, t, domain='QQ[x,t0]')
    g = Poly((t0/2 + x**2)*t**2 - x**2*t, t, domain='ZZ(x,t0)')

    assert (f == g) is True


def test_PurePoly__eq__():
    """PurePoly equality ignores generator names entirely."""
    assert (PurePoly(x, x) == PurePoly(x, x)) is True
    assert (PurePoly(x, x, domain=QQ) == PurePoly(x, x)) is True
    assert (PurePoly(x, x) == PurePoly(x, x, domain=QQ)) is True

    assert (PurePoly(x, x, domain=ZZ[a]) == PurePoly(x, x)) is True
    assert (PurePoly(x, x) == PurePoly(x, x, domain=ZZ[a])) is True

    assert (PurePoly(x*y, x, y) == PurePoly(x, x)) is False

    assert (PurePoly(x, x, y) == PurePoly(x, x)) is False
    assert (PurePoly(x, x) == PurePoly(x, x, y)) is False

    assert (PurePoly(x**2 + 1, x) == PurePoly(y**2 + 1, y)) is True
    assert (PurePoly(y**2 + 1, y) == PurePoly(x**2 + 1, x)) is True

    f = PurePoly(x, x, domain=ZZ)
    g = PurePoly(x, x, domain=QQ)

    assert f.eq(g) is True
    assert f.ne(g) is False

    assert f.eq(g, strict=True) is False
    assert f.ne(g, strict=True) is True

    f = PurePoly(x, x, domain=ZZ)
    g = PurePoly(y, y, domain=QQ)

    assert f.eq(g) is True
    assert f.ne(g) is False

    assert f.eq(g, strict=True) is False
    assert f.ne(g, strict=True) is True


def test_PurePoly_Poly():
    """Conversions between Poly and PurePoly preserve the target class."""
    assert isinstance(PurePoly(Poly(x**2 + 1)), PurePoly) is True
    assert isinstance(Poly(PurePoly(x**2 + 1)), Poly) is True
def test_Poly_get_domain():
    """Domain inference and explicit selection."""
    assert Poly(2*x).get_domain() == ZZ

    assert Poly(2*x, domain='ZZ').get_domain() == ZZ
    assert Poly(2*x, domain='QQ').get_domain() == QQ

    assert Poly(x/2).get_domain() == QQ

    raises(CoercionFailed, lambda: Poly(x/2, domain='ZZ'))
    assert Poly(x/2, domain='QQ').get_domain() == QQ

    assert isinstance(Poly(0.2*x).get_domain(), RealField)


def test_Poly_set_domain():
    """Changing the coefficient domain, including RR<->QQ round-trips."""
    assert Poly(2*x + 1).set_domain(ZZ) == Poly(2*x + 1)
    assert Poly(2*x + 1).set_domain('ZZ') == Poly(2*x + 1)

    assert Poly(2*x + 1).set_domain(QQ) == Poly(2*x + 1, domain='QQ')
    assert Poly(2*x + 1).set_domain('QQ') == Poly(2*x + 1, domain='QQ')

    assert Poly(S(2)/10*x + S(1)/10).set_domain('RR') == Poly(0.2*x + 0.1)
    assert Poly(0.2*x + 0.1).set_domain('QQ') == Poly(S(2)/10*x + S(1)/10)

    raises(CoercionFailed, lambda: Poly(x/2 + 1).set_domain(ZZ))
    raises(CoercionFailed, lambda: Poly(x + 1, modulus=2).set_domain(QQ))

    raises(GeneratorsError, lambda: Poly(x*y, x, y).set_domain(ZZ[y]))


def test_Poly_get_modulus():
    """get_modulus only works for finite-field polynomials."""
    assert Poly(x**2 + 1, modulus=2).get_modulus() == 2
    raises(PolynomialError, lambda: Poly(x**2 + 1).get_modulus())


def test_Poly_set_modulus():
    """Changing or introducing a modulus reduces the coefficients."""
    assert Poly(
        x**2 + 1, modulus=2).set_modulus(7) == Poly(x**2 + 1, modulus=7)
    assert Poly(
        x**2 + 5, modulus=7).set_modulus(2) == Poly(x**2 + 1, modulus=2)

    assert Poly(x**2 + 1).set_modulus(2) == Poly(x**2 + 1, modulus=2)

    raises(CoercionFailed, lambda: Poly(x/2 + 1).set_modulus(2))
def test_Poly_add_ground():
    """Add a ground-domain constant."""
    assert Poly(x + 1).add_ground(2) == Poly(x + 3)


def test_Poly_sub_ground():
    """Subtract a ground-domain constant."""
    assert Poly(x + 1).sub_ground(2) == Poly(x - 1)


def test_Poly_mul_ground():
    """Multiply by a ground-domain constant."""
    assert Poly(x + 1).mul_ground(2) == Poly(2*x + 2)


def test_Poly_quo_ground():
    """Floor-division by a ground constant (truncating over ZZ)."""
    assert Poly(2*x + 4).quo_ground(2) == Poly(x + 2)
    assert Poly(2*x + 3).quo_ground(2) == Poly(x + 1)


def test_Poly_exquo_ground():
    """Exact division by a ground constant; inexact raises."""
    assert Poly(2*x + 4).exquo_ground(2) == Poly(x + 2)
    raises(ExactQuotientFailed, lambda: Poly(2*x + 3).exquo_ground(2))


def test_Poly_abs():
    """abs() takes the absolute value of every coefficient."""
    assert Poly(-x + 1, x).abs() == abs(Poly(-x + 1, x)) == Poly(x + 1, x)


def test_Poly_neg():
    """neg() negates every coefficient."""
    assert Poly(-x + 1, x).neg() == -Poly(-x + 1, x) == Poly(x - 1, x)
def test_Poly_add():
    """Addition: Poly+Poly unifies generators; Poly+Expr may fall back to Expr."""
    assert Poly(0, x).add(Poly(0, x)) == Poly(0, x)
    assert Poly(0, x) + Poly(0, x) == Poly(0, x)

    assert Poly(1, x).add(Poly(0, x)) == Poly(1, x)
    assert Poly(1, x, y) + Poly(0, x) == Poly(1, x, y)
    assert Poly(0, x).add(Poly(1, x, y)) == Poly(1, x, y)
    assert Poly(0, x, y) + Poly(1, x, y) == Poly(1, x, y)

    assert Poly(1, x) + x == Poly(x + 1, x)
    # Non-polynomial operand degrades to a plain expression.
    assert Poly(1, x) + sin(x) == 1 + sin(x)

    assert Poly(x, x) + 1 == Poly(x + 1, x)
    assert 1 + Poly(x, x) == Poly(x + 1, x)


def test_Poly_sub():
    """Subtraction mirrors addition, including the Expr fallback."""
    assert Poly(0, x).sub(Poly(0, x)) == Poly(0, x)
    assert Poly(0, x) - Poly(0, x) == Poly(0, x)

    assert Poly(1, x).sub(Poly(0, x)) == Poly(1, x)
    assert Poly(1, x, y) - Poly(0, x) == Poly(1, x, y)
    assert Poly(0, x).sub(Poly(1, x, y)) == Poly(-1, x, y)
    assert Poly(0, x, y) - Poly(1, x, y) == Poly(-1, x, y)

    assert Poly(1, x) - x == Poly(1 - x, x)
    assert Poly(1, x) - sin(x) == 1 - sin(x)

    assert Poly(x, x) - 1 == Poly(x - 1, x)
    assert 1 - Poly(x, x) == Poly(1 - x, x)


def test_Poly_mul():
    """Multiplication mirrors addition, including the Expr fallback."""
    assert Poly(0, x).mul(Poly(0, x)) == Poly(0, x)
    assert Poly(0, x) * Poly(0, x) == Poly(0, x)

    assert Poly(2, x).mul(Poly(4, x)) == Poly(8, x)
    assert Poly(2, x, y) * Poly(4, x) == Poly(8, x, y)
    assert Poly(4, x).mul(Poly(2, x, y)) == Poly(8, x, y)
    assert Poly(4, x, y) * Poly(2, x, y) == Poly(8, x, y)

    assert Poly(1, x) * x == Poly(x, x)
    assert Poly(1, x) * sin(x) == sin(x)

    assert Poly(x, x) * 2 == Poly(2*x, x)
    assert 2 * Poly(x, x) == Poly(2*x, x)


def test_Poly_sqr():
    """sqr() squares the polynomial."""
    assert Poly(x*y, x, y).sqr() == Poly(x**2*y**2, x, y)


def test_Poly_pow():
    """Integer powers stay Poly; negative/symbolic exponents fall back to Expr."""
    assert Poly(x, x).pow(10) == Poly(x**10, x)
    assert Poly(x, x).pow(Integer(10)) == Poly(x**10, x)

    assert Poly(2*y, x, y).pow(4) == Poly(16*y**4, x, y)
    assert Poly(2*y, x, y).pow(Integer(4)) == Poly(16*y**4, x, y)

    assert Poly(7*x*y, x, y)**3 == Poly(343*x**3*y**3, x, y)

    assert Poly(x*y + 1, x, y)**(-1) == (x*y + 1)**(-1)
    assert Poly(x*y + 1, x, y)**x == (x*y + 1)**x
def test_Poly_divmod():
    """divmod/floordiv/mod between polynomials and mixed operands."""
    f, g = Poly(x**2), Poly(x)
    q, r = g, Poly(0, x)

    assert divmod(f, g) == (q, r)
    assert f // g == q
    assert f % g == r

    assert divmod(f, x) == (q, r)
    assert f // x == q
    assert f % x == r

    q, r = Poly(0, x), Poly(2, x)

    assert divmod(2, g) == (q, r)
    assert 2 // g == q
    assert 2 % g == r

    # True division falls back to plain expressions.
    assert Poly(x)/Poly(x) == 1
    assert Poly(x**2)/Poly(x) == x
    assert Poly(x)/Poly(x**2) == 1/x


def test_Poly_eq_ne():
    """== / != between Poly and Poly, and between Poly and Expr."""
    assert (Poly(x + y, x, y) == Poly(x + y, x, y)) is True
    assert (Poly(x + y, x) == Poly(x + y, x, y)) is False
    assert (Poly(x + y, x, y) == Poly(x + y, x)) is False
    assert (Poly(x + y, x) == Poly(x + y, x)) is True
    assert (Poly(x + y, y) == Poly(x + y, y)) is True

    assert (Poly(x + y, x, y) == x + y) is True
    assert (Poly(x + y, x) == x + y) is True
    assert (Poly(x + y, x, y) == x + y) is True
    assert (Poly(x + y, x) == x + y) is True
    assert (Poly(x + y, y) == x + y) is True

    assert (Poly(x + y, x, y) != Poly(x + y, x, y)) is False
    assert (Poly(x + y, x) != Poly(x + y, x, y)) is True
    assert (Poly(x + y, x, y) != Poly(x + y, x)) is True
    assert (Poly(x + y, x) != Poly(x + y, x)) is False
    assert (Poly(x + y, y) != Poly(x + y, y)) is False

    assert (Poly(x + y, x, y) != x + y) is False
    assert (Poly(x + y, x) != x + y) is False
    assert (Poly(x + y, x, y) != x + y) is False
    assert (Poly(x + y, x) != x + y) is False
    assert (Poly(x + y, y) != x + y) is False

    assert (Poly(x, x) == sin(x)) is False
    assert (Poly(x, x) != sin(x)) is True


def test_Poly_nonzero():
    """Truthiness follows is_zero."""
    assert not bool(Poly(0, x)) is True
    assert not bool(Poly(1, x)) is False
def test_Poly_properties():
    """Boolean structural properties of Poly instances."""
    assert Poly(0, x).is_zero is True
    assert Poly(1, x).is_zero is False

    assert Poly(1, x).is_one is True
    assert Poly(2, x).is_one is False

    assert Poly(x - 1, x).is_sqf is True
    assert Poly((x - 1)**2, x).is_sqf is False

    assert Poly(x - 1, x).is_monic is True
    assert Poly(2*x - 1, x).is_monic is False

    assert Poly(3*x + 2, x).is_primitive is True
    assert Poly(4*x + 2, x).is_primitive is False

    assert Poly(1, x).is_ground is True
    assert Poly(x, x).is_ground is False

    assert Poly(x + y + z + 1).is_linear is True
    assert Poly(x*y*z + 1).is_linear is False

    assert Poly(x*y + z + 1).is_quadratic is True
    assert Poly(x*y*z + 1).is_quadratic is False

    assert Poly(x*y).is_monomial is True
    assert Poly(x*y + 1).is_monomial is False

    assert Poly(x**2 + x*y).is_homogeneous is True
    assert Poly(x**3 + x*y).is_homogeneous is False

    assert Poly(x).is_univariate is True
    assert Poly(x*y).is_univariate is False

    assert Poly(x*y).is_multivariate is True
    assert Poly(x).is_multivariate is False

    assert Poly(
        x**16 + x**14 - x**10 + x**8 - x**6 + x**2 + 1).is_cyclotomic is False
    assert Poly(
        x**16 + x**14 - x**10 - x**8 - x**6 + x**2 + 1).is_cyclotomic is True


def test_Poly_is_irreducible():
    """Irreducibility over the rationals and over finite fields."""
    assert Poly(x**2 + x + 1).is_irreducible is True
    assert Poly(x**2 + 2*x + 1).is_irreducible is False

    assert Poly(7*x + 3, modulus=11).is_irreducible is True
    assert Poly(7*x**2 + 3*x + 1, modulus=11).is_irreducible is False
def test_Poly_subs():
    """Substitution: generator substitutions can collapse to plain expressions."""
    assert Poly(x + 1).subs(x, 0) == 1

    assert Poly(x + 1).subs(x, x) == Poly(x + 1)
    assert Poly(x + 1).subs(x, y) == Poly(y + 1)

    assert Poly(x*y, x).subs(y, x) == x**2
    assert Poly(x*y, x).subs(x, y) == y**2


def test_Poly_replace():
    """Generator renaming; ambiguous or colliding replacements raise."""
    assert Poly(x + 1).replace(x) == Poly(x + 1)
    assert Poly(x + 1).replace(y) == Poly(y + 1)

    raises(PolynomialError, lambda: Poly(x + y).replace(z))

    assert Poly(x + 1).replace(x, x) == Poly(x + 1)
    assert Poly(x + 1).replace(x, y) == Poly(y + 1)

    assert Poly(x + y).replace(x, x) == Poly(x + y)
    assert Poly(x + y).replace(x, z) == Poly(z + y, z, y)

    assert Poly(x + y).replace(y, y) == Poly(x + y)
    assert Poly(x + y).replace(y, z) == Poly(x + z, x, z)

    raises(PolynomialError, lambda: Poly(x + y).replace(x, y))
    raises(PolynomialError, lambda: Poly(x + y).replace(z, t))

    assert Poly(x + y, x).replace(x, z) == Poly(z + y, z)
    assert Poly(x + y, y).replace(y, z) == Poly(x + z, z)

    raises(PolynomialError, lambda: Poly(x + y, x).replace(x, y))
    raises(PolynomialError, lambda: Poly(x + y, y).replace(y, x))


def test_Poly_reorder():
    """Reordering generators, positionally or via `wrt`."""
    raises(PolynomialError, lambda: Poly(x + y).reorder(x, z))

    assert Poly(x + y, x, y).reorder(x, y) == Poly(x + y, x, y)
    assert Poly(x + y, x, y).reorder(y, x) == Poly(x + y, y, x)

    assert Poly(x + y, y, x).reorder(x, y) == Poly(x + y, x, y)
    assert Poly(x + y, y, x).reorder(y, x) == Poly(x + y, y, x)

    assert Poly(x + y, x, y).reorder(wrt=x) == Poly(x + y, x, y)
    assert Poly(x + y, x, y).reorder(wrt=y) == Poly(x + y, y, x)


def test_Poly_ltrim():
    """Drop leading generators that do not occur; occurring ones raise."""
    f = Poly(y**2 + y*z**2, x, y, z).ltrim(y)
    assert f.as_expr() == y**2 + y*z**2 and f.gens == (y, z)

    raises(PolynomialError, lambda: Poly(x*y**2 + y**2, x, y).ltrim(y))


def test_Poly_has_only_gens():
    """Check that only the listed generators actually occur."""
    assert Poly(x*y + 1, x, y, z).has_only_gens(x, y) is True
    assert Poly(x*y + z, x, y, z).has_only_gens(x, y) is False

    raises(GeneratorsError, lambda: Poly(x*y**2 + y**2, x, y).has_only_gens(t))
def test_Poly_to_ring():
assert Poly(2*x + 1, domain='ZZ').to_ring() == Poly(2*x + 1, domain='ZZ')
assert Poly(2*x + 1, domain='QQ').to_ring() == Poly(2*x + 1, domain='ZZ')
raises(CoercionFailed, lambda: Poly(x/2 + 1).to_ring())
raises(DomainError, lambda: Poly(2*x + 1, modulus=3).to_ring())
def test_Poly_to_field():
    """to_field() converts the coefficient domain to its field of fractions
    (ZZ -> QQ); field-like domains are left unchanged."""
    assert Poly(2*x + 1, domain='ZZ').to_field() == Poly(2*x + 1, domain='QQ')
    assert Poly(2*x + 1, domain='QQ').to_field() == Poly(2*x + 1, domain='QQ')
    assert Poly(x/2 + 1, domain='QQ').to_field() == Poly(x/2 + 1, domain='QQ')
    assert Poly(2*x + 1, modulus=3).to_field() == Poly(2*x + 1, modulus=3)
    assert Poly(2.0*x + 1.0).to_field() == Poly(2.0*x + 1.0)
def test_Poly_to_exact():
    """to_exact() converts inexact (float) coefficients to exact rationals."""
    cases = [
        (Poly(2*x), Poly(2*x)),        # exact integer coefficients: unchanged
        (Poly(x/2), Poly(x/2)),        # exact rational coefficients: unchanged
        (Poly(0.1*x), Poly(x/10)),     # float 0.1 becomes the rational 1/10
    ]
    for inexact, exact in cases:
        assert inexact.to_exact() == exact
def test_Poly_retract():
    """retract() shrinks the coefficient domain to the smallest that still
    contains all coefficients; field=True retracts to a field instead."""
    f = Poly(x**2 + 1, x, domain=QQ[y])
    assert f.retract() == Poly(x**2 + 1, x, domain='ZZ')
    assert f.retract(field=True) == Poly(x**2 + 1, x, domain='QQ')
    # zero polynomial retracts without error
    assert Poly(0, x, y).retract() == Poly(0, x, y)
def test_Poly_slice():
    """slice(m, n) (optionally with an explicit generator) keeps only the
    terms whose degree d satisfies m <= d < n."""
    f = Poly(x**3 + 2*x**2 + 3*x + 4)
    assert f.slice(0, 0) == Poly(0, x)
    assert f.slice(0, 1) == Poly(4, x)
    assert f.slice(0, 2) == Poly(3*x + 4, x)
    assert f.slice(0, 3) == Poly(2*x**2 + 3*x + 4, x)
    assert f.slice(0, 4) == Poly(x**3 + 2*x**2 + 3*x + 4, x)
    # same slices, with the generator given explicitly
    assert f.slice(x, 0, 0) == Poly(0, x)
    assert f.slice(x, 0, 1) == Poly(4, x)
    assert f.slice(x, 0, 2) == Poly(3*x + 4, x)
    assert f.slice(x, 0, 3) == Poly(2*x**2 + 3*x + 4, x)
    assert f.slice(x, 0, 4) == Poly(x**3 + 2*x**2 + 3*x + 4, x)
def test_Poly_coeffs():
    """coeffs() lists non-zero coefficients in the given monomial order
    (default 'lex'); zero terms are not included."""
    assert Poly(0, x).coeffs() == [0]
    assert Poly(1, x).coeffs() == [1]
    assert Poly(2*x + 1, x).coeffs() == [2, 1]
    assert Poly(7*x**2 + 2*x + 1, x).coeffs() == [7, 2, 1]
    assert Poly(7*x**4 + 2*x + 1, x).coeffs() == [7, 2, 1]
    # order matters: 'lex' leads with x**2*y**3, 'grlex' with x*y**7
    assert Poly(x*y**7 + 2*x**2*y**3).coeffs('lex') == [2, 1]
    assert Poly(x*y**7 + 2*x**2*y**3).coeffs('grlex') == [1, 2]
def test_Poly_monoms():
    """monoms() lists exponent tuples of non-zero terms in the given
    monomial order (default 'lex')."""
    assert Poly(0, x).monoms() == [(0,)]
    assert Poly(1, x).monoms() == [(0,)]
    assert Poly(2*x + 1, x).monoms() == [(1,), (0,)]
    assert Poly(7*x**2 + 2*x + 1, x).monoms() == [(2,), (1,), (0,)]
    assert Poly(7*x**4 + 2*x + 1, x).monoms() == [(4,), (1,), (0,)]
    assert Poly(x*y**7 + 2*x**2*y**3).monoms('lex') == [(2, 3), (1, 7)]
    assert Poly(x*y**7 + 2*x**2*y**3).monoms('grlex') == [(1, 7), (2, 3)]
def test_Poly_terms():
    """terms() lists (monom, coeff) pairs of non-zero terms in the given
    monomial order (default 'lex')."""
    assert Poly(0, x).terms() == [((0,), 0)]
    assert Poly(1, x).terms() == [((0,), 1)]
    assert Poly(2*x + 1, x).terms() == [((1,), 2), ((0,), 1)]
    assert Poly(7*x**2 + 2*x + 1, x).terms() == [((2,), 7), ((1,), 2), ((0,), 1)]
    assert Poly(7*x**4 + 2*x + 1, x).terms() == [((4,), 7), ((1,), 2), ((0,), 1)]
    assert Poly(
        x*y**7 + 2*x**2*y**3).terms('lex') == [((2, 3), 2), ((1, 7), 1)]
    assert Poly(
        x*y**7 + 2*x**2*y**3).terms('grlex') == [((1, 7), 1), ((2, 3), 2)]
def test_Poly_all_coeffs():
    """all_coeffs() lists every coefficient from leading to constant term,
    including explicit zeros for missing degrees (univariate only)."""
    assert Poly(0, x).all_coeffs() == [0]
    assert Poly(1, x).all_coeffs() == [1]
    assert Poly(2*x + 1, x).all_coeffs() == [2, 1]
    assert Poly(7*x**2 + 2*x + 1, x).all_coeffs() == [7, 2, 1]
    # degrees 3 and 2 are absent, so zeros are filled in
    assert Poly(7*x**4 + 2*x + 1, x).all_coeffs() == [7, 0, 0, 2, 1]
def test_Poly_all_monoms():
    """all_monoms() lists every exponent tuple from the leading degree down
    to zero, including degrees with zero coefficient (univariate only)."""
    assert Poly(0, x).all_monoms() == [(0,)]
    assert Poly(1, x).all_monoms() == [(0,)]
    assert Poly(2*x + 1, x).all_monoms() == [(1,), (0,)]
    assert Poly(7*x**2 + 2*x + 1, x).all_monoms() == [(2,), (1,), (0,)]
    assert Poly(7*x**4 + 2*x + 1, x).all_monoms() == [(4,), (3,), (2,), (1,), (0,)]
def test_Poly_all_terms():
    """all_terms() lists (monom, coeff) pairs for every degree from leading
    down to zero, including zero coefficients (univariate only)."""
    assert Poly(0, x).all_terms() == [((0,), 0)]
    assert Poly(1, x).all_terms() == [((0,), 1)]
    assert Poly(2*x + 1, x).all_terms() == [((1,), 2), ((0,), 1)]
    assert Poly(7*x**2 + 2*x + 1, x).all_terms() == \
        [((2,), 7), ((1,), 2), ((0,), 1)]
    assert Poly(7*x**4 + 2*x + 1, x).all_terms() == \
        [((4,), 7), ((3,), 0), ((2,), 0), ((1,), 2), ((0,), 1)]
def test_Poly_termwise():
    """termwise(func) rebuilds the polynomial by applying func to each
    (monom, coeff) pair; func may return just a coeff or a (monom, coeff)."""
    f = Poly(x**2 + 20*x + 400)
    g = Poly(x**2 + 2*x + 4)
    # func returning only the new coefficient
    def func(monom, coeff):
        (k,) = monom
        return coeff//10**(2 - k)
    assert f.termwise(func) == g
    # func returning a full (monom, coeff) pair
    def func(monom, coeff):
        (k,) = monom
        return (k,), coeff//10**(2 - k)
    assert f.termwise(func) == g
def test_Poly_length():
    """length() counts the number of non-zero terms."""
    cases = [
        (0, 0),             # the zero polynomial has no terms
        (1, 1),
        (x, 1),
        (x + 1, 2),
        (x**2 + 1, 2),
        (x**2 + x + 1, 3),
    ]
    for expr, n_terms in cases:
        assert Poly(expr, x).length() == n_terms
def test_Poly_as_dict():
    """as_dict() maps exponent tuples to coefficients; the zero polynomial
    yields an empty dict and zero coefficients are omitted."""
    assert Poly(0, x).as_dict() == {}
    assert Poly(0, x, y, z).as_dict() == {}
    assert Poly(1, x).as_dict() == {(0,): 1}
    assert Poly(1, x, y, z).as_dict() == {(0, 0, 0): 1}
    assert Poly(x**2 + 3, x).as_dict() == {(2,): 1, (0,): 3}
    assert Poly(x**2 + 3, x, y, z).as_dict() == {(2, 0, 0): 1, (0, 0, 0): 3}
    assert Poly(3*x**2*y*z**3 + 4*x*y + 5*x*z).as_dict() == {(2, 1, 3): 3,
        (1, 1, 0): 4, (1, 0, 1): 5}
def test_Poly_as_expr():
    """as_expr() converts back to an Expr, optionally substituting values
    for generators (by dict or positionally); unknown generators raise."""
    assert Poly(0, x).as_expr() == 0
    assert Poly(0, x, y, z).as_expr() == 0
    assert Poly(1, x).as_expr() == 1
    assert Poly(1, x, y, z).as_expr() == 1
    assert Poly(x**2 + 3, x).as_expr() == x**2 + 3
    assert Poly(x**2 + 3, x, y, z).as_expr() == x**2 + 3
    assert Poly(
        3*x**2*y*z**3 + 4*x*y + 5*x*z).as_expr() == 3*x**2*y*z**3 + 4*x*y + 5*x*z
    f = Poly(x**2 + 2*x*y**2 - y, x, y)
    assert f.as_expr() == -y + x**2 + 2*x*y**2
    # partial substitution leaves remaining generators symbolic
    assert f.as_expr({x: 5}) == 25 - y + 10*y**2
    assert f.as_expr({y: 6}) == -6 + 72*x + x**2
    assert f.as_expr({x: 5, y: 6}) == 379
    # positional values are matched to generators in order
    assert f.as_expr(5, 6) == 379
    raises(GeneratorsError, lambda: f.as_expr({z: 7}))
def test_Poly_lift():
    """lift() converts a polynomial over an algebraic extension (here the
    Gaussian rationals) into one over the base field QQ."""
    assert Poly(x**4 - I*x + 17*I, x, gaussian=True).lift() == \
        Poly(x**16 + 2*x**10 + 578*x**8 + x**4 - 578*x**2 + 83521,
             x, domain='QQ')
def test_Poly_deflate():
    """deflate() factors out the gcd of exponents per generator, returning
    the exponent multipliers and the deflated polynomial."""
    assert Poly(0, x).deflate() == ((1,), Poly(0, x))
    assert Poly(1, x).deflate() == ((1,), Poly(1, x))
    assert Poly(x, x).deflate() == ((1,), Poly(x, x))
    assert Poly(x**2, x).deflate() == ((2,), Poly(x, x))
    assert Poly(x**17, x).deflate() == ((17,), Poly(x, x))
    # per-generator: gcd of x-exponents is 2, y is 1, z is 11
    assert Poly(
        x**2*y*z**11 + x**4*z**11).deflate() == ((2, 1, 11), Poly(x*y*z + x**2*z))
def test_Poly_inject():
    """inject() turns domain symbols into generators, appended by default
    or prepended with front=True."""
    f = Poly(x**2*y + x*y**3 + x*y + 1, x)
    assert f.inject() == Poly(x**2*y + x*y**3 + x*y + 1, x, y)
    assert f.inject(front=True) == Poly(y**3*x + y*x**2 + y*x + 1, y, x)
def test_Poly_eject():
    """eject(*gens) moves generators into the coefficient domain; only
    contiguous front/back generator runs over a simple domain are supported."""
    f = Poly(x**2*y + x*y**3 + x*y + 1, x, y)
    assert f.eject(x) == Poly(x*y**3 + (x**2 + x)*y + 1, y, domain='ZZ[x]')
    assert f.eject(y) == Poly(y*x**2 + (y**3 + y)*x + 1, x, domain='ZZ[y]')
    ex = x + y + z + t + w
    g = Poly(ex, x, y, z, t, w)
    # ejecting from the front of the generator tuple
    assert g.eject(x) == Poly(ex, y, z, t, w, domain='ZZ[x]')
    assert g.eject(x, y) == Poly(ex, z, t, w, domain='ZZ[x, y]')
    assert g.eject(x, y, z) == Poly(ex, t, w, domain='ZZ[x, y, z]')
    # ejecting from the back
    assert g.eject(w) == Poly(ex, x, y, z, t, domain='ZZ[w]')
    assert g.eject(t, w) == Poly(ex, x, y, z, domain='ZZ[w, t]')
    assert g.eject(z, t, w) == Poly(ex, x, y, domain='ZZ[w, t, z]')
    # composite domain / middle-generator ejection is unsupported
    raises(DomainError, lambda: Poly(x*y, x, y, domain=ZZ[z]).eject(y))
    raises(NotImplementedError, lambda: Poly(x*y, x, y, z).eject(y))
def test_Poly_exclude():
    """exclude() removes generators that play no role in the polynomial,
    keeping them for polynomials where exclusion is not possible."""
    pairs = [
        (Poly(x, x, y), Poly(x, x)),          # y never occurs: removed
        (Poly(x*y, x, y), Poly(x*y, x, y)),   # both occur: unchanged
        (Poly(1, x, y), Poly(1, x, y)),       # constant: generators kept
    ]
    for before, after in pairs:
        assert before.exclude() == after
def test_Poly__gen_to_level():
    """_gen_to_level() resolves a generator given as index (with negative
    wrap-around), symbol, or name into its positional level."""
    # negative indices count from the end
    assert Poly(1, x, y)._gen_to_level(-2) == 0
    assert Poly(1, x, y)._gen_to_level(-1) == 1
    assert Poly(1, x, y)._gen_to_level( 0) == 0
    assert Poly(1, x, y)._gen_to_level( 1) == 1
    # out-of-range indices raise
    raises(PolynomialError, lambda: Poly(1, x, y)._gen_to_level(-3))
    raises(PolynomialError, lambda: Poly(1, x, y)._gen_to_level( 2))
    assert Poly(1, x, y)._gen_to_level(x) == 0
    assert Poly(1, x, y)._gen_to_level(y) == 1
    assert Poly(1, x, y)._gen_to_level('x') == 0
    assert Poly(1, x, y)._gen_to_level('y') == 1
    # unknown generators raise, whether given as symbol or name
    raises(PolynomialError, lambda: Poly(1, x, y)._gen_to_level(z))
    raises(PolynomialError, lambda: Poly(1, x, y)._gen_to_level('z'))
def test_Poly_degree():
    """Poly.degree() / the degree() function return the degree in a chosen
    generator; the zero polynomial has degree -oo."""
    assert Poly(0, x).degree() == -oo
    assert Poly(1, x).degree() == 0
    assert Poly(x, x).degree() == 1
    # generator may be given by index, symbol, or name
    assert Poly(0, x).degree(gen=0) == -oo
    assert Poly(1, x).degree(gen=0) == 0
    assert Poly(x, x).degree(gen=0) == 1
    assert Poly(0, x).degree(gen=x) == -oo
    assert Poly(1, x).degree(gen=x) == 0
    assert Poly(x, x).degree(gen=x) == 1
    assert Poly(0, x).degree(gen='x') == -oo
    assert Poly(1, x).degree(gen='x') == 0
    assert Poly(x, x).degree(gen='x') == 1
    # unknown generators raise
    raises(PolynomialError, lambda: Poly(1, x).degree(gen=1))
    raises(PolynomialError, lambda: Poly(1, x).degree(gen=y))
    raises(PolynomialError, lambda: Poly(1, x).degree(gen='y'))
    # multivariate: default is the degree in the first generator
    assert Poly(1, x, y).degree() == 0
    assert Poly(2*y, x, y).degree() == 0
    assert Poly(x*y, x, y).degree() == 1
    assert Poly(1, x, y).degree(gen=x) == 0
    assert Poly(2*y, x, y).degree(gen=x) == 0
    assert Poly(x*y, x, y).degree(gen=x) == 1
    assert Poly(1, x, y).degree(gen=y) == 0
    assert Poly(2*y, x, y).degree(gen=y) == 1
    assert Poly(x*y, x, y).degree(gen=y) == 1
    # the public degree() function mirrors the method
    assert degree(1, x) == 0
    assert degree(x, x) == 1
    assert degree(x*y**2, gen=x) == 1
    assert degree(x*y**2, gen=y) == 2
    assert degree(x*y**2, x, y) == 1
    assert degree(x*y**2, y, x) == 2
    raises(ComputationFailed, lambda: degree(1))
def test_Poly_degree_list():
    """degree_list() returns the per-generator degrees as a tuple; the zero
    polynomial yields -oo for every generator."""
    assert Poly(0, x).degree_list() == (-oo,)
    assert Poly(0, x, y).degree_list() == (-oo, -oo)
    assert Poly(0, x, y, z).degree_list() == (-oo, -oo, -oo)
    assert Poly(1, x).degree_list() == (0,)
    assert Poly(1, x, y).degree_list() == (0, 0)
    assert Poly(1, x, y, z).degree_list() == (0, 0, 0)
    assert Poly(x**2*y + x**3*z**2 + 1).degree_list() == (3, 1, 2)
    # the public degree_list() function mirrors the method
    assert degree_list(1, x) == (0,)
    assert degree_list(x, x) == (1,)
    assert degree_list(x*y**2) == (1, 2)
    raises(ComputationFailed, lambda: degree_list(1))
def test_Poly_total_degree():
    """total_degree() is the maximum, over all monomials, of the sum of
    the exponents."""
    expectations = [
        (x**2*y + x**3*z**2 + 1, 5),   # x**3*z**2 has total degree 3 + 2
        (x**2 + z**3, 3),
        (x*y*z + z**4, 4),
        (x**3 + x + 1, 3),
    ]
    for expr, total in expectations:
        assert Poly(expr).total_degree() == total
def test_Poly_homogenize():
    """homogenize(s) multiplies each monomial by a power of s so every term
    reaches the same total degree; s may be new or an existing generator."""
    assert Poly(x**2+y).homogenize(z) == Poly(x**2+y*z)
    # already homogeneous: z is appended as a generator but unused
    assert Poly(x+y).homogenize(z) == Poly(x+y, x, y, z)
    # homogenizing with an existing generator reuses it
    assert Poly(x+y**2).homogenize(y) == Poly(x*y+y**2)
def test_Poly_homogeneous_order():
    """homogeneous_order() returns the common total degree of all terms,
    None if the polynomial is not homogeneous, -oo for zero."""
    assert Poly(0, x, y).homogeneous_order() == -oo
    assert Poly(1, x, y).homogeneous_order() == 0
    assert Poly(x, x, y).homogeneous_order() == 1
    assert Poly(x*y, x, y).homogeneous_order() == 2
    # mixed total degrees -> not homogeneous
    assert Poly(x + 1, x, y).homogeneous_order() is None
    assert Poly(x*y + x, x, y).homogeneous_order() is None
    assert Poly(x**5 + 2*x**3*y**2 + 9*x*y**4).homogeneous_order() == 5
    assert Poly(x**5 + 2*x**3*y**3 + 9*x*y**4).homogeneous_order() is None
def test_Poly_LC():
    """LC() is the leading coefficient with respect to a monomial order
    (default 'lex'); the LC() function mirrors the method."""
    assert Poly(0, x).LC() == 0
    assert Poly(1, x).LC() == 1
    assert Poly(2*x**2 + x, x).LC() == 2
    # the leading monomial (and hence LC) depends on the order
    assert Poly(x*y**7 + 2*x**2*y**3).LC('lex') == 2
    assert Poly(x*y**7 + 2*x**2*y**3).LC('grlex') == 1
    assert LC(x*y**7 + 2*x**2*y**3, order='lex') == 2
    assert LC(x*y**7 + 2*x**2*y**3, order='grlex') == 1
def test_Poly_TC():
    """TC() is the trailing coefficient, i.e. the constant term."""
    for expr, trailing in [(0, 0), (1, 1), (2*x**2 + x, 0)]:
        assert Poly(expr, x).TC() == trailing
def test_Poly_EC():
    """EC() is the coefficient of the last (ending) monomial with respect
    to a monomial order (default 'lex')."""
    assert Poly(0, x).EC() == 0
    assert Poly(1, x).EC() == 1
    assert Poly(2*x**2 + x, x).EC() == 1
    assert Poly(x*y**7 + 2*x**2*y**3).EC('lex') == 1
    assert Poly(x*y**7 + 2*x**2*y**3).EC('grlex') == 2
def test_Poly_coeff():
    """coeff_monomial(m) returns the coefficient of the exact monomial m
    (0 when absent); non-monomial arguments raise ValueError and
    Poly.coeff() itself is not implemented."""
    assert Poly(0, x).coeff_monomial(1) == 0
    assert Poly(0, x).coeff_monomial(x) == 0
    assert Poly(1, x).coeff_monomial(1) == 1
    assert Poly(1, x).coeff_monomial(x) == 0
    assert Poly(x**8, x).coeff_monomial(1) == 0
    assert Poly(x**8, x).coeff_monomial(x**7) == 0
    assert Poly(x**8, x).coeff_monomial(x**8) == 1
    assert Poly(x**8, x).coeff_monomial(x**9) == 0
    assert Poly(3*x*y**2 + 1, x, y).coeff_monomial(1) == 1
    assert Poly(3*x*y**2 + 1, x, y).coeff_monomial(x*y**2) == 3
    # symbolic coefficients (exp(8) lives in the domain) are returned as-is
    p = Poly(24*x*y*exp(8) + 23*x, x, y)
    assert p.coeff_monomial(x) == 23
    assert p.coeff_monomial(y) == 0
    assert p.coeff_monomial(x*y) == 24*exp(8)
    # Expr.coeff collects all terms containing x; Poly.coeff is unsupported
    assert p.as_expr().coeff(x) == 24*y*exp(8) + 23
    raises(NotImplementedError, lambda: p.coeff(x))
    # arguments must be plain monomials: no 0, no coefficients attached
    raises(ValueError, lambda: Poly(x + 1).coeff_monomial(0))
    raises(ValueError, lambda: Poly(x + 1).coeff_monomial(3*x))
    raises(ValueError, lambda: Poly(x + 1).coeff_monomial(3*x*y))
def test_Poly_nth():
    """nth(*exponents) returns the coefficient of the monomial with the
    given exponents; the exponent count must match the generator count."""
    assert Poly(0, x).nth(0) == 0
    assert Poly(0, x).nth(1) == 0
    assert Poly(1, x).nth(0) == 1
    assert Poly(1, x).nth(1) == 0
    assert Poly(x**8, x).nth(0) == 0
    assert Poly(x**8, x).nth(7) == 0
    assert Poly(x**8, x).nth(8) == 1
    assert Poly(x**8, x).nth(9) == 0
    assert Poly(3*x*y**2 + 1, x, y).nth(0, 0) == 1
    assert Poly(3*x*y**2 + 1, x, y).nth(1, 2) == 3
    # two generators require two exponents
    raises(ValueError, lambda: Poly(x*y + 1, x, y).nth(1))
def test_Poly_LM():
    """Poly.LM() returns the leading monomial as an exponent tuple; the
    LM() function returns it as an expression."""
    assert Poly(0, x).LM() == (0,)
    assert Poly(1, x).LM() == (0,)
    assert Poly(2*x**2 + x, x).LM() == (2,)
    assert Poly(x*y**7 + 2*x**2*y**3).LM('lex') == (2, 3)
    assert Poly(x*y**7 + 2*x**2*y**3).LM('grlex') == (1, 7)
    assert LM(x*y**7 + 2*x**2*y**3, order='lex') == x**2*y**3
    assert LM(x*y**7 + 2*x**2*y**3, order='grlex') == x*y**7
def test_Poly_LM_custom_order():
    """LM(order=...) also accepts a callable key on exponent tuples as a
    custom monomial order."""
    f = Poly(x**2*y**3*z + x**2*y*z**3 + x*y*z + 1)
    # reversed-tuple key: compare exponents from the last generator first
    rev_lex = lambda monom: tuple(reversed(monom))
    assert f.LM(order='lex') == (2, 3, 1)
    assert f.LM(order=rev_lex) == (2, 1, 3)
def test_Poly_EM():
    """EM() returns the last (ending) monomial as an exponent tuple with
    respect to a monomial order (default 'lex')."""
    assert Poly(0, x).EM() == (0,)
    assert Poly(1, x).EM() == (0,)
    assert Poly(2*x**2 + x, x).EM() == (1,)
    assert Poly(x*y**7 + 2*x**2*y**3).EM('lex') == (1, 7)
    assert Poly(x*y**7 + 2*x**2*y**3).EM('grlex') == (2, 3)
def test_Poly_LT():
    """Poly.LT() returns the leading (monom, coeff) pair; the LT() function
    returns the leading term as an expression."""
    assert Poly(0, x).LT() == ((0,), 0)
    assert Poly(1, x).LT() == ((0,), 1)
    assert Poly(2*x**2 + x, x).LT() == ((2,), 2)
    assert Poly(x*y**7 + 2*x**2*y**3).LT('lex') == ((2, 3), 2)
    assert Poly(x*y**7 + 2*x**2*y**3).LT('grlex') == ((1, 7), 1)
    assert LT(x*y**7 + 2*x**2*y**3, order='lex') == 2*x**2*y**3
    assert LT(x*y**7 + 2*x**2*y**3, order='grlex') == x*y**7
def test_Poly_ET():
    """ET() returns the last (ending) (monom, coeff) pair with respect to a
    monomial order (default 'lex')."""
    assert Poly(0, x).ET() == ((0,), 0)
    assert Poly(1, x).ET() == ((0,), 1)
    assert Poly(2*x**2 + x, x).ET() == ((1,), 1)
    assert Poly(x*y**7 + 2*x**2*y**3).ET('lex') == ((1, 7), 1)
    assert Poly(x*y**7 + 2*x**2*y**3).ET('grlex') == ((2, 3), 2)
def test_Poly_max_norm():
    """max_norm() is the maximum absolute value of the coefficients."""
    for coeff, norm in [(-1, 1), (0, 0), (1, 1)]:
        assert Poly(coeff, x).max_norm() == norm
def test_Poly_l1_norm():
    """l1_norm() is the sum of absolute values of the coefficients."""
    for coeff, norm in [(-1, 1), (0, 0), (1, 1)]:
        assert Poly(coeff, x).l1_norm() == norm
def test_Poly_clear_denoms():
    """clear_denoms() returns (common denominator, scaled poly); with
    convert=True the result is also converted to the associated ring."""
    coeff, poly = Poly(x + 2, x).clear_denoms()
    assert coeff == 1 and poly == Poly(
        x + 2, x, domain='ZZ') and poly.get_domain() == ZZ
    # without convert the domain stays QQ even though coefficients are integral
    coeff, poly = Poly(x/2 + 1, x).clear_denoms()
    assert coeff == 2 and poly == Poly(
        x + 2, x, domain='QQ') and poly.get_domain() == QQ
    coeff, poly = Poly(x/2 + 1, x).clear_denoms(convert=True)
    assert coeff == 2 and poly == Poly(
        x + 2, x, domain='ZZ') and poly.get_domain() == ZZ
    # denominators may be symbolic
    coeff, poly = Poly(x/y + 1, x).clear_denoms(convert=True)
    assert coeff == y and poly == Poly(
        x + y, x, domain='ZZ[y]') and poly.get_domain() == ZZ[y]
    # EX is already a field, so convert=True makes no difference
    coeff, poly = Poly(x/3 + sqrt(2), x, domain='EX').clear_denoms()
    assert coeff == 3 and poly == Poly(
        x + 3*sqrt(2), x, domain='EX') and poly.get_domain() == EX
    coeff, poly = Poly(
        x/3 + sqrt(2), x, domain='EX').clear_denoms(convert=True)
    assert coeff == 3 and poly == Poly(
        x + 3*sqrt(2), x, domain='EX') and poly.get_domain() == EX
def test_Poly_rat_clear_denoms():
    """rat_clear_denoms(g) clears denominators across the pair (self, g)
    when the domain is a fraction field; over EX it is a no-op."""
    f = Poly(x**2/y + 1, x)
    g = Poly(x**3 + y, x)
    assert f.rat_clear_denoms(g) == \
        (Poly(x**2 + y, x), Poly(y*x**3 + y**2, x))
    f = f.set_domain(EX)
    g = g.set_domain(EX)
    # EX coefficients are opaque expressions: nothing is cleared
    assert f.rat_clear_denoms(g) == (f, g)
def test_Poly_integrate():
    """integrate() antidifferentiates; generators may be given singly, as
    (gen, order) pairs, or repeated for mixed integration."""
    assert Poly(x + 1).integrate() == Poly(x**2/2 + x)
    assert Poly(x + 1).integrate(x) == Poly(x**2/2 + x)
    assert Poly(x + 1).integrate((x, 1)) == Poly(x**2/2 + x)
    assert Poly(x*y + 1).integrate(x) == Poly(x**2*y/2 + x)
    assert Poly(x*y + 1).integrate(y) == Poly(x*y**2/2 + y)
    # repeated generator == higher-order integration
    assert Poly(x*y + 1).integrate(x, x) == Poly(x**3*y/6 + x**2/2)
    assert Poly(x*y + 1).integrate(y, y) == Poly(x*y**3/6 + y**2/2)
    assert Poly(x*y + 1).integrate((x, 2)) == Poly(x**3*y/6 + x**2/2)
    assert Poly(x*y + 1).integrate((y, 2)) == Poly(x*y**3/6 + y**2/2)
    # mixed partial integration commutes
    assert Poly(x*y + 1).integrate(x, y) == Poly(x**2*y**2/4 + x*y)
    assert Poly(x*y + 1).integrate(y, x) == Poly(x**2*y**2/4 + x*y)
def test_Poly_diff():
    """diff() differentiates; generators may be given singly, as
    (gen, order) pairs, or repeated for mixed derivatives."""
    assert Poly(x**2 + x).diff() == Poly(2*x + 1)
    assert Poly(x**2 + x).diff(x) == Poly(2*x + 1)
    assert Poly(x**2 + x).diff((x, 1)) == Poly(2*x + 1)
    assert Poly(x**2*y**2 + x*y).diff(x) == Poly(2*x*y**2 + y)
    assert Poly(x**2*y**2 + x*y).diff(y) == Poly(2*x**2*y + x)
    # repeated generator == higher-order derivative
    assert Poly(x**2*y**2 + x*y).diff(x, x) == Poly(2*y**2, x, y)
    assert Poly(x**2*y**2 + x*y).diff(y, y) == Poly(2*x**2, x, y)
    assert Poly(x**2*y**2 + x*y).diff((x, 2)) == Poly(2*y**2, x, y)
    assert Poly(x**2*y**2 + x*y).diff((y, 2)) == Poly(2*x**2, x, y)
    # mixed partials commute
    assert Poly(x**2*y**2 + x*y).diff(x, y) == Poly(4*x*y + 1)
    assert Poly(x**2*y**2 + x*y).diff(y, x) == Poly(4*x*y + 1)
def test_issue_9585():
    """Regression test: the top-level diff()/Derivative interoperate with
    Poly instances (sympy issue 9585)."""
    assert diff(Poly(x**2 + x)) == Poly(2*x + 1)
    # evaluate=False keeps the derivative unevaluated
    assert diff(Poly(x**2 + x), x, evaluate=False) == \
        Derivative(Poly(x**2 + x), x)
    assert Derivative(Poly(x**2 + x), x).doit() == Poly(2*x + 1)
def test_Poly_eval():
    """eval() substitutes values for generators (by position, index, symbol,
    name, dict, or sequence); partial evaluation returns a smaller Poly."""
    assert Poly(0, x).eval(7) == 0
    assert Poly(1, x).eval(7) == 1
    assert Poly(x, x).eval(7) == 7
    # generator selected by index, symbol, or name
    assert Poly(0, x).eval(0, 7) == 0
    assert Poly(1, x).eval(0, 7) == 1
    assert Poly(x, x).eval(0, 7) == 7
    assert Poly(0, x).eval(x, 7) == 0
    assert Poly(1, x).eval(x, 7) == 1
    assert Poly(x, x).eval(x, 7) == 7
    assert Poly(0, x).eval('x', 7) == 0
    assert Poly(1, x).eval('x', 7) == 1
    assert Poly(x, x).eval('x', 7) == 7
    # unknown generators raise
    raises(PolynomialError, lambda: Poly(1, x).eval(1, 7))
    raises(PolynomialError, lambda: Poly(1, x).eval(y, 7))
    raises(PolynomialError, lambda: Poly(1, x).eval('y', 7))
    # partial evaluation leaves a Poly in the remaining generators
    assert Poly(123, x, y).eval(7) == Poly(123, y)
    assert Poly(2*y, x, y).eval(7) == Poly(2*y, y)
    assert Poly(x*y, x, y).eval(7) == Poly(7*y, y)
    assert Poly(123, x, y).eval(x, 7) == Poly(123, y)
    assert Poly(2*y, x, y).eval(x, 7) == Poly(2*y, y)
    assert Poly(x*y, x, y).eval(x, 7) == Poly(7*y, y)
    assert Poly(123, x, y).eval(y, 7) == Poly(123, x)
    assert Poly(2*y, x, y).eval(y, 7) == Poly(14, x)
    assert Poly(x*y, x, y).eval(y, 7) == Poly(7*x, x)
    assert Poly(x*y + y, x, y).eval({x: 7}) == Poly(8*y, y)
    assert Poly(x*y + y, x, y).eval({y: 7}) == Poly(7*x + 7, x)
    assert Poly(x*y + y, x, y).eval({x: 6, y: 7}) == 49
    assert Poly(x*y + y, x, y).eval({x: 7, y: 6}) == 48
    # tuples and lists substitute positionally
    assert Poly(x*y + y, x, y).eval((6, 7)) == 49
    assert Poly(x*y + y, x, y).eval([6, 7]) == 49
    # values outside the domain auto-coerce unless auto=False
    assert Poly(x + 1, domain='ZZ').eval(S(1)/2) == S(3)/2
    assert Poly(x + 1, domain='ZZ').eval(sqrt(2)) == sqrt(2) + 1
    raises(ValueError, lambda: Poly(x*y + y, x, y).eval((6, 7, 8)))
    raises(DomainError, lambda: Poly(x + 1, domain='ZZ').eval(S(1)/2, auto=False))
    # issue 6344
    alpha = Symbol('alpha')
    result = (2*alpha*z - 2*alpha + z**2 + 3)/(z**2 - 2*z + 1)
    f = Poly(x**2 + (alpha - 1)*x - alpha + 1, x, domain='ZZ[alpha]')
    assert f.eval((z + 1)/(z - 1)) == result
    g = Poly(x**2 + (alpha - 1)*x - alpha + 1, x, y, domain='ZZ[alpha]')
    assert g.eval((z + 1)/(z - 1)) == Poly(result, y, domain='ZZ(alpha,z)')
def test_Poly___call__():
    """Calling a Poly evaluates generators positionally; partial calls
    return a Poly in the remaining generators."""
    f = Poly(2*x*y + 3*x + y + 2*z)
    assert f(2) == Poly(5*y + 2*z + 6)
    assert f(2, 5) == Poly(2*z + 31)
    assert f(2, 5, 7) == 45
def test_parallel_poly_from_expr():
    """parallel_poly_from_expr() polifies several expressions at once over
    a unified set of generators; mixed Expr/Poly inputs are accepted."""
    assert parallel_poly_from_expr(
        [x - 1, x**2 - 1], x)[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [Poly(x - 1, x), x**2 - 1], x)[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [x - 1, Poly(x**2 - 1, x)], x)[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr([Poly(
        x - 1, x), Poly(x**2 - 1, x)], x)[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    # extra generators are applied to every result
    assert parallel_poly_from_expr(
        [x - 1, x**2 - 1], x, y)[0] == [Poly(x - 1, x, y), Poly(x**2 - 1, x, y)]
    assert parallel_poly_from_expr([Poly(
        x - 1, x), x**2 - 1], x, y)[0] == [Poly(x - 1, x, y), Poly(x**2 - 1, x, y)]
    assert parallel_poly_from_expr([x - 1, Poly(
        x**2 - 1, x)], x, y)[0] == [Poly(x - 1, x, y), Poly(x**2 - 1, x, y)]
    assert parallel_poly_from_expr([Poly(x - 1, x), Poly(
        x**2 - 1, x)], x, y)[0] == [Poly(x - 1, x, y), Poly(x**2 - 1, x, y)]
    # generators may also be inferred from the expressions
    assert parallel_poly_from_expr(
        [x - 1, x**2 - 1])[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [Poly(x - 1, x), x**2 - 1])[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [x - 1, Poly(x**2 - 1, x)])[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [Poly(x - 1, x), Poly(x**2 - 1, x)])[0] == [Poly(x - 1, x), Poly(x**2 - 1, x)]
    # constants pick up the generators of their companions
    assert parallel_poly_from_expr(
        [1, x**2 - 1])[0] == [Poly(1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [1, x**2 - 1])[0] == [Poly(1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [1, Poly(x**2 - 1, x)])[0] == [Poly(1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [1, Poly(x**2 - 1, x)])[0] == [Poly(1, x), Poly(x**2 - 1, x)]
    assert parallel_poly_from_expr(
        [x**2 - 1, 1])[0] == [Poly(x**2 - 1, x), Poly(1, x)]
    assert parallel_poly_from_expr(
        [x**2 - 1, 1])[0] == [Poly(x**2 - 1, x), Poly(1, x)]
    assert parallel_poly_from_expr(
        [Poly(x**2 - 1, x), 1])[0] == [Poly(x**2 - 1, x), Poly(1, x)]
    assert parallel_poly_from_expr(
        [Poly(x**2 - 1, x), 1])[0] == [Poly(x**2 - 1, x), Poly(1, x)]
    assert parallel_poly_from_expr([Poly(x, x, y), Poly(y, x, y)], x, y, order='lex')[0] == \
        [Poly(x, x, y, domain='ZZ'), Poly(y, x, y, domain='ZZ')]
    # all-constant input cannot be polified
    raises(PolificationFailed, lambda: parallel_poly_from_expr([0, 1]))
def test_pdiv():
    """Pseudo-division family: pdiv/prem/pquo/pexquo as methods and as
    public functions, with gens, polys=..., and failure on non-polynomials."""
    f, g = x**2 - y**2, x - y
    q, r = x + y, 0
    F, G, Q, R = [ Poly(h, x, y) for h in (f, g, q, r) ]
    # methods
    assert F.pdiv(G) == (Q, R)
    assert F.prem(G) == R
    assert F.pquo(G) == Q
    assert F.pexquo(G) == Q
    # functions on expressions
    assert pdiv(f, g) == (q, r)
    assert prem(f, g) == r
    assert pquo(f, g) == q
    assert pexquo(f, g) == q
    # explicit generators, both spread and as a tuple
    assert pdiv(f, g, x, y) == (q, r)
    assert prem(f, g, x, y) == r
    assert pquo(f, g, x, y) == q
    assert pexquo(f, g, x, y) == q
    assert pdiv(f, g, (x, y)) == (q, r)
    assert prem(f, g, (x, y)) == r
    assert pquo(f, g, (x, y)) == q
    assert pexquo(f, g, (x, y)) == q
    # functions on Poly inputs return Poly results
    assert pdiv(F, G) == (Q, R)
    assert prem(F, G) == R
    assert pquo(F, G) == Q
    assert pexquo(F, G) == Q
    # polys flag overrides the output type either way
    assert pdiv(f, g, polys=True) == (Q, R)
    assert prem(f, g, polys=True) == R
    assert pquo(f, g, polys=True) == Q
    assert pexquo(f, g, polys=True) == Q
    assert pdiv(F, G, polys=False) == (q, r)
    assert prem(F, G, polys=False) == r
    assert pquo(F, G, polys=False) == q
    assert pexquo(F, G, polys=False) == q
    # plain numbers cannot be polified
    raises(ComputationFailed, lambda: pdiv(4, 2))
    raises(ComputationFailed, lambda: prem(4, 2))
    raises(ComputationFailed, lambda: pquo(4, 2))
    raises(ComputationFailed, lambda: pexquo(4, 2))
def test_div():
    """Division family: div/rem/quo/exquo as methods and functions, the
    auto/domain interplay over ZZ vs QQ, and domain retention."""
    f, g = x**2 - y**2, x - y
    q, r = x + y, 0
    F, G, Q, R = [ Poly(h, x, y) for h in (f, g, q, r) ]
    # methods
    assert F.div(G) == (Q, R)
    assert F.rem(G) == R
    assert F.quo(G) == Q
    assert F.exquo(G) == Q
    # functions on expressions
    assert div(f, g) == (q, r)
    assert rem(f, g) == r
    assert quo(f, g) == q
    assert exquo(f, g) == q
    # explicit generators, both spread and as a tuple
    assert div(f, g, x, y) == (q, r)
    assert rem(f, g, x, y) == r
    assert quo(f, g, x, y) == q
    assert exquo(f, g, x, y) == q
    assert div(f, g, (x, y)) == (q, r)
    assert rem(f, g, (x, y)) == r
    assert quo(f, g, (x, y)) == q
    assert exquo(f, g, (x, y)) == q
    # Poly inputs return Poly results
    assert div(F, G) == (Q, R)
    assert rem(F, G) == R
    assert quo(F, G) == Q
    assert exquo(F, G) == Q
    # polys flag overrides the output type either way
    assert div(f, g, polys=True) == (Q, R)
    assert rem(f, g, polys=True) == R
    assert quo(f, g, polys=True) == Q
    assert exquo(f, g, polys=True) == Q
    assert div(F, G, polys=False) == (q, r)
    assert rem(F, G, polys=False) == r
    assert quo(F, G, polys=False) == q
    assert exquo(F, G, polys=False) == q
    raises(ComputationFailed, lambda: div(4, 2))
    raises(ComputationFailed, lambda: rem(4, 2))
    raises(ComputationFailed, lambda: quo(4, 2))
    raises(ComputationFailed, lambda: exquo(4, 2))
    # over ZZ division truncates; auto=True (default) promotes to QQ
    f, g = x**2 + 1, 2*x - 4
    qz, rz = 0, x**2 + 1
    qq, rq = x/2 + 1, 5
    assert div(f, g) == (qq, rq)
    assert div(f, g, auto=True) == (qq, rq)
    assert div(f, g, auto=False) == (qz, rz)
    assert div(f, g, domain=ZZ) == (qz, rz)
    assert div(f, g, domain=QQ) == (qq, rq)
    assert div(f, g, domain=ZZ, auto=True) == (qq, rq)
    assert div(f, g, domain=ZZ, auto=False) == (qz, rz)
    assert div(f, g, domain=QQ, auto=True) == (qq, rq)
    assert div(f, g, domain=QQ, auto=False) == (qq, rq)
    assert rem(f, g) == rq
    assert rem(f, g, auto=True) == rq
    assert rem(f, g, auto=False) == rz
    assert rem(f, g, domain=ZZ) == rz
    assert rem(f, g, domain=QQ) == rq
    assert rem(f, g, domain=ZZ, auto=True) == rq
    assert rem(f, g, domain=ZZ, auto=False) == rz
    assert rem(f, g, domain=QQ, auto=True) == rq
    assert rem(f, g, domain=QQ, auto=False) == rq
    assert quo(f, g) == qq
    assert quo(f, g, auto=True) == qq
    assert quo(f, g, auto=False) == qz
    assert quo(f, g, domain=ZZ) == qz
    assert quo(f, g, domain=QQ) == qq
    assert quo(f, g, domain=ZZ, auto=True) == qq
    assert quo(f, g, domain=ZZ, auto=False) == qz
    assert quo(f, g, domain=QQ, auto=True) == qq
    assert quo(f, g, domain=QQ, auto=False) == qq
    # exquo requires an exact quotient, which only exists over QQ here
    f, g, q = x**2, 2*x, x/2
    assert exquo(f, g) == q
    assert exquo(f, g, auto=True) == q
    raises(ExactQuotientFailed, lambda: exquo(f, g, auto=False))
    raises(ExactQuotientFailed, lambda: exquo(f, g, domain=ZZ))
    assert exquo(f, g, domain=QQ) == q
    assert exquo(f, g, domain=ZZ, auto=True) == q
    raises(ExactQuotientFailed, lambda: exquo(f, g, domain=ZZ, auto=False))
    assert exquo(f, g, domain=QQ, auto=True) == q
    assert exquo(f, g, domain=QQ, auto=False) == q
    # exact division over ZZ retracts the result back to ZZ
    f, g = Poly(x**2), Poly(x)
    q, r = f.div(g)
    assert q.get_domain().is_ZZ and r.get_domain().is_ZZ
    r = f.rem(g)
    assert r.get_domain().is_ZZ
    q = f.quo(g)
    assert q.get_domain().is_ZZ
    q = f.exquo(g)
    assert q.get_domain().is_ZZ
def test_gcdex():
    """Extended Euclidean family: half_gcdex/gcdex/invert as methods and
    functions; integers work too, ZZ without auto promotion raises."""
    f, g = 2*x, x**2 - 16
    s, t, h = x/32, -Rational(1, 16), 1
    F, G, S, T, H = [ Poly(u, x, domain='QQ') for u in (f, g, s, t, h) ]
    # methods: s*f + t*g == h, and S inverts F modulo G
    assert F.half_gcdex(G) == (S, H)
    assert F.gcdex(G) == (S, T, H)
    assert F.invert(G) == S
    assert half_gcdex(f, g) == (s, h)
    assert gcdex(f, g) == (s, t, h)
    assert invert(f, g) == s
    # explicit generators, spread and as a tuple
    assert half_gcdex(f, g, x) == (s, h)
    assert gcdex(f, g, x) == (s, t, h)
    assert invert(f, g, x) == s
    assert half_gcdex(f, g, (x,)) == (s, h)
    assert gcdex(f, g, (x,)) == (s, t, h)
    assert invert(f, g, (x,)) == s
    assert half_gcdex(F, G) == (S, H)
    assert gcdex(F, G) == (S, T, H)
    assert invert(F, G) == S
    # polys flag overrides the output type either way
    assert half_gcdex(f, g, polys=True) == (S, H)
    assert gcdex(f, g, polys=True) == (S, T, H)
    assert invert(f, g, polys=True) == S
    assert half_gcdex(F, G, polys=False) == (s, h)
    assert gcdex(F, G, polys=False) == (s, t, h)
    assert invert(F, G, polys=False) == s
    # plain integers use integer arithmetic
    assert half_gcdex(100, 2004) == (-20, 4)
    assert gcdex(100, 2004) == (-20, 1, 4)
    assert invert(3, 7) == 5
    # ZZ is not a field; without auto promotion these cannot be computed
    raises(DomainError, lambda: half_gcdex(x + 1, 2*x + 1, auto=False))
    raises(DomainError, lambda: gcdex(x + 1, 2*x + 1, auto=False))
    raises(DomainError, lambda: invert(x + 1, 2*x + 1, auto=False))
def test_revert():
    """revert(n) computes the multiplicative series inverse modulo x**n."""
    # truncated cos(x) series; its inverse is the truncated sec(x) series
    cos_series = Poly(1 - x**2/2 + x**4/24 - x**6/720)
    sec_series = Poly(61*x**6/720 + 5*x**4/24 + x**2/2 + 1)
    assert cos_series.revert(8) == sec_series
def test_subresultants():
    """subresultants() returns the subresultant PRS of two polynomials, as
    a method and as a function with gens/polys variants."""
    f, g, h = x**2 - 2*x + 1, x**2 - 1, 2*x - 2
    F, G, H = Poly(f), Poly(g), Poly(h)
    assert F.subresultants(G) == [F, G, H]
    assert subresultants(f, g) == [f, g, h]
    assert subresultants(f, g, x) == [f, g, h]
    assert subresultants(f, g, (x,)) == [f, g, h]
    assert subresultants(F, G) == [F, G, H]
    assert subresultants(f, g, polys=True) == [F, G, H]
    assert subresultants(F, G, polys=False) == [f, g, h]
    raises(ComputationFailed, lambda: subresultants(4, 2))
def test_resultant():
    """resultant() computes the resultant of two polynomials (0 iff they
    share a root); includePRS=True also returns the subresultant PRS."""
    # common root x = 1 -> resultant 0
    f, g, h = x**2 - 2*x + 1, x**2 - 1, 0
    F, G = Poly(f), Poly(g)
    assert F.resultant(G) == h
    assert resultant(f, g) == h
    assert resultant(f, g, x) == h
    assert resultant(f, g, (x,)) == h
    assert resultant(F, G) == h
    assert resultant(f, g, polys=True) == h
    assert resultant(F, G, polys=False) == h
    assert resultant(f, g, includePRS=True) == (h, [f, g, 2*x - 2])
    # symbolic coefficients: resultant of x - a and x - b is a - b
    f, g, h = x - a, x - b, a - b
    F, G, H = Poly(f), Poly(g), Poly(h)
    assert F.resultant(G) == H
    assert resultant(f, g) == h
    assert resultant(f, g, x) == h
    assert resultant(f, g, (x,)) == h
    assert resultant(F, G) == H
    assert resultant(f, g, polys=True) == H
    assert resultant(F, G, polys=False) == h
    raises(ComputationFailed, lambda: resultant(4, 2))
def test_discriminant():
    """discriminant() of a polynomial, as a method and as a function with
    gens/polys variants; includes the classic quadratic b**2 - 4*a*c."""
    f, g = x**3 + 3*x**2 + 9*x - 13, -11664
    F = Poly(f)
    assert F.discriminant() == g
    assert discriminant(f) == g
    assert discriminant(f, x) == g
    assert discriminant(f, (x,)) == g
    assert discriminant(F) == g
    assert discriminant(f, polys=True) == g
    assert discriminant(F, polys=False) == g
    # quadratic in x with symbolic coefficients
    f, g = a*x**2 + b*x + c, b**2 - 4*a*c
    F, G = Poly(f), Poly(g)
    assert F.discriminant() == G
    assert discriminant(f) == g
    assert discriminant(f, x, a, b, c) == g
    assert discriminant(f, (x, a, b, c)) == g
    assert discriminant(F) == G
    assert discriminant(f, polys=True) == G
    assert discriminant(F, polys=False) == g
    raises(ComputationFailed, lambda: discriminant(4))
def test_dispersion():
    """API smoke test for dispersionset()/dispersion() of one or two polys."""
    # We test only the API here. For more mathematical
    # tests see the dedicated test file.
    fp = poly((x + 1)*(x + 2), x)
    assert sorted(fp.dispersionset()) == [0, 1]
    assert fp.dispersion() == 1
    # dispersion between a polynomial and a shift of itself
    fp = poly(x**4 - 3*x**2 + 1, x)
    gp = fp.shift(-3)
    assert sorted(fp.dispersionset(gp)) == [2, 3, 4]
    assert fp.dispersion(gp) == 4
def test_gcd_list():
    """gcd_list() computes the gcd of a sequence of expressions; empty or
    degenerate inputs need explicit generators (or polys) to make sense."""
    F = [x**3 - 1, x**2 - 1, x**2 - 3*x + 2]
    assert gcd_list(F) == x - 1
    assert gcd_list(F, polys=True) == Poly(x - 1)
    assert gcd_list([]) == 0
    assert gcd_list([1, 2]) == 1
    assert gcd_list([4, 6, 8]) == 2
    # the expression simplifies to 0, so the gcd is 0
    assert gcd_list([x*(y + 42) - x*y - x*42]) == 0
    # an empty list with a generator yields the zero of that ring
    gcd = gcd_list([], x)
    assert gcd.is_Number and gcd is S.Zero
    gcd = gcd_list([], x, polys=True)
    assert gcd.is_Poly and gcd.is_zero
    # polys=True without generators cannot polify an empty list
    raises(ComputationFailed, lambda: gcd_list([], polys=True))
def test_lcm_list():
    """lcm_list() computes the lcm of a sequence of expressions; empty or
    degenerate inputs need explicit generators (or polys) to make sense."""
    F = [x**3 - 1, x**2 - 1, x**2 - 3*x + 2]
    assert lcm_list(F) == x**5 - x**4 - 2*x**3 - x**2 + x + 2
    assert lcm_list(F, polys=True) == Poly(x**5 - x**4 - 2*x**3 - x**2 + x + 2)
    assert lcm_list([]) == 1
    assert lcm_list([1, 2]) == 2
    assert lcm_list([4, 6, 8]) == 24
    # the expression simplifies to 0, so the lcm is 0
    assert lcm_list([x*(y + 42) - x*y - x*42]) == 0
    # an empty list with a generator yields the unit of that ring
    lcm = lcm_list([], x)
    assert lcm.is_Number and lcm is S.One
    lcm = lcm_list([], x, polys=True)
    assert lcm.is_Poly and lcm.is_one
    # polys=True without generators cannot polify an empty list
    raises(ComputationFailed, lambda: lcm_list([], polys=True))
def test_gcd():
    """gcd/lcm/cofactors as methods and functions, including float
    coefficients, plain integers, and finite-field (modulus) domains."""
    f, g = x**3 - 1, x**2 - 1
    s, t = x**2 + x + 1, x + 1
    h, r = x - 1, x**4 + x**3 - x - 1
    F, G, S, T, H, R = [ Poly(u) for u in (f, g, s, t, h, r) ]
    # cofactors returns (gcd, f/gcd, g/gcd)
    assert F.cofactors(G) == (H, S, T)
    assert F.gcd(G) == H
    assert F.lcm(G) == R
    assert cofactors(f, g) == (h, s, t)
    assert gcd(f, g) == h
    assert lcm(f, g) == r
    # explicit generators, spread and as a tuple
    assert cofactors(f, g, x) == (h, s, t)
    assert gcd(f, g, x) == h
    assert lcm(f, g, x) == r
    assert cofactors(f, g, (x,)) == (h, s, t)
    assert gcd(f, g, (x,)) == h
    assert lcm(f, g, (x,)) == r
    assert cofactors(F, G) == (H, S, T)
    assert gcd(F, G) == H
    assert lcm(F, G) == R
    # polys flag overrides the output type either way
    assert cofactors(f, g, polys=True) == (H, S, T)
    assert gcd(f, g, polys=True) == H
    assert lcm(f, g, polys=True) == R
    assert cofactors(F, G, polys=False) == (h, s, t)
    assert gcd(F, G, polys=False) == h
    assert lcm(F, G, polys=False) == r
    # float coefficients: gcd is made monic-like with float 1.0 cofactor
    f, g = 1.0*x**2 - 1.0, 1.0*x - 1.0
    h, s, t = g, 1.0*x + 1.0, 1.0
    assert cofactors(f, g) == (h, s, t)
    assert gcd(f, g) == h
    assert lcm(f, g) == f
    f, g = 1.0*x**2 - 1.0, 1.0*x - 1.0
    h, s, t = g, 1.0*x + 1.0, 1.0
    assert cofactors(f, g) == (h, s, t)
    assert gcd(f, g) == h
    assert lcm(f, g) == f
    # plain integers use integer arithmetic
    assert cofactors(8, 6) == (2, 4, 3)
    assert gcd(8, 6) == 2
    assert lcm(8, 6) == 24
    # GF(11) with symmetric representation (coefficients in -5..5)
    f, g = x**2 - 3*x - 4, x**3 - 4*x**2 + x - 4
    l = x**4 - 3*x**3 - 3*x**2 - 3*x - 4
    h, s, t = x - 4, x + 1, x**2 + 1
    assert cofactors(f, g, modulus=11) == (h, s, t)
    assert gcd(f, g, modulus=11) == h
    assert lcm(f, g, modulus=11) == l
    # GF(11) with non-symmetric representation (coefficients in 0..10)
    f, g = x**2 + 8*x + 7, x**3 + 7*x**2 + x + 7
    l = x**4 + 8*x**3 + 8*x**2 + 8*x + 7
    h, s, t = x + 7, x + 1, x**2 + 1
    assert cofactors(f, g, modulus=11, symmetric=False) == (h, s, t)
    assert gcd(f, g, modulus=11, symmetric=False) == h
    assert lcm(f, g, modulus=11, symmetric=False) == l
    # gcd/lcm require two arguments
    raises(TypeError, lambda: gcd(x))
    raises(TypeError, lambda: lcm(x))
def test_gcd_numbers_vs_polys():
    """gcd() of plain numbers keeps the number type: Integer gcd over ZZ,
    Rational gcd over QQ, and trivial 1.0 for floats."""
    assert isinstance(gcd(3, 9), Integer)
    assert isinstance(gcd(3*x, 9), Integer)
    assert gcd(3, 9) == 3
    assert gcd(3*x, 9) == 3
    # rationals: gcd(p1/q1, p2/q2) = gcd(p1, p2)/lcm(q1, q2)
    assert isinstance(gcd(S(3)/2, S(9)/4), Rational)
    assert isinstance(gcd(S(3)/2*x, S(9)/4), Rational)
    assert gcd(S(3)/2, S(9)/4) == S(3)/4
    assert gcd(S(3)/2*x, S(9)/4) == 1
    # floats live in an inexact field, so the gcd degenerates to 1.0
    assert isinstance(gcd(3.0, 9.0), Float)
    assert isinstance(gcd(3.0*x, 9.0), Float)
    assert gcd(3.0, 9.0) == 1.0
    assert gcd(3.0*x, 9.0) == 1.0
def test_terms_gcd():
    """terms_gcd() factors the GCD out of the terms of an expression,
    covering rational/float coefficients and the expand/deep flags."""
    assert terms_gcd(1) == 1
    assert terms_gcd(1, x) == 1
    assert terms_gcd(x - 1) == x - 1
    assert terms_gcd(-x - 1) == -x - 1
    assert terms_gcd(2*x + 3) == 2*x + 3
    assert terms_gcd(6*x + 4) == Mul(2, 3*x + 2, evaluate=False)
    assert terms_gcd(x**3*y + x*y**3) == x*y*(x**2 + y**2)
    assert terms_gcd(2*x**3*y + 2*x*y**3) == 2*x*y*(x**2 + y**2)
    assert terms_gcd(x**3*y/2 + x*y**3/2) == x*y/2*(x**2 + y**2)
    assert terms_gcd(x**3*y + 2*x*y**3) == x*y*(x**2 + 2*y**2)
    assert terms_gcd(2*x**3*y + 4*x*y**3) == 2*x*y*(x**2 + 2*y**2)
    assert terms_gcd(2*x**3*y/3 + 4*x*y**3/5) == 2*x*y/15*(5*x**2 + 6*y**2)
    assert terms_gcd(2.0*x**3*y + 4.1*x*y**3) == x*y*(2.0*x**2 + 4.1*y**2)
    assert _aresame(terms_gcd(2.0*x + 3), 2.0*x + 3)
    # expand=False keeps the product structure of the input
    assert terms_gcd((3 + 3*x)*(x + x*y), expand=False) == \
        (3*x + 3)*(x*y + x)
    # deep=True also recurses into function arguments
    assert terms_gcd((3 + 3*x)*(x + x*sin(3 + 3*y)), expand=False, deep=True) == \
        3*x*(x + 1)*(sin(Mul(3, y + 1, evaluate=False)) + 1)
    assert terms_gcd(sin(x + x*y), deep=True) == \
        sin(x*(y + 1))
    # an Eq is left untouched unless deep=True is requested
    eq = Eq(2*x, 2*y + 2*z*y)
    assert terms_gcd(eq) == eq
    assert terms_gcd(eq, deep=True) == Eq(2*x, 2*y*(z + 1))
def test_trunc():
    """trunc() reduces polynomial coefficients modulo a constant, via the
    functional form, the Poly method, and the polys=True/False flags."""
    f, g = x**5 + 2*x**4 + 3*x**3 + 4*x**2 + 5*x + 6, x**5 - x**4 + x**2 - x
    F, G = Poly(f), Poly(g)
    assert F.trunc(3) == G
    assert trunc(f, 3) == g
    assert trunc(f, 3, x) == g
    assert trunc(f, 3, (x,)) == g
    assert trunc(F, 3) == G
    assert trunc(f, 3, polys=True) == G
    assert trunc(F, 3, polys=False) == g
    f, g = 6*x**5 + 5*x**4 + 4*x**3 + 3*x**2 + 2*x + 1, -x**4 + x**3 - x + 1
    F, G = Poly(f), Poly(g)
    assert F.trunc(3) == G
    assert trunc(f, 3) == g
    assert trunc(f, 3, x) == g
    assert trunc(f, 3, (x,)) == g
    assert trunc(F, 3) == G
    assert trunc(f, 3, polys=True) == G
    assert trunc(F, 3, polys=False) == g
    # trunc() also applies to polynomials already over GF(p)
    f = Poly(x**2 + 2*x + 3, modulus=5)
    assert f.trunc(2) == Poly(x**2 + 1, modulus=5)
def test_monic():
    """monic() normalizes the leading coefficient to 1, covering the auto
    flag, floating-point coefficients and modular coefficients."""
    f, g = 2*x - 1, x - S(1)/2
    F, G = Poly(f, domain='QQ'), Poly(g)
    assert F.monic() == G
    assert monic(f) == g
    assert monic(f, x) == g
    assert monic(f, (x,)) == g
    assert monic(F) == G
    assert monic(f, polys=True) == G
    assert monic(F, polys=False) == g
    raises(ComputationFailed, lambda: monic(4))
    # auto=False works only when exact division succeeds over ZZ
    assert monic(2*x**2 + 6*x + 4, auto=False) == x**2 + 3*x + 2
    raises(ExactQuotientFailed, lambda: monic(2*x + 6*x + 1, auto=False))
    assert monic(2.0*x**2 + 6.0*x + 4.0) == 1.0*x**2 + 3.0*x + 2.0
    assert monic(2*x**2 + 3*x + 4, modulus=5) == x**2 - x + 2
def test_content():
    """content() returns the GCD of the coefficients; over GF(p) the
    content is the domain unit, 1."""
    f, F = 4*x + 2, Poly(4*x + 2)
    assert F.content() == 2
    assert content(f) == 2
    raises(ComputationFailed, lambda: content(4))
    f = Poly(2*x, modulus=3)
    assert f.content() == 1
def test_primitive():
    """primitive() splits a polynomial into (content, primitive part)."""
    f, g = 4*x + 2, 2*x + 1
    F, G = Poly(f), Poly(g)
    assert F.primitive() == (2, G)
    assert primitive(f) == (2, g)
    assert primitive(f, x) == (2, g)
    assert primitive(f, (x,)) == (2, g)
    assert primitive(F) == (2, G)
    assert primitive(f, polys=True) == (2, G)
    assert primitive(F, polys=False) == (2, g)
    raises(ComputationFailed, lambda: primitive(4))
    # over GF(p) and RR the extracted content is the domain's unit
    f = Poly(2*x, modulus=3)
    g = Poly(2.0*x, domain=RR)
    assert f.primitive() == (1, f)
    assert g.primitive() == (1.0, g)
    assert primitive(S('-3*x/4 + y + 11/8')) == \
        S('(1/8, -6*x + 8*y + 11)')
def test_compose():
    """Functional composition/decomposition of polynomials via compose()
    and decompose(), including multivariate generator ordering."""
    f = x**12 + 20*x**10 + 150*x**8 + 500*x**6 + 625*x**4 - 2*x**3 - 10*x + 9
    g = x**4 - 2*x + 9
    h = x**3 + 5*x
    F, G, H = map(Poly, (f, g, h))
    assert G.compose(H) == F
    assert compose(g, h) == f
    assert compose(g, h, x) == f
    assert compose(g, h, (x,)) == f
    assert compose(G, H) == F
    assert compose(g, h, polys=True) == F
    assert compose(G, H, polys=False) == f
    # decompose() inverts the composition above
    assert F.decompose() == [G, H]
    assert decompose(f) == [g, h]
    assert decompose(f, x) == [g, h]
    assert decompose(f, (x,)) == [g, h]
    assert decompose(F) == [G, H]
    assert decompose(f, polys=True) == [G, H]
    assert decompose(F, polys=False) == [g, h]
    raises(ComputationFailed, lambda: compose(4, 2))
    raises(ComputationFailed, lambda: decompose(4))
    # the generator order determines which variable is substituted
    assert compose(x**2 - y**2, x - y, x, y) == x**2 - 2*x*y
    assert compose(x**2 - y**2, x - y, y, x) == -y**2 + 2*x*y
def test_shift():
    """Poly.shift(a) substitutes x -> x + a."""
    assert Poly(x**2 - 2*x + 1, x).shift(2) == Poly(x**2 + 2*x + 1, x)
def test_sturm():
    """Sturm sequences via sturm(), including a coefficient domain that
    needs auto-conversion to a field (DomainError when auto=False)."""
    f, F = x, Poly(x, domain='QQ')
    g, G = 1, Poly(1, x, domain='QQ')
    assert F.sturm() == [F, G]
    assert sturm(f) == [f, g]
    assert sturm(f, x) == [f, g]
    assert sturm(f, (x,)) == [f, g]
    assert sturm(F) == [F, G]
    assert sturm(f, polys=True) == [F, G]
    assert sturm(F, polys=False) == [f, g]
    raises(ComputationFailed, lambda: sturm(4))
    raises(DomainError, lambda: sturm(f, auto=False))
    # a non-trivial sequence over ZZ(pi)
    f = Poly(S(1024)/(15625*pi**8)*x**5
             - S(4096)/(625*pi**8)*x**4
             + S(32)/(15625*pi**4)*x**3
             - S(128)/(625*pi**4)*x**2
             + S(1)/62500*x
             - S(1)/625, x, domain='ZZ(pi)')
    assert sturm(f) == \
        [Poly(x**3 - 100*x**2 + pi**4/64*x - 25*pi**4/16, x, domain='ZZ(pi)'),
         Poly(3*x**2 - 200*x + pi**4/64, x, domain='ZZ(pi)'),
         Poly((S(20000)/9 - pi**4/96)*x + 25*pi**4/18, x, domain='ZZ(pi)'),
         Poly((-3686400000000*pi**4 - 11520000*pi**8 - 9*pi**12)/(26214400000000 - 245760000*pi**4 + 576*pi**8), x, domain='ZZ(pi)')]
def test_gff():
    """Greatest factorial factorization: gff_list() works, while the
    plain gff() entry point raises NotImplementedError."""
    f = x**5 + 2*x**4 - x**3 - 2*x**2
    assert Poly(f).gff_list() == [(Poly(x), 1), (Poly(x + 2), 4)]
    assert gff_list(f) == [(x, 1), (x + 2, 4)]
    raises(NotImplementedError, lambda: gff(f))
    f = x*(x - 1)**3*(x - 2)**2*(x - 4)**2*(x - 5)
    assert Poly(f).gff_list() == [(
        Poly(x**2 - 5*x + 4), 1), (Poly(x**2 - 5*x + 4), 2), (Poly(x), 3)]
    assert gff_list(f) == [(x**2 - 5*x + 4, 1), (x**2 - 5*x + 4, 2), (x, 3)]
    raises(NotImplementedError, lambda: gff(f))
def test_sqf_norm():
    """sqf_norm() computes square-free norms over algebraic extensions,
    both as a function and as a Poly method."""
    assert sqf_norm(x**2 - 2, extension=sqrt(3)) == \
        (1, x**2 - 2*sqrt(3)*x + 1, x**4 - 10*x**2 + 1)
    assert sqf_norm(x**2 - 3, extension=sqrt(2)) == \
        (1, x**2 - 2*sqrt(2)*x - 1, x**4 - 10*x**2 + 1)
    assert Poly(x**2 - 2, extension=sqrt(3)).sqf_norm() == \
        (1, Poly(x**2 - 2*sqrt(3)*x + 1, x, extension=sqrt(3)),
         Poly(x**4 - 10*x**2 + 1, x, domain='QQ'))
    assert Poly(x**2 - 3, extension=sqrt(2)).sqf_norm() == \
        (1, Poly(x**2 - 2*sqrt(2)*x - 1, x, extension=sqrt(2)),
         Poly(x**4 - 10*x**2 + 1, x, domain='QQ'))
def test_sqf():
    """Square-free part, square-free decomposition and sqf() factoring,
    including rational functions and very large exponents."""
    f = x**5 - x**3 - x**2 + 1
    g = x**3 + 2*x**2 + 2*x + 1
    h = x - 1
    p = x**4 + x**3 - x - 1
    F, G, H, P = map(Poly, (f, g, h, p))
    assert F.sqf_part() == P
    assert sqf_part(f) == p
    assert sqf_part(f, x) == p
    assert sqf_part(f, (x,)) == p
    assert sqf_part(F) == P
    assert sqf_part(f, polys=True) == P
    assert sqf_part(F, polys=False) == p
    assert F.sqf_list() == (1, [(G, 1), (H, 2)])
    assert sqf_list(f) == (1, [(g, 1), (h, 2)])
    assert sqf_list(f, x) == (1, [(g, 1), (h, 2)])
    assert sqf_list(f, (x,)) == (1, [(g, 1), (h, 2)])
    assert sqf_list(F) == (1, [(G, 1), (H, 2)])
    assert sqf_list(f, polys=True) == (1, [(G, 1), (H, 2)])
    assert sqf_list(F, polys=False) == (1, [(g, 1), (h, 2)])
    assert F.sqf_list_include() == [(G, 1), (H, 2)]
    raises(ComputationFailed, lambda: sqf_part(4))
    assert sqf(1) == 1
    assert sqf_list(1) == (1, [])
    assert sqf((2*x**2 + 2)**7) == 128*(x**2 + 1)**7
    assert sqf(f) == g*h**2
    assert sqf(f, x) == g*h**2
    assert sqf(f, (x,)) == g*h**2
    # rational functions: only the numerator is decomposed
    d = x**2 + y**2
    assert sqf(f/d) == (g*h**2)/d
    assert sqf(f/d, x) == (g*h**2)/d
    assert sqf(f/d, (x,)) == (g*h**2)/d
    assert sqf(x - 1) == x - 1
    assert sqf(-x - 1) == -x - 1
    assert sqf(x - 1) == x - 1
    assert sqf(6*x - 10) == Mul(2, 3*x - 5, evaluate=False)
    assert sqf((6*x - 10)/(3*x - 6)) == S(2)/3*((3*x - 5)/(x - 2))
    assert sqf(Poly(x**2 - 2*x + 1)) == (x - 1)**2
    # an input that collapses to a constant after expansion
    f = 3 + x - x*(1 + x) + x**2
    assert sqf(f) == 3
    # huge exponents must be handled without expanding the product
    f = (x**2 + 2*x + 1)**20000000000
    assert sqf(f) == (x + 1)**40000000000
    assert sqf_list(f) == (1, [(x + 1, 40000000000)])
def test_factor():
    """factor()/factor_list() over ZZ, algebraic extensions, GF(p),
    symbolic exponents, rational functions, Eq and the deep flag."""
    f = x**5 - x**3 - x**2 + 1
    u = x + 1
    v = x - 1
    w = x**2 + x + 1
    F, U, V, W = map(Poly, (f, u, v, w))
    assert F.factor_list() == (1, [(U, 1), (V, 2), (W, 1)])
    assert factor_list(f) == (1, [(u, 1), (v, 2), (w, 1)])
    assert factor_list(f, x) == (1, [(u, 1), (v, 2), (w, 1)])
    assert factor_list(f, (x,)) == (1, [(u, 1), (v, 2), (w, 1)])
    assert factor_list(F) == (1, [(U, 1), (V, 2), (W, 1)])
    assert factor_list(f, polys=True) == (1, [(U, 1), (V, 2), (W, 1)])
    assert factor_list(F, polys=False) == (1, [(u, 1), (v, 2), (w, 1)])
    assert F.factor_list_include() == [(U, 1), (V, 2), (W, 1)]
    assert factor_list(1) == (1, [])
    assert factor_list(6) == (6, [])
    assert factor_list(sqrt(3), x) == (sqrt(3), [])
    # non-polynomial powers become (base, exponent) factors
    assert factor_list((-1)**x, x) == (1, [(-1, x)])
    assert factor_list((2*x)**y, x) == (1, [(2, y), (x, y)])
    assert factor_list(sqrt(x*y), x) == (1, [(x*y, S.Half)])
    assert factor(6) == 6 and factor(6).is_Integer
    assert factor_list(3*x) == (3, [(x, 1)])
    assert factor_list(3*x**2) == (3, [(x, 2)])
    assert factor(3*x) == 3*x
    assert factor(3*x**2) == 3*x**2
    assert factor((2*x**2 + 2)**7) == 128*(x**2 + 1)**7
    assert factor(f) == u*v**2*w
    assert factor(f, x) == u*v**2*w
    assert factor(f, (x,)) == u*v**2*w
    g, p, q, r = x**2 - y**2, x - y, x + y, x**2 + 1
    assert factor(f/g) == (u*v**2*w)/(p*q)
    assert factor(f/g, x) == (u*v**2*w)/(p*q)
    assert factor(f/g, (x,)) == (u*v**2*w)/(p*q)
    # symbol assumptions influence how radicals and powers split
    p = Symbol('p', positive=True)
    i = Symbol('i', integer=True)
    r = Symbol('r', real=True)
    assert factor(sqrt(x*y)).is_Pow is True
    assert factor(sqrt(3*x**2 - 3)) == sqrt(3)*sqrt((x - 1)*(x + 1))
    assert factor(sqrt(3*x**2 + 3)) == sqrt(3)*sqrt(x**2 + 1)
    assert factor((y*x**2 - y)**i) == y**i*(x - 1)**i*(x + 1)**i
    assert factor((y*x**2 + y)**i) == y**i*(x**2 + 1)**i
    assert factor((y*x**2 - y)**t) == (y*(x - 1)*(x + 1))**t
    assert factor((y*x**2 + y)**t) == (y*(x**2 + 1))**t
    f = sqrt(expand((r**2 + 1)*(p + 1)*(p - 1)*(p - 2)**3))
    g = sqrt((p - 2)**3*(p - 1))*sqrt(p + 1)*sqrt(r**2 + 1)
    assert factor(f) == g
    assert factor(g) == g
    g = (x - 1)**5*(r**2 + 1)
    f = sqrt(expand(g))
    assert factor(f) == sqrt(g)
    # EX-domain coefficients: the polynomial is irreducible as given
    f = Poly(sin(1)*x + 1, x, domain=EX)
    assert f.factor_list() == (1, [(f, 1)])
    # factoring over algebraic extensions
    f = x**4 + 1
    assert factor(f) == f
    assert factor(f, extension=I) == (x**2 - I)*(x**2 + I)
    assert factor(f, gaussian=True) == (x**2 - I)*(x**2 + I)
    assert factor(
        f, extension=sqrt(2)) == (x**2 + sqrt(2)*x + 1)*(x**2 - sqrt(2)*x + 1)
    f = x**2 + 2*sqrt(2)*x + 2
    assert factor(f, extension=sqrt(2)) == (x + sqrt(2))**2
    assert factor(f**3, extension=sqrt(2)) == (x + sqrt(2))**6
    assert factor(x**2 - 2*y**2, extension=sqrt(2)) == \
        (x + sqrt(2)*y)*(x - sqrt(2)*y)
    assert factor(2*x**2 - 4*y**2, extension=sqrt(2)) == \
        2*((x + sqrt(2)*y)*(x - sqrt(2)*y))
    assert factor(x - 1) == x - 1
    assert factor(-x - 1) == -x - 1
    assert factor(x - 1) == x - 1
    assert factor(6*x - 10) == Mul(2, 3*x - 5, evaluate=False)
    # GF(p) factoring: symmetric vs non-symmetric representation
    assert factor(x**11 + x + 1, modulus=65537, symmetric=True) == \
        (x**2 + x + 1)*(x**9 - x**8 + x**6 - x**5 + x**3 - x** 2 + 1)
    assert factor(x**11 + x + 1, modulus=65537, symmetric=False) == \
        (x**2 + x + 1)*(x**9 + 65536*x**8 + x**6 + 65536*x**5 +
                        x**3 + 65536*x** 2 + 1)
    f = x/pi + x*sin(x)/pi
    g = y/(pi**2 + 2*pi + 1) + y*sin(x)/(pi**2 + 2*pi + 1)
    assert factor(f) == x*(sin(x) + 1)/pi
    assert factor(g) == y*(sin(x) + 1)/(pi + 1)**2
    # Eq is factored on both sides
    assert factor(Eq(
        x**2 + 2*x + 1, x**3 + 1)) == Eq((x + 1)**2, (x + 1)*(x**2 - x + 1))
    f = (x**2 - 1)/(x**2 + 4*x + 4)
    assert factor(f) == (x + 1)*(x - 1)/(x + 2)**2
    assert factor(f, x) == (x + 1)*(x - 1)/(x + 2)**2
    f = 3 + x - x*(1 + x) + x**2
    assert factor(f) == 3
    assert factor(f, x) == 3
    assert factor(1/(x**2 + 2*x + 1/x) - 1) == -((1 - x + 2*x**2 +
                  x**3)/(1 + 2*x**2 + x**3))
    # expand=False is only legal without explicit generators
    assert factor(f, expand=False) == f
    raises(PolynomialError, lambda: factor(f, x, expand=False))
    raises(FlagError, lambda: factor(x**2 - 1, polys=True))
    # containers are factored element-wise
    assert factor([x, Eq(x**2 - y**2, Tuple(x**2 - z**2, 1/x + 1/y))]) == \
        [x, Eq((x - y)*(x + y), Tuple((x - z)*(x + z), (x + y)/x/y))]
    # Poly vs PurePoly factors keep their respective classes
    assert not isinstance(
        Poly(x**3 + x + 1).factor_list()[1][0][0], PurePoly) is True
    assert isinstance(
        PurePoly(x**3 + x + 1).factor_list()[1][0][0], PurePoly) is True
    assert factor(sqrt(-x)) == sqrt(-x)
    # issue 5917
    e = (-2*x*(-x + 1)*(x - 1)*(-x*(-x + 1)*(x - 1) - x*(x - 1)**2)*(x**2*(x -
         1) - x*(x - 1) - x) - (-2*x**2*(x - 1)**2 - x*(-x + 1)*(-x*(-x + 1) +
         x*(x - 1)))*(x**2*(x - 1)**4 - x*(-x*(-x + 1)*(x - 1) - x*(x - 1)**2)))
    assert factor(e) == 0
    # deep option
    assert factor(sin(x**2 + x) + x, deep=True) == sin(x*(x + 1)) + x
    assert factor(sqrt(x**2)) == sqrt(x**2)
def test_factor_large():
    """factor() must handle very large exponents without expanding the
    full products."""
    f = (x**2 + 4*x + 4)**10000000*(x**2 + 1)*(x**2 + 2*x + 1)**1234567
    g = ((x**2 + 2*x + 1)**3000*y**2 + (x**2 + 2*x + 1)**3000*2*y + (
        x**2 + 2*x + 1)**3000)
    assert factor(f) == (x + 2)**20000000*(x**2 + 1)*(x + 1)**2469134
    assert factor(g) == (x + 1)**6000*(y + 1)**2
    assert factor_list(
        f) == (1, [(x + 1, 2469134), (x + 2, 20000000), (x**2 + 1, 1)])
    assert factor_list(g) == (1, [(y + 1, 2), (x + 1, 6000)])
    f = (x**2 - y**2)**200000*(x**7 + 1)
    g = (x**2 + y**2)**200000*(x**7 + 1)
    assert factor(f) == \
        (x + 1)*(x - y)**200000*(x + y)**200000*(x**6 - x**5 +
         x**4 - x**3 + x**2 - x + 1)
    assert factor(g, gaussian=True) == \
        (x + 1)*(x - I*y)**200000*(x + I*y)**200000*(x**6 - x**5 +
         x**4 - x**3 + x**2 - x + 1)
    assert factor_list(f) == \
        (1, [(x + 1, 1), (x - y, 200000), (x + y, 200000), (x**6 -
         x**5 + x**4 - x**3 + x**2 - x + 1, 1)])
    assert factor_list(g, gaussian=True) == \
        (1, [(x + 1, 1), (x - I*y, 200000), (x + I*y, 200000), (
         x**6 - x**5 + x**4 - x**3 + x**2 - x + 1, 1)])
@XFAIL
def test_factor_noeval():
    """Known failure: factor() is expected not to auto-evaluate the
    extracted constant back into the product."""
    assert factor(6*x - 10) == 2*(3*x - 5)
    assert factor((6*x - 10)/(3*x - 6)) == S(2)/3*((3*x - 5)/(x - 2))
def test_intervals():
    """Root isolation via intervals(): square-free and multiplicity
    forms, eps refinement, inf/sup bounds, multiple polynomials, and
    complex isolation with all=True."""
    assert intervals(0) == []
    assert intervals(1) == []
    assert intervals(x, sqf=True) == [(0, 0)]
    assert intervals(x) == [((0, 0), 1)]
    assert intervals(x**128) == [((0, 0), 128)]
    assert intervals([x**2, x**4]) == [((0, 0), {0: 2, 1: 4})]
    f = Poly((2*x/5 - S(17)/3)*(4*x + S(1)/257))
    assert f.intervals(sqf=True) == [(-1, 0), (14, 15)]
    assert f.intervals() == [((-1, 0), 1), ((14, 15), 1)]
    assert f.intervals(fast=True, sqf=True) == [(-1, 0), (14, 15)]
    assert f.intervals(fast=True) == [((-1, 0), 1), ((14, 15), 1)]
    # eps accepts both exact rationals and floats, with identical output
    assert f.intervals(eps=S(1)/10) == f.intervals(eps=0.1) == \
        [((-S(1)/258, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert f.intervals(eps=S(1)/100) == f.intervals(eps=0.01) == \
        [((-S(1)/258, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert f.intervals(eps=S(1)/1000) == f.intervals(eps=0.001) == \
        [((-S(1)/1002, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert f.intervals(eps=S(1)/10000) == f.intervals(eps=0.0001) == \
        [((-S(1)/1028, -S(1)/1028), 1), ((S(85)/6, S(85)/6), 1)]
    f = (2*x/5 - S(17)/3)*(4*x + S(1)/257)
    assert intervals(f, sqf=True) == [(-1, 0), (14, 15)]
    assert intervals(f) == [((-1, 0), 1), ((14, 15), 1)]
    assert intervals(f, eps=S(1)/10) == intervals(f, eps=0.1) == \
        [((-S(1)/258, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert intervals(f, eps=S(1)/100) == intervals(f, eps=0.01) == \
        [((-S(1)/258, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert intervals(f, eps=S(1)/1000) == intervals(f, eps=0.001) == \
        [((-S(1)/1002, 0), 1), ((S(85)/6, S(85)/6), 1)]
    assert intervals(f, eps=S(1)/10000) == intervals(f, eps=0.0001) == \
        [((-S(1)/1028, -S(1)/1028), 1), ((S(85)/6, S(85)/6), 1)]
    f = Poly((x**2 - 2)*(x**2 - 3)**7*(x + 1)*(7*x + 3)**3)
    assert f.intervals() == \
        [((-2, -S(3)/2), 7), ((-S(3)/2, -1), 1),
         ((-1, -1), 1), ((-1, 0), 3),
         ((1, S(3)/2), 1), ((S(3)/2, 2), 7)]
    # several polynomials at once: the dict maps index -> multiplicity
    assert intervals([x**5 - 200, x**5 - 201]) == \
        [((S(75)/26, S(101)/35), {0: 1}), ((S(309)/107, S(26)/9), {1: 1})]
    assert intervals([x**5 - 200, x**5 - 201], fast=True) == \
        [((S(75)/26, S(101)/35), {0: 1}), ((S(309)/107, S(26)/9), {1: 1})]
    assert intervals([x**2 - 200, x**2 - 201]) == \
        [((-S(71)/5, -S(85)/6), {1: 1}), ((-S(85)/6, -14), {0: 1}),
         ((14, S(85)/6), {0: 1}), ((S(85)/6, S(71)/5), {1: 1})]
    assert intervals([x + 1, x + 2, x - 1, x + 1, 1, x - 1, x - 1, (x - 2)**2]) == \
        [((-2, -2), {1: 1}), ((-1, -1), {0: 1, 3: 1}), ((1, 1), {2:
         1, 5: 1, 6: 1}), ((2, 2), {7: 2})]
    # inf/sup restrict the search range
    f, g, h = x**2 - 2, x**4 - 4*x**2 + 4, x - 1
    assert intervals(f, inf=S(7)/4, sqf=True) == []
    assert intervals(f, inf=S(7)/5, sqf=True) == [(S(7)/5, S(3)/2)]
    assert intervals(f, sup=S(7)/4, sqf=True) == [(-2, -1), (1, S(3)/2)]
    assert intervals(f, sup=S(7)/5, sqf=True) == [(-2, -1)]
    assert intervals(g, inf=S(7)/4) == []
    assert intervals(g, inf=S(7)/5) == [((S(7)/5, S(3)/2), 2)]
    assert intervals(g, sup=S(7)/4) == [((-2, -1), 2), ((1, S(3)/2), 2)]
    assert intervals(g, sup=S(7)/5) == [((-2, -1), 2)]
    assert intervals([g, h], inf=S(7)/4) == []
    assert intervals([g, h], inf=S(7)/5) == [((S(7)/5, S(3)/2), {0: 2})]
    assert intervals([g, h], sup=S(
        7)/4) == [((-2, -1), {0: 2}), ((1, 1), {1: 1}), ((1, S(3)/2), {0: 2})]
    assert intervals(
        [g, h], sup=S(7)/5) == [((-2, -1), {0: 2}), ((1, 1), {1: 1})]
    assert intervals([x + 2, x**2 - 2]) == \
        [((-2, -2), {0: 1}), ((-2, -1), {1: 1}), ((1, 2), {1: 1})]
    # strict=True forbids intervals sharing an endpoint with a root
    assert intervals([x + 2, x**2 - 2], strict=True) == \
        [((-2, -2), {0: 1}), ((-S(3)/2, -1), {1: 1}), ((1, 2), {1: 1})]
    # all complex roots: rectangles in the complex plane
    f = 7*z**4 - 19*z**3 + 20*z**2 + 17*z + 20
    assert intervals(f) == []
    real_part, complex_part = intervals(f, all=True, sqf=True)
    assert real_part == []
    assert all(re(a) < re(r) < re(b) and im(
        a) < im(r) < im(b) for (a, b), r in zip(complex_part, nroots(f)))
    assert complex_part == [(-S(40)/7 - 40*I/7, 0), (-S(40)/7, 40*I/7),
                            (-40*I/7, S(40)/7), (0, S(40)/7 + 40*I/7)]
    real_part, complex_part = intervals(f, all=True, sqf=True, eps=S(1)/10)
    assert real_part == []
    assert all(re(a) < re(r) < re(b) and im(
        a) < im(r) < im(b) for (a, b), r in zip(complex_part, nroots(f)))
    # absurdly small eps is rejected
    raises(ValueError, lambda: intervals(x**2 - 2, eps=10**-100000))
    raises(ValueError, lambda: Poly(x**2 - 2).intervals(eps=10**-100000))
    raises(
        ValueError, lambda: intervals([x**2 - 2, x**2 - 3], eps=10**-100000))
def test_refine_root():
    """refine_root() shrinks an isolating interval by bisection steps or
    to a target eps; non-square-free input is rejected."""
    f = Poly(x**2 - 2)
    assert f.refine_root(1, 2, steps=0) == (1, 2)
    assert f.refine_root(-2, -1, steps=0) == (-2, -1)
    assert f.refine_root(1, 2, steps=None) == (1, S(3)/2)
    assert f.refine_root(-2, -1, steps=None) == (-S(3)/2, -1)
    assert f.refine_root(1, 2, steps=1) == (1, S(3)/2)
    assert f.refine_root(-2, -1, steps=1) == (-S(3)/2, -1)
    assert f.refine_root(1, 2, steps=1, fast=True) == (1, S(3)/2)
    assert f.refine_root(-2, -1, steps=1, fast=True) == (-S(3)/2, -1)
    assert f.refine_root(1, 2, eps=S(1)/100) == (S(24)/17, S(17)/12)
    assert f.refine_root(1, 2, eps=1e-2) == (S(24)/17, S(17)/12)
    # squared polynomial: either rejected up front or refinement fails
    raises(PolynomialError, lambda: (f**2).refine_root(1, 2, check_sqf=True))
    raises(RefinementFailed, lambda: (f**2).refine_root(1, 2))
    raises(RefinementFailed, lambda: (f**2).refine_root(2, 3))
    f = x**2 - 2
    assert refine_root(f, 1, 2, steps=1) == (1, S(3)/2)
    assert refine_root(f, -2, -1, steps=1) == (-S(3)/2, -1)
    assert refine_root(f, 1, 2, steps=1, fast=True) == (1, S(3)/2)
    assert refine_root(f, -2, -1, steps=1, fast=True) == (-S(3)/2, -1)
    assert refine_root(f, 1, 2, eps=S(1)/100) == (S(24)/17, S(17)/12)
    assert refine_root(f, 1, 2, eps=1e-2) == (S(24)/17, S(17)/12)
    raises(PolynomialError, lambda: refine_root(1, 7, 8, eps=S(1)/100))
    raises(ValueError, lambda: Poly(f).refine_root(1, 2, eps=10**-100000))
    raises(ValueError, lambda: refine_root(f, 1, 2, eps=10**-100000))
def test_count_roots():
    """count_roots() counts real roots in [inf, sup], and complex roots
    when the bounds are complex."""
    assert count_roots(x**2 - 2) == 2
    assert count_roots(x**2 - 2, inf=-oo) == 2
    assert count_roots(x**2 - 2, sup=+oo) == 2
    assert count_roots(x**2 - 2, inf=-oo, sup=+oo) == 2
    assert count_roots(x**2 - 2, inf=-2) == 2
    assert count_roots(x**2 - 2, inf=-1) == 1
    assert count_roots(x**2 - 2, sup=1) == 1
    assert count_roots(x**2 - 2, sup=2) == 2
    assert count_roots(x**2 - 2, inf=-1, sup=1) == 0
    assert count_roots(x**2 - 2, inf=-2, sup=2) == 2
    assert count_roots(x**2 - 2, inf=-1, sup=1) == 0
    assert count_roots(x**2 - 2, inf=-2, sup=2) == 2
    # purely imaginary roots require complex bounds to be counted
    assert count_roots(x**2 + 2) == 0
    assert count_roots(x**2 + 2, inf=-2*I) == 2
    assert count_roots(x**2 + 2, sup=+2*I) == 2
    assert count_roots(x**2 + 2, inf=-2*I, sup=+2*I) == 2
    assert count_roots(x**2 + 2, inf=0) == 0
    assert count_roots(x**2 + 2, sup=0) == 0
    assert count_roots(x**2 + 2, inf=-I) == 1
    assert count_roots(x**2 + 2, sup=+I) == 1
    assert count_roots(x**2 + 2, inf=+I/2, sup=+I) == 0
    assert count_roots(x**2 + 2, inf=-I, sup=-I/2) == 0
    raises(PolynomialError, lambda: count_roots(1))
def test_Poly_root():
    """Poly.root(i) indexes real roots (with multiplicity) and falls back
    to CRootOf for non-radical roots; out-of-range index raises."""
    f = Poly(2*x**3 - 7*x**2 + 4*x + 4)
    assert f.root(0) == -S(1)/2
    assert f.root(1) == 2
    assert f.root(2) == 2
    raises(IndexError, lambda: f.root(3))
    assert Poly(x**5 + x + 1).root(0) == rootof(x**3 - x**2 + 1, 0)
def test_real_roots():
    """real_roots() returns real roots, repeated per multiplicity, or as
    (root, multiplicity) pairs when multiple=False."""
    assert real_roots(x) == [0]
    assert real_roots(x, multiple=False) == [(0, 1)]
    assert real_roots(x**3) == [0, 0, 0]
    assert real_roots(x**3, multiple=False) == [(0, 3)]
    assert real_roots(x*(x**3 + x + 3)) == [rootof(x**3 + x + 3, 0), 0]
    assert real_roots(x*(x**3 + x + 3), multiple=False) == [(rootof(
        x**3 + x + 3, 0), 1), (0, 1)]
    assert real_roots(
        x**3*(x**3 + x + 3)) == [rootof(x**3 + x + 3, 0), 0, 0, 0]
    assert real_roots(x**3*(x**3 + x + 3), multiple=False) == [(rootof(
        x**3 + x + 3, 0), 1), (0, 3)]
    f = 2*x**3 - 7*x**2 + 4*x + 4
    g = x**3 + x + 1
    assert Poly(f).real_roots() == [-S(1)/2, 2, 2]
    assert Poly(g).real_roots() == [rootof(g, 0)]
def test_all_roots():
    """Poly.all_roots() returns every root, radical or CRootOf."""
    f = 2*x**3 - 7*x**2 + 4*x + 4
    g = x**3 + x + 1
    assert Poly(f).all_roots() == [-S(1)/2, 2, 2]
    assert Poly(g).all_roots() == [rootof(g, 0), rootof(g, 1), rootof(g, 2)]
def test_nroots():
    """Numerical root-finding via nroots(): real/complex roots, precision
    control with epsilon_eq, and multivariate/constant error cases."""
    assert Poly(0, x).nroots() == []
    assert Poly(1, x).nroots() == []
    assert Poly(x**2 - 1, x).nroots() == [-1.0, 1.0]
    assert Poly(x**2 + 1, x).nroots() == [-1.0*I, 1.0*I]
    roots = Poly(x**2 - 1, x).nroots()
    assert roots == [-1.0, 1.0]
    roots = Poly(x**2 + 1, x).nroots()
    assert roots == [-1.0*I, 1.0*I]
    roots = Poly(x**2/3 - S(1)/3, x).nroots()
    assert roots == [-1.0, 1.0]
    roots = Poly(x**2/3 + S(1)/3, x).nroots()
    assert roots == [-1.0*I, 1.0*I]
    assert Poly(x**2 + 2*I, x).nroots() == [-1.0 + 1.0*I, 1.0 - 1.0*I]
    assert Poly(
        x**2 + 2*I, x, extension=I).nroots() == [-1.0 + 1.0*I, 1.0 - 1.0*I]
    assert Poly(0.2*x + 0.1).nroots() == [-0.5]
    # 5-digit precision: values agree within 1e-5 but not 1e-6
    roots = nroots(x**5 + x + 1, n=5)
    eps = Float("1e-5")
    assert re(roots[0]).epsilon_eq(-0.75487, eps) is S.true
    assert im(roots[0]) == 0.0
    assert re(roots[1]) == -0.5
    assert im(roots[1]).epsilon_eq(-0.86602, eps) is S.true
    assert re(roots[2]) == -0.5
    assert im(roots[2]).epsilon_eq(+0.86602, eps) is S.true
    assert re(roots[3]).epsilon_eq(+0.87743, eps) is S.true
    assert im(roots[3]).epsilon_eq(-0.74486, eps) is S.true
    assert re(roots[4]).epsilon_eq(+0.87743, eps) is S.true
    assert im(roots[4]).epsilon_eq(+0.74486, eps) is S.true
    eps = Float("1e-6")
    assert re(roots[0]).epsilon_eq(-0.75487, eps) is S.false
    assert im(roots[0]) == 0.0
    assert re(roots[1]) == -0.5
    assert im(roots[1]).epsilon_eq(-0.86602, eps) is S.false
    assert re(roots[2]) == -0.5
    assert im(roots[2]).epsilon_eq(+0.86602, eps) is S.false
    assert re(roots[3]).epsilon_eq(+0.87743, eps) is S.false
    assert im(roots[3]).epsilon_eq(-0.74486, eps) is S.false
    assert re(roots[4]).epsilon_eq(+0.87743, eps) is S.false
    assert im(roots[4]).epsilon_eq(+0.74486, eps) is S.false
    # multivariate input is rejected
    raises(DomainError, lambda: Poly(x + y, x).nroots())
    raises(MultivariatePolynomialError, lambda: Poly(x + y).nroots())
    assert nroots(x**2 - 1) == [-1.0, 1.0]
    roots = nroots(x**2 - 1)
    assert roots == [-1.0, 1.0]
    assert nroots(x + I) == [-1.0*I]
    assert nroots(x + 2*I) == [-2.0*I]
    raises(PolynomialError, lambda: nroots(0))
    # issue 8296
    f = Poly(x**4 - 1)
    assert f.nroots(2) == [w.n(2) for w in f.all_roots()]
def test_ground_roots():
    """ground_roots() maps rational roots to their multiplicities."""
    f = x**6 - 4*x**4 + 4*x**3 - x**2
    assert Poly(f).ground_roots() == {S(1): 2, S(0): 2}
    assert ground_roots(f) == {S(1): 2, S(0): 2}
def test_nth_power_roots_poly():
    """nth_power_roots_poly(f, n) builds the polynomial whose roots are
    the n-th powers of the roots of f; n must be a positive integer."""
    f = x**4 - x**2 + 1
    f_2 = (x**2 - x + 1)**2
    f_3 = (x**2 + 1)**2
    f_4 = (x**2 + x + 1)**2
    f_12 = (x - 1)**4
    assert nth_power_roots_poly(f, 1) == f
    raises(ValueError, lambda: nth_power_roots_poly(f, 0))
    raises(ValueError, lambda: nth_power_roots_poly(f, x))
    assert factor(nth_power_roots_poly(f, 2)) == f_2
    assert factor(nth_power_roots_poly(f, 3)) == f_3
    assert factor(nth_power_roots_poly(f, 4)) == f_4
    assert factor(nth_power_roots_poly(f, 12)) == f_12
    raises(MultivariatePolynomialError, lambda: nth_power_roots_poly(
        x + y, 2, x, y))
def test_torational_factor_list():
    """_torational_factor_list() factors after a rationalizing
    substitution; it returns None when no such substitution exists."""
    p = expand(((x**2-1)*(x-2)).subs({x:x*(1 + sqrt(2))}))
    assert _torational_factor_list(p, x) == (-2, [
        (-x*(1 + sqrt(2))/2 + 1, 1),
        (-x*(1 + sqrt(2)) - 1, 1),
        (-x*(1 + sqrt(2)) + 1, 1)])
    # a quartic surd cannot be rationalized this way
    p = expand(((x**2-1)*(x-2)).subs({x:x*(1 + 2**Rational(1, 4))}))
    assert _torational_factor_list(p, x) is None
def test_cancel():
    """cancel() removes common factors of rational expressions: tuples,
    Poly pairs, algebraic extensions, Piecewise and MatrixSymbol cases."""
    assert cancel(0) == 0
    assert cancel(7) == 7
    assert cancel(x) == x
    assert cancel(oo) == oo
    assert cancel((2, 3)) == (1, 2, 3)
    assert cancel((1, 0), x) == (1, 1, 0)
    assert cancel((0, 1), x) == (1, 0, 1)
    f, g, p, q = 4*x**2 - 4, 2*x - 2, 2*x + 2, 1
    F, G, P, Q = [ Poly(u, x) for u in (f, g, p, q) ]
    assert F.cancel(G) == (1, P, Q)
    assert cancel((f, g)) == (1, p, q)
    assert cancel((f, g), x) == (1, p, q)
    assert cancel((f, g), (x,)) == (1, p, q)
    assert cancel((F, G)) == (1, P, Q)
    assert cancel((f, g), polys=True) == (1, P, Q)
    assert cancel((F, G), polys=False) == (1, p, q)
    # greedy=False is needed to cancel through algebraic numbers
    f = (x**2 - 2)/(x + sqrt(2))
    assert cancel(f) == f
    assert cancel(f, greedy=False) == x - sqrt(2)
    f = (x**2 - 2)/(x - sqrt(2))
    assert cancel(f) == f
    assert cancel(f, greedy=False) == x + sqrt(2)
    assert cancel((x**2/4 - 1, x/2 - 1)) == (S(1)/2, x + 2, 1)
    assert cancel((x**2 - y)/(x - y)) == 1/(x - y)*(x**2 - y)
    assert cancel((x**2 - y**2)/(x - y), x) == x + y
    assert cancel((x**2 - y**2)/(x - y), y) == x + y
    assert cancel((x**2 - y**2)/(x - y)) == x + y
    assert cancel((x**3 - 1)/(x**2 - 1)) == (x**2 + x + 1)/(x + 1)
    assert cancel((x**3/2 - S(1)/2)/(x**2 - 1)) == (x**2 + x + 1)/(2*x + 2)
    assert cancel((exp(2*x) + 2*exp(x) + 1)/(exp(x) + 1)) == exp(x) + 1
    f = Poly(x**2 - a**2, x)
    g = Poly(x - a, x)
    F = Poly(x + a, x)
    G = Poly(1, x)
    assert cancel((f, g)) == (1, F, G)
    f = x**3 + (sqrt(2) - 2)*x**2 - (2*sqrt(2) + 3)*x - 3*sqrt(2)
    g = x**2 - 2
    assert cancel((f, g), extension=True) == (1, x**2 - 2*x - 3, x - sqrt(2))
    f = Poly(-2*x + 3, x)
    g = Poly(-x**9 + x**8 + x**6 - x**5 + 2*x**2 - 3*x + 1, x)
    assert cancel((f, g)) == (1, -f, -g)
    # mixed domains are unified before cancelling
    f = Poly(y, y, domain='ZZ(x)')
    g = Poly(1, y, domain='ZZ[x]')
    assert f.cancel(
        g) == (1, Poly(y, y, domain='ZZ(x)'), Poly(1, y, domain='ZZ(x)'))
    assert f.cancel(g, include=True) == (
        Poly(y, y, domain='ZZ(x)'), Poly(1, y, domain='ZZ(x)'))
    f = Poly(5*x*y + x, y, domain='ZZ(x)')
    g = Poly(2*x**2*y, y, domain='ZZ(x)')
    assert f.cancel(g, include=True) == (
        Poly(5*y + 1, y, domain='ZZ(x)'), Poly(2*x*y, y, domain='ZZ(x)'))
    # float coefficients: result stays a Mul rather than a crash
    f = -(-2*x - 4*y + 0.005*(z - y)**2)/((z - y)*(-z + y + 2))
    assert cancel(f).is_Mul == True
    P = tanh(x - 3.0)
    Q = tanh(x + 3.0)
    f = ((-2*P**2 + 2)*(-P**2 + 1)*Q**2/2 + (-2*P**2 + 2)*(-2*Q**2 + 2)*P*Q - (-2*P**2 + 2)*P**2*Q**2 + (-2*Q**2 + 2)*(-Q**2 + 1)*P**2/2 - (-2*Q**2 + 2)*P**2*Q**2)/(2*sqrt(P**2*Q**2 + 0.0001)) \
      + (-(-2*P**2 + 2)*P*Q**2/2 - (-2*Q**2 + 2)*P**2*Q/2)*((-2*P**2 + 2)*P*Q**2/2 + (-2*Q**2 + 2)*P**2*Q/2)/(2*(P**2*Q**2 + 0.0001)**(S(3)/2))
    assert cancel(f).is_Mul == True
    # issue 7022
    A = Symbol('A', commutative=False)
    p1 = Piecewise((A*(x**2 - 1)/(x + 1), x > 1), ((x + 2)/(x**2 + 2*x), True))
    p2 = Piecewise((A*(x - 1), x > 1), (1/x, True))
    assert cancel(p1) == p2
    assert cancel(2*p1) == 2*p2
    assert cancel(1 + p1) == 1 + p2
    assert cancel((x**2 - 1)/(x + 1)*p1) == (x - 1)*p2
    assert cancel((x**2 - 1)/(x + 1) + p1) == (x - 1) + p2
    p3 = Piecewise(((x**2 - 1)/(x + 1), x > 1), ((x + 2)/(x**2 + 2*x), True))
    p4 = Piecewise(((x - 1), x > 1), (1/x, True))
    assert cancel(p3) == p4
    assert cancel(2*p3) == 2*p4
    assert cancel(1 + p3) == 1 + p4
    assert cancel((x**2 - 1)/(x + 1)*p3) == (x - 1)*p4
    assert cancel((x**2 - 1)/(x + 1) + p3) == (x - 1) + p4
    # issue 9363
    M = MatrixSymbol('M', 5, 5)
    assert cancel(M[0,0] + 7) == M[0,0] + 7
    expr = sin(M[1, 4] + M[2, 1] * 5 * M[4, 0]) - 5 * M[1, 2] / z
    assert cancel(expr) == (z*sin(M[1, 4] + M[2, 1] * 5 * M[4, 0]) - 5 * M[1, 2]) / z
def test_reduced():
    """reduced() divides f by a set of polynomials G, returning quotients
    and remainder; also checks GroebnerBasis.reduce/contains."""
    f = 2*x**4 + y**2 - x**2 + y**3
    G = [x**3 - x, y**3 - y]
    Q = [2*x, 1]
    r = x**2 + y**2 + y
    assert reduced(f, G) == (Q, r)
    assert reduced(f, G, x, y) == (Q, r)
    H = groebner(G)
    assert H.reduce(f) == (Q, r)
    Q = [Poly(2*x, x, y), Poly(1, x, y)]
    r = Poly(x**2 + y**2 + y, x, y)
    assert _strict_eq(reduced(f, G, polys=True), (Q, r))
    assert _strict_eq(reduced(f, G, x, y, polys=True), (Q, r))
    H = groebner(G, polys=True)
    assert _strict_eq(H.reduce(f), (Q, r))
    f = 2*x**3 + y**3 + 3*y
    G = groebner([x**2 + y**2 - 1, x*y - 2])
    Q = [x**2 - x*y**3/2 + x*y/2 + y**6/4 - y**4/2 + y**2/4, -y**5/4 + y**3/2 + 3*y/4]
    r = 0
    assert reduced(f, G) == (Q, r)
    assert G.reduce(f) == (Q, r)
    # without auto field conversion the reduction is incomplete
    assert reduced(f, G, auto=False)[1] != 0
    assert G.reduce(f, auto=False)[1] != 0
    # a zero remainder means ideal membership
    assert G.contains(f) is True
    assert G.contains(f + 1) is False
    assert reduced(1, [1], x) == ([1], 0)
    raises(ComputationFailed, lambda: reduced(1, [1]))
def test_groebner():
    """Groebner bases via groebner(): orders, polys flag, GF(p)
    coefficients, and both buchberger and f5b backends."""
    assert groebner([], x, y, z) == []
    assert groebner([x**2 + 1, y**4*x + x**3], x, y, order='lex') == [1 + x**2, -1 + y**4]
    assert groebner([x**2 + 1, y**4*x + x**3, x*y*z**3], x, y, z, order='grevlex') == [-1 + y**4, z**3, 1 + x**2]
    assert groebner([x**2 + 1, y**4*x + x**3], x, y, order='lex', polys=True) == \
        [Poly(1 + x**2, x, y), Poly(-1 + y**4, x, y)]
    assert groebner([x**2 + 1, y**4*x + x**3, x*y*z**3], x, y, z, order='grevlex', polys=True) == \
        [Poly(-1 + y**4, x, y, z), Poly(z**3, x, y, z), Poly(1 + x**2, x, y, z)]
    assert groebner([x**3 - 1, x**2 - 1]) == [x - 1]
    # Eq inputs are interpreted as lhs - rhs
    assert groebner([Eq(x**3, 1), Eq(x**2, 1)]) == [x - 1]
    F = [3*x**2 + y*z - 5*x - 1, 2*x + 3*x*y + y**2, x - 3*y + x*z - 2*z**2]
    f = z**9 - x**2*y**3 - 3*x*y**2*z + 11*y*z**2 + x**2*z**2 - 5
    G = groebner(F, x, y, z, modulus=7, symmetric=False)
    assert G == [1 + x + y + 3*z + 2*z**2 + 2*z**3 + 6*z**4 + z**5,
                 1 + 3*y + y**2 + 6*z**2 + 3*z**3 + 3*z**4 + 3*z**5 + 4*z**6,
                 1 + 4*y + 4*z + y*z + 4*z**3 + z**4 + z**6,
                 6 + 6*z + z**2 + 4*z**3 + 3*z**4 + 6*z**5 + 3*z**6 + z**7]
    # division identity: f == sum(q*g) + r modulo 7
    Q, r = reduced(f, G, x, y, z, modulus=7, symmetric=False, polys=True)
    assert sum([ q*g for q, g in zip(Q, G.polys)], r) == Poly(f, modulus=7)
    F = [x*y - 2*y, 2*y**2 - x**2]
    assert groebner(F, x, y, order='grevlex') == \
        [y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y]
    assert groebner(F, y, x, order='grevlex') == \
        [x**3 - 2*x**2, -x**2 + 2*y**2, x*y - 2*y]
    assert groebner(F, order='grevlex', field=True) == \
        [y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y]
    assert groebner([1], x) == [1]
    assert groebner([x**2 + 2.0*y], x, y) == [1.0*x**2 + 2.0*y]
    raises(ComputationFailed, lambda: groebner([1]))
    # both algorithms produce the same reduced basis
    assert groebner([x**2 - 1, x**3 + 1], method='buchberger') == [x + 1]
    assert groebner([x**2 - 1, x**3 + 1], method='f5b') == [x + 1]
    raises(ValueError, lambda: groebner([x, y], method='unknown'))
def test_fglm():
    """FGLM order conversion: a grlex basis converted with fglm() must
    match the basis computed directly in the target order."""
    F = [a + b + c + d, a*b + a*d + b*c + b*d, a*b*c + a*b*d + a*c*d + b*c*d, a*b*c*d - 1]
    G = groebner(F, a, b, c, d, order=grlex)
    B = [
        4*a + 3*d**9 - 4*d**5 - 3*d,
        4*b + 4*c - 3*d**9 + 4*d**5 + 7*d,
        4*c**2 + 3*d**10 - 4*d**6 - 3*d**2,
        4*c*d**4 + 4*c - d**9 + 4*d**5 + 5*d,
        d**12 - d**8 - d**4 + 1,
    ]
    assert groebner(F, a, b, c, d, order=lex) == B
    assert G.fglm(lex) == B
    F = [9*x**8 + 36*x**7 - 32*x**6 - 252*x**5 - 78*x**4 + 468*x**3 + 288*x**2 - 108*x + 9,
         -72*t*x**7 - 252*t*x**6 + 192*t*x**5 + 1260*t*x**4 + 312*t*x**3 - 404*t*x**2 - 576*t*x + \
         108*t - 72*x**7 - 256*x**6 + 192*x**5 + 1280*x**4 + 312*x**3 - 576*x + 96]
    G = groebner(F, t, x, order=grlex)
    B = [
        203577793572507451707*t + 627982239411707112*x**7 - 666924143779443762*x**6 - \
        10874593056632447619*x**5 + 5119998792707079562*x**4 + 72917161949456066376*x**3 + \
        20362663855832380362*x**2 - 142079311455258371571*x + 183756699868981873194,
        9*x**8 + 36*x**7 - 32*x**6 - 252*x**5 - 78*x**4 + 468*x**3 + 288*x**2 - 108*x + 9,
    ]
    assert groebner(F, t, x, order=lex) == B
    assert G.fglm(lex) == B
    # conversion also works lex -> grlex
    F = [x**2 - x - 3*y + 1, -2*x + y**2 + y - 1]
    G = groebner(F, x, y, order=lex)
    B = [
        x**2 - x - 3*y + 1,
        y**2 - 2*x + y - 1,
    ]
    assert groebner(F, x, y, order=grlex) == B
    assert G.fglm(grlex) == B
def test_is_zero_dimensional():
    """is_zero_dimensional() detects whether an ideal has finitely many
    solutions over the given generators."""
    assert is_zero_dimensional([x, y], x, y) is True
    assert is_zero_dimensional([x**3 + y**2], x, y) is False
    assert is_zero_dimensional([x, y, z], x, y, z) is True
    # adding an unconstrained generator makes the ideal positive-dimensional
    assert is_zero_dimensional([x, y, z], x, y, z, t) is False
    F = [x*y - z, y*z - x, x*y - y]
    assert is_zero_dimensional(F, x, y, z) is True
    F = [x**2 - 2*x*z + 5, x*y**2 + y*z**3, 3*y**2 - 8*z**2]
    assert is_zero_dimensional(F, x, y, z) is True
def test_GroebnerBasis():
    """GroebnerBasis container semantics: indexing, slicing, attributes and
    equality against plain expressions, tuples and Poly instances, for both
    ``polys=False`` (expressions) and ``polys=True`` (Poly) modes."""
    F = [x*y - 2*y, 2*y**2 - x**2]
    G = groebner(F, x, y, order='grevlex')
    H = [y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y]
    P = [ Poly(h, x, y) for h in H ]
    assert isinstance(G, GroebnerBasis) is True
    assert len(G) == 3
    assert G[0] == H[0] and not G[0].is_Poly
    assert G[1] == H[1] and not G[1].is_Poly
    assert G[2] == H[2] and not G[2].is_Poly
    # Slices also yield plain expressions, never Poly instances.
    assert G[1:] == H[1:] and not any(g.is_Poly for g in G[1:])
    # Fixed: iterate the same slice that is being compared (was G[1:],
    # a copy-paste from the line above, which left G[0] unchecked here).
    assert G[:2] == H[:2] and not any(g.is_Poly for g in G[:2])
    assert G.exprs == H
    assert G.polys == P
    assert G.gens == (x, y)
    assert G.domain == ZZ
    assert G.order == grevlex
    assert G == H
    assert G == tuple(H)
    assert G == P
    assert G == tuple(P)
    assert G != []
    # With polys=True every element comes back as a Poly.
    G = groebner(F, x, y, order='grevlex', polys=True)
    assert G[0] == P[0] and G[0].is_Poly
    assert G[1] == P[1] and G[1].is_Poly
    assert G[2] == P[2] and G[2].is_Poly
    assert G[1:] == P[1:] and all(g.is_Poly for g in G[1:])
    # Fixed: check the [:2] slice itself (was G[1:], copy-paste as above).
    assert G[:2] == P[:2] and all(g.is_Poly for g in G[:2])
def test_poly():
    """``poly()`` expands an expression and wraps it in a Poly, inferring
    generators from the free symbols (and non-symbol atoms such as sin(x))
    unless ``wrt`` or explicit generators are supplied."""
    assert poly(x) == Poly(x, x)
    assert poly(y) == Poly(y, y)
    assert poly(x + y) == Poly(x + y, x, y)
    # Non-symbol subexpressions become generators in their own right.
    assert poly(x + sin(x)) == Poly(x + sin(x), x, sin(x))
    # wrt= moves the requested generator to the front.
    assert poly(x + y, wrt=y) == Poly(x + y, y, x)
    assert poly(x + sin(x), wrt=sin(x)) == Poly(x + sin(x), sin(x), x)
    assert poly(x*y + 2*x*z**2 + 17) == Poly(x*y + 2*x*z**2 + 17, x, y, z)
    # Powers of sums are expanded before constructing the Poly.
    assert poly(2*(y + z)**2 - 1) == Poly(2*y**2 + 4*y*z + 2*z**2 - 1, y, z)
    assert poly(
        x*(y + z)**2 - 1) == Poly(x*y**2 + 2*x*y*z + x*z**2 - 1, x, y, z)
    assert poly(2*x*(
        y + z)**2 - 1) == Poly(2*x*y**2 + 4*x*y*z + 2*x*z**2 - 1, x, y, z)
    assert poly(2*(
        y + z)**2 - x - 1) == Poly(2*y**2 + 4*y*z + 2*z**2 - x - 1, x, y, z)
    assert poly(x*(
        y + z)**2 - x - 1) == Poly(x*y**2 + 2*x*y*z + x*z**2 - x - 1, x, y, z)
    assert poly(2*x*(y + z)**2 - x - 1) == Poly(2*x*y**2 + 4*x*y*z + 2*
        x*z**2 - x - 1, x, y, z)
    assert poly(x*y + (x + y)**2 + (x + z)**2) == \
        Poly(2*x*z + 3*x*y + y**2 + z**2 + 2*x**2, x, y, z)
    assert poly(x*y*(x + y)*(x + z)**2) == \
        Poly(x**3*y**2 + x*y**2*z**2 + y*x**2*z**2 + 2*z*x**2*
        y**2 + 2*y*z*x**3 + y*x**4, x, y, z)
    # An existing Poly passes through unchanged (generator order preserved).
    assert poly(Poly(x + y + z, y, x, z)) == Poly(x + y + z, y, x, z)
    # With an explicit generator, the remaining symbols go into the domain.
    assert poly((x + y)**2, x) == Poly(x**2 + 2*x*y + y**2, x, domain=ZZ[y])
    assert poly((x + y)**2, y) == Poly(x**2 + 2*x*y + y**2, y, domain=ZZ[x])
    assert poly(1, x) == Poly(1, x)
    # A bare constant has no generators to infer.
    raises(GeneratorsNeeded, lambda: poly(1))
    # issue 6184
    assert poly(x + y, x, y) == Poly(x + y, x, y)
    assert poly(x + y, y, x) == Poly(x + y, y, x)
def test_keep_coeff():
    """``_keep_coeff(coeff, factors)`` multiplies a coefficient onto an
    expression while keeping the product unevaluated (undistributed) when
    the second factor is a sum."""
    # Reference unevaluated product 2*(x + 1).
    u = Mul(2, x + 1, evaluate=False)
    assert _keep_coeff(S(1), x) == x
    assert _keep_coeff(S(-1), x) == -x
    assert _keep_coeff(S(1.0), x) == 1.0*x
    assert _keep_coeff(S(-1.0), x) == -1.0*x
    assert _keep_coeff(S(1), 2*x) == 2*x
    assert _keep_coeff(S(2), x/2) == x
    assert _keep_coeff(S(2), sin(x)) == 2*sin(x)
    # Sum operand: product stays undistributed.
    assert _keep_coeff(S(2), x + 1) == u
    assert _keep_coeff(x, 1/x) == 1
    assert _keep_coeff(x + 1, S(2)) == u
@XFAIL
def test_poly_matching_consistency():
    """Multiplication of a Poly by I should commute (known failure).

    Test for this issue:
    https://github.com/sympy/sympy/issues/5514
    """
    assert I * Poly(x, x) == Poly(I*x, x)
    assert Poly(x, x) * I == Poly(I*x, x)
@XFAIL
def test_issue_5786():
    """factor() over a Gaussian extension should round-trip under expand()
    (known failure, see sympy issue 5786)."""
    assert expand(factor(expand(
        (x - I*y)*(z - I*t)), extension=[I])) == -I*t*x - t*y + x*z - I*y*z
def test_noncommutative():
    """cancel() must treat noncommutative subexpressions as opaque atoms,
    cancelling inside them without reordering factors."""
    # Minimal noncommutative wrapper type for the test.
    class foo(Expr):
        is_commutative=False
    e = x/(x + x*y)
    c = 1/( 1 + y)
    assert cancel(foo(e)) == foo(c)
    assert cancel(e + foo(e)) == c + foo(c)
    assert cancel(e*foo(c)) == c*foo(c)
def test_to_rational_coeffs():
    """``to_rational_coeffs`` returns None when the polynomial cannot be
    rescaled to one with rational coefficients."""
    # The documented failure result is the None singleton, so compare with
    # ``is None`` rather than ``== None`` (PEP 8; avoids __eq__ dispatch).
    assert to_rational_coeffs(
        Poly(x**3 + y*x**2 + sqrt(y), x, domain='EX')) is None
def test_factor_terms():
    """factor_list/sqf_list must keep a symbolic factor like (x + y) intact
    rather than merging it into the content (regression for issue 7067)."""
    # issue 7067
    assert factor_list(x*(x + y)) == (1, [(x, 1), (x + y, 1)])
    assert sqf_list(x*(x + y)) == (1, [(x, 1), (x + y, 1)])
def test_issue_11198():
    """Irrational coefficients such as sqrt(2) must land in the content part
    of factor_list, not be dropped (regression for issue 11198)."""
    assert factor_list(sqrt(2)*x) == (sqrt(2), [(x, 1)])
    assert factor_list(sqrt(2)*sin(x), sin(x)) == (sqrt(2), [(sin(x), 1)])
| postvakje/sympy | sympy/polys/tests/test_polytools.py | Python | bsd-3-clause | 106,722 | [
"Gaussian"
] | 49ef06db3e68749e15765dbfb3543e21b2f8e1ff77068982932a5591b82349e8 |
#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import os
import shutil
import unittest
import time
import mooseutils
class TestMooseDataFrame(unittest.TestCase):
    """
    Test use of MooseDataFrame for loading/reloading csv files.
    """

    def setUp(self):
        """
        Define the test filename.
        """
        # Sample weather-station CSV shipped with the test suite.
        self._filename = '../../test_files/white_elephant_jan_2016.csv'
        # Two columns known to exist in that file.
        self._keys = ['air_temp_low_24_hour_set_1', 'snow_depth_set_1']

    def testBasic(self):
        """
        Test that if a file exists it is loaded w/o error.
        """
        # Test basic read
        data = mooseutils.MooseDataFrame(self._filename)
        self.assertEqual(self._filename, data.filename)
        self.assertTrue(data)
        # Key Testing
        for k in self._keys:
            self.assertTrue(k in data)
        # Check data values at row index 10 against known file contents.
        x = data[self._keys]
        self.assertEqual(x.loc[10][self._keys[0]], 2.12)
        self.assertEqual(x.loc[10][self._keys[1]], 51.00)

    def testNoFile(self):
        """
        Test that no-file doesn't fail.
        """
        filename = 'not_a_file.csv'
        data = mooseutils.MooseDataFrame(filename)
        self.assertEqual(filename, data.filename)
        # A missing file yields a falsy, empty frame rather than an error.
        self.assertFalse(data)
        # Key Testing
        self.assertFalse('key' in data)
        x = data[ ['key1', 'key2'] ]
        self.assertTrue(x.empty)

    def testEmptyUpdateRemove(self):
        """
        Test that data appears when file is loaded.
        """
        # Temporary filename, removed if left over from a previous run.
        filename = "{}_{}.csv".format(self.__class__.__name__, 'tmp')
        if os.path.exists(filename):
            os.remove(filename)
        # (1) No-file: frame is empty and keys are absent.
        data = mooseutils.MooseDataFrame(filename)
        self.assertEqual(filename, data.filename)
        for k in self._keys:
            self.assertFalse(k in data)
        x = data[self._keys]
        self.assertTrue(x.empty)
        # (2) Data exists: update() reloads after the file appears.
        shutil.copyfile(self._filename, filename)
        data.update()
        for k in self._keys:
            self.assertTrue(k in data)
        x = data[self._keys]
        self.assertEqual(x.loc[10][self._keys[0]], 2.12)
        self.assertEqual(x.loc[10][self._keys[1]], 51.00)
        self.assertFalse(x.empty)
        # (3) Data remove: update() empties the frame when the file vanishes.
        os.remove(filename)
        data.update()
        for k in self._keys:
            self.assertFalse(k in data)
        x = data[self._keys]
        self.assertTrue(x.empty)

    def testIndex(self):
        """
        Test that the index of the data may be set.
        """
        # Use the 'time' column as the frame index instead of row numbers.
        data = mooseutils.MooseDataFrame(self._filename, index='time')
        x = data[self._keys]
        idx = 29.42
        self.assertEqual(x.loc[idx][self._keys[0]], 20.12)
        self.assertEqual(x.loc[idx][self._keys[1]], 59.00)

    def testOldFile(self):
        """
        Test that "old" files do not load.
        """
        data = mooseutils.MooseDataFrame(self._filename, index='time')
        self.assertTrue(data)
        # Files modified before run_start_time are treated as stale and skipped.
        data = mooseutils.MooseDataFrame(self._filename, index='time', run_start_time=time.time())
        self.assertFalse(data)
# Allow running this module directly; verbosity=2 prints each test name.
if __name__ == '__main__':
    unittest.main(module=__name__, verbosity=2)
| nuclear-wizard/moose | python/mooseutils/tests/test_MooseDataFrame.py | Python | lgpl-2.1 | 3,516 | [
"MOOSE"
] | 8d6dd20b805cfb926c5a558f5a2805011144f166500e62f0fb4bed51b9e9bf2e |
'''
Created on Jul 14, 2011
@author: sean
'''
from __future__ import print_function
from opcode import *
import _ast
import sys
from graphlab.meta.utils import py3, py3op, py2op
from graphlab.meta.asttools.visitors.print_visitor import print_ast, dump_ast
from graphlab.meta.asttools import cmp_ast
# Python 3 has no ``print`` statement AST node; provide an empty stand-in
# class so the isinstance() checks in the instruction handlers below work
# unchanged on both major versions.
if py3:
    class _ast_Print: pass
else:
    _ast_Print = _ast.Print
def isNone(node):
    """Return True if *node* stands for ``None``: either the Python ``None``
    object itself, or an ``_ast.Name`` load of the identifier ``'None'``
    (how None appears in pre-3.x ASTs)."""
    if node is None:
        return True
    return (isinstance(node, _ast.Name)
            and node.id == 'None'
            and isinstance(node.ctx, _ast.Load))
def BINARY_(OP):
    """Factory: build a handler for a BINARY_* opcode.

    The returned handler pops the right and left operands from the AST
    stack and pushes an ``_ast.BinOp`` built with operator class *OP*.
    """
    def BINARY_OP(self, instr):
        rhs = self.ast_stack.pop()
        lhs = self.ast_stack.pop()
        self.ast_stack.append(
            _ast.BinOp(left=lhs, right=rhs, op=OP(),
                       lineno=instr.lineno, col_offset=0))
    return BINARY_OP
def INPLACE_(OP):
    """Factory: build a handler for an INPLACE_* opcode that emits an
    ``_ast.AugAssign`` (e.g. ``x += y``) using operator node class *OP*."""
    def INPLACE_OP(self, instr):
        right = self.ast_stack.pop()
        left = self.ast_stack.pop()
        # The left operand becomes the assignment target, so its context
        # must be a Store rather than the Load it was pushed with.
        left.ctx = _ast.Store()
        aug_assign = _ast.AugAssign(target=left, op=OP(), value=right, lineno=instr.lineno, col_offset=0)
        self.ast_stack.append(aug_assign)
    return INPLACE_OP
def UNARY_(OP):
    """Factory: build a handler for a UNARY_* opcode.

    The returned handler pops one operand from the AST stack and pushes
    an ``_ast.UnaryOp`` built with operator class *OP*.
    """
    def UNARY_OP(self, instr):
        operand = self.ast_stack.pop()
        node = _ast.UnaryOp(op=OP(), operand=operand,
                            lineno=instr.lineno, col_offset=0)
        self.ast_stack.append(node)
    return UNARY_OP
# Map from the source text of a COMPARE_OP oparg to the corresponding
# _ast comparison-operator node class (consumed by COMPARE_OP below).
CMP_OPMAP = {'>=' :_ast.GtE,
             '<=' :_ast.LtE,
             '>' :_ast.Gt,
             '<' :_ast.Lt,
             '==': _ast.Eq,
             '!=': _ast.NotEq,
             'in': _ast.In,
             'not in': _ast.NotIn,
             'is':_ast.Is,
             'is not':_ast.IsNot,
             }
def make_const(arg, lineno=0, col_offset=0):
    """Wrap a plain Python constant in the matching AST literal node.

    Strings become ``_ast.Str``, numbers ``_ast.Num``, ``None`` a Name
    load of ``'None'``, and tuples an ``_ast.Tuple`` of recursively
    converted elements. Anything else is returned unchanged.
    """
    kw = {'lineno': lineno, 'col_offset': col_offset}
    if isinstance(arg, str):
        return _ast.Str(s=arg, **kw)
    if isinstance(arg, (int, float, complex)):
        return _ast.Num(n=arg, **kw)
    if arg is None:
        return _ast.Name(id='None', ctx=_ast.Load(), **kw)
    if isinstance(arg, tuple):
        elts = [make_const(item, **kw) for item in arg]
        return _ast.Tuple(elts=elts, ctx=_ast.Load(), **kw)
    return arg
class SimpleInstructions(object):
def LOAD_CONST(self, instr):
const = make_const(instr.arg, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(const)
def LOAD_NAME(self, instr):
name = _ast.Name(id=instr.arg, ctx=_ast.Load(), lineno=instr.lineno, col_offset=0)
self.ast_stack.append(name)
def LOAD_DEREF(self, instr):
name = _ast.Name(id=instr.arg, ctx=_ast.Load(), lineno=instr.lineno, col_offset=0)
self.ast_stack.append(name)
def CALL_FUNCTION_VAR(self, instr):
arg = self.ast_stack.pop()
self.CALL_FUNCTION(instr)
callfunc = self.ast_stack.pop()
callfunc.starargs = arg
self.ast_stack.append(callfunc)
def CALL_FUNCTION_KW(self, instr):
kwarg = self.ast_stack.pop()
self.CALL_FUNCTION(instr)
callfunc = self.ast_stack.pop()
callfunc.kwargs = kwarg
self.ast_stack.append(callfunc)
def CALL_FUNCTION_VAR_KW(self, instr):
kwarg = self.ast_stack.pop()
arg = self.ast_stack.pop()
self.CALL_FUNCTION(instr)
callfunc = self.ast_stack.pop()
callfunc.starargs = arg
callfunc.kwargs = kwarg
self.ast_stack.append(callfunc)
    def CALL_FUNCTION(self, instr):
        """Rebuild an ``_ast.Call`` from the value stack.

        The oparg packs both counts: high byte = number of keyword
        arguments, low byte = number of positional arguments.
        """
        nkwargs = instr.oparg >> 8
        nargs = (~(nkwargs << 8)) & instr.oparg  # mask off the kwarg byte
        args = []
        keywords = []
        # Keyword arguments sit on top of the stack as (name, value) pairs,
        # value above name; insert at the front to restore source order.
        for _ in range(nkwargs):
            expr = self.ast_stack.pop()
            name = self.ast_stack.pop()
            keyword = _ast.keyword(arg=name.s, value=expr, lineno=instr.lineno)
            keywords.insert(0, keyword)
        for _ in range(nargs):
            arg = self.ast_stack.pop()
            args.insert(0, arg)
        # A call whose single positional argument is a function/class def is
        # a decorator application: record the callee on decorator_list and
        # re-push the definition instead of emitting a Call node.
        if len(args) == 1 and isinstance(args[0], (_ast.FunctionDef, _ast.ClassDef)):
            function = args[0]
            if function.decorator_list is None:
                function.decorator_list = []
            node = self.ast_stack.pop()
            function.decorator_list.insert(0, node)
            self.ast_stack.append(function)
            return
        node = self.ast_stack.pop()
        callfunc = _ast.Call(func=node, args=args, keywords=keywords, starargs=None, kwargs=None,
                             lineno=instr.lineno, col_offset=0)
        self.ast_stack.append(callfunc)
def LOAD_FAST(self, instr):
name = _ast.Name(id=instr.arg, ctx=_ast.Load(), lineno=instr.lineno, col_offset=0)
self.ast_stack.append(name)
def LOAD_GLOBAL(self, instr):
name = _ast.Name(id=instr.arg, ctx=_ast.Load(), lineno=instr.lineno, col_offset=0)
self.ast_stack.append(name)
def STORE_FAST(self, instr):
self.STORE_NAME(instr)
def STORE_DEREF(self, instr):
self.STORE_NAME(instr)
    def STORE_NAME(self, instr):
        """Handle STORE_NAME: bind ``instr.arg`` to the value on the stack.

        Depending on what was just produced this becomes an import alias,
        a def/class name, a pass-through for an augmented assignment, an
        extra target on a chained assignment, or a new ``_ast.Assign``.
        """
        value = self.ast_stack.pop()
        value = self.process_ifexpr(value)
        if isinstance(value, _ast.Import):
            if value.from_:
                # `from mod import name [as alias]`: the ImportFrom node is
                # below the Import marker; record an asname only if it
                # differs from the imported name.
                assert isinstance(self.ast_stack[-1], _ast.ImportFrom)
                from_ = self.ast_stack.pop()
                as_name = instr.arg
                name = from_.names[0].name
                if as_name != name:
                    from_.names[0].asname = as_name
                self.ast_stack.append(from_)
            else:
                # `import mod [as alias]`: only record an asname when it
                # differs from the top-level package name.
                as_name = instr.arg
                if value.names[0].asname is None:
                    base_name = value.names[0].name.split('.')[0]
                    if base_name != as_name:
                        value.names[0].asname = as_name
                self.ast_stack.append(value)
        elif isinstance(value, (_ast.Attribute)) and isinstance(value.value, (_ast.Import)):
            # `import a.b as c` surfaces as an Attribute over the Import.
            asname = instr.arg
            value = value.value
            value.names[0].asname = asname
            self.ast_stack.append(value)
        elif isinstance(value, (_ast.ClassDef, _ast.FunctionDef)):
            # The stored name is the definition's own name.
            as_name = instr.arg
            value.name = as_name
            self.ast_stack.append(value)
        elif isinstance(value, _ast.AugAssign):
            self.ast_stack.append(value)
        elif isinstance(value, _ast.Assign):
            # Chained assignment (`a = b = expr`): reuse the existing Assign
            # node, drop the duplicated value, and add another target.
            _ = self.ast_stack.pop()
            assname = _ast.Name(instr.arg, _ast.Store(), lineno=instr.lineno, col_offset=0)
            value.targets.append(assname)
            self.ast_stack.append(value)
        else:
            assname = _ast.Name(instr.arg, _ast.Store(), lineno=instr.lineno, col_offset=0)
            assign = _ast.Assign(targets=[assname], value=value, lineno=instr.lineno, col_offset=0)
            self.ast_stack.append(assign)
@py3op
def STORE_LOCALS(self, instr):
'remove Locals from class def'
self.ast_stack.pop()
def STORE_GLOBAL(self, instr):
if not isinstance(self.ast_stack[0], _ast.Global):
self.ast_stack.insert(0, _ast.Global(names=[]))
if instr.arg not in self.ast_stack[0].names:
self.ast_stack[0].names.append(instr.arg)
self.STORE_NAME(instr)
def RETURN_VALUE(self, instr):
value = self.ast_stack.pop()
value = self.process_ifexpr(value)
ret = _ast.Return(value=value, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(ret)
def LOAD_ATTR(self, instr):
name = self.ast_stack.pop()
attr = instr.arg
get_attr = _ast.Attribute(value=name, attr=attr, ctx=_ast.Load(), lineno=instr.lineno, col_offset=0)
self.ast_stack.append(get_attr)
def STORE_ATTR(self, instr):
attrname = instr.arg
node = self.ast_stack.pop()
expr = self.ast_stack.pop()
expr = self.process_ifexpr(expr)
assattr = _ast.Attribute(value=node, attr=attrname, ctx=_ast.Store(), lineno=instr.lineno, col_offset=0)
set_attr = _ast.Assign(targets=[assattr], value=expr, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(set_attr)
def IMPORT_NAME(self, instr):
from_ = self.ast_stack.pop()
hmm = self.ast_stack.pop()
names = [_ast.alias(name=instr.arg, asname=None)]
import_ = _ast.Import(names=names, lineno=instr.lineno, col_offset=0)
import_.from_ = not isNone(from_)
self.ast_stack.append(import_)
def IMPORT_FROM(self, instr):
import_ = self.ast_stack.pop()
names = [_ast.alias(instr.arg, None)]
modname = import_.names[0].name
from_ = _ast.ImportFrom(module=modname, names=names, level=0, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(from_)
self.ast_stack.append(import_)
def IMPORT_STAR(self, instr):
import_ = self.ast_stack.pop()
names = import_.names
alias = _ast.alias(name='*', asname=None)
from_ = _ast.ImportFrom(module=names[0].name, names=[alias], level=0, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(from_)
    def process_ifexpr(self, node):
        """Collapse a single-statement ``_ast.If`` into an ``_ast.IfExp``.

        Used wherever an expression is expected: a conditional expression
        (``a if t else b``) decompiles to an If whose body and orelse each
        hold exactly one value. Non-If nodes pass through unchanged.
        """
        if isinstance(node, _ast.If):
            test = node.test
            then = node.body
            else_ = node.orelse
            # Both branches must be single values for this to be an IfExp.
            assert len(then) == 1
            then = then[0]
            assert len(else_) == 1
            else_ = else_[0]
            if_exp = _ast.IfExp(test, then, else_, lineno=node.lineno, col_offset=0)
            return if_exp
        else:
            return node
def POP_TOP(self, instr):
node = self.ast_stack.pop()
node = self.process_ifexpr(node)
if isinstance(node, _ast.Import):
return
if isinstance(node, _ast_Print):
_ = self.ast_stack.pop()
self.ast_stack.append(node)
return
discard = _ast.Expr(value=node, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(discard)
    def ROT_TWO(self, instr):
        """Handle ROT_TWO: swap the two top stack values.

        When immediately followed by STORE_NAME instructions this is a
        tuple unpacking assignment (``a, b = x, y`` or the 3-element form
        via an extra stack value); otherwise it is a plain swap.
        """
        one = self.ast_stack.pop()
        two = self.ast_stack.pop()
        if self.ilst[0].opname == 'STORE_NAME':
            kw = dict(lineno=instr.lineno, col_offset=0)
            stores = []
            # Consume the run of upcoming STORE_NAME instructions; each one
            # is an unpacking target.
            while self.ilst[0].opname == 'STORE_NAME':
                stores.append(self.ilst.pop(0))
            assert len(stores) <= 3, stores
            elts_load = [one, two]
            # A third target means one more value is still on the stack.
            if len(stores) == 3:
                elts_load.insert(0, self.ast_stack.pop())
            # Values were popped in reverse, so restore source order.
            tup_load = _ast.Tuple(elts=elts_load[::-1], ctx=_ast.Load(), **kw)
            elts_store = [_ast.Name(id=store.arg, ctx=_ast.Store(), **kw) for store in stores]
            tup_store = _ast.Tuple(elts=elts_store, ctx=_ast.Store(), **kw)
            assgn = _ast.Assign(value=tup_load, targets=[tup_store], **kw)
            self.ast_stack.append(assgn)
#            self.ast_stack.append(tup_store)
        else:
            # Plain rotation: re-push in swapped order.
            self.ast_stack.append(one)
            self.ast_stack.append(two)
BINARY_ADD = BINARY_(_ast.Add)
BINARY_SUBTRACT = BINARY_(_ast.Sub)
BINARY_DIVIDE = BINARY_(_ast.Div)
BINARY_TRUE_DIVIDE = BINARY_(_ast.Div)
BINARY_MULTIPLY = BINARY_(_ast.Mult)
BINARY_FLOOR_DIVIDE = BINARY_(_ast.FloorDiv)
BINARY_POWER = BINARY_(_ast.Pow)
BINARY_AND = BINARY_(_ast.BitAnd)
BINARY_OR = BINARY_(_ast.BitOr)
BINARY_XOR = BINARY_(_ast.BitXor)
BINARY_LSHIFT = BINARY_(_ast.LShift)
BINARY_RSHIFT = BINARY_(_ast.RShift)
BINARY_MODULO = BINARY_(_ast.Mod)
INPLACE_ADD = INPLACE_(_ast.Add)
INPLACE_SUBTRACT = INPLACE_(_ast.Sub)
INPLACE_DIVIDE = INPLACE_(_ast.Div)
INPLACE_FLOOR_DIVIDE = INPLACE_(_ast.FloorDiv)
INPLACE_MULTIPLY = INPLACE_(_ast.Mult)
INPLACE_AND = INPLACE_(_ast.BitAnd)
INPLACE_OR = INPLACE_(_ast.BitOr)
INPLACE_LSHIFT = INPLACE_(_ast.LShift)
INPLACE_RSHIFT = INPLACE_(_ast.RShift)
INPLACE_POWER = INPLACE_(_ast.Pow)
INPLACE_MODULO = INPLACE_(_ast.Mod)
INPLACE_XOR = INPLACE_(_ast.BitXor)
UNARY_NOT = UNARY_(_ast.Not)
UNARY_NEGATIVE = UNARY_(_ast.USub)
UNARY_INVERT = UNARY_(_ast.Invert)
UNARY_POSITIVE = UNARY_(_ast.UAdd)
def COMPARE_OP(self, instr):
op = instr.arg
right = self.ast_stack.pop()
expr = self.ast_stack.pop()
OP = CMP_OPMAP[op]
compare = _ast.Compare(left=expr, ops=[OP()], comparators=[right], lineno=instr.lineno, col_offset=0)
self.ast_stack.append(compare)
def YIELD_VALUE(self, instr):
value = self.ast_stack.pop()
yield_ = _ast.Yield(value=value, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(yield_)
self.seen_yield = True
def BUILD_LIST(self, instr):
nitems = instr.oparg
nodes = []
list_ = _ast.List(elts=nodes, ctx=_ast.Load(), lineno=instr.lineno, col_offset=0)
for i in range(nitems):
nodes.insert(0, self.ast_stack.pop())
self.ast_stack.append(list_)
def BUILD_TUPLE(self, instr):
nitems = instr.oparg
nodes = []
list_ = _ast.Tuple(elts=nodes, ctx=_ast.Load(), lineno=instr.lineno, col_offset=0)
for i in range(nitems):
nodes.insert(0, self.ast_stack.pop())
if any([item == 'CLOSURE' for item in nodes]):
assert all([item == 'CLOSURE' for item in nodes])
return
self.ast_stack.append(list_)
def BUILD_SET(self, instr):
nitems = instr.oparg
nodes = []
list_ = _ast.Set(elts=nodes, ctx=_ast.Load(), lineno=instr.lineno, col_offset=0)
for i in range(nitems):
nodes.insert(0, self.ast_stack.pop())
self.ast_stack.append(list_)
def BUILD_MAP(self, instr):
nitems = instr.oparg
keys = []
values = []
for i in range(nitems):
map_instrs = []
while 1:
new_instr = self.ilst.pop(0)
if new_instr.opname == 'STORE_MAP':
break
map_instrs.append(new_instr)
items = self.decompile_block(map_instrs).stmnt()
assert len(items) == 2
values.append(items[0])
keys.append(items[1])
list_ = _ast.Dict(keys=keys, values=values, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(list_)
    def UNPACK_SEQUENCE(self, instr):
        """Handle UNPACK_SEQUENCE: rebuild ``a, b, ... = expr``.

        Each of the next ``oparg`` instructions stores one element; they
        are driven through the visitor with a dummy value so their Assign
        targets can be collected into one store-context Tuple.
        """
        nargs = instr.oparg
        nodes = []
        ast_tuple = _ast.Tuple(elts=nodes, ctx=_ast.Store(), lineno=instr.lineno, col_offset=0)
        for i in range(nargs):
            nex_instr = self.ilst.pop(0)
            # Push a placeholder value so the store handler builds an
            # Assign whose target we can harvest.
            self.ast_stack.append(None)
            self.visit(nex_instr)
            node = self.ast_stack.pop()
            nodes.append(node.targets[0])
        expr = self.ast_stack.pop()
        if isinstance(expr, _ast.Assign):
            # Chained unpacking (`a = b, c = expr`): extend the existing
            # Assign and verify the duplicated value matches.
            assgn = expr
            assgn.targets.append(ast_tuple)
            value_dup = self.ast_stack.pop()
            assert cmp_ast(assgn.value, value_dup)
        else:
            assgn = _ast.Assign(targets=[ast_tuple], value=expr, lineno=instr.lineno, col_offset=0)
        self.ast_stack.append(assgn)
def DELETE_NAME(self, instr):
name = _ast.Name(id=instr.arg, ctx=_ast.Del(), lineno=instr.lineno, col_offset=0)
delete = _ast.Delete(targets=[name], lineno=instr.lineno, col_offset=0)
self.ast_stack.append(delete)
def DELETE_FAST(self, instr):
name = _ast.Name(id=instr.arg, ctx=_ast.Del(), lineno=instr.lineno, col_offset=0)
delete = _ast.Delete(targets=[name], lineno=instr.lineno, col_offset=0)
self.ast_stack.append(delete)
def DELETE_ATTR(self, instr):
expr = self.ast_stack.pop()
attr = _ast.Attribute(value=expr, attr=instr.arg, ctx=_ast.Del(), lineno=instr.lineno, col_offset=0)
delete = _ast.Delete(targets=[attr], lineno=instr.lineno, col_offset=0)
self.ast_stack.append(delete)
def EXEC_STMT(self, instr):
locals_ = self.ast_stack.pop()
globals_ = self.ast_stack.pop()
expr = self.ast_stack.pop()
if locals_ is globals_:
locals_ = None
if isinstance(globals_, _ast.Name) and getattr(globals_, 'id',) == 'None':
globals_ = None
exec_ = _ast.Exec(body=expr, globals=globals_, locals=locals_, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(exec_)
def DUP_TOP(self, instr):
expr = self.ast_stack.pop()
self.ast_stack.append(expr)
self.ast_stack.append(expr)
@py3op
def DUP_TOP_TWO(self, instr):
expr1 = self.ast_stack.pop()
expr2 = self.ast_stack.pop()
self.ast_stack.append(expr2)
self.ast_stack.append(expr1)
self.ast_stack.append(expr2)
self.ast_stack.append(expr1)
def DUP_TOPX(self, instr):
exprs = []
for i in range(instr.oparg):
expr = self.ast_stack.pop()
exprs.insert(0, expr)
self.ast_stack.extend(exprs)
self.ast_stack.extend(exprs)
def ROT_THREE(self, instr):
expr1 = self.ast_stack.pop()
expr2 = self.ast_stack.pop()
expr3 = self.ast_stack.pop()
self.ast_stack.append(expr1)
self.ast_stack.append(expr3)
self.ast_stack.append(expr2)
def ROT_FOUR(self, instr):
expr1 = self.ast_stack.pop()
expr2 = self.ast_stack.pop()
expr3 = self.ast_stack.pop()
expr4 = self.ast_stack.pop()
self.ast_stack.append(expr1)
self.ast_stack.append(expr4)
self.ast_stack.append(expr3)
self.ast_stack.append(expr2)
def PRINT_ITEM(self, instr):
item = self.ast_stack.pop()
if self.ast_stack:
print_ = self.ast_stack[-1]
else:
print_ = None
if isinstance(print_, _ast_Print) and not print_.nl and print_.dest == None:
print_.values.append(item)
else:
print_ = _ast_Print(dest=None, values=[item], nl=False, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(print_)
def PRINT_NEWLINE(self, instr):
item = self.ast_stack[-1]
if isinstance(item, _ast_Print) and not item.nl and item.dest == None:
item.nl = True
else:
print_ = _ast_Print(dest=None, values=[], nl=True, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(print_)
def PRINT_ITEM_TO(self, instr):
stream = self.ast_stack.pop()
print_ = None
if isinstance(stream, _ast_Print) and not stream.nl:
print_ = stream
stream = self.ast_stack.pop()
dup_print = self.ast_stack.pop()
assert dup_print is print_
self.ast_stack.append(stream)
else:
print_ = _ast_Print(dest=stream, values=[], nl=False, lineno=instr.lineno, col_offset=0)
item = self.ast_stack.pop()
print_.values.append(item)
self.ast_stack.append(print_)
def PRINT_NEWLINE_TO(self, instr):
item = self.ast_stack.pop()
stream = self.ast_stack.pop()
self.ast_stack.append(item)
if isinstance(item, _ast_Print) and not item.nl and item.dest is stream:
item.nl = True
else:
print_ = _ast_Print(dest=stream, values=[], nl=True, lineno=instr.lineno, col_offset=0)
self.ast_stack.append(print_)
    def format_slice(self, index, kw):
        """Normalize a subscript *index* into the pre-3.9 slice AST forms.

        A tuple containing at least one Slice becomes an ``_ast.ExtSlice``;
        any other non-Slice expression is wrapped in ``_ast.Index``. *kw*
        carries lineno/col_offset for the new nodes.
        """
        if isinstance(index, _ast.Tuple):
            dims = []
            have_slice = False
            for dim in index.elts:
                if not isinstance(dim, _ast.Slice):
                    dim = _ast.Index(value=dim, **kw)
                else:
                    have_slice = True
                dims.append(dim)
            # Mixed tuple of slices/values -> ExtSlice; plain tuple -> Index.
            if have_slice:
                index = _ast.ExtSlice(dims=dims, **kw)
            else:
                index = _ast.Index(value=index, **kw)
        elif not isinstance(index, _ast.Slice):
            index = _ast.Index(value=index, **kw)
        return index
def BINARY_SUBSCR(self, instr):
index = self.ast_stack.pop()
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
index = self.format_slice(index, kw)
subscr = _ast.Subscript(value=value, slice=index, ctx=_ast.Load(), **kw)
self.ast_stack.append(subscr)
def SLICE_0(self, instr):
'obj[:]'
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=None, step=None, upper=None, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Load(), **kw)
self.ast_stack.append(subscr)
def SLICE_1(self, instr):
'obj[lower:]'
lower = self.ast_stack.pop()
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=lower, step=None, upper=None, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Load(), **kw)
self.ast_stack.append(subscr)
def SLICE_2(self, instr):
'obj[:stop]'
upper = self.ast_stack.pop()
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=None, step=None, upper=upper, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Load(), **kw)
self.ast_stack.append(subscr)
def SLICE_3(self, instr):
'obj[lower:upper]'
upper = self.ast_stack.pop()
lower = self.ast_stack.pop()
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=lower, step=None, upper=upper, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Load(), **kw)
self.ast_stack.append(subscr)
def BUILD_SLICE(self, instr):
step = None
upper = None
lower = None
if instr.oparg > 2:
step = self.ast_stack.pop()
if instr.oparg > 1:
upper = self.ast_stack.pop()
if instr.oparg > 0:
lower = self.ast_stack.pop()
upper = None if isNone(upper) else upper
lower = None if isNone(lower) else lower
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=lower, step=step, upper=upper, **kw)
self.ast_stack.append(slice)
def STORE_SLICE_0(self, instr):
'obj[:] = expr'
value = self.ast_stack.pop()
expr = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=None, step=None, upper=None, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Store(), **kw)
assign = _ast.Assign(targets=[subscr], value=expr, **kw)
self.ast_stack.append(assign)
def STORE_SLICE_1(self, instr):
'obj[lower:] = expr'
lower = self.ast_stack.pop()
value = self.ast_stack.pop()
expr = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=lower, step=None, upper=None, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Store(), **kw)
assign = _ast.Assign(targets=[subscr], value=expr, **kw)
self.ast_stack.append(assign)
def STORE_SLICE_2(self, instr):
'obj[:upper] = expr'
upper = self.ast_stack.pop()
value = self.ast_stack.pop()
expr = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=None, step=None, upper=upper, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Store(), **kw)
assign = _ast.Assign(targets=[subscr], value=expr, **kw)
self.ast_stack.append(assign)
def STORE_SLICE_3(self, instr):
'obj[lower:upper] = expr'
upper = self.ast_stack.pop()
lower = self.ast_stack.pop()
value = self.ast_stack.pop()
expr = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=lower, step=None, upper=upper, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Store(), **kw)
if isinstance(expr, _ast.AugAssign):
assign = expr
result = cmp_ast(expr.target, subscr)
assert result
else:
assign = _ast.Assign(targets=[subscr], value=expr, **kw)
self.ast_stack.append(assign)
def DELETE_SLICE_0(self, instr):
'obj[:] = expr'
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=None, step=None, upper=None, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Del(), **kw)
delete = _ast.Delete(targets=[subscr], **kw)
self.ast_stack.append(delete)
def DELETE_SLICE_1(self, instr):
'obj[lower:] = expr'
lower = self.ast_stack.pop()
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=lower, step=None, upper=None, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Del(), **kw)
delete = _ast.Delete(targets=[subscr], **kw)
self.ast_stack.append(delete)
def DELETE_SLICE_2(self, instr):
'obj[:upper] = expr'
upper = self.ast_stack.pop()
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=None, step=None, upper=upper, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Del(), **kw)
delete = _ast.Delete(targets=[subscr], **kw)
self.ast_stack.append(delete)
def DELETE_SLICE_3(self, instr):
'obj[lower:upper] = expr'
upper = self.ast_stack.pop()
lower = self.ast_stack.pop()
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=lower, step=None, upper=upper, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Del(), **kw)
delete = _ast.Delete(targets=[subscr], **kw)
self.ast_stack.append(delete)
def STORE_SUBSCR(self, instr):
index = self.ast_stack.pop()
value = self.ast_stack.pop()
expr = self.ast_stack.pop()
expr = self.process_ifexpr(expr)
if isinstance(expr, _ast.AugAssign):
self.ast_stack.append(expr)
else:
kw = dict(lineno=instr.lineno, col_offset=0)
index = self.format_slice(index, kw)
subscr = _ast.Subscript(value=value, slice=index, ctx=_ast.Store(), **kw)
assign = _ast.Assign(targets=[subscr], value=expr, **kw)
self.ast_stack.append(assign)
def DELETE_SUBSCR(self, instr):
index = self.ast_stack.pop()
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
index = self.format_slice(index, kw)
subscr = _ast.Subscript(value=value, slice=index, ctx=_ast.Del(), **kw)
delete = _ast.Delete(targets=[subscr], **kw)
self.ast_stack.append(delete)
@py2op
def RAISE_VARARGS(self, instr):
nargs = instr.oparg
tback = None
inst = None
type = None
if nargs > 2:
tback = self.ast_stack.pop()
if nargs > 1:
inst = self.ast_stack.pop()
if nargs > 0:
type = self.ast_stack.pop()
raise_ = _ast.Raise(tback=tback, inst=inst, type=type,
lineno=instr.lineno, col_offset=0)
self.ast_stack.append(raise_)
@RAISE_VARARGS.py3op
def RAISE_VARARGS(self, instr):
nargs = instr.oparg
cause = None
exc = None
if nargs > 1:
cause = self.ast_stack.pop()
if nargs > 0:
exc = self.ast_stack.pop()
raise_ = _ast.Raise(exc=exc, cause=cause,
lineno=instr.lineno, col_offset=0)
self.ast_stack.append(raise_)
@py3op
def EXTENDED_ARG(self, instr):
code = self.ast_stack.pop()
argument_names = self.ast_stack.pop()
assert len(argument_names.elts) == (instr.oparg - 1)
args = []
kw = dict(lineno=instr.lineno, col_offset=0)
for argument_name in argument_names.elts[::-1]:
annotation = self.ast_stack.pop()
arg = _ast.arg(annotation=annotation, arg=argument_name.s, **kw) #@UndefinedVariable
args.append(arg)
for arg in args:
self.ast_stack.append(arg)
self.ast_stack.append(code)
@EXTENDED_ARG.py2op
def EXTENDED_ARG(self, instr):
raise Exception("This is not available in python 2.x")
| ypkang/Dato-Core | src/unity/python/graphlab/meta/decompiler/simple_instructions.py | Python | agpl-3.0 | 29,481 | [
"VisIt"
] | facdf6e351439cf1b99132484f8f8478b39f36a529e4ef7376fac672f656a010 |
# Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
# import iris tests first so that some things can be initialised before importing anything else
import iris.tests as tests
import cf_units
import numpy as np
import numpy.ma as ma
import iris
import iris.analysis.cartography
import iris.analysis.maths
import iris.coord_systems
import iris.coords
import iris.cube
import iris.tests.stock
class TestAnalysisCubeCoordComparison(tests.IrisTest):
    def assertComparisonDict(self, comparison_dict, reference_filename):
        """Render *comparison_dict* deterministically and compare the text
        against the reference file.

        Each key maps to groups of coordinates; coordinate names (or
        "None") are collected per group and sorted so the output is stable
        across runs.
        """
        string = ""
        for key in sorted(comparison_dict):
            coord_groups = comparison_dict[key]
            string += "%40s " % key
            names = [
                [
                    coord.name() if coord is not None else "None"
                    for coord in coords
                ]
                for coords in coord_groups
            ]
            string += str(sorted(names))
            string += "\n"
        self.assertString(string, reference_filename)
def test_coord_comparison(self):
cube1 = iris.cube.Cube(np.zeros((41, 41)))
lonlat_cs = iris.coord_systems.GeogCS(6371229)
lon_points1 = -180 + 4.5 * np.arange(41, dtype=np.float32)
lat_points = -90 + 4.5 * np.arange(41, dtype=np.float32)
cube1.add_dim_coord(
iris.coords.DimCoord(
lon_points1,
"longitude",
units="degrees",
coord_system=lonlat_cs,
),
0,
)
cube1.add_dim_coord(
iris.coords.DimCoord(
lat_points, "latitude", units="degrees", coord_system=lonlat_cs
),
1,
)
cube1.add_aux_coord(iris.coords.AuxCoord(0, long_name="z"))
cube1.add_aux_coord(
iris.coords.AuxCoord(["foobar"], long_name="f", units="no_unit")
)
cube2 = iris.cube.Cube(np.zeros((41, 41, 5)))
lonlat_cs = iris.coord_systems.GeogCS(6371229)
lon_points2 = -160 + 4.5 * np.arange(41, dtype=np.float32)
cube2.add_dim_coord(
iris.coords.DimCoord(
lon_points2,
"longitude",
units="degrees",
coord_system=lonlat_cs,
),
0,
)
cube2.add_dim_coord(
iris.coords.DimCoord(
lat_points, "latitude", units="degrees", coord_system=lonlat_cs
),
1,
)
cube2.add_dim_coord(
iris.coords.DimCoord([5, 7, 9, 11, 13], long_name="z"), 2
)
cube3 = cube1.copy()
lon = cube3.coord("longitude")
lat = cube3.coord("latitude")
cube3.remove_coord(lon)
cube3.remove_coord(lat)
cube3.add_dim_coord(lon, 1)
cube3.add_dim_coord(lat, 0)
cube3.coord("z").points = [20]
cube4 = cube2.copy()
lon = cube4.coord("longitude")
lat = cube4.coord("latitude")
cube4.remove_coord(lon)
cube4.remove_coord(lat)
cube4.add_dim_coord(lon, 1)
cube4.add_dim_coord(lat, 0)
# Test when coords are the same object
lon = cube1.coord("longitude")
lat = cube1.coord("latitude")
cube5 = iris.cube.Cube(np.zeros((41, 41)))
cube5.add_dim_coord(lon, 0)
cube5.add_dim_coord(lat, 1)
coord_comparison = iris.analysis._dimensional_metadata_comparison
self.assertComparisonDict(
coord_comparison(cube1, cube1),
("analysis", "coord_comparison", "cube1_cube1.txt"),
)
self.assertComparisonDict(
coord_comparison(cube1, cube2),
("analysis", "coord_comparison", "cube1_cube2.txt"),
)
self.assertComparisonDict(
coord_comparison(cube1, cube3),
("analysis", "coord_comparison", "cube1_cube3.txt"),
)
self.assertComparisonDict(
coord_comparison(cube1, cube4),
("analysis", "coord_comparison", "cube1_cube4.txt"),
)
self.assertComparisonDict(
coord_comparison(cube1, cube5),
("analysis", "coord_comparison", "cube1_cube5.txt"),
)
self.assertComparisonDict(
coord_comparison(cube2, cube3),
("analysis", "coord_comparison", "cube2_cube3.txt"),
)
self.assertComparisonDict(
coord_comparison(cube2, cube4),
("analysis", "coord_comparison", "cube2_cube4.txt"),
)
self.assertComparisonDict(
coord_comparison(cube2, cube5),
("analysis", "coord_comparison", "cube2_cube5.txt"),
)
self.assertComparisonDict(
coord_comparison(cube3, cube4),
("analysis", "coord_comparison", "cube3_cube4.txt"),
)
self.assertComparisonDict(
coord_comparison(cube3, cube5),
("analysis", "coord_comparison", "cube3_cube5.txt"),
)
self.assertComparisonDict(
coord_comparison(cube4, cube5),
("analysis", "coord_comparison", "cube4_cube5.txt"),
)
self.assertComparisonDict(
coord_comparison(cube1, cube1, cube1),
("analysis", "coord_comparison", "cube1_cube1_cube1.txt"),
)
self.assertComparisonDict(
coord_comparison(cube1, cube2, cube1),
("analysis", "coord_comparison", "cube1_cube2_cube1.txt"),
)
# get a coord comparison result and check that we are getting back what was expected
coord_group = coord_comparison(cube1, cube2)["grouped_coords"][0]
self.assertIsInstance(coord_group, iris.analysis._CoordGroup)
self.assertIsInstance(list(coord_group)[0], iris.coords.Coord)
class TestAnalysisWeights(tests.IrisTest):
    """Tests for weighted MEAN collapses, against literal and CML results.

    Fix: the bare ``assert`` statement in ``test_weighted_mean`` is
    replaced with ``self.assertIsNotNone`` -- plain asserts are stripped
    when Python runs with ``-O``, silently disabling the check.
    """
    def test_weighted_mean_little(self):
        """Weighted mean of a small 3x3 cube over one and two coords."""
        data = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32)
        weights = np.array([[9, 8, 7], [6, 5, 4], [3, 2, 1]], dtype=np.float32)
        cube = iris.cube.Cube(data, long_name="test_data", units="1")
        hcs = iris.coord_systems.GeogCS(6371229)
        lat_coord = iris.coords.DimCoord(
            np.array([1, 2, 3], dtype=np.float32),
            long_name="lat",
            units="1",
            coord_system=hcs,
        )
        lon_coord = iris.coords.DimCoord(
            np.array([1, 2, 3], dtype=np.float32),
            long_name="lon",
            units="1",
            coord_system=hcs,
        )
        cube.add_dim_coord(lat_coord, 0)
        cube.add_dim_coord(lon_coord, 1)
        cube.add_aux_coord(
            iris.coords.AuxCoord(
                np.arange(3, dtype=np.float32), long_name="dummy", units=1
            ),
            1,
        )
        self.assertCML(cube, ("analysis", "weighted_mean_source.cml"))
        a = cube.collapsed("lat", iris.analysis.MEAN, weights=weights)
        # np.ma.average doesn't apply type promotion rules in some versions,
        # and instead makes the result type float64. To ignore that case we
        # fix up the dtype here if it is promotable from float32. We still want
        # to catch cases where there is a loss of precision however.
        if a.dtype > np.float32:
            cast_data = a.data.astype(np.float32)
            a.data = cast_data
        self.assertCMLApproxData(a, ("analysis", "weighted_mean_lat.cml"))
        b = cube.collapsed(lon_coord, iris.analysis.MEAN, weights=weights)
        if b.dtype > np.float32:
            cast_data = b.data.astype(np.float32)
            b.data = cast_data
        b.data = np.asarray(b.data)
        self.assertCMLApproxData(b, ("analysis", "weighted_mean_lon.cml"))
        self.assertEqual(b.coord("dummy").shape, (1,))
        # test collapsing multiple coordinates (and the fact that one of the
        # coordinates isn't the same coordinate instance as on the cube)
        c = cube.collapsed(
            [lat_coord[:], lon_coord], iris.analysis.MEAN, weights=weights
        )
        if c.dtype > np.float32:
            cast_data = c.data.astype(np.float32)
            c.data = cast_data
        self.assertCMLApproxData(c, ("analysis", "weighted_mean_latlon.cml"))
        self.assertEqual(c.coord("dummy").shape, (1,))
        # Check new coord bounds - made from points
        self.assertArrayEqual(c.coord("lat").bounds, [[1, 3]])
        # Check new coord bounds - made from bounds
        cube.coord("lat").bounds = [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]]
        c = cube.collapsed(["lat", "lon"], iris.analysis.MEAN, weights=weights)
        self.assertArrayEqual(c.coord("lat").bounds, [[0.5, 3.5]])
        cube.coord("lat").bounds = None
        # Check there was no residual change
        self.assertCML(cube, ("analysis", "weighted_mean_source.cml"))
    @tests.skip_data
    def test_weighted_mean(self):
        """Area-weighted global mean against a known pp_area_avg value."""
        # compare with pp_area_avg - which collapses both lat and lon
        #
        # pp = ppa('/data/local/dataZoo/PP/simple_pp/global.pp', 0)
        # print, pp_area(pp, /box)
        # print, pp_area_avg(pp, /box) #287.927
        # ;gives an answer of 287.927
        #
        e = iris.tests.stock.simple_pp()
        self.assertCML(e, ("analysis", "weighted_mean_original.cml"))
        e.coord("latitude").guess_bounds()
        e.coord("longitude").guess_bounds()
        area_weights = iris.analysis.cartography.area_weights(e)
        e.coord("latitude").bounds = None
        e.coord("longitude").bounds = None
        f, collapsed_area_weights = e.collapsed(
            "latitude", iris.analysis.MEAN, weights=area_weights, returned=True
        )
        g = f.collapsed(
            "longitude", iris.analysis.MEAN, weights=collapsed_area_weights
        )
        # check it's a 0d, scalar cube
        self.assertEqual(g.shape, ())
        # check the value - pp_area_avg's result of 287.927 differs by factor of 1.00002959
        np.testing.assert_approx_equal(g.data, 287.935, significant=5)
        # check we get summed weights even if we don't give any
        h, summed_weights = e.collapsed(
            "latitude", iris.analysis.MEAN, returned=True
        )
        # Use a unittest assertion: a bare `assert` is removed under -O.
        self.assertIsNotNone(summed_weights)
        # Check there was no residual change
        e.coord("latitude").bounds = None
        e.coord("longitude").bounds = None
        self.assertCML(e, ("analysis", "weighted_mean_original.cml"))
        # Test collapsing of missing coord
        self.assertRaises(
            iris.exceptions.CoordinateNotFoundError,
            e.collapsed,
            "platitude",
            iris.analysis.MEAN,
        )
        # Test collpasing of non data coord
        self.assertRaises(
            iris.exceptions.CoordinateCollapseError,
            e.collapsed,
            "pressure",
            iris.analysis.MEAN,
        )
@tests.skip_data
class TestAnalysisBasic(tests.IrisTest):
    """Collapse a rotated-pole time cube with each basic aggregator.

    Every test funnels through ``_common``, which collapses over latitude,
    then longitude, then both at once, comparing each result against a
    stored CML reference file.
    """
    def setUp(self):
        # Load a real rotated-pole PP cube and pin its initial state.
        file = tests.get_data_path(("PP", "aPProt1", "rotatedMHtimecube.pp"))
        cubes = iris.load(file)
        self.cube = cubes[0]
        self.assertCML(self.cube, ("analysis", "original.cml"))
    def _common(
        self,
        name,
        aggregate,
        original_name="original_common.cml",
        *args,
        **kwargs,
    ):
        """Collapse with *aggregate* over lat, lon, and both; compare CMLs.

        *name* selects the reference CML files; extra args/kwargs (e.g.
        ``rtol``) are forwarded to the approximate-data comparison.
        """
        # float64 so all aggregators behave consistently across platforms.
        self.cube.data = self.cube.data.astype(np.float64)
        self.assertCML(self.cube, ("analysis", original_name))
        a = self.cube.collapsed("grid_latitude", aggregate)
        self.assertCMLApproxData(
            a, ("analysis", "%s_latitude.cml" % name), *args, **kwargs
        )
        b = a.collapsed("grid_longitude", aggregate)
        self.assertCMLApproxData(
            b,
            ("analysis", "%s_latitude_longitude.cml" % name),
            *args,
            **kwargs,
        )
        # Collapsing both coords in one call must also match its reference.
        c = self.cube.collapsed(["grid_latitude", "grid_longitude"], aggregate)
        self.assertCMLApproxData(
            c,
            ("analysis", "%s_latitude_longitude_1call.cml" % name),
            *args,
            **kwargs,
        )
        # Check there was no residual change
        self.assertCML(self.cube, ("analysis", original_name))
    def test_mean(self):
        self._common("mean", iris.analysis.MEAN, rtol=1e-05)
    def test_std_dev(self):
        # as the numbers are so high, trim off some trailing digits & compare to 0dp
        self._common("std_dev", iris.analysis.STD_DEV, rtol=1e-05)
    def test_hmean(self):
        # harmonic mean requires data > 0
        self.cube.data *= self.cube.data
        self._common(
            "hmean", iris.analysis.HMEAN, "original_hmean.cml", rtol=1e-05
        )
    def test_gmean(self):
        self._common("gmean", iris.analysis.GMEAN, rtol=1e-05)
    def test_variance(self):
        # as the numbers are so high, trim off some trailing digits & compare to 0dp
        self._common("variance", iris.analysis.VARIANCE, rtol=1e-05)
    def test_median(self):
        self._common("median", iris.analysis.MEDIAN)
    def test_sum(self):
        # as the numbers are so high, trim off some trailing digits & compare to 0dp
        self._common("sum", iris.analysis.SUM, rtol=1e-05)
    def test_max(self):
        self._common("max", iris.analysis.MAX)
    def test_min(self):
        self._common("min", iris.analysis.MIN)
    def test_rms(self):
        self._common("rms", iris.analysis.RMS)
    def test_duplicate_coords(self):
        # Building stock data with duplicated coords must be rejected.
        self.assertRaises(ValueError, tests.stock.track_1d, duplicate_x=True)
class TestMissingData(tests.IrisTest):
    """Check MAX/MIN/SUM collapses on NaN-bearing versus masked data.

    NaNs propagate through the aggregators, whereas a mask at the same
    positions simply excludes those points from the calculation.
    """

    def setUp(self):
        # A cube carrying raw NaNs at three fixed positions ...
        nan_cube = tests.stock.simple_2d()
        values = nan_cube.data.astype(np.float32)
        nan_cube.data = values.copy()
        for row, col in ((1, 0), (2, 2), (2, 3)):
            nan_cube.data[row, col] = np.nan
        self.cube_with_nan = nan_cube
        # ... and a second cube masking exactly those positions instead.
        masked_cube = tests.stock.simple_2d()
        masked_cube.data = ma.array(
            nan_cube.data, mask=np.isnan(nan_cube.data)
        )
        self.cube_with_mask = masked_cube

    def test_max(self):
        result = self.cube_with_nan.collapsed("foo", iris.analysis.MAX)
        np.testing.assert_array_equal(
            result.data, np.array([3, np.nan, np.nan])
        )
        result = self.cube_with_mask.collapsed("foo", iris.analysis.MAX)
        np.testing.assert_array_equal(result.data, np.array([3, 7, 9]))

    def test_min(self):
        result = self.cube_with_nan.collapsed("foo", iris.analysis.MIN)
        np.testing.assert_array_equal(
            result.data, np.array([0, np.nan, np.nan])
        )
        result = self.cube_with_mask.collapsed("foo", iris.analysis.MIN)
        np.testing.assert_array_equal(result.data, np.array([0, 5, 8]))

    def test_sum(self):
        result = self.cube_with_nan.collapsed("foo", iris.analysis.SUM)
        np.testing.assert_array_equal(
            result.data, np.array([6, np.nan, np.nan])
        )
        result = self.cube_with_mask.collapsed("foo", iris.analysis.SUM)
        np.testing.assert_array_equal(result.data, np.array([6, 18, 17]))
class TestAuxCoordCollapse(tests.IrisTest):
    """Verify auxiliary coords are aggregated correctly by cube.collapsed."""

    def setUp(self):
        self.cube_with_aux_coord = tests.stock.simple_4d_with_hybrid_height()
        # Bounds are needed so collapsing can derive sensible extents.
        self.cube_with_aux_coord.coord("grid_latitude").guess_bounds()
        self.cube_with_aux_coord.coord("grid_longitude").guess_bounds()

    def _check_surface_altitude(self, collapse_coord, points, bounds):
        # Collapse over the given coordinate with MAX, then compare the
        # resulting surface_altitude points/bounds against the literals.
        collapsed = self.cube_with_aux_coord.collapsed(
            collapse_coord, iris.analysis.MAX
        )
        coord = collapsed.coord("surface_altitude")
        np.testing.assert_array_equal(coord.points, np.array(points))
        np.testing.assert_array_equal(coord.bounds, np.array(bounds))

    def test_max(self):
        self._check_surface_altitude(
            "grid_latitude",
            [112, 113, 114, 115, 116, 117],
            [
                [100, 124],
                [101, 125],
                [102, 126],
                [103, 127],
                [104, 128],
                [105, 129],
            ],
        )
        # Collapsing over the whole (derived) coord still works.
        self._check_surface_altitude("altitude", [114], [[100, 129]])
        self._check_surface_altitude(
            "grid_longitude",
            [102, 108, 114, 120, 126],
            [[100, 105], [106, 111], [112, 117], [118, 123], [124, 129]],
        )
class TestAggregator_mdtol_keyword(tests.IrisTest):
    """Exercise the ``mdtol`` missing-data tolerance of cube.collapsed."""

    def setUp(self):
        values = ma.array(
            [[1, 2], [4, 5]],
            dtype=np.float32,
            mask=[[False, True], [False, True]],
        )
        cube = iris.cube.Cube(values, long_name="test_data", units="1")
        # Two 2-point dim coords: lat on axis 0, lon on axis 1.
        coord_points = {"lat": [1, 2], "lon": [3, 4]}
        for dim, coord_name in enumerate(("lat", "lon")):
            cube.add_dim_coord(
                iris.coords.DimCoord(
                    np.array(coord_points[coord_name], dtype=np.float32),
                    long_name=coord_name,
                    units="1",
                ),
                dim,
            )
        self.cube = cube

    def _mean_over_lat(self, **kwargs):
        # Collapse over latitude with MEAN, forwarding mdtol if given.
        return self.cube.collapsed(
            self.cube.coord("lat"), iris.analysis.MEAN, **kwargs
        )

    def test_single_coord_no_mdtol(self):
        collapsed = self._mean_over_lat()
        expected = ma.array([2.5, 5.0], mask=[False, True])
        self.assertMaskedArrayEqual(collapsed.data, expected)

    def test_single_coord_mdtol(self):
        # Half the second column is masked: tolerated when mdtol >= 0.5.
        self.cube.data.mask = np.array([[False, True], [False, False]])
        collapsed = self._mean_over_lat(mdtol=0.5)
        expected = ma.array([2.5, 5], mask=[False, False])
        self.assertMaskedArrayEqual(collapsed.data, expected)

    def test_single_coord_mdtol_alt(self):
        # ... but masked out when mdtol falls below the masked fraction.
        self.cube.data.mask = np.array([[False, True], [False, False]])
        collapsed = self._mean_over_lat(mdtol=0.4)
        expected = ma.array([2.5, 5], mask=[False, True])
        self.assertMaskedArrayEqual(collapsed.data, expected)

    def test_multi_coord_no_mdtol(self):
        collapsed = self.cube.collapsed(
            [self.cube.coord("lat"), self.cube.coord("lon")],
            iris.analysis.MEAN,
        )
        self.assertArrayEqual(collapsed.data, np.array(2.5))

    def test_multi_coord_mdtol(self):
        collapsed = self.cube.collapsed(
            [self.cube.coord("lat"), self.cube.coord("lon")],
            iris.analysis.MEAN,
            mdtol=0.4,
        )
        self.assertMaskedArrayEqual(collapsed.data, ma.array(2.5, mask=True))
class TestAggregators(tests.IrisTest):
    """Tests for PERCENTILE (standard and fast paths), PROPORTION, COUNT,
    weighted SUM and weighted RMS aggregations.

    Fix: the bare ``assert`` in ``test_proportion`` is replaced with
    ``self.assertTrue`` -- plain asserts are stripped under ``python -O``,
    silently disabling the sanity check.
    """
    def _check_collapsed_percentile(
        self,
        cube,
        percents,
        collapse_coord,
        expected_result,
        CML_filename=None,
        **kwargs,
    ):
        """Collapse *cube* with PERCENTILE and check data, type, and CML."""
        cube_data_type = type(cube.data)
        expected_result = np.array(expected_result, dtype=np.float32)
        result = cube.collapsed(
            collapse_coord,
            iris.analysis.PERCENTILE,
            percent=percents,
            **kwargs,
        )
        np.testing.assert_array_almost_equal(result.data, expected_result)
        # The array type (plain vs masked) must survive the collapse.
        self.assertEqual(type(result.data), cube_data_type)
        if CML_filename is not None:
            self.assertCML(result, ("analysis", CML_filename), checksum=False)
    def _check_percentile(
        self, data, axis, percents, expected_result, **kwargs
    ):
        """Check ``iris.analysis._percentile`` directly on a plain array."""
        result = iris.analysis._percentile(data, axis, percents, **kwargs)
        np.testing.assert_array_almost_equal(result, expected_result)
        self.assertEqual(type(result), type(expected_result))
    def test_percentile_1d_25_percent(self):
        cube = tests.stock.simple_1d()
        self._check_collapsed_percentile(
            cube, 25, "foo", 2.5, CML_filename="first_quartile_foo_1d.cml"
        )
    def test_percentile_1d_75_percent(self):
        cube = tests.stock.simple_1d()
        self._check_collapsed_percentile(
            cube, 75, "foo", 7.5, CML_filename="third_quartile_foo_1d.cml"
        )
    def test_fast_percentile_1d_25_percent(self):
        cube = tests.stock.simple_1d()
        self._check_collapsed_percentile(
            cube,
            25,
            "foo",
            2.5,
            fast_percentile_method=True,
            CML_filename="first_quartile_foo_1d_fast_percentile.cml",
        )
    def test_fast_percentile_1d_75_percent(self):
        cube = tests.stock.simple_1d()
        self._check_collapsed_percentile(
            cube,
            75,
            "foo",
            7.5,
            fast_percentile_method=True,
            CML_filename="third_quartile_foo_1d_fast_percentile.cml",
        )
    def test_fast_percentile_1d_75_percent_masked_type_no_mask(self):
        # A masked-type array with no actual mask is OK for the fast path.
        cube = tests.stock.simple_1d()
        cube.data = ma.MaskedArray(cube.data)
        self._check_collapsed_percentile(
            cube,
            75,
            "foo",
            7.5,
            fast_percentile_method=True,
            CML_filename="third_quartile_foo_1d_fast_percentile.cml",
        )
    def test_percentile_2d_single_coord(self):
        cube = tests.stock.simple_2d()
        self._check_collapsed_percentile(
            cube,
            25,
            "foo",
            [0.75, 4.75, 8.75],
            CML_filename="first_quartile_foo_2d.cml",
        )
    def test_percentile_2d_two_coords(self):
        cube = tests.stock.simple_2d()
        self._check_collapsed_percentile(
            cube,
            25,
            ["foo", "bar"],
            [2.75],
            CML_filename="first_quartile_foo_bar_2d.cml",
        )
    def test_fast_percentile_2d_single_coord(self):
        cube = tests.stock.simple_2d()
        self._check_collapsed_percentile(
            cube,
            25,
            "foo",
            [0.75, 4.75, 8.75],
            fast_percentile_method=True,
            CML_filename="first_quartile_foo_2d_fast_percentile.cml",
        )
    def test_fast_percentile_2d_two_coords(self):
        cube = tests.stock.simple_2d()
        self._check_collapsed_percentile(
            cube,
            25,
            ["foo", "bar"],
            [2.75],
            fast_percentile_method=True,
            CML_filename="first_quartile_foo_bar_2d_fast_percentile.cml",
        )
    def test_fast_percentile_2d_single_coord_masked_type_no_mask(self):
        cube = tests.stock.simple_2d()
        cube.data = ma.MaskedArray(cube.data)
        self._check_collapsed_percentile(
            cube,
            25,
            "foo",
            [0.75, 4.75, 8.75],
            fast_percentile_method=True,
            CML_filename="first_quartile_foo_2d_fast_percentile.cml",
        )
    def test_fast_percentile_2d_two_coords_masked_type_no_mask(self):
        cube = tests.stock.simple_2d()
        cube.data = ma.MaskedArray(cube.data)
        self._check_collapsed_percentile(
            cube,
            25,
            ["foo", "bar"],
            [2.75],
            fast_percentile_method=True,
            CML_filename="first_quartile_foo_bar_2d_fast_percentile.cml",
        )
    def test_percentile_3d(self):
        array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
        expected_result = np.array(
            [
                [6.0, 7.0, 8.0, 9.0],
                [10.0, 11.0, 12.0, 13.0],
                [14.0, 15.0, 16.0, 17.0],
            ],
            dtype=np.float32,
        )
        self._check_percentile(array_3d, 0, 50, expected_result)
    def test_fast_percentile_3d(self):
        array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
        expected_result = np.array(
            [
                [6.0, 7.0, 8.0, 9.0],
                [10.0, 11.0, 12.0, 13.0],
                [14.0, 15.0, 16.0, 17.0],
            ],
            dtype=np.float32,
        )
        self._check_percentile(
            array_3d, 0, 50, expected_result, fast_percentile_method=True
        )
    def test_percentile_3d_axis_one(self):
        array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
        expected_result = np.array(
            [[4.0, 5.0, 6.0, 7.0], [16.0, 17.0, 18.0, 19.0]], dtype=np.float32
        )
        self._check_percentile(array_3d, 1, 50, expected_result)
    def test_fast_percentile_3d_axis_one(self):
        array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
        expected_result = np.array(
            [[4.0, 5.0, 6.0, 7.0], [16.0, 17.0, 18.0, 19.0]], dtype=np.float32
        )
        self._check_percentile(
            array_3d, 1, 50, expected_result, fast_percentile_method=True
        )
    def test_fast_percentile_3d_axis_one_masked_type_no_mask(self):
        array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
        array_3d = np.ma.MaskedArray(array_3d)
        expected_result = ma.MaskedArray(
            [[4.0, 5.0, 6.0, 7.0], [16.0, 17.0, 18.0, 19.0]], dtype=np.float32
        )
        self._check_percentile(
            array_3d, 1, 50, expected_result, fast_percentile_method=True
        )
    def test_percentile_3d_axis_two(self):
        array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
        expected_result = np.array(
            [[1.5, 5.5, 9.5], [13.5, 17.5, 21.5]], dtype=np.float32
        )
        self._check_percentile(array_3d, 2, 50, expected_result)
    def test_fast_percentile_3d_axis_two(self):
        array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
        expected_result = np.array(
            [[1.5, 5.5, 9.5], [13.5, 17.5, 21.5]], dtype=np.float32
        )
        self._check_percentile(
            array_3d, 2, 50, expected_result, fast_percentile_method=True
        )
    def test_fast_percentile_3d_axis_two_masked_type_no_mask(self):
        array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
        array_3d = ma.MaskedArray(array_3d)
        expected_result = ma.MaskedArray(
            [[1.5, 5.5, 9.5], [13.5, 17.5, 21.5]], dtype=np.float32
        )
        self._check_percentile(
            array_3d, 2, 50, expected_result, fast_percentile_method=True
        )
    def test_percentile_3d_masked(self):
        cube = tests.stock.simple_3d_mask()
        expected_result = [
            [12.0, 13.0, 14.0, 15.0],
            [16.0, 17.0, 18.0, 19.0],
            [20.0, 18.0, 19.0, 20.0],
        ]
        self._check_collapsed_percentile(
            cube,
            75,
            "wibble",
            expected_result,
            CML_filename="last_quartile_foo_3d_masked.cml",
        )
    def test_fast_percentile_3d_masked_type_masked(self):
        # The fast path cannot handle genuinely masked data: must raise.
        cube = tests.stock.simple_3d_mask()
        msg = "Cannot use fast np.percentile method with masked array."
        with self.assertRaisesRegex(TypeError, msg):
            cube.collapsed(
                "wibble",
                iris.analysis.PERCENTILE,
                percent=75,
                fast_percentile_method=True,
            )
    def test_percentile_3d_notmasked(self):
        cube = tests.stock.simple_3d()
        expected_result = [
            [9.0, 10.0, 11.0, 12.0],
            [13.0, 14.0, 15.0, 16.0],
            [17.0, 18.0, 19.0, 20.0],
        ]
        self._check_collapsed_percentile(
            cube,
            75,
            "wibble",
            expected_result,
            CML_filename="last_quartile_foo_3d_notmasked.cml",
        )
    def test_fast_percentile_3d_notmasked(self):
        cube = tests.stock.simple_3d()
        expected_result = [
            [9.0, 10.0, 11.0, 12.0],
            [13.0, 14.0, 15.0, 16.0],
            [17.0, 18.0, 19.0, 20.0],
        ]
        self._check_collapsed_percentile(
            cube,
            75,
            "wibble",
            expected_result,
            fast_percentile_method=True,
            CML_filename="last_quartile_foo_3d_notmasked_fast_percentile.cml",
        )
    def test_proportion(self):
        cube = tests.stock.simple_1d()
        # Use a unittest assertion: a bare `assert` is removed under -O.
        self.assertTrue(np.any(cube.data >= 5))
        gt5 = cube.collapsed(
            "foo", iris.analysis.PROPORTION, function=lambda val: val >= 5
        )
        np.testing.assert_array_almost_equal(gt5.data, np.array([6 / 11.0]))
        self.assertCML(
            gt5, ("analysis", "proportion_foo_1d.cml"), checksum=False
        )
    def test_proportion_2d(self):
        cube = tests.stock.simple_2d()
        gt6 = cube.collapsed(
            "foo", iris.analysis.PROPORTION, function=lambda val: val >= 6
        )
        np.testing.assert_array_almost_equal(
            gt6.data, np.array([0, 0.5, 1], dtype=np.float32)
        )
        self.assertCML(
            gt6, ("analysis", "proportion_foo_2d.cml"), checksum=False
        )
        gt6 = cube.collapsed(
            "bar", iris.analysis.PROPORTION, function=lambda val: val >= 6
        )
        np.testing.assert_array_almost_equal(
            gt6.data, np.array([1 / 3, 1 / 3, 2 / 3, 2 / 3], dtype=np.float32)
        )
        self.assertCML(
            gt6, ("analysis", "proportion_bar_2d.cml"), checksum=False
        )
        gt6 = cube.collapsed(
            ("foo", "bar"),
            iris.analysis.PROPORTION,
            function=lambda val: val >= 6,
        )
        np.testing.assert_array_almost_equal(
            gt6.data, np.array([0.5], dtype=np.float32)
        )
        self.assertCML(
            gt6, ("analysis", "proportion_foo_bar_2d.cml"), checksum=False
        )
        # mask the data
        cube.data = ma.array(cube.data, mask=cube.data % 2)
        cube.data.mask[1, 2] = True
        gt6_masked = cube.collapsed(
            "bar", iris.analysis.PROPORTION, function=lambda val: val >= 6
        )
        np.testing.assert_array_almost_equal(
            gt6_masked.data,
            ma.array(
                [1 / 3, None, 1 / 2, None],
                mask=[False, True, False, True],
                dtype=np.float32,
            ),
        )
        self.assertCML(
            gt6_masked,
            ("analysis", "proportion_foo_2d_masked.cml"),
            checksum=False,
        )
    def test_count(self):
        cube = tests.stock.simple_1d()
        gt5 = cube.collapsed(
            "foo", iris.analysis.COUNT, function=lambda val: val >= 5
        )
        np.testing.assert_array_almost_equal(gt5.data, np.array([6]))
        gt5.data = gt5.data.astype("i8")
        self.assertCML(gt5, ("analysis", "count_foo_1d.cml"), checksum=False)
    def test_count_2d(self):
        cube = tests.stock.simple_2d()
        gt6 = cube.collapsed(
            "foo", iris.analysis.COUNT, function=lambda val: val >= 6
        )
        np.testing.assert_array_almost_equal(
            gt6.data, np.array([0, 2, 4], dtype=np.float32)
        )
        gt6.data = gt6.data.astype("i8")
        self.assertCML(gt6, ("analysis", "count_foo_2d.cml"), checksum=False)
        gt6 = cube.collapsed(
            "bar", iris.analysis.COUNT, function=lambda val: val >= 6
        )
        np.testing.assert_array_almost_equal(
            gt6.data, np.array([1, 1, 2, 2], dtype=np.float32)
        )
        gt6.data = gt6.data.astype("i8")
        self.assertCML(gt6, ("analysis", "count_bar_2d.cml"), checksum=False)
        gt6 = cube.collapsed(
            ("foo", "bar"), iris.analysis.COUNT, function=lambda val: val >= 6
        )
        np.testing.assert_array_almost_equal(
            gt6.data, np.array([6], dtype=np.float32)
        )
        gt6.data = gt6.data.astype("i8")
        self.assertCML(
            gt6, ("analysis", "count_foo_bar_2d.cml"), checksum=False
        )
    def test_weighted_sum_consistency(self):
        # weighted sum with unit weights should be the same as a sum
        cube = tests.stock.simple_1d()
        normal_sum = cube.collapsed("foo", iris.analysis.SUM)
        weights = np.ones_like(cube.data)
        weighted_sum = cube.collapsed(
            "foo", iris.analysis.SUM, weights=weights
        )
        self.assertArrayAlmostEqual(normal_sum.data, weighted_sum.data)
    def test_weighted_sum_1d(self):
        # verify 1d weighted sum is correct
        cube = tests.stock.simple_1d()
        weights = np.array(
            [0.05, 0.05, 0.1, 0.1, 0.2, 0.3, 0.2, 0.1, 0.1, 0.05, 0.05]
        )
        result = cube.collapsed("foo", iris.analysis.SUM, weights=weights)
        self.assertAlmostEqual(result.data, 6.5)
        self.assertCML(
            result, ("analysis", "sum_weighted_1d.cml"), checksum=False
        )
    def test_weighted_sum_2d(self):
        # verify 2d weighted sum is correct
        cube = tests.stock.simple_2d()
        weights = np.array([0.3, 0.4, 0.3])
        weights = iris.util.broadcast_to_shape(weights, cube.shape, [0])
        result = cube.collapsed("bar", iris.analysis.SUM, weights=weights)
        self.assertArrayAlmostEqual(
            result.data, np.array([4.0, 5.0, 6.0, 7.0])
        )
        self.assertCML(
            result, ("analysis", "sum_weighted_2d.cml"), checksum=False
        )
    def test_weighted_rms(self):
        cube = tests.stock.simple_2d()
        # modify cube data so that the results are nice numbers
        cube.data = np.array(
            [[4, 7, 10, 8], [21, 30, 12, 24], [14, 16, 20, 8]],
            dtype=np.float64,
        )
        weights = np.array(
            [[1, 4, 3, 2], [6, 4.5, 1.5, 3], [2, 1, 1.5, 0.5]],
            dtype=np.float64,
        )
        expected_result = np.array([8.0, 24.0, 16.0])
        result = cube.collapsed("foo", iris.analysis.RMS, weights=weights)
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertCML(
            result, ("analysis", "rms_weighted_2d.cml"), checksum=False
        )
@tests.skip_data
class TestRotatedPole(tests.IrisTest):
    """Checks for rotate_pole/unrotate_pole and rotated-grid conversions."""

    def _check_both_conversions(self, cube, index):
        # Unrotate the cube's grid and compare every resulting array
        # against its stored JSON reference.
        rlons, rlats = iris.analysis.cartography.get_xy_grids(cube)
        rcs = cube.coord_system("RotatedGeogCS")
        x, y = iris.analysis.cartography.unrotate_pole(
            rlons,
            rlats,
            rcs.grid_north_pole_longitude,
            rcs.grid_north_pole_latitude,
        )
        for label, grid in (("x", x), ("y", y), ("rlon", rlons), ("rlat", rlats)):
            self.assertDataAlmostEqual(
                grid,
                ("analysis", "rotated_pole.{}.{}.json".format(index, label)),
            )

    def test_all(self):
        path = tests.get_data_path(("PP", "ukVorog", "ukv_orog_refonly.pp"))
        master_cube = iris.load_cube(path)
        # Overall behaviour on a coarse subsample.
        self._check_both_conversions(master_cube[::10, ::10], 0)
        # Numerical stability on a small interior region.
        self._check_both_conversions(master_cube[210:238, 424:450], 1)

    def test_unrotate_nd(self):
        grid_lons = np.array([[350.0, 352.0], [350.0, 352.0]])
        grid_lats = np.array([[-5.0, -0.0], [-4.0, -1.0]])
        lons, lats = iris.analysis.cartography.unrotate_pole(
            grid_lons, grid_lats, 178.0, 38.0
        )
        # Expected values derived by direct proj4 computation.
        expected_lons = np.array(
            [[-16.42176094, -14.85892262], [-16.71055023, -14.58434624]]
        )
        expected_lats = np.array(
            [[46.00724251, 51.29188893], [46.98728486, 50.30706042]]
        )
        self.assertArrayAlmostEqual(lons, expected_lons)
        self.assertArrayAlmostEqual(lats, expected_lats)

    def test_unrotate_1d(self):
        grid_lons = np.array([350.0, 352.0, 354.0, 356.0])
        grid_lats = np.array([-5.0, -0.0, 5.0, 10.0])
        lons, lats = iris.analysis.cartography.unrotate_pole(
            grid_lons.flatten(), grid_lats.flatten(), 178.0, 38.0
        )
        # Expected values derived by direct proj4 computation.
        expected_lons = np.array(
            [-16.42176094, -14.85892262, -12.88946157, -10.35078336]
        )
        expected_lats = np.array(
            [46.00724251, 51.29188893, 56.55031485, 61.77015703]
        )
        self.assertArrayAlmostEqual(lons, expected_lons)
        self.assertArrayAlmostEqual(lats, expected_lats)

    def test_rotate_nd(self):
        true_lons = np.array([[350.0, 351.0], [352.0, 353.0]])
        true_lats = np.array([[10.0, 15.0], [20.0, 25.0]])
        lons, lats = iris.analysis.cartography.rotate_pole(
            true_lons, true_lats, 20.0, 80.0
        )
        # Expected values derived by direct proj4 computation.
        expected_lons = np.array(
            [[148.69672569, 149.24727087], [149.79067025, 150.31754368]]
        )
        expected_lats = np.array(
            [[18.60905789, 23.67749384], [28.74419024, 33.8087963]]
        )
        self.assertArrayAlmostEqual(lons, expected_lons)
        self.assertArrayAlmostEqual(lats, expected_lats)

    def test_rotate_1d(self):
        true_lons = np.array([350.0, 351.0, 352.0, 353.0])
        true_lats = np.array([10.0, 15.0, 20.0, 25.0])
        lons, lats = iris.analysis.cartography.rotate_pole(
            true_lons.flatten(), true_lats.flatten(), 20.0, 80.0
        )
        # Expected values derived by direct proj4 computation.
        expected_lons = np.array(
            [148.69672569, 149.24727087, 149.79067025, 150.31754368]
        )
        expected_lats = np.array(
            [18.60905789, 23.67749384, 28.74419024, 33.8087963]
        )
        self.assertArrayAlmostEqual(lons, expected_lons)
        self.assertArrayAlmostEqual(lats, expected_lats)
@tests.skip_data
class TestAreaWeights(tests.IrisTest):
    """Check cartography.area_weights values on a small lat/lon region."""

    def test_area_weights(self):
        cube = iris.tests.stock.simple_pp()
        # Offset, subsampled region: small enough to compare to literals.
        cube = cube[10:, 35:][::8, ::8][:5, :4]
        # Non-data properties must match the stored CML before ...
        self.assertCML(
            cube,
            ("analysis", "areaweights_original.cml"),
            checksum=False,
        )
        cube.coord("latitude").guess_bounds()
        cube.coord("longitude").guess_bounds()
        weights = iris.analysis.cartography.area_weights(cube)
        expected = np.array(
            [
                [3.11955866e12, 3.11956008e12, 3.11955866e12, 3.11956008e12],
                [5.21951065e12, 5.21951303e12, 5.21951065e12, 5.21951303e12],
                [6.68991281e12, 6.68991585e12, 6.68991281e12, 6.68991585e12],
                [7.35341305e12, 7.35341640e12, 7.35341305e12, 7.35341640e12],
                [7.12998335e12, 7.12998660e12, 7.12998335e12, 7.12998660e12],
            ],
            dtype=np.float64,
        )
        self.assertArrayAllClose(weights, expected, rtol=1e-8)
        # ... and after: computing weights must leave the cube unchanged.
        cube.coord("latitude").bounds = None
        cube.coord("longitude").bounds = None
        self.assertCML(
            cube,
            ("analysis", "areaweights_original.cml"),
            checksum=False,
        )
@tests.skip_data
class TestAreaWeightGeneration(tests.IrisTest):
def setUp(self):
self.cube = iris.tests.stock.realistic_4d()
def test_area_weights_std(self):
# weights for stock 4d data
weights = iris.analysis.cartography.area_weights(self.cube)
self.assertEqual(weights.shape, self.cube.shape)
def test_area_weights_order(self):
# weights for data with dimensions in a different order
order = [3, 2, 1, 0] # (lon, lat, level, time)
self.cube.transpose(order)
weights = iris.analysis.cartography.area_weights(self.cube)
self.assertEqual(weights.shape, self.cube.shape)
def test_area_weights_non_adjacent(self):
# weights for cube with non-adjacent latitude/longitude dimensions
order = [0, 3, 1, 2] # (time, lon, level, lat)
self.cube.transpose(order)
weights = iris.analysis.cartography.area_weights(self.cube)
self.assertEqual(weights.shape, self.cube.shape)
def test_area_weights_scalar_latitude(self):
# weights for cube with a scalar latitude dimension
cube = self.cube[:, :, 0, :]
weights = iris.analysis.cartography.area_weights(cube)
self.assertEqual(weights.shape, cube.shape)
def test_area_weights_scalar_longitude(self):
# weights for cube with a scalar longitude dimension
cube = self.cube[:, :, :, 0]
weights = iris.analysis.cartography.area_weights(cube)
self.assertEqual(weights.shape, cube.shape)
def test_area_weights_scalar(self):
# weights for cube with scalar latitude and longitude dimensions
cube = self.cube[:, :, 0, 0]
weights = iris.analysis.cartography.area_weights(cube)
self.assertEqual(weights.shape, cube.shape)
def test_area_weights_singleton_latitude(self):
# singleton (1-point) latitude dimension
cube = self.cube[:, :, 0:1, :]
weights = iris.analysis.cartography.area_weights(cube)
self.assertEqual(weights.shape, cube.shape)
def test_area_weights_singleton_longitude(self):
# singleton (1-point) longitude dimension
cube = self.cube[:, :, :, 0:1]
weights = iris.analysis.cartography.area_weights(cube)
self.assertEqual(weights.shape, cube.shape)
def test_area_weights_singletons(self):
# singleton (1-point) latitude and longitude dimensions
cube = self.cube[:, :, 0:1, 0:1]
weights = iris.analysis.cartography.area_weights(cube)
self.assertEqual(weights.shape, cube.shape)
def test_area_weights_normalized(self):
# normalized area weights must sum to one over lat/lon dimensions.
weights = iris.analysis.cartography.area_weights(
self.cube, normalize=True
)
sumweights = weights.sum(axis=3).sum(axis=2) # sum over lon and lat
self.assertArrayAlmostEqual(sumweights, 1)
def test_area_weights_non_contiguous(self):
    """Weights for a slice whose longitude bounds are non-contiguous."""
    # Pick the three leading and three trailing longitudes so the middle
    # of the dimension is missing.
    picked = (0, 1, 2, -3, -2, -1)
    subcube = self.cube[..., picked]
    actual = iris.analysis.cartography.area_weights(subcube)
    # The result should match the same selection from the full-cube weights.
    expected = iris.analysis.cartography.area_weights(self.cube)[..., picked]
    self.assertArrayEqual(actual, expected)
def test_area_weights_no_lon_bounds(self):
    """area_weights raises ValueError when longitude has no bounds."""
    self.cube.coord("grid_longitude").bounds = None
    self.assertRaises(
        ValueError, iris.analysis.cartography.area_weights, self.cube
    )
def test_area_weights_no_lat_bounds(self):
    """area_weights raises ValueError when latitude has no bounds."""
    self.cube.coord("grid_latitude").bounds = None
    self.assertRaises(
        ValueError, iris.analysis.cartography.area_weights, self.cube
    )
@tests.skip_data
class TestLatitudeWeightGeneration(tests.IrisTest):
    """Tests for ``iris.analysis.cartography.cosine_latitude_weights``.

    Covers 0d/1d/2d latitude coordinates, singleton dimensions, transposed
    dimension orders, and the error cases (no latitude coordinate found, or
    more than one candidate latitude coordinate).
    """

    def setUp(self):
        # Rotated-pole precipitation cube: presumably (time, grid_latitude,
        # grid_longitude) with 2d 'latitude'/'longitude' aux coords — the
        # remove_coord calls below rely on both coordinate systems existing.
        path = iris.tests.get_data_path(
            ["NetCDF", "rotated", "xyt", "small_rotPole_precipitation.nc"]
        )
        self.cube = iris.load_cube(path)
        # Variant keeping only the 1d grid (dim) coordinates.
        self.cube_dim_lat = self.cube.copy()
        self.cube_dim_lat.remove_coord("latitude")
        self.cube_dim_lat.remove_coord("longitude")
        # The 2d cubes are unrealistic, you would not want to weight by
        # anything other than grid latitude in real-world scenarios. However,
        # the technical details are suitable for testing purposes, providing
        # a nice analog for a 2d latitude coordinate from a curvilinear grid.
        self.cube_aux_lat = self.cube.copy()
        self.cube_aux_lat.remove_coord("grid_latitude")
        self.cube_aux_lat.remove_coord("grid_longitude")
        # Reference latitude values for the expected cos(lat) weights.
        self.lat1d = self.cube.coord("grid_latitude").points
        self.lat2d = self.cube.coord("latitude").points

    def test_cosine_latitude_weights_range(self):
        # check the range of returned values, needs a cube that spans the full
        # latitude range
        lat_coord = iris.coords.DimCoord(
            np.linspace(-90, 90, 73),
            standard_name="latitude",
            units=cf_units.Unit("degrees_north"),
        )
        cube = iris.cube.Cube(
            np.ones([73], dtype=np.float64), long_name="test_cube", units="1"
        )
        cube.add_dim_coord(lat_coord, 0)
        weights = iris.analysis.cartography.cosine_latitude_weights(cube)
        # cos(lat) over [-90, 90] must lie in [0, 1].
        self.assertTrue(weights.max() <= 1)
        self.assertTrue(weights.min() >= 0)

    def test_cosine_latitude_weights_0d(self):
        # 0d latitude dimension (scalar coordinate)
        weights = iris.analysis.cartography.cosine_latitude_weights(
            self.cube_dim_lat[:, 0, :]
        )
        self.assertEqual(weights.shape, self.cube_dim_lat[:, 0, :].shape)
        self.assertAlmostEqual(
            weights[0, 0], np.cos(np.deg2rad(self.lat1d[0]))
        )

    def test_cosine_latitude_weights_1d_singleton(self):
        # singleton (1-point) 1d latitude coordinate (time, lat, lon)
        cube = self.cube_dim_lat[:, 0:1, :]
        weights = iris.analysis.cartography.cosine_latitude_weights(cube)
        self.assertEqual(weights.shape, cube.shape)
        self.assertAlmostEqual(
            weights[0, 0, 0], np.cos(np.deg2rad(self.lat1d[0]))
        )

    def test_cosine_latitude_weights_1d(self):
        # 1d latitude coordinate (time, lat, lon)
        weights = iris.analysis.cartography.cosine_latitude_weights(
            self.cube_dim_lat
        )
        self.assertEqual(weights.shape, self.cube.shape)
        self.assertArrayAlmostEqual(
            weights[0, :, 0], np.cos(np.deg2rad(self.lat1d))
        )

    def test_cosine_latitude_weights_1d_latitude_first(self):
        # 1d latitude coordinate with latitude first (lat, time, lon)
        order = [1, 0, 2]  # (lat, time, lon)
        self.cube_dim_lat.transpose(order)
        weights = iris.analysis.cartography.cosine_latitude_weights(
            self.cube_dim_lat
        )
        self.assertEqual(weights.shape, self.cube_dim_lat.shape)
        self.assertArrayAlmostEqual(
            weights[:, 0, 0], np.cos(np.deg2rad(self.lat1d))
        )

    def test_cosine_latitude_weights_1d_latitude_last(self):
        # 1d latitude coordinate with latitude last (time, lon, lat)
        order = [0, 2, 1]  # (time, lon, lat)
        self.cube_dim_lat.transpose(order)
        weights = iris.analysis.cartography.cosine_latitude_weights(
            self.cube_dim_lat
        )
        self.assertEqual(weights.shape, self.cube_dim_lat.shape)
        self.assertArrayAlmostEqual(
            weights[0, 0, :], np.cos(np.deg2rad(self.lat1d))
        )

    def test_cosine_latitude_weights_2d_singleton1(self):
        # 2d latitude coordinate with first dimension singleton
        cube = self.cube_aux_lat[:, 0:1, :]
        weights = iris.analysis.cartography.cosine_latitude_weights(cube)
        self.assertEqual(weights.shape, cube.shape)
        self.assertArrayAlmostEqual(
            weights[0, :, :], np.cos(np.deg2rad(self.lat2d[0:1, :]))
        )

    def test_cosine_latitude_weights_2d_singleton2(self):
        # 2d latitude coordinate with second dimension singleton
        cube = self.cube_aux_lat[:, :, 0:1]
        weights = iris.analysis.cartography.cosine_latitude_weights(cube)
        self.assertEqual(weights.shape, cube.shape)
        self.assertArrayAlmostEqual(
            weights[0, :, :], np.cos(np.deg2rad(self.lat2d[:, 0:1]))
        )

    def test_cosine_latitude_weights_2d_singleton3(self):
        # 2d latitude coordinate with both dimensions singleton
        cube = self.cube_aux_lat[:, 0:1, 0:1]
        weights = iris.analysis.cartography.cosine_latitude_weights(cube)
        self.assertEqual(weights.shape, cube.shape)
        self.assertArrayAlmostEqual(
            weights[0, :, :], np.cos(np.deg2rad(self.lat2d[0:1, 0:1]))
        )

    def test_cosine_latitude_weights_2d(self):
        # 2d latitude coordinate (time, lat, lon)
        weights = iris.analysis.cartography.cosine_latitude_weights(
            self.cube_aux_lat
        )
        self.assertEqual(weights.shape, self.cube_aux_lat.shape)
        self.assertArrayAlmostEqual(
            weights[0, :, :], np.cos(np.deg2rad(self.lat2d))
        )

    def test_cosine_latitude_weights_2d_latitude_first(self):
        # 2d latitude coordinate with latitude first (lat, time, lon)
        order = [1, 0, 2]  # (lat, time, lon)
        self.cube_aux_lat.transpose(order)
        weights = iris.analysis.cartography.cosine_latitude_weights(
            self.cube_aux_lat
        )
        self.assertEqual(weights.shape, self.cube_aux_lat.shape)
        self.assertArrayAlmostEqual(
            weights[:, 0, :], np.cos(np.deg2rad(self.lat2d))
        )

    def test_cosine_latitude_weights_2d_latitude_last(self):
        # 2d latitude coordinate with latitude last (time, lon, lat)
        order = [0, 2, 1]  # (time, lon, lat)
        self.cube_aux_lat.transpose(order)
        weights = iris.analysis.cartography.cosine_latitude_weights(
            self.cube_aux_lat
        )
        self.assertEqual(weights.shape, self.cube_aux_lat.shape)
        # Transposed cube, so compare against the transposed 2d latitudes.
        self.assertArrayAlmostEqual(
            weights[0, :, :], np.cos(np.deg2rad(self.lat2d.T))
        )

    def test_cosine_latitude_weights_no_latitude(self):
        # no coordinate identified as latitude
        self.cube_dim_lat.remove_coord("grid_latitude")
        with self.assertRaises(ValueError):
            _ = iris.analysis.cartography.cosine_latitude_weights(
                self.cube_dim_lat
            )

    def test_cosine_latitude_weights_multiple_latitude(self):
        # two coordinates identified as latitude
        with self.assertRaises(ValueError):
            _ = iris.analysis.cartography.cosine_latitude_weights(self.cube)
class TestRollingWindow(tests.IrisTest):
    """Tests for ``Cube.rolling_window``: aggregators, weights, masking,
    window-size validation, and both coordinate axes.
    """

    def setUp(self):
        # XXX Comes from test_aggregated_by
        # 3x4 temperature cube with latitude (dim 0) and longitude (dim 1).
        cube = iris.cube.Cube(
            np.array([[6, 10, 12, 18], [8, 12, 14, 20], [18, 12, 10, 6]]),
            long_name="temperature",
            units="kelvin",
        )
        cube.add_dim_coord(
            iris.coords.DimCoord(
                np.array([0, 5, 10], dtype=np.float64),
                "latitude",
                units="degrees",
            ),
            0,
        )
        cube.add_dim_coord(
            iris.coords.DimCoord(
                np.array([0, 2, 4, 6], dtype=np.float64),
                "longitude",
                units="degrees",
            ),
            1,
        )
        self.cube = cube

    def test_non_mean_operator(self):
        # Rolling MAX over longitude with a window of 2.
        res_cube = self.cube.rolling_window(
            "longitude", iris.analysis.MAX, window=2
        )
        expected_result = np.array(
            [[10, 12, 18], [12, 14, 20], [18, 12, 10]], dtype=np.float64
        )
        self.assertArrayEqual(expected_result, res_cube.data)

    def test_longitude_simple(self):
        # Rolling MEAN over longitude; also checks that window=0 is rejected.
        res_cube = self.cube.rolling_window(
            "longitude", iris.analysis.MEAN, window=2
        )
        expected_result = np.array(
            [[8.0, 11.0, 15.0], [10.0, 13.0, 17.0], [15.0, 11.0, 8.0]],
            dtype=np.float64,
        )
        self.assertArrayEqual(expected_result, res_cube.data)
        self.assertCML(
            res_cube, ("analysis", "rolling_window", "simple_longitude.cml")
        )
        self.assertRaises(
            ValueError,
            self.cube.rolling_window,
            "longitude",
            iris.analysis.MEAN,
            window=0,
        )

    def test_longitude_masked(self):
        # Masked source points must stay masked where the window contains
        # no valid data.
        self.cube.data = ma.array(
            self.cube.data,
            mask=[
                [True, True, True, True],
                [True, False, True, True],
                [False, False, False, False],
            ],
        )
        res_cube = self.cube.rolling_window(
            "longitude", iris.analysis.MEAN, window=2
        )
        # -99 values are placeholders under the mask.
        expected_result = np.ma.array(
            [[-99.0, -99.0, -99.0], [12.0, 12.0, -99.0], [15.0, 11.0, 8.0]],
            mask=[
                [True, True, True],
                [False, False, True],
                [False, False, False],
            ],
            dtype=np.float64,
        )
        self.assertMaskedArrayEqual(expected_result, res_cube.data)

    def test_longitude_circular(self):
        # Rolling windows over a circular coordinate are not implemented.
        # NOTE(review): window=0 would also raise ValueError; presumably the
        # circular check fires first — confirm against implementation.
        cube = self.cube
        cube.coord("longitude").circular = True
        self.assertRaises(
            iris.exceptions.NotYetImplementedError,
            self.cube.rolling_window,
            "longitude",
            iris.analysis.MEAN,
            window=0,
        )

    def test_different_length_windows(self):
        # Full-width window collapses longitude to a single column.
        res_cube = self.cube.rolling_window(
            "longitude", iris.analysis.MEAN, window=4
        )
        expected_result = np.array([[11.5], [13.5], [11.5]], dtype=np.float64)
        self.assertArrayEqual(expected_result, res_cube.data)
        self.assertCML(
            res_cube, ("analysis", "rolling_window", "size_4_longitude.cml")
        )
        # Window too long:
        self.assertRaises(
            ValueError,
            self.cube.rolling_window,
            "longitude",
            iris.analysis.MEAN,
            window=6,
        )
        # Window too small:
        self.assertRaises(
            ValueError,
            self.cube.rolling_window,
            "longitude",
            iris.analysis.MEAN,
            window=0,
        )

    def test_bad_coordinate(self):
        # Unknown coordinate name raises KeyError.
        self.assertRaises(
            KeyError,
            self.cube.rolling_window,
            "wibble",
            iris.analysis.MEAN,
            window=0,
        )

    def test_latitude_simple(self):
        # Rolling MEAN over the latitude dimension.
        res_cube = self.cube.rolling_window(
            "latitude", iris.analysis.MEAN, window=2
        )
        expected_result = np.array(
            [[7.0, 11.0, 13.0, 19.0], [13.0, 12.0, 12.0, 13.0]],
            dtype=np.float64,
        )
        self.assertArrayEqual(expected_result, res_cube.data)
        self.assertCML(
            res_cube, ("analysis", "rolling_window", "simple_latitude.cml")
        )

    def test_mean_with_weights_consistency(self):
        # equal weights should be the same as the mean with no weights
        wts = np.array([0.5, 0.5], dtype=np.float64)
        res_cube = self.cube.rolling_window(
            "longitude", iris.analysis.MEAN, window=2, weights=wts
        )
        expected_result = self.cube.rolling_window(
            "longitude", iris.analysis.MEAN, window=2
        )
        self.assertArrayEqual(expected_result.data, res_cube.data)

    def test_mean_with_weights(self):
        # rolling window mean with weights
        wts = np.array([0.1, 0.6, 0.3], dtype=np.float64)
        res_cube = self.cube.rolling_window(
            "longitude", iris.analysis.MEAN, window=3, weights=wts
        )
        expected_result = np.array(
            [[10.2, 13.6], [12.2, 15.6], [12.0, 9.0]], dtype=np.float64
        )
        # use almost equal to compare floats
        self.assertArrayAlmostEqual(expected_result, res_cube.data)
# Allow running this test module directly.
if __name__ == "__main__":
    tests.main()
| pp-mo/iris | lib/iris/tests/test_analysis.py | Python | lgpl-3.0 | 55,620 | [
"NetCDF"
] | 09e3034030d49d75df4f452be61514df974399223cc66e9b7e392c2cad05752d |
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2022 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
import sys
from typing import Callable
from functools import partial
import numpy as np
import qcelemental as qcel
from .exceptions import TestComparisonError, UpgradeHelper
# Public comparison API of this module; the compare_* names are bound to
# partials / shims defined below.
__all__ = [
    'compare', 'compare_integers', 'compare_strings', 'compare_values', 'compare_arrays', 'compare_recursive',
    'compare_molrecs', 'compare_matrices', 'compare_dicts'
]
def _merge_psi4_qcel_apis(args, kwargs):
    """Outer shim to allow both Psi4-style and QCA-style testing interfaces
    through the same function.

    Interprets positional ``args`` as ``()``, ``(label,)``, ``(digits,)`` or
    ``(digits, label)`` (Psi4 style) and folds them into QCElemental-style
    keyword arguments (``label``, ``atol``, ``quiet``).

    Notes
    -----
    `kwargs` is modified in-place; nothing is returned.
    """

    def process_digits(digits):
        # Psi4 passes decimal places (>= 1); convert to a literal absolute
        # tolerance. Values < 1 are taken as literal tolerances already.
        if digits >= 1:
            return 10**-digits
        return digits

    if len(args) == 0:
        # No positionals: default the label to the calling test's name
        # (two frames up: past this helper and the compare_* shim).
        kwargs['label'] = sys._getframe().f_back.f_back.f_code.co_name
    elif len(args) == 1:
        if isinstance(args[0], str):
            kwargs['label'] = args[0]
        else:
            kwargs['atol'] = process_digits(args[0])
            kwargs['label'] = sys._getframe().f_back.f_back.f_code.co_name
        # Translate Psi4's `verbose` into QCElemental's `quiet`.
        if 'verbose' in kwargs:
            kwargs['quiet'] = (kwargs.pop('verbose') < 1)
    elif len(args) == 2:
        kwargs['atol'] = process_digits(args[0])
        kwargs['label'] = args[1]
        if 'verbose' in kwargs:
            kwargs['quiet'] = (kwargs.pop('verbose') < 1)
    else:
        raise ValueError("""Not following either Psi4 or QCElemental API pattern for comparison.""")
def _psi4_compare_integers(expected, computed, label: str = None, *, verbose: int = 1,
                           return_handler: Callable = None):
    """Shim between Psi4-style and QCA-style testing interfaces for scalar
    ints and strings.
    """
    # uncomment to ferret out old function name
    #raise UpgradeHelper('qcdb.compare_integers', 'qcdb.compare', 'someday', ' Same API, just rename the function and convert `verbose` to `quiet`.')
    if label is None:
        # Default the label to the name of the calling test function.
        label = sys._getframe().f_back.f_code.co_name
    quiet = verbose == 0
    return qcel.testing.compare(
        expected,
        computed,
        label=label,
        quiet=quiet,
        return_message=False,
        return_handler=return_handler,
    )
def _mergedapis_compare_values(expected, computed, *args, **kwargs):
    """Shim between Psi4-style and QCA-style testing interfaces for scalar
    and array floats, allowing both through the same function.

    Positional ``args`` are folded into keyword arguments by
    :func:`_merge_psi4_qcel_apis` (which mutates ``kwargs`` in place).
    """
    # Fix: the original had two consecutive string literals; only the first
    # was the docstring, the second was a dead no-op expression statement.
    _merge_psi4_qcel_apis(args, kwargs)
    return qcel.testing.compare_values(expected, computed, **kwargs)
def _mergedapis_compare_recursive(expected, computed, *args, **kwargs):
    """Pass-through to ``qcel.testing.compare_recursive`` that rejects the
    retired Psi4-style positional-``digits`` calling pattern.
    """
    # A non-string first positional is the old `digits` argument.
    legacy_digits_call = bool(args) and not isinstance(args[0], str)
    if legacy_digits_call:
        raise UpgradeHelper(
            'qcdb.compare_recursive', 'qcdb.compare_recursive', 1.4,
            ' Use the new `qcel.testing.compare_recursive` API, being sure to convert positional arg `digits` decimal places to keyword arg `atol` literal absolute tolerance.'
        )
    return qcel.testing.compare_recursive(expected, computed, *args, **kwargs)
def _mergedapis_compare_molrecs(expected, computed, *args, **kwargs):
    """Pass-through to ``qcel.testing.compare_molrecs`` accepting both Psi4-
    and QCA-style arguments, while rejecting the retired positional
    ``digits`` calling pattern.
    """
    legacy_digits_call = bool(args) and not isinstance(args[0], str)
    if legacy_digits_call:
        raise UpgradeHelper(
            'qcdb.compare_molrecs', 'qcdb.compare_molrecs', 1.4,
            ' Use the new `qcel.testing.compare_molrecs` API, being sure to convert positional arg `digits` decimal places to keyword arg `atol` literal absolute tolerance.'
        )
    # Fold any remaining Psi4-style positionals into kwargs (in place).
    _merge_psi4_qcel_apis(args, kwargs)
    return qcel.testing.compare_molrecs(expected, computed, **kwargs)
def compare_matrices(expected, computed, *args, **kwargs):
    """Retired function; always raises UpgradeHelper directing callers to
    ``compare_values``.
    """
    # Fix: added the missing opening backtick before `qcel...` so the message
    # matches the formatting of the sibling upgrade messages.
    raise UpgradeHelper(
        'qcdb.compare_matrices', 'qcdb.compare_values', 1.4,
        ' Use the new `qcel.testing.compare_values` API, being sure to convert `digits` decimal places to `atol` literal absolute tolerance.'
    )
def compare_dicts(expected, computed, *args, **kwargs):
    """Retired function; always raises UpgradeHelper directing callers to
    ``compare_recursive``.
    """
    raise UpgradeHelper(
        'qcdb.compare_dicts', 'qcdb.compare_recursive', 1.4,
        ' Use the new `qcel.testing.compare_recursive` API, being sure to convert `tol` decimal places to `atol` literal absolute tolerance.'
    )
def _qcdb_true_raise_handler(passfail, label, message, return_message=False, quiet=False):
"""Handle comparison result by printing to screen and raising qcdb.TestComparisonError or returning True."""
width = 66
if passfail:
if not quiet:
print(f' {label:.<{width}}PASSED')
sys.stdout.flush()
else:
print(f' {label:.<{width}}FAILED')
sys.stdout.flush()
raise TestComparisonError(message)
return passfail
# Public API: QCElemental comparison functions bound to the qcdb handler,
# which prints PASSED/FAILED and raises TestComparisonError on failure.
compare = partial(qcel.testing.compare, return_handler=_qcdb_true_raise_handler)
compare_integers = partial(_psi4_compare_integers, return_handler=_qcdb_true_raise_handler)
# Strings compare by equality just like ints, so they share an implementation.
compare_strings = compare_integers
compare_values = partial(_mergedapis_compare_values, return_handler=_qcdb_true_raise_handler)
# Arrays go through the same merged float-comparison API.
compare_arrays = compare_values
compare_recursive = partial(_mergedapis_compare_recursive, return_handler=_qcdb_true_raise_handler)
compare_molrecs = partial(_mergedapis_compare_molrecs, return_handler=_qcdb_true_raise_handler)
# Notes on testing fns migration
# PSI4
# ADDED def compare SINGLE
# MERGED-APIs def compare_integers(expected, computed, label, verbose=1): SINGLE
# MERGED-APIs def compare_strings(expected, computed, label, verbose=1): SINGLE
# MERGED-APIs def compare_values(expected, computed, digits, label, *, rtol=1.e-16, passnone=False, verbose=1): SINGLE
# MERGED-APIs def compare_arrays(expected, computed, digits, label, rtol=1.e-16, verbose=1): SINGLE
#
# ADDED-NEW-API def compare_recursive SINGLE
# ADDED-NEW-API def compare_molrecs SINGLE
#
# MERGED-APIs def compare_cubes(expected, computed, label, verbose=1): SINGLE
# MERGED-APIs def compare_vectors(expected, computed, digits, label, *, rtol=1.e-16, verbose=1): MULTI
# MERGED-APIs def compare_matrices(expected, computed, digits, label, *, rtol=1.e-16, verbose=1): MULTI
# MERGED-APIs def compare_wavefunctions(expected, computed, digits=9, label='Wavefunctions equal'): MULTI
# def compare_fcidumps(expected, computed, label): MULTI
# QCDB
# ADDED def compare SINGLE
# MERGED-APIs-TRIVIAL def compare_integers(expected, computed, label, verbose=1): SINGLE
# MERGED-APIs-TRIVIAL def compare_strings(expected, computed, label, verbose=1): SINGLE
# MERGED-APIs def compare_values(expected, computed, digits, label, passnone=False, verbose=1): SINGLE
# MERGED-APIs def compare_arrays(expected, computed, digits, label, verbose=1): SINGLE
#
# ADDED-NEW-API def compare_recursive SINGLE
# STOPCONVERT/NEW-API def compare_molrecs(expected, computed, tol, label, forgive=None, verbose=1, relative_geoms='exact' SINGLE
#
# STOPCONVERT/NEW-FN def compare_matrices(expected, computed, digits, label, verbose=1): ---
# STOPCONVERT/NEW-FN def compare_dicts(expected, computed, tol, label, forgive=None, verbose=1): ---
# vib.py:def compare_vibinfos(expected, computed, tol, label, verbose=1, forgive=None, required=None, toldict=None): SINGLE
| psi4/psi4 | psi4/driver/qcdb/testing.py | Python | lgpl-3.0 | 9,111 | [
"Psi4"
] | cc31e83dfab1e19fb815a13e0552714d130c1e8eada6a383ca78713e3d9e1b1d |
"""
sentry.conf.server
~~~~~~~~~~~~~~~~~~
These settings act as the default (base) settings for the Sentry-provided web-server
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.conf.global_settings import * # NOQA
from datetime import timedelta
import hashlib
import os
import os.path
import socket
import sys
import tempfile
import urlparse
# Marker for strings to be collected for translation without translating
# at definition time.
gettext_noop = lambda s: s
# Global default timeout (seconds) for all socket operations.
socket.setdefaulttimeout(5)
DEBUG = False
TEMPLATE_DEBUG = True
MAINTENANCE = False
ADMINS = ()
INTERNAL_IPS = ('127.0.0.1',)
MANAGERS = ADMINS
APPEND_SLASH = True
# Repository layout roots; PROJECT_ROOT is the package directory above conf/.
PROJECT_ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir))
NODE_MODULES_ROOT = os.path.join(PROJECT_ROOT, os.pardir, os.pardir, 'node_modules')
# Make the parent of PROJECT_ROOT importable.
sys.path.insert(0, os.path.normpath(os.path.join(PROJECT_ROOT, os.pardir)))
# Default cache is a no-op; deployments override this.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
}
# SQLite default database; typically overridden via DATABASE_URL below.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'sentry.db',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
        'AUTOCOMMIT': True,
        'ATOMIC_REQUESTS': False,
    }
}
if 'DATABASE_URL' in os.environ:
    # Parse a heroku-style URL: scheme://user:password@host:port/name
    url = urlparse.urlparse(os.environ['DATABASE_URL'])
    # Ensure default database exists.
    DATABASES['default'] = DATABASES.get('default', {})
    # Update with environment configuration.
    DATABASES['default'].update({
        'NAME': url.path[1:],
        'USER': url.username,
        'PASSWORD': url.password,
        'HOST': url.hostname,
        'PORT': url.port,
    })
    if url.scheme == 'postgres':
        # Sentry's own postgres backend wrapper.
        DATABASES['default']['ENGINE'] = 'sentry.db.postgres'
    if url.scheme == 'mysql':
        DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
EMAIL_SUBJECT_PREFIX = '[Sentry] '
# This should always be UTC.
TIME_ZONE = 'UTC'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# Languages offered in the UI, as (code, display name) pairs.
# Fix: the final entry duplicated the 'zh-cn' code for Traditional Chinese,
# shadowing Simplified Chinese; Traditional Chinese is 'zh-tw'.
LANGUAGES = (
    ('af', gettext_noop('Afrikaans')),
    ('ar', gettext_noop('Arabic')),
    ('az', gettext_noop('Azerbaijani')),
    ('bg', gettext_noop('Bulgarian')),
    ('be', gettext_noop('Belarusian')),
    ('bn', gettext_noop('Bengali')),
    ('br', gettext_noop('Breton')),
    ('bs', gettext_noop('Bosnian')),
    ('ca', gettext_noop('Catalan')),
    ('cs', gettext_noop('Czech')),
    ('cy', gettext_noop('Welsh')),
    ('da', gettext_noop('Danish')),
    ('de', gettext_noop('German')),
    ('el', gettext_noop('Greek')),
    ('en', gettext_noop('English')),
    ('eo', gettext_noop('Esperanto')),
    ('es', gettext_noop('Spanish')),
    ('et', gettext_noop('Estonian')),
    ('eu', gettext_noop('Basque')),
    ('fa', gettext_noop('Persian')),
    ('fi', gettext_noop('Finnish')),
    ('fr', gettext_noop('French')),
    ('ga', gettext_noop('Irish')),
    ('gl', gettext_noop('Galician')),
    ('he', gettext_noop('Hebrew')),
    ('hi', gettext_noop('Hindi')),
    ('hr', gettext_noop('Croatian')),
    ('hu', gettext_noop('Hungarian')),
    ('ia', gettext_noop('Interlingua')),
    ('id', gettext_noop('Indonesian')),
    ('is', gettext_noop('Icelandic')),
    ('it', gettext_noop('Italian')),
    ('ja', gettext_noop('Japanese')),
    ('ka', gettext_noop('Georgian')),
    ('kk', gettext_noop('Kazakh')),
    ('km', gettext_noop('Khmer')),
    ('kn', gettext_noop('Kannada')),
    ('ko', gettext_noop('Korean')),
    ('lb', gettext_noop('Luxembourgish')),
    ('lt', gettext_noop('Lithuanian')),
    ('lv', gettext_noop('Latvian')),
    ('mk', gettext_noop('Macedonian')),
    ('ml', gettext_noop('Malayalam')),
    ('mn', gettext_noop('Mongolian')),
    ('my', gettext_noop('Burmese')),
    ('nb', gettext_noop('Norwegian Bokmal')),
    ('ne', gettext_noop('Nepali')),
    ('nl', gettext_noop('Dutch')),
    ('nn', gettext_noop('Norwegian Nynorsk')),
    ('os', gettext_noop('Ossetic')),
    ('pa', gettext_noop('Punjabi')),
    ('pl', gettext_noop('Polish')),
    ('pt', gettext_noop('Portuguese')),
    ('pt-br', gettext_noop('Brazilian Portuguese')),
    ('ro', gettext_noop('Romanian')),
    ('ru', gettext_noop('Russian')),
    ('sk', gettext_noop('Slovak')),
    ('sl', gettext_noop('Slovenian')),
    ('sq', gettext_noop('Albanian')),
    ('sr', gettext_noop('Serbian')),
    ('sv-se', gettext_noop('Swedish')),
    ('sw', gettext_noop('Swahili')),
    ('ta', gettext_noop('Tamil')),
    ('te', gettext_noop('Telugu')),
    ('th', gettext_noop('Thai')),
    ('tr', gettext_noop('Turkish')),
    ('tt', gettext_noop('Tatar')),
    ('udm', gettext_noop('Udmurt')),
    ('uk', gettext_noop('Ukrainian')),
    ('ur', gettext_noop('Urdu')),
    ('vi', gettext_noop('Vietnamese')),
    ('zh-cn', gettext_noop('Simplified Chinese')),
    ('zh-tw', gettext_noop('Traditional Chinese')),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
USE_TZ = True
# Make this unique, and don't share it with anybody.
# NOTE(review): this is only a deterministic per-host fallback (md5 of the
# hostname plus a fixed salt) — deployments must override SECRET_KEY.
SECRET_KEY = hashlib.md5(socket.gethostname() + ')*)&8a36)6%74e@-ne5(-!8a(vv#tkv)(eyg&@0=zd^pl!7=y@').hexdigest()
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)
# Request/response middleware, outermost first.
MIDDLEWARE_CLASSES = (
    'sentry.middleware.maintenance.ServicesUnavailableMiddleware',
    'sentry.middleware.env.SentryEnvMiddleware',
    'sentry.middleware.proxy.SetRemoteAddrFromForwardedFor',
    'sentry.middleware.debug.NoIfModifiedSinceMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'sentry.middleware.auth.AuthenticationMiddleware',
    'sentry.middleware.sudo.SudoMiddleware',
    'sentry.middleware.locale.SentryLocaleMiddleware',
    'sentry.middleware.social_auth.SentrySocialAuthExceptionMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'sentry.debug.middleware.DebugMiddleware',
)
ROOT_URLCONF = 'sentry.conf.urls'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(PROJECT_ROOT, 'templates'),
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.contrib.messages.context_processors.messages',
    'django.core.context_processors.csrf',
    'social_auth.context_processors.social_auth_by_name_backends',
    'social_auth.context_processors.social_auth_backends',
    'social_auth.context_processors.social_auth_by_type_backends',
    'social_auth.context_processors.social_auth_login_redirect'
)
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.messages',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.staticfiles',
    'captcha',
    'crispy_forms',
    'debug_toolbar',
    'gunicorn',
    'kombu.transport.django',
    'raven.contrib.django.raven_compat',
    'rest_framework',
    'sentry',
    'sentry.nodestore',
    'sentry.search',
    'sentry.lang.javascript',
    'sentry.plugins.sentry_interface_types',
    'sentry.plugins.sentry_mail',
    'sentry.plugins.sentry_urls',
    'sentry.plugins.sentry_useragents',
    'sentry.plugins.sentry_webhooks',
    'social_auth',
    'south',
    'sudo',
)
STATIC_ROOT = os.path.realpath(os.path.join(PROJECT_ROOT, 'static'))
STATIC_URL = '/_static/'
STATICFILES_FINDERS = (
    "django.contrib.staticfiles.finders.FileSystemFinder",
    "django.contrib.staticfiles.finders.AppDirectoriesFinder",
)
# setup a default media root to somewhere useless
MEDIA_ROOT = '/tmp/sentry-media'
LOCALE_PATHS = (
    os.path.join(PROJECT_ROOT, 'locale'),
)
CSRF_FAILURE_VIEW = 'sentry.web.frontend.csrf_failure.view'
CSRF_COOKIE_NAME = 'csrf'
# Auth configuration
try:
    # reverse_lazy is only available on newer Django; fall back to literal
    # URLs when it cannot be imported.
    from django.core.urlresolvers import reverse_lazy
except ImportError:
    LOGIN_REDIRECT_URL = '/login-redirect/'
    LOGIN_URL = '/auth/login/'
else:
    LOGIN_REDIRECT_URL = reverse_lazy('sentry-login-redirect')
    LOGIN_URL = reverse_lazy('sentry-login')
AUTHENTICATION_BACKENDS = (
    'social_auth.backends.twitter.TwitterBackend',
    'social_auth.backends.facebook.FacebookBackend',
    # TODO: migrate to GoogleOAuth2Backend
    'social_auth.backends.google.GoogleBackend',
    'social_auth.backends.contrib.github.GithubBackend',
    'social_auth.backends.contrib.bitbucket.BitbucketBackend',
    'social_auth.backends.contrib.trello.TrelloBackend',
    'sentry.utils.auth.EmailAuthBackend',
)
SOCIAL_AUTH_USER_MODEL = AUTH_USER_MODEL = 'sentry.User'
SESSION_ENGINE = "django.contrib.sessions.backends.signed_cookies"
SESSION_COOKIE_NAME = "sentrysid"
SESSION_SERIALIZER = "django.contrib.sessions.serializers.PickleSerializer"
# Per-provider OAuth credentials; empty by default and expected to be set
# in deployment configuration.
TWITTER_CONSUMER_KEY = ''
TWITTER_CONSUMER_SECRET = ''
FACEBOOK_APP_ID = ''
FACEBOOK_API_SECRET = ''
FACEBOOK_EXTENDED_PERMISSIONS = ['email']
GOOGLE_OAUTH2_CLIENT_ID = ''
GOOGLE_OAUTH2_CLIENT_SECRET = ''
GITHUB_APP_ID = ''
GITHUB_API_SECRET = ''
TRELLO_API_KEY = ''
TRELLO_API_SECRET = ''
BITBUCKET_CONSUMER_KEY = ''
BITBUCKET_CONSUMER_SECRET = ''
MAILGUN_API_KEY = ''
SOCIAL_AUTH_PIPELINE = (
    'social_auth.backends.pipeline.user.get_username',
    'social_auth.backends.pipeline.social.social_auth_user',
    'social_auth.backends.pipeline.associate.associate_by_email',
    'social_auth.backends.pipeline.misc.save_status_to_session',
    'social_auth.backends.pipeline.social.associate_user',
    'social_auth.backends.pipeline.social.load_extra_data',
    'social_auth.backends.pipeline.user.update_user_details',
    'social_auth.backends.pipeline.misc.save_status_to_session',
)
INITIAL_CUSTOM_USER_MIGRATION = '0108_fix_user'
# Auth engines and the settings required for them to be listed
AUTH_PROVIDERS = {
    'github': ('GITHUB_APP_ID', 'GITHUB_API_SECRET'),
    'trello': ('TRELLO_API_KEY', 'TRELLO_API_SECRET'),
    'bitbucket': ('BITBUCKET_CONSUMER_KEY', 'BITBUCKET_CONSUMER_SECRET'),
}
import random
# Fallback display name when a social provider supplies no username.
SOCIAL_AUTH_DEFAULT_USERNAME = lambda: random.choice(['Darth Vader', 'Obi-Wan Kenobi', 'R2-D2', 'C-3PO', 'Yoda'])
SOCIAL_AUTH_PROTECTED_USER_FIELDS = ['email']
# Queue configuration
from kombu import Exchange, Queue
# Defaults run tasks eagerly in-process over the Django ORM transport;
# production overrides with a real broker.
BROKER_URL = "django://"
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_IGNORE_RESULT = True
CELERY_SEND_EVENTS = False
CELERY_RESULT_BACKEND = None
CELERY_TASK_RESULT_EXPIRES = 1
CELERY_DISABLE_RATE_LIMITS = True
CELERY_DEFAULT_QUEUE = "default"
CELERY_DEFAULT_EXCHANGE = "default"
CELERY_DEFAULT_EXCHANGE_TYPE = "direct"
CELERY_DEFAULT_ROUTING_KEY = "default"
CELERY_CREATE_MISSING_QUEUES = True
# Modules scanned for task definitions.
CELERY_IMPORTS = (
    'sentry.tasks.beacon',
    'sentry.tasks.check_auth',
    'sentry.tasks.deletion',
    'sentry.tasks.email',
    'sentry.tasks.index',
    'sentry.tasks.merge',
    'sentry.tasks.store',
    'sentry.tasks.options',
    'sentry.tasks.ping',
    'sentry.tasks.post_process',
    'sentry.tasks.process_buffer',
)
# Named queues; create_partitioned_queues() below appends partitioned ones.
CELERY_QUEUES = [
    Queue('default', routing_key='default'),
    Queue('alerts', routing_key='alerts'),
    Queue('auth', routing_key='auth'),
    Queue('cleanup', routing_key='cleanup'),
    Queue('sourcemaps', routing_key='sourcemaps'),
    Queue('search', routing_key='search'),
    Queue('events', routing_key='events'),
    Queue('update', routing_key='update'),
    Queue('email', routing_key='email'),
    Queue('options', routing_key='options'),
]
CELERY_ROUTES = ('sentry.queue.routers.SplitQueueRouter',)
def create_partitioned_queues(name):
    """Register the partitioned variants of queue *name* (``name-0``, ...)
    on the module-level CELERY_QUEUES list, all bound to a single direct
    exchange named after the queue."""
    direct_exchange = Exchange(name, type='direct')
    partitions = 1
    CELERY_QUEUES.extend(
        Queue('{0}-{1}'.format(name, partition), exchange=direct_exchange)
        for partition in range(partitions)
    )
create_partitioned_queues('counters')
create_partitioned_queues('triggers')
# Celerybeat persists its schedule state to a temp file between runs.
CELERYBEAT_SCHEDULE_FILENAME = os.path.join(tempfile.gettempdir(), 'sentry-celerybeat')
# Periodic task schedule; 'expires' keeps stale entries from piling up
# if workers fall behind.
CELERYBEAT_SCHEDULE = {
    'check-auth': {
        'task': 'sentry.tasks.check_auth',
        'schedule': timedelta(minutes=1),
        'options': {
            'expires': 60,
            'queue': 'auth',
        }
    },
    'send-beacon': {
        'task': 'sentry.tasks.send_beacon',
        'schedule': timedelta(hours=1),
        'options': {
            'expires': 3600,
        },
    },
    'send-ping': {
        'task': 'sentry.tasks.send_ping',
        'schedule': timedelta(minutes=1),
        'options': {
            'expires': 60,
        },
    },
    'flush-buffers': {
        'task': 'sentry.tasks.process_buffer.process_pending',
        'schedule': timedelta(seconds=10),
        'options': {
            'expires': 10,
            'queue': 'counters-0',
        }
    },
    'sync-options': {
        'task': 'sentry.tasks.options.sync_options',
        'schedule': timedelta(seconds=10),
        'options': {
            'expires': 10,
            'queue': 'options',
        }
    },
}
# Logging: console for warnings, internal Sentry handler for errors;
# the 'sentry:internal' filter prevents error-reporting recursion.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'handlers': {
        'console': {
            'level': 'WARNING',
            'class': 'logging.StreamHandler',
            'formatter': 'simple',
        },
        'sentry': {
            'level': 'ERROR',
            'filters': ['sentry:internal'],
            'class': 'raven.contrib.django.handlers.SentryHandler',
        },
        'console:api': {
            'level': 'WARNING',
            'class': 'logging.StreamHandler',
            'formatter': 'client_info',
        },
    },
    'filters': {
        'sentry:internal': {
            '()': 'sentry.utils.raven.SentryInternalFilter',
        },
    },
    'formatters': {
        'simple': {
            'format': '[%(levelname)s] %(message)s',
        },
        'client_info': {
            'format': '[%(levelname)s] [%(project)s] [%(agent)s] %(message)s',
        },
    },
    'root': {
        'handlers': ['console', 'sentry'],
    },
    'loggers': {
        'sentry': {
            'level': 'ERROR',
        },
        'sentry.api': {
            'handlers': ['console:api', 'sentry'],
            'propagate': False,
        },
        'sentry.errors': {
            'handlers': ['console'],
            'propagate': False,
        },
        'sentry.rules': {
            'handlers': ['console'],
            'propagate': False,
        },
        'static_compiler': {
            'level': 'INFO',
        },
        'django.request': {
            'level': 'ERROR',
            'handlers': ['console'],
            'propagate': False,
        },
        'toronado.cssutils': {
            'level': 'ERROR',
            'propagate': False,
        },
    }
}
# django-rest-framework
REST_FRAMEWORK = {
    'TEST_REQUEST_DEFAULT_FORMAT': 'json',
    'DEFAULT_PERMISSION_CLASSES': (
        'sentry.api.permissions.NoPermission',
    )
}
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# django-recaptcha
RECAPTCHA_PUBLIC_KEY = None
RECAPTCHA_PRIVATE_KEY = None
NOCAPTCHA = True
CAPTCHA_WIDGET_TEMPLATE = "sentry/partial/form_captcha.html"
# Debugger
DEBUG_TOOLBAR_PANELS = (
    'debug_toolbar.panels.timer.TimerPanel',
    'sentry.debug.panels.route.RoutePanel',
    'debug_toolbar.panels.templates.TemplatesPanel',
    'debug_toolbar.panels.sql.SQLPanel',
    # TODO(dcramer): https://github.com/getsentry/sentry/issues/1722
    # 'sentry.debug.panels.redis.RedisPanel',
)
DEBUG_TOOLBAR_PATCH_SETTINGS = False
# Sentry and Raven configuration
SENTRY_CLIENT = 'sentry.utils.raven.SentryInternalClient'
SENTRY_CACHE_BACKEND = 'default'
# Feature switches: flag name -> default enabled state.
SENTRY_FEATURES = {
    'auth:register': True,
    'organizations:create': True,
    'organizations:sso': False,
    'projects:quotas': True,
    'projects:user-reports': True,
}
# Default time zone for localization in the UI.
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
SENTRY_DEFAULT_TIME_ZONE = 'UTC'
# Enable the Sentry Debugger (Beta)
SENTRY_DEBUGGER = False
# Exception class names to drop rather than record.
SENTRY_IGNORE_EXCEPTIONS = (
    'OperationalError',
)
# Absolute URL to the sentry root directory. Should not include a trailing slash.
SENTRY_URL_PREFIX = ''
# Should we send the beacon to the upstream server?
SENTRY_BEACON = True
# The administrative contact for this installation
SENTRY_ADMIN_EMAIL = ''
# Allow access to Sentry without authentication.
SENTRY_PUBLIC = False
# Instruct Sentry that this install intends to be run by a single organization
# and thus various UI optimizations should be enabled.
SENTRY_SINGLE_ORGANIZATION = False
# Login url (defaults to LOGIN_URL)
SENTRY_LOGIN_URL = None
# Default project ID (for internal errors)
SENTRY_PROJECT = 1
# Project ID for recording frontend (javascript) exceptions
SENTRY_FRONTEND_PROJECT = None
# Only store a portion of all messages per unique group.
SENTRY_SAMPLE_DATA = True
# The following values control the sampling rates
SENTRY_SAMPLE_RATES = (
    # up until N events, store 1 in M
    (50, 1),
    (1000, 2),
    (10000, 10),
    (100000, 50),
    (1000000, 300),
    (10000000, 2000),
)
SENTRY_MAX_SAMPLE_RATE = 10000
# (age in seconds, store 1 in M) tiers for time-based sampling.
SENTRY_SAMPLE_TIMES = (
    (3600, 1),
    (360, 10),
    (60, 60),
)
SENTRY_MAX_SAMPLE_TIME = 10000
# Web Service
SENTRY_WEB_HOST = 'localhost'
SENTRY_WEB_PORT = 9000
SENTRY_WEB_OPTIONS = {}
# UDP Service
SENTRY_UDP_HOST = 'localhost'
SENTRY_UDP_PORT = 9001
SENTRY_USE_IPV6_UDP = False
# SMTP Service
SENTRY_ENABLE_EMAIL_REPLIES = False
SENTRY_SMTP_HOSTNAME = 'localhost'
SENTRY_SMTP_HOST = 'localhost'
SENTRY_SMTP_PORT = 1025
# Interface aliases (both short and legacy dotted names) -> implementations.
SENTRY_INTERFACES = {
    'exception': 'sentry.interfaces.exception.Exception',
    'logentry': 'sentry.interfaces.message.Message',
    'request': 'sentry.interfaces.http.Http',
    'stacktrace': 'sentry.interfaces.stacktrace.Stacktrace',
    'template': 'sentry.interfaces.template.Template',
    'query': 'sentry.interfaces.query.Query',
    'user': 'sentry.interfaces.user.User',
    'sentry.interfaces.Exception': 'sentry.interfaces.exception.Exception',
    'sentry.interfaces.Message': 'sentry.interfaces.message.Message',
    'sentry.interfaces.Stacktrace': 'sentry.interfaces.stacktrace.Stacktrace',
    'sentry.interfaces.Template': 'sentry.interfaces.template.Template',
    'sentry.interfaces.Query': 'sentry.interfaces.query.Query',
    'sentry.interfaces.Http': 'sentry.interfaces.http.Http',
    'sentry.interfaces.User': 'sentry.interfaces.user.User',
}
# Should users without superuser permissions be allowed to
# make projects public
SENTRY_ALLOW_PUBLIC_PROJECTS = True
# Can users be invited to organizations?
SENTRY_ENABLE_INVITES = True
# Default to not sending the Access-Control-Allow-Origin header on api/store
SENTRY_ALLOW_ORIGIN = None
# Enable scraping of javascript context for source code
SENTRY_SCRAPE_JAVASCRIPT_CONTEXT = True
# Redis connection information (see Nydus documentation)
SENTRY_REDIS_OPTIONS = {}
# Buffer backend
SENTRY_BUFFER = 'sentry.buffer.Buffer'
SENTRY_BUFFER_OPTIONS = {}
# Cache backend
# XXX: We explicitly require the cache to be configured as its not optional
# and causes serious confusion with the default django cache
SENTRY_CACHE = None
SENTRY_CACHE_OPTIONS = {}
# Quota backend
SENTRY_QUOTAS = 'sentry.quotas.Quota'
SENTRY_QUOTA_OPTIONS = {}
# Rate limiting backend
SENTRY_RATELIMITER = 'sentry.ratelimits.base.RateLimiter'
SENTRY_RATELIMITER_OPTIONS = {}
# The default value for project-level quotas
SENTRY_DEFAULT_MAX_EVENTS_PER_MINUTE = '90%'
# The maximum number of events per minute the system should accept.
SENTRY_SYSTEM_MAX_EVENTS_PER_MINUTE = 0
# Node storage backend
SENTRY_NODESTORE = 'sentry.nodestore.django.DjangoNodeStorage'
SENTRY_NODESTORE_OPTIONS = {}
# Search backend
SENTRY_SEARCH = 'sentry.search.django.DjangoSearchBackend'
SENTRY_SEARCH_OPTIONS = {}
# Example options for an external search backend:
# SENTRY_SEARCH_OPTIONS = {
#     'urls': ['http://localhost:9200/'],
#     'timeout': 5,
# }
# Time-series storage backend
SENTRY_TSDB = 'sentry.tsdb.dummy.DummyTSDB'
SENTRY_TSDB_OPTIONS = {}
# rollups must be ordered from highest granularity to lowest
SENTRY_TSDB_ROLLUPS = (
    # (time in seconds, samples to keep)
    (10, 360), # 60 minutes at 10 seconds
    (3600, 24 * 7), # 7 days at 1 hour
    (3600 * 24, 60), # 60 days at 1 day
)
# File storage
SENTRY_FILESTORE = 'django.core.files.storage.FileSystemStorage'
SENTRY_FILESTORE_OPTIONS = {'location': '/tmp/sentry-files'}
# Internal metrics
SENTRY_METRICS_BACKEND = 'sentry.metrics.dummy.DummyMetricsBackend'
SENTRY_METRICS_OPTIONS = {}
SENTRY_METRICS_SAMPLE_RATE = 1.0
SENTRY_METRICS_PREFIX = ''
# URL to embed in js documentation
SENTRY_RAVEN_JS_URL = 'cdn.ravenjs.com/1.1.20/jquery,native/raven.min.js'
# URI Prefixes for generating DSN URLs
# (Defaults to URL_PREFIX by default)
SENTRY_ENDPOINT = None
SENTRY_PUBLIC_ENDPOINT = None
# Early draft features. Not slated or public release yet.
SENTRY_ENABLE_EXPLORE_CODE = False
# Prevent variables (e.g. context locals, http data, etc) from exceeding this
# size in characters
SENTRY_MAX_VARIABLE_SIZE = 512
# Prevent variables within extra context from exceeding this size in
# characters
SENTRY_MAX_EXTRA_VARIABLE_SIZE = 4096
# For changing the amount of data seen in Http Response Body part.
SENTRY_MAX_HTTP_BODY_SIZE = 4096 * 4 # 16kb
# For various attributes we don't limit the entire attribute on size, but the
# individual item. In those cases we also want to limit the maximum number of
# keys
SENTRY_MAX_DICTIONARY_ITEMS = 50
SENTRY_MAX_MESSAGE_LENGTH = 1024 * 8
SENTRY_MAX_STACKTRACE_FRAMES = 25
SENTRY_MAX_EXCEPTIONS = 25
# Gravatar service base url
SENTRY_GRAVATAR_BASE_URL = 'https://secure.gravatar.com'
# Timeout (in seconds) for fetching remote source files (e.g. JS)
SENTRY_SOURCE_FETCH_TIMEOUT = 5
# Reserved/private address ranges that remote fetching must never touch.
# http://en.wikipedia.org/wiki/Reserved_IP_addresses
SENTRY_DISALLOWED_IPS = (
    '0.0.0.0/8',
    '10.0.0.0/8',
    '100.64.0.0/10',
    '127.0.0.0/8',
    '169.254.0.0/16',
    '172.16.0.0/12',
    '192.0.0.0/29',
    '192.0.2.0/24',
    '192.88.99.0/24',
    '192.168.0.0/16',
    '198.18.0.0/15',
    '198.51.100.0/24',
    '224.0.0.0/4',
    '240.0.0.0/4',
    '255.255.255.255/32',
)
# Fields which managed users cannot change via Sentry UI. Username and password
# cannot be changed by managed users. Optionally include 'email' and
# 'first_name' in SENTRY_MANAGED_USER_FIELDS.
SENTRY_MANAGED_USER_FIELDS = ('email',)
# See sentry/options/__init__.py for more information
SENTRY_OPTIONS = {}
# You should not change this setting after your database has been created
# unless you have altered all schemas first
SENTRY_USE_BIG_INTS = False
# Delay (in ms) to induce on API responses
SENTRY_API_RESPONSE_DELAY = 0
# Watchers for various application purposes (such as compiling static media)
SENTRY_WATCHERS = (
    [os.path.join(NODE_MODULES_ROOT, '.bin', 'gulp'), 'watch:css'],
    [os.path.join(NODE_MODULES_ROOT, '.bin', 'webpack'), '-d', '--watch',
     "--config={}".format(os.path.join(PROJECT_ROOT, os.pardir, os.pardir, "webpack.config.js"))],
)
| wong2/sentry | src/sentry/conf/server.py | Python | bsd-3-clause | 23,387 | [
"GULP"
] | a58770d38c4fe0f4d1ad23e80dd1c4b6b167907881a7c4881c0f63582b8d0a28 |
#
# Copyright 2015-2016, 2020 James Kermode (Warwick U.)
# 2019 James Brixey (Warwick U.)
# 2016 Henry Lambert (King's College London)
# 2015 Lars Pastewka (U. Freiburg)
#
# matscipy - Materials science with Python at the atomic-scale
# https://github.com/libAtoms/matscipy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import shutil
import subprocess
import socket
import socketserver
from io import StringIO
import time
import threading
from queue import Queue
import numpy as np
from matscipy.elasticity import full_3x3_to_Voigt_6_stress
from matscipy.logger import quiet, screen
from ase.atoms import Atoms
from ase.io.extxyz import read_xyz, write_xyz
from ase.io.vasp import write_vasp
from ase.io.castep import write_castep_cell, write_param
from ase.calculators.calculator import Calculator
from ase.calculators.vasp import Vasp
from ase.calculators.castep import Castep
# Wire-protocol constants shared by server and compute clients.
MSG_LEN_SIZE = 8 # width of the decimal message-length header
MSG_END_MARKER = b'done.\n' # trailer sent to close each exchange
MSG_END_MARKER_SIZE = len(MSG_END_MARKER)
MSG_INT_SIZE = 6
MSG_FLOAT_SIZE = 25
MSG_FLOAT_FORMAT = '%25.16f'
MSG_INT_FORMAT = '%6d'
# Request codes: 'A'/'X' = client ready to receive Atoms (REFTRAJ/XYZ),
# 'R'/'Y' = client posting results back (REFTRAJ/XYZ).
ATOMS_REQUESTS = {ord('A'): 'REFTRAJ', ord('X'): 'XYZ'}
RESULTS_REQUESTS = {ord('R'): 'REFTRAJ', ord('Y'): 'XYZ'}
# Harmless zero-atom payloads sent when a queue slot carries a control
# token ('shutdown'/'restart') rather than a real configuration.
ZERO_ATOMS_DATA = {'REFTRAJ': b' 242 0\n 0\n 0.0000000000000000 0.0000000000000000 0.0000000000000000\n 0.0000000000000000 0.0000000000000000 0.0000000000000000\n 0.0000000000000000 0.0000000000000000 0.0000000000000000\n',
                   'XYZ': b' 2500\nlabel=0 cutoff_factor=1.20000000 nneightol=1.20000000 Lattice="0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000" Properties=species:S:1:pos:R:3:Z:I:1\n'}
CLIENT_TIMEOUT = 60 # seconds to wait for graceful client shutdown
MAX_POS_DIFF = 1.0    # angstrom
MAX_CELL_DIFF = 1e-3  # angstrom
MAX_POS_DIFF_CASTEP = 1.0  # angstrom
MAX_CELL_DIFF_CASTEP = 1.0 # angstrom
def pack_atoms_to_reftraj_str(at, label):
    """Serialise Atoms *at* into a REFTRAJ message (bytes).

    Layout: label, atom count, three cell columns, then scaled positions,
    all newline-terminated and prefixed by an 8-character decimal length.
    """
    pieces = [MSG_INT_FORMAT % label,
              MSG_INT_FORMAT % len(at)]
    for axis in range(3):
        pieces.append((3 * MSG_FLOAT_FORMAT) % tuple(at.cell[:, axis]))
    for frac_pos in at.get_scaled_positions():
        pieces.append((3 * MSG_FLOAT_FORMAT) % tuple(frac_pos))
    payload = '\n'.join(pieces) + '\n'
    # precede message by its length
    header = ('%8d' % len(payload)).encode('ascii')
    return header + payload.encode('ascii')
def pack_atoms_to_xyz_str(at, label):
    """Serialise Atoms *at* into an extended-XYZ message (bytes),
    prefixed by an 8-character decimal length header.

    The calculation *label* is stored in at.info so the client echoes
    it back with the results.
    """
    at.info['label'] = label
    # fix: the module imports the StringIO *class* from io, so
    # StringIO.StringIO() raised AttributeError (py2 remnant)
    buffer = StringIO()
    write_xyz(buffer, at)
    # fix: str(buffer) returned the object's repr, not its contents
    data = buffer.getvalue()
    buffer.close()
    # precede message by its length
    data_length = ('%8d' % len(data)).encode('ascii')
    data = data_length + data.encode('ascii')
    return data
def unpack_reftraj_str_to_atoms(data):
    """Parse a REFTRAJ message (bytes, without the length header) into an
    Atoms object. Inverse of pack_atoms_to_reftraj_str().

    Message layout: line 0 = label, line 1 = atom count, lines 2-4 = cell
    columns, remaining lines = scaled positions.
    """
    lines = data.split(b'\n')
    label = int(lines[0])
    n_atoms = int(lines[1])
    at = Atoms(symbols=[' ']*n_atoms, cell=np.eye(3))
    at.info['label'] = label
    for i in range(3):
        # fix: cell columns start at line 2, after label and atom count;
        # previously lines[i] re-parsed the label/count lines as cell data
        at.cell[:, i] = [float(x) for x in lines[2 + i].split()]
    # fix: positions follow the three cell lines (index 5 onwards), and the
    # slice is bounded so a trailing empty line cannot be parsed
    for i, line in enumerate(lines[5:5 + n_atoms]):
        frac = [float(x) for x in line.split()]
        # convert scaled (fractional) coordinates to Cartesian
        at.positions[i, :] = np.dot(frac, at.cell)
    return at
def pack_results_to_reftraj_output_str(at):
    """Serialise calculation results stored on Atoms *at* (energy, 'force'
    array, 'virial' info entry) into a REFTRAJ output message (bytes),
    prefixed by an 8-character decimal length header.
    """
    data = ''
    data += MSG_INT_FORMAT % len(at) + '\n'
    data += MSG_FLOAT_FORMAT % at.energy + '\n'
    force = at.get_array('force')
    virial = at.info['virial']
    # NOTE(review): assumes `at` provides an `indices` attribute - confirm
    # against the Atoms subclass used by callers
    for i in at.indices:
        data += (3*MSG_FLOAT_FORMAT) % tuple(force[i, :]) + '\n'
    # NB: not in Voigt order (xx, yy, zz, yz, xz, xy)
    data += (6*MSG_FLOAT_FORMAT) % (virial[0,0], virial[1,1], virial[2,2],
                                    virial[0,1], virial[1,2], virial[0,2])
    # precede message by its length
    # fix: use %d and encode the payload - previously the bytes header was
    # concatenated with a str payload, raising TypeError on Python 3
    data_length = ('%8d' % len(data)).encode('ascii')
    data = data_length + data.encode('ascii')
    return data
def unpack_reftraj_output_str_to_results(data):
    """Parse a REFTRAJ results message (bytes) into
    (label, (natoms, energy, force, virial)).

    Message layout: label, atom count, energy, one force line per atom,
    then a final line with six virial components in
    (xx, yy, zz, xy, yz, xz) order - note: NOT Voigt order.
    """
    fields = data.strip().split(b'\n')
    label = int(fields[0])
    natoms = int(fields[1])
    energy = float(fields[2])
    force = np.zeros((natoms, 3))
    for row, line in enumerate(fields[3:-1]):
        force[row, :] = [float(component) for component in line.split()]
    xx, yy, zz, xy, yz, xz = (float(v) for v in fields[-1].split())
    # reconstruct the symmetric 3x3 virial tensor
    virial = np.zeros((3, 3))
    virial[0, 0], virial[1, 1], virial[2, 2] = xx, yy, zz
    virial[0, 1] = virial[1, 0] = xy
    virial[1, 2] = virial[2, 1] = yz
    virial[0, 2] = virial[2, 0] = xz
    return (label, (natoms, energy, force, virial))
def unpack_xyz_str_to_results(data):
    """Parse an extended-XYZ results message into (label, Atoms)."""
    # fix: the module imports the StringIO *class* from io, so
    # StringIO.StringIO(data) raised AttributeError (py2 remnant)
    buffer = StringIO(data)
    # NOTE(review): read_xyz may yield a generator in newer ase versions;
    # callers here treat the return as a single Atoms - confirm ase version
    at = read_xyz(buffer)
    buffer.close()
    label = at.info['label']
    return (label, at)
class AtomsRequestHandler(socketserver.StreamRequestHandler):
    """Handle one socket exchange with a compute client.

    The first MSG_LEN_SIZE bytes carry a one-character request code plus
    the client ID: 'A'/'X' mean the client wants an Atoms configuration
    (REFTRAJ/XYZ format); 'R'/'Y' mean results follow in that format.
    """
    def handle(self):
        ip, port = self.client_address
        task = None
        # receive request code and client ID
        request_str = self.rfile.read(MSG_LEN_SIZE)
        request = request_str[0]
        client_id = int(request_str[1:])
        if client_id > self.server.njobs-1:
            raise RuntimeError('Unknown client ID %d outside of range 0 < ID < %d' %
                               (client_id, self.server.njobs-1))
        self.server.logger.pr('"%s" request from %s:%d client %d' % (chr(request), ip, port, client_id))
        #print 'input queue lengths ', ''.join(['%d:%d ' % (i,q.qsize()) for (i,q) in enumerate(input_qs)])
        #print 'output queue length %d' % output_q.qsize()
        if request in ATOMS_REQUESTS:
            # client is ready for Atoms (in either REFTRAJ or XYZ format)
            data, fmt, label, at = self.server.input_qs[client_id].get()
            assert ATOMS_REQUESTS[request] == fmt
            if data == b'shutdown' or data == b'restart':
                # queue slot carried a control token rather than a real
                # configuration: remember it and send a harmless dummy payload
                task = data
                data = ZERO_ATOMS_DATA[fmt]
            self.wfile.write(data)
        elif request in RESULTS_REQUESTS:
            # results are available from client in REFTRAJ or XYZ format
            data_size = int(self.rfile.read(MSG_LEN_SIZE))
            data = self.rfile.read(data_size)
            fmt = RESULTS_REQUESTS[request]
            self.server.output_q.put((client_id, fmt, data))
            # marks the corresponding put() on this client's queue as done
            self.server.input_qs[client_id].task_done()
        else:
            raise RuntimeError('Unknown request code "%s"' % request)
        # say goodbye to this client
        self.wfile.write(MSG_END_MARKER)
        if (request == ord('A') or request == ord('X')) and task == b'restart':
            # if we're restarting a client, get the next thing out of the queue
            # and re-initialise. Restart won't do anything until shutdown
            # of old client has completed.
            data, fmt, label, at = self.server.input_qs[client_id].get()
            self.server.logger.pr('"%s" request from client %d triggering restart for calculation with label %d' %
                                  (request, client_id, label))
            self.server.clients[client_id].start_or_restart(at, label, restart=True)
class AtomsServerSync(socketserver.TCPServer):
    """Synchronous TCP server farming Atoms calculations out to a fixed
    pool of Client jobs.

    Configurations are queued with put(); results are collected with
    get_results(), which resubmits unconverged calculations up to
    max_attempts times. One request at a time is dispatched via
    handle_request() / AtomsRequestHandler.
    """
    allow_reuse_address = True

    def __init__(self, server_address, RequestHandlerClass, clients,
                 bind_and_activate=True, max_attempts=3, bgq=False, logger=screen):
        """
        server_address -- (host, port) tuple to bind to
        clients        -- sequence of Client instances; one input queue
                          is created per client
        max_attempts   -- how many times get_results() retries failed jobs
        bgq            -- True when running on IBM Blue Gene/Q
        """
        self.njobs = len(clients)
        # allow up to twice as many threads as sub-block jobs
        self.request_queue_size = 2*self.njobs
        self.max_attempts = max_attempts
        self.bgq = bgq # If True, we're running on IBM Blue Gene/Q platform
        self.logger = logger
        socketserver.TCPServer.__init__(self,
                                        server_address,
                                        RequestHandlerClass,
                                        bind_and_activate)
        self.clients = clients
        for client in self.clients:
            client.server = self # FIXME circular reference
        # record all input in the order in which it is put()
        self.input_q = Queue()
        # we also need an input Queue for each client: this is so that we can
        # exploit wavefunction reuse by sending consecutive clusters belonging
        # to the same atom to the same QM partition
        self.input_qs = [Queue() for i in range(self.njobs)]
        self.output_q = Queue()

    def server_activate(self):
        """Activate the socket and record the (ip, port) clients should use."""
        socketserver.TCPServer.server_activate(self)
        self.ip, self.port = self.server_address
        if self.bgq:
            # If we're on a Blue Gene, note that IP address returned
            # by server.server_address is not the correct one for CNs
            # to talk to FEN, so we discard it, and use the InfiniBand
            # address returned by get_hostname_ip()
            import bgqtools
            hostname, self.ip = bgqtools.get_hostname_ip()
        else:
            hostname = socket.gethostname()
        self.logger.pr('AtomsServer running on %s %s:%d with njobs=%d' %
                       (hostname, self.ip, self.port, self.njobs))

    def shutdown_clients(self):
        """Queue shutdown requests for all running clients and wait for them."""
        self.logger.pr('shutting down all clients')
        wait_threads = []
        for client_id, client in enumerate(self.clients):
            # only clients that are running and not already shutting down
            # (fix: Thread.isAlive() was removed in Python 3.9)
            if (client.process is not None and client.process.poll() is None and
                (client.wait_thread is None or not client.wait_thread.is_alive())):
                wait_threads.append(client.shutdown(block=False))
                self.handle_request() # dispatch the shutdown request via socket
        # wait for them all to finish shutting down
        for wait_thread in wait_threads:
            if wait_thread is None or not wait_thread.is_alive():
                continue
            wait_thread.join()
        self.logger.pr('all client shutdowns complete')

    def shutdown(self):
        """Shut down all clients, then close the listening socket."""
        self.shutdown_clients()
        self.server_close()

    def put(self, at, client_id, label, force_restart=False):
        """Queue configuration *at* (tagged *label*) for client *client_id*."""
        self.logger.pr('Putting Atoms to client %d label %d' % (client_id, label))
        # allow client to modify atoms (e.g. sort them)
        at, fmt, first_time = self.clients[client_id].preprocess(at, label, force_restart)
        # store what we actually did - `at` may have been modified by preprocess()
        self.input_q.put((label, client_id, at))
        if fmt == 'REFTRAJ':
            data = pack_atoms_to_reftraj_str(at, label)
        elif fmt == 'XYZ':
            data = pack_atoms_to_xyz_str(at, label)
        else:
            raise ValueError('Unknown format "%s"' % fmt)
        self.input_qs[client_id].put((data, fmt, label, at))
        if first_time:
            # throw away what we just put(), as it's in the input files.
            # note that we don't call task_done() until results come in
            discard = self.input_qs[client_id].get()

    def join_all(self):
        """Block until every per-client input queue has been drained."""
        self.logger.pr('AtomsServer waiting for input queues to empty')
        for input_q in self.input_qs:
            input_q.join()
        self.logger.pr('all AtomsServer queues drained.')

    def get_results(self):
        """Collect results for everything queued with put().

        Unconverged calculations (negative labels) are resubmitted up to
        max_attempts times. Returns a list of Atoms, ordered by label,
        carrying energy/force/virial results.
        """
        self.logger.pr('AtomsServer getting results')
        results = {}
        for attempt in range(self.max_attempts):
            rejects = []
            self.join_all()
            self.logger.pr('AtomsServer.get_results() attempt %d of %d jobs finished' %
                           (attempt+1, self.max_attempts))
            while self.output_q.unfinished_tasks:
                client_id, fmt, data = self.output_q.get()
                if fmt == 'REFTRAJ':
                    label, res = unpack_reftraj_output_str_to_results(data)
                elif fmt == 'XYZ':
                    label, res = unpack_xyz_str_to_results(data)
                else:
                    raise ValueError('get_results() got unknown format "%s"' % fmt)
                if label > 0: # WARNING: labels must start from 1, or first calc never passes test
                    # calculation converged, save the results
                    self.logger.pr('calculation label %d client %d CONVERGED' % (label, client_id))
                    results[label] = res
                else:
                    # calculation did not converge, we need to repeat it
                    self.logger.pr('calculation label %d client %d DID NOT CONVERGE' % (label, client_id))
                    rejects.append(-label)
                self.output_q.task_done()
            self.logger.pr('AtomsServer.get_results() rejects=%r' % rejects)
            self.logger.pr('AtomsServer.get_results() sorted(results.keys())=%r' % sorted(results.keys()))
            # collect all input task so we can see if anything is missing
            input = {}
            while self.input_q.unfinished_tasks:
                label, client_id, at = self.input_q.get()
                input[label] = (client_id, at)
                self.input_q.task_done()
            self.logger.pr('AtomsServer.get_results() sorted(input.keys())=%r' % sorted(input.keys()))
            # resubmit any failed calculations
            for label in rejects:
                client_id, at = input[label]
                self.logger.pr('Resubmiting calculation label %d client_id %d' % (label, client_id))
                self.put(at, client_id, label, force_restart=True)
            assert len(results) + len(rejects) == len(input)
            # if all calculations converged we are done
            if len(rejects) == 0:
                break
        else:
            raise RuntimeError('max_attempts (%d) exceeded without all calculations completing successfully' %
                               self.max_attempts)
        assert(len(results) == len(input))
        assert(len(rejects) == 0)
        results_atoms = []
        for (inp_label, label) in zip(sorted(input.keys()), sorted(results.keys())):
            assert inp_label == label
            client_id, inp_at = input[inp_label]
            res = results[label]
            if isinstance(res, Atoms):
                # XYZ format results arrive as a complete Atoms object
                at = res
            else:
                # REFTRAJ format results are raw arrays; attach them to a
                # copy of the input configuration
                (natoms, energy, force, virial) = res
                assert len(inp_at) == natoms
                at = inp_at.copy() # FIXME could possibly store results inplace, but need to think about sorting
                at.info['label'] = label
                at.info['energy'] = energy
                at.set_array('force', force)
                at.info['virial'] = virial
            # allow client to modify results (e.g. reverse sort order)
            at = self.clients[client_id].postprocess(at, label)
            results_atoms.append(at)
        self.logger.pr('AtomsServer processed %d results' % len(results))
        return results_atoms
class AtomsServerAsync(socketserver.ThreadingMixIn, AtomsServerSync):
    """
    Asynchronous (threaded) version of AtomsServer

    fix: ThreadingMixIn must precede the server base class in the MRO,
    otherwise TCPServer.process_request() shadows the mixin's threaded
    override and requests are actually handled synchronously (cf. the
    stdlib's ThreadingTCPServer definition).
    """
    def shutdown(self):
        """Shut down clients, then stop serve_forever()."""
        self.shutdown_clients()
        return socketserver.TCPServer.shutdown(self)

    def shutdown_clients(self):
        """Queue shutdown requests for all running clients and wait.

        Unlike the synchronous server, no handle_request() call is needed:
        the serve_forever() loop dispatches the queued shutdown requests.
        """
        self.logger.pr('shutting down all clients')
        wait_threads = []
        for client_id, client in enumerate(self.clients):
            # fix: Thread.isAlive() was removed in Python 3.9
            if (client.process is not None and client.process.poll() is None and
                (client.wait_thread is None or not client.wait_thread.is_alive())):
                wait_threads.append(client.shutdown(block=False))
        # wait for them all to finish shutting down
        for wait_thread in wait_threads:
            if wait_thread is None or not wait_thread.is_alive():
                continue
            wait_thread.join()
        self.logger.pr('all client shutdowns complete')
AtomsServer = AtomsServerAsync # backwards compatibility: historical name
class Client(object):
    """
    Represents a single Client job

    Used by AtomsServer to start, restart and shutdown clients
    running on the Compute Nodes.
    """
    def __init__(self, client_id, exe, env=None, npj=1, ppn=1,
                 block=None, corner=None, shape=None,
                 jobname='socketcalc', rundir=None,
                 fmt='REFTRAJ', parmode=None, mpirun='mpirun',
                 mpirun_args=None, logger=screen,
                 max_pos_diff=MAX_POS_DIFF,
                 max_cell_diff=MAX_CELL_DIFF):
        """
        client_id -- index of this client, 0 <= client_id < njobs
        exe       -- path to the client executable
        fmt       -- wire format, 'REFTRAJ' or 'XYZ'
        parmode   -- None, 'mpi' or 'cobalt' (Blue Gene runjob)
        """
        self.client_id = client_id
        self.process = None # handle for the runjob process
        self.log = None # stdout file
        self.wait_thread = None # used by shutdown(block=False)
        self.last_atoms = None # used to check if we can continue from previous task
        self.lock = threading.Lock() # avoid concurrency issues
        if env is None:
            env = {}
        self.env = env # environment
        self.exe = exe # executable
        self.npj = npj # nodes per job
        self.ppn = ppn # processes per node
        self.block, self.corner, self.shape = block, corner, shape
        self.jobname = jobname
        self.fmt = fmt
        self.parmode = parmode
        self.mpirun = mpirun
        # fix: avoid mutable default argument; behaviour unchanged
        self.mpirun_args = ['-np'] if mpirun_args is None else mpirun_args
        self.logger = logger
        self.max_pos_diff = max_pos_diff
        self.max_cell_diff = max_cell_diff
        self.rundir = rundir or os.getcwd()
        self.subdir = os.path.join(self.rundir, '%s-%03d' % (jobname, self.client_id))
        if not os.path.exists(self.subdir):
            self.logger.pr('Making subdir %s' % self.subdir)
            os.mkdir(self.subdir)

    def extra_args(self, label=None):
        """
        Return list of additional command line arguments to be passed to client
        """
        args = [self.server.ip, str(self.server.port), str(self.client_id)]
        if label is not None:
            args.append(str(label))
        return args

    def start(self, label=None):
        """
        Start an individual client.

        Raises RuntimeError if this client is already running.
        """
        if self.process is not None:
            # fix: referenced undefined local `client_id` (NameError)
            raise RuntimeError('client %d is already running' % self.client_id)
        runjob_args = []
        popen_args = {}
        if self.parmode == 'cobalt':
            # Convert env to "--envs KEY=value" arguments for runjob
            envargs = []
            # fix: dict.iteritems() does not exist on Python 3
            for (k, v) in self.env.items():
                envargs.extend(['--envs', '%s=%s' % (k, v)])
            runjob_args += ['runjob', '--block', self.block]
            if self.corner is not None:
                runjob_args += ['--corner', self.corner]
            if self.shape is not None:
                runjob_args += ['--shape', self.shape]
            runjob_args += (['-n', str(self.npj*self.ppn), '-p', str(self.ppn)] + envargs +
                            ['--cwd', self.subdir, ':'])
        elif self.parmode == 'mpi':
            runjob_args += [self.mpirun]
            for mpirun_arg in self.mpirun_args:
                runjob_args += [mpirun_arg]
                # process-count flags get the total MPI process count appended
                if mpirun_arg in ['-n', '-np']:
                    runjob_args += [str(self.npj*self.ppn)]
            popen_args['cwd'] = self.subdir
            popen_args['env'] = os.environ # for mpi, let mpirun inherit environment of script
        else:
            popen_args['cwd'] = self.subdir
            popen_args['env'] = self.env
        runjob_args += [self.exe]
        runjob_args += self.extra_args(label)
        self.logger.pr('starting client %d args %r' % (self.client_id, runjob_args))
        self.log = open(os.path.join(self.rundir, '%s-%03d.output' % (self.jobname, self.client_id)), 'a')
        # send stdout and stderr to same file
        self.process = subprocess.Popen(runjob_args, stdout=self.log, stderr=self.log, **popen_args)

    def shutdown(self, block=True):
        """Request a client to shutdown.

        If block=True, does not return until shutdown is complete. If
        block=False, waits for the client to shutdown in a new
        thread. Check self.wait_thread.is_alive() to see when shutdown
        has finished. (This function also returns a handle to the wait
        thread when block=False).
        """
        if self.process is None:
            self.logger.pr('client %d (requested to shutdown) has never been started' % self.client_id)
            return
        if self.process.poll() is not None:
            self.logger.pr('client %d is already shutdown' % self.client_id)
            return
        # fix: Thread.isAlive() was removed in Python 3.9
        if (self.wait_thread is not None and self.wait_thread.is_alive()):
            raise RuntimeError('client %d is already in the process of shutting down' % self.client_id)
        input_q = self.server.input_qs[self.client_id]
        input_q.put((b'shutdown', self.fmt, -1, None))
        if block:
            self.wait_for_shutdown()
        else:
            self.wait_thread = threading.Thread(target=self.wait_for_shutdown)
            self.wait_thread.start()
            return self.wait_thread

    def wait_for_shutdown(self):
        """
        Block until a client has shutdown.

        Typically called automatically by shutdown() or
        start_or_restart().

        Shutdown should previously have been initiated by queuing a
        'shutdown' or 'restart' request. Waits CLIENT_TIMEOUT for
        graceful shutdown. If client is still alive, a SIGTERM signal
        is sent. If this has had no effect after a further
        CLIENT_TIMEOUT, then a SIGKILL is sent. Does not return until
        the SIGKILL has taken effect.

        This function also marks shutdown task as complete in
        servers's input_q for this client.
        """
        wait_thread = threading.Thread(target=self.process.wait)
        self.logger.pr('waiting for client %d to shutdown' % self.client_id)
        wait_thread.start()
        wait_thread.join(CLIENT_TIMEOUT)
        # fix: Thread.isAlive() was removed in Python 3.9
        if wait_thread.is_alive():
            self.logger.pr('client %d did not shutdown gracefully in %d seconds - sending SIGTERM' %
                           (self.client_id, CLIENT_TIMEOUT))
            self.process.terminate()
            wait_thread.join(CLIENT_TIMEOUT)
            if wait_thread.is_alive():
                self.logger.pr('client %d did not respond to SIGTERM - sending SIGKILL' % self.client_id)
                self.process.kill()
                wait_thread.join() # no timeout for kill
            else:
                self.logger.pr('client %d responded to SIGTERM' % self.client_id)
        else:
            self.logger.pr('client %d shutdown within timeout' % self.client_id)
        self.logger.pr('client %d shutdown complete - exit code %r' % (self.client_id, self.process.poll()))
        self.log.close()
        self.process = None
        self.log = None
        self.server.input_qs[self.client_id].task_done()
        self.logger.pr('wait_for_shutdown done')

    def start_or_restart(self, at, label, restart=False):
        """
        Start or restart a client

        If restart=True, wait for previous client to shutdown first.
        Calls write_input_files() followed by start().
        """
        if restart:
            self.wait_for_shutdown()
        self.write_input_files(at, label)
        self.start(label)

    def preprocess(self, at, label, force_restart=False):
        """
        Prepare client for a calculation.

        Starts client if this is the first task for it, or schedules a
        restart if new configuration is not compatible with the last
        one submitted to the queue (see is_compatible() method).

        May be extended in subclasses to e.g. sort the atoms by
        atomic number. If Atoms object needs to be changed, a copy
        should be returned rather than updating it inplace.

        Returns (at, fmt, first_time).
        """
        first_time = self.process is None
        restart_reqd = (not first_time and (force_restart or
                                            (not self.is_compatible(self.last_atoms, at, label))))
        # keep a copy of last config queued for this client.
        # acquire a lock in case multiple put() calls to the same client
        # occur concurrently.
        try:
            self.lock.acquire()
            self.last_atoms = at.copy()
        finally:
            self.lock.release()
        if restart_reqd:
            # put a shutdown command into the queue, ahead of this config.
            # once it gets completed, restart_client() will be called as below
            self.logger.pr('restart scheduled for client %d label %d' % (self.client_id, label))
            self.server.input_qs[self.client_id].put((b'restart', self.fmt, -1, None))
        if first_time:
            self.start_or_restart(at, label, restart=False)
        return at, self.fmt, first_time

    def postprocess(self, at, label):
        """
        Post-process results of calculation.

        May be overrriden in subclasses to e.g. reverse sort order
        applied in preprocess().
        """
        return at

    def is_compatible(self, old_at, new_at, label):
        """
        Check if new_at and old_at are compatible.

        Returns True if calculation can be continued, or False
        if client must be restarted before it can process new_at.
        """
        if old_at is None:
            return True
        return True

    def write_input_files(self, at, label):
        raise NotImplementedError('to be implemented in subclasses')
class QUIPClient(Client):
    """
    Subclass of Client for running QUIP calculations.

    Initial input files are written in extended XYZ format, and
    subsequent communication is via sockets, in either REFTRAJ
    or XYZ format.
    """
    def __init__(self, client_id, exe, env=None, npj=1, ppn=1,
                 block=None, corner=None, shape=None,
                 jobname='socketcalc', rundir=None,
                 fmt='REFTRAJ', parmode=None, mpirun='mpirun',
                 mpirun_args=['-np'], logger=screen,
                 max_pos_diff=MAX_POS_DIFF,
                 max_cell_diff=MAX_CELL_DIFF,
                 param_files=None):
        # param_files: optional list of QUIP parameter files to copy into
        # the client's run subdirectory before each start
        Client.__init__(self, client_id, exe, env, npj, ppn,
                        block, corner, shape, jobname, rundir, fmt, parmode,
                        mpirun, mpirun_args, logger, max_pos_diff,
                        max_cell_diff)
        self.param_files = param_files

    def write_input_files(self, at, label):
        # write the initial configuration for this client as extended XYZ
        write_xyz(os.path.join(self.subdir, 'atoms.%d.xyz' % self.client_id), at)
        # copy in parameter files
        if self.param_files is not None:
            for param_file in self.param_files:
                param_file_basename = os.path.basename(param_file)
                shutil.copyfile(param_file, os.path.join(self.subdir, param_file_basename))
_chdir_lock = threading.Lock()
class QMClient(Client):
    """
    Abstract subclass of Client for QM calculations.

    Adds an is_compatible() check: a running QM client can only continue
    with a new configuration if the number of atoms, species, cell and
    (approximately) the positions match the previous configuration.
    """

    def is_compatible(self, old_at, new_at, label):
        """
        Return True if the client can continue from old_at to new_at,
        False if it must be restarted first.
        """
        # first time, anything goes
        if old_at is None:
            return True
        if not Client.is_compatible(self, old_at, new_at, label):
            return False
        if len(old_at) != len(new_at):
            self.logger.pr('is_compatible() on client %d label %d got number of atoms mismatch: %d != %d' %
                           (self.client_id, label, len(old_at), len(new_at)))
            return False  # number of atoms must match
        if abs(old_at.cell - new_at.cell).max() > self.max_cell_diff:
            self.logger.pr('is_compatible() on client %d label %d got cell mismatch: %r != %r' %
                           (self.client_id, label, old_at.cell, new_at.cell))
            return False  # cells must match

        # RMS difference in positions must be less than max_pos_diff
        old_p = old_at.get_positions()
        new_p = new_at.get_positions()
        old_z = old_at.get_chemical_symbols()
        new_z = new_at.get_chemical_symbols()

        if 'index' in old_at.arrays:
            old_index = old_at.get_array('index')
            new_index = new_at.get_array('index')

            # if termination exists, undo ordering differences due to cluster hopping
            if ('termindex_%d' % self.client_id) in old_at.arrays:
                old_termindex = old_at.get_array('termindex_%d' % self.client_id)
                new_termindex = new_at.get_array('termindex_%d' % self.client_id)
                # Bug fix: the original filtered with `if old_termindex == 0`
                # (the whole array), which is ambiguous for NumPy arrays and
                # does not select individual atoms; index per-atom instead.
                a1s = sorted([(old_index[i], old_z[i], list(old_p[i]))
                              for i in range(len(old_at)) if old_termindex[i] == 0])
                a2s = sorted([(new_index[i], new_z[i], list(new_p[i]))
                              for i in range(len(new_at)) if new_termindex[i] == 0])
            else:
                a1s = sorted([(old_index[i], old_z[i], list(old_p[i])) for i in range(len(old_at))])
                a2s = sorted([(new_index[i], new_z[i], list(new_p[i])) for i in range(len(new_at))])

            # rebuild positions/symbols in the canonical (index-sorted) order
            old_p = np.r_[[p for (i, z, p) in a1s]]
            new_p = np.r_[[p for (i, z, p) in a2s]]
            old_z = np.r_[[z for (i, z, p) in a1s]]
            new_z = np.r_[[z for (i, z, p) in a2s]]

        if not np.all(old_z == new_z):
            self.logger.pr('is_compatible() on client %d label %d got atomic number mismatch: %r != %r' %
                           (self.client_id, label, old_z, new_z))
            return False  # atomic numbers must match

        # undo jumps across PBC - approach is that of QUIP's undo_pbc_jumps() routine
        old_g = np.linalg.inv(old_at.cell.T).T
        d = new_p.T - old_p.T - (np.dot(old_at.cell, np.floor(np.dot(old_g, (new_p - old_p).T) + 0.5)))
        rms_diff = np.sqrt((d**2).mean())
        self.logger.pr('is_compatible() on client %d label %d got RMS position difference %.3f' %
                       (self.client_id, label, rms_diff))
        if rms_diff > self.max_pos_diff:
            self.logger.pr('is_compatible() on client %d label %d got RMS position difference %.3f > max_pos_diff=%.3f' %
                           (self.client_id, label, rms_diff, self.max_pos_diff))
            return False
        return True
class VaspClient(QMClient):
    """
    Subclass of Client for running VASP calculations.

    Initial input files are written in POSCAR, INCAR, POTCAR and KPOINTS
    formats, and subsequent communication is via sockets in REFTRAJ format.
    """

    def __init__(self, client_id, exe, env=None, npj=1, ppn=1,
                 block=None, corner=None, shape=None,
                 jobname='socketcalc', rundir=None,
                 fmt='REFTRAJ', parmode=None, mpirun='mpirun',
                 mpirun_args=None, logger=screen,
                 max_pos_diff=MAX_POS_DIFF,
                 max_cell_diff=MAX_CELL_DIFF,
                 **vasp_args):
        # Bug fix: the default used to be the mutable literal ['-np'],
        # shared between all calls; create a fresh list per call.
        if mpirun_args is None:
            mpirun_args = ['-np']
        Client.__init__(self, client_id, exe, env, npj, ppn,
                        block, corner, shape, jobname, rundir,
                        fmt, parmode, mpirun, mpirun_args, logger,
                        max_pos_diff, max_cell_diff)
        # MD-driver settings applied unless the caller overrides them;
        # presumably required by the socket-enabled VASP build — confirm
        if 'ibrion' not in vasp_args:
            self.logger.pr('No ibrion key in vasp_args, setting ibrion=13')
            vasp_args['ibrion'] = 13
        if 'nsw' not in vasp_args:
            self.logger.pr('No nsw key in vasp_args, setting nsw=1000000')
            vasp_args['nsw'] = 1000000
        self.vasp_args = vasp_args

    def preprocess(self, at, label, force_restart=False):
        """Sort atoms the way Vasp writes POSCAR and record the sort order."""
        self.logger.pr('vasp client %d preprocessing atoms label %d' % (self.client_id, label))
        # make a copy and then sort atoms in the same way that vasp
        # calculator will when it writes POSCAR. We use a new
        # calculator and store the sort order in the Atoms so it can
        # be reversed when results are ready.
        vasp = Vasp(**self.vasp_args)
        vasp.initialize(at)
        at = at.copy()
        order = np.arange(len(at))
        at.set_array('vasp_sort_order', order)
        at = at[vasp.resort]
        # finally, call the parent method
        return Client.preprocess(self, at, label, force_restart)

    def postprocess(self, at, label):
        """Undo the POSCAR sort applied in preprocess()."""
        self.logger.pr('vasp client %d postprocessing atoms label %d' % (self.client_id, label))
        # call the parent method first
        at = Client.postprocess(self, at, label)
        # restore original atom ordering
        at = at[at.arrays['vasp_sort_order'].tolist()]
        return at

    def write_input_files(self, at, label):
        """Write POSCAR/INCAR/POTCAR/KPOINTS in this client's subdirectory."""
        global _chdir_lock
        # For LOTF Simulations active number of quantum
        # atoms vary and must wait to this stage in order for
        # magnetic moments to be set properly. If magnetic moments
        # not set defaults to 0.
        self.vasp_args['magmom'] = at.get_initial_magnetic_moments()
        vasp = Vasp(**self.vasp_args)
        vasp.initialize(at)
        # chdir changes the working directory process-wide and is not
        # thread safe, so hold the global lock while we are away from
        # the original directory
        orig_dir = os.getcwd()
        with _chdir_lock:
            try:
                os.chdir(self.subdir)
                if os.path.exists('OUTCAR'):
                    # keep previous outputs as OUTCAR.n / POSCAR.n backups
                    n = 1
                    while os.path.exists('OUTCAR.%d' % n):
                        n += 1
                    shutil.copyfile('OUTCAR', 'OUTCAR.%d' % n)
                    shutil.copyfile('POSCAR', 'POSCAR.%d' % n)
                write_vasp('POSCAR', vasp.atoms_sorted,
                           symbol_count=vasp.symbol_count,
                           vasp5='5' in self.exe)
                vasp.write_incar(at)
                vasp.write_potcar()
                vasp.write_kpoints()
            finally:
                os.chdir(orig_dir)
class CastepClient(QMClient):
    """
    Subclass of Client for running CASTEP calculations.

    Initial input files are written in .cell and .param
    formats, and subsequent communication is via sockets in REFTRAJ format.
    """

    def __init__(self, client_id, exe, env=None, npj=1, ppn=1,
                 block=None, corner=None, shape=None,
                 jobname='socketcalc', rundir=None,
                 fmt='REFTRAJ', parmode=None, mpirun='mpirun',
                 mpirun_args=None, logger=screen,
                 max_pos_diff=MAX_POS_DIFF_CASTEP,
                 max_cell_diff=MAX_CELL_DIFF_CASTEP,
                 **castep_args):
        # Bug fix: the default used to be the mutable literal ['-np'],
        # shared between all calls; create a fresh list per call.
        if mpirun_args is None:
            mpirun_args = ['-np']
        Client.__init__(self, client_id, exe, env, npj, ppn,
                        block, corner, shape, jobname, rundir,
                        fmt, parmode, mpirun, mpirun_args, logger,
                        max_pos_diff, max_cell_diff)
        # MD-driver settings applied unless the caller overrides them;
        # presumably required by the socket-enabled CASTEP build — confirm
        if 'task' not in castep_args:
            self.logger.pr('No task key in castep_args, setting task=MD')
            castep_args['task'] = 'MD'
        if 'md_ensemble' not in castep_args:
            self.logger.pr('No md_ensemble key in castep_args, setting md_ensemble=SKT')
            castep_args['md_ensemble'] = 'SKT'
        if 'md_num_iter' not in castep_args:
            self.logger.pr('No md_num_iter key in castep_args, setting md_num_iter=1000000')
            castep_args['md_num_iter'] = 1000000
        castep_args['_rename_existing_dir'] = False
        self.castep_args = castep_args
        self.logger.pr('constructing Castep instance with args %r' % castep_args)
        self.castep = Castep(directory=self.subdir, **castep_args)
        # preserve any user-supplied devel_code so the socket connection
        # settings can be appended to it in write_input_files()
        self._orig_devel_code = ''
        if self.castep.param.devel_code.value is not None:
            self._orig_devel_code = self.castep.param.devel_code.value.strip()+'\n'

    def preprocess(self, at, label, force_restart=False):
        """Sort atoms by atomic number, as CASTEP does internally, and
        record the sort order so postprocess() can undo it."""
        self.logger.pr('Castep client %d preprocessing atoms label %d' % (self.client_id, label))
        at = at.copy()
        order = np.arange(len(at))
        at.set_array('castep_sort_order', order)
        resort = order[np.argsort(at.get_atomic_numbers())]
        at = at[resort]
        # finally, call the parent method (potentially writing input files)
        return Client.preprocess(self, at, label, force_restart)

    def postprocess(self, at, label):
        """Undo the atomic-number sort applied in preprocess()."""
        self.logger.pr('Castep client %d postprocessing atoms label %d' % (self.client_id, label))
        # call the parent method first
        at = Client.postprocess(self, at, label)
        # restore original atom ordering
        at = at[at.arrays['castep_sort_order'].tolist()]
        return at

    def write_input_files(self, at, label):
        """Write castep.cell and castep.param in this client's subdirectory."""
        global _chdir_lock
        # append the socket connection details to any user devel_code
        devel_code = self._orig_devel_code
        devel_code += ('SOCKET_IP=%s\nSOCKET_PORT=%d\nSOCKET_CLIENT_ID=%d\nSOCKET_LABEL=%d' %
                       (self.server.ip, self.server.port, self.client_id, label))
        self.castep.param.devel_code = devel_code

        # chdir changes the working directory process-wide and is not
        # thread safe, so hold the global lock while we are away from
        # the original directory
        orig_dir = os.getcwd()
        with _chdir_lock:
            try:
                os.chdir(self.subdir)
                with open('castep.cell', 'w') as cellf:
                    write_castep_cell(cellf, at, castep_cell=self.castep.cell)
                write_param('castep.param', self.castep.param, force_write=True)
            finally:
                os.chdir(orig_dir)

    def extra_args(self, label=None):
        # extra command line argument: the CASTEP seed name
        return ['castep']
class SocketCalculator(Calculator):
    """
    ASE-compatible calculator which communicates with remote
    force engines via sockets using a (synchronous) AtomsServer.
    """

    # always computed, regardless of which properties were requested
    implemented_properties = ['energy', 'forces', 'stress']
    default_parameters = {}
    name = 'SocketCalculator'

    def __init__(self, client, ip=None, atoms=None, port=0, logger=screen, bgq=False):
        # client: Client instance that runs the remote force engine
        # ip/port: bind address for the synchronous server; port=0 lets
        #          the OS pick a free port
        Calculator.__init__(self)
        self.client = client
        if ip is None:
            ip = '127.0.0.1' # default to localhost
        self.logger = logger
        self.bgq=bgq
        # single-client synchronous server handling the socket protocol
        self.server = AtomsServerSync((ip, port), AtomsRequestHandler,
                                      [self.client], logger=self.logger,
                                      bgq=self.bgq)
        # monotonically increasing label identifying each calculation
        self._label = 1
        self.atoms = atoms

    def calculate(self, atoms, properties, system_changes):
        """Trigger a remote calculation whenever anything changed."""
        Calculator.calculate(self, atoms, properties, system_changes)
        if system_changes: # if anything at all changed (could be made more fine-grained)
            self.logger.pr('calculation triggered with properties={0}, system_changes={1}'.format(properties,
                                                                                                  system_changes))
            # queue the configuration for client 0 (the only client)
            self.server.put(atoms, 0, self._label)
            if self._label != 1:
                # send atoms over socket, unless first time
                # (presumably the first configuration reaches the client via
                # its initial input files — confirm against Client.preprocess)
                self.logger.pr('socket calculator sending Atoms label={0}'.format(self._label))
                self.server.handle_request()
            # wait for results to be ready
            self.logger.pr('socket calculator waiting for results label={0}'.format(self._label))
            self.server.handle_request()
            self._label += 1
            [results] = self.server.get_results()
            # we always compute energy, forces and stresses, regardless of what was requested
            stress = -(results.info['virial']/results.get_volume())
            self.results = {'energy': results.info['energy'],
                            'forces': results.arrays['force'],
                            'stress': full_3x3_to_Voigt_6_stress(stress)}
        else:
            self.logger.pr('calculation avoided with properties={0}, system_changes={1}'.format(properties,
                                                                                                system_changes))

    def shutdown(self):
        """Shut down the underlying AtomsServer."""
        self.server.shutdown()
| libAtoms/matscipy | matscipy/socketcalc.py | Python | lgpl-2.1 | 41,228 | [
"ASE",
"CASTEP",
"Matscipy",
"VASP"
] | 61aa1cbb3ad98aecd1ddd35ae78ab7b3c549f26f8c432be0a0de306c0d0ec082 |
"""
DIRAC - Distributed Infrastructure with Remote Agent Control
The distributed data production and analysis system of LHCb and other VOs.
DIRAC is a software framework for distributed computing which
allows to integrate various computing resources in a single
system. At the same time it integrates all kinds of computing
activities like Monte Carlo simulations, data processing, or
final user analysis.
It is build as number of cooperating systems:
- Accounting
- Configuration
- Core
- Base
- DISET
- Security
- Utilities
- Workflow
- Framework
- RequestManagement
- Resources
- Transformation
Which are used by other system providing functionality to
the end user:
- DataManagement
- Interfaces
- ResourceStatus
- StorageManagement
- WorkloadManagement
It defines the following data members:
- majorVersion: DIRAC Major version number
- minorVersion: DIRAC Minor version number
- patchLevel: DIRAC Patch level number
- preVersion: DIRAC Pre release number
- version: DIRAC version string
- buildVersion: DIRAC version string
- errorMail: mail address for important errors
- alarmMail: mail address for important alarms
- pythonPath: absolute real path to the directory that contains this file
- rootPath: absolute real path to the parent of DIRAC.pythonPath
It loads Modules from :
- DIRAC.Core.Utililies
It loads:
- S_OK: OK return structure
- S_ERROR: ERROR return structure
- gLogger: global Logger object
- gConfig: global Config object
It defines the following functions:
- abort: aborts execution
- exit: finish execution using callbacks
- siteName: returns DIRAC name for current site
- getPlatform(): DIRAC platform string for current host
- getPlatformTuple(): DIRAC platform tuple for current host
"""
import sys
import os
import platform as pyPlatform
from pkgutil import extend_path
__path__ = extend_path( __path__, __name__ )
__RCSID__ = "$Id$"
# Define Version
majorVersion = 6
minorVersion = 20
patchLevel = 0
preVersion = 17
version = "v%sr%s" % ( majorVersion, minorVersion )
buildVersion = "v%dr%d" % ( majorVersion, minorVersion )
if patchLevel:
version = "%sp%s" % ( version, patchLevel )
buildVersion = "%s build %s" % ( buildVersion, patchLevel )
if preVersion:
version = "%s-pre%s" % ( version, preVersion )
buildVersion = "%s pre %s" % ( buildVersion, preVersion )
# Check of python version: this DIRAC release supports Python 2.7 only
__pythonMajorVersion = ( "2", )
# Bug fix: this was ( "7" ), which is a plain string rather than a
# one-element tuple (missing trailing comma), inconsistent with
# __pythonMajorVersion above.
__pythonMinorVersion = ( "7", )
pythonVersion = pyPlatform.python_version_tuple()
if str( pythonVersion[0] ) not in __pythonMajorVersion or str( pythonVersion[1] ) not in __pythonMinorVersion:
  # single-argument print() works identically under Python 2 and 3
  print( "Python Version %s not supported by DIRAC" % pyPlatform.python_version() )
  print( "Supported versions are: " )
  for major in __pythonMajorVersion:
    for minor in __pythonMinorVersion:
      print( "%s.%s.x" % ( major, minor ) )
  sys.exit( 1 )
# contact addresses for important errors and alarms
errorMail = "dirac.alarms@gmail.com"
alarmMail = "dirac.alarms@gmail.com"

# Set rootPath of DIRAC installation
pythonPath = os.path.realpath( __path__[0] )
rootPath = os.path.dirname( pythonPath )

# Import DIRAC.Core.Utils modules
#from DIRAC.Core.Utilities import *
from DIRAC.Core.Utilities.Network import getFQDN
import DIRAC.Core.Utilities.ExitCallback as ExitCallback
from DIRAC.Core.Utilities.ReturnValues import S_OK, S_ERROR

# Logger
from DIRAC.FrameworkSystem.Client.Logger import gLogger

# Configuration client
from DIRAC.ConfigurationSystem.Client.Config import gConfig

# Some Defaults if not present in the configuration:
# _siteName is the fallback used by siteName() when /LocalSite/Site is unset
FQDN = getFQDN()
if len( FQDN.split( '.' ) ) > 2 :
  # Use the last component of the FQDN as country code if there are more than 2 components
  _siteName = 'DIRAC.Client.%s' % FQDN.split( '.' )[-1]
else:
  # else use local as country code
  _siteName = 'DIRAC.Client.local'

# cache for siteName(); False means "not yet looked up"
__siteName = False
# # Update DErrno with the extensions errors
# from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader
# from DIRAC.ConfigurationSystem.Client.Helpers import CSGlobals
# allExtensions = CSGlobals.getCSExtensions()
#
# # Update for each extension. Careful to conflict :-)
# for extension in allExtensions:
# ol = ObjectLoader( baseModules = ["%sDIRAC" % extension] )
# extraErrorModule = ol.loadModule( 'Core.Utilities.DErrno' )
# if extraErrorModule['OK']:
# extraErrorModule = extraErrorModule['Value']
#
# # The next 3 dictionary MUST be present for consistency
#
# # Global name of errors
# DErrno.__dict__.update( extraErrorModule.extra_dErrName )
# # Dictionary with the error codes
# DErrno.dErrorCode.update( extraErrorModule.extra_dErrorCode )
# # Error description string
# DErrno.dStrError.update( extraErrorModule.extra_dStrError )
#
# # extra_compatErrorString is optional
# for err in getattr( extraErrorModule, 'extra_compatErrorString', [] ) :
# DErrno.compatErrorString.setdefault( err, [] ).extend( extraErrorModule.extra_compatErrorString[err] )
def siteName():
  """
  Return the DIRAC name of the current site.

  The value is read once from the local configuration
  (/LocalSite/Site), falling back to the FQDN-derived default,
  and cached in the module-level __siteName for later calls.
  """
  global __siteName
  if __siteName:
    return __siteName
  __siteName = gConfig.getValue( '/LocalSite/Site', _siteName )
  return __siteName
# Callbacks: install signal handlers so registered exit callbacks also
# run when the process is terminated by a signal
ExitCallback.registerSignals()

# platform detection helpers, re-exported at package level
from DIRAC.Core.Utilities.Platform import getPlatformString, getPlatform, getPlatformTuple
def exit( exitCode = 0 ):
  """
  Run every registered exit callback, then terminate the
  interpreter with the given exit code.
  """
  # give registered callbacks a chance to clean up before exiting
  ExitCallback.execute( exitCode, [] )
  sys.exit( exitCode )
def abort( exitCode, *args, **kwargs ):
  """
  Log a fatal message and kill the process immediately via os._exit,
  bypassing exit callbacks and interpreter cleanup.
  """
  try:
    gLogger.fatal( *args, **kwargs )
  except OSError:
    # a logging failure must never prevent the abort itself
    gLogger.exception( 'Error while executing DIRAC.abort' )
  os._exit( exitCode )
| arrabito/DIRAC | __init__.py | Python | gpl-3.0 | 5,925 | [
"DIRAC"
] | 707974fec48bc8d2d52ce875c2263412613d20c27a31557fe5d1b17b5d46038e |
from matplotlib import pyplot as plt
from skimage.feature import blob_dog, blob_log, blob_doh
from math import sqrt
from skimage.color import rgb2gray
from PIL import Image, ImageOps
import numpy as np

# Load the cheque image and invert it so that dark ink appears as bright
# blobs on a dark background, which is what the blob detectors look for.
im = Image.open("assets/mycheck.jpg", 'r')
im = ImageOps.invert(im)
width, height = im.size
pixel_values = np.array(np.asarray(list(im.getdata())), dtype=np.uint8)
image = pixel_values.reshape((height, width, 3))
image_gray = rgb2gray(image)

# Bug fix: the original passed an undefined name 'pixel_matrix' to the
# detectors (NameError); they operate on the grayscale image.
blobs_log = blob_log(image_gray, max_sigma=60, num_sigma=10, threshold=.1)
# Compute radii in the 3rd column (LoG returns sigma; radius = sigma*sqrt(2)).
blobs_log[:, 2] = blobs_log[:, 2] * sqrt(2)

blobs_dog = blob_dog(image_gray, max_sigma=30, threshold=.1)
blobs_dog[:, 2] = blobs_dog[:, 2] * sqrt(2)

# blob_doh's third column already approximates the radius, so no scaling.
blobs_doh = blob_doh(image_gray, max_sigma=30, threshold=.01)

blobs_list = [blobs_log, blobs_dog, blobs_doh]
colors = ['yellow', 'lime', 'red']
titles = ['Laplacian of Gaussian', 'Difference of Gaussian',
          'Determinant of Hessian']
sequence = zip(blobs_list, colors, titles)

# one panel per detector, with detected blobs drawn as circles
fig, axes = plt.subplots(1, 3, sharex=True, sharey=True,
                         subplot_kw={'adjustable': 'box-forced'})
axes = axes.ravel()
for blobs, color, title in sequence:
    ax = axes[0]
    axes = axes[1:]
    ax.set_title(title)
    ax.imshow(image_gray, interpolation='nearest')
    for blob in blobs:
        y, x, r = blob
        c = plt.Circle((x, y), r, color=color, linewidth=2, fill=False)
        ax.add_patch(c)

plt.show()
| avicorp/firstLook | src/dataManipulation/checkLayout.py | Python | apache-2.0 | 1,423 | [
"Gaussian"
] | 2e873594b8be08f5fecfbde0b7a3f8c2df2fadffb8011ff4ab42fac5ea4abddb |
###########################################################################
#
# This program is part of Zenoss Core, an open source monitoring platform.
# Copyright (C) 2008-2010, Zenoss Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2, or (at your
# option) any later version, as published by the Free Software Foundation.
#
# For complete information please visit: http://www.zenoss.com/oss/
#
###########################################################################
__doc__="Define the rpc_request structure."
from pysamba.library import *
class rpc_request(Structure): pass
class dcerpc_pipe(Structure): pass
class GUID(Structure):
_fields_ = [
]
class DATA_BLOB(Structure):
_fields_ = [
('data', POINTER(uint8_t)),
('length', size_t),
]
class ndr(Structure):
_fields_ = [
('table', c_void_p), # lie: POINTER(dcerpc_interface_table)
('opnum', uint32_t),
('struct_ptr', c_void_p),
('mem_ctx', c_void_p), # lie: POINTER(TALLOC_CTX)
]
class async(Structure):
_fields_ = [
('callback', CFUNCTYPE(None, POINTER(rpc_request))),
('private', c_void_p),
]
# Attach the field layout to the forward-declared rpc_request structure.
# The names and order appear to mirror Samba's struct rpc_request — confirm
# against the matching Samba headers. Pointer-typed members that are never
# dereferenced from Python are declared as c_void_p (marked 'lie' above).
rpc_request._fields_ = [
    ('next', POINTER(rpc_request)),
    ('prev', POINTER(rpc_request)),
    ('p', POINTER(dcerpc_pipe)),
    ('status', NTSTATUS),
    ('call_id', uint32_t),
    ('state', enum),
    ('payload', DATA_BLOB),
    ('flags', uint32_t),
    ('fault_code', uint32_t),
    ('recv_handler', c_void_p), # lie
    ('object', POINTER(GUID)),
    ('opnum', uint16_t),
    ('request_data', DATA_BLOB),
    ('async_call', BOOL),
    ('ndr', ndr),
    ('async', async),
    ]
| NetNow/wmi-samba | pysamba/rpc/rpc_request.py | Python | gpl-2.0 | 1,782 | [
"VisIt"
] | 061a50d7de3946748e6456fab862622b5e7691fa4f9f86e281515941adbb0a81 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.